code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9
values | license stringclasses 15
values | size int32 3 1.05M |
|---|---|---|---|---|---|
using System;
using System.Xml;
namespace WinSCP
{
/// <summary>
/// Flags that control how log-reading operations react to failures recorded in the session log.
/// </summary>
[Flags]
internal enum LogReadFlags
{
// When set, a failure found while reading the log is surfaced as an exception
// instead of being skipped over.
ThrowFailures = 0x01
}
/// <summary>
/// Base class for readers that traverse the XML session log produced by WinSCP.
/// Wraps an <see cref="XmlReader"/> supplied by a subclass and adds element-matching
/// helpers that are scoped to the session's XML namespace.
/// </summary>
internal abstract class CustomLogReader : IDisposable
{
/// <summary>The session whose log is being read.</summary>
public Session Session { get; private set; }
// The following properties mirror the state of the underlying XmlReader
// at its current position.
public XmlNodeType NodeType { get { return Reader.NodeType; } }
public string NamespaceURI { get { return Reader.NamespaceURI; } }
public string LocalName { get { return Reader.LocalName; } }
public bool IsEmptyElement { get { return Reader.IsEmptyElement; } }
public int Depth { get { return Reader.Depth; } }
public string Value { get { return Reader.Value; } }
/// <summary>Underlying XML reader positioned within the session log; provided by subclasses.</summary>
internal abstract XmlReader Reader { get; }
/// <summary>Advances to the next node; returns false when there is nothing more to read.</summary>
public abstract bool Read(LogReadFlags flags);
protected CustomLogReader(Session session)
{
Session = session;
}
// Base class owns no resources; subclasses override to release their reader.
public virtual void Dispose()
{
}
/// <summary>True when positioned on an element node in the session's namespace.</summary>
public bool IsElement()
{
return
(NodeType == XmlNodeType.Element) &&
(NamespaceURI == Session.Namespace);
}
/// <summary>True when positioned on an element with the given local name (namespace-checked).</summary>
public bool IsElement(string localName)
{
return
IsElement() &&
(LocalName == localName);
}
/// <summary>True when positioned on a matching element that has content (is not self-closing).</summary>
public bool IsNonEmptyElement(string localName)
{
return
IsElement(localName) &&
!IsEmptyElement;
}
/// <summary>
/// If positioned on a self-closing element with the given name, retrieves its "value"
/// attribute into <paramref name="value"/>. Returns false (and null value) otherwise,
/// or when the attribute is absent.
/// </summary>
public bool GetEmptyElementValue(string localName, out string value)
{
bool result =
IsElement(localName) &&
IsEmptyElement;
if (result)
{
value = GetAttribute("value");
result = (value != null);
}
else
{
value = null;
}
return result;
}
/// <summary>True when positioned on the end tag of the named element in the session's namespace.</summary>
public bool IsEndElement(string localName)
{
return
(NodeType == XmlNodeType.EndElement) &&
(NamespaceURI == Session.Namespace) &&
(LocalName == localName);
}
/// <summary>
/// Reads forward until a non-empty element with the given name is found.
/// Returns false when the input is exhausted first.
/// </summary>
public bool TryWaitForNonEmptyElement(string localName, LogReadFlags flags)
{
bool result = false;
while (!result && Read(flags))
{
if (IsNonEmptyElement(localName))
{
result = true;
}
}
return result;
}
/// <summary>
/// Reads forward until a non-empty element with the given name is found;
/// throws a session-local "element not found" exception when the input ends first.
/// </summary>
public void WaitForNonEmptyElement(string localName, LogReadFlags flags)
{
if (!TryWaitForNonEmptyElement(localName, flags))
{
throw Session.Logger.WriteException(SessionLocalException.CreateElementNotFound(Session, localName));
}
}
/// <summary>
/// Reads forward until a self-closing element with the given name is found.
/// Returns false when the input is exhausted first.
/// </summary>
public bool TryWaitForEmptyElement(string localName, LogReadFlags flags)
{
bool result = false;
while (!result && Read(flags))
{
if (IsElement(localName) &&
IsEmptyElement)
{
result = true;
}
}
return result;
}
/// <summary>Creates a reader scoped to the element this reader is currently positioned on.</summary>
public ElementLogReader CreateLogReader()
{
return new ElementLogReader(this);
}
/// <summary>Waits for the named non-empty element, then returns a reader scoped to it.</summary>
public ElementLogReader WaitForNonEmptyElementAndCreateLogReader(string localName, LogReadFlags flags)
{
WaitForNonEmptyElement(localName, flags);
return CreateLogReader();
}
/// <summary>Convenience wrapper: waits for a "group" element, raising failures along the way.</summary>
public ElementLogReader WaitForGroupAndCreateLogReader()
{
return WaitForNonEmptyElementAndCreateLogReader("group", LogReadFlags.ThrowFailures);
}
/// <summary>Returns the named attribute of the current element, or null when absent.</summary>
public string GetAttribute(string name)
{
return Reader.GetAttribute(name);
}
}
}
| danelkhen/fsync | lib-src/winscp/dotnet/internal/CustomLogReader.cs | C# | apache-2.0 | 3,834 |
/* Copyright 2017 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/core/kernels/dataset.h"
#include "tensorflow/core/common_runtime/function.h"
#include "tensorflow/core/framework/partial_tensor_shape.h"
#include "tensorflow/core/framework/tensor.h"
#include "tensorflow/core/lib/random/random.h"
#include "tensorflow/core/kernels/captured_function.h"
namespace tensorflow {
namespace {
// See documentation in ../ops/dataset_ops.cc for a high-level
// description of the following op.
// Kernel implementing the FlatMapDataset op: maps a function `f` over each
// element of the input dataset, where `f` itself returns a dataset, and
// flattens the resulting datasets into a single output stream.
class FlatMapDatasetOp : public UnaryDatasetOpKernel {
 public:
explicit FlatMapDatasetOp(OpKernelConstruction* ctx)
: UnaryDatasetOpKernel(ctx),
graph_def_version_(ctx->graph_def_version()) {
OP_REQUIRES_OK(ctx, ctx->GetAttr("f", &func_));
OP_REQUIRES_OK(ctx, ctx->GetAttr("output_types", &output_types_));
OP_REQUIRES_OK(ctx, ctx->GetAttr("output_shapes", &output_shapes_));
}
// Builds the Dataset object, capturing any "other_arguments" inputs so they
// can be passed to `f` alongside each input element.
void MakeDataset(OpKernelContext* ctx, DatasetBase* input,
DatasetBase** output) override {
OpInputList inputs;
OP_REQUIRES_OK(ctx, ctx->input_list("other_arguments", &inputs));
std::vector<Tensor> other_arguments;
other_arguments.reserve(inputs.size());
for (const Tensor& t : inputs) {
other_arguments.push_back(t);
}
std::unique_ptr<CapturedFunction> captured_func;
OP_REQUIRES_OK(ctx, CapturedFunction::Create(ctx, func_, graph_def_version_,
std::move(other_arguments),
&captured_func));
*output = new Dataset(input, std::move(captured_func), output_types_,
output_shapes_);
}
 private:
class Dataset : public DatasetBase {
 public:
Dataset(const DatasetBase* input,
std::unique_ptr<CapturedFunction> captured_func,
const DataTypeVector& output_types,
const std::vector<PartialTensorShape>& output_shapes)
: input_(input),
captured_func_(std::move(captured_func)),
output_types_(output_types),
output_shapes_(output_shapes) {
// Take a reference on the input dataset; released in the destructor.
input_->Ref();
}
~Dataset() override { input_->Unref(); }
std::unique_ptr<IteratorBase> MakeIterator(
const string& prefix) const override {
return std::unique_ptr<IteratorBase>(
new Iterator({this, strings::StrCat(prefix, "::FlatMap")}));
}
const DataTypeVector& output_dtypes() const override {
return output_types_;
}
const std::vector<PartialTensorShape>& output_shapes() const override {
return output_shapes_;
}
string DebugString() override { return "FlatMapDatasetOp::Dataset"; }
 private:
class Iterator : public DatasetIterator<Dataset> {
 public:
explicit Iterator(const Params& params)
: DatasetIterator<Dataset>(params),
input_impl_(params.dataset->input_->MakeIterator(params.prefix)) {}
// Produces the next flattened element. Drains the iterator over the
// dataset returned by `f` for the current input element before fetching
// the next input element.
Status GetNextInternal(IteratorContext* ctx,
std::vector<Tensor>* out_tensors,
bool* end_of_sequence) override {
mutex_lock l(mu_);
do {
if (current_element_iterator_) {
// We are currently processing a mapped element, so try to get the
// next subelement.
bool end_of_element;
TF_RETURN_IF_ERROR(current_element_iterator_->GetNext(
ctx, out_tensors, &end_of_element));
if (!end_of_element) {
// Produce the subelement as output.
*end_of_sequence = false;
return Status::OK();
}
// We have reached the end of the current element, so maybe move on
// to the next element.
current_element_iterator_.reset();
}
// Get the next element from the input dataset.
std::vector<Tensor> args;
TF_RETURN_IF_ERROR(input_impl_->GetNext(ctx, &args, end_of_sequence));
if (*end_of_sequence) {
return Status::OK();
}
FunctionLibraryRuntime::Options opts;
opts.runner = ctx->runner();
// Choose a step ID that is guaranteed not to clash with any
// Session-generated step ID. DirectSession only generates
// non-negative step IDs (contiguous, starting from 0), and
// MasterSession generates 56-bit random step IDs whose MSB
// is always 0, so a negative random step ID should suffice.
opts.step_id = -std::abs(static_cast<int64>(random::New64()));
ScopedStepContainer step_container(
opts.step_id, [this, ctx](const string& name) {
dataset()
->captured_func_->resource_manager()
->Cleanup(name)
.IgnoreError();
});
opts.step_container = &step_container;
std::vector<Tensor> return_values;
TF_RETURN_IF_ERROR(dataset()->captured_func_->Run(
opts, args, &return_values, prefix()));
if (!(return_values.size() == 1 &&
return_values[0].dtype() == DT_RESOURCE &&
TensorShapeUtils::IsScalar(return_values[0].shape()))) {
return errors::InvalidArgument(
"`f` must return a single scalar of dtype DT_RESOURCE.");
}
// Retrieve the dataset that was created in `f`.
DatasetBase* returned_dataset;
const ResourceHandle& dataset_resource =
return_values[0].scalar<ResourceHandle>()();
// NOTE(mrry): We cannot use the core `LookupResource()` or
// `DeleteResource()` functions, because we have an
// `IteratorContext*` and not an `OpKernelContext*`, so we
// replicate the necessary functionality here.
auto type_index = MakeTypeIndex<DatasetBase>();
if (type_index.hash_code() != dataset_resource.hash_code()) {
return errors::InvalidArgument(
"`f` must return a Dataset resource.");
}
TF_RETURN_IF_ERROR(
dataset()->captured_func_->resource_manager()->Lookup(
dataset_resource.container(), dataset_resource.name(),
&returned_dataset));
core::ScopedUnref unref_dataset(returned_dataset);
// Create an iterator for the dataset that was returned by
// `f`. This transfers ownership of the dataset to the
// iterator, so we can delete it from the resource manager.
current_element_iterator_ = returned_dataset->MakeIterator(
strings::StrCat(prefix(), "[", element_index_++, "]"));
TF_RETURN_IF_ERROR(
dataset()
->captured_func_->resource_manager()
->Delete<DatasetBase>(dataset_resource.container(),
dataset_resource.name()));
} while (true);
}
 private:
// Guards all iterator state below, so concurrent GetNext calls serialize.
mutex mu_;
// Index of the current input element; used only to build unique iterator prefixes.
size_t element_index_ GUARDED_BY(mu_) = 0;
const std::unique_ptr<IteratorBase> input_impl_ GUARDED_BY(mu_);
// Iterator over the dataset produced by `f` for the current input element;
// null when a new input element needs to be fetched.
std::unique_ptr<IteratorBase> current_element_iterator_ GUARDED_BY(mu_);
};
const DatasetBase* const input_;
const std::unique_ptr<CapturedFunction> captured_func_;
const DataTypeVector output_types_;
const std::vector<PartialTensorShape> output_shapes_;
};
const int graph_def_version_;
DataTypeVector output_types_;
std::vector<PartialTensorShape> output_shapes_;
const NameAttrList* func_;
};
// Register the kernel for CPU; dataset ops run on the host.
REGISTER_KERNEL_BUILDER(Name("FlatMapDataset").Device(DEVICE_CPU),
FlatMapDatasetOp);
} // namespace
} // namespace tensorflow
| mavenlin/tensorflow | tensorflow/core/kernels/flat_map_dataset_op.cc | C++ | apache-2.0 | 8,343 |
// Copyright (c) CodeSmith Tools, LLC. All rights reserved.
// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.IO;
using Ionic.Zip;
using Microsoft.Build.BuildEngine;
namespace Generator.QuickStart {
/// <summary>
/// Creates a web-application quick-start project from the WebApplication.zip template and,
/// when data services are enabled, adds a WCF data service named after the database.
/// </summary>
public class WebApplicationCreator : ProjectCreator {
    public WebApplicationCreator(ProjectBuilderSettings projectBuilder) : base(projectBuilder) {}

    /// <summary>Template archive this creator expands.</summary>
    public override string ProjectTemplateFile { get { return "WebApplication.zip"; } }

    /// <summary>
    /// Extracts the data-service template into the project, renames the service files after
    /// the database, registers them with the MSBuild project, and expands template variables.
    /// </summary>
    protected override void AddFiles() {
        if (!ProjectBuilder.IncludeDataServices)
            return;

        string directoryName = ProjectDirectory;
        string path = Path.Combine(ProjectBuilder.ZipFileFolder, "DataServiceApplication.zip");
        // Extract without overwriting so user edits to already-present files survive.
        using (var zip = new ZipFile(path))
            zip.ExtractAll(directoryName, ExtractExistingFileAction.DoNotOverwrite);

        string dataService = ProjectBuilder.DatabaseName + "DataService.svc";
        string dataServiceClass = ProjectBuilder.DatabaseName + "DataService.svc." + ProjectBuilder.LanguageAppendage;
        string dataServicePath = Path.Combine(directoryName, dataService);
        string dataServiceClassPath = Path.Combine(directoryName, dataServiceClass);
        File.Move(Path.Combine(directoryName, "DataService.svc"), dataServicePath);
        File.Move(Path.Combine(directoryName, "DataService.svc." + ProjectBuilder.LanguageAppendage), dataServiceClassPath);

        Project project = GetProject();
        if (project == null)
            return;

        // Register the service markup as Content and its code-behind as Compile,
        // with the code-behind nested under the markup in Solution Explorer.
        project.AddNewItem("Content", dataService);
        BuildItem serviceClass = project.AddNewItem("Compile", dataServiceClass);
        serviceClass.SetMetadata("DependentUpon", dataService);
        project.Save(ProjectFile.FullName);

        // Expand the VS template variable in both generated files.
        ReplaceSafeItemName(dataServicePath, dataService);
        ReplaceSafeItemName(dataServiceClassPath, dataService);
    }

    /// <summary>Replaces the $safeitemname$ template variable in <paramref name="filePath"/>
    /// with the service name (file name without extension).</summary>
    private static void ReplaceSafeItemName(string filePath, string dataService) {
        string content = File.ReadAllText(filePath);
        content = content.Replace("$safeitemname$", Path.GetFileNameWithoutExtension(dataService));
        File.WriteAllText(filePath, content);
    }

    /// <summary>Adds data-project and data-context variables to the base replacements.</summary>
    protected override string ReplaceFileVariables(string content, bool isCSP) {
        return base.ReplaceFileVariables(content, isCSP).Replace("$entityNamespace$", ProjectBuilder.DataProjectName).Replace("$datacontext$", ProjectBuilder.DataContextName);
    }
}
}
| codesmithtools/SchemaHelper | Source/QuickStart/Creators/WebApplicationCreator.cs | C# | apache-2.0 | 2,717 |
package com.orcchg.dev.maxa.rxmusic.data.injection.local.music.artist;
import dagger.Module;
/**
 * Dagger module for artist-related local music data migrations.
 * Currently declares no bindings; it exists so the component graph can be
 * extended with artist migration providers later.
 */
@Module
public class ArtistMigrationModule {
}
| orcchg/RxMusic | app/src/main/java/com/orcchg/dev/maxa/rxmusic/data/injection/local/music/artist/ArtistMigrationModule.java | Java | apache-2.0 | 142 |
//
// Created by Dawid Drozd aka Gelldur on 7/11/16.
//
#include "Layout.h"
#include <log.h>
#ifndef PLATFORM_IOS
// Default (non-iOS) implementation. Layout logic is not implemented for this
// platform yet, so just emit a warning pointing at this location.
void Layout::layout()
{
WLOG("Function %s not implemented: %s:%d", __func__, __FILE__, __LINE__);
}
#endif
Layout& Layout::getNullObject()
{
	// Single shared fall-back instance, lazily constructed on first use.
	static NullLayout fallback;
	return fallback;
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// NullLayout
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Null-object implementation: intentionally does nothing beyond warning that
// the call was swallowed.
void NullLayout::layout()
{
WLOG("NullObject ignoring call: %s in: %s:%d", __func__, __FILE__, __LINE__);
}
| gelldur/DexodeEngine | src/component/view/Layout.cpp | C++ | apache-2.0 | 684 |
package org.apache.maven.artifact.resolver;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.versioning.VersionRange;
import org.codehaus.plexus.logging.Logger;
/**
* Send resolution warning events to the warning log.
*
* @author <a href="mailto:brett@apache.org">Brett Porter</a>
* @version $Id: WarningResolutionListener.java 770390 2009-04-30 18:49:42Z jvanzyl $
*/
public class WarningResolutionListener
    implements ResolutionListener
{
    /** Logger for resolution warnings; assigned once at construction, hence final. */
    private final Logger logger;

    public WarningResolutionListener( Logger logger )
    {
        this.logger = logger;
    }

    // All callbacks below are intentional no-ops in this implementation;
    // the listener currently reports nothing for these events.

    public void testArtifact( Artifact node )
    {
    }

    public void startProcessChildren( Artifact artifact )
    {
    }

    public void endProcessChildren( Artifact artifact )
    {
    }

    public void includeArtifact( Artifact artifact )
    {
    }

    public void omitForNearer( Artifact omitted,
                               Artifact kept )
    {
    }

    public void omitForCycle( Artifact omitted )
    {
    }

    public void updateScopeCurrentPom( Artifact artifact,
                                       String scope )
    {
    }

    public void updateScope( Artifact artifact,
                             String scope )
    {
    }

    public void manageArtifact( Artifact artifact,
                                Artifact replacement )
    {
    }

    public void selectVersionFromRange( Artifact artifact )
    {
    }

    public void restrictRange( Artifact artifact,
                               Artifact replacement,
                               VersionRange newRange )
    {
    }
}
| sonatype/maven-demo | maven-compat/src/main/java/org/apache/maven/artifact/resolver/WarningResolutionListener.java | Java | apache-2.0 | 2,441 |
package android.util;
/*
* #%L
* Matos
* $Id:$
* $HeadURL:$
* %%
* Copyright (C) 2010 - 2014 Orange SA
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
// Analysis stub of android.util.SparseArray. The @FieldGet/@FieldSet/@Code
// annotations model data flow through a single synthetic "content" field for
// the Matos static analyser; method bodies deliberately return defaults.
@com.francetelecom.rd.stubs.annotation.ClassDone(0)
public class SparseArray<E>
implements java.lang.Cloneable
{
// Constructors
public SparseArray(){
}
public SparseArray(int arg1){
}
// Methods
@com.francetelecom.rd.stubs.annotation.FieldGet("content")
public E get(int arg1){
return null;
}
@com.francetelecom.rd.stubs.annotation.Code("return (com.francetelecom.rd.stubs.Generator.booleanValue() ? content : arg2);")
public E get(int arg1, E arg2){
return null;
}
public void put(int arg1, @com.francetelecom.rd.stubs.annotation.FieldSet("content") E arg2){
}
public void append(int arg1, @com.francetelecom.rd.stubs.annotation.FieldSet("content") E arg2){
}
public SparseArray<E> clone(){
return (SparseArray) null;
}
public void clear(){
}
public int size(){
return 0;
}
public void remove(int arg1){
}
public void delete(int arg1){
}
public int keyAt(int arg1){
return 0;
}
@com.francetelecom.rd.stubs.annotation.FieldGet("content")
public E valueAt(int arg1){
return null;
}
public int indexOfKey(int arg1){
return 0;
}
public int indexOfValue(E arg1){
return 0;
}
public void setValueAt(int arg1, @com.francetelecom.rd.stubs.annotation.FieldSet("content") E arg2){
}
public void removeAt(int arg1){
}
}
| Orange-OpenSource/matos-profiles | matos-android/src/main/java/android/util/SparseArray.java | Java | apache-2.0 | 2,012 |
<?php
/**
* wallee SDK
*
* This library allows to interact with the wallee payment service.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace Wallee\Sdk\Service;
use Wallee\Sdk\ApiClient;
use Wallee\Sdk\ApiException;
use Wallee\Sdk\ApiResponse;
use Wallee\Sdk\Http\HttpRequest;
use Wallee\Sdk\ObjectSerializer;
/**
* PaymentMethodConfigurationService service
*
* @category Class
* @package Wallee\Sdk
* @author customweb GmbH
* @license http://www.apache.org/licenses/LICENSE-2.0 Apache License v2
*/
class PaymentMethodConfigurationService {

	/**
	 * The API client instance.
	 *
	 * @var ApiClient
	 */
	private $apiClient;

	/**
	 * Constructor.
	 *
	 * @param ApiClient $apiClient the api client
	 */
	public function __construct(ApiClient $apiClient) {
		if (is_null($apiClient)) {
			throw new \InvalidArgumentException('The api client is required.');
		}

		$this->apiClient = $apiClient;
	}

	/**
	 * Returns the API client instance.
	 *
	 * @return ApiClient
	 */
	public function getApiClient() {
		return $this->apiClient;
	}


	/**
	 * Operation count
	 *
	 * Count
	 *
	 * @param int $space_id  (required)
	 * @param \Wallee\Sdk\Model\EntityQueryFilter $filter The filter which restricts the entities which are used to calculate the count. (optional)
	 * @throws \Wallee\Sdk\ApiException
	 * @throws \Wallee\Sdk\VersioningException
	 * @throws \Wallee\Sdk\Http\ConnectionException
	 * @return int
	 */
	public function count($space_id, $filter = null) {
		return $this->countWithHttpInfo($space_id, $filter)->getData();
	}

	/**
	 * Operation countWithHttpInfo
	 *
	 * Count
	 *
	 * @param int $space_id  (required)
	 * @param \Wallee\Sdk\Model\EntityQueryFilter $filter The filter which restricts the entities which are used to calculate the count. (optional)
	 * @throws \Wallee\Sdk\ApiException
	 * @throws \Wallee\Sdk\VersioningException
	 * @throws \Wallee\Sdk\Http\ConnectionException
	 * @return ApiResponse
	 */
	public function countWithHttpInfo($space_id, $filter = null) {
		// verify the required parameter 'space_id' is set
		if (is_null($space_id)) {
			throw new \InvalidArgumentException('Missing the required parameter $space_id when calling count');
		}
		// header params
		$headerParams = [];
		$headerAccept = $this->apiClient->selectHeaderAccept(['application/json;charset=utf-8']);
		if (!is_null($headerAccept)) {
			$headerParams[HttpRequest::HEADER_KEY_ACCEPT] = $headerAccept;
		}
		$headerParams[HttpRequest::HEADER_KEY_CONTENT_TYPE] = $this->apiClient->selectHeaderContentType(['application/json;charset=utf-8']);

		// query params
		$queryParams = [];
		if (!is_null($space_id)) {
			$queryParams['spaceId'] = $this->apiClient->getSerializer()->toQueryValue($space_id);
		}

		// path params
		$resourcePath = '/payment-method-configuration/count';
		// default format to json
		$resourcePath = str_replace('{format}', 'json', $resourcePath);

		// form params
		$formParams = [];
		// body params
		$tempBody = null;
		if (isset($filter)) {
			$tempBody = $filter;
		}

		// for model (json/xml)
		$httpBody = '';
		if (isset($tempBody)) {
			$httpBody = $tempBody; // $tempBody is the method argument, if present
		} elseif (!empty($formParams)) {
			$httpBody = $formParams; // for HTTP post (form)
		}
		// make the API Call
		// On non-2xx responses callApi throws ApiException; the handler below
		// deserializes the error payload into a typed model before rethrowing.
		try {
			$this->apiClient->setConnectionTimeout(ApiClient::CONNECTION_TIMEOUT);
			$response = $this->apiClient->callApi(
				$resourcePath,
				'POST',
				$queryParams,
				$httpBody,
				$headerParams,
				'int',
				'/payment-method-configuration/count'
			);
			return new ApiResponse($response->getStatusCode(), $response->getHeaders(), $this->apiClient->getSerializer()->deserialize($response->getData(), 'int', $response->getHeaders()));
		} catch (ApiException $e) {
			switch ($e->getCode()) {
				case 200:
					$data = ObjectSerializer::deserialize(
						$e->getResponseBody(),
						'int',
						$e->getResponseHeaders()
					);
					$e->setResponseObject($data);
					break;
				case 442:
					$data = ObjectSerializer::deserialize(
						$e->getResponseBody(),
						'\Wallee\Sdk\Model\ClientError',
						$e->getResponseHeaders()
					);
					$e->setResponseObject($data);
					break;
				case 542:
					$data = ObjectSerializer::deserialize(
						$e->getResponseBody(),
						'\Wallee\Sdk\Model\ServerError',
						$e->getResponseHeaders()
					);
					$e->setResponseObject($data);
					break;
			}
			throw $e;
		}
	}

	/**
	 * Operation read
	 *
	 * Read
	 *
	 * @param int $space_id  (required)
	 * @param int $id The id of the payment method configuration which should be returned. (required)
	 * @throws \Wallee\Sdk\ApiException
	 * @throws \Wallee\Sdk\VersioningException
	 * @throws \Wallee\Sdk\Http\ConnectionException
	 * @return \Wallee\Sdk\Model\PaymentMethodConfiguration
	 */
	public function read($space_id, $id) {
		return $this->readWithHttpInfo($space_id, $id)->getData();
	}

	/**
	 * Operation readWithHttpInfo
	 *
	 * Read
	 *
	 * @param int $space_id  (required)
	 * @param int $id The id of the payment method configuration which should be returned. (required)
	 * @throws \Wallee\Sdk\ApiException
	 * @throws \Wallee\Sdk\VersioningException
	 * @throws \Wallee\Sdk\Http\ConnectionException
	 * @return ApiResponse
	 */
	public function readWithHttpInfo($space_id, $id) {
		// verify the required parameter 'space_id' is set
		if (is_null($space_id)) {
			throw new \InvalidArgumentException('Missing the required parameter $space_id when calling read');
		}
		// verify the required parameter 'id' is set
		if (is_null($id)) {
			throw new \InvalidArgumentException('Missing the required parameter $id when calling read');
		}
		// header params
		$headerParams = [];
		$headerAccept = $this->apiClient->selectHeaderAccept(['application/json;charset=utf-8']);
		if (!is_null($headerAccept)) {
			$headerParams[HttpRequest::HEADER_KEY_ACCEPT] = $headerAccept;
		}
		$headerParams[HttpRequest::HEADER_KEY_CONTENT_TYPE] = $this->apiClient->selectHeaderContentType(['*/*']);

		// query params
		$queryParams = [];
		if (!is_null($space_id)) {
			$queryParams['spaceId'] = $this->apiClient->getSerializer()->toQueryValue($space_id);
		}
		if (!is_null($id)) {
			$queryParams['id'] = $this->apiClient->getSerializer()->toQueryValue($id);
		}

		// path params
		$resourcePath = '/payment-method-configuration/read';
		// default format to json
		$resourcePath = str_replace('{format}', 'json', $resourcePath);

		// form params
		$formParams = [];

		// for model (json/xml)
		// NOTE(review): $tempBody is never assigned in this GET operation, so the
		// isset() branch below is dead code — a generated-code artifact kept for
		// symmetry with the POST operations.
		$httpBody = '';
		if (isset($tempBody)) {
			$httpBody = $tempBody; // $tempBody is the method argument, if present
		} elseif (!empty($formParams)) {
			$httpBody = $formParams; // for HTTP post (form)
		}
		// make the API Call
		try {
			$this->apiClient->setConnectionTimeout(ApiClient::CONNECTION_TIMEOUT);
			$response = $this->apiClient->callApi(
				$resourcePath,
				'GET',
				$queryParams,
				$httpBody,
				$headerParams,
				'\Wallee\Sdk\Model\PaymentMethodConfiguration',
				'/payment-method-configuration/read'
			);
			return new ApiResponse($response->getStatusCode(), $response->getHeaders(), $this->apiClient->getSerializer()->deserialize($response->getData(), '\Wallee\Sdk\Model\PaymentMethodConfiguration', $response->getHeaders()));
		} catch (ApiException $e) {
			switch ($e->getCode()) {
				case 200:
					$data = ObjectSerializer::deserialize(
						$e->getResponseBody(),
						'\Wallee\Sdk\Model\PaymentMethodConfiguration',
						$e->getResponseHeaders()
					);
					$e->setResponseObject($data);
					break;
				case 442:
					$data = ObjectSerializer::deserialize(
						$e->getResponseBody(),
						'\Wallee\Sdk\Model\ClientError',
						$e->getResponseHeaders()
					);
					$e->setResponseObject($data);
					break;
				case 542:
					$data = ObjectSerializer::deserialize(
						$e->getResponseBody(),
						'\Wallee\Sdk\Model\ServerError',
						$e->getResponseHeaders()
					);
					$e->setResponseObject($data);
					break;
			}
			throw $e;
		}
	}

	/**
	 * Operation search
	 *
	 * Search
	 *
	 * @param int $space_id  (required)
	 * @param \Wallee\Sdk\Model\EntityQuery $query The query restricts the payment method configuration which are returned by the search. (required)
	 * @throws \Wallee\Sdk\ApiException
	 * @throws \Wallee\Sdk\VersioningException
	 * @throws \Wallee\Sdk\Http\ConnectionException
	 * @return \Wallee\Sdk\Model\PaymentMethodConfiguration[]
	 */
	public function search($space_id, $query) {
		return $this->searchWithHttpInfo($space_id, $query)->getData();
	}

	/**
	 * Operation searchWithHttpInfo
	 *
	 * Search
	 *
	 * @param int $space_id  (required)
	 * @param \Wallee\Sdk\Model\EntityQuery $query The query restricts the payment method configuration which are returned by the search. (required)
	 * @throws \Wallee\Sdk\ApiException
	 * @throws \Wallee\Sdk\VersioningException
	 * @throws \Wallee\Sdk\Http\ConnectionException
	 * @return ApiResponse
	 */
	public function searchWithHttpInfo($space_id, $query) {
		// verify the required parameter 'space_id' is set
		if (is_null($space_id)) {
			throw new \InvalidArgumentException('Missing the required parameter $space_id when calling search');
		}
		// verify the required parameter 'query' is set
		if (is_null($query)) {
			throw new \InvalidArgumentException('Missing the required parameter $query when calling search');
		}
		// header params
		$headerParams = [];
		$headerAccept = $this->apiClient->selectHeaderAccept(['application/json;charset=utf-8']);
		if (!is_null($headerAccept)) {
			$headerParams[HttpRequest::HEADER_KEY_ACCEPT] = $headerAccept;
		}
		$headerParams[HttpRequest::HEADER_KEY_CONTENT_TYPE] = $this->apiClient->selectHeaderContentType(['application/json;charset=utf-8']);

		// query params
		$queryParams = [];
		if (!is_null($space_id)) {
			$queryParams['spaceId'] = $this->apiClient->getSerializer()->toQueryValue($space_id);
		}

		// path params
		$resourcePath = '/payment-method-configuration/search';
		// default format to json
		$resourcePath = str_replace('{format}', 'json', $resourcePath);

		// form params
		$formParams = [];
		// body params
		$tempBody = null;
		if (isset($query)) {
			$tempBody = $query;
		}

		// for model (json/xml)
		$httpBody = '';
		if (isset($tempBody)) {
			$httpBody = $tempBody; // $tempBody is the method argument, if present
		} elseif (!empty($formParams)) {
			$httpBody = $formParams; // for HTTP post (form)
		}
		// make the API Call
		try {
			$this->apiClient->setConnectionTimeout(ApiClient::CONNECTION_TIMEOUT);
			$response = $this->apiClient->callApi(
				$resourcePath,
				'POST',
				$queryParams,
				$httpBody,
				$headerParams,
				'\Wallee\Sdk\Model\PaymentMethodConfiguration[]',
				'/payment-method-configuration/search'
			);
			return new ApiResponse($response->getStatusCode(), $response->getHeaders(), $this->apiClient->getSerializer()->deserialize($response->getData(), '\Wallee\Sdk\Model\PaymentMethodConfiguration[]', $response->getHeaders()));
		} catch (ApiException $e) {
			switch ($e->getCode()) {
				case 200:
					$data = ObjectSerializer::deserialize(
						$e->getResponseBody(),
						'\Wallee\Sdk\Model\PaymentMethodConfiguration[]',
						$e->getResponseHeaders()
					);
					$e->setResponseObject($data);
					break;
				case 442:
					$data = ObjectSerializer::deserialize(
						$e->getResponseBody(),
						'\Wallee\Sdk\Model\ClientError',
						$e->getResponseHeaders()
					);
					$e->setResponseObject($data);
					break;
				case 542:
					$data = ObjectSerializer::deserialize(
						$e->getResponseBody(),
						'\Wallee\Sdk\Model\ServerError',
						$e->getResponseHeaders()
					);
					$e->setResponseObject($data);
					break;
			}
			throw $e;
		}
	}

}
| wallee-payment/wallee-php-sdk | lib/Service/PaymentMethodConfigurationService.php | PHP | apache-2.0 | 13,349 |
/*
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Apache License,
* Version 2.0, (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.tle.hibernate.dialect;
import org.hibernate.type.AbstractSingleColumnStandardBasicType;
import org.hibernate.type.descriptor.java.StringTypeDescriptor;
@SuppressWarnings("nls")
// Hibernate basic type mapping a long national-character column to java.lang.String,
// combining the long-nvarchar SQL descriptor with the standard String Java descriptor.
public class LongNStringType extends AbstractSingleColumnStandardBasicType<String> {
  private static final long serialVersionUID = 1L;

  public LongNStringType() {
    super(LongNVarcharTypeDescriptor.INSTANCE, StringTypeDescriptor.INSTANCE);
  }

  @Override
  public String getName() {
    // Registration key under which Hibernate resolves this type.
    return "materialized_clob";
  }
}
| equella/Equella | Source/Plugins/Core/com.equella.serverbase/src/com/tle/hibernate/dialect/LongNStringType.java | Java | apache-2.0 | 1,324 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.collector.core.data;
/**
* @author peng-yongsheng
*/
/**
 * Combines a newly observed value with a previously stored value of the same
 * type, one overload per supported column type. The merge semantics (e.g.
 * replace, add, max) are defined by each implementation — not visible here.
 */
public interface Operation {
    String operate(String newValue, String oldValue);

    Long operate(Long newValue, Long oldValue);

    Double operate(Double newValue, Double oldValue);

    Integer operate(Integer newValue, Integer oldValue);

    Boolean operate(Boolean newValue, Boolean oldValue);

    byte[] operate(byte[] newValue, byte[] oldValue);
}
| hanahmily/sky-walking | apm-collector/apm-collector-core/src/main/java/org/apache/skywalking/apm/collector/core/data/Operation.java | Java | apache-2.0 | 1,257 |
package com.enrique.eventbusarchitecture.secondfragment;
import android.app.Fragment;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.enrique.eventbusarchitecture.R;
import butterknife.ButterKnife;
/**
 * Fragment that displays the second screen; inflates its layout and binds
 * annotated views via ButterKnife.
 */
public class SecondFragment extends Fragment {

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        // attachToRoot is false: the fragment framework attaches the view itself.
        View view = inflater.inflate(R.layout.second_fragment, container, false);
        ButterKnife.inject(this, view);
        return view;
    }
}
| kikoso/Event-Bus-Architecture | app/src/main/java/com/enrique/eventbusarchitecture/secondfragment/SecondFragment.java | Java | apache-2.0 | 601 |
<!DOCTYPE html>
<html lang="en" prefix="og: http://ogp.me/ns#">
<head>
<meta charset="<?php bloginfo('charset'); ?>">
<meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1">
<title><?php wp_title('|',true,'right'); bloginfo('name'); ?></title>
<meta name="viewport" content="width=device-width, initial-scale=1">
<link rel="apple-touch-icon" sizes="180x180" href="<?php bloginfo('stylesheet_directory'); ?>/img/dist/apple-touch-icon.png">
<link rel="icon" type="image/png" href="<?php bloginfo('stylesheet_directory'); ?>/img/dist/favicon-32x32.png" sizes="32x32">
<link rel="icon" type="image/png" href="<?php bloginfo('stylesheet_directory'); ?>/img/dist/favicon-16x16.png" sizes="16x16">
<link rel="manifest" href="<?php bloginfo('stylesheet_directory'); ?>/img/favicon/manifest.json">
<link rel="mask-icon" href="<?php bloginfo('stylesheet_directory'); ?>/img/dist/safari-pinned-tab.svg" color="#5bbad5">
<link rel="shortcut icon" href="<?php bloginfo('stylesheet_directory'); ?>/img/dist/favicon.ico">
<meta name="msapplication-config" content="<?php bloginfo('stylesheet_directory'); ?>/img/favicon/browserconfig.xml">
<meta name="theme-color" content="#ffffff">
<?php get_template_part('partials/seo'); ?>
<link rel="alternate" type="application/rss+xml" title="<?php bloginfo('name'); ?> RSS Feed" href="<?php bloginfo('rss2_url'); ?>" />
<link rel="pingback" href="<?php bloginfo('pingback_url'); ?>" />
<?php wp_head(); ?>
</head>
<body <?php body_class(); ?>>
<!--[if lt IE 9]><p class="chromeframe">You are using an <strong>outdated</strong> browser. Please <a href="http://browsehappy.com/">upgrade your browser</a> to improve your experience.</p><![endif]-->
<section id="main-container">
<!-- start content -->
<header id="header">
<div class="container">
<div id="header-top" class="row">
<div class="col col-6">
<h1 id="site-title">
<a href="<?php echo home_url(); ?>"><?php bloginfo('name'); ?></a>
<nav class="mobile-menu-open u-pointer"><span class="genericon genericon-menu"></span></nav>
</h1>
</div>
<div class="col col-18 only-desktop">
<?php /* Primary navigation. Fixed six stray duplicate </a> closing tags below (was </a></a>). */ ?>
<ul id="menu">
<li class="menu-item"><a href="<?php echo home_url('noticias/'); ?>"><?php echo __('[:es]Noticias[:en]News'); ?></a></li>
<li class="menu-item"><a href="<?php echo home_url('exposiciones/'); ?>"><?php echo __('[:es]Exposiciones[:en]Exhibitions'); ?></a></li>
<li class="menu-item"><a href="<?php echo home_url('obra/'); ?>"><?php echo __('[:es]Obra[:en]Work'); ?></a></li>
<li class="menu-item"><a href="<?php echo home_url('journal/'); ?>"><?php echo __('[:es]Journal[:en]Journal'); ?></a></li>
<li class="menu-item"><a href="<?php echo home_url('publicaciones/'); ?>"><?php echo __('[:es]Publicaciones[:en]Publications'); ?></a></li>
<li class="menu-item"><a href="<?php echo home_url('archivo/'); ?>"><?php echo __('[:es]Archivo[:en]Archive'); ?></a></li>
<li class="menu-item"><a href="<?php echo home_url('info/'); ?>"><?php echo __('[:es]Info[:en]Info'); ?></a></li>
</ul>
</div>
</div>
<div id="header-bottom" class="row font-serif only-desktop">
<div class="col col-18">
<?php echo qtranxf_generateLanguageSelectCode('both'); ?>
</div>
<div class="col col-6">
<?php get_search_form(); ?>
</div>
</div>
<div class="row">
<div class="col col-24 border-bottom"></div>
</div>
</div>
</header> | interglobalvision/gonzalolebrija-com | header.php | PHP | apache-2.0 | 3,706 |
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.vcs.log.ui.actions;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.vcs.log.VcsLog;
import com.intellij.vcs.log.VcsLogDataKeys;
import com.intellij.vcs.log.impl.VcsGoToRefComparator;
import com.intellij.vcs.log.impl.VcsLogManager;
import com.intellij.vcs.log.statistics.VcsLogUsageTriggerCollector;
import com.intellij.vcs.log.ui.VcsLogInternalDataKeys;
import com.intellij.vcs.log.ui.VcsLogUiEx;
import com.intellij.vcs.log.util.VcsLogUtil;
import org.jetbrains.annotations.NotNull;
import java.util.Set;
public class GoToHashOrRefAction extends DumbAwareAction {

  /** Opens the "go to hash or ref" popup for the VCS log UI in the current context. */
  @Override
  public void actionPerformed(@NotNull AnActionEvent e) {
    VcsLogUsageTriggerCollector.triggerUsage(e, this);

    Project project = e.getRequiredData(CommonDataKeys.PROJECT);
    VcsLog vcsLog = e.getRequiredData(VcsLogDataKeys.VCS_LOG);
    VcsLogUiEx ui = e.getRequiredData(VcsLogInternalDataKeys.LOG_UI_EX);
    VcsLogManager manager = e.getRequiredData(VcsLogInternalDataKeys.LOG_MANAGER);

    Set<VirtualFile> roots = VcsLogUtil.getVisibleRoots(ui);
    GoToHashOrRefPopup popup = new GoToHashOrRefPopup(project,
                                                     ui.getDataPack().getRefs(),
                                                     roots,
                                                     vcsLog::jumpToReference,
                                                     ref -> ui.jumpToCommit(ref.getCommitHash(), ref.getRoot()),
                                                     manager.getColorManager(),
                                                     new VcsGoToRefComparator(ui.getDataPack().getLogProviders()));
    popup.show(ui.getTable());
  }

  /** The action is enabled/visible only when a project, a VcsLog and its UI are all present. */
  @Override
  public void update(@NotNull AnActionEvent e) {
    boolean available = e.getProject() != null
                        && e.getData(VcsLogDataKeys.VCS_LOG) != null
                        && e.getData(VcsLogInternalDataKeys.LOG_UI_EX) != null;
    e.getPresentation().setEnabledAndVisible(available);
  }
}
| leafclick/intellij-community | platform/vcs-log/impl/src/com/intellij/vcs/log/ui/actions/GoToHashOrRefAction.java | Java | apache-2.0 | 2,623 |
#include <limits.h>
#include "../src/solution.h"
#include "gtest/gtest.h"
// All characters distinct: must be reported as unique.
TEST(HasOnlyUniqueTest, TrueText) {
    EXPECT_TRUE(HasOnlyUnique("asd"));
}
// A single character is trivially unique.
TEST(HasOnlyUniqueTest, TrueSingleChar) {
    EXPECT_TRUE(HasOnlyUnique("a"));
}
// Edge case: the empty string contains no duplicates.
TEST(HasOnlyUniqueTest, TrueEmpty) {
    EXPECT_TRUE(HasOnlyUnique(""));
}
// "Almafa" contains 'a' more than once, so it is not unique.
TEST(HasOnlyUniqueTest, FalseText) {
    EXPECT_FALSE(HasOnlyUnique("Almafa"));
}
| akoskaaa/l2c | cpp/11-is-unique/test/solution_unittest.cpp | C++ | apache-2.0 | 394 |
<?php
/*
----------------------------------
*| author: yc yc@yuanxu.top
*| website: yuanxu.top
---------------------------------------
*/
defined('IN_IA') or exit('Access Denied');
global $_W,$_GPC;
require_once 'core/inc/core.php';
require_once 'core/inc/define.php';
require_once 'core/inc/user.php';
class Yc_shopModuleSite extends Core {
public function doWebShop() {
    ## Unified web-side entry point for the shop module; dispatches via _exec.
    $this->_exec(__FUNCTION__,true,'shop');
}
public function doMobileIndex() {
    // This action is defined to render the module's feature cover page (mobile side).
    $this->_exec(__FUNCTION__,false);
}
} | snailto/wq | addons/yc_shop/site.php | PHP | apache-2.0 | 604 |
import sys
import click
from solar.core import testing
from solar.core import resource
from solar.system_log import change
from solar.system_log import operations
from solar.system_log import data
from solar.cli.uids_history import get_uid, remember_uid, SOLARUID
@click.group()
def changes():
    # Root click group for the system-log / staged-changes subcommands.
    pass
@changes.command()
def validate():
    """Validate all resources; print each error and exit non-zero on failure."""
    failures = resource.validate_resources()
    if not failures:
        return
    for failed_resource, message in failures:
        print('ERROR: %s: %s' % (failed_resource.name, message))
    sys.exit(1)
@changes.command()
@click.option('-d', default=False, is_flag=True)
def stage(d):
    """List staged changes; with -d also print each item's detail lines."""
    staged = list(change.stage_changes().reverse())
    if not staged:
        click.echo('No changes')
        return
    for entry in staged:
        click.echo(entry)
        if d:
            for detail_line in entry.details:
                click.echo('    ' + detail_line)
@changes.command(name='staged-item')
@click.argument('log_action')
def staged_item(log_action):
    """Show a single staged log item, including its detail lines."""
    entry = data.SL().get(log_action)
    if not entry:
        click.echo('No staged changes for {}'.format(log_action))
        return
    click.echo(entry)
    for detail_line in entry.details:
        click.echo('    ' + detail_line)
@changes.command()
def process():
    # Convert the staged changes into an orchestration task, remember its uid
    # (so later commands can refer to it symbolically) and echo it.
    uid = change.send_to_orchestration()
    remember_uid(uid)
    click.echo(uid)
@changes.command()
@click.argument('uid', type=SOLARUID)
def commit(uid):
    # Mark the orchestration task identified by uid as committed in the system log.
    operations.commit(uid)
@changes.command()
@click.option('-n', default=5)
def history(n):
    """Print the n most recent commited entries, oldest first."""
    entries = list(data.CL().collection(n))
    if not entries:
        click.echo('No history.')
        return
    entries.reverse()
    click.echo(entries)
@changes.command()
def test():
    """Run all resource tests and print a colored [OK]/[ERROR] line per resource."""
    for name, outcome in testing.test_all().items():
        if outcome['status'] == 'ok':
            status = click.style('OK', fg='green')
            message = ''
        else:
            status = click.style('ERROR', fg='red')
            message = outcome['message']
        click.echo('[{status}] {name} {message}'.format(
            status=status, name=name, message=message))
@changes.command(name='clean-history')
def clean_history():
    # Drop both histories: the commit log (CL) and the commit data (CD).
    data.CL().clean()
    data.CD().clean()
| dshulyak/solar | solar/solar/cli/system_log.py | Python | apache-2.0 | 2,234 |
package ua.com.lsd25.config;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.social.facebook.api.Facebook;
import org.springframework.social.facebook.api.impl.FacebookTemplate;
/**
 * Spring configuration that exposes a singleton {@link Facebook} client built
 * from the access token configured under
 * {@code spring.social.facebook.access.token}.
 *
 * @author vzagnitko
 */
@Configuration
public class FacebookConfig {

    // Injected from application properties; authenticates all Graph API calls.
    @Value("${spring.social.facebook.access.token}")
    private String facebookAccessToken;

    public FacebookConfig() {
    }

    /** @return a {@link FacebookTemplate} bound to the configured access token */
    @Bean
    public Facebook getFacebook() {
        return new FacebookTemplate(facebookAccessToken);
    }
}
| vzagnitko/BluetoothMusicServer | src/main/java/ua/com/lsd25/config/FacebookConfig.java | Java | apache-2.0 | 650 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.hadoop.pig;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.math.BigInteger;
import java.net.URLDecoder;
import java.nio.ByteBuffer;
import java.nio.charset.CharacterCodingException;
import java.util.*;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.exceptions.SyntaxException;
import org.apache.cassandra.auth.IAuthenticator;
import org.apache.cassandra.db.Column;
import org.apache.cassandra.db.IColumn;
import org.apache.cassandra.db.marshal.*;
import org.apache.cassandra.db.marshal.AbstractCompositeType.CompositeComponent;
import org.apache.cassandra.hadoop.*;
import org.apache.cassandra.thrift.*;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.FBUtilities;
import org.apache.cassandra.utils.Hex;
import org.apache.cassandra.utils.UUIDGen;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.*;
import org.apache.pig.*;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.data.*;
import org.apache.pig.impl.util.UDFContext;
import org.apache.thrift.TDeserializer;
import org.apache.thrift.TException;
import org.apache.thrift.TSerializer;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * A LoadStoreFunc for retrieving data from and storing data to Cassandra.
 *
 * Connection settings come either from the system environment variables
 * declared below or from Hadoop JobConf keys (see ConfigHelper). Subclasses
 * provide the column-metadata strategy via
 * {@link #getColumnMetadata(Cassandra.Client, boolean)}.
 */
public abstract class AbstractCassandraStorage extends LoadFunc implements StoreFuncInterface, LoadMetadata
{
    /** Which Cassandra marshaller (AbstractType) a value is composed/decomposed with. */
    protected enum MarshallerType { COMPARATOR, DEFAULT_VALIDATOR, KEY_VALIDATOR, SUBCOMPARATOR };

    // system environment variables that can be set to configure connection info:
    // alternatively, Hadoop JobConf variables can be set using keys from ConfigHelper
    public final static String PIG_INPUT_RPC_PORT = "PIG_INPUT_RPC_PORT";
    public final static String PIG_INPUT_INITIAL_ADDRESS = "PIG_INPUT_INITIAL_ADDRESS";
    public final static String PIG_INPUT_PARTITIONER = "PIG_INPUT_PARTITIONER";
    public final static String PIG_OUTPUT_RPC_PORT = "PIG_OUTPUT_RPC_PORT";
    public final static String PIG_OUTPUT_INITIAL_ADDRESS = "PIG_OUTPUT_INITIAL_ADDRESS";
    public final static String PIG_OUTPUT_PARTITIONER = "PIG_OUTPUT_PARTITIONER";
    public final static String PIG_RPC_PORT = "PIG_RPC_PORT";
    public final static String PIG_INITIAL_ADDRESS = "PIG_INITIAL_ADDRESS";
    public final static String PIG_PARTITIONER = "PIG_PARTITIONER";
    public final static String PIG_INPUT_FORMAT = "PIG_INPUT_FORMAT";
    public final static String PIG_OUTPUT_FORMAT = "PIG_OUTPUT_FORMAT";
    public final static String PIG_INPUT_SPLIT_SIZE = "PIG_INPUT_SPLIT_SIZE";

    // fallback formats used when no PIG_*_FORMAT env var is set; assigned by subclasses
    protected String DEFAULT_INPUT_FORMAT;
    protected String DEFAULT_OUTPUT_FORMAT;

    public final static String PARTITION_FILTER_SIGNATURE = "cassandra.partition.filter";

    protected static final Logger logger = LoggerFactory.getLogger(AbstractCassandraStorage.class);

    protected String username;
    protected String password;
    protected String keyspace;
    protected String column_family;
    protected String loadSignature;
    protected String storeSignature;

    protected Configuration conf;
    protected String inputFormatClass;
    protected String outputFormatClass;
    protected int splitSize = 64 * 1024;
    protected String partitionerClass;
    protected boolean usePartitionFilter = false;

    public AbstractCassandraStorage()
    {
        super();
    }

    /** Deconstructs a composite type to a Tuple: one field per composite component,
     *  each composed with that component's own comparator. */
    protected Tuple composeComposite(AbstractCompositeType comparator, ByteBuffer name) throws IOException
    {
        List<CompositeComponent> result = comparator.deconstruct(name);
        Tuple t = TupleFactory.getInstance().newTuple(result.size());
        for (int i=0; i<result.size(); i++)
            setTupleValue(t, i, result.get(i).comparator.compose(result.get(i).value));
        return t;
    }

    /** convert a column to a (name, value) tuple */
    protected Tuple columnToTuple(IColumn col, CfDef cfDef, AbstractType comparator) throws IOException
    {
        Tuple pair = TupleFactory.getInstance().newTuple(2);

        // name: composite column names become nested tuples
        if(comparator instanceof AbstractCompositeType)
            setTupleValue(pair, 0, composeComposite((AbstractCompositeType)comparator,col.name()));
        else
            setTupleValue(pair, 0, comparator.compose(col.name()));
        if (col instanceof Column)
        {
            // standard column: decode with the per-column validator when one is
            // declared in the CfDef, otherwise with the default validator
            Map<ByteBuffer,AbstractType> validators = getValidatorMap(cfDef);
            if (validators.get(col.name()) == null)
            {
                Map<MarshallerType, AbstractType> marshallers = getDefaultMarshallers(cfDef);
                setTupleValue(pair, 1, marshallers.get(MarshallerType.DEFAULT_VALIDATOR).compose(col.value()));
            }
            else
                setTupleValue(pair, 1, validators.get(col.name()).compose(col.value()));
            return pair;
        }
        else
        {
            // super column: the value is a bag of (sub-name, sub-value) tuples
            ArrayList<Tuple> subcols = new ArrayList<Tuple>();
            for (IColumn subcol : col.getSubColumns())
                subcols.add(columnToTuple(subcol, cfDef, parseType(cfDef.getSubcomparator_type())));
            pair.set(1, new DefaultDataBag(subcols));
        }
        return pair;
    }

    /** set the value to the position of the tuple, converting Cassandra-side
     *  objects to Pig-friendly representations where needed */
    protected void setTupleValue(Tuple pair, int position, Object value) throws ExecException
    {
       if (value instanceof BigInteger)
           // NOTE(review): narrows to int — values beyond 2**31 would overflow; kept for Pig compatibility
           pair.set(position, ((BigInteger) value).intValue());
       else if (value instanceof ByteBuffer)
           pair.set(position, new DataByteArray(ByteBufferUtil.getArray((ByteBuffer) value)));
       else if (value instanceof UUID)
           pair.set(position, new DataByteArray(UUIDGen.decompose((java.util.UUID) value)));
       else if (value instanceof Date)
           // dates are exposed as epoch-style longs via DateType
           pair.set(position, DateType.instance.decompose((Date) value).getLong());
       else
           pair.set(position, value);
    }

    /** get the columnfamily definition for the signature, previously cached in
     *  the UDF context by initSchema() */
    protected CfDef getCfDef(String signature)
    {
        UDFContext context = UDFContext.getUDFContext();
        Properties property = context.getUDFProperties(AbstractCassandraStorage.class);
        return cfdefFromString(property.getProperty(signature));
    }

    /** construct a map to store the mashaller type to cassandra data type mapping */
    protected Map<MarshallerType, AbstractType> getDefaultMarshallers(CfDef cfDef) throws IOException
    {
        Map<MarshallerType, AbstractType> marshallers = new EnumMap<MarshallerType, AbstractType>(MarshallerType.class);
        AbstractType comparator;
        AbstractType subcomparator;
        AbstractType default_validator;
        AbstractType key_validator;

        comparator = parseType(cfDef.getComparator_type());
        subcomparator = parseType(cfDef.getSubcomparator_type());
        default_validator = parseType(cfDef.getDefault_validation_class());
        key_validator = parseType(cfDef.getKey_validation_class());

        marshallers.put(MarshallerType.COMPARATOR, comparator);
        marshallers.put(MarshallerType.DEFAULT_VALIDATOR, default_validator);
        marshallers.put(MarshallerType.KEY_VALIDATOR, key_validator);
        marshallers.put(MarshallerType.SUBCOMPARATOR, subcomparator);
        return marshallers;
    }

    /** get the validators, keyed by column name, for columns that declare one */
    protected Map<ByteBuffer, AbstractType> getValidatorMap(CfDef cfDef) throws IOException
    {
        Map<ByteBuffer, AbstractType> validators = new HashMap<ByteBuffer, AbstractType>();
        for (ColumnDef cd : cfDef.getColumn_metadata())
        {
            if (cd.getValidation_class() != null && !cd.getValidation_class().isEmpty())
            {
                AbstractType validator = null;
                try
                {
                    validator = TypeParser.parse(cd.getValidation_class());
                    validators.put(cd.name, validator);
                }
                catch (ConfigurationException e)
                {
                    throw new IOException(e);
                }
                catch (SyntaxException e)
                {
                    throw new IOException(e);
                }
            }
        }
        return validators;
    }

    /** parse the string to a cassandra data type */
    protected AbstractType parseType(String type) throws IOException
    {
        try
        {
            // always treat counters like longs, specifically CCT.compose is not what we need
            if (type != null && type.equals("org.apache.cassandra.db.marshal.CounterColumnType"))
                    return LongType.instance;
            return TypeParser.parse(type);
        }
        catch (ConfigurationException e)
        {
            throw new IOException(e);
        }
        catch (SyntaxException e)
        {
            throw new IOException(e);
        }
    }

    /** instantiate the configured Hadoop InputFormat by reflection */
    @Override
    public InputFormat getInputFormat()
    {
        try
        {
            return FBUtilities.construct(inputFormatClass, "inputformat");
        }
        catch (ConfigurationException e)
        {
            throw new RuntimeException(e);
        }
    }
/**
 * Decomposes a query string of the form {@code k1=v1&k2=v2} into a map.
 * Values are URL-decoded as UTF-8; keys are kept verbatim. Only the first
 * '=' separates key from value, so values may themselves contain '='
 * (previously such values were silently truncated, and a parameter without
 * any '=' threw ArrayIndexOutOfBoundsException — such parameters are now
 * skipped).
 *
 * @param query raw query string (without the leading '?')
 * @return map of parameter name to decoded value
 * @throws UnsupportedEncodingException never in practice: UTF-8 is a mandatory charset
 */
public static Map<String, String> getQueryMap(String query) throws UnsupportedEncodingException
{
    String[] params = query.split("&");
    Map<String, String> map = new HashMap<String, String>();
    for (String param : params)
    {
        // limit=2: split at the first '=' only, preserving '=' inside the value
        String[] keyValue = param.split("=", 2);
        if (keyValue.length == 2)
            map.put(keyValue[0], URLDecoder.decode(keyValue[1], "UTF-8"));
    }
    return map;
}
/** set hadoop cassandra connection settings */
protected void setConnectionInformation() throws IOException
{
if (System.getenv(PIG_RPC_PORT) != null)
{
ConfigHelper.setInputRpcPort(conf, System.getenv(PIG_RPC_PORT));
ConfigHelper.setOutputRpcPort(conf, System.getenv(PIG_RPC_PORT));
}
if (System.getenv(PIG_INPUT_RPC_PORT) != null)
ConfigHelper.setInputRpcPort(conf, System.getenv(PIG_INPUT_RPC_PORT));
if (System.getenv(PIG_OUTPUT_RPC_PORT) != null)
ConfigHelper.setOutputRpcPort(conf, System.getenv(PIG_OUTPUT_RPC_PORT));
if (System.getenv(PIG_INITIAL_ADDRESS) != null)
{
ConfigHelper.setInputInitialAddress(conf, System.getenv(PIG_INITIAL_ADDRESS));
ConfigHelper.setOutputInitialAddress(conf, System.getenv(PIG_INITIAL_ADDRESS));
}
if (System.getenv(PIG_INPUT_INITIAL_ADDRESS) != null)
ConfigHelper.setInputInitialAddress(conf, System.getenv(PIG_INPUT_INITIAL_ADDRESS));
if (System.getenv(PIG_OUTPUT_INITIAL_ADDRESS) != null)
ConfigHelper.setOutputInitialAddress(conf, System.getenv(PIG_OUTPUT_INITIAL_ADDRESS));
if (System.getenv(PIG_PARTITIONER) != null)
{
ConfigHelper.setInputPartitioner(conf, System.getenv(PIG_PARTITIONER));
ConfigHelper.setOutputPartitioner(conf, System.getenv(PIG_PARTITIONER));
}
if(System.getenv(PIG_INPUT_PARTITIONER) != null)
ConfigHelper.setInputPartitioner(conf, System.getenv(PIG_INPUT_PARTITIONER));
if(System.getenv(PIG_OUTPUT_PARTITIONER) != null)
ConfigHelper.setOutputPartitioner(conf, System.getenv(PIG_OUTPUT_PARTITIONER));
if (System.getenv(PIG_INPUT_FORMAT) != null)
inputFormatClass = getFullyQualifiedClassName(System.getenv(PIG_INPUT_FORMAT));
else
inputFormatClass = DEFAULT_INPUT_FORMAT;
if (System.getenv(PIG_OUTPUT_FORMAT) != null)
outputFormatClass = getFullyQualifiedClassName(System.getenv(PIG_OUTPUT_FORMAT));
else
outputFormatClass = DEFAULT_OUTPUT_FORMAT;
}
/** get the full class name */
protected String getFullyQualifiedClassName(String classname)
{
return classname.contains(".") ? classname : "org.apache.cassandra.hadoop." + classname;
}
/** get pig type for the cassandra data type*/
protected byte getPigType(AbstractType type)
{
if (type instanceof LongType || type instanceof DateType) // DateType is bad and it should feel bad
return DataType.LONG;
else if (type instanceof IntegerType || type instanceof Int32Type) // IntegerType will overflow at 2**31, but is kept for compatibility until pig has a BigInteger
return DataType.INTEGER;
else if (type instanceof AsciiType)
return DataType.CHARARRAY;
else if (type instanceof UTF8Type)
return DataType.CHARARRAY;
else if (type instanceof FloatType)
return DataType.FLOAT;
else if (type instanceof DoubleType)
return DataType.DOUBLE;
else if (type instanceof AbstractCompositeType )
return DataType.TUPLE;
return DataType.BYTEARRAY;
}
public ResourceStatistics getStatistics(String location, Job job)
{
return null;
}
@Override
public String relativeToAbsolutePath(String location, Path curDir) throws IOException
{
return location;
}
@Override
public void setUDFContextSignature(String signature)
{
this.loadSignature = signature;
}
/** StoreFunc methods */
public void setStoreFuncUDFContextSignature(String signature)
{
this.storeSignature = signature;
}
public String relToAbsPathForStoreLocation(String location, Path curDir) throws IOException
{
return relativeToAbsolutePath(location, curDir);
}
/** output format */
public OutputFormat getOutputFormat()
{
try
{
return FBUtilities.construct(outputFormatClass, "outputformat");
}
catch (ConfigurationException e)
{
throw new RuntimeException(e);
}
}
public void checkSchema(ResourceSchema schema) throws IOException
{
// we don't care about types, they all get casted to ByteBuffers
}
/** convert object to ByteBuffer */
protected ByteBuffer objToBB(Object o)
{
if (o == null)
return (ByteBuffer)o;
if (o instanceof java.lang.String)
return ByteBuffer.wrap(new DataByteArray((String)o).get());
if (o instanceof Integer)
return Int32Type.instance.decompose((Integer)o);
if (o instanceof Long)
return LongType.instance.decompose((Long)o);
if (o instanceof Float)
return FloatType.instance.decompose((Float)o);
if (o instanceof Double)
return DoubleType.instance.decompose((Double)o);
if (o instanceof UUID)
return ByteBuffer.wrap(UUIDGen.decompose((UUID) o));
if(o instanceof Tuple) {
List<Object> objects = ((Tuple)o).getAll();
List<ByteBuffer> serialized = new ArrayList<ByteBuffer>(objects.size());
int totalLength = 0;
for(Object sub : objects)
{
ByteBuffer buffer = objToBB(sub);
serialized.add(buffer);
totalLength += 2 + buffer.remaining() + 1;
}
ByteBuffer out = ByteBuffer.allocate(totalLength);
for (ByteBuffer bb : serialized)
{
int length = bb.remaining();
out.put((byte) ((length >> 8) & 0xFF));
out.put((byte) (length & 0xFF));
out.put(bb);
out.put((byte) 0);
}
out.flip();
return out;
}
return ByteBuffer.wrap(((DataByteArray) o).get());
}
public void cleanupOnFailure(String failure, Job job)
{
}
/** Methods to get the column family schema from Cassandra */
protected void initSchema(String signature)
{
Properties properties = UDFContext.getUDFContext().getUDFProperties(AbstractCassandraStorage.class);
// Only get the schema if we haven't already gotten it
if (!properties.containsKey(signature))
{
try
{
Cassandra.Client client = ConfigHelper.getClientFromInputAddressList(conf);
client.set_keyspace(keyspace);
if (username != null && password != null)
{
Map<String, String> credentials = new HashMap<String, String>(2);
credentials.put(IAuthenticator.USERNAME_KEY, username);
credentials.put(IAuthenticator.PASSWORD_KEY, password);
try
{
client.login(new AuthenticationRequest(credentials));
}
catch (AuthenticationException e)
{
logger.error("Authentication exception: invalid username and/or password");
throw new RuntimeException(e);
}
catch (AuthorizationException e)
{
throw new AssertionError(e); // never actually throws AuthorizationException.
}
}
// compose the CfDef for the columfamily
CfDef cfDef = getCfDef(client);
if (cfDef != null)
properties.setProperty(signature, cfdefToString(cfDef));
else
throw new RuntimeException(String.format("Column family '%s' not found in keyspace '%s'",
column_family,
keyspace));
}
catch (TException e)
{
throw new RuntimeException(e);
}
catch (InvalidRequestException e)
{
throw new RuntimeException(e);
}
catch (IOException e)
{
throw new RuntimeException(e);
}
catch (UnavailableException e)
{
throw new RuntimeException(e);
}
catch (TimedOutException e)
{
throw new RuntimeException(e);
}
catch (SchemaDisagreementException e)
{
throw new RuntimeException(e);
}
}
}
/** convert CfDef to string */
protected static String cfdefToString(CfDef cfDef)
{
assert cfDef != null;
// this is so awful it's kind of cool!
TSerializer serializer = new TSerializer(new TBinaryProtocol.Factory());
try
{
return Hex.bytesToHex(serializer.serialize(cfDef));
}
catch (TException e)
{
throw new RuntimeException(e);
}
}
/** convert string back to CfDef */
protected static CfDef cfdefFromString(String st)
{
assert st != null;
TDeserializer deserializer = new TDeserializer(new TBinaryProtocol.Factory());
CfDef cfDef = new CfDef();
try
{
deserializer.deserialize(cfDef, Hex.hexToBytes(st));
}
catch (TException e)
{
throw new RuntimeException(e);
}
return cfDef;
}
/** return the CfDef for the column family */
protected CfDef getCfDef(Cassandra.Client client)
throws InvalidRequestException,
UnavailableException,
TimedOutException,
SchemaDisagreementException,
TException,
CharacterCodingException
{
// get CF meta data
String query = "SELECT type, " +
" comparator," +
" subcomparator," +
" default_validator, " +
" key_validator," +
" key_aliases " +
"FROM system.schema_columnfamilies " +
"WHERE keyspace_name = '%s' " +
" AND columnfamily_name = '%s' ";
CqlResult result = client.execute_cql3_query(
ByteBufferUtil.bytes(String.format(query, keyspace, column_family)),
Compression.NONE,
ConsistencyLevel.ONE);
if (result == null || result.rows == null || result.rows.isEmpty())
return null;
Iterator<CqlRow> iteraRow = result.rows.iterator();
CfDef cfDef = new CfDef();
cfDef.keyspace = keyspace;
cfDef.name = column_family;
boolean cql3Table = false;
if (iteraRow.hasNext())
{
CqlRow cqlRow = iteraRow.next();
cfDef.column_type = ByteBufferUtil.string(cqlRow.columns.get(0).value);
cfDef.comparator_type = ByteBufferUtil.string(cqlRow.columns.get(1).value);
ByteBuffer subComparator = cqlRow.columns.get(2).value;
if (subComparator != null)
cfDef.subcomparator_type = ByteBufferUtil.string(subComparator);
cfDef.default_validation_class = ByteBufferUtil.string(cqlRow.columns.get(3).value);
cfDef.key_validation_class = ByteBufferUtil.string(cqlRow.columns.get(4).value);
List<String> keys = null;
if (cqlRow.columns.get(5).value != null)
{
String keyAliases = ByteBufferUtil.string(cqlRow.columns.get(5).value);
keys = FBUtilities.fromJsonList(keyAliases);
}
// get column meta data
if (keys != null && keys.size() > 0)
cql3Table = true;
}
cfDef.column_metadata = getColumnMetadata(client, cql3Table);
return cfDef;
}
/** get a list of columns */
protected abstract List<ColumnDef> getColumnMetadata(Cassandra.Client client, boolean cql3Table)
throws InvalidRequestException,
UnavailableException,
TimedOutException,
SchemaDisagreementException,
TException,
CharacterCodingException;
/** get column meta data */
protected List<ColumnDef> getColumnMeta(Cassandra.Client client)
throws InvalidRequestException,
UnavailableException,
TimedOutException,
SchemaDisagreementException,
TException,
CharacterCodingException
{
String query = "SELECT column_name, " +
" validator, " +
" index_type " +
"FROM system.schema_columns " +
"WHERE keyspace_name = '%s' " +
" AND columnfamily_name = '%s'";
CqlResult result = client.execute_cql3_query(
ByteBufferUtil.bytes(String.format(query, keyspace, column_family)),
Compression.NONE,
ConsistencyLevel.ONE);
List<CqlRow> rows = result.rows;
List<ColumnDef> columnDefs = new ArrayList<ColumnDef>();
if (rows == null || rows.isEmpty())
return columnDefs;
Iterator<CqlRow> iterator = rows.iterator();
while (iterator.hasNext())
{
CqlRow row = iterator.next();
ColumnDef cDef = new ColumnDef();
cDef.setName(ByteBufferUtil.clone(row.getColumns().get(0).value));
cDef.validation_class = ByteBufferUtil.string(row.getColumns().get(1).value);
ByteBuffer indexType = row.getColumns().get(2).value;
if (indexType != null)
cDef.index_type = getIndexType(ByteBufferUtil.string(indexType));
columnDefs.add(cDef);
}
return columnDefs;
}
/** get keys meta data */
protected List<ColumnDef> getKeysMeta(Cassandra.Client client)
throws InvalidRequestException,
UnavailableException,
TimedOutException,
SchemaDisagreementException,
TException,
IOException
{
String query = "SELECT key_aliases, " +
" column_aliases, " +
" key_validator, " +
" comparator, " +
" keyspace_name, " +
" value_alias, " +
" default_validator " +
"FROM system.schema_columnfamilies " +
"WHERE keyspace_name = '%s'" +
" AND columnfamily_name = '%s' ";
CqlResult result = client.execute_cql3_query(
ByteBufferUtil.bytes(String.format(query, keyspace, column_family)),
Compression.NONE,
ConsistencyLevel.ONE);
if (result == null || result.rows == null || result.rows.isEmpty())
return null;
List<CqlRow> rows = result.rows;
Iterator<CqlRow> iteraRow = rows.iterator();
List<ColumnDef> keys = new ArrayList<ColumnDef>();
if (iteraRow.hasNext())
{
CqlRow cqlRow = iteraRow.next();
String name = ByteBufferUtil.string(cqlRow.columns.get(4).value);
logger.debug("Found ksDef name: {}", name);
String keyString = ByteBufferUtil.string(ByteBuffer.wrap(cqlRow.columns.get(0).getValue()));
logger.debug("partition keys: {}", keyString);
List<String> keyNames = FBUtilities.fromJsonList(keyString);
Iterator<String> iterator = keyNames.iterator();
while (iterator.hasNext())
{
ColumnDef cDef = new ColumnDef();
cDef.name = ByteBufferUtil.bytes(iterator.next());
keys.add(cDef);
}
keyString = ByteBufferUtil.string(ByteBuffer.wrap(cqlRow.columns.get(1).getValue()));
logger.debug("cluster keys: {}", keyString);
keyNames = FBUtilities.fromJsonList(keyString);
iterator = keyNames.iterator();
while (iterator.hasNext())
{
ColumnDef cDef = new ColumnDef();
cDef.name = ByteBufferUtil.bytes(iterator.next());
keys.add(cDef);
}
String validator = ByteBufferUtil.string(ByteBuffer.wrap(cqlRow.columns.get(2).getValue()));
logger.debug("row key validator: {}", validator);
AbstractType<?> keyValidator = parseType(validator);
Iterator<ColumnDef> keyItera = keys.iterator();
if (keyValidator instanceof CompositeType)
{
Iterator<AbstractType<?>> typeItera = ((CompositeType) keyValidator).types.iterator();
while (typeItera.hasNext())
keyItera.next().validation_class = typeItera.next().toString();
}
else
keyItera.next().validation_class = keyValidator.toString();
validator = ByteBufferUtil.string(ByteBuffer.wrap(cqlRow.columns.get(3).getValue()));
logger.debug("cluster key validator: {}", validator);
if (keyItera.hasNext() && validator != null && !validator.isEmpty())
{
AbstractType<?> clusterKeyValidator = parseType(validator);
if (clusterKeyValidator instanceof CompositeType)
{
Iterator<AbstractType<?>> typeItera = ((CompositeType) clusterKeyValidator).types.iterator();
while (keyItera.hasNext())
keyItera.next().validation_class = typeItera.next().toString();
}
else
keyItera.next().validation_class = clusterKeyValidator.toString();
}
// compact value_alias column
if (cqlRow.columns.get(5).value != null)
{
try
{
String compactValidator = ByteBufferUtil.string(ByteBuffer.wrap(cqlRow.columns.get(6).getValue()));
logger.debug("default validator: {}", compactValidator);
AbstractType<?> defaultValidator = parseType(compactValidator);
ColumnDef cDef = new ColumnDef();
cDef.name = cqlRow.columns.get(5).value;
cDef.validation_class = defaultValidator.toString();
keys.add(cDef);
}
catch (Exception e)
{
// no compact column at value_alias
}
}
}
return keys;
}
/** get index type from string */
protected IndexType getIndexType(String type)
{
type = type.toLowerCase();
if ("keys".equals(type))
return IndexType.KEYS;
else if("custom".equals(type))
return IndexType.CUSTOM;
else if("composites".equals(type))
return IndexType.COMPOSITES;
else
return null;
}
    /**
     * Return the names of the indexed columns usable for partition filtering,
     * or null when partition filtering is disabled.
     *
     * @param location location string (not used here; part of the storage API)
     * @param job the Hadoop job (not used here; part of the storage API)
     * @return one name per indexed column, or null when usePartitionFilter is false
     */
    public String[] getPartitionKeys(String location, Job job)
    {
        if (!usePartitionFilter)
            return null;
        List<ColumnDef> indexes = getIndexes();
        String[] partitionKeys = new String[indexes.size()];
        for (int i = 0; i < indexes.size(); i++)
        {
            // NOTE(review): new String(byte[]) decodes with the platform default
            // charset — confirm column names are guaranteed ASCII/UTF-8 here.
            partitionKeys[i] = new String(indexes.get(i).getName());
        }
        return partitionKeys;
    }
/** get a list of columns with defined index*/
protected List<ColumnDef> getIndexes()
{
CfDef cfdef = getCfDef(loadSignature);
List<ColumnDef> indexes = new ArrayList<ColumnDef>();
for (ColumnDef cdef : cfdef.column_metadata)
{
if (cdef.index_type != null)
indexes.add(cdef);
}
return indexes;
}
}
| dprguiuc/Cassandra-Wasef | src/java/org/apache/cassandra/hadoop/pig/AbstractCassandraStorage.java | Java | apache-2.0 | 31,146 |
package com.cetc;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.Date;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.NamedThreadLocal;
import org.springframework.web.servlet.HandlerInterceptor;
import org.springframework.web.servlet.ModelAndView;
import com.cetc.model.RespDataVo;
import com.cetc.utils.Base64;
import com.cetc.utils.DateUtil;
import com.cetc.utils.JsonUtils;
import com.cetc.utils.ValueTool;
import net.sf.json.JSONObject;
/**
*
* @author hp
*
*/
public class TokenInterceptor implements HandlerInterceptor {
private static Logger logger = LoggerFactory.getLogger(TokenInterceptor.class);
private NamedThreadLocal<Long> startTimeThreadLocal = new NamedThreadLocal<>("StopWatch-StartTime");
@Override
public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler)
throws Exception {
long beginTime = System.currentTimeMillis();// 1、开始时间
startTimeThreadLocal.set(beginTime);// 线程绑定变量(该数据只有当前请求的线程可见)
String reqJson = "";
String getReqParm = "";
//增加跨域请求
if(request.getHeader("Origin")!=null&&
(request.getHeader("Origin").equals("http://localhost:8000")||
request.getHeader("Origin").equals("http://10.111.10.42:8000"))
){
logger.debug("需要跨域请求{}", request.getRequestURI());
response.setHeader("Access-Control-Allow-Origin",request.getHeader("Origin"));
};
if (request.getRequestURI().contains("user/login")) {
JSONObject obj = new JSONObject();
obj.put("userName", request.getParameter("userName"));
obj.put("userPwd", request.getParameter("userPwd"));
logger.info("*****【" + request.getMethod() + "】请求url地址:" + request.getRequestURL() + " \n 请求参数: "
+ obj.toString());
return true;
} else {
RespDataVo root = new RespDataVo();
String token = null;
if (StringUtils.equals("get", request.getMethod().toLowerCase())) {
token = request.getParameter("token");
getReqParm = request.getQueryString();
} else if (StringUtils.equals("post", request.getMethod().toLowerCase())) {
InputStream inputStream = request.getInputStream();
BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, "utf-8"));
String tempStr = "";
while ((tempStr = reader.readLine()) != null) {
reqJson += tempStr;
}
reader.close();
inputStream.close();
JSONObject reqBody = JSONObject.fromObject(reqJson);
token = reqBody.getString("token");
}
logger.info("*****【" + request.getMethod() + "】请求url地址:" + request.getRequestURL() + " \n 请求参数: "
+ getReqParm + "" + reqJson);
if (token != null) {
String tokenStr = new String(Base64.decode(token));
if (!tokenStr.contains("_") || tokenStr.split("_").length < 3) { // token
// 不包含"_"
// 或者包含但是格式不正确
String code = "1012";
root.setCode(code);
response.setCharacterEncoding("utf-8");
root.setMsg(ValueTool.getValue().get(code));
response.getWriter().write(JsonUtils.parseBeanToJson(root));
return false;
} else {
String tokenCode = tokenStr.split("_")[1];
if (StringUtils.equals("A123456a", tokenStr.split("_")[2])) {// 校验token是否修改
if (DateUtil.dayDiff(new Date(), tokenCode) - 30 > 0) {// 限制30分钟内可以访问
String code = "1011";
root.setCode(code);
response.setCharacterEncoding("utf-8");
root.setMsg(ValueTool.getValue().get(code));
response.getWriter().write(JsonUtils.parseBeanToJson(root));
return false;
} else {
request.setAttribute("userId", tokenStr.split("_")[0]);
request.setAttribute("param", reqJson);
return true;
}
} else {
String code = "1012";
root.setCode(code);
response.setCharacterEncoding("utf-8");
root.setMsg(ValueTool.getValue().get(code));
response.getWriter().write(JsonUtils.parseBeanToJson(root));
return false;
}
}
}
String code = "1010";
root.setCode(code);
response.setCharacterEncoding("utf-8");
root.setMsg(ValueTool.getValue().get(code));
response.getWriter().write(JsonUtils.parseBeanToJson(root));
return false;
}
}
@Override
public void postHandle(HttpServletRequest request, HttpServletResponse response, Object handler,
ModelAndView modelAndView) throws Exception {
}
@Override
public void afterCompletion(HttpServletRequest request, HttpServletResponse response, Object handler, Exception ex)
throws Exception {
long endTime = System.currentTimeMillis();// 2、结束时间
long beginTime = startTimeThreadLocal.get();// 得到线程绑定的局部变量(开始时间)
long consumeTime = endTime - beginTime;// 3、消耗的时间
if (consumeTime > 500) {// 此处认为处理时间超过500毫秒的请求为慢请求
// TODO 记录到日志文件
System.out.println(String.format("%s consume %d millis", request.getRequestURI(), consumeTime));
logger.info("************end***********" + request.getRequestURL() + " 执行时长:" + consumeTime);
}
}
} | ZHANGLONG678/springmvc | src/main/java/com/cetc/TokenInterceptor.java | Java | apache-2.0 | 5,596 |
package org.sfx.catholicjourney.core.utils;
import android.content.res.ColorStateList;
import android.text.TextUtils;
import android.view.View;
import android.widget.TextView;
/**
* Created by simonadmin on 2015-03-18.
*/
public class TextUtility {

    /**
     * Populate a TextView with the given text, hiding the view entirely when
     * the text is null or empty. A null TextView is ignored.
     *
     * @param tv   target TextView (may be null)
     * @param text text to display; null/empty hides the view
     */
    public static void setText(TextView tv, CharSequence text) {
        if (tv == null) {
            return;
        }
        if (TextUtils.isEmpty(text)) {
            tv.setVisibility(View.GONE);
            return;
        }
        tv.setText(text);
        tv.setVisibility(View.VISIBLE);
    }

    /** Apply a literal color to the TextView, ignoring a null view. */
    public static void setTextColor(TextView tv, int color) {
        if (tv == null) {
            return;
        }
        tv.setTextColor(color);
    }

    /** Apply a state-dependent color list to the TextView, ignoring a null view. */
    public static void setTextColor(TextView tv, ColorStateList colorStateList) {
        if (tv == null) {
            return;
        }
        tv.setTextColor(colorStateList);
    }
}
| simtse/CatholicJourney | app/src/main/java/org/sfx/catholicjourney/core/utils/TextUtility.java | Java | apache-2.0 | 901 |
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2009,2010,2011,2012,2013,2014,2015,2016,2017 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Contains the logic for `aq del cluster systemlist --hostname`. """
from aquilon.aqdb.model import SystemList
from aquilon.worker.broker import BrokerCommand # pylint: disable=W0611
from aquilon.worker.commands.del_cluster_member_priority import \
CommandDelClusterMemberPriority
class CommandDelClusterSystemList(CommandDelClusterMemberPriority):
    """Delete a SystemList entry from a cluster.

    Delegates to the generic cluster-member-priority removal, passing the
    host as the 'member' argument.
    """

    required_parameters = ["cluster", "hostname"]
    resource_class = SystemList

    def render(self, hostname, **kwargs):
        # The parent command keys off 'member'; the hostname/metacluster/
        # comments slots are deliberately blanked for this resource type.
        parent = super(CommandDelClusterSystemList, self)
        parent.render(hostname=None, metacluster=None, comments=None,
                      member=hostname, **kwargs)
| quattor/aquilon | lib/aquilon/worker/commands/del_cluster_systemlist.py | Python | apache-2.0 | 1,563 |
define("gallery/spin/2.0.0/spin-debug", [], function(require, exports, module) {
/**
* Copyright (c) 2011-2014 Felix Gnass
* Licensed under the MIT license
*/
(function(root, factory) {
/* CommonJS */
if (typeof exports == "object") module.exports = factory(); else if (typeof define == "function" && define.amd) define(factory); else root.Spinner = factory();
})(this, function() {
"use strict";
var prefixes = [ "webkit", "Moz", "ms", "O" ], animations = {}, useCssAnimations;
/* Whether to use CSS animations or setTimeout */
/**
* Utility function to create elements. If no tag name is given,
* a DIV is created. Optionally properties can be passed.
*/
function createEl(tag, prop) {
var el = document.createElement(tag || "div"), n;
for (n in prop) el[n] = prop[n];
return el;
}
/**
* Appends children and returns the parent.
*/
function ins(parent) {
for (var i = 1, n = arguments.length; i < n; i++) parent.appendChild(arguments[i]);
return parent;
}
/**
* Insert a new stylesheet to hold the @keyframe or VML rules.
*/
var sheet = function() {
var el = createEl("style", {
type: "text/css"
});
ins(document.getElementsByTagName("head")[0], el);
return el.sheet || el.styleSheet;
}();
/**
* Creates an opacity keyframe animation rule and returns its name.
* Since most mobile Webkits have timing issues with animation-delay,
* we create separate rules for each line/segment.
*/
function addAnimation(alpha, trail, i, lines) {
var name = [ "opacity", trail, ~~(alpha * 100), i, lines ].join("-"), start = .01 + i / lines * 100, z = Math.max(1 - (1 - alpha) / trail * (100 - start), alpha), prefix = useCssAnimations.substring(0, useCssAnimations.indexOf("Animation")).toLowerCase(), pre = prefix && "-" + prefix + "-" || "";
if (!animations[name]) {
sheet.insertRule("@" + pre + "keyframes " + name + "{" + "0%{opacity:" + z + "}" + start + "%{opacity:" + alpha + "}" + (start + .01) + "%{opacity:1}" + (start + trail) % 100 + "%{opacity:" + alpha + "}" + "100%{opacity:" + z + "}" + "}", sheet.cssRules.length);
animations[name] = 1;
}
return name;
}
/**
* Tries various vendor prefixes and returns the first supported property.
*/
function vendor(el, prop) {
var s = el.style, pp, i;
prop = prop.charAt(0).toUpperCase() + prop.slice(1);
for (i = 0; i < prefixes.length; i++) {
pp = prefixes[i] + prop;
if (s[pp] !== undefined) return pp;
}
if (s[prop] !== undefined) return prop;
}
/**
* Sets multiple style properties at once.
*/
function css(el, prop) {
for (var n in prop) el.style[vendor(el, n) || n] = prop[n];
return el;
}
/**
* Fills in default values.
*/
function merge(obj) {
for (var i = 1; i < arguments.length; i++) {
var def = arguments[i];
for (var n in def) if (obj[n] === undefined) obj[n] = def[n];
}
return obj;
}
/**
* Returns the absolute page-offset of the given element.
*/
function pos(el) {
var o = {
x: el.offsetLeft,
y: el.offsetTop
};
while (el = el.offsetParent) o.x += el.offsetLeft, o.y += el.offsetTop;
return o;
}
/**
* Returns the line color from the given string or array.
*/
function getColor(color, idx) {
return typeof color == "string" ? color : color[idx % color.length];
}
// Built-in defaults
var defaults = {
lines: 12,
// The number of lines to draw
length: 7,
// The length of each line
width: 5,
// The line thickness
radius: 10,
// The radius of the inner circle
rotate: 0,
// Rotation offset
corners: 1,
// Roundness (0..1)
color: "#000",
// #rgb or #rrggbb
direction: 1,
// 1: clockwise, -1: counterclockwise
speed: 1,
// Rounds per second
trail: 100,
// Afterglow percentage
opacity: 1 / 4,
// Opacity of the lines
fps: 20,
// Frames per second when using setTimeout()
zIndex: 2e9,
// Use a high z-index by default
className: "spinner",
// CSS class to assign to the element
top: "50%",
// center vertically
left: "50%",
// center horizontally
position: "absolute"
};
/** The constructor */
function Spinner(o) {
this.opts = merge(o || {}, Spinner.defaults, defaults);
}
// Global defaults that override the built-ins:
Spinner.defaults = {};
merge(Spinner.prototype, {
/**
* Adds the spinner to the given target element. If this instance is already
* spinning, it is automatically removed from its previous target b calling
* stop() internally.
*/
spin: function(target) {
this.stop();
var self = this, o = self.opts, el = self.el = css(createEl(0, {
className: o.className
}), {
position: o.position,
width: 0,
zIndex: o.zIndex
}), mid = o.radius + o.length + o.width;
if (target) {
target.insertBefore(el, target.firstChild || null);
css(el, {
left: o.left,
top: o.top
});
}
el.setAttribute("role", "progressbar");
self.lines(el, self.opts);
if (!useCssAnimations) {
// No CSS animation support, use setTimeout() instead
var i = 0, start = (o.lines - 1) * (1 - o.direction) / 2, alpha, fps = o.fps, f = fps / o.speed, ostep = (1 - o.opacity) / (f * o.trail / 100), astep = f / o.lines;
(function anim() {
i++;
for (var j = 0; j < o.lines; j++) {
alpha = Math.max(1 - (i + (o.lines - j) * astep) % f * ostep, o.opacity);
self.opacity(el, j * o.direction + start, alpha, o);
}
self.timeout = self.el && setTimeout(anim, ~~(1e3 / fps));
})();
}
return self;
},
/**
* Stops and removes the Spinner.
*/
stop: function() {
var el = this.el;
if (el) {
clearTimeout(this.timeout);
if (el.parentNode) el.parentNode.removeChild(el);
this.el = undefined;
}
return this;
},
/**
* Internal method that draws the individual lines. Will be overwritten
* in VML fallback mode below.
*/
lines: function(el, o) {
var i = 0, start = (o.lines - 1) * (1 - o.direction) / 2, seg;
function fill(color, shadow) {
return css(createEl(), {
position: "absolute",
width: o.length + o.width + "px",
height: o.width + "px",
background: color,
boxShadow: shadow,
transformOrigin: "left",
transform: "rotate(" + ~~(360 / o.lines * i + o.rotate) + "deg) translate(" + o.radius + "px" + ",0)",
borderRadius: (o.corners * o.width >> 1) + "px"
});
}
for (;i < o.lines; i++) {
seg = css(createEl(), {
position: "absolute",
top: 1 + ~(o.width / 2) + "px",
transform: o.hwaccel ? "translate3d(0,0,0)" : "",
opacity: o.opacity,
animation: useCssAnimations && addAnimation(o.opacity, o.trail, start + i * o.direction, o.lines) + " " + 1 / o.speed + "s linear infinite"
});
if (o.shadow) ins(seg, css(fill("#000", "0 0 4px " + "#000"), {
top: 2 + "px"
}));
ins(el, ins(seg, fill(getColor(o.color, i), "0 0 1px rgba(0,0,0,.1)")));
}
return el;
},
/**
* Internal method that adjusts the opacity of a single line.
* Will be overwritten in VML fallback mode below.
*/
opacity: function(el, i, val) {
if (i < el.childNodes.length) el.childNodes[i].style.opacity = val;
}
});
function initVML() {
/* Utility function to create a VML tag */
function vml(tag, attr) {
return createEl("<" + tag + ' xmlns="urn:schemas-microsoft.com:vml" class="spin-vml">', attr);
}
// No CSS transforms but VML support, add a CSS rule for VML elements:
sheet.addRule(".spin-vml", "behavior:url(#default#VML)");
Spinner.prototype.lines = function(el, o) {
var r = o.length + o.width, s = 2 * r;
function grp() {
return css(vml("group", {
coordsize: s + " " + s,
coordorigin: -r + " " + -r
}), {
width: s,
height: s
});
}
var margin = -(o.width + o.length) * 2 + "px", g = css(grp(), {
position: "absolute",
top: margin,
left: margin
}), i;
function seg(i, dx, filter) {
ins(g, ins(css(grp(), {
rotation: 360 / o.lines * i + "deg",
left: ~~dx
}), ins(css(vml("roundrect", {
arcsize: o.corners
}), {
width: r,
height: o.width,
left: o.radius,
top: -o.width >> 1,
filter: filter
}), vml("fill", {
color: getColor(o.color, i),
opacity: o.opacity
}), vml("stroke", {
opacity: 0
}))));
}
if (o.shadow) for (i = 1; i <= o.lines; i++) seg(i, -2, "progid:DXImageTransform.Microsoft.Blur(pixelradius=2,makeshadow=1,shadowopacity=.3)");
for (i = 1; i <= o.lines; i++) seg(i);
return ins(el, g);
};
Spinner.prototype.opacity = function(el, i, val, o) {
var c = el.firstChild;
o = o.shadow && o.lines || 0;
if (c && i + o < c.childNodes.length) {
c = c.childNodes[i + o];
c = c && c.firstChild;
c = c && c.firstChild;
if (c) c.opacity = val;
}
};
}
var probe = css(createEl("group"), {
behavior: "url(#default#VML)"
});
if (!vendor(probe, "transform") && probe.adj) initVML(); else useCssAnimations = vendor(probe, "animation");
return Spinner;
});
});
| fuxiaoling/gulp | src/static/libs/cmd/gallery/spin/2.0.0/spin-debug.js | JavaScript | apache-2.0 | 12,137 |
// http://microsoftnlayerapp.codeplex.com/license
//===================================================================================
using System;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using Repository.Pattern.Ef6;
using Repository.Pattern.Infrastructure;
namespace Factory.StoreDomainModule
{
/// <summary>
/// Base class for entities
/// </summary>
public abstract class EntityDocument : Entity
{
#region Members
int? _requestedHashCode;
#endregion
#region Properties
/// <summary>
/// Get the persisten object identifier
/// </summary>
[Key]
[DatabaseGenerated(DatabaseGeneratedOption.Identity)]
[Display(AutoGenerateFilter = false)]
public long Id { get; set; }
[Display(ShortName = "Активный")]
public bool IsActive { get; set; }
public int CreatedId { get; set; }
public int? ModifiedId { get; set; }
#endregion
#region Public Methods
/// <summary>
/// Check if this entity is transient, ie, without identity at this moment
/// </summary>
/// <returns>True if entity is transient, else false</returns>
public bool IsTransient()
{
return this.Id == 0;
}
/// <summary>
/// Change current identity for a new non transient identity
/// </summary>
/// <param name="identity">the new identity</param>
public void ChangeCurrentIdentity(int identity)
{
if ( identity != 0)
this.Id = identity;
}
/// <summary>
/// Свойство определяющее,обьект новый или нет
/// </summary>
[NotMapped]
[Display(AutoGenerateFilter = false)]
public bool IsNew
{
get { return Id <= 0; }
}
#endregion
#region Overrides Methods
/// <summary>
/// <see cref="M:System.Object.Equals"/>
/// </summary>
/// <param name="obj"><see cref="M:System.Object.Equals"/></param>
/// <returns><see cref="M:System.Object.Equals"/></returns>
public override bool Equals(object obj)
{
if (obj == null || !(obj is EntityBase))
return false;
if (Object.ReferenceEquals(this, obj))
return true;
EntityBase item = (EntityBase)obj;
if (item.IsTransient() || this.IsTransient())
return false;
else
return item.Id == this.Id;
}
/// <summary>
/// <see cref="M:System.Object.GetHashCode"/>
/// </summary>
/// <returns><see cref="M:System.Object.GetHashCode"/></returns>
public override int GetHashCode()
{
if (!IsTransient())
{
if (!_requestedHashCode.HasValue)
_requestedHashCode = this.Id.GetHashCode() ^ 31; // XOR for random distribution (http://blogs.msdn.com/b/ericlippert/archive/2011/02/28/guidelines-and-rules-for-gethashcode.aspx)
return _requestedHashCode.Value;
}
else
return base.GetHashCode();
}
public static bool operator ==(EntityDocument left, EntityDocument right)
{
if (Object.Equals(left, null))
return (Object.Equals(right, null)) ? true : false;
else
return left.Equals(right);
}
public static bool operator !=(EntityDocument left, EntityDocument right)
{
return !(left == right);
}
#endregion
}
}
| kostyrin/store | Factory.StoreDomainModule/EntityDocument.cs | C# | apache-2.0 | 3,791 |
package nl.pietervanberkel.util;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
import javax.servlet.annotation.WebListener;
import nl.pietervanberkel.model.Model;
@WebListener
public class ServletListener implements ServletContextListener {

    /** Nothing to clean up when the context shuts down. */
    @Override
    public void contextDestroyed(ServletContextEvent event) {
    }

    /** Publish a fresh application-wide Model under the "Model" attribute at startup. */
    @Override
    public void contextInitialized(ServletContextEvent event) {
        event.getServletContext().setAttribute("Model", new Model());
    }
}
| pokyno/Webtechnologie | Webtechnologie_Opdracht1/src/nl/pietervanberkel/util/ServletListener.java | Java | apache-2.0 | 504 |
<?php
// German message translations for the dashboard guest index view.
// Keys are the source (English) strings; values are their German counterparts.
return array (
  '<b>No public contents to display found!</b>' => '<b>keine öffentlichen Inhalte gefunden !</b>',
);
package ru.job4j.loops;
/**
 * Computes the factorial of a number iteratively.
 * Note: the int return value overflows for inputs greater than 12.
 * @author Anton Oleynikov
 * @version 1
 */
public class Factorial {
    /**
     * Multiply the integers 2..max together.
     * @param max upper bound of the product; values below 2 yield 1
     * @return max! as an int
     */
    public int fac(int max) {
        int product = 1;
        for (int factor = max; factor >= 2; factor--) {
            product *= factor;
        }
        return product;
    }
}
from typing import Dict, List, Optional
from ray.tune.suggest.suggestion import Searcher, ConcurrencyLimiter
from ray.tune.suggest.search_generator import SearchGenerator
from ray.tune.trial import Trial
class _MockSearcher(Searcher):
    """Searcher stub for tests: hands out a fixed config, records every
    intermediate result, and counts result/complete callbacks."""

    def __init__(self, **kwargs):
        self.live_trials = {}  # trial_id -> sentinel while the trial runs
        self.counter = {"result": 0, "complete": 0}
        self.final_results = []  # results passed to on_trial_complete
        self.stall = False  # when True, suggest() yields no config
        self.results = []  # every result seen by on_trial_result
        super(_MockSearcher, self).__init__(**kwargs)

    def suggest(self, trial_id: str):
        # Return a fixed config unless stalled.
        if self.stall:
            return None
        self.live_trials[trial_id] = 1
        return {"test_variable": 2}

    def on_trial_result(self, trial_id: str, result: Dict):
        self.counter["result"] += 1
        self.results.append(result)

    def on_trial_complete(
        self, trial_id: str, result: Optional[Dict] = None, error: bool = False
    ):
        self.counter["complete"] += 1
        if result:
            self._process_result(result)
        # Remove the trial if it is still tracked; ignore unknown ids.
        self.live_trials.pop(trial_id, None)

    def _process_result(self, result: Dict):
        self.final_results.append(result)
class _MockSuggestionAlgorithm(SearchGenerator):
    """SearchGenerator stub driving a _MockSearcher, optionally wrapped in a
    ConcurrencyLimiter when ``max_concurrent`` is given."""

    def __init__(self, max_concurrent: Optional[int] = None, **kwargs):
        self.searcher = _MockSearcher(**kwargs)
        if max_concurrent:
            # Cap the number of concurrently suggested trials.
            self.searcher = ConcurrencyLimiter(
                self.searcher, max_concurrent=max_concurrent
            )
        super(_MockSuggestionAlgorithm, self).__init__(self.searcher)

    @property
    def live_trials(self) -> List[Trial]:
        # Delegates to the (possibly wrapped) searcher.
        return self.searcher.live_trials

    @property
    def results(self) -> List[Dict]:
        return self.searcher.results
| ray-project/ray | python/ray/tune/suggest/_mock.py | Python | apache-2.0 | 1,752 |
<?php
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads/v9/enums/access_role.proto

namespace Google\Ads\GoogleAds\V9\Enums;

# The "if (false)" block never executes; it exists solely so IDEs and static
# analyzers can resolve the legacy underscore-separated class name.
if (false) {
    /**
     * This class is deprecated. Use Google\Ads\GoogleAds\V9\Enums\AccessRoleEnum\AccessRole instead.
     * @deprecated
     */
    class AccessRoleEnum_AccessRole {}
}
# Force-load the replacement class and emit a deprecation notice whenever
# this legacy shim is included.
class_exists(AccessRoleEnum\AccessRole::class);
@trigger_error('Google\Ads\GoogleAds\V9\Enums\AccessRoleEnum_AccessRole is deprecated and will be removed in the next major release. Use Google\Ads\GoogleAds\V9\Enums\AccessRoleEnum\AccessRole instead', E_USER_DEPRECATED);
<?
// Leasing master page (submenu 'A'): list / create / update / delete leasing
// records in tbl_leasing, writing an audit row to log_act for every change.
// SECURITY: request values are sanitized (intval / mysql_real_escape_string)
// before being interpolated into SQL to block injection via the legacy
// mysql_* API. Consider migrating to mysqli/PDO prepared statements.
if($submenu == 'A')
{
	if(empty($mod))
	{
		if(!empty($_REQUEST['input']))
		{
			// Uppercase, then escape user input before embedding it in SQL.
			$kodeleasing = mysql_real_escape_string(strtoupper($_REQUEST['kodeleasing']));
			$namaleasing = mysql_real_escape_string(strtoupper($_REQUEST['namaleasing']));
			$q1 = mysql_query("INSERT INTO tbl_leasing (
								kodeleasing,
								namaleasing)
								VALUES (
								'$kodeleasing',
								'$namaleasing');
								");
			$q2 = mysql_query("INSERT INTO log_act VALUES (
								'',
								'tbl_leasing',
								CURDATE(),
								CURTIME(),
								'$_SESSION[user]',
								'TAMBAH LEASING $kodeleasing')
								");
			if($q1 && $q2)
			{
				// Reload the list view after a successful insert.
				print "<meta http-equiv='refresh' content='0;url=?opt=$opt&menu=$menu&submenu=$submenu'/>";
			}
			else
			{
				echo "<script>alert ('Proses gagal.')</script>";
				print "<meta http-equiv='refresh' content='0;url=?opt=$opt&menu=$menu&submenu=$submenu'/>";
				exit();
			}
		}
		if(!empty($_REQUEST['deluser']))
		{
			// Force the id to an integer; escape the code echoed into the log.
			$deluser = intval($_REQUEST['deluser']);
			$kodeleasing = mysql_real_escape_string($_REQUEST['kodeleasing']);
			$q1 = mysql_query("DELETE FROM tbl_leasing WHERE id='$deluser'");
			$q2 = mysql_query("INSERT INTO log_act VALUES (
								'',
								'tbl_leasing',
								CURDATE(),
								CURTIME(),
								'$_SESSION[user]',
								'HAPUS LEASING $kodeleasing')
								");
			if($q1 && $q2)
			{
				// Deletion succeeded; the list below re-renders without the row.
			}
			else
			{
				echo "<script>alert ('Proses gagal.')</script>";
				print "<meta http-equiv='refresh' content='0;url=?opt=$opt&menu=$menu&submenu=$submenu'/>";
				exit();
			}
		}
	}
	else if($mod == "edit")
	{
		if(!empty($_REQUEST['ubah']))
		{
			$ubah = intval($_REQUEST['ubah']);
			$kodeleasing = mysql_real_escape_string(strtoupper($_REQUEST['kodeleasing']));
			$namaleasing = mysql_real_escape_string(strtoupper($_REQUEST['namaleasing']));
			$q1 = mysql_query("UPDATE tbl_leasing SET
								kodeleasing='$kodeleasing',
								namaleasing='$namaleasing'
								WHERE id='$ubah'
								");
			$q2 = mysql_query("INSERT INTO log_act VALUES (
								'',
								'tbl_leasing',
								CURDATE(),
								CURTIME(),
								'$_SESSION[user]',
								'UBAH LEASING $kodeleasing')
								");
			if($q1 && $q2)
			{
				print "<meta http-equiv='refresh' content='0;url=?opt=$opt&menu=$menu&submenu=$submenu'/>";
				exit();
			}
			else
			{
				echo "<script>alert ('Proses gagal.')</script>";
				print "<meta http-equiv='refresh' content='0;url=?opt=$opt&menu=$menu&submenu=$submenu'/>";
				exit();
			}
		}
	}
?>
<aside class="right-side">
	<section class="content">
		<div class="row">
<?
	if(empty($mod))
	{
?>
			<div class="col-xs-12">
				<div class="box box-danger">
					<div class="box-body table-responsive" style="overflow-y:auto;overflow-x:hidden;height:520px;">
						<h4>MASTER <small>LEASING</small></h4>
						<div style="float:right" class="col-xs-7">
							<a data-toggle="modal" data-target="#compose-modal-baru-leasing" style="cursor:pointer">
								<button type="submit" class="btn btn-warning"><i class="fa fa-plus"></i> Leasing Baru</button>
							</a>
						</div>
						<table id="example1" class="table table-bordered table-striped">
							<thead style="color:#666;font-size:13px">
								<tr>
									<th style="padding:7px">KODE LEASING</th>
									<th style="padding:7px">NAMA LEASING</th>
									<th width="5%" style="padding:7px">UBAH</th>
								</tr>
							</thead>
							<tbody>
<?
		$no=1;
		$q1 = mysql_query("SELECT * FROM tbl_leasing");
		while($d1 = mysql_fetch_array($q1))
		{
?>
								<tr style="cursor:pointer">
									<td><?echo $d1['kodeleasing']?></td>
									<td><?echo $d1['namaleasing']?></td>
									<td width="1%" align="center"><div class="btn-group">
<?
			// Only management roles may edit a leasing record.
			if($_SESSION['posisi']=='DIREKSI' OR $_SESSION['posisi']=='PIC' )
			{
?>
										<a href="<?echo "?opt=$opt&menu=$menu&submenu=$submenu&mod=edit&id=$d1[id]"?>" style="cursor:pointer"><i class="fa fa-edit"></i></a>
<?
			}
?>
									</td>
								</tr>
<?
			$no++;
		}
?>
							</tbody>
						</table>
					</div>
				</div>
			</div>
			<!-- ################## MODAL TAMBAH LEASING ########################################################################################## -->
			<div class="modal fade " id="compose-modal-baru-leasing" tabindex="-1" role="dialog" aria-hidden="true">
				<div class="modal-dialog" style="width:40%;">
					<div class="modal-content">
						<div class="modal-header">
							<button type="button" class="close" data-dismiss="modal" aria-hidden="true">&times;</button>
							<h4 class="modal-title">TAMBAH LEASING BARU</h4>
						</div>
						<form method="post" action="" enctype="multipart/form-data">
							<div class="modal-body">
								<table width="100%">
									<tr>
										<td width="30%">KODE LEASING</td>
										<td width="2%">:</td>
										<td colspan="2"><input type="text" name="kodeleasing" class="form-control" style="width:30%" maxlength="5" onkeypress="return buat_angka(event,' qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM')" required></td>
									</tr>
									<tr>
										<td>NAMA LEASING</td>
										<td>:</td>
										<td><input type="text" name="namaleasing" class="form-control" maxlength="40" onkeypress="return buat_angka(event,' qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM')" required></td>
									</tr>
									<input type="hidden" name="input" value="1">
								</table>
							</div>
							<div class="modal-footer clearfix">
								<button type="reset" class="btn btn-danger"><i class="fa fa-mail-reply"></i> Batal</button>
								<button type="submit" class="btn btn-primary pull-left"><i class="fa fa-save"></i> Simpan</button>
							</div>
						</form>
					</div>
				</div>
			</div>
			<!-- ################################################################################################################################# -->
<?
	}
	else if($mod == "edit")
	{
		// Sanitize the record id before using it in the lookup query.
		$id = intval($_REQUEST['id']);
		$d1 = mysql_fetch_array(mysql_query("SELECT * FROM tbl_leasing WHERE id='$id'"));
?>
			<div class="col-xs-12">
				<div class="box box-danger">
					<div class="box-body table-responsive" style="overflow-y:auto;overflow-x:hidden;height:520px;">
						<h4>MASTER <small>LEASING <i class="fa fa-angle-right"></i> Ubah Detail Leasing</small></h4>
						<form method="post" action="" enctype="multipart/form-data">
							<div style="padding:20px">
								<table style="width:50%;">
									<tr>
										<td width="30%">KODE LEASING</td>
										<td width="2%">:</td>
										<td colspan="2"><input type="text" name="kodeleasing" value="<?echo $d1['kodeleasing']?>" class="form-control" style="width:30%" maxlength="5" onkeypress="return buat_angka(event,' qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM')" required=""></td>
									</tr>
									<tr>
										<td>NAMA LEASING</td>
										<td>:</td>
										<td><input type="text" name="namaleasing" value="<?echo $d1['namaleasing']?>" class="form-control" maxlength="40" onkeypress="return buat_angka(event,' qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM')" required=""></td>
									</tr>
									<input type="hidden" name="ubah" value="<?echo $d1['id']?>">
								</table>
							</div>
							<div class="modal-footer clearfix">
								<button type="button" class="btn btn-danger" onclick="location.href='<?echo "?opt=$opt&menu=$menu&submenu=$submenu"?>'"><i class="fa fa-mail-reply"></i> Kembali</button>
								<button type="submit" class="btn btn-primary pull-left"><i class="fa fa-save"></i> Simpan</button>
							</div>
						</form>
					</div>
				</div>
			</div>
<?
	}
?>
		</div>
	</section>
</aside>
<?
}
?>
<script src="js/jquery.min.js"></script>
<!-- urut table -->
<script type="text/javascript">
	$(function() {
		$('#example1').dataTable({
			"bPaginate": true,
			"bLengthChange": false,
			"bFilter": true,
			"bSort": true,
			"bInfo": false,
			"bAutoWidth": true
		});
	});
</script>
package org.docksidestage.postgresql.dbflute.cbean.cq.bs;
import java.util.Map;
import org.dbflute.cbean.*;
import org.dbflute.cbean.chelper.*;
import org.dbflute.cbean.coption.*;
import org.dbflute.cbean.cvalue.ConditionValue;
import org.dbflute.cbean.sqlclause.SqlClause;
import org.dbflute.exception.IllegalConditionBeanOperationException;
import org.docksidestage.postgresql.dbflute.cbean.cq.ciq.*;
import org.docksidestage.postgresql.dbflute.cbean.*;
import org.docksidestage.postgresql.dbflute.cbean.cq.*;
/**
 * The base condition-query of product_status. <br>
 * Auto-generated by DBFlute: holds per-column condition values, the keep-maps
 * for referrer sub-queries (exists/derived), and order-by registrations.
 * @author DBFlute(AutoGenerator)
 */
public class BsProductStatusCQ extends AbstractBsProductStatusCQ {

    // ===================================================================================
    //                                                                           Attribute
    //                                                                           =========
    // lazily-created query object shared by inline() and on();
    // the xsetOnClause flag decides which of the two modes it is in
    protected ProductStatusCIQ _inlineQuery;

    // ===================================================================================
    //                                                                         Constructor
    //                                                                         ===========
    public BsProductStatusCQ(ConditionQuery referrerQuery, SqlClause sqlClause, String aliasName, int nestLevel) {
        super(referrerQuery, sqlClause, aliasName, nestLevel);
    }

    // ===================================================================================
    //                                                                 InlineView/OrClause
    //                                                                 ===================
    /**
     * Prepare InlineView query. <br>
     * {select ... from ... left outer join (select * from product_status) where FOO = [value] ...}
     * <pre>
     * cb.query().queryMemberStatus().<span style="color: #CC4747">inline()</span>.setFoo...;
     * </pre>
     * @return The condition-query for InlineView query. (NotNull)
     */
    public ProductStatusCIQ inline() {
        if (_inlineQuery == null) { _inlineQuery = xcreateCIQ(); }
        _inlineQuery.xsetOnClause(false); return _inlineQuery;
    }

    // creates the inline query bound to the same base condition-bean
    protected ProductStatusCIQ xcreateCIQ() {
        ProductStatusCIQ ciq = xnewCIQ();
        ciq.xsetBaseCB(_baseCB);
        return ciq;
    }

    // factory method so sub-classes can substitute their own CIQ type
    protected ProductStatusCIQ xnewCIQ() {
        return new ProductStatusCIQ(xgetReferrerQuery(), xgetSqlClause(), xgetAliasName(), xgetNestLevel(), this);
    }

    /**
     * Prepare OnClause query. <br>
     * {select ... from ... left outer join product_status on ... and FOO = [value] ...}
     * <pre>
     * cb.query().queryMemberStatus().<span style="color: #CC4747">on()</span>.setFoo...;
     * </pre>
     * @return The condition-query for OnClause query. (NotNull)
     * @throws IllegalConditionBeanOperationException When this condition-query is base query.
     */
    public ProductStatusCIQ on() {
        if (isBaseQuery()) { throw new IllegalConditionBeanOperationException("OnClause for local table is unavailable!"); }
        ProductStatusCIQ inlineQuery = inline(); inlineQuery.xsetOnClause(true); return inlineQuery;
    }

    // ===================================================================================
    //                                                                               Query
    //                                                                               =====
    // condition value holder for product_status_code (PK)
    protected ConditionValue _productStatusCode;

    // lazily creates the condition value on first access (nCV() is the framework factory)
    public ConditionValue xdfgetProductStatusCode()
    { if (_productStatusCode == null) { _productStatusCode = nCV(); }
      return _productStatusCode; }
    protected ConditionValue xgetCValueProductStatusCode() { return xdfgetProductStatusCode(); }

    // keep-maps for referrer sub-queries on product (exists / not-exists / derived);
    // keys returned by keep*() are later resolved when the SQL clause is built
    public Map<String, ProductCQ> xdfgetProductStatusCode_ExistsReferrer_ProductList() { return xgetSQueMap("productStatusCode_ExistsReferrer_ProductList"); }
    public String keepProductStatusCode_ExistsReferrer_ProductList(ProductCQ sq) { return xkeepSQue("productStatusCode_ExistsReferrer_ProductList", sq); }

    public Map<String, ProductCQ> xdfgetProductStatusCode_NotExistsReferrer_ProductList() { return xgetSQueMap("productStatusCode_NotExistsReferrer_ProductList"); }
    public String keepProductStatusCode_NotExistsReferrer_ProductList(ProductCQ sq) { return xkeepSQue("productStatusCode_NotExistsReferrer_ProductList", sq); }

    public Map<String, ProductCQ> xdfgetProductStatusCode_SpecifyDerivedReferrer_ProductList() { return xgetSQueMap("productStatusCode_SpecifyDerivedReferrer_ProductList"); }
    public String keepProductStatusCode_SpecifyDerivedReferrer_ProductList(ProductCQ sq) { return xkeepSQue("productStatusCode_SpecifyDerivedReferrer_ProductList", sq); }

    public Map<String, ProductCQ> xdfgetProductStatusCode_QueryDerivedReferrer_ProductList() { return xgetSQueMap("productStatusCode_QueryDerivedReferrer_ProductList"); }
    public String keepProductStatusCode_QueryDerivedReferrer_ProductList(ProductCQ sq) { return xkeepSQue("productStatusCode_QueryDerivedReferrer_ProductList", sq); }
    public Map<String, Object> xdfgetProductStatusCode_QueryDerivedReferrer_ProductListParameter() { return xgetSQuePmMap("productStatusCode_QueryDerivedReferrer_ProductList"); }
    public String keepProductStatusCode_QueryDerivedReferrer_ProductListParameter(Object pm) { return xkeepSQuePm("productStatusCode_QueryDerivedReferrer_ProductList", pm); }

    /**
     * Add order-by as ascend. <br>
     * (product status code) product_status_code: {PK, NotNull, bpchar(3)}
     * @return this. (NotNull)
     */
    public BsProductStatusCQ addOrderBy_ProductStatusCode_Asc() { regOBA("product_status_code"); return this; }

    /**
     * Add order-by as descend. <br>
     * (product status code) product_status_code: {PK, NotNull, bpchar(3)}
     * @return this. (NotNull)
     */
    public BsProductStatusCQ addOrderBy_ProductStatusCode_Desc() { regOBD("product_status_code"); return this; }

    // condition value holder for product_status_name
    protected ConditionValue _productStatusName;

    public ConditionValue xdfgetProductStatusName()
    { if (_productStatusName == null) { _productStatusName = nCV(); }
      return _productStatusName; }
    protected ConditionValue xgetCValueProductStatusName() { return xdfgetProductStatusName(); }

    /**
     * Add order-by as ascend. <br>
     * product_status_name: {NotNull, varchar(50)}
     * @return this. (NotNull)
     */
    public BsProductStatusCQ addOrderBy_ProductStatusName_Asc() { regOBA("product_status_name"); return this; }

    /**
     * Add order-by as descend. <br>
     * product_status_name: {NotNull, varchar(50)}
     * @return this. (NotNull)
     */
    public BsProductStatusCQ addOrderBy_ProductStatusName_Desc() { regOBD("product_status_name"); return this; }

    // condition value holder for display_order
    protected ConditionValue _displayOrder;

    public ConditionValue xdfgetDisplayOrder()
    { if (_displayOrder == null) { _displayOrder = nCV(); }
      return _displayOrder; }
    protected ConditionValue xgetCValueDisplayOrder() { return xdfgetDisplayOrder(); }

    /**
     * Add order-by as ascend. <br>
     * display_order: {UQ, NotNull, int4(10)}
     * @return this. (NotNull)
     */
    public BsProductStatusCQ addOrderBy_DisplayOrder_Asc() { regOBA("display_order"); return this; }

    /**
     * Add order-by as descend. <br>
     * display_order: {UQ, NotNull, int4(10)}
     * @return this. (NotNull)
     */
    public BsProductStatusCQ addOrderBy_DisplayOrder_Desc() { regOBD("display_order"); return this; }

    // ===================================================================================
    //                                                             SpecifiedDerivedOrderBy
    //                                                             =======================
    /**
     * Add order-by for specified derived column as ascend.
     * <pre>
     * cb.specify().derivedPurchaseList().max(new SubQuery&lt;PurchaseCB&gt;() {
     *     public void query(PurchaseCB subCB) {
     *         subCB.specify().columnPurchaseDatetime();
     *     }
     * }, <span style="color: #CC4747">aliasName</span>);
     * <span style="color: #3F7E5E">// order by [alias-name] asc</span>
     * cb.<span style="color: #CC4747">addSpecifiedDerivedOrderBy_Asc</span>(<span style="color: #CC4747">aliasName</span>);
     * </pre>
     * @param aliasName The alias name specified at (Specify)DerivedReferrer. (NotNull)
     * @return this. (NotNull)
     */
    public BsProductStatusCQ addSpecifiedDerivedOrderBy_Asc(String aliasName) { registerSpecifiedDerivedOrderBy_Asc(aliasName); return this; }

    /**
     * Add order-by for specified derived column as descend.
     * <pre>
     * cb.specify().derivedPurchaseList().max(new SubQuery&lt;PurchaseCB&gt;() {
     *     public void query(PurchaseCB subCB) {
     *         subCB.specify().columnPurchaseDatetime();
     *     }
     * }, <span style="color: #CC4747">aliasName</span>);
     * <span style="color: #3F7E5E">// order by [alias-name] desc</span>
     * cb.<span style="color: #CC4747">addSpecifiedDerivedOrderBy_Desc</span>(<span style="color: #CC4747">aliasName</span>);
     * </pre>
     * @param aliasName The alias name specified at (Specify)DerivedReferrer. (NotNull)
     * @return this. (NotNull)
     */
    public BsProductStatusCQ addSpecifiedDerivedOrderBy_Desc(String aliasName) { registerSpecifiedDerivedOrderBy_Desc(aliasName); return this; }

    // ===================================================================================
    //                                                                         Union Query
    //                                                                         ===========
    // no relation columns on this table, so nothing to reflect for union queries
    public void reflectRelationOnUnionQuery(ConditionQuery bqs, ConditionQuery uqs) {
    }

    // ===================================================================================
    //                                                                       Foreign Query
    //                                                                       =============
    // no fixed-condition foreign keys exist, so there are never dynamic parameters
    protected Map<String, Object> xfindFixedConditionDynamicParameterMap(String property) {
        return null;
    }

    // ===================================================================================
    //                                                                     ScalarCondition
    //                                                                     ===============
    public Map<String, ProductStatusCQ> xdfgetScalarCondition() { return xgetSQueMap("scalarCondition"); }
    public String keepScalarCondition(ProductStatusCQ sq) { return xkeepSQue("scalarCondition", sq); }

    // ===================================================================================
    //                                                                       MyselfDerived
    //                                                                       =============
    public Map<String, ProductStatusCQ> xdfgetSpecifyMyselfDerived() { return xgetSQueMap("specifyMyselfDerived"); }
    public String keepSpecifyMyselfDerived(ProductStatusCQ sq) { return xkeepSQue("specifyMyselfDerived", sq); }

    public Map<String, ProductStatusCQ> xdfgetQueryMyselfDerived() { return xgetSQueMap("queryMyselfDerived"); }
    public String keepQueryMyselfDerived(ProductStatusCQ sq) { return xkeepSQue("queryMyselfDerived", sq); }
    public Map<String, Object> xdfgetQueryMyselfDerivedParameter() { return xgetSQuePmMap("queryMyselfDerived"); }
    public String keepQueryMyselfDerivedParameter(Object pm) { return xkeepSQuePm("queryMyselfDerived", pm); }

    // ===================================================================================
    //                                                                        MyselfExists
    //                                                                        ============
    // NOTE(review): this field appears unused here (the keep-map accessor below
    // goes through xgetSQueMap instead) — presumably kept for generator compatibility
    protected Map<String, ProductStatusCQ> _myselfExistsMap;
    public Map<String, ProductStatusCQ> xdfgetMyselfExists() { return xgetSQueMap("myselfExists"); }
    public String keepMyselfExists(ProductStatusCQ sq) { return xkeepSQue("myselfExists", sq); }

    // ===================================================================================
    //                                                                       MyselfInScope
    //                                                                       =============
    public Map<String, ProductStatusCQ> xdfgetMyselfInScope() { return xgetSQueMap("myselfInScope"); }
    public String keepMyselfInScope(ProductStatusCQ sq) { return xkeepSQue("myselfInScope", sq); }

    // ===================================================================================
    //                                                                       Very Internal
    //                                                                       =============
    // very internal (for suppressing warn about 'Not Use Import')
    protected String xCB() { return ProductStatusCB.class.getName(); }
    protected String xCQ() { return ProductStatusCQ.class.getName(); }
    protected String xCHp() { return HpQDRFunction.class.getName(); }
    protected String xCOp() { return ConditionOption.class.getName(); }
    protected String xMap() { return Map.class.getName(); }
}
| dbflute-test/dbflute-test-dbms-postgresql | src/main/java/org/docksidestage/postgresql/dbflute/cbean/cq/bs/BsProductStatusCQ.java | Java | apache-2.0 | 13,406 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hive.service.cli.operation;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.io.Serializable;
import java.io.UnsupportedEncodingException;
import java.security.PrivilegedExceptionAction;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.Future;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.lang3.CharEncoding;
import org.apache.hadoop.hive.common.metrics.common.Metrics;
import org.apache.hadoop.hive.common.metrics.common.MetricsConstant;
import org.apache.hadoop.hive.common.metrics.common.MetricsFactory;
import org.apache.hadoop.hive.common.metrics.common.MetricsScope;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Schema;
import org.apache.hadoop.hive.ql.CommandNeedRetryException;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.QueryDisplay;
import org.apache.hadoop.hive.ql.QueryState;
import org.apache.hadoop.hive.ql.exec.ExplainTask;
import org.apache.hadoop.hive.ql.exec.FetchTask;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.log.PerfLogger;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import org.apache.hadoop.hive.ql.session.OperationLog;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.SerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.thrift.ThriftJDBCBinarySerDe;
import org.apache.hadoop.hive.shims.Utils;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hive.service.cli.FetchOrientation;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.OperationState;
import org.apache.hive.service.cli.RowSet;
import org.apache.hive.service.cli.RowSetFactory;
import org.apache.hive.service.cli.TableSchema;
import org.apache.hive.service.cli.session.HiveSession;
import org.apache.hive.service.server.ThreadWithGarbageCleanup;
import org.codehaus.jackson.JsonGenerationException;
import org.codehaus.jackson.map.JsonMappingException;
import org.codehaus.jackson.map.ObjectMapper;
/**
* SQLOperation.
*
*/
@SuppressWarnings("deprecation")
public class SQLOperation extends ExecuteStatementOperation {
private Driver driver = null;
private CommandProcessorResponse response;
private TableSchema resultSchema = null;
private Schema mResultSchema = null;
private SerDe serde = null;
private boolean fetchStarted = false;
private volatile MetricsScope currentSQLStateScope;
// Display for WebUI.
private SQLOperationDisplay sqlOpDisplay;
private long queryTimeout;
private ScheduledExecutorService timeoutExecutor;
private final boolean runAsync;
/**
* A map to track query count running by each user
*/
private static Map<String, AtomicInteger> userQueries = new HashMap<String, AtomicInteger>();
private static final String ACTIVE_SQL_USER = MetricsConstant.SQL_OPERATION_PREFIX + "active_user";
public SQLOperation(HiveSession parentSession, String statement, Map<String, String> confOverlay,
boolean runInBackground, long queryTimeout) {
// TODO: call setRemoteUser in ExecuteStatementOperation or higher.
// 调用父类ExecuteStatementOperation的构造方法
super(parentSession, statement, confOverlay, runInBackground);
// beeline调用的时候runAsync是true
this.runAsync = runInBackground;
// queryTimeout默认是0
this.queryTimeout = queryTimeout;
setupSessionIO(parentSession.getSessionState());
try {
sqlOpDisplay = new SQLOperationDisplay(this);
} catch (HiveSQLException e) {
LOG.warn("Error calcluating SQL Operation Display for webui", e);
}
}
@Override
public boolean shouldRunAsync() {
return runAsync;
}
private void setupSessionIO(SessionState sessionState) {
try {
sessionState.in = null; // hive server's session input stream is not used
sessionState.out = new PrintStream(System.out, true, CharEncoding.UTF_8);
sessionState.info = new PrintStream(System.err, true, CharEncoding.UTF_8);
sessionState.err = new PrintStream(System.err, true, CharEncoding.UTF_8);
} catch (UnsupportedEncodingException e) {
LOG.error("Error creating PrintStream", e);
e.printStackTrace();
sessionState.out = null;
sessionState.info = null;
sessionState.err = null;
}
}
/**
* Compile the query and extract metadata
* @param queryState
* @throws HiveSQLException
*/
public void prepare(QueryState queryState) throws HiveSQLException {
// 调用父类的setState方法设置状态
setState(OperationState.RUNNING);
try {
// 实例化driver
driver = new Driver(queryState, getParentSession().getUserName());
// Start the timer thread for cancelling the query when query timeout is reached
// queryTimeout == 0 means no timeout
// 当达到查询超时时启动定时器线程取消查询, 如果queryTimeout为0, 则表示不存在超时
LOG.info("-----******>>> queryTimeout:" + queryTimeout);
if (queryTimeout > 0) {
timeoutExecutor = new ScheduledThreadPoolExecutor(1);
Runnable timeoutTask = new Runnable() {
@Override
public void run() {
try {
LOG.info("Query timed out after: " + queryTimeout
+ " seconds. Cancelling the execution now.");
SQLOperation.this.cancel(OperationState.TIMEDOUT);
} catch (HiveSQLException e) {
LOG.error("Error cancelling the query after timeout: " + queryTimeout + " seconds", e);
} finally {
// Stop
timeoutExecutor.shutdown();
}
}
};
timeoutExecutor.schedule(timeoutTask, queryTimeout, TimeUnit.SECONDS);
}
sqlOpDisplay.setQueryDisplay(driver.getQueryDisplay());
// set the operation handle information in Driver, so that thrift API users
// can use the operation handle they receive, to lookup query information in
// Yarn ATS
// 设置OperationId, 比如: KtahFvbXQYGKzFUH40h5fA
String guid64 = Base64.encodeBase64URLSafeString(getHandle().getHandleIdentifier()
.toTHandleIdentifier().getGuid()).trim();
LOG.info("++++++++>> operationId:" + guid64);
driver.setOperationId(guid64);
// In Hive server mode, we are not able to retry in the FetchTask
// case, when calling fetch queries since execute() has returned.
// For now, we disable the test attempts.
driver.setTryCount(Integer.MAX_VALUE);
// 编译statement, 生成执行计划, 权限校验, 核心的地方
response = driver.compileAndRespond(statement);
if (0 != response.getResponseCode()) {
throw toSQLException("Error while compiling statement", response);
}
// 上面在执行driver的compileAndRespond方法的过程中会设置schema
mResultSchema = driver.getSchema();
// hasResultSet should be true only if the query has a FetchTask
// "explain" is an exception for now
if(driver.getPlan().getFetchTask() != null) {
LOG.info("+++++-----++++> getFetchTask() != null");
//Schema has to be set
if (mResultSchema == null || !mResultSchema.isSetFieldSchemas()) {
throw new HiveSQLException("Error compiling query: Schema and FieldSchema " +
"should be set when query plan has a FetchTask");
}
resultSchema = new TableSchema(mResultSchema);
// 设置hasResultSet为true, 同时设置OperationHandle的hasResultSet为true, OperationHandle这个值是客户端用来判断是否有result的标识位
setHasResultSet(true);
} else {
setHasResultSet(false);
}
// Set hasResultSet true if the plan has ExplainTask
// TODO explain should use a FetchTask for reading
// 判断是不是ExplainTask, 如果是的话将是否含有ResultSet设为true
for (Task<? extends Serializable> task: driver.getPlan().getRootTasks()) {
if (task.getClass() == ExplainTask.class) {
LOG.info("------>>> mResultSchema:" + mResultSchema);
resultSchema = new TableSchema(mResultSchema);
setHasResultSet(true);
break;
}
}
} catch (HiveSQLException e) {
setState(OperationState.ERROR);
throw e;
} catch (Throwable e) {
setState(OperationState.ERROR);
throw new HiveSQLException("Error running query: " + e.toString(), e);
}
}
private void runQuery() throws HiveSQLException {
try {
OperationState opState = getStatus().getState();
// Operation may have been cancelled by another thread
if (opState.isTerminal()) {
LOG.info("Not running the query. Operation is already in terminal state: " + opState
+ ", perhaps cancelled due to query timeout or by another thread.");
return;
}
// In Hive server mode, we are not able to retry in the FetchTask
// case, when calling fetch queries since execute() has returned.
// For now, we disable the test attempts.
driver.setTryCount(Integer.MAX_VALUE);
// 调用Driver的run方法来执行mapreduce Task
response = driver.run();
if (0 != response.getResponseCode()) {
throw toSQLException("Error while processing statement", response);
}
} catch (HiveSQLException e) {
/**
* If the operation was cancelled by another thread, or the execution timed out, Driver#run
* may return a non-zero response code. We will simply return if the operation state is
* CANCELED, TIMEDOUT or CLOSED, otherwise throw an exception
*/
if ((getStatus().getState() == OperationState.CANCELED)
|| (getStatus().getState() == OperationState.TIMEDOUT)
|| (getStatus().getState() == OperationState.CLOSED)) {
return;
} else {
setState(OperationState.ERROR);
throw e;
}
} catch (Throwable e) {
setState(OperationState.ERROR);
throw new HiveSQLException("Error running query: " + e.toString(), e);
}
// 更新状态为finished
setState(OperationState.FINISHED);
}
@Override
public void runInternal() throws HiveSQLException {
// 调用父类方法更新状态
setState(OperationState.PENDING);
// runAsync通过beeline调用的话是true
boolean runAsync = shouldRunAsync();
// runAsync按true处理, hive.server2.async.exec.async.compile默认是false, 所以asyncPrepare是false
final boolean asyncPrepare = runAsync
&& HiveConf.getBoolVar(queryState.getConf(),
HiveConf.ConfVars.HIVE_SERVER2_ASYNC_EXEC_ASYNC_COMPILE);
LOG.info("++++>>>+++>>> runAsync:" + runAsync + ", asyncPrePare:" + asyncPrepare);
// asyncPrepare是false, 所以执行prepare方法, 会对hql进行编译生成执行计划
if (!asyncPrepare) {
prepare(queryState);
}
LOG.info("+++++++ prepare finished!");
// runAsync是true执行else中的逻辑, 即异步执行查询计划
if (!runAsync) {
runQuery();
} else {
// We'll pass ThreadLocals in the background thread from the foreground (handler) thread.
// 1) ThreadLocal Hive object needs to be set in background thread
// 2) The metastore client in Hive is associated with right user.
// 3) Current UGI will get used by metastore when metastore is in embedded mode
Runnable work = new BackgroundWork(getCurrentUGI(), parentSession.getSessionHive(),
SessionState.getPerfLogger(), SessionState.get(), asyncPrepare);
try {
// This submit blocks if no background threads are available to run this operation
// 调用HiveSessionImpl的submitBackgroundOperation方法
Future<?> backgroundHandle = getParentSession().submitBackgroundOperation(work);
// 调用setBackgroundHandle方法将上面的backgroundHandle设置给SQLOperation的backgroundHandle
setBackgroundHandle(backgroundHandle);
} catch (RejectedExecutionException rejected) {
setState(OperationState.ERROR);
throw new HiveSQLException("The background threadpool cannot accept" +
" new task for execution, please retry the operation", rejected);
}
}
}
private final class BackgroundWork implements Runnable {
private final UserGroupInformation currentUGI;
private final Hive parentHive;
private final PerfLogger parentPerfLogger;
private final SessionState parentSessionState;
private final boolean asyncPrepare;
private BackgroundWork(UserGroupInformation currentUGI,
Hive parentHive, PerfLogger parentPerfLogger,
SessionState parentSessionState, boolean asyncPrepare) {
this.currentUGI = currentUGI;
this.parentHive = parentHive;
this.parentPerfLogger = parentPerfLogger;
this.parentSessionState = parentSessionState;
this.asyncPrepare = asyncPrepare;
}
@Override
public void run() {
PrivilegedExceptionAction<Object> doAsAction = new PrivilegedExceptionAction<Object>() {
@Override
public Object run() throws HiveSQLException {
Hive.set(parentHive);
// TODO: can this result in cross-thread reuse of session state?
SessionState.setCurrentSessionState(parentSessionState);
PerfLogger.setPerfLogger(parentPerfLogger);
// Set current OperationLog in this async thread for keeping on saving query log.
// 将SQLOperation的OperationLog设置给当前线程
registerCurrentOperationLog();
// 注册当前线程的Context
registerLoggingContext();
try {
// 通过beeline调用时, asyncPrepare是false
if (asyncPrepare) {
prepare(queryState);
}
// 最核心的地方, 调用SQLOperation的runQuery方法
runQuery();
} catch (HiveSQLException e) {
// 调用父类方法设置异常信息
setOperationException(e);
LOG.error("Error running hive query: ", e);
} finally {
// 移除设置的Context信息
unregisterLoggingContext();
// 移除注册在当前线程的OperationLog
unregisterOperationLog();
}
return null;
}
};
try {
currentUGI.doAs(doAsAction);
} catch (Exception e) {
// 调用父类的方法设置异常信息
setOperationException(new HiveSQLException(e));
LOG.error("Error running hive query as user : " + currentUGI.getShortUserName(), e);
}
finally {
/**
* We'll cache the ThreadLocal RawStore object for this background thread for an orderly cleanup
* when this thread is garbage collected later.
* @see org.apache.hive.service.server.ThreadWithGarbageCleanup#finalize()
*/
if (ThreadWithGarbageCleanup.currentThread() instanceof ThreadWithGarbageCleanup) {
ThreadWithGarbageCleanup currentThread =
(ThreadWithGarbageCleanup) ThreadWithGarbageCleanup.currentThread();
currentThread.cacheThreadLocalRawStore();
}
}
}
}
/**
* Returns the current UGI on the stack
* @param opConfig
* @return UserGroupInformation
* @throws HiveSQLException
*/
private UserGroupInformation getCurrentUGI() throws HiveSQLException {
try {
return Utils.getUGI();
} catch (Exception e) {
throw new HiveSQLException("Unable to get current user", e);
}
}
private void registerCurrentOperationLog() {
if (isOperationLogEnabled) {
if (operationLog == null) {
LOG.warn("Failed to get current OperationLog object of Operation: " +
getHandle().getHandleIdentifier());
isOperationLogEnabled = false;
return;
}
OperationLog.setCurrentOperationLog(operationLog);
}
}
private synchronized void cleanup(OperationState state) throws HiveSQLException {
setState(state);
if (shouldRunAsync()) {
Future<?> backgroundHandle = getBackgroundHandle();
if (backgroundHandle != null) {
boolean success = backgroundHandle.cancel(true);
if (success) {
LOG.info("The running operation has been successfully interrupted.");
}
}
}
if (driver != null) {
driver.close();
driver.destroy();
}
driver = null;
SessionState ss = SessionState.get();
if (ss == null) {
LOG.warn("Operation seems to be in invalid state, SessionState is null");
} else {
ss.deleteTmpOutputFile();
ss.deleteTmpErrOutputFile();
}
// Shutdown the timeout thread if any, while closing this operation
if ((timeoutExecutor != null) && (state != OperationState.TIMEDOUT) && (state.isTerminal())) {
timeoutExecutor.shutdownNow();
}
}
@Override
public void cancel(OperationState stateAfterCancel) throws HiveSQLException {
cleanup(stateAfterCancel);
cleanupOperationLog();
}
@Override
public void close() throws HiveSQLException {
cleanup(OperationState.CLOSED);
cleanupOperationLog();
}
@Override
public TableSchema getResultSetSchema() throws HiveSQLException {
// Since compilation is always a blocking RPC call, and schema is ready after compilation,
// we can return when are in the RUNNING state.
assertState(new ArrayList<OperationState>(Arrays.asList(OperationState.RUNNING,
OperationState.FINISHED)));
if (resultSchema == null) {
resultSchema = new TableSchema(driver.getSchema());
}
return resultSchema;
}
private transient final List<Object> convey = new ArrayList<Object>();
@Override
public RowSet getNextRowSet(FetchOrientation orientation, long maxRows)
throws HiveSQLException {
validateDefaultFetchOrientation(orientation);
assertState(new ArrayList<OperationState>(Arrays.asList(OperationState.FINISHED)));
FetchTask fetchTask = driver.getFetchTask();
boolean isBlobBased = false;
if (fetchTask != null && fetchTask.getWork().isUsingThriftJDBCBinarySerDe()) {
// Just fetch one blob if we've serialized thrift objects in final tasks
maxRows = 1;
isBlobBased = true;
}
driver.setMaxRows((int) maxRows);
RowSet rowSet = RowSetFactory.create(resultSchema, getProtocolVersion(), isBlobBased);
try {
/* if client is requesting fetch-from-start and its not the first time reading from this operation
* then reset the fetch position to beginning
*/
if (orientation.equals(FetchOrientation.FETCH_FIRST) && fetchStarted) {
driver.resetFetch();
}
fetchStarted = true;
driver.setMaxRows((int) maxRows);
if (driver.getResults(convey)) {
return decode(convey, rowSet);
}
return rowSet;
} catch (IOException e) {
throw new HiveSQLException(e);
} catch (CommandNeedRetryException e) {
throw new HiveSQLException(e);
} catch (Exception e) {
throw new HiveSQLException(e);
} finally {
convey.clear();
}
}
@Override
public String getTaskStatus() throws HiveSQLException {
if (driver != null) {
List<QueryDisplay.TaskDisplay> statuses = driver.getQueryDisplay().getTaskDisplays();
if (statuses != null) {
ByteArrayOutputStream out = null;
try {
ObjectMapper mapper = new ObjectMapper();
out = new ByteArrayOutputStream();
mapper.writeValue(out, statuses);
return out.toString("UTF-8");
} catch (JsonGenerationException e) {
throw new HiveSQLException(e);
} catch (JsonMappingException e) {
throw new HiveSQLException(e);
} catch (IOException e) {
throw new HiveSQLException(e);
} finally {
if (out != null) {
try {
out.close();
} catch (IOException e) {
throw new HiveSQLException(e);
}
}
}
}
}
// Driver not initialized
return null;
}
private RowSet decode(List<Object> rows, RowSet rowSet) throws Exception {
if (driver.isFetchingTable()) {
return prepareFromRow(rows, rowSet);
}
return decodeFromString(rows, rowSet);
}
// already encoded to thrift-able object in ThriftFormatter
private RowSet prepareFromRow(List<Object> rows, RowSet rowSet) throws Exception {
for (Object row : rows) {
rowSet.addRow((Object[]) row);
}
return rowSet;
}
private RowSet decodeFromString(List<Object> rows, RowSet rowSet)
throws SQLException, SerDeException {
getSerDe();
StructObjectInspector soi = (StructObjectInspector) serde.getObjectInspector();
List<? extends StructField> fieldRefs = soi.getAllStructFieldRefs();
Object[] deserializedFields = new Object[fieldRefs.size()];
Object rowObj;
ObjectInspector fieldOI;
int protocol = getProtocolVersion().getValue();
for (Object rowString : rows) {
try {
rowObj = serde.deserialize(new BytesWritable(((String)rowString).getBytes("UTF-8")));
} catch (UnsupportedEncodingException e) {
throw new SerDeException(e);
}
for (int i = 0; i < fieldRefs.size(); i++) {
StructField fieldRef = fieldRefs.get(i);
fieldOI = fieldRef.getFieldObjectInspector();
Object fieldData = soi.getStructFieldData(rowObj, fieldRef);
deserializedFields[i] = SerDeUtils.toThriftPayload(fieldData, fieldOI, protocol);
}
rowSet.addRow(deserializedFields);
}
return rowSet;
}
private SerDe getSerDe() throws SQLException {
if (serde != null) {
return serde;
}
try {
List<FieldSchema> fieldSchemas = mResultSchema.getFieldSchemas();
StringBuilder namesSb = new StringBuilder();
StringBuilder typesSb = new StringBuilder();
if (fieldSchemas != null && !fieldSchemas.isEmpty()) {
for (int pos = 0; pos < fieldSchemas.size(); pos++) {
if (pos != 0) {
namesSb.append(",");
typesSb.append(",");
}
namesSb.append(fieldSchemas.get(pos).getName());
typesSb.append(fieldSchemas.get(pos).getType());
}
}
String names = namesSb.toString();
String types = typesSb.toString();
serde = new LazySimpleSerDe();
Properties props = new Properties();
if (names.length() > 0) {
LOG.debug("Column names: " + names);
props.setProperty(serdeConstants.LIST_COLUMNS, names);
}
if (types.length() > 0) {
LOG.debug("Column types: " + types);
props.setProperty(serdeConstants.LIST_COLUMN_TYPES, types);
}
SerDeUtils.initializeSerDe(serde, new HiveConf(), props, null);
} catch (Exception ex) {
ex.printStackTrace();
throw new SQLException("Could not create ResultSet: " + ex.getMessage(), ex);
}
return serde;
}
/**
* Get summary information of this SQLOperation for display in WebUI.
*/
public SQLOperationDisplay getSQLOperationDisplay() {
return sqlOpDisplay;
}
@Override
protected void onNewState(OperationState state, OperationState prevState) {
super.onNewState(state, prevState);
currentSQLStateScope = setMetrics(currentSQLStateScope, MetricsConstant.SQL_OPERATION_PREFIX,
MetricsConstant.COMPLETED_SQL_OPERATION_PREFIX, state);
Metrics metrics = MetricsFactory.getInstance();
if (metrics != null) {
try {
// New state is changed to running from something else (user is active)
if (state == OperationState.RUNNING && prevState != state) {
incrementUserQueries(metrics);
}
// New state is not running (user not active) any more
if (prevState == OperationState.RUNNING && prevState != state) {
decrementUserQueries(metrics);
}
} catch (IOException e) {
LOG.warn("Error metrics", e);
}
}
if (state == OperationState.FINISHED || state == OperationState.CANCELED || state == OperationState.ERROR) {
//update runtime
sqlOpDisplay.setRuntime(getOperationComplete() - getOperationStart());
}
if (state == OperationState.CLOSED) {
sqlOpDisplay.closed();
} else {
//CLOSED state not interesting, state before (FINISHED, ERROR) is.
sqlOpDisplay.updateState(state);
}
}
private void incrementUserQueries(Metrics metrics) throws IOException {
String username = parentSession.getUserName();
if (username != null) {
synchronized (userQueries) {
AtomicInteger count = userQueries.get(username);
if (count == null) {
count = new AtomicInteger(0);
AtomicInteger prev = userQueries.put(username, count);
if (prev == null) {
metrics.incrementCounter(ACTIVE_SQL_USER);
} else {
count = prev;
}
}
count.incrementAndGet();
}
}
}
/**
 * Decrements the active-query counter for the current session user. When the counter
 * drops to zero (or below), the user is no longer active: the {@code ACTIVE_SQL_USER}
 * metric is decremented and the user's entry is removed from {@code userQueries}.
 *
 * @param metrics metrics sink to update
 * @throws IOException if the metrics backend fails to record the counter change
 */
private void decrementUserQueries(Metrics metrics) throws IOException {
  String username = parentSession.getUserName();
  if (username == null) {
    return;
  }
  synchronized (userQueries) {
    AtomicInteger activeCount = userQueries.get(username);
    if (activeCount == null) {
      // No counter registered for this user; nothing to decrement.
      return;
    }
    if (activeCount.decrementAndGet() <= 0) {
      metrics.decrementCounter(ACTIVE_SQL_USER);
      userQueries.remove(username);
    }
  }
}
/**
 * @return the value of {@code HiveConf.ConfVars.HIVE_EXECUTION_ENGINE} from this
 *         operation's query-state configuration
 */
public String getExecutionEngine() {
  return queryState.getConf().getVar(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE);
}
}
| BUPTAnderson/apache-hive-2.1.1-src | service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java | Java | apache-2.0 | 27,525 |
package org.drools.compiler.reteoo.compiled;
import org.drools.core.base.ClassObjectType;
import org.drools.core.base.ValueType;
import org.drools.compiler.compiler.PackageBuilder;
import org.drools.compiler.compiler.PackageRegistry;
import org.drools.compiler.lang.descr.PackageDescr;
import org.drools.core.reteoo.ObjectTypeNode;
import org.drools.core.reteoo.compiled.AssertHandler;
import org.drools.core.reteoo.compiled.CompiledNetwork;
import org.drools.core.reteoo.compiled.DeclarationsHandler;
import org.drools.core.reteoo.compiled.HashedAlphasDeclaration;
import org.drools.core.reteoo.compiled.ObjectTypeNodeParser;
import org.drools.core.reteoo.compiled.SetNodeReferenceHandler;
import org.drools.compiler.rule.builder.dialect.java.JavaDialect;
import java.util.Collection;
/**
 * Generates Java source for a {@link CompiledNetwork} subclass that hard-codes the alpha
 * network reachable from a single {@link ObjectTypeNode}, then compiles and loads it via
 * the package's Java dialect.
 *
 * <p>The generated class name is derived from the fact class the OTN represents, e.g.
 * {@code Compiledcom_example_PersonNetwork}.</p>
 */
public class ObjectTypeNodeCompiler {
    private static final String NEWLINE = "\n";
    private static final String PACKAGE_NAME = "org.drools.core.reteoo.compiled";
    // Same package but '/'-separated, as used in binary/resource-style names.
    private static final String BINARY_PACKAGE_NAME = PACKAGE_NAME.replace('.', '/');
    /**
     * This field hold the fully qualified class name that the {@link ObjectTypeNode} is representing.
     */
    private String className;
    /**
     * This field will hold the "simple" name of the generated class
     */
    private String generatedClassSimpleName;
    /**
     * OTN we are creating a compiled network for
     */
    private ObjectTypeNode objectTypeNode;
    // Accumulates the generated Java source across the handler passes.
    private StringBuilder builder = new StringBuilder();
    // Private: instances are only created through the static compile(...) entry point.
    private ObjectTypeNodeCompiler(ObjectTypeNode objectTypeNode) {
        this.objectTypeNode = objectTypeNode;
        ClassObjectType classObjectType = (ClassObjectType) objectTypeNode.getObjectType();
        this.className = classObjectType.getClassName();
        // '.' replaced with '_' so the fact's FQCN can be embedded in a simple class name.
        generatedClassSimpleName = "Compiled" + classObjectType.getClassName().replace('.', '_') + "Network";
    }
    /**
     * Drives the source generation: class declaration, member declarations, constructor,
     * setNetworkNodeReference method, assertObject method, closing brace.
     *
     * @return the complete Java source of the generated class
     */
    private String generateSource() {
        createClassDeclaration();
        ObjectTypeNodeParser parser = new ObjectTypeNodeParser(objectTypeNode);
        // create declarations
        DeclarationsHandler declarations = new DeclarationsHandler(builder);
        parser.accept(declarations);
        // we need the hashed declarations when creating the constructor
        Collection<HashedAlphasDeclaration> hashedAlphaDeclarations = declarations.getHashedAlphaDeclarations();
        createConstructor(hashedAlphaDeclarations);
        // create set node method
        SetNodeReferenceHandler setNode = new SetNodeReferenceHandler(builder);
        parser.accept(setNode);
        // create assert method
        AssertHandler assertHandler = new AssertHandler(builder, className, hashedAlphaDeclarations.size() > 0);
        parser.accept(assertHandler);
        // end of class
        builder.append("}").append(NEWLINE);
        return builder.toString();
    }
    /**
     * This method will output the package statement, followed by the opening of the class declaration
     */
    private void createClassDeclaration() {
        builder.append("package ").append(PACKAGE_NAME).append(";").append(NEWLINE);
        builder.append("public class ").append(generatedClassSimpleName).append(" extends ").
                append(CompiledNetwork.class.getName()).append("{ ").append(NEWLINE);
    }
    /**
     * Creates the constructor for the generated class. If the hashedAlphaDeclarations is empty, it will just
     * output a empty default constructor; if it is not, the constructor will contain code to fill the hash
     * alpha maps with the values and node ids.
     *
     * @param hashedAlphaDeclarations declarations used for creating statements to populate the hashed alpha
     *                                maps for the generate class
     */
    private void createConstructor(Collection<HashedAlphasDeclaration> hashedAlphaDeclarations) {
        builder.append("public ").append(generatedClassSimpleName).append("() {").append(NEWLINE);
        // for each hashed alpha, we need to fill in the map member variable with the hashed values to node Ids
        for (HashedAlphasDeclaration declaration : hashedAlphaDeclarations) {
            String mapVariableName = declaration.getVariableName();
            for (Object hashedValue : declaration.getHashedValues()) {
                Object value = hashedValue;
                // need to quote value if it is a string
                if (declaration.getValueType() == ValueType.STRING_TYPE) {
                    value = "\"" + value + "\"";
                }
                String nodeId = declaration.getNodeId(hashedValue);
                // generate the map.put(hashedValue, nodeId) call
                builder.append(mapVariableName).append(".put(").append(value).append(", ").append(nodeId).append(");");
                builder.append(NEWLINE);
            }
        }
        builder.append("}").append(NEWLINE);
    }
    /**
     * Returns the fully qualified name of the generated subclass of {@link CompiledNetwork}
     *
     * @return name of generated class
     */
    private String getName() {
        return getPackageName() + "." + generatedClassSimpleName;
    }
    /**
     * Returns the fully qualified binary name of the generated subclass of {@link CompiledNetwork}
     *
     * NOTE(review): this mixes a '/'-separated package prefix with a '.' before the simple
     * name and a ".class" suffix; confirm this is exactly what JavaDialect.addSrc expects.
     *
     * @return binary name of generated class
     */
    private String getBinaryName() {
        return BINARY_PACKAGE_NAME + "." + generatedClassSimpleName + ".class";
    }
    private String getPackageName() {
        return PACKAGE_NAME;
    }
    /**
     * Creates a {@link CompiledNetwork} for the specified {@link ObjectTypeNode}. The {@link PackageBuilder} is used
     * to compile the generated source and load the class.
     *
     * @param pkgBuilder     builder used to compile and load class
     * @param objectTypeNode OTN we are generating a compiled network for
     * @return CompiledNetwork
     */
    public static CompiledNetwork compile(PackageBuilder pkgBuilder, ObjectTypeNode objectTypeNode) {
        if (objectTypeNode == null) {
            throw new IllegalArgumentException("ObjectTypeNode cannot be null!");
        }
        if (pkgBuilder == null) {
            throw new IllegalArgumentException("PackageBuilder cannot be null!");
        }
        ObjectTypeNodeCompiler compiler = new ObjectTypeNodeCompiler(objectTypeNode);
        String packageName = compiler.getPackageName();
        // Ensure the target package is registered with the builder before adding source to it.
        PackageRegistry pkgReg = pkgBuilder.getPackageRegistry(packageName);
        if (pkgReg == null) {
            pkgBuilder.addPackage(new PackageDescr(packageName));
            pkgReg = pkgBuilder.getPackageRegistry(packageName);
        }
        String source = compiler.generateSource();
        String generatedSourceName = compiler.getName();
        JavaDialect dialect = (JavaDialect) pkgReg.getDialectCompiletimeRegistry().getDialect("java");
        dialect.addSrc(compiler.getBinaryName(), source.getBytes());
        pkgBuilder.compileAll();
        pkgBuilder.updateResults();
        CompiledNetwork network;
        try {
            network = (CompiledNetwork) Class.forName(generatedSourceName, true, pkgBuilder.getRootClassLoader()).newInstance();
        } catch (ClassNotFoundException e) {
            throw new RuntimeException("This is a bug. Please contact the development team", e);
        } catch (IllegalAccessException e) {
            throw new RuntimeException("This is a bug. Please contact the development team", e);
        } catch (InstantiationException e) {
            throw new RuntimeException("This is a bug. Please contact the development team", e);
        }
        return network;
    }
}
| Buble1981/MyDroolsFork | drools-compiler/src/main/java/org/drools/compiler/reteoo/compiled/ObjectTypeNodeCompiler.java | Java | apache-2.0 | 7,672 |
<?php
/**
* This file is part of the SevenShores/NetSuite library
* AND originally from the NetSuite PHP Toolkit.
*
* New content:
* @package ryanwinchester/netsuite-php
* @copyright Copyright (c) Ryan Winchester
* @license http://www.apache.org/licenses/LICENSE-2.0 Apache-2.0
* @link https://github.com/ryanwinchester/netsuite-php
*
* Original content:
* @copyright Copyright (c) NetSuite Inc.
* @license https://raw.githubusercontent.com/ryanwinchester/netsuite-php/master/original/NetSuite%20Application%20Developer%20License%20Agreement.txt
* @link http://www.netsuite.com/portal/developers/resources/suitetalk-sample-applications.shtml
*
* generated: 2019-06-12 10:27:00 AM PDT
*/
namespace NetSuite\Classes;
class EntityGroupSearchRow extends SearchRow {
    /** @var EntityGroupSearchRowBasic Search-row columns of the entity group itself. */
    public $basic;
    /** @var EntitySearchRowBasic Columns joined from the group's members. */
    public $groupMemberJoin;
    /** @var EmployeeSearchRowBasic Columns joined from the associated user/employee. */
    public $userJoin;
    /** @var CustomSearchRowBasic[] Columns joined from custom searches. */
    public $customSearchJoin;
    // Maps each property to its SOAP type name; consumed by the SOAP (de)serializer.
    static $paramtypesmap = array(
        "basic" => "EntityGroupSearchRowBasic",
        "groupMemberJoin" => "EntitySearchRowBasic",
        "userJoin" => "EmployeeSearchRowBasic",
        "customSearchJoin" => "CustomSearchRowBasic[]",
    );
}
| fungku/netsuite-php | src/Classes/EntityGroupSearchRow.php | PHP | apache-2.0 | 1,153 |
using Microsoft.AspNet.Mvc;
using Microsoft.AspNet.Mvc.Filters;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace School.Helpers
{
[AttributeUsage(AttributeTargets.Class | AttributeTargets.Method)]
public sealed class NoCacheAttribute : ActionFilterAttribute
{
    /// <summary>
    /// Disables response caching for the decorated controller or action by flipping
    /// <c>NoStore</c> on the <see cref="ResponseCacheFilter"/> registered for the
    /// executing result, when one is present.
    /// </summary>
    /// <param name="filterContext">Context of the result currently executing.</param>
    public override void OnResultExecuting(ResultExecutingContext filterContext)
    {
        // Scan for the first filter whose runtime type is exactly ResponseCacheFilter
        // (intentionally not matching subclasses) and mark it as no-store.
        foreach (var registeredFilter in filterContext.Filters)
        {
            if (registeredFilter.GetType() == typeof(ResponseCacheFilter))
            {
                ((ResponseCacheFilter)registeredFilter).NoStore = true;
                break;
            }
        }
        base.OnResultExecuting(filterContext);
    }
}
}
| davutg/LookUp | LookUp/School/Helpers/NoCacheAttribute.cs | C# | apache-2.0 | 1,532 |
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("CAT.Datamining")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("CAT.Datamining")]
[assembly: AssemblyCopyright("Copyright © 2013")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("ea9a0a05-202f-48ff-8350-c09453f1b006")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
| AlexCatarino/CAT | CAT.Datamining/Properties/AssemblyInfo.cs | C# | apache-2.0 | 1,404 |
/*
* Copyright 2013-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.cloudfoundry.client.v2.routemappings;
import org.junit.Test;
public final class GetRouteMappingRequestTest {
    // Building a request without the mandatory route-mapping id must fail validation.
    @Test(expected = IllegalStateException.class)
    public void noRouteMappingId() {
        GetRouteMappingRequest.builder()
            .build();
    }
    // A request with the route-mapping id set builds without throwing.
    @Test
    public void valid() {
        GetRouteMappingRequest.builder()
            .routeMappingId("route-mapping-id")
            .build();
    }
}
| cloudfoundry/cf-java-client | cloudfoundry-client/src/test/java/org/cloudfoundry/client/v2/routemappings/GetRouteMappingRequestTest.java | Java | apache-2.0 | 1,059 |
package com.qingbo.ginkgo.ygb.web.pojo;
import java.io.Serializable;
/**
 * Serializable request/transfer object carrying the data needed to add a broker
 * under a marketing organization.
 */
public class BrokerAdd implements Serializable{
    private static final long serialVersionUID = -6153623054869466896L;
    // userId of the marketing organization this broker belongs to.
    private Long marketingUserId;
    // Customer number of the broker.
    private String customerNum;
    // Login/user name of the broker.
    private String userName;
    // Real (display) name of the broker.
    private String realName;
    public Long getMarketingUserId() {
        return marketingUserId;
    }
    public void setMarketingUserId(Long marketingUserId) {
        this.marketingUserId = marketingUserId;
    }
    public String getCustomerNum() {
        return customerNum;
    }
    public void setCustomerNum(String customerNum) {
        this.customerNum = customerNum;
    }
    public String getUserName() {
        return userName;
    }
    public void setUserName(String userName) {
        this.userName = userName;
    }
    public String getRealName() {
        return realName;
    }
    public void setRealName(String realName) {
        this.realName = realName;
    }
}
| hwxiasn/archetypes | ygb/ygb-web/src/main/java/com/qingbo/ginkgo/ygb/web/pojo/BrokerAdd.java | Java | apache-2.0 | 954 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.operator.scalar;
import com.google.common.collect.ImmutableList;
import io.trino.jmh.Benchmarks;
import io.trino.metadata.FunctionArgumentDefinition;
import io.trino.metadata.FunctionBinding;
import io.trino.metadata.FunctionListBuilder;
import io.trino.metadata.FunctionMetadata;
import io.trino.metadata.ResolvedFunction;
import io.trino.metadata.Signature;
import io.trino.metadata.SqlScalarFunction;
import io.trino.metadata.TestingFunctionResolution;
import io.trino.operator.DriverYieldSignal;
import io.trino.operator.project.PageProcessor;
import io.trino.spi.Page;
import io.trino.spi.block.Block;
import io.trino.spi.block.BlockBuilder;
import io.trino.spi.type.ArrayType;
import io.trino.spi.type.Type;
import io.trino.spi.type.TypeSignature;
import io.trino.sql.gen.ExpressionCompiler;
import io.trino.sql.relational.CallExpression;
import io.trino.sql.relational.LambdaDefinitionExpression;
import io.trino.sql.relational.RowExpression;
import io.trino.sql.relational.VariableReferenceExpression;
import io.trino.sql.tree.QualifiedName;
import io.trino.type.FunctionType;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import java.lang.invoke.MethodHandle;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import static com.google.common.base.Throwables.throwIfUnchecked;
import static com.google.common.base.Verify.verify;
import static io.trino.memory.context.AggregatedMemoryContext.newSimpleAggregatedMemoryContext;
import static io.trino.metadata.FunctionKind.SCALAR;
import static io.trino.metadata.Signature.typeVariable;
import static io.trino.operator.scalar.BenchmarkArrayFilter.ExactArrayFilterFunction.EXACT_ARRAY_FILTER_FUNCTION;
import static io.trino.spi.function.InvocationConvention.InvocationArgumentConvention.NEVER_NULL;
import static io.trino.spi.function.InvocationConvention.InvocationReturnConvention.FAIL_ON_NULL;
import static io.trino.spi.function.OperatorType.LESS_THAN;
import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.BooleanType.BOOLEAN;
import static io.trino.spi.type.TypeSignature.arrayType;
import static io.trino.spi.type.TypeSignature.functionType;
import static io.trino.spi.type.TypeUtils.readNativeValue;
import static io.trino.sql.analyzer.TypeSignatureProvider.fromTypes;
import static io.trino.sql.relational.Expressions.constant;
import static io.trino.sql.relational.Expressions.field;
import static io.trino.testing.TestingConnectorSession.SESSION;
import static io.trino.util.Reflection.methodHandle;
import static java.lang.Boolean.TRUE;
/**
 * JMH benchmark comparing the built-in {@code filter} array function against an
 * "exact" (non-generic, directly invoked) variant registered below as
 * {@code exact_filter}. Each invocation runs a compiled page processor over
 * {@code POSITIONS} rows of BIGINT arrays of length {@code ARRAY_SIZE}, applying
 * the predicate {@code x -> 0 < x}.
 */
@SuppressWarnings("MethodMayBeStatic")
@State(Scope.Thread)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@Fork(2)
@Warmup(iterations = 10, time = 500, timeUnit = TimeUnit.MILLISECONDS)
@Measurement(iterations = 10, time = 500, timeUnit = TimeUnit.MILLISECONDS)
@BenchmarkMode(Mode.AverageTime)
public class BenchmarkArrayFilter
{
    private static final int POSITIONS = 100_000;
    private static final int ARRAY_SIZE = 4;
    private static final int NUM_TYPES = 1;
    private static final List<Type> TYPES = ImmutableList.of(BIGINT);
    static {
        // Keep NUM_TYPES (used in @OperationsPerInvocation) in sync with TYPES.
        verify(NUM_TYPES == TYPES.size());
    }
    @Benchmark
    @OperationsPerInvocation(POSITIONS * ARRAY_SIZE * NUM_TYPES)
    public List<Optional<Page>> benchmark(BenchmarkData data)
    {
        return ImmutableList.copyOf(
                data.getPageProcessor().process(
                        SESSION,
                        new DriverYieldSignal(),
                        newSimpleAggregatedMemoryContext().newLocalMemoryContext(PageProcessor.class.getSimpleName()),
                        data.getPage()));
    }
    /**
     * Benchmark state: builds the input page and compiles a page processor that applies
     * either the built-in {@code filter} or the {@code exact_filter} registered below,
     * selected by the {@code name} parameter.
     */
    @SuppressWarnings("FieldMayBeFinal")
    @State(Scope.Thread)
    public static class BenchmarkData
    {
        @Param({"filter", "exact_filter"})
        private String name = "filter";
        private Page page;
        private PageProcessor pageProcessor;
        @Setup
        public void setup()
        {
            TestingFunctionResolution functionResolution = new TestingFunctionResolution()
                    .addFunctions(new FunctionListBuilder().function(EXACT_ARRAY_FILTER_FUNCTION).getFunctions());
            ExpressionCompiler compiler = functionResolution.getExpressionCompiler();
            ImmutableList.Builder<RowExpression> projectionsBuilder = ImmutableList.builder();
            Block[] blocks = new Block[TYPES.size()];
            for (int i = 0; i < TYPES.size(); i++) {
                Type elementType = TYPES.get(i);
                ArrayType arrayType = new ArrayType(elementType);
                ResolvedFunction resolvedFunction = functionResolution.resolveFunction(
                        QualifiedName.of(name),
                        fromTypes(arrayType, new FunctionType(ImmutableList.of(BIGINT), BOOLEAN)));
                ResolvedFunction lessThan = functionResolution.resolveOperator(LESS_THAN, ImmutableList.of(BIGINT, BIGINT));
                // Projection: name(col_i, x -> 0 < x)
                projectionsBuilder.add(new CallExpression(resolvedFunction, ImmutableList.of(
                        field(0, arrayType),
                        new LambdaDefinitionExpression(
                                ImmutableList.of(BIGINT),
                                ImmutableList.of("x"),
                                new CallExpression(lessThan, ImmutableList.of(constant(0L, BIGINT), new VariableReferenceExpression("x", BIGINT)))))));
                blocks[i] = createChannel(POSITIONS, ARRAY_SIZE, arrayType);
            }
            ImmutableList<RowExpression> projections = projectionsBuilder.build();
            pageProcessor = compiler.compilePageProcessor(Optional.empty(), projections).get();
            page = new Page(blocks);
        }
        // Builds one column of `positionCount` arrays, each holding `arraySize` random longs.
        private static Block createChannel(int positionCount, int arraySize, ArrayType arrayType)
        {
            BlockBuilder blockBuilder = arrayType.createBlockBuilder(null, positionCount);
            for (int position = 0; position < positionCount; position++) {
                BlockBuilder entryBuilder = blockBuilder.beginBlockEntry();
                for (int i = 0; i < arraySize; i++) {
                    if (arrayType.getElementType().getJavaType() == long.class) {
                        arrayType.getElementType().writeLong(entryBuilder, ThreadLocalRandom.current().nextLong());
                    }
                    else {
                        throw new UnsupportedOperationException();
                    }
                }
                blockBuilder.closeEntry();
            }
            return blockBuilder.build();
        }
        public PageProcessor getPageProcessor()
        {
            return pageProcessor;
        }
        public Page getPage()
        {
            return page;
        }
    }
    public static void main(String[] args)
            throws Exception
    {
        // assure the benchmarks are valid before running
        BenchmarkData data = new BenchmarkData();
        data.setup();
        new BenchmarkArrayFilter().benchmark(data);
        Benchmarks.benchmark(BenchmarkArrayFilter.class).run();
    }
    /**
     * Hand-rolled scalar function {@code exact_filter(array(T), function(T, boolean))}:
     * semantically like the built-in {@code filter}, but implemented with a single
     * non-generic method handle so the benchmark can measure dispatch overhead.
     */
    public static final class ExactArrayFilterFunction
            extends SqlScalarFunction
    {
        public static final ExactArrayFilterFunction EXACT_ARRAY_FILTER_FUNCTION = new ExactArrayFilterFunction();
        private static final MethodHandle METHOD_HANDLE = methodHandle(ExactArrayFilterFunction.class, "filter", Type.class, Block.class, MethodHandle.class);
        private ExactArrayFilterFunction()
        {
            super(new FunctionMetadata(
                    new Signature(
                            "exact_filter",
                            ImmutableList.of(typeVariable("T")),
                            ImmutableList.of(),
                            arrayType(new TypeSignature("T")),
                            ImmutableList.of(
                                    arrayType(new TypeSignature("T")),
                                    functionType(new TypeSignature("T"), BOOLEAN.getTypeSignature())),
                            false),
                    false,
                    ImmutableList.of(
                            new FunctionArgumentDefinition(false),
                            new FunctionArgumentDefinition(false)),
                    false,
                    false,
                    "return array containing elements that match the given predicate",
                    SCALAR));
        }
        @Override
        protected ScalarFunctionImplementation specialize(FunctionBinding functionBinding)
        {
            Type type = functionBinding.getTypeVariable("T");
            return new ChoicesScalarFunctionImplementation(
                    functionBinding,
                    FAIL_ON_NULL,
                    ImmutableList.of(NEVER_NULL, NEVER_NULL),
                    METHOD_HANDLE.bindTo(type));
        }
        // Copies every element for which the predicate returns TRUE into a new block.
        public static Block filter(Type type, Block block, MethodHandle function)
        {
            int positionCount = block.getPositionCount();
            BlockBuilder resultBuilder = type.createBlockBuilder(null, positionCount);
            for (int position = 0; position < positionCount; position++) {
                Long input = (Long) readNativeValue(type, block, position);
                Boolean keep;
                try {
                    keep = (Boolean) function.invokeExact(input);
                }
                catch (Throwable t) {
                    throwIfUnchecked(t);
                    throw new RuntimeException(t);
                }
                if (TRUE.equals(keep)) {
                    block.writePositionTo(position, resultBuilder);
                }
            }
            return resultBuilder.build();
        }
    }
}
| Praveen2112/presto | core/trino-main/src/test/java/io/trino/operator/scalar/BenchmarkArrayFilter.java | Java | apache-2.0 | 10,904 |
package StructuralPatterns.ProxyPatterns.ProxyClasses;
import StructuralPatterns.ProxyPatterns.OriginalClasses.ConcreteSubject;
import StructuralPatterns.ProxyPatterns.OriginalClasses.Subject;
/**
 * Proxy-pattern participant: stands in for {@link ConcreteSubject} and creates
 * the real subject lazily on first use.
 */
public class Proxy extends Subject {
    // Real subject; created on demand by doSomeWork().
    ConcreteSubject cs;
    @Override
    public void doSomeWork() {
        System.out.println("Proxy call happening now");
        // Lazy initizalization
        if(cs == null) {
            cs = new ConcreteSubject();
        }
        cs.doSomeWork();
    }
}
| Daniel-Dos/Java-Design-Patterns | src/StructuralPatterns/ProxyPatterns/ProxyClasses/Proxy.java | Java | apache-2.0 | 448 |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web.Http;
namespace Practicing_Programmer
{
/// <summary>
/// Web API startup configuration: registers the default attribute-less route
/// "api/{controller}/{id}" with an optional id segment.
/// </summary>
public static class WebApiConfig
{
    /// <param name="config">The HTTP configuration to register routes on.</param>
    public static void Register(HttpConfiguration config)
    {
        config.Routes.MapHttpRoute(
            name: "DefaultApi",
            routeTemplate: "api/{controller}/{id}",
            defaults: new { id = RouteParameter.Optional }
        );
    }
}
}
| hunter497/PracticingProgrammer | Practicing_Programmer/App_Start/WebApiConfig.cs | C# | apache-2.0 | 471 |
package com.alex.common.model;
import com.baidu.platform.comapi.basestruct.GeoPoint;
/**
 * A thin wrapper around Baidu's {@link GeoPoint} that accepts latitude/longitude
 * in plain degrees (double) instead of microdegrees (int).
 * @author caisenchuan
 */
public class GeoPos extends GeoPoint{
    /*--------------------------
     * Constants
     *-------------------------*/
    /*--------------------------
     * Custom types
     *-------------------------*/
    /*--------------------------
     * Member variables
     *-------------------------*/
    /*--------------------------
     * Public methods
     *-------------------------*/
    /**
     * Construct directly from latitude/longitude expressed in degrees.
     * Values are scaled to microdegrees (degrees * 1E6); note the cast
     * truncates toward zero rather than rounding.
     * @param lat latitude in degrees
     * @param lon longitude in degrees
     */
    public GeoPos(double lat, double lon) {
        super((int)(lat * 1E6), (int)(lon * 1E6));
    }
    /*--------------------------
     * Protected / package-private methods
     *-------------------------*/
    /*--------------------------
     * Private methods
     *-------------------------*/
    /**
     * Construct from int latitude/longitude already in microdegrees (degrees * 1E6).
     * Kept private: only the degree-based constructor is part of the public API.
     * @param lat latitude in microdegrees
     * @param lon longitude in microdegrees
     */
    private GeoPos(int lat, int lon) {
        super(lat, lon);
    }
}
| alexcaisenchuan/FunWeibo | src/com/alex/common/model/GeoPos.java | Java | apache-2.0 | 1,134 |
package fr.javatronic.blog.massive.annotation2;
import fr.javatronic.blog.processor.Annotation_002;
/**
 * Empty class carrying {@code @Annotation_002}; presumably one of many generated
 * fixtures for stress-testing annotation processing (inferred from the package
 * name) — no behavior of its own.
 */
@Annotation_002
public class Class_039 {
}
| lesaint/experimenting-annotation-processing | experimenting-rounds/massive-count-of-annotated-classes/src/main/java/fr/javatronic/blog/massive/annotation2/Class_039.java | Java | apache-2.0 | 145 |
<?php
// Codeception unit test for \Akademiano\Sites\SitesManager: covers site lookup,
// theme resolution, and the public-storage path/file API against fixture sites
// on disk plus a temp directory created per run.
class SitesManagerTest extends \Codeception\Test\Unit
{
    /**
     * @var \UnitTester
     */
    protected $tester;
    /** @var \Akademiano\Sites\SitesManager */
    protected $sitesManager;
    protected function _before()
    {
        // Build the manager with the real composer class loader and a mocked environment.
        $classLoader = include __DIR__ . "/../../vendor/autoload.php";
        $environment = Mockery::mock(\Akademiano\HttpWarp\Environment::class);
        $this->sitesManager = new \Akademiano\Sites\SitesManager($classLoader, $environment);
        $this->sitesManager->setRootDir(__DIR__ . "/../../");
    }
    protected function _after()
    {
        unset($this->sitesManager);
        Mockery::close();
    }
    public function testMain()
    {
        // Unknown site names yield null; path-like names are rejected with an exception.
        $this->tester->assertNull($this->sitesManager->getSite("not_exist_site"));
        $this->tester->expectException(\Exception::class, function () {
            $this->sitesManager->getSite("/tmp");
        });
        $siteName = "_testsite_test";
        $site = $this->sitesManager->getSite($siteName);
        $this->tester->assertInstanceOf(\Akademiano\Sites\SiteInterface::class, $site);
        // Theme lookup: missing theme is null, fixture theme resolves.
        $themesDir = $site->getThemesDir();
        $this->tester->assertInstanceOf(\Akademiano\Sites\Site\ThemesDir::class, $themesDir);
        $this->tester->assertNull($themesDir->getTheme("not-exist"));
        $this->tester->assertInstanceOf(\Akademiano\Sites\Site\Theme::class, $themesDir->getTheme("test-theme"));
        // Build a throwaway root dir (tempnam gives a unique name; recreate it as a dir).
        $tmpDir = sys_get_temp_dir();
        $tempSubDir = tempnam($tmpDir, '');
        $tmpPartSubDir = basename($tempSubDir);
        unlink($tempSubDir);
        mkdir($tempSubDir);
        if (!is_dir($pubSitesDir = $tempSubDir . DIRECTORY_SEPARATOR . \Akademiano\Sites\Site\PublicStorage::GLOBAL_DIR)) {
            mkdir($pubSitesDir, 0777);
        }
        $tempSubDir = realpath($tempSubDir);
        // Swap the site's root to the temp dir, capture paths, then restore it.
        $rootDir = $site->getRootDir();
        $site->setRootDir($tempSubDir);
        $publicGlobalPath = $site->getPublicGlobalPath();
        $this->tester->assertEquals($tempSubDir . DIRECTORY_SEPARATOR . \Akademiano\Sites\Site\PublicStorage::GLOBAL_DIR, $site->getPublicDir());
        $site->setRootDir($rootDir);
        $this->tester->assertEquals(
            $tempSubDir . DIRECTORY_SEPARATOR . \Akademiano\Sites\Site\PublicStorage::GLOBAL_DIR . DIRECTORY_SEPARATOR . $siteName,
            $publicGlobalPath
        );
        $this->tester->assertEquals("/" . \Akademiano\Sites\Site\PublicStorage::GLOBAL_DIR . "/" . $siteName, $site->getPublicWebPath());
        // Public storage: missing file is null; path traversal outside the store is denied.
        $publicStore = $site->getPublicStorage();
        $this->tester->assertInstanceOf(\Akademiano\Sites\Site\PublicStorage::class, $publicStore);
        $this->tester->assertNull($publicStore->getFile("not-exist-file"));
        $this->tester->expectException(
            \Akademiano\HttpWarp\Exception\AccessDeniedException::class,
            function () use ($publicStore) {
                return $publicStore->getFile("../../../../../composer.json");
            }
        );
        // Existing fixture file: web path, filesystem path and content all line up.
        $fileName = "test-file.txt";
        $testFile = $publicStore->getFile($fileName);
        $this->tester->assertInstanceOf(\Akademiano\Sites\Site\File::class, $testFile);
        $this->tester->assertEquals("/" . \Akademiano\Sites\Site\PublicStorage::GLOBAL_DIR . "/" . $siteName . "/" . $fileName, $testFile->getWebPath());
        $this->tester->assertEquals($pubSitesDir . DIRECTORY_SEPARATOR . $siteName . DIRECTORY_SEPARATOR . $fileName, $testFile->getPath());
        $this->tester->assertEquals("test-file", $testFile->getContent());
    }
}
| mrdatamapper/akademiano-sites | tests/unit/SitesManagerTest.php | PHP | apache-2.0 | 3,556 |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.userdefinedjavaclass;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.pentaho.di.core.BlockingRowSet;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.ResultFile;
import org.pentaho.di.core.RowSet;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleRowException;
import org.pentaho.di.core.exception.KettleStepException;
import org.pentaho.di.core.row.RowDataUtil;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.row.value.ValueMetaFactory;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStepData.StepExecutionStatus;
import org.pentaho.di.trans.step.RowListener;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepIOMeta;
import org.pentaho.di.trans.step.StepIOMetaInterface;
import org.pentaho.di.trans.step.StepListener;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
import org.pentaho.di.trans.step.errorhandling.Stream;
import org.pentaho.di.trans.step.errorhandling.StreamIcon;
import org.pentaho.di.trans.step.errorhandling.StreamInterface.StreamType;
import org.pentaho.di.trans.steps.userdefinedjavaclass.UserDefinedJavaClassMeta.FieldInfo;
import org.pentaho.di.www.SocketRepository;
public abstract class TransformClassBase {
private static Class<?> PKG = UserDefinedJavaClassMeta.class; // for i18n purposes, needed by Translator2!!
// True until the first row is processed; user code may use this for one-time setup.
protected boolean first = true;
// When true, the output row meta should be (re)derived before use.
protected boolean updateRowMeta = true;
// Owning step instance; all BaseStep functionality is delegated to it.
protected UserDefinedJavaClass parent;
protected UserDefinedJavaClassMeta meta;
protected UserDefinedJavaClassData data;
/**
 * Initializes the transform's row metadata and the tag->value / tag->step-name lookup
 * maps from the step metadata.
 *
 * <p>Entries with a null tag, step or name are silently skipped when populating
 * {@code parameterMap}, {@code infoMap} and {@code targetMap}.</p>
 *
 * @param parent the step instance this transform delegates to
 * @param meta   step metadata (usage parameters, info/target step definitions)
 * @param data   step data holder whose row metas and maps are populated here
 * @throws KettleStepException if the previous/this step fields cannot be resolved
 */
public TransformClassBase( UserDefinedJavaClass parent, UserDefinedJavaClassMeta meta,
    UserDefinedJavaClassData data ) throws KettleStepException {
  this.parent = parent;
  this.meta = meta;
  this.data = data;
  try {
    data.inputRowMeta = getTransMeta().getPrevStepFields( getStepMeta() ).clone();
    data.outputRowMeta = getTransMeta().getThisStepFields( getStepMeta(), null, data.inputRowMeta.clone() );
    data.parameterMap = new HashMap<String, String>();
    for ( UsageParameter par : meta.getUsageParameters() ) {
      if ( par.tag != null && par.value != null ) {
        data.parameterMap.put( par.tag, par.value );
      }
    }
    data.infoMap = new HashMap<String, String>();
    for ( StepDefinition stepDefinition : meta.getInfoStepDefinitions() ) {
      if ( stepDefinition.tag != null
        && stepDefinition.stepMeta != null && stepDefinition.stepMeta.getName() != null ) {
        data.infoMap.put( stepDefinition.tag, stepDefinition.stepMeta.getName() );
      }
    }
    data.targetMap = new HashMap<String, String>();
    for ( StepDefinition stepDefinition : meta.getTargetStepDefinitions() ) {
      if ( stepDefinition.tag != null
        && stepDefinition.stepMeta != null && stepDefinition.stepMeta.getName() != null ) {
        data.targetMap.put( stepDefinition.tag, stepDefinition.stepMeta.getName() );
      }
    }
  } catch ( KettleStepException e ) {
    // NOTE(review): printing the stack trace before rethrowing duplicates logging;
    // the caller presumably logs this exception — consider removing.
    e.printStackTrace();
    throw e;
  }
}
// ---------------------------------------------------------------------------
// Thin delegates: expose the parent step's BaseStep functionality (row sets,
// listeners, counters, lifecycle) to user-defined generated classes. Each
// method forwards directly to the corresponding *Impl method on `parent`.
// ---------------------------------------------------------------------------
public void addResultFile( ResultFile resultFile ) {
  parent.addResultFileImpl( resultFile );
}
public void addRowListener( RowListener rowListener ) {
  parent.addRowListenerImpl( rowListener );
}
public void addStepListener( StepListener stepListener ) {
  parent.addStepListenerImpl( stepListener );
}
public boolean checkFeedback( long lines ) {
  return parent.checkFeedbackImpl( lines );
}
public void cleanup() {
  parent.cleanupImpl();
}
public long decrementLinesRead() {
  return parent.decrementLinesReadImpl();
}
public long decrementLinesWritten() {
  return parent.decrementLinesWrittenImpl();
}
public void dispose( StepMetaInterface smi, StepDataInterface sdi ) {
  parent.disposeImpl( smi, sdi );
}
public RowSet findInputRowSet( String sourceStep ) throws KettleStepException {
  return parent.findInputRowSetImpl( sourceStep );
}
public RowSet findInputRowSet( String from, int fromcopy, String to, int tocopy ) {
  return parent.findInputRowSetImpl( from, fromcopy, to, tocopy );
}
public RowSet findOutputRowSet( String targetStep ) throws KettleStepException {
  return parent.findOutputRowSetImpl( targetStep );
}
public RowSet findOutputRowSet( String from, int fromcopy, String to, int tocopy ) {
  return parent.findOutputRowSetImpl( from, fromcopy, to, tocopy );
}
public int getClusterSize() {
  return parent.getClusterSizeImpl();
}
public int getCopy() {
  return parent.getCopyImpl();
}
public RowMetaInterface getErrorRowMeta() {
  return parent.getErrorRowMetaImpl();
}
public long getErrors() {
  return parent.getErrorsImpl();
}
public RowMetaInterface getInputRowMeta() {
  return parent.getInputRowMetaImpl();
}
public List<RowSet> getInputRowSets() {
  return parent.getInputRowSetsImpl();
}
public long getLinesInput() {
  return parent.getLinesInputImpl();
}
public long getLinesOutput() {
  return parent.getLinesOutputImpl();
}
public long getLinesRead() {
  return parent.getLinesReadImpl();
}
public long getLinesRejected() {
  return parent.getLinesRejectedImpl();
}
public long getLinesSkipped() {
  return parent.getLinesSkippedImpl();
}
public long getLinesUpdated() {
return parent.getLinesUpdatedImpl();
}
public long getLinesWritten() {
return parent.getLinesWrittenImpl();
}
public List<RowSet> getOutputRowSets() {
return parent.getOutputRowSetsImpl();
}
public String getPartitionID() {
return parent.getPartitionIDImpl();
}
public Map<String, BlockingRowSet> getPartitionTargets() {
return parent.getPartitionTargetsImpl();
}
public long getProcessed() {
return parent.getProcessedImpl();
}
public int getRepartitioning() {
return parent.getRepartitioningImpl();
}
public Map<String, ResultFile> getResultFiles() {
return parent.getResultFilesImpl();
}
public Object[] getRow() throws KettleException {
Object[] row = parent.getRowImpl();
if ( updateRowMeta ) {
// Update data.inputRowMeta and data.outputRowMeta
RowMetaInterface inputRowMeta = parent.getInputRowMeta();
data.inputRowMeta = inputRowMeta;
data.outputRowMeta =
inputRowMeta == null ? null : getTransMeta().getThisStepFields(
getStepMeta(), null, inputRowMeta.clone() );
updateRowMeta = false;
}
return row;
}
public Object[] getRowFrom( RowSet rowSet ) throws KettleStepException {
return parent.getRowFromImpl( rowSet );
}
public List<RowListener> getRowListeners() {
return parent.getRowListenersImpl();
}
public long getRuntime() {
return parent.getRuntimeImpl();
}
public int getSlaveNr() {
return parent.getSlaveNrImpl();
}
public SocketRepository getSocketRepository() {
return parent.getSocketRepositoryImpl();
}
public StepExecutionStatus getStatus() {
return parent.getStatusImpl();
}
public String getStatusDescription() {
return parent.getStatusDescriptionImpl();
}
public StepDataInterface getStepDataInterface() {
return parent.getStepDataInterfaceImpl();
}
public String getStepID() {
return parent.getStepIDImpl();
}
public List<StepListener> getStepListeners() {
return parent.getStepListenersImpl();
}
public StepMeta getStepMeta() {
return parent.getStepMetaImpl();
}
public String getStepname() {
return parent.getStepnameImpl();
}
public Trans getTrans() {
return parent.getTransImpl();
}
public TransMeta getTransMeta() {
return parent.getTransMetaImpl();
}
public String getTypeId() {
return parent.getTypeIdImpl();
}
public int getUniqueStepCountAcrossSlaves() {
return parent.getUniqueStepCountAcrossSlavesImpl();
}
public int getUniqueStepNrAcrossSlaves() {
return parent.getUniqueStepNrAcrossSlavesImpl();
}
public String getVariable( String variableName ) {
return parent.getVariableImpl( variableName );
}
public String getVariable( String variableName, String defaultValue ) {
return parent.getVariableImpl( variableName, defaultValue );
}
public long incrementLinesInput() {
return parent.incrementLinesInputImpl();
}
public long incrementLinesOutput() {
return parent.incrementLinesOutputImpl();
}
public long incrementLinesRead() {
return parent.incrementLinesReadImpl();
}
public long incrementLinesRejected() {
return parent.incrementLinesRejectedImpl();
}
public long incrementLinesSkipped() {
return parent.incrementLinesSkippedImpl();
}
public long incrementLinesUpdated() {
return parent.incrementLinesUpdatedImpl();
}
public long incrementLinesWritten() {
return parent.incrementLinesWrittenImpl();
}
public boolean init( StepMetaInterface stepMetaInterface, StepDataInterface stepDataInterface ) {
return parent.initImpl( stepMetaInterface, stepDataInterface );
}
public void initBeforeStart() throws KettleStepException {
parent.initBeforeStartImpl();
}
public boolean isDistributed() {
return parent.isDistributedImpl();
}
public boolean isInitialising() {
return parent.isInitialisingImpl();
}
public boolean isPartitioned() {
return parent.isPartitionedImpl();
}
public boolean isSafeModeEnabled() {
return parent.isSafeModeEnabledImpl();
}
public boolean isStopped() {
return parent.isStoppedImpl();
}
public boolean isUsingThreadPriorityManagment() {
return parent.isUsingThreadPriorityManagmentImpl();
}
public void logBasic( String s ) {
parent.logBasicImpl( s );
}
public void logDebug( String s ) {
parent.logDebugImpl( s );
}
public void logDetailed( String s ) {
parent.logDetailedImpl( s );
}
public void logError( String s ) {
parent.logErrorImpl( s );
}
public void logError( String s, Throwable e ) {
parent.logErrorImpl( s, e );
}
public void logMinimal( String s ) {
parent.logMinimalImpl( s );
}
public void logRowlevel( String s ) {
parent.logRowlevelImpl( s );
}
public void logSummary() {
parent.logSummaryImpl();
}
public void markStart() {
parent.markStartImpl();
}
public void markStop() {
parent.markStopImpl();
}
public void openRemoteInputStepSocketsOnce() throws KettleStepException {
parent.openRemoteInputStepSocketsOnceImpl();
}
public void openRemoteOutputStepSocketsOnce() throws KettleStepException {
parent.openRemoteOutputStepSocketsOnceImpl();
}
public boolean outputIsDone() {
return parent.outputIsDoneImpl();
}
public abstract boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException;
public void putError( RowMetaInterface rowMeta, Object[] row, long nrErrors, String errorDescriptions,
String fieldNames, String errorCodes ) throws KettleStepException {
parent.putErrorImpl( rowMeta, row, nrErrors, errorDescriptions, fieldNames, errorCodes );
}
public void putRow( RowMetaInterface row, Object[] data ) throws KettleStepException {
parent.putRowImpl( row, data );
}
public void putRowTo( RowMetaInterface rowMeta, Object[] row, RowSet rowSet ) throws KettleStepException {
parent.putRowToImpl( rowMeta, row, rowSet );
}
public void removeRowListener( RowListener rowListener ) {
parent.removeRowListenerImpl( rowListener );
}
public int rowsetInputSize() {
return parent.rowsetInputSizeImpl();
}
public int rowsetOutputSize() {
return parent.rowsetOutputSizeImpl();
}
public void safeModeChecking( RowMetaInterface row ) throws KettleRowException {
parent.safeModeCheckingImpl( row );
}
public void setErrors( long errors ) {
parent.setErrorsImpl( errors );
}
public void setInputRowMeta( RowMetaInterface rowMeta ) {
parent.setInputRowMetaImpl( rowMeta );
}
public void setInputRowSets( List<RowSet> inputRowSets ) {
parent.setInputRowSetsImpl( inputRowSets );
}
public void setLinesInput( long newLinesInputValue ) {
parent.setLinesInputImpl( newLinesInputValue );
}
public void setLinesOutput( long newLinesOutputValue ) {
parent.setLinesOutputImpl( newLinesOutputValue );
}
public void setLinesRead( long newLinesReadValue ) {
parent.setLinesReadImpl( newLinesReadValue );
}
public void setLinesRejected( long linesRejected ) {
parent.setLinesRejectedImpl( linesRejected );
}
public void setLinesSkipped( long newLinesSkippedValue ) {
parent.setLinesSkippedImpl( newLinesSkippedValue );
}
public void setLinesUpdated( long newLinesUpdatedValue ) {
parent.setLinesUpdatedImpl( newLinesUpdatedValue );
}
public void setLinesWritten( long newLinesWrittenValue ) {
parent.setLinesWrittenImpl( newLinesWrittenValue );
}
public void setOutputDone() {
parent.setOutputDoneImpl();
}
public void setOutputRowSets( List<RowSet> outputRowSets ) {
parent.setOutputRowSetsImpl( outputRowSets );
}
public void setStepListeners( List<StepListener> stepListeners ) {
parent.setStepListenersImpl( stepListeners );
}
public void setVariable( String variableName, String variableValue ) {
parent.setVariableImpl( variableName, variableValue );
}
public void stopAll() {
parent.stopAllImpl();
}
public void stopRunning( StepMetaInterface stepMetaInterface, StepDataInterface stepDataInterface )
throws KettleException {
parent.stopRunningImpl( stepMetaInterface, stepDataInterface );
}
public String toString() {
return parent.toStringImpl();
}
public static String[] getInfoSteps() {
return null;
}
@SuppressWarnings( "unchecked" )
public static void getFields( boolean clearResultFields, RowMetaInterface row, String originStepname,
RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, List<?> fields ) throws KettleStepException {
if ( clearResultFields ) {
row.clear();
}
for ( FieldInfo fi : (List<FieldInfo>) fields ) {
try {
ValueMetaInterface v = ValueMetaFactory.createValueMeta( fi.name, fi.type );
v.setLength( fi.length );
v.setPrecision( fi.precision );
v.setOrigin( originStepname );
row.addValueMeta( v );
} catch ( Exception e ) {
throw new KettleStepException( e );
}
}
}
public static StepIOMetaInterface getStepIOMeta( UserDefinedJavaClassMeta meta ) {
StepIOMetaInterface ioMeta = new StepIOMeta( true, true, true, false, true, true );
for ( StepDefinition stepDefinition : meta.getInfoStepDefinitions() ) {
ioMeta.addStream( new Stream(
StreamType.INFO, stepDefinition.stepMeta, stepDefinition.description, StreamIcon.INFO, null ) );
}
for ( StepDefinition stepDefinition : meta.getTargetStepDefinitions() ) {
ioMeta.addStream( new Stream(
StreamType.TARGET, stepDefinition.stepMeta, stepDefinition.description, StreamIcon.TARGET, null ) );
}
return ioMeta;
}
public String getParameter( String tag ) {
if ( tag == null ) {
return null;
}
return parent.environmentSubstitute( data.parameterMap.get( tag ) );
}
public RowSet findInfoRowSet( String tag ) throws KettleException {
if ( tag == null ) {
return null;
}
String stepname = data.infoMap.get( tag );
if ( Const.isEmpty( stepname ) ) {
throw new KettleException( BaseMessages.getString(
PKG, "TransformClassBase.Exception.UnableToFindInfoStepNameForTag", tag ) );
}
RowSet rowSet = findInputRowSet( stepname );
if ( rowSet == null ) {
throw new KettleException( BaseMessages.getString(
PKG, "TransformClassBase.Exception.UnableToFindInfoRowSetForStep", stepname ) );
}
return rowSet;
}
public RowSet findTargetRowSet( String tag ) throws KettleException {
if ( tag == null ) {
return null;
}
String stepname = data.targetMap.get( tag );
if ( Const.isEmpty( stepname ) ) {
throw new KettleException( BaseMessages.getString(
PKG, "TransformClassBase.Exception.UnableToFindTargetStepNameForTag", tag ) );
}
RowSet rowSet = findOutputRowSet( stepname );
if ( rowSet == null ) {
throw new KettleException( BaseMessages.getString(
PKG, "TransformClassBase.Exception.UnableToFindTargetRowSetForStep", stepname ) );
}
return rowSet;
}
private final Map<String, FieldHelper> inFieldHelpers = new HashMap<String, FieldHelper>();
private final Map<String, FieldHelper> infoFieldHelpers = new HashMap<String, FieldHelper>();
private final Map<String, FieldHelper> outFieldHelpers = new HashMap<String, FieldHelper>();
public enum Fields {
In, Out, Info;
}
public FieldHelper get( Fields type, String name ) throws KettleStepException {
FieldHelper fh;
switch ( type ) {
case In:
fh = inFieldHelpers.get( name );
if ( fh == null ) {
try {
fh = new FieldHelper( data.inputRowMeta, name );
} catch ( IllegalArgumentException e ) {
throw new KettleStepException( BaseMessages.getString(
PKG, "TransformClassBase.Exception.UnableToFindFieldHelper", type.name(), name ) );
}
inFieldHelpers.put( name, fh );
}
break;
case Out:
fh = outFieldHelpers.get( name );
if ( fh == null ) {
try {
fh = new FieldHelper( data.outputRowMeta, name );
} catch ( IllegalArgumentException e ) {
throw new KettleStepException( BaseMessages.getString(
PKG, "TransformClassBase.Exception.UnableToFindFieldHelper", type.name(), name ) );
}
outFieldHelpers.put( name, fh );
}
break;
case Info:
fh = infoFieldHelpers.get( name );
if ( fh == null ) {
RowMetaInterface rmi = getTransMeta().getPrevInfoFields( getStepname() );
try {
fh = new FieldHelper( rmi, name );
} catch ( IllegalArgumentException e ) {
throw new KettleStepException( BaseMessages.getString(
PKG, "TransformClassBase.Exception.UnableToFindFieldHelper", type.name(), name ) );
}
infoFieldHelpers.put( name, fh );
}
break;
default:
throw new KettleStepException( BaseMessages.getString(
PKG, "TransformClassBase.Exception.InvalidFieldsType", type.name(), name ) );
}
return fh;
}
public Object[] createOutputRow( Object[] inputRow, int outputRowSize ) {
if ( meta.isClearingResultFields() ) {
return RowDataUtil.allocateRowData( outputRowSize );
} else {
return RowDataUtil.createResizedCopy( inputRow, outputRowSize );
}
}
}
| rfellows/pentaho-kettle | engine/src/org/pentaho/di/trans/steps/userdefinedjavaclass/TransformClassBase.java | Java | apache-2.0 | 20,128 |
/*
* Copyright 2015 Patrick Ahlbrecht
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.onyxbits.raccoon.appmgr;
import java.awt.Dimension;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.net.URI;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Vector;
import javax.swing.Action;
import javax.swing.BorderFactory;
import javax.swing.DefaultComboBoxModel;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTextField;
import javax.swing.event.CaretEvent;
import javax.swing.event.CaretListener;
import de.onyxbits.raccoon.db.DatabaseManager;
import de.onyxbits.raccoon.db.DatasetEvent;
import de.onyxbits.raccoon.db.DatasetListener;
import de.onyxbits.raccoon.db.DatasetListenerProxy;
import de.onyxbits.raccoon.gui.ButtonBarBuilder;
import de.onyxbits.raccoon.gui.TitleStrip;
import de.onyxbits.raccoon.net.ServerManager;
import de.onyxbits.raccoon.qr.CopyContentAction;
import de.onyxbits.raccoon.qr.QrPanel;
import de.onyxbits.raccoon.repo.AndroidApp;
import de.onyxbits.raccoon.repo.AndroidAppDao;
import de.onyxbits.raccoon.repo.AppGroup;
import de.onyxbits.raccoon.repo.AppGroupDao;
import de.onyxbits.weave.LifecycleManager;
import de.onyxbits.weave.swing.AbstractPanelBuilder;
import de.onyxbits.weave.swing.ActionLocalizer;
import de.onyxbits.weave.swing.WindowToggleAction;
/**
* A browser for listing all the apps in local storage.
*
* @author patrick
*
*/
/**
 * Builds the "My Apps" browser panel: a filterable list of locally stored apps,
 * action buttons, and a QR code for transferring the current selection over the
 * embedded HTTP server. Reacts to filter input (CaretListener/ActionListener)
 * and to database changes (DatasetListener).
 */
public class MyAppsViewBuilder extends AbstractPanelBuilder implements
		CaretListener, ActionListener, DatasetListener {

	public static final String ID = MyAppsViewBuilder.class.getSimpleName();

	// Group dropdown; index 0 holds null, meaning "no group filter".
	private JComboBox<AppGroup> groupFilter;
	// Free-text name filter; changes are picked up via caretUpdate().
	private JTextField nameFilter;
	// Last applied name filter, used to suppress redundant re-filtering.
	private String lastFilter;
	// Shows the transfer URL of the currently filtered app list as a QR code.
	private QrPanel transfer;
	private ListViewBuilder listView;
	// Background worker that (re)populates the list; cancelled on reload.
	private ListWorker listWorker;
	private JButton install;

	/**
	 * Assembles the panel: title strip, filter box, action button bar, QR transfer
	 * panel and the scrollable app list, then triggers the initial load and
	 * subscribes to app/group dataset changes.
	 */
	@Override
	protected JPanel assemble() {
		listView = new ListViewBuilder();
		JScrollPane listScroll = new JScrollPane(listView.build(globals));
		listScroll.setPreferredSize(new Dimension(400, 500));
		listScroll.getVerticalScrollBar().setUnitIncrement(20);

		TitleStrip titleStrip = new TitleStrip(Messages.getString(ID + ".title"),
				Messages.getString(ID + ".subTitle"), new ImageIcon(getClass()
						.getResource("/icons/appicon.png")));

		// Filter controls (by name, by group).
		nameFilter = new JTextField(10);
		nameFilter.setMargin(new Insets(2, 2, 2, 2));
		nameFilter.addCaretListener(this);
		nameFilter.requestFocusInWindow();
		groupFilter = new JComboBox<AppGroup>();
		groupFilter.addActionListener(this);

		GridBagConstraints gbc = new GridBagConstraints();
		gbc.insets = new Insets(5, 5, 5, 5);
		gbc.fill = GridBagConstraints.HORIZONTAL;
		gbc.anchor = GridBagConstraints.WEST;
		JPanel filterPanel = new JPanel();
		filterPanel.setLayout(new GridBagLayout());
		gbc.gridx = 0;
		gbc.gridy = 0;
		filterPanel.add(new JLabel(Messages.getString(ID + ".byname")), gbc);
		gbc.gridx = 1;
		gbc.gridy = 0;
		filterPanel.add(nameFilter, gbc);
		gbc.gridx = 0;
		gbc.gridy = 1;
		filterPanel.add(new JLabel(Messages.getString(ID + ".bygroup")), gbc);
		gbc.gridx = 1;
		gbc.gridy = 1;
		filterPanel.add(groupFilter, gbc);
		filterPanel.setBorder(BorderFactory.createTitledBorder(Messages
				.getString(ID + ".filter")));

		// Action buttons: edit groups, invert selection, install, export, delete.
		ActionLocalizer al = Messages.getLocalizer();
		Action toggle = al.localize(new InvertAction(listView), "invertselection");
		Action editGroups = al.localize(
				new WindowToggleAction(globals.get(LifecycleManager.class),
						GroupEditorBuilder.ID), "editgroups");
		install = new JButton(listView.installAction);
		install.addActionListener(this);
		JPanel actionPanel = new ButtonBarBuilder()
				.withVerticalAlignment()
				.addButton(editGroups)
				.addButton(toggle)
				.add(install)
				.addButton(listView.exportAction)
				.addButton(listView.deleteAction)
				.withBorder(
						BorderFactory.createTitledBorder(Messages
								.getString(ID + ".actions"))).build(globals);

		JPanel ret = new JPanel();
		ret.setLayout(new GridBagLayout());

		// QR panel initially encodes the transfer URL for an empty app list.
		transfer = new QrPanel(200);
		transfer.withActions(new CopyContentAction(globals, transfer));
		String location = globals.get(ServerManager.class)
				.serve(new ArrayList<AndroidApp>()).toString();
		transfer.setContentString(location);
		transfer.setBorder(BorderFactory.createTitledBorder(Messages.getString(ID
				+ ".transfer")));

		// Overall layout: title on top, filter/actions/QR in the left column,
		// the app list filling the remaining space on the right.
		gbc = new GridBagConstraints();
		gbc.gridx = 0;
		gbc.gridy = 0;
		gbc.fill = GridBagConstraints.HORIZONTAL;
		gbc.weightx = 1;
		gbc.gridwidth = 2;
		gbc.insets.bottom = 10;
		gbc.anchor = GridBagConstraints.NORTHWEST;
		ret.add(titleStrip, gbc);

		gbc.gridx = 0;
		gbc.gridy = 1;
		gbc.fill = GridBagConstraints.HORIZONTAL;
		gbc.weightx = 0;
		gbc.weighty = 0;
		gbc.gridwidth = 1;
		gbc.insets.right = 10;
		gbc.insets.left = 5;
		ret.add(filterPanel, gbc);

		gbc.gridx = 0;
		gbc.gridy = 2;
		ret.add(actionPanel, gbc);

		gbc.gridx = 0;
		gbc.gridy = 3;
		ret.add(transfer, gbc);

		gbc.gridx = 1;
		gbc.gridy = 1;
		gbc.fill = GridBagConstraints.BOTH;
		gbc.gridheight = 3;
		gbc.weighty = 1;
		gbc.weightx = 1;
		gbc.insets.right = 5;
		gbc.insets.left = 0;
		ret.add(listScroll, gbc);

		reloadGroups();
		reloadList();
		// Refresh automatically when apps or groups change in the database.
		globals.get(DatabaseManager.class).get(AndroidAppDao.class)
				.addDataSetListener(new DatasetListenerProxy(this));
		globals.get(DatabaseManager.class).get(AppGroupDao.class)
				.addDataSetListener(new DatasetListenerProxy(this));
		return ret;
	}

	/**
	 * (Re)populates the group filter dropdown from the database, prepending a
	 * null entry that stands for "all groups". SQL failures are only logged.
	 */
	private void reloadGroups() {
		try {
			Vector<AppGroup> groups = globals.get(DatabaseManager.class)
					.get(AppGroupDao.class).list();
			groupFilter.setModel(new DefaultComboBoxModel<AppGroup>(groups));
			groupFilter.insertItemAt(null, 0);
			groupFilter.setSelectedIndex(0);
		}
		catch (SQLException e) {
			e.printStackTrace();
		}
	}

	/**
	 * Clears the list view and starts a fresh background worker to repopulate it,
	 * cancelling any worker that is still running.
	 */
	protected void reloadList() {
		if (listWorker != null) {
			listWorker.cancel(true);
		}
		if (listView != null) {
			// Entirely possible that this method gets called without the window being
			// assembled -> Silently ignore the request.
			listView.clear();
			listWorker = new ListWorker(listView, transfer, globals);
			listWorker.execute();
		}
	}

	/**
	 * Name-filter text changed: re-filter the list and update the QR code with
	 * the new transfer URI (only if the text actually differs from last time).
	 */
	@Override
	public void caretUpdate(CaretEvent e) {
		String tmp = nameFilter.getText();
		if (!tmp.equals(lastFilter)) {
			lastFilter = tmp;
			URI uri = listView.filter(tmp, (AppGroup) groupFilter.getSelectedItem());
			transfer.setContentString(uri.toString());
		}
	}

	/**
	 * Group selection changed: re-filter using the current name filter and the
	 * newly selected group, then update the QR code.
	 */
	@Override
	public void actionPerformed(ActionEvent e) {
		Object src = e.getSource();
		if (src == groupFilter) {
			URI uri = listView.filter(lastFilter,
					(AppGroup) groupFilter.getSelectedItem());
			transfer.setContentString(uri.toString());
		}
	}

	/**
	 * App or group data changed in the database: reload both the list and the
	 * group dropdown.
	 */
	@Override
	public void onDataSetChange(DatasetEvent event) {
		reloadList();
		reloadGroups();
	}
}
| onyxbits/raccoon4 | src/main/java/de/onyxbits/raccoon/appmgr/MyAppsViewBuilder.java | Java | apache-2.0 | 7,560 |
# Copyright 2014, Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import threading
import time
from oslo_messaging._drivers.protocols.amqp import controller
from oslo_messaging._i18n import _LW
from oslo_messaging import exceptions
from six import moves
LOG = logging.getLogger(__name__)
class SendTask(controller.Task):
    """Deliver a message to a target, optionally awaiting a reply.

    The caller may block in wait() until the remote confirms receipt or
    until the reply message has arrived.
    """

    def __init__(self, target, request, wait_for_reply, deadline):
        super(SendTask, self).__init__()
        self._target = target
        self._request = request
        self._wait_for_reply = wait_for_reply
        self._deadline = deadline
        self._results_queue = moves.queue.Queue()

    def wait(self, timeout):
        """Block until the send completes (and, if requested, a reply arrives).

        Raises MessagingTimeout when nothing is received within ``timeout``
        seconds; re-raises the request's error for any other failure.
        """
        try:
            outcome = self._results_queue.get(timeout=timeout)
        except moves.queue.Empty:
            if self._wait_for_reply:
                reason = "Timed out waiting for a reply."
            else:
                reason = "Timed out waiting for send to complete."
            raise exceptions.MessagingTimeout(reason)
        if outcome["status"] != "OK":
            raise outcome["error"]
        return outcome.get("response", None)

    def execute(self, controller):
        """Runs on eventloop thread - issues the request unless its TTL expired."""
        expired = self._deadline and self._deadline <= time.time()
        if expired:
            LOG.warning(_LW("Send request to %s aborted: TTL expired."),
                        self._target)
        else:
            controller.request(self._target, self._request,
                               self._results_queue, self._wait_for_reply)
class ListenTask(controller.Task):
    """Create a subscription to a target; inbound messages are handed to
    the listener's incoming queue.
    """

    def __init__(self, target, listener, notifications=False):
        """Create a subscription to the target."""
        super(ListenTask, self).__init__()
        self._target = target
        self._listener = listener
        self._notifications = notifications

    def execute(self, controller):
        """Runs on the eventloop thread - registers the subscription."""
        # Notification subscriptions use a dedicated controller entry point.
        if self._notifications:
            subscribe = controller.subscribe_notifications
        else:
            subscribe = controller.subscribe
        subscribe(self._target, self._listener.incoming)
class ReplyTask(controller.Task):
    """Deliver a 'response' message to 'address'; wait() blocks the caller
    until the controller has actually sent it.
    """

    def __init__(self, address, response, log_failure):
        super(ReplyTask, self).__init__()
        self._wakeup = threading.Event()
        self._address = address
        self._response = response
        self._log_failure = log_failure

    def wait(self):
        """Block until the controller has sent the message."""
        self._wakeup.wait()

    def execute(self, controller):
        """Runs on the eventloop thread - sends the response, then releases
        any thread blocked in wait()."""
        controller.response(self._address, self._response)
        self._wakeup.set()
| dukhlov/oslo.messaging | oslo_messaging/_drivers/protocols/amqp/drivertasks.py | Python | apache-2.0 | 4,126 |
package com.xxx.market.service.provider;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.xxx.market.model.Product;
import com.xxx.market.model.ProductSpecItem;
import com.xxx.market.model.Promotion;
import com.xxx.market.model.PromotionSet;
import com.xxx.market.service.api.product.ProductPromotionResultDto;
import com.xxx.market.service.api.ump.PromotionParamDto;
import com.xxx.market.service.api.ump.PromotionResultDto;
import com.xxx.market.service.api.ump.PromotionService;
import com.xxx.market.service.api.ump.PromotionSetResultDto;
import com.xxx.market.service.api.ump.UmpException;
import com.xxx.market.service.base.AbstractServiceImpl;
import com.xxx.market.service.utils.DateTimeUtil;
import com.jfinal.kit.StrKit;
import com.jfinal.plugin.activerecord.Db;
import com.jfinal.plugin.activerecord.Page;
import com.jfinal.plugin.activerecord.Record;
@Service("promotionService")
public class PromotionServiceImpl extends AbstractServiceImpl implements PromotionService{
	/**
	 * Creates or updates a time-limited discount promotion together with its
	 * per-product discount settings. The settings arrive as a JSON array
	 * ({@code promotionSetItems}); each element carries a product id, discount
	 * type, the computed promotional price and an optional "opt" flag
	 * ("del"/"updated") for existing rows. Runs in a transaction that rolls
	 * back on UmpException.
	 *
	 * @param promotion         the promotion header to insert (id == null) or update
	 * @param sellerId          owning seller, stored on newly created promotions
	 * @param promotionSetItems JSON array of discount-setting items
	 * @throws UmpException on missing parameters, malformed JSON, invalid item
	 *                      values, or persistence failures
	 */
	@Override
	@Transactional(rollbackFor = UmpException.class)
	public void save(Promotion promotion, Long sellerId, String promotionSetItems) throws UmpException {
		if(promotion == null || StrKit.isBlank(promotionSetItems) || sellerId == null)
			throw new UmpException("保存限时折扣出错:参数不全");
		// Insert a new promotion (marking it active) or update an existing one.
		if(promotion.getId() == null){
			promotion.setSellerId(sellerId);
			promotion.setActive(true);
			promotion.save();
		}else{
			promotion.update();
		}
		JSONArray jarr = null;
		try {
			jarr = JSONArray.parseArray(promotionSetItems);
		} catch (Exception e) {
			throw new UmpException(e.getMessage());
		}
		if(jarr == null || jarr.size() <=0) throw new UmpException("未设置活动折扣信息");
		// Discount settings to insert.
		List<PromotionSet> promotionAddSets = new ArrayList<PromotionSet>();
		// Discount settings to update.
		List<PromotionSet> promotionUpdateSets = new ArrayList<PromotionSet>();
		// Discount settings to delete.
		List<PromotionSet> promotionDelSets = new ArrayList<PromotionSet>();
		for(int i=0;i<jarr.size();i++){
			JSONObject jsonObj = jarr.getJSONObject(i);
			// Validate each item; for add/update the discounted price must be > 0.
			if(jsonObj == null
					|| jsonObj.getLong("productId") == null
					|| jsonObj.getFloat("jianjia") == null
					|| jsonObj.getFloat("zhekou") == null
					|| jsonObj.getInteger("ptype") == null
					|| jsonObj.getFloat("promotion") == null
					|| (!"del".equals(jsonObj.getString("opt")) && jsonObj.getFloat("promotion") <=0)) //when adding or updating, the discounted/reduced price must be greater than 0
				throw new UmpException("保存折扣活动,折扣设置项值有误,请检查,打折后的价格必须大于0");
			Long promotionSetId = jsonObj.getLong("psetId");
			PromotionSet promotionSet = null;
			if(promotionSetId == null){
				// No id -> brand-new setting row for this promotion.
				promotionSet = new PromotionSet();
				promotionSet.setProductId(jsonObj.getLong("productId"));
				promotionSet.setPromotionId(promotion.getId());
				promotionSet.setCreated(new Date());
				promotionSet.setActive(true);
				promotionSet.setPromotionSetJianjia(jsonObj.getFloat("jianjia"));
				promotionSet.setPromotionSetZhekou(jsonObj.getFloat("zhekou"));
				promotionSet.setPromotionType(jsonObj.getInteger("ptype"));
				promotionSet.setPromotionValue(jsonObj.getFloat("promotion"));
				promotionSet.setUpdated(new Date());
				promotionAddSets.add(promotionSet);
			}else{
				promotionSet = PromotionSet.dao.findById(promotionSetId);
				if(promotionSet == null) throw new UmpException("折扣设置记录不存在");
				if("del".equals(jsonObj.getString("opt"))){
					// Deletions are applied immediately, inside the loop.
					promotionDelSets.add(promotionSet);
					try {
						promotionSet.delete();
					} catch (Exception e) {
						throw new UmpException(e.getMessage());
					}
				}
				if("updated".equals(jsonObj.getString("opt"))){
					promotionSet.setPromotionSetJianjia(jsonObj.getFloat("jianjia"));
					promotionSet.setPromotionSetZhekou(jsonObj.getFloat("zhekou"));
					promotionSet.setPromotionType(jsonObj.getInteger("ptype"));
					promotionSet.setPromotionValue(jsonObj.getFloat("promotion"));
					promotionSet.setUpdated(new Date());
					promotionUpdateSets.add(promotionSet);
				}
			}
		}
		if(promotionDelSets.size()<=0 && promotionAddSets.size() <=0 && promotionUpdateSets.size()<=0)
			throw new UmpException("没有设置折扣项数据");
		try {
			// Batch-persist the collected inserts and updates.
			if(promotionAddSets.size() > 0){
				Db.batchSave(promotionAddSets, promotionAddSets.size());
			}
			if(promotionUpdateSets.size() > 0){
				Db.batchUpdate(promotionUpdateSets, promotionUpdateSets.size());
			}
		} catch (Exception e) {
			throw new UmpException(e.getMessage());
		}
	}
@Override
public PromotionResultDto getPromotionInfo(Long id) throws UmpException {
if(id == null) throw new UmpException("获取折扣信息参数错误");
Promotion promotion = Promotion.dao.findById(id);
if(promotion == null) throw new UmpException("折扣信息不存在");
return getPromotionInfo(promotion);
}
	/**
	 * Converts a Promotion model into a PromotionResultDto, joining each of its
	 * discount-setting rows against the product table so the DTO carries product
	 * name, image and price alongside the discount figures. All monetary values
	 * are rounded to 2 decimals (HALF_UP).
	 *
	 * @param promotion the promotion record to expand
	 * @return DTO with header fields plus one PromotionSetResultDto per setting
	 * @throws UmpException declared by the interface contract
	 */
	@Override
	public PromotionResultDto getPromotionInfo(Promotion promotion) throws UmpException {
		PromotionResultDto promotionDto = new PromotionResultDto();
		promotionDto.setPromotionId(promotion.getId());
		promotionDto.setPromotionName(promotion.getPromotionName());
		promotionDto.setPromotionTag(promotion.getPromotionTag());
		promotionDto.setStartDate(promotion.getStartDate());
		promotionDto.setEndDate(promotion.getEndDate());
		// Join settings with product data; p.id is aliased so the record exposes product_id.
		List<Record> promotionSets = Db.find("select ps.*, p.id as product_id, p.name, p.image, p.price from " + PromotionSet.table + " ps "
				+ " left join " + Product.table + " p on ps.product_id=p.id "
				+ " where promotion_id=? ", promotion.getId());
		List<PromotionSetResultDto> setResultDtos = new ArrayList<PromotionSetResultDto>();
		for(Record record : promotionSets){
			PromotionSetResultDto psrDto = new PromotionSetResultDto();
			psrDto.setId(record.getLong("id"));
			psrDto.setProductId(record.getLong("product_id"));
			psrDto.setPromotinId(promotion.getId());
			psrDto.setProductName(record.getStr("name"));
			// Image paths are stored relative; prefix the configured image domain.
			psrDto.setProductImg(getImageDomain() + record.getStr("image"));
			psrDto.setProductPrice(record.getStr("price"));
			psrDto.setType(record.getInt("promotion_type"));
			psrDto.setJianjia(new BigDecimal(record.getFloat("promotion_set_jianjia")).setScale(2, BigDecimal.ROUND_HALF_UP).floatValue());
			psrDto.setZhekou(new BigDecimal(record.getFloat("promotion_set_zhekou")).setScale(2, BigDecimal.ROUND_HALF_UP).floatValue());
			psrDto.setPromotionValue(new BigDecimal(record.getFloat("promotion_value")).setScale(2, BigDecimal.ROUND_HALF_UP).floatValue());
			setResultDtos.add(psrDto);
		}
		promotionDto.setPromotionSets(setResultDtos);
		return promotionDto;
	}
@Override
public Page<PromotionResultDto> list(PromotionParamDto promotionParam) throws UmpException{
if(promotionParam == null || promotionParam.getSellerId() == null)
throw new UmpException("获取限时打折列表数据参数错误");
Page<Promotion> pages = Promotion.dao.paginate(promotionParam.getPageNo(), promotionParam.getPageSize(),
"select * ",
" from " + Promotion.table + " where seller_id=? ", promotionParam.getSellerId());
List<PromotionResultDto> promotionDtos = new ArrayList<PromotionResultDto>();
for(Promotion promotion : pages.getList()){
PromotionResultDto promotionDto = new PromotionResultDto();
promotionDto.setPromotionId(promotion.getId());
promotionDto.setPromotionName(promotion.getPromotionName());
promotionDto.setPromotionTag(promotion.getPromotionTag());
promotionDto.setStartDate(promotion.getStartDate());
promotionDto.setEndDate(promotion.getEndDate());
promotionDtos.add(promotionDto);
}
return new Page<PromotionResultDto> (promotionDtos, promotionParam.getPageNo(), promotionParam.getPageSize(), pages.getTotalPage(), pages.getTotalRow());
}
@Override
public ProductPromotionResultDto getProductPromotion(Product product) throws UmpException {
	// Find the seller's promotions whose time window covers "now"
	// (查询卖家有效时间范围内的促销活动).
	List<Promotion> promotions = Promotion.dao.find(
			"select * from " + Promotion.table + " where seller_id=? and start_date<=? and end_date>=? ",
			product.getSellerId(), new Date(), new Date());
	for (Promotion promo : promotions) {
		PromotionResultDto promotionResultDto = getPromotionInfo(promo);
		List<PromotionSetResultDto> promotionSets = promotionResultDto.getPromotionSets();
		for (PromotionSetResultDto psrDto : promotionSets) {
			Long setProductId = psrDto.getProductId();
			// BUGFIX: compare the boxed Long ids with equals() instead of '=='.
			// '==' is a reference comparison and only happens to work for values
			// inside the Long autoboxing cache (-128..127).
			if (setProductId != null && setProductId.equals(product.getId())) {
				// Build the discount view for this product.
				ProductPromotionResultDto prodprom = new ProductPromotionResultDto();
				prodprom.setPromotionTag(promo.getPromotionTag()); // promotion label (活动标签)
				prodprom.setPromotionPrice(psrDto.getPromotionValue().toString()); // discounted price (打折后的价格)
				if (psrDto.getType() == 1) {
					// type 1 = percentage discount ("zhekou")
					prodprom.setPromotionInfo(psrDto.getZhekou() + "折 ");
				} else {
					// otherwise a fixed amount off ("jianjia")
					prodprom.setPromotionInfo("减¥" + new BigDecimal(psrDto.getJianjia()).setScale(2, BigDecimal.ROUND_HALF_UP));
				}
				// Human-readable countdown: days until start, or days until end.
				if (promo.getStartDate().after(new Date())) {
					prodprom.setPromotionTime("还差" + DateTimeUtil.compareDay(promo.getStartDate(), new Date()) + "天开始");
				} else {
					prodprom.setPromotionTime("剩余" + DateTimeUtil.compareDay(promo.getEndDate(), new Date()) + "天结束");
				}
				// Multi-spec products show a discounted price range instead of a single value
				// (如果商品是多规格的话,折扣价也是一个范围).
				String promotionPrice = getProductPromotionPriceSection(product, psrDto);
				if (StrKit.notBlank(promotionPrice)) prodprom.setPromotionPrice(promotionPrice);
				return prodprom;
			}
		}
	}
	// No active promotion covers this product.
	return null;
}
@Override
public String getProductPromotionPriceSection(Product product, PromotionSetResultDto promotionSetParam) throws UmpException {
	// Load all spec items (variants) of the product; a multi-spec product has a price range.
	List<ProductSpecItem> productSpecItems = ProductSpecItem.dao.find(" select * from "
			+ ProductSpecItem.table + " where product_id=? ", product.getId());
	if (productSpecItems == null || productSpecItems.size() == 0) {
		// No spec items: caller falls back to the single product price.
		return null;
	}
	// Determine the min/max base price across all spec items.
	BigDecimal min = productSpecItems.get(0).getPrice();
	BigDecimal max = min;
	for (ProductSpecItem productSpecItem : productSpecItems) {
		BigDecimal bprice = productSpecItem.getPrice();
		// BUGFIX: compareTo() is only specified to return a negative, zero or
		// positive int — testing '== 1' / '== -1' relies on an implementation
		// detail. Use sign comparisons instead.
		if (bprice.compareTo(max) > 0) {
			max = bprice;
		}
		if (bprice.compareTo(min) < 0) {
			min = bprice;
		}
	}
	// All variants share one price: return a single discounted value.
	BigDecimal minProm = applyPromotion(min, promotionSetParam);
	if (max.compareTo(min) == 0) {
		return minProm.toString();
	}
	// Otherwise return the discounted range "low ~ high".
	BigDecimal maxProm = applyPromotion(max, promotionSetParam);
	return minProm.toString() + " ~ " + maxProm.toString();
}
/**
 * Applies the promotion described by {@code set} to a base price.
 * Type 1 is a percentage discount ("zhekou", expressed out of 10, e.g. 8.5 = 85%);
 * anything else is a fixed reduction ("jianjia"). Result is rounded to 2 decimals
 * (保留两位小数,四舍五入).
 */
private BigDecimal applyPromotion(BigDecimal price, PromotionSetResultDto set) {
	BigDecimal result;
	if (set.getType() == 1) {
		result = price.multiply(new BigDecimal(set.getZhekou() / 10));
	} else {
		result = price.subtract(new BigDecimal(set.getJianjia()));
	}
	return result.setScale(2, BigDecimal.ROUND_HALF_UP);
}
@Override
public PromotionSet getProductPromotionSet(Product product) throws UmpException {
	// Find the seller's promotions currently in their active time window.
	List<Promotion> promotions = Promotion.dao.find(
			"select * from " + Promotion.table + " where seller_id=? and start_date<=? and end_date>=? ",
			product.getSellerId(), new Date(), new Date());
	for (Promotion promo : promotions) {
		List<PromotionSet> promotionSets = PromotionSet.dao.find("select * from " + PromotionSet.table + " where promotion_id=? ", promo.getId());
		for (PromotionSet ps : promotionSets) {
			Long setProductId = ps.getProductId();
			// BUGFIX: use equals() for boxed Long ids — '==' compares references
			// and only works for values in the Long autoboxing cache (-128..127).
			if (setProductId != null && setProductId.equals(product.getId()))
				return ps;
		}
	}
	// No promotion set references this product.
	return null;
}
@Override
public String getProductPromotionPrice(Product product, ProductSpecItem specItem) throws UmpException {
	PromotionSet promotionSet = getProductPromotionSet(product);
	// No active promotion set for this product: no promotional price.
	if (promotionSet == null) {
		return null;
	}
	// Base price comes from the chosen spec item when present, otherwise from the product.
	BigDecimal oldPrice = (specItem != null) ? specItem.getPrice() : new BigDecimal(product.getPrice());
	BigDecimal promoPrice;
	if (promotionSet.getPromotionType() == 1) {
		// type 1 = percentage discount ("zhekou", out of 10) (打折)
		promoPrice = oldPrice.multiply(new BigDecimal(promotionSet.getPromotionSetZhekou() / 10));
	} else {
		// fixed amount off (减价)
		promoPrice = oldPrice.subtract(new BigDecimal(promotionSet.getPromotionSetJianjia()));
	}
	// NOTE: removed the dead 'promoPrice != null' check — both branches always assign it.
	return promoPrice.setScale(2, BigDecimal.ROUND_HALF_UP).toString();
}
@Override
public String getProductPromotionPrice(Product product) throws UmpException {
	// Convenience overload: no spec item selected, so the promotional price is
	// computed from the product's own price (see the two-argument overload).
	return getProductPromotionPrice(product, null);
}
}
| pjworkspace/wscs | wscs-service-provider/src/main/java/com/xxx/market/service/provider/PromotionServiceImpl.java | Java | apache-2.0 | 13,513 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.procedure;
import java.io.Closeable;
import java.io.IOException;
import java.util.Collection;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.DaemonThreadFactory;
import org.apache.hadoop.hbase.errorhandling.ForeignException;
import com.google.common.collect.MapMaker;
/**
 * Process to kick off and manage a running {@link Subprocedure} on a member. This is the
 * specialized part of a {@link Procedure} that actually does procedure type-specific work
 * and reports back to the coordinator as it completes each phase.
 * <p>
 * If there is a connection error ({@link #controllerConnectionFailure(String, IOException)}), all
 * currently running subprocedures are notified of the failure, since there is no longer a way to
 * reach any other members or coordinators once the rpcs are down.
 */
@InterfaceAudience.Private
public class ProcedureMember implements Closeable {
  private static final Log LOG = LogFactory.getLog(ProcedureMember.class);

  final static long KEEP_ALIVE_MILLIS_DEFAULT = 5000;

  private final SubprocedureFactory builder;
  private final ProcedureMemberRpcs rpcs;

  // Running subprocedures keyed by procedure name. Values are held weakly so that
  // completed subprocedures can be garbage collected once no one else references them.
  private final ConcurrentMap<String,Subprocedure> subprocs =
      new MapMaker().concurrencyLevel(4).weakValues().makeMap();
  private final ExecutorService pool;

  /**
   * Instantiate a new ProcedureMember.  This is a slave that executes subprocedures.
   *
   * @param rpcs controller used to send notifications to the procedure coordinator
   * @param pool thread pool to submit subprocedures
   * @param factory class that creates instances of a subprocedure.
   */
  public ProcedureMember(ProcedureMemberRpcs rpcs, ThreadPoolExecutor pool,
      SubprocedureFactory factory) {
    this.pool = pool;
    this.rpcs = rpcs;
    this.builder = factory;
  }

  /**
   * Default thread pool for the procedure
   *
   * @param memberName
   * @param procThreads the maximum number of threads to allow in the pool
   */
  public static ThreadPoolExecutor defaultPool(String memberName, int procThreads) {
    return defaultPool(memberName, procThreads, KEEP_ALIVE_MILLIS_DEFAULT);
  }

  /**
   * Default thread pool for the procedure
   *
   * @param memberName
   * @param procThreads the maximum number of threads to allow in the pool
   * @param keepAliveMillis the maximum time (ms) that excess idle threads will wait for new tasks
   */
  public static ThreadPoolExecutor defaultPool(String memberName, int procThreads,
      long keepAliveMillis) {
    // SynchronousQueue: tasks are handed directly to a worker thread (or rejected),
    // never queued — submitSubprocedure() handles RejectedExecutionException.
    return new ThreadPoolExecutor(1, procThreads, keepAliveMillis, TimeUnit.MILLISECONDS,
        new SynchronousQueue<Runnable>(),
        new DaemonThreadFactory("member: '" + memberName + "' subprocedure-pool"));
  }

  /**
   * Package exposed.  Not for public use.
   *
   * @return reference to the Procedure member's rpcs object
   */
  ProcedureMemberRpcs getRpcs() {
    return rpcs;
  }

  /**
   * This is separated from execution so that we can detect and handle the case where the
   * subprocedure is invalid and inactionable due to bad info (like DISABLED snapshot type being
   * sent here)
   * @param opName
   * @param data
   * @return subprocedure
   */
  public Subprocedure createSubprocedure(String opName, byte[] data) {
    return builder.buildSubprocedure(opName, data);
  }

  /**
   * Submit an subprocedure for execution.  This starts the local acquire phase.
   * @param subproc the subprocedure to execute.
   * @return <tt>true</tt> if the subprocedure was started correctly, <tt>false</tt> if it
   *         could not be started. In the latter case, the subprocedure holds a reference to
   *         the exception that caused the failure.
   */
  public boolean submitSubprocedure(Subprocedure subproc) {
     // if the submitted subprocedure was null, bail.
    if (subproc == null) {
      LOG.warn("Submitted null subprocedure, nothing to run here.");
      return false;
    }

    String procName = subproc.getName();
    if (procName == null || procName.length() == 0) {
      LOG.error("Subproc name cannot be null or the empty string");
      return false;
    }

    // make sure we aren't already running an subprocedure of that name
    Subprocedure rsub = subprocs.get(procName);
    if (rsub != null) {
      if (!rsub.isComplete()) {
        LOG.error("Subproc '" + procName + "' is already running. Bailing out");
        return false;
      }
      LOG.warn("A completed old subproc " + procName + " is still present, removing");
      // remove(key, value) only removes if the map still holds *this* stale entry,
      // guarding against a concurrent replacement by another thread.
      if (!subprocs.remove(procName, rsub)) {
        LOG.error("Another thread has replaced existing subproc '" + procName + "'. Bailing out");
        return false;
      }
    }

    LOG.debug("Submitting new Subprocedure:" + procName);

    // kick off the subprocedure
    try {
      if (subprocs.putIfAbsent(procName, subproc) == null) {
        this.pool.submit(subproc);
        return true;
      } else {
        LOG.error("Another thread has submitted subproc '" + procName + "'. Bailing out");
        return false;
      }
    } catch (RejectedExecutionException e) {
      subprocs.remove(procName, subproc);

      // the thread pool is full and we can't run the subprocedure
      String msg = "Subprocedure pool is full!";
      // BUGFIX: cancel with the exception itself rather than e.getCause() —
      // RejectedExecutionException usually has no cause, so the original code
      // passed null and lost the failure details.
      subproc.cancel(msg, e);
    }

    LOG.error("Failed to start subprocedure '" + procName + "'");
    return false;
  }

   /**
    * Notification that procedure coordinator has reached the global barrier
    * @param procName name of the subprocedure that should start running the in-barrier phase
    */
   public void receivedReachedGlobalBarrier(String procName) {
     Subprocedure subproc = subprocs.get(procName);
     if (subproc == null) {
       // Fixed typo in the log message ("glabal" -> "global").
       LOG.warn("Unexpected reached global barrier message for Sub-Procedure '" + procName + "'");
       return;
     }
     subproc.receiveReachedGlobalBarrier();
   }

  /**
   * Best effort attempt to close the threadpool via Thread.interrupt.
   */
  @Override
  public void close() throws IOException {
    // have to use shutdown now to break any latch waiting
    pool.shutdownNow();
  }

  /**
   * Shutdown the threadpool, and wait for up to timeoutMs millis before bailing
   * @param timeoutMs timeout limit in millis
   * @return true if successfully shut down, false if bailed due to timeout.
   * @throws InterruptedException
   */
  boolean closeAndWait(long timeoutMs) throws InterruptedException {
    pool.shutdown();
    return pool.awaitTermination(timeoutMs, TimeUnit.MILLISECONDS);
  }

  /**
   * The connection to the rest of the procedure group (member and coordinator) has been
   * broken/lost/failed. This should fail any interested subprocedure, but not attempt to notify
   * other members since we cannot reach them anymore.
   * @param message description of the error
   * @param cause the actual cause of the failure
   *
   * TODO i'm tempted to just remove this code completely and treat it like any other abort.
   * Implementation wise, if this happens it is a ZK failure which means the RS will abort.
   */
  public void controllerConnectionFailure(final String message, final IOException cause) {
    Collection<Subprocedure> toNotify = subprocs.values();
    LOG.error(message, cause);
    for (Subprocedure sub : toNotify) {
      // TODO notify the elements, if they aren't null
      sub.cancel(message, cause);
    }
  }

  /**
   * Send abort to the specified procedure
   * @param procName name of the procedure to abort
   * @param ee exception information about the abort
   */
  public void receiveAbortProcedure(String procName, ForeignException ee) {
    LOG.debug("Request received to abort procedure " + procName, ee);

    // if we know about the procedure, notify it
    Subprocedure sub = subprocs.get(procName);
    if (sub == null) {
      LOG.info("Received abort on procedure with no local subprocedure " + procName +
          ", ignoring it.", ee);
      return; // Procedure has already completed
    }
    String msg = "Propagating foreign exception to subprocedure " + sub.getName();
    LOG.error(msg, ee);
    sub.cancel(msg, ee);
  }
}
| mapr/hbase | hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureMember.java | Java | apache-2.0 | 9,277 |
package pohkahkong.livewallpaper.collage;
import android.app.Activity;
import android.app.WallpaperManager;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.LinearLayout;
import android.widget.Toast;
/**
*
* @author Poh Kah Kong
*
*/
public class StartDialog extends Activity implements OnClickListener {
private final String androidRateUri = "https://play.google.com/store/apps/details?id=pohkahkong.livewallpaper.collage&feature=search_result#?t=W251bGwsMSwxLDEsInBvaGthaGtvbmcubGl2ZXdhbGxwYXBlci5jb2xsYWdlIl0.";
private final String facebookLikeUri = "https://www.facebook.com/CollageLiveWallpaper";
private final String rainbowUri = "https://play.google.com/store/apps/details?id=pohkahkong.game.rainbow&feature=more_from_developer#?t=W251bGwsMSwxLDEwMiwicG9oa2Foa29uZy5nYW1lLnJhaW5ib3ciXQ..";
private LinearLayout applicationLL;
private LinearLayout androidRateLL;
private LinearLayout facebookLikeLL;
private LinearLayout rainbowLL;
private LinearLayout closeLL;
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.dialog);
applicationLL = (LinearLayout) findViewById(R.id.applicationLL);
androidRateLL = (LinearLayout) findViewById(R.id.androidRateLL);
facebookLikeLL = (LinearLayout) findViewById(R.id.facebookLikeLL);
rainbowLL = (LinearLayout) findViewById(R.id.rainbowLL);
closeLL = (LinearLayout) findViewById(R.id.closeLL);
applicationLL.setOnClickListener(this);
androidRateLL.setOnClickListener(this);
facebookLikeLL.setOnClickListener(this);
rainbowLL.setOnClickListener(this);
closeLL.setOnClickListener(this);
Toast msg = Toast.makeText(this, "Double tap on the screen to\ndrag or zoom the wallpaper!", Toast.LENGTH_LONG);
msg.show();
}
public void onClick(View view) {
// TODO Auto-generated method stub
if (view.getId()==R.id.applicationLL) {
Intent intent = new Intent();
intent.setAction(WallpaperManager.ACTION_LIVE_WALLPAPER_CHOOSER);
finish();
startActivity(intent);
} else if (view.getId()==R.id.androidRateLL)
goWebSite(androidRateUri);
else if (view.getId()==R.id.facebookLikeLL)
goWebSite(facebookLikeUri);
else if (view.getId()==R.id.rainbowLL)
goWebSite(rainbowUri);
else if (view.getId()==R.id.closeLL)
finish();
}
private void goWebSite(String uriStr) {
Uri uri = Uri.parse(uriStr);
finish();
startActivity( new Intent( Intent.ACTION_VIEW, uri ) );
}
} | strengthandwill/collage-lwp | src/pohkahkong/livewallpaper/collage/StartDialog.java | Java | apache-2.0 | 2,776 |
package com.rcextract.minecord.event.server;
import org.bukkit.event.HandlerList;
import com.rcextract.minecord.Server;
/**
 * Event fired when the "permanent" flag of a {@link Server} is being changed.
 * Listeners may overwrite the flag via {@link #setPermanent(boolean)} before it
 * is applied.
 */
public class ServerSetPermanentEvent extends ServerEvent {

	private static final HandlerList handlers = new HandlerList();

	/** The new permanent state carried by this event. */
	private boolean permanent;

	public ServerSetPermanentEvent(Server server, boolean permanent) {
		super(server);
		this.permanent = permanent;
	}

	/** Static accessor required by the Bukkit event system. */
	public static HandlerList getHandlerList() {
		return handlers;
	}

	@Override
	public HandlerList getHandlers() {
		return handlers;
	}

	/** @return the permanent state this event will apply */
	public boolean isPermanent() {
		return permanent;
	}

	/** Overrides the permanent state this event will apply. */
	public void setPermanent(boolean permanent) {
		this.permanent = permanent;
	}
}
| RcExtract/Minecord | src/main/java/com/rcextract/minecord/event/server/ServerSetPermanentEvent.java | Java | apache-2.0 | 671 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ranger.patch;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.ranger.biz.RangerBizUtil;
import org.apache.ranger.biz.ServiceDBStore;
import org.apache.ranger.common.AppConstants;
import org.apache.ranger.common.JSONUtil;
import org.apache.ranger.common.RangerCommonEnums;
import org.apache.ranger.common.SearchCriteria;
import org.apache.ranger.common.ServiceUtil;
import org.apache.ranger.common.StringUtil;
import org.apache.ranger.db.RangerDaoManager;
import org.apache.ranger.entity.XXAsset;
import org.apache.ranger.entity.XXAuditMap;
import org.apache.ranger.entity.XXGroup;
import org.apache.ranger.entity.XXPolicy;
import org.apache.ranger.entity.XXPolicyConditionDef;
import org.apache.ranger.entity.XXPortalUser;
import org.apache.ranger.entity.XXResource;
import org.apache.ranger.entity.XXServiceConfigDef;
import org.apache.ranger.entity.XXServiceDef;
import org.apache.ranger.entity.XXUser;
import org.apache.ranger.plugin.model.RangerPolicy;
import org.apache.ranger.plugin.model.RangerService;
import org.apache.ranger.plugin.model.RangerPolicy.RangerPolicyItem;
import org.apache.ranger.plugin.model.RangerPolicy.RangerPolicyItemAccess;
import org.apache.ranger.plugin.model.RangerPolicy.RangerPolicyResource;
import org.apache.ranger.plugin.store.EmbeddedServiceDefsUtil;
import org.apache.ranger.service.RangerPolicyService;
import org.apache.ranger.service.XPermMapService;
import org.apache.ranger.service.XPolicyService;
import org.apache.ranger.util.CLIUtil;
import org.apache.ranger.view.VXPermMap;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@Component
public class PatchMigration_J10002 extends BaseLoader {
private static final Logger logger = Logger.getLogger(PatchMigration_J10002.class);
@Autowired
RangerDaoManager daoMgr;
@Autowired
ServiceDBStore svcDBStore;
@Autowired
JSONUtil jsonUtil;
@Autowired
RangerPolicyService policyService;
@Autowired
StringUtil stringUtil;
@Autowired
XPolicyService xPolService;
@Autowired
XPermMapService xPermMapService;
@Autowired
RangerBizUtil bizUtil;
private static int policyCounter = 0;
private static int serviceCounter = 0;
static Set<String> unsupportedLegacyPermTypes = new HashSet<String>();
static {
unsupportedLegacyPermTypes.add("Unknown");
unsupportedLegacyPermTypes.add("Reset");
unsupportedLegacyPermTypes.add("Obfuscate");
unsupportedLegacyPermTypes.add("Mask");
}
public static void main(String[] args) {
logger.info("main()");
try {
PatchMigration_J10002 loader = (PatchMigration_J10002) CLIUtil.getBean(PatchMigration_J10002.class);
loader.init();
while (loader.isMoreToProcess()) {
loader.load();
}
logger.info("Load complete. Exiting!!!");
System.exit(0);
} catch (Exception e) {
logger.error("Error loading", e);
System.exit(1);
}
}
@Override
public void init() throws Exception {
// Do Nothing
}
@Override
public void execLoad() {
logger.info("==> MigrationPatch.execLoad()");
try {
migrateServicesToNewSchema();
migratePoliciesToNewSchema();
updateSequences();
} catch (Exception e) {
logger.error("Error whille migrating data.", e);
}
logger.info("<== MigrationPatch.execLoad()");
}
@Override
public void printStats() {
logger.info("Total Number of migrated repositories/services: " + serviceCounter);
logger.info("Total Number of migrated resources/policies: " + policyCounter);
}
public void migrateServicesToNewSchema() throws Exception {
logger.info("==> MigrationPatch.migrateServicesToNewSchema()");
try {
List<XXAsset> repoList = daoMgr.getXXAsset().getAll();
if (repoList.isEmpty()) {
return;
}
if (!repoList.isEmpty()) {
EmbeddedServiceDefsUtil.instance().init(svcDBStore);
}
svcDBStore.setPopulateExistingBaseFields(true);
for (XXAsset xAsset : repoList) {
if (xAsset.getActiveStatus() == AppConstants.STATUS_DELETED) {
continue;
}
RangerService existing = svcDBStore.getServiceByName(xAsset.getName());
if (existing != null) {
logger.info("Repository/Service already exists. Ignoring migration of repo: " + xAsset.getName());
continue;
}
RangerService service = new RangerService();
service = mapXAssetToService(service, xAsset);
service = svcDBStore.createService(service);
serviceCounter++;
logger.info("New Service created. ServiceName: " + service.getName());
}
svcDBStore.setPopulateExistingBaseFields(false);
} catch (Exception e) {
throw new Exception("Error while migrating data to new Plugin Schema.", e);
}
logger.info("<== MigrationPatch.migrateServicesToNewSchema()");
}
public void migratePoliciesToNewSchema() throws Exception {
logger.info("==> MigrationPatch.migratePoliciesToNewSchema()");
try {
List<XXResource> resList = daoMgr.getXXResource().getAll();
if (resList.isEmpty()) {
return;
}
svcDBStore.setPopulateExistingBaseFields(true);
for (XXResource xRes : resList) {
if (xRes.getResourceStatus() == AppConstants.STATUS_DELETED) {
continue;
}
XXAsset xAsset = daoMgr.getXXAsset().getById(xRes.getAssetId());
if (xAsset == null) {
logger.error("No Repository found for policyName: " + xRes.getPolicyName());
continue;
}
RangerService service = svcDBStore.getServiceByName(xAsset.getName());
if (service == null) {
logger.error("No Service found for policy. Ignoring migration of such policy, policyName: "
+ xRes.getPolicyName());
continue;
}
XXPolicy existing = daoMgr.getXXPolicy().findByNameAndServiceId(xRes.getPolicyName(), service.getId());
if (existing != null) {
logger.info("Policy already exists. Ignoring migration of policy: " + existing.getName());
continue;
}
RangerPolicy policy = new RangerPolicy();
policy = mapXResourceToPolicy(policy, xRes, service);
if(policy != null) {
policy = svcDBStore.createPolicy(policy);
policyCounter++;
logger.info("New policy created. policyName: " + policy.getName());
}
}
svcDBStore.setPopulateExistingBaseFields(false);
} catch (Exception e) {
throw new Exception("Error while migrating data to new Plugin Schema.", e);
}
logger.info("<== MigrationPatch.migratePoliciesToNewSchema()");
}
private RangerService mapXAssetToService(RangerService service, XXAsset xAsset) throws Exception {
String type = "";
String name = xAsset.getName();
String description = xAsset.getDescription();
Map<String, String> configs = null;
int typeInt = xAsset.getAssetType();
XXServiceDef serviceDef = daoMgr.getXXServiceDef().findByName(AppConstants.getLabelFor_AssetType(typeInt).toLowerCase());
if (serviceDef == null) {
throw new Exception("No ServiceDefinition found for repository: " + name);
}
type = serviceDef.getName();
configs = jsonUtil.jsonToMap(xAsset.getConfig());
List<XXServiceConfigDef> mandatoryConfigs = daoMgr.getXXServiceConfigDef().findByServiceDefName(type);
for (XXServiceConfigDef serviceConf : mandatoryConfigs) {
if (serviceConf.getIsMandatory()) {
if (!stringUtil.isEmpty(configs.get(serviceConf.getName()))) {
continue;
}
String dataType = serviceConf.getType();
String defaultValue = serviceConf.getDefaultvalue();
if (stringUtil.isEmpty(defaultValue)) {
defaultValue = getDefaultValueForDataType(dataType);
}
configs.put(serviceConf.getName(), defaultValue);
}
}
service.setType(type);
service.setName(name);
service.setDescription(description);
service.setConfigs(configs);
service.setCreateTime(xAsset.getCreateTime());
service.setUpdateTime(xAsset.getUpdateTime());
XXPortalUser createdByUser = daoMgr.getXXPortalUser().getById(xAsset.getAddedByUserId());
XXPortalUser updByUser = daoMgr.getXXPortalUser().getById(xAsset.getUpdatedByUserId());
if (createdByUser != null) {
service.setCreatedBy(createdByUser.getLoginId());
}
if (updByUser != null) {
service.setUpdatedBy(updByUser.getLoginId());
}
service.setId(xAsset.getId());
return service;
}
private String getDefaultValueForDataType(String dataType) {
String defaultValue = "";
switch (dataType) {
case "int":
defaultValue = "0";
break;
case "string":
defaultValue = "unknown";
break;
case "bool":
defaultValue = "false";
break;
case "enum":
defaultValue = "0";
break;
case "password":
defaultValue = "password";
break;
default:
break;
}
return defaultValue;
}
private RangerPolicy mapXResourceToPolicy(RangerPolicy policy, XXResource xRes, RangerService service) {
String serviceName = service.getName();
String serviceType = service.getType();
String name = xRes.getPolicyName();
String description = xRes.getDescription();
Boolean isAuditEnabled = true;
Boolean isEnabled = true;
Map<String, RangerPolicyResource> resources = new HashMap<String, RangerPolicyResource>();
List<RangerPolicyItem> policyItems = new ArrayList<RangerPolicyItem>();
XXServiceDef svcDef = daoMgr.getXXServiceDef().findByName(serviceType);
if(svcDef == null) {
logger.error(serviceType + ": service-def not found. Skipping policy '" + name + "'");
return null;
}
List<XXAuditMap> auditMapList = daoMgr.getXXAuditMap().findByResourceId(xRes.getId());
if (stringUtil.isEmpty(auditMapList)) {
isAuditEnabled = false;
}
if (xRes.getResourceStatus() == AppConstants.STATUS_DISABLED) {
isEnabled = false;
}
Boolean isPathRecursive = xRes.getIsRecursive() == RangerCommonEnums.BOOL_TRUE;
Boolean isTableExcludes = xRes.getTableType() == RangerCommonEnums.POLICY_EXCLUSION;
Boolean isColumnExcludes = xRes.getColumnType() == RangerCommonEnums.POLICY_EXCLUSION;
if (StringUtils.equalsIgnoreCase(serviceType, "hdfs")) {
toRangerResourceList(xRes.getName(), "path", Boolean.FALSE, isPathRecursive, resources);
} else if (StringUtils.equalsIgnoreCase(serviceType, "hbase")) {
toRangerResourceList(xRes.getTables(), "table", isTableExcludes, Boolean.FALSE, resources);
toRangerResourceList(xRes.getColumnFamilies(), "column-family", Boolean.FALSE, Boolean.FALSE, resources);
toRangerResourceList(xRes.getColumns(), "column", isColumnExcludes, Boolean.FALSE, resources);
} else if (StringUtils.equalsIgnoreCase(serviceType, "hive")) {
toRangerResourceList(xRes.getDatabases(), "database", Boolean.FALSE, Boolean.FALSE, resources);
toRangerResourceList(xRes.getTables(), "table", isTableExcludes, Boolean.FALSE, resources);
toRangerResourceList(xRes.getColumns(), "column", isColumnExcludes, Boolean.FALSE, resources);
toRangerResourceList(xRes.getUdfs(), "udf", Boolean.FALSE, Boolean.FALSE, resources);
} else if (StringUtils.equalsIgnoreCase(serviceType, "knox")) {
toRangerResourceList(xRes.getTopologies(), "topology", Boolean.FALSE, Boolean.FALSE, resources);
toRangerResourceList(xRes.getServices(), "service", Boolean.FALSE, Boolean.FALSE, resources);
} else if (StringUtils.equalsIgnoreCase(serviceType, "storm")) {
toRangerResourceList(xRes.getTopologies(), "topology", Boolean.FALSE, Boolean.FALSE, resources);
}
policyItems = getPolicyItemListForRes(xRes, svcDef);
policy.setService(serviceName);
policy.setName(name);
policy.setDescription(description);
policy.setIsAuditEnabled(isAuditEnabled);
policy.setIsEnabled(isEnabled);
policy.setResources(resources);
policy.setPolicyItems(policyItems);
policy.setCreateTime(xRes.getCreateTime());
policy.setUpdateTime(xRes.getUpdateTime());
XXPortalUser createdByUser = daoMgr.getXXPortalUser().getById(xRes.getAddedByUserId());
XXPortalUser updByUser = daoMgr.getXXPortalUser().getById(xRes.getUpdatedByUserId());
if (createdByUser != null) {
policy.setCreatedBy(createdByUser.getLoginId());
}
if (updByUser != null) {
policy.setUpdatedBy(updByUser.getLoginId());
}
policy.setId(xRes.getId());
return policy;
}
private Map<String, RangerPolicy.RangerPolicyResource> toRangerResourceList(String resourceString, String resourceType, Boolean isExcludes, Boolean isRecursive, Map<String, RangerPolicy.RangerPolicyResource> resources) {
Map<String, RangerPolicy.RangerPolicyResource> ret = resources == null ? new HashMap<String, RangerPolicy.RangerPolicyResource>() : resources;
if(StringUtils.isNotBlank(resourceString)) {
RangerPolicy.RangerPolicyResource resource = ret.get(resourceType);
if(resource == null) {
resource = new RangerPolicy.RangerPolicyResource();
resource.setIsExcludes(isExcludes);
resource.setIsRecursive(isRecursive);
ret.put(resourceType, resource);
}
Collections.addAll(resource.getValues(), resourceString.split(","));
}
return ret;
}
private List<RangerPolicyItem> getPolicyItemListForRes(XXResource xRes, XXServiceDef svcDef) {
List<RangerPolicyItem> policyItems = new ArrayList<RangerPolicyItem>();
SearchCriteria sc = new SearchCriteria();
sc.addParam("resourceId", xRes.getId());
List<VXPermMap> permMapList = xPermMapService.searchXPermMaps(sc).getVXPermMaps();
HashMap<String, List<VXPermMap>> sortedPermMap = new HashMap<String, List<VXPermMap>>();
// re-group the list with permGroup as the key
if (permMapList != null) {
for(VXPermMap permMap : permMapList) {
String permGrp = permMap.getPermGroup();
List<VXPermMap> sortedList = sortedPermMap.get(permGrp);
if(sortedList == null) {
sortedList = new ArrayList<VXPermMap>();
sortedPermMap.put(permGrp, sortedList);
}
sortedList.add(permMap);
}
}
for (Entry<String, List<VXPermMap>> entry : sortedPermMap.entrySet()) {
List<String> userList = new ArrayList<String>();
List<String> groupList = new ArrayList<String>();
List<RangerPolicyItemAccess> accessList = new ArrayList<RangerPolicyItemAccess>();
String ipAddress = null;
RangerPolicy.RangerPolicyItem policyItem = new RangerPolicy.RangerPolicyItem();
for(VXPermMap permMap : entry.getValue()) {
if(permMap.getPermFor() == AppConstants.XA_PERM_FOR_USER) {
String userName = getUserName(permMap);
if (! userList.contains(userName)) {
userList.add(userName);
}
} else if(permMap.getPermFor() == AppConstants.XA_PERM_FOR_GROUP) {
String groupName = getGroupName(permMap);
if (! groupList.contains(groupName)) {
groupList.add(groupName);
}
}
String accessType = ServiceUtil.toAccessType(permMap.getPermType());
if(StringUtils.isBlank(accessType) || unsupportedLegacyPermTypes.contains(accessType)) {
logger.info(accessType + ": is not a valid access-type, ignoring accesstype for policy: " + xRes.getPolicyName());
continue;
}
if(StringUtils.equalsIgnoreCase(accessType, "Admin")) {
policyItem.setDelegateAdmin(Boolean.TRUE);
if ( svcDef.getId() == EmbeddedServiceDefsUtil.instance().getHBaseServiceDefId()) {
addAccessType(accessType, accessList);
}
} else {
addAccessType(accessType, accessList);
}
ipAddress = permMap.getIpAddress();
}
if(CollectionUtils.isEmpty(accessList)) {
logger.info("no access specified. ignoring policyItem for policy: " + xRes.getPolicyName());
continue;
}
if(CollectionUtils.isEmpty(userList) && CollectionUtils.isEmpty(groupList)) {
logger.info("no user or group specified. ignoring policyItem for policy: " + xRes.getPolicyName());
continue;
}
policyItem.setUsers(userList);
policyItem.setGroups(groupList);
policyItem.setAccesses(accessList);
if(ipAddress != null && !ipAddress.isEmpty()) {
XXPolicyConditionDef policyCond = daoMgr.getXXPolicyConditionDef().findByServiceDefIdAndName(svcDef.getId(), "ip-range");
if(policyCond != null) {
RangerPolicy.RangerPolicyItemCondition ipCondition = new RangerPolicy.RangerPolicyItemCondition("ip-range", Collections.singletonList(ipAddress));
policyItem.getConditions().add(ipCondition);
}
}
policyItems.add(policyItem);
}
return policyItems;
}
/**
 * Appends {@code accessType} to {@code accessList} unless an entry with the
 * same type (compared case-insensitively) is already present.
 *
 * @param accessType the access-type name to add
 * @param accessList the policy-item access list to extend in place
 */
private void addAccessType(String accessType, List<RangerPolicyItemAccess> accessList) {
    boolean found = false;
    for (int i = 0; i < accessList.size() && !found; i++) {
        found = StringUtils.equalsIgnoreCase(accessType, accessList.get(i).getType());
    }
    if (!found) {
        accessList.add(new RangerPolicyItemAccess(accessType));
    }
}
// Refreshes the id sequences of the service-def, service and policy DAOs after
// the migration has inserted rows. NOTE(review): presumably this re-syncs the
// DB sequence generators with the explicitly-assigned ids — confirm against the
// DAO updateSequence() implementations.
private void updateSequences() {
    daoMgr.getXXServiceDef().updateSequence();
    daoMgr.getXXService().updateSequence();
    daoMgr.getXXPolicy().updateSequence();
}
/**
 * Resolves the user name for a permission mapping: uses the name stored on the
 * mapping when present, otherwise looks the user up by id through the DAO layer.
 *
 * @param permMap the legacy permission mapping
 * @return the resolved user name, or the original (possibly null/empty) value
 *         when no lookup succeeds
 */
private String getUserName(VXPermMap permMap) {
    String name = permMap.getUserName();
    if (name != null && !name.isEmpty()) {
        return name;
    }
    Long id = permMap.getUserId();
    if (id == null) {
        return name;
    }
    XXUser user = daoMgr.getXXUser().getById(id);
    return (user != null) ? user.getName() : name;
}
/**
 * Resolves the group name for a permission mapping: uses the name stored on the
 * mapping when present, otherwise looks the group up by id through the DAO layer.
 *
 * @param permMap the legacy permission mapping
 * @return the resolved group name, or the original (possibly null/empty) value
 *         when no lookup succeeds
 */
private String getGroupName(VXPermMap permMap) {
    String name = permMap.getGroupName();
    if (name != null && !name.isEmpty()) {
        return name;
    }
    Long id = permMap.getGroupId();
    if (id == null) {
        return name;
    }
    XXGroup group = daoMgr.getXXGroup().getById(id);
    return (group != null) ? group.getName() : name;
}
}
| gzsombor/ranger | security-admin/src/main/java/org/apache/ranger/patch/PatchMigration_J10002.java | Java | apache-2.0 | 18,542 |
package org.ovirt.mobile.movirt.ui.mvp;

/**
 * MVP view contract for views that a presenter can close.
 */
public interface FinishableView extends BaseView {
    /**
     * Closes the view.
     */
    void finish();
}
| matobet/moVirt | moVirt/src/main/java/org/ovirt/mobile/movirt/ui/mvp/FinishableView.java | Java | apache-2.0 | 114 |
/*
* PolicyEditorKit.java
*
* Created on October 20, 2005, 5:14 PM
*
* To change this template, choose Tools | Template Manager
* and open the template in the editor.
*/
package org.netbeans.modules.policysupport;
import javax.swing.text.Document;
import org.netbeans.editor.Syntax;
import org.netbeans.modules.editor.NbEditorKit;
import org.openide.ErrorManager;
/**
*
* @author HuberB1
*/
public class PolicyEditorKit extends NbEditorKit {
protected final static long serialVersionUID = 20060730114300L;
private static final ErrorManager LOGGER = ErrorManager.getDefault().getInstance("org.netbeans.modules.policysupport.PolicyEditorKit");
private static final boolean LOG = LOGGER.isLoggable(ErrorManager.INFORMATIONAL);
public static final String MIME_TYPE = "text/x-java-policy"; // NOI18N
/**
*
* Creates a new instance of PolicyEditorKit
*/
public PolicyEditorKit() {
}
/**
* Create a syntax object suitable for highlighting Policy file syntax
*/
public Syntax createSyntax(Document doc) {
if (LOG) {
LOGGER.log(ErrorManager.INFORMATIONAL, "createSyntax"); // NOI18N
}
return new PolicySyntax();
}
/**
* Retrieves the content type for this editor kit
*/
public String getContentType() {
return MIME_TYPE;
}
} | bernhardhuber/netbeansplugins | nb-policy-support/src/org/netbeans/modules/policysupport/PolicyEditorKit.java | Java | apache-2.0 | 1,443 |
/*
* Copyright 2020 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*************************************************************
 * Current structure of a ClaimCode document:
 *
 * ClaimCode
 *   Code - the reward claim code string
 *   Redeemer - player who redeemed this code
 *   Timestamp - when the code was redeemed
 *
 ************************************************************/
import * as Defaults from './defaults';
import * as Universal from './universal';
// Collection path for claim-code documents.
export const COLLECTION_PATH = "claimCodes";
// Document id field; mirrors the universal user-id field name.
export const FIELD__ID = Universal.FIELD__USER_ID;
// The claim code string itself.
export const FIELD__CODE = "code";
// Player who redeemed this code.
export const FIELD__REDEEMER = "redeemer";
// Time the code was redeemed — TODO confirm whether set at redeem time (not set by create()).
export const FIELD__TIMESTAMP = "timestamp";
/**
 * Builds the initial field map for a new claim-code document.
 *
 * @param code the claim code string to store
 * @returns a document payload holding the code, with the redeemer cleared
 */
export function create(code: string): { [key: string]: any; } {
  const doc: { [key: string]: any; } = {};
  doc[FIELD__CODE] = code;
  doc[FIELD__REDEEMER] = Defaults.EMPTY_REWARD_REDEEMER;
  return doc;
}
| google/playhvz | firebaseFunctions/functions/src/data/claimcode.ts | TypeScript | apache-2.0 | 1,421 |
// Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInspection.ui.actions;
import com.intellij.application.options.CodeStyle;
import com.intellij.codeEditor.printing.ExportToHTMLSettings;
import com.intellij.codeInspection.InspectionApplication;
import com.intellij.codeInspection.InspectionsBundle;
import com.intellij.codeInspection.ex.*;
import com.intellij.codeInspection.export.ExportToHTMLDialog;
import com.intellij.codeInspection.export.InspectionTreeHtmlWriter;
import com.intellij.codeInspection.ui.*;
import com.intellij.configurationStore.JbXmlOutputter;
import com.intellij.icons.AllIcons;
import com.intellij.ide.BrowserUtil;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.ui.popup.ListPopup;
import com.intellij.openapi.ui.popup.PopupStep;
import com.intellij.openapi.ui.popup.util.BaseListPopupStep;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.JDOMUtil;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vfs.CharsetToolkit;
import com.intellij.util.ui.tree.TreeUtil;
import gnu.trove.THashSet;
import org.jdom.Element;
import org.jdom.output.Format;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import java.io.*;
import java.util.Collection;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
 * Toolbar action on the inspection results view that exports the current
 * inspection results either as browsable HTML or as per-tool XML reports.
 */
public class ExportHTMLAction extends AnAction implements DumbAware {
  private static final Logger LOG = Logger.getInstance(ExportHTMLAction.class);
  private final InspectionResultsView myView;
  @NonNls private static final String ROOT = "root";
  @NonNls private static final String AGGREGATE = "_aggregate";
  @NonNls private static final String HTML = "HTML";
  @NonNls private static final String XML = "XML";

  public ExportHTMLAction(final InspectionResultsView view) {
    super(InspectionsBundle.message("inspection.action.export.html"), null, AllIcons.ToolbarDecorator.Export);
    myView = view;
  }

  /** Shows a two-item popup (HTML / XML) and launches the chosen export. */
  @Override
  public void actionPerformed(@NotNull AnActionEvent e) {
    final ListPopup popup = JBPopupFactory.getInstance().createListPopup(
      new BaseListPopupStep<String>(InspectionsBundle.message("inspection.action.export.popup.title"), HTML, XML) {
        @Override
        public PopupStep onChosen(final String selectedValue, final boolean finalChoice) {
          return doFinalStep(() -> exportHTML(Comparing.strEqual(selectedValue, HTML)));
        }
      });
    InspectionResultsView.showPopup(e, popup);
  }

  /**
   * Asks the user for the output directory via the export dialog, then runs the
   * export (HTML or XML) under a modal progress indicator inside a read action.
   * For HTML, optionally opens the generated index.html in the browser.
   */
  private void exportHTML(final boolean exportToHTML) {
    ExportToHTMLDialog exportToHTMLDialog = new ExportToHTMLDialog(myView.getProject(), exportToHTML);
    final ExportToHTMLSettings exportToHTMLSettings = ExportToHTMLSettings.getInstance(myView.getProject());
    if (exportToHTMLSettings.OUTPUT_DIRECTORY == null) {
      // First-time default: <IDE home>/inspections
      exportToHTMLSettings.OUTPUT_DIRECTORY = PathManager.getHomePath() + File.separator + "inspections";
    }
    exportToHTMLDialog.reset();
    if (!exportToHTMLDialog.showAndGet()) {
      return;
    }
    exportToHTMLDialog.apply();
    final String outputDirectoryName = exportToHTMLSettings.OUTPUT_DIRECTORY;
    ApplicationManager.getApplication().invokeLater(() -> {
      final Runnable exportRunnable = () -> ApplicationManager.getApplication().runReadAction(() -> {
        if (!exportToHTML) {
          dump2xml(outputDirectoryName);
        }
        else {
          try {
            // The writer's constructor performs the whole HTML export.
            new InspectionTreeHtmlWriter(myView, outputDirectoryName);
          }
          catch (ProcessCanceledException e) {
            // Do nothing here.
          }
        }
      });
      if (!ProgressManager.getInstance().runProcessWithProgressSynchronously(exportRunnable,
                                                                            InspectionsBundle.message(exportToHTML
                                                                                                      ? "inspection.generating.html.progress.title"
                                                                                                      : "inspection.generating.xml.progress.title"), true,
                                                                            myView.getProject())) {
        return;
      }
      if (exportToHTML && exportToHTMLSettings.OPEN_IN_BROWSER) {
        BrowserUtil.browse(new File(exportToHTMLSettings.OUTPUT_DIRECTORY, "index.html"));
      }
    });
  }

  /**
   * Writes one XML report per inspection tool (plus an optional "_aggregate"
   * file for tools that export aggregate results) into the given directory,
   * followed by a .descriptions.xml file naming the profile used.
   */
  private void dump2xml(final String outputDirectoryName) {
    try {
      final File outputDir = new File(outputDirectoryName);
      if (!outputDir.exists() && !outputDir.mkdirs()) {
        throw new IOException("Cannot create \'" + outputDir + "\'");
      }
      final InspectionTreeNode root = myView.getTree().getRoot();
      // Exceptions thrown inside the tree traversal lambda are captured here.
      final Exception[] ex = new Exception[1];
      // Guards against writing the same tool twice when it appears in several scopes.
      final Set<String> visitedTools = new THashSet<>();
      Format format = JDOMUtil.createFormat("\n");
      XMLOutputFactory xmlOutputFactory = XMLOutputFactory.newInstance();
      TreeUtil.treeNodeTraverser(root).traverse().processEach(node -> {
        if (node instanceof InspectionNode) {
          InspectionNode toolNode = (InspectionNode)node;
          if (toolNode.isExcluded()) return true;
          InspectionToolWrapper toolWrapper = toolNode.getToolWrapper();
          if (!visitedTools.add(toolNode.getToolWrapper().getShortName())) return true;
          String name = toolWrapper.getShortName();
          // The aggregate writer opens its file lazily, so no file appears
          // unless some presentation actually exports aggregate results.
          try (XmlWriterWrapper reportWriter = new XmlWriterWrapper(myView.getProject(), outputDirectoryName, name,
                                                                    xmlOutputFactory, format, GlobalInspectionContextBase.PROBLEMS_TAG_NAME);
               XmlWriterWrapper aggregateWriter = new XmlWriterWrapper(myView.getProject(), outputDirectoryName, name + AGGREGATE,
                                                                       xmlOutputFactory, format, ROOT)) {
            reportWriter.checkOpen();
            for (InspectionToolPresentation presentation : getPresentationsFromAllScopes(toolNode)) {
              presentation.exportResults(reportWriter::writeElement, presentation::isExcluded, presentation::isExcluded);
              if (presentation instanceof AggregateResultsExporter) {
                ((AggregateResultsExporter)presentation).exportAggregateResults(aggregateWriter::writeElement);
              }
            }
          }
          catch (XmlWriterWrapperException e) {
            Throwable cause = e.getCause();
            ex[0] = cause instanceof Exception ? (Exception)cause : e;
          }
        }
        return true;
      });
      if (ex[0] != null) {
        throw ex[0];
      }
      final Element element = new Element(InspectionApplication.INSPECTIONS_NODE);
      final String profileName = myView.getCurrentProfileName();
      if (profileName != null) {
        element.setAttribute(InspectionApplication.PROFILE, profileName);
      }
      JDOMUtil.write(element,
                     new File(outputDirectoryName, InspectionApplication.DESCRIPTIONS + InspectionApplication.XML_EXTENSION),
                     CodeStyle.getDefaultSettings().getLineSeparator());
    }
    catch (Exception e) {
      LOG.error(e);
      ApplicationManager.getApplication().invokeLater(() -> Messages.showErrorDialog(myView, e.getMessage()));
    }
  }

  /**
   * Opens a UTF-8 buffered writer over the report file for the given tool name,
   * creating parent directories as needed.
   */
  @NotNull
  public static BufferedWriter getWriter(String outputDirectoryName, String name) throws FileNotFoundException {
    File file = getInspectionResultFile(outputDirectoryName, name);
    FileUtil.createParentDirs(file);
    return new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file), CharsetToolkit.UTF8_CHARSET));
  }

  /** Builds the "<dir>/<name>.xml" report file for a tool. */
  @NotNull
  public static File getInspectionResultFile(String outputDirectoryName, String name) {
    return new File(outputDirectoryName, name + InspectionApplication.XML_EXTENSION);
  }

  /**
   * Collects the presentations for this tool across all configured scopes;
   * falls back to the single wrapper when no profile is selected.
   */
  @NotNull
  private Collection<InspectionToolPresentation> getPresentationsFromAllScopes(@NotNull InspectionNode node) {
    final InspectionToolWrapper wrapper = node.getToolWrapper();
    Stream<InspectionToolWrapper> wrappers;
    if (myView.getCurrentProfileName() == null){
      wrappers = Stream.of(wrapper);
    } else {
      final String shortName = wrapper.getShortName();
      final GlobalInspectionContextImpl context = myView.getGlobalInspectionContext();
      final Tools tools = context.getTools().get(shortName);
      if (tools != null) { //dummy entry points tool
        wrappers = tools.getTools().stream().map(ScopeToolState::getTool);
      } else {
        wrappers = Stream.empty();
      }
    }
    return wrappers.map(w -> myView.getGlobalInspectionContext().getPresentation(w)).collect(Collectors.toList());
  }

  /**
   * Lazily-opening XML writer: the underlying file and StAX writer are created
   * on the first {@link #writeElement}/{@link #checkOpen} call, so tools that
   * produce no output leave no file behind. XML structure (root tag, indenting)
   * is handled by StAX; element bodies are written through JbXmlOutputter so
   * path macros are collapsed.
   */
  private static class XmlWriterWrapper implements Closeable {
    private final Project myProject;
    private final String myOutputDirectoryName;
    private final String myName;
    private final XMLOutputFactory myFactory;
    private final Format myFormat;
    private final String myRootTagName;

    private XMLStreamWriter myXmlWriter;
    private Writer myFileWriter;

    XmlWriterWrapper(@NotNull Project project,
                     @NotNull String outputDirectoryName,
                     @NotNull String name,
                     @NotNull XMLOutputFactory factory,
                     @NotNull Format format,
                     @NotNull String rootTagName) {
      myProject = project;
      myOutputDirectoryName = outputDirectoryName;
      myName = name;
      myFactory = factory;
      myFormat = format;
      myRootTagName = rootTagName;
    }

    /** Writes one element under the root tag, opening the file on first use. */
    void writeElement(@NotNull Element element) {
      try {
        checkOpen();
        // Indent, then flush StAX before handing the raw stream to JDOM output.
        myXmlWriter.writeCharacters(myFormat.getLineSeparator() + myFormat.getIndent());
        myXmlWriter.flush();
        JbXmlOutputter.collapseMacrosAndWrite(element, myProject, myFileWriter);
        myFileWriter.flush();
      }
      catch (XMLStreamException | IOException e) {
        throw new XmlWriterWrapperException(e);
      }
    }

    /** Opens the output file and writes the root start tag if not yet done. */
    void checkOpen() {
      if (myXmlWriter == null) {
        myFileWriter = openFile(myOutputDirectoryName, myName);
        myXmlWriter = startWritingXml(myFileWriter);
      }
    }

    @Override
    public void close() {
      if (myXmlWriter != null) {
        try {
          endWritingXml(myXmlWriter);
        }
        finally {
          myXmlWriter = null;
          try {
            closeFile(myFileWriter);
          }
          finally {
            myFileWriter = null;
          }
        }
      }
    }

    @NotNull
    private static Writer openFile(@NotNull String outputDirectoryName, @NotNull String name) {
      try {
        return getWriter(outputDirectoryName, name);
      }
      catch (FileNotFoundException e) {
        throw new XmlWriterWrapperException(e);
      }
    }

    private static void closeFile(@NotNull Writer fileWriter) {
      try {
        fileWriter.close();
      }
      catch (IOException e) {
        throw new XmlWriterWrapperException(e);
      }
    }

    @NotNull
    private XMLStreamWriter startWritingXml(@NotNull Writer fileWriter) {
      try {
        XMLStreamWriter xmlWriter = myFactory.createXMLStreamWriter(fileWriter);
        xmlWriter.writeStartElement(myRootTagName);
        return xmlWriter;
      }
      catch (XMLStreamException e) {
        throw new XmlWriterWrapperException(e);
      }
    }

    private void endWritingXml(@NotNull XMLStreamWriter xmlWriter) {
      try {
        try {
          xmlWriter.writeCharacters(myFormat.getLineSeparator());
          xmlWriter.writeEndElement();
          xmlWriter.flush();
        }
        finally {
          xmlWriter.close();
        }
      }
      catch (XMLStreamException e) {
        throw new XmlWriterWrapperException(e);
      }
    }
  }

  /** Unchecked carrier so IO/StAX failures can cross lambda boundaries. */
  private static class XmlWriterWrapperException extends RuntimeException {
    private XmlWriterWrapperException(Throwable cause) {
      super(cause.getMessage(), cause);
    }
  }
}
| mdanielwork/intellij-community | platform/lang-impl/src/com/intellij/codeInspection/ui/actions/ExportHTMLAction.java | Java | apache-2.0 | 12,805 |
package org.pikater.web;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.pikater.shared.logging.GeneralPikaterLogger;
import org.pikater.shared.logging.IPikaterLogger;
/**
* Special logger wrapper to be used by the web application.
*
* @author SkyCrawl
*/
public class PikaterWebLogger extends GeneralPikaterLogger
{
	// Single shared logger instance, backed by the JUL logger named "log4j".
	private static final IPikaterLogger innerLogger = createPikaterLogger(Logger.getLogger("log4j"));

	/**
	 * Returns the shared web-application logger instance.
	 */
	public static IPikaterLogger getLogger()
	{
		return innerLogger;
	}

	/**
	 * Convenience wrapper: logs a throwable with an accompanying message.
	 */
	public static void logThrowable(String message, Throwable t)
	{
		getLogger().logThrowable(message, t);
	}

	/**
	 * Convenience wrapper: logs a message at the given level.
	 */
	public static void log(Level logLevel, String message)
	{
		getLogger().log(logLevel, message);
	}

	/**
	 * Convenience wrapper: logs a message at the given level, tagged with a source.
	 */
	public static void log(Level logLevel, String source, String message)
	{
		getLogger().log(logLevel, source, message);
	}
}
//-----------------------------------------------------------------------
// <copyright file="DebugPanel.cs" company="Google LLC">
//
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// </copyright>
//-----------------------------------------------------------------------
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
/// <summary>
/// An easy-to-use generic debug panel in Singleton pattern to print real-time outputs
/// directly on the phone.
/// ## Usage
/// * DebugPanel.Instance.Print("This is a message.");
/// * DebugPanel.Instance.SetGreen();
/// The DebugPanel will be initialized when first called.
/// </summary>
public class DebugPanel : MonoBehaviour
{
    /// <summary>
    /// Shows the debug console and prints out the collision-related variables.
    /// </summary>
    public bool ShowDebugOverlay = true;

    // A red color to indicate collision in the bottom debug panel.
    private static Color _colorRed = new Color(252.0f / 255, 141.0f / 255, 89.0f / 255);

    // A green color to indicate no collision in the bottom debug panel.
    private static Color _colorGreen = new Color(145.0f / 255, 207.0f / 255, 96.0f / 255);

    // Backing field for the Singleton instance, assigned in Awake().
    private static DebugPanel _instance;

    // Background image of the debug button; tinted red/green via SetColor().
    private Image _debugButton;

    // Text element that Print() writes messages to.
    private Text _debugConsole;

    /// <summary>
    /// Gets the Singleton class of the DebugPanel.
    /// </summary>
    public static DebugPanel Instance
    {
        get
        {
            return _instance;
        }
    }

    /// <summary>
    /// Prints a message on the debug panel (and to the Unity log).
    /// No-op when ShowDebugOverlay is disabled.
    /// </summary>
    /// <param name="message">The string to print.</param>
    public void Print(string message)
    {
        if (ShowDebugOverlay)
        {
            Debug.Log(message);
            _debugConsole.text = message;
        }
    }

    /// <summary>
    /// Sets the background color of the debug panel to green.
    /// </summary>
    public void SetGreen()
    {
        SetColor(_colorGreen);
    }

    /// <summary>
    /// Sets the background color of the debug panel to red.
    /// </summary>
    public void SetRed()
    {
        SetColor(_colorRed);
    }

    /// <summary>
    /// Sets the background color of the debug panel to a specific color.
    /// No-op when ShowDebugOverlay is disabled.
    /// </summary>
    /// <param name="color">The background color to set.</param>
    public void SetColor(Color color)
    {
        if (ShowDebugOverlay)
        {
            _debugButton.color = color;
        }
    }

    /// <summary>
    /// Locates the DebugButton and DebugConsole scene objects on startup;
    /// disables the overlay if they are missing, or hides them when
    /// ShowDebugOverlay is off.
    /// </summary>
    protected void Start()
    {
        if (GameObject.Find("DebugButton") == null || GameObject.Find("DebugConsole") == null)
        {
            Debug.LogError("Cannot find the debug panel in the scene. \n" +
              "Please copy DebugButton and DebugConsole from other scenes.");
            ShowDebugOverlay = false;
            return;
        }

        if (ShowDebugOverlay)
        {
            _debugButton = GameObject.Find("DebugButton").GetComponent<Image>();
            _debugConsole = GameObject.Find("DebugConsole").GetComponent<Text>();
        }
        else
        {
            GameObject.Find("DebugButton").SetActive(false);
            GameObject.Find("DebugConsole").SetActive(false);
        }
    }

    /// <summary>
    /// Checks if there is a different instance and destroys it when necessary,
    /// enforcing the Singleton pattern.
    /// </summary>
    private void Awake()
    {
        if (_instance != null && _instance != this)
        {
            Destroy(this.gameObject);
        }
        else
        {
            _instance = this;
        }
    }
}
| googlesamples/arcore-depth-lab | Assets/ARRealismDemos/Common/Scripts/DebugPanel.cs | C# | apache-2.0 | 4,224 |
// Copyright 2015 by the person represented as ThoroughlyLostExplorer on GitHub
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
////////////////////////////////////////////////////////////////////////////////
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace ExplOCR
{
/// <summary>
/// Multi-layer perceptron (Emgu CV ANN_MLP) for OCR glyph classification.
/// Input is a DimensionX x DimensionY pixel grid; output classes are the
/// characters in netKeys. Trained weights can be cached to/loaded from SaveFile.
/// </summary>
class NeuralNet : IDisposable
{
    /// <summary>
    /// Creates a net for the given glyph dimensions, recognizable characters,
    /// and training-data files.
    /// </summary>
    public NeuralNet(int dX, int dY, IEnumerable<char> chars, IEnumerable<string> files)
    {
        netKeys = new List<char>(chars);
        knowledgeFiles = new List<string>(files);
        dimensionX = dX;
        dimensionY = dY;
    }

    /// <summary>
    /// Path where trained network weights are saved/loaded. May be null.
    /// </summary>
    public string SaveFile
    {
        get { return saveFile; }
        set { saveFile = value; }
    }

    /// <summary>
    /// Hidden-layer sizing factor: hidden neurons = factor * number of classes.
    /// </summary>
    public float Factor
    {
        get { return factor; }
        set { factor = value; }
    }

    /// <summary>
    /// Trains the network from the knowledge files, or loads previously saved
    /// weights from SaveFile when available. The samples argument is only used
    /// to attempt a cached load; the actual sample count comes from the files.
    /// </summary>
    public void Train(int samples)
    {
        if (samples > 0 && TryLoadTrainingSet(samples))
        {
            return;
        }
        List<string> knowledge = new List<string>();
        foreach (string file in knowledgeFiles)
        {
            knowledge.AddRange(File.ReadAllLines(file));
        }
        string[] lines = knowledge.ToArray();
        // NOTE(review): stray empty statement after the assignment — harmless.
        samples = lines.Length; ;
        int pixels = DimensionY * DimensionX;
        int classes = netKeys.Count;
        // One row per sample; class matrix is one-hot per sample.
        Emgu.CV.Matrix<Single> training = new Emgu.CV.Matrix<Single>(samples, pixels);
        Emgu.CV.Matrix<Single> class_training = new Emgu.CV.Matrix<Single>(samples, classes);
        // Topology: input layer (pixels), one hidden layer, output layer (classes).
        Emgu.CV.Matrix<int> layers = new Emgu.CV.Matrix<int>(3, 1);
        layers[0, 0] = pixels;
        layers[1, 0] = (int)(factor * netKeys.Count);
        layers[2, 0] = classes;
        for (int i = 0; i < samples; i++)
        {
            LetterInfo info = LetterInfo.ReadLetterInfoLine(lines[i]);
            byte[] bytes = Convert.FromBase64String(info.Base64);
            float[] input = AdjustInput(bytes);
            for (int j = 0; j < pixels; j++)
            {
                training[i, j] = input[j];
            }
            /*
            for (int a = -1; a <= 1; a++)
            for (int b = -1; b <= 1; b++)
            for (int c = 0; c < DimensionX; c++)
            for (int d = 0; d < DimensionY; d++)
            {
            if (0 > c + a || c + a >= DimensionX) continue;
            if (0 > d + b || d + b >= DimensionY) continue;
            training[i, d * DimensionX + c] = bytes[(b + d) * DimensionX + (a + c)];
            } */
            int d = netKeys.IndexOf(info.Char);
            class_training[i, d] = 1;
        }
        nnet = new Emgu.CV.ML.ANN_MLP(layers, Emgu.CV.ML.MlEnum.ANN_MLP_ACTIVATION_FUNCTION.SIGMOID_SYM, 0.6, 1);
        Emgu.CV.ML.Structure.MCvANN_MLP_TrainParams p = new Emgu.CV.ML.Structure.MCvANN_MLP_TrainParams();
        p.term_crit.type = Emgu.CV.CvEnum.TERMCRIT.CV_TERMCRIT_EPS | Emgu.CV.CvEnum.TERMCRIT.CV_TERMCRIT_ITER;
        p.term_crit.max_iter = 1000;
        p.term_crit.epsilon = 0.000001;
        p.train_method = Emgu.CV.ML.MlEnum.ANN_MLP_TRAIN_METHOD.BACKPROP;
        p.bp_dw_scale = 0.1;
        p.bp_moment_scale = 0.1;
        // Prefer previously saved weights over retraining.
        bool success = false;
        try
        {
            if (File.Exists(saveFile))
            {
                nnet.Load(saveFile);
                success = true;
            }
        }
        catch
        {
        }
        if (!success)
        {
            int iteration = nnet.Train(training, class_training, null, p, Emgu.CV.ML.MlEnum.ANN_MLP_TRAINING_FLAG.DEFAULT);
            if (saveFile != null)
            {
                Directory.CreateDirectory(Path.GetDirectoryName(saveFile));
                nnet.Save(saveFile);
            }
        }
    }

    /// <summary>
    /// Builds the network topology and attempts to load cached weights from
    /// SaveFile. Returns true on success; on failure the caller must train.
    /// </summary>
    private bool TryLoadTrainingSet(int samples)
    {
        int pixels = DimensionY * DimensionX;
        int classes = netKeys.Count;
        Emgu.CV.Matrix<Single> training = new Emgu.CV.Matrix<Single>(samples, pixels);
        Emgu.CV.Matrix<Single> class_training = new Emgu.CV.Matrix<Single>(samples, classes);
        Emgu.CV.Matrix<int> layers = new Emgu.CV.Matrix<int>(3, 1);
        layers[0, 0] = pixels;
        layers[1, 0] = (int)(factor * netKeys.Count);
        layers[2, 0] = classes;
        nnet = new Emgu.CV.ML.ANN_MLP(layers, Emgu.CV.ML.MlEnum.ANN_MLP_ACTIVATION_FUNCTION.SIGMOID_SYM, 0.6, 1);
        // NOTE(review): the train params below are built but unused on this
        // load-only path; they mirror Train() for topology consistency.
        Emgu.CV.ML.Structure.MCvANN_MLP_TrainParams p = new Emgu.CV.ML.Structure.MCvANN_MLP_TrainParams();
        p.term_crit.type = Emgu.CV.CvEnum.TERMCRIT.CV_TERMCRIT_EPS | Emgu.CV.CvEnum.TERMCRIT.CV_TERMCRIT_ITER;
        p.term_crit.max_iter = 1000;
        p.term_crit.epsilon = 0.000001;
        p.train_method = Emgu.CV.ML.MlEnum.ANN_MLP_TRAIN_METHOD.BACKPROP;
        p.bp_dw_scale = 0.1;
        p.bp_moment_scale = 0.1;
        try
        {
            if (File.Exists(saveFile))
            {
                nnet.Load(saveFile);
                return true;
            }
        }
        catch
        {
        }
        return false;
    }

    /// <summary>Glyph width in pixels.</summary>
    public int DimensionX
    {
        get { return dimensionX; }
    }

    /// <summary>Glyph height in pixels.</summary>
    public int DimensionY
    {
        get { return dimensionY; }
    }

    /// <summary>The characters this net can classify, in output-index order.</summary>
    public char[] NetKeys
    {
        get { return netKeys.ToArray(); }
    }

    /// <summary>
    /// Runs prediction and returns the raw per-class output activations.
    /// Note: feeds raw bytes without AdjustInput scaling.
    /// </summary>
    internal float[] PredictDetailed(byte[] bytes)
    {
        int pixels = DimensionX * DimensionY;
        int samples = 1;
        int classes = netKeys.Count;
        Emgu.CV.Matrix<Single> test = new Emgu.CV.Matrix<Single>(samples, pixels);
        Emgu.CV.Matrix<Single> class_test = new Emgu.CV.Matrix<Single>(samples, classes);
        Emgu.CV.Matrix<Single> result = new Emgu.CV.Matrix<float>(1, classes);
        for (int j = 0; j < pixels; j++)
        {
            test[0, j] = bytes[j];
        }
        float[] floats = new float[classes];
        Emgu.CV.Matrix<Single> sample = test.GetRow(0);
        nnet.Predict(sample, result);
        for (int j = 0; j < result.Cols; j++)
        {
            floats[j] = result[0, j];
        }
        return floats;
    }

    /// <summary>
    /// Classifies a glyph; '*' is returned when the prediction is too weak or
    /// ambiguous. Overload that discards the quality estimate.
    /// </summary>
    internal char Predict(byte[] bytes, bool margin)
    {
        double quality;
        return Predict(bytes, margin, out quality);
    }

    /// <summary>
    /// Classifies a glyph. The quality out-parameter (0..1) is derived from the
    /// best activation, penalized when the runner-up is close. With margin set,
    /// near-ties also return '*'.
    /// </summary>
    internal char Predict(byte[] bytes, bool margin, out double quality)
    {
        int pixels = DimensionX * DimensionY;
        int samples = 1;
        int classes = netKeys.Count;
        Emgu.CV.Matrix<Single> test = new Emgu.CV.Matrix<Single>(samples, pixels);
        Emgu.CV.Matrix<Single> class_test = new Emgu.CV.Matrix<Single>(samples, classes);
        Emgu.CV.Matrix<Single> result = new Emgu.CV.Matrix<float>(1, classes);
        float[] input = AdjustInput(bytes);
        for (int j = 0; j < pixels; j++)
        {
            test[0, j] = input[j];
        }
        float max, max2;
        int max_idx;
        Emgu.CV.Matrix<Single> sample = test.GetRow(0);
        nnet.Predict(sample, result);
        // Find best and second-best activations.
        max_idx = 0;
        max = result[0, 0];
        max2 = 0;
        for (int j = 0; j < classes; j++)
        {
            if (result[0, j] > max)
            {
                max_idx = j;
                max = result[0, j];
            }
        }
        for (int j = 0; j < classes; j++)
        {
            if (result[0, j] > max2 && j != max_idx)
            {
                max2 = result[0, j];
            }
        }
        quality = max;
        // Penalize by how close the runner-up is (within 0.25 of the best).
        quality = quality - Math.Max(0.25 - Math.Abs(max - max2), 0);
        // Map the range [0.5, 1] to [0, 1]
        quality = (quality * 1.5) - 0.5;
        quality = Math.Max(0.0, Math.Min(quality, 1.0));
        if (margin && Math.Abs(max - max2) < 0.1 && max > 0.5)
            return '*';
        if (max > 0.5) return netKeys[max_idx];
        //if (max > max2+0.25) return netKeys[max_idx];
        return '*';
    }

    /// <summary>
    /// Converts raw pixel bytes to floats, scaling up so the brightest pixel
    /// reaches at least the target level (never scales down).
    /// </summary>
    private float[] AdjustInput(byte[] bytes)
    {
        float target = 170;
        float[] input = new float[bytes.Length];
        float max = 1;
        for (int i = 0; i < bytes.Length; i++)
        {
            input[i] = bytes[i];
            max = Math.Max(max, input[i]);
        }
        float factor = target / max;
        if (factor > 1)
        {
            for (int i = 0; i < bytes.Length; i++)
            {
                input[i] = input[i] * factor;
            }
        }
        return input;
    }

    /// <summary>Expected glyph size for prediction input.</summary>
    public System.Drawing.Size InputSize
    {
        get { return new System.Drawing.Size(dimensionX, dimensionY); }
    }

    /// <summary>Releases the underlying native network.</summary>
    public void Dispose()
    {
        if (nnet != null)
        {
            nnet.Dispose();
        }
    }

    Emgu.CV.ML.ANN_MLP nnet;
    List<char> netKeys;
    List<string> knowledgeFiles;
    readonly int dimensionX;
    readonly int dimensionY;
    string saveFile;
    float factor = 1.5f;
}
}
| ThoroughlyLostExplorer/ExplOCR | ExplOCR/NeuralNet.cs | C# | apache-2.0 | 10,340 |
package com.planet_ink.coffee_mud.WebMacros;
import com.planet_ink.miniweb.interfaces.*;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.core.exceptions.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
import java.util.regex.Pattern;
/*
Copyright 2000-2014 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
@SuppressWarnings({"unchecked","rawtypes"})
public class QuestMaker extends StdWebMacro
{
// Identifier by which web pages reference this macro.
@Override public String name() {return "QuestMaker";}
// Marks this macro as admin-only.
@Override public boolean isAdminMacro() {return true;}
// Matches strings of the form "AT_<rest>", capturing the part after the prefix.
// NOTE(review): not referenced in the portion of the class visible here.
private static final Pattern keyPattern=Pattern.compile("^AT_(.+)");
/**
 * Returns the quest-template page data for the current request, caching the
 * parsed template in the request-object map so repeated macro evaluations on
 * the same request do not re-parse the file.
 *
 * With no template chosen yet, returns (and caches) the template list loaded
 * from fileToGet; otherwise returns the page of the chosen template clamped
 * to the valid page range.
 */
public DVector getPage(MOB mob, HTTPRequest httpReq, String template, String page, String fileToGet)
{
    DVector pageList=(DVector)httpReq.getRequestObjects().get("QM_PAGE_LIST");
    DVector filePages=(DVector)httpReq.getRequestObjects().get("QM_FILE_PAGES");
    if(template.length()==0)
    {
        // No template selected: drop any stale per-template cache and serve
        // the (possibly cached) template list.
        httpReq.removeUrlParameter("QM_FILE_PAGES");
        filePages=null;
        if(pageList!=null) return pageList;
        pageList=CMLib.quests().getQuestTemplate(mob, fileToGet);
        httpReq.getRequestObjects().put("QM_PAGE_LIST",pageList);
        return pageList;
    }
    // Pages are 1-based in the request; convert to 0-based.
    final int pageNumber=CMath.s_int(page)-1;
    if(filePages==null)
    {
        filePages=CMLib.quests().getQuestTemplate(mob, template);
        httpReq.getRequestObjects().put("QM_FILE_PAGES",filePages);
    }
    // Column 4 of the first row holds the list of page DVectors — per the
    // quest-template format produced by CMLib.quests().
    final List<DVector> qPages=(List<DVector>)filePages.elementAt(0,4);
    // Clamp out-of-range page numbers to the first/last page.
    if(pageNumber<=0) return qPages.get(0);
    if(pageNumber>=qPages.size()) return qPages.get(qPages.size()-1);
    return qPages.get(pageNumber);
}
/**
 * Renders an HTML &lt;OPTION&gt; list of the given items followed by all
 * cataloged item names, marking the previously selected entry SELECTED.
 *
 * @param itemList the items to list first
 * @param oldItem the previously selected item, or null to resolve from oldValue
 * @param oldValue fallback selection value, resolved through the catalog
 * @return the concatenated OPTION markup
 */
private String itemList(List<Item> itemList, Item oldItem, String oldValue)
{
    final StringBuilder html = new StringBuilder("");
    final Item selected = (oldItem != null) ? oldItem : RoomData.getItemFromCatalog(oldValue);
    for (final Item item : itemList)
    {
        html.append("<OPTION VALUE=\"").append(RoomData.getItemCode(itemList, item)).append("\" ");
        if ((selected != null) && (selected.sameAs(item)))
            html.append("SELECTED");
        html.append(">");
        html.append(item.Name()).append(RoomData.getObjIDSuffix(item));
    }
    html.append("<OPTION VALUE=\"\">------ CATALOGED -------");
    for (final String catalogName : CMLib.catalog().getCatalogItemNames())
    {
        html.append("<OPTION VALUE=\"CATALOG-").append(catalogName).append("\"");
        if ((selected != null)
        && (CMLib.flags().isCataloged(selected))
        && (selected.Name().equalsIgnoreCase(catalogName)))
            html.append(" SELECTED");
        html.append(">").append(catalogName);
    }
    return html.toString();
}
/**
 * Copies every entry of the given array, flags each copy as catalog-used,
 * and returns only the copies that are MOBs.
 *
 * @param fromList the physical objects to copy and filter
 * @return the MOB copies, catalog usage already registered
 */
public List<MOB> getCatalogMobsForList(Physical[] fromList)
{
    final List<MOB> mobs = new Vector<MOB>();
    for (final Physical original : fromList)
    {
        final Physical copy = (Physical)original.copyOf();
        CMLib.catalog().changeCatalogUsage(copy, true);
        if (copy instanceof MOB)
            mobs.add((MOB)copy);
    }
    return mobs;
}
/**
 * Copies every entry of the given array, flags each copy as catalog-used,
 * and returns only the copies that are Items.
 *
 * @param fromList the physical objects to copy and filter
 * @return the Item copies, catalog usage already registered
 */
public List<Item> getCatalogItemsForList(Physical[] fromList)
{
    final List<Item> items = new Vector<Item>();
    for (final Physical original : fromList)
    {
        final Physical copy = (Physical)original.copyOf();
        CMLib.catalog().changeCatalogUsage(copy, true);
        if (copy instanceof Item)
            items.add((Item)copy);
    }
    return items;
}
/**
 * Renders an HTML &lt;OPTION&gt; list of the given mobs followed by all
 * cataloged mob names, marking the previously selected entry SELECTED.
 *
 * @param mobList the mobs to list first
 * @param oldMob the previously selected mob, or null to resolve from oldValue
 * @param oldValue fallback selection value, resolved through the catalog
 * @return the concatenated OPTION markup
 */
private String mobList(List<MOB> mobList, MOB oldMob, String oldValue)
{
    final StringBuilder html = new StringBuilder("");
    final MOB selected = (oldMob != null) ? oldMob : RoomData.getMOBFromCatalog(oldValue);
    for (final MOB mob : mobList)
    {
        html.append("<OPTION VALUE=\"").append(RoomData.getMOBCode(mobList, mob)).append("\" ");
        if ((selected != null) && (selected.sameAs(mob)))
            html.append("SELECTED");
        html.append(">");
        html.append(mob.Name()).append(RoomData.getObjIDSuffix(mob));
    }
    html.append("<OPTION VALUE=\"\">------ CATALOGED -------");
    for (final String catalogName : CMLib.catalog().getCatalogMobNames())
    {
        html.append("<OPTION VALUE=\"CATALOG-").append(catalogName).append("\"");
        if ((selected != null)
        && (CMLib.flags().isCataloged(selected))
        && (selected.Name().equalsIgnoreCase(catalogName)))
            html.append(" SELECTED");
        html.append(">").append(catalogName);
    }
    return html.toString();
}
@Override
/**
 * Web macro dispatcher for the quest-maker wizard. The requested operation is
 * chosen by which key appears in the macro parameter string (QMPAGETITLE,
 * QMPAGEFIELDS, NEXT/FINISH, BACK, ...). State between wizard pages is carried
 * in (possibly fake) URL parameters on the request: QMTEMPLATE selects the
 * quest template, QMPAGE the current page number, AT_* the per-field answers.
 * @param httpReq the current web request (read and mutated via fake parameters)
 * @param parm the raw macro parameter string
 * @return rendered HTML fragment, a value string, "" on state transitions,
 *         or an [error ...] marker string
 */
public String runMacro(HTTPRequest httpReq, String parm)
{
final java.util.Map<String,String> parms=parseParms(parm);
if((parms==null)||(parms.size()==0)) return "";
final MOB M = Authenticate.getAuthenticatedMob(httpReq);
if(M==null) return "[error -- no authenticated mob!]";
String qFileToGet=null;
if(parms.containsKey("QMFILETOGET"))
qFileToGet=parms.get("QMFILETOGET");
String qTemplate=httpReq.getUrlParameter("QMTEMPLATE");
if((qTemplate==null)||(qTemplate.length()==0)) qTemplate="";
String qPageStr=httpReq.getUrlParameter("QMPAGE");
if((qPageStr==null)||(qPageStr.length()==0)) qPageStr="";
String qPageErrors=httpReq.getUrlParameter("QMPAGEERRORS");
if((qPageErrors==null)||(qPageErrors.length()==0)) qPageErrors="";
// QMPAGETITLE: return the title string of the current wizard page
if(parms.containsKey("QMPAGETITLE"))
{
final DVector pageData=getPage(M,httpReq,qTemplate,qPageStr,null);
if(pageData==null) return "[error -- no page selected!]";
return (String)pageData.elementAt(0,2);
}
else
// QMPAGEINSTR: return the instruction text of the current wizard page
if(parms.containsKey("QMPAGEINSTR"))
{
final DVector pageData=getPage(M,httpReq,qTemplate,qPageStr,null);
if(pageData==null) return "[error -- no page selected!]";
return (String)pageData.elementAt(0,3);
}
else
// QMPAGEFIELDS: render the HTML form body for the current page
if(parms.containsKey("QMPAGEFIELDS"))
{
final DVector pageData=getPage(M,httpReq,qTemplate,qPageStr,qFileToGet);
if(pageData==null) return "[error - no page data?!]";
String labelColor=parms.get("LABELCOLOR");
if(labelColor==null) labelColor="<FONT COLOR=YELLOW><B>";
String descColor=parms.get("DESCCOLOR");
if(descColor==null) descColor="<FONT COLOR=WHITE><I>";
final StringBuffer list=new StringBuffer("");
// no template chosen yet: render the template-chooser radio buttons instead
if(qTemplate.length()==0)
{
String oldTemplate=httpReq.getUrlParameter("QMOLDTEMPLATE");
if((oldTemplate==null)||(oldTemplate.length()==0)) oldTemplate="";
for(int d=0;d<pageData.size();d++)
{
list.append("<TR><TD VALIGN=TOP><INPUT TYPE=RADIO NAME=QMTEMPLATE VALUE=\""+htmlOutgoingFilter((String)pageData.elementAt(d,3))+"\"");
if(pageData.elementAt(d,3).equals(oldTemplate))
list.append(" CHECKED");
list.append("> "+labelColor+(String)pageData.elementAt(d,1)+"</B></FONT></I></TD>");
list.append("<TD>"+descColor+(String)pageData.elementAt(d,2)+"</B></FONT></I></TD></TR>");
list.append("<TR><TD><BR></TD><TD><BR></TD></TR>");
}
return list.toString();
}
// carry forward every AT_* answer that does NOT belong to this page as a
// hidden input, so earlier pages' answers survive the round trip
final List<String> V=new XVector<String>();
for(final String str : httpReq.getUrlParameters() )
if(keyPattern.matcher(str.toUpperCase().subSequence(0, str.length())).matches())
V.add(str.toUpperCase());
list.append("<TR><TD COLSPAN=2>");
for(int v=0;v<V.size();v++)
{
final String key=V.get(v);
if((!key.startsWith("AT_")))
continue;
boolean thisPage=false;
for(int step=1;step<pageData.size();step++)
{
final String keyName=(String)pageData.elementAt(step,2);
if(keyName.startsWith("$")
&&(key.substring(3).toUpperCase().equals(keyName.substring(1))
||(key.substring(3).toUpperCase().startsWith(keyName.substring(1)+"_")
&&CMath.isNumber(key.substring(3+keyName.length())))))
{ thisPage=true; break;}
}
if(thisPage) continue;
String oldVal=httpReq.getUrlParameter(key);
if(oldVal==null) oldVal="";
list.append("<INPUT TYPE=HIDDEN NAME="+key+" VALUE=\""+htmlOutgoingFilter(oldVal)+"\">\n\r");
}
list.append("</TD></TR>\n\r");
// render one form widget per page step, keyed off the step's command type
String lastLabel=null;
for(int step=1;step<pageData.size();step++)
{
final Integer stepType=(Integer)pageData.elementAt(step,1);
final String keyName=(String)pageData.elementAt(step,2);
final String defValue=(String)pageData.elementAt(step,3);
String httpKeyName=keyName;
if(httpKeyName.startsWith("$")) httpKeyName=httpKeyName.substring(1);
final String keyNameFixed=CMStrings.capitalizeAndLower(httpKeyName.replace('_',' '));
httpKeyName="AT_"+httpKeyName;
final boolean optionalEntry=CMath.bset(stepType.intValue(),QuestManager.QM_COMMAND_OPTIONAL);
final int inputCode=stepType.intValue()&QuestManager.QM_COMMAND_MASK;
String oldValue=httpReq.getUrlParameter(httpKeyName);
switch(inputCode)
{
case QuestManager.QM_COMMAND_$TITLE: break;
case QuestManager.QM_COMMAND_$LABEL: lastLabel=defValue; break;
// short free-text entries
case QuestManager.QM_COMMAND_$EXPRESSION:
case QuestManager.QM_COMMAND_$TIMEEXPRESSION:
case QuestManager.QM_COMMAND_$UNIQUE_QUEST_NAME:
{
if(oldValue==null) oldValue=defValue;
list.append("<TR><TD COLSPAN=2><BR></TD></TR>\n\r");
list.append("<TR><TD COLSPAN=2>"+descColor+lastLabel+"</B></FONT></I></TD></TR>\n\r");
list.append("<TR><TD>"+labelColor+keyNameFixed+"</B></FONT></I></TD>");
list.append("<TD><INPUT TYPE=TEXT SIZE=20 NAME="+httpKeyName+" ");
list.append(" VALUE=\""+htmlOutgoingFilter(oldValue)+"\"></TD></TR>");
break;
}
case QuestManager.QM_COMMAND_$LONG_STRING:
{
if(oldValue==null) oldValue=defValue;
list.append("<TR><TD COLSPAN=2><BR></TD></TR>\n\r");
list.append("<TR><TD COLSPAN=2>"+descColor+lastLabel+"</B></FONT></I></TD></TR>\n\r");
list.append("<TR><TD>"+labelColor+keyNameFixed+"</B></FONT></I></TD>");
list.append("<TD><TEXTAREA ROWS=3 COLS=40 NAME="+httpKeyName+">");
list.append(oldValue+"</TEXTAREA></TD></TR>");
break;
}
case QuestManager.QM_COMMAND_$ZAPPERMASK:
{
if(oldValue==null) oldValue=defValue;
list.append("<TR><TD COLSPAN=2><BR></TD></TR>\n\r");
list.append("<TR><TD COLSPAN=2>"+descColor+lastLabel+"</B></FONT></I></TD></TR>\n\r");
list.append("<TR><TD>"+labelColor+keyNameFixed+"</B></FONT></I></TD>");
list.append("<TD><TEXTAREA COLS=40 ROWS=2 NAME="+httpKeyName+">");
list.append(oldValue+"</TEXTAREA></TD></TR>");
break;
}
case QuestManager.QM_COMMAND_$STRING:
case QuestManager.QM_COMMAND_$ROOMID:
case QuestManager.QM_COMMAND_$NAME:
case QuestManager.QM_COMMAND_$AREA:
{
if(oldValue==null) oldValue=defValue;
list.append("<TR><TD COLSPAN=2><BR></TD></TR>\n\r");
list.append("<TR><TD COLSPAN=2>"+descColor+lastLabel+"</B></FONT></I></TD></TR>\n\r");
list.append("<TR><TD>"+labelColor+keyNameFixed+"</B></FONT></I></TD>");
list.append("<TD><INPUT TYPE=TEXT SIZE=40 NAME="+httpKeyName+" ");
list.append(" VALUE=\""+htmlOutgoingFilter(oldValue)+"\"></TD></TR>");
break;
}
case QuestManager.QM_COMMAND_$HIDDEN:
break;
// ability chooser: archon-domain abilities only offered to sysops
case QuestManager.QM_COMMAND_$ABILITY:
{
if(oldValue==null) oldValue=defValue;
if(oldValue==null) oldValue="";
list.append("<TR><TD COLSPAN=2><BR></TD></TR>\n\r");
list.append("<TR><TD COLSPAN=2>"+descColor+lastLabel+"</B></FONT></I></TD></TR>\n\r");
list.append("<TR><TD>"+labelColor+keyNameFixed+"</B></FONT></I></TD>");
list.append("<TD><SELECT NAME="+httpKeyName+">");
if(optionalEntry) list.append("<OPTION VALUE=\"\" "+((oldValue.length()==0)?"SELECTED":"")+">");
Ability A=null;
for(final Enumeration<Ability> e=CMClass.abilities();e.hasMoreElements();)
{
A=e.nextElement();
if(((A.classificationCode()&Ability.ALL_DOMAINS)==Ability.DOMAIN_ARCHON)&&(!CMSecurity.isASysOp(M)))
continue;
list.append("<OPTION VALUE=\""+A.ID()+"\" ");
if(oldValue.equals(A.ID())) list.append("SELECTED");
list.append(">");
list.append(A.ID());
}
list.append("</SELECT>");
list.append("</TD></TR>");
break;
}
case QuestManager.QM_COMMAND_$EXISTING_QUEST_NAME:
{
if(oldValue==null) oldValue=defValue;
if(oldValue==null) oldValue="";
list.append("<TR><TD COLSPAN=2><BR></TD></TR>\n\r");
list.append("<TR><TD COLSPAN=2>"+descColor+lastLabel+"</B></FONT></I></TD></TR>\n\r");
list.append("<TR><TD>"+labelColor+keyNameFixed+"</B></FONT></I></TD>");
list.append("<TD><SELECT NAME="+httpKeyName+">");
if(optionalEntry) list.append("<OPTION VALUE=\"\" "+((oldValue.length()==0)?"SELECTED":"")+">");
for(int q=0;q<CMLib.quests().numQuests();q++)
{
final Quest Q2=CMLib.quests().fetchQuest(q);
list.append("<OPTION VALUE=\""+Q2.name()+"\" ");
if(oldValue.equals(Q2.name())) list.append("SELECTED");
list.append(">");
list.append(Q2.name());
}
list.append("</SELECT>");
list.append("</TD></TR>");
break;
}
case QuestManager.QM_COMMAND_$CHOOSE:
{
list.append("<TR><TD COLSPAN=2><BR></TD></TR>\n\r");
list.append("<TR><TD COLSPAN=2>"+descColor+lastLabel+"</B></FONT></I></TD></TR>\n\r");
list.append("<TR><TD>"+labelColor+keyNameFixed+"</B></FONT></I></TD>");
list.append("<TD><SELECT NAME="+httpKeyName+">");
final List<String> options=CMParms.parseCommas(defValue.toUpperCase(),true);
if(optionalEntry) options.add(0,"");
for(int o=0;o<options.size();o++)
{
final String val=options.get(o);
list.append("<OPTION VALUE=\""+val+"\" ");
if(val.equalsIgnoreCase(oldValue)) list.append("SELECTED");
list.append(">");
list.append(val);
}
list.append("</SELECT></TD></TR>");
break;
}
// single item chooser, with a NEW button hooked to client-side JS
case QuestManager.QM_COMMAND_$ITEMXML:
{
if(oldValue==null) oldValue=defValue;
if(oldValue==null) oldValue="";
List<Item> itemList=new Vector();
itemList=RoomData.contributeItems(itemList);
final Item oldItem=RoomData.getItemFromAnywhere(itemList,oldValue);
list.append("<TR><TD COLSPAN=2><BR></TD></TR>\n\r");
list.append("<TR><TD COLSPAN=2>"+descColor+lastLabel+"</B></FONT></I></TD></TR>\n\r");
list.append("<TR><TD>"+labelColor+keyNameFixed+"</B></FONT></I></TD>");
list.append("<TD><SELECT NAME="+httpKeyName+">");
if(optionalEntry) list.append("<OPTION VALUE=\"\" "+((oldValue.length()==0)?"SELECTED":"")+">");
list.append(itemList(itemList,oldItem,oldValue));
list.append("</SELECT>");
list.append("<INPUT TYPE=BUTTON NAME=BUTT_"+httpKeyName+" VALUE=\"NEW\" ONCLICK=\"AddNewItem();\">");
list.append("</TD></TR>");
break;
}
// repeating item chooser: one SELECT per existing value plus a trailing empty one
case QuestManager.QM_COMMAND_$ITEMXML_ONEORMORE:
{
if(oldValue==null) oldValue=defValue;
List<Item> itemList=new Vector();
itemList=RoomData.contributeItems(itemList);
final Vector oldValues=new Vector();
int which=1;
oldValue=httpReq.getUrlParameter(httpKeyName+"_"+which);
while(oldValue!=null)
{
if((!oldValue.equalsIgnoreCase("DELETE"))&&(oldValue.length()>0))
oldValues.addElement(oldValue);
which++;
oldValue=httpReq.getUrlParameter(httpKeyName+"_"+which);
}
oldValues.addElement("");
for(int i=0;i<oldValues.size();i++)
{
oldValue=(String)oldValues.elementAt(i);
final Item oldItem=(oldValue.length()>0)?RoomData.getItemFromAnywhere(itemList,oldValue):null;
if(i==0)
{
list.append("<TR><TD COLSPAN=2><BR></TD></TR>\n\r");
list.append("<TR><TD COLSPAN=2>"+descColor+lastLabel+"</B></FONT></I></TD></TR>\n\r");
}
list.append("<TR><TD>"+labelColor+keyNameFixed+"</B></FONT></I></TD>");
list.append("<TD><SELECT NAME="+httpKeyName+"_"+(i+1)+" ONCHANGE=\"Refresh();\">");
if(i<oldValues.size()-1) list.append("<OPTION VALUE=\"DELETE\">Delete!");
if(oldValue.length()==0) list.append("<OPTION VALUE=\"\" "+((oldValue.length()==0)?"SELECTED":"")+">");
list.append(itemList(itemList,oldItem,oldValue));
list.append("</SELECT>");
if(i==oldValues.size()-1)
list.append("<INPUT TYPE=BUTTON NAME=BUTT_"+httpKeyName+" VALUE=\"NEW\" ONCLICK=\"AddNewItem();\">");
list.append("</TD></TR>");
}
break;
}
case QuestManager.QM_COMMAND_$MOBXML:
{
if(oldValue==null) oldValue=defValue;
if(oldValue != null)
{
List<MOB> mobList=new Vector();
mobList=RoomData.contributeMOBs(mobList);
final MOB oldMob=RoomData.getMOBFromCode(mobList,oldValue);
list.append("<TR><TD COLSPAN=2><BR></TD></TR>\n\r");
list.append("<TR><TD COLSPAN=2>"+descColor+lastLabel+"</B></FONT></I></TD></TR>\n\r");
list.append("<TR><TD>"+labelColor+keyNameFixed+"</B></FONT></I></TD>");
list.append("<TD><SELECT NAME="+httpKeyName+">");
if(optionalEntry) list.append("<OPTION VALUE=\"\" "+((oldValue.length()==0)?"SELECTED":"")+">");
list.append(mobList(mobList,oldMob,oldValue));
list.append("</SELECT>");
list.append("<INPUT TYPE=BUTTON NAME=BUTT_"+httpKeyName+" VALUE=\"NEW\" ONCLICK=\"AddNewMob();\">");
list.append("</TD></TR>");
}
break;
}
case QuestManager.QM_COMMAND_$MOBXML_ONEORMORE:
{
if(oldValue==null) oldValue=defValue;
final List<MOB>mobList=RoomData.contributeMOBs(new Vector<MOB>());
final Vector oldValues=new Vector();
int which=1;
oldValue=httpReq.getUrlParameter(httpKeyName+"_"+which);
while(oldValue!=null)
{
if((!oldValue.equalsIgnoreCase("DELETE"))&&(oldValue.length()>0))
oldValues.addElement(oldValue);
which++;
oldValue=httpReq.getUrlParameter(httpKeyName+"_"+which);
}
oldValues.addElement("");
for(int i=0;i<oldValues.size();i++)
{
oldValue=(String)oldValues.elementAt(i);
final MOB oldMob=(oldValue.length()>0)?RoomData.getMOBFromCode(mobList,oldValue):null;
if(i==0)
{
list.append("<TR><TD COLSPAN=2><BR></TD></TR>\n\r");
list.append("<TR><TD COLSPAN=2>"+descColor+lastLabel+"</B></FONT></I></TD></TR>\n\r");
}
list.append("<TR><TD>"+labelColor+keyNameFixed+"</B></FONT></I></TD>");
list.append("<TD><SELECT NAME="+httpKeyName+"_"+(i+1)+" ONCHANGE=\"Refresh();\">");
if(i<oldValues.size()-1) list.append("<OPTION VALUE=\"DELETE\">Delete!");
if(oldValue.length()==0) list.append("<OPTION VALUE=\"\" "+((oldValue.length()==0)?"SELECTED":"")+">");
list.append(mobList(mobList,oldMob,oldValue));
list.append("</SELECT>");
if(i==oldValues.size()-1)
list.append("<INPUT TYPE=BUTTON NAME=BUTT_"+httpKeyName+" VALUE=\"NEW\" ONCLICK=\"AddNewMob();\">");
list.append("</TD></TR>");
}
break;
}
case QuestManager.QM_COMMAND_$FACTION:
{
if(oldValue==null) oldValue=defValue;
if(oldValue==null) oldValue="";
list.append("<TR><TD COLSPAN=2><BR></TD></TR>\n\r");
list.append("<TR><TD COLSPAN=2>"+descColor+lastLabel+"</B></FONT></I></TD></TR>\n\r");
list.append("<TR><TD>"+labelColor+keyNameFixed+"</B></FONT></I></TD>");
list.append("<TD><SELECT NAME="+httpKeyName+">");
if(optionalEntry) list.append("<OPTION VALUE=\"\" "+((oldValue.length()==0)?"SELECTED":"")+">");
for(final Enumeration f=CMLib.factions().factions();f.hasMoreElements();)
{
final Faction F=(Faction)f.nextElement();
final String fkey=F.factionID().toUpperCase().trim();
list.append("<OPTION VALUE=\""+fkey+"\" ");
if(oldValue.equals(fkey)) list.append("SELECTED");
list.append(">");
list.append(F.name());
}
list.append("</SELECT>");
list.append("</TD></TR>");
break;
}
}
}
return list.toString();
}
else
// QMLASTPAGE: "true" when the current page is the template's final page
if(parms.containsKey("QMLASTPAGE"))
{
final DVector pageData=getPage(M,httpReq,qTemplate,qPageStr,null);
if(pageData==null) return "false";
final DVector filePages=(DVector)httpReq.getRequestObjects().get("QM_FILE_PAGES");
if(filePages==null) return "false";
return(((Vector)filePages.elementAt(0,4)).lastElement()==pageData)?"true":"false";
}
else
if(parms.containsKey("QMPAGEERRORS")) return qPageErrors;
else
if(parms.containsKey("QMERRORS")) return qPageErrors;
else
if(parms.containsKey("QMTEMPLATE")) return qTemplate;
else
if(parms.containsKey("QMPAGE")) return qPageStr;
else
// NEXT/FINISH: validate every field on this page, then advance (or, on
// FINISH, assemble and save the quest script)
if(parms.containsKey("NEXT")||parms.containsKey("FINISH"))
{
if((qTemplate.length()>0)&&(CMath.s_int(qPageStr)<=0))
{
httpReq.addFakeUrlParameter("QMPAGE","1");
httpReq.addFakeUrlParameter("QMERRORS","");
httpReq.addFakeUrlParameter("QMPAGEERRORS","");
return "";
}
if(qTemplate.length()==0) return "[error - no template chosen?!]";
final DVector pageData=getPage(M,httpReq,qTemplate,qPageStr,null);
if(pageData==null) return "[error - no page data?!]";
final StringBuffer errors=new StringBuffer("");
for(int step=1;step<pageData.size();step++)
{
final Integer stepType=(Integer)pageData.elementAt(step,1);
final String keyName=(String)pageData.elementAt(step,2);
final String defValue=(String)pageData.elementAt(step,3);
String httpKeyName=keyName;
if(httpKeyName.startsWith("$")) httpKeyName=httpKeyName.substring(1);
final String keyNameFixed=CMStrings.capitalizeAndLower(httpKeyName.replace('_',' '));
httpKeyName="AT_"+httpKeyName;
final boolean optionalEntry=CMath.bset(stepType.intValue(),QuestManager.QM_COMMAND_OPTIONAL);
final int inputCode=stepType.intValue()&QuestManager.QM_COMMAND_MASK;
String oldValue=httpReq.getUrlParameter(httpKeyName);
// per-type validator; eval() throws CMException with a user-facing message
final GenericEditor.CMEval eval= QuestManager.QM_COMMAND_TESTS[inputCode];
try
{
switch(inputCode)
{
case QuestManager.QM_COMMAND_$TITLE: break;
case QuestManager.QM_COMMAND_$LABEL: break;
case QuestManager.QM_COMMAND_$HIDDEN:
httpReq.addFakeUrlParameter(httpKeyName,defValue);
break;
// normalize each chosen item to a "CODE;" or "CATALOG-Name;" list
case QuestManager.QM_COMMAND_$ITEMXML_ONEORMORE:
{
final List<Item> rawitemlist=RoomData.contributeItems(new Vector<Item>());
rawitemlist.addAll(getCatalogItemsForList(CMLib.catalog().getCatalogItems()));
final Vector oldValues=new Vector();
int which=1;
oldValue=httpReq.getUrlParameter(httpKeyName+"_"+which);
while(oldValue!=null)
{
if((!oldValue.equalsIgnoreCase("DELETE"))&&(oldValue.length()>0))
oldValues.addElement(oldValue);
which++;
oldValue=httpReq.getUrlParameter(httpKeyName+"_"+which);
}
if(oldValues.size()==0) oldValues.addElement("");
String newVal="";
for(int i=0;i<oldValues.size();i++)
{
oldValue=(String)oldValues.elementAt(i);
Item I2=oldValue.length()>0?RoomData.getItemFromAnywhere(rawitemlist,oldValue):null;
if(I2==null)
I2=oldValue.length()>0?RoomData.getItemFromCatalog(oldValue):null;
if(I2!=null)
{
if(CMLib.flags().isCataloged(I2))
oldValue=CMLib.english().getContextSameName(rawitemlist,I2);
else
oldValue=CMLib.english().getContextName(rawitemlist,I2);
}
final Object[] choices=rawitemlist.toArray();
final String thisVal=(String)eval.eval(oldValue,choices,optionalEntry);
if(thisVal.length()>0)
{
final Item I3=(Item)CMLib.english().fetchEnvironmental(rawitemlist, thisVal, false);
if(I3!=null)
{
if(CMLib.flags().isCataloged(I3))
newVal+="CATALOG-"+I3.Name()+";";
else
newVal+=RoomData.getItemCode(rawitemlist, I3)+";";
}
}
}
httpReq.addFakeUrlParameter(httpKeyName,newVal);
break;
}
case QuestManager.QM_COMMAND_$ITEMXML:
{
final List<Item> rawitemlist=RoomData.contributeItems(new Vector<Item>());
rawitemlist.addAll(getCatalogItemsForList(CMLib.catalog().getCatalogItems()));
if(oldValue==null) oldValue="";
Item I2=oldValue.length()>0?RoomData.getItemFromAnywhere(rawitemlist,oldValue):null;
if(I2==null)
I2=oldValue.length()>0?RoomData.getItemFromCatalog(oldValue):null;
if(I2!=null)
{
if(CMLib.flags().isCataloged(I2))
oldValue=CMLib.english().getContextSameName(rawitemlist,I2);
else
oldValue=CMLib.english().getContextName(rawitemlist,I2);
}
final Object[] choices=rawitemlist.toArray();
String newVal=(String)eval.eval(oldValue,choices,optionalEntry);
if(newVal.length()>0)
{
final Item I3=(Item)CMLib.english().fetchEnvironmental(rawitemlist, newVal, false);
if(I3!=null)
{
if(CMLib.flags().isCataloged(I3))
newVal="CATALOG-"+I3.Name()+";";
else
newVal=RoomData.getItemCode(rawitemlist, I3)+";";
}
}
httpReq.addFakeUrlParameter(httpKeyName,newVal);
break;
}
case QuestManager.QM_COMMAND_$MOBXML_ONEORMORE:
{
final List<MOB> rawmoblist=RoomData.contributeMOBs(new Vector<MOB>());
rawmoblist.addAll(getCatalogMobsForList(CMLib.catalog().getCatalogMobs()));
final Vector oldValues=new Vector();
int which=1;
oldValue=httpReq.getUrlParameter(httpKeyName+"_"+which);
while(oldValue!=null)
{
if((!oldValue.equalsIgnoreCase("DELETE"))&&(oldValue.length()>0))
oldValues.addElement(oldValue);
which++;
oldValue=httpReq.getUrlParameter(httpKeyName+"_"+which);
}
if(oldValues.size()==0) oldValues.addElement("");
String newVal="";
for(int i=0;i<oldValues.size();i++)
{
oldValue=(String)oldValues.elementAt(i);
MOB M2=oldValue.length()>0?RoomData.getMOBFromCode(rawmoblist,oldValue):null;
if(M2==null)
M2=oldValue.length()>0?RoomData.getMOBFromCatalog(oldValue):null;
if(M2!=null)
{
if(CMLib.flags().isCataloged(M2))
oldValue=CMLib.english().getContextSameName(rawmoblist,M2);
else
oldValue=CMLib.english().getContextName(rawmoblist,M2);
}
final Object[] choices=rawmoblist.toArray();
final String thisVal=(String)eval.eval(oldValue,choices,optionalEntry);
if(thisVal.length()>0)
{
final MOB M3=(MOB)CMLib.english().fetchEnvironmental(rawmoblist, thisVal, false);
if(M3!=null)
{
if(CMLib.flags().isCataloged(M3))
newVal+="CATALOG-"+M3.Name()+";";
else
newVal+=RoomData.getMOBCode(rawmoblist, M3)+";";
}
}
}
httpReq.addFakeUrlParameter(httpKeyName,newVal);
break;
}
case QuestManager.QM_COMMAND_$MOBXML:
{
final List<MOB> rawmoblist=RoomData.contributeMOBs(new Vector<MOB>());
rawmoblist.addAll(getCatalogMobsForList(CMLib.catalog().getCatalogMobs()));
if(oldValue==null) oldValue="";
MOB M2=oldValue.length()>0?RoomData.getMOBFromCode(rawmoblist,oldValue):null;
if(M2==null)
M2=oldValue.length()>0?RoomData.getMOBFromCatalog(oldValue):null;
if(M2!=null)
{
if(CMLib.flags().isCataloged(M2))
oldValue=CMLib.english().getContextSameName(rawmoblist,M2);
else
oldValue=CMLib.english().getContextName(rawmoblist,M2);
}
final Object[] choices=rawmoblist.toArray();
String newVal=(String)eval.eval(oldValue,choices,optionalEntry);
if(newVal.length()>0)
{
final MOB M3=(MOB)CMLib.english().fetchEnvironmental(rawmoblist, newVal, false);
if(M3!=null)
{
if(CMLib.flags().isCataloged(M3))
newVal="CATALOG-"+M3.Name()+";";
else
newVal=RoomData.getMOBCode(rawmoblist, M3)+";";
}
}
httpReq.addFakeUrlParameter(httpKeyName,newVal);
break;
}
case QuestManager.QM_COMMAND_$CHOOSE:
{
if(oldValue==null) oldValue="";
final Object[] choices=CMParms.parseCommas(defValue.toUpperCase(),true).toArray();
final String newVal=(String)eval.eval(oldValue,choices,optionalEntry);
httpReq.addFakeUrlParameter(httpKeyName,newVal);
break;
}
default:
{
if(oldValue==null) oldValue="";
final String newVal=(String)eval.eval(oldValue,null,optionalEntry);
httpReq.addFakeUrlParameter(httpKeyName,newVal);
break;
}
}
}
catch(final CMException e)
{
errors.append("Error in field '"+keyNameFixed+"': "+e.getMessage()+"<BR>");
}
}
httpReq.addFakeUrlParameter("QMPAGEERRORS",errors.toString());
if(errors.toString().length()>0) return "";
// FINISH: substitute every collected $VAR into the template script,
// save it as a .quest file, and register the new quest
if(parms.containsKey("FINISH"))
{
String name="";
final DVector filePages=(DVector)httpReq.getRequestObjects().get("QM_FILE_PAGES");
String script=((StringBuffer)filePages.elementAt(0,5)).toString();
String var=null;
String val=null;
final List<DVector> qPages=(List<DVector>)filePages.elementAt(0,4);
for(int page=0;page<qPages.size();page++)
{
final DVector pageDV=qPages.get(page);
for(int v=0;v<pageDV.size();v++)
{
var=(String)pageDV.elementAt(v,2);
String httpKeyName=var;
if(httpKeyName.startsWith("$"))
httpKeyName=httpKeyName.substring(1);
else
continue;
httpKeyName="AT_"+httpKeyName;
val=httpReq.getUrlParameter(httpKeyName);
if(val==null) val="";
switch(((Integer)pageDV.elementAt(v,1)).intValue()&QuestManager.QM_COMMAND_MASK)
{
case QuestManager.QM_COMMAND_$UNIQUE_QUEST_NAME:
name=val;
break;
// expand item/mob codes into their full XML for the script
case QuestManager.QM_COMMAND_$ITEMXML:
case QuestManager.QM_COMMAND_$ITEMXML_ONEORMORE:
{
final List<String> V=CMParms.parseSemicolons(val,true);
val="";
for(int v1=0;v1<V.size();v1++)
{
Item I=RoomData.getItemFromCode(RoomData.getItemCache(),V.get(v1));
if(I==null)
I=RoomData.getItemFromAnywhere(RoomData.getItemCache(),V.get(v1));
if(I==null)
I=RoomData.getItemFromCatalog(V.get(v1));
if(I!=null)
val+=CMLib.coffeeMaker().getItemXML(I).toString();
}
break;
}
case QuestManager.QM_COMMAND_$MOBXML:
case QuestManager.QM_COMMAND_$MOBXML_ONEORMORE:
{
final List<String> V=CMParms.parseSemicolons(val,true);
val="";
for(int v1=0;v1<V.size();v1++)
{
MOB M2=RoomData.getMOBFromCode(RoomData.getMOBCache(),V.get(v1));
if(M2==null) M2=RoomData.getMOBFromCatalog(V.get(v1));
if(M2!=null)
val+=CMLib.coffeeMaker().getMobXML(M2).toString();
}
break;
}
}
script=CMStrings.replaceAll(script,var,val);
}
}
script=CMStrings.replaceAll(script,"$#AUTHOR",M.Name());
final Quest Q=(Quest)CMClass.getCommon("DefaultQuest");
final CMFile newQF=new CMFile(Resources.makeFileResourceName("quests/"+name+".quest"),M,CMFile.FLAG_LOGERRORS);
if(!newQF.saveText(script))
{
httpReq.addFakeUrlParameter("QMPAGEERRORS","Unable to save your quest. Please consult the log.");
return "";
}
Q.setScript("LOAD=quests/"+name+".quest",true);
if((Q.name().trim().length()==0)||(Q.duration()<0))
{
httpReq.addFakeUrlParameter("QMPAGEERRORS","Unable to create your quest. Please consult the log.");
return "";
}
final Quest badQ=CMLib.quests().fetchQuest(name);
if(badQ!=null)
{
httpReq.addFakeUrlParameter("QMPAGEERRORS","Unable to create your quest. One of that name already exists!");
return "";
}
Log.sysOut("QuestMgr",M.Name()+" created quest '"+Q.name()+"'");
CMLib.quests().addQuest(Q);
CMLib.quests().save();
}
httpReq.addFakeUrlParameter("QMPAGE",""+(CMath.s_int(qPageStr)+1));
return "";
}
else
// BACK: step to the previous page, or back to the template chooser from page 1
if(parms.containsKey("BACK"))
{
final int pageNumber=CMath.s_int(qPageStr);
if(pageNumber>1)
httpReq.addFakeUrlParameter("QMPAGE",""+(CMath.s_int(qPageStr)-1));
else
{
httpReq.addFakeUrlParameter("QMTEMPLATE","");
httpReq.addFakeUrlParameter("QMPAGE","");
}
}
return "";
}
}
| vjanmey/EpicMudfia | com/planet_ink/coffee_mud/WebMacros/QuestMaker.java | Java | apache-2.0 | 32,575 |
/*
* Copyright (C) open knowledge GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package de.openknowledge.jaxrs.versioning.conversion;
import javax.ws.rs.container.ContainerRequestContext;
import javax.ws.rs.ext.InterceptorContext;
/**
* @author Arne Limburg - open knowledge GmbH
*/
public class Version {
// Property key that would identify the version on the JAX-RS context; only
// referenced by the commented-out property-based implementation below.
// NOTE(review): currently unused at runtime -- confirm whether the
// property-based approach should be deleted or restored.
private static final String VERSION_PROPERTY_NAME = Version.class.getName().toLowerCase();
// Per-thread storage for the active API version; the context parameters are
// accepted but ignored by the current implementation.
private static final ThreadLocal<String> VERSION = new ThreadLocal<String>();
// Returns the version for the current thread; the interceptor context is unused.
public static String get(InterceptorContext context) {
return VERSION.get();
// return (String)context.getProperty(VERSION_PROPERTY_NAME);
}
// Returns the version for the current thread; the request context is unused.
public static String get(ContainerRequestContext context) {
return VERSION.get();
// return (String)context.getProperty(VERSION_PROPERTY_NAME);
}
// Records the version for the current thread; the request context is unused.
public static void set(ContainerRequestContext context, String version) {
VERSION.set(version);
// context.setProperty(VERSION_PROPERTY_NAME, version);
}
// Clears the version for the current thread.
// NOTE(review): unset() only exists for InterceptorContext; if a request sets
// the version but never reaches the interceptor path, the ThreadLocal value
// may leak onto a pooled thread -- confirm the lifecycle covers this.
public static void unset(InterceptorContext context) {
VERSION.remove();
// context.removeProperty(VERSION_PROPERTY_NAME);
}
}
| openknowledge/jaxrs-versioning | jaxrs-versioning/src/main/java/de/openknowledge/jaxrs/versioning/conversion/Version.java | Java | apache-2.0 | 1,612 |
class ChangeDeliveryTypeInOrders < ActiveRecord::Migration[5.2]
  # Converts orders.delivery to an integer column, casting existing values in
  # place via PostgreSQL's "USING CAST" syntax.
  # NOTE(review): change_column with a raw type string cannot be auto-reversed;
  # rolling back will raise ActiveRecord::IrreversibleMigration -- confirm an
  # up/down pair is not needed.
  def change
    change_column :orders, :delivery, 'integer USING CAST(delivery AS integer)'
  end
end
| luckypike/mint | db/migrate/20190925094319_change_delivery_type_in_orders.rb | Ruby | apache-2.0 | 167 |
/**
* Copyright 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.sha1coin.core;
import com.google.sha1coin.script.Script;
import com.google.sha1coin.wallet.AbstractKeyChainEventListener;
import java.util.List;
/**
* Convenience implementation of {@link WalletEventListener}.
*/
public abstract class AbstractWalletEventListener extends AbstractKeyChainEventListener implements WalletEventListener {
/** Forwards to {@link #onChange()}. */
@Override
public void onCoinsReceived(Wallet wallet, Transaction tx, Coin prevBalance, Coin newBalance) {
onChange();
}
/** Forwards to {@link #onChange()}. */
@Override
public void onCoinsSent(Wallet wallet, Transaction tx, Coin prevBalance, Coin newBalance) {
onChange();
}
/** Forwards to {@link #onChange()}. */
@Override
public void onReorganize(Wallet wallet) {
onChange();
}
/** Forwards to {@link #onChange()}. */
@Override
public void onTransactionConfidenceChanged(Wallet wallet, Transaction tx) {
onChange();
}
/** Forwards to {@link #onChange()}. */
@Override
public void onKeysAdded(List<ECKey> keys) {
onChange();
}
/** Forwards to {@link #onChange()}. */
@Override
public void onScriptsAdded(Wallet wallet, List<Script> scripts) {
onChange();
}
/** Forwards to {@link #onChange()}. */
@Override
public void onWalletChanged(Wallet wallet) {
onChange();
}
/**
 * Single catch-all hook invoked by every event method above. Subclasses that
 * only care that "something changed" override this one method; the default
 * implementation does nothing.
 */
public void onChange() {
}
}
| ohac/sha1coinj | core/src/main/java/com/google/sha1coin/core/AbstractWalletEventListener.java | Java | apache-2.0 | 1,767 |
import sys
sys.path.insert(1, "../../../")
import h2o
def binop_plus(ip, port):
    """Exercise the binary '+' operator across scalar, H2OVec and H2OFrame
    operands (LHS/RHS in every combination), checking result dimensions and
    column sums against known values for the 65-row iris dataset.

    Indentation was restored here: the statement order and logic are unchanged,
    but the original text had lost all leading whitespace and was not valid
    Python as given.
    """
    # Connect to h2o
    h2o.init(ip, port)

    iris = h2o.import_frame(path=h2o.locate("smalldata/iris/iris_wheader_65_rows.csv"))
    rows, cols = iris.dim()
    iris.show()

    ###################################################################
    # LHS: scaler, RHS: H2OFrame
    res = 2 + iris
    res_rows, res_cols = res.dim()
    assert res_rows == rows and res_cols == cols, "dimension mismatch"
    for x, y in zip([res[c].sum() for c in range(cols-1)], [469.9, 342.6, 266.9, 162.2]):
        assert abs(x - y) < 1e-1, "expected same values"

    # LHS: scaler, RHS: scaler
    res = 2 + iris[0]
    res2 = 1.1 + res[21, :]
    assert abs(res2 - 8.2) < 1e-1, "expected same values"

    ###################################################################
    # LHS: scaler, RHS: H2OFrame
    res = 1.2 + iris[2]
    res2 = res[21, :] + iris
    res2.show()

    # LHS: scaler, RHS: H2OVec
    res = 1.2 + iris[2]
    res2 = res[21, :] + iris[1]
    res2.show()

    # LHS: scaler, RHS: scaler
    res = 1.1 + iris[2]
    res2 = res[21, :] + res[10, :]
    assert abs(res2 - 5.2) < 1e-1, "expected same values"

    # LHS: scaler, RHS: scaler
    res = 2 + iris[0]
    res2 = res[21, :] + 3
    assert abs(res2 - 10.1) < 1e-1, "expected same values"

    ###################################################################
    # LHS: H2OVec, RHS: H2OFrame
    # (disabled: objects with different dimensions not supported)
    #try:
    #    res = iris[2] + iris
    #    res.show()
    #    assert False, "expected error. objects with different dimensions not supported."
    #except EnvironmentError:
    #    pass

    # LHS: H2OVec, RHS: scaler
    res = 1.2 + iris[2]
    res2 = iris[1] + res[21, :]
    res2.show()

    ###################################################################
    # LHS: H2OFrame, RHS: H2OFrame
    res = iris + iris
    res_rows, res_cols = res.dim()
    assert res_rows == rows and res_cols == cols, "dimension mismatch"

    res = iris[0:2] + iris[1:3]
    res_rows, res_cols = res.dim()
    assert res_rows == rows and res_cols == 2, "dimension mismatch"

    # (disabled: frames of different dimensions)
    #try:
    #    res = iris + iris[0:3]
    #    res.show()
    #    assert False, "expected error. frames are different dimensions."
    #except EnvironmentError:
    #    pass

    # LHS: H2OFrame, RHS: H2OVec
    # (disabled: objects of different dimensions not supported)
    #try:
    #    res = iris + iris[0]
    #    res.show()
    #    assert False, "expected error. objects of different dimensions not supported."
    #except EnvironmentError:
    #    pass

    # LHS: H2OFrame, RHS: scaler
    res = 1.2 + iris[2]
    res2 = iris + res[21, :]
    res2.show()

    # LHS: H2OFrame, RHS: scaler
    res = iris + 2
    res_rows, res_cols = res.dim()
    assert res_rows == rows and res_cols == cols, "dimension mismatch"
    for x, y in zip([res[c].sum() for c in range(cols-1)], [469.9, 342.6, 266.9, 162.2]):
        assert abs(x - y) < 1e-1, "expected same values"
    ###################################################################


if __name__ == "__main__":
    h2o.run_test(sys.argv, binop_plus)
| ChristosChristofidis/h2o-3 | h2o-py/tests/testdir_munging/binop/pyunit_binop2_plus.py | Python | apache-2.0 | 3,072 |
/*
Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements.
See the NOTICE file distributed with this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
*/
package de.othsoft.cache.memcached;
import de.othsoft.cache.base.ICache;
import de.othsoft.cache.base.error.CacheException;
import de.othsoft.cache.base.util.CacheValue;
import de.othsoft.helper.base.Identifier;
import java.io.IOException;
import java.util.Date;
import java.util.List;
import java.util.concurrent.TimeoutException;
import net.rubyeye.xmemcached.MemcachedClient;
import net.rubyeye.xmemcached.XMemcachedClient;
import net.rubyeye.xmemcached.exception.MemcachedException;
import net.rubyeye.xmemcached.transcoders.IntegerTranscoder;
import net.rubyeye.xmemcached.transcoders.LongTranscoder;
import net.rubyeye.xmemcached.transcoders.StringTranscoder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
* @author eiko
*/
public class CacheImpl implements ICache {
private final static int MAX_EXPIRES=7*24*60*60; // one week - no better idea
MemcachedClient client = null;
String serverAddr = null;
int serverPort = 0;
long timeout=5000;
private static long appCount;
private static long userCount;
private final static long KEY_BASE = new Date().getTime();
private static final StringTranscoder STRING_TRANSCODER = new StringTranscoder("UTF-8");
private static final IntegerTranscoder INT_TRANSCODER = new IntegerTranscoder();
private static final LongTranscoder LONG_TRANSCODER = new LongTranscoder();
/**
*
* @param address address of memcached server, something like 127.0.0.1:12000
*/
public void setServer(String address,int port) {
try {
this.serverAddr=address;
if (client!=null) {
client = null;
}
client=new XMemcachedClient(address,port);
}
catch(IOException e) {
logger.error("<<{}>> error while create memcached client for address {}: [{}] {}",
Identifier.getInst().getName(),address,e.getClass().getName(),e.getMessage());
client = null;
this.serverAddr = null;
}
}
public void closeServerCon() {
if (client instanceof XMemcachedClient) {
logger.info("<<{}>> remove server with addr {}",
Identifier.getInst().getName(),serverAddr);
try {
((XMemcachedClient)client).shutdown();
}
catch(IOException io) {
logger.error("<<{}>> error while close memcached client for address {}: [{}] {}",
Identifier.getInst().getName(),serverAddr,io.getClass().getName(),io.getMessage());
}
}
client = null;
}
private void checkInitAndIfWrongThrowException() throws CacheException {
if (client==null) {
throw new CacheException("memcached client not initialized");
}
}
private String getMemcachedKey(String appKey,String userKey,String entryKey) {
return appKey+"_"+userKey+"_"+entryKey;
}
@Override
public synchronized String createUniqueUserKey(String base) throws CacheException{
userCount++;
return String.format("%d-%s-%d", KEY_BASE,base,userCount);
}
@Override
public synchronized String createUniqueAppKey(String base) throws CacheException{
appCount++;
return String.format("%d-%s-%d", KEY_BASE,base,appCount);
}
@Override
public void setStrValue(String appKey,String userKey,String entryKey, String value,int expireSeconds) throws CacheException {
if (value==null) throw new CacheException("setStrValue - null values are not allowed");
checkInitAndIfWrongThrowException();
String memcachedKey=getMemcachedKey(appKey, userKey, entryKey);
try {
client.set(memcachedKey,expireSeconds,value);
}
catch(TimeoutException | InterruptedException | MemcachedException e) {
throw new CacheException(e,String.format("error while setStrValue - app: %s, user: %s, key: %s",
appKey,userKey,entryKey));
}
}
@Override
public void setStrValue(String appKey,String userKey,String entryKey,String value) throws CacheException {
if (value==null) throw new CacheException("setStrValue - null values are not allowed");
checkInitAndIfWrongThrowException();
String memcachedKey=getMemcachedKey(appKey, userKey,entryKey);
try {
client.set(memcachedKey,MAX_EXPIRES,value);
}
catch(TimeoutException | InterruptedException | MemcachedException e) {
throw new CacheException(e,String.format("error while setStrValue - app: %s, user: %s, key: %s",
appKey,userKey,entryKey));
}
}
@Override
public void setBoolValue(String appKey,String userKey,String entryKey,Boolean value,int expireSeconds) throws CacheException {
checkInitAndIfWrongThrowException();
String memcachedKey=getMemcachedKey(appKey, userKey,entryKey);
try {
if (value==null)
client.delete(memcachedKey);
else {
int iValue = value ? 1 : 0;
client.set(memcachedKey,expireSeconds,iValue);
}
}
catch(TimeoutException | InterruptedException | MemcachedException e) {
throw new CacheException(e,String.format("error while setBoolValue - app: %s, user: %s, key: %s",
appKey,userKey,entryKey));
}
}
@Override
public void setBoolValue(String appKey,String userKey,String entryKey,Boolean value) throws CacheException {
checkInitAndIfWrongThrowException();
String memcachedKey=getMemcachedKey(appKey, userKey,entryKey);
try {
if (value==null)
client.delete(memcachedKey);
else {
int iValue = value ? 1 : 0;
client.set(memcachedKey,MAX_EXPIRES,iValue);
}
}
catch(TimeoutException | InterruptedException | MemcachedException e) {
throw new CacheException(e,String.format("error while setBoolValue - app: %s, user: %s, key: %s",
appKey,userKey,entryKey));
}
}
@Override
public void setIntValue(String appKey,String userKey,String entryKey,Integer value,int expireSeconds) throws CacheException {
checkInitAndIfWrongThrowException();
String memcachedKey=getMemcachedKey(appKey, userKey,entryKey);
try {
if (value==null)
client.delete(memcachedKey);
else
client.set(memcachedKey,expireSeconds,value);
}
catch(TimeoutException | InterruptedException | MemcachedException e) {
throw new CacheException(e,String.format("error while setIntValue - app: %s, user: %s, key: %s",
appKey,userKey,entryKey));
}
}
@Override
public void setIntValue(String appKey,String userKey,String entryKey,Integer value) throws CacheException {
checkInitAndIfWrongThrowException();
String memcachedKey=getMemcachedKey(appKey, userKey,entryKey);
try {
if (value==null)
client.delete(memcachedKey);
else
client.set(memcachedKey,MAX_EXPIRES,value);
}
catch(TimeoutException | InterruptedException | MemcachedException e) {
throw new CacheException(e,String.format("error while setIntValue - app: %s, user: %s, key: %s",
appKey,userKey,entryKey));
}
}
@Override
public void setLongValue(String appKey,String userKey,String entryKey,Long value,int expireSeconds) throws CacheException {
checkInitAndIfWrongThrowException();
String memcachedKey=getMemcachedKey(appKey, userKey,entryKey);
try {
if (value==null)
client.delete(memcachedKey);
else
client.set(memcachedKey,expireSeconds,value);
}
catch(TimeoutException | InterruptedException | MemcachedException e) {
throw new CacheException(e,String.format("error while setLongValue - app: %s, user: %s, key: %s",
appKey,userKey,entryKey));
}
}
@Override
public void setLongValue(String appKey,String userKey,String entryKey,Long value) throws CacheException {
checkInitAndIfWrongThrowException();
String memcachedKey=getMemcachedKey(appKey, userKey,entryKey);
try {
if (value==null)
client.delete(memcachedKey);
else
client.set(memcachedKey,MAX_EXPIRES,value);
}
catch(TimeoutException | InterruptedException | MemcachedException e) {
throw new CacheException(e,String.format("error while setLongValue - app: %s, user: %s, key: %s",
appKey,userKey,entryKey));
}
}
@Override
public void setValues(String appKey,String userKey,List<CacheValue> cacheValueArray) throws CacheException {
checkInitAndIfWrongThrowException();
try {
for (CacheValue cacheValue:cacheValueArray) {
String memcachedKey=getMemcachedKey(appKey, userKey,cacheValue.getKey());
if (cacheValue.getType()==String.class) {
String v = (String) cacheValue.getValue();
if (v==null)
client.delete(memcachedKey);
else
client.set(memcachedKey,cacheValue.getExpireSeconds(),v);
}
else if (cacheValue.getType()==Integer.class) {
Integer v = (Integer) cacheValue.getValue();
if (v==null)
client.delete(memcachedKey);
else
client.set(memcachedKey,cacheValue.getExpireSeconds(),v);
}
else if (cacheValue.getType()==Long.class) {
Long v = (Long) cacheValue.getValue();
if (v==null)
client.delete(memcachedKey);
else
client.set(memcachedKey,cacheValue.getExpireSeconds(),v);
}
else if (cacheValue.getType()==Boolean.class) {
Boolean b = (Boolean)cacheValue.getValue();
if (b==null)
client.delete(memcachedKey);
else if (b==true)
client.set(memcachedKey,cacheValue.getExpireSeconds(),1);
else
client.set(memcachedKey,cacheValue.getExpireSeconds(),0);
}
}
}
catch(TimeoutException | InterruptedException | MemcachedException e) {
throw new CacheException(e,String.format("error while setValues - app: %s, user: %s",appKey,userKey));
}
}
@Override
public void touchValues(String appKey,String userKey,List<String> keyArray,int expireSeconds) throws CacheException {
checkInitAndIfWrongThrowException();
try {
for (String entryKey:keyArray) {
String memcachedKey=getMemcachedKey(appKey, userKey,entryKey);
client.touch(memcachedKey,expireSeconds);
}
}
catch(TimeoutException | InterruptedException | MemcachedException e) {
throw new CacheException(e,String.format("error while touchValues - app: %s, user: %s",appKey,userKey));
}
}
@Override
public void touchValue(String appKey,String userKey,String entryKey,int expireSeconds) throws CacheException {
checkInitAndIfWrongThrowException();
String memcachedKey=getMemcachedKey(appKey, userKey,entryKey);
try {
client.touch(memcachedKey,expireSeconds);
}
catch(TimeoutException | InterruptedException | MemcachedException e) {
throw new CacheException(e,String.format("error while touchValue - app: %s, user: %s, key: %s",
appKey,userKey,entryKey));
}
}
@Override
public void removeValues(String appKey,String userKey,List<String> keyArray) throws CacheException {
checkInitAndIfWrongThrowException();
try {
for (String entryKey:keyArray) {
String memcachedKey=getMemcachedKey(appKey, userKey,entryKey);
client.delete(memcachedKey);
}
}
catch(TimeoutException | InterruptedException | MemcachedException e) {
throw new CacheException(e,String.format("error while removeValues - app: %s, user: %s",
appKey,userKey));
}
}
@Override
public void removeValue(String appKey,String userKey,String entryKey) throws CacheException {
checkInitAndIfWrongThrowException();
String memcachedKey=getMemcachedKey(appKey, userKey,entryKey);
try {
client.delete(memcachedKey);
}
catch(TimeoutException | InterruptedException | MemcachedException e) {
throw new CacheException(e,String.format("error while removeValue - app: %s, user: %s, key: %s",
appKey,userKey,entryKey));
}
}
@Override
public String getStrValue(String appKey,String userKey,String entryKey) throws CacheException {
checkInitAndIfWrongThrowException();
String memcachedKey=getMemcachedKey(appKey, userKey,entryKey);
try {
return client.get(memcachedKey,timeout,STRING_TRANSCODER);
}
catch(TimeoutException | InterruptedException | MemcachedException e) {
throw new CacheException(e,String.format("error while getStrValue - app: %s, user: %s, key: %s",
appKey,userKey,entryKey));
}
}
@Override
public Boolean getBoolValue(String appKey,String userKey,String entryKey) throws CacheException {
checkInitAndIfWrongThrowException();
String memcachedKey=getMemcachedKey(appKey, userKey,entryKey);
try {
Integer i = client.get(memcachedKey,timeout,INT_TRANSCODER);
if (i==null) return null;
return i==1;
}
catch(TimeoutException | InterruptedException | MemcachedException e) {
throw new CacheException(e,String.format("error while getBoolValue - app: %s, user: %s, key: %s",
appKey,userKey,entryKey));
}
}
@Override
public Integer getIntValue(String appKey,String userKey,String entryKey) throws CacheException {
checkInitAndIfWrongThrowException();
String memcachedKey=getMemcachedKey(appKey, userKey,entryKey);
try {
return client.get(memcachedKey,timeout,INT_TRANSCODER);
}
catch(TimeoutException | InterruptedException | MemcachedException e) {
throw new CacheException(e,String.format("error while getIntValue - app: %s, user: %s, key: %s",
appKey,userKey,entryKey));
}
}
@Override
public Long getLongValue(String appKey,String userKey,String entryKey) throws CacheException {
checkInitAndIfWrongThrowException();
String memcachedKey=getMemcachedKey(appKey, userKey,entryKey);
try {
return client.get(memcachedKey,timeout,LONG_TRANSCODER);
}
catch(TimeoutException | InterruptedException | MemcachedException e) {
throw new CacheException(e,String.format("error while getLongValue - app: %s, user: %s, key: %s",
appKey,userKey,entryKey));
}
}
@Override
public void getValues(String appKey,String userKey,List<CacheValue> cacheValueArray) throws CacheException {
checkInitAndIfWrongThrowException();
try {
for (CacheValue cacheValue:cacheValueArray) {
String memcachedKey=getMemcachedKey(appKey, userKey,cacheValue.getKey());
if (cacheValue.getType()==String.class) {
cacheValue.setValue(client.get(memcachedKey,timeout,STRING_TRANSCODER));
}
else if (cacheValue.getType()==Integer.class) {
cacheValue.setValue(client.get(memcachedKey,timeout,INT_TRANSCODER));
}
else if (cacheValue.getType()==Long.class) {
cacheValue.setValue(client.get(memcachedKey,timeout,LONG_TRANSCODER));
}
else if (cacheValue.getType()==Boolean.class) {
Integer i = client.get(memcachedKey,timeout,INT_TRANSCODER);
if (i==null)
cacheValue.setValue(null);
else if (i==1)
cacheValue.setValue(true);
else
cacheValue.setValue(false);
}
}
}
catch(TimeoutException | InterruptedException | MemcachedException e) {
throw new CacheException(e,String.format("error while setValues - app: %s, user: %s",appKey,userKey));
}
}
private static Logger logger = LoggerFactory.getLogger(CacheImpl.class);
}
| OkieOth/othCache | memcachedCacheImpl/src/main/java/de/othsoft/cache/memcached/CacheImpl.java | Java | apache-2.0 | 18,579 |
#pragma once
#include "../acl_cpp_define.hpp"
#include "../stdlib/thread.hpp"
#include "../stream/aio_handle.hpp"
#include <vector>
namespace acl
{
class aio_handle;
class check_client;
class connect_manager;
class rpc_service;
class socket_stream;
class aio_socket_stream;
class ACL_CPP_API connect_monitor : public thread
{
public:
/**
* ¹¹Ô캯Êý
* @param manager {connect_manager&}
*/
connect_monitor(connect_manager& manager);
virtual ~connect_monitor();
/**
* µ±Ï£Íû²ÉÓÃ×èÈûʽ¼ì²â·þÎñ¶ËÁ¬½Óʱ£¬ÐèÒªÏȵ÷Óñ¾º¯Êý´ò¿ª
* acl::rpc_service ×èÈû½Ó¿Ú´¦Àí·þÎñ£»Èç¹ûÔÚ³õʼ»¯Ê±²»µ÷Óñ¾º¯Êý£¬
* Ôò²ÉÓ÷Ç×èÈû·½Ê½½øÐÐ IO ¼ì²â
* @param max_threads {int} rpc_service ·þÎñÏ̳߳ØÖÐÔËÐеÄ×î´óÏß³ÌÊý
* @param addr {const char*} Ï£Íû rpc_service ·þÎñ¼àÌýµÄ±¾»úµØÖ·£¬¿ÉÒÔ
* Ϊ±¾»úµÄ»ØµØÖ·»òÔÚ UNIX ƽ̨ÏÂʹÓÃÓòÌ×½Ó¿ÚµØÖ·
* @return {connect_monitor&}
*/
connect_monitor& open_rpc_service(int max_threads,
const char* addr = NULL);
/**
* ÉèÖüì²â¶¨Ê±Æ÷Æô¶¯µÄʱ¼ä¼ä¸ô
* @param n {int} ʱ¼ä¼ä¸ô£¨Ã룩
* @return {connect_mointor&}
*/
connect_monitor& set_check_inter(int n);
/**
* ÉèÖÃÁ¬½Ó±»¼ì²â·þÎñÆ÷µÄ³¬Ê±Ê±¼ä
* @param n {int} ³¬Ê±Ê±¼ä£¨Ã룩
* @return {connect_monitor&}
*/
connect_monitor& set_conn_timeout(int n);
/**
* Í£Ö¹¼ì²âÏß³Ì
* @param graceful {bool} ÊÇ·ñÎÄÃ÷µØ¹Ø±Õ¼ì²â¹ý³Ì£¬Èç¹ûΪ true
* Ôò»áµÈËùÓеļì²âÁ¬½Ó¹Ø±Õºó¼ì²âÏ̲߳ŷµ»Ø£»·ñÔò£¬ÔòÖ±½Ó¼ì²âÏß³Ì
* Ö±½Ó·µ»Ø£¬¿ÉÄÜ»áÔì³ÉһЩÕýÔÚ¼ì²âµÄÁ¬½Óδ±»ÊÍ·Å¡£ÕýÒòÈç´Ë£¬Èç¹û
* Á¬½Ó³Ø¼¯Èº¹ÜÀí¶ÔÏóÊǽø³ÌÄÚÈ«¾ÖµÄ£¬¿ÉÒÔ½«´Ë²ÎÊýÉèΪ false£¬Èç¹û
* Á¬½Ó³Ø¼¯Èº¹ÜÀí¶ÔÏóÔÚÔËÐйý³ÌÖÐÐèÒª±»¶à´Î´´½¨ÓëÊÍ·Å£¬ÔòÓ¦¸ÃÉèΪ true
*/
void stop(bool graceful);
/**
* »ñµÃ connect_manager ÒýÓöÔÏó
* @return {connect_manager&}
*/
connect_manager& get_manager() const
{
return manager_;
}
/**
* Ð麯Êý£¬×ÓÀà¿ÉÒÔÖØÔØ±¾º¯ÊýÓÃÀ´½øÒ»²½ÅжϸÃÁ¬½ÓÊÇ·ñÊÇ´æ»îµÄ£¬¸Ã»Øµ÷
* º¯ÊýµÄÔËÐпռäΪµ±Ç°·Ç×èÈû¼ì²âÏ̵߳ÄÔËÐпռ䣬Òò´ËÔڸûص÷º¯ÊýÖв»
* µÃÓÐ×èÈû¹ý³Ì£¬·ñÔò½«»á×èÈûÕû¸ö·Ç×èÈû¼ì²âÏß³Ì
* @param checker {check_client&} ·þÎñ¶ËÁ¬½ÓµÄ¼ì²é¶ÔÏ󣬿ÉÒÔͨ¹ý
* check_client ÀàÖеķ½·¨ÈçÏ£º
* 1) get_conn »ñµÃ·Ç×èÈûÁ¬½Ó¾ä±ú
* 2) get_addr »ñµÃ·þÎñ¶ËµØÖ·
* 3) set_alive ÉèÖÃÁ¬½ÓÊÇ·ñ´æ»î
* 4) close ¹Ø±ÕÁ¬½Ó
*/
virtual void nio_check(check_client& checker, aio_socket_stream& conn);
/**
* ͬ²½ IO ¼ì²âÐ麯Êý£¬¸Ãº¯ÊýÔÚÏ̳߳صÄij¸ö×ÓÏ߳̿ռäÖÐÔËÐУ¬×ÓÀà¿ÉÒÔ
* ÖØÔØ±¾º¯ÊýÒÔ¼ì²âʵ¼ÊÓ¦ÓõÄÍøÂçÁ¬½Ó´æ»î״̬£¬¿ÉÒÔÔÚ±¾º¯ÊýÄÚÓÐ×èÈû
* IO ¹ý³Ì
* @param checker {check_client&} ·þÎñ¶ËÁ¬½ÓµÄ¼ì²é¶ÔÏó
* check_client ÀàÖÐÔÊÐíµ÷Óõķ½·¨ÈçÏ£º
* 1) get_addr »ñµÃ·þÎñ¶ËµØÖ·
* 2) set_alive ÉèÖÃÁ¬½ÓÊÇ·ñ´æ»î
* check_client ÀàÖнûÖ¹µ÷Óõķ½·¨ÈçÏ£º
* 1) get_conn »ñµÃ·Ç×èÈûÁ¬½Ó¾ä±ú
* 2) close ¹Ø±ÕÁ¬½Ó
*/
virtual void sio_check(check_client& checker, socket_stream& conn);
/**
* µ±Á¬½Ó³É¹¦Ê±µÄ»Øµ÷·½·¨£¬×ÓÀà¿ÉÒÔʵÏÖ±¾·½·¨
* @param checker {check_client&}
* @param cost {double} ´Ó·¢ÆðÁ¬½ÓÇëÇóµ½³¬Ê±µÄʱ¼ä¼ä¸ô£¨Ã룩
*/
virtual void on_connected(const check_client& /* checker */,
double /* cost */)
{
}
/**
* µ±Á¬½Ó³¬Ê±Ê±µÄ»Øµ÷·½·¨£¬×ÓÀà¿ÉÒÔʵÏÖ±¾·½·¨
* @param addr {const char*} ±»¼ì²âµÄ·þÎñÆ÷µØÖ·£¬¸ñʽ: ip:port
* @param cost {double} ´Ó·¢ÆðÁ¬½ÓÇëÇóµ½³¬Ê±µÄʱ¼ä¼ä¸ô£¨Ã룩
*/
virtual void on_timeout(const char* /* addr */, double /* cost */)
{
}
/**
* µ±Á¬½Ó·þÎñÆ÷ʱ±»¾Ü¾øÊ±µÄ»Øµ÷·½·¨£¬×ÓÀà¿ÉʵÏÖ±¾·½·¨
* @param addr {const char*} ±»¼ì²âµÄ·þÎñÆ÷µØÖ·£¬¸ñʽ: ip:port
* @param cost {double} ´Ó·¢ÆðÁ¬½ÓÇëÇóµ½±»¶Ï¿ªµÄʱ¼ä¼ä¸ô£¨Ã룩
*/
virtual void on_refused(const char* /* addr */, double /* cost */)
{
}
public:
// ËäÈ»ÏÂÃæµÄº¯ÊýÊÇ public µÄ£¬µ«Ö»¹©ÄÚ²¿Ê¹ÓÃ
/**
* µ±Óë·þÎñ¶Ë½¨Á¢Á¬½Óºóµ÷Óô˺¯Êý
* @param checker {check_client&}
*/
void on_open(check_client& checker);
protected:
// »ùÀà´¿Ð麯Êý
virtual void* run();
private:
bool stop_;
bool stop_graceful_;
aio_handle handle_; // ºǫ́¼ì²âÏ̵߳ķÇ×èÈû¾ä±ú
connect_manager& manager_; // Á¬½Ó³Ø¼¯ºÏ¹ÜÀí¶ÔÏó
int check_inter_; // ¼ì²âÁ¬½Ó³Ø×´Ì¬µÄʱ¼ä¼ä¸ô(Ãë)
int conn_timeout_; // Á¬½Ó·þÎñÆ÷µÄ³¬Ê±Ê±¼ä
rpc_service* rpc_service_; // Òì²½ RPC ͨÐÅ·þÎñ¾ä±ú
};
} // namespace acl
| feixiao/c_practice | acl/include/acl_cpp/connpool/connect_monitor.hpp | C++ | apache-2.0 | 4,111 |
// Copyright 2016 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package tikv
import (
"bytes"
"context"
"fmt"
"io"
"sort"
"strconv"
"strings"
"sync"
"sync/atomic"
"time"
"unsafe"
"github.com/cznic/mathutil"
"github.com/gogo/protobuf/proto"
"github.com/pingcap/errors"
"github.com/pingcap/failpoint"
"github.com/pingcap/kvproto/pkg/coprocessor"
"github.com/pingcap/kvproto/pkg/kvrpcpb"
"github.com/pingcap/parser/terror"
"github.com/pingcap/tidb/domain/infosync"
"github.com/pingcap/tidb/errno"
"github.com/pingcap/tidb/kv"
"github.com/pingcap/tidb/metrics"
"github.com/pingcap/tidb/store/tikv/tikvrpc"
"github.com/pingcap/tidb/util/execdetails"
"github.com/pingcap/tidb/util/logutil"
"github.com/pingcap/tidb/util/memory"
"github.com/pingcap/tipb/go-tipb"
"go.uber.org/zap"
)
var (
tikvTxnRegionsNumHistogramWithCoprocessor = metrics.TiKVTxnRegionsNumHistogram.WithLabelValues("coprocessor")
tikvTxnRegionsNumHistogramWithBatchCoprocessor = metrics.TiKVTxnRegionsNumHistogram.WithLabelValues("batch_coprocessor")
coprCacheHistogramEvict = metrics.DistSQLCoprCacheHistogram.WithLabelValues("evict")
)
// CopClient is coprocessor client.
type CopClient struct {
kv.RequestTypeSupportedChecker
store *tikvStore
replicaReadSeed uint32
}
// Send builds the request and gets the coprocessor iterator response.
func (c *CopClient) Send(ctx context.Context, req *kv.Request, vars *kv.Variables, sessionMemTracker *memory.Tracker) kv.Response {
if req.StoreType == kv.TiFlash && req.BatchCop {
logutil.BgLogger().Debug("send batch requests")
return c.sendBatch(ctx, req, vars)
}
ctx = context.WithValue(ctx, txnStartKey, req.StartTs)
bo := NewBackofferWithVars(ctx, copBuildTaskMaxBackoff, vars)
tasks, err := buildCopTasks(bo, c.store.regionCache, &copRanges{mid: req.KeyRanges}, req)
if err != nil {
return copErrorResponse{err}
}
it := &copIterator{
store: c.store,
req: req,
concurrency: req.Concurrency,
finishCh: make(chan struct{}),
vars: vars,
memTracker: req.MemTracker,
replicaReadSeed: c.replicaReadSeed,
rpcCancel: NewRPCanceller(),
maxID: &maxIDHandler{},
}
it.maxID.maxID = 0
it.minCommitTSPushed.data = make(map[uint64]struct{}, 5)
it.tasks = tasks
if it.concurrency > len(tasks) {
it.concurrency = len(tasks)
}
if it.concurrency < 1 {
// Make sure that there is at least one worker.
it.concurrency = 1
}
if it.req.KeepOrder {
it.sendRate = newRateLimit(2 * it.concurrency)
} else {
it.respChan = make(chan *copResponse, it.concurrency)
it.sendRate = newRateLimit(it.concurrency)
}
it.actionOnExceed = newRateLimitAction(uint(cap(it.sendRate.token)), sync.NewCond(&sync.Mutex{}))
if sessionMemTracker != nil {
sessionMemTracker.FallbackOldAndSetNewAction(it.actionOnExceed)
}
if !it.req.Streaming {
ctx = context.WithValue(ctx, RPCCancellerCtxKey{}, it.rpcCancel)
}
it.open(ctx)
return it
}
// copTask contains a related Region and KeyRange for a kv.Request.
type copTask struct {
id uint32
region RegionVerID
ranges *copRanges
respChan chan *copResponse
storeAddr string
cmdType tikvrpc.CmdType
storeType kv.StoreType
}
func (r *copTask) String() string {
return fmt.Sprintf("region(%d %d %d) ranges(%d) store(%s)",
r.region.id, r.region.confVer, r.region.ver, r.ranges.len(), r.storeAddr)
}
// copRanges is like []kv.KeyRange, but may has extra elements at head/tail.
// It's for avoiding alloc big slice during build copTask.
type copRanges struct {
first *kv.KeyRange
mid []kv.KeyRange
last *kv.KeyRange
}
func (r *copRanges) String() string {
var s string
r.do(func(ran *kv.KeyRange) {
s += fmt.Sprintf("[%q, %q]", ran.StartKey, ran.EndKey)
})
return s
}
func (r *copRanges) len() int {
var l int
if r.first != nil {
l++
}
l += len(r.mid)
if r.last != nil {
l++
}
return l
}
func (r *copRanges) at(i int) kv.KeyRange {
if r.first != nil {
if i == 0 {
return *r.first
}
i--
}
if i < len(r.mid) {
return r.mid[i]
}
return *r.last
}
func (r *copRanges) slice(from, to int) *copRanges {
var ran copRanges
if r.first != nil {
if from == 0 && to > 0 {
ran.first = r.first
}
if from > 0 {
from--
}
if to > 0 {
to--
}
}
if to <= len(r.mid) {
ran.mid = r.mid[from:to]
} else {
if from <= len(r.mid) {
ran.mid = r.mid[from:]
}
if from < to {
ran.last = r.last
}
}
return &ran
}
func (r *copRanges) do(f func(ran *kv.KeyRange)) {
if r.first != nil {
f(r.first)
}
for _, ran := range r.mid {
f(&ran)
}
if r.last != nil {
f(r.last)
}
}
func (r *copRanges) toPBRanges() []*coprocessor.KeyRange {
ranges := make([]*coprocessor.KeyRange, 0, r.len())
r.do(func(ran *kv.KeyRange) {
ranges = append(ranges, &coprocessor.KeyRange{
Start: ran.StartKey,
End: ran.EndKey,
})
})
return ranges
}
// split ranges into (left, right) by key.
func (r *copRanges) split(key []byte) (*copRanges, *copRanges) {
n := sort.Search(r.len(), func(i int) bool {
cur := r.at(i)
return len(cur.EndKey) == 0 || bytes.Compare(cur.EndKey, key) > 0
})
// If a range p contains the key, it will split to 2 parts.
if n < r.len() {
p := r.at(n)
if bytes.Compare(key, p.StartKey) > 0 {
left := r.slice(0, n)
left.last = &kv.KeyRange{StartKey: p.StartKey, EndKey: key}
right := r.slice(n+1, r.len())
right.first = &kv.KeyRange{StartKey: key, EndKey: p.EndKey}
return left, right
}
}
return r.slice(0, n), r.slice(n, r.len())
}
// rangesPerTask limits the length of the ranges slice sent in one copTask.
const rangesPerTask = 25000
func buildCopTasks(bo *Backoffer, cache *RegionCache, ranges *copRanges, req *kv.Request) ([]*copTask, error) {
start := time.Now()
cmdType := tikvrpc.CmdCop
if req.Streaming {
cmdType = tikvrpc.CmdCopStream
}
if req.StoreType == kv.TiDB {
return buildTiDBMemCopTasks(ranges, req)
}
rangesLen := ranges.len()
var tasks []*copTask
appendTask := func(regionWithRangeInfo *KeyLocation, ranges *copRanges) {
// TiKV will return gRPC error if the message is too large. So we need to limit the length of the ranges slice
// to make sure the message can be sent successfully.
rLen := ranges.len()
for i := 0; i < rLen; {
nextI := mathutil.Min(i+rangesPerTask, rLen)
tasks = append(tasks, &copTask{
region: regionWithRangeInfo.Region,
ranges: ranges.slice(i, nextI),
// Channel buffer is 2 for handling region split.
// In a common case, two region split tasks will not be blocked.
respChan: make(chan *copResponse, 2),
cmdType: cmdType,
storeType: req.StoreType,
})
i = nextI
}
}
err := splitRanges(bo, cache, ranges, appendTask)
if err != nil {
return nil, errors.Trace(err)
}
if req.Desc {
reverseTasks(tasks)
}
if elapsed := time.Since(start); elapsed > time.Millisecond*500 {
logutil.BgLogger().Warn("buildCopTasks takes too much time",
zap.Duration("elapsed", elapsed),
zap.Int("range len", rangesLen),
zap.Int("task len", len(tasks)))
}
tikvTxnRegionsNumHistogramWithCoprocessor.Observe(float64(len(tasks)))
return tasks, nil
}
func buildTiDBMemCopTasks(ranges *copRanges, req *kv.Request) ([]*copTask, error) {
servers, err := infosync.GetAllServerInfo(context.Background())
if err != nil {
return nil, err
}
cmdType := tikvrpc.CmdCop
if req.Streaming {
cmdType = tikvrpc.CmdCopStream
}
tasks := make([]*copTask, 0, len(servers))
for _, ser := range servers {
if req.TiDBServerID > 0 && req.TiDBServerID != ser.ServerIDGetter() {
continue
}
addr := ser.IP + ":" + strconv.FormatUint(uint64(ser.StatusPort), 10)
tasks = append(tasks, &copTask{
ranges: ranges,
respChan: make(chan *copResponse, 2),
cmdType: cmdType,
storeType: req.StoreType,
storeAddr: addr,
})
}
return tasks, nil
}
func splitRanges(bo *Backoffer, cache *RegionCache, ranges *copRanges, fn func(regionWithRangeInfo *KeyLocation, ranges *copRanges)) error {
for ranges.len() > 0 {
loc, err := cache.LocateKey(bo, ranges.at(0).StartKey)
if err != nil {
return errors.Trace(err)
}
// Iterate to the first range that is not complete in the region.
var i int
for ; i < ranges.len(); i++ {
r := ranges.at(i)
if !(loc.Contains(r.EndKey) || bytes.Equal(loc.EndKey, r.EndKey)) {
break
}
}
// All rest ranges belong to the same region.
if i == ranges.len() {
fn(loc, ranges)
break
}
r := ranges.at(i)
if loc.Contains(r.StartKey) {
// Part of r is not in the region. We need to split it.
taskRanges := ranges.slice(0, i)
taskRanges.last = &kv.KeyRange{
StartKey: r.StartKey,
EndKey: loc.EndKey,
}
fn(loc, taskRanges)
ranges = ranges.slice(i+1, ranges.len())
ranges.first = &kv.KeyRange{
StartKey: loc.EndKey,
EndKey: r.EndKey,
}
} else {
// rs[i] is not in the region.
taskRanges := ranges.slice(0, i)
fn(loc, taskRanges)
ranges = ranges.slice(i, ranges.len())
}
}
return nil
}
// SplitRegionRanges get the split ranges from pd region.
func SplitRegionRanges(bo *Backoffer, cache *RegionCache, keyRanges []kv.KeyRange) ([]kv.KeyRange, error) {
ranges := copRanges{mid: keyRanges}
var ret []kv.KeyRange
appendRange := func(regionWithRangeInfo *KeyLocation, ranges *copRanges) {
for i := 0; i < ranges.len(); i++ {
ret = append(ret, ranges.at(i))
}
}
err := splitRanges(bo, cache, &ranges, appendRange)
if err != nil {
return nil, errors.Trace(err)
}
return ret, nil
}
func reverseTasks(tasks []*copTask) {
for i := 0; i < len(tasks)/2; i++ {
j := len(tasks) - i - 1
tasks[i], tasks[j] = tasks[j], tasks[i]
}
}
type copIterator struct {
store *tikvStore
req *kv.Request
concurrency int
finishCh chan struct{}
// If keepOrder, results are stored in copTask.respChan, read them out one by one.
tasks []*copTask
// curr indicates the curr id of the finished copTask
curr int
// maxID indicates the max id of the running copTask
maxID *maxIDHandler
// sendRate controls the sending rate of copIteratorTaskSender
sendRate *rateLimit
// Otherwise, results are stored in respChan.
respChan chan *copResponse
vars *kv.Variables
memTracker *memory.Tracker
replicaReadSeed uint32
rpcCancel *RPCCanceller
wg sync.WaitGroup
// closed represents when the Close is called.
// There are two cases we need to close the `finishCh` channel, one is when context is done, the other one is
// when the Close is called. we use atomic.CompareAndSwap `closed` to to make sure the channel is not closed twice.
closed uint32
minCommitTSPushed
actionOnExceed *rateLimitAction
}
// copIteratorWorker receives tasks from copIteratorTaskSender, handles tasks and sends the copResponse to respChan.
type copIteratorWorker struct {
taskCh <-chan *copTask
wg *sync.WaitGroup
store *tikvStore
req *kv.Request
respChan chan<- *copResponse
finishCh <-chan struct{}
vars *kv.Variables
clientHelper
memTracker *memory.Tracker
replicaReadSeed uint32
sendRate *rateLimit
actionOnExceed *rateLimitAction
maxID *maxIDHandler
}
// copIteratorTaskSender sends tasks to taskCh then wait for the workers to exit.
type copIteratorTaskSender struct {
taskCh chan<- *copTask
wg *sync.WaitGroup
tasks []*copTask
finishCh <-chan struct{}
respChan chan<- *copResponse
sendRate *rateLimit
}
type copResponse struct {
pbResp *coprocessor.Response
detail *CopRuntimeStats
startKey kv.Key
err error
respSize int64
respTime time.Duration
}
const (
sizeofExecDetails = int(unsafe.Sizeof(execdetails.ExecDetails{}))
sizeofCommitDetails = int(unsafe.Sizeof(execdetails.CommitDetails{}))
)
// GetData implements the kv.ResultSubset GetData interface.
func (rs *copResponse) GetData() []byte {
return rs.pbResp.Data
}
// GetStartKey implements the kv.ResultSubset GetStartKey interface.
func (rs *copResponse) GetStartKey() kv.Key {
return rs.startKey
}
func (rs *copResponse) GetCopRuntimeStats() *CopRuntimeStats {
return rs.detail
}
// MemSize returns how many bytes of memory this response use
func (rs *copResponse) MemSize() int64 {
if rs.respSize != 0 {
return rs.respSize
}
// ignore rs.err
rs.respSize += int64(cap(rs.startKey))
if rs.detail != nil {
rs.respSize += int64(sizeofExecDetails)
}
if rs.pbResp != nil {
// Using a approximate size since it's hard to get a accurate value.
rs.respSize += int64(rs.pbResp.Size())
}
return rs.respSize
}
func (rs *copResponse) RespTime() time.Duration {
return rs.respTime
}
const minLogCopTaskTime = 300 * time.Millisecond
// run is a worker function that get a copTask from channel, handle it and
// send the result back.
func (worker *copIteratorWorker) run(ctx context.Context) {
defer worker.wg.Done()
for task := range worker.taskCh {
respCh := worker.respChan
if respCh == nil {
respCh = task.respChan
}
worker.handleTask(ctx, task, respCh)
close(task.respChan)
worker.maxID.setMaxIDIfLarger(task.id)
worker.actionOnExceed.destroyTokenIfNeeded(func() {
worker.sendRate.putToken()
})
if worker.vars != nil && worker.vars.Killed != nil && atomic.LoadUint32(worker.vars.Killed) == 1 {
return
}
select {
case <-worker.finishCh:
return
default:
}
}
}
// open starts workers and sender goroutines.
func (it *copIterator) open(ctx context.Context) {
taskCh := make(chan *copTask, 1)
it.wg.Add(it.concurrency)
// Start it.concurrency number of workers to handle cop requests.
for i := 0; i < it.concurrency; i++ {
worker := &copIteratorWorker{
taskCh: taskCh,
wg: &it.wg,
store: it.store,
req: it.req,
respChan: it.respChan,
finishCh: it.finishCh,
vars: it.vars,
clientHelper: clientHelper{
LockResolver: it.store.lockResolver,
RegionCache: it.store.regionCache,
minCommitTSPushed: &it.minCommitTSPushed,
Client: it.store.client,
},
memTracker: it.memTracker,
replicaReadSeed: it.replicaReadSeed,
sendRate: it.sendRate,
actionOnExceed: it.actionOnExceed,
maxID: it.maxID,
}
go worker.run(ctx)
}
taskSender := &copIteratorTaskSender{
taskCh: taskCh,
wg: &it.wg,
tasks: it.tasks,
finishCh: it.finishCh,
sendRate: it.sendRate,
}
taskSender.respChan = it.respChan
it.actionOnExceed.setEnabled(true)
go taskSender.run()
}
func (sender *copIteratorTaskSender) run() {
// Send tasks to feed the worker goroutines.
for i, t := range sender.tasks {
// we control the sending rate to prevent all tasks
// being done (aka. all of the responses are buffered) by copIteratorWorker.
// We keep the number of inflight tasks within the number of 2 * concurrency when Keep Order is true.
// If KeepOrder is false, the number equals the concurrency.
// It sends one more task if a task has been finished in copIterator.Next.
exit := sender.sendRate.getToken(sender.finishCh)
if exit {
break
}
t.id = uint32(i)
exit = sender.sendToTaskCh(t)
if exit {
break
}
}
close(sender.taskCh)
// Wait for worker goroutines to exit.
sender.wg.Wait()
if sender.respChan != nil {
close(sender.respChan)
}
}
// recvFromRespCh waits for one response on respCh. ok is false when respCh
// has been closed; exit is true when the iterator is finished or ctx is
// cancelled. A 3-second ticker polls vars.Killed so a killed query is
// surfaced as an ErrQueryInterrupted response even while blocked waiting.
func (it *copIterator) recvFromRespCh(ctx context.Context, respCh <-chan *copResponse) (resp *copResponse, ok bool, exit bool) {
	ticker := time.NewTicker(3 * time.Second)
	defer ticker.Stop()
	for {
		select {
		case resp, ok = <-respCh:
			if it.memTracker != nil && resp != nil {
				// Release the memory that sendToRespCh charged for this
				// response, now that it has left the buffer.
				consumed := resp.MemSize()
				failpoint.Inject("testRateLimitActionMockConsume", func(val failpoint.Value) {
					if val.(bool) {
						consumed = 100
					}
				})
				it.memTracker.Consume(-consumed)
			}
			return
		case <-it.finishCh:
			exit = true
			return
		case <-ticker.C:
			if atomic.LoadUint32(it.vars.Killed) == 1 {
				resp = &copResponse{err: ErrQueryInterrupted}
				ok = true
				return
			}
		case <-ctx.Done():
			// We select the ctx.Done() in the thread of `Next` instead of in the worker to avoid the cost of `WithCancel`.
			if atomic.CompareAndSwapUint32(&it.closed, 0, 1) {
				close(it.finishCh)
			}
			exit = true
			return
		}
	}
}
// sendToTaskCh delivers one task to the worker channel. It returns
// exit=true (and drops the task) if finishCh fires first, i.e. the
// iterator is being closed.
func (sender *copIteratorTaskSender) sendToTaskCh(t *copTask) (exit bool) {
	select {
	case sender.taskCh <- t:
	case <-sender.finishCh:
		exit = true
	}
	return
}
// sendToRespCh buffers resp on respCh. When checkOOM is set, the response
// size is charged to the memory tracker before sending; the matching
// release happens in recvFromRespCh. checkOOM is false when called from a
// panic-recovery path to avoid panicking again. Returns exit=true if
// finishCh fires before the response can be buffered.
func (worker *copIteratorWorker) sendToRespCh(resp *copResponse, respCh chan<- *copResponse, checkOOM bool) (exit bool) {
	if worker.memTracker != nil && checkOOM {
		consumed := resp.MemSize()
		failpoint.Inject("testRateLimitActionMockConsume", func(val failpoint.Value) {
			if val.(bool) {
				consumed = 100
			}
		})
		worker.memTracker.Consume(consumed)
	}
	select {
	case respCh <- resp:
	case <-worker.finishCh:
		exit = true
	}
	return
}
// Next returns next coprocessor result.
// NOTE: Use nil to indicate finish, so if the returned ResultSubset is not nil, reader should continue to call Next().
func (it *copIterator) Next(ctx context.Context) (kv.ResultSubset, error) {
	var (
		resp   *copResponse
		ok     bool
		closed bool
	)
	// wait until at least 2 copResponses are received (test-only busy wait).
	failpoint.Inject("testRateLimitActionMockWaitMax", func(val failpoint.Value) {
		if val.(bool) {
			for it.memTracker.MaxConsumed() < 200 {
			}
		}
	})
	// If data order matters, response should be returned in the same order as copTask slice.
	// Otherwise all responses are returned from a single channel.
	if it.respChan != nil {
		// Get next fetched resp from chan
		resp, ok, closed = it.recvFromRespCh(ctx, it.respChan)
		if !ok || closed {
			it.actionOnExceed.close()
			return nil, nil
		}
		// The respCh has been drained out
		it.actionOnExceed.broadcastIfNeeded(len(it.respChan) < 1)
	} else {
		// Keep-order mode: consume each task's private channel in task order,
		// advancing to the next task when the current channel is exhausted.
		for {
			if it.curr >= len(it.tasks) {
				// Resp will be nil if iterator is finishCh.
				it.actionOnExceed.close()
				return nil, nil
			}
			task := it.tasks[it.curr]
			resp, ok, closed = it.recvFromRespCh(ctx, task.respChan)
			if closed {
				// Close() is already called, so Next() is invalid.
				return nil, nil
			}
			if ok {
				break
			}
			finishedTaskID := it.tasks[it.curr].id
			// Switch to next task. Drop the reference so the finished task
			// can be garbage-collected.
			it.tasks[it.curr] = nil
			it.curr++
			maxID := it.maxID.getMaxID()
			// The tasks whose id is less than maxID are assumed that being sending to their task channel.
			// So the response channel would be thought as drained out if the current taskID is greater or equal than
			// the maxID as all the workers are being suspended at that time.
			it.actionOnExceed.broadcastIfNeeded(finishedTaskID >= maxID)
		}
	}
	if resp.err != nil {
		return nil, errors.Trace(resp.err)
	}
	err := it.store.CheckVisibility(it.req.StartTs)
	if err != nil {
		return nil, errors.Trace(err)
	}
	return resp, nil
}
// chooseBackoffer returns the Backoffer dedicated to the task's region,
// lazily creating and registering one on first use. Associating each
// region with an independent backoffer lets TiDB keep making progress
// quickly when several regions are unavailable at the same time.
func chooseBackoffer(ctx context.Context, backoffermap map[uint64]*Backoffer, task *copTask, worker *copIteratorWorker) *Backoffer {
	if bo, ok := backoffermap[task.region.id]; ok {
		return bo
	}
	bo := NewBackofferWithVars(ctx, copNextMaxBackoff, worker.vars)
	backoffermap[task.region.id] = bo
	return bo
}
// handleTask handles single copTask, sends the result to channel, retry automatically on error.
// It treats remainTasks as a worklist: retriable failures (region split,
// resolved locks) push replacement tasks to the front so data order is
// preserved. A panic in the worker is converted into an error response.
func (worker *copIteratorWorker) handleTask(ctx context.Context, task *copTask, respCh chan<- *copResponse) {
	defer func() {
		r := recover()
		if r != nil {
			logutil.BgLogger().Error("copIteratorWork meet panic",
				zap.Reflect("r", r),
				zap.Stack("stack trace"))
			resp := &copResponse{err: errors.Errorf("%v", r)}
			// if panic has happened, set checkOOM to false to avoid another panic.
			worker.sendToRespCh(resp, respCh, false)
		}
	}()
	remainTasks := []*copTask{task}
	// Each region gets its own backoffer so one slow region does not
	// inflate the backoff of the others.
	backoffermap := make(map[uint64]*Backoffer)
	for len(remainTasks) > 0 {
		curTask := remainTasks[0]
		bo := chooseBackoffer(ctx, backoffermap, curTask, worker)
		tasks, err := worker.handleTaskOnce(bo, curTask, respCh)
		if err != nil {
			resp := &copResponse{err: errors.Trace(err)}
			worker.sendToRespCh(resp, respCh, true)
			return
		}
		// test whether the ctx is cancelled
		if bo.vars != nil && bo.vars.Killed != nil && atomic.LoadUint32(bo.vars.Killed) == 1 {
			return
		}
		if len(tasks) > 0 {
			remainTasks = append(tasks, remainTasks[1:]...)
		} else {
			remainTasks = remainTasks[1:]
		}
	}
	// Export coprocessor-cache eviction stats once per finished task chain.
	if worker.store.coprCache != nil && worker.store.coprCache.cache.Metrics != nil {
		coprCacheHistogramEvict.Observe(float64(worker.store.coprCache.cache.Metrics.KeysEvicted()))
	}
}
// handleTaskOnce handles single copTask, successful results are send to channel.
// If error happened, returns error. If region split or meet lock, returns the remain tasks.
func (worker *copIteratorWorker) handleTaskOnce(bo *Backoffer, task *copTask, ch chan<- *copResponse) ([]*copTask, error) {
	failpoint.Inject("handleTaskOnceError", func(val failpoint.Value) {
		if val.(bool) {
			failpoint.Return(nil, errors.New("mock handleTaskOnce error"))
		}
	})
	copReq := coprocessor.Request{
		Tp:        worker.req.Tp,
		StartTs:   worker.req.StartTs,
		Data:      worker.req.Data,
		Ranges:    task.ranges.toPBRanges(),
		SchemaVer: worker.req.SchemaVar,
	}
	var cacheKey []byte = nil
	var cacheValue *coprCacheValue = nil
	// If there are many ranges, it is very likely to be a TableLookupRequest. They are not worth to cache since
	// computing is not the main cost. Ignore such requests directly to avoid slowly building the cache key.
	if task.cmdType == tikvrpc.CmdCop && worker.store.coprCache != nil && worker.req.Cacheable && len(copReq.Ranges) < 10 {
		cKey, err := coprCacheBuildKey(&copReq)
		if err == nil {
			cacheKey = cKey
			cValue := worker.store.coprCache.Get(cKey)
			copReq.IsCacheEnabled = true
			// Only reuse a cached entry for the same region and when it was
			// produced at a timestamp not newer than this request's StartTs.
			if cValue != nil && cValue.RegionID == task.region.id && cValue.TimeStamp <= worker.req.StartTs {
				// Append cache version to the request to skip Coprocessor computation if possible
				// when request result is cached
				copReq.CacheIfMatchVersion = cValue.RegionDataVersion
				cacheValue = cValue
			} else {
				copReq.CacheIfMatchVersion = 0
			}
		} else {
			logutil.BgLogger().Warn("Failed to build copr cache key", zap.Error(err))
		}
	}
	req := tikvrpc.NewReplicaReadRequest(task.cmdType, &copReq, worker.req.ReplicaRead, &worker.replicaReadSeed, kvrpcpb.Context{
		IsolationLevel: pbIsolationLevel(worker.req.IsolationLevel),
		Priority:       kvPriorityToCommandPri(worker.req.Priority),
		NotFillCache:   worker.req.NotFillCache,
		HandleTime:     true,
		ScanDetail:     true,
		TaskId:         worker.req.TaskID,
	})
	req.StoreTp = task.storeType
	startTime := time.Now()
	if worker.Stats == nil {
		worker.Stats = make(map[tikvrpc.CmdType]*RPCRuntimeStats)
	}
	resp, rpcCtx, storeAddr, err := worker.SendReqCtx(bo, req, task.region, ReadTimeoutMedium, task.storeType, task.storeAddr)
	if err != nil {
		// TiDB-store failures are reported as a warning response instead of
		// failing the whole query.
		if task.storeType == kv.TiDB {
			err = worker.handleTiDBSendReqErr(err, task, ch)
			return nil, err
		}
		return nil, errors.Trace(err)
	}
	// Set task.storeAddr field so its task.String() method have the store address information.
	task.storeAddr = storeAddr
	costTime := time.Since(startTime)
	if costTime > minLogCopTaskTime {
		worker.logTimeCopTask(costTime, task, bo, resp)
	}
	metrics.TiKVCoprocessorHistogram.Observe(costTime.Seconds())
	if task.cmdType == tikvrpc.CmdCopStream {
		return worker.handleCopStreamResult(bo, rpcCtx, resp.Resp.(*tikvrpc.CopStreamResponse), task, ch, costTime)
	}
	// Handles the response for non-streaming copTask.
	return worker.handleCopResponse(bo, rpcCtx, &copResponse{pbResp: resp.Resp.(*coprocessor.Response)}, cacheKey, cacheValue, task, ch, nil, costTime)
}
// minCommitTSPushed records the IDs of transactions whose minCommitTS has
// been pushed by this client, so that subsequent requests can carry them
// as already-resolved locks. Safe for concurrent use.
type minCommitTSPushed struct {
	data map[uint64]struct{}
	sync.RWMutex
}

// Update merges the given transaction IDs into the set.
func (m *minCommitTSPushed) Update(from []uint64) {
	m.Lock()
	defer m.Unlock()
	for _, txnID := range from {
		m.data[txnID] = struct{}{}
	}
}

// Get returns a snapshot of the recorded IDs, or nil when the set is empty.
func (m *minCommitTSPushed) Get() []uint64 {
	m.RLock()
	defer m.RUnlock()
	if len(m.data) == 0 {
		return nil
	}
	snapshot := make([]uint64, 0, len(m.data))
	for txnID := range m.data {
		snapshot = append(snapshot, txnID)
	}
	return snapshot
}
// clientHelper wraps LockResolver and RegionRequestSender.
// It's introduced to support the new lock resolving pattern in the large transaction.
// In the large transaction protocol, sending requests and resolving locks are
// context-dependent. For example, when a send request meets a secondary lock, we'll
// call ResolveLock, and if the lock belongs to a large transaction, we may retry
// the request. If there is no context information about the resolved locks, we'll
// meet the secondary lock again and run into a deadloop.
type clientHelper struct {
	*LockResolver
	*RegionCache
	// minCommitTSPushed accumulates resolved transaction IDs across retries
	// of the same logical request.
	*minCommitTSPushed
	Client
	// resolveLite selects the lightweight resolveLocksLite path in ResolveLocks.
	resolveLite bool
	RegionRequestRuntimeStats
}
// ResolveLocks wraps the ResolveLocks function and store the resolved result.
// On success with resolved locks it records them in minCommitTSPushed and
// returns 0 (no need to wait); otherwise it returns the milliseconds to
// back off before the lock-owning transaction may expire.
func (ch *clientHelper) ResolveLocks(bo *Backoffer, callerStartTS uint64, locks []*Lock) (int64, error) {
	var err error
	var resolvedLocks []uint64
	var msBeforeTxnExpired int64
	if ch.Stats != nil {
		// Record the elapsed time of lock resolution in the runtime stats.
		defer func(start time.Time) {
			recordRegionRequestRuntimeStats(ch.Stats, tikvrpc.CmdResolveLock, time.Since(start))
		}(time.Now())
	}
	if ch.resolveLite {
		msBeforeTxnExpired, resolvedLocks, err = ch.LockResolver.resolveLocksLite(bo, callerStartTS, locks)
	} else {
		msBeforeTxnExpired, resolvedLocks, err = ch.LockResolver.ResolveLocks(bo, callerStartTS, locks)
	}
	if err != nil {
		return msBeforeTxnExpired, err
	}
	if len(resolvedLocks) > 0 {
		ch.minCommitTSPushed.Update(resolvedLocks)
		return 0, nil
	}
	return msBeforeTxnExpired, nil
}
// SendReqCtx wraps the SendReqCtx function and use the resolved lock result in the kvrpcpb.Context.
// A non-empty directStoreAddr presets the sender's store address; the
// address actually used is returned so callers can remember it.
func (ch *clientHelper) SendReqCtx(bo *Backoffer, req *tikvrpc.Request, regionID RegionVerID, timeout time.Duration, sType kv.StoreType, directStoreAddr string) (*tikvrpc.Response, *RPCContext, string, error) {
	sender := NewRegionRequestSender(ch.RegionCache, ch.Client)
	if len(directStoreAddr) > 0 {
		sender.storeAddr = directStoreAddr
	}
	sender.Stats = ch.Stats
	// Attach the already-resolved locks so the server can skip them.
	req.Context.ResolvedLocks = ch.minCommitTSPushed.Get()
	resp, ctx, err := sender.SendReqCtx(bo, req, regionID, timeout, sType)
	return resp, ctx, sender.storeAddr, err
}
// Thresholds (milliseconds) above which the corresponding detail is
// included in the slow-task log line built by logTimeCopTask.
const (
	minLogBackoffTime = 100
	minLogKVProcessTime = 100
	minLogKVWaitTime = 200
)
// logTimeCopTask logs a [TIME_COP_PROCESS] line for a slow coprocessor
// task, appending backoff, process-time and scan-detail sections only when
// they exceed their respective minLog* thresholds. When wait time dominates
// over process time the tag is rewritten to TIME_COP_WAIT.
func (worker *copIteratorWorker) logTimeCopTask(costTime time.Duration, task *copTask, bo *Backoffer, resp *tikvrpc.Response) {
	logStr := fmt.Sprintf("[TIME_COP_PROCESS] resp_time:%s txnStartTS:%d region_id:%d store_addr:%s", costTime, worker.req.StartTs, task.region.id, task.storeAddr)
	if bo.totalSleep > minLogBackoffTime {
		backoffTypes := strings.Replace(fmt.Sprintf("%v", bo.types), " ", ",", -1)
		logStr += fmt.Sprintf(" backoff_ms:%d backoff_types:%s", bo.totalSleep, backoffTypes)
	}
	var detail *kvrpcpb.ExecDetails
	if resp.Resp != nil {
		switch r := resp.Resp.(type) {
		case *coprocessor.Response:
			detail = r.ExecDetails
		case *tikvrpc.CopStreamResponse:
			// streaming request returns io.EOF, so the first CopStreamResponse.Response maybe nil.
			if r.Response != nil {
				detail = r.Response.ExecDetails
			}
		default:
			panic("unreachable")
		}
	}
	if detail != nil && detail.HandleTime != nil {
		processMs := detail.HandleTime.ProcessMs
		waitMs := detail.HandleTime.WaitMs
		if processMs > minLogKVProcessTime {
			logStr += fmt.Sprintf(" kv_process_ms:%d", processMs)
			if detail.ScanDetail != nil {
				logStr = appendScanDetail(logStr, "write", detail.ScanDetail.Write)
				logStr = appendScanDetail(logStr, "data", detail.ScanDetail.Data)
				logStr = appendScanDetail(logStr, "lock", detail.ScanDetail.Lock)
			}
			if detail.ScanDetailV2 != nil {
				logStr += fmt.Sprintf(" processed versions: %d", detail.ScanDetailV2.ProcessedVersions)
				logStr += fmt.Sprintf(" total versions: %d", detail.ScanDetailV2.TotalVersions)
				logStr += fmt.Sprintf(" delete skipped count: %d", detail.ScanDetailV2.RocksdbDeleteSkippedCount)
				logStr += fmt.Sprintf(" key skipped count: %d", detail.ScanDetailV2.RocksdbKeySkippedCount)
				logStr += fmt.Sprintf(" cache hit count: %d", detail.ScanDetailV2.RocksdbBlockCacheHitCount)
				logStr += fmt.Sprintf(" read count: %d", detail.ScanDetailV2.RocksdbBlockReadCount)
				logStr += fmt.Sprintf(" read byte: %d", detail.ScanDetailV2.RocksdbBlockReadByte)
			}
		}
		if waitMs > minLogKVWaitTime {
			logStr += fmt.Sprintf(" kv_wait_ms:%d", waitMs)
			if processMs <= minLogKVProcessTime {
				logStr = strings.Replace(logStr, "TIME_COP_PROCESS", "TIME_COP_WAIT", 1)
			}
		}
	}
	logutil.Logger(bo.ctx).Info(logStr)
}
// appendScanDetail appends the total/processed scan counters for one
// column family to logStr; a nil scanInfo leaves logStr unchanged.
func appendScanDetail(logStr string, columnFamily string, scanInfo *kvrpcpb.ScanInfo) string {
	if scanInfo == nil {
		return logStr
	}
	return logStr + fmt.Sprintf(" scan_total_%s:%d scan_processed_%s:%d",
		columnFamily, scanInfo.Total, columnFamily, scanInfo.Processed)
}
// handleCopStreamResult consumes a streaming coprocessor response, handing
// each packet to handleCopResponse. On a recv error it backs off once and
// rebuilds the remaining tasks from the last successfully received range;
// io.EOF marks normal end of stream.
func (worker *copIteratorWorker) handleCopStreamResult(bo *Backoffer, rpcCtx *RPCContext, stream *tikvrpc.CopStreamResponse, task *copTask, ch chan<- *copResponse, costTime time.Duration) ([]*copTask, error) {
	defer stream.Close()
	var resp *coprocessor.Response
	var lastRange *coprocessor.KeyRange
	resp = stream.Response
	if resp == nil {
		// streaming request returns io.EOF, so the first Response is nil.
		return nil, nil
	}
	for {
		remainedTasks, err := worker.handleCopResponse(bo, rpcCtx, &copResponse{pbResp: resp}, nil, nil, task, ch, lastRange, costTime)
		if err != nil || len(remainedTasks) != 0 {
			return remainedTasks, errors.Trace(err)
		}
		resp, err = stream.Recv()
		if err != nil {
			if errors.Cause(err) == io.EOF {
				return nil, nil
			}
			if err1 := bo.Backoff(boTiKVRPC, errors.Errorf("recv stream response error: %v, task: %s", err, task)); err1 != nil {
				return nil, errors.Trace(err)
			}
			// No coprocessor.Response for network error, rebuild task based on the last success one.
			if errors.Cause(err) == context.Canceled {
				logutil.BgLogger().Info("stream recv timeout", zap.Error(err))
			} else {
				logutil.BgLogger().Info("stream unknown error", zap.Error(err))
			}
			return worker.buildCopTasksFromRemain(bo, lastRange, task)
		}
		// Remember the range of the last successful packet for retry.
		if resp.Range != nil {
			lastRange = resp.Range
		}
	}
}
// handleCopResponse checks coprocessor Response for region split and lock,
// returns more tasks when that happens, or handles the response if no error.
// if we're handling streaming coprocessor response, lastRange is the range of last
// successful response, otherwise it's nil.
func (worker *copIteratorWorker) handleCopResponse(bo *Backoffer, rpcCtx *RPCContext, resp *copResponse, cacheKey []byte, cacheValue *coprCacheValue, task *copTask, ch chan<- *copResponse, lastRange *coprocessor.KeyRange, costTime time.Duration) ([]*copTask, error) {
	if regionErr := resp.pbResp.GetRegionError(); regionErr != nil {
		// TiDB-store region errors are surfaced to the reader instead of retried.
		if rpcCtx != nil && task.storeType == kv.TiDB {
			resp.err = errors.Errorf("error: %v", regionErr)
			worker.sendToRespCh(resp, ch, true)
			return nil, nil
		}
		errStr := fmt.Sprintf("region_id:%v, region_ver:%v, store_type:%s, peer_addr:%s, error:%s",
			task.region.id, task.region.ver, task.storeType.Name(), task.storeAddr, regionErr.String())
		if err := bo.Backoff(BoRegionMiss, errors.New(errStr)); err != nil {
			return nil, errors.Trace(err)
		}
		// We may meet RegionError at the first packet, but not during visiting the stream.
		return buildCopTasks(bo, worker.store.regionCache, task.ranges, worker.req)
	}
	if lockErr := resp.pbResp.GetLocked(); lockErr != nil {
		logutil.BgLogger().Debug("coprocessor encounters",
			zap.Stringer("lock", lockErr))
		msBeforeExpired, err1 := worker.ResolveLocks(bo, worker.req.StartTs, []*Lock{NewLock(lockErr)})
		if err1 != nil {
			return nil, errors.Trace(err1)
		}
		// Back off until the lock may expire, then retry the remaining ranges.
		if msBeforeExpired > 0 {
			if err := bo.BackoffWithMaxSleep(boTxnLockFast, int(msBeforeExpired), errors.New(lockErr.String())); err != nil {
				return nil, errors.Trace(err)
			}
		}
		return worker.buildCopTasksFromRemain(bo, lastRange, task)
	}
	if otherErr := resp.pbResp.GetOtherError(); otherErr != "" {
		err := errors.Errorf("other error: %s", otherErr)
		logutil.BgLogger().Warn("other error",
			zap.Uint64("txnStartTS", worker.req.StartTs),
			zap.Uint64("regionID", task.region.id),
			zap.String("storeAddr", task.storeAddr),
			zap.Error(err))
		return nil, errors.Trace(err)
	}
	// When the request is using streaming API, the `Range` is not nil.
	if resp.pbResp.Range != nil {
		resp.startKey = resp.pbResp.Range.Start
	} else if task.ranges != nil && task.ranges.len() > 0 {
		resp.startKey = task.ranges.at(0).StartKey
	}
	if resp.detail == nil {
		resp.detail = new(CopRuntimeStats)
	}
	// Hand the accumulated RPC stats to this response and reset them for
	// the next task.
	resp.detail.Stats = worker.Stats
	worker.Stats = nil
	resp.detail.BackoffTime = time.Duration(bo.totalSleep) * time.Millisecond
	resp.detail.BackoffSleep = make(map[string]time.Duration, len(bo.backoffTimes))
	resp.detail.BackoffTimes = make(map[string]int, len(bo.backoffTimes))
	for backoff := range bo.backoffTimes {
		backoffName := backoff.String()
		resp.detail.BackoffTimes[backoffName] = bo.backoffTimes[backoff]
		resp.detail.BackoffSleep[backoffName] = time.Duration(bo.backoffSleepMS[backoff]) * time.Millisecond
	}
	if rpcCtx != nil {
		resp.detail.CalleeAddress = rpcCtx.Addr
	}
	resp.respTime = costTime
	if pbDetails := resp.pbResp.ExecDetails; pbDetails != nil {
		if handleTime := pbDetails.HandleTime; handleTime != nil {
			resp.detail.WaitTime = time.Duration(handleTime.WaitMs) * time.Millisecond
			resp.detail.ProcessTime = time.Duration(handleTime.ProcessMs) * time.Millisecond
		}
		// Prefer ScanDetailV2 when present; fall back to the legacy ScanDetail.
		if scanDetailV2 := pbDetails.ScanDetailV2; scanDetailV2 != nil {
			copDetail := &execdetails.CopDetails{
				ProcessedKeys:             int64(scanDetailV2.ProcessedVersions),
				TotalKeys:                 int64(scanDetailV2.TotalVersions),
				RocksdbDeleteSkippedCount: scanDetailV2.RocksdbDeleteSkippedCount,
				RocksdbKeySkippedCount:    scanDetailV2.RocksdbKeySkippedCount,
				RocksdbBlockCacheHitCount: scanDetailV2.RocksdbBlockCacheHitCount,
				RocksdbBlockReadCount:     scanDetailV2.RocksdbBlockReadCount,
				RocksdbBlockReadByte:      scanDetailV2.RocksdbBlockReadByte,
			}
			resp.detail.CopDetail = copDetail
		} else if scanDetail := pbDetails.ScanDetail; scanDetail != nil {
			if scanDetail.Write != nil {
				resp.detail.CopDetail = &execdetails.CopDetails{
					ProcessedKeys: scanDetail.Write.Processed,
					TotalKeys:     scanDetail.Write.Total,
				}
			}
		}
	}
	if resp.pbResp.IsCacheHit {
		if cacheValue == nil {
			return nil, errors.New("Internal error: received illegal TiKV response")
		}
		// Cache hit and is valid: use cached data as response data and we don't update the cache.
		data := make([]byte, len(cacheValue.Data))
		copy(data, cacheValue.Data)
		resp.pbResp.Data = data
		resp.detail.CoprCacheHit = true
	} else {
		// Cache not hit or cache hit but not valid: update the cache if the response can be cached.
		if cacheKey != nil && resp.pbResp.CanBeCached && resp.pbResp.CacheLastVersion > 0 {
			if worker.store.coprCache.CheckAdmission(resp.pbResp.Data.Size(), resp.detail.ProcessTime) {
				data := make([]byte, len(resp.pbResp.Data))
				copy(data, resp.pbResp.Data)
				newCacheValue := coprCacheValue{
					Data:              data,
					TimeStamp:         worker.req.StartTs,
					RegionID:          task.region.id,
					RegionDataVersion: resp.pbResp.CacheLastVersion,
				}
				worker.store.coprCache.Set(cacheKey, &newCacheValue)
			}
		}
	}
	worker.sendToRespCh(resp, ch, true)
	return nil, nil
}
// CopRuntimeStats contains execution detail information.
type CopRuntimeStats struct {
	execdetails.ExecDetails
	RegionRequestRuntimeStats
	// CoprCacheHit reports whether this response was served from the
	// coprocessor cache instead of being computed by the store.
	CoprCacheHit bool
}
// handleTiDBSendReqErr converts a send failure against a TiDB store into a
// SelectResponse carrying a warning, so the query degrades gracefully
// instead of failing. The warning response is delivered through ch.
func (worker *copIteratorWorker) handleTiDBSendReqErr(err error, task *copTask, ch chan<- *copResponse) error {
	errCode := errno.ErrUnknown
	errMsg := err.Error()
	if terror.ErrorEqual(err, ErrTiKVServerTimeout) {
		errCode = errno.ErrTiKVServerTimeout
		errMsg = "TiDB server timeout, address is " + task.storeAddr
	}
	selResp := tipb.SelectResponse{
		Warnings: []*tipb.Error{
			{
				Code: int32(errCode),
				Msg:  errMsg,
			},
		},
	}
	data, err := proto.Marshal(&selResp)
	if err != nil {
		return errors.Trace(err)
	}
	resp := &copResponse{
		pbResp: &coprocessor.Response{
			Data: data,
		},
		detail: &CopRuntimeStats{},
	}
	worker.sendToRespCh(resp, ch, true)
	return nil
}
// buildCopTasksFromRemain rebuilds coprocessor tasks for the part of the
// task's ranges that has not been consumed yet. For streaming requests
// with a known lastRange, only the unconsumed remainder is retried;
// otherwise the whole range set is rebuilt.
func (worker *copIteratorWorker) buildCopTasksFromRemain(bo *Backoffer, lastRange *coprocessor.KeyRange, task *copTask) ([]*copTask, error) {
	remainedRanges := task.ranges
	if worker.req.Streaming && lastRange != nil {
		remainedRanges = worker.calculateRemain(task.ranges, lastRange, worker.req.Desc)
	}
	return buildCopTasks(bo, worker.store.regionCache, remainedRanges, worker.req)
}
// calculateRemain splits the input ranges at the boundary of the last
// successfully streamed packet and returns the half that still needs to be
// fetched. It's used in streaming API, to calculate which range is
// consumed and what needs to be retry.
// For example:
// ranges: [r1 --> r2) [r3 --> r4)
// split:      [s1   -->   s2)
// In normal scan order, all data before s1 is consumed, so the remain ranges should be [s1 --> r2) [r3 --> r4)
// In reverse scan order, all data after s2 is consumed, so the remain ranges should be [r1 --> r2) [r3 --> s2)
func (worker *copIteratorWorker) calculateRemain(ranges *copRanges, split *coprocessor.KeyRange, desc bool) *copRanges {
	var remain *copRanges
	if desc {
		// Reverse scan: everything at or after split.End has been consumed.
		remain, _ = ranges.split(split.End)
	} else {
		// Forward scan: everything before split.Start has been consumed.
		_, remain = ranges.split(split.Start)
	}
	return remain
}
// Close shuts the iterator down: it closes finishCh exactly once (guarded
// by the CAS on it.closed), cancels outstanding RPCs, releases the OOM
// action, and waits for all worker goroutines to exit. Always returns nil.
func (it *copIterator) Close() error {
	if atomic.CompareAndSwapUint32(&it.closed, 0, 1) {
		close(it.finishCh)
	}
	it.rpcCancel.CancelAll()
	it.actionOnExceed.close()
	it.wg.Wait()
	return nil
}
// rateLimit is a counting semaphore built on a buffered channel: every
// in-flight task occupies one slot of token.
type rateLimit struct {
	token chan struct{}
}

// newRateLimit returns a rateLimit allowing at most n concurrent holders.
func newRateLimit(n int) *rateLimit {
	return &rateLimit{token: make(chan struct{}, n)}
}

// getToken acquires one slot, blocking until a slot is free. It returns
// exit=true without acquiring anything if done fires first.
func (r *rateLimit) getToken(done <-chan struct{}) (exit bool) {
	select {
	case r.token <- struct{}{}:
	case <-done:
		exit = true
	}
	return
}

// putToken releases a previously acquired slot. Releasing without a
// matching getToken is a programming error and panics.
func (r *rateLimit) putToken() {
	select {
	case <-r.token:
	default:
		panic("put a redundant token")
	}
}
// copErrorResponse returns error when calling Next()
type copErrorResponse struct{ error }

// Next always fails with the wrapped error.
func (it copErrorResponse) Next(ctx context.Context) (kv.ResultSubset, error) {
	return nil, it.error
}

// Close is a no-op; there are no resources to release.
func (it copErrorResponse) Close() error {
	return nil
}
// rateLimitAction an OOM Action which is used to control the token if OOM triggered. The token number should be
// set on initial. Each time the Action is triggered, one token would be destroyed. If the count of the token is less
// than 2, the action would be delegated to the fallback action.
type rateLimitAction struct {
	// enabled indicates whether the rateLimitAction is permitted to Action. 1 means permitted, 0 denied.
	enabled        uint32
	fallbackAction memory.ActionOnExceed
	// totalTokenNum indicates the total token at initial
	totalTokenNum uint
	// cond bundles the condition variable with all state it guards; every
	// field below must only be touched while holding cond.L.
	cond          struct {
		*sync.Cond
		// exceeded indicates whether have encountered OOM situation.
		exceeded bool
		// remainingTokenNum indicates the count of tokens which still exists
		remainingTokenNum uint
		// isTokenDestroyed indicates whether there is one token has been isTokenDestroyed after Action been triggered
		isTokenDestroyed bool
		once             sync.Once
		// waitingWorkerCnt indicates the total count of workers which is under condition.Waiting
		waitingWorkerCnt uint
		// triggerCountForTest indicates the total count of the rateLimitAction's Action being executed
		triggerCountForTest uint
	}
}
// newRateLimitAction builds a rateLimitAction with the given token budget.
// All tokens start as remaining; exceeded/isTokenDestroyed start false.
// Note the action starts disabled (enabled == 0) until setEnabled(true).
func newRateLimitAction(totalTokenNumber uint, cond *sync.Cond) *rateLimitAction {
	return &rateLimitAction{
		totalTokenNum: totalTokenNumber,
		cond: struct {
			*sync.Cond
			exceeded bool
			remainingTokenNum uint
			isTokenDestroyed bool
			once sync.Once
			waitingWorkerCnt uint
			triggerCountForTest uint
		}{
			Cond: cond,
			exceeded: false,
			remainingTokenNum: totalTokenNumber,
			once: sync.Once{},
		},
	}
}
// Action implements ActionOnExceed.Action
// It fires at most once per "round" (guarded by cond.once): with fewer
// than 2 tokens left it disables itself and delegates to the fallback
// action; otherwise it marks exceeded so the next finishing worker
// destroys a token (see destroyTokenIfNeeded).
func (e *rateLimitAction) Action(t *memory.Tracker) {
	failpoint.Inject("testRateLimitActionDisable", func(val failpoint.Value) {
		if val.(bool) {
			e.setEnabled(false)
		}
	})
	if !e.isEnabled() {
		if e.fallbackAction != nil {
			e.fallbackAction.Action(t)
		}
		return
	}
	e.conditionLock()
	defer e.conditionUnlock()
	e.cond.once.Do(func() {
		if e.cond.remainingTokenNum < 2 {
			e.setEnabled(false)
			logutil.BgLogger().Info("memory exceed quota, rateLimitAction delegate to fallback action",
				zap.Uint("total token count", e.totalTokenNum))
			if e.fallbackAction != nil {
				e.fallbackAction.Action(t)
			}
			return
		}
		failpoint.Inject("testRateLimitActionMockConsumeAndAssert", func(val failpoint.Value) {
			if val.(bool) {
				if e.cond.triggerCountForTest+e.cond.remainingTokenNum != e.totalTokenNum {
					panic("triggerCount + remainingTokenNum not equal to totalTokenNum")
				}
				if e.cond.waitingWorkerCnt > 0 {
					panic("waitingWorkerCnt not equal to 0")
				}
			}
		})
		logutil.BgLogger().Info("memory exceeds quota, destroy one token now.",
			zap.Int64("consumed", t.BytesConsumed()),
			zap.Int64("quota", t.GetBytesLimit()),
			zap.Uint("total token count", e.totalTokenNum),
			zap.Uint("remaining token count", e.cond.remainingTokenNum))
		e.cond.isTokenDestroyed = false
		e.cond.exceeded = true
		e.cond.triggerCountForTest++
	})
}
// SetLogHook implements ActionOnExceed.SetLogHook. It is a no-op:
// rateLimitAction does its own logging in Action.
func (e *rateLimitAction) SetLogHook(hook func(uint64)) {
}

// SetFallback implements ActionOnExceed.SetFallback
func (e *rateLimitAction) SetFallback(a memory.ActionOnExceed) {
	e.fallbackAction = a
}
// broadcastIfNeeded will broadcast the condition to recover all suspended workers when exceeded is enabled
// and one token have already been destroyed.
// It is called from copIterator.Next with needed=true when the response
// channels look drained; it first waits for a worker to actually destroy
// its token before clearing exceeded and waking everyone up.
func (e *rateLimitAction) broadcastIfNeeded(needed bool) {
	if !needed {
		return
	}
	e.conditionLock()
	defer e.conditionUnlock()
	// Nothing to do unless OOM was signalled and some worker is suspended.
	if !e.cond.exceeded || e.cond.waitingWorkerCnt < 1 {
		return
	}
	for !e.cond.isTokenDestroyed {
		e.cond.Wait()
	}
	e.cond.exceeded = false
	e.cond.Broadcast()
}
// destroyTokenIfNeeded will check the `exceed` flag after copWorker finished one task.
// If the exceed flag is true and there is no token been destroyed before, one token will be destroyed,
// or the token would be return back.
func (e *rateLimitAction) destroyTokenIfNeeded(returnToken func()) {
	e.conditionLock()
	defer e.conditionUnlock()
	if !e.cond.exceeded {
		returnToken()
		return
	}
	// If actionOnExceed has been triggered and there is no token have been destroyed before,
	// destroy one token.
	if !e.cond.isTokenDestroyed {
		e.cond.remainingTokenNum = e.cond.remainingTokenNum - 1
		e.cond.isTokenDestroyed = true
		// Wake broadcastIfNeeded, which waits for isTokenDestroyed.
		e.cond.Broadcast()
	} else {
		returnToken()
	}
	// we suspend worker when `exceeded` is true until being notified by `broadcastIfNeeded`
	for e.cond.exceeded {
		e.cond.waitingWorkerCnt++
		e.cond.Wait()
		e.cond.waitingWorkerCnt--
	}
	// only when all the waiting workers have been resumed, the Action could be initialized again.
	if e.cond.waitingWorkerCnt < 1 {
		e.cond.once = sync.Once{}
	}
}
// conditionLock acquires the mutex guarding all cond.* state.
func (e *rateLimitAction) conditionLock() {
	e.cond.L.Lock()
}

// conditionUnlock releases the mutex guarding all cond.* state.
func (e *rateLimitAction) conditionUnlock() {
	e.cond.L.Unlock()
}
// close permanently disables the action and resets the condition state so
// any worker suspended in destroyTokenIfNeeded is released.
func (e *rateLimitAction) close() {
	e.setEnabled(false)
	e.conditionLock()
	defer e.conditionUnlock()
	e.cond.exceeded = false
	e.cond.isTokenDestroyed = true
	e.cond.waitingWorkerCnt = 0
	// broadcast the signal in order not to leak worker goroutine if it is being suspended
	e.cond.Broadcast()
}
// setEnabled flips the enabled flag; stored atomically because Action may
// run on a different goroutine than the iterator.
func (e *rateLimitAction) setEnabled(enabled bool) {
	var v uint32
	if enabled {
		v = 1
	}
	atomic.StoreUint32(&e.enabled, v)
}

// isEnabled reports whether the action is currently permitted to fire.
func (e *rateLimitAction) isEnabled() bool {
	return atomic.LoadUint32(&e.enabled) != 0
}
// maxIDHandler tracks the largest task ID handed out so far, guarded by
// its embedded mutex.
type maxIDHandler struct {
	sync.Mutex
	maxID uint32
}

// getMaxID returns the current maximum task ID.
func (h *maxIDHandler) getMaxID() uint32 {
	h.Lock()
	defer h.Unlock()
	return h.maxID
}

// setMaxIDIfLarger raises the recorded maximum to newID when newID is
// greater; smaller or equal values are ignored.
func (h *maxIDHandler) setMaxIDIfLarger(newID uint32) {
	h.Lock()
	defer h.Unlock()
	if h.maxID < newID {
		h.maxID = newID
	}
}
| XuHuaiyu/tidb | store/tikv/coprocessor.go | GO | apache-2.0 | 44,916 |
/*******************************************************************************
* See the NOTICE file distributed with this work for additional information
* regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package hr.fer.zemris.vhdllab.entity;
import java.util.Date;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.persistence.UniqueConstraint;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.builder.ToStringBuilder;
import org.hibernate.validator.Length;
import org.hibernate.validator.NotNull;
/**
 * Persistent log entry uploaded by a VHDLLab client. Each row is owned by a
 * user (via {@link OwnedEntity}) and made unique per user and creation
 * timestamp by the table-level unique constraint on (user_id, created_on).
 */
@Entity
@Table(name = "client_logs", uniqueConstraints = { @UniqueConstraint(columnNames = {
        "user_id", "created_on" }) })
public class ClientLog extends OwnedEntity {
    private static final long serialVersionUID = 2460564318284652078L;
    // Raw log payload; capped at ~16MB by the validator.
    @NotNull
    @Length(max = 16000000) // ~ 16MB
    private String data;
    // Creation timestamp; immutable after insert (updatable = false) and part
    // of the per-user unique constraint.
    @NotNull
    @Column(name = "created_on", updatable = false)
    @Temporal(TemporalType.TIMESTAMP)
    private Date createdOn;
    /** Default constructor required by JPA. */
    public ClientLog() {
        super();
    }
    /** Creates a log entry with the given data and no owner set. */
    public ClientLog(String data) {
        this(null, data);
    }
    /**
     * Creates a log entry for the given user. The entity name is derived from
     * the creation timestamp so each entry gets a distinct, human-readable name.
     */
    public ClientLog(String userId, String data) {
        super(userId, null);
        setData(data);
        Date timestamp = new Date();
        setCreatedOn(timestamp);
        setName(timestamp.toString());
    }
    /** Copy constructor; copies data and creation timestamp from clone. */
    public ClientLog(ClientLog clone) {
        super(clone);
        setData(clone.data);
        setCreatedOn(clone.createdOn);
    }
    public String getData() {
        return data;
    }
    public void setData(String data) {
        this.data = data;
    }
    public Date getCreatedOn() {
        return createdOn;
    }
    public void setCreatedOn(Date createdOn) {
        this.createdOn = createdOn;
    }
    @Override
    public String toString() {
        // Log the data length rather than the (potentially huge) data itself.
        return new ToStringBuilder(this)
                .appendSuper(super.toString())
                .append("createdOn", createdOn)
                .append("dataLength", StringUtils.length(data))
                .toString();
    }
}
| mbezjak/vhdllab | vhdllab-common/src/main/java/hr/fer/zemris/vhdllab/entity/ClientLog.java | Java | apache-2.0 | 2,807 |
#!/usr/bin/python
from __future__ import print_function
from guild.actor import Actor, actor_method, process_method, late_bind
class Dog(Actor):
    """Minimal guild actor demonstrating the three method decorators:
    an input (actor_method), a process step, and a late-bound output.
    """
    @actor_method # Input - triggered by data coming in
    def woof(self):
        """Print a woof; invoked asynchronously via the actor's mailbox."""
        print("Woof", self)
    @process_method # Process - triggered each time it's run
    def process(self):
        # Intentionally does nothing; subclasses override this.
        #print(" ", end="")
        pass
    @late_bind # Output
    def produce(self):
        pass
class Shitzu(Dog):
    """A Dog that counts its process() runs and stops itself after 20."""

    def __init__(self):
        self.count = 0
        # Fix: the first argument to super() must be the class itself.
        # ``super(Dog, self)`` starts the MRO lookup *after* Dog, silently
        # skipping any initialisation Dog (or a future Dog.__init__) defines.
        super(Shitzu, self).__init__()

    @process_method
    def process(self):
        """Run once per scheduler tick; stop after 20 iterations.

        Returning False tells the guild scheduler the process loop is done.
        """
        self.count += 1
        print("I don't go meow", self.count)
        if self.count >= 20:
            self.stop()
            return False
if __name__ == "__main__":
    import time
    # Demo: start both actors, poke each with woof(), let Shitzu's process
    # loop run to completion (it stops itself after 20 ticks), then shut
    # the remaining actor down cleanly.
    dog = Dog()
    shitzu = Shitzu()
    dog.start()
    shitzu.start()
    dog.woof()
    shitzu.woof()
    # Brief pauses give the actor threads time to drain their mailboxes.
    time.sleep(0.1)
    shitzu.join()
    time.sleep(0.1)
    dog.stop()
    dog.join()
| sparkslabs/guild | examples/dogs_go_woof_actors.py | Python | apache-2.0 | 988 |
import os
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
from django.test import TestCase
from mock import patch, Mock
import re
import rdflib
from rdflib import RDF
from urllib import urlencode, unquote
from eulxml.xmlmap import load_xmlobject_from_file, XmlObject
from eulfedora.server import Repository
from piffle import iiif
from readux.annotations.models import Annotation
from readux.books import abbyyocr
from readux.books.models import SolrVolume, Volume, VolumeV1_0, Book, BIBO, \
DC, Page, PageV1_1
FIXTURE_DIR = os.path.join(settings.BASE_DIR, 'readux', 'books', 'fixtures')
class SolrVolumeTest(TestCase):
    # primarily testing BaseVolume logic here
    def test_properties(self):
        """Control key, volume number, and noid are parsed from label/pid;
        volume 'V.0' is suppressed and a missing volume yields ''."""
        ocm = 'ocn460678076'
        vol = 'V.1'
        noid = '1234'
        volume = SolrVolume(label='%s_%s' % (ocm, vol),
                            pid='testpid:%s' % noid)
        self.assertEqual(ocm, volume.control_key)
        self.assertEqual(vol, volume.volume)
        self.assertEqual(noid, volume.noid)
        # don't display volume zero
        vol = 'V.0'
        volume.data['label'] = '%s_%s' % (ocm, vol)
        self.assertEqual('', volume.volume)
        # should also work without volume info
        volume.data['label'] = ocm
        self.assertEqual(ocm, volume.control_key)
        self.assertEqual('', volume.volume)
    def test_fulltext_absolute_url(self):
        """Absolute fulltext URL is https, ends with the books:text route,
        and uses the current Django Site domain."""
        volume = SolrVolume(label='ocn460678076_V.1',
                            pid='testpid:1234')
        url = volume.fulltext_absolute_url()
        self.assert_(url.startswith('https://'))
        self.assert_(url.endswith(reverse('books:text', kwargs={'pid': volume.pid})))
        current_site = Site.objects.get_current()
        self.assert_(current_site.domain in url)
    def test_voyant_url(self):
        """Voyant URL embeds pid as corpus and fulltext URL as archive;
        the English stopword list is only applied for English volumes."""
        # Volume with English Lang
        volume1 = SolrVolume(label='ocn460678076_V.1',
                             pid='testpid:1234', language='eng')
        url = volume1.voyant_url()
        self.assert_(urlencode({'corpus': volume1.pid}) in url,
                     'voyant url should include volume pid as corpus identifier')
        self.assert_(urlencode({'archive': volume1.fulltext_absolute_url()}) in url,
                     'voyant url should include volume fulltext url as archive')
        self.assert_(urlencode({'stopList': 'stop.en.taporware.txt'}) in url,
                     'voyant url should not include english stopword list when volume is in english')
        # volume language is French
        volume2 = SolrVolume(label='ocn460678076_V.1',
                             pid='testpid:1235', language='fra')
        url_fra = volume2.voyant_url()
        self.assert_(urlencode({'stopList': 'stop.en.taporware.txt'}) not in url_fra,
                     'voyant url should not include english stopword list when language is not english')
    def test_pdf_url(self):
        """PDF URL is the books:pdf route, with a #page fragment appended
        when the volume has a start_page."""
        # no start page set
        vol = SolrVolume(pid='vol:123')
        pdf_url = vol.pdf_url()
        self.assertEqual(unquote(reverse('books:pdf', kwargs={'pid': vol.pid})), pdf_url)
        # start page
        vol = SolrVolume(pid='vol:123', start_page=6)
        pdf_url = vol.pdf_url()
        self.assert_(pdf_url.startswith(unquote(reverse('books:pdf', kwargs={'pid': vol.pid}))))
        self.assert_('#page=6' in pdf_url)
class VolumeTest(TestCase):
    """Tests for :class:`Volume` annotation lookups and page/TEI predicates."""
    # borrowing fixture & test accounts from readux.annotations.tests
    fixtures = ['test_annotation_data.json']
    user_credentials = {
        'user': {'username': 'testuser', 'password': 'testing'},
        'superuser': {'username': 'testsuper', 'password': 'superme'}
    }
    def test_annotations(self):
        # find annotations associated with a volume, optionally filtered
        # by user
        User = get_user_model()
        testuser = User.objects.create(username='tester')
        testadmin = User.objects.create(username='super', is_superuser=True)
        mockapi = Mock()
        vol = Volume(mockapi, 'vol:1')
        # create annotations to test finding
        p1 = Annotation.objects.create(user=testuser, text='testuser p1',
            uri=reverse('books:page', kwargs={'vol_pid': vol.pid, 'pid': 'p:1'}),
            volume_uri=vol.absolute_url)
        p2 = Annotation.objects.create(user=testuser, text='testuser p2',
            uri=reverse('books:page', kwargs={'vol_pid': vol.pid, 'pid': 'p:2'}),
            volume_uri=vol.absolute_url)
        p3 = Annotation.objects.create(user=testuser, text='testuser p3',
            uri=reverse('books:page', kwargs={'vol_pid': vol.pid, 'pid': 'p:3'}),
            volume_uri=vol.absolute_url)
        # annotation on a *different* volume; must never show up in vol's results
        v2p1 = Annotation.objects.create(user=testuser, text='testuser vol2 p1',
            uri=reverse('books:page', kwargs={'vol_pid': 'vol:2', 'pid': 'p:1'}),
            volume_uri='http://example.com/books/vol:2/')
        sup2 = Annotation.objects.create(user=testadmin, text='testsuper p2',
            uri=reverse('books:page', kwargs={'vol_pid': vol.pid, 'pid': 'p:2'}),
            volume_uri=vol.absolute_url)
        annotations = vol.annotations()
        self.assertEqual(4, annotations.count())
        self.assert_(v2p1 not in annotations)
        # filter by user
        annotations = vol.annotations().visible_to(testuser)
        self.assertEqual(3, annotations.count())
        self.assert_(sup2 not in annotations)
        # superuser sees everyone's annotations
        annotations = vol.annotations().visible_to(testadmin)
        self.assertEqual(4, annotations.count())
        self.assert_(sup2 in annotations)
        # annotation counts per page
        annotation_count = vol.page_annotation_count()
        self.assertEqual(1, annotation_count[p1.uri])
        self.assertEqual(2, annotation_count[p2.uri])
        self.assertEqual(1, annotation_count[p3.uri])
        # by user
        annotation_count = vol.page_annotation_count(testuser)
        self.assertEqual(1, annotation_count[p2.uri])
        annotation_count = vol.page_annotation_count(testadmin)
        self.assertEqual(2, annotation_count[p2.uri])
        # total for a volume
        self.assertEqual(4, vol.annotation_count())
        self.assertEqual(3, vol.annotation_count(testuser))
        self.assertEqual(4, vol.annotation_count(testadmin))
        # total for all volumes
        totals = Volume.volume_annotation_count()
        self.assertEqual(1, totals['http://example.com/books/vol:2/'])
        self.assertEqual(4, totals[vol.absolute_url])
        totals = Volume.volume_annotation_count(testuser)
        self.assertEqual(3, totals[vol.absolute_url])
    def test_has_pages(self):
        # has_pages requires more than one page (cover alone doesn't count)
        mockapi = Mock()
        vol = Volume(mockapi, 'vol:1')
        vol.pages = []
        self.assertFalse(vol.has_pages)
        # one page (i.e. cover image) is not enough to count as having pages
        vol.pages = [Mock(spec=Page)]
        self.assertFalse(vol.has_pages)
        vol.pages = [Mock(spec=Page), Mock(spec=Page)]
        self.assertTrue(vol.has_pages)
    def test_has_tei(self):
        # has_tei is true if *any* page has an existing TEI datastream
        mockapi = Mock()
        vol = Volume(mockapi, 'vol:1')
        p1 = Mock(spec=Page)
        p1.tei.exists = False
        p2 = Mock(spec=Page)
        p2.tei.exists = False
        vol.pages = [p1, p2]
        self.assertFalse(vol.has_tei)
        p2.tei.exists = True
        self.assertTrue(vol.has_tei)
class VolumeV1_0Test(TestCase):
    """Tests for :class:`VolumeV1_0` metadata, RDF, indexing, and OCR behavior.

    Uses uningested Fedora objects so no repository connection is required.
    """

    def setUp(self):
        # use uningested objects for testing purposes
        repo = Repository()
        self.vol = repo.get_object(type=VolumeV1_0)
        self.vol.label = 'ocn460678076_V.1'
        self.vol.pid = 'rdxtest:4606'

    def test_ark_uri(self):
        # ark_uri should pick the ARK out of the DC identifier list
        ark_uri = 'http://pid.co/ark:/12345/ba45'
        self.vol.dc.content.identifier_list.extend([ark_uri, 'pid:ba45', 'otherid'])
        self.assertEqual(ark_uri, self.vol.ark_uri)

    def test_rdf_dc(self):
        """rdf_dc_graph should expose volume DC, falling back to book DC."""
        # add metadata to test rdf generated
        ark_uri = 'http://pid.co/ark:/12345/ba45'
        self.vol.dc.content.identifier_list.append(ark_uri)
        self.vol.dc.content.title = 'Sunset, a novel'
        self.vol.dc.content.format = 'application/pdf'
        self.vol.dc.content.language = 'eng'
        self.vol.dc.content.rights = 'public domain'
        # NOTE: patching on class instead of instance because related object is a descriptor
        with patch.object(Volume, 'book', new=Mock(spec=Book)) as mockbook:
            mockbook.dc.content.creator_list = ['Author, Joe']
            mockbook.dc.content.date_list = ['1801', '2010']
            mockbook.dc.content.description_list = ['digitized edition', 'mystery novel']
            mockbook.dc.content.publisher = 'Nashville, Tenn. : Barbee & Smith'
            mockbook.dc.content.relation_list = [
                'http://pid.co/ark:/12345/book',
                'http://pid.co/ark:/12345/volpdf'
            ]
            graph = self.vol.rdf_dc_graph()
            lit = rdflib.Literal
            uri = rdflib.URIRef(self.vol.ark_uri)
            self.assert_((uri, RDF.type, BIBO.book) in graph,
                'rdf graph type should be bibo:book')
            self.assert_((uri, DC.title, lit(self.vol.dc.content.title)) in graph,
                'title should be set as dc:title')
            self.assert_((uri, BIBO.volume, lit(self.vol.volume)) in graph,
                'volume label should be set as bibo:volume')
            self.assert_((uri, DC['format'], lit(self.vol.dc.content.format)) in graph,
                'format should be set as dc:format')
            self.assert_((uri, DC.language, lit(self.vol.dc.content.language)) in graph,
                'language should be set as dc:language')
            self.assert_((uri, DC.rights, lit(self.vol.dc.content.rights)) in graph,
                'rights should be set as dc:rights')
            for rel in self.vol.dc.content.relation_list:
                self.assert_((uri, DC.relation, lit(rel)) in graph,
                    'related item %s should be set as dc:relation' % rel)
            # metadata pulled from book obj because not present in volume
            self.assert_((uri, DC.creator, lit(mockbook.dc.content.creator_list[0])) in graph,
                'creator from book metadata should be set as dc:creator when not present in volume metadata')
            self.assert_((uri, DC.publisher, lit(mockbook.dc.content.publisher)) in graph,
                'publisher from book metadata should be set as dc:publisher when not present in volume metadata')
            # earliest date only
            self.assert_((uri, DC.date, lit('1801')) in graph,
                'earliest date 1801 from book metadata should be set as dc:date when not present in volume metadata')
            for d in mockbook.dc.content.description_list:
                self.assert_((uri, DC.description, lit(d)) in graph,
                    'description from book metadata should be set as dc:description when not present in volume metadata')
            # volume-level metadata should be used when present instead of book
            self.vol.dc.content.creator_list = ['Writer, Jane']
            self.vol.dc.content.date_list = ['1832', '2012']
            self.vol.dc.content.description_list = ['digital edition']
            self.vol.dc.content.publisher = 'So & So Publishers'
            graph = self.vol.rdf_dc_graph()
            self.assert_((uri, DC.creator, lit(self.vol.dc.content.creator_list[0])) in graph,
                'creator from volume metadata should be set as dc:creator when present')
            self.assert_((uri, DC.publisher, lit(self.vol.dc.content.publisher)) in graph,
                'publisher from volume metadata should be set as dc:publisher when present')
            # earliest date *only* should be present
            self.assert_((uri, DC.date, lit('1832')) in graph,
                'earliest date 1832 from volume metadata should be set as dc:date when present')
            for d in self.vol.dc.content.description_list:
                self.assert_((uri, DC.description, lit(d)) in graph,
                    'description from volume metadata should be set as dc:description when present')

    def test_index_data(self):
        """index_data should combine volume, book, page, OCR, and PDF info."""
        self.vol.owner = ''
        self.vol.dc.content.date = 1842
        # NOTE: patching on class instead of instance because related object is a descriptor
        with patch.object(Volume, 'book', new=Mock(spec=Book)) as mockbook:
            mockbook.pid = 'book:123'
            # FIX: a stray trailing comma previously made this pid a 1-tuple
            # ('coll:123',) instead of a string, so the assertions below were
            # only checking that a tuple round-tripped through index_data.
            mockbook.collection.pid = 'coll:123'
            mockbook.collection.short_label = 'Pile O\' Books'
            mockbook.dc.content.creator_list = ['Author, Joe']
            mockbook.dc.content.date_list = ['1801', '2010']
            mockbook.dc.content.description_list = ['digitized edition', 'mystery novel']
            mockbook.dc.content.publisher = 'Nashville, Tenn. : Barbee & Smith'
            mockbook.dc.content.relation_list = [
                'http://pid.co/ark:/12345/book',
                'http://pid.co/ark:/12345/volpdf'
            ]
            mockbook.dc.content.subject_list = []
            data = self.vol.index_data()
            self.assert_('fulltext' not in data,
                'fulltext should not be set in index data when volume has no ocr')
            self.assert_('hasPrimaryImage' not in data,
                'hasPrimaryImage should not be set in index data when volume has no cover')
            self.assertEqual(mockbook.pid, data['book_id'],
                'associated book pid should be set as book id')
            self.assertEqual(mockbook.collection.pid, data['collection_id'],
                'associated collection pid should be set as collection id')
            self.assertEqual(mockbook.collection.short_label, data['collection_label'],
                'associated collection label short label should be set as collection label')
            self.assertEqual(mockbook.dc.content.creator_list, data['creator'],
                'creator should be set from book DC creator')
            self.assertEqual(self.vol.dc.content.date_list, data['date'],
                'date should be set from earliest volume DC date')
            self.assert_('subject' not in data,
                'subject should not be set in index data when book has no subjects')
            self.assertEqual(0, data['page_count'],
                'page count should be set to zero when volume has no pages loaded')
            # test hasPrimaryImage
            mockpage = Mock(spec=Page)
            mockpage.pid = 'page:1234'
            mockpage.uriref = rdflib.URIRef('info:fedora/%s' % mockpage.pid)
            self.vol.primary_image = mockpage
            data = self.vol.index_data()
            self.assertEqual(mockpage.pid, data['hasPrimaryImage'],
                'hasPrimaryImage should be set to cover page pid, when present')
            # test subjects
            mockbook.dc.content.subject_list = ['subj1', 'subj2']
            data = self.vol.index_data()
            self.assertEqual(mockbook.dc.content.subject_list, data['subject'],
                'subject should be set when present in book DC')
            # test full-text
            with patch.object(self.vol, 'ocr') as mockocr:
                mockocr.exists = True
                ocr_xml = load_xmlobject_from_file(os.path.join(FIXTURE_DIR,
                    'abbyyocr_fr8v2.xml'))
                mockocr.content = ocr_xml
                data = self.vol.index_data()
                self.assert_('fulltext' in data,
                    'fulltext should be set in index data when OCR is available')
            # use mock to test pdf size indexing
            with patch.object(self.vol, 'pdf') as mockpdf:
                mockpdf.size = 1234567
                data = self.vol.index_data()
                self.assertEqual(mockpdf.size, data['pdf_size'],
                    'pdf_size should be set from pdf size, when available')

    def test_voyant_url(self):
        # NOTE: this test is semi-redundant with the same test for the SolrVolume,
        # but since the method is implemented in BaseVolume and depends on
        # properties set on the subclasses, testing here to ensure it works
        # in both cases
        # no language
        self.vol.pid = 'vol:1234'
        url = self.vol.voyant_url()
        self.assert_(urlencode({'corpus': self.vol.pid}) in url,
            'voyant url should include volume pid as corpus identifier')
        self.assert_(urlencode({'archive': self.vol.fulltext_absolute_url()}) in url,
            'voyant url should include volume fulltext url as archive')
        self.assert_(urlencode({'stopList': 'stop.en.taporware.txt'}) not in url,
            'voyant url should not include english stopword list when volume is not in english')
        # english
        self.vol.dc.content.language = 'eng'
        url = self.vol.voyant_url()
        self.assert_(urlencode({'stopList': 'stop.en.taporware.txt'}) in url,
            'voyant url should include english stopword list when volume is in english')

    def test_get_fulltext(self):
        """get_fulltext should produce plain text from both ABBYY OCR formats."""
        with patch.object(self.vol, 'ocr') as mockocr:
            mockocr.exists = True
            # abbyy finereader v8
            ocr_xml = load_xmlobject_from_file(os.path.join(FIXTURE_DIR,
                'abbyyocr_fr8v2.xml'))
            mockocr.content = ocr_xml
            text = self.vol.get_fulltext()
            # check for arbitrary text content
            self.assert_('In presenting this, the initial volume of the' in text,
                'ocr text content should be present in plain text')
            self.assert_('Now, kind reader, we ask that you do not crit' in text,
                'ocr text content should be present in plain text')
            self.assert_(re.search(r'Baldwin\s+Dellinger\s+Brice', text),
                'table row content should be displayed on a single line')
            # abbyy finereader v6
            ocr_xml = load_xmlobject_from_file(os.path.join(FIXTURE_DIR,
                'abbyyocr_fr6v1.xml'))
            mockocr.content = ocr_xml
            text = self.vol.get_fulltext()
            # check for arbitrary text content
            self.assert_('was late in the autumn, the vines yet kept their leaves,' in text,
                'ocr text content should be present in plain text')
            self.assert_('walked up the steps. The lady had not moved, and made' in text,
                'ocr text content should be present in plain text')
            self.assert_(re.search(r'Modern\.\s+New Standard\.\s+Popular\.', text),
                'table row content should be displayed on a single line')

    def test_ocr_ids(self):
        # patch in fixture ocr content
        with patch.object(self.vol, 'ocr') as mockocr:
            mockocr.exists = True
            ocr_xml = load_xmlobject_from_file(os.path.join(FIXTURE_DIR,
                'abbyyocr_fr8v2.xml'))
            mockocr.content = ocr_xml
            self.assertFalse(self.vol.ocr_has_ids)
            self.vol.add_ocr_ids()
            self.assertTrue(self.vol.ocr_has_ids)
class PageV1_1Test(TestCase):
    """Tests for :class:`PageV1_1` OCR id handling against a METS/ALTO fixture."""
    metsalto_doc = os.path.join(FIXTURE_DIR, 'mets_alto.xml')

    def setUp(self):
        # Load the METS/ALTO fixture fresh for each test.
        self.mets_alto = load_xmlobject_from_file(self.metsalto_doc, XmlObject)

    def test_ocr_ids(self):
        """add_ocr_ids should flip ocr_has_ids from False to True."""
        # Mock stands in for the Fedora API client; no repository calls are made.
        ocr_page = PageV1_1(Mock())
        ocr_page.pid = 'rdxtest:4607'
        with patch.object(ocr_page, 'ocr') as mock_ocr_ds:
            mock_ocr_ds.exists = True
            mock_ocr_ds.content = self.mets_alto
            self.assertFalse(ocr_page.ocr_has_ids)
            ocr_page.add_ocr_ids()
            self.assertTrue(ocr_page.ocr_has_ids)
class AbbyyOCRTestCase(TestCase):
    """Tests for the :mod:`abbyyocr` XML mappings against FineReader 6 v1
    and FineReader 8 v2 fixture documents."""
    fr6v1_doc = os.path.join(FIXTURE_DIR, 'abbyyocr_fr6v1.xml')
    fr8v2_doc = os.path.join(FIXTURE_DIR, 'abbyyocr_fr8v2.xml')
    # language code
    eng = 'EnglishUnitedStates'
    def setUp(self):
        # load one document in each supported FineReader schema version
        self.fr6v1 = load_xmlobject_from_file(self.fr6v1_doc, abbyyocr.Document)
        self.fr8v2 = load_xmlobject_from_file(self.fr8v2_doc, abbyyocr.Document)
    def test_document(self):
        # top-level document properties
        # finereader 6 v1
        self.assertEqual(132, self.fr6v1.page_count)
        self.assertEqual(self.eng, self.fr6v1.language)
        self.assertEqual(self.eng, self.fr6v1.languages)
        self.assert_(self.fr6v1.pages, 'page list should be non-empty')
        self.assertEqual(132, len(self.fr6v1.pages),
                         'number of pages should match page count')
        self.assert_(isinstance(self.fr6v1.pages[0], abbyyocr.Page))
        # finereader 8 v2
        self.assertEqual(186, self.fr8v2.page_count)
        self.assertEqual(self.eng, self.fr8v2.language)
        self.assertEqual(self.eng, self.fr8v2.languages)
        self.assert_(self.fr8v2.pages, 'page list should be non-empty')
        self.assertEqual(186, len(self.fr8v2.pages),
                         'number of pages should match page count')
        self.assert_(isinstance(self.fr8v2.pages[0], abbyyocr.Page))
    def test_page(self):
        """Page dimensions, block-type lists, and paragraph access."""
        # finereader 6 v1
        self.assertEqual(1500, self.fr6v1.pages[0].width)
        self.assertEqual(2174, self.fr6v1.pages[0].height)
        self.assertEqual(300, self.fr6v1.pages[0].resolution)
        # second page has picture block, no text
        self.assertEqual(1, len(self.fr6v1.pages[1].blocks))
        self.assertEqual(1, len(self.fr6v1.pages[1].picture_blocks))
        self.assertEqual(0, len(self.fr6v1.pages[1].text_blocks))
        self.assert_(isinstance(self.fr6v1.pages[1].blocks[0], abbyyocr.Block))
        # fourth page has paragraph text
        self.assert_(self.fr6v1.pages[3].paragraphs)
        self.assert_(isinstance(self.fr6v1.pages[3].paragraphs[0],
                     abbyyocr.Paragraph))
        # finereader 8 v2
        self.assertEqual(2182, self.fr8v2.pages[0].width)
        self.assertEqual(3093, self.fr8v2.pages[0].height)
        self.assertEqual(300, self.fr8v2.pages[0].resolution)
        # first page has multiple text/pic blocks
        self.assert_(self.fr8v2.pages[0].blocks)
        self.assert_(self.fr8v2.pages[0].picture_blocks)
        self.assert_(self.fr8v2.pages[0].text_blocks)
        self.assert_(isinstance(self.fr8v2.pages[0].blocks[0], abbyyocr.Block))
        # first page has paragraph text
        self.assert_(self.fr8v2.pages[0].paragraphs)
        self.assert_(isinstance(self.fr8v2.pages[0].paragraphs[0],
                     abbyyocr.Paragraph))
    def test_block(self):
        """Block type, bounding-box coordinates, and paragraph contents."""
        # finereader 6 v1
        # - basic block attributes
        b = self.fr6v1.pages[1].blocks[0]
        self.assertEqual('Picture', b.type)
        self.assertEqual(144, b.left)
        self.assertEqual(62, b.top)
        self.assertEqual(1358, b.right)
        self.assertEqual(2114, b.bottom)
        # - block with text
        b = self.fr6v1.pages[3].blocks[0]
        self.assert_(b.paragraphs)
        self.assert_(isinstance(b.paragraphs[0], abbyyocr.Paragraph))
        # finereader 8 v2
        b = self.fr8v2.pages[0].blocks[0]
        self.assertEqual('Text', b.type)
        self.assertEqual(282, b.left)
        self.assertEqual(156, b.top)
        self.assertEqual(384, b.right)
        self.assertEqual(228, b.bottom)
        self.assert_(b.paragraphs)
        self.assert_(isinstance(b.paragraphs[0], abbyyocr.Paragraph))
    def test_paragraph_line(self):
        """Line geometry, unicode conversion, and formatted-text segments."""
        # finereader 6 v1
        para = self.fr6v1.pages[3].paragraphs[0]
        # untested: align, left/right/start indent
        self.assert_(para.lines)
        self.assert_(isinstance(para.lines[0], abbyyocr.Line))
        line = para.lines[0]
        self.assertEqual(283, line.baseline)
        self.assertEqual(262, line.left)
        self.assertEqual(220, line.top)
        self.assertEqual(1220, line.right)
        self.assertEqual(294, line.bottom)
        # line text available via unicode
        self.assertEqual(u'MABEL MEREDITH;', unicode(line))
        # also mapped as formatted text (could repeat/segment)
        self.assert_(line.formatted_text) # should be non-empty
        self.assert_(isinstance(line.formatted_text[0], abbyyocr.Formatting))
        self.assertEqual(self.eng, line.formatted_text[0].language)
        self.assertEqual(u'MABEL MEREDITH;', line.formatted_text[0].text) # not normalized
        # finereader 8 v2
        para = self.fr8v2.pages[1].paragraphs[0]
        self.assert_(para.lines)
        self.assert_(isinstance(para.lines[0], abbyyocr.Line))
        line = para.lines[0]
        self.assertEqual(1211, line.baseline)
        self.assertEqual(845, line.left)
        self.assertEqual(1160, line.top)
        self.assertEqual(1382, line.right)
        self.assertEqual(1213, line.bottom)
        self.assertEqual(u'EMORY UNIVERSITY', unicode(line))
        self.assert_(line.formatted_text) # should be non-empty
        self.assert_(isinstance(line.formatted_text[0], abbyyocr.Formatting))
        self.assertEqual(self.eng, line.formatted_text[0].language)
        self.assertEqual(u'EMORY UNIVERSITY', line.formatted_text[0].text)
    def test_frns(self):
        # frns() should expand a path into both fr6v1- and fr8v2-prefixed
        # XPath alternatives joined by |
        self.assertEqual('fr6v1:par|fr8v2:par', abbyyocr.frns('par'))
        self.assertEqual('fr6v1:text/fr6v1:par|fr8v2:text/fr8v2:par',
                         abbyyocr.frns('text/par'))
| emory-libraries/readux | readux/books/tests/models.py | Python | apache-2.0 | 25,398 |
package org.ajp.server.job;
import java.util.concurrent.Callable;
import org.ajp.server.job.JobResult.JobResultStatus;
import org.ajp.server.model.JobRequest;
/**
 * A {@link Callable} unit of work built around a single {@link JobRequest}.
 * Executing the job currently just acknowledges the request and reports
 * success.
 */
public class Job implements Callable<JobResult> {

    /** The request this job processes; assigned once at construction. */
    private final JobRequest request;

    /**
     * Creates a job for the given request.
     *
     * @param jreq the request to process
     */
    public Job(final JobRequest jreq) {
        this.request = jreq;
    }

    /**
     * Runs the job.
     *
     * @return a {@link JobResult} with status {@link JobResultStatus#SUCCESS}
     * @throws Exception declared by {@link Callable}
     */
    @Override
    public JobResult call() throws Exception {
        final JobResult result = new JobResult();
        // The id is read but its value is currently unused (placeholder for
        // real processing); the call is kept so behavior — including a
        // potential NPE on a null request — is unchanged.
        request.getId();
        result.setStatus(JobResultStatus.SUCCESS);
        return result;
    }
}
| kartaa/AsyncJobProcessor | src/main/java/org/ajp/server/job/Job.java | Java | apache-2.0 | 506 |
/* ==========================================================
* bootstrap-affix.js v2.2.2
* http://twitter.github.com/bootstrap/javascript.html#affix
* ==========================================================
* Copyright 2012 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ========================================================== */
// NOTE(review): vendored third-party code (Bootstrap 2.2.2 affix plugin);
// annotated only — do not restyle, to keep diffs against upstream clean.
!function ($) {
  "use strict"; // jshint ;_;
  /* AFFIX CLASS DEFINITION
   * ====================== */
  // Pins ("affixes") an element once the page scrolls past configured
  // offsets, by toggling the affix / affix-top / affix-bottom classes.
  var Affix = function (element, options) {
    this.options = $.extend({}, $.fn.affix.defaults, options)
    // Recheck on every scroll; on click, defer one tick so any
    // click-triggered layout change settles before measuring.
    this.$window = $(window)
      .on('scroll.affix.data-api', $.proxy(this.checkPosition, this))
      .on('click.affix.data-api', $.proxy(function () { setTimeout($.proxy(this.checkPosition, this), 1) }, this))
    this.$element = $(element)
    this.checkPosition()
  }
  // Decide which of the three affix states applies for the current scroll
  // position and swap the element's classes accordingly.
  Affix.prototype.checkPosition = function () {
    if (!this.$element.is(':visible')) return
    var scrollHeight = $(document).height()
      , scrollTop = this.$window.scrollTop()
      , position = this.$element.offset()
      , offset = this.options.offset
      , offsetBottom = offset.bottom
      , offsetTop = offset.top
      , reset = 'affix affix-top affix-bottom'
      , affix
    // offset may be a number (applies to both edges), an object, or
    // functions that compute each edge lazily.
    if (typeof offset != 'object') offsetBottom = offsetTop = offset
    if (typeof offsetTop == 'function') offsetTop = offset.top()
    if (typeof offsetBottom == 'function') offsetBottom = offset.bottom()
    // Resulting state: false (pinned/fixed), 'bottom', or 'top'.
    // this.unpin holds the distance used to release a bottom-affixed element.
    affix = this.unpin != null && (scrollTop + this.unpin <= position.top) ?
      false : offsetBottom != null && (position.top + this.$element.height() >= scrollHeight - offsetBottom) ?
      'bottom' : offsetTop != null && scrollTop <= offsetTop ?
      'top' : false
    if (this.affixed === affix) return
    this.affixed = affix
    this.unpin = affix == 'bottom' ? position.top - scrollTop : null
    this.$element.removeClass(reset).addClass('affix' + (affix ? '-' + affix : ''))
  }
  /* AFFIX PLUGIN DEFINITION
   * ======================= */
  var old = $.fn.affix
  // jQuery plugin entry point: lazily constructs an Affix per element and
  // optionally invokes a named method.
  $.fn.affix = function (option) {
    return this.each(function () {
      var $this = $(this)
        , data = $this.data('affix')
        , options = typeof option == 'object' && option
      if (!data) $this.data('affix', (data = new Affix(this, options)))
      if (typeof option == 'string') data[option]()
    })
  }
  $.fn.affix.Constructor = Affix
  $.fn.affix.defaults = {
    offset: 0
  }
  /* AFFIX NO CONFLICT
   * ================= */
  // Restore the previous $.fn.affix and return this implementation.
  $.fn.affix.noConflict = function () {
    $.fn.affix = old
    return this
  }
  /* AFFIX DATA-API
   * ============== */
  // Auto-initialize elements marked with data-spy="affix", reading
  // data-offset-top / data-offset-bottom attributes as offsets.
  $(window).on('load', function () {
    $('[data-spy="affix"]').each(function () {
      var $spy = $(this)
        , data = $spy.data()
      data.offset = data.offset || {}
      data.offsetBottom && (data.offset.bottom = data.offsetBottom)
      data.offsetTop && (data.offset.top = data.offsetTop)
      $spy.affix(data)
    })
  })
}(window.jQuery);
| michaelcouck/ikube | code/war/src/main/webapp/assets/javascripts/bootstrap/bootstrap-affix.js | JavaScript | apache-2.0 | 3,485 |
#include <string>
using std::string;
// Arrays at namespace (global) scope: class types run their default
// constructor, and built-in types are zero-initialized.
string sa[10]; // ten elements of empty string
int ia[10]; // ten elements of 0
int main() {
    // Local arrays: class types still default-construct, but built-in
    // types are left uninitialized (reading them is undefined behavior).
    string sa2[10]; // ten elements of empty string
    int ia2[10]; // ten elements of undefined value
}
| jaege/Cpp-Primer-5th-Exercises | ch3/3.28.cpp | C++ | apache-2.0 | 244 |
/*
* Copyright 2018 Pivotal, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.spinnaker.clouddriver.cloudfoundry.client;
import static com.netflix.spinnaker.clouddriver.cloudfoundry.client.CloudFoundryClientUtils.collectPageResources;
import static com.netflix.spinnaker.clouddriver.cloudfoundry.client.CloudFoundryClientUtils.safelyCall;
import static java.util.Collections.emptySet;
import static java.util.Collections.singletonList;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.netflix.spinnaker.clouddriver.cloudfoundry.client.api.RouteService;
import com.netflix.spinnaker.clouddriver.cloudfoundry.client.model.RouteId;
import com.netflix.spinnaker.clouddriver.cloudfoundry.client.model.v2.Resource;
import com.netflix.spinnaker.clouddriver.cloudfoundry.client.model.v2.Route;
import com.netflix.spinnaker.clouddriver.cloudfoundry.client.model.v2.RouteMapping;
import com.netflix.spinnaker.clouddriver.cloudfoundry.model.CloudFoundryDomain;
import com.netflix.spinnaker.clouddriver.cloudfoundry.model.CloudFoundryLoadBalancer;
import com.netflix.spinnaker.clouddriver.cloudfoundry.model.CloudFoundryServerGroup;
import com.netflix.spinnaker.clouddriver.cloudfoundry.model.CloudFoundrySpace;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import lombok.extern.slf4j.Slf4j;
@Slf4j
/**
 * Client-side wrapper over the Cloud Foundry v2 routes API: lists, looks up,
 * creates and deletes routes, and maps them into
 * {@link CloudFoundryLoadBalancer} model objects. Route-to-app mappings are
 * cached for three minutes to limit API calls.
 */
public class Routes {
  // Matches host.domain[:port][/path]; groups: 1=host, 2=domain, 3=":port", 4="/path"
  private static final Pattern VALID_ROUTE_REGEX =
      Pattern.compile("^([a-zA-Z0-9_-]+)\\.([a-zA-Z0-9_.-]+)(:[0-9]+)?([/a-zA-Z0-9_.-]+)?$");

  private final String account;
  private final RouteService api;
  private final Applications applications;
  private final Domains domains;
  private final Spaces spaces;
  private final Integer resultsPerPage;
  private final ForkJoinPool forkJoinPool;

  // Cache of route guid -> route mappings; entries expire 3 minutes after write.
  private LoadingCache<String, List<RouteMapping>> routeMappings;

  public Routes(
      String account,
      RouteService api,
      Applications applications,
      Domains domains,
      Spaces spaces,
      Integer resultsPerPage,
      ForkJoinPool forkJoinPool) {
    this.account = account;
    this.api = api;
    this.applications = applications;
    this.domains = domains;
    this.spaces = spaces;
    this.resultsPerPage = resultsPerPage;
    this.forkJoinPool = forkJoinPool;
    this.routeMappings =
        CacheBuilder.newBuilder()
            .expireAfterWrite(3, TimeUnit.MINUTES)
            .build(
                new CacheLoader<String, List<RouteMapping>>() {
                  @Override
                  public List<RouteMapping> load(@Nonnull String guid)
                      throws CloudFoundryApiException, ResourceNotFoundException {
                    // Collect all pages of route mappings for the route guid.
                    return collectPageResources("route mappings", pg -> api.routeMappings(guid, pg))
                        .stream()
                        .map(Resource::getEntity)
                        .collect(Collectors.toList());
                  }
                });
  }

  /**
   * Converts a route API resource into a {@link CloudFoundryLoadBalancer},
   * resolving the apps mapped to the route via the cache. If the route's
   * mappings cannot be found (ResourceNotFoundException), the load balancer is
   * returned with no mapped apps; any other lookup failure is rethrown.
   */
  private CloudFoundryLoadBalancer map(Resource<Route> res) throws CloudFoundryApiException {
    Route route = res.getEntity();
    Set<CloudFoundryServerGroup> mappedApps = emptySet();
    try {
      mappedApps =
          routeMappings.get(res.getMetadata().getGuid()).stream()
              .map(rm -> applications.findById(rm.getAppGuid()))
              .collect(Collectors.toSet());
    } catch (ExecutionException e) {
      if (!(e.getCause() instanceof ResourceNotFoundException))
        throw new CloudFoundryApiException(e.getCause(), "Unable to find route mappings by id");
    }
    return CloudFoundryLoadBalancer.builder()
        .account(account)
        .id(res.getMetadata().getGuid())
        .host(route.getHost())
        .path(route.getPath())
        .port(route.getPort())
        .space(spaces.findById(route.getSpaceGuid()))
        .domain(domains.findById(route.getDomainGuid()))
        .mappedApps(mappedApps)
        .build();
  }

  /**
   * Finds the load balancer matching the given route id within a space, or
   * null if no exact match exists. When the requested path/port is null, only
   * candidates with an empty path / null port are accepted, so a bare
   * host.domain query does not match a more specific route.
   */
  @Nullable
  public CloudFoundryLoadBalancer find(RouteId routeId, String spaceId)
      throws CloudFoundryApiException {
    CloudFoundrySpace id = spaces.findById(spaceId);
    String orgId = id.getOrganization().getId();
    List<String> queryParams = new ArrayList<>();
    queryParams.add("host:" + routeId.getHost());
    queryParams.add("organization_guid:" + orgId);
    queryParams.add("domain_guid:" + routeId.getDomainGuid());
    if (routeId.getPath() != null) queryParams.add("path:" + routeId.getPath());
    if (routeId.getPort() != null) queryParams.add("port:" + routeId.getPort().toString());
    return collectPageResources("route mappings", pg -> api.all(pg, 1, queryParams)).stream()
        .filter(
            routeResource ->
                (routeId.getPath() != null || routeResource.getEntity().getPath().isEmpty())
                    && (routeId.getPort() != null || routeResource.getEntity().getPort() == null))
        .findFirst()
        .map(this::map)
        .orElse(null);
  }

  /**
   * Parses a route URI of the form host.domain[:port][/path] into a
   * {@link RouteId}, resolving the domain by name. Returns null when the URI
   * does not match the expected format or the domain is unknown.
   */
  @Nullable
  public RouteId toRouteId(String uri) throws CloudFoundryApiException {
    Matcher matcher = VALID_ROUTE_REGEX.matcher(uri);
    if (matcher.find()) {
      CloudFoundryDomain domain = domains.findByName(matcher.group(2)).orElse(null);
      if (domain == null) {
        return null;
      }
      RouteId routeId = new RouteId();
      routeId.setHost(matcher.group(1));
      routeId.setDomainGuid(domain.getId());
      // group(3) includes the leading ':', hence the substring(1).
      routeId.setPort(
          matcher.group(3) == null ? null : Integer.parseInt(matcher.group(3).substring(1)));
      routeId.setPath(matcher.group(4));
      return routeId;
    } else {
      return null;
    }
  }

  /**
   * Lists all routes as load balancers, mapped in parallel on the configured
   * ForkJoinPool. When spaces are given, the API query is narrowed to their
   * organizations and results are filtered down to exactly those space guids.
   */
  public List<CloudFoundryLoadBalancer> all(List<CloudFoundrySpace> spaces)
      throws CloudFoundryApiException {
    try {
      if (!spaces.isEmpty()) {
        List<String> spaceGuids = spaces.stream().map(s -> s.getId()).collect(Collectors.toList());
        String orgFilter =
            "organization_guid IN "
                + spaces.stream()
                    .map(s -> s.getOrganization().getId())
                    .collect(Collectors.joining(","));
        return forkJoinPool
            .submit(
                () ->
                    collectPageResources(
                            "routes", pg -> api.all(pg, resultsPerPage, singletonList(orgFilter)))
                        .parallelStream()
                        .map(this::map)
                        .filter(lb -> spaceGuids.contains(lb.getSpace().getId()))
                        .collect(Collectors.toList()))
            .get();
      } else {
        return forkJoinPool
            .submit(
                () ->
                    collectPageResources("routes", pg -> api.all(pg, resultsPerPage, null))
                        .parallelStream()
                        .map(this::map)
                        .collect(Collectors.toList()))
            .get();
      }
    } catch (Exception e) {
      // NOTE(review): this wraps InterruptedException in a RuntimeException
      // without restoring the thread's interrupt flag — consider
      // Thread.currentThread().interrupt() before rethrowing; confirm with
      // callers before changing.
      throw new RuntimeException(e);
    }
  }

  /**
   * Creates a route in the given space and returns it as a load balancer.
   * If the host/path/port is already taken, falls back to looking up the
   * existing route instead of failing.
   */
  public CloudFoundryLoadBalancer createRoute(RouteId routeId, String spaceId)
      throws CloudFoundryApiException {
    Route route = new Route(routeId, spaceId);
    try {
      Resource<Route> newRoute =
          safelyCall(() -> api.createRoute(route))
              .orElseThrow(
                  () ->
                      new CloudFoundryApiException(
                          "Cloud Foundry signaled that route creation succeeded but failed to provide a response."));
      return map(newRoute);
    } catch (CloudFoundryApiException e) {
      if (e.getErrorCode() == null) throw e;
      switch (e.getErrorCode()) {
        case ROUTE_HOST_TAKEN:
        case ROUTE_PATH_TAKEN:
        case ROUTE_PORT_TAKEN:
          // Route already exists — return the existing one.
          return this.find(routeId, spaceId);
        default:
          throw e;
      }
    }
  }

  /** Deletes the route identified by the given guid. */
  public void deleteRoute(String loadBalancerGuid) throws CloudFoundryApiException {
    safelyCall(() -> api.deleteRoute(loadBalancerGuid));
  }

  /** Returns true when the string matches the host.domain[:port][/path] route format. */
  public static boolean isValidRouteFormat(String route) {
    return VALID_ROUTE_REGEX.matcher(route).find();
  }
}
| ajordens/clouddriver | clouddriver-cloudfoundry/src/main/java/com/netflix/spinnaker/clouddriver/cloudfoundry/client/Routes.java | Java | apache-2.0 | 8,900 |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datalabeling/v1beta1/annotation.proto
package com.google.cloud.datalabeling.v1beta1;
public interface ImageSegmentationAnnotationOrBuilder
    extends
    // @@protoc_insertion_point(interface_extends:google.cloud.datalabeling.v1beta1.ImageSegmentationAnnotation)
    com.google.protobuf.MessageOrBuilder {

  // Generated accessors for three proto fields:
  //   annotation_colors (field 1, map<string, AnnotationSpec>),
  //   mime_type (field 2, string), image_bytes (field 3, bytes).

  /**
   *
   *
   * <pre>
   * The mapping between rgb color and annotation spec. The key is the rgb
   * color represented in format of rgb(0, 0, 0). The value is the
   * AnnotationSpec.
   * </pre>
   *
   * <code>
   * map<string, .google.cloud.datalabeling.v1beta1.AnnotationSpec> annotation_colors = 1;
   * </code>
   */
  int getAnnotationColorsCount();
  /**
   *
   *
   * <pre>
   * The mapping between rgb color and annotation spec. The key is the rgb
   * color represented in format of rgb(0, 0, 0). The value is the
   * AnnotationSpec.
   * </pre>
   *
   * <code>
   * map<string, .google.cloud.datalabeling.v1beta1.AnnotationSpec> annotation_colors = 1;
   * </code>
   */
  boolean containsAnnotationColors(java.lang.String key);
  /** Use {@link #getAnnotationColorsMap()} instead. */
  @java.lang.Deprecated
  java.util.Map<java.lang.String, com.google.cloud.datalabeling.v1beta1.AnnotationSpec>
      getAnnotationColors();
  /**
   *
   *
   * <pre>
   * The mapping between rgb color and annotation spec. The key is the rgb
   * color represented in format of rgb(0, 0, 0). The value is the
   * AnnotationSpec.
   * </pre>
   *
   * <code>
   * map<string, .google.cloud.datalabeling.v1beta1.AnnotationSpec> annotation_colors = 1;
   * </code>
   */
  java.util.Map<java.lang.String, com.google.cloud.datalabeling.v1beta1.AnnotationSpec>
      getAnnotationColorsMap();
  /**
   *
   *
   * <pre>
   * The mapping between rgb color and annotation spec. The key is the rgb
   * color represented in format of rgb(0, 0, 0). The value is the
   * AnnotationSpec.
   * </pre>
   *
   * <code>
   * map<string, .google.cloud.datalabeling.v1beta1.AnnotationSpec> annotation_colors = 1;
   * </code>
   */
  com.google.cloud.datalabeling.v1beta1.AnnotationSpec getAnnotationColorsOrDefault(
      java.lang.String key, com.google.cloud.datalabeling.v1beta1.AnnotationSpec defaultValue);
  /**
   *
   *
   * <pre>
   * The mapping between rgb color and annotation spec. The key is the rgb
   * color represented in format of rgb(0, 0, 0). The value is the
   * AnnotationSpec.
   * </pre>
   *
   * <code>
   * map<string, .google.cloud.datalabeling.v1beta1.AnnotationSpec> annotation_colors = 1;
   * </code>
   */
  com.google.cloud.datalabeling.v1beta1.AnnotationSpec getAnnotationColorsOrThrow(
      java.lang.String key);
  /**
   *
   *
   * <pre>
   * Image format.
   * </pre>
   *
   * <code>string mime_type = 2;</code>
   *
   * @return The mimeType.
   */
  java.lang.String getMimeType();
  /**
   *
   *
   * <pre>
   * Image format.
   * </pre>
   *
   * <code>string mime_type = 2;</code>
   *
   * @return The bytes for mimeType.
   */
  com.google.protobuf.ByteString getMimeTypeBytes();
  /**
   *
   *
   * <pre>
   * A byte string of a full image's color map.
   * </pre>
   *
   * <code>bytes image_bytes = 3;</code>
   *
   * @return The imageBytes.
   */
  com.google.protobuf.ByteString getImageBytes();
}
| googleapis/java-datalabeling | proto-google-cloud-datalabeling-v1beta1/src/main/java/com/google/cloud/datalabeling/v1beta1/ImageSegmentationAnnotationOrBuilder.java | Java | apache-2.0 | 3,972 |
/**
* Copyright 2015 Tobias Gierke <tobias.gierke@code-sourcery.de>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.codesourcery.j2048;
import java.awt.Component;
/**
 * Abstraction for receiving input from either the user or the AI.
 *
 * @author tobias.gierke@code-sourcery.de
 */
public interface IInputProvider
{
	/**
	 * Possible actions.
	 *
	 * NONE = no input this tick; TILT_* = tilt the board in the given
	 * direction; RESTART = start a new game.
	 *
	 * @author tobias.gierke@code-sourcery.de
	 */
	public static enum Action
	{
		NONE,TILT_DOWN,TILT_UP,TILT_LEFT,TILT_RIGHT,RESTART;
	}

	/**
	 * Returns the current action for a given board state.
	 *
	 * @param state current board state the action should be derived from
	 * @return the chosen action, {@link Action#NONE} if there is nothing to do
	 */
	public Action getAction(BoardState state);

	/**
	 * Attaches this input provider to its UI peer.
	 *
	 * @param peer AWT/Swing component to attach to (e.g. for key listeners)
	 */
	public void attach(Component peer);
}
# Copyright (c) 2019 Verizon Media
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Definition metadata for the "tag-ports-during-bulk-creation" API extension.
# This is a shim extension: it introduces no resources, attributes or actions
# of its own (all maps below are empty) and only advertises the capability.
ALIAS = 'tag-ports-during-bulk-creation'
IS_SHIM_EXTENSION = True
IS_STANDARD_ATTR_EXTENSION = False
NAME = 'Tag Ports During Bulk Creation'
DESCRIPTION = 'Allow to tag ports during bulk creation'
# Timestamp of the last change to this definition.
UPDATED_TIMESTAMP = '2019-12-29T19:00:00-00:00'
RESOURCE_ATTRIBUTE_MAP = {}
SUB_RESOURCE_ATTRIBUTE_MAP = {}
ACTION_MAP = {}
REQUIRED_EXTENSIONS = []
OPTIONAL_EXTENSIONS = []
ACTION_STATUS = {}
| openstack/neutron-lib | neutron_lib/api/definitions/tag_ports_during_bulk_creation.py | Python | apache-2.0 | 1,024 |
describe('Service: angelloModel', function() {
    // Load the module providing the service under test.
    beforeEach(module('Angello'));

    var modelService;

    beforeEach(inject(function(angelloModel) {
        modelService = angelloModel;
    }));

    describe('#getStatuses', function() {
        it('should return seven different statuses', function() {
            expect(modelService.getStatuses().length).toBe(7);
        });

        it('should have a status named "To Do"', function() {
            expect(modelService.getStatuses().map(function(status) { // get just the name of each status
                return status.name;
            })).toContain('To Do');
        });
    });

    // BUGFIX: these two suites were previously nested inside '#getStatuses',
    // which misreported the suite hierarchy. They are now siblings.
    describe('#getTypes', function() {
        it('should return four different types', function() {
            expect(modelService.getTypes().length).toBe(4);
        });

        it('should have a type named "Bug"', function() {
            expect(modelService.getTypes().map(function(status) { // get just the name of each status
                return status.name;
            })).toContain('Bug');
        });
    });

    describe('#getStories', function() {
        it('should return six different stories', function() {
            expect(modelService.getStories().length).toBe(6);
        });

        it('should return stories that have a description property',
            function() {
                modelService.getStories().forEach(function(story) {
                    expect(story.description).toBeDefined();
                });
            });
    });
});
| jimfmunro/angello | tests/angelloModelSpec.js | JavaScript | apache-2.0 | 1,326 |
package acceptance_test
import (
. "github.com/mokiat/gostub/acceptance"
"github.com/mokiat/gostub/acceptance/acceptance_stubs"
"github.com/mokiat/gostub/acceptance/external/external_dup"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
// Specs verifying that gostub-generated stubs support methods whose last
// parameter is variadic (ellipsis) and whose types come from an external package.
var _ = Describe("TypeEllipsis", func() {
	// Shared fixture state, reset in BeforeEach before every spec.
	var stub *acceptance_stubs.EllipsisSupportStub
	var methodWasCalled bool
	var methodEllipsisArg []external.Address
	var firstAddress external.Address
	var secondAddress external.Address
	BeforeEach(func() {
		stub = new(acceptance_stubs.EllipsisSupportStub)
		methodWasCalled = false
		methodEllipsisArg = []external.Address{}
		firstAddress = external.Address{
			Value: 1,
		}
		secondAddress = external.Address{
			Value: 2,
		}
	})
	It("stub is assignable to interface", func() {
		// Interface-satisfaction check performed via a runtime type assertion.
		_, assignable := interface{}(stub).(EllipsisSupport)
		Ω(assignable).Should(BeTrue())
	})
	It("is possible to stub the behavior", func() {
		// The variadic args must arrive in the stub as a single slice.
		stub.MethodStub = func(arg1 string, arg2 int, ellipsis ...external.Address) {
			methodWasCalled = true
			methodEllipsisArg = ellipsis
		}
		stub.Method("whatever", 0, firstAddress, secondAddress)
		Ω(methodWasCalled).Should(BeTrue())
		Ω(methodEllipsisArg).Should(Equal([]external.Address{firstAddress, secondAddress}))
	})
	It("is possible to get call count", func() {
		stub.Method("whatever", 0, firstAddress, secondAddress)
		stub.Method("whatever", 0, firstAddress, secondAddress)
		Ω(stub.MethodCallCount()).Should(Equal(2))
	})
	It("is possible to get arguments for call", func() {
		// Each recorded call keeps its own copy of the variadic slice.
		stub.Method("first", 1, firstAddress)
		stub.Method("second", 2, firstAddress, secondAddress)
		_, _, argAddresses := stub.MethodArgsForCall(0)
		Ω(argAddresses).Should(Equal([]external.Address{firstAddress}))
		_, _, argAddresses = stub.MethodArgsForCall(1)
		Ω(argAddresses).Should(Equal([]external.Address{firstAddress, secondAddress}))
	})
})
| momchil-atanasov/gostub | acceptance/type_ellipsis_test.go | GO | apache-2.0 | 1,875 |
namespace xcite.somon {
/// <summary> Describes a property of a SOMON object. </summary>
public class SomonProperty {
/// <summary> Initializes the new instance. </summary>
/// <param name="kind">Kind of property</param>
/// <param name="id">Property id</param>
/// <param name="fields">Fields of the property</param>
public SomonProperty(string kind, string id, SomonField[] fields) {
Kind = kind;
Id = id;
Fields = fields ?? new SomonField[0];
}
/// <summary> Property kind </summary>
public string Kind { get; }
/// <summary> (Optional) Property id. </summary>
public string Id { get; }
/// <summary> Property fields </summary>
public SomonField[] Fields { get; }
}
} | astorch/xcite | source/xcite.somon/SomonProperty.cs | C# | apache-2.0 | 860 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.yardstickframework.spark.model;
import java.io.*;
/**
* Person record used for query test.
*/
public class Person implements Externalizable {
/** Person ID. */
private int id;
/** Person ID. */
private int orgId;
/** First name (not-indexed). */
private String firstName;
/** Last name (not indexed). */
private String lastName;
/** Salary. */
private double salary;
/**
* Constructs empty person.
*/
public Person() {
// No-op.
}
/**
* Constructs person record that is not linked to any organization.
*
* @param id Person ID.
* @param firstName First name.
* @param lastName Last name.
* @param salary Salary.
*/
public Person(int id, String firstName, String lastName, double salary) {
this.id = id;
this.firstName = firstName;
this.lastName = lastName;
this.salary = salary;
}
/**
* Constructs person record that is not linked to any organization.
*
* @param id Person ID.
* @param orgId Organization ID.
* @param firstName First name.
* @param lastName Last name.
* @param salary Salary.
*/
public Person(int id, int orgId, String firstName, String lastName, double salary) {
this.id = id;
this.orgId = orgId;
this.firstName = firstName;
this.lastName = lastName;
this.salary = salary;
}
/**
* @return Person id.
*/
public int getId() {
return id;
}
/**
* @param id Person id.
*/
public void setId(int id) {
this.id = id;
}
/**
* @return Person first name.
*/
public String getFirstName() {
return firstName;
}
/**
* @param firstName Person first name.
*/
public void setFirstName(String firstName) {
this.firstName = firstName;
}
/**
* @return Person last name.
*/
public String getLastName() {
return lastName;
}
/**
* @param lastName Person last name.
*/
public void setLastName(String lastName) {
this.lastName = lastName;
}
/**
* @return Salary.
*/
public double getSalary() {
return salary;
}
/**
* @param salary Salary.
*/
public void setSalary(double salary) {
this.salary = salary;
}
/**
* @return Organization ID.
*/
public int getOrgId() {
return orgId;
}
/**
* @param orgId Organization ID.
*/
public void setOrgId(int orgId) {
this.orgId = orgId;
}
/** {@inheritDoc} */
@Override public void writeExternal(ObjectOutput out) throws IOException {
out.writeInt(id);
out.writeUTF(firstName);
out.writeUTF(lastName);
out.writeDouble(salary);
}
/** {@inheritDoc} */
@Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
id = in.readInt();
firstName = in.readUTF();
lastName = in.readUTF();
salary = in.readDouble();
}
/** {@inheritDoc} */
@Override public boolean equals(Object o) {
return this == o || (o instanceof Person) && id == ((Person)o).id;
}
/** {@inheritDoc} */
@Override public int hashCode() {
return id;
}
/** {@inheritDoc} */
@Override public String toString() {
return "Person [firstName=" + firstName +
", id=" + id +
", lastName=" + lastName +
", salary=" + salary +
']';
}
}
| yardstick-benchmarks/yardstick-spark | src/main/java/org/yardstickframework/spark/model/Person.java | Java | apache-2.0 | 4,426 |
package org.pac4j.cas.client;
import org.junit.Test;
import org.pac4j.core.context.MockWebContext;
import org.pac4j.core.exception.HttpAction;
import org.pac4j.core.util.TestsConstants;
import org.pac4j.core.util.TestsHelper;
import static org.junit.Assert.*;
/**
 * This class tests the {@link CasProxyReceptor} class.
 *
 * @author Jerome Leleu
 * @since 1.4.0
 */
public final class CasProxyReceptorTests implements TestsConstants {

    @Test
    public void testMissingCallbackUrl() {
        final CasProxyReceptor client = new CasProxyReceptor();
        TestsHelper.initShouldFail(client, "callbackUrl cannot be blank");
    }

    @Test
    public void testMissingStorage() {
        final CasProxyReceptor client = new CasProxyReceptor();
        client.setCallbackUrl(CALLBACK_URL);
        client.setStore(null);
        TestsHelper.initShouldFail(client, "store cannot be null");
    }

    @Test
    public void testMissingPgt() {
        final CasProxyReceptor client = new CasProxyReceptor();
        client.setCallbackUrl(CALLBACK_URL);
        final MockWebContext context = MockWebContext.create();
        // BUGFIX: the previous try/catch only asserted inside the catch block, so the
        // test silently passed when no exception was thrown. expectException (as used
        // by the sibling tests) fails when the expected HttpAction does not occur.
        TestsHelper.expectException(
            () -> client.getCredentials(context.addRequestParameter(CasProxyReceptor.PARAM_PROXY_GRANTING_TICKET, VALUE)),
            HttpAction.class,
            "Missing proxyGrantingTicket or proxyGrantingTicketIou");
        assertEquals(200, context.getResponseStatus());
        assertEquals("", context.getResponseContent());
    }

    @Test
    public void testMissingPgtiou() {
        final CasProxyReceptor client = new CasProxyReceptor();
        client.setCallbackUrl(CALLBACK_URL);
        final MockWebContext context = MockWebContext.create();
        TestsHelper.expectException(() -> client.getCredentials(context.addRequestParameter(CasProxyReceptor.PARAM_PROXY_GRANTING_TICKET_IOU, VALUE)), HttpAction.class,
            "Missing proxyGrantingTicket or proxyGrantingTicketIou");
        assertEquals(200, context.getResponseStatus());
        assertEquals("", context.getResponseContent());
    }

    @Test
    public void testOk() {
        final CasProxyReceptor client = new CasProxyReceptor();
        client.setCallbackUrl(CALLBACK_URL);
        final MockWebContext context = MockWebContext.create()
            .addRequestParameter(CasProxyReceptor.PARAM_PROXY_GRANTING_TICKET, VALUE)
            .addRequestParameter(CasProxyReceptor.PARAM_PROXY_GRANTING_TICKET_IOU, VALUE);
        TestsHelper.expectException(() -> client.getCredentials(context), HttpAction.class, "No credential for CAS proxy receptor -> returns ok");
        assertEquals(200, context.getResponseStatus());
        assertTrue(context.getResponseContent().length() > 0);
    }
}
| JacobASeverson/pac4j | pac4j-cas/src/test/java/org/pac4j/cas/client/CasProxyReceptorTests.java | Java | apache-2.0 | 2,771 |
/*
* Copyright 2009-2012 The MyBatis Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ibatis.builder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.StringTokenizer;
import org.apache.ibatis.cache.Cache;
import org.apache.ibatis.cache.decorators.LruCache;
import org.apache.ibatis.cache.impl.PerpetualCache;
import org.apache.ibatis.executor.ErrorContext;
import org.apache.ibatis.executor.keygen.KeyGenerator;
import org.apache.ibatis.mapping.CacheBuilder;
import org.apache.ibatis.mapping.Discriminator;
import org.apache.ibatis.mapping.MappedStatement;
import org.apache.ibatis.mapping.ParameterMap;
import org.apache.ibatis.mapping.ParameterMapping;
import org.apache.ibatis.mapping.ParameterMode;
import org.apache.ibatis.mapping.ResultFlag;
import org.apache.ibatis.mapping.ResultMap;
import org.apache.ibatis.mapping.ResultMapping;
import org.apache.ibatis.mapping.ResultSetType;
import org.apache.ibatis.mapping.SqlCommandType;
import org.apache.ibatis.mapping.SqlSource;
import org.apache.ibatis.mapping.StatementType;
import org.apache.ibatis.reflection.MetaClass;
import org.apache.ibatis.scripting.LanguageDriver;
import org.apache.ibatis.session.Configuration;
import org.apache.ibatis.type.JdbcType;
import org.apache.ibatis.type.TypeHandler;
/**
 * Stateful helper used while parsing one mapper (XML or annotations). It tracks the
 * namespace and cache of the mapper currently being parsed and registers caches,
 * parameter maps, result maps and mapped statements with the global
 * {@link Configuration}.
 */
public class MapperBuilderAssistant extends BaseBuilder {

  // Namespace of the mapper being parsed; prefixes every registered id.
  private String currentNamespace;
  // Resource (file path) the mapper was loaded from; recorded for error reporting.
  private String resource;
  // Cache bound to the current mapper: its own (useNewCache) or a referenced one (useCacheRef).
  private Cache currentCache;

  public MapperBuilderAssistant(Configuration configuration, String resource) {
    super(configuration);
    ErrorContext.instance().resource(resource);
    this.resource = resource;
  }

  public String getCurrentNamespace() {
    return currentNamespace;
  }

  /**
   * Sets the namespace once per mapper; a second, different namespace is rejected.
   */
  public void setCurrentNamespace(String currentNamespace) {
    if (currentNamespace == null) {
      throw new BuilderException("The mapper element requires a namespace attribute to be specified.");
    }
    if (this.currentNamespace != null && !this.currentNamespace.equals(currentNamespace)) {
      throw new BuilderException("Wrong namespace. Expected '"
          + this.currentNamespace + "' but found '" + currentNamespace + "'.");
    }
    this.currentNamespace = currentNamespace;
  }

  /**
   * Qualifies an id with the current namespace. When {@code isReference} is true the id
   * may point into any namespace (already-dotted ids pass through); when false the id
   * must belong to this mapper and must not contain dots itself.
   */
  public String applyCurrentNamespace(String base, boolean isReference) {
    if (base == null) return null;
    if (isReference) {
      // is it qualified with any namespace yet?
      if (base.contains(".")) return base;
    } else {
      // is it qualified with this namespace yet?
      if (base.startsWith(currentNamespace + ".")) return base;
      if (base.contains(".")) throw new BuilderException("Dots are not allowed in element names, please remove it from " + base);
    }
    return currentNamespace + "." + base;
  }

  /**
   * Makes this mapper share the cache of another namespace (cache-ref element).
   * Throws IncompleteElementException so the caller can retry once parsing completes.
   */
  public Cache useCacheRef(String namespace) {
    if (namespace == null) {
      throw new BuilderException("cache-ref element requires a namespace attribute.");
    }
    try {
      Cache cache = configuration.getCache(namespace);
      if (cache == null) {
        throw new IncompleteElementException("No cache for namespace '" + namespace + "' could be found.");
      }
      currentCache = cache;
      return cache;
    } catch (IllegalArgumentException e) {
      throw new IncompleteElementException("No cache for namespace '" + namespace + "' could be found.", e);
    }
  }

  /**
   * Builds and registers a cache for this mapper (cache element). Defaults:
   * PerpetualCache implementation with LRU eviction.
   */
  public Cache useNewCache(Class<? extends Cache> typeClass,
      Class<? extends Cache> evictionClass,
      Long flushInterval,
      Integer size,
      boolean readWrite,
      Properties props) {
    typeClass = valueOrDefault(typeClass, PerpetualCache.class);
    evictionClass = valueOrDefault(evictionClass, LruCache.class);
    Cache cache = new CacheBuilder(currentNamespace)
        .implementation(typeClass)
        .addDecorator(evictionClass)
        .clearInterval(flushInterval)
        .size(size)
        .readWrite(readWrite)
        .properties(props)
        .build();
    configuration.addCache(cache);
    currentCache = cache;
    return cache;
  }

  /** Registers an explicit parameterMap element under this namespace. */
  public ParameterMap addParameterMap(String id, Class<?> parameterClass, List<ParameterMapping> parameterMappings) {
    id = applyCurrentNamespace(id, false);
    ParameterMap.Builder parameterMapBuilder = new ParameterMap.Builder(configuration, id, parameterClass, parameterMappings);
    ParameterMap parameterMap = parameterMapBuilder.build();
    configuration.addParameterMap(parameterMap);
    return parameterMap;
  }

  /** Builds a single parameter mapping, resolving its Java type and type handler. */
  public ParameterMapping buildParameterMapping(
      Class<?> parameterType,
      String property,
      Class<?> javaType,
      JdbcType jdbcType,
      String resultMap,
      ParameterMode parameterMode,
      Class<? extends TypeHandler<?>> typeHandler,
      Integer numericScale) {
    resultMap = applyCurrentNamespace(resultMap, true);

    // Class parameterType = parameterMapBuilder.type();
    Class<?> javaTypeClass = resolveParameterJavaType(parameterType, property, javaType, jdbcType);
    TypeHandler<?> typeHandlerInstance = resolveTypeHandler(javaTypeClass, typeHandler);

    ParameterMapping.Builder builder = new ParameterMapping.Builder(configuration, property, javaTypeClass);
    builder.jdbcType(jdbcType);
    builder.resultMapId(resultMap);
    builder.mode(parameterMode);
    builder.numericScale(numericScale);
    builder.typeHandler(typeHandlerInstance);
    return builder.build();
  }

  /**
   * Registers a resultMap, merging in mappings inherited via "extends". If this map
   * declares its own constructor mappings, the parent's constructor mappings are dropped.
   */
  public ResultMap addResultMap(
      String id,
      Class<?> type,
      String extend,
      Discriminator discriminator,
      List<ResultMapping> resultMappings,
      Boolean autoMapping) {
    id = applyCurrentNamespace(id, false);
    extend = applyCurrentNamespace(extend, true);

    ResultMap.Builder resultMapBuilder = new ResultMap.Builder(configuration, id, type, resultMappings, autoMapping);
    if (extend != null) {
      if (!configuration.hasResultMap(extend)) {
        throw new IncompleteElementException("Could not find a parent resultmap with id '" + extend + "'");
      }
      ResultMap resultMap = configuration.getResultMap(extend);
      List<ResultMapping> extendedResultMappings = new ArrayList<ResultMapping>(resultMap.getResultMappings());
      // Local mappings override inherited ones with the same definition.
      extendedResultMappings.removeAll(resultMappings);
      // Remove parent constructor if this resultMap declares a constructor.
      boolean declaresConstructor = false;
      for (ResultMapping resultMapping : resultMappings) {
        if (resultMapping.getFlags().contains(ResultFlag.CONSTRUCTOR)) {
          declaresConstructor = true;
          break;
        }
      }
      if (declaresConstructor) {
        Iterator<ResultMapping> extendedResultMappingsIter = extendedResultMappings.iterator();
        while (extendedResultMappingsIter.hasNext()) {
          if (extendedResultMappingsIter.next().getFlags().contains(ResultFlag.CONSTRUCTOR)) {
            extendedResultMappingsIter.remove();
          }
        }
      }
      resultMappings.addAll(extendedResultMappings);
    }
    resultMapBuilder.discriminator(discriminator);
    ResultMap resultMap = resultMapBuilder.build();
    configuration.addResultMap(resultMap);
    return resultMap;
  }

  /** Thin wrapper over {@link #assembleResultMapping} for a result/association/collection mapping. */
  public ResultMapping buildResultMapping(
      Class<?> resultType,
      String property,
      String column,
      Class<?> javaType,
      JdbcType jdbcType,
      String nestedSelect,
      String nestedResultMap,
      String notNullColumn,
      String columnPrefix,
      Class<? extends TypeHandler<?>> typeHandler,
      List<ResultFlag> flags) {
    ResultMapping resultMapping = assembleResultMapping(
        resultType,
        property,
        column,
        javaType,
        jdbcType,
        nestedSelect,
        nestedResultMap,
        notNullColumn,
        columnPrefix,
        typeHandler,
        flags);
    return resultMapping;
  }

  /**
   * Builds a discriminator: a column-based switch whose case values map to
   * (namespace-qualified) result map ids.
   */
  public Discriminator buildDiscriminator(
      Class<?> resultType,
      String column,
      Class<?> javaType,
      JdbcType jdbcType,
      Class<? extends TypeHandler<?>> typeHandler,
      Map<String, String> discriminatorMap) {
    ResultMapping resultMapping = assembleResultMapping(
        resultType,
        null,
        column,
        javaType,
        jdbcType,
        null,
        null,
        null,
        null,
        typeHandler,
        new ArrayList<ResultFlag>());
    Map<String, String> namespaceDiscriminatorMap = new HashMap<String, String>();
    for (Map.Entry<String, String> e : discriminatorMap.entrySet()) {
      String resultMap = e.getValue();
      resultMap = applyCurrentNamespace(resultMap, true);
      namespaceDiscriminatorMap.put(e.getKey(), resultMap);
    }
    Discriminator.Builder discriminatorBuilder = new Discriminator.Builder(configuration, resultMapping, namespaceDiscriminatorMap);
    return discriminatorBuilder.build();
  }

  /**
   * Builds and registers a MappedStatement, wiring in parameter map, result map(s),
   * cache behavior and timeout.
   */
  public MappedStatement addMappedStatement(
      String id,
      SqlSource sqlSource,
      StatementType statementType,
      SqlCommandType sqlCommandType,
      Integer fetchSize,
      Integer timeout,
      String parameterMap,
      Class<?> parameterType,
      String resultMap,
      Class<?> resultType,
      ResultSetType resultSetType,
      boolean flushCache,
      boolean useCache,
      KeyGenerator keyGenerator,
      String keyProperty,
      String keyColumn,
      String databaseId,
      LanguageDriver lang) {
    id = applyCurrentNamespace(id, false);
    boolean isSelect = sqlCommandType == SqlCommandType.SELECT;

    MappedStatement.Builder statementBuilder = new MappedStatement.Builder(configuration, id, sqlSource, sqlCommandType);
    statementBuilder.resource(resource);
    statementBuilder.fetchSize(fetchSize);
    statementBuilder.statementType(statementType);
    statementBuilder.keyGenerator(keyGenerator);
    statementBuilder.keyProperty(keyProperty);
    statementBuilder.keyColumn(keyColumn);
    statementBuilder.databaseId(databaseId);
    statementBuilder.lang(lang);

    setStatementTimeout(timeout, statementBuilder);

    setStatementParameterMap(parameterMap, parameterType, statementBuilder);
    setStatementResultMap(resultMap, resultType, resultSetType, statementBuilder);
    setStatementCache(isSelect, flushCache, useCache, currentCache, statementBuilder);

    MappedStatement statement = statementBuilder.build();
    configuration.addMappedStatement(statement);
    return statement;
  }

  // Returns value, or defaultValue when value is null.
  // NOTE(review): callers passing primitive booleans autobox to non-null values,
  // so for those call sites this always returns the value unchanged.
  private <T> T valueOrDefault(T value, T defaultValue) {
    return value == null ? defaultValue : value;
  }

  private void setStatementCache(
      boolean isSelect,
      boolean flushCache,
      boolean useCache,
      Cache cache,
      MappedStatement.Builder statementBuilder) {
    // NOTE(review): flushCache/useCache are primitives here, so these two
    // valueOrDefault calls can never substitute the default (see valueOrDefault).
    flushCache = valueOrDefault(flushCache, !isSelect);
    useCache = valueOrDefault(useCache, isSelect);
    statementBuilder.flushCacheRequired(flushCache);
    statementBuilder.useCache(useCache);
    statementBuilder.cache(cache);
  }

  /**
   * Attaches either the named parameterMap or, failing that, an inline parameter map
   * derived from the parameter type.
   */
  private void setStatementParameterMap(
      String parameterMap,
      Class<?> parameterTypeClass,
      MappedStatement.Builder statementBuilder) {
    parameterMap = applyCurrentNamespace(parameterMap, true);

    if (parameterMap != null) {
      try {
        statementBuilder.parameterMap(configuration.getParameterMap(parameterMap));
      } catch (IllegalArgumentException e) {
        throw new IncompleteElementException("Could not find parameter map " + parameterMap, e);
      }
    } else if (parameterTypeClass != null) {
      List<ParameterMapping> parameterMappings = new ArrayList<ParameterMapping>();
      ParameterMap.Builder inlineParameterMapBuilder = new ParameterMap.Builder(
          configuration,
          statementBuilder.id() + "-Inline",
          parameterTypeClass,
          parameterMappings);
      statementBuilder.parameterMap(inlineParameterMapBuilder.build());
    }
  }

  /**
   * Attaches the named result map(s) (comma-separated list supported) or an inline
   * result map derived from the result type.
   */
  private void setStatementResultMap(
      String resultMap,
      Class<?> resultType,
      ResultSetType resultSetType,
      MappedStatement.Builder statementBuilder) {
    resultMap = applyCurrentNamespace(resultMap, true);

    List<ResultMap> resultMaps = new ArrayList<ResultMap>();
    if (resultMap != null) {
      String[] resultMapNames = resultMap.split(",");
      for (String resultMapName : resultMapNames) {
        try {
          resultMaps.add(configuration.getResultMap(resultMapName.trim()));
        } catch (IllegalArgumentException e) {
          throw new IncompleteElementException("Could not find result map " + resultMapName, e);
        }
      }
    } else if (resultType != null) {
      ResultMap.Builder inlineResultMapBuilder = new ResultMap.Builder(
          configuration,
          statementBuilder.id() + "-Inline",
          resultType,
          new ArrayList<ResultMapping>(),
          null);
      resultMaps.add(inlineResultMapBuilder.build());
    }
    statementBuilder.resultMaps(resultMaps);

    statementBuilder.resultSetType(resultSetType);
  }

  // Falls back to the configuration-wide default timeout when none is given.
  private void setStatementTimeout(Integer timeout, MappedStatement.Builder statementBuilder) {
    if (timeout == null) {
      timeout = configuration.getDefaultStatementTimeout();
    }
    statementBuilder.timeout(timeout);
  }

  /** Shared assembly for result mappings and discriminators. */
  private ResultMapping assembleResultMapping(
      Class<?> resultType,
      String property,
      String column,
      Class<?> javaType,
      JdbcType jdbcType,
      String nestedSelect,
      String nestedResultMap,
      String notNullColumn,
      String columnPrefix,
      Class<? extends TypeHandler<?>> typeHandler,
      List<ResultFlag> flags) {
    nestedResultMap = applyCurrentNamespace(nestedResultMap, true);

    Class<?> javaTypeClass = resolveResultJavaType(resultType, property, javaType);
    TypeHandler<?> typeHandlerInstance = resolveTypeHandler(javaTypeClass, typeHandler);

    // Composite columns ("{prop=col, ...}") pass their first column as the main column.
    List<ResultMapping> composites = parseCompositeColumnName(column);
    if (composites.size() > 0) {
      ResultMapping first = composites.get(0);
      column = first.getColumn();
    }

    // issue #4 column is mandatory on nested queries
    if (nestedSelect != null && column == null) {
      throw new BuilderException("Missing column attribute for nested select in property " + property);
    }

    ResultMapping.Builder builder = new ResultMapping.Builder(configuration, property, column, javaTypeClass);
    builder.jdbcType(jdbcType);
    builder.nestedQueryId(applyCurrentNamespace(nestedSelect, true));
    builder.nestedResultMapId(applyCurrentNamespace(nestedResultMap, true));
    builder.typeHandler(typeHandlerInstance);
    builder.flags(flags == null ? new ArrayList<ResultFlag>() : flags);
    builder.composites(composites);
    builder.notNullColumns(parseMultipleColumnNames(notNullColumn));
    builder.columnPrefix(columnPrefix);
    return builder.build();
  }

  // Splits a "{col1, col2}" / "col1,col2" style list into a set of column names.
  private Set<String> parseMultipleColumnNames(String columnName) {
    Set<String> columns = new HashSet<String>();
    if (columnName != null) {
      if (columnName.indexOf(',') > -1) {
        StringTokenizer parser = new StringTokenizer(columnName, "{}, ", false);
        while (parser.hasMoreTokens()) {
          String column = parser.nextToken();
          columns.add(column);
        }
      } else {
        columns.add(columnName);
      }
    }
    return columns;
  }

  // Parses composite column syntax "{prop1=col1, prop2=col2}" into property->column mappings.
  private List<ResultMapping> parseCompositeColumnName(String columnName) {
    List<ResultMapping> composites = new ArrayList<ResultMapping>();
    if (columnName != null) {
      if (columnName.indexOf('=') > -1
          || columnName.indexOf(',') > -1) {
        StringTokenizer parser = new StringTokenizer(columnName, "{}=, ", false);
        while (parser.hasMoreTokens()) {
          String property = parser.nextToken();
          String column = parser.nextToken();
          ResultMapping.Builder complexBuilder = new ResultMapping.Builder(configuration, property, column, configuration.getTypeHandlerRegistry().getUnknownTypeHandler());
          composites.add(complexBuilder.build());
        }
      }
    }
    return composites;
  }

  // Infers the Java type of a result property from its setter; falls back to Object.
  private Class<?> resolveResultJavaType(Class<?> resultType, String property, Class<?> javaType) {
    if (javaType == null && property != null) {
      try {
        MetaClass metaResultType = MetaClass.forClass(resultType);
        javaType = metaResultType.getSetterType(property);
      } catch (Exception e) {
        //ignore, following null check statement will deal with the situation
      }
    }
    if (javaType == null) {
      javaType = Object.class;
    }
    return javaType;
  }

  // Infers the Java type of a parameter property: ResultSet for CURSOR, Object for Map
  // parameter objects, otherwise the property's getter type; falls back to Object.
  private Class<?> resolveParameterJavaType(Class<?> resultType, String property, Class<?> javaType, JdbcType jdbcType) {
    if (javaType == null) {
      if (JdbcType.CURSOR.equals(jdbcType)) {
        javaType = java.sql.ResultSet.class;
      } else if (Map.class.isAssignableFrom(resultType)) {
        javaType = Object.class;
      } else {
        MetaClass metaResultType = MetaClass.forClass(resultType);
        javaType = metaResultType.getGetterType(property);
      }
    }
    if (javaType == null) {
      javaType = Object.class;
    }
    return javaType;
  }

}
| binarytemple/mybatis-all-syncing-test | src/main/java/org/apache/ibatis/builder/MapperBuilderAssistant.java | Java | apache-2.0 | 18,210 |
package com.future.panels;
import javax.swing.JPanel;
public class PanelFactory {

    /**
     * Creates the panel matching the given name.
     *
     * @param name panel identifier; "SettingPanel" yields a {@link SettingPanel},
     *             anything else (including {@code null}) yields a {@link MainPanel}
     * @return the newly created panel, never {@code null}
     */
    public static JPanel createPanel(String name)
    {
        JPanel panel;
        // BUGFIX: constant-first equals avoids a NullPointerException when name is null.
        if("SettingPanel".equals(name))
        {
            panel=new SettingPanel();
        }else
        {
            panel=new MainPanel();
        }
        return panel;
    }
}
| simpher/FutureStrategy | src/com/future/panels/PanelFactory.java | Java | apache-2.0 | 298 |
<?php
namespace Irto\OAuth2Proxy\Middleware;
use Closure;
use React\HttpClient\Client;
use Irto\OAuth2Proxy\Server;
class Authorization {
    /**
     * Proxy server this middleware belongs to; source of OAuth2 client config.
     *
     * @var Irto\OAuth2Proxy\Server
     */
    protected $server = null;

    /**
     * Async HTTP client injected via the constructor.
     * NOTE(review): not used by any method visible in this class — confirm whether
     * it is needed or can be dropped.
     *
     * @var React\HttpClient\Client
     */
    protected $client = null;

    /**
     * Buffered request-body data, accumulated until content-length is reached.
     *
     * @var string
     */
    protected $buffer = null;
    /**
     * Constructor
     *
     * @param Irto\OAuth2Proxy\Server $server proxy server owning this middleware
     * @param React\HttpClient\Client $client async HTTP client
     *
     * @return Irto\OAuth2Proxy\Middleware\Authorization
     */
    public function __construct(Server $server, Client $client)
    {
        $this->server = $server;
        $this->client = $client;
    }
    /**
     * Change data watchers to work in api credentials to send to api server.
     *
     * Re-buffers the front-end request body so the OAuth client credentials
     * can be merged in before forwarding. The body is considered complete
     * once the buffered length matches the request's content-length header.
     *
     * @param Irto\OAuth2Proxy\ProxyRequest $request
     *
     * @return void
     */
    protected function proxyContent($request)
    {
        $original = $request->originRequest();
        // Take whatever has already been buffered by the proxy request.
        $data = $request->getBufferClean();
        $this->bufferData($data);
        // From here on, this middleware owns the body stream.
        $original->removeAllListeners('data');
        if ($this->bufferLength() == (int) $request->headers()->get('content-length')) {
            // Body already complete: forward it with credentials merged in.
            $request->write($this->getDataEnd(true));
        } else {
            // Keep buffering chunks until content-length bytes have arrived.
            $original->on('data', function ($data) use ($request, $original) {
                $this->bufferData($data);
                if ($this->bufferLength() == (int) $request->headers()->get('content-length')) {
                    $request->write($this->getDataEnd(true));
                }
            });
        }
    }
    /**
     * Buffered data length in bytes.
     *
     * @return int
     */
    protected function bufferLength()
    {
        return strlen($this->buffer);
    }
    /**
     * Appends a chunk to the internal buffer.
     *
     * @param string $data raw body chunk
     *
     * @return self
     */
    protected function bufferData($data)
    {
        $this->buffer .= $data;
        return $this;
    }
    /**
     * Return total content length after data merge.
     *
     * The forwarded body is the front-end JSON object merged with the
     * credentials JSON object; the -1 appears to account for the two
     * objects' braces collapsing into a single object when merged.
     * NOTE(review): exact only when the encodings concatenate cleanly and
     * no keys overlap — confirm against getDataEnd().
     *
     * @return int
     */
    protected function getContentLength($request)
    {
        $length = (int) $request->headers()->get('content-length');
        return $length + strlen(json_encode($this->getOAuthCredentials())) - 1;
    }
/**
* Return data to send to API with credentials merged with front-end data
*
* @return string
*/
protected function getDataEnd($mergeCredentials = false)
{
$data = json_decode($this->buffer, true);
$this->buffer = null;
if ($mergeCredentials && !empty($data) && $data) {
$data += $this->getOAuthCredentials();
return json_encode($data, JSON_UNESCAPED_SLASHES);
}
return json_encode($data, JSON_UNESCAPED_SLASHES);
}
    /**
     * Return configured webapp oauth2 credentials to api.
     *
     * These are the proxy's own client credentials (password grant),
     * read from server configuration.
     *
     * @return array
     */
    private function getOAuthCredentials()
    {
        return array(
            'grant_type' => 'password',
            'client_id' => $this->server['config']->get('client_id'),
            'client_secret' => $this->server['config']->get('client_secret'),
        );
    }
    /**
     * Catch $request when it is created.
     *
     * Grant requests get the client credentials merged into their body;
     * every other request gets an Authorization header — either the
     * session's stored grant or, when absent, the proxy's client-level
     * credentials. Revoke requests additionally carry the access token
     * as a query parameter.
     *
     * @param Irto\OAuth2Proxy\ProxyRequest $request
     * @param Closure $next
     *
     * @return Irto\OAuth2Proxy\ProxyResponse
     */
    public function request($request, Closure $next)
    {
        if ($request->originRequest()->getPath() == $this->server['config']->get('grant_path')) {
            // Token request: inject credentials into the body and fix the length header.
            $this->proxyContent($request);
            $request->headers()->put('content-length', $this->getContentLength($request));
        } else {
            $session = $request->session();
            if ($credentials = $session->get('oauth_grant', false)) {
                if ($request->originRequest()->getPath() == $this->server['config']->get('revoke_path')) {
                    // Revocation endpoint expects the token in the query string.
                    $request->query()->put('token', $session->get('oauth_grant.access_token', false));
                }
            } else {
                // No user grant in session: fall back to the app's client credentials.
                $credentials = $this->server->getClientCredentials();
            }
            $request->headers()->put('authorization', "{$credentials['token_type']} {$credentials['access_token']}");
        }
        return $next($request);
    }
    /**
     * Catch $response on get it from api server.
     *
     * Two special flows:
     *  - Successful grant responses (HTTP 200 on grant_path) are swallowed:
     *    the token payload is stored in the session and the client only
     *    receives a 204 No Content.
     *  - A 401 with a refresh token in session triggers a token refresh and
     *    a transparent retry of the original request; returning false halts
     *    the middleware chain until the retried response arrives.
     *
     * @param Irto\OAuth2Proxy\ProxyResponse $response
     * @param Closure $next
     *
     * @return React\HttpClient\Response
     */
    public function response($response, Closure $next)
    {
        $request = $response->originRequest();
        $original = $response->clientResponse();
        if ($request->originRequest()->getPath() == $this->server['config']->get('grant_path') && $original->getCode() == 200) {
            // Never leak the raw token payload to the browser.
            $response->setCode(204);
            $original->removeAllListeners('data');
            $original->on('data', function ($data) use ($response, $original) {
                $this->bufferData($data);
                if ($response->dataLength() === (int) $response->headers()->get('content-length', -1)) {
                    $original->close();
                }
            });
            $original->on('end', function () use ($response) {
                $response->end();
                // Persist the grant (access/refresh token) into the session.
                $this->processResponse($response, json_decode($this->getDataEnd(), true));
            });
        }
        $session = $request->session();
        if ($original->getCode() == 401 && $session->has('oauth_grant.refresh_token')) {
            $original->removeAllListeners('data');
            // Refresh the token, then replay the original request with the new grant.
            $this->updateCredentials($response, function ($clientResponse) use ($response, $request, $next, $session) {
                $request->removeAllListeners('response');
                $request->on('response', function ($original) use ($next, $response) {
                    $response->mergeClientResponse($original);
                    $next($response);
                });
                $credentials = $session->get('oauth_grant');
                $request->headers()->put('authorization', "{$credentials['token_type']} {$credentials['access_token']}");
                $request->retry();
            });
            // Halt the chain; $next is invoked from the retry callback above.
            return false;
        }
        return $next($response);
    }
    /**
     * Requests a new access token from the API using the refresh token
     * stored in the session, then invokes $callback with the client
     * response. On failure the stored grant is discarded.
     *
     * @param Irto\OAuth2Proxy\ProxyResponse $response response whose session holds the grant
     * @param callable $callback invoked with the React\HttpClient\Response of the refresh call
     *
     * @return void
     */
    protected function updateCredentials($response, $callback)
    {
        $session = $response->originRequest()->session();
        $url = $this->server->get('api_url') . $this->server['config']->get('grant_path');
        $data = json_encode(array(
            'client_id' => $this->server['config']->get('client_id'),
            'client_secret' => $this->server['config']->get('client_secret'),
            'refresh_token' => $session->get('oauth_grant.refresh_token'),
            'grant_type' => 'refresh_token'
        ));
        $request = $this->client->request('POST', $url, array(
            'content-type' => 'application/json;charset=UTF-8',
            'content-length' => strlen($data),
        ));
        $request->on('response', function ($clientResponse) use ($response, $request, $callback, $session) {
            if ($clientResponse->getCode() != 200) {
                // Refresh rejected: drop the stale grant and log the API's answer.
                $clientResponse->on('data', function ($data) use ($clientResponse, $request, $callback, $session) {
                    $session->forget('oauth_grant');
                    $this->server->log('Não foi possível autenticar o usuário utilizando o refresh token (%s).', [$data]);
                });
                return $callback($clientResponse);
            }
            // Success: buffer the token payload, store it, then hand control back.
            $clientResponse->on('data', function ($data) { $this->bufferData($data); });
            $clientResponse->on('end', function ($data) use ($clientResponse, $response, $callback) {
                $data = $this->getDataEnd();
                $this->processResponse($response, json_decode($data, true));
                $callback($clientResponse);
            });
        });
        $request->end($data);
    }
    /**
     * Stores a token payload in the request's session and persists it.
     *
     * @param Irto\OAuth2Proxy\ProxyResponse $response
     * @param array $data decoded grant payload (access/refresh token, token_type, ...)
     *
     * @return self
     */
    protected function processResponse($response, $data)
    {
        $session = $response->originRequest()->session();
        $session->set('oauth_grant', $data);
        $session->save();
        return $this;
    }
} | Irto/oauth2-proxy | src/Middleware/Authorization.php | PHP | apache-2.0 | 8,334 |
package com.intellij.util;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
// We don't use Java URI due to problem — http://cns-etuat-2.localnet.englishtown.com/school/e12/#school/45383/201/221/382?c=countrycode=cc|culturecode=en-us|partnercode=mkge
// it is illegal URI (fragment before query), but we must support such URI
// Semicolon as parameters separator is supported (WEB-6671)
public interface Url {
  /** @return the path component; never {@code null} */
  @NotNull
  String getPath();

  /** @return {@code true} if this URL points into the local file system */
  boolean isInLocalFileSystem();

  /** @return the decoded (human-readable) form of the URL */
  String toDecodedForm();

  /** @return the encoded external form of the URL; never {@code null} */
  @NotNull
  String toExternalForm();

  /** @return the scheme (e.g. {@code http}), or {@code null} if absent */
  @Nullable
  String getScheme();

  /** @return the authority (host[:port] part), or {@code null} if absent */
  @Nullable
  String getAuthority();

  /** @return the query/parameters part, or {@code null} if absent */
  @Nullable
  String getParameters();

  /**
   * Compares to another URL ignoring the parameters part.
   *
   * @param url the other URL; {@code null} yields {@code false}
   */
  boolean equalsIgnoreParameters(@Nullable Url url);

  /** @return a copy of this URL with the parameters part removed; never {@code null} */
  @NotNull
  Url trimParameters();
}
| romankagan/DDBWorkbench | platform/platform-api/src/com/intellij/util/Url.java | Java | apache-2.0 | 785 |
/*******************************************************************************
* Copyright 2013 SAP AG
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.sap.core.odata.api.edm;
/**
* @com.sap.core.odata.DoNotImplement
* EdmLiteralKind indicates the format of an EDM literal.
* @author SAP AG
*/
public enum EdmLiteralKind {
  /** Default EDM literal representation. */
  DEFAULT,
  /** Literal as used inside a URI. */
  URI,
  /** Literal as used in a JSON payload. */
  JSON;
}
| SAP/cloud-odata-java | odata-api/src/main/java/com/sap/core/odata/api/edm/EdmLiteralKind.java | Java | apache-2.0 | 959 |
package com.barbarum.tutorial.code.array;
import com.barbarum.tutorial.util.PrintUtil;
/**
 * Finds, in a 0/1 array, the index of the zero whose removal/flip joins the
 * longest combined run of ones (left run + right run).
 */
public class FindLongestBinarySequence {

    /**
     * Returns the index of the best zero to flip, or -1 when the array
     * contains no zero with adjacent ones.
     */
    public static int find(int[] nums) {
        int[] table = new int[nums.length];
        countLeftOnes(nums, table);
        countRightOnes(nums, table);
        return findMaximum(table);
    }

    /** Index of the strictly greatest positive entry, or -1 if none exceeds 0. */
    private static int findMaximum(int[] ones) {
        int bestIndex = -1;
        int bestValue = 0;
        for (int i = 0; i < ones.length; i++) {
            if (ones[i] > bestValue) {
                bestValue = ones[i];
                bestIndex = i;
            }
        }
        return bestIndex;
    }

    /** Adds, at every zero position, the length of the run of ones just before it. */
    private static void countLeftOnes(int[] nums, int[] table) {
        int run = 0;
        for (int i = 0; i < nums.length; i++) {
            if (nums[i] != 0) {
                run++;
            } else {
                table[i] += run;
                run = 0;
            }
        }
    }

    /** Adds, at every zero position, the length of the run of ones just after it. */
    public static void countRightOnes(int[] nums, int[] table) {
        int run = 0;
        for (int i = nums.length - 1; i >= 0; i--) {
            if (nums[i] != 0) {
                run++;
            } else {
                table[i] += run;
                run = 0;
            }
        }
    }

    public static void main(String args[]) {
        PrintUtil.println(new int[]{0, 1, 1, 1, 0, 1, 0}, FindLongestBinarySequence::find);
        PrintUtil.println(new int[]{0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 0}, FindLongestBinarySequence::find);
        PrintUtil.println(new int[]{1, 1, 1, 1, 0, 1}, FindLongestBinarySequence::find);
        PrintUtil.println(new int[]{1, 1, 1, 1, 1, 1}, FindLongestBinarySequence::find);
    }
}
| barbarum/barbarum-tutorial | src/main/java/com/barbarum/tutorial/code/array/FindLongestBinarySequence.java | Java | apache-2.0 | 1,761 |
package ssh_test
import (
fakesys "github.com/cloudfoundry/bosh-utils/system/fakes"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
boshdir "github.com/cloudfoundry/bosh-cli/director"
. "github.com/cloudfoundry/bosh-cli/ssh"
)
// Spec for SSHArgs: checks the login arguments and the ssh(1) option list
// built per host, covering gateway (ProxyCommand), SOCKS5 proxy and
// IPv4/IPv6 address variants.
var _ = Describe("SSHArgs", func() {
	var (
		connOpts       ConnectionOpts
		result         boshdir.SSHResult
		forceTTY       bool
		privKeyFile    *fakesys.FakeFile
		knownHostsFile *fakesys.FakeFile
		fs             *fakesys.FakeFileSystem
		host           boshdir.Host
	)

	// Fresh fakes and a default IPv4 host before every spec.
	BeforeEach(func() {
		connOpts = ConnectionOpts{}
		result = boshdir.SSHResult{}
		forceTTY = false
		fs = fakesys.NewFakeFileSystem()
		privKeyFile = fakesys.NewFakeFile("/tmp/priv-key", fs)
		knownHostsFile = fakesys.NewFakeFile("/tmp/known-hosts", fs)
		host = boshdir.Host{Host: "127.0.0.1", Username: "user"}
	})

	Describe("LoginForHost", func() {
		act := func() []string {
			return SSHArgs{}.LoginForHost(host)
		}

		It("returns login details with IPv4", func() {
			Expect(act()).To(Equal([]string{"127.0.0.1", "-l", "user"}))
		})

		It("returns login details with IPv6 non-bracketed", func() {
			host.Host = "::1"
			Expect(act()).To(Equal([]string{"::1", "-l", "user"}))
		})
	})

	Describe("OptsForHost", func() {
		// Builds SSHArgs from the current spec state and returns its options.
		act := func() []string {
			args := SSHArgs{
				ConnOpts:       connOpts,
				Result:         result,
				ForceTTY:       forceTTY,
				PrivKeyFile:    privKeyFile,
				KnownHostsFile: knownHostsFile,
			}
			return args.OptsForHost(host)
		}

		It("returns ssh options with correct paths to private key and known hosts", func() {
			Expect(act()).To(Equal([]string{
				"-o", "ServerAliveInterval=30",
				"-o", "ForwardAgent=no",
				"-o", "PasswordAuthentication=no",
				"-o", "IdentitiesOnly=yes",
				"-o", "IdentityFile=/tmp/priv-key",
				"-o", "StrictHostKeyChecking=yes",
				"-o", "UserKnownHostsFile=/tmp/known-hosts",
			}))
		})

		It("returns ssh options with forced tty option if requested", func() {
			forceTTY = true
			Expect(act()).To(Equal([]string{
				"-tt",
				"-o", "ServerAliveInterval=30",
				"-o", "ForwardAgent=no",
				"-o", "PasswordAuthentication=no",
				"-o", "IdentitiesOnly=yes",
				"-o", "IdentityFile=/tmp/priv-key",
				"-o", "StrictHostKeyChecking=yes",
				"-o", "UserKnownHostsFile=/tmp/known-hosts",
			}))
		})

		It("returns ssh options with custom raw options specified", func() {
			connOpts.RawOpts = []string{"raw1", "raw2"}
			Expect(act()).To(Equal([]string{
				"-o", "ServerAliveInterval=30",
				"-o", "ForwardAgent=no",
				"-o", "PasswordAuthentication=no",
				"-o", "IdentitiesOnly=yes",
				"-o", "IdentityFile=/tmp/priv-key",
				"-o", "StrictHostKeyChecking=yes",
				"-o", "UserKnownHostsFile=/tmp/known-hosts",
				"raw1", "raw2",
			}))
		})

		It("returns ssh options with gateway settings returned from the Director", func() {
			result.GatewayUsername = "gw-user"
			result.GatewayHost = "gw-host"
			Expect(act()).To(Equal([]string{
				"-o", "ServerAliveInterval=30",
				"-o", "ForwardAgent=no",
				"-o", "PasswordAuthentication=no",
				"-o", "IdentitiesOnly=yes",
				"-o", "IdentityFile=/tmp/priv-key",
				"-o", "StrictHostKeyChecking=yes",
				"-o", "UserKnownHostsFile=/tmp/known-hosts",
				"-o", "ProxyCommand=ssh -tt -W %h:%p -l gw-user gw-host -o ServerAliveInterval=30 -o ForwardAgent=no -o ClearAllForwardings=yes -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null",
			}))
		})

		It("returns ssh options with gateway settings returned from the Director and private key set by user", func() {
			connOpts.GatewayPrivateKeyPath = "/tmp/gw-priv-key"
			result.GatewayUsername = "gw-user"
			result.GatewayHost = "gw-host"
			Expect(act()).To(Equal([]string{
				"-o", "ServerAliveInterval=30",
				"-o", "ForwardAgent=no",
				"-o", "PasswordAuthentication=no",
				"-o", "IdentitiesOnly=yes",
				"-o", "IdentityFile=/tmp/priv-key",
				"-o", "StrictHostKeyChecking=yes",
				"-o", "UserKnownHostsFile=/tmp/known-hosts",
				"-o", "ProxyCommand=ssh -tt -W %h:%p -l gw-user gw-host -o ServerAliveInterval=30 -o ForwardAgent=no -o ClearAllForwardings=yes -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o PasswordAuthentication=no -o IdentitiesOnly=yes -o IdentityFile=/tmp/gw-priv-key",
			}))
		})

		It("returns ssh options with gateway settings overridden by user even if the Director specifies some", func() {
			connOpts.GatewayUsername = "user-gw-user"
			connOpts.GatewayHost = "user-gw-host"
			result.GatewayUsername = "gw-user"
			result.GatewayHost = "gw-host"
			Expect(act()).To(Equal([]string{
				"-o", "ServerAliveInterval=30",
				"-o", "ForwardAgent=no",
				"-o", "PasswordAuthentication=no",
				"-o", "IdentitiesOnly=yes",
				"-o", "IdentityFile=/tmp/priv-key",
				"-o", "StrictHostKeyChecking=yes",
				"-o", "UserKnownHostsFile=/tmp/known-hosts",
				"-o", "ProxyCommand=ssh -tt -W %h:%p -l user-gw-user user-gw-host -o ServerAliveInterval=30 -o ForwardAgent=no -o ClearAllForwardings=yes -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null",
			}))
		})

		It("returns ssh options without gateway settings if disabled even if user or the Director specifies some", func() {
			connOpts.GatewayDisable = true
			connOpts.GatewayUsername = "user-gw-user"
			connOpts.GatewayHost = "user-gw-host"
			result.GatewayUsername = "gw-user"
			result.GatewayHost = "gw-host"
			Expect(act()).To(Equal([]string{
				"-o", "ServerAliveInterval=30",
				"-o", "ForwardAgent=no",
				"-o", "PasswordAuthentication=no",
				"-o", "IdentitiesOnly=yes",
				"-o", "IdentityFile=/tmp/priv-key",
				"-o", "StrictHostKeyChecking=yes",
				"-o", "UserKnownHostsFile=/tmp/known-hosts",
			}))
		})

		It("returns ssh options without socks5 settings if SOCKS5Proxy is set", func() {
			connOpts.GatewayDisable = true
			connOpts.GatewayUsername = "user-gw-user"
			connOpts.GatewayHost = "user-gw-host"
			connOpts.SOCKS5Proxy = "socks5://some-proxy"
			result.GatewayUsername = "gw-user"
			result.GatewayHost = "gw-host"
			Expect(act()).To(Equal([]string{
				"-o", "ServerAliveInterval=30",
				"-o", "ForwardAgent=no",
				"-o", "PasswordAuthentication=no",
				"-o", "IdentitiesOnly=yes",
				"-o", "IdentityFile=/tmp/priv-key",
				"-o", "StrictHostKeyChecking=yes",
				"-o", "UserKnownHostsFile=/tmp/known-hosts",
				"-o", "ProxyCommand=nc -x some-proxy %h %p",
			}))
		})

		// ssh+socks5:// means the CLI must start its own local SOCKS5 proxy;
		// the bound port is dynamic, hence the regexp matcher.
		It("starts a socks5 proxy and uses the address if SOCKS5Proxy has ssh+socks5:// schema", func() {
			connOpts.GatewayDisable = true
			connOpts.GatewayUsername = "user-gw-user"
			connOpts.GatewayHost = "user-gw-host"
			connOpts.SOCKS5Proxy = "ssh+socks5://some-jumpbox-address?private-key=some-private-key-path"
			result.GatewayUsername = "gw-user"
			result.GatewayHost = "gw-host"
			args := NewSSHArgs(
				connOpts,
				result,
				forceTTY,
				privKeyFile,
				knownHostsFile,
			)
			Expect(args.OptsForHost(host)).To(ConsistOf(
				"-o", "ServerAliveInterval=30",
				"-o", "ForwardAgent=no",
				"-o", "PasswordAuthentication=no",
				"-o", "IdentitiesOnly=yes",
				"-o", "IdentityFile=/tmp/priv-key",
				"-o", "StrictHostKeyChecking=yes",
				"-o", "UserKnownHostsFile=/tmp/known-hosts",
				"-o", MatchRegexp("ProxyCommand=nc -x 127.0.0.1:\\d+ %h %p"),
			))
		})

		It("returns ssh options with bracketed gateway proxy command if host IP is IPv6", func() {
			result.GatewayUsername = "gw-user"
			result.GatewayHost = "gw-host"
			host = boshdir.Host{Host: "::1", Username: "user"}
			Expect(act()).To(Equal([]string{
				"-o", "ServerAliveInterval=30",
				"-o", "ForwardAgent=no",
				"-o", "PasswordAuthentication=no",
				"-o", "IdentitiesOnly=yes",
				"-o", "IdentityFile=/tmp/priv-key",
				"-o", "StrictHostKeyChecking=yes",
				"-o", "UserKnownHostsFile=/tmp/known-hosts",
				"-o", "ProxyCommand=ssh -tt -W [%h]:%p -l gw-user gw-host -o ServerAliveInterval=30 -o ForwardAgent=no -o ClearAllForwardings=yes -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null",
			}))
		})

		It("returns ssh options with non-bracketed IPs if gateway IP is IPv6", func() {
			result.GatewayUsername = "gw-user"
			result.GatewayHost = "::1"
			Expect(act()).To(Equal([]string{
				"-o", "ServerAliveInterval=30",
				"-o", "ForwardAgent=no",
				"-o", "PasswordAuthentication=no",
				"-o", "IdentitiesOnly=yes",
				"-o", "IdentityFile=/tmp/priv-key",
				"-o", "StrictHostKeyChecking=yes",
				"-o", "UserKnownHostsFile=/tmp/known-hosts",
				"-o", "ProxyCommand=ssh -tt -W %h:%p -l gw-user ::1 -o ServerAliveInterval=30 -o ForwardAgent=no -o ClearAllForwardings=yes -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null",
			}))
		})

		It("returns ssh options with bracketed and non-bracketed IPs if host and gateway IP is IPv6", func() {
			result.GatewayUsername = "gw-user"
			result.GatewayHost = "::1"
			host = boshdir.Host{Host: "::2", Username: "user"}
			Expect(act()).To(Equal([]string{
				"-o", "ServerAliveInterval=30",
				"-o", "ForwardAgent=no",
				"-o", "PasswordAuthentication=no",
				"-o", "IdentitiesOnly=yes",
				"-o", "IdentityFile=/tmp/priv-key",
				"-o", "StrictHostKeyChecking=yes",
				"-o", "UserKnownHostsFile=/tmp/known-hosts",
				"-o", "ProxyCommand=ssh -tt -W [%h]:%p -l gw-user ::1 -o ServerAliveInterval=30 -o ForwardAgent=no -o ClearAllForwardings=yes -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null",
			}))
		})

		It("returns ssh options non-bracketed if host is IPv6 and SOCKS5Proxy is set", func() {
			host = boshdir.Host{Host: "::1", Username: "user"}
			connOpts.SOCKS5Proxy = "socks5://some-proxy"
			Expect(act()).To(Equal([]string{
				"-o", "ServerAliveInterval=30",
				"-o", "ForwardAgent=no",
				"-o", "PasswordAuthentication=no",
				"-o", "IdentitiesOnly=yes",
				"-o", "IdentityFile=/tmp/priv-key",
				"-o", "StrictHostKeyChecking=yes",
				"-o", "UserKnownHostsFile=/tmp/known-hosts",
				"-o", "ProxyCommand=nc -x some-proxy %h %p",
			}))
		})
	})
})
| cppforlife/bosh-lint | src/github.com/cloudfoundry/bosh-cli/ssh/ssh_args_test.go | GO | apache-2.0 | 9,932 |
package org.seasar.doma.internal.apt.meta.parameter;
import static org.seasar.doma.internal.util.AssertionUtil.assertNotNull;
/**
 * Metadata for a callable-SQL list parameter of optional-int values.
 * Holds only the parameter name and dispatches to the matching visitor method.
 */
public class OptionalIntListParameterMeta implements CallableSqlParameterMeta {

  /** Parameter name; never {@code null} (asserted in the constructor). */
  private final String name;

  public OptionalIntListParameterMeta(String name) {
    assertNotNull(name);
    this.name = name;
  }

  /** @return the parameter name */
  public String getName() {
    return name;
  }

  /** Visitor dispatch: routes to {@code visitOptionalIntListParameterMeta}. */
  @Override
  public <R, P> R accept(CallableSqlParameterMetaVisitor<R, P> visitor, P p) {
    return visitor.visitOptionalIntListParameterMeta(this, p);
  }
}
| domaframework/doma | doma-processor/src/main/java/org/seasar/doma/internal/apt/meta/parameter/OptionalIntListParameterMeta.java | Java | apache-2.0 | 554 |
/*
* Copyright (C) 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.ros.concurrent;
import com.google.common.base.Preconditions;
import java.util.concurrent.ExecutorService;
/**
 * An interruptable loop that can be run by an {@link ExecutorService}.
 *
 * @author khughes@google.com (Keith M. Hughes)
 */
public abstract class CancellableLoop implements Runnable {

  /**
   * {@code true} if the code has been run once, {@code false} otherwise.
   * Guarded by {@code this}.
   */
  private boolean ranOnce = false;

  /**
   * The {@link Thread} the code will be running in.
   *
   * <p>Volatile: written by the executing thread ({@link #run()}) and read by
   * arbitrary other threads via {@link #cancel()} and {@link #isRunning()}.
   */
  private volatile Thread thread;

  @Override
  public void run() {
    synchronized (this) {
      Preconditions.checkState(!ranOnce, "CancellableLoops cannot be restarted.");
      ranOnce = true;
      thread = Thread.currentThread();
    }
    try {
      setup();
      while (!thread.isInterrupted()) {
        loop();
      }
    } catch (InterruptedException e) {
      handleInterruptedException(e);
    } finally {
      thread = null;
    }
  }

  /**
   * The setup block for the loop. This will be called exactly once before
   * the first call to {@link #loop()}.
   */
  protected void setup() {
  }

  /**
   * The body of the loop. This will run continuously until the
   * {@link CancellableLoop} has been interrupted externally or by calling
   * {@link #cancel()}.
   */
  protected abstract void loop() throws InterruptedException;

  /**
   * An {@link InterruptedException} was thrown.
   */
  protected void handleInterruptedException(InterruptedException e) {
  }

  /**
   * Interrupts the loop.
   */
  public void cancel() {
    // Snapshot the field: the loop may finish (and null out the field)
    // between the null check and the interrupt, which would otherwise
    // throw a NullPointerException.
    Thread snapshot = thread;
    if (snapshot != null) {
      snapshot.interrupt();
    }
  }

  /**
   * @return {@code true} if the loop is running
   */
  public boolean isRunning() {
    // Same snapshot pattern as cancel() to avoid a check-then-act race.
    Thread snapshot = thread;
    return snapshot != null && !snapshot.isInterrupted();
  }
}
| interactivespaces/interactivespaces-rosjava | rosjava/src/main/java/org/ros/concurrent/CancellableLoop.java | Java | apache-2.0 | 2,359 |
package org.projectiles;
import static java.awt.Color.red;
import static java.lang.Math.*;
import java.awt.image.BufferedImage;
import static java.lang.System.*;
import org.rooms.*;
import org.resources.AudioPack;
import org.resources.Collisions;
import org.resources.Element;
import org.resources.ImagePack;
import org.resources.VisibleObject;
import org.walls.DamageableWall;
import org.walls.Wall;
import org.enemies.*;
public class AirBall extends Projectile {

    // Sprite frames shared by all air balls (0-7: directional flight,
    // 8-11: explosion sequence — grounded in the run() frame selection below).
    public static BufferedImage[] ani = airani;

    /**
     * Creates an air projectile at (X, Y) moving with velocity (vx, vy),
     * scaled by 2.5. A zero velocity is replaced by a random direction.
     * The sprite frame and hitbox are chosen from the travel direction.
     * life < 0 means "in flight" (random flight duration up to ~150 ticks);
     * life > 0 counts down the explosion animation.
     */
    public AirBall(float X, float Y, float vx, float vy) {
        image = ani[0];
        dead = false;
        color = red;
        x = X;
        y = Y;
        vX = vx * 2.5f;
        vY = vy * 2.5f;
        life = lifeCapacity = -1 - (int) round(150 * random());
        w = h = 22;
        if (vX == 0 && vY == 0) {
            // Stationary spawn: pick a random direction instead.
            vX = 5 * (float) random() - 2.5f;
            vY = 5 * (float) random() - 2.5f;
        }
        if (abs(vX) > abs(vY)) {
            // Mostly horizontal flight: wider, flatter hitbox.
            h = 13;
            w = 20;
        }
        // int counter=0;
        // while(hypot(vX,vY)<1&&counter<10){counter++;vX*=2;vY*=2;}
        // Pick an axis-aligned frame unless flying near a 45° diagonal.
        if (abs(abs(atan((double) vY / vX)) - PI / 4) > PI / 8)
            image = ani[abs(vX) > abs(vY) ? (vX < 0 ? 0 : 1) : (vY > 0 ? 2 : 3)];
        else {
            image = ani[vX > 0 ? (vY < 0 ? 4 : 6) : (vY < 0 ? 5 : 7)];
            w = h = 13;
        }
        synchronized (sync) {
            livePros++;
        }
    }

    /**
     * One simulation tick: advances the explosion animation (life > 0),
     * moves and decelerates the ball while in flight (life < 0), and
     * checks wall and enemy collisions. On impact the ball converts to a
     * short explosion (life = 10) with a small random position jitter.
     */
    public void run() {
        // boolean frame=Clock.frame;
        if (life != 0) {
            // Explosion animation: step through frames 11..8 as life grows.
            if (life > 0)
                image = ani[11];
            if (life > 3)
                image = ani[10];
            if (life > 6)
                image = ani[9];
            if (life > 8)
                image = ani[8];
            // Clock.waitFor(frame=!frame);
            // if(Clock.dead)break;
            if (life < 0) {
                // In flight: apply drag and integrate position (screen y is inverted).
                vX *= .98;
                vY *= .98;
                x += vX;
                y -= vY;
            }
            if (life == -200) {
                // Flight timeout: detonate in place.
                vY = vX = 0;
                life = 10;
                w = h = 16;
                image = ani[8];
                x += round(10 * random());
                y += round(10 * random());
            }
            life--;
            boolean collided = false;
            // AABB test against every wall; damageable walls take 5 damage.
            for (int i = 0; i < Room.walls.size(); i++) {
                Wall wal = Room.walls.get(i);
                if (vY == 0 && vX == 0)
                    break;
                if ((x < wal.x + wal.w && x + w > wal.x) && (y < wal.y + wal.h && y + h > wal.y)) {
                    collided = true;
                    if (wal.damagable) {
                        w = h = 16;
                        ((DamageableWall) wal).life -= 5;
                        // if
                        // (Jump.kraidLife<=0&&Jump.countdown<0){Jump.countdown=500;
                        // AudioPack.playAudio("Ima Firen Mah Lazor!.wav",0.1);
                        // }
                    }
                }
            }
            synchronized (Room.enemies) {
                for (VisibleObject en : Room.enemies) {
                    if (Collisions.collides(this, en)) {
                        // Knock the enemy back; deal air damage only while in flight.
                        if (life < 0 || life > 2) {
                            ((Enemy) en).vMultiplier = -1;
                        }
                        if (life < 0) {
                            ((Enemy) en).damage(Element.AIR, 12);
                            image = ani[8];
                            collided = true;
                        }
                    }
                }
            }
            if (collided) {
                // Convert to explosion with a small positional jitter and a sound.
                vY = vX = 0;
                life = 10;
                w = h = 16;
                image = ani[8];
                x += round(10 * random());
                y += round(10 * random());
                // AudioPack.playAudio("BExplosion2.wav",0.05);
                AudioPack.playClip(boom);
            }
        } else
            dead = true;
    }
}
| zapper59/RangerHale---Space-Adventure | src/org/projectiles/AirBall.java | Java | apache-2.0 | 2,909 |
package lab2;
import java.util.PriorityQueue;
public class Lab2b {

    /**
     * A 2D point stored in the polyline's doubly linked list.
     * d is the "detour" cost of this point: how much shorter the polyline
     * becomes if the point is removed (dist to prev + dist to next - direct
     * prev-to-next distance). Smaller d = less significant point.
     */
    private static class Vector implements Comparable{
        public double x, y, d;
        public DLList.Node node;

        public Vector(double x, double y){
            // d = 100 acts as "not yet computed / endpoint" sentinel.
            this(x,y,null,100);
        }

        public Vector(double x, double y, DLList.Node n, double d){
            this.x = x;
            this.y = y;
            node = n;
            this.d = d;
        }

        public void setNode(DLList.Node n){
            node = n;
        }

        // Euclidean distance to another point.
        public double getDist(Vector v){
            double xD = x - v.x;
            double yD = y - v.y;
            return Math.sqrt(xD*xD + yD*yD);
        }

        public double getX(){
            return x;
        }

        public double getY(){
            return y;
        }

        // Recomputes d from the current list neighbours; endpoints keep 100.
        public void calculateDelta() {
            if (node.prev != null && node.next != null){
                Vector v1 = (Vector) node.prev.elt;
                Vector v2 = (Vector) node.next.elt;
                double l1 = getDist(v1);
                double l2 = getDist(v2);
                double l3 = v1.getDist(v2);
                d= l1 + l2 - l3;
            }else
                d = 100;
        }

        @Override
        public int compareTo(Object o){
            // NOTE(review): comparison uses the CURRENT d values (scaled to
            // avoid losing sub-integer differences), but then mutates d as a
            // side effect AFTER computing the result. A mutating, order-
            // dependent compareTo violates the Comparable contract and can
            // leave the PriorityQueue ordering stale — confirm intent.
            int v = (int) ( (d - ((Vector)o).d) *10000);
            calculateDelta();
            return v;
        }
    }

    // NOTE(review): static state persists across calls — simplifyShape is
    // not safe to call twice in one JVM run without clearing these.
    private static DLList listOfPoints = new DLList<Vector>();
    private static PriorityQueue<Vector> q = new PriorityQueue<Vector>();

    /**
     * Polyline simplification: repeatedly removes the point with the
     * smallest detour cost until only k points remain.
     *
     * @param poly flat array of coordinates [x0, y0, x1, y1, ...]
     * @param k    number of points to keep
     * @return flat coordinate array of the k remaining points
     */
    public static double[] simplifyShape(double[] poly, int k) {
        int pointsToRemove = (poly.length / 2) - k;
        // Populate list of Vectors (points in graph)
        listOfPoints.addFirst(new Vector(poly[0], poly[1]));
        DLList.Node node = listOfPoints.first;
        ((Vector) node.elt).setNode(node);
        for (int i = 2; i<poly.length;i+=2)
            populateList(poly[i], poly[i + 1]);
        // Last point: compute its cost and enqueue it too.
        ((Vector)listOfPoints.last.elt).calculateDelta();
        q.add(((Vector) listOfPoints.last.elt));
        // Debug dump of all deltas.
        DLList.Node testn = listOfPoints.first;
        while (testn != null) {
            System.out.println(((Vector) testn.elt).d);
            testn = testn.next;
        }
        // Remove the cheapest point, pointsToRemove times.
        for (int i = 0; i<pointsToRemove; i++)
            delete(((Vector)(q.remove())).node);
        // Flatten the surviving points back into [x, y, x, y, ...].
        double[] array = new double[poly.length-(pointsToRemove*2)];
        DLList.Node curNode = listOfPoints.first;
        for (int i = 0; i<array.length; i++){
            array[i++] = ((Vector)curNode.elt).x;
            array[i] = ((Vector)curNode.elt).y;
            curNode = curNode.next;
        }
        return array;
    }

    // Appends (x, y) to the list and enqueues the PREVIOUS point, whose
    // detour cost is now computable (it has both neighbours).
    private static void populateList(double x, double y) {
        listOfPoints.addLast(new Vector(x, y));
        DLList.Node n = listOfPoints.last;
        ((Vector) n.elt).setNode(n);
        ((Vector)n.prev.elt).calculateDelta();
        q.add(((Vector) n.prev.elt));
    }

    // Removes node n from the list and refreshes its two neighbours' costs
    // in the priority queue (remove, recompute, re-add).
    private static void delete(DLList.Node n){
        Vector v = ((Vector)n.elt);
        System.out.println("Deleting point: (" + v.x + ", " + v.y + "), d = " + (int)(v.d*100));
        DLList.Node prev = n.prev;
        DLList.Node next = n.next;
        listOfPoints.remove(n);
        q.remove((Vector) prev.elt);
        ((Vector)prev.elt).calculateDelta();
        q.add((Vector) prev.elt);
        q.remove((Vector) next.elt);
        ((Vector)next.elt).calculateDelta();
        q.add((Vector)next.elt);
    }
}
| AlexanderSopov/datastruktur | src/lab2/Lab2b.java | Java | apache-2.0 | 2,889 |
package pub.devrel.easypermissions.helper;
import android.app.Activity;
import android.app.FragmentManager;
import android.content.Context;
import android.support.annotation.NonNull;
import android.support.v4.app.ActivityCompat;
/**
 * Permissions helper for {@link Activity}.
 *
 * Delegates permission requests and rationale checks to
 * {@link ActivityCompat} using the host activity directly.
 */
class ActivityPermissionHelper extends BaseFrameworkPermissionsHelper<Activity> {

    public ActivityPermissionHelper(Activity host) {
        super(host);
    }

    /** Uses the framework (non-support) fragment manager of the host activity. */
    @Override
    public FragmentManager getFragmentManager() {
        return getHost().getFragmentManager();
    }

    /** Issues the system permission dialog without any rationale UI. */
    @Override
    public void directRequestPermissions(int requestCode, @NonNull String... perms) {
        ActivityCompat.requestPermissions(getHost(), perms, requestCode);
    }

    @Override
    public boolean shouldShowRequestPermissionRationale(@NonNull String perm) {
        return ActivityCompat.shouldShowRequestPermissionRationale(getHost(), perm);
    }

    /** The activity itself serves as the context. */
    @Override
    public Context getContext() {
        return getHost();
    }
}
| SUPERCILEX/easypermissions | easypermissions/src/main/java/pub/devrel/easypermissions/helper/ActivityPermissionHelper.java | Java | apache-2.0 | 1,014 |
/*
* Copyright 2015 Formal Methods and Tools, University of Twente
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <sigref_util.h>
#include <sylvan_common.h>
/* Calculate random height */
DECLARE_THREAD_LOCAL(thread_rng, uint64_t);

// Per-thread pseudo-random generator: a 64-bit linear congruential
// generator (MMIX constants). Advances the thread-local state and
// returns the new value. Seeded by init_trng() below.
uint64_t trng()
{
    LOCALIZE_THREAD_LOCAL(thread_rng, uint64_t);
    thread_rng = 2862933555777941757ULL * thread_rng + 3037000493ULL;
    SET_THREAD_LOCAL(thread_rng, thread_rng);
    return thread_rng;
}
// Seeds the thread-local RNG of one worker from rand() (runs on every
// Lace worker via TOGETHER below).
VOID_TASK_0(init_trng_par)
{
    SET_THREAD_LOCAL(thread_rng, (((uint64_t)rand()) << 32 | rand()));
}

// Creates the thread-local RNG slot and seeds it on all workers.
VOID_TASK_IMPL_0(init_trng)
{
    INIT_THREAD_LOCAL(thread_rng);
    TOGETHER(init_trng_par);
}
// Three-way BDD conjunction: computes a AND b AND c in one parallel
// recursion (Lace task), with memoization in the Sylvan operation cache
// under operation id 260.
TASK_IMPL_3(BDD, three_and, BDD, a, BDD, b, BDD, c)
{
    // Terminal cases: any false operand collapses to false; a true operand
    // reduces to a binary conjunction of the remaining two.
    if (a == sylvan_false || b == sylvan_false || c == sylvan_false) return sylvan_false;
    if (a == sylvan_true) return sylvan_and(b, c);
    if (b == sylvan_true) return sylvan_and(a, c);
    if (c == sylvan_true) return sylvan_and(a, b);

    // Cache lookup (op id 260 packed into the high bits of the first key).
    BDD result;
    if (cache_get(a|(260LL<<42), b, c, &result)) return result;

    sylvan_gc_test();

    // Recurse on the topmost variable among the three operands.
    BDDVAR a_var = sylvan_var(a);
    BDDVAR b_var = sylvan_var(b);
    BDDVAR c_var = sylvan_var(c);
    BDD var = a_var;
    if (var > b_var) var = b_var;
    if (var > c_var) var = c_var;

    // Cofactor each operand w.r.t. var (unchanged if var is below its root).
    BDD a_low, a_high;
    if (var == a_var) {
        a_low = sylvan_low(a);
        a_high = sylvan_high(a);
    } else {
        a_low = a_high = a;
    }
    BDD b_low, b_high;
    if (var == b_var) {
        b_low = sylvan_low(b);
        b_high = sylvan_high(b);
    } else {
        b_low = b_high = b;
    }
    BDD c_low, c_high;
    if (var == c_var) {
        c_low = sylvan_low(c);
        c_high = sylvan_high(c);
    } else {
        c_low = c_high = c;
    }

    // Compute both cofactors in parallel; protect intermediates from GC.
    bdd_refs_spawn(SPAWN(three_and, a_low, b_low, c_low));
    BDD high = bdd_refs_push(CALL(three_and, a_high, b_high, c_high));
    BDD low = bdd_refs_sync(SYNC(three_and));
    result = sylvan_makenode(var, low, high);
    bdd_refs_pop(1);

    cache_put(a|(260LL<<42), b, c, result);
    return result;
}
// Swaps primed and unprimed state variables by flipping the lowest bit of
// every variable index (x <-> x'). Memoized under operation id 258.
TASK_IMPL_1(MTBDD, swap_prime, MTBDD, set)
{
    if (mtbdd_isleaf(set)) return set;

    // TODO: properly ignore action/block variables
    if (mtbdd_getvar(set) >= 99999) return set;

    MTBDD result;
    if (cache_get(set|(258LL<<42), set, 0, &result)) return result;

    sylvan_gc_test();

    // Recurse on both children in parallel, then rebuild with var^1.
    mtbdd_refs_spawn(SPAWN(swap_prime, mtbdd_getlow(set)));
    MTBDD high = mtbdd_refs_push(CALL(swap_prime, mtbdd_gethigh(set)));
    MTBDD low = mtbdd_refs_sync(SYNC(swap_prime));
    result = mtbdd_makenode(sylvan_var(set)^1, low, high);
    mtbdd_refs_pop(1);

    cache_put(set|(258LL<<42), set, 0, result);
    return result;
}
// Sums mtbdd_satcount over an array of `count` decision diagrams using
// parallel divide-and-conquer (Lace SPAWN/SYNC).
TASK_IMPL_3(double, big_satcount, MTBDD*, dds, size_t, count, size_t, nvars)
{
    if (count == 1) return mtbdd_satcount(*dds, nvars);
    SPAWN(big_satcount, dds, count/2, nvars);
    double result = big_satcount(dds+count/2, count-count/2, nvars);
    return result + SYNC(big_satcount);
}
// Combines an array of `count` MTBDDs into one with mtbdd_plus, using
// parallel divide-and-conquer. Intermediates are pushed on the refs stack
// so garbage collection cannot reclaim them mid-computation.
TASK_IMPL_2(MTBDD, big_union, MTBDD*, sets, size_t, count)
{
    if (count == 1) return *sets;

    mtbdd_refs_spawn(SPAWN(big_union, sets, count/2));
    MTBDD right = mtbdd_refs_push(CALL(big_union, sets+count/2, count-count/2));
    MTBDD left = mtbdd_refs_push(mtbdd_refs_sync(SYNC(big_union)));
    MTBDD result = mtbdd_plus(left, right);
    mtbdd_refs_pop(2);

    return result;
}
| utwente-fmt/sigrefmc | src/sigref_util.cpp | C++ | apache-2.0 | 3,863 |
// Copyright (c) 2014 kingsoft
#include <stdio.h>
#include <stdlib.h>
#include <string>
#include "hiredis/adapters/libuv.h"
#include "hiredis/hiredis.h"
#include "hiredis/async.h"
#include "./rediswrapper.h"
// Logging helpers. The bodies are wrapped in do { ... } while (0) and the
// stray trailing semicolons removed, so each macro expands to exactly one
// statement and is safe inside unbraced if/else branches (the old form
// `fprintf(...);;` produced a syntax error before an `else`).
#ifndef see_here
#define see_here dba_err("check here [%s:%d:%s]\n", __FILE__, __LINE__, __func__)
#endif
#ifndef dba_dbg
#define dba_dbg(fmt, ...) \
  do { fprintf(stdout, fmt, ##__VA_ARGS__); } while (0)
#endif
#ifndef dba_err
#define dba_err(fmt, ...) \
  do { fprintf(stderr, fmt, ##__VA_ARGS__); } while (0)
#endif
namespace immortal {
namespace db {
using std::string;
std::tuple<string, uint16_t> ParseAddr(const string& addr);
static void ConnectCallback(const redisAsyncContext *c, int status) {
if (status != REDIS_OK) {
fprintf(stderr, "Error: %s\n", c->errstr);
auto r = (RedisClient *)c->data;
r->InitRedis();
return;
}
printf("Connected...\n");
}
static void DisconnectCallback(const redisAsyncContext *c, int status) {
if (status != REDIS_OK) {
fprintf(stderr, "Error: %s\n", c->errstr);
}
printf("Disconnected...and try to reconnect\n");
auto r = (RedisClient *)c->data;
r->InitRedis();
}
// Completion callback for SET-style commands (status reply expected).
// privdata carries the request sequence number; the outcome is delivered to
// the client's registered callback with err != 0 on failure.
static void RedisSetCallback2(redisAsyncContext *c, void *r, void *privdata) {
  uint32_t seq = (uintptr_t)privdata;
  auto cb = ((RedisClient *)c->data)->cb_;
  redisReply *reply = (redisReply *)r;
  int err = 0;
  if (reply == NULL) {
    dba_err("redis bug!? reply is NULL\n");
    // BUG FIX: previously this path set err = -1 and then returned WITHOUT
    // invoking cb, leaving the request pending forever (dead store). Fall
    // through so the caller is notified of the failure.
    err = -1;
  } else if (reply->type != REDIS_REPLY_STATUS) {
    dba_err("redis bug!? HMSET reply should be status, but %d\n", reply->type);
  } else {
    // dba_dbg("%s\n", reply->str);
  }
  cb(seq, NULL, 0, err);
}
// Generic completion callback: accepts any reply type and simply reports
// success/failure (err) to the client's callback, tagged with seq.
static void RedisCommandCallback(redisAsyncContext *c, void *r, void *privdata) {
  uint32_t seq = (uintptr_t)privdata;
  auto cb = ((RedisClient *)c->data)->cb_;
  redisReply *reply = (redisReply *)r;
  int err = 0;
  if (reply == NULL) {
    dba_err("redis bug!? reply is NULL\n");
    // BUG FIX: previously set err = -1 and returned without invoking cb, so
    // the caller never learned the request failed. Report it instead.
    err = -1;
  }
  // Any non-NULL reply type is accepted; nothing further to inspect here.
  cb(seq, NULL, 0, err);
}
// Completion callback for GET: delivers the value bytes (or NULL on a cache
// miss / error) to the client's callback, tagged with the request seq.
static void RedisGetCallback2(redisAsyncContext *c, void *r, void *privdata) {
  uint32_t seq = (uintptr_t)privdata;
  auto cb = ((RedisClient *)c->data)->cb_;
  redisReply *reply = (redisReply *)r;
  if (reply == NULL) {
    dba_err("redis bug!? reply is NULL\n");
    // BUG FIX: previously returned here without invoking cb, so the request
    // was left pending forever (and falling through would have dereferenced
    // the NULL reply). Report the failure explicitly.
    cb(seq, NULL, 0, -1);
    return;
  }
  int err = 0;
  if (reply->type == REDIS_REPLY_NIL) {
    // Key absent: reply->str is NULL and reply->len is 0 -- reported as-is.
    // dba_dbg("no data\n");
  } else if (reply->type != REDIS_REPLY_STRING) {
    dba_err("redis bug!? GET reply should be str, but %d\n", reply->type);
    err = -1;
  }
  cb(seq, reply->str, reply->len, err);
}
// Queue an async GET for `key`; the result arrives in RedisGetCallback2
// tagged with `seq`. Returns 0 when the command was queued, -1 otherwise.
int RedisClient::Get(uint32_t seq, const char *key) {
  const int rc = redisAsyncCommand(redis_ctx_, RedisGetCallback2,
                                   (void *)(uintptr_t)seq, "GET %s", key);
  if (rc == 0) {
    return 0;
  }
  dba_err("redisAsyncCommand GET error\n");
  return -1;
}
// Queue an async SET of `len` bytes from `buf` under `key`; completion is
// reported via RedisSetCallback2 tagged with `seq`. Returns 0 when queued.
int RedisClient::Set(uint32_t seq, const char *key,
                     const char *buf, size_t len) {
  const int rc = redisAsyncCommand(redis_ctx_, RedisSetCallback2,
                                   (void *)(uintptr_t)seq, "SET %s %b", key, buf, len);
  if (rc == 0) {
    return 0;
  }
  dba_err("redisAsyncCommand SET error\n");
  return -1;
}
int RedisClient::Del(uint32_t seq, const char *key) {
int r;
r = redisAsyncCommand(redis_ctx_, RedisSetCallback2,
(void *)(uintptr_t)seq, "DEL %s", key);
if (r != 0) {
dba_err("redisAsyncCommand DEL error\n");
return -1;
}
return 0;
}
int RedisClient::Command(uint32_t seq, const char *fmt, ...) {
int r;
va_list ap;
va_start(ap, fmt);
r = redisvAsyncCommand(redis_ctx_, RedisCommandCallback, (void *)(uintptr_t)seq, fmt, ap);
va_end(ap);
if (r != 0) {
dba_err("redisvAsyncCommand error\n");
return -1;
}
return 0;
}
#if 0
// NOTE(review): preprocessor-disabled draft kept for reference. It maps a
// protobuf Record onto an HMSET (COMMAND KEY field value ...) and references
// RedisHSetCallback / dba_ / dbaproto, none of which exist in this file.
int RedisClient::HSet(Handle *req, google::protobuf::Message *msg) {
  dbaproto::Record *save = (dbaproto::Record *)(msg);
  // TODO(lijie3): select redis conn by table name
  req->data = dba_;
  int r;
  // Start copy save to redis protocol
  // COMMAND KEY args...
  int argc = 2 + save->values_size() * 2;
  const char **argv = (const char **)malloc(sizeof(char *) * argc);
  size_t *argvlen = (size_t *)malloc(sizeof(size_t) * argc);
  // set command
  argv[0] = "HMSET";
  argvlen[0] = strlen(argv[0]);
  // set key
  argv[1] = save->key().value().c_str();
  argvlen[1] = save->key().value().size();
  // set args
  for (int i = 0, j = 2; i < save->values_size(); i++) {
    argv[j] = (char *)(save->values(i).field().c_str());
    argvlen[j] = save->values(i).field().size();
    j++;
    argv[j] = (char *)(save->values(i).value().c_str());
    argvlen[j] = save->values(i).value().size();
    j++;
  }
  r = redisAsyncCommandArgv(redis_ctx_, RedisHSetCallback, req,
                            argc, argv, argvlen);
  free(argv);
  free(argvlen);
  if (r != 0) {
    fprintf(stderr, "redisAsyncCommand HMSET error\n");
    return -1;
  }
  return 0;
}
#endif
// (Re)establish the async connection to redis at ip_:port_ and attach it to
// the libuv loop. Installs the connect/disconnect callbacks and stores
// `this` in the context so the static callbacks can reach the client.
// Returns 0 on success, -1 on failure.
int RedisClient::InitRedis() {
  printf("connect redis %s:%d\n", ip_.c_str(), port_);
  redis_ctx_ = redisAsyncConnect(ip_.c_str(), port_);
  if (redis_ctx_->err) {
    fprintf(stderr, "Error: %s\n", redis_ctx_->errstr);
    redisAsyncFree(redis_ctx_);
    redis_ctx_ = NULL;  // BUG FIX: don't leave a dangling pointer after free
    return -1;
  }
  int r = redisLibuvAttach(redis_ctx_, loop_);
  if (r != REDIS_OK) {
    fprintf(stderr, "redis attach to libuv failed\n");
    redisAsyncFree(redis_ctx_);
    redis_ctx_ = NULL;  // BUG FIX: ditto
    return -1;
  }
  // save current env
  redis_ctx_->data = (void *)this;
  redisAsyncSetConnectCallback(redis_ctx_, ConnectCallback);
  redisAsyncSetDisconnectCallback(redis_ctx_, DisconnectCallback);
  return 0;
}
// Store the result callback, event loop and endpoint, then connect at once.
int RedisClient::Init(RedisCallback_t cb, void *loop, const std::string& ip, uint32_t port) {
  cb_ = cb;
  loop_ = static_cast<uv_loop_t *>(loop);
  ip_ = ip;
  port_ = port;
  return InitRedis();
}
// Convenience overload: parses a "host:port" address string and delegates to
// the main Init overload.
int RedisClient::Init(RedisCallback_t cb, void *loop, const std::string& url) {
  const auto addr = ParseAddr(url);
  return Init(cb, loop, std::get<0>(addr), std::get<1>(addr));
}
} // namespace db
} // namespace immortal
| lijie/siriusdb | dbproxy/rediswrapper.cpp | C++ | apache-2.0 | 6,087 |
package com.lerx.integral.rule.dao.imp;
import java.util.List;
import org.springframework.dao.DataAccessException;
import org.springframework.orm.hibernate3.support.HibernateDaoSupport;
import com.lerx.integral.rule.dao.IIntegralRuleDao;
import com.lerx.integral.rule.vo.IntegralRule;
/**
 * Hibernate-backed DAO for {@link IntegralRule} rows. Enforces two rules at
 * this layer: rule names are unique (see {@link #add}), and at most one rule
 * per localPostion is active (state == true) at a time.
 */
public class IntegralRuleDaoImp extends HibernateDaoSupport implements IIntegralRuleDao {
	/**
	 * Persists a new rule unless one with the same name already exists.
	 *
	 * @return the generated id of the saved rule, or 0 when the name is taken
	 */
	@Override
	public int add(IntegralRule ir) {
		if (findByName(ir.getName())==null){
			this.getHibernateTemplate().save(ir);
			return ir.getId();
		}else{
			return 0;
		}
	}
	/**
	 * Updates an existing rule. Missing createTime/localPostion values are
	 * back-filled from the stored row; when the rule is being activated
	 * (state == true) all rules at the same position are deactivated first.
	 *
	 * @return true on success, false when a data-access error occurred
	 */
	@Override
	public boolean modify(IntegralRule ir) {
		try {
			IntegralRule irdb=findById(ir.getId());
			if (ir.getCreateTime()==null){
				ir.setCreateTime(irdb.getCreateTime());
			}
			if (ir.getLocalPostion()==0){
				ir.setLocalPostion(irdb.getLocalPostion());
			}
			if (ir.isState()){
				// only one rule per position may be active at a time
				String hql="update IntegralRule i set i.state=false where i.localPostion=?";
				this.getHibernateTemplate().bulkUpdate(hql,ir.getLocalPostion());
			}
			this.getHibernateTemplate().saveOrUpdate(ir);
			return true;
		} catch (DataAccessException e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
			return false;
		}
	}
	/**
	 * Deletes the rule with the given id.
	 *
	 * @return true on success, false when the delete failed
	 */
	@Override
	public boolean deyById(int id) {
		try {
			this.getHibernateTemplate().delete(
					this.getHibernateTemplate()
							.get("com.lerx.integral.rule.vo.IntegralRule", id));
			return true;
		} catch (DataAccessException e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
			return false;
		}
	}
	/** Loads a rule by primary key; null when it does not exist. */
	@Override
	public IntegralRule findById(int id) {
		// TODO Auto-generated method stub
		return
				(IntegralRule) this.getHibernateTemplate()
						.get("com.lerx.integral.rule.vo.IntegralRule", id);
	}
	/** Lists all rules configured for the given position. */
	@Override
	public List<IntegralRule> query(int localPostion) {
		String hql="from IntegralRule i where i.localPostion=?";
		@SuppressWarnings("unchecked")
		List<IntegralRule> list = (List<IntegralRule>)this.getHibernateTemplate().find(hql, localPostion);
		return list;
	}
	/** Finds a rule by its (unique) name; null when none matches. */
	@Override
	public IntegralRule findByName(String name) {
		String hql="from IntegralRule i where i.name=?";
		@SuppressWarnings("unchecked")
		List<IntegralRule> list=(List<IntegralRule>)this.getHibernateTemplate().find(hql, name);
		if (list.isEmpty()){
			return null;
		}else{
			return list.get(0);
		}
	}
	/**
	 * Activates or deactivates a rule. On activation, all other rules at the
	 * same position are deactivated first (single-active-rule invariant).
	 * NOTE(review): the id is concatenated into the HQL strings below; safe
	 * only because id is an int -- parameter binding would be more consistent
	 * with the rest of this class.
	 *
	 * @return always true
	 */
	@Override
	public boolean changeState(int id,boolean state,int localPostion) {
		String hql="";
		if (state){
			System.out.println("localPostion:"+localPostion);
			hql="update IntegralRule i set i.state=false where i.localPostion=?";
			this.getHibernateTemplate().bulkUpdate(hql,localPostion);
			hql="update IntegralRule i set i.state=true where i.id="+id;
			this.getHibernateTemplate().bulkUpdate(hql);
		}else{
			IntegralRule i=findById(id);
			i.setState(state);
			this.getHibernateTemplate().saveOrUpdate(i);
//			hql="update IntegralRule i set i.state=false where i.id="+id;
//			this.getHibernateTemplate().bulkUpdate(hql);
		}
		return true;
	}
	/** Returns the active (state == true) rule for the position, or null. */
	@Override
	public IntegralRule findDefault(int localPostion) {
		String hql="from IntegralRule i where i.state=true and i.localPostion=?";
		@SuppressWarnings("unchecked")
		List<IntegralRule> list=(List<IntegralRule>)this.getHibernateTemplate().find(hql,localPostion);
		if (list.isEmpty()){
			return null;
		}else{
			return list.get(0);
		}
//		return null;
	}
}
| chenxyzy/cms | src/com/lerx/integral/rule/dao/imp/IntegralRuleDaoImp.java | Java | apache-2.0 | 3,349 |
/*
* Copyright 2014 Hippo B.V. (http://www.onehippo.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Modified by Ebrahim Aharpour
* to integrate the authentication of the user with the CMS
*/
package nl.openweb.hippo.umd.webservices;
import java.io.Serializable;
import java.lang.reflect.Method;
import javax.jcr.LoginException;
import javax.jcr.Session;
import javax.jcr.SimpleCredentials;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.core.Response;
import javax.ws.rs.ext.Provider;
import org.apache.cxf.jaxrs.ext.RequestHandler;
import org.apache.cxf.jaxrs.ext.ResponseHandler;
import org.apache.cxf.jaxrs.model.ClassResourceInfo;
import org.apache.cxf.jaxrs.model.OperationResourceInfo;
import org.apache.cxf.message.Message;
import org.apache.cxf.transport.http.AbstractHTTPDestination;
import org.apache.wicket.Application;
import org.apache.wicket.protocol.http.servlet.ServletWebRequest;
import org.apache.wicket.session.ISessionStore;
import org.hippoecm.frontend.model.UserCredentials;
import org.hippoecm.frontend.session.PluginUserSession;
import org.onehippo.forge.webservices.AuthenticationConstants;
import org.onehippo.forge.webservices.jaxrs.exception.UnauthorizedException;
import org.onehippo.forge.webservices.jaxrs.jcr.util.JcrSessionUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Provider
public class HippoAuthenticationRequestHandler implements RequestHandler, ResponseHandler {
    private static final Logger LOG = LoggerFactory.getLogger(HippoAuthenticationRequestHandler.class);
    // NOTE(review): CXF providers are typically singletons, so this mutable
    // field is shared by concurrent requests -- overlapping requests can
    // clobber each other's JCR session. Confirm the deployment guarantees a
    // per-request instance, or move the session into request/Message scope.
    private Session session = null;
    // Methods whose StreamingOutput entity is written AFTER handleResponse()
    // runs; their session must not be logged out there (see handleResponse).
    public static final String[] STREAMING_OUTPUT_SERVICES = new String[] { "getGroupsOverview", "getUsersOverview" };
    /**
     * Logs into the repository with the credentials of the current CMS
     * (Wicket) session and stores the resulting JCR session on the HTTP
     * request under {@code AuthenticationConstants.HIPPO_SESSION}.
     * Returning null lets CXF continue processing the request.
     *
     * @throws UnauthorizedException when there is no CMS session, no
     *         credentials, or the repository login fails
     */
    public Response handleRequest(Message m, ClassResourceInfo resourceClass) {
        ISessionStore sessionStore = Application.get().getSessionStore();
        Serializable attribute = sessionStore.getAttribute(
                new ServletWebRequest((HttpServletRequest) m.get(AbstractHTTPDestination.HTTP_REQUEST), ""), "session");
        if (attribute instanceof PluginUserSession) {
            UserCredentials userCredentials = ((PluginUserSession) attribute).getUserCredentials();
            if (userCredentials != null) {
                SimpleCredentials jcrCredentials = (SimpleCredentials) userCredentials.getJcrCredentials();
                String username = jcrCredentials.getUserID();
                String password = new String(jcrCredentials.getPassword());
                try {
                    session = JcrSessionUtil.createSession(username, password);
                    if (isAuthenticated()) {
                        HttpServletRequest request = (HttpServletRequest) m.get(AbstractHTTPDestination.HTTP_REQUEST);
                        request.setAttribute(AuthenticationConstants.HIPPO_SESSION, session);
                        return null;
                    } else {
                        throw new UnauthorizedException();
                    }
                } catch (LoginException e) {
                    LOG.debug("Login failed: {}", e);
                    throw new UnauthorizedException(e.getMessage());
                }
            }
        }
        throw new UnauthorizedException();
    }
    /**
     * Closes the JCR session after the response is built, except for the
     * streaming services whose output is still to be written at this point.
     */
    @Override
    public Response handleResponse(final Message m, final OperationResourceInfo ori, final Response response) {
        if (session != null && session.isLive()) {
            if (!(ori != null && ori.getMethodToInvoke() != null && isStreamingOutputServices(ori.getMethodToInvoke()))) {
                session.logout();
            }
            session = null;
        }
        return null;
    }
    /** @return whether the invoked method is in {@link #STREAMING_OUTPUT_SERVICES}. */
    public boolean isStreamingOutputServices(Method methodToInvoke) {
        boolean result = false;
        if (methodToInvoke != null) {
            String calledMethod = methodToInvoke.getName();
            for (String serviceMethod : STREAMING_OUTPUT_SERVICES) {
                if (serviceMethod.equals(calledMethod)) {
                    result = true;
                    break;
                }
            }
        }
        return result;
    }
    // createSession either returns a session or throws, so a non-null field
    // is the only condition checked here.
    private boolean isAuthenticated() {
        return session != null;
    }
} | aharpour/user-management-dashboard | cms/src/main/java/nl/openweb/hippo/umd/webservices/HippoAuthenticationRequestHandler.java | Java | apache-2.0 | 4,730 |
/*
* Copyright 2008-2009 Web Cohesion
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.security.oauth2.config;
import java.util.List;
import org.springframework.beans.BeanMetadataElement;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.ManagedMap;
import org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser;
import org.springframework.beans.factory.xml.ParserContext;
import org.springframework.security.oauth2.provider.BaseClientDetails;
import org.springframework.security.oauth2.provider.InMemoryClientDetailsService;
import org.springframework.util.StringUtils;
import org.springframework.util.xml.DomUtils;
import org.w3c.dom.Element;
/**
* @author Ryan Heaton
* @author Andrew McCall
*/
public class ClientDetailsServiceBeanDefinitionParser extends AbstractSingleBeanDefinitionParser {

	@Override
	protected Class<?> getBeanClass(Element element) {
		return InMemoryClientDetailsService.class;
	}

	/**
	 * Builds one {@link BaseClientDetails} bean definition per nested
	 * {@code <client>} element and wires the resulting map into the
	 * {@link InMemoryClientDetailsService} via its clientDetailsStore property.
	 */
	@Override
	protected void doParse(Element element, ParserContext parserContext, BeanDefinitionBuilder builder) {
		List<Element> clientElements = DomUtils.getChildElementsByTagName(element, "client");
		ManagedMap<String, BeanMetadataElement> clients = new ManagedMap<String, BeanMetadataElement>();
		for (Element clientElement : clientElements) {
			BeanDefinitionBuilder client = BeanDefinitionBuilder.rootBeanDefinition(BaseClientDetails.class);
			String clientId = clientElement.getAttribute("client-id");
			if (StringUtils.hasText(clientId)) {
				client.addConstructorArgValue(clientId);
			}
			else {
				parserContext.getReaderContext().error("A client id must be supplied with the definition of a client.",
						clientElement);
			}
			String secret = clientElement.getAttribute("secret");
			if (StringUtils.hasText(secret)) {
				client.addPropertyValue("clientSecret", secret);
			}
			String resourceIds = clientElement.getAttribute("resource-ids");
			// BUG FIX: this condition previously tested clientId (copy/paste
			// error), so the branch never depended on the resource-ids
			// attribute itself.
			if (StringUtils.hasText(resourceIds)) {
				client.addConstructorArgValue(resourceIds);
			}
			else {
				client.addConstructorArgValue("");
			}
			String redirectUri = clientElement.getAttribute("redirect-uri");
			String tokenValidity = clientElement.getAttribute("access-token-validity");
			if (StringUtils.hasText(tokenValidity)) {
				client.addPropertyValue("accessTokenValiditySeconds", tokenValidity);
			}
			String refreshValidity = clientElement.getAttribute("refresh-token-validity");
			if (StringUtils.hasText(refreshValidity)) {
				client.addPropertyValue("refreshTokenValiditySeconds", refreshValidity);
			}
			// Remaining BaseClientDetails constructor arguments, in order.
			client.addConstructorArgValue(clientElement.getAttribute("scope"));
			client.addConstructorArgValue(clientElement.getAttribute("authorized-grant-types"));
			client.addConstructorArgValue(clientElement.getAttribute("authorities"));
			if (StringUtils.hasText(redirectUri)) {
				client.addConstructorArgValue(redirectUri);
			}
			clients.put(clientId, client.getBeanDefinition());
		}

		builder.addPropertyValue("clientDetailsStore", clients);
	}
} | rwinch/spring-security-oauth | spring-security-oauth2/src/main/java/org/springframework/security/oauth2/config/ClientDetailsServiceBeanDefinitionParser.java | Java | apache-2.0 | 3,600 |
// Re-export the Logo component so consumers can import this folder directly.
import Logo from './Logo';
export default Logo; | Angelmmiguel/discuzzion | client/src/components/Logo/index.js | JavaScript | apache-2.0 | 77 |
package com.cloud.dc.dao;
import com.cloud.dc.AccountVlanMapVO;
import com.cloud.dc.DomainVlanMapVO;
import com.cloud.dc.PodVlanMapVO;
import com.cloud.dc.Vlan;
import com.cloud.dc.Vlan.VlanType;
import com.cloud.dc.VlanVO;
import com.cloud.network.dao.IPAddressDao;
import com.cloud.utils.Pair;
import com.cloud.utils.db.DB;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.JoinBuilder;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.TransactionLegacy;
import com.cloud.utils.exception.CloudRuntimeException;
import javax.inject.Inject;
import javax.naming.ConfigurationException;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.springframework.stereotype.Component;
@Component
public class VlanDaoImpl extends GenericDaoBase<VlanVO, Long> implements VlanDao {

    // Raw SQL for searchForZoneWideVlans: zone-wide vlans of the given type,
    // excluding one tag and any vlan dedicated to an account.
    private final String FindZoneWideVlans =
            "SELECT * FROM vlan WHERE data_center_id=? and vlan_type=? and vlan_id!=? and id not in (select vlan_db_id from account_vlan_map)";

    protected SearchBuilder<VlanVO> ZoneVlanIdSearch;
    protected SearchBuilder<VlanVO> ZoneSearch;
    protected SearchBuilder<VlanVO> ZoneTypeSearch;
    protected SearchBuilder<VlanVO> ZoneTypeAllPodsSearch;
    protected SearchBuilder<VlanVO> ZoneTypePodSearch;
    protected SearchBuilder<VlanVO> ZoneVlanSearch;
    protected SearchBuilder<VlanVO> NetworkVlanSearch;
    protected SearchBuilder<VlanVO> PhysicalNetworkVlanSearch;
    protected SearchBuilder<VlanVO> ZoneWideNonDedicatedVlanSearch;
    protected SearchBuilder<VlanVO> VlanGatewaysearch;
    protected SearchBuilder<VlanVO> DedicatedVlanSearch;
    protected SearchBuilder<AccountVlanMapVO> AccountVlanMapSearch;
    protected SearchBuilder<DomainVlanMapVO> DomainVlanMapSearch;
    @Inject
    protected PodVlanMapDao _podVlanMapDao;
    @Inject
    protected AccountVlanMapDao _accountVlanMapDao;
    @Inject
    protected DomainVlanMapDao _domainVlanMapDao;
    @Inject
    protected IPAddressDao _ipAddressDao;

    // Join-free search builders are created here; builders that join the
    // injected map DAOs are created in configure(), after injection.
    public VlanDaoImpl() {
        ZoneVlanIdSearch = createSearchBuilder();
        ZoneVlanIdSearch.and("zoneId", ZoneVlanIdSearch.entity().getDataCenterId(), SearchCriteria.Op.EQ);
        ZoneVlanIdSearch.and("vlanId", ZoneVlanIdSearch.entity().getVlanTag(), SearchCriteria.Op.EQ);
        ZoneVlanIdSearch.done();
        ZoneSearch = createSearchBuilder();
        ZoneSearch.and("zoneId", ZoneSearch.entity().getDataCenterId(), SearchCriteria.Op.EQ);
        ZoneSearch.done();
        ZoneTypeSearch = createSearchBuilder();
        ZoneTypeSearch.and("zoneId", ZoneTypeSearch.entity().getDataCenterId(), SearchCriteria.Op.EQ);
        ZoneTypeSearch.and("vlanType", ZoneTypeSearch.entity().getVlanType(), SearchCriteria.Op.EQ);
        ZoneTypeSearch.done();
        // BUG FIX: ZoneVlanSearch was declared but never initialized anywhere
        // (neither here nor in configure()), so listZoneWideVlans() threw a
        // NullPointerException on first use. Build it with exactly the
        // parameters that method sets.
        ZoneVlanSearch = createSearchBuilder();
        ZoneVlanSearch.and("zoneId", ZoneVlanSearch.entity().getDataCenterId(), SearchCriteria.Op.EQ);
        ZoneVlanSearch.and("vlanId", ZoneVlanSearch.entity().getVlanTag(), SearchCriteria.Op.EQ);
        ZoneVlanSearch.and("vlanType", ZoneVlanSearch.entity().getVlanType(), SearchCriteria.Op.EQ);
        ZoneVlanSearch.done();
        NetworkVlanSearch = createSearchBuilder();
        NetworkVlanSearch.and("networkOfferingId", NetworkVlanSearch.entity().getNetworkId(), SearchCriteria.Op.EQ);
        NetworkVlanSearch.done();
        PhysicalNetworkVlanSearch = createSearchBuilder();
        PhysicalNetworkVlanSearch.and("physicalNetworkId", PhysicalNetworkVlanSearch.entity().getPhysicalNetworkId(), SearchCriteria.Op.EQ);
        PhysicalNetworkVlanSearch.done();
        VlanGatewaysearch = createSearchBuilder();
        VlanGatewaysearch.and("gateway", VlanGatewaysearch.entity().getVlanGateway(), SearchCriteria.Op.EQ);
        VlanGatewaysearch.and("networkid", VlanGatewaysearch.entity().getNetworkId(), SearchCriteria.Op.EQ);
        VlanGatewaysearch.done();
    }

    /** Finds the vlan with the given tag in a zone, or null. */
    @Override
    public VlanVO findByZoneAndVlanId(final long zoneId, final String vlanId) {
        final SearchCriteria<VlanVO> sc = ZoneVlanIdSearch.create();
        sc.setParameters("zoneId", zoneId);
        sc.setParameters("vlanId", vlanId);
        return findOneBy(sc);
    }

    /** Lists all vlans in a zone. */
    @Override
    public List<VlanVO> listByZone(final long zoneId) {
        final SearchCriteria<VlanVO> sc = ZoneSearch.create();
        sc.setParameters("zoneId", zoneId);
        return listBy(sc);
    }

    /** Lists all vlans of the given type across all zones. */
    @Override
    public List<VlanVO> listByType(final VlanType vlanType) {
        final SearchCriteria<VlanVO> sc = ZoneTypeSearch.create();
        sc.setParameters("vlanType", vlanType);
        return listBy(sc);
    }

    /** Lists all vlans of the given type in a zone. */
    @Override
    public List<VlanVO> listByZoneAndType(final long zoneId, final VlanType vlanType) {
        final SearchCriteria<VlanVO> sc = ZoneTypeSearch.create();
        sc.setParameters("zoneId", zoneId);
        sc.setParameters("vlanType", vlanType);
        return listBy(sc);
    }

    /** Lists the vlans mapped to a pod (resolved through pod_vlan_map). */
    @Override
    public List<VlanVO> listVlansForPod(final long podId) {
        //FIXME: use a join statement to improve the performance (should be minor since we expect only one or two
        final List<PodVlanMapVO> vlanMaps = _podVlanMapDao.listPodVlanMapsByPod(podId);
        final List<VlanVO> result = new ArrayList<>();
        for (final PodVlanMapVO pvmvo : vlanMaps) {
            result.add(findById(pvmvo.getVlanDbId()));
        }
        return result;
    }

    /** Lists the vlans mapped to a pod, filtered by vlan type. */
    @Override
    public List<VlanVO> listVlansForPodByType(final long podId, final VlanType vlanType) {
        //FIXME: use a join statement to improve the performance (should be minor since we expect only one or two)
        final List<PodVlanMapVO> vlanMaps = _podVlanMapDao.listPodVlanMapsByPod(podId);
        final List<VlanVO> result = new ArrayList<>();
        for (final PodVlanMapVO pvmvo : vlanMaps) {
            final VlanVO vlan = findById(pvmvo.getVlanDbId());
            if (vlan.getVlanType() == vlanType) {
                result.add(vlan);
            }
        }
        return result;
    }

    /** Records a pod-to-vlan mapping. */
    @Override
    public void addToPod(final long podId, final long vlanDbId) {
        final PodVlanMapVO pvmvo = new PodVlanMapVO(podId, vlanDbId);
        _podVlanMapDao.persist(pvmvo);
    }

    /**
     * Lists the vlans dedicated to an account, filtered by type and
     * (optionally, when zoneId is non-null) by zone.
     */
    @Override
    public List<VlanVO> listVlansForAccountByType(final Long zoneId, final long accountId, final VlanType vlanType) {
        //FIXME: use a join statement to improve the performance (should be minor since we expect only one or two)
        final List<AccountVlanMapVO> vlanMaps = _accountVlanMapDao.listAccountVlanMapsByAccount(accountId);
        final List<VlanVO> result = new ArrayList<>();
        for (final AccountVlanMapVO acvmvo : vlanMaps) {
            final VlanVO vlan = findById(acvmvo.getVlanDbId());
            if (vlan.getVlanType() == vlanType && (zoneId == null || vlan.getDataCenterId() == zoneId)) {
                result.add(vlan);
            }
        }
        return result;
    }

    /**
     * Whether the zone has any direct-attached vlan mapped to a pod;
     * removed rows are included on purpose.
     */
    @Override
    public boolean zoneHasDirectAttachUntaggedVlans(final long zoneId) {
        final SearchCriteria<VlanVO> sc = ZoneTypeAllPodsSearch.create();
        sc.setParameters("zoneId", zoneId);
        sc.setParameters("vlanType", VlanType.DirectAttached);
        return listIncludingRemovedBy(sc).size() > 0;
    }

    /** Lists vlans in a zone matching both the tag and the type. */
    @Override
    public List<VlanVO> listZoneWideVlans(final long zoneId, final VlanType vlanType, final String vlanId) {
        final SearchCriteria<VlanVO> sc = ZoneVlanSearch.create();
        sc.setParameters("zoneId", zoneId);
        sc.setParameters("vlanId", vlanId);
        sc.setParameters("vlanType", vlanType);
        return listBy(sc);
    }

    /**
     * Raw-SQL variant: zone-wide vlans of the given type, excluding the given
     * tag and any account-dedicated vlan.
     */
    @Override
    @DB
    public List<VlanVO> searchForZoneWideVlans(final long dcId, final String vlanType, final String vlanId) {
        final StringBuilder sql = new StringBuilder(FindZoneWideVlans);
        final TransactionLegacy txn = TransactionLegacy.currentTxn();
        final List<VlanVO> zoneWideVlans = new ArrayList<>();
        try (PreparedStatement pstmt = txn.prepareStatement(sql.toString())) {
            if (pstmt != null) {
                pstmt.setLong(1, dcId);
                pstmt.setString(2, vlanType);
                pstmt.setString(3, vlanId);
                try (ResultSet rs = pstmt.executeQuery()) {
                    while (rs.next()) {
                        zoneWideVlans.add(toEntityBean(rs, false));
                    }
                } catch (final SQLException e) {
                    throw new CloudRuntimeException("searchForZoneWideVlans:Exception:" + e.getMessage(), e);
                }
            }
            return zoneWideVlans;
        } catch (final SQLException e) {
            throw new CloudRuntimeException("searchForZoneWideVlans:Exception:" + e.getMessage(), e);
        }
    }

    /** Lists vlans belonging to a network (parameter is the network id). */
    @Override
    public List<VlanVO> listVlansByNetworkId(final long networkOfferingId) {
        final SearchCriteria<VlanVO> sc = NetworkVlanSearch.create();
        sc.setParameters("networkOfferingId", networkOfferingId);
        return listBy(sc);
    }

    /** Lists vlans belonging to a physical network. */
    @Override
    public List<VlanVO> listVlansByPhysicalNetworkId(final long physicalNetworkId) {
        final SearchCriteria<VlanVO> sc = PhysicalNetworkVlanSearch.create();
        sc.setParameters("physicalNetworkId", physicalNetworkId);
        return listBy(sc);
    }

    /** Lists zone vlans that are dedicated to neither an account nor a domain. */
    @Override
    public List<VlanVO> listZoneWideNonDedicatedVlans(final long zoneId) {
        final SearchCriteria<VlanVO> sc = ZoneWideNonDedicatedVlanSearch.create();
        sc.setParameters("zoneId", zoneId);
        return listBy(sc);
    }

    /** Lists vlans of a network that use the given gateway. */
    @Override
    public List<VlanVO> listVlansByNetworkIdAndGateway(final long networkid, final String gateway) {
        final SearchCriteria<VlanVO> sc = VlanGatewaysearch.create();
        sc.setParameters("networkid", networkid);
        sc.setParameters("gateway", gateway);
        return listBy(sc);
    }

    /** Lists vlans dedicated to the given account. */
    @Override
    public List<VlanVO> listDedicatedVlans(final long accountId) {
        final SearchCriteria<VlanVO> sc = DedicatedVlanSearch.create();
        sc.setJoinParameters("AccountVlanMapSearch", "accountId", accountId);
        return listBy(sc);
    }

    /**
     * Builds the search builders that join the injected map DAOs; this runs
     * after dependency injection, which is why they cannot be built in the
     * constructor.
     */
    @Override
    public boolean configure(final String name, final Map<String, Object> params) throws ConfigurationException {
        final boolean result = super.configure(name, params);

        ZoneTypeAllPodsSearch = createSearchBuilder();
        ZoneTypeAllPodsSearch.and("zoneId", ZoneTypeAllPodsSearch.entity().getDataCenterId(), SearchCriteria.Op.EQ);
        ZoneTypeAllPodsSearch.and("vlanType", ZoneTypeAllPodsSearch.entity().getVlanType(), SearchCriteria.Op.EQ);
        final SearchBuilder<PodVlanMapVO> PodVlanSearch = _podVlanMapDao.createSearchBuilder();
        PodVlanSearch.and("podId", PodVlanSearch.entity().getPodId(), SearchCriteria.Op.NNULL);
        ZoneTypeAllPodsSearch.join("vlan", PodVlanSearch, PodVlanSearch.entity().getVlanDbId(), ZoneTypeAllPodsSearch.entity().getId(), JoinBuilder.JoinType.INNER);
        ZoneTypeAllPodsSearch.done();
        PodVlanSearch.done();

        ZoneTypePodSearch = createSearchBuilder();
        ZoneTypePodSearch.and("zoneId", ZoneTypePodSearch.entity().getDataCenterId(), SearchCriteria.Op.EQ);
        ZoneTypePodSearch.and("vlanType", ZoneTypePodSearch.entity().getVlanType(), SearchCriteria.Op.EQ);
        final SearchBuilder<PodVlanMapVO> PodVlanSearch2 = _podVlanMapDao.createSearchBuilder();
        PodVlanSearch2.and("podId", PodVlanSearch2.entity().getPodId(), SearchCriteria.Op.EQ);
        ZoneTypePodSearch.join("vlan", PodVlanSearch2, PodVlanSearch2.entity().getVlanDbId(), ZoneTypePodSearch.entity().getId(), JoinBuilder.JoinType.INNER);
        PodVlanSearch2.done();
        ZoneTypePodSearch.done();

        // LEFT OUTER joins with a NULL condition on the mapped id implement
        // "not dedicated to any account/domain".
        ZoneWideNonDedicatedVlanSearch = createSearchBuilder();
        ZoneWideNonDedicatedVlanSearch.and("zoneId", ZoneWideNonDedicatedVlanSearch.entity().getDataCenterId(), SearchCriteria.Op.EQ);
        AccountVlanMapSearch = _accountVlanMapDao.createSearchBuilder();
        AccountVlanMapSearch.and("accountId", AccountVlanMapSearch.entity().getAccountId(), SearchCriteria.Op.NULL);
        ZoneWideNonDedicatedVlanSearch.join("AccountVlanMapSearch", AccountVlanMapSearch, ZoneWideNonDedicatedVlanSearch.entity().getId(), AccountVlanMapSearch.entity()
                                                                                                                                                              .getVlanDbId(),
                JoinBuilder.JoinType.LEFTOUTER);
        DomainVlanMapSearch = _domainVlanMapDao.createSearchBuilder();
        DomainVlanMapSearch.and("domainId", DomainVlanMapSearch.entity().getDomainId(), SearchCriteria.Op.NULL);
        ZoneWideNonDedicatedVlanSearch.join("DomainVlanMapSearch", DomainVlanMapSearch, ZoneWideNonDedicatedVlanSearch.entity().getId(), DomainVlanMapSearch.entity().getVlanDbId
                (), JoinBuilder.JoinType.LEFTOUTER);
        ZoneWideNonDedicatedVlanSearch.done();
        AccountVlanMapSearch.done();
        DomainVlanMapSearch.done();

        DedicatedVlanSearch = createSearchBuilder();
        AccountVlanMapSearch = _accountVlanMapDao.createSearchBuilder();
        AccountVlanMapSearch.and("accountId", AccountVlanMapSearch.entity().getAccountId(), SearchCriteria.Op.EQ);
        DedicatedVlanSearch.join("AccountVlanMapSearch", AccountVlanMapSearch, DedicatedVlanSearch.entity().getId(), AccountVlanMapSearch.entity().getVlanDbId(),
                JoinBuilder.JoinType.LEFTOUTER);
        DedicatedVlanSearch.done();
        AccountVlanMapSearch.done();

        return result;
    }

    /**
     * Picks the next vlan to allocate from: prefer a partially used vlan,
     * then an empty vlan sharing tag/subnet with a full one, then any empty
     * vlan; null when no empty vlan exists.
     * NOTE(review): private and not referenced anywhere in this class.
     */
    private VlanVO findNextVlan(final long zoneId, final Vlan.VlanType vlanType) {
        final List<VlanVO> allVlans = listByZoneAndType(zoneId, vlanType);
        final List<VlanVO> emptyVlans = new ArrayList<>();
        final List<VlanVO> fullVlans = new ArrayList<>();

        // Try to find a VLAN that is partially allocated
        for (final VlanVO vlan : allVlans) {
            final long vlanDbId = vlan.getId();

            final int countOfAllocatedIps = _ipAddressDao.countIPs(zoneId, vlanDbId, true);
            final int countOfAllIps = _ipAddressDao.countIPs(zoneId, vlanDbId, false);

            if ((countOfAllocatedIps > 0) && (countOfAllocatedIps < countOfAllIps)) {
                return vlan;
            } else if (countOfAllocatedIps == 0) {
                emptyVlans.add(vlan);
            } else if (countOfAllocatedIps == countOfAllIps) {
                fullVlans.add(vlan);
            }
        }

        if (emptyVlans.isEmpty()) {
            return null;
        }

        // Try to find an empty VLAN with the same tag/subnet as a VLAN that is full
        for (final VlanVO fullVlan : fullVlans) {
            for (final VlanVO emptyVlan : emptyVlans) {
                if (fullVlan.getVlanTag().equals(emptyVlan.getVlanTag()) && fullVlan.getVlanGateway().equals(emptyVlan.getVlanGateway()) &&
                        fullVlan.getVlanNetmask().equals(emptyVlan.getVlanNetmask())) {
                    return emptyVlan;
                }
            }
        }

        // Return a random empty VLAN
        return emptyVlans.get(0);
    }

    /**
     * NOTE(review): effectively disabled -- always returns null, even when a
     * matching vlan is found; the original allocation logic is commented out
     * below. Kept as-is to preserve current behavior.
     */
    public Pair<String, VlanVO> assignPodDirectAttachIpAddress(final long zoneId, final long podId, final long accountId, final long domainId) {
        final SearchCriteria<VlanVO> sc = ZoneTypePodSearch.create();
        sc.setParameters("zoneId", zoneId);
        sc.setParameters("vlanType", VlanType.DirectAttached);
        sc.setJoinParameters("vlan", "podId", podId);

        final VlanVO vlan = findOneIncludingRemovedBy(sc);
        if (vlan == null) {
            return null;
        }

        return null;
        //        String ipAddress = _ipAddressDao.assignIpAddress(accountId, domainId, vlan.getId(), false).getAddress();
        //        if (ipAddress == null) {
        //            return null;
        //        }
        //        return new Pair<String, VlanVO>(ipAddress, vlan);
    }
}
| remibergsma/cosmic | cosmic-core/engine/schema/src/main/java/com/cloud/dc/dao/VlanDaoImpl.java | Java | apache-2.0 | 15,800 |