repo_name stringlengths 4 116 | path stringlengths 4 379 | size stringlengths 1 7 | content stringlengths 3 1.05M | license stringclasses 15
values |
|---|---|---|---|---|
garygunarman/firanew | admin/account/recover.php | 5563 | <?php
/*
# ----------------------------------------------------------------------
# RECOVER PASSWORD: CONTROL
# ----------------------------------------------------------------------
*/
include('get.php');
include('update.php');
include('control.php');
// DEFINED VARIABLE
$recover_name = $_REQUEST['name'];
$recover_code = $_REQUEST['code'];
$recover_time = $_REQUEST['time'];
/**
 * Returns the number of days between two dates, as computed by MySQL DATEDIFF.
 *
 * @param string $post_end_time   end date, 'Y-m-d'
 * @param string $post_start_time start date, 'Y-m-d' (comes from $_REQUEST['time'] at the call site)
 * @return array row with key 'left_days' holding the DATEDIFF result
 */
function check_recover_time($post_end_time, $post_start_time){
    // NOTE(review): both values are interpolated straight into the SQL string --
    // injection risk since $post_start_time originates from the request.
    // The mysql_* API is deprecated (removed in PHP 7); connDB() is project-defined.
    $conn = connDB();
    $sql = "SELECT DATEDIFF('$post_end_time', '$post_start_time') AS left_days";
    $query = mysql_query($sql, $conn);
    $result = mysql_fetch_array($query);
    return $result;
}
$check_time = check_recover_time(date('Y-m-d'), $_REQUEST['time']);
if($check_time['left_days'] > 0){
}else{
/**
 * Counts recovery-log rows matching the given admin username and recovery code.
 * Used to decide whether a password-reset link is valid (count > 0).
 *
 * @param string $post_name admin username from the request
 * @param string $post_code plain recovery code; compared against the stored MD5 hash
 * @return array row with key 'rows' holding the COUNT(*) result
 */
function count_recover($post_name, $post_code){
    // NOTE(review): request values interpolated into SQL -- injection risk;
    // parameterise or at least escape. MD5 for recovery codes is weak by
    // modern standards -- flagging, not changing, since the schema depends on it.
    $conn = connDB();
    $sql = "SELECT COUNT(*) AS rows FROM tbl_forgot_log WHERE `admin_username` = '$post_name' AND `code` = MD5('$post_code')";
    $query = mysql_query($sql, $conn);
    $result = mysql_fetch_array($query);
    return $result;
}
/**
 * Updates the status flag of a recovery-code row for the given admin user.
 *
 * Bug fix: the original body referenced $post_status without it ever being
 * defined (an undefined-variable notice that silently wrote an empty string
 * into `status`). It is now an explicit parameter; the default '' preserves
 * the previous effective behaviour for existing two-argument callers.
 *
 * @param string $post_name   admin username
 * @param string $post_code   plain recovery code (matched against MD5 hash)
 * @param string $post_status new status value; default '' keeps legacy behaviour
 */
function update_recover($post_name, $post_code, $post_status = ''){
    // NOTE(review): values interpolated into SQL -- injection risk; escape or
    // parameterise. mysql_* is deprecated; connDB() is project-defined.
    $conn = connDB();
    $sql = "UPDATE tbl_forgot_log SET `status` = '$post_status' WHERE `admin_username` = '$post_name' AND `code` = MD5('$post_code')";
    $query = mysql_query($sql, $conn) or die(mysql_error());
}
// CALL FUNCTION
$check_link = count_recover($recover_name, $recover_code);
if($check_link['rows'] > 0){
if(isset($_POST['btn-admin-recover'])){
}
?>
<form method="post" enctype="multipart/form-data" autocomplete="off">
<?php
/* --- SHOW ALERT --- */
if(!empty($_SESSION['alert'])){
echo '<div class="alert '.$_SESSION['alert'].'" id="alert-msg-pass">';
echo '<div class="container text-center">';
echo $_SESSION['msg'];
echo '</div>';
echo '</div>';
}
/* --- UNSET ALERT --- */
if($_POST['btn-admin-recover'] == ""){
unset($_SESSION['alert']);
unset($_SESSION['msg']);
}
?>
<div class="container main">
<!--<div class="overlay over-signin <?php if(!empty($_SESSION['alert'])){ echo "error";}?>" id="overlay-error">-->
<div class="box row login">
<div class="navbar-login clearfix">
<div class="navbar-brand"><img src="<?php echo $prefix_url;?>files/common/logo.png" alt="logo"></div>
<h1>Antikode Admin</h1>
</div>
<div class="content">
<ul class="form-set clearfix">
<p class="m_b_15">Reset password for <strong><?php echo $recover_name;?></strong></p>
<li class="form-group row">
<label class="col-xs-4 control-label">New Password</label>
<div class="col-xs-8">
<input type="text" class="form-control" style="width: 100%" id="id_password" name="password">
</div>
</li>
<li class="form-group row">
<label class="col-xs-4 control-label">Retype Password</label>
<div class="col-xs-8">
<input type="text" class="form-control" autocomplete="off" style="width: 100%" id="id_cpassword" name="cpassword">
</div>
</li>
<li class="btn-placeholder m_b_15">
<a href="<?php echo $prefix_url;?>"><input type="button" class="btn btn-default btn-sm" value="Back"></a>
<input type="button" class="btn btn-success btn-sm" value="Reset Password" id="id_btn_recover" onclick="validateRecover()">
<input type="submit" class="btn btn-success btn-sm" value="Reset Password" id="id_btn_admin_recover" name="btn-admin-recover">
</li>
</ul>
</div><!--.content-->
</div><!--.box.row-->
</div><!--.container.main-->
</form>
<script>
function validateRecover(){
var pass = $('#id_password').val();
var cpass = $('#id_cpassword').val();
if(pass == ""){
$('#id_password').attr('placeholder', 'required');
}else if(cpass == ""){
$('#id_cpassword').attr('placeholder', 'required');
}else if(pass != cpass){
$('#id_cpassword').val('');
$('#id_cpassword').attr('placeholder', 'required');
$('#id_cpassword').focus();
}else{
$('#id_btn_admin_recover').click();
}
}
$(document).keypress(function(e) {
if(e.which == 13) {
$('#id_btn_recover').click();
}
});
</script>
<script type="text/javascript" src="<?php echo $prefix_url?>/script/login.js"></script>
<?php
}else{
echo "<div class=\"alert error\" id=\"alert-msg-pass\">";
echo " <div class=\"container text-center\">";
echo "Your session has been expired";
echo "<br><br>";
echo "<a href=\"".$prefix_url."\">";
echo " <b>back</b>";
echo "</a>";
echo " </div>";
echo "</div>";
unset($_SESSION['alert']);
unset($_SESSION['msg']);
}
}
?>
| apache-2.0 |
kubernetes-sigs/kustomize | plugin/builtin/secretgenerator/SecretGenerator.go | 978 | // Copyright 2019 The Kubernetes Authors.
// SPDX-License-Identifier: Apache-2.0
//go:generate pluginator
package main
import (
"sigs.k8s.io/kustomize/api/kv"
"sigs.k8s.io/kustomize/api/resmap"
"sigs.k8s.io/kustomize/api/types"
"sigs.k8s.io/yaml"
)
// plugin is the SecretGenerator builtin: it embeds ObjectMeta (so the YAML
// config's metadata unmarshals directly onto it) and SecretArgs (the secret
// description consumed by the resmap factory).
type plugin struct {
	// h gives access to the loader, validator and resmap factory at Generate time.
	h *resmap.PluginHelpers
	types.ObjectMeta `json:"metadata,omitempty" yaml:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
	types.SecretArgs
}

// KustomizePlugin is the exported symbol kustomize looks up when loading
// this as a plugin.
//noinspection GoUnusedGlobalVariable
var KustomizePlugin plugin
// Config unmarshals the plugin's YAML configuration onto p (populating both
// ObjectMeta and SecretArgs), then defaults the SecretArgs name/namespace
// from the object metadata when the config left them unset, and finally
// stashes the helpers for use by Generate.
func (p *plugin) Config(h *resmap.PluginHelpers, config []byte) (err error) {
	p.SecretArgs = types.SecretArgs{}
	err = yaml.Unmarshal(config, p)
	if err != nil {
		// Bug fix: fail fast on a malformed config instead of silently
		// defaulting fields and storing helpers before reporting the error.
		return
	}
	if p.SecretArgs.Name == "" {
		p.SecretArgs.Name = p.Name
	}
	if p.SecretArgs.Namespace == "" {
		p.SecretArgs.Namespace = p.Namespace
	}
	p.h = h
	return
}
// Generate builds the Secret resource(s) described by SecretArgs, reading
// key/value sources (files, literals, envs) through the plugin's loader and
// validator supplied in Config.
func (p *plugin) Generate() (resmap.ResMap, error) {
	return p.h.ResmapFactory().FromSecretArgs(
		kv.NewLoader(p.h.Loader(), p.h.Validator()), p.SecretArgs)
}
| apache-2.0 |
stori-es/stori_es | shared/src/main/java/org/consumersunion/stories/server/business_logic/UserService.java | 1166 | package org.consumersunion.stories.server.business_logic;
import org.consumersunion.stories.common.shared.model.CredentialedUser;
import org.consumersunion.stories.common.shared.model.User;
import org.consumersunion.stories.server.exception.NotLoggedInException;
/**
 * Session-scoped access to the currently authenticated user plus CRUD
 * operations on credentialed users.
 */
public interface UserService {

    /** Looks up a user (with credentials) by login handle. */
    CredentialedUser getCredentialedUser(String handle);

    /** Looks up a user (with credentials) by numeric id. */
    CredentialedUser getCredentialedUser(Integer id);

    /**
     * Retrieves the currently logged in user (or null) associated with the
     * user's session. There are no restrictions on this method.
     */
    User getLoggedInUser();

    /**
     * Returns the id of the session's active profile.
     *
     * @throws NotLoggedInException if no user is logged in
     */
    int getActiveProfileId() throws NotLoggedInException;

    /**
     * Like {@link #getLoggedInUser()}, but when {@code dieOnNull} is true a
     * missing session user raises instead of returning null.
     *
     * @throws NotLoggedInException if {@code dieOnNull} and nobody is logged in
     */
    User getLoggedInUser(boolean dieOnNull) throws NotLoggedInException;

    /** Switches the session's active profile. */
    void setActiveProfileId(int id);

    /** Organization id of the current request context, or null if none. */
    Integer getContextOrganizationId();

    /** Effective subject id for authorization checks on the session user. */
    Integer getEffectiveSubject();

    /** Effective subject id for authorization checks on the given user. */
    Integer getEffectiveSubject(User user);

    /** Resolves the user owning the given profile. */
    User getUserForProfile(int profileId);

    /** Binds the given user to the current session. */
    void setLoggedInUser(User user);

    /** Fetches a user by id (without credentials). */
    User getUser(int id);

    /** Persists a new credentialed user and returns the stored instance. */
    CredentialedUser createUser(CredentialedUser credentialedUser);

    /** Updates an existing credentialed user and returns the stored instance. */
    CredentialedUser updateUser(CredentialedUser credentialedUser);

    /** Removes the user with the given id. */
    void deleteUser(int id);
}
| apache-2.0 |
ScriptonBasestar-Lib/sb-tool-jvm | validation/src/test/java/org/scriptonbasestar/tool/validation/package-info.java | 105 | package org.scriptonbasestar.tool.validation;
/**
 * Validation utilities for sb-tool-jvm.
 *
 * @author archmagece
 * @since 2016-12-06
 */
| apache-2.0 |
sobkowiak/myfoo-commonj | src/main/java/de/myfoo/commonj/timers/FooTimer.java | 4339 | /**
* #(@) FooTimer.java Aug 16, 2006
*/
package de.myfoo.commonj.timers;
import commonj.timers.StopTimerListener;
import commonj.timers.Timer;
import commonj.timers.TimerListener;
/**
 * A FooTimer is returned when a TimerListener is scheduled using one of
 * the <code>FooTimerManager.schedule</code> methods.
 * It allows retrieving information about the scheduled TimerListener and
 * allows canceling it.
 *
 * @author Andreas Keldenich
 */
public abstract class FooTimer implements Timer {

    // Set once stop() has been called; getters throw IllegalStateException afterwards.
    protected boolean stopped = false;

    // Set once cancel() has been called; further cancel() calls return false.
    private boolean cancelled = false;

    // Absolute time (epoch milliseconds) at which the listener is due to fire next.
    protected long scheduledExcecutionTime;

    // Repeat period in milliseconds; 0 means the timer is one-shot.
    protected long period = 0L;

    // Application-supplied callback invoked on expiration.
    private TimerListener listener;

    /**
     * Creates a new instance of FooTimer.
     *
     * @param startTime absolute start time in milliseconds
     * @param period execution period in milliseconds (0 for a one-shot timer)
     * @param listener the timer listener for this timer.
     */
    public FooTimer(long startTime, long period, TimerListener listener) {
        scheduledExcecutionTime = startTime;
        this.period = period;
        this.listener = listener;
    }

    /**
     * Compute the next execution time for this timer. Concrete subclasses
     * decide the repeat semantics (e.g. fixed-rate vs. fixed-delay).
     */
    protected abstract void computeNextExecutionTime();

    /**
     * Execute the timer listener by delivering the expiration callback.
     */
    public void execute() {
        listener.timerExpired(this);
    }

    /**
     * This cancels the timer and all future TimerListener invocations and
     * may be called during the <code>TimerListener.timerExpired</code>
     * method.
     *
     * <p>
     * <code>CancelTimerListener.timerCancel</code> events may be
     * called concurrently with any <code>TimerListener.timerExpired</code>
     * methods. Proper thread synchronization techniques must be employed to
     * ensure consistency.
     *
     * <p>
     * Once a Timer is cancelled an application <b>must not</b> use
     * the Timer instance again.
     *
     * <p>
     * NOTE(review): {@code cancelled} is read and written without
     * synchronization; if cancel() can race with a scheduler thread, confirm
     * the owning TimerManager provides the required memory visibility.
     *
     * @return <code>true</code> if this prevented the next execution
     *         of this timer. <code>false</code> if this was already
     *         cancelled or had already expired in the one shot case.
     * @see commonj.timers.Timer#cancel()
     */
    public boolean cancel() {
        if (cancelled) {
            return false;
        }
        cancelled = true;
        return true;
    }

    /**
     * Returns the application-supplied TimerListener associated
     * with this Timer.
     *
     * @return The TimerListener associated with the timer.
     * @throws IllegalStateException if the TimerManager has been stopped.
     * @see commonj.timers.Timer#getTimerListener()
     */
    public TimerListener getTimerListener() throws IllegalStateException {
        if (stopped) {
            throw new IllegalStateException("Timer has already been stopped.");
        }
        return listener;
    }

    /**
     * Returns the next absolute <i>scheduled</i> execution time in
     * milliseconds.
     *
     * <p>
     * If invoked while a TimerListener is running, the return value is the
     * <i>scheduled</i> execution time of the current TimerListener execution.
     *
     * <p>
     * If the timer has been suspended, the time reflects the most
     * recently-calculated execution time prior to being suspended.
     *
     * @return the time in milliseconds at which the TimerListener is
     *         scheduled to run next.
     * @throws IllegalStateException if the TimerManager has been stopped.
     * @see commonj.timers.Timer#getScheduledExecutionTime()
     */
    public long getScheduledExecutionTime() throws IllegalStateException {
        if (stopped) {
            throw new IllegalStateException("Timer has already been stopped.");
        }
        return scheduledExcecutionTime;
    }

    /**
     * Return the period used to compute the time this timer will repeat.
     * A value of zero indicates that the timer is non-repeating.
     *
     * @return the period in milliseconds between timer executions.
     * @see commonj.timers.Timer#getPeriod()
     */
    public long getPeriod() {
        return period;
    }

    /**
     * Getter for cancelled.
     *
     * @return Returns the cancelled flag.
     */
    public boolean isCancelled() {
        return cancelled;
    }

    /**
     * Stop the timer, notifying the listener if it implements
     * {@code StopTimerListener}.
     */
    public void stop() {
        this.stopped = true;
        if (listener instanceof StopTimerListener) {
            StopTimerListener stl = (StopTimerListener) listener;
            stl.timerStop(this);
        }
    }

    /**
     * Check if this timer is expired and needs to be fired.
     *
     * @return <code>true</code> if the scheduled execution time is in the past.
     */
    public boolean isExpired() {
        return scheduledExcecutionTime < System.currentTimeMillis();
    }
}
| apache-2.0 |
kinabalu/mysticpaste | web/src/test/java/com/mysticcoders/webapp/TestHomePage.java | 1735 | package com.mysticcoders.webapp;
import com.mysticcoders.integrations.AbstractIntegrationTest;
import com.mysticcoders.mysticpaste.persistence.PasteItemDao;
import com.mysticcoders.mysticpaste.services.PasteService;
import org.apache.wicket.protocol.http.WebApplication;
import org.apache.wicket.spring.test.ApplicationContextMock;
import org.apache.wicket.util.tester.WicketTester;
import org.junit.Before;
import org.junit.Test;
import org.unitils.spring.annotation.SpringBeanByType;
/**
 * Simple test using the WicketTester. Spring beans are injected by Unitils
 * (by type) and re-registered in a mock application context so pages could be
 * rendered without bootstrapping the full container.
 */
public class TestHomePage extends AbstractIntegrationTest {

    @SpringBeanByType
    private PasteService svc;

    @SpringBeanByType
    private PasteItemDao dao;

    protected WicketTester tester;

    @Before
    public void setup() {
        // Build a mock Spring context holding the injected beans; currently
        // unused because the SpringComponentInjector line below is commented out.
        ApplicationContextMock appctx = new
                ApplicationContextMock();
        appctx.putBean("pasteDao", dao);
        appctx.putBean("pasteService", svc);
        // if we inject spring here, its more of an integration test, and that is not fun
        // tester = new WicketTester(new MysticPasteApplication());
        tester = new WicketTester();
        WebApplication app = tester.getApplication();
        // app.addComponentInstantiationListener(new SpringComponentInjector(app, appctx));
    }

    @Test
    public void testRenderMyPage() {
        //start and render the test page
        // tester.startPage(HomePage.class);
        //assert rendered page class
        // tester.assertRenderedPage(HomePage.class);
        //assert rendered label component
        // NOTE(review): with startPage commented out, this asserts against the
        // tester's default dummy page, not HomePage -- confirm this still
        // tests what was intended.
        tester.assertLabel("message", "If you see this message wicket is properly configured and running");
    }
}
| apache-2.0 |
gchq/gaffer-tools | python-shell/src/test/test_connector_requests.py | 12273 | #
# Copyright 2016-2019 Crown Copyright
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import unittest
import json
from gafferpy import gaffer as g
from gafferpy import gaffer_connector_requests
class GafferConnectorTest(unittest.TestCase):
def test_execute_operation(self):
    """Round-trip a GetElements query through a live Gaffer REST endpoint.

    Seeds on junction 'M5:10', restricted to 'JunctionLocatedAt' edges, and
    expects exactly one edge back.

    NOTE(review): integration test -- requires the road-traffic example graph
    running at http://localhost:8080/rest/latest.
    """
    gc = gaffer_connector_requests.GafferConnector('http://localhost:8080/rest/latest')
    elements = gc.execute_operation(
        g.GetElements(
            input=[
                g.EntitySeed('M5:10')
            ],
            view=g.View(
                edges=[
                    g.ElementDefinition(
                        group='JunctionLocatedAt'
                    )
                ]
            )
        )
    )
    self.assertEqual(
        [g.Edge("JunctionLocatedAt", "M5:10", "390466,225615", True, {},
                "SOURCE")],
        elements)
def test_is_operation_supported(self):
gc = gaffer_connector_requests.GafferConnector('http://localhost:8080/rest/latest')
response_text = gc.is_operation_supported(
g.IsOperationSupported(
operation='uk.gov.gchq.gaffer.operation.impl.get.GetAllElements'
)
)
expected_response_text = '''
{
"name": "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements",
"summary": "Gets all elements compatible with a provided View",
"fields": [
{
"name": "view",
"className": "uk.gov.gchq.gaffer.data.elementdefinition.view.View",
"required": false
},
{
"name": "options",
"className": "java.util.Map<java.lang.String,java.lang.String>",
"required": false
},
{
"name": "directedType",
"summary": "Is the Edge directed?",
"className": "java.lang.String",
"options": [
"DIRECTED",
"UNDIRECTED",
"EITHER"
],
"required": false
},
{
"name": "views",
"className": "java.util.List<uk.gov.gchq.gaffer.data.elementdefinition.view.View>",
"required": false
}
],
"next": [
"uk.gov.gchq.gaffer.operation.impl.add.AddElements",
"uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"uk.gov.gchq.gaffer.operation.impl.get.GetAdjacentIds",
"uk.gov.gchq.gaffer.operation.impl.export.set.ExportToSet",
"uk.gov.gchq.gaffer.operation.impl.output.ToArray",
"uk.gov.gchq.gaffer.operation.impl.output.ToEntitySeeds",
"uk.gov.gchq.gaffer.operation.impl.output.ToList",
"uk.gov.gchq.gaffer.operation.impl.output.ToMap",
"uk.gov.gchq.gaffer.operation.impl.output.ToCsv",
"uk.gov.gchq.gaffer.operation.impl.output.ToSet",
"uk.gov.gchq.gaffer.operation.impl.output.ToStream",
"uk.gov.gchq.gaffer.operation.impl.output.ToVertices",
"uk.gov.gchq.gaffer.named.operation.NamedOperation",
"uk.gov.gchq.gaffer.operation.impl.compare.Max",
"uk.gov.gchq.gaffer.operation.impl.compare.Min",
"uk.gov.gchq.gaffer.operation.impl.compare.Sort",
"uk.gov.gchq.gaffer.operation.impl.GetWalks",
"uk.gov.gchq.gaffer.operation.impl.generate.GenerateElements",
"uk.gov.gchq.gaffer.operation.impl.generate.GenerateObjects",
"uk.gov.gchq.gaffer.operation.impl.Validate",
"uk.gov.gchq.gaffer.operation.impl.Count",
"uk.gov.gchq.gaffer.operation.impl.CountGroups",
"uk.gov.gchq.gaffer.operation.impl.Limit",
"uk.gov.gchq.gaffer.operation.impl.DiscardOutput",
"uk.gov.gchq.gaffer.operation.impl.Map",
"uk.gov.gchq.gaffer.operation.impl.If",
"uk.gov.gchq.gaffer.operation.impl.While",
"uk.gov.gchq.gaffer.operation.impl.ForEach",
"uk.gov.gchq.gaffer.operation.impl.output.ToSingletonList",
"uk.gov.gchq.gaffer.operation.impl.Reduce",
"uk.gov.gchq.gaffer.operation.impl.join.Join",
"uk.gov.gchq.gaffer.operation.impl.SetVariable",
"uk.gov.gchq.gaffer.operation.impl.function.Filter",
"uk.gov.gchq.gaffer.operation.impl.function.Transform",
"uk.gov.gchq.gaffer.operation.impl.function.Aggregate",
"uk.gov.gchq.gaffer.mapstore.operation.CountAllElementsDefaultView",
"uk.gov.gchq.gaffer.operation.export.graph.ExportToOtherAuthorisedGraph",
"uk.gov.gchq.gaffer.operation.export.graph.ExportToOtherGraph",
"uk.gov.gchq.gaffer.operation.impl.export.resultcache.ExportToGafferResultCache"
],
"exampleJson": {
"class": "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements"
},
"outputClassName": "uk.gov.gchq.gaffer.commonutil.iterable.CloseableIterable<uk.gov.gchq.gaffer.data.element.Element>"
}
'''
self.assertEqual(
json.loads(expected_response_text),
json.loads(response_text)
)
def test_execute_get(self):
self.maxDiff = None
gc = gaffer_connector_requests.GafferConnector('http://localhost:8080/rest/latest')
response_text = gc.execute_get(
g.GetOperations()
)
expected_response_text = '''
[
"uk.gov.gchq.gaffer.operation.impl.add.AddElements",
"uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"uk.gov.gchq.gaffer.operation.impl.get.GetAdjacentIds",
"uk.gov.gchq.gaffer.operation.impl.get.GetAllElements",
"uk.gov.gchq.gaffer.operation.impl.export.set.ExportToSet",
"uk.gov.gchq.gaffer.operation.impl.export.set.GetSetExport",
"uk.gov.gchq.gaffer.operation.impl.export.GetExports",
"uk.gov.gchq.gaffer.operation.impl.job.GetJobDetails",
"uk.gov.gchq.gaffer.operation.impl.job.GetAllJobDetails",
"uk.gov.gchq.gaffer.operation.impl.job.GetJobResults",
"uk.gov.gchq.gaffer.operation.impl.output.ToArray",
"uk.gov.gchq.gaffer.operation.impl.output.ToEntitySeeds",
"uk.gov.gchq.gaffer.operation.impl.output.ToList",
"uk.gov.gchq.gaffer.operation.impl.output.ToMap",
"uk.gov.gchq.gaffer.operation.impl.output.ToCsv",
"uk.gov.gchq.gaffer.operation.impl.output.ToSet",
"uk.gov.gchq.gaffer.operation.impl.output.ToStream",
"uk.gov.gchq.gaffer.operation.impl.output.ToVertices",
"uk.gov.gchq.gaffer.named.operation.NamedOperation",
"uk.gov.gchq.gaffer.named.operation.AddNamedOperation",
"uk.gov.gchq.gaffer.named.operation.GetAllNamedOperations",
"uk.gov.gchq.gaffer.named.operation.DeleteNamedOperation",
"uk.gov.gchq.gaffer.named.view.AddNamedView",
"uk.gov.gchq.gaffer.named.view.GetAllNamedViews",
"uk.gov.gchq.gaffer.named.view.DeleteNamedView",
"uk.gov.gchq.gaffer.operation.impl.compare.Max",
"uk.gov.gchq.gaffer.operation.impl.compare.Min",
"uk.gov.gchq.gaffer.operation.impl.compare.Sort",
"uk.gov.gchq.gaffer.operation.OperationChain",
"uk.gov.gchq.gaffer.operation.OperationChainDAO",
"uk.gov.gchq.gaffer.operation.impl.ValidateOperationChain",
"uk.gov.gchq.gaffer.operation.impl.GetWalks",
"uk.gov.gchq.gaffer.operation.impl.generate.GenerateElements",
"uk.gov.gchq.gaffer.operation.impl.generate.GenerateObjects",
"uk.gov.gchq.gaffer.operation.impl.Validate",
"uk.gov.gchq.gaffer.operation.impl.Count",
"uk.gov.gchq.gaffer.operation.impl.CountGroups",
"uk.gov.gchq.gaffer.operation.impl.Limit",
"uk.gov.gchq.gaffer.operation.impl.DiscardOutput",
"uk.gov.gchq.gaffer.store.operation.GetSchema",
"uk.gov.gchq.gaffer.operation.impl.Map",
"uk.gov.gchq.gaffer.operation.impl.If",
"uk.gov.gchq.gaffer.operation.impl.While",
"uk.gov.gchq.gaffer.operation.impl.ForEach",
"uk.gov.gchq.gaffer.operation.impl.output.ToSingletonList",
"uk.gov.gchq.gaffer.operation.impl.Reduce",
"uk.gov.gchq.gaffer.operation.impl.join.Join",
"uk.gov.gchq.gaffer.operation.impl.job.CancelScheduledJob",
"uk.gov.gchq.gaffer.operation.impl.SetVariable",
"uk.gov.gchq.gaffer.operation.impl.GetVariable",
"uk.gov.gchq.gaffer.operation.impl.GetVariables",
"uk.gov.gchq.gaffer.operation.impl.function.Filter",
"uk.gov.gchq.gaffer.operation.impl.function.Transform",
"uk.gov.gchq.gaffer.operation.impl.function.Aggregate",
"uk.gov.gchq.gaffer.store.operation.GetTraits",
"uk.gov.gchq.gaffer.mapstore.operation.CountAllElementsDefaultView",
"uk.gov.gchq.gaffer.operation.export.graph.ExportToOtherAuthorisedGraph",
"uk.gov.gchq.gaffer.operation.export.graph.ExportToOtherGraph",
"uk.gov.gchq.gaffer.operation.impl.export.resultcache.ExportToGafferResultCache",
"uk.gov.gchq.gaffer.operation.impl.export.resultcache.GetGafferResultCacheExport"
]
'''
self.assertEqual(
json.loads(expected_response_text),
json.loads(response_text)
)
def test_dummy_header(self):
    """Test that the addition of a dummy header does not affect the standard query.

    Identical to test_execute_operation except the connector is constructed
    with an extra custom HTTP header; the result must be unchanged.
    NOTE(review): integration test -- requires a live Gaffer REST endpoint.
    """
    gc = gaffer_connector_requests.GafferConnector('http://localhost:8080/rest/latest', headers={"dummy_Header": "value"})
    elements = gc.execute_operation(
        g.GetElements(
            input=[
                g.EntitySeed('M5:10')
            ],
            view=g.View(
                edges=[
                    g.ElementDefinition(
                        group='JunctionLocatedAt'
                    )
                ]
            )
        )
    )
    self.assertEqual(
        [g.Edge("JunctionLocatedAt", "M5:10", "390466,225615", True, {},
                "SOURCE")],
        elements)
def test_class_initilisation(self):
    """Check that GafferConnector keeps the host, verbosity flag and headers
    it was constructed with (method name typo is preserved for history)."""
    expected_host = 'http://localhost:8080/rest/latest'
    expected_verbose = False
    expected_headers = {'User-Agent': 'python-requests/2.25.1', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-alive', 'dummy_Header': 'value'}

    connector = gaffer_connector_requests.GafferConnector(
        expected_host, expected_verbose, expected_headers)

    # Assert each stored attribute individually instead of zipping parallel lists.
    self.assertEqual(connector._host, expected_host)
    self.assertEqual(connector._verbose, expected_verbose)
    self.assertEqual(connector._session.headers, expected_headers)
def test_raise_connection_error(self):
    """Test that a ConnectionError is correctly raised when a HTTP 404 error is caught.

    NOTE(review): integration test -- needs the REST server up so the bad
    endpoint yields a 404 rather than a refused connection.
    """
    # Define a host that has an invalid endpoint in order to get a HTTP 404 error
    host_with_bad_endpoint = "http://localhost:8080/badEndPoint"
    gc = gaffer_connector_requests.GafferConnector(host_with_bad_endpoint)
    # Check that a ConnectionError is raised (which is catching the underlying HTTP 404)
    with self.assertRaises(ConnectionError):
        gc.execute_get(g.GetOperations())
if __name__ == "__main__":
unittest.main()
| apache-2.0 |
dbrant/MpoViewer | MpoImage.cs | 1679 | using System.Collections.Generic;
using System.Drawing;
using System.IO;
namespace MpoViewer
{
public static class MpoImage
{
    /// <summary>
    /// Extracts each embedded JPEG frame from an MPO (Multi Picture Object) file.
    /// An MPO is a concatenation of JPEG streams; frames are located by scanning
    /// for a JPEG SOI marker followed by an APP1 (0xFFE1) or APP0 (0xFFE0) segment.
    /// </summary>
    /// <param name="fileName">Path of the .mpo file to read.</param>
    /// <returns>One Image per JPEG frame found, in file order.</returns>
    public static List<Image> GetMpoImages(string fileName)
    {
        var images = new List<Image>();

        // Bug fix: the original pre-allocated a throwaway 100-byte buffer and
        // relied on a single Stream.Read call filling the whole array, which
        // the Stream contract does not guarantee. File.ReadAllBytes does.
        byte[] fileBytes = File.ReadAllBytes(fileName);

        // Locate the start offset of every JPEG frame.
        var imageOffsets = new List<int>();
        byte[] soiApp1 = { 0xFF, 0xD8, 0xFF, 0xE1 }; // SOI + APP1 (Exif)
        byte[] soiApp0 = { 0xFF, 0xD8, 0xFF, 0xE0 }; // SOI + APP0 (JFIF)
        int offset = 0;
        while (true)
        {
            // Prefer an APP1 match, falling back to APP0 -- same precedence
            // as the original scanner.
            int found = Utils.SearchBytes(fileBytes, soiApp1, offset, fileBytes.Length);
            if (found == -1)
                found = Utils.SearchBytes(fileBytes, soiApp0, offset, fileBytes.Length);
            if (found == -1) break;
            imageOffsets.Add(found);
            offset = found + 4;
        }

        // Slice each frame out of the buffer and decode it. The MemoryStream is
        // intentionally left open: GDI+ requires the stream backing a Bitmap to
        // remain alive for the lifetime of the image.
        for (int i = 0; i < imageOffsets.Count; i++)
        {
            int length = (i < imageOffsets.Count - 1)
                ? imageOffsets[i + 1] - imageOffsets[i]
                : fileBytes.Length - imageOffsets[i];
            var stream = new MemoryStream(fileBytes, imageOffsets[i], length);
            images.Add(new Bitmap(stream));
        }
        return images;
    }
}
}
| apache-2.0 |
blinkfox/probe | src/main/java/com/blinkfox/learn/javafx/archon/modules/Modules.java | 370 | package com.blinkfox.learn.javafx.archon.modules;
import com.google.inject.AbstractModule;
/**
 * Root Guice module: aggregates the application's dependency-injection
 * modules so a single install wires up both controllers and services.
 * Created by blinkfox on 2017-03-22.
 */
public class Modules extends AbstractModule {

    /**
     * Installs the child modules. Install order does not affect binding
     * resolution in Guice.
     */
    @Override
    protected void configure() {
        install(new ControllerModule());
        install(new ServiceModule());
    }
}
cpoopc/com.cp.monsterMod | src/com/cp/monsterMod/helpers/AddIdCursorLoader.java | 8909 | /*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cp.monsterMod.helpers;
import android.support.v4.content.AsyncTaskLoader;
import android.support.v4.content.Loader;
import android.content.ContentQueryMap;
import android.content.Context;
import android.content.ContentValues;
import android.database.ContentObserver;
import android.database.Cursor;
import android.database.MatrixCursor;
import android.net.Uri;
import android.provider.BaseColumns;
import android.provider.MediaStore.Audio;
import android.provider.MediaStore.MediaColumns;
import android.provider.MediaStore.Audio.AlbumColumns;
import android.provider.MediaStore.Audio.AudioColumns;
import java.io.FileDescriptor;
import java.io.PrintWriter;
import java.util.Arrays;
import java.util.Map;
/**
* A loader that queries the {@link ContentResolver} and returns a {@link Cursor}.
* This class implements the {@link Loader} protocol in a standard way for
* querying cursors, building on {@link AsyncTaskLoader} to perform the cursor
* query on a background thread so that it does not block the application's UI.
*
* <p>A CursorLoader must be built with the full information for the query to
* perform, either through the
* {@link #CursorLoader(Context, Uri, String[], String, String[], String)} or
* creating an empty instance with {@link #CursorLoader(Context)} and filling
* in the desired paramters with {@link #setUri(Uri)}, {@link #setSelection(String)},
* {@link #setSelectionArgs(String[])}, {@link #setSortOrder(String)},
* and {@link #setProjection(String[])}.
*/
public class AddIdCursorLoader extends AsyncTaskLoader<Cursor> {
final ForceLoadContentObserver mObserver;
Uri mUri;
String[] mProjection;
String mSelection;
String[] mSelectionArgs;
String mSortOrder;
Cursor mCursor;
/* Runs on a worker thread */
@Override
public Cursor loadInBackground() {
    // Primary media query as configured through the constructor/setters.
    Cursor mediaCursor = getContext().getContentResolver().query(mUri, mProjection, mSelection,
            mSelectionArgs, mSortOrder);

    // Secondary query: album name -> album row, so each track row can be
    // joined with its album's _ID.
    String[] projection = new String[]{
            BaseColumns._ID, AlbumColumns.ALBUM
    };
    Uri uri = Audio.Albums.EXTERNAL_CONTENT_URI;
    String sortOrder = Audio.Albums.DEFAULT_SORT_ORDER;
    Cursor albumCursor = getContext().getContentResolver().query(uri, projection, null, null, sortOrder);

    // Matrix cursor holding the joined rows returned to the calling context.
    MatrixCursor cursor = new MatrixCursor(new String[]{
            BaseColumns._ID, MediaColumns.TITLE, AudioColumns.ARTIST, AudioColumns.ALBUM, AudioColumns.ALBUM_ID});

    // One-shot snapshot of albumCursor keyed by album name (keepUpdated=false).
    ContentQueryMap queryMap = new ContentQueryMap(albumCursor, AlbumColumns.ALBUM, false, null);
    Map<String, ContentValues> data = queryMap.getRows();

    if (mediaCursor != null) {
        while (mediaCursor.moveToNext()) {
            String id = mediaCursor.getString(mediaCursor.getColumnIndexOrThrow(BaseColumns._ID));
            String title = mediaCursor.getString(mediaCursor.getColumnIndexOrThrow(MediaColumns.TITLE));
            String artist = mediaCursor.getString(mediaCursor.getColumnIndexOrThrow(AudioColumns.ARTIST));
            String album = mediaCursor.getString(mediaCursor.getColumnIndexOrThrow(AudioColumns.ALBUM));
            ContentValues tData = data.get(album);
            // Bug fix: skip tracks whose album is missing from the map instead
            // of crashing with a NullPointerException on tData.get().
            if (tData == null) {
                continue;
            }
            // NOTE(review): ContentValues may store _ID as a Long, in which
            // case this cast would throw ClassCastException -- kept as the
            // original wrote it; verify on a device before changing.
            String albumid = (String) tData.get(BaseColumns._ID);
            cursor.addRow(new String[]{id, title, artist, album, albumid});
        }
        mediaCursor.close();
    }

    // Bug fix: release the snapshot and the album cursor; the original leaked both.
    queryMap.close();
    if (albumCursor != null) {
        albumCursor.close();
    }

    if (cursor != null) {
        // Ensure the cursor window is filled
        registerContentObserver(cursor, mObserver);
    }
    return cursor;
}
/**
 * Registers an observer to get notifications from the content provider
 * when the cursor needs to be refreshed.
 *
 * @param cursor   cursor to attach the observer to
 * @param observer observer to register
 */
void registerContentObserver(Cursor cursor, ContentObserver observer) {
    // Bug fix: the original ignored the 'observer' parameter and always
    // registered mObserver. Honouring the parameter is behaviour-identical
    // for the existing caller (which passes mObserver) but no longer
    // surprises future callers.
    cursor.registerContentObserver(observer);
}
/* Runs on the UI thread */
@Override
public void deliverResult(Cursor cursor) {
    // Hand a freshly-loaded cursor to the registered listener, closing
    // whichever cursor it supersedes.
    if (isReset()) {
        // An async query came in while the loader is stopped
        if (cursor != null) {
            cursor.close();
        }
        return;
    }
    Cursor oldCursor = mCursor;
    mCursor = cursor;
    if (isStarted()) {
        super.deliverResult(cursor);
    }
    // Close the replaced cursor unless it is the very same instance being
    // redelivered (or already closed).
    if (oldCursor != null && oldCursor != cursor && !oldCursor.isClosed()) {
        oldCursor.close();
    }
}
/**
* Creates an empty unspecified CursorLoader. You must follow this with
* calls to {@link #setUri(Uri)}, {@link #setSelection(String)}, etc
* to specify the query to perform.
*/
public AddIdCursorLoader(Context context) {
super(context);
mObserver = new ForceLoadContentObserver();
}
/**
* Creates a fully-specified CursorLoader. See
* {@link ContentResolver#query(Uri, String[], String, String[], String)
* ContentResolver.query()} for documentation on the meaning of the
* parameters. These will be passed as-is to that call.
*/
public AddIdCursorLoader(Context context, Uri uri, String[] projection, String selection,
String[] selectionArgs, String sortOrder) {
super(context);
mObserver = new ForceLoadContentObserver();
mUri = uri;
mProjection = projection;
mSelection = selection;
mSelectionArgs = selectionArgs;
mSortOrder = sortOrder;
}
/**
 * Starts an asynchronous load of the cursor data. When the result is ready
 * the callbacks will be called on the UI thread. If a previous load has been
 * completed and is still valid the result may be passed to the callbacks
 * immediately.
 *
 * (Doc fix: the original comment said "contacts list data" -- a copy-paste
 * from the platform CursorLoader; this loader queries media URIs.)
 *
 * Must be called from the UI thread
 */
@Override
protected void onStartLoading() {
    if (mCursor != null) {
        // Re-deliver the cached cursor immediately.
        deliverResult(mCursor);
    }
    if (takeContentChanged() || mCursor == null) {
        // Content changed since the last load (or nothing cached): reload.
        forceLoad();
    }
}
/**
* Must be called from the UI thread
*/
@Override
protected void onStopLoading() {
// Attempt to cancel the current load task if possible.
cancelLoad();
}
@Override
public void onCanceled(Cursor cursor) {
if (cursor != null && !cursor.isClosed()) {
cursor.close();
}
}
@Override
protected void onReset() {
super.onReset();
// Ensure the loader is stopped
onStopLoading();
if (mCursor != null && !mCursor.isClosed()) {
mCursor.close();
}
mCursor = null;
}
public Uri getUri() {
return mUri;
}
public void setUri(Uri uri) {
mUri = uri;
}
public String[] getProjection() {
return mProjection;
}
public void setProjection(String[] projection) {
mProjection = projection;
}
public String getSelection() {
return mSelection;
}
public void setSelection(String selection) {
mSelection = selection;
}
public String[] getSelectionArgs() {
return mSelectionArgs;
}
public void setSelectionArgs(String[] selectionArgs) {
mSelectionArgs = selectionArgs;
}
public String getSortOrder() {
return mSortOrder;
}
public void setSortOrder(String sortOrder) {
mSortOrder = sortOrder;
}
@Override
public void dump(String prefix, FileDescriptor fd, PrintWriter writer, String[] args) {
super.dump(prefix, fd, writer, args);
writer.print(prefix); writer.print("mUri="); writer.println(mUri);
writer.print(prefix); writer.print("mProjection=");
writer.println(Arrays.toString(mProjection));
writer.print(prefix); writer.print("mSelection="); writer.println(mSelection);
writer.print(prefix); writer.print("mSelectionArgs=");
writer.println(Arrays.toString(mSelectionArgs));
writer.print(prefix); writer.print("mSortOrder="); writer.println(mSortOrder);
writer.print(prefix); writer.print("mCursor="); writer.println(mCursor);
}
}
| apache-2.0 |
rospilot/rospilot | share/web_assets/nodejs_deps/node_modules/rxjs/_esm5/operators/share.js | 1015 | /** PURE_IMPORTS_START ._multicast,._refCount,.._Subject PURE_IMPORTS_END */
import { multicast } from './multicast';
import { refCount } from './refCount';
import { Subject } from '../Subject';
// Subject factory passed to multicast(): supplies the Subject through which
// the shared source's notifications are fanned out to subscribers.
function shareSubjectFactory() {
return new Subject();
}
/**
 * Shares a single subscription to the source Observable among all subscribers.
 *
 * The source stays subscribed while at least one Subscriber is present and is
 * unsubscribed once the last Subscriber leaves; multicasting makes the stream
 * `hot`. This is an alias for .publish().refCount().
 *
 * <img src="./img/share.png" width="100%">
 *
 * @return {Observable<T>} An Observable that upon connection causes the source Observable to emit items to its Observers.
 * @method share
 * @owner Observable
 */
export function share() {
    return function (source) {
        var multicasted = multicast(shareSubjectFactory)(source);
        return refCount()(multicasted);
    };
}
;
//# sourceMappingURL=share.js.map
| apache-2.0 |
TapCard/TapCard | tapcard/src/main/java/io/github/tapcard/emvnfccard/log/LogWriter.java | 284 | package io.github.tapcard.emvnfccard.log;
/**
 * Minimal logging abstraction used by the EMV NFC card library so callers can
 * plug in their own logging backend. Each method attributes the message to the
 * class that produced it.
 */
public interface LogWriter {
    /** Logs a debug-level message for the given originating class. */
    void debug(Class<?> clazz, String msg);
    /** Logs an error-level message with an associated throwable. */
    void error(Class<?> clazz, String msg, Throwable t);
    /** Logs an error-level message without a throwable. */
    void error(Class<?> clazz, String msg);
    /** Logs an info-level message for the given originating class. */
    void info(Class<?> clazz, String msg);
    /**
     * @return true if debug logging is enabled; callers may use this to skip
     *         building expensive debug messages.
     */
    boolean isDebugEnabled();
}
| apache-2.0 |
laimis/lucenenet | src/Lucene.Net.Tests.Sandbox/Queries/TestSortedSetSortField.cs | 9631 | using Lucene.Net.Documents;
using Lucene.Net.Index;
using Lucene.Net.Search;
using Lucene.Net.Store;
using Lucene.Net.Util;
using NUnit.Framework;
using System;
namespace Lucene.Net.Sandbox.Queries
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/// <summary>Simple tests for SortedSetSortField</summary>
public class TestSortedSetSortField : LuceneTestCase
{
// Ascending sort: doc 1 holds {foo, bar} and doc 2 holds {baz}; with the
// default (MIN) selector doc 1 sorts by "bar", which precedes "baz".
[Test]
public void TestForward()
{
Directory dir = NewDirectory();
RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
Document doc = new Document();
doc.Add(NewStringField("value", "baz", Field.Store.NO));
doc.Add(NewStringField("id", "2", Field.Store.YES));
writer.AddDocument(doc);
doc = new Document();
doc.Add(NewStringField("value", "foo", Field.Store.NO));
doc.Add(NewStringField("value", "bar", Field.Store.NO));
doc.Add(NewStringField("id", "1", Field.Store.YES));
writer.AddDocument(doc);
IndexReader ir = writer.Reader;
writer.Dispose();
IndexSearcher searcher = NewSearcher(ir);
Sort sort = new Sort(new SortedSetSortField("value", false));
TopDocs td = searcher.Search(new MatchAllDocsQuery(), 10, sort);
assertEquals(2, td.TotalHits);
// 'bar' comes before 'baz'
assertEquals("1", searcher.Doc(td.ScoreDocs[0].Doc).Get("id"));
assertEquals("2", searcher.Doc(td.ScoreDocs[1].Doc).Get("id"));
ir.Dispose();
dir.Dispose();
}
// Same data as TestForward but with reverse=true: doc 2 ("baz") now sorts
// ahead of doc 1 ("bar").
[Test]
public void TestReverse()
{
Directory dir = NewDirectory();
RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
Document doc = new Document();
doc.Add(NewStringField("value", "foo", Field.Store.NO));
doc.Add(NewStringField("value", "bar", Field.Store.NO));
doc.Add(NewStringField("id", "1", Field.Store.YES));
writer.AddDocument(doc);
doc = new Document();
doc.Add(NewStringField("value", "baz", Field.Store.NO));
doc.Add(NewStringField("id", "2", Field.Store.YES));
writer.AddDocument(doc);
IndexReader ir = writer.Reader;
writer.Dispose();
IndexSearcher searcher = NewSearcher(ir);
Sort sort = new Sort(new SortedSetSortField("value", true));
TopDocs td = searcher.Search(new MatchAllDocsQuery(), 10, sort);
assertEquals(2, td.TotalHits);
// 'bar' comes before 'baz'
assertEquals("2", searcher.Doc(td.ScoreDocs[0].Doc).Get("id"));
assertEquals("1", searcher.Doc(td.ScoreDocs[1].Doc).Get("id"));
ir.Dispose();
dir.Dispose();
}
// A document with no "value" field is placed first when the sort field's
// missing value is STRING_FIRST.
[Test]
public void TestMissingFirst()
{
Directory dir = NewDirectory();
RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
Document doc = new Document();
doc.Add(NewStringField("value", "baz", Field.Store.NO));
doc.Add(NewStringField("id", "2", Field.Store.YES));
writer.AddDocument(doc);
doc = new Document();
doc.Add(NewStringField("value", "foo", Field.Store.NO));
doc.Add(NewStringField("value", "bar", Field.Store.NO));
doc.Add(NewStringField("id", "1", Field.Store.YES));
writer.AddDocument(doc);
doc = new Document();
doc.Add(NewStringField("id", "3", Field.Store.YES));
writer.AddDocument(doc);
IndexReader ir = writer.Reader;
writer.Dispose();
IndexSearcher searcher = NewSearcher(ir);
SortField sortField = new SortedSetSortField("value", false);
sortField.MissingValue = SortField.STRING_FIRST;
Sort sort = new Sort(sortField);
TopDocs td = searcher.Search(new MatchAllDocsQuery(), 10, sort);
assertEquals(3, td.TotalHits);
// 'bar' comes before 'baz'
// null comes first
assertEquals("3", searcher.Doc(td.ScoreDocs[0].Doc).Get("id"));
assertEquals("1", searcher.Doc(td.ScoreDocs[1].Doc).Get("id"));
assertEquals("2", searcher.Doc(td.ScoreDocs[2].Doc).Get("id"));
ir.Dispose();
dir.Dispose();
}
// A document with no "value" field is placed last when the sort field's
// missing value is STRING_LAST.
[Test]
public void TestMissingLast()
{
Directory dir = NewDirectory();
RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
Document doc = new Document();
doc.Add(NewStringField("value", "baz", Field.Store.NO));
doc.Add(NewStringField("id", "2", Field.Store.YES));
writer.AddDocument(doc);
doc = new Document();
doc.Add(NewStringField("value", "foo", Field.Store.NO));
doc.Add(NewStringField("value", "bar", Field.Store.NO));
doc.Add(NewStringField("id", "1", Field.Store.YES));
writer.AddDocument(doc);
doc = new Document();
doc.Add(NewStringField("id", "3", Field.Store.YES));
writer.AddDocument(doc);
IndexReader ir = writer.Reader;
writer.Dispose();
IndexSearcher searcher = NewSearcher(ir);
SortField sortField = new SortedSetSortField("value", false);
sortField.MissingValue = SortField.STRING_LAST;
Sort sort = new Sort(sortField);
TopDocs td = searcher.Search(new MatchAllDocsQuery(), 10, sort);
assertEquals(3, td.TotalHits);
// 'bar' comes before 'baz'
assertEquals("1", searcher.Doc(td.ScoreDocs[0].Doc).Get("id"));
assertEquals("2", searcher.Doc(td.ScoreDocs[1].Doc).Get("id"));
// null comes last
assertEquals("3", searcher.Doc(td.ScoreDocs[2].Doc).Get("id"));
ir.Dispose();
dir.Dispose();
}
// Sorting still works when every document carries at most one value
// (the single-valued fast path).
[Test]
public void TestSingleton()
{
Directory dir = NewDirectory();
RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
Document doc = new Document();
doc.Add(NewStringField("value", "baz", Field.Store.NO));
doc.Add(NewStringField("id", "2", Field.Store.YES));
writer.AddDocument(doc);
doc = new Document();
doc.Add(NewStringField("value", "bar", Field.Store.NO));
doc.Add(NewStringField("id", "1", Field.Store.YES));
writer.AddDocument(doc);
IndexReader ir = writer.Reader;
writer.Dispose();
IndexSearcher searcher = NewSearcher(ir);
Sort sort = new Sort(new SortedSetSortField("value", false));
TopDocs td = searcher.Search(new MatchAllDocsQuery(), 10, sort);
assertEquals(2, td.TotalHits);
// 'bar' comes before 'baz'
assertEquals("1", searcher.Doc(td.ScoreDocs[0].Doc).Get("id"));
assertEquals("2", searcher.Doc(td.ScoreDocs[1].Doc).Get("id"));
ir.Dispose();
dir.Dispose();
}
// Searching an empty index must not fail for any selector variant.
[Test]
public void TestEmptyIndex()
{
IndexSearcher empty = NewSearcher(new MultiReader());
Query query = new TermQuery(new Term("contents", "foo"));
Sort sort = new Sort();
sort.SetSort(new SortedSetSortField("sortedset", false));
TopDocs td = empty.Search(query, null, 10, sort, true, true);
assertEquals(0, td.TotalHits);
// for an empty index, any selector should work
foreach (Selector v in Enum.GetValues(typeof(Selector)))
{
sort.SetSort(new SortedSetSortField("sortedset", false, v));
td = empty.Search(query, null, 10, sort, true, true);
assertEquals(0, td.TotalHits);
}
}
// Equality contract: equal on same field/reverse/selector, unequal on any
// difference, never equal to null or to a different type.
[Test]
public void TestEquals()
{
SortField sf = new SortedSetSortField("a", false);
assertFalse(sf.equals(null));
assertEquals(sf, sf);
SortField sf2 = new SortedSetSortField("a", false);
assertEquals(sf, sf2);
assertEquals(sf.GetHashCode(), sf2.GetHashCode());
assertFalse(sf.equals(new SortedSetSortField("a", true)));
assertFalse(sf.equals(new SortedSetSortField("b", false)));
assertFalse(sf.equals(new SortedSetSortField("a", false, Selector.MAX)));
assertFalse(sf.equals("foo"));
}
}
}
| apache-2.0 |
liuyuanyuan/dbeaver | plugins/org.jkiss.dbeaver.registry/src/org/jkiss/dbeaver/registry/DataSourceDescriptor.java | 52837 | /*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2019 Serge Rider (serge@jkiss.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.registry;
import org.eclipse.core.runtime.IAdaptable;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.Job;
import org.eclipse.equinox.security.storage.ISecurePreferences;
import org.eclipse.osgi.util.NLS;
import org.jkiss.code.NotNull;
import org.jkiss.code.Nullable;
import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.Log;
import org.jkiss.dbeaver.ModelPreferences;
import org.jkiss.dbeaver.model.*;
import org.jkiss.dbeaver.model.app.DBPProject;
import org.jkiss.dbeaver.model.connection.DBPAuthInfo;
import org.jkiss.dbeaver.model.app.DBPDataSourceRegistry;
import org.jkiss.dbeaver.model.app.DBPPlatform;
import org.jkiss.dbeaver.model.connection.DBPConnectionConfiguration;
import org.jkiss.dbeaver.model.connection.DBPConnectionEventType;
import org.jkiss.dbeaver.model.connection.DBPDriver;
import org.jkiss.dbeaver.model.connection.DBPNativeClientLocation;
import org.jkiss.dbeaver.model.data.DBDDataFormatterProfile;
import org.jkiss.dbeaver.model.data.DBDPreferences;
import org.jkiss.dbeaver.model.data.DBDValueHandler;
import org.jkiss.dbeaver.model.exec.DBCException;
import org.jkiss.dbeaver.model.exec.DBCExecutionContext;
import org.jkiss.dbeaver.model.exec.DBCTransactionManager;
import org.jkiss.dbeaver.model.exec.DBExecUtils;
import org.jkiss.dbeaver.model.impl.data.DefaultValueHandler;
import org.jkiss.dbeaver.model.meta.Property;
import org.jkiss.dbeaver.model.net.*;
import org.jkiss.dbeaver.model.preferences.DBPPropertySource;
import org.jkiss.dbeaver.model.runtime.AbstractJob;
import org.jkiss.dbeaver.model.runtime.DBRProcessDescriptor;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.model.runtime.DBRShellCommand;
import org.jkiss.dbeaver.model.struct.*;
import org.jkiss.dbeaver.model.virtual.DBVModel;
import org.jkiss.dbeaver.registry.driver.DriverDescriptor;
import org.jkiss.dbeaver.registry.formatter.DataFormatterProfile;
import org.jkiss.dbeaver.registry.internal.RegistryMessages;
import org.jkiss.dbeaver.runtime.DBWorkbench;
import org.jkiss.dbeaver.runtime.TasksJob;
import org.jkiss.dbeaver.runtime.properties.PropertyCollector;
import org.jkiss.dbeaver.utils.GeneralUtils;
import org.jkiss.dbeaver.utils.SystemVariablesResolver;
import org.jkiss.utils.CommonUtils;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.text.DateFormat;
import java.util.*;
/**
* DataSourceDescriptor
*/
public class DataSourceDescriptor
implements
DBPDataSourceContainer,
DBPImageProvider,
IAdaptable,
DBPStatefulObject,
DBPRefreshableObject
{
private static final Log log = Log.getLog(DataSourceDescriptor.class);
// Variable names that may appear in connection parameters and are expanded
// at connect time (see connect() and resolveDynamicVariables()).
public static final String[] CONNECT_PATTERNS = new String[] {
DBPConnectionConfiguration.VARIABLE_HOST,
DBPConnectionConfiguration.VARIABLE_PORT,
DBPConnectionConfiguration.VARIABLE_SERVER,
DBPConnectionConfiguration.VARIABLE_DATABASE,
DBPConnectionConfiguration.VARIABLE_USER,
DBPConnectionConfiguration.VARIABLE_PASSWORD,
DBPConnectionConfiguration.VARIABLE_URL,
SystemVariablesResolver.VAR_WORKSPACE,
SystemVariablesResolver.VAR_HOME,
SystemVariablesResolver.VAR_DBEAVER_HOME,
SystemVariablesResolver.VAR_APP_NAME,
SystemVariablesResolver.VAR_APP_VERSION,
SystemVariablesResolver.VAR_LOCAL_IP
};
// Same variables paired with a human-readable description (for UI help).
public static final String[][] CONNECT_VARIABLES = new String[][]{
{DBPConnectionConfiguration.VARIABLE_HOST, "target host"},
{DBPConnectionConfiguration.VARIABLE_PORT, "target port"},
{DBPConnectionConfiguration.VARIABLE_SERVER, "target server name"},
{DBPConnectionConfiguration.VARIABLE_DATABASE, "target database"},
{DBPConnectionConfiguration.VARIABLE_USER, "user name"},
{DBPConnectionConfiguration.VARIABLE_PASSWORD, "password (plain)"},
{DBPConnectionConfiguration.VARIABLE_URL, "JDBC URL"},
{SystemVariablesResolver.VAR_WORKSPACE, "workspace path"},
{SystemVariablesResolver.VAR_HOME, "user home path"},
{SystemVariablesResolver.VAR_DBEAVER_HOME, "application install path"},
{SystemVariablesResolver.VAR_APP_NAME, "application name"},
{SystemVariablesResolver.VAR_APP_VERSION, "application version"},
{SystemVariablesResolver.VAR_LOCAL_IP, "local IP address"},
};
@NotNull
private final DBPDataSourceRegistry registry;
@NotNull
private final DataSourceOrigin origin;
@NotNull
private DriverDescriptor driver;
// Connection settings as configured by the user.
@NotNull
private DBPConnectionConfiguration connectionInfo;
// Configuration rewritten by the SSH/tunnel handler (host/port point at the tunnel).
private DBPConnectionConfiguration tunnelConnectionInfo;
// Copy of connection info with resolved params (cache)
private DBPConnectionConfiguration resolvedConnectionInfo;
@NotNull
private String id;
private String name;
private String description;
private boolean savePassword;
private boolean showSystemObjects;
private boolean showUtilityObjects;
private boolean connectionReadOnly;
// When null, permissions fall back to the connection type's defaults.
private List<DBPDataSourcePermission> connectionModifyRestrictions;
// Object filters keyed by the filtered object's type name.
private final Map<String, FilterMapping> filterMap = new HashMap<>();
private DBDDataFormatterProfile formatterProfile;
@Nullable
private DBPNativeClientLocation clientHome;
@Nullable
private String lockPasswordHash;
@Nullable
private DataSourceFolder folder;
@NotNull
private DataSourcePreferenceStore preferenceStore;
// Live data source; null while disconnected.
@Nullable
private DBPDataSource dataSource;
private final List<DBPDataSourceTask> users = new ArrayList<>();
// Connection state flags are volatile: read/written from UI and job threads.
private volatile boolean connectFailed = false;
private volatile Date connectTime = null;
private volatile boolean disposed = false;
private volatile boolean connecting = false;
private boolean temporary;
private final List<DBRProcessDescriptor> childProcesses = new ArrayList<>();
// Active network handlers, set during connect() and cleared on failure.
private DBWNetworkHandler proxyHandler;
private DBWTunnel tunnelHandler;
@NotNull
private DBVModel virtualModel;
/**
 * Creates a descriptor bound to the registry's default origin.
 */
public DataSourceDescriptor(
@NotNull DBPDataSourceRegistry registry,
@NotNull String id,
@NotNull DriverDescriptor driver,
@NotNull DBPConnectionConfiguration connectionInfo)
{
this(registry, ((DataSourceRegistry)registry).getDefaultOrigin(), id, driver, connectionInfo);
}
/**
 * Package-private constructor with an explicit origin; initializes the
 * per-datasource preference store and an empty virtual model.
 */
DataSourceDescriptor(
@NotNull DBPDataSourceRegistry registry,
@NotNull DataSourceOrigin origin,
@NotNull String id,
@NotNull DriverDescriptor driver,
@NotNull DBPConnectionConfiguration connectionInfo)
{
this.registry = registry;
this.origin = origin;
this.id = id;
this.driver = driver;
this.connectionInfo = connectionInfo;
this.preferenceStore = new DataSourcePreferenceStore(this);
this.virtualModel = new DBVModel(this);
}
// Copy constructor
public DataSourceDescriptor(@NotNull DataSourceDescriptor source) {
    this(source, source.registry);
}

/**
 * Copy constructor targeting a (possibly different) registry. Copies the
 * source's identity, settings, filters and virtual model. The connection
 * configuration is deep-copied so later edits to the copy do not leak into
 * the source; formatter profile, client home and folder are shared by
 * reference.
 */
public DataSourceDescriptor(@NotNull DataSourceDescriptor source, @NotNull DBPDataSourceRegistry registry)
{
    this.registry = registry;
    this.origin = ((DataSourceRegistry)registry).getDefaultOrigin();
    this.id = source.id;
    this.name = source.name;
    this.description = source.description;
    this.savePassword = source.savePassword;
    this.showSystemObjects = source.showSystemObjects;
    this.showUtilityObjects = source.showUtilityObjects;
    this.connectionReadOnly = source.connectionReadOnly;
    this.driver = source.driver;
    this.formatterProfile = source.formatterProfile;
    this.clientHome = source.clientHome;
    this.connectionModifyRestrictions = source.connectionModifyRestrictions == null ? null : new ArrayList<>(source.connectionModifyRestrictions);
    // Deep copy; the original code first assigned the shared reference and then
    // immediately overwrote it with this copy - the redundant assignment is removed.
    this.connectionInfo = new DBPConnectionConfiguration(source.connectionInfo);
    for (Map.Entry<String, FilterMapping> fe : source.filterMap.entrySet()) {
        this.filterMap.put(fe.getKey(), new FilterMapping(fe.getValue()));
    }
    this.lockPasswordHash = source.lockPasswordHash;
    this.folder = source.folder;
    this.preferenceStore = new DataSourcePreferenceStore(this);
    this.virtualModel = new DBVModel(this, source.virtualModel);
}
/** @return true once dispose() has been called. */
public boolean isDisposed()
{
return disposed;
}
/**
 * Releases this descriptor: clears registered users and disposes the virtual
 * model. Safe to call once; a repeated call only logs a warning.
 */
public void dispose()
{
if (disposed) {
log.warn("Dispose of already disposed data source");
return;
}
synchronized (users) {
users.clear();
}
this.virtualModel.dispose();
disposed = true;
}
@NotNull
@Override
@Property(name = "ID", viewable = false, order = 0)
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
@NotNull
@Override
public DriverDescriptor getDriver()
{
return driver;
}
@NotNull
@Override
public DBPPlatform getPlatform() {
// Platform is owned by the registry, not stored here
return registry.getPlatform();
}
public void setDriver(@NotNull DriverDescriptor driver)
{
this.driver = driver;
}
@NotNull
@Override
public DBPConnectionConfiguration getConnectionConfiguration()
{
return connectionInfo;
}
public void setConnectionInfo(@NotNull DBPConnectionConfiguration connectionInfo)
{
this.connectionInfo = connectionInfo;
}
/**
 * Returns the configuration actually used for the connection, preferring the
 * variable-resolved copy, then the tunnel-rewritten copy, and finally the
 * user-configured settings.
 */
@NotNull
@Override
public DBPConnectionConfiguration getActualConnectionConfiguration()
{
    if (this.resolvedConnectionInfo != null) {
        return this.resolvedConnectionInfo;
    }
    if (this.tunnelConnectionInfo != null) {
        return this.tunnelConnectionInfo;
    }
    return this.connectionInfo;
}
// Simple accessors for display name, description and the boolean
// configuration flags; they only read/write the backing fields.
@NotNull
@Override
@Property(viewable = true, order = 1)
public String getName()
{
return name;
}
public void setName(String name)
{
this.name = name;
}
@Nullable
@Override
@Property(viewable = true, multiline = true, order = 2)
public String getDescription()
{
return description;
}
public boolean isSavePassword()
{
return savePassword;
}
public void setSavePassword(boolean savePassword)
{
this.savePassword = savePassword;
}
@Override
public boolean isShowSystemObjects()
{
return showSystemObjects;
}
public void setShowSystemObjects(boolean showSystemObjects)
{
this.showSystemObjects = showSystemObjects;
}
@Override
public boolean isShowUtilityObjects() {
return showUtilityObjects;
}
public void setShowUtilityObjects(boolean showUtilityObjects) {
this.showUtilityObjects = showUtilityObjects;
}
@Override
public boolean isConnectionReadOnly()
{
return connectionReadOnly;
}
@Override
public void setConnectionReadOnly(boolean connectionReadOnly)
{
this.connectionReadOnly = connectionReadOnly;
}
/**
 * Checks whether the given modification is allowed. Edit permissions are
 * denied outright for read-only connections; otherwise explicit restrictions
 * take precedence, falling back to the connection type's defaults when none
 * are set.
 */
@Override
public boolean hasModifyPermission(DBPDataSourcePermission permission) {
    boolean editPermission =
        permission == DBPDataSourcePermission.PERMISSION_EDIT_DATA ||
        permission == DBPDataSourcePermission.PERMISSION_EDIT_METADATA;
    if (editPermission && connectionReadOnly) {
        return false;
    }
    if (!CommonUtils.isEmpty(connectionModifyRestrictions)) {
        return !connectionModifyRestrictions.contains(permission);
    }
    return getConnectionConfiguration().getConnectionType().hasModifyPermission(permission);
}
/**
 * @return a defensive copy of the explicit permission restrictions, or an
 *         empty list when none are configured.
 */
@Override
public List<DBPDataSourcePermission> getModifyPermission() {
if (CommonUtils.isEmpty(this.connectionModifyRestrictions)) {
return Collections.emptyList();
} else {
return new ArrayList<>(this.connectionModifyRestrictions);
}
}
/**
 * Replaces the explicit restrictions; an empty/null collection clears them
 * (falling back to connection-type defaults in hasModifyPermission()).
 */
@Override
public void setModifyPermissions(@Nullable Collection<DBPDataSourcePermission> permissions) {
if (CommonUtils.isEmpty(permissions)) {
this.connectionModifyRestrictions = null;
} else {
this.connectionModifyRestrictions = new ArrayList<>(permissions);
}
}
/**
 * Default auto-commit mode: the bootstrap setting wins when present,
 * otherwise the connection type's auto-commit default applies.
 */
@Override
public boolean isDefaultAutoCommit()
{
    Boolean bootstrapAutoCommit = connectionInfo.getBootstrap().getDefaultAutoCommit();
    if (bootstrapAutoCommit != null) {
        return bootstrapAutoCommit;
    }
    return getConnectionConfiguration().getConnectionType().isAutocommit();
}
/**
 * Sets the default auto-commit mode. When an execution context is supplied
 * and updateConnection is true, the live connection's mode is switched in a
 * background task (onFinish runs when the task completes, success or not).
 * The preference is stored in the bootstrap only when it differs from the
 * connection type's default.
 */
@Override
public void setDefaultAutoCommit(final boolean autoCommit, @Nullable DBCExecutionContext updateContext, boolean updateConnection, @Nullable final Runnable onFinish) throws DBException {
if (updateContext != null) {
final DBCTransactionManager txnManager = DBUtils.getTransactionManager(updateContext);
if (updateConnection && txnManager != null) {
TasksJob.runTask("Set auto-commit mode", monitor -> {
try {
// Change auto-commit mode
txnManager.setAutoCommit(monitor, autoCommit);
} catch (DBCException e) {
throw new InvocationTargetException(e);
} finally {
monitor.done();
if (onFinish != null) {
onFinish.run();
}
}
});
}
}
// Save in preferences
if (autoCommit == getConnectionConfiguration().getConnectionType().isAutocommit()) {
connectionInfo.getBootstrap().setDefaultAutoCommit(null);
} else {
connectionInfo.getBootstrap().setDefaultAutoCommit(autoCommit);
}
}
/**
 * Reads the isolation level from the live connection's default context;
 * returns null when disconnected or when the level cannot be determined.
 */
@Nullable
@Override
public DBPTransactionIsolation getActiveTransactionsIsolation()
{
if (dataSource != null) {
DBSInstance defaultInstance = dataSource.getDefaultInstance();
if (defaultInstance != null) {
DBCTransactionManager txnManager = DBUtils.getTransactionManager(defaultInstance.getDefaultContext(false));
if (txnManager != null) {
try {
return txnManager.getTransactionIsolation();
} catch (DBCException e) {
log.debug("Can't determine isolation level", e);
return null;
}
}
}
}
return null;
}
/** @return the isolation level code stored in the bootstrap, or null. */
@Override
public Integer getDefaultTransactionsIsolation() {
return connectionInfo.getBootstrap().getDefaultTransactionIsolation();
}
/**
 * Stores the default isolation level (null clears it) and, if connected,
 * applies it to the live connection in a background task when it differs
 * from the current level.
 */
@Override
public void setDefaultTransactionsIsolation(@Nullable final DBPTransactionIsolation isolationLevel) throws DBException {
if (isolationLevel == null) {
connectionInfo.getBootstrap().setDefaultTransactionIsolation(null);
} else {
connectionInfo.getBootstrap().setDefaultTransactionIsolation(isolationLevel.getCode());
if (dataSource != null) {
TasksJob.runTask("Set transactions isolation level", monitor -> {
DBCTransactionManager txnManager = DBUtils.getTransactionManager(dataSource.getDefaultInstance().getDefaultContext(false));
if (txnManager != null) {
try {
if (!txnManager.getTransactionIsolation().equals(isolationLevel)) {
txnManager.setTransactionIsolation(monitor, isolationLevel);
}
} catch (DBCException e) {
throw new InvocationTargetException(e);
}
}
});
}
}
}
/** @return all configured filter mappings (one per filtered type name). */
public Collection<FilterMapping> getObjectFilters()
{
return filterMap.values();
}
/**
 * Looks up the filter for the given object type/parent. With firstMatch the
 * first mapping found is returned even if its filter is disabled.
 */
@Nullable
@Override
public DBSObjectFilter getObjectFilter(Class<?> type, @Nullable DBSObject parentObject, boolean firstMatch)
{
FilterMapping filterMapping = getFilterMapping(type, parentObject, firstMatch);
if (filterMapping != null) {
return filterMapping.getFilter(parentObject, firstMatch);
}
return null;
}
/**
 * Resolves the mapping for a type: walks the superclass chain first, then
 * the type's directly declared interfaces.
 */
@Nullable
private FilterMapping getFilterMapping(Class<?> type, @Nullable DBSObject parentObject, boolean firstMatch)
{
if (filterMap.isEmpty()) {
return null;
}
// Test all super classes
for (Class<?> testType = type; testType != null; testType = testType.getSuperclass()) {
FilterMapping filterMapping = getTypeFilterMapping(parentObject, firstMatch, testType);
if (filterMapping != null) {
return filterMapping;
}
}
for (Class<?> testType : type.getInterfaces()) {
FilterMapping filterMapping = getTypeFilterMapping(parentObject, firstMatch, testType);
if (filterMapping != null) {
return filterMapping;
}
}
return null;
}
/**
 * Mapping lookup for a single type: direct name match first, then the type's
 * interfaces. A mapping only counts when its filter is non-null and either
 * firstMatch is set or the filter is enabled.
 */
private FilterMapping getTypeFilterMapping(@Nullable DBSObject parentObject, boolean firstMatch, Class<?> testType) {
FilterMapping filterMapping = filterMap.get(testType.getName());
DBSObjectFilter filter;
if (filterMapping == null) {
// Try to find using interfaces and superclasses
for (Class<?> it : testType.getInterfaces()) {
filterMapping = filterMap.get(it.getName());
if (filterMapping != null) {
filter = filterMapping.getFilter(parentObject, firstMatch);
if (filter != null && (firstMatch || filter.isEnabled())) return filterMapping;
}
}
}
if (filterMapping != null) {
filter = filterMapping.getFilter(parentObject, firstMatch);
if (filter != null && (firstMatch || filter.isEnabled())) {
return filterMapping;
}
}
return null;
}
/**
 * Sets a filter for the type (per-object when parentObject is given, else as
 * the type's default) and records the change via updateObjectFilter().
 */
@Override
public void setObjectFilter(Class<?> type, DBSObject parentObject, DBSObjectFilter filter)
{
FilterMapping filterMapping = getFilterMapping(type, parentObject, true);
if (filterMapping != null) {
// Update filter
if (parentObject == null) {
filterMapping.defaultFilter = filter;
} else {
filterMapping.customFilters.put(FilterMapping.getFilterContainerUniqueID(parentObject), filter);
}
}
updateObjectFilter(type.getName(), parentObject == null ? null : FilterMapping.getFilterContainerUniqueID(parentObject), filter);
}
/** Removes all configured filters. */
void clearFilters() {
filterMap.clear();
}
/**
 * Stores a filter under the given type name, creating the mapping on first
 * use. A null objectID sets the type-wide default filter; otherwise the
 * filter is stored per object.
 */
void updateObjectFilter(String typeName, @Nullable String objectID, DBSObjectFilter filter)
{
    // computeIfAbsent replaces the manual get/null-check/put sequence
    FilterMapping filterMapping = filterMap.computeIfAbsent(typeName, FilterMapping::new);
    if (objectID == null) {
        filterMapping.defaultFilter = filter;
    } else {
        filterMapping.customFilters.put(objectID, filter);
    }
}
/** @return the virtual (user-defined metadata) model for this data source. */
@Override
@NotNull
public DBVModel getVirtualModel()
{
return virtualModel;
}
/** @return true when the virtual model is shared (its id differs from ours). */
public boolean hasSharedVirtualModel() {
return !CommonUtils.equalObjects(virtualModel.getId(), getId());
}
/**
 * Installs a virtual model: adopted directly when its id matches this data
 * source, otherwise wrapped as a shared model keeping the original id.
 */
public void setVirtualModel(@NotNull DBVModel virtualModel) {
if (virtualModel.getId().equals(getId())) {
// DS-specific model
this.virtualModel = virtualModel;
this.virtualModel.setDataSourceContainer(this);
} else {
// Shared model
this.virtualModel = new DBVModel(this, virtualModel);
this.virtualModel.setId(virtualModel.getId());
}
}
/**
 * Lazily resolves the native client location from the driver using the
 * configured client home id; may return null when none is configured/found.
 */
@Override
public DBPNativeClientLocation getClientHome()
{
if (clientHome == null && !CommonUtils.isEmpty(connectionInfo.getClientHomeId())) {
this.clientHome = DBUtils.findObject(driver.getNativeClientLocations(), connectionInfo.getClientHomeId());
}
return clientHome;
}
/**
 * Returns the currently active network handlers: the proxy handler (if any)
 * followed by the tunnel handler (if any); empty array when neither is set.
 */
@Override
public DBWNetworkHandler[] getActiveNetworkHandlers() {
    List<DBWNetworkHandler> activeHandlers = new ArrayList<>(2);
    if (proxyHandler != null) {
        activeHandlers.add(proxyHandler);
    }
    if (tunnelHandler != null) {
        activeHandlers.add(tunnelHandler);
    }
    return activeHandlers.toArray(new DBWNetworkHandler[0]);
}
/** @return the origin (configuration source) of this descriptor. */
@NotNull
DataSourceOrigin getOrigin() {
return origin;
}
/** @return true when loaded from a non-default (provided) origin. */
@Override
public boolean isProvided() {
return !origin.isDefault();
}
@Override
public boolean isTemporary() {
return temporary;
}
public void setTemporary(boolean temporary) {
this.temporary = temporary;
}
/** Data source containers have no parent in the object hierarchy. */
@Override
public DBSObject getParentObject()
{
return null;
}
/**
 * Refreshes the underlying data source: delegates to it when it supports
 * refresh, otherwise performs a full reconnect, then notifies listeners.
 */
@Override
public DBSObject refreshObject(@NotNull DBRProgressMonitor monitor)
throws DBException
{
if (dataSource instanceof DBPRefreshableObject) {
dataSource = (DBPDataSource) ((DBPRefreshableObject) dataSource).refreshObject(monitor);
} else {
this.reconnect(monitor, false);
}
getRegistry().notifyDataSourceListeners(new DBPEvent(
DBPEvent.Action.OBJECT_UPDATE,
DataSourceDescriptor.this));
return this;
}
@Override
public void setDescription(@Nullable String description)
{
this.description = description;
}
/** @return the time of the last successful connect, or null. */
public Date getConnectTime() {
return connectTime;
}
/** @return true when a lock password hash is set. */
public boolean isLocked() {
return !CommonUtils.isEmpty(lockPasswordHash);
}
@Nullable
public String getLockPasswordHash() {
return lockPasswordHash;
}
void setLockPasswordHash(@Nullable String lockPasswordHash) {
this.lockPasswordHash = lockPasswordHash;
}
/** @return the live data source, or null while disconnected. */
@Nullable
@Override
public DBPDataSource getDataSource()
{
return dataSource;
}
@Nullable
@Override
public DataSourceFolder getFolder() {
return folder;
}
@Override
public void setFolder(@Nullable DBPDataSourceFolder folder) {
this.folder = (DataSourceFolder) folder;
}
/** Descriptors of this class are always considered persisted. */
@Override
public boolean isPersisted()
{
return true;
}
@NotNull
@Override
public DBPDataSourceRegistry getRegistry()
{
return registry;
}
@NotNull
@Override
public DBPProject getProject() {
return registry.getProject();
}
/** Flushes the registry configuration to storage. */
@Override
public void persistConfiguration()
{
registry.flushConfig();
}
/** Connected means a live data source instance exists. */
@Override
public boolean isConnected()
{
return dataSource != null;
}
/**
 * Opens a connection for this data source container.
 *
 * Flow: guard against concurrent/duplicate connects; ask for credentials when
 * they are not saved; fire BEFORE_CONNECT events; initialize proxy and tunnel
 * handlers (the tunnel may rewrite the connection configuration); resolve
 * dynamic variables / profile settings; open the data source via the driver;
 * optionally initialize it; fire AFTER_CONNECT events and notify listeners.
 * On failure the tunnel is closed, handlers are cleared and listeners are
 * notified with the failure state.
 *
 * Fix: the tunnel-close failure is now logged with its own exception (e1)
 * instead of the outer connect failure (e), which previously hid the cause.
 *
 * @param monitor    progress monitor
 * @param initialize when true, dataSource.initialize() is invoked after open
 * @param reflect    when true, listeners are notified on success
 * @return true on success, false when the user cancelled a credentials prompt
 * @throws DBException when the connection cannot be established
 */
public boolean connect(DBRProgressMonitor monitor, boolean initialize, boolean reflect)
throws DBException
{
if (connecting) {
log.debug("Can't connect - connect/disconnect is in progress");
return false;
}
if (this.isConnected()) {
log.debug("Can't connect - already connected");
return false;
}
log.debug("Connect with '" + getName() + "' (" + getId() + ")");
//final String oldName = getConnectionConfiguration().getUserName();
//final String oldPassword = getConnectionConfiguration().getUserPassword();
if (!isSavePassword() && !getDriver().isAnonymousAccess()) {
// Ask for password
if (!askForPassword(this, null, false)) {
updateDataSourceObject(this);
return false;
}
}
processEvents(monitor, DBPConnectionEventType.BEFORE_CONNECT);
connecting = true;
tunnelConnectionInfo = null;
resolvedConnectionInfo = null;
try {
// Handle tunnelHandler
// Open tunnelHandler and replace connection info with new one
this.proxyHandler = null;
this.tunnelHandler = null;
DBWHandlerConfiguration tunnelConfiguration = null, proxyConfiguration = null;
for (DBWHandlerConfiguration handler : connectionInfo.getHandlers()) {
if (handler.isEnabled()) {
// Set driver explicitly.
// Handler config may have null driver if it was copied from profile config.
handler.setDriver(getDriver());
if (handler.getType() == DBWHandlerType.TUNNEL) {
tunnelConfiguration = handler;
} else if (handler.getType() == DBWHandlerType.PROXY) {
proxyConfiguration = handler;
}
}
}
monitor.beginTask("Connect to " + getName(), tunnelConfiguration != null ? 3 : 2);
// Setup proxy handler
if (proxyConfiguration != null) {
monitor.subTask("Initialize proxy");
proxyHandler = proxyConfiguration.createHandler(DBWNetworkHandler.class);
proxyHandler.initializeHandler(monitor, registry.getPlatform(), proxyConfiguration, connectionInfo);
}
if (tunnelConfiguration != null) {
monitor.subTask("Initialize tunnel");
tunnelHandler = tunnelConfiguration.createHandler(DBWTunnel.class);
try {
if (!tunnelConfiguration.isSavePassword()) {
DBWTunnel.AuthCredentials rc = tunnelHandler.getRequiredCredentials(tunnelConfiguration);
if (rc != DBWTunnel.AuthCredentials.NONE) {
if (!askForPassword(this, tunnelConfiguration, rc == DBWTunnel.AuthCredentials.PASSWORD)) {
updateDataSourceObject(this);
tunnelHandler = null;
return false;
}
}
}
/*
for (DBWHandlerConfiguration handler : getConnectionConfiguration().getDeclaredHandlers()) {
if (handler.isEnabled() && handler.isSecured() && !handler.isSavePassword()) {
if (!DataSourceHandler.askForPassword(this, handler)) {
DataSourceHandler.updateDataSourceObject(this);
return false;
}
}
}
*/
if (preferenceStore.getBoolean(ModelPreferences.CONNECT_USE_ENV_VARS)) {
// Work on a copy so env-var resolution doesn't mutate the stored config
tunnelConfiguration = new DBWHandlerConfiguration(tunnelConfiguration);
tunnelConfiguration.resolveSystemEnvironmentVariables();
}
DBExecUtils.startContextInitiation(this);
try {
// Tunnel rewrites host/port to point at its local endpoint
tunnelConnectionInfo = tunnelHandler.initializeHandler(monitor, registry.getPlatform(), tunnelConfiguration, connectionInfo);
} finally {
DBExecUtils.finishContextInitiation(this);
}
} catch (Exception e) {
throw new DBCException("Can't initialize tunnel", e);
}
monitor.worked(1);
}
monitor.subTask("Connect to data source");
if (preferenceStore.getBoolean(ModelPreferences.CONNECT_USE_ENV_VARS) ||
!CommonUtils.isEmpty(connectionInfo.getConfigProfileName()) ||
!CommonUtils.isEmpty(connectionInfo.getUserProfileName()))
{
// Build the resolved (effective) configuration from tunnel/user config
this.resolvedConnectionInfo = new DBPConnectionConfiguration(this.tunnelConnectionInfo != null ? tunnelConnectionInfo : connectionInfo);
if (preferenceStore.getBoolean(ModelPreferences.CONNECT_USE_ENV_VARS)) {
this.resolvedConnectionInfo.resolveDynamicVariables();
}
if (!CommonUtils.isEmpty(connectionInfo.getConfigProfileName())) {
// Update config from profile
DBWNetworkProfile profile = registry.getNetworkProfile(connectionInfo.getConfigProfileName());
if (profile != null) {
for (DBWHandlerConfiguration handlerCfg : profile.getConfigurations()) {
if (handlerCfg.isEnabled()) {
resolvedConnectionInfo.updateHandler(handlerCfg);
}
}
}
}
if (!CommonUtils.isEmpty(connectionInfo.getUserProfileName())) {
// User profile support not implemented yet
}
}
this.dataSource = getDriver().getDataSourceProvider().openDataSource(monitor, this);
this.connectTime = new Date();
monitor.worked(1);
if (initialize) {
monitor.subTask("Initialize data source");
try {
dataSource.initialize(monitor);
} catch (Throwable e) {
log.error("Error initializing datasource", e);
}
// Change connection properties
initConnectionState(monitor);
}
this.connectFailed = false;
processEvents(monitor, DBPConnectionEventType.AFTER_CONNECT);
if (reflect) {
getRegistry().notifyDataSourceListeners(new DBPEvent(
DBPEvent.Action.OBJECT_UPDATE,
DataSourceDescriptor.this,
true));
}
try {
log.debug("Connected (" + getId() + ", " + getPropertyDriver() + ")");
} catch (Throwable e) {
log.debug("Connected (" + getId() + ", driver unknown)");
}
return true;
} catch (Exception e) {
log.debug("Connection failed (" + getId() + ")");
if (tunnelHandler != null) {
try {
tunnelHandler.closeTunnel(monitor);
} catch (IOException e1) {
// Log the tunnel-close failure itself (was logging the outer 'e')
log.error("Error closing tunnel", e1);
} finally {
tunnelHandler = null;
tunnelConnectionInfo = null;
}
}
proxyHandler = null;
// Failed
connectFailed = true;
//if (reflect) {
getRegistry().notifyDataSourceListeners(new DBPEvent(
DBPEvent.Action.OBJECT_UPDATE,
DataSourceDescriptor.this,
false));
//}
if (e instanceof DBException) {
throw (DBException)e;
} else {
throw new DBException("Internal error connecting to " + getName(), e);
}
} finally {
monitor.done();
connecting = false;
}
}
/**
 * Applies post-connect state from the connection's bootstrap settings.
 * Currently this only selects the default (active) object, if one is
 * configured and the datasource supports object selection.
 */
private void initConnectionState(DBRProgressMonitor monitor) throws DBException {
if (dataSource == null) {
return;
}
// Set active object
if (dataSource instanceof DBSObjectContainer) {
String activeObject = getConnectionConfiguration().getBootstrap().getDefaultObjectName();
if (!CommonUtils.isEmptyTrimmed(activeObject)) {
DBSObjectContainer schemaContainer = DBUtils.getChangeableObjectContainer((DBSObjectContainer) dataSource);
// Only containers that also implement DBSObjectSelector can switch the default object
if (schemaContainer != null && schemaContainer instanceof DBSObjectSelector) {
DBSObject child = schemaContainer.getChild(monitor, activeObject);
if (child != null) {
try {
((DBSObjectSelector) schemaContainer).setDefaultObject(monitor, child);
} catch (DBException e) {
// Selection failure is non-fatal: the connection stays usable
log.warn("Can't select active object", e);
}
} else {
log.debug("Object '" + activeObject + "' not found");
}
}
}
}
}
/**
 * Executes the user-configured shell command bound to the given connection
 * life-cycle event (e.g. before/after connect or disconnect), if enabled.
 * The spawned process is remembered so it can be terminated on disconnect.
 */
private void processEvents(DBRProgressMonitor monitor, DBPConnectionEventType eventType)
{
DBPConnectionConfiguration info = getActualConnectionConfiguration();
DBRShellCommand command = info.getEvent(eventType);
if (command != null && command.isEnabled()) {
final DBRProcessDescriptor processDescriptor = new DBRProcessDescriptor(command, getVariablesResolver());
monitor.subTask("Execute process " + processDescriptor.getName());
DBWorkbench.getPlatformUI().executeProcess(processDescriptor);
{
// Run output grab job
// Background job that reads and logs the process error output.
new AbstractJob(processDescriptor.getName() + ": output reader") {
@Override
protected IStatus run(DBRProgressMonitor monitor) {
try {
String output = processDescriptor.dumpErrors();
log.debug("Process error output:\n" + output);
} catch (Exception e) {
log.debug(e);
}
return Status.OK_STATUS;
}
}.schedule();
}
if (command.isWaitProcessFinish()) {
int resultCode;
if (command.getWaitProcessTimeoutMs() >= 0) {
resultCode = processDescriptor.waitFor(command.getWaitProcessTimeoutMs());
} else {
// Negative timeout means "wait indefinitely"
resultCode = processDescriptor.waitFor();
}
log.debug(processDescriptor.getName() + " result code: " + resultCode);
}
// Track the child process so disconnect() can terminate it if requested
addChildProcess(processDescriptor);
}
}
/**
 * Disconnects from the data source and notifies registry listeners
 * (delegates to {@link #disconnect(DBRProgressMonitor, boolean)} with reflect = true).
 */
@Override
public boolean disconnect(final DBRProgressMonitor monitor)
throws DBException
{
return disconnect(monitor, true);
}
/**
 * Full teardown sequence: release active users, fire BEFORE_DISCONNECT,
 * shut the datasource down, close the tunnel, fire AFTER_DISCONNECT,
 * terminate tracked child processes and clear runtime state.
 * Ordering of these steps is significant; do not reorder casually.
 *
 * @param reflect when true, registry listeners are notified of the state change
 * @return true if disconnected, false if a connect/disconnect was already in progress
 */
private boolean disconnect(final DBRProgressMonitor monitor, boolean reflect)
throws DBException
{
if (dataSource == null) {
log.error("Datasource is not connected");
return true;
}
// The 'connecting' flag guards both connect and disconnect transitions
if (connecting) {
log.error("Connect/disconnect is in progress");
return false;
}
connecting = true;
try {
releaseDataSourceUsers(monitor);
monitor.beginTask("Disconnect from '" + getName() + "'", 5 + dataSource.getAvailableInstances().size());
processEvents(monitor, DBPConnectionEventType.BEFORE_DISCONNECT);
monitor.worked(1);
// Close datasource
monitor.subTask("Close connection");
// NOTE(review): this null re-check is redundant given the guard above
if (dataSource != null) {
dataSource.shutdown(monitor);
}
monitor.worked(1);
// Close tunnelHandler
if (tunnelHandler != null) {
monitor.subTask("Close tunnel");
try {
tunnelHandler.closeTunnel(monitor);
} catch (Throwable e) {
// A dead tunnel should not prevent the rest of the teardown
log.error("Error closing tunnel", e);
}
}
monitor.worked(1);
proxyHandler = null;
processEvents(monitor, DBPConnectionEventType.AFTER_DISCONNECT);
monitor.worked(1);
monitor.done();
// Terminate child processes
synchronized (childProcesses) {
for (Iterator<DBRProcessDescriptor> iter = childProcesses.iterator(); iter.hasNext(); ) {
DBRProcessDescriptor process = iter.next();
if (process.isRunning() && process.getCommand().isTerminateAtDisconnect()) {
process.terminate();
}
iter.remove();
}
}
// Clear all runtime state so isConnected() reports false
this.dataSource = null;
this.tunnelConnectionInfo = null;
this.resolvedConnectionInfo = null;
this.connectTime = null;
if (reflect) {
// Reflect UI
getRegistry().notifyDataSourceListeners(new DBPEvent(
DBPEvent.Action.OBJECT_UPDATE,
this,
false));
}
return true;
} finally {
connecting = false;
log.debug("Disconnected (" + getId() + ")");
}
}
/**
 * Notifies all registered datasource users of the impending disconnect and
 * cancels running jobs, waiting up to ~3 seconds for each to stop.
 */
private void releaseDataSourceUsers(DBRProgressMonitor monitor) {
List<DBPDataSourceTask> usersStamp;
// Snapshot under lock so we can iterate without holding the monitor
synchronized (users) {
usersStamp = new ArrayList<>(users);
}
int jobCount = 0;
// Save all unsaved data
for (DBPDataSourceTask user : usersStamp) {
if (user instanceof Job) {
jobCount++;
}
if (user instanceof DBPDataSourceHandler) {
((DBPDataSourceHandler) user).beforeDisconnect();
}
}
if (jobCount > 0) {
monitor.beginTask("Waiting for all active tasks to finish", jobCount);
// Stop all jobs
for (DBPDataSourceTask user : usersStamp) {
if (user instanceof Job) {
Job job = (Job) user;
monitor.subTask("Stop '" + job.getName() + "'");
if (job.getState() == Job.RUNNING) {
job.cancel();
try {
// Wait for 3 seconds
// Poll every 100ms; give up after 30 polls rather than block forever
for (int i = 0; i < 30; i++) {
Thread.sleep(100);
if (job.getState() != Job.RUNNING) {
break;
}
}
} catch (InterruptedException e) {
// its ok, do nothing
// NOTE(review): interrupt status is swallowed, not re-asserted
}
}
monitor.worked(1);
}
}
monitor.done();
}
}
/**
 * Reconnects (disconnect + connect) and notifies registry listeners.
 */
@Override
public boolean reconnect(final DBRProgressMonitor monitor)
throws DBException
{
return reconnect(monitor, true);
}
/**
 * Disconnects (if currently connected) and then connects again.
 *
 * @param reflect when true, registry listeners are notified of state changes
 * @return false when another connect/disconnect is in progress or the
 *         disconnect step fails; otherwise the result of the connect attempt
 */
public boolean reconnect(final DBRProgressMonitor monitor, boolean reflect)
    throws DBException
{
    if (connecting) {
        log.debug("Can't reconnect - connect/disconnect is in progress");
        return false;
    }
    // Only attempt the new connection once the old one is fully torn down
    final boolean readyToConnect = !isConnected() || disconnect(monitor, reflect);
    return readyToConnect && connect(monitor, true, reflect);
}
/**
 * Returns a defensive snapshot of the currently registered datasource tasks.
 */
@Override
public Collection<DBPDataSourceTask> getTasks()
{
synchronized (users) {
return new ArrayList<>(users);
}
}
/**
 * Registers a task as an active user of this datasource.
 * Duplicate registrations are ignored (with a warning).
 */
@Override
public void acquire(DBPDataSourceTask user)
{
    synchronized (users) {
        if (!users.contains(user)) {
            users.add(user);
        } else {
            log.warn("Datasource user '" + user + "' already registered in datasource '" + getName() + "'");
        }
    }
}
/**
 * Unregisters a previously acquired task. Warns when the task was never
 * registered, unless the descriptor is already disposed.
 */
@Override
public void release(DBPDataSourceTask user)
{
synchronized (users) {
if (!users.remove(user)) {
if (!isDisposed()) {
log.warn("Datasource user '" + user + "' is not registered in datasource '" + getName() + "'");
}
}
}
}
/** Forwards the event to all listeners registered on the owning registry. */
@Override
public void fireEvent(DBPEvent event) {
registry.notifyDataSourceListeners(event);
}
/**
 * Lazily creates the data formatter profile for this datasource.
 * NOTE(review): lazy init is not synchronized — presumably only accessed
 * from a single (UI) thread; confirm before relying on it concurrently.
 */
@Override
public DBDDataFormatterProfile getDataFormatterProfile()
{
if (this.formatterProfile == null) {
this.formatterProfile = new DataFormatterProfile(getId(), preferenceStore);
}
return this.formatterProfile;
}
/** Replaces the formatter profile (overrides the lazily created default). */
@Override
public void setDataFormatterProfile(DBDDataFormatterProfile formatterProfile)
{
this.formatterProfile = formatterProfile;
}
/**
 * Returns the datasource-specific default value handler when the datasource
 * exposes one, otherwise the generic {@code DefaultValueHandler}.
 */
@NotNull
@Override
public DBDValueHandler getDefaultValueHandler()
{
if (dataSource instanceof DBDPreferences) {
return ((DBDPreferences) dataSource).getDefaultValueHandler();
}
return DefaultValueHandler.INSTANCE;
}
/** Returns this datasource's preference store. */
@NotNull
@Override
public DataSourcePreferenceStore getPreferenceStore()
{
return preferenceStore;
}
/** Clears the stored user password from the connection configuration. */
public void resetPassword()
{
connectionInfo.setUserPassword(null);
}
/**
 * Adapts this descriptor to the requested type. Supports the container
 * interface itself and a {@code DBPPropertySource} which, for a connected
 * datasource, also lists one "Connections" entry per execution context.
 */
@Nullable
@Override
public <T> T getAdapter(Class<T> adapter)
{
if (DBPDataSourceContainer.class.isAssignableFrom(adapter)) {
return adapter.cast(this);
} else if (adapter == DBPPropertySource.class) {
PropertyCollector coll = new PropertyCollector(this, true);
coll.collectProperties();
if (dataSource != null) {
// Enumerate all open execution contexts across all instances
int conIndex = 0;
for (DBSInstance instance : dataSource.getAvailableInstances()) {
for (DBCExecutionContext context : instance.getAllContexts()) {
conIndex++;
coll.addProperty("Connections", conIndex, String.valueOf(conIndex), new ContextInfo(context));
}
}
}
return adapter.cast(coll);
}
return null;
}
/** Returns the driver's plain icon as this object's image. */
@Override
@NotNull
public DBPImage getObjectImage()
{
return driver.getPlainIcon();
}
/**
 * Maps runtime status to an object state:
 * connected -> ACTIVE, last connect failed -> INVALID, otherwise NORMAL.
 */
@NotNull
@Override
public DBSObjectState getObjectState()
{
    if (isConnected()) {
        return DBSObjectState.ACTIVE;
    }
    return connectFailed ? DBSObjectState.INVALID : DBSObjectState.NORMAL;
}
/** State is derived on the fly in getObjectState(), so nothing to refresh. */
@Override
public void refreshObjectState(@NotNull DBRProgressMonitor monitor)
{
// just do nothing
}
/**
 * Generates a new datasource id of the form
 * {@code <driverId>-<currentTimeMillisHex>-<randomHex>}.
 *
 * @param driver driver whose id prefixes the generated id
 * @return a practically unique id string
 */
public static String generateNewId(DBPDriver driver)
{
    // Clear the sign bit instead of negating: the original "rnd = -rnd"
    // overflows for Long.MIN_VALUE (whose negation is itself), leaving a
    // negative value despite the intent to use a non-negative one.
    long rnd = new Random().nextLong() & Long.MAX_VALUE;
    return driver.getId() + "-" + Long.toHexString(System.currentTimeMillis()) + "-" + Long.toHexString(rnd);
}
/** Despite the name, this returns the driver's display name. */
@Property(viewable = true, order = 20, category = "Driver")
public String getPropertyDriverType()
{
return driver.getName();
}
/**
 * Builds the server address as "host[:port]"; either part is omitted
 * when not configured.
 */
@Property(order = 3, category = "Server")
public String getPropertyAddress()
{
    final StringBuilder address = new StringBuilder();
    final String host = connectionInfo.getHostName();
    if (!CommonUtils.isEmpty(host)) {
        address.append(host);
    }
    final String port = connectionInfo.getHostPort();
    if (!CommonUtils.isEmpty(port)) {
        address.append(':').append(port);
    }
    return address.toString();
}
/** Configured database name (may be null/empty). */
@Property(order = 4, category = "Server")
public String getPropertyDatabase()
{
return connectionInfo.getDatabaseName();
}
/** Configured connection URL (may be null/empty). */
@Property(order = 5, category = "Server")
public String getPropertyURL()
{
return connectionInfo.getUrl();
}
/**
 * Returns "product [version]" for a connected datasource, or null when
 * disconnected or the product name is unavailable.
 */
@Nullable
@Property(order = 6, category = "Server")
public String getPropertyServerName()
{
if (dataSource != null) {
String serverName = dataSource.getInfo().getDatabaseProductName();
String serverVersion = dataSource.getInfo().getDatabaseProductVersion();
if (serverName != null) {
return serverName + (serverVersion == null ? "" : " [" + serverVersion + "]");
}
}
return null;
}
/** Extra server product details, or null when disconnected. */
@Nullable
@Property(order = 7, category = "Server")
public Map<String, Object> getPropertyServerDetails()
{
if (dataSource != null) {
return dataSource.getInfo().getDatabaseProductDetails();
}
return null;
}
/**
 * Returns "driver [version]" for a connected datasource, or null when
 * disconnected or the driver name is unavailable.
 */
@Nullable
@Property(order = 21, category = "Driver")
public String getPropertyDriver()
{
if (dataSource != null) {
String driverName = dataSource.getInfo().getDriverName();
String driverVersion = dataSource.getInfo().getDriverVersion();
if (driverName != null) {
return driverName + (driverVersion == null ? "" : " [" + driverVersion + "]");
}
}
return null;
}
/**
 * Formatted time of the last successful connect, or null when disconnected.
 * A fresh DateFormat is created per call (DateFormat is not thread-safe).
 */
@Nullable
@Property(order = 8)
public String getPropertyConnectTime()
{
if (connectTime != null) {
return DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.SHORT).format(connectTime);
}
return null;
}
/** Display name of the configured connection type. */
@Property(order = 9)
public String getPropertyConnectType()
{
return connectionInfo.getConnectionType().getName();
}
/** Tracks a spawned event process so disconnect() can terminate it later. */
private void addChildProcess(DBRProcessDescriptor process)
{
synchronized (childProcesses) {
childProcesses.add(process);
}
}
/**
 * Copies object filters, virtual model and boolean flags from another
 * descriptor. NOTE(review): connectionInfo/driver are NOT copied here —
 * presumably handled elsewhere by the caller; confirm before reuse.
 */
public void copyFrom(DataSourceDescriptor descriptor) {
filterMap.clear();
for (FilterMapping mapping : descriptor.getObjectFilters()) {
// Deep-copy each mapping so the two descriptors do not share state
filterMap.put(mapping.typeName, new FilterMapping(mapping));
}
virtualModel.copyFrom(descriptor.getVirtualModel());
setDescription(descriptor.getDescription());
setSavePassword(descriptor.isSavePassword());
setShowSystemObjects(descriptor.isShowSystemObjects());
setShowUtilityObjects(descriptor.isShowUtilityObjects());
setConnectionReadOnly(descriptor.isConnectionReadOnly());
}
/** Secure preferences node dedicated to this datasource (keyed by its id). */
@Override
@NotNull
public ISecurePreferences getSecurePreferences() {
return registry.getSecurePreferences().node(id);
}
/** Human-readable form: "name [driver]". */
@Override
public String toString() {
return name + " [" + driver + "]";
}
/**
 * Compares persisted configuration (not runtime state such as the live
 * connection) with another descriptor. This is intentionally NOT equals():
 * identity semantics of the descriptor itself are unchanged.
 */
public boolean equalSettings(Object obj) {
if (!(obj instanceof DataSourceDescriptor)) {
return false;
}
DataSourceDescriptor source = (DataSourceDescriptor) obj;
return
CommonUtils.equalOrEmptyStrings(this.name, source.name) &&
CommonUtils.equalOrEmptyStrings(this.description, source.description) &&
CommonUtils.equalObjects(this.savePassword, source.savePassword) &&
CommonUtils.equalObjects(this.showSystemObjects, source.showSystemObjects) &&
CommonUtils.equalObjects(this.showUtilityObjects, source.showUtilityObjects) &&
CommonUtils.equalObjects(this.connectionReadOnly, source.connectionReadOnly) &&
CommonUtils.equalObjects(this.driver, source.driver) &&
CommonUtils.equalObjects(this.connectionInfo, source.connectionInfo) &&
CommonUtils.equalObjects(this.filterMap, source.filterMap) &&
CommonUtils.equalObjects(this.formatterProfile, source.formatterProfile) &&
CommonUtils.equalObjects(this.clientHome, source.clientHome) &&
CommonUtils.equalObjects(this.lockPasswordHash, source.lockPasswordHash) &&
CommonUtils.equalObjects(this.folder, source.folder) &&
CommonUtils.equalsContents(this.connectionModifyRestrictions, source.connectionModifyRestrictions);
}
/**
 * Read-only wrapper exposing an execution context's name as a viewable
 * property; used by getAdapter() for the "Connections" property entries.
 */
public static class ContextInfo implements DBPObject {
private final DBCExecutionContext context;
public ContextInfo(DBCExecutionContext context) {
this.context = context;
}
@Property(viewable = true, order = 1)
public String getName() {
return context.getContextName();
}
@Override
public String toString() {
return getName();
}
}
/**
 * Resolver for ${variable} substitution in event commands etc.
 * Lookup order: (1) raw connection properties by exact name,
 * (2) well-known connection variables (name lower-cased first),
 * (3) system environment/properties via SystemVariablesResolver.
 */
@Override
public GeneralUtils.IVariableResolver getVariablesResolver() {
return name -> {
String propValue = getActualConnectionConfiguration().getProperties().get(name);
if (propValue != null) {
return propValue;
}
name = name.toLowerCase(Locale.ENGLISH);
switch (name) {
case DBPConnectionConfiguration.VARIABLE_HOST: return getActualConnectionConfiguration().getHostName();
case DBPConnectionConfiguration.VARIABLE_PORT: return getActualConnectionConfiguration().getHostPort();
case DBPConnectionConfiguration.VARIABLE_SERVER: return getActualConnectionConfiguration().getServerName();
case DBPConnectionConfiguration.VARIABLE_DATABASE: return getActualConnectionConfiguration().getDatabaseName();
case DBPConnectionConfiguration.VARIABLE_USER: return getActualConnectionConfiguration().getUserName();
case DBPConnectionConfiguration.VARIABLE_PASSWORD: return getActualConnectionConfiguration().getUserPassword();
case DBPConnectionConfiguration.VARIABLE_URL: return getActualConnectionConfiguration().getUrl();
default: return SystemVariablesResolver.INSTANCE.get(name);
}
};
}
/** Creates a copy of this descriptor for another registry, with a fresh id. */
@Override
public DBPDataSourceContainer createCopy(DBPDataSourceRegistry forRegistry) {
DataSourceDescriptor copy = new DataSourceDescriptor(this, forRegistry);
copy.setId(DataSourceDescriptor.generateNewId(copy.getDriver()));
return copy;
}
/**
 * Prompts the user for credentials via the platform UI and stores the result
 * either on the network handler (tunnel/proxy) or on the connection config.
 * When the user opts to save the password, the datasource is persisted.
 *
 * @param networkHandler when non-null, credentials target this handler
 * @param passwordOnly when true, only the password is requested/updated
 * @return false if the user cancelled the prompt
 */
public static boolean askForPassword(@NotNull final DataSourceDescriptor dataSourceContainer, @Nullable final DBWHandlerConfiguration networkHandler, final boolean passwordOnly)
{
final String prompt = networkHandler != null ?
NLS.bind(RegistryMessages.dialog_connection_auth_title_for_handler, networkHandler.getTitle()) :
"'" + dataSourceContainer.getName() + RegistryMessages.dialog_connection_auth_title; //$NON-NLS-1$
final String user = networkHandler != null ? networkHandler.getUserName() : dataSourceContainer.getConnectionConfiguration().getUserName();
final String password = networkHandler != null ? networkHandler.getPassword() : dataSourceContainer.getConnectionConfiguration().getUserPassword();
// Temporary datasources cannot persist the password, so hide that option
DBPAuthInfo authInfo = DBWorkbench.getPlatformUI().promptUserCredentials(prompt, user, password, passwordOnly, !dataSourceContainer.isTemporary());
if (authInfo == null) {
return false;
}
if (networkHandler != null) {
if (!passwordOnly) {
networkHandler.setUserName(authInfo.getUserName());
}
networkHandler.setPassword(authInfo.getUserPassword());
networkHandler.setSavePassword(authInfo.isSavePassword());
} else {
if (!passwordOnly) {
dataSourceContainer.getConnectionConfiguration().setUserName(authInfo.getUserName());
}
dataSourceContainer.getConnectionConfiguration().setUserPassword(authInfo.getUserPassword());
dataSourceContainer.setSavePassword(authInfo.isSavePassword());
}
if (authInfo.isSavePassword()) {
// Update connection properties
dataSourceContainer.getRegistry().updateDataSource(dataSourceContainer);
}
return true;
}
/** Broadcasts an OBJECT_UPDATE (enabled=false) event for the given descriptor. */
public void updateDataSourceObject(DataSourceDescriptor dataSourceDescriptor)
{
getRegistry().notifyDataSourceListeners(new DBPEvent(
DBPEvent.Action.OBJECT_UPDATE,
dataSourceDescriptor,
false));
}
}
| apache-2.0 |
Stratio/stratio-connector-decision | connector-decision/src/test/java/com/stratio/connector/decision/ftest/thread/ThreadTwoFilterFunctionalFT.java | 7674 | /*
* Licensed to STRATIO (C) under one or more contributor license agreements.
* See the NOTICE file distributed with this work for additional information
* regarding copyright ownership. The STRATIO (C) licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.stratio.connector.decision.ftest.thread;
import static org.junit.Assert.assertEquals;
import java.util.ArrayList;
import java.util.LinkedList;
import org.junit.Before;
import org.junit.Test;
import com.stratio.connector.commons.metadata.TableMetadataBuilder;
import com.stratio.connector.commons.test.util.LogicalWorkFlowCreator;
import com.stratio.connector.decision.ftest.GenericDecisionTest;
import com.stratio.connector.decision.ftest.thread.actions.DecisionInserter;
import com.stratio.connector.decision.ftest.thread.actions.DecisionRead;
import com.stratio.connector.decision.ftest.thread.actions.RowToInsertDefault;
import com.stratio.crossdata.common.connector.IResultHandler;
import com.stratio.crossdata.common.data.Row;
import com.stratio.crossdata.common.exceptions.ConnectorException;
import com.stratio.crossdata.common.exceptions.ExecutionException;
import com.stratio.crossdata.common.exceptions.UnsupportedException;
import com.stratio.crossdata.common.logicalplan.LogicalWorkflow;
import com.stratio.crossdata.common.metadata.ColumnType;
import com.stratio.crossdata.common.metadata.DataType;
import com.stratio.crossdata.common.metadata.TableMetadata;
import com.stratio.crossdata.common.result.QueryResult;
import com.stratio.crossdata.common.statements.structures.window.WindowType;
/**
 * Functional test: a streaming query with two filters (integer "lower than"
 * plus string equality) must return exactly the rows produced by the one
 * inserter whose rows satisfy BOTH filters.
 */
public class ThreadTwoFilterFunctionalFT extends GenericDecisionTest {
public static final int CORRECT_ELMENT_TO_FIND = 90;
private static final int WAIT_TIME = 20;
private static final String TEXT_FIND = "text find";
public int OTHER_INT_VALUE = 5;
public int recoveredRecord = 0;
TableMetadata tableMetadata;
// NOTE(review): the counters below are initialised in setUp but never
// updated or asserted in this test.
int numberDefaultText = 0;
int numberAlternativeText = 0;
private int correctValueCount = 0;
private int incorrectValueCount = 0;
private ArrayList<String> recovered = new ArrayList<>();
/** Resets counters and (re)creates the test table, ignoring "already exists". */
@Before
public void setUp() throws ConnectorException {
super.setUp();
numberDefaultText = 0;
numberAlternativeText = 0;
correctValueCount = 0;
incorrectValueCount = 0;
TableMetadataBuilder tableMetadataBuilder = new TableMetadataBuilder(CATALOG, TABLE);
tableMetadata = tableMetadataBuilder.addColumn(STRING_COLUMN, new ColumnType(DataType.VARCHAR))
.addColumn(INTEGER_COLUMN, new ColumnType(DataType.INT)).addColumn(BOOLEAN_COLUMN, new ColumnType(DataType.BOOLEAN))
.addColumn(INTEGER_CHANGEABLE_COLUMN, new ColumnType(DataType.INT))
.build(false);
try {
sConnector.getMetadataEngine().createTable(getClusterName(), tableMetadata);
} catch (ExecutionException t) {
// Table may already exist from a previous run; ignore
}
}
/**
 * Starts the streaming read, then three concurrent inserters: two whose rows
 * fail at least one filter and one ("correct") whose rows pass both.
 */
@Test
public void twoFilterTest() throws InterruptedException, UnsupportedException {
DecisionRead stremingRead = new DecisionRead(sConnector, createTwoFilterWorkFlow(), new ResultNumberHandler());
stremingRead.start();
System.out.println("TEST ********************** Querying......");
waitSeconds(WAIT_TIME);
System.out.println("TEST ********************** Inserting ......");
// Matches the string filter only when changed below; this one fails the string filter
DecisionInserter stramingInserter = new DecisionInserter(sConnector, getClusterName(), tableMetadata, new RowToInsertDefault());
stramingInserter.setAddIntegerChangeable(true);
stramingInserter.changeIntegerChangeableColumn(OTHER_INT_VALUE);
stramingInserter.start();
// Matches the string filter but not the integer filter
DecisionInserter otherDecisionInserter = new DecisionInserter(sConnector, getClusterName(), tableMetadata, new RowToInsertDefault());
otherDecisionInserter.changeStingColumn(TEXT_FIND);
otherDecisionInserter.start();
// This is the correct inserter.
DecisionInserter correctDecisionInserter = new DecisionInserter(sConnector, getClusterName(),
tableMetadata, new RowToInsertDefault());
correctDecisionInserter.setAddIntegerChangeable(true);
correctDecisionInserter.changeStingColumn(TEXT_FIND);
correctDecisionInserter.changeIntegerChangeableColumn(OTHER_INT_VALUE - 1);
correctDecisionInserter.numOfElement(CORRECT_ELMENT_TO_FIND); // Desiere element number
correctDecisionInserter.start();
waitSeconds(WAIT_TIME);
stremingRead.end();
waitSeconds(WAIT_TIME);
System.out.println("TEST ********************** END Querying Test......");
// NOTE(review): correctDecisionInserter is never end()ed here — presumably
// it stops itself after numOfElement rows; confirm to avoid a leaked thread.
otherDecisionInserter.end();
stramingInserter.end();
System.out.println("TEST ********************** END Insert......");
assertEquals("All correct elements have been found", CORRECT_ELMENT_TO_FIND, recoveredRecord);
for (String recover : recovered) {
System.out.println(recover);
}
}
/** Builds the workflow: select all columns, filter int-changeable < 5 AND string == "text find", 5s window. */
private LogicalWorkflow createTwoFilterWorkFlow() throws UnsupportedException {
LogicalWorkFlowCreator logicalWorkFlowCreator = new LogicalWorkFlowCreator(CATALOG, TABLE, getClusterName());
LinkedList<LogicalWorkFlowCreator.ConnectorField> selectColumns = new LinkedList<>();
selectColumns.add(logicalWorkFlowCreator.createConnectorField(STRING_COLUMN, STRING_COLUMN, new ColumnType(DataType.TEXT)));
selectColumns.add(logicalWorkFlowCreator.createConnectorField(INTEGER_COLUMN, INTEGER_COLUMN, new ColumnType(DataType.INT)));
selectColumns.add(logicalWorkFlowCreator.createConnectorField(BOOLEAN_COLUMN, BOOLEAN_COLUMN,
new ColumnType(DataType.BOOLEAN)));
selectColumns.add(logicalWorkFlowCreator.createConnectorField(INTEGER_CHANGEABLE_COLUMN,
INTEGER_CHANGEABLE_COLUMN, new ColumnType(DataType.INT)));
return logicalWorkFlowCreator.addColumnName(STRING_COLUMN).addColumnName(INTEGER_COLUMN)
.addColumnName(BOOLEAN_COLUMN).addColumnName(INTEGER_CHANGEABLE_COLUMN)
.addSelect(selectColumns).addNLowerFilter(INTEGER_CHANGEABLE_COLUMN, OTHER_INT_VALUE, false)
.addEqualFilter(STRING_COLUMN, TEXT_FIND, false, false).addWindow(WindowType.TEMPORAL, 5)
.build();
}
/** Collects returned rows and counts them into recoveredRecord. */
private class ResultNumberHandler implements IResultHandler {
public ResultNumberHandler() {
}
@Override
public void processException(String queryId, ExecutionException exception) {
System.out.println(queryId + " " + exception.getMessage());
exception.printStackTrace();
}
@Override
public void processResult(QueryResult result) {
for (Row row : result.getResultSet()) {
recovered.add(INTEGER_CHANGEABLE_COLUMN + "=" + row.getCell(INTEGER_CHANGEABLE_COLUMN).getValue() + ","
+ STRING_COLUMN + "=" + row.getCell(STRING_COLUMN).getValue());
recoveredRecord++;
}
}
}
}
| apache-2.0 |
SilicorniO/webui | src/model/UIAttr.ts | 4600 | export enum UI_VIEW_ID {
NONE = "",
SCREEN = "s",
PARENT = "p",
LAST = "l",
}
// A reference target: either one of the well-known UI_VIEW_ID markers or a
// concrete view id string.
export type UIViewId = UI_VIEW_ID | string
// The five reference kinds an attribute can hold (start/end edge pairings
// plus centering).
export enum UI_REF {
    START_START,
    START_END,
    END_END,
    END_START,
    CENTER,
}
// All reference kinds, in a fixed iteration order.
export const UI_REF_LIST: UI_REF[] = [
    UI_REF.START_START,
    UI_REF.START_END,
    UI_REF.END_END,
    UI_REF.END_START,
    UI_REF.CENTER,
]
// Sizing modes: wrap content, absolute screen units, or percentage.
export enum UI_SIZE {
    SIZE_CONTENT = "sc",
    SCREEN = "s",
    PERCENTAGE = "sp",
}
export enum UI_OVERFLOW {
    HIDDEN = "h",
    SCROLL = "s",
    VISIBLE = "v",
}
// Per-aspect switches for UIAttr.clean(): a field explicitly set to false
// means "do NOT reset this aspect".
export interface UIAttrCleanOptions {
    size?: boolean
    position?: boolean
    margin?: boolean
    padding?: boolean
    overflow?: boolean
    center?: boolean
}
/**
 * Layout attributes for one axis of a view: edge references, size mode,
 * overflow behaviour, centering and margins/paddings.
 */
export default class UIAttr {
    startStart: UIViewId = ""
    startEnd: UIViewId = ""
    endEnd: UIViewId = ""
    endStart: UIViewId = ""

    size: UI_SIZE = UI_SIZE.SIZE_CONTENT
    sizeValue: number = 0
    percentPos: number = 0

    overflow: string = UI_OVERFLOW.HIDDEN
    center: UIViewId = ""

    marginStart: string = ""
    marginEnd: string = ""
    paddingStart: string = ""
    paddingEnd: string = ""

    /** Returns the view id stored for the given reference kind ("" if unknown). */
    public getRef(ref: UI_REF) {
        switch (ref) {
            case UI_REF.START_START:
                return this.startStart
            case UI_REF.START_END:
                return this.startEnd
            case UI_REF.END_END:
                return this.endEnd
            case UI_REF.END_START:
                return this.endStart
            case UI_REF.CENTER:
                return this.center
            default:
                return ""
        }
    }

    /** Stores a view id for the given reference kind (unknown kinds are ignored). */
    public setRef(ref: UI_REF, id: string) {
        switch (ref) {
            case UI_REF.START_START:
                this.startStart = id
                return
            case UI_REF.START_END:
                this.startEnd = id
                return
            case UI_REF.END_END:
                this.endEnd = id
                return
            case UI_REF.END_START:
                this.endStart = id
                return
            case UI_REF.CENTER:
                this.center = id
                return
            default:
                return
        }
    }

    /**
     * Parses a size expression: "sc" (wrap content), "<n>%[<pos>]" (percentage
     * with optional position suffix) or a plain number (screen units).
     */
    public setSize(value: string) {
        if (value == UI_SIZE.SIZE_CONTENT) {
            this.size = value
            this.sizeValue = 0
        } else if (String(value).indexOf("%") != -1) {
            const indexPercent = value.indexOf("%")
            this.sizeValue = parseFloat(value.substring(0, indexPercent))
            // Anything after '%' is the percent position, e.g. "50%2"
            if (indexPercent < value.length - 1) {
                this.percentPos = parseInt(value.substring(indexPercent + 1, value.length), 10)
            }
            this.size = UI_SIZE.PERCENTAGE
        } else {
            this.sizeValue = parseFloat(value)
            this.size = UI_SIZE.SCREEN
        }
    }

    /** Sets both start and end margins to the same value. */
    public setMargin(margin: string) {
        this.marginStart = margin
        this.marginEnd = margin
    }

    /** Sets both start and end paddings to the same value. */
    public setPadding(padding: string) {
        this.paddingStart = padding
        this.paddingEnd = padding
    }

    /**
     * Resets attributes to defaults. Each aspect is skipped when its option
     * flag is explicitly false.
     */
    public clean(options?: UIAttrCleanOptions) {
        if (options == null || options.position != false) {
            this.startStart = ""
            this.startEnd = ""
            this.endEnd = ""
            this.endStart = ""
        }
        if (options == null || options.size != false) {
            this.size = UI_SIZE.SIZE_CONTENT
            this.sizeValue = 0
            this.percentPos = 0
        }
        if (options == null || options.overflow != false) {
            this.overflow = UI_OVERFLOW.HIDDEN
        }
        if (options == null || options.center != false) {
            this.center = ""
        }
        if (options == null || options.margin != false) {
            this.marginStart = ""
            this.marginEnd = ""
        }
        // Fixed: this branch previously re-checked options.position (copy-paste
        // bug), so `padding: false` could never preserve the paddings.
        if (options == null || options.padding != false) {
            this.paddingStart = ""
            this.paddingEnd = ""
        }
    }

    /** Returns a field-by-field copy of this attribute set. */
    public clone(): UIAttr {
        const attr = new UIAttr()
        attr.startStart = this.startStart
        attr.startEnd = this.startEnd
        attr.endEnd = this.endEnd
        attr.endStart = this.endStart
        attr.size = this.size
        attr.sizeValue = this.sizeValue
        attr.percentPos = this.percentPos
        attr.overflow = this.overflow
        attr.center = this.center
        attr.marginStart = this.marginStart
        attr.marginEnd = this.marginEnd
        attr.paddingStart = this.paddingStart
        attr.paddingEnd = this.paddingEnd
        return attr
    }
}
| apache-2.0 |
ray-project/ray | rllib/examples/export/onnx_torch.py | 1802 | from distutils.version import LooseVersion
import numpy as np
import ray
import ray.rllib.agents.ppo as ppo
import onnxruntime
import os
import shutil
import torch

# Configure our PPO trainer
# (LooseVersion imported above is deprecated in modern Python; a migration to
# packaging.version would be needed eventually — left as-is here.)
config = ppo.DEFAULT_CONFIG.copy()
config["num_gpus"] = 0
config["num_workers"] = 1
config["framework"] = "torch"
outdir = "export_torch"
# Start from a clean export directory
if os.path.exists(outdir):
    shutil.rmtree(outdir)
# Fix the RNG so the test batch is reproducible across runs
np.random.seed(1234)
# We will run inference with this test batch
test_data = {
    "obs": np.random.uniform(0, 1.0, size=(10, 4)).astype(np.float32),
    "state_ins": np.array([0.0], dtype=np.float32),
}
# Start Ray and initialize a PPO trainer
ray.init()
trainer = ppo.PPOTrainer(config=config, env="CartPole-v0")
# You could train the model here
# trainer.train()
# Let's run inference on the torch model
policy = trainer.get_policy()
result_pytorch, _ = policy.model(
    {
        "obs": torch.tensor(test_data["obs"]),
    }
)
# Evaluate tensor to fetch numpy array
result_pytorch = result_pytorch.detach().numpy()
# This line will export the model to ONNX
res = trainer.export_policy_model(outdir, onnx=11)
# Import ONNX model
exported_model_file = os.path.join(outdir, "model.onnx")
# Start an inference session for the ONNX model
session = onnxruntime.InferenceSession(exported_model_file, None)
# Pass the same test batch to the ONNX model
if LooseVersion(torch.__version__) < LooseVersion("1.9.0"):
    # In torch < 1.9.0 the second input/output name gets mixed up
    test_data["state_outs"] = test_data.pop("state_ins")
result_onnx = session.run(["output"], test_data)
# These results should be equal!
print("PYTORCH", result_pytorch)
print("ONNX", result_onnx)
assert np.allclose(result_pytorch, result_onnx), "Model outputs are NOT equal. FAILED"
print("Model outputs are equal. PASSED")
| apache-2.0 |
nblumhardt/seq-forwarder | src/Seq.Forwarder/Web/Formats/MimeTypeHelpers.cs | 1373 | // Copyright Andreas Håkansson, Steven Robbins and contributors
// MIT License https://github.com/NancyFx/Nancy/blob/master/license.txt
using System;
namespace Seq.Forwarder.Web.Formats
{
    internal static class Helpers
    {
        /// <summary>
        /// Attempts to detect if the content type is JSON.
        /// Supports:
        ///     application/json
        ///     text/json
        ///     application/vnd[something]+json
        /// Matches are case insensitive and ignore media-type parameters
        /// (e.g. "; charset=utf-8") to try and be as "accepting" as possible.
        /// </summary>
        /// <param name="contentType">Request content type</param>
        /// <returns>True if content type is JSON, false otherwise</returns>
        public static bool IsJsonType(string contentType)
        {
            if (string.IsNullOrEmpty(contentType))
            {
                return false;
            }
            // Strip parameters after ';' and trim surrounding whitespace so
            // that "application/json ; charset=utf-8" is still recognised
            // (the previous Split-only logic kept the trailing space and the
            // Equals checks then failed).
            var contentMimeType = contentType.Split(';')[0].Trim();
            return contentMimeType.Equals("application/json", StringComparison.InvariantCultureIgnoreCase) ||
                contentMimeType.Equals("text/json", StringComparison.InvariantCultureIgnoreCase) ||
                (contentMimeType.StartsWith("application/vnd", StringComparison.InvariantCultureIgnoreCase) &&
                contentMimeType.EndsWith("+json", StringComparison.InvariantCultureIgnoreCase));
        }
    }
}
| apache-2.0 |
arnotixe/androidbible | Alkitab/src/main/java/yuku/alkitab/base/widget/VerseInlineLinkSpan.java | 1010 | package yuku.alkitab.base.widget;
import android.text.TextPaint;
import android.text.style.ClickableSpan;
import android.view.View;
/**
 * Clickable span for inline verse links (footnotes and cross-references).
 * Subclasses implement {@link #onClick(Type, int, Object)}; the span captures
 * its link type, "arif" identifier and an opaque source object at creation.
 */
public abstract class VerseInlineLinkSpan extends ClickableSpan {
public interface Factory {
VerseInlineLinkSpan create(final Type type, final int arif);
}
private final Type type;
private final int arif;
private final Object source;
public enum Type {
footnote,
xref,
}
public VerseInlineLinkSpan(final Type type, final int arif, final Object source) {
this.type = type;
this.arif = arif;
this.source = source;
}
// Final: all click handling is funneled through the abstract 3-arg overload
@Override
public final void onClick(final View widget) {
onClick(type, arif, source);
}
public abstract void onClick(final Type type, final int arif, final Object source);
@Override
public void updateDrawState(final TextPaint ds) {
// don't call super to prevent link underline and link coloring
// FIXME color of choice here
//ds.setColor(ds.linkColor);
// Hard-coded dark blue link color instead of the theme's linkColor
ds.setColor(0xff08088a);
// NOP
}
}
| apache-2.0 |
marcovc/casper | pyutils/objdb.py | 19800 |
import itertools
# Attribute names in priority order — presumably used later in the file to
# order rule dispatch by the most discriminating attributes first (TODO
# confirm against the rest of objdb.py, not visible in this chunk).
attrsPriority = ["type","cl","ev","extType","func", "nArgs",\
"arg1Eval","arg2Eval","arg3Eval","arg4Eval","arg5Eval"]
class Property:
    """A single (attribute, value) pair; hashable so it can live in sets."""

    def __init__(self, attr, value):
        self.attr = attr
        self.value = value

    def __eq__(self, other):
        return (self.attr, self.value) == (other.attr, other.value)

    def __ne__(self, other):
        return (self.attr, self.value) != (other.attr, other.value)

    def __hash__(self):
        # Sum of the component hashes (kept identical to the original scheme).
        return hash(self.attr) + hash(self.value)
class Rule:
    """A set of named properties with subsumption tests between rules."""

    def __init__(self, **kwargs):
        self.properties = dict(kwargs)

    def covers(self, rule):
        """True if every property of ``rule`` appears in self with the same value.

        Uses dict.items() instead of the Python-2-only iteritems(), so the
        class works under both Python 2 and Python 3.
        """
        for k, v in rule.properties.items():
            if k not in self.properties or self.properties[k] != v:
                return False
        return True

    def isCoveredBy(self, rule):
        """Symmetric counterpart of covers(): True if ``rule`` covers self."""
        return rule.covers(self)
class ActionMap:
    """Associates Rule patterns with callables and answers cover queries."""

    def __init__(self):
        # (rule, action) pairs, kept in insertion order.
        self.rules = []

    def add(self, rule, action):
        """Register `action` under the pattern `rule`."""
        self.rules.append((rule, action))

    def runAll(self):
        """Invoke every registered action, passing it its own rule."""
        for rule, action in self.rules:
            action(rule)

    def getCoveredBySet(self, rule):
        """Return the (rule, action) pairs whose rule is covered by `rule`."""
        return [(r, a) for (r, a) in self.rules if r.isCoveredBy(rule)]

    def getCoverSet(self, rule):
        """Return the (rule, action) pairs whose rule covers `rule`."""
        return [(r, a) for (r, a) in self.rules if r.covers(rule)]
# ---------------------------------------------------------------------------
# Registry of every typed expression/relation exposed through the bindings.
# Each Rule describes one overload:
#   type    : "tExt" (wrapped extension type) or "tRel" (relation/function)
#   cl      : backend tag ("Common", "CP")
#   ev      : evaluation (result) type
#   extType : for tExt entries, the flavour of wrapper (Lit, Var, ...)
#   func, nArgs, argNEval : for tRel entries, the name and argument types;
#   argN (when present) pins a concrete C++ argument type instead of the
#   generic Expr<eval> wrapper.
# ---------------------------------------------------------------------------
objs = set()

seqTypes = ["Casper::BoolSeq", "Casper::IntSeq", "Casper::BoolSetSeq", "Casper::IntSetSeq"]
setTypes = ["Casper::IntSet","Casper::BoolSet"]

# literal types
for e in ["bool", "int"]:
    objs.add(Rule(type="tExt",cl="Common",ev=e,extType="Lit"))
for e in ["Casper::BoolSeq", "Casper::IntSeq"]:
    objs.add(Rule(type="tExt",cl="CP",ev=e,extType="LitArray"))

# CP types
for e in ["bool", "int"]:
    objs.add(Rule(type="tExt",cl="CP",ev=e,extType="Var"))
for e in seqTypes:
    objs.add(Rule(type="tExt",cl="CP",ev=e,extType="VarArray"))

# boolean unary operations between boolean types
objs.add(Rule(type="tRel",func="Not",ev="bool",nArgs=1,arg1Eval="bool"))
objs.add(Rule(type="tRel",func="Cast<bool>",ev="bool",nArgs=1,arg1Eval="int"))

# boolean unary operations between integer sequence types
objs.add(Rule(type="tRel",func="Distinct",ev="bool",nArgs=1,arg1Eval="Casper::IntSeq"))

# boolean unary operations between set types
for f in ["Disjoint","Partition"]:
    for t in ["Casper::BoolSetSeq", "Casper::IntSetSeq"]:
        objs.add(Rule(type="tRel",func=f,ev="bool",nArgs=1,arg1Eval=t))

# boolean binary operations between boolean types
for f in ["And","Or","XOr","Less","LessEqual","Greater","GreaterEqual"]:
    objs.add(Rule(type="tRel",func=f,ev="bool",nArgs=2,arg1Eval="bool",arg2Eval="bool"))

# boolean binary operations between integer types
for f in ["Less","LessEqual","Greater","GreaterEqual"]:
    objs.add(Rule(type="tRel",func=f,ev="bool",nArgs=2,arg1Eval="int",arg2Eval="int"))

# boolean binary operations between set types
for f in ["Disjoint","Contained"]:
    for t in ["Casper::BoolSet", "Casper::IntSet"]:
        objs.add(Rule(type="tRel",func=f,ev="bool",nArgs=2,arg1Eval=t,arg2Eval=t))

# boolean binary operations between several type pairs
for f in ["Distinct","Equal"]:
    for t in ["bool", "int", "Casper::BoolSet", "Casper::IntSet"]:
        objs.add(Rule(type="tRel",func=f,ev="bool",nArgs=2,arg1Eval=t,arg2Eval=t))

# boolean binary operations between heterogeneous types
objs.add(Rule(type="tRel",func="SumEqual",ev="bool",nArgs=2,arg1Eval="Casper::IntSeq",arg2Eval="int"))
objs.add(Rule(type="tRel",func="Member",ev="bool",nArgs=2,arg1Eval="int",arg2Eval="Casper::IntSet"))
objs.add(Rule(type="tRel",func="Member",ev="bool",nArgs=2,arg1Eval="bool",arg2Eval="Casper::BoolSet"))
objs.add(Rule(type="tRel",func="NotMember",ev="bool",nArgs=2,arg1Eval="int",arg2Eval="Casper::IntSet"))
objs.add(Rule(type="tRel",func="NotMember",ev="bool",nArgs=2,arg1Eval="bool",arg2Eval="Casper::BoolSet"))
objs.add(Rule(type="tRel",func="Element",ev="bool",nArgs=2,arg1Eval="Casper::BoolSeq",arg2Eval="int"))
objs.add(Rule(type="tRel",func="InTable",ev="bool",nArgs=2,arg1Eval="Casper::IntSeq",arg2Eval="Casper::IntSeq"))
objs.add(Rule(type="tRel",func="NotInTable",ev="bool",nArgs=2,arg1Eval="Casper::IntSeq",arg2Eval="Casper::IntSeq"))

# boolean ternary operations
objs.add(Rule(type="tRel",func="LinearEqual",ev="bool",nArgs=3,arg1Eval="Casper::IntSeq",arg2Eval="Casper::IntSeq",arg3Eval="int"))
objs.add(Rule(type="tRel",func="ElementEqual",ev="bool",nArgs=3,arg1Eval="Casper::IntSeq",arg2Eval="int",arg3Eval="int"))
objs.add(Rule(type="tRel",func="ElementEqual",ev="bool",nArgs=3,arg1Eval="Casper::BoolSeq",arg2Eval="int",arg3Eval="bool"))
for e in ["Casper::IntSet","Casper::BoolSet"]:
    objs.add(Rule(type="tRel",func="IntersectEqual",ev="bool",nArgs=3,arg1Eval=e,arg2Eval=e,arg3Eval=e))
    objs.add(Rule(type="tRel",func="UnionEqual",ev="bool",nArgs=3,arg1Eval=e,arg2Eval=e,arg3Eval=e))

# boolean five-ary (!?) operations
objs.add(Rule(type="tRel",func="Cumulative",ev="bool",nArgs=5,arg1Eval="Casper::IntSeq",arg2Eval="Casper::IntSeq",\
    arg3Eval="Casper::IntSeq",arg4Eval="Casper::IntSeq",arg5Eval="int"))

# integer unary operations between integer types
for f in ["Sym","Abs","Exp","Sqr","Log"]:
    objs.add(Rule(type="tRel",func=f,ev="int",nArgs=1,arg1Eval="int"))
objs.add(Rule(type="tRel",func="Cast<int>",ev="int",nArgs=1,arg1Eval="bool"))

# integer unary operations between integer sequence types
objs.add(Rule(type="tRel",func="Sum",ev="int",nArgs=1,arg1Eval="Casper::IntSeq"))

# integer unary operations between set types
objs.add(Rule(type="tRel",func="Cardinal",ev="int",nArgs=1,arg1Eval="Casper::IntSet"))

# integer binary operations between integer types
for f in ["Add","Sub","Mul","Div","Min","Max","Mod"]:
    objs.add(Rule(type="tRel",func=f,ev="int",nArgs=2,arg1Eval="int",arg2Eval="int"))

# integer binary operations between integer sequence types
objs.add(Rule(type="tRel",func="SumProduct",ev="int",nArgs=2,arg1Eval="Casper::IntSeq",arg2Eval="Casper::IntSeq"))

# integer binary operations between heterogeneous types
objs.add(Rule(type="tRel",func="Element",ev="int",nArgs=2,arg1Eval="Casper::IntSeq",arg2Eval="int"))

## Goals ##
# Search/control-flow goals.  arg1="Casper::Ref<...>" fixes the first argument
# to a mutable reference rather than a general expression.
for t in ["bool","int"]:
    objs.add(Rule(type="tRel",func="Assign",ev="bool",nArgs=2,arg1Eval=t,arg1="Casper::Ref<"+t+">",arg2Eval=t))
objs.add(Rule(type="tRel",func="WhileDo",ev="bool",nArgs=2,arg1Eval="bool",arg2Eval="bool"))
objs.add(Rule(type="tRel",func="ForAll",ev="bool",nArgs=3,arg1Eval="int",arg1="Casper::Ref<int>",arg2Eval="Casper::IntSeq",arg3Eval="bool"))
objs.add(Rule(type="tRel",func="ForAll",ev="bool",nArgs=4,arg1Eval="int",arg1="Casper::Ref<int>",arg2Eval="Casper::IntSeq",arg3Eval="bool",arg4Eval="bool"))
objs.add(Rule(type="tRel",func="TryAll",ev="bool",nArgs=3,arg1Eval="int",arg1="Casper::Ref<int>",arg2Eval="Casper::IntSeq",arg3Eval="bool"))
objs.add(Rule(type="tRel",func="TryAll",ev="bool",nArgs=4,arg1Eval="int",arg1="Casper::Ref<int>",arg2Eval="Casper::IntSeq",arg3Eval="bool",arg4Eval="bool"))
for t in ["int"]:
    objs.add(Rule(type="tRel",func="ForAll",ev="bool",nArgs=5,arg1Eval="int",arg1="Casper::Ref<int>",arg2Eval="Casper::IntSeq",arg3Eval="bool",arg4Eval=t,arg5Eval="bool"))
    objs.add(Rule(type="tRel",func="TryAll",ev="bool",nArgs=5,arg1Eval="int",arg1="Casper::Ref<int>",arg2Eval="Casper::IntSeq",arg3Eval="bool",arg4Eval=t,arg5Eval="bool"))
#    objs.add(Rule(type="tRel",func="SelectMin",ev="bool",nArgs=4,arg1Eval="int",arg2Eval="Casper::IntSeq",arg3Eval="bool",arg4Eval=t))
#    objs.add(Rule(type="tRel",func="SelectMin",ev="bool",nArgs=3,arg1Eval="int",arg2Eval="Casper::IntSeq",arg3Eval=t))
#    objs.add(Rule(type="tRel",func="SelectMax",ev="bool",nArgs=4,arg1Eval="int",arg2Eval="Casper::IntSeq",arg3Eval="bool",arg4Eval=t))
#    objs.add(Rule(type="tRel",func="SelectMax",ev="bool",nArgs=3,arg1Eval="int",arg2Eval="Casper::IntSeq",arg3Eval=t))
#objs.add(Rule(type="tRel",func="SelectRand",ev="bool",nArgs=3,arg1Eval="int",arg2Eval="Casper::IntSeq",arg3Eval="bool"))
#objs.add(Rule(type="tRel",func="SelectRand",ev="bool",nArgs=2,arg1Eval="int",arg2Eval="Casper::IntSeq"))

## Ref expressions
for f in ["bool","int","Casper::IntSet","Casper::BoolSet","Casper::IntSeq","Casper::BoolSeq","Casper::BoolSetSeq","Casper::IntSetSeq"]:
    objs.add(Rule(type="tRel",cl="CP",func="Ground",ev="bool",nArgs=1,arg1Eval=f))
for f in ["bool","int"]:
    objs.add(Rule(type="tRel",cl="CP",func="DomainSize",ev="int",nArgs=1,arg1Eval=f))
objs.add(Rule(type="tRel",cl="CP",func="Domain",ev="Casper::IntSeq",nArgs=1,arg1Eval="int"))
objs.add(Rule(type="tRel",cl="CP",func="Domain",ev="Casper::BoolSeq",nArgs=1,arg1Eval="bool"))
for t in ["bool","int","float","Casper::IntSeq","Casper::BoolSeq","Casper::FloatSeq"]:
    objs.add(Rule(type="tRel",func="ArgMax",ev="int",nArgs=3,arg1Eval="int",arg1="Casper::Ref<int>",arg2Eval="Casper::IntSeq",arg3Eval=t))
    objs.add(Rule(type="tRel",func="ArgMax",ev="int",nArgs=4,arg1Eval="int",arg1="Casper::Ref<int>",arg2Eval="Casper::IntSeq",arg3Eval="bool",arg4Eval=t))
    objs.add(Rule(type="tRel",func="ArgMin",ev="int",nArgs=3,arg1Eval="int",arg1="Casper::Ref<int>",arg2Eval="Casper::IntSeq",arg3Eval=t))
    objs.add(Rule(type="tRel",func="ArgMin",ev="int",nArgs=4,arg1Eval="int",arg1="Casper::Ref<int>",arg2Eval="Casper::IntSeq",arg3Eval="bool",arg4Eval=t))
# Extra pre-built relation type strings (ArgMin/ArgMax assignment goals);
# iterated by forAllInstObjs below.
instobjs=set()
for f in ["Min","Max"]:
    for ev in ["int","bool","Seq<int>","Seq<bool>"]:
        instobjs.add(Rule(ev="bool",func="Rel2<Assign,Ref<int>,Rel4<Arg"+f+",Ref<int>,Expr<Seq<int> >,Expr<bool>,Expr<"+ev+"> > >"))
        instobjs.add(Rule(ev="bool",func="Rel2<Assign,Ref<int>,Rel3<Arg"+f+",Ref<int>,Expr<Seq<int> >,Expr<"+ev+"> > >"))
# randomized tie-breaking variants (ArgMin only)
instobjs.add(Rule(ev="bool",func="Rel2<Assign,Ref<int>,Rel3<ArgMin,Ref<int>,Expr<Seq<int> >,Casper::Rel2<RandInRange,double,double> > >"))
instobjs.add(Rule(ev="bool",func="Rel2<Assign,Ref<int>,Rel4<ArgMin,Ref<int>,Expr<Seq<int> >,Expr<bool>,Casper::Rel2<RandInRange,double,double> > >"))

# TODO later
# "fIfThen","fIfThenElse","fInRange",
# "fAll","fMaxDiff",
# "fForSome","fSelectFirst",,"fRandInRange",
# Maps a Rule to the concrete C++ type string of the corresponding object.
# Lookups go through ActionMap cover matching: the explicit entries handle
# literals/variables, the loop below handles RelN relation types.
cppObjType = ActionMap()
cppObjType.add(Rule(ev="bool",extType="Lit"),lambda r: "bool")
cppObjType.add(Rule(ev="int",extType="Lit"),lambda r: "int")
cppObjType.add(Rule(ev="Casper::BoolSeq",extType="LitArray"),lambda r: "Util::StdBoolArray")
cppObjType.add(Rule(ev="Casper::IntSeq",extType="LitArray"),lambda r: "Util::StdIntArray")
cppObjType.add(Rule(cl="CP",ev="bool",extType="Var"),lambda r: "CP::Var<bool,Casper::CP::Traits::GetDefaultDom<bool>::Type>")
cppObjType.add(Rule(cl="CP",ev="int",extType="Var"),lambda r: "CP::Var<int,Casper::CP::Traits::GetDefaultDom<int>::Type>")
cppObjType.add(Rule(cl="CP",ev="Casper::BoolSet",extType="Var"),lambda r: "CP::Casper::BoolSetVar")
cppObjType.add(Rule(cl="CP",ev="Casper::IntSet",extType="Var"),lambda r: "CP::Casper::IntSetVar")
cppObjType.add(Rule(cl="CP",ev="Casper::BoolSeq",extType="VarArray"),lambda r: "CP::BoolVarArray")
cppObjType.add(Rule(cl="CP",ev="Casper::IntSeq",extType="VarArray"),lambda r: "CP::IntVarArray")
cppObjType.add(Rule(cl="CP",ev="Casper::BoolSetSeq",extType="VarArray"),lambda r: "CP::Casper::BoolSetVarArray")
cppObjType.add(Rule(cl="CP",ev="Casper::IntSetSeq",extType="VarArray"),lambda r: "CP::Casper::IntSetVarArray")
# RelN entries.  The default argument i=i freezes the loop variable in each
# lambda (otherwise all lambdas would see the final value of i).
# NOTE(review): cppEval is not defined in this chunk — confirm it is provided
# elsewhere in the file before these lambdas are invoked.
# NOTE(review): func[1:] drops the first character of the function name; this
# matches the legacy "f"-prefixed names in the TODO list above, but with the
# current unprefixed names ("Not", "Add", ...) it would strip a real letter.
# Verify before relying on this map.
for i in range(1,6):
    cppObjType.add(Rule(type="tRel",nArgs=i),
        lambda r,i=i: "Rel"+str(i)+"<"+r.properties["func"][1:]+
            "".join([","+cppEval[r.properties["arg"+str(j)+"Eval"]] for j in range(1,i+1)])
            +">")
#python unary and binary operators
# Maps (CasperFunctionName, arity) -> Python special-method stem,
# e.g. ("Equal", 2) -> "eq" emits __eq__ / __req__ wrappers.
casperOp2PythonOp = {
    ("Equal",2) : "eq",
    ("Distinct",2) : "ne",
    ("And",2) : "and",
    ("XOr",2) : "xor",
    ("Or",2) : "or",
    ("Not",1) : "invert",
    ("Mod",2) : "mod",
    ("Less",2) : "lt",
    ("LessEqual",2) : "le",
    ("Greater",2) : "gt",
    ("GreaterEqual",2) : "ge",
    ("Add",2) : "add",
    ("Sym",1) : "neg",
    ("Sub",2) : "sub",
    ("Mul",2) : "mul",
    ("Div",2) : "div",
    ("Pow",2) : "pow",
}

# Casper function names whose Python binding name cannot be derived by simply
# lower-casing the first letter (template names, and names that would shadow
# Python builtins such as abs/min/max/sum get a leading underscore).
customCasperFn2PythonFn = {
    "Cast<int>" : "asInt",
    "Cast<bool>" : "asBool",
    "Abs" : "_abs",
    "Min" : "_min",
    "Max" : "_max",
    "Sum" : "_sum",
    "Pow" : "_pow",
    "WhileDo" : "_whileDo",
    "ForAll" : "_forAll",
    "TryAll" : "_tryAll",
    "SelectMin" : "_selectMin",
    "SelectMax" : "_selectMax",
    "SelectRand" : "_selectRand",
}
def casperFn2PythonFn(casperFn):
    """Map a Casper function name to its Python binding name.

    Custom mappings (e.g. "Abs" -> "_abs") take precedence; otherwise the
    name is converted by lower-casing its first letter ("Member" -> "member").
    """
    # Membership test directly on the dict: O(1), and avoids materializing a
    # key list under Python 2 (`in d.keys()` built a list there).
    if casperFn in customCasperFn2PythonFn:
        return customCasperFn2PythonFn[casperFn]
    return casperFn[0].lower()+casperFn[1:]
# binary operators
def printUnaryOperator(func,ev,arg1):
print "Casper::Expr<"+ev+">"
print "__"+casperOp2PythonOp[(func,1)]+"__()"
print "{"
print "\tCasper::Rel1<Casper::"+func+","+arg1+"> r(*$self);"
print "\treturn r;"
print "}"
def printBinaryOperator(func,ev,arg1,arg2,reverse=False):
    """Print a binary operator wrapper method to stdout.

    When reverse is True the reflected form (__rop__) is emitted, with the
    operand order swapped so that `v op self` builds Rel2<func, arg2, arg1>.
    """
    print "Casper::Expr<"+ev+">"
    if not reverse:
        print "__"+casperOp2PythonOp[(func,2)]+"__("+arg2+" const& v)"
    else:
        print "__r"+casperOp2PythonOp[(func,2)]+"__("+arg2+" const& v)"
    print "{"
    if not reverse:
        print "\tCasper::Rel2<Casper::"+func+","+arg1+","+arg2+"> r(*$self,v);"
    else:
        print "\tCasper::Rel2<Casper::"+func+","+arg2+","+arg1+"> r(v,*$self);"
    print "\treturn r;"
    print "}"
def printVarOperators(arg1,ev1):
    """Emit all operator wrappers for a concrete variable type.

    arg1 : concrete C++ type of the receiver (e.g. a CP::Var instantiation)
    ev1  : evaluation type the receiver contributes as first argument

    For binary operators both orderings are emitted (self op expr and the
    reflected expr op self).
    """
    for r in objs:
        if r.properties["type"]=="tRel" and \
            (r.properties["func"], r.properties["nArgs"]) in casperOp2PythonOp.keys() and \
            r.properties["arg1Eval"]==ev1:
            # an explicit arg1 property restricts the rule to that exact C++ type
            if r.properties["nArgs"]==1 and ("arg1" not in r.properties or r.properties["arg1"]==arg1):
                printUnaryOperator(r.properties["func"],r.properties["ev"],arg1)
            if r.properties["nArgs"]==2 and ("arg1" not in r.properties or r.properties["arg1"]==arg1):
                ev2 = r.properties["arg2Eval"]
                # self with expression
                arg2 = "Casper::Expr<"+ev2+">"
                printBinaryOperator(r.properties["func"],r.properties["ev"],arg1,arg2)
                # expression with self
                printBinaryOperator(r.properties["func"],r.properties["ev"],arg1,arg2,True)
def printExprOperators(arg1,ev1):
    """Emit operator wrappers for an expression type.

    Same as printVarOperators, except no reflected (__rop__) form is emitted:
    the expression-with-expression case covers only one ordering.
    """
    for r in objs:
        if r.properties["type"]=="tRel" and \
            (r.properties["func"], r.properties["nArgs"]) in casperOp2PythonOp.keys() and \
            r.properties["arg1Eval"]==ev1:
            if r.properties["nArgs"]==1 and ("arg1" not in r.properties or r.properties["arg1"]==arg1):
                printUnaryOperator(r.properties["func"],r.properties["ev"],arg1)
            if r.properties["nArgs"]==2 and ("arg1" not in r.properties or r.properties["arg1"]==arg1):
                ev2 = r.properties["arg2Eval"]
                # expr with expr
                arg2 = "Casper::Expr<"+ev2+">"
                printBinaryOperator(r.properties["func"],r.properties["ev"],arg1,arg2)
def printPredicate(r,func,ev,argevs):
    """Print a free-function wrapper forwarding to Casper::rel<Func>(...).

    r      : the Rule; explicit argN properties override the generic
             Expr<eval> parameter type
    func   : Casper relation name
    ev     : result evaluation type
    argevs : evaluation types of the arguments

    Python 2 note: the trailing commas suppress the newline so several print
    statements build up a single output line (separated by single spaces).
    """
    print "Casper::Expr<"+ev+">"
    if "arg1" in r.properties:
        print casperFn2PythonFn(func)+"("+r.properties["arg1"]+" const& a1",
    else:
        print casperFn2PythonFn(func)+"(Casper::Expr<"+argevs[0]+"> const& a1",
    for i in range(1,len(argevs)):
        if "arg"+str(i+1) in r.properties:
            print ","+r.properties["arg"+str(i+1)]+" const& a"+str(i+1),
        else:
            print ",Casper::Expr<"+argevs[i]+"> const& a"+str(i+1),
    print ")"
    print "{"
    print "\treturn Casper::rel<Casper::"+func+">(a1",
    for i in range(1,len(argevs)):
        print ",a"+str(i+1),
    print ");"
    print "}"
def printExprPredicates():
    """Emit a free-function wrapper for every relation that is not already
    exposed as a Python operator."""
    for rule in objs:
        props = rule.properties
        if props["type"] != "tRel":
            continue
        # operators are handled elsewhere (printVarOperators/printExprOperators)
        if (props["func"], props["nArgs"]) in casperOp2PythonOp:
            continue
        evals = [props["arg%dEval" % i] for i in range(1, props["nArgs"] + 1)]
        printPredicate(rule, props["func"], props["ev"], evals)
def getCPModule(r):
    """Return the binding module ("set", "real", "int" or the relation's
    evaluation type) into which this rule's wrapper should be emitted.

    Only valid for tRel rules; any other rule trips the assert.
    """
    if r.properties["type"]=="tRel":
        argevs = [r.properties["arg"+str(i)+"Eval"] for i in range(1,r.properties["nArgs"]+1)]
        # if ev=set or any arg is a set, then put in set module
        if r.properties["ev"]=="Casper::IntSet" or r.properties["ev"]=="Casper::BoolSet" or \
            r.properties["ev"]=="Casper::IntSetSeq" or r.properties["ev"]=="Casper::BoolSetSeq" or \
            any([ev=="Casper::IntSet" or ev=="Casper::BoolSet" for ev in argevs]) or \
            any([ev=="Casper::IntSetSeq" or ev=="Casper::BoolSetSeq" for ev in argevs]):
            return "set"
        # if is not a bool expression then put in evaluation module
        if r.properties["ev"]!="bool":
            return r.properties["ev"]
        # if any arg has double or float then put in real module
        # NOTE(review): ev is necessarily "bool" at this point (the previous
        # branch returned otherwise), so the ev=="double"/"float"/"*Seq" tests
        # below are dead; only the argument checks can select "real".
        if r.properties["ev"]=="double" or r.properties["ev"]=="float" or \
            r.properties["ev"]=="DoubleSeq" or r.properties["ev"]=="FloatSeq" or \
            any([ev=="double" or ev=="float" for ev in argevs]) or \
            any([ev=="DoubleSeq" or ev=="FloatSeq" for ev in argevs]):
            return "real"
        #otherwise put it int module
        return "int"
    # unreachable for well-formed input: only tRel rules may be classified
    assert(0)
def forAllRelOper(fn,argfn):
    """Call fn(rule, cppTypeString) for every operator-style relation and each
    viable combination of concrete/expression argument types.

    argfn maps an evaluation type to a concrete C++ type, or None when that
    evaluation type has no concrete counterpart (the combination is skipped).
    For binary relations three combinations are emitted: concrete/concrete,
    concrete/Expr and Expr/concrete.
    """
    for r in objs:
        if r.properties["type"]=="tRel" and \
            (r.properties["func"], r.properties["nArgs"]) in casperOp2PythonOp.keys():
            argevs = [r.properties["arg"+str(i)+"Eval"] for i in range(1,r.properties["nArgs"]+1)]
            func = r.properties["func"]
            if len(argevs)==1 and argfn(argevs[0])!=None and ("arg1" not in r.properties or r.properties["arg1"]==argfn(argevs[0])):
                fn(r,"Rel1<Casper::"+func+","+argfn(argevs[0])+" >")
            elif len(argevs)==2:
                # concrete x concrete, then concrete x expression
                if argfn(argevs[0])!=None and ("arg1" not in r.properties or r.properties["arg1"]==argfn(argevs[0])):
                    if argfn(argevs[1])!=None and ("arg2" not in r.properties or r.properties["arg2"]==argfn(argevs[1])):
                        fn(r,"Rel2<Casper::"+func+","+argfn(argevs[0])+","+argfn(argevs[1])+" >")
                    fn(r,"Rel2<Casper::"+func+","+argfn(argevs[0])+",Expr<"+argevs[1]+"> >")
                # expression x concrete
                if argfn(argevs[1])!=None and ("arg2" not in r.properties or r.properties["arg2"]==argfn(argevs[1])):
                    fn(r,"Rel2<Casper::"+func+",Expr<"+argevs[0]+">,"+argfn(argevs[1])+" >")
def forAllRelPred(fn):
    """Call fn(rule, cppTypeString) for every relation rule, where the type
    string is the RelN<Casper::Func, ...> instantiation built from the rule's
    arguments (an explicit argN property overrides the Expr<eval> wrapper)."""
    for rule in objs:
        props = rule.properties
        if props["type"] != "tRel":
            continue
        n = props["nArgs"]
        evals = [props["arg%dEval" % i] for i in range(1, n + 1)]
        parts = []
        for i, ev in enumerate(evals, start=1):
            key = "arg%d" % i
            parts.append(props[key] if key in props else "Expr<" + ev + ">")
        typeStr = "Rel%d<Casper::%s" % (len(evals), props["func"])
        typeStr += "".join("," + p for p in parts)
        typeStr += ">"
        fn(rule, typeStr)
def forAllInstObjs(fn):
    """Call fn(rule, typeString) for every explicit instantiation in instobjs
    (the func property already holds the full relation type string)."""
    for r in instobjs:
        fn(r,r.properties["func"])
def forAllRelOperList(fn,argfn):
    """Like forAllRelOper, but passes the pieces instead of a preformatted
    type string: fn(rule, funcName, argTypeList, argEvalList).

    argfn maps an evaluation type to a concrete C++ type or None (skip).
    For binary relations the same three combinations are enumerated:
    concrete/concrete, concrete/Expr and Expr/concrete.
    """
    for r in objs:
        if r.properties["type"]=="tRel" and \
            (r.properties["func"], r.properties["nArgs"]) in casperOp2PythonOp.keys():
            argevs = [r.properties["arg"+str(i)+"Eval"] for i in range(1,r.properties["nArgs"]+1)]
            func = r.properties["func"]
            if len(argevs)==1 and argfn(argevs[0])!=None and ("arg1" not in r.properties or r.properties["arg1"]==argfn(argevs[0])):
                fn(r,func,[argfn(argevs[0])],argevs)
            elif len(argevs)==2:
                if argfn(argevs[0])!=None and ("arg1" not in r.properties or r.properties["arg1"]==argfn(argevs[0])):
                    if argfn(argevs[1])!=None and ("arg2" not in r.properties or r.properties["arg2"]==argfn(argevs[1])):
                        fn(r,func,[argfn(argevs[0]),argfn(argevs[1])],argevs)
                    fn(r,func,[argfn(argevs[0]),"Expr<"+argevs[1]+">"],argevs)
                if argfn(argevs[1])!=None and ("arg2" not in r.properties or r.properties["arg2"]==argfn(argevs[1])):
                    fn(r,func,["Expr<"+argevs[0]+">",argfn(argevs[1])],argevs)
def forAllRelPredList(fn):
    """Call fn(rule, funcName, argTypeList, argEvalList) for every relation
    rule; an explicit argN property overrides the Expr<eval> wrapper."""
    for rule in objs:
        props = rule.properties
        if props["type"] != "tRel":
            continue
        evals = [props["arg%dEval" % i] for i in range(1, props["nArgs"] + 1)]
        types = []
        for i, ev in enumerate(evals, start=1):
            key = "arg%d" % i
            types.append(props[key] if key in props else "Expr<" + ev + ">")
        fn(rule, props["func"], types, evals)
#def args(ev):
# s = []
# if ev=="bool":
# s.append('Goal')
# if ev not in seqTypes:
# s.append('CP::Var<'+ev+'>')
# if ev not in seqTypes and ev not in setTypes:
# s.append("Ref<"+ev+">")
# s.append("Expr<"+ev+">")
# return s
# CP::Var<->Expr
# CP::Ref<->Expr
# Expr<->Expr
# Goal<->Expr
| apache-2.0 |
asheshsaraf/ecommerce-simple | sm-core/src/main/java/com/salesmanager/core/modules/integration/shipping/impl/UPSShippingQuote.java | 21530 | package com.salesmanager.core.modules.integration.shipping.impl;
import java.io.BufferedReader;
import java.io.Reader;
import java.io.StringReader;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import org.apache.commons.digester.Digester;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.methods.PostMethod;
import org.apache.commons.httpclient.methods.RequestEntity;
import org.apache.commons.httpclient.methods.StringRequestEntity;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.salesmanager.core.business.common.model.Delivery;
import com.salesmanager.core.business.merchant.model.MerchantStore;
import com.salesmanager.core.business.reference.country.model.Country;
import com.salesmanager.core.business.shipping.model.PackageDetails;
import com.salesmanager.core.business.shipping.model.ShippingConfiguration;
import com.salesmanager.core.business.shipping.model.ShippingOption;
import com.salesmanager.core.business.system.model.CustomIntegrationConfiguration;
import com.salesmanager.core.business.system.model.IntegrationConfiguration;
import com.salesmanager.core.business.system.model.IntegrationModule;
import com.salesmanager.core.business.system.model.ModuleConfig;
import com.salesmanager.core.modules.integration.IntegrationException;
import com.salesmanager.core.modules.integration.shipping.model.ShippingQuoteModule;
import com.salesmanager.core.utils.DataUtils;
/**
* Integrates with UPS online API
* @author casams1
*
*/
public class UPSShippingQuote implements ShippingQuoteModule {
private static final Logger LOGGER = LoggerFactory.getLogger(UPSShippingQuote.class);
@Override
public void validateModuleConfiguration(
		IntegrationConfiguration integrationConfiguration,
		MerchantStore store) throws IntegrationException {

	// Accumulates the names of all invalid fields.  The original code
	// re-created the list for each failing key, so only the last failure
	// was ever reported; this version reports every invalid field at once.
	List<String> errorFields = new ArrayList<String>();

	// validate required integration keys: accessKey, userId and password
	Map<String, String> keys = integrationConfiguration.getIntegrationKeys();
	if (keys == null || StringUtils.isBlank(keys.get("accessKey"))) {
		errorFields.add("accessKey");
	}
	if (keys == null || StringUtils.isBlank(keys.get("userId"))) {
		errorFields.add("userId");
	}
	if (keys == null || StringUtils.isBlank(keys.get("password"))) {
		errorFields.add("password");
	}

	// validate at least one integrationOptions['packages'].
	// Guarding the dereference fixes an NPE: the original recorded the error
	// when options was null but then called options.get("packages") anyway.
	Map<String, List<String>> options = integrationConfiguration.getIntegrationOptions();
	if (options == null) {
		errorFields.add("packages");
	} else {
		List<String> packages = options.get("packages");
		if (packages == null || packages.isEmpty()) {
			errorFields.add("packages");
		}
	}

	if (!errorFields.isEmpty()) {
		IntegrationException ex = new IntegrationException(IntegrationException.ERROR_VALIDATION_SAVE);
		ex.setErrorFields(errorFields);
		throw ex;
	}
}
@Override
public List<ShippingOption> getShippingQuotes(
List<PackageDetails> packages, BigDecimal orderTotal,
Delivery delivery, MerchantStore store,
IntegrationConfiguration configuration, IntegrationModule module,
ShippingConfiguration shippingConfiguration, Locale locale)
throws IntegrationException {
BigDecimal total = orderTotal;
if (packages == null) {
return null;
}
List<ShippingOption> options = null;
// only applies to Canada and US
Country country = delivery.getCountry();
if(!(country.getIsoCode().equals("US") || country.getIsoCode().equals("CA"))) {
return null;
//throw new IntegrationException("UPS Not configured for shipping in country " + country.getIsoCode());
}
// supports en and fr
String language = locale.getLanguage();
if (!language.equals(Locale.FRENCH.getLanguage())
&& !language.equals(Locale.ENGLISH.getLanguage())) {
language = Locale.ENGLISH.getLanguage();
}
String pack = configuration.getIntegrationOptions().get("packages").get(0);
Map<String,String> keys = configuration.getIntegrationKeys();
String accessKey = keys.get("accessKey");
String userId = keys.get("userId");
String password = keys.get("password");
String host = null;
String protocol = null;
String port = null;
String url = null;
StringBuilder xmlbuffer = new StringBuilder();
PostMethod httppost = null;
BufferedReader reader = null;
try {
String env = configuration.getEnvironment();
Set<String> regions = module.getRegionsSet();
if(!regions.contains(store.getCountry().getIsoCode())) {
throw new IntegrationException("Can't use the service for store country code ");
}
Map<String, ModuleConfig> moduleConfigsMap = module.getModuleConfigs();
for(String key : moduleConfigsMap.keySet()) {
ModuleConfig moduleConfig = (ModuleConfig)moduleConfigsMap.get(key);
if(moduleConfig.getEnv().equals(env)) {
host = moduleConfig.getHost();
protocol = moduleConfig.getScheme();
port = moduleConfig.getPort();
url = moduleConfig.getUri();
}
}
StringBuilder xmlreqbuffer = new StringBuilder();
xmlreqbuffer.append("<?xml version=\"1.0\"?>");
xmlreqbuffer.append("<AccessRequest>");
xmlreqbuffer.append("<AccessLicenseNumber>");
xmlreqbuffer.append(accessKey);
xmlreqbuffer.append("</AccessLicenseNumber>");
xmlreqbuffer.append("<UserId>");
xmlreqbuffer.append(userId);
xmlreqbuffer.append("</UserId>");
xmlreqbuffer.append("<Password>");
xmlreqbuffer.append(password);
xmlreqbuffer.append("</Password>");
xmlreqbuffer.append("</AccessRequest>");
String xmlhead = xmlreqbuffer.toString();
String weightCode = store.getWeightunitcode();
String measureCode = store.getSeizeunitcode();
if (weightCode.equals("KG")) {
weightCode = "KGS";
} else {
weightCode = "LBS";
}
String xml = "<?xml version=\"1.0\"?><RatingServiceSelectionRequest><Request><TransactionReference><CustomerContext>Shopizer</CustomerContext><XpciVersion>1.0001</XpciVersion></TransactionReference><RequestAction>Rate</RequestAction><RequestOption>Shop</RequestOption></Request>";
StringBuffer xmldatabuffer = new StringBuffer();
/**
* <Shipment>
*
* <Shipper> <Address> <City></City>
* <StateProvinceCode>QC</StateProvinceCode>
* <CountryCode>CA</CountryCode> <PostalCode></PostalCode>
* </Address> </Shipper>
*
* <ShipTo> <Address> <City>Redwood Shores</City>
* <StateProvinceCode>CA</StateProvinceCode>
* <CountryCode>US</CountryCode> <PostalCode></PostalCode>
* <ResidentialAddressIndicator/> </Address> </ShipTo>
*
* <Package> <PackagingType> <Code>21</Code> </PackagingType>
* <PackageWeight> <UnitOfMeasurement> <Code>LBS</Code>
* </UnitOfMeasurement> <Weight>1.1</Weight> </PackageWeight>
* <PackageServiceOptions> <InsuredValue>
* <CurrencyCode>CAD</CurrencyCode>
* <MonetaryValue>100</MonetaryValue> </InsuredValue>
* </PackageServiceOptions> </Package>
*
*
* </Shipment>
*
* <CustomerClassification> <Code>03</Code>
* </CustomerClassification> </RatingServiceSelectionRequest>
* **/
/**Map countriesMap = (Map) RefCache.getAllcountriesmap(LanguageUtil
.getLanguageNumberCode(locale.getLanguage()));
Map zonesMap = (Map) RefCache.getAllZonesmap(LanguageUtil
.getLanguageNumberCode(locale.getLanguage()));
Country storeCountry = (Country) countriesMap.get(store
.getCountry());
Country customerCountry = (Country) countriesMap.get(customer
.getCustomerCountryId());
int sZone = -1;
try {
sZone = Integer.parseInt(store.getZone());
} catch (Exception e) {
// TODO: handle exception
}
Zone storeZone = (Zone) zonesMap.get(sZone);
Zone customerZone = (Zone) zonesMap.get(customer
.getCustomerZoneId());**/
xmldatabuffer.append("<PickupType><Code>03</Code></PickupType>");
// xmldatabuffer.append("<Description>Daily Pickup</Description>");
xmldatabuffer.append("<Shipment><Shipper>");
xmldatabuffer.append("<Address>");
xmldatabuffer.append("<City>");
xmldatabuffer.append(store.getStorecity());
xmldatabuffer.append("</City>");
// if(!StringUtils.isBlank(store.getStorestateprovince())) {
if (store.getZone() != null) {
xmldatabuffer.append("<StateProvinceCode>");
xmldatabuffer.append(store.getZone().getCode());// zone code
xmldatabuffer.append("</StateProvinceCode>");
}
xmldatabuffer.append("<CountryCode>");
xmldatabuffer.append(store.getCountry().getIsoCode());
xmldatabuffer.append("</CountryCode>");
xmldatabuffer.append("<PostalCode>");
xmldatabuffer.append(DataUtils
.trimPostalCode(store.getStorepostalcode()));
xmldatabuffer.append("</PostalCode></Address></Shipper>");
// ship to
xmldatabuffer.append("<ShipTo>");
xmldatabuffer.append("<Address>");
xmldatabuffer.append("<City>");
xmldatabuffer.append(delivery.getCity());
xmldatabuffer.append("</City>");
// if(!StringUtils.isBlank(customer.getCustomerState())) {
if (delivery.getZone() != null) {
xmldatabuffer.append("<StateProvinceCode>");
xmldatabuffer.append(delivery.getZone().getCode());// zone code
xmldatabuffer.append("</StateProvinceCode>");
}
xmldatabuffer.append("<CountryCode>");
xmldatabuffer.append(delivery.getCountry().getIsoCode());
xmldatabuffer.append("</CountryCode>");
xmldatabuffer.append("<PostalCode>");
xmldatabuffer.append(DataUtils
.trimPostalCode(delivery.getPostalCode()));
xmldatabuffer.append("</PostalCode></Address></ShipTo>");
// xmldatabuffer.append("<Service><Code>11</Code></Service>");//TODO service codes (next day ...)
for(PackageDetails packageDetail : packages){
xmldatabuffer.append("<Package>");
xmldatabuffer.append("<PackagingType>");
xmldatabuffer.append("<Code>");
xmldatabuffer.append(pack);
xmldatabuffer.append("</Code>");
xmldatabuffer.append("</PackagingType>");
// weight
xmldatabuffer.append("<PackageWeight>");
xmldatabuffer.append("<UnitOfMeasurement>");
xmldatabuffer.append("<Code>");
xmldatabuffer.append(weightCode);
xmldatabuffer.append("</Code>");
xmldatabuffer.append("</UnitOfMeasurement>");
xmldatabuffer.append("<Weight>");
xmldatabuffer.append(new BigDecimal(packageDetail.getShippingWeight())
.setScale(1, BigDecimal.ROUND_HALF_UP));
xmldatabuffer.append("</Weight>");
xmldatabuffer.append("</PackageWeight>");
// dimension
xmldatabuffer.append("<Dimensions>");
xmldatabuffer.append("<UnitOfMeasurement>");
xmldatabuffer.append("<Code>");
xmldatabuffer.append(measureCode);
xmldatabuffer.append("</Code>");
xmldatabuffer.append("</UnitOfMeasurement>");
xmldatabuffer.append("<Length>");
xmldatabuffer.append(new BigDecimal(packageDetail.getShippingLength())
.setScale(2, BigDecimal.ROUND_HALF_UP));
xmldatabuffer.append("</Length>");
xmldatabuffer.append("<Width>");
xmldatabuffer.append(new BigDecimal(packageDetail.getShippingWidth())
.setScale(2, BigDecimal.ROUND_HALF_UP));
xmldatabuffer.append("</Width>");
xmldatabuffer.append("<Height>");
xmldatabuffer.append(new BigDecimal(packageDetail.getShippingHeight())
.setScale(2, BigDecimal.ROUND_HALF_UP));
xmldatabuffer.append("</Height>");
xmldatabuffer.append("</Dimensions>");
xmldatabuffer.append("</Package>");
}
xmldatabuffer.append("</Shipment>");
xmldatabuffer.append("</RatingServiceSelectionRequest>");
xmlbuffer.append(xmlhead).append(xml).append(
xmldatabuffer.toString());
LOGGER.debug("UPS QUOTE REQUEST " + xmlbuffer.toString());
String data = "";
HttpClient client = new HttpClient();
httppost = new PostMethod(protocol + "://" + host + ":" + port
+ url);
RequestEntity entity = new StringRequestEntity(
xmlbuffer.toString(), "text/plain", "UTF-8");
httppost.setRequestEntity(entity);
int result = client.executeMethod(httppost);
if (result != 200) {
LOGGER.error("Communication Error with ups quote " + result + " "
+ protocol + "://" + host + ":" + port + url);
throw new Exception("UPS quote communication error " + result);
}
data = httppost.getResponseBodyAsString();
LOGGER.debug("ups quote response " + data);
UPSParsedElements parsed = new UPSParsedElements();
Digester digester = new Digester();
digester.push(parsed);
digester.addCallMethod(
"RatingServiceSelectionResponse/Response/Error",
"setErrorCode", 0);
digester.addCallMethod(
"RatingServiceSelectionResponse/Response/ErrorDescriprion",
"setError", 0);
digester
.addCallMethod(
"RatingServiceSelectionResponse/Response/ResponseStatusCode",
"setStatusCode", 0);
digester
.addCallMethod(
"RatingServiceSelectionResponse/Response/ResponseStatusDescription",
"setStatusMessage", 0);
digester
.addCallMethod(
"RatingServiceSelectionResponse/Response/Error/ErrorDescription",
"setError", 0);
digester.addObjectCreate(
"RatingServiceSelectionResponse/RatedShipment",
ShippingOption.class);
// digester.addSetProperties(
// "RatingServiceSelectionResponse/RatedShipment", "sequence",
// "optionId" );
digester
.addCallMethod(
"RatingServiceSelectionResponse/RatedShipment/Service/Code",
"setOptionId", 0);
digester
.addCallMethod(
"RatingServiceSelectionResponse/RatedShipment/TotalCharges/MonetaryValue",
"setOptionPriceText", 0);
//digester
// .addCallMethod(
// "RatingServiceSelectionResponse/RatedShipment/TotalCharges/CurrencyCode",
// "setCurrency", 0);
digester
.addCallMethod(
"RatingServiceSelectionResponse/RatedShipment/Service/Code",
"setOptionCode", 0);
digester
.addCallMethod(
"RatingServiceSelectionResponse/RatedShipment/GuaranteedDaysToDelivery",
"setEstimatedNumberOfDays", 0);
digester.addSetNext("RatingServiceSelectionResponse/RatedShipment",
"addOption");
// <?xml
// version="1.0"?><AddressValidationResponse><Response><TransactionReference><CustomerContext>SalesManager
// Data</CustomerContext><XpciVersion>1.0</XpciVersion></TransactionReference><ResponseStatusCode>0</ResponseStatusCode><ResponseStatusDescription>Failure</ResponseStatusDescription><Error><ErrorSeverity>Hard</ErrorSeverity><ErrorCode>10002</ErrorCode><ErrorDescription>The
// XML document is well formed but the document is not
// valid</ErrorDescription><ErrorLocation><ErrorLocationElementName>AddressValidationRequest</ErrorLocationElementName></ErrorLocation></Error></Response></AddressValidationResponse>
Reader xmlreader = new StringReader(data);
digester.parse(xmlreader);
if (!StringUtils.isBlank(parsed.getErrorCode())) {
LOGGER.error("Can't process UPS statusCode="
+ parsed.getErrorCode() + " message= "
+ parsed.getError());
throw new IntegrationException(parsed.getError());
}
if (!StringUtils.isBlank(parsed.getStatusCode())
&& !parsed.getStatusCode().equals("1")) {
throw new IntegrationException(parsed.getError());
}
if (parsed.getOptions() == null || parsed.getOptions().size() == 0) {
throw new IntegrationException("No shipping options available for the configuration");
}
/*String carrier = getShippingMethodDescription(locale);
// cost is in CAD, need to do conversion
boolean requiresCurrencyConversion = false; String storeCurrency
= store.getCurrency();
if(!storeCurrency.equals(Constants.CURRENCY_CODE_CAD)) {
requiresCurrencyConversion = true; }
LabelUtil labelUtil = LabelUtil.getInstance();
Map serviceMap = com.salesmanager.core.util.ShippingUtil
.buildServiceMap("upsxml", locale);
*//** Details on whit RT quote information to display **//*
MerchantConfiguration rtdetails = config
.getMerchantConfiguration(ShippingConstants.MODULE_SHIPPING_DISPLAY_REALTIME_QUOTES);
int displayQuoteDeliveryTime = ShippingConstants.NO_DISPLAY_RT_QUOTE_TIME;
if (rtdetails != null) {
if (!StringUtils.isBlank(rtdetails.getConfigurationValue1())) {// display
// or
// not
// quotes
try {
displayQuoteDeliveryTime = Integer.parseInt(rtdetails
.getConfigurationValue1());
} catch (Exception e) {
log.error("Display quote is not an integer value ["
+ rtdetails.getConfigurationValue1() + "]");
}
}
}*/
List<ShippingOption> shippingOptions = parsed.getOptions();
if(shippingOptions!=null) {
Map<String,String> details = module.getDetails();
for(ShippingOption option : shippingOptions) {
String name = details.get(option.getOptionCode());
option.setOptionName(name);
if(option.getOptionPrice()==null) {
String priceText = option.getOptionPriceText();
if(StringUtils.isBlank(priceText)) {
throw new IntegrationException("Price text is null for option " + name);
}
try {
BigDecimal price = new BigDecimal(priceText);
option.setOptionPrice(price);
} catch(Exception e) {
throw new IntegrationException("Can't convert to numeric price " + priceText);
}
}
}
}
/* if (options != null) {
Map selectedintlservices = (Map) config
.getConfiguration("service-global-upsxml");
Iterator i = options.iterator();
while (i.hasNext()) {
ShippingOption option = (ShippingOption) i.next();
// option.setCurrency(store.getCurrency());
StringBuffer description = new StringBuffer();
String code = option.getOptionCode();
option.setOptionCode(code);
// get description
String label = (String) serviceMap.get(code);
if (label == null) {
log
.warn("UPSXML cannot find description for service code "
+ code);
}
option.setOptionName(label);
description.append(option.getOptionName());
if (displayQuoteDeliveryTime == ShippingConstants.DISPLAY_RT_QUOTE_TIME) {
if (!StringUtils.isBlank(option
.getEstimatedNumberOfDays())) {
description.append(" (").append(
option.getEstimatedNumberOfDays()).append(
" ").append(
labelUtil.getText(locale,
"label.generic.days.lowercase"))
.append(")");
}
}
option.setDescription(description.toString());
// get currency
if (!option.getCurrency().equals(store.getCurrency())) {
option.setOptionPrice(CurrencyUtil.convertToCurrency(
option.getOptionPrice(), option.getCurrency(),
store.getCurrency()));
}
if (!selectedintlservices.containsKey(option
.getOptionCode())) {
if (returnColl == null) {
returnColl = new ArrayList();
}
returnColl.add(option);
// options.remove(option);
}
}
if (options.size() == 0) {
LogMerchantUtil
.log(
store.getMerchantId(),
" none of the service code returned by UPS ["
+ selectedintlservices
.keySet()
.toArray(
new String[selectedintlservices
.size()])
+ "] for this shipping is in your selection list");
}
}*/
return shippingOptions;
} catch (Exception e1) {
LOGGER.error("UPS quote error",e1);
throw new IntegrationException(e1);
} finally {
if (reader != null) {
try {
reader.close();
} catch (Exception ignore) {
}
}
if (httppost != null) {
httppost.releaseConnection();
}
}
}
@Override
public CustomIntegrationConfiguration getCustomModuleConfiguration(
MerchantStore store) throws IntegrationException {
//nothing to do
// This shipping module exposes no custom configuration UI, so the
// contract allows returning null here.
return null;
}}
/**
 * Mutable holder that the XML Digester fills while parsing a UPS
 * RatingServiceSelectionResponse: overall status, error information and
 * the rated shipping options.
 */
class UPSParsedElements {

	/** Response status code; the caller treats "1" as success. */
	private String statusCode;
	/** Human-readable status description from the response. */
	private String statusMessage;
	/** Error description text; empty string when no error was reported. */
	private String error = "";
	/** Error code reported by UPS; empty string when no error was reported. */
	private String errorCode = "";
	/** Shipping options rated by UPS, in response order. */
	private final List<ShippingOption> options = new ArrayList<ShippingOption>();

	public void addOption(ShippingOption option) {
		this.options.add(option);
	}

	public List<ShippingOption> getOptions() {
		return this.options;
	}

	public String getStatusCode() {
		return this.statusCode;
	}

	public void setStatusCode(String statusCode) {
		this.statusCode = statusCode;
	}

	public String getStatusMessage() {
		return this.statusMessage;
	}

	public void setStatusMessage(String statusMessage) {
		this.statusMessage = statusMessage;
	}

	public String getError() {
		return this.error;
	}

	public void setError(String error) {
		this.error = error;
	}

	public String getErrorCode() {
		return this.errorCode;
	}

	public void setErrorCode(String errorCode) {
		this.errorCode = errorCode;
	}
}
| apache-2.0 |
ryandcarter/hybris-connector | src/main/java/org/mule/modules/hybris/model/CelumPicturesIntegrationJobBaseDTO.java | 1299 | //
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2013.11.29 at 12:35:53 PM GMT
//
package org.mule.modules.hybris.model;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlSeeAlso;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for celumPicturesIntegrationJobBaseDTO complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="celumPicturesIntegrationJobBaseDTO">
* <complexContent>
* <extension base="{}jobDTO">
* <sequence>
* </sequence>
* </extension>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "celumPicturesIntegrationJobBaseDTO")
@XmlSeeAlso({
FilePicturesIntegrationJobDTO.class,
CelumInformationFieldPicturesIntegrationJobBaseDTO.class
})
// JAXB-generated marker type: it adds no members of its own and only anchors
// the schema hierarchy between JobDTO and the two subtypes named in
// @XmlSeeAlso. Per the generator header above, hand edits are lost when the
// schema is recompiled.
public class CelumPicturesIntegrationJobBaseDTO
extends JobDTO
{
}
| apache-2.0 |
nsip/nias2 | sifxml/AggregateStatisticInfo.go | 900 | package sifxml
// AggregateStatisticInfo is the Go mapping of the SIF AggregateStatisticInfo
// XML object. The struct tags drive encoding/xml (un)marshalling: RefId is
// carried as an XML attribute, every other field as a child element.
// NOTE(review): the domain meaning of the date/measure fields comes from the
// SIF specification this file was generated from - confirm against the schema.
type AggregateStatisticInfo struct {
RefId RefIdType `xml:"RefId,attr"`
StatisticName string `xml:"StatisticName"`
CalculationRule AggregateStatisticInfo_CalculationRule `xml:"CalculationRule"`
ApprovalDate string `xml:"ApprovalDate"`
ExpirationDate string `xml:"ExpirationDate"`
ExclusionRules ExclusionRulesType `xml:"ExclusionRules"`
Source string `xml:"Source"`
EffectiveDate string `xml:"EffectiveDate"`
DiscontinueDate string `xml:"DiscontinueDate"`
Location LocationType `xml:"Location"`
Measure string `xml:"Measure"`
SIF_Metadata SIF_MetadataType `xml:"SIF_Metadata"`
SIF_ExtendedElements SIF_ExtendedElementsType `xml:"SIF_ExtendedElements"`
}
// AggregateStatisticInfo_CalculationRule models the CalculationRule child
// element: the rule body as character data plus its Type attribute.
type AggregateStatisticInfo_CalculationRule struct {
Type string `xml:"Type,attr"`
Value string `xml:",chardata"`
}
| apache-2.0 |
yanirta/applitools.examples | Ruby/app/test_applitools_website.rb | 840 | require 'eyes_selenium'
# Applitools Eyes visual-regression demo: drives Firefox to applitools.com
# and validates two pages against the stored baseline.
eyes = Applitools::Eyes.new
# This is your api key, make sure you use it in all your tests.
# NOTE(review): APPLITOOLS_APIKEY is assumed to be defined elsewhere
# (environment bootstrap or an initializer) - confirm before running.
eyes.api_key = APPLITOOLS_APIKEY
# Get a selenium web driver object.
my_webdriver = Selenium::WebDriver.for :firefox
#baseline = os + browser + viewport-size + app_name + test
begin
# Start visual testing using my_webdriver and setting the viewport to 900x600.
eyes.test(app_name: 'Applitools website', test_name: 'Example test',
viewport_size: {width: 900, height: 600}, driver: my_webdriver) do |driver|
driver.get 'http://www.applitools.com'
# Visual validation point #1
eyes.check_window('Main Page')
driver.find_element(:css, ".read_more").click
# Visual validation point #2
eyes.check_window('Features Page')
end
ensure
# Always runs: report an aborted session to Applitools and release the
# browser even when the test body raises.
eyes.abort_if_not_closed
my_webdriver.quit
end
| apache-2.0 |
botunge/hapi-fhir | examples/src/main/java/example/ClientTransactionExamples.java | 3727 | package example;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.dstu2.composite.QuantityDt;
import ca.uhn.fhir.model.dstu2.composite.ResourceReferenceDt;
import ca.uhn.fhir.model.dstu2.resource.Bundle;
import ca.uhn.fhir.model.dstu2.resource.Observation;
import ca.uhn.fhir.model.dstu2.resource.Patient;
import ca.uhn.fhir.model.dstu2.valueset.AdministrativeGenderEnum;
import ca.uhn.fhir.model.dstu2.valueset.BundleTypeEnum;
import ca.uhn.fhir.model.dstu2.valueset.HTTPVerbEnum;
import ca.uhn.fhir.model.dstu2.valueset.ObservationStatusEnum;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.client.IGenericClient;
/**
 * HAPI FHIR example: builds a DSTU2 transaction Bundle containing a
 * conditionally-created Patient and an Observation referring to it, then
 * posts the transaction to a public test server.
 *
 * The START/END SNIPPET markers are consumed by the documentation build to
 * extract this code verbatim - keep them intact.
 */
public class ClientTransactionExamples {
public static void main(String[] args) {
conditionalCreate();
}
private static void conditionalCreate() {
//START SNIPPET: conditional
// Create a patient object
Patient patient = new Patient();
patient.addIdentifier()
.setSystem("http://acme.org/mrns")
.setValue("12345");
patient.addName()
.addFamily("Jameson")
.addGiven("J")
.addGiven("Jonah");
patient.setGender(AdministrativeGenderEnum.MALE);
// Give the patient a temporary UUID so that other resources in
// the transaction can refer to it
patient.setId(IdDt.newRandomUuid());
// Create an observation object
Observation observation = new Observation();
observation.setStatus(ObservationStatusEnum.FINAL);
observation
.getCode()
.addCoding()
.setSystem("http://loinc.org")
.setCode("789-8")
.setDisplay("Erythrocytes [#/volume] in Blood by Automated count");
observation.setValue(
new QuantityDt()
.setValue(4.12)
.setUnit("10 trillion/L")
.setSystem("http://unitsofmeasure.org")
.setCode("10*12/L"));
// The observation refers to the patient using the ID, which is already
// set to a temporary UUID
observation.setSubject(new ResourceReferenceDt(patient.getId().getValue()));
// Create a bundle that will be used as a transaction
Bundle bundle = new Bundle();
bundle.setType(BundleTypeEnum.TRANSACTION);
// Add the patient as an entry. This entry is a POST with an
// If-None-Exist header (conditional create) meaning that it
// will only be created if there isn't already a Patient with
// the identifier 12345
bundle.addEntry()
.setFullUrl(patient.getId().getValue())
.setResource(patient)
.getRequest()
.setUrl("Patient")
.setIfNoneExist("identifier=http://acme.org/mrns|12345")
.setMethod(HTTPVerbEnum.POST);
// Add the observation. This entry is a POST with no header
// (normal create) meaning that it will be created even if
// a similar resource already exists.
bundle.addEntry()
.setResource(observation)
.getRequest()
.setUrl("Observation")
.setMethod(HTTPVerbEnum.POST);
// Log the request
FhirContext ctx = FhirContext.forDstu2();
System.out.println(ctx.newXmlParser().setPrettyPrint(true).encodeResourceToString(bundle));
// Create a client and post the transaction to the server
IGenericClient client = ctx.newRestfulGenericClient("http://fhirtest.uhn.ca/baseDstu2");
Bundle resp = client.transaction().withBundle(bundle).execute();
// Log the response
System.out.println(ctx.newXmlParser().setPrettyPrint(true).encodeResourceToString(resp));
//END SNIPPET: conditional
}
}
| apache-2.0 |
benoitletondor/mvp-maps | sample/src/main/java/com/benoitletondor/mvp/maps/sample/scene/fragment/samplefragment/injection/FragmentSampleSceneModule.java | 1039 | package com.benoitletondor.mvp.maps.sample.scene.fragment.samplefragment.injection;
import androidx.annotation.NonNull;
import com.benoitletondor.mvp.core.presenter.PresenterFactory;
import com.benoitletondor.mvp.maps.sample.scene.fragment.samplefragment.SampleFragmentPresenter;
import com.benoitletondor.mvp.maps.sample.scene.fragment.samplefragment.impl.SampleFragmentPresenterImpl;
import dagger.Module;
import dagger.Provides;
/**
* Injection module for the fragment sample scene
*
* @author Benoit LETONDOR
*/
@Module
public final class FragmentSampleSceneModule
{
// Dagger binding: supplies the factory used to build this scene's presenter.
@Provides
PresenterFactory<SampleFragmentPresenter> provideSampleFragmentPresenterFactory()
{
return new SampleFragmentPresenterImplFactory();
}
// Private factory implementation: each create() call returns a fresh
// SampleFragmentPresenterImpl instance.
private static final class SampleFragmentPresenterImplFactory implements PresenterFactory<SampleFragmentPresenter>
{
@NonNull
@Override
public SampleFragmentPresenter create()
{
return new SampleFragmentPresenterImpl();
}
}
}
| apache-2.0 |
Zverik/omim | tools/python/ResponseProvider.py | 6576 | from __future__ import print_function
import jsons
import logging
import os
BIG_FILE_SIZE = 47684
class Payload:
    """A response description: status code, body text and extra headers."""

    def __init__(self, message, response_code=200, headers=None):
        """
        message: response body string.
        response_code: HTTP status code to send (default 200).
        headers: optional dict of extra headers. Defaults to None rather
            than a mutable ``{}`` default, which Python evaluates once and
            shares between every Payload instance - mutating one payload's
            headers would have leaked into all others.
        """
        self.__response_code = response_code
        self.__message = message
        self.__headers = {} if headers is None else headers

    def response_code(self):
        """
        Response code to send to the client.
        """
        return self.__response_code

    def message(self):
        """
        The message to send to the client.
        """
        return self.__message

    def length(self):
        """
        The length of the response.
        """
        return len(self.message())

    def headers(self):
        """
        The headers to be sent to the client. Please, note, that these do not include
        the Content-Length header, which you need to send separately.
        """
        return self.__headers

    def __repr__(self):
        return "{}: {}: {}".format(self.response_code(), self.length(), self.message())
class ResponseProviderMixin:
    """
    A mixin (basically, an interface) that the web-server that we might use relies on.

    In this implementation, the job of the web-server is just to get the request
    (the url and the headers), and to send the response as it knows how. It isn't
    its job to decide how to respond to what request. It is the job of the
    ResponseProvider.

    In your web-server you should initialize the ResponseProvider, and ask it for

    response_for_url_and_headers(url, headers)

    Which will return a Payload object that the server must send as response.

    The server might be notified when a particular request has been received:

    got_pinged(self) - someone sent a ping request. The Response provider will
    respond with "pong" and call this method of the server. You might want to
    increment the count of active users, for ping is the request that new instances
    of servers send to check if other servers are currently serving.

    kill(self) - someone sent the kill request, which means that that someone
    no longer needs this server to serve. You might want to decrement the count of
    active users and/or stop the server.
    """

    def dispatch_response(self, payload):
        """
        Define this method to dispatch the response received from the ResponseProvider
        """
        raise NotImplementedError()

    def got_pinged(self):
        """
        A ping request has been received. In most scenarios it means that the number of
        users of this server has increased by 1.
        """
        raise NotImplementedError()

    def kill(self):
        """
        Someone no longer needs this server. Decrement the number of users and stop
        the server if the number fell to 0.
        """
        raise NotImplementedError()
class ResponseProvider:
    """
    Maps request URLs to Payload responses for the test web-server.
    The delegate (a ResponseProviderMixin implementor) is notified about
    /ping and /kill requests.
    """

    def __init__(self, delegate):
        self.headers = list()
        self.delegate = delegate
        self.byterange = None
        self.is_chunked = False
        self.response_code = 200

    def pong(self):
        """Answer /ping and tell the server it has been pinged."""
        self.delegate.got_pinged()
        return Payload("pong")

    def my_id(self):
        """Answer /id with this server process' pid."""
        return Payload(str(os.getpid()))

    def strip_query(self, url):
        """Return url without its query string (everything from '?' on)."""
        query_start = url.find("?")
        if query_start > 0:
            return url[:query_start]
        return url

    def response_for_url_and_headers(self, url, headers):
        """
        Return the Payload for url; unknown urls get a 404. A Range header
        in headers switches this request into chunked (206) mode.
        """
        self.headers = headers
        self.chunk_requested()
        url = self.strip_query(url)
        handlers = {
            "/unit_tests/1.txt": self.test1,
            "/unit_tests/notexisting_unittest": self.test_404,
            "/unit_tests/permanent": self.test_301,
            "/unit_tests/47kb.file": self.test_47_kb,
            # Following two URIs are used to test downloading failures on different platforms.
            "/unit_tests/mac/1234/Uruguay.mwm": self.test_404,
            "/unit_tests/linux/1234/Uruguay.mwm": self.test_404,
            "/ping": self.pong,
            "/kill": self.kill,
            "/id": self.my_id,
            "/partners/time": self.partners_time,
            "/partners/price": self.partners_price,
            "/booking/min_price": self.partners_minprice,
            "/booking/min_price.getHotelAvailability": self.partners_minprice,
        }
        # Explicit lookup instead of a bare ``except:`` wrapped around the
        # handler call: only an unknown url yields a 404 now, and a genuine
        # bug inside a handler surfaces instead of being masked as 404.
        handler = handlers.get(url)
        if handler is None:
            return self.test_404()
        return handler()

    def chunk_requested(self):
        """Parse a 'Range: bytes=a-b' request header into self.byterange."""
        if "range" in self.headers:
            self.is_chunked = True
            self.response_code = 206
            meaningful_string = self.headers["range"][6:]  # drop "bytes="
            first, last = meaningful_string.split("-")
            self.byterange = (int(first), int(last))

    def trim_message(self, message):
        """Cut message down to the requested inclusive byte range."""
        if not self.is_chunked:
            return message
        return message[self.byterange[0]: self.byterange[1] + 1]

    def test1(self):
        init_message = "Test1"
        message = self.trim_message(init_message)
        size = len(init_message)
        self.check_byterange(size)
        headers = self.chunked_response_header(size)
        return Payload(message, self.response_code, headers)

    def test_404(self):
        return Payload("", response_code=404)

    def test_301(self):
        return Payload("", 301, {"Location": "google.com"})

    def check_byterange(self, size):
        """Default the byte range to the whole body if none was requested."""
        if self.byterange is None:
            self.byterange = (0, size)

    def chunked_response_header(self, size):
        return {
            "Content-Range": "bytes {start}-{end}/{out_of}".format(
                start=self.byterange[0], end=self.byterange[1], out_of=size)
        }

    def test_47_kb(self):
        self.check_byterange(BIG_FILE_SIZE)
        headers = self.chunked_response_header(BIG_FILE_SIZE)
        message = self.trim_message(self.message_for_47kb_file())
        return Payload(message, self.response_code, headers)

    def message_for_47kb_file(self):
        message = []
        for i in range(0, BIG_FILE_SIZE + 1):
            # ``//`` (floor division) works on both Python 2 and 3; the old
            # ``i / 256`` produced a float on Python 3 and made chr() raise.
            message.append(chr(i // 256))
            message.append(chr(i % 256))
        return "".join(message)

    # Partners_api_tests
    def partners_time(self):
        return Payload(jsons.PARTNERS_TIME)

    def partners_price(self):
        return Payload(jsons.PARTNERS_PRICE)

    def partners_minprice(self):
        return Payload(jsons.PARTNERS_MINPRICE)

    def kill(self):
        logging.debug("Kill called in ResponseProvider")
        self.delegate.kill()
        return Payload("Bye...")
| apache-2.0 |
fangj/samples-start | SpringHibernate/src/test/java/try1/AppTest.java | 632 | package try1;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
/**
* Unit test for simple App.
*/
// JUnit 3 style suite (constructor + static suite()); the single test is a
// placeholder that only verifies the build/test harness wiring.
public class AppTest
extends TestCase
{
/**
 * Create the test case
 *
 * @param testName name of the test case
 */
public AppTest( String testName )
{
super( testName );
}
/**
 * @return the suite of tests being tested
 */
public static Test suite()
{
return new TestSuite( AppTest.class );
}
/**
 * Rigorous Test :-)
 */
public void testApp()
{
assertTrue( true );
}
}
| apache-2.0 |
PATRIC3/p3_solr | lucene/core/src/test/org/apache/lucene/search/TestDisjunctionMaxQuery.java | 19288 | package org.apache.lucene.search;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.util.Arrays;
import java.util.Locale;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.FieldInvertState;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.SlowCompositeReaderWrapper;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.similarities.DefaultSimilarity;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.search.spans.SpanTermQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
/**
* Test of the DisjunctionMaxQuery.
*
*/
public class TestDisjunctionMaxQuery extends LuceneTestCase {
/** threshold for comparing floats */
public static final float SCORE_COMP_THRESH = 0.0000f;
/**
 * Similarity that neutralizes tf, idf and length normalization so that the
 * scores in this test class depend only on the DisjunctionMaxQuery
 * max/tiebreaker arithmetic, not on term statistics.
 *
 * <p>
 * same as TestRankingSimilarity in TestRanking.zip from
 * http://issues.apache.org/jira/browse/LUCENE-323
 * </p>
 */
private static class TestSimilarity extends DefaultSimilarity {

  public TestSimilarity() {}

  @Override
  public float tf(float freq) {
    // Binary term frequency: any number of occurrences counts exactly once.
    return freq > 0.0f ? 1.0f : 0.0f;
  }

  @Override
  public float lengthNorm(FieldInvertState state) {
    // Ignore field length; keep only the field boost.
    return state.getBoost();
  }

  @Override
  public float idf(long docFreq, long numDocs) {
    // Constant idf: rare terms score no higher than common ones.
    return 1.0f;
  }
}
// Statistics-free similarity shared by the index writer and the searcher.
public Similarity sim = new TestSimilarity();
// Created in setUp(), released in tearDown().
public Directory index;
public IndexReader r;
public IndexSearcher s;
// Stored text field type with tokenization turned off (whole-value terms).
private static final FieldType nonAnalyzedType = new FieldType(TextField.TYPE_STORED);
static {
nonAnalyzedType.setTokenized(false);
}
/**
 * Builds a four-document index over fields "hed" and "dek" whose documents
 * d1..d4 are progressively better matches for the query "albino elephant",
 * then opens a searcher configured with the statistics-free TestSimilarity.
 */
@Override
public void setUp() throws Exception {
super.setUp();
index = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random(), index,
newIndexWriterConfig(new MockAnalyzer(random()))
.setSimilarity(sim).setMergePolicy(newLogMergePolicy()));
// hed is the most important field, dek is secondary
// d1 is an "ok" match for: albino elephant
{
Document d1 = new Document();
d1.add(newField("id", "d1", nonAnalyzedType));// Field.Keyword("id",
// "d1"));
d1
.add(newTextField("hed", "elephant", Field.Store.YES));// Field.Text("hed", "elephant"));
d1
.add(newTextField("dek", "elephant", Field.Store.YES));// Field.Text("dek", "elephant"));
writer.addDocument(d1);
}
// d2 is a "good" match for: albino elephant
{
Document d2 = new Document();
d2.add(newField("id", "d2", nonAnalyzedType));// Field.Keyword("id",
// "d2"));
d2
.add(newTextField("hed", "elephant", Field.Store.YES));// Field.Text("hed", "elephant"));
d2.add(newTextField("dek", "albino", Field.Store.YES));// Field.Text("dek",
// "albino"));
d2
.add(newTextField("dek", "elephant", Field.Store.YES));// Field.Text("dek", "elephant"));
writer.addDocument(d2);
}
// d3 is a "better" match for: albino elephant
{
Document d3 = new Document();
d3.add(newField("id", "d3", nonAnalyzedType));// Field.Keyword("id",
// "d3"));
d3.add(newTextField("hed", "albino", Field.Store.YES));// Field.Text("hed",
// "albino"));
d3
.add(newTextField("hed", "elephant", Field.Store.YES));// Field.Text("hed", "elephant"));
writer.addDocument(d3);
}
// d4 is the "best" match for: albino elephant
{
Document d4 = new Document();
d4.add(newField("id", "d4", nonAnalyzedType));// Field.Keyword("id",
// "d4"));
d4.add(newTextField("hed", "albino", Field.Store.YES));// Field.Text("hed",
// "albino"));
d4
.add(newField("hed", "elephant", nonAnalyzedType));// Field.Text("hed", "elephant"));
d4.add(newTextField("dek", "albino", Field.Store.YES));// Field.Text("dek",
// "albino"));
writer.addDocument(d4);
}
// Wrap as a single (composite) leaf so tests can cast the top context.
r = SlowCompositeReaderWrapper.wrap(writer.getReader());
writer.close();
s = newSearcher(r);
s.setSimilarity(sim);
}
@Override
public void tearDown() throws Exception {
// Release the reader and directory opened in setUp(); super.tearDown()
// runs last so the LuceneTestCase leak checks see everything closed.
r.close();
index.close();
super.tearDown();
}
// First advance() lands past every match (only d1 matches, doc < 3), so the
// scorer must report exhaustion rather than invent a hit.
public void testSkipToFirsttimeMiss() throws IOException {
final DisjunctionMaxQuery dq = new DisjunctionMaxQuery(
Arrays.asList(tq("id", "d1"), tq("dek", "DOES_NOT_EXIST")), 0.0f);
QueryUtils.check(random(), dq, s);
assertTrue(s.getTopReaderContext() instanceof LeafReaderContext);
final Weight dw = s.createNormalizedWeight(dq, true);
LeafReaderContext context = (LeafReaderContext)s.getTopReaderContext();
final Scorer ds = dw.scorer(context);
final boolean skipOk = ds.advance(3) != DocIdSetIterator.NO_MORE_DOCS;
if (skipOk) {
fail("firsttime skipTo found a match? ... "
+ r.document(ds.docID()).get("id"));
}
}
// First advance() as the very first scorer call must find the real match
// (doc 3, id "d4") instead of skipping it.
public void testSkipToFirsttimeHit() throws IOException {
final DisjunctionMaxQuery dq = new DisjunctionMaxQuery(
Arrays.asList(tq("dek", "albino"), tq("dek", "DOES_NOT_EXIST")), 0.0f);
assertTrue(s.getTopReaderContext() instanceof LeafReaderContext);
QueryUtils.check(random(), dq, s);
final Weight dw = s.createNormalizedWeight(dq, true);
LeafReaderContext context = (LeafReaderContext)s.getTopReaderContext();
final Scorer ds = dw.scorer(context);
assertTrue("firsttime skipTo found no match",
ds.advance(3) != DocIdSetIterator.NO_MORE_DOCS);
assertEquals("found wrong docid", "d4", r.document(ds.docID()).get("id"));
}
// Tiebreaker 0.0 takes only the max clause score; with TestSimilarity
// neutralizing statistics, all four docs (each containing at least one of
// the two "hed" terms) must score identically.
public void testSimpleEqualScores1() throws Exception {
DisjunctionMaxQuery q = new DisjunctionMaxQuery(
Arrays.asList(tq("hed", "albino"), tq("hed", "elephant")),
0.0f);
QueryUtils.check(random(), q, s);
ScoreDoc[] h = s.search(q, 1000).scoreDocs;
try {
assertEquals("all docs should match " + q.toString(), 4, h.length);
float score = h[0].score;
for (int i = 1; i < h.length; i++) {
assertEquals("score #" + i + " is not the same", score, h[i].score,
SCORE_COMP_THRESH);
}
} catch (Error e) {
printHits("testSimpleEqualScores1", h, s);
throw e;
}
}
// Same as testSimpleEqualScores1 but on the secondary "dek" field: only the
// three docs with a "dek" value match, and they must tie.
public void testSimpleEqualScores2() throws Exception {
DisjunctionMaxQuery q = new DisjunctionMaxQuery(
Arrays.asList(tq("dek", "albino"), tq("dek", "elephant")),
0.0f);
QueryUtils.check(random(), q, s);
ScoreDoc[] h = s.search(q, 1000).scoreDocs;
try {
assertEquals("3 docs should match " + q.toString(), 3, h.length);
float score = h[0].score;
for (int i = 1; i < h.length; i++) {
assertEquals("score #" + i + " is not the same", score, h[i].score,
SCORE_COMP_THRESH);
}
} catch (Error e) {
printHits("testSimpleEqualScores2", h, s);
throw e;
}
}
// All four term clauses across both fields in one disjunction with
// tiebreaker 0.0: every doc matches and all must tie on the max score.
public void testSimpleEqualScores3() throws Exception {
DisjunctionMaxQuery q = new DisjunctionMaxQuery(
Arrays.asList(
tq("hed", "albino"),
tq("hed", "elephant"),
tq("dek", "albino"),
tq("dek", "elephant")),
0.0f);
QueryUtils.check(random(), q, s);
ScoreDoc[] h = s.search(q, 1000).scoreDocs;
try {
assertEquals("all docs should match " + q.toString(), 4, h.length);
float score = h[0].score;
for (int i = 1; i < h.length; i++) {
assertEquals("score #" + i + " is not the same", score, h[i].score,
SCORE_COMP_THRESH);
}
} catch (Error e) {
printHits("testSimpleEqualScores3", h, s);
throw e;
}
}
// Non-zero tiebreaker (0.01) adds a fraction of the non-max clause scores:
// d2 matches both "dek" terms and must outrank d1/d4, which match one each
// and therefore tie.
public void testSimpleTiebreaker() throws Exception {
DisjunctionMaxQuery q = new DisjunctionMaxQuery(
Arrays.asList(tq("dek", "albino"), tq("dek", "elephant")),
0.01f);
QueryUtils.check(random(), q, s);
ScoreDoc[] h = s.search(q, 1000).scoreDocs;
try {
assertEquals("3 docs should match " + q.toString(), 3, h.length);
assertEquals("wrong first", "d2", s.doc(h[0].doc).get("id"));
float score0 = h[0].score;
float score1 = h[1].score;
float score2 = h[2].score;
assertTrue("d2 does not have better score then others: " + score0
+ " >? " + score1, score0 > score1);
assertEquals("d4 and d1 don't have equal scores", score1, score2,
SCORE_COMP_THRESH);
} catch (Error e) {
printHits("testSimpleTiebreaker", h, s);
throw e;
}
}
// Two MUST dismax clauses (albino-in-either-field AND elephant-in-either-
// field): with tiebreaker 0.0 the three docs that satisfy both must tie.
public void testBooleanRequiredEqualScores() throws Exception {
BooleanQuery.Builder q = new BooleanQuery.Builder();
{
DisjunctionMaxQuery q1 = new DisjunctionMaxQuery(
Arrays.asList(tq("hed", "albino"), tq("dek", "albino")),
0.0f);
q.add(q1, BooleanClause.Occur.MUST);// true,false);
QueryUtils.check(random(), q1, s);
}
{
DisjunctionMaxQuery q2 = new DisjunctionMaxQuery(
Arrays.asList(tq("hed", "elephant"), tq("dek", "elephant")),
0.0f);
q.add(q2, BooleanClause.Occur.MUST);// true,false);
QueryUtils.check(random(), q2, s);
}
QueryUtils.check(random(), q.build(), s);
ScoreDoc[] h = s.search(q.build(), 1000).scoreDocs;
try {
assertEquals("3 docs should match " + q.toString(), 3, h.length);
float score = h[0].score;
for (int i = 1; i < h.length; i++) {
assertEquals("score #" + i + " is not the same", score, h[i].score,
SCORE_COMP_THRESH);
}
} catch (Error e) {
printHits("testBooleanRequiredEqualScores1", h, s);
throw e;
}
}
// Same two dismax clauses but SHOULD: all four docs match; the three that
// contain both terms tie, while d1 (elephant only) must score strictly lower.
public void testBooleanOptionalNoTiebreaker() throws Exception {
BooleanQuery.Builder q = new BooleanQuery.Builder();
{
DisjunctionMaxQuery q1 = new DisjunctionMaxQuery(
Arrays.asList(tq("hed", "albino"), tq("dek", "albino")),
0.0f);
q.add(q1, BooleanClause.Occur.SHOULD);// false,false);
}
{
DisjunctionMaxQuery q2 = new DisjunctionMaxQuery(
Arrays.asList(tq("hed", "elephant"), tq("dek", "elephant")),
0.0f);
q.add(q2, BooleanClause.Occur.SHOULD);// false,false);
}
QueryUtils.check(random(), q.build(), s);
ScoreDoc[] h = s.search(q.build(), 1000).scoreDocs;
try {
assertEquals("4 docs should match " + q.toString(), 4, h.length);
float score = h[0].score;
for (int i = 1; i < h.length - 1; i++) { /* note: -1 */
assertEquals("score #" + i + " is not the same", score, h[i].score,
SCORE_COMP_THRESH);
}
assertEquals("wrong last", "d1", s.doc(h[h.length - 1].doc).get("id"));
float score1 = h[h.length - 1].score;
assertTrue("d1 does not have worse score then others: " + score + " >? "
+ score1, score > score1);
} catch (Error e) {
printHits("testBooleanOptionalNoTiebreaker", h, s);
throw e;
}
}
// SHOULD clauses with tiebreaker 0.01: docs matching a term in both fields
// gain a small bonus, producing the expected order {d2,d4} > d3 > d1.
public void testBooleanOptionalWithTiebreaker() throws Exception {
BooleanQuery.Builder q = new BooleanQuery.Builder();
{
DisjunctionMaxQuery q1 = new DisjunctionMaxQuery(
Arrays.asList(tq("hed", "albino"), tq("dek", "albino")),
0.01f);
q.add(q1, BooleanClause.Occur.SHOULD);// false,false);
}
{
DisjunctionMaxQuery q2 = new DisjunctionMaxQuery(
Arrays.asList(tq("hed", "elephant"), tq("dek", "elephant")),
0.01f);
q.add(q2, BooleanClause.Occur.SHOULD);// false,false);
}
QueryUtils.check(random(), q.build(), s);
ScoreDoc[] h = s.search(q.build(), 1000).scoreDocs;
try {
assertEquals("4 docs should match " + q.toString(), 4, h.length);
float score0 = h[0].score;
float score1 = h[1].score;
float score2 = h[2].score;
float score3 = h[3].score;
String doc0 = s.doc(h[0].doc).get("id");
String doc1 = s.doc(h[1].doc).get("id");
String doc2 = s.doc(h[2].doc).get("id");
String doc3 = s.doc(h[3].doc).get("id");
assertTrue("doc0 should be d2 or d4: " + doc0, doc0.equals("d2")
|| doc0.equals("d4"));
assertTrue("doc1 should be d2 or d4: " + doc0, doc1.equals("d2")
|| doc1.equals("d4"));
assertEquals("score0 and score1 should match", score0, score1,
SCORE_COMP_THRESH);
assertEquals("wrong third", "d3", doc2);
assertTrue("d3 does not have worse score then d2 and d4: " + score1
+ " >? " + score2, score1 > score2);
assertEquals("wrong fourth", "d1", doc3);
assertTrue("d1 does not have worse score then d3: " + score2 + " >? "
+ score3, score2 > score3);
} catch (Error e) {
printHits("testBooleanOptionalWithTiebreaker", h, s);
throw e;
}
}
// Same as testBooleanOptionalWithTiebreaker but with a 1.5 boost on the "hed"
// field, which breaks the d2/d4 tie and forces the total order d4 > d3 > d2 > d1.
public void testBooleanOptionalWithTiebreakerAndBoost() throws Exception {
  BooleanQuery.Builder q = new BooleanQuery.Builder();
  {
    DisjunctionMaxQuery q1 = new DisjunctionMaxQuery(
        Arrays.asList(tq("hed", "albino", 1.5f), tq("dek", "albino")),
        0.01f);
    q.add(q1, BooleanClause.Occur.SHOULD);// false,false);
  }
  {
    DisjunctionMaxQuery q2 = new DisjunctionMaxQuery(
        Arrays.asList(tq("hed", "elephant", 1.5f), tq("dek", "elephant")),
        0.01f);
    q.add(q2, BooleanClause.Occur.SHOULD);// false,false);
  }
  QueryUtils.check(random(), q.build(), s);
  ScoreDoc[] h = s.search(q.build(), 1000).scoreDocs;
  try {
    assertEquals("4 docs should match " + q.toString(), 4, h.length);
    float score0 = h[0].score;
    float score1 = h[1].score;
    float score2 = h[2].score;
    float score3 = h[3].score;
    String doc0 = s.doc(h[0].doc).get("id");
    String doc1 = s.doc(h[1].doc).get("id");
    String doc2 = s.doc(h[2].doc).get("id");
    String doc3 = s.doc(h[3].doc).get("id");
    assertEquals("doc0 should be d4: ", "d4", doc0);
    assertEquals("doc1 should be d3: ", "d3", doc1);
    assertEquals("doc2 should be d2: ", "d2", doc2);
    assertEquals("doc3 should be d1: ", "d1", doc3);
    assertTrue("d4 does not have a better score then d3: " + score0 + " >? "
        + score1, score0 > score1);
    assertTrue("d3 does not have a better score then d2: " + score1 + " >? "
        + score2, score1 > score2);
    // Fixed message: this comparison is between d2 (score2) and d1 (score3),
    // not d3 as the original message claimed.
    assertTrue("d2 does not have a better score then d1: " + score2 + " >? "
        + score3, score2 > score3);
  } catch (Error e) {
    // Dump the full hit list to stderr before propagating, to ease debugging.
    printHits("testBooleanOptionalWithTiebreakerAndBoost", h, s);
    throw e;
  }
}
// LUCENE-4477 / LUCENE-4401:
// A DisjunctionMaxQuery over SpanTermQuery disjuncts must match the single
// indexed document exactly once (one correct term, one misspelled term).
public void testBooleanSpanQuery() throws Exception {
  Directory directory = newDirectory();
  Analyzer indexerAnalyzer = new MockAnalyzer(random());
  IndexWriterConfig config = new IndexWriterConfig(indexerAnalyzer);
  IndexWriter writer = new IndexWriter(directory, config);
  String FIELD = "content";
  Document d = new Document();
  d.add(new TextField(FIELD, "clockwork orange", Field.Store.YES));
  writer.addDocument(d);
  writer.close();
  IndexReader indexReader = DirectoryReader.open(directory);
  IndexSearcher searcher = newSearcher(indexReader);
  DisjunctionMaxQuery query = new DisjunctionMaxQuery(
      Arrays.<Query>asList(
          new SpanTermQuery(new Term(FIELD, "clockwork")),
          new SpanTermQuery(new Term(FIELD, "clckwork"))),
      1.0f);
  TopScoreDocCollector collector = TopScoreDocCollector.create(1000);
  searcher.search(query, collector);
  // Call topDocs() once instead of re-materializing it per access.
  int hits = collector.topDocs().scoreDocs.length;
  // Release resources before asserting so a failure cannot leak them.
  indexReader.close();
  directory.close();
  // Fixed argument order: JUnit expects (expected, actual).
  assertEquals(1, hits);
}
/** macro: builds a {@link TermQuery} for field {@code f} and term text {@code t}. */
protected Query tq(String f, String t) {
  final Term term = new Term(f, t);
  return new TermQuery(term);
}
/** macro: builds a {@link TermQuery} for {@code f:t} wrapped in a {@link BoostQuery} with boost {@code b}. */
protected Query tq(String f, String t, float b) {
  return new BoostQuery(tq(f, t), b);
}
/**
 * Debug helper: prints every hit's rank, score and stored "id" field to
 * stderr, preceded by a banner naming the failing test.
 */
protected void printHits(String test, ScoreDoc[] h, IndexSearcher searcher)
    throws Exception {
  System.err.println("------- " + test + " -------");
  DecimalFormat scoreFormat =
      new DecimalFormat("0.000000000", DecimalFormatSymbols.getInstance(Locale.ROOT));
  for (int rank = 0; rank < h.length; rank++) {
    Document doc = searcher.doc(h[rank].doc);
    System.err
        .println("#" + rank + ": " + scoreFormat.format(h[rank].score) + " - " + doc.get("id"));
  }
}
/**
 * Verifies that a DisjunctionMaxQuery built via the list constructor equals
 * one built incrementally with add(Query) and one built with add(Collection).
 */
public void testAdd() {
  Query matchAll = new MatchAllDocsQuery();
  Query term = new TermQuery(new Term("foo", "bar"));
  float tie = random().nextFloat();
  Query expected = new DisjunctionMaxQuery(Arrays.asList(matchAll, term), tie);

  // Adding the disjuncts one at a time must produce an equal query.
  DisjunctionMaxQuery oneByOne = new DisjunctionMaxQuery(tie);
  oneByOne.add(matchAll);
  oneByOne.add(term);
  assertEquals(expected, oneByOne);

  // Adding them as a collection must as well.
  DisjunctionMaxQuery asCollection = new DisjunctionMaxQuery(tie);
  asCollection.add(Arrays.asList(matchAll, term));
  assertEquals(expected, asCollection);
}
}
| apache-2.0 |
evoshop/evo_maa | backend/controllers/AdminUserController.php | 3171 | <?php
namespace backend\controllers;
use Yii;
use backend\models\AdminUser;
use backend\models\AdminUserSearch;
use yii\web\Controller;
use yii\web\NotFoundHttpException;
use yii\filters\VerbFilter;
/**
 * AdminUserController implements the CRUD actions for the AdminUser model.
 */
class AdminUserController extends Controller
{
    /**
     * @inheritdoc
     */
    public function behaviors()
    {
        return [
            'verbs' => [
                'class' => VerbFilter::className(),
                'actions' => [
                    // Deletion is destructive, so restrict it to POST requests.
                    'delete' => ['POST'],
                ],
            ],
        ];
    }

    /**
     * Lists all AdminUser models.
     * @return mixed
     */
    public function actionIndex()
    {
        $searchModel = new AdminUserSearch();
        $dataProvider = $searchModel->search(Yii::$app->request->queryParams);

        return $this->render('index', [
            'searchModel' => $searchModel,
            'dataProvider' => $dataProvider,
        ]);
    }

    /**
     * Displays a single AdminUser model.
     * @param integer $id
     * @return mixed
     */
    public function actionView($id)
    {
        return $this->render('view', ['model' => $this->findModel($id)]);
    }

    /**
     * Creates a new AdminUser model.
     * On success the browser is redirected to the new model's 'view' page.
     * @return mixed
     */
    public function actionCreate()
    {
        $model = new AdminUser();

        if ($model->load(Yii::$app->request->post()) && $model->save()) {
            return $this->redirect(['view', 'id' => $model->id]);
        }

        return $this->render('create', ['model' => $model]);
    }

    /**
     * Updates an existing AdminUser model.
     * On success the browser is redirected to the model's 'view' page.
     * @param integer $id
     * @return mixed
     */
    public function actionUpdate($id)
    {
        $model = $this->findModel($id);

        if ($model->load(Yii::$app->request->post()) && $model->save()) {
            return $this->redirect(['view', 'id' => $model->id]);
        }

        return $this->render('update', ['model' => $model]);
    }

    /**
     * Deletes an existing AdminUser model, then returns to the 'index' page.
     * @param integer $id
     * @return mixed
     */
    public function actionDelete($id)
    {
        $this->findModel($id)->delete();

        return $this->redirect(['index']);
    }

    /**
     * Finds an AdminUser model by its primary key.
     * @param integer $id
     * @return AdminUser the loaded model
     * @throws NotFoundHttpException if the model cannot be found
     */
    protected function findModel($id)
    {
        $model = AdminUser::findOne($id);

        if ($model === null) {
            throw new NotFoundHttpException('The requested page does not exist.');
        }

        return $model;
    }
}
| apache-2.0 |
TeamNIKaml/Budget | Budget/src/com/teamNikAml/budget/database/IncomeStatementHelper.java | 2387 | package com.teamNikAml.budget.database;
import android.content.Context;
import android.database.sqlite.SQLiteDatabase;
import android.os.AsyncTask;
import android.util.Log;
import com.teamNikAml.budget.model.Constants;
/**
 * IDBHelper implementation for the income-statement table. Each CRUD call is
 * dispatched to a background {@link AsyncTask} which (re)creates the
 * {@link DBHelper} and opens/closes the SQLite database.
 */
public class IncomeStatementHelper implements IDBHelper {
	// (Re)created by onCreate() before every database operation.
	private DBHelper dbHelper;
	private final Context context;

	public IncomeStatementHelper(Context context) {
		this.context = context;
	}

	/**
	 * Builds the DBHelper used to open the income-statement database.
	 *
	 * @return always true
	 */
	@Override
	public boolean onCreate() {
		Log.e("DBTask onCreate", "doInBackground");
		dbHelper = new DBHelper(context, 1, Constants.DB_NAME,
				Constants.INCOME_STATEMENT_DB_QUERY);
		return true;
	}

	/** Schedules an asynchronous insert. */
	@Override
	public void insert() {
		Log.e("insert Passwordhelper", "doInBackground");
		new DBTask().execute("insert");
	}

	/** Schedules an asynchronous update; the clause arguments are currently unused. */
	@Override
	public void update(String whereClause, String[] whereArgs) {
		new DBTask().execute("update");
	}

	/** Schedules an asynchronous delete; the clause arguments are currently unused. */
	@Override
	public void delete(String where, String[] args) {
		new DBTask().execute("delete");
	}

	/** Schedules an asynchronous select; the query arguments are currently unused. */
	@Override
	public void select(String[] projection, String selection,
			String[] selectionArgs, String sortOrder) {
		new DBTask().execute("select");
	}

	/** Runs a single named database operation off the UI thread. */
	private class DBTask extends AsyncTask<String, Integer, String> {
		@Override
		protected String doInBackground(String... operation) {
			String op = operation[0];
			if (op.equalsIgnoreCase("insert")) {
				Log.e("DBTask inset", "doInBackground");
				onCreate();
				SQLiteDatabase database = dbHelper.getWritableDatabase();
				database.close();
			} else if (op.equalsIgnoreCase("update")) {
				onCreate();
				Log.e("DBTask update", "doInBackground");
				SQLiteDatabase database = dbHelper.getWritableDatabase();
				database.close();
			} else if (op.equalsIgnoreCase("delete")) {
				onCreate();
				Log.e("DBTask delete", "doInBackground");
				SQLiteDatabase database = dbHelper.getWritableDatabase();
				database.close();
			} else if (op.equalsIgnoreCase("select")) {
				onCreate();
				Log.e("DBTask select", "doInBackground");
				SQLiteDatabase database = dbHelper.getReadableDatabase();
				// Bug fix: the readable database was previously left open here,
				// leaking the connection. Close it like the other branches do.
				database.close();
			} else {
				Log.e("Invalid db task", "invalid dsfsdfasdas");
			}
			return null;
		}
	}
}
| apache-2.0 |
smee/gradoop | gradoop-flink/src/main/java/org/gradoop/flink/io/impl/json/package-info.java | 753 | /*
* Copyright © 2014 - 2018 Leipzig University (Database Research Group)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Contains all classes related to json input and output to Flink.
*/
package org.gradoop.flink.io.impl.json;
| apache-2.0 |
reportportal/service-api | src/main/java/com/epam/ta/reportportal/core/filter/GetUserFilterHandler.java | 3120 | /*
* Copyright 2019 EPAM Systems
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.epam.ta.reportportal.core.filter;
import com.epam.ta.reportportal.commons.ReportPortalUser;
import com.epam.ta.reportportal.commons.querygen.Filter;
import com.epam.ta.reportportal.entity.filter.UserFilter;
import com.epam.ta.reportportal.ws.model.SharedEntity;
import com.epam.ta.reportportal.ws.model.filter.UserFilterResource;
import org.springframework.data.domain.Pageable;
import java.util.List;
/**
 * Handler for retrieving {@link UserFilter} entities and their metadata
 * (own, shared, permitted) scoped to a project and the requesting user.
 *
 * @author Aliaksei_Makayed
 */
public interface GetUserFilterHandler {
	/**
	 * Get shared and own {@link UserFilterResource} objects
	 *
	 * @param projectName Project Name
	 * @param pageable    Page request
	 * @param filter      Filter representation
	 * @param user        Report Portal User
	 * @return {@link Iterable} page of filters visible to the user
	 */
	Iterable<UserFilterResource> getPermitted(String projectName, Pageable pageable, Filter filter, ReportPortalUser user);
	/**
	 * Get own {@link UserFilterResource} objects
	 *
	 * @param projectName Project Name
	 * @param pageable    Page request
	 * @param filter      Filter representation
	 * @param user        Report Portal User
	 * @return {@link Iterable} page of filters created by the user
	 */
	Iterable<UserFilterResource> getOwn(String projectName, Pageable pageable, Filter filter, ReportPortalUser user);
	/**
	 * Get shared {@link UserFilterResource} objects
	 *
	 * @param projectName Project Name
	 * @param pageable    Page request
	 * @param filter      Filter representation
	 * @param user        Report Portal User
	 * @return {@link Iterable} page of filters shared with the project
	 */
	Iterable<UserFilterResource> getShared(String projectName, Pageable pageable, Filter filter, ReportPortalUser user);
	/**
	 * Get all {@link com.epam.ta.reportportal.entity.filter.UserFilter}'s names
	 *
	 * @param projectDetails Project details
	 * @param user           Report Portal user
	 * @param pageable       Page request
	 * @param filter         Filter representation
	 * @param isShared       Is shared: when true only shared filter names are returned
	 * @return List of {@link SharedEntity} holding the filter names
	 */
	Iterable<SharedEntity> getFiltersNames(ReportPortalUser.ProjectDetails projectDetails, Pageable pageable, Filter filter,
			ReportPortalUser user, boolean isShared);
	/**
	 * Get all
	 * {@link com.epam.ta.reportportal.ws.model.filter.UserFilterResource}
	 * objects
	 *
	 * @param ids            Filter IDs to look up
	 * @param projectDetails Project details
	 * @param user           Report Portal user
	 * @return Found filters
	 */
	List<UserFilter> getFiltersById(Long[] ids, ReportPortalUser.ProjectDetails projectDetails, ReportPortalUser user);
}
| apache-2.0 |
runmyprocess/json | src/test/java/org/runmyprocess/test/JsonParserTest.java | 5529 | package org.runmyprocess.test;
import junit.framework.TestCase;
import org.runmyprocess.json.*;
import org.runmyprocess.json.parser.DefaultParser;
import org.runmyprocess.json.parser.Parser;
import org.runmyprocess.json.parser.StrictParser;
/**
 * Unit tests for the lenient JSON parser: standard objects/arrays plus the
 * library's extensions (trailing separators, {@code <...>} sets, unquoted
 * keys, single-quoted values and '=' as a key/value separator).
 *
 * User: sgaide &amp; dboulay
 * Date: 09/01/13
 * Time: 15:48
 */
public class JsonParserTest extends TestCase {
	// --- Structural smoke tests: parsing must simply not throw. ---
	public void testParserEmptyObject() throws JSONException {
		String o = "{}";
		JSON json = JSON.fromString(o);
	}
	public void testParserEmptyArray() throws JSONException {
		String o = "[]";
		JSON json = JSON.fromString(o);
	}
	public void testParserSimpleArray() throws JSONException {
		String o = "[1,2]";
		JSON json = JSON.fromString(o);
	}
	// Trailing separators are tolerated in arrays, objects and sets.
	public void testParserSimpleArrayWithSeparator() throws JSONException {
		String o = "[1,2,]";
		JSON json = JSON.fromString(o);
	}
	public void testParserSimpleObjectWithSeparator() throws JSONException {
		String o = "{\"foo\":\"bar\",}";
		JSON json = JSON.fromString(o);
	}
	public void testParserSimpleSetWithSeparator() throws JSONException {
		String o = "<1,2,>";
		JSON json = JSON.fromString(o);
	}
	public void testParserSimpleStructureWithSeparator() throws JSONException {
		String o = "{\"foo\":<1,2,>,\"bar\":[1,2,],}";
		JSON json = JSON.fromString(o);
	}
	public void testParserSimpleObject() throws JSONException {
		String o = "{\"name\": \"Denis\"}";
		JSON json = JSON.fromString(o);
		assertEquals(((JSONObject)json).get("name").toString(),"Denis");
	}
	// Leading whitespace before the opening brace must be ignored.
	public void testParserSimpleObjectTrim() throws JSONException {
		String o = " {\"name\": \"Denis\"}";
		JSON json = JSON.fromString(o);
		assertEquals(((JSONObject)json).get("name").toString(),"Denis");
	}
	// Scientific notation (3.9e1) must parse as a double.
	public void testParserSimpleObject2() throws JSONException {
		String o = "{\"name\": \"Denis\", \"age\":3.9e1}";
		JSONObject json = JSONObject.fromString(o);
		assertEquals("Denis", json.get("name").toString());
		assertEquals(39.0, json.get("age"));
	}
	// Integers stay Long, anything fractional/exponential becomes Double.
	public void testParserArrayArithmetic() throws JSONException {
		String o = "[1,2,3,-1,-2.2,-3.4e5,4.52e-4,6.7E8,1.360,2.999999999]";
		JSONArray json = JSONArray.fromString(o);
		assertEquals(10, json.size());
		assertEquals(1L, json.get(0));
		assertEquals(-1L, json.get(3));
		assertEquals(-2.2, json.get(4));
		assertEquals(-340000.0, json.get(5));
		assertEquals(4.52e-4, json.get(6));
		assertEquals(1.36, json.get(8));
		assertEquals(2.999999999, json.get(9));
	}
	// Nested objects and mixed-type arrays (booleans, null, number, string).
	public void testParserComplexObject() throws JSONException {
		String o = "{\"name\": \"Jean\", \"age\":3.9e1, \"work_address\":{\"street\":\"rue de la paix\", \"number\":22, \"city\":\"Paris\"},\"tags\":[false,true,null,1,\"titi\"]}";
		JSONObject json = JSONObject.fromString(o);
		assertEquals("Jean", json.get("name").toString());
		assertEquals(39.0, json.get("age"));
		assertTrue(((JSON)json.get("tags")).isJSONArray());
		assertEquals(Boolean.FALSE, ((JSONArray)json.get("tags")).get(0));
		assertEquals(Boolean.TRUE, ((JSONArray)json.get("tags")).get(1));
	}
	// Loose mode accepts unquoted keys, including keys with odd characters.
	public void testUnquottedKey() {
		String q = "{\"a\":1,\"b\":2}";
		String o = "{ a : 1,b :2, 'c':3, d+c:45, d-t:67, d a b c :89, h'b:23, i\"b:12, i\\\\\":\"truc\"}";
		JSONObject strictObject = JSONObject.fromObject( q );
		JSONObject looseObject = JSONObject.fromObject( o );
		assertEquals( strictObject.getInteger("a"), looseObject.getInteger("a"));
		assertEquals( strictObject.getInteger("b"), looseObject.getInteger("b"));
	}
	// <...> literals parse to JSONSet and can be stored inside a JSONObject.
	public void testJSONSet() {
		String o = "<1,3,4,8,9,10,12>";
		String q = "<\"titi\",\"toto\",\"tata\",4,10>";
		JSONSet set1 = JSONSet.fromString(o);
		assertEquals( 7, set1.size());
		assertTrue( set1.contains(4L));
		JSONSet set2 = JSONSet.fromString(q);
		assertEquals( 5, set2.size());
		assertTrue( set2.contains("titi"));
		assertTrue( set2.contains(10L));
		JSONObject json = new JSONObject();
		json.put( "set", set1 );
		assertTrue( json.getJSONSet("set") != null);
	}
	// Single-quoted values may contain unescaped double quotes.
	public void testQuotedValue() {
		String q = "{\"a\":1,b:'2',\"c\":'toto\"titi'}";
		JSONObject json = JSONObject.fromString(q);
		assertEquals( 3, json.size());
		assertEquals( 1, (int)json.getInteger("a"));
		assertEquals( "2", json.getString("b"));
		assertEquals( "toto\"titi", json.getString("c"));
	}
	// Escaped forward slashes ("\/") must be unescaped when reading.
	public void testUrl() {
		String q = "{\"href\":\"http:\\/\\/www.ibm.com\"}";
		JSONObject json = JSONObject.fromString(q);
		assertEquals( "http://www.ibm.com", json.getString("href"));
	}
	// '=' is accepted as an alternative key/value separator.
	public void testEqualSeparator() {
		String q = "{\"a\"=1,'b'='2',\"c\":\"toto\"}";
		JSONObject json = JSONObject.fromString(q);
		assertEquals( 3, json.size());
		assertEquals( 1, (int)json.getInteger("a"));
		assertEquals( "2", json.getString("b"));
		assertEquals( "toto", json.getString("c"));
	}
	// Ampersands inside string values must survive parsing untouched.
	public void testAmpersand() {
		String q = "{'make_model':'AT&T blue'}";
		JSONObject json = JSONObject.fromString(q);
		assertEquals(1, json.size());
		assertEquals("AT&T blue", json.getString("make_model"));
	}
}
| apache-2.0 |
avianey/Versatile-HTTP-proxy | src/test/java/fr/avianey/vhp/ProxyITCase.java | 1858 | package fr.avianey.vhp;
import org.jboss.netty.handler.codec.http.DefaultHttpRequest;
import org.jboss.netty.handler.codec.http.DefaultHttpResponse;
import org.jboss.netty.handler.codec.http.HttpMethod;
import org.jboss.netty.handler.codec.http.HttpRequest;
import org.jboss.netty.handler.codec.http.HttpResponse;
import org.jboss.netty.handler.codec.http.HttpResponseStatus;
import org.jboss.netty.handler.codec.http.HttpVersion;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import com.twitter.finagle.Http;
import com.twitter.finagle.ListeningServer;
import com.twitter.finagle.Service;
import com.twitter.util.Await;
import com.twitter.util.Future;
import com.twitter.util.TimeoutException;
/**
 * Integration test: starts a Finagle HTTP server on :8081 that answers 200 OK
 * to everything, then sends a request through the proxy, which is assumed to
 * be already listening on localhost:8080 — TODO confirm the proxy is launched
 * externally before this test runs.
 */
public class ProxyITCase {
	// Backend server the proxy forwards to; started/stopped around each test.
	private ListeningServer targetServer;
	@Before
	public void startTargetServer() throws TimeoutException, InterruptedException {
		// Stub service: every request gets an HTTP 200 with no body.
		Service<HttpRequest, HttpResponse> target = new Service<HttpRequest, HttpResponse>() {
			@Override
			public Future<HttpResponse> apply(HttpRequest req) {
				return Future.value((HttpResponse) new DefaultHttpResponse(req.getProtocolVersion(), HttpResponseStatus.OK));
			}
		};
		targetServer = Http.serve(":8081", target);
		// Safety net: close the server even if the JVM exits abruptly
		// (the @After hook below is the normal shutdown path).
		Runtime.getRuntime().addShutdownHook(new Thread() {
			public void run() {
				targetServer.close();
			}
		});
	}
	@After
	public void stopTargetServer() {
		targetServer.close();
	}
	@Test
	public void shouldReachTargetThroughProxy() throws Exception {
		// A GET sent to the proxy on :8080 must reach the stub and come back 200.
		Assert.assertEquals(HttpResponseStatus.OK, Await.result(Http.newService("localhost:8080").apply(new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "nowhere"))).getStatus());
	}
}
| apache-2.0 |
phrocker/accumulo | core/src/main/java/org/apache/accumulo/core/client/admin/InstanceOperations.java | 4532 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.core.client.admin;
import java.util.List;
import java.util.Map;
import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
/**
 * Administrative operations on an Accumulo instance: system-wide configuration
 * management, tablet-server inspection (scans, compactions, liveness) and
 * class-loading checks.
 */
public interface InstanceOperations {
  /**
   * Sets an system property in zookeeper. Tablet servers will pull this setting and override the equivalent setting in accumulo-site.xml. Changes can be seen
   * using {@link #getSystemConfiguration()}
   *
   * @param property
   *          the name of a per-table property
   * @param value
   *          the value to set a per-table property to
   * @throws AccumuloException
   *           if a general error occurs
   * @throws AccumuloSecurityException
   *           if the user does not have permission
   */
  public void setProperty(final String property, final String value) throws AccumuloException, AccumuloSecurityException;
  /**
   * Removes a system property from zookeeper. Changes can be seen using {@link #getSystemConfiguration()}
   *
   * @param property
   *          the name of a per-table property
   * @throws AccumuloException
   *           if a general error occurs
   * @throws AccumuloSecurityException
   *           if the user does not have permission
   */
  public void removeProperty(final String property) throws AccumuloException, AccumuloSecurityException;
  /**
   *
   * @return A map of system properties set in zookeeper. If a property is not set in zookeeper, then it will return the value set in accumulo-site.xml on some
   *         server. If nothing is set in an accumulo-site.xml file it will return the default value for each property.
   */
  public Map<String,String> getSystemConfiguration() throws AccumuloException, AccumuloSecurityException;
  /**
   *
   * @return A map of system properties set in accumulo-site.xml on some server. If nothing is set in an accumulo-site.xml file it will return the default value
   *         for each property.
   */
  public Map<String,String> getSiteConfiguration() throws AccumuloException, AccumuloSecurityException;
  /**
   * List the currently active tablet servers participating in the accumulo instance
   *
   * @return A list of currently active tablet servers.
   */
  public List<String> getTabletServers();
  /**
   * List the active scans on tablet server.
   *
   * @param tserver
   *          The tablet server address should be of the form {@code <ip address>:<port>}
   * @return A list of active scans on tablet server.
   */
  public List<ActiveScan> getActiveScans(String tserver) throws AccumuloException, AccumuloSecurityException;
  /**
   * List the active compaction running on a tablet server
   *
   * @param tserver
   *          The tablet server address should be of the form {@code <ip address>:<port>}
   * @return the list of active compactions
   * @since 1.5.0
   */
  public List<ActiveCompaction> getActiveCompactions(String tserver) throws AccumuloException, AccumuloSecurityException;
  /**
   * Throws an exception if a tablet server can not be contacted.
   *
   * @param tserver
   *          The tablet server address should be of the form {@code <ip address>:<port>}
   * @throws AccumuloException
   *           if the tablet server cannot be contacted
   * @since 1.5.0
   */
  public void ping(String tserver) throws AccumuloException;
  /**
   * Test to see if the instance can load the given class as the given type. This check does not consider per table classpaths, see
   * {@link TableOperations#testClassLoad(String, String, String)}
   *
   * @return true if the instance can load the given class as the given type, false otherwise
   */
  public boolean testClassLoad(final String className, final String asTypeName) throws AccumuloException, AccumuloSecurityException;
}
| apache-2.0 |
minghuascode/pyj | examples/addonsgallery/TooltipTab.py | 1370 | from pyjamas.ui.Sink import Sink, SinkInfo
from pyjamas.ui.Image import Image
from pyjamas.ui.HTML import HTML
from pyjamas.ui.VerticalPanel import VerticalPanel
from pyjamas.ui.HorizontalPanel import HorizontalPanel
from pyjamas.ui.Tooltip import TooltipListener
class TooltipTab(Sink):
    """Demo tab showing TooltipListener attached to images and an HTML widget."""

    def __init__(self):
        Sink.__init__(self)
        # Two images, each with a tooltip that echoes its own URL.
        img = Image("images/num1.png")
        img.addMouseListener(TooltipListener("An image: " + img.getUrl()))
        img2 = Image("images/num2.png")
        img2.addMouseListener(TooltipListener("An image: " + img2.getUrl()))
        # An HTML fragment with a tooltip of its own.
        html = HTML("Some <i>HTML</i> text.")
        html.addMouseListener(TooltipListener("An HTML component."))
        # Images side by side on top, HTML below.
        panel_h = HorizontalPanel()
        panel_h.add(img)
        panel_h.add(img2)
        panel_h.setSpacing(8)
        panel = VerticalPanel()
        panel.add(panel_h)
        panel.add(html)
        panel.setSpacing(8)
        self.setWidget(panel)

    def onShow(self):
        # Nothing to refresh when the tab becomes visible.
        pass
def init():
    """Build the SinkInfo entry describing the Tooltip demo tab."""
    description = "<b>Tooltip popup component</b><p>Shows up after 1 second, hides after 5 seconds. Once activated, other tooltips show up immediately."
    credit = r"<br><br>Originally by Alexei Sokolov at <a href=\"http://gwt.components.googlepages.com\">gwt.components.googlepages.com</a>"
    return SinkInfo("Tooltip", description + credit, TooltipTab)
| apache-2.0 |
pepstock-org/Charba | src/org/pepstock/charba/client/geo/enums/Mode.java | 1473 | /**
Copyright 2017 Andrea "Stock" Stocchero
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.pepstock.charba.client.geo.enums;
import org.pepstock.charba.client.commons.Key;
/**
 * Operation modes for the scale: with {@code AREA} the area grows linearly,
 * with {@code RADIUS} the radius does.
 *
 * @author Andrea "Stock" Stocchero
 */
public enum Mode implements Key
{
	/** The area is linearly increasing. */
	AREA("area"),
	/** Uses the radius as is. */
	RADIUS("radius");

	// value of the property as stored in the native object
	private final String propertyValue;

	/**
	 * Creates the mode with the property value to use in the native object.
	 *
	 * @param propertyValue value of property name
	 */
	private Mode(String propertyValue) {
		this.propertyValue = propertyValue;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.pepstock.charba.client.commons.Key#value()
	 */
	@Override
	public String value() {
		return propertyValue;
	}
}
| apache-2.0 |
tkao1000/pinot | thirdeye/thirdeye-pinot/src/main/java/com/linkedin/thirdeye/detector/driver/FailureEmailConfiguration.java | 1228 | package com.linkedin.thirdeye.detector.driver;
/**
 * Bean holding the SMTP settings used to send failure notification emails.
 * All fields are null until set, except the port which defaults to 25.
 */
public class FailureEmailConfiguration {
	/** Key under which this configuration is stored in the job data map. */
	public static final String FAILURE_EMAIL_CONFIG_KEY = "FAILURE_EMAIL_CONFIG";

	private String fromAddress;
	private String toAddresses;
	private String smtpHost;
	private int smtpPort = 25; // standard SMTP port by default
	private String smtpUser;
	private String smtpPassword;

	/** @return the sender address for failure emails */
	public String getFromAddress() {
		return this.fromAddress;
	}

	public void setFromAddress(String fromAddress) {
		this.fromAddress = fromAddress;
	}

	/** @return the recipient address list (comma separated) */
	public String getToAddresses() {
		return this.toAddresses;
	}

	public void setToAddresses(String toAddresses) {
		this.toAddresses = toAddresses;
	}

	/** @return the SMTP server host name */
	public String getSmtpHost() {
		return this.smtpHost;
	}

	public void setSmtpHost(String smtpHost) {
		this.smtpHost = smtpHost;
	}

	/** @return the SMTP server port (default 25) */
	public int getSmtpPort() {
		return this.smtpPort;
	}

	public void setSmtpPort(int smtpPort) {
		this.smtpPort = smtpPort;
	}

	/** @return the SMTP user name, or null for unauthenticated servers */
	public String getSmtpUser() {
		return this.smtpUser;
	}

	public void setSmtpUser(String smtpUser) {
		this.smtpUser = smtpUser;
	}

	/** @return the SMTP password, or null for unauthenticated servers */
	public String getSmtpPassword() {
		return this.smtpPassword;
	}

	public void setSmtpPassword(String smtpPassword) {
		this.smtpPassword = smtpPassword;
	}
}
| apache-2.0 |
cooosuper/wx-ptl-zhc | WXPortal/src/org/wxportal/util/SignUtil.java | 1826 | package org.wxportal.util;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
/**
 * WeChat-style request signature verification: sorts {token, timestamp, nonce}
 * lexicographically, concatenates them, SHA-1 hashes the result and compares
 * the hex digest with the signature sent by the platform.
 */
public class SignUtil {
	// Shared token configured by the application; part of the signed content.
	private static String token;

	public static String getToken() {
		return token;
	}

	public static void setToken(String token) {
		SignUtil.token = token;
	}

	/**
	 * Checks whether the given signature matches SHA-1(sort(token, timestamp, nonce)).
	 *
	 * @param signature hex digest supplied by the caller (case-insensitive)
	 * @param timestamp request timestamp
	 * @param nonce     request nonce
	 * @return true if the computed digest matches the signature
	 */
	public static boolean checkSignature(String signature, String timestamp, String nonce) {
		String[] arr = new String[] { token, timestamp, nonce };
		Arrays.sort(arr);
		StringBuilder content = new StringBuilder();
		for (String part : arr) {
			content.append(part);
		}
		String tmpStr = null;
		try {
			MessageDigest md = MessageDigest.getInstance("SHA-1");
			tmpStr = byteToStr(md.digest(content.toString().getBytes()));
		} catch (NoSuchAlgorithmException e) {
			// SHA-1 is guaranteed by every JDK; if it is somehow missing,
			// fall through and report a failed check.
			e.printStackTrace();
		}
		// Robustness fix: a null signature now fails the check instead of
		// throwing a NullPointerException.
		return tmpStr != null && signature != null && tmpStr.equals(signature.toUpperCase());
	}

	/**
	 * Converts a byte array to an upper-case hex string.
	 * (Replaces the previous O(n^2) String-concatenation loop with a StringBuilder.)
	 */
	private static String byteToStr(byte[] byteArray) {
		StringBuilder hex = new StringBuilder(byteArray.length * 2);
		for (byte b : byteArray) {
			hex.append(String.format("%02X", b));
		}
		return hex.toString();
	}
}
} | apache-2.0 |
CodeRushing/tidb | store/tikv/2pc_test.go | 12001 | // Copyright 2016 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package tikv
import (
"math"
"math/rand"
"strings"
"time"
"github.com/juju/errors"
. "github.com/pingcap/check"
"github.com/pingcap/kvproto/pkg/kvrpcpb"
"github.com/pingcap/tidb/store/mockstore/mocktikv"
"github.com/pingcap/tidb/store/tikv/tikvrpc"
"golang.org/x/net/context"
)
// testCommitterSuite exercises the two-phase committer against a mocked
// TiKV cluster whose key space is split into regions at "a", "b" and "c".
type testCommitterSuite struct {
	OneByOneSuite
	cluster *mocktikv.Cluster
	store   *tikvStore
}

// Register the suite with gocheck.
var _ = Suite(&testCommitterSuite{})
// SetUpTest builds a fresh mock cluster, MVCC store and tikvStore for every
// test, enables local transaction latches and shortens the commit backoff so
// failing commits do not stall the test run.
func (s *testCommitterSuite) SetUpTest(c *C) {
	s.cluster = mocktikv.NewCluster()
	// Three split points give four regions, so multi-key transactions
	// exercise cross-region prewrite/commit paths.
	mocktikv.BootstrapWithMultiRegions(s.cluster, []byte("a"), []byte("b"), []byte("c"))
	mvccStore, err := mocktikv.NewMVCCLevelDB("")
	c.Assert(err, IsNil)
	client := mocktikv.NewRPCClient(s.cluster, mvccStore)
	pdCli := &codecPDClient{mocktikv.NewPDClient(s.cluster)}
	spkv := NewMockSafePointKV()
	store, err := newTikvStore("mocktikv-store", pdCli, spkv, client, false)
	c.Assert(err, IsNil)
	store.EnableTxnLocalLatches(1024000)
	s.store = store
	// Lowered from the production default; restored in TearDownSuite.
	CommitMaxBackoff = 2000
}
// TearDownSuite restores the production commit backoff and releases the store.
func (s *testCommitterSuite) TearDownSuite(c *C) {
	CommitMaxBackoff = 20000
	s.store.Close()
	s.OneByOneSuite.TearDownSuite(c)
}
// begin starts a new transaction on the suite's store, failing the test on error.
func (s *testCommitterSuite) begin(c *C) *tikvTxn {
	txn, err := s.store.Begin()
	c.Assert(err, IsNil)
	return txn.(*tikvTxn)
}
// checkValues asserts, inside a fresh snapshot transaction, that every
// key in m currently reads back as the expected value.
func (s *testCommitterSuite) checkValues(c *C, m map[string]string) {
	txn := s.begin(c)
	for k, v := range m {
		val, err := txn.Get([]byte(k))
		c.Assert(err, IsNil)
		c.Assert(string(val), Equals, v)
	}
}
// mustCommit writes all pairs in m in a single transaction, commits it and
// then verifies the values are readable, failing the test on any error.
func (s *testCommitterSuite) mustCommit(c *C, m map[string]string) {
	txn := s.begin(c)
	for k, v := range m {
		err := txn.Set([]byte(k), []byte(v))
		c.Assert(err, IsNil)
	}
	err := txn.Commit(context.Background())
	c.Assert(err, IsNil)
	s.checkValues(c, m)
}
// randKV returns a random key/value pair of the requested lengths, with
// every byte drawn uniformly from the alphabet "abc".
func randKV(keyLen, valLen int) (string, string) {
	const letters = "abc"
	pick := func(n int) string {
		buf := make([]byte, n)
		for i := range buf {
			buf[i] = letters[rand.Intn(len(letters))]
		}
		return string(buf)
	}
	return pick(keyLen), pick(valLen)
}
// TestCommitRollback checks that a transaction which loses a write conflict
// (another transaction commits "c" underneath it) fails to commit and leaves
// none of its writes visible.
func (s *testCommitterSuite) TestCommitRollback(c *C) {
	s.mustCommit(c, map[string]string{
		"a": "a",
		"b": "b",
		"c": "c",
	})
	txn := s.begin(c)
	txn.Set([]byte("a"), []byte("a1"))
	txn.Set([]byte("b"), []byte("b1"))
	txn.Set([]byte("c"), []byte("c1"))
	// Conflicting commit of "c" after txn started but before it commits.
	s.mustCommit(c, map[string]string{
		"c": "c2",
	})
	err := txn.Commit(context.Background())
	c.Assert(err, NotNil)
	// txn must have been rolled back entirely: a/b keep their old values.
	s.checkValues(c, map[string]string{
		"a": "a",
		"b": "b",
		"c": "c2",
	})
}
// TestPrewriteRollback drives the two-phase commit manually: after txn1
// prewrites its locks, a reader must still see the old values; a second
// prewrite attempt may conflict with lock resolution (in which case the whole
// transaction is retried), and committing the primary key makes all of txn1's
// writes visible.
func (s *testCommitterSuite) TestPrewriteRollback(c *C) {
	s.mustCommit(c, map[string]string{
		"a": "a0",
		"b": "b0",
	})
	ctx := context.Background()
	txn1 := s.begin(c)
	err := txn1.Set([]byte("a"), []byte("a1"))
	c.Assert(err, IsNil)
	err = txn1.Set([]byte("b"), []byte("b1"))
	c.Assert(err, IsNil)
	committer, err := newTwoPhaseCommitter(txn1, 0)
	c.Assert(err, IsNil)
	err = committer.prewriteKeys(NewBackoffer(ctx, prewriteMaxBackoff), committer.keys)
	c.Assert(err, IsNil)
	// A concurrent reader resolves/bypasses the locks and still sees "a0".
	txn2 := s.begin(c)
	v, err := txn2.Get([]byte("a"))
	c.Assert(err, IsNil)
	c.Assert(v, BytesEquals, []byte("a0"))
	err = committer.prewriteKeys(NewBackoffer(ctx, prewriteMaxBackoff), committer.keys)
	if err != nil {
		// Retry: the reader's lock resolution rolled back the first
		// prewrite, so start the transaction over from scratch.
		txn1 = s.begin(c)
		err = txn1.Set([]byte("a"), []byte("a1"))
		c.Assert(err, IsNil)
		err = txn1.Set([]byte("b"), []byte("b1"))
		c.Assert(err, IsNil)
		committer, err = newTwoPhaseCommitter(txn1, 0)
		c.Assert(err, IsNil)
		err = committer.prewriteKeys(NewBackoffer(ctx, prewriteMaxBackoff), committer.keys)
		c.Assert(err, IsNil)
	}
	committer.commitTS, err = s.store.oracle.GetTimestamp(ctx)
	c.Assert(err, IsNil)
	// Committing the primary key "a" commits the transaction; the secondary
	// key "b" becomes readable through lock resolution.
	err = committer.commitKeys(NewBackoffer(ctx, CommitMaxBackoff), [][]byte{[]byte("a")})
	c.Assert(err, IsNil)
	txn3 := s.begin(c)
	v, err = txn3.Get([]byte("b"))
	c.Assert(err, IsNil)
	c.Assert(v, BytesEquals, []byte("b1"))
}
// TestContextCancel verifies that cancelling the backoffer's context before
// prewrite makes prewriteKeys return context.Canceled as the root cause.
func (s *testCommitterSuite) TestContextCancel(c *C) {
txn1 := s.begin(c)
err := txn1.Set([]byte("a"), []byte("a1"))
c.Assert(err, IsNil)
err = txn1.Set([]byte("b"), []byte("b1"))
c.Assert(err, IsNil)
committer, err := newTwoPhaseCommitter(txn1, 0)
c.Assert(err, IsNil)
bo := NewBackoffer(context.Background(), prewriteMaxBackoff)
backoffer, cancel := bo.Fork()
cancel() // cancel the context
err = committer.prewriteKeys(backoffer, committer.keys)
c.Assert(errors.Cause(err), Equals, context.Canceled)
}
// TestContextCancelRetryable ensures that when a commit fails due to a
// combination of a lock conflict and a retryable region error, the returned
// error is marked retryable so the upper layer can retry the transaction.
func (s *testCommitterSuite) TestContextCancelRetryable(c *C) {
txn1, txn2, txn3 := s.begin(c), s.begin(c), s.begin(c)
// txn1 locks "b"
err := txn1.Set([]byte("b"), []byte("b1"))
c.Assert(err, IsNil)
committer, err := newTwoPhaseCommitter(txn1, 0)
c.Assert(err, IsNil)
err = committer.prewriteKeys(NewBackoffer(context.Background(), prewriteMaxBackoff), committer.keys)
c.Assert(err, IsNil)
// txn3 writes "c"
err = txn3.Set([]byte("c"), []byte("c3"))
c.Assert(err, IsNil)
err = txn3.Commit(context.Background())
c.Assert(err, IsNil)
// txn2 writes "a"(PK), "b", "c" on different regions.
// "c" will return a retryable error.
// "b" will get a Locked error first, then the context must be canceled after backoff for lock.
err = txn2.Set([]byte("a"), []byte("a2"))
c.Assert(err, IsNil)
err = txn2.Set([]byte("b"), []byte("b2"))
c.Assert(err, IsNil)
err = txn2.Set([]byte("c"), []byte("c2"))
c.Assert(err, IsNil)
err = txn2.Commit(context.Background())
c.Assert(err, NotNil)
c.Assert(strings.Contains(err.Error(), txnRetryableMark), IsTrue)
}
// mustGetRegionID returns the ID of the region that covers key, failing the
// test if the region cannot be located.
func (s *testCommitterSuite) mustGetRegionID(c *C, key []byte) uint64 {
loc, err := s.store.regionCache.LocateKey(NewBackoffer(context.Background(), getMaxBackoff), key)
c.Assert(err, IsNil)
return loc.Region.id
}
// isKeyLocked issues a raw Get at the store's current version and reports
// whether the response's key error carries lock information, i.e. whether the
// key is currently locked by an in-flight transaction.
func (s *testCommitterSuite) isKeyLocked(c *C, key []byte) bool {
ver, err := s.store.CurrentVersion()
c.Assert(err, IsNil)
bo := NewBackoffer(context.Background(), getMaxBackoff)
req := &tikvrpc.Request{
Type: tikvrpc.CmdGet,
Get: &kvrpcpb.GetRequest{
Key: key,
Version: ver.Ver,
},
}
loc, err := s.store.regionCache.LocateKey(bo, key)
c.Assert(err, IsNil)
resp, err := s.store.SendReq(bo, req, loc.Region, readTimeoutShort)
c.Assert(err, IsNil)
c.Assert(resp.Get, NotNil)
keyErr := resp.Get.GetError()
// A non-nil Locked field means some transaction holds a lock on the key.
return keyErr.GetLocked() != nil
}
// TestPrewriteCancel checks that when one prewrite branch fails fast, the
// slower in-flight branches are cancelled and their locks are cleaned up
// within a reasonable time (polled below for up to ~500ms).
func (s *testCommitterSuite) TestPrewriteCancel(c *C) {
// Setup region delays for key "b" and "c".
delays := map[uint64]time.Duration{
s.mustGetRegionID(c, []byte("b")): time.Millisecond * 10,
s.mustGetRegionID(c, []byte("c")): time.Millisecond * 20,
}
s.store.client = &slowClient{
Client: s.store.client,
regionDelays: delays,
}
txn1, txn2 := s.begin(c), s.begin(c)
// txn2 writes "b"
err := txn2.Set([]byte("b"), []byte("b2"))
c.Assert(err, IsNil)
err = txn2.Commit(context.Background())
c.Assert(err, IsNil)
// txn1 writes "a"(PK), "b", "c" on different regions.
// "b" will return an error and cancel commit.
err = txn1.Set([]byte("a"), []byte("a1"))
c.Assert(err, IsNil)
err = txn1.Set([]byte("b"), []byte("b1"))
c.Assert(err, IsNil)
err = txn1.Set([]byte("c"), []byte("c1"))
c.Assert(err, IsNil)
err = txn1.Commit(context.Background())
c.Assert(err, NotNil)
// "c" should be cleaned up in reasonable time.
for i := 0; i < 50; i++ {
if !s.isKeyLocked(c, []byte("c")) {
return
}
time.Sleep(time.Millisecond * 10)
}
c.Fail()
}
// slowClient wraps rpcClient and makes some regions respond with delay.
type slowClient struct {
Client
// regionDelays maps a region ID to the artificial delay injected before
// forwarding any request targeting that region.
regionDelays map[uint64]time.Duration
}
// SendReq sleeps for the configured delay when the request targets a delayed
// region, then forwards the request to the wrapped client.
// NOTE(review): this method is named SendReq but delegates to SendRequest;
// verify the embedded Client interface's method name — if the interface
// declares SendRequest, this wrapper is never reached through the interface.
func (c *slowClient) SendReq(ctx context.Context, addr string, req *tikvrpc.Request, timeout time.Duration) (*tikvrpc.Response, error) {
for id, delay := range c.regionDelays {
reqCtx := &req.Context
if reqCtx.GetRegionId() == id {
time.Sleep(delay)
}
}
return c.Client.SendRequest(ctx, addr, req, timeout)
}
// TestIllegalTso verifies that a transaction whose start timestamp is forged
// to be in the far future (MaxUint64) is rejected at commit time.
func (s *testCommitterSuite) TestIllegalTso(c *C) {
txn := s.begin(c)
data := map[string]string{
"name": "aa",
"age": "12",
}
for k, v := range data {
err := txn.Set([]byte(k), []byte(v))
c.Assert(err, IsNil)
}
// make start ts bigger.
txn.startTS = uint64(math.MaxUint64)
err := txn.Commit(context.Background())
c.Assert(err, NotNil)
}
// errMsgMustContain asserts that err's message contains the substring msg.
func errMsgMustContain(c *C, err error, msg string) {
	contained := strings.Contains(err.Error(), msg)
	c.Assert(contained, IsTrue)
}
// TestCommitBeforePrewrite cleans up a transaction's keys before they are
// prewritten, and checks that a subsequent prewrite then fails with a write
// conflict (the cleanup wrote a rollback record at the start timestamp).
func (s *testCommitterSuite) TestCommitBeforePrewrite(c *C) {
txn := s.begin(c)
err := txn.Set([]byte("a"), []byte("a1"))
c.Assert(err, IsNil)
commiter, err := newTwoPhaseCommitter(txn, 0)
c.Assert(err, IsNil)
ctx := context.Background()
err = commiter.cleanupKeys(NewBackoffer(ctx, cleanupMaxBackoff), commiter.keys)
c.Assert(err, IsNil)
err = commiter.prewriteKeys(NewBackoffer(ctx, prewriteMaxBackoff), commiter.keys)
c.Assert(err, NotNil)
errMsgMustContain(c, err, "write conflict")
}
// TestPrewritePrimaryKeyFailed simulates a transaction whose primary-key
// prewrite fails because of a conflicting earlier commit, and verifies that
// the whole transaction rolls back, that repeated cleanup is idempotent, and
// that later transactions can still write the key.
func (s *testCommitterSuite) TestPrewritePrimaryKeyFailed(c *C) {
// commit (a,a1)
txn1 := s.begin(c)
err := txn1.Set([]byte("a"), []byte("a1"))
c.Assert(err, IsNil)
err = txn1.Commit(context.Background())
c.Assert(err, IsNil)
// check a
txn := s.begin(c)
v, err := txn.Get([]byte("a"))
c.Assert(err, IsNil)
c.Assert(v, BytesEquals, []byte("a1"))
// set txn2's startTs before txn1's
txn2 := s.begin(c)
txn2.startTS = txn1.startTS - 1
err = txn2.Set([]byte("a"), []byte("a2"))
c.Assert(err, IsNil)
err = txn2.Set([]byte("b"), []byte("b2"))
c.Assert(err, IsNil)
// prewrite:primary a failed, b success
err = txn2.Commit(context.Background())
c.Assert(err, NotNil)
// txn2 failed with a rollback for record a.
txn = s.begin(c)
v, err = txn.Get([]byte("a"))
c.Assert(err, IsNil)
c.Assert(v, BytesEquals, []byte("a1"))
_, err = txn.Get([]byte("b"))
errMsgMustContain(c, err, "key not exist")
// clean again, shouldn't be failed when a rollback already exist.
ctx := context.Background()
commiter, err := newTwoPhaseCommitter(txn2, 0)
c.Assert(err, IsNil)
err = commiter.cleanupKeys(NewBackoffer(ctx, cleanupMaxBackoff), commiter.keys)
c.Assert(err, IsNil)
// check the data after rollback twice.
txn = s.begin(c)
v, err = txn.Get([]byte("a"))
c.Assert(err, IsNil)
c.Assert(v, BytesEquals, []byte("a1"))
// update data in a new txn, should be success.
err = txn.Set([]byte("a"), []byte("a3"))
c.Assert(err, IsNil)
err = txn.Commit(context.Background())
c.Assert(err, IsNil)
// check value
txn = s.begin(c)
v, err = txn.Get([]byte("a"))
c.Assert(err, IsNil)
c.Assert(v, BytesEquals, []byte("a3"))
}
// TestWrittenKeysOnConflict splits the keyspace into two regions and repeatedly
// provokes a write conflict, checking that the keys actually written before the
// conflict are collected and cleaned up, so that later reads of the second
// region's key ("y1") are not stalled resolving stale locks.
func (s *testCommitterSuite) TestWrittenKeysOnConflict(c *C) {
// This test checks that when there is a write conflict, written keys is collected,
// so we can use it to clean up keys.
region, _ := s.cluster.GetRegionByKey([]byte("x"))
newRegionID := s.cluster.AllocID()
newPeerID := s.cluster.AllocID()
s.cluster.Split(region.Id, newRegionID, []byte("y"), []uint64{newPeerID}, newPeerID)
var totalTime time.Duration
for i := 0; i < 10; i++ {
txn1 := s.begin(c)
txn2 := s.begin(c)
// txn2 commits "x1" first, making txn1's later write to "x1" conflict.
txn2.Set([]byte("x1"), []byte("1"))
commiter2, err := newTwoPhaseCommitter(txn2, 2)
c.Assert(err, IsNil)
err = commiter2.execute(context.Background())
c.Assert(err, IsNil)
txn1.Set([]byte("x1"), []byte("1"))
txn1.Set([]byte("y1"), []byte("2"))
commiter1, err := newTwoPhaseCommitter(txn1, 2)
c.Assert(err, IsNil)
err = commiter1.execute(context.Background())
c.Assert(err, NotNil)
// Wait for the asynchronous cleanup of txn1's written keys to finish.
commiter1.cleanWg.Wait()
txn3 := s.begin(c)
start := time.Now()
txn3.Get([]byte("y1"))
totalTime += time.Since(start)
txn3.Commit(context.Background())
}
// If locks were left behind, lock resolution would blow past this budget.
c.Assert(totalTime, Less, time.Millisecond*200)
}
| apache-2.0 |
TpSr52/allure2 | allure-generator/src/main/java/io/qameta/allure/DefaultConfiguration.java | 1498 | package io.qameta.allure;
import io.qameta.allure.core.Configuration;
import io.qameta.allure.core.Plugin;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
/**
 * Default implementation of {@link Configuration}.
 *
 * <p>Plugins are stored as-is and exposed read-only; all other lookups
 * ({@link #getAggregators()}, {@link #getReaders()}, {@link #getContext(Class)})
 * are type-based queries over the registered extensions.
 *
 * @since 2.0
 */
public class DefaultConfiguration implements Configuration {

    /** Registered extensions, queried by type in the getters below. */
    private final List<Extension> extensions;

    /** Report plugins exposed through {@link #getPlugins()}. */
    private final List<Plugin> plugins;

    public DefaultConfiguration(final List<Extension> extensions,
                                final List<Plugin> plugins) {
        this.extensions = extensions;
        this.plugins = plugins;
    }

    @Override
    public List<Plugin> getPlugins() {
        // Read-only view; callers must not mutate the configuration.
        return Collections.unmodifiableList(plugins);
    }

    @Override
    public List<Aggregator> getAggregators() {
        return getExtensionsByType(Aggregator.class);
    }

    @Override
    public List<Reader> getReaders() {
        return getExtensionsByType(Reader.class);
    }

    @Override
    public <T> Optional<T> getContext(final Class<T> contextType) {
        return extensions.stream()
                .filter(contextType::isInstance)
                .map(contextType::cast)
                .findFirst();
    }

    /**
     * Returns all registered extensions assignable to the given type.
     * Shared by {@link #getAggregators()} and {@link #getReaders()}, which
     * previously duplicated this filter/map/collect pipeline.
     */
    private <T> List<T> getExtensionsByType(final Class<T> type) {
        return extensions.stream()
                .filter(type::isInstance)
                .map(type::cast)
                .collect(Collectors.toList());
    }
}
| apache-2.0 |
nagyistoce/camunda-tasklist-ui | client/scripts/camunda-tasklist-ui.js | 5891 | define('camunda-tasklist-ui', [
'camunda-commons-ui',
'camunda-bpm-sdk-js',
'angular-data-depend',
'scripts/config/date',
'scripts/config/routes',
'scripts/config/locales',
'scripts/config/tooltip',
'scripts/config/uris',
'scripts/controller/cam-tasklist-app-ctrl',
'scripts/controller/cam-tasklist-view-ctrl',
'scripts/services/cam-tasklist-assign-notification',
'scripts/services/cam-tasklist-configuration',
'scripts/user/index',
'scripts/variable/index',
'scripts/tasklist/index',
'scripts/task/index',
'scripts/process/index',
'scripts/navigation/index',
'scripts/form/index',
'scripts/filter/index',
'scripts/api/index',
'text!scripts/index.html'
], function() {
'use strict';
/**
* @namespace cam
*/
/**
* @module cam.tasklist
*/
var pluginPackages = window.PLUGIN_PACKAGES || [];
var pluginDependencies = window.PLUGIN_DEPENDENCIES || [];
require.config({
packages: pluginPackages
});
var tasklistApp;
var deps = [
'camunda-commons-ui'
].concat(pluginDependencies.map(function(plugin) {
return plugin.requirePackageName;
}));
// Manually bootstraps the AngularJS application on the document once the DOM
// is ready, then moves focus to the first [autofocus] element (after a short
// delay so the view has rendered).
function bootstrapApp() {
var angular = require('angular');
var $ = angular.element;
$(document).ready(function() {
angular.bootstrap(document, [
'cam.tasklist',
'cam.embedded.forms',
'cam.tasklist.custom'
]);
setTimeout(function() {
var $aufocused = $('[autofocus]');
if ($aufocused.length) {
$aufocused[0].focus();
}
}, 300);
});
}
// Callback invoked once all AMD dependencies are loaded: assembles the
// 'cam.tasklist' AngularJS module, wires configuration read from the <base>
// tag, loads optional customer scripts, and finally bootstraps the app.
function loaded() {
var angular = require('angular');
// NOTE(review): '$' appears unused inside this function — confirm before removing.
var $ = angular.element;
// Reads URI configuration attributes off the document's <base> tag.
function parseUriConfig() {
var $baseTag = angular.element('base');
var config = {};
var names = ['href', 'app-root', 'admin-api', 'engine-api'];
for(var i = 0; i < names.length; i++) {
config[names[i]] = $baseTag.attr(names[i]);
}
return config;
}
// AngularJS module dependencies: core modules plus every plugin's ngModuleName.
var ngDeps = [
'cam.commons',
'pascalprecht.translate',
'ngRoute',
'dataDepend',
require('scripts/user/index').name,
require('scripts/variable/index').name,
require('scripts/tasklist/index').name,
require('scripts/task/index').name,
require('scripts/process/index').name,
require('scripts/navigation/index').name,
require('scripts/form/index').name,
require('scripts/filter/index').name,
require('scripts/api/index').name,
].concat(pluginDependencies.map(function(el){
return el.ngModuleName;
}));
var uriConfig = parseUriConfig();
tasklistApp = angular.module('cam.tasklist', ngDeps);
tasklistApp.factory('assignNotification', require('scripts/services/cam-tasklist-assign-notification'));
tasklistApp.provider('configuration', require('scripts/services/cam-tasklist-configuration'));
require('scripts/config/locales')(tasklistApp, uriConfig['app-root']);
require('scripts/config/uris')(tasklistApp, uriConfig);
tasklistApp.config(require('scripts/config/routes'));
tasklistApp.config(require('scripts/config/date'));
tasklistApp.config(require('scripts/config/tooltip'));
tasklistApp.controller('camTasklistAppCtrl', require('scripts/controller/cam-tasklist-app-ctrl'));
tasklistApp.controller('camTasklistViewCtrl', require('scripts/controller/cam-tasklist-view-ctrl'));
// The `cam.tasklist` AngularJS module is now available but not yet bootstraped,
// it is the right moment to load plugins
if (typeof window.camTasklistConf !== 'undefined' && window.camTasklistConf.customScripts) {
var custom = window.camTasklistConf.customScripts || {};
// copy the relevant RequireJS configuration in a empty object
// see: http://requirejs.org/docs/api.html#config
var conf = {};
[
'baseUrl',
'paths',
'bundles',
'shim',
'map',
'config',
'packages',
// 'nodeIdCompat',
'waitSeconds',
'context',
// 'deps', // not relevant in this case
'callback',
'enforceDefine',
'xhtml',
'urlArgs',
'scriptType'
// 'skipDataMain' // not relevant either
].forEach(function (prop) {
if (custom[prop]) {
conf[prop] = custom[prop];
}
});
// configure RequireJS
require.config(conf);
// load the dependencies and bootstrap the AngularJS application
require(custom.deps || [], function() {
// create a AngularJS module (with possible AngularJS module dependencies)
// on which the custom scripts can register their
// directives, controllers, services and all when loaded
angular.module('cam.tasklist.custom', custom.ngDeps);
bootstrapApp.apply(this, arguments);
});
}
else {
// for consistency, also create a empty module
angular.module('cam.tasklist.custom', []);
// make sure that we are at the end of the require-js callback queue.
// Why? => the plugins will also execute require(..) which will place new
// entries into the queue. if we bootstrap the angular app
// synchronously, the plugins' require callbacks will not have been
// executed yet and the angular modules provided by those plugins will
// not have been defined yet. Placing a new require call here will put
// the bootstrapping of the angular app at the end of the queue
require([], function() {
bootstrapApp();
});
}
}
/* live-reload
// loads livereload client library (without breaking other scripts execution)
require(['jquery'], function($) {
$('body').append('<script src="//' + location.hostname + ':LIVERELOAD_PORT/livereload.js?snipver=1"></script>');
});
/* */
// and load the dependencies
require(deps, loaded);
});
| apache-2.0 |
edde-framework/edde-framework | src/Edde/Common/Protocol/Request/Response.php | 244 | <?php
declare(strict_types=1);
namespace Edde\Common\Protocol\Request;
use Edde\Common\Protocol\Element;
/**
 * Protocol element representing a response; wraps the parent Element with the
 * fixed type string 'response'.
 */
class Response extends Element {
/**
 * @param string|null $id optional identifier passed through to Element
 */
public function __construct(string $id = null) {
parent::__construct('response', $id);
}
}
| apache-2.0 |
Y1Ying/Algorithms | src/string/DictionaryString.java | 1295 | package string;
import java.util.ArrayList;
import java.util.TreeSet;
/**
 * Computes all distinct permutations of a string's characters in
 * lexicographic (dictionary) order. For example, the input "abc" yields
 * abc, acb, bac, bca, cab and cba.
 */
public class DictionaryString {

    /**
     * Returns all distinct permutations of {@code str}, sorted
     * lexicographically. Duplicate characters in the input produce each
     * distinct arrangement only once.
     *
     * @param str the input string; may be {@code null} or empty
     * @return sorted list of distinct permutations; empty for null/empty input
     */
    public static ArrayList<String> Permutation(String str) {
        ArrayList<String> result = new ArrayList<String>();
        if (str == null || str.length() == 0) {
            return result;
        }
        // TreeSet both de-duplicates repeated arrangements and keeps the
        // collected permutations in natural (lexicographic) order.
        TreeSet<String> distinct = new TreeSet<>();
        Permutation(str.toCharArray(), 0, distinct);
        result.addAll(distinct);
        return result;
    }

    /**
     * Recursive helper: fixes each candidate character at position
     * {@code begin} via a swap, permutes the remainder, then swaps back to
     * restore the array before trying the next candidate.
     *
     * @param chars  working character array (mutated and restored in place)
     * @param begin  position currently being filled
     * @param result sink for complete arrangements
     */
    public static void Permutation(char[] chars, int begin, TreeSet<String> result) {
        if (chars == null || chars.length == 0 || begin < 0 || begin > chars.length - 1) {
            return;
        }
        if (begin == chars.length - 1) {
            result.add(String.valueOf(chars));
        } else {
            for (int i = begin; i <= chars.length - 1; i++) {
                swap(chars, begin, i);
                Permutation(chars, begin + 1, result);
                swap(chars, begin, i); // undo, so the next iteration starts clean
            }
        }
    }

    /** Swaps the characters at indices {@code a} and {@code b} in place. */
    public static void swap(char[] x, int a, int b) {
        char t = x[a];
        x[a] = x[b];
        x[b] = t;
    }

    public static void main(String[] args) {
        System.out.println(Permutation("aabbc"));
    }
}
| apache-2.0 |
saulbein/web3j | core/src/main/java/org/web3j/abi/datatypes/generated/Ufixed120x136.java | 601 | package org.web3j.abi.datatypes.generated;
import java.math.BigInteger;
import org.web3j.abi.datatypes.Ufixed;
/**
 * <p>Auto generated code.<br>
 * <strong>Do not modify!</strong><br>
 * Please use {@link org.web3j.codegen.AbiTypesGenerator} to update.</p>
 */
public class Ufixed120x136 extends Ufixed {
public static final Ufixed120x136 DEFAULT = new Ufixed120x136(BigInteger.ZERO);
public Ufixed120x136(BigInteger value) {
super(120, 136, value);
}
// NOTE(review): generated ctor ignores mBitSize/nBitSize and hardcodes 120/136.
public Ufixed120x136(int mBitSize, int nBitSize, BigInteger m, BigInteger n) {
super(120, 136, m, n);
}
}
| apache-2.0 |
Josephblt/lienzo-core | src/main/java/com/ait/lienzo/client/widget/panel/impl/PreviewLayer.java | 2817 | /*
* Copyright 2018 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ait.lienzo.client.widget.panel.impl;
import com.ait.lienzo.client.core.Context2D;
import com.ait.lienzo.client.core.shape.Layer;
import com.ait.lienzo.client.core.types.BoundingBox;
import com.ait.lienzo.client.core.types.Transform;
import com.ait.lienzo.client.widget.panel.Bounds;
import com.ait.lienzo.shared.core.types.ColorName;
import com.ait.tooling.common.api.java.util.function.Supplier;
/**
 * Layer used to render a scene preview: paints a translucent grey backdrop
 * over the background bounds, punches out the currently visible area, and
 * then draws the layer contents on top.
 */
public class PreviewLayer extends Layer
{
    static final double ALPHA = 0.5d;

    static final String FILL_COLOR = ColorName.LIGHTGREY.getColorString();

    /** Supplies the full area to shade behind the preview. */
    private final Supplier<Bounds> backgroundBounds;

    /** Supplies the area currently visible, which is left unshaded. */
    private final Supplier<Bounds> visibleBounds;

    public PreviewLayer(final Supplier<Bounds> backgroundBounds,
                        final Supplier<Bounds> visibleBounds)
    {
        this.backgroundBounds = backgroundBounds;
        this.visibleBounds = visibleBounds;
        setTransformable(true);
        setListening(true);
    }

    @Override
    public void drawWithTransforms(final Context2D context,
                                   final double alpha,
                                   final BoundingBox bounds,
                                   final Supplier<Transform> transformSupplier)
    {
        // Paint the shaded backdrop first so layer content renders above it.
        drawBackground(context);
        super.drawWithTransforms(context, alpha, bounds, transformSupplier);
    }

    /**
     * Fills the background bounds with a translucent grey and clears the
     * visible bounds out of it. Skipped entirely when the visible area is
     * degenerate (zero width and height).
     */
    private void drawBackground(final Context2D context)
    {
        final Bounds visible = visibleBounds.get();
        if (visible.getWidth() <= 0 && visible.getHeight() <= 0)
        {
            return;
        }
        final Bounds background = backgroundBounds.get();
        context.save();
        context.setGlobalAlpha(ALPHA);
        context.setFillColor(FILL_COLOR);
        context.fillRect(background.getX(),
                         background.getY(),
                         background.getWidth(),
                         background.getHeight());
        context.clearRect(visible.getX(),
                          visible.getY(),
                          visible.getWidth(),
                          visible.getHeight());
        context.restore();
    }
}
| apache-2.0 |
fuermolv/treesys | Application/Admin/Controller/TesController.class.php | 351 | <?php
namespace Admin\Controller;
use Common\Controller\HomeBaseController;
/**
 * Admin test controller. (Translated from the original Chinese comment:
 * "后台首页控制器" — backend home controller.)
 */
class TesController extends HomeBaseController
{
/**
 * Initialization hook; delegates to the parent controller's setup.
 */
public function _initialize()
{
parent::_initialize();
}
/**
 * Test action; echoes a placeholder string.
 */
public function test()
{
echo "string";
}
}
Longi94/Hexle | core/src/main/com/tlongdev/hexle/renderer/GameRenderer.java | 386 | package com.tlongdev.hexle.renderer;
import com.badlogic.gdx.math.Vector2;
import com.tlongdev.hexle.view.FieldView;
/**
 * Renderer contract for the game scene: exposes the field view, consumes
 * model-change notifications and per-frame updates.
 *
 * @author longi
 * @since 2016.04.13.
 */
public interface GameRenderer extends Renderer {
    /** Called when the underlying game model changed and needs redrawing. */
    void notifyModelChanged();

    /** Advances renderer state by the elapsed time {@code dt} (seconds). */
    void update(float dt);

    /** Returns the view object rendering the playing field. */
    FieldView getFieldView();

    /** Sets the current slide (drag) offset applied to the field. */
    void setSlideOffset(Vector2 offset);

    /** Called when new tiles were generated and must be animated in. */
    void notifyNewTilesGenerated();
}
| apache-2.0 |
Activiti/Activiti | activiti-core/activiti-engine/src/test/java/org/activiti/engine/test/cmd/FailingDelegate.java | 1167 | /*
* Copyright 2010-2020 Alfresco Software, Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti.engine.test.cmd;
import org.activiti.engine.ActivitiException;
import org.activiti.engine.delegate.DelegateExecution;
import org.activiti.engine.delegate.JavaDelegate;
/**
*/
public class FailingDelegate implements JavaDelegate {
public static final String EXCEPTION_MESSAGE = "Expected exception.";
@Override
public void execute(DelegateExecution execution) {
Boolean fail = (Boolean) execution.getVariable("fail");
if (fail == null || fail) {
throw new ActivitiException(EXCEPTION_MESSAGE);
}
}
}
| apache-2.0 |
dmeignan/INRC2010-UOS-evaluator | fr.lalea.inrc2010evaluator/src/main/java/de/uos/inf/ischedule/model/UnwantedShiftPatternConstraint.java | 20157 | /**
* Copyright 2013, Universitaet Osnabrueck
* Author: David Meignan
*/
package de.uos.inf.ischedule.model;
import java.util.AbstractList;
import java.util.ArrayList;
import java.util.List;
import de.uos.inf.ischedule.model.heuristic.SwapMove;
import de.uos.inf.ischedule.util.IconUtils;
import de.uos.inf.ischedule.util.Messages;
/**
* This constraint defines an unwanted shift pattern.
*
* @author David Meignan
*/
public class UnwantedShiftPatternConstraint implements Constraint {
/**
 * Unwanted shift pattern that this constraint penalizes.
 */
protected ShiftPattern unwantedPattern;
/**
 * Contracts for which the constraint applies.
 */
protected ArrayList<Contract> scope = new ArrayList<Contract>();
/**
 * Activation of the constraint; when false the evaluator reports zero cost.
 */
protected boolean active;
/**
 * The weight value of the constraint (penalty added per pattern match).
 */
protected int weightValue;
/**
 * Evaluator of the constraint; created lazily and cached by getEvaluator.
 */
private UnwantedShiftPatternConstraintEvaluator evaluator = null;
/**
 * Constructs an unwanted shift pattern constraint.
 *
 * @param unwantedPattern the unwanted shift pattern.
 * @param active the active property of the constraint.
 * @param weightValue the weight value (penalty per match) of the constraint.
 *
 * @throws IllegalArgumentException if the unwanted shift pattern is <code>null</code>
 * or the weight value is negative.
 */
public UnwantedShiftPatternConstraint(ShiftPattern unwantedPattern,
boolean active, int weightValue) {
if (unwantedPattern == null)
throw new IllegalArgumentException();
if (weightValue < 0)
throw new IllegalArgumentException();
this.unwantedPattern = unwantedPattern;
this.active = active;
this.weightValue = weightValue;
}
/* (non-Javadoc)
 * @see de.uos.inf.ischedule.model.ConstraintInstance#isActive()
 */
@Override
public boolean isActive() {
return active;
}
/* (non-Javadoc)
 * @see de.uos.inf.ischedule.model.ConstraintInstance#getDefaultWeightValue()
 */
@Override
public int getDefaultWeightValue() {
return weightValue;
}
/**
 * Returns a collection view of the scope (constrained contracts) of the
 * constraint. The view supports element insertion but no removal (see
 * ContractCollection).
 * @return a collection view of the scope of the constraint.
 */
public List<Contract> scope() {
return new ContractCollection();
}
/**
 * Collection view of the scope of the constraint. This custom implementation
 * controls modification operations: only get/size/add are supported; removal
 * falls back to AbstractList's unsupported-operation behavior.
 */
private class ContractCollection extends AbstractList<Contract> {
/* (non-Javadoc)
 * @see java.util.AbstractList#get(int)
 */
@Override
public Contract get(int idx) {
return scope.get(idx);
}
/* (non-Javadoc)
 * @see java.util.AbstractCollection#size()
 */
@Override
public int size() {
return scope.size();
}
/* (non-Javadoc)
 * @see java.util.AbstractList#add(int, java.lang.Object)
 */
@Override
public void add(int index, Contract element) {
scope.add(index, element);
}
}
/**
 * Returns the unwanted shift-pattern penalized by this constraint.
 *
 * @return the unwanted shift-pattern.
 */
public ShiftPattern getUnwantedPattern() {
return unwantedPattern;
}
/* (non-Javadoc)
 * Lazily creates and caches the evaluator for the given problem.
 * NOTE(review): lazy init is not thread-safe, and the cached evaluator is
 * reused even if called with a different problem instance — confirm callers
 * always pass the same problem.
 * @see de.uos.inf.ischedule.model.Constraint#getEvaluator(de.uos.inf.ischedule.model.ShiftSchedulingProblem)
 */
@Override
public ConstraintEvaluator getEvaluator(ShiftSchedulingProblem problem) {
if (evaluator == null)
evaluator = new UnwantedShiftPatternConstraintEvaluator(problem);
return evaluator;
}
/**
* Evaluator of the constraint.
*/
class UnwantedShiftPatternConstraintEvaluator extends
ConstraintEvaluator {
/**
 * Day indexes at which an occurrence of the pattern may start. Precomputed:
 * only days where the full pattern fits inside the scheduling period and,
 * for day-of-week-specific patterns, where the weekday matches.
 */
ArrayList<Integer> patternStartDayIndexes;
/**
 * Indexes of employees whose contract is in the constraint's scope.
 */
ArrayList<Integer> constrainedEmployeeIndexes;
/**
 * Creates an evaluator of the constraint, precomputing the constrained
 * employees and the feasible pattern start days for the problem.
 *
 * @param problem the shift scheduling problem.
 */
public UnwantedShiftPatternConstraintEvaluator(ShiftSchedulingProblem problem) {
// Constrained employees
constrainedEmployeeIndexes = new ArrayList<Integer>();
for (int employeeIndex=0; employeeIndex<problem.employees.size();
employeeIndex++) {
if (scope.contains(problem.employees.get(employeeIndex).contract)) {
constrainedEmployeeIndexes.add(employeeIndex);
}
}
// Pattern start days
patternStartDayIndexes = new ArrayList<Integer>();
for (int dayIndex=0; dayIndex<problem.schedulingPeriod.size(); dayIndex++) {
// Check lenght of the pattern: it must fit within the period.
if (unwantedPattern.entries.size()+dayIndex <=
problem.schedulingPeriod.size()) {
if (unwantedPattern.dayOfWeekSpecific) {
// Only start days falling on the pattern's weekday qualify.
if (unwantedPattern.startDay ==
problem.schedulingPeriod.getDayOfWeek(dayIndex)) {
patternStartDayIndexes.add(dayIndex);
}
} else {
patternStartDayIndexes.add(dayIndex);
}
}
}
}
/* (non-Javadoc)
 * Total cost = (number of pattern matches over all constrained employees and
 * all feasible start days) * weightValue. Returns 0 if inactive or weightless.
 * @see de.uos.inf.ischedule.model.ConstraintEvaluator#getCost(de.uos.inf.ischedule.model.Solution)
 */
@Override
public int getCost(Solution solution) {
// Check active and weight value
if (!active || weightValue <= 0)
return 0;
int matches = 0;
// Iterates on employees
for (int employeeIndex: constrainedEmployeeIndexes) {
// Iterate on start days
for (int patternStartDayIndex: patternStartDayIndexes) {
if (matchPattern(solution,
patternStartDayIndex,
employeeIndex,
unwantedPattern.entries)) {
matches++;
}
}
}
return matches*weightValue;
}
/* (non-Javadoc)
 * Estimates the cost contribution of assigning `shift` to `employeeIndex` on
 * `assignmentDayIndex`: counts pattern occurrences that would be completed by
 * that assignment, other days taken from the current solution.
 * NOTE(review): unlike getCost, this method does not check `active` or
 * `weightValue > 0` — confirm callers guarantee that, otherwise an inactive
 * constraint could still contribute here.
 * @see de.uos.inf.ischedule.model.ConstraintEvaluator#getEstimatedAssignmentCost(de.uos.inf.ischedule.model.Solution, int, de.uos.inf.ischedule.model.Shift, int)
 */
@Override
public int getEstimatedAssignmentCost(Solution solution,
int employeeIndex, Shift shift, int assignmentDayIndex) {
// Check scope
if (!constrainedEmployeeIndexes.contains(employeeIndex))
return 0;
int patternMatchs = 0;
// Check if dayIndex in a pattern
for (int patternStartDayIndex: patternStartDayIndexes) {
int patternEndDayIndex = patternStartDayIndex+
unwantedPattern.entries.size()-1;
if (assignmentDayIndex >= patternStartDayIndex &&
assignmentDayIndex <= patternEndDayIndex) {
// Check completion of a pattern
boolean match = true;
for (int entryIndex=0; entryIndex<unwantedPattern
.entries.size(); entryIndex++) {
ShiftPatternEntry entry = unwantedPattern.entries.get(entryIndex);
Shift assignment = null;
// Substitute the candidate shift on the assignment day;
// read all other days from the current solution.
if (entryIndex+patternStartDayIndex == assignmentDayIndex) {
assignment = shift;
} else {
assignment = solution.assignments.get(
entryIndex+patternStartDayIndex).get(employeeIndex);
}
// Check entry
if (entry.assignmentType == PatternEntryType.WORKED_SHIFT) {
if (assignment == null) {
match = false;
break;
}
} else if (entry.assignmentType == PatternEntryType.SPECIFIC_WORKED_SHIFT) {
if (assignment != entry.shift) {
match = false;
break;
}
} else if (entry.assignmentType == PatternEntryType.NO_ASSIGNMENT) {
if (assignment != null) {
match = false;
break;
}
} else if (entry.assignmentType == PatternEntryType.UNSPECIFIED_ASSIGNMENT) {
// All type of assignment allowed
}
}
if (match) {
patternMatchs++;
}
} else if (patternStartDayIndex > assignmentDayIndex) {
// Start days are ascending; later patterns cannot contain the day.
break;
}
}
return patternMatchs*weightValue;
}
/**
 * Check if the pattern matches the assignments of an employee
 * from a given day-index. Returns <code>true</code> if the pattern
 * matches, <code>false</code> otherwise.
 * Note that length of pattern must be checked beforehand in order to avoid
 * out-of-range errors.
 *
 * @param solution the solution.
 * @param dayIndex the starting day-index of the pattern.
 * @param employeeIndex the index of the employee.
 * @param entries the entries of the pattern.
 * @return Returns <code>true</code> if the pattern
 * match, <code>false</code> otherwise.
 */
private boolean matchPattern(Solution solution, int dayIndex, int employeeIndex,
List<ShiftPatternEntry> entries) {
for (int entryIndex=0; entryIndex<entries.size(); entryIndex++) {
ShiftPatternEntry entry = entries.get(entryIndex);
Shift assignment = solution.assignments.get(
entryIndex+dayIndex).get(employeeIndex);
// Each entry type imposes its own condition; any failure rejects the match.
if (entry.assignmentType == PatternEntryType.WORKED_SHIFT) {
if (assignment == null)
return false;
} else if (entry.assignmentType == PatternEntryType.SPECIFIC_WORKED_SHIFT) {
if (assignment != entry.shift)
return false;
} else if (entry.assignmentType == PatternEntryType.NO_ASSIGNMENT) {
if (assignment != null)
return false;
} else if (entry.assignmentType == PatternEntryType.UNSPECIFIED_ASSIGNMENT) {
// All type of assignment allowed
}
}
return true;
}
/**
 * Check if the pattern matches the assignments of an employee
 * from a given day-index, taking into account a swap-move (assignments are
 * read through the move's resulting state instead of the raw solution).
 * Returns <code>true</code> if the pattern
 * matches, <code>false</code> otherwise.
 * Note that length of pattern must be checked beforehand in order to avoid
 * out-of-range errors.
 *
 * @param solution the solution.
 * @param dayIndex the starting day-index of the pattern.
 * @param employeeIndex the index of the employee.
 * @param entries the entries of the pattern.
 * @param swap the swap move.
 * @return Returns <code>true</code> if the pattern
 * match, <code>false</code> otherwise.
 */
private boolean matchPattern(Solution solution, int dayIndex, int employeeIndex,
List<ShiftPatternEntry> entries, SwapMove swap) {
for (int entryIndex=0; entryIndex<entries.size(); entryIndex++) {
ShiftPatternEntry entry = entries.get(entryIndex);
Shift assignment = swap.getResultingAssignment(solution,
entryIndex+dayIndex, employeeIndex);
if (entry.assignmentType == PatternEntryType.WORKED_SHIFT) {
if (assignment == null)
return false;
} else if (entry.assignmentType == PatternEntryType.SPECIFIC_WORKED_SHIFT) {
if (assignment != entry.shift)
return false;
} else if (entry.assignmentType == PatternEntryType.NO_ASSIGNMENT) {
if (assignment != null)
return false;
} else if (entry.assignmentType == PatternEntryType.UNSPECIFIED_ASSIGNMENT) {
// All type of assignment allowed
}
}
return true;
}
/* (non-Javadoc)
 * Returns the cost delta of applying the swap move: (matches after swap -
 * matches before swap) * weightValue, considering only pattern occurrences
 * that overlap the swapped day range.
 * NOTE(review): after the early scope check, matches are counted for BOTH
 * employees of the swap even when only one of them is in the constraint's
 * scope — verify this is intended, since getCost only counts constrained
 * employees.
 * @see de.uos.inf.ischedule.model.ConstraintEvaluator#getSwapMoveCostDifference(de.uos.inf.ischedule.model.Solution, de.uos.inf.ischedule.model.heuristic.SwapMove)
 */
@Override
public int getSwapMoveCostDifference(Solution solution,
SwapMove swapMove) {
// Check active state and weight value of the constraint
if (!active || weightValue <= 0)
return 0;
// Check constrained employees
boolean employee1Constrained =
constrainedEmployeeIndexes.contains(swapMove.getEmployee1Index());
boolean employee2Constrained =
constrainedEmployeeIndexes.contains(swapMove.getEmployee2Index());
if (!employee1Constrained && !employee2Constrained)
return 0;
// Compute previous and new penalties for night-shift before free weekend
// covered by swap
int initialMatches = 0;
int swapMatches = 0;
// Iterate on start days
for (int patternStartDayIndex: patternStartDayIndexes) {
int patternEndDayIndex = patternStartDayIndex+
unwantedPattern.entries.size()-1;
// Check if cover with swap move
if (
(swapMove.getStartDayIndex() <= patternEndDayIndex &&
swapMove.getEndDayIndex() >= patternStartDayIndex) ) {
// Employee 1
if (matchPattern(solution,
patternStartDayIndex,
swapMove.getEmployee1Index(),
unwantedPattern.entries)) {
initialMatches++;
}
if (matchPattern(solution,
patternStartDayIndex,
swapMove.getEmployee1Index(),
unwantedPattern.entries,
swapMove)) {
swapMatches++;
}
// Employee 2
if (matchPattern(solution,
patternStartDayIndex,
swapMove.getEmployee2Index(),
unwantedPattern.entries)) {
initialMatches++;
}
if (matchPattern(solution,
patternStartDayIndex,
swapMove.getEmployee2Index(),
unwantedPattern.entries,
swapMove)) {
swapMatches++;
}
}
if (swapMove.getEndDayIndex() <= patternStartDayIndex)
break;
}
return (swapMatches-initialMatches)*weightValue;
}
	/**
	 * Returns the constraint this evaluator belongs to.
	 *
	 * @see de.uos.inf.ischedule.model.ConstraintEvaluator#getConstraint()
	 */
	@Override
	public Constraint getConstraint() {
		// The evaluator is an inner class; return the enclosing constraint
		return UnwantedShiftPatternConstraint.this;
	}
	/**
	 * Collects one violation (with cost, message, and the affected assignment
	 * range) for every occurrence of the unwanted pattern in the solution.
	 *
	 * @see de.uos.inf.ischedule.model.ConstraintEvaluator#getConstraintViolations(de.uos.inf.ischedule.model.Solution)
	 */
	@Override
	public ArrayList<ConstraintViolation> getConstraintViolations(
			Solution solution) {
		ArrayList<ConstraintViolation> violations = new ArrayList<ConstraintViolation>();
		// Inactive or zero-weight constraints report no violations
		if (!active || weightValue <= 0)
			return violations;
		for (int employeeIndex: constrainedEmployeeIndexes) {
			// Iterate on start days
			for (int patternStartDayIndex: patternStartDayIndexes) {
				if (matchPattern(solution,
						patternStartDayIndex,
						employeeIndex,
						unwantedPattern.entries)) {
					ConstraintViolation violation = new ConstraintViolation(
							UnwantedShiftPatternConstraint.this);
					violation.setCost(weightValue);
					String message = Messages.getString("UnwantedShiftPatternConstraint.unwantedShiftPattern"); //$NON-NLS-1$
					message = message.replaceAll("\\$1", unwantedPattern.toString()); //$NON-NLS-1$
					violation.setMessage(message);
					// Scope spans the whole matched pattern window
					violation.addAssignmentRangeInScope(
							solution.employees.get(employeeIndex),
							solution.problem.getSchedulingPeriod().getDate(patternStartDayIndex),
							solution.problem.getSchedulingPeriod().getDate(
									patternStartDayIndex+unwantedPattern.entries.size()-1));
					violations.add(violation);
				}
			}
		}
		return violations;
	}
	/**
	 * Counts how the swap move changes pattern matches: the returned array
	 * holds {removed matches, introduced matches} for the two swapped
	 * employees over all pattern start days covered by the swap.
	 *
	 * @see de.uos.inf.ischedule.model.ConstraintEvaluator#getConstraintSatisfactionDifference(de.uos.inf.ischedule.model.Solution, de.uos.inf.ischedule.model.heuristic.SwapMove)
	 */
	@Override
	public int[] getConstraintSatisfactionDifference(Solution solution,
			SwapMove swapMove) {
		int[] diff = new int[]{0, 0};
		// Check active state and weight value of the constraint
		if (!active || weightValue <= 0)
			return diff;
		// Check constrained employees
		boolean employee1Constrained =
				constrainedEmployeeIndexes.contains(swapMove.getEmployee1Index());
		boolean employee2Constrained =
				constrainedEmployeeIndexes.contains(swapMove.getEmployee2Index());
		if (!employee1Constrained && !employee2Constrained)
			return diff;
		// Compute previous and new penalties for night-shift before free weekend
		// covered by swap
		// Iterate on start days
		for (int patternStartDayIndex: patternStartDayIndexes) {
			int patternEndDayIndex = patternStartDayIndex+
					unwantedPattern.entries.size()-1;
			// Check if cover with swap move
			if (
					(swapMove.getStartDayIndex() <= patternEndDayIndex &&
					swapMove.getEndDayIndex() >= patternStartDayIndex) ) {
				boolean initialMatch;
				boolean swapMatch;
				// Employee 1
				initialMatch = (matchPattern(solution,
						patternStartDayIndex,
						swapMove.getEmployee1Index(),
						unwantedPattern.entries));
				swapMatch = (matchPattern(solution,
						patternStartDayIndex,
						swapMove.getEmployee1Index(),
						unwantedPattern.entries,
						swapMove));
				// diff[0]: match removed by the swap; diff[1]: match introduced
				if (initialMatch && !swapMatch) {
					diff[0]++;
				} else if (!initialMatch && swapMatch) {
					diff[1]++;
				}
				// Employee 2
				initialMatch = (matchPattern(solution,
						patternStartDayIndex,
						swapMove.getEmployee2Index(),
						unwantedPattern.entries));
				swapMatch = (matchPattern(solution,
						patternStartDayIndex,
						swapMove.getEmployee2Index(),
						unwantedPattern.entries,
						swapMove));
				if (initialMatch && !swapMatch) {
					diff[0]++;
				} else if (!initialMatch && swapMatch) {
					diff[1]++;
				}
			}
			// NOTE(review): early exit assumes patternStartDayIndexes is sorted
			if (swapMove.getEndDayIndex() <= patternStartDayIndex)
				break;
		}
		return diff;
	}
}
	/**
	 * Returns the localized display name of the constraint.
	 *
	 * @see de.uos.inf.ischedule.model.Constraint#getName()
	 */
	@Override
	public String getName() {
		return Messages.getString("UnwantedShiftPatternConstraint.name"); //$NON-NLS-1$
	}
	/**
	 * Returns the localized label used when displaying this constraint's cost.
	 *
	 * @see de.uos.inf.ischedule.model.Constraint#getConstraintCostLabel()
	 */
	@Override
	public String getCostLabel() {
		return Messages.getString("UnwantedShiftPatternConstraint.costLabel"); //$NON-NLS-1$
	}
	/**
	 * Returns the generic (employee-independent) localized HTML description.
	 *
	 * @see de.uos.inf.ischedule.model.Constraint#getHTMLDescription()
	 */
	@Override
	public String getHTMLDescription() {
		return Messages.getString("UnwantedShiftPatternConstraint.description"); //$NON-NLS-1$
	}
/* (non-Javadoc)
* @see de.uos.inf.ischedule.model.Constraint#getHTMLDescription(de.uos.inf.ischedule.model.Employee)
*/
@Override
public String getHTMLDescription(Employee employee) {
String paramDesc = Messages.getString(
"UnwantedShiftPatternConstraint.descriptionEmployee"); //$NON-NLS-1$
paramDesc = paramDesc.replaceAll("\\$1", unwantedPattern.toString()); //$NON-NLS-1$
return paramDesc;
}
/* (non-Javadoc)
* @see de.uos.inf.ischedule.model.Constraint#getHTMLParametersDescriptions(de.uos.inf.ischedule.model.ShiftSchedulingProblem)
*/
@Override
public List<String> getHTMLParametersDescriptions(
ShiftSchedulingProblem problem) {
String paramDesc = Messages.getString(
"UnwantedShiftPatternConstraint.parametersDescription"); //$NON-NLS-1$
paramDesc = paramDesc.replaceAll("\\$1", unwantedPattern.toString()); //$NON-NLS-1$
String contractList = ""; //$NON-NLS-1$
for (int contractIdx=0; contractIdx<scope.size(); contractIdx++) {
String contractDesc = Messages.getString(
"UnwantedShiftPatternConstraint.parametersDescription.CoveredContract"); //$NON-NLS-1$
contractDesc = contractDesc.replaceAll("\\$1", scope.get(contractIdx).getName()); //$NON-NLS-1$
String imgURL = IconUtils.getImageURL(scope.get(contractIdx).getIconPath());
contractDesc = contractDesc.replaceAll("\\$2", imgURL); //$NON-NLS-1$
if (contractIdx < scope.size()-1)
contractDesc = contractDesc+Messages.getString(
"UnwantedShiftPatternConstraint.parametersDescription.CoveredContractSeparator"); //$NON-NLS-1$
contractList = contractList+contractDesc;
}
paramDesc = paramDesc.replaceAll("\\$2", contractList); //$NON-NLS-1$
ArrayList<String> descList = new ArrayList<String>();
descList.add(paramDesc);
return descList;
}
/* (non-Javadoc)
* @see de.uos.inf.ischedule.model.Constraint#cover(de.uos.inf.ischedule.model.Employee)
*/
@Override
public boolean cover(Employee employee) {
if (employee == null)
return false;
return (scope.contains(employee.getContract()));
}
	/**
	 * Returns the unwanted shift pattern that is verified by the constraint.
	 *
	 * @return the shift pattern that is verified by the constraint.
	 */
	public ShiftPattern getShiftPattern() {
		return unwantedPattern;
	}
}
| apache-2.0 |
EBIBioSamples/biosamples-v4 | models/solr/src/main/java/uk/ac/ebi/biosamples/solr/model/field/FilterCriteriaBuilder.java | 841 | /*
* Copyright 2021 EMBL - European Bioinformatics Institute
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this
* file except in compliance with the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package uk.ac.ebi.biosamples.solr.model.field;
import org.springframework.data.solr.core.query.Criteria;
import uk.ac.ebi.biosamples.model.filter.Filter;
/**
 * Translates a BioSamples {@link Filter} into a Spring Data Solr query
 * {@link Criteria}.
 */
public interface FilterCriteriaBuilder {
  /**
   * Builds the Solr query criteria corresponding to the given filter.
   *
   * @param filter the filter to translate; assumed non-null — TODO confirm
   * @return the Solr criteria for the filter
   */
  Criteria getFilterCriteria(Filter filter);
}
| apache-2.0 |
Product-Foundry/play-swagger | src/main/scala/com/iheart/playSwagger/ResourceReader.scala | 322 | package com.iheart.playSwagger
import java.io.InputStream
object ResourceReader {

  /**
   * Reads the named classpath resource and returns its lines.
   *
   * @param resource the classpath resource name
   * @param cl the class loader used to locate the resource
   * @return the resource content, one list element per line
   */
  def read(resource: String)(implicit cl: ClassLoader): List[String] = {
    read(cl.getResourceAsStream(resource))
  }

  /**
   * Reads all lines from the stream. The underlying source (and thus the
   * stream) is closed after reading — the original left it open, leaking
   * the file handle.
   */
  def read(stream: InputStream): List[String] = {
    val source = scala.io.Source.fromInputStream(stream)
    try {
      source.getLines.toList
    } finally {
      source.close()
    }
  }
}
| apache-2.0 |
baishuo/falcon_search | common/src/test/java/org/apache/falcon/entity/parser/FeedEntityParserTest.java | 54920 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.falcon.entity.parser;
import org.apache.falcon.FalconException;
import org.apache.falcon.cluster.util.EmbeddedCluster;
import org.apache.falcon.entity.AbstractTestBase;
import org.apache.falcon.entity.ClusterHelper;
import org.apache.falcon.entity.EntityUtil;
import org.apache.falcon.entity.FeedHelper;
import org.apache.falcon.entity.store.ConfigurationStore;
import org.apache.falcon.entity.v0.EntityType;
import org.apache.falcon.entity.v0.Frequency;
import org.apache.falcon.entity.v0.SchemaHelper;
import org.apache.falcon.entity.v0.cluster.Cluster;
import org.apache.falcon.entity.v0.cluster.Interfacetype;
import org.apache.falcon.entity.v0.datasource.Datasource;
import org.apache.falcon.entity.v0.feed.ActionType;
import org.apache.falcon.entity.v0.feed.Argument;
import org.apache.falcon.entity.v0.feed.ClusterType;
import org.apache.falcon.entity.v0.feed.ExtractMethod;
import org.apache.falcon.entity.v0.feed.Location;
import org.apache.falcon.entity.v0.feed.Locations;
import org.apache.falcon.entity.v0.feed.LocationType;
import org.apache.falcon.entity.v0.feed.MergeType;
import org.apache.falcon.entity.v0.feed.Feed;
import org.apache.falcon.entity.v0.feed.Partition;
import org.apache.falcon.entity.v0.feed.Partitions;
import org.apache.falcon.entity.v0.feed.Property;
import org.apache.falcon.entity.v0.feed.Validity;
import org.apache.falcon.group.FeedGroupMapTest;
import org.apache.falcon.security.CurrentUser;
import org.apache.falcon.service.LifecyclePolicyMap;
import org.apache.falcon.util.FalconTestUtil;
import org.apache.falcon.util.StartupProperties;
import org.apache.hadoop.fs.Path;
import org.mockito.Mockito;
import org.testng.Assert;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import javax.xml.bind.Unmarshaller;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringWriter;
import java.util.Map;
import static org.testng.AssertJUnit.assertEquals;
/**
* Test Cases for Feed entity parser.
*/
public class FeedEntityParserTest extends AbstractTestBase {
private final FeedEntityParser parser = (FeedEntityParser) EntityParserFactory
.getParser(EntityType.FEED);
private Feed modifiableFeed;
    @BeforeMethod
    public void setUp() throws Exception {
        // Reset the config store and spin up an embedded HDFS cluster
        cleanupStore();
        ConfigurationStore store = ConfigurationStore.get();
        this.dfsCluster = EmbeddedCluster.newCluster("testCluster");
        this.conf = dfsCluster.getConf();
        // Publish the two cluster entities ("testCluster" / "backupCluster")
        // that the feed definitions under test reference as source/target
        Unmarshaller unmarshaller = EntityType.CLUSTER.getUnmarshaller();
        Cluster cluster = (Cluster) unmarshaller.unmarshal(this.getClass()
                .getResourceAsStream(CLUSTER_XML));
        cluster.setName("testCluster");
        store.publish(EntityType.CLUSTER, cluster);
        cluster = (Cluster) unmarshaller.unmarshal(this.getClass()
                .getResourceAsStream(CLUSTER_XML));
        cluster.setName("backupCluster");
        store.publish(EntityType.CLUSTER, cluster);
        LifecyclePolicyMap.get().init();
        // Authenticate so ACL owner validation in the parser passes
        CurrentUser.authenticate(FalconTestUtil.TEST_USER_2);
        modifiableFeed = parser.parseAndValidate(this.getClass().getResourceAsStream(FEED_XML));
        // Publish the datasource referenced by import/export feed tests
        Unmarshaller dsUnmarshaller = EntityType.DATASOURCE.getUnmarshaller();
        Datasource ds = (Datasource) dsUnmarshaller.unmarshal(this.getClass()
                .getResourceAsStream(DATASOURCE_XML));
        ds.setName("test-hsql-db");
        store.publish(EntityType.DATASOURCE, ds);
    }
    @Test(expectedExceptions = ValidationException.class)
    public void testValidations() throws Exception {
        // Removing a cluster the feed references must make validation fail
        ConfigurationStore.get().remove(EntityType.CLUSTER, "backupCluster");
        parser.parseAndValidate(this.getClass().getResourceAsStream(FEED_XML));
    }
    @Test
    public void testParse() throws IOException, FalconException, JAXBException {
        // Parse the reference feed XML and verify every field round-trips
        Feed feed = parser.parseAndValidate(this.getClass()
                .getResourceAsStream(FEED_XML));
        Assert.assertNotNull(feed);
        assertEquals(feed.getName(), "clicks");
        assertEquals(feed.getDescription(), "clicks log");
        assertEquals(feed.getFrequency().toString(), "hours(1)");
        assertEquals(feed.getSla().getSlaHigh().toString(), "hours(3)");
        assertEquals(feed.getSla().getSlaLow().toString(), "hours(2)");
        assertEquals(feed.getGroups(), "online,bi");
        // First cluster: source, with its own SLA, validity and retention
        assertEquals(feed.getClusters().getClusters().get(0).getName(),
                "testCluster");
        assertEquals(feed.getClusters().getClusters().get(0).getSla().getSlaLow().toString(), "hours(3)");
        assertEquals(feed.getClusters().getClusters().get(0).getSla().getSlaHigh().toString(), "hours(4)");
        assertEquals(feed.getClusters().getClusters().get(0).getType(),
                ClusterType.SOURCE);
        assertEquals(SchemaHelper.formatDateUTC(feed.getClusters().getClusters().get(0).getValidity()
                .getStart()), "2011-11-01T00:00Z");
        assertEquals(SchemaHelper.formatDateUTC(feed.getClusters().getClusters().get(0).getValidity()
                .getEnd()), "2011-12-31T00:00Z");
        assertEquals(feed.getTimezone().getID(), "UTC");
        assertEquals(feed.getClusters().getClusters().get(0).getRetention()
                .getAction(), ActionType.DELETE);
        assertEquals(feed.getClusters().getClusters().get(0).getRetention()
                .getLimit().toString(), "hours(48)");
        // Second cluster: target, with ARCHIVE retention
        assertEquals(feed.getClusters().getClusters().get(1).getName(),
                "backupCluster");
        assertEquals(feed.getClusters().getClusters().get(1).getType(),
                ClusterType.TARGET);
        assertEquals(SchemaHelper.formatDateUTC(feed.getClusters().getClusters().get(1).getValidity()
                .getStart()), "2011-11-01T00:00Z");
        assertEquals(SchemaHelper.formatDateUTC(feed.getClusters().getClusters().get(1).getValidity()
                .getEnd()), "2011-12-31T00:00Z");
        assertEquals(feed.getClusters().getClusters().get(1).getRetention()
                .getAction(), ActionType.ARCHIVE);
        assertEquals(feed.getClusters().getClusters().get(1).getRetention()
                .getLimit().toString(), "hours(6)");
        // Storage locations (data/meta/stats), ACL and schema
        assertEquals("${nameNode}/projects/falcon/clicks",
                FeedHelper.createStorage(feed).getUriTemplate(LocationType.DATA));
        assertEquals("${nameNode}/projects/falcon/clicksMetaData",
                FeedHelper.createStorage(feed).getUriTemplate(LocationType.META));
        assertEquals("${nameNode}/projects/falcon/clicksStats",
                FeedHelper.createStorage(feed).getUriTemplate(LocationType.STATS));
        assertEquals(feed.getACL().getGroup(), "group");
        assertEquals(feed.getACL().getOwner(), FalconTestUtil.TEST_USER_2);
        assertEquals(feed.getACL().getPermission(), "0x755");
        assertEquals(feed.getSchema().getLocation(), "/schema/clicks");
        assertEquals(feed.getSchema().getProvider(), "protobuf");
        // Marshal back to XML for visual inspection in the test log
        StringWriter stringWriter = new StringWriter();
        Marshaller marshaller = EntityType.FEED.getMarshaller();
        marshaller.marshal(feed, stringWriter);
        System.out.println(stringWriter.toString());
    }
    @Test
    public void testLifecycleParse() throws Exception {
        // Lifecycle (retention stage) attributes must be parsed from FEED3_XML
        Feed feed = parser.parseAndValidate(this.getClass()
                .getResourceAsStream(FEED3_XML));
        assertEquals("hours(17)", feed.getLifecycle().getRetentionStage().getFrequency().toString());
        assertEquals("AgeBasedDelete", FeedHelper.getPolicies(feed, "testCluster").get(0));
        assertEquals("reports", feed.getLifecycle().getRetentionStage().getQueue());
        assertEquals("NORMAL", feed.getLifecycle().getRetentionStage().getPriority());
    }
    @Test(expectedExceptions = ValidationException.class,
          expectedExceptionsMessageRegExp = ".*Retention is a mandatory stage.*")
    public void testMandatoryRetention() throws Exception {
        // A lifecycle without a retention stage must be rejected
        Feed feed = parser.parseAndValidate(this.getClass()
                .getResourceAsStream(FEED3_XML));
        feed.getLifecycle().setRetentionStage(null);
        parser.validate(feed);
    }
    @Test
    public void testValidRetentionFrequency() throws Exception {
        // Retention frequencies >= feed frequency and >= one hour are valid
        Feed feed = parser.parseAndValidate(this.getClass()
                .getResourceAsStream(FEED3_XML));
        feed.setFrequency(Frequency.fromString("minutes(30)"));
        Frequency frequency = Frequency.fromString("minutes(60)");
        feed.getLifecycle().getRetentionStage().setFrequency(frequency);
        parser.validate(feed); // no validation exception should be thrown
        frequency = Frequency.fromString("hours(1)");
        feed.getLifecycle().getRetentionStage().setFrequency(frequency);
        parser.validate(feed); // no validation exception should be thrown
    }
    @Test
    public void testDefaultRetentionFrequencyConflict() throws Exception {
        // With no explicit retention frequency the default applies; it must
        // be compatible with feed frequencies from minutes up to days
        Feed feed = parser.parseAndValidate(this.getClass().getResourceAsStream(FEED3_XML));
        feed.getLifecycle().getRetentionStage().setFrequency(null);
        feed.getClusters().getClusters().get(0).getLifecycle().getRetentionStage().setFrequency(null);
        feed.setFrequency(Frequency.fromString("minutes(10)"));
        parser.validate(feed); // shouldn't throw a validation exception
        feed.setFrequency(Frequency.fromString("hours(7)"));
        parser.validate(feed); // shouldn't throw a validation exception
        feed.setFrequency(Frequency.fromString("days(2)"));
        parser.validate(feed); // shouldn't throw a validation exception
    }
    @Test(expectedExceptions = ValidationException.class,
          expectedExceptionsMessageRegExp = ".*Retention can not be more frequent than data availability.*")
    public void testRetentionFrequentThanFeed() throws Exception {
        // Retention running more often than the feed itself must be rejected
        Feed feed = parser.parseAndValidate(this.getClass()
                .getResourceAsStream(FEED3_XML));
        feed.setFrequency(Frequency.fromString("hours(2)"));
        Frequency frequency = Frequency.fromString("minutes(60)");
        feed.getLifecycle().getRetentionStage().setFrequency(frequency);
        parser.validate(feed);
    }
    @Test(expectedExceptions = ValidationException.class,
          expectedExceptionsMessageRegExp = ".*Feed Retention can not be more frequent than.*")
    public void testRetentionFrequency() throws Exception {
        // Retention frequency below the minimum (one hour) must be rejected
        Feed feed = parser.parseAndValidate(this.getClass()
                .getResourceAsStream(FEED3_XML));
        feed.setFrequency(Frequency.fromString("minutes(30)"));
        Frequency frequency = Frequency.fromString("minutes(59)");
        feed.getLifecycle().getRetentionStage().setFrequency(frequency);
        parser.validate(feed);
    }
    @Test(expectedExceptions = ValidationException.class)
    public void applyValidationInvalidFeed() throws Exception {
        // A cluster name not present in the store must fail validation
        Feed feed = parser.parseAndValidate(ProcessEntityParserTest.class
                .getResourceAsStream(FEED_XML));
        feed.getClusters().getClusters().get(0).setName("invalid cluster");
        parser.validate(feed);
    }
    @Test
    public void testPartitionExpression() throws FalconException {
        Feed feed = parser.parseAndValidate(ProcessEntityParserTest.class
                .getResourceAsStream(FEED_XML));
        //When there are more than 1 src clusters, there should be partition expression
        org.apache.falcon.entity.v0.feed.Cluster newCluster = new org.apache.falcon.entity.v0.feed.Cluster();
        newCluster.setName("newCluster");
        newCluster.setType(ClusterType.SOURCE);
        newCluster.setPartition("${cluster.colo}");
        feed.getClusters().getClusters().add(newCluster);
        try {
            parser.validate(feed);
            Assert.fail("Expected ValidationException");
        } catch (ValidationException ignore) {
            //ignore
        }
        //When there are more than 1 src clusters, the partition expression should contain cluster variable
        feed.getClusters().getClusters().get(0).setPartition("*");
        try {
            parser.validate(feed);
            Assert.fail("Expected ValidationException");
        } catch (ValidationException ignore) {
            //ignore
        }
        //When there are more than 1 target cluster, there should be partition expre
        newCluster.setType(ClusterType.TARGET);
        try {
            parser.validate(feed);
            Assert.fail("Expected ValidationException");
        } catch (ValidationException ignore) {
            //ignore
        }
        //When there are more than 1 target clusters, the partition expression should contain cluster variable
        feed.getClusters().getClusters().get(1).setPartition("*");
        try {
            parser.validate(feed);
            Assert.fail("Expected ValidationException");
        } catch (ValidationException ignore) {
            //ignore
        }
        //Number of parts in partition expression < number of partitions defined for feed
        feed.getClusters().getClusters().get(1).setPartition("*/*");
        try {
            parser.validate(feed);
            Assert.fail("Expected ValidationException");
        } catch (ValidationException ignore) {
            //ignore
        }
        // With partitions removed entirely, validation must succeed again
        feed.getClusters().getClusters().get(0).setPartition(null);
        feed.getClusters().getClusters().get(1).setPartition(null);
        feed.getClusters().getClusters().remove(2);
        feed.setPartitions(null);
        parser.validate(feed);
    }
    @Test
    public void testInvalidClusterValidityTime() {
        // Non-existent calendar dates in cluster validity must be rejected;
        // each catch restores a valid date so later assertions can proceed
        Validity validity = modifiableFeed.getClusters().getClusters().get(0)
                .getValidity();
        try {
            // 2007 is not a leap year, so Feb 29 is invalid
            validity.setStart(SchemaHelper.parseDateUTC("2007-02-29T00:00Z"));
            modifiableFeed.getClusters().getClusters().get(0)
                    .setValidity(validity);
            parser.parseAndValidate(marshallEntity(modifiableFeed));
            Assert.fail("Cluster validity failed");
        } catch (Exception e) {
            System.out.println(e.getMessage());
            validity.setStart(SchemaHelper.parseDateUTC("2011-11-01T00:00Z"));
            modifiableFeed.getClusters().getClusters().get(0)
                    .setValidity(validity);
        }
        try {
            // April has only 30 days, so April 31 is invalid
            validity.setEnd(SchemaHelper.parseDateUTC("2010-04-31T00:00Z"));
            modifiableFeed.getClusters().getClusters().get(0)
                    .setValidity(validity);
            parser.parseAndValidate(marshallEntity(modifiableFeed));
            Assert.fail("Cluster validity failed");
        } catch (Exception e) {
            System.out.println(e.getMessage());
            validity.setEnd(SchemaHelper.parseDateUTC("2011-12-31T00:00Z"));
            modifiableFeed.getClusters().getClusters().get(0)
                    .setValidity(validity);
        }
    }
    @Test(expectedExceptions = ValidationException.class)
    public void testInvalidProcessValidity() throws Exception {
        // Validity start moved after the configured end must fail validation
        Feed feed = parser.parseAndValidate((FeedEntityParserTest.class
                .getResourceAsStream(FEED_XML)));
        feed.getClusters().getClusters().get(0).getValidity()
                .setStart(SchemaHelper.parseDateUTC("2012-11-01T00:00Z"));
        parser.validate(feed);
    }
    @Test(expectedExceptions = ValidationException.class, expectedExceptionsMessageRegExp = "slaLow of Feed:.*")
    public void testInvalidSlaLow() throws Exception {
        // slaLow greater than slaHigh must be rejected
        Feed feed = parser.parseAndValidate((FeedEntityParserTest.class
                .getResourceAsStream(FEED_XML)));
        feed.getSla().setSlaLow(new Frequency("hours(4)"));
        feed.getSla().setSlaHigh(new Frequency("hours(2)"));
        parser.validate(feed);
    }
    @Test(expectedExceptions = ValidationException.class, expectedExceptionsMessageRegExp = "slaHigh of Feed:.*")
    public void testInvalidSlaHigh() throws Exception {
        // slaHigh beyond the cluster retention limit must be rejected
        Feed feed = parser.parseAndValidate((FeedEntityParserTest.class
                .getResourceAsStream(FEED_XML)));
        feed.getSla().setSlaLow(new Frequency("hours(2)"));
        feed.getSla().setSlaHigh(new Frequency("hours(10)"));
        feed.getClusters().getClusters().get(0).getRetention().setLimit(new Frequency("hours(9)"));
        parser.validate(feed);
    }
    @Test
    public void testValidFeedGroup() throws FalconException, JAXBException {
        // Two feeds sharing groups with identical data locations are valid
        Feed feed1 = (Feed) EntityType.FEED.getUnmarshaller().unmarshal(
                (FeedEntityParserTest.class.getResourceAsStream(FEED_XML)));
        feed1.setName("f1" + System.currentTimeMillis());
        feed1.setGroups("group1,group2,group3");
        feed1.setLocations(new Locations());
        Location location = new Location();
        location.setPath("/projects/bi/rmc/daily/ad/${YEAR}/fraud/${MONTH}-${DAY}/ad");
        location.setType(LocationType.DATA);
        feed1.getLocations().getLocations().add(location);
        feed1.getClusters().getClusters().get(0).getLocations().getLocations().set(0, location);
        parser.parseAndValidate(feed1.toString());
        ConfigurationStore.get().publish(EntityType.FEED, feed1);
        Feed feed2 = (Feed) EntityType.FEED.getUnmarshaller().unmarshal(
                (FeedEntityParserTest.class.getResourceAsStream(FEED_XML)));
        feed2.setName("f2" + System.currentTimeMillis());
        feed2.setGroups("group1,group2,group5");
        feed2.setLocations(new Locations());
        Location location2 = new Location();
        location2
                .setPath("/projects/bi/rmc/daily/ad/${YEAR}/fraud/${MONTH}-${DAY}/ad");
        location2.setType(LocationType.DATA);
        feed2.getLocations().getLocations().add(location2);
        feed2.getClusters().getClusters().get(0).getLocations().getLocations().set(0, location);
        // Same location pattern as feed1, so shared groups are consistent
        parser.parseAndValidate(feed2.toString());
    }
    // TODO Disabled the test since I do not see anything invalid in here.
    @Test(enabled = false, expectedExceptions = ValidationException.class)
    public void testInvalidFeedClusterDataLocation() throws JAXBException, FalconException {
        // Disabled: feed-level location without a cluster-level override
        Feed feed1 = (Feed) EntityType.FEED.getUnmarshaller().unmarshal(
                (FeedEntityParserTest.class.getResourceAsStream(FEED_XML)));
        feed1.setName("f1" + System.currentTimeMillis());
        feed1.setGroups("group1,group2,group3");
        feed1.setLocations(new Locations());
        Location location = new Location();
        location.setPath("/projects/bi/rmc/daily/ad/${YEAR}/fraud/${MONTH}-${DAY}/ad");
        location.setType(LocationType.DATA);
        feed1.getLocations().getLocations().add(location);
        parser.parseAndValidate(feed1.toString());
    }
    @Test(expectedExceptions = ValidationException.class)
    public void testInvalidFeedGroup() throws FalconException, JAXBException {
        // Feeds in the same group must share the same data location pattern;
        // feed2 uses a different path template and must be rejected
        Feed feed1 = (Feed) EntityType.FEED.getUnmarshaller().unmarshal(
                (FeedEntityParserTest.class.getResourceAsStream(FEED_XML)));
        feed1.setName("f1" + System.currentTimeMillis());
        feed1.setGroups("group1,group2,group3");
        feed1.setLocations(new Locations());
        Location location = new Location();
        location.setPath("/projects/bi/rmc/daily/ad/${YEAR}/fraud/${MONTH}-${DAY}/ad");
        location.setType(LocationType.DATA);
        feed1.getLocations().getLocations().add(location);
        parser.parseAndValidate(feed1.toString());
        feed1.getClusters().getClusters().get(0).getLocations().getLocations().set(0, location);
        ConfigurationStore.get().publish(EntityType.FEED, feed1);
        Feed feed2 = (Feed) EntityType.FEED.getUnmarshaller().unmarshal(
                (FeedEntityParserTest.class.getResourceAsStream(FEED_XML)));
        feed2.setName("f2" + System.currentTimeMillis());
        feed2.setGroups("group1,group2,group5");
        feed2.setLocations(new Locations());
        Location location2 = new Location();
        location2
                .setPath("/projects/bi/rmc/daily/ad/${YEAR}/fraud/${MONTH}/${HOUR}/ad");
        location2.setType(LocationType.DATA);
        feed2.getLocations().getLocations().add(location2);
        feed2.getClusters().getClusters().get(0).getLocations().getLocations().set(0, location);
        parser.parseAndValidate(feed2.toString());
    }
    @Test
    public void testValidGroupNames() throws FalconException, JAXBException {
        // Multiple groups, a single group and no groups are all accepted
        Feed feed1 = (Feed) EntityType.FEED.getUnmarshaller().unmarshal(
                FeedGroupMapTest.class
                        .getResourceAsStream("/config/feed/feed-0.1.xml"));
        feed1.setName("f1" + System.currentTimeMillis());
        feed1.setGroups("group7,group8");
        parser.parseAndValidate(feed1.toString());
        feed1.setGroups("group7");
        parser.parseAndValidate(feed1.toString());
        feed1.setGroups(null);
        parser.parseAndValidate(feed1.toString());
        ConfigurationStore.get().publish(EntityType.FEED, feed1);
    }
@Test
public void testInvalidGroupNames() throws FalconException, JAXBException {
Feed feed1 = (Feed) EntityType.FEED.getUnmarshaller().unmarshal(
FeedGroupMapTest.class
.getResourceAsStream("/config/feed/feed-0.1.xml"));
feed1.setName("f1" + System.currentTimeMillis());
try {
feed1.setGroups("commaend,");
parser.parseAndValidate(feed1.toString());
Assert.fail("Expected exception");
} catch (FalconException ignore) {
//ignore
}
try {
feed1.setGroups("group8, group9");
parser.parseAndValidate(feed1.toString());
Assert.fail("Expected exception");
} catch (FalconException e) {
//ignore
}
try {
feed1.setGroups("space in group,group9");
parser.parseAndValidate(feed1.toString());
Assert.fail("Expected exception");
} catch (FalconException e) {
//ignore
}
}
    @Test
    public void testClusterPartitionExp() throws FalconException {
        // ${cluster.*} expressions must resolve against the cluster entity:
        // colo, name, and arbitrary cluster properties (field1)
        Cluster cluster = ConfigurationStore.get().get(EntityType.CLUSTER,
                "testCluster");
        Assert.assertEquals(FeedHelper.evaluateClusterExp(cluster,
                "/*/${cluster.colo}"), "/*/" + cluster.getColo());
        Assert.assertEquals(FeedHelper.evaluateClusterExp(cluster,
                "/*/${cluster.name}/Local"), "/*/" + cluster.getName() + "/Local");
        Assert.assertEquals(FeedHelper.evaluateClusterExp(cluster,
                "/*/${cluster.field1}/Local"), "/*/value1/Local");
    }
    @Test(expectedExceptions = FalconException.class)
    public void testInvalidFeedName() throws JAXBException, FalconException {
        // Underscores are not allowed in feed names
        Feed feed1 = (Feed) EntityType.FEED.getUnmarshaller().unmarshal(
                FeedGroupMapTest.class
                        .getResourceAsStream("/config/feed/feed-0.1.xml"));
        feed1.setName("Feed_name");
        parser.parseAndValidate(feed1.toString());
    }
    @Test(expectedExceptions = FalconException.class)
    public void testInvalidFeedGroupName() throws JAXBException, FalconException {
        // Feeds in the same group must share the same frequency
        Feed feed1 = (Feed) EntityType.FEED.getUnmarshaller().unmarshal(
                FeedGroupMapTest.class
                        .getResourceAsStream("/config/feed/feed-0.1.xml"));
        feed1.setName("feed1");
        feed1.getLocations().getLocations().get(0)
                .setPath("/data/clicks/${YEAR}/${MONTH}/${DAY}/${HOUR}");
        feed1.getClusters().getClusters().get(0).getLocations().getLocations()
                .get(0).setPath("/data/clicks/${YEAR}/${MONTH}/${DAY}/${HOUR}");
        ConfigurationStore.get().publish(EntityType.FEED, feed1);
        Feed feed2 = (Feed) EntityType.FEED.getUnmarshaller().unmarshal(
                FeedGroupMapTest.class
                        .getResourceAsStream("/config/feed/feed-0.1.xml"));
        feed2.setName("feed2");
        feed2.getLocations().getLocations().get(0).setPath("/data/clicks/${YEAR}/${MONTH}/${DAY}/${HOUR}");
        feed2.getClusters().getClusters().get(0).getLocations().getLocations()
                .get(0).setPath("/data/clicks/${YEAR}/${MONTH}/${DAY}/${HOUR}");
        feed2.setFrequency(new Frequency("hours(1)"));
        try {
            parser.parseAndValidate(feed2.toString());
        } catch (FalconException e) {
            e.printStackTrace();
            Assert.fail("Not expecting exception for same frequency");
        }
        feed2.setFrequency(new Frequency("hours(2)"));
        //expecting exception
        parser.parseAndValidate(feed2.toString());
    }
    @Test
    public void testNullFeedLateArrival() throws JAXBException, FalconException {
        // late-arrival is optional; a null value must still validate
        Feed feed = (Feed) EntityType.FEED.getUnmarshaller().unmarshal(
                FeedGroupMapTest.class
                        .getResourceAsStream("/config/feed/feed-0.1.xml"));
        feed.setLateArrival(null);
        parser.parseAndValidate(feed.toString());
    }
    /**
     * A negative test for validating tags key value pair regex: key=value, key=value.
     * @throws FalconException
     */
    @Test
    public void testFeedTags() throws FalconException {
        try {
            InputStream stream = this.getClass().getResourceAsStream("/config/feed/feed-tags-0.1.xml");
            parser.parse(stream);
            Assert.fail("org.xml.sax.SAXParseException should have been thrown.");
        } catch (FalconException e) {
            // Schema validation failure surfaces as an UnmarshalException
            // wrapping the underlying SAXParseException
            Assert.assertEquals(javax.xml.bind.UnmarshalException.class, e.getCause().getClass());
            Assert.assertEquals(org.xml.sax.SAXParseException.class, e.getCause().getCause().getClass());
        }
    }
    @Test
    public void testParseFeedWithTable() throws FalconException {
        // A catalog (Hive table) storage URI must be parsed verbatim
        final InputStream inputStream = getClass().getResourceAsStream("/config/feed/hive-table-feed.xml");
        Feed feedWithTable = parser.parse(inputStream);
        Assert.assertEquals(feedWithTable.getTable().getUri(),
                "catalog:default:clicks#ds=${YEAR}-${MONTH}-${DAY}-${HOUR}");
    }
    @Test (expectedExceptions = FalconException.class)
    public void testParseInvalidFeedWithTable() throws FalconException {
        // A malformed table feed definition must fail parsing
        parser.parse(FeedEntityParserTest.class.getResourceAsStream("/config/feed/invalid-feed.xml"));
    }
    @Test (expectedExceptions = FalconException.class)
    public void testValidateFeedWithTableAndMultipleSources() throws FalconException {
        // Table-storage feeds may have only a single source cluster
        parser.parseAndValidate(FeedEntityParserTest.class.getResourceAsStream(
                "/config/feed/table-with-multiple-sources-feed.xml"));
        Assert.fail("Should have thrown an exception:Multiple sources are not supported for feed with table storage");
    }
    @Test(expectedExceptions = ValidationException.class)
    public void testValidatePartitionsForTable() throws Exception {
        // Explicit feed partitions are not allowed with table storage
        Feed feed = parser.parse(FeedEntityParserTest.class.getResourceAsStream("/config/feed/hive-table-feed.xml"));
        Assert.assertNull(feed.getPartitions());
        Partitions partitions = new Partitions();
        Partition partition = new Partition();
        partition.setName("colo");
        partitions.getPartitions().add(partition);
        feed.setPartitions(partitions);
        parser.validate(feed);
        Assert.fail("An exception should have been thrown:Partitions are not supported for feeds with table storage");
    }
    /**
     * A table-storage feed requires the cluster's registry interface to carry an
     * endpoint; clearing the endpoint must fail validation.
     */
    @Test(expectedExceptions = ValidationException.class)
    public void testValidateClusterHasRegistryWithNoRegistryInterfaceEndPoint() throws Exception {
        final InputStream inputStream = getClass().getResourceAsStream("/config/feed/hive-table-feed.xml");
        Feed feedWithTable = parser.parse(inputStream);
        org.apache.falcon.entity.v0.cluster.Cluster clusterEntity = EntityUtil.getEntity(EntityType.CLUSTER,
                feedWithTable.getClusters().getClusters().get(0).getName());
        // Strip the endpoint from the registry interface of the feed's first cluster.
        ClusterHelper.getInterface(clusterEntity, Interfacetype.REGISTRY).setEndpoint(null);
        parser.validate(feedWithTable);
        Assert.fail("An exception should have been thrown: Cluster should have registry interface defined with table "
                + "storage");
    }
    /**
     * A table-storage feed on a cluster that lacks a registry interface entirely
     * must fail validation.
     */
    @Test(expectedExceptions = ValidationException.class)
    public void testValidateClusterHasRegistryWithNoRegistryInterface() throws Exception {
        Unmarshaller unmarshaller = EntityType.CLUSTER.getUnmarshaller();
        Cluster cluster = (Cluster) unmarshaller.unmarshal(this.getClass()
                .getResourceAsStream(("/config/cluster/cluster-no-registry.xml")));
        cluster.setName("badTestCluster");
        // Publish the registry-less cluster so the feed validator can resolve it by name.
        ConfigurationStore.get().publish(EntityType.CLUSTER, cluster);
        final InputStream inputStream = getClass().getResourceAsStream("/config/feed/hive-table-feed.xml");
        Feed feedWithTable = parser.parse(inputStream);
        // Borrow a validity window from the shared modifiableFeed fixture
        // (presumably a field set up elsewhere in this class — confirm against full file).
        Validity validity = modifiableFeed.getClusters().getClusters().get(0)
                .getValidity();
        // Repoint the table feed at only the registry-less cluster.
        feedWithTable.getClusters().getClusters().clear();
        org.apache.falcon.entity.v0.feed.Cluster feedCluster =
                new org.apache.falcon.entity.v0.feed.Cluster();
        feedCluster.setName(cluster.getName());
        feedCluster.setValidity(validity);
        feedWithTable.getClusters().getClusters().add(feedCluster);
        parser.validate(feedWithTable);
        Assert.fail("An exception should have been thrown: Cluster should have registry interface defined with table"
                + " storage");
    }
    /**
     * With authorization enabled, validating a feed as an unknown user must
     * fail with a {@link ValidationException}.
     */
    @Test(expectedExceptions = ValidationException.class)
    public void testValidateOwner() throws Exception {
        CurrentUser.authenticate("unknown");
        StartupProperties.get().setProperty("falcon.security.authorization.enabled", "true");
        Assert.assertTrue(Boolean.valueOf(
                StartupProperties.get().getProperty("falcon.security.authorization.enabled")));
        try {
            // need a new parser since it caches authorization enabled flag
            FeedEntityParser feedEntityParser =
                    (FeedEntityParser) EntityParserFactory.getParser(EntityType.FEED);
            feedEntityParser.parseAndValidate(this.getClass().getResourceAsStream(FEED_XML));
        } finally {
            // Always restore the global flag so later tests run with authorization off.
            StartupProperties.get().setProperty("falcon.security.authorization.enabled", "false");
        }
    }
@Test
public void testValidateACLWithACLAndAuthorizationDisabled() throws Exception {
InputStream stream = this.getClass().getResourceAsStream(FEED_XML);
Feed feed = parser.parse(stream);
Assert.assertNotNull(feed);
Assert.assertNotNull(feed.getACL());
Assert.assertNotNull(feed.getACL().getOwner());
Assert.assertNotNull(feed.getACL().getGroup());
Assert.assertNotNull(feed.getACL().getPermission());
parser.validate(feed);
}
    /**
     * With authorization enabled, a feed validates when its ACL owner is the
     * authenticated user and its group is that user's primary group.
     */
    @Test
    public void testValidateACLOwner() throws Exception {
        StartupProperties.get().setProperty("falcon.security.authorization.enabled", "true");
        Assert.assertTrue(Boolean.valueOf(
                StartupProperties.get().getProperty("falcon.security.authorization.enabled")));
        CurrentUser.authenticate(USER);
        try {
            InputStream stream = this.getClass().getResourceAsStream(FEED_XML);
            // need a new parser since it caches authorization enabled flag
            FeedEntityParser feedEntityParser =
                    (FeedEntityParser) EntityParserFactory.getParser(EntityType.FEED);
            Feed feed = feedEntityParser.parse(stream);
            Assert.assertNotNull(feed);
            Assert.assertNotNull(feed.getACL());
            // Align the ACL with the authenticated user so validation passes.
            feed.getACL().setOwner(USER);
            feed.getACL().setGroup(getPrimaryGroupName());
            feedEntityParser.validate(feed);
        } finally {
            // Restore the global flag for subsequent tests.
            StartupProperties.get().setProperty("falcon.security.authorization.enabled", "false");
        }
    }
    /**
     * With authorization enabled and an unknown user ("blah") authenticated,
     * validating a feed whose ACL owner does not match must fail.
     */
    @Test (expectedExceptions = ValidationException.class)
    public void testValidateACLBadOwner() throws Exception {
        StartupProperties.get().setProperty("falcon.security.authorization.enabled", "true");
        Assert.assertTrue(Boolean.valueOf(
                StartupProperties.get().getProperty("falcon.security.authorization.enabled")));
        CurrentUser.authenticate("blah");
        try {
            InputStream stream = this.getClass().getResourceAsStream(FEED_XML);
            // need a new parser since it caches authorization enabled flag
            FeedEntityParser feedEntityParser =
                    (FeedEntityParser) EntityParserFactory.getParser(EntityType.FEED);
            Feed feed = feedEntityParser.parse(stream);
            Assert.assertNotNull(feed);
            Assert.assertNotNull(feed.getACL());
            Assert.assertNotNull(feed.getACL().getOwner());
            Assert.assertNotNull(feed.getACL().getGroup());
            Assert.assertNotNull(feed.getACL().getPermission());
            feedEntityParser.validate(feed);
            Assert.fail("Validation exception should have been thrown for invalid owner");
        } finally {
            // Restore the global flag for subsequent tests.
            StartupProperties.get().setProperty("falcon.security.authorization.enabled", "false");
        }
    }
@Test (expectedExceptions = ValidationException.class)
public void testValidateACLBadOwnerAndGroup() throws Exception {
StartupProperties.get().setProperty("falcon.security.authorization.enabled", "true");
Assert.assertTrue(Boolean.valueOf(
StartupProperties.get().getProperty("falcon.security.authorization.enabled")));
CurrentUser.authenticate("blah");
try {
InputStream stream = this.getClass().getResourceAsStream(FEED_XML);
// need a new parser since it caches authorization enabled flag
FeedEntityParser feedEntityParser =
(FeedEntityParser) EntityParserFactory.getParser(EntityType.FEED);
Feed feed = feedEntityParser.parse(stream);
Assert.assertNotNull(feed);
Assert.assertNotNull(feed.getACL());
Assert.assertNotNull(feed.getACL().getOwner());
Assert.assertNotNull(feed.getACL().getGroup());
Assert.assertNotNull(feed.getACL().getPermission());
feedEntityParser.validate(feed);
Assert.fail("Validation exception should have been thrown for invalid owner");
} finally {
StartupProperties.get().setProperty("falcon.security.authorization.enabled", "false");
}
}
    /**
     * With authorization enabled, ACL-plus-storage validation must fail when the
     * feed's ACL owner does not match.
     * NOTE(review): no CurrentUser.authenticate(...) here — this test relies on
     * whatever user an earlier test left authenticated; confirm that is intended.
     */
    @Test (expectedExceptions = ValidationException.class)
    public void testValidateACLAndStorageBadOwner() throws Exception {
        StartupProperties.get().setProperty("falcon.security.authorization.enabled", "true");
        Assert.assertTrue(Boolean.valueOf(
                StartupProperties.get().getProperty("falcon.security.authorization.enabled")));
        Feed feed = null;
        try {
            InputStream stream = this.getClass().getResourceAsStream(FEED_XML);
            // need a new parser since it caches authorization enabled flag
            FeedEntityParser feedEntityParser =
                    (FeedEntityParser) EntityParserFactory.getParser(EntityType.FEED);
            feed = feedEntityParser.parse(stream);
            Assert.assertNotNull(feed);
            Assert.assertNotNull(feed.getACL());
            Assert.assertNotNull(feed.getACL().getOwner());
            Assert.assertNotNull(feed.getACL().getGroup());
            Assert.assertNotNull(feed.getACL().getPermission());
            // create locations
            createLocations(feed);
            feedEntityParser.validate(feed);
        } finally {
            // Clean up DFS paths and restore the global flag regardless of outcome.
            if (feed != null) {
                deleteLocations(feed);
            }
            StartupProperties.get().setProperty("falcon.security.authorization.enabled", "false");
        }
    }
    /**
     * Same as above but documents the owner-and-group failure case.
     * NOTE(review): body is identical to testValidateACLAndStorageBadOwner;
     * a shared helper may be worth extracting.
     */
    @Test (expectedExceptions = ValidationException.class)
    public void testValidateACLAndStorageBadOwnerAndGroup() throws Exception {
        StartupProperties.get().setProperty("falcon.security.authorization.enabled", "true");
        Assert.assertTrue(Boolean.valueOf(
                StartupProperties.get().getProperty("falcon.security.authorization.enabled")));
        Feed feed = null;
        try {
            InputStream stream = this.getClass().getResourceAsStream(FEED_XML);
            // need a new parser since it caches authorization enabled flag
            FeedEntityParser feedEntityParser =
                    (FeedEntityParser) EntityParserFactory.getParser(EntityType.FEED);
            feed = feedEntityParser.parse(stream);
            Assert.assertNotNull(feed);
            Assert.assertNotNull(feed.getACL());
            Assert.assertNotNull(feed.getACL().getOwner());
            Assert.assertNotNull(feed.getACL().getGroup());
            Assert.assertNotNull(feed.getACL().getPermission());
            // create locations
            createLocations(feed);
            feedEntityParser.validate(feed);
        } finally {
            if (feed != null) {
                deleteLocations(feed);
            }
            StartupProperties.get().setProperty("falcon.security.authorization.enabled", "false");
        }
    }
    /**
     * Valid owner but a group that is not the user's: ACL-plus-storage
     * validation must fail.
     */
    @Test (expectedExceptions = ValidationException.class)
    public void testValidateACLAndStorageForValidOwnerBadGroup() throws Exception {
        CurrentUser.authenticate(USER);
        StartupProperties.get().setProperty("falcon.security.authorization.enabled", "true");
        Assert.assertTrue(Boolean.valueOf(
                StartupProperties.get().getProperty("falcon.security.authorization.enabled")));
        Feed feed = null;
        try {
            InputStream stream = this.getClass().getResourceAsStream(FEED_XML);
            // need a new parser since it caches authorization enabled flag
            FeedEntityParser feedEntityParser = (FeedEntityParser) EntityParserFactory.getParser(EntityType.FEED);
            feed = feedEntityParser.parse(stream);
            Assert.assertNotNull(feed);
            Assert.assertNotNull(feed.getACL());
            Assert.assertNotNull(feed.getACL().getOwner());
            Assert.assertNotNull(feed.getACL().getGroup());
            Assert.assertNotNull(feed.getACL().getPermission());
            // Fix only the owner; the group from the XML remains invalid.
            feed.getACL().setOwner(USER);
            // create locations
            createLocations(feed);
            feedEntityParser.validate(feed);
        } finally {
            // Clean up DFS paths and restore the global flag regardless of outcome.
            if (feed != null) {
                deleteLocations(feed);
            }
            StartupProperties.get().setProperty("falcon.security.authorization.enabled", "false");
        }
    }
    /**
     * Valid group (user's primary group) but the owner from the XML remains
     * invalid: validation must fail.
     */
    @Test (expectedExceptions = ValidationException.class)
    public void testValidateACLValidGroupBadOwner() throws Exception {
        CurrentUser.authenticate(USER);
        StartupProperties.get().setProperty("falcon.security.authorization.enabled", "true");
        Assert.assertTrue(Boolean.valueOf(
                StartupProperties.get().getProperty("falcon.security.authorization.enabled")));
        try {
            InputStream stream = this.getClass().getResourceAsStream(FEED_XML);
            // need a new parser since it caches authorization enabled flag
            FeedEntityParser feedEntityParser = (FeedEntityParser) EntityParserFactory.getParser(
                    EntityType.FEED);
            Feed feed = feedEntityParser.parse(stream);
            Assert.assertNotNull(feed);
            Assert.assertNotNull(feed.getACL());
            Assert.assertNotNull(feed.getACL().getOwner());
            Assert.assertNotNull(feed.getACL().getGroup());
            Assert.assertNotNull(feed.getACL().getPermission());
            // Fix only the group; the owner stays invalid.
            feed.getACL().setGroup(getPrimaryGroupName());
            feedEntityParser.validate(feed);
        } finally {
            StartupProperties.get().setProperty("falcon.security.authorization.enabled", "false");
        }
    }
    /**
     * Invalid owner and group with storage locations present: validation must fail.
     * NOTE(review): no CurrentUser.authenticate(...) here — relies on the user a
     * prior test left authenticated; confirm that is intended.
     */
    @Test (expectedExceptions = ValidationException.class)
    public void testValidateACLAndStorageForInvalidOwnerAndGroup() throws Exception {
        StartupProperties.get().setProperty("falcon.security.authorization.enabled", "true");
        Assert.assertTrue(Boolean.valueOf(
                StartupProperties.get().getProperty("falcon.security.authorization.enabled")));
        Feed feed = null;
        try {
            InputStream stream = this.getClass().getResourceAsStream(FEED_XML);
            // need a new parser since it caches authorization enabled flag
            FeedEntityParser feedEntityParser = (FeedEntityParser) EntityParserFactory.getParser(
                    EntityType.FEED);
            feed = feedEntityParser.parse(stream);
            Assert.assertNotNull(feed);
            Assert.assertNotNull(feed.getACL());
            Assert.assertNotNull(feed.getACL().getOwner());
            Assert.assertNotNull(feed.getACL().getGroup());
            Assert.assertNotNull(feed.getACL().getPermission());
            // create locations
            createLocations(feed);
            feedEntityParser.validate(feed);
        } finally {
            // Clean up DFS paths and restore the global flag regardless of outcome.
            if (feed != null) {
                deleteLocations(feed);
            }
            StartupProperties.get().setProperty("falcon.security.authorization.enabled", "false");
        }
    }
    /**
     * Valid group (user's primary group) but invalid owner with storage
     * locations present: validation must fail.
     */
    @Test (expectedExceptions = ValidationException.class)
    public void testValidateACLAndStorageForValidGroupBadOwner() throws Exception {
        CurrentUser.authenticate(USER);
        StartupProperties.get().setProperty("falcon.security.authorization.enabled", "true");
        Assert.assertTrue(Boolean.valueOf(
                StartupProperties.get().getProperty("falcon.security.authorization.enabled")));
        Feed feed = null;
        try {
            InputStream stream = this.getClass().getResourceAsStream(FEED_XML);
            // need a new parser since it caches authorization enabled flag
            FeedEntityParser feedEntityParser = (FeedEntityParser) EntityParserFactory.getParser(
                    EntityType.FEED);
            feed = feedEntityParser.parse(stream);
            Assert.assertNotNull(feed);
            Assert.assertNotNull(feed.getACL());
            Assert.assertNotNull(feed.getACL().getOwner());
            Assert.assertNotNull(feed.getACL().getGroup());
            Assert.assertNotNull(feed.getACL().getPermission());
            // Fix only the group; the owner stays invalid.
            feed.getACL().setGroup(getPrimaryGroupName());
            // create locations
            createLocations(feed);
            feedEntityParser.validate(feed);
        } finally {
            if (feed != null) {
                deleteLocations(feed);
            }
            StartupProperties.get().setProperty("falcon.security.authorization.enabled", "false");
        }
    }
private void createLocations(Feed feed) throws IOException {
for (Location location : feed.getLocations().getLocations()) {
if (location.getType() == LocationType.DATA) {
dfsCluster.getFileSystem().create(new Path(location.getPath()));
break;
}
}
}
private void deleteLocations(Feed feed) throws IOException {
for (Location location : feed.getLocations().getLocations()) {
if (location.getType() == LocationType.DATA) {
dfsCluster.getFileSystem().delete(new Path(location.getPath()), true);
break;
}
}
}
    /**
     * A feed replicating to an s3 archive location must pass ACL validation
     * when owner/group match the authenticated user.
     */
    @Test
    public void testValidateACLForArchiveReplication() throws Exception {
        StartupProperties.get().setProperty("falcon.security.authorization.enabled", "true");
        Assert.assertTrue(Boolean.valueOf(
                StartupProperties.get().getProperty("falcon.security.authorization.enabled")));
        CurrentUser.authenticate(USER);
        try {
            InputStream stream = this.getClass().getResourceAsStream(FEED_XML);
            // need a new parser since it caches authorization enabled flag
            FeedEntityParser feedEntityParser =
                    (FeedEntityParser) EntityParserFactory.getParser(EntityType.FEED);
            Feed feed = feedEntityParser.parse(stream);
            org.apache.falcon.entity.v0.feed.Cluster feedCluster =
                    FeedHelper.getCluster(feed, "backupCluster");
            // Point the backup cluster at an s3 (archive) data location.
            Location location = new Location();
            location.setType(LocationType.DATA);
            location.setPath(
                "s3://falcontesting@hwxasvtesting.blob.core.windows.net/${YEAR}-${MONTH}-${DAY}-${HOUR}-${MINUTE}");
            Locations locations = new Locations();
            locations.getLocations().add(location);
            feedCluster.setLocations(locations);
            Assert.assertNotNull(feed);
            Assert.assertNotNull(feed.getACL());
            feed.getACL().setOwner(USER);
            feed.getACL().setGroup(getPrimaryGroupName());
            try {
                feedEntityParser.validate(feed);
            } catch (IllegalArgumentException e) {
                // this is normal since AWS Secret Access Key is not specified as the password of a s3 URL
            }
        } finally {
            // Restore the global flag for subsequent tests.
            StartupProperties.get().setProperty("falcon.security.authorization.enabled", "false");
        }
    }
    /** Parses a sqoop import feed and checks datasource name, table name and included fields. */
    @Test
    public void testImportFeedSqoop() throws Exception {
        storeEntity(EntityType.CLUSTER, "testCluster");
        InputStream feedStream = this.getClass().getResourceAsStream("/config/feed/feed-import-0.1.xml");
        Feed feed = parser.parseAndValidate(feedStream);
        final org.apache.falcon.entity.v0.feed.Cluster srcCluster = feed.getClusters().getClusters().get(0);
        Assert.assertEquals("test-hsql-db", FeedHelper.getImportDatasourceName(srcCluster));
        Assert.assertEquals("customer", FeedHelper.getImportDataSourceTableName(srcCluster));
        Assert.assertEquals(2, srcCluster.getImport().getSource().getFields().getIncludes().getFields().size());
    }
    /** An import feed without an arguments element yields an empty argument map. */
    @Test
    public void testImportFeedSqoopMinimal() throws Exception {
        storeEntity(EntityType.CLUSTER, "testCluster");
        InputStream feedStream = this.getClass().getResourceAsStream("/config/feed/feed-import-noargs-0.1.xml");
        Feed feed = parser.parseAndValidate(feedStream);
        final org.apache.falcon.entity.v0.feed.Cluster srcCluster = feed.getClusters().getClusters().get(0);
        Assert.assertEquals("test-hsql-db", FeedHelper.getImportDatasourceName(srcCluster));
        Assert.assertEquals("customer", FeedHelper.getImportDataSourceTableName(srcCluster));
        Map<String, String> args = FeedHelper.getImportArguments(srcCluster);
        Assert.assertEquals(0, args.size());
    }
    /** Field exclusion is not implemented for feed imports and must be rejected. */
    @Test (expectedExceptions = ValidationException.class)
    public void testImportFeedSqoopExcludeFields() throws Exception {
        storeEntity(EntityType.CLUSTER, "testCluster");
        InputStream feedStream = this.getClass().getResourceAsStream("/config/feed/feed-import-exclude-fields-0.1.xml");
        Feed feed = parser.parseAndValidate(feedStream);
        Assert.fail("An exception should have been thrown: Feed Import policy not yet implement Field exclusion.");
    }
    /** --split-by together with --num-mappers is a valid argument combination. */
    @Test
    public void testImportFeedSqoopArgs() throws Exception {
        final InputStream inputStream = this.getClass().getResourceAsStream("/config/feed/feed-import-0.1.xml");
        Feed importFeed = parser.parse(inputStream);
        org.apache.falcon.entity.v0.feed.Arguments args =
                importFeed.getClusters().getClusters().get(0).getImport().getArguments();
        Argument splitByArg = new Argument();
        splitByArg.setName("--split-by");
        splitByArg.setValue("id");
        Argument numMappersArg = new Argument();
        numMappersArg.setName("--num-mappers");
        numMappersArg.setValue("3");
        // Replace the XML-declared arguments with the pair under test.
        args.getArguments().clear();
        args.getArguments().add(numMappersArg);
        args.getArguments().add(splitByArg);
        parser.validate(importFeed);
    }
    /** --split-by on its own is valid. */
    @Test
    public void testImportFeedSqoopArgsSplitBy() throws Exception {
        final InputStream inputStream = this.getClass().getResourceAsStream("/config/feed/feed-import-0.1.xml");
        Feed importFeed = parser.parse(inputStream);
        org.apache.falcon.entity.v0.feed.Arguments args =
                importFeed.getClusters().getClusters().get(0).getImport().getArguments();
        Argument splitByArg = new Argument();
        splitByArg.setName("--split-by");
        splitByArg.setValue("id");
        args.getArguments().clear();
        args.getArguments().add(splitByArg);
        parser.validate(importFeed);
    }
    /** --num-mappers without --split-by must be rejected. */
    @Test (expectedExceptions = ValidationException.class)
    public void testImportFeedSqoopArgsNumMapper() throws Exception {
        final InputStream inputStream = this.getClass().getResourceAsStream("/config/feed/feed-import-0.1.xml");
        Feed importFeed = parser.parse(inputStream);
        org.apache.falcon.entity.v0.feed.Arguments args =
                importFeed.getClusters().getClusters().get(0).getImport().getArguments();
        Argument numMappersArg = new Argument();
        numMappersArg.setName("--num-mappers");
        numMappersArg.setValue("2");
        args.getArguments().clear();
        args.getArguments().add(numMappersArg);
        parser.validate(importFeed);
        Assert.fail("An exception should have been thrown: Feed Import should specify "
                + "--split-by column along with --num-mappers");
    }
    /** FULL extraction with SNAPSHOT merge policy is a valid combination. */
    @Test
    public void testImportFeedExtractionType1() throws Exception {
        final InputStream inputStream = this.getClass().getResourceAsStream("/config/feed/feed-import-0.1.xml");
        Feed importFeed = parser.parse(inputStream);
        org.apache.falcon.entity.v0.feed.Extract extract =
                importFeed.getClusters().getClusters().get(0).getImport().getSource().getExtract();
        extract.setType(ExtractMethod.FULL);
        extract.setMergepolicy(MergeType.SNAPSHOT);
        parser.validate(importFeed);
    }
    /** FULL extraction with APPEND merge policy must be rejected. */
    @Test (expectedExceptions = ValidationException.class)
    public void testImportFeedExtractionType2() throws Exception {
        final InputStream inputStream = this.getClass().getResourceAsStream("/config/feed/feed-import-0.1.xml");
        Feed importFeed = parser.parse(inputStream);
        org.apache.falcon.entity.v0.feed.Extract extract =
                importFeed.getClusters().getClusters().get(0).getImport().getSource().getExtract();
        extract.setType(ExtractMethod.FULL);
        extract.setMergepolicy(MergeType.APPEND);
        parser.validate(importFeed);
    }
    /** INCREMENTAL extraction with APPEND merge policy must be rejected. */
    @Test (expectedExceptions = ValidationException.class)
    public void testImportFeedExtractionType3() throws Exception {
        final InputStream inputStream = this.getClass().getResourceAsStream("/config/feed/feed-import-0.1.xml");
        Feed importFeed = parser.parse(inputStream);
        org.apache.falcon.entity.v0.feed.Extract extract =
                importFeed.getClusters().getClusters().get(0).getImport().getSource().getExtract();
        extract.setType(ExtractMethod.INCREMENTAL);
        extract.setMergepolicy(MergeType.APPEND);
        parser.validate(importFeed);
    }
    /** A malformed import feed definition must fail parse/validate. */
    @Test (expectedExceptions = {ValidationException.class, FalconException.class})
    public void testImportFeedSqoopInvalid() throws Exception {
        InputStream feedStream = this.getClass().getResourceAsStream("/config/feed/feed-import-invalid-0.1.xml");
        parser.parseAndValidate(feedStream);
        Assert.fail("ValidationException should have been thrown");
    }
public void testValidateEmailNotification() throws Exception {
Feed feedNotification = (Feed) EntityType.FEED.getUnmarshaller().unmarshal(
(FeedEntityParserTest.class.getResourceAsStream(FEED_XML)));
Assert.assertNotNull(feedNotification.getNotification());
Assert.assertEquals(feedNotification.getNotification().getTo(), "falcon@localhost");
Assert.assertEquals(feedNotification.getNotification().getType(), "email");
}
    /**
     * Feed property validation: a clean property set passes, a duplicate
     * property name fails, and an empty property name fails. ACL validation is
     * stubbed out via a Mockito spy so only property checks are exercised.
     */
    @Test
    public void testValidateFeedProperties() throws Exception {
        FeedEntityParser feedEntityParser = Mockito
                .spy((FeedEntityParser) EntityParserFactory.getParser(EntityType.FEED));
        InputStream stream = this.getClass().getResourceAsStream("/config/feed/feed-0.1.xml");
        Feed feed = parser.parse(stream);
        Mockito.doNothing().when(feedEntityParser).validateACL(feed);
        // Good set of properties, should work
        feedEntityParser.validate(feed);
        // add duplicate property, should throw validation exception.
        Property property1 = new Property();
        property1.setName("field1");
        property1.setValue("any value");
        feed.getProperties().getProperties().add(property1);
        try {
            feedEntityParser.validate(feed);
            Assert.fail(); // should not reach here
        } catch (ValidationException e) {
            // Do nothing
        }
        // Remove duplicate property. It should not throw exception anymore
        feed.getProperties().getProperties().remove(property1);
        feedEntityParser.validate(feed);
        // add empty property name, should throw validation exception.
        property1.setName("");
        feed.getProperties().getProperties().add(property1);
        try {
            feedEntityParser.validate(feed);
            Assert.fail(); // should not reach here
        } catch (ValidationException e) {
            // Do nothing
        }
    }
@Test
public void testFeedEndTimeOptional() throws Exception {
Feed feed = parser.parseAndValidate(ProcessEntityParserTest.class
.getResourceAsStream(FEED_XML));
feed.getClusters().getClusters().get(0).getValidity().setEnd(null);
parser.validate(feed);
}
}
| apache-2.0 |
sdnwiselab/onos | incubator/net/src/test/java/org/onosproject/incubator/net/mcast/impl/MulticastRouteManagerTest.java | 5480 | /*
* Copyright 2015-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.incubator.net.mcast.impl;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.onlab.junit.TestUtils;
import org.onlab.packet.IpAddress;
import org.onosproject.common.event.impl.TestEventDispatcher;
import org.onosproject.core.ApplicationId;
import org.onosproject.core.CoreServiceAdapter;
import org.onosproject.core.DefaultApplicationId;
import org.onosproject.incubator.store.mcast.impl.DistributedMcastStore;
import org.onosproject.net.ConnectPoint;
import org.onosproject.net.PortNumber;
import org.onosproject.net.mcast.McastEvent;
import org.onosproject.net.mcast.McastListener;
import org.onosproject.net.mcast.McastRoute;
import org.onosproject.store.service.TestStorageService;
import java.util.List;
import static junit.framework.Assert.fail;
import static junit.framework.TestCase.assertEquals;
import static org.onosproject.net.NetTestTools.did;
import static org.onosproject.net.NetTestTools.injectEventDispatcher;
/**
 * Unit tests for the multicast RIB manager ({@link MulticastRouteManager}),
 * backed by a {@link DistributedMcastStore} over an in-memory test storage
 * service. Events emitted by the manager are captured by a listener and
 * compared against the expected sequence.
 */
public class MulticastRouteManagerTest {
    // Route fixtures: r1 and r11 share source/group but differ in type; r2 is distinct.
    McastRoute r1 = new McastRoute(IpAddress.valueOf("1.1.1.1"),
            IpAddress.valueOf("1.1.1.2"),
            McastRoute.Type.IGMP);
    McastRoute r11 = new McastRoute(IpAddress.valueOf("1.1.1.1"),
            IpAddress.valueOf("1.1.1.2"),
            McastRoute.Type.STATIC);
    McastRoute r2 = new McastRoute(IpAddress.valueOf("2.2.2.1"),
            IpAddress.valueOf("2.2.2.2"),
            McastRoute.Type.PIM);
    // Connect points used as source/sink endpoints.
    ConnectPoint cp1 = new ConnectPoint(did("1"), PortNumber.portNumber(1));
    ConnectPoint cp2 = new ConnectPoint(did("2"), PortNumber.portNumber(2));
    private TestMulticastListener listener = new TestMulticastListener();
    private MulticastRouteManager manager;
    // Events recorded by the listener, in arrival order.
    private List<McastEvent> events;
    private DistributedMcastStore mcastStore;
    @Before
    public void setUp() throws Exception {
        manager = new MulticastRouteManager();
        mcastStore = new DistributedMcastStore();
        // Inject the test storage service via reflection before activating the store.
        TestUtils.setField(mcastStore, "storageService", new TestStorageService());
        injectEventDispatcher(manager, new TestEventDispatcher());
        events = Lists.newArrayList();
        manager.store = mcastStore;
        mcastStore.activate();
        manager.activate();
        manager.addListener(listener);
    }
    @After
    public void tearDown() {
        // Tear down in reverse of the setup order.
        manager.removeListener(listener);
        manager.deactivate();
        mcastStore.deactivate();
    }
    @Test
    public void testAdd() {
        manager.add(r1);
        validateEvents(McastEvent.Type.ROUTE_ADDED);
    }
    @Test
    public void testRemove() {
        manager.add(r1);
        manager.remove(r1);
        validateEvents(McastEvent.Type.ROUTE_ADDED, McastEvent.Type.ROUTE_REMOVED);
    }
    @Test
    public void testAddSource() {
        manager.addSource(r1, cp1);
        validateEvents(McastEvent.Type.SOURCE_ADDED);
        assertEquals("Route is not equal", cp1, manager.fetchSource(r1));
    }
    @Test
    public void testAddSink() {
        manager.addSink(r1, cp1);
        validateEvents(McastEvent.Type.SINK_ADDED);
        assertEquals("Route is not equal", Sets.newHashSet(cp1), manager.fetchSinks(r1));
    }
    @Test
    public void testRemoveSink() {
        manager.addSource(r1, cp1);
        manager.addSink(r1, cp1);
        manager.addSink(r1, cp2);
        manager.removeSink(r1, cp2);
        validateEvents(McastEvent.Type.SOURCE_ADDED,
                       McastEvent.Type.SINK_ADDED,
                       McastEvent.Type.SINK_ADDED,
                       McastEvent.Type.SINK_REMOVED);
        // Only the first sink should remain after cp2 is removed.
        assertEquals("Route is not equal", Sets.newHashSet(cp1), manager.fetchSinks(r1));
    }
    // Asserts that the recorded events match the expected sequence, in order.
    private void validateEvents(McastEvent.Type... evs) {
        if (events.size() != evs.length) {
            fail(String.format("Mismatch number of events# obtained -> %s : expected %s",
                               events, evs));
        }
        for (int i = 0; i < evs.length; i++) {
            if (evs[i] != events.get(i).type()) {
                fail(String.format("Mismatched events# obtained -> %s : expected %s",
                                   events, evs));
            }
        }
    }
    // Listener that records every multicast event for later inspection.
    class TestMulticastListener implements McastListener {
        @Override
        public void event(McastEvent event) {
            events.add(event);
        }
    }
    // NOTE(review): appears unused within this file — possibly left over from an
    // earlier version of the test; confirm before removing.
    private class TestCoreService extends CoreServiceAdapter {
        @Override
        public ApplicationId registerApplication(String name) {
            return new DefaultApplicationId(0, name);
        }
    }
}
| apache-2.0 |
fillumina/PerformanceTools | performance-tools/src/main/java/com/fillumina/performance/producer/progression/ProgressionPerformanceInstrumenterBuilder.java | 3131 | package com.fillumina.performance.producer.progression;
import java.io.Serializable;
import java.util.Arrays;
import java.util.concurrent.TimeUnit;
/**
 * Builder for {@link ProgressionPerformanceInstrumenter}: configures the
 * sequence of iteration counts (the "progression"), the number of samples
 * taken at each step, and an overall timeout.
 *
 * @author Francesco Illuminati
 */
public class ProgressionPerformanceInstrumenterBuilder
        extends AbstractIstrumenterBuilder<
            ProgressionPerformanceInstrumenterBuilder,
            ProgressionPerformanceInstrumenter>
        implements Serializable {
    private static final long serialVersionUID = 1L;

    /** Iteration counts to execute, one entry per progression step. */
    private long[] iterationsProgression;

    /**
     * Creates a builder with a default progression (from 1_000 to
     * 1_000_000 iterations) with 10 samples per step and a timeout of
     * 5 seconds.
     */
    public ProgressionPerformanceInstrumenterBuilder() {
        super();
        // init with default values
        setIterationProgression(1_000, 10_000, 100_000, 1_000_000);
        setSamplesPerStep(10);
        setTimeout(5, TimeUnit.SECONDS);
    }

    /**
     * Allows to define a progression by directly inserting the number
     * of iterations for each step.
     * <br>
     * Alternative to {@link #setBaseAndMagnitude(long, int) }.
     */
    @SuppressWarnings(value = "unchecked")
    public ProgressionPerformanceInstrumenterBuilder setIterationProgression(
            final long... iterationsProgression) {
        this.iterationsProgression = iterationsProgression;
        return this;
    }

    /**
     * Allows to define a progression by inserting a starting number and
     * then the number of times this number should be increased in magnitude
     * (multiplied by 10).
     * <br>
     * i.e.:
     * <pre>
     * base=100, magnitude=3 : 100, 1_000, 10_000
     * base=20, magnitude=2 : 20, 200
     * </pre>
     * <br>
     * Alternative to
     * {@link #setIterationProgression(long...) }.
     */
    @SuppressWarnings(value = "unchecked")
    public ProgressionPerformanceInstrumenterBuilder setBaseAndMagnitude(
            final long baseIterations,
            final int maximumMagnitude) {
        iterationsProgression = new long[maximumMagnitude];
        for (int magnitude = 0; magnitude < maximumMagnitude; magnitude++) {
            iterationsProgression[magnitude] =
                    calculateIterationsProgression(baseIterations, magnitude);
        }
        return this;
    }

    /**
     * Returns {@code baseIterations * 10^magnitude} rounded to a {@code long}.
     * Fix: the previous version cast the result to {@code int} before storing
     * it in the {@code long[]} progression, silently overflowing once a step
     * exceeded {@link Integer#MAX_VALUE} iterations.
     */
    private static long calculateIterationsProgression(
            final long baseIterations,
            final int magnitude) {
        return Math.round(baseIterations * Math.pow(10, magnitude));
    }

    /** @return the configured iteration counts, one entry per step. */
    public long[] getIterationsProgression() {
        return iterationsProgression;
    }

    /** Validates the builder state, failing fast when no progression is set. */
    @Override
    protected void validate() {
        super.validate();
        if (iterationsProgression == null || iterationsProgression.length == 0) {
            throw new IllegalArgumentException(
                    "no iteration progression specified: " +
                    Arrays.toString(iterationsProgression));
        }
    }

    /** Builds the instrumenter after validating the configuration. */
    @Override
    public ProgressionPerformanceInstrumenter build() {
        validate();
        return new ProgressionPerformanceInstrumenter(this);
    }
}
| apache-2.0 |
Izakey/Java | Chapter15/src/FileChooserDemo.java | 3045 | /**
* Figure 15.12 : FileChooserDemo.java
* Demonstrating FileChooser
*/
import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.JOptionPane;
import javax.swing.JScrollPane;
import javax.swing.JTextArea;
/**
 * Demonstrates {@link JFileChooser}: lets the user pick a file or directory
 * and displays its metadata (and, for a directory, its contents) in a
 * scrollable text area.
 */
public class FileChooserDemo extends JFrame {
    private final JTextArea outputArea;
    // set up GUI
    public FileChooserDemo() throws IOException
    {
        super("FileChooser Demo");
        outputArea = new JTextArea();
        add(new JScrollPane(outputArea)); // outputArea is scrollable
        analyzePath(); // get Path from user and display information
    }
    // Display information about the file or directory the user specifies
    public void analyzePath() throws IOException
    {
        // never null: getFileOrDirectoryPath() exits the JVM when the user cancels
        Path path = getFileOrDirectoryPath();
        if (path != null && Files.exists(path))
        {
            // Gather file (or directory) information
            StringBuilder builder = new StringBuilder();
            builder.append(String.format("%s : \n", path.getFileName()));
            builder.append(String.format("%s a directory\n", Files.isDirectory(path) ? "is" : "is not" ));
            // Fixed: format string previously read "%s is an absolute path", which
            // rendered a duplicated "is" and lacked the trailing newline.
            builder.append(String.format("%s an absolute path\n", path.isAbsolute() ? "is" : "is not" ));
            builder.append(String.format("Last modified : %s\n", Files.getLastModifiedTime(path)));
            builder.append(String.format("Size : %s\n", Files.size(path)));
            builder.append(String.format("Path : %s\n", path));
            builder.append(String.format("Absolute Path : %s\n", path.toAbsolutePath()));
            if (Files.isDirectory(path)) // output directory listing
            {
                builder.append(String.format("\nDirectory Contents\n"));
                // try-with-resources: a DirectoryStream must be closed to release
                // the underlying directory handle (previously leaked).
                try (DirectoryStream<Path> directoryStream =
                        Files.newDirectoryStream(path))
                {
                    for (Path p : directoryStream)
                        builder.append(String.format("%s\n", p));
                }
            }
            outputArea.setText(builder.toString());
        }
        else // Path does not exist
        {
            JOptionPane.showMessageDialog(this, path.getFileName() + " does not exist",
                    "ERROR", JOptionPane.ERROR_MESSAGE );
        }
    }
    // allow user to specify file or directory name
    private Path getFileOrDirectoryPath()
    {
        // Configure dialog allowing selection of a file or directory
        JFileChooser fileChooser = new JFileChooser();
        fileChooser.setFileSelectionMode(JFileChooser.FILES_AND_DIRECTORIES);
        int result = fileChooser.showOpenDialog(this);
        // if user clicked Cancel button on dialog, return
        if (result == JFileChooser.CANCEL_OPTION)
            System.exit(1);
        // return Path representing the selected file
        return fileChooser.getSelectedFile().toPath();
    }
}
| apache-2.0 |
aosp-mirror/platform_frameworks_support | room/rxjava2/src/test/java/androidx/room/RxRoomTest.java | 6970 | /*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.room;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import androidx.arch.core.executor.JunitTaskExecutorRule;
import org.hamcrest.CoreMatchers;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicReference;
import io.reactivex.Flowable;
import io.reactivex.annotations.NonNull;
import io.reactivex.disposables.Disposable;
import io.reactivex.functions.Consumer;
import io.reactivex.subscribers.TestSubscriber;
/**
 * Unit tests for {@code RxRoom.createFlowable}: verifies observer
 * registration/removal against a mocked {@link InvalidationTracker},
 * re-emission on table invalidation, and error propagation.
 */
@RunWith(JUnit4.class)
public class RxRoomTest {
    // Single-threaded task executor so emissions can be drained deterministically.
    @Rule
    public JunitTaskExecutorRule mExecutor = new JunitTaskExecutorRule(1, false);
    private RoomDatabase mDatabase;
    private InvalidationTracker mInvalidationTracker;
    // Captures every observer the code under test registers with the mocked tracker.
    private List<InvalidationTracker.Observer> mAddedObservers = new ArrayList<>();
    @Before
    public void init() {
        mDatabase = mock(RoomDatabase.class);
        mInvalidationTracker = mock(InvalidationTracker.class);
        when(mDatabase.getInvalidationTracker()).thenReturn(mInvalidationTracker);
        // Record observers passed to addObserver so tests can trigger
        // invalidations manually via observer.onInvalidated(...).
        doAnswer(new Answer() {
            @Override
            public Object answer(InvocationOnMock invocation) throws Throwable {
                mAddedObservers.add((InvalidationTracker.Observer) invocation.getArguments()[0]);
                return null;
            }
        }).when(mInvalidationTracker).addObserver(any(InvalidationTracker.Observer.class));
    }
    /**
     * Subscribing registers exactly one observer; disposing removes it;
     * re-subscribing registers a fresh (distinct) observer.
     */
    @Test
    public void basicAddRemove() {
        Flowable<Object> flowable = RxRoom.createFlowable(mDatabase, "a", "b");
        // No observer until there is an actual subscriber.
        verify(mInvalidationTracker, never()).addObserver(any(InvalidationTracker.Observer.class));
        Disposable disposable = flowable.subscribe();
        verify(mInvalidationTracker).addObserver(any(InvalidationTracker.Observer.class));
        assertThat(mAddedObservers.size(), CoreMatchers.is(1));
        InvalidationTracker.Observer observer = mAddedObservers.get(0);
        disposable.dispose();
        verify(mInvalidationTracker).removeObserver(observer);
        disposable = flowable.subscribe();
        verify(mInvalidationTracker, times(2))
                .addObserver(any(InvalidationTracker.Observer.class));
        assertThat(mAddedObservers.size(), CoreMatchers.is(2));
        assertThat(mAddedObservers.get(1), CoreMatchers.not(CoreMatchers.sameInstance(observer)));
        InvalidationTracker.Observer observer2 = mAddedObservers.get(1);
        disposable.dispose();
        verify(mInvalidationTracker).removeObserver(observer2);
    }
    /**
     * Each table invalidation triggers one more emission; after disposal
     * further invalidations are ignored.
     */
    @Test
    public void basicNotify() throws InterruptedException {
        String[] tables = {"a", "b"};
        Set<String> tableSet = new HashSet<>(Arrays.asList(tables));
        Flowable<Object> flowable = RxRoom.createFlowable(mDatabase, tables);
        CountingConsumer consumer = new CountingConsumer();
        Disposable disposable = flowable.subscribe(consumer);
        assertThat(mAddedObservers.size(), CoreMatchers.is(1));
        InvalidationTracker.Observer observer = mAddedObservers.get(0);
        // One initial emission on subscribe.
        assertThat(consumer.mCount, CoreMatchers.is(1));
        observer.onInvalidated(tableSet);
        assertThat(consumer.mCount, CoreMatchers.is(2));
        observer.onInvalidated(tableSet);
        assertThat(consumer.mCount, CoreMatchers.is(3));
        disposable.dispose();
        observer.onInvalidated(tableSet);
        // Count unchanged after disposal.
        assertThat(consumer.mCount, CoreMatchers.is(3));
    }
    /**
     * A null value from the query callable yields no emission; non-null
     * values are emitted once per invalidation.
     */
    @Test
    public void internalCallable() throws InterruptedException {
        final AtomicReference<String> value = new AtomicReference<>(null);
        String[] tables = {"a", "b"};
        Set<String> tableSet = new HashSet<>(Arrays.asList(tables));
        final Flowable<String> flowable = RxRoom.createFlowable(mDatabase, tables,
                new Callable<String>() {
                    @Override
                    public String call() throws Exception {
                        return value.get();
                    }
                });
        final CountingConsumer consumer = new CountingConsumer();
        flowable.subscribe(consumer);
        InvalidationTracker.Observer observer = mAddedObservers.get(0);
        drain();
        // no value because it is null
        assertThat(consumer.mCount, CoreMatchers.is(0));
        value.set("bla");
        observer.onInvalidated(tableSet);
        drain();
        // get value
        assertThat(consumer.mCount, CoreMatchers.is(1));
        observer.onInvalidated(tableSet);
        drain();
        // get value
        assertThat(consumer.mCount, CoreMatchers.is(2));
        value.set(null);
        observer.onInvalidated(tableSet);
        drain();
        // no value
        assertThat(consumer.mCount, CoreMatchers.is(2));
    }
    // Flushes the pending tasks queued on the test executor.
    private void drain() throws InterruptedException {
        mExecutor.drainTasks(2);
    }
    /** An exception thrown by the callable is delivered to the subscriber as onError. */
    @Test
    public void exception() throws InterruptedException {
        final Flowable<String> flowable = RxRoom.createFlowable(mDatabase, new String[]{"a"},
                new Callable<String>() {
                    @Override
                    public String call() throws Exception {
                        throw new Exception("i want exception");
                    }
                });
        TestSubscriber<String> subscriber = new TestSubscriber<>();
        flowable.subscribe(subscriber);
        drain();
        assertThat(subscriber.errorCount(), CoreMatchers.is(1));
        assertThat(subscriber.errors().get(0).getMessage(), CoreMatchers.is("i want exception"));
    }
    /** Test helper: counts how many items it has accepted. */
    private static class CountingConsumer implements Consumer<Object> {
        int mCount = 0;
        @Override
        public void accept(@NonNull Object o) throws Exception {
            mCount++;
        }
    }
}
| apache-2.0 |
openstack/doc8 | doc8/main.py | 16855 | # Copyright (C) 2014 Ivan Melnikov <iv at altlinux dot org>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Check documentation for simple style requirements.
What is checked:
- invalid rst format - D000
- lines should not be longer than 79 characters - D001
- RST exception: line with no whitespace except in the beginning
- RST exception: lines with http or https urls
- RST exception: literal blocks
- RST exception: rst target directives
- no trailing whitespace - D002
- no tabulation for indentation - D003
- no carriage returns (use unix newlines) - D004
- no newline at end of file - D005
"""
import argparse
import collections
import configparser
import logging
import os
import sys
try:
import toml
HAVE_TOML = True
except ImportError:
HAVE_TOML = False
from stevedore import extension
from doc8 import checks
from doc8 import parser as file_parser
from doc8 import utils
from doc8 import version
# File extensions scanned by default.
FILE_PATTERNS = [".rst", ".txt"]
# Default maximum allowed line length (check D001).
MAX_LINE_LENGTH = 79
# Candidate configuration files probed in order when none is given explicitly.
CONFIG_FILENAMES = ["doc8.ini", "tox.ini", "pep8.ini", "setup.cfg"]
if HAVE_TOML:
    CONFIG_FILENAMES.extend(["pyproject.toml"])
def split_set_type(text, delimiter=","):
    """Split *text* on *delimiter*, returning the set of non-blank, stripped pieces."""
    pieces = (piece.strip() for piece in text.split(delimiter))
    return {piece for piece in pieces if piece}
def merge_sets(sets):
    """Return a new set that is the union of every iterable in *sets*."""
    merged = set()
    for each in sets:
        merged |= set(each)
    return merged
def parse_ignore_path_errors(entries):
    """Turn ``path;CODE[;CODE...]`` strings into a ``{path: set(codes)}`` mapping.

    Repeated entries for the same path accumulate their codes.
    """
    by_path = collections.defaultdict(set)
    for entry in entries:
        raw_path, raw_codes = entry.split(";", 1)
        codes = {code.strip() for code in raw_codes.split(";") if code.strip()}
        by_path[raw_path.strip()].update(codes)
    return dict(by_path)
def from_ini(fp):
parser = configparser.RawConfigParser()
with open(fp, "r") as fh:
parser.read_file(fh)
cfg = {}
try:
cfg["max_line_length"] = parser.getint("doc8", "max-line-length")
except (configparser.NoSectionError, configparser.NoOptionError):
pass
try:
cfg["ignore"] = split_set_type(parser.get("doc8", "ignore"))
except (configparser.NoSectionError, configparser.NoOptionError):
pass
try:
cfg["ignore_path"] = split_set_type(parser.get("doc8", "ignore-path"))
except (configparser.NoSectionError, configparser.NoOptionError):
pass
try:
ignore_path_errors = parser.get("doc8", "ignore-path-errors")
ignore_path_errors = split_set_type(ignore_path_errors)
ignore_path_errors = parse_ignore_path_errors(ignore_path_errors)
cfg["ignore_path_errors"] = ignore_path_errors
except (configparser.NoSectionError, configparser.NoOptionError):
pass
try:
cfg["allow_long_titles"] = parser.getboolean("doc8", "allow-long-titles")
except (configparser.NoSectionError, configparser.NoOptionError):
pass
try:
cfg["sphinx"] = parser.getboolean("doc8", "sphinx")
except (configparser.NoSectionError, configparser.NoOptionError):
pass
try:
cfg["verbose"] = parser.getboolean("doc8", "verbose")
except (configparser.NoSectionError, configparser.NoOptionError):
pass
try:
cfg["file_encoding"] = parser.get("doc8", "file-encoding")
except (configparser.NoSectionError, configparser.NoOptionError):
pass
try:
cfg["default_extension"] = parser.get("doc8", "default-extension")
except (configparser.NoSectionError, configparser.NoOptionError):
pass
try:
extensions = parser.get("doc8", "extensions")
extensions = extensions.split(",")
extensions = [s.strip() for s in extensions if s.strip()]
if extensions:
cfg["extension"] = extensions
except (configparser.NoSectionError, configparser.NoOptionError):
pass
return cfg
def from_toml(fp):
    """Return the ``[tool.doc8]`` table of a TOML file, or ``{}`` when absent."""
    parsed = toml.load(fp)
    tool_section = parsed.get("tool", {})
    return tool_section.get("doc8", {})
def extract_config(args):
    """Return settings from the first config file that exists and parses.

    Probes the user-supplied file list (``args["config"]``) or, when empty,
    the default candidates in ``CONFIG_FILENAMES``.

    :param args: parsed CLI arguments dict (uses the ``config`` key)
    :returns: dict of settings from the first usable file, else ``{}``
    """
    cfg = {}
    for cfg_file in args["config"] or CONFIG_FILENAMES:
        if not os.path.isfile(cfg_file):
            if args["config"]:
                # Only warn about files the user explicitly requested; the
                # default candidates are routinely absent.
                # BUGFIX: report the missing file itself, not the whole list.
                print(
                    "Configuration file %s does not exist...ignoring" % (cfg_file,)
                )
            continue
        if cfg_file.endswith((".ini", ".cfg")):
            cfg = from_ini(cfg_file)
        elif cfg_file.endswith(".toml") and HAVE_TOML:
            cfg = from_toml(cfg_file)
        if cfg:
            break
    return cfg
def fetch_checks(cfg):
    """Instantiate all checks to run: the built-ins plus any installed
    'doc8.extension.check' entry-point extensions.

    :param cfg: configuration dict passed to each check's constructor
    :returns: list of check instances (built-ins first, then add-ons)
    """
    base = [
        checks.CheckValidity(cfg),
        checks.CheckTrailingWhitespace(cfg),
        checks.CheckIndentationNoTab(cfg),
        checks.CheckCarriageReturn(cfg),
        checks.CheckMaxLineLength(cfg),
        checks.CheckNewlineEndOfFile(cfg),
    ]
    # Each extension gets its own copy of cfg so it cannot mutate ours.
    mgr = extension.ExtensionManager(
        namespace="doc8.extension.check", invoke_on_load=True, invoke_args=(cfg.copy(),)
    )
    addons = []
    for e in mgr:
        addons.append(e.obj)
    return base + addons
def setup_logging(verbose):
if verbose:
level = logging.DEBUG
else:
level = logging.ERROR
logging.basicConfig(
level=level, format="%(levelname)s: %(message)s", stream=sys.stdout
)
def scan(cfg):
    """Walk the configured paths and parse every selected documentation file.

    :param cfg: config dict (uses paths, extension, ignore_path,
        default_extension, file_encoding, quiet, verbose)
    :returns: tuple ``(deque of parsed files, count of ignored files)``
    """
    if not cfg.get("quiet"):
        print("Scanning...")
    files = collections.deque()
    ignored_paths = cfg.get("ignore_path", [])
    files_ignored = 0
    file_iter = utils.find_files(
        cfg.get("paths", []), cfg.get("extension", []), ignored_paths
    )
    default_extension = cfg.get("default_extension")
    file_encoding = cfg.get("file_encoding")
    for filename, ignoreable in file_iter:
        if ignoreable:
            files_ignored += 1
            if cfg.get("verbose"):
                print(" Ignoring '%s'" % (filename))
        else:
            # Parse now so validate() can iterate lines without re-reading.
            f = file_parser.parse(
                filename, default_extension=default_extension, encoding=file_encoding
            )
            files.append(f)
            if cfg.get("verbose"):
                print(" Selecting '%s'" % (filename))
    return (files, files_ignored)
def validate(cfg, files, result=None):
    """Run every configured check over *files* and tally violations.

    Consumes the *files* deque. Checks come in two flavors: ContentChecks
    report on whole files, LineChecks report per line. Ignore rules come
    from cfg["ignore"] (global) and cfg["ignore_path_errors"] (per-file).

    :param cfg: configuration dict
    :param files: deque of parsed files produced by :func:`scan`
    :param result: Result collector receiving each error (required)
    :returns: dict mapping check name -> number of errors it produced
    """
    if not cfg.get("quiet"):
        print("Validating...")
    error_counts = {}
    ignoreables = frozenset(cfg.get("ignore", []))
    ignore_targeted = cfg.get("ignore_path_errors", {})
    while files:
        f = files.popleft()
        if cfg.get("verbose"):
            print("Validating %s" % f)
        # Codes ignored for this specific file = per-file set + global set.
        targeted_ignoreables = set(ignore_targeted.get(f.filename, set()))
        targeted_ignoreables.update(ignoreables)
        for c in fetch_checks(cfg):
            check_name = ".".join([c.__class__.__module__, c.__class__.__name__])
            error_counts.setdefault(check_name, 0)
            # Skip checks that declare they cannot parse this extension.
            try:
                extension_matcher = c.EXT_MATCHER
            except AttributeError:
                pass
            else:
                if not extension_matcher.match(f.extension):
                    if cfg.get("verbose"):
                        print(
                            " Skipping check '%s' since it does not"
                            " understand parsing a file with extension '%s'"
                            % (check_name, f.extension)
                        )
                    continue
            # Skip checks whose every reportable code is being ignored.
            try:
                reports = set(c.REPORTS)
            except AttributeError:
                pass
            else:
                reports = reports - targeted_ignoreables
                if not reports:
                    if cfg.get("verbose"):
                        print(
                            " Skipping check '%s', determined to only"
                            " check ignoreable codes" % check_name
                        )
                    continue
            if cfg.get("verbose"):
                print(" Running check '%s'" % check_name)
            if isinstance(c, checks.ContentCheck):
                # Whole-file check: may report a non-numeric line number.
                for line_num, code, message in c.report_iter(f):
                    if code in targeted_ignoreables:
                        continue
                    if not isinstance(line_num, (float, int)):
                        line_num = "?"
                    if cfg.get("verbose"):
                        print(
                            " - %s:%s: %s %s" % (f.filename, line_num, code, message)
                        )
                    elif not result.capture:
                        print("%s:%s: %s %s" % (f.filename, line_num, code, message))
                    result.error(check_name, f.filename, line_num, code, message)
                    error_counts[check_name] += 1
            elif isinstance(c, checks.LineCheck):
                # Per-line check: run over every line, 1-indexed.
                for line_num, line in enumerate(f.lines_iter(), 1):
                    for code, message in c.report_iter(line):
                        if code in targeted_ignoreables:
                            continue
                        if cfg.get("verbose"):
                            print(
                                " - %s:%s: %s %s"
                                % (f.filename, line_num, code, message)
                            )
                        elif not result.capture:
                            print(
                                "%s:%s: %s %s" % (f.filename, line_num, code, message)
                            )
                        result.error(check_name, f.filename, line_num, code, message)
                        error_counts[check_name] += 1
            else:
                raise TypeError("Unknown check type: %s, %s" % (type(c), c))
    return error_counts
def get_defaults():
    """Return a fresh dict with doc8's built-in defaults for every option."""
    return dict(
        paths=[os.getcwd()],
        config=[],
        allow_long_titles=False,
        ignore=[],
        sphinx=True,
        ignore_path=[],
        ignore_path_errors=[],
        default_extension="",
        file_encoding="",
        max_line_length=MAX_LINE_LENGTH,
        extension=list(FILE_PATTERNS),
        quiet=False,
        verbose=False,
        version=False,
    )
class Result(object):
    """Collects the outcome of a doc8 run: file tallies and every error."""

    def __init__(self):
        self.files_selected = 0
        self.files_ignored = 0
        self.error_counts = {}
        self.errors = []
        # When True, report() also lists the individual errors (API mode).
        self.capture = False

    @property
    def total_errors(self):
        """Number of individual errors recorded so far."""
        return len(self.errors)

    def error(self, check_name, filename, line_num, code, message):
        """Record one error occurrence."""
        self.errors.append((check_name, filename, line_num, code, message))

    def finish(self, files_selected, files_ignored, error_counts):
        """Store the final file tallies and per-check error counts."""
        self.files_selected = files_selected
        self.files_ignored = files_ignored
        self.error_counts = error_counts

    def report(self):
        """Render the human-readable summary as a single string."""
        lines = []
        if self.capture:
            lines.extend("%s:%s: %s %s" % err[1:] for err in self.errors)
        lines.append("=" * 8)
        lines.append("Total files scanned = %s" % (self.files_selected,))
        lines.append("Total files ignored = %s" % (self.files_ignored,))
        lines.append("Total accumulated errors = %s" % (self.total_errors,))
        if self.error_counts:
            lines.append("Detailed error counts:")
            for check_name in sorted(self.error_counts):
                lines.append(
                    "    - %s = %s" % (check_name, self.error_counts[check_name])
                )
        return "\n".join(lines)
def doc8(args=None, **kwargs):
    """Programmatic entry point: merge config, scan and validate, return a Result.

    When *args* is None the defaults are used, output is suppressed and the
    errors are captured on the returned Result instead of printed.

    :param args: parsed option dict (as produced by main()/get_defaults())
    :param kwargs: overrides applied on top of args and file config
    :returns: populated :class:`Result`
    """
    result = Result()
    if args is None:
        args = get_defaults()
        # Force reporting to suppress all output
        kwargs["quiet"] = True
        kwargs["verbose"] = False
        result.capture = True
    args["ignore"] = merge_sets(args["ignore"])
    cfg = extract_config(args)
    # Config-file settings are merged into (not replacing) CLI settings.
    args["ignore"].update(cfg.pop("ignore", set()))
    if "sphinx" in cfg:
        args["sphinx"] = cfg.pop("sphinx")
    args["extension"].extend(cfg.pop("extension", []))
    args["ignore_path"].extend(cfg.pop("ignore_path", []))
    cfg.setdefault("ignore_path_errors", {})
    tmp_ignores = parse_ignore_path_errors(args.pop("ignore_path_errors", []))
    for path, ignores in tmp_ignores.items():
        if path in cfg["ignore_path_errors"]:
            cfg["ignore_path_errors"][path].update(ignores)
        else:
            cfg["ignore_path_errors"][path] = set(ignores)
    args.update(cfg)
    # Override args with any kwargs
    args.update(kwargs.items())
    setup_logging(args.get("verbose"))
    files, files_ignored = scan(args)
    files_selected = len(files)
    error_counts = validate(args, files, result=result)
    result.finish(files_selected, files_ignored, error_counts)
    return result
def main():
    """Command-line entry point: parse options, run doc8, print the report.

    :returns: process exit code (0 on success/version, 1 when errors found)
    """
    defaults = get_defaults()
    parser = argparse.ArgumentParser(
        prog="doc8",
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    parser.add_argument(
        "paths",
        metavar="path",
        type=str,
        nargs="*",
        help=("path to scan for doc files (default: current directory)."),
        default=defaults["paths"],
    )
    parser.add_argument(
        "--config",
        metavar="path",
        action="append",
        help="user config file location"
        " (default: %s)." % ", ".join(CONFIG_FILENAMES),
        default=defaults["config"],
    )
    parser.add_argument(
        "--allow-long-titles",
        action="store_true",
        help="allow long section titles (default: false).",
        default=defaults["allow_long_titles"],
    )
    parser.add_argument(
        "--ignore",
        action="append",
        metavar="code",
        help="ignore the given error code(s).",
        type=split_set_type,
        default=defaults["ignore"],
    )
    parser.add_argument(
        "--no-sphinx",
        action="store_false",
        help="do not ignore sphinx specific false positives.",
        default=defaults["sphinx"],
        dest="sphinx",
    )
    parser.add_argument(
        "--ignore-path",
        action="append",
        default=defaults["ignore_path"],
        help="ignore the given directory or file (globs are supported).",
        metavar="path",
    )
    parser.add_argument(
        "--ignore-path-errors",
        action="append",
        default=defaults["ignore_path_errors"],
        help="ignore the given specific errors in the provided file.",
        metavar="path",
    )
    parser.add_argument(
        "--default-extension",
        action="store",
        help="default file extension to use when a file is"
        " found without a file extension.",
        default=defaults["default_extension"],
        dest="default_extension",
        metavar="extension",
    )
    parser.add_argument(
        "--file-encoding",
        action="store",
        help="set input files text encoding",
        default=defaults["file_encoding"],
        dest="file_encoding",
        metavar="encoding",
    )
    parser.add_argument(
        "--max-line-length",
        action="store",
        metavar="int",
        type=int,
        help="maximum allowed line"
        " length (default: %s)." % defaults["max_line_length"],
        default=defaults["max_line_length"],
    )
    parser.add_argument(
        "-e",
        "--extension",
        action="append",
        metavar="extension",
        help="check file extensions of the given type"
        " (default: %s)." % ", ".join(defaults["extension"]),
        default=defaults["extension"],
    )
    parser.add_argument(
        "-q",
        "--quiet",
        action="store_true",
        help="only print violations",
        default=defaults["quiet"],
    )
    parser.add_argument(
        "-v",
        "--verbose",
        dest="verbose",
        action="store_true",
        help="run in verbose mode.",
        default=defaults["verbose"],
    )
    parser.add_argument(
        "--version",
        dest="version",
        action="store_true",
        help="show the version and exit.",
        default=defaults["version"],
    )
    args = vars(parser.parse_args())
    if args.get("version"):
        print(version.version_string)
        return 0
    result = doc8(args)
    if not args.get("quiet"):
        print(result.report())
    # Non-zero exit code signals that violations were found.
    if result.total_errors:
        return 1
    else:
        return 0
if __name__ == "__main__":
    # Propagate the exit code (1 when violations were found).
    sys.exit(main())
| apache-2.0 |
spring-cloud/spring-cloud-commons | spring-cloud-commons/src/main/java/org/springframework/cloud/client/loadbalancer/LoadBalancerAutoConfiguration.java | 6029 | /*
* Copyright 2012-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.client.loadbalancer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.springframework.beans.factory.ObjectProvider;
import org.springframework.beans.factory.SmartInitializingSingleton;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.AnyNestedCondition;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.client.ServiceInstance;
import org.springframework.cloud.client.loadbalancer.reactive.ReactiveLoadBalancer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Conditional;
import org.springframework.context.annotation.Configuration;
import org.springframework.http.client.ClientHttpRequestInterceptor;
import org.springframework.retry.support.RetryTemplate;
import org.springframework.web.client.RestTemplate;
/**
* Auto-configuration for blocking client-side load balancing.
*
* @author Spencer Gibb
* @author Dave Syer
* @author Will Tran
* @author Gang Li
* @author Olga Maciaszek-Sharma
*/
@Configuration(proxyBeanMethods = false)
@ConditionalOnClass(RestTemplate.class)
@ConditionalOnBean(LoadBalancerClient.class)
@EnableConfigurationProperties(LoadBalancerClientsProperties.class)
public class LoadBalancerAutoConfiguration {

	// Every @LoadBalanced-annotated RestTemplate in the context; empty when none exist.
	@LoadBalanced
	@Autowired(required = false)
	private List<RestTemplate> restTemplates = Collections.emptyList();

	// Optional transformers applied when building load-balanced requests.
	@Autowired(required = false)
	private List<LoadBalancerRequestTransformer> transformers = Collections.emptyList();

	/**
	 * Applies all available {@link RestTemplateCustomizer}s to every
	 * {@code @LoadBalanced} RestTemplate once all singletons are created.
	 */
	@Bean
	public SmartInitializingSingleton loadBalancedRestTemplateInitializerDeprecated(
			final ObjectProvider<List<RestTemplateCustomizer>> restTemplateCustomizers) {
		return () -> restTemplateCustomizers.ifAvailable(customizers -> {
			for (RestTemplate restTemplate : LoadBalancerAutoConfiguration.this.restTemplates) {
				for (RestTemplateCustomizer customizer : customizers) {
					customizer.customize(restTemplate);
				}
			}
		});
	}

	/** Factory for load-balanced requests, wired with the optional transformers. */
	@Bean
	@ConditionalOnMissingBean
	public LoadBalancerRequestFactory loadBalancerRequestFactory(LoadBalancerClient loadBalancerClient) {
		return new LoadBalancerRequestFactory(loadBalancerClient, this.transformers);
	}

	/** Non-retry wiring: active when Spring Retry is absent or retries are disabled. */
	@Configuration(proxyBeanMethods = false)
	@Conditional(RetryMissingOrDisabledCondition.class)
	static class LoadBalancerInterceptorConfig {

		@Bean
		public LoadBalancerInterceptor loadBalancerInterceptor(LoadBalancerClient loadBalancerClient,
				LoadBalancerRequestFactory requestFactory) {
			return new LoadBalancerInterceptor(loadBalancerClient, requestFactory);
		}

		// Registers the interceptor on each @LoadBalanced RestTemplate.
		@Bean
		@ConditionalOnMissingBean
		public RestTemplateCustomizer restTemplateCustomizer(final LoadBalancerInterceptor loadBalancerInterceptor) {
			return restTemplate -> {
				List<ClientHttpRequestInterceptor> list = new ArrayList<>(restTemplate.getInterceptors());
				list.add(loadBalancerInterceptor);
				restTemplate.setInterceptors(list);
			};
		}

	}

	// Matches when RetryTemplate is not on the classpath OR retries are explicitly off.
	private static class RetryMissingOrDisabledCondition extends AnyNestedCondition {

		RetryMissingOrDisabledCondition() {
			super(ConfigurationPhase.REGISTER_BEAN);
		}

		@ConditionalOnMissingClass("org.springframework.retry.support.RetryTemplate")
		static class RetryTemplateMissing {

		}

		@ConditionalOnProperty(value = "spring.cloud.loadbalancer.retry.enabled", havingValue = "false")
		static class RetryDisabled {

		}

	}

	/**
	 * Auto configuration for retry mechanism.
	 */
	@Configuration(proxyBeanMethods = false)
	@ConditionalOnClass(RetryTemplate.class)
	public static class RetryAutoConfiguration {

		@Bean
		@ConditionalOnMissingBean
		public LoadBalancedRetryFactory loadBalancedRetryFactory() {
			return new LoadBalancedRetryFactory() {
			};
		}

	}

	/**
	 * Auto configuration for retry intercepting mechanism.
	 */
	@Configuration(proxyBeanMethods = false)
	@ConditionalOnClass(RetryTemplate.class)
	@ConditionalOnBean(ReactiveLoadBalancer.Factory.class)
	@ConditionalOnProperty(value = "spring.cloud.loadbalancer.retry.enabled", matchIfMissing = true)
	public static class RetryInterceptorAutoConfiguration {

		@Bean
		@ConditionalOnMissingBean
		public RetryLoadBalancerInterceptor loadBalancerInterceptor(LoadBalancerClient loadBalancerClient,
				LoadBalancerRequestFactory requestFactory, LoadBalancedRetryFactory loadBalancedRetryFactory,
				ReactiveLoadBalancer.Factory<ServiceInstance> loadBalancerFactory) {
			return new RetryLoadBalancerInterceptor(loadBalancerClient, requestFactory, loadBalancedRetryFactory,
					loadBalancerFactory);
		}

		// Registers the retry-capable interceptor on each @LoadBalanced RestTemplate.
		@Bean
		@ConditionalOnMissingBean
		public RestTemplateCustomizer restTemplateCustomizer(
				final RetryLoadBalancerInterceptor loadBalancerInterceptor) {
			return restTemplate -> {
				List<ClientHttpRequestInterceptor> list = new ArrayList<>(restTemplate.getInterceptors());
				list.add(loadBalancerInterceptor);
				restTemplate.setInterceptors(list);
			};
		}

	}

}
| apache-2.0 |
heiwalebron/coolweather | app/src/main/java/com/coolweather/android/util/HttpUtil.java | 431 | package com.coolweather.android.util;
import okhttp3.OkHttpClient;
import okhttp3.Request;
/**
* Created by Administrator on 2016/12/7/007.
*/
public class HttpUtil {

    // Shared client: every OkHttpClient owns its own connection pool and
    // dispatcher thread pool, so creating a new one per request leaks
    // resources. OkHttp's documentation recommends a single shared instance.
    private static final OkHttpClient CLIENT = new OkHttpClient();

    /**
     * Sends an asynchronous HTTP GET request to the given URL.
     *
     * @param address  the URL to request
     * @param callback invoked by OkHttp on a worker thread with the
     *                 response or failure
     */
    public static void sendOkHttpRequest(String address, okhttp3.Callback callback) {
        Request request = new Request.Builder().url(address).build();
        CLIENT.newCall(request).enqueue(callback);
    }
}
| apache-2.0 |
ligson/pkidemo | src/main/java/security/pkix/PKIStatusInfo.java | 696 | package security.pkix;
public class PKIStatusInfo {
	/*-
	 * PKIStatusInfo ::= SEQUENCE {
	 *		status			PKIStatus,
	 *		statusString	PKIFreeText		OPTIONAL,
	 *		failInfo		PKIFailureInfo	OPTIONAL }
	 */

	// Fields made final: this is an immutable value object and should be
	// deeply unmodifiable after construction.
	/** Mandatory overall status of the PKI operation. */
	private final PKIStatus status;
	/** Optional human-readable status text; may be null. */
	private final PKIFreeText statusString;
	/** Optional failure detail; may be null. */
	private final PKIFailureInfo failInfo;

	/**
	 * Creates an immutable status-info value.
	 *
	 * @param status       the mandatory PKI status
	 * @param statusString optional free text, may be null
	 * @param failInfo     optional failure info, may be null
	 */
	public PKIStatusInfo(PKIStatus status, PKIFreeText statusString,
			PKIFailureInfo failInfo) {
		this.status = status;
		this.statusString = statusString;
		this.failInfo = failInfo;
	}

	public PKIStatus getStatus() {
		return status;
	}

	public PKIFreeText getStatusString() {
		return statusString;
	}

	public PKIFailureInfo getFailInfo() {
		return failInfo;
	}
}
| apache-2.0 |
Wenpei/incubator-systemml | src/main/java/org/apache/sysml/runtime/io/FrameWriterBinaryBlock.java | 4530 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysml.runtime.io;
import java.io.IOException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.mapred.JobConf;
import org.apache.sysml.conf.ConfigurationManager;
import org.apache.sysml.runtime.DMLRuntimeException;
import org.apache.sysml.runtime.matrix.data.FrameBlock;
import org.apache.sysml.runtime.util.MapReduceTool;
/**
* Single-threaded frame binary block writer.
*
*/
public class FrameWriterBinaryBlock extends FrameWriter
{
	/**
	 * Writes the given frame block to HDFS in binary-block format.
	 *
	 * @param src   in-memory frame block to persist
	 * @param fname target HDFS file name (overwritten if it exists)
	 * @param rlen  expected number of rows of the overall frame
	 * @param clen  expected number of columns of the overall frame
	 * @throws IOException if src exceeds [rlen,clen] or HDFS access fails
	 * @throws DMLRuntimeException on internal write errors
	 */
	@Override
	public final void writeFrameToHDFS( FrameBlock src, String fname, long rlen, long clen )
		throws IOException, DMLRuntimeException
	{
		//prepare file access
		JobConf job = new JobConf(ConfigurationManager.getCachedJobConf());
		Path path = new Path( fname );
		//if the file already exists on HDFS, remove it.
		MapReduceTool.deleteFileIfExistOnHDFS( fname );
		//bound check for src block
		if( src.getNumRows() > rlen || src.getNumColumns() > clen ) {
			throw new IOException("Frame block [1:"+src.getNumRows()+",1:"+src.getNumColumns()+"] " +
					"out of overall frame range [1:"+rlen+",1:"+clen+"].");
		}
		//write binary block to hdfs (sequential/parallel)
		writeBinaryBlockFrameToHDFS( path, job, src, rlen, clen );
	}
	/**
	 * Sequential write of the whole frame into a single sequence file;
	 * subclasses may override for a parallel strategy.
	 *
	 * @param path target HDFS path
	 * @param job  hadoop job configuration
	 * @param src  frame block to write
	 * @param rlen number of rows to write
	 * @param clen number of columns (unused here; kept for subclass overrides)
	 * @throws IOException on HDFS access failures
	 * @throws DMLRuntimeException on internal write errors
	 */
	protected void writeBinaryBlockFrameToHDFS( Path path, JobConf job, FrameBlock src, long rlen, long clen )
		throws IOException, DMLRuntimeException
	{
		FileSystem fs = FileSystem.get(job);
		int blen = ConfigurationManager.getBlocksize();
		//sequential write to single file
		writeBinaryBlockFrameToSequenceFile(path, job, fs, src, blen, 0, (int)rlen);
	}
	/**
	 * Internal primitive to write a block-aligned row range of a frame to a single sequence file,
	 * which is used for both single- and multi-threaded writers (for consistency).
	 *
	 * @param path target HDFS path of the sequence file
	 * @param job  hadoop job configuration
	 * @param fs   file system handle for path
	 * @param src  source frame block
	 * @param blen block size (rows per output block)
	 * @param rl   row lower bound (inclusive, 0-based)
	 * @param ru   row upper bound (exclusive)
	 * @throws DMLRuntimeException on slicing errors
	 * @throws IOException on HDFS write failures
	 */
	@SuppressWarnings("deprecation")
	protected final void writeBinaryBlockFrameToSequenceFile( Path path, JobConf job, FileSystem fs, FrameBlock src, int blen, int rl, int ru )
		throws DMLRuntimeException, IOException
	{
		//1) create sequence file writer
		SequenceFile.Writer writer = null;
		writer = new SequenceFile.Writer(fs, job, path, LongWritable.class, FrameBlock.class);
		try
		{
			//2) reblock and write
			LongWritable index = new LongWritable();
			if( src.getNumRows() <= blen ) //opt for single block
			{
				//directly write single block
				index.set(1);
				writer.append(index, src);
			}
			else //general case
			{
				//initialize blocks for reuse (at most 4 different blocks required)
				FrameBlock[] blocks = createFrameBlocksForReuse(src.getSchema(), src.getColumnNames(), src.getNumRows());
				//create and write subblocks of frame
				for(int bi = rl; bi < ru; bi += blen) {
					int len = Math.min(blen, src.getNumRows()-bi);
					//get reuse frame block and copy subpart to block
					FrameBlock block = getFrameBlockForReuse(blocks);
					src.sliceOperations( bi, bi+len-1, 0, src.getNumColumns()-1, block );
					//append block to sequence file; keys are 1-based start row indexes
					index.set(bi+1);
					writer.append(index, block);
				}
			}
		}
		finally {
			//always release the writer, even on failures
			IOUtilFunctions.closeSilently(writer);
		}
	}
}
| apache-2.0 |
dockerian/pyapi | pyapi/catalog-api/setup.py | 1150 | import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.md')) as f:
README = f.read()
# with open(os.path.join(here, 'CHANGES.txt')) as f:
# CHANGES = f.read()
CHANGES = "Changes"
requires = [
'pyramid',
'python-keystoneclient',
'python-swiftclient',
'waitress',
'nose',
'coverage',
'mock',
'webtest',
'tissue',
'pyyaml',
]
setup(
name='globalapi',
version='0.0.2',
description='HP Cloud - Global Catalog API Service',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author = 'Helion Dev',
author_email = 'helion.dev@hp.com',
url='',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="globalapi",
entry_points="""\
[paste.app_factory]
main = globalapi:main
""",
)
| apache-2.0 |
spring-projects/spring-data-examples | jpa/deferred/src/main/java/example/repo/Customer1673Repository.java | 284 | package example.repo;
import example.model.Customer1673;
import java.util.List;
import org.springframework.data.repository.CrudRepository;
/**
 * Spring Data repository exposing CRUD operations for {@code Customer1673}
 * entities keyed by their {@code Long} id.
 */
public interface Customer1673Repository extends CrudRepository<Customer1673, Long> {

    /** Derived query: returns all customers whose last name matches exactly. */
    List<Customer1673> findByLastName(String lastName);
}
| apache-2.0 |
aws/aws-sdk-java | aws-java-sdk-redshift/src/main/java/com/amazonaws/services/redshift/model/transform/CreateEndpointAccessRequestMarshaller.java | 3570 | /*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.redshift.model.transform;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.Request;
import com.amazonaws.DefaultRequest;
import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.redshift.model.*;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.util.StringUtils;
/**
 * Marshals a {@link CreateEndpointAccessRequest} into a Query-protocol HTTP
 * request for the Amazon Redshift {@code CreateEndpointAccess} action.
 *
 * <p>NOTE: generated code (see the {@code @Generated} annotation) — do not
 * hand-edit; changes will be lost when the SDK is regenerated.</p>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CreateEndpointAccessRequestMarshaller implements Marshaller<Request<CreateEndpointAccessRequest>, CreateEndpointAccessRequest> {

    public Request<CreateEndpointAccessRequest> marshall(CreateEndpointAccessRequest createEndpointAccessRequest) {

        if (createEndpointAccessRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        // Every Query-protocol call carries the action name and API version.
        Request<CreateEndpointAccessRequest> request = new DefaultRequest<CreateEndpointAccessRequest>(createEndpointAccessRequest, "AmazonRedshift");
        request.addParameter("Action", "CreateEndpointAccess");
        request.addParameter("Version", "2012-12-01");
        request.setHttpMethod(HttpMethodName.POST);

        // Optional scalar members are only emitted when set.
        if (createEndpointAccessRequest.getClusterIdentifier() != null) {
            request.addParameter("ClusterIdentifier", StringUtils.fromString(createEndpointAccessRequest.getClusterIdentifier()));
        }

        if (createEndpointAccessRequest.getResourceOwner() != null) {
            request.addParameter("ResourceOwner", StringUtils.fromString(createEndpointAccessRequest.getResourceOwner()));
        }

        if (createEndpointAccessRequest.getEndpointName() != null) {
            request.addParameter("EndpointName", StringUtils.fromString(createEndpointAccessRequest.getEndpointName()));
        }

        if (createEndpointAccessRequest.getSubnetGroupName() != null) {
            request.addParameter("SubnetGroupName", StringUtils.fromString(createEndpointAccessRequest.getSubnetGroupName()));
        }

        // The security-group list is emitted when it is non-empty or was set
        // explicitly by the caller (i.e. not the SDK's auto-constructed list).
        if (!createEndpointAccessRequest.getVpcSecurityGroupIds().isEmpty()
                || !((com.amazonaws.internal.SdkInternalList<String>) createEndpointAccessRequest.getVpcSecurityGroupIds()).isAutoConstruct()) {
            com.amazonaws.internal.SdkInternalList<String> vpcSecurityGroupIdsList = (com.amazonaws.internal.SdkInternalList<String>) createEndpointAccessRequest
                    .getVpcSecurityGroupIds();
            int vpcSecurityGroupIdsListIndex = 1;

            // Query-protocol lists are flattened into 1-based indexed parameters.
            for (String vpcSecurityGroupIdsListValue : vpcSecurityGroupIdsList) {
                if (vpcSecurityGroupIdsListValue != null) {
                    request.addParameter("VpcSecurityGroupIds.VpcSecurityGroupId." + vpcSecurityGroupIdsListIndex,
                            StringUtils.fromString(vpcSecurityGroupIdsListValue));
                }
                vpcSecurityGroupIdsListIndex++;
            }
        }

        return request;
    }

}
| apache-2.0 |
google/gvisor | pkg/sentry/syscalls/linux/sys_seccomp.go | 2505 | // Copyright 2018 The gVisor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package linux
import (
"gvisor.dev/gvisor/pkg/abi/linux"
"gvisor.dev/gvisor/pkg/bpf"
"gvisor.dev/gvisor/pkg/errors/linuxerr"
"gvisor.dev/gvisor/pkg/hostarch"
"gvisor.dev/gvisor/pkg/sentry/arch"
"gvisor.dev/gvisor/pkg/sentry/kernel"
)
// userSockFprog is equivalent to Linux's struct sock_fprog on amd64.
//
// The field layout (including the explicit padding) must match the userspace
// ABI exactly, since instances are copied directly from user memory.
//
// +marshal
type userSockFprog struct {
	// Len is the length of the filter in BPF instructions.
	Len uint16

	_ [6]byte // padding for alignment

	// Filter is a user pointer to the struct sock_filter array that makes up
	// the filter program. Filter is a uint64 rather than a hostarch.Addr
	// because hostarch.Addr is actually uintptr, which is not a fixed-size
	// type.
	Filter uint64
}
// seccomp applies a seccomp policy to the current task.
//
// Only SECCOMP_SET_MODE_FILTER with an optional SECCOMP_FILTER_FLAG_TSYNC is
// supported; anything else yields EINVAL.
func seccomp(t *kernel.Task, mode, flags uint64, addr hostarch.Addr) error {
	if mode != linux.SECCOMP_SET_MODE_FILTER {
		// Unsupported mode.
		return linuxerr.EINVAL
	}

	tsync := flags&linux.SECCOMP_FILTER_FLAG_TSYNC != 0
	if flags&^linux.SECCOMP_FILTER_FLAG_TSYNC != 0 {
		// Some flag other than TSYNC was set.
		return linuxerr.EINVAL
	}

	// Read the sock_fprog header from user memory, then the instruction
	// array it points to.
	var prog userSockFprog
	if _, err := prog.CopyIn(t, addr); err != nil {
		return err
	}
	insns := make([]linux.BPFInstruction, int(prog.Len))
	if _, err := linux.CopyBPFInstructionSliceIn(t, hostarch.Addr(prog.Filter), insns); err != nil {
		return err
	}

	// Reject programs that do not compile rather than installing them.
	compiled, err := bpf.Compile(insns)
	if err != nil {
		t.Debugf("Invalid seccomp-bpf filter: %v", err)
		return linuxerr.EINVAL
	}

	return t.AppendSyscallFilter(compiled, tsync)
}
// Seccomp implements linux syscall seccomp(2).
//
// args[0] is the operation (mode), args[1] the flags, and args[2] a pointer
// to the operation-specific payload (a sock_fprog for SET_MODE_FILTER).
func Seccomp(t *kernel.Task, args arch.SyscallArguments) (uintptr, *kernel.SyscallControl, error) {
	return 0, nil, seccomp(t, args[0].Uint64(), args[1].Uint64(), args[2].Pointer())
}
| apache-2.0 |
adescre/ordrx | src/Lyglr.Ordrin/Client/OrdrxBaseClient.cs | 9748 | // <copyright file="OrdrxBaseClient.cs" company="Lyglr.com">Copyright (c) 2014 All Rights Reserved</copyright>
namespace Lyglr.Ordrx.Client
{
using System;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Threading;
using System.Threading.Tasks;
using Lyglr.Ordrx;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
/// <summary>
/// Client base class for the ordrx.com REST apis.
/// Adds NAAMA client/user authentication headers and translates transport and
/// service-level failures into <see cref="OrdrxClientException"/>.
/// </summary>
public abstract class OrdrxBaseClient : HttpClient
{
    // Header names and value templates for the ordr.in NAAMA authentication scheme.
    private const string XNaamaClientAuthenticationKey = "X-NAAMA-CLIENT-AUTHENTICATION";
    private const string XNaamaClientAuthenticationValueFormat = "id=\"{0}\", version=\"1\"";
    private const string XNaamaAuthenticationKey = "X-NAAMA-AUTHENTICATION";
    private const string XNaamaAuthenticationValueFormat = "username=\"{0}\", response=\"{1}\", version=\"1\"";

    private readonly string serviceBaseUrl;
    private readonly string developerKey;

    /// <summary>
    /// Standard constructor.
    /// </summary>
    /// <param name="serviceBaseUrl">Base url of the ordr.in service.</param>
    /// <param name="developerKey">Developer Key from ordr.in.</param>
    protected OrdrxBaseClient(string serviceBaseUrl, string developerKey)
    {
        if (string.IsNullOrWhiteSpace(serviceBaseUrl))
        {
            throw new ArgumentNullException("serviceBaseUrl");
        }

        if (string.IsNullOrWhiteSpace(developerKey))
        {
            throw new ArgumentNullException("developerKey");
        }

        this.serviceBaseUrl = serviceBaseUrl;
        this.developerKey = developerKey;
    }

    /// <summary>
    /// Builds a request <see cref="Uri"/>.
    /// </summary>
    /// <param name="path">Path to append onto the service url.</param>
    /// <returns>An absolute <see cref="Uri"/>.</returns>
    private Uri BuildRequestUri(string path)
    {
        // NOTE(review): assigning builder.Path REPLACES any path component of
        // serviceBaseUrl rather than appending; confirm the base url is always
        // a bare scheme+host.
        UriBuilder builder = new UriBuilder(new Uri(this.serviceBaseUrl, UriKind.Absolute));
        builder.Path = path;
        return builder.Uri;
    }

    /// <summary>
    /// Builds the request to be sent to the service.
    /// </summary>
    /// <param name="method">Method of the API call.</param>
    /// <param name="requestFormat">Request uri format.</param>
    /// <param name="parameters">Parameters of the request uri.</param>
    /// <returns>The http request.</returns>
    protected HttpRequestMessage BuildRequest(HttpMethod method, string requestFormat, params object[] parameters)
    {
        if (method == null)
        {
            throw new ArgumentNullException("method");
        }

        if (requestFormat == null)
        {
            throw new ArgumentNullException("requestFormat");
        }

        // Expand the format template into the final path, then anchor it on the base url.
        string uriPath = Utilities.InvariantFormat(requestFormat, parameters);
        Uri fullRequestUri = this.BuildRequestUri(uriPath);
        HttpRequestMessage requestMessage = new HttpRequestMessage(method, fullRequestUri);
        return requestMessage;
    }

    /// <summary>
    /// Sets authentication on a <see cref="HttpRequestMessage"/>.
    /// Silently does nothing when credentials are absent (guest orders).
    /// </summary>
    /// <param name="requestMessage">The request message.</param>
    /// <param name="email">The email required to compute the authentication hash.</param>
    /// <param name="hashedPassword">The hashed password required to compute the authentication hash.</param>
    protected void SetAuthentication(HttpRequestMessage requestMessage, string email, string hashedPassword)
    {
        // This is a valid scenario for a guest order.
        if (string.IsNullOrWhiteSpace(email) || string.IsNullOrWhiteSpace(hashedPassword))
        {
            return;
        }

        // Compute the auth hash. The hash required by the ordr.in API is: "hashcode = SHA256( SHA256([password]) + [email] + [uri] )".
        // The password is already stored as a hash in this object.
        string authHash = Utilities.CalculateSHA256(hashedPassword + email + requestMessage.RequestUri.AbsolutePath);
        requestMessage.Headers.Add(XNaamaAuthenticationKey, Utilities.InvariantFormat(XNaamaAuthenticationValueFormat, email, authHash));
    }

    /// <summary>
    /// Performs an ordr.in authenticated http request.
    /// </summary>
    /// <param name="requestMessage">Request message to be sent to the service.</param>
    /// <param name="cancellationToken">Token used to cancel the asynchronous call.</param>
    /// <returns>A task to track the asynchronous progress.</returns>
    protected Task SendRequestAsync(HttpRequestMessage requestMessage, CancellationToken cancellationToken)
    {
        // Delegate to the generic overload; the result is discarded.
        return this.SendRequestAsync<object>(requestMessage, cancellationToken);
    }

    /// <summary>
    /// Performs an ordr.in authenticated http request.
    /// </summary>
    /// <param name="requestMessage">Request message to be sent to the service.</param>
    /// <param name="cancellationToken">Token used to cancel the asynchronous call.</param>
    /// <typeparam name="T">Type to transform the result to.</typeparam>
    /// <returns>Returns the response from the service.</returns>
    protected async Task<T> SendRequestAsync<T>(HttpRequestMessage requestMessage, CancellationToken cancellationToken)
    {
        if (requestMessage == null)
        {
            throw new ArgumentNullException("requestMessage");
        }

        // Required for any call to the ordr.in service.
        requestMessage.Headers.Add(XNaamaClientAuthenticationKey, Utilities.InvariantFormat(XNaamaClientAuthenticationValueFormat, this.developerKey));

        // Default used by the catch blocks when the failure happens before a
        // response (and hence a real status code) is available.
        HttpStatusCode statusCode = HttpStatusCode.InternalServerError;

        try
        {
            using (HttpResponseMessage response = await this.SendAsync(requestMessage, cancellationToken))
            {
                statusCode = response.StatusCode;

                // No need to go through the whole content processing
                // if there is nothing to process.
                if (response.Content.Headers.ContentLength == 0)
                {
                    if (response.IsSuccessStatusCode)
                    {
                        return default(T);
                    }

                    throw OrdrxClientException.CreateException(statusCode, "Unknown failure", string.Empty);
                }

                string stringContent = await response.Content.ReadAsStringAsync();

                T result;
                if (!TryParseContentResult(stringContent, statusCode, out result))
                {
                    // Parsing did not yield a usable value; surface the HTTP failure.
                    response.EnsureSuccessStatusCode();
                }

                return result;
            }
        }
        catch (TaskCanceledException ex)
        {
            // HttpClient reports timeouts as cancellation.
            throw OrdrxClientException.CreateException(this.Timeout, ex);
        }
        catch (HttpRequestException ex)
        {
            throw OrdrxClientException.CreateException(statusCode, ex);
        }
        catch (Exception ex)
        {
            throw OrdrxClientException.CreateException(ex);
        }
    }

    /// <summary>
    /// Sets the json content of a message.
    /// </summary>
    /// <param name="request">Request to set the content on.</param>
    /// <param name="content">Content to set.</param>
    protected void SetMessageContent(HttpRequestMessage request, object content)
    {
        request.Content = new StringContent(JsonConvert.SerializeObject(content));
        request.Content.Headers.ContentType = new MediaTypeHeaderValue("application/json");
    }

    /// <summary>
    /// Parses the content.
    /// NOTE(review): despite the Try- prefix, this method THROWS an
    /// <see cref="OrdrxClientException"/> when the service reports an error in
    /// the body; it only returns false when the body is unrecognized.
    /// </summary>
    /// <param name="content">Content return by the service.</param>
    /// <param name="statusCode">Status code from the service.</param>
    /// <param name="value">Returns the parsed response from the service.</param>
    /// <typeparam name="T">Type to transform the result to.</typeparam>
    /// <returns>True if the parsing succeeded.</returns>
    private static bool TryParseContentResult<T>(string content, HttpStatusCode statusCode, out T value)
    {
        if (statusCode == HttpStatusCode.OK)
        {
            value = JsonConvert.DeserializeObject<T>(content);
            return true;
        }

        // Non-OK responses: look for the service's error envelope.
        JObject jsonContent = JObject.Parse(content);

        JToken error;
        if ((jsonContent.TryGetValue("_err", out error) || jsonContent.TryGetValue("_error", out error)) && error.Value<int>() == 1)
        {
            JToken message;
            if (jsonContent.TryGetValue("msg", out message) || jsonContent.TryGetValue("_msg", out message))
            {
                JToken text;
                if (jsonContent.TryGetValue("text", out text))
                {
                    throw OrdrxClientException.CreateException(statusCode, message.Value<string>(), text.Value<string>());
                }

                throw OrdrxClientException.CreateException(statusCode, message.Value<string>());
            }
        }

        value = default(T);
        return false;
    }
}
} | apache-2.0 |
topie/topie-oa | src/main/java/com/topie/user/ImageUtils.java | 2696 | package com.topie.user;
import java.awt.Image;
import java.awt.image.BufferedImage;
import java.io.InputStream;
import java.io.OutputStream;
import javax.imageio.ImageIO;
/**
 * Utility methods for scaling images and cropping avatar-style thumbnails.
 */
public class ImageUtils {

    /** Upper bound, in pixels, for both edges of the intermediate scaled image. */
    private static final int MAX_EDGE = 512;

    /**
     * Scales the image read from {@code inputStream} so that both dimensions are
     * at most {@value #MAX_EDGE} pixels (preserving aspect ratio), then crops the
     * rectangle (x1, y1)-(x2, y2) from the scaled image and writes it as PNG to
     * {@code outputStream}. The input stream is always closed; the output stream
     * is flushed but left open for the caller.
     *
     * @param inputStream  source image data
     * @param outputStream destination for the PNG crop
     * @param x1           left edge of the crop, in scaled-image coordinates
     * @param y1           top edge of the crop
     * @param x2           right edge of the crop (exclusive)
     * @param y2           bottom edge of the crop (exclusive)
     * @throws Exception if the image cannot be read, cropped or written
     */
    public static void zoomImage(InputStream inputStream,
            OutputStream outputStream, int x1, int y1, int x2, int y2)
            throws Exception {
        try {
            BufferedImage bufferedImage = ImageIO.read(inputStream);

            int height = bufferedImage.getHeight();
            int width = bufferedImage.getWidth();
            int defaultSize = Math.min(MAX_EDGE, Math.min(height, width));
            // Scale the longer edge down to defaultSize, keeping proportions.
            if (height > width) {
                int h2 = defaultSize;
                int w2 = (defaultSize * width) / height;
                bufferedImage = zoomImage(bufferedImage, w2, h2);
            } else {
                int w2 = defaultSize;
                int h2 = (defaultSize * height) / width;
                bufferedImage = zoomImage(bufferedImage, w2, h2);
            }

            BufferedImage outImage = bufferedImage.getSubimage(x1, y1, x2 - x1,
                    y2 - y1);
            ImageIO.write(outImage, "png", outputStream);
            outputStream.flush();
        } finally {
            // Close the input even when decoding/cropping throws (was leaked before).
            inputStream.close();
        }
    }

    /**
     * Scales the image read from {@code inputStream} to exactly
     * {@code toWidth} x {@code toHeight} pixels and writes the result as PNG to
     * {@code outputStream}. The input stream is always closed; the output stream
     * is flushed but left open for the caller.
     *
     * @throws Exception if the image cannot be read or written
     */
    public static void zoomImage(InputStream inputStream,
            OutputStream outputStream, int toWidth, int toHeight)
            throws Exception {
        try {
            BufferedImage bufferedImage = ImageIO.read(inputStream);
            BufferedImage outImage = zoomImage(bufferedImage, toWidth, toHeight);
            ImageIO.write(outImage, "png", outputStream);
            outputStream.flush();
        } finally {
            // Close the input even when decoding/writing throws (was leaked before).
            inputStream.close();
        }
    }

    /**
     * Scales {@code srcImage} to exactly {@code toWidth} x {@code toHeight}
     * pixels using smooth scaling.
     *
     * <p>(Javadoc fixed: the previous comment described a removed
     * {@code resizeTimes} parameter.)</p>
     *
     * @param srcImage the source image
     * @param toWidth  target width in pixels
     * @param toHeight target height in pixels
     * @return the scaled RGB image, or {@code null} if scaling failed
     *         (failures are logged rather than propagated, preserving the
     *         original non-throwing contract)
     */
    public static BufferedImage zoomImage(BufferedImage srcImage, int toWidth,
            int toHeight) {
        BufferedImage result = null;
        try {
            result = new BufferedImage(toWidth, toHeight,
                    BufferedImage.TYPE_INT_RGB);
            result.getGraphics()
                    .drawImage(
                            srcImage.getScaledInstance(toWidth, toHeight,
                                    Image.SCALE_SMOOTH), 0, 0, null);
        } catch (Exception e) {
            // Keep the original best-effort contract: log and return null.
            System.out.println("创建缩略图发生异常" + e.getMessage());
        }
        return result;
    }
}
| apache-2.0 |
guhongyeying/shoujishi | mobilesafe/src/com/example/mobilesafe/engine/AppInfoProvider.java | 2058 | package com.example.mobilesafe.engine;
import android.content.Context;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.graphics.drawable.Drawable;
import com.example.mobilesafe.bean.AppInfo;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
/**
 * Collects metadata (icon, label, package name, APK size, install flags) for
 * every package installed on the device.
 *
 * Created by wangren on 15/8/29.
 */
public class AppInfoProvider {

    /**
     * Builds an {@code AppInfo} entry for each installed package.
     *
     * @param context any context; used to obtain the {@code PackageManager}
     * @return list of populated {@code AppInfo} beans, one per installed package
     */
    public static List<AppInfo> getAppInfos(Context context){
        ArrayList<AppInfo> infos = new ArrayList<AppInfo>();
        PackageManager pM = context.getPackageManager();
        // Fetch the list of all installed packages.
        List<PackageInfo> installedPackages = pM.getInstalledPackages(0);
        for (PackageInfo installedPackage : installedPackages) {
            AppInfo appInfo = new AppInfo();
            Drawable icon = installedPackage.applicationInfo.loadIcon(pM);
            String apkName = (String) installedPackage.applicationInfo.loadLabel(pM);
            String packageName = installedPackage.packageName;
            // Locate the APK file on disk to measure its size.
            String sourceDir = installedPackage.applicationInfo.sourceDir;
            File file = new File(sourceDir);
            long sizeApp = file.length();
            appInfo.setIcon(icon);
            appInfo.setApkName(apkName);
            appInfo.setPackageName(packageName);
            appInfo.setSizeApp(sizeApp);
            // Determine whether this is a system app or a user-installed app.
            int flags = installedPackage.applicationInfo.flags;
            if((flags & ApplicationInfo.FLAG_SYSTEM) != 0){
                // System app.
                appInfo.setUserApp(false);
            }else {
                appInfo.setUserApp(true);
            }
            // Record where the app is stored: external (SD card) vs internal ROM.
            if((flags & ApplicationInfo.FLAG_EXTERNAL_STORAGE) != 0) {
                // Installed on external storage (SD card).
                appInfo.setIsRom(false);
            }else {
                appInfo.setIsRom(true);
            }
            infos.add(appInfo);
        }
        return infos;
    }
}
| apache-2.0 |
inloop/apk-method-count | js/ts/dexdefs.ts | 5653 | ///<reference path="dexformat.ts"/>
namespace DexFormat {

    /** Access modifier bit flags as defined by the dex file format. */
    export enum AccessFlags {
        ACC_PUBLIC = 0x1, ACC_PRIVATE = 0x2, ACC_PROTECTED = 0x4, ACC_STATIC = 0x8, ACC_FINAL = 0x10,
        ACC_SYNCHRONIZED = 0x20, ACC_VOLATILE = 0x40, ACC_BRIDGE = 0x40, ACC_TRANSIENT = 0x80, ACC_VARARGS = 0x80,
        ACC_NATIVE = 0x100, ACC_INTERFACE = 0x200, ACC_ABSTRACT = 0x400, ACC_STRICT = 0x800, ACC_SYNTHETIC = 0x1000,
        ACC_ANNOTATION = 0x2000, ACC_ENUM = 0x4000, ACC_CONSTRUCTOR = 0x10000, ACC_DECLARED_SYNCHRONIZED = 0x20000
    }

    /** Base for all dex definitions; holds the reader used to resolve indices. */
    abstract class BaseDef {
        // Sentinel meaning "no index" (e.g. a class with no superclass).
        protected static NO_INDEX:number = 0xffffffff;
        protected reader:DexFileReader;

        constructor(reader:DexFileReader) {
            this.reader = reader;
        }
    }

    /** Base for definitions that belong to a class (fields, methods). */
    abstract class ClassBaseDef extends BaseDef {
        protected classIdx:number;

        public getClassData():ClassDef {
            return this.reader.getClass(this.classIdx);
        }

        public getClassType():TypeDef {
            return this.reader.getType(this.classIdx);
        }

        public getClassIdx():number {
            return this.classIdx;
        }
    }

    /** A type entry: a descriptor string plus an internal/external marker. */
    export class TypeDef extends BaseDef {
        private descriptorIdx:number;
        // NOTE(review): undefined until setInternal() is called — confirm callers
        // always set it before reading isInternal().
        private internal:boolean;

        constructor(reader:DexFileReader, descriptorIdx:number) {
            super(reader);
            this.descriptorIdx = descriptorIdx;
        }

        public getDescriptor():string {
            return this.reader.getString(this.descriptorIdx);
        }

        public setInternal(internal: boolean):void {
            this.internal = internal;
        }

        public isInternal():boolean {
            return this.internal;
        }
    }

    /** A method prototype: shorty descriptor, return type and parameter list offset. */
    export class ProtoDef extends BaseDef {
        private shortyIdx:number;
        private returnTypeIdx:number;
        private parametersOff:number;

        constructor(reader:DexFileReader, shortyIdx:number, returnTypeIdx:number, parametersOff:number) {
            super(reader);
            this.shortyIdx = shortyIdx;
            this.returnTypeIdx = returnTypeIdx;
            this.parametersOff = parametersOff;
        }

        public getShorty():string {
            return this.reader.getString(this.shortyIdx);
        }

        public getParametersOff():number {
            return this.parametersOff;
        }

        public getReturnType():TypeDef {
            return this.reader.getType(this.returnTypeIdx);
        }
    }

    /** A field entry: owning class, field type and name. */
    export class FieldDef extends ClassBaseDef {
        private typeIdx:number;
        private nameIdx:number;

        constructor(reader:DexFileReader, classIdx:number, typeIdx:number, nameIdx:number) {
            super(reader);
            this.classIdx = classIdx;
            this.typeIdx = typeIdx;
            this.nameIdx = nameIdx;
        }

        public getName():string {
            return this.reader.getString(this.nameIdx);
        }

        public getType():TypeDef {
            return this.reader.getType(this.typeIdx);
        }
    }

    /** A method entry: owning class, prototype and name. */
    export class MethodDef extends ClassBaseDef {
        private protoIdx:number;
        private nameIdx:number;

        constructor(reader:DexFileReader, classIdx:number, protoIdx:number, nameIdx:number) {
            super(reader);
            this.classIdx = classIdx;
            this.protoIdx = protoIdx;
            this.nameIdx = nameIdx;
        }

        public getName():string {
            return this.reader.getString(this.nameIdx);
        }

        public getProto():ProtoDef {
            return this.reader.getProto(this.protoIdx);
        }
    }

    /** A class_def entry: type, access flags, superclass and section offsets. */
    export class ClassDef extends BaseDef {
        private classIdx:number;
        private accessFlags:number;
        private superclassIdx:number;
        private interfacesOff:number;
        private sourceFileIdx:number;
        private annotationsOff:number;
        private classDataOff:number;
        private staticValuesOff:number;

        constructor(reader:DexFileReader, classIdx:number, accessFlags:number, superclassIdx:number, interfacesOff:number,
                    sourceFileIdx:number, annotationsOff:number, classDataOff:number, staticValuesOff:number) {
            super(reader);
            this.classIdx = classIdx;
            this.accessFlags = accessFlags;
            this.superclassIdx = superclassIdx;
            this.interfacesOff = interfacesOff;
            this.sourceFileIdx = sourceFileIdx;
            this.annotationsOff = annotationsOff;
            this.classDataOff = classDataOff;
            this.staticValuesOff = staticValuesOff;
        }

        public getClassIdx():number {
            return this.classIdx;
        }

        /** True when the given access flag bit is set for this class. */
        public isAccessFlag(flag:AccessFlags):boolean {
            return (this.accessFlags & flag) == flag;
        }

        public getSourceFileName():string {
            return this.reader.getString(this.sourceFileIdx);
        }

        public getClassType():TypeDef {
            return this.reader.getType(this.classIdx);
        }

        /** Returns the superclass type, or null when there is none (NO_INDEX). */
        public getSuperclassType():TypeDef {
            if (this.superclassIdx == BaseDef.NO_INDEX) {
                return null;
            } else {
                return this.reader.getType(this.superclassIdx);
            }
        }

        public getInterfacesOff():number {
            return this.interfacesOff;
        }

        public getAnnotationsOff():number {
            return this.annotationsOff;
        }

        public getClassDataOff():number {
            return this.classDataOff;
        }

        public getStaticValuesOff():number {
            return this.staticValuesOff;
        }
    }
}
jonatalamantes/NotariusAdOmnes | Backend/Rector.php | 4402 | <?php
require_once('Person.php');
require_once('Church.php');
/**
 * Class to registry one Rector/Priest
 *
 * @author Jonathan Sandoval <jonathan_s_pisis@yahoo.com.mx>
 */
class Rector
{
    private $id;              // integer: database id
    private $type;            // string: kind of rector/priest
    private $status;          // string: one-letter status flag (defaults to 'A')
    private $position;        // string: position held
    private $idActualChurch;  // integer: id of the currently assigned Church
    private $idPerson;        // integer: id of the underlying Person record

    /**
     * Constructor for class Rector
     * (doc fixed: previously said "Contructor for class Person")
     *
     * @param integer $i  id
     * @param string  $t  type
     * @param string  $s  status
     * @param string  $p  position
     * @param integer $ac idActualChurch
     * @param integer $ip idPerson
     */
    function __construct($i = 0, $t = "", $s = 'A', $p = "", $ac = 0, $ip = 0)
    {
        $this->id = $i;
        $this->type = $t;
        $this->status = $s;
        $this->position = $p;
        $this->idActualChurch = $ac;
        $this->idPerson = $ip;
    }

    /**
     * Gets the value of id.
     *
     * @author Jonathan Sandoval <jonathan_s_pisis@yahoo.com.mx>
     * @return integer the id
     */
    public function getId()
    {
        return $this->id;
    }

    /**
     * Sets the value of id.
     *
     * @author Jonathan Sandoval <jonathan_s_pisis@yahoo.com.mx>
     * @param integer $id the id
     */
    public function setId($id)
    {
        $this->id = $id;
    }

    /**
     * Gets the value of type.
     *
     * @author Jonathan Sandoval <jonathan_s_pisis@yahoo.com.mx>
     * @return string the type
     */
    public function getType()
    {
        return $this->type;
    }

    /**
     * Sets the value of type.
     *
     * @author Jonathan Sandoval <jonathan_s_pisis@yahoo.com.mx>
     * @param string $type the type
     */
    public function setType($type)
    {
        $this->type = $type;
    }

    /**
     * Gets the value of status.
     *
     * @author Jonathan Sandoval <jonathan_s_pisis@yahoo.com.mx>
     * @return string the status
     */
    public function getStatus()
    {
        return $this->status;
    }

    /**
     * Sets the value of status.
     *
     * @author Jonathan Sandoval <jonathan_s_pisis@yahoo.com.mx>
     * @param string $status the status
     */
    public function setStatus($status)
    {
        $this->status = $status;
    }

    /**
     * Gets the value of position.
     *
     * @author Jonathan Sandoval <jonathan_s_pisis@yahoo.com.mx>
     * @return string the position
     */
    public function getPosition()
    {
        return $this->position;
    }

    /**
     * Sets the value of position.
     *
     * @author Jonathan Sandoval <jonathan_s_pisis@yahoo.com.mx>
     * @param string $position the position
     */
    public function setPosition($position)
    {
        $this->position = $position;
    }

    /**
     * Gets the value of idActualChurch.
     *
     * @author Jonathan Sandoval <jonathan_s_pisis@yahoo.com.mx>
     * @return integer the id of the actual church
     */
    public function getIdActualChurch()
    {
        return $this->idActualChurch;
    }

    /**
     * Sets the value of idActualChurch.
     *
     * @author Jonathan Sandoval <jonathan_s_pisis@yahoo.com.mx>
     * @param integer $idActualChurch the id actual church
     */
    public function setIdActualChurch($idActualChurch)
    {
        $this->idActualChurch = $idActualChurch;
    }

    /**
     * Gets the value of idPerson.
     *
     * @author Jonathan Sandoval <jonathan_s_pisis@yahoo.com.mx>
     * @return integer the id of the person
     */
    public function getIdPerson()
    {
        return $this->idPerson;
    }

    /**
     * Sets the value of idPerson.
     *
     * @author Jonathan Sandoval <jonathan_s_pisis@yahoo.com.mx>
     * @param integer $idPerson the id person
     */
    public function setIdPerson($idPerson)
    {
        $this->idPerson = $idPerson;
    }
}
?> | apache-2.0 |
hpe-cct/cct-core | src/test/scala/cogx/utilities/Array2DSpec.scala | 4799 | /*
* (c) Copyright 2016 Hewlett Packard Enterprise Development LP
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cogx.utilities
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.FunSuite
import org.scalatest.MustMatchers
/** Test code.
  *
  * Exercises the Array2D utility: element access, equality, slicing,
  * map/reduce, flattening, argmax and row/column extraction.
  *
  * @author Greg Snider
  */
@RunWith(classOf[JUnitRunner])
class Array2DSpec extends FunSuite with MustMatchers {

  // Elements written via apply/update round-trip unchanged.
  test("ApplyUpdate") {
    def value(row: Int, col: Int) = row * 100 + col
    val Rows = 3
    val Columns = 4
    val a = new Array2D[Int](Rows, Columns)
    require(a.rows == Rows && a.columns == Columns)
    for (row <- 0 until Rows; col <- 0 until Columns)
      a(row, col) = value(row, col)
    for (row <- 0 until Rows; col <- 0 until Columns)
      require(a(row, col) == value(row, col))
  }

  // === is reflexive, holds for equal contents, fails for different shapes.
  test("Equality") {
    val a = Array2D(
      Array(1, 2, 3, 4),
      Array(5, 6, 7, 8),
      Array(9, 10, 11, 12)
    )
    val b = Array2D(
      Array(1, 2, 3, 4),
      Array(5, 6, 7, 8),
      Array(9, 10, 11, 12)
    )
    val c = Array2D(
      Array(1, 2, 3, 4),
      Array(5, 6, 7, 8)
    )
    require(a === a)
    require(a === b)
    require(!(a === c))
  }

  // subarray(row, col, rows, cols) extracts the expected window.
  test("Subarray") {
    val a = Array2D(
      Array(1, 2, 3, 4),
      Array(5, 6, 7, 8),
      Array(9, 10, 11, 12)
    )
    val sub1 = a.subarray(1, 1, 2, 3)
    val expect1 = Array2D(
      Array(6, 7, 8),
      Array(10, 11, 12)
    )
    require(sub1 === expect1)

    val sub2 = a.subarray(0, 1, 3, 2)
    val expect2 = Array2D(
      Array(2, 3),
      Array(6, 7),
      Array(10, 11)
    )
    require(sub2 === expect2)
  }

  // map transforms element-wise (with a type change); reduce folds min/max.
  test("MapReduce") {
    val a = Array2D[Int](
      Array(1, 2, 3, 4),
      Array(5, 6, 7, 8),
      Array(9, 10, 11, 12)
    )
    val b: Array2D[Double] = a.map[Double](2.0 * _.toDouble)
    // val b: Array2D[Double] = a.map(2.0 * _.toDouble)
    val bExpected = Array2D[Double](
      Array(2.0, 4.0, 6.0, 8.0),
      Array(10.0, 12.0, 14.0, 16.0),
      Array(18.0, 20.0, 22.0, 24.0)
    )
    require(b === bExpected)
    require(a.reduce[Int](_ min _) == 1)
    require(a.reduce[Int](_ max _) == 12)
    require(b.reduce[Double](_ min _) == 2)
    require(b.reduce[Double](_ max _) == 24)
  }

  // flatten yields elements in row-major order.
  test("ToArray") {
    val a = Array2D(
      Array(1, 2, 3, 4),
      Array(5, 6, 7, 8),
      Array(9, 10, 11, 12)
    )
    val flat = a.flatten
    val expect = Array(1,2,3,4,5,6,7,8,9,10,11,12)
    require(flat.length == expect.length)
    for (i <- 0 until flat.length)
      require(flat(i) == expect(i))
  }

  // argmax picks the element maximizing the scoring function (10 for (1,1)).
  test("ArgMax") {
    class Pair(val x: Int, val y: Int) {
      def value =
        if (x == y) 10 * x
        else x + y
    }
    val a = Array2D(
      Array(new Pair(0, 0), new Pair(0, 1)),
      Array(new Pair(1, 0), new Pair(1, 1)),
      Array(new Pair(2, 0), new Pair(2, 1))
    )
    val winner = a.argmax(_.value)
    require(winner == a(1, 1))
  }

  // row(i) and column(j) return copies of the expected slices.
  test("ColumnRow") {
    def equal[T](a: Array[T], b: Array[T]): Boolean = {
      if (a.length != b.length)
        return false
      for (i <- 0 until a.length)
        if (a(i) != b(i))
          return false
      true
    }

    val a = Array2D(
      Array(1, 2, 3, 4),
      Array(5, 6, 7, 8),
      Array(9, 10, 11, 12)
    )
    require(equal(a.row(0), Array(1, 2, 3, 4)))
    require(equal(a.row(1), Array(5, 6, 7, 8)))
    require(equal(a.row(2), Array(9, 10, 11, 12)))
    require(equal(a.column(0), Array(1, 5, 9)))
    require(equal(a.column(1), Array(2, 6, 10)))
    require(equal(a.column(2), Array(3, 7, 11)))
    require(equal(a.column(3), Array(4, 8, 12)))
  }

  // NOTE(review): the assertions below are commented out — this test currently
  // only constructs fixtures and verifies nothing; confirm whether flattening
  // an Array2D of Array2Ds is still supported, and either restore or delete.
  test("Flatten") {
    val nw = Array2D(
      Array(1, 2, 3),
      Array(7, 8, 9)
    )
    val ne = Array2D(
      Array(4, 5, 6),
      Array(10, 11, 12)
    )
    val sw = Array2D(
      Array(13, 14, 15),
      Array(19, 20, 21)
    )
    val se = Array2D(
      Array(16, 17, 18),
      Array(22, 23, 24)
    )
    /*
    val array4D = new Array2D[Array2D[Int]](2, 2)
    array4D(0, 0) = nw
    array4D(0, 1) = ne
    array4D(1, 0) = sw
    array4D(1, 1) = se
    val flat = Array2D(array4D)
    val expected = Array2D(
      Array(1,2,3,4,5,6),
      Array(7,8,9,10,11,12),
      Array(13,14,15,16,17,18),
      Array(19,20,21,22,23,24)
    )
    require(flat === expected)
    */
  }
}
crnk-project/crnk-framework | crnk-security/src/main/java/io/crnk/security/SecurityModule.java | 15156 | package io.crnk.security;
import io.crnk.core.engine.information.resource.ResourceInformation;
import io.crnk.core.engine.query.QueryContext;
import io.crnk.core.engine.registry.RegistryEntry;
import io.crnk.core.engine.registry.ResourceRegistry;
import io.crnk.core.engine.security.SecurityProvider;
import io.crnk.core.engine.security.SecurityProviderContext;
import io.crnk.core.exception.RepositoryNotFoundException;
import io.crnk.core.module.Module;
import io.crnk.core.repository.BulkResourceRepository;
import io.crnk.core.repository.ManyRelationshipRepository;
import io.crnk.core.repository.OneRelationshipRepository;
import io.crnk.core.repository.ResourceRepository;
import io.crnk.core.repository.foward.ForwardingRelationshipRepository;
import io.crnk.core.utils.Supplier;
import io.crnk.security.internal.DataRoomBulkResourceFilter;
import io.crnk.security.internal.DataRoomMatcher;
import io.crnk.security.internal.DataRoomRelationshipFilter;
import io.crnk.security.internal.DataRoomResourceFilter;
import io.crnk.security.internal.SecurityRepositoryFilter;
import io.crnk.security.internal.SecurityResourceFilter;
import io.crnk.security.repository.CallerPermissionRepository;
import io.crnk.security.repository.RolePermissionRepository;
import io.crnk.security.repository.RoleRepository;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
public class SecurityModule implements Module {
    protected static final String ANY_ROLE = "ANY";

    private static final Logger LOGGER = LoggerFactory.getLogger(SecurityModule.class);

    // resourceType -> (role -> permissions); rebuilt by reconfigure(...).
    private Map<String, Map<String, ResourcePermission>> permissions;

    private ModuleContext moduleContext;

    // Security checks are active unless a caller installs a different supplier.
    private Supplier<Boolean> enabled = new Supplier<Boolean>() {

        @Override
        public Boolean get() {
            return Boolean.TRUE;
        }
    };

    private SecurityConfig config;

    private DataRoomMatcher matcher;

    // SecurityProvider view of this module: role checks are answered by this
    // module, authentication is delegated to the underlying provider.
    private SecurityProvider callerSecurityProvider = new SecurityProvider() {

        @Override
        public boolean isUserInRole(String role, SecurityProviderContext context) {
            return SecurityModule.this.isUserInRole(context.getQueryContext(), role);
        }

        @Override
        public boolean isAuthenticated(SecurityProviderContext context) {
            return moduleContext.getSecurityProvider().isAuthenticated(context);
        }
    };

    // protected for CDI
    protected SecurityModule() {
    }

    protected SecurityModule(SecurityConfig config) {
        this.config = config;
    }
    /**
     * @return helper to perform data access control checks.
     */
    public DataRoomMatcher getDataRoomMatcher() {
        return matcher;
    }

    /** Creates a module initialized with the given rule configuration. */
    public static SecurityModule newServerModule(SecurityConfig config) {
        return new SecurityModule(config);
    }

    /** Creates a client-side module without any rule configuration. */
    public static SecurityModule newClientModule() {
        return new SecurityModule(null);
    }
private static void configureRule(Map<String, Map<String, ResourcePermission>> newPermissions, String resourceType,
String role, ResourcePermission permission) {
Map<String, ResourcePermission> set = newPermissions.get(resourceType);
if (set == null) {
set = new HashMap<>();
newPermissions.put(resourceType, set);
}
ResourcePermission existingPermissions = set.get(role);
ResourcePermission newPermission = permission;
if (existingPermissions != null) {
newPermission = existingPermissions.or(permission);
}
set.put(role, newPermission);
LOGGER.debug("configure rule for resourceType={} role={} permission={}", resourceType, role, permission);
}
private static ResourcePermission updateMissingPermissions(ResourcePermission missingPermission,
ResourcePermission grantedPermissions) {
return missingPermission.and(missingPermission.xor(grantedPermissions));
}
/**
* @param enabled to only perform security checks when true.
*/
public void setEnabled(final boolean enabled) {
setEnabled(() -> enabled);
}
/**
* @return true if enabled
*/
public boolean isEnabled() {
boolean en = enabled.get();
LOGGER.debug("enabled={}", en);
return en;
}
/**
* @param enabled supplier to only perform security checks when true.
*/
public void setEnabled(Supplier<Boolean> enabled) {
this.enabled = enabled;
}
@Override
public String getModuleName() {
return "security";
}
protected void checkInit() {
if (config != null && permissions == null) {
reconfigure(config);
}
}
/**
* Applies the new configuration to this module.
*/
public void reconfigure(SecurityConfig config) {
this.config = config;
LOGGER.debug("reconfiguring with {} rules", config.getRules().size());
Map<String, Map<String, ResourcePermission>> newPermissions = new HashMap<>();
for (SecurityRule rule : config.getRules()) {
String resourceType = rule.getResourceType();
if (resourceType == null) {
Class<?> resourceClass = rule.getResourceClass();
if (resourceClass != null) {
resourceType = toType(resourceClass);
}
}
if (resourceType == null) {
Collection<RegistryEntry> entries = moduleContext.getResourceRegistry().getEntries();
for (RegistryEntry entry : entries) {
String entryResourceType = entry.getResourceInformation().getResourceType();
configureRule(newPermissions, entryResourceType, rule.getRole(), rule.getPermission());
}
} else {
ResourceRegistry resourceRegistry = moduleContext.getResourceRegistry();
RegistryEntry entry = resourceRegistry.getEntry(resourceType);
if (entry == null) {
throw new RepositoryNotFoundException(resourceType);
}
configureRule(newPermissions, resourceType, rule.getRole(), rule.getPermission());
}
}
this.permissions = newPermissions;
}
public SecurityConfig getConfig() {
return config;
}
@Override
public void setupModule(ModuleContext context) {
this.moduleContext = context;
if (config != null) {
context.addRepositoryFilter(new SecurityRepositoryFilter(this));
context.addResourceFilter(new SecurityResourceFilter(this, context));
if (config.isExposeRepositories()) {
context.addRepository(new RolePermissionRepository(this));
context.addRepository(new CallerPermissionRepository(this));
context.addRepository(new RoleRepository(this));
}
if (config.getDataRoomFilter() != null && config.getPerformDataRoomChecks()) {
matcher = new DataRoomMatcher(() -> config.getDataRoomFilter(), callerSecurityProvider);
LOGGER.debug("registering dataroom filter {}", config.getDataRoomFilter());
context.addRepositoryDecoratorFactory(repository -> {
if (repository instanceof BulkResourceRepository) {
return new DataRoomBulkResourceFilter((BulkResourceRepository) repository, matcher);
}
if (repository instanceof ResourceRepository) {
return new DataRoomResourceFilter((ResourceRepository) repository, matcher);
}
if (repository instanceof ForwardingRelationshipRepository) {
return repository; // no need to filter forwarding ones twice
}
if (repository instanceof OneRelationshipRepository || repository instanceof ManyRelationshipRepository) {
return new DataRoomRelationshipFilter(repository, matcher);
}
// no support for legacy repositories and custom onces
LOGGER.warn("no dataroom support for unknown repository {}", repository);
return repository;
});
} else {
matcher = new DataRoomMatcher(() -> (querySpec, method, callerSecurityProvider) -> querySpec, callerSecurityProvider);
}
}
}
/**
* @param resourceClass to check the permissions for
* @param permission the required permissions.
* @return true if the requested permissions are satisfied for the given resourceClass.
*/
public boolean isAllowed(QueryContext queryContext, Class<?> resourceClass, ResourcePermission permission) {
String resourceType = toType(resourceClass);
return isAllowed(queryContext, resourceType, permission);
}
/**
* @param resourceType to check the permissions for
* @param permission the required permissions.
* @return true if the requested permissions are satisfied for the given resourceType.
*/
public boolean isAllowed(QueryContext queryContext, String resourceType, ResourcePermission permission) {
ResourcePermission missingPermissions = getMissingPermissions(resourceType, permission, callerSecurityProvider, toSecurityContext(queryContext));
boolean allowed = missingPermissions.isEmpty();
if (allowed) {
LOGGER.debug("isAllowed returns {} for permission {}", allowed, permission);
} else {
LOGGER.debug("isAllowed returns {} for permission {} due to missing permission {}", allowed, permission, missingPermissions);
}
return allowed;
}
/**
* @return permissions the caller is authorized to for the passed resourceType.
*/
public ResourcePermission getCallerPermissions(QueryContext queryContext, String resourceType) {
ResourcePermission missingPermissions = getMissingPermissions(resourceType, ResourcePermission.ALL, callerSecurityProvider,
toSecurityContext(queryContext));
return missingPermissions.xor(ResourcePermission.ALL);
}
private SecurityProviderContext toSecurityContext(QueryContext queryContext) {
return () -> queryContext;
}
/**
* @return permissions the caller is authorized to for the passed resourceType.
*/
public ResourcePermission getRolePermissions(QueryContext queryContext, String resourceType, String checkedRole) {
SecurityProviderContext securityContext = toSecurityContext(queryContext);
ResourcePermission missingPermissions = getMissingPermissions(resourceType, ResourcePermission.ALL, new SecurityProvider() {
@Override
public boolean isUserInRole(String role, SecurityProviderContext context) {
return checkedRole.equals(role) || role.equals(ANY_ROLE);
}
@Override
public boolean isAuthenticated(SecurityProviderContext securityContext) {
throw new UnsupportedOperationException("not implemented");
}
}, securityContext);
return missingPermissions.xor(ResourcePermission.ALL);
}
private ResourcePermission getMissingPermissions(String resourceType, ResourcePermission requiredPermissions,
SecurityProvider securityProvider, SecurityProviderContext securityContext) {
if (!isEnabled()) {
return ResourcePermission.EMPTY;
}
checkInit();
Map<String, ResourcePermission> map = permissions.get(resourceType);
ResourcePermission missingPermission = requiredPermissions;
if (map != null) {
for (Entry<String, ResourcePermission> entry : map.entrySet()) {
String role = entry.getKey();
ResourcePermission intersection = entry.getValue().and(requiredPermissions);
boolean hasMorePermissions = !intersection.isEmpty();
if (hasMorePermissions && securityProvider.isUserInRole(role, securityContext)) {
missingPermission = updateMissingPermissions(missingPermission, intersection);
if (missingPermission.isEmpty()) {
break;
}
}
}
}
return missingPermission;
}
/**
* @param resourceClass to get the permissions for
* @return ResourcePermission for the given resource for the current user.
*/
public ResourcePermission getResourcePermission(QueryContext queryContext, Class<?> resourceClass) {
String resourceType = toType(resourceClass);
return getResourcePermission(queryContext, resourceType);
}
/**
* @param resourceType to get the permissions for
* @return ResourcePermission for the given resource for the current user.
*/
public ResourcePermission getResourcePermission(QueryContext queryContext, String resourceType) {
checkInit();
if (!isEnabled()) {
return ResourcePermission.ALL;
}
Map<String, ResourcePermission> map = permissions.get(resourceType);
ResourcePermission result = ResourcePermission.EMPTY;
if (map != null) {
for (Entry<String, ResourcePermission> entry : map.entrySet()) {
String role = entry.getKey();
if (isUserInRole(queryContext, role)) {
result = result.or(entry.getValue());
}
}
}
return result;
}
/**
* Checks whether the current user posses the provided role
*
* @param role to check
* @return true if in this role
*/
public boolean isUserInRole(QueryContext queryContext, String role) {
if (!isEnabled()) {
throw new IllegalStateException("security module is disabled");
}
checkInit();
SecurityProvider securityProvider = moduleContext.getSecurityProvider();
boolean contained = role.equals(ANY_ROLE) || securityProvider.isUserInRole(role, toSecurityContext(queryContext));
LOGGER.debug("isUserInRole returns {} for role {}", contained, role);
return contained;
}
private <T> String toType(Class<T> resourceClass) {
ResourceRegistry resourceRegistry = moduleContext.getResourceRegistry();
RegistryEntry entry = resourceRegistry.getEntry(resourceClass);
if (entry == null) {
throw new RepositoryNotFoundException(resourceClass);
}
ResourceInformation resourceInformation = entry.getResourceInformation();
return resourceInformation.getResourceType();
}
public SecurityProvider getCallerSecurityProvider() {
return callerSecurityProvider;
}
}
| apache-2.0 |
SpruceHillio/urbanairship-java-api-wrapper | src/main/java/io/sprucehill/urbanairship/model/PushRequestNotification.java | 1789 | package io.sprucehill.urbanairship.model;
import java.util.HashMap;
import java.util.Map;
/**
* Model DTP object for defining the push to send.
*
* @author Michael Duergner <michael@sprucehill.io>
*/
/**
 * Model DTO object for defining the push to send.
 *
 * @author Michael Duergner <michael@sprucehill.io>
 */
public class PushRequestNotification extends HashMap<String,Object> {

    /** Key under which the notification's actions map is stored. */
    private static final String ACTIONS_KEY = "actions";

    /**
     * Get a Builder for this class.
     *
     * @return The Builder instance
     */
    public static Builder builder() {
        return new Builder();
    }

    /**
     * A Builder object for the notification DTO model object
     */
    public static class Builder {

        PushRequestNotification notification;

        // typed handle on the actions map; avoids the unchecked cast that was
        // previously performed on every withAction() call
        private final Map<String, Object> actions;

        protected Builder() {
            notification = new PushRequestNotification();
            actions = new HashMap<>();
            notification.put(ACTIONS_KEY, actions);
        }

        /**
         * Set the iOS specific notification definition.
         *
         * @param deviceNotification The iOS specific notification instance
         * @return The Builder instance
         */
        public Builder withIOS(PushRequestIOSDeviceNotification deviceNotification) {
            notification.put("ios", deviceNotification);
            return this;
        }

        /**
         * Add an action to this notification. A later action with the same
         * key replaces the earlier one.
         *
         * @param action The action to add.
         * @return The Builder instance
         */
        public Builder withAction(PushRequestNotificationAction action) {
            actions.put(action.key(), action);
            return this;
        }

        /**
         * Build the notification object.
         *
         * @return The notification DTO model object.
         */
        public PushRequestNotification build() {
            return notification;
        }
    }
}
| apache-2.0 |
CreditEaseDBA/Themis | task_export/assets/js/chart-flot.demo.js | 16013 | /*
Template Name: Color Admin - Responsive Admin Dashboard Template build with Twitter Bootstrap 3.3.4
Version: 1.7.0
Author: Sean Ngu
Website: http://www.seantheme.com/color-admin-v1.7/admin/
*/
// Color Admin theme palette shared by every chart handler in this file.
var blue = '#348fe2',
    blueLight = '#5da5e8',
    blueDark = '#1993E4',
    aqua = '#49b6d6',
    aquaLight = '#6dc5de',
    aquaDark = '#3a92ab',
    green = '#00acac',
    greenLight = '#33bdbd',
    greenDark = '#008a8a',
    orange = '#f59c1a',
    orangeLight = '#f7b048',
    orangeDark = '#c47d15',
    dark = '#2d353c',
    grey = '#b6c2c9',
    purple = '#727cb6',
    purpleLight = '#8e96c5',
    purpleDark = '#5b6392',
    red = '#ff5b57';
// Renders three trigonometric demo series (sin, cos, tan) into #basic-chart.
var handleBasicChart = function () {
    "use strict";

    // Sample sin/cos at 0.25 steps and tan at 0.1 steps over one full period.
    var sinData = [];
    var cosData = [];
    var tanData = [];
    var t;
    for (t = 0; t < Math.PI * 2; t += 0.25) {
        sinData.push([t, Math.sin(t)]);
    }
    for (t = 0; t < Math.PI * 2; t += 0.25) {
        cosData.push([t, Math.cos(t)]);
    }
    for (t = 0; t < Math.PI * 2; t += 0.1) {
        tanData.push([t, Math.tan(t)]);
    }

    // Nothing to do when the target element is not on the page.
    if ($('#basic-chart').length === 0) {
        return;
    }

    var seriesList = [
        { label: "data 1", data: sinData, color: purple, shadowSize: 0 },
        { label: "data 2", data: cosData, color: green, shadowSize: 0 },
        { label: "data 3", data: tanData, color: dark, shadowSize: 0 }
    ];
    var plotOptions = {
        series: {
            lines: { show: true },
            points: { show: false }
        },
        xaxis: {
            tickColor: '#ddd'
        },
        yaxis: {
            // clamp the axis so tan's asymptotes do not dominate the chart
            min: -2,
            max: 2,
            tickColor: '#ddd'
        },
        grid: {
            borderColor: '#ddd',
            borderWidth: 1
        }
    };
    $.plot($("#basic-chart"), seriesList, plotOptions);
};
// Renders six random stacked bar series (one bar group per weekday) into
// #stacked-chart and wires a hover tooltip showing the hovered series' value.
var handleStackedChart = function () {
    "use strict";

    // Six random series with one value per weekday index 0..5:
    // d1 holds integers 0-4, d2..d6 hold integers 5-9 each.
    var d1 = [];
    for (var a = 0; a <= 5; a += 1) {
        d1.push([a, parseInt(Math.random() * 5)]);
    }
    var d2 = [];
    for (var b = 0; b <= 5; b += 1) {
        d2.push([b, parseInt(Math.random() * 5 + 5)]);
    }
    var d3 = [];
    for (var c = 0; c <= 5; c += 1) {
        d3.push([c, parseInt(Math.random() * 5 + 5)]);
    }
    var d4 = [];
    for (var d = 0; d <= 5; d += 1) {
        d4.push([d, parseInt(Math.random() * 5 + 5)]);
    }
    var d5 = [];
    for (var e = 0; e <= 5; e += 1) {
        d5.push([e, parseInt(Math.random() * 5 + 5)]);
    }
    var d6 = [];
    for (var f = 0; f <= 5; f += 1) {
        d6.push([f, parseInt(Math.random() * 5 + 5)]);
    }

    // x-axis tick index -> weekday label
    var ticksLabel = [
        [0, "Monday"], [1, "Tuesday"], [2, "Wednesday"], [3, "Thursday"],
        [4, "Friday"], [5, "Saturday"]
    ];

    var options = {
        xaxis: { tickColor: 'transparent', ticks: ticksLabel},
        // NOTE(review): flot's documented y-axis option is `tickLength`;
        // `ticksLength` is presumably ignored — confirm intent.
        yaxis: { tickColor: '#ddd', ticksLength: 10},
        grid: {
            hoverable: true,
            tickColor: "#ccc",
            borderWidth: 0,
            borderColor: 'rgba(0,0,0,0.2)'
        },
        series: {
            // stack: true piles the six series on top of each other
            stack: true,
            lines: { show: false, fill: false, steps: false },
            bars: { show: true, barWidth: 0.5, align: 'center', fillColor: null },
            highlightColor: 'rgba(0,0,0,0.8)'
        },
        legend: {
            show: true,
            labelBoxBorderColor: '#ccc',
            position: 'ne',
            noColumns: 1
        }
    };

    // one entry per country; each bar segment is filled with its series color
    var xData = [
        {
            data:d1,
            color: purpleDark,
            label: 'China',
            bars: {
                fillColor: purpleDark
            }
        },
        {
            data:d2,
            color: purple,
            label: 'Russia',
            bars: {
                fillColor: purple
            }
        },
        {
            data:d3,
            color: purpleLight,
            label: 'Canada',
            bars: {
                fillColor: purpleLight
            }
        },
        {
            data:d4,
            color: blueDark,
            label: 'Japan',
            bars: {
                fillColor: blueDark
            }
        },
        {
            data:d5,
            color: blue,
            label: 'USA',
            bars: {
                fillColor: blue
            }
        },
        {
            data:d6,
            color: blueLight,
            label: 'Others',
            bars: {
                fillColor: blueLight
            }
        }
    ];
    $.plot("#stacked-chart", xData, options);

    // Appends a floating #tooltip div slightly right of the cursor position.
    function showTooltip2(x, y, contents) {
        $('<div id="tooltip" class="flot-tooltip">' + contents + '</div>').css( {
            top: y,
            left: x + 35
        }).appendTo("body").fadeIn(200);
    }

    // last hovered series label/value; used to skip re-rendering an identical tooltip
    var previousXValue = null;
    var previousYValue = null;
    $("#stacked-chart").bind("plothover", function (event, pos, item) {
        if (item) {
            // subtract the stacked offset (datapoint[2]) to recover this
            // series' own value from the cumulative datapoint[1]
            var y = item.datapoint[1] - item.datapoint[2];
            if (previousXValue != item.series.label || y != previousYValue) {
                previousXValue = item.series.label;
                previousYValue = y;
                $("#tooltip").remove();
                showTooltip2(item.pageX, item.pageY, y + " " + item.series.label);
            }
        }
        else {
            // cursor left all bars: clear tooltip and hover state
            $("#tooltip").remove();
            previousXValue = null;
            previousYValue = null;
        }
    });
};
// Renders sin/cos series into #tracking-chart with a crosshair that tracks
// the mouse and (debounced) updates the legend with interpolated values.
var handleTrackingChart = function () {
    "use strict";

    // Sample sin/cos over [0, 14) at 0.1 steps.
    var sin = [], cos = [];
    for (var i = 0; i < 14; i += 0.1) {
        sin.push([i, Math.sin(i)]);
        cos.push([i, Math.cos(i)]);
    }

    // Refreshes the legend labels with each series' value linearly
    // interpolated at the current crosshair position.
    function updateLegend() {
        updateLegendTimeout = null;
        var pos = latestPosition;
        var axes = plot.getAxes();
        if (pos.x < axes.xaxis.min || pos.x > axes.xaxis.max ||
            pos.y < axes.yaxis.min || pos.y > axes.yaxis.max) {
            return;
        }
        var i, j, dataset = plot.getData();
        for (i = 0; i < dataset.length; ++i) {
            var series = dataset[i];
            // find the first data point to the right of the crosshair
            for (j = 0; j < series.data.length; ++j) {
                if (series.data[j][0] > pos.x) {
                    break;
                }
            }
            // FIX: use loose `== null` (was strict `=== null`) so that
            // out-of-range indices — which yield undefined, not null — fall
            // back to the nearest existing point instead of producing NaN
            var y, p1 = series.data[j - 1], p2 = series.data[j];
            if (p1 == null) {
                y = p2[1];
            } else if (p2 == null) {
                y = p1[1];
            } else {
                y = p1[1] + (p2[1] - p1[1]) * (pos.x - p1[0]) / (p2[0] - p1[0]);
            }
            // NOTE(review): the labels ("Series1"/"Series2") contain no "=",
            // so this replace is a no-op; labels like "Series1 = 0.00" would
            // be needed for the tracked value to become visible.
            legends.eq(i).text(series.label.replace(/=.*/, "= " + y.toFixed(2)));
        }
    }

    if ($('#tracking-chart').length !== 0) {
        var plot = $.plot($("#tracking-chart"),
            [
                { data: sin, label: "Series1", color: dark, shadowSize: 0},
                { data: cos, label: "Series2", color: red, shadowSize: 0}
            ],
            {
                series: {
                    lines: { show: true }
                },
                crosshair: { mode: "x", color: grey },
                grid: { hoverable: true, autoHighlight: false, borderColor: '#ccc', borderWidth: 0 },
                xaxis: { tickLength: 0 },
                yaxis: { tickColor: '#ddd' },
                legend: {
                    labelBoxBorderColor: '#ddd',
                    backgroundOpacity: 0.4,
                    color:'#fff',
                    show: true
                }
            });
        var legends = $("#tracking-chart .legendLabel");
        legends.each(function () {
            // freeze label widths so swapping the text does not reflow the legend
            $(this).css('width', $(this).width());
        });
        var updateLegendTimeout = null;
        var latestPosition = null;
        // BUG FIX: flot passes (event, pos, item) to "plothover" handlers —
        // the handlers for the other charts in this file use that signature.
        // The original handler here declared only (pos), so it stored the
        // jQuery event object and updateLegend read undefined pos.x/pos.y.
        $("#tracking-chart").bind("plothover", function (event, pos) {
            latestPosition = pos;
            if (!updateLegendTimeout) {
                // debounce: at most one legend refresh per 50ms
                updateLegendTimeout = setTimeout(updateLegend, 50);
            }
        });
    }
};
// Renders a static monthly bar chart into #bar-chart.
var handleBarChart = function () {
    "use strict";

    if ($('#bar-chart').length === 0) {
        return;
    }

    // Fixed demo totals, one bar per month label.
    var monthlyTotals = [
        ["January", 10], ["February", 8], ["March", 4],
        ["April", 13], ["May", 17], ["June", 9]
    ];
    var plotOptions = {
        series: {
            bars: {
                show: true,
                barWidth: 0.4,
                align: 'center',
                fill: true,
                fillColor: purple,
                zero: true
            }
        },
        xaxis: {
            // categories mode lets the month strings act as tick labels
            mode: "categories",
            tickColor: '#ddd',
            tickLength: 0
        },
        grid: {
            borderWidth: 0
        }
    };
    $.plot("#bar-chart", [ { data: monthlyTotals, color: purple } ], plotOptions);
};
// Renders a three-slice pie with random values into #interactive-pie-chart.
var handleInteractivePieChart = function () {
    "use strict";

    if ($('#interactive-pie-chart').length === 0) {
        return;
    }

    // One random slice (value 1..100) per theme color.
    var sliceColors = [purple, dark, grey];
    var data = sliceColors.map(function (sliceColor, index) {
        return {
            label: "Series" + (index + 1),
            data: Math.floor(Math.random() * 100) + 1,
            color: sliceColor
        };
    });

    $.plot($("#interactive-pie-chart"), data, {
        series: {
            pie: {
                show: true
            }
        },
        grid: {
            hoverable: true,
            clickable: true
        },
        legend: {
            labelBoxBorderColor: '#ddd',
            backgroundColor: 'none'
        }
    });
};
// Renders a three-slice donut chart into #donut-chart; slices under 10% are
// merged into a grey "combined" slice by flot's pie plugin.
var handleDonutChart = function () {
    "use strict";

    if ($('#donut-chart').length !== 0) {
        var data = [];
        var series = 3;
        var colorArray = [dark, green, purple];
        // NOTE(review): only the first `series` (3) entries of the two arrays
        // below are ever used; the remaining two name/value pairs are dead.
        var nameArray = ['Unique Visitor', 'Bounce Rate', 'Total Page Views', 'Avg Time On Site', '% New Visits'];
        var dataArray = [20,14,12,31,23];
        for( var i = 0; i<series; i++)
        {
            data[i] = { label: nameArray[i], data: dataArray[i], color: colorArray[i] };
        }
        $.plot($("#donut-chart"), data,
            {
                series: {
                    pie: {
                        // inner radius > 0 turns the pie into a donut
                        innerRadius: 0.5,
                        show: true,
                        combine: {
                            // slices below 10% get merged into one grey slice
                            color: '#999',
                            threshold: 0.1
                        }
                    }
                },
                grid:{borderWidth:0, hoverable: true, clickable: true},
                legend: {
                    show: false
                }
            });
    }
};
// Renders two static line series into #interactive-chart with a hover tooltip
// and live cursor-coordinate readouts in the #x / #y elements.
var handleInteractiveChart = function () {
    "use strict";

    // Appends a floating #tooltip div offset above-left of the cursor.
    function showTooltip(x, y, contents) {
        $('<div id="tooltip" class="flot-tooltip">' + contents + '</div>').css( {
            top: y - 45,
            left: x - 55
        }).appendTo("body").fadeIn(200);
    }

    if ($('#interactive-chart').length !== 0) {
        // static demo data: 14 samples for each of the two series
        var d1 = [[0, 42], [1, 53], [2,66], [3, 60], [4, 68], [5, 66], [6,71],[7, 75], [8, 69], [9,70], [10, 68], [11, 72], [12, 78], [13, 86]];
        var d2 = [[0, 12], [1, 26], [2,13], [3, 18], [4, 35], [5, 23], [6, 18],[7, 35], [8, 24], [9,14], [10, 14], [11, 29], [12, 30], [13, 43]];
        $.plot($("#interactive-chart"), [
            {
                data: d1,
                label: "Page Views",
                color: purple,
                lines: { show: true, fill:false, lineWidth: 2 },
                points: { show: false, radius: 5, fillColor: '#fff' },
                shadowSize: 0
            }, {
                data: d2,
                label: 'Visitors',
                color: green,
                lines: { show: true, fill:false, lineWidth: 2, fillColor: '' },
                points: { show: false, radius: 3, fillColor: '#fff' },
                shadowSize: 0
            }
        ],
        {
            xaxis: { tickColor: '#ddd',tickSize: 2 },
            yaxis: { tickColor: '#ddd', tickSize: 20 },
            grid: {
                hoverable: true,
                clickable: true,
                tickColor: "#ccc",
                borderWidth: 1,
                borderColor: '#ddd'
            },
            legend: {
                labelBoxBorderColor: '#ddd',
                margin: 0,
                noColumns: 1,
                show: true
            }
        }
        );
        // index of the last hovered point; the tooltip is only rebuilt when
        // the hover target actually changes
        var previousPoint = null;
        $("#interactive-chart").bind("plothover", function (event, pos, item) {
            // mirror the cursor coordinates into the #x / #y readouts
            $("#x").text(pos.x.toFixed(2));
            $("#y").text(pos.y.toFixed(2));
            if (item) {
                if (previousPoint !== item.dataIndex) {
                    previousPoint = item.dataIndex;
                    $("#tooltip").remove();
                    var y = item.datapoint[1].toFixed(2);
                    var content = item.series.label + " " + y;
                    showTooltip(item.pageX, item.pageY, content);
                }
            } else {
                // cursor left the data points: clear tooltip and hover state
                $("#tooltip").remove();
                previousPoint = null;
            }
            event.preventDefault();
        });
    }
};
// Renders a continuously updating random-walk line into #live-updated-chart.
// The refresh interval is user-adjustable through the #updateInterval input.
var handleLiveUpdatedChart = function () {
    "use strict";

    // Re-plots the walk and re-schedules itself after `updateInterval` ms.
    function update() {
        plot.setData([ getRandomData() ]);
        // since the axes don't change, we don't need to call plot.setupGrid()
        plot.draw();
        setTimeout(update, updateInterval);
    }

    // Maintains a 150-sample random walk clamped to [0, 100]: each call drops
    // the oldest sample and appends new ones until totalPoints is reached.
    function getRandomData() {
        if (data.length > 0) {
            data = data.slice(1);
        }
        // do a random walk
        while (data.length < totalPoints) {
            var prev = data.length > 0 ? data[data.length - 1] : 50;
            var y = prev + Math.random() * 10 - 5;
            if (y < 0) {
                y = 0;
            }
            if (y > 100) {
                y = 100;
            }
            data.push(y);
        }
        // zip the generated y values with the x values
        var res = [];
        for (var i = 0; i < data.length; ++i) {
            res.push([i, data[i]]);
        }
        return res;
    }

    if ($('#live-updated-chart').length !== 0) {
        var data = [], totalPoints = 150;
        // setup control widget
        var updateInterval = 1000;
        // user-adjustable refresh rate, clamped to [1, 2000] ms
        $("#updateInterval").val(updateInterval).change(function () {
            var v = $(this).val();
            if (v && !isNaN(+v)) {
                updateInterval = +v;
                if (updateInterval < 1) {
                    updateInterval = 1;
                }
                if (updateInterval > 2000) {
                    updateInterval = 2000;
                }
                // write the clamped value back into the input
                $(this).val("" + updateInterval);
            }
        });
        // setup plot
        var options = {
            series: { shadowSize: 0, color: purple, lines: { show: true, fill:true } }, // drawing is faster without shadows
            yaxis: { min: 0, max: 100, tickColor: '#ddd' },
            xaxis: { show: true, tickColor: '#ddd' },
            grid: {
                borderWidth: 1,
                borderColor: '#ddd'
            }
        };
        var plot = $.plot($("#live-updated-chart"), [ getRandomData() ], options);
        update();
    }
};
// Public facade for this file: Chart.init() wires up every demo chart that is
// present on the current page (each handler checks its own target element).
var Chart = function () {
    "use strict";

    return {
        //main function
        init: function () {
            handleBasicChart();
            handleStackedChart();
            handleTrackingChart();
            handleBarChart();
            handleInteractivePieChart();
            handleDonutChart();
            handleInteractiveChart();
            handleLiveUpdatedChart();
        }
    };
}(); | apache-2.0 |
overlook940/company | resources/views/Login/login.blade.php | 3021 | @extends('layouts.admin')
{{-- Admin login form: posts name/password (with CSRF token and optional
     "remember me" checkbox) to /admin/login; validation errors from the
     $errors bag are rendered inline under the corresponding field. --}}
@section('content')
<div class="container">
    <div class="row">
        <div class="col-md-8 col-md-offset-2">
            <div class="panel panel-default">
                <div class="panel-heading">Admin Login</div>
                <div class="panel-body">
                    <form class="form-horizontal" role="form" method="POST" action="{{ url('/admin/login') }}">
                        {{ csrf_field() }}
                        {{-- username field with inline validation error --}}
                        <div class="form-group{{ $errors->has('name') ? ' has-error' : '' }}">
                            <label for="name" class="col-md-4 control-label">用户名</label>
                            <div class="col-md-6">
                                <input id="name" type="text" class="form-control" name="name" value="{{ old('name') }}" required autofocus>
                                @if ($errors->has('name'))
                                    <span class="help-block">
                                        <strong>{{ $errors->first('name') }}</strong>
                                    </span>
                                @endif
                            </div>
                        </div>
                        {{-- password field with inline validation error --}}
                        <div class="form-group{{ $errors->has('password') ? ' has-error' : '' }}">
                            <label for="password" class="col-md-4 control-label">密码</label>
                            <div class="col-md-6">
                                <input id="password" type="password" class="form-control" name="password" required>
                                @if ($errors->has('password'))
                                    <span class="help-block">
                                        <strong>{{ $errors->first('password') }}</strong>
                                    </span>
                                @endif
                            </div>
                        </div>
                        {{-- "remember me" checkbox --}}
                        <div class="form-group">
                            <div class="col-md-6 col-md-offset-4">
                                <div class="checkbox">
                                    <label>
                                        <input type="checkbox" name="remember"> 记住我
                                    </label>
                                </div>
                            </div>
                        </div>
                        <div class="form-group">
                            <div class="col-md-8 col-md-offset-4">
                                <button type="submit" class="btn btn-primary">
                                    登录
                                </button>
                            </div>
                        </div>
                    </form>
                </div>
            </div>
        </div>
    </div>
</div>
@endsection | apache-2.0 |
batlinal/ftt | src/app/task/task-view/task-view.component.spec.ts | 643 | import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { TaskViewComponent } from './task-view.component';
// Unit tests for TaskViewComponent: verifies the component can be compiled
// and instantiated inside an Angular TestBed environment.
describe('TaskViewComponent', () => {
  let component: TaskViewComponent;
  let fixture: ComponentFixture<TaskViewComponent>;

  // compile the component (and its external template/styles) asynchronously
  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [ TaskViewComponent ]
    })
    .compileComponents();
  }));

  // create a fresh fixture for every spec and run initial change detection
  beforeEach(() => {
    fixture = TestBed.createComponent(TaskViewComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
  });

  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
| apache-2.0 |
apache/incubator-asterixdb | asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableGlobalSqlSkewnessAggregateDescriptor.java | 2791 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.runtime.aggregates.serializable.std;
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionDescriptor;
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.asterix.runtime.aggregates.base.AbstractSerializableAggregateFunctionDynamicDescriptor;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.algebricks.runtime.base.IEvaluatorContext;
import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
import org.apache.hyracks.algebricks.runtime.base.ISerializedAggregateEvaluator;
import org.apache.hyracks.algebricks.runtime.base.ISerializedAggregateEvaluatorFactory;
import org.apache.hyracks.api.exceptions.HyracksDataException;
/**
 * Descriptor for the serial (serializable-state) global SQL skewness
 * aggregate; creates the evaluator that merges partial skewness states.
 */
public class SerializableGlobalSqlSkewnessAggregateDescriptor
        extends AbstractSerializableAggregateFunctionDynamicDescriptor {

    private static final long serialVersionUID = 1L;

    // factory used by the function registry to instantiate this descriptor
    public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
        @Override
        public IFunctionDescriptor createFunctionDescriptor() {
            return new SerializableGlobalSqlSkewnessAggregateDescriptor();
        }
    };

    @Override
    public FunctionIdentifier getIdentifier() {
        return BuiltinFunctions.SERIAL_GLOBAL_SQL_SKEWNESS;
    }

    @Override
    public ISerializedAggregateEvaluatorFactory createSerializableAggregateEvaluatorFactory(
            final IScalarEvaluatorFactory[] args) {
        // anonymous class (not a lambda): the factory is serialized and
        // carries its own serialVersionUID
        return new ISerializedAggregateEvaluatorFactory() {

            private static final long serialVersionUID = 1L;

            @Override
            public ISerializedAggregateEvaluator createAggregateEvaluator(IEvaluatorContext ctx)
                    throws HyracksDataException {
                return new SerializableGlobalSqlSkewnessAggregateFunction(args, ctx, sourceLoc);
            }
        };
    }
}
| apache-2.0 |
shrinkwrap/resolver | maven/impl-maven/src/main/java/org/jboss/shrinkwrap/resolver/impl/maven/aether/ClasspathWorkspaceReader.java | 16825 | /*
* JBoss, Home of Professional Open Source
* Copyright 2013 Red Hat Inc. and/or its affiliates and other contributors
* as indicated by the @authors tag. All rights reserved.
* See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.shrinkwrap.resolver.impl.maven.aether;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathExpression;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import javax.xml.xpath.XPathFactoryConfigurationException;
import org.eclipse.aether.artifact.Artifact;
import org.eclipse.aether.artifact.DefaultArtifact;
import org.eclipse.aether.repository.WorkspaceReader;
import org.eclipse.aether.repository.WorkspaceRepository;
import org.jboss.shrinkwrap.resolver.impl.maven.util.Validate;
import org.w3c.dom.Document;
import org.xml.sax.SAXException;
/**
* {@link WorkspaceReader} implementation capable of reading from the ClassPath
*
* @author <a href="mailto:aslak@redhat.com">Aslak Knutsen</a>
* @author <a href="mailto:mmatloka@gmail.com">Michal Matloka</a>
* @author <a href="mailto:ggastald@redhat.com">George Gastaldi</a>
*/
public class ClasspathWorkspaceReader implements WorkspaceReader {
private static final Logger log = Logger.getLogger(ClasspathWorkspaceReader.class.getName());
/**
* class path entry
*/
private static final String CLASS_PATH_KEY = "java.class.path";
/**
* surefire cannot modify class path for test execution, so it have to store it in a different variable
*/
static final String SUREFIRE_CLASS_PATH_KEY = "surefire.test.class.path";
/**
* System property to override the relative path of the "flattened" pom.xml to prefer over the regular pom.xml, if present.
*
* @since SHRINKRES-299
*/
static final String FLATTENED_POM_PATH_KEY = "org.apache.maven.flattened-pom-path";
/**
 * Immutable holder pairing a classpath {@link File} with its cached
 * {@link File#isFile()} / {@link File#isDirectory()} results, so the
 * filesystem is probed at most once per classpath entry.
 */
private static final class FileInfo {

    private final File file;

    private final boolean isFile;

    private final boolean isDirectory;

    private FileInfo(final File file, final boolean isFile, final boolean isDirectory) {
        this.file = file;
        this.isFile = isFile;
        this.isDirectory = isDirectory;
    }

    // probes the filesystem once; results live in the final fields above
    private FileInfo(final File file) {
        this(file, file.isFile(), file.isDirectory());
    }

    private FileInfo(final String classpathEntry) {
        this(new File(classpathEntry));
    }

    private File getFile() {
        return file;
    }

    private boolean isFile() {
        return isFile;
    }

    private boolean isDirectory() {
        return isDirectory;
    }
}
private final Set<String> classPathEntries = new LinkedHashSet<String>();
/**
* Cache classpath File objects and retrieved isFile isDirectory values. Key is a classpath entry
*
* @see #getClasspathFileInfo(String)
*/
private final Map<String, FileInfo> classpathFileInfoCache = new HashMap<String, FileInfo>();
/**
* Cache pom File objects and retrieved isFile isDirectory values. Key - child File
*
* @see #getPomFileInfo(java.io.File)
*/
private final Map<File, FileInfo> pomFileInfoCache = new HashMap<File, FileInfo>();
/**
* Cache Found in classpath artifacts. Key is a pom file.
*
* @see #getFoundArtifact(java.io.File)
*/
private final Map<File, Artifact> foundArtifactCache = new HashMap<File, Artifact>();
/**
* The relative path of the "flattened" pom.xml to prefer over the regular pom.xml, if present.
*
* @see #createPomFileInfo(File)
* @since SHRINKRES-299
*/
private final String flattenedPomPath;
/**
* Reuse DocumentBuilder.
*
* @see #getDocumentBuilder()
*/
private DocumentBuilder documentBuilder;
/**
* Reuse XPath
*
* @see #getXPath()
*/
private XPath xPath;
/*
* Compiled lazy-loaded xpath expressions. See getter methods.
*/
private XPathExpression xPathParentGroupIdExpression;
private XPathExpression xPathGroupIdExpression;
private XPathExpression xPathArtifactIdExpression;
private XPathExpression xPathTypeExpression;
private XPathExpression xPathVersionExpression;
private XPathExpression xPathParentVersionExpression;
/**
 * Reads classpath entries from the surefire classpath property first and the
 * regular classpath property second, and resolves the configurable relative
 * path of the flattened pom (defaults to ".flattened-pom.xml").
 */
public ClasspathWorkspaceReader() {
final String classPath = SecurityActions.getProperty(CLASS_PATH_KEY);
final String surefireClassPath = SecurityActions.getProperty(SUREFIRE_CLASS_PATH_KEY);
this.classPathEntries.addAll(getClassPathEntries(surefireClassPath));
this.classPathEntries.addAll(getClassPathEntries(classPath));
final String configuredFlattenedPomPath = SecurityActions.getProperty(FLATTENED_POM_PATH_KEY);
this.flattenedPomPath = configuredFlattenedPomPath != null ? configuredFlattenedPomPath : ".flattened-pom.xml";
}
/** Identifies this workspace reader's repository as the "classpath" workspace. */
@Override
public WorkspaceRepository getRepository() {
return new WorkspaceRepository("classpath");
}
/**
 * Scans all cached classpath entries for the requested artifact.
 * Directory entries (assumed target/classes) resolve to the project's pom.xml;
 * file entries match by "artifactId-version[-classifier][.extension]" naming.
 * Returns the pom file, the raw artifact file, or null if nothing matches.
 */
@Override
public File findArtifact(final Artifact artifact) {
for (String classpathEntry : classPathEntries) {
final FileInfo fileInfo = getClasspathFileInfo(classpathEntry);
final File file = fileInfo.getFile();
if (fileInfo.isDirectory()) {
// TODO: This is not reliable, file might have different name
// FIXME: Surefire might user jar in the classpath instead of the target/classes
final FileInfo pomFileInfo = getPomFileInfo(file);
if (pomFileInfo != null && pomFileInfo.isFile()) {
final File pomFile = pomFileInfo.getFile();
final Artifact foundArtifact = getFoundArtifact(pomFile);
if (areEquivalent(artifact, foundArtifact)) {
return pomFile;
}
}
}
// this is needed for Surefire when executed as 'mvn package'
else if (fileInfo.isFile()) {
// expected base name: artifactId-version[-classifier]
final StringBuilder name = new StringBuilder(artifact.getArtifactId()).append("-").append(
artifact.getVersion());
// SHRINKRES-102, consider classifier as well
if (!Validate.isNullOrEmpty(artifact.getClassifier())) {
name.append("-").append(artifact.getClassifier());
}
// strip the extension from the candidate's file name before comparing
String candidateName = file.getName();
int suffixPosition = candidateName.lastIndexOf('.');
if (suffixPosition != -1) {
candidateName = candidateName.substring(0, suffixPosition);
}
// TODO: This is nasty
// we need to get a a pom.xml file to be sure we fetch transitive deps as well
if (candidateName.equals(name.toString())) {
if ("pom".equals(artifact.getExtension())) {
// try to get pom file for the project
final FileInfo pomFileInfo = getPomFileInfo(file);
if (pomFileInfo != null && pomFileInfo.isFile()) {
final File pomFile = pomFileInfo.getFile();
final Artifact foundArtifact = getFoundArtifact(pomFile);
if (areEquivalent(artifact, foundArtifact)) {
return pomFile;
}
}
}
// we are looking for a non pom artifact, let's get it
name.append(".").append(artifact.getExtension());
if (file.getName().endsWith(name.toString())) {
// return raw file
return file;
}
}
}
}
// nothing on the classpath matched the requested artifact
return null;
}
/**
 * Determines whether two artifacts denote the same coordinates, i.e. share
 * groupId, artifactId and version. Classifier and extension are ignored.
 *
 * @param artifact
 *     left side artifact to be compared
 * @param foundArtifact
 *     right side artifact to be compared
 *
 * @return true if the groupId, artifactId and version all match
 */
private boolean areEquivalent(final Artifact artifact, final Artifact foundArtifact) {
    if (!foundArtifact.getGroupId().equals(artifact.getGroupId())) {
        return false;
    }
    if (!foundArtifact.getArtifactId().equals(artifact.getArtifactId())) {
        return false;
    }
    return foundArtifact.getVersion().equals(artifact.getVersion());
}
/** Version ranges are not supported by this workspace reader; always empty. */
@Override
public List<String> findVersions(final Artifact artifact) {
return Collections.emptyList();
}
/**
 * Splits a raw classpath string on the platform path separator, preserving
 * entry order and dropping duplicates.
 *
 * @param classPath raw classpath value; may be null or empty
 * @return ordered set of classpath entries, empty for blank input
 */
private Set<String> getClassPathEntries(final String classPath) {
    if (Validate.isNullOrEmpty(classPath)) {
        return Collections.emptySet();
    }
    final String[] entries = classPath.split(String.valueOf(File.pathSeparatorChar));
    return new LinkedHashSet<String>(Arrays.asList(entries));
}
/**
 * Returns the cached {@code FileInfo} for a classpath entry, creating and
 * caching it on first access.
 */
private FileInfo getClasspathFileInfo(final String classpathEntry) {
    final FileInfo cached = classpathFileInfoCache.get(classpathEntry);
    if (cached != null) {
        return cached;
    }
    final FileInfo created = new FileInfo(classpathEntry);
    classpathFileInfoCache.put(classpathEntry, created);
    return created;
}
/**
 * Returns the cached pom {@code FileInfo} for a classpath child file,
 * creating it on first access. A null result (no resolvable pom location)
 * is deliberately not cached.
 */
private FileInfo getPomFileInfo(final File childFile) {
    final FileInfo cached = pomFileInfoCache.get(childFile);
    if (cached != null) {
        return cached;
    }
    final FileInfo created = createPomFileInfo(childFile);
    if (created != null) {
        pomFileInfoCache.put(childFile, created);
    }
    return created;
}
/**
 * Derives the pom.xml location for a classpath directory entry. Assuming the
 * entry is a target/classes directory, the project pom.xml lives two
 * directory levels up. Returns null when there are not two parent levels.
 */
private FileInfo createPomFileInfo(final File childFile) {
    final File targetDir = childFile.getParentFile();
    final File projectDir = (targetDir == null) ? null : targetDir.getParentFile();
    if (projectDir == null) {
        return null;
    }
    return new FileInfo(new File(projectDir, "pom.xml"));
}
/**
 * Returns the cached {@code Artifact} parsed from the given pom file,
 * parsing and caching it on first access.
 */
private Artifact getFoundArtifact(final File pomFile) {
    final Artifact cached = foundArtifactCache.get(pomFile);
    if (cached != null) {
        return cached;
    }
    final Artifact parsed = createFoundArtifact(pomFile);
    foundArtifactCache.put(pomFile, parsed);
    return parsed;
}
/**
 * Parses GAV coordinates out of a pom file (or its flattened variant, see
 * {@link #choosePomToLoad(File)}) via XPath and builds a {@code DefaultArtifact}
 * with its file set to the given pom.
 *
 * @param pomFile the pom.xml to process
 * @return the artifact described by the pom
 * @throws RuntimeException if the pom cannot be loaded or evaluated
 */
private Artifact createFoundArtifact(final File pomFile) {
    try {
        if (log.isLoggable(Level.FINE)) {
            log.fine("Processing " + pomFile.getAbsolutePath() + " for classpath artifact resolution");
        }
        // TODO: load pom using Maven Model?
        // This might include a cycle in graph reconstruction, to be investigated
        final Document pom = loadPom(choosePomToLoad(pomFile));
        String groupId = getXPathGroupIdExpression().evaluate(pom);
        String artifactId = getXPathArtifactIdExpression().evaluate(pom);
        String type = getXPathTypeExpression().evaluate(pom);
        String version = getXPathVersionExpression().evaluate(pom);
        // groupId and version may be inherited from the parent pom
        if (Validate.isNullOrEmpty(groupId)) {
            groupId = getXPathParentGroupIdExpression().evaluate(pom);
        }
        if (Validate.isNullOrEmpty(type)) {
            type = "jar";
        }
        // use the same null-or-empty check as the other coordinates above
        if (Validate.isNullOrEmpty(version)) {
            version = getXPathParentVersionExpression().evaluate(pom);
        }
        final Artifact foundArtifact = new DefaultArtifact(groupId + ":" + artifactId + ":" + type + ":" + version);
        foundArtifact.setFile(pomFile);
        return foundArtifact;
    } catch (final Exception e) {
        throw new RuntimeException("Could not parse pom.xml: " + pomFile, e);
    }
}
// SHRINKRES-299, "Maven CI Friendly Versions": a "flattened" pom.xml (written by
// flatten-maven-plugin) is preferred over the regular pom.xml when present,
// effectively acting as a kind of "proxy" for the regular pom.xml.
private File choosePomToLoad(final File regularPomFile) {
    final File parentDir = regularPomFile.getParentFile();
    if (parentDir == null) {
        return regularPomFile;
    }
    final File flattenedPomFile = new File(parentDir, flattenedPomPath);
    return flattenedPomFile.isFile() ? flattenedPomFile : regularPomFile;
}
/** Parses a pom file into a DOM document using the shared DocumentBuilder. */
private Document loadPom(final File pom) throws IOException, SAXException, ParserConfigurationException {
final DocumentBuilder documentBuilder = getDocumentBuilder();
return documentBuilder.parse(pom);
}
/** Lazily creates and reuses a single DocumentBuilder for pom parsing. */
private DocumentBuilder getDocumentBuilder() throws ParserConfigurationException {
if (documentBuilder == null) {
final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
documentBuilder = factory.newDocumentBuilder();
}
return documentBuilder;
}
/*
 * XPath expressions reuse
 */
/**
 * Lazily creates and reuses a single XPath instance. Tries the JDK-internal
 * XPath factory implementation first (presumably to avoid picking up a
 * different implementation from the application classpath — TODO confirm),
 * falling back to the default factory lookup if that class is unavailable.
 */
private XPath getXPath() {
if (xPath == null) {
XPathFactory factory;
try {
factory = XPathFactory.newInstance(XPathFactory.DEFAULT_OBJECT_MODEL_URI,
"com.sun.org.apache.xpath.internal.jaxp.XPathFactoryImpl",
ClassLoader.getSystemClassLoader());
} catch (XPathFactoryConfigurationException e) {
factory = XPathFactory.newInstance();
}
xPath = factory.newXPath();
}
return xPath;
}
// Lazily compiled, reused XPath expression for /project/parent/groupId.
private XPathExpression getXPathParentGroupIdExpression() throws XPathExpressionException {
if (xPathParentGroupIdExpression == null) {
xPathParentGroupIdExpression = getXPath().compile("/project/parent/groupId");
}
return xPathParentGroupIdExpression;
}
// Lazily compiled, reused XPath expression for /project/groupId.
private XPathExpression getXPathGroupIdExpression() throws XPathExpressionException {
if (xPathGroupIdExpression == null) {
xPathGroupIdExpression = getXPath().compile("/project/groupId");
}
return xPathGroupIdExpression;
}
// Lazily compiled, reused XPath expression for /project/artifactId.
private XPathExpression getXPathArtifactIdExpression() throws XPathExpressionException {
if (xPathArtifactIdExpression == null) {
xPathArtifactIdExpression = getXPath().compile("/project/artifactId");
}
return xPathArtifactIdExpression;
}
// Lazily compiled, reused XPath expression for /project/packaging (artifact type).
private XPathExpression getXPathTypeExpression() throws XPathExpressionException {
if (xPathTypeExpression == null) {
xPathTypeExpression = getXPath().compile("/project/packaging");
}
return xPathTypeExpression;
}
// Lazily compiled, reused XPath expression for /project/version.
private XPathExpression getXPathVersionExpression() throws XPathExpressionException {
if (xPathVersionExpression == null) {
xPathVersionExpression = getXPath().compile("/project/version");
}
return xPathVersionExpression;
}
// Lazily compiled, reused XPath expression for /project/parent/version.
private XPathExpression getXPathParentVersionExpression() throws XPathExpressionException {
if (xPathParentVersionExpression == null) {
xPathParentVersionExpression = getXPath().compile("/project/parent/version");
}
return xPathParentVersionExpression;
}
}
| apache-2.0 |
werkt/bazel | src/main/java/com/google/devtools/build/lib/starlarkbuildapi/java/JavaStarlarkApiProviderApi.java | 1528 | // Copyright 2018 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.starlarkbuildapi.java;
import com.google.devtools.build.lib.starlarkbuildapi.FileApi;
import com.google.devtools.build.lib.syntax.StarlarkValue;
import net.starlark.java.annot.StarlarkBuiltin;
import net.starlark.java.annot.StarlarkDeprecated;
import net.starlark.java.annot.StarlarkDocumentationCategory;
/**
 * Provides access to information about Java rules. Every Java-related target provides this struct,
 * accessible as a java field on a Target.
 *
 * <p>Deprecated in favor of {@code JavaInfo}; see the builtin doc string below.
 *
 * @param <FileT> the file type exposed by this provider
 */
@StarlarkBuiltin(
name = "JavaStarlarkApiProvider",
title = "java",
category = StarlarkDocumentationCategory.PROVIDER,
doc =
"Deprecated. Use <a"
+ " href=\"https://docs.bazel.build/versions/master/skylark/lib/JavaInfo.html\">JavaInfo</a>"
+ " instead.")
@StarlarkDeprecated
public interface JavaStarlarkApiProviderApi<FileT extends FileApi> extends StarlarkValue {}
| apache-2.0 |
kvantstudio/kvantstudio-drupal8 | site_price/src/Form/ConfirmPriceGroupDeleteForm.php | 2449 | <?php
/**
* @file
* Contains \Drupal\site_price\Form\ConfirmPriceGroupDeleteForm
*/
namespace Drupal\site_price\Form;
use Drupal\Core\Cache\Cache;
use Drupal\Core\Form\ConfirmFormBase;
use Drupal\Core\Form\FormStateInterface;
use Drupal\Core\Url;
use Drupal\site_price\Controller\PriceDatabaseController;
use Symfony\Component\DependencyInjection\ContainerInterface;
/**
 * Confirmation form for deleting a price group.
 */
class ConfirmPriceGroupDeleteForm extends ConfirmFormBase {
/**
 * The ID of the price group being deleted.
 *
 * @var int
 */
protected $gid;
/**
 * The loaded price group object to delete.
 *
 * @var object
 */
protected $group;
/**
 * The price database service.
 *
 * @var \Drupal\site_price\Controller\PriceDatabaseController
 */
protected $databasePrice;
/**
 * Constructs a new ConfirmPriceGroupDeleteForm.
 *
 * @param \Drupal\site_price\Controller\PriceDatabaseController $databasePrice
 * The price database service.
 */
public function __construct(PriceDatabaseController $databasePrice) {
$this->databasePrice = $databasePrice;
}
/**
 * {@inheritdoc}
 */
public static function create(ContainerInterface $container) {
return new static(
$container->get('site_price.database')
);
}
/**
 * {@inheritdoc}
 */
public function getFormId() {
return 'price_group_delete_form';
}
/**
 * {@inheritdoc}
 */
public function buildForm(array $form, FormStateInterface $form_state, $gid = 0) {
$this->gid = $gid;
if ($this->gid) {
$this->group = $this->databasePrice->loadPriceGroup($this->gid);
}
return parent::buildForm($form, $form_state);
}
/**
 * {@inheritdoc}
 */
public function getQuestion() {
return $this->t('Are you sure you want to delete «@title»?', array('@title' => $this->group->title));
}
/**
 * {@inheritdoc}
 */
public function getConfirmText() {
return $this->t('Delete');
}
/**
 * {@inheritdoc}
 */
public function getCancelUrl() {
return new Url('site_price.admin');
}
/**
 * {@inheritdoc}
 */
public function submitForm(array &$form, FormStateInterface $form_state) {
$this->databasePrice->deletePriceGroup($this->group->gid);
// Invalidate cached content tagged 'price'.
Cache::invalidateTags(['price']);
drupal_set_message($this->t('Group «@title» deleted.', array('@title' => $this->group->title)));
$form_state->setRedirect('site_price.admin');
}
} | apache-2.0 |
PascalSchumacher/incubator-groovy | subprojects/groovy-xml/src/main/java/groovy/xml/SAXBuilder.java | 4970 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package groovy.xml;
import groovy.util.BuilderSupport;
import java.util.Iterator;
import java.util.Map;
import org.xml.sax.Attributes;
import org.xml.sax.ContentHandler;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.AttributesImpl;
/**
 * A builder for generating W3C SAX events. Use similar to MarkupBuilder.
 *
 * @author <a href="mailto:james@coredevelopers.net">James Strachan</a>
 * @version $Revision$
 */
public class SAXBuilder extends BuilderSupport {

    private ContentHandler handler;
    private Attributes emptyAttributes = new AttributesImpl();

    public SAXBuilder(ContentHandler handler) {
        this.handler = handler;
    }

    /** Parent/child relationships are expressed purely through SAX event order. */
    protected void setParent(Object parent, Object child) {
    }

    /** Emits a start-element event with no attributes. */
    protected Object createNode(Object name) {
        doStartElement(name, emptyAttributes);
        return name;
    }

    /** Emits a start-element event with no attributes, followed by text content. */
    protected Object createNode(Object name, Object value) {
        doStartElement(name, emptyAttributes);
        doText(value);
        return name;
    }

    /**
     * Emits a characters event for the string form of the given value.
     *
     * @param value the text content; converted via toString()
     */
    private void doText(Object value) {
        try {
            char[] text = value.toString().toCharArray();
            handler.characters(text, 0, text.length);
        }
        catch (SAXException e) {
            handleException(e);
        }
    }

    /** Emits a start-element event with the given attributes and optional text. */
    protected Object createNode(Object name, Map attributeMap, Object text) {
        AttributesImpl attributes = new AttributesImpl();
        for (Iterator iter = attributeMap.entrySet().iterator(); iter.hasNext();) {
            Map.Entry entry = (Map.Entry) iter.next();
            Object value = entry.getValue();
            String valueText = (value != null) ? value.toString() : "";
            // attribute names may be plain strings or namespaced QNames
            String[] parts = nameParts(entry.getKey());
            attributes.addAttribute(parts[0], parts[1], parts[2], "CDATA", valueText);
        }
        doStartElement(name, attributes);
        if (text != null) {
            doText(text);
        }
        return name;
    }

    /** Fires the SAX startElement event for the given element name and attributes. */
    protected void doStartElement(Object name, Attributes attributes) {
        String[] parts = nameParts(name);
        try {
            handler.startElement(parts[0], parts[1], parts[2], attributes);
        }
        catch (SAXException e) {
            handleException(e);
        }
    }

    /** Fires the SAX endElement event once a node's children have been built. */
    protected void nodeCompleted(Object parent, Object name) {
        String[] parts = nameParts(name);
        try {
            handler.endElement(parts[0], parts[1], parts[2]);
        }
        catch (SAXException e) {
            handleException(e);
        }
    }

    protected void handleException(SAXException e) {
        throw new RuntimeException(e);
    }

    /* (non-Javadoc)
     * @see groovy.util.BuilderSupport#createNode(java.lang.Object, java.util.Map, java.lang.Object)
     */
    protected Object createNode(Object name, Map attributes) {
        return createNode(name, attributes, null);
    }

    /**
     * Resolves a node or attribute name into SAX name components.
     * QNames contribute their namespace URI, local part and qualified name;
     * any other object uses its string form for both local and qualified name
     * with an empty namespace URI.
     *
     * @param name the element/attribute name (QName or arbitrary object)
     * @return a three-element array: { uri, localName, qualifiedName }
     */
    private static String[] nameParts(Object name) {
        if (name instanceof QName) {
            QName qname = (QName) name;
            return new String[] { qname.getNamespaceURI(), qname.getLocalPart(), qname.getQualifiedName() };
        }
        String localName = name.toString();
        return new String[] { "", localName, localName };
    }
}
| apache-2.0 |
OSGP/Platform | osgp-adapter-ws-smartmetering/src/main/java/org/opensmartgridplatform/adapter/ws/smartmetering/infra/jms/messageprocessor/GetAdministrativeStatusResponseMessageProcessor.java | 726 | /**
* Copyright 2015 Smart Society Services B.V.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*/
package org.opensmartgridplatform.adapter.ws.smartmetering.infra.jms.messageprocessor;
import org.opensmartgridplatform.shared.infra.jms.MessageType;
import org.springframework.stereotype.Component;
/**
 * Message processor that routes GET_ADMINISTRATIVE_STATUS domain responses;
 * all handling is inherited from {@link DomainResponseMessageProcessor}.
 */
@Component
public class GetAdministrativeStatusResponseMessageProcessor extends DomainResponseMessageProcessor {
// Registers this processor for the GET_ADMINISTRATIVE_STATUS message type.
protected GetAdministrativeStatusResponseMessageProcessor() {
super(MessageType.GET_ADMINISTRATIVE_STATUS);
}
}
| apache-2.0 |
cardil/cdi-inheritance-wildfly-swarm | src/main/java/pl/wavesoftware/examples/wildflyswarm/service/api/DefaultImpl.java | 531 | package pl.wavesoftware.examples.wildflyswarm.service.api;
import javax.inject.Qualifier;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import static java.lang.annotation.ElementType.*;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
 * CDI qualifier marking the default implementation of a service interface,
 * used to disambiguate injection points when multiple implementations exist.
 *
 * @author Krzysztof Suszynski <krzysztof.suszynski@coi.gov.pl>
 * @since 07.03.16
 */
@Target({ TYPE, METHOD, PARAMETER, FIELD })
@Retention(RUNTIME)
@Documented
@Qualifier
public @interface DefaultImpl {
}
| apache-2.0 |
midle110/spider | spider_curl/SpiderWeb.cpp | 4851 |
#include "SpiderWeb.h"
#include <string>
#include <string.h>
#include <algorithm>
#include "common.h"
#include "common/WSFunc.h"
#include <boost/locale.hpp>
#include <boost/algorithm/string.hpp>
#include <stdio.h>
#include "glog/logging.h"
using namespace google;
using namespace std;
// Default constructor; no state to initialize beyond member defaults.
CSpiderWeb::CSpiderWeb()
{
}
// Destructor; no owned resources to release.
CSpiderWeb::~CSpiderWeb()
{
}
void CSpiderWeb::TranslateToUtf8()
{
switch (GetWebEncode())
{
case utf8:
{
break;
}
case gbk:
{
strpage = boost::locale::conv::between(strpage, "utf8", "gbk");
break;
}
case gb2312:
{
strpage = boost::locale::conv::between(strpage, "utf8", "gb2312");
break;
}
default:
{
LOG(INFO) << "get web encode error" << endl;
break;
}
}
}
// Converts the fetched page (strpage) to GBK in place when it was UTF-8.
// NOTE(review): the gbk and gb2312 cases are intentionally empty — presumably
// because gb2312 is treated as a subset of GBK needing no conversion; confirm.
void CSpiderWeb::TranslateToGbk()
{
switch (GetWebEncode())
{
case utf8:
{
strpage = boost::locale::conv::between(strpage, "gbk", "utf8");
break;
}
case gbk:
{
break;
}
case gb2312:
{
break;
}
default:
{
LOG(INFO) << "get web encode error" << endl;
break;
}
}
}
// Detects the page charset by scanning (a lowercased copy of) strpage for a
// <meta http-equiv="content-type" ...> or <meta charset=...> tag.
// Note: lowercasing strpage in place is a side effect callers rely on seeing.
// Returns WEB_ENCODE::normal when no recognizable charset is found.
WEB_ENCODE CSpiderWeb::GetWebEncode()
{
	boost::algorithm::to_lower(strpage);
	smatch m;
	// Maps the charset fragment captured from a matched <meta> tag to an enum
	// value; shared by both meta-tag forms below (was duplicated inline).
	auto classify = [](const string &str) -> WEB_ENCODE
	{
		if (str.find("gbk") != string::npos)
		{
			return WEB_ENCODE::gbk;
		}
		if (str.find("gb2312") != string::npos)
		{
			return WEB_ENCODE::gb2312;
		}
		if (str.find("utf8") != string::npos || str.find("utf-8") != string::npos)
		{
			return WEB_ENCODE::utf8;
		}
		return WEB_ENCODE::normal;
	};
	// e.g. <meta http-equiv="content-type" content="text/html; charset=gb2312"/>
	regex reg("<meta http-equiv\\s*=\\s*\"content\\s*-\\s*type\"\\s*content\\s*=\\s*\"([^\"]*)\"\\s*/>");
	// e.g. taobao: <meta charset="gbk">
	regex reg2("<meta charset=([^>]*)>");
	if (regex_search(strpage, m, reg))
	{
		return classify(m[1].str());
	}
	if (regex_search(strpage, m, reg2))
	{
		return classify(m[1].str());
	}
	return WEB_ENCODE::normal;
}
void CSpiderWeb::FindString(const std::string &source, const std::string &str, std::vector<std::smatch> &vec, const int max)
{
vec.clear();
auto it_start = source.cbegin();
smatch sresult;
int index = 0;
while (regex_search(it_start, source.cend(), sresult, regex(str)))
{
vec.push_back(sresult);
it_start = sresult.suffix().first;
if ( max )
{
++index;
if ( index >= max )
{
return;
}
}
}
}
std::string CSpiderWeb::ReplaceString(const std::string str_reg, const std::string &str_cource, const std::string &str_replace)
{
//regex reg(str_reg);
return regex_replace(str_cource, regex(str_reg), str_replace);
}
void CSpiderWeb::Write2File(const std::string& filename)
{
FILE *pfile = fopen(filename.c_str(), "w+");
if (pfile)
{
fwrite(strpage.c_str(), strpage.size(), 1, pfile);
fclose(pfile);
}
}
// Builds the taobao AJAX paging query-string suffix for the given item
// offset, stamping in the current time for the _ksTS parameter.
std::string CSpiderWeb::GetTaobaoSplitParam(unsigned step)
{
	return "&data-key=s&data-value=" + to_string(step)
		+ "&ajax=true&_ksTS=" + to_string(GetSecondTime())
		+ "_492&callback=jsonp493";
}
// Normalizes a fetched JSONP/HTML page: converts to UTF-8, then unescapes
// JSON backslash escapes and \uXXXX entity codes (=, &, >, <) and strips
// highlight markup. Replacement order matters: escaped quotes/slashes are
// unescaped before the \uXXXX sequences that contain backslashes.
void CSpiderWeb::TranslateUnicode()
{
TranslateToUtf8();
strpage = ReplaceString("\\"", strpage, "");
strpage = ReplaceString("\\/", strpage, "/");
strpage = ReplaceString("amp;", strpage, "");
strpage = ReplaceString("\\\\u003d", strpage, "=");
strpage = ReplaceString("\\\\u0026", strpage, "&");
strpage = ReplaceString("\\\\u003e", strpage, ">");
strpage = ReplaceString("\\\\u003c", strpage, "<");
strpage = ReplaceString("<span class=h>", strpage, "");
strpage = ReplaceString("</span>", strpage, "");
}
// Crawls paginated results starting from str_first_url, appending capture
// group `reg_index` of every `str_reg` match to vec_out. Subsequent pages are
// fetched by appending the taobao paging suffix at offsets of `step` items.
// Stops when a fetch fails, a page yields fewer than `step` matches (last
// page), or `maxpage` pages have been crawled.
void CSpiderWeb::Spider100Page(const std::string &str_first_url, std::vector<std::string> &vec_out, const std::string &str_reg, int reg_index, int step , int maxpage )
{
int index = 0;
string str_tmp;
while (index < maxpage) // crawl at most `maxpage` pages
{
if (!index)
{
str_tmp = str_first_url;
}
else
{
str_tmp = str_first_url + GetTaobaoSplitParam(index * step);
}
if (!GetWebsite(str_tmp.c_str()))
{
break;
}
TranslateUnicode();
vector<smatch> vec;
FindString(strpage, str_reg , vec);
for_each(vec.cbegin(), vec.cend(),
[&](const smatch & ts)
{
vec_out.push_back(ts.str(reg_index));
}
);
// fewer matches than a full page means this was the last page
if (vec.size() != step)
{
break;
}
++index;
}
} | apache-2.0 |
prakashd1/LadStorm | src/main/java/com/pd/JmsProvider.java | 845 | package com.pd;
import java.io.Serializable;
import javax.jms.ConnectionFactory;
import javax.jms.Destination;
/**
 * A <code>JmsProvider</code> object encapsulates the <code>ConnectionFactory</code>
 * and <code>Destination</code> JMS objects the <code>JmsSpout</code> needs to manage
 * a topic/queue connection over the course of it's lifecycle.
 *
 * @author P. Taylor Goetz
 *
 */
public interface JmsProvider extends Serializable{
/**
 * Provides the JMS <code>ConnectionFactory</code>
 * @return the connection factory
 * @throws Exception if the factory cannot be created or obtained
 */
public ConnectionFactory connectionFactory() throws Exception;
/**
 * Provides the <code>Destination</code> (topic or queue) from which the
 * <code>JmsSpout</code> will receive messages.
 * @return the JMS destination to consume from
 * @throws Exception if the destination cannot be created or obtained
 */
public Destination destination() throws Exception;
}
| apache-2.0 |
cloudiator/colosseum | app/components/model/ModelValidator.java | 175 | package components.model;
import java.util.Set;
/**
 * Validates model objects of type {@code E}, reporting any problems found.
 *
 * Created by daniel on 19.06.16.
 *
 * @param <E> the model type this validator checks
 */
public interface ModelValidator<E> {
// Returns the set of validation messages for e; an empty set means valid.
Set<ValidationMessage> validate(E e);
}
| apache-2.0 |
roncohen/apm-server | vendor/github.com/elastic/beats/libbeat/cmd/completion.go | 720 | package cmd
import (
"fmt"
"os"
"github.com/spf13/cobra"
)
// genCompletionCmd builds the hidden "completion SHELL" subcommand, which
// prints shell completion code for rootCmd (bash only at the moment).
// The name and version parameters are currently unused but kept for
// signature parity with the other command constructors — TODO confirm.
func genCompletionCmd(name, version string, rootCmd *BeatsRootCmd) *cobra.Command {
completionCmd := cobra.Command{
Use: "completion SHELL",
Short: "Output shell completion code for the specified shell (bash only by the moment)",
// We don't want to expose this one in help:
Hidden: true,
Run: func(cmd *cobra.Command, args []string) {
// exactly one positional argument (the shell name) is required
if len(args) != 1 {
fmt.Println("Expected one argument with the desired shell")
os.Exit(1)
}
switch args[0] {
case "bash":
rootCmd.GenBashCompletion(os.Stdout)
default:
fmt.Printf("Unknown shell %s, only bash is available\n", args[0])
}
},
}
return &completionCmd
}
| apache-2.0 |
xlevus/half-keyboard | TrapKeys/StdAfx.cpp | 837 | // Copyright 2010 Chris Targett
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// stdafx.cpp : source file that includes just the standard includes
// TrapKeys.pch will be the pre-compiled header
// stdafx.obj will contain the pre-compiled type information
#include "stdafx.h"
| apache-2.0 |
BirkhoffLee/cherry-pick | build/webpack.dev.conf.js | 1371 | var utils = require('./utils')
var webpack = require('webpack')
var config = require('../config')
var merge = require('webpack-merge')
var baseWebpackConfig = require('./webpack.base.conf')
var HtmlWebpackPlugin = require('html-webpack-plugin')
var FriendlyErrorsPlugin = require('friendly-errors-webpack-plugin')
// add hot-reload related code to entry chunks
Object.keys(baseWebpackConfig.entry).forEach(function (name) {
baseWebpackConfig.entry[name] = ['./build/dev-client'].concat(baseWebpackConfig.entry[name])
})
// Development webpack config: extends the base config with style loaders,
// fast source maps, HMR, and jQuery globals.
module.exports = merge(baseWebpackConfig, {
module: {
rules: utils.styleLoaders({ sourceMap: config.dev.cssSourceMap })
},
// cheap-module-eval-source-map is faster for development
devtool: '#cheap-module-eval-source-map',
plugins: [
// expose jQuery under all the aliases legacy plugins expect
new webpack.ProvidePlugin({
$: "jquery",
jQuery: "jquery",
"window.jQuery": "jquery",
"root.jQuery": "jquery"
}),
new webpack.DefinePlugin({
'process.env': config.dev.env
}),
// https://github.com/glenjamin/webpack-hot-middleware#installation--usage
new webpack.HotModuleReplacementPlugin(),
new webpack.NoEmitOnErrorsPlugin(),
// https://github.com/ampedandwired/html-webpack-plugin
new HtmlWebpackPlugin({
filename: 'index.html',
template: 'index.html',
inject: true
}),
new FriendlyErrorsPlugin()
]
})
| apache-2.0 |
wearpants/osf.io | website/static/js/filepage/index.js | 29041 | var $ = require('jquery');
var m = require('mithril');
var mime = require('js/mime');
var bootbox = require('bootbox');
var $osf = require('js/osfHelpers');
var waterbutler = require('js/waterbutler');
// Local requires
var utils = require('./util.js');
var FileEditor = require('./editor.js');
var makeClient = require('js/clipboard');
var FileRevisionsTable = require('./revisions.js');
var storageAddons = require('json!storageAddons.json');
var CommentModel = require('js/comment');
var History = require('exports?History!history');
var SocialShare = require('js/components/socialshare');
// Sanity
var Panel = utils.Panel;
var EDITORS = {'text': FileEditor};
// Mithril config hook: attach the clipboard client to the element on the
// first render only (isInitialized is true on subsequent redraws).
var clipboardConfig = function(element, isInitialized) {
    if (isInitialized) {
        return;
    }
    makeClient(element);
};
// Mithril component rendering a copy-to-clipboard button.
// params: {link: text to copy, height: CSS height for the button}.
var CopyButton = {
view: function(ctrl, params) {
return m('span.input-group-btn', m('button.btn.btn-default.btn-md[type="button"]' +
'[data-clipboard-text="' + params.link + '"]',
{config: clipboardConfig, style: {height: params.height}},
m('.fa.fa-copy')));
}
};
// Mithril component rendering the "Share" button with a Bootstrap popover
// containing Share (copyable render link + social buttons) and Embed
// (JS-rendered and direct-iframe snippets) tabs.
// params: {link: MFR render URL, height: iframe height for the embed snippet}.
var SharePopover = {
view: function(ctrl, params) {
var copyButtonHeight = '34px';
var popoverWidth = '450px';
var renderLink = params.link;
var fileLink = window.location.href;
// MFR host is everything before the 'render' path segment of the render URL
var mfrHost = renderLink.substring(0, renderLink.indexOf('render'));
return m('button#sharebutton.disabled.btn.btn-sm.btn-primary.file-share', {onclick: function popOverShow() {
var pop = document.getElementById('popOver');
//This is bad, should only happen for Firefox, thanks @chrisseto
// retry until the popover container exists in the DOM
if (!pop){
return window.setTimeout(popOverShow, 100);
}
m.render(document.getElementById('popOver'), [
m('ul.nav.nav-tabs.nav-justified', [
m('li.active', m('a[href="#share"][data-toggle="tab"]', 'Share')),
m('li', m('a[href="#embed"][data-toggle="tab"]', 'Embed'))
]), m('br'),
m('.tab-content', [
m('.tab-pane.active#share', [
m('.input-group', [
CopyButton.view(ctrl, {link: renderLink, height: copyButtonHeight}), //workaround to allow button to show up on first click
m('input.form-control[readonly][type="text"][value="'+ renderLink +'"]')
]),
SocialShare.ShareButtons.view(ctrl, {title: window.contextVars.file.name, url: fileLink})
]),
m('.tab-pane#embed', [
m('p', 'Dynamically render iframe with JavaScript'),
m('textarea.form-control[readonly][type="text"][value="' +
'<script>window.jQuery || document.write(\'<script src="//code.jquery.com/jquery-1.11.2.min.js">\\x3C/script>\') </script>'+
'<link href="' + mfrHost + 'static/css/mfr.css" media="all" rel="stylesheet">' +
'<div id="mfrIframe" class="mfr mfr-file"></div>' +
'<script src="' + mfrHost + 'static/js/mfr.js">' +
'</script> <script>' +
'var mfrRender = new mfr.Render("mfrIframe", "' + renderLink + '");' +
'</script>' + '"]'
), m('br'),
m('p', 'Direct iframe with fixed height and width'),
m('textarea.form-control[readonly][value="' +
'<iframe src="' + renderLink + '" width="100%" scrolling="yes" height="' + params.height + '" marginheight="0" frameborder="0" allowfullscreen webkitallowfullscreen>"]'
)
])
])
]);
},
config: function(element, isInitialized) {
// initialize the Bootstrap popover once per element
if(!isInitialized){
var button = $(element).popover();
button.on('show.bs.popover', function(e){
//max-width used to override, and width used to create space for the mithril object to be injected
button.data()['bs.popover'].$tip.css('text-align', 'center').css('max-width', popoverWidth).css('width', popoverWidth);
});
}
},
'data-toggle': 'popover', 'data-placement': 'bottom',
'data-content': '<div id="popOver"></div>', 'title': 'Share',
'data-container': 'body', 'data-html': 'true'
}, 'Share');
}
};
// Mithril (v0.x) component for the single-file view page: wires up the MFR
// render iframe, the revisions table, the optional in-browser editor, the
// checkout/check-in/delete actions, and the toolbar that toggles the panels.
var FileViewPage = {
    controller: function(context) {
        var self = this;
        self.context = context;
        self.file = self.context.file;
        self.node = self.context.node;
        self.editorMeta = self.context.editor;
        // Filled in asynchronously by isCheckoutUser() for osfstorage files;
        // stays null for other providers (they have no checkout concept).
        self.file.checkoutUser = null;
        self.requestDone = false;
        self.isLatestVersion = false;
        // Callback handed to the revisions table so it can tell us when the
        // currently displayed version is the latest one.
        self.selectLatest = function() {
            self.isLatestVersion = true;
        };
        // Fetch the file record from the v2 API to learn who (if anyone) has
        // it checked out; renders a warning banner when a collaborator does.
        self.isCheckoutUser = function() {
            $.ajax({
                headers: {
                    'Accept': 'application/json',
                    'Content-Type': 'application/vnd.api+json'
                },
                method: 'get',
                url: window.contextVars.apiV2Prefix + 'files' + self.file.path + '/',
                beforeSend: $osf.setXHRAuthorization
            }).done(function(resp) {
                self.requestDone = true;
                // Extract the user id from the checkout relationship URL
                // (".../users/<id>/"); null when nobody holds the checkout.
                self.file.checkoutUser = resp.data.relationships.checkout ? ((resp.data.relationships.checkout.links.related.href).split('users/')[1]).replace('/', ''): null;
                if ((self.file.checkoutUser) && (self.file.checkoutUser !== self.context.currentUser.id)) {
                    m.render(document.getElementById('alertBar'), m('.alert.alert-warning[role="alert"]', m('span', [
                        m('strong', 'File is checked out.'),
                        ' This file has been checked out by a ',
                        m('a[href="/' + self.file.checkoutUser + '"]', 'collaborator'),
                        '. It needs to be checked in before any changes can be made.'
                    ])));
                }
            });
        };
        if (self.file.provider === 'osfstorage'){
            // osfstorage supports checkout: the user may edit only when no
            // one holds the checkout, or they hold it themselves.
            self.canEdit = function() {
                return ((!self.file.checkoutUser) || (self.file.checkoutUser === self.context.currentUser.id)) ? self.context.currentUser.canEdit : false;
            };
            self.isCheckoutUser();
        } else {
            // Other providers: no checkout round-trip needed.
            self.requestDone = true;
            self.canEdit = function() {
                return self.context.currentUser.canEdit;
            };
        }
        // Waterbutler endpoints for this file.
        $.extend(self.file.urls, {
            delete: waterbutler.buildDeleteUrl(self.file.path, self.file.provider, self.node.id),
            metadata: waterbutler.buildMetadataUrl(self.file.path, self.file.provider, self.node.id),
            revisions: waterbutler.buildRevisionsUrl(self.file.path, self.file.provider, self.node.id),
            content: waterbutler.buildDownloadUrl(self.file.path, self.file.provider, self.node.id, {accept_url: false, mode: 'render'})
        });
        // Branch-aware providers (presumably github-like — confirm): rebuild
        // the revision/content URLs for the requested branch and resolve the
        // provider's external web-view link.
        if ($osf.urlParams().branch) {
            var fileWebViewUrl = waterbutler.buildMetadataUrl(self.file.path, self.file.provider, self.node.id, {branch : $osf.urlParams().branch});
            $.ajax({
                dataType: 'json',
                async: true,
                url: fileWebViewUrl,
                beforeSend: $osf.setXHRAuthorization
            }).done(function(response) {
                window.contextVars.file.urls.external = response.data.extra.webView;
            });
            self.file.urls.revisions = waterbutler.buildRevisionsUrl(self.file.path, self.file.provider, self.node.id, {sha: $osf.urlParams().branch});
            self.file.urls.content = waterbutler.buildDownloadUrl(self.file.path, self.file.provider, self.node.id, {accept_url: false, mode: 'render', branch: $osf.urlParams().branch});
        }
        // Confirm-and-delete; on success navigate back to the files listing.
        $(document).on('fileviewpage:delete', function() {
            bootbox.confirm({
                title: 'Delete file?',
                message: '<p class="overflow">' +
                        'Are you sure you want to delete <strong>' +
                        self.file.safeName + '</strong>?' +
                    '</p>',
                callback: function(confirm) {
                    if (!confirm) {
                        return;
                    }
                    $.ajax({
                        type: 'DELETE',
                        url: self.file.urls.delete,
                        beforeSend: $osf.setXHRAuthorization
                    }).done(function() {
                        window.location = self.node.urls.files;
                    }).fail(function() {
                        $osf.growl('Error', 'Could not delete file.');
                    });
                },
                buttons:{
                    confirm:{
                        label:'Delete',
                        className:'btn-danger'
                    }
                }
            });
        });
        // Check out the file for the current user (PUT to the v2 API), then
        // reload so the page reflects the new checkout state.
        $(document).on('fileviewpage:checkout', function() {
            bootbox.confirm({
                title: 'Confirm file check out?',
                message: 'This would mean ' +
                    'other contributors cannot edit, delete or upload new versions of this file ' +
                    'as long as it is checked out. You can check it back in at anytime.',
                callback: function(confirm) {
                    if (!confirm) {
                        return;
                    }
                    $.ajax({
                        method: 'put',
                        url: window.contextVars.apiV2Prefix + 'files' + self.file.path + '/',
                        beforeSend: $osf.setXHRAuthorization,
                        contentType: 'application/json',
                        dataType: 'json',
                        data: JSON.stringify({
                            data: {
                                id: self.file.path.replace('/', ''),
                                type: 'files',
                                attributes: {
                                    checkout: self.context.currentUser.id
                                }
                            }
                        })
                    }).done(function(resp) {
                        window.location.reload();
                    }).fail(function(resp) {
                        $osf.growl('Error', 'Unable to check out file');
                    });
                },
                buttons:{
                    confirm:{
                        label: 'Check out file',
                        className: 'btn-warning'
                    }
                }
            });
        });
        // Check the file back in (checkout attribute set to null).
        $(document).on('fileviewpage:checkin', function() {
            $.ajax({
                method: 'put',
                url: window.contextVars.apiV2Prefix + 'files' + self.file.path + '/',
                beforeSend: $osf.setXHRAuthorization,
                contentType: 'application/json',
                dataType: 'json',
                data: JSON.stringify({
                    data: {
                        id: self.file.path.replace('/', ''),
                        type: 'files',
                        attributes: {
                            checkout: null
                        }
                    }
                })
            }).done(function(resp) {
                window.location.reload();
            }).fail(function(resp) {
                $osf.growl('Error', 'Unable to check in file');
            });
        });
        // Admin action: clear another user's checkout after confirmation.
        $(document).on('fileviewpage:force_checkin', function() {
            bootbox.confirm({
                title: 'Force check in file?',
                message: 'This will check in the file for all users, allowing it to be edited. Are you sure?',
                buttons: {
                    confirm:{
                        label: 'Force check in',
                        className: 'btn-danger'
                    }
                },
                callback: function(confirm) {
                    if (!confirm) {
                        return;
                    }
                    $.ajax({
                        method: 'put',
                        url: window.contextVars.apiV2Prefix + 'files' + self.file.path + '/',
                        beforeSend: $osf.setXHRAuthorization,
                        contentType: 'application/json',
                        dataType: 'json',
                        data: JSON.stringify({
                            data: {
                                id: self.file.path.replace('/', ''),
                                type: 'files',
                                attributes: {
                                    checkout: null
                                }
                            }
                        })
                    }).done(function(resp) {
                        window.location.reload();
                    }).fail(function(resp) {
                        $osf.growl('Error', 'Unable to force check in file. Make sure you have admin privileges.');
                    });
                }
            });
        });
        $(document).on('fileviewpage:download', function() {
            //replace mode=render with action=download for download count incrementation
            window.location = self.file.urls.content.replace('mode=render', 'action=download');
            return false;
        });
        // Shared state observed by the collaborative (ShareJS) editor.
        self.shareJSObservables = {
            activeUsers: m.prop([]),
            status: m.prop('connecting'),
            userId: self.context.currentUser.id
        };
        // Header for the editor panel: shows the live-editing connection
        // status as a colored progress bar that opens the matching modal.
        self.editHeader = function() {
            return m('.row', [
                m('.col-sm-12', m('span[style=display:block;]', [
                    m('h3.panel-title',[m('i.fa.fa-pencil-square-o'), ' Edit ']),
                    m('.pull-right', [
                        m('.progress.no-margin.pointer', {
                            'data-toggle': 'modal',
                            'data-target': '#' + self.shareJSObservables.status() + 'Modal'
                        }, [
                            m('.progress-bar.p-h-sm.progress-bar-success', {
                                connected: {
                                    style: 'width: 100%',
                                    class: 'progress-bar progress-bar-success'
                                },
                                connecting: {
                                    style: 'width: 100%',
                                    class: 'progress-bar progress-bar-warning progress-bar-striped active'
                                },
                                saving: {
                                    style: 'width: 100%',
                                    class: 'progress-bar progress-bar-info progress-bar-striped active'
                                }
                            }[self.shareJSObservables.status()] || {
                                style: 'width: 100%',
                                class: 'progress-bar progress-bar-danger'
                            }, [
                                m('span.progress-bar-content', [
                                    {
                                        connected: 'Live editing mode ',
                                        connecting: 'Attempting to connect ',
                                        unsupported: 'Unsupported browser ',
                                        saving: 'Saving... '
                                    }[self.shareJSObservables.status()] || 'Unavailable: Live editing ',
                                    m('i.fa.fa-question-circle.fa-large')
                                ])
                            ])
                        ])
                    ])
                ]))
            ]);
        };
        // Hack to delay creation of the editor
        // until we know this is the current file revision
        self.enableEditing = function() {
            // Sometimes we can get here twice, check just in case
            if (self.editor || !self.canEdit()) {
                m.redraw(true);
                return;
            }
            var fileType = mime.lookup(self.file.name.toLowerCase());
            // Only allow files < 1MB to be editable
            if (self.file.size < 1048576 && fileType) { //May return false
                var editor = EDITORS[fileType.split('/')[0]];
                if (editor) {
                    self.editor = new Panel('Edit', self.editHeader, editor, [self.file.urls.content, self.file.urls.sharejs, self.editorMeta, self.shareJSObservables], false);
                }
            }
            m.redraw(true);
        };
        //Hack to polyfill the Panel interface
        //Ran into problems with mithril's caching messing up with multiple "Panels"
        self.revisions = m.component(FileRevisionsTable, self.file, self.node, self.enableEditing, self.canEdit, self.selectLatest);
        self.revisions.selected = false;
        self.revisions.title = 'Revisions';
        // inform the mfr of a change in display size performed via javascript,
        // otherwise the mfr iframe will not update unless the document windows is changed.
        self.triggerResize = $osf.throttle(function () {
            $(document).trigger('fileviewpage:resize');
        }, 1000);
        self.mfrIframeParent = $('#mfrIframeParent');
        // Flips between the render panel and the revisions panel and records
        // the change in browser history (?show=revision / ?show=view).
        function toggleRevisions(e){
            if(self.editor){
                self.editor.selected = false;
            }
            var viewable = self.mfrIframeParent.is(':visible');
            var url = '';
            if (viewable){
                self.mfrIframeParent.toggle();
                self.revisions.selected = true;
                url = '?show=revision';
            } else {
                self.mfrIframeParent.toggle();
                self.revisions.selected = false;
                url = '?show=view';
            }
            var state = {
                scrollTop: $(window).scrollTop(),
            };
            History.pushState(state, 'OSF | ' + window.contextVars.file.name, url);
        }
        // Makes the "version" header text a clickable revisions toggle.
        function changeVersionHeader(){
            document.getElementById('versionLink').style.display = 'inline';
            m.render(document.getElementById('versionLink'), m('a', {onclick: toggleRevisions}, document.getElementById('versionLink').innerHTML));
        }
        var urlParams = $osf.urlParams();
        // The parser found a query so lets check what we need to do
        if ('show' in urlParams){
            if(urlParams.show === 'revision'){
                self.mfrIframeParent.toggle();
                self.revisions.selected = true;
            } else if (urlParams.show === 'view' || urlParams.show === 'edit'){
                self.revisions.selected = false;
            }
        }
        if(self.file.provider === 'osfstorage'){
            changeVersionHeader();
        }
    },
    view: function(ctrl) {
        //This code was abstracted into a panel toggler at one point
        //it was removed and shoved here due to issues with mithril's caching and interacting
        //With other non-mithril components on the page
        //anchor checking hack that will select if true
        var state = {
            scrollTop: $(window).scrollTop(),
        };
        // How many of the editor/view panels are currently active — drives
        // the 50/50 vs full-width column layout below.
        var panelsShown = (
            ((ctrl.editor && ctrl.editor.selected) ? 1 : 0) + // Editor panel is active
            (ctrl.mfrIframeParent.is(':visible') ? 1 : 0) // View panel is active
        );
        var mfrIframeParentLayout;
        var fileViewPanelsLayout;
        if (panelsShown === 2) {
            // view | edit
            mfrIframeParentLayout = 'col-sm-6';
            fileViewPanelsLayout = 'col-sm-6';
        } else {
            // view
            if (ctrl.mfrIframeParent.is(':visible')) {
                mfrIframeParentLayout = 'col-sm-12';
                fileViewPanelsLayout = '';
            } else {
                // edit or revisions
                mfrIframeParentLayout = '';
                fileViewPanelsLayout = 'col-sm-12';
            }
        }
        $('#mfrIframeParent').removeClass().addClass(mfrIframeParentLayout);
        $('.file-view-panels').removeClass().addClass('file-view-panels').addClass(fileViewPanelsLayout);
        // Link out to the storage provider's own view when available
        // (suppressed for private repos, where the link would be broken).
        if(ctrl.file.urls.external && !ctrl.file.privateRepo) {
            m.render(document.getElementById('externalView'), [
                m('p.text-muted', 'View this file on ', [
                    m('a', {href:ctrl.file.urls.external}, storageAddons[ctrl.file.provider].fullName)
                ], '.')
            ]);
        }
        var editButton = function() {
            if (ctrl.editor) {
                return m('button.btn' + (ctrl.editor.selected ? '.btn-primary' : '.btn-default'), {
                    onclick: function (e) {
                        e.preventDefault();
                        // at least one button must remain enabled.
                        if ((!ctrl.editor.selected || panelsShown > 1)) {
                            ctrl.editor.selected = !ctrl.editor.selected;
                            ctrl.revisions.selected = false;
                            var url = '?show=view';
                            state = {
                                scrollTop: $(window).scrollTop(),
                            };
                            History.pushState(state, 'OSF | ' + window.contextVars.file.name, url);
                        }
                    }
                }, ctrl.editor.title);
            }
        };
        // Build the share link/height from the live iframe attributes;
        // falls back to a placeholder before the iframe exists.
        var link = $('iframe').attr('src') ? $('iframe').attr('src').substring(0, $('iframe').attr('src').indexOf('download') + 8) +
            '%26mode=render' : 'Data not available';
        var height = $('iframe').attr('height') ? $('iframe').attr('height') : '0px';
        m.render(document.getElementById('toggleBar'), m('.btn-toolbar.m-t-md', [
            // Special case whether or not to show the delete button for published Dataverse files
            (ctrl.canEdit() && (ctrl.file.provider !== 'osfstorage' || !ctrl.file.checkoutUser) && ctrl.requestDone && $(document).context.URL.indexOf('version=latest-published') < 0 ) ? m('.btn-group.m-l-xs.m-t-xs', [
                ctrl.isLatestVersion ? m('button.btn.btn-sm.btn-danger.file-delete', {onclick: $(document).trigger.bind($(document), 'fileviewpage:delete')}, 'Delete') : null
            ]) : '',
            ctrl.context.currentUser.canEdit && (!ctrl.canEdit()) && ctrl.requestDone && (ctrl.context.currentUser.isAdmin) ? m('.btn-group.m-l-xs.m-t-xs', [
                ctrl.isLatestVersion ? m('.btn.btn-sm.btn-danger', {onclick: $(document).trigger.bind($(document), 'fileviewpage:force_checkin')}, 'Force check in') : null
            ]) : '',
            ctrl.canEdit() && (!ctrl.file.checkoutUser) && ctrl.requestDone && (ctrl.file.provider === 'osfstorage') ? m('.btn-group.m-l-xs.m-t-xs', [
                ctrl.isLatestVersion ? m('.btn.btn-sm.btn-warning', {onclick: $(document).trigger.bind($(document), 'fileviewpage:checkout')}, 'Check out') : null
            ]) : '',
            (ctrl.canEdit() && (ctrl.file.checkoutUser === ctrl.context.currentUser.id) && ctrl.requestDone) ? m('.btn-group.m-l-xs.m-t-xs', [
                ctrl.isLatestVersion ? m('.btn.btn-sm.btn-warning', {onclick: $(document).trigger.bind($(document), 'fileviewpage:checkin')}, 'Check in') : null
            ]) : '',
            window.contextVars.node.isPublic? m('.btn-group.m-t-xs', [
                m.component(SharePopover, {link: link, height: height})
            ]) : '',
            m('.btn-group.m-t-xs', [
                ctrl.isLatestVersion ? m('button.btn.btn-sm.btn-primary.file-download', {onclick: $(document).trigger.bind($(document), 'fileviewpage:download')}, 'Download') : null
            ]),
            m('.btn-group.btn-group-sm.m-t-xs', [
                ctrl.editor ? m( '.btn.btn-default.disabled', 'Toggle view: ') : null
            ].concat(
                m('button.btn' + (ctrl.mfrIframeParent.is(':visible') ? '.btn-primary' : '.btn-default'), {
                    onclick: function (e) {
                        e.preventDefault();
                        // at least one button must remain enabled.
                        if (!ctrl.mfrIframeParent.is(':visible') || panelsShown > 1) {
                            ctrl.mfrIframeParent.toggle();
                            ctrl.revisions.selected = false;
                            History.pushState(state, 'OSF | ' + window.contextVars.file.name, '?show=view');
                        } else if (ctrl.mfrIframeParent.is(':visible') && !ctrl.editor){
                            ctrl.mfrIframeParent.toggle();
                            ctrl.revisions.selected = true;
                            History.pushState(state, 'OSF | ' + window.contextVars.file.name, '?show=revision');
                        }
                    }
                }, 'View'), editButton())
            ),
            m('.btn-group.m-t-xs', [
                m('button.btn.btn-sm' + (ctrl.revisions.selected ? '.btn-primary': '.btn-default'), {onclick: function(){
                    var editable = ctrl.editor && ctrl.editor.selected;
                    var viewable = ctrl.mfrIframeParent.is(':visible');
                    if (editable || viewable){
                        if (viewable){
                            ctrl.mfrIframeParent.toggle();
                        }
                        if (editable) {
                            ctrl.editor.selected = false;
                        }
                        ctrl.revisions.selected = true;
                        History.pushState(state, 'OSF | ' + window.contextVars.file.name, '?show=revision');
                    } else {
                        ctrl.mfrIframeParent.toggle();
                        if (ctrl.editor) {
                            ctrl.editor.selected = false;
                        }
                        ctrl.revisions.selected = false;
                        History.pushState(state, 'OSF | ' + window.contextVars.file.name, '?show=view');
                    }
                }}, 'Revisions')
            ])
        ]));
        // Revisions panel takes over the whole page when selected.
        if (ctrl.revisions.selected){
            return m('.file-view-page', m('.panel-toggler', [
                m('.row', ctrl.revisions)
            ]));
        }
        var editDisplay = (ctrl.editor && !ctrl.editor.selected) ? 'display:none' : '' ;
        ctrl.triggerResize();
        return m('.file-view-page', m('.panel-toggler', [
            m('.row[style="' + editDisplay + '"]', m('.col-sm-12', ctrl.editor),
            m('.row[style="display:none"]', ctrl.revisions))
        ]));
    }
};
// Initialize file comment pane (only when the page includes a .comments
// container) and bind the discussion to this file and node context.
var $comments = $('.comments');
if ($comments.length) {
    var options = {
        nodeId: window.contextVars.node.id,
        nodeApiUrl: window.contextVars.node.urls.api,
        isRegistration: window.contextVars.node.isRegistration,
        page: 'files',
        rootId: window.contextVars.file.guid,
        fileId: window.contextVars.file.id,
        canComment: window.contextVars.currentUser.canComment,
        hasChildren: window.contextVars.node.hasChildren,
        currentUser: window.contextVars.currentUser,
        pageTitle: window.contextVars.file.name,
        inputSelector: '.atwho-input'
    };
    CommentModel.init('#commentsLink', '.comment-pane', options);
}
// Module entry point: sets up the MFR render iframe and returns the mounted
// FileViewPage component.
module.exports = function(context) {
    // Treebeard forces all mithril to load twice; to avoid
    // destroying the page iframe this is handled outside of mithril.
    if (!context.file.urls.render) {
        // No render URL means the file could not be loaded — show the error.
        $('#mfrIframe').html(context.file.error);
    } else {
        var url = context.file.urls.render;
        if (navigator.appVersion.indexOf('MSIE 9.') !== -1) {
            // IE9 workaround: pass the auth cookie as a query parameter.
            // NOTE(review): the regex alternation with `|$` guarantees a match
            // object, so the [1] index is safe — group falls back to ''.
            url += url.indexOf('?') > -1 ? '&' : '?';
            url += 'cookie=' + (document.cookie.match(window.contextVars.cookieName + '=(.+?);|$')[1] || '');
        }
        if (window.mfr !== undefined) {
            var mfrRender = new mfr.Render('mfrIframe', url, {}, 'cos_logo.png');
            // Re-render / resize hooks fired elsewhere on the page.
            $(document).on('fileviewpage:reload', function() {
                mfrRender.reload();
            });
            $(document).on('fileviewpage:resize', function() {
                mfrRender.resize();
            });
        }
    }
    return m.component(FileViewPage, context);
};
| apache-2.0 |
Rafael-Prado/Fabrica_Cerveja | src/main/java/com/prado/cerveja/repository/helper/cerveja/CidadesImpl.java | 2013 | package com.prado.cerveja.repository.helper.cerveja;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import org.hibernate.Criteria;
import org.hibernate.Session;
import org.hibernate.criterion.MatchMode;
import org.hibernate.criterion.Projections;
import org.hibernate.criterion.Restrictions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.StringUtils;
import com.prado.cerveja.model.Cidade;
import com.prado.cerveja.repository.filter.CidadeFilter;
import com.prado.cerveja.repository.paginacao.PaginacaoUtil;
/**
 * Custom query implementation for {@code Cidade} lookups, backed by the
 * Hibernate Criteria API unwrapped from the JPA {@code EntityManager}.
 */
public class CidadesImpl implements CidadesQueries{

    @PersistenceContext
    private EntityManager manager;

    @Autowired
    private PaginacaoUtil paginacaoUtil;

    /**
     * Returns one page of cities matching the given filter.
     *
     * @param filtro   optional search criteria (state and/or name); may be {@code null}
     * @param pageable pagination and sorting settings applied to the query
     * @return the requested page together with the total match count
     */
    @SuppressWarnings("unchecked")
    @Override
    @Transactional(readOnly = true)
    public Page<Cidade> filtrar(CidadeFilter filtro, Pageable pageable) {
        Criteria query = manager.unwrap(Session.class).createCriteria(Cidade.class);
        paginacaoUtil.preparar(query, pageable);
        adicionarFiltro(filtro, query);
        // Alias for the association — presumably needed so sort/order
        // properties can reference the state entity; confirm against callers.
        query.createAlias("estado", "e");
        return new PageImpl<>(query.list(), pageable, total(filtro));
    }

    /** Counts every city matching the filter, ignoring pagination. */
    private Long total(CidadeFilter filtro) {
        Criteria countQuery = manager.unwrap(Session.class).createCriteria(Cidade.class);
        adicionarFiltro(filtro, countQuery);
        countQuery.setProjection(Projections.rowCount());
        return (Long) countQuery.uniqueResult();
    }

    /** Applies the optional state and name restrictions to the criteria. */
    private void adicionarFiltro(CidadeFilter filtro, Criteria query) {
        if (filtro == null) {
            return;
        }
        if (filtro.getEstado() != null) {
            query.add(Restrictions.eq("estado", filtro.getEstado()));
        }
        if (!StringUtils.isEmpty(filtro.getNome())) {
            query.add(Restrictions.ilike("nome", filtro.getNome(), MatchMode.ANYWHERE));
        }
    }
}
| apache-2.0 |
BigBlueHat/futon2 | _attachments/script/sammy/plugins/sammy.hogan.js | 4392 | (function($) {
Sammy = Sammy || {};
// <tt>Sammy.Hogan</tt> provides a quick way of using hogan.js style templates in your app.
// The plugin wraps the awesome hogan.js lib created and maintained by Twitter
// at http://twitter.github.com/hogan.js/
//
// Note: As of Sammy 0.7 the Hogan.js lib is not included in the templates source. Please download
// hogan.js and include it before Sammy.Hogan.
//
  // Hogan.js is a clever templating system that relies on double brackets {{}} for interpolation.
// For full details on syntax check out the documantation at
// http://twitter.github.com/hogan.js/
//
// By default using Sammy.Hogan in your app adds the <tt>hogan()</tt> method to the EventContext
// prototype. However, just like <tt>Sammy.Hogan</tt> you can change the default name of the method
// by passing a second argument (e.g. you could use the hg() as the method alias so that all the template
// files could be in the form file.hg instead of file.hogan)
//
// ### Example #1
//
// The template (mytemplate.hg):
//
// <h1>{{title}}<h1>
//
// Hey, {{name}}! Welcome to Mustache!
//
// The app:
//
// var app = $.sammy(function() {
// // include the plugin and alias hogan() to hg()
// this.use('Hogan', 'hg');
//
// this.get('#/hello/:name', function() {
// // set local vars
// this.title = 'Hello!'
// this.name = this.params.name;
// // render the template and pass it through hogan
// this.partial('mytemplate.hg');
// });
// });
//
// $(function() {
// app.run()
// });
//
// If I go to #/hello/AQ in the browser, Sammy will render this to the <tt>body</tt>:
//
// <h1>Hello!</h1>
//
// Hey, AQ! Welcome to Mustache!
//
//
// ### Example #2 - Hogan partials
//
// The template (mytemplate.hg)
//
// Hey, {{name}}! {{>hello_friend}}
//
//
// The partial (mypartial.hg)
//
// Say hello to your friend {{friend}}!
//
// The app:
//
// var app = $.sammy(function() {
// // include the plugin and alias hogan() to hg()
// this.use('Hogan', 'hg');
//
// this.get('#/hello/:name/to/:friend', function(context) {
// // fetch hogan-partial first
// this.load('mypartial.hg')
// .then(function(partial) {
// // set local vars
// context.partials = {hello_friend: partial};
// context.name = context.params.name;
// context.friend = context.params.friend;
//
// // render the template and pass it through hogan
// context.partial('mytemplate.hg');
// });
// });
// });
//
// $(function() {
// app.run()
// });
//
// If I go to #/hello/AQ/to/dP in the browser, Sammy will render this to the <tt>body</tt>:
//
// Hey, AQ! Say hello to your friend dP!
//
// Note: You dont have to include the hogan.js file on top of the plugin as the plugin
// includes the full source.
//
Sammy.Hogan = function(app, method_alias) {
var cached_templates = {};
// *Helper* Uses Hogan.js to parse a template and interpolate and work with the passed data
//
// ### Arguments
//
// * `template` A String template. {{}} Tags are evaluated and interpolated by Hogan.js
// * `data` An Object containing the replacement values for the template.
// data is extended with the <tt>EventContext</tt> allowing you to call its methods within the template.
// * `partials` An Object containing one or more partials (String templates
// that are called from the main template).
//
var hogan = function(template, data, partials) {
var compiled_template = cached_templates[compiled_template];
if (!compiled_template){
compiled_template = Hogan.compile(template);
}
data = $.extend({}, this, data);
partials = $.extend({}, data.partials, partials);
return compiled_template.render(data, partials);
};
// set the default method name/extension
if (!method_alias) { method_alias = 'hogan'; }
app.helper(method_alias, hogan);
};
})(jQuery);
| apache-2.0 |
selentd/pythontools | pytools/src/test/test_evalresult.py | 4190 | '''
Created on 21.10.2015
@author: selen00r
'''
import datetime
import unittest
import evalresult
import fetchdata
import indexdata
class EvalResultTest(unittest.TestCase):
    """Unit tests for the result calculators and evaluation helpers in
    ``evalresult`` (project module, not shown here)."""

    def setUp(self):
        # Fixture values for the live-data test below. NOTE(review): assumes a
        # reachable "stockdb" data source behind fetchdata — TODO confirm.
        self.dbName = "stockdb"
        self.idxDax = "dax"
        self.startDate = datetime.datetime( 2013, 12, 1 )
        self.endDate = datetime.datetime( 2014, 3, 1)

    def tearDown(self):
        # No per-test cleanup required.
        pass

    def testResultCalculator(self):
        # Relative (fractional) results; the total accumulates across calls
        # and reset() brings it back to zero.
        calculator = evalresult.ResultCalculator()
        self.assertEqual(calculator.getTotal(), 0, "Invalid initial total")
        self.assertAlmostEqual(calculator.calcResult(100, 110), 0.1, 1, "Invalid result calculation")
        self.assertAlmostEqual(calculator.getTotal(), 0.1, 1, "Invalid total calculation")
        self.assertAlmostEqual(calculator.calcResult(100, 90), -0.1, 1, "Invalid result calculation")
        self.assertAlmostEqual(calculator.getTotal(), 0.0, 1, "Invalid total calculation")
        self.assertAlmostEqual(calculator.calcResult(100, 110), 0.1, 1, "Invalid result calculation")
        calculator.reset()
        self.assertEqual(calculator.getTotal(), 0, "Invalid initial total")

    def testResultCalculatorEuroFixed(self):
        # Euro-denominated results with a fixed stake: every trade is sized
        # from the initial 1000, so a 10% move is always +/-100.
        calculator = evalresult.ResultCalculatorEuro( 1000 )
        self.assertEqual(calculator.getTotal(), 1000.0, "Invalid initial total")
        self.assertAlmostEqual(calculator.calcResult(100, 110), 100.0, 1, "Invalid result calculation")
        self.assertAlmostEqual(calculator.getTotal(), 1100.0, 1, "Invalid total calculation")
        self.assertAlmostEqual(calculator.calcResult(100, 90), -100.0, 1, "Invalid result calculation")
        self.assertAlmostEqual(calculator.getTotal(), 1000.0, 1, "Invalid total calculation")
        self.assertAlmostEqual(calculator.calcResult(100, 110), 100.0, 1, "Invalid result calculation")
        calculator.reset()
        self.assertEqual(calculator.getTotal(), 1000.0, "Invalid initial total")

    def testResultCalculatorEuroReinvest(self):
        # Reinvest mode (second ctor arg False): trade size follows the
        # running total, so the loss after a win is -110 rather than -100.
        calculator = evalresult.ResultCalculatorEuro( 1000, False )
        self.assertEqual(calculator.getTotal(), 1000.0, "Invalid initial total")
        self.assertAlmostEqual(calculator.calcResult(100, 110), 100.0, 1, "Invalid result calculation")
        self.assertAlmostEqual(calculator.getTotal(), 1100.0, 1, "Invalid total calculation")
        self.assertAlmostEqual(calculator.calcResult(100, 90), -110.0, 1, "Invalid result calculation")
        self.assertAlmostEqual(calculator.getTotal(), 1000.0, 1, "Invalid total calculation")
        self.assertAlmostEqual(calculator.calcResult(100, 110), 100.0, 1, "Invalid result calculation")
        self.assertAlmostEqual(calculator.getTotal(), (1000.0+100.0), 1, "Invalid total calculation")
        calculator.reset()
        self.assertEqual(calculator.getTotal(), 1000.0, "Invalid initial total")

    def testEvalResultCall(self):
        # Integration-style test: fetches real monthly DAX history via
        # fetchdata (external data dependency — may be slow/flaky) and checks
        # the win/loss bookkeeping over the three months of the fixture range.
        evaluation = evalresult.EvalResult( "test dax", 1000.0 )
        transactionResultList = indexdata.TransactionResultHistory()
        monthlyHistory = fetchdata.FetchData( self.idxDax ).fetchMonthlyHistory(self.startDate, self.endDate)
        for historyList in monthlyHistory:
            transactionResult = indexdata.TransactionResult()
            transactionResult.setResult(historyList.getFirst(), historyList.getLast())
            transactionResultList.addTransactionResult( transactionResult )
        transactionResultList.evaluateResult( evaluation )
        self.assertEqual( evaluation.getTotalCount(), 3, "Invalid total count for evaluation")
        self.assertEqual(evaluation.winCount, 2, "Invalid count for wins")
        self.assertEqual(evaluation.lossCount, 1, "Invalid count for losses")
        self.assertAlmostEqual(evaluation.getWinRatio(), (2.0 / 3.0), 4, "Invalid win ratio" )
def suite():
    """Build and return the unittest suite for this module."""
    loader = unittest.TestLoader()
    return unittest.TestSuite(loader.loadTestsFromTestCase(EvalResultTest))
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main() | apache-2.0 |
streamsets/datacollector | container/src/main/java/com/streamsets/datacollector/main/RuntimeModule.java | 5013 | /*
* Copyright 2017 StreamSets Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.datacollector.main;
import com.codahale.metrics.MetricRegistry;
import com.google.common.collect.ImmutableList;
import com.streamsets.datacollector.execution.EventListenerManager;
import com.streamsets.datacollector.http.WebServerTask;
import com.streamsets.datacollector.metrics.MetricsModule;
import com.streamsets.datacollector.runner.Pipeline;
import com.streamsets.datacollector.security.usermgnt.UsersManager;
import com.streamsets.datacollector.util.Configuration;
import com.streamsets.pipeline.api.impl.Utils;
import dagger.Module;
import dagger.Provides;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.inject.Singleton;
import java.io.File;
import java.util.Collections;
import java.util.List;
@Module(
library = true,
injects = {
BuildInfo.class,
RuntimeInfo.class,
Configuration.class,
EventListenerManager.class,
UsersManager.class,
UserGroupManager.class
},
includes = MetricsModule.class
)
public class RuntimeModule {
private static final Logger LOG = LoggerFactory.getLogger(RuntimeModule.class);
private static String productName = RuntimeInfo.SDC_PRODUCT;
private static String propertyPrefix = RuntimeInfo.SDC_PRODUCT;
private static File baseDir = null;
/**
* Kept under SDC-12270 to avoid changing too many files
*/
public static final String SDC_PROPERTY_PREFIX = RuntimeInfo.SDC_PRODUCT;
public static final String PIPELINE_EXECUTION_MODE_KEY = "pipeline.execution.mode";
private static List<ClassLoader> stageLibraryClassLoaders = Collections.emptyList();//ImmutableList.of(RuntimeModule.class.getClassLoader());
private static final String STAGE_CONFIG_PREFIX = "stage.conf_";
public static synchronized void setStageLibraryClassLoaders(List<? extends ClassLoader> classLoaders) {
stageLibraryClassLoaders = ImmutableList.copyOf(classLoaders);
}
public static synchronized void setProductName(String productName) {
RuntimeModule.productName = productName;
}
public static synchronized void setPropertyPrefix(String propertyPrefix) {
RuntimeModule.propertyPrefix = propertyPrefix;
}
public static synchronized void setBaseDir(File baseDir) {
RuntimeModule.baseDir = baseDir;
}
//TODO: add setProductName and make that available in RuntimeInfo when constructed
@Provides @Singleton
public BuildInfo provideBuildInfo() {
return new ProductBuildInfo(productName);
}
@Provides @Singleton
public RuntimeInfo provideRuntimeInfo(MetricRegistry metrics) {
RuntimeInfo info = new StandaloneRuntimeInfo(
productName,
propertyPrefix,
metrics,
stageLibraryClassLoaders,
baseDir
);
info.init();
return info;
}
@Provides @Singleton
public Configuration provideConfiguration(RuntimeInfo runtimeInfo) {
Configuration.setFileRefsBaseDir(new File(runtimeInfo.getConfigDir()));
Configuration.setFileRefsResourcesDir(new File(runtimeInfo.getResourcesDir()));
Configuration conf = new Configuration();
RuntimeInfo.loadOrReloadConfigs(runtimeInfo, conf);
// remapping max runner config so it is available to stages
int maxRunners = conf.get(Pipeline.MAX_RUNNERS_CONFIG_KEY, Pipeline.MAX_RUNNERS_DEFAULT);
conf.set(STAGE_CONFIG_PREFIX + Pipeline.MAX_RUNNERS_CONFIG_KEY, maxRunners);
return conf;
}
@Provides @Singleton
public EventListenerManager provideEventListenerManager() {
return new EventListenerManager();
}
@Provides @Singleton
public UsersManager provideUsersManager(RuntimeInfo runtimeInfo, Configuration configuration) {
return RuntimeModuleUtils.provideUsersManager(runtimeInfo, configuration);
}
@Provides @Singleton
public UserGroupManager provideUserGroupManager(Configuration configuration, UsersManager usersManager) {
String loginModule = configuration.get(
WebServerTask.HTTP_AUTHENTICATION_LOGIN_MODULE,
WebServerTask.HTTP_AUTHENTICATION_LOGIN_MODULE_DEFAULT
);
switch (loginModule) {
case WebServerTask.FILE:
return new FileUserGroupManager(usersManager);
case WebServerTask.LDAP:
return new LdapUserGroupManager();
default:
throw new RuntimeException(Utils.format("Invalid Authentication Login Module '{}', must be one of '{}'",
loginModule, WebServerTask.LOGIN_MODULES));
}
}
}
| apache-2.0 |
geosub/twitter-cldr-js | lib/twitter_cldr/js/renderers/parsers/unicode_regex/component.rb | 349 | # encoding: UTF-8
# Copyright 2012 Twitter, Inc
# http://www.apache.org/licenses/LICENSE-2.0
module TwitterCldr
module Js
module Renderers
module Parsers
class ComponentRenderer < TwitterCldr::Js::Renderers::Base
set_template "mustache/parsers/unicode_regex/component.coffee"
end
end
end
end
end | apache-2.0 |
Im-dex/xray-162 | code/engine/xrGame/ai/monsters/control_path_builder_base_path.cpp | 8238 | #include "stdafx.h"
#include "control_path_builder_base.h"
#include "../../cover_point.h"
#include "../../cover_manager.h"
#include "../../cover_evaluators.h"
#include "BaseMonster/base_monster.h"
#include "../../detail_path_manager.h"
//#include "../../level_location_selector.h"
#include "../../level_path_manager.h"
#include "../../ai_object_location.h"
// Radius used when picking a random fallback target position around the
// monster (presumably metres — confirm against engine units).
const float pmt_find_point_dist = 30.f;
// How many times to retry choosing a random accessible position before
// giving up.
const u32 pmt_find_random_pos_attempts = 5;
//////////////////////////////////////////////////////////////////////////
// Decides whether the movement target has to be re-evaluated and the path
// rebuilt, based on the current path-builder state flags.
// (Original comments were Russian in a broken encoding; translated below.)
bool CControlPathBuilderBase::target_point_need_update() {
    if ((m_state & eStatePathFailed) == eStatePathFailed)
        return true;
    else if (m_state == eStatePathValid) {
        // if the path has not been finished yet
        if (!m_man->path_builder().is_path_end(m_distance_to_path_end)) {
            if (m_target_actual && !global_failed())
                return false; // if global_failed - ignore target actuality
            // if the path is being built for the first time
            if (m_last_time_target_set == 0)
                return true;
            // do not rebuild while the allotted travel time has not elapsed
            return (m_last_time_target_set + m_time < time());
        }
        // return (!m_target_actual); // logical end of the path
        return (true);
    //} else if ((m_state & eStateWaitParamsApplied) == eStateWaitParamsApplied) {
    //    return false;
    } else if ((m_state & eStateWaitNewPath) == eStateWaitNewPath) {
        return false;
    } else if ((m_state & eStateNoPath) == eStateNoPath) {
        return true;
    } else if ((m_state & eStatePathEnd) == eStatePathEnd) {
        if (m_target_set.node() != m_object->ai_location().level_vertex_id())
            return true; // physical end of the path
    }
    return false;
}
//////////////////////////////////////////////////////////////////////////
// Computing m_target_found.
// On entry m_target_set already holds the chosen node and position.
// (Original comments were Russian in a broken encoding; translated below.)
void CControlPathBuilderBase::find_target_point_set() {
    m_target_found.set(m_target_set.position(), m_target_set.node());
    //---------------------------------------------------
    // Quick tests
    if (m_target_type == eMoveToTarget) {
        // 1. quick test: is the target directly reachable?
        Fvector new_position = m_target_found.position();
        if (m_man->path_builder().valid_and_accessible(new_position, m_target_found.node())) {
            m_target_found.set_position(new_position);
            return;
        }
        m_target_found.set_position(new_position);
        // 2. quick test for an unreachable target (pick a random position)
        if (!m_man->path_builder().accessible(m_target_found.position())) {
            Fvector new_position = m_target_found.position();
            m_target_found.set_node(m_man->path_builder().restrictions().accessible_nearest(
                m_target_found.position(), new_position));
            m_target_found.set_position(new_position);
            Fvector pos_random;
            Fvector dir;
            dir.random_dir();
            pos_random.mad(m_object->Position(), dir, pmt_find_point_dist);
            set_target_accessible(m_target_found, pos_random);
            if (m_target_found.node() != u32(-1))
                return;
        }
    }
    m_target_found.set_node(u32(-1));
    //---------------------------------------------------
    // I. Choose a position
    if (m_target_type == eRetreatFromTarget) {
        // Project the retreat point away from the target, past the monster.
        Fvector dir;
        dir.sub(m_object->Position(), m_target_found.position());
        dir.normalize_safe();
        m_target_found.set_position(
            Fvector(m_target_found.position()).mad(m_object->Position(), dir, pmt_find_point_dist));
    }
    // make sure the position is accessible
    if (!m_man->path_builder().accessible(m_target_found.position())) {
        Fvector new_position = m_target_found.position();
        m_target_found.set_node(m_man->path_builder().restrictions().accessible_nearest(
            Fvector().set(m_target_found.position()), new_position));
        m_target_found.set_position(new_position);
    }
    // if the new position equals the monster's one - pick a random valid position
    for (u32 i = 0; i < pmt_find_random_pos_attempts; i++) {
        if (m_target_found.position().similar(m_object->Position(), 0.5f)) {
            Fvector pos_random;
            Fvector dir;
            dir.random_dir();
            pos_random.mad(m_object->Position(), dir, pmt_find_point_dist);
            set_target_accessible(m_target_found, pos_random);
        } else
            break;
    }
    if (m_target_found.node() != u32(-1))
        return;
    if (!ai().level_graph().valid_vertex_position(m_target_found.position())) {
        find_target_point_failed();
        return;
    }
    //---------------------------------------------------
    // II. A position has been chosen, now look for a node
    find_node();
}
//////////////////////////////////////////////////////////////////////////
// if path FAILED
void CControlPathBuilderBase::find_target_point_failed() {
// åñëè íîâàÿ ïîçèöèÿ = ïîçèöèè ìîíñòðà - âûáðàòü ðàíäîìíóþ âàëèäíóþ ïîçèöèþ
for (u32 i = 0; i < pmt_find_random_pos_attempts; i++) {
Fvector pos_random;
Fvector dir;
dir.random_dir();
pos_random.mad(m_object->Position(), dir, pmt_find_point_dist);
set_target_accessible(m_target_found, pos_random);
if (!m_target_found.position().similar(m_object->Position(), 0.5f))
break;
}
if (m_target_found.node() != u32(-1))
return;
//---------------------------------------------------
// II. Âûáðàíà ïîçèöèÿ, èùåì íîäó
find_node();
}
// Resolves a level-graph node for m_target_found.position(), trying strategies
// in order of cost: direct line-of-sight check, direct vertex lookup, cover
// points, and finally the nearest vertex within a fixed search radius.
void CControlPathBuilderBase::find_node() {
    // is the node reachable in direct line of sight from the monster?
    // temporarily add a border so the check respects the current restrictions
    m_man->path_builder().restrictions().add_border(m_object->Position(),
        m_target_found.position());
    m_target_found.set_node(ai().level_graph().check_position_in_direction(
        m_object->ai_location().level_vertex_id(), m_object->Position(),
        m_target_found.position()));
    m_man->path_builder().restrictions().remove_border();
    if (ai().level_graph().valid_vertex_id(m_target_found.node()) &&
        m_man->path_builder().accessible(m_target_found.node())) {
        // position correction: snap the position onto the found node
        Fvector new_position = m_target_found.position();
        m_man->path_builder().fix_position(Fvector().set(m_target_found.position()),
            m_target_found.node(), new_position);
        m_target_found.set_position(new_position);
        return;
    }
    // look up the node via a direct vertex query on the level graph
    if (ai().level_graph().valid_vertex_position(m_target_found.position())) {
        m_target_found.set_node(ai().level_graph().vertex_id(m_target_found.position()));
        if (ai().level_graph().valid_vertex_id(m_target_found.node()) &&
            m_man->path_builder().accessible(m_target_found.node())) {
            // position correction: snap the position onto the found node
            Fvector new_position = m_target_found.position();
            m_man->path_builder().fix_position(Fvector().set(m_target_found.position()),
                m_target_found.node(), new_position);
            m_target_found.set_position(new_position);
            return;
        }
    }
    // find a node with the help of cover points
    if (m_cover_info.use_covers) {
        m_cover_approach->setup(m_target_found.position(), m_cover_info.min_dist,
            m_cover_info.max_dist, m_cover_info.deviation);
        const CCoverPoint* point = ai().cover_manager().best_cover(
            m_object->Position(), m_cover_info.radius, *m_cover_approach);
        // found a cover?
        if (point) {
            m_target_found.set_node(point->m_level_vertex_id);
            m_target_found.set_position(point->m_position);
            return;
        }
    }
    // no node found; a selector will be used at the next stage
    m_target_found.set_node(m_man->path_builder().find_nearest_vertex(
        m_object->ai_location().level_vertex_id(), m_target_found.position(), 30.f));
    m_target_found.set_position(ai().level_graph().vertex_position(m_target_found.node()));
}
| apache-2.0 |
lesaint/experimenting-annotation-processing | experimenting-rounds/massive-count-of-annotated-classes/src/main/java/fr/javatronic/blog/massive/annotation1/sub1/Class_4286.java | 151 | package fr.javatronic.blog.massive.annotation1.sub1;
import fr.javatronic.blog.processor.Annotation_001;
/**
 * Empty class carrying the {@code Annotation_001} marker annotation.
 * It declares no members; presumably it exists only to be discovered by an
 * annotation processor — confirm against the processor module.
 */
@Annotation_001
public class Class_4286 {
}
| apache-2.0 |
pdrados/cas | support/cas-server-support-radius-core/src/test/java/org/apereo/cas/adaptors/radius/RadiusUtilsTests.java | 2290 | package org.apereo.cas.adaptors.radius;
import org.apereo.cas.util.CollectionUtils;
import lombok.val;
import net.jradius.dictionary.Attr_ClientId;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import javax.security.auth.login.FailedLoginException;
import java.util.Optional;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.Mockito.*;
/**
* This is {@link RadiusUtilsTests}.
*
* @author Misagh Moayyed
* @since 5.3.0
*/
@Tag("Radius")
public class RadiusUtilsTests {

    @Test
    public void verifyActionPasses() throws Exception {
        // Server answers with a valid response carrying a single attribute:
        // the result must be successful and carry the response payload.
        val mockServer = mock(RadiusServer.class);
        val clientId = new Attr_ClientId("client_id");
        val radiusResponse = new CasRadiusResponse(100, 100, CollectionUtils.wrapList(clientId));
        when(mockServer.authenticate(anyString(), anyString(), any())).thenReturn(radiusResponse);

        val authResult = RadiusUtils.authenticate("casuser", "Mellon",
            CollectionUtils.wrapList(mockServer), true, false, Optional.empty());
        assertTrue(authResult.getKey());
        assertTrue(authResult.getRight().isPresent());
    }

    @Test
    public void verifyActionFailsWithFailOver() throws Exception {
        // With failover enabled, a null server response yields a failed (false) result
        // instead of an exception.
        val mockServer = mock(RadiusServer.class);
        when(mockServer.authenticate(anyString(), anyString())).thenReturn(null);
        val authResult = RadiusUtils.authenticate("casuser", "Mellon",
            CollectionUtils.wrapList(mockServer), true, false, Optional.empty());
        assertFalse(authResult.getKey());
    }

    @Test
    public void verifyActionFails() throws Exception {
        // Without failover, a null server response must raise FailedLoginException.
        val mockServer = mock(RadiusServer.class);
        when(mockServer.authenticate(anyString(), anyString())).thenReturn(null);
        assertThrows(FailedLoginException.class,
            () -> RadiusUtils.authenticate("casuser", "Mellon",
                CollectionUtils.wrapList(mockServer), false, false, Optional.empty()));
    }

    @Test
    public void verifyActionFailsWithException() throws Exception {
        // A FailedLoginException thrown by the server propagates when failover is disabled.
        val mockServer = mock(RadiusServer.class);
        when(mockServer.authenticate(anyString(), anyString())).thenThrow(FailedLoginException.class);
        assertThrows(FailedLoginException.class,
            () -> RadiusUtils.authenticate("casuser", "Mellon",
                CollectionUtils.wrapList(mockServer), false, false, Optional.empty()));
    }
}
| apache-2.0 |