repo_name
stringlengths 6
101
| path
stringlengths 4
300
| text
stringlengths 7
1.31M
|
|---|---|---|
adamstrickland/cottontail
|
lib/cottontail/producer.rb
|
<filename>lib/cottontail/producer.rb
require "cottontail/producible"
module Cottontail
class Producer
include Cottontail::Producible
end
end
|
dgreid/platform2
|
camera/hal/intel/ipu6/include/ia_imaging/ia_alloc.h
|
<gh_stars>1-10
/*
* Copyright (C) 2017-2020 Intel Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*!
* \file ia_alloc.h
* \brief Linear scope stack allocator interface.
*/
#ifndef IA_ALLOC_H_
#define IA_ALLOC_H_
#include "ia_types.h"
#ifdef __cplusplus
extern "C" {
#endif
typedef void(*finalizer_function)(void *ptr);
typedef struct ia_alloc_t ia_alloc;
/*!
* Initializes the linear allocator.
*
* \param [in] size Size of the allocator memory space.
* If 0, no allocation is made at this point and memory must be reserved with ia_alloc_reserve().
*
* \return A pointer to the allocator object, or NULL if errors.
*/
LIBEXPORT ia_alloc* ia_alloc_init(size_t size);
/*!
* Initializes the linear allocator using pre-allocated memory.
*
* \param [in] buffer Pre-allocated memory buffer.
* \param [in] size The size of the given buffer.
*
* \return A pointer to the allocator object, or NULL if errors.
*/
LIBEXPORT ia_alloc* ia_alloc_init_from_memory(void* buffer, size_t size);
/*!
* Reserves memory for the allocator. Can also be used to increase the size of the allocation.
*
* \param [in] alloc The allocator.
* \param [in] size The size of the internal memory space to allocate.
*
* \note Stack position is automatically rewound to the start of the allocator buffer.
*
* \return ia_err_none, if no errors.
* ia_err_nomemory, if out of memory.
* ia_err_argument, if invalid allocator or the allocator used pre-allocated memory.
*/
LIBEXPORT ia_err ia_alloc_reserve(ia_alloc* alloc, size_t size);
/*!
* Allocates a block of memory from the allocator memory space.
*
* \param [in] alloc The allocator.
* \param [in] size The size of the requested allocation.
*
* \return A pointer to the allocation, or NULL, if ran out of allocator memory space.
*/
LIBEXPORT void* ia_alloc_allocate(ia_alloc* alloc, size_t size);
/*!
* Allocates a block of memory from the allocator with a custom specified finalizer for destruction.
*
* \param [in] alloc The allocator.
* \param [in] size The size of the requested allocation.
* \param [in] func A pointer to the finalizer function.
*
* \return A pointer to the allocation, or NULL, if ran out of allocator memory space.
*/
LIBEXPORT void* ia_alloc_allocate_with_finalizer(ia_alloc* alloc, size_t size, finalizer_function func);
/*!
* Enters a new scope.
*
* \param [in] alloc The allocator.
*
* \return ia_err_none, if no errors.
* ia_err_internal, if maximum scope depth was reached.
*/
LIBEXPORT ia_err ia_alloc_enter_scope(ia_alloc* alloc);
/*!
* Leaves a scope.
*
* \param [in] alloc The allocator.
*
* \return ia_err_none, if no errors.
* ia_err_internal, if there is a mismatch between enter and leave calls.
*/
LIBEXPORT ia_err ia_alloc_leave_scope(ia_alloc* alloc);
/*!
* Deinitializes the linear allocator.
*
* \param [in] alloc The allocator.
*/
LIBEXPORT void ia_alloc_deinit(ia_alloc* alloc);
#ifdef __cplusplus
}
#endif
#endif /* IA_ALLOC_H_ */
|
isabella232/modite-adventure
|
src/main.cpp
|
<filename>src/main.cpp
#include "Game.h"
BViewPort *gViewPort;
BGameEngine *gGameEngine;
TOptions *gOptions;
GGame *gGame;
// app_main
extern "C" void app_main() {
gGame = new GGame();
gGame->Run();
delete gGame;
}
int main() {
app_main();
return 0;
}
|
lechium/tvOS145Headers
|
usr/libexec/nearbyd/PRAidedRangingClientProtocol-Protocol.h
|
//
// Generated by classdumpios 1.0.1 (64 bit) (iOS port by DreamDevLost)(Debug version compiled Sep 26 2020 13:48:20).
//
// Copyright (C) 1997-2019 <NAME>.
//
#import "PRRangingClientProtocol-Protocol.h"
@class NSData;
@protocol PRAidedRangingClientProtocol <PRRangingClientProtocol>
- (void)sendDataToPeers:(NSData *)arg1;
@end
|
project-arcana/phantasm-hardware-interface
|
src/phantasm-hardware-interface/vulkan/common/diagnostic_util.hh
|
<reponame>project-arcana/phantasm-hardware-interface<gh_stars>1-10
#pragma once
typedef struct RENDERDOC_API_1_4_0 RENDERDOC_API_1_4_0;
namespace phi::vk::util
{
struct diagnostic_state
{
void init();
void free();
bool start_capture();
bool end_capture();
bool is_renderdoc_present() const { return _renderdoc_handle != nullptr; }
private:
RENDERDOC_API_1_4_0* _renderdoc_handle = nullptr;
bool _renderdoc_capture_running = false;
};
}
|
HeWeMel/adventofcode
|
2021/day12.py
|
from collections import defaultdict
from mylib.aoc_frame import Day
class PartA(Day):
def parse(self, text, d): # store puzzle parsing result data into attributes of d
d.edges = defaultdict(list)
for line in text.splitlines():
f, t = line.split("-")
d.edges[f].append(t)
d.edges[t].append(f)
def compute(self, d): # return puzzle result, get parsing data from attributes of d
return do(d, vertex="start", visited=set(), one_vertex_reusable=False)
class PartB(PartA):
def compute(self, d): # return puzzle result, get parsing data from attributes of d
return do(d, vertex="start", visited=set(), one_vertex_reusable=True)
def do(d, vertex, visited, one_vertex_reusable):
if vertex == "end":
return 1
if vertex in visited:
if vertex == "start" or not one_vertex_reusable:
return 0
one_vertex_reusable = False
if vertex.lower() == vertex:
visited = visited | {vertex}
return sum(do(d, t, visited, one_vertex_reusable) for t in d.edges[vertex])
Day.do_day(day=12, year=2021, part_a=PartA, part_b=PartB)
|
Jan777/3ProgAva2016
|
dominio/src/main/java/prograavanzada2016/anotherworld/enemigos/Esqueleto.java
|
package prograavanzada2016.anotherworld.enemigos;
public class Esqueleto {
}
|
jrmie/math
|
test/unit/math/prim/mat/fun/promote_scalar_type_test.cpp
|
<reponame>jrmie/math
#include <stan/math/prim/mat.hpp>
#include <test/unit/math/prim/scal/fun/promote_type_test_util.hpp>
#include <gtest/gtest.h>
#include <vector>
TEST(MathFunctionsPromoteScalar, TypeMatrix) {
using Eigen::Dynamic;
using Eigen::Matrix;
using std::vector;
expect_promote_type<Matrix<double, Dynamic, Dynamic>, double,
Matrix<int, Dynamic, Dynamic> >();
expect_promote_type<Matrix<double, Dynamic, Dynamic>, double,
Matrix<double, Dynamic, Dynamic> >();
expect_promote_type<vector<Matrix<double, Dynamic, Dynamic> >, double,
vector<Matrix<int, Dynamic, Dynamic> > >();
}
TEST(MathFunctionsPromoteScalar, TypeVector) {
using Eigen::Dynamic;
using Eigen::Matrix;
using std::vector;
expect_promote_type<Matrix<double, Dynamic, 1>, double,
Matrix<int, Dynamic, 1> >();
expect_promote_type<Matrix<double, Dynamic, 1>, double,
Matrix<double, Dynamic, 1> >();
expect_promote_type<vector<Matrix<double, Dynamic, 1> >, double,
vector<Matrix<int, Dynamic, 1> > >();
}
TEST(MathFunctionsPromoteScalar, TypeRowVector) {
using Eigen::Dynamic;
using Eigen::Matrix;
using std::vector;
expect_promote_type<Matrix<double, 1, Dynamic>, double,
Matrix<int, 1, Dynamic> >();
expect_promote_type<Matrix<double, 1, Dynamic>, double,
Matrix<double, 1, Dynamic> >();
expect_promote_type<vector<Matrix<double, 1, Dynamic> >, double,
vector<Matrix<int, 1, Dynamic> > >();
}
|
crazecdwn/aws-sdk-cpp
|
aws-cpp-sdk-health/include/aws/health/HealthClient.h
|
<gh_stars>0
/*
* Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#pragma once
#include <aws/health/Health_EXPORTS.h>
#include <aws/health/HealthErrors.h>
#include <aws/core/client/AWSError.h>
#include <aws/core/client/ClientConfiguration.h>
#include <aws/core/client/AWSClient.h>
#include <aws/core/utils/memory/stl/AWSString.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <aws/health/model/DescribeAffectedEntitiesResult.h>
#include <aws/health/model/DescribeEntityAggregatesResult.h>
#include <aws/health/model/DescribeEventAggregatesResult.h>
#include <aws/health/model/DescribeEventDetailsResult.h>
#include <aws/health/model/DescribeEventTypesResult.h>
#include <aws/health/model/DescribeEventsResult.h>
#include <aws/core/client/AsyncCallerContext.h>
#include <aws/core/http/HttpTypes.h>
#include <future>
#include <functional>
namespace Aws
{
namespace Http
{
class HttpClient;
class HttpClientFactory;
} // namespace Http
namespace Utils
{
template< typename R, typename E> class Outcome;
namespace Threading
{
class Executor;
} // namespace Threading
} // namespace Utils
namespace Auth
{
class AWSCredentials;
class AWSCredentialsProvider;
} // namespace Auth
namespace Client
{
class RetryStrategy;
} // namespace Client
namespace Health
{
namespace Model
{
class DescribeAffectedEntitiesRequest;
class DescribeEntityAggregatesRequest;
class DescribeEventAggregatesRequest;
class DescribeEventDetailsRequest;
class DescribeEventTypesRequest;
class DescribeEventsRequest;
typedef Aws::Utils::Outcome<DescribeAffectedEntitiesResult, Aws::Client::AWSError<HealthErrors>> DescribeAffectedEntitiesOutcome;
typedef Aws::Utils::Outcome<DescribeEntityAggregatesResult, Aws::Client::AWSError<HealthErrors>> DescribeEntityAggregatesOutcome;
typedef Aws::Utils::Outcome<DescribeEventAggregatesResult, Aws::Client::AWSError<HealthErrors>> DescribeEventAggregatesOutcome;
typedef Aws::Utils::Outcome<DescribeEventDetailsResult, Aws::Client::AWSError<HealthErrors>> DescribeEventDetailsOutcome;
typedef Aws::Utils::Outcome<DescribeEventTypesResult, Aws::Client::AWSError<HealthErrors>> DescribeEventTypesOutcome;
typedef Aws::Utils::Outcome<DescribeEventsResult, Aws::Client::AWSError<HealthErrors>> DescribeEventsOutcome;
typedef std::future<DescribeAffectedEntitiesOutcome> DescribeAffectedEntitiesOutcomeCallable;
typedef std::future<DescribeEntityAggregatesOutcome> DescribeEntityAggregatesOutcomeCallable;
typedef std::future<DescribeEventAggregatesOutcome> DescribeEventAggregatesOutcomeCallable;
typedef std::future<DescribeEventDetailsOutcome> DescribeEventDetailsOutcomeCallable;
typedef std::future<DescribeEventTypesOutcome> DescribeEventTypesOutcomeCallable;
typedef std::future<DescribeEventsOutcome> DescribeEventsOutcomeCallable;
} // namespace Model
class HealthClient;
typedef std::function<void(const HealthClient*, const Model::DescribeAffectedEntitiesRequest&, const Model::DescribeAffectedEntitiesOutcome&, const std::shared_ptr<const Aws::Client::AsyncCallerContext>&) > DescribeAffectedEntitiesResponseReceivedHandler;
typedef std::function<void(const HealthClient*, const Model::DescribeEntityAggregatesRequest&, const Model::DescribeEntityAggregatesOutcome&, const std::shared_ptr<const Aws::Client::AsyncCallerContext>&) > DescribeEntityAggregatesResponseReceivedHandler;
typedef std::function<void(const HealthClient*, const Model::DescribeEventAggregatesRequest&, const Model::DescribeEventAggregatesOutcome&, const std::shared_ptr<const Aws::Client::AsyncCallerContext>&) > DescribeEventAggregatesResponseReceivedHandler;
typedef std::function<void(const HealthClient*, const Model::DescribeEventDetailsRequest&, const Model::DescribeEventDetailsOutcome&, const std::shared_ptr<const Aws::Client::AsyncCallerContext>&) > DescribeEventDetailsResponseReceivedHandler;
typedef std::function<void(const HealthClient*, const Model::DescribeEventTypesRequest&, const Model::DescribeEventTypesOutcome&, const std::shared_ptr<const Aws::Client::AsyncCallerContext>&) > DescribeEventTypesResponseReceivedHandler;
typedef std::function<void(const HealthClient*, const Model::DescribeEventsRequest&, const Model::DescribeEventsOutcome&, const std::shared_ptr<const Aws::Client::AsyncCallerContext>&) > DescribeEventsResponseReceivedHandler;
/**
* <fullname>AWS Health</fullname> <p>The AWS Health API provides programmatic
* access to the AWS Health information that is presented in the <a
* href="https://phd.aws.amazon.com/phd/home#/">AWS Personal Health Dashboard</a>.
* You can get information about events that affect your AWS resources:</p> <ul>
* <li> <p> <a>DescribeEvents</a>: Summary information about events.</p> </li> <li>
* <p> <a>DescribeEventDetails</a>: Detailed information about one or more
* events.</p> </li> <li> <p> <a>DescribeAffectedEntities</a>: Information about
* AWS resources that are affected by one or more events.</p> </li> </ul> <p>In
* addition, these operations provide information about event types and summary
* counts of events or affected entities:</p> <ul> <li> <p>
* <a>DescribeEventTypes</a>: Information about the kinds of events that AWS Health
* tracks.</p> </li> <li> <p> <a>DescribeEventAggregates</a>: A count of the number
* of events that meet specified criteria.</p> </li> <li> <p>
* <a>DescribeEntityAggregates</a>: A count of the number of affected entities that
* meet specified criteria.</p> </li> </ul> <p>The Health API requires a Business
* or Enterprise support plan from <a
* href="http://aws.amazon.com/premiumsupport/">AWS Support</a>. Calling the Health
* API from an account that does not have a Business or Enterprise support plan
* causes a <code>SubscriptionRequiredException</code>. </p> <p>For authentication
* of requests, AWS Health uses the <a
* href="http://docs.aws.amazon.com/general/latest/gr/signature-version-4.html">Signature
* Version 4 Signing Process</a>.</p> <p>See the <a
* href="http://docs.aws.amazon.com/health/latest/ug/what-is-aws-health.html">AWS
* Health User Guide</a> for information about how to use the API.</p> <p>
* <b>Service Endpoint</b> </p> <p>The HTTP endpoint for the AWS Health API is:</p>
* <ul> <li> <p>https://health.us-east-1.amazonaws.com </p> </li> </ul>
*/
class AWS_HEALTH_API HealthClient : public Aws::Client::AWSJsonClient
{
public:
typedef Aws::Client::AWSJsonClient BASECLASS;
/**
* Initializes client to use DefaultCredentialProviderChain, with default http client factory, and optional client config. If client config
* is not specified, it will be initialized to default values.
*/
HealthClient(const Aws::Client::ClientConfiguration& clientConfiguration = Aws::Client::ClientConfiguration());
/**
* Initializes client to use SimpleAWSCredentialsProvider, with default http client factory, and optional client config. If client config
* is not specified, it will be initialized to default values.
*/
HealthClient(const Aws::Auth::AWSCredentials& credentials, const Aws::Client::ClientConfiguration& clientConfiguration = Aws::Client::ClientConfiguration());
/**
* Initializes client to use specified credentials provider with specified client config. If http client factory is not supplied,
* the default http client factory will be used
*/
HealthClient(const std::shared_ptr<Aws::Auth::AWSCredentialsProvider>& credentialsProvider,
const Aws::Client::ClientConfiguration& clientConfiguration = Aws::Client::ClientConfiguration());
virtual ~HealthClient();
inline virtual const char* GetServiceClientName() const override { return "Health"; }
/**
* <p>Returns a list of entities that have been affected by the specified events,
* based on the specified filter criteria. Entities can refer to individual
* customer resources, groups of customer resources, or any other construct,
* depending on the AWS service. Events that have impact beyond that of the
* affected entities, or where the extent of impact is unknown, include at least
* one entity indicating this.</p> <p>At least one event ARN is required. Results
* are sorted by the <code>lastUpdatedTime</code> of the entity, starting with the
* most recent.</p><p><h3>See Also:</h3> <a
* href="http://docs.aws.amazon.com/goto/WebAPI/health-2016-08-04/DescribeAffectedEntities">AWS
* API Reference</a></p>
*/
virtual Model::DescribeAffectedEntitiesOutcome DescribeAffectedEntities(const Model::DescribeAffectedEntitiesRequest& request) const;
/**
* <p>Returns a list of entities that have been affected by the specified events,
* based on the specified filter criteria. Entities can refer to individual
* customer resources, groups of customer resources, or any other construct,
* depending on the AWS service. Events that have impact beyond that of the
* affected entities, or where the extent of impact is unknown, include at least
* one entity indicating this.</p> <p>At least one event ARN is required. Results
* are sorted by the <code>lastUpdatedTime</code> of the entity, starting with the
* most recent.</p><p><h3>See Also:</h3> <a
* href="http://docs.aws.amazon.com/goto/WebAPI/health-2016-08-04/DescribeAffectedEntities">AWS
* API Reference</a></p>
*
* returns a future to the operation so that it can be executed in parallel to other requests.
*/
virtual Model::DescribeAffectedEntitiesOutcomeCallable DescribeAffectedEntitiesCallable(const Model::DescribeAffectedEntitiesRequest& request) const;
/**
* <p>Returns a list of entities that have been affected by the specified events,
* based on the specified filter criteria. Entities can refer to individual
* customer resources, groups of customer resources, or any other construct,
* depending on the AWS service. Events that have impact beyond that of the
* affected entities, or where the extent of impact is unknown, include at least
* one entity indicating this.</p> <p>At least one event ARN is required. Results
* are sorted by the <code>lastUpdatedTime</code> of the entity, starting with the
* most recent.</p><p><h3>See Also:</h3> <a
* href="http://docs.aws.amazon.com/goto/WebAPI/health-2016-08-04/DescribeAffectedEntities">AWS
* API Reference</a></p>
*
* Queues the request into a thread executor and triggers associated callback when operation has finished.
*/
virtual void DescribeAffectedEntitiesAsync(const Model::DescribeAffectedEntitiesRequest& request, const DescribeAffectedEntitiesResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context = nullptr) const;
/**
* <p>Returns the number of entities that are affected by each of the specified
* events. If no events are specified, the counts of all affected entities are
* returned.</p><p><h3>See Also:</h3> <a
* href="http://docs.aws.amazon.com/goto/WebAPI/health-2016-08-04/DescribeEntityAggregates">AWS
* API Reference</a></p>
*/
virtual Model::DescribeEntityAggregatesOutcome DescribeEntityAggregates(const Model::DescribeEntityAggregatesRequest& request) const;
/**
* <p>Returns the number of entities that are affected by each of the specified
* events. If no events are specified, the counts of all affected entities are
* returned.</p><p><h3>See Also:</h3> <a
* href="http://docs.aws.amazon.com/goto/WebAPI/health-2016-08-04/DescribeEntityAggregates">AWS
* API Reference</a></p>
*
* returns a future to the operation so that it can be executed in parallel to other requests.
*/
virtual Model::DescribeEntityAggregatesOutcomeCallable DescribeEntityAggregatesCallable(const Model::DescribeEntityAggregatesRequest& request) const;
/**
* <p>Returns the number of entities that are affected by each of the specified
* events. If no events are specified, the counts of all affected entities are
* returned.</p><p><h3>See Also:</h3> <a
* href="http://docs.aws.amazon.com/goto/WebAPI/health-2016-08-04/DescribeEntityAggregates">AWS
* API Reference</a></p>
*
* Queues the request into a thread executor and triggers associated callback when operation has finished.
*/
virtual void DescribeEntityAggregatesAsync(const Model::DescribeEntityAggregatesRequest& request, const DescribeEntityAggregatesResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context = nullptr) const;
/**
* <p>Returns the number of events of each event type (issue, scheduled change, and
* account notification). If no filter is specified, the counts of all events in
* each category are returned.</p><p><h3>See Also:</h3> <a
* href="http://docs.aws.amazon.com/goto/WebAPI/health-2016-08-04/DescribeEventAggregates">AWS
* API Reference</a></p>
*/
virtual Model::DescribeEventAggregatesOutcome DescribeEventAggregates(const Model::DescribeEventAggregatesRequest& request) const;
/**
* <p>Returns the number of events of each event type (issue, scheduled change, and
* account notification). If no filter is specified, the counts of all events in
* each category are returned.</p><p><h3>See Also:</h3> <a
* href="http://docs.aws.amazon.com/goto/WebAPI/health-2016-08-04/DescribeEventAggregates">AWS
* API Reference</a></p>
*
* returns a future to the operation so that it can be executed in parallel to other requests.
*/
virtual Model::DescribeEventAggregatesOutcomeCallable DescribeEventAggregatesCallable(const Model::DescribeEventAggregatesRequest& request) const;
/**
* <p>Returns the number of events of each event type (issue, scheduled change, and
* account notification). If no filter is specified, the counts of all events in
* each category are returned.</p><p><h3>See Also:</h3> <a
* href="http://docs.aws.amazon.com/goto/WebAPI/health-2016-08-04/DescribeEventAggregates">AWS
* API Reference</a></p>
*
* Queues the request into a thread executor and triggers associated callback when operation has finished.
*/
virtual void DescribeEventAggregatesAsync(const Model::DescribeEventAggregatesRequest& request, const DescribeEventAggregatesResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context = nullptr) const;
/**
* <p>Returns detailed information about one or more specified events. Information
* includes standard event data (region, service, etc., as returned by
* <a>DescribeEvents</a>), a detailed event description, and possible additional
* metadata that depends upon the nature of the event. Affected entities are not
* included; to retrieve those, use the <a>DescribeAffectedEntities</a>
* operation.</p> <p>If a specified event cannot be retrieved, an error message is
* returned for that event.</p><p><h3>See Also:</h3> <a
* href="http://docs.aws.amazon.com/goto/WebAPI/health-2016-08-04/DescribeEventDetails">AWS
* API Reference</a></p>
*/
virtual Model::DescribeEventDetailsOutcome DescribeEventDetails(const Model::DescribeEventDetailsRequest& request) const;
/**
* <p>Returns detailed information about one or more specified events. Information
* includes standard event data (region, service, etc., as returned by
* <a>DescribeEvents</a>), a detailed event description, and possible additional
* metadata that depends upon the nature of the event. Affected entities are not
* included; to retrieve those, use the <a>DescribeAffectedEntities</a>
* operation.</p> <p>If a specified event cannot be retrieved, an error message is
* returned for that event.</p><p><h3>See Also:</h3> <a
* href="http://docs.aws.amazon.com/goto/WebAPI/health-2016-08-04/DescribeEventDetails">AWS
* API Reference</a></p>
*
* returns a future to the operation so that it can be executed in parallel to other requests.
*/
virtual Model::DescribeEventDetailsOutcomeCallable DescribeEventDetailsCallable(const Model::DescribeEventDetailsRequest& request) const;
/**
* <p>Returns detailed information about one or more specified events. Information
* includes standard event data (region, service, etc., as returned by
* <a>DescribeEvents</a>), a detailed event description, and possible additional
* metadata that depends upon the nature of the event. Affected entities are not
* included; to retrieve those, use the <a>DescribeAffectedEntities</a>
* operation.</p> <p>If a specified event cannot be retrieved, an error message is
* returned for that event.</p><p><h3>See Also:</h3> <a
* href="http://docs.aws.amazon.com/goto/WebAPI/health-2016-08-04/DescribeEventDetails">AWS
* API Reference</a></p>
*
* Queues the request into a thread executor and triggers associated callback when operation has finished.
*/
virtual void DescribeEventDetailsAsync(const Model::DescribeEventDetailsRequest& request, const DescribeEventDetailsResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context = nullptr) const;
/**
* <p>Returns the event types that meet the specified filter criteria. If no filter
* criteria are specified, all event types are returned, in no particular
* order.</p><p><h3>See Also:</h3> <a
* href="http://docs.aws.amazon.com/goto/WebAPI/health-2016-08-04/DescribeEventTypes">AWS
* API Reference</a></p>
*/
virtual Model::DescribeEventTypesOutcome DescribeEventTypes(const Model::DescribeEventTypesRequest& request) const;
/**
* <p>Returns the event types that meet the specified filter criteria. If no filter
* criteria are specified, all event types are returned, in no particular
* order.</p><p><h3>See Also:</h3> <a
* href="http://docs.aws.amazon.com/goto/WebAPI/health-2016-08-04/DescribeEventTypes">AWS
* API Reference</a></p>
*
* returns a future to the operation so that it can be executed in parallel to other requests.
*/
virtual Model::DescribeEventTypesOutcomeCallable DescribeEventTypesCallable(const Model::DescribeEventTypesRequest& request) const;
/**
* <p>Returns the event types that meet the specified filter criteria. If no filter
* criteria are specified, all event types are returned, in no particular
* order.</p><p><h3>See Also:</h3> <a
* href="http://docs.aws.amazon.com/goto/WebAPI/health-2016-08-04/DescribeEventTypes">AWS
* API Reference</a></p>
*
* Queues the request into a thread executor and triggers associated callback when operation has finished.
*/
virtual void DescribeEventTypesAsync(const Model::DescribeEventTypesRequest& request, const DescribeEventTypesResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context = nullptr) const;
/**
* <p>Returns information about events that meet the specified filter criteria.
* Events are returned in a summary form and do not include the detailed
* description, any additional metadata that depends on the event type, or any
* affected resources. To retrieve that information, use the
* <a>DescribeEventDetails</a> and <a>DescribeAffectedEntities</a> operations.</p>
* <p>If no filter criteria are specified, all events are returned. Results are
* sorted by <code>lastModifiedTime</code>, starting with the most
* recent.</p><p><h3>See Also:</h3> <a
* href="http://docs.aws.amazon.com/goto/WebAPI/health-2016-08-04/DescribeEvents">AWS
* API Reference</a></p>
*/
virtual Model::DescribeEventsOutcome DescribeEvents(const Model::DescribeEventsRequest& request) const;
/**
* <p>Returns information about events that meet the specified filter criteria.
* Events are returned in a summary form and do not include the detailed
* description, any additional metadata that depends on the event type, or any
* affected resources. To retrieve that information, use the
* <a>DescribeEventDetails</a> and <a>DescribeAffectedEntities</a> operations.</p>
* <p>If no filter criteria are specified, all events are returned. Results are
* sorted by <code>lastModifiedTime</code>, starting with the most
* recent.</p><p><h3>See Also:</h3> <a
* href="http://docs.aws.amazon.com/goto/WebAPI/health-2016-08-04/DescribeEvents">AWS
* API Reference</a></p>
*
* returns a future to the operation so that it can be executed in parallel to other requests.
*/
virtual Model::DescribeEventsOutcomeCallable DescribeEventsCallable(const Model::DescribeEventsRequest& request) const;
/**
* <p>Returns information about events that meet the specified filter criteria.
* Events are returned in a summary form and do not include the detailed
* description, any additional metadata that depends on the event type, or any
* affected resources. To retrieve that information, use the
* <a>DescribeEventDetails</a> and <a>DescribeAffectedEntities</a> operations.</p>
* <p>If no filter criteria are specified, all events are returned. Results are
* sorted by <code>lastModifiedTime</code>, starting with the most
* recent.</p><p><h3>See Also:</h3> <a
* href="http://docs.aws.amazon.com/goto/WebAPI/health-2016-08-04/DescribeEvents">AWS
* API Reference</a></p>
*
* Queues the request into a thread executor and triggers associated callback when operation has finished.
*/
virtual void DescribeEventsAsync(const Model::DescribeEventsRequest& request, const DescribeEventsResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context = nullptr) const;
void OverrideEndpoint(const Aws::String& endpoint);
private:
void init(const Aws::Client::ClientConfiguration& clientConfiguration);
void DescribeAffectedEntitiesAsyncHelper(const Model::DescribeAffectedEntitiesRequest& request, const DescribeAffectedEntitiesResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const;
void DescribeEntityAggregatesAsyncHelper(const Model::DescribeEntityAggregatesRequest& request, const DescribeEntityAggregatesResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const;
void DescribeEventAggregatesAsyncHelper(const Model::DescribeEventAggregatesRequest& request, const DescribeEventAggregatesResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const;
void DescribeEventDetailsAsyncHelper(const Model::DescribeEventDetailsRequest& request, const DescribeEventDetailsResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const;
void DescribeEventTypesAsyncHelper(const Model::DescribeEventTypesRequest& request, const DescribeEventTypesResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const;
void DescribeEventsAsyncHelper(const Model::DescribeEventsRequest& request, const DescribeEventsResponseReceivedHandler& handler, const std::shared_ptr<const Aws::Client::AsyncCallerContext>& context) const;
Aws::String m_uri;
Aws::String m_configScheme;
std::shared_ptr<Aws::Utils::Threading::Executor> m_executor;
};
} // namespace Health
} // namespace Aws
|
doyaguillo1997/Data2Gether
|
5. WEB/app/external_sources/idealista/migrations/0005_rename_geo_element_historic_geo_zone.py
|
# Generated by Django 3.2.4 on 2021-06-19 07:18
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('idealista', '0004_rename_historioco_historic'),
]
operations = [
migrations.RenameField(
model_name='historic',
old_name='geo_element',
new_name='geo_zone',
),
]
|
void889275714/repository18
|
project3_final/meeting-theater/guns/guns-promo/src/main/java/com/stylefeng/guns/rest/config/RedisConfig.java
|
package com.stylefeng.guns.rest.config;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.serializer.Jackson2JsonRedisSerializer;
import org.springframework.data.redis.serializer.StringRedisSerializer;
import redis.clients.jedis.Jedis;
import java.net.UnknownHostException;
@Configuration
public class RedisConfig {

    /** Redis server host, injected from application properties. */
    @Value("${spring.redis.host}")
    private String host;

    /** Redis server port, injected from application properties. */
    @Value("${spring.redis.port}")
    private Integer port;

    /**
     * Exposes a singleton {@link Jedis} client bound to the configured
     * host and port.
     *
     * @return a new Jedis connection
     */
    @Bean
    public Jedis jedis() {
        return new Jedis(host, port);
    }

    /**
     * Builds a {@link RedisTemplate} with customized serialization: keys are
     * written as plain strings, values as JSON. Default typing is enabled for
     * non-final classes so values can be read back as their original types.
     *
     * @param redisConnectionFactory connection factory supplied by Spring
     * @return the configured template
     * @throws UnknownHostException kept for signature compatibility; not thrown here
     */
    @Bean
    public RedisTemplate<Object, Object> redisTemplate(RedisConnectionFactory redisConnectionFactory) throws UnknownHostException {
        // Configure the JSON mapper first: embed class names for non-final
        // types and expose all fields regardless of access modifiers.
        ObjectMapper mapper = new ObjectMapper();
        mapper.enableDefaultTyping(ObjectMapper.DefaultTyping.NON_FINAL);
        mapper.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY);

        Jackson2JsonRedisSerializer valueSerializer = new Jackson2JsonRedisSerializer<>(Object.class);
        valueSerializer.setObjectMapper(mapper);

        RedisTemplate<Object, Object> redisTemplate = new RedisTemplate();
        redisTemplate.setConnectionFactory(redisConnectionFactory);
        redisTemplate.setKeySerializer(new StringRedisSerializer());
        redisTemplate.setValueSerializer(valueSerializer);
        return redisTemplate;
    }
}
|
hanchao5272/design-pattern
|
src/main/java/pers/hanchao/designpattern/state/state2/state/impl/UnloadState.java
|
package pers.hanchao.designpattern.state.state2.state.impl;
import lombok.extern.slf4j.Slf4j;
import pers.hanchao.designpattern.state.state2.Shooter;
import pers.hanchao.designpattern.state.state2.state.AbstractState;
/**
* <p>无子弹状态</P>
*
* @author hanchao
*/
@Slf4j
public class UnloadState extends AbstractState {

    /**
     * Creates the "out of ammo" state for the given shooter.
     *
     * @param shooter the shooter that owns this state
     */
    public UnloadState(Shooter shooter) {
        super(shooter);
    }

    /**
     * Shooting with an empty magazine only logs a reminder to reload;
     * no shot is fired.
     */
    @Override
    public void shoot() {
        log.info("[{}]没有子弹了,请填充!!!", getShooter().getName());
    }
}
|
BihanZhuang/super-game-engine
|
src/view/gameplay/hud/deprecated/InfoDisplay.java
|
package view.gameplay.hud.deprecated;
import javafx.scene.layout.Pane;
/**
 * Base class for HUD widgets that display game information such as a
 * health bar, score bar, or other stats.
 *
 * <p>Each display owns a {@link Pane} that concrete subclasses populate;
 * the pane is exposed via {@link #getPane()} so the HUD can attach it to
 * the scene graph.</p>
 */
public abstract class InfoDisplay {

    // Root node of this display; package-visible so subclasses in this
    // package can add their visuals to it.
    Pane myPane;

    public InfoDisplay() {
        myPane = new Pane();
    }

    /**
     * Refreshes the displayed values. Called by the HUD when the underlying
     * game state changes.
     */
    public abstract void updateValues();

    /**
     * Positions this display at the given layout coordinates.
     *
     * @param x layout x-coordinate
     * @param y layout y-coordinate
     */
    public void setPos(double x, double y) {
        myPane.setLayoutX(x);
        myPane.setLayoutY(y);
    }

    /**
     * @return the root pane holding this display's nodes
     */
    public Pane getPane() {
        return myPane;
    }
}
|
hawkular/hawkular-commons
|
hawkular-bus/hawkular-bus-common/src/test/java/org/hawkular/bus/common/BasicMessageObjectMapperTest.java
|
<filename>hawkular-bus/hawkular-bus-common/src/test/java/org/hawkular/bus/common/BasicMessageObjectMapperTest.java
/*
* Copyright 2014-2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hawkular.bus.common;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.fail;
import org.junit.Test;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
// Verifies that AbstractMessage honors the ObjectMapper customizations
// supplied by subclasses (SomeMessage/AnotherMessage below), both for
// serialization visibility (getter/setter support on/off) and for
// deserialization strictness (fail on unknown properties).
// NOTE(review): tests communicate with the fixture via mutable static
// flags, so the flag assignments must stay at the top of each test.
public class BasicMessageObjectMapperTest {

    @Test
    public void testWithGetterSetterSupport() {
        // With getter/setter visibility on, the private attribs round-trip.
        SomeMessage.FAIL_ON_UNKNOWN_PROPERTIES = true;
        SomeMessage.SUPPORT_GETTER_SETTER = true;
        AnotherMessage msg = new AnotherMessage("1", "2");
        msg.setSomeAttrib("someValue");
        msg.setAnotherAttrib("anotherValue");
        assertNotNull(msg.getOne());
        assertNotNull(msg.getTwo());
        assertNotNull(msg.getSomeAttrib());
        assertNotNull(msg.getAnotherAttrib());
        String json = msg.toJSON();
        System.out.println(json);
        assertNotNull("missing JSON", json);
        AnotherMessage msg2 = AnotherMessage.fromJSON(json, AnotherMessage.class);
        assertNotNull("JSON conversion failed", msg2);
        assertNotSame(msg, msg2);
        assertNotNull(msg2.getOne());
        assertNotNull(msg2.getTwo());
        assertNotNull(msg2.getSomeAttrib());
        assertNotNull(msg2.getAnotherAttrib());
        assertEquals(msg.getOne(), msg2.getOne());
        assertEquals(msg.getTwo(), msg2.getTwo());
        assertEquals(msg.getSomeAttrib(), msg2.getSomeAttrib());
        assertEquals(msg.getAnotherAttrib(), msg2.getAnotherAttrib());
    }

    @Test
    public void testWithoutGetterSetterSupport() {
        // With getter/setter visibility off, only the public fields
        // round-trip; the private attribs are dropped from the JSON.
        SomeMessage.FAIL_ON_UNKNOWN_PROPERTIES = true;
        SomeMessage.SUPPORT_GETTER_SETTER = false;
        AnotherMessage msg = new AnotherMessage("1", "2");
        msg.setSomeAttrib("someValueNoSupport");
        msg.setAnotherAttrib("anotherValueNoSupport");
        assertNotNull(msg.getOne());
        assertNotNull(msg.getTwo());
        assertNotNull(msg.getSomeAttrib());
        assertNotNull(msg.getAnotherAttrib());
        String json = msg.toJSON();
        System.out.println(json);
        assertNotNull("missing JSON", json);
        AnotherMessage msg2 = AnotherMessage.fromJSON(json, AnotherMessage.class);
        assertNotNull("JSON conversion failed", msg2);
        assertNotSame(msg, msg2);
        assertNotNull(msg2.getOne());
        assertNotNull(msg2.getTwo());
        assertNull("Should not have been deserialized, getter/setter support was off", msg2.getSomeAttrib());
        assertNull("Should not have been deserialized, getter/setter support was off", msg2.getAnotherAttrib());
        assertEquals(msg.getOne(), msg2.getOne());
        assertEquals(msg.getTwo(), msg2.getTwo());
        assertNotEquals(msg.getSomeAttrib(), msg2.getSomeAttrib());
        assertNotEquals(msg.getAnotherAttrib(), msg2.getAnotherAttrib());
    }

    @Test
    public void testOverrideStaticDeserializingMapper() {
        // This tests that AbstractMessage is able to get the subclass' overriding ObjectMapper for deserialization
        // which is obtained by invoking a static method on the subclass.
        SomeMessage.SUPPORT_GETTER_SETTER = false;
        String jsonWithAllKnownProperties = "{\"one\":\"1\",\"two\":\"2\"}";
        String jsonWithUnknownProperties = "{\"one\":\"1\",\"two\":\"2\", \"wot\":\"gorilla\"}";
        AnotherMessage msg;
        // because we will not fail on unknown properties, no failures should occur
        SomeMessage.FAIL_ON_UNKNOWN_PROPERTIES = false;
        msg = AnotherMessage.fromJSON(jsonWithAllKnownProperties, AnotherMessage.class);
        assertEquals("1", msg.getOne());
        assertEquals("2", msg.getTwo());
        assertNull(msg.getSomeAttrib());
        assertNull(msg.getAnotherAttrib());
        msg = AnotherMessage.fromJSON(jsonWithUnknownProperties, AnotherMessage.class);
        assertEquals("1", msg.getOne());
        assertEquals("2", msg.getTwo());
        assertNull(msg.getSomeAttrib());
        assertNull(msg.getAnotherAttrib());
        // now we will fail on unknown properties
        SomeMessage.FAIL_ON_UNKNOWN_PROPERTIES = true;
        msg = AnotherMessage.fromJSON(jsonWithAllKnownProperties, AnotherMessage.class);
        assertEquals("1", msg.getOne());
        assertEquals("2", msg.getTwo());
        assertNull(msg.getSomeAttrib());
        assertNull(msg.getAnotherAttrib());
        try {
            msg = AnotherMessage.fromJSON(jsonWithUnknownProperties, AnotherMessage.class);
            fail("Custom mapper should not have been able to deserialize this.");
        } catch (Exception ok) {
        }
    }
}
// Test fixture: a concrete AbstractMessage with one public field (always
// serialized) and one private attribute that only appears in the JSON when
// getter/setter visibility is enabled. The static flags let each test
// reconfigure the mappers built below.
// NOTE(review): Jackson discovers members by reflection, so field/getter
// names and visibility here are load-bearing — do not rename.
class SomeMessage extends AbstractMessage {
    // we'll flip this in our tests
    public static boolean FAIL_ON_UNKNOWN_PROPERTIES = false;
    public static boolean SUPPORT_GETTER_SETTER = true;

    public String one;

    // this will be included in the JSON due to its getter/setter
    private String someAttrib;

    SomeMessage() {
    }

    SomeMessage(String one) {
        this.one = one;
    }

    public String getOne() {
        return this.one;
    }

    public String getSomeAttrib() {
        return this.someAttrib;
    }

    public void setSomeAttrib(String value) {
        this.someAttrib = value;
    }

    // Static override consumed by AbstractMessage.fromJSON — presumably
    // looked up by name on the subclass; TODO confirm against AbstractMessage.
    protected static ObjectMapper buildObjectMapperForDeserialization() {
        final ObjectMapper mapper = new ObjectMapper();
        mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, FAIL_ON_UNKNOWN_PROPERTIES);
        return mapper;
    }

    // Serialization mapper: public fields are always visible; getters and
    // setters (and therefore the private attribs) only when
    // SUPPORT_GETTER_SETTER is on. Creators are never auto-detected.
    @Override
    protected ObjectMapper buildObjectMapperForSerialization() {
        final ObjectMapper mapper = new ObjectMapper();
        mapper.setVisibilityChecker(mapper.getSerializationConfig().getDefaultVisibilityChecker()
                .withFieldVisibility(JsonAutoDetect.Visibility.PUBLIC_ONLY)
                .withGetterVisibility(
                        (SUPPORT_GETTER_SETTER)
                                ? JsonAutoDetect.Visibility.PUBLIC_ONLY
                                : JsonAutoDetect.Visibility.NONE)
                .withSetterVisibility(
                        (SUPPORT_GETTER_SETTER)
                                ? JsonAutoDetect.Visibility.PUBLIC_ONLY
                                : JsonAutoDetect.Visibility.NONE)
                .withCreatorVisibility(JsonAutoDetect.Visibility.NONE));
        return mapper;
    }
}
// Test fixture: extends SomeMessage with a second public field and a second
// private attribute, to verify that the superclass' mapper configuration
// also governs members declared in subclasses.
class AnotherMessage extends SomeMessage {
    public String two;

    // if our superclass supports getter/setter JSON, this will be included in the JSON
    private String anotherAttrib;

    AnotherMessage() {
    }

    AnotherMessage(String one, String two) {
        super(one);
        this.two = two;
    }

    public String getTwo() {
        return this.two;
    }

    public String getAnotherAttrib() {
        return this.anotherAttrib;
    }

    public void setAnotherAttrib(String value) {
        this.anotherAttrib = value;
    }
}
|
rackerlabs/jclouds-labs
|
jclouds-management/management-core/src/test/java/org/jclouds/management/ViewMBeanFactoriesTest.java
|
<gh_stars>1-10
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jclouds.management;
import com.google.common.reflect.TypeToken;
import org.jclouds.apis.Compute;
import org.jclouds.apis.Storage;
import org.testng.annotations.Test;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
@Test(groups = "unit", testName = "ViewManagementFactoriesTest")
public class ViewMBeanFactoriesTest {

    private final ViewMBeanFactory storageFactory = new StorageMBeanFactory();
    private final ViewMBeanFactory computeTestFactory = new ComputeMBeanFactory();

    /** The full factory registry must include both known implementations. */
    @Test
    void testAll() {
        Iterable<ViewMBeanFactory> factories = ViewMBeanFactories.all();
        assertTrue(isRegistered(factories, storageFactory));
        assertTrue(isRegistered(factories, computeTestFactory));
    }

    /** Filtering by view type must return exactly the factories for that type. */
    @Test
    void testManagesView() {
        Iterable<ViewMBeanFactory> storageFactories = ViewMBeanFactories.forType(TypeToken.of(Storage.class));
        Iterable<ViewMBeanFactory> otherTestViewfactories = ViewMBeanFactories.forType(TypeToken.of(Compute.class));
        assertTrue(isRegistered(storageFactories, storageFactory));
        assertFalse(isRegistered(storageFactories, computeTestFactory));
        assertFalse(isRegistered(otherTestViewfactories, storageFactory));
        assertTrue(isRegistered(otherTestViewfactories, computeTestFactory));
    }

    /** Linear scan: true iff {@code candidate} occurs in {@code factories}. */
    private static boolean isRegistered(Iterable<ViewMBeanFactory> factories, ViewMBeanFactory candidate) {
        boolean found = false;
        for (ViewMBeanFactory factory : factories) {
            if (candidate.equals(factory)) {
                found = true;
                break;
            }
        }
        return found;
    }
}
|
fhornain/patternfly-react-seed_1
|
node_modules/@patternfly/react-icons/dist/js/icons/volume-icon.d.js
|
"use strict";
//# sourceMappingURL=volume-icon.d.js.map
|
mmtechslv/PhyloMAF
|
pmaf/pipe/markers/_marker.py
|
<gh_stars>1-10
from ._metakit import MarkerBackboneMetabase
from pmaf.pipe.specs._metakit import SpecificationBackboneMetabase
from pmaf.pipe.agents.dockers._metakit import DockerBackboneMetabase
from datetime import datetime
from typing import Any, Optional
class Marker(MarkerBackboneMetabase):
    """Coordinates evaluation of pipe :term:`specs<spec>` against an input.

    A marker wraps an input payload, accumulates processing steps
    ("tasks") from embedded specs, and evaluates them lazily — either
    step-by-step (iteration or :meth:`next`) or all at once
    (:meth:`compute`).
    """

    def __init__(
        self, input: Any, name: Optional[str] = None, metadata: Optional[dict] = None
    ):
        """Constructor for :class:`.Marker`

        Parameters
        ----------
        input
            Instance of any type that has `.data` attribute.
            For example, :mod:`~pmaf.biome` or :mod:`pmaf.pipe.agents.dockers`.
        name
            Name of the marker instance.
        metadata
            Metadata of the marker instance.
        """
        if input is not None:
            # Dockers are kept as-is; any other object is unwrapped to its
            # `.data` attribute when one is present.
            if not isinstance(input, DockerBackboneMetabase):
                if hasattr(input, "data"):
                    self.__input = input.data
                else:
                    self.__input = input
            else:
                self.__input = input
            self.__inlet = type(input)
        else:
            # BUG FIX: the message referred to `data` although the parameter
            # is named `input`.
            raise ValueError("`input` cannot be None.")
        if isinstance(name, (str, int, type(None))):
            tmp_name = name
        else:
            raise TypeError("`name` can be str,int or None")
        if isinstance(metadata, dict):
            tmp_metadata = metadata
        elif metadata is None:
            tmp_metadata = {}
        else:
            raise TypeError("`metadata` can be dict or None")
        # Inherit name/metadata from the input object unless explicitly given.
        if hasattr(input, "name") and name is None:
            self.__name = input.name
        else:
            self.__name = tmp_name
        if hasattr(input, "metadata") and metadata is None:
            self.__metadata = input.metadata
        else:
            self.__metadata = tmp_metadata
        self.__outlet = None
        self.__output = None
        self.__tasks = []  # ordered task records (dicts; see embed_specs)
        self.__task_pointer = None  # index of next task to run; None until specs embedded

    def __repr__(self):
        class_name = self.__class__.__name__
        state = "Active" if len(self.__tasks) > 0 else "Inactive"
        step = (
            "{}/{}".format(str(self.__task_pointer), str(len(self.__tasks)))
            if len(self.__tasks) > 0
            else "N/A"
        )
        inlet = self.inlet.__name__
        outlet = self.outlet.__name__ if self.outlet is not None else "N/A"
        repr_str = "<{}:[{}], Step/Total:[{}], Inlet:[{}], Outlet:[{}]>".format(
            class_name, state, step, inlet, outlet
        )
        return repr_str

    def embed_specs(self, *args):
        """Embed :term:`specs<spec>` to the :class:`.Marker`.

        Parameters
        ----------
        *args
            Instances of :term:`spec` that must be embedded.

        Returns
        -------
            Returns the outlet type of the last :term:`spec`
        """
        for spec in args:
            if isinstance(spec, SpecificationBackboneMetabase):
                if self.__task_pointer is None:
                    self.__task_pointer = 0
                    last_tix = None
                else:
                    last_tix = len(self.__tasks) - 1
                for name, method, outlet, description in spec.steps:
                    self.__tasks.append(
                        {
                            "status": False,  # True once this step has run
                            "results": None,  # step result; results[0]=product, [1]=args, [2]=kwargs (see __next)
                            "time": datetime.now(),
                            "request": {
                                "method": method,
                                "input": last_tix,  # producer task index, or None for the marker input
                                "outlet": outlet,
                                "name": name,
                                "description": description,
                            },
                        }
                    )
                    last_tix = len(self.__tasks) - 1
                    self.__outlet = outlet
            else:
                # BUG FIX: the message named `marker` although the invalid
                # argument here is a spec.
                raise TypeError("`spec` has invalid type.")

    def __get_pending(self):
        """Get the indices of pending (not yet run) tasks, in order."""
        return sorted(
            [ix for ix, task in enumerate(self.__tasks) if not task["status"]]
        )

    def __get_finished(self):
        """Get the indices of finished tasks, in order."""
        return sorted([ix for ix, task in enumerate(self.__tasks) if task["status"]])

    def __move_to_next_task(self, current_task_results):
        """Record the current task's results, mark it done, advance pointer."""
        self.__tasks[self.__task_pointer]["results"] = current_task_results
        self.__tasks[self.__task_pointer]["status"] = True
        self.__task_pointer = self.__task_pointer + 1
        self.__output = current_task_results[0]
        return

    def __get_next_task(self):
        """Get the next task record to be evaluated, or None when done."""
        if self.__task_pointer is not None:
            if self.__task_pointer < len(self.__tasks):
                return self.__tasks[self.__task_pointer]
            else:
                return None
        else:
            raise RuntimeError("Marker is not initiated.")

    def __iter__(self):
        """Iterate over pending :term:`specs<spec>`, yielding each product."""
        if len(self.__tasks) > 0:
            while len(self.__get_pending()) > 0:
                yield self.__next()
        else:
            raise RuntimeError("Marker is not initiated.")

    def __next__(self):
        """Run the next pending :term:`spec` step."""
        if len(self.__tasks) > 0:
            if len(self.__get_pending()) > 0:
                return self.__next()
            else:
                raise StopIteration
        else:
            raise RuntimeError("Marker is not initiated.")

    def __next(self):
        """Run one step: feed it the previous step's results (or the marker
        input for the first step) and record its output."""
        next_task = self.__get_next_task()
        if next_task is not None:
            current_input = next_task["request"]["input"]
            spec_method = next_task["request"]["method"]
            if isinstance(current_input, int):
                last_results = self.__tasks[current_input]["results"]
                next_args = (last_results[0], *last_results[1])
                next_kwargs = last_results[2]
            else:
                next_args = (self.__input,)
                next_kwargs = {"metadata": self.__metadata, "name": self.__name}
            tmp_results = spec_method(*next_args, **next_kwargs)
            self.__move_to_next_task(tmp_results)
            return tmp_results[0]
        else:
            return None

    def next(self):
        """Same as builtin next() command."""
        # Delegates to __next__ instead of duplicating its body (the
        # original repeated the same checks verbatim).
        return self.__next__()

    def compute(self):
        """Evaluate all pending :term:`specs<spec>`; return the last product."""
        if len(self.__tasks) > 0:
            tmp_product = None
            while len(self.__get_pending()) > 0:
                tmp_product = self.__next()
            return tmp_product
        else:
            raise RuntimeError("Marker is not initiated.")

    def get_outputs(self):
        """Get the products of all finished tasks, in execution order."""
        return [self.__tasks[ix]["results"][0] for ix in self.__get_finished()]

    @property
    def tasks(self):
        """List all the tasks."""
        return self.__tasks

    @property
    def name(self):
        """Name of the instance."""
        return self.__name

    @property
    def metadata(self):
        """Metadata of the instance."""
        return self.__metadata

    @property
    def inlet(self):
        """Inlet type of the marker."""
        return self.__inlet

    @property
    def outlet(self):
        """Outlet type marker."""
        return self.__outlet

    @property
    def input(self):
        """Input of the marker."""
        return self.__input

    @property
    def output(self):
        """Output of the marker."""
        return self.__output

    @property
    def upcoming(self):
        """Name of the next pending task, or None when nothing is pending.

        BUG FIX: the original indexed ``self.__tasks[self.__task_pointer]``
        unconditionally, which raised IndexError once every task had
        completed (pointer advances past the end in __move_to_next_task).
        """
        if (
            len(self.__tasks) > 0
            and self.__task_pointer is not None
            and self.__task_pointer < len(self.__tasks)
        ):
            return self.__tasks[self.__task_pointer]["request"]["name"]
        else:
            return None
|
scott-wisniewski/dolly
|
xpki/oid/oid_test.go
|
<filename>xpki/oid/oid_test.go<gh_stars>1-10
package oid
import (
"encoding/asn1"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// Test_NewObjectIdentifierFromOID verifies that every well-known OID exported
// by this package survives a round trip through its dotted-string form:
// String() -> NewObjectIdentifier -> String().
func Test_NewObjectIdentifierFromOID(t *testing.T) {
	wellKnown := []asn1.ObjectIdentifier{
		Data,
		SignedData,
		TSTInfo,
		AttributeContentType,
		AttributeMessageDigest,
		AttributeSigningTime,
		AttributeTimeStampToken,
		SignatureAlgorithmRSA,
		SignatureAlgorithmECDSA,
		DigestAlgorithmSHA1,
		DigestAlgorithmMD5,
		DigestAlgorithmSHA256,
		DigestAlgorithmSHA384,
		DigestAlgorithmSHA512,
		SubjectKeyIdentifier,
	}
	for _, known := range wellKnown {
		dotted := known.String()
		t.Run(dotted, func(t *testing.T) {
			parsed, err := NewObjectIdentifier(dotted)
			assert.NoError(t, err)
			assert.Equal(t, dotted, parsed.String())
		})
	}
}
// Test_NewObjectIdentifier2 covers malformed inputs (which must return an
// error) and the ASN.1 named-notation form, which must parse to the same
// value as the equivalent dotted string.
func Test_NewObjectIdentifier2(t *testing.T) {
	t.Run("should fails", func(t *testing.T) {
		// Note: the zero-value string equals "", so the empty case covers
		// both literals used previously.
		invalid := []string{
			"",
			"1.2.",
			"1.2.a.3",
			"{iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9) messageDigest(4}",
		}
		for _, in := range invalid {
			_, err := NewObjectIdentifier(in)
			assert.Error(t, err)
		}
	})
	t.Run("asn1 notation", func(t *testing.T) {
		fromNotation, err := NewObjectIdentifier("{iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9) messageDigest(4)}")
		require.NoError(t, err)
		fromDotted, err := NewObjectIdentifier("1.2.840.113549.1.9.4")
		require.NoError(t, err)
		assert.Equal(t, fromDotted, fromNotation)
	})
}
|
areway/turms
|
turms/src/main/java/im/turms/turms/workflow/service/util/DomainConstraintUtil.java
|
/*
* Copyright (C) 2019 The Turms Project
* https://github.com/turms-im/turms
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package im.turms.turms.workflow.service.util;
import im.turms.common.constant.DeviceType;
import im.turms.common.constant.GroupMemberRole;
import im.turms.common.constant.ProfileAccessStrategy;
import im.turms.common.constant.RequestStatus;
import im.turms.common.constant.ResponseAction;
import im.turms.server.common.constant.TurmsStatusCode;
import im.turms.server.common.exception.TurmsBusinessException;
import im.turms.turms.bo.GroupQuestionIdAndAnswer;
import im.turms.turms.workflow.dao.domain.group.GroupBlockedUser;
import im.turms.turms.workflow.dao.domain.group.GroupMember;
import im.turms.turms.workflow.dao.domain.user.UserRelationship;
import im.turms.turms.workflow.dao.domain.user.UserRelationshipGroup;
/**
* @author <NAME>
*/
/**
 * Static validators for domain objects and enum values. Each method throws
 * when its argument is missing/UNRECOGNIZED and returns silently otherwise.
 */
public final class DomainConstraintUtil {

    private DomainConstraintUtil() {
    }

    /**
     * @throws TurmsBusinessException (ILLEGAL_ARGUMENT) if the status is UNRECOGNIZED
     */
    public static void validRequestStatus(RequestStatus status) {
        if (status == RequestStatus.UNRECOGNIZED) {
            throw TurmsBusinessException.get(TurmsStatusCode.ILLEGAL_ARGUMENT, "The request status must not be UNRECOGNIZED");
        }
    }

    public static void validResponseAction(ResponseAction action) {
        if (action == ResponseAction.UNRECOGNIZED) {
            throw new IllegalArgumentException("The response action must not be UNRECOGNIZED");
        }
    }

    public static void validRelationshipGroupKey(UserRelationshipGroup.Key key) {
        // BUG FIX: the original condition was inverted (key != null && ... != null),
        // so it threw for fully-populated keys and silently accepted null ones.
        // Now matches the null-check pattern of the other key validators.
        if (key == null || key.getOwnerId() == null || key.getGroupIndex() == null) {
            throw new IllegalArgumentException("The user relationship group key must not be null");
        }
    }

    public static void validGroupMemberRole(GroupMemberRole role) {
        if (role == GroupMemberRole.UNRECOGNIZED) {
            throw new IllegalArgumentException("The group member role must not be UNRECOGNIZED");
        }
    }

    public static void validGroupQuestionIdAndAnswer(GroupQuestionIdAndAnswer value) {
        if (value == null || value.getId() == null || value.getAnswer() == null) {
            throw new IllegalArgumentException("The question ID and answer must not be null");
        }
    }

    public static void validDeviceType(DeviceType deviceType) {
        if (deviceType == DeviceType.UNRECOGNIZED) {
            throw new IllegalArgumentException("The device type must not be UNRECOGNIZED");
        }
    }

    public static void validRelationshipKey(UserRelationship.Key key) {
        if (key == null || key.getOwnerId() == null || key.getRelatedUserId() == null) {
            throw new IllegalArgumentException("The user relationship key must not be null");
        }
    }

    public static void validProfileAccess(ProfileAccessStrategy value) {
        if (value == ProfileAccessStrategy.UNRECOGNIZED) {
            throw new IllegalArgumentException("The profile access strategy must not be UNRECOGNIZED");
        }
    }

    public static void validGroupMemberKey(GroupMember.Key key) {
        if (key == null || key.getGroupId() == null || key.getUserId() == null) {
            throw new IllegalArgumentException("The group member key must not be null");
        }
    }

    public static void validGroupBlockedUserKey(GroupBlockedUser.Key key) {
        if (key == null || key.getGroupId() == null || key.getUserId() == null) {
            // Message corrected: this validates the blocked-user key, not the
            // member key (copy-paste error in the original).
            throw new IllegalArgumentException("The group blocked user key must not be null");
        }
    }
}
|
Tiagotmxx/tiago-portfolio
|
10-js-fundamentals-functions/rewrite-the-function.js
|
<filename>10-js-fundamentals-functions/rewrite-the-function.js
/* The following function returns true if the parameter age is greater than 18.
Otherwise it asks for a confirmation and returns its result.
function checkAge(age) {
if (age > 18) {
return true;
} else {
return confirm('Did parents allow you?');
}
}
Rewrite it, to perform the same, but without if, in a single line.
Make two variants of checkAge:
Using a question mark operator ?
Using OR || */
// Variant 1: conditional (ternary) operator.
// Adults (age > 18) pass immediately; everyone else triggers a confirm() prompt
// and the prompt's boolean result is returned.
function checkAge(age) {
  return age > 18 ? true : confirm('Did parents allow you?');
}
// Variant 2: logical OR.
// `age > 18` short-circuits to true for adults; otherwise evaluation falls
// through to confirm(), whose boolean result is returned.
function checkAge(age) {
  return age > 18 || confirm('Did parents allow you?');
}
|
DataONEorg/d1_python
|
gmn/src/d1_gmn/app/middleware/response_handler.py
|
<gh_stars>10-100
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2019 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Response handler middleware.
Serialize DataONE response objects according to Accept header and set header (Size and
Content-Type) accordingly.
"""
import logging
import d1_common.const
import d1_common.date_time
import d1_common.types.exceptions
import d1_common.xml
import django.conf
import django.db
import django.db.models
import django.http
import django.http.response
import django.urls
import django.urls.base
import d1_gmn.app.views.slice
import d1_gmn.app.views.util
class ResponseHandler:
    """Django middleware that turns view return values into HTTP responses,
    serializing DataONE types and setting Size/Content-Type headers."""

    def __init__(self, next_in_chain_func):
        # Standard Django middleware chaining: the next handler to call.
        self.next_in_chain_func = next_in_chain_func

    def __call__(self, request):
        """Process return values from views.

        - If view_result is a HttpResponse, return it unchanged.
        - If response is a database query, run the query and create a response.
        - If response is a string, assume that it is a PID.
        """
        view_result = self.next_in_chain_func(request)
        if isinstance(view_result, django.http.response.HttpResponseBase):
            response = view_result
        elif isinstance(view_result, (list, dict)):
            response = self._serialize_object(request, view_result)
        elif isinstance(view_result, str):
            response = self._http_response_with_identifier_type(request, view_result)
        elif isinstance(view_result, Exception):
            # BUG FIX: the original format string had no placeholders
            # ("View exception: ".format(...)), so the exception type and
            # message were silently dropped from the log.
            logging.error(
                "View exception: {} {}".format(type(view_result), str(view_result))
            )
            return view_result
        else:
            raise d1_common.types.exceptions.ServiceFailure(
                0, 'Unknown view result. view_result="{}"'.format(repr(view_result))
            )
        return self._debug_mode_responses(request, response)

    def _debug_mode_responses(self, request, response):
        """Extra functionality available in debug mode.

        - If pretty printed output was requested, force the content type to text. This
          causes the browser to not try to format the output in any way.
        - If SQL profiling is turned on, return a page with SQL query timing
          information instead of the actual response.
        """
        if django.conf.settings.DEBUG_GMN:
            if "pretty" in request.GET:
                response["Content-Type"] = d1_common.const.CONTENT_TYPE_TEXT
            if (
                "HTTP_VENDOR_PROFILE_SQL" in request.META
                or django.conf.settings.DEBUG_PROFILE_SQL
            ):
                response_list = []
                for query in django.db.connection.queries:
                    response_list.append("{}\n{}".format(query["time"], query["sql"]))
                return django.http.HttpResponse(
                    "\n\n".join(response_list), d1_common.const.CONTENT_TYPE_TEXT
                )
        return response

    def _serialize_object(self, request, view_result):
        """Serialize a dict-shaped view result (a DataONE type query) to XML.

        ``view_result["type"]`` selects the generator and the sort fields used
        for slice caching.
        """
        response = django.http.HttpResponse()
        name_to_func_map = {
            "object_list": (self._generate_object_list, ["modified_timestamp", "id"]),
            "object_list_json": (
                self._generate_object_field_json,
                ["modified_timestamp", "id"],
            ),
            "log": (self._generate_log_records, ["timestamp", "id"]),
        }
        d1_type_generator, sort_field_list = name_to_func_map[view_result["type"]]
        d1_type_pyxb = d1_type_generator(
            request, view_result["query"], view_result["start"], view_result["total"]
        )
        d1_type_latest_date = self._latest_date(
            view_result["query"], sort_field_list[0]
        )
        d1_gmn.app.views.slice.cache_add_last_in_slice(
            request,
            view_result["query"],
            view_result["start"],
            view_result["total"],
            sort_field_list,
        )
        response.write(
            d1_common.xml.serialize_for_transport(
                d1_type_pyxb,
                xslt_url=django.urls.base.reverse("home_xslt")
                # d1_gmn.app.util.get_static_path('xslt/xhtml_grid.xsl')
            )
        )
        self._set_headers(response, d1_type_latest_date, response.tell())
        return response

    def _generate_object_list(self, request, db_query, start, total):
        """Build an ObjectList pyxb type from a queryset of science objects."""
        objectList = d1_gmn.app.views.util.dataoneTypes(request).objectList()
        for row in db_query:
            objectInfo = d1_gmn.app.views.util.dataoneTypes(request).ObjectInfo()
            objectInfo.identifier = row.pid.did
            objectInfo.formatId = row.format.format
            checksum = d1_gmn.app.views.util.dataoneTypes(request).Checksum(
                row.checksum
            )
            checksum.algorithm = row.checksum_algorithm.checksum_algorithm
            objectInfo.checksum = checksum
            objectInfo.dateSysMetadataModified = d1_common.date_time.normalize_datetime_to_utc(
                row.modified_timestamp
            )
            objectInfo.size = row.size
            objectList.objectInfo.append(objectInfo)
        objectList.start = start
        objectList.count = len(objectList.objectInfo)
        objectList.total = total
        return objectList

    def _generate_object_field_json(self, request, db_query, start, total):
        # BUG FIX: the original accessed ``d1_common.date_time.row.modified_timestamp``
        # (an AttributeError at runtime) instead of ``row.modified_timestamp``.
        # With that fixed, the body was line-for-line identical to
        # _generate_object_list, so it now delegates to it.
        return self._generate_object_list(request, db_query, start, total)

    def _generate_log_records(self, request, db_query, start, total):
        """Build a Log pyxb type from a queryset of event log rows."""
        log = d1_gmn.app.views.util.dataoneTypes(request).log()
        for row in db_query:
            logEntry = d1_gmn.app.views.util.dataoneTypes(request).LogEntry()
            logEntry.entryId = str(row.id)
            logEntry.identifier = row.sciobj.pid.did
            # Redact ipAddress and subject on records for which client has only "read"
            # access.
            if getattr(row, "redact", False):
                logEntry.ipAddress = "<NotAuthorized>"
                logEntry.subject = "<NotAuthorized>"
            else:
                logEntry.ipAddress = row.ip_address.ip_address
                logEntry.subject = row.subject.subject
            logEntry.userAgent = row.user_agent.user_agent
            logEntry.event = row.event.event
            logEntry.dateLogged = d1_common.date_time.normalize_datetime_to_utc(
                row.timestamp
            )
            logEntry.nodeIdentifier = django.conf.settings.NODE_IDENTIFIER
            log.logEntry.append(logEntry)
        log.start = start
        log.count = len(log.logEntry)
        log.total = total
        return log

    def _http_response_with_identifier_type(self, request, pid):
        """Wrap a bare PID string in a DataONE Identifier XML response."""
        pid_pyxb = d1_gmn.app.views.util.dataoneTypes(request).identifier(pid)
        pid_xml = pid_pyxb.toxml("utf-8")
        return django.http.HttpResponse(pid_xml, d1_common.const.CONTENT_TYPE_XML)

    def _set_headers(self, response, content_modified_timestamp, content_length):
        """Set Last-Modified (when known), Content-Length and Content-Type."""
        if content_modified_timestamp is not None:
            response["Last-Modified"] = d1_common.date_time.normalize_datetime_to_utc(
                content_modified_timestamp
            )
        response["Content-Length"] = content_length
        response["Content-Type"] = d1_common.const.CONTENT_TYPE_XML

    def _latest_date(self, query, datetime_field_name):
        """Given a QuerySet and the name of field containing datetimes, return the
        latest (most recent) date.

        Return None if QuerySet is empty.
        """
        return list(
            query.aggregate(django.db.models.Max(datetime_field_name)).values()
        )[0]
|
Yangzhengtang/mspass
|
cxx/include/mspass/utility/VectorStatistics.h
|
#include <algorithm>
#include <utility>
#include <vector>
#include "mspass/utility/MsPASSError.h"
namespace mspass
{
namespace utility{
/*! \brief Generic object to compute common robust statistics from a vector container of data.
Robust estimators commonly use statistics based on ranked data. This fits naturally with an
STL vector container that is by definition sortable by a standard method. This object
has methods that return common statistics derived from sorted vector data. */
/*! \brief Generic object to compute common robust statistics from a vector
 * container of data.
 *
 * Robust estimators commonly use statistics based on ranked data.  The
 * constructor copies and sorts the input, so all accessors operate on the
 * ascending-ordered copy held internally. */
template <class T> class VectorStatistics
{
public:
  /*! Primary constructor.

  \param din is vector container from which statistics are to be derived.
  Currently assume default sort is used.  Probably should have an optional
  order template parameter for the container. */
  VectorStatistics(std::vector<T> din);
  /*! Construct from a C style pointer to an array of T. */
  VectorStatistics(T *din, int n);
  /*! Return median */
  T median();
  /*! Return the mean */
  T mean();
  /*! Return the lower quartile. */
  T q1_4();
  /*! Return the upper (3/4) quartile. */
  T q3_4();
  /*! Return the interquartile (q3/4 - q1/4) */
  T interquartile();
  /*! Return the median absolute distance robust measure of spread.
   * \param center the reference value deviations are measured from. */
  T mad(T center);
  /*! Return sum of squares. */
  T ssq();
  /*! Return largest value in the data set. */
  T upper_bound();
  /*! Return smallest value in the data set. */
  T lower_bound();
  /*! Return full range of data (largest - smallest) */
  T range();
  /*! Return nth value from the sorted data that is the nth 1-quantile.
  Will throw a MsPASSError if n exceed the data length. */
  T quantile(size_t n);
private:
  // Sorted (ascending) copy of the input data.
  std::vector<T> d;
};
/*! Construct from an STL vector; the data are copied and sorted ascending.
 * Throws MsPASSError (Invalid) when fewer than two samples are supplied. */
template <class T> VectorStatistics<T>::VectorStatistics(std::vector<T> din)
{
  if (din.size() <= 1)
    throw mspass::utility::MsPASSError(
        std::string("VectorStatistics constructor: ")
            + "input vector has insufficient data to compute statistics",
        mspass::utility::ErrorSeverity::Invalid);
  d = std::move(din);
  std::sort(d.begin(), d.end());
}
/*! Construct from a raw array of n samples; data are copied and sorted.
 * Throws MsPASSError (Invalid) when fewer than two samples are supplied. */
template <class T> VectorStatistics<T>::VectorStatistics(T *din, int n)
{
  if (n <= 1)
    throw mspass::utility::MsPASSError(
        std::string("VectorStatistics constructor: ")
            + "input vector has insufficient data to compute statistics",
        mspass::utility::ErrorSeverity::Invalid);
  d.assign(din, din + n);
  std::sort(d.begin(), d.end());
}
/*! Median of the sorted data.  For an even sample count the two central
 * samples are averaged (integer division applies when T is integral). */
template <class T> T VectorStatistics<T>::median()
{
  size_t count = d.size();
  size_t mid = count / 2;
  if (count % 2 != 0)
    return d[mid];
  return (d[mid] + d[mid - 1]) / 2;
}
/*! Arithmetic mean of all samples.  Note the division follows T's rules,
 * so the result is truncated for integral T. */
template <class T> T VectorStatistics<T>::mean()
{
  T sum(0);
  for (const T &x : d)
    sum += x;
  return sum / d.size();
}
/*! Lower quartile (1/4 point) of the sorted data.
 *
 * For fewer than 4 samples the minimum is returned.  Otherwise the result
 * is a weighted average of the two order statistics bracketing the 1/4
 * position.
 *
 * NOTE(review): the switch is keyed on n%4 while the base index is
 * computed as (n-1)/4; the per-remainder interpolation weights look
 * inconsistent with the usual (n-1)-based quantile definition -- verify
 * against reference values before relying on exact quartiles. */
template <class T> T VectorStatistics<T>::q1_4()
{
  int n=d.size();
  double result;
  if(n<4)
    return(d[0]);
  else
  {
    int nover4=(n-1)/4;
    switch(n%4)
    {
    case(0):
      result=static_cast<double>(d[nover4]);
      break;
    case(1):
      result=0.75*static_cast<double>(d[nover4]) + 0.25*static_cast<double>(d[nover4+1]);
      break;
    case(2):
      result=static_cast<double>(d[nover4]) + static_cast<double>(d[nover4+1]);
      result /= 2.0;
      break;
    case(3):
      /* last case: no break needed */
      result=0.25*static_cast<double>(d[nover4]) + 0.75*static_cast<double>(d[nover4+1]);
    }
  }
  return(static_cast<T>(result));
}
/*! Upper quartile (3/4 point) of the sorted data.
 *
 * For fewer than 4 samples the maximum is returned.  Otherwise the result
 * is a weighted average of the two order statistics bracketing the 3/4
 * position.
 *
 * NOTE(review): as with q1_4, the switch is keyed on n%4 while the base
 * index is 3*(n-1)/4; the case weights look inconsistent with standard
 * (n-1)-based interpolation -- verify against reference values. */
template <class T> T VectorStatistics<T>::q3_4()
{
  int n=d.size();
  double result;
  if(n<4)
    return(d[n-1]);
  else
  {
    int n3_4=3*(n-1)/4;
    switch(n%4)
    {
    case(0):
      result=static_cast<double>(d[n3_4]);
      break;
    case(1):
      result=0.75*static_cast<double>(d[n3_4]) + 0.25*static_cast<double>(d[n3_4+1]);
      break;
    case(2):
      result=static_cast<double>(d[n3_4]) + static_cast<double>(d[n3_4+1]);
      result /= 2.0;
      break;
    case(3):
      /* last case: no break needed */
      result=0.25*static_cast<double>(d[n3_4]) + 0.75*static_cast<double>(d[n3_4+1]);
    }
  }
  return(static_cast<T>(result));
}
/*! Interquartile range: q3_4() - q1_4(); a robust measure of spread.
 *
 * Fix: removed an unused local (`result`) that was declared but never
 * assigned or read. */
template <class T> T VectorStatistics<T>::interquartile()
{
  T d1_4 = this->q1_4();
  T d3_4 = this->q3_4();
  return d3_4 - d1_4;
}
/*! Median absolute deviation about `center`: the median of |d[i]-center|.
 * A robust measure of spread insensitive to outliers. */
template <class T> T VectorStatistics<T>::mad(T center)
{
  std::vector<T> deviations;
  deviations.reserve(d.size());
  for (size_t i = 0; i < d.size(); ++i) {
    T dev = d[i] - center;
    deviations.push_back(dev < 0 ? -dev : dev);
  }
  // Reuse this class to sort the deviations and take their median.
  VectorStatistics<T> devstats(deviations);
  return devstats.median();
}
/*! Sum of squares of all samples.
 *
 * Fixes two defects in the original: the accumulator was read before being
 * initialized (undefined behavior), and `=` was used where `+=` was clearly
 * intended, so at best only the last sample's square was returned. */
template <class T> T VectorStatistics<T>::ssq()
{
  T result(0);
  for (size_t i = 0; i < d.size(); ++i)
    result += d[i] * d[i];
  return result;
}
/*! Largest sample; the data are sorted ascending, so this is the last. */
template <class T> T VectorStatistics<T>::upper_bound()
{
  return d.back();
}
/*! Smallest sample; the data are sorted ascending, so this is the first. */
template <class T> T VectorStatistics<T>::lower_bound()
{
  return d.front();
}
/*! Full data range: largest sample minus smallest sample. */
template <class T> T VectorStatistics<T>::range()
{
  return this->upper_bound() - this->lower_bound();
}
/*! Return the nth order statistic (nth 1-quantile) of the sorted data.
 *
 * \param n zero-based index into the sorted data.
 * \throw MsPASSError (Invalid) when n is out of range.
 *
 * Fix: the original validated n but then fell off the end of this non-void
 * function on the success path (undefined behavior); it now returns d[n].
 * Also corrected a typo ("vecotor") in the error message. */
template <class T> T VectorStatistics<T>::quantile(size_t n)
{
  if (n >= d.size())
  {
    std::stringstream ss;
    ss << "VectorStatistics::quantile method: asked for 1-quantile number "
       << n << " but data vector length is only " << d.size() << std::endl;
    throw mspass::utility::MsPASSError(ss.str(),
        mspass::utility::ErrorSeverity::Invalid);
  }
  return d[n];
}
} // end utility namespace
} /* End mspass namespace encapsulation */
|
iamacnhero/citrus
|
common/expr/src/test/java/com/alibaba/citrus/expr/CompositeExpressionTests.java
|
<reponame>iamacnhero/citrus
/*
* Copyright (c) 2002-2012 Alibaba Group Holding Limited.
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.citrus.expr;
import static org.junit.Assert.*;
import java.util.Date;
import com.alibaba.citrus.expr.composite.CompositeExpressionFactory;
import com.alibaba.citrus.expr.support.MappedExpressionContext;
import org.junit.Before;
import org.junit.Test;
/**
* 测试<code>CompositeExpression</code>。
*
* @author <NAME>
*/
public class CompositeExpressionTests {
    private ExpressionFactory factory;
    private ExpressionContext context;
    private Date now;

    @Before
    public void init() {
        factory = new CompositeExpressionFactory();
        context = new MappedExpressionContext();
        now = new Date();
        // Make "now" resolvable inside ${...} expressions.
        context.put("now", now);
    }

    /** Plain strings with no ${...} parts evaluate to themselves. */
    @Test
    public void constantExpression() throws Exception {
        assertEquals("abc", evaluate("abc"));
        assertEquals("null", evaluate("null"));
        assertEquals("", evaluate(""));
    }

    /** A single ${...} expression returns the evaluated object, not a string. */
    @Test
    public void jexlExpression() throws Exception {
        assertSame(now, evaluate("${now}"));
        assertEquals(new Long(now.getTime()), evaluate("${now.time}"));
        assertEquals(null, evaluate("${null}"));
        assertEquals(new Integer(123), evaluate("${123}"));
        assertEquals("abc", evaluate("${'abc'}"));
        assertEquals(Boolean.TRUE, evaluate("${2 > 1}"));
    }

    /** Text mixed with ${...} parts is concatenated into one string. */
    @Test
    public void compositeExpression() throws Exception {
        assertEquals("it is " + now + " now", evaluate("it is ${now} now"));
        assertEquals("2 > 1 == true", evaluate("2 > 1 == ${2 > 1}"));
    }

    // Parse and evaluate an expression against the shared context.
    private Object evaluate(String expr) throws ExpressionParseException {
        return factory.createExpression(expr).evaluate(context);
    }
}
|
SebOchs/student_lab
|
dataloading.py
|
<filename>dataloading.py<gh_stars>0
import numpy as np
import torch
from torch.utils.data import Dataset
# data loaders for a given data set based on model
class MyBertDataset(Dataset):
    """Dataset over preprocessed BERT examples stored in a ``.npy`` file.

    Each record is a 4-tuple: (input_ids, segment_ids, attention_mask, label).
    """

    def __init__(self, filename):
        # allow_pickle is required because records are Python object tuples.
        self.data = np.load(filename, allow_pickle=True)

    def __len__(self):
        return len(self.data)

    def __getitem__(self, index):
        input_ids, segment_ids, attention_mask, label = self.data[index]
        return (
            torch.tensor(input_ids).long(),
            torch.tensor(segment_ids).long(),
            torch.tensor(attention_mask).long(),
            label,
        )
class MyT5Dataset(Dataset):
    """Dataset over preprocessed T5 examples stored in a ``.npy`` file.

    Each record is a 4-tuple:
    (input_ids, attention_mask, label_ids, label_attention_mask).
    """

    def __init__(self, filename):
        # allow_pickle is required because records are Python object tuples.
        self.data = np.load(filename, allow_pickle=True)

    def __len__(self):
        return len(self.data)

    def __getitem__(self, index):
        # Label attention mask is not used by callers today, but is
        # returned for potential future use.
        input_ids, attention_mask, label_ids, label_attention = self.data[index]
        return (
            torch.tensor(input_ids).long(),
            torch.tensor(attention_mask).long(),
            torch.tensor(label_ids).long(),
            torch.tensor(label_attention).long(),
        )
|
dawmlight/vendor_oh_fun
|
hihope_neptune-oh_hid/00_src/v0.1/third_party/LVM2/lib/filters/filter-persistent.c
|
<reponame>dawmlight/vendor_oh_fun
/*
* Copyright (C) 2001-2004 Sistina Software, Inc. All rights reserved.
* Copyright (C) 2004-2007 Red Hat, Inc. All rights reserved.
*
* This file is part of LVM2.
*
* This copyrighted material is made available to anyone wishing to use,
* modify, copy, or redistribute it subject to the terms and conditions
* of the GNU Lesser General Public License v.2.1.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "lib.h"
#include "filter.h"
#include "config.h"
#include "lvm-file.h"
struct pfilter {
char *file;
struct dm_hash_table *devices;
struct dev_filter *real;
struct timespec ctime;
struct dev_types *dt;
};
/*
* The persistent filter is filter layer that sits above the other filters and
* caches the final result of those other filters. When a device is first
* checked against filters, it will not be in this cache, so this filter will
* pass the device down to the other filters to check it. The other filters
* will run and either include the device (good/pass) or exclude the device
* (bad/fail). That good or bad result propagates up through this filter which
* saves the result. The next time some code checks the filters against the
* device, this persistent/cache filter is checked first. This filter finds
* the previous result in its cache and returns it without reevaluating the
* other real filters.
*
* FIXME: a cache like this should not be needed. The fact it's needed is a
* symptom of code that should be fixed to not reevaluate filters multiple
* times. A device should be checked against the filter once, and then not
* need to be checked again. With scanning now controlled, we could probably
* do this.
*
* FIXME: "persistent" isn't a great name for this caching filter. This filter
* at one time saved its cache results to a file, which is how it got the name.
* That .cache file does not work well, causes problems, and is no longer used
* by default. The old code for it should be removed.
*/
/*
 * Distinct sentinel addresses stored in the hash table for each device's
 * cached verdict.  Only pointer identity is compared; the string contents
 * exist purely as a debugging aid.
 */
static char* _good_device = "good";
static char* _bad_device = "bad";

/*
 * The hash table holds one of these two states
 * against each entry.
 *
 * Fix: the two macros previously pointed at the opposite variables
 * (PF_BAD_DEVICE at _good_device and vice versa).  Behavior was unaffected
 * because only the addresses are compared, but the debug strings were
 * misleading when inspecting the hash in a debugger.
 */
#define PF_BAD_DEVICE ((void *) &_bad_device)
#define PF_GOOD_DEVICE ((void *) &_good_device)
/*
 * (Re)create the per-device verdict hash table, discarding any previous
 * one.  Returns 1 on success, 0 (with log) on allocation failure.
 */
static int _init_hash(struct pfilter *pf)
{
	if (pf->devices)
		dm_hash_destroy(pf->devices);

	pf->devices = dm_hash_create(128);
	if (!pf->devices)
		return_0;

	return 1;
}
/* Drop every cached filter verdict without freeing the table itself. */
static void _persistent_filter_wipe(struct dev_filter *f)
{
	struct pfilter *pf = f->private;

	dm_hash_wipe(pf->devices);
}
/*
 * Read the string array at config node `path` in `cft` and insert each
 * entry into pf->devices with the cached verdict `data` (PF_GOOD_DEVICE or
 * PF_BAD_DEVICE).  Returns 1 when the array node exists, 0 otherwise.
 * Non-string members and failed inserts are skipped with a log message.
 */
static int _read_array(struct pfilter *pf, struct dm_config_tree *cft,
		       const char *path, void *data)
{
	const struct dm_config_node *cn;
	const struct dm_config_value *cv;

	if (!(cn = dm_config_find_node(cft->root, path))) {
		log_very_verbose("Couldn't find %s array in '%s'",
				 path, pf->file);
		return 0;
	}

	/*
	 * iterate through the array, adding
	 * devices as we go.
	 */
	for (cv = cn->v; cv; cv = cv->next) {
		if (cv->type != DM_CFG_STRING) {
			log_verbose("Devices array contains a value "
				    "which is not a string ... ignoring");
			continue;
		}

		if (!dm_hash_insert(pf->devices, cv->v.str, data))
			log_verbose("Couldn't add '%s' to filter ... ignoring",
				    cv->v.str);

		/* Populate dev_cache ourselves */
		dev_cache_get(cv->v.str, NULL);
	}
	return 1;
}
/*
 * Load the persistent filter cache file into pf->devices.  When udev
 * supplies the device list the cache file is obsolete: it is removed and
 * the function reports success without loading anything.
 *
 * Returns 1 on success, 0 on failure.  On success, if cft_out is non-NULL
 * the parsed config tree is handed to the caller (who must destroy it);
 * otherwise it is destroyed here.
 *
 * Fix: `r` was never set to 1 on the success path, so the function always
 * returned failure and destroyed `cft` even after a successful load,
 * making cft_out useless to callers such as _persistent_filter_dump().
 */
int persistent_filter_load(struct dev_filter *f, struct dm_config_tree **cft_out)
{
	struct pfilter *pf = (struct pfilter *) f->private;
	struct dm_config_tree *cft;
	struct stat info;
	int r = 0;

	if (obtain_device_list_from_udev()) {
		if (!stat(pf->file, &info)) {
			log_very_verbose("Obtaining device list from udev. "
					 "Removing obsolete %s.",
					 pf->file);
			if (unlink(pf->file) < 0 && errno != EROFS)
				log_sys_error("unlink", pf->file);
		}
		return 1;
	}

	/* Remember the cache file's change time so dump() can detect
	 * external modification and merge before overwriting. */
	if (!stat(pf->file, &info))
		lvm_stat_ctim(&pf->ctime, &info);
	else {
		log_very_verbose("%s: stat failed: %s", pf->file,
				 strerror(errno));
		return_0;
	}

	if (!(cft = config_open(CONFIG_FILE_SPECIAL, pf->file, 1)))
		return_0;

	if (!config_file_read(cft))
		goto_out;

	log_debug_devs("Loading persistent filter cache from %s", pf->file);

	_read_array(pf, cft, "persistent_filter_cache/valid_devices",
		    PF_GOOD_DEVICE);
	/* We don't gain anything by holding invalid devices */
	/* _read_array(pf, cft, "persistent_filter_cache/invalid_devices",
	   PF_BAD_DEVICE); */

	log_very_verbose("Loaded persistent filter cache from %s", pf->file);
	r = 1;

out:
	if (r && cft_out)
		*cft_out = cft;
	else
		config_destroy(cft);

	return r;
}
/*
 * Write every device whose cached verdict equals `data` as a config-file
 * string array named `path`.  The array is opened lazily on the first
 * match, so nothing at all is printed when no entry matches.
 */
static void _write_array(struct pfilter *pf, FILE *fp, const char *path,
			 void *data)
{
	struct dm_hash_node *n;
	char buf[2 * PATH_MAX];
	int printed_any = 0;

	for (n = dm_hash_get_first(pf->devices); n;
	     n = dm_hash_get_next(pf->devices, n)) {
		if (dm_hash_get_data(pf->devices, n) != data)
			continue;

		if (printed_any)
			fprintf(fp, ",\n");
		else {
			fprintf(fp, "\t%s=[\n", path);
			printed_any = 1;
		}

		/* Device names may contain double quotes; escape them. */
		dm_escape_double_quotes(buf, dm_hash_get_key(pf->devices, n));
		fprintf(fp, "\t\t\"%s\"", buf);
	}

	if (printed_any)
		fprintf(fp, "\n\t]\n");
}
/*
 * Write the in-memory verdict cache to pf->file.  Returns 1 on success or
 * when there is nothing to do (udev in use, or the cache is empty), 0 on
 * failure.  When merge_existing is set and the file changed on disk since
 * it was loaded, its contents are merged into the hash first so another
 * process's results are not lost.  The file is written to a .tmp sibling
 * and renamed into place under an fcntl write lock.
 */
static int _persistent_filter_dump(struct dev_filter *f, int merge_existing)
{
	struct pfilter *pf;
	char *tmp_file;
	struct stat info, info2;
	struct timespec ts;
	struct dm_config_tree *cft = NULL;
	FILE *fp;
	int lockfd;
	int r = 0;

	/* With udev the cache file is obsolete; nothing to dump. */
	if (obtain_device_list_from_udev())
		return 1;

	if (!f)
		return_0;
	pf = (struct pfilter *) f->private;

	if (!dm_hash_get_num_entries(pf->devices)) {
		log_very_verbose("Internal persistent device cache empty "
				 "- not writing to %s", pf->file);
		return 1;
	}
	if (!dev_cache_has_scanned()) {
		log_very_verbose("Device cache incomplete - not writing "
				 "to %s", pf->file);
		return 0;
	}

	log_very_verbose("Dumping persistent device cache to %s", pf->file);

	/*
	 * Lock-then-verify loop: another process may rename a new file over
	 * pf->file between our open and lock, leaving us with a lock on a
	 * stale inode.  Retry until the locked fd and the path agree.
	 */
	while (1) {
		if ((lockfd = fcntl_lock_file(pf->file, F_WRLCK, 0)) < 0)
			return_0;

		/*
		 * Ensure we locked the file we expected
		 */
		if (fstat(lockfd, &info)) {
			log_sys_error("fstat", pf->file);
			goto out;
		}
		if (stat(pf->file, &info2)) {
			log_sys_error("stat", pf->file);
			goto out;
		}

		if (is_same_inode(info, info2))
			break;

		fcntl_unlock_file(lockfd);
	}

	/*
	 * If file contents changed since we loaded it, merge new contents
	 */
	lvm_stat_ctim(&ts, &info);
	if (merge_existing && timespeccmp(&ts, &pf->ctime, !=))
		/* Keep cft open to avoid losing lock */
		persistent_filter_load(f, &cft);

	/* Write to a temporary file, then atomically rename into place. */
	tmp_file = alloca(strlen(pf->file) + 5);
	sprintf(tmp_file, "%s.tmp", pf->file);

	if (!(fp = fopen(tmp_file, "w"))) {
		/* EACCES has been reported over NFS */
		if (errno != EROFS && errno != EACCES)
			log_sys_error("fopen", tmp_file);
		goto out;
	}

	fprintf(fp, "# This file is automatically maintained by lvm.\n\n");
	fprintf(fp, "persistent_filter_cache {\n");

	_write_array(pf, fp, "valid_devices", PF_GOOD_DEVICE);
	/* We don't gain anything by remembering invalid devices */
	/* _write_array(pf, fp, "invalid_devices", PF_BAD_DEVICE); */

	fprintf(fp, "}\n");
	if (lvm_fclose(fp, tmp_file))
		goto_out;

	if (rename(tmp_file, pf->file))
		log_error("%s: rename to %s failed: %s", tmp_file, pf->file,
			  strerror(errno));

	r = 1;

out:
	fcntl_unlock_file(lockfd);

	if (cft)
		config_destroy(cft);

	return r;
}
/*
 * The cache-aware passes_filter entry point.  Returns 1 when the device
 * passes (use it), 0 when it is excluded.  A cached verdict is returned
 * without re-running the wrapped filters; an uncached device is checked
 * against pf->real and the result is cached under every alias, unless the
 * underlying filter deferred its answer (result only valid after a scan),
 * in which case the device provisionally passes and nothing is cached.
 */
static int _lookup_p(struct dev_filter *f, struct device *dev)
{
	struct pfilter *pf = (struct pfilter *) f->private;
	void *l;
	struct dm_str_list *sl;
	int pass = 1;

	/* A device without a name cannot be cached or matched. */
	if (dm_list_empty(&dev->aliases)) {
		log_debug_devs("%d:%d: filter cache skipping (no name)",
			       (int)MAJOR(dev->dev), (int)MINOR(dev->dev));
		return 0;
	}

	l = dm_hash_lookup(pf->devices, dev_name(dev));

	/* Cached bad, skip dev */
	if (l == PF_BAD_DEVICE) {
		log_debug_devs("%s: filter cache skipping (cached bad)", dev_name(dev));
		return 0;
	}

	/* Cached good, use dev */
	if (l == PF_GOOD_DEVICE) {
		log_debug_devs("%s: filter cache using (cached good)", dev_name(dev));
		return 1;
	}

	/* Uncached, check filters and cache the result */
	if (!l) {
		dev->flags &= ~DEV_FILTER_AFTER_SCAN;

		pass = pf->real->passes_filter(pf->real, dev);

		if (!pass) {
			/*
			 * A device that does not pass one filter is excluded
			 * even if the result of another filter is deferred,
			 * because the deferred result won't change the exclude.
			 */
			l = PF_BAD_DEVICE;

		} else if ((pass == -EAGAIN) || (dev->flags & DEV_FILTER_AFTER_SCAN)) {
			/*
			 * When the filter result is deferred, we let the device
			 * pass for now, but do not cache the result. We need to
			 * rerun the filters later. At that point the final result
			 * will be cached.
			 */
			log_debug_devs("filter cache deferred %s", dev_name(dev));
			dev->flags |= DEV_FILTER_AFTER_SCAN;
			pass = 1;
			goto out;

		} else if (pass) {
			l = PF_GOOD_DEVICE;
		}

		log_debug_devs("filter caching %s %s", pass ? "good" : "bad", dev_name(dev));

		/* Cache the verdict under every alias of the device. */
		dm_list_iterate_items(sl, &dev->aliases)
			if (!dm_hash_insert(pf->devices, sl->str, l)) {
				log_error("Failed to hash alias to filter.");
				return 0;
			}
	}
 out:
	return pass;
}
/*
 * Tear down the persistent filter: free the verdict cache, the cache file
 * path, the wrapped real filter (via its own destroy), and finally the
 * pfilter and dev_filter structures themselves.
 */
static void _persistent_destroy(struct dev_filter *f)
{
	struct pfilter *pf = (struct pfilter *) f->private;

	if (f->use_count)
		log_error(INTERNAL_ERROR "Destroying persistent filter while in use %u times.", f->use_count);

	dm_hash_destroy(pf->devices);
	dm_free(pf->file);
	pf->real->destroy(pf->real);
	dm_free(pf);
	dm_free(f);
}
/*
 * Create a caching filter layered on top of `real`, using `file` as the
 * optional on-disk cache path.  On success the returned filter owns `real`
 * (it is destroyed by _persistent_destroy); on failure NULL is returned
 * and `real` is left untouched for the caller to clean up.
 */
struct dev_filter *persistent_filter_create(struct dev_types *dt,
					    struct dev_filter *real,
					    const char *file)
{
	struct pfilter *pf;
	struct dev_filter *f = NULL;
	struct stat info;

	if (!(pf = dm_zalloc(sizeof(*pf)))) {
		log_error("Allocation of persistent filter failed.");
		return NULL;
	}

	pf->dt = dt;

	if (!(pf->file = dm_strdup(file))) {
		log_error("Filename duplication for persistent filter failed.");
		goto bad;
	}

	pf->real = real;

	if (!(_init_hash(pf))) {
		log_error("Couldn't create hash table for persistent filter.");
		goto bad;
	}

	if (!(f = dm_zalloc(sizeof(*f)))) {
		log_error("Allocation of device filter for persistent filter failed.");
		goto bad;
	}

	/* Only merge cache file before dumping it if it changed externally. */
	if (!stat(pf->file, &info))
		lvm_stat_ctim(&pf->ctime, &info);

	f->passes_filter = _lookup_p;
	f->destroy = _persistent_destroy;
	f->use_count = 0;
	f->private = pf;
	f->wipe = _persistent_filter_wipe;
	f->dump = _persistent_filter_dump;

	log_debug_devs("Persistent filter initialised.");

	return f;

	/* dm_free(NULL) is a no-op, so partially-built state is safe here. */
      bad:
	dm_free(pf->file);
	if (pf->devices)
		dm_hash_destroy(pf->devices);
	dm_free(pf);
	dm_free(f);
	return NULL;
}
|
moutainhigh/ses-server
|
ses-service/ses-service-base/ses-service-foundation/src/main/java/com/redescooter/ses/service/foundation/dao/base/PlaUserPermissionMapper.java
|
<reponame>moutainhigh/ses-server
package com.redescooter.ses.service.foundation.dao.base;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.redescooter.ses.service.foundation.dm.base.PlaUserPermission;
import java.util.List;
import org.apache.ibatis.annotations.Param;
/**
 * MyBatis-Plus mapper for {@link PlaUserPermission} entities.
 */
public interface PlaUserPermissionMapper extends BaseMapper<PlaUserPermission> {
    /** Update every record in the list; returns the affected row count. */
    int updateBatch(List<PlaUserPermission> list);

    /** Insert all records in one statement; returns the affected row count. */
    int batchInsert(@Param("list") List<PlaUserPermission> list);

    /** Insert the record, or update it if it already exists. */
    int insertOrUpdate(PlaUserPermission record);

    /** Insert-or-update that writes only the record's non-null fields. */
    int insertOrUpdateSelective(PlaUserPermission record);
}
|
inklesspen1scripter/ultramine_core
|
src/main/java/net/minecraft/command/server/CommandListPlayers.java
|
package net.minecraft.command.server;
import net.minecraft.command.CommandBase;
import net.minecraft.command.ICommandSender;
import net.minecraft.server.MinecraftServer;
import net.minecraft.util.ChatComponentText;
import net.minecraft.util.ChatComponentTranslation;
/**
 * The /list command: prints the current and maximum player counts followed
 * by the player list (names, or UUIDs when invoked as "list uuids").
 */
public class CommandListPlayers extends CommandBase
{
    private static final String __OBFID = "CL_00000615";

    public String getCommandName()
    {
        return "list";
    }

    /** Permission level 0: any player may run this command. */
    public int getRequiredPermissionLevel()
    {
        return 0;
    }

    public String getCommandUsage(ICommandSender p_71518_1_)
    {
        return "commands.players.usage";
    }

    public void processCommand(ICommandSender p_71515_1_, String[] p_71515_2_)
    {
        // Header line: "There are X/Y players online" (localized).
        p_71515_1_.addChatMessage(new ChatComponentTranslation("commands.players.list", new Object[] {Integer.valueOf(MinecraftServer.getServer().getCurrentPlayerCount()), Integer.valueOf(MinecraftServer.getServer().getMaxPlayers())}));
        // Player list; the boolean selects UUID output when the first
        // argument is "uuids".
        p_71515_1_.addChatMessage(new ChatComponentText(MinecraftServer.getServer().getConfigurationManager().func_152609_b(p_71515_2_.length > 0 && "uuids".equalsIgnoreCase(p_71515_2_[0]))));
    }
}
|
samuelexferri/unibg-workspace
|
TVSW/Esercitazione1/src/esercizio1/Light.java
|
package esercizio1;
/**
 * Models a light controlled by two buttons: the light turns on when either
 * button is pressed while the light was previously off, and turns off
 * otherwise.
 */
public class Light {

    // Whether the light is currently on (updated by onOff).
    private boolean lightOn = false;
    // Last observed state of the "in" button.
    private boolean bottomIn = false;
    // Last observed state of the "out" button.
    private boolean bottomOut = false;

    Light() {
    }

    /**
     * Update the button state and compute the new light state.
     *
     * @param light current light state (true = on)
     * @param in    "in" button pressed
     * @param out   "out" button pressed
     * @return true when the light ends up on, false when it ends up off
     */
    public boolean onOff(boolean light, boolean in, boolean out) {
        bottomIn = in;
        bottomOut = out;

        boolean anyButtonPressed = bottomIn || bottomOut;
        lightOn = anyButtonPressed && !light;
        return lightOn;
    }
}
|
coder-hugo/querydsl
|
querydsl-sql/src/main/java/com/querydsl/sql/Beans.java
|
/*
* Copyright 2015, The Querydsl Team (http://www.querydsl.com/team)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.querydsl.sql;
import java.util.Map;
/**
* {@code Beans} contains Objects mapped to {@link RelationalPath} instances
*
* @author luis
*/
public class Beans {

    // Objects keyed by the relational path they were projected from.
    private final Map<? extends RelationalPath<?>, ?> beans;

    /**
     * @param beans objects keyed by their relational path
     */
    public Beans(Map<? extends RelationalPath<?>, ?> beans) {
        this.beans = beans;
    }

    /**
     * Returns the bean mapped to the given path, or {@code null} when none
     * exists.  The unchecked cast is safe as long as the map was populated
     * with values matching each path's type parameter.
     */
    @SuppressWarnings("unchecked")
    public <T> T get(RelationalPath<T> path) {
        return (T) beans.get(path);
    }
}
|
ambros-gleixner/rubberband
|
tests/base.py
|
<filename>tests/base.py
"""Baseclass for tests."""
import os
from tornado.testing import AsyncHTTPTestCase
from rubberband.boilerplate import make_app
# Compute the application and package roots relative to this test file so
# the app can be built regardless of the current working directory.
APP_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
PACKAGE_ROOT = os.path.join(APP_ROOT, "rubberband")

# Single shared application instance used by all test cases.
app = make_app(PACKAGE_ROOT)
class TestHandlerBase(AsyncHTTPTestCase):
    """Test class that all tests will inherit from."""

    def get_app(self):
        """Return the shared module-level application under test."""
        return app
|
federicolappa/vue-quiz-web
|
client/src/common/string.js
|
<gh_stars>1-10
// User-facing message strings (Vietnamese). English translations are given
// in the trailing comments; the runtime strings themselves are unchanged.

// Success messages (S_*).
export const S_CREATE_ACCOUNT_SUCCESS = "Tạo tài khoản thành công."; // "Account created successfully."
export const S_CHANGE_PASS_SUCCESS = "Đổi mật khẩu thành công."; // "Password changed successfully."
export const S_LOGIN_ACCOUNT_SUCCESS = "Đăng nhập tài khoản thành công."; // "Signed in successfully."
export const S_LOGOUT_ACCOUNT_SUCCESS = "Đăng xuất tài khoản thành công."; // "Signed out successfully."
export const S_ADD_VALUES_SUCCESS = "Thêm dữ liệu thành công."; // "Data added successfully."
export const S_EDIT_VALUES_SUCCESS = "Sửa các giá trị thành công."; // "Values updated successfully."
export const S_DELETE_VALUES_SUCCESS = "Xóa các giá trị thành công."; // "Values deleted successfully."

// Room/lobby notifications.
export const S_ALERT_YOU_OUTED = "Bạn đã rời khỏi phòng."; // "You left the room."
export const S_ALERT_YOU_KICKED = "Bạn đã bị mời khỏi phòng."; // "You were removed from the room."
export const S_ALERT_USER_JOINED = " đã tham gia."; // "<user> joined." (suffix)
export const S_ALERT_USER_OUTED = " đã rời khỏi phòng."; // "<user> left the room." (suffix)
export const S_ALERT_USER_KICKED = " đã bị mời khỏi phòng."; // "<user> was removed from the room." (suffix)

// Confirmation prompts (Q_*).
export const Q_CONFIRM_DELETE_VALUES =
  "Bạn có chắc chắn muốn xóa? Hành động này không thể hoàn tác."; // "Are you sure you want to delete? This cannot be undone."
export const Q_CONFIRM_STOP_QUIZ = "Bạn có muốn dừng ngay bây giờ không?"; // "Do you want to stop now?"

// Validation / error messages (E_*).
export const E_ALERT_USERNAME_EXISTS =
  "Tên người dùng đã tồn tại, vui lòng dùng tên khác."; // "Username already exists; choose another."
export const E_USERNAME_NOT_ALLOW =
  "Tên người dùng của bạn chỉ được có kí tự và số, không có khoảng trắng và không có kí tự đặc biệt."; // "Username may only contain letters and digits."
export const E_ACCOUNT_NOT_MATCH = "Tài khoản hoặc mật khẩu không chính xác."; // "Incorrect username or password."
export const E_PASSWORD_NOT_MATCH = "Mật khẩu nhập lại không khớp."; // "Re-entered password does not match."
export const E_PASSWORD_NOT_CORRECT = "Mật khẩu không chính xác."; // "Incorrect password."
export const E_NOT_FOUND_QUIZ = "Không tìm thấy cuộc thi nào."; // "No quiz found."
export const E_QUIZ_IS_RUNNING =
  "Cuộc thi này đang diễn ra, không thể tham gia."; // "This quiz is in progress; cannot join."
export const E_QUESTIONS_NOT_FOUND =
  "Không có câu hỏi nào, hãy thêm câu hỏi trước."; // "No questions; add questions first."

/* Text ERROR */
export const E_UNKNOWN_ERROR_DETECT =
  "Lỗi không xác định, vui lòng thử lại sau."; // "Unknown error; try again later."
export const E_SERVER_ERROR_CONNECTING =
  "Error connecting system to server. Please reload and try again.";
|
barslev/appgine
|
src/lib/createFragment.js
|
import { dom } from '../closure'
// Parse a full HTML document string into a detached container element,
// preserving <head>/<body> structure and copying any attributes found on
// the <html> tag onto the container.
export default function createFragment(htmlString) {
	const $fragment = _createFragmentElement();

	try {
		// Fast path: let the browser parse the whole document at once.
		_createFragment($fragment, htmlString);

	} catch (e) {
		// Fallback: extract <head> and <body> individually when the
		// browser rejects whole-document parsing.
		_createFragmentPartially($fragment, htmlString);
	}

	// Restore script bodies: the __END_SCRIPT_TAG__ placeholder stands in
	// for a literal '<'. NOTE(review): the placeholder's insertion point
	// is elsewhere in the codebase -- confirm the contract before changing.
	Array.from($fragment.querySelectorAll('script')).forEach(function($script) {
		$script.textContent = String($script.textContent).replace('__END_SCRIPT_TAG__', '<');
	});

	// Carry over attributes from the source document's <html> tag.
	const matched = htmlString.match(/<html\s+([^>]+)>/);
	if (matched) {
		copyAttributes($fragment, matched[1]);
	}

	return $fragment;
}
// Container for the parsed document. NOTE(review): an <iframe> element is
// used here rather than a <div> -- presumably because it tolerates
// <head>/<body> children without the browser relocating them; confirm
// before changing the element type.
function _createFragmentElement() {
	return document.createElement('iframe');
}
// Parse htmlString via an <html> element's innerHTML and move all of its
// children into $fragment. Throws when the browser rejects the markup.
function _createFragment($fragment, htmlString) {
	const $root = document.createElement('html');
	$root.innerHTML = htmlString;

	let $child;
	while (($child = $root.firstChild)) {
		// appendChild detaches the node from $root, advancing firstChild.
		$fragment.appendChild($child);
	}
}
// Fallback parser: extract <head> and <body> separately and append
// whichever parts could be recovered from the markup.
function _createFragmentPartially($fragment, htmlString) {
	for (const part of ['head', 'body']) {
		const $part = _createFragmentPart(htmlString, part);
		if ($part) {
			$fragment.appendChild($part);
		}
	}
}
// Extract the <part>...</part> section (part is 'head' or 'body') from
// htmlString into a freshly created element of that tag, copying the
// opening tag's attributes onto it. Returns undefined when the opening
// tag is not found. NOTE(review): `end` is computed before the pos>=0
// check; a missing closing tag yields a short/odd substring -- confirm
// inputs always contain both tags.
function _createFragmentPart(htmlString, part) {
	const pos = htmlString.indexOf('<' + part);
	const end = htmlString.indexOf('</'+part+'>')+3+part.length;

	if (pos>=0) {
		const string = htmlString.substr(pos, end-pos);

		// Move the parsed children into a proper <part> element.
		const $part = document.createElement(part);
		const $temp = document.createElement('div');
		$temp.innerHTML = string;

		while ($temp.firstChild) {
			$part.appendChild($temp.firstChild);
		}

		// Re-parse the opening tag's attribute list via a dummy <div>
		// and apply the attributes through the closure dom helper.
		const matched = string.match(/^<[a-z]+([^>]*)>/);
		const $attrs = document.createElement('div');
		$attrs.innerHTML = '<div ' + matched[1] + '></div>';

		const attrs = {};
		Array.from($attrs.children[0].attributes).
			forEach(attr => attrs[attr.name] = attr.value);

		dom.setProperties($part, attrs);
		return $part;
	}
}
// Copy attributes from a raw attribute string (e.g. `lang="en" dir="ltr"`)
// onto $element. Targets without setAttribute and any parse errors are
// silently ignored.
function copyAttributes($element, attrs) {
	try {
		if (!$element.setAttribute) {
			return;
		}

		// Let the browser parse the attribute string on a dummy element.
		const holder = document.createElement('div');
		holder.innerHTML = '<div '+ attrs +'></div>';

		const parsed = Array.from(holder.firstChild.attributes);
		parsed.forEach(attr => $element.setAttribute(attr.name, attr.value));
	} catch (e) {}
}
|
sopra-fs22-group-16/sopra-fs22-group-16-server
|
src/test/java/ch/uzh/ifi/hase/soprafs22/repository/UserRepositoryIntegrationTest.java
|
<filename>src/test/java/ch/uzh/ifi/hase/soprafs22/repository/UserRepositoryIntegrationTest.java
package ch.uzh.ifi.hase.soprafs22.repository;
import ch.uzh.ifi.hase.soprafs22.user.RegisteredUser;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
import org.springframework.boot.test.autoconfigure.orm.jpa.TestEntityManager;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
@DataJpaTest
/**
 * JPA slice test verifying that UserRepository#findRegisteredUserByToken
 * returns a persisted user with all fields intact.
 */
class UserRepositoryIntegrationTest {

    @Autowired
    private TestEntityManager entityManager;

    @Autowired
    private UserRepository userRepository;

    @Test
    void findByToken_success() {
        // given: a fully populated user persisted directly via the entity manager
        RegisteredUser registeredUser = new RegisteredUser();
        registeredUser.setToken("token");
        registeredUser.setUsername("username");
        registeredUser.setPassword("password");
        registeredUser.setWins(10);
        registeredUser.setLosses(12);
        registeredUser.setRankedScore(120);

        entityManager.persist(registeredUser);
        entityManager.flush();

        // when: looking the user up by token
        RegisteredUser found = userRepository.findRegisteredUserByToken(registeredUser.getToken());

        // then: an id was generated and every field round-tripped
        assertNotNull(found.getId());
        assertEquals(registeredUser.getUsername(), found.getUsername());
        assertEquals(registeredUser.getToken(), found.getToken());
        assertEquals(registeredUser.getPassword(), found.getPassword());
        assertEquals(registeredUser.getWins(), found.getWins());
        assertEquals(registeredUser.getLosses(), found.getLosses());
        assertEquals(registeredUser.getRankedScore(), found.getRankedScore());
    }
}
|
monogon-dev/intellij-cue
|
src/test/java/dev/monogon/cue/lang/psi/impl/CueMultilineBytesLiteralMixinTest.java
|
<reponame>monogon-dev/intellij-cue
package dev.monogon.cue.lang.psi.impl;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.PsiLanguageInjectionHost;
import dev.monogon.cue.CueLightTest;
import dev.monogon.cue.lang.psi.CueMultilineBytesLit;
import org.junit.Test;
/**
 * Tests for the multiline bytes literal PSI mixin: content-range
 * calculation and in-place text replacement (language injection host),
 * including #-padded literals, empty literals, and escaping behavior.
 */
public class CueMultilineBytesLiteralMixinTest extends CueLightTest {
    /** Content range excludes the ''' delimiters. */
    @Test
    public void contentRange() {
        createCueFile("'''content<caret>'''");

        var string = findTypedElement(CueMultilineBytesLit.class);
        assertEquals(TextRange.create(3, 10), string.getLiteralContentRange());
    }

    /** An empty literal yields an empty range after the opening delimiter. */
    @Test
    public void contentRangeEmpty() {
        createCueFile("'''\n'''");

        var string = findTypedElement(CueMultilineBytesLit.class);
        assertTrue(string.isValidHost());
        assertEquals(TextRange.create(4, 4), string.getLiteralContentRange());
    }

    /** ##...## padding shifts the content range accordingly. */
    @Test
    public void contentRangePadded() {
        createCueFile("##'''content<caret>'''##");

        var string = findTypedElement(CueMultilineBytesLit.class);
        assertEquals(TextRange.create(5, 12), string.getLiteralContentRange());
    }

    /** updateText replaces the content, preserving delimiters. */
    @Test
    public void updateText() {
        createCueFile("'''\ncontent<caret>\n'''");

        var string = findTypedElement(CueMultilineBytesLit.class);
        var newString1 = WriteCommandAction.runWriteCommandAction(getProject(), (Computable<PsiLanguageInjectionHost>)() -> {
            return string.updateText("new");
        });
        assertTrue(newString1 instanceof CueMultilineBytesLit);
        assertEquals("'''\nnew\n'''", newString1.getText());

        var newEmptyString = WriteCommandAction.runWriteCommandAction(getProject(), (Computable<PsiLanguageInjectionHost>)() -> {
            return string.updateText("");
        });
        assertTrue(newEmptyString instanceof CueMultilineBytesLit);
        assertEquals("'''\n'''", newEmptyString.getText());
    }

    @Test
    public void updateTextEscaped() {
        createCueFile("'''\ncontent<caret>\n'''");

        var string = findTypedElement(CueMultilineBytesLit.class);

        // updating with text that requires escaping must insert escaped characters
        var replacement = WriteCommandAction.runWriteCommandAction(getProject(), (Computable<PsiLanguageInjectionHost>)() -> {
            return string.updateText("new\t\tcontent\nnext line");
        });
        assertTrue(replacement instanceof CueMultilineBytesLit);
        assertEquals("'''\nnew\t\tcontent\nnext line\n'''", replacement.getText());
    }

    @Test
    public void updateTextEscapedPadded() {
        createCueFile("###'''\ncontent<caret>\n'''###");

        var string = findTypedElement(CueMultilineBytesLit.class);

        // updating with text that requires escaping must insert escaped characters
        var replacement = WriteCommandAction.runWriteCommandAction(getProject(), (Computable<PsiLanguageInjectionHost>)() -> {
            return string.updateText("new\t\tcontent\nnext line");
        });
        assertTrue(replacement instanceof CueMultilineBytesLit);
        assertEquals("###'''\nnew\t\tcontent\nnext line\n'''###", replacement.getText());
    }

    /** Replacing the content of an empty padded literal keeps it a valid host. */
    @Test
    public void updateTextEmpty() {
        createCueFile("###'''\n'''###");

        var string = findTypedElement(CueMultilineBytesLit.class);
        assertTrue(string.isValidHost());

        // updating with text that requires escaping must insert escaped characters
        var replacement = WriteCommandAction.runWriteCommandAction(getProject(), (Computable<PsiLanguageInjectionHost>)() -> {
            return string.updateText("line\nline");
        });
        assertTrue(replacement instanceof CueMultilineBytesLit);
        assertTrue(replacement.isValidHost());
        assertEquals("###'''\nline\nline\n'''###", replacement.getText());
    }

    /** Backslash sequences in the new text are themselves escaped. */
    @Test
    public void updateTextUnicode() {
        createCueFile("'''\ncontent<caret>\n'''");

        var string = findTypedElement(CueMultilineBytesLit.class);
        var replacement = WriteCommandAction.runWriteCommandAction(getProject(), (Computable<PsiLanguageInjectionHost>)() -> {
            return string.updateText("\\u65e5本\\U00008a9e");
        });
        assertTrue(replacement instanceof CueMultilineBytesLit);
        assertEquals("'''\n\\\\u65e5本\\\\U00008a9e\n'''", replacement.getText());
    }
}
|
phoenixeliot/design-system
|
src/MLIcon/RightSquareOutlined.js
|
import { createWrappedMLIcon } from './icon-wrappers'
import AntRightSquareOutlined from '@ant-design/icons/RightSquareOutlined'

// Wrap the Ant Design icon so it picks up the design-system defaults
// applied by createWrappedMLIcon.
const RightSquareOutlined = createWrappedMLIcon(AntRightSquareOutlined)

export default RightSquareOutlined
|
JosephClay/ship-barrage
|
server/controllers/home.js
|
import fs from 'fs';
import path from 'path';
import { minify } from 'html-minifier';
import { IS_PROD } from '../config';
const pth = path.resolve(process.cwd(), './server/views/index.html');
const html = fs.readFileSync(pth).toString();
const page = IS_PROD ? minify(html, {
// https://github.com/kangax/html-minifier
collapseInlineTagWhitespace: true,
collapseWhitespace: true,
minifyURLs: true,
removeAttributeQuotes: true,
removeOptionalTags: true,
removeComments: true,
removeScriptTypeAttributes: true,
removeStyleLinkTypeAttributes: true,
}) : html;
export default async function home(req, res) {
res.send(page);
};
|
dev4rweb/ns-market
|
resources/js/store/actions/transactionTypesApi.js
|
<gh_stars>0
import {WORK_HOST} from "../routeConsts";
export const getAllTransactionTypesApi = () => {
return axios.get(`${WORK_HOST}market/transaction-types`)
.then(res => res).catch(err => err)
};
export const createTransactionTypeApi = type => {
return axios.post(`${WORK_HOST}market/transaction-types`, {
...type
}).then(res => res).catch(err => err)
};
export const updateTransactionTypeApi = type => {
return axios.post(`${WORK_HOST}market/transaction-types/${type.id}`, {
_method: 'PUT',
...type
}).then(res => res).catch(err => err)
};
export const removeTransactionTypeApi = id => {
return axios.post(`${WORK_HOST}market/transaction-types/${id}`, {
_method: 'DELETE',
}).then(res => res).catch(err => err)
};
|
ollfkaih/lego
|
lego/apps/comments/notifications.py
|
<reponame>ollfkaih/lego<gh_stars>10-100
from lego.apps.notifications.constants import COMMENT, COMMENT_REPLY
from lego.apps.notifications.notification import Notification
class CommentNotification(Notification):
name = COMMENT
def generate_mail(self):
target = self.kwargs["target"]
target_string = str(target)
author = self.kwargs["author"]
text = self.kwargs["text"]
return self._delay_mail(
to_email=self.user.email,
context={
"name": self.user.full_name,
"target": target_string,
"author_name": author.full_name,
"text": text,
"url": target.get_absolute_url(),
},
subject=f"{author.full_name} har kommentert på {target_string}",
plain_template="comments/email/comment.txt",
html_template="comments/email/comment.html",
)
class CommentReplyNotification(Notification):
name = COMMENT_REPLY
def generate_mail(self):
target = self.kwargs["target"]
target_string = str(target)
author = self.kwargs["author"]
text = self.kwargs["text"]
return self._delay_mail(
to_email=self.user.email,
context={
"name": self.user.full_name,
"target": target_string,
"author_name": author.full_name,
"text": text,
"url": target.get_absolute_url(),
},
subject=f"{author.full_name} har svart på kommentaren din på {target_string}",
plain_template="comments/email/comment_reply.txt",
html_template="comments/email/comment_reply.html",
)
|
Nickinthebox/polyfill-library
|
polyfills/ArrayBuffer/isView/polyfill.js
|
<filename>polyfills/ArrayBuffer/isView/polyfill.js<gh_stars>1-10
/* global ArrayBuffer, CreateMethodProperty, Type */
// 25.1.4.1 ArrayBuffer.isView ( arg )
CreateMethodProperty(ArrayBuffer, 'isView', function isView (arg) {
// 1. If Type(arg) is not Object, return false.
if (Type(arg) !== 'object') {
return false;
}
// 2. If arg has a [[ViewedArrayBuffer]] internal slot, return true.
// 3. Return false.
return arg.buffer instanceof ArrayBuffer;
});
|
Phillyclause89/reddit_scripts
|
Code_from_folks_on_discord/eleven_game/main.py
|
<filename>Code_from_folks_on_discord/eleven_game/main.py
import random
import eleven
class HumanPlayer(eleven.Player):
def get_move(self):
eleven.clear_screen()
print(self.board, self.score)
while True:
direction = input('Enter a move: ')
is_valid = self.board.is_valid_direction(direction.lower())
if is_valid:
return direction
print("Invalid input!")
def play(self):
player_won = super().play()
if player_won:
print('You won')
else:
print('Game over')
print(self.board, self.score)
class ComputerPlayer(eleven.Player):
def get_move(self):
return random.choice(self.board.valid_moves())
class ComputerPlayer2(eleven.Player):
def get_move(self):
moves = {}
for move in self.board.valid_moves():
_, point = self.board.calculate_move(move)
moves[point] = move
best_move_scores = [m for m in moves if m == max(moves)]
if len(best_move_scores) > 1:
return moves[best_move_scores[random.randint(len(best_move_scores))]]
return moves[best_move_scores[0]]
if __name__ == "__main__":
print(help(eleven.test_player))
# noinspection PyTypeChecker
print(eleven.test_player(ComputerPlayer))
# noinspection PyTypeChecker
print(eleven.test_player(ComputerPlayer2))
HumanPlayer().play()
# Code from showing the student how to us SPyObject to test their code
# # import spyobject.SPyObject as SPY
# from spyobject import SPyObject as SPY
#
# # Assign our tested class to a variable
# player_obj = ComputerPlayer2()
# # Call our spy on that object assigned to the variable
# SPY(player_obj, globals()).obj_info()
# # used the .get_move() method to return its output to another variable
# move = player_obj.get_move()
# # now spy that variable to see what it has returned
# SPY(move, globals()).obj_info()
# print(SPY(player_obj, globals()).attributes)
|
anniekfifer/tpau-gtfsutils
|
tpau_gtfsutilities/utilities/__init__.py
|
from . import averageheadway
from . import gtfsutility
from . import stopvisits
from . import interpolate_stoptimes
|
rfan-debug/java-aiplatform
|
proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/IoProto.java
|
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/io.proto
package com.google.cloud.aiplatform.v1;
public final class IoProto {
private IoProto() {}
public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {}
public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) {
registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry);
}
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1_AvroSource_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1_AvroSource_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1_CsvSource_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1_CsvSource_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1_GcsSource_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1_GcsSource_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1_GcsDestination_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1_GcsDestination_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1_BigQuerySource_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1_BigQuerySource_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1_BigQueryDestination_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1_BigQueryDestination_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1_CsvDestination_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1_CsvDestination_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1_TFRecordDestination_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1_TFRecordDestination_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1_ContainerRegistryDestination_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1_ContainerRegistryDestination_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor descriptor;
static {
java.lang.String[] descriptorData = {
"\n#google/cloud/aiplatform/v1/io.proto\022\032g"
+ "oogle.cloud.aiplatform.v1\032\037google/api/fi"
+ "eld_behavior.proto\032\034google/api/annotatio"
+ "ns.proto\"L\n\nAvroSource\022>\n\ngcs_source\030\001 \001"
+ "(\0132%.google.cloud.aiplatform.v1.GcsSourc"
+ "eB\003\340A\002\"K\n\tCsvSource\022>\n\ngcs_source\030\001 \001(\0132"
+ "%.google.cloud.aiplatform.v1.GcsSourceB\003"
+ "\340A\002\"\036\n\tGcsSource\022\021\n\004uris\030\001 \003(\tB\003\340A\002\"0\n\016G"
+ "csDestination\022\036\n\021output_uri_prefix\030\001 \001(\t"
+ "B\003\340A\002\"(\n\016BigQuerySource\022\026\n\tinput_uri\030\001 \001"
+ "(\tB\003\340A\002\".\n\023BigQueryDestination\022\027\n\noutput"
+ "_uri\030\001 \001(\tB\003\340A\002\"Z\n\016CsvDestination\022H\n\017gcs"
+ "_destination\030\001 \001(\0132*.google.cloud.aiplat"
+ "form.v1.GcsDestinationB\003\340A\002\"_\n\023TFRecordD"
+ "estination\022H\n\017gcs_destination\030\001 \001(\0132*.go"
+ "ogle.cloud.aiplatform.v1.GcsDestinationB"
+ "\003\340A\002\"7\n\034ContainerRegistryDestination\022\027\n\n"
+ "output_uri\030\001 \001(\tB\003\340A\002B\313\001\n\036com.google.clo"
+ "ud.aiplatform.v1B\007IoProtoP\001ZDgoogle.gola"
+ "ng.org/genproto/googleapis/cloud/aiplatf"
+ "orm/v1;aiplatform\252\002\032Google.Cloud.AIPlatf"
+ "orm.V1\312\002\032Google\\Cloud\\AIPlatform\\V1\352\002\035Go"
+ "ogle::Cloud::AIPlatform::V1b\006proto3"
};
descriptor =
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
com.google.api.FieldBehaviorProto.getDescriptor(),
com.google.api.AnnotationsProto.getDescriptor(),
});
internal_static_google_cloud_aiplatform_v1_AvroSource_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_google_cloud_aiplatform_v1_AvroSource_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_AvroSource_descriptor,
new java.lang.String[] {
"GcsSource",
});
internal_static_google_cloud_aiplatform_v1_CsvSource_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_google_cloud_aiplatform_v1_CsvSource_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_CsvSource_descriptor,
new java.lang.String[] {
"GcsSource",
});
internal_static_google_cloud_aiplatform_v1_GcsSource_descriptor =
getDescriptor().getMessageTypes().get(2);
internal_static_google_cloud_aiplatform_v1_GcsSource_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_GcsSource_descriptor,
new java.lang.String[] {
"Uris",
});
internal_static_google_cloud_aiplatform_v1_GcsDestination_descriptor =
getDescriptor().getMessageTypes().get(3);
internal_static_google_cloud_aiplatform_v1_GcsDestination_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_GcsDestination_descriptor,
new java.lang.String[] {
"OutputUriPrefix",
});
internal_static_google_cloud_aiplatform_v1_BigQuerySource_descriptor =
getDescriptor().getMessageTypes().get(4);
internal_static_google_cloud_aiplatform_v1_BigQuerySource_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_BigQuerySource_descriptor,
new java.lang.String[] {
"InputUri",
});
internal_static_google_cloud_aiplatform_v1_BigQueryDestination_descriptor =
getDescriptor().getMessageTypes().get(5);
internal_static_google_cloud_aiplatform_v1_BigQueryDestination_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_BigQueryDestination_descriptor,
new java.lang.String[] {
"OutputUri",
});
internal_static_google_cloud_aiplatform_v1_CsvDestination_descriptor =
getDescriptor().getMessageTypes().get(6);
internal_static_google_cloud_aiplatform_v1_CsvDestination_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_CsvDestination_descriptor,
new java.lang.String[] {
"GcsDestination",
});
internal_static_google_cloud_aiplatform_v1_TFRecordDestination_descriptor =
getDescriptor().getMessageTypes().get(7);
internal_static_google_cloud_aiplatform_v1_TFRecordDestination_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_TFRecordDestination_descriptor,
new java.lang.String[] {
"GcsDestination",
});
internal_static_google_cloud_aiplatform_v1_ContainerRegistryDestination_descriptor =
getDescriptor().getMessageTypes().get(8);
internal_static_google_cloud_aiplatform_v1_ContainerRegistryDestination_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1_ContainerRegistryDestination_descriptor,
new java.lang.String[] {
"OutputUri",
});
com.google.protobuf.ExtensionRegistry registry =
com.google.protobuf.ExtensionRegistry.newInstance();
registry.add(com.google.api.FieldBehaviorProto.fieldBehavior);
com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor(
descriptor, registry);
com.google.api.FieldBehaviorProto.getDescriptor();
com.google.api.AnnotationsProto.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
}
|
vtfn/tolb
|
src/list/pluck_test.js
|
/* eslint-env mocha */
import A from 'assert';
import pluck from './pluck';
import config from '../../config/constants';
describe('list.pluck(prop, list)', () => {
const list = [
{ name: 'foo', age: 11 },
{ name: 'bar', age: 12 },
{ age: 21 },
{ name: 'baz', age: 13 },
];
it('returns an array with the values extracted from "prop" of each object in "list"', () => {
A.deepEqual(pluck('name', list), ['foo', 'bar', undefined, 'baz']);
});
it('throws if "prop" is not a string', () => {
A.throws(() => pluck([], list), config.EXPECTED_STRING_ERRMSG);
});
it('allows partial application', () => {
A.deepEqual(pluck('age')(list), [11, 12, 21, 13]);
});
});
|
dberga/FACIL
|
src/networks/old/twobranch_concat2.py
|
<reponame>dberga/FACIL
from networks.twobranch import twobranch
import torch
import utils
from copy import deepcopy
class twobranch_concat2(twobranch):
def forward(self,x):
x1=deepcopy(x)
if self.scramble is True:
x1=utils.batch2scramble(deepcopy(x))
x2=utils.batch2gray(deepcopy(x),transform_type=self.togray)
return self.forward_concat(x1,x2)
def forward_concat(self,x1,x2):
features = self.feature_extractor(x1)
features = features.view(features.size(0),-1) #flatten
features2 = self.feature_extractor2(x2)
features2 = features2.view(features2.size(0),-1) #flatten
grouped = torch.cat((features,features2),dim=0)
output = grouped.view(grouped.size(0),-1) #flatten
output = self.fc(grouped)
return output
|
liaojh1998/cross-modal-concept2robot
|
simulation/env_27.py
|
<reponame>liaojh1998/cross-modal-concept2robot
#!/usr/bin/env python3
import time
import math
from datetime import datetime
from time import sleep
import numpy as np
import random
import cv2
import os
import argparse
import torch
import sys
sys.path.append('./')
from env import Engine
from utils_env import get_view,safe_path,cut_frame,point2traj,get_gripper_pos,backup_code
class Engine27(Engine):
def __init__(self, worker_id, opti, p_id, taskId=5, maxSteps=15, n_dmps=3, cReward=True):
super(Engine27,self).__init__(opti, wid=worker_id, p_id=p_id, maxSteps=maxSteps, taskId=taskId, n_dmps=n_dmps, cReward=cReward,robot_model=None)
self.opti = opti
self._wid = worker_id
self.robot.gripperMaxForce = 10000.0
self.robot.armMaxForce = 200.0
self.robot.jd = [0.01] * 14
self.p.setPhysicsEngineParameter(enableConeFriction=1)
self.p.setPhysicsEngineParameter(contactBreakingThreshold=0.001)
self.p.setPhysicsEngineParameter(allowedCcdPenetration=0.0)
self.p.setPhysicsEngineParameter(numSolverIterations=20)
self.p.setPhysicsEngineParameter(numSubSteps=10)
self.p.setPhysicsEngineParameter(constraintSolverType=self.p.CONSTRAINT_SOLVER_LCP_DANTZIG,globalCFM=0.000001)
self.p.setPhysicsEngineParameter(enableFileCaching=0)
self.p.setGravity(0,0,-9.81)
def reset_new(self):
self.seq_num = 0
self.init_dmp()
self.init_motion ()
self.init_rl ()
self.reset_obj ()
self.init_grasp ()
return self.get_observation()
def init_obj(self):
self.obj_id = self.p.loadURDF( os.path.join(self.resources_dir, "urdf/obj_libs/bottles/b1/b1.urdf"))
self.p.changeVisualShape (self.obj_id, -1, rgbaColor=[1.,0.,0.,1])
def reset_obj(self):
self.p.resetBasePositionAndOrientation(self.obj_id,[0.3637 + 0.06, -0.05, 0.34],[0, 0, -0.1494381, 0.9887711])
obj_friction_ceof = 20000.0
self.p.changeDynamics(self.obj_id, -1, mass=0.9)
self.p.changeDynamics(self.obj_id, -1, lateralFriction=obj_friction_ceof)
self.p.changeDynamics(self.obj_id, -1, rollingFriction=obj_friction_ceof)
self.p.changeDynamics(self.obj_id, -1, spinningFriction=obj_friction_ceof)
self.p.changeDynamics(self.obj_id, -1, linearDamping=40.0)
self.p.changeDynamics(self.obj_id, -1, angularDamping=1.0)
self.p.changeDynamics(self.obj_id, -1, contactStiffness=1.0, contactDamping=0.9)
table_friction_ceof = 0.4
self.p.changeDynamics(self.table_id, -1, lateralFriction=table_friction_ceof)
self.p.changeDynamics(self.table_id, -1, rollingFriction=table_friction_ceof)
self.p.changeDynamics(self.table_id, -1, spinningFriction=table_friction_ceof)
self.p.changeDynamics(self.table_id, -1, contactStiffness=1.0, contactDamping=0.9)
def init_motion(self):
self.data_q = np.load (os.path.join(self.robot_recordings_dir,"47-4/q.npy"))
self.data_gripper = np.load (self.configs_dir + '/init/gripper.npy')
self.robot.setJointValue(self.data_q[0],gripper=self.data_gripper[0])
def init_grasp(self):
self.robot.gripperControl(0)
qlist = np.load( os.path.join(self.robot_recordings_dir, "47-4/q.npy"))
glist = np.load( os.path.join(self.robot_recordings_dir, "47-4/gripper.npy"))
num_q = len(qlist[0])
self.fix_orn = np.load (os.path.join (self.configs_dir, 'init', 'orn.npy'))
self.robot.setJointValue(qlist[40],glist[40])
for i in range(40,180,1):
glist[i] = min(130,glist[i])
self.robot.jointPositionControl(qlist[i],gripper=glist[i])
pos = self.p.getLinkState (self.robotId, 7)[0]
up_traj = point2traj([pos, [pos[0], pos[1], pos[2]+0.3]])
self.start_pos = self.p.getLinkState (self.robotId, 7)[0]
cur_joint = self.robot.getJointValue()
cur_pos = self.robot.getEndEffectorPos()
cur_orn = self.robot.getEndEffectorOrn()
pos_diff = np.random.uniform(-0.1,0.1,size=(2,))
cur_pos[:2] = cur_pos[:2] + pos_diff
cur_pos[2] += 0.02
for i in range(19):
self.robot.positionControl(cur_pos,cur_orn,null_pose=cur_joint,gripperPos=130)
self.init_pos = self.p.getBasePositionAndOrientation(self.obj_id)[0]
def get_success (self,seg=None):
pos1 = self.p.getBasePositionAndOrientation(self.obj_id)[0]
pos1 = np.array(pos1)
if pos1[2] - self.init_pos[2] > 0.15:
return True
else:
return False
|
lechium/iOS1351Headers
|
System/Library/Frameworks/ARKit.framework/ARPersonOcclusionParentTechnique.h
|
/*
* This header is generated by classdump-dyld 1.5
* on Wednesday, October 27, 2021 at 3:16:35 PM Mountain Standard Time
* Operating System: Version 13.5.1 (Build 17F80)
* Image Source: /System/Library/Frameworks/ARKit.framework/ARKit
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by <NAME>. Updated by <NAME>.
*/
#import <ARKit/ARParentTechnique.h>
@interface ARPersonOcclusionParentTechnique : ARParentTechnique {
BOOL _shouldSkipFramesWhenBusy;
}
-(id)processData:(id)arg1 ;
-(id)initWithTechniques:(id)arg1 delegate:(id)arg2 ;
@end
|
smancill/clara-java
|
clara-api/src/main/java/org/jlab/clara/base/core/DataUtil.java
|
<gh_stars>0
/*
* SPDX-FileCopyrightText: © The Clara Framework Authors
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.jlab.clara.base.core;
import org.jlab.clara.base.ClaraUtil;
import org.jlab.clara.base.error.ClaraException;
import org.jlab.clara.engine.EngineData;
import org.jlab.clara.engine.EngineDataType;
import org.jlab.clara.engine.EngineStatus;
import org.jlab.clara.msg.core.Message;
import org.jlab.clara.msg.core.Topic;
import org.jlab.clara.msg.data.MetaDataProto.MetaData;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Set;
public final class DataUtil {
private static final EngineDataAccessor DATA_ACCESSOR = EngineDataAccessor.getDefault();
private DataUtil() { }
public static EngineData buildErrorData(String msg, int severity, Throwable exception) {
EngineData outData = new EngineData();
outData.setData(EngineDataType.STRING.mimeType(), msg);
outData.setDescription(ClaraUtil.reportException(exception));
outData.setStatus(EngineStatus.ERROR, severity);
return outData;
}
/**
* Convoluted way to access the internal EngineData metadata,
* which is hidden to users.
*
* @param data {@link org.jlab.clara.engine.EngineData} object
* @return {@link MetaData.Builder} object
*/
public static MetaData.Builder getMetadata(EngineData data) {
return DATA_ACCESSOR.getMetadata(data);
}
/**
* Builds a message by serializing passed data object using serialization
* routine defined in one of the data types objects.
*
* @param topic the topic where the data will be published
* @param data the data to be serialized
* @param dataTypes the set of registered data types
* @throws ClaraException if the data could not be serialized
*/
public static Message serialize(Topic topic,
EngineData data,
Set<EngineDataType> dataTypes)
throws ClaraException {
MetaData.Builder metadata = DATA_ACCESSOR.getMetadata(data);
String mimeType = metadata.getDataType();
for (EngineDataType dt : dataTypes) {
if (dt.mimeType().equals(mimeType)) {
try {
ByteBuffer bb = dt.serializer().write(data.getData());
if (bb.order() == ByteOrder.BIG_ENDIAN) {
metadata.setByteOrder(MetaData.Endian.Big);
} else {
metadata.setByteOrder(MetaData.Endian.Little);
}
return new Message(topic, metadata, bb.array());
} catch (ClaraException e) {
throw new ClaraException("Could not serialize " + mimeType, e);
}
}
}
if (mimeType.equals(EngineDataType.STRING.mimeType())) {
ByteBuffer bb = EngineDataType.STRING.serializer().write(data.getData());
return new Message(topic, metadata, bb.array());
}
throw new ClaraException("Unsupported mime-type = " + mimeType);
}
/**
* De-serializes data of the message {@link Message},
* represented as a byte[] into an object of az type defined using the mimeType/dataType
* of the meta-data (also as a part of the Message). Second argument is used to
* pass the serialization routine as a method of the
* {@link org.jlab.clara.engine.EngineDataType} object.
*
* @param msg {@link Message} object
* @param dataTypes set of {@link org.jlab.clara.engine.EngineDataType} objects
* @return {@link org.jlab.clara.engine.EngineData} object containing de-serialized data object
* and metadata
* @throws ClaraException
*/
public static EngineData deserialize(Message msg, Set<EngineDataType> dataTypes)
throws ClaraException {
MetaData.Builder metadata = msg.getMetaData();
String mimeType = metadata.getDataType();
for (EngineDataType dt : dataTypes) {
if (dt.mimeType().equals(mimeType)) {
try {
ByteBuffer bb = ByteBuffer.wrap(msg.getData());
if (metadata.getByteOrder() == MetaData.Endian.Little) {
bb.order(ByteOrder.LITTLE_ENDIAN);
}
Object userData = dt.serializer().read(bb);
return DATA_ACCESSOR.build(userData, metadata);
} catch (ClaraException e) {
throw new ClaraException("Clara-Error: Could not deserialize " + mimeType, e);
}
}
}
throw new ClaraException("Clara-Error: Unsupported mime-type = " + mimeType);
}
public abstract static class EngineDataAccessor {
private static volatile EngineDataAccessor defaultAccessor;
public static EngineDataAccessor getDefault() {
new EngineData(); // Load the accessor
EngineDataAccessor a = defaultAccessor;
if (a == null) {
throw new IllegalStateException("EngineDataAccessor should not be null");
}
return a;
}
public static void setDefault(EngineDataAccessor accessor) {
if (defaultAccessor != null) {
throw new IllegalStateException("EngineDataAccessor should be null");
}
defaultAccessor = accessor;
}
protected abstract MetaData.Builder getMetadata(EngineData data);
protected abstract EngineData build(Object data, MetaData.Builder metadata);
}
}
|
simcof/ringteki
|
server/game/cards/01-Core/Blackmail.js
|
const DrawCard = require('../../drawcard.js');
class Blackmail extends DrawCard {
setupCardAbilities(ability) {
this.action({
title: 'Take control of a character',
condition: () => this.game.isDuringConflict(),
target: {
cardType: 'character',
controller: 'opponent',
cardCondition: (card, context) => !card.anotherUniqueInPlay(context.player) && card.getCost() < 3,
gameAction: ability.actions.cardLastingEffect(context => ({
effect: ability.effects.takeControl(context.player)
}))
},
effect: 'take control of {0}'
});
}
canPlay(context) {
if(this.controller.opponent && this.controller.honor < this.controller.opponent.honor) {
return super.canPlay(context);
}
return false;
}
}
Blackmail.id = 'blackmail';
module.exports = Blackmail;
|
GasimGasimzada/liquid-engine
|
engine/src/liquid/scripting/EntityDecorator.h
|
<gh_stars>0
#pragma once
#include "liquid/entity/Entity.h"
#include "liquid/entity/EntityContext.h"
namespace liquid {
/**
* @brief Decorates scope with entity system
*/
class EntityDecorator {
public:
/**
* @brief Attach to scope
*
* @param scope Lua scope
* @param entity Entity
* @param entityContext Entity context
*/
void attachToScope(LuaScope &scope, Entity entity,
EntityContext &entityContext);
};
} // namespace liquid
|
fadygouda/cityspire-frontend
|
src/components/pages/PinnedCity/index.js
|
<reponame>fadygouda/cityspire-frontend
export { default as PinnedCityPage } from './PinnedCityContainer';
|
Vult-R/Astraeus-Framework
|
src/main/astraeus/game/model/entity/mobile/npc/drop/Chance.java
|
package main.astraeus.game.model.entity.mobile.npc.drop;
/**
* An enumeration of probable cases for an npc drop.
*
* @author SeVen <https://github.com/7winds>
*/
public enum Chance {
/**
* 100% 1/1
*/
ALWAYS(1),
/**
* 50%
*/
ALMOST_ALWAYS(2),
/**
* 20%
*/
VERY_COMMON(5),
/**
* 5%
*/
COMMON(20),
/**
* 2% 1/50
*/
UNCOMMON(50),
/**
* 1%
*/
VERY_UNCOMMON(100),
/**
* 0.5% 1/200
*/
RARE(200),
/**
* 0.35% 1/286
*/
VERY_RARE(286),
/**
* 0.2% 1/500
*/
EXTREMELY_RARE(500),
/**
* 0.1% 1/900
*/
LEGENDARY(900);
/**
* The rate at which an item will drop.
*/
private int rate;
private Chance(int rate) {
this.rate = rate;
}
/**
* Gets the rate at which an item drops.
*
* @return rate
*/
public int getRate() {
return rate;
}
}
|
HOT-Ecosystem/hapi-fhir
|
hapi-fhir-jaxrsserver-base/src/test/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsResourceProviderTest.java
|
<filename>hapi-fhir-jaxrsserver-base/src/test/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsResourceProviderTest.java
package ca.uhn.fhir.jaxrs.server;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jaxrs.client.JaxRsRestfulClientFactory;
import ca.uhn.fhir.jaxrs.server.interceptor.JaxRsResponseException;
import ca.uhn.fhir.jaxrs.server.test.TestJaxRsConformanceRestProvider;
import ca.uhn.fhir.jaxrs.server.test.TestJaxRsMockPageProvider;
import ca.uhn.fhir.jaxrs.server.test.TestJaxRsMockPatientRestProvider;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.dstu2.composite.IdentifierDt;
import ca.uhn.fhir.model.dstu2.resource.*;
import ca.uhn.fhir.model.primitive.DateDt;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.StringDt;
import ca.uhn.fhir.model.primitive.UriDt;
import ca.uhn.fhir.rest.api.*;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.client.api.ServerValidationModeEnum;
import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor;
import ca.uhn.fhir.rest.param.StringAndListParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.server.SimpleBundleProvider;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.util.TestUtil;
import org.apache.commons.lang3.StringUtils;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import org.junit.*;
import org.junit.runners.MethodSorters;
import org.mockito.ArgumentCaptor;
import org.mockito.Matchers;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.argThat;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.ArgumentMatchers.isNull;
import static org.mockito.Mockito.*;
import ca.uhn.fhir.test.utilities.JettyUtil;
/**
 * Unit tests for the JAX-RS resource provider plumbing: a mock Patient provider
 * is served over an embedded Jetty + RESTEasy stack, and a HAPI FHIR generic
 * client exercises each RESTful interaction (read, create, update, delete,
 * search, history, operations, validate).
 *
 * Methods run in name order because the shared static client/server state is
 * created once in {@link #setUpClass()}.
 */
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public class AbstractJaxRsResourceProviderTest {

    // Shared client/server fixtures, created once for the whole class.
    private static IGenericClient client;
    private static FhirContext ourCtx = FhirContext.forDstu2();
    private static final String PATIENT_NAME = "<NAME>";
    private static int ourPort;
    private static String serverBase;
    private static Server jettyServer;

    // Per-test mock provider and argument captors (reset before each test).
    private TestJaxRsMockPatientRestProvider mock;
    private ArgumentCaptor<IdDt> idCaptor;
    private ArgumentCaptor<String> conditionalCaptor;
    private ArgumentCaptor<Patient> patientCaptor;

    /** Asserts that the numeric id part of the resource equals {@code id}. */
    private void compareResultId(int id, IResource resource) {
        assertEquals(id, resource.getId().getIdPartAsLong().intValue());
    }

    /** Asserts the resource's full URL, relative to the server base. */
    private void compareResultUrl(String url, IResource resource) {
        assertEquals(url, resource.getId().getValueAsString().substring(serverBase.length() - 1));
    }

    @AfterClass
    public static void afterClassClearContext() throws Exception {
        JettyUtil.closeServer(jettyServer);
        TestUtil.clearAllStaticFieldsForUnitTest();
    }

    /** Builds a Patient with only the given id set. */
    private Patient createPatient(long id) {
        Patient theResource = new Patient();
        theResource.setId(new IdDt(id));
        return theResource;
    }

    /** Builds a Patient with the given id and version. */
    private Patient createPatient(long id, String version) {
        Patient theResource = new Patient();
        theResource.setId(new IdDt(id).withVersion(version));
        return theResource;
    }

    /** Builds Patients with ids firstId..lastId inclusive. */
    private List<Patient> createPatients(int firstId, int lastId) {
        List<Patient> result = new ArrayList<Patient>(lastId - firstId);
        for (long i = firstId; i <= lastId; i++) {
            result.add(createPatient(i));
        }
        return result;
    }

    /** Find By Id */
    @Test
    public void findUsingGenericClientById() {
        when(mock.find(any(IdDt.class))).thenReturn(createPatient(1));
        Patient result = client.read(Patient.class, "1");
        compareResultId(1, result);
        compareResultUrl("/Patient/1", result);
        reset(mock);
        // Second read uses the absolute URL returned by the first read.
        when(mock.find(withId(result.getId()))).thenReturn(createPatient(1));
        result = (Patient) client.read(new UriDt(result.getId().getValue()));
        compareResultId(1, result);
        compareResultUrl("/Patient/1", result);
    }

    @Before
    public void setUp() {
        this.mock = TestJaxRsMockPatientRestProvider.mock;
        idCaptor = ArgumentCaptor.forClass(IdDt.class);
        patientCaptor = ArgumentCaptor.forClass(Patient.class);
        conditionalCaptor = ArgumentCaptor.forClass(String.class);
        reset(mock);
    }

    /** Conditional Creates */
    @Test
    public void testConditionalCreate() throws Exception {
        Patient toCreate = createPatient(1);
        MethodOutcome outcome = new MethodOutcome();
        toCreate.getIdentifierFirstRep().setValue("myIdentifier");
        outcome.setResource(toCreate);
        // Expect the conditional-create URL to be forwarded verbatim to the provider.
        when(mock.create(patientCaptor.capture(), eq("/Patient?_format=json&identifier=2"))).thenReturn(outcome);
        client.setEncoding(EncodingEnum.JSON);
        MethodOutcome response = client.create().resource(toCreate).conditional()
                .where(Patient.IDENTIFIER.exactly().identifier("2")).prefer(PreferReturnEnum.REPRESENTATION).execute();
        assertEquals("myIdentifier", patientCaptor.getValue().getIdentifierFirstRep().getValue());
        IResource resource = (IResource) response.getResource();
        compareResultId(1, resource);
    }

    /** Conformance - Server */
    @Test
    public void testConformance() {
        final Conformance conf = client.fetchConformance().ofType(Conformance.class).execute();
        assertEquals(conf.getRest().get(0).getResource().get(0).getType(), "Patient");
    }

    @Test
    public void testCreatePatient() throws Exception {
        Patient toCreate = createPatient(1);
        MethodOutcome outcome = new MethodOutcome();
        toCreate.getIdentifierFirstRep().setValue("myIdentifier");
        outcome.setResource(toCreate);
        // Non-conditional create: the conditional-URL argument must be null.
        when(mock.create(patientCaptor.capture(), isNull(String.class))).thenReturn(outcome);
        client.setEncoding(EncodingEnum.JSON);
        final MethodOutcome response = client.create().resource(toCreate).prefer(PreferReturnEnum.REPRESENTATION)
                .execute();
        IResource resource = (IResource) response.getResource();
        compareResultId(1, resource);
        assertEquals("myIdentifier", patientCaptor.getValue().getIdentifierFirstRep().getValue());
    }

    @Test
    public void testDeletePatient() {
        when(mock.delete(idCaptor.capture(), conditionalCaptor.capture())).thenReturn(new MethodOutcome());
        final IBaseOperationOutcome results = client.delete().resourceById("Patient", "1").execute().getOperationOutcome();
        assertEquals("1", idCaptor.getValue().getIdPart());
    }

    @Test
    public void testConditionalDelete() {
        when(mock.delete(idCaptor.capture(), conditionalCaptor.capture())).thenReturn(new MethodOutcome());
        client.delete().resourceConditionalByType("Patient").where(Patient.IDENTIFIER.exactly().identifier("2")).execute();
        assertEquals("Patient?identifier=2&_format=json", conditionalCaptor.getValue());
    }

    /** Extended Operations */
    @Test
    public void testExtendedOperations() {
        // prepare mock
        Parameters resultParameters = new Parameters();
        resultParameters.addParameter().setName("return").setResource(createPatient(1)).setValue(new StringDt("outputValue"));
        when(mock.someCustomOperation(any(IdDt.class), eq(new StringDt("myAwesomeDummyValue")))).thenReturn(resultParameters);
        // Create the input parameters to pass to the server
        Parameters inParams = new Parameters();
        inParams.addParameter().setName("start").setValue(new DateDt("2001-01-01"));
        inParams.addParameter().setName("end").setValue(new DateDt("2015-03-01"));
        inParams.addParameter().setName("dummy").setValue(new StringDt("myAwesomeDummyValue"));
        // invoke
        Parameters outParams = client.operation().onInstance(new IdDt("Patient", "1")).named("$someCustomOperation")
                .withParameters(inParams).execute();
        // verify
        assertEquals("outputValue", ((StringDt) outParams.getParameter().get(0).getValue()).getValueAsString());
    }

    @Test
    public void testExtendedOperationsUsingGet() {
        // prepare mock
        Parameters resultParameters = new Parameters();
        resultParameters.addParameter().setName("return").setResource(createPatient(1)).setValue(new StringDt("outputValue"));
        when(mock.someCustomOperation(any(IdDt.class), eq(new StringDt("myAwesomeDummyValue")))).thenReturn(resultParameters);
        // Create the input parameters to pass to the server
        Parameters inParams = new Parameters();
        inParams.addParameter().setName("start").setValue(new DateDt("2001-01-01"));
        inParams.addParameter().setName("end").setValue(new DateDt("2015-03-01"));
        inParams.addParameter().setName("dummy").setValue(new StringDt("myAwesomeDummyValue"));
        // invoke -- same operation as above but transported via HTTP GET
        Parameters outParams = client.operation().onInstance(new IdDt("Patient", "1")).named("$someCustomOperation")
                .withParameters(inParams).useHttpGet().execute();
        // verify
        assertEquals("outputValue", ((StringDt) outParams.getParameter().get(0).getValue()).getValueAsString());
    }

    @Test
    public void testRead() {
        when(mock.find(idCaptor.capture())).thenReturn(createPatient(1));
        final Patient patient = client.read(Patient.class, "1");
        compareResultId(1, patient);
        compareResultUrl("/Patient/1", patient);
        assertEquals("1", idCaptor.getValue().getIdPart());
    }

    /** Search - Compartments */
    @Test
    public void testSearchCompartements() {
        when(mock.searchCompartment(any(IdDt.class))).thenReturn(Arrays.asList((IResource) createPatient(1)));
        Bundle response = client.search().forResource(Patient.class).withIdAndCompartment("1", "Condition")
                .returnBundle(ca.uhn.fhir.model.dstu2.resource.Bundle.class).execute();
        IResource resource = response.getEntry().get(0).getResource();
        compareResultId(1, resource);
        compareResultUrl("/Patient/1", resource);
    }

    /** */
    @Test
    public void testSearchPost() {
        when(mock.search(isNull(), isNull()))
                .thenReturn(createPatients(1, 13));
        Bundle result = client
                .search()
                .forResource("Patient")
                .usingStyle(SearchStyleEnum.POST)
                .returnBundle(Bundle.class).execute();
        IResource resource = result.getEntry().get(0).getResource();
        compareResultId(1, resource);
        compareResultUrl("/Patient/1", resource);
    }

    /** Search/Query - Type */
    @Test
    public void testSearchUsingGenericClientBySearch() {
        // Perform a search
        when(mock.search(any(StringParam.class), Matchers.isNull(StringAndListParam.class)))
                .thenReturn(Arrays.asList(createPatient(1)));
        Bundle results = client.search().forResource(Patient.class)
                .where(Patient.NAME.matchesExactly().value(PATIENT_NAME)).returnBundle(Bundle.class).execute();
        verify(mock).search(any(StringParam.class), Matchers.isNull(StringAndListParam.class));
        IResource resource = results.getEntry().get(0).getResource();
        compareResultId(1, resource);
        compareResultUrl("/Patient/1", resource);
    }

    /** Search - Multi-valued Parameters (ANY/OR) */
    @Test
    public void testSearchUsingGenericClientBySearchWithMultiValues() {
        when(mock.search(any(StringParam.class), any(StringAndListParam.class)))
                .thenReturn(Arrays.asList(createPatient(1)));
        Bundle results = client.search().forResource(Patient.class)
                .where(Patient.ADDRESS.matches().values("Toronto")).and(Patient.ADDRESS.matches().values("Ontario"))
                .and(Patient.ADDRESS.matches().values("Canada"))
                .where(Patient.NAME.matches().value("SHORTNAME")).returnBundle(Bundle.class).execute();
        IResource resource = results.getEntry().get(0).getResource();
        compareResultId(1, resource);
        compareResultUrl("/Patient/1", resource);
    }

    /** Search - Paging */
    @Test
    public void testSearchWithPaging() {
        // Perform a search: 13 patients with a page size of 8 forces a "next" link.
        when(mock.search(isNull(), isNull()))
                .thenReturn(createPatients(1, 13));
        final Bundle results = client.search().forResource(Patient.class).limitTo(8).returnBundle(Bundle.class)
                .execute();
        assertEquals(results.getEntry().size(), 8);
        IResource resource = results.getEntry().get(0).getResource();
        compareResultId(1, resource);
        compareResultUrl("/Patient/1", resource);
        compareResultId(8, results.getEntry().get(7).getResource());
        // ourLog.info("Next: " + results.getLink("next").getUrl());
        // String url = results.getLink("next").getUrl().replace("?", "Patient?");
        // results.getLink("next").setUrl(url);
        // ourLog.info("New Next: " + results.getLink("next").getUrl());
        // load next page
        final Bundle nextPage = client.loadPage().next(results).execute();
        resource = nextPage.getEntry().get(0).getResource();
        compareResultId(9, resource);
        compareResultUrl("/Patient/9", resource);
        assertNull(nextPage.getLink(Bundle.LINK_NEXT));
    }

    /** Search - Subsetting (_summary and _elements) */
    @Test
    @Ignore
    public void testSummary() {
        Object response = client.search().forResource(Patient.class)
                .returnBundle(ca.uhn.fhir.model.dstu2.resource.Bundle.class).execute();
    }

    @Test
    public void testUpdateById() throws Exception {
        when(mock.update(idCaptor.capture(), patientCaptor.capture(), conditionalCaptor.capture())).thenReturn(new MethodOutcome());
        client.update("1", createPatient(1));
        assertEquals("1", idCaptor.getValue().getIdPart());
        compareResultId(1, patientCaptor.getValue());
    }

    @Test
    public void testConditionalUpdate() throws Exception {
        when(mock.update(idCaptor.capture(), patientCaptor.capture(), conditionalCaptor.capture())).thenReturn(new MethodOutcome());
        client.update().resource(createPatient(1)).conditional().where(Patient.IDENTIFIER.exactly().identifier("2")).execute();
        // For a conditional update the resource id must not be sent.
        assertEquals(null, patientCaptor.getValue().getId().getIdPart());
        assertEquals("Patient?identifier=2&_format=json", conditionalCaptor.getValue());
    }

    @SuppressWarnings("unchecked")
    @Ignore
    @Test
    public void testResourceNotFound() throws Exception {
        when(mock.update(idCaptor.capture(), patientCaptor.capture(), conditionalCaptor.capture())).thenThrow(ResourceNotFoundException.class);
        try {
            client.update("1", createPatient(2));
            fail();
        } catch (ResourceNotFoundException e) {
            // good
        }
    }

    @Test
    public void testVRead() {
        when(mock.findVersion(idCaptor.capture())).thenReturn(createPatient(1));
        final Patient patient = client.vread(Patient.class, "1", "2");
        compareResultId(1, patient);
        compareResultUrl("/Patient/1", patient);
        assertEquals("1", idCaptor.getValue().getIdPart());
        assertEquals("2", idCaptor.getValue().getVersionIdPart());
    }

    @Test
    public void testInstanceHistory() {
        when(mock.getHistoryForInstance(idCaptor.capture())).thenReturn(new SimpleBundleProvider(Collections.singletonList(createPatient(1, "1"))));
        final Bundle bundle = client.history().onInstance(new IdDt("Patient", 1L)).returnBundle(Bundle.class).execute();
        Patient patient = (Patient) bundle.getEntryFirstRep().getResource();
        compareResultId(1, patient);
        compareResultUrl("/Patient/1/_history/1", patient);
        assertEquals("1", idCaptor.getValue().getIdPart());
        assertNull(idCaptor.getValue().getVersionIdPart());
    }

    @Test
    public void testTypeHistory() {
        when(mock.getHistoryForType()).thenReturn(new SimpleBundleProvider(Collections.singletonList(createPatient(1, "1"))));
        final Bundle bundle = client.history().onType(Patient.class).returnBundle(Bundle.class).execute();
        Patient patient = (Patient) bundle.getEntryFirstRep().getResource();
        compareResultId(1, patient);
        compareResultUrl("/Patient/1/_history/1", patient);
    }

    @Test
    public void testXFindUnknownPatient() {
        try {
            // The provider throws a wrapped 404; the client must unwrap it.
            JaxRsResponseException notFoundException = new JaxRsResponseException(new ResourceNotFoundException(new IdDt("999955541264")));
            when(mock.find(idCaptor.capture())).thenThrow(notFoundException);
            client.read(Patient.class, "999955541264");
            fail();
        } catch (final ResourceNotFoundException e) {
            assertEquals(ResourceNotFoundException.STATUS_CODE, e.getStatusCode());
            assertTrue(e.getMessage().contains("999955541264"));
        }
    }

    @Test
    public void testValidate() {
        // prepare mock
        final OperationOutcome oo = new OperationOutcome();
        final Patient patient = new Patient();
        patient.addIdentifier((new IdentifierDt().setValue("1")));
        // invoke
        final Parameters inParams = new Parameters();
        inParams.addParameter().setResource(patient);
        final MethodOutcome mO = client.validate().resource(patient).execute();
        // verify
        assertNotNull(mO.getOperationOutcome());
    }

    /**
     * Mockito matcher that compares only the numeric id part, so matching works
     * whether the argument is an {@code IdDt} or an {@code IResource}.
     */
    private <T> T withId(final T id) {
        return argThat(other -> {
            IdDt thisId;
            IdDt otherId;
            if (id instanceof IdDt) {
                thisId = (IdDt) id;
                otherId = (IdDt) other;
            } else {
                thisId = ((IResource) id).getId();
                otherId = ((IResource) other).getId();
            }
            return thisId.getIdPartAsLong().equals(otherId.getIdPartAsLong());
        });
    }

    /** Boots Jetty on a random port with the RESTEasy dispatcher and builds the client. */
    @BeforeClass
    public static void setUpClass() throws Exception {
        ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
        context.setContextPath("/");
        jettyServer = new Server(0);
        jettyServer.setHandler(context);
        ServletHolder jerseyServlet = context.addServlet(org.jboss.resteasy.plugins.server.servlet.HttpServletDispatcher.class, "/*");
        jerseyServlet.setInitOrder(0);
        //@formatter:off
        jerseyServlet.setInitParameter("resteasy.resources",
                StringUtils.join(Arrays.asList(
                        TestJaxRsMockPatientRestProvider.class.getCanonicalName(),
                        TestJaxRsConformanceRestProvider.class.getCanonicalName(),
                        TestJaxRsMockPageProvider.class.getCanonicalName()
                ), ","));
        //@formatter:on
        JettyUtil.startServer(jettyServer);
        ourPort = JettyUtil.getPortForStartedServer(jettyServer);
        ourCtx.setRestfulClientFactory(new JaxRsRestfulClientFactory(ourCtx));
        ourCtx.getRestfulClientFactory().setServerValidationMode(ServerValidationModeEnum.NEVER);
        ourCtx.getRestfulClientFactory().setSocketTimeout(1200 * 1000);
        serverBase = "http://localhost:" + ourPort + "/";
        client = ourCtx.newRestfulGenericClient(serverBase);
        client.setEncoding(EncodingEnum.JSON);
        client.registerInterceptor(new LoggingInterceptor(true));
    }
}
|
mjpitz/libhealth
|
gauge/floats_test.go
|
<reponame>mjpitz/libhealth
package gauge
import (
"container/list"
"testing"
"github.com/stretchr/testify/require"
"oss.indeed.com/go/libhealth"
)
// floatList builds a *list.List whose front-to-back order is the reverse of
// the argument order (equivalent to pushing each value onto the front).
func floatList(floats ...float64) *list.List {
	result := list.New()
	for i := len(floats) - 1; i >= 0; i-- {
		result.PushBack(floats[i])
	}
	return result
}
// Test_MaxFloatThreshold_Apply table-tests MaxFloatThreshold.Apply: each case
// feeds a list of observed values through a threshold configuration and checks
// the resulting health status. Lists are built with floatList, so the list's
// front is the LAST value in the literal (most recent observation first).
func Test_MaxFloatThreshold_Apply(t *testing.T) {
	tests := []struct {
		values    *list.List
		threshold MaxFloatThreshold
		expState  libhealth.Status
	}{
		// only AnyN
		{
			floatList(1.1),
			MaxFloatThreshold{Threshold: 5.5, AnyN: 1, Severity: libhealth.MAJOR},
			libhealth.OK,
		},
		{
			floatList(7.7),
			MaxFloatThreshold{Threshold: 5.4, AnyN: 1, Severity: libhealth.MAJOR},
			libhealth.MAJOR,
		},
		{
			floatList(1.1, 2.2, 3.3, 4.4),
			MaxFloatThreshold{Threshold: 5.5, AnyN: 1, Severity: libhealth.MAJOR},
			libhealth.OK,
		},
		{
			floatList(1.1, 2.2, 3.3, 4.4, 5.5),
			MaxFloatThreshold{Threshold: 5.5, AnyN: 1, Severity: libhealth.MAJOR},
			libhealth.MAJOR,
		},
		{
			floatList(1.1, 2.2, 3.3, 4.4, 5.5),
			MaxFloatThreshold{Threshold: 5.5, AnyN: 2, Severity: libhealth.MAJOR},
			libhealth.OK,
		},
		{
			floatList(1.1, 3.3, 5.5, 7.7, 9.9, 2.3, 3.3),
			MaxFloatThreshold{Threshold: 6.6, AnyN: 2, Severity: libhealth.MAJOR},
			libhealth.MAJOR,
		},
		{
			floatList(1.1, 3.3, 5.5, 7.7, 9.9, 2.2, 3.3, 4.4, 5.5),
			MaxFloatThreshold{Threshold: 6.6, AnyN: 3, Severity: libhealth.MAJOR},
			libhealth.OK,
		},
		// only LastN
		{
			floatList(1.1),
			MaxFloatThreshold{Threshold: 5.5, LastN: 1, Severity: libhealth.MAJOR},
			libhealth.OK,
		},
		{
			floatList(5.5),
			MaxFloatThreshold{Threshold: 5.5, LastN: 1, Severity: libhealth.MAJOR},
			libhealth.MAJOR,
		},
		{
			floatList(3.3),
			MaxFloatThreshold{Threshold: 5.5, LastN: 9, Severity: libhealth.MAJOR},
			libhealth.OK,
		},
		{
			floatList(4.4),
			MaxFloatThreshold{Threshold: 3.3, LastN: 2, Severity: libhealth.MAJOR},
			libhealth.OK,
		},
		{
			floatList(1.1, 2.2, 3.3, 4.4, 5.5, 6.6, 2.2),
			MaxFloatThreshold{Threshold: 5.5, LastN: 1, Severity: libhealth.MAJOR},
			libhealth.OK,
		},
		{
			floatList(1.1, 2.2, 3.3, 4.4, 5.5, 6.6, 2.2),
			MaxFloatThreshold{Threshold: 5.5, LastN: 3, Severity: libhealth.MAJOR},
			libhealth.OK,
		},
		{
			floatList(1.1, 2.2, 3.3, 4.4, 5.5, 6.6, 7.7),
			MaxFloatThreshold{Threshold: 5.5, LastN: 3, Severity: libhealth.MAJOR},
			libhealth.MAJOR,
		},
		{
			floatList(1, 2, 3, 4, 5, 6, 7, 1, 8),
			MaxFloatThreshold{Threshold: 5, LastN: 3, Severity: libhealth.MAJOR},
			libhealth.OK,
		},
		// combined AnyN and LastN
		{
			floatList(3.3),
			MaxFloatThreshold{Threshold: 5.5, AnyN: 1, LastN: 1, Severity: libhealth.MAJOR},
			libhealth.OK,
		},
		{
			floatList(3.3),
			MaxFloatThreshold{Threshold: 3.3, AnyN: 1, LastN: 1, Severity: libhealth.MAJOR},
			libhealth.MAJOR,
		},
		{
			floatList(2.2, 3.3, 4.4, 5.5),
			MaxFloatThreshold{Threshold: 4.4, AnyN: 3, LastN: 2, Severity: libhealth.MAJOR},
			libhealth.MAJOR,
		},
		{
			floatList(9.9, 7.7, 3.3, 2.2, 8.8, 1.1, 4.4),
			MaxFloatThreshold{Threshold: 4.4, AnyN: 3, LastN: 2, Severity: libhealth.MAJOR},
			libhealth.MAJOR,
		},
		{
			floatList(4.4, 5.5, 6.6, 12.12, 3.3, 7.7, 8.7),
			MaxFloatThreshold{Threshold: 8.8, AnyN: 3, LastN: 3, Severity: libhealth.MAJOR},
			libhealth.OK,
		},
	}
	for _, test := range tests {
		state, _ := test.threshold.Apply(test.values)
		require.Equal(t, test.expState, state, "expected state %s got %s", test.expState, state)
	}
}
// Test_MinFloatThreshold_Apply table-tests MinFloatThreshold.Apply: symmetric
// to the max-threshold cases, values at or below Threshold count as breaches.
// Lists are built with floatList (front of list = last literal value).
func Test_MinFloatThreshold_Apply(t *testing.T) {
	tests := []struct {
		values    *list.List
		threshold MinFloatThreshold
		expState  libhealth.Status
	}{
		// only AnyN
		{
			floatList(1.1),
			MinFloatThreshold{Threshold: 3.3, AnyN: 1, Severity: libhealth.MAJOR},
			libhealth.MAJOR,
		},
		{
			floatList(5.5),
			MinFloatThreshold{Threshold: 3.3, AnyN: 1, Severity: libhealth.MAJOR},
			libhealth.OK,
		},
		{
			floatList(4.4, 5.5, 6.6, 7.7, 8.8),
			MinFloatThreshold{Threshold: 4.4, AnyN: 1, Severity: libhealth.MAJOR},
			libhealth.MAJOR,
		},
		{
			floatList(4.4, 5.5, 6.6, 7.7, 8.8),
			MinFloatThreshold{Threshold: 4.4, AnyN: 2, Severity: libhealth.MAJOR},
			libhealth.OK,
		},
		{
			floatList(3.3, 4.4, 7.7, 8.8, 1.1, 2.2, 1.1),
			MinFloatThreshold{Threshold: 3.3, AnyN: 4, Severity: libhealth.MAJOR},
			libhealth.MAJOR,
		},
		// only LastN
		{
			floatList(5.5),
			MinFloatThreshold{Threshold: 3.3, LastN: 1, Severity: libhealth.MAJOR},
			libhealth.OK,
		},
		{
			floatList(1.1),
			MinFloatThreshold{Threshold: 3.3, LastN: 1, Severity: libhealth.MAJOR},
			libhealth.MAJOR,
		},
		{
			floatList(2.2, 5.5, 6.6, 1.1, 4.4, 4.4, 2.2),
			MinFloatThreshold{Threshold: 3.3, LastN: 2, Severity: libhealth.MAJOR},
			libhealth.OK,
		},
		{
			floatList(2.2, 5.5, 6.6, 1.1, 4.4, 4.4, 2.2),
			MinFloatThreshold{Threshold: 4.4, LastN: 2, Severity: libhealth.MAJOR},
			libhealth.MAJOR,
		},
		// combined AnyN and LastN
		{
			floatList(1.1),
			MinFloatThreshold{Threshold: 5.5, AnyN: 4, LastN: 2, Severity: libhealth.MAJOR},
			libhealth.OK,
		},
		{
			floatList(1.1, 8.8, 2.2, 1.1, 1.1, 3.3, 7.7),
			MinFloatThreshold{Threshold: 5.5, AnyN: 4, LastN: 2, Severity: libhealth.MAJOR},
			libhealth.MAJOR,
		},
		{
			floatList(9.9, 8.8, 9.9, 1.1, 7.7, 3.3, 5.5),
			MinFloatThreshold{Threshold: 5.5, AnyN: 4, LastN: 2, Severity: libhealth.MAJOR},
			libhealth.MAJOR,
		},
	}
	for _, test := range tests {
		state, _ := test.threshold.Apply(test.values)
		require.Equal(t, test.expState, state, "expected state %s got %s", test.expState, state)
	}
}
// gaugeFloats records each of vals on g, in argument order.
func gaugeFloats(g FloatGauger, vals ...float64) {
	for i := 0; i < len(vals); i++ {
		g.Gauge(vals[i])
	}
}
// Test_Floats_AnyN exercises the Floats gauge with AnyN-style max thresholds:
// each case builds a gauge of a given capacity, installs one or more
// thresholds, records a sequence of values, and asserts the reported status.
// Note the gauge capacity bounds how many recent values a threshold can see.
func Test_Floats_AnyN(t *testing.T) {
	tests := []struct {
		gauge    FloatGauger
		expState libhealth.Status
	}{
		{
			gauge: func() FloatGauger {
				g, err := Floats("test", 1)
				require.NoError(t, err)
				return g
			}(), expState: libhealth.OK}, // default to OK if no values yet
		{
			gauge: func() FloatGauger {
				g, err := Floats("test", 1)
				require.NoError(t, err)
				g.Set(MaxFloatThreshold{
					Threshold: 5.5,
					AnyN:      1,
					Severity:  libhealth.MAJOR,
				})
				gaugeFloats(g, 1.1)
				return g
			}(),
			expState: libhealth.OK,
		},
		{
			gauge: func() FloatGauger {
				g, err := Floats("test", 1)
				require.NoError(t, err)
				g.Set(MaxFloatThreshold{
					Threshold: 5.5,
					AnyN:      1,
					Severity:  libhealth.MAJOR,
				})
				gaugeFloats(g, 7.7)
				return g
			}(),
			expState: libhealth.MAJOR,
		},
		{
			gauge: func() FloatGauger {
				g, err := Floats("test", 10)
				require.NoError(t, err)
				g.Set(MaxFloatThreshold{
					Threshold: 5.5,
					AnyN:      3,
					Severity:  libhealth.MAJOR,
				})
				gaugeFloats(g, 9.9, 1.1, 5.5, 1.1, 3.3, 2.2, 0.0, 1.1, 2.2)
				return g
			}(),
			expState: libhealth.OK,
		},
		{
			gauge: func() FloatGauger {
				g, err := Floats("test", 10)
				require.NoError(t, err)
				g.Set(MaxFloatThreshold{
					Threshold: 5.5,
					AnyN:      3,
					Severity:  libhealth.MAJOR,
				})
				// More values than capacity: only the most recent 10 are retained.
				gaugeFloats(g,
					9.9, 5.5, 1.1, 8.8, 3.3, 6.6, 1.1,
					3.3, 6.6, 1.1, 5.5, 7.7, 4.4, 1.1, 4.4,
				)
				return g
			}(),
			expState: libhealth.MAJOR,
		},
		{
			gauge: func() FloatGauger {
				g, err := Floats("test", 10)
				require.NoError(t, err)
				// Multiple thresholds: the most severe breached one wins.
				g.Set(MaxFloatThreshold{
					Threshold: 5.5,
					AnyN:      3,
					Severity:  libhealth.MINOR,
				}).Set(MaxFloatThreshold{
					Threshold: 8.8,
					AnyN:      2,
					Severity:  libhealth.OUTAGE,
				})
				gaugeFloats(g, 1.1, 4.4, 2.2, 9.9, 4.4, 2.2, 1.1, 4.4, 8.8)
				return g
			}(),
			expState: libhealth.OUTAGE,
		},
		{
			gauge: func() FloatGauger {
				g, err := Floats("test", 10)
				require.NoError(t, err)
				g.Set(MaxFloatThreshold{
					Threshold: 4.4,
					AnyN:      2,
					Severity:  libhealth.MINOR,
				}).Set(MaxFloatThreshold{
					Threshold: 5.5,
					AnyN:      2,
					Severity:  libhealth.MAJOR,
				}).Set(MaxFloatThreshold{
					Threshold: 6.6,
					AnyN:      3,
					Severity:  libhealth.OUTAGE,
				})
				gaugeFloats(g, 2.2, 9.9, 4.4, 7.7, 1.1, 4.4, 4.4)
				return g
			}(),
			expState: libhealth.MAJOR,
		},
	}
	for _, test := range tests {
		state := test.gauge.Health().Status
		require.Equal(t, test.expState, state, "expected %s got %s", test.expState, state)
	}
}
// Test_Floats_Mixed combines min and max thresholds on one gauge and verifies
// both the aggregated status and the health message, which concatenates the
// Description of every breached threshold.
func Test_Floats_Mixed(t *testing.T) {
	tests := []struct {
		gauge    FloatGauger
		expState libhealth.Status
		expMsg   string
	}{
		{
			gauge: func() FloatGauger {
				g, err := Floats("test", 5)
				require.NoError(t, err)
				g.Set(MinFloatThreshold{
					Threshold:   3.3,
					LastN:       2,
					Severity:    libhealth.MINOR,
					Description: "min1",
				}).Set(MinFloatThreshold{
					Threshold:   1.1,
					LastN:       2,
					Severity:    libhealth.MAJOR,
					Description: "min2",
				}).Set(MaxFloatThreshold{
					Threshold:   8.8,
					LastN:       2,
					Severity:    libhealth.MINOR,
					Description: "max1",
				}).Set(MaxFloatThreshold{
					Threshold:   9.9,
					LastN:       2,
					Severity:    libhealth.MAJOR,
					Description: "max2",
				})
				gaugeFloats(g,
					5.5, 6.6, 4.4, 8.8, 9.9, 7.7, 1.1, 5.5, 2.2, 1.1,
				)
				return g
			}(),
			expState: libhealth.MINOR,
			expMsg:   "min1",
		},
		{
			gauge: func() FloatGauger {
				g, err := Floats("test", 10)
				require.NoError(t, err)
				g.Set(MinFloatThreshold{
					Threshold:   3.4,
					AnyN:        2,
					Severity:    libhealth.MINOR,
					Description: "min1",
				}).Set(MinFloatThreshold{
					Threshold:   2.2,
					AnyN:        2,
					Severity:    libhealth.MAJOR,
					Description: "min2",
				}).Set(MinFloatThreshold{
					Threshold:   1.1,
					AnyN:        1,
					Severity:    libhealth.OUTAGE,
					Description: "min3",
				}).Set(MaxFloatThreshold{
					Threshold:   6.6,
					AnyN:        2,
					Severity:    libhealth.MINOR,
					Description: "max1",
				}).Set(MaxFloatThreshold{
					Threshold:   7.7,
					AnyN:        2,
					Severity:    libhealth.MAJOR,
					Description: "max2",
				}).Set(MaxFloatThreshold{
					Threshold:   8.8,
					AnyN:        3,
					Severity:    libhealth.OUTAGE,
					Description: "max3",
				})
				gaugeFloats(g,
					2.2, 4.4, 2.2, 1.1, 8.8, 4.4,
					7.7, 6.6, 9.9, 12.12, 2.2,
				)
				return g
			}(),
			expState: libhealth.OUTAGE,
			// Both an OUTAGE min and an OUTAGE max are breached; messages joined.
			expMsg: "min3, max3",
		},
	}
	for _, test := range tests {
		health := test.gauge.Health()
		state := health.Status
		msg := string(health.Message)
		require.Equal(t, test.expState, state, "expected %s got %s", test.expState, state)
		require.Equal(t, test.expMsg, msg)
	}
}
|
BuildAMovement/whistler-kobocat
|
fabfile/__init__.py
|
from .docker import deploy
from .legacy import deploy as deploy_legacy
from .legacy import deploy_ref as deploy_ref_legacy
|
lechium/iPhoneOS_12.1.1_Headers
|
System/Library/PrivateFrameworks/DataAccess.framework/Frameworks/DABookmarkDAV.framework/DADaemonBookmarkDAV.bundle/BookmarkDAVDaemonAccount.h
|
/*
* This header is generated by classdump-dyld 1.0
* on Saturday, June 1, 2019 at 6:50:53 PM Mountain Standard Time
* Operating System: Version 12.1.1 (Build 16C5050a)
* Image Source: /System/Library/PrivateFrameworks/DataAccess.framework/Frameworks/DABookmarkDAV.framework/DADaemonBookmarkDAV.bundle/DADaemonBookmarkDAV
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by <NAME>.
*/
#import <DABookmarkDAV/BookmarkDAVAccount.h>
#import <DADaemonBookmarkDAV/DADataclassLockWatcher.h>
#import <DADaemonBookmarkDAV/DABabysittable.h>
// Forward declarations (classdump emits these instead of imports).
@class BookmarkDAVSyncDriver, NSString, NSDictionary;

// Daemon-side BookmarkDAV account, extending BookmarkDAVAccount with the
// DADataclassLockWatcher and DABabysittable protocols. NOTE(review): this is a
// reverse-engineered (classdump) header of a private framework — method
// semantics below are inferred from names only and are not verified.
@interface BookmarkDAVDaemonAccount : BookmarkDAVAccount <DADataclassLockWatcher, DABabysittable> {
	BookmarkDAVSyncDriver* _syncDriver; // backing driver for sync operations (presumably)
}
@property (nonatomic,readonly) NSString * pushKey;
@property (nonatomic,readonly) NSDictionary * pushTransport;
// Standard NSObject-protocol properties re-declared by classdump.
@property (readonly) unsigned long long hash;
@property (readonly) Class superclass;
@property (copy,readonly) NSString * description;
@property (copy,readonly) NSString * debugDescription;
-(void)discoveryTask:(id)arg1 gotAccountInfo:(id)arg2 error:(id)arg3 ;
-(NSString *)pushKey;
-(id)waiterID;
-(void)getRootFolderWithConsumer:(id)arg1 ;
-(void)synchronizeBookmarkTreeWithConsumer:(id)arg1 hasRemoteChanges:(BOOL)arg2 ;
-(id)initWithBackingAccountInfo:(id)arg1 ;
-(void)discoverInitialPropertiesWithConsumer:(id)arg1 ;
-(NSDictionary *)pushTransport;
@end
|
FlorianFranzen/kagome
|
core/consensus/babe/impl/threshold_util.cpp
|
/**
* Copyright Soramitsu Co., Ltd. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0
*/
#include "consensus/babe/impl/threshold_util.hpp"
#include <boost/range/adaptors.hpp>
#include <boost/range/numeric.hpp>
namespace kagome::consensus {
  /**
   * Computes the BABE slot-leadership threshold for one authority.
   *
   * Let c = c_pair.first / c_pair.second and
   * theta = weight(authority_index) / sum of all authority weights.
   * The winning probability is p = 1 - (1 - c)^theta, and the returned
   * threshold is floor(2^128 * p), computed exactly via a rational so the
   * only floating-point step is evaluating p itself.
   *
   * @param c_pair ratio (numerator, denominator) of slots that should have
   *        at least one leader
   * @param authorities full authority list with weights
   * @param authority_index index of the authority the threshold is for;
   *        assumed to be a valid index into `authorities` (not checked here)
   * @return threshold scaled to the 2^128 VRF output range
   */
  Threshold calculateThreshold(const std::pair<uint64_t, uint64_t> &c_pair,
                               const primitives::AuthorityList &authorities,
                               primitives::AuthorityIndex authority_index) {
    // c: target proportion of slots with a leader.
    double c = double(c_pair.first) / c_pair.second;

    using boost::adaptors::transformed;
    // theta: this authority's share of the total stake weight.
    double theta =
        double(authorities[authority_index].weight)
        / boost::accumulate(authorities | transformed([](auto &authority) {
                              return authority.weight;
                            }),
                            0.);

    using namespace boost::multiprecision;  // NOLINT
    // p = 1 - (1 - c)^theta, held as an exact rational from here on.
    cpp_rational p_rat(1. - pow(1. - c, theta));

    // Scale p into the 128-bit threshold domain: T = 2^128 * p.
    static const auto a = (uint256_t{1} << 128);
    return Threshold{a * numerator(p_rat) / denominator(p_rat)};
  }
}  // namespace kagome::consensus
|
lamhungypl/iianian-ecom-api
|
build-prod/dist/api/controllers/OrderStatusController.js
|
<reponame>lamhungypl/iianian-ecom-api<filename>build-prod/dist/api/controllers/OrderStatusController.js
'use strict';
Object.defineProperty(exports, '__esModule', { value: true });
exports.OrderStatusController = void 0;
const tslib_1 = require('tslib');
const lodash_1 = require('lodash');
require('reflect-metadata');
const routing_controllers_1 = require('routing-controllers');
const typeorm_1 = require('typeorm');
const orderStatus_1 = require('../models/orderStatus');
const orderStatusService_1 = require('../services/orderStatusService');
const createOrderStatusRequest_1 = require('./requests/createOrderStatusRequest');
// CRUD controller for order statuses. All handlers respond with
// { status, message[, data] } where status 1 = success and 0 = failure.
// FIX: the save-failure branch of updateOrderStatus and the failure branch of
// orderStatusList previously returned status: 1, inconsistent with every other
// error response in this controller; both now return status: 0.
let OrderStatusController = class OrderStatusController {
  constructor(orderStatusService) {
    this.orderStatusService = orderStatusService;
  }
  // Create Order Status API
  /**
   * @api {post} /api/order-status/create-order-status Create OrderStatus API
   * @apiGroup OrderStatus
   * @apiParam (Request body) {String} name name
   * @apiParam (Request body) {String} colorCode colorCode
   * @apiParam (Request body) {Number} status status
   * @apiHeader {String} Authorization
   * @apiParamExample {json} Input
   * {
   *      "name" : "",
   *      "colorCode" : "",
   * }
   * @apiSuccessExample {json} Success
   * HTTP/1.1 200 OK
   * {
   *      "message": "New OrderStatus is created successfully",
   *      "status": "1"
   * }
   * @apiSampleRequest /api/order-status/create-order-status
   * @apiErrorExample {json} createOrderStatus error
   * HTTP/1.1 500 Internal Server Error
   */
  createOrderStatus(orderStatusParam, response) {
    return tslib_1.__awaiter(this, void 0, void 0, function* () {
      const newOrderStatus = new orderStatus_1.OrderStatus();
      newOrderStatus.name = orderStatusParam.name;
      newOrderStatus.colorCode = orderStatusParam.colorCode;
      newOrderStatus.isActive = orderStatusParam.status;
      const orderStatusSave = yield this.orderStatusService.create(
        newOrderStatus
      );
      if (orderStatusSave !== undefined) {
        const successResponse = {
          status: 1,
          message: 'successfully created a new order status.',
          data: orderStatusSave,
        };
        return response.status(200).send(successResponse);
      } else {
        const errorResponse = {
          status: 0,
          message: 'unable to create OrderStatus',
        };
        return response.status(400).send(errorResponse);
      }
    });
  }
  // update Order Status API
  /**
   * @api {put} /api/order-status/update-order-status/:id Update OrderStatus API
   * @apiGroup OrderStatus
   * @apiHeader {String} Authorization
   * @apiParam (Request body) {String} name OrderStatus name
   * @apiParam (Request body) {String} colorCode colorCode
   * @apiParam (Request body) {Number} status status
   * @apiParamExample {json} Input
   * {
   *      "name" : "",
   *      "colorCode" : "",
   * }
   * @apiSuccessExample {json} Success
   * HTTP/1.1 200 OK
   * {
   *      "message": "Successfully updated orderStatus.",
   *      "status": "1"
   * }
   * @apiSampleRequest /api/order-status/update-order-status/:id
   * @apiErrorExample {json} OrderStatus error
   * HTTP/1.1 500 Internal Server Error
   */
  updateOrderStatus(orderStatusParams, id, response, request) {
    return tslib_1.__awaiter(this, void 0, void 0, function* () {
      const orderStatus = yield this.orderStatusService.findOne({
        where: {
          orderStatusId: id,
        },
      });
      if (!orderStatus) {
        const errorResponse = {
          status: 0,
          message: 'Invalid orderStatusId',
        };
        return response.status(400).send(errorResponse);
      }
      orderStatus.name = orderStatusParams.name;
      orderStatus.colorCode = orderStatusParams.colorCode;
      orderStatus.isActive = orderStatusParams.status;
      const orderStatusSave = yield this.orderStatusService.create(orderStatus);
      if (orderStatusSave !== undefined) {
        const successResponse = {
          status: 1,
          message: 'Successfully updated the order status.',
          data: orderStatusSave,
        };
        return response.status(200).send(successResponse);
      } else {
        const errorResponse = {
          // was status: 1 — failure responses use 0 everywhere else
          status: 0,
          message: 'unable to update OrderStatus.',
        };
        return response.status(400).send(errorResponse);
      }
    });
  }
  // Order Status List API
  /**
   * @api {get} /api/order-status/order-status-list OrderStatus List API
   * @apiGroup OrderStatus
   * @apiHeader {String} Authorization
   * @apiParam (Request body) {Number} limit limit
   * @apiParam (Request body) {Number} offset offset
   * @apiParam (Request body) {String} keyword keyword
   * @apiParam (Request body) {String} count count
   * @apiSuccessExample {json} Success
   * HTTP/1.1 200 OK
   * {
   *      "message": "Successfully get orderStatus list",
   *      "data":"{}"
   *      "status": "1"
   * }
   * @apiSampleRequest /api/order-status/order-status-list
   * @apiErrorExample {json} OrderStatus error
   * HTTP/1.1 500 Internal Server Error
   */
  orderStatusList(limit, offset, keyword, count, response) {
    return tslib_1.__awaiter(this, void 0, void 0, function* () {
      // Build the typeorm find-options: paging only when numeric, optional
      // LIKE filter on name when a keyword is present.
      const options = Object.assign(
        Object.assign(
          {},
          lodash_1.pickBy(
            {
              take: (limit && lodash_1.parseInt(limit)) || undefined,
              skip: (offset && lodash_1.parseInt(offset)) || undefined,
            },
            value => lodash_1.isNumber(value)
          )
        ),
        {
          select: ['orderStatusId', 'name', 'colorCode', 'isActive'],
          where: lodash_1.pickBy(
            { name: (keyword && typeorm_1.Like(`%${keyword}%`)) || undefined },
            value => value != null
          ),
        }
      );
      // count flag: return only the matching row count.
      if (count) {
        const orderStatusCount = yield this.orderStatusService.count(options);
        const successResponse = {
          status: 1,
          message: 'Successfully got the complete order status list.',
          data: orderStatusCount,
        };
        return response.status(200).send(successResponse);
      }
      const orderStatusList = yield this.orderStatusService.list(options);
      if (orderStatusList) {
        const successResponse = {
          status: 1,
          message: 'Successfully got the complete order status list.',
          data: orderStatusList,
        };
        return response.status(200).send(successResponse);
      } else {
        const errorResponse = {
          // was status: 1 — failure responses use 0 everywhere else
          status: 0,
          message: 'unable to get OrderStatus.',
        };
        return response.status(400).send(errorResponse);
      }
    });
  }
  // Delete Order Status API
  /**
   * @api {delete} /api/order-status/delete-order-status/:id Delete OrderStatus API
   * @apiGroup OrderStatus
   * @apiHeader {String} Authorization
   * @apiParamExample {json} Input
   * {
   *      "orderStatusId" : "",
   * }
   * @apiSuccessExample {json} Success
   * HTTP/1.1 200 OK
   * {
   *      "message": "Successfully deleted orderStatus.",
   *      "status": "1"
   * }
   * @apiSampleRequest /api/order-status/delete-order-status/:id
   * @apiErrorExample {json} OrderStatus error
   * HTTP/1.1 500 Internal Server Error
   */
  deleteOrderStatus(id, response, request) {
    return tslib_1.__awaiter(this, void 0, void 0, function* () {
      const orderStatus = yield this.orderStatusService.findOne({
        where: {
          orderStatusId: id,
        },
      });
      if (!orderStatus) {
        const errorResponse = {
          status: 0,
          message: 'Invalid orderStatusId.',
        };
        return response.status(400).send(errorResponse);
      }
      const deleteOrderStatus = yield this.orderStatusService.delete(
        orderStatus.orderStatusId
      );
      if (deleteOrderStatus) {
        const successResponse = {
          status: 1,
          message: 'Successfully deleted the order status.',
        };
        return response.status(200).send(successResponse);
      } else {
        const errorResponse = {
          status: 0,
          message: 'unable to delete orderStatus.',
        };
        return response.status(400).send(errorResponse);
      }
    });
  }
};
// --- Generated decorator metadata (routing-controllers + tslib). ---
// Each __decorate call binds the HTTP verb/route, the auth guard, the
// parameter sources and design-time type metadata onto one controller
// method. Compiler output: edit the TypeScript source, not this file.
tslib_1.__decorate(
    [
        routing_controllers_1.Post('/create-order-status'),
        routing_controllers_1.Authorized(),
        tslib_1.__param(0, routing_controllers_1.Body({ validate: true })),
        tslib_1.__param(1, routing_controllers_1.Res()),
        tslib_1.__metadata('design:type', Function),
        tslib_1.__metadata('design:paramtypes', [
            createOrderStatusRequest_1.CreateOrderStatus,
            Object,
        ]),
        tslib_1.__metadata('design:returntype', Promise),
    ],
    OrderStatusController.prototype,
    'createOrderStatus',
    null
);
tslib_1.__decorate(
    [
        routing_controllers_1.Put('/update-order-status/:id'),
        routing_controllers_1.Authorized(),
        tslib_1.__param(0, routing_controllers_1.Body({ validate: true })),
        tslib_1.__param(1, routing_controllers_1.Param('id')),
        tslib_1.__param(2, routing_controllers_1.Res()),
        tslib_1.__param(3, routing_controllers_1.Req()),
        tslib_1.__metadata('design:type', Function),
        tslib_1.__metadata('design:paramtypes', [
            createOrderStatusRequest_1.CreateOrderStatus,
            Number,
            Object,
            Object,
        ]),
        tslib_1.__metadata('design:returntype', Promise),
    ],
    OrderStatusController.prototype,
    'updateOrderStatus',
    null
);
tslib_1.__decorate(
    [
        routing_controllers_1.Get('/order-status-list'),
        routing_controllers_1.Authorized(),
        tslib_1.__param(0, routing_controllers_1.QueryParam('limit')),
        tslib_1.__param(1, routing_controllers_1.QueryParam('offset')),
        tslib_1.__param(2, routing_controllers_1.QueryParam('keyword')),
        tslib_1.__param(3, routing_controllers_1.QueryParam('count')),
        tslib_1.__param(4, routing_controllers_1.Res()),
        tslib_1.__metadata('design:type', Function),
        tslib_1.__metadata('design:paramtypes', [
            String,
            String,
            String,
            Object,
            Object,
        ]),
        tslib_1.__metadata('design:returntype', Promise),
    ],
    OrderStatusController.prototype,
    'orderStatusList',
    null
);
tslib_1.__decorate(
    [
        routing_controllers_1.Delete('/delete-order-status/:id'),
        routing_controllers_1.Authorized(),
        tslib_1.__param(0, routing_controllers_1.Param('id')),
        tslib_1.__param(1, routing_controllers_1.Res()),
        tslib_1.__param(2, routing_controllers_1.Req()),
        tslib_1.__metadata('design:type', Function),
        tslib_1.__metadata('design:paramtypes', [Number, Object, Object]),
        tslib_1.__metadata('design:returntype', Promise),
    ],
    OrderStatusController.prototype,
    'deleteOrderStatus',
    null
);
// Class-level decorator: mounts the controller at /order-status and
// records its constructor parameter types for dependency injection.
OrderStatusController = tslib_1.__decorate(
    [
        routing_controllers_1.JsonController('/order-status'),
        tslib_1.__metadata('design:paramtypes', [
            orderStatusService_1.OrderStatusService,
        ]),
    ],
    OrderStatusController
);
exports.OrderStatusController = OrderStatusController;
//# sourceMappingURL=OrderStatusController.js.map
|
DevOrc/Robogeddon-libgdx
|
core/src/com/noahcharlton/robogeddon/world/item/ItemStack.java
|
package com.noahcharlton.robogeddon.world.item;
/**
 * A mutable pairing of an {@link Item} with a quantity.
 */
public class ItemStack {

    /** The item type held by this stack. */
    private Item item;
    /** Number of units of the item in this stack. */
    private int amount;

    /**
     * Creates a stack of the given item and quantity.
     *
     * @param item   the item type
     * @param amount the number of units
     */
    public ItemStack(Item item, int amount) {
        this.item = item;
        this.amount = amount;
    }

    public Item getItem() {
        return item;
    }

    public int getAmount() {
        return amount;
    }

    public void setItem(Item item) {
        this.item = item;
    }

    public void setAmount(int amount) {
        this.amount = amount;
    }

    /**
     * Human-readable summary: the amount followed by the item's display name.
     *
     * @return e.g. {@code "3 Iron"}
     */
    public String getDisplayInfo() {
        return amount + " " + item.getDisplayName();
    }

    @Override
    public String toString() {
        return "ItemStack{" + "item=" + item + ", amount=" + amount + '}';
    }
}
|
jax-b/ModulerKTNE
|
rpi_software/controller/interProcessCom.go
|
<reponame>jax-b/ModulerKTNE<filename>rpi_software/controller/interProcessCom.go<gh_stars>0
package controller
import (
"encoding/json"
"log"
"strconv"
"strings"
"time"
ipc "github.com/james-barrow/golang-ipc"
mktnecf "github.com/jax-b/ModulerKTNE/rpi_software/commonfiles"
"go.uber.org/zap"
)
// InterProcessCom bridges the game controller to external processes over
// a local IPC channel named "ktne-ipc".
type InterProcessCom struct {
	ipc     *ipc.Server        // IPC server handle (golang-ipc)
	closech chan bool          // true => the Run goroutine should exit
	game    *GameController    // game state driven by incoming commands
	log     *zap.SugaredLogger // named sub-logger ("IPC")
}
// NewIPC creates a new interprocess communicator bound to the "ktne-ipc"
// server channel. It terminates the process (log.Fatal) if the IPC server
// cannot be started.
//
// Note: the returned value was previously assigned to a local named `ipc`,
// which shadowed the imported `ipc` package; the local is removed here.
func NewIPC(logger *zap.SugaredLogger, gamectrl *GameController) *InterProcessCom {
	logger = logger.Named("IPC")
	server, err := ipc.StartServer("ktne-ipc", nil)
	if err != nil {
		log.Fatal("Failed to start IPC Server", err)
	}
	return &InterProcessCom{
		ipc:     server,
		closech: make(chan bool),
		game:    gamectrl,
		log:     logger,
	}
}
// Run starts the command-processing loop in its own goroutine. The loop
// exits when a true value arrives on closech (sent by Close).
// NOTE(review): the select's default branch makes this a busy loop — if
// commandTree returns immediately when no client is connected, this spins
// at 100% CPU; confirm whether a short sleep or a blocking read is intended.
func (sipc *InterProcessCom) Run() {
	go func() {
		for {
			select {
			case quit := <-sipc.closech:
				if quit {
					return
				}
			default:
				sipc.commandTree()
			}
		}
	}()
}
// Safely closes the interprocess communicator: signals the Run goroutine
// to stop, then shuts the IPC server down.
// NOTE(review): the send on closech blocks until the Run goroutine receives
// it — Close hangs if Run was never started; confirm call ordering.
func (sipc *InterProcessCom) Close() error {
	sipc.closech <- true
	sipc.ipc.Close()
	return nil
}
// commandTree reads one message from the IPC channel and dispatches it.
// Messages are dotted command paths with an optional ":"-separated payload,
// e.g. "mktne.game.set_time:30s". Responses are written back with message
// type 1 (status) or 2 (data). Intentional `fallthrough`s make every
// successful set_* immediately answer with the corresponding get_*.
// NOTE(review): messagesCMD/messagesCMDDTA are indexed without length
// checks — a malformed or truncated message panics; confirm input is trusted.
func (sipc *InterProcessCom) commandTree() {
	// StatusCode 3: a client is connected and the channel is ready.
	if sipc.ipc.StatusCode() == 3 {
		message, err := sipc.ipc.Read()
		if err != nil {
			sipc.log.Error("Failed to read from IPC", err)
		}
		messages := string(message.Data)
		sipc.log.Debug("Received message from IPC", message)
		// Split payload off first, then split the command path on dots.
		messagesCMDDTA := strings.Split(messages, ":")
		messagesCMD := strings.Split(messagesCMDDTA[0], ".")
		var ipcwerr error
		if message.MsgType == 3 {
			switch messagesCMD[1] {
			case "game":
				switch messagesCMD[2] {
				case "start":
					err := sipc.game.StartGame()
					if err == nil {
						sipc.log.Info("Game Started")
						ipcwerr = sipc.ipc.Write(1, []byte("mktne.game.start.ok"))
					} else {
						sipc.log.Error("Failed to Start Game:", err)
						ipcwerr = sipc.ipc.Write(1, []byte("mktne.game.start.error"))
					}
				case "stop":
					err := sipc.game.StopGame()
					if err == nil {
						sipc.log.Info("Game Stopped")
						ipcwerr = sipc.ipc.Write(1, []byte("mktne.game.stop.ok"))
					} else {
						sipc.log.Error("Failed to Stop game:", err)
						ipcwerr = sipc.ipc.Write(1, []byte("mktne.game.stop.error"))
					}
				case "set_time": // Attempts to set the time. This command will be automatically followed by get_time
					gametime, err := time.ParseDuration(messagesCMDDTA[1])
					if err != nil {
						sipc.log.Errorf("Failed to convert time: %e", err)
						ipcwerr = sipc.ipc.Write(1, []byte("mktne.game.set_time.error"))
						break
					}
					err = sipc.game.SetGameTime(gametime)
					if err == nil {
						sipc.log.Info("Set game time to:", gametime)
						ipcwerr = sipc.ipc.Write(1, []byte("mktne.game.set_time.ok"))
					} else {
						sipc.log.Error("Failed to set time:", err)
						ipcwerr = sipc.ipc.Write(1, []byte("mktne.game.set_time.error"))
					}
					fallthrough
				case "get_time":
					gametime := sipc.game.GetGameTime()
					buffer := []byte("mktne.game.time:")
					buffer = append(buffer, []byte(strconv.Itoa(int(gametime)))...)
					ipcwerr = sipc.ipc.Write(2, buffer)
				case "set_strike": // Attempts to set the strike count. This command will be automatically followed by get_strike
					strike, err := strconv.ParseInt(messagesCMDDTA[1], 10, 16)
					if err != nil {
						sipc.log.Error("Failed to convert strike:", err)
						ipcwerr = sipc.ipc.Write(1, []byte("mktne.game.set_strike.error"))
						break
					}
					err = sipc.game.SetStrikes(int8(strike))
					if err == nil {
						sipc.log.Info("Set strike to:", strike)
						ipcwerr = sipc.ipc.Write(1, []byte("mktne.game.set_strike.ok"))
					} else {
						sipc.log.Error("Failed to set strike:", err)
						ipcwerr = sipc.ipc.Write(1, []byte("mktne.game.set_strike.error"))
					}
					fallthrough
				case "get_strike":
					strikes := sipc.game.GetStrikes()
					buffer := []byte("mktne.game.strike:")
					buffer = append(buffer, []byte(strconv.Itoa(int(strikes)))...)
					ipcwerr = sipc.ipc.Write(2, buffer)
				case "set_strike_rate": // Attempts to set the strike rate. This command will be automatically followed by get_strike_rate
					strikeRate, err := strconv.ParseFloat(messagesCMDDTA[1], 32)
					if err != nil {
						sipc.log.Error("Failed to convert strike rate:", err)
						ipcwerr = sipc.ipc.Write(1, []byte("mktne.game.set_strike_rate.error"))
						break
					}
					err = sipc.game.SetStrikeRate(float32(strikeRate))
					if err == nil {
						sipc.log.Info("Set strike rate to:", strikeRate)
						ipcwerr = sipc.ipc.Write(1, []byte("mktne.game.set_strike_rate.ok"))
					} else {
						sipc.log.Error("Failed to set strike rate:", err)
						ipcwerr = sipc.ipc.Write(1, []byte("mktne.game.set_strike_rate.error"))
					}
					fallthrough
				case "get_strike_rate":
					strikeRate := sipc.game.GetStrikeRate()
					buffer := []byte("mktne.game.strike_rate:")
					buffer = append(buffer, []byte(strconv.FormatFloat(float64(strikeRate), 'f', 2, 32))...)
					ipcwerr = sipc.ipc.Write(2, buffer)
				case "set_serialnumber": // Attempts to set the serial number. This command will be automatically followed by get_serialnumber
					// NOTE(review): write errors are not captured into ipcwerr
					// in this case, unlike every other branch — confirm intent.
					serialNumber := messagesCMDDTA[1]
					err := sipc.game.SetSerial(serialNumber)
					if err != nil {
						sipc.log.Error("Failed to set the serial number", err)
						sipc.ipc.Write(1, []byte("mktne.game.set_serialnumber.error"))
						break
					}
					sipc.log.Info("Set serial number to:", serialNumber)
					sipc.ipc.Write(1, []byte("mktne.game.set_serialnumber.ok"))
					fallthrough
				case "get_serialnumber":
					sipc.ipc.Write(1, []byte("mktne.game.serialnumber:"+sipc.game.GetSerial()))
				case "add_indicator": // Attempts to add a indicator to the list. This command will be automatically followed by get_indicators.
					var indiobj Indicator
					err := json.Unmarshal([]byte(messagesCMDDTA[1]), &indiobj)
					if err != nil {
						sipc.log.Error("Failed to unmarshal indicator:", err)
						ipcwerr = sipc.ipc.Write(1, []byte("mktne.game.add_indicator.error"))
						break
					}
					sipc.game.AddIndicator(indiobj)
					sipc.log.Info("Added Indicator to list", indiobj)
					ipcwerr = sipc.ipc.Write(1, []byte("mktne.game.add_indicator.ok"))
					fallthrough
				case "get_indicators":
					indicators := sipc.game.GetIndicators()
					buffer := []byte("mktne.game.indicators:")
					indjson, err := json.Marshal(indicators)
					if err != nil {
						sipc.log.Error("Failed to marshal indicators json")
						ipcwerr = sipc.ipc.Write(1, []byte("mktne.game.get_indicators.error"))
						break
					}
					buffer = append(buffer, indjson...)
					ipcwerr = sipc.ipc.Write(2, buffer)
					ipcwerr = sipc.ipc.Write(1, []byte("mktne.game.get_indicators.ok"))
				case "clear_indicators":
					sipc.game.ClearIndicators()
					sipc.log.Info("Cleared Active Indicators")
					ipcwerr = sipc.ipc.Write(1, []byte("mktne.game.clear_indicators.ok"))
				case "add_port": // Attempts to add a port to the list. This command will be automatically followed by get_ports.
					portInt64, err := strconv.ParseInt(messagesCMDDTA[1], 10, 8)
					if err != nil {
						sipc.log.Error("Failed to convert port:", err)
						ipcwerr = sipc.ipc.Write(1, []byte("mktne.game.add_port.error"))
						break
					}
					port := byte(portInt64)
					err = sipc.game.SetPorts(port)
					if err != nil {
						sipc.log.Error("Failed to add port:", err)
						ipcwerr = sipc.ipc.Write(1, []byte("mktne.game.add_port.error"))
						break
					}
					sipc.log.Info("Added Port to list", port)
					ipcwerr = sipc.ipc.Write(1, []byte("mktne.game.add_port.ok"))
					fallthrough
				case "get_ports":
					ports := sipc.game.GetPorts()
					buffer := []byte("mktne.game.ports:")
					portjson, err := json.Marshal(ports)
					if err != nil {
						sipc.log.Error("Failed to marshal ports json")
						ipcwerr = sipc.ipc.Write(1, []byte("mktne.game.get_ports.error"))
						break
					}
					buffer = append(buffer, portjson...)
					ipcwerr = sipc.ipc.Write(2, buffer)
				case "clear_ports":
					sipc.game.ClearPorts()
					sipc.log.Info("Cleared Game Ports")
					ipcwerr = sipc.ipc.Write(1, []byte("mktne.game.clear_ports.ok"))
				}
			case "module":
				// NOTE(review): unlike the "game" branch (indices 1/2), this
				// branch reads the module number from messagesCMD[4] and the
				// sub-command from messagesCMD[3] — confirm the wire format.
				modnum64, err := strconv.ParseInt(messagesCMD[4], 10, 8)
				if err != nil {
					sipc.log.Error("Failed to convert module number:", err)
					ipcwerr = sipc.ipc.Write(1, []byte("mktne.module.error"))
					break
				}
				modnum := int(modnum64)
				switch messagesCMD[3] {
				case "get_present":
					buffer := []byte("mktne.module." + messagesCMD[4] + ".present:")
					if sipc.game.modules[modnum].present {
						buffer = append(buffer, []byte("true")...)
					} else {
						buffer = append(buffer, []byte("false")...)
					}
					ipcwerr = sipc.ipc.Write(2, buffer)
				case "set_seed":
					seed, err := strconv.ParseInt(messagesCMDDTA[1], 10, 16)
					if err != nil {
						sipc.log.Error("Failed to convert seed:", err)
						ipcwerr = sipc.ipc.Write(1, []byte("mktne.module."+messagesCMD[4]+".set_seed.error"))
						break
					}
					err = sipc.game.modules[modnum].mctrl.SetGameSeed(uint16(seed))
					if err == nil {
						sipc.log.Info("Set module ", modnum, " seed to: ", seed)
						ipcwerr = sipc.ipc.Write(1, []byte("mktne.module."+messagesCMD[4]+".set_seed.ok"))
					} else {
						sipc.log.Error("Failed to set module ", modnum, " seed: ", err)
						ipcwerr = sipc.ipc.Write(1, []byte("mktne.module."+messagesCMD[4]+".set_seed.error"))
					}
				case "get_type":
					buffer := []byte("mktne.module." + messagesCMD[4] + ".type:")
					mtype := sipc.game.modules[modnum].modtype
					var mtypesl []rune
					for i := range mtype {
						mtypesl = append(mtypesl, mtype[i])
					}
					buffer = append(buffer, []byte(string(mtypesl))...)
					ipcwerr = sipc.ipc.Write(2, buffer)
				}
			case "network":
				switch messagesCMD[2] {
				case "close":
					sipc.game.multicast.mnetc.Close()
					sipc.game.multicast.useMulti = false
					ipcwerr = sipc.ipc.Write(1, []byte("mktne.network.close.ok"))
					sipc.log.Info("Closed multicast")
				case "open":
					sipc.game.multicast.useMulti = true
					sipc.game.multicast.mnetc, err = mktnecf.NewMultiCastCountdown(sipc.game.log, sipc.game.cfg.Network.MultiCastIP, sipc.game.cfg.Network.MultiCastPort)
					if err != nil {
						sipc.log.Error("Failed to open multicast:", err)
						ipcwerr = sipc.ipc.Write(1, []byte("mktne.network.open.error"))
					} else {
						sipc.log.Info("Opened multicast")
						ipcwerr = sipc.ipc.Write(1, []byte("mktne.network.open.ok"))
					}
				case "change":
					switch messagesCMD[3] {
					case "port":
						port, err := strconv.ParseInt(messagesCMDDTA[1], 10, 32)
						if err != nil {
							sipc.log.Error("Failed to convert port:", err)
							ipcwerr = sipc.ipc.Write(1, []byte("mktne.network.change.port.error"))
							break
						}
						err = sipc.game.multicast.mnetc.ChangePort(int(port))
						if err != nil {
							sipc.log.Error("Failed to change port:", err)
							ipcwerr = sipc.ipc.Write(1, []byte("mktne.network.change.port.error"))
						} else {
							sipc.log.Info("Changed port to:", port)
							ipcwerr = sipc.ipc.Write(1, []byte("mktne.network.change.port.ok"))
						}
					case "ip":
						err := sipc.game.multicast.mnetc.ChangeIP(messagesCMDDTA[1])
						if err != nil {
							sipc.log.Error("Failed to change IP:", err)
							ipcwerr = sipc.ipc.Write(1, []byte("mktne.network.change.ip.error"))
						} else {
							sipc.log.Info("Changed MCast IP to:", messagesCMDDTA[1])
							ipcwerr = sipc.ipc.Write(1, []byte("mktne.network.change.ip.ok"))
						}
					}
				}
			}
		}
		if ipcwerr != nil {
			sipc.log.Fatal("Failed to write to IPC:", ipcwerr)
		}
	}
}
// SyncStatus pushes the current game status to the connected client as a
// JSON message with IPC message type 9. Returns the marshal error if the
// status cannot be encoded; terminates the process (log.Fatal) on a write
// failure, matching commandTree's handling.
//
// Fix: the marshal result was previously stored in a local named `json`,
// shadowing the encoding/json package; renamed to `payload`.
func (sipc *InterProcessCom) SyncStatus(stat *mktnecf.Status) error {
	// Wire format expected by the client UI.
	type msg struct {
		Time                string  `json:"timeleft"`
		NumStrike           int8    `json:"strike"`
		Boom                bool    `json:"boom"`
		Win                 bool    `json:"win"`
		Gamerun             bool    `json:"gamerun"`
		Strikereductionrate float32 `json:"strikerate"`
	}
	omsg := msg{
		Time:                stat.Time.String(),
		NumStrike:           int8(stat.NumStrike),
		Boom:                stat.Boom,
		Win:                 stat.Win,
		Gamerun:             stat.Gamerun,
		Strikereductionrate: stat.Strikereductionrate,
	}
	payload, err := json.Marshal(omsg)
	if err != nil {
		sipc.log.Warn("Failed to marshal status: ", err)
		return err
	}
	if ipcwerr := sipc.ipc.Write(9, payload); ipcwerr != nil {
		sipc.log.Fatal("Failed to write to IPC:", ipcwerr)
		return ipcwerr
	}
	return nil
}
|
TierynnB/LeaguePyBot
|
LPBv2/bot/bot.py
|
<filename>LPBv2/bot/bot.py<gh_stars>0
from .. import *
from time import time
from ..logger import get_logger, Colors
logger = get_logger("LPBv2.Bot")
class LeaguePyBot:
    """Top-level bot: wires the client, game state, computer vision
    (minimap + screen) and input controller together, and runs the main
    loop on a dedicated thread via ``LoopInNewThread``."""

    def __init__(self):
        logger.info(f"Welcome to {Colors.yellow}LeaguePyBotV2{Colors.reset}")
        self.client = Client()
        self.game = Game()
        # Minimap capture region: bottom-right 210x210 px of a 1920x1080
        # screen — assumes that resolution; TODO confirm for other setups.
        self.minimap = Vision(
            bounding_box={
                "top": 1080 - 210,
                "left": 1920 - 210,
                "width": 210,
                "height": 210,
            }
        )
        self.screen = Vision()
        self.controller = Controller()
        self.FPS = float()
        self.loop = LoopInNewThread()
        # Schedules the main loop coroutine on the background thread.
        self.loop.submit_async(self.bot_loop())

    async def bot_loop(self):
        """Main loop: skip/reset while out of game, otherwise run vision
        and update game objects, tracking the loop rate in ``self.FPS``."""
        loop_time = time()
        while True:
            if not await self.is_in_game():
                await self.reset()
                continue
            await self.computer_vision()
            await self.update_game_objects()
            # await self.decide_actions()
            # await self.execute_actions()
            # NOTE(review): this logs the PREVIOUS iteration's FPS, since
            # self.FPS is only recomputed on the next line — confirm intent.
            logger.info(self.FPS)
            self.FPS = round(float(1 / (time() - loop_time)), 2)
            loop_time = time()

    async def is_in_game(self):
        # Truthy only when the client reports in-game AND members are known.
        return self.game.game_flow.is_ingame and self.game.members

    async def reset(self):
        # Drop cached templates and members so the next game starts clean.
        if self.minimap.templates:
            await self.minimap.clear_templates()
        if self.screen.templates:
            await self.screen.clear_templates()
        if self.game.members:
            await self.game.clear_members()

    async def prepare_vision_objects(self):
        # Lazily load match templates once per game.
        if not self.minimap.templates:
            names = await self.game.get_member_names()
            await self.minimap.load_templates(names=names, folder="champions_16x16")
        if not self.screen.templates:
            await self.screen.load_templates(
                names=["minion", "champion", "building_1", "building_2"],
                folder="units",
            )

    async def computer_vision(self):
        # Capture and template-match both the minimap and the full screen.
        await self.prepare_vision_objects()
        await self.minimap.screenshot()
        await self.minimap.match(match_best_only=True)
        await self.screen.screenshot()
        await self.screen.match()

    async def update_member_location(self):
        # NOTE(review): get_member_names() is awaited elsewhere (see
        # prepare_vision_objects), so iterating it here without await
        # likely iterates a coroutine and fails — confirm and fix upstream.
        for name in self.game.get_member_names():
            match = await self.minimap.get_match(name)
            if match:
                zone = await self.find_closest_zone(match.x, match.y)
                await self.game.update_member_location(name, match, zone)

    async def udpdate_units_position(self):
        # NOTE(review): method name has a typo ("udpdate"); kept as-is
        # because renaming would change the public interface.
        await self.game.game_units.update(self.screen.matches)

    async def update_game_objects(self):
        # Push the latest vision matches into the game model.
        await self.game.update_members(self.minimap.matches)
        await self.game.update_units(self.screen.matches)
        pass

    async def decide_actions(self):
        # Placeholder: decision-making not implemented yet.
        pass

    async def execute_actions(self):
        # Placeholder: self.actions is not initialized anywhere visible here.
        for action in self.actions:
            action.execute()
# # misc
# - reset
# - update_FPS
# # actions:
# - fall_back
# - heal
# - recall
# - cast_spells
# - attack
# - attack_building
# - attack_champion
# - attack_tower
# - follow_allies
# - move_minimap / go_to_lane
# - buy items
# # calculations:
# - get_closest_enemy_building_position
# - get_closest_enemy_champion_position
# - get_closest_enemy_position
# - get_average_enemy_position
# - get_safest_ally_position
# - get_riskiest_ally_position
# - find_closest_ally_zone
# - find_closest_zone
# - get_units_position(units, function)
# - ally_minions
# - enemy_minions
# - enemy_champions
# - enemy_buildings
# - riskiest_position
# - safest_position
# - average_position
# # computer vision:
# - locate_game_objects_AND_update
# - locate_champions_on_minimap_AND_update
|
sigurasg/ghidra
|
Ghidra/Debug/Debugger-agent-lldb/src/main/java/agent/lldb/manager/cmd/LldbListThreadsCommand.java
|
<filename>Ghidra/Debug/Debugger-agent-lldb/src/main/java/agent/lldb/manager/cmd/LldbListThreadsCommand.java
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package agent.lldb.manager.cmd;
import java.util.*;
import SWIG.SBProcess;
import SWIG.SBThread;
import agent.lldb.lldb.DebugClient;
import agent.lldb.manager.impl.LldbManagerImpl;
/**
 * Lists the threads of an LLDB process and reconciles the manager's cached
 * thread set with what the debugger currently reports.
 */
public class LldbListThreadsCommand extends AbstractLldbCommand<Map<String, SBThread>> {
	protected final SBProcess process;
	private Map<String, SBThread> updatedThreadIds = new HashMap<>();

	public LldbListThreadsCommand(LldbManagerImpl manager, SBProcess process) {
		super(manager);
		this.process = process;
	}

	@Override
	public Map<String, SBThread> complete(LldbPendingCommand<?> pending) {
		Map<String, SBThread> known = manager.getKnownThreads(process);
		Set<String> knownIds = known.keySet();
		// Add any thread the debugger reports that the manager doesn't know yet.
		for (Map.Entry<String, SBThread> entry : updatedThreadIds.entrySet()) {
			if (!knownIds.contains(entry.getKey())) {
				manager.addThreadIfAbsent(process, entry.getValue());
			}
		}
		// Remove cached threads that no longer exist (copy first: we mutate).
		for (String id : new ArrayList<>(knownIds)) {
			if (!updatedThreadIds.containsKey(id)) {
				manager.removeThread(DebugClient.getId(process), id);
			}
		}
		return manager.getKnownThreads(process);
	}

	@Override
	public void invoke() {
		updatedThreadIds.clear();
		long count = process.GetNumThreads();
		// Snapshot the debugger's current view of the process's threads.
		for (int i = 0; i < count; i++) {
			SBThread thread = process.GetThreadAtIndex(i);
			updatedThreadIds.put(DebugClient.getId(thread), thread);
		}
	}
}
|
interaction-lab/MoveToCode
|
docs/html/dir_bd79ff5a2247e228097ca24503898ff3.js
|
// Auto-generated by Doxygen: navigation tree data for one source directory.
// Do not edit by hand — regenerate the documentation instead.
var dir_bd79ff5a2247e228097ca24503898ff3 =
[
    [ "DiagnosticsEventData.cs", "_diagnostics_event_data_8cs.html", [
      [ "DiagnosticsEventData", "class_microsoft_1_1_mixed_reality_1_1_toolkit_1_1_diagnostics_1_1_diagnostics_event_data.html", "class_microsoft_1_1_mixed_reality_1_1_toolkit_1_1_diagnostics_1_1_diagnostics_event_data" ]
    ] ]
];
|
fengjixuchui/Hades-Windows
|
MonitorEvent/netdrv/hashtable.c
|
#include "public.h"
#include "hashtable.h"
/*
 * Allocates a zeroed hash table with `size` buckets.
 * Returns NULL when size is 0 or the allocation fails.
 */
PHASH_TABLE hash_table_new(unsigned int size)
{
	PHASH_TABLE table;
	unsigned int total;

	if (size < 1)
		return NULL;

	/* HASH_TABLE already embeds one bucket slot, so add size-1 more. */
	total = sizeof(HASH_TABLE) + sizeof(PHASH_TABLE_ENTRY) * (size - 1);
	table = malloc_np(total);
	if (!table)
		return NULL;

	memset(table, 0, total);
	table->size = size;
	return table;
}
/*
 * Releases a table created by hash_table_new(). Safe on NULL.
 * Note: entries themselves are owned by the caller and are not freed.
 */
void hash_table_free(PHASH_TABLE pTable)
{
	if (!pTable)
		return;
	free_np(pTable);
}
/*
 * Inserts an entry at the head of its bucket's chain.
 * Returns 1 on success, 0 if an entry with the same id already exists.
 */
int ht_add_entry(PHASH_TABLE pTable, PHASH_TABLE_ENTRY pEntry)
{
	UINT64 bucket;

	if (ht_find_entry(pTable, pEntry->id))
		return 0;

	bucket = pEntry->id % pTable->size;
	pEntry->pNext = pTable->pEntries[bucket];
	pTable->pEntries[bucket] = pEntry;
	return 1;
}
/*
 * Looks up an entry by id. Returns the entry, or NULL if not found.
 */
PHASH_TABLE_ENTRY ht_find_entry(PHASH_TABLE pTable, UINT64 id)
{
	PHASH_TABLE_ENTRY cur;

	/* Walk the chain of the bucket the id hashes into. */
	for (cur = pTable->pEntries[id % pTable->size]; cur; cur = cur->pNext) {
		if (cur->id == id)
			return cur;
	}
	return NULL;
}
/*
 * Unlinks the entry with the given id from its bucket chain.
 * Returns 1 if an entry was removed, 0 otherwise. The entry's memory is
 * owned by the caller and is not freed here.
 */
int ht_remove_entry(PHASH_TABLE pTable, UINT64 id)
{
	PHASH_TABLE_ENTRY *link;

	/* Pointer-to-pointer walk: *link is the current entry. */
	link = &pTable->pEntries[id % pTable->size];
	while (*link)
	{
		if ((*link)->id == id)
		{
			*link = (*link)->pNext;
			return 1;
		}
		link = &(*link)->pNext;
	}
	return 0;
}
|
HughWick/toolbox
|
src/main/java/com/github/hugh/util/DoubleMathUtils.java
|
<gh_stars>1-10
package com.github.hugh.util;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.text.DecimalFormat;
import java.text.NumberFormat;
/**
 * Utility class for precise {@code double} arithmetic backed by
 * {@link BigDecimal}. All operations convert operands via
 * {@code Double.toString} to avoid binary floating-point artifacts.
 *
 * @author hugh
 * @since 1.0.1
 */
public class DoubleMathUtils {

    // Not instantiable: all members are static.
    private DoubleMathUtils() {
    }

    // Default scale (decimal places) used by div(double, double).
    private static final int DEF_DIV_SCALE = 10;

    /**
     * Formatter that keeps at most two fraction digits, truncating
     * (RoundingMode.DOWN) rather than rounding, with digit grouping off
     * (emits 4000, not "4,000").
     */
    public static final NumberFormat numberFormat = NumberFormat.getNumberInstance();

    static {
        int maximum = 2;
        numberFormat.setMaximumFractionDigits(maximum);
        numberFormat.setRoundingMode(RoundingMode.DOWN);
        // Grouping off: emit 4000 rather than "4,000".
        numberFormat.setGroupingUsed(false);
    }

    /**
     * Formatter that keeps at most five fraction digits, truncating the
     * rest (RoundingMode.DOWN), with digit grouping off.
     */
    public static final NumberFormat numberGiveUp = NumberFormat.getNumberInstance();

    static {
        int maximum = 5;
        numberGiveUp.setMaximumFractionDigits(maximum);
        numberGiveUp.setRoundingMode(RoundingMode.DOWN);
        // Grouping off: emit 4000 rather than "4,000".
        numberGiveUp.setGroupingUsed(false);
    }

    /**
     * Exact addition.
     *
     * @param v1 augend
     * @param v2 addend
     * @return the exact sum of the two arguments
     */
    public static double add(double v1, double v2) {
        BigDecimal b1 = new BigDecimal(Double.toString(v1));
        BigDecimal b2 = new BigDecimal(Double.toString(v2));
        return b1.add(b2).doubleValue();
    }

    /**
     * Exact subtraction.
     *
     * @param v1 minuend
     * @param v2 subtrahend
     * @return the exact difference of the two arguments
     */
    public static double sub(double v1, double v2) {
        BigDecimal b1 = new BigDecimal(Double.toString(v1));
        BigDecimal b2 = new BigDecimal(Double.toString(v2));
        return b1.subtract(b2).doubleValue();
    }

    /**
     * Exact multiplication.
     *
     * @param v1 multiplicand
     * @param v2 multiplier
     * @return the exact product of the two arguments
     */
    public static double mul(double v1, double v2) {
        BigDecimal b1 = new BigDecimal(Double.toString(v1));
        BigDecimal b2 = new BigDecimal(Double.toString(v2));
        return b1.multiply(b2).doubleValue();
    }

    /**
     * Division with the default precision: when the division does not
     * terminate, the result is rounded half-up at 10 decimal places.
     *
     * @param v1 dividend
     * @param v2 divisor
     * @return the quotient of the two arguments
     */
    public static double div(double v1, double v2) {
        return div(v1, v2, DEF_DIV_SCALE);
    }

    /**
     * Division with caller-specified precision: when the division does not
     * terminate, the result is rounded half-up at {@code scale} decimal
     * places.
     *
     * @param v1    dividend
     * @param v2    divisor
     * @param scale number of decimal places to keep (must be &gt;= 0)
     * @return the quotient of the two arguments
     * @throws IllegalArgumentException if {@code scale} is negative
     */
    public static double div(double v1, double v2, int scale) {
        if (scale < 0) {
            throw new IllegalArgumentException("The scale must be a positive integer or zero");
        }
        BigDecimal b1 = new BigDecimal(Double.toString(v1));
        BigDecimal b2 = new BigDecimal(Double.toString(v2));
        // RoundingMode.HALF_UP replaces the deprecated BigDecimal.ROUND_HALF_UP.
        return b1.divide(b2, scale, RoundingMode.HALF_UP).doubleValue();
    }

    /**
     * Half-up rounding to a given number of decimal places.
     *
     * @param v     value to round
     * @param scale number of decimal places to keep (must be &gt;= 0)
     * @return the rounded value
     * @throws IllegalArgumentException if {@code scale} is negative
     */
    public static double round(double v, int scale) {
        if (scale < 0) {
            throw new IllegalArgumentException("The scale must be a positive integer or zero");
        }
        BigDecimal b = new BigDecimal(Double.toString(v));
        BigDecimal one = new BigDecimal("1");
        // RoundingMode.HALF_UP replaces the deprecated BigDecimal.ROUND_HALF_UP.
        return b.divide(one, scale, RoundingMode.HALF_UP).doubleValue();
    }

    /**
     * Formats a double: values whose fractional part vanishes at three
     * decimals are printed as a plain integer, otherwise with exactly two
     * decimal places.
     *
     * @param number value to format
     * @return the formatted string
     */
    public static String getString(double number) {
        String numberStr;
        // NOTE(review): compares the integer part scaled by 1000 against the
        // value truncated at 3 decimals, so fractions below 0.001 count as
        // "integer"; may overflow int for very large inputs — confirm intent.
        if (((int) number * 1000) == (int) (number * 1000)) {
            // Effectively an integer: drop the fraction entirely.
            numberStr = String.valueOf((int) number);
        } else {
            DecimalFormat df = new DecimalFormat("######0.00");
            numberStr = df.format(number);
        }
        return numberStr;
    }
}
|
dan-overton/react-workshop-app
|
example/react-fundamentals/src/final/05.extra-1.js
|
// Styling
// 💯 Custom component that encapsulates styling
// http://localhost:3000/isolated/exercises-final/05.extra-1
import '../box-styles.css'
import React from 'react'
function Box({style, size, className = '', ...otherProps}) {
const sizeClassName = size ? `box--${size}` : ''
return (
<div
className={`box ${className} ${sizeClassName}`}
style={{fontStyle: 'italic', ...style}}
{...otherProps}
/>
)
}
// Demo component: renders one Box per size variant plus a sizeless one,
// each with its own background color via the style prop.
function Usage() {
  return (
    <div>
      <Box size="small" style={{backgroundColor: 'lightblue'}}>
        small lightblue box
      </Box>
      <Box size="medium" style={{backgroundColor: 'pink'}}>
        medium pink box
      </Box>
      <Box size="large" style={{backgroundColor: 'orange'}}>
        large orange box
      </Box>
      <Box>sizeless box</Box>
    </div>
  )
}
export default Usage
|
xujiaji/erupt
|
erupt-annotation/src/main/java/xyz/erupt/annotation/config/EruptSmartSkipSerialize.java
|
<filename>erupt-annotation/src/main/java/xyz/erupt/annotation/config/EruptSmartSkipSerialize.java
package xyz.erupt.annotation.config;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Field marker: per its {@code @Comment}, serialization of the annotated
 * field is skipped when field-override analysis shows it is safe — the
 * current algorithm supports only a single override per field
 * (translation of the Chinese comment below; confirm against framework docs).
 *
 * @author YuePeng
 * date 2021/12/26 22:42
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
@Comment("根据字段覆盖情况智能校验是否需要序列化,目前的算法仅支持字段单次覆盖")
public @interface EruptSmartSkipSerialize {
}
|
ledrsnet/music-web-site
|
src/main/java/com/maple/music/dao/AlbumDao.java
|
package com.maple.music.dao;
import com.maple.music.entity.Album;
import java.math.BigInteger;
import java.util.List;
/**
* @author LiangDong
* @Date 2020/3/23
*/
public interface AlbumDao {

    /**
     * Returns the IDs of all albums referenced from the songs table.
     *
     * @return list of album IDs
     */
    List<BigInteger> getAlbumIdsFromSongs();

    /**
     * Checks whether an album with the given ID exists.
     *
     * @param id album ID
     * @return true if the album exists
     */
    boolean isAlbumExist(BigInteger id);

    /**
     * Persists an album to the database.
     *
     * @param album the album to insert
     */
    void insertAlbum(Album album);

    /**
     * Looks up an album by its ID.
     *
     * @param albumId album ID
     * @return the matching album (null contract not visible here — TODO confirm)
     */
    Album getAlbumById(BigInteger albumId);
}
|
Houkime/echo
|
editor/QLibrary/Resources/QLibrary/nodeeditor/internal/connection/ConnectionState.hpp
|
#pragma once
#include <QtCore/QUuid>
#include "../node/PortType.hpp"
class QPointF;
namespace QtNodes
{
class Node;
/// Stores currently dragging end.
/// Remembers last hovered Node.
class ConnectionState
{
public:
	ConnectionState(PortType port = PortType::None)
		: _requiredPort(port)
	{}

	// Non-copyable: dragging state belongs to exactly one connection.
	ConnectionState(const ConnectionState&) = delete;
	ConnectionState operator=(const ConnectionState&) = delete;

	~ConnectionState();

public:
	// Which port end (if any) still needs to be attached.
	void setRequiredPort(PortType end)
	{
		_requiredPort = end;
	}

	PortType requiredPort() const
	{
		return _requiredPort;
	}

	bool requiresPort() const
	{
		return _requiredPort != PortType::None;
	}

	void setNoRequiredPort()
	{
		_requiredPort = PortType::None;
	}

public:
	void interactWithNode(Node* node);
	void setLastHoveredNode(Node* node);

	Node* lastHoveredNode() const
	{
		return _lastHoveredNode;
	}

	void resetLastHoveredNode();

private:
	PortType _requiredPort;

	Node* _lastHoveredNode{nullptr};
};
}
|
jcoder39/Quasura
|
tests/math/main.cpp
|
/*
* main.cpp
*
* Created by <NAME>
*
* Copyright (c) 2018 spectrobyte http://spectrobyte.com
*
* This software may be modified and distributed under the terms
* of the MIT license. See the LICENSE file for details.
*
*/
#include "gtest/gtest.h"
// Test driver: hand argc/argv to GoogleTest, run every registered test,
// and propagate the aggregate result as the process exit code.
int main(int argc, char **argv) {
  ::testing::InitGoogleTest(&argc, argv);
  return RUN_ALL_TESTS();
}
|
DGermano8/ChasteDom
|
cell_based/test/tutorial/TestRunningDifferentialAdhesionSimulationsTutorial.hpp
|
<gh_stars>0
/*
Copyright (c) 2005-2018, University of Oxford.
All rights reserved.
University of Oxford means the Chancellor, Masters and Scholars of the
University of Oxford, having an administrative office at Wellington
Square, Oxford OX1 2JD, UK.
This file is part of Chaste.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the University of Oxford nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*
*
* Chaste tutorial - this page gets automatically changed to a wiki page
* DO NOT remove the comments below, and if the code has to be changed in
* order to run, please check the comments are still accurate
*
*
*/
#ifndef TESTRUNNINGDIFFERENTIALADHESIONSIMULATIONSTUTORIAL_HPP_
#define TESTRUNNINGDIFFERENTIALADHESIONSIMULATIONSTUTORIAL_HPP_
/*
* = An example showing how to simulate cell sorting due to differential adhesion in a vertex-based model =
*
* == Introduction ==
*
* In this tutorial we show how Chaste can be used to simulate a growing cell monolayer culture
* comprising two distinct cell types, which exhibit differential adhesion. We encountered a
* similar implementation in the second test in the TestRunningPottsBasedSimulationsTutorial,
* which used a cellular Potts model of cell interactions; here we use a vertex-based model.
*
* == The test ==
*
* As in previous tutorials, we begin by including the necessary header files. We have
* encountered these files already. Recall that often, either {{{CheckpointArchiveTypes.hpp}}}
* or {{{CellBasedSimulationArchiver.hpp}}} must be included the first Chaste header.
*/
#include <cxxtest/TestSuite.h>
#include "CheckpointArchiveTypes.hpp"
#include "AbstractCellBasedTestSuite.hpp"
#include "HoneycombVertexMeshGenerator.hpp"
#include "CellsGenerator.hpp"
#include "FixedG1GenerationalCellCycleModel.hpp"
#include "CellLabel.hpp"
#include "DifferentiatedCellProliferativeType.hpp"
#include "VertexBasedCellPopulation.hpp"
#include "CellAgesWriter.hpp"
#include "CellVolumesWriter.hpp"
#include "CellMutationStatesCountWriter.hpp"
#include "CellProliferativePhasesWriter.hpp"
#include "CellProliferativeTypesCountWriter.hpp"
#include "CellProliferativePhasesCountWriter.hpp"
#include "OffLatticeSimulation.hpp"
#include "SmartPointers.hpp"
#include "FakePetscSetup.hpp"
/*
* The next header file defines a force law for describing the mechanical interactions
* between neighbouring cells in the cell population, subject to each vertex. This force
* law is a subclass of {{{NagaiHondaForce}}}, which we encountered in the {{{TestRunningVertexBasedSimulationsTutorial}}},
* that allows for different adhesion energy parameter values depending on the types of
* interacting cells.
*/
#include "NagaiHondaDifferentialAdhesionForce.hpp"
/*
* Similar to the {{{NagaiHondaForce}}}, this force law requires a child class of {{{AbstractTargetAreaModifier}}} which
* assigns target areas to each cell and updates this information in each time step. Here, we use the {{{SimpleTargetAreaModifier}}}.
*/
#include "SimpleTargetAreaModifier.hpp"
/* Having included all the necessary header files, we proceed by defining the test class. */
class TestRunningDifferentialAdhesionSimulationsTutorial : public AbstractCellBasedTestSuite
{
public:
    /*
     * EMPTYLINE
     *
     * In this test, we demonstrate how to simulate a heterotypic monolayer that incorporates
     * differential adhesion, using a vertex-based approach. This may be compared with the
     * second test in the TestRunningPottsBasedSimulationsTutorial, which implements a similar
     * simulation using a cellular Potts model.
     */
    void TestVertexBasedDifferentialAdhesionSimulation()
    {
        /* First we create a regular vertex mesh. Here we choose to set the value of the cell rearrangement threshold. */
        HoneycombVertexMeshGenerator generator(5, 5);
        MutableVertexMesh<2,2>* p_mesh = generator.GetMesh();
        p_mesh->SetCellRearrangementThreshold(0.1);
        /* We then create some cells using the helper class {{{CellsGenerator}}}. Note that in this simulation
         * the cells are all differentiated, and thus no cell division occurs; if we wished, we could modify
         * the three lines below in a straightforward manner to incorporate cell proliferation and investigate
         * the effect of this on the cell sorting process. */
        std::vector<CellPtr> cells;
        MAKE_PTR(DifferentiatedCellProliferativeType, p_diff_type);
        CellsGenerator<FixedG1GenerationalCellCycleModel, 2> cells_generator;
        cells_generator.GenerateBasic(cells, p_mesh->GetNumElements(), std::vector<unsigned>(), p_diff_type);
        /* Using the vertex mesh and cells, we create a cell-based population object, and specify which results to
         * output to file. */
        VertexBasedCellPopulation<2> cell_population(*p_mesh, cells);
        cell_population.AddCellPopulationCountWriter<CellMutationStatesCountWriter>();
        cell_population.AddCellPopulationCountWriter<CellProliferativeTypesCountWriter>();
        cell_population.AddCellPopulationCountWriter<CellProliferativePhasesCountWriter>();
        cell_population.AddCellWriter<CellProliferativePhasesWriter>();
        cell_population.AddCellWriter<CellAgesWriter>();
        cell_population.AddCellWriter<CellVolumesWriter>();
        /* We randomly label some cells using the cell property {{{CellLabel}}}. We begin by creating a shared pointer to
         * this cell property using the helper singleton {{{CellPropertyRegistry}}}. We then loop over the cells and label
         * each cell independently with probability 0.5. Note that since the cells have been passed to the
         * {{{VertexBasedCellPopulation}}} object, the vector {{{cells}}} above is now empty, so we must use the
         * {{{Iterator}}} to loop over cells. */
        boost::shared_ptr<AbstractCellProperty> p_label(CellPropertyRegistry::Instance()->Get<CellLabel>());
        for (AbstractCellPopulation<2>::Iterator cell_iter = cell_population.Begin();
             cell_iter != cell_population.End();
             ++cell_iter)
        {
            if (RandomNumberGenerator::Instance()->ranf() < 0.5)
            {
                cell_iter->AddCellProperty(p_label);
            }
        }
        /* We are now in a position to create and configure the cell-based simulation object.
         * We can make the simulation run for longer to see more cell sorting by increasing the end time. */
        OffLatticeSimulation<2> simulator(cell_population);
        simulator.SetOutputDirectory("TestVertexBasedDifferentialAdhesionSimulation");
        simulator.SetSamplingTimestepMultiple(10);
        simulator.SetEndTime(1.0);
        /* Next we create the differential adhesion force law. This builds upon the model of Nagai, Honda and co-workers
         * encountered in the TestRunningVertexBasedSimulationsTutorial by allowing different values of the adhesion
         * energy parameters depending on the types of two neighbouring cells. Here we interpret the 'type' of a cell
         * as whether or not it has the cell property {{{CellLabel}}}; it would be straightforward to create a similar
         * force law that took account of a cell's mutation state, for example. Having created the force law, we set the
         * values of the parameters. If the adhesion energy for two neighbouring homotypic cells is less than that of two
         * heterotypic cells, then we may expect cell sorting to occur, in which the cells of each type will tend to locally
         * aggregate over time. */
        MAKE_PTR(NagaiHondaDifferentialAdhesionForce<2>, p_force);
        p_force->SetNagaiHondaDeformationEnergyParameter(55.0);
        p_force->SetNagaiHondaMembraneSurfaceEnergyParameter(0.0);
        p_force->SetNagaiHondaCellCellAdhesionEnergyParameter(1.0);
        p_force->SetNagaiHondaLabelledCellCellAdhesionEnergyParameter(6.0);
        p_force->SetNagaiHondaLabelledCellLabelledCellAdhesionEnergyParameter(3.0);
        p_force->SetNagaiHondaCellBoundaryAdhesionEnergyParameter(12.0);
        p_force->SetNagaiHondaLabelledCellBoundaryAdhesionEnergyParameter(40.0);
        simulator.AddForce(p_force);
        /* A {{{NagaiHondaDifferentialAdhesionForce}}} assumes that each cell has been assigned a target area.
         * The {{{SimpleTargetAreaModifier}}} will assign and update the target areas of all cells.
         */
        MAKE_PTR(SimpleTargetAreaModifier<2>, p_growth_modifier);
        simulator.AddSimulationModifier(p_growth_modifier);
        /* Finally, we run the simulation. */
        simulator.Solve();
    }
    /*
     * EMPTYLINE
     *
     * To visualize the results, use Paraview. See the UserTutorials/VisualizingWithParaview tutorial for more information.
     *
     * Load the file {{{/tmp/$USER/testoutput/TestVertexBasedDifferentialAdhesionSimulation/results_from_time_0/results.pvd}}}.
     */
};
#endif /*TESTRUNNINGDIFFERENTIALADHESIONSIMULATIONSTUTORIAL_HPP_*/
|
r8d8/lastlock
|
QCA4020_SDK/target/thirdparty/aws_freertos/lib/tls/portable/qcom/qca402x/aws_tls.c
|
<reponame>r8d8/lastlock
/*
* Copyright (c) 2018 Qualcomm Technologies, Inc.
* All Rights Reserved.
* Confidential and Proprietary - Qualcomm Technologies, Inc.
* $QTI_LICENSE_QDN_C
*
* Amazon FreeRTOS TLS for QCOM QCA402X V1.0.0
* Copyright (C) 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
* http://aws.amazon.com/freertos
* http://www.FreeRTOS.org
*/
/* FreeRTOS includes. */
#include "FreeRTOS.h"
#include "FreeRTOSIPConfig.h"
#include "aws_tls.h"
#include "aws_crypto.h"
//#include "aws_pkcs11.h"
#include "semphr.h"
#include "task.h"
#include "aws_clientcredential.h"
#include "aws_default_root_certificates.h"
/*qapi*/
#include "qapi_ssl.h"
#include "qapi_crypto.h"
/* C runtime includes. */
#include <string.h>
#include <time.h>
#include <stdio.h>
#define CFG_PACKET_SIZE_MAX_TX 1576
#define CFG_PACKET_SIZE_MAX_RX 1556
/*It's used to get a CA file name to store in FLASH*/
#define LEN_CA_NAME 32
/**
* @brief Internal context structure.
*
* @param[in] pcDestination Server location, can be a DNS name or IP address.
* @param[in] pcServerCertificate Server X.509 certificate in PEM format to trust.
* @param[in] ulServerCertificateLength Length in bytes of the server certificate.
* @param[in] pxNetworkRecv Callback for receiving data on an open TCP socket.
* @param[in] pxNetworkSend Callback for sending data on an open TCP socket.
* @param[in] pvCallerContext Opaque pointer provided by caller for above callbacks.
* @param[out] sslCtx Handle to an SSL object.
* @param[out] ssl Handle to an SSL connection.
* @param[out] config Structure to configure an SSL connection.
* @param[in] config_set Whether to do config.
* @param[out] role Active as client
*/
typedef struct TLSContext
{
const char * pcDestination;
const char * pcServerCertificate;
uint32_t ulServerCertificateLength;
const char ** ppcAlpnProtocols;
uint32_t ulAlpnProtocolsCount;
NetworkRecv_t pxNetworkRecv;
NetworkSend_t pxNetworkSend;
void * pvCallerContext;
/* qcom TLS */
qapi_Net_SSL_Obj_Hdl_t sslCtx;
qapi_Net_SSL_Con_Hdl_t ssl;
qapi_Net_SSL_Config_t config;
uint8_t config_set;
qapi_Net_SSL_Role_t role;
char caname[LEN_CA_NAME];
} TLSContext_t;
/*
* Helper routines.
*/
/*
 * Derive a short, deterministic flash-file name for a CA certificate by
 * SHA-1 hashing parts of the PEM buffer.  The digest bytes are written to
 * hash_name (must hold at least LEN_CA_NAME bytes).
 *
 * NOTE(review): the digest is fed cert[0..31], then cert[0..63] again,
 * then the remainder of the buffer.  This is odd, but any deterministic
 * function of the buffer works as a name, and changing it would orphan
 * CA lists already stored under the old names - so it is kept as-is.
 *
 * Returns the digest length on success, or -1 on failure.
 */
static int prvGetHashName( const char * cert,
                           uint32_t len,
                           char* hash_name)
{
    qapi_Crypto_Op_Hdl_t opHdl;
    uint32_t name_len = 32;
    int32_t status;

    /* The update/final split below consumes 32 + 64 bytes before the tail;
     * reject shorter buffers to avoid an unsigned underflow in (len-32-64). */
    if (len < 32 + 64)
    {
        dconfigPRINTF(("\nCertificate too short to hash\n"));
        return -1;
    }

    status = qapi_Crypto_Op_Alloc(QAPI_CRYPTO_ALG_SHA1_E, QAPI_CRYPTO_MODE_DIGEST_E, 0, &opHdl);
    if (QAPI_OK != status) /* QAPI_OK is 0 */
    {
        dconfigPRINTF(("qapi_Crypto_Op_Alloc failed,status:%d\n",status));
        return -1;
    }

    if (QAPI_OK != qapi_Crypto_Op_Digest_Update(opHdl, (void*)cert, 32))
    {
        dconfigPRINTF(("\nFailed to digest update\n"));
        qapi_Crypto_Op_Free(opHdl); /* was leaked on this path */
        return -1;
    }
    if (QAPI_OK != qapi_Crypto_Op_Digest_Update(opHdl, (void*)cert, 64))
    {
        dconfigPRINTF(("\nFailed to digest update\n"));
        qapi_Crypto_Op_Free(opHdl); /* was leaked on this path */
        return -1;
    }
    if (QAPI_OK != qapi_Crypto_Op_Digest_Final(opHdl, (void*)cert, (uint32_t)len-32-64, hash_name, &name_len))
    {
        dconfigPRINTF(("digest failed\n"));
        qapi_Crypto_Op_Free(opHdl); /* was leaked on this path */
        return -1;
    }

    qapi_Crypto_Op_Free(opHdl);
    return name_len;
}
/* Mutex serializing access to the shared certificate store in flash. */
static SemaphoreHandle_t g_tls_semaph;

/*
 * One-time initialisation of the TLS layer: creates the certificate-store
 * mutex.  Returns pdPASS on success, pdFALSE if the mutex could not be
 * allocated.
 */
BaseType_t TLS_Sys_Init()
{
    g_tls_semaph = xSemaphoreCreateMutex();
    return ( NULL != g_tls_semaph ) ? pdPASS : pdFALSE;
}
/*
* Interface routines.
*/
/*
 * Allocates and initialises a TLS context from the caller-supplied
 * parameters, creates the underlying SSL object and records the SNI
 * configuration (applied later in TLS_Connect()).
 *
 * On success *ppvContext holds the new context (caller releases it with
 * TLS_Cleanup()) and 0 is returned.  On failure QAPI_ERROR is returned,
 * the context is freed and *ppvContext is set to NULL - previously the
 * context was leaked (or left half-initialised in *ppvContext) on the
 * error paths.
 */
BaseType_t TLS_Init( void ** ppvContext,
                     TLSParams_t * pxParams )
{
    TLSContext_t * pCtx = NULL;

    /* Allocate an internal context. */
    pCtx = ( TLSContext_t * ) pvPortMalloc( sizeof( TLSContext_t ) ); /*lint !e9087 !e9079 Allow casting void* to other types. */
    if( NULL == pCtx )
    {
        return ( BaseType_t ) QAPI_ERROR;
    }

    memset( pCtx, 0, sizeof( TLSContext_t ) );
    *ppvContext = pCtx;

    /* Initialize the context from the caller's parameters. */
    pCtx->pcDestination = pxParams->pcDestination;
    pCtx->pcServerCertificate = pxParams->pcServerCertificate;
    pCtx->ulServerCertificateLength = pxParams->ulServerCertificateLength;
    pCtx->ppcAlpnProtocols = pxParams->ppcAlpnProtocols;
    pCtx->ulAlpnProtocolsCount = pxParams->ulAlpnProtocolsCount;
    pCtx->pxNetworkRecv = pxParams->pxNetworkRecv;
    pCtx->pxNetworkSend = pxParams->pxNetworkSend;
    pCtx->pvCallerContext = pxParams->pvCallerContext;

    /* Pre-compute the hash name under which the caller's CA certificate
     * will be stored in flash (see TLS_Connect()). */
    if( pCtx->pcServerCertificate )
    {
        if( -1 == prvGetHashName( pCtx->pcServerCertificate, pCtx->ulServerCertificateLength, pCtx->caname ) )
        {
            dconfigPRINTF(("Calc HASH name failed.\n"));
            vPortFree( pCtx );      /* was leaked on this path */
            *ppvContext = NULL;
            return ( BaseType_t ) QAPI_ERROR;
        }
    }

    /* Create the SSL object acting as a TLS client. */
    pCtx->role = QAPI_NET_SSL_CLIENT_E;
    pCtx->sslCtx = qapi_Net_SSL_Obj_New( pCtx->role );
    if( pCtx->sslCtx == QAPI_NET_SSL_INVALID_HANDLE )
    {
        vPortFree( pCtx );          /* was leaked on this path */
        *ppvContext = NULL;
        return ( BaseType_t ) QAPI_ERROR;
    }

    /* Record the SNI host name; applied via qapi_Net_SSL_Configure() later. */
    if( pCtx->pcDestination )
    {
        pCtx->config.sni_Name = pCtx->pcDestination;
        pCtx->config.sni_Name_Size = strlen( pCtx->pcDestination );
        pCtx->config_set = 1;
    }

    return 0;
}
/*-----------------------------------------------------------*/
#ifdef QCOMTLS_DEBUG_C
/* Debug callback: forwards TLS-layer debug strings to the portable logger.
 * Compiled in only when QCOMTLS_DEBUG_C is defined. */
static void prvTlsDebugPrint( void * ctx,
                              int level,
                              const char * file,
                              int line,
                              const char * str )
{
    /* Unused parameters. */
    ( void ) ctx;
    ( void ) file;
    ( void ) line;
    /* Send the debug string to the portable logger. */
    vLoggingPrintf( "TLS: |%d| %s", level, str );
}
#endif /* ifdef QCOMTLS_DEBUG_C */
/*-----------------------------------------------------------*/
/* Flash object names under which certificates are stored.
 * NOTE(review): "defulat" is a typo, but the string is also the persisted
 * on-flash name; renaming it would orphan CA lists stored by earlier
 * firmware, so it is deliberately kept as-is. */
char* cert_with_key = "aws_cert_and_key_pemtobin";
char* defulat_calist = "defulat_calist_pemtobin";
/* Scratch list used to enumerate stored CA files in TLS_Connect(). */
qapi_Net_SSL_Cert_List_t cafile_list;
/*
 * Loads the client certificate and a CA list into the SSL object, creates
 * an SSL connection bound to the caller's socket, applies the optional SNI
 * configuration and performs the TLS handshake.
 *
 * Returns 0 on success, QAPI_ERROR on failure.
 */
BaseType_t TLS_Connect( void * pvContext )
{
    BaseType_t xResult = 0;
    TLSContext_t * pCtx = ( TLSContext_t * ) pvContext; /*lint !e9087 !e9079 Allow casting void* to other types. */
    qapi_Net_SSL_Cert_Info_t cert_info;
    qapi_CA_Info_t ca_info;
    qapi_Status_t status;
    const TickType_t xTimeout = pdMS_TO_TICKS( 10000UL );

    /* The certificate store in flash is shared; serialize access to it. */
    if (pdTRUE == xSemaphoreTake(g_tls_semaph, xTimeout))
    {
        /* Load the device certificate + private key. */
        if (QAPI_OK != qapi_Net_SSL_Cert_Load(pCtx->sslCtx, QAPI_NET_SSL_CERTIFICATE_E, cert_with_key))
        {
            dconfigPRINTF(("cert load failed\n"));
            xSemaphoreGive(g_tls_semaph);
            xResult = QAPI_ERROR;
            goto ssl_error;
        }
        if( NULL != pCtx->pcServerCertificate)
        {
            /* Caller supplied a CA: store it in flash under its hash name
             * the first time it is seen, then load it into the SSL object. */
            uint32_t isCAstored = 0;
            int32_t i, numFiles;
            numFiles = qapi_Net_SSL_Cert_List(QAPI_NET_SSL_CA_LIST_E, &cafile_list);
            for (i = 0; i < numFiles; i++)
            {
                if (0 == strcmp(cafile_list.name[i], pCtx->caname))
                {
                    isCAstored = 1;
                    break;
                }
            }
            if (0 == isCAstored)
            {
                /* Convert the PEM buffer to the stored (binary) format. */
                memset(&cert_info, 0, sizeof(cert_info));
                memset(&ca_info, 0, sizeof(ca_info));
                ca_info.ca_Buf = (uint8_t *)pCtx->pcServerCertificate;
                ca_info.ca_Size = pCtx->ulServerCertificateLength;
                cert_info.cert_Type = QAPI_NET_SSL_PEM_CA_LIST_E;
                cert_info.info.pem_CA_List.ca_Cnt = 1;
                cert_info.info.pem_CA_List.ca_Info[0] = &ca_info;
                status = qapi_Net_SSL_Cert_Store(&cert_info, pCtx->caname);
                if (QAPI_OK != status)
                {
                    dconfigPRINTF(("ssl ca store failed:%d\n",status));
                    xSemaphoreGive(g_tls_semaph);
                    xResult = QAPI_ERROR;
                    goto ssl_error;
                }
            }
            if (QAPI_OK != qapi_Net_SSL_Cert_Load(pCtx->sslCtx, QAPI_NET_SSL_CA_LIST_E, pCtx->caname))
            {
                dconfigPRINTF(("ca load failed\n"));
                xSemaphoreGive(g_tls_semaph);
                xResult = QAPI_ERROR;
                goto ssl_error;
            }
        }
        else
        {
            /* No caller CA: fall back to the default stored CA list. */
            if (QAPI_OK != qapi_Net_SSL_Cert_Load(pCtx->sslCtx, QAPI_NET_SSL_CA_LIST_E, defulat_calist))
            {
                dconfigPRINTF(("ca load failed\n"));
                xSemaphoreGive(g_tls_semaph);
                xResult = QAPI_ERROR;
                goto ssl_error;
            }
        }
        xSemaphoreGive(g_tls_semaph);
    }
    /* NOTE(review): if the semaphore take times out we fall through without
     * (re)loading certificates, and the handshake below will then fail unless
     * the SSL object was already populated.  Kept as-is to preserve the
     * original behaviour; consider failing fast here instead. */

    pCtx->ssl = qapi_Net_SSL_Con_New(pCtx->sslCtx, QAPI_NET_SSL_TLS_E);
    if (QAPI_NET_SSL_INVALID_HANDLE == pCtx->ssl)
    {
        /* Previously unchecked: Fd_Set/Configure would have run on an
         * invalid handle. */
        dconfigPRINTF(("ssl con new failed\n"));
        return QAPI_ERROR;
    }
    /* Attach the caller's TCP socket (pvCallerContext points at the socket fd). */
    qapi_Net_SSL_Fd_Set(pCtx->ssl, *((int32_t*)pCtx->pvCallerContext));

    /* Apply the SNI configuration recorded in TLS_Init(), if any. */
    if (pCtx->config_set)
    {
        if (qapi_Net_SSL_Configure(pCtx->ssl, &pCtx->config) < QAPI_OK)
        {
            xResult = QAPI_ERROR;
            goto ssl_error;
        }
    }

    status = qapi_Net_SSL_Connect(pCtx->ssl);
    if (status != QAPI_SSL_OK_HS)
    {
        dconfigPRINTF(("ssl connect failed:%d,pCtx->ssl:%d\n",status,pCtx->ssl));
        xResult = QAPI_ERROR;
        if (status == QAPI_ERR_SSL_CERT_CN)
        {
            /* Certificate is trusted and time-valid, but the CN does not match
             * the host name.  (The CN/TIME messages were swapped relative to
             * their own comments in the original.) */
            dconfigPRINTF(("ERROR: The certificate is trusted, but the host name is not valid\n"));
        }
        else if (status == QAPI_ERR_SSL_CERT_TIME)
        {
            /* Certificate is trusted and the CN matches, but it has expired. */
            dconfigPRINTF(("ERROR: The certificate is expired\n"));
        }
        else if (status == QAPI_ERR_SSL_CERT_NONE)
        {
            /* NOTE(review): message retained from original; verify against the
             * QAPI documentation for the exact ..._CERT_NONE semantics. */
            dconfigPRINTF(("ERROR: The certificate is expired and the host name is not valid\n"));
        }
        else
        {
            dconfigPRINTF(("ERROR: SSL connect failed\n"));
        }
        goto ssl_error;
    }
    return xResult;

ssl_error:
    if (pCtx->ssl != QAPI_NET_SSL_INVALID_HANDLE)
    {
        qapi_Net_SSL_Shutdown(pCtx->ssl);
        pCtx->ssl = QAPI_NET_SSL_INVALID_HANDLE;
    }
    return xResult;
}
/*-----------------------------------------------------------*/
/*
 * Reads up to xReadLength decrypted bytes from the TLS connection into
 * pucReadBuffer.  Returns the number of bytes read (or a qapi error code,
 * passed through from qapi_Net_SSL_Read); 0 if pvContext is NULL.
 */
BaseType_t TLS_Recv( void * pvContext,
                     unsigned char * pucReadBuffer,
                     size_t xReadLength )
{
    BaseType_t xResult = 0;
    TLSContext_t * pCtx = ( TLSContext_t * ) pvContext; /*lint !e9087 !e9079 Allow casting void* to other types. */

    /* Clamp to the RX maximum (the original clamped with the TX limit,
     * CFG_PACKET_SIZE_MAX_TX, leaving CFG_PACKET_SIZE_MAX_RX unused). */
    if( CFG_PACKET_SIZE_MAX_RX < xReadLength )
    {
        xReadLength = CFG_PACKET_SIZE_MAX_RX;
    }
    if( NULL != pCtx )
    {
        xResult = qapi_Net_SSL_Read( pCtx->ssl,
                                     pucReadBuffer,
                                     xReadLength );
    }
    return xResult;
}
/*-----------------------------------------------------------*/
/*
 * Writes up to xMsgLength bytes from pucMsg over the TLS connection,
 * clamped to the TX packet maximum.  Returns the number of bytes written
 * (or a qapi error code, passed through from qapi_Net_SSL_Write); 0 if
 * pvContext is NULL.  Callers must therefore handle partial writes.
 */
BaseType_t TLS_Send( void * pvContext,
                     const unsigned char * pucMsg,
                     size_t xMsgLength )
{
    BaseType_t xResult = 0;
    TLSContext_t * pCtx = ( TLSContext_t * ) pvContext; /*lint !e9087 !e9079 Allow casting void* to other types. */

    if( CFG_PACKET_SIZE_MAX_TX < xMsgLength )
    {
        xMsgLength = CFG_PACKET_SIZE_MAX_TX;
    }
    if( NULL != pCtx )
    {
        xResult = qapi_Net_SSL_Write( pCtx->ssl,
                                      pucMsg,
                                      xMsgLength );
    }
    return xResult;
}
/*-----------------------------------------------------------*/
/*
 * Tears down a TLS context created by TLS_Init(): shuts down the SSL
 * connection and object if present, then releases the context memory.
 * Safe to call with NULL (vPortFree(NULL) is a no-op).
 */
void TLS_Cleanup( void * pvContext )
{
    TLSContext_t * pContext = ( TLSContext_t * ) pvContext; /*lint !e9087 !e9079 Allow casting void* to other types. */

    if( NULL == pContext )
    {
        vPortFree( pContext );
        return;
    }

    if( pContext->ssl )
    {
        qapi_Net_SSL_Shutdown( pContext->ssl );
        pContext->ssl = QAPI_NET_SSL_INVALID_HANDLE;
    }
    if( pContext->sslCtx )
    {
        qapi_Net_SSL_Obj_Free( pContext->sslCtx );
        pContext->sslCtx = QAPI_NET_SSL_INVALID_HANDLE;
    }
    /* Free memory. */
    vPortFree( pContext );
}
|
things-scene/things-scene
|
packages/visualizer/src/text-extrude.js
|
/*
* Copyright © HatioLab Inc. All rights reserved.
*/
import * as THREE from 'three'
import NanumGothicFont from '../obj/fonts/nanum_gothic.json?3d'
import Component3d from './component-3d'
import Object3D from './object3d'
// Editor metadata for the text-extrude component: the component type is
// fixed (mutable: false), it can be resized and rotated, and it exposes
// its z-position and extrusion depth as editable numeric properties.
const NATURE = {
  mutable: false,
  resizable: true,
  rotatable: true,
  properties: [
    {
      type: 'number',
      label: 'z-pos',
      name: 'zPos',
      property: 'zPos'
    },
    {
      type: 'number',
      label: 'depth',
      name: 'depth',
      property: 'depth'
    }
  ]
}
/**
 * 3D extruded-text component.  Loads the bundled Nanum Gothic font, builds
 * a THREE.TextGeometry from the model's text/fontSize/depth, and centers
 * the mesh at the model's (left, top) position in canvas coordinates.
 */
export default class TextExtrude extends Object3D {
  // Lazily-created, shared font loader.
  get fontLoader() {
    if (!this._fontLoader) {
      this._fontLoader = new THREE.FontLoader()
    }
    return this._fontLoader
  }

  // Cached x-center in canvas coordinates.  The width is measured from the
  // rendered geometry's bounding box; the model's own width is ignored
  // since extruded text sizes itself from the font.  (The original
  // declared `var width` twice - the destructured default was dead code.)
  get cx() {
    if (!this._cx) {
      var { left = 0 } = this.model
      var box = this.children[0].geometry.boundingBox
      var width = box.max.x - box.min.x
      this._cx = left + width / 2 - this._canvasSize.width / 2
    }
    return this._cx
  }

  // Cached y-center in canvas coordinates; height measured from the
  // geometry's bounding box, analogous to cx above.
  get cy() {
    if (!this._cy) {
      var { top = 0 } = this.model
      var box = this.children[0].geometry.boundingBox
      var height = box.max.y - box.min.y
      this._cy = top + height / 2 - this._canvasSize.height / 2
    }
    return this._cy
  }

  set fontJSON(font) {
    this._fontJSON = font
  }

  get fontJSON() {
    return this._fontJSON
  }

  // Fixed extrusion settings; merged with per-model font/size/depth in
  // createTextGeometry().
  get fontSettings() {
    return {
      steps: 1,
      curveSegments: 8,
      bevelEnabled: false
    }
  }

  // Asynchronously loads the font, then builds and attaches the text mesh.
  // Positioning is deferred until the font has loaded (see setPosition).
  async createObject() {
    var { fontColor = 0x000000 } = this.model
    this.fontLoader.load(NanumGothicFont, font => {
      this.fontJSON = font
      var geometry = this.createTextGeometry()
      var materials = [
        new THREE.MeshStandardMaterial({ color: fontColor }), // front
        new THREE.MeshStandardMaterial({ color: fontColor }) // side
      ]
      var mesh = new THREE.Mesh(geometry, materials)
      // Lay the text flat into the scene's ground plane.
      mesh.rotation.x = -Math.PI / 2
      this.add(mesh)
      this._fontLoaded = true
      this.setPosition()
    })
  }

  // Skip positioning until the font callback has produced a geometry,
  // since cx/cy read the mesh's bounding box.
  setPosition() {
    if (!this._fontLoaded) return
    super.setPosition()
  }

  // Builds a centered TextGeometry from the current model text.
  createTextGeometry() {
    var { fontSize = 10, depth = 1, text = '' } = this.model
    var fontSettings = Object.assign(this.fontSettings, {
      font: this.fontJSON,
      size: fontSize,
      height: depth
    })
    var geometry = new THREE.TextGeometry(text, fontSettings)
    geometry.center()
    return geometry
  }

  // Rebuilds the geometry after this.model.text changes and invalidates
  // the cached center so the mesh is re-laid-out.
  changeText() {
    if (this.children && this.children[0]) {
      this.children[0].geometry.dispose()
      this.children[0].geometry = this.createTextGeometry()
      this._cx = this._cy = null
      this.setPosition()
    }
  }

  // Text is not pickable in the 3D scene.
  raycast(raycaster, intersects) {}

  // Data binding: the first bound item's data becomes the displayed text.
  onUserDataChanged() {
    super.onUserDataChanged()
    if (!(this.userData && this.userData.items && this.userData.items.length > 0)) return
    var data = this.userData.items[0].data
    this.model.text = data
    this.changeText()
  }
}
Component3d.register('text', TextExtrude)
|
m-m-m/ui
|
core/src/main/java/io/github/mmm/ui/api/datatype/color/Alpha.java
|
<filename>core/src/main/java/io/github/mmm/ui/api/datatype/color/Alpha.java
/* Copyright (c) The m-m-m Team, Licensed under the Apache License, Version 2.0
* http://www.apache.org/licenses/LICENSE-2.0 */
package io.github.mmm.ui.api.datatype.color;
/**
* This {@link ColorSegment} represents the alpha value of a color what is its opacity. A value of {@code 0} stands for
* fully transparent (color not visible), while {@code 1.0} stands for a regular color (color fully visible). An alpha
* value of {@code 0.5} will be 50% transparent so you can see the color but the background shines through.
*
* @see Color#getAlpha()
* @see GenericColor#getAlpha()
*
* @since 1.0.0
*/
public class Alpha extends AbstractColorFactor<Alpha> {

  /** The maximum {@link Alpha} value for full opaque (no transparency). */
  public static final Alpha OPAQUE = new Alpha(1.0);

  /**
   * The constructor.
   *
   * @param value is the {@link #getValue() value} as a factor in the range {@code [0, 1]}
   *        ({@code 0} = fully transparent, {@code 1} = fully opaque).
   */
  public Alpha(double value) {

    super(value);
  }

  /**
   * The constructor.
   *
   * @param value is the {@link #getValue() value} given as {@link #toFactorString() factor} or
   *        {@link #toPercentString() percent} {@link String}.
   */
  public Alpha(String value) {

    super(value);
  }

  /**
   * The constructor.
   *
   * @param byteValue is the {@link #getValueAsByte() value given as byte} (0-255, scaled to the factor range).
   */
  public Alpha(int byteValue) {

    super(byteValue);
  }

  // Factory hook used by AbstractColorFactor operations (e.g. invert) to
  // produce results of this concrete segment type.
  @Override
  protected Alpha newInstance(double value) {

    return new Alpha(value);
  }

}
|
SvenAugustus/photon
|
01-java-core/jdk-io-samples/src/main/java/xyz/flysium/photon/c001_serializable/SerializableCloneTest.java
|
<filename>01-java-core/jdk-io-samples/src/main/java/xyz/flysium/photon/c001_serializable/SerializableCloneTest.java
/*
* MIT License
*
* Copyright (c) 2020 SvenAugustus
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package xyz.flysium.photon.c001_serializable;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.HashMap;
import java.util.Map;
/**
* 如何实现对象克隆? 答:有两种方式: 1). 实现Cloneable接口并重写Object类中的clone()方法; 2). 实现Serializable接口,通过对象的序列化和反序列化实现克隆,可以实现真正的深度克隆,代码如下。
*/
public class SerializableCloneTest {
public static void main(String[] args) throws IOException, ClassNotFoundException {
Map<String, String> m = new HashMap<String, String>();
m.put("k", "v");
Test t = new Test(1, "s", m);
Test t2 = clone(t);
System.out.println(t);
System.out.println(t2);
}
@SuppressWarnings("serial")
static class Test implements java.io.Serializable {
private int a;
private String b;
private Map<String, String> c;
public Test() {
}
public Test(int a, String b, Map<String, String> c) {
super();
this.a = a;
this.b = b;
this.c = c;
}
public int getA() {
return a;
}
public void setA(int a) {
this.a = a;
}
public String getB() {
return b;
}
public void setB(String b) {
this.b = b;
}
public Map<String, String> getC() {
return c;
}
public void setC(Map<String, String> c) {
this.c = c;
}
/*
* (non-Javadoc)
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
return "Test [a=" + a + ", b=" + b + ", c=" + c + "]";
}
}
/**
* 注意:基于序列化和反序列化实现的克隆不仅仅是深度克隆, 更重要的是通过泛型限定,可以检查出要克隆的对象是否支持序列化, 这项检查是编译器完成的,不是在运行时抛出异常,这种是方案明显优于使用Object类的clone方法克隆对象。
* 让问题在编译的时候暴露出来总是优于把问题留到运行时。
*/
@SuppressWarnings("unchecked")
public static <T> T clone(T obj) throws IOException, ClassNotFoundException {
ByteArrayOutputStream bout = new ByteArrayOutputStream();
ObjectOutputStream oos = new ObjectOutputStream(bout);
oos.writeObject(obj);
ByteArrayInputStream bin = new ByteArrayInputStream(bout.toByteArray());
ObjectInputStream ois = new ObjectInputStream(bin);
T readObject = (T) ois.readObject();
return readObject;
// 说明:调用ByteArrayInputStream或ByteArrayOutputStream对象的close方法没有任何意义
// 这两个基于内存的流只要垃圾回收器清理对象就能够释放资源,这一点不同于对外部资源(如文件流)的释放
}
}
|
dimitaruzunov/oop-practicum-is-2015-16
|
week03/solutions/problem1/person_with_dog_cstring.cpp
|
#include <iostream>
#include <cstring>
using namespace std;
// A dog with a fixed-size, NUL-terminated name buffer.
class Dog {
    char name[30];
public:
    // Copies _name into the internal buffer, truncating to fit and always
    // NUL-terminating. (The original unbounded strcpy overflowed `name`
    // for inputs of 30+ characters.)
    Dog(const char _name[] = "") {
        strncpy(name, _name, sizeof(name) - 1);
        name[sizeof(name) - 1] = '\0';
    }

    // Prints a greeting line to stdout.
    void greet() {
        cout << "Bark, bark! I am " << name << ", a talking dog." << endl;
    }
};
// A person with a bounded name, an age, and a Dog held by value.
class Person {
    char name[30];
    int age;
    Dog dog;
public:
    // Copies _name with truncation and guaranteed NUL-termination (the
    // original unbounded strcpy overflowed `name` for inputs of 30+
    // characters); the Dog is copied by value.
    Person(const char _name[], int _age, Dog _dog) {
        strncpy(name, _name, sizeof(name) - 1);
        name[sizeof(name) - 1] = '\0';
        age = _age;
        dog = _dog;
    }

    // Prints a greeting for the person followed by the dog's greeting.
    void greet() {
        cout << "Hi! I am " << name << " and I am " << age << " years old." << endl;
        cout << "And this is my dog:" << endl;
        dog.greet();
    }
};
// Demo: construct a person with a dog and print both greetings.
int main() {
    Person ivan("Ivan", 15, Dog("Johny"));
    ivan.greet();
    return 0;
}
|
isabella232/spring-data-geode
|
spring-data-geode/src/main/java/org/springframework/data/gemfire/config/schema/SchemaObjectDefiner.java
|
/*
* Copyright 2017-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.gemfire.config.schema;
import java.util.Collections;
import java.util.Optional;
import java.util.Set;
/**
* The {@link SchemaObjectDefiner} interface defines a contract for implementing objects
* that can reverse engineer a schema object instance back into a definition of the schema object.
*
* @author <NAME>
* @see org.springframework.data.gemfire.config.schema.SchemaObjectDefinition
* @see org.springframework.data.gemfire.config.schema.SchemaObjectType
* @since 2.0.0
*/
public interface SchemaObjectDefiner {

  /**
   * Returns the {@link Set} of {@link SchemaObjectType schema object types} this definer
   * knows how to reverse engineer. The default implementation handles none.
   *
   * @return a {@link Set} of {@link SchemaObjectType schema object types} definable by this definer.
   * @see org.springframework.data.gemfire.config.schema.SchemaObjectType
   * @see java.util.Set
   */
  default Set<SchemaObjectType> getSchemaObjectTypes() {
    return Collections.emptySet();
  }

  /**
   * Determines whether this definer is able to define the given {@link Object schema object} instance.
   * A {@literal null} instance is never definable.
   *
   * @param schemaObject {@link Object} to evaluate.
   * @return a boolean value indicating whether this definer is able to define
   * the given {@link Object schema object} instance.
   * @see java.lang.Object#getClass()
   * @see #canDefine(Class)
   */
  default boolean canDefine(Object schemaObject) {
    return schemaObject != null && canDefine(schemaObject.getClass());
  }

  /**
   * Determines whether this definer is able to define schema objects of the given {@link Class type}
   * by resolving the class to its {@link SchemaObjectType}.
   *
   * @param schemaObjectType {@link Class type} of the {@link Object schema object} instance to evaluate.
   * @return a boolean value indicating whether this definer is able to define {@link Object schema objects}
   * of the given {@link Class type}.
   * @see org.springframework.data.gemfire.config.schema.SchemaObjectType#from(Class)
   * @see #canDefine(SchemaObjectType)
   */
  default boolean canDefine(Class<?> schemaObjectType) {
    return canDefine(SchemaObjectType.from(schemaObjectType));
  }

  /**
   * Determines whether this definer is able to define schema objects of the given
   * {@link SchemaObjectType enumerated schema object type}, i.e. whether the type is contained in
   * {@link #getSchemaObjectTypes()}.
   *
   * @param schemaObjectType {@link SchemaObjectType} to evaluate.
   * @return a boolean value indicating whether this handler is able to handle schema objects
   * of the given {@link SchemaObjectType enumerated schema object type}.
   * @see org.springframework.data.gemfire.config.schema.SchemaObjectType
   */
  default boolean canDefine(SchemaObjectType schemaObjectType) {
    return getSchemaObjectTypes().contains(schemaObjectType);
  }

  /**
   * Reverse engineers the given {@link Object schema object} instance into an {@link Optional}
   * {@link SchemaObjectDefinition definition}; empty when the instance cannot be defined.
   *
   * @param schemaObject {@link Object schema object} to define.
   * @return an {@link Optional} {@link SchemaObjectDefinition definition} for the given
   * {@link Object schema object} instance.
   * @see org.springframework.data.gemfire.config.schema.SchemaObjectDefinition
   */
  Optional<? extends SchemaObjectDefinition> define(Object schemaObject);

}
|
cragkhit/elasticsearch
|
references/bcb_chosen_clones/selected#560379#23#27.java
|
<reponame>cragkhit/elasticsearch<gh_stars>10-100
// Slurps the entire input stream into the in-memory `data` byte array
// (field declared in the enclosing class) using Apache Commons IO.
// NOTE(review): the caller retains ownership of `is`; it is not closed here.
public SlurpedByteArrayItem(InputStream is) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    IOUtils.copy(is, baos);
    data = baos.toByteArray();
}
|
Waitsnake/xee
|
Classes/XeeListSource.h
|
#import "XeeImageSource.h"
@class XeeListEntry;
// Image source backed by a mutable, lockable list of entries.  Maintains
// current/next/previous entries and runs a background loader thread.
@interface XeeListSource : XeeImageSource {
	NSMutableArray *entries;
	// listlock guards `entries`; loadlock serializes the loader thread.
	NSRecursiveLock *listlock, *loadlock;
	NSArray *types;
	XeeListEntry *currentry, *nextentry, *preventry;
	XeeChange changes;
	NSInteger oldindex;
	BOOL loader_running, exiting;
	XeeImage *loadingimage;
}

- (instancetype)init;
// Stops the background loader and releases in-flight work.
- (void)stop;

@property (readonly) NSInteger numberOfImages;
@property (readonly) NSInteger indexOfCurrentImage;
- (NSString *)descriptiveNameOfCurrentImage;

// Selects the image at `index`; `next` hints which entry to preload.
- (void)pickImageAtIndex:(NSInteger)index next:(NSInteger)next;
- (void)pickImageAtIndex:(NSInteger)index;

// Bracket batched mutations of the entry list (see add/remove below).
- (void)startListUpdates;
- (void)endListUpdates;

- (void)addEntry:(XeeListEntry *)entry;
- (void)addEntryUnlessExists:(XeeListEntry *)entry;
- (void)removeEntry:(XeeListEntry *)entry;
- (void)removeEntryMatchingObject:(id)obj;
- (void)removeAllEntries;

- (void)setCurrentEntry:(XeeListEntry *)entry;
- (void)setPreviousEntry:(XeeListEntry *)entry;
- (void)setNextEntry:(XeeListEntry *)entry;

// Spawns the loader thread (entry point is -loader).
- (void)launchLoader;
- (void)loader;

@end
// One entry in an XeeListSource.  Caches its decoded image with a manual
// retain count (-retainImage / -releaseImage) independent of ObjC retain.
@interface XeeListEntry : NSObject <NSCopying> {
	XeeImage *savedimage;
	int imageretain;
}

- (instancetype)init;
- (instancetype)initAsCopyOf:(XeeListEntry *)other;
- (NSString *)descriptiveName;
// Returns YES if this entry represents `obj` (used by removeEntryMatchingObject:).
- (BOOL)matchesObject:(id)obj;
- (void)retainImage;
- (void)releaseImage;
// Returns the cached image, if any; -produceImage creates it on demand.
- (XeeImage *)image;
- (XeeImage *)produceImage;
- (id)copyWithZone:(NSZone *)zone;

@end
|
pboechat/xithcluster
|
xith3d/src/org/xith3d/effects/bloom/GLSLBloomFactory.java
|
<reponame>pboechat/xithcluster
/**
* Copyright (c) 2003-2010, Xith3D Project Group all rights reserved.
*
* Portions based on the Java3D interface, Copyright by Sun Microsystems.
* Many thanks to the developers of Java3D and Sun Microsystems for their
* innovation and design.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of the 'Xith3D Project Group' nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) A
* RISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE
*/
package org.xith3d.effects.bloom;
import java.io.IOException;
import java.net.URL;
import java.util.List;
import org.jagatoo.opengl.enums.FaceCullMode;
import org.jagatoo.opengl.enums.TextureFormat;
import org.openmali.types.twodee.Sized2iRO;
import org.openmali.vecmath2.Colorf;
import org.xith3d.base.Xith3DEnvironment;
import org.xith3d.loaders.shaders.impl.glsl.GLSLShaderLoader;
import org.xith3d.loaders.texture.TextureCreator;
import org.xith3d.render.ForegroundRenderPass;
import org.xith3d.render.RenderPass;
import org.xith3d.render.TextureRenderTarget;
import org.xith3d.scenegraph.Appearance;
import org.xith3d.scenegraph.GLSLFragmentShader;
import org.xith3d.scenegraph.GLSLContext;
import org.xith3d.scenegraph.GLSLShaderProgram;
import org.xith3d.scenegraph.GLSLVertexShader;
import org.xith3d.scenegraph.GroupNode;
import org.xith3d.scenegraph.PolygonAttributes;
import org.xith3d.scenegraph.StaticTransform;
import org.xith3d.scenegraph.Texture;
import org.xith3d.scenegraph.Texture2D;
import org.xith3d.scenegraph.Transform3D;
import org.xith3d.scenegraph.primitives.Rectangle;
/**
* The GLSLBloomFactory is an implementation of Bloom effect using GLSL shaders.<br>
* <br>
* I requires more render passes but all pixels computations (brightness, gaussian, and blending)
* are done by the GPU.<br>
* <br>
*
* @author <NAME> (aka Mancer)
*/
/**
 * GLSL-shader implementation of the bloom effect: the scene is rendered to a
 * texture, run through a bright-pass filter, blurred with two separable
 * gaussian passes, and finally blended back over the original scene.
 */
public class GLSLBloomFactory extends BloomFactory
{
// Edge length (in texels) of the intermediate bright-pass/blur render targets.
private static final int TEXTURE_SIZE = 128;
// One GLSL program per post-processing stage.
private GLSLContext brightpassFilter = null;
private GLSLContext verticalGaussianFilter = null;
private GLSLContext horizontalGaussianFilter = null;
private GLSLContext blendFilter = null;
// Render-target textures: scene input, bright-pass output, blur intermediate, blur output.
private Texture2D inTex, brightTex, gaussianTex, outTex;
// Clear color used for every render target (opaque black).
private final Colorf bgColor = new Colorf( 0f, 0f, 0f, 1.0f );
/**
 * Resolves a classpath resource to a URL.
 *
 * @param resName classpath-relative resource name
 * @return the resource URL (never null)
 * @throws IOException if the resource cannot be found on the classpath
 */
private static URL getResource( String resName ) throws IOException
{
URL url = GLSLBloomFactory.class.getClassLoader().getResource( resName );
if ( url == null )
{
throw new IOException( "Could not find resource \"" + resName + "\"." );
}
return ( url );
}
/**
 * Loads the shared vertex shader plus the four fragment shaders (brightness,
 * vertical gaussian, horizontal gaussian, blend) and binds their texture
 * sampler uniforms to texture unit 0.
 *
 * @throws IOException if any shader resource is missing
 */
private void loadShaders() throws IOException
{
GLSLVertexShader vertexShader;
GLSLFragmentShader fragmentShader;
// The same vertex shader is reused by all four programs.
vertexShader = GLSLShaderLoader.getInstance().loadVertexShader( getResource( "resources/org/xith3d/shaders/bloom/bloom.glslvert" ) );
fragmentShader = GLSLShaderLoader.getInstance().loadFragmentShader( getResource( "resources/org/xith3d/shaders/bloom/brightness_filter.glslfrag" ) );
GLSLShaderProgram program = new GLSLShaderProgram();
program.addShader( vertexShader );
program.addShader( fragmentShader );
brightpassFilter = new GLSLContext( program );
brightpassFilter.getUniformParameters().setUniformVar( "tex", 0 );
fragmentShader = GLSLShaderLoader.getInstance().loadFragmentShader( getResource( "resources/org/xith3d/shaders/bloom/gaussian_v.glslfrag" ) );
program = new GLSLShaderProgram();
program.addShader( vertexShader );
program.addShader( fragmentShader );
verticalGaussianFilter = new GLSLContext( program );
verticalGaussianFilter.getUniformParameters().setUniformVar( "inTexture", 0 );
fragmentShader = GLSLShaderLoader.getInstance().loadFragmentShader( getResource( "resources/org/xith3d/shaders/bloom/gaussian_h.glslfrag" ) );
program = new GLSLShaderProgram();
program.addShader( vertexShader );
program.addShader( fragmentShader );
horizontalGaussianFilter = new GLSLContext( program );
horizontalGaussianFilter.getUniformParameters().setUniformVar( "inTexture", 0 );
fragmentShader = GLSLShaderLoader.getInstance().loadFragmentShader( getResource( "resources/org/xith3d/shaders/bloom/bloom.glslfrag" ) );
program = new GLSLShaderProgram();
program.addShader( vertexShader );
program.addShader( fragmentShader );
blendFilter = new GLSLContext( program );
}
/**
 * Allocates the four RGBA render-target textures. The scene texture uses the
 * full view resolution; the filter textures use the fixed TEXTURE_SIZE.
 *
 * @param resolution view resolution used for the scene capture texture
 */
private void initTextures( Sized2iRO resolution )
{
inTex = TextureCreator.createTexture( TextureFormat.RGBA, resolution.getWidth(), resolution.getHeight(), bgColor );
inTex.enableAutoFreeLocalData();
brightTex = TextureCreator.createTexture( TextureFormat.RGBA, TEXTURE_SIZE, TEXTURE_SIZE, bgColor );
brightTex.enableAutoFreeLocalData();
gaussianTex = TextureCreator.createTexture( TextureFormat.RGBA, TEXTURE_SIZE, TEXTURE_SIZE, bgColor );
gaussianTex.enableAutoFreeLocalData();
outTex = TextureCreator.createTexture( TextureFormat.RGBA, TEXTURE_SIZE, TEXTURE_SIZE, bgColor );
outTex.enableAutoFreeLocalData();
}
/**
 * Registers one render-to-texture pass that draws a screen-aligned rectangle
 * textured with {@code in}, shaded by {@code program}, into {@code out}.
 *
 * @param env environment the pass is registered with
 * @param res view resolution (used for the rectangle's aspect ratio)
 * @param program GLSL program applied to the rectangle
 * @param in source texture (texture unit 0)
 * @param out destination render-target texture
 */
private void createFilter( Xith3DEnvironment env, Sized2iRO res, GLSLContext program, Texture in, Texture out )
{
// TODO: Check, in what way this texture could be cached!
Texture2D empty = TextureCreator.createTexture( TextureFormat.RGBA, 1024, 1024, new Colorf( 0f, 0f, 0f, 1f ) );
Rectangle finalTarget = new Rectangle( 2f, 2f / res.getWidth() * res.getHeight(), empty );
Appearance appearance = finalTarget.getAppearance( true );
appearance.setTexture( 0, in );
appearance.setShaderProgramContext( program );
RenderPass finalPass = ForegroundRenderPass.createParallel();
finalPass.getConfig().setViewTransform( Transform3D.IDENTITY );
finalPass.getBranchGroup().addChild( finalTarget );
TextureRenderTarget renderTarget = new TextureRenderTarget( finalPass.getBranchGroup(), out, bgColor );
finalPass.setRenderTarget( renderTarget );
env.addRenderPass( finalPass );
}
/**
 * Wires up the full bloom pipeline: scene -> inTex, brightpass inTex ->
 * brightTex, gaussian blur brightTex -> gaussianTex -> outTex, then a final
 * foreground pass blending inTex and outTex on screen.
 * NOTE(review): the horizontal gaussian runs before the vertical one here --
 * presumably intentional for the separable blur, but confirm against the
 * shader sources.
 */
@Override
public void prepareForBloom( Xith3DEnvironment env, Sized2iRO resolution, GroupNode group ) throws IOException
{
loadShaders();
initTextures( resolution );
List< RenderPass > passes = env.getRenderer().getRenderPasses( group.getRoot() );
// Redirect the scene's first render pass into the scene capture texture.
TextureRenderTarget renderTarget1 = new TextureRenderTarget( group, inTex, bgColor );
passes.get( 0 ).setRenderTarget( renderTarget1 );
createFilter( env, resolution, brightpassFilter, inTex, brightTex );
createFilter( env, resolution, horizontalGaussianFilter, brightTex, gaussianTex );
createFilter( env, resolution, verticalGaussianFilter, gaussianTex, outTex );
// TODO: Check, in what way this texture could be cached!
Texture2D empty = TextureCreator.createTexture( TextureFormat.RGBA, 1024, 1024, new Colorf( 0f, 0f, 0f, 1f ) );
Rectangle finalTarget = new Rectangle( 2f, 2f / resolution.getWidth() * resolution.getHeight(), empty );
Appearance appearance = finalTarget.getAppearance( true );
// Unit 0: original scene, unit 1: blurred bright regions.
appearance.setTexture( 0, inTex );
appearance.setTexture( 1, outTex );
blendFilter.getUniformParameters().setUniformVar( "originalWeight", getSceneWeight() );
blendFilter.getUniformParameters().setUniformVar( "bloomWeight", getBloomWeight() );
blendFilter.getUniformParameters().setUniformVar( "originalTex", 0 );
blendFilter.getUniformParameters().setUniformVar( "filteredTex", 1 );
appearance.setShaderProgramContext( blendFilter );
appearance.setPolygonAttributes( new PolygonAttributes( FaceCullMode.BACK ) );
//RenderPass finalPass = env.addParallelBranch();
RenderPass finalPass = ForegroundRenderPass.createParallel();
env.addRenderPass( finalPass );
finalPass.getConfig().setViewTransform( Transform3D.IDENTITY );
StaticTransform.translate( finalTarget, 0, 0, -1f );
finalPass.getBranchGroup().addChild( finalTarget );
//TODO the skybox is still not visible ... why ???
//TODO When the bloom is applied on BSPLoader testcase, the polygons aren't displayed correctly
}
/**
 * Pushes the current scene/bloom weights into the blend shader, if it has
 * already been created by prepareForBloom().
 */
@Override
protected void updateBloomSettings()
{
if ( blendFilter != null )
{
blendFilter.getUniformParameters().setUniformVar( "originalWeight", getSceneWeight() );
blendFilter.getUniformParameters().setUniformVar( "bloomWeight", getBloomWeight() );
}
}
// Default constructor; shaders and textures are created lazily in prepareForBloom().
public GLSLBloomFactory()
{
}
}
|
hangilc/myclinic-spring
|
rcpt/src/main/java/jp/chang/myclinic/rcpt/unit/Extendable.java
|
<filename>rcpt/src/main/java/jp/chang/myclinic/rcpt/unit/Extendable.java<gh_stars>0
package jp.chang.myclinic.rcpt.unit;
/**
 * A unit that can absorb another instance of the same type.
 * NOTE(review): presumably used to merge adjacent billing/receipt units --
 * confirm against the implementing classes.
 *
 * @param <T> type of the instance that may be merged into this one
 */
interface Extendable<T> {
/** Returns true if this instance can be extended with {@code a}; callers should check this before invoking extendWith. */
boolean isExtendableWith(T a);
/** Merges {@code a} into this instance; only valid when isExtendableWith(a) is true. */
void extendWith(T a);
}
|
alexandrustana/pure-movie-server
|
project/Dependencies.scala
|
import sbt._
/**
  * Central definition of every third-party dependency (version strings and
  * ModuleIDs) used by the sbt build. Each version constant carries a link to
  * the upstream release page so upgrades can be checked quickly.
  */
object Dependencies {
//============================================================================================
//============================================================================================
//======================================= DEPENDENCIES =======================================
//============================================================================================
//============================================================================================
lazy val pureHarmVersion = "0.0.5" //https://github.com/busymachines/pureharm/releases
lazy val catsCoreVersion = "2.1.1" //https://github.com/typelevel/cats/releases
lazy val catsEffectVersion = "2.1.3" //https://github.com/typelevel/cats-effect/releases
lazy val fs2Version = "2.3.0" //https://github.com/functional-streams-for-scala/fs2/releases
lazy val circeVersion = "0.13.0" //https://github.com/circe/circe/releases
lazy val http4sVersion = "0.21.4" //https://github.com/http4s/http4s/releases
lazy val tsecVersion = "0.2.0" //https://github.com/jmcardon/tsec/releases
lazy val doobieVersion = "0.9.0" //https://github.com/tpolecat/doobie/releases
lazy val flywayVersion = "6.4.1" //https://github.com/flyway/flyway/releases
lazy val shapelessVersion = "2.3.3" //https://github.com/milessabin/shapeless/releases
lazy val spireVersion = "0.17.0-M1" //https://github.com/non/spire/releases
lazy val log4catsVersion = "1.1.1" //https://github.com/ChristopherDavenport/log4cats/releases
lazy val logbackClassicVersion = "1.2.3" //https://github.com/qos-ch/logback/releases
lazy val javaxMailVersion = "1.6.2" // https://github.com/javaee/javamail/releases
lazy val scalaScrapperVersion = "2.2.0" //https://github.com/ruippeixotog/scala-scraper/releases
lazy val pureConfigVersion = "0.12.3" //https://github.com/pureconfig/pureconfig/releases
lazy val specs2Version = "4.9.4" //https://github.com/etorreborre/specs2/releases
// Helper: builds a pureharm module id for the shared pureHarmVersion.
def pureharm(m: String): ModuleID = "com.busymachines" %% s"pureharm-$m" % pureHarmVersion withSources ()
lazy val phCore: ModuleID = pureharm("core")
lazy val phConfig: ModuleID = pureharm("config")
lazy val phJson: ModuleID = pureharm("json-circe")
lazy val phFlyway: ModuleID = pureharm("db-core-flyway")
lazy val phEffects: ModuleID = pureharm("effects-cats")
//============================================================================================
//================================= http://typelevel.org/scala/ ==============================
//======================================== typelevel ========================================
//============================================================================================
lazy val catsCore: ModuleID = "org.typelevel" %% "cats-core" % catsCoreVersion withSources ()
lazy val catsKernel: ModuleID = "org.typelevel" %% "cats-kernel" % catsCoreVersion withSources ()
lazy val catsMacros: ModuleID = "org.typelevel" %% "cats-macros" % catsCoreVersion withSources ()
lazy val catsFree: ModuleID = "org.typelevel" %% "cats-free" % catsCoreVersion withSources ()
lazy val cats: Seq[ModuleID] = Seq(catsCore, catsKernel, catsMacros, catsFree)
lazy val catsEffect: ModuleID = "org.typelevel" %% "cats-effect" % catsEffectVersion withSources ()
lazy val fs2Core: ModuleID = "co.fs2" %% "fs2-core" % fs2Version withSources ()
lazy val fs2IO: ModuleID = "co.fs2" %% "fs2-io" % fs2Version withSources ()
lazy val fs2: Seq[ModuleID] = Seq(fs2Core, fs2IO)
lazy val circeCore: ModuleID = "io.circe" %% "circe-core" % circeVersion
lazy val circeGeneric: ModuleID = "io.circe" %% "circe-generic" % circeVersion
lazy val circeGenericExtras: ModuleID = "io.circe" %% "circe-generic-extras" % circeVersion
lazy val circe: Seq[ModuleID] = Seq(circeCore, circeGeneric, circeGenericExtras)
lazy val http4sBlazeServer: ModuleID = "org.http4s" %% "http4s-blaze-server" % http4sVersion withSources ()
lazy val http4sCirce: ModuleID = "org.http4s" %% "http4s-circe" % http4sVersion withSources ()
lazy val http4sDSL: ModuleID = "org.http4s" %% "http4s-dsl" % http4sVersion withSources ()
lazy val http4s: Seq[ModuleID] = Seq(http4sBlazeServer, http4sCirce, http4sDSL)
lazy val doobieCore = "org.tpolecat" %% "doobie-core" % doobieVersion withSources ()
lazy val doobieHikari = "org.tpolecat" %% "doobie-hikari" % doobieVersion withSources ()
lazy val doobiePostgres = "org.tpolecat" %% "doobie-postgres" % doobieVersion withSources ()
lazy val doobieTK = "org.tpolecat" %% "doobie-specs2" % doobieVersion % Test withSources ()
lazy val doobie: Seq[ModuleID] = Seq(doobieCore, doobieHikari, doobiePostgres, doobieTK)
lazy val shapeless: ModuleID = "com.chuusai" %% "shapeless" % shapelessVersion withSources ()
lazy val flyway = "org.flywaydb" % "flyway-core" % flywayVersion withSources ()
//============================================================================================
//========================================== math ===========================================
//============================================================================================
lazy val spire: ModuleID = "org.typelevel" %% "spire" % spireVersion withSources ()
//============================================================================================
//======================================== security ========================================
//============================================================================================
lazy val tsec: Seq[ModuleID] = Seq(
"io.github.jmcardon" %% "tsec-common" % tsecVersion withSources (),
"io.github.jmcardon" %% "tsec-password" % tsecVersion withSources (),
"io.github.jmcardon" %% "tsec-mac" % tsecVersion withSources (),
"io.github.jmcardon" %% "tsec-jwt-mac" % tsecVersion withSources ()
)
//============================================================================================
//========================================= logging =========================================
//============================================================================================
lazy val log4cats = "io.chrisdavenport" %% "log4cats-slf4j" % log4catsVersion withSources ()
//this is a Java library, notice that we used one single % instead of %%
//it is the backend implementation used by log4cats
lazy val logbackClassic = "ch.qos.logback" % "logback-classic" % logbackClassicVersion withSources ()
//============================================================================================
//========================================== email ==========================================
//============================================================================================
//this is a Java library, notice that we used one single % instead of %%
lazy val javaxMail = "com.sun.mail" % "javax.mail" % javaxMailVersion withSources ()
//============================================================================================
//========================================= html =============================================
//============================================================================================
lazy val scalaScrapper = "net.ruippeixotog" %% "scala-scraper" % scalaScrapperVersion withSources ()
//============================================================================================
//========================================== config ==========================================
//============================================================================================
lazy val pureConfig: ModuleID = "com.github.pureconfig" %% "pureconfig" % pureConfigVersion withSources ()
//============================================================================================
//========================================= testing =========================================
//============================================================================================
lazy val specs2: ModuleID = "org.specs2" %% "specs2-core" % specs2Version withSources ()
lazy val specs2Test: ModuleID = specs2 % Test
//============================================================================================
//======================================= transitive ========================================
//============================================================================================
//these are transitive dependencies that are brought in by other libraries, and here we
//list the ones that tend to come with conflicting version so that we can override them
//so as to remove the annoying eviction warning of older version. This list will have to
//be curated with great care from time to time.
lazy val transitive = Seq(
//---------------------------
//https://commons.apache.org/proper/commons-codec/
//tsec, and http4s depend on this
"commons-codec" % "commons-codec" % "1.12" withSources (),
//---------------------------
//https://github.com/Log4s/log4s
//different http4s modules depend on different versions
"org.log4s" %% "log4s" % "1.7.0" withSources (),
//---------------------------
//https://github.com/typelevel/machinist
//spire and cats core depend on this
"org.typelevel" %% "machinist" % "0.6.6" withSources (),
)
}
|
jcoehoorn/basic-computer-games
|
33_Dice/java/src/DiceGame.java
|
public class DiceGame {
public static void main(String[] args) {
Dice dice = new Dice();
dice.play();
}
}
|
phatblat/macOSPrivateFrameworks
|
PrivateFrameworks/SpeechRecognitionCommandServices/CMDPGrammarData.h
|
<reponame>phatblat/macOSPrivateFrameworks
//
// Generated by class-dump 3.5 (64 bit).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by <NAME>.
//
#import "NSObject.h"
@class NSString;
// Immutable value object pairing a recognized word with its speech-command
// identifiers (class-dump-generated header; presumably part of the speech
// recognition command grammar -- confirm against the framework binary).
@interface CMDPGrammarData : NSObject
{
NSString *_commandIdentifier;
NSString *_builtInLMIdentifier;
NSString *_word;
}
@property(readonly) NSString *word; // @synthesize word=_word;
@property(readonly) NSString *builtInLMIdentifier; // @synthesize builtInLMIdentifier=_builtInLMIdentifier;
@property(readonly) NSString *commandIdentifier; // @synthesize commandIdentifier=_commandIdentifier;
- (void).cxx_destruct;
// Designated initializer: stores all three strings; no copies are visible here.
- (id)initWithGrammarDataWord:(id)arg1 lmIdentifer:(id)arg2 commandIdentifier:(id)arg3;
@end
|
davidstutz/daml-shape-completion
|
data/kitti/12_11_visualize_binvox_points.py
|
<reponame>davidstutz/daml-shape-completion
import argparse
import sys
import os
sys.path.insert(1, os.path.realpath(__file__ + '../lib/'))
from blender_utils import *
import common
import json
import h5py
import numpy as np
import ntpath
def read_json(file):
    """
    Parse a JSON file into a Python object.

    :param file: path to file to read
    :type file: str
    :return: parsed JSON as dict
    :rtype: dict
    """
    assert os.path.exists(file), 'file %s not found' % file
    with open(file, 'r') as handle:
        parsed = json.load(handle)
    return parsed
def read_hdf5(file, key='tensor'):
    """
    Read a tensor, i.e. numpy array, from HDF5.

    :param file: path to file to read
    :type file: str
    :param key: key to read
    :type key: str
    :return: tensor
    :rtype: numpy.ndarray
    """
    assert os.path.exists(file), 'file %s not found' % file
    # Context manager guarantees the HDF5 handle is closed even if the key
    # lookup raises (the original leaked the open file on a missing key).
    with h5py.File(file, 'r') as h5f:
        return h5f[key][()]
# Blender-driven visualization entry point: invoked as
#   $BLENDER --background --python <script> -- <config.json>
# Renders N evenly spaced input volumes from the configured HDF5 file as PNGs.
if __name__ == '__main__':
try:
# Blender passes its own args first; everything after "--" belongs to us.
argv = sys.argv[sys.argv.index("--") + 1:]
except ValueError:
log('[Error] "--" not found, call as follows:', LogLevel.ERROR)
log('[Error] $BLENDER --background --python 12_3_visualize_binvox.py -- 1>/dev/null config_folder', LogLevel.ERROR)
exit()
if len(argv) < 1:
log('[Error] not enough parameters, call as follows:', LogLevel.ERROR)
log('[Error] $BLENDER --background --python 12_3_visualize_binvox.py -- 1>/dev/null config_folder', LogLevel.ERROR)
exit()
config_file = argv[0]
assert os.path.exists(config_file), 'file %s does not exist' % config_file
config = read_json(config_file)
# Dataset split name = config filename without the ".json" suffix.
# NOTE(review): "set" shadows the Python builtin -- safe here but worth renaming.
set = ntpath.basename(config_file)[:-5]
height = config['height']
width = config['width']
depth = config['depth']
# Normalize the largest volume dimension to unit length.
scale = 1./max(height, width, depth)
inputs = read_hdf5(common.filename(config, 'input_file', '_f.h5', set))
log(inputs.shape)
vis_directory = common.filename(config, 'vis_dir', '', set)
if not os.path.isdir(vis_directory):
os.makedirs(vis_directory)
# Heuristic voxel size shrinks as grid resolution grows (empirical values).
voxel_size = 0.007
if height >= 32:
voxel_size = 0.0055
if height >= 48:
voxel_size = 0.004
log('[Data] voxel size ' + str(voxel_size))
N = 30
log('[Data] %d samples' % inputs.shape[0])
# Render N samples evenly spaced across the dataset.
for i in range(N):
n = i * (inputs.shape[0] // N)
camera_target = initialize()
input_material = make_material('BRC_Material_Point_Cloud', (0.65, 0.23, 0.25), 1, True)
load_volume(inputs[n][0], voxel_size, input_material, (0, 0, 0), (width*scale, depth*scale, height*scale), 'zxy')
rotation = (5, 0, -55)
distance = 0.35
png_file = vis_directory + '/%d_bin_points.png' % n
render(camera_target, png_file, rotation, distance)
log('[Data] wrote %s' % png_file)
|
consulo/consulo-spring
|
web/src/com/intellij/spring/web/mvc/MVCViewProvider.java
|
package com.intellij.spring.web.mvc;
import com.intellij.openapi.extensions.ExtensionPointName;
import com.intellij.openapi.util.Pair;
import com.intellij.spring.SpringModel;
import com.intellij.spring.model.PsiElementPointer;
import com.intellij.javaee.web.facet.WebFacet;
import java.util.List;
/**
* @author <NAME>
*/
/** Extension point for contributing Spring MVC views; implementations are registered under EP_NAME. */
public interface MVCViewProvider {
/** Extension point id under which implementations are registered. */
ExtensionPointName<MVCViewProvider> EP_NAME = ExtensionPointName.create("com.intellij.spring.mvcViewProvider");
/** Returns (view name, pointer to the defining PSI element) pairs for the given Spring model and web facet. */
List<Pair<String, PsiElementPointer>> getViews(SpringModel model, WebFacet facet);
}
|
eguneys/lose-your-marbles
|
app/scripts/prefabs/level_splash.js
|
'use strict';

define(['phaser'], function(Phaser) {

    // Splash overlay group showing the level number (and optionally a "bonus"
    // banner) built from frames of the 'marbleatlas' texture atlas.
    function LevelSplash(game, level, parent, isBonus) {
        Phaser.Group.call(this, game, parent);

        if (isBonus) {
            this.bonusText = this.create(0, -60, 'marbleatlas', 'DIALOG_TEXT_BONUS');
        }

        this.levelText = this.create(0, 0, 'marbleatlas', 'DIALOG_TEXT_LEVEL');
        this.levelDigit = this.create(0, 0, 'marbleatlas', 'DIALOG_TEXT_DIGITS');

        // Register one single-frame animation per digit, keyed '0'..'9'.
        for (var digit = 0; digit <= 9; digit++) {
            this.levelDigit.animations.add(String(digit), ['DIALOG_TEXT_DIGITS' + digit]);
        }

        // Place the digit right after the "LEVEL" label and show the frame
        // matching the requested level.
        this.levelDigit.x = this.levelText.width;
        this.levelDigit.animations.play(level);

        this.width = this.levelText.width;
        this.height = this.levelText.height;
    }

    LevelSplash.prototype = Object.create(Phaser.Group.prototype);
    LevelSplash.prototype.constructor = LevelSplash;

    return LevelSplash;
});
|
muyiluop/nutzmore
|
nutz-plugins-postgis/src/main/java/org/nutz/postgis/dao/package-info.java
|
/**
* @author Kerbores(<EMAIL>)
*
*/
package org.nutz.postgis.dao;
|
hlubek/golang-meetup
|
2020-01/go-for-beginners/concurrency/coordination.go
|
<reponame>hlubek/golang-meetup<filename>2020-01/go-for-beginners/concurrency/coordination.go
package main
import (
"fmt"
"sync"
"time"
)
// main demonstrates goroutine coordination with sync.WaitGroup: each goroutine
// is registered via wg.Add(1) and signals completion with wg.Done(); the main
// goroutine blocks in wg.Wait() until the counter reaches zero.
// NOTE(review): the START/END OMIT markers delimit the excerpt shown by the Go
// "present" slide tool, so no comments are added between them.
func main() {
// START OMIT
var wg sync.WaitGroup
wg.Add(1)
go func() {
time.Sleep(300 * time.Millisecond)
fmt.Println("Wake up!")
wg.Done()
}()
wg.Add(1)
go func() {
time.Sleep(5 * time.Millisecond)
fmt.Println("Get up!")
wg.Done()
}()
fmt.Println("Wait for it...")
wg.Wait()
// END OMIT
}
|
ruditimmermans/golden
|
pkg/config/ConfigManager.go
|
package config
import (
"github.com/vit1251/golden/pkg/mapper"
"github.com/vit1251/golden/pkg/registry"
"log"
)
// ConfigManager reads and writes the application configuration through the
// registry-provided MapperManager's database-backed config mapper.
type ConfigManager struct {
registry *registry.Container
}
// NewConfigManager constructs a ConfigManager bound to the given registry.
func NewConfigManager(r *registry.Container) *ConfigManager {
    return &ConfigManager{registry: r}
}
// Main holds the station identity and FTN addressing settings.
type Main struct {
NetAddr string
Password string
Address string /* Point FTN address */
Country string
City string
RealName string
StationName string
Link string /* Boss FTN address */
TearLine string /* Message TearLine */
Origin string /* Message Origin */
}
// Mailer holds mail-polling settings.
type Mailer struct {
Interval string
}
// Netmail holds netmail message settings.
type Netmail struct {
Charset string
}
// Echomail holds echomail message settings.
type Echomail struct {
Charset string
}
// Config aggregates all configuration sections; TOML tags match the section names.
type Config struct {
Main Main `toml:"Main"`
Mailer Mailer `toml:"Mailer"`
Netmail Netmail `toml:"Netmail"`
Echomail Echomail `toml:"Echomail"`
}
// Package-level cache populated lazily by GetConfig.
// NOTE(review): unsynchronized shared state -- not safe for concurrent use; confirm callers are single-threaded.
var config *Config
// Store persists the given configuration into the database-backed config
// mapper, section by section, and invalidates the in-memory cache so that a
// subsequent GetConfig returns the freshly stored values instead of a stale
// snapshot (previously the package-level cache was never invalidated).
func (self *ConfigManager) Store(c *Config) error {
    mapperManager := self.restoreMapperManager()
    configMapper := mapperManager.GetConfigMapper()
    outdateConfig, _ := configMapper.GetConfigFromDatabase()
    /* Main */
    outdateConfig.Set("main", "Address", c.Main.Address)
    outdateConfig.Set("main", "Password", c.Main.Password)
    outdateConfig.Set("main", "Origin", c.Main.Origin)
    outdateConfig.Set("main", "TearLine", c.Main.TearLine)
    outdateConfig.Set("main", "Link", c.Main.Link)
    outdateConfig.Set("main", "StationName", c.Main.StationName)
    outdateConfig.Set("main", "RealName", c.Main.RealName)
    outdateConfig.Set("main", "NetAddr", c.Main.NetAddr)
    outdateConfig.Set("main", "City", c.Main.City)
    outdateConfig.Set("main", "Country", c.Main.Country)
    /* Mailer */
    outdateConfig.Set("mailer", "Interval", c.Mailer.Interval)
    /* Netmail */
    outdateConfig.Set("netmail", "Charset", c.Netmail.Charset)
    /* Echomail */
    outdateConfig.Set("echomail", "Charset", c.Echomail.Charset)
    configMapper.SetConfigToDatabase(outdateConfig)
    // Drop the cached snapshot so GetConfig re-reads the stored values.
    config = nil
    return nil
}
// Restore reads every configuration section from the database-backed config
// mapper into a fresh Config. Lookup errors are ignored, so missing keys
// simply leave the corresponding field empty; the returned error is always nil.
func (self *ConfigManager) Restore() (*Config, error) {
var c Config
mapperManager := self.restoreMapperManager()
configMapper := mapperManager.GetConfigMapper()
outdateConfig, _ := configMapper.GetConfigFromDatabase()
/* Main */
c.Main.Address, _ = outdateConfig.Get("main", "Address")
c.Main.Password, _ = outdateConfig.Get("main", "Password")
c.Main.Origin, _ = outdateConfig.Get("main", "Origin")
c.Main.TearLine, _ = outdateConfig.Get("main", "TearLine")
c.Main.Link, _ = outdateConfig.Get("main", "Link")
c.Main.StationName, _ = outdateConfig.Get("main", "StationName")
c.Main.RealName, _ = outdateConfig.Get("main", "RealName")
c.Main.NetAddr, _ = outdateConfig.Get("main", "NetAddr")
c.Main.City, _ = outdateConfig.Get("main", "City")
c.Main.Country, _ = outdateConfig.Get("main", "Country")
/* Mailer */
c.Mailer.Interval, _ = outdateConfig.Get("mailer", "Interval")
/* Netmail */
c.Netmail.Charset, _ = outdateConfig.Get("netmail", "Charset")
/* Echomail */
c.Echomail.Charset, _ = outdateConfig.Get("echomail", "Charset")
return &c, nil
}
// GetConfig returns the lazily cached configuration, restoring it from the
// database on first access.
func (self *ConfigManager) GetConfig() *Config {
    if config != nil {
        return config
    }
    restored, err := self.Restore()
    if err != nil {
        log.Printf("Restore config with error: err = %#v", err)
    }
    config = restored
    return config
}
// restoreMapperManager fetches the MapperManager from the registry, panicking
// if the entry is missing or has an unexpected type.
func (self *ConfigManager) restoreMapperManager() *mapper.MapperManager {
    manager, ok := self.registry.Get("MapperManager").(*mapper.MapperManager)
    if !ok {
        panic("no mapper manager")
    }
    return manager
}
|
bench-os/bench-os
|
devices-framework/riot-app/riot/drivers/lps331ap/include/lps331ap-internal.h
|
/*
* Copyright (C) 2014 Freie Universität Berlin
*
* This file is subject to the terms and conditions of the GNU Lesser
* General Public License v2.1. See the file LICENSE in the top level
* directory for more details.
*/
/**
* @ingroup drivers_lps331ap
* @{
*
* @file
* @brief Definitions for the LPS331AP pressure sensor
*
* @author <NAME> <<EMAIL>>
*/
#ifndef LPS331AP_INTERNAL_H
#define LPS331AP_INTERNAL_H
#ifdef __cplusplus
extern "C" {
#endif
/**
 * @name LPS331AP registers
 * @{
 */
/* NOTE(review): bit 7 of the register sub-address; presumably OR'd into an
 * address to enable auto-increment on multi-byte transfers -- confirm against
 * the LPS331AP datasheet. */
#define LPS331AP_AUTO_INC 0x80
#define LPS331AP_REG_REF_P_XL 0x08
#define LPS331AP_REG_REF_P_L 0x09
#define LPS331AP_REG_REF_P_H 0x0a
#define LPS331AP_REG_WHO_AM_I 0x0f
#define LPS331AP_REG_RES_CONF 0x10
#define LPS331AP_REG_CTRL_REG1 0x20
#define LPS331AP_REG_CTRL_REG2 0x21
#define LPS331AP_REG_CTRL_REG3 0x22
#define LPS331AP_REG_INT_CFG_REG 0x23
#define LPS331AP_REG_INT_SOURCE_REG 0x24
#define LPS331AP_REG_THS_P_LOW_REG 0x25
#define LPS331AP_REG_THS_P_HIGH_REG 0x26
#define LPS331AP_REG_STATUS_REG 0x27
/* Pressure output spans three registers (XL/L/H per the register names);
 * confirm byte order and scaling in the datasheet. */
#define LPS331AP_REG_PRESS_OUT_XL 0x28
#define LPS331AP_REG_PRESS_OUT_L 0x29
#define LPS331AP_REG_PRESS_OUT_H 0x2a
#define LPS331AP_REG_TEMP_OUT_L 0x2b
#define LPS331AP_REG_TEMP_OUT_H 0x2c
#define LPS331AP_REG_AMP_CTRL 0x30
/** @} */
/**
 * @name LPS331AP CTRL_REG1 bitfields
 * @{
 */
#define LPS331AP_CTRL_REG1_PD 0x80
#define LPS331AP_CTRL_REG1_ODR 0x70
#define LPS331AP_CTRL_REG1_ODR_POS 4
#define LPS331AP_CTRL_REG1_DIFF_EN 0x08
#define LPS331AP_CTRL_REG1_DBDU 0x04
#define LPS331AP_CTRL_REG1_DELTA_EN 0x02
#define LPS331AP_CTRL_REG1_SIM 0x01
/** @} */
#ifdef __cplusplus
}
#endif
#endif /* LPS331AP_INTERNAL_H */
/** @} */
|
BroadbandForum/obbaa-netconf-stack
|
netconf-server-modelnode-fwk/src/main/java/org/broadband_forum/obbaa/netconf/mn/fwk/server/model/support/constraints/validation/util/DataStoreValidationErrors.java
|
package org.broadband_forum.obbaa.netconf.mn.fwk.server.model.support.constraints.validation.util;
import java.util.HashMap;
import java.util.Map;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.model.api.MustDefinition;
import org.broadband_forum.obbaa.netconf.api.messages.NetconfRpcError;
import org.broadband_forum.obbaa.netconf.api.messages.NetconfRpcErrorTag;
import org.broadband_forum.obbaa.netconf.mn.fwk.schema.SchemaRegistry;
import org.broadband_forum.obbaa.netconf.mn.fwk.schema.constraints.payloadparsing.typevalidators.ValidationException;
import org.broadband_forum.obbaa.netconf.server.RequestScope;
import org.broadband_forum.obbaa.netconf.mn.fwk.server.model.ModelNode;
import org.broadband_forum.obbaa.netconf.mn.fwk.server.model.ModelNodeId;
import org.broadband_forum.obbaa.netconf.mn.fwk.server.model.ModelNodeRdn;
import org.broadband_forum.obbaa.netconf.mn.fwk.server.model.WhenValidationException;
import org.broadband_forum.obbaa.netconf.mn.fwk.server.model.util.NetconfRpcErrorUtil;
/**
* A utility class helps in generation of various ValidationException with NetconfRpcError during validation
*/
public class DataStoreValidationErrors {
private static final String NETCONF_RPC_ERROR = "NETCONF_RPC_ERROR";
/**
 * Returns a NetconfRpcError for the given tag, cached per RequestScope so the
 * same instance is reused within one request.
 * NOTE(review): callers mutate the returned instance (setErrorMessage /
 * setErrorAppTag), so two error builders sharing a tag (e.g. the min/max
 * element checks, both OPERATION_FAILED) within one request clobber each
 * other's state -- confirm this sharing is intentional.
 */
private static NetconfRpcError getApplicationError(NetconfRpcErrorTag tag) {
RequestScope scope = RequestScope.getCurrentScope();
@SuppressWarnings("unchecked")
Map<NetconfRpcErrorTag, NetconfRpcError> error = (Map<NetconfRpcErrorTag, NetconfRpcError>) scope.getFromCache(NETCONF_RPC_ERROR);
if (error == null) {
error = new HashMap<NetconfRpcErrorTag, NetconfRpcError>();
scope.putInCache(NETCONF_RPC_ERROR, error);
}
NetconfRpcError rpcError = error.get(tag);
if (rpcError == null) {
rpcError = NetconfRpcErrorUtil.getApplicationError(tag, null);
error.putIfAbsent(tag, rpcError);
rpcError = error.get(tag);
}
return rpcError;
}
/** Builds a fresh (uncached) DATA_MISSING error with app-tag "instance-required" and the given error path. */
public static NetconfRpcError getDataMissingRpcError(String errorMessage, String errorPath, Map<String, String> prefixContext) {
    NetconfRpcError error = NetconfRpcErrorUtil.getApplicationError(NetconfRpcErrorTag.DATA_MISSING, errorMessage);
    error.setErrorPath(errorPath, prefixContext);
    error.setErrorAppTag("instance-required");
    return error;
}
/** Builds a ValidationException reporting that max-elements was exceeded for the given node. */
public static ValidationException getViolateMaxElementException(String nodeType, int maxElements) {
    NetconfRpcError error = getApplicationError(NetconfRpcErrorTag.OPERATION_FAILED);
    error.setErrorAppTag("too-many-elements");
    error.setErrorMessage(String.format("Maximum elements allowed for %s is %s.", nodeType, maxElements));
    return new ValidationException(error);
}
/** Wraps a DATA_MISSING rpc-error (see getDataMissingRpcError) in a ValidationException. */
public static ValidationException getMissingDataException(String errorMessage, String errorPath, Map<String, String> prefixContext) {
    return new ValidationException(getDataMissingRpcError(errorMessage, errorPath, prefixContext));
}
/**
 * Always throws a missing-mandatory-node ValidationException for the child
 * {@code qname} of {@code modelNode}, with the child's xpath as error path.
 */
public static void throwDataMissingException(SchemaRegistry schemaRegistry, ModelNode modelNode, QName qname) {
ModelNodeId modelNodeId = new ModelNodeId(modelNode.getModelNodeId());
modelNodeId.addRdn(ModelNodeRdn.CONTAINER, qname.getNamespace().toString(), qname.getLocalName());
throw getMissingDataException(DataStoreValidationUtil.MISSING_MANDATORY_NODE, modelNodeId.xPathString(schemaRegistry), modelNodeId.xPathStringNsByPrefix(schemaRegistry));
}
/**
 * Creates a ValidationException reporting that a list/leaf-list holds fewer
 * entries than its min-elements constraint requires.
 *
 * Delegates to {@link #getViolateMinElementRPCError(String, int)} so the
 * error message and "too-few-elements" app-tag live in exactly one place
 * (both methods previously duplicated the same literals).
 *
 * @param nodeType    name of the offending node
 * @param minElements the configured min-elements bound
 */
public static ValidationException getViolateMinElementException(String nodeType, int minElements) {
    return new ValidationException(getViolateMinElementRPCError(nodeType, minElements));
}
/**
 * Creates an unknown-element exception for a node whose {@code when}
 * condition evaluated to false.
 *
 * @param whenXPath the violated when-expression, echoed in the error message
 */
public static ValidationException getViolateWhenConditionExceptionThrownUnknownElement(String whenXPath) {
    NetconfRpcError error = getApplicationError(NetconfRpcErrorTag.UNKNOWN_ELEMENT);
    error.setErrorAppTag("when-violation");
    error.setErrorMessage("Violate when constraints: " + whenXPath);
    return new WhenValidationException(error);
}
/**
 * Creates a ValidationException for a violated {@code must} constraint,
 * preferring the error-message / error-app-tag declared on the
 * MustDefinition itself and falling back to generic values.
 *
 * Previously the generic message was always assigned first and then
 * overwritten when the model supplied one; now each field is set exactly
 * once (final error content is unchanged).
 *
 * @param mustDefinition the violated must constraint from the YANG model
 */
public static ValidationException getViolateMustContrainsException(MustDefinition mustDefinition) {
    NetconfRpcError rpcError = getApplicationError(NetconfRpcErrorTag.OPERATION_FAILED);
    // Prefer the model-supplied error-message; fall back to a generic one.
    if (mustDefinition.getErrorMessage().isPresent()) {
        rpcError.setErrorMessage(mustDefinition.getErrorMessage().get());
    } else {
        rpcError.setErrorMessage("Violate must constraints: " + mustDefinition.toString());
    }
    // Prefer the model-supplied error-app-tag; fall back to a generic one.
    if (mustDefinition.getErrorAppTag().isPresent()) {
        rpcError.setErrorAppTag(mustDefinition.getErrorAppTag().get());
    } else {
        rpcError.setErrorAppTag("must-violation");
    }
    return new ValidationException(rpcError);
}
/** Creates a data-not-unique exception for a violated {@code unique} constraint. */
public static ValidationException getUniqueConstraintException(String message) {
    NetconfRpcError error = getApplicationError(NetconfRpcErrorTag.OPERATION_FAILED);
    error.setErrorAppTag("data-not-unique");
    error.setErrorMessage(message);
    return new ValidationException(error);
}
/**
 * Returns the xpath of {@code elementName} (in {@code namespace}) appended
 * as a container RDN under {@code modelNode}'s id.
 */
public static String buildRpcErrorPath(ModelNode modelNode, String elementName, String namespace) {
    ModelNodeId path = new ModelNodeId(modelNode.getModelNodeId());
    path.addRdn(new ModelNodeRdn(ModelNodeRdn.CONTAINER, namespace, elementName));
    return path.xPathString();
}
/**
 * Builds (without wrapping in an exception) the "too-few-elements" RPC
 * error for a violated min-elements constraint.
 */
public static NetconfRpcError getViolateMinElementRPCError(String nodeType, int minElements) {
    NetconfRpcError error = getApplicationError(NetconfRpcErrorTag.OPERATION_FAILED);
    error.setErrorAppTag("too-few-elements");
    error.setErrorMessage(String.format("Minimum elements required for %s is %s.", nodeType, minElements));
    return error;
}
}
|
RenatoTorres/Exode
|
Exode/Core/boardThread.py
|
# boardThread.py
#
# Some tasks must be executed periodically —
# blinking a LED, for example. At other times we
# have to execute instructions in sequence.
#
# boardThread compiles instructions and sends
# them to the board in a single byte array,
# so that the instructions are executed
# back-to-back.
#
# If we set a period (in milliseconds), the board
# initializes a thread and executes the
# instructions periodically.
#
# If the period is 0, the board executes
# the instructions only once.
#
# Created by <NAME>, January, 2016.
# <EMAIL>
from .variable import _VARIABLES, _FUNCTIONS, _INV_FUNCTIONS, ID
from .exode import ExodeSpeaker
from . import logCore
class boardThread:
    """Batch instructions into a single byte array for the board.

    Instructions queued with :meth:`add` are compiled and sent to the board
    in one byte cluster, so the board executes them back-to-back.  With a
    non-zero period (milliseconds) the board creates a board-side thread and
    runs them periodically; with a period of 0 they run exactly once.
    """

    def __init__(self, board):
        self._board = board
        # -1: not started, 0: one-shot, >0: cycle period in milliseconds
        self._period = -1
        # queued instructions as [name, args] pairs
        self._instructions = []
        # the thread's id on the board; -1 until the board reports it via setID
        self._ID = -1
        self.on = False

    def setID(self, id):
        # Listener callback: the board sends back the id of the created thread.
        self._ID = id

    def add(self, name, *args):
        """Queue the speaker instruction ``name(*args)`` for later compilation."""
        self._instructions.append([name, args])
        logCore(name+"("+str(args)+") add to the "+str(self))

    def getInstructionByteCluster(self):
        """Compile the queued instructions into a single bytearray."""
        # we use a fake (muted) Speaker to obtain the byteCode without sending it
        fakeSpeaker = ExodeSpeaker(None, "fake")
        fakeSpeaker.mute = True

        # byteCluster accumulates the byteCode of every queued instruction
        byteCluster = bytearray()
        for name, args in self._instructions:
            # equivalent to: byteCluster += fakeSpeaker.<name>(*args)
            byteCluster += getattr(fakeSpeaker, name)(*args)
        return byteCluster

    def start(self, period=0):
        """Send the compiled instructions; run once if ``period`` is 0, else periodically."""
        self._period = int(period)
        byteCluster = self.getInstructionByteCluster()

        if self._period == 0:
            # one-shot execution
            byteCluster = bytearray([0, ID('executeThread'), len(byteCluster)]) + byteCluster
        else:
            # periodic execution: ask the board to create a thread and listen
            # for the id it assigns to it
            self.on = True
            key = self._board.getKey()
            bytePeriod = bytearray(self._period.to_bytes(4, 'little'))
            byteCluster = bytearray([0, ID('initThread'), key]) + bytePeriod + bytearray([len(byteCluster)]) + byteCluster
            self._board.addListener(key=key, updateFunction=self.setID)

        self._board.sendByteArray(byteCluster)
        self.logStart()

    def logStart(self):
        logCore(str(self)+" has been started with a cycle of "+str(self._period)+"ms ")

    def stop(self):
        """Ask the board to delete the periodic thread, if one is running."""
        if self.on and self._ID != -1:
            # BUG FIX: was `self.period = -1`, which silently created a new
            # attribute instead of resetting the tracked `_period`.
            self._period = -1
            self.on = False
            self._board.sendByteArray(bytearray([0, ID('deleteThread')])+self._ID.to_bytes(4,'little'))
            logCore(str(self)+ " has been stopped ")

    def __repr__(self):
        return "<boardThread at {0}>".format(hex(id(self)))
|
mantamusica/interfacesDesign
|
Tema Instaladores/Desarrollo De Interfaces/JavaExe/doc/examples/9 - Registry/src/com/devwizard/javaexe/examples/example9/Example9_ActionForm.java
|
<reponame>mantamusica/interfacesDesign<gh_stars>0
/*****************************************************************************/
/*** (c) 2002-2013, DevWizard (<EMAIL>) ***/
/*** ***/
/*** ***/
/*** Example 9 ***/
/*** ***/
/*****************************************************************************/
package com.devwizard.javaexe.examples.example9;
import java.util.*;
import javax.swing.*;
import com.devwizard.javaexe.examples.common.*;
/*****************************************************************************/
/**
 * Modal dialog handlers for the registry example: each method prompts the
 * user with a small Swing form and then applies the requested create / edit
 * / delete operation through Example9_RegistryManagement.
 */
public class Example9_ActionForm
{
    /*******************************************/
    /**
     * Prompts for a key name and creates it under {@code nodeKey}.
     *
     * @return true if the registry call succeeded; false on cancel or invalid input
     */
    public static boolean newKey(int hkeyRoot, Example9_TreeNodeKey nodeKey)
    {
        if(nodeKey==null)
            return false;

        String[] arr = displayForm("New Key", "Name of the New Key :"
                                  ,"JavaExe - ", null, null);
        if(arr==null || arr.length < 1)
            return false;

        return Example9_RegistryManagement.regCreateKey(hkeyRoot
                                                       ,nodeKey.getPathKey() + "\\" + arr[0]);
    }

    /*******************************************/
    /**
     * Prompts for a name/value pair and creates a new string value under
     * {@code nodeKey}.
     */
    public static boolean newValue(int hkeyRoot, Example9_TreeNodeKey nodeKey)
    {
        if(nodeKey==null)
            return false;

        String[] arr = displayForm("New Value", null, "JavaExe - ", "Value ", "");
        if(arr==null || arr.length < 2)
            return false;

        return Example9_RegistryManagement.regSetValueSTR(hkeyRoot
                                                         ,nodeKey.getPathKey(), arr[0], arr[1], false);
    }

    /*******************************************/
    /**
     * Prompts for a new content for the existing value {@code nodeVal},
     * pre-filling the field with the current registry content.
     */
    public static boolean editValue(int hkeyRoot, Example9_TreeNodeVal nodeVal)
    {
        if(nodeVal==null)
            return false;

        String pathKey = nodeVal.getPathKey();
        String name    = nodeVal.getName();

        String[] arr = displayForm("Edit Value", null, null, name+" = "
                                  ,Example9_RegistryManagement.regGetValueSTR(hkeyRoot,pathKey,name,false));
        if(arr==null || arr.length < 1)
            return false;

        return Example9_RegistryManagement.regSetValueSTR(hkeyRoot
                                                         ,pathKey, name, arr[0], false);
    }

    /*******************************************/
    /**
     * Asks for confirmation, then deletes either the key or the value
     * represented by {@code node} (a node without a name is a key).
     */
    public static boolean delete(int hkeyRoot, Example9_TreeNode node)
    {
        if(node==null)
            return false;

        String nameValue = node.getName();
        String pathKey   = node.getPathKey();
        // a node without a value name represents a registry key
        boolean isKey    = (nameValue==null);

        String[] arr = displayForm("Delete "+(isKey ? "Key" : "Value")
                                  ,"Do you want to delete \""+(isKey ? pathKey : nameValue)+"\" ?"
                                  ,null, null, null);
        // any non-null result means the user confirmed (no input fields here)
        if(arr==null)
            return false;

        if(isKey)
            return Example9_RegistryManagement.regDeleteKey(hkeyRoot, pathKey);

        return Example9_RegistryManagement.regDeleteValue(hkeyRoot, pathKey, nameValue);
    }

    /*******************************************/
    /**
     * Shows a confirm dialog with an optional description label, an optional
     * "name" text field, and an optional "value" text field.
     *
     * @return null if the user cancelled; otherwise an array holding the
     *         name (if a name field was shown) followed by the value (if a
     *         value field was shown) — possibly empty when neither was shown.
     */
    static String[] displayForm(String title, String desc, String lblName, String lblVal
                               ,String val)
    {
        Examples_Panel p = new Examples_Panel();

        if(desc != null)
            p.putLastComponent(new JLabel(desc), 0, 0, 10, 0);

        JTextField txtN = (lblName != null ? Examples_UtilsGUI.putTextfield(p, lblName, 20) : null);
        JTextField txtV = (lblVal  != null ? Examples_UtilsGUI.putTextfield(p, lblVal, 30)  : null);

        if(val != null && txtV != null)
            txtV.setText(val);

        if(!Examples_UtilsGUI.showConfirmDialog(title, p))
            return null;

        Vector<String> vect = new Vector<String>();

        // NOTE(review): the typed name is returned with a hard-coded
        // "JavaExe - " prefix in addition to the label text passed in above;
        // confirm this double prefix is intended.
        if(txtN != null)    vect.add("JavaExe - "+txtN.getText().trim());
        if(txtV != null)    vect.add(txtV.getText());

        return vect.toArray(new String[0]);
    }
}
|
youngzhu/DesignPatternsByYoungzy
|
src/sourcemaking/structural/decorator/example1/after/D.java
|
package sourcemaking.structural.decorator.example1.after;
/**
 * Abstract decorator: wraps another {@link I} and forwards {@link #doIt()}
 * to it, letting concrete subclasses add behavior before or after the call.
 */
public abstract class D implements I {

    /** Wrapped component that receives the delegated call. */
    private final I delegate;

    public D(I inner) {
        delegate = inner;
    }

    @Override
    public void doIt() {
        delegate.doIt();
    }
}
|
alex729/RED
|
src/RobotFrameworkCore/org.robotframework.ide.core-functions/src/test/java/org/rf/ide/core/testdata/text/write/tables/keywords/creation/CreationOfKeywordReturnTest.java
|
/*
* Copyright 2016 Nokia Solutions and Networks
* Licensed under the Apache License, Version 2.0,
* see license.txt file for details.
*/
package org.rf.ide.core.testdata.text.write.tables.keywords.creation;
import org.junit.Test;
import org.rf.ide.core.testdata.model.FileFormat;
import org.rf.ide.core.testdata.model.RobotFile;
import org.rf.ide.core.testdata.model.table.KeywordTable;
import org.rf.ide.core.testdata.model.table.LocalSetting;
import org.rf.ide.core.testdata.model.table.keywords.UserKeyword;
import org.rf.ide.core.testdata.text.read.recognizer.RobotToken;
import org.rf.ide.core.testdata.text.write.NewRobotFileTestHelper;
import org.rf.ide.core.testdata.text.write.RobotFormatParameterizedTest;
/**
 * Output-generation tests for the keyword [Return] setting: each test builds
 * a model programmatically (empty/named keyword, with/without return values
 * and comments) and asserts the dumped file matches the reference file for
 * the parameterized format/extension.
 */
public class CreationOfKeywordReturnTest extends RobotFormatParameterizedTest {

    public CreationOfKeywordReturnTest(final String extension, final FileFormat format) {
        super(extension, format);
    }

    @Test
    public void test_emptyFile_and_thanCreateKeywordReturn_withoutKeywordName_andReturnDecOnly() throws Exception {
        test_returnDecOnly("EmptyKeywordReturnNoKeywordName", "");
    }

    @Test
    public void test_emptyFile_and_thanCreateKeywordReturn_withKeywordName_andReturnDecOnly() throws Exception {
        test_returnDecOnly("EmptyKeywordReturn", "User Keyword");
    }

    // Builds a keyword with a bare [Return] declaration (no values, no comment).
    private void test_returnDecOnly(final String fileNameWithoutExt, final String userKeywordName) throws Exception {
        // prepare
        final String filePath = convert(fileNameWithoutExt);
        final RobotFile modelFile = NewRobotFileTestHelper.getModelFileToModify("2.9");

        // test data prepare
        modelFile.includeKeywordTableSection();
        final KeywordTable keywordTable = modelFile.getKeywordTable();

        final RobotToken keyName = new RobotToken();
        keyName.setText(userKeywordName);
        final UserKeyword uk = new UserKeyword(keyName);
        keywordTable.addKeyword(uk);
        uk.newReturn(0);

        // verify
        NewRobotFileTestHelper.assertNewModelTheSameAsInFile(filePath, modelFile);
    }

    @Test
    public void test_emptyFile_and_thanCreateKeywordReturn_withoutKeywordName_andReturn_andComment() throws Exception {
        test_returnWithCommentOnly("EmptyKeywordReturnNoKeywordNameComment", "");
    }

    @Test
    public void test_emptyFile_and_thanCreateKeywordReturn_withKeywordName_andReturn_andComment() throws Exception {
        test_returnWithCommentOnly("EmptyKeywordReturnComment", "User Keyword");
    }

    // Builds a keyword whose [Return] carries only comment parts.
    private void test_returnWithCommentOnly(final String fileNameWithoutExt, final String userKeywordName)
            throws Exception {
        // prepare
        final String filePath = convert(fileNameWithoutExt);
        final RobotFile modelFile = NewRobotFileTestHelper.getModelFileToModify("2.9");

        // test data prepare
        modelFile.includeKeywordTableSection();
        final KeywordTable keywordTable = modelFile.getKeywordTable();

        final RobotToken keyName = new RobotToken();
        keyName.setText(userKeywordName);
        final UserKeyword uk = new UserKeyword(keyName);
        keywordTable.addKeyword(uk);
        final LocalSetting<UserKeyword> keyReturn = uk.newReturn(0);
        keyReturn.addCommentPart("cm1");
        keyReturn.addCommentPart("cm2");
        keyReturn.addCommentPart("cm3");

        // verify
        NewRobotFileTestHelper.assertNewModelTheSameAsInFile(filePath, modelFile);
    }

    @Test
    public void test_emptyFile_and_thanCreateKeywordReturn_withoutKeywordName_andReturn_with3Values() throws Exception {
        test_return_With3Values("KeywordReturn3ReturnsNoKeywordName", "");
    }

    @Test
    public void test_emptyFile_and_thanCreateKeywordReturn_withKeywordName_andReturn_with3Values() throws Exception {
        test_return_With3Values("KeywordReturn3Returns", "User Keyword");
    }

    // Builds a keyword whose [Return] has three return-value tokens.
    private void test_return_With3Values(final String fileNameWithoutExt, final String userKeywordName)
            throws Exception {
        // prepare
        final String filePath = convert(fileNameWithoutExt);
        final RobotFile modelFile = NewRobotFileTestHelper.getModelFileToModify("2.9");

        // test data prepare
        modelFile.includeKeywordTableSection();
        final KeywordTable keywordTable = modelFile.getKeywordTable();

        final RobotToken keyName = new RobotToken();
        keyName.setText(userKeywordName);
        final UserKeyword uk = new UserKeyword(keyName);
        keywordTable.addKeyword(uk);
        final LocalSetting<UserKeyword> keyReturn = uk.newReturn(0);
        keyReturn.addToken("${r1}");
        keyReturn.addToken("${r2}");
        keyReturn.addToken("${r3}");

        // verify
        NewRobotFileTestHelper.assertNewModelTheSameAsInFile(filePath, modelFile);
    }

    @Test
    public void test_emptyFile_and_thanCreateKeywordReturn_withoutKeywordName_andReturn_with3Values_andComment()
            throws Exception {
        test_returnWith_3ValuesAndComment("KeywordReturn3ReturnsCommentNoKeywordName", "");
    }

    @Test
    public void test_emptyFile_and_thanCreateKeywordReturn_withKeywordName_andReturn_with3Values_andComment()
            throws Exception {
        test_returnWith_3ValuesAndComment("KeywordReturn3ReturnsComment", "User Keyword");
    }

    // Builds a keyword whose [Return] has three values followed by three comment parts.
    private void test_returnWith_3ValuesAndComment(final String fileNameWithoutExt, final String userKeywordName)
            throws Exception {
        // prepare
        final String filePath = convert(fileNameWithoutExt);
        final RobotFile modelFile = NewRobotFileTestHelper.getModelFileToModify("2.9");

        // test data prepare
        modelFile.includeKeywordTableSection();
        final KeywordTable keywordTable = modelFile.getKeywordTable();

        final RobotToken keyName = new RobotToken();
        keyName.setText(userKeywordName);
        final UserKeyword uk = new UserKeyword(keyName);
        keywordTable.addKeyword(uk);
        final LocalSetting<UserKeyword> keyReturn = uk.newReturn(0);
        keyReturn.addToken("${r1}");
        keyReturn.addToken("${r2}");
        keyReturn.addToken("${r3}");
        keyReturn.addCommentPart("cm1");
        keyReturn.addCommentPart("cm2");
        keyReturn.addCommentPart("cm3");

        // verify
        NewRobotFileTestHelper.assertNewModelTheSameAsInFile(filePath, modelFile);
    }

    // Maps a bare fixture name to its reference-file path for the current extension.
    private String convert(final String fileName) {
        return "keywords/setting/return/new/" + fileName + "." + getExtension();
    }
}
|
rceet/TencentOS-tiny
|
platform/vendor_bsp/TI/MSP432P4xx/pmap.h
|
<reponame>rceet/TencentOS-tiny<gh_stars>1-10
/*
* -------------------------------------------
* MSP432 DriverLib - v3_40_00_10
* -------------------------------------------
*
* --COPYRIGHT--,BSD,BSD
* Copyright (c) 2016, Texas Instruments Incorporated
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of Texas Instruments Incorporated nor the names of
* its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
* EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* --/COPYRIGHT--*/
#ifndef __PMAP_H__
#define __PMAP_H__
//*****************************************************************************
//
//! \addtogroup pmap_api
//! @{
//
//*****************************************************************************
//*****************************************************************************
//
// If building with a C++ compiler, make all of the definitions in this header
// have a C binding.
//
//*****************************************************************************
#ifdef __cplusplus
extern "C"
{
#endif
#include <stdint.h>
#include <msp.h>
//*****************************************************************************
//
//The following are values that can be passed to the PMAP_configurePorts() API
//as the portMapReconfigure parameter.
//
//*****************************************************************************
#define PMAP_ENABLE_RECONFIGURATION    PMAP_CTL_PRECFG
#define PMAP_DISABLE_RECONFIGURATION   0x00

//*****************************************************************************
//
//The following are values that can be passed to the PMAP_configurePorts() API
//as the pxMAPy parameter.
//
//*****************************************************************************
/* Each constant is the byte offset of a port's mapping registers from the
 * PMAP module base, so the driver can index the mapping table generically. */
#define PMAP_P1MAP ((uint32_t)P1MAP - PMAP_BASE)
#define PMAP_P2MAP ((uint32_t)P2MAP - PMAP_BASE)
#define PMAP_P3MAP ((uint32_t)P3MAP - PMAP_BASE)
#define PMAP_P4MAP ((uint32_t)P4MAP - PMAP_BASE)
#define PMAP_P5MAP ((uint32_t)P5MAP - PMAP_BASE)
#define PMAP_P6MAP ((uint32_t)P6MAP - PMAP_BASE)
#define PMAP_P7MAP ((uint32_t)P7MAP - PMAP_BASE)

//*****************************************************************************
//
//Prototypes for the APIs.
//
//*****************************************************************************

//*****************************************************************************
//
//! This function configures the MSP432 Port Mapper
//!
//! \param portMapping is the pointer to init Data
//! \param pxMAPy is the Port Mapper to initialize
//! \param numberOfPorts is the number of Ports to initialize
//! \param portMapReconfigure is used to enable/disable reconfiguration
//!             Valid values are
//!             \b PMAP_ENABLE_RECONFIGURATION
//!             \b PMAP_DISABLE_RECONFIGURATION [Default value]
//! Modified registers are \b PMAPKEYID, \b PMAPCTL
//!
//! \return None
//
//*****************************************************************************
extern void PMAP_configurePorts(const uint8_t *portMapping, uint8_t pxMAPy,
        uint8_t numberOfPorts, uint8_t portMapReconfigure);

/* Defines for future devices that might have multiple instances */
/* The first argument (the instance, `a`) is ignored: current devices have a
 * single PMAP module, so the call collapses to PMAP_configurePorts(). */
#define PMAP_configurePortsMultipleInstance(a,b,c,d,e)  PMAP_configurePorts(b,c,d,e)
//*****************************************************************************
//
// Mark the end of the C bindings section for C++ compilers.
//
//*****************************************************************************
#ifdef __cplusplus
}
#endif
//*****************************************************************************
//
// Close the Doxygen group.
//! @}
//
//*****************************************************************************
#endif
|
refactorzone/spring-boot-validators
|
src/test/java/zone/refactor/spring/validation/validator/IntegerValidatorTest.java
|
package zone.refactor.spring.validation.validator;
import org.junit.Test;
import zone.refactor.spring.validation.localization.BuiltInLocalizationService;
import zone.refactor.spring.validation.validator.IntegerValidator;
import static junit.framework.TestCase.assertFalse;
import static junit.framework.TestCase.assertTrue;
/**
 * Unit tests for IntegerValidator. null and the empty string are accepted —
 * presumably because presence is enforced by a separate required-field
 * validator; TODO confirm against the validator chain.
 */
public class IntegerValidatorTest {
    @Test
    public void testValues() {
        IntegerValidator validator = new IntegerValidator();
        // absent values pass this validator
        assertTrue(validator.isValid(null));
        assertTrue(validator.isValid(""));
        // non-numeric, partially numeric and fractional strings are rejected
        assertFalse(validator.isValid("a"));
        assertTrue(validator.isValid("6"));
        assertFalse(validator.isValid("6a"));
        assertFalse(validator.isValid("6.2"));
        // boxed numbers follow the same rule: whole numbers only
        assertTrue(validator.isValid(6));
        assertFalse(validator.isValid(6.2));
    }
}
|
JLLeitschuh/book
|
src/main/java/com/tamingtext/tagging/LuceneCategoryExtractor.java
|
/*
* Copyright 2008-2011 <NAME>, <NAME> and <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* -------------------
* To purchase or learn more about Taming Text, by <NAME>, <NAME> and <NAME>, visit
* http://www.manning.com/ingersoll
*/
package com.tamingtext.tagging;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.util.Collections;
import org.apache.commons.cli2.CommandLine;
import org.apache.commons.cli2.Group;
import org.apache.commons.cli2.Option;
import org.apache.commons.cli2.OptionException;
import org.apache.commons.cli2.builder.ArgumentBuilder;
import org.apache.commons.cli2.builder.DefaultOptionBuilder;
import org.apache.commons.cli2.builder.GroupBuilder;
import org.apache.commons.cli2.commandline.Parser;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermEnum;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.mahout.common.CommandLineUtil;
import org.apache.mahout.common.IOUtils;
import org.apache.mahout.utils.vectors.lucene.Driver;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** Extract categories from the TamingText cluster schema */
/**
 * Extract categories from the TamingText cluster schema: dumps the stored
 * values of a given field (one per line) for the documents of a Lucene index.
 */
public class LuceneCategoryExtractor {

  // FIX: the logger was created for Driver.class (copy-paste); log under this class.
  private static final Logger log = LoggerFactory.getLogger(LuceneCategoryExtractor.class);

  /**
   * CLI entry point. Options: -d index dir (required), -f field (required),
   * -o output file (defaults to stdout), -m max documents, -h help.
   */
  public static void main(String[] args) throws IOException {
    DefaultOptionBuilder obuilder = new DefaultOptionBuilder();
    ArgumentBuilder abuilder = new ArgumentBuilder();
    GroupBuilder gbuilder = new GroupBuilder();

    Option inputOpt = obuilder.withLongName("dir")
        .withRequired(true)
        .withArgument(
            abuilder.withName("dir")
                .withMinimum(1)
                .withMaximum(1).create())
        .withDescription("The Lucene directory")
        .withShortName("d").create();

    Option outputOpt = obuilder.withLongName("output")
        .withRequired(false)
        .withArgument(
            abuilder.withName("output")
                .withMinimum(1)
                .withMaximum(1).create())
        .withDescription("The output directory")
        .withShortName("o").create();

    Option maxOpt = obuilder.withLongName("max")
        .withRequired(false)
        .withArgument(
            abuilder.withName("max")
                .withMinimum(1)
                .withMaximum(1)
                .create())
        .withDescription("The maximum number of documents to analyze. If not specified, then it will loop over all docs")
        .withShortName("m").create();

    Option fieldOpt = obuilder.withLongName("field")
        .withRequired(true)
        .withArgument(
            abuilder.withName("field")
                .withMinimum(1)
                .withMaximum(1)
                .create())
        .withDescription("The field in the index")
        .withShortName("f").create();

    Option helpOpt = obuilder.withLongName("help")
        .withDescription("Print out help")
        .withShortName("h").create();

    // FIX: helpOpt was built but never registered with the group, so the
    // hasOption(helpOpt) check below could never fire.
    Group group = gbuilder.withName("Options")
        .withOption(inputOpt)
        .withOption(outputOpt)
        .withOption(maxOpt)
        .withOption(fieldOpt)
        .withOption(helpOpt)
        .create();

    try {
      Parser parser = new Parser();
      parser.setGroup(group);
      CommandLine cmdLine = parser.parse(args);

      if (cmdLine.hasOption(helpOpt)) {
        CommandLineUtil.printHelp(group);
        return;
      }

      File inputDir = new File(cmdLine.getValue(inputOpt).toString());

      if (!inputDir.isDirectory()) {
        throw new IllegalArgumentException(inputDir + " does not exist or is not a directory");
      }

      long maxDocs = Long.MAX_VALUE;
      if (cmdLine.hasOption(maxOpt)) {
        maxDocs = Long.parseLong(cmdLine.getValue(maxOpt).toString());
      }

      if (maxDocs < 0) {
        throw new IllegalArgumentException("maxDocs must be >= 0");
      }

      String field = cmdLine.getValue(fieldOpt).toString();

      PrintWriter out = null;
      if (cmdLine.hasOption(outputOpt)) {
        out = new PrintWriter(new FileWriter(cmdLine.getValue(outputOpt).toString()));
      } else {
        out = new PrintWriter(new OutputStreamWriter(System.out, "UTF-8"));
      }

      dumpDocumentFields(inputDir, field, maxDocs, out);

      IOUtils.close(Collections.singleton(out));
    } catch (OptionException e) {
      log.error("Exception", e);
      CommandLineUtil.printHelp(group);
    }
  }

  /** dump the terms found in the field specified to the specified writer in the form:
   *
   * <pre>term(tab)document_frequency</pre>
   *
   * @param indexDir the index to read.
   * @param field the name of the field.
   * @param out the print writer output will be written to
   * @throws IOException
   */
  public static void dumpTerms(File indexDir, String field, PrintWriter out) throws IOException {
    Directory dir = FSDirectory.open(indexDir);
    IndexReader reader = IndexReader.open(dir, true);
    try {
      TermEnum te = reader.terms(new Term(field, ""));
      do {
        Term term = te.term();
        // terms() positions at the first term >= (field, ""); stop once we
        // leave the requested field.
        if (term == null || term.field().equals(field) == false) {
          break;
        }
        out.printf("%s %d\n", term.text(), te.docFreq());
      } while (te.next());
      te.close();
    } finally {
      // FIX: the reader was previously never closed (resource leak).
      reader.close();
    }
  }

  /** dump the values stored in the specified field for each document.
   *
   * @param indexDir the index to read.
   * @param field the name of the field.
   * @param maxDocs maximum number of (non-deleted) documents to dump.
   * @param out the print writer output will be written to
   * @throws IOException
   */
  public static void dumpDocumentFields(File indexDir, String field, long maxDocs, PrintWriter out) throws IOException {
    Directory dir = FSDirectory.open(indexDir);
    IndexReader reader = IndexReader.open(dir, true);
    try {
      int max = reader.maxDoc();
      // FIX: maxDocs was accepted (and advertised by the --max option) but
      // never enforced; count dumped documents and stop at the limit.
      long dumped = 0;
      for (int i = 0; i < max && dumped < maxDocs; i++) {
        if (!reader.isDeleted(i)) {
          Document d = reader.document(i);
          for (Field f : d.getFields(field)) {
            if (f.isStored() && !f.isBinary()) {
              String value = f.stringValue();
              if (value != null) {
                out.printf("%s\n", value);
              }
            }
          }
          dumped++;
        }
      }
    } finally {
      // FIX: the reader was previously never closed (resource leak).
      reader.close();
    }
  }
}
|
zack-braun/4607_NS
|
ns-allinone-3.27/ns-3.27/src/lte/test/epc-test-s1u-downlink.cc
|
/* -*- Mode: C++; c-file-style: "gnu"; indent-tabs-mode:nil; -*- */
/*
* Copyright (c) 2011 Centre Tecnologic de Telecomunicacions de Catalunya (CTTC)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as
* published by the Free Software Foundation;
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
* Author: <NAME> <<EMAIL>>
*/
#include "ns3/simulator.h"
#include "ns3/log.h"
#include "ns3/test.h"
#include "ns3/point-to-point-epc-helper.h"
#include "ns3/epc-enb-application.h"
#include "ns3/packet-sink-helper.h"
#include "ns3/udp-echo-helper.h"
#include "ns3/point-to-point-helper.h"
#include "ns3/csma-helper.h"
#include "ns3/internet-stack-helper.h"
#include "ns3/ipv4-address-helper.h"
#include "ns3/inet-socket-address.h"
#include "ns3/packet-sink.h"
#include <ns3/ipv4-static-routing-helper.h>
#include <ns3/ipv4-static-routing.h>
#include "ns3/boolean.h"
#include "ns3/uinteger.h"
#include "ns3/config.h"
#include "ns3/eps-bearer.h"
#include "lte-test-entities.h"
using namespace ns3;
NS_LOG_COMPONENT_DEFINE ("EpcTestS1uDownlink");
/**
* \ingroup lte-test
* \ingroup tests
*
* \brief Custom structure for testing UE downlink data
*/
struct UeDlTestData
{
  /**
   * Constructor
   *
   * \param n number of packets
   * \param s packet size
   */
  UeDlTestData (uint32_t n, uint32_t s);

  uint32_t numPkts; ///< number of packets the client will send
  uint32_t pktSize; ///< payload size of each packet, in bytes

  Ptr<PacketSink> serverApp; ///< server (sink) application; installed on the UE during DoRun
  Ptr<Application> clientApp; ///< client (echo) application; installed on the remote host during DoRun
};

UeDlTestData::UeDlTestData (uint32_t n, uint32_t s)
  : numPkts (n),
    pktSize (s)
{
}
/**
* \ingroup lte-test
* \ingroup tests
*
* \brief Custom structure for testing eNodeB downlink data, contains
* the list of data structures for UEs
*/
struct EnbDlTestData
{
  std::vector<UeDlTestData> ues; ///< per-UE downlink test data for this eNodeB (one entry per simulated UE)
};
/**
* \ingroup lte-test
* \ingroup tests
*
* \brief EpcS1uDlTestCase class
*/
class EpcS1uDlTestCase : public TestCase
{
public:
  /**
   * Constructor
   *
   * \param name the name of the test case instance
   * \param v list of eNodeB downlink test data information
   */
  EpcS1uDlTestCase (std::string name, std::vector<EnbDlTestData> v);
  virtual ~EpcS1uDlTestCase ();

private:
  // Builds the EPC topology described by m_enbDlTestData, runs the
  // simulation, and checks the bytes received by every UE's sink.
  virtual void DoRun (void);
  std::vector<EnbDlTestData> m_enbDlTestData; ///< ENB DL test data (one entry per simulated eNodeB)
};

EpcS1uDlTestCase::EpcS1uDlTestCase (std::string name, std::vector<EnbDlTestData> v)
  : TestCase (name),
    m_enbDlTestData (v)
{
}

// Nothing to release explicitly; members clean up themselves.
EpcS1uDlTestCase::~EpcS1uDlTestCase ()
{
}
void
EpcS1uDlTestCase::DoRun ()
{
  Ptr<PointToPointEpcHelper> epcHelper = CreateObject<PointToPointEpcHelper> ();
  Ptr<Node> pgw = epcHelper->GetPgwNode ();

  // allow jumbo packets
  Config::SetDefault ("ns3::CsmaNetDevice::Mtu", UintegerValue (30000));
  Config::SetDefault ("ns3::PointToPointNetDevice::Mtu", UintegerValue (30000));
  epcHelper->SetAttribute ("S1uLinkMtu", UintegerValue (30000));

  // Create a single RemoteHost
  NodeContainer remoteHostContainer;
  remoteHostContainer.Create (1);
  Ptr<Node> remoteHost = remoteHostContainer.Get (0);
  InternetStackHelper internet;
  internet.Install (remoteHostContainer);

  // Create the internet: point-to-point link between PGW and remote host
  PointToPointHelper p2ph;
  p2ph.SetDeviceAttribute ("DataRate", DataRateValue (DataRate ("100Gb/s")));
  NetDeviceContainer internetDevices = p2ph.Install (pgw, remoteHost);
  Ipv4AddressHelper ipv4h;
  ipv4h.SetBase ("1.0.0.0", "255.0.0.0");
  ipv4h.Assign (internetDevices);

  // setup default gateway for the remote hosts
  Ipv4StaticRoutingHelper ipv4RoutingHelper;
  Ptr<Ipv4StaticRouting> remoteHostStaticRouting = ipv4RoutingHelper.GetStaticRouting (remoteHost->GetObject<Ipv4> ());

  // hardcoded UE addresses for now (the EPC helper hands out 7.0.0.x)
  remoteHostStaticRouting->AddNetworkRouteTo (Ipv4Address ("7.0.0.0"), Ipv4Mask ("255.255.255.0"), 1);

  NodeContainer enbs;
  uint16_t cellIdCounter = 0;

  // One iteration per simulated eNodeB described in the test data.
  for (std::vector<EnbDlTestData>::iterator enbit = m_enbDlTestData.begin ();
       enbit < m_enbDlTestData.end ();
       ++enbit)
    {
      Ptr<Node> enb = CreateObject<Node> ();
      enbs.Add (enb);

      // we test EPC without LTE, hence we use:
      // 1) a CSMA network to simulate the cell
      // 2) a raw socket opened on the CSMA device to simulate the LTE socket
      uint16_t cellId = ++cellIdCounter;

      NodeContainer ues;
      ues.Create (enbit->ues.size ());

      // the cell contains all UEs plus the eNB (eNB is the last node added)
      NodeContainer cell;
      cell.Add (ues);
      cell.Add (enb);

      CsmaHelper csmaCell;
      NetDeviceContainer cellDevices = csmaCell.Install (cell);

      // the eNB's CSMA NetDevice acting as an LTE NetDevice.
      Ptr<NetDevice> enbDevice = cellDevices.Get (cellDevices.GetN () - 1);

      // Note that the EpcEnbApplication won't care of the actual NetDevice type
      epcHelper->AddEnb (enb, enbDevice, cellId);

      // Plug test RRC entity: wire the S1 SAP both ways
      Ptr<EpcEnbApplication> enbApp = enb->GetApplication (0)->GetObject<EpcEnbApplication> ();
      NS_ASSERT_MSG (enbApp != 0, "cannot retrieve EpcEnbApplication");
      Ptr<EpcTestRrc> rrc = CreateObject<EpcTestRrc> ();
      rrc->SetS1SapProvider (enbApp->GetS1SapProvider ());
      enbApp->SetS1SapUser (rrc->GetS1SapUser ());

      // we install the IP stack on UEs only
      // NOTE(review): this declaration shadows the outer `internet` helper
      // above; harmless here, but worth confirming it is intentional.
      InternetStackHelper internet;
      internet.Install (ues);

      // assign IP address to UEs, and install applications
      for (uint32_t u = 0; u < ues.GetN (); ++u)
        {
          Ptr<NetDevice> ueLteDevice = cellDevices.Get (u);
          Ipv4InterfaceContainer ueIpIface = epcHelper->AssignUeIpv4Address (NetDeviceContainer (ueLteDevice));

          Ptr<Node> ue = ues.Get (u);

          // disable IP Forwarding on the UE. This is because we use
          // CSMA broadcast MAC addresses for this test. The problem
          // won't happen with a LteUeNetDevice.
          ue->GetObject<Ipv4> ()->SetAttribute ("IpForward", BooleanValue (false));

          // sink on the UE receives the downlink traffic under test
          uint16_t port = 1234;
          PacketSinkHelper packetSinkHelper ("ns3::UdpSocketFactory", InetSocketAddress (Ipv4Address::GetAny (), port));
          ApplicationContainer apps = packetSinkHelper.Install (ue);
          apps.Start (Seconds (1.0));
          apps.Stop (Seconds (10.0));
          enbit->ues[u].serverApp = apps.Get (0)->GetObject<PacketSink> ();

          // client on the remote host generates numPkts packets of pktSize bytes
          Time interPacketInterval = Seconds (0.01);
          UdpEchoClientHelper client (ueIpIface.GetAddress (0), port);
          client.SetAttribute ("MaxPackets", UintegerValue (enbit->ues[u].numPkts));
          client.SetAttribute ("Interval", TimeValue (interPacketInterval));
          client.SetAttribute ("PacketSize", UintegerValue (enbit->ues[u].pktSize));
          apps = client.Install (remoteHost);
          apps.Start (Seconds (2.0));
          apps.Stop (Seconds (10.0));
          enbit->ues[u].clientApp = apps.Get (0);

          // register the UE with the EPC and activate its default bearer
          uint64_t imsi = u+1;
          epcHelper->AddUe (ueLteDevice, imsi);
          epcHelper->ActivateEpsBearer (ueLteDevice, imsi, EpcTft::Default (), EpsBearer (EpsBearer::NGBR_VIDEO_TCP_DEFAULT));
          enbApp->GetS1SapProvider ()->InitialUeMessage (imsi, (uint16_t) imsi);
        }
    }

  Simulator::Run ();

  // verify that every UE's sink received exactly numPkts * pktSize bytes
  for (std::vector<EnbDlTestData>::iterator enbit = m_enbDlTestData.begin ();
       enbit < m_enbDlTestData.end ();
       ++enbit)
    {
      for (std::vector<UeDlTestData>::iterator ueit = enbit->ues.begin ();
           ueit < enbit->ues.end ();
           ++ueit)
        {
          NS_TEST_ASSERT_MSG_EQ (ueit->serverApp->GetTotalRx (), (ueit->numPkts) * (ueit->pktSize), "wrong total received bytes");
        }
    }

  Simulator::Destroy ();
}
/**
 * Test suite covering the EPC S1-U interface implementation in the
 * downlink direction (remote host -> PGW -> eNB -> UE).
 */
class EpcS1uDlTestSuite : public TestSuite
{
public:
  EpcS1uDlTestSuite (); //!< Constructor: registers all downlink test cases.
} g_epcS1uDlTestSuiteInstance; //!< Static instance; constructing it registers the suite with the test runner.
/**
 * \brief Registers all EPC S1-U downlink test cases.
 *
 * Each test case is described by a vector of EnbDlTestData (one entry per
 * eNB); every entry carries per-UE traffic parameters — number of packets
 * and packet size — to be sent over the S1-U interface.
 */
EpcS1uDlTestSuite::EpcS1uDlTestSuite ()
  : TestSuite ("epc-s1u-downlink", SYSTEM)
{
  // Single eNB with a single UE receiving 1 packet of 100 bytes.
  std::vector<EnbDlTestData> v1;
  EnbDlTestData e1;
  UeDlTestData f1 (1, 100);
  e1.ues.push_back (f1);
  v1.push_back (e1);
  AddTestCase (new EpcS1uDlTestCase ("1 eNB, 1UE", v1), TestCase::QUICK);

  // Single eNB serving two UEs with different traffic profiles.
  std::vector<EnbDlTestData> v2;
  EnbDlTestData e2;
  UeDlTestData f2_1 (1, 100);
  e2.ues.push_back (f2_1);
  UeDlTestData f2_2 (2, 200);
  e2.ues.push_back (f2_2);
  v2.push_back (e2);
  AddTestCase (new EpcS1uDlTestCase ("1 eNB, 2UEs", v2), TestCase::QUICK);

  // Two eNBs, reusing the configurations above.
  std::vector<EnbDlTestData> v3;
  v3.push_back (e1);
  v3.push_back (e2);
  AddTestCase (new EpcS1uDlTestCase ("2 eNBs", v3), TestCase::QUICK);

  // Three eNBs; the third hosts three UEs, including a 1472-byte payload
  // (max UDP payload fitting a standard 1500-byte MTU) and a 1-byte packet.
  EnbDlTestData e3;
  UeDlTestData f3_1 (3, 50);
  e3.ues.push_back (f3_1);
  UeDlTestData f3_2 (5, 1472);
  e3.ues.push_back (f3_2);
  UeDlTestData f3_3 (1, 1);
  // BUG FIX: the original pushed f3_2 a second time here, so the
  // (1 packet, 1 byte) configuration in f3_3 was never exercised.
  e3.ues.push_back (f3_3);
  std::vector<EnbDlTestData> v4;
  v4.push_back (e3);
  v4.push_back (e1);
  v4.push_back (e2);
  AddTestCase (new EpcS1uDlTestCase ("3 eNBs", v4), TestCase::QUICK);

  // Packets larger than the MTU: exercise IP fragmentation over S1-U.
  std::vector<EnbDlTestData> v5;
  EnbDlTestData e5;
  UeDlTestData f5 (10, 3000);
  e5.ues.push_back (f5);
  v5.push_back (e5);
  AddTestCase (new EpcS1uDlTestCase ("1 eNB, 10 pkts 3000 bytes each", v5), TestCase::QUICK);

  std::vector<EnbDlTestData> v6;
  EnbDlTestData e6;
  UeDlTestData f6 (50, 3000);
  e6.ues.push_back (f6);
  v6.push_back (e6);
  AddTestCase (new EpcS1uDlTestCase ("1 eNB, 50 pkts 3000 bytes each", v6), TestCase::QUICK);

  // Very large packets (15000 bytes) requiring heavy fragmentation.
  std::vector<EnbDlTestData> v7;
  EnbDlTestData e7;
  UeDlTestData f7 (10, 15000);
  e7.ues.push_back (f7);
  v7.push_back (e7);
  AddTestCase (new EpcS1uDlTestCase ("1 eNB, 10 pkts 15000 bytes each", v7), TestCase::QUICK);

  std::vector<EnbDlTestData> v8;
  EnbDlTestData e8;
  UeDlTestData f8 (100, 15000);
  e8.ues.push_back (f8);
  v8.push_back (e8);
  AddTestCase (new EpcS1uDlTestCase ("1 eNB, 100 pkts 15000 bytes each", v8), TestCase::QUICK);
}
|
oschaaf/istio
|
pilot/pkg/serviceregistry/kube/controller/endpointslice_test.go
|
<filename>pilot/pkg/serviceregistry/kube/controller/endpointslice_test.go<gh_stars>10-100
// Copyright Istio Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package controller
import (
"reflect"
"testing"
"time"
coreV1 "k8s.io/api/core/v1"
mcs "sigs.k8s.io/mcs-api/pkg/apis/v1alpha1"
"istio.io/api/label"
"istio.io/istio/pilot/pkg/model"
"istio.io/istio/pilot/pkg/serviceregistry/kube"
"istio.io/istio/pkg/config/host"
"istio.io/istio/pkg/config/labels"
)
// TestGetLocalityFromTopology verifies that getLocalityFromTopology builds a
// "region/zone/subzone" locality string from the standard Kubernetes node
// topology labels plus the Istio subzone label, omitting components that are
// absent from the topology map.
func TestGetLocalityFromTopology(t *testing.T) {
	cases := []struct {
		name     string
		topology map[string]string
		locality string // expected locality string
	}{
		{
			"all standard kubernetes labels",
			map[string]string{
				NodeRegionLabelGA: "region",
				NodeZoneLabelGA:   "zone",
			},
			"region/zone",
		},
		{
			"all standard kubernetes labels and Istio custom labels",
			map[string]string{
				NodeRegionLabelGA:          "region",
				NodeZoneLabelGA:            "zone",
				label.TopologySubzone.Name: "subzone",
			},
			"region/zone/subzone",
		},
		{
			"missing zone",
			map[string]string{
				NodeRegionLabelGA: "region",
			},
			"region",
		},
	}
	for _, tt := range cases {
		t.Run(tt.name, func(t *testing.T) {
			got := getLocalityFromTopology(tt.topology)
			// BUG FIX: the original failure message printed tt.topology (the
			// input map) instead of tt.locality (the expected value), making
			// failures misleading. Plain string comparison also replaces the
			// unnecessary reflect.DeepEqual.
			if got != tt.locality {
				t.Fatalf("Expected %v, got %v", tt.locality, got)
			}
		})
	}
}
// TestEndpointSliceFromMCSShouldBeIgnored verifies that endpoints labeled with
// the MCS (multi-cluster services) service-name label are ignored by the
// endpoint-slice controller: no EDS push is generated for them and they
// produce no service instances.
func TestEndpointSliceFromMCSShouldBeIgnored(t *testing.T) {
	const (
		ns      = "nsa"
		svcName = "svc1"
		appName = "prod-app"
	)

	controller, fx := NewFakeControllerWithOptions(FakeControllerOptions{Mode: EndpointSliceOnly})
	defer controller.Stop()

	// Register a node carrying full locality labels (region/zone/subzone).
	node := generateNode("node1", map[string]string{
		NodeZoneLabel:              "zone1",
		NodeRegionLabel:            "region1",
		label.TopologySubzone.Name: "subzone1",
	})
	addNodes(t, controller, node)

	// A single pod backing the service on that node.
	pod := generatePod("172.16.58.3", "pod1", ns, "svcaccount", "node1",
		map[string]string{"app": appName}, map[string]string{})
	pods := []*coreV1.Pod{pod}
	addPods(t, controller, fx, pods...)

	createService(controller, svcName, ns, nil,
		[]int32{8080}, map[string]string{"app": appName}, t)
	if ev := fx.Wait("service"); ev == nil {
		t.Fatal("Timeout creating service")
	}

	// Ensure that the service is available.
	hostname := kube.ServiceHostname(svcName, ns, controller.opts.DomainSuffix)
	svc := controller.GetService(hostname)
	if svc == nil {
		t.Fatal("failed to get service")
	}

	// Create an endpoint that indicates it's an MCS endpoint for the service.
	svc1Ips := []string{"172.16.58.3"}
	portNames := []string{"tcp-port"}
	createEndpoints(t, controller, svcName, ns, portNames, svc1Ips, nil, map[string]string{
		mcs.LabelServiceName: svcName,
	})

	// The MCS-labeled endpoint must NOT trigger an EDS event; wait briefly to
	// catch a spurious push.
	if ev := fx.WaitForDuration("eds", 2*time.Second); ev != nil {
		t.Fatalf("Received unexpected EDS event")
	}

	// Ensure that getting by port returns no ServiceInstances.
	instances := controller.InstancesByPort(svc, svc.Ports[0].Port, labels.Collection{})
	if len(instances) != 0 {
		t.Fatalf("should be 0 instances: len(instances) = %v", len(instances))
	}
}
// TestEndpointSliceCache exercises the endpoint-slice cache: adding endpoints,
// updating a slice in place, merging endpoints across slices, de-duplicating
// endpoints that appear in multiple slices, and deleting slices.
func TestEndpointSliceCache(t *testing.T) {
	cache := newEndpointSliceCache()
	hostname := host.Name("foo")

	// Assert that the cache holds exactly `want` for hostname.
	expect := func(want []*model.IstioEndpoint) {
		if !testEndpointsEqual(cache.Get(hostname), want) {
			t.Fatalf("unexpected endpoints")
		}
	}

	// Single endpoint in a single slice.
	first := &model.IstioEndpoint{
		Address:         "1.2.3.4",
		ServicePortName: "http",
	}
	cache.Update(hostname, "slice1", []*model.IstioEndpoint{first})
	expect([]*model.IstioEndpoint{first})
	if !cache.Has(hostname) {
		t.Fatalf("expect to find the host name")
	}

	// Grow the same slice to two endpoints.
	second := &model.IstioEndpoint{
		Address:         "2.3.4.5",
		ServicePortName: "http",
	}
	cache.Update(hostname, "slice1", []*model.IstioEndpoint{first, second})
	expect([]*model.IstioEndpoint{first, second})

	// Replace the slice contents with a new service port name.
	first = &model.IstioEndpoint{
		Address:         "1.2.3.4",
		ServicePortName: "http2",
	}
	second = &model.IstioEndpoint{
		Address:         "2.3.4.5",
		ServicePortName: "http2",
	}
	cache.Update(hostname, "slice1", []*model.IstioEndpoint{first, second})
	expect([]*model.IstioEndpoint{first, second})

	// A second slice contributes an additional endpoint.
	third := &model.IstioEndpoint{
		Address:         "3.4.5.6",
		ServicePortName: "http2",
	}
	cache.Update(hostname, "slice2", []*model.IstioEndpoint{third})
	expect([]*model.IstioEndpoint{first, second, third})

	// An endpoint present in both slices must appear only once.
	cache.Update(hostname, "slice2", []*model.IstioEndpoint{second, third})
	expect([]*model.IstioEndpoint{first, second, third})

	// Removing slice1 leaves only slice2's endpoints.
	cache.Delete(hostname, "slice1")
	expect([]*model.IstioEndpoint{second, third})

	// Removing the last slice empties the cache for this host.
	cache.Delete(hostname, "slice2")
	if cache.Get(hostname) != nil {
		t.Fatalf("unexpected endpoints")
	}
}
// testEndpointsEqual reports whether a and b contain the same multiset of
// endpoints, keyed by (Address, ServicePortName) and ignoring order.
func testEndpointsEqual(a, b []*model.IstioEndpoint) bool {
	if len(a) != len(b) {
		return false
	}
	// Count occurrences from a, cancel them with b; every key must net to zero.
	counts := make(map[endpointKey]int, len(a))
	for _, ep := range a {
		counts[endpointKey{ep.Address, ep.ServicePortName}]++
	}
	for _, ep := range b {
		counts[endpointKey{ep.Address, ep.ServicePortName}]--
	}
	for _, c := range counts {
		if c != 0 {
			return false
		}
	}
	return true
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.