text
stringlengths 1
1.05M
|
|---|
#! /bin/bash
#SBATCH -o /home/hpc/pr63so/di69fol/workspace/SWEET_2015_12_26/benchmarks_performance/rexi_tests_lrz_freq_waves/2016_01_03_scalability_rexi_fd_high_res/run_rexi_fd_par_m0512_t001_n0128_r3220_a1.txt
###SBATCH -e /home/hpc/pr63so/di69fol/workspace/SWEET_2015_12_26/benchmarks_performance/rexi_tests_lrz_freq_waves/2016_01_03_scalability_rexi_fd_high_res/run_rexi_fd_par_m0512_t001_n0128_r3220_a1.err
#SBATCH -J rexi_fd_par_m0512_t001_n0128_r3220_a1
#SBATCH --get-user-env
#SBATCH --clusters=mpp2
#SBATCH --ntasks=3220
#SBATCH --cpus-per-task=1
#SBATCH --exclusive
#SBATCH --export=NONE
#SBATCH --time=03:00:00

# SLURM job script for a SWEET REXI finite-difference scalability benchmark:
# 3220 MPI ranks, 1 OpenMP thread per rank, on the LRZ "mpp2" cluster.

#declare -x NUMA_BLOCK_ALLOC_VERBOSITY=1

# Thread pinning: one thread per hardware thread, compact placement.
declare -x KMP_AFFINITY="granularity=thread,compact,1,0"
declare -x OMP_NUM_THREADS=1
echo "OMP_NUM_THREADS=$OMP_NUM_THREADS"
echo

# Load the toolchain this benchmark was built with (Intel 16 compilers,
# Intel MPI 5.1, GCC 5, MPI-free Anaconda Python 2.7).
. /etc/profile.d/modules.sh
module unload gcc
module unload fftw
module unload python
module load python/2.7_anaconda_nompi
module unload intel
module load intel/16.0
module unload mpi.intel
module load mpi.intel/5.1
module load gcc/5

# Change to the benchmark dir, then up to the SWEET source root.
cd /home/hpc/pr63so/di69fol/workspace/SWEET_2015_12_26/benchmarks_performance/rexi_tests_lrz_freq_waves/2016_01_03_scalability_rexi_fd_high_res
cd ../../../
. local_software/env_vars.sh

# force to use FFTW WISDOM data
declare -x SWEET_FFTW_LOAD_WISDOM_FROM_FILE="FFTW_WISDOM_nofreq_T0"

# Launch: 28 ranks per node (-ppn 28), 3220 ranks total; "time -p" reports
# POSIX-format wall-clock time for the whole MPI run.
# NOTE(review): -C is passed twice (0.3 and later -5.0); confirm the second
# value is the intended override.
time -p mpiexec.hydra -genv OMP_NUM_THREADS 1 -envall -ppn 28 -n 3220 ./build/rexi_fd_par_m_tno_a1 --initial-freq-x-mul=2.0 --initial-freq-y-mul=1.0 -f 1 -g 1 -H 1 -X 1 -Y 1 --compute-error 1 -t 50 -R 4 -C 0.3 -N 128 -U 0 -S 0 --use-specdiff-for-complex-array 0 --rexi-h 0.8 --timestepping-mode 1 --staggering 0 --rexi-m=512 -C -5.0
|
#include <stdlib.h>
#include <string.h>
#include <math.h>
#include "triangle.h"
/* triangle_new: allocate a triangle and copy the three 2-D vertices
 * p_x0, p_x1, p_x2 (each a pointer to two doubles) into it.
 * Returns NULL if the allocation fails. */
triangle * triangle_new(double * p_x0, double * p_x1, double * p_x2)
{
  triangle * p;

  p = (triangle *) malloc(sizeof(triangle));
  /* The original had a "NULL pointer check" comment but no actual check,
   * so an allocation failure dereferenced NULL in the memcpy below. */
  if (p == NULL)
  {
    return NULL;
  }
  memcpy(p->x0, p_x0, sizeof(double) * 2);
  memcpy(p->x1, p_x1, sizeof(double) * 2);
  memcpy(p->x2, p_x2, sizeof(double) * 2);
  return p;
}
/* triangle_free: release a triangle created by triangle_new.
 * x0/x1/x2 must be embedded arrays, not heap pointers: triangle_new
 * memcpys into them without allocating them. Freeing them individually,
 * as the original did, is undefined behavior (free of non-heap memory).
 * Only the struct itself was malloc'd, so only it is freed. Passing NULL
 * is a safe no-op, matching free()'s own contract. */
void triangle_free(triangle * p)
{
  if (p != NULL)
  {
    free(p);
  }
}
/* triangle_measure: area of the triangle, computed as half the absolute
 * value of the cross product of the two edge vectors (x1-x0) and (x2-x0). */
double triangle_measure(triangle * p)
{
  double ux, uy, vx, vy;

  /* Edge vectors from vertex x0; reading the struct fields directly is
   * equivalent to the original's memcpy into scratch arrays. */
  ux = p->x1[0] - p->x0[0];
  uy = p->x1[1] - p->x0[1];
  vx = p->x2[0] - p->x0[0];
  vy = p->x2[1] - p->x0[1];

  return fabs(ux * vy - uy * vx) / 2;
}
|
#! /bin/bash -e
# Copyright 2019-Present Couchbase, Inc.
#
# Use of this software is governed by the Business Source License included in
# the file licenses/BSL-Couchbase.txt. As of the Change Date specified in that
# file, in accordance with the Business Source License, use of this software
# will be governed by the Apache License, Version 2.0, included in the file
# licenses/APL2.txt.

# This script builds the mbedTLS submodule.
# It's run by Xcode when building the "mbedTLS" target.

source "$SRCROOT/build_setup.sh" mbedtls

# Set up the CMake build options:
CMAKE_OPTS="$CMAKE_OPTS \
  -DENABLE_PROGRAMS=0 \
  -DENABLE_TESTING=0"

# FIX: the glob must be unquoted for [[ == ]] pattern matching. The original
# compared against the literal string "Release*", which never matches, so
# Release configurations were silently built as Debug.
if [[ "$CONFIGURATION" == Release* ]]
then
  CMAKE_OPTS="$CMAKE_OPTS -DCMAKE_BUILD_TYPE=RelWithDebInfo"
else
  CMAKE_OPTS="$CMAKE_OPTS -DCMAKE_BUILD_TYPE=Debug"
  if [[ "$ENABLE_ADDRESS_SANITIZER" == "YES" ]]
  then
    # NOTE(review): LWS_WITH_ASAN looks like a libwebsockets option, not an
    # mbedTLS one — confirm it has any effect on this build.
    CMAKE_OPTS="$CMAKE_OPTS -DLWS_WITH_ASAN=1"
  fi
fi

echo "CMake options: $CMAKE_OPTS"

# Build!  ($CMAKE_OPTS is intentionally unquoted so each option word-splits.)
if [[ "$EFFECTIVE_PLATFORM_NAME" == "-maccatalyst" ]]
then
  cmake "$SRCROOT/../vendor/mbedtls" $CMAKE_OPTS \
    '-DCMAKE_CXX_FLAGS=-target x86_64-apple-ios13.1-macabi' \
    '-DCMAKE_C_FLAGS=-target x86_64-apple-ios13.1-macabi'
else
  cmake "$SRCROOT/../vendor/mbedtls" $CMAKE_OPTS
fi
make

# Copy the resulting static libraries to the Xcode build dir where the linker will find them:
mkdir -p "$BUILT_PRODUCTS_DIR"
cp -pv library/libmbed*.a "$BUILT_PRODUCTS_DIR/"
cp -pv crypto/library/libmbed*.a "$BUILT_PRODUCTS_DIR/"
|
package db
import "example/users/entities"
// RoleRepository handles storage of roles.
type RoleRepository interface {
	// Persist stores the given roles.
	Persist(roles ...entities.Role) error
	// Find returns the roles matching the given IDs. In the in-memory
	// implementation below, unknown IDs are skipped rather than reported
	// as errors — confirm other implementations follow the same contract.
	Find(ids ...entities.RoleID) ([]entities.Role, error)
	// All returns every stored role.
	All() ([]entities.Role, error)
}
// NewMemoryRoleRepository builds an in-memory role store pre-seeded with the
// default "User" and "Admin" roles.
func NewMemoryRoleRepository() *MemoryRoleRepository {
	repo := &MemoryRoleRepository{
		roles: make(map[entities.RoleID]entities.Role),
	}
	for _, title := range []string{"User", "Admin"} {
		repo.Persist(entities.CreateRole(title))
	}
	return repo
}
// MemoryRoleRepository is an in-memory RoleRepository backed by a plain map
// keyed by role ID. No locking is done, so it is not safe for concurrent use.
type MemoryRoleRepository struct {
	// roles holds every persisted role, keyed by its ID.
	roles map[entities.RoleID]entities.Role
}
// Persist stores every given role keyed by its ID, overwriting any existing
// entry with the same ID. It always returns nil.
func (r *MemoryRoleRepository) Persist(roles ...entities.Role) error {
	for i := range roles {
		r.roles[roles[i].ID] = roles[i]
	}
	return nil
}
// Find returns the roles matching the given IDs, preserving the order of the
// ids argument. IDs with no stored role are skipped silently; the error is
// always nil. The result is a non-nil (possibly empty) slice.
func (r *MemoryRoleRepository) Find(ids ...entities.RoleID) ([]entities.Role, error) {
	found := make([]entities.Role, 0)
	for _, id := range ids {
		if role, ok := r.roles[id]; ok {
			found = append(found, role)
		}
	}
	return found, nil
}
// All returns every stored role in unspecified (map-iteration) order.
// The error is always nil; the result is a non-nil slice.
func (r *MemoryRoleRepository) All() ([]entities.Role, error) {
	out := make([]entities.Role, 0, len(r.roles))
	for _, role := range r.roles {
		out = append(out, role)
	}
	return out, nil
}
|
#!/bin/bash
# Run a single dieharder randomness test with a fixed seed so the run is
# reproducible.
#   -d 4          : test number 4 (see `dieharder -l` for the mapping)
#   -g 2          : generator number 2 (see `dieharder -g -1` for the list)
#   -S 1740814690 : fixed RNG seed
dieharder -d 4 -g 2 -S 1740814690
|
# -*- coding: utf-8 -*-
"""
Azure Resource Manager (ARM) Container Instance Group State Module
.. versionadded:: 3.0.0
.. versionchanged:: 4.0.0
:maintainer: <<EMAIL>>
:configuration: This module requires Azure Resource Manager credentials to be passed via acct. Note that the
authentication parameters are case sensitive.
Required provider parameters:
if using username and password:
* ``subscription_id``
* ``username``
* ``password``
if using a service principal:
* ``subscription_id``
* ``tenant``
* ``client_id``
* ``secret``
Optional provider parameters:
**cloud_environment**: Used to point the cloud driver to different API endpoints, such as Azure GovCloud.
Possible values:
* ``AZURE_PUBLIC_CLOUD`` (default)
* ``AZURE_CHINA_CLOUD``
* ``AZURE_US_GOV_CLOUD``
* ``AZURE_GERMAN_CLOUD``
Example configuration for Azure Resource Manager authentication:
.. code-block:: yaml
azurerm:
default:
subscription_id: 3287abc8-f98a-c678-3bde-326766fd3617
tenant: ABCDEFAB-1234-ABCD-1234-ABCDEFABCDEF
client_id: ABCDEFAB-1234-ABCD-1234-ABCDEFABCDEF
secret: XXXXXXXXXXXXXXXXXXXXXXXX
cloud_environment: AZURE_PUBLIC_CLOUD
user_pass_auth:
subscription_id: 3287abc8-f98a-c678-3bde-326766fd3617
username: fletch
password: <PASSWORD>
The authentication parameters can also be passed as a dictionary of keyword arguments to the ``connection_auth``
parameter of each state, but this is not preferred and could be deprecated in the future.
"""
# Import Python libs
from dict_tools import differ
import logging
log = logging.getLogger(__name__)
async def present(
    hub,
    ctx,
    name,
    resource_group,
    containers,
    os_type,
    restart_policy="OnFailure",
    identity=None,
    image_registry_credentials=None,
    ip_address=None,
    volumes=None,
    diagnostics=None,
    network_profile=None,
    dns_config=None,
    sku=None,
    encryption_properties=None,
    init_containers=None,
    tags=None,
    connection_auth=None,
    **kwargs,
):
    """
    .. versionadded:: 3.0.0

    .. versionchanged:: 4.0.0

    Ensure a container instance group exists.

    :param name: The name of the container group.

    :param resource_group: The name of the resource group to which the container group belongs.

    :param containers: A list of the containers within the container group. The following are possible parameters for
        the containers:

        - **name**: Required. The user-provided name of the container instance.
        - **image**: Required. The name of the image used to create the container instance.
        - **resources**:
            - **requests**:
                - **memory_in_gb**: Required. The memory request in GB of this container instance.
                - **cpu**: Required. The CPU request of this container instance.
                - **gpu**: The GPU request of this container instance.
            - **limits**:
                - **memory_in_gb**: The memory limit in GB of this container instance.
                - **cpu**: The CPU limit of this container instance.
                - **gpu**: The GPU limit of this container instance.
        - **command**: A list of commands to execute within the container instance in exec form.
        - **ports**: A list of the dictionaries of exposed ports on the container instance
          (i.e., ``{"protocol": "TCP", "port": 80}``).
        - **environment_variables**: A list of environment variables to set in the container instance.
            - **name**: Required if environment_variables is used. The name of the environment variable.
            - **value**: The value of the environment variable.
            - **secure_value**: The value of the secure environment variable.
        - **volume_mounts**: A list of volume mounts available to the container instance.
            - **name**: Required if volume_mounts is used. The name of the volume mount.
            - **mount_path**: Required if volume_mounts is used. The path within the container where the volume should
              be mounted. Must not contain colon (:).
            - **read_only**: Boolean flag indicating whether the volume mount is read-only.
        - **liveness_probe**:
            - **exec_property**:
                - **command**: The commands to execute within the container.
            - **http_get**:
                - **path**: The path to probe.
                - **port**: Required if http_get is used. The port number to probe.
                - **scheme**: The scheme. Possible values include: 'http', 'https'.
            - **initial_delay_seconds**: The initial delay seconds.
            - **period_seconds**: The period seconds.
            - **failure_threshold**: The failure threshold.
            - **success_threshold**: The success threshold.
            - **timeout_seconds**: The timeout seconds.
        - **readiness_probe**:
            - **exec_property**:
                - **command**: The commands to execute within the container.
            - **http_get**:
                - **path**: The path to probe.
                - **port**: Required if http_get is used. The port number to probe.
                - **scheme**: The scheme. Possible values include: 'http', 'https'
            - **initial_delay_seconds**: The initial delay seconds.
            - **period_seconds**: The period seconds.
            - **failure_threshold**: The failure threshold.
            - **success_threshold**: The success threshold.
            - **timeout_seconds**: The timeout seconds.

    :param os_type: The operating system type required by the containers in the container group. Possible values
        include: 'Windows', 'Linux'.

    :param restart_policy: Restart policy for all containers within the container group. Possible values are:

        - ``Always``: Always restart.
        - ``OnFailure``: Restart on failure.
        - ``Never``: Never restart.

    :param identity: A dictionary defining a ContainerGroupIdentity object which represents the identity for the
        container group.

    :param image_registry_credentials: A list of dictionaries defining ImageRegistryCredential objects for the image
        registry credentials.

    :param ip_address: A dictionary defining an IpAddress object which represents the IP address for the container
        group. Possible keys are:

        - ``ports``: The list of ports exposed on the container group. Required if ip_address is used.
        - ``type``: Specifies if the IP is exposed to the public internet or private VNET. Required if ip_address is
          used. Possible values include: 'Public', 'Private'.
        - ``ip``: The IP exposed to the public internet.
        - ``dns_name_label``: The Dns name label for the IP.

    :param volumes: The list of dictionaries representing Volume objects that can be mounted by containers in this
        container group.

    :param diagnostics: A dictionary defining a ContainerGroupDiagnostics object which represents the diagnostic
        information for the container group.

    :param network_profile: A dictionary defining a ContainerGroupNetworkProfile object which represents the network
        profile information for the container group.

    :param dns_config: A dictionary defining a DnsConfiguration object which represents the DNS config information for
        the container group.

    :param sku: The SKU for a container group. Possible values include: 'Standard', 'Dedicated'.

    :param encryption_properties: A dictionary defining an EncryptionProperties object which represents the encryption
        properties for the container group.

    :param init_containers: A list of dictionaries defining InitContainerDefinition objects which represent the init
        containers for the container group.

    :param tags: A dictionary of strings can be passed as tag metadata to the object.

    :param connection_auth: A dict with subscription and authentication parameters to be used in connecting to the
        Azure Resource Manager API.

    Example usage:

    .. code-block:: yaml

        Ensure container instance group exists:
            azurerm.containerinstance.group.present:
                - name: containergroup
                - resource_group: testgroup
                - containers:
                    - name: mycoolwebcontainer
                      image: "nginx:latest"
                      ports:
                        - protocol: TCP
                          port: 80
                      resources:
                          requests:
                              memory_in_gb: 1
                              cpu: 1
                      volume_mounts:
                        - name: testwebsite
                          mount_path: /usr/share/nginx
                          read_only: True
                - os_type: Linux
                - restart_policy: OnFailure
                - ip_address:
                    ports:
                      - protocol: TCP
                        port: 80
                    type: Public
                    dns_name_label: supercoolcontainergroup
                - volumes:
                    - name: testwebsite
                      git_repo:
                          directory: html
                          repository: "https://github.com/WooxSolo/test-website"
                - tags:
                    how_awesome: very
                    contact_name: <NAME>

    """
    ret = {"name": name, "result": False, "comment": "", "changes": {}}
    # Assume creation until the group is found to already exist.
    action = "create"

    # Resolve credentials: an explicit connection_auth dict wins, otherwise
    # fall back to the acct-provided profile; fail early if neither exists.
    if not isinstance(connection_auth, dict):
        if ctx["acct"]:
            connection_auth = ctx["acct"]
        else:
            ret[
                "comment"
            ] = "Connection information must be specified via acct or connection_auth dictionary!"
            return ret

    # get existing container instance group if present
    acig = await hub.exec.azurerm.containerinstance.group.get(
        ctx, name, resource_group, azurerm_log_level="info", **connection_auth
    )

    # "error" absent means the group exists: diff every manageable property
    # against the desired state and collect differences in ret["changes"].
    if "error" not in acig:
        action = "update"

        # containers changes
        comp = await hub.exec.azurerm.utils.compare_list_of_dicts(
            acig["containers"], containers
        )
        if comp.get("changes"):
            ret["changes"]["containers"] = comp["changes"]

        # os_type changes (case-insensitive comparison)
        if os_type.upper() != acig["os_type"].upper():
            ret["changes"]["os_type"] = {"old": acig["os_type"], "new": os_type}

        # restart_policy changes (case-insensitive comparison)
        if restart_policy.upper() != acig["restart_policy"].upper():
            ret["changes"]["restart_policy"] = {
                "old": acig["restart_policy"],
                "new": restart_policy,
            }

        # identity changes
        if identity:
            id_diff = differ.deep_diff(acig.get("identity", {}), identity)
            if id_diff:
                ret["changes"]["identity"] = id_diff

        # image_registry_credentials changes (list elements matched by "server")
        if image_registry_credentials:
            comp = await hub.exec.azurerm.utils.compare_list_of_dicts(
                acig.get("image_registry_credentials", []),
                image_registry_credentials,
                key_name="server",
            )
            if comp.get("changes"):
                ret["changes"]["image_registry_credentials"] = comp["changes"]

        # ip_address changes
        if ip_address:
            # copy so popping keys does not mutate the fetched state
            old_ip = acig.get("ip_address", {}).copy()
            # remove keys from the diff that can't be set
            for key in ["fqdn", "ip"]:
                if key in old_ip:
                    old_ip.pop(key)
            ip_diff = differ.deep_diff(old_ip, ip_address)
            if ip_diff:
                ret["changes"]["ip_address"] = ip_diff

        # volumes changes
        if volumes:
            comp = await hub.exec.azurerm.utils.compare_list_of_dicts(
                acig.get("volumes", []), volumes
            )
            if comp.get("changes"):
                ret["changes"]["volumes"] = comp["changes"]

        # diagnostics changes
        if diagnostics:
            diag_diff = differ.deep_diff(acig.get("diagnostics", {}), diagnostics)
            if diag_diff:
                ret["changes"]["diagnostics"] = diag_diff

        # network_profile changes
        if network_profile:
            net_diff = differ.deep_diff(
                acig.get("network_profile", {}), network_profile
            )
            if net_diff:
                ret["changes"]["network_profile"] = net_diff

        # dns_config changes
        if dns_config:
            dns_diff = differ.deep_diff(acig.get("dns_config", {}), dns_config)
            if dns_diff:
                ret["changes"]["dns_config"] = dns_diff

        # sku changes (case-insensitive comparison)
        if sku and sku.upper() != acig["sku"].upper():
            ret["changes"]["sku"] = {
                "old": acig["sku"],
                "new": sku,
            }

        # encryption_properties changes
        if encryption_properties:
            enc_diff = differ.deep_diff(
                acig.get("encryption_properties", {}), encryption_properties
            )
            if enc_diff:
                ret["changes"]["encryption_properties"] = enc_diff

        # init_containers changes
        if init_containers:
            comp = await hub.exec.azurerm.utils.compare_list_of_dicts(
                acig["init_containers"], init_containers
            )
            if comp.get("changes"):
                ret["changes"]["init_containers"] = comp["changes"]

        # tag changes — tag_diff is also consulted below to decide which API
        # call(s) to make; it is only defined on this (update) path.
        tag_diff = differ.deep_diff(acig.get("tags", {}), tags or {})
        if tag_diff:
            ret["changes"]["tags"] = tag_diff

        # No differences at all: the group is already in the desired state.
        if not ret["changes"]:
            ret["result"] = True
            ret["comment"] = "Container instance group {0} is already present.".format(
                name
            )
            return ret

        # test mode: report the pending update without applying it.
        if ctx["test"]:
            ret["comment"] = "Container instance group {0} would be updated.".format(
                name
            )
            ret["result"] = None
            return ret

    # test mode on the create path: report without creating.
    elif ctx["test"]:
        ret["comment"] = "Container instance group {0} would be created.".format(name)
        ret["result"] = None
        return ret

    acig_kwargs = kwargs.copy()
    acig_kwargs.update(connection_auth)

    # Call create_or_update unless the ONLY change is tags (that case is
    # handled by the dedicated update call below). Note: when action is
    # "create", short-circuit evaluation avoids reading the undefined
    # tag_diff name.
    if action == "create" or len(ret["changes"]) > 1 or not tag_diff:
        acig = await hub.exec.azurerm.containerinstance.group.create_or_update(
            ctx,
            name,
            resource_group,
            containers=containers,
            os_type=os_type,
            restart_policy=restart_policy,
            identity=identity,
            image_registry_credentials=image_registry_credentials,
            ip_address=ip_address,
            volumes=volumes,
            diagnostics=diagnostics,
            network_profile=network_profile,
            dns_config=dns_config,
            sku=sku,
            encryption_properties=encryption_properties,
            init_containers=init_containers,
            tags=tags,
            **acig_kwargs,
        )

    # no idea why create_or_update doesn't work for tags
    if action == "update" and tag_diff:
        acig = await hub.exec.azurerm.containerinstance.group.update(
            ctx, name, resource_group, tags=tags, **acig_kwargs,
        )

    # On create, the whole returned object is the "new" state.
    if action == "create":
        ret["changes"] = {"old": {}, "new": acig}

    if "error" not in acig:
        ret["result"] = True
        ret["comment"] = f"Container instance group {name} has been {action}d."
        return ret

    ret["comment"] = "Failed to {0} container instance group {1}! ({2})".format(
        action, name, acig.get("error")
    )
    # Clear changes on failure so nothing is reported as applied.
    if not ret["result"]:
        ret["changes"] = {}
    return ret
async def absent(hub, ctx, name, resource_group, connection_auth=None, **kwargs):
    """
    .. versionadded:: 3.0.0

    Ensure a container instance group does not exist in a resource group.

    :param name: Name of the container instance group.

    :param resource_group: The name of the resource group to which the container instance group belongs.

    :param connection_auth: A dict with subscription and authentication parameters to be used in connecting to the
        Azure Resource Manager API.

    .. code-block:: yaml

        Ensure container instance group is absent:
            azurerm.containerinstance.group.absent:
                - name: containergroup
                - resource_group: testgroup

    """
    ret = {"name": name, "result": False, "comment": "", "changes": {}}

    # Resolve credentials: an explicit connection_auth dict wins, otherwise
    # fall back to the acct-provided profile; fail early if neither exists.
    if not isinstance(connection_auth, dict):
        if not ctx["acct"]:
            ret[
                "comment"
            ] = "Connection information must be specified via acct or connection_auth dictionary!"
            return ret
        connection_auth = ctx["acct"]

    existing = await hub.exec.azurerm.containerinstance.group.get(
        ctx, name, resource_group, azurerm_log_level="info", **connection_auth
    )

    # An error from get() means the group does not exist — nothing to do.
    if "error" in existing:
        ret["result"] = True
        ret["comment"] = "Container instance group {0} is already absent.".format(name)
        return ret

    # Test mode: report the pending deletion without performing it.
    if ctx["test"]:
        ret["result"] = None
        ret["comment"] = "Container instance group {0} would be deleted.".format(name)
        ret["changes"] = {
            "old": existing,
            "new": {},
        }
        return ret

    deleted = await hub.exec.azurerm.containerinstance.group.delete(
        ctx, name, resource_group, **connection_auth
    )

    if deleted:
        ret["result"] = True
        ret["comment"] = "Container instance group {0} has been deleted.".format(name)
        ret["changes"] = {"old": existing, "new": {}}
        return ret

    ret["comment"] = "Failed to delete container instance group {0}!".format(name)
    return ret
|
/* choose: binomial coefficient C(n, k).
 *
 * Keeps the original convention that C(n, 0) == C(n, n) == 1, and returns
 * 0 when k is outside [0, n]. The original recursion never terminated for
 * k > n or k < 0 (e.g. choose(3, 5) recursed forever) and took exponential
 * time otherwise; this version runs in O(min(k, n-k)).
 */
int choose(int n, int k) {
    if (k == 0 || k == n)
        return 1;
    if (k < 0 || k > n)
        return 0;
    if (k > n - k)
        k = n - k;                 /* symmetry: C(n,k) == C(n,n-k) */
    long long result = 1;
    for (int i = 1; i <= k; i++) {
        /* result holds C(n-k+i-1, i-1); multiply-then-divide is exact
         * because C(n-k+i, i) is an integer. */
        result = result * (n - k + i) / i;
    }
    return (int) result;
}
|
#!/bin/bash
# Container entrypoint: optionally re-run the build as the host user's
# UID/GID so generated files are not owned by root, then hand off to ctest.

# Execute system setup hook
/systemsetup.sh

# If we are running docker natively, we want to create a user in the container
# with the same UID and GID as the user on the host machine, so that any files
# created are owned by that user. Without this they are all owned by root.
# (All expansions are quoted — the originals were unquoted and would break or
# mis-parse on empty/whitespace values.)
if [[ -n "$BUILDER_UID" ]] && [[ -n "$BUILDER_GID" ]]; then
  groupadd -o -g "$BUILDER_GID" "$BUILDER_GROUP" 2> /dev/null
  useradd -o -m -g "$BUILDER_GID" -u "$BUILDER_UID" "$BUILDER_USER" 2> /dev/null
  shopt -s dotglob
  # Make sure the home directory is owned by the specified user/group.
  chown -R "$BUILDER_UID:$BUILDER_GID" "$HOME"
  # Make sure build artifacts are accessible by the specified user/group.
  chown -R "$BUILDER_UID:$BUILDER_GID" /binary
  # Execute user setup hook as that user (chpst -u :uid:gid switches identity).
  chpst -u ":$BUILDER_UID:$BUILDER_GID" /usersetup.sh
  # Run the command as the specified user/group.
  exec chpst -u ":$BUILDER_UID:$BUILDER_GID" ctest -S entrypoint.cmake "$@"
else
  # Execute user setup hook
  /usersetup.sh
  # Just run the command as root.
  exec ctest -S entrypoint.cmake "$@"
fi
|
#!/bin/bash
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
#
# C++ tests
# Bazel target lists (joined with "+" for use in Bazel query expressions)
# of C++ tests excluded from the Windows runs.

# Tests that currently fail on CPU builds but are expected to pass eventually.
failing_cpu_cc_tests="\
    //tensorflow/core/kernels:control_flow_ops_test + \
    //tensorflow/core:example_example_parser_configuration_test + \
    //tensorflow/core:lib_core_status_test + \
    //tensorflow/core:lib_monitoring_collection_registry_test + \
    //tensorflow/core:lib_strings_numbers_test + \
    //tensorflow/core:lib_strings_str_util_test + \
    //tensorflow/core/platform/hadoop:hadoop_file_system_test + \
    //tensorflow/core:platform_file_system_test + \
    //tensorflow/core:platform_logging_test + \
    //tensorflow/core:util_sparse_sparse_tensor_test + \
    //tensorflow/cc:framework_gradient_checker_test + \
    //tensorflow/cc:framework_gradients_test + \
    //tensorflow/cc:gradients_array_grad_test + \
    //tensorflow/cc:gradients_math_grad_test + \
    //tensorflow/cc:gradients_nn_grad_test + \
    //tensorflow/cc/saved_model:loader_test \
"

# Tests that are known-broken on this platform (not expected to pass).
broken_cpu_cc_tests="\
    //tensorflow/core/kernels/hexagon:graph_transferer_test + \
    //tensorflow/cc:framework_cc_ops_test + \
    //tensorflow/core/platform/cloud:time_util_test + \
    //tensorflow/core/platform/cloud:oauth_client_test + \
    //tensorflow/core/platform/cloud:http_request_test + \
    //tensorflow/core/platform/cloud:google_auth_provider_test + \
    //tensorflow/core/platform/cloud:gcs_file_system_test + \
    //tensorflow/core/kernels/cloud:bigquery_table_accessor_test + \
    //tensorflow/core/kernels/hexagon:quantized_matmul_op_for_hexagon_test + \
    //tensorflow/core/kernels:requantize_op_test + \
    //tensorflow/core/kernels:requantization_range_op_test + \
    //tensorflow/core/kernels:quantized_reshape_op_test + \
    //tensorflow/core/kernels:quantized_pooling_ops_test + \
    //tensorflow/core/kernels:quantized_matmul_op_test + \
    //tensorflow/core/kernels:quantized_conv_ops_test + \
    //tensorflow/core/kernels:quantized_concat_op_test + \
    //tensorflow/core/kernels:quantized_bias_add_op_test + \
    //tensorflow/core/kernels:quantized_batch_norm_op_test + \
    //tensorflow/core/kernels:quantized_activation_ops_test + \
    //tensorflow/core/kernels:quantize_op_test + \
    //tensorflow/core/kernels:quantize_down_and_shrink_range_op_test + \
    //tensorflow/core/kernels:quantize_and_dequantize_op_test_gpu + \
    //tensorflow/core/kernels:quantize_and_dequantize_op_test + \
    //tensorflow/core/kernels:quantization_utils_test + \
    //tensorflow/core/kernels:debug_ops_test + \
    //tensorflow/core/distributed_runtime/rpc:rpc_rendezvous_mgr_test_gpu + \
    //tensorflow/core/distributed_runtime/rpc:rpc_rendezvous_mgr_test + \
    //tensorflow/core/distributed_runtime/rpc:grpc_tensor_coding_test + \
    //tensorflow/core/distributed_runtime/rpc:grpc_session_test_gpu + \
    //tensorflow/core/distributed_runtime/rpc:grpc_session_test + \
    //tensorflow/core/distributed_runtime/rpc:grpc_channel_test_gpu + \
    //tensorflow/core/distributed_runtime/rpc:grpc_channel_test + \
    //tensorflow/core/distributed_runtime:remote_device_test_gpu + \
    //tensorflow/core/distributed_runtime:remote_device_test + \
    //tensorflow/core/distributed_runtime:executor_test_gpu + \
    //tensorflow/core/distributed_runtime:executor_test + \
    //tensorflow/core/debug:debug_gateway_test + \
    //tensorflow/core/debug:debug_grpc_io_utils_test + \
    //tensorflow/core:util_reporter_test + \
    //tensorflow/core:util_memmapped_file_system_test + \
    //tensorflow/core:platform_subprocess_test + \
    //tensorflow/core:platform_profile_utils_cpu_utils_test + \
    //tensorflow/core:lib_jpeg_jpeg_mem_unittest + \
    //tensorflow/core/debug:debug_io_utils_test \
"

# lib_core_threadpool_test is timeout, but it passes when running alone
# Additional tests that only fail on GPU builds.
extra_failing_gpu_cc_tests="\
    //tensorflow/core:lib_core_threadpool_test + \
    //tensorflow/core:cuda_libdevice_path_test + \
    //tensorflow/core:common_runtime_direct_session_test + \
    //tensorflow/core:common_runtime_direct_session_with_tracking_alloc_test + \
    //tensorflow/core:gpu_tracer_test + \
    //tensorflow/core:ops_math_grad_test \
"

# Combined exclusion expressions for CPU and GPU runs.
exclude_cpu_cc_tests="${failing_cpu_cc_tests} + ${broken_cpu_cc_tests}"
exclude_gpu_cc_tests="${extra_failing_gpu_cc_tests} + ${exclude_cpu_cc_tests}"
# Python tests
# The first argument is the name of the python test directory
function get_failing_cpu_py_tests() {
echo "
//$1/tensorflow/python:basic_session_run_hooks_test + \
//$1/tensorflow/python:contrib_test + \
//$1/tensorflow/python:dequantize_op_test + \
//$1/tensorflow/python:directory_watcher_test + \
//$1/tensorflow/python:event_multiplexer_test + \
//$1/tensorflow/python:file_io_test + \
//$1/tensorflow/python:file_system_test + \
//$1/tensorflow/python:framework_meta_graph_test + \
//$1/tensorflow/python:framework_ops_test + \
//$1/tensorflow/python:framework_tensor_util_test + \
//$1/tensorflow/python:framework_test_util_test + \
//$1/tensorflow/python:image_ops_test + \
//$1/tensorflow/python:localhost_cluster_performance_test + \
//$1/tensorflow/python:monitored_session_test + \
//$1/tensorflow/python:nn_batchnorm_test + \
//$1/tensorflow/python:protobuf_compare_test + \
//$1/tensorflow/python:quantized_conv_ops_test + \
//$1/tensorflow/python:saver_large_variable_test + \
//$1/tensorflow/python:saver_test + \
//$1/tensorflow/python:session_test + \
//$1/tensorflow/python:supervisor_test + \
//$1/tensorflow/python:sync_replicas_optimizer_test + \
//$1/tensorflow/python/debug/... + \
//$1/tensorflow/python/kernel_tests:as_string_op_test + \
//$1/tensorflow/python/kernel_tests:benchmark_test + \
//$1/tensorflow/python/kernel_tests:cast_op_test + \
//$1/tensorflow/python/kernel_tests:clip_ops_test + \
//$1/tensorflow/python/kernel_tests:conv_ops_test + \
//$1/tensorflow/python/kernel_tests:decode_image_op_test + \
//$1/tensorflow/python/kernel_tests:depthwise_conv_op_test + \
//$1/tensorflow/python/kernel_tests:functional_ops_test + \
//$1/tensorflow/python/kernel_tests:py_func_test + \
//$1/tensorflow/python/kernel_tests:rnn_test + \
//$1/tensorflow/python/kernel_tests:sets_test + \
//$1/tensorflow/python/kernel_tests:sparse_matmul_op_test + \
//$1/tensorflow/python/kernel_tests:string_to_number_op_test + \
//$1/tensorflow/python/kernel_tests:summary_ops_test + \
//$1/tensorflow/python/kernel_tests:variable_scope_test + \
//$1/tensorflow/python/saved_model:saved_model_test \
"
}
# Print (on stdout) the "+"-joined Bazel target list of Python tests that
# currently fail on GPU builds; this is the CPU failure list plus a few
# GPU-only failures.
# Arguments: $1 - name of the python test directory.
function get_failing_gpu_py_tests() {
    # FIX: "$1" is quoted in the nested command substitution — inside $(...)
    # the outer double quotes do not apply, so the original unquoted $1 was
    # subject to word splitting and globbing.
    echo "
    //$1/tensorflow/python/kernel_tests:diag_op_test + \
    //$1/tensorflow/python/kernel_tests:one_hot_op_test + \
    //$1/tensorflow/python/kernel_tests:rnn_test + \
    //$1/tensorflow/python/kernel_tests:sets_test + \
    //$1/tensorflow/python/kernel_tests:trace_op_test + \
    $(get_failing_cpu_py_tests "$1")
"
}
# Clean Bazel's output base by removing it manually.
function clean_output_base() {
    # TODO(pcloudy): bazel clean --expunge doesn't work on Windows yet.
    # Clean the output base manually to ensure build correctness
    bazel clean
    output_base=$(bazel info output_base)
    bazel shutdown
    # Sleep 5s to wait for jvm shutdown completely
    # otherwise rm will fail with device or resource busy error
    sleep 5
    # FIX: quoted and guarded with ":?" — if "bazel info" failed and left
    # output_base empty, the original ran a bare "rm -rf" with no target
    # (and an unquoted path would break on spaces, common on Windows).
    rm -rf "${output_base:?}"
}
# Configure a CPU-only TensorFlow build: disable CUDA and accept the
# default answer for every remaining ./configure prompt.
function run_configure_for_cpu_build {
    export TF_NEED_CUDA=0
    printf '\n' | ./configure
}
# Configure a CUDA-enabled TensorFlow build.
# Due to a bug in Bazel (https://github.com/bazelbuild/bazel/issues/2182),
# `yes "" | ./configure` doesn't work on Windows, so every setting the
# script would prompt for is exported up front to avoid any interaction.
function run_configure_for_gpu_build {
    export TF_NEED_CUDA=1
    export TF_CUDA_VERSION=8.0
    export CUDA_TOOLKIT_PATH="C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v8.0"
    export TF_CUDNN_VERSION=5
    export CUDNN_INSTALL_PATH="C:/tools/cuda"
    export TF_CUDA_COMPUTE_CAPABILITIES="3.5,5.2"
    # Feed one empty line for any remaining prompt.
    printf '\n' | ./configure
}
# Recreate the Python test directory $1 and link the tensorflow source tree
# into it ("mklink /J" creates a Windows directory junction).
# NOTE(review): assumes the current directory is the TensorFlow source root
# (".\\tensorflow" must exist) — confirm against callers.
function create_python_test_dir() {
    rm -rf "$1"
    mkdir -p "$1"
    cmd /c "mklink /J $1\\tensorflow .\\tensorflow"
}
# Uninstall any installed tensorflow pip package (failure tolerated — the
# package may not be present), then install the wheel given as $1.
function reinstall_tensorflow_pip() {
    echo "y" | pip uninstall tensorflow -q || true
    # FIX: quote the wheel path — it may contain spaces (e.g. under
    # "Program Files" on Windows).
    pip install "${1}"
}
|
load ../test_setup.bash
# bats hook: runs once after every test in this file has finished.
# Removes the package deployed by the tests below.
teardown_file() {
    delete_package "test-remote-build-python"
}
@test "deploy python projects with remote build" {
run $NIM project deploy $BATS_TEST_DIRNAME --remote-build
assert_success
assert_line "Submitted action 'default' for remote building and deployment in runtime python:default"
}
@test "invoke remotely built python lang actions" {
test_binary_action test-remote-build-python/default "When Chuck Norris throws exceptions, it's across the room."
}
|
# Collect ingress/egress measurement files for one region: run the
# multi_measure binary on data from two days ago, prepend the CSV header to
# each output file, and archive the results into per-region and
# per-region-per-date directories. Usage: ./multi_measure_2.sh REGION_NAME
# (All expansions are now quoted and backticks replaced with $(...).)

# Data for two days ago is processed (YYYYMMDD).
date=$(date -d '2 day ago' "+%Y%m%d")
echo "$date"

REGION_NAME=$1
if [ "$1" = "" ]; then
    echo "usage: ./multi_measure_2.sh [REGION_NAME]"
    exit 1
fi

start_time=$(date +%s)

# Output directories; -p tolerates re-runs where they already exist.
mkdir -p "ingress_${REGION_NAME}"
mkdir -p "egress_${REGION_NAME}"
mkdir -p "ingress_${REGION_NAME}_${date}"
mkdir -p "egress_${REGION_NAME}_${date}"

./build.sh multi_measure

BASEDIR="/root/"
echo "copying..."
time cp -r "${BASEDIR}${date}" .
time ./multi_measure "$date" "list-${REGION_NAME}"

# Prepend the header to every ingress file and file it into both archives.
ls ./"${date}"/*ingress > list
while read -r line; do
    fn_src=$line
    fn_dst=$(echo "$line" | cut -d "/" -f 3)
    cat header > tmp
    cat "${fn_src}" >> tmp
    # FIX: print the real destination (the original message showed a
    # "./ingress/" path that is not where the file is written).
    echo "./ingress_${REGION_NAME}_${date}/${REGION_NAME}_${fn_dst}_${date}"
    cp tmp "./ingress_${REGION_NAME}_${date}/${REGION_NAME}_${fn_dst}_${date}"
    mv tmp "./ingress_${REGION_NAME}/${REGION_NAME}_${fn_dst}_${date}"
done < list

# Same for egress files.
ls ./"${date}"/*egress > list
while read -r line; do
    fn_src=$line
    fn_dst=$(echo "$line" | cut -d "/" -f 3)
    cat header > tmp
    cat "${fn_src}" >> tmp
    echo "./egress_${REGION_NAME}_${date}/${REGION_NAME}_${fn_dst}_${date}"
    cp tmp "./egress_${REGION_NAME}_${date}/${REGION_NAME}_${fn_dst}_${date}"
    mv tmp "./egress_${REGION_NAME}/${REGION_NAME}_${fn_dst}_${date}"
done < list

# Drop the copied working data and report timing; ":?" prevents an
# accidental "rm -rf" of nothing should date ever be empty.
rm -rf "${date:?}"
end_time=$(date +%s)
run_time=$((end_time - start_time))
run_time_minutes=$((run_time / 60))
echo "ELAPSED TIME:${REGION_NAME}:${date}:${run_time}:${run_time_minutes}"
du -h "${BASEDIR}${date}"

# Expire archived files older than 40 days (globs intentionally unquoted).
date=$(date -d '40 day ago' "+%Y%m%d")
rm -rf ./egress_"${REGION_NAME}"/"${REGION_NAME}"*"${date}"
rm -rf ./ingress_"${REGION_NAME}"/"${REGION_NAME}"*"${date}"
|
<gh_stars>0
import numpy as np
import pandas as pd
import pytest
from sklearn.linear_model import LogisticRegression
from sklearn.base import BaseEstimator
from poniard import PoniardClassifier
@pytest.mark.parametrize(
    "X,preprocess,scaler,numeric_imputer,include_preprocessor",
    [
        # Mixed numeric/categorical frame with NaNs; default scaler/imputer.
        (
            pd.DataFrame(
                {
                    "A": [4, 3, 1, -1, np.nan],
                    "B": [-2, np.nan, 3, 7, 1],
                    "C": list("abcde"),
                }
            ),
            True,
            None,
            None,
            True,
        ),
        # Outlier in "A" (200); explicit standard scaler + iterative imputer.
        (
            pd.DataFrame(
                {
                    "A": [4, 200, 1, -1, np.nan],
                    "B": [-2, np.nan, 3, 7, 1],
                    "C": list("abcde"),
                }
            ),
            True,
            "standard",
            "iterative",
            True,
        ),
        # Robust scaler + simple imputer.
        (
            pd.DataFrame(
                {
                    "A": [4, 200, 1, -1, np.nan],
                    "B": [-2, np.nan, 3, 7, 1],
                    "C": list("abcde"),
                }
            ),
            True,
            "robust",
            "simple",
            True,
        ),
        # Min-max scaler, default imputer.
        (
            pd.DataFrame(
                {
                    "A": [4, 200, 1, -1, np.nan],
                    "B": [-2, np.nan, 3, 7, 1],
                    "C": list("abcde"),
                }
            ),
            True,
            "minmax",
            None,
            True,
        ),
        # Clean all-numeric frame; preprocessing disabled entirely.
        (
            pd.DataFrame({"A": [4, 3, 1, -1, 0], "B": [-2, 1, 3, 7, 1]}),
            False,
            None,
            None,
            False,
        ),
    ],
)
def test_preprocessing_classifier(
    X, preprocess, scaler, numeric_imputer, include_preprocessor
):
    """PoniardClassifier smoke test across preprocessing configurations.

    Fits a single LogisticRegression with 2-fold CV and checks that:
    * the results table contains no NaNs (every metric was computed),
    * the results table has the expected (2, 12) shape,
    * get_estimator() returns a scikit-learn BaseEstimator whether or not
      the preprocessor is included.
    """
    estimator = PoniardClassifier(
        estimators=[LogisticRegression()],
        preprocess=preprocess,
        scaler=scaler,
        numeric_imputer=numeric_imputer,
        cv=2,
        random_state=0,
    )
    # Tiny binary target matching the 5-row fixtures above.
    y = [0, 1, 0, 1, 0]
    estimator.setup(X, y)
    estimator.fit()
    assert estimator.show_results().isna().sum().sum() == 0
    assert estimator.show_results().shape == (2, 12)
    assert isinstance(
        estimator.get_estimator(
            "LogisticRegression", include_preprocessor=include_preprocessor
        ),
        BaseEstimator,
    )
|
package com.atguigu.web.config;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.FilterType;
import org.springframework.stereotype.Controller;
import java.lang.reflect.Method;
/**
 * SpringMVC (child container) configuration: scan only @Controller-annotated
 * components under com.atguigu.web. Because useDefaultFilters = false, only
 * the include filter applies. Alternatively one could skip specifying the
 * parent-container classes and let MVC scan everything — then a bean marked
 * @Component + @RequestMapping would take effect as well.
 * (Comments translated from the original Chinese.)
 */
@ComponentScan(value = "com.atguigu.web",includeFilters = {
        @ComponentScan.Filter(type= FilterType.ANNOTATION,value = Controller.class)
},useDefaultFilters = false)
public class SpringMVCConfig {
    // SpringMVC's child container; it can see components registered in the
    // parent Spring container.
}
|
<filename>node_modules/@medusajs/medusa/dist/services/fulfillment-provider.d.ts
export default FulfillmentProviderService;
/**
 * Helps retrieve fulfillment providers registered with the service container.
 */
declare class FulfillmentProviderService {
    constructor(container: any);
    /** @private the DI container providers are resolved from — the original
     * comment said {logger}, which looks like a copy/paste slip; confirm. */
    private container_;
    registerInstalledProviders(providers: any): Promise<void>;
    /** Lists the registered fulfillment providers. */
    list(): Promise<any>;
    /** Collects the fulfillment options offered by each given provider. */
    listFulfillmentOptions(providers: any): Promise<any[]>;
    /**
     * @param {string} provider_id - the provider id
     * @return {FulfillmentService} the fulfillment provider instance
     */
    retrieveProvider(provider_id: string): FulfillmentService;
    createFulfillment(method: any, items: any, order: any, fulfillment: any): Promise<any>;
    canCalculate(option: any): Promise<any>;
    validateFulfillmentData(option: any, data: any, cart: any): Promise<any>;
    cancelFulfillment(fulfillment: any): Promise<any>;
    calculatePrice(option: any, data: any, cart: any): Promise<any>;
    validateOption(option: any): Promise<any>;
    createReturn(returnOrder: any): Promise<any>;
    /**
     * Fetches documents from the fulfillment provider
     * @param {string} providerId - the id of the provider
     * @param {object} fulfillmentData - the data relating to the fulfillment
     * @param {"invoice" | "label"} documentType - the type of
     * document to fetch
     */
    retrieveDocuments(providerId: string, fulfillmentData: object, documentType: "invoice" | "label"): Promise<any>;
}
|
<filename>Modules/Detection/RoadExtraction/include/otbBreakAngularPathListFilter.hxx
/*
* Copyright (C) 2005-2017 Centre National d'Etudes Spatiales (CNES)
*
* This file is part of Orfeo Toolbox
*
* https://www.orfeo-toolbox.org/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef otbBreakAngularPathListFilter_hxx
#define otbBreakAngularPathListFilter_hxx
#include "otbBreakAngularPathListFilter.h"
#include "otbMath.h"
namespace otb
{
/**
 * Constructor. Intentionally empty: no state is set up here (m_MaxAngle is
 * presumably initialized/set through accessors declared in the class header
 * — confirm there).
 */
template <class TPath>
BreakAngularPathListFilter<TPath>
::BreakAngularPathListFilter()
{
}
// Walks inputPath three consecutive vertices at a time (pixel1, pixel2,
// pixel3) and splits the path wherever the direction change at pixel2
// exceeds maxAngle. Every resulting sub-path is appended to outputPathList.
template <class TPath>
void
BreakAngularPathListFilter<TPath>
::BreakAngularPath(const MaxAngleType maxAngle, const PathPointerType inputPath, PathListPointerType outputPathList)
{
  typename PathType::VertexListType::ConstPointer vertexList = inputPath->GetVertexList();
  typename PathType::VertexListType::ConstIterator pathIt = vertexList->Begin();
  typename PathType::VertexType pixel1, pixel2, pixel3;
  // Initialization
  PathPointerType newPath = PathType::New();
  newPath->Initialize();
  double alpha1(0.), alpha2(0.);
  while (pathIt != vertexList->End())
  {
    // Add Pixel 1
    newPath->AddVertex(pathIt.Value());
    pixel1 = pathIt.Value();
    ++pathIt;
    if (pathIt != vertexList->End())
    {
      pixel2 = pathIt.Value();
      ++pathIt;
      if (pathIt != vertexList->End())
      {
        pixel3 = pathIt.Value();
        // Directions of segments (pixel2->pixel1) and (pixel3->pixel2),
        // normalized into [0, 2*pi) so they can be compared directly.
        alpha1 = std::atan2((pixel1[1] - pixel2[1]), (pixel1[0] - pixel2[0]));
        alpha2 = std::atan2((pixel2[1] - pixel3[1]), (pixel2[0] - pixel3[0]));
        alpha1 = (alpha1 >= 0) ? alpha1 : (alpha1 + CONST_2PI);
        alpha2 = (alpha2 >= 0) ? alpha2 : (alpha2 + CONST_2PI);
        if (std::abs(alpha1 - alpha2) > static_cast<double>(maxAngle))
        {
          // Angular break at pixel2: close the current sub-path with it.
          // Add Pixel 2
          newPath->AddVertex(pixel2);
          //Create new PathType in the out path list
          outputPathList->PushBack(newPath);
          // Reinit
          newPath = PathType::New();
        }
        // Step back so pixel2 becomes the lead vertex of the next triple.
        --pathIt; // Return previous pixel
      }
      else
      {
        // Add last Pixel (Pixel 2)
        newPath->AddVertex(pixel2);
      }
    }
  }
  // Flush the trailing (possibly unbroken) sub-path into the output list.
  outputPathList->PushBack(newPath);
}
// Break every input path at angular discontinuities greater than m_MaxAngle,
// accumulating the resulting sub-paths in the output list.
// (The original allocated an unused PathList 'newTempPathList' and cast a
// void call to void; both removed.)
template <class TPath>
void
BreakAngularPathListFilter<TPath>
::GenerateData()
{
  const PathListType * inputPathList = this->GetInput();
  PathListType *       outputPathList = this->GetOutput();

  outputPathList->Clear();

  for (typename PathListType::ConstIterator listIt = inputPathList->Begin();
       listIt != inputPathList->End(); ++listIt)
  {
    BreakAngularPath(m_MaxAngle, listIt.Get(), outputPathList);
  }
}
/**
 * PrintSelf Method: appends this filter's parameters to the standard
 * itk object printout produced by the superclass.
 */
template <class TPath>
void
BreakAngularPathListFilter<TPath>
::PrintSelf(std::ostream& os, itk::Indent indent) const
{
  Superclass::PrintSelf(os, indent);
  os << indent << "Angular max value : " << m_MaxAngle << std::endl;
}
} // End namespace otb
#endif
|
const DrawCard = require('../../../drawcard.js');
class WolvesOfTheNorth extends DrawCard {
    // When this card is bypassed by stealth, lower the bypassing character's
    // STR by 1 per Direwolf its controller has in play, until end of phase.
    setupCardAbilities() {
        this.reaction({
            when: {
                onBypassedByStealth: event => event.source === this
            },
            handler: context => {
                const bypassTarget = context.event.target;
                const direwolfCount = this.controller.getNumberOfCardsInPlay(card => card.hasTrait('Direwolf'));
                const strengthPenalty = -direwolfCount;

                this.untilEndOfPhase(ability => ({
                    match: bypassTarget,
                    effect: ability.effects.modifyStrength(strengthPenalty)
                }));

                this.game.addMessage('{0} uses {1} to give {2} {3}STR until the end of the phase',
                    this.controller, this, bypassTarget, strengthPenalty);
            }
        });
    }
}
// Card identifier within the set; consumed by the card registry.
WolvesOfTheNorth.code = '03006';
module.exports = WolvesOfTheNorth;
|
import React, {PropTypes} from 'react';
import { observer } from 'mobx-react';
import styles from './index.less';
function Table({dataSource, columns}) {
const createThead = () => {
return columns.map((item) => {
return (<th key={item.key} className={styles['table-th']} width={item.width ? item.width : 'auto'}>{item.title}</th>);
});
};
const createTd = (data, rowIdx) => {
return columns.map((item, idx) => {
return (<td key={item.key + idx}>{item.render ? item.render(data[item.dataIndex], data, rowIdx) : data[item.dataIndex]}</td>);
});
};
const createTbody = () => {
const tbody = [];
dataSource.map((data, idx) => {
tbody.push(
<tr key={data.key ? data.key : idx} className={styles['table-tr']}>{createTd(data, idx)}</tr>
);
});
return tbody;
};
return (
<div className={styles['table-box']}>
<table className={styles.table}>
<thead><tr>{createThead()}</tr></thead>
<tbody>{createTbody()}</tbody>
</table>
</div>
);
}
// Runtime prop validation: both props are arrays of plain objects.
Table.propTypes = {
    dataSource: PropTypes.array,
    columns: PropTypes.array,
};
// observer() makes the component re-render when MobX observables it reads change.
export default observer(Table);
|
import { Component, Inject, OnInit } from '@angular/core';
import { Validators } from '@angular/forms';
import { MatDialogRef, MAT_DIALOG_DATA } from '@angular/material';
import { empty, Observable } from 'rxjs';
import { GendersService } from '../../../../shared/genders.service';
import { GovernoratesService } from '../../../../shared/governorates.service';
import { LanguagesService } from '../../../../shared/languages.service';
import { LevelsService } from '../../../../shared/levels.service';
import { NotificationService } from '../../../../shared/notification.service';
import { SchoolTypeService } from '../../../../shared/school-type.service';
import { SubscriberFlagService } from '../../../../shared/subscriber-flag.service';
import { SubscriberService } from '../../../../shared/subscriber.service';
@Component({
  selector: 'ngx-subscriber-form',
  templateUrl: './subscriber-form.component.html',
  styles: []
})
export class SubscriberFormComponent implements OnInit {
  // Lookup lists rendered with the async pipe in the template.
  genderList$: Observable<any>;
  governmentList$: Observable<any>;
  languageList$: Observable<any>;
  subscriberFlagList$: Observable<any>;
  levelsList$: Observable<any>;
  subScriberList$: Observable<any>;
  // Flipped on the first submit attempt so the template can surface errors.
  submitted: boolean = false;

  // Controls whose validators depend on the subscriber flag; all are cleared
  // on init and selectively re-marked as required afterwards.
  private static readonly FLAG_DEPENDENT_CONTROLS = [
    'LevelId', 'SchoolTypeId', 'BirthDate', 'GenderId', 'SubscriberNumber',
    'SubscriberTeacherName', 'latitude', 'Longitude', 'SubscriberIdFrom', 'Path'
  ];

  // Fields posted as multipart form data when saving a student (flag 1),
  // in the same order the original code appended them.
  private static readonly STUDENT_FORM_FIELDS = [
    'SubscriberCode', 'SubscriberDate', 'SubscriberNameAr', 'SubscriberNameEn',
    'SubscriberFlagId', 'SubscriberPhone', 'LanguageId', 'GovernorateId',
    'Town', 'Village', 'BirthDate', 'GenderId', 'LevelId', 'SubscriberIdFrom',
    'SubscriberIsActive', 'Path'
  ];

  constructor(
    public service: SubscriberService, public dialogRef: MatDialogRef<SubscriberFormComponent>,
    @Inject(MAT_DIALOG_DATA) public data, private serviceGender: GendersService,
    private serviceGovernate: GovernoratesService,
    private serviceSubscriberFlag: SubscriberFlagService,
    private serviceLevels: LevelsService, private serviceLanguages: LanguagesService,
    private notify: NotificationService) { }

  /**
   * Clears (validators === null) or replaces the validators of each named
   * control, then re-runs its validation so the change takes effect.
   */
  private setValidatorsOn(names: string[], validators: any[] | null) {
    for (const name of names) {
      const control = this.service.subscriberForm.controls[name];
      if (validators) {
        control.setValidators(validators);
      } else {
        control.clearValidators();
      }
      control.updateValueAndValidity();
    }
  }

  /** Resets the form to its pristine "new subscriber" state. */
  private resetForm() {
    this.service.subscriberForm.reset({ SubscriberId: 0, SubscriberIsActive: true });
  }

  /** Notifies success, resets the form and closes the dialog after a save. */
  private finishSave(message: string) {
    this.notify.success(message);
    this.resetForm();
    this.dialogRef.close();
  }

  ngOnInit() {
    this.genderList$ = this.serviceGender.getAllGenders();
    this.levelsList$ = this.serviceLevels.getAlllevels();
    this.governmentList$ = this.serviceGovernate.getAllGovernorates();
    this.languageList$ = this.serviceLanguages.getAllLanguages(1);

    // Start from a clean slate, then add the validators this flag requires.
    this.setValidatorsOn(SubscriberFormComponent.FLAG_DEPENDENT_CONTROLS, null);
    if (this.data.SubscriberFlagId == 1) {
      this.subscriberFlagList$ = this.serviceSubscriberFlag.getAllSubscriberFlag();
      this.setValidatorsOn(
        ['LevelId', 'BirthDate', 'GenderId', 'SubscriberIdFrom', 'Path'],
        [Validators.required]);
    } else if (this.data.SubscriberFlagId == 2) {
      this.setValidatorsOn(
        ['SubscriberTeacherName', 'latitude', 'Longitude'],
        [Validators.required]);
    } else if (this.data.SubscriberFlagId == 3 || this.data.SubscriberFlagId == 4) {
      this.setValidatorsOn(['latitude', 'Longitude'], [Validators.required]);
    }

    if (this.data.SubscriberId == null) {
      // Creating a new subscriber.
      this.resetForm();
      this.service.subscriberForm.get('SubscriberFlagId').setValue(this.data.SubscriberFlagId);
      if (this.data.SubscriberFlagId == 2 || this.data.SubscriberFlagId == 3 || this.data.SubscriberFlagId == 4) {
        // Pre-fill coordinates from the device's current position.
        this.service.getPosition().then(pos => {
          this.service.subscriberForm.get('latitude').setValue(pos.lat);
          this.service.subscriberForm.get('Longitude').setValue(pos.lng);
        });
      }
    } else {
      // Editing an existing subscriber: load it and populate every control.
      this.service.getOneSubscriber(this.data.SubscriberId).subscribe((res: any) => {
        if (this.data.SubscriberFlagId == 1) {
          this.subScriberList$ = this.service.getAllSubsciberForTrainerByFlag(res.SubscriberFlag);
        }
        // NOTE(review): the original shifted a copy of BirthDate by +480
        // minutes but never used the result; dropped as dead code.
        this.service.subscriberForm.setValue({
          SubscriberId: res.SubscriberId,
          SubscriberCode: res.SubscriberCode,
          SubscriberDate: res.SubscriberDate,
          SubscriberNameAr: res.SubscriberNameAr,
          SubscriberNameEn: res.SubscriberNameEn,
          SubscriberFlagId: res.SubscriberFlagId,
          SubscriberPhone: res.SubscriberPhone,
          LanguageId: res.LanguageId,
          GovernorateId: res.GovernorateId,
          Town: res.Town,
          Village: res.Village,
          BirthDate: res.BirthDate,
          GenderId: res.GenderId,
          LevelId: res.LevelId,
          latitude: res.latitude,
          Longitude: res.Longitude,
          SchoolTypeId: res.SchoolTypeId,
          SubscriberNumber: res.SubscriberNumber,
          SubscriberTeacherName: res.SubscriberTeacherName,
          SubscriberIsActive: res.SubscriberIsActive,
          SubscriberIdFrom: res.SubscriberIdFrom,
          SubscriberFlag: res.SubscriberFlag,
          Path: res.Path
        });
      });
    }
  }

  /** Stores the chosen image file in the Path control; rejects non-images. */
  onFileSelect(event) {
    if (event.target.files.length > 0 && event.target.files[0]['type'].split('/')[0] === 'image') {
      const file = event.target.files[0];
      this.service.subscriberForm.get('Path').setValue(file);
    } else {
      event.srcElement.value = null;
      this.service.subscriberForm.get('Path').setValue("");
    }
  }

  /** Convenience accessor used by the template for validation messages. */
  get f() {
    return this.service.subscriberForm.controls;
  }

  onClose() {
    this.resetForm();
    this.dialogRef.close();
  }

  /** Reloads the "subscribed from" list when the selected flag changes. */
  changeSubscriberFlag(ctrl) {
    this.service.subscriberForm.get('SubscriberIdFrom').setValue("");
    this.subScriberList$ = empty();
    if (typeof (ctrl) !== "undefined") {
      this.subScriberList$ = this.service.getAllSubsciberForTrainerByFlag(ctrl.SubscriberFlagId);
    }
  }

  onSubmit() {
    this.submitted = true;
    if (this.service.subscriberForm.invalid) {
      return;
    }
    if (this.data.SubscriberFlagId == 1) {
      // Students are sent as multipart/form-data because Path may be a File.
      const formData = new FormData();
      for (const field of SubscriberFormComponent.STUDENT_FORM_FIELDS) {
        formData.append(field, this.service.subscriberForm.get(field).value);
      }
      if (this.data.SubscriberId == null) {
        this.service.postStudent(formData).subscribe(res => this.finishSave('تمت الاضافه بنجاح'));
      } else {
        formData.append('SubscriberId', this.service.subscriberForm.get('SubscriberId').value);
        this.service.putStudent(formData).subscribe(res => this.finishSave('تم التعديل بنجاح'));
      }
    } else {
      // Other subscriber kinds are sent as a plain JSON body.
      const body = {
        ...this.service.subscriberForm.value
      };
      if (this.data.SubscriberId == null) {
        this.service.postSubscriber(body).subscribe(res => this.finishSave('تمت الاضافه بنجاح'));
      } else {
        this.service.putSubscriber(this.data.SubscriberId, body).subscribe(res => this.finishSave('تم التعديل بنجاح'));
      }
    }
  }
}
|
#
# The BSD 3-Clause License. http://www.opensource.org/licenses/BSD-3-Clause
#
# This file is part of MinGW-W64(mingw-builds: https://github.com/niXman/mingw-builds) project.
# Copyright (c) 2011-2021 by niXman (i dotty nixman doggy gmail dotty com)
# Copyright (c) 2012-2015 by Alexpux (alexpux doggy gmail dotty com)
# All rights reserved.
#
# Project: MinGW-W64 ( http://sourceforge.net/projects/mingw-w64/ )
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# - Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the distribution.
# - Neither the name of the 'MinGW-W64' nor the names of its contributors may
# be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# **************************************************************************
# Package identity: sources are fetched as gcc-${PKG_VERSION}${PKG_TYPE}
# from the GNU mirror listed below.
PKG_VERSION=8.1.0
PKG_NAME=gcc-${PKG_VERSION}
PKG_DIR_NAME=gcc-${PKG_VERSION}
PKG_TYPE=.tar.xz
PKG_URLS=(
	"https://ftp.gnu.org/gnu/gcc/gcc-${PKG_VERSION}/gcc-${PKG_VERSION}${PKG_TYPE}"
)
PKG_PRIORITY=main
#
# Patches applied on top of the vanilla GCC tree before configuring.
PKG_PATCHES=(
	gcc/gcc-4.7-stdthreads.patch
	gcc/gcc-5.1-iconv.patch
	gcc/gcc-4.8-libstdc++export.patch
	gcc/gcc-4.8.2-fix-for-windows-not-minding-non-existant-parent-dirs.patch
	gcc/gcc-4.8.2-windows-lrealpath-no-force-lowercase-nor-backslash.patch
	gcc/gcc-4.9.1-enable-shared-gnat-implib.mingw.patch
	gcc/gcc-5.1.0-make-xmmintrin-header-cplusplus-compatible.patch
	gcc/gcc-5.2-fix-mingw-pch.patch
	gcc/gcc-5-dwarf-regression.patch
	gcc/gcc-5.1.0-fix-libatomic-building-for-threads=win32.patch
	gcc/gcc-6-ktietz-libgomp.patch
	gcc/gcc-libgomp-ftime64.patch
	gcc/gcc-8.1.0-Backport-patches-for-std-filesystem-from-master.patch
	gcc/gcc-8-isl-0.20-support.patch
)
#
# Arguments passed to GCC's ./configure. Variables such as $HOST, $TARGET,
# $THREADS_MODEL, $EXCEPTIONS_MODEL are supplied by the mingw-builds driver
# scripts that source this descriptor.
PKG_CONFIGURE_FLAGS=(
	--host=$HOST
	--build=$BUILD
	--target=$TARGET
	#
	--prefix=$MINGWPREFIX
	--with-sysroot=$PREFIX
	#--with-gxx-include-dir=$MINGWPREFIX/$TARGET/include/c++
	#
	$LINK_TYPE_GCC
	#
	$( [[ $USE_MULTILIB == yes ]] \
		&& echo "--enable-targets=all --enable-multilib" \
		|| echo "--disable-multilib" \
	)
	$( [[ "$DISABLE_GCC_LTO" == yes ]] \
		&& echo "--enable-languages=$ENABLE_LANGUAGES" \
		|| echo "--enable-languages=$ENABLE_LANGUAGES,lto"
	)
	--enable-libstdcxx-time=yes
	--enable-threads=$THREADS_MODEL
	--enable-libgomp
	--enable-libatomic
	$( [[ "$DISABLE_GCC_LTO" == yes ]] \
		&& echo "--disable-lto" \
		|| echo "--enable-lto"
	)
	--enable-graphite
	--enable-checking=release
	--enable-fully-dynamic-string
	--enable-version-specific-runtime-libs
	--enable-libstdcxx-filesystem-ts=yes
	$( [[ $EXCEPTIONS_MODEL == dwarf ]] \
		&& echo "--disable-sjlj-exceptions --with-dwarf2" \
	)
	$( [[ $EXCEPTIONS_MODEL == sjlj ]] \
		&& echo "--enable-sjlj-exceptions" \
	)
	#
	--disable-libstdcxx-pch
	--disable-libstdcxx-debug
	$( [[ $BOOTSTRAPING == yes ]] \
		&& echo "--enable-bootstrap" \
		|| echo "--disable-bootstrap" \
	)
	--disable-rpath
	--disable-win32-registry
	--disable-nls
	--disable-werror
	--disable-symvers
	#
	--with-gnu-as
	--with-gnu-ld
	#
	$PROCESSOR_OPTIMIZATION
	$PROCESSOR_TUNE
	#
	--with-libiconv
	--with-system-zlib
	--with-{gmp,mpfr,mpc,isl}=$PREREQ_DIR/$HOST-$LINK_TYPE_SUFFIX
	--with-pkgversion="\"$BUILD_ARCHITECTURE-$THREADS_MODEL-$EXCEPTIONS_MODEL${REV_STRING}, $MINGW_W64_PKG_STRING\""
	--with-bugurl=$BUG_URL
	#
	CFLAGS="\"$COMMON_CFLAGS\""
	CXXFLAGS="\"$COMMON_CXXFLAGS\""
	CPPFLAGS="\"$COMMON_CPPFLAGS\""
	LDFLAGS="\"$COMMON_LDFLAGS $( [[ $BUILD_ARCHITECTURE == i686 ]] && echo -Wl,--large-address-aware )\""
	LD_FOR_TARGET=$PREFIX/bin/ld.exe
)
#
# Arguments for the build step.
PKG_MAKE_FLAGS=(
	-j$JOBS
	all
)
#
# Arguments for the install step; install is deliberately single-threaded.
PKG_INSTALL_FLAGS=(
	-j1
	DESTDIR=$BASE_BUILD_DIR
	$( [[ $STRIP_ON_INSTALL == yes ]] && echo install-strip || echo install )
)
|
#include <iostream>
#include <string>
// Reads two whitespace-delimited tokens and reports whether they are equal.
int main()
{
    // std::string replaces the original fixed char[32] buffers, which could
    // overflow on long input via operator>>; it also removes the need for
    // strcmp, which was used without including <cstring>.
    std::string str1;
    std::string str2;

    std::cout << "Enter a string: ";
    std::cin >> str1;
    std::cout << "Enter another string: ";
    std::cin >> str2;

    if (str1 == str2)
    {
        std::cout << "The strings are equal!" << std::endl;
    }
    else
    {
        std::cout << "The strings are not equal!" << std::endl;
    }
    return 0;
}
|
import logging
import os
class DirectoryManager:
    """Populates a set of managed directories with marker files."""

    def __init__(self):
        self.logger = logging.getLogger(__name__)
        # Directories to populate; assumed to be filled in elsewhere.
        self.pidPathesVars = []

    def fillDirValues(self):
        """Write 'example.txt' containing a marker value into each directory.

        Failures are logged and skipped so that one bad directory does not
        stop processing of the rest.
        """
        for pid_dir in self.pidPathesVars:
            file_path = os.path.join(pid_dir, 'example.txt')
            try:
                with open(file_path, 'w') as handle:
                    handle.write('Specific value')
            except OSError as exc:
                # Only OSError carries strerror/filename; the original read
                # them on every Exception, which raised AttributeError for
                # non-OS errors and masked the real problem.
                self.logger.error(f"{exc.strerror} to {exc.filename}")
            except Exception as exc:
                self.logger.error(f"unexpected error writing {file_path}: {exc}")
# Usage example
# Instantiate the DirectoryManager class
directory_manager = DirectoryManager()
# Populate every directory listed in pidPathesVars with the marker file.
# NOTE(review): pidPathesVars starts empty, so this call is a no-op until
# the list is filled elsewhere.
directory_manager.fillDirValues()
|
# Rails defaults to the development environment for local work.
export RAILS_ENV="development"

# Bundler / foreman shortcuts.
alias b="bundle"
alias be="bundle exec"
alias fs="clear && foreman start"

# Put rbenv first on PATH (quoted so a HOME containing spaces still works),
# then initialize its shims.
export PATH="$HOME/.rbenv/bin:$PATH"
eval "$(rbenv init -)"
|
// SPDX-License-Identifier: Apache-2.0
package nco
import breeze.math.Complex
import chisel3._
import dsptools.numbers._
import dsptools.numbers.implicits._
import org.scalatest.{FlatSpec, Matchers}
import scala.io.Source
import dsptools.RoundHalfUp
// Regression suite for the NCO with streaming phase-increment/phase-offset
// inputs, sweeping syncROM and phase-accumulator configurations.
class NCOStreamingPINCandPOFFSpec extends FlatSpec with Matchers {
  // Builds a fresh DUT factory; tableNameList is cleared first, presumably to
  // avoid ROM-table name collisions between instantiations — confirm.
  def dut[T <: Data : Real : BinaryRepresentation](params: NCOParams[T]): () => NCOStreamingPINCandPOFF[T] = () => {
    NCOTableParams.tableNameList.clear()
    new NCOStreamingPINCandPOFF(params)
  }
  behavior of "NCO"
  for(phaseAcc <- Seq(true, false)) {
    for(syncROM <- Seq(true, false)) {
      it should f"""run the tester: NCO with table size of 128, table width of 16 bits,
      phase width of 9 bits, working in standard mode without Taylor series correction,
      without phase dithering, syncROM = $syncROM, phase accumulator = $phaseAcc, streaming phase increment and offset""" in {
        //tableSize, tableWidth, phaseWidth, rasterized, taylorTerms, dither, syncROM, phaseAccEnable
        val fixedParams = FixedNCOParams(128, 16, 9, false, 0, false, syncROM, phaseAcc, RoundHalfUp)
        chisel3.iotesters.Driver.execute(Array("-tbn", "verilator"), dut(fixedParams)) {
          //nco, tableSize, rasterized, syncROM, phaseAcc, tolLSB
          c => new NCOStreamingPINCandPOFFTester(c, 128, false, syncROM, phaseAcc, 5)
        } should be (true)
      }
      it should f"""run the tester: NCO with table size of 500, table width of 12 bits,
      phase width of 11 bits, working in rasterized mode without Taylor series correction,
      without phase dithering, syncROM = $syncROM, phase accumulator = $phaseAcc, streaming phase increment and offset""" in {
        //tableSize, tableWidth, phaseWidth, rasterized, taylorTerms, dither, syncROM, phaseAccEnable
        val fixedParams = FixedNCOParams(500, 12, 11, true, 0, false, syncROM, phaseAcc, RoundHalfUp)
        chisel3.iotesters.Driver.execute(Array("-tbn", "verilator"), dut(fixedParams)) {
          //nco, tableSize, rasterized, syncROM, phaseAcc, tolLSB
          c => new NCOStreamingPINCandPOFFTester(c, 500, true, syncROM, phaseAcc, 5)
        } should be (true)
      }
      it should f"""run the tester: NCO with table size of 256, table width of 18 bits,
      phase width of 13 bits, working in standard mode with Taylor series of 4,
      without phase dithering, syncROM = $syncROM, phase accumulator = $phaseAcc, streaming phase increment and offset""" in {
        //tableSize, tableWidth, phaseWidth, rasterized, taylorTerms, dither, syncROM, phaseAccEnable
        // NOTE(review): the description above says "phase width of 13 bits"
        // but phaseWidth here is 12 — confirm which value is intended.
        val fixedParams = FixedNCOParams(256, 18, 12, false, 4, false, syncROM, phaseAcc, RoundHalfUp)
        chisel3.iotesters.Driver.execute(Array("-tbn", "verilator"), dut(fixedParams)) {
          //nco, tableSize, rasterized, syncROM, phaseAcc, tolLSB
          c => new NCOStreamingPINCandPOFFTester(c, 256, false, syncROM, phaseAcc, 4)
        } should be (true)
      }
    }
    it should f"run the tester: DspReal tester, NCO with table size of 89, working in rasterized mode, phase accumulator = $phaseAcc, streaming phase increment and offset" in {
      val DspRealParams = DspRealNCOParams(89, true, phaseAcc)
      chisel3.iotesters.Driver.execute(Array("-tbn", "verilator"), dut(DspRealParams)) {
        //nco, tableSize, rasterized, phaseAcc, tolLSB
        c => new NCOStreamingPINCandPOFFTesterDspReal(c, 89, true, phaseAcc, 8)
      } should be (true)
    }
    it should f"run the tester: DspReal tester, NCO with table size of 256, working in standard mode, phase accumulator = $phaseAcc, streaming phase increment and offset" in {
      val DspRealParams = DspRealNCOParams(256, false, phaseAcc)
      chisel3.iotesters.Driver.execute(Array("-tbn", "verilator"), dut(DspRealParams)) {
        //nco, tableSize, rasterized, phaseAcc, tolLSB
        c => new NCOStreamingPINCandPOFFTesterDspReal(c, 256, false, phaseAcc, 8)
      } should be (true)
    }
  }
  /*val syncROM = true
  val phaseAcc = false
  it should f"""run the tester: NCO with table size of 128, table width of 16 bits,
  phase width of 9 bits, working in standard mode without Taylor series correction,
  without phase dithering, syncROM = $syncROM, phase accumulator = $phaseAcc, streaming phase increment and offset""" in {
    //tableSize, tableWidth, phaseWidth, rasterized, taylorTerms, dither, syncROM, phaseAccEnable
    val fixedParams = FixedNCOParams(128, 16, 9, false, 0, false, syncROM, phaseAcc, RoundHalfUp)
    chisel3.iotesters.Driver.execute(Array("-tbn", "verilator"), dut(fixedParams)) {
      //nco, tableSize, rasterized, syncROM, phaseAcc, tolLSB
      c => new NCOStreamingPINCandPOFFTester(c, 128, false, syncROM, phaseAcc, 5)
    } should be (true)
  }*/
}
|
def rearrangeArray(arr):
    """Partition ``arr`` in place so every even number precedes every odd one.

    Two-pointer sweep: ``left`` walks forward past evens, ``right`` walks
    backward past odds; an odd-at-left / even-at-right pair is swapped.
    Relative order within each parity group is not preserved.

    Returns the same list object for convenience.
    """
    left, right = 0, len(arr) - 1
    while left < right:
        # Skip evens already in place on the left. The bounds check comes
        # first (the original tested arr[left] before left < right).
        while left < right and arr[left] % 2 == 0:
            left += 1
        # Skip odds already in place on the right.
        while left < right and arr[right] % 2 == 1:
            right -= 1
        if left < right:
            # arr[left] is odd and arr[right] is even: swap them inward.
            arr[left], arr[right] = arr[right], arr[left]
            left += 1
            right -= 1
    return arr
# Demo: partition a sample array so evens precede odds.
arr = [3, 5, 8, 2, 4, 7, 9]
print("Modified array is:")
print(rearrangeArray(arr))
|
<reponame>svegon/AutoItemSwitch
package net.autoitemswitch.mixin;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.At.Shift;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfoReturnable;
import org.spongepowered.asm.mixin.injection.At;
import net.autoitemswitch.SharedVariables;
import net.autoitemswitch.events.BlockInteractionEvent;
import net.autoitemswitch.events.ItemUseEvent;
import net.fabricmc.api.EnvType;
import net.fabricmc.api.Environment;
import net.minecraft.client.network.ClientPlayerEntity;
import net.minecraft.client.network.ClientPlayerInteractionManager;
import net.minecraft.client.world.ClientWorld;
import net.minecraft.entity.player.PlayerEntity;
import net.minecraft.item.ItemStack;
import net.minecraft.util.ActionResult;
import net.minecraft.util.Hand;
import net.minecraft.util.hit.BlockHitResult;
import net.minecraft.world.World;
@Environment(EnvType.CLIENT)
@Mixin(ClientPlayerInteractionManager.class)
public abstract class ClientPlayerInteractionManagerMixin {
	// Snapshot of the held stack taken just before ItemStack#use runs in
	// interactItem, so the AFTER injection can report the pre-use stack.
	private ItemStack interactedStack;
	// Same idea for ItemStack#useOnBlock in interactBlock.
	private ItemStack blockInteractedStack;

	// Fires just BEFORE the first ItemStack#use call inside interactItem:
	// capture a copy of the stack before use() can mutate/shrink it.
	@Inject(at = {@At(value = "INVOKE",
			target = "Lnet/minecraft/item/ItemStack;use(Lnet/minecraft/world/World;"
			+ "Lnet/minecraft/entity/player/PlayerEntity;Lnet/minecraft/util/Hand;)"
			+ "Lnet/minecraft/util/TypedActionResult;",
			ordinal = 0)}, method = {"interactItem"})
	private void preInteractItem(PlayerEntity player, World world, Hand hand,
			CallbackInfoReturnable<ActionResult> cir) {
		interactedStack = player.getStackInHand(hand).copy();
	}

	// Fires just AFTER the same call: publish an ItemUseEvent carrying the
	// pre-use snapshot to the mod's event handler.
	@Inject(at = {@At(value = "INVOKE",
			target = "Lnet/minecraft/item/ItemStack;use(Lnet/minecraft/world/World;"
			+ "Lnet/minecraft/entity/player/PlayerEntity;Lnet/minecraft/util/Hand;)"
			+ "Lnet/minecraft/util/TypedActionResult;",
			shift = Shift.AFTER, ordinal = 0)}, method = {"interactItem"})
	private void postInteractItem(PlayerEntity player, World world, Hand hand,
			CallbackInfoReturnable<ActionResult> cir) {
		SharedVariables.EVENT_HANDLER.onItemUse(new ItemUseEvent(player, world, hand,
				interactedStack));
	}

	// BEFORE the useOnBlock call in interactBlock. NOTE(review): ordinal = 1
	// targets the second useOnBlock invocation in the mapped method body —
	// confirm against the target method when updating Minecraft versions.
	@Inject(at = {@At(value = "INVOKE", target = "Lnet/minecraft/item/ItemStack;"
			+ "useOnBlock(Lnet/minecraft/item/ItemUsageContext;)Lnet/minecraft/util/ActionResult;",
			ordinal = 1)}, method = {"interactBlock"})
	private void preInteractBlock(ClientPlayerEntity player, ClientWorld world, Hand hand,
			BlockHitResult hitResult, CallbackInfoReturnable<ActionResult> cir) {
		blockInteractedStack = player.getStackInHand(hand).copy();
	}

	// AFTER the same call: publish a BlockInteractionEvent with the snapshot.
	@Inject(at = {@At(value = "INVOKE", target = "Lnet/minecraft/item/ItemStack;"
			+ "useOnBlock(Lnet/minecraft/item/ItemUsageContext;)Lnet/minecraft/util/ActionResult;",
			shift = Shift.AFTER, ordinal = 1)}, method = {"interactBlock"})
	private void postInteractBlock(ClientPlayerEntity player, ClientWorld world, Hand hand,
			BlockHitResult hitResult, CallbackInfoReturnable<ActionResult> cir) {
		SharedVariables.EVENT_HANDLER.onBlockInteraction(new BlockInteractionEvent(player, world, hand,
				hitResult, blockInteractedStack));
	}
}
|
<reponame>usa-usa-usa-usa/runelite<filename>runelite-client/src/main/java/net/runelite/client/plugins/pyramidplunder/PyramidPlunderConfig.java
/*
* Copyright (c) 2020 Mitchell <https://github.com/Mitchell-Kovacs>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.client.plugins.pyramidplunder;
import java.awt.Color;
import net.runelite.client.config.Config;
import net.runelite.client.config.ConfigGroup;
import net.runelite.client.config.ConfigItem;
@ConfigGroup("pyramidplunder")
public interface PyramidPlunderConfig extends Config
{
	@ConfigItem(
		position = 0,
		keyName = "hideTimer",
		name = "Hide default timer",
		description = "Hides the default pyramid plunder timer"
	)
	default boolean hideTimer()
	{
		return true;
	}
	@ConfigItem(
		position = 1,
		keyName = "showExactTimer",
		name = "Show exact timer",
		description = "Displays the amount of time remaining as an infobox"
	)
	default boolean showExactTimer()
	{
		return true;
	}
	// Threshold in seconds at which the timer changes color.
	@ConfigItem(
		position = 2,
		keyName = "timerLowWarning",
		name = "Timer low warning",
		description = "Determines the time when the timers color will change"
	)
	default int timerLowWarning()
	{
		return 30;
	}
	@ConfigItem(
		position = 3,
		keyName = "highlightDoorsColor",
		name = "Highlight doors color",
		description = "Selects the color for highlighting tomb doors"
	)
	default Color highlightDoorsColor()
	{
		return Color.green;
	}
	@ConfigItem(
		position = 4,
		keyName = "highlightDoors",
		name = "Highlight doors",
		description = "Highlights the four tomb doors in each room"
	)
	default boolean highlightDoors()
	{
		return true;
	}
	// NOTE(review): keyName is singular ("Speartrap") while the method name is
	// plural; keyName is the persisted config key — do not rename without a
	// settings migration.
	@ConfigItem(
		position = 5,
		keyName = "highlightSpeartrapColor",
		name = "Highlight speartrap color",
		description = "Selects the color for highlighting speartraps"
	)
	default Color highlightSpeartrapsColor()
	{
		return Color.orange;
	}
	@ConfigItem(
		position = 6,
		keyName = "highlightSpeartraps",
		name = "Highlight speartraps",
		description = "Highlight the spear traps at the entrance of each room"
	)
	default boolean highlightSpeartraps()
	{
		return true;
	}
	@ConfigItem(
		position = 7,
		keyName = "highlightContainersColor",
		name = "Highlight containers color",
		description = "Selects the color for highlighting urns, chests and sarcophagus"
	)
	default Color highlightContainersColor()
	{
		return Color.yellow;
	}
	// Floors range such that the default of 9 effectively disables the
	// highlight until lowered — TODO confirm the valid floor range.
	@ConfigItem(
		position = 8,
		keyName = "highlightUrnsFloor",
		name = "Highlight urns floor",
		description = "Highlight the urns starting at selected floor and up"
	)
	default int highlightUrnsFloor()
	{
		return 9;
	}
	// NOTE(review): keyName "highlightedChestFloor" differs from the method
	// name "highlightChestFloor" — keyName is the persisted key, leave as-is.
	@ConfigItem(
		position = 9,
		keyName = "highlightedChestFloor",
		name = "Highlight chest floor",
		description = "Highlight the Grand Gold Chest starting at selected floor and up"
	)
	default int highlightChestFloor()
	{
		return 9;
	}
	@ConfigItem(
		position = 10,
		keyName = "highlightedSarcophagusFloor",
		name = "Highlight sarcophagus floor",
		description = "Highlight the sarcophagus starting at selected floor and up"
	)
	default int highlightSarcophagusFloor()
	{
		return 9;
	}
}
|
/*!! include('common/ids', { symbol_prefix = ''}, {
}) !! 32 */
/* ################# !! GENERATED CODE -- DO NOT MODIFY !! ################# */
#pragma once
#ifndef BE_OMICRON_IDS_HPP_
#define BE_OMICRON_IDS_HPP_
#include <be/core/id.hpp>
#ifdef BE_ID_EXTERNS
namespace be {
namespace ids {
} // be::ids
} // be
#endif
#endif
#ifdef BE_ID_STATICS
namespace be {
namespace ids {
} // be::ids
} // be
#endif
/* ######################### END OF GENERATED CODE ######################### */
|
# Function to find the sum of digits
def sum_digits(num):
    """Return the sum of the decimal digits of ``num``.

    Generalized to accept negative integers (digits of the absolute
    value are summed); returns 0 for ``num == 0``.
    """
    remaining = abs(num)
    total = 0  # renamed from ``sum`` to avoid shadowing the built-in
    while remaining > 0:
        total += remaining % 10
        remaining //= 10
    return total
# Demo: print the digit sum of a sample number.
num = 1234
digit_total = sum_digits(num)  # renamed so the built-in ``sum`` stays usable
print("Sum of digits of %d is %d" % (num, digit_total))
# Output: Sum of digits of 1234 is 10
|
<reponame>BrandonBrasson/cups
# NOTE(review): despite the class name (CreateAddCreatorToCupcakes), this
# migration only renames the user_cupcakes join table to bookmarks —
# confirm the name/content mismatch is intentional (it cannot be changed
# now without breaking the schema_migrations history).
class CreateAddCreatorToCupcakes < ActiveRecord::Migration
  def change
    rename_table('user_cupcakes', 'bookmarks')
  end
end
|
using RIQExtensions
module RIQ
  # Simple object for retrieving your org-wide account properties. The object is read only and provides only fetch and convenience methods.
  class AccountProperties
    # Raw array of field hashes as returned by the API (read-only).
    attr_reader :data
    # Performs a network call and fetches the account properties for the org.
    # Raises RIQError when the response carries no properties.
    def initialize
      @client = RIQ.client
      # #symbolize is provided by the RIQExtensions refinement activated above.
      d = @client.get(node).symbolize
      if d
        @data = d[:fields]
      else
        raise RIQError, 'No account properties found'
      end
    end
    # (see RIQObject#node)
    def node
      'accounts/fields'
    end
    # @param id [String, Int] A simple lookup for a field that has a matching ID
    # @return [Hash, nil] the first field whose :id equals id.to_s, or nil
    def field(id)
      @data.select{|f| f[:id] == id.to_s}.first
    end
  end
  class << self
    # Convenience constructor: fetches and returns the org's AccountProperties.
    def account_props
      AccountProperties.new
    end
  end
end
|
<filename>app/src/main/java/com/example/user/stijnverdenius_pset3/get_hppt_reqt.java
package com.example.user.stijnverdenius_pset3;
/**
* Created by User on 2/24/2017.
*/
import android.util.Log;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.HttpURLConnection;
import java.io.IOException;
import java.net.*;
import java.io.*;
import java.util.ArrayList;
import android.util.Log;
public class get_hppt_reqt {
    /**
     * Downloads the OMDb entry for the given title and returns the raw body.
     *
     * @param params params[0] is the movie title; spaces are replaced by '_'
     * @return the response body, or "" when the request failed
     */
    protected static synchronized String downloadFromServer(String... params) {
        // StringBuilder instead of String += in the read loop (O(n) vs O(n^2)).
        StringBuilder result = new StringBuilder();
        String chosentag = params[0];
        chosentag = chosentag.replaceAll(" ", "_");
        String urlstring = String.format("http://www.omdbapi.com/?t=%s", chosentag);
        Log.d("url", urlstring);
        URL url = null;
        try {
            url = new URL(urlstring);
        } catch (MalformedURLException e) {
            Log.d("get_hppt exc", "url fout");
        }
        if (url != null) {
            HttpURLConnection connect = null;
            BufferedReader breader = null;
            try {
                connect = (HttpURLConnection) url.openConnection();
                connect.setRequestMethod("GET");
                Integer responseCode = connect.getResponseCode();
                Log.d("respo", responseCode.toString());
                if (responseCode >= 200 && responseCode < 300) {
                    breader = new BufferedReader(new InputStreamReader(connect.getInputStream()));
                    String line;
                    while ((line = breader.readLine()) != null) {
                        result.append(line);
                    }
                }
            } catch (IOException e) {
                Log.d("get_hppt exc", "url != null fout");
            } finally {
                // Release the reader and the connection on every path
                // (the original leaked both).
                if (breader != null) {
                    try {
                        breader.close();
                    } catch (IOException ignored) {
                    }
                }
                if (connect != null) {
                    connect.disconnect();
                }
            }
        }
        Log.d("result", result.toString());
        return result.toString();
    }
}
|
// Re-export the prebuilt bundle so consumers can `require` the package root.
module.exports = require('./assets/vue-directive-image-previewer.js')
|
<reponame>AlexProkhor/DotNext-Moscow-2020<gh_stars>0
// Base shape for menu entries: a route path plus a display name.
export abstract class MenuElementBase {
    path: string; // route the entry points to
    name: string; // label displayed for the entry
}
|
<gh_stars>0
package weixin.weicar.service.impl;
import weixin.weicar.service.CarToolServiceI;
import org.jeecgframework.core.common.service.impl.CommonServiceImpl;
import weixin.weicar.entity.CarToolEntity;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.UUID;
import java.io.Serializable;
@Service("carToolService")
@Transactional
public class CarToolServiceImpl extends CommonServiceImpl implements CarToolServiceI {
	public <T> void delete(T entity) {
		super.delete(entity);
		// Run the SQL enhancement configured for the delete operation.
		this.doDelSql((CarToolEntity)entity);
	}
	public <T> Serializable save(T entity) {
		Serializable t = super.save(entity);
		// Run the SQL enhancement configured for the insert operation.
		this.doAddSql((CarToolEntity)entity);
		return t;
	}
	public <T> void saveOrUpdate(T entity) {
		super.saveOrUpdate(entity);
		// Run the SQL enhancement configured for the update operation.
		this.doUpdateSql((CarToolEntity)entity);
	}
	/**
	 * Default hook - SQL enhancement - insert operation (no-op by default).
	 * @param t the entity that was inserted
	 * @return true (hook succeeded)
	 */
	public boolean doAddSql(CarToolEntity t){
		return true;
	}
	/**
	 * Default hook - SQL enhancement - update operation (no-op by default).
	 * @param t the entity that was updated
	 * @return true (hook succeeded)
	 */
	public boolean doUpdateSql(CarToolEntity t){
		return true;
	}
	/**
	 * Default hook - SQL enhancement - delete operation (no-op by default).
	 * @param t the entity that was deleted
	 * @return true (hook succeeded)
	 */
	public boolean doDelSql(CarToolEntity t){
		return true;
	}
	/**
	 * Substitutes #{...} placeholders in a configured SQL string with the
	 * entity's field values.
	 * WARNING(review): values are spliced in by plain string replacement —
	 * if any field can contain untrusted input this is SQL injection;
	 * prefer parameterized statements.
	 * @param sql the SQL template containing #{...} placeholders
	 * @param t the entity supplying the values
	 * @return the SQL with placeholders replaced
	 */
	public String replaceVal(String sql,CarToolEntity t){
		sql = sql.replace("#{id}",String.valueOf(t.getId()));
		sql = sql.replace("#{create_name}",String.valueOf(t.getCreateName()));
		sql = sql.replace("#{create_date}",String.valueOf(t.getCreateDate()));
		sql = sql.replace("#{name}",String.valueOf(t.getName()));
		sql = sql.replace("#{url}",String.valueOf(t.getUrl()));
		sql = sql.replace("#{disabled}",String.valueOf(t.getDisabled()));
		sql = sql.replace("#{accountid}",String.valueOf(t.getAccountid()));
		sql = sql.replace("#{sorts}",String.valueOf(t.getSorts()));
		sql = sql.replace("#{UUID}",UUID.randomUUID().toString());
		return sql;
	}
}
|
#!/usr/bin/env bash
# Abort (exit 1) unless the named environment variable has a non-empty
# value in env_dir, as reported by get_env_variable (defined elsewhere).
# $1 - env directory, $2 - variable name
require_env_variable () {
  local env_dir=$1
  local name=$2
  # Quote the arguments so names/dirs containing spaces don't word-split.
  if [[ -z $(get_env_variable "$env_dir" "$name") ]]; then
    echo "${name} was not set, aborting." | indent
    exit 1
  fi
}
|
class NetworkDevice:
    """Attribute bag for a network device with a validated ``port`` field."""

    def __init__(self, **kwargs):
        # Set the default BEFORE applying kwargs: the original assigned
        # ``self.port = ""`` last, silently clobbering an explicit
        # ``port=...`` keyword argument.
        self.port = ""
        for key, value in kwargs.items():
            setattr(self, key, value)

    def validate_port(self, port):
        """Store ``port`` after checking it is a non-empty digit-only string.

        Raises:
            ValueError: if ``port`` is not a string of one or more digits.
        """
        # str.isdigit() is False for "" — the non-empty requirement is implied.
        if not isinstance(port, str) or not port.isdigit():
            raise ValueError("Invalid port: Port must be a non-empty string consisting only of digits")
        self.port = port
# Example usage: the second validate_port call raises, so device.port
# keeps the last valid value ("8080") when the handler runs.
device = NetworkDevice(ip="192.168.1.1", model="router")
try:
    device.validate_port("8080") # Valid port
    print(device.port) # Output: 8080
    device.validate_port("port") # Invalid port
except ValueError as e:
    print(e) # Output: Invalid port: Port must be a non-empty string consisting only of digits
|
#!/bin/sh
test_description='wildmatch tests'
. ./test-lib.sh
# Register wildmatch + fnmatch expectations for one text/pattern pair.
# $1 - expected wildmatch result (1 = match, else no match)
# $2 - expected fnmatch result (1 = match, 0 = no match, x = skip check)
# $3 - text, $4 - pattern
match() {
	# Quote the expectation operands: with an empty/odd argument the
	# original unquoted [ $1 = 1 ] would be a syntax error.
	if [ "$1" = 1 ]; then
		test_expect_success "wildmatch: match '$3' '$4'" "
			test-wildmatch wildmatch '$3' '$4'
		"
	else
		test_expect_success "wildmatch: no match '$3' '$4'" "
			! test-wildmatch wildmatch '$3' '$4'
		"
	fi
	if [ "$2" = 1 ]; then
		test_expect_success "fnmatch: match '$3' '$4'" "
			test-wildmatch fnmatch '$3' '$4'
		"
	elif [ "$2" = 0 ]; then
		test_expect_success "fnmatch: no match '$3' '$4'" "
			! test-wildmatch fnmatch '$3' '$4'
		"
	# else
	#	test_expect_success BROKEN_FNMATCH "fnmatch: '$3' '$4'" "
	#	! test-wildmatch fnmatch '$3' '$4'
	#	"
	fi
}
# Register a case-insensitive wildmatch expectation.
# $1 - expected result (1 = match, else no match), $2 - text, $3 - pattern
imatch() {
	# Quoted expectation operand (see match()).
	if [ "$1" = 1 ]; then
		test_expect_success "iwildmatch: match '$2' '$3'" "
			test-wildmatch iwildmatch '$2' '$3'
		"
	else
		test_expect_success "iwildmatch: no match '$2' '$3'" "
			! test-wildmatch iwildmatch '$2' '$3'
		"
	fi
}
# Register a pathmatch expectation (slashes not treated specially).
# $1 - expected result (1 = match, else no match), $2 - text, $3 - pattern
pathmatch() {
	# Quoted expectation operand (see match()).
	if [ "$1" = 1 ]; then
		test_expect_success "pathmatch: match '$2' '$3'" "
			test-wildmatch pathmatch '$2' '$3'
		"
	else
		test_expect_success "pathmatch: no match '$2' '$3'" "
			! test-wildmatch pathmatch '$2' '$3'
		"
	fi
}
# Test vectors. Each line: match <wildmatch> <fnmatch> <text> <pattern>
# where the expectations are 1 = must match, 0 = must not match, and
# x = that matcher's result is not checked (see match() above).
# Basic wildmat features
match 1 1 foo foo
match 0 0 foo bar
match 1 1 '' ""
match 1 1 foo '???'
match 0 0 foo '??'
match 1 1 foo '*'
match 1 1 foo 'f*'
match 0 0 foo '*f'
match 1 1 foo '*foo*'
match 1 1 foobar '*ob*a*r*'
match 1 1 aaaaaaabababab '*ab'
match 1 1 'foo*' 'foo\*'
match 0 0 foobar 'foo\*bar'
match 1 1 'f\oo' 'f\\oo'
match 1 1 ball '*[al]?'
match 0 0 ten '[ten]'
match 0 1 ten '**[!te]'
match 0 0 ten '**[!ten]'
match 1 1 ten 't[a-g]n'
match 0 0 ten 't[!a-g]n'
match 1 1 ton 't[!a-g]n'
match 1 1 ton 't[^a-g]n'
match 1 x 'a]b' 'a[]]b'
match 1 x a-b 'a[]-]b'
match 1 x 'a]b' 'a[]-]b'
match 0 x aab 'a[]-]b'
match 1 x aab 'a[]a-]b'
match 1 1 ']' ']'
# Extended slash-matching features
match 0 0 'foo/baz/bar' 'foo*bar'
match 0 0 'foo/baz/bar' 'foo**bar'
match 0 1 'foobazbar' 'foo**bar'
match 1 1 'foo/baz/bar' 'foo/**/bar'
match 1 0 'foo/baz/bar' 'foo/**/**/bar'
match 1 0 'foo/b/a/z/bar' 'foo/**/bar'
match 1 0 'foo/b/a/z/bar' 'foo/**/**/bar'
match 1 0 'foo/bar' 'foo/**/bar'
match 1 0 'foo/bar' 'foo/**/**/bar'
match 0 0 'foo/bar' 'foo?bar'
match 0 0 'foo/bar' 'foo[/]bar'
match 0 0 'foo/bar' 'f[^eiu][^eiu][^eiu][^eiu][^eiu]r'
match 1 1 'foo-bar' 'f[^eiu][^eiu][^eiu][^eiu][^eiu]r'
match 1 0 'foo' '**/foo'
match 1 x 'XXX/foo' '**/foo'
match 1 0 'bar/baz/foo' '**/foo'
match 0 0 'bar/baz/foo' '*/foo'
match 0 0 'foo/bar/baz' '**/bar*'
match 1 0 'deep/foo/bar/baz' '**/bar/*'
match 0 0 'deep/foo/bar/baz/' '**/bar/*'
match 1 0 'deep/foo/bar/baz/' '**/bar/**'
match 0 0 'deep/foo/bar' '**/bar/*'
match 1 0 'deep/foo/bar/' '**/bar/**'
match 0 0 'foo/bar/baz' '**/bar**'
match 1 0 'foo/bar/baz/x' '*/bar/**'
match 0 0 'deep/foo/bar/baz/x' '*/bar/**'
match 1 0 'deep/foo/bar/baz/x' '**/bar/*/*'
# Various additional tests
match 0 0 'acrt' 'a[c-c]st'
match 1 1 'acrt' 'a[c-c]rt'
match 0 0 ']' '[!]-]'
match 1 x 'a' '[!]-]'
match 0 0 '' '\'
match 0 x '\' '\'
match 0 x 'XXX/\' '*/\'
match 1 x 'XXX/\' '*/\\'
match 1 1 'foo' 'foo'
match 1 1 '@foo' '@foo'
match 0 0 'foo' '@foo'
match 1 1 '[ab]' '\[ab]'
match 1 1 '[ab]' '[[]ab]'
match 1 x '[ab]' '[[:]ab]'
match 0 x '[ab]' '[[::]ab]'
match 1 x '[ab]' '[[:digit]ab]'
match 1 x '[ab]' '[\[:]ab]'
match 1 1 '?a?b' '\??\?b'
match 1 1 'abc' '\a\b\c'
match 0 0 'foo' ''
match 1 0 'foo/bar/baz/to' '**/t[o]'
# Character class tests
match 1 x 'a1B' '[[:alpha:]][[:digit:]][[:upper:]]'
match 0 x 'a' '[[:digit:][:upper:][:space:]]'
match 1 x 'A' '[[:digit:][:upper:][:space:]]'
match 1 x '1' '[[:digit:][:upper:][:space:]]'
match 0 x '1' '[[:digit:][:upper:][:spaci:]]'
match 1 x ' ' '[[:digit:][:upper:][:space:]]'
match 0 x '.' '[[:digit:][:upper:][:space:]]'
match 1 x '.' '[[:digit:][:punct:][:space:]]'
match 1 x '5' '[[:xdigit:]]'
match 1 x 'f' '[[:xdigit:]]'
match 1 x 'D' '[[:xdigit:]]'
match 1 x '_' '[[:alnum:][:alpha:][:blank:][:cntrl:][:digit:][:graph:][:lower:][:print:][:punct:][:space:][:upper:][:xdigit:]]'
match 1 x '_' '[[:alnum:][:alpha:][:blank:][:cntrl:][:digit:][:graph:][:lower:][:print:][:punct:][:space:][:upper:][:xdigit:]]'
match 1 x '.' '[^[:alnum:][:alpha:][:blank:][:cntrl:][:digit:][:lower:][:space:][:upper:][:xdigit:]]'
match 1 x '5' '[a-c[:digit:]x-z]'
match 1 x 'b' '[a-c[:digit:]x-z]'
match 1 x 'y' '[a-c[:digit:]x-z]'
match 0 x 'q' '[a-c[:digit:]x-z]'
# Additional tests, including some malformed wildmats
match 1 x ']' '[\\-^]'
match 0 0 '[' '[\\-^]'
match 1 x '-' '[\-_]'
match 1 x ']' '[\]]'
match 0 0 '\]' '[\]]'
match 0 0 '\' '[\]]'
match 0 0 'ab' 'a[]b'
match 0 x 'a[]b' 'a[]b'
match 0 x 'ab[' 'ab['
match 0 0 'ab' '[!'
match 0 0 'ab' '[-'
match 1 1 '-' '[-]'
match 0 0 '-' '[a-'
match 0 0 '-' '[!a-'
match 1 x '-' '[--A]'
match 1 x '5' '[--A]'
match 1 1 ' ' '[ --]'
match 1 1 '$' '[ --]'
match 1 1 '-' '[ --]'
match 0 0 '0' '[ --]'
match 1 x '-' '[---]'
match 1 x '-' '[------]'
match 0 0 'j' '[a-e-n]'
match 1 x '-' '[a-e-n]'
match 1 x 'a' '[!------]'
match 0 0 '[' '[]-a]'
match 1 x '^' '[]-a]'
match 0 0 '^' '[!]-a]'
match 1 x '[' '[!]-a]'
match 1 1 '^' '[a^bc]'
match 1 x '-b]' '[a-]b]'
match 0 0 '\' '[\]'
match 1 1 '\' '[\\]'
match 0 0 '\' '[!\\]'
match 1 1 'G' '[A-\\]'
match 0 0 'aaabbb' 'b*a'
match 0 0 'aabcaa' '*ba*'
match 1 1 ',' '[,]'
match 1 1 ',' '[\\,]'
match 1 1 '\' '[\\,]'
match 1 1 '-' '[,-.]'
match 0 0 '+' '[,-.]'
match 0 0 '-.]' '[,-.]'
match 1 1 '2' '[\1-\3]'
match 1 1 '3' '[\1-\3]'
match 0 0 '4' '[\1-\3]'
match 1 1 '\' '[[-\]]'
match 1 1 '[' '[[-\]]'
match 1 1 ']' '[[-\]]'
match 0 0 '-' '[[-\]]'
# Test recursion and the abort code (use "wildtest -i" to see iteration counts)
match 1 1 '-adobe-courier-bold-o-normal--12-120-75-75-m-70-iso8859-1' '-*-*-*-*-*-*-12-*-*-*-m-*-*-*'
match 0 0 '-adobe-courier-bold-o-normal--12-120-75-75-X-70-iso8859-1' '-*-*-*-*-*-*-12-*-*-*-m-*-*-*'
match 0 0 '-adobe-courier-bold-o-normal--12-120-75-75-/-70-iso8859-1' '-*-*-*-*-*-*-12-*-*-*-m-*-*-*'
match 1 1 'XXX/adobe/courier/bold/o/normal//12/120/75/75/m/70/iso8859/1' 'XXX/*/*/*/*/*/*/12/*/*/*/m/*/*/*'
match 0 0 'XXX/adobe/courier/bold/o/normal//12/120/75/75/X/70/iso8859/1' 'XXX/*/*/*/*/*/*/12/*/*/*/m/*/*/*'
match 1 0 'abcd/abcdefg/abcdefghijk/abcdefghijklmnop.txt' '**/*a*b*g*n*t'
match 0 0 'abcd/abcdefg/abcdefghijk/abcdefghijklmnop.txtz' '**/*a*b*g*n*t'
match 0 x foo '*/*/*'
match 0 x foo/bar '*/*/*'
match 1 x foo/bba/arr '*/*/*'
match 0 x foo/bb/aa/rr '*/*/*'
match 1 x foo/bb/aa/rr '**/**/**'
match 1 x abcXdefXghi '*X*i'
match 0 x ab/cXd/efXg/hi '*X*i'
match 1 x ab/cXd/efXg/hi '*/*X*/*/*i'
match 1 x ab/cXd/efXg/hi '**/*X*/**/*i'
# pathmatch: '/' has no special meaning (plain fnmatch-style matching).
pathmatch 1 foo foo
pathmatch 0 foo fo
pathmatch 1 foo/bar foo/bar
pathmatch 1 foo/bar 'foo/*'
pathmatch 1 foo/bba/arr 'foo/*'
pathmatch 1 foo/bba/arr 'foo/**'
pathmatch 1 foo/bba/arr 'foo*'
pathmatch 1 foo/bba/arr 'foo**'
pathmatch 1 foo/bba/arr 'foo/*arr'
pathmatch 1 foo/bba/arr 'foo/**arr'
pathmatch 0 foo/bba/arr 'foo/*z'
pathmatch 0 foo/bba/arr 'foo/**z'
pathmatch 1 foo/bar 'foo?bar'
pathmatch 1 foo/bar 'foo[/]bar'
pathmatch 0 foo '*/*/*'
pathmatch 0 foo/bar '*/*/*'
pathmatch 1 foo/bba/arr '*/*/*'
pathmatch 1 foo/bb/aa/rr '*/*/*'
pathmatch 1 abcXdefXghi '*X*i'
pathmatch 1 ab/cXd/efXg/hi '*/*X*/*/*i'
pathmatch 1 ab/cXd/efXg/hi '*Xg*i'
# Case-sensitivy features
match 0 x 'a' '[A-Z]'
match 1 x 'A' '[A-Z]'
match 0 x 'A' '[a-z]'
match 1 x 'a' '[a-z]'
match 0 x 'a' '[[:upper:]]'
match 1 x 'A' '[[:upper:]]'
match 0 x 'A' '[[:lower:]]'
match 1 x 'a' '[[:lower:]]'
match 0 x 'A' '[B-Za]'
match 1 x 'a' '[B-Za]'
match 0 x 'A' '[B-a]'
match 1 x 'a' '[B-a]'
match 0 x 'z' '[Z-y]'
match 1 x 'Z' '[Z-y]'
# imatch: same patterns, case-insensitive — everything above matches.
imatch 1 'a' '[A-Z]'
imatch 1 'A' '[A-Z]'
imatch 1 'A' '[a-z]'
imatch 1 'a' '[a-z]'
imatch 1 'a' '[[:upper:]]'
imatch 1 'A' '[[:upper:]]'
imatch 1 'A' '[[:lower:]]'
imatch 1 'a' '[[:lower:]]'
imatch 1 'A' '[B-Za]'
imatch 1 'a' '[B-Za]'
imatch 1 'A' '[B-a]'
imatch 1 'a' '[B-a]'
imatch 1 'z' '[Z-y]'
imatch 1 'Z' '[Z-y]'
test_done
|
<filename>app/models/lastfm_user.rb
# Thin caching wrapper around the last.fm API for a single user.
# All results are cached for 7 days via Rails.cache.
class LastfmUser
  attr_accessor :name, :lastfm
  # name   - last.fm username
  # lastfm - configured Lastfm API client
  def initialize(name, lastfm)
    @name = name
    @lastfm = lastfm
    @group = 'mnml'
  end
  # Cached user profile info.
  def info
    Rails.cache.fetch("/users/#{@name}#info", :expires_in => 7.days, :compress => true) do
      @lastfm.user.get_info(:user => @name)
    end
  end
  # Cached weekly chart list for the hard-coded 'mnml' group.
  # NOTE(review): this passes :group to user.get_weekly_chart_list —
  # confirm the client supports a group lookup here (most user.* calls
  # take :user).
  def get_weekly_chart_list
    Rails.cache.fetch("/users/#{@group}#weekly_chart_list", :expires_in => 7.days, :compress => true) do
      @lastfm.user.get_weekly_chart_list(:group => @group)
    end
  end
  # Cached top 30 artists for the user.
  def top_artists
    Rails.cache.fetch("/users/#{@name}#top_artists", :expires_in => 7.days, :compress => true) do
      @lastfm.user.get_top_artists(:user => @name, :limit => 30)
    end
  end
end
|
import { ErrorCodes, LoginCredentialEntity } from "../../../src/domain/entity/LoginCredential"
import { DomainError } from "../../../src/domain/DomainError"
import config from "../../../src/config/app"
// Build a random password of `length` characters drawn from a fixed
// charset (letters, digits and punctuation). Not cryptographically
// secure — test fixture only.
function generateRandomPassword(length: number) {
    const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789+!\"#$%&'()~=L+*?<"
    let result = ""
    let index = 0
    while (index < length) {
        const pick = Math.floor(Math.random() * charset.length)
        result += charset.charAt(pick)
        index += 1
    }
    return result
}
describe("LoginCredentialEntity", () => {
test("Normal", async () => {
const userId = 1
const password = <PASSWORD>(config.user_login_credential.password.min_length)
const credential = await LoginCredentialEntity.new(userId, password)
expect(credential).toBeInstanceOf(LoginCredentialEntity)
})
test("PasswordNotMeetPolicy", async () => {
expect.assertions(2)
const userId = 1
const password = <PASSWORD>(config.user_login_credential.password.min_length - 1)
try {
await LoginCredentialEntity.new(userId, password)
} catch (error) {
expect(error).toBeInstanceOf(DomainError)
if (error instanceof DomainError) {
expect(error.code).toMatch(ErrorCodes.PasswordNotMeetPolicy)
}
}
})
test("PasswordNotMeetPolicy", async () => {
expect.assertions(2)
const userId = 1
const password = <PASSWORD>(config.user_login_credential.password.max_length + 1)
try {
await LoginCredentialEntity.new(userId, password)
} catch (error) {
expect(error).toBeInstanceOf(DomainError)
if (error instanceof DomainError) {
expect(error.code).toMatch(ErrorCodes.PasswordNotMeetPolicy)
}
}
})
it.each([1.5, new Date(), {}, [], true, false, null, undefined])("InvaidPasswordInput", async (password) => {
expect.assertions(2)
const userId = 1
try {
//@ts-ignore
await LoginCredentialEntity.new(userId, password)
} catch (error) {
expect(error).toBeInstanceOf(DomainError)
if (error instanceof DomainError) {
expect(error.code).toMatch(ErrorCodes.InvaidPasswordInput)
}
}
})
})
|
import hmac
import hashlib
from datetime import datetime
from simple_rest.auth.decorators import request_passes_test
from simple_rest.utils.decorators import wrap_object
def get_secret_key(request, *args, **kwargs):
    """Return the secret key matching the request's public key.

    Reads the ``_auth_public_key`` query parameter; returns None when it
    is absent or empty.
    """
    public_key = request.GET.get('_auth_public_key')
    if not public_key:
        return None
    # Placeholder until per-user keys are wired up:
    # user = User.objects.get(public_key=public_key)
    # return user.secret_key
    return 'test123'
def authentication_required(obj):
    """
    Requires that the user be authenticated either by a signature or by
    being actively logged in.
    """
    def check(request, *args, **kwargs):
        # Signature-based auth is currently commented out upstream; only
        # an active login session grants access for now.
        return request.user.is_authenticated
    return wrap_object(obj, request_passes_test(check))
def calculate_signature(secret_key, getdata, request_body):
    """
    Calculates the signature for the given request ``getdata`` and
    ``request_body``.
    Sort QUERYSTRING (``getdata``) alphabetically by key, and join all key
    value pairs into one long string. Then append the ``request_body``. This
    is the message sent to HMAC.
    Generate a signature using HMAC with ``secret_key`` as the ``key``,
    the message as the data and SHA512 as the hash function. The hexdigested
    output is the signature.
    """
    # Construct the message from the sorted query data and the request body.
    # BUGFIX: the original format string had three '{}' placeholders for two
    # arguments, which raised IndexError on every call.
    message = '{}{}'.format(
        ''.join("{}{}".format(k, v) for k, v in sorted(iter(getdata.items()))),
        request_body)
    # hmac requires bytes for both key and message on Python 3; the original
    # passed str() values, which raises TypeError there.
    if isinstance(secret_key, str):
        secret_key = secret_key.encode('utf-8')
    if isinstance(message, str):
        message = message.encode('utf-8')
    # Calculate the signature (HMAC SHA512) according to RFC 2104
    signature = hmac.new(secret_key, message, hashlib.sha512).hexdigest()
    return signature
def validate_signature(request, secret_key):
    """
    Validates the signature associated with the given request.

    Returns False when the signature or timestamp is missing, when the
    timestamp is more than 5 minutes away from the current UTC time, or
    when the recomputed signature does not match.
    """
    getdata = request.GET.copy()
    # Make sure the request contains a signature
    if getdata.get('_auth_signature', False):
        signature = getdata['_auth_signature']
        del getdata['_auth_signature']
    else:
        return False
    # Make sure the request contains a timestamp
    if getdata.get('_auth_timestamp', False):
        timestamp = int(getdata.get('_auth_timestamp', False))
    else:
        return False
    # Make sure the signature has not expired.
    # BUGFIX: the original compared delta.seconds, which wraps at one day —
    # a days-old timestamp had a small .seconds and was ACCEPTED. Use
    # total_seconds(); abs() also tolerates small clock skew either way.
    delta = datetime.utcnow() - datetime.utcfromtimestamp(timestamp)
    if abs(delta.total_seconds()) > 5 * 60:  # older than 5 minutes: invalid
        return False
    # Make sure the signature is valid
    request_body = request.body
    return signature == calculate_signature(secret_key, getdata, request_body)
def signature_required(secret_key_func):
    """
    Requires that the request contain a valid signature to gain access
    to a specified resource.

    ``secret_key_func(request, ...)`` supplies the secret key used when
    verifying the signature.
    """
    def actual_decorator(obj):
        def check(request, *args, **kwargs):
            key = secret_key_func(request, *args, **kwargs)
            return validate_signature(request, key)
        return wrap_object(obj, request_passes_test(check))
    return actual_decorator
|
#!/bin/sh
make -C /Users/christian/GIT/opencv/ios/build/iPhoneSimulator-x86_64 -f /Users/christian/GIT/opencv/ios/build/iPhoneSimulator-x86_64/CMakeScripts/ZERO_CHECK_cmakeRulesBuildPhase.make$CONFIGURATION all
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for USN-2497-1
#
# Security announcement date: 2015-02-09 00:00:00 UTC
# Script generation date: 2017-01-01 21:04:14 UTC
#
# Operating System: Ubuntu 14.04 LTS
# Architecture: i686
#
# Vulnerable packages fix on version:
# - ntp:1:4.2.6.p5+dfsg-3ubuntu2.14.04.2
#
# Last versions recommanded by security team:
# - ntp:1:4.2.6.p5+dfsg-3ubuntu2.14.04.10
#
# CVE List:
# - CVE-2014-9297
# - CVE-2014-9298
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
# Upgrade ntp to the version currently recommended by the security team
# for USN-2497-1 (see header above for the CVE list).
sudo apt-get install --only-upgrade ntp=1:4.2.6.p5+dfsg-3ubuntu2.14.04.10 -y
|
<gh_stars>10-100
package bird
import (
"testing"
)
// Test_MemoryCacheAccess exercises the basic Set/Get round trip of the
// memory cache.
func Test_MemoryCacheAccess(t *testing.T) {
	cache, err := NewMemoryCache()
	if err != nil {
		// The original ignored this error and would nil-deref below.
		t.Fatal(err)
	}
	parsed := Parsed{
		"foo": 23,
		"bar": 42,
		"baz": true,
	}
	t.Log("Setting memory cache...")
	err = cache.Set("testkey", parsed, 5)
	if err != nil {
		t.Error(err)
	}
	t.Log("Fetching from memory cache...")
	parsed, err = cache.Get("testkey")
	if err != nil {
		t.Error(err)
	}
	cache.Expire()
	t.Log(parsed)
}
// Test_MemoryCacheAccessKeyMissing verifies that fetching an absent key
// yields the special "missing" Parsed value.
func Test_MemoryCacheAccessKeyMissing(t *testing.T) {
	cache, err := NewMemoryCache()
	if err != nil {
		// The original ignored this error before using the cache.
		t.Fatal(err)
	}
	parsed, err := cache.Get("test_missing_key")
	if !IsSpecial(parsed) {
		t.Error(err)
	}
	t.Log("Cache error:", err)
	t.Log(parsed)
}
// Test_MemoryCacheRoutes round-trips a parsed routes sample through the
// memory cache and checks the routes list survives.
func Test_MemoryCacheRoutes(t *testing.T) {
	f, err := openFile("routes_bird1_ipv4.sample")
	if err != nil {
		// Fatal (not Error): continuing would nil-deref f below.
		t.Fatal(err)
	}
	defer f.Close()
	parsed := parseRoutes(f)
	_, ok := parsed["routes"].([]Parsed)
	if !ok {
		t.Fatal("Error getting routes")
	}
	cache, err := NewMemoryCache()
	if err != nil {
		// The original ignored this error before using the cache.
		t.Fatal(err)
	}
	err = cache.Set("routes_protocol_test", parsed, 5)
	if err != nil {
		t.Error(err)
	}
	parsed, err = cache.Get("routes_protocol_test")
	if err != nil {
		t.Error(err)
		return
	}
	routes, ok := parsed["routes"].([]Parsed)
	if !ok {
		t.Error("Error getting routes")
	}
	t.Log("Retrieved routes:", len(routes))
}
|
# frozen_string_literal: true
require_relative 'twitch/version'
require_relative 'twitch/client'
|
#!/usr/bin/env bash
# Build the app with GHCJS, copy the generated JS bundle into js/ and
# minify it with the Closure compiler.
set -euo pipefail
# Compiling with ghcjs:
stack build --stack-yaml=stack-ghcjs.yaml
# Moving the generated files to the js folder:
mkdir -p js
# Quote the command substitution — the install root may contain spaces.
cp -r "$(stack path --local-install-root --stack-yaml=stack-ghcjs.yaml)/bin/starterApp.jsexe/all.js" js/
# Minifying all.js file using the closure compiler:
cd js
ccjs all.js --compilation_level=ADVANCED_OPTIMIZATIONS > all.min.js
# OPTIONAL: zipping, to see the actual transferred size of the app:
zopfli all.min.js
|
#!/usr/bin/env bash
# Report per-component memory (MiB) and CPU usage of the openshift-cnv
# namespace by querying the in-cluster Prometheus.
set -euo pipefail
MEMORY_QUERY='sum by (label_app_kubernetes_io_component) (sum(container_memory_usage_bytes{namespace="openshift-cnv"}) by (pod) * on (pod) group_left(label_app_kubernetes_io_component) kube_pod_labels{namespace="openshift-cnv"}) / (1024* 1024)'
CPU_QUERY='sum by (label_app_kubernetes_io_component) (sum(pod:container_cpu_usage:sum{namespace="openshift-cnv"}) by (pod) * on (pod) group_left(label_app_kubernetes_io_component) kube_pod_labels{namespace="openshift-cnv"})'
JQ_FILTER='.data.result[] | .metric.label_app_kubernetes_io_component + ":" + .value[1]'
# Run a PromQL query ($1) inside the prometheus-k8s-0 pod and print
# "component:value" lines via JQ_FILTER.
function run_query() {
    local query="$1"
    oc exec -n openshift-monitoring prometheus-k8s-0 -c prometheus -- curl --silent \
        --data-urlencode "query=${query}" \
        http://127.0.0.1:9090/api/v1/query? | jq "$JQ_FILTER"
}
echo "----MEMORY_CONSUMPTION----"
run_query "$MEMORY_QUERY"
echo "----CPU_CONSUMPTION----"
run_query "$CPU_QUERY"
|
from sklearn.cluster import AgglomerativeClustering
# Create a dataset (five 2-D points)
x = [[1, 2], [4, 7], [9, 8], [12, 17], [20, 3]]
# Create a hierarchical clustering object
# NOTE(review): the ``affinity`` parameter was renamed to ``metric`` in
# scikit-learn 1.2 and removed in 1.4 — confirm the pinned sklearn version.
hc = AgglomerativeClustering(n_clusters=2, affinity='euclidean', linkage='ward')
# Apply the clustering algorithm to the dataset
y_pred = hc.fit_predict(x)
# Print the cluster labels (one label per input point)
print(y_pred)
|
#!/bin/bash
# Deploy the k8s cluster with kubespray and stash the resulting
# admin kubeconfig under ~/pai-deploy/kube/config.
echo "setup k8s cluster"
# Check cd too — the script's own convention is `|| exit $?` on every step,
# but the original let a failed cd fall through to ansible-playbook.
cd "${HOME}/pai-deploy/kubespray" || exit $?
ansible-playbook -i inventory/pai/hosts.yml cluster.yml --become --become-user=root -e "@inventory/pai/openpai.yml" || exit $?
sudo mkdir -p "${HOME}/pai-deploy/kube" || exit $?
sudo cp -rf "${HOME}/pai-deploy/kubespray/inventory/pai/artifacts/admin.conf" "${HOME}/pai-deploy/kube/config" || exit $?
echo "You can run the following commands to setup kubectl on you local host:"
echo "ansible-playbook -i ${HOME}/pai-deploy/kubespray/inventory/pai/hosts.yml ${HOME}/pai-deploy/pai/contrib/kubespray/set-kubectl.yml --ask-become-pass"
|
#! /bin/bash
# Author: Marco Esposito
# Based on instructions by the MARP developing team.
# Run this script on bash to compile the presentation in html format.
# This script is only intended to be used on Linux.
# Usage: $0 <input.md> [output.pdf]
# A second argument switches to PDF mode, which needs --allow-local-files.
# Quote ${2} — the original unquoted [ -z ${2} ] breaks on args with spaces.
if [ -z "${2}" ]; then echo "compiling to HTML"; else FLAG=--allow-local-files; echo "compiling to PDF"; fi
docker run --rm \
    -v "$PWD":/home/marp/app/ \
    -e MARP_USER="$(id -u):$(id -g)" \
    -e LANG="$LANG" marpteam/marp-cli \
    ${FLAG:+"$FLAG"} \
    "$1" \
    ${2:+"$2"}
|
<gh_stars>0
/*
* Contacts Service
*
* Copyright (c) 2010 - 2012 Samsung Electronics Co., Ltd. All rights reserved.
*
* Contact: <NAME> <<EMAIL>>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#ifndef __CTS_TYPES_H__
#define __CTS_TYPES_H__
//<!--
/**
* @defgroup CONTACTS_SVC_TYPES Types
* @ingroup CONTACTS_SVC
* @addtogroup CONTACTS_SVC_TYPES
* @{
*
* It is Types of Number, E-mail, Web address, Address, Event, Phone Log, etc.
* And this interface provides methods to handle custom types.
*
*/
/**
* The Number can be made with a set of values by specifying one or more values.
* \n Example : CTS_NUM_TYPE_HOME|CTS_NUM_TYPE_VOICE
* \n Exceptionally, CTS_NUM_TYPE_CUSTOM is exclusive.
* CTS_NUM_TYPE_CUSTOM should be handled earlier.
*/
enum NUMBERTYPE{
CTS_NUM_TYPE_NONE = 0,
CTS_NUM_TYPE_HOME = 1<<0,/**< a telephone number associated with a residence */
CTS_NUM_TYPE_WORK = 1<<1,/**< a telephone number associated with a place of work */
CTS_NUM_TYPE_VOICE = 1<<2,/**< a voice telephone number */
CTS_NUM_TYPE_FAX = 1<<3,/**< a facsimile telephone number */
CTS_NUM_TYPE_MSG = 1<<4,/**< the telephone number has voice messaging support */
CTS_NUM_TYPE_CELL = 1<<5,/**< a cellular telephone number */
CTS_NUM_TYPE_PAGER = 1<<6,/**< a paging device telephone number */
CTS_NUM_TYPE_BBS = 1<<7,/**< a bulletin board system telephone number */
CTS_NUM_TYPE_MODEM = 1<<8,/**< a MODEM connected telephone number */
CTS_NUM_TYPE_CAR = 1<<9,/**< a car-phone telephone number */
CTS_NUM_TYPE_ISDN = 1<<10,/**< an ISDN service telephone number */
CTS_NUM_TYPE_VIDEO = 1<<11,/**< a video conferencing telephone number */
CTS_NUM_TYPE_PCS = 1<<12,/**< a personal communication services telephone number */
CTS_NUM_TYPE_ASSISTANT = 1<<30,/**< a additional type for assistant */
CTS_NUM_TYPE_CUSTOM = 1<<31,/**< Custom number type */
};
enum EMAILTYPE{
CTS_EMAIL_TYPE_NONE = 0,/**< Other */
CTS_EMAIL_TYPE_HOME = 1<<0,/**< . */
CTS_EMAIL_TYPE_WORK = 1<<1,/**< . */
};
enum ADDRESSTYPE{
CTS_ADDR_TYPE_NONE = 0,/**< . */
CTS_ADDR_TYPE_HOME = 1<<0,/**< a delivery address for a residence */
CTS_ADDR_TYPE_WORK = 1<<1,/**< a delivery address for a place of work */
CTS_ADDR_TYPE_DOM = 1<<2,/**< a domestic delivery address */
CTS_ADDR_TYPE_INTL = 1<<3,/**< an international delivery address */
CTS_ADDR_TYPE_POSTAL = 1<<4,/**< a postal delivery address */
CTS_ADDR_TYPE_PARCEL = 1<<5,/**< a parcel delivery address */
};
enum WEBTYPE{
CTS_WEB_TYPE_NONE,/**< Other */
CTS_WEB_TYPE_HOME,/**< . */
CTS_WEB_TYPE_WORK,/**< . */
};
/**
 * Phone-log entry type.
 * Values 1-12 are call logs, 101-106 are SMS/MMS logs,
 * and 201-202 are e-mail logs.
 */
enum PLOGTYPE{
CTS_PLOG_TYPE_NONE,/**< No log type */
CTS_PLOG_TYPE_VOICE_INCOMMING = 1,/**< Incoming voice call */
CTS_PLOG_TYPE_VOICE_OUTGOING = 2,/**< Outgoing voice call */
CTS_PLOG_TYPE_VIDEO_INCOMMING = 3,/**< Incoming video call */
CTS_PLOG_TYPE_VIDEO_OUTGOING = 4,/**< Outgoing video call */
CTS_PLOG_TYPE_VOICE_INCOMMING_UNSEEN = 5,/**< Not confirmed missed call */
CTS_PLOG_TYPE_VOICE_INCOMMING_SEEN = 6,/**< Confirmed missed call */
CTS_PLOG_TYPE_VIDEO_INCOMMING_UNSEEN = 7,/**< Not confirmed missed video call */
CTS_PLOG_TYPE_VIDEO_INCOMMING_SEEN = 8,/**< Confirmed missed video call */
CTS_PLOG_TYPE_VOICE_REJECT = 9,/**< Rejected voice call */
CTS_PLOG_TYPE_VIDEO_REJECT = 10,/**< Rejected video call */
CTS_PLOG_TYPE_VOICE_BLOCKED = 11,/**< Blocked voice call */
CTS_PLOG_TYPE_VIDEO_BLOCKED = 12,/**< Blocked video call */
CTS_PLOG_TYPE_MMS_INCOMMING = 101,/**< Received MMS */
CTS_PLOG_TYPE_MMS_OUTGOING = 102,/**< Sent MMS */
CTS_PLOG_TYPE_SMS_INCOMMING = 103,/**< Received SMS */
CTS_PLOG_TYPE_SMS_OUTGOING = 104,/**< Sent SMS */
CTS_PLOG_TYPE_SMS_BLOCKED = 105,/**< Blocked SMS */
CTS_PLOG_TYPE_MMS_BLOCKED = 106,/**< Blocked MMS */
CTS_PLOG_TYPE_EMAIL_RECEIVED = 201,/**< Received e-mail */
CTS_PLOG_TYPE_EMAIL_SENT = 202,/**< Sent e-mail */
CTS_PLOG_TYPE_MAX /**< Sentinel; not a valid log type */
};
/** Type of a date event associated with a contact. */
enum EVENTTYPE{
CTS_EVENT_TYPE_BIRTH,/**< Birthday */
CTS_EVENT_TYPE_ANNIVERSARY/**< Anniversary */
};
/** Source/account type of an address book. */
enum ADDRESSBOOKTYPE{
CTS_ADDRESSBOOK_TYPE_INTERNAL, /**< Device-internal address book */
CTS_ADDRESSBOOK_TYPE_EXCHANGE, /**< Exchange account */
CTS_ADDRESSBOOK_TYPE_GOOGLE, /**< Google account */
CTS_ADDRESSBOOK_TYPE_YAHOO, /**< Yahoo account */
CTS_ADDRESSBOOK_TYPE_FACEBOOK, /**< Facebook account */
CTS_ADDRESSBOOK_TYPE_OTHER, /**< Other/unspecified source */
};
/**
 * Class of a user-defined custom type.
 * Used by contacts_svc_insert_custom_type(),
 * contacts_svc_delete_custom_type() and contacts_svc_find_custom_type().
 */
typedef enum {
CTS_TYPE_CLASS_EXTEND_DATA=0,/**< Extend Data type(@ref CONTACTS_SVC_EXTEND) */
CTS_TYPE_CLASS_NUM=1,/**< Custom Number type */
}cts_custom_type_class;
/**
* This function inserts a User defined type into database.
* This API automatically assigns an index to the new custom type.
* \n The returned index is unique & non-reusable.
*
* @param[in] type_class #cts_custom_type_class
* @param[in] type_name Name of Custom Type
* @return the index of the inserted custom type on success, Negative value(#cts_error) on error
*/
int contacts_svc_insert_custom_type(cts_custom_type_class type_class, char *type_name);
/**
* This function deletes a user defined type in database.
*
* @param[in] type_class #cts_custom_type_class
* @param[in] index The index of User defined type to delete in database.
* @return #CTS_SUCCESS on success, Negative value(#cts_error) on error
*/
int contacts_svc_delete_custom_type(cts_custom_type_class type_class, int index);
/**
* This function gets name of custom type.
* The obtained string should be freed using free().
* @param[in] type_class #cts_custom_type_class
* @param[in] index The index of User defined type.
* @return The gotten information, or NULL if no value is obtained or error
*/
char* contacts_svc_get_custom_type(cts_custom_type_class type_class, int index);
/**
* This function gets the index of the user-defined type whose name is #type_name.
*
* @param[in] type_class #cts_custom_type_class
* @param[in] type_name The name of type for searching
* @return index of found Custom type on success, Negative value(#cts_error) on error
*/
int contacts_svc_find_custom_type(cts_custom_type_class type_class, char *type_name);
/*
* @}
*/
/**
* @defgroup CONTACTS_SVC_EXTEND Using the Extend Data for Contact
* @ingroup CONTACTS_SVC
* @addtogroup CONTACTS_SVC_EXTEND
* @{
*
* This is description of usages of extend data related with a contact.
*
* @section extend_sec1 Properties
* - The extend data is contacts service value(#CTSvalue).
* - The extend data only exist for contact struct(#CTSstruct).
* - The type of extend data is defined
* by contacts_svc_insert_custom_type() with #CTS_TYPE_CLASS_EXTEND_DATA.
* - The extend data is stored to contact by contacts_svc_struct_store_value().
* - Extend data can be stored only one at each type in contacts.
* - The index of custom type is used as the field parameter of contacts_svc_struct_store_value().
* - The composition of the extend data(#EXTENDVALUE)
* -# #CTS_EXTEND_VAL_DATA1_INT
* -# #CTS_EXTEND_VAL_DATA2_STR
* -# #CTS_EXTEND_VAL_DATA3_STR
* -# #CTS_EXTEND_VAL_DATA4_STR
* -# #CTS_EXTEND_VAL_DATA5_STR
* -# #CTS_EXTEND_VAL_DATA6_STR
* -# #CTS_EXTEND_VAL_DATA7_STR
* -# #CTS_EXTEND_VAL_DATA8_STR
* -# #CTS_EXTEND_VAL_DATA9_STR
* -# #CTS_EXTEND_VAL_DATA10_STR
*
* @section extend_sec2 Usages
* - Notice
* \n The extend data has values of fixed type.
* \n Therefore if you want to save values of the other types, convert to string.
* \n This mechanism is a supplementary mechanism. Do not abuse.
* - example
* @code
#include <stdio.h>
#include <glib.h>
#include <contacts-svc.h>
static void print_extend_contact(CTSstruct *contact)
{
int ret;
CTSvalue *value;
GSList *get_list, *cursor;
value = NULL;
contacts_svc_struct_get_value(contact, CTS_CF_NAME_VALUE, &value);
printf("First Name : %s\n", contacts_svc_value_get_str(value, CTS_NAME_VAL_FIRST_STR));
printf("Last Name : %s\n", contacts_svc_value_get_str(value, CTS_NAME_VAL_LAST_STR));
value = NULL;
ret = contacts_svc_find_custom_type(CTS_TYPE_CLASS_EXTEND_DATA, "YomiName");
ret = contacts_svc_struct_get_value(contact, ret, &value);
if(CTS_SUCCESS == ret) {
printf("extend1 data2 : %s\n", contacts_svc_value_get_str(value, CTS_EXTEND_VAL_DATA2_STR));
printf("extend1 data3 : %s\n", contacts_svc_value_get_str(value, CTS_EXTEND_VAL_DATA3_STR));
printf("extend1 data4 : %s\n", contacts_svc_value_get_str(value, CTS_EXTEND_VAL_DATA4_STR));
}
value = NULL;
ret = contacts_svc_find_custom_type(CTS_TYPE_CLASS_EXTEND_DATA, "Family");
ret = contacts_svc_struct_get_value(contact, ret, &value);
if(CTS_SUCCESS == ret) {
printf("extend2 data2 : %s\n", contacts_svc_value_get_str(value, CTS_EXTEND_VAL_DATA2_STR));
printf("extend2 data3 : %s\n", contacts_svc_value_get_str(value, CTS_EXTEND_VAL_DATA3_STR));
printf("extend2 data4 : %s\n", contacts_svc_value_get_str(value, CTS_EXTEND_VAL_DATA4_STR));
}
get_list = NULL;
contacts_svc_struct_get_list(contact, CTS_CF_NUMBER_LIST, &get_list);
cursor = get_list;
for(;cursor;cursor=g_slist_next(cursor))
{
printf("number Type = %d",
contacts_svc_value_get_int(cursor->data, CTS_NUM_VAL_TYPE_INT));
if(contacts_svc_value_get_bool(cursor->data, CTS_NUM_VAL_FAVORITE_BOOL))
printf("(favorite)");
printf("Number = %s\n",
contacts_svc_value_get_str(cursor->data, CTS_NUM_VAL_NUMBER_STR));
}
}
void extend_data_test(void)
{
int ret, index;
CTSstruct *contact;
CTSvalue *name, *number1, *extend_value;
GSList *numbers=NULL;
contact = contacts_svc_struct_new(CTS_STRUCT_CONTACT);
name = contacts_svc_value_new(CTS_VALUE_NAME);
if(name) {
contacts_svc_value_set_str(name, CTS_NAME_VAL_FIRST_STR, "People");
contacts_svc_value_set_str(name, CTS_NAME_VAL_LAST_STR, "Japan");
}
contacts_svc_struct_store_value(contact, CTS_CF_NAME_VALUE, name);
contacts_svc_value_free(name);
number1 = contacts_svc_value_new(CTS_VALUE_NUMBER);
if(number1) {
contacts_svc_value_set_str(number1, CTS_NUM_VAL_NUMBER_STR, "0333333333");
contacts_svc_value_set_int(number1, CTS_NUM_VAL_TYPE_INT, CTS_NUM_TYPE_MOBILE);
contacts_svc_value_set_bool(number1, CTS_NUM_VAL_DEFAULT_BOOL, true);
}
numbers = g_slist_append(numbers, number1);
contacts_svc_struct_store_list(contact, CTS_CF_NUMBER_LIST, numbers);
contacts_svc_value_free(number1);
g_slist_free(numbers);
extend_value = contacts_svc_value_new(CTS_VALUE_EXTEND);
if(extend_value) {
contacts_svc_value_set_str(extend_value, CTS_EXTEND_VAL_DATA2_STR, "YomiFirstName");
contacts_svc_value_set_str(extend_value, CTS_EXTEND_VAL_DATA3_STR, "YomiLastName");
contacts_svc_value_set_str(extend_value, CTS_EXTEND_VAL_DATA4_STR, "YomiCompanyName");
}
ret = contacts_svc_find_custom_type(CTS_TYPE_CLASS_EXTEND_DATA, "YomiName");
if(CTS_ERR_DB_RECORD_NOT_FOUND == ret)
ret = contacts_svc_insert_custom_type(CTS_TYPE_CLASS_EXTEND_DATA, "YomiName");
contacts_svc_struct_store_value(contact, ret, extend_value);
contacts_svc_value_free(extend_value);
extend_value = contacts_svc_value_new(CTS_VALUE_EXTEND);
if(extend_value) {
contacts_svc_value_set_str(extend_value, CTS_EXTEND_VAL_DATA2_STR, "Children1");
contacts_svc_value_set_str(extend_value, CTS_EXTEND_VAL_DATA3_STR, "Children2");
contacts_svc_value_set_str(extend_value, CTS_EXTEND_VAL_DATA4_STR, "Children3");
}
ret = contacts_svc_find_custom_type(CTS_TYPE_CLASS_EXTEND_DATA, "Family");
if(CTS_ERR_DB_RECORD_NOT_FOUND == ret)
ret = contacts_svc_insert_custom_type(CTS_TYPE_CLASS_EXTEND_DATA, "Family");
contacts_svc_struct_store_value(contact, ret, extend_value);
contacts_svc_value_free(extend_value);
index = contacts_svc_insert_contact(0, contact);
contacts_svc_struct_free(contact);
contact = NULL;
ret = contacts_svc_get_contact(index, &contact);
if(ret < 0)
{
printf("No found record\n");
return;
}
print_extend_contact(contact);
//update test
extend_value = NULL;
ret = contacts_svc_find_custom_type(CTS_TYPE_CLASS_EXTEND_DATA, "Family");
ret = contacts_svc_struct_get_value(contact, ret, &extend_value);
if(CTS_SUCCESS == ret)
contacts_svc_value_set_str(extend_value, CTS_EXTEND_VAL_DATA2_STR, "Children4");
contacts_svc_struct_store_value(contact, ret, extend_value);
contacts_svc_update_contact(contact);
contacts_svc_struct_free(contact);
contact = NULL;
ret = contacts_svc_get_contact(index, &contact);
if(ret < 0)
{
printf("No found record\n");
return;
}
print_extend_contact(contact);
contacts_svc_struct_free(contact);
}
int main()
{
contacts_svc_connect();
extend_data_test();
contacts_svc_disconnect();
return 0;
}
* @endcode
*
* @}
*/
//-->
#endif //__CTS_TYPES_H__
|
/**
* @file c_api_sparse_array_spec.cc
*
* @section LICENSE
*
* The MIT License
*
* @copyright Copyright (c) 2016 MIT and Intel Corporation
* @copyright Copyright (c) 2018-2020 Omics Data Automation, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
* @section DESCRIPTION
*
* Tests of C API for sparse array operations.
*/
#include "catch.h"
#include "c_api_sparse_array_spec.h"
#include "progress_bar.h"
#include "storage_manager.h"
#include "storage_posixfs.h"
#include "utils.h"
#include <cstring>
#include <iostream>
#include <map>
#include <time.h>
#include <sys/time.h>
#include <sstream>
SparseArrayTestFixture::SparseArrayTestFixture() {
  // Bring up the TileDB context shared by every test in this fixture.
  int status = tiledb_ctx_init(&tiledb_ctx_, NULL);
  CHECK_RC(status, TILEDB_OK);

  // All test arrays are created inside this shared workspace.
  status = tiledb_workspace_create(tiledb_ctx_, WORKSPACE.c_str());
  CHECK_RC(status, TILEDB_OK);
}
SparseArrayTestFixture::~SparseArrayTestFixture() {
  // Tear down the TileDB context created in the constructor.
  // NOTE(review): the workspace directory is not removed here — confirm that
  // leaving it on disk between runs is intentional.
  int status = tiledb_ctx_finalize(tiledb_ctx_);
  CHECK_RC(status, TILEDB_OK);
}
/* ****************************** */
/* PUBLIC METHODS */
/* ****************************** */
/**
 * Allocate a buffer of `size` elements of type T and fill it with the
 * pattern element[i] = (T)i so a later round trip can be validated.
 *
 * @param size   number of elements to allocate
 * @param buffer out-parameter; receives the newly allocated buffer
 * @param bytes  out-parameter; receives the allocation size in bytes
 * @return the allocated buffer as void*; caller owns it (release with delete[])
 */
template <typename T>
void* create_buffer(int32_t size, T** buffer, size_t* bytes) {
  T* typed_buffer = new T[size];
  // BUG FIX: the original copied *buffer into a local and then overwrote the
  // local with the new allocation, so the caller's pointer was never updated.
  *buffer = typed_buffer;
  *bytes = (size)*sizeof(T);
  for (int32_t i = 0; i < size; ++i)
    typed_buffer[i] = (T)i;
  return reinterpret_cast<void *>(typed_buffer);
}
/** Reset the first `size` elements of `buffer` to the type's zero value. */
template <typename T>
void clear_buffer(int32_t size, T* buffer) {
  int32_t idx = 0;
  while (idx < size) {
    buffer[idx] = (T)0;
    ++idx;
  }
}
/**
 * Verify the round-tripped contents match the pattern written by
 * create_buffer() (element i holds value (T)i), then release the buffer.
 */
template <typename T>
void validate_and_cleanup_buffer(int32_t size, T* buffer) {
  for(int32_t i = 0; i < size; ++i)
    CHECK(buffer[i] == (T)i);
  // BUG FIX: the buffer comes from new[], so it must be released with
  // delete[]; plain delete on an array is undefined behavior.
  delete [] buffer;
}
/**
 * Create a 1-D sparse array ("X" dimension, one attribute "MY_ATTRIBUTE" of
 * the given type), then prepare pattern buffers for a write/read round trip.
 *
 * NOTE(review): the function returns 0 immediately after preparing the
 * buffers (see the early `return 0;` below), so the write/read/validate
 * phases that follow are dead code and the heap buffers leak on every call.
 * Confirm whether this is a deliberate temporary disable.
 *
 * @param attribute_type TILEDB_* type code for the attribute
 * @param tile_extent    tile extent along the single dimension X
 * @param domain_lo      inclusive lower bound of the domain
 * @param domain_hi      inclusive upper bound of the domain
 * @param cell_order     TILEDB_ROW_MAJOR or TILEDB_COL_MAJOR
 * @param tile_order     TILEDB_ROW_MAJOR or TILEDB_COL_MAJOR
 * @return TILEDB_OK on success, TILEDB_ERR on error (currently always 0
 *         via the early return once schema creation succeeds)
 */
int SparseArrayTestFixture::create_sparse_array_1D(
const int attribute_type,
const int32_t tile_extent,
const int32_t domain_lo,
const int32_t domain_hi,
const int cell_order,
const int tile_order) {
// Error code
int rc;
// Setup array: one attribute of the requested type plus the int32 coordinates.
const int attribute_num = 1;
const char* attributes[] = { "MY_ATTRIBUTE" };
const char* dimensions[] = { "X"};
int32_t domain[] = { domain_lo, domain_hi };
int32_t tile_extents[] = { tile_extent };
const int types[] = { attribute_type, TILEDB_INT32 };
int compression[] = { TILEDB_NO_COMPRESSION, TILEDB_NO_COMPRESSION};
int compression_level[] = { 0, 0 };
// Set the array schema (0 capacity = default; 0 = sparse)
rc = tiledb_array_set_schema(
&array_schema_,
array_name_.c_str(),
attributes,
attribute_num,
0,
cell_order,
NULL,
compression,
compression_level,
NULL, // Offsets compression
NULL, // Offsets compression level
0, // Sparse
dimensions,
1,
domain,
2*sizeof(int32_t),
tile_extents,
sizeof(int32_t),
tile_order,
types);
if(rc != TILEDB_OK)
return TILEDB_ERR;
// Create the array
rc = tiledb_array_create(tiledb_ctx_, &array_schema_);
if(rc != TILEDB_OK)
return TILEDB_ERR;
// Free array schema
rc = tiledb_array_free_schema(&array_schema_);
if(rc != TILEDB_OK)
return TILEDB_ERR;
// NOTE(review): `nbytes` is first used as an ELEMENT count (domain_size/10)
// and is then overwritten by create_buffer() with a BYTE count — the two
// meanings are conflated from here on.
int32_t domain_size = domain_hi-domain_lo+1;
size_t nbytes = domain_size/10;
int32_t* buffer_coords = new int32_t[nbytes];
for (int32_t i = 0; i < domain_size/10; ++i) {
buffer_coords[i] = i;
}
size_t buffer_coords_size = nbytes * sizeof(int32_t);
// NOTE(review): create_buffer() allocates a fresh buffer internally, so one
// of the two buffer_coords allocations (this call vs. the new[] above) is
// leaked — the loop above is redundant with the fill done by create_buffer().
create_buffer(nbytes, &buffer_coords, &nbytes);
// Allocate the attribute buffer with the matching C++ type for the
// requested TileDB attribute type; `nbytes` becomes the byte size.
void *buffer = nullptr;
if (attribute_type == TILEDB_CHAR) {
char *typed_buffer = reinterpret_cast<char *>(buffer);
buffer = create_buffer(nbytes, &typed_buffer, &nbytes);
} else if (attribute_type == TILEDB_INT8) {
int8_t *typed_buffer = reinterpret_cast<int8_t *>(buffer);
buffer = create_buffer(nbytes, &typed_buffer, &nbytes);
} else if (attribute_type == TILEDB_INT16) {
int16_t *typed_buffer = reinterpret_cast<int16_t *>(buffer);
buffer = create_buffer(nbytes, &typed_buffer, &nbytes);
} else if (attribute_type == TILEDB_INT32) {
int32_t *typed_buffer = reinterpret_cast<int32_t *>(buffer);
buffer = create_buffer(nbytes, &typed_buffer, &nbytes);
} else if (attribute_type == TILEDB_INT64) {
int64_t *typed_buffer = reinterpret_cast<int64_t *>(buffer);
buffer = create_buffer(nbytes, &typed_buffer, &nbytes);
} else if (attribute_type == TILEDB_UINT8) {
uint8_t *typed_buffer = reinterpret_cast<uint8_t *>(buffer);
buffer = create_buffer(nbytes, &typed_buffer, &nbytes);
} else if (attribute_type == TILEDB_UINT16) {
uint16_t *typed_buffer = reinterpret_cast<uint16_t *>(buffer);
buffer = create_buffer(nbytes, &typed_buffer, &nbytes);
} else if (attribute_type == TILEDB_UINT32) {
uint32_t *typed_buffer = reinterpret_cast<uint32_t *>(buffer);
buffer = create_buffer(nbytes, &typed_buffer, &nbytes);
} else if (attribute_type == TILEDB_UINT64) {
uint64_t *typed_buffer = reinterpret_cast<uint64_t *>(buffer);
buffer = create_buffer(nbytes, &typed_buffer, &nbytes);
} else if (attribute_type == TILEDB_FLOAT32) {
float *typed_buffer = reinterpret_cast<float *>(buffer);
buffer = create_buffer(nbytes, &typed_buffer, &nbytes);
} else if (attribute_type == TILEDB_FLOAT64) {
double *typed_buffer = reinterpret_cast<double *>(buffer);
buffer = create_buffer(nbytes, &typed_buffer, &nbytes);
}
CHECK(buffer != nullptr);
// Package attribute buffer + coordinates for the TileDB write API.
std::vector<void *> buffers;
std::vector<size_t> buffer_sizes;
buffers.push_back(buffer);
buffer_sizes.push_back(nbytes);
buffers.push_back(buffer_coords);
buffer_sizes.push_back(buffer_coords_size);
// NOTE(review): early return — everything below (write, read, validation)
// is unreachable, and `buffer`/`buffer_coords` leak here. Confirm intent.
return 0;
// Intialize array
TileDB_Array* tiledb_array;
rc = tiledb_array_init(
tiledb_ctx_,
&tiledb_array,
array_name_.c_str(),
TILEDB_ARRAY_WRITE,
NULL,
NULL,
0);
if(rc != TILEDB_OK)
return TILEDB_ERR;
// Write array
rc = tiledb_array_write(tiledb_array, const_cast<const void **>(buffers.data()), buffer_sizes.data());
if(rc != TILEDB_OK)
return TILEDB_ERR;
// Finalize the array
rc = tiledb_array_finalize(tiledb_array);
if(rc != TILEDB_OK)
return TILEDB_ERR;
// Clear buffer before reading back.
// NOTE(review): clear/validate below use `domain_size` elements, but the
// buffer was created with far fewer elements (domain_size/10) — if this
// code is ever re-enabled, the sizes must be reconciled first.
if (attribute_type == TILEDB_CHAR) {
clear_buffer(domain_size, reinterpret_cast<char *>(buffer));
} else if (attribute_type == TILEDB_INT8) {
clear_buffer(domain_size, reinterpret_cast<int8_t *>(buffer));
} else if (attribute_type == TILEDB_INT16) {
clear_buffer(domain_size, reinterpret_cast<int16_t *>(buffer));
} else if (attribute_type == TILEDB_INT32) {
clear_buffer(domain_size, reinterpret_cast<int32_t *>(buffer));
} else if (attribute_type == TILEDB_INT64) {
clear_buffer(domain_size, reinterpret_cast<int64_t *>(buffer));
} else if (attribute_type == TILEDB_UINT8) {
clear_buffer(domain_size, reinterpret_cast<uint8_t *>(buffer));
} else if (attribute_type == TILEDB_UINT16) {
clear_buffer(domain_size, reinterpret_cast<uint16_t *>(buffer));
} else if (attribute_type == TILEDB_UINT32) {
clear_buffer(domain_size, reinterpret_cast<uint32_t *>(buffer));
} else if (attribute_type == TILEDB_UINT64) {
clear_buffer(domain_size, reinterpret_cast<uint64_t *>(buffer));
} else if (attribute_type == TILEDB_FLOAT32) {
clear_buffer(domain_size, reinterpret_cast<float *>(buffer));
} else if (attribute_type == TILEDB_FLOAT64) {
clear_buffer(domain_size, reinterpret_cast<double *>(buffer));
}
// Read array back into the (cleared) buffers.
rc = tiledb_array_init(
tiledb_ctx_,
&tiledb_array,
array_name_.c_str(),
TILEDB_ARRAY_READ,
NULL,
NULL,
0);
if(rc != TILEDB_OK)
return TILEDB_ERR;
rc = tiledb_array_read(tiledb_array, buffers.data(), buffer_sizes.data());
if(rc != TILEDB_OK)
return TILEDB_ERR;
// Finalize the array
rc = tiledb_array_finalize(tiledb_array);
if(rc != TILEDB_OK)
return TILEDB_ERR;
// Check buffer contents match the written pattern, then free the buffer.
if (attribute_type == TILEDB_CHAR) {
validate_and_cleanup_buffer(domain_size, reinterpret_cast<char *>(buffer));
} else if (attribute_type == TILEDB_INT8) {
validate_and_cleanup_buffer(domain_size, reinterpret_cast<int8_t *>(buffer));
} else if (attribute_type == TILEDB_INT16) {
validate_and_cleanup_buffer(domain_size, reinterpret_cast<int16_t *>(buffer));
} else if (attribute_type == TILEDB_INT32) {
validate_and_cleanup_buffer(domain_size, reinterpret_cast<int32_t *>(buffer));
} else if (attribute_type == TILEDB_INT64) {
validate_and_cleanup_buffer(domain_size, reinterpret_cast<int64_t *>(buffer));
} else if (attribute_type == TILEDB_UINT8) {
validate_and_cleanup_buffer(domain_size, reinterpret_cast<uint8_t *>(buffer));
} else if (attribute_type == TILEDB_UINT16) {
validate_and_cleanup_buffer(domain_size, reinterpret_cast<uint16_t *>(buffer));
} else if (attribute_type == TILEDB_UINT32) {
validate_and_cleanup_buffer(domain_size, reinterpret_cast<uint32_t *>(buffer));
} else if (attribute_type == TILEDB_UINT64) {
validate_and_cleanup_buffer(domain_size, reinterpret_cast<uint64_t *>(buffer));
} else if (attribute_type == TILEDB_FLOAT32) {
validate_and_cleanup_buffer(domain_size, reinterpret_cast<float *>(buffer));
} else if (attribute_type == TILEDB_FLOAT64) {
validate_and_cleanup_buffer(domain_size, reinterpret_cast<double *>(buffer));
}
return TILEDB_OK;
}
/**
 * Create a 2-D sparse array ("X"/"Y" dimensions, one int32 attribute
 * "ATTR_INT32"), optionally GZIP-compressed.
 *
 * @return TILEDB_OK on success, TILEDB_ERR on error
 */
int SparseArrayTestFixture::create_sparse_array_2D(
    const int64_t tile_extent_0,
    const int64_t tile_extent_1,
    const int64_t domain_0_lo,
    const int64_t domain_0_hi,
    const int64_t domain_1_lo,
    const int64_t domain_1_hi,
    const int64_t capacity,
    const bool enable_compression,
    const int cell_order,
    const int tile_order) {
  // Schema inputs: one attribute plus int64 coordinates.
  const int attribute_num = 1;
  const char* attributes[] = { "ATTR_INT32" };
  const char* dimensions[] = { "X", "Y" };
  int64_t domain[] = { domain_0_lo, domain_0_hi, domain_1_lo, domain_1_hi };
  int64_t tile_extents[] = { tile_extent_0, tile_extent_1 };
  const int types[] = { TILEDB_INT32, TILEDB_INT64 };
  const int dense = 0;

  // Same codec for the attribute and the coordinates.
  int compression[2];
  compression[0] = enable_compression ? TILEDB_GZIP : TILEDB_NO_COMPRESSION;
  compression[1] = compression[0];

  // Set the array schema.
  int rc = tiledb_array_set_schema(
      &array_schema_,
      array_name_.c_str(),
      attributes,
      attribute_num,
      capacity,
      cell_order,
      NULL,
      compression,
      NULL,
      NULL, // Offsets compression
      NULL, // Offsets compression level
      dense,
      dimensions,
      2,
      domain,
      4*sizeof(int64_t),
      tile_extents,
      2*sizeof(int64_t),
      tile_order,
      types);
  if (rc != TILEDB_OK)
    return TILEDB_ERR;

  // Create the array on disk.
  if (tiledb_array_create(tiledb_ctx_, &array_schema_) != TILEDB_OK)
    return TILEDB_ERR;

  // Release the schema handle.
  if (tiledb_array_free_schema(&array_schema_) != TILEDB_OK)
    return TILEDB_ERR;

  return TILEDB_OK;
}
/**
 * Read attribute "ATTR_INT32" over the given inclusive 2-D subarray.
 *
 * @param read_mode one of the TILEDB_ARRAY_READ* modes
 * @return a newly allocated int buffer holding the result (caller releases
 *         it with delete[]), or NULL on error
 */
int* SparseArrayTestFixture::read_sparse_array_2D(
    const int64_t domain_0_lo,
    const int64_t domain_0_hi,
    const int64_t domain_1_lo,
    const int64_t domain_1_hi,
    const int read_mode) {
  // Error code
  int rc;

  // Subarray covering the requested region (lo/hi per dimension, inclusive).
  const int64_t subarray[] = {
    domain_0_lo,
    domain_0_hi,
    domain_1_lo,
    domain_1_hi
  };

  // Subset over the single fixed attribute.
  const char* attributes[] = { "ATTR_INT32" };

  // Initialize the array in the input mode.
  TileDB_Array* tiledb_array;
  rc = tiledb_array_init(
      tiledb_ctx_,
      &tiledb_array,
      array_name_.c_str(),
      read_mode,
      subarray,
      attributes,
      1);
  if(rc != TILEDB_OK)
    return NULL;

  // Result buffer sized for the full extent of the subarray.
  int64_t domain_size_0 = domain_0_hi - domain_0_lo + 1;
  int64_t domain_size_1 = domain_1_hi - domain_1_lo + 1;
  int64_t cell_num = domain_size_0 * domain_size_1;
  int* buffer_a1 = new int[cell_num];
  void* buffers[] = { buffer_a1 };
  size_t buffer_size_a1 = cell_num * sizeof(int);
  size_t buffer_sizes[] = { buffer_size_a1 };

  // Read from array.
  rc = tiledb_array_read(tiledb_array, buffers, buffer_sizes);
  if(rc != TILEDB_OK) {
    // BUG FIX: release the result buffer on the error path (was leaked).
    delete [] buffer_a1;
    tiledb_array_finalize(tiledb_array);
    return NULL;
  }

  // Finalize the array.
  rc = tiledb_array_finalize(tiledb_array);
  if(rc != TILEDB_OK) {
    // BUG FIX: release the result buffer on the error path (was leaked).
    delete [] buffer_a1;
    return NULL;
  }

  // Success - ownership of the buffer passes to the caller.
  return buffer_a1;
}
/** Build the full array path by prefixing `name` with the workspace. */
void SparseArrayTestFixture::set_array_name(const char *name) {
  array_name_ = WORKSPACE;
  array_name_ += name;
}
/**
 * Write the full domain_size_0 x domain_size_1 grid in unsorted mode.
 * Cell (i, j) is written with attribute value i*domain_size_1 + j.
 *
 * @return TILEDB_OK on success, TILEDB_ERR on error
 */
int SparseArrayTestFixture::write_sparse_array_unsorted_2D(
    const int64_t domain_size_0,
    const int64_t domain_size_1) {
  // Error code
  int rc;

  // Generate attribute values and coordinates for the sparse write.
  int64_t cell_num = domain_size_0*domain_size_1;
  int* buffer_a1 = new int[cell_num];
  int64_t* buffer_coords = new int64_t[2*cell_num];
  int64_t coords_index = 0L;
  for (int64_t i = 0; i < domain_size_0; ++i) {
    for (int64_t j = 0; j < domain_size_1; ++j) {
      buffer_a1[i*domain_size_1+j] = i*domain_size_1+j;
      buffer_coords[2*coords_index] = i;
      buffer_coords[2*coords_index+1] = j;
      coords_index++;
    }
  }

  // Initialize the array for unsorted writing.
  TileDB_Array* tiledb_array;
  rc = tiledb_array_init(
      tiledb_ctx_,
      &tiledb_array,
      array_name_.c_str(),
      TILEDB_ARRAY_WRITE_UNSORTED,
      NULL,
      NULL,
      0);
  if(rc != TILEDB_OK) {
    // BUG FIX: release the buffers on the error path (they were leaked).
    delete [] buffer_a1;
    delete [] buffer_coords;
    return TILEDB_ERR;
  }

  // Write to array.
  const void* buffers[] = { buffer_a1, buffer_coords };
  size_t buffer_sizes[2];
  buffer_sizes[0] = cell_num*sizeof(int);
  buffer_sizes[1] = 2*cell_num*sizeof(int64_t);
  rc = tiledb_array_write(tiledb_array, buffers, buffer_sizes);
  if(rc != TILEDB_OK) {
    // BUG FIX: finalize and release the buffers on the error path.
    tiledb_array_finalize(tiledb_array);
    delete [] buffer_a1;
    delete [] buffer_coords;
    return TILEDB_ERR;
  }

  // Finalize the array; buffers are no longer needed either way.
  rc = tiledb_array_finalize(tiledb_array);
  delete [] buffer_a1;
  delete [] buffer_coords;
  if(rc != TILEDB_OK)
    return TILEDB_ERR;

  // Success
  return TILEDB_OK;
}
TEST_CASE_METHOD(SparseArrayTestFixture, "Test sparse write with attribute types", "[test_sparse_1D_array]") {
  // One sub-test per supported attribute type. Each entry: array name,
  // attribute type, tile extent, cell order, tile order. Domain is [0, 99].
  struct ArrayCase {
    const char* name;
    int attribute_type;
    int32_t tile_extent;
    int cell_order;
    int tile_order;
  };
  const ArrayCase cases[] = {
    { "sparse_test_char_100x100",    TILEDB_CHAR,    10, TILEDB_ROW_MAJOR, TILEDB_ROW_MAJOR },
    { "sparse_test_int8_100x100",    TILEDB_INT8,    20, TILEDB_ROW_MAJOR, TILEDB_ROW_MAJOR },
    { "sparse_test_int16_100x100",   TILEDB_INT16,   30, TILEDB_ROW_MAJOR, TILEDB_ROW_MAJOR },
    { "sparse_test_int32_100x100",   TILEDB_INT32,   40, TILEDB_ROW_MAJOR, TILEDB_ROW_MAJOR },
    { "sparse_test_int64_100x100",   TILEDB_INT64,   50, TILEDB_ROW_MAJOR, TILEDB_ROW_MAJOR },
    { "sparse_test_uint8_100x100",   TILEDB_UINT8,   60, TILEDB_ROW_MAJOR, TILEDB_ROW_MAJOR },
    { "sparse_test_uint16_100x100",  TILEDB_UINT16,  70, TILEDB_ROW_MAJOR, TILEDB_ROW_MAJOR },
    { "sparse_test_uint32_100x100",  TILEDB_UINT32,  80, TILEDB_ROW_MAJOR, TILEDB_ROW_MAJOR },
    { "sparse_test_uint64_100x100",  TILEDB_UINT64,  10, TILEDB_ROW_MAJOR, TILEDB_COL_MAJOR },
    { "sparse_test_float32_100x100", TILEDB_FLOAT32, 10, TILEDB_COL_MAJOR, TILEDB_ROW_MAJOR },
    { "sparse_test_float64_100x100", TILEDB_FLOAT64, 10, TILEDB_COL_MAJOR, TILEDB_COL_MAJOR },
  };
  for (const ArrayCase& c : cases) {
    set_array_name(c.name);
    int rc = create_sparse_array_1D(c.attribute_type, c.tile_extent, 0, 99,
                                    c.cell_order, c.tile_order);
    CHECK_RC(rc, TILEDB_OK);
  }
}
/**
* Test is to randomly read subregions of the array and
* check with corresponding value set by row_id*dim1+col_id
* Top left corner is always 4,4
* Test runs through 10 iterations to choose random
* width and height of the subregions
*/
TEST_CASE_METHOD(SparseArrayTestFixture, "Test random read subregions", "[test_random_sparse_sorted_reads]") {
// Error code
int rc;
// Parameters used in this test: a 5000x1000 domain tiled 100x100.
int64_t domain_size_0 = 5000;
int64_t domain_size_1 = 1000;
int64_t tile_extent_0 = 100;
int64_t tile_extent_1 = 100;
int64_t domain_0_lo = 0;
int64_t domain_0_hi = domain_size_0-1;
int64_t domain_1_lo = 0;
int64_t domain_1_hi = domain_size_1-1;
int64_t capacity = 0; // 0 means use default capacity
int cell_order = TILEDB_ROW_MAJOR;
int tile_order = TILEDB_ROW_MAJOR;
int iter_num = 10;
// Set array name
set_array_name("sparse_test_5000x1000_100x100");
// Create a progress bar
// NOTE(review): heap allocation is unnecessary here; a stack ProgressBar
// would also avoid the leak on the early-return path below.
ProgressBar* progress_bar = new ProgressBar();
// Create a dense integer array
rc = create_sparse_array_2D(
tile_extent_0,
tile_extent_1,
domain_0_lo,
domain_0_hi,
domain_1_lo,
domain_1_hi,
capacity,
false,
cell_order,
tile_order);
CHECK_RC(rc, TILEDB_OK);
// Write array cells with value = row id * COLUMNS + col id
// to disk
rc = write_sparse_array_unsorted_2D(domain_size_0, domain_size_1);
CHECK_RC(rc, TILEDB_OK);
// Test random subarrays and check with corresponding value set by
// row_id*dim1+col_id. Top left corner is always 4,4.
// NOTE(review): rand() is never seeded in this file, so the "random"
// subregions are the same on every run — confirm that determinism is wanted.
int64_t d0_lo = 4;
int64_t d0_hi = 0;
int64_t d1_lo = 4;
int64_t d1_hi = 0;
int64_t height = 0, width = 0;
for(int iter = 0; iter < iter_num; ++iter) {
height = rand() % (domain_size_0 - d0_lo);
width = rand() % (domain_size_1 - d1_lo);
d0_hi = d0_lo + height;
d1_hi = d1_lo + width;
int64_t index = 0;
// Read subarray
int *buffer = read_sparse_array_2D(
d0_lo,
d0_hi,
d1_lo,
d1_hi,
TILEDB_ARRAY_READ_SORTED_ROW);
REQUIRE(buffer != NULL);
// Check every cell against the expected value i*domain_size_1 + j.
for(int64_t i = d0_lo; i <= d0_hi; ++i) {
for(int64_t j = d1_lo; j <= d1_hi; ++j) {
CHECK(buffer[index] == i*domain_size_1+j);
if (buffer[index] != (i*domain_size_1+j)) {
std::cout << "mismatch: " << i
<< "," << j << "=" << buffer[index] << "!="
<< ((i*domain_size_1+j)) << "\n";
// NOTE(review): this early return leaks `buffer` and `progress_bar`.
return;
}
++index;
}
}
// Clean up
delete [] buffer;
// Update progress bar
progress_bar->load(1.0/iter_num);
}
// Delete progress bar
delete progress_bar;
}
/**
 * Fixture that toggles TileDB behavior through environment variables
 * (TILEDB_DISABLE_FILE_LOCKING, TILEDB_KEEP_FILE_HANDLES_OPEN) and verifies
 * that a freshly constructed PosixFS observes the current settings.
 */
class SparseArrayEnvTestFixture : SparseArrayTestFixture {
  public:
    // NOTE(review): never assigned anything but NULL in this class; looks
    // like dead state — confirm before removing.
    SparseArrayTestFixture *test_fixture;

    SparseArrayEnvTestFixture() {
      test_fixture = NULL;
    }

    ~SparseArrayEnvTestFixture() {
      // delete on NULL is a harmless no-op.
      delete test_fixture;
    }

    // Set TILEDB_DISABLE_FILE_LOCKING=1 and verify locking is reported off.
    void set_disable_file_locking() {
      CHECK(setenv("TILEDB_DISABLE_FILE_LOCKING", "1", 1) == 0);
      CHECK(is_env_set("TILEDB_DISABLE_FILE_LOCKING"));
      PosixFS fs;
      REQUIRE(!fs.locking_support());
    }

    // Clear TILEDB_DISABLE_FILE_LOCKING and verify locking is reported on.
    void unset_disable_file_locking() {
      unsetenv("TILEDB_DISABLE_FILE_LOCKING");
      CHECK(!is_env_set("TILEDB_DISABLE_FILE_LOCKING"));
      PosixFS fs;
      REQUIRE(fs.locking_support());
    }

    // Set TILEDB_KEEP_FILE_HANDLES_OPEN=1 and verify it is honored.
    void set_keep_write_file_handles_open() {
      CHECK(setenv("TILEDB_KEEP_FILE_HANDLES_OPEN", "1", 1) == 0);
      CHECK(is_env_set("TILEDB_KEEP_FILE_HANDLES_OPEN"));
      PosixFS fs;
      REQUIRE(fs.keep_write_file_handles_open());
    }

    // Clear TILEDB_KEEP_FILE_HANDLES_OPEN and verify it is honored.
    void unset_keep_write_file_handles_open() {
      unsetenv("TILEDB_KEEP_FILE_HANDLES_OPEN");
      // BUG FIX: this previously checked "KEEP_FILE_HANDLES_OPEN" (missing
      // the TILEDB_ prefix), a variable that is never set, so the assertion
      // passed vacuously instead of verifying the unsetenv() above.
      CHECK(!is_env_set("TILEDB_KEEP_FILE_HANDLES_OPEN"));
      PosixFS fs;
      REQUIRE(!fs.keep_write_file_handles_open());
    }

    // Create and fill the 16x16 array shared by the env test cases.
    int write_array() {
      // Set array name
      set_array_name("sparse_test_disable_file_locking_env");
      CHECK_RC(create_sparse_array_2D(4, 4, 0, 15, 0, 15, 0, false, TILEDB_ROW_MAJOR, TILEDB_ROW_MAJOR), TILEDB_OK);
      // Write array cells with value = row id * COLUMNS + col id to disk.
      CHECK_RC(write_sparse_array_unsorted_2D(16, 16), TILEDB_OK);
      return 0;
    }

    // Read back a subarray; only checks that the read succeeds.
    // NOTE(review): the buffer returned by read_sparse_array_2D() is leaked
    // here — acceptable in a short-lived test process, but worth capturing
    // and deleting.
    int read_array() {
      CHECK(read_sparse_array_2D(4, 0, 4, 0, TILEDB_ARRAY_READ) != NULL);
      return 0;
    }

    // True if the consolidation lock file exists inside the array directory.
    bool consolidation_file_lock_exists() {
      PosixFS fs;
      return fs.is_file(array_name_+"/"+TILEDB_SM_CONSOLIDATION_FILELOCK_NAME);
    }
};
// Write with both env toggles cleared, then verify reads succeed both with
// file locking enabled and disabled. Statement order matters: each helper
// mutates process-wide environment variables.
TEST_CASE_METHOD(SparseArrayEnvTestFixture, "Test reading/writing with env unset", "[test_sparse_read_with_env_unset]") {
unset_disable_file_locking();
unset_keep_write_file_handles_open();
write_array();
// The write (with locking enabled) is expected to create the lock file.
CHECK(consolidation_file_lock_exists());
// TILEDB_DISABLE_FILE_LOCK unset
unset_disable_file_locking();
read_array();
// TILEDB_DISABLE_FILE_LOCK=1
set_disable_file_locking();
read_array();
}
// Write with both env toggles set, then verify reads succeed both with
// file locking enabled and disabled. Statement order matters: each helper
// mutates process-wide environment variables.
TEST_CASE_METHOD(SparseArrayEnvTestFixture, "Test reading/writing with env set", "[test_sparse_read_with_env_set]") {
set_disable_file_locking();
set_keep_write_file_handles_open();
write_array();
// The lock file is expected to exist even when locking was disabled for the write.
CHECK(consolidation_file_lock_exists());
// TILEDB_DISABLE_FILE_LOCK unset
unset_disable_file_locking();
read_array();
// TILEDB_DISABLE_FILE_LOCK=1
set_disable_file_locking();
read_array();
}
|
package tr.com.minicrm.productgroup.data.mongo.collection;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Document;
/**
 * Mongo document holding a named numeric sequence, stored in the
 * "database_sequences" collection.
 * NOTE(review): presumably consumed by a sequence-generator service to
 * produce auto-incremented ids — confirm against callers.
 */
@Document(collection = "database_sequences")
@Getter
@Setter
@NoArgsConstructor
public class DatabaseSequence {
// Sequence name; doubles as the Mongo document id.
@Id
private String id;
// Last value recorded for this sequence.
private long seq;
}
|
<reponame>SodY2/meanAP
'use strict';
// AngularJS controller handling signup/signin, OAuth redirection, and the
// birth-date datepicker used on the authentication forms.
angular.module('users').controller('AuthenticationController', ['$scope', '$state', '$http', '$location', '$window', 'Authentication', 'PasswordValidator',
function($scope, $state, $http, $location, $window, Authentication, PasswordValidator) {
$scope.authentication = Authentication;
$scope.popoverMsg = PasswordValidator.getPopoverMsg();
// Get an eventual error defined in the URL query string:
$scope.error = $location.search().err;
// If user is signed in then redirect back home
if ($scope.authentication.user) {
$location.path('/');
}
// Submit the signup form. `isValid` comes from the form's ng-submit binding.
// NOTE(review): $http's .success()/.error() were removed in AngularJS 1.6 —
// migrate to .then()/.catch() before upgrading past 1.5.
$scope.signup = function(isValid) {
$scope.error = null;
if (!isValid) {
$scope.$broadcast('show-errors-check-validity', 'userForm');
return false;
}
$http.post('/api/auth/signup', $scope.credentials).success(function(response) {
// If successful we assign the response to the global user model
$scope.authentication.user = response;
// And redirect to the previous or home page
$state.go($state.previous.state.name || 'home', $state.previous.params);
}).error(function(response) {
$scope.error = response.message;
});
};
// Submit the signin form; mirrors signup() but posts to the signin endpoint.
$scope.signin = function(isValid) {
$scope.error = null;
if (!isValid) {
$scope.$broadcast('show-errors-check-validity', 'userForm');
return false;
}
$http.post('/api/auth/signin', $scope.credentials).success(function(response) {
// If successful we assign the response to the global user model
$scope.authentication.user = response;
// And redirect to the previous or home page
$state.go($state.previous.state.name || 'home', $state.previous.params);
}).error(function(response) {
$scope.error = response.message;
});
};
// OAuth provider request
$scope.callOauthProvider = function(url) {
if ($state.previous && $state.previous.href) {
url += '?redirect_to=' + encodeURIComponent($state.previous.href);
}
// Effectively call OAuth authentication route:
$window.location.href = url;
};
// State and helpers for the ui-bootstrap datepicker popup.
$scope.popup2 = {
opened: false
};
$scope.open2 = function() {
$scope.popup2.opened = true;
};
$scope.formats = ['dd.MM.yyyy'];
$scope.format = $scope.formats[0];
$scope.altInputFormats = ['dd.MM.yyyy'];
$scope.setDate = function(year, month, day) {
$scope.credentials.birthDate = new Date(year, month, day);
};
// NOTE(review): $scope.inlineOptions and $scope.dateOptions are never
// initialized in this controller — calling toggleMin() before they are set
// elsewhere will throw. Confirm where they are expected to come from.
$scope.toggleMin = function() {
$scope.inlineOptions.minDate = $scope.inlineOptions.minDate ? null : new Date();
$scope.dateOptions.minDate = $scope.inlineOptions.minDate;
};
// Demo events highlighted in the datepicker.
// NOTE(review): afterTomorrow is seeded with *today* and then assigned
// tomorrow's day-of-month + 1; this breaks across month boundaries —
// consider afterTomorrow.setDate(afterTomorrow.getDate() + 2).
var tomorrow = new Date();
tomorrow.setDate(tomorrow.getDate() + 1);
var afterTomorrow = new Date();
afterTomorrow.setDate(tomorrow.getDate() + 1);
$scope.events = [{
date: tomorrow,
status: 'full'
}, {
date: afterTomorrow,
status: 'partially'
}];
// Map a rendered datepicker day to a CSS class from $scope.events.
// NOTE(review): defined but never attached to $scope or any options object
// in this controller — presumably intended for the datepicker's customClass
// hook; confirm or remove.
function getDayClass(data) {
var date = data.date,
mode = data.mode;
if (mode === 'day') {
var dayToCheck = new Date(date).setHours(0, 0, 0, 0);
for (var i = 0; i < $scope.events.length; i++) {
var currentDay = new Date($scope.events[i].date).setHours(0, 0, 0, 0);
if (dayToCheck === currentDay) {
return $scope.events[i].status;
}
}
}
return '';
}
}
]);
|
import React from "react";
import BackgroundSlider from "react-background-slider";
import beach from "../assets/landing/beach.jpg";
import mountain from "../assets/landing/mountain.jpg";
import desert from "../assets/landing/desert.jpg";
import { Card, Button } from "reactstrap";
import { Link } from "react-router-dom";
const Landing = () => {
return (
<>
<BackgroundSlider
images={[beach, mountain, desert]}
duration={5}
transition={2}
/>
<Card
style={{
width: "20%",
height: "fitcontent",
margin: "0 auto",
opacity: "0.85",
textAlign: "center",
marginTop: "15%",
fontSize: "24px",
padding: "50px 10px",
}}
>
<span style={{ fontSize: "16px", fontWeight: "600" }}>
Explore the world at your fingertips
</span>
<Link to="/admin/beach">
<Button color="success">Let's Travel</Button>
</Link>
</Card>
</>
);
};
export default Landing;
|
#!/bin/sh
# Create a disk image that contains bootstrappr and packages.
# FIX: the original used bash-only [[ ]] under #!/bin/sh; POSIX [ ] is used
# instead, and "$0" is quoted so paths with spaces work.
THISDIR=$(/usr/bin/dirname "${0}")
DMGNAME="${THISDIR}/bootstrap.dmg"
# Remove any stale image from a previous run.
if [ -e "${DMGNAME}" ] ; then
  /bin/rm "${DMGNAME}"
fi
/usr/bin/hdiutil create -fs HFS+ -srcfolder "${THISDIR}/bootstrap" "${DMGNAME}"
|
#!/bin/sh
# Exit on error. FIX: options in the shebang are lost when the script is run
# as "sh script.sh", so -e is set explicitly here instead.
set -e

# Generate a GeoIP configuration file from environment variables when
# GEOIP_CONFIG_FILE is not already set, then run geoipupdate against it.
#
# FIXES relative to the original:
#  - [[ ]] bashisms replaced with POSIX `if [ ... ]` (this is /bin/sh); the
#    old `[[ ... ]] && { ... }` lists also aborted the whole script under
#    set -e whenever a variable was unset.
#  - LicenseKey was written with `>`, truncating the UserId line just
#    written; the file is now truncated once and appended to.
#  - all uses of $GEOIP_CONFIG_FILE are quoted.
if ! (: "${GEOIP_CONFIG_FILE?}") 2>/dev/null; then
  GEOIP_CONFIG_FILE="/etc/GeoIP.conf"
  # start from an empty file, then append each setting
  : > "$GEOIP_CONFIG_FILE"
  if [ -n "$GEOIP_USER_ID" ]; then
    echo "UserId $GEOIP_USER_ID" >> "$GEOIP_CONFIG_FILE"
  fi
  if [ -n "$GEOIP_LICENSE_KEY" ]; then
    echo "LicenseKey $GEOIP_LICENSE_KEY" >> "$GEOIP_CONFIG_FILE"
  fi
  echo "ProductIds ${GEOIP_PRODUCT_IDS:-"GeoLite2-City GeoLite2-Country"}" >> "$GEOIP_CONFIG_FILE"
  echo "DatabaseDirectory ${GEOIP_DIRECTORY:-"/usr/local/share/GeoIP"}" >> "$GEOIP_CONFIG_FILE"
fi

# execute the updates with verbose output
geoipupdate -f "$GEOIP_CONFIG_FILE" -v
|
#!/bin/bash
# standardize species names for the two models
# NOTE: all paths are relative — this script must be run from its own
# directory (it expects ./Models and ../../../scripts to exist).
python ../../../scripts/standardizeModelSpeciesNames.py --model1 Models/minimal_chem.inp Models/minimal_species_dictionary.txt --model2 Models/superminimal_chem.inp Models/superminimal_species_dictionary.txt
|
<gh_stars>10-100
package io.opensphere.core.units.length;
import io.opensphere.core.util.Utilities;
/**
 * A length with feet as its native unit. Conversions from other units go
 * through {@link Length#inFeet()}.
 */
public final class Feet extends Length
{
    /** Long label (singular form). */
    public static final String FEET_LONG_LABEL1 = "foot";

    /** Long label (plural form). */
    public static final String FEET_LONG_LABEL2 = "feet";

    /** Short label. */
    public static final String FEET_SHORT_LABEL = "ft";

    /** Serial version UID. */
    private static final long serialVersionUID = 1L;

    /**
     * Constructor.
     *
     * @param feet The magnitude of the length.
     */
    public Feet(double feet)
    {
        super(feet);
    }

    /**
     * Construct this length from another length, converting it to feet.
     *
     * @param dist The other length.
     */
    public Feet(Length dist)
    {
        super(Utilities.checkNull(dist, "dist").inFeet());
    }

    @Override
    public Feet clone()
    {
        return (Feet)super.clone();
    }

    @Override
    public String getLongLabel(boolean plural)
    {
        if (plural)
        {
            return FEET_LONG_LABEL2;
        }
        return FEET_LONG_LABEL1;
    }

    @Override
    public String getShortLabel(boolean plural)
    {
        // Same abbreviation regardless of plurality.
        return FEET_SHORT_LABEL;
    }

    @Override
    public double inFeet()
    {
        // Feet is the native unit, so the magnitude is returned unchanged.
        return getMagnitude();
    }
}
|
# Pack the Electron app directory into an asar archive, then delete the
# unpacked source tree.
APP_DIR=./OutApp/FISH_EDITOR-win32-x64/resources/app
asar pack "$APP_DIR" "$APP_DIR.asar"
rm -rf "$APP_DIR"
|
<gh_stars>0
#include "Loader.h"
// Initialize the loader with an empty scene: no vertices counted yet.
Loader::Loader()
{
    numVertices=0;
}
// Nothing to release: the aiScene is owned by the Assimp importer and the
// vectors clean themselves up.
Loader::~Loader()
{
}
// Import the file at filePath with Assimp (triangulated, smooth normals,
// flipped UVs) and, on success, copy its meshes into the loader's arrays.
// On failure the Assimp error string is logged and nothing is modified.
//
// NOTE(review): 'importer' is stack-local and owns the scene it returns, so
// the 'myScene' member dangles once this function returns. loadMeshes()
// runs before that happens, but confirm myScene is never dereferenced
// after loadScene() completes.
void Loader::loadScene(const char* filePath){
    Assimp::Importer importer;
    myScene = importer.ReadFile(filePath, aiProcess_Triangulate | aiProcess_GenSmoothNormals | aiProcess_FlipUVs);
    if(!myScene){
        std::clog<<"Couldn't load "<<filePath<<" "<<importer.GetErrorString()<<std::endl;
        return;
    } else {
        std::clog<<"Loaded "<<filePath<<" successfully"<<std::endl;
        loadMeshes(myScene);
    }
}
void Loader::loadMeshes(const aiScene* loadedScene){
for (int i = 0; i < myScene->mNumMeshes; ++i)
{
loadNormals(myScene->mMeshes[i]);
loadVertices(myScene->mMeshes[i]);
loadTextures(myScene->mMeshes[i]);
numVertices += myScene->mMeshes[i]->mNumVertices;
}
std::clog<<"Meshes loaded..."<<std::endl;
}
// Append one glm::vec3 per vertex normal of the mesh to normalArray.
// FIX: unsigned loop index (aiMesh::mNumVertices is unsigned); the vector
// is now built in a single expression instead of component-wise assignment
// into a reused temporary.
void Loader::loadNormals(aiMesh* loadedMesh){
    if(loadedMesh->HasNormals())
    {
        for(unsigned int i=0; i < loadedMesh->mNumVertices; ++i)
        {
            const aiVector3D & n = loadedMesh->mNormals[i];
            normalArray.push_back(glm::vec3(n[0], n[1], n[2]));
        }
    } else {
        std::clog<<"Meshes has no normal"<<std::endl;
    }
}
// Append one glm::vec3 per vertex position of the mesh to vertexArray.
// FIX: unsigned loop index to match aiMesh::mNumVertices.
void Loader::loadVertices(aiMesh* loadedMesh){
    for(unsigned int i=0; i < loadedMesh->mNumVertices; ++i)
    {
        const aiVector3D & v = loadedMesh->mVertices[i];
        vertexArray.push_back(glm::vec3(v[0], v[1], v[2]));
    }
}
// Append one glm::vec2 per vertex UV (first texture channel only) to
// textureArray.
// FIX: unsigned loop index to match aiMesh::mNumVertices.
void Loader::loadTextures(aiMesh* loadedMesh){
    if(loadedMesh->HasTextureCoords(0))
    {
        for(unsigned int i=0; i < loadedMesh->mNumVertices; ++i)
        {
            // Only channel 0 is consumed; Assimp stores UVs as 3D vectors.
            const aiVector3D & uv = loadedMesh->mTextureCoords[0][i];
            textureArray.push_back(glm::vec2(uv.x, uv.y));
        }
    } else {
        std::clog<<"Meshes has no texture"<<std::endl;
    }
}
// Write the three components of v3 to stdout, each followed by a space,
// then a newline.
void Loader::PrintVec3(glm::vec3 v3){
    std::cout << v3[0] << " " << v3[1] << " " << v3[2] << " " << std::endl;
}
// Write the two components of v2 to stdout, each followed by a space,
// then a newline.
void Loader::PrintVec2(glm::vec2 v2){
    std::cout << v2[0] << " " << v2[1] << " " << std::endl;
}
// Print every vec3 in the vector, one per line.
// FIX: use an unsigned index to match std::vector::size() (the original
// compared a signed int against an unsigned size). The vector is still
// taken by value to keep the declared interface unchanged.
void Loader::PrintVector(std::vector <glm::vec3> vec){
    for(std::size_t i=0; i<vec.size(); ++i)
        PrintVec3(vec[i]);
}
|
<reponame>msrivastav13/sfbulk2
/* eslint-disable header/header */
import * as fs from 'fs';
import { flags, SfdxCommand } from '@salesforce/command';
import { Messages } from '@salesforce/core';
import BulkAPI2 from 'node-sf-bulk2/dist/bulk2';
import { BulkAPI2Connection } from 'node-sf-bulk2';
// Initialize Messages with the current plugin directory
Messages.importMessagesDirectory(__dirname);
// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core,
// or any library that is using the messages framework can also be loaded this way.
// TODO: replace the package name with your new package's name
const messages = Messages.loadMessages('sfbulk2api', 'org');
/**
 * sfdx command: download the results of a completed Bulk API 2.0 query job
 * and write them to a local file (also echoed to stdout).
 *
 * Flags:
 *   -i, --jobid  id of the bulk query job whose results are fetched
 *   -p, --path   file path the results are written to
 */
export default class BulkQuryResults extends SfdxCommand {
  public static description = messages.getMessage('bulkCommandDescription');

  public static examples = [
    `$ sfdx bulk2:query --query "Select Id, Name From Account" --targetusername <EMAIL>
`,
  ];

  protected static flagsConfig = {
    // flag with a value (-n, --name=VALUE)
    jobid: flags.string({
      char: 'i',
      description: messages.getMessage('queryFlagDescription'),
    }),
    // NOTE(review): 'queryFlagDescription' is reused for both flags —
    // confirm a dedicated message key was not intended for --path.
    path: flags.filepath({
      char: 'p',
      description: messages.getMessage('queryFlagDescription'),
    }),
  };

  // Comment this out if your command does not require an org username
  protected static requiresUsername = true;

  // Comment this out if your command does not support a hub org username
  protected static supportsDevhubUsername = false;

  // Set this to true if your command requires a project workspace; 'requiresProject' is false by default
  protected static requiresProject = false;

  public async run(): Promise<string> {
    // this.org is guaranteed because requiresUsername=true, as opposed to supportsUsername
    const conn = this.org.getConnection();
    const bulkconnect: BulkAPI2Connection = {
      accessToken: conn.accessToken,
      apiVersion: '51.0',
      instanceUrl: conn.instanceUrl,
    };
    try {
      const bulkapi2 = new BulkAPI2(bulkconnect);
      const data = await bulkapi2.getBulkqueryResults(this.flags.jobid);
      fs.writeFileSync(this.flags.path, data, 'utf8');
      this.ux.log(data);
      return data;
    } catch (ex) {
      this.ux.error(ex);
      // NOTE(review): this path resolves to undefined although the declared
      // return type is Promise<string> — confirm callers tolerate that.
    }
  }
}
|
# Build every lite* sub-project and copy its artifacts into the editor tree.
cd "$(dirname "$0")"

EXTRA_DIR=../editor/js/extra
CSS_DIR=../editor/css

echo "LITEGL"
../../litegl/utils/pack.sh
cp -v ../../litegl/build/* "$EXTRA_DIR"

echo "LITESCENE"
../../litescene/utils/pack.sh
cp -v ../../litescene/build/* "$EXTRA_DIR"
cp -v ../../litescene/data/shaders.xml ../editor/data

echo "LITEGUI"
../../litegui/utils/pack.sh
cp -v ../../litegui/build/*.js "$EXTRA_DIR"
cp -v ../../litegui/build/*.css "$CSS_DIR"

echo "LITEGRAPH"
../../litegraph/utils/pack.sh
cp -v ../../litegraph/build/* "$EXTRA_DIR"
cp -v ../../litegraph/css/*.css "$CSS_DIR"

echo "LITEFILESERVER"
cp -v ../../litefileserver/src/litefileserver.js "$EXTRA_DIR"

echo "WEBGLTOCANVAS2D"
cp -v ../../canvas2DtoWebGL/src/Canvas2DtoWebGL.js "$EXTRA_DIR"
|
#!/bin/sh
# perform unit tests
#
ABOUT='peform unit tests'
USAGE='[<...OPTIONS>] [<TEST-UTIL>] [[--]<...passthru args>]'
COPYRIGHT='Copyright (c) 2018-2019, Doug Bird. All Rights Reserved.'
ME='tests.sh'
#
# resolve $APP_DIR: derive this script's directory, then treat its parent
# as the project root (unless APP_DIR is already exported)
[ -n "$APP_DIR" ] || { ME_DIR="/$0"; ME_DIR=${ME_DIR%/*}; ME_DIR=${ME_DIR:-.}; ME_DIR=${ME_DIR#/}/; ME_DIR=$(cd "$ME_DIR"; pwd); APP_DIR=$(cd $ME_DIR/../; pwd); }
DOC_ROOT=$APP_DIR/docs # documentation root directory
PHPUNIT_BIN=$APP_DIR/vendor/bin/phpunit # phpunit executable
PHPUNIT_TESTS_ROOT=$APP_DIR/tests # unit tests root directory
TEXT_COVERAGE_BASENAME=coverage.txt # text coverage report filename
HTML_COVERAGE_SYMLINK= # html coverage report symlink
#
# exit codes
EXIT_CODE_MISSING_DEP=3
EXIT_CODE_FAILED_TEST=4
EXIT_CODE_FAILED_REFORMAT=20
# exit statuses that cmd_status_filter must never propagate as-is
CMD_STATUS_DONTUSE="255 2 $EXIT_CODE_FAILED_TEST $EXIT_CODE_MISSING_DEP"
# Suggest the --usage flag to the user (printed after option errors).
print_hint() {
  printf '%s\n' " Hint, try: $ME --usage"
}
# Escape the characters that are special inside a sed BRE pattern
# ([ ] \ / . *).
# FIX: printf replaces echo so arguments that look like echo options
# (e.g. "-n") are passed through literally instead of being consumed.
sedescape() {
  printf '%s\n' "$*" | sed 's/\([[\/.*]\|\]\)/\\&/g'
}
# option state defaults
SKIP_TESTS=0
PRINT_COVERAGE=0
HTML_COVERAGE_REPORT=0
SKIP_COVERAGE_REPORT=0
OPTION_STATUS=0
# parse short options with getopts; long options arrive through the '-'
# pseudo-option ("--name[=value]" lands in $OPTARG)
while getopts :?qhua-: arg; do { case $arg in
h|u|a) HELP_MODE=1;;
-) LONG_OPTARG="${OPTARG#*=}"; case $OPTARG in
help|usage|about) HELP_MODE=1;;
skip-coverage) SKIP_COVERAGE_REPORT=1;;
html-coverage) HTML_COVERAGE_REPORT=1; SKIP_COVERAGE_REPORT=0;;
print-coverage) PRINT_COVERAGE=1;;
show-coverage) PRINT_COVERAGE=1;;
coverage) PRINT_COVERAGE=1;;
reformat-only|skip-tests) SKIP_TESTS=1; HTML_COVERAGE_REPORT=1; SKIP_COVERAGE_REPORT=1;;
'') break ;; # end option parsing
*) >&2 echo "$ME: unrecognized long option --$OPTARG"; OPTION_STATUS=2;;
esac ;;
*) >&2 echo "$ME: unrecognized option -$OPTARG"; OPTION_STATUS=2;;
esac } done
shift $((OPTIND-1)) # remove parsed options and args from $@ list
[ "$OPTION_STATUS" = 0 ] || { >&2 echo "$ME: (FATAL) one or more invalid options"; >&2 print_hint; exit $OPTION_STATUS; }
# --help / --usage / --about output
if [ "$HELP_MODE" ]; then
echo "$ME"
echo "$ABOUT"
echo "$COPYRIGHT"
echo ""
echo "Usage:"
echo "  $ME $USAGE"
echo ""
echo "Options:"
echo "  --skip-coverage"
echo "   Always skip creating coverage reports."
echo ""
echo "  --html-coverage"
echo "   Creates a coverage report in HTML format in a hidden folder in the project's 'docs' directory."
echo "   Ignored if xdebug is not available."
echo ""
echo "  --print-coverage"
echo "   Outputs a text coverage report after unit test completion."
echo "   Ignored if xdebug is not available."
echo ""
echo "  --reformat-only"
echo "   Skip all tests and just reformat existing HTML coverage report(s)."
echo ""
echo "Operands:"
echo "  <TEST-UTIL>"
echo "   Optionally specify a test util; otherwise all test utils are executed."
echo "   Acceptable Values: phpunit"
echo "   Test Suite Descriptions:"
echo "      phpunit: \"Unit\" phpunit test util; see phpunit.xml"
echo "         If xdebug is available, a coverage report in text format is (re)generated unless the '--skip-coverage' option is provided."
echo "         Coverage report path: $DOC_ROOT/$TEXT_COVERAGE_BASENAME"
# NOTE(review): $HTML_COVERAGE_ROOT is never assigned anywhere in this
# script, so the line below prints an empty path — confirm intent.
echo "         HTML coverage report dir: $HTML_COVERAGE_ROOT"
echo ""
echo "Exit code meanings:"
echo "   2: command-line usage error"
echo "   $EXIT_CODE_MISSING_DEP: missing required dependency"
echo "   $EXIT_CODE_FAILED_TEST: one or more tests failed"
echo "   $EXIT_CODE_FAILED_REFORMAT: failed to reformat HTML coverage report"
exit 0
fi
# Normalize a child command's exit status before propagating it with
# "exit": non-numeric statuses, reserved statuses (CMD_STATUS_DONTUSE) and
# the shell's special 126-165 range all collapse to 1; anything else is
# returned unchanged.
cmd_status_filter() {
cmd_status=$1
! [ "$cmd_status" -eq "$cmd_status" ] 2> /dev/null && return 1
test "${CMD_STATUS_DONTUSE#*$cmd_status}" != "$CMD_STATUS_DONTUSE" && return 1
( [ "$cmd_status" -lt "126" ] || [ "$cmd_status" -gt "165" ] ) && return $cmd_status
return 1
}
# Memoized check (cached in PHPUNIT_STATUS; -1 means "not checked yet")
# that the phpunit binary installed by composer is executable.
PHPUNIT_STATUS=-1
phpunit_sanity_check() {
[ "$PHPUNIT_STATUS" = "-1" ] || return $PHPUNIT_STATUS
[ -x "$PHPUNIT_BIN" ] || {
>&2 echo "$ME: phpunit binary '$PHPUNIT_BIN' is inaccessible, have you run composer?"
PHPUNIT_STATUS=$EXIT_CODE_MISSING_DEP
return $EXIT_CODE_MISSING_DEP
}
PHPUNIT_STATUS=0
}
#
# phpunit wrapper function: runs the vendored phpunit, reports a failure to
# stderr and returns the filtered exit status
#
phpunit() {
$PHPUNIT_BIN "$@" || {
cmd_status=$?
>&2 echo "$ME: phpunit failed with exit code $cmd_status"
cmd_status_filter $cmd_status
return
}
return 0
}
# Memoized check (cached in XDEBUG_STATUS) that the php in PATH has the
# xdebug extension loaded; coverage generation requires it.
XDEBUG_STATUS=-1
xdebug_sanity_check() {
[ "$XDEBUG_STATUS" != "-1" ] && return $XDEBUG_STATUS
php -m 2> /dev/null | grep xdebug > /dev/null 2>&1
XDEBUG_STATUS=$?
return $XDEBUG_STATUS
}
# Succeeds when a text coverage report should be generated; downgrades to
# "skip" (with a notice) when xdebug is missing.
phpunit_coverage_check() {
[ "$SKIP_COVERAGE_REPORT" = "0" ] || {
return 1
}
xdebug_sanity_check && return
>&2 echo "$ME: (NOTICE) xdebug is not available, will skip coverage reports"
SKIP_COVERAGE_REPORT=1
return 1
}
# evaluate once up-front so the notice (if any) prints early
phpunit_coverage_check
# Succeeds when an HTML coverage report should be generated; downgrades to
# "skip" (with a notice) when xdebug is missing.
phpunit_html_coverage_check() {
[ "$HTML_COVERAGE_REPORT" = "1" ] || return 1
xdebug_sanity_check && return 0
>&2 echo "$ME: (NOTICE) xdebug is not available, will skip html coverage reports"
HTML_COVERAGE_REPORT=0
return 1
}
# evaluate once up-front so the notice (if any) prints early
phpunit_html_coverage_check
# Print the path of the text coverage report; an optional per-suite suffix
# selects "coverage-<suffix>.txt".
# FIX (all three helpers): the path was previously passed to printf as the
# FORMAT string, so a '%' anywhere in $DOC_ROOT or the suffix would corrupt
# the output; a literal '%s' format is used instead.
print_phpunit_text_coverage_path() {
  local test_suffix=$1
  if [ -z "$test_suffix" ]; then
    printf '%s' "$DOC_ROOT/$TEXT_COVERAGE_BASENAME"
  else
    printf '%s' "$DOC_ROOT/coverage-$test_suffix.txt"
  fi
}
# Print the (hidden) directory of the HTML coverage report, optionally
# suffixed per test suite.
print_phpunit_html_coverage_path() {
  local test_suffix=$1
  if [ -z "$test_suffix" ]; then
    printf '%s' "$DOC_ROOT/.coverage"
  else
    printf '%s' "$DOC_ROOT/.coverage-$test_suffix"
  fi
}
# Print the HTML coverage symlink path; prints nothing when the symlink
# feature is disabled (HTML_COVERAGE_SYMLINK empty).
print_phpunit_html_coverage_symlink_path() {
  [ -n "$HTML_COVERAGE_SYMLINK" ] || return 0
  local test_suffix=$1
  if [ -z "$test_suffix" ]; then
    printf '%s' "$HTML_COVERAGE_SYMLINK"
  else
    printf '%s' "$HTML_COVERAGE_SYMLINK-$test_suffix"
  fi
}
# Print the phpunit CLI flags that enable whichever coverage outputs are
# active (HTML and/or text), creating the HTML symlink on first use.
print_phpunit_coverage_opt() {
local test_suffix=$1
if phpunit_html_coverage_check; then
printf " --coverage-html=$(print_phpunit_html_coverage_path $test_suffix) "
if ( [ -n "$HTML_COVERAGE_SYMLINK" ] && [ ! -e "$HTML_COVERAGE_SYMLINK" ] && [ -d "$(dirname $HTML_COVERAGE_SYMLINK)" ] ); then
ln -s $(print_phpunit_html_coverage_path $test_suffix) $(print_phpunit_html_coverage_symlink_path)
fi
fi
if phpunit_coverage_check; then
printf " --coverage-text=$(print_phpunit_text_coverage_path $test_suffix) "
fi
}
# Dump the text coverage report to stdout when --print-coverage was given.
print_phpunit_coverage_report() {
local test_suffix=$1
phpunit_coverage_check || return 0
[ "$PRINT_COVERAGE" = "1" ] || return 0
[ -f "$(print_phpunit_text_coverage_path $test_suffix)" ] || return 0
# NOTE(review): the existence check above honors $test_suffix but the two
# lines below always print the suffix-less report — confirm intended.
printf "\n$(print_phpunit_text_coverage_path):\n"
cat $(print_phpunit_text_coverage_path)
}
# Print the label for a test run: "phpunit" or "phpunit-<suffix>".
print_phpunit_test_label() {
  local test_suffix=$1
  echo "phpunit${test_suffix:+-$test_suffix}"
}
REFORMAT_STATUS=0
# Report a reformat error to stderr and latch REFORMAT_STATUS to the
# dedicated failure exit code.
reformat_failed() {
local message="$1"
local test_suffix=$2
local output=
output="$ME: error during reformat of $(print_phpunit_test_label $test_suffix) HTML coverage report"
if [ ! -z "$message" ]; then
output="$output: $message"
fi
>&2 echo "$output"
REFORMAT_STATUS=$EXIT_CODE_FAILED_REFORMAT
return $REFORMAT_STATUS
}
# Post-process the HTML coverage report in place:
#  1. copy the report into a hidden temp dir next to it,
#  2. strip the absolute "$APP_DIR/" prefix from every *.html file so the
#     report is machine-independent,
#  3. swap the reformatted copy in, keeping a timestamped backup until the
#     swap succeeds,
#  4. best-effort: open the result in whatever browser/opener is available.
reformat_html_coverage() {
[ "$HTML_COVERAGE_REPORT" = "1" ] || return 0
local test_suffix=$1
local coverage_dir="$(print_phpunit_html_coverage_path $test_suffix)"
local temp_coverage_dir=
echo "$ME: reformat $(print_phpunit_test_label $test_suffix) HTML coverage report: started"
[ -d "$coverage_dir" ] || {
reformat_failed "directory not found: $coverage_dir" $test_suffix; return $?
}
temp_coverage_dir=$(cd "$coverage_dir/../" && pwd) || {
reformat_failed "cannot stat parent directory: $coverage_dir" $test_suffix; return $?
}
temp_coverage_dir="$temp_coverage_dir/.$(basename $coverage_dir)"
rm -rf $temp_coverage_dir
mkdir -p $temp_coverage_dir || {
reformat_failed "failed to create temp dir: $temp_coverage_dir" $test_suffix; return $?
}
# enumerate the HTML files that need path rewriting
rm -rf $temp_coverage_dir/.html-files
find $coverage_dir -type f -name '*.html' > $temp_coverage_dir/.html-files || {
reformat_failed "failed to find HTML coverage files, 'find' terminated with exit status $?" $test_suffix; return $?
}
cp -Rp $coverage_dir/. $temp_coverage_dir/ || {
reformat_failed "failed to copy to temp dir: $temp_coverage_dir" $test_suffix; return $?
}
# rewrite each HTML file into the temp copy with $APP_DIR/ removed
local temp_filename=
while read filename; do
temp_filename=$(echo $filename | sed "s|$coverage_dir|\\$temp_coverage_dir|")
sed "s|$APP_DIR/||g" $filename > $temp_filename
#echo "temp_filename: $temp_filename"
#echo "filename: $filename"
done < $temp_coverage_dir/.html-files
# pick a timestamped backup dir name, retrying up to 5 times one second
# apart in case of a collision
local backup_dir=
for i in $(seq 1 5); do
backup_dir="$(dirname $coverage_dir)/.$(basename $coverage_dir)-"$(date "+%Y%m%d%H%M%S")
[ ! -d "$backup_dir" ] && break
sleep 1
done
mv $coverage_dir $backup_dir || {
reformat_failed "failed to create backup coverage, 'mv' terminated with exit status $?" $test_suffix; return $?
}
mv $temp_coverage_dir $coverage_dir || {
reformat_failed "failed to replace coverage, 'mv' terminated with exit status $?" $test_suffix; return $?
}
rm -rf $backup_dir
echo "$ME: reformat $(print_phpunit_test_label $test_suffix) HTML coverage report: complete"
# try generic openers first, then fall back to launching a browser directly
local open_path="$coverage_dir/index.html"
echo "open_path: $open_path"
for open_cmd in xdg-open open; do
command -v $open_cmd > /dev/null && {
$open_cmd $open_path > /dev/null 2>&1 && {
return
}
}
done
for open_bin in chromium-browser firefox iceweasel safari; do
command -v $open_bin > /dev/null && {
nohup $open_bin $open_path > /dev/null 2>&1 &
break
}
done
}
# Normalize the text coverage report so successive runs diff cleanly:
# remove the dated "Code Coverage Report:" header block and collapse runs
# of blank lines. Works on a hidden temp copy, then moves it back.
reformat_txt_coverage() {
[ "$SKIP_COVERAGE_REPORT" != "1" ] || return 0
[ -f $DOC_ROOT/$TEXT_COVERAGE_BASENAME ] || return 0
#
# prepare temp file
rm -f $DOC_ROOT/.$TEXT_COVERAGE_BASENAME
cp $DOC_ROOT/$TEXT_COVERAGE_BASENAME $DOC_ROOT/.$TEXT_COVERAGE_BASENAME
#
# remove report date (delete everything between the header line and the
# " Summary" line, exclusive)
MENU_STARTWITH=$(sedescape 'Code Coverage Report:') || return
MENU_ENDWITH=$(sedescape ' Summary') || return
sed "/^$MENU_STARTWITH/,/^$MENU_ENDWITH/{/^$MENU_STARTWITH/!{/^$MENU_ENDWITH/!d}}" "$DOC_ROOT/.$TEXT_COVERAGE_BASENAME" > "$DOC_ROOT/..$TEXT_COVERAGE_BASENAME"
mv "$DOC_ROOT/..$TEXT_COVERAGE_BASENAME" "$DOC_ROOT/.$TEXT_COVERAGE_BASENAME" || return
#
# trim multi newlines
sed '/^$/N;/^\n$/D' "$DOC_ROOT/.$TEXT_COVERAGE_BASENAME" > "$DOC_ROOT/..$TEXT_COVERAGE_BASENAME" || return
mv "$DOC_ROOT/..$TEXT_COVERAGE_BASENAME" "$DOC_ROOT/.$TEXT_COVERAGE_BASENAME" || return
sed '1{/^$/d}' "$DOC_ROOT/.$TEXT_COVERAGE_BASENAME" > "$DOC_ROOT/..$TEXT_COVERAGE_BASENAME" || return
mv "$DOC_ROOT/..$TEXT_COVERAGE_BASENAME" "$DOC_ROOT/.$TEXT_COVERAGE_BASENAME" || return
#
# copy temp file to $TEXT_COVERAGE_BASENAME
mv "$DOC_ROOT/.$TEXT_COVERAGE_BASENAME" "$DOC_ROOT/$TEXT_COVERAGE_BASENAME" || return
}
TEST_UTIL=$1
#
# determine if wrapper mode specified by TEST_UTIL
#
if [ -n "$TEST_UTIL" ]; then
shift
#
# apply phpunit wrapper mode: pass the remaining args straight to phpunit,
# then post-process whatever coverage it produced
#
if [ "$TEST_UTIL" = "phpunit" ]; then
phpunit_sanity_check || exit
echo "phpunit args: $@"
phpunit "$@" || {
cmd_status_filter $?
exit
}
reformat_txt_coverage
print_phpunit_coverage_report
reformat_html_coverage
exit 0
fi
>&2 echo "$ME: unrecognized <TEST-UTIL>: $TEST_UTIL"
>&2 print_hint
exit 2
fi
#
# no TEST_UTIL specified: perform ALL tests
#
#
# sanity check test commands
#
phpunit_sanity_check || exit
#
# tests status
#
TESTS_STATUS=0
echo "phpunit args: $(print_phpunit_coverage_opt)"
#
# run all phpunit tests (unless --reformat-only/--skip-tests was given)
#
CMD_STATUS=0
if [ "$SKIP_TESTS" = "0" ]; then
phpunit $(print_phpunit_coverage_opt)
CMD_STATUS=$?
fi
if [ "$CMD_STATUS" = "0" ]; then
reformat_txt_coverage
print_phpunit_coverage_report
reformat_html_coverage
else
TESTS_STATUS=$EXIT_CODE_FAILED_TEST
fi
# report failures; test failures take precedence over reformat failures
[ "$REFORMAT_STATUS" = "0" ] || {
>&2 echo "$ME: failed to reformat one or more HTML coverage reports"
}
[ "$TESTS_STATUS" = "0" ] || {
>&2 echo "$ME: one or more tests failed"
exit $TESTS_STATUS
}
[ "$REFORMAT_STATUS" = "0" ] || {
exit $REFORMAT_STATUS
}
|
import re
def process_log_file(log_file_path, pattern):
    """Return the stripped lines of ``log_file_path`` that match ``pattern``.

    ``pattern`` is a regular expression searched anywhere in each line
    (``re.search`` semantics); matching lines are returned in file order.
    """
    regex = re.compile(pattern)
    with open(log_file_path, 'r') as handle:
        return [raw.strip() for raw in handle if regex.search(raw)]
# Example usage (replace the placeholder path with a real log file before
# running; as written this raises FileNotFoundError at import time).
log_file_path = '/path/to/log/file.log'
pattern = r'I: Parent1'
result = process_log_file(log_file_path, pattern)
print(result)
|
<gh_stars>0
import React from "react";
import Grid from "@material-ui/core/Grid";
import TextField from "@material-ui/core/TextField";
import Button from "@material-ui/core/Button";
import Fab from "@material-ui/core/Fab";
import Icon from "@material-ui/core/Icon";
import AddIcon from "@material-ui/icons/Add";
import RelationshipModal from "../components/RelationshipModal";
import ButtonsRow from "../components/ButtonsRow";
import changeMode from "../functions/changeMode";
import handleSubmit from "../functions/handleSubmit";
import handleChange from "../functions/handleChange";
import getCookie from "../functions/getCookie";
class ReposicionesDetalles extends React.Component {
state = {
currentUrl: "reposiciones",
mode: "read",
loading: false,
form: {
id: 0,
date: "",
medicinePurchaseOrders: [{ medicineId: 0, medicineName: "", quantity: 0 }]
},
modalShow: [false]
};
changeMode = changeMode.bind(this);
handleSubmit = handleSubmit.bind(this);
handleChange = handleChange.bind(this);
async getData() {
const response = await fetch(window.ApiUrl + this.state.currentUrl + "/" + this.props.match.params.id, {
headers: {
Authorization: "BEARER " + getCookie("token")
}
});
const data = await response.json();
let medicinePurchaseOrders = [];
data.medicinePurchaseOrders.forEach(medicinePurchaseOrder => {
medicinePurchaseOrders.push({
medicineId: medicinePurchaseOrder.medicineId,
medicineName: medicinePurchaseOrder.medicine.name,
quantity: medicinePurchaseOrder.quantity
});
});
this.setState({
form: {
id: data.id,
date: data.date,
medicinePurchaseOrders
}
});
}
// async getMedicines() {
// const response = await fetch(window.ApiUrl + "medicamentos?order=name");
// const data = await response.json();
// data.forEach(medicine => {
// this.setState({
// medicines: {
// ...this.state.medicines,
// [medicine.id]: medicine.name
// }
// });
// });
// }
componentDidMount() {
if (this.props.match.params.id !== "añadir") {
this.getData();
}
// this.getMedicines();
this.changeMode();
}
componentDidUpdate() {
this.props.history.listen(location => this.changeMode());
}
addRow = () => {
let { medicinePurchaseOrders } = this.state.form;
let { modalShow } = this.state;
medicinePurchaseOrders.push({ medicineId: 0, medicineName: "", quantity: 0 });
modalShow.push(false);
this.setState({
form: {
...this.state.form,
medicinePurchaseOrders
},
modalShow
});
};
deleteRow = index => {
let { medicinePurchaseOrders } = this.state.form;
if (medicinePurchaseOrders.length > 1) {
medicinePurchaseOrders.splice(index, 1);
this.setState({
form: {
...this.state.form,
medicinePurchaseOrders
}
});
} else {
window.container.error("Debe existir al menos un elemento", "Error", {
showAnimation: "animated rubberBand",
hideAnimation: "animated flipOutX",
timeOut: 5000,
extendedTimeOut: 2000
});
}
};
selectRelation = (index, id, name) => {
let { medicinePurchaseOrders } = this.state.form;
let { modalShow } = this.state;
medicinePurchaseOrders[index]["medicineId"] = id;
medicinePurchaseOrders[index]["medicineName"] = name;
modalShow[index] = false;
this.setState({
form: {
...this.state.form,
medicinePurchaseOrders
},
modalShow
});
};
handleRowChange = (e, index) => {
//Maneja el cambio en la cantidad
let { name, value } = e.target;
let { medicinePurchaseOrders } = this.state.form;
medicinePurchaseOrders[index][name] = value;
this.setState({
form: {
...this.state.form,
medicinePurchaseOrders
}
});
};
render() {
return (
<div>
<Grid container direction="column">
<Grid container direction="row" justify="center" className="mt-5">
<Grid item>
<h1>Reposiciones</h1>
</Grid>
</Grid>
<form onSubmit={this.handleSubmit}>
{this.state.mode !== "create" && (
<Grid container direction="row" justify="center" className="mt-3">
<Grid item>
<TextField
label="ID"
margin="normal"
variant="outlined"
name="id"
value={this.state.form.id}
InputProps={{ readOnly: true }}
/>
</Grid>
</Grid>
)}
<Grid container direction="row" justify="center">
<Grid item>
<TextField
required
type="datetime-local"
label="Fecha"
margin="normal"
variant="outlined"
name="date"
onChange={this.handleChange}
value={this.state.form.date}
InputProps={{
readOnly: this.state.mode === "read" || this.state.mode === "delete"
}}
InputLabelProps={{ shrink: true }}
/>
</Grid>
</Grid>
{this.state.form.medicinePurchaseOrders.map((element, index) => (
<Grid container key={index} direction="row" justify="center" spacing={5}>
<Grid item className="mt-3">
<TextField
required
label="Medicamento"
margin="none"
variant="outlined"
name="medicineName"
value={element.medicineName}
style={{ width: 145 }}
InputProps={{
readOnly: true
}}
/>
<Button
disabled={this.state.mode === "read" || this.state.mode === "delete"}
className="mt-1 px-0"
size="large"
variant="contained"
color="primary"
onClick={() => {
let { modalShow } = this.state;
modalShow[index] = true;
this.setState({ modalShow });
}}
>
<i className="fas fa-2x fa-pills" />
</Button>
<RelationshipModal
show={this.state.modalShow[index]}
onHide={() => {
let { modalShow } = this.state;
modalShow[index] = false;
this.setState({ modalShow });
}}
entity={"Medicamentos"}
history={this.props.history}
selectRelation={(id, name) => this.selectRelation(index, id, name)}
/>
</Grid>
<Grid item>
<TextField
required
type="number"
label="Cantidad"
margin="normal"
variant="outlined"
name="quantity"
onChange={e => this.handleRowChange(e, index)}
value={element.quantity}
InputProps={{
readOnly: this.state.mode === "read" || this.state.mode === "delete"
}}
/>
</Grid>
{(this.state.mode === "create" || this.state.mode === "update") && (
<Grid item className="mt-4">
<Fab
size="small"
className="bg-danger"
disabled={this.state.mode === "read" || this.state.mode === "delete"}
onClick={() => this.deleteRow(index)}
>
<Icon className="fas fa-minus-circle" />
</Fab>
</Grid>
)}
</Grid>
))}
{(this.state.mode === "create" || this.state.mode === "update") && (
<Grid container direction="row" justify="center" spacing={5}>
<Grid item>
<Fab onClick={this.addRow} color="primary" size="medium">
<AddIcon />
</Fab>
</Grid>
</Grid>
)}
<ButtonsRow
id={this.props.match.params.id}
mode={this.state.mode}
history={this.props.history}
edit={false}
loading={this.state.loading}
/>
</form>
</Grid>
</div>
);
}
}
export default ReposicionesDetalles;
|
<!DOCTYPE html>
<html>
<head>
  <title>Date Formatter</title>
</head>
<body>
  <form>
    <!-- FIX: the element was misspelled "lable"; the HTML element is <label> -->
    <label>Enter Date: </label>
    <input type="date" name="date">
    <input type="submit">
  </form>
  <!-- FIX: the script previously sat after </html>; it now lives inside <body> -->
  <script>
    // Intercept submission and print the chosen date in the user's locale format.
    document.querySelector('form').addEventListener('submit', function (e) {
      e.preventDefault();
      var userinput = new Date(document.querySelector('input[type="date"]').value);
      console.log(userinput.toLocaleDateString());
    });
  </script>
</body>
</html>
|
def search(arr, target):
    """Return the index of the first occurrence of ``target`` in ``arr``.

    Returns -1 when the value is not present.
    """
    for idx, value in enumerate(arr):
        if value == target:
            return idx
    return -1
|
import {Component, Input, OnInit} from '@angular/core';
import { ApiService } from '../../services/api.service';
/**
 * Presentational component that renders the list of comments supplied via
 * the `Comments` input binding.
 */
@Component ({
selector: 'comments',
templateUrl: './comments.component.html',
styleUrls: ['./comments.component.scss']
})
export default class CommentsComponent implements OnInit{
// Comment objects provided by the parent component.
@Input()
Comments: Array<Object>;
constructor(){}
ngOnInit(){
}
// Submit handler stub; only logs for now.
addComment(): void{
console.log('submitted');
}
}
|
#!/bin/sh
set -e

# Create a tarball of DETACHED code signatures for the app bundle so the
# signatures can later be re-applied to an identical unsigned build
# (deterministic/gitian-style signing workflow).
ROOTDIR=dist
BUNDLE=${ROOTDIR}/StintCoin-Qt.app
CODESIGN=codesign
TEMPDIR=sign.temp
TEMPLIST=${TEMPDIR}/signatures.txt
OUT=signature.tar.gz

# codesign args (e.g. "-s MyIdentity") are mandatory
if [ ! -n "$1" ]; then
  echo "usage: $0 <codesign args>"
  echo "example: $0 -s MyIdentity"
  exit 1
fi

rm -rf ${TEMPDIR} ${TEMPLIST}
mkdir -p ${TEMPDIR}

# sign the bundle, recording every file codesign touched into TEMPLIST
${CODESIGN} -f --file-list ${TEMPLIST} "$@" "${BUNDLE}"

# For each signed Mach-O binary (everything except CodeResources): locate
# the embedded signature's offset/size with pagestuff and carve exactly
# those bytes out with dd into a parallel "<path>.sign" file.
for i in `grep -v CodeResources ${TEMPLIST}`; do
TARGETFILE="${BUNDLE}/`echo ${i} | sed "s|.*${BUNDLE}/||"`"
SIZE=`pagestuff $i -p | tail -2 | grep size | sed 's/[^0-9]*//g'`
OFFSET=`pagestuff $i -p | tail -2 | grep offset | sed 's/[^0-9]*//g'`
SIGNFILE="${TEMPDIR}/${TARGETFILE}.sign"
DIRNAME="`dirname ${SIGNFILE}`"
mkdir -p "${DIRNAME}"
echo "Adding detached signature for: ${TARGETFILE}. Size: ${SIZE}. Offset: ${OFFSET}"
dd if=$i of=${SIGNFILE} bs=1 skip=${OFFSET} count=${SIZE} 2>/dev/null
done

# CodeResources files are plists, not embedded signatures: copy them whole.
for i in `grep CodeResources ${TEMPLIST}`; do
TARGETFILE="${BUNDLE}/`echo ${i} | sed "s|.*${BUNDLE}/||"`"
RESOURCE="${TEMPDIR}/${TARGETFILE}"
DIRNAME="`dirname "${RESOURCE}"`"
mkdir -p "${DIRNAME}"
echo "Adding resource for: "${TARGETFILE}""
cp "${i}" "${RESOURCE}"
done

# bundle everything up and clean the workspace
rm ${TEMPLIST}
tar -C ${TEMPDIR} -czf ${OUT} .
rm -rf ${TEMPDIR}
echo "Created ${OUT}"
|
#!/usr/bin/env bash
# Start the explore-git Jupyter container detached and print the local URL.
# FIX: the shebang was "#!/bin/env bash" — env lives in /usr/bin on
# virtually every system, so the script could fail to launch at all.
# (The password below is a pre-hashed NotebookApp password, not a secret
# in the clear.)

PORT=8888

docker run \
  -d \
  -p "$PORT":8888 \
  -v "${PWD}":/home/jovyan \
  -v /tmp:/tmp \
  -e NB_UID=1000 \
  -e NB_GID=1000 \
  --user root \
  ghcr.io/sorosliu1029/explore-git:latest \
  start-notebook.sh --NotebookApp.password='sha1:34147a04de8e:28b0c1d0c034adf65f78074e69253c7f83e18144'

echo "Visit localhost:${PORT} to explore Git"
|
<gh_stars>0
var $ = jQuery;

// Admin-page glue: wires up the login form, the "end the day" confirmation
// modal, and a 2-second keep-alive loop that re-posts the stored
// credentials to authenticate.php.
class authenticateAdmin {
constructor() {
this.events();
this.ajaxAuthentication();
}

// Attach DOM event handlers.
events() {
$(".loginform").submit(this.createCookie);
$(".end-the-day").on('click', this.openPageModal);
$(".do-not-end-day").on('click', this.closeModal);
}

// Persist the submitted credentials as cookies.
// NOTE(review): this stores the username AND the PLAINTEXT password in
// ordinary (non-HttpOnly, non-Secure) cookies — a significant security
// risk; confirm whether a session token could be used instead.
createCookie() {
document.cookie = `username=${$("input[name=username]").val()}`;
document.cookie = `password=${$("input[name=passwd]").val()}`;
}

// Return the value of the named cookie, or undefined if absent.
getCookie(cookieName) {
var cookieName = cookieName + "=";
var decodeCookieForSpecialChars = decodeURIComponent(document.cookie);
var cookieToArray = decodeCookieForSpecialChars.split(';');
for(var i=0; i<cookieToArray.length; i++) {
var theCookie = cookieToArray[i];
// strip the leading spaces left by the "; " cookie separator
while(theCookie.charAt(0) == ' ') {
theCookie = theCookie.substring(1)
}
if(theCookie.indexOf(cookieName) == 0) {
return theCookie.substring(cookieName.length, theCookie.length);
}
}
}

// Reveal the "end the day" modal (backdrop first, then the dialog).
openPageModal() {
$("#end-the-day-notification").css({
'z-index': '999999999',
'opacity': 1
})
setTimeout(function() {
$("#notification-wrapper").css({
'top': 0,
'opacity': 1
})
}, 200)
}

// Hide the modal again (dialog slides out, backdrop drops behind the page).
closeModal() {
setTimeout(function() {
$("#end-the-day-notification").css({
'z-index': '-9999999999999999999999999999999999999',
'opacity': 0
});
}, 200)
$("#notification-wrapper").css({
'top': -100,
'opacity': 0
})
}

// Every 2 seconds, POST the cookie-stored credentials to the server.
// NOTE(review): the response is ignored; confirm the endpoint relies only
// on the request side-effects (e.g. keeping a session alive).
ajaxAuthentication() {
const credentials = {
username: this.getCookie("username"),
password: this.getCookie("password")
}
setInterval(() => {
//console.log(credentials);
$.ajax({
url: `authenticate.php`,
type: 'POST',
data: credentials,
})
}, 2000);
}
}

new authenticateAdmin;
|
import { $TSContext } from 'amplify-cli-core';
describe('command blocking', () => {
  test('validate which commands will be blocked or not', async () => {
    const { isCommandInMatches, versionGatingBlockedCommands } = await import('../version-gating');

    // Commands that must be gated by the minimum-version check.
    const blockedInputs: Array<{ plugin?: string; command: string }> = [
      { plugin: 'api', command: 'add' },
      { plugin: 'function', command: 'add' },
      { plugin: 'api', command: 'update' },
      { plugin: 'function', command: 'update' },
      { plugin: 'api', command: 'remove' },
      { plugin: 'function', command: 'remove' },
      { plugin: 'core', command: 'push' },
      { plugin: 'api', command: 'push' },
      { plugin: 'function', command: 'push' },
      { plugin: 'hosting', command: 'publish' },
      { plugin: 'api', command: 'gql-compile' },
    ];

    // Commands that must always pass the gate.
    const allowedInputs: Array<{ plugin?: string; command: string }> = [
      { plugin: undefined, command: 'help' },
      { plugin: undefined, command: 'version' },
      { plugin: undefined, command: 'configure' },
      { plugin: undefined, command: 'console' },
      { plugin: undefined, command: 'init' },
      { plugin: undefined, command: 'logout' },
      { plugin: undefined, command: 'status' },
      { plugin: undefined, command: 'pull' },
      { plugin: 'env', command: 'list' },
    ];

    for (const input of blockedInputs) {
      expect(isCommandInMatches(input, versionGatingBlockedCommands)).toBe(true);
    }
    for (const input of allowedInputs) {
      expect(isCommandInMatches(input, versionGatingBlockedCommands)).toBe(false);
    }
  });
});
describe('version gating', () => {
const originalProcessEnv = process.env;
let stackMetadata: any = undefined;
class CfnClientMock {
public getTemplateSummary = () => {
return {
promise: () =>
new Promise((resolve, _) => {
resolve({ Metadata: stackMetadata });
}),
};
};
}
const cfnClientMockInstance = new CfnClientMock();
class CloudFormation {
cfn: CfnClientMock;
constructor() {
this.cfn = cfnClientMockInstance;
}
}
const cloudFormationClient_stub = new CloudFormation();
const meta_stub = {
providers: {
awscloudformation: {
StackName: 'mockstack',
},
},
};
const stackMetadata_stub_520_500 = {
AmplifyCLI: {
DeployedByCLIVersion: '5.2.0',
MinimumCompatibleCLIVersion: '5.0.0',
},
};
const stackMetadata_stub_520_530 = {
AmplifyCLI: {
DeployedByCLIVersion: '5.2.0',
MinimumCompatibleCLIVersion: '5.3.0',
},
};
const stackMetadata_stub_530_531 = {
AmplifyCLI: {
DeployedByCLIVersion: '5.3.0',
MinimumCompatibleCLIVersion: '5.3.1',
},
};
const versionInfo_520_500 = {
currentCLIVersion: '5.2.0',
minimumCompatibleCLIVersion: '5.0.0',
};
const versionInfo_520_510 = {
currentCLIVersion: '5.2.0',
minimumCompatibleCLIVersion: '5.1.0',
};
const versionInfo_520_540 = {
currentCLIVersion: '5.2.0',
minimumCompatibleCLIVersion: '5.4.0',
};
const versionInfo_532_530 = {
currentCLIVersion: '5.3.2',
minimumCompatibleCLIVersion: '5.3.0',
};
const context_stub = ({
print: {
info: jest.fn(),
warning: jest.fn(),
success: jest.fn(),
},
input: {
plugin: 'api',
command: 'add',
},
versionInfo: versionInfo_520_500,
amplify: {
invokePluginMethod: jest.fn().mockReturnValue(cloudFormationClient_stub),
},
} as unknown) as jest.Mocked<$TSContext>;
beforeEach(() => {
jest.clearAllMocks();
jest.resetModules();
// reset mutated state
context_stub.input.plugin = 'api';
context_stub.input.command = 'add';
context_stub.versionInfo = versionInfo_520_500;
stackMetadata = undefined;
process.env = { ...originalProcessEnv };
});
afterEach(() => {
jest.clearAllMocks();
jest.resetModules();
// reset mutated state
context_stub.input.plugin = 'api';
context_stub.input.command = 'add';
stackMetadata = undefined;
process.env = { ...originalProcessEnv };
});
test('version gating should pass when env override set', async () => {
process.env.AMPLIFY_CLI_DISABLE_VERSION_CHECK = '1';
const versionGating = await import('../version-gating');
const isCommandInMatchesMock = jest.spyOn(versionGating, 'isCommandInMatches');
await expect(versionGating.isMinimumVersionSatisfied(context_stub)).resolves.toBe(true);
expect(isCommandInMatchesMock).toHaveBeenCalledTimes(0);
});
test('version gating should pass when command is non-blocking', async () => {
context_stub.input.plugin = 'core';
context_stub.input.command = 'version';
const versionGating = await import('../version-gating');
const { stateManager } = await import('amplify-cli-core');
const isCommandInMatchesMock = jest.spyOn(versionGating, 'isCommandInMatches');
const stateManagerMock = jest.spyOn(stateManager, 'getMeta').mockImplementation(() => undefined);
await expect(versionGating.isMinimumVersionSatisfied(context_stub)).resolves.toBe(true);
expect(isCommandInMatchesMock).toHaveBeenCalledTimes(1);
expect(stateManagerMock).toHaveBeenCalledTimes(0);
});
test('version gating should pass when stack is not deployed', async () => {
const versionGating = await import('../version-gating');
const { stateManager } = await import('amplify-cli-core');
const stateManagerMock = jest.spyOn(stateManager, 'getMeta').mockImplementation(() => undefined);
await expect(versionGating.isMinimumVersionSatisfied(context_stub)).resolves.toBe(true);
expect(stateManagerMock).toHaveBeenCalledTimes(1);
expect(context_stub.amplify.invokePluginMethod).toHaveBeenCalledTimes(0);
});
test('version gating should pass when stack has no metadata', async () => {
const versionGating = await import('../version-gating');
const { stateManager } = await import('amplify-cli-core');
const stateManagerMock = jest.spyOn(stateManager, 'getMeta').mockImplementation(() => meta_stub);
await expect(versionGating.isMinimumVersionSatisfied(context_stub)).resolves.toBe(true);
expect(stateManagerMock).toHaveBeenCalledTimes(1);
expect(context_stub.amplify.invokePluginMethod).toHaveBeenCalledTimes(1);
});
test('version gating should pass, meta: 5.2.0, metamin: 5.0.0, current: 5.2.0, min: 5.0.0', async () => {
const versionGating = await import('../version-gating');
const { stateManager } = await import('amplify-cli-core');
stackMetadata = stackMetadata_stub_520_500;
const stateManagerMock = jest.spyOn(stateManager, 'getMeta').mockImplementation(() => meta_stub);
await expect(versionGating.isMinimumVersionSatisfied(context_stub)).resolves.toBe(true);
});
test('version gating should pass, meta: 5.2.0, metamin: 5.0.0, current: 5.2.0, min: 5.1.0', async () => {
const versionGating = await import('../version-gating');
const { stateManager } = await import('amplify-cli-core');
stackMetadata = stackMetadata_stub_520_500;
context_stub.versionInfo = versionInfo_520_510;
const stateManagerMock = jest.spyOn(stateManager, 'getMeta').mockImplementation(() => meta_stub);
await expect(versionGating.isMinimumVersionSatisfied(context_stub)).resolves.toBe(true);
});
test('version gating should pass, meta: 5.3.0, metamin: 5.3.1, current: 5.3.2, min: 5.3.0', async () => {
const versionGating = await import('../version-gating');
const { stateManager } = await import('amplify-cli-core');
stackMetadata = stackMetadata_stub_530_531;
context_stub.versionInfo = versionInfo_532_530;
const stateManagerMock = jest.spyOn(stateManager, 'getMeta').mockImplementation(() => meta_stub);
await expect(versionGating.isMinimumVersionSatisfied(context_stub)).resolves.toBe(true);
});
test('version gating should fail, meta: 5.2.0, metamin: 5.3.0, current: 5.2.0, min: 5.0.0', async () => {
const versionGating = await import('../version-gating');
const { stateManager } = await import('amplify-cli-core');
stackMetadata = stackMetadata_stub_520_530;
const stateManagerMock = jest.spyOn(stateManager, 'getMeta').mockImplementation(() => meta_stub);
expect(versionGating.isMinimumVersionSatisfied(context_stub)).resolves.toEqual(false);
});
test('version gating should fail, meta: 5.2.0, metamin: 5.3.0, current: 5.2.0, min: 5.4.0', async () => {
const versionGating = await import('../version-gating');
const { stateManager } = await import('amplify-cli-core');
stackMetadata = stackMetadata_stub_520_530;
context_stub.versionInfo = versionInfo_520_540;
const stateManagerMock = jest.spyOn(stateManager, 'getMeta').mockImplementation(() => meta_stub);
expect(versionGating.isMinimumVersionSatisfied(context_stub)).resolves.toEqual(false);
});
});
|
<reponame>adamsrsen/watchinsync
import {Column, Entity, ManyToOne, PrimaryGeneratedColumn} from 'typeorm'
import Users from './Users'
import Rooms from './Rooms'
// Chat message persisted per room; many-to-one to both its author and room.
@Entity()
export default class Messages {
  // Auto-generated surrogate primary key.
  @PrimaryGeneratedColumn()
  id: number

  // Message body text.
  @Column()
  text: string

  // Timezone-aware send timestamp (Postgres `timestamptz`).
  @Column({type: 'timestamptz'})
  timestamp: Date

  // Author of the message.
  @ManyToOne(() => Users)
  user: Partial<Users>

  // Room the message belongs to; inverse side is Rooms.messages.
  @ManyToOne(() => Rooms, (room) => room.messages)
  room: Partial<Rooms>
}
|
///<reference types="Cypress" />
let faker = require('faker');
let localforage = require('localforage');
// Spec covering creation of a weekly repeating entry.
describe('Frogbudget - Repeats', () => {
  beforeEach(() => {
    cy.visit('/');
    // Reset persisted state between tests.
    indexedDB.deleteDatabase('entry');
    indexedDB.deleteDatabase('repeat');
  })

  it('create default weekly repeat', () => {
    cy.get('.fab').click();
    cy.get('.switch__toggle').click();
    cy.get('#input_category_repeat').type(faker.commerce.productName());

    // Random two-decimal amount with a random sign.
    const amount = Number(faker.commerce.price()) * (Math.random() > 0.5 ? -1 : 1);
    cy.get('#input_value_repeat').type(amount);

    // February 2019 contains exactly four Mondays.
    cy.get('#input_begin').type('2019-02-01');
    cy.get('#input_end').type('2019-02-28');
    cy.get('select').select(['Montag']);
    cy.contains('Hinzufügen').click();

    // Four occurrences of the repeat -> four times the amount on screen.
    cy.contains(`${4 * amount}`);
  })
})
// Specs covering single-entry creation and the running-total display.
describe('Frogbudget - Entries', () => {
  beforeEach(() => {
    cy.visit('/');
    // Reset persisted state between tests.
    indexedDB.deleteDatabase('entry');
    indexedDB.deleteDatabase('repeat');
  })

  it('create defaultentry', () => {
    cy.get('.fab').click();
    cy.get('#input_submit').click();
    cy.contains('default');
    cy.contains('0€');
  });

  it('create test entry', () => {
    cy.get('.fab').click();
    cy.get('#input_category_entry').type('bread');
    cy.get('#input_value_entry').type('-1.99');
    cy.get('#input_submit').click()
    cy.contains('bread');
    cy.contains('-1.99€');
  });

  it('create 30 test entries', () => {
    let runningTotal = 0;
    for (let i = 0; i < 30; ++i) {
      cy.get('.fab').click();
      cy.get('#input_category_entry').type(faker.commerce.productName());

      const amount = Number(faker.commerce.price()) * (Math.random() > 0.5 ? -1 : 1);
      runningTotal += amount;
      cy.get('#input_value_entry').type(amount);

      // Random day in 01..28, zero-padded to two digits.
      const day = ((Math.random() * 27) + 1).toFixed(0).padStart(2, '0');
      cy.get('#input_date').type(`2019-02-${day}`)

      cy.contains('Hinzufügen').click()
      // The running total must be reflected after each insertion.
      cy.get('.right').contains(`${runningTotal.toFixed(2)} €`);
    }
  });
});
|
<reponame>nabeelkhan/Oracle-DBA-Life
rem Print the current database server date and time.
rem BUGFIX: use HH24 — the previous 'HH' format is a 12-hour clock with no
rem AM/PM marker, which made the reported time ambiguous.
set echo off
set heading off
set feedback off
prompt
prompt Current Date and Time
prompt
select '*** Time = '||to_char(sysdate,'DD-MON-YY HH24:MI:SS')|| ' ***' from dual;
|
import { locales } from 'nextra/locales'

// Next.js middleware entry point: delegates locale detection/redirects to
// Nextra's built-in locale middleware.
export const middleware = locales
|
<reponame>bbernhar/skia<filename>modules/skottie/src/SkottieTest.cpp
/*
* Copyright 2018 Google Inc.
*
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
#include "include/core/SkFontMgr.h"
#include "include/core/SkMatrix.h"
#include "include/core/SkStream.h"
#include "include/core/SkTextBlob.h"
#include "include/core/SkTypeface.h"
#include "modules/skottie/include/Skottie.h"
#include "modules/skottie/include/SkottieProperty.h"
#include "modules/skottie/src/text/SkottieShaper.h"
#include "src/core/SkFontDescriptor.h"
#include "src/core/SkTextBlobPriv.h"
#include "tests/Test.h"
#include "tools/ToolUtils.h"
#include <cmath>
#include <string>
#include <tuple>
#include <vector>
using namespace skottie;
// Regression test for oss-fuzz issue 8956: a solid layer whose animated
// opacity keyframe carries a malformed interpolation descriptor
// ("i": {"x": []}) must not crash the parser.
DEF_TEST(Skottie_OssFuzz8956, reporter) {
    static constexpr char json[] =
        "{\"v\":\" \",\"fr\":3,\"w\":4,\"h\":3,\"layers\":[{\"ty\": 1, \"sw\": 10, \"sh\": 10,"
        " \"sc\":\"#ffffff\", \"ks\":{\"o\":{\"a\": true, \"k\":"
        " [{\"t\": 0, \"s\": 0, \"e\": 1, \"i\": {\"x\":[]}}]}}}]}";

    SkMemoryStream stream(json, strlen(json));

    // Passes if parsing doesn't crash.
    auto animation = Animation::Make(&stream);
}
// Exercises the PropertyObserver API: builds an animation with known color,
// opacity, transform, and text properties and verifies that the observer
// receives the expected handles, node names, and initial values.
DEF_TEST(Skottie_Properties, reporter) {
    auto test_typeface = ToolUtils::create_portable_typeface();
    REPORTER_ASSERT(reporter, test_typeface);

    // One shape layer (ellipse + fill + transform + "ADBE Fill" effect) and
    // one text layer, covering every observer callback below.
    static const char json[] = R"({
        "v": "5.2.1",
        "w": 100,
        "h": 100,
        "fr": 1,
        "ip": 0,
        "op": 1,
        "fonts": {
            "list": [
                {
                    "fName": "test_font",
                    "fFamily": "test-family",
                    "fStyle": "TestFontStyle"
                }
            ]
        },
        "layers": [
            {
                "ty": 4,
                "nm": "layer_0",
                "ind": 0,
                "ip": 0,
                "op": 1,
                "ks": {
                    "o": { "a": 0, "k": 50 }
                },
                "ef": [{
                    "ef": [
                        {},
                        {},
                        { "v": { "a": 0, "k": [ 0, 1, 0 ] }},
                        {},
                        {},
                        {},
                        { "v": { "a": 0, "k": 1 }}
                    ],
                    "nm": "fill_effect_0",
                    "mn": "ADBE Fill",
                    "ty": 21
                }],
                "shapes": [
                    {
                        "ty": "el",
                        "nm": "geometry_0",
                        "p": { "a": 0, "k": [ 50, 50 ] },
                        "s": { "a": 0, "k": [ 50, 50 ] }
                    },
                    {
                        "ty": "fl",
                        "nm": "fill_0",
                        "c": { "a": 0, "k": [ 1, 0, 0] }
                    },
                    {
                        "ty": "tr",
                        "nm": "shape_transform_0",
                        "o": { "a": 0, "k": 100 },
                        "s": { "a": 0, "k": [ 50, 50 ] }
                    }
                ]
            },
            {
                "ty": 5,
                "nm": "layer_1",
                "ip": 0,
                "op": 1,
                "ks": {
                    "p": { "a": 0, "k": [25, 25] }
                },
                "t": {
                    "d": {
                        "k": [
                            {
                                "t": 0,
                                "s": {
                                    "f": "test_font",
                                    "s": 100,
                                    "t": "inline_text",
                                    "lh": 120,
                                    "ls": 12
                                }
                            }
                        ]
                    }
                }
            }
        ]
    })";

    // Records every property callback (node name + resolved handle) so the
    // assertions below can inspect what the builder discovered.
    class TestPropertyObserver final : public PropertyObserver {
    public:
        struct ColorInfo {
            SkString node_name;
            std::unique_ptr<skottie::ColorPropertyHandle> handle;
        };

        struct OpacityInfo {
            SkString node_name;
            std::unique_ptr<skottie::OpacityPropertyHandle> handle;
        };

        struct TextInfo {
            SkString node_name;
            std::unique_ptr<skottie::TextPropertyHandle> handle;
        };

        struct TransformInfo {
            SkString node_name;
            std::unique_ptr<skottie::TransformPropertyHandle> handle;
        };

        void onColorProperty(const char node_name[],
                const PropertyObserver::LazyHandle<ColorPropertyHandle>& lh) override {
            // Colors are recorded twice: by leaf node name and by full keypath.
            fColors.push_back({SkString(node_name), lh()});
            fColorsWithFullKeypath.push_back({SkString(fCurrentNode.c_str()), lh()});
        }

        void onOpacityProperty(const char node_name[],
                const PropertyObserver::LazyHandle<OpacityPropertyHandle>& lh) override {
            fOpacities.push_back({SkString(node_name), lh()});
        }

        void onTextProperty(const char node_name[],
                            const PropertyObserver::LazyHandle<TextPropertyHandle>& lh) override {
            fTexts.push_back({SkString(node_name), lh()});
        }

        void onTransformProperty(const char node_name[],
                const PropertyObserver::LazyHandle<TransformPropertyHandle>& lh) override {
            fTransforms.push_back({SkString(node_name), lh()});
        }

        // Maintains a dotted keypath ("outer.inner...") while the builder
        // walks the scene graph.
        void onEnterNode(const char node_name[], PropertyObserver::NodeType node_type) override {
            if (node_name == nullptr) {
                return;
            }
            fCurrentNode = fCurrentNode.empty() ? node_name : fCurrentNode + "." + node_name;
        }

        void onLeavingNode(const char node_name[], PropertyObserver::NodeType node_type) override {
            if (node_name == nullptr) {
                return;
            }
            // Strip the trailing ".<node_name>" segment (or reset to empty).
            auto length = strlen(node_name);
            fCurrentNode =
                fCurrentNode.length() > length
                    ? fCurrentNode.substr(0, fCurrentNode.length() - strlen(node_name) - 1)
                    : "";
        }

        const std::vector<ColorInfo>& colors() const { return fColors; }
        const std::vector<OpacityInfo>& opacities() const { return fOpacities; }
        const std::vector<TextInfo>& texts() const { return fTexts; }
        const std::vector<TransformInfo>& transforms() const { return fTransforms; }
        const std::vector<ColorInfo>& colorsWithFullKeypath() const {
            return fColorsWithFullKeypath;
        }

    private:
        std::vector<ColorInfo> fColors;
        std::vector<OpacityInfo> fOpacities;
        std::vector<TextInfo> fTexts;
        std::vector<TransformInfo> fTransforms;
        std::string fCurrentNode;
        std::vector<ColorInfo> fColorsWithFullKeypath;
    };

    // Returns a single specified typeface for all requests.
    class FakeFontMgr : public SkFontMgr {
    public:
        FakeFontMgr(sk_sp<SkTypeface> test_font) : fTestFont(test_font) {}

        int onCountFamilies() const override { return 1; }
        void onGetFamilyName(int index, SkString* familyName) const override {}
        SkFontStyleSet* onCreateStyleSet(int index) const override { return nullptr; }
        SkFontStyleSet* onMatchFamily(const char familyName[]) const override { return nullptr; }
        SkTypeface* onMatchFamilyStyle(const char familyName[],
                                       const SkFontStyle& fontStyle) const override {
            return nullptr;
        }
        SkTypeface* onMatchFamilyStyleCharacter(const char familyName[], const SkFontStyle&,
                                                const char* bcp47[], int bcp47Count,
                                                SkUnichar character) const override {
            return nullptr;
        }
        // All "make" entry points resolve to the single test font.
        sk_sp<SkTypeface> onMakeFromData(sk_sp<SkData>, int ttcIndex) const override {
            return fTestFont;
        }
        sk_sp<SkTypeface> onMakeFromStreamIndex(std::unique_ptr<SkStreamAsset>,
                                                int ttcIndex) const override {
            return fTestFont;
        }
        sk_sp<SkTypeface> onMakeFromStreamArgs(std::unique_ptr<SkStreamAsset>,
                                               const SkFontArguments&) const override {
            return fTestFont;
        }
        sk_sp<SkTypeface> onMakeFromFile(const char path[], int ttcIndex) const override {
            return fTestFont;
        }
        sk_sp<SkTypeface> onLegacyMakeTypeface(const char familyName[], SkFontStyle) const override {
            return fTestFont;
        }
    private:
        sk_sp<SkTypeface> fTestFont;
    };

    sk_sp<FakeFontMgr> test_font_manager = sk_make_sp<FakeFontMgr>(test_typeface);
    SkMemoryStream stream(json, strlen(json));
    auto observer = sk_make_sp<TestPropertyObserver>();

    auto animation = skottie::Animation::Builder()
            .setPropertyObserver(observer)
            .setFontManager(test_font_manager)
            .make(&stream);
    REPORTER_ASSERT(reporter, animation);

    // One color from the shape fill, one from the fill effect.
    const auto& colors = observer->colors();
    REPORTER_ASSERT(reporter, colors.size() == 2);
    REPORTER_ASSERT(reporter, colors[0].node_name.equals("fill_0"));
    REPORTER_ASSERT(reporter, colors[0].handle->get() == 0xffff0000);
    REPORTER_ASSERT(reporter, colors[1].node_name.equals("fill_effect_0"));
    REPORTER_ASSERT(reporter, colors[1].handle->get() == 0xff00ff00);

    // Full keypaths carry the owning layer prefix.
    const auto& colorsWithFullKeypath = observer->colorsWithFullKeypath();
    REPORTER_ASSERT(reporter, colorsWithFullKeypath.size() == 2);
    REPORTER_ASSERT(reporter, colorsWithFullKeypath[0].node_name.equals("layer_0.fill_0"));
    REPORTER_ASSERT(reporter, colorsWithFullKeypath[0].handle->get() == 0xffff0000);
    REPORTER_ASSERT(reporter, colorsWithFullKeypath[1].node_name.equals("layer_0.fill_effect_0"));
    REPORTER_ASSERT(reporter, colorsWithFullKeypath[1].handle->get() == 0xff00ff00);

    const auto& opacities = observer->opacities();
    // NOTE(review): only two of the three recorded opacities are asserted
    // below — opacities[2] is left unchecked; worth pinning down its source.
    REPORTER_ASSERT(reporter, opacities.size() == 3);
    REPORTER_ASSERT(reporter, opacities[0].node_name.equals("shape_transform_0"));
    REPORTER_ASSERT(reporter, SkScalarNearlyEqual(opacities[0].handle->get(), 100));
    REPORTER_ASSERT(reporter, opacities[1].node_name.equals("layer_0"));
    REPORTER_ASSERT(reporter, SkScalarNearlyEqual(opacities[1].handle->get(), 50));

    // Transforms: layer transforms default except where the JSON overrides.
    const auto& transforms = observer->transforms();
    REPORTER_ASSERT(reporter, transforms.size() == 3);
    REPORTER_ASSERT(reporter, transforms[0].node_name.equals("layer_0"));
    REPORTER_ASSERT(reporter, transforms[0].handle->get() == skottie::TransformPropertyValue({
        SkPoint::Make(0, 0),
        SkPoint::Make(0, 0),
        SkVector::Make(100, 100),
        0,
        0,
        0
    }));
    REPORTER_ASSERT(reporter, transforms[1].node_name.equals("layer_1"));
    REPORTER_ASSERT(reporter, transforms[1].handle->get() == skottie::TransformPropertyValue({
        SkPoint::Make(0, 0),
        SkPoint::Make(25, 25),
        SkVector::Make(100, 100),
        0,
        0,
        0
    }));
    REPORTER_ASSERT(reporter, transforms[2].node_name.equals("shape_transform_0"));
    REPORTER_ASSERT(reporter, transforms[2].handle->get() == skottie::TransformPropertyValue({
        SkPoint::Make(0, 0),
        SkPoint::Make(0, 0),
        SkVector::Make(50, 50),
        0,
        0,
        0
    }));

    // The text layer's document: content, size, line height, tracking, etc.
    const auto& texts = observer->texts();
    REPORTER_ASSERT(reporter, texts.size() == 1);
    REPORTER_ASSERT(reporter, texts[0].node_name.equals("layer_1"));
    REPORTER_ASSERT(reporter, texts[0].handle->get() == skottie::TextPropertyValue({
        test_typeface,
        SkString("inline_text"),
        100,
        0, 100,
        0,
        120,
        12,
        0,
        SkTextUtils::kLeft_Align,
        Shaper::VAlign::kTopBaseline,
        Shaper::ResizePolicy::kNone,
        Shaper::LinebreakPolicy::kExplicit,
        Shaper::Direction::kLTR,
        SkRect::MakeEmpty(),
        SK_ColorTRANSPARENT,
        SK_ColorTRANSPARENT,
        TextPaintOrder::kFillStroke,
        false,
        false
    }));
}
// Verifies marker parsing: each "markers" entry must produce exactly one
// onMarker callback, with t0/t1 expressed as fractions of the timeline.
DEF_TEST(Skottie_Annotations, reporter) {
    static constexpr char json[] = R"({
        "v": "5.2.1",
        "w": 100,
        "h": 100,
        "fr": 10,
        "ip": 0,
        "op": 100,
        "layers": [
            {
                "ty": 1,
                "ind": 0,
                "ip": 0,
                "op": 1,
                "ks": {
                    "o": { "a": 0, "k": 50 }
                },
                "sw": 100,
                "sh": 100,
                "sc": "#ffffff"
            }
        ],
        "markers": [
            {
                "cm": "marker_1",
                "dr": 25,
                "tm": 25
            },
            {
                "cm": "marker_2",
                "dr": 0,
                "tm": 75
            }
        ]
    })";

    // Collects (name, t0, t1) tuples for every marker callback.
    class TestMarkerObserver final : public MarkerObserver {
    public:
        void onMarker(const char name[], float t0, float t1) override {
            fMarkers.push_back(std::make_tuple(name, t0, t1));
        }

        std::vector<std::tuple<std::string, float, float>> fMarkers;
    };

    SkMemoryStream stream(json, strlen(json));
    auto observer = sk_make_sp<TestMarkerObserver>();

    auto animation = skottie::Animation::Builder()
            .setMarkerObserver(observer)
            .make(&stream);
    REPORTER_ASSERT(reporter, animation);

    // 100 frames at 10fps -> 10s duration.
    REPORTER_ASSERT(reporter, animation->duration() == 10);
    REPORTER_ASSERT(reporter, animation->inPoint() == 0.0);
    REPORTER_ASSERT(reporter, animation->outPoint() == 100.0);

    REPORTER_ASSERT(reporter, observer->fMarkers.size() == 2ul);
    // marker_1: tm=25, dr=25 -> normalized span [0.25, 0.50].
    REPORTER_ASSERT(reporter, std::get<0>(observer->fMarkers[0]) == "marker_1");
    REPORTER_ASSERT(reporter, std::get<1>(observer->fMarkers[0]) == 0.25f);
    REPORTER_ASSERT(reporter, std::get<2>(observer->fMarkers[0]) == 0.50f);
    // marker_2: tm=75, dr=0 -> zero-length marker at 0.75.
    REPORTER_ASSERT(reporter, std::get<0>(observer->fMarkers[1]) == "marker_2");
    REPORTER_ASSERT(reporter, std::get<1>(observer->fMarkers[1]) == 0.75f);
    REPORTER_ASSERT(reporter, std::get<2>(observer->fMarkers[1]) == 0.75f);
}
// Accumulate the union of per-glyph bounds for every run in |blob|, each
// offset by its glyph's (x, y) position. Returns an empty rect for null.
static SkRect ComputeBlobBounds(const sk_sp<SkTextBlob>& blob) {
    SkRect accum = SkRect::MakeEmpty();
    if (!blob) {
        return accum;
    }

    SkAutoSTArray<16, SkRect> run_glyph_bounds;
    for (SkTextBlobRunIterator run(blob.get()); !run.done(); run.next()) {
        run_glyph_bounds.reset(SkToInt(run.glyphCount()));
        run.font().getBounds(run.glyphs(), run.glyphCount(), run_glyph_bounds.get(), nullptr);

        // Only full (x, y) positioning is expected for shaped blobs here.
        SkASSERT(run.positioning() == SkTextBlobRunIterator::kFull_Positioning);

        const SkScalar* positions = run.pos();
        for (uint32_t i = 0; i < run.glyphCount(); ++i) {
            accum.join(run_glyph_bounds[i].makeOffset(positions[2 * i],
                                                      positions[2 * i + 1]));
        }
    }

    return accum;
}
// Union of the positioned bounds of all fragments in a shaper result.
static SkRect ComputeShapeResultBounds(const skottie::Shaper::Result& res) {
    SkRect total = SkRect::MakeEmpty();
    for (const auto& frag : res.fFragments) {
        const SkRect frag_bounds = ComputeBlobBounds(frag.fBlob);
        total.join(frag_bounds.makeOffset(frag.fPos.x(), frag.fPos.y()));
    }
    return total;
}
// Verifies horizontal alignment: for left/center/right alignment, the shaped
// bounds must sit at the expected offset relative to the anchor point,
// within a per-text-size tolerance.
DEF_TEST(Skottie_Shaper_HAlign, reporter) {
    auto typeface = SkTypeface::MakeDefault();
    REPORTER_ASSERT(reporter, typeface);

    static constexpr struct {
        SkScalar text_size,
                 tolerance;
    } kTestSizes[] = {
        // These gross tolerances are required for the test to pass on NativeFonts bots.
        // Might be worth investigating why we need so much slack.
        {  5, 2.0f },
        { 10, 2.0f },
        { 15, 2.4f },
        { 25, 4.4f },
    };

    static constexpr struct {
        SkTextUtils::Align align;
        // Fractions of the shaped width expected left/right of the anchor.
        SkScalar l_selector,
                 r_selector;
    } kTestAligns[] = {
        { SkTextUtils:: kLeft_Align, 0.0f, 1.0f },
        { SkTextUtils::kCenter_Align, 0.5f, 0.5f },
        { SkTextUtils:: kRight_Align, 1.0f, 0.0f },
    };

    const SkString text("Foo, bar.\rBaz.");
    const SkPoint text_point = SkPoint::Make(100, 100);

    for (const auto& tsize : kTestSizes) {
        for (const auto& talign : kTestAligns) {
            const skottie::Shaper::TextDesc desc = {
                typeface,
                tsize.text_size,
                0, tsize.text_size,
                tsize.text_size,
                0,
                0,
                talign.align,
                Shaper::VAlign::kTopBaseline,
                Shaper::ResizePolicy::kNone,
                Shaper::LinebreakPolicy::kExplicit,
                Shaper::Direction::kLTR,
                Shaper::Flags::kNone
            };

            const auto shape_result = Shaper::Shape(text, desc, text_point,
                                                    SkFontMgr::RefDefault());
            REPORTER_ASSERT(reporter, shape_result.fFragments.size() == 1ul);
            REPORTER_ASSERT(reporter, shape_result.fFragments[0].fBlob);

            const auto shape_bounds = ComputeShapeResultBounds(shape_result);
            REPORTER_ASSERT(reporter, !shape_bounds.isEmpty());

            // Left edge = anchor - l_selector * width (0 for left alignment).
            const auto expected_l = text_point.x() - shape_bounds.width() * talign.l_selector;
            REPORTER_ASSERT(reporter,
                            std::fabs(shape_bounds.left() - expected_l) < tsize.tolerance,
                            "%f %f %f %f %d", shape_bounds.left(), expected_l, tsize.tolerance,
                            tsize.text_size, talign.align);

            // Right edge = anchor + r_selector * width (0 for right alignment).
            const auto expected_r = text_point.x() + shape_bounds.width() * talign.r_selector;
            REPORTER_ASSERT(reporter,
                            std::fabs(shape_bounds.right() - expected_r) < tsize.tolerance,
                            "%f %f %f %f %d", shape_bounds.right(), expected_r, tsize.tolerance,
                            tsize.text_size, talign.align);
        }
    }
}
// Verifies vertical alignment within a text box: kVisualTop pins the shaped
// bounds to the box top, kVisualCenter centers them, within a per-size
// tolerance.
DEF_TEST(Skottie_Shaper_VAlign, reporter) {
    auto typeface = SkTypeface::MakeDefault();
    REPORTER_ASSERT(reporter, typeface);

    static constexpr struct {
        SkScalar text_size,
                 tolerance;
    } kTestSizes[] = {
        // These gross tolerances are required for the test to pass on NativeFonts bots.
        // Might be worth investigating why we need so much slack.
        {  5, 2.0f },
        { 10, 4.0f },
        { 15, 5.5f },
        { 25, 8.0f },
    };

    struct {
        skottie::Shaper::VAlign align;
        // Fraction of the free vertical space expected above the text.
        SkScalar topFactor;
    } kTestAligns[] = {
        { skottie::Shaper::VAlign::kVisualTop , 0.0f },
        { skottie::Shaper::VAlign::kVisualCenter, 0.5f },
        // TODO: any way to test kTopBaseline?
    };

    const SkString text("Foo, bar.\rBaz.");
    const auto text_box = SkRect::MakeXYWH(100, 100, 1000, 1000); // large-enough to avoid breaks.

    for (const auto& tsize : kTestSizes) {
        for (const auto& talign : kTestAligns) {
            const skottie::Shaper::TextDesc desc = {
                typeface,
                tsize.text_size,
                0, tsize.text_size,
                tsize.text_size,
                0,
                0,
                SkTextUtils::Align::kCenter_Align,
                talign.align,
                Shaper::ResizePolicy::kNone,
                Shaper::LinebreakPolicy::kParagraph,
                Shaper::Direction::kLTR,
                Shaper::Flags::kNone
            };

            const auto shape_result = Shaper::Shape(text, desc, text_box, SkFontMgr::RefDefault());
            REPORTER_ASSERT(reporter, shape_result.fFragments.size() == 1ul);
            REPORTER_ASSERT(reporter, shape_result.fFragments[0].fBlob);

            const auto shape_bounds = ComputeShapeResultBounds(shape_result);
            REPORTER_ASSERT(reporter, !shape_bounds.isEmpty());

            // Free vertical space is distributed per topFactor.
            const auto v_diff = text_box.height() - shape_bounds.height();

            const auto expected_t = text_box.top() + v_diff * talign.topFactor;
            REPORTER_ASSERT(reporter,
                            std::fabs(shape_bounds.top() - expected_t) < tsize.tolerance,
                            "%f %f %f %f %d", shape_bounds.top(), expected_t, tsize.tolerance,
                            tsize.text_size, SkToU32(talign.align));

            const auto expected_b = text_box.bottom() - v_diff * (1 - talign.topFactor);
            REPORTER_ASSERT(reporter,
                            std::fabs(shape_bounds.bottom() - expected_b) < tsize.tolerance,
                            "%f %f %f %f %d", shape_bounds.bottom(), expected_b, tsize.tolerance,
                            tsize.text_size, SkToU32(talign.align));
        }
    }
}
// Verifies Shaper::Flags::kFragmentGlyphs: consolidated shaping yields a
// single blob, while fragmented shaping yields one fragment per glyph.
DEF_TEST(Skottie_Shaper_FragmentGlyphs, reporter) {
    skottie::Shaper::TextDesc desc = {
        SkTypeface::MakeDefault(),
        18,
        0, 18,
        18,
        0,
        0,
        SkTextUtils::Align::kCenter_Align,
        Shaper::VAlign::kTop,
        Shaper::ResizePolicy::kNone,
        Shaper::LinebreakPolicy::kParagraph,
        Shaper::Direction::kLTR,
        Shaper::Flags::kNone
    };

    const SkString text("Foo bar baz");
    const auto text_box = SkRect::MakeWH(100, 100);

    {
        const auto shape_result = Shaper::Shape(text, desc, text_box, SkFontMgr::RefDefault());
        // Default/consolidated mode => single blob result.
        REPORTER_ASSERT(reporter, shape_result.fFragments.size() == 1ul);
        REPORTER_ASSERT(reporter, shape_result.fFragments[0].fBlob);
    }

    {
        desc.fFlags = Shaper::Flags::kFragmentGlyphs;
        const auto shape_result = skottie::Shaper::Shape(text, desc, text_box,
                                                         SkFontMgr::RefDefault());
        // Fragmented mode => one blob per glyph.
        // NOTE(review): text.size() counts bytes (including spaces); this
        // assumes a 1:1 char->glyph mapping for this ASCII string — confirm
        // if the text or typeface ever changes.
        const size_t expectedSize = text.size();
        REPORTER_ASSERT(reporter, shape_result.fFragments.size() == expectedSize);
        for (size_t i = 0; i < expectedSize; ++i) {
            REPORTER_ASSERT(reporter, shape_result.fFragments[i].fBlob);
        }
    }
}
#if defined(SK_SHAPER_HARFBUZZ_AVAILABLE) && !defined(SK_BUILD_FOR_WIN)
// Verifies that an explicitly-provided SkFontMgr is consulted for glyph
// fallback: plain ASCII must not hit fallback, while an unassigned codepoint
// must trigger exactly one fallback query and report one missing glyph.
DEF_TEST(Skottie_Shaper_ExplicitFontMgr, reporter) {
    // Font manager that resolves nothing but counts fallback lookups.
    class CountingFontMgr : public SkFontMgr {
    public:
        size_t fallbackCount() const { return fFallbackCount; }

    protected:
        int onCountFamilies() const override { return 0; }
        void onGetFamilyName(int index, SkString* familyName) const override {
            SkDEBUGFAIL("onGetFamilyName called with bad index");
        }
        SkFontStyleSet* onCreateStyleSet(int index) const override {
            SkDEBUGFAIL("onCreateStyleSet called with bad index");
            return nullptr;
        }
        SkFontStyleSet* onMatchFamily(const char[]) const override {
            return SkFontStyleSet::CreateEmpty();
        }

        SkTypeface* onMatchFamilyStyle(const char[], const SkFontStyle&) const override {
            return nullptr;
        }
        SkTypeface* onMatchFamilyStyleCharacter(const char familyName[],
                                                const SkFontStyle& style,
                                                const char* bcp47[],
                                                int bcp47Count,
                                                SkUnichar character) const override {
            // Every fallback probe lands here.
            fFallbackCount++;
            return nullptr;
        }

        sk_sp<SkTypeface> onMakeFromData(sk_sp<SkData>, int) const override {
            return nullptr;
        }
        sk_sp<SkTypeface> onMakeFromStreamIndex(std::unique_ptr<SkStreamAsset>, int) const override {
            return nullptr;
        }
        sk_sp<SkTypeface> onMakeFromStreamArgs(std::unique_ptr<SkStreamAsset>,
                                               const SkFontArguments&) const override {
            return nullptr;
        }
        sk_sp<SkTypeface> onMakeFromFile(const char[], int) const override {
            return nullptr;
        }
        sk_sp<SkTypeface> onLegacyMakeTypeface(const char [], SkFontStyle) const override {
            return nullptr;
        }
    private:
        mutable size_t fFallbackCount = 0;
    };

    auto fontmgr = sk_make_sp<CountingFontMgr>();

    skottie::Shaper::TextDesc desc = {
        ToolUtils::create_portable_typeface(),
        18,
        0, 18,
        18,
        0,
        0,
        SkTextUtils::Align::kCenter_Align,
        Shaper::VAlign::kTop,
        Shaper::ResizePolicy::kNone,
        Shaper::LinebreakPolicy::kParagraph,
        Shaper::Direction::kLTR,
        Shaper::Flags::kNone
    };

    const auto text_box = SkRect::MakeWH(100, 100);

    {
        // Plain ASCII: no fallback queries, no missing glyphs.
        const auto shape_result = Shaper::Shape(SkString("foo bar"), desc, text_box, fontmgr);

        REPORTER_ASSERT(reporter, shape_result.fFragments.size() == 1ul);
        REPORTER_ASSERT(reporter, shape_result.fFragments[0].fBlob);
        REPORTER_ASSERT(reporter, fontmgr->fallbackCount() == 0ul);
        REPORTER_ASSERT(reporter, shape_result.fMissingGlyphCount == 0);
    }
    {
        // An unassigned codepoint should trigger fallback.
        const auto shape_result = skottie::Shaper::Shape(SkString("foo\U000DFFFFbar"),
                                                         desc, text_box, fontmgr);
        REPORTER_ASSERT(reporter, shape_result.fFragments.size() == 1ul);
        REPORTER_ASSERT(reporter, shape_result.fFragments[0].fBlob);
        REPORTER_ASSERT(reporter, fontmgr->fallbackCount() == 1ul);
        REPORTER_ASSERT(reporter, shape_result.fMissingGlyphCount == 1ul);
    }
}
#endif
// Verifies skottie's two image-loading strategies: by default, single-frame
// image assets are sampled once at Animation build time while multi-frame
// assets are sampled lazily per seek; with kDeferImageLoading both are lazy.
DEF_TEST(Skottie_Image_Loading, reporter) {
    // Hands out one of two pre-built image assets, selected purely by the
    // asset id the animation requests ("single_frame" vs. anything else).
    class TestResourceProvider final : public skresources::ResourceProvider {
    public:
        TestResourceProvider(sk_sp<skresources::ImageAsset> single_asset,
                             sk_sp<skresources::ImageAsset> multi_asset)
            : fSingleFrameAsset(std::move(single_asset))
            , fMultiFrameAsset (std::move( multi_asset)) {}

    private:
        sk_sp<ImageAsset> loadImageAsset(const char path[],
                                         const char name[],
                                         const char id[]) const override {
            // path/name are irrelevant for this test; only the id drives selection.
            return strcmp(id, "single_frame")
                ? fMultiFrameAsset
                : fSingleFrameAsset;
        }

        const sk_sp<skresources::ImageAsset> fSingleFrameAsset,
                                             fMultiFrameAsset;
    };

    // Builds a 100x100, 10fps, 100-frame animation with two image layers:
    // one referencing the "single_frame" asset, one the "multi_frame" asset.
    auto make_animation = [&reporter] (sk_sp<skresources::ImageAsset> single_asset,
                                       sk_sp<skresources::ImageAsset> multi_asset,
                                       bool deferred_image_loading) {
        static constexpr char json[] = R"({
            "v": "5.2.1",
            "w": 100,
            "h": 100,
            "fr": 10,
            "ip": 0,
            "op": 100,
            "assets": [
                {
                    "id": "single_frame",
                    "p" : "single_frame.png",
                    "u" : "images/",
                    "w" : 500,
                    "h" : 500
                },
                {
                    "id": "multi_frame",
                    "p" : "multi_frame.png",
                    "u" : "images/",
                    "w" : 500,
                    "h" : 500
                }
            ],
            "layers": [
                {
                    "ty": 2,
                    "refId": "single_frame",
                    "ind": 0,
                    "ip": 0,
                    "op": 100,
                    "ks": {}
                },
                {
                    "ty": 2,
                    "refId": "multi_frame",
                    "ind": 1,
                    "ip": 0,
                    "op": 100,
                    "ks": {}
                }
            ]
        })";

        SkMemoryStream stream(json, strlen(json));

        const auto flags = deferred_image_loading
            ? static_cast<uint32_t>(skottie::Animation::Builder::kDeferImageLoading)
            : 0;
        auto animation =
            skottie::Animation::Builder(flags)
                .setResourceProvider(sk_make_sp<TestResourceProvider>(std::move(single_asset),
                                                                      std::move( multi_asset)))
                .make(&stream);

        REPORTER_ASSERT(reporter, animation);

        return animation;
    };

    // Records every frame time requested via getFrame(), so the test can
    // observe when (and how often) each asset is actually sampled.
    class TestAsset final : public skresources::ImageAsset {
    public:
        explicit TestAsset(bool multi_frame) : fMultiFrame(multi_frame) {}

        const std::vector<float>& requestedFrames() const { return fRequestedFrames; }

    private:
        bool isMultiFrame() override { return fMultiFrame; }

        sk_sp<SkImage> getFrame(float t) override {
            fRequestedFrames.push_back(t);
            return SkSurface::MakeRasterN32Premul(10, 10)->makeImageSnapshot();
        }

        const bool fMultiFrame;
        std::vector<float> fRequestedFrames;
    };

    {
        auto single_asset = sk_make_sp<TestAsset>(false),
             multi_asset  = sk_make_sp<TestAsset>(true);

        // Default image loading: single-frame images are loaded upfront, multi-frame images are
        // loaded on-demand.
        auto animation = make_animation(single_asset, multi_asset, false);

        // The single-frame asset was sampled exactly once, at t ~= 0, during build.
        REPORTER_ASSERT(reporter, single_asset->requestedFrames().size() == 1);
        REPORTER_ASSERT(reporter, multi_asset->requestedFrames().size() == 0);
        REPORTER_ASSERT(reporter, SkScalarNearlyZero(single_asset->requestedFrames()[0]));

        // Each seek re-samples only the multi-frame asset.
        animation->seekFrameTime(1);
        REPORTER_ASSERT(reporter, single_asset->requestedFrames().size() == 1);
        REPORTER_ASSERT(reporter, multi_asset->requestedFrames().size() == 1);
        REPORTER_ASSERT(reporter, SkScalarNearlyEqual(multi_asset->requestedFrames()[0], 1));

        animation->seekFrameTime(2);
        REPORTER_ASSERT(reporter, single_asset->requestedFrames().size() == 1);
        REPORTER_ASSERT(reporter, multi_asset->requestedFrames().size() == 2);
        REPORTER_ASSERT(reporter, SkScalarNearlyEqual(multi_asset->requestedFrames()[1], 2));
    }

    {
        auto single_asset = sk_make_sp<TestAsset>(false),
             multi_asset  = sk_make_sp<TestAsset>(true);

        // Deferred image loading: both single-frame and multi-frame images are loaded on-demand.
        auto animation = make_animation(single_asset, multi_asset, true);

        // Nothing is sampled at build time.
        REPORTER_ASSERT(reporter, single_asset->requestedFrames().size() == 0);
        REPORTER_ASSERT(reporter, multi_asset->requestedFrames().size() == 0);

        // First seek samples both assets; subsequent seeks only the multi-frame one.
        animation->seekFrameTime(1);
        REPORTER_ASSERT(reporter, single_asset->requestedFrames().size() == 1);
        REPORTER_ASSERT(reporter, multi_asset->requestedFrames().size() == 1);
        REPORTER_ASSERT(reporter, SkScalarNearlyEqual(single_asset->requestedFrames()[0], 1));
        REPORTER_ASSERT(reporter, SkScalarNearlyEqual (multi_asset->requestedFrames()[0], 1));

        animation->seekFrameTime(2);
        REPORTER_ASSERT(reporter, single_asset->requestedFrames().size() == 1);
        REPORTER_ASSERT(reporter, multi_asset->requestedFrames().size() == 2);
        REPORTER_ASSERT(reporter, SkScalarNearlyEqual(multi_asset->requestedFrames()[1], 2));
    }
}
|
# Resolve build dependencies for Swift.
apt-get update
apt-get install -y git cmake ninja-build clang python uuid-dev libicu-dev icu-devtools libbsd-dev libedit-dev libxml2-dev libsqlite3-dev swig libpython-dev libncurses5-dev pkg-config curl

# Provision for the vagrant user: this script runs as root, so HOME must be
# pointed at the target user's home explicitly.
HOME=/home/vagrant
# Bug fix: the original `export $HOME` expanded to `export /home/vagrant`,
# which is not a valid identifier and fails; export the NAME, not the value.
export HOME

# Clone swiftenv.
git clone https://github.com/kylef/swiftenv.git "$HOME/.swiftenv"

# Shell startup configuration for swiftenv (single-quoted so the variables
# are expanded at login time, not now).
echo 'export SWIFTENV_ROOT="$HOME/.swiftenv"' >> "$HOME/.bash_profile"
echo 'export PATH="$SWIFTENV_ROOT/bin:$PATH"' >> "$HOME/.bash_profile"
echo 'eval "$(swiftenv init -)"' >> "$HOME/.bash_profile"

# Install the Swift toolchain.
"$HOME/.swiftenv/bin/swiftenv" install DEVELOPMENT-SNAPSHOT-2016-06-06-a

# Make the installation accessible to all users (options before the path
# for portability with non-GNU chmod).
chmod -R 777 "$HOME/.swiftenv"
|
<gh_stars>10-100
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.benchmark
import scala.collection.mutable.ListBuffer
import org.apache.spark.benchmark.Benchmark
import org.apache.spark.sql.Column
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
/**
* Synthetic benchmark for interval functions.
* To run this benchmark:
* {{{
* 1. without sbt:
* bin/spark-submit --class <this class> --jars <spark core test jar> <sql core test jar>
* 2. build/sbt "sql/test:runMain <this class>"
* 3. generate result:
* SPARK_GENERATE_BENCHMARK_FILES=1 build/sbt "sql/test:runMain <this class>"
* Results will be written to "benchmarks/IntervalBenchmark-results.txt".
* }}}
*/
object IntervalBenchmark extends SqlBasedBenchmark {
  import spark.implicits._

  /**
   * Runs the given projection over a single-partition range of `cardinality`
   * rows with whole-stage codegen enabled, discarding every result row.
   */
  private def doBenchmark(cardinality: Long, exprs: Column*): Unit = {
    withSQLConf(SQLConf.WHOLESTAGE_CODEGEN_ENABLED.key -> "true") {
      spark
        .range(0, cardinality, 1, 1)
        .select(exprs: _*)
        .queryExecution
        .toRdd
        .foreach(_ => ())
    }
  }

  /** Registers a named case (3 iterations) that evaluates `exprs` via doBenchmark. */
  private def addCase(
      benchmark: Benchmark,
      cardinality: Long,
      name: String,
      exprs: Column*): Unit = {
    benchmark.addCase(name, numIters = 3) { _ =>
      doBenchmark(cardinality, exprs: _*)
    }
  }

  /**
   * Builds an interval-like string column of the shape
   * "[interval] <id % 10000> years <units...>", optionally prefixed with the
   * literal word "interval" and extended with the given unit fragments.
   */
  private def buildString(withPrefix: Boolean, units: Seq[String] = Seq.empty): Column = {
    val init = lit(if (withPrefix) "interval" else "") ::
      ($"id" % 10000).cast("string") ::
      lit("years") :: Nil
    concat_ws(" ", (init ++ units.map(lit)): _*)
  }

  /**
   * Registers two cases (with and without the "interval" prefix) casting the
   * built string to an interval. The string column is built eagerly here, so
   * the case is unaffected by later mutation of a caller-owned `units` buffer.
   */
  private def addCase(benchmark: Benchmark, cardinality: Long, units: Seq[String]): Unit = {
    Seq(true, false).foreach { withPrefix =>
      val expr = buildString(withPrefix, units).cast("interval")
      val note = if (withPrefix) "w/ interval" else "w/o interval"
      benchmark.addCase(s"${units.length + 1} units $note", numIters = 3) { _ =>
        doBenchmark(cardinality, expr)
      }
    }
  }

  override def runBenchmarkSuite(mainArgs: Array[String]): Unit = {
    val N = 1000000
    val timeUnits = Seq(
      "13 months", " 1 months",
      "100 weeks", "9 days", "12 hours", "- 3 hours",
      "5 minutes", "45 seconds", "123 milliseconds", "567 microseconds")
    val intervalToTest = ListBuffer[String]()

    val benchmark = new Benchmark("cast strings to intervals", N, output = output)
    // The first 2 cases are used to show the overhead of preparing the interval string.
    addCase(benchmark, N, "prepare string w/ interval", buildString(true, timeUnits))
    addCase(benchmark, N, "prepare string w/o interval", buildString(false, timeUnits))
    addCase(benchmark, N, intervalToTest) // Only years
    for (unit <- timeUnits) {
      // Grow the unit list one entry at a time to measure per-unit parsing cost.
      intervalToTest.append(unit)
      addCase(benchmark, N, intervalToTest)
    }
    benchmark.run()
  }
}
|
package api
import (
"net/http"
"github.com/labstack/echo/v4"
)
// registers collects route-registration callbacks that are applied to an
// Echo instance when HTTPHandler.Register is invoked.
var registers []func(e *echo.Echo, h *HTTPHandler)

func init() {
	registers = append(registers, func(e *echo.Echo, h *HTTPHandler) {
		// Serve the contents of the ./static directory at the web root.
		staticServer := http.FileServer(http.Dir("static"))
		wrapped := echo.WrapHandler(staticServer)
		e.GET("/", wrapped)
		e.GET("/*", wrapped)
	})
}
// HTTPHandler is the receiver handed to every registered route callback.
type HTTPHandler struct {
}

// NewHTTPHandler constructs an empty HTTPHandler.
func NewHTTPHandler() *HTTPHandler {
	return new(HTTPHandler)
}
// Register applies every callback collected in init() to the given Echo
// instance, wiring up all routes known to this package.
func (h *HTTPHandler) Register(e *echo.Echo) {
	for i := range registers {
		registers[i](e, h)
	}
}
|
#!/usr/bin/env bash
# ---------------------------------------------------
# Paste the commands below into the command prompt on the new server.
# The server will need openssh-server installed. You can connect with username and password.
# Use PuTTY or some ssh client that you can paste text into.
# Other clients are mRemote and the Windows ssh client in the command prompt. There are many others.
# Use FileZilla - connection over ssh - to put files on the server.
# To edit files on the server, right click on a file and select edit.
# If you need to edit files as root, copy them to a writable folder and then
# copy them back after editing, or just use sed or nano to edit the files.
# Another option is Cyberduck.
# ---------------------------------------------------
## Step 1 as user albe - settings.
# Prime sudo's credential cache now. This avoids sending pasted characters to the
# password prompt, which leads to them missing when they are needed.
# The way I paste commands, it usually has extra characters, like new lines, which
# can end up in the password prompt; I need to backspace before entering my password.
sudo ls
# ---------------------------------------------------
|
<reponame>hanyueqiang/actionview-fe
import { asyncFuncCreator } from '../utils';
// Fetch the list of workflow states for the project identified by `key`.
export function index(key) {
  const url = `/project/${key}/state`;
  return asyncFuncCreator({
    constant: 'STATE_INDEX',
    promise: (client) => client.request({ url })
  });
}
// Create a new state under the project identified by `key`.
export function create(key, values) {
  return asyncFuncCreator({
    constant: 'STATE_CREATE',
    promise: (client) =>
      client.request({ url: `/project/${key}/state`, method: 'post', data: values })
  });
}
// Update an existing state; `values.id` selects which state to modify.
export function update(key, values) {
  return asyncFuncCreator({
    constant: 'STATE_UPDATE',
    promise: (client) =>
      client.request({ url: `/project/${key}/state/${values.id}`, method: 'put', data: values })
  });
}
// Plain synchronous action: mark the state with this id as selected.
export function select(id) {
  return { type: 'STATE_SELECT', id };
}
// Delete the state with the given id from the project identified by `key`.
// The id is included in the action so reducers can drop the entry locally.
export function del(key, id) {
  return asyncFuncCreator({
    constant: 'STATE_DELETE',
    id,
    promise: (client) =>
      client.request({ url: `/project/${key}/state/${id}`, method: 'delete' })
  });
}
|
<filename>test/buffer.js
/*global global, testSuite, Buffer*/
// Exercises the Buffer shim: construction from strings, raw byte get/set,
// slicing semantics (including negative indices), partial writes, and
// hex/base64 encode-decode round trips.
testSuite('buffer', function(assert) {
  testSuite('strings', function() {
    var b = new Buffer('sa');
    assert('string without encoding specified', b.toString() === 'sa');
    b = new Buffer('sa', 'utf8');
    assert('string with utf8', b.toString() === 'sa');
    assert('string with utf8 length', b.length === 2);
    // Multi-byte characters: length counts encoded bytes, not characters.
    b = new Buffer('↗Զ');
    assert('string with utf8 extended', b.toString() === '↗Զ');
    assert('string with utf8 extended length', b.length === 5);
  });
  testSuite('binary', function() {
    var b = new Buffer(2);
    assert('length', b.length === 2);
    assert('index access', b.get(1) === 0);
    // Out-of-range offsets must throw rather than return undefined.
    assert.shouldThrow('get invalid offset', function() {
      b.get(2);
    });
    b.set(0, 97);
    b.set(1, 98);
    assert('can set bytes', b.toString() === 'ab');
    assert.shouldThrow('set invalid offset', function() {
      b.set(-1, 0);
    });
  });
  testSuite('slice', function() {
    var b = new Buffer('abcdefghi');
    assert('one char slice', b.slice(0, 1).toString() === 'a');
    assert('middle slice', b.slice(1, 3).toString() === 'bc');
    assert('end slice', b.slice(2).toString() === 'cdefghi');
    // Negative indices count from the end, as in Node's Buffer.slice.
    assert('neg start', b.slice(-2).toString() === 'hi');
    assert('neg start, pos end', b.slice(-2, 8).toString() === 'h');
    assert('neg start, neg end', b.slice(-2, -1).toString() === 'h');
    assert('pos start, neg end', b.slice(1, -1).toString() === 'bcdefgh');
  });
  testSuite('write', function() {
    var b = new Buffer('abcdef');
    // Argument order exercised here: (string, encoding, offset[, length]).
    b.write('xx', 'ascii', 2);
    assert('write all', b.toString() === 'abxxef');
    b.write('yyzz', 'ascii', 2, 2);
    assert('write some', b.toString() === 'abyyef');
    // Encoding may be omitted: (string, offset[, length]).
    b.write('__', 0);
    assert('write beginning', b.toString() === '__yyef');
    b.write('---', 4, 2);
    assert('write end', b.toString() === '__yy--');
  });
  testSuite('encode hex', function() {
    var encode = function(str) {
      return new Buffer(str).toString('hex');
    };
    assert('[empty]', encode('') === '');
    assert('e1', encode('abc') === '616263');
    assert('e2', encode('\x00') === '00');
  });
  testSuite('decode hex', function() {
    var decode = function(str) {
      return new Buffer(str, 'hex').toString('ascii');
    };
    assert('[empty]', decode('') === '');
    assert('d1', decode('616263') === 'abc');
    assert('d2', decode('00') === '\x00');
  });
  testSuite('encode base64', function() {
    var encode = function(str) {
      return new Buffer(str, 'ascii').toString('base64');
    };
    // Cases cover all three padding variants (==, =, none).
    assert('[empty]', encode('') === '');
    assert('e1', encode('f') === 'Zg==');
    assert('e2', encode('fo') === 'Zm8=');
    assert('e3', encode('foo') === 'Zm9v');
    assert('e4', encode('quux') === 'cXV1eA==');
    assert('e5', encode('!"#$%') === 'ISIjJCU=');
    assert('e6', encode("&'()*+") === 'JicoKSor');
    assert('e7', encode(',-./012') === 'LC0uLzAxMg==');
    assert('e8', encode('3456789:') === 'MzQ1Njc4OTo=');
    assert('e9', encode(';<=>?@ABC') === 'Ozw9Pj9AQUJD');
    assert('e10', encode('DEFGHIJKLM') === 'REVGR0hJSktMTQ==');
    assert('e11', encode('NOPQRSTUVWX') === 'Tk9QUVJTVFVWV1g=');
    assert('e12', encode('YZ[\\]^_`abc') === 'WVpbXF1eX2BhYmM=');
    assert('e13', encode('defghijklmnop') === 'ZGVmZ2hpamtsbW5vcA==');
    assert('e14', encode('qrstuvwxyz{|}~') === 'cXJzdHV2d3h5ent8fX4=');
    //assert.shouldThrow('cannot encode non-ASCII input', function() {
    //  encode('✈');
    //});
  });
  testSuite('decode base64', function() {
    var decode = function(str) {
      return new Buffer(str, 'base64').toString('ascii');
    };
    assert('[empty]', decode('') === '');
    assert('d1', decode('Zg==') === 'f');
    assert('d2', decode('Zm8=') === 'fo');
    assert('d3', decode('Zm9v') === 'foo');
    assert('d4', decode('cXV1eA==') === 'quux');
    assert('d5', decode('ISIjJCU=') === '!"#$%');
    assert('d6', decode('JicoKSor') === "&'()*+");
    assert('d7', decode('LC0uLzAxMg==') === ',-./012');
    assert('d8', decode('MzQ1Njc4OTo=') === '3456789:');
    assert('d9', decode('Ozw9Pj9AQUJD') === ';<=>?@ABC');
    assert('d10', decode('REVGR0hJSktMTQ==') === 'DEFGHIJKLM');
    assert('d11', decode('Tk9QUVJTVFVWV1g=') === 'NOPQRSTUVWX');
    assert('d12', decode('WVpbXF1eX2BhYmM=') === 'YZ[\\]^_`abc');
    assert('d13', decode('ZGVmZ2hpamtsbW5vcA==') === 'defghijklmnop');
    assert('d14', decode('cXJzdHV2d3h5ent8fX4=') === 'qrstuvwxyz{|}~');
    //assert.shouldThrow('cannot decode invalid input', function() {
    //  decode('a');
    //});
  });
  // Regression check: input bytes containing 0x00 must encode correctly.
  //strange bug where this would equal 2MQ=VPJb
  assert('base64 special case', new Buffer('d8c40054f25b', 'hex').toString('base64') ==='2MQAVPJb');
});
|
<reponame>montmanu/env-ci
import test from 'ava';
import git from '../../services/git';
import {gitRepo, gitCommit} from '../helpers/git-utils';
// Verifies that the git service reads the HEAD commit sha and branch name
// from a freshly initialised local repository (helpers create a temp repo).
test('Return "commit" and "branch" from local repository', async t => {
  const {cwd} = await gitRepo();
  // gitCommit resolves with the sha of the commit it just created.
  const commit = await gitCommit('Test commit message', {cwd});
  t.deepEqual(git.configuration({cwd}), {commit, branch: 'master'});
});
|
def insertion_sort(A):
    """Sort the list A in place, in ascending order, via insertion sort.

    Stable, O(n^2) worst case; efficient for small or nearly-sorted inputs.
    """
    for idx in range(1, len(A)):
        key = A[idx]
        # Shift every larger element one slot right, then drop the key in.
        pos = idx
        while pos > 0 and A[pos - 1] > key:
            A[pos] = A[pos - 1]
            pos -= 1
        A[pos] = key
# Quick demo: sort a sample list in place and print the result.
A = [4, 1, 3, 2]
insertion_sort(A)
print(A)
|
package tv.twitch.android.shared.ui.menus.core;
/** Base type for entries rendered by the shared menu UI. */
public abstract class MenuModel {
    /** Marker subtype for menus consisting of a single item. */
    public static abstract class SingleItemMenu extends MenuModel {
    }
}
|
import React from "react";
import {
StyleSheet,
View,
Text,
Animated,
TouchableOpacity,
Platform,
} from "react-native";
import MapView, { Marker } from 'react-native-maps';
import { connect } from "react-redux";
class HomeScreen extends React.Component {
state = {
distance: 0,
duration: 0,
pace: 0,
animValue: new Animated.Value(0),
};
startTracking = () => {
// start tracking code
};
stopTracking = () => {
// stop tracking code
};
render() {
return (
<View style={styles.container}>
<MapView
style={styles.map}
region={this.props.currentRegion}
>
{this.props.locations.map((loc, i) => (
<Marker
coordinate={loc.coords}
key={i}
/>
))}
</MapView>
<View style={styles.infoContainer}>
<Animated.Text style={styles.text}>
Distance: {this.state.distance}
</Animated.Text>
{/* Other texts go here */}
</View>
<View style={styles.buttonContainer}>
{this.state.isTracking ? (
<TouchableOpacity
onPress={this.stopTracking}
style={[styles.button, { backgroundColor: "#ff3b30" }]}
>
<Text>Stop</Text>
</TouchableOpacity>
) : (
<TouchableOpacity
onPress={this.startTracking}
style={[styles.button, { backgroundColor: "#4cd964" }]}
>
<Text>Start</Text>
</TouchableOpacity>
)}
</View>
</View>
);
}
}
const styles = StyleSheet.create({
container: {
flex: 1,
justifyContent: "center",
backgroundColor: "white",
alignItems: "center",
},
map:{
...StyleSheet.absoluteFillObject,
},
infoContainer:{
...StyleSheet.absoluteFillObject,
},
text:{
fontSize: 20,
padding: 10,
},
buttonContainer:{
position: 'absolute',
bottom: 10,
left: 10,
},
button:{
padding: 10,
paddingHorizontal: 20,
},
});
const mapStateToProps = (state) => ({
currentRegion: state.regionReducer.currentRegion,
locations: state.regionReducer.locations,
});
export default connect(mapStateToProps)(HomeScreen);
|
#!/usr/bin/env python
'''
Use processes and Netmiko to connect to each of the devices in the database.
Execute 'show version' on each device. Record the amount of time required to do this.
DISCLAIMER NOTE: Solution is limited to the exercise's scope
'''
from net_system.models import NetworkDevice
import django
from multiprocessing import Process
from termcolor import colored
from datetime import datetime
from netmiko import ConnectHandler
def sh_ver(a_device):
    # Open an SSH session to the device with Netmiko, using the credentials
    # object attached to the NetworkDevice record.
    creds = a_device.credentials
    rem_conn_ssh = ConnectHandler(device_type=a_device.device_type, ip=a_device.ip_address, username=creds.username,
                                  password=creds.password, port=a_device.port, secret='')
    # Run 'show version' and print the device output in green
    # (Python 2 print statement; this script targets Python 2).
    output = rem_conn_ssh.send_command_expect("show version")
    print "\n <<--------------------------->> \n "+ colored(output, 'green') + "\n"
def main():
    # Connect to every device in the database and run 'show version' on each,
    # using one OS process per device; report total wall-clock runtime.
    django.setup()
    # Record start time
    process = []
    start_time = datetime.now()
    pylab_devices = NetworkDevice.objects.all()
    for a_device in pylab_devices:
        # Create a PROCESS for each device connection/cmd
        node_process = Process(target=sh_ver, args=(a_device,))
        # Start the process (multiprocessing, not threading, despite the
        # thread-like API)
        node_process.start()
        process.append(node_process)
    for any_process in process:
        print "Notice: " + colored(any_process, 'red')
        # Block until each child finishes so the runtime below is accurate
        any_process.join()
    # Function sh_ver runtime calculation
    runtime = datetime.now() - start_time
    print "This operation required " + colored(runtime, 'blue')
# Standard script entry point.
if __name__ == "__main__":
    main()
|
var chai = require('chai');
var expect = chai.expect;
var sinon = require('sinon');
var sinonChai = require('sinon-chai');
var queryUtil = require('../server/customUtils/queryUtil.js');
var loginApiManager = require('../server/ApiManager/loginApiManager.js');
var defaultApiManager = require('../server/ApiManager/defaultApiManager.js');
// Shared holder for the stubs so the after() hook can restore them.
var sandbox = {};

chai.use(sinonChai);

// Verifies that loginApiManager.handler delegates correctly: it must append
// the request params to the query (queryUtil.addParamsToQuery) and then hand
// off to the default handler (defaultApiManager.handler).
describe("Login Api Manager Testing",function(){
  it("Check if the addParamsToQuery and default.handler is called", function(){
    // NOTE(review): the .called expectations run inside the stub bodies, so
    // they only execute if the stub is invoked at all — a never-called stub
    // would silently pass. Confirm this matches the intent.
    sandbox.addParamsToQueryStub = sinon.stub(queryUtil,"addParamsToQuery",function(query,params){
      expect(sandbox.addParamsToQueryStub.called).to.equal(true);
    });
    sandbox.defaultHandlerStub = sinon.stub(defaultApiManager,"handler",function(){
      expect(sandbox.defaultHandlerStub.called).to.equal(true);
    });
    loginApiManager.handler("query","params","req","res");
  });
  after(function(){
    // Restore the real implementations so other test files see them unstubbed.
    sandbox.addParamsToQueryStub.restore();
    sandbox.defaultHandlerStub.restore();
  });
});
|
# author:pengrk
# email:546711211@qq.com
# qq group:573283836
# Push the DNS zone configuration to the DNS host (requires SSH access as root).
scp 01dns.yaml root@10.1.12.20:/dns/dns.yaml
|
<gh_stars>10-100
package io.opensphere.csvcommon.detect.datetime.algorithm;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.text.ParseException;
import java.util.List;
import org.easymock.EasyMock;
import org.easymock.EasyMockSupport;
import org.junit.Test;
import io.opensphere.core.common.configuration.date.DateFormat;
import io.opensphere.core.common.configuration.date.DateFormatsConfig;
import io.opensphere.core.common.configuration.date.DateFormat.Type;
import io.opensphere.core.preferences.ClasspathPreferencesPersistenceManager;
import io.opensphere.core.preferences.InternalPreferencesIF;
import io.opensphere.core.util.DateTimeFormats;
import io.opensphere.core.util.collections.New;
import io.opensphere.csvcommon.common.CellSampler;
import io.opensphere.csvcommon.common.datetime.ConfigurationProvider;
import io.opensphere.csvcommon.common.datetime.DateColumnResults;
import io.opensphere.csvcommon.detect.ValueWithConfidence;
import io.opensphere.csvcommon.detect.datetime.algorithm.DateTimeFinder;
import io.opensphere.csvcommon.detect.datetime.util.DateDataGenerator;
import io.opensphere.mantle.util.MantleConstants;
/**
* Tests the DateTimeFinder class.
*
*/
@SuppressWarnings("PMD.GodClass")
public class DateTimeFinderTest
{
/**
* Tests the ability to detect a datetime column and a time column.
*/
@Test
public void testDateTimeAndDownTime()
{
    DateFormatsConfig configuration = getDateFormats();

    // Up-time column: a full date-time pattern...
    DateFormat dateFormat = new DateFormat();
    dateFormat.setSdf("yyyy-M-d HH:mm:ss");

    // ...down-time column: a time-only "zulu" pattern.
    DateFormat timeFormat = new DateFormat();
    timeFormat.setSdf("'z'HHmmss.SS");

    EasyMockSupport support = new EasyMockSupport();

    List<List<String>> data = DateDataGenerator.generateSingleCompoundDate(dateFormat, timeFormat);

    ConfigurationProvider provider = createConfigurationProvider(support, configuration);
    CellSampler sampler = createSampler(support, data);

    support.replayAll();

    DateTimeFinder finder = new DateTimeFinder(provider);
    ValueWithConfidence<DateColumnResults> value = finder.findDates(sampler);

    // Column 0 should be the timestamp (up-time) column; the finder may
    // normalise the pattern to two-digit month/day fields.
    assertEquals(Type.TIMESTAMP, value.getValue().getUpTimeColumn().getDateColumnType());
    assertTrue(dateFormat.getSdf().equals(value.getValue().getUpTimeColumn().getPrimaryColumnFormat())
            || "yyyy-MM-dd HH:mm:ss".equals(value.getValue().getUpTimeColumn().getPrimaryColumnFormat()));
    assertEquals(0, value.getValue().getUpTimeColumn().getPrimaryColumnIndex());
    assertNull(value.getValue().getUpTimeColumn().getSecondaryColumnFormat());
    assertEquals(-1, value.getValue().getUpTimeColumn().getSecondaryColumnIndex());

    // Column 1 should be a time-only down-time column with no secondary column.
    assertEquals(Type.TIME, value.getValue().getDownTimeColumn().getDateColumnType());
    assertEquals(timeFormat.getSdf(), value.getValue().getDownTimeColumn().getPrimaryColumnFormat());
    assertEquals(1, value.getValue().getDownTimeColumn().getPrimaryColumnIndex());
    assertNull(value.getValue().getDownTimeColumn().getSecondaryColumnFormat());
    assertEquals(-1, value.getValue().getDownTimeColumn().getSecondaryColumnIndex());

    support.verifyAll();
}
/**
* Tests one day column and two time columns that share the same day column.
*
* @throws ParseException Bad parse.
*/
@Test
public void testDayTimeUpTimeDown() throws ParseException
{
    DateFormatsConfig configuration = getDateFormats();

    // Cross-product of every known date format with every pair of time
    // formats: column 1 holds the shared day, columns 2 and 4 the times.
    List<DateFormat> dateFormats1 = getFormats(configuration, Type.DATE);
    List<DateFormat> timeFormats1 = getFormats(configuration, Type.TIME);
    List<DateFormat> timeFormats2 = getFormats(configuration, Type.TIME);

    for (DateFormat dateFormat1 : dateFormats1)
    {
        for (DateFormat timeFormat1 : timeFormats1)
        {
            for (DateFormat timeFormat2 : timeFormats2)
            {
                EasyMockSupport support = new EasyMockSupport();

                List<List<String>> data = DateDataGenerator.generateDayTimeUpTimeDown(dateFormat1, timeFormat1, timeFormat2);

                ConfigurationProvider provider = createConfigurationProvider(support, configuration);
                CellSampler sampler = createSampler(support, data);

                support.replayAll();

                DateTimeFinder finder = new DateTimeFinder(provider);
                ValueWithConfidence<DateColumnResults> value = finder.findDates(sampler);

                // Up time: day column 1 combined with time column 2.
                assertEquals(Type.TIMESTAMP, value.getValue().getUpTimeColumn().getDateColumnType());
                assertEquals(dateFormat1.getSdf(), value.getValue().getUpTimeColumn().getPrimaryColumnFormat());
                assertEquals(1, value.getValue().getUpTimeColumn().getPrimaryColumnIndex());
                assertEquals(timeFormat1.getSdf(), value.getValue().getUpTimeColumn().getSecondaryColumnFormat());
                assertEquals(2, value.getValue().getUpTimeColumn().getSecondaryColumnIndex());

                // Down time: the same day column 1 combined with time column 4.
                assertEquals(Type.TIMESTAMP, value.getValue().getDownTimeColumn().getDateColumnType());
                assertEquals(dateFormat1.getSdf(), value.getValue().getDownTimeColumn().getPrimaryColumnFormat());
                assertEquals(1, value.getValue().getDownTimeColumn().getPrimaryColumnIndex());
                assertEquals(timeFormat2.getSdf(), value.getValue().getDownTimeColumn().getSecondaryColumnFormat());
                assertEquals(4, value.getValue().getDownTimeColumn().getSecondaryColumnIndex());

                support.verifyAll();
            }
        }
    }
}
/**
* Tests excluding a column based on its name.
*/
@Test
public void testExcludeColumn()
{
    EasyMockSupport support = new EasyMockSupport();

    DateFormatsConfig configuration = getDateFormats();

    List<List<String>> data = DateDataGenerator.generateSingleDateLotsOfDecimalSeconds();

    ConfigurationProvider provider = createConfigurationProvider(support, configuration);
    // Header-aware sampler: the column name matches the provider's exclude
    // list ("mod" — see createConfigurationProvider), so it must be skipped.
    CellSampler sampler = createSamplerWithHeaderCells(support, data);

    support.replayAll();

    DateTimeFinder finder = new DateTimeFinder(provider);
    ValueWithConfidence<DateColumnResults> value = finder.findDates(sampler);

    // No date column should be detected at all.
    assertNull(value.getValue().getUpTimeColumn());

    support.verifyAll();
}
/**
* Tests the HHmmss format.
*/
@Test
public void testHHmmss()
{
    EasyMockSupport support = new EasyMockSupport();

    DateFormatsConfig configuration = getDateFormats();

    List<List<String>> data = DateDataGenerator.generateHHmmss();

    ConfigurationProvider provider = createConfigurationProvider(support, configuration);
    CellSampler sampler = createSampler(support, data);

    support.replayAll();

    DateTimeFinder finder = new DateTimeFinder(provider);
    ValueWithConfidence<DateColumnResults> value = finder.findDates(sampler);

    // Columns 8 (yyyyMMdd) and 9 (HHmmss) combine into one timestamp;
    // no down-time column is expected.
    assertEquals(Type.TIMESTAMP, value.getValue().getUpTimeColumn().getDateColumnType());
    assertEquals("yyyyMMdd", value.getValue().getUpTimeColumn().getPrimaryColumnFormat());
    assertEquals(8, value.getValue().getUpTimeColumn().getPrimaryColumnIndex());
    assertEquals("HHmmss", value.getValue().getUpTimeColumn().getSecondaryColumnFormat());
    assertEquals(9, value.getValue().getUpTimeColumn().getSecondaryColumnIndex());
    assertNull(value.getValue().getDownTimeColumn());

    support.verifyAll();
}
/**
* Tests a single date consisting of two columns.
*/
@Test
public void testSingleCompoundDate()
{
    DateFormatsConfig configuration = getDateFormats();

    // Every known date format paired with every known time format.
    List<DateFormat> dateFormats = getFormats(configuration, Type.DATE);
    List<DateFormat> timeFormats = getFormats(configuration, Type.TIME);

    for (DateFormat dateFormat : dateFormats)
    {
        for (DateFormat timeFormat : timeFormats)
        {
            EasyMockSupport support = new EasyMockSupport();

            List<List<String>> data = DateDataGenerator.generateSingleCompoundDate(dateFormat, timeFormat);

            ConfigurationProvider provider = createConfigurationProvider(support, configuration);
            CellSampler sampler = createSampler(support, data);

            support.replayAll();

            DateTimeFinder finder = new DateTimeFinder(provider);
            ValueWithConfidence<DateColumnResults> value = finder.findDates(sampler);

            // Columns 0 and 1 must merge into a single up-time timestamp.
            assertEquals(Type.TIMESTAMP, value.getValue().getUpTimeColumn().getDateColumnType());
            assertEquals(0, value.getValue().getUpTimeColumn().getPrimaryColumnIndex());
            assertEquals(1, value.getValue().getUpTimeColumn().getSecondaryColumnIndex());
            assertNull(value.getValue().getDownTimeColumn());

            support.verifyAll();
        }
    }
}
/**
* Tests all known formats for a single date column.
*/
@Test
public void testSingleDate()
{
    DateFormatsConfig configuration = getDateFormats();

    List<DateFormat> formats = getFormats(configuration, Type.DATE);

    for (DateFormat format : formats)
    {
        EasyMockSupport support = new EasyMockSupport();

        List<List<String>> data = DateDataGenerator.generateSingleDate(format);

        ConfigurationProvider provider = createConfigurationProvider(support, configuration);
        CellSampler sampler = createSampler(support, data);

        support.replayAll();

        DateTimeFinder finder = new DateTimeFinder(provider);
        ValueWithConfidence<DateColumnResults> value = finder.findDates(sampler);

        // Only formats that carry a year (or are plain DATE formats) are
        // expected to be detected; year-less timestamps should be rejected.
        if (format.getSdf().contains("y") && format.getType() == Type.TIMESTAMP || format.getType() == Type.DATE)
        {
            assertNotNull("Failed on format " + format.getSdf(), value.getValue().getUpTimeColumn());
            assertEquals(format.getType(), value.getValue().getUpTimeColumn().getDateColumnType());
            assertEquals(1, value.getValue().getUpTimeColumn().getPrimaryColumnIndex());
            assertEquals(-1, value.getValue().getUpTimeColumn().getSecondaryColumnIndex());
            assertNull(value.getValue().getDownTimeColumn());
        }
        else
        {
            assertNull(value.getValue().getUpTimeColumn());
        }

        support.verifyAll();
    }
}
/**
* Tests all known formats for a single date column whose format matches an
* empty pattern.
*/
@Test
public void testSingleDateEmptyPattern()
{
    EasyMockSupport support = new EasyMockSupport();

    // Start from an empty config so only the single format added below exists.
    DateFormatsConfig configuration = new DateFormatsConfig();

    DateFormat emptyFormat = new DateFormat();
    emptyFormat.setType(Type.TIMESTAMP);
    emptyFormat.setSdf("yyyyMMdd HHmmss");

    configuration.getFormats().add(emptyFormat);

    for (DateFormat format : configuration.getFormats())
    {
        List<List<String>> data = DateDataGenerator.generateSingleDate(format);

        ConfigurationProvider provider = createConfigurationProvider(support, configuration);
        CellSampler sampler = createSampler(support, data);

        support.replayAll();

        DateTimeFinder finder = new DateTimeFinder(provider);
        ValueWithConfidence<DateColumnResults> value = finder.findDates(sampler);

        // Same detection rule as testSingleDate: year-bearing timestamps
        // and plain dates are detected, anything else is rejected.
        if (format.getSdf().contains("y") && format.getType() == Type.TIMESTAMP || format.getType() == Type.DATE)
        {
            assertNotNull("Failed on format " + format.getSdf(), value.getValue().getUpTimeColumn());
            assertEquals(format.getType(), value.getValue().getUpTimeColumn().getDateColumnType());
            assertEquals(1, value.getValue().getUpTimeColumn().getPrimaryColumnIndex());
            assertEquals(-1, value.getValue().getUpTimeColumn().getSecondaryColumnIndex());
            assertNull(value.getValue().getDownTimeColumn());
        }
        else
        {
            assertNull(value.getValue().getUpTimeColumn());
        }

        support.verifyAll();
    }
}
/**
* Tests a Single date with a very long decimal seconds string.
*/
@Test
public void testSingleDateLotsOfDecimalSeconds()
{
    EasyMockSupport support = new EasyMockSupport();

    DateFormatsConfig configuration = getDateFormats();

    List<List<String>> data = DateDataGenerator.generateSingleDateLotsOfDecimalSeconds();

    ConfigurationProvider provider = createConfigurationProvider(support, configuration);
    CellSampler sampler = createSampler(support, data);

    support.replayAll();

    DateTimeFinder finder = new DateTimeFinder(provider);
    ValueWithConfidence<DateColumnResults> value = finder.findDates(sampler);

    // Long fractional seconds must still resolve to the millisecond pattern.
    assertEquals(Type.TIMESTAMP, value.getValue().getUpTimeColumn().getDateColumnType());
    assertEquals("yyyy:M:d::HH:mm:ss.SSS", value.getValue().getUpTimeColumn().getPrimaryColumnFormat());
    assertEquals(1, value.getValue().getUpTimeColumn().getPrimaryColumnIndex());
    assertEquals(-1, value.getValue().getUpTimeColumn().getSecondaryColumnIndex());
    assertNull(value.getValue().getDownTimeColumn());

    support.verifyAll();
}
/**
* Tests the yyyyMMdd format.
*/
@Test
public void testSingleDateyyyyMMdd()
{
    EasyMockSupport support = new EasyMockSupport();

    DateFormatsConfig configuration = getDateFormats();

    List<List<String>> data = DateDataGenerator.generateSingleDateyyyyMMdd();

    ConfigurationProvider provider = createConfigurationProvider(support, configuration);
    CellSampler sampler = createSampler(support, data);

    support.replayAll();

    DateTimeFinder finder = new DateTimeFinder(provider);
    ValueWithConfidence<DateColumnResults> value = finder.findDates(sampler);

    // A bare yyyyMMdd column is a DATE (not TIMESTAMP) with no time part.
    assertEquals(Type.DATE, value.getValue().getUpTimeColumn().getDateColumnType());
    assertEquals("yyyyMMdd", value.getValue().getUpTimeColumn().getPrimaryColumnFormat());
    assertEquals(1, value.getValue().getUpTimeColumn().getPrimaryColumnIndex());
    assertEquals(-1, value.getValue().getUpTimeColumn().getSecondaryColumnIndex());
    assertNull(value.getValue().getDownTimeColumn());

    support.verifyAll();
}
/**
* Tests the yyyy-MM-dd HH:mm:ss format.
*/
@Test
public void testYearMonthDayTime()
{
    EasyMockSupport support = new EasyMockSupport();

    DateFormatsConfig configuration = getDateFormats();

    List<List<String>> data = DateDataGenerator.generateYearMonthDayTime();

    ConfigurationProvider provider = createConfigurationProvider(support, configuration);
    CellSampler sampler = createSampler(support, data);

    // Replay once, after all mocks exist. (The original also called
    // replayAll() earlier, before any mock had been created — a no-op.)
    support.replayAll();

    DateTimeFinder finder = new DateTimeFinder(provider);
    ValueWithConfidence<DateColumnResults> value = finder.findDates(sampler);

    // Column 0 should be detected as a self-contained timestamp.
    assertEquals(Type.TIMESTAMP, value.getValue().getUpTimeColumn().getDateColumnType());
    assertEquals(0, value.getValue().getUpTimeColumn().getPrimaryColumnIndex());
    assertEquals(-1, value.getValue().getUpTimeColumn().getSecondaryColumnIndex());
    assertNull(value.getValue().getDownTimeColumn());

    support.verifyAll();
}
/**
 * Tests the zHHmmss.SS format.
 */
@Test
public void testZ()
{
    EasyMockSupport support = new EasyMockSupport();
    DateFormatsConfig configuration = getDateFormats();
    // NOTE: a stray support.replayAll() used to be called here, before any
    // mock existed; mocks must be created first and replayed exactly once.
    DateFormat format = new DateFormat();
    format.setSdf("'z'HHmmss.SS");
    format.setType(Type.TIME);
    DateFormat dateFormat = new DateFormat();
    dateFormat.setSdf("yyyyMMdd");
    dateFormat.setType(Type.DATE);
    // Compound date: date in one column, z-prefixed time in the next.
    List<List<String>> data = DateDataGenerator.generateSingleCompoundDate(dateFormat, format);
    ConfigurationProvider provider = createConfigurationProvider(support, configuration);
    CellSampler sampler = createSampler(support, data);
    support.replayAll();
    DateTimeFinder finder = new DateTimeFinder(provider);
    ValueWithConfidence<DateColumnResults> value = finder.findDates(sampler);
    // The two columns should be combined into one TIMESTAMP result.
    assertEquals(Type.TIMESTAMP, value.getValue().getUpTimeColumn().getDateColumnType());
    assertEquals("yyyyMMdd", value.getValue().getUpTimeColumn().getPrimaryColumnFormat());
    assertEquals(0, value.getValue().getUpTimeColumn().getPrimaryColumnIndex());
    assertEquals("'z'HHmmss.SS", value.getValue().getUpTimeColumn().getSecondaryColumnFormat());
    assertEquals(1, value.getValue().getUpTimeColumn().getSecondaryColumnIndex());
    assertNull(value.getValue().getDownTimeColumn());
    support.verifyAll();
}
/**
 * Creates an easy mocked configuration provider.
 *
 * @param support The easy mock support object.
 * @param config The configuration to return.
 * @return the configuration provider.
 */
private ConfigurationProvider createConfigurationProvider(EasyMockSupport support, DateFormatsConfig config)
{
    ConfigurationProvider provider = support.createMock(ConfigurationProvider.class);
    provider.getDateFormats();
    // Chain andReturn/atLeastOnce on the same setter instead of issuing two
    // separate expectLastCall() calls for one recorded invocation.
    EasyMock.expectLastCall().andReturn(config).atLeastOnce();
    provider.getExcludeColumns();
    EasyMock.expectLastCall().andReturn(New.list("mod")).atLeastOnce();
    return provider;
}
/**
 * The cell sampler.
 *
 * @param support The easy mock support.
 * @param data The data for the cell sampler to return.
 * @return The cell sampler.
 */
private CellSampler createSampler(EasyMockSupport support, List<? extends List<? extends String>> data)
{
    CellSampler sampler = support.createMock(CellSampler.class);
    // The finder reads the sample rows from the sampler.
    sampler.getBeginningSampleCells();
    EasyMock.expectLastCall().andReturn(data);
    // This variant reports no header row.
    sampler.getHeaderCells();
    EasyMock.expectLastCall().andReturn(New.list());
    return sampler;
}
/**
 * The cell sampler.
 * <p>
 * Unlike {@code createSampler}, this variant synthesizes header cells from
 * the first data row: cells that look like non-date values ("N/A" or "-")
 * get the header "Last Modified" (an excluded column name), all others get
 * a generic "column Name".
 *
 * @param support The easy mock support.
 * @param data The data for the cell sampler to return.
 * @return The cell sampler.
 */
private CellSampler createSamplerWithHeaderCells(EasyMockSupport support, List<? extends List<? extends String>> data)
{
    CellSampler sampler = support.createMock(CellSampler.class);
    sampler.getBeginningSampleCells();
    EasyMock.expectLastCall().andReturn(data);
    List<String> columnNames = New.list();
    if (!data.isEmpty())
    {
        // Derive one header per cell of the first sample row.
        List<? extends String> row = data.get(0);
        for (String cell : row)
        {
            if (cell.contains("N/A") || cell.contains("-"))
            {
                columnNames.add("Last Modified");
            }
            else
            {
                columnNames.add("column Name");
            }
        }
    }
    sampler.getHeaderCells();
    EasyMock.expectLastCall().andReturn(columnNames);
    return sampler;
}
/**
 * Gets the date formats.
 * <p>
 * Loads the user date-format configuration from the classpath and strips
 * formats that would make test assertions ambiguous (formats that parse the
 * same sample values as the ones under test).
 *
 * @return The list of known configured date formats.
 */
private DateFormatsConfig getDateFormats()
{
    ClasspathPreferencesPersistenceManager manager = new ClasspathPreferencesPersistenceManager();
    InternalPreferencesIF preferences = manager.load(MantleConstants.USER_DATE_FORMAT_CONFIG_FILE_TOPIC, null, false);
    DateFormatsConfig config = preferences.getJAXBObject(DateFormatsConfig.class, "DateFormatConfig", null);
    // Remove any ambiguous date formats just to help test assertions.
    List<DateFormat> formatsToRemove = New.list();
    for (DateFormat format : config.getFormats())
    {
        String sdf = format.getSdf();
        if (DateTimeFormats.DATE_FORMAT.equals(sdf) || "yyyy/MM/dd".equals(sdf) || "MM/dd/yyyy".equals(sdf))
        {
            formatsToRemove.add(format);
        }
    }
    config.getFormats().removeAll(formatsToRemove);
    return config;
}
/**
 * Gets the list of formats that are the specified format type.
 *
 * @param config The configuration to get the formats from.
 * @param formatType The format type to get.
 * @return The list of formats within the configuration of the specified
 *         type.
 */
private List<DateFormat> getFormats(DateFormatsConfig config, Type formatType)
{
    List<DateFormat> formats = New.list();
    for (DateFormat format : config.getFormats())
    {
        if (format.getType() == formatType)
        {
            boolean canAdd = true;
            if (formatType == Type.DATE || formatType == Type.TIMESTAMP)
            {
                String sdf = format.getSdf();
                int monthIndex = sdf.indexOf('M');
                int dayIndex = sdf.indexOf('d');
                // Remove any MM dd formats just to make testing easier
                // since MM is the same as M.
                int monthMonthIndex = sdf.indexOf("MM");
                int dayDayIndex = sdf.indexOf("dd");
                // Also drop day-before-month formats (dayIndex < monthIndex)
                // so only unambiguous M/d orderings remain.
                if (dayIndex < monthIndex || monthMonthIndex >= 0 || dayDayIndex >= 0)
                {
                    canAdd = false;
                }
            }
            if (canAdd)
            {
                formats.add(format);
            }
        }
    }
    return formats;
}
}
|
import React from "react";
const WeatherUI = ({ data }) => {
return (
<div id="weather-ui">
<h1>Weather forecast</h1>
<ul>
{data.map(day => (
<li>
<h2>{day.name}</h2>
<div>{day.temperature} °C</div>
<div>{day.precipitation}% chance of precipitation</div>
</li>
))}
</ul>
</div>
);
};
export default WeatherUI;
|
// Finds the middle node of a singly linked list using the
// tortoise-and-hare technique: the runner advances two nodes per
// step while the walker advances one, so the walker sits on the
// middle node when the runner reaches the end.
public Node FindMiddle(Node head)
{
    // A list of length 0 or 1 is its own midpoint.
    if (head == null || head.next == null)
    {
        return head;
    }

    Node walker = head;
    Node runner = head;
    while (runner != null && runner.next != null)
    {
        walker = walker.next;
        runner = runner.next.next;
    }

    // For even-length lists this is the second of the two middle nodes,
    // matching the original implementation.
    return walker;
}
|
#!/usr/bin/env bash
CURRENT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
source "$CURRENT_DIR/variables.sh"
source "$CURRENT_DIR/helpers.sh"
source "$CURRENT_DIR/spinner_helpers.sh"
# delimiters
d=$'\t'
delimiter=$'\t'
# if "quiet" script produces no output
SCRIPT_OUTPUT="$1"
# Tab-separated tmux format string describing a session's grouping state:
# grouped-flag, group id, session id, session name.
grouped_sessions_format() {
	local fields=(
		"#{session_grouped}"
		"#{session_group}"
		"#{session_id}"
		"#{session_name}"
	)
	# Join the fields with the global $delimiter (a single tab).
	local IFS="$delimiter"
	echo "${fields[*]}"
}
# Tab-separated tmux format string describing one pane. Fields prefixed
# with ":" are values that may be empty; the colon keeps the field count
# stable when read back with IFS=tab.
pane_format() {
	local fields=(
		"pane"
		"#{session_name}"
		"#{window_index}"
		":#{window_name}"
		"#{window_active}"
		":#{window_flags}"
		"#{pane_index}"
		":#{pane_current_path}"
		"#{pane_active}"
		"#{pane_current_command}"
		"#{pane_pid}"
		"#{history_size}"
	)
	# Join the fields with the global $delimiter (a single tab).
	local IFS="$delimiter"
	echo "${fields[*]}"
}
# Tab-separated tmux format string describing one window.
window_format() {
	local fields=(
		"window"
		"#{session_name}"
		"#{window_index}"
		"#{window_active}"
		":#{window_flags}"
		"#{window_layout}"
	)
	# Join the fields with the global $delimiter (a single tab).
	local IFS="$delimiter"
	echo "${fields[*]}"
}
# Tab-separated tmux format string describing the client state:
# current session and last (alternate) session.
state_format() {
	local fields=(
		"state"
		"#{client_session}"
		"#{client_last_session}"
	)
	# Join the fields with the global $delimiter (a single tab).
	local IFS="$delimiter"
	echo "${fields[*]}"
}
# Prints one pane_format line per pane, across all sessions.
dump_panes_raw() {
	tmux list-panes -a -F "$(pane_format)"
}
# Prints one window_format line per window, across all sessions.
dump_windows_raw(){
	tmux list-windows -a -F "$(window_format)"
}
# Toggles the zoom state of the pane given by $1 (a tmux target spec).
toggle_window_zoom() {
	local target="$1"
	tmux resize-pane -Z -t "$target"
}
# Echoes the path of the save-command strategy script to execute.
# Falls back to the default strategy when the user-configured one
# does not exist on disk.
_save_command_strategy_file() {
	local save_command_strategy="$(get_tmux_option "$save_command_strategy_option" "$default_save_command_strategy")"
	local strategy_file="$CURRENT_DIR/../save_command_strategies/${save_command_strategy}.sh"
	local default_strategy_file="$CURRENT_DIR/../save_command_strategies/${default_save_command_strategy}.sh"
	if [ -e "$strategy_file" ]; then # strategy file exists?
		echo "$strategy_file"
	else
		echo "$default_strategy_file"
	fi
}
# Prints the full command line running inside the pane whose PID is $1,
# by delegating to the configured save-command strategy script.
pane_full_command() {
	local pane_pid="$1"
	local strategy_file="$(_save_command_strategy_file)"
	# execute strategy script to get pane full command
	# (quoted so a strategy path containing spaces does not word-split)
	"$strategy_file" "$pane_pid"
}
# Counts the non-empty lines currently visible in pane $1.
# The final sed strips the left-padding some wc implementations emit.
number_nonempty_lines_on_screen() {
	local pane_id="$1"
	tmux capture-pane -pJ -t "$pane_id" |
		sed '/^$/d' |
		wc -l |
		sed 's/ //g'
}
# tests if there was any command output in the current pane
# Returns 0 (true) when pane $1 has scrollback history, a cursor past the
# first line, or more than one non-empty visible line.
pane_has_any_content() {
	local pane_id="$1"
	local history_size="$(tmux display -p -t "$pane_id" -F "#{history_size}")"
	local cursor_y="$(tmux display -p -t "$pane_id" -F "#{cursor_y}")"
	# doing "cheap" tests first
	[ "$history_size" -gt 0 ] || # history has any content?
	[ "$cursor_y" -gt 0 ] || # cursor not in first line?
	[ "$(number_nonempty_lines_on_screen "$pane_id")" -gt 1 ]
}
# Saves the scrollback + screen contents of pane $1 to its contents file.
#   $1 - pane id, $2 - history size (lines of scrollback),
#   $3 - contents area: "visible" captures only the on-screen portion.
capture_pane_contents() {
	local pane_id="$1"
	local start_line="-$2"
	local pane_contents_area="$3"
	if pane_has_any_content "$pane_id"; then
		if [ "$pane_contents_area" = "visible" ]; then
			start_line="0"
		fi
		# the printf hack below removes *trailing* empty lines
		printf '%s\n' "$(tmux capture-pane -epJ -S "$start_line" -t "$pane_id")" > "$(pane_contents_file "save" "$pane_id")"
	fi
}
# Saves (and immediately restores) a pane's interactive shell history by
# typing history commands into the pane.
#   $1 - tmux pane id
#   $2 - command running in the pane (only bash and zsh are handled)
#   $3 - full command; history is only saved for idle panes (":")
save_shell_history() {
	# Bugfix: the positional parameters were previously assigned *after*
	# $pane_command was tested below, so the branch always saw an
	# empty/stale value. They must be read first.
	local pane_id="$1"
	local pane_command="$2"
	local full_command="$3"
	if [ "$pane_command" = "bash" ]; then
		local history_w='history -w'
		local history_r='history -r'
		local accept_line='C-m'
		local end_of_line='C-e'
		local backward_kill_line='C-u'
	elif [ "$pane_command" = "zsh" ]; then
		# fc -W does not work with -L
		# fc -l format is different from what's written by fc -W
		# fc -R either reads the format produced by fc -W or considers
		# the entire line to be a command. That's why we need -n.
		# fc -l only list the last 16 items by default, I think 64 is more reasonable.
		local history_w='fc -lLn -64 >'
		local history_r='fc -R'
		# Resolve the user's actual key bindings from an interactive zsh.
		local zsh_bindkey="$(zsh -i -c bindkey)"
		local accept_line="$(expr "$(echo "$zsh_bindkey" | grep -m1 '\saccept-line$')" : '^"\(.*\)".*')"
		local end_of_line="$(expr "$(echo "$zsh_bindkey" | grep -m1 '\send-of-line$')" : '^"\(.*\)".*')"
		local backward_kill_line="$(expr "$(echo "$zsh_bindkey" | grep -m1 '\sbackward-kill-line$')" : '^"\(.*\)".*')"
	else
		return
	fi
	if [ "$full_command" = ":" ]; then
		# leading space prevents the command from being saved to history
		# (assuming default HISTCONTROL settings)
		local write_command=" $history_w '$(resurrect_history_file "$pane_id" "$pane_command")'"
		local read_command=" $history_r '$(resurrect_history_file "$pane_id" "$pane_command")'"
		# C-e C-u is a Bash shortcut sequence to clear whole line. It is necessary to
		# delete any pending input so it does not interfere with our history command.
		tmux send-keys -t "$pane_id" "$end_of_line" "$backward_kill_line" "$write_command" "$accept_line"
		# Immediately restore after saving
		tmux send-keys -t "$pane_id" "$end_of_line" "$backward_kill_line" "$read_command" "$accept_line"
	fi
}
# Prints the index of the active window ("*" flag) in session $1.
get_active_window_index() {
	local session_name="$1"
	tmux list-windows -t "$session_name" -F "#{window_flags} #{window_index}" |
		awk '$1 ~ /\*/ { print $2; }'
}
# Prints the index of the alternate (last) window ("-" flag) in session $1.
get_alternate_window_index() {
	local session_name="$1"
	tmux list-windows -t "$session_name" -F "#{window_flags} #{window_index}" |
		awk '$1 ~ /-/ { print $2; }'
}
# Emits one "grouped_session" record per session that mirrors another
# session in the same group. Relies on sorted order: the first session
# seen for a group is treated as the original; later ones point to it.
dump_grouped_sessions() {
	local current_session_group=""
	local original_session
	# "grep ^1" keeps only sessions whose session_grouped flag is 1;
	# "cut -c 3-" drops that flag plus its delimiter.
	tmux list-sessions -F "$(grouped_sessions_format)" |
		grep "^1" |
		cut -c 3- |
		sort |
		while IFS=$d read session_group session_id session_name; do
			if [ "$session_group" != "$current_session_group" ]; then
				# this session is the original/first session in the group
				original_session="$session_name"
				current_session_group="$session_group"
			else
				# this session "points" to the original session
				active_window_index="$(get_active_window_index "$session_name")"
				alternate_window_index="$(get_alternate_window_index "$session_name")"
				echo "grouped_session${d}${session_name}${d}${original_session}${d}:${alternate_window_index}${d}:${active_window_index}"
			fi
		done
}
# Computes the grouped-sessions dump once, registers it via
# get_grouped_sessions (so later is_session_grouped checks work), and
# echoes it when non-empty.
fetch_and_dump_grouped_sessions(){
	local grouped_sessions_dump="$(dump_grouped_sessions)"
	get_grouped_sessions "$grouped_sessions_dump"
	if [ -n "$grouped_sessions_dump" ]; then
		echo "$grouped_sessions_dump"
	fi
}
# translates pane pid to process command running inside a pane
# Emits one "pane" record per pane, skipping panes from grouped sessions.
dump_panes() {
	local full_command
	dump_panes_raw |
		# read -r keeps backslashes in paths/commands literal.
		while IFS=$d read -r line_type session_name window_number window_name window_active window_flags pane_index dir pane_active pane_command pane_pid history_size; do
			# not saving panes from grouped sessions
			if is_session_grouped "$session_name"; then
				continue
			fi
			# quoted so an empty/odd pid cannot word-split the call
			full_command="$(pane_full_command "$pane_pid")"
			echo "${line_type}${d}${session_name}${d}${window_number}${d}${window_name}${d}${window_active}${d}${window_flags}${d}${pane_index}${d}${dir}${d}${pane_active}${d}${pane_command}${d}:${full_command}"
		done
}
# Emits one "window" record per window, skipping windows that belong to
# grouped sessions (they mirror the original session's windows).
dump_windows() {
	dump_windows_raw |
		# read -r keeps backslashes in window names/layouts literal.
		while IFS=$d read -r line_type session_name window_index window_active window_flags window_layout; do
			# not saving windows from grouped sessions
			if is_session_grouped "$session_name"; then
				continue
			fi
			echo "${line_type}${d}${session_name}${d}${window_index}${d}${window_active}${d}${window_flags}${d}${window_layout}"
		done
}
# Emits one "state" record with the current and alternate client session.
dump_state() {
	tmux display-message -p "$(state_format)"
}
# Captures the contents of every pane into per-pane files.
dump_pane_contents() {
	local pane_contents_area="$(get_tmux_option "$pane_contents_area_option" "$default_pane_contents_area")"
	dump_panes_raw |
		# read -r keeps backslashes in paths/commands literal.
		while IFS=$d read -r line_type session_name window_number window_name window_active window_flags pane_index dir pane_active pane_command pane_pid history_size; do
			capture_pane_contents "${session_name}:${window_number}.${pane_index}" "$history_size" "$pane_contents_area"
		done
}
# Saves the shell history of every (non-grouped) pane.
dump_shell_history() {
	dump_panes |
		# read -r keeps backslashes in the captured full command literal.
		while IFS=$d read -r line_type session_name window_number window_name window_active window_flags pane_index dir pane_active pane_command full_command; do
			save_shell_history "$session_name:$window_number.$pane_index" "$pane_command" "$full_command"
		done
}
remove_old_backups() {
	# remove resurrect files older than 30 days, but keep at least 5 copies of backup.
	# NOTE(review): this parses `ls -t` output (newest first; tail skips the
	# 5 newest). That breaks on filenames with whitespace — presumably the
	# resurrect file naming scheme never produces such names; verify.
	local -a files
	files=($(ls -t $(resurrect_dir)/${RESURRECT_FILE_PREFIX}_*.${RESURRECT_FILE_EXTENSION} | tail -n +6))
	[[ ${#files[@]} -eq 0 ]] ||
		find "${files[@]}" -type f -mtime +30 -exec rm -v "{}" \;
}
# Writes the full environment dump (grouped sessions, panes, windows,
# client state) to a new resurrect file, then updates the "last" symlink —
# or deletes the new file if nothing changed. Optionally also saves pane
# contents and shell history.
save_all() {
	local resurrect_file_path="$(resurrect_file_path)"
	local last_resurrect_file="$(last_resurrect_file)"
	mkdir -p "$(resurrect_dir)"
	fetch_and_dump_grouped_sessions > "$resurrect_file_path"
	dump_panes >> "$resurrect_file_path"
	dump_windows >> "$resurrect_file_path"
	dump_state >> "$resurrect_file_path"
	# Keep the new dump only when it differs from the previous one.
	if files_differ "$resurrect_file_path" "$last_resurrect_file"; then
		ln -fs "$(basename "$resurrect_file_path")" "$last_resurrect_file"
	else
		rm "$resurrect_file_path"
	fi
	if capture_pane_contents_option_on; then
		mkdir -p "$(pane_contents_dir "save")"
		dump_pane_contents
		# Archive the captured files, then clear the staging directory.
		pane_contents_create_archive
		rm "$(pane_contents_dir "save")"/*
	fi
	if save_shell_history_option_on; then
		dump_shell_history
	fi
	remove_old_backups
}
# True unless the script was invoked with the "quiet" argument.
show_output() {
	[ "$SCRIPT_OUTPUT" != "quiet" ]
}
# Entry point: save everything, with a spinner unless quiet.
main() {
	if supported_tmux_version_ok; then
		if show_output; then
			start_spinner "Saving..." "Tmux environment saved!"
		fi
		save_all
		if show_output; then
			stop_spinner
			display_message "Tmux environment saved!"
		fi
	fi
}
main
|
# Restarts the "lc" lifecycle container, mounting config and log
# directories from the parent of the current directory.
# Usage: <script> <container-work-dir>
# All expansions are quoted so paths containing spaces do not word-split.
BASEDIR=$(dirname "$(pwd)")
echo "${BASEDIR}"
WORKDIR="$1"
# Remove any previous instance so the container name is free for reuse.
sudo docker rm --force lc
if [ -x "${BASEDIR}/lc" ]; then
	# NOTE(review): the container runs /work/lc from the image while the
	# guard checks ${BASEDIR}/lc on the host — presumably ${WORKDIR} is
	# /work so the host binary is the one executed; verify.
	sudo docker run -d --name lc \
		-v "${BASEDIR}/integrate/cephconf":/etc/ceph/ \
		-v "${BASEDIR}/integrate/yigconf":/etc/yig/ \
		-v "${BASEDIR}":/var/log/yig \
		-v "${BASEDIR}":"${WORKDIR}" \
		--net=integrate_vpcbr \
		--ip 10.5.0.20 \
		journeymidnight/yig /work/lc
	echo "started lc from local dir"
fi
|
<reponame>shaba1567/Elastos.Essentials.App<gh_stars>0
import { AsciiMapping } from "./asciimapping";
import { MnemonicSuggestionProvider } from "./suggestionprovider";
/**
 * Suggests French mnemonic words matching a typed prefix.
 * The word mapping is loaded asynchronously from a bundled JSON file.
 */
export class FrenchMnemonicSuggestionProvider implements MnemonicSuggestionProvider {
    private mapping: AsciiMapping;

    constructor() {
        // Lazy-load the mapping; getSuggestions() may be called before the
        // promise resolves, which the guard below accounts for.
        void import("src/assets/components/mnemonic-keypad/french.json").then(mapping => {
            this.mapping = mapping.default as any as AsciiMapping;
        });
    }

    /**
     * Returns up to 6 suggestions whose (lowercased) key starts with the
     * given letters.
     */
    getSuggestions(letters: string): string[] {
        // Bugfix: Object.keys(undefined) throws if the JSON has not
        // finished loading yet — return no suggestions instead.
        if (!this.mapping) {
            return [];
        }
        letters = letters.toLowerCase();
        let matchingKeys = Object.keys(this.mapping).filter(key => key.startsWith(letters));
        return matchingKeys.reduce((acc, key) => [...acc, ...(this.mapping[key] || [])], [] as string[]).slice(0, 6);
    }
}
|
#!/usr/bin/env bash
# Integration test: create stage s2, upload a CSV to its root and to a
# relative path via the HTTP upload handler, list its contents, then drop
# the stage. Relies on $MYSQL_CLIENT_CONNECT and $QUERY_HTTP_HANDLER_PORT
# from shell_env.sh.
CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
. "$CURDIR"/../../../shell_env.sh
echo "drop stage if exists s2;" | $MYSQL_CLIENT_CONNECT
echo "CREATE STAGE if not exists s2;" | $MYSQL_CLIENT_CONNECT
echo "list @s2" | $MYSQL_CLIENT_CONNECT
# Upload books.csv to the stage root, then to the "test" relative path.
curl -u root: -XPUT -H "stage_name:s2" -F "upload=@${CURDIR}/books.csv" "http://localhost:${QUERY_HTTP_HANDLER_PORT}/v1/upload_to_stage" > /dev/null 2>&1
curl -u root: -XPUT -H "stage_name:s2" -H "relative_path:test" -F "upload=@${CURDIR}/books.csv" "http://localhost:${QUERY_HTTP_HANDLER_PORT}/v1/upload_to_stage" > /dev/null 2>&1
# Print only the first three columns so the output is stable across runs.
echo "list @s2" | $MYSQL_CLIENT_CONNECT | awk '{print $1,$2,$3}'
echo "drop stage s2;" | $MYSQL_CLIENT_CONNECT
# test drop stage
echo "CREATE STAGE if not exists s2;" | $MYSQL_CLIENT_CONNECT
echo "list @s2" | $MYSQL_CLIENT_CONNECT
echo "drop stage s2;" | $MYSQL_CLIENT_CONNECT
|
#!/bin/bash
set -e
# Create the database if it does not already exist; the "|| echo" makes
# the step idempotent (an existing database is not an error). Then apply
# pending migrations.
node_modules/.bin/sequelize db:create || echo 'Database cant be created might be exists'
npm run migrate
|
<filename>Notes/Notes/geoNotes-Bridging-Header.h
//
// geoNotes-Bridging-Header.h
// geoNotes
//
// Created by <NAME> on 2/29/16.
// Copyright © 2016 <NAME>. All rights reserved.
//
#import <Parse.h>
#import <Bolts.h>
|
import java.util.Scanner;
/**
 * Reads n integers from stdin and linearly searches them for a value,
 * printing its 1-based position or a not-found message.
 */
class linearsearch
{
    public static void main(String args[])
    {
        int c, n, search, array[];
        Scanner in = new Scanner(System.in);
        System.out.print("Enter no of elements : ");
        n = in.nextInt();
        array = new int[n];
        // Bugfix: missing spaces around the concatenated count.
        System.out.println("Enter " + n + " elements");
        for (c = 0; c < n; c++)
            array[c] = in.nextInt();
        // Bugfix: "elenment" typo in the prompt.
        System.out.println("Enter search element : ");
        search = in.nextInt();
        for (c = 0; c < n; c++)
        {
            if (array[c] == search)
            {
                // Bugfix: missing spaces around the concatenated position.
                System.out.println(search + " present at " + (c + 1));
                break;
            }
        }
        // After a break, c < n; c == n only when no element matched.
        if (c == n)
            System.out.println(search + " isn't present");
        // Close the scanner to release the underlying stdin handle.
        in.close();
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.