text
stringlengths 1
1.05M
|
|---|
class Customer:
    """A customer's contact and billing details.

    Getter/setter methods are kept for backward compatibility with existing
    callers, even though direct attribute access is the Pythonic norm.

    NOTE(review): the credit card number is held in plain text in memory;
    if this object is ever persisted or logged, the number should be
    tokenized/encrypted upstream.
    """

    def __init__(self, name, email, address, phone, credit_card_number):
        self.name = name
        self.email = email
        self.address = address
        self.phone = phone
        self.credit_card_number = credit_card_number

    def get_name(self):
        return self.name

    def set_name(self, name):
        self.name = name

    def get_email(self):
        return self.email

    def set_email(self, email):
        self.email = email

    def get_address(self):
        return self.address

    def set_address(self, address):
        self.address = address

    def get_phone(self):
        return self.phone

    def set_phone(self, phone):
        self.phone = phone

    def get_credit_card_number(self):
        return self.credit_card_number

    def set_credit_card_number(self, credit_card_number):
        self.credit_card_number = credit_card_number

    def __repr__(self):
        # Mask all but the last four digits of the card number so that
        # accidental logging of the object does not leak the full PAN.
        card = str(self.credit_card_number)
        masked = "*" * max(len(card) - 4, 0) + card[-4:]
        return (f"Customer(name={self.name!r}, email={self.email!r}, "
                f"address={self.address!r}, phone={self.phone!r}, "
                f"credit_card_number={masked!r})")
|
TERMUX_PKG_HOMEPAGE=https://maven.apache.org/
TERMUX_PKG_DESCRIPTION="A Java software project management and comprehension tool"
TERMUX_PKG_LICENSE="Apache-2.0"
TERMUX_PKG_MAINTAINER="@masterjavaofficial"
TERMUX_PKG_VERSION=3.8.3
TERMUX_PKG_SRCURL=https://dlcdn.apache.org/maven/maven-3/${TERMUX_PKG_VERSION}/binaries/apache-maven-${TERMUX_PKG_VERSION}-bin.tar.gz
TERMUX_PKG_SHA256=0f1597d11085b8fe93d84652a18c6deea71ece9fabba45a02cf6600c7758fd5b
TERMUX_PKG_DEPENDS="openjdk-17"
TERMUX_PKG_BUILD_IN_SRC=true
TERMUX_PKG_PLATFORM_INDEPENDENT=true
# Install step for the Termux maven package: prune Windows-only files from
# the upstream binary distribution, copy it under $PREFIX/opt/maven, and
# expose the Linux launcher scripts on $PATH via relative symlinks.
termux_step_make_install() {
	# Remove starter scripts for Windows
	rm -f bin/*.cmd
	# Remove DLL for Windows
	rm -rf lib/jansi-native/Windows
	# Replace any previously installed copy before re-copying the tree.
	rm -rf $TERMUX_PREFIX/opt/maven
	mkdir -p $TERMUX_PREFIX/opt
	cp -a $TERMUX_PKG_SRCDIR $TERMUX_PREFIX/opt/maven/
	# Symlink only starter scripts for Linux
	ln -sfr $TERMUX_PREFIX/opt/maven/bin/mvn $TERMUX_PREFIX/bin/mvn
	ln -sfr $TERMUX_PREFIX/opt/maven/bin/mvnDebug $TERMUX_PREFIX/bin/mvnDebug
	ln -sfr $TERMUX_PREFIX/opt/maven/bin/mvnyjp $TERMUX_PREFIX/bin/mvnyjp
}
|
#!/bin/bash
# Run Faster R-CNN evaluation for a trained model.
# Usage: ./test.sh GPU_ID DATASET NET ITERS [extra cfg overrides...]
# Extra args (from $5 on) select a tagged output directory and are passed
# through to tools/test_net.py via --set.
set -x
set -e

export PYTHONUNBUFFERED="True"

GPU_ID=$1
DATASET=$2
NET=$3
ITERS=$4

# Everything after the fourth argument is forwarded verbatim; the
# underscore-joined form is used as the output-directory tag.
array=( $@ )
len=${#array[@]}
EXTRA_ARGS=${array[@]:4:$len}
EXTRA_ARGS_SLUG=${EXTRA_ARGS// /_}

# Per-dataset imdb names, anchor configuration and (for some) a fixed
# iteration count that overrides the ITERS argument.
case ${DATASET} in
  pascal_voc)
    TRAIN_IMDB="voc_2007_trainval"
    TEST_IMDB="voc_2007_test"
    ITERS=70000
    ANCHORS="[8,16,32]"
    RATIOS="[0.5,1,2]"
    ;;
  pascal_voc_0712)
    TRAIN_IMDB="voc_2007_trainval+voc_2012_trainval"
    TEST_IMDB="voc_2007_test"
    ITERS=${ITERS}
    ANCHORS="[8,16,32]"
    RATIOS="[0.5,1,2]"
    ;;
  coco)
    TRAIN_IMDB="coco_2014_train"
    TEST_IMDB="coco_2014_minival"
    ITERS=490000
    ANCHORS="[4,8,16,32]"
    RATIOS="[0.5,1,2]"
    ;;
  chest_xrays)
    TRAIN_IMDB="chest_xrays_trainval"
    TEST_IMDB="chest_xrays_test"
    STEPSIZE="[1200]"
    ITERS=${ITERS}
    ANCHORS="[8,16,32]"
    RATIOS="[0.5,1,2]"
    ;;
  open_images)
    TRAIN_IMDB="open_images_trainval"
    TEST_IMDB="open_images_test"
    STEPSIZE="[960000]"
    ITERS=${ITERS}
    ANCHORS="[8,16,32]"
    RATIOS="[0.5,1,2]"
    ;;
  open_images_mini)
    TRAIN_IMDB="open_images_minitrainval"
    TEST_IMDB="open_images_test"
    STEPSIZE="[480000]"
    ITERS=${ITERS}
    ANCHORS="[8,16,32]"
    RATIOS="[0.5,1,2]"
    ;;
  *)
    echo "No dataset given"
    # Fix: exit non-zero so callers (and set -e chains) see the failure.
    exit 1
    ;;
esac

# Mirror all further output into a timestamped log file.
LOG="experiments/logs/test_${NET}_${TRAIN_IMDB}_${EXTRA_ARGS_SLUG}.txt.`date +'%Y-%m-%d_%H-%M-%S'`"
exec &> >(tee -a "$LOG")
echo Logging output to "$LOG"

# Checkpoint path depends on whether a tag (extra args) was supplied.
set +x
if [[ ! -z ${EXTRA_ARGS_SLUG} ]]; then
  NET_FINAL=output/${NET}/${TRAIN_IMDB}/${EXTRA_ARGS_SLUG}/${NET}_faster_rcnn_iter_${ITERS}.ckpt
else
  NET_FINAL=output/${NET}/${TRAIN_IMDB}/default/${NET}_faster_rcnn_iter_${ITERS}.ckpt
fi
set -x

if [[ ! -z ${EXTRA_ARGS_SLUG} ]]; then
  CUDA_VISIBLE_DEVICES=${GPU_ID} time python ./tools/test_net.py \
    --imdb ${TEST_IMDB} \
    --model ${NET_FINAL} \
    --cfg experiments/cfgs/${NET}.yml \
    --tag ${EXTRA_ARGS_SLUG} \
    --net ${NET} \
    --set ANCHOR_SCALES ${ANCHORS} ANCHOR_RATIOS ${RATIOS}
else
  CUDA_VISIBLE_DEVICES=${GPU_ID} time python ./tools/test_net.py \
    --imdb ${TEST_IMDB} \
    --model ${NET_FINAL} \
    --cfg experiments/cfgs/${NET}.yml \
    --net ${NET} \
    --set ANCHOR_SCALES ${ANCHORS} ANCHOR_RATIOS ${RATIOS}
fi
|
#!/bin/bash -eux
# Bring up the /var/correctly compose project using the containerized
# docker/compose image; the host Docker socket is mounted so compose can
# manage sibling containers, and the project dir is mounted read-only.
docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -v "/var/correctly:/var/correctly:ro" -w="/var/correctly" docker/compose:latest up -d
|
#ifndef GameCenterAuth_h
#define GameCenterAuth_h

/* Fix: uint64_t is used below but <stdint.h> was never included. */
#include <stdint.h>

/*
 * Callback invoked when Game Center identity-verification signature
 * generation succeeds. All string parameters are borrowed; copy them
 * if they must outlive the callback.
 * NOTE(review): __stdcall is an MSVC/Win32 calling convention — presumably
 * this header targets a Unity/.NET interop layer; confirm it is guarded
 * or defined away on non-Windows toolchains.
 */
typedef void ( __stdcall *GenerateSucceeded )(
	const char* publicKeyUrl,
	uint64_t timestamp,
	const char* signature,
	const char* salt,
	const char* playerID,
	const char* alias,
	const char* bundleID
);

/* Callback invoked on failure with a human-readable reason. */
typedef void ( __stdcall *GenerateFailed )(
	const char* reason
);

/* Fix: guard extern "C" so the header also compiles as plain C. */
#ifdef __cplusplus
extern "C"
{
#endif

/* Kick off asynchronous signature generation; exactly one of the two
 * callbacks is eventually invoked. */
void GenerateIdentityVerificationSignature(GenerateSucceeded OnSucceeded, GenerateFailed OnFailed);

#ifdef __cplusplus
}
#endif

#endif
|
<reponame>lanpinguo/rootfs_build
/*
******************************************************************************
*
* vin.c
*
* Hawkview ISP - vin.c module
*
* Copyright (c) 2015 by Allwinnertech Co., Ltd. http://www.allwinnertech.com
*
* Version Author Date Description
*
* 3.0 <NAME> 2015/12/01 ISP Tuning Tools Support
*
******************************************************************************
*/
#include <linux/module.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/init.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/version.h>
#include <linux/mutex.h>
#include <linux/videodev2.h>
#include <linux/delay.h>
#include <linux/string.h>
#include <linux/freezer.h>
#include <linux/io.h>
#include <linux/platform_device.h>
#include <linux/interrupt.h>
#include <linux/i2c.h>
#include <linux/spi/spi.h>
#include <linux/moduleparam.h>
#include <media/v4l2-device.h>
#include <media/v4l2-ioctl.h>
#include <media/v4l2-common.h>
#include <media/v4l2-mediabus.h>
#include <media/v4l2-subdev.h>
#include <media/videobuf2-dma-contig.h>
#include <linux/regulator/consumer.h>
#include "utility/bsp_common.h"
#include "vin-isp/bsp_isp_algo.h"
#include "vin-cci/bsp_cci.h"
#include "vin-cci/cci_helper.h"
#include "utility/config.h"
#include "modules/sensor/camera_cfg.h"
#include "utility/sensor_info.h"
#include "utility/vin_io.h"
#include "vin.h"
#define VIN_MODULE_NAME "sunxi-vin-media"
/*
 * Walk the media graph upstream starting from entity @me and cache every
 * V4L2 subdev encountered into @p->sd[], slotted by its grp_id
 * (sensor / mipi / csi / isp / scaler). Slots below VIN_IND_ACTUATOR are
 * cleared first. The walk stops when no connected remote source pad is
 * found, when the remote entity is not a V4L2 subdev, or when a
 * single-pad entity (the head of the chain) is reached.
 */
static void vin_md_prepare_pipeline(struct vin_pipeline *p,
				    struct media_entity *me)
{
	struct v4l2_subdev *sd;
	int i;

	for (i = 0; i < VIN_IND_ACTUATOR; i++)
		p->sd[i] = NULL;

	while (1) {
		struct media_pad *pad = NULL;

		/* Find remote source pad */
		for (i = 0; i < me->num_pads; i++) {
			struct media_pad *spad = &me->pads[i];

			if (!(spad->flags & MEDIA_PAD_FL_SINK))
				continue;
			pad = media_entity_remote_source(spad);
			if (pad)
				break;
		}

		/* Stop once we leave the subdev portion of the graph. */
		if (pad == NULL ||
		    media_entity_type(pad->entity) != MEDIA_ENT_T_V4L2_SUBDEV)
			break;

		sd = media_entity_to_v4l2_subdev(pad->entity);
		vin_log(VIN_LOG_MD, "%s entity is %s, group id is 0x%x\n",
			__func__, pad->entity->name, sd->grp_id);

		/* Record the subdev into the slot matching its group id. */
		switch (sd->grp_id) {
		case VIN_GRP_ID_SENSOR:
			p->sd[VIN_IND_SENSOR] = sd;
			break;
		case VIN_GRP_ID_MIPI:
			p->sd[VIN_IND_MIPI] = sd;
			break;
		case VIN_GRP_ID_CSI:
			p->sd[VIN_IND_CSI] = sd;
			break;
		case VIN_GRP_ID_ISP:
			p->sd[VIN_IND_ISP] = sd;
			break;
		case VIN_GRP_ID_SCALER:
			p->sd[VIN_IND_SCALER] = sd;
			break;
		default:
			break;
		}
		me = &sd->entity;

		/* A single-pad entity has nothing further upstream. */
		if (me->num_pads == 1)
			break;
	}
}
/*
 * Reference-counted power control for one subdev, using the media
 * entity's use_count as the counter: only the first "on" and the last
 * "off" reach the subdev's s_power op. Returns -ENXIO for a NULL subdev;
 * -ENOIOCTLCMD from the subdev (op not implemented) is treated as success.
 */
static int __vin_subdev_set_power(struct v4l2_subdev *sd, int on)
{
	int *use_count;
	int ret;

	if (sd == NULL)
		return -ENXIO;

	use_count = &sd->entity.use_count;
	/* Power up only on the 0 -> 1 transition... */
	if (on && (*use_count)++ > 0)
		return 0;
	/* ...and power down only on the 1 -> 0 transition. */
	else if (!on && (*use_count == 0 || --(*use_count) > 0))
		return 0;
	ret = v4l2_subdev_call(sd, core, s_power, on);

	return ret != -ENOIOCTLCMD ? ret : 0;
}
/*
 * Power the whole pipeline up or down. seq[1] (on) and seq[0] (off) give
 * opposite orderings so the chain powers up from ISP toward the bus and
 * powers down in reverse. Missing subdevs are skipped; -ENXIO from one
 * subdev is tolerated. On failure, the already-toggled entries are
 * rolled back in reverse.
 * NOTE(review): the rollback loop starts at the failing index i itself,
 * so the subdev whose s_power just failed is also given the inverse
 * call — confirm this is the intended cleanup semantics.
 */
static int vin_pipeline_s_power(struct vin_pipeline *p, bool on)
{
	static const u8 seq[2][VIN_IND_MAX] = {
		{ VIN_IND_ISP, VIN_IND_SENSOR, VIN_IND_CSI, VIN_IND_MIPI,
		  VIN_IND_SCALER },
		{ VIN_IND_MIPI, VIN_IND_CSI, VIN_IND_SENSOR, VIN_IND_ISP,
		  VIN_IND_SCALER},
	};
	int i, ret = 0;

	/* A pipeline without a sensor cannot be powered. */
	if (p->sd[VIN_IND_SENSOR] == NULL)
		return -ENXIO;

	for (i = 0; i < VIN_IND_MAX; i++) {
		unsigned int idx = seq[on][i];

		if (NULL == p->sd[idx])
			continue;
		ret = __vin_subdev_set_power(p->sd[idx], on);
		if (ret < 0 && ret != -ENXIO)
			goto error;
	}
	return 0;
error:
	/* Undo in reverse order everything toggled so far. */
	for (; i >= 0; i--) {
		unsigned int idx = seq[on][i];

		if (NULL == p->sd[idx])
			continue;
		__vin_subdev_set_power(p->sd[idx], !on);
	}
	return ret;
}
/*
 * Open a capture pipeline: optionally (re)discover its subdevs from the
 * media graph, then power the chain up. Fails with -EINVAL when the
 * arguments are NULL or no sensor subdev is present.
 */
static int __vin_pipeline_open(struct vin_pipeline *p,
			       struct media_entity *me, bool prepare)
{
	if (WARN_ON(p == NULL || me == NULL))
		return -EINVAL;

	if (prepare)
		vin_md_prepare_pipeline(p, me);

	/* A pipeline without a sensor is not usable. */
	if (p->sd[VIN_IND_SENSOR] == NULL)
		return -EINVAL;

	return vin_pipeline_s_power(p, 1);
}
/*
 * Close a capture pipeline by powering its subdev chain down.
 * -ENXIO ("nothing to power off") is masked to success.
 *
 * Fix: the original computed `vind = entity_to_vin_mdev(...)` and never
 * used it (dead local, compiler warning); it also re-tested
 * p->sd[VIN_IND_SENSOR] although `sd` already proved it non-NULL.
 * entity_to_vin_mdev looks like a pure container lookup — confirm it has
 * no side effects before relying on this cleanup.
 */
static int __vin_pipeline_close(struct vin_pipeline *p)
{
	struct v4l2_subdev *sd = p ? p->sd[VIN_IND_SENSOR] : NULL;
	int ret;

	if (WARN_ON(sd == NULL))
		return -EINVAL;

	ret = vin_pipeline_s_power(p, 0);

	return ret == -ENXIO ? 0 : ret;
}
/*
 * Start/stop streaming on every subdev in the pipeline, ordered by
 * seq[on] (stop order differs from start order). -ENOIOCTLCMD and
 * -ENODEV from a subdev are tolerated; on any other error the subdevs
 * already toggled are reverted in reverse order.
 * NOTE(review): the rollback loop does not skip NULL slots — presumably
 * safe because v4l2_subdev_call checks for a NULL sd; confirm.
 */
static int __vin_pipeline_s_stream(struct vin_pipeline *p, bool on)
{
	static const u8 seq[2][VIN_IND_MAX] = {
		{ VIN_IND_ISP, VIN_IND_SENSOR, VIN_IND_CSI, VIN_IND_MIPI,
		  VIN_IND_SCALER },
		{ VIN_IND_MIPI, VIN_IND_SENSOR, VIN_IND_CSI, VIN_IND_ISP,
		  VIN_IND_SCALER},
	};
	int i, ret = 0;

	if (p->sd[VIN_IND_SENSOR] == NULL)
		return -ENODEV;

	for (i = 0; i < VIN_IND_ACTUATOR; i++) {
		unsigned int idx = seq[on][i];

		if (NULL == p->sd[idx])
			continue;
		ret = v4l2_subdev_call(p->sd[idx], video, s_stream, on);
		if (ret < 0 && ret != -ENOIOCTLCMD && ret != -ENODEV) {
			vin_err("%s error!\n", __func__);
			goto error;
		}
	}
	return 0;
error:
	/* Revert stream state on everything handled so far. */
	for (; i >= 0; i--) {
		unsigned int idx = seq[on][i];

		v4l2_subdev_call(p->sd[idx], video, s_stream, !on);
	}
	return ret;
}
/* Pipeline callbacks handed to the capture subdevs via host data. */
static const struct vin_pipeline_ops vin_pipe_ops = {
	.open		= __vin_pipeline_open,
	.close		= __vin_pipeline_close,
	.set_stream	= __vin_pipeline_s_stream,
};
/*
 * Register a sensor/actuator subdev on the bus selected by
 * vinc->modu_cfg.bus_sel, according to its module @type (CCI, I2C or
 * SPI). Returns the registered subdev, or NULL on any failure
 * (including unsupported types such as GPIO).
 *
 * Fix: `struct spi_board_info info` was used without initialization —
 * only modalias/bus_num/chip_select were set while the remaining fields
 * carried stack garbage into v4l2_spi_new_subdev. It is now zeroed
 * first. The GPIO branch duplicated the generic "type error" branch and
 * has been merged into it (same message, same result).
 */
struct v4l2_subdev *__vin_subdev_register(struct vin_core *vinc,
					  char *name, u8 addr,
					  enum module_type type)
{
	struct v4l2_device *v4l2_dev = vinc->v4l2_dev;
	struct modules_config *modu_cfg = &vinc->modu_cfg;
	struct v4l2_subdev *sd = NULL;

	if (type == VIN_MODULE_TYPE_CCI) {
		sd = cci_bus_match(name, modu_cfg->bus_sel, addr);
		if (IS_ERR_OR_NULL(sd)) {
			vin_err("registering v4l2 sd No such device!\n");
			return NULL;
		} else {
			if (v4l2_device_register_subdev(v4l2_dev, sd))
				return NULL;
			vin_print("sd %s register OK!\n", sd->name);
		}
	} else if (type == VIN_MODULE_TYPE_I2C) {
		struct i2c_adapter *adapter =
			i2c_get_adapter(modu_cfg->bus_sel);

		if (adapter == NULL) {
			vin_err("request i2c adapter failed!\n");
			return NULL;
		}
		sd = v4l2_i2c_new_subdev(v4l2_dev, adapter, name, addr, NULL);
		if (IS_ERR_OR_NULL(sd)) {
			/* Registration consumed nothing; drop the adapter ref. */
			i2c_put_adapter(adapter);
			vin_err("registering v4l2 sd No such device!\n");
			return NULL;
		} else {
			vin_print("sd %s register OK!\n", sd->name);
		}
	} else if (type == VIN_MODULE_TYPE_SPI) {
#if defined(CONFIG_SPI)
		struct spi_master *master =
			spi_busnum_to_master(modu_cfg->bus_sel);
		struct spi_board_info info;

		if (master == NULL) {
			vin_err("request spi master failed!\n");
			return NULL;
		}
		/* Fix: zero the board info; uninitialized fields were
		 * previously passed to v4l2_spi_new_subdev. */
		memset(&info, 0, sizeof(info));
		strlcpy(info.modalias, name, sizeof(info.modalias));
		info.bus_num = modu_cfg->bus_sel;
		info.chip_select = 1;
		sd = v4l2_spi_new_subdev(v4l2_dev, master, &info);
		if (IS_ERR_OR_NULL(sd)) {
			spi_master_put(master);
			vin_err("registering v4l2 sd No such device!\n");
			return NULL;
		} else {
			vin_print("sd %s register OK!\n", sd->name);
		}
#endif
	} else {
		/* VIN_MODULE_TYPE_GPIO and any unknown type. */
		vin_print("Sensor type error, type = %d!\n", type);
		return NULL;
	}
	return sd;
}
/*
 * Unregister a subdev previously created by __vin_subdev_register,
 * releasing the bus-specific resources (CCI match, I2C client+adapter
 * ref, SPI device+master ref) according to @type. Returns 0 on success,
 * a negative errno otherwise.
 */
static int __vin_subdev_unregister(struct v4l2_subdev *sd,
				   enum module_type type)
{
	if (IS_ERR_OR_NULL(sd)) {
		vin_log(VIN_LOG_MD, "%s sd = NULL!\n", __func__);
		return -1;
	}

	if (type == VIN_MODULE_TYPE_CCI) {
		struct cci_driver *cci_driv = v4l2_get_subdevdata(sd);

		if (IS_ERR_OR_NULL(cci_driv))
			return -ENODEV;
		vin_print("vin sd %s unregister!\n", sd->name);
		v4l2_device_unregister_subdev(sd);
		cci_bus_match_cancel(cci_driv);
	} else if (type == VIN_MODULE_TYPE_I2C) {
		struct i2c_adapter *adapter;
		struct i2c_client *client = v4l2_get_subdevdata(sd);

		if (!client)
			return -ENODEV;
		vin_print("vin sd %s unregister!\n", sd->name);
		v4l2_device_unregister_subdev(sd);
		/* Grab the adapter before the client is freed. */
		adapter = client->adapter;
		i2c_unregister_device(client);
		if (adapter)
			i2c_put_adapter(adapter);
	} else if (type == VIN_MODULE_TYPE_SPI) {
		struct spi_master *master;
		struct spi_device *spi = v4l2_get_subdevdata(sd);

		if (!spi)
			return -ENODEV;
		vin_print("vin sd %s unregister!\n", sd->name);
		v4l2_device_unregister_subdev(sd);
		/* Grab the master before the device is freed. */
		master = spi->master;
		spi_unregister_device(spi);
		if (master)
			spi_master_put(master);
	} else if (type == VIN_MODULE_TYPE_GPIO) {
		vin_print("Sensor type error, type = %d!\n", type);
		return -EFAULT;
	} else {
		vin_print("Sensor type error, type = %d!\n", type);
		return -EFAULT;
	}
	return 0;
}
/*
 * Derive the bayer/ISP usage flags from the sensor's camera type:
 * RAW sensors are bayer and use the ISP, YUV sensors use neither,
 * anything else is treated as non-bayer but ISP-processed.
 */
static int __vin_handle_sensor_info(struct sensor_instance *inst)
{
	switch (inst->cam_type) {
	case SENSOR_RAW:
		inst->is_bayer_raw = 1;
		inst->is_isp_used = 1;
		break;
	case SENSOR_YUV:
		inst->is_bayer_raw = 0;
		inst->is_isp_used = 0;
		break;
	default:
		inst->is_bayer_raw = 0;
		inst->is_isp_used = 1;
		break;
	}
	return 0;
}
/*
 * Cross-check the device-tree sensor instance against the built-in
 * sensor info table. The detected sensor type wins over the device-tree
 * value (inst->is_bayer_raw is overwritten); an i2c address mismatch is
 * only warned about. Silently does nothing if the sensor is not in the
 * table.
 */
static void __vin_verify_sensor_info(struct sensor_instance *inst)
{
	struct sensor_item sensor_info;
	char *sensor_type_name[] = {"YUV", "RAW", NULL,};

	if (get_sensor_info(inst->cam_name, &sensor_info) == 0) {
		if (inst->cam_addr != sensor_info.i2c_addr) {
			vin_warn("%s i2c_addr is different from device_tree!\n",
				 sensor_info.sensor_name);
		}
		if (inst->is_bayer_raw != sensor_info.sensor_type) {
			vin_warn("%s fmt is different from device_tree!\n",
				 sensor_type_name[sensor_info.sensor_type]);
			vin_warn("detect fmt %d replace device_tree fmt %d!\n",
				 sensor_info.sensor_type,
				 inst->is_bayer_raw);
			/* Detected type takes precedence over device tree. */
			inst->is_bayer_raw = sensor_info.sensor_type;
		}
		vin_print("find sensor name is %s, address is %x, type is %s\n",
			  sensor_info.sensor_name, sensor_info.i2c_addr,
			  sensor_type_name[sensor_info.sensor_type]);
	}
}
/*
 * Register the i-th configured sensor (and, if enabled, its actuator)
 * for @vinc. Camera addresses from config are 8-bit, hence the >> 1 to
 * get the 7-bit bus address. Returns -1 only for an empty sensor name;
 * NOTE(review): a failed sensor registration (NULL sd) is not treated
 * as an error here — presumably checked later via
 * vin_core_check_sensor_list; confirm.
 */
static int __vin_register_module(struct vin_core *vinc, int i)
{
	struct modules_config *modu_cfg = &vinc->modu_cfg;
	struct sensor_instance *inst = &modu_cfg->sensors.inst[i];
	struct vin_module_info *modules = &modu_cfg->modules;

	if (!strcmp(inst->cam_name, "")) {
		vin_err("Sensor name is NULL!\n");
		modules->sensor[i].sd = NULL;
		return -1;
	}
	/*camera sensor register. */
	modules->sensor[i].sd = __vin_subdev_register(vinc,
						      inst->cam_name,
						      inst->cam_addr >> 1,
						      modules->sensor[i].type);
	if (!vinc->modu_cfg.act_used) {
		modules->act[i].sd = NULL;
		return 0;
	}
	/*camera act register. */
	modules->act[i].sd = __vin_subdev_register(vinc,
						   inst->act_name,
						   inst->act_addr >> 1,
						   modules->act[i].type);
	return 0;
}
/*
 * Tear down the i-th sensor and actuator subdevs of @vinc and clear
 * their slots. __vin_subdev_unregister tolerates NULL, so this is safe
 * on partially-registered modules.
 */
static void __vin_unregister_module(struct vin_core *vinc, int i)
{
	struct vin_module_info *modules = &vinc->modu_cfg.modules;

	/*camera subdev unregister */
	__vin_subdev_unregister(modules->sensor[i].sd,
				modules->sensor[i].type);
	__vin_subdev_unregister(modules->act[i].sd,
				modules->act[i].type);
	vin_log(VIN_LOG_MD, "%s!\n", __func__);
	modules->sensor[i].sd = NULL;
	modules->act[i].sd = NULL;
}
/*
 * Register all configured sensor modules for @vinc. With a sensor list,
 * each detected candidate (at least one) is tried; otherwise only the
 * single configured sensor. Candidates failing the sensor-list check
 * are unregistered again immediately.
 */
static void __vin_register_modules(struct vin_core *vinc)
{
	int i, num;
	struct sensor_list *sensors = &vinc->modu_cfg.sensors;

	if (sensors->use_sensor_list == 1) {
		num = sensors->detect_num;
		/* Always try at least one candidate. */
		if (sensors->detect_num == 0)
			num = 1;
	} else {
		num = 1;
	}
	for (i = 0; i < num; i++) {
		if (sensors->use_sensor_list == 1)
			__vin_handle_sensor_info(&sensors->inst[i]);

		__vin_verify_sensor_info(&sensors->inst[i]);

		__vin_register_module(vinc, i);
		/* Drop candidates that fail the runtime detection check. */
		if (-1 == vin_core_check_sensor_list(vinc, i))
			__vin_unregister_module(vinc, i);
	}
}
/*
 * Unregister every sensor module of @vinc — the same candidate count
 * that __vin_register_modules used (detect_num when a sensor list is in
 * use and non-zero, otherwise one).
 */
static void __vin_unregister_modules(struct vin_core *vinc)
{
	struct sensor_list *sensors = &vinc->modu_cfg.sensors;
	int count = 1;
	int i;

	if (sensors->use_sensor_list == 1 && sensors->detect_num != 0)
		count = sensors->detect_num;

	for (i = 0; i < count; i++)
		__vin_unregister_module(vinc, i);
}
#ifdef CONFIG_OF
/*
 * Check whether the phandle list property @name of device-tree @node
 * references at least one available ("status = okay") node. Returns
 * false when the property is missing/empty, when a phandle cannot be
 * resolved, or when no referenced node is available.
 */
static bool vin_is_node_available(struct device_node *node, char *name)
{
	const __be32 *list;
	struct device_node *sub_np;
	int i, size;

	vin_log(VIN_LOG_MD, "%s\n", __func__);

	list = of_get_property(node, name, &size);
	if ((!list) || (0 == size)) {
		vin_warn("missing isp_handle property in node %s\n",
			 node->name);
	} else {
		vin_log(VIN_LOG_MD, "isp_handle value is %d len is %d\n",
			be32_to_cpup(list), size);
		/* Property length is in bytes; convert to cell count. */
		size /= sizeof(*list);
		for (i = 0; i < size; i++) {
			sub_np = of_find_node_by_phandle(be32_to_cpup(list++));
			if (!sub_np) {
				vin_warn("%s index %d invalid phandle\n",
					 "isp_handle", i);
				return false;
			} else if (of_device_is_available(sub_np)) {
				/* First available referenced node wins. */
				return true;
			}
		}
	}
	return false;
}
#else
/* Without device tree support there is nothing to look up. */
#define vin_is_node_available(node, name) (false)
#endif /* CONFIG_OF */
/*
 * Media-device link_notify hook: only logs link changes, performs no
 * routing work itself.
 */
static int vin_md_link_notify(struct media_pad *source,
			      struct media_pad *sink, u32 flags)
{
	vin_print("%s: source %s, sink %s, flag %d\n", __func__,
		  source->entity->name, sink->entity->name, flags);
	return 0;
}
/*
 * sysfs "subdev_api" show: report whether the driver exposes the
 * sub-device API ("sub-dev") or only the video-node API ("vid-dev").
 */
static ssize_t vin_md_sysfs_show(struct device *dev,
				 struct device_attribute *attr, char *buf)
{
	struct platform_device *pdev = to_platform_device(dev);
	struct vin_md *vind = platform_get_drvdata(pdev);

	if (vind->user_subdev_api)
		return strlcpy(buf, "Sub-device API (sub-dev)\n", PAGE_SIZE);

	return strlcpy(buf, "V4L2 video node only API (vid-dev)\n", PAGE_SIZE);
}
/*
 * sysfs "subdev_api" store: accept "vid-dev\n" or "sub-dev\n" and push
 * the chosen mode to the media device and every registered vin core.
 * Any other input is silently ignored (count is still consumed).
 */
static ssize_t vin_md_sysfs_store(struct device *dev,
				  struct device_attribute *attr,
				  const char *buf, size_t count)
{
	struct platform_device *pdev = to_platform_device(dev);
	struct vin_md *vind = platform_get_drvdata(pdev);
	bool subdev_api;
	int i;

	if (!strcmp(buf, "vid-dev\n"))
		subdev_api = false;
	else if (!strcmp(buf, "sub-dev\n"))
		subdev_api = true;
	else
		return count;

	vind->user_subdev_api = subdev_api;
	/* Propagate to all existing vin core devices. */
	for (i = 0; i < VIN_MAX_DEV; i++)
		if (vind->vinc[i])
			vind->vinc[i]->vid_cap.user_subdev_api = subdev_api;
	return count;
}
/* /sys/.../subdev_api attribute: owner-writable, world-readable. */
static DEVICE_ATTR(subdev_api, S_IWUSR | S_IRUGO,
		   vin_md_sysfs_show, vin_md_sysfs_store);
/* Clock acquisition stub — no media-device level clocks on this platform. */
static int vin_md_get_clocks(struct vin_md *vind)
{
	return 0;
}

/* Counterpart release stub for vin_md_get_clocks. */
static void vin_md_put_clocks(struct vin_md *vind)
{
}
/*
 * Register one vin core's capture subdev with the media v4l2 device,
 * hand it the pipeline ops via subdev host data, and record the core in
 * vind->vinc[] on success. Returns -EBUSY for an out-of-range id, else
 * the v4l2 registration result.
 */
static int vin_md_register_core_entity(struct vin_md *vind,
				       struct vin_core *vinc)
{
	struct v4l2_subdev *sd;
	int ret;

	if (WARN_ON(vinc->id >= VIN_MAX_DEV))
		return -EBUSY;

	sd = &vinc->vid_cap.subdev;
	/* Pipeline callbacks are retrieved later via host data. */
	v4l2_set_subdev_hostdata(sd, (void *)&vin_pipe_ops);

	ret = v4l2_device_register_subdev(&vind->v4l2_dev, sd);
	if (!ret) {
		vind->vinc[vinc->id] = vinc;
		vinc->vid_cap.user_subdev_api = vind->user_subdev_api;
	} else {
		vin_err("Failed to register vin_cap.%d (%d)\n",
			vinc->id, ret);
	}
	return ret;
}
/*
 * Discover and register every entity owned by the media device: per-id
 * vin cores (with their sensor modules and optional flash subdev), then
 * all CSI, MIPI, ISP (+ statistics buffer) and scaler subdevs.
 * Individual subdev failures are only warned about; the function itself
 * always returns 0. @parent is currently unused.
 */
static int vin_md_register_entities(struct vin_md *vind,
				    struct device_node *parent)
{
	int i, ret;
	struct vin_core *vinc = NULL;

	vin_print("%s\n", __func__);

	for (i = 0; i < VIN_MAX_DEV; i++) {
		/*video device register */
		vind->vinc[i] = sunxi_vin_core_get_dev(i);
		if (NULL == vind->vinc[i])
			continue;
		vinc = vind->vinc[i];
		vinc->v4l2_dev = &vind->v4l2_dev;

		__vin_register_modules(vinc);
		/* Skip cores whose sensors all failed detection. */
		if (-1 == vinc->modu_cfg.sensors.valid_idx) {
			vind->vinc[i] = NULL;
			continue;
		}

		vin_md_register_core_entity(vind, vinc);

		if (!vinc->modu_cfg.flash_used)
			continue;
		/*flash subdev register */
		vinc->modu_cfg.modules.flash.id = i;
		vinc->modu_cfg.modules.flash.sd = sunxi_flash_get_subdev(i);
		ret = v4l2_device_register_subdev(vinc->v4l2_dev,
						  vinc->modu_cfg.modules.flash.sd);
		if (ret < 0)
			vin_warn("flash%d subdev register fail!\n", i);
	}

	for (i = 0; i < VIN_MAX_CSI; i++) {
		/*Register CSI subdev */
		vind->csi[i].id = i;
		vind->csi[i].sd = sunxi_csi_get_subdev(i);
		ret = v4l2_device_register_subdev(&vind->v4l2_dev,
						  vind->csi[i].sd);
		if (ret < 0)
			vin_warn("csi%d subdev register fail!\n", i);
	}

	for (i = 0; i < VIN_MAX_MIPI; i++) {
		/*Register MIPI subdev */
		vind->mipi[i].id = i;
		vind->mipi[i].sd = sunxi_mipi_get_subdev(i);
		ret = v4l2_device_register_subdev(&vind->v4l2_dev,
						  vind->mipi[i].sd);
		if (ret < 0)
			vin_warn("mipi%d subdev register fail!\n", i);
	}

	for (i = 0; i < VIN_MAX_ISP; i++) {
		/*Register ISP subdev */
		vind->isp[i].id = i;
		vind->isp[i].sd = sunxi_isp_get_subdev(i);
		ret = v4l2_device_register_subdev(&vind->v4l2_dev,
						  vind->isp[i].sd);
		if (ret < 0)
			vin_warn("isp%d subdev register fail!\n", i);
		/*Register STATISTIC BUF subdev */
		vind->stat[i].id = i;
		vind->stat[i].sd = sunxi_stat_get_subdev(i);
		ret = v4l2_device_register_subdev(&vind->v4l2_dev,
						  vind->stat[i].sd);
		if (ret < 0)
			vin_warn("stat%d subdev register fail!\n", i);
	}

	for (i = 0; i < VIN_MAX_SCALER; i++) {
		/*Register SCALER subdev */
		vind->scaler[i].id = i;
		vind->scaler[i].sd = sunxi_scaler_get_subdev(i);
		ret = v4l2_device_register_subdev(&vind->v4l2_dev,
						  vind->scaler[i].sd);
		if (ret < 0)
			vin_warn("scaler%d subdev register fail!\n", i);
	}
	return 0;
}
/*
 * Unregister every entity registered by vin_md_register_entities and
 * clear the corresponding pointers.
 *
 * Fixes:
 *  - The CSI loop cleared vind->cci[i].sd instead of vind->csi[i].sd,
 *    leaving a stale csi pointer behind (copy-paste bug).
 *  - A NULL flash subdev used to `continue`, which skipped unregistering
 *    the core's capture subdev and clearing vind->vinc[i] for any device
 *    without a flash module; the flash check now only guards the flash
 *    unregister itself.
 */
static void vin_md_unregister_entities(struct vin_md *vind)
{
	struct vin_core *vinc;
	int i;
	struct vin_module_info *modules = NULL;

	for (i = 0; i < VIN_MAX_DEV; i++) {
		if (vind->vinc[i] == NULL)
			continue;
		vinc = vind->vinc[i];
		__vin_unregister_modules(vinc);
		modules = &vinc->modu_cfg.modules;
		if (modules->flash.sd) {
			v4l2_device_unregister_subdev(modules->flash.sd);
			modules->flash.sd = NULL;
		}
		v4l2_device_unregister_subdev(&vinc->vid_cap.subdev);
		vinc->pipeline_ops = NULL;
		vind->vinc[i] = NULL;
	}

	for (i = 0; i < VIN_MAX_CSI; i++) {
		if (vind->csi[i].sd == NULL)
			continue;
		v4l2_device_unregister_subdev(vind->csi[i].sd);
		vind->csi[i].sd = NULL;
	}

	for (i = 0; i < VIN_MAX_MIPI; i++) {
		if (vind->mipi[i].sd == NULL)
			continue;
		v4l2_device_unregister_subdev(vind->mipi[i].sd);
		vind->mipi[i].sd = NULL;
	}

	for (i = 0; i < VIN_MAX_ISP; i++) {
		if (vind->isp[i].sd == NULL)
			continue;
		v4l2_device_unregister_subdev(vind->isp[i].sd);
		vind->isp[i].sd = NULL;
		v4l2_device_unregister_subdev(vind->stat[i].sd);
		vind->stat[i].sd = NULL;
	}

	for (i = 0; i < VIN_MAX_SCALER; i++) {
		if (vind->scaler[i].sd == NULL)
			continue;
		v4l2_device_unregister_subdev(vind->scaler[i].sd);
		vind->scaler[i].sd = NULL;
	}
	vin_print("%s\n", __func__);
}
/*
 * Create an enabled media link from every registered sensor subdev of
 * @vinc to the @to subdev (a MIPI or CSI receiver). Returns the result
 * of the last link creation, or -1 when no sensor subdev exists.
 * NOTE(review): the "all NULL" test checks indices 0..2 explicitly,
 * which assumes MAX_DETECT_NUM >= 3 — confirm against its definition.
 */
static int sensor_link_to_mipi_csi(struct vin_core *vinc,
				   struct v4l2_subdev *to)
{
	struct v4l2_subdev *sensor[MAX_DETECT_NUM];
	struct media_entity *source, *sink;
	int j, ret = 0;

	for (j = 0; j < MAX_DETECT_NUM; j++)
		sensor[j] = vinc->modu_cfg.modules.sensor[j].sd;

	if ((!sensor[0]) && (!sensor[1]) && (!sensor[2])) {
		vin_err("Pipe line sensor subdev is NULL!\n");
		return -1;
	}

	for (j = 0; j < MAX_DETECT_NUM; j++) {
		if (sensor[j] == NULL)
			continue;
		source = &sensor[j]->entity;
		sink = &to->entity;
		ret = media_entity_create_link(source, SENSOR_PAD_SOURCE,
					       sink, 0,
					       /*MEDIA_LNK_FL_IMMUTABLE | */
					       MEDIA_LNK_FL_ENABLED);
		/* The '=' vs '-' choice is a constant-true expression here;
		 * it always prints '='. */
		vin_print("created link [%s] %c> [%s]\n",
			  source->name, MEDIA_LNK_FL_ENABLED ? '=' : '-',
			  sink->name);
	}
	return ret;
}
/*
 * Build the media graph: per vin core, link its sensors to the
 * configured MIPI or CSI receiver (0xff in pipe_cfg means "no such
 * stage"), link MIPI->CSI when both exist, then scaler -> capture
 * subdev -> video node. Afterwards create the full (disabled) mesh of
 * CSI->ISP and ISP->scaler links, plus a fixed enabled ISP->stat link.
 * Returns the first fatal link-creation error, else the last ret.
 */
static int vin_create_media_links(struct vin_md *vind)
{
	struct v4l2_subdev *mipi, *csi, *isp, *stat, *scaler, *cap_sd;
	struct media_entity *source, *sink;
	int i, j, ret = 0;

	for (i = 0; i < VIN_MAX_DEV; i++) {
		struct vin_core *vinc = NULL;
		struct vin_pipeline_cfg *pc = NULL;

		vinc = vind->vinc[i];
		if (NULL == vinc)
			continue;
		pc = &vinc->pipe_cfg;

		/*MIPI*/
		if (0xff == pc->mipi_ind)
			mipi = NULL;
		else
			mipi = vind->mipi[pc->mipi_ind].sd;

		/*CSI*/
		if (0xff == pc->csi_ind)
			csi = NULL;
		else
			csi = vind->csi[pc->csi_ind].sd;

		if (NULL != mipi) {
			/*link MIPI sensor*/
			ret = sensor_link_to_mipi_csi(vinc, mipi);
			if (NULL == csi) {
				vin_err("MIPI Pipe line csi subdev is NULL, "
					"DevID is %d\n", i);
				continue;
			}
			source = &mipi->entity;
			sink = &csi->entity;
			ret = media_entity_create_link(source, MIPI_PAD_SOURCE,
						       sink, CSI_PAD_SINK,
						       MEDIA_LNK_FL_ENABLED);
		} else {
			/*link Bt.601 sensor*/
			if (NULL == csi) {
				vin_err("Bt.601 Pipeline csi subdev is NULL, "
					"DevID is %d\n", i);
				continue;
			}
			ret = sensor_link_to_mipi_csi(vinc, csi);
		}

		cap_sd = &vinc->vid_cap.subdev;

		/* SCALER */
		scaler = vind->scaler[i].sd;
		if (scaler == NULL)
			continue;

		/*Link Vin Core*/
		source = &scaler->entity;
		sink = &cap_sd->entity;
		ret = media_entity_create_link(source, SCALER_PAD_SOURCE,
					       sink, VIN_SD_PAD_SINK,
					       MEDIA_LNK_FL_ENABLED);
		if (ret)
			break;
		/* Notify vin core subdev entity */
		ret = media_entity_call(sink, link_setup, &sink->pads[0],
					&source->pads[SCALER_PAD_SOURCE],
					MEDIA_LNK_FL_ENABLED);
		if (ret)
			break;
		vin_log(VIN_LOG_MD, "created link [%s] %c> [%s]\n",
			source->name, MEDIA_LNK_FL_ENABLED ? '=' : '-',
			sink->name);

		source = &cap_sd->entity;
		sink = &vinc->vid_cap.vdev.entity;
		ret = media_entity_create_link(source, VIN_SD_PAD_SOURCE,
					       sink, 0, MEDIA_LNK_FL_ENABLED);
		if (ret)
			break;
		vin_log(VIN_LOG_MD, "created link [%s] %c> [%s]\n",
			source->name, MEDIA_LNK_FL_ENABLED ? '=' : '-',
			sink->name);
	}

	/* Full CSI -> ISP mesh, created disabled (flags = 0). */
	for (i = 0; i < VIN_MAX_CSI; i++) {
		csi = vind->csi[i].sd;
		if (csi == NULL)
			continue;
		source = &csi->entity;
		for (j = 0; j < VIN_MAX_ISP; j++) {
			isp = vind->isp[j].sd;
			if (isp == NULL)
				continue;
			sink = &isp->entity;
			ret = media_entity_create_link(source, CSI_PAD_SOURCE,
						       sink, ISP_PAD_SINK,
						       0);
			vin_log(VIN_LOG_MD, "created link [%s] %c> [%s]\n",
				source->name, 0 ? '=' : '-',
				sink->name);
		}
	}

	for (i = 0; i < VIN_MAX_ISP; i++) {
		isp = vind->isp[i].sd;
		if (isp == NULL)
			continue;
		source = &isp->entity;
		/* Fixed, always-enabled link to the statistics buffer. */
		stat = vind->stat[i].sd;
		sink = &stat->entity;
		ret = media_entity_create_link(source, ISP_PAD_SOURCE_ST,
					       sink, 0,
					       MEDIA_LNK_FL_IMMUTABLE |
					       MEDIA_LNK_FL_ENABLED);
		vin_log(VIN_LOG_MD, "created link [%s] %c> [%s]\n",
			source->name, MEDIA_LNK_FL_ENABLED ? '=' : '-',
			sink->name);
		/* Full ISP -> scaler mesh, created disabled. */
		for (j = 0; j < VIN_MAX_SCALER; j++) {
			scaler = vind->scaler[j].sd;
			if (scaler == NULL)
				continue;
			sink = &scaler->entity;
			ret = media_entity_create_link(source, ISP_PAD_SOURCE,
						       sink, SCALER_PAD_SINK,
						       0);
			vin_log(VIN_LOG_MD, "created link [%s] %c> [%s]\n",
				source->name, 0 ? '=' : '-',
				sink->name);
		}
	}
	return ret;
}
/*
 * Enable the default route for each vin core out of the disabled mesh
 * created by vin_create_media_links: CSI->ISP (or CSI->scaler when no
 * ISP), then ISP->scaler, and finally cache the resulting pipeline
 * subdevs on the capture subdev. 0xff in pipe_cfg means "stage absent".
 *
 * Fix: `link` was not reset between the two find/setup stages, so when
 * isp or scaler was NULL the second `if (link)` re-ran
 * media_entity_setup_link on the first stage's link.
 */
static int vin_setup_default_links(struct vin_md *vind)
{
	struct v4l2_subdev *csi, *isp, *scaler;
	int i, ret = 0;

	for (i = 0; i < VIN_MAX_DEV; i++) {
		struct vin_core *vinc = NULL;
		struct vin_pipeline_cfg *pc = NULL;
		struct media_link *link = NULL;
		struct vin_pipeline *p = NULL;

		vinc = vind->vinc[i];
		if (NULL == vinc)
			continue;
		pc = &vinc->pipe_cfg;

		/*CSI*/
		if (0xff == pc->csi_ind)
			csi = NULL;
		else
			csi = vind->csi[pc->csi_ind].sd;

		/*ISP*/
		if (0xff == pc->isp_ind)
			isp = NULL;
		else
			isp = vind->isp[pc->isp_ind].sd;

		/*SCALER*/
		if (0xff == pc->scaler_ind)
			scaler = NULL;
		else
			scaler = vind->scaler[pc->scaler_ind].sd;

		if (csi && isp) {
			link = media_entity_find_link(&csi->entity.pads[CSI_PAD_SOURCE],
						      &isp->entity.pads[ISP_PAD_SINK]);
		} else if (csi && scaler) {
			link = media_entity_find_link(&csi->entity.pads[CSI_PAD_SOURCE],
						      &scaler->entity.pads[SCALER_PAD_SINK]);
		}
		if (link) {
			vin_log(VIN_LOG_MD, "link: source %s sink %s\n",
				link->source->entity->name,
				link->sink->entity->name);
			ret = media_entity_setup_link(link, MEDIA_LNK_FL_ENABLED);
			if (ret)
				vin_err("media_entity_setup_link error\n");
		} else {
			vin_err("media_entity_find_link null\n");
		}

		/* Fix: do not carry the previous link into this stage. */
		link = NULL;
		if (isp && scaler)
			link = media_entity_find_link(&isp->entity.pads[ISP_PAD_SOURCE],
						      &scaler->entity.pads[SCALER_PAD_SINK]);
		if (link) {
			vin_log(VIN_LOG_MD, "link: source %s sink %s\n",
				link->source->entity->name,
				link->sink->entity->name);
			ret = media_entity_setup_link(link, MEDIA_LNK_FL_ENABLED);
			if (ret)
				vin_err("media_entity_setup_link error\n");
		} else {
			vin_err("media_entity_find_link null\n");
		}

		/* Cache the now-routed pipeline subdevs for later use. */
		p = &vinc->vid_cap.pipe;
		vin_md_prepare_pipeline(p, &vinc->vid_cap.subdev.entity);
	}
	return ret;
}
/*
 * Platform probe: allocate the media-device state, register the v4l2
 * and media devices, register all entities under the graph mutex,
 * create and enable default links, expose device nodes and the
 * subdev_api sysfs attribute.
 *
 * Fix: failures after the graph mutex was released (default links,
 * subdev nodes, sysfs file) jumped to err_unlock and unlocked the
 * mutex a second time. Those paths now jump to err_clk instead.
 */
static int vin_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct v4l2_device *v4l2_dev;
	struct vin_md *vind;
	int ret;

	vind = devm_kzalloc(dev, sizeof(*vind), GFP_KERNEL);
	if (!vind)
		return -ENOMEM;

	spin_lock_init(&vind->slock);
	vind->pdev = pdev;

	strlcpy(vind->media_dev.model, "Allwinner Vin",
		sizeof(vind->media_dev.model));
	vind->media_dev.link_notify = vin_md_link_notify;
	vind->media_dev.dev = dev;

	v4l2_dev = &vind->v4l2_dev;
	v4l2_dev->mdev = &vind->media_dev;
	strlcpy(v4l2_dev->name, "sunxi-vin", sizeof(v4l2_dev->name));

	vind->isp_used = vin_is_node_available(dev->of_node, "isp_handle");

	ret = v4l2_device_register(dev, &vind->v4l2_dev);
	if (ret < 0) {
		vin_err("Failed to register v4l2_device: %d\n", ret);
		return ret;
	}
	ret = media_device_register(&vind->media_dev);
	if (ret < 0) {
		vin_err("Failed to register media device: %d\n",
			ret);
		goto err_md;
	}

	platform_set_drvdata(pdev, vind);

	ret = vin_md_get_clocks(vind);
	if (ret)
		goto err_clk;

	vind->user_subdev_api = 0;

#ifdef CONFIG_PM_RUNTIME
	pm_runtime_enable(&pdev->dev);
#endif
	/* Protect the media graph while we're registering entities */
	mutex_lock(&vind->media_dev.graph_mutex);
	if (dev->of_node)
		ret = vin_md_register_entities(vind, dev->of_node);
	else {
		vin_err("Device tree of_node is NULL!\n");
		ret = -ENOSYS;
	}
	if (ret)
		goto err_unlock;

	ret = vin_create_media_links(vind);
	if (ret) {
		vin_err("vin_create_media_links error\n");
		goto err_unlock;
	}
	mutex_unlock(&vind->media_dev.graph_mutex);

	/*
	 * when use media_entity_setup_link we should
	 * pay attention to graph_mutex dead lock.
	 */
	ret = vin_setup_default_links(vind);
	if (ret) {
		vin_err("vin_setup_default_links error\n");
		goto err_clk;
	}

	ret = v4l2_device_register_subdev_nodes(&vind->v4l2_dev);
	if (ret)
		goto err_clk;

	ret = device_create_file(&pdev->dev, &dev_attr_subdev_api);
	if (ret)
		goto err_clk;

	vin_print("%s ok!\n", __func__);
	return 0;

err_unlock:
	mutex_unlock(&vind->media_dev.graph_mutex);
err_clk:
	vin_md_put_clocks(vind);
	vin_md_unregister_entities(vind);
	media_device_unregister(&vind->media_dev);
err_md:
	v4l2_device_unregister(&vind->v4l2_dev);
	return ret;
}
/*
 * Platform remove: tear down everything probe set up, in reverse order.
 *
 * Fix: the original began by unlocking media_dev.graph_mutex although
 * no path here had locked it — unlocking an unheld mutex is undefined
 * behavior. The stray unlock is removed.
 */
static int vin_remove(struct platform_device *pdev)
{
	struct vin_md *vind = (struct vin_md *)dev_get_drvdata(&pdev->dev);

	device_remove_file(&pdev->dev, &dev_attr_subdev_api);
	vin_md_put_clocks(vind);
	vin_md_unregister_entities(vind);
	media_device_unregister(&vind->media_dev);
	v4l2_device_unregister(&vind->v4l2_dev);
#ifdef CONFIG_PM_RUNTIME
	pm_runtime_disable(&pdev->dev);
#endif
	/* vind is devm-allocated; the explicit free just releases it early. */
	devm_kfree(&pdev->dev, vind);
	vin_print("%s ok!\n", __func__);
	return 0;
}
/* Shutdown hook: nothing to quiesce, just log. */
static void vin_shutdown(struct platform_device *pdev)
{
	vin_print("%s!\n", __func__);
}
#ifdef CONFIG_PM_RUNTIME
/* Runtime-PM stubs: the media device itself has no state to save. */
int vin_runtime_suspend(struct device *d)
{
	return 0;
}

int vin_runtime_resume(struct device *d)
{
	return 0;
}

int vin_runtime_idle(struct device *d)
{
	return 0;
}
#endif

/* System sleep stubs — suspend/resume is handled by the sub-drivers. */
int vin_suspend(struct device *d)
{
	return 0;
}

int vin_resume(struct device *d)
{
	return 0;
}

static const struct dev_pm_ops vin_runtime_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(vin_suspend, vin_resume)
	SET_RUNTIME_PM_OPS(vin_runtime_suspend, vin_runtime_resume,
			   vin_runtime_idle)
};
/* Device-tree match table for the top-level vin media device. */
static const struct of_device_id sunxi_vin_match[] = {
	{.compatible = "allwinner,sunxi-vin-media",},
	{},
};

static struct platform_driver vin_driver = {
	.probe = vin_probe,
	.remove = vin_remove,
	.shutdown = vin_shutdown,
	.driver = {
		   .name = VIN_MODULE_NAME,
		   .owner = THIS_MODULE,
		   .of_match_table = sunxi_vin_match,
		   .pm = &vin_runtime_pm_ops,
		   }
};
/*
 * Module init: register the sub-driver platform drivers (csi, isp,
 * mipi, flash, scaler, vin core) and finally the media-device driver
 * itself. Stops at the first failure.
 *
 * Fix: the platform_driver_register failure printed the same message as
 * the vin-core failure, making the two indistinguishable in the log.
 * NOTE(review): earlier failures do not unwind already-registered
 * sub-drivers — confirm whether that leak matters for this platform.
 */
static int __init vin_init(void)
{
	int ret;

	vin_print("Welcome to Video Front End driver\n");

	ret = sunxi_csi_platform_register();
	if (ret) {
		vin_err("Sunxi csi driver register failed\n");
		return ret;
	}

	ret = sunxi_isp_platform_register();
	if (ret) {
		vin_err("Sunxi isp driver register failed\n");
		return ret;
	}

	ret = sunxi_mipi_platform_register();
	if (ret) {
		vin_err("Sunxi mipi driver register failed\n");
		return ret;
	}

	ret = sunxi_flash_platform_register();
	if (ret) {
		vin_err("Sunxi flash driver register failed\n");
		return ret;
	}

	ret = sunxi_scaler_platform_register();
	if (ret) {
		vin_err("Sunxi scaler driver register failed\n");
		return ret;
	}

	ret = sunxi_vin_core_register_driver();
	if (ret) {
		vin_err("Sunxi vin register driver failed!\n");
		return ret;
	}

	ret = platform_driver_register(&vin_driver);
	if (ret) {
		vin_err("Sunxi vin media driver register failed!\n");
		return ret;
	}
	vin_print("vin init end\n");
	return ret;
}
/* Module exit: unregister everything vin_init registered, in reverse. */
static void __exit vin_exit(void)
{
	vin_print("vin_exit\n");
	platform_driver_unregister(&vin_driver);
	sunxi_vin_core_unregister_driver();
	sunxi_csi_platform_unregister();
	sunxi_isp_platform_unregister();
	sunxi_mipi_platform_unregister();
	sunxi_flash_platform_unregister();
	sunxi_scaler_platform_unregister();
	vin_print("vin_exit end\n");
}
module_init(vin_init);
module_exit(vin_exit);
MODULE_AUTHOR("yangfeng");
MODULE_LICENSE("Dual BSD/GPL");
MODULE_DESCRIPTION("Video Input Module for Allwinner");
|
<filename>omf-impl/src/main/java/org/om/core/impl/session/factory/DefaultSessionFactory.java
package org.om.core.impl.session.factory;
import org.om.core.api.persistence.PersistenceAdapterFactory;
import org.om.core.impl.mapping.extractor.EntityMappingExtractorImpl;
import org.om.core.impl.mapping.registry.OnDemandMappingRegistry;
import org.om.core.impl.persistence.cglib.CglibProxyFactory;
import org.om.core.impl.persistence.interceptor.factory.PersistenceInterceptorFactoryImpl;
/**
*
* This class extends ImmutableSessionFactory and passes the "usual defaults" to
* ImmutableSessionFactory, to allow creating sessions without worrying about
* needing to pass the ctor arguments. It is used in the contribs.
*
* @author tome
*
*
*/
public class DefaultSessionFactory extends ImmutableSessionFactory {

	public DefaultSessionFactory(PersistenceAdapterFactory persistenceDelegateFactory) {
		super(persistenceDelegateFactory, defaultMappingRegistry(), defaultProxyFactory());
	}

	// Default mapping registry: resolves mappings on demand via annotation extraction.
	private static OnDemandMappingRegistry defaultMappingRegistry() {
		return new OnDemandMappingRegistry(new EntityMappingExtractorImpl());
	}

	// Default proxy factory: cglib proxies wired with the standard interceptor factory.
	private static CglibProxyFactory defaultProxyFactory() {
		return new CglibProxyFactory(new PersistenceInterceptorFactoryImpl());
	}
}
|
// repo: wilbur147/Ruoyi-VUE-Plus
package com.ruoyi.content.mapper;
import com.ruoyi.common.core.mybatisplus.core.BaseMapperPlus;
import com.ruoyi.content.domain.ConTopic;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
/**
 * Topic mapper interface.
 *
 * Inherits the generic CRUD operations for {@link ConTopic} from
 * {@link BaseMapperPlus}; no custom SQL methods are declared here.
 *
 * @author ruoyi
 * @date 2021-05-12
 */
public interface ConTopicMapper extends BaseMapperPlus<ConTopic> {
}
|
#! /bin/bash
# Build the server binary and run three instances on ports 8080-8082.
# Press Enter (or interrupt) to stop.  Only the instances started here are
# terminated -- the original used `killall server`, which would kill any
# process named "server" system-wide and did nothing on Ctrl-C.
set -e

go build

pids=()
for port in 8080 8081 8082; do
    ./server -port=$port &
    pids+=($!)
done

# Ensure the spawned servers are terminated even on interrupt or error.
trap 'kill "${pids[@]}" 2>/dev/null' EXIT

echo "running, enter to stop"
read
|
// vendor/github.com/crackcomm/cloudflare/messages.go
package cloudflare
import "encoding/json"
// Response - Cloudflare API Response.
// Result is kept as raw JSON so each endpoint can decode it into its own
// concrete type; Errors and Success carry the API-level outcome.
type Response struct {
	Result json.RawMessage `json:"result"` // endpoint-specific payload
	ResultInfo *ResultInfo `json:"result_info"` // pagination info, if any
	Errors []*ResponseError `json:"errors"`
	Success bool `json:"success"`
}
// ResultInfo - Cloudflare API Response Result Info.
// Describes the pagination of a list response.
type ResultInfo struct {
	Page int `json:"page,omitempty"`
	PerPage int `json:"per_page,omitempty"`
	TotalPages int `json:"total_pages,omitempty"`
	Count int `json:"count,omitempty"`
	TotalCount int `json:"total_count,omitempty"`
}
// ResponseError - Cloudflare API Response error.
type ResponseError struct {
	Code int `json:"code,omitempty"`
	Message string `json:"message,omitempty"`
}
// Err returns the first API error carried by the response, or nil when the
// response reported no errors.
func (response *Response) Err() error {
	if len(response.Errors) == 0 {
		return nil
	}
	return response.Errors[0]
}
// Error - Returns response error message.
// Implements the error interface for ResponseError; the numeric Code is not
// included in the text.
func (err *ResponseError) Error() string {
	return err.Message
}
|
// frontend3/src/App.tsx
import React, { useMemo } from 'react';
import './App.css';
import { MessageDialog } from './pages/000_common/MessageDialg';
import { SignIn } from './pages/010_signin';
import { SignUp } from './pages/011_signup';
import { Verify } from './pages/012_verify';
import { RequestChangePassword } from './pages/013_requestChangePassword';
import { NewPassword } from './pages/014_newPassword';
import { Entrance } from './pages/020_entrance';
import { CreateMeetingRoom } from './pages/021_createMeetingRoom';
import { WaitingRoom } from './pages/022_waitingRoom/WaitingRoom';
import { WaitingRoomAmongUs } from './pages/022_waitingRoom/WaitingRoomAmongUs';
import { MeetingRoom } from './pages/023_meetingRoom/MeetingRoom';
import { MeetingRoomAmongUs } from './pages/024_meetingRoomAmongUs/MeetingRoomAmongUs';
import { MeetingManagerSignin } from './pages/100_MeetingManagerSignin/MeetingManagerSingin';
import { MeetingManager } from './pages/101_MeetingManager/MeetingManager';
import { HeadlessMeetingManager } from './pages/200_HeadlessMeetingManager/HeadlessMeetingManager';
import { AppStateProvider, useAppState } from './providers/AppStateProvider';
// Top-level router: selects the page component from the app-state `stage`.
// `mode` additionally switches the waiting/meeting rooms to their "amongus"
// variants.
const Router = () => {
    const { stage, mode } = useAppState()
    console.log(`[App] stage:${stage}`)
    // Recompute the page only when stage or mode changes.
    const page = useMemo(()=>{
        switch(stage){
            case "SIGNIN":
                return <SignIn />
            case "SIGNUP":
                return <SignUp />
            case "VERIFY":
                return <Verify />
            case "REQUEST_NEW_PASSWORD":
                return <RequestChangePassword />
            case "NEW_PASSWORD":
                return <NewPassword />
            case "ENTRANCE":
                return <Entrance />
            case "CREATE_MEETING_ROOM":
                return <CreateMeetingRoom />
            case "WAITING_ROOM":
                if(mode === "amongus"){
                    return <WaitingRoomAmongUs />
                }else{
                    return <WaitingRoom />
                }
            case "MEETING_ROOM":
                if(mode === "amongus"){
                    return <MeetingRoomAmongUs />
                }else {
                    return <MeetingRoom />
                }
            case "MEETING_MANAGER_SIGNIN":
                return <MeetingManagerSignin />
            case "MEETING_MANAGER":
                return <MeetingManager />
            case "HEADLESS_MEETING_MANAGER":
                return <HeadlessMeetingManager />
            default:
                // Fallback for an unknown stage value.
                return <div>no view</div>
        }
    },[stage, mode])
    return (
        <div >
            {page}
        </div>
    )
}
// Application root: wraps the router and the shared message dialog in the
// global app-state provider.
const App = () => (
    <div >
        <AppStateProvider>
            <Router />
            <MessageDialog />
        </AppStateProvider>
    </div>
);
export default App;
|
package com.example.xty.helloagain.Utils;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import com.example.xty.helloagain.MyEntity.Entity;
import com.example.xty.helloagain.R;
import java.util.List;
/**
 * RecyclerView adapter that renders a list of {@link Entity} rows into the
 * three-column item layout (R.layout.sheet_item_layout).
 *
 * The adapter is now parameterized with its own ViewHolder type instead of
 * extending the raw {@code RecyclerView.Adapter}, which removes the unchecked
 * cast that was needed in {@code onBindViewHolder}.
 *
 * Created by xty on 2018/6/24.
 */
public class SheetAdapter extends RecyclerView.Adapter<SheetAdapter.sheetViewHolder> {
    private List<Entity> list; // backing data, one Entity per row

    public SheetAdapter(List<Entity> list) {
        this.list = list;
    }

    @Override
    public sheetViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
        View view = LayoutInflater.from(parent.getContext())
                .inflate(R.layout.sheet_item_layout, parent, false);
        return new sheetViewHolder(view);
    }

    @Override
    public void onBindViewHolder(sheetViewHolder holder, int position) {
        // Typed holder: no cast needed now that the adapter is generic.
        Entity entity = list.get(position);
        holder.getTv_sheetRow1().setText(entity.getSheetRow1());
        holder.getTv_sheetRow2().setText(entity.getSheetRow2());
        holder.getTv_sheetRow3().setText(entity.getSheetRow3());
    }

    @Override
    public int getItemCount() {
        return list.size();
    }

    /** Holds the three row TextViews of one sheet item. */
    public class sheetViewHolder extends RecyclerView.ViewHolder{
        public final View mView;
        public final TextView tv_sheetRow1;
        public final TextView tv_sheetRow2;
        public final TextView tv_sheetRow3;

        public sheetViewHolder(View itemView) {
            super(itemView);
            mView = itemView;
            tv_sheetRow1 = (TextView) itemView.findViewById(R.id.tv_sheetRow1);
            tv_sheetRow2 = (TextView) itemView.findViewById(R.id.tv_sheetRow2);
            tv_sheetRow3 = (TextView) itemView.findViewById(R.id.tv_sheetRow3);
        }

        public TextView getTv_sheetRow1() {
            return tv_sheetRow1;
        }

        public TextView getTv_sheetRow2() {
            return tv_sheetRow2;
        }

        public TextView getTv_sheetRow3() {
            return tv_sheetRow3;
        }
    }
}
|
#!/bin/bash
# by Sanzhen Liu
# 9/12/2021
# Paired-end bwa-mem alignment pipeline: align, filter with a parser script
# in parallel, merge, and produce a sorted + indexed BAM.
version=0.01;
default_f1suffix='.R1.pair.fq';  # read-1 filename suffix
default_f2suffix='.R2.pair.fq';  # read-2 filename suffix
default_cpus=4;
script_dir=`echo $0 | sed 's/[^\/]*\/[^\/]*$//g'` # remove file and the direct subdirectory
default_parser=$script_dir"/utils/samparser.bwa.pl";
default_aggregator=$script_dir"/utils/alignment.log.aggregate.pl";
# filtering criteria handed to the parser script
parser_para="-e 60 -m 5 100 --tail 5 100 --gap 10 --insert 100 600";
RED='\033[0;31m'
NC='\033[0m' # No Color
# usage: print the help text (prerequisites and all command-line options) to stderr.
usage() {
echo -e "${RED}Prerequirement${NC}: bwa, samtools"
echo -e "${RED}Usage${NC}: $0 -f <fastq> -r <ref> [other options]" >&2
echo "  -f: fastq file; required" >&2
echo "  -r: bwa indexed database; required" >&2
echo "  -1: suffix of first pair of fastq ($default_f1suffix)" >&2
echo "  -2: suffix of second pair of fastq ($default_f2suffix)" >&2
echo "  -c: number of cpus ($default_cpus)" >&2
echo "  -m: modules to load" >&2
echo "  -p: parser script for filtering bwa alignments ($default_parser)" >&2;
echo "  -a: aggregator script for merging parser log ($default_aggregator)" >&2;
echo "  -h: help information" >&2
}
# Parse command-line options.  Every value-taking option must carry a
# trailing ':' in the optstring -- the original omitted 'a:' so -a was
# rejected as an invalid option even though the case below handled it.
while getopts ":f:r:1:2:c:p:m:a:vh" opt; do
case $opt in
	f) fq=$OPTARG;;
	r) ref=$OPTARG;;
	1) f1suffix=$OPTARG;;
	2) f2suffix=$OPTARG;;
	c) cpus=$OPTARG;;
	p) parser=$OPTARG;;
	m) modules+=($OPTARG);;
	a) aggregator=$OPTARG;;
	v) echo $version; exit;;
	h) usage; exit;;
	\?) echo "Invalid options: -$OPTARG." >&2; exit 1;;
	:) echo "Option -$OPTARG requires an argument" >&2; exit 1;;
esac
done
###############################################
# modules
###############################################
# cmd_check <command>: abort with a nonzero status when <command> is not on
# PATH.  The original tested `$? -eq 1` (missing other failure codes) and
# called plain `exit`, which exits 0 from an error path.
cmd_check () {
input_cmd=$1
if ! which "$input_cmd" &>/dev/null; then
	echo -e "${RED}ERROR${NC}: $input_cmd not available." >&2
	exit 1;
fi
}
# file_check <path>: abort with a nonzero status when <path> is not a regular
# file.  Fixes the "does not exit" typo and the error path exiting 0.
file_check () {
input_file=$1
if [ ! -f "$input_file" ]; then
	echo -e "${RED}ERROR${NC}: $input_file does not exist." >&2
	exit 1;
fi
}
###############################################
### check required parameters
###############################################
if [ -z $fq ] || [ -z $ref ]; then
	echo -e "${RED}ERROR${NC}: Required parameters: -f; -r." >&2
	usage;
	exit 1;
fi
file_check $fq # check input data
# the .bwt file is the marker that `bwa index` was run on the reference
if [ ! -f ${ref}.bwt ]; then
	echo -e "${RED}ERROR${NC}: BWA database $ref does not exist." >&2
	exit 1;
fi
# fall back to defaults for any option the user did not set
if [ -z $f1suffix ]; then
	f1suffix=$default_f1suffix;
fi
if [ -z $f2suffix ]; then
	f2suffix=$default_f2suffix;
fi
if [ -z $cpus ]; then
	cpus=$default_cpus;
fi
if [ -z $parser ]; then
	parser=$default_parser;
fi
if [ -z $aggregator ]; then
	aggregator=$default_aggregator;
fi
# load any environment modules requested with -m
for module in "${modules[@]}"; do
	module load $module;
	if [ $? -eq 0 ]; then
		echo "module "$module" loaded";
	fi
done
###############################################
# input fastq
###############################################
# derive the read-2 filename from read 1 by swapping the suffixes
fq2=$(echo $fq | sed "s/$f1suffix/$f2suffix/g");
echo -e "${RED}input data:${NC}" >&2
echo "  read 1: "$fq >&2
echo "  read 2: "$fq2 >&2
# if fq are gzip files, decompress to hidden working copies in the cwd
fq_extension="${fq##*.}" # suffix
if [ $fq_extension == "gz" ]; then
	new_fq=`echo $fq | sed 's/.*\///g' | sed 's/.gz//g' | sed 's/^/./g'`;
	new_fq2=`echo $fq2 | sed 's/.*\///g' | sed 's/.gz//g' | sed 's/^/./g'`;
	gunzip -c $fq > $new_fq
	gunzip -c $fq2 > $new_fq2
	fq=$new_fq; fq2=$new_fq2
	# strip the .gz from the suffixes so later prefix stripping still matches.
	# (The original assigned $new_f1suffix but then read $new_fq1suffix and
	# wrote $fq1suffix -- wrong variable names, so the suffix never updated.)
	new_f1suffix=`echo $f1suffix | sed 's/.gz//g'`;
	new_f2suffix=`echo $f2suffix | sed 's/.gz//g'`;
	f1suffix=$new_f1suffix;
	f2suffix=$new_f2suffix;
fi
###############################################
# check requirements:
###############################################
# fail fast if the aligner, samtools, or the parser script are missing
cmd_check bwa;
cmd_check samtools;
file_check $parser;
###############################################
# run and output:
###############################################
# output prefix: input basename minus any leading '.' and the read-1 suffix
out=$(echo $fq | sed 's/.*\///g' | sed 's/^\.//g' | sed "s/$f1suffix//g");
echo -e "${RED}Output prefix:${NC}" >&2
echo "  $out" >&2
# fixed: the original used undefined lowercase ${red}/${nc} here
echo -e "${RED}reference db:${NC}" >&2
echo "  $ref" >&2
### aln
bwaout=${out}.sam
group_info="@RG\tID:${out}\tSM:${out}"
echo $group_info
bwa mem -t $cpus -R "$group_info" $ref $fq $fq2 1>${out}.sam 2>${out}.aln.log
# -ne 0 (not -eq 1): catch every failure code, and exit nonzero ourselves
if [ $? -ne 0 ]; then
	echo -e "${RED}ERROR${NC}: BWA alignment failed." >&2
	if [ $fq_extension == "gz" ]; then
		rm $new_fq; rm $new_fq2;
	fi
	exit 1;
fi
# cleanup decompressed copies of gzipped input
if [ $fq_extension == "gz" ]; then
	rm $new_fq; rm $new_fq2
fi
# split alignments into ~equal chunks, one per CPU, for parallel parsing
naln=`wc -l ${bwaout} | sed 's/ .*//g'`;
nlines=`expr $naln / $cpus`;
split -d -l $nlines ${bwaout} ${bwaout}
# generate a one-command-per-line script driving the parser on each chunk
tmp_split_parser=${bwaout}.split.parse.tmp.sh
echo "#!/bin/bash" > $tmp_split_parser
for sam in ${bwaout}[0-9]*[0-9]; do
	echo -e perl ${parser} -i ${sam} $parser_para 1\>${sam}.parse 2\>${sam}.parse.log >> $tmp_split_parser
done
### filter (run the chunk commands with up to $cpus in parallel)
xargs --arg-file=$tmp_split_parser --max-proc=$cpus --replace --verbose /bin/sh -c "{}";
if [ $? -ne 0 ]; then
	echo -e "${RED}ERROR${NC}: Alignment parsing failed." >&2
	exit 1;
fi
rm $tmp_split_parser
### merge the parsed chunks and their logs, then drop the chunk files
cat ${bwaout}[0-9]*.parse > ${out}.parse.sam
perl $aggregator ${bwaout}[0-9]*.parse.log > ${out}.parse.log
rm ${bwaout}[0-9]*
### convert SAM to BAM:
samtools view -@ $cpus -bS ${out}.parse.sam | samtools sort -@ $cpus -o ${out}.bam
if [ $? -ne 0 ]; then
	echo -e "${RED}ERROR${NC}: SAMtools sam2bam conversion failed." >&2
	exit 1;
fi
### Index sorted BAM:
samtools index -@ $cpus ${out}.bam
if [ $? -ne 0 ]; then
	echo -e "${RED}ERROR${NC}: SAMtools index failed." >&2
	exit 1;
fi
|
#include "Text.h"
#include <cstring>
#include <cassert>
#include <cstdarg>
// ASCII letter case tricks: bit 5 (0x20) selects lower/upper case.
// All arguments and bodies are now fully parenthesized -- the originals were
// not, which is an operator-precedence hazard when expanded inside larger
// expressions.
#define MakeLetterCaseLower(character) ((character) | 0b00100000)
#define MakeLetterCaseUpper(character) ((character) & 0b11011111)
// Case-insensitive single-character compare (ASCII letters): true when b
// equals either the lowered or the uppered form of a.
#define CompareLetterCaseIgnore(a, b) ((MakeLetterCaseLower(a) == (b)) || (MakeLetterCaseUpper(a) == (b)))
// Widen a single ASCII character to its wchar_t equivalent (lossless).
inline wchar_t BF::Text::AsciiToUnicode(char character)
{
	return (wchar_t)character;
}
// Narrow a single wide character to ASCII; anything above 255 becomes '?'.
inline char BF::Text::UnicodeToAscii(wchar_t character)
{
	const bool isToBig = character > 255u;
	// Branchless select: keep the character when it fits, '?' otherwise.
	const char result = (!isToBig * character) + (isToBig * '?');
	return result;
}
// Widen up to min(inputSize, outputSize) characters (stopping at '\0') from
// `input` into `output`; always null-terminates and returns the count copied.
// NOTE(review): the terminator is written at output[i], which can be
// output[outputSize] -- confirm callers reserve one extra slot.
size_t BF::Text::AsciiToUnicode(const char* input, const size_t inputSize, wchar_t* output, const size_t outputSize)
{
	size_t i = 0;
	for (; (i < inputSize) && (input[i] != '\0') && (i < outputSize); ++i)
	{
		output[i] = input[i];
	}
	output[i] = L'\0';
	return i;
}
// Narrow up to min(inputSize, outputSize) wide characters into `output`,
// mapping anything above 255 to '?'; null-terminates and returns the count.
// Fixes two defects in the original: the loop tested the *uninitialized*
// output[i] (instead of input[i]) for the terminator, and it converted with
// AsciiToUnicode (the widening direction), silently truncating wide chars
// instead of applying the '?' policy.
size_t BF::Text::UnicodeToAscii(const wchar_t* input, const size_t inputSize, char* output, const size_t outputSize)
{
	size_t i = 0;
	for (; (i < inputSize) && (input[i] != L'\0') && (i < outputSize); ++i)
	{
		output[i] = UnicodeToAscii(input[i]);
	}
	output[i] = '\0';
	return i;
}
// Zero an entire char buffer.
void BF::Text::Clear(char* string, const size_t stringSize)
{
	memset(string, 0, sizeof(char) * stringSize);
}
// Zero an entire wide-character buffer.
void BF::Text::Clear(wchar_t* string, const size_t stringSize)
{
	memset(string, 0, sizeof(wchar_t) * stringSize);
}
// Length of a null-terminated string. No size bound -- the buffer must be
// terminated or this reads past it.
size_t BF::Text::Length(const char* string)
{
	size_t index = 0;
	for (; (string[index] != '\0'); ++index);
	return index;
}
// Length up to stringSize, '\0', or the first occurrence of `character`,
// whichever comes first.
size_t BF::Text::LengthUntil(const char* string, const size_t stringSize, const char character)
{
	size_t index = 0;
	for (; (index < stringSize) && (string[index] != '\0') && (string[index] != character); ++index);
	return index;
}
// Wide-character variant of Length() (also unbounded).
size_t BF::Text::Length(const wchar_t* string)
{
	size_t index = 0;
	for (; (string[index] != L'\0'); ++index);
	return index;
}
// Copy at most stringSize characters (stopping at '\0') and null-terminate;
// returns the number of characters copied (terminator excluded).
// NOTE(review): the terminator may land at destination[stringSize] --
// confirm callers reserve one extra slot.
size_t BF::Text::Copy(char* destination, const char* source, const size_t stringSize)
{
	size_t i = 0;
	assert(destination);
	assert(source);
	for (; (i < stringSize) && (source[i] != '\0'); ++i)
	{
		destination[i] = source[i];
	}
	destination[i] = '\0';
	return i;
}
// Copy a wide string into a char buffer, narrowing each character through
// UnicodeToAscii() so values above 255 become '?' -- the original assigned
// `destination[i] = source[i]` directly, silently truncating wide characters
// and diverging from the '?' policy used by the UnicodeToAscii overloads.
// Returns the number of characters copied (terminator excluded).
size_t BF::Text::Copy(char* destination, const wchar_t* source, const size_t stringSize)
{
	size_t i = 0;
	assert(destination);
	assert(source);
	for (; (i < stringSize) && (source[i] != '\0'); ++i)
	{
		destination[i] = UnicodeToAscii(source[i]);
	}
	destination[i] = '\0';
	return i;
}
// Widen a char string into a wchar_t buffer; stops at stringSize or '\0',
// null-terminates, returns the number of characters copied.
size_t BF::Text::Copy(wchar_t* destination, const char* source, const size_t stringSize)
{
	size_t i = 0;
	for (; (i < stringSize) && (source[i] != '\0'); ++i)
	{
		destination[i] = AsciiToUnicode(source[i]);
	}
	destination[i] = '\0';
	return i;
}
// Wide-to-wide copy; stops at stringSize or the terminator, null-terminates,
// returns the number of characters copied.
size_t BF::Text::Copy(wchar_t* destination, const wchar_t* source, const size_t stringSize)
{
	// lstrcpyW(Path, filePath);
	size_t i = 0;
	for (; (i < stringSize) && (source[i] != '\0'); ++i)
	{
		destination[i] = source[i];
	}
	destination[i] = '\0';
	return i;
}
// Equality of two explicitly-sized strings: returns 1 only when every
// compared character matched AND both strings were consumed to their full
// sizes (so equal prefixes of different-size buffers compare unequal).
int BF::Text::Compare(const char* a, const size_t aSize, const char* b, const size_t bSize)
{
	size_t index = 0;
	int samecounter = 0;
	for (; (index < aSize) && (index < bSize) && (a[index] != '\0') && (b[index] != '\0'); ++index)
		samecounter += a[index] == b[index];
	return (index == samecounter) && ((index == aSize) && (index == bSize));
}
// Equality over at most stringSize characters; stops at either terminator.
// Returns 1 when every compared character matched.
int BF::Text::Compare(const char* a, const char* b, const size_t stringSize)
{
	size_t index = 0;
	int samecounter = 0;
	for (; (index < stringSize) && (a[index] != '\0') && (b[index] != '\0'); ++index)
		samecounter += a[index] == b[index];
	return (index == samecounter);
}
// Wide-character variant of the bounded equality check.
int BF::Text::Compare(const wchar_t* a, const wchar_t* b, const size_t stringSize)
{
	size_t index = 0;
	int samecounter = 0;
	for (; (index < stringSize) && (a[index] != '\0') && (b[index] != '\0'); ++index)
		samecounter += a[index] == b[index];
	return index == samecounter;
}
// Mixed narrow/wide equality: each char is widened before comparing.
int BF::Text::Compare(const char* a, const wchar_t* b, const size_t stringSize)
{
	size_t index = 0;
	int samecounter = 0;
	for (; (index < stringSize) && (a[index] != '\0') && (b[index] != '\0'); ++index)
		samecounter += (wchar_t)a[index] == b[index];
	return index == samecounter;
}
// Mixed wide/narrow equality: each char is widened before comparing.
int BF::Text::Compare(const wchar_t* a, const char* b, const size_t stringSize)
{
	size_t index = 0;
	int samecounter = 0;
	for (; (index < stringSize) && (a[index] != '\0') && (b[index] != '\0'); ++index)
		samecounter += a[index] == (wchar_t)b[index];
	return index == samecounter;
}
// Case-insensitive equality over at most stringSize characters; bails out on
// the first mismatch.  Case folding uses the ASCII bit-5 trick, so it is
// only meaningful for plain letters.
int BF::Text::CompareIgnoreCase(const char* a, const char* b, const size_t stringSize)
{
	size_t index = 0; // Number of characters that will be read
	size_t counterOfSameCharacters = 0; // counts how many times we detected same characters
	bool wasLastLetterSame = true;
	for (; (index < stringSize) && (a[index] != '\0') && (b[index] != '\0') && wasLastLetterSame; ++index)
	{
		wasLastLetterSame = CompareLetterCaseIgnore(a[index], b[index]);
		counterOfSameCharacters += wasLastLetterSame;
	}
	return index == counterOfSameCharacters; // Did we countes as much as we read?
}
// Wide-character variant of the case-insensitive bounded equality check.
int BF::Text::CompareIgnoreCase(const wchar_t* a, const wchar_t* b, const size_t stringSize)
{
	size_t index = 0;
	int samecounter = 0;
	bool wasLastLetterSame = true;
	for (; (index < stringSize) && (a[index] != '\0') && (b[index] != '\0') && wasLastLetterSame; ++index)
	{
		wasLastLetterSame = CompareLetterCaseIgnore(a[index], b[index]);
		samecounter += wasLastLetterSame;
	}
	return index == samecounter;
}
// Mixed narrow/wide case-insensitive equality; the char side is widened.
int BF::Text::CompareIgnoreCase(const char* a, const wchar_t* b, const size_t stringSize)
{
	size_t index = 0;
	int samecounter = 0;
	bool wasLastLetterSame = true;
	for (; (index < stringSize) && (a[index] != '\0') && (b[index] != '\0') && wasLastLetterSame; ++index)
	{
		wasLastLetterSame = CompareLetterCaseIgnore((wchar_t)a[index], b[index]);
		samecounter += wasLastLetterSame;
	}
	return index == samecounter;
}
// Mixed wide/narrow case-insensitive equality; the char side is widened.
int BF::Text::CompareIgnoreCase(const wchar_t* a, const char* b, const size_t stringSize)
{
	size_t index = 0;
	int samecounter = 0;
	bool wasLastLetterSame = true;
	for (; (index < stringSize) && (a[index] != '\0') && (b[index] != '\0') && wasLastLetterSame; ++index)
	{
		wasLastLetterSame = CompareLetterCaseIgnore(a[index], (wchar_t)b[index]);
		samecounter += wasLastLetterSame;
	}
	return index == samecounter;
}
// Find the first occurrence of `target` (targetSize chars) inside `data`.
// Returns a pointer to the match, or nullptr when absent (found == false
// makes the pointer-product zero).
// NOTE(review): the loop requires i + targetSize < dataSize, so a match
// ending exactly at the buffer end is skipped -- confirm this is intended.
char* BF::Text::FindPosition(const char* data, size_t dataSize, const char* target, size_t targetSize)
{
	const char* source = nullptr;
	bool found = false;
	for (size_t i = 0; (data[i] != '\0') && (i + targetSize) < dataSize && !found; i++)
	{
		source = data + i;
		found = Text::Compare(source, target, targetSize);
	}
	return (char*)(found * (size_t)source);
}
// Parse an optionally-negative decimal integer from the front of `string`.
// Stops at the first non-digit; returns the number of characters consumed
// and stores the value in `number` (0 for a null string).
// NOTE(review): dataSize is accepted but never enforced -- the loop relies on
// '\0' or a non-digit to stop, and callers (e.g. Parse()) pass a dummy size.
// Enforcing it would change behavior; flagging instead of fixing.
size_t BF::Text::ToInt(const char* string, size_t dataSize, int& number)
{
	size_t index = 0;
	bool isNegative = false;
	number = 0;
	if (!string)
	{
		return 0;
	}
	if (string[0] == '-')
	{
		index++;
		isNegative = true;
	}
	for (; string[index] != '\0'; index++)
	{
		char character = string[index];
		char isValidCharacter = (character >= '0' && character <= '9');
		int numberElement = character - '0';
		if (!isValidCharacter)
		{
			break;
		}
		number *= 10; // "Shft number to left" Example 12 -> 120
		number += numberElement; // ASCII character to actual number.
	}
	if (isNegative)
	{
		number *= -1;
	}
	return index;
}
// Parse a boolean from the first character of `string`: '1', 'T' or 't'
// mean true; everything else (including '0', 'F', 'f') means false.
// Always consumes exactly one character.
size_t BF::Text::ToBool(const char* string, size_t dataSize, bool& number)
{
	const char first = string[0];

	number = (first == '1') || (first == 'T') || (first == 't');

	return 1;
}
// Parse a float by delegating to ToDouble() and narrowing the result.
// Returns the number of characters consumed.
size_t BF::Text::ToFloat(const char* string, const size_t dataSize, float& number)
{
	double x = 0;
	size_t readBytes = Text::ToDouble(string, dataSize, x);
	number = x;
	return readBytes;
}
// Parse an optionally-negative decimal number with at most one '.' from the
// front of `string`.  Digits are accumulated as an integer while
// digitsAfterDot tracks the scale; a final division restores the fraction.
// Returns the number of characters consumed.
// NOTE(review): like ToInt(), dataSize is accepted but never enforced.
size_t BF::Text::ToDouble(const char* string, const size_t dataSize, double& number)
{
	unsigned int digitsAfterDot = 1;
	bool isWholeNumberChunk = true;
	unsigned int index = 0;
	bool isNegative = false;
	number = 0;
	if (!string)
	{
		return 0;
	}
	if (string[0] == '-')
	{
		index++;
		isNegative = true;
	}
	for (; string[index] != '\0'; index++)
	{
		char character = string[index];
		bool isDot = character == '.';
		bool isValidCharacter = (character >= '0' && character <= '9') || isDot;
		int numberElement = character - '0';
		if (!isValidCharacter)
		{
			break;
		}
		// Trigger when we switch to after dot
		if (isDot && isWholeNumberChunk)
		{
			isWholeNumberChunk = false;
			continue;
		}
		number *= 10; // "Shft number to left" Example 12 -> 120
		number += numberElement; // ASCII character to actual number.
		if (!isWholeNumberChunk)
		{
			digitsAfterDot *= 10;
		}
	}
	if (isNegative)
	{
		number *= -1;
	}
	//double stdResult = std::strtof(string, 0); // STD Method
	number /= (double)digitsAfterDot;
	return index;
}
// Index of the first occurrence of `character` in `string`, scanning at most
// dataSize characters, or (size_t)-1 when absent.
// Fixes two defects in the original: (1) when the character was absent it
// returned (length - 1) -- indistinguishable from a genuine hit at the last
// position -- so TerminateBeginFromFirst()'s `index != -1` guard was dead and
// it truncated at the wrong place; (2) string[i] was dereferenced before the
// i < dataSize bound was tested.
size_t BF::Text::FindFirst(const char* string, const size_t dataSize, const char character)
{
	for (size_t i = 0; (i < dataSize) && (string[i] != '\0'); ++i)
	{
		if (string[i] == character)
		{
			return i;
		}
	}

	return (size_t)-1; // sentinel: not found
}
// Index near the last occurrence of `character`, scanning backwards from the
// string's length.  dataSize is not used.
// NOTE(review): after the hit, the loop's --i update still runs, so this
// returns (hit index - 1); the wchar_t overload below compensates with i++
// and returns the hit index itself.  Confirm which behavior callers expect --
// the two overloads are inconsistent.
size_t BF::Text::FindLast(const char* string, const size_t dataSize, const char character)
{
	bool found = false;
	size_t i = Length(string);
	for (; i > 0 && !found; --i)
	{
		found = character == string[i];
	}
	return i;
}
// Wide-character variant; returns the index of the hit (see note above).
// NOTE(review): when the character is absent this returns 1, not a sentinel.
size_t BF::Text::FindLast(const wchar_t* string, const size_t dataSize, const wchar_t character)
{
	bool found = false;
	size_t i = Length(string);
	for (; i > 0 && !found; --i)
	{
		found = character == string[i];
	}
	i++;
	return i;
}
// Truncate `string` in place at the first occurrence of `character`.
// NOTE(review): the guard compares the size_t index against -1 (i.e.
// SIZE_MAX) -- it assumes FindFirst() returns that sentinel when the
// character is absent; verify that sentinel is actually produced.
void BF::Text::TerminateBeginFromFirst(char* string, const size_t dataSize, const char character)
{
	size_t index = FindFirst(string, dataSize, character);
	if (index != -1)
	{
		string[index] = '\0';
	}
}
// Minimal sscanf-style extractor.  `syntax` is a sequence of single-letter
// commands applied to consecutive fields of `buffer` (fields end at ' ',
// at '/' for numeric commands, or at the end of the buffer):
//   's'          copy the field into a char* argument (null-terminated)
//   'i'/'d'/'u'  parse the field as int into an int* argument
//   'f'          parse the field as float into a float* argument
//   'c'          copy the field's first character into a char* argument
// Parsing stops when the end of `buffer` is reached.
// NOTE(review): bufferSize is accepted but not enforced, and numeric fields
// pass a hard-coded size of 5 to ToInt/ToFloat (which ignore it).
void BF::Text::Parse(char* buffer, size_t bufferSize, const char* syntax, ...)
{
	va_list args;
	va_start(args, syntax);
	int startIndex = 0;
	int stopIndex = 0;
	int command = 0;
	bool finished = false;
	while (!finished)
	{
		char commandKey = syntax[command++];
		bool commandIsNumber = commandKey == 'i' || commandKey == 'f' || commandKey == 'u';
		// Advance stopIndex one past the end of the current field.
		while (true)
		{
			char current = buffer[stopIndex++];
			finished = current == '\0';
			if (commandIsNumber && current == '/' || current == ' ' || finished)
			{
				break;
			}
		}
		switch (commandKey)
		{
			case 's':
			{
				char* destination = va_arg(args, char*);
				char* source = &buffer[startIndex];
				unsigned int length = stopIndex - startIndex - 1;
				memcpy(destination, source, length);
				destination[length] = '\0';
				break;
			}
			case 'i':
			case 'd':
			case 'u':
			{
				int* i = va_arg(args, int*);
				char* source = &buffer[startIndex];
				ToInt(source, 5, *i);
				break;
			}
			case 'f':
			{
				float* number = va_arg(args, float*);
				char* source = &buffer[startIndex];
				ToFloat(source, 5, (*number));
				break;
			}
			case 'c':
			{
				char* character = va_arg(args, char*);
				*character = buffer[startIndex];
				break;
			}
			default:
				break;
		}
		// Next field starts where this one ended.
		startIndex = stopIndex;
	}
	va_end(args);
}
// Scan `string` for "key=value" pairs.  For each token in parsingTokenList
// whose String matches the text at the current position, a pointer just past
// the '=' is stored into that token's Value and the scan skips ahead to the
// next space.  Stops early once every token has been matched.
void BF::Text::FindAll(const char* string, const size_t stringSize, const ParsingToken* parsingTokenList, const size_t parsingTokenListSize)
{
	bool finished = false;
	bool foundItem = false;
	size_t foundTargets = 0;
	for (size_t i = 0; (i < stringSize) && (string[i] != '\0') && !finished; ++i)
	{
		foundItem = false;
		for (size_t t = 0; (t < parsingTokenListSize) && (string[i] != ' ') && !foundItem; t++)
		{
			const ParsingToken& parsingToken = parsingTokenList[t];
			const char* targetString = parsingToken.String;
			const size_t targetStringSize = Length(targetString);
			const char* sourceString = string + i;
			foundItem = Compare(sourceString, targetString, targetStringSize); // Compare whole word
			if (foundItem)
			{
				size_t lengthTag = LengthUntil(sourceString, stringSize, '=');
				const char* valueString = sourceString + lengthTag + 1;
				i += lengthTag + 1;
				(*parsingToken.Value) = valueString;
				for (; (string[i] != '\0') && string[i] != ' '; i++); // Skip data
				++foundTargets;
			}
		}
		finished = foundTargets == parsingTokenListSize;
	}
}
|
import React from "react";
import Dialog from "@material-ui/core/Dialog";
import { makeStyles } from "@material-ui/core/styles";
import { closeFeedback, isFeedbackOpen } from "../../Redux/dialogs";
import { useDispatch, useSelector, shallowEqual } from "react-redux";
import { getUser, getSessionId, getUserId } from "../../Redux/eventSession";
// Dialog body style: fixed height with a centered loading gif shown behind
// the Airtable iframe while it loads.
const useStyles = makeStyles((theme) => ({
  content: {
    width: "100%",
    background: "url(/loading.gif) center center no-repeat",
    height: 600,
  },
}));
// Feedback dialog: shows an embedded Airtable form pre-filled with the
// current user's name/email and the session/user ids pulled from Redux.
// Open/close state lives in the dialogs slice (isFeedbackOpen/closeFeedback).
export default function (props) {
  const classes = useStyles();
  const open = useSelector(isFeedbackOpen);
  const dispatch = useDispatch();
  const user = useSelector(getUser, shallowEqual);
  const userId = useSelector(getUserId);
  const sessionId = useSelector(getSessionId);
  // Memoized identity fields; fall back to empty strings while user is unset.
  const { firstName, lastName, email } = React.useMemo(
    () =>
      user
        ? { firstName: user.firstName, lastName: user.lastName, email: user.email }
        : { firstName: "", lastName: "", email: "" },
    [user]
  );
  const handleClose = () => {
    dispatch(closeFeedback());
  };
  const airtableUrl = "https://airtable.com/embed/shrUsGXvQhbQoo0IW";
  return (
    <Dialog open={open} onClose={handleClose} fullWidth maxWidth="md" scroll="body">
      <div className={classes.content}>
        <script src="https://static.airtable.com/js/embed/embed_snippet_v1.js"></script>
        <iframe
          className="airtable-embed airtable-dynamic-height"
          src={`${airtableUrl}?prefill_Name=${encodeURI(firstName + " " + lastName)}&prefill_Email=${encodeURI(
            email
          )}&prefill_UserId=${encodeURI(userId)}&prefill_EventId=${encodeURI(sessionId)}`}
          frameBorder="0"
          // onMouseWheel=""
          width="100%"
          height="2823"
          style={{
            height: "100%",
          }}
          title="registerPadeleeAirtable"
          // onLoad={addCssIframe}
          // id="airtableFrame"
        ></iframe>
      </div>
    </Dialog>
  );
}
|
python3 -m pip install alabaster
python3 -m pip install aniso8601
python3 -m pip install appnope
python3 -m pip install asn1crypto
python3 -m pip install astroid
python3 -m pip install atomicwrites
python3 -m pip install attrs
python3 -m pip install Babel
python3 -m pip install backcall
python3 -m pip install backports.shutil-get-terminal-size
python3 -m pip install banknumber
python3 -m pip install behave
python3 -m pip install bleach
python3 -m pip install blessings
python3 -m pip install bpython
python3 -m pip install cachetools
python3 -m pip install certifi
python3 -m pip install cffi
python3 -m pip install chardet
python3 -m pip install cli-helpers
python3 -m pip install Click
python3 -m pip install colorama
python3 -m pip install configobj
python3 -m pip install cryptography
python3 -m pip install cursor
python3 -m pip install curtsies
python3 -m pip install cycler
python3 -m pip install Cython
python3 -m pip install decorator
python3 -m pip install defusedxml
python3 -m pip install Django
python3 -m pip install docopt
python3 -m pip install docutils
python3 -m pip install entrypoints
python3 -m pip install enum34
python3 -m pip install flake8
python3 -m pip install Flask
python3 -m pip install Flask-PyMongo
python3 -m pip install Flask-RESTful
python3 -m pip install gpg
python3 -m pip install greenlet
python3 -m pip install gunicorn
python3 -m pip install h5py
python3 -m pip install halo
python3 -m pip install html5lib
python3 -m pip install humanize
python3 -m pip install idna
python3 -m pip install imageio
python3 -m pip install imageio-ffmpeg
python3 -m pip install imagesize
python3 -m pip install importlib-metadata
python3 -m pip install ipykernel
python3 -m pip install ipython
python3 -m pip install ipython-genutils
python3 -m pip install ipywidgets
python3 -m pip install isort
python3 -m pip install itsdangerous
python3 -m pip install jedi
python3 -m pip install Jinja2
python3 -m pip install joblib
python3 -m pip install jsonschema
python3 -m pip install jupyter-client
python3 -m pip install jupyter-core
python3 -m pip install Kivy
python3 -m pip install Kivy-Garden
python3 -m pip install kiwisolver
python3 -m pip install lazy-object-proxy
python3 -m pip install log-symbols
python3 -m pip install lxml
python3 -m pip install Markdown
python3 -m pip install MarkupSafe
python3 -m pip install matplotlib
python3 -m pip install mccabe
python3 -m pip install mdv
python3 -m pip install mercurial
python3 -m pip install meson
python3 -m pip install mistune
python3 -m pip install more-itertools
python3 -m pip install MouseInfo
python3 -m pip install moviepy
python3 -m pip install msgpack
python3 -m pip install mycli
python3 -m pip install mysqlclient
python3 -m pip install nbconvert
python3 -m pip install nbformat
python3 -m pip install neovim
python3 -m pip install neovim-remote
python3 -m pip install notebook
python3 -m pip install numpy
python3 -m pip install olefile
python3 -m pip install opencv-python
python3 -m pip install packaging
python3 -m pip install paho-mqtt
python3 -m pip install pandas
python3 -m pip install pandocfilters
python3 -m pip install parse
python3 -m pip install parse-type
python3 -m pip install parso
python3 -m pip install pexpect
python3 -m pip install pgcli
python3 -m pip install pgspecial
python3 -m pip install pickleshare
python3 -m pip install Pillow
python3 -m pip install pip
python3 -m pip install pluggy
python3 -m pip install powerline-mem-segment
python3 -m pip install powerline-status
python3 -m pip install prettytable
python3 -m pip install proglog
python3 -m pip install prometheus-client
python3 -m pip install prompt-toolkit
python3 -m pip install psutil
python3 -m pip install psycopg2
python3 -m pip install ptyprocess
python3 -m pip install py
python3 -m pip install PyAutoGUI
python3 -m pip install pycairo
python3 -m pip install pycodestyle
python3 -m pip install pycparser
python3 -m pip install pydf
python3 -m pip install pyflakes
python3 -m pip install PyGetWindow
python3 -m pip install Pygments
python3 -m pip install PyGObject
python3 -m pip install pylint
python3 -m pip install pymongo
python3 -m pip install PyMsgBox
python3 -m pip install PyMySQL
python3 -m pip install pynvim
python3 -m pip install pyobjc
python3 -m pip install pyobjc-core
python3 -m pip install pyobjc-framework-Accounts
python3 -m pip install pyobjc-framework-AddressBook
python3 -m pip install pyobjc-framework-AdSupport
python3 -m pip install pyobjc-framework-AppleScriptKit
python3 -m pip install pyobjc-framework-AppleScriptObjC
python3 -m pip install pyobjc-framework-ApplicationServices
python3 -m pip install pyobjc-framework-AuthenticationServices
python3 -m pip install pyobjc-framework-Automator
python3 -m pip install pyobjc-framework-AVFoundation
python3 -m pip install pyobjc-framework-AVKit
python3 -m pip install pyobjc-framework-BusinessChat
python3 -m pip install pyobjc-framework-CalendarStore
python3 -m pip install pyobjc-framework-CFNetwork
python3 -m pip install pyobjc-framework-CloudKit
python3 -m pip install pyobjc-framework-Cocoa
# Install the remaining Python packages one at a time (same order as before)
# so that one failing package does not prevent the rest from installing.
for package in \
    pyobjc-framework-Collaboration \
    pyobjc-framework-ColorSync \
    pyobjc-framework-Contacts \
    pyobjc-framework-ContactsUI \
    pyobjc-framework-CoreAudio \
    pyobjc-framework-CoreAudioKit \
    pyobjc-framework-CoreBluetooth \
    pyobjc-framework-CoreData \
    pyobjc-framework-CoreHaptics \
    pyobjc-framework-CoreLocation \
    pyobjc-framework-CoreMedia \
    pyobjc-framework-CoreMediaIO \
    pyobjc-framework-CoreML \
    pyobjc-framework-CoreMotion \
    pyobjc-framework-CoreServices \
    pyobjc-framework-CoreSpotlight \
    pyobjc-framework-CoreText \
    pyobjc-framework-CoreWLAN \
    pyobjc-framework-CryptoTokenKit \
    pyobjc-framework-DeviceCheck \
    pyobjc-framework-DictionaryServices \
    pyobjc-framework-DiscRecording \
    pyobjc-framework-DiscRecordingUI \
    pyobjc-framework-DiskArbitration \
    pyobjc-framework-DVDPlayback \
    pyobjc-framework-EventKit \
    pyobjc-framework-ExceptionHandling \
    pyobjc-framework-ExecutionPolicy \
    pyobjc-framework-ExternalAccessory \
    pyobjc-framework-FileProvider \
    pyobjc-framework-FileProviderUI \
    pyobjc-framework-FinderSync \
    pyobjc-framework-FSEvents \
    pyobjc-framework-GameCenter \
    pyobjc-framework-GameController \
    pyobjc-framework-GameKit \
    pyobjc-framework-GameplayKit \
    pyobjc-framework-ImageCaptureCore \
    pyobjc-framework-IMServicePlugIn \
    pyobjc-framework-InputMethodKit \
    pyobjc-framework-InstallerPlugins \
    pyobjc-framework-InstantMessage \
    pyobjc-framework-Intents \
    pyobjc-framework-IOSurface \
    pyobjc-framework-iTunesLibrary \
    pyobjc-framework-LatentSemanticMapping \
    pyobjc-framework-LaunchServices \
    pyobjc-framework-libdispatch \
    pyobjc-framework-LinkPresentation \
    pyobjc-framework-LocalAuthentication \
    pyobjc-framework-MapKit \
    pyobjc-framework-MediaAccessibility \
    pyobjc-framework-MediaLibrary \
    pyobjc-framework-MediaPlayer \
    pyobjc-framework-MediaToolbox \
    pyobjc-framework-MetalKit \
    pyobjc-framework-ModelIO \
    pyobjc-framework-MultipeerConnectivity \
    pyobjc-framework-NaturalLanguage \
    pyobjc-framework-NetFS \
    pyobjc-framework-Network \
    pyobjc-framework-NetworkExtension \
    pyobjc-framework-NotificationCenter \
    pyobjc-framework-OpenDirectory \
    pyobjc-framework-OSAKit \
    pyobjc-framework-OSLog \
    pyobjc-framework-PencilKit \
    pyobjc-framework-Photos \
    pyobjc-framework-PhotosUI \
    pyobjc-framework-PreferencePanes \
    pyobjc-framework-PubSub \
    pyobjc-framework-PushKit \
    pyobjc-framework-QTKit \
    pyobjc-framework-Quartz \
    pyobjc-framework-QuickLookThumbnailing \
    pyobjc-framework-SafariServices \
    pyobjc-framework-SceneKit \
    pyobjc-framework-ScreenSaver \
    pyobjc-framework-ScriptingBridge \
    pyobjc-framework-SearchKit \
    pyobjc-framework-Security \
    pyobjc-framework-SecurityFoundation \
    pyobjc-framework-SecurityInterface \
    pyobjc-framework-ServiceManagement \
    pyobjc-framework-Social \
    pyobjc-framework-SoundAnalysis \
    pyobjc-framework-Speech \
    pyobjc-framework-SpriteKit \
    pyobjc-framework-StoreKit \
    pyobjc-framework-SyncServices \
    pyobjc-framework-SystemConfiguration \
    pyobjc-framework-SystemExtensions \
    pyobjc-framework-UserNotifications \
    pyobjc-framework-VideoSubscriberAccount \
    pyobjc-framework-VideoToolbox \
    pyobjc-framework-Vision \
    pyobjc-framework-WebKit \
    pyparsing \
    PyPDF2 \
    pyperclip \
    PyRect \
    pyrsistent \
    PyScreeze \
    pytest \
    python-dateutil \
    python-docx \
    PyTweening \
    pytz \
    pyzmq \
    relativetimebuilder \
    reppy \
    requests \
    scikit-learn \
    scipy \
    selenium \
    Send2Trash \
    setproctitle \
    setuptools \
    simplegeneric \
    six \
    sklearn \
    snowballstemmer \
    Sphinx \
    sphinxcontrib-applehelp \
    sphinxcontrib-devhelp \
    sphinxcontrib-htmlhelp \
    sphinxcontrib-jsmath \
    sphinxcontrib-qthelp \
    sphinxcontrib-serializinghtml \
    sphinxcontrib-websupport \
    spidy-web-crawler \
    spinners \
    sqlparse \
    tabulate \
    termcolor \
    terminado \
    terminaltables \
    testpath \
    tornado \
    tqdm \
    traitlets \
    typed-ast \
    typing \
    urllib3 \
    virtualenv \
    wcwidth \
    webencodings \
    websockets \
    Werkzeug \
    wheel \
    widgetsnbextension \
    wrapt \
    xlrd \
    xlwt \
    yapf \
    zipp
do
    python3 -m pip install "$package"
done
|
// Source: neowutran/home-assistant-js-OTP — src/modules/voice/stores/is-voice-supported-store.js
import { Store } from 'nuclear-js';
// Store holding a single boolean: whether the browser exposes the
// webkit speech-recognition API, i.e. whether voice input is supported.
const isVoiceSupportedStore = new Store({
  getInitialState() {
    // Feature-detect once at startup; the value never changes afterwards.
    return 'webkitSpeechRecognition' in window;
  },
});
export default isVoiceSupportedStore;
|
from rest_framework import routers
from django.urls import path, include
from drf.base import views
from rest_framework.response import Response
from rest_framework.decorators import api_view
from rest_framework import status

router = routers.DefaultRouter()
router.register(r'members', views.MemberViewSet)


@api_view(['GET'])
def member_status_api(request):
    """List members, optionally filtered by the ``status`` query parameter.

    Returns 400 when an unknown status value is supplied; when the
    parameter is omitted, all members are returned.
    """
    # Use a name other than `status`: the original local variable shadowed
    # the rest_framework.status module, so status.HTTP_400_BAD_REQUEST was
    # looked up on a string/None and crashed. It also made the "no filter"
    # branch below unreachable because None failed the membership test.
    member_status = request.query_params.get('status')
    if member_status is not None and member_status not in ('active', 'inactive', 'pending'):
        return Response({'error': 'Invalid status'}, status=status.HTTP_400_BAD_REQUEST)
    # NOTE(review): assumes Member and MemberSerializer are importable in
    # this module (they are referenced but not imported here) — confirm.
    if member_status:
        members = Member.objects.filter(status=member_status)
    else:
        members = Member.objects.all()
    serialized_members = MemberSerializer(members, many=True)
    return Response(serialized_members.data)


# Wire up our API using automatic URL routing.
# Additionally, we include login URLs for the browsable API.
urlpatterns = [
    path('api/', include(router.urls)),
    path('api/members2/', views.member_api),
    path('api/members/', member_status_api),  # New URL mapping for custom view
    path('api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
|
// Barrel file: re-exports the page components so callers can import
// several of them from a single path, e.g. `import { Logon, Profile } from './pages'`.
export { default as Logon } from './Logon';
export { default as Register } from './Register';
export { default as Profile } from './Profile';
export { default as NewIncidents } from './NewIncidents';
|
/* global module:false */
// Grunt build configuration: Sass compilation with autoprefixing, plus
// formatting/linting of the Sass and JS sources, and a dev watch task.
module.exports = function(grunt) {
  grunt.initConfig({
    // package.json metadata, available to task templates as <%= app.* %>.
    app: grunt.file.readJSON('package.json'),
    // Compile scss/style.scss into css/style.css (unminified, "expanded").
    sass: {
      dist: {
        options: {
          style: 'expanded'
        },
        files: {
          'css/style.css': 'scss/style.scss'
        }
      }
    },
    // Add vendor prefixes to the compiled CSS in place (no separate dest).
    autoprefixer: {
      no_dest: {
        src: 'css/style.css'
      }
    },
    // Reformat the Sass sources (4-space indent, keep declaration order).
    prettysass: {
      options: {
        alphabetize: false,
        indent: 4
      },
      jsxc: {
        src: ['scss/*.scss']
      }
    },
    // Lint the Gruntfile and all JS sources against .jshintrc.
    jshint: {
      options: {
        jshintrc: '.jshintrc'
      },
      gruntfile: {
        src: 'Gruntfile.js'
      },
      files: ['js/*.js']
    },
    // Reformat JS sources according to .jsbeautifyrc.
    jsbeautifier: {
      files: ['js/*.js'],
      options: {
        config: '.jsbeautifyrc'
      }
    },
    // During development: recompile CSS whenever a Sass file changes.
    watch: {
      css: {
        files: ['scss/*'],
        tasks: ['sass', 'autoprefixer']
      }
    }
  });
  grunt.loadNpmTasks('grunt-contrib-sass');
  grunt.loadNpmTasks('grunt-contrib-watch');
  grunt.loadNpmTasks('grunt-autoprefixer');
  grunt.loadNpmTasks('grunt-contrib-jshint');
  grunt.loadNpmTasks('grunt-jsbeautifier');
  grunt.loadNpmTasks('grunt-prettysass');
  // `default` runs a full format/lint/build pass and then keeps watching.
  grunt.registerTask('default', [ 'commit', 'watch' ]);
  grunt.registerTask('commit', [ 'prettysass', 'sass', 'autoprefixer', 'jsbeautifier', 'jshint' ]);
};
|
def is_armstrong(num):
    """Return True if *num* is an Armstrong (narcissistic) number.

    An Armstrong number equals the sum of its own digits, each raised to
    the power of the digit count, e.g. 153 == 1**3 + 5**3 + 3**3.

    Args:
        num: A non-negative integer.

    Returns:
        bool: True if num is an Armstrong number, False otherwise.
    """
    n_digits = len(str(num))
    total = 0
    remaining = num
    while remaining > 0:
        # divmod peels off the last digit and shrinks the number in one step.
        remaining, digit = divmod(remaining, 10)
        total += digit ** n_digits
    # Return the comparison directly instead of an if/else returning literals.
    return num == total
|
// Ava does not allow defining in `./helpers`. Otherwise, this file should be
// with the other test runner files in `test/helpers/testing/`
// eslint-disable-next-line import/no-unassigned-import
import '../helpers/testing/ava.js'
|
import random
def random_picks(sequence, relative_odds):
    """Yield an endless stream of items from *sequence*, where each item's
    probability is proportional to the matching integer in *relative_odds*.
    """
    # Expand the integer weights into a lookup table: an item with weight w
    # appears w times, so a uniform choice over the table is weighted.
    table = []
    for item, weight in zip(sequence, relative_odds):
        table.extend([item] * weight)
    while True:
        yield random.choice(table)
|
import { Injectable } from '@angular/core';
import { Actions, createEffect, ofType } from '@ngrx/effects';
import { Store } from '@ngrx/store';
import { fetch } from '@nrwl/angular';
import { map } from 'rxjs/operators';
import * as EventsPileCardsActions from './events-pile-cards.actions';
import { createInitialEventsPileCards } from './events-pile-cards.models';
import * as EventsPileCardsFeature from './events-pile-cards.reducer';
// NgRx effects for the events-pile-cards feature: react to the init/reset
// actions and answer with the actions that (re)populate the entity store.
@Injectable()
export class EventsPileCardsEffects {
  // On "new game": seed the store with a freshly created events pile.
  initNewGame$ = createEffect(() =>
    this.actions$.pipe(
      ofType(EventsPileCardsActions.initEventsPileCardsNewGame),
      map(() =>
        EventsPileCardsActions.setEntitiesInitializedEventsPileCards({
          eventsPileCards: createInitialEventsPileCards(),
        })
      )
    )
  );

  // On "saved game": load persisted cards. The fetch() wrapper maps the
  // run/onError callbacks to success/failure actions; the actual load
  // logic is still a placeholder that returns an empty list.
  initSavedGame$ = createEffect(() =>
    this.actions$.pipe(
      ofType(EventsPileCardsActions.initEventsPileCardsSavedGame),
      fetch({
        run: () => {
          // Your custom service 'load' logic goes here. For now just return a success action...
          return EventsPileCardsActions.loadEventsPileCardsSuccess({
            eventsPileCards: [],
          });
        },
        onError: (_action, error) => {
          console.error('Error', error);
          return EventsPileCardsActions.loadEventsPileCardsFailure({ error });
        },
      })
    )
  );

  // On "reset pile": rebuild the initial pile and select its first card.
  resetPileAndSelectFirst$ = createEffect(() =>
    this.actions$.pipe(
      ofType(EventsPileCardsActions.resetEventsPile),
      map(() =>
        EventsPileCardsActions.setEntitiesSelectFirstEventsPileCards({
          eventsPileCards: createInitialEventsPileCards(),
        })
      )
    )
  );

  constructor(
    private actions$: Actions,
    // NOTE(review): injected but unused by the effects above — presumably
    // kept for future use; confirm before removing.
    private eventsPileCardsStore: Store<EventsPileCardsFeature.EventsPileCardsPartialState>
  ) {}
}
|
module.exports = {
siteMetadata: {
title: `Bitlogic Web`,
description: `Bitlogic Web`,
author: `Bitlogic`,
},
plugins: [
`gatsby-plugin-netlify-cms`,
`gatsby-transformer-json`,
`gatsby-plugin-react-helmet`,
`gatsby-transformer-sharp`,
`gatsby-plugin-sharp`,
{
resolve: `gatsby-plugin-manifest`,
options: {
name: `Bitlogic Web`,
short_name: `Bitlogic`,
start_url: `/`,
background_color: `#74C8D2`,
theme_color: `#74C8D2`,
display: `minimal-ui`,
icon: `src/images/isotipo.png`, // This path is relative to the root of the site.
},
},
{
resolve: `gatsby-source-filesystem`,
options: {
name: `images`,
path: `${__dirname}/src/images`,
},
},
{
resolve: `gatsby-source-filesystem`,
options: {
path: `${__dirname}/content/homepage`,
name: `homepage`,
},
},
{
resolve: `gatsby-source-filesystem`,
options: {
path: `${__dirname}/content/aboutUs`,
name: `aboutUs`,
},
},
{
resolve: `gatsby-source-filesystem`,
options: {
path: `${__dirname}/content/services`,
name: `services`,
},
},
{
resolve: `gatsby-source-filesystem`,
options: {
path: `${__dirname}/content/culture`,
name: `culture`,
},
},
{
resolve: `gatsby-source-filesystem`,
options: {
path: `${__dirname}/content/contact`,
name: `contact`,
},
},
{
resolve: `gatsby-source-filesystem`,
options: {
path: `${__dirname}/content/blog`,
name: `blog`,
},
},
],
};
|
#!/bin/sh
# Load REST host/port and admin credentials shared by these scripts.
. ../config/webserver-settings.sh
echo "starting 35-processmodel-publish-fork-simple.sh"
# POST a dummy XML payload to the processmodel REST resource to publish the
# "fork-simple" model, version 1.0; the response is captured for inspection.
curl -v --anyauth --user $MLADMINUSER:$MLADMINPASS -X POST \
  -d "<somexml/>" \
  -H "Content-type: application/xml" -H "Accept: application/xml" \
  "http://$RESTHOST:$RESTPORT/v1/resources/processmodel?rs:publishedId=fork-simple__1__0" > 35-processmodel-publish-fork-simple-out.txt
echo "35-processmodel-publish-fork-simple.sh complete"
|
def solution(pointA, pointB):
    """Shortest path (Dijkstra) between two nodes of a fixed 4-node graph.

    Args:
        pointA: Index of the start node (0-3).
        pointB: Index of the destination node (0-3).

    Returns:
        list[int]: Node indices along the cheapest path, from pointA to
        pointB inclusive (just [pointA] when the two are equal).
    """
    # Adjacency matrix: graph[i][j] is the cost of the directed edge i -> j;
    # 0 on the diagonal means "no self edge".
    graph = [[0, 10, 15, 20],
             [5, 0, 9, 10],
             [6, 13, 0, 12],
             [8, 8, 9, 0]]
    infinity = float("inf")
    n = len(graph)
    distances = [infinity] * n
    previous = [None] * n
    visited = [False] * n
    distances[pointA] = 0
    for _ in range(n):
        # Select the unvisited node with the smallest tentative distance.
        # (The original initialised minimum to infinity + 1 — which equals
        # infinity — and silently reused a stale `index` when every
        # remaining node was unreachable; we stop explicitly instead.)
        index = None
        minimum = infinity
        for j in range(n):
            if not visited[j] and distances[j] < minimum:
                minimum = distances[j]
                index = j
        if index is None:
            break  # all remaining nodes are unreachable
        visited[index] = True
        # Relax every outgoing edge of the selected node.
        for j in range(n):
            if graph[index][j] > 0 and not visited[j]:
                if distances[index] + graph[index][j] < distances[j]:
                    distances[j] = distances[index] + graph[index][j]
                    previous[j] = index
    # Walk the predecessor chain back from the destination, then reverse.
    path = []
    node = pointB
    while node is not None:
        path.append(node)
        node = previous[node]
    return path[::-1]
|
'use strict';
let uuids = {};
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = function (_ref) {
var t = _ref.types;
/* ==========================================================================
* Utilities
* ======================================================================= */
var transformOnType = function transformOnType(transforms) {
return function (node) {
var transformer = transforms[node.type];
if (transformer) {
return transformer(node);
}
throw new Error(node.type + ' could not be transformed');
};
};
/* ==========================================================================
* Initial configuration
* ======================================================================= */
var initConfig = function initConfig(path, state) {
var _state$opts = state.opts,
_state$opts$useNew = _state$opts.useNew,
useNew = _state$opts$useNew === undefined ? false : _state$opts$useNew,
constructorModule = _state$opts.module,
constructorFunction = _state$opts.function,
_state$opts$useVariab = _state$opts.useVariables,
useVariables = _state$opts$useVariab === undefined ? false : _state$opts$useVariab;
var variablesRegex = void 0,
jsxObjectTransformer = void 0;
if (useVariables === true) {
// Use the default variables regular expression when true.
variablesRegex = /^[A-Z]/;
} else if ((0, _isString2.default)(useVariables)) {
// If it’s a plain regular expression string.
variablesRegex = new RegExp(useVariables);
}
var executeExpression = useNew ? t.newExpression : t.callExpression;
var jsxObjectTransformerCreator = function jsxObjectTransformerCreator(expression) {
return function (value) {
return executeExpression(expression, [value]);
};
};
if (constructorModule) {
// If the constructor function will be retrieved from a module.
var moduleName = path.scope.generateUidIdentifier(useNew ? 'JSXNode' : 'jsx');
jsxObjectTransformer = jsxObjectTransformerCreator(moduleName);
var importDeclaration = t.importDeclaration([t.importDefaultSpecifier(moduleName)], t.stringLiteral(constructorModule));
// Add the import declration to the top of the file.
path.findParent(function (p) {
return p.isProgram();
}).unshiftContainer('body', importDeclaration);
} else if (constructorFunction) {
// If the constructor function will be an in scope function.
var expression = constructorFunction.split('.').map((0, _ary2.default)(t.identifier, 1)).reduce((0, _ary2.default)(t.memberExpression, 2));
jsxObjectTransformer = jsxObjectTransformerCreator(expression);
} else {
// Otherwise, we won‘t be mapping.
jsxObjectTransformer = _identity2.default;
}
// console.dir('Object Transformer >>>>', jsxObjectTransformer);
return {
variablesRegex: variablesRegex,
jsxObjectTransformer: jsxObjectTransformer
};
};
/* =========================================================================
* Visitors
* ======================================================================= */
var visitJSXElement = function visitJSXElement(path, state) {
if (!state.get('jsxConfig')) {
state.set('jsxConfig', initConfig(path, state));
}
var _state$get = state.get('jsxConfig'),
variablesRegex = _state$get.variablesRegex,
jsxObjectTransformer = _state$get.jsxObjectTransformer;
/* ==========================================================================
* UniqueId generator
* ======================================================================= */
var guid = function guid() {
function s4() {
return Math.floor((1 + Math.random()) * 0x10000).toString(16).substring(1);
}
// ensure guids are unique and only increase the length if a collision has been found
let uuid = s4();
if(uuids[uuid] && uuids[uuid] == 'used') {
console.log('uuid duplicated, reassigning')
uuid = s4() + s4();
}
uuids[uuid] = 'used';
return uuid; //+ s4() + s4() + s4(); //+ '-' + s4() + '-' + s4() + s4() + s4();
};
/* ==========================================================================
* Node Transformers
* ======================================================================= */
var JSXIdentifier = function JSXIdentifier(node) {
return t.stringLiteral(node.name);
};
var JSXNamespacedName = function JSXNamespacedName(node) {
return t.stringLiteral(node.namespace.name + ':' + node.name.name);
};
var _JSXMemberExpression = transformOnType({
JSXIdentifier: function JSXIdentifier(node) {
return t.identifier(node.name);
},
JSXMemberExpression: function JSXMemberExpression(node) {
return t.memberExpression(_JSXMemberExpression(node.object), _JSXMemberExpression(node.property));
}
});
var JSXElementName = transformOnType({
JSXIdentifier: variablesRegex ? function (node) {
return variablesRegex.test(node.name) ? t.identifier(node.name) : JSXIdentifier(node);
} : JSXIdentifier,
JSXNamespacedName: JSXNamespacedName,
JSXMemberExpression: _JSXMemberExpression
});
var JSXExpressionContainer = function JSXExpressionContainer(node) {
return node.expression;
};
var JSXAttributeName = transformOnType({ JSXIdentifier: JSXIdentifier, JSXNamespacedName: JSXNamespacedName, JSXMemberExpression: _JSXMemberExpression });
var JSXAttributeValue = transformOnType({
StringLiteral: function StringLiteral(node) {
return node;
},
JSXExpressionContainer: JSXExpressionContainer
});
var JSXAttributes = function JSXAttributes(nodes) {
var object = [];
var objects = [];
nodes.forEach(function (node) {
switch (node.type) {
case 'JSXAttribute':
{
if (!object) {
object = [];
}
var attributeName = JSXAttributeName(node.name);
var objectKey = _esutils2.default.keyword.isIdentifierNameES6(attributeName.value) ? t.identifier(attributeName.value) : attributeName;
object.push(t.objectProperty(objectKey, JSXAttributeValue(node.value)));
break;
}
case 'JSXSpreadAttribute':
{
if (object) {
objects.push(t.objectExpression(object));
object = null;
}
objects.push(node.argument);
break;
}
default:
throw new Error(node.type + ' cannot be used as a JSX attribute');
}
});
if (object && object.length > 0) {
objects.push(t.objectExpression(object));
}
if (objects.length === 0) {
return t.objectExpression([]);
} else if (objects.length === 1) {
return objects[0];
}
return t.callExpression(state.addHelper('extends'), objects);
};
var JSXText = function JSXText(node) {
if (state.opts.noTrim) return t.stringLiteral(node.value);
var value = node.value.replace(/\n\s*/g, '');
return value === '' ? null : t.stringLiteral(value);
};
var JSXElement = function JSXElement(node) {
return jsxObjectTransformer(t.objectExpression(
[
t.objectProperty(t.identifier(nameProperty),
JSXElementName(node.openingElement.name)),
t.objectProperty(t.identifier(attributesProperty),
JSXAttributes(node.openingElement.attributes)),
t.objectProperty(t.identifier(childrenProperty), JSXChildren(node.children)),
t.objectProperty(t.identifier(uniqueId), t.stringLiteral(guid()))
]
));
};
var JSXChild = transformOnType({ JSXText: JSXText, JSXElement: JSXElement, JSXExpressionContainer: JSXExpressionContainer });
var JSXChildren = function JSXChildren(nodes) {
return t.arrayExpression(nodes.map(JSXChild).filter(Boolean)
// Normalize all of our string children into one big string. This can be
// an optimization as we minimize the number of nodes created.
// This step just turns `['1', '2']` into `['12']`.
.reduce(function (children, child) {
//if it is a child is an empty expression dont append the child just return current array.
//This removes 'empty' or 'null' spaces in the children.
if(child.type === 'JSXEmptyExpression') {
return _toConsumableArray(children);
}
var lastChild = children.length > 0 ? children[children.length - 1] : null;
// If this is a string literal, and the last child is a string literal, merge them.
if (child.type === 'StringLiteral' && lastChild && lastChild.type === 'StringLiteral') {
return [].concat(_toConsumableArray(children.slice(0, -1)), [t.stringLiteral(lastChild.value + child.value)]);
}
// Otherwise just append the child to our array normally.
return [].concat(_toConsumableArray(children), [child]);
}, []));
};
// Actually replace JSX with an object.
path.replaceWith(JSXElement(path.node));
};
/* ==========================================================================
* Plugin
* ======================================================================= */
return {
inherits: require('babel-plugin-syntax-jsx'),
visitor: {
JSXElement: visitJSXElement
}
};
};
var _isString = require('lodash/isString');
var _isString2 = _interopRequireDefault(_isString);
var _identity = require('lodash/identity');
var _identity2 = _interopRequireDefault(_identity);
var _ary = require('lodash/ary');
var _ary2 = _interopRequireDefault(_ary);
var _esutils = require('esutils');
var _esutils2 = _interopRequireDefault(_esutils);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _toConsumableArray(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } else { return Array.from(arr); } }
// Property names used in every object literal the plugin emits.
var nameProperty = 'elementName'; // tag string or component identifier
var attributesProperty = 'attributes'; // merged props object
var childrenProperty = 'children'; // normalized array of children
var uniqueId = 'guid'; // per-element unique id string
|
def fibonacci(n):
    """Print the first *n* Fibonacci numbers, one per line, starting at 0.

    Args:
        n: How many numbers to print (0 prints nothing).
    """
    first, second = 0, 1
    for _ in range(n):
        print(first)
        # Tuple assignment advances the pair without a temp variable.
        first, second = second, first + second


fibonacci(5)
|
// Source repository: Loliticos/mayfi
const { Command, MayfiEmbed, MiscUtils } = require('../../')
const fetch = require("node-fetch")
module.exports = class Dog extends Command {
constructor (client) {
super({
name: 'dog',
aliases: ['cachorro'],
category: 'fun',
}, client)
}
async run ({ channel, author, t}) {
const body = await fetch('https://dog.ceo/api/breeds/image/random').then(res => res.json())
let embed = new MayfiEmbed(author)
.setTitle("🐶")
.setImage(body.message)
channel.send({embed})
}
}
|
# Migration: rename the site_assignments table to site_contact_assignments.
class RenameSiteAssignment < ActiveRecord::Migration[5.1]
  def change
    # rename_table is automatically reversible, so no up/down pair is needed.
    rename_table :site_assignments, :site_contact_assignments
  end
end
|
import numpy as np
from scipy.ndimage import gaussian_filter
from scipy.ndimage import maximum_filter
def detectFeatures(image, hessianThreshold, minFeatureScales):
    """Detect feature points in a 2-D image using Hessian eigenvalues.

    Args:
        image: 2-D array-like of intensities — assumed grayscale; TODO confirm.
        hessianThreshold: minimum value both Hessian eigenvalues must exceed.
        minFeatureScales: window size (pixels) for the non-maximum suppression.

    Returns:
        ndarray of (row, col) coordinates of the surviving feature points.
    """
    # Second derivatives via two applications of np.gradient.
    dx, dy = np.gradient(image)
    dxx, dxy = np.gradient(dx)
    dyx, dyy = np.gradient(dy)
    # Closed-form eigenvalues of the per-pixel 2x2 Hessian.
    # NOTE(review): the discriminant trace**2 - 4*determinant can go
    # negative from numerical noise, producing NaNs — confirm inputs.
    trace = dxx + dyy
    determinant = dxx * dyy - dxy * dyx
    eigenvalues = 0.5 * (trace + np.sqrt(trace**2 - 4 * determinant)), 0.5 * (trace - np.sqrt(trace**2 - 4 * determinant))
    # Keep pixels where both eigenvalues clear the threshold.
    feature_points = np.argwhere((eigenvalues[0] > hessianThreshold) & (eigenvalues[1] > hessianThreshold))
    # Non-maximum suppression: keep only candidates equal to the local max
    # of the raw image in a minFeatureScales-sized window.
    maxima = maximum_filter(image, size=minFeatureScales)
    feature_points = feature_points[(image[feature_points[:, 0], feature_points[:, 1]] == maxima[feature_points[:, 0], feature_points[:, 1]])]
    return feature_points


# Test the detectFeatures function with a sample grayscale image:
# a small symmetric blob whose bright centre should survive suppression.
sample_image = np.array([[0, 0, 0, 0, 0],
                         [0, 1, 2, 1, 0],
                         [0, 2, 4, 2, 0],
                         [0, 1, 2, 1, 0],
                         [0, 0, 0, 0, 0]])
detected_features = detectFeatures(sample_image, 0.03, 3)
print(detected_features)
|
/**
 * Replace every character of $str with its HTML numeric character
 * reference, e.g. "A" becomes "&#65;".
 *
 * @param string $str Input string (processed byte by byte).
 * @return string The encoded string.
 */
function replace_with_code($str) {
    $newStr = '';
    for ($i = 0; $i < strlen($str); $i++) {
        $code = ord($str[$i]);
        // Bug fix: a numeric character reference must be terminated with
        // ';' — without it, output like "&#65&#66" only renders correctly
        // in lenient parsers and is invalid HTML.
        $newStr .= "&#{$code};";
    }
    return $newStr;
}
|
import { ConflictException, InternalServerErrorException } from '@nestjs/common'
import { EntityRepository, Repository } from 'typeorm'
import { AuthCredentialsDto } from './dto/auth-credential.dto'
import { User } from './user.entity'
import * as bcypt from 'bcryptjs'
@EntityRepository(User)
export class UserRepository extends Repository<User> {
  /**
   * Create and persist a new user with a salted bcrypt password hash.
   *
   * @param authCredentialsDto username and plain-text password
   * @throws ConflictException when the username already exists (unique violation)
   * @throws InternalServerErrorException for any other persistence failure
   */
  async createUser(authCredentialsDto: AuthCredentialsDto): Promise<void> {
    const { username, password } = authCredentialsDto
    const salt = await bcypt.genSalt()
    const hashedPW = await bcypt.hash(password, salt)
    // Store the *hash*, never the plain-text password.
    // (Fixes the corrupted `<PASSWORD>PW` placeholder token left in the
    // original, which did not even compile.)
    const user = this.create({ username, password: hashedPW })
    try {
      await this.save(user)
    } catch (error) {
      // 23505 = PostgreSQL unique_violation (duplicate username).
      if (error.code === '23505') {
        throw new ConflictException('이름을 사용 중인 유저가 존재합니다')
      } else {
        throw new InternalServerErrorException()
      }
    }
  }
}
|
// (repository metadata: 1-10 GitHub stars)
//
// UIViewController+ADScaleTransition.h
// CommonLibrary
//
// Created by James on 3/7/14.
// Copyright (c) 2014 CommonLibrary. All rights reserved.
//
#if kSupportADTransition
#import <UIKit/UIKit.h>
#import "ADScaleTransition.h"
@interface UIViewController (ADScaleTransition)

/** Transition object driving the scale animation when this controller
 *  presents another one; set by the category's presentation helpers. */
@property (nonatomic) ADScaleTransition *presentedScaleTransition;

/** Transition object driving the scale animation when this controller
 *  was itself presented; used by the dismiss helpers below. */
@property (nonatomic) ADScaleTransition *presentingScaleTransition;

/**
 * Present a view controller modally from the current view controller using a
 * scale animation.
 * @param destinationViewController The view controller to present
 * @param sourceView A subview of the current view controller to scale from
 * @param completion A block to run on completion. Can be NULL.
 */
- (void)scaleToViewController:(UIViewController *)destinationViewController fromView:(UIView *)sourceView withCompletion:(void (^)(void))completion;

/**
 * Present a view controller modally from the current view controller using a
 * scale animation.
 * @param destinationViewController The view controller to present
 * @param sourceView A subview of the current view controller to scale from
 * @param sourceSnapshot The placeholder image for the source view. Specifying
 * nil will take a snapshot just before the animation.
 * @param destinationSnapshot The placeholder image for the destination view.
 * Specifying nil will take a snapshot just before the animation.
 * @param completion A block to run on completion. Can be NULL.
 */
- (void)scaleToViewController:(UIViewController *)destinationViewController fromView:(UIView *)sourceView withSourceSnapshotImage:(UIImage *)sourceSnapshot andDestinationSnapshot:(UIImage *)destinationSnapshot withCompletion:(void (^)(void))completion;

/**
 * Present a view controller as a child view controller from the current view
 * controller using a scale animation.
 * @param destinationViewController The view controller to present
 * @param sourceView A subview of the current view controller to scale from
 * @param destinationSize The size for the destination view controller to take
 * up on the screen.
 * @param completion A block to run on completion. Can be NULL.
 */
- (void)scaleToViewController:(UIViewController *)destinationViewController fromView:(UIView *)sourceView asChildWithSize:(CGSize)destinationSize withCompletion:(void (^)(void))completion;

/**
 * Present a view controller as a child view controller from the current view
 * controller using a scale animation.
 * @param destinationViewController The view controller to present
 * @param sourceView A subview of the current view controller to scale from
 * @param destinationSize The size for the destination view controller to take
 * up on the screen.
 * @param sourceSnapshot The placeholder image for the source view. Specifying
 * nil will take a snapshot just before the animation.
 * @param destinationSnapshot The placeholder image for the destination view.
 * Specifying nil will take a snapshot just before the animation.
 * @param completion A block to run on completion. Can be NULL.
 */
- (void)scaleToViewController:(UIViewController *)destinationViewController fromView:(UIView *)sourceView asChildWithSize:(CGSize)destinationSize withSourceSnapshotImage:(UIImage *)sourceSnapshot andDestinationSnapshot:(UIImage *)destinationSnapshot withCompletion:(void (^)(void))completion;

/**
 * Dismiss the current modal view controller with a scale animation.
 * @discussion Only works when the current view controller has been presented
 * using one of the category convenience methods to present the view controller.
 * @param completion A block to run on completion. Can be NULL.
 */
- (void)dismissScaleWithCompletion:(void (^)(void))completion;

/**
 * Dismiss the current modal view controller with a scale animation to a cell in
 * a UICollectionViewController or UITableViewController.
 * @discussion Only works when the current view controller has been presented
 * using one of the category convenience methods to present the view controller.
 * @param indexPath The location of the cell to scale back to.
 * @param completion A block to run on completion. Can be NULL.
 */
- (void)dismissScaleToIndexPath:(NSIndexPath *)indexPath withCompletion:(void (^)(void))completion;
@end
#endif
|
// (repository metadata: 0 GitHub stars)
// Barrel file: re-export all MDX helper modules from a single entry point.
export * from './getAllMdx';
export * from './getAllMdxCategories';
export * from './getMdx';
|
#!/bin/bash
# © Copyright IBM Corporation 2019.
# LICENSE: Apache License, Version 2.0 (http://www.apache.org/licenses/LICENSE-2.0)
#
# Instructions:
# Download build script: wget https://raw.githubusercontent.com/linux-on-ibm-z/scripts/master/CFSSL/1.3.4/build_cfssl.sh
# Execute build script: bash build_cfssl.sh (provide -h for help)
set -e -o pipefail

PACKAGE_NAME="CFSSL"
PACKAGE_VERSION="1.3.4"
SOURCE_ROOT="$(pwd)"
GO_DEFAULT="$HOME/go"
FORCE="false"
LOG_FILE="$SOURCE_ROOT/logs/${PACKAGE_NAME}-${PACKAGE_VERSION}-$(date +"%F-%T").log"

# Run cleanup on normal exit, hangup, interrupt, or any failing command.
trap cleanup 0 1 2 ERR

# Check if the log directory exists, creating it on first run.
if [ ! -d "$SOURCE_ROOT/logs/" ]; then
    mkdir -p "$SOURCE_ROOT/logs/"
fi

# Need handling for RHEL 6.10 as it doesn't have os-release file
if [ -f "/etc/os-release" ]; then
    source "/etc/os-release"
else
    # Fall back to redhat-release and hard-code RHEL 6.x identifiers.
    cat /etc/redhat-release >>"${LOG_FILE}"
    export ID="rhel"
    export VERSION_ID="6.x"
    export PRETTY_NAME="Red Hat Enterprise Linux 6.x"
fi
function prepare() {
if command -v "sudo" >/dev/null; then
printf -- 'Sudo : Yes\n' >>"$LOG_FILE"
else
printf -- 'Sudo : No \n' >>"$LOG_FILE"
printf -- 'You can install the same from installing sudo from repository using apt, yum or zypper based on your distro. \n'
exit 1
fi
if [[ "$FORCE" == "true" ]]; then
printf -- 'Force attribute provided hence continuing with install without confirmation message\n' |& tee -a "$LOG_FILE"
else
# Ask user for prerequisite installation
printf -- "\nAs part of the installation , dependencies would be installed/upgraded.\n"
while true; do
read -r -p "Do you want to continue (y/n) ? : " yn
case $yn in
[Yy]*)
printf -- 'User responded with Yes. \n' >>"$LOG_FILE"
break
;;
[Nn]*) exit ;;
*) echo "Please provide confirmation to proceed." ;;
esac
done
fi
}
function cleanup() {
# Remove artifacts
rm -rf $SOURCE_ROOT/build_go.sh
printf -- "Cleaned up the artifacts\n" >>"$LOG_FILE"
}
function install_go() {
#Install Go
printf -- "\n Installing go \n" |& tee -a "$LOG_FILE"
cd $SOURCE_ROOT
wget "https://raw.githubusercontent.com/linux-on-ibm-z/scripts/master/Go/1.13/build_go.sh" |& tee -a "$LOG_FILE"
chmod +x build_go.sh
bash build_go.sh -v 1.12.7 |& tee -a "$LOG_FILE"
printf -- "Completed go installation successfully. \n" >>"$LOG_FILE"
}
function configureAndInstall() {
printf -- "Configuration and Installation started \n"
printf -- "Build and install CFSSL \n"
#Installing CFSSL
cd $SOURCE_ROOT
go get -u github.com/cloudflare/cfssl/cmd/cfssl
go get -u github.com/cloudflare/cfssl/cmd/cfssljson
cd $GOPATH/src/github.com/cloudflare/cfssl
git checkout 1.3.4
printf -- 'CFSSL installed successfully. \n'
printf -- "The tools will be installed in $GOPATH/bin."
#runTests
runTest
}
function runTest() {
set +e
if [[ "$TESTS" == "true" ]]; then
printf -- "\nTEST Flag is set, continue with running test \n"
cd $GOPATH/src/github.com/cloudflare/cfssl
./test.sh
printf -- "Tests completed. \n"
fi
set -e
}
function logDetails() {
printf -- '**************************** SYSTEM DETAILS *************************************************************\n' >"$LOG_FILE"
if [ -f "/etc/os-release" ]; then
cat "/etc/os-release" >>"$LOG_FILE"
fi
cat /proc/version >>"$LOG_FILE"
printf -- '*********************************************************************************************************\n' >>"$LOG_FILE"
printf -- "Detected %s \n" "$PRETTY_NAME"
printf -- "Request details : PACKAGE NAME= %s , VERSION= %s \n" "$PACKAGE_NAME" "$PACKAGE_VERSION" |& tee -a "$LOG_FILE"
}
# Print the usage message
function printHelp() {
# Shown for -h / -? and for any unrecognized flag.
echo
echo "Usage: "
echo " build_cfssl.sh [-d debug] [-y install-without-confirmation] [-t install-with-tests] "
echo
}
# Parse CLI flags: -h/-? print help and exit, -d enable shell tracing,
# -y skip the interactive confirmation prompt, -t run tests after install.
while getopts "h?dyt" opt; do
case "$opt" in
h | \?)
printHelp
exit 0
;;
d)
set -x
;;
y)
FORCE="true"
;;
t)
TESTS="true"
;;
esac
done
function gettingStarted() {
# Print the post-install banner pointing the user at the upstream docs.
printf -- '\n********************************************************************************************************\n'
printf -- "\n*Getting Started * \n"
printf -- " CFSSL installed successfully. \n"
printf -- "More information can be found here : https://github.com/cloudflare/cfssl \n"
printf -- '**********************************************************************************************************\n'
}
logDetails
prepare #Check Prequisites

# Per-distro install: each branch installs the build dependencies with the
# native package manager, installs Go, points GOPATH at the build root, and
# then builds CFSSL. Unsupported distros abort with a logged error.
DISTRO="$ID-$VERSION_ID"
case "$DISTRO" in
"ubuntu-16.04" | "ubuntu-18.04" | "ubuntu-19.04")
printf -- "Installing %s %s for %s \n" "$PACKAGE_NAME" "$PACKAGE_VERSION" "$DISTRO" |& tee -a "$LOG_FILE"
printf -- "Installing dependencies... it may take some time.\n"
sudo apt-get update
sudo apt-get install -y git gcc make |& tee -a "$LOG_FILE"
install_go
export GOPATH=$SOURCE_ROOT
export PATH=$GOPATH/bin:$PATH
configureAndInstall |& tee -a "$LOG_FILE"
;;
"rhel-7.5" | "rhel-7.6" | "rhel-8.0")
printf -- "Installing %s %s for %s \n" "$PACKAGE_NAME" "$PACKAGE_VERSION" "$DISTRO" |& tee -a "$LOG_FILE"
printf -- "Installing dependencies... it may take some time.\n"
sudo yum install -y git gcc make wget |& tee -a "$LOG_FILE"
install_go
export GOPATH=$SOURCE_ROOT
export PATH=$GOPATH/bin:$PATH
configureAndInstall |& tee -a "$LOG_FILE"
;;
"sles-12.4" | "sles-15")
printf -- "Installing %s %s for %s \n" "$PACKAGE_NAME" "$PACKAGE_VERSION" "$DISTRO" |& tee -a "$LOG_FILE"
printf -- "Installing dependencies... it may take some time.\n"
sudo zypper install -y git gcc make wget |& tee -a "$LOG_FILE"
install_go
export GOPATH=$SOURCE_ROOT
export PATH=$GOPATH/bin:$PATH
configureAndInstall |& tee -a "$LOG_FILE"
;;
*)
printf -- "%s not supported \n" "$DISTRO" |& tee -a "$LOG_FILE"
exit 1
;;
esac
gettingStarted |& tee -a "$LOG_FILE"
|
#!/bin/sh
set -euf
# Loop until Confd has rendered the first config file; the backing
# key-value store at 127.0.0.1:4001 may not be reachable yet at startup.
until /usr/bin/confd -onetime \
-node 127.0.0.1:4001 \
-config-file /etc/confd/conf.d/nginx.toml
do sleep 5; done
# Start Confd in the background, re-checking the template every 10 seconds
echo "Starting Confd"
/usr/bin/confd -interval 10 \
-node 127.0.0.1:4001 \
-config-file /etc/confd/conf.d/nginx.toml &
# Start Nginx
echo "Starting Nginx"
/etc/init.d/nginx start
|
# Remote locations of the two original TensorFlow models:
# advancedeast = text detection, chineseocr = text recognition.
tensorflow_model1="https://c7xcode.obs.cn-north-4.myhuaweicloud.com:443/models/scenetextrecognition/advancedeast.pb"
tensorflow_model2="https://c7xcode.obs.cn-north-4.myhuaweicloud.com:443/models/scenetextrecognition/chineseocr.pb"
model_name1="advancedeast"
model_name2="chineseocr"
# Toolkit version, e.g. "c75" (first CLI argument).
version=$1
data_source="https://c7xcode.obs.cn-north-4.myhuaweicloud.com/models/scenetextrecognition/"
verify_source="https://c7xcode.obs.cn-north-4.myhuaweicloud.com/models/scenetextrecognition/"
project_name="scenetextrecognition"
# Absolute path of this script's directory; project root is one level up.
script_path="$( cd "$(dirname $BASH_SOURCE)" ; pwd -P)"
project_path=${script_path}/..
# Exit codes returned by main().
declare -i success=0
declare -i inferenceError=1
declare -i verifyResError=2
function downloadDataWithVerifySource() {
    # Fetch the sample input image and the expected-result file used for
    # verification. Returns 1 if either download fails.
    mkdir -p "${project_path}/src/Data/image/"
    if ! wget -O "${project_path}/src/Data/image/""test.jpg" ${data_source}"test.jpg" --no-check-certificate; then
        echo "download test.jpg failed, please check Network."
        return 1
    fi

    mkdir -p "${project_path}/verify_image/"
    if ! wget -O "${project_path}/verify_image/test_result.txt" ${verify_source}"test_result.txt" --no-check-certificate; then
        echo "download test_result.txt failed, please check Network."
        return 1
    fi
    return 0
}
function setAtcEnv() {
    # Export the ATC (model conversion) toolchain environment for the C75
    # toolkit release; any other version string leaves the environment alone.
    if [[ ${version} = "c75" || ${version} = "C75" ]]; then
        export HOME=/home/HwHiAiUser
        export install_path=$HOME/Ascend/ascend-toolkit/latest
        export PATH=/usr/local/python3.7.5/bin:${install_path}/atc/ccec_compiler/bin:${install_path}/atc/bin:$PATH
        export ASCEND_OPP_PATH=${install_path}/opp
        export PYTHONPATH=${install_path}/atc/python/site-packages:${install_path}/atc/python/site-packages/auto_tune.egg/auto_tune:${install_path}/atc/python/site-packages/schedule_search.egg:$PYTHONPATH
        export LD_LIBRARY_PATH=${install_path}/atc/lib64:${LD_LIBRARY_PATH}
        echo "setAtcEnv success."
    fi
    return 0
}
function downloadOriginalModel() {
    # Download the two original TensorFlow models.
    #
    # Bug fix: the original created ${project_path}/model/ only, but wget
    # writes into src/Data/Models/TextRecognition and .../TextDetection,
    # which were never created — `wget -O` into a missing directory fails on
    # a fresh checkout. Create the actual destination directories too.
    mkdir -p ${project_path}/model/
    mkdir -p ${project_path}/src/Data/Models/TextRecognition/
    mkdir -p ${project_path}/src/Data/Models/TextDetection/

    wget -O ${project_path}/src/Data/Models/TextRecognition/${tensorflow_model2##*/} ${tensorflow_model2} --no-check-certificate
    if [ $? -ne 0 ];then
        echo "install tensorflow_model failed, please check Network."
        return 1
    fi
    wget -O ${project_path}/src/Data/Models/TextDetection/${tensorflow_model1##*/} ${tensorflow_model1} --no-check-certificate
    if [ $? -ne 0 ];then
        echo "install tensorflow_model failed, please check Network."
        return 1
    fi
    return 0
}
function main() {
    # End-to-end sample driver: convert models with ATC (first run only),
    # download test data, build the project, run inference, verify output.
    if [[ ${version}"x" = "x" ]];then
        echo "ERROR: version is invalid"
        return ${inferenceError}
    fi

    # reconfigure environment param
    export LD_LIBRARY_PATH=/home/HwHiAiUser/Ascend/nnrt/latest/acllib/lib64:/home/HwHiAiUser/ascend_ddk/x86/lib:${LD_LIBRARY_PATH}
    export ASCEND_HOME=/home/HwHiAiUser/Ascend
    export LD_LIBRARY_PATH=$ASCEND_HOME/ascend-toolkit/latest/acllib/lib64:$LD_LIBRARY_PATH
    export LD_LIBRARY_PATH=/usr/local/opencv/lib64:$LD_LIBRARY_PATH
    export LD_LIBRARY_PATH=/usr/local/opencv/lib:$LD_LIBRARY_PATH

    mkdir -p ${HOME}/models/${project_name}
    # Bug fix: the original tested `${model_name}.om`, but `model_name` is
    # never defined (only model_name1/model_name2 exist), so the check always
    # came up empty and the models were re-downloaded and re-converted on
    # every run. Check for both converted .om files where atc writes them.
    if [[ $(find ${project_path}/src/Data/Models -name ${model_name1}".om")"x" = "x" ]] || [[ $(find ${project_path}/src/Data/Models -name ${model_name2}".om")"x" = "x" ]];then
        # downloadmodel
        downloadOriginalModel
        if [ $? -ne 0 ];then
            echo "ERROR: download original model failed"
            return ${inferenceError}
        fi

        # set model convert param
        setAtcEnv
        if [ $? -ne 0 ];then
            echo "ERROR: set atc environment failed"
            return ${inferenceError}
        fi

        # Convert the detection model; --dynamic_image_size lists the H,W
        # combinations selectable at inference time.
        cd ${project_path}/src/Data/Models/TextDetection
        atc --model=./advancedeast.pb \
            --framework=3 \
            --output=./advancedeast \
            --soc_version=Ascend310 \
            --insert_op_conf=./advancedeast_aipp.cfg \
            --input_shape="input_img:1,-1,-1,3" \
            --dynamic_image_size="832,832;416,832;832,416;416,416"
        if [ $? -ne 0 ];then
            echo "ERROR: convert model advancedeast failed"
            return ${inferenceError}
        fi

        # Convert the recognition model.
        cd ${project_path}/src/Data/Models/TextRecognition
        atc --model=./chineseocr.pb \
            --framework=3 \
            --output=./chineseocr \
            --soc_version=Ascend310 \
            --insert_op_conf=./chineseocr_aipp.cfg \
            --input_shape="the_input:1,-1,-1,1" \
            --dynamic_image_size="32,32;32,64;32,96;32,128;32,160;32,192;32,224;32,256;32,288;32,320"
        if [ $? -ne 0 ];then
            echo "ERROR: convert model chineseocr failed"
            return ${inferenceError}
        fi
    fi

    # download data
    downloadDataWithVerifySource
    if [ $? -ne 0 ];then
        echo "ERROR: download test images or verify images failed"
        return ${inferenceError}
    fi

    cd ${project_path}
    bash ${project_path}/src/build.sh
    if [ $? -ne 0 ];then
        echo "ERROR: build failed. please check your project"
        return ${inferenceError}
    fi
    cp -fr ${project_path}/src/Data/image ${project_path}/src/dist

    # execute program
    cd ${project_path}/src/dist/
    ${project_path}/src/dist/ocr
    if [ $? -ne 0 ];then
        echo "ERROR: run failed. please check your project"
        return ${inferenceError}
    fi

    # verify: compare each produced result file against its expected file
    for outimage in $(find ${project_path}/verify_image -name "*.txt");do
        tmp=`basename $outimage`
        if [[ ! -d "${project_path}/src/dist/result" ]];then
            echo "ERROR: not find results folders!"
            return ${verifyResError}
        fi
        for test_file in `find ${project_path}/src/dist/result -name "*${tmp#*_}"`;do
            python3 ${script_path}/verify_result.py ${test_file} ${outimage}
            if [ $? -ne 0 ];then
                echo "ERROR: The result of reasoning is wrong!"
                return ${verifyResError}
            fi
        done
    done
    echo "run success"
    return ${success}
}
main
|
<reponame>jtlivio/jl-spfx-project-webparts
/**
 * Localized string resources consumed by the Starter web part.
 */
declare interface IStarterWpStrings {
  PropertyPaneDescription: string;
  BasicGroupName: string;
  MaxItemsFieldLabel: string;
}

/**
 * Ambient module so `import * as strings from 'starterWpStrings'`
 * resolves to the localized resource bundle at runtime.
 */
declare module 'starterWpStrings' {
  const strings: IStarterWpStrings;
  export = strings;
}
|
package com.abubusoft.sharedpreferencewatcher.view.ui;
import android.arch.lifecycle.ViewModelProviders;
import android.content.Intent;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuItem;
import com.abubusoft.kripton.android.annotation.BindPreferenceAdapter;
import com.abubusoft.sharedpreferencewatcher.model.BindAppPreferences;
import com.abubusoft.sharedpreferencewatcher.view.adapters.RecyclerViewAdapter;
import com.abubusoft.sharedpreferencewatcher.view.adapters.AbstractModel;
import com.abubusoft.sharedpreferencewatcher.R;
import com.abubusoft.sharedpreferencewatcher.viewmodel.SharedPrefsViewModel;
import java.util.ArrayList;
/**
 * Demo activity that displays shared-preference values in a RecyclerView and
 * refreshes the rows through LiveData observers on {@link SharedPrefsViewModel}.
 */
public class MainActivity extends AppCompatActivity {
    private RecyclerView recyclerView;
    private Toolbar toolbar;
    // Adapter backing the list; rows are updated in place via update(...).
    private RecyclerViewAdapter mAdapter;
    private ArrayList<AbstractModel> modelList = new ArrayList<>();
    // Exposes shared-preference entries as observable LiveData streams.
    private SharedPrefsViewModel viewModel;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        findViews();
        initToolbar("Kripton and Shared Prefs Live Data!");
        setAdapter();
        // view model management
        // 1- create view model
        viewModel = ViewModelProviders.of(this).get(SharedPrefsViewModel.class);
        // 2- observe channel header
        // Each observer pushes the latest preference value into the adapter.
        viewModel.getExampleList().observe(this, value ->
                this.mAdapter.update(new AbstractModel("Key = ExampleList", "Value = " + value))
        );
        viewModel.getExampleSwitch().observe(this, value ->
                this.mAdapter.update(new AbstractModel("Key = ExampleSwitch", "Value = " + value))
        );
        viewModel.getExampleText().observe(this, value ->
                this.mAdapter.update(new AbstractModel("Key = ExampleText", "Value = " + value))
        );
    }

    /** Attach the adapter and a vertical linear layout to the RecyclerView. */
    private void setAdapter() {
        mAdapter = new RecyclerViewAdapter(MainActivity.this, modelList);
        recyclerView.setHasFixedSize(true);
        // use a linear layout manager
        LinearLayoutManager layoutManager = new LinearLayoutManager(this);
        recyclerView.setLayoutManager(layoutManager);
        recyclerView.setAdapter(mAdapter);
    }

    /** Look up the views referenced by this activity. */
    private void findViews() {
        toolbar = (Toolbar) findViewById(R.id.toolbar);
        recyclerView = (RecyclerView) findViewById(R.id.recycler_view);
    }

    /** Install the toolbar as the action bar and set its title. */
    public void initToolbar(String title) {
        setSupportActionBar(toolbar);
        /*getSupportActionBar().setDisplayHomeAsUpEnabled(true);*/
        /*getSupportActionBar().setDisplayShowHomeEnabled(true);*/
        getSupportActionBar().setTitle(title);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.menu_main, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();

        //noinspection SimplifiableIfStatement
        if (id == R.id.action_settings) {
            startActivity(new Intent(this, SettingsActivity.class));
            return true;
        }

        return super.onOptionsItemSelected(item);
    }
}
|
package benchmarks.ran.bnldev.Neq;
/**
 * Binomial random deviate generator in the style of Numerical Recipes'
 * bnldev/ran1/gammln (presumably a direct port — TODO confirm provenance).
 *
 * NOTE(review): in the C original, idum is passed by reference so the RNG
 * state advances between calls; here ran1 receives idum by value, so any
 * seed update inside ran1 is lost to the caller, and ran1's iv/iy tables are
 * re-initialized on every call. Confirm this is intentional for the benchmark.
 */
public class oldV{

    /**
     * Returns a binomial deviate: the number of successes in n trials with
     * success probability pp, using the ran1 stream seeded by idumx.
     * Three regimes: direct simulation (n < 25), waiting-time method
     * (mean < 1), and rejection sampling with a Lorentzian comparison
     * function otherwise.
     */
    public static double snippet (double pp, int n, int idumx, int idum) {//idum is global Var
        idum = idumx;
        double PI=3.141592653589793238;
        double j=0;
        double nold=-1;
        double am=0;
        double em=0;
        double g=0;
        double angle=0;
        double p=0;
        double bnl=0;
        double sq=0;
        double t=0;
        double y=0;
        double pold=(-1.0);
        double pc = 0;
        double plog = 0;
        double pclog = 0;
        double en = 0;
        double oldg = 0;
        // Work with p <= 0.5; mirror the result at the end if pp > 0.5.
        if (pp <= 0.5)
            p = pp;
        else
            p = 1.0-pp;
        am=n*p;  // mean of the distribution
        if (n < 25) {
            // Few trials: simulate each Bernoulli trial directly.
            bnl=0.0;
            for (j=0;j<n;j++)
                if (ran1(idum) < p)
                    ++bnl;
        }
        else if (am < 1.0) {
            // Small mean: Poisson-like waiting-time method.
            g=Math.exp(-am);
            t=1.0;
            for (j=0;j<=n;j++) {
                t *= ran1(idum);
                if (t < g)
                    break;
            }
            if (j <= n)
                bnl = j;
            else
                bnl = n;
        }
        else {
            // General case: rejection sampling under a Lorentzian envelope.
            if (n != nold) {
                en=n;
                oldg=en+1.0;
                nold=n;
            }
            if (p != pold) {
                pc=1.0-p;
                plog=Math.log(p);
                pclog=Math.log(pc);
                pold=p;
            }
            sq=Math.sqrt(2.0*am*pc);
            do {
                do {
                    // Sample from the Lorentzian comparison function.
                    angle = PI * ran1(idum);
                    y = Math.tan(angle);
                    em = sq * y + am;
                } while (em < 0.0 || em >= (en+1.0));  // reject out-of-range
                em=Math.floor(em);  // integer-valued deviate candidate
                t=1.2*sq*(1.0+y*y)*Math.exp(oldg-gammln(em+1.0)-gammln(en-em+1.0)+em*plog+(en-em)*pclog);
            } while (ran1(idum) > t && idum<530511967);  // accept with probability t
            bnl=em;
        }
        // Undo the p -> 1-p mirroring applied above.
        if (p != pp)
            bnl=n-bnl;
        return bnl;
    }

    /**
     * Returns ln(Gamma(xx)) via the Lanczos approximation
     * (Numerical Recipes gammln — presumably; coefficients match).
     */
    static double gammln(double xx){
        int j;
        double x,y,tmp,ser;
        double[] cof={76.18009172947146,-86.50532032941677, 24.01409824083091,-1.231739572450155,0.1208650973866179e-2, -0.5395239384953e-5};
        y=x=xx;
        tmp=x+5.5;
        tmp -= (x+0.5)*Math.log(tmp);
        ser=1.000000000190015;
        for (j=0;j<6;j++)
            ser += cof[j]/++y;
        return -tmp+Math.log(2.5066282746310005*ser/x);
    }

    /**
     * Minimal-standard Park-Miller generator with Bays-Durham shuffle
     * (Numerical Recipes ran1 layout). Returns a uniform deviate in (0,1).
     * NOTE(review): idum is a value parameter and iv/iy are locals, so the
     * shuffle table is rebuilt on every call — unlike the stateful original.
     */
    static double ran1(double idum){
        int IA=16807;
        int IM=2147483647;
        int IQ=127773;
        int IR=2836;
        int NTAB=32;
        int NDIV=(1+(IM-1)/NTAB);
        double EPS=3.0e-16;
        double AM=1.0/IM;
        double RNMX=(1.0-EPS);
        int iy=0;
        int[] iv = new int[NTAB];
        int j,k;
        double temp;
        if (idum <= 0 || iy == 0) {
            // (Re)initialize the shuffle table; force a positive seed.
            if (-idum < 1)
                idum=1;
            else
                idum = -idum;
            for (j=NTAB+7;j>=0;j--) {
                k= (int) (idum/IQ);
                idum=IA*(idum-k*IQ)-IR*k;  // Schrage's algorithm, no overflow
                if (idum < 0)
                    idum += IM;
                if (j < NTAB)
                    iv[j] = (int) idum;
            }
            iy=iv[0];
        }
        k= (int) (idum/IQ);
        idum=IA*(idum-k*IQ)-IR*k;
        if (idum < 0)
            idum += IM;
        j=iy/NDIV;
        // Bays-Durham shuffle: output iv[j], refill with the new idum.
        iy=iv[j];
        iv[j] = (int) idum;
        if ((temp=AM*iy) > RNMX)
            return RNMX;  // avoid returning exactly 1.0
        else
            return temp;
    }
}
|
package javax.jmdns.impl.util;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.Charset;
import java.util.Map;
/**
* This class contains all the byte shifting
*
* @author <NAME>
*
*/
/**
 * This class contains all the byte shifting
 *
 * @author <NAME>
 *
 */
public class ByteWrangler {
    //private static Logger logger = LoggerFactory.getLogger(ByteWrangler.class.getName());

    /**
     * Maximum number of bytes a value can consist of.
     */
    public static final int MAX_VALUE_LENGTH = 255;

    /**
     * Maximum number of bytes record data can consist of.
     * It is {@link #MAX_VALUE_LENGTH} + 1 because the first byte contains the number of the following bytes.
     */
    public static final int MAX_DATA_LENGTH = MAX_VALUE_LENGTH + 1;

    /**
     * Representation of no value. A zero length array of bytes.
     */
    public static final byte[] NO_VALUE = new byte[0];

    /**
     * Representation of empty text.
     * The first byte denotes the length of the following character bytes (in this case zero.)
     *
     * FIXME: Should this be exported as a method since it could change externally???
     */
    public final static byte[] EMPTY_TXT = new byte[] { 0 };

    /**
     * Name for charset used to convert Strings to/from wire bytes: {@value #CHARSET_NAME}.
     */
    public final static String CHARSET_NAME = "UTF-8";

    /**
     * Charset used to convert Strings to/from wire bytes: {@value #CHARSET_NAME}.
     */
    private final static Charset CHARSET_UTF_8 = Charset.forName(CHARSET_NAME);

    /**
     * Write a String as {@value #CHARSET_NAME} encoded bytes to a stream.
     */
    public static void writeUTF(final OutputStream out, final String str) throws IOException {
        final byte[] utf8Bytes = str.getBytes(CHARSET_UTF_8);
        out.write(utf8Bytes);
    }

    /**
     * Read data bytes as {@value #CHARSET_NAME} to String.
     */
    public static String readUTF(final byte data[]) {
        return readUTF(data, 0, data.length);
    }

    /**
     * Read data bytes as {@value #CHARSET_NAME} to String.
     */
    public static String readUTF(final byte data[], final int off, final int len) {
        return new String(data, off, len, CHARSET_UTF_8);
    }

    /**
     * Parse DNS-SD TXT record bytes (length-prefixed "key=value" segments)
     * into the given map. On any malformed segment the map is cleared and
     * parsing stops. A key without '=' maps to {@link #NO_VALUE}.
     */
    public static void readProperties(final Map<String, byte[]> properties, final byte[] textBytes) throws Exception {
        if (textBytes != null) {
            int off = 0;
            while (off < textBytes.length) {
                // length of the next key value pair
                final int len = textBytes[off++] & 0xFF;
                // error case
                if (
                    (len == 0) || // no data
                    (off + len > textBytes.length) // length of data would exceed array bounds
                ) {
                    properties.clear();
                    break;
                }
                // look for the '='
                int i = 0;
                for (; (i < len) && (textBytes[off + i] != '='); i++) {
                    /* Stub */
                }
                // get the property name
                final String name = readUTF(textBytes, off, i);
                if (name == null) {
                    properties.clear();
                    break;
                }
                if (i == len) {
                    properties.put(name, NO_VALUE);
                } else {
                    final byte value[] = new byte[len - ++i];
                    System.arraycopy(textBytes, off + i, value, 0, len - i);
                    properties.put(name, value);
                }
                off += len;
            }
        }
    }

    /**
     * Encode a property map into TXT record bytes: each entry becomes a
     * length byte followed by "key" or "key=value". Values may be String or
     * byte[]; an entry whose encoded form exceeds {@link #MAX_VALUE_LENGTH}
     * makes the whole record collapse to {@link #EMPTY_TXT}.
     */
    public static byte[] textFromProperties(final Map<String, ?> props) {
        byte[] text = null;
        if (props != null) {
            try {
                final ByteArrayOutputStream out = new ByteArrayOutputStream(MAX_DATA_LENGTH);
                for (final Map.Entry<String, ?> entry: props.entrySet()) {
                    final String key = entry.getKey();
                    Object val = entry.getValue();
                    final ByteArrayOutputStream out2 = new ByteArrayOutputStream(100);
                    writeUTF(out2, key);
                    if (val == null) {
                        // Skip
                    } else if (val instanceof String) {
                        out2.write('=');
                        writeUTF(out2, (String) val);
                    } else if (val instanceof byte[]) {
                        byte[] bval = (byte[]) val;
                        if (bval.length > 0) {
                            out2.write('=');
                            out2.write(bval, 0, bval.length);
                        } else {
                            val = null;
                        }
                    } else {
                        throw new IllegalArgumentException("Invalid property value: " + val);
                    }
                    byte data[] = out2.toByteArray();
                    if (data.length > MAX_VALUE_LENGTH) {
                        //logger.warn("Cannot have individual values larger that 255 chars. Offending value: {}", key + (val == null ? "" : "=" + val));
                        return EMPTY_TXT;
                    }
                    out.write((byte) data.length);
                    out.write(data, 0, data.length);
                }
                text = out.toByteArray();
            } catch (final IOException e) {
                throw new RuntimeException("unexpected exception: " + e);
            }
        }
        return (text != null && text.length > 0 ? text : EMPTY_TXT);
    }

    /**
     * Encode a single string as one length-prefixed TXT segment; returns
     * {@link #EMPTY_TXT} if the encoded text is too long or empty.
     */
    public static byte[] encodeText(final String text) throws IOException {
        final ByteArrayOutputStream out = new ByteArrayOutputStream(MAX_DATA_LENGTH);
        final ByteArrayOutputStream out2 = new ByteArrayOutputStream(100);
        writeUTF(out2, text);
        final byte data[] = out2.toByteArray();
        if (data.length > MAX_VALUE_LENGTH) {
            //logger.warn("Cannot have individual values larger that 255 chars. Offending value: {}", text);
            return EMPTY_TXT;
        }
        out.write((byte) data.length);
        out.write(data, 0, data.length);
        final byte[] encodedText = out.toByteArray();
        return (encodedText.length > 0 ? encodedText : EMPTY_TXT);
    }
}
|
# Wait briefly (presumably for earlier steps to settle — TODO confirm why),
# then record the current working directory to pwd.txt.
sleep 5
pwd > pwd.txt
|
/**************************************************************
 * DEF_F_msgsys
 * yafra.org
 *
 * system messages definition
 *
 **************************************************************/
-- French system messages (language id 2). Fix: the file had been saved with a
-- broken character encoding — every accented character had degraded to the
-- U+FFFD replacement character. The accents (é, à, ...) are reconstructed from
-- context, and the typo "vour" is corrected to "vous".

delete from msg where msg.msg_typ = 5 and msg.s_id = 2;
commit work;

insert into msg values (5, 0, 2, 1,
'Dans le cas ou vous ne comprendriez pas ce message d''erreur,');
insert into msg values (5, 0, 2, 2,
' veuillez vous référer rapidement à votre support technique.');
insert into msg values (5, 0, 2, 3,
'Vous pouvez utiliser un formulaire d''erreur.');
insert into msg values (5, 0, 2, 4,
'En comptant sur votre comprehension.');
commit work;

insert into msg values (5, 1, 2, 1,
'Erreur interne lors de la composition du texte d''erreur !');
insert into msg values (5, 1, 2, 2,
'Avertissez immédiatement votre support technique !');
commit work;

insert into msg values (5, 2, 2, 1,
'Il n''a pas été trouvé d''information correspondante');
insert into msg values (5, 2, 2, 2,
'au status %d dans la langue %d ');
commit work;

insert into msg values (5, 3, 2, 1,
'Erreur systeme interne lors de l''allocation de mémoire !');
insert into msg values (5, 3, 2, 2,
'Avertissez immédiatement votre support technique !');
commit work;

insert into msg values (5, 4, 2, 1,
' M E S S A G E D'' E R R E U R');
insert into msg values (5, 4, 2, 2,
' ===============================');
insert into msg values (5, 4, 2, 3,
'Date: %s Heure: %s Utilisateur: %s');
insert into msg values (5, 4, 2, 4, 'Database: %s');
insert into msg values (5, 4, 2, 5, 'Version: %s');
insert into msg values (5, 4, 2, 6, '');
commit work;
|
#!/usr/bin/env bash
# Build the locksmith Docker image and tag it unlockprotocol/locksmith:latest.
set -ex
USERNAME=unlockprotocol
IMAGE=locksmith
docker build -t $USERNAME/$IMAGE:latest --build-arg BUILD_DIR=locksmith .
|
import * as builder from 'botbuilder';
/**
 * Single-step waterfall dialog: replies with the first answer attached to the
 * recognized intent and ends the dialog.
 */
const dialog: builder.IDialogWaterfallStep[] = [
  (session: builder.Session, args: any /** XXX type */, skip: Function) => {
    // args.intent.answers is assumed non-empty here — TODO confirm the
    // recognizer upstream guarantees at least one answer.
    session.endDialog(args.intent.answers[0].answer);
  }
];

export default dialog;
|
# Compile mnist.nim in release mode against Intel MKL (LP64 interface):
# OpenMP enabled, BLAS/LAPACK resolved to mkl_intel_lp64, the interface
# library linked statically and threading provided by mkl_gnu_thread + gomp.
nim c -d:release \
--define:"openmp" \
--define:"blas=mkl_intel_lp64" \
--define:"lapack=mkl_intel_lp64" \
--clibdir:"/opt/intel/mkl/lib/intel64" \
--passL:"/opt/intel/mkl/lib/intel64/libmkl_intel_lp64.a" \
--passL:"-lmkl_core" \
--passL:"-lmkl_gnu_thread" \
--passL:"-lgomp" \
--dynlibOverride:"mkl_intel_lp64" \
mnist.nim
|
// Express router for the filters resource, keyed by namespace.
const express = require('express');
const controller = require('./filters.controller');
const router = express.Router();
// List all ids stored under a namespace.
router.get('/:namespace', controller.getAllIdsByNamespace);
// Add / remove a single id within a namespace.
router.post('/:namespace/:id', controller.add);
router.delete('/:namespace/:id', controller.delete);
// Exported as a factory to match the app's route-mounting convention.
module.exports = () => router;
|
package com.nortal.spring.cw.core.web.component.page;
import java.util.LinkedHashMap;
import java.util.Map;
import org.apache.commons.collections.MapUtils;
import org.springframework.util.Assert;
import com.nortal.spring.cw.core.web.component.ElementPath;
import com.nortal.spring.cw.core.web.component.element.EventElement;
import com.nortal.spring.cw.core.web.component.element.FormElement;
import com.nortal.spring.cw.core.web.component.form.FormElementHolderMapItem;
import com.nortal.spring.cw.core.web.util.ElementUtil;
/**
 * Ordered container of secondary button elements, addressable as part of an
 * element path hierarchy.
 */
public class SecondaryButtons implements ElementPath {
    private static final long serialVersionUID = 1L;

    /** Buttons keyed by element id, in insertion order. */
    private final Map<String, FormElementHolderMapItem> buttons = new LinkedHashMap<>();
    private ElementPath parentElementPath;

    public SecondaryButtons(ElementPath parentElementPath) {
        this.parentElementPath = parentElementPath;
    }

    /**
     * Registers a button. The element must be an {@link EventElement}; it is
     * re-parented onto this container before being stored under its id.
     */
    public void add(FormElement element) {
        Assert.isInstanceOf(EventElement.class, element);
        element.setParentElementPath(this);
        FormElementHolderMapItem holder = new FormElementHolderMapItem(element, this);
        buttons.put(element.getId(), holder);
    }

    /** @return an unmodifiable view of the registered buttons */
    public Map<String, FormElementHolderMapItem> getElementHolder() {
        return MapUtils.unmodifiableMap(buttons);
    }

    @Override
    public String getPath() {
        return ElementUtil.getNameForFullPath(this);
    }

    @Override
    public ElementPath getParentElementPath() {
        return parentElementPath;
    }

    @Override
    public void setParentElementPath(ElementPath elementPath) {
        this.parentElementPath = elementPath;
    }

    /** Removes every registered button. */
    public void clear() {
        buttons.clear();
    }
}
|
#!/usr/bin/env bash
# Copyright 2021 VMware
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -o errexit
set -o nounset
set -o pipefail

# Script directory and the address/port of the local image registry.
# shellcheck disable=SC2155
readonly DIR="$(cd "$(dirname "$0")" && pwd)"
readonly HOST_ADDR=${HOST_ADDR:-$("$DIR"/ip.py)}
readonly REGISTRY_PORT=${REGISTRY_PORT:-5000}
readonly REGISTRY=${REGISTRY:-"${HOST_ADDR}:${REGISTRY_PORT}"}
readonly KIND_IMAGE=${KIND_IMAGE:-kindest/node:v1.21.1}
readonly RELEASE_VERSION=${RELEASE_VERSION:-""}
readonly RELEASE_YAML_PATH=${RELEASE_YAML_PATH:-"./release/cartographer.yaml"}
# shellcheck disable=SC2034 # This _should_ be marked as an extern but I clearly don't understand how it operates in github actions
readonly DOCKER_CONFIG=${DOCKER_CONFIG:-"/tmp/cartographer-docker"}

# Names of the docker containers hosting the registry and the kind node.
readonly REGISTRY_CONTAINER_NAME=cartographer-registry
readonly KUBERNETES_CONTAINER_NAME=cartographer-control-plane

# Pinned versions of the dependencies installed for the examples.
readonly CERT_MANAGER_VERSION=1.5.3
readonly KAPP_CONTROLLER_VERSION=0.32.0
readonly KNATIVE_SERVING_VERSION=0.26.0
readonly KPACK_VERSION=0.5.1
readonly SOURCE_CONTROLLER_VERSION=0.17.0
readonly TEKTON_VERSION=0.30.0
readonly GIT_SERVE_VERSION=0.0.5

# In-cluster git repository used by the gitops example.
readonly GITOPS_REPO="http://git-server.default.svc.cluster.local:80/gitops-test.git"
readonly GITOPS_BRANCH="main"
readonly GITOPS_COMMIT_MESSAGE="Update config"
main() {
# Dispatch each positional argument as a command (see show_usage_help).
test $# -eq 0 && show_usage_help
display_vars "$@"

for command in "$@"; do
case $command in
cluster)
# Local registry + kind cluster + base controllers.
start_registry
start_local_cluster
install_cert_manager
install_kapp_controller
;;

cartographer)
install_cartographer
;;

cartographer-latest)
install_cartographer_latest_release
;;

pre-built-cartographer)
install_pre_built_cartographer_release
;;

example-dependencies)
install_git_serve
install_source_controller
install_kpack
install_knative_serving
install_tekton
install_tekton_git_cli_task
;;

example)
# Run each example end-to-end, tearing it down between runs.
test_runnable_example
teardown_runnable_example

setup_example_sc "basic-sc"
test_example_sc "basic-sc"
teardown_example_sc "basic-sc"

setup_example_sc "testing-sc"
test_example_sc "testing-sc"
teardown_example_sc "testing-sc"

test_gitops

log "all tests passed!!"
;;

teardown-example)
teardown_runnable_example
teardown_example_sc "basic-sc"
teardown_example_sc "testing-sc"
teardown_gitops_example
;;

teardown)
delete_containers
;;

*)
echo "error: unknown command '$command'."
show_usage_help
exit 1
;;
esac
done
}
install_cartographer() {
log "build cartographer release and install it"

# Build a release with images pushed to the local registry.
env \
REGISTRY="$REGISTRY" \
RELEASE_VERSION="$RELEASE_VERSION" \
DOCKER_CONFIG="$DOCKER_CONFIG" \
./hack/release.sh

# NOTE(review): this ytt invocation passes no `-f` template input, so its
# stdout contribution may be empty — confirm whether a `-f` argument is
# missing here. kapp also deploys ./release directly below.
ytt --ignore-unknown-comments \
--data-value registry="$REGISTRY" |
kapp deploy -a cartographer --yes \
-f ./release \
-f-
}
install_cartographer_latest_release() {
    # Deploy the newest release asset published on GitHub.
    log "installing latest published cartographer release"
    local url="https://github.com/vmware-tanzu/cartographer/releases/latest/download/cartographer.yaml"
    kapp deploy -a cartographer --yes -f "$url"
}

install_pre_built_cartographer_release() {
    # Deploy a release YAML built ahead of time (see RELEASE_YAML_PATH).
    log "installing pre built cartographer release"
    kapp deploy -a cartographer --yes -f "$RELEASE_YAML_PATH"
}
show_usage_help() {
    # Print the list of commands accepted by main().
    # Fix: the original help omitted `pre-built-cartographer`, which main()
    # supports.
    echo "usage: $0 <command...>"
    cat <<COMMANDS
commands:
- cluster                 brings up a local cluster and a registry
- cartographer            build a release of cartographer and install it in the
                          cluster
- cartographer-latest     install the latest published release of cartographer
- pre-built-cartographer  install a pre-built cartographer release yaml
                          (see RELEASE_YAML_PATH)
- example-dependencies    installs dependencies used throughout examples
- example                 install the example and runs a minimal test on it
- teardown                gets rid of the local cluster and registry created
- teardown-example        gets rid of just the example installed (workload, etc)
COMMANDS
}
display_vars() {
# Echo the effective configuration so CI logs show what this run used.
cat <<-DISPLAY
Variables:

COMMANDS: $*

DIR: $DIR
HOST_ADDR: $HOST_ADDR
KIND_IMAGE: $KIND_IMAGE
REGISTRY: $REGISTRY
REGISTRY_PORT: $REGISTRY_PORT
DISPLAY
}
start_registry() {
    # Bring up the local Docker image registry (idempotent: a container that
    # already exists is left alone).
    log "starting registry"

    if docker container inspect "$REGISTRY_CONTAINER_NAME" &>/dev/null; then
        echo "registry already exists"
        return
    fi

    docker run \
        --detach \
        --name "$REGISTRY_CONTAINER_NAME" \
        --publish "${REGISTRY_PORT}":5000 \
        registry:2
}
start_local_cluster() {
# Create the kind cluster (idempotent), with containerd configured to pull
# from the local registry over plain HTTP (insecure_skip_verify).
log "starting local cluster"

docker container inspect $KUBERNETES_CONTAINER_NAME &>/dev/null && {
echo "cluster already exists"
return
}

cat <<EOF | kind create cluster --config=-
kind: Cluster
apiVersion: kind.x-k8s.io/v1alpha4
name: cartographer
containerdConfigPatches:
- |-
[plugins."io.containerd.grpc.v1.cri".registry]
[plugins."io.containerd.grpc.v1.cri".registry.mirrors]
[plugins."io.containerd.grpc.v1.cri".registry.mirrors."${REGISTRY}"]
endpoint = ["http://${REGISTRY}"]
[plugins."io.containerd.grpc.v1.cri".registry.configs]
[plugins."io.containerd.grpc.v1.cri".registry.configs."${REGISTRY}".tls]
insecure_skip_verify = true
nodes:
- role: control-plane
image: ${KIND_IMAGE}
EOF
}
# ---------------------------------------------------------------------------
# Dependency installers. Each deploys one pinned upstream release with kapp;
# the ytt overlay strips resource requests so everything fits on a small
# local kind cluster. `|| true` on kubectl create keeps the calls idempotent.
# ---------------------------------------------------------------------------

install_git_serve() {
# Test git server used by the gitops examples.
kapp deploy --yes -a git-serve \
-f https://github.com/cirocosta/git-serve/releases/download/v$GIT_SERVE_VERSION/git-serve.yaml
}

install_cert_manager() {
ytt --ignore-unknown-comments \
-f "$DIR/overlays/remove-resource-requests-from-deployments.yaml" \
-f https://github.com/jetstack/cert-manager/releases/download/v$CERT_MANAGER_VERSION/cert-manager.yaml |
kapp deploy --yes -a cert-manager -f-
}

install_source_controller() {
# Flux source-controller, deployed into its own namespace with admin rights.
kubectl create namespace gitops-toolkit || true

kubectl create clusterrolebinding gitops-toolkit-admin \
--clusterrole=cluster-admin \
--serviceaccount=gitops-toolkit:default || true

ytt --ignore-unknown-comments \
-f "$DIR/overlays/remove-resource-requests-from-deployments.yaml" \
-f https://github.com/fluxcd/source-controller/releases/download/v$SOURCE_CONTROLLER_VERSION/source-controller.crds.yaml \
-f https://github.com/fluxcd/source-controller/releases/download/v$SOURCE_CONTROLLER_VERSION/source-controller.deployment.yaml |
kapp deploy --yes -a gitops-toolkit --into-ns gitops-toolkit -f-
}

install_kpack() {
ytt --ignore-unknown-comments \
-f "$DIR/overlays/remove-resource-requests-from-deployments.yaml" \
-f https://github.com/pivotal/kpack/releases/download/v$KPACK_VERSION/release-$KPACK_VERSION.yaml |
kapp deploy --yes -a kpack -f-
}

install_kapp_controller() {
# Ensure script halts if kubectl is not installed
kubectl version --client

kubectl create clusterrolebinding default-admin \
--clusterrole=cluster-admin \
--serviceaccount=default:default || true

ytt --ignore-unknown-comments \
-f "$DIR/overlays/remove-resource-requests-from-deployments.yaml" \
-f https://github.com/vmware-tanzu/carvel-kapp-controller/releases/download/v$KAPP_CONTROLLER_VERSION/release.yml |
kapp deploy --yes -a kapp-controller -f-
}

install_knative_serving() {
ytt --ignore-unknown-comments \
-f https://github.com/knative/serving/releases/download/v$KNATIVE_SERVING_VERSION/serving-core.yaml \
-f https://github.com/knative/serving/releases/download/v$KNATIVE_SERVING_VERSION/serving-crds.yaml \
-f "$DIR/overlays/remove-resource-requests-from-deployments.yaml" |
kapp deploy --yes -a knative-serving -f-
}

install_tekton() {
ytt --ignore-unknown-comments \
-f https://storage.googleapis.com/tekton-releases/pipeline/previous/v$TEKTON_VERSION/release.yaml \
-f "$DIR/overlays/remove-resource-requests-from-deployments.yaml" |
kapp deploy --yes -a tekton -f-
}

install_tekton_git_cli_task() {
# Catalog task the gitops example uses to run git commands in a pipeline.
kapp deploy --yes -a tekton-git-cli -f https://raw.githubusercontent.com/tektoncd/catalog/main/task/git-cli/0.2/git-cli.yaml
}
setup_example_sc() {
# Deploy the shared setup plus the named example supply chain, rendering
# both with ytt against the local registry.
export test_name="$1"

kapp deploy --yes -a "setup-example-$test_name" \
-f <(ytt --ignore-unknown-comments \
-f "$DIR/../examples/shared" \
-f "$DIR/../examples/$test_name/values.yaml" \
--data-value registry.server="$REGISTRY" \
--data-value image_prefix="$REGISTRY/example-$test_name-")

kapp deploy --yes -a "example-$test_name" \
-f <(ytt --ignore-unknown-comments \
-f "$DIR/../examples/$test_name" \
--data-value registry.server="$REGISTRY" \
--data-value workload_name="$test_name" \
--data-value image_prefix="$REGISTRY/example-$test_name-")
}

teardown_example_sc() {
# Delete the example app and its setup.
export test_name="$1"

kapp delete --yes -a "example-$test_name"
kapp delete --yes -a "setup-example-$test_name"
# until [[ -z $(kubectl get pods -l "serving.knative.dev/configuration=$test_name" -o name) ]]; do sleep 1; done
log "teardown of '$test_name' complete"
}

test_example_sc() {
# Poll (5 rounds of a 15..1-second countdown) until a knative pod for the
# workload shows up; dump the workload tree between rounds for debugging.
export test_name="$1"
log "testing '$test_name'"

for _ in {1..5}; do
for sleep_duration in {15..1}; do
local deployed_pods
deployed_pods=$(kubectl get pods \
-l "serving.knative.dev/configuration=$test_name" \
-o name)

if [[ "$deployed_pods" == *"$test_name"* ]]; then
log "testing '$test_name' SUCCEEDED! sweet"
return 0
fi

echo "- waiting $sleep_duration seconds"
sleep "$sleep_duration"
done
kubectl tree workload "$test_name"
done

log "testing '$test_name' FAILED :("
exit 1
}
# Tear down everything the gitops scenario installed: the in-cluster git
# server, the gitwriter-sc example and its setup app, and the delivery app.
teardown_gitops_example() {
log "cleaning up git repo"
kubectl delete -f ./hack/git-server.yaml
test_name="gitwriter-sc"
kapp delete --yes -a "example-$test_name"
kapp delete --yes -a "setup-example-$test_name"
kapp delete --yes -a example-delivery
log "teardown of '$test_name' complete"
}
# Exercise the runnable-tekton example end to end:
#   1. apply a passing test definition and wait for a successful TaskRun,
#   2. patch it to a failing test and verify the runnable's output revision
#      does NOT advance on failure,
#   3. patch back to a passing test and verify the revision DOES advance.
# Exits 1 if any wait times out (>20 polls) or an expectation fails.
test_runnable_example() {
  log "test runnable"
  first_output_revision=""
  second_output_revision=""
  third_output_revision=""
  kubectl apply -f "$DIR/../examples/runnable-tekton/00-setup"
  kubectl apply -f "$DIR/../examples/runnable-tekton/01-tests-pass"
  counter=0
  until [[ -n $(kubectl get taskruns -o json | jq '.items[] | .status.conditions[0].status' | grep True) ]]; do
    sleep 5
    if [[ $counter -gt 20 ]]; then
      log "runnable test fails"
      exit 1
    else
      echo "waiting 5 seconds for expected passing test to succeed"
      # BUGFIX: was "(( counter+1 ))", which evaluates the sum but never
      # assigns it, so the timeout above could never trigger.
      counter=$((counter + 1))
    fi
  done
  sleep 5
  first_output_revision=$(kubectl get runnable test -o json | jq '.status.outputs.revision')
  kubectl patch runnable test --type merge --patch "$(cat "$DIR/../examples/runnable-tekton/02-tests-fail/runnable-patch.yml")"
  counter=0
  until [[ -n $(kubectl get taskruns -o json | jq '.items[] | .status.conditions[0].status' | grep False) ]]; do
    sleep 5
    if [[ $counter -gt 20 ]]; then
      log "runnable test fails"
      exit 1
    else
      echo "waiting 5 seconds for expected failing test to fail"
      counter=$((counter + 1))  # BUGFIX: see note on the first loop
    fi
  done
  sleep 5
  second_output_revision=$(kubectl get runnable test -o json | jq '.status.outputs.revision')
  # A failed TaskRun must not advance the output revision.
  if [[ "$first_output_revision" != "$second_output_revision" ]]; then
    log "runnable test fails"
    exit 1
  fi
  kubectl patch runnable test --type merge --patch "$(cat "$DIR/../examples/runnable-tekton/03-tests-pass/runnable-patch.yml")"
  counter=0
  until [[ $(kubectl get taskruns -o json | jq '.items[] | .status.conditions[0].status' | grep True | wc -l) -eq 2 ]]; do
    sleep 5
    if [[ $counter -gt 20 ]]; then
      log "runnable test fails"
      exit 1
    else
      echo "waiting 5 seconds for expected passing test to succeed"
      counter=$((counter + 1))  # BUGFIX: see note on the first loop
    fi
  done
  sleep 5
  third_output_revision=$(kubectl get runnable test -o json | jq '.status.outputs.revision')
  # A second passing TaskRun must produce a fresh output revision.
  if [[ "$first_output_revision" == "$third_output_revision" ]]; then
    log "runnable test fails"
    exit 1
  fi
  log "runnable test passes"
  return 0
}
# Remove the runnable-tekton example resources; tolerate already-deleted objects.
teardown_runnable_example() {
  local manifest
  for manifest in 00-setup 01-tests-pass; do
    kubectl delete -f "$DIR/../examples/runnable-tekton/$manifest" --ignore-not-found
  done
  log "teardown of runnable example complete"
}
# Full gitops scenario: build from source and commit to git, verify the
# commit landed, deploy the app from that commit, verify it runs, clean up.
test_gitops() {
setup_source_to_gitops
test_source_to_gitops
setup_gitops_to_app
test_gitops_to_app
teardown_gitops_example
}
# Install the in-cluster git server plus the gitwriter-sc example supply
# chain, wired to build from the hello-world repo and push results to
# $GITOPS_REPO on $GITOPS_BRANCH using $GITOPS_COMMIT_MESSAGE — the marker
# test_source_to_gitops later looks for.
setup_source_to_gitops() {
log "setting up source-to-gitops"
kubectl apply -f ./hack/git-server.yaml
test_name="gitwriter-sc"
kapp deploy --yes -a "setup-example-$test_name" \
-f <(ytt --ignore-unknown-comments \
-f "$DIR/../examples/shared" \
-f "$DIR/../examples/$test_name/values.yaml" \
--data-value registry.server="$REGISTRY" \
--data-value image_prefix="$REGISTRY/example-$test_name-")
kapp deploy --yes -a "example-$test_name" \
-f <(ytt --ignore-unknown-comments \
-f "$DIR/../examples/$test_name" \
--data-value registry.server="$REGISTRY" \
--data-value workload_name="$test_name" \
--data-value image_prefix="$REGISTRY/example-$test_name-" \
--data-value source_repo.url="https://github.com/kontinue/hello-world" \
--data-value source_repo.branch="main" \
--data-value git_repository="$GITOPS_REPO" \
--data-value git_branch="$GITOPS_BRANCH" \
--data-value git_user_name="gitops-user" \
--data-value git_user_email="gitops-user@example.com" \
--data-value git_commit_message="$GITOPS_COMMIT_MESSAGE")
}
# Verify the source-to-gitops leg: port-forward the in-cluster git server,
# clone the gitops repo, and poll (decaying 20..1s backoff) until the supply
# chain's commit — identified by $GITOPS_COMMIT_MESSAGE — lands on
# $GITOPS_BRANCH. Returns 0 on success, exits 1 on timeout.
test_source_to_gitops() {
log "testing source-to-gitops"
port=$(available_port)
until [[ $(kubectl get deployment git-server -o json | jq '.status.readyReplicas') == 1 ]]; do
log "waiting for git-server deployment"
sleep 10
done
# Background the port-forward inside a subshell so it is detached from the
# script's job control.
(kubectl port-forward service/git-server $port:80 &) > /dev/null
sleep 5
SUCCESS=false
pushd "$(mktemp -d)"
log "trying to clone"
git clone "http://localhost:$port/gitops-test.git"
pushd "gitops-test"
for sleep_duration in {20..1}; do
git fetch --all --prune
# The target branch may not exist until the supply chain pushes it; retry.
git checkout "$GITOPS_BRANCH" > /dev/null 2> /dev/null || { echo "- waiting $sleep_duration seconds" && sleep "$sleep_duration" && continue; }
git pull > /dev/null 2> /dev/null
MOST_RECENT_GIT_MESSAGE="$(git log -1 --pretty=%B)"
if [[ "$GITOPS_COMMIT_MESSAGE" = "$MOST_RECENT_GIT_MESSAGE" ]]; then
log 'gitops worked! sweet'
SUCCESS=true
break
fi
echo "- waiting $sleep_duration seconds"
sleep "$sleep_duration"
done
popd
popd
if [[ "$SUCCESS" = true ]]; then
return 0
else
log 'FAILED :('
exit 1
fi
}
# Print a free TCP port: bind a socket to port 0 so the OS picks one, report
# the chosen port, and release it.
available_port() {
  python -c 'import socket
s = socket.socket()
s.bind(("", 0))
print(s.getsockname()[1])
s.close()'
}
# Deploy the basic-delivery example pointed at the gitops repo/branch that
# setup_source_to_gitops configured the supply chain to write to.
setup_gitops_to_app() {
log "setting up gitops-to-app"
ytt --ignore-unknown-comments \
-f "$DIR/../examples/basic-delivery" \
--data-value git_writer.repository="$GITOPS_REPO" \
--data-value git_writer.branch="$GITOPS_BRANCH" |
kapp deploy --yes -a example-delivery -f-
}
# Verify the gitops-to-app leg: poll (5 rounds of a decaying 15..1s backoff)
# for a knative pod created from the 'gitwriter-sc' Configuration, dumping
# the deliverable tree between rounds. Exits 1 if no pod appears.
test_gitops_to_app() {
  log "testing gitops-to-app"
  for _ in {1..5}; do
    for sleep_duration in {15..1}; do
      local deployed_pods
      deployed_pods=$(kubectl get pods \
        -l "serving.knative.dev/configuration=gitwriter-sc" \
        -o name)
      if [[ "$deployed_pods" == *"gitwriter-sc"* ]]; then
        # CONSISTENCY FIX: the success message previously interpolated the
        # global $test_name set by an unrelated function; name this check
        # explicitly, matching the surrounding log lines.
        log "testing gitops-to-app SUCCEEDED! sweet"
        return 0
      fi
      echo "- waiting $sleep_duration seconds"
      sleep "$sleep_duration"
    done
    kubectl tree deliverable gitops
  done
  log "testing gitops-to-app FAILED :("
  exit 1
}
# Best-effort removal of the local registry and kubernetes containers;
# failures (e.g. container already gone) are ignored.
delete_containers() {
  docker rm --force $REGISTRY_CONTAINER_NAME || true
  docker rm --force $KUBERNETES_CONTAINER_NAME || true
}
# Emphasized section logger: prints the message in bold, padded by blank
# lines, to stderr so it survives stdout redirection.
log() {
  >&2 printf '\n\t\033[1m%s\033[0m\n\n' "$1"
}
# Script entrypoint: forward all CLI arguments to main (defined earlier in this file).
main "$@"
|
#!/bin/bash
# Submit post-biofix analog-finding jobs (RCP8.5, with precipitation) to the
# cluster scheduler: for each climate-model run listed in the loop, read the
# shared parameter file line by line and qsub each line.
# NOTE(review): each LINE is passed unquoted to qsub — presumably a job
# script path (possibly with arguments); confirm before quoting it.
# bcc BNU Can CNRM GFDLG GFDLM
for runname in GFDLM
do
cd /home/hnoorazar/analog_codes/00_post_biofix/02_find_analogs/rcp85_qsubs/$runname
cat /home/hnoorazar/analog_codes/parameters/post_biofix/q_rcp85_w_precip | while read LINE ; do
qsub $LINE
done
done
|
import { createSelector } from "reselect";
import { includes, filter, map, orderBy, reduce, uniq } from "lodash";
import { getAllResearchers } from "../researchers/selectors";
import moment from "moment-timezone";
import {
LIVE_EVENTS_TAB,
PENDING_EVENTS_TAB,
STATES_LEGS,
FEDERAL_RADIO_BUTTON,
DATE_TIMESTAMP,
DATE_OBJ,
DATE_CREATED,
} from "../../constants";
import {
getAllOldEventsWithUserEmails,
getAllEvents,
getAllFederalAndStateLiveEvents,
} from "../events/selectors";
import { getCurrentUser } from "../users/selectors";
import { get116thCongress } from "../mocs/selectors";
import { getResearchersEmailById } from "../researchers/selectors";
// ----- Plain accessors into the `selections` slice of the redux store -----
// The memoized/derived selectors below are built from these.
export const getPendingOrLiveTab = (state) => state.selections.selectedEventTab;
export const getActiveFederalOrState = (state) =>
state.selections.federalOrState;
// Editing mode; "candidate" switches the people-lookup URLs below.
export const getMode = (state) => state.selections.mode;
export const getCurrentHashLocation = (state) =>
state.selections.currentHashLocation;
export const getOldEventsActiveFederalOrState = (state) =>
state.selections.federalOrStateOldEvents;
// [start, end] pair in epoch milliseconds used by the archive range lookup.
export const getDateRange = (state) => state.selections.dateLookupRange;
export const getStatesToFilterArchiveBy = (state) =>
state.selections.filterByState;
export const includeLiveEventsInLookup = (state) =>
state.selections.includeLiveEvents;
export const getTempAddress = (state) => state.selections.tempAddress;
// Archive-filter inputs (chamber, event type, legislative body, name, researcher).
export const getChamber = (state) => state.selections.filterByChamber;
export const getEventTypes = (state) => state.selections.filterByEventType;
export const getLegislativeBody = (state) =>
state.selections.filterByLegislativeBody;
export const getNameFilter = (state) => state.selections.filterByName;
export const getResearcherFilter = (state) =>
state.selections.filterByResearcher;
// Which date field range lookups compare against (e.g. timestamp vs dateCreated).
export const getDateLookupType = (state) => state.selections.dateLookupType;
export const getFilterSMSToLastWeek = (state) =>
state.selections.filterSMSToLastWeek;
// Firebase path for live town-hall events: the federal collection by default,
// or the per-state collection when a state legislature is selected.
export const getLiveEventUrl = createSelector(
  [getActiveFederalOrState],
  (federalOrState) =>
    federalOrState === FEDERAL_RADIO_BUTTON
      ? "townHalls"
      : `state_townhalls/${federalOrState}`
);
// Firebase path for pending user submissions, federal or per-state.
export const getSubmissionUrl = createSelector(
  [getActiveFederalOrState],
  (federalOrState) =>
    federalOrState === FEDERAL_RADIO_BUTTON
      ? "UserSubmission"
      : `state_legislators_user_submission/${federalOrState}`
);
// Firebase path for archived town halls, federal or per-state.
export const getArchiveUrl = createSelector(
  [getActiveFederalOrState],
  (federalOrState) =>
    federalOrState === FEDERAL_RADIO_BUTTON
      ? "archived_town_halls"
      : `archived_state_town_halls/${federalOrState}`
);
// URL for the event list currently on screen: the live collection on the
// live tab, the submissions collection on the pending tab, otherwise null.
export const getEventsToShowUrl = createSelector(
  [getPendingOrLiveTab, getSubmissionUrl, getLiveEventUrl],
  (liveOrPending, submissionUrl, liveEventUrl) => {
    switch (liveOrPending) {
      case LIVE_EVENTS_TAB:
        return liveEventUrl;
      case PENDING_EVENTS_TAB:
        return submissionUrl;
      default:
        return null;
    }
  }
);
// Lookup-key collection for people, by mode (candidate vs legislator) and
// by whether a state legislature is selected.
export const getPeopleNameUrl = createSelector(
  [getActiveFederalOrState, getMode],
  (federalOrState, mode) => {
    const isStateLeg = includes(STATES_LEGS, federalOrState);
    if (mode === "candidate") {
      return isStateLeg
        ? `state_candidate_keys/${federalOrState}`
        : "candidate_keys";
    }
    return isStateLeg ? `state_legislators_id/${federalOrState}` : "mocID";
  }
);
// Data collection for people records. Candidate mode always uses the single
// candidate_data collection; legislators split by federal vs state.
export const getPeopleDataUrl = createSelector(
  [getActiveFederalOrState, getMode],
  (federalOrState, mode) => {
    if (mode === "candidate") {
      return "candidate_data";
    }
    return includes(STATES_LEGS, federalOrState)
      ? `state_legislators_data/${federalOrState}`
      : "mocData";
  }
);
// Normalize a raw event record — either a live Firebase event or an older
// archived record (legacy keys are capitalized: Member, Location, Notes,
// Time) — into the flat shape the admin tables and CSV export consume.
// Missing string fields default to a single space so downstream cells are
// never undefined.
export const normalizeEventSchema = (eventData) => {
let normalizedEvent = {};
normalizedEvent.editable = eventData.editable;
// Collapse a schema-validation error object into one display string.
normalizedEvent.errorMessage = (() => {
if (eventData.error) {
return `${eventData.error.dataPath} ${eventData.error.message}`;
}
return " ";
})();
normalizedEvent.eventId = eventData.eventId;
// Older records stored the submitter under userEmail.
normalizedEvent.enteredBy = eventData.enteredBy || eventData.userEmail;
normalizedEvent.eventName = eventData.eventName ? eventData.eventName : " ";
normalizedEvent.displayName = eventData.displayName || eventData.Member;
normalizedEvent.officePersonId = eventData.officePersonId || " ";
normalizedEvent.meetingType = eventData.meetingType || " ";
normalizedEvent.location = eventData.location
? eventData.location
: eventData.Location
? eventData.Location
: " ";
normalizedEvent.address = eventData.address || " ";
normalizedEvent.lat = eventData.lat || " ";
normalizedEvent.lng = eventData.lng || " ";
normalizedEvent.govtrack_id = eventData.govtrack_id || 0;
normalizedEvent.party = eventData.party || " ";
normalizedEvent.level = eventData.level || " ";
normalizedEvent.chamber = eventData.chamber || " ";
normalizedEvent.state = eventData.state || " ";
normalizedEvent.district = eventData.district;
normalizedEvent.timestamp = eventData.timestamp || eventData.dateObj;
// Format the start time in the event's own zone when one is recorded;
// legacy rows instead carry a pre-formatted dateString + Time pair.
if (eventData.timeZone) {
normalizedEvent.timeStart = eventData.dateString
? moment(`${eventData.dateString} ${eventData.Time}`).format(
"MMMM Do YYYY, h:mm a z"
) + `${eventData.timeZone}`
: moment
.tz(eventData.timeStart, eventData.timeZone)
.format("MMMM Do YYYY, h:mm a z");
} else {
normalizedEvent.timeStart = eventData.dateString
? `${eventData.dateString} ${eventData.Time}`
: moment.tz(eventData.timeStart).format("MMMM Do YYYY, h:mm a z");
}
// Live events in Firebase currently store timeEnd as human-readable strings, e.g. "12:00 PM", instead of ISO-8601
normalizedEvent.timeEnd = eventData.timeEnd || " ";
normalizedEvent.timeZone = eventData.timeZone || " ";
normalizedEvent.dateValid = eventData.dateValid || false;
normalizedEvent.validated = eventData.validated || false;
normalizedEvent.ada_accessible = eventData.ada_accessible || false;
normalizedEvent.error = eventData.error || false;
// Double quotes would break CSV quoting downstream; swap for single quotes.
normalizedEvent.notes = (() => {
if (eventData.Notes) {
return eventData.Notes.replace(/"/g, "'");
}
if (eventData.notes) {
return eventData.notes.replace(/"/g, "'");
}
return " ";
})();
normalizedEvent.link =
eventData.link ||
"https://townhallproject.com/?eventId=" + eventData.eventId;
normalizedEvent.iconFlag = eventData.iconFlag || " ";
normalizedEvent.dateCreated = eventData.dateCreated || " ";
// Live events in Firebase store lastUpdated as a timestamp. Archived events in Firestore use ISO-8601.
normalizedEvent.lastUpdated = moment(eventData.lastUpdated).toISOString();
normalizedEvent.internalNotes = eventData.internalNotes || " ";
return normalizedEvent;
};
// Assemble the event set for the analysis views: archived ("old") events,
// optionally narrowed by dateCreated range, plus — when requested — live
// events filtered by the selected date field. Old events are marked
// editable, live events read-only.
export const getAllEventsForAnalysis = createSelector(
[
includeLiveEventsInLookup,
getAllOldEventsWithUserEmails,
getAllFederalAndStateLiveEvents,
getDateRange,
getDateLookupType,
],
(includeLive, oldEvents, liveEvents, dateRange, dateLookupType) => {
// NOTE(review): these map callbacks set `editable` on the event objects
// in place — presumably the upstream selectors hand over fresh copies so
// store state is not mutated; confirm before relying on that.
oldEvents = map(oldEvents, (event) => {
event.editable = true;
return event;
});
if (dateLookupType === DATE_CREATED) {
oldEvents = filter(oldEvents, (event) => {
if (event[DATE_CREATED]) {
let date = moment(event[DATE_CREATED]).valueOf();
return date >= dateRange[0] && date <= dateRange[1];
}
// Older rows lack dateCreated; fall back to lastUpdated.
let date = moment(event.lastUpdated).valueOf();
return date >= dateRange[0] && date <= dateRange[1];
});
}
if (includeLive) {
liveEvents = filter(liveEvents, (event) => {
// Live events keep the event date under dateObj rather than timestamp.
const dateKey =
dateLookupType === DATE_TIMESTAMP ? DATE_OBJ : dateLookupType;
let date;
if (event[dateKey] && moment(event[dateKey]).isValid()) {
date = moment(event[dateKey]).valueOf();
} else if (
!event[dateKey] &&
event.dateString &&
dateKey === DATE_OBJ
) {
date = moment(event.dateString).valueOf();
} else {
// No usable date: exclude from a range lookup.
return false;
}
return date >= dateRange[0] && date <= dateRange[1];
});
liveEvents = map(liveEvents, (event) => {
event.editable = false;
return event;
});
return [...oldEvents, ...liveEvents];
}
return oldEvents;
}
);
// Count of state-level events in the current analysis result set.
export const getReturnedStateEventsLength = createSelector(
  [getAllEventsForAnalysis],
  (allEvents) =>
    allEvents.filter((event) => event.level === "state").length
);
// Count of events carrying a (truthy) validation error.
export const getReturnedErrorEventsLength = createSelector(
  [getAllEventsForAnalysis],
  (allEvents) => allEvents.filter((event) => Boolean(event.error)).length
);
// Total event count before any archive filters are applied.
export const getTotalUnFilteredOldEventsCount = createSelector(
  [getAllEventsForAnalysis],
  ({ length }) => length
);
// Apply every user-selected archive filter (states, chamber, event types,
// legislative body, member name, researcher) to the analysis event set, after
// normalizing each record, and sort newest-first by timestamp.
export const getFilteredEvents = createSelector(
[
getAllEventsForAnalysis,
getStatesToFilterArchiveBy,
getChamber,
getEventTypes,
getLegislativeBody,
getNameFilter,
getResearcherFilter,
getResearchersEmailById,
],
(
allEvents,
states,
chamber,
events,
legislativeBody,
name,
researcherEmail,
researchersEmailById
) => {
let filteredEvents = allEvents;
// Bring every record into the common normalized shape first.
filteredEvents = map(filteredEvents, normalizeEventSchema);
if (states.length) {
filteredEvents = filter(filteredEvents, (event) => {
return includes(states, event.state);
});
}
if (chamber !== "all") {
filteredEvents = filter(filteredEvents, (event) => {
return chamber === event.chamber;
});
}
if (events.length > 0) {
filteredEvents = filter(filteredEvents, (event) => {
return includes(events, event.meetingType);
});
}
// Level filter always applies; "federal" also admits records whose level
// was normalized to the " " placeholder.
filteredEvents = filter(filteredEvents, (event) => {
if (legislativeBody === "federal") {
return event.level === "federal" || event.level === " ";
}
return event.level === "state" && event.state === legislativeBody;
});
if (name) {
filteredEvents = filter(filteredEvents, (event) => {
return name === event.displayName;
});
}
if (researcherEmail) {
// enteredBy holds a researcher id; resolve it to an email to compare.
filteredEvents = filter(filteredEvents, (event) => {
return researcherEmail === researchersEmailById[event.enteredBy];
});
}
filteredEvents = orderBy(filteredEvents, ["timestamp"], ["desc"]);
return filteredEvents;
}
);
// Unique member display names present in the filtered events, in order of
// first appearance.
export const getFilteredUniqueNames = createSelector(
  [getFilteredEvents],
  (allEvents) => uniq(map(allEvents, "displayName"))
);
// Size of the filtered event set (shown next to the unfiltered total).
export const getFilteredOldEventsLength = createSelector(
  [getFilteredEvents],
  (filtered) => filtered.length
);
// Copy the filtered events for download, resolving each event's `enteredBy`
// (which may hold a researcher's id, uid, or email) to the researcher's
// email address; events with no submitter show "Not available".
export const getEventsAsDownloadObjects = createSelector(
  [getFilteredEvents, getAllResearchers],
  (allEvents, researchers) => {
    return map(allEvents, (eventData) => {
      const newEventData = { ...eventData };
      if (!eventData.enteredBy) {
        // FIX: this fallback previously ran inside the researcher loop, so
        // it was skipped entirely when the researcher list was empty.
        newEventData.enteredBy = "Not available";
        return newEventData;
      }
      // Stop at the first matching researcher instead of scanning the
      // whole list for every event.
      const match = researchers.find(
        (researcher) =>
          researcher.id === eventData.enteredBy ||
          researcher.uid === eventData.enteredBy ||
          researcher.email === eventData.enteredBy
      );
      if (match) {
        newEventData.enteredBy = match.email;
      }
      return newEventData;
    });
  }
);
// Convert raw events into the legacy spreadsheet column layout
// (Member, Chamber, Event_Name, ...) used by the CSV download.
export const getEventsForDownload = createSelector(
[getAllEvents],
(allEvents) => {
return map(allEvents, (eventData) => {
const convertedTownHall = {};
convertedTownHall.Member = eventData.displayName || eventData.Member;
convertedTownHall.Chamber = eventData.chamber;
convertedTownHall.Event_Name = eventData.eventName
? eventData.eventName
: " ";
convertedTownHall.Location = eventData.Location
? eventData.Location
: " ";
convertedTownHall.Meeting_Type = eventData.meetingType;
// District renders as e.g. "CA-12"; members without a district get just
// the state plus a space placeholder.
let district = eventData.district ? "-" + eventData.district : " ";
convertedTownHall.District = eventData.state + district;
convertedTownHall.govtrack_id = eventData.govtrack_id || " ";
convertedTownHall.Party = eventData.party;
convertedTownHall.State = eventData.state;
convertedTownHall.State_name = eventData.stateName
? eventData.stateName
: eventData.State;
// Exactly one of Repeating_Event / Date is populated per row.
if (eventData.repeatingEvent) {
convertedTownHall.Repeating_Event = eventData.repeatingEvent;
convertedTownHall.Date = " ";
} else if (eventData.dateString) {
convertedTownHall.Repeating_Event = " ";
convertedTownHall.Date = eventData.dateString;
} else {
convertedTownHall.Repeating_Event = " ";
convertedTownHall.Date = moment(eventData.dateObj).format(
"ddd, MMM D YYYY"
);
}
convertedTownHall.Time_Start = eventData.Time;
convertedTownHall.Time_End = eventData.timeEnd || " ";
convertedTownHall.Time_Zone = eventData.timeZone || " ";
convertedTownHall.Zone_ID = eventData.zoneString || " ";
convertedTownHall.Address = eventData.address;
// Swap double quotes so Notes cannot break the CSV quoting.
convertedTownHall.Notes = eventData.Notes
? eventData.Notes.replace(/"/g, "'")
: " ";
convertedTownHall.Map_Icon = eventData.iconFlag;
convertedTownHall.Link =
eventData.link ||
"https://townhallproject.com/?eventId=" + eventData.eventId;
convertedTownHall.Link_Name = eventData.linkName || " ";
convertedTownHall.dateNumber = eventData.yearMonthDay;
convertedTownHall.Last_Updated = moment(eventData.lastUpdated).format(
"MMM D YYYY, h:mm a"
);
return convertedTownHall;
});
}
);
// Events updated since the user's last download; everything qualifies when
// the user has never downloaded before.
export const getNewEventsForDownload = createSelector(
  [getEventsForDownload, getCurrentUser],
  (allEvents, user) =>
    filter(allEvents, (event) => {
      if (!user.last_event_download) {
        return true;
      }
      const lastUpdatedMs = moment(
        event.Last_Updated,
        "MMM D YYYY, h:mm a"
      ).valueOf();
      return lastUpdatedMs > user.last_event_download;
    })
);
// Tally filtered events by party initial (D/R/I, "None" when party is empty)
// into the [{party, value}] rows the archive chart consumes.
export const getDataForArchiveChart = createSelector(
[getFilteredEvents],
(allEvents) => {
if (!allEvents || !allEvents.length) {
return [];
}
return map(
reduce(
allEvents,
(acc, cur) => {
const party = cur.party ? cur.party.substring(0, 1) : "None";
// Only pre-seeded buckets are counted: an initial outside D/R/I
// (e.g. "L") is silently dropped rather than added to "None" —
// presumably intentional; confirm if third parties should chart.
if (acc[party] >= 0) {
acc[party] = acc[party] + 1;
}
return acc;
},
{
D: 0,
R: 0,
I: 0,
None: 0,
}
),
(value, key) => {
return {
party: key,
value,
};
}
);
}
);
// Per-member report for the 116th Congress: how many of the filtered events
// belong to each member, whether any of them is a Town Hall, and which event
// types/ids they span.
export const get116MissingMemberReport = createSelector(
  [getFilteredEvents, get116thCongress],
  (events, mocs) =>
    map(mocs, (moc) => {
      const memberEvents = filter(events, { govtrack_id: moc.govtrack_id });
      const townHalls = filter(memberEvents, { meetingType: "Town Hall" });
      return {
        memberId: moc.govtrack_id,
        hasEvent: townHalls.length > 0,
        name: moc.displayName,
        party: moc.party,
        chamber: moc.chamber,
        state: moc.state,
        district: moc.district || "",
        number_of_town_halls: townHalls.length,
        type_of_events: uniq(map(memberEvents, "meetingType")),
        eventIds: map(memberEvents, "eventId"),
      };
    })
);
// Senate tally of the missing-member report: counts of members with/without
// a Town Hall event, bucketed by party (D / R / other).
export const get116CongressSenateResults = createSelector(
  [get116MissingMemberReport],
  (mocs) => {
    const senators = filter(mocs, { chamber: "upper" });
    return reduce(
      senators,
      (acc, cur) => {
        // ROBUSTNESS: cur.party can be undefined/empty for some member
        // records; previously cur.party[0] threw a TypeError. Unknown or
        // missing parties now count in the "other" buckets.
        const partyInitial = cur.party ? cur.party[0].toLowerCase() : "";
        const suffix = cur.hasEvent ? "Events" : "Missing";
        if (partyInitial === "d") {
          acc[`d${suffix}`]++;
        } else if (partyInitial === "r") {
          acc[`r${suffix}`]++;
        } else {
          acc[`other${suffix}`]++;
        }
        return acc;
      },
      {
        dMissing: 0,
        dEvents: 0,
        rMissing: 0,
        rEvents: 0,
        otherMissing: 0,
        otherEvents: 0,
      }
    );
  }
);
// House tally of the missing-member report: counts of members with/without
// a Town Hall event, bucketed by party (D / R / other).
export const get116CongressHouseResults = createSelector(
  [get116MissingMemberReport],
  (mocs) => {
    const representatives = filter(mocs, { chamber: "lower" });
    return reduce(
      representatives,
      (acc, cur) => {
        // ROBUSTNESS: cur.party can be undefined/empty for some member
        // records; previously cur.party[0] threw a TypeError. Unknown or
        // missing parties now count in the "other" buckets.
        const partyInitial = cur.party ? cur.party[0].toLowerCase() : "";
        const suffix = cur.hasEvent ? "Events" : "Missing";
        if (partyInitial === "d") {
          acc[`d${suffix}`]++;
        } else if (partyInitial === "r") {
          acc[`r${suffix}`]++;
        } else {
          acc[`other${suffix}`]++;
        }
        return acc;
      },
      {
        dMissing: 0,
        dEvents: 0,
        rMissing: 0,
        rEvents: 0,
        otherMissing: 0,
        otherEvents: 0,
      }
    );
  }
);
// Zip the senate and house tallies into chart series: one pair of
// {x: chamber, y: count} points per tally key (dMissing, dEvents, ...).
export const getCongressReport = createSelector(
  [get116CongressSenateResults, get116CongressHouseResults],
  (senateCount, houseCount) =>
    map(senateCount, (senateValue, key) => [
      { x: "senate", y: senateValue },
      { x: "house", y: houseCount[key] },
    ])
);
|
<reponame>matthewrobb/js2php<gh_stars>1-10
/*global global, testSuite*/
// Core language-semantics suite for the js2php runtime: every assertion pins
// a JavaScript behavior the transpiled PHP must reproduce exactly.
testSuite('core', function(assert) {
// Helper: throw a value and return whatever the catch clause receives.
function throwCatch(value) {
try {
throw value;
} catch(e) {
return e;
}
}
// Object.prototype.toString tags and String boxing.
testSuite('object', function() {
var toString = Object.prototype.toString;
assert('toString.call(null)', toString.call(null) === '[object Null]');
assert('toString.call(undefined)', toString.call(undefined) === '[object Undefined]');
assert('toString.call("")', toString.call('') === '[object String]');
assert('toString.call(0)', toString.call(0) === '[object Number]');
assert('toString.call(false)', toString.call(false) === '[object Boolean]');
assert('toString.call([])', toString.call([]) === '[object Array]');
assert('toString.call(new String)', toString.call(new String()) === '[object String]');
assert('new String() instanceof String', new String('s') instanceof String);
});
// `in` tests key existence, not truthiness of the value.
testSuite('in operator', function() {
var o = {a: null};
assert('should be true for null', 'a' in o);
o.b = void 0;
assert('should be true for undefined', 'b' in o);
assert('should be false for non-existant', !('c' in o));
delete o.b;
assert('should be false when deleted', !('b' in o));
});
// for..in enumerates own + inherited keys, including falsy-valued ones.
testSuite('for..in', function() {
var nothing;
var a = [];
var b = {a: null, b: nothing, c: 0, d: false, e: '1'};
for (var key in b) {
a.push(key);
}
assert('should iterate keys', a.join('') === 'abcde');
var c = Object.create(b);
assert('should inherit', c.c === 0);
c.f = null;
a = [];
for (var k in c) {
a.push(k);
}
assert('should iterate all keys', a.sort().join('') === 'abcdef');
var d = Object.create(null);
//todo: Object.getPrototypeOf
assert('should allow null proto', Object.keys(d).length === 0);
for (var n in d) {
var found = true;
}
assert('should have no keys', found !== true);
});
// The `global` object must hide PHP natives (GLOBALS, _SERVER) while
// exposing JS built-ins, and built-in globals must be immutable.
testSuite('global', function() {
var nothing = void 0;
var a = [];
for (var key in global) {
a.push(key);
}
assert('keys not contain natives', a.indexOf('GLOBALS') === -1);
assert('keys contain Object', a.indexOf('Object') !== -1);
assert('keys contain self', a.indexOf('global') !== -1);
assert('in operator not find natives', !('_SERVER' in global));
assert('in operator find Math', 'Math' in global);
assert('in operator walk prototype chain', 'toString' in global);
var b = Object.keys(global);
assert('Object.keys works', b.join(',') === a.join(','));
assert('can access undeclared', global.asdf === nothing);
assert('contains `undefined`', 'undefined' in global);
global.undefined = 'foo';
assert('re-assigning built-in global does nothing', global.undefined === nothing);
delete global.Infinity;
assert('deleting built-in global does nothing', typeof global.Infinity === 'number');
});
// call/apply/bind `this` coercion: sloppy mode boxes primitives and maps
// null to the global object; strict mode passes `this` through untouched.
testSuite('functions', function() {
var o = {};
var fn1 = function(a) { return this; };
var fnStrict = function(a) {
"use strict";
return this;
};
assert('instance of Function', fn1 instanceof Function);
assert('function has length', fn1.length === 1);
assert('function properties not enumerable', Object.keys(fn1).length === 0);
assert('can call null', fn1.call(null) === global);
assert('can call null (strict)', fnStrict.call(null) === null);
assert('can call primitive', fn1.call('s') instanceof String);
assert('can call primitive (strict)', fnStrict.call('s') === 's');
assert('can call object', fn1.call(o) === o);
assert('can call object.prototype functions', Object.prototype.toString.call([]) === '[object Array]');
assert('can apply object', fn1.apply(o, []) === o);
assert('can apply object without second param', fn1.apply(o) === o);
var fn1_ = fn1.bind(o);
assert('bind creates new function', fn1_ !== fn1);
assert('bind works', fn1_.call(null) === o);
var fn2 = function() { return arguments.length; };
assert('can call args', fn2.call(null, false, void 0) === 2);
assert('can apply args', fn2.apply(o, [0, null, o]) === 3);
(function a() {
(function b() {
assert('arguments.callee', arguments.callee === b);
assert('arguments.caller', arguments.caller === a);
})();
})();
});
// bind with partial application reduces arity by the bound-arg count.
testSuite('more functions', function() {
var fn1 = function(a, b, c) { return [a, b, c]; };
var fn2 = fn1.bind(null, 1, '2');
assert('function arity', fn1.length === 3);
assert('bound function arity', fn2.length === 1);
var result = fn2('a', 'b', 'c');
assert('bound function with args', result.join(';') === '1;2;a');
});
// Any value may be thrown; catch binds a block-local name without leaking.
testSuite('throw/catch', function() {
var nothing = void 0;
assert('can throw undefined', throwCatch(nothing) === nothing);
assert('can throw null', throwCatch(null) === null);
assert('can throw number', throwCatch(1) === 1);
var e = new Error('err');
assert('can throw error', throwCatch(e) === e);
try {
throw 'foo';
} catch(e) {
assert('catch creates scope', e === 'foo');
}
assert('catch scope should not bleed', e instanceof Error);
var e1 = new TypeError('message');
assert('TypeError', e1 instanceof TypeError);
assert('TypeError inherits from Error', e1 instanceof Error);
assert('TypeError is distinct from Error', TypeError !== Error);
});
});
|
<reponame>akaczorowski/android-research
getFragmentManager().beginTransaction().replace(android.R.id.content,
new ExampleFragment()).commit();
|
import com.entity.User;
import com.mapper.UserMapper;
import com.system.XSqlSession;
import org.junit.Test;
// Smoke test for the hand-rolled MyBatis-style framework: exercises the full
// CRUD cycle (findAll / addUser / findById / updateUser / deleteUser) through
// a mapper proxy obtained from XSqlSession. Requires the database configured
// in xuwei-mybatis.xml; results are inspected by eye via stdout rather than
// asserted.
public class XWTest {
    @Test
    public void test() {
        XSqlSession xSqlSession = new XSqlSession("src/main/resources/xuwei-mybatis.xml");
        UserMapper userMapper = (UserMapper) xSqlSession.getMapper(UserMapper.class);
        // First, query the database as-is
        System.out.println(userMapper.findAll());
        // Create a few objects and save them to the database
        User user1 = new User();
        User user2 = new User();
        User user3 = new User();
        user1.setRealName("一号");
        user1.setSex("男");
        user1.setAddress("河北");
        user2.setRealName("二号");
        user2.setSex("女");
        user2.setAddress("广东");
        user3.setRealName("三号");
        user3.setSex("保密");
        user3.setAddress("北京");
        userMapper.addUser(user1);
        userMapper.addUser(user2);
        userMapper.addUser(user3);
        // Query again to see the inserts
        System.out.println(userMapper.findAll());
        // Fetch one specific record, then modify it
        User queryOne = userMapper.findById(1);
        System.out.println(queryOne);
        queryOne.setSex("保密");
        userMapper.updateUser(queryOne);
        // Verify the update took effect
        System.out.println(userMapper.findById(1));
        // Delete one record
        userMapper.deleteUser(1);
        // Final query to confirm the delete
        System.out.println(userMapper.findAll());
    }
}
|
<reponame>HaoNanYanToMe/akash<gh_stars>0
package prism.akash.controller;
import com.alibaba.fastjson.JSON;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import prism.akash.container.BaseData;
import prism.akash.controller.proxy.BaseProxy;
import prism.akash.tools.StringKit;
import prism.akash.tools.oauth.AccessTool;
import java.io.Serializable;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
 * Generic entry-point controller.
 * TODO : system-wide unified API endpoint
 *
 * @author <NAME>
 */
@RestController
public class BaseController extends BaseProxy implements Serializable {
    private final Logger logger = LoggerFactory.getLogger(BaseController.class);
    @Autowired
    AccessTool accessTool;
    /**
     * Unified entry point for all system business logic.
     *
     *
     * @param schemaName name of the business schema to invoke; defaults to "base"
     * @param methodName name of the method to call
     * @param id         data-table id / SQL data-engine id  TODO ※ may be empty when schemaName is not "base"
     * @param data       wrapped parameter map for the target method  TODO ※ may be empty; prefer passing -> {}
     * {
     *   *id :
     *   *executeData :
     *   {
     *     paramA : ...parameter A
     *     paramB : ...parameter B
     *     ...
     *   }
     * }
     * TODO base logic methods ("base"):
     *   1. select          query data             TODO ※ requires a SQL-engine id
     *   2. selectPage      query data (paginated) TODO ※ requires a SQL-engine id
     *   3. selectByOne     query one record by its data id
     *   4. insertData      insert data
     *   5. updateData      update data
     *   6. deleteDataSoft  soft-delete data
     *   7. deleteData      hard-delete data
     * TODO demos for 3~7 can be found in AkashApplicationTests
     *
     * @return request result
     */
    @CrossOrigin(origins = "*", maxAge = 3600)
    @RequestMapping(value = "/executeUnify",
            method = RequestMethod.POST,
            produces = "application/json;charset=UTF-8")
    public String executeUnify(
            @RequestParam(value = "schemaName", required = false, defaultValue = "base") String schemaName,
            @RequestParam(value = "methodName") String methodName,
            @RequestParam(value = "id", required = false, defaultValue = "") String id,
            @RequestParam(value = "data", required = false, defaultValue = "{}") String data) {
        if (accessTool.accessParamCheck(schemaName, methodName, id)) {
            // ⚠ injection attack detected: refuse the request with a generic error
            Map<String, Object> result = new ConcurrentHashMap<>();
            result.put("result", "0");
            result.put("resultData", "⚠ 操作失败,请联系管理员");
            return JSON.toJSONString(result);
        } else {
            // Parameters look clean; parse the payload and dispatch to the schema method
            BaseData execute = StringKit.parseBaseData(data);
            return StringKit.formateSchemaData(invokeMethod(schemaName, methodName, id, execute));
        }
    }
}
|
#download some data and make tiles out of it
#NOTE: you can feed multiple extracts into pbfgraphbuilder
wget https://download.bbbike.org/osm/bbbike/Brisbane/Brisbane.osm.pbf
#get the config and setup
mkdir -p valhalla_tiles
valhalla_build_config --mjolnir-tile-dir ${PWD}/valhalla_tiles --mjolnir-tile-extract ${PWD}/valhalla_tiles.tar --mjolnir-timezone ${PWD}/valhalla_tiles/timezones.sqlite --mjolnir-admin ${PWD}/valhalla_tiles/admins.sqlite > valhalla.json
#build routing tiles
#TODO: run valhalla_build_admins?
#debug output: confirm the runtime linker setup inside the build environment
echo $LD_LIBRARY_PATH
ls /home
ls /usr/local/lib
#FIX: default to the valhalla.json generated above when the caller did not
#export VALHALLA_CONFIG (previously an unset variable expanded to nothing and
#valhalla_build_tiles ran without a config path)
valhalla_build_tiles -c ${VALHALLA_CONFIG:-valhalla.json} Brisbane.osm.pbf
#tar it up for running the server
#either run this to build a tile index for faster graph loading times
# valhalla_build_extract -c valhalla.json -v
#or simply tar up the tiles
find valhalla_tiles | sort -n | tar cf valhalla_tiles.tar --no-recursion -T -
|
#!/bin/bash
# Build a standalone CDH docker image. Optional args (in either position):
#   "oracle" -> use the locally-downloaded Oracle JDK tarball
#   "5.11"   -> build the CDH 5.11.1 line instead of 5.16.1
java_tar="jdk-8u211-linux-x64.tar.gz"
jdk="openjdk"
cdh_version="5.16.1"

if [ "${1}" == "oracle" ]
then
  if [ ! -s oracleJDK/${java_tar} ]
  then
    echo "Fatal: ${java_tar} not found; please download oracle jdk"
    exit 1
  else
    # ROBUSTNESS: abort if the build-context directory is missing; previously
    # a failed cd meant docker build ran against the wrong directory.
    cd oracleJDK || exit 1
    jdk="oraclejdk"
  fi
else
  cd openJDK || exit 1
fi

# FIX: replaced the obsolescent "[ ... -o ... ]" test with POSIX-recommended
# separate tests joined by ||.
if [ "${1}" == "5.11" ] || [ "${2}" == "5.11" ]
then
  cdh_version="5.11.1"
fi

image="seomoz_cdh_${cdh_version}_${jdk}_standalone:v2"

# Exported so docker build can pick them up via --build-arg name-only form.
export cdh_version=${cdh_version}
export java_tar=${java_tar}
docker build --build-arg cdh_version --build-arg java_tar -t ${image} .
cd -
|
// Import the necessary modules and Axios instance
import axios from 'axios';
// Axios instance pre-configured with the API base URL.
const instance = axios.create({ baseURL: 'https://api.example.com' });

// Request interceptor: attach the token stored in localStorage (when present)
// as a Bearer Authorization header before each request is sent; request
// setup errors are propagated unchanged.
const authInterceptor = instance.interceptors.request.use(
  (config) => {
    const token = localStorage.getItem('token');
    if (token) {
      config.headers['Authorization'] = `Bearer ${token}`;
    }
    return config;
  },
  (error) => Promise.reject(error)
);

// Export the Axios instance with the attached interceptor
export default instance;
|
<filename>src/function/aggregate/distributive/sum.cpp
#include "duckdb/function/aggregate/distributive_functions.hpp"
#include "duckdb/common/exception.hpp"
#include "duckdb/common/types/null_value.hpp"
#include "duckdb/common/vector_operations/vector_operations.hpp"
#include "duckdb/common/vector_operations/aggregate_executor.hpp"
#include "duckdb/common/operator/numeric_binary_operators.hpp"
using namespace std;
namespace duckdb {
// Scatter-add: adds each input value into its target aggregate state.
// SUM ignores NULLs, which Scatter::Add handles internally.
static void sum_update(Vector inputs[], idx_t input_count, Vector &result) {
	assert(input_count == 1);
	VectorOperations::Scatter::Add(inputs[0], result);
}
// Combine partial states (parallel aggregation): fold `state` into `combined`.
static void sum_combine(Vector &state, Vector &combined) {
	VectorOperations::Scatter::Add(state, combined);
}
// Simple (non-grouped) update path: accumulates the sum of one vector into a
// single state of type T, where a NULL-valued state means "no values seen yet".
template <class T> static void sum_simple_update(Vector inputs[], idx_t input_count, data_ptr_t state_) {
	auto state = (T *)state_;
	T result;
	if (!AggregateExecutor::Execute<T, T, duckdb::Add>(inputs[0], &result)) {
		// no non-null values encountered
		return;
	}
	if (inputs[0].vector_type == VectorType::CONSTANT_VECTOR) {
		// A constant vector is logically repeated size() times, but Execute
		// only adds the constant once — scale it up to the full count.
		result *= inputs[0].size();
	}
	if (IsNullValue<T>(*state)) {
		// First non-null contribution initializes the state.
		*state = result;
	} else {
		*state += result;
	}
}
// Registers SUM overloads: BIGINT->BIGINT and DOUBLE->DOUBLE.
void SumFun::RegisterFunction(BuiltinFunctions &set) {
	AggregateFunctionSet sum("sum");
	// integer sums to bigint
	sum.AddFunction(AggregateFunction({SQLType::BIGINT}, SQLType::BIGINT, get_return_type_size, null_state_initialize,
	                                  sum_update, sum_combine, gather_finalize, sum_simple_update<int64_t>));
	// float sums to float
	sum.AddFunction(AggregateFunction({SQLType::DOUBLE}, SQLType::DOUBLE, get_return_type_size, null_state_initialize,
	                                  sum_update, sum_combine, gather_finalize, sum_simple_update<double>));
	set.AddFunction(sum);
}
} // namespace duckdb
|
/** Custom drawer view that tracks (and will eventually animate) open state. */
public class AnimNavigationDrawer extends View {

    /** Whether the drawer is currently open. */
    private boolean isOpen = false;

    public AnimNavigationDrawer(Context context, AttributeSet attrs) {
        super(context, attrs);
        // Initialize the view and attributes
    }

    /** Opens the drawer (animation to be implemented). */
    public void openDrawer() {
        setDrawerState(true);
    }

    /** Closes the drawer (animation to be implemented). */
    public void closeDrawer() {
        setDrawerState(false);
    }

    /** @return true when the drawer is open. */
    public boolean isOpen() {
        return isOpen;
    }

    // Single mutation point for the open flag; animation hooks go here.
    private void setDrawerState(boolean open) {
        isOpen = open;
    }
}
|
// Returns the input string with its first character upper-cased.
// Empty strings pass through unchanged; like the original, a nullish
// argument throws because slice is called on it.
function capitalizeFirstLetter(string) {
  const head = string.slice(0, 1).toUpperCase();
  const tail = string.slice(1);
  return head + tail;
}
|
#include <iostream>
#include <cstdlib>
// Report an unrecoverable error and abort the process.
// NOTE(review): identifiers beginning with an underscore followed by an
// uppercase letter are reserved in C++; consider renaming — confirm no
// external callers first.
void _FATALERROR(const char* errorMessage) {
    std::cerr << "Fatal Error: " << errorMessage << std::endl;
    std::exit(EXIT_FAILURE);
}

// Hook-installation facilities.
namespace Hooks {
    // Install the required hooks; true on success.
    bool InstallHooks() {
        // Implementation of installing hooks
        return true; // Placeholder for successful installation
    }
}

// Registers the messaging interface; hooks must be installed first.
// Returns true on success. On hook failure _FATALERROR terminates the
// process, so the subsequent `return false` is defensive only.
bool RegisterMessagingInterface() {
    const bool hooksInstalled = Hooks::InstallHooks();
    if (!hooksInstalled) {
        _FATALERROR("Failed to install hooks for messaging interface");
        return false;
    }
    // Additional registration process
    // ...
    std::cout << "Messaging interface registered successfully" << std::endl;
    return true;
}

int main() {
    const bool registered = RegisterMessagingInterface();
    if (registered) {
        std::cout << "Registration process completed successfully" << std::endl;
    } else {
        std::cerr << "Registration process failed" << std::endl;
    }
    return 0;
}
|
/**
 * Minimal generic string-keyed store.
 *
 * Fix: the previous `Record<string, T> = {}` backing object inherited from
 * `Object.prototype`, so `get("toString")` / `get("constructor")` returned
 * prototype members masquerading as `T`. A `Map` has no such prototype
 * pollution and gives the same add/get/remove semantics.
 */
class KeyValueStore<T> {
  private store = new Map<string, T>();

  /** Insert or overwrite the value stored under `key`. */
  add(key: string, value: T): void {
    this.store.set(key, value);
  }

  /** Look up `key`; `undefined` when absent. */
  get(key: string): T | undefined {
    return this.store.get(key);
  }

  /** Delete `key` if present; no-op otherwise. */
  remove(key: string): void {
    this.store.delete(key);
  }
}
|
<gh_stars>1-10
package com.abubusoft.filmfinder.view.ui;
import android.os.Bundle;
import android.support.design.widget.CollapsingToolbarLayout;
import android.support.v7.app.AppCompatActivity;
import android.widget.ImageView;
import android.widget.TextView;
import com.abubusoft.filmfinder.R;
import com.bumptech.glide.Glide;
/** Detail screen for a selected movie; layout is activity_detail. */
public class DetailActivity extends AppCompatActivity {
    // Intent-extra keys callers use to pass the movie payload and poster URL.
    public static final String MOVIE_DETAIL = "movie_detail";
    public static final String IMAGE_URL = "image_url";
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_detail);
        // NOTE(review): the data-binding code below is commented out, so this
        // activity currently shows the layout with no content. Restore it once
        // SearchService.Detail is available, or delete it — confirm intent.
        /*
        final SearchService.Detail detail = getIntent().getParcelableExtra(MOVIE_DETAIL);
        final String imageUrl = getIntent().getStringExtra(IMAGE_URL);
        Glide.with(this).load(imageUrl).into( (ImageView) findViewById(R.id.main_backdrop));
        // set title for the appbar
        CollapsingToolbarLayout collapsingToolbarLayout = (CollapsingToolbarLayout) findViewById(R.id.main_collapsing);
        collapsingToolbarLayout.setTitle(detail.Title);
        ((TextView) findViewById(R.id.grid_title)).setText(detail.Title);
        ((TextView) findViewById(R.id.grid_writers)).setText(detail.Writer);
        ((TextView) findViewById(R.id.grid_actors)).setText(detail.Actors);
        ((TextView) findViewById(R.id.grid_director)).setText(detail.Director);
        ((TextView) findViewById(R.id.grid_genre)).setText(detail.Genre);
        ((TextView) findViewById(R.id.grid_released)).setText(detail.Released);
        ((TextView) findViewById(R.id.grid_plot)).setText(detail.Plot);
        ((TextView) findViewById(R.id.grid_runtime)).setText(detail.Runtime);*/
    }
}
|
<filename>node_modules/alga-js/types/date/rangeDate.d.ts<gh_stars>0
/**
 * Builds the sequence of dates between startDate and endDate (inclusive,
 * presumably — TODO confirm against the implementation), each formatted
 * according to the optional formatParam.
 */
export function rangeDate(startDate: any, endDate: any, formatParam?: any): any[];
|
"use strict";
// Loaded for its side effects only: registers the mobile widget parts.
require("./modules/parts/widgets-mobile");
|
<gh_stars>0
import 'rxjs/add/observable/of';
import 'rxjs/add/operator/catch';
import 'rxjs/add/operator/map';
import { Injectable } from '@angular/core';
import { Http, Response } from '@angular/http';
import { Observable } from 'rxjs/Observable';
import { Link } from './../../models/link.model';
import { Location } from './../../models/location.model';
@Injectable()
export class LocationService {
  constructor(private http: Http) { }

  /**
   * Fetches all locations from the backend.
   *
   * NOTE(review): `{ cache: true }` is not a recognized Angular Http request
   * option — it is silently ignored; confirm intent.
   * NOTE(review): on error the catch emits a plain string even though the
   * declared type is `Observable<Location[]>`; `Observable.of` also requires
   * 'rxjs/add/observable/of' to be imported somewhere in the app — verify.
   */
  getAllLocations(): Observable<Location[]> {
    return this.http.get('/locations', { cache: true })
      .map((res: Response) => res.json())
      .catch(() => Observable.of('Error, could not load locations :-('));
  }
}
|
#!/bin/bash
# Fix: the shebang must be the very first line of the file to take effect;
# it previously appeared after the license header.
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Make sure that the model can be trained without errors.
set -e
set -x
# Isolated environment so the smoke test does not pollute the system Python.
virtualenv -p python3 .venv_d3pm
source .venv_d3pm/bin/activate
pip install --upgrade pip
pip install -r d3pm/requirements.txt
python -m d3pm.images.main_test
|
# -*- coding: UTF-8 -*-
"""Definitions for `Ensembler` class."""
import gc
import sys
import time
import numpy as np
import scipy
from astrocats.catalog.model import MODEL
from astrocats.catalog.quantity import QUANTITY
from emcee.autocorr import AutocorrError
from mosfit.mossampler import MOSSampler
from mosfit.samplers.sampler import Sampler
from mosfit.utils import calculate_WAIC, pretty_num
class Ensembler(Sampler):
    """Fit transient events with the provided model.

    Wraps a parallel-tempered ensemble sampler (``MOSSampler``) and adds
    burn-in handling, stale-walker redrawing, optional "fracking"
    (basin-hopping refinement), convergence monitoring (PSRF or
    autocorrelation time), and chain-memory management.

    Fixes vs. previous revision: ``np.int``/``np.float`` (removed in
    NumPy >= 1.24) replaced with the builtin ``int``/``float``; the
    temperature/walker index names in the fracking result loop were swapped
    (behavior unchanged, naming corrected).
    """

    # Maximum `c` parameter attempted when estimating autocorrelation times.
    _MAX_ACORC = 5
    # Walkers unchanged for this many steps are considered stale and redrawn.
    _REPLACE_AGE = 20

    def __init__(
        self, fitter, model=None, iterations=2000, burn=None, post_burn=None,
            num_temps=1, num_walkers=None, convergence_criteria=None,
            convergence_type='psrf', gibbs=False, fracking=True,
            frack_step=20, **kwargs):
        """Initialize `Ensembler` class."""
        super(Ensembler, self).__init__(
            fitter, num_walkers=num_walkers, **kwargs)

        self._model = model
        self._iterations = iterations
        self._burn = burn
        self._post_burn = post_burn
        self._num_temps = num_temps
        self._cc = convergence_criteria
        self._ct = convergence_type
        self._gibbs = gibbs
        self._fracking = fracking
        self._frack_step = frack_step

        self._upload_model = None
        self._WAIC = None

    def append_output(self, modeldict):
        """Append output from the ensembler to the model description."""
        self._WAIC = None
        if self._iterations > 0:
            self._WAIC = calculate_WAIC(self._scores)
            modeldict[MODEL.SCORE] = {
                QUANTITY.VALUE: str(self._WAIC),
                QUANTITY.KIND: 'WAIC'
            }
            modeldict[MODEL.CONVERGENCE] = []
            if self._psrf < np.inf:
                modeldict[MODEL.CONVERGENCE].append(
                    {
                        QUANTITY.VALUE: str(self._psrf),
                        QUANTITY.KIND: 'psrf'
                    }
                )
            if self._acor and self._aacort > 0:
                acortimes = '<' if self._aa < self._MAX_ACORC else ''
                # Number of autocorrelation times the chain has run for.
                acortimes += str(int(float(self._emi -
                                           self._ams) / self._actc))
                modeldict[MODEL.CONVERGENCE].append(
                    {
                        QUANTITY.VALUE: str(acortimes),
                        QUANTITY.KIND: 'autocorrelationtimes'
                    }
                )
            modeldict[MODEL.STEPS] = str(self._emi)

    def prepare_output(self, check_upload_quality, upload):
        """Prepare output for writing to disk and uploading."""
        prt = self._printer

        if check_upload_quality:
            # A negative (or missing) WAIC disqualifies the fit from upload.
            if self._WAIC is None:
                self._upload_model = False
            elif self._WAIC is not None and self._WAIC < 0.0:
                if upload:
                    prt.message('no_ul_waic', ['' if self._WAIC is None
                                               else pretty_num(self._WAIC)])
                self._upload_model = False

        if len(self._all_chain):
            self._pout = self._all_chain[:, :, -1, :]
            self._lnprobout = self._all_lnprob[:, :, -1]
            self._lnlikeout = self._all_lnlike[:, :, -1]
        else:
            self._pout = self._p
            self._lnprobout = self._lnprob
            self._lnlikeout = self._lnlike

        weight = 1.0 / (self._nwalkers * self._ntemps)
        self._weights = np.full_like(self._lnlikeout, weight)

        # Here, we append to the vector of walkers from the full chain based
        # upon the value of acort (the autocorrelation timescale).
        if self._acor and self._aacort > 0 and self._aa == self._MAX_ACORC:
            actc0 = int(np.ceil(self._aacort))
            for i in range(1, int(float(self._emi - self._ams) / actc0)):
                self._pout = np.concatenate(
                    (self._all_chain[:, :, -i * self._actc, :], self._pout),
                    axis=1)
                self._lnprobout = np.concatenate(
                    (self._all_lnprob[:, :, -i * self._actc],
                     self._lnprobout), axis=1)
                self._lnlikeout = np.concatenate(
                    (self._all_lnlike[:, :, -i * self._actc],
                     self._lnlikeout), axis=1)
                self._weights = np.full_like(self._lnlikeout, weight)

    def run(self, walker_data):
        """Use ensemble sampling to determine posteriors."""
        from mosfit.fitter import draw_walker, frack, ln_likelihood, ln_prior

        prt = self._printer

        self._emcee_est_t = 0.0
        self._bh_est_t = 0.0
        if self._burn is not None:
            self._burn_in = min(self._burn, self._iterations)
        elif self._post_burn is not None:
            self._burn_in = max(self._iterations - self._post_burn, 0)
        else:
            self._burn_in = int(np.round(self._iterations / 2))

        self._ntemps, ndim = (
            self._num_temps, self._model._num_free_parameters)

        if self._num_walkers:
            self._nwalkers = self._num_walkers
        else:
            self._nwalkers = 2 * ndim

        test_walker = self._iterations > 0
        self._lnprob = None
        self._lnlike = None
        pool_size = max(self._pool.size, 1)
        # Derived so only half a walker redrawn with Gaussian distribution.
        redraw_mult = 0.5 * np.sqrt(
            2) * scipy.special.erfinv(float(
                self._nwalkers - 1) / self._nwalkers)

        prt.message('nmeas_nfree', [self._model._num_measurements, ndim])
        if test_walker:
            if self._model._num_measurements <= ndim:
                prt.message('too_few_walkers', warning=True)
            if self._nwalkers < 10 * ndim:
                prt.message('want_more_walkers', [10 * ndim, self._nwalkers],
                            warning=True)
        p0 = [[] for x in range(self._ntemps)]

        # Generate walker positions based upon loaded walker data, if
        # available.
        walkers_pool = []
        walker_weights = []
        nmodels = len(set([x[0] for x in walker_data]))
        wp_extra = 0
        while len(walkers_pool) < len(walker_data):
            appended_walker = False
            for walk in walker_data:
                if (len(walkers_pool) + wp_extra) % nmodels != walk[0]:
                    continue
                new_walk = np.full(self._model._num_free_parameters, None)
                for k, key in enumerate(self._model._free_parameters):
                    param = self._model._modules[key]
                    walk_param = walk[1].get(key)
                    if walk_param is None or 'value' not in walk_param:
                        continue
                    if param:
                        val = param.fraction(walk_param['value'])
                        if not np.isnan(val):
                            new_walk[k] = val
                walkers_pool.append(new_walk)
                walker_weights.append(walk[2])
                appended_walker = True
            if not appended_walker:
                wp_extra += 1

        # Make sure weights are normalized.
        if None not in walker_weights:
            totw = np.sum(walker_weights)
            walker_weights = [x / totw for x in walker_weights]

        # Draw walker positions. This is either done from the priors or from
        # loaded walker data. If some parameters are not available from the
        # loaded walker data they will be drawn from their priors instead.
        pool_len = len(walkers_pool)
        for i, pt in enumerate(p0):
            dwscores = []
            while len(p0[i]) < self._nwalkers:
                prt.status(
                    self,
                    desc='drawing_walkers',
                    iterations=[
                        i * self._nwalkers + len(p0[i]) + 1,
                        self._nwalkers * self._ntemps])

                if self._pool.size == 0 or pool_len:
                    self._p, score = draw_walker(
                        test_walker, walkers_pool,
                        replace=pool_len < self._ntemps * self._nwalkers,
                        weights=walker_weights)
                    p0[i].append(self._p)
                    dwscores.append(score)
                else:
                    # Parallel draws when a pool is available and no loaded
                    # walker positions constrain the draw.
                    nmap = min(self._nwalkers -
                               len(p0[i]), max(self._pool.size, 10))
                    dws = self._pool.map(draw_walker, [test_walker] * nmap)
                    p0[i].extend([x[0] for x in dws])
                    dwscores.extend([x[1] for x in dws])

                if self._fitter._draw_above_likelihood is not False:
                    self._fitter._draw_above_likelihood = np.mean(dwscores)

        prt.message('initial_draws', inline=True)
        self._p = list(p0)

        self._emi = 0
        self._acor = None
        self._aacort = -1
        self._aa = 0
        self._psrf = np.inf
        self._all_chain = np.array([])
        self._scores = np.ones((self._ntemps, self._nwalkers)) * -np.inf

        tft = 0.0  # Total self._fracking time
        sli = 1.0  # Keep track of how many times chain halved
        s_exception = None
        kmat = None
        ages = np.zeros((self._ntemps, self._nwalkers), dtype=int)
        oldp = self._p

        max_chunk = 1000
        kmat_chunk = 5
        iter_chunks = int(np.ceil(float(self._iterations) / max_chunk))
        iter_arr = [max_chunk if xi < iter_chunks - 1 else
                    self._iterations - max_chunk * (iter_chunks - 1)
                    for xi, x in enumerate(range(iter_chunks))]
        # Make sure a chunk separation is located at self._burn_in
        chunk_is = sorted(set(
            np.concatenate(([0, self._burn_in], np.cumsum(iter_arr)))))
        iter_arr = np.diff(chunk_is)

        # The argument of the for loop runs emcee, after each iteration of
        # emcee the contents of the for loop are executed.
        converged = False
        exceeded_walltime = False
        ici = 0

        try:
            if self._iterations > 0:
                sampler = MOSSampler(
                    self._ntemps, self._nwalkers, ndim, ln_likelihood,
                    ln_prior, pool=self._pool)
                st = time.time()
            while (self._iterations > 0 and (
                    self._cc is not None or ici < len(iter_arr))):
                slr = int(np.round(sli))
                ic = (max_chunk if self._cc is not None else
                      iter_arr[ici])
                if exceeded_walltime:
                    break
                if (self._cc is not None and converged and
                        self._emi > self._iterations):
                    break
                for li, (
                        self._p, self._lnprob, self._lnlike) in enumerate(
                            sampler.sample(
                                self._p, iterations=ic, gibbs=self._gibbs if
                                self._emi >= self._burn_in else True)):
                    if (self._fitter._maximum_walltime is not False and
                            time.time() - self._fitter._start_time >
                            self._fitter._maximum_walltime):
                        prt.message('exceeded_walltime', warning=True)
                        exceeded_walltime = True
                        break
                    self._emi = self._emi + 1
                    emim1 = self._emi - 1
                    messages = []

                    # Increment the age of each walker if their positions are
                    # unchanged.
                    for ti in range(self._ntemps):
                        for wi in range(self._nwalkers):
                            if np.array_equal(self._p[ti][wi], oldp[ti][wi]):
                                ages[ti][wi] += 1
                            else:
                                ages[ti][wi] = 0

                    # Record then reset sampler proposal/acceptance counts.
                    accepts = list(
                        np.mean(sampler.nprop_accepted / sampler.nprop,
                                axis=1))
                    sampler.nprop = np.zeros(
                        (sampler.ntemps, sampler.nwalkers), dtype=float)
                    sampler.nprop_accepted = np.zeros(
                        (sampler.ntemps, sampler.nwalkers),
                        dtype=float)

                    # During self._burn-in only, redraw any walkers with scores
                    # significantly worse than their peers, or those that are
                    # stale (i.e. remained in the same position for a long
                    # time).
                    if emim1 <= self._burn_in:
                        pmedian = [np.median(x) for x in self._lnprob]
                        # NOTE(review): `y - pmedian` broadcasts y against the
                        # whole per-temperature median list; `pmedian[ti]` may
                        # have been intended — confirm before changing.
                        pmead = [np.mean([abs(y - pmedian) for y in x])
                                 for x in self._lnprob]
                        redraw_count = 0
                        bad_redraws = 0
                        for ti, tprob in enumerate(self._lnprob):
                            for wi, wprob in enumerate(tprob):
                                if (wprob <= pmedian[ti] -
                                    max(redraw_mult * pmead[ti],
                                        float(self._nwalkers)) or
                                        np.isnan(wprob) or
                                        ages[ti][wi] >= self._REPLACE_AGE):
                                    redraw_count = redraw_count + 1
                                    dxx = np.random.normal(
                                        scale=0.01, size=ndim)
                                    tar_x = np.array(
                                        self._p[np.random.randint(
                                            self._ntemps)][
                                                np.random.randint(self._nwalkers)])
                                    # Reflect if out of bounds.
                                    new_x = np.clip(np.where(
                                        np.where(tar_x + dxx < 1.0,
                                                 tar_x + dxx,
                                                 tar_x - dxx) > 0.0,
                                        tar_x + dxx, tar_x - dxx), 0.0, 1.0)
                                    new_like = ln_likelihood(new_x)
                                    new_prob = new_like + ln_prior(new_x)
                                    if new_prob > wprob or np.isnan(wprob):
                                        self._p[ti][wi] = new_x
                                        self._lnlike[ti][wi] = new_like
                                        self._lnprob[ti][wi] = new_prob
                                    else:
                                        bad_redraws = bad_redraws + 1
                        if redraw_count > 0:
                            messages.append(
                                '{:.0%} redraw, {}/{} success'.format(
                                    redraw_count /
                                    (self._nwalkers * self._ntemps),
                                    redraw_count - bad_redraws, redraw_count))

                    oldp = self._p.copy()

                    # Calculate the autocorrelation time.
                    low = 10
                    asize = 0.5 * (emim1 - self._burn_in) / low
                    if asize >= 0 and self._ct == 'acor':
                        acorc = max(
                            1, min(self._MAX_ACORC,
                                   int(np.floor(0.5 * self._emi / low))))
                        self._aacort = -1.0
                        self._aa = 0
                        self._ams = self._burn_in
                        cur_chain = (np.concatenate(
                            (self._all_chain,
                             sampler.chain[:, :, :li + 1:slr, :]),
                            axis=2) if len(self._all_chain) else
                            sampler.chain[:, :, :li + 1:slr, :])
                        for a in range(acorc, 1, -1):
                            ms = self._burn_in
                            if ms >= self._emi - low:
                                break
                            try:
                                acorts = sampler.get_autocorr_time(
                                    chain=cur_chain, low=low, c=a,
                                    min_step=int(np.round(float(ms) / sli)),
                                    max_walkers=5, fast=True)
                                acort = max([
                                    max(x)
                                    for x in acorts
                                ])
                            except AutocorrError:
                                continue
                            else:
                                self._aa = a
                                self._aacort = acort * sli
                                self._ams = ms
                                break
                        self._acor = [self._aacort, self._aa, self._ams]

                        self._actc = int(np.ceil(self._aacort / sli))
                        actn = int(
                            float(self._emi - self._ams) / self._actc)

                        if (self._cc is not None and
                                actn >= self._cc and
                                self._emi > self._iterations):
                            prt.message('converged')
                            converged = True
                            break

                    # Calculate the PSRF (Gelman-Rubin statistic).
                    if li > 1 and self._emi > self._burn_in + 2:
                        cur_chain = (np.concatenate(
                            (self._all_chain,
                             sampler.chain[:, :, :li + 1:slr, :]),
                            axis=2) if len(self._all_chain) else
                            sampler.chain[:, :, :li + 1:slr, :])
                        vws = np.zeros((self._ntemps, ndim))
                        for ti in range(self._ntemps):
                            for xi in range(ndim):
                                vchain = cur_chain[
                                    ti, :, int(np.floor(
                                        self._burn_in / sli)):, xi]
                                vws[ti][xi] = self.psrf(vchain)
                        self._psrf = np.max(vws)
                        if np.isnan(self._psrf):
                            self._psrf = np.inf

                        if (self._ct == 'psrf' and
                                self._cc is not None and
                                self._psrf < self._cc and
                                self._emi > self._iterations):
                            prt.message('converged')
                            converged = True
                            break

                    if self._cc is not None:
                        self._emcee_est_t = -1.0
                    else:
                        self._emcee_est_t = float(
                            time.time() - st - tft) / self._emi * (
                                self._iterations - self._emi
                        ) + tft / self._emi * max(
                            0, self._burn_in - self._emi)

                    # Perform self._fracking if we are still in the self._burn
                    # in phase and iteration count is a multiple of the frack
                    # step.
                    frack_now = (self._fracking and self._frack_step != 0 and
                                 self._emi <= self._burn_in and
                                 self._emi % self._frack_step == 0)

                    self._scores = [np.array(x) for x in self._lnprob]
                    if emim1 % kmat_chunk == 0:
                        sout = self._model.run_stack(
                            self._p[np.unravel_index(
                                np.argmax(self._lnprob), self._lnprob.shape)],
                            root='objective')
                        kmat = sout.get('kmat')
                        kdiag = sout.get('kdiagonal')
                        variance = sout.get('obandvs', sout.get('variance'))
                        if kdiag is not None and kmat is not None:
                            kmat[np.diag_indices_from(kmat)] += kdiag
                        elif kdiag is not None and kmat is None:
                            kmat = np.diag(kdiag + variance)
                    prt.status(
                        self,
                        desc='fracking' if frack_now else
                        ('burning' if self._emi < self._burn_in
                         else 'walking'),
                        scores=self._scores,
                        kmat=kmat,
                        accepts=accepts,
                        iterations=[self._emi, None if
                                    self._cc is not None else
                                    self._iterations],
                        acor=self._acor,
                        psrf=[self._psrf, self._burn_in],
                        messages=messages,
                        make_space=emim1 == 0,
                        convergence_type=self._ct,
                        convergence_criteria=self._cc)

                    if s_exception:
                        break

                    if not frack_now:
                        continue

                    # Fracking starts here
                    sft = time.time()
                    ijperms = [[x, y] for x in range(self._ntemps)
                               for y in range(self._nwalkers)]
                    ijprobs = np.array([
                        1.0
                        # self._lnprob[x][y]
                        for x in range(self._ntemps) for y in range(
                            self._nwalkers)
                    ])
                    ijprobs -= max(ijprobs)
                    ijprobs = [np.exp(0.1 * x) for x in ijprobs]
                    ijprobs /= sum([x for x in ijprobs if not np.isnan(x)])
                    nonzeros = len([x for x in ijprobs if x > 0.0])
                    selijs = [
                        ijperms[x]
                        for x in np.random.choice(
                            range(len(ijperms)),
                            pool_size,
                            p=ijprobs,
                            replace=(pool_size > nonzeros))
                    ]

                    bhwalkers = [self._p[i][j] for i, j in selijs]

                    seeds = [
                        int(round(time.time() * 1000.0)) % 4294900000 + x
                        for x in range(len(bhwalkers))
                    ]
                    frack_args = list(zip(bhwalkers, seeds))
                    bhs = list(self._pool.map(frack, frack_args))
                    for bhi, bh in enumerate(bhs):
                        # selijs entries are [temperature, walker]; names were
                        # previously swapped (wi, ti) — indexing is unchanged.
                        (ti, wi) = tuple(selijs[bhi])
                        if -bh.fun > self._lnprob[ti][wi]:
                            self._p[ti][wi] = bh.x
                            like = ln_likelihood(bh.x)
                            self._lnprob[ti][wi] = like + ln_prior(bh.x)
                            self._lnlike[ti][wi] = like
                    self._scores = [[-x.fun for x in bhs]]
                    prt.status(
                        self,
                        desc='fracking_results',
                        scores=self._scores,
                        kmat=kmat,
                        fracking=True,
                        iterations=[self._emi, None if
                                    self._cc is not None else
                                    self._iterations],
                        convergence_type=self._ct,
                        convergence_criteria=self._cc)
                    tft = tft + time.time() - sft
                    if s_exception:
                        break

                if ici == 0:
                    self._all_chain = sampler.chain[:, :, :li + 1:slr, :]
                    self._all_lnprob = sampler.lnprobability[:, :, :li + 1:slr]
                    self._all_lnlike = sampler.lnlikelihood[:, :, :li + 1:slr]
                else:
                    self._all_chain = np.concatenate(
                        (self._all_chain, sampler.chain[:, :, :li + 1:slr, :]),
                        axis=2)
                    self._all_lnprob = np.concatenate(
                        (self._all_lnprob,
                         sampler.lnprobability[:, :, :li + 1:slr]),
                        axis=2)
                    self._all_lnlike = np.concatenate(
                        (self._all_lnlike,
                         sampler.lnlikelihood[:, :, :li + 1:slr]),
                        axis=2)

                mem_mb = (self._all_chain.nbytes + self._all_lnprob.nbytes +
                          self._all_lnlike.nbytes) / (1024. * 1024.)

                if self._fitter._debug:
                    prt.prt('Memory `{}`'.format(mem_mb), wrapped=True)

                # Thin the stored chain by half whenever the memory budget is
                # exceeded, tracking the cumulative thinning in `sli`.
                if mem_mb > self._fitter._maximum_memory:
                    sfrac = float(
                        self._all_lnprob.shape[-1]) / self._all_lnprob[
                            :, :, ::2].shape[-1]
                    self._all_chain = self._all_chain[:, :, ::2, :]
                    self._all_lnprob = self._all_lnprob[:, :, ::2]
                    self._all_lnlike = self._all_lnlike[:, :, ::2]
                    sli *= sfrac
                    if self._fitter._debug:
                        prt.prt(
                            'Memory halved, sli: {}'.format(sli),
                            wrapped=True)

                sampler.reset()
                gc.collect()
                ici = ici + 1

        except (KeyboardInterrupt, SystemExit):
            prt.message('ctrl_c', error=True, prefix=False, color='!r')
            s_exception = sys.exc_info()
        except Exception:
            raise

        if s_exception is not None:
            self._pool.close()
            if (not prt.prompt('mc_interrupted')):
                sys.exit()

        msg_criteria = (
            1.1 if self._cc is None else self._cc)
        if (test_walker and self._ct == 'psrf' and
                msg_criteria is not None and self._psrf > msg_criteria):
            prt.message('not_converged', [
                'default' if self._cc is None else 'specified',
                msg_criteria], warning=True)
|
<reponame>jrfaller/maracas
package mainclient.methodNowAbstract;
import main.methodNowAbstract.IMethodNowAbstract;
/**
 * Client-side marker interface extending the library's
 * {@link main.methodNowAbstract.IMethodNowAbstract}; adds no members of its
 * own (presumably exists to exercise the method-now-abstract breaking change
 * — confirm against the test harness).
 */
public interface MethodNowAbstractIntf extends IMethodNowAbstract {
}
|
<gh_stars>1-10
import gql from "graphql-tag";
import { printWithReducedWhitespace } from "..";
// Verifies that printing collapses insignificant whitespace while escaping
// significant whitespace (tabs, block-string newlines) into the output.
describe("printWithReducedWhitespace", () => {
  it("removes whitespace", () => {
    // Note: there's a tab after "tab->", which prettier wants to keep as a
    // literal tab rather than \t. In the output, there should be a literal
    // backslash-t.
    const document = gql`
      query Foo($a: Int) {
        user(
          name: " tab->	yay"
          other: """
          apple
            bag
          cat
          """
        ) {
          name
        }
      }
    `;
    expect(printWithReducedWhitespace(document)).toBe(
      `query Foo($a:Int){user(name:" tab->\\tyay"other:"apple\\n  bag\\ncat"){name}}`,
    );
  });
});
|
<filename>ruby/spec/ruby/core/symbol/size_spec.rb
require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../shared/length', __FILE__)
# Symbol#size is an alias of Symbol#length; reuse the shared length examples.
describe "Symbol#size" do
  it_behaves_like :symbol_length, :size
end
|
package edu.fiuba.algo3.modelo;
import java.util.ArrayList;
import java.util.Stack;
import edu.fiuba.algo3.modelo.bloques.Bloque;
import edu.fiuba.algo3.modelo.bloques.BloqueAlgoritmo;
import edu.fiuba.algo3.modelo.bloques.BloqueBajarLapiz;
import edu.fiuba.algo3.modelo.bloques.BloqueComplejo;
import edu.fiuba.algo3.modelo.bloques.BloqueInversor;
import edu.fiuba.algo3.modelo.bloques.BloqueMovAbajo;
import edu.fiuba.algo3.modelo.bloques.BloqueMovArriba;
import edu.fiuba.algo3.modelo.bloques.BloqueMovDerecha;
import edu.fiuba.algo3.modelo.bloques.BloqueMovIzquierda;
import edu.fiuba.algo3.modelo.bloques.BloqueRepetirDosVeces;
import edu.fiuba.algo3.modelo.bloques.BloqueRepetirTresVeces;
import edu.fiuba.algo3.modelo.bloques.BloqueSubirLapiz;
import edu.fiuba.algo3.modelo.eventos.EventosBloque;
/**
 * Holds the block program being edited: a flat list of blocks plus a stack of
 * currently-open composite blocks (repeat/invert), and runs it against a
 * character and drawing. Idiom fixes: {@code pop()+push()} pairs replaced by
 * {@code peek()}, diamond operators, simplified boolean return.
 */
public class ModuloAlgoritmo {

    private ArrayList<Bloque> bloques = new ArrayList<>();
    private final EventosBloque eventos = new EventosBloque();
    // Open composite blocks; new blocks are appended to the top of this stack.
    private Stack<BloqueComplejo> pilaBloquesComplejos = new Stack<>();
    private BloqueAlgoritmo algoritmoGuardado = new BloqueAlgoritmo();
    private Personaje personaje = new Personaje(new Posicion(0,0));
    private Dibujo dibujo = new Dibujo();

    public EventosBloque getEventos() {
        return eventos;
    }

    public boolean isEmpty() {
        return bloques.isEmpty();
    }

    public int size() {
        return bloques.size();
    }

    /** Adds a block to the open composite (if any) or to the top level. */
    private void agregarBloque(Bloque bloque) {
        if (!pilaBloquesComplejos.isEmpty()) {
            // peek() replaces the previous pop()/push() round trip.
            pilaBloquesComplejos.peek().agregarBloque(bloque);
        } else {
            bloques.add(bloque);
        }
        eventos.getOnAgregarBloque().notificar(bloque);
    }

    /** Adds a composite block and leaves it open to receive children. */
    private void agregarBloqueComplejo(BloqueComplejo bloqueComplejo) {
        agregarBloque(bloqueComplejo);
        pilaBloquesComplejos.push(bloqueComplejo);
    }

    public void agregarBloqueMovimientoArriba() {
        this.agregarBloque(new BloqueMovArriba());
    }

    public void agregarBloqueMovimientoAbajo() {
        this.agregarBloque(new BloqueMovAbajo());
    }

    public void agregarBloqueMovimientoDerecha() {
        this.agregarBloque(new BloqueMovDerecha());
    }

    public void agregarBloqueMovimientoIzquierda() {
        this.agregarBloque(new BloqueMovIzquierda());
    }

    public void agregarBloqueLapizArriba() {
        this.agregarBloque(new BloqueSubirLapiz());
    }

    public void agregarBloqueLapizAbajo() {
        this.agregarBloque(new BloqueBajarLapiz());
    }

    public void agregarBloqueRepetirDosVeces() {
        this.agregarBloqueComplejo(new BloqueRepetirDosVeces());
    }

    public void agregarBloqueRepetirTresVeces() {
        this.agregarBloqueComplejo(new BloqueRepetirTresVeces());
    }

    public void agregarBloqueInvertir() {
        this.agregarBloqueComplejo(new BloqueInversor());
    }

    public void agregarBloqueAlgoritmoGuardado() {
        this.agregarBloque(algoritmoGuardado);
    }

    /** Runs the current block list from a fresh character/drawing state. */
    public void ejecutarAlgoritmo() {
        reiniciarPersonajeYDibujo();
        BloqueAlgoritmo algoritmo = new BloqueAlgoritmo();
        algoritmo.agregarBloques(bloques);
        algoritmo.ejecutar(personaje, dibujo);
        eventos.getOnDibujar().notificar(dibujo.obtenerTramos());
    }

    /** Clears all blocks and resets character, drawing, and open composites. */
    public void reiniciarAlgoritmo() {
        reiniciarPersonajeYDibujo();
        this.bloques = new ArrayList<>();
        this.pilaBloquesComplejos = new Stack<>();
        eventos.getOnReiniciar().notificar("");
    }

    /** Snapshots the current block list into the reusable saved algorithm. */
    public void guardarAlgoritmo() {
        algoritmoGuardado = new BloqueAlgoritmo();
        algoritmoGuardado.agregarBloques(bloques);
    }

    private void reiniciarPersonajeYDibujo() {
        dibujo = new Dibujo();
        personaje = new Personaje(new Posicion(0,0));
    }

    /** Closes the innermost open composite block, notifying listeners. */
    public void noAgregaMas() {
        if (pilaBloquesComplejos.isEmpty()) return;
        pilaBloquesComplejos.pop();
        if (this.hayComplejoActivo())
            eventos.getOnNoAgregarMas().notificar(getNombreUltimoComplejo());
        else
            eventos.getOnNoAgregarMas().notificar(null);
    }

    /** @return true when at least one composite block is still open. */
    public boolean hayComplejoActivo() {
        return !pilaBloquesComplejos.isEmpty();
    }

    /** @return the name of the innermost open composite block. */
    public String getNombreUltimoComplejo() {
        // peek() replaces the previous pop()/push() round trip.
        return pilaBloquesComplejos.peek().getNombreBloque();
    }
}
|
#!/usr/bin/env sh
# Originally from https://github.com/latex3/latex3
# This script is used for testing using Travis
# It is intended to work on their VM set up: Ubuntu 12.04 LTS
# A minimal current TL is installed adding only the packages that are
# required
# See if there is a cached version of TL available
export PATH=/tmp/texlive/bin/x86_64-linux:$PATH
if ! command -v texlua > /dev/null; then
  # Obtain TeX Live
  # NOTE(review): plain-http mirror and an unpinned `install-tl-20*` glob —
  # presumably only one installer dir ever exists; confirm on the CI image.
  wget http://mirror.ctan.org/systems/texlive/tlnet/install-tl-unx.tar.gz
  tar -xzf install-tl-unx.tar.gz
  cd install-tl-20*
  # Install a minimal system
  ./install-tl --profile=../texlive/texlive.profile
  cd ..
fi
# Just including texlua so the cache check above works
# Needed for any use of texlua even if not testing LuaTeX
tlmgr install luatex
# Other contrib packages: done as a block to avoid multiple calls to tlmgr
# texlive-latex-base is needed to run pdflatex
tlmgr install \
  exam \
  amsfonts \
  stmaryrd \
  amsmath
# Keep no backups (not required, simply makes cache bigger)
tlmgr option -- autobackup 0
# Update the TL install but add nothing new
tlmgr update --self --all --no-auto-install
|
<reponame>junifar/job1_sample
import React , { Component } from 'react';
import { Form } from 'reactstrap';
import axios from 'axios';
import { Input, Button } from '../../_Main';
// Payment form for the "Mandiri Clickpay" method: collects a debit-card
// number and token, then posts the payment to the flight-payment endpoint.
// NOTE(review): class name is lower-case, against React component convention;
// renaming would require updating the route/import sites — confirm first.
export default class mandiriclickpay extends Component{
    constructor(props){
        super(props);
        // debit_card: card number entered by the user; token: one-time token.
        this.state = {
            debit_card: "",
            token: ""
        }
    }
    // Controlled-input handler for the debit card field.
    onDebitChange = (e) => {
        this.setState({
            debit_card: e.target.value
        });
    }
    // Controlled-input handler for the token field.
    onTokenChange = (e) => {
        this.setState({
            token: e.target.value
        });
    }
    // Submits the payment using booking data passed via router state.
    // NOTE(review): logs booking/payment details (and posts the raw card
    // number) to the console — remove before production; confirm.
    payment = () => {
        //var booking = "bookings/"+this.props.location.state.booking.booking.id;
        console.log('data send :'+this.props.location.state.booking.id+'-'+this.props.location.state.booking.invoiceNumber);
        console.log('data send :'+this.props.location.state.booking.payment);
        var dataSend = {
            bookingId : this.props.location.state.booking.id,
            invoiceNumber :this.props.location.state.booking.invoiceNumber,
            method : 1,
            cardNumber : this.state.debit_card, //4616999900000028
            authorizationCode : "000000",
            amount : this.props.location.state.booking.payment
        };
        var url = "/v1/flight/payment";
        console.log(dataSend);
        let axiosConfig = {
            headers: {
                'Content-Type': 'application/json',
                'WLPS_TOKEN': localStorage.getItem("token")
            }};
        // NOTE(review): success/failure responses are only logged; the user
        // gets no feedback in the UI — presumably a TODO.
        axios.post(url, dataSend, axiosConfig).then((res) => {
            console.log(res.data);
        }).catch((error) => {
            console.log(error);
        });
    }
    render(){
        return(
            <div className="garuda-login">
                <div className="garuda-login-container">
                    <div className="garuda-login-card-container">
                        <div className="garuda-login-card my-mandiriclickpay">
                            <div className="garuda-login-card-top">
                                <div className="garuda-login-picture">
                                    <div className="garuda-login-title">MANDIRI CLICKPAY</div>
                                </div>
                            </div>
                            <div className="garuda-login-card-mid">
                                <Form className="garuda-login-form">
                                    <Input placeholder="<NAME>" type="text" className="garuda-login-input" value={this.state.debit_card} onChange={this.onDebitChange}/>
                                    <Input placeholder="Token" type="password" className="" value={this.state.token} onChange={this.onTokenChange}/>
                                </Form>
                            </div>
                        </div>
                        <div className="garuda-login-card button my-mandiriclickpay-button" onClick={this.payment}>
                            <span>Bayar</span>
                        </div>
                    </div>
                </div>
            </div>
        );
    }
}
|
#!/bin/sh
# Copyright 2021 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Staging directory into which the final documentation site is assembled.
DOCS_ROOT=docs-gen
# Start from a clean staging directory.
[ -d $DOCS_ROOT ] && rm -r $DOCS_ROOT
mkdir $DOCS_ROOT
# Copies a module's README (as <module>.md) and its images into $DOCS_ROOT.
copyReadme() {
  cp $1/README.md $DOCS_ROOT/$1.md
  mkdir -p $DOCS_ROOT/$1
  cp -r $1/images $DOCS_ROOT/$1
}
# Work around Dokka failing to link against external links generated from 'gfm' sources.
curl -o package-list-coil-base https://coil-kt.github.io/coil/api/coil-base/package-list
sed -i.bak 's/$dokka.linkExtension:md/$dokka.linkExtension:html/g' package-list-coil-base
# Clear out the old API docs
[ -d docs/api ] && rm -r docs/api
# Build the docs with dokka
./gradlew clean dokkaHtmlMultiModule
# Dokka doesn't currently allow us to change the index page name so move it manually
mv docs/api/-modules.html docs/api/index.html
# Re-word the Dokka call out
# NOTE(review): these `sed -i -e` calls mix styles with the `sed -i.bak` calls
# below; on BSD/macOS sed, `-i -e` creates files suffixed `-e` — confirm this
# only runs on GNU sed (Linux CI).
find docs/api/ -type f -name '*.html' -exec sed -i -e 's/Sponsored and developed/Documentation generated/g' {} \;
# Remove the copyright declaration
find docs/api/ -type f -name '*.html' -exec sed -i -e 's/© [0-9]* Copyright//' {} \;
# Clean up the temp Coil package list
rm package-list-coil-base
# Copy over any static + API docs to our $DOCS_ROOT
cp -R docs/* $DOCS_ROOT
cp README.md $DOCS_ROOT/index.md
cp CONTRIBUTING.md $DOCS_ROOT/contributing.md
sed -i.bak 's/CONTRIBUTING.md/contributing/' $DOCS_ROOT/index.md
sed -i.bak 's/README.md//' $DOCS_ROOT/index.md
sed -i.bak 's/docs\/header.png/header.png/' $DOCS_ROOT/index.md
# Convert docs/xxx.md links to just xxx/
sed -i.bak 's/docs\/\([a-zA-Z-]*\).md/\1/' $DOCS_ROOT/index.md
copyReadme coil
copyReadme glide
# Finally delete all of the backup files
find . -name '*.bak' -delete
|
import java.net.URI;
import java.net.URISyntaxException;
import java.util.HashMap;
import java.util.Map;
// Contract for turning a raw URI string into its structured parts.
interface Parser {
    // Parse `uri`; implementations may return null for a malformed URI.
    ParsedURI parseURI(String uri);
}
class ParsedURI {
private String scheme;
private String host;
private int port;
private String path;
private Map<String, String> queryParams;
private String fragment;
public ParsedURI(String scheme, String host, int port, String path, Map<String, String> queryParams, String fragment) {
this.scheme = scheme;
this.host = host;
this.port = port;
this.path = path;
this.queryParams = queryParams;
this.fragment = fragment;
}
// Getters for parsed URI parts
// ...
}
// Parser implementation backed by java.net.URI.
class URIParser implements Parser {
    @Override
    public ParsedURI parseURI(String uri) {
        try {
            URI javaURI = new URI(uri);
            String scheme = javaURI.getScheme();
            String host = javaURI.getHost();
            int port = javaURI.getPort(); // -1 if the URI has no explicit port
            String path = javaURI.getPath();
            String query = javaURI.getQuery();
            String fragment = javaURI.getFragment();
            // Decompose the query string into key/value pairs.
            Map<String, String> queryParams = new HashMap<>();
            if (query != null) {
                String[] pairs = query.split("&");
                for (String pair : pairs) {
                    // BUG FIX: limit the split to 2 parts so values that
                    // themselves contain '=' (e.g. "next=/a=b") are kept
                    // intact instead of being silently dropped.
                    String[] keyValue = pair.split("=", 2);
                    if (keyValue.length == 2) {
                        queryParams.put(keyValue[0], keyValue[1]);
                    }
                }
            }
            return new ParsedURI(scheme, host, port, path, queryParams, fragment);
        } catch (URISyntaxException e) {
            // Malformed URI: log the failure and signal it with null.
            e.printStackTrace();
            return null;
        }
    }
}
|
# Install prerequisites for adding a third-party APT repository over HTTPS.
sudo apt-get install apt-transport-https ca-certificates dirmngr
# Import the ClickHouse package signing key.
sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv E0C56BD4
# Register the stable ClickHouse repository.
# NOTE(review): repo.clickhouse.com and apt-key are deprecated upstream; newer
# instructions use packages.clickhouse.com with a signed-by keyring -- verify.
echo "deb https://repo.clickhouse.com/deb/stable/ main/" | sudo tee \
    /etc/apt/sources.list.d/clickhouse.list
sudo apt-get update
sudo apt-get install -y clickhouse-server clickhouse-client
# Launch the server daemon and open an interactive client session.
sudo service clickhouse-server start
clickhouse-client # or "clickhouse-client --password" if you set up a password.
|
<filename>include/Repeater.hpp
//
// Repeater.hpp
// GameBT
//
// Created by <NAME> on 15/1/2018.
// Copyright © 2018 <NAME>. All rights reserved.
//
// Repeater is a Decorator that executes its child
// until a limit is reached.
// If the child fails, the decorator fails;
// if the child succeeds, it is executed again in the same
// update after being reset.
#ifndef Repeat_h
#define Repeat_h
#include "Decorator.hpp"
#include "Behavior.hpp"
namespace BT
{
// Repeater: runs its child up to m_iLimit successful completions.
//  - child FAILURE -> the Repeater fails immediately
//  - child SUCCESS -> the child is reset and ticked again in the same update
class Repeater : public Decorator
{
private:
    unsigned int m_iCounter;  // successful child completions so far
    unsigned int m_iLimit;    // completions required before reporting success
public:
    // BUG FIX: m_iCounter and m_iLimit were left uninitialized, so update()
    // compared against an indeterminate limit unless setLimit() was called
    // first. Both now start at zero.
    Repeater(Behavior* _child) : Decorator(_child), m_iCounter(0), m_iLimit(0) { }
    virtual ~Repeater() { }
    inline void onInitialize() { m_iCounter = 0; }
    // Number of successful child runs required; must be >= 1 for the
    // decorator to ever report BH_SUCCESS.
    inline void setLimit(unsigned int _limit) { m_iLimit = _limit; }
    Status update()
    {
        for (;;)
        {
            m_pChild->tick();
            // NOTE(review): a RUNNING child breaks out and falls through to
            // BH_INVALID -- confirm callers treat that as "still running".
            if (m_pChild->getStatus() == Status::BH_RUNNING) break;
            if (m_pChild->getStatus() == Status::BH_FAILURE) return Status::BH_FAILURE;
            if (++m_iCounter == m_iLimit) return Status::BH_SUCCESS;
            m_pChild->reset();
        }
        return Status::BH_INVALID;
    }
};
}
#endif /* Repeat_h */
|
def cube_volume(side_length):
    """Return the volume of a cube whose edges have length *side_length*."""
    return side_length * side_length * side_length
|
// Validates a collection of endpoint strings before it is used elsewhere.
public class EndpointValidator
{
    // Currently only guards against a null collection; per-endpoint rules
    // are still to be implemented.
    public void ValidateEndpoints(List<string> endpoints)
    {
        // Fail fast so later validation can assume a usable list.
        if (endpoints is null)
        {
            throw new ArgumentNullException(nameof(endpoints));
        }

        // Add validation logic for endpoints here
    }
}
|
package testsubjects.foo;
// Minimal base class (test subject); subclasses may override method().
public class BaseClass {
    // Package-private no-op hook.
    void method() {
    }
}
|
// RSA public key in PEM format, assembled from concatenated base64 lines.
// NOTE: the '<KEY>' segments are redacted placeholders -- this value is not
// a usable key until they are restored with the real base64 content.
export default {
  publicKey: '-----BEGIN PUBLIC KEY-----' +
    '<KEY>' +
    '<KEY>' +
    '<KEY>n' +
    '<KEY>' +
    '<KEY>' +
    '<KEY>' +
    'lwIDAQAB\n' +
    '-----END PUBLIC KEY-----'
}
|
<reponame>gabrieloandco/RiscV-Arqui1
from myhdl import *
from AND import *
import random
@block
def tbAND():
    """Testbench for the 32-bit bitwise AND block: drives random operands
    every 5 simulation time units and asserts the DUT output."""
    # 32-bit signals (modbv wraps modulo 2**32).
    data1 = Signal(modbv(0)[32:0])
    data2 = Signal(modbv(0)[32:0])
    dataout = Signal(modbv(0)[32:0])
    # Device under test; AND comes from the project-local AND module.
    dut = AND(data1,data2,dataout)
    interv = delay(5)
    @always(interv)
    def stim():
        data1.next = random.randrange(0, 2**32)
        data2.next = random.randrange(0, 2**32)
        # NOTE(review): this reads the *current* signal values right after
        # scheduling new ones via .next, so it appears to check the previous
        # cycle's result -- confirm against the AND block's timing.
        condicion = dataout == data1 & data2
        assert condicion, "ERROR"
    return dut, stim
# Instantiate the testbench, enable VCD waveform tracing, and run for
# 500 simulation time units.
test = tbAND()
test.config_sim(trace=True)
test.run_sim(500)
|
<filename>utils/generateMarkdown.js
// Lookup table mapping a license's display name (as chosen by the user in
// the CLI prompts) to a URL for its full text. Names are compared with
// strict equality in generateMarkdown, so they must match the prompt
// choices exactly -- fix any typo in both places at once.
const licenseArray = [
  {
    name: "Apache License 2.0",
    link: "https://www.apache.org/licenses/LICENSE-2.0.txt",
  },
  {
    name: "GNU General Public License v3.0",
    link: "https://www.gnu.org/licenses/gpl-3.0.en.html",
  },
  {
    name: "MIT License",
    link: "https://opensource.org/licenses/MIT",
  },
  {
    // NOTE(review): "T-Clause" looks like a typo for "2-Clause" (the link
    // points at BSD-2-Clause); confirm against the prompt choices.
    name: "BSD T-Clause 'Simplified' License",
    link: "https://opensource.org/licenses/BSD-2-Clause",
  },
  {
    name: "BSD 3-Clause 'New' or 'Revised' License",
    link: "https://spdx.org/licenses/BSD-3-Clause.html",
  },
  {
    name: "Creative Commons Zero v1.0 Universal",
    link: "https://choosealicense.com/licenses/cc0-1.0/",
  },
  {
    name: "Eclipse Public License 2.0",
    link: "https://www.eclipse.org/legal/epl-2.0/",
  },
  {
    // NOTE(review): name says v2.1 but the link is the AGPL v3.0 page --
    // one of the two is wrong; confirm which version the prompts offer.
    name: "GNU Affero General Public License v2.1",
    link: "https://www.gnu.org/licenses/agpl-3.0.en.html",
  },
  {
    name: "GNU General Public License v2.0",
    link: "https://www.gnu.org/licenses/old-licenses/gpl-2.0.en.html",
  },
  {
    name: "GNU Lesser General Public License v2.1",
    link: "https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html",
  },
  {
    name: "GNU Lesser General Public License v3.0",
    link: "https://www.gnu.org/licenses/lgpl-3.0.en.html",
  },
  {
    name: "Mozilla Public License 2.0",
    link: "https://www.mozilla.org/en-US/MPL/2.0/",
  },
  {
    // NOTE(review): "Unilicense" is a typo for "The Unlicense".
    name: "The Unilicense",
    link: "https://unlicense.org/",
  },
];
// function to generate markdown for README
// Render the full README text from the user's questionnaire answers.
function generateMarkdown(response) {
  // Percent-encode spaces so the license name forms a valid shields.io badge URL.
  const spaceTitle = response.license.replace(/ /g, "%20");

  // Resolve the chosen license name to its full-text URL (empty if unknown).
  let licenseLink = "";
  for (const entry of licenseArray) {
    if (response.license === entry.name) {
      licenseLink = entry.link;
    }
  }

  return `# ${response.title} ![badge](https://img.shields.io/badge/license-${spaceTitle}-brightgreen)
# Live Site
${response.url}
## Description
${response.description}
## Table of Contents
* [Installation](#installation)
* [Usage](#usage)
* [License](#license)
* [Credits](#credits)
* [Tests](#tests)
* [Questions](#questions)
## Installation
${response.installation}
## Usage
${response.usage}
## License
Copyright (c) [2020]
The license is ${response.license}.
Read more about it at ${licenseLink}.
## Credits
${response.credits}
## Tests
${response.tests}
## Questions
If you have any additional questions please contact me at ${response.email}.
GitHub: https://github.com/${response.github}
`;
}

module.exports = generateMarkdown;
|
import java.util.*;
/**
 * Console two-player tic-tac-toe. Players alternate entering row/column
 * coordinates until one completes a row, column, or diagonal, or the
 * board fills up.
 */
public class Main {
    // BUG FIX: the original created a new Scanner(System.in) on every turn;
    // one shared scanner is sufficient and avoids re-wrapping System.in.
    private static final Scanner INPUT = new Scanner(System.in);

    public static void main(String[] args) {
        char[][] board = new char[3][3];
        generateBoard(board);
        String player1 = "X";
        String player2 = "O";
        // Alternate turns until updateBoard reports the game is over.
        while(true) {
            if(!updateBoard(board, player1)) break;
            if(!updateBoard(board, player2)) break;
        }
    }

    /** Fill the board with '_' to mark every slot as empty. */
    public static void generateBoard(char[][] board) {
        for(int r = 0; r < board.length; r++) {
            for(int c = 0; c < board[0].length; c++) {
                board[r][c] = '_';
            }
        }
    }

    /**
     * Play one turn for {@code player}: read a free coordinate from stdin,
     * place the mark, and report whether the game should continue.
     *
     * @return false when the game ended (win or draw), true otherwise
     */
    public static boolean updateBoard(char[][] board, String player) {
        // BUG FIX: on a full board the input loop below could never find a
        // free slot and prompted forever; detect the draw first.
        if(isFull(board)) {
            System.out.println("It's a draw!");
            return false;
        }
        System.out.println("Player: "+ player +" turn!");
        int r, c;
        while(true) {
            System.out.print("Enter row: ");
            r = INPUT.nextInt();
            if(r >= 0 && r < board.length) {
                System.out.print("Enter col: ");
                c = INPUT.nextInt();
                if(c >= 0 && c < board[0].length) {
                    if(board[r][c] == '_') {
                        board[r][c] = player.charAt(0);
                        break;
                    } else {
                        System.out.println("Slot already taken!");
                    }
                }
            }
        }
        printBoard(board);
        if(isWin(board,player)) {
            System.out.println("Player "+ player +" wins!");
            return false;
        }
        return true;
    }

    /** True when no empty ('_') slots remain. */
    private static boolean isFull(char[][] board) {
        for(char[] row : board) {
            for(char slot : row) {
                if(slot == '_') return false;
            }
        }
        return true;
    }

    /** Print the board, one row per line, cells separated by spaces. */
    public static void printBoard(char[][] board) {
        for(int r = 0; r < board.length; r++) {
            for(int c = 0; c < board[0].length; c++) {
                System.out.print(board[r][c]+" ");
            }
            System.out.println();
        }
    }

    /**
     * Check whether {@code player} completed any row, column, or either
     * diagonal.
     */
    public static boolean isWin(char[][] board, String player) {
        char mark = player.charAt(0);
        int n = board.length;
        boolean mainDiag = true;
        // BUG FIX: the original only examined the main diagonal, so a win
        // on the anti-diagonal was never detected.
        boolean antiDiag = true;
        for(int i = 0; i < n; i++) {
            boolean rowWin = true, colWin = true;
            for(int j = 0; j < n; j++) {
                if(board[i][j] != mark) rowWin = false;
                if(board[j][i] != mark) colWin = false;
            }
            if(rowWin || colWin) return true;
            if(board[i][i] != mark) mainDiag = false;
            if(board[i][n - 1 - i] != mark) antiDiag = false;
        }
        return mainDiag || antiDiag;
    }
}
|
package utils.semanticActions;
import logic.Lexicon;
// Semantic action that appends the current character to the lexeme the
// lexer is building.
public class SA_AddChar implements SemanticAction{
    @Override
    public void action(Lexicon Lex) {
        // Delegate to the lexer's character accumulator.
        Lex.addCharacter();
    }
}
|
#!/bin/sh
# Container entrypoint: apply pending Django migrations, then replace this
# shell with the requested command so signals reach it directly (PID 1).
set -ex
# Print the env for debug purpose
env
# Migrate the database
./manage.py migrate
# Run the command
# BUG FIX: "$@" (quoted) preserves each argument as a single word; the
# original unquoted $@ re-split arguments that contain spaces.
exec "$@"
|
# Doesn't work. See https://code.google.com/p/gmpy/issues/detail?id=85
# conda-build supplies $PYTHON (build-env interpreter) and $PREFIX (install root).
$PYTHON setup.py install --prefix=$PREFIX
|
from sqlalchemy import Column, String, Integer
from sqlalchemy.ext.declarative import declarative_base
# Declarative base class that the generated model classes inherit from.
Base = declarative_base()
def generate_sqlalchemy_models(table_columns):
    """Render SQLAlchemy declarative model source code for a set of tables.

    Args:
        table_columns: Mapping of table name -> iterable of column names.

    Returns:
        str: Python source defining one ``Base`` subclass per table, with
        every column declared as ``Column(String(250))``. Empty string for
        an empty mapping.
    """
    parts = []
    for table_name, columns in table_columns.items():
        # e.g. "users" -> "UsersTable". NOTE: .capitalize() lowercases the
        # rest of the name, so "user_accounts" becomes "User_accountsTable".
        table_class_name = table_name.capitalize() + "Table"
        parts.append(f"class {table_class_name}(Base):\n")
        parts.append(f"    __tablename__ = '{table_name}'\n\n")
        for column_name in columns:
            parts.append(f"    {column_name} = Column(String(250))\n")
        parts.append("\n")
    # Join once instead of quadratic string concatenation in the loop.
    return "".join(parts)
|
<reponame>Remolten/ld33
var Entity = {};
Entity.prototype = {
init: function(id) {
this.id = id;
this.components = {};
},
getComponent: function(id) {
return this.components[id];
},
add: function() {
for (var i = 0; i < arguments.length; i++) {
this.components[arguments[i].id] = arguments[i];
}
},
remove: function(id) {
delete this.components[id];
}
};
|
(function() {
  // Central state for the fixed-timestep game loop.
  var MyGame = {
    stopMain: 0,                 // token from requestAnimationFrame (for cancelAnimationFrame)
    lastTick: performance.now(), // timestamp of the last simulated logic tick
    lastRender: 0,               // timestamp of the last render (set below)
    tickLength: 50               // ms per logic tick (20 ticks/second)
  };
  // BUG FIX: the object literal previously initialized
  // `lastRender: MyGame.lastTick`, reading MyGame while it was still
  // undefined (a TypeError); assign it after construction instead.
  MyGame.lastRender = MyGame.lastTick;

  // tFrame is a DOMHighResTimeStamp, accurate to 1/1000 of a ms
  function main(tFrame) {
    // BUG FIX: the original called requestAnimationFrame(main) twice per
    // frame (once bare, once assigned), doubling the number of scheduled
    // callbacks every frame. Schedule exactly once and keep the token so
    // window.cancelAnimationFrame(MyGame.stopMain) can stop the loop.
    MyGame.stopMain = window.requestAnimationFrame(main);

    // Run as many fixed logic ticks as wall-clock time has accumulated.
    var nextTick = MyGame.lastTick + MyGame.tickLength;
    var numTicks = 0;
    if (tFrame > nextTick) {
      var timeSinceTick = tFrame - MyGame.lastTick;
      numTicks = Math.floor(timeSinceTick / MyGame.tickLength);
    }
    queueUpdates(numTicks);
    render(tFrame);
    MyGame.lastRender = tFrame;
  }

  // Advance the simulation one fixed step per elapsed tick so game speed is
  // independent of the display frame rate.
  // NOTE(review): update() and render() are assumed to be defined elsewhere.
  function queueUpdates(numTicks) {
    for (var i = 0; i < numTicks; i++) {
      MyGame.lastTick = MyGame.lastTick + MyGame.tickLength;
      update(MyGame.lastTick);
    }
  }

  main(performance.now()); // Start the cycle
})();
|
#pragma once
#include <algorithm>
#include <cstddef>
#include "nifty/marray/marray.hxx"
#include "nifty/tools/runtime_check.hxx"
#include "nifty/tools/block_access.hxx"
#include "nifty/hdf5/hdf5_array.hxx"
namespace nifty{
namespace graph{
// Label volume backed by an HDF5 dataset: exposes its shape, the number of
// distinct labels, and sub-array read access for graph construction.
template<std::size_t DIM, class LABEL_TYPE>
class Hdf5Labels{
public:
    typedef tools::BlockStorage< LABEL_TYPE> BlockStorageType;
    typedef LABEL_TYPE LabelType;
    typedef const hdf5::Hdf5Array<LABEL_TYPE> Hdf5ArrayType;

    // Stores a reference to `labels` (the caller must keep it alive) and
    // caches its per-dimension shape.
    // NOTE(review): the initializer list order differs from the member
    // declaration order (shape_ is declared before labels_); members are
    // initialized in declaration order, which is harmless here but
    // triggers -Wreorder.
    Hdf5Labels(const Hdf5ArrayType & labels, const uint64_t numberOfLabels)
    :   labels_(labels),
        shape_(),
        numberOfLabels_(numberOfLabels)
    {
        for(std::size_t i=0; i<DIM; ++i)
            shape_[i] = labels_.shape(i);
    }
    // part of the API
    // NOTE(review): stored as int64_t although the constructor takes
    // uint64_t -- values above INT64_MAX would narrow.
    uint64_t numberOfLabels() const {
        return numberOfLabels_;
    }
    // Cached per-dimension extent of the label dataset.
    const array::StaticArray<int64_t, DIM> & shape()const{
        return shape_;
    }
    // Read the sub-volume [roiBeginCoord, roiEndCoord) into outArray after
    // checking that outArray has exactly the requested shape and the ROI
    // lies inside the dataset.
    template<
        class ROI_BEGIN_COORD,
        class ROI_END_COORD
    >
    void readSubarray(
        const ROI_BEGIN_COORD & roiBeginCoord,
        const ROI_END_COORD & roiEndCoord,
        marray::View<LABEL_TYPE> & outArray
    )const{
        for(auto d = 0 ; d<DIM; ++d){
            NIFTY_CHECK_OP(roiEndCoord[d] - roiBeginCoord[d],==,outArray.shape(d),"wrong shape");
            NIFTY_CHECK_OP(roiEndCoord[d] ,<=,labels_.shape()[d],"hubs");
        }
        //std::cout<<"read subarray "<<roiBeginCoord<<" "<<roiEndCoord<<"\n";
        labels_.readSubarray(roiBeginCoord.begin(), outArray);
    }
    // Direct access to the underlying HDF5 array.
    const hdf5::Hdf5Array<LABEL_TYPE> & hdf5Array()const{
        return labels_;
    }
private:
    array::StaticArray<int64_t, DIM> shape_;      // cached dataset shape
    const hdf5::Hdf5Array<LABEL_TYPE> & labels_;  // non-owning reference
    int64_t numberOfLabels_;
};
} // namespace graph
namespace tools{
// Free-function adaptor so generic code can read from Hdf5Labels through
// the same tools::readSubarray interface used for other label storages.
template<class LABEL_TYPE, std::size_t DIM, class COORD>
inline void readSubarray(
    const graph::Hdf5Labels<DIM, LABEL_TYPE> & labels,
    const COORD & beginCoord,
    const COORD & endCoord,
    marray::View<LABEL_TYPE> & subarray
){
    labels.readSubarray(beginCoord, endCoord, subarray);
}
}
} // namespace nifty
|
#!/bin/bash
# This script starts a local Docker container with created image.
# Find package.json inside project tree.
# This allows to call bash scripts within any folder inside project.
PROJECT_DIR=$(git rev-parse --show-toplevel)
if [ ! -f "${PROJECT_DIR}/package.json" ]; then
    echo "[E] Can't find '${PROJECT_DIR}/package.json'."
    echo " Check that you run this script inside git repo or init a new one in project root."
    # BUG FIX: the script previously continued after this error and tried to
    # start a container with an empty name/version.
    exit 1
fi
# Extract project name and version from package.json
PROJECT_NAME=$(cat "${PROJECT_DIR}/package.json" \
    | grep name \
    | head -1 \
    | awk -F: '{ print $2 }' \
    | sed 's/[",]//g' \
    | tr -d '[[:space:]]')
PROJECT_VERSION=$(cat "${PROJECT_DIR}/package.json" \
    | grep version \
    | head -1 \
    | awk -F: '{ print $2 }' \
    | sed 's/[",]//g' \
    | tr -d '[[:space:]]')
# First non-loopback IPv4 address of the host (used for logging only).
HOST_IP=`ifconfig | sed -En 's/127.0.0.1//;s/.*inet (addr:)?(([0-9]*\.){3}[0-9]*).*/\2/p' | head -n 1`
HOST_NAME="travis"
echo "[I] Starting a Docker container '${PROJECT_NAME}' (version '${PROJECT_VERSION}') from path '${PROJECT_DIR}'.."
echo "[I] Assigning parent host '${HOST_NAME}' with IP '${HOST_IP}'."
echo "${HOST_NAME}:${HOST_IP}"
# Run detached, mapping container port 8080 to host port 8080.
docker run -p 8080:8080 \
    -d --name ${PROJECT_NAME} \
    "${PROJECT_NAME}:${PROJECT_VERSION}"
|
#!/bin/bash
##
# Remote Installer script for MirrorBot 1.0
# Copyright (c) 2011-2014 Joseph Huckaby and PixlCore.com
# Released under the MIT License: http://opensource.org/licenses/MIT
#
# To install or upgrade, issue this command as root:
#
# curl -L -s "http://pixlcore.com/software/mirrorbot/install-latest-_BRANCH_.txt" | bash
#
# Or, if you don't have curl, you can use wget:
#
# wget -O - "http://pixlcore.com/software/mirrorbot/install-latest-_BRANCH_.txt" | bash
##
# NOTE: _BRANCH_ is a template placeholder substituted when this installer is published.
SIMPLEBOT_TARBALL="latest-_BRANCH_.tar.gz"
# Installing system packages and writing to /opt requires root.
if [[ $EUID -ne 0 ]]; then
    echo "ERROR: The MirrorBot remote installer script must be run as root." 1>&2
    exit 1
fi
echo ""
echo "Installing latest _BRANCH_ MirrorBot build..."
echo ""
# Stop services, if they are running
/etc/init.d/mirrorbotd stop >/dev/null 2>&1
# Detect the package manager: yum (RHEL family) or apt-get (Debian family).
if which yum >/dev/null 2>&1 ; then
    # Linux prereq install
    yum -y install perl wget gzip zip gcc gcc-c++ libstdc++-devel pkgconfig curl make openssl openssl-devel openssl-perl perl-libwww-perl perl-Time-HiRes perl-JSON perl-ExtUtils-MakeMaker perl-TimeDate perl-Test-Simple || exit 1
else
    if which apt-get >/dev/null 2>&1 ; then
        # Ubuntu prereq install
        apt-get -y install perl wget gzip zip build-essential libssl-dev pkg-config libwww-perl libjson-perl || exit 1
    else
        echo ""
        echo "ERROR: This server is not supported by the MirrorBot auto-installer, as it does not have 'yum' nor 'apt-get'."
        echo "Please see the manual installation instructions at: http://pixlcore.com/mirrorbot/"
        echo ""
        exit 1
    fi
fi
# Ensure the cpanminus Perl module installer is available.
if which cpanm >/dev/null 2>&1 ; then
    echo "cpanm is already installed, good."
else
    # --notest skips module test suites; generous timeout for slow hosts.
    export PERL_CPANM_OPT="--notest --configure-timeout=3600"
    if which curl >/dev/null 2>&1 ; then
        curl -L http://cpanmin.us | perl - App::cpanminus
    else
        wget -O - http://cpanmin.us | perl - App::cpanminus
    fi
fi
# Download and unpack the release tarball into /opt, then run its installer.
mkdir -p /opt
cd /opt
if which curl >/dev/null 2>&1 ; then
    curl -L -O "http://pixlcore.com/software/mirrorbot/$SIMPLEBOT_TARBALL" || exit 1
else
    wget "http://pixlcore.com/software/mirrorbot/$SIMPLEBOT_TARBALL" || exit 1
fi
tar zxf $SIMPLEBOT_TARBALL || exit 1
rm -f $SIMPLEBOT_TARBALL
chmod 775 /opt/mirrorbot/install/*
/opt/mirrorbot/install/install.pl || exit 1
|
// Sum all sales figures and report the total (7 + 10 + 15 = 32).
let sales = [7, 10, 15];
let total = sales.reduce(function (runningTotal, sale) {
  return runningTotal + sale;
}, 0);
console.log(total);
|
// Three ways to get a "length 3" array:
// Sparse: setting .length creates no index properties.
var arr1 = [];
arr1.length = 3;
console.log(arr1);
// Also sparse: Array(3) allocates length only, same shape as arr1.
var arr2 = new Array(3);
console.log(arr2);
// Dense: three real elements whose value is undefined, so this one prints
// differently from the sparse arrays in most consoles.
var arr3 = [undefined, undefined, undefined];
console.log(arr3);
|
// Replace the inline style of the #pageContent element wholesale.
function applyStyles(style) {
  let pageContent = document.getElementById("pageContent");
  // cssText overwrites any existing inline styles on the element.
  pageContent.style.cssText = style;
}

// BUG FIX: `style` was assigned without a declaration, creating an implicit
// global (and a ReferenceError in strict mode / ES modules).
const style = "background-color: #f2f2f2; color: #000; font-family: Arial; font-size: 16px;"
applyStyles(style);
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.