text stringlengths 1 1.05M |
|---|
// Global stylesheet imports: CSS reset first, then app styles, then icon font.
import '../node_modules/normalize.css/normalize.css';
import './stylesheets/custom.css';
import './stylesheets/font-awesome.min.css';

// App's UI initialization
console.log('loaded!');
#!/bin/bash
set -euxo pipefail

# Absolute path to this script's directory (works from any CWD).
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"

# shellcheck disable=SC1091
source "$DIR"/_common.sh

# Build Docker images for Next.js-based apps; exec replaces this shell so the
# exit status of _docker.sh becomes the script's exit status.
exec "$DIR"/_docker.sh Dockerfile output-next
|
/**
 * Autogenerated code by SdkModelGenerator.
 * Do not edit. Any modification on this file will be removed automatically after project build
 *
 */
package test.backend.www.model.hotelbeds.basic.model;

import java.util.List;
import javax.validation.Valid;
import javax.validation.constraints.NotNull;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.ToString;

/**
 * One room entry of a Hotelbeds booking: a mandatory rate key plus the list of
 * occupants (paxes). Null fields are omitted from the JSON representation.
 */
@JsonInclude(Include.NON_NULL)
@ToString
@NoArgsConstructor
@Data
public class BookingRoom {
    // Required rate identifier; exposed in JSON under the field name "rateKey".
    @NotNull
    @JsonProperty
    private String rateKey;

    // Occupants of this room; @Valid cascades bean validation into each Pax.
    @Valid
    private List<Pax> paxes;
}
<filename>lib/assets/javascripts/builder/editor/layers/layer-content-views/legend/legends-view.js
var Backbone = require('backbone');
var _ = require('underscore');
var CoreView = require('backbone/core-view');
var LegendColorView = require('./color/legend-color-view');
var LegendSizeView = require('./size/legend-size-view');
var LegendSizeTypes = require('./size/legend-size-types');
var createTextLabelsTabPane = require('builder/components/tab-pane/create-text-labels-tab-pane');
var TabPaneTemplate = require('builder/editor/tab-pane-submenu.tpl');
var sqlErrorTemplate = require('./legend-content-sql-error.tpl');
var actionErrorTemplate = require('builder/editor/layers/sql-error-action.tpl');
var legendNoGeometryTemplate = require('./legend-no-geometry-template.tpl');
var checkAndBuildOpts = require('builder/helpers/required-opts');
var InfoboxModel = require('builder/components/infobox/infobox-model');
// Options the caller must provide; checkAndBuildOpts copies each onto `this`
// with a leading underscore (e.g. opts.userModel -> this._userModel).
var REQUIRED_OPTS = [
  'mapDefinitionModel',
  'userActions',
  'layerDefinitionModel',
  'queryGeometryModel',
  'legendDefinitionsCollection',
  'editorModel',
  'userModel',
  'configModel',
  'modals',
  'layerContentModel'
];

// Identifiers for the two legend-editor tabs.
var LEGEND_TYPES = {
  color: 'color',
  size: 'size'
};

/**
 * Legends editor view. Renders a submenu tab pane with a "color" tab and a
 * "size" tab, or an SQL-error placeholder when the layer content model is in
 * an errored state.
 */
module.exports = CoreView.extend({
  initialize: function (opts) {
    checkAndBuildOpts(opts, REQUIRED_OPTS, this);
    this._initModels();
    this._initBinds();
  },

  render: function () {
    this.clearSubViews();
    this.$el.empty();
    if (this._isErrored()) {
      this._renderError();
    } else {
      this._initViews();
    }
    return this;
  },

  _initModels: function () {
    // Infobox starts in 'layer-hidden' state when the layer is not visible.
    this._infoboxModel = new InfoboxModel({
      state: this._isLayerHidden() ? 'layer-hidden' : ''
    });
    // Overlay shown while the layer is hidden.
    this._overlayModel = new Backbone.Model({
      visible: this._isLayerHidden()
    });
  },

  _initBinds: function () {
    this.listenTo(this._editorModel, 'change:edition', this._changeStyle);
  },

  _isLayerHidden: function () {
    return this._layerDefinitionModel.get('visible') === false;
  },

  // NOTE(review): not bound in _initBinds — presumably wired by a caller or
  // left over from a refactor; confirm before relying on it.
  _onQueryChanged: function () {
    if (this._isErrored()) {
      this.render();
    }
  },

  _isErrored: function () {
    return this._layerContentModel.isErrored();
  },

  _renderError: function () {
    this.$el.append(
      sqlErrorTemplate({
        body: _t('editor.error-query.body', {
          action: actionErrorTemplate({
            label: _t('editor.error-query.label')
          })
        })
      })
    );
  },

  // NOTE(review): not called within this view — confirm whether a subclass
  // or caller uses it.
  _renderEmptyGeometry: function () {
    this.$el.append(legendNoGeometryTemplate());
  },

  _initViews: function () {
    var tabPaneTabs = [this._getColorTabPaneOptions(), this._getSizeTabPaneOptions()];
    var tabPaneOptions = {
      tabPaneOptions: {
        template: TabPaneTemplate,
        tabPaneItemOptions: {
          tagName: 'li',
          klassName: 'CDB-NavSubmenu-item'
        }
      },
      tabPaneItemLabelOptions: {
        tagName: 'button',
        className: 'CDB-NavSubmenu-link u-upperCase'
      }
    };
    this._layerTabPaneView = createTextLabelsTabPane(tabPaneTabs, tabPaneOptions);
    this.$el.append(this._layerTabPaneView.render().$el);
    this.addView(this._layerTabPaneView);
    // Sync the submenu style with the current edition state.
    this._changeStyle(this._editorModel);
  },

  // Lazy tab descriptor: the content view is only built when the tab opens.
  _getColorTabPaneOptions: function () {
    var self = this;
    return {
      name: LEGEND_TYPES.color,
      label: _t('editor.legend.menu-tab-pane-labels.color'),
      createContentView: function () {
        return new LegendColorView({
          className: 'Editor-content js-type',
          mapDefinitionModel: self._mapDefinitionModel,
          editorModel: self._editorModel,
          userActions: self._userActions,
          layerContentModel: self._layerContentModel,
          layerDefinitionModel: self._layerDefinitionModel,
          legendDefinitionsCollection: self._legendDefinitionsCollection,
          type: LEGEND_TYPES.color,
          userModel: self._userModel,
          configModel: self._configModel,
          modals: self._modals,
          overlayModel: self._overlayModel,
          infoboxModel: self._infoboxModel
        });
      }
    };
  },

  // Same as the color tab, but disabled (with tooltip) when the current style
  // is incompatible with size legends.
  _getSizeTabPaneOptions: function () {
    var legendType = LEGEND_TYPES.size;
    var disabled = !this._isStyleCompatible(legendType);
    var self = this;
    return {
      name: legendType,
      label: _t('editor.legend.menu-tab-pane-labels.size'),
      disabled: disabled,
      tooltip: disabled && _t('editor.legend.menu-tab-pane-labels.size-disabled'),
      createContentView: function () {
        return new LegendSizeView({
          className: 'Editor-content js-type',
          mapDefinitionModel: self._mapDefinitionModel,
          editorModel: self._editorModel,
          userActions: self._userActions,
          userModel: self._userModel,
          configModel: self._configModel,
          modals: self._modals,
          layerContentModel: self._layerContentModel,
          layerDefinitionModel: self._layerDefinitionModel,
          legendDefinitionsCollection: self._legendDefinitionsCollection,
          type: legendType,
          overlayModel: self._overlayModel,
          infoboxModel: self._infoboxModel
        });
      }
    };
  },

  // A size legend is available if at least one registered size type declares
  // itself compatible with the layer's current style model.
  _isStyleCompatible: function (legendType) {
    if (legendType === LEGEND_TYPES.size) {
      var isCompatible = _.some(LegendSizeTypes, function (type) {
        var isStyleCompatible = type.isStyleCompatible;
        if (isStyleCompatible) {
          return isStyleCompatible(this._layerDefinitionModel.styleModel);
        }
        return false;
      }, this);
      return isCompatible;
    }
    return true;
  },

  _changeStyle: function (m) {
    this._layerTabPaneView && this._layerTabPaneView.changeStyleMenu(m);
  },

  // NOTE(review): presumably invoked from the layer-hidden infobox action —
  // confirm against the infobox wiring.
  _showHiddenLayer: function () {
    var savingOptions = {
      shouldPreserveAutoStyle: true
    };
    this._layerDefinitionModel.toggleVisible();
    this._userActions.saveLayer(this._layerDefinitionModel, savingOptions);
  }
});
|
#!/bin/bash
# Run the MRP PSD pre-processing test for one subfolder of the mrp_data set.

# source environment (provides ROOT_DIR)
. ./env.sh

# Directory of this script (currently unused; kept for parity with siblings).
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# Fail with a usage message instead of silently building paths from an empty
# name when no argument is given.
sub_name=${1:?usage: $0 <sub_name>}

BUILD_DIR=$ROOT_DIR/Expt/data/mrp_data/$sub_name/

### TRAIN for task1
pargs="
--suffix=.mrp_psd \
--companion_suffix=.mrp_conllu_pre_processed \
--build_folder=${BUILD_DIR} \
"

pushd "$ROOT_DIR"
# The old '2>&1 &> file' was redundant ('&>' already captures both streams);
# use the portable '> file 2>&1' form. $pargs is intentionally unquoted so it
# word-splits into separate arguments.
python utility/psd_utils/test_mrp_psd.py $pargs > "$ROOT_DIR/Expt/pre_logs/test_mrp_psd_${sub_name}.log" 2>&1
popd
|
#!/bin/bash
# Install Prometheus v2.8.1 on the Grafana server and register it as a
# systemd service. (The shebang must be the first line of the file; it
# previously sat below a comment and was ignored.)

sudo wget https://github.com/prometheus/prometheus/releases/download/v2.8.1/prometheus-2.8.1.linux-amd64.tar.gz

# Dedicated system user: no home directory, no login shell.
sudo useradd --no-create-home --shell /bin/false prometheus

sudo mkdir /etc/prometheus
sudo mkdir /var/lib/prometheus
sudo chown prometheus:prometheus /etc/prometheus
sudo chown prometheus:prometheus /var/lib/prometheus

sudo tar -xvzf prometheus-2.8.1.linux-amd64.tar.gz
sudo mv prometheus-2.8.1.linux-amd64 prometheuspackage
sudo cp prometheuspackage/prometheus /usr/local/bin/
sudo cp prometheuspackage/promtool /usr/local/bin/
sudo chown prometheus:prometheus /usr/local/bin/prometheus
sudo chown prometheus:prometheus /usr/local/bin/promtool
sudo cp -r prometheuspackage/consoles /etc/prometheus
sudo cp -r prometheuspackage/console_libraries /etc/prometheus
sudo chown -R prometheus:prometheus /etc/prometheus/consoles
sudo chown -R prometheus:prometheus /etc/prometheus/console_libraries

# Write (not append) the scrape config so re-running the script does not
# accumulate duplicate YAML documents.
echo "global:
  scrape_interval: 5s

scrape_configs:
  - job_name: 'prometheus_master'
    scrape_interval: 5s
    static_configs:
      - targets: ['localhost:9090']" | sudo tee /etc/prometheus/prometheus.yml
sudo chown prometheus:prometheus /etc/prometheus/prometheus.yml

# Create the systemd unit. (A bare '/etc/systemd/system/prometheus.service'
# line used to be executed as a command here and failed; tee creates the file.)
echo "[Unit]
Description=Prometheus
Wants=network-online.target
After=network-online.target

[Service]
User=prometheus
Group=prometheus
Type=simple
ExecStart=/usr/local/bin/prometheus \
    --config.file /etc/prometheus/prometheus.yml \
    --storage.tsdb.path /var/lib/prometheus/ \
    --web.console.templates=/etc/prometheus/consoles \
    --web.console.libraries=/etc/prometheus/console_libraries

[Install]
WantedBy=multi-user.target" | sudo tee /etc/systemd/system/prometheus.service

sudo systemctl daemon-reload
sudo systemctl enable prometheus
sudo systemctl start prometheus

# Example scrape target for a node_exporter host (add under scrape_configs):
#- job_name: 'node_exporter_centos'
#  scrape_interval: 5s
#  static_configs:
#    - targets: ['10.94.10.209:9100']
|
<reponame>Project-ITSOL-Selling/front-end-prime
import { Component, OnInit } from '@angular/core';
import {FormBuilder, FormGroup} from '@angular/forms';
import {NgbModal} from '@ng-bootstrap/ng-bootstrap';
import {NgxSpinnerService} from 'ngx-spinner';
import {DEFAULT_MODAL_OPTIONS} from '../../@core/app-config';
import {BillOrderService} from '../../@core/services/_service/bill-order.service';
import {ActionBillOrderComponent} from './action-bill-order/action-bill-order/action-bill-order.component';
import {DeleteBillOrderComponent} from './delete-bill-order/delete-bill-order.component';
@Component({
  selector: 'ngx-bill-order',
  templateUrl: './bill-order.component.html',
  styleUrls: ['./bill-order.component.scss'],
})
export class BillOrderComponent implements OnInit {
  // Search form — currently unused, initForm() is commented out below.
  formSearch: FormGroup;
  // Rows shown in the bill-order table.
  listBillOrder: any[] = [];
  // Total record count reported by the backend (res.recordsTotal).
  total: any;
  // NOTE(review): lstDel / lstDataSearch are never read or written here —
  // confirm whether the template uses them before removing.
  lstDel: any[] = [];
  lstDataSearch: any[] = [];

  constructor(
    private modal: NgbModal,
    private fb: FormBuilder,
    private service: BillOrderService,
    private spinner: NgxSpinnerService,
  ) {
  }

  ngOnInit(): void {
    this.processSearchData();
  }

  // initForm() {
  //   this.formSearch = this.fb.group({
  //     price: [''],
  //   });
  // }

  // getDataSearch() {
  //   this.service.getListBillOrder().subscribe(res => {
  //     this.lstDataSearch = res.data;
  //   });
  // }
  //
  // processSearch(event?: any) {
  //   // @ts-ignore
  //   this.processSearchData(event);
  // }

  /** Opens the edit modal for a bill order and refreshes the list on success. */
  processEdit(item: any) {
    const modalRef = this.modal.open(ActionBillOrderComponent, DEFAULT_MODAL_OPTIONS);
    modalRef.componentInstance.action = false;
    modalRef.componentInstance.billOrder = item;
    // The dismissal handler mirrors processSave/processDelete; without it a
    // dismissed modal produced an unhandled promise rejection.
    modalRef.result.then(value => {
      if (value === 'success') {
        this.processSearchData();
      }
    }, (reason) => {
    });
  }

  /** Loads the bill-order list; the spinner is hidden on success AND on error. */
  processSearchData(event?: any) {
    this.spinner.show();
    this.service.getListBillOrder().subscribe(res => {
      this.spinner.hide();
      this.listBillOrder = res.data;
      this.total = res.recordsTotal;
    }, () => {
      // Previously the spinner stayed visible forever when the request
      // failed, blocking the whole page.
      this.spinner.hide();
    });
  }

  /** Opens the create modal and refreshes the list on success. */
  processSave() {
    const modalRef = this.modal.open(ActionBillOrderComponent, DEFAULT_MODAL_OPTIONS);
    modalRef.componentInstance.action = true;
    modalRef.result.then(value => {
      if (value === 'success') {
        this.processSearchData();
      }
    }, (reason) => {
    });
  }

  /** Opens the delete-confirmation modal and refreshes the list on success. */
  processDelete(id: any) {
    const modalRef = this.modal.open(DeleteBillOrderComponent, DEFAULT_MODAL_OPTIONS);
    modalRef.componentInstance.idBillOrder = id;
    modalRef.result.then(value => {
      if (value === 'success') {
        this.processSearchData();
      }
    }, (reason) => {
    });
  }

  close() {
    // NOTE(review): NgbModal exposes dismissAll(), not close(); the @ts-ignore
    // hides that — confirm whether this method is ever invoked.
    // @ts-ignore
    this.modal.close();
  }
}
|
// Copyright (c) 2021-2022 Uber Technologies Inc.
//
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
import {
STATUS_ALL,
STATUS_CANCELED,
STATUS_CLOSED,
STATUS_COMPLETED,
STATUS_CONTINUED_AS_NEW,
STATUS_FAILED,
STATUS_OPEN,
STATUS_TERMINATED,
STATUS_TIMED_OUT,
} from '../constants';
import getStatus from './get-status';
describe('getStatus', () => {
  describe('when calling getStatus with status = STATUS_ALL', () => {
    const status = STATUS_ALL;
    it('should return { value: STATUS_ALL, label: "All" }.', () => {
      const output = getStatus(status);
      expect(output.label).toEqual('All');
      expect(output.value).toEqual(STATUS_ALL);
    });
  });
  describe('when calling getStatus with status = STATUS_CANCELED', () => {
    const status = STATUS_CANCELED;
    it('should return { value: STATUS_CANCELED, label: "Cancelled" }.', () => {
      const output = getStatus(status);
      expect(output.label).toEqual('Cancelled');
      expect(output.value).toEqual(STATUS_CANCELED);
    });
  });
  describe('when calling getStatus with status = STATUS_CLOSED', () => {
    const status = STATUS_CLOSED;
    // Description fixed: it previously claimed STATUS_CANCELED while
    // asserting STATUS_CLOSED.
    it('should return { value: STATUS_CLOSED, label: "Closed" }.', () => {
      const output = getStatus(status);
      expect(output.label).toEqual('Closed');
      expect(output.value).toEqual(STATUS_CLOSED);
    });
  });
  describe('when calling getStatus with status = STATUS_COMPLETED', () => {
    const status = STATUS_COMPLETED;
    it('should return { value: STATUS_COMPLETED, label: "Completed" }.', () => {
      const output = getStatus(status);
      expect(output.label).toEqual('Completed');
      expect(output.value).toEqual(STATUS_COMPLETED);
    });
  });
  describe('when calling getStatus with status = STATUS_CONTINUED_AS_NEW', () => {
    const status = STATUS_CONTINUED_AS_NEW;
    it('should return { value: STATUS_CONTINUED_AS_NEW, label: "Continued As New" }.', () => {
      const output = getStatus(status);
      expect(output.label).toEqual('Continued As New');
      expect(output.value).toEqual(STATUS_CONTINUED_AS_NEW);
    });
  });
  describe('when calling getStatus with status = STATUS_FAILED', () => {
    const status = STATUS_FAILED;
    it('should return { value: STATUS_FAILED, label: "Failed" }.', () => {
      const output = getStatus(status);
      expect(output.label).toEqual('Failed');
      expect(output.value).toEqual(STATUS_FAILED);
    });
  });
  describe('when calling getStatus with status = STATUS_OPEN', () => {
    const status = STATUS_OPEN;
    it('should return { value: STATUS_OPEN, label: "Open" }.', () => {
      const output = getStatus(status);
      expect(output.label).toEqual('Open');
      expect(output.value).toEqual(STATUS_OPEN);
    });
  });
  describe('when calling getStatus with status = STATUS_TERMINATED', () => {
    const status = STATUS_TERMINATED;
    it('should return { value: STATUS_TERMINATED, label: "Terminated" }.', () => {
      const output = getStatus(status);
      expect(output.label).toEqual('Terminated');
      expect(output.value).toEqual(STATUS_TERMINATED);
    });
  });
  describe('when calling getStatus with status = STATUS_TIMED_OUT', () => {
    const status = STATUS_TIMED_OUT;
    it('should return { value: STATUS_TIMED_OUT, label: "Timed Out" }.', () => {
      const output = getStatus(status);
      expect(output.label).toEqual('Timed Out');
      expect(output.value).toEqual(STATUS_TIMED_OUT);
    });
  });
  describe('when calling getStatus with status = ""', () => {
    const status = '';
    it('should return { value: STATUS_ALL, label: "All" }.', () => {
      const output = getStatus(status);
      expect(output.label).toEqual('All');
      expect(output.value).toEqual(STATUS_ALL);
    });
  });
  describe('when calling getStatus with status = "open"', () => {
    const status = 'open';
    it('should return { value: STATUS_OPEN, label: "Open" }.', () => {
      const output = getStatus(status);
      expect(output.label).toEqual('Open');
      expect(output.value).toEqual(STATUS_OPEN);
    });
  });
});
|
#!/bin/bash
# Build a source distribution and upload it to PyPI with twine.
set -e

# Install the package
pip install twine

# Write the PyPI credentials file. (These lines were previously bare in the
# script, so bash tried to execute '[distutils]', 'username=...', etc. as
# commands. Edit the placeholders before running.)
cat > ~/.pypirc <<'EOF'
[distutils]
index-servers= pypi

[pypi]
repository = https://upload.pypi.org/legacy/
username=YOUR_USERNAME
password=YOUR_PASSWORD
EOF

# Build a source code dist
python3 setup.py sdist
# Check if there are errors in dist files
twine check dist/*
# Upload project to PyPi
twine upload -r pypi dist/*
|
#!/bin/bash
# Provision a CentOS build container: toolchain, Bazel, clang/LLVM, tcpdump.
set -e

# Note: rh-git218 is needed to run `git -C` in docs build process.
yum install -y centos-release-scl epel-release
yum update -y
yum install -y devtoolset-7-gcc devtoolset-7-gcc-c++ devtoolset-7-binutils java-1.8.0-openjdk-headless rsync \
rh-git218 wget unzip which make cmake3 patch ninja-build devtoolset-7-libatomic-devel openssl python27 \
libtool autoconf tcpdump

ln -s /usr/bin/cmake3 /usr/bin/cmake
ln -s /usr/bin/ninja-build /usr/bin/ninja

# Latest Bazel release tag, parsed from the GitHub API. The inline snippet is
# Python 2 print syntax — relies on the python27 installed above.
BAZEL_VERSION="$(curl -s https://api.github.com/repos/bazelbuild/bazel/releases/latest |
python -c "import json, sys; print json.load(sys.stdin)['tag_name']")"
BAZEL_INSTALLER="bazel-${BAZEL_VERSION}-installer-linux-x86_64.sh"
curl -OL "https://github.com/bazelbuild/bazel/releases/download/${BAZEL_VERSION}/${BAZEL_INSTALLER}"
chmod u+x "./${BAZEL_INSTALLER}"
"./${BAZEL_INSTALLER}"
rm "./${BAZEL_INSTALLER}"

# SLES 11 has older glibc than CentOS 7, so pre-built binary for it works on CentOS 7
LLVM_VERSION=7.0.1
LLVM_RELEASE="clang+llvm-${LLVM_VERSION}-x86_64-linux-sles11.3"
curl -OL "https://releases.llvm.org/${LLVM_VERSION}/${LLVM_RELEASE}.tar.xz"
tar Jxf "${LLVM_RELEASE}.tar.xz"
mv "./${LLVM_RELEASE}" /opt/llvm
rm "./${LLVM_RELEASE}.tar.xz"

# httpd24 is required by rh-git218
echo "/opt/rh/httpd24/root/usr/lib64" > /etc/ld.so.conf.d/httpd24.conf
echo "/opt/llvm/lib" > /etc/ld.so.conf.d/llvm.conf
ldconfig

# Setup tcpdump for non-root.
groupadd pcap
chgrp pcap /usr/sbin/tcpdump
chmod 750 /usr/sbin/tcpdump
setcap cap_net_raw,cap_net_admin=eip /usr/sbin/tcpdump

./build_container_common.sh
|
import os
import argparse
import numpy
import pyro_models
from pyro_models.utils import json_file_to_mem_format
from utils import save, load
from model_constants import model_constants
import pyro.poutine as poutine
from pyro.infer.mcmc import MCMC, NUTS
from pyro.infer.abstract_infer import TracePredictive
import torch
import pystan
def pyro_nuts(name):
    """Run (or reload a cached) NUTS chain for a pyro_models model, then
    estimate posterior-predictive MSE against the observed data.

    :param name: model identifier understood by pyro_models.load()
    :returns: list of 10 MSE values (one per TracePredictive repetition)
    """
    models = pyro_models.load()
    md = models[name]
    data = pyro_models.data(md)
    model = md['model']
    # will need to block out discrete vars for cjs models
    # NOTE: dont jit with minibatches
    nuts_kernel = NUTS(model, jit_compile=True, max_tree_depth=5)
    posterior = MCMC(nuts_kernel, num_samples=2000, warmup_steps=2000, disable_progbar=True)
    file_name = '{}_nuts_pyro.pkl'.format(name)
    if os.path.exists(file_name):
        print('Using cached model {}'.format(file_name))
        # Restore the cached traces by copying the pickled attributes back onto
        # the fresh MCMC object instead of re-running the sampler.
        # NOTE(review): this pokes MCMC internals (_idx_by_chain, _categorical)
        # and is tied to this pyro version's layout — confirm on upgrade.
        mcmc_md = load(file_name)
        for i, k in mcmc_md.items():
            setattr(posterior, i, k)
    else:
        posterior = posterior.run(data, {})
        mcmc_md = {'exec_traces': posterior.exec_traces,
                   'log_weights': posterior.log_weights,
                   'chain_ids': posterior.chain_ids,
                   '_idx_by_chain': posterior._idx_by_chain,
                   '_categorical': posterior._categorical
                   }
        print('Saving mcmc traces to {}'.format(file_name))
        save(mcmc_md, file_name)
    # Observed-site name comes from model_constants when present, else 'y'.
    obs = model_constants[name]['sitename'] if 'sitename' in model_constants[name] else 'y'
    # set obs to None to sample from the posterior predictive
    true_samples = data[obs]
    data[obs] = None
    mses = []
    for i in range(10):
        posterior_pred = TracePredictive(model, posterior, num_samples=1000).run(data, {})
        marginal = posterior_pred.marginal([obs])
        pred_samples = marginal.support(flatten=True)[obs]
        mse = (pred_samples - true_samples).pow(2).mean()
        print(mse)
        mses.append(mse)
    return mses
def stan_nuts(file_name, data, src_code, n_samples=2000, n_chains=10):
    """
    Runs the No U-Turn Sampler using PyStan.

    :param file_name: pickle path used to cache the compiled Stan model
    :param data: model data
    :param src_code: Stan source code as a string
    :param n_samples: number of iterations per chain (includes 1000 warmup)
    :param n_chains: number of MCMC chains
    :returns: the fit summary object (also saved to '<file_name[:-3]>out')
    """
    if os.path.exists(file_name):
        print('Using cached model {}'.format(file_name))
        model = load(file_name)
    else:
        model = pystan.StanModel(model_code=src_code)
        print('Caching compiled stan model to: {}'.format(file_name))
        save(model, file_name)
    fit = model.sampling(data=data, iter=n_samples, warmup=1000, chains=n_chains, algorithm="NUTS")
    print(fit.extract())
    # '.pkl' -> '.out' by trimming the 3-char extension body.
    print('saving summary object to: {}'.format(file_name[:-3] + 'out'))
    save(fit.summary(), file_name[:-3] + 'out')
    return fit.summary()
def main(args):
    """Dispatch a NUTS benchmark run to the requested backend (pyro or stan).

    args.name is '<book>.<model>'; data is loaded from the pyro_models package
    and results are pickled under args.results_dir.
    NOTE(review): args.num_samples is parsed but never forwarded here —
    confirm whether stan_nuts should receive it.
    """
    book, model_name = args.name.split('.')
    data_path = '{}/{}/{}.py.json'.format(pyro_models.__path__[0], book, model_name)
    src_path = 'stan_models/' + model_name + '.stan'
    file_name = '{}.{}.pkl'.format(book, model_name)
    data = json_file_to_mem_format(data_path)

    if args.backend == 'pyro':
        mses = pyro_nuts(args.name)
        stacked = torch.tensor(mses)
        summary = [stacked.mean().item(), stacked.std().item()]
        save(summary, args.results_dir + args.prefix + '_nuts_' + file_name)
        print(model_name)
        print(summary)
        return mses

    with open(src_path, "r") as handle:
        stan_source = handle.read()
    return stan_nuts(file_name, data, stan_source)
if __name__ == "__main__":
# assumes all pyro models and json data are in pyro_models/
# assumes all stan models are in stan_models
parser = argparse.ArgumentParser()
parser.add_argument('-m', '--name', type=str, default='arm.wells_dist', help="name of model")
parser.add_argument('-r', '--results-dir', type=str, default='./', help="directory in which to save results")
parser.add_argument('-n', '--num-samples', type=int, default=2000, help="num samples")
parser.add_argument('--prefix', type=str, default='')
parser.add_argument('--backend', type=str, default="pyro", help="{pyro, stan}")
args = parser.parse_args()
mses = main(args)
|
<filename>app/src/main/java/sma/rhythmtapper/framework/Sound.java
package sma.rhythmtapper.framework;

/**
 * Handle to a loaded sound effect.
 */
public interface Sound {
    /** Starts playback at the given volume. */
    void play(float volume);

    /** Releases the resources held by this sound. */
    void dispose();

    /** Stops playback. */
    void stop();
}
|
'use strict'

// Package entry point: re-export the OpenRefine client implementation.
module.exports = {
  OpenRefine: require('./lib/openrefine')
}
|
#!/usr/bin/env bash
# SPDK autotest driver: prepares the machine, runs the test suites selected by
# SPDK_TEST_* environment flags, and collects lcov coverage when available.

rootdir=$(readlink -f $(dirname $0))
source "$rootdir/test/common/autotest_common.sh"
source "$rootdir/test/nvmf/common.sh"

set -xe

if [ $EUID -ne 0 ]; then
  echo "$0 must be run as root"
  exit 1
fi

if [ $(uname -s) = Linux ]; then
  # set core_pattern to a known value to avoid ABRT, systemd-coredump, etc.
  echo "core" > /proc/sys/kernel/core_pattern
fi

# Any signal/exit before the trap is cleared below counts as a failure.
trap "process_core; autotest_cleanup; exit 1" SIGINT SIGTERM EXIT

timing_enter autotest

create_test_list

src=$(readlink -f $(dirname $0))
out=$PWD
cd $src

./scripts/setup.sh status

if hash lcov; then
  # setup output dir for unittest.sh
  export UT_COVERAGE=$out/ut_coverage
  export LCOV_OPTS="
--rc lcov_branch_coverage=1
--rc lcov_function_coverage=1
--rc genhtml_branch_coverage=1
--rc genhtml_function_coverage=1
--rc genhtml_legend=1
--rc geninfo_all_blocks=1
"
  export LCOV="lcov $LCOV_OPTS --no-external"
  # Print lcov version to log
  $LCOV -v
  # zero out coverage data
  $LCOV -q -c -i -t "Baseline" -d $src -o cov_base.info
fi

# Make sure the disks are clean (no leftover partition tables)
timing_enter cleanup
# Remove old domain socket pathname just in case
rm -f /var/tmp/spdk*.sock
if [ $(uname -s) = Linux ]; then
  # Load the kernel driver
  ./scripts/setup.sh reset

  # Let the kernel discover any filesystems or partitions
  sleep 10

  # Delete all partitions on NVMe devices
  devs=`lsblk -l -o NAME | grep nvme | grep -v p` || true
  for dev in $devs; do
    parted -s /dev/$dev mklabel msdos
  done

  # Load RAM disk driver if available
  modprobe brd || true
fi
timing_exit cleanup

# set up huge pages
timing_enter afterboot
./scripts/setup.sh
timing_exit afterboot

timing_enter nvmf_setup
rdma_device_init
timing_exit nvmf_setup

#####################
# Unit Tests
#####################

if [ $SPDK_TEST_UNITTEST -eq 1 ]; then
  timing_enter unittest
  run_test ./test/unit/unittest.sh
  report_test_completion "unittest"
  timing_exit unittest
fi

timing_enter lib

if [ $SPDK_TEST_BLOCKDEV -eq 1 ]; then
  run_test test/bdev/blockdev.sh
fi

if [ $SPDK_TEST_EVENT -eq 1 ]; then
  run_test test/event/event.sh
fi

if [ $SPDK_TEST_NVME -eq 1 ]; then
  run_test test/nvme/nvme.sh
  run_test test/nvme/spdk_nvme_cli.sh
  # Only test hotplug without ASAN enabled. Since if it is
  # enabled, it catches SEGV earlier than our handler which
  # breaks the hotplug logic
  if [ $SPDK_RUN_ASAN -eq 0 ]; then
    run_test test/nvme/hotplug.sh intel
  fi
fi

run_test test/env/env.sh

if [ $SPDK_TEST_IOAT -eq 1 ]; then
  run_test test/ioat/ioat.sh
fi

timing_exit lib

if [ $SPDK_TEST_ISCSI -eq 1 ]; then
  run_test ./test/iscsi_tgt/iscsi_tgt.sh
fi

if [ $SPDK_TEST_BLOBFS -eq 1 ]; then
  run_test ./test/blobfs/rocksdb/rocksdb.sh
  run_test ./test/blobstore/blobstore.sh
fi

if [ $SPDK_TEST_NVMF -eq 1 ]; then
  run_test ./test/nvmf/nvmf.sh
fi

if [ $SPDK_TEST_VHOST -eq 1 ]; then
  timing_enter vhost
  timing_enter negative
  run_test ./test/vhost/spdk_vhost.sh --negative
  timing_exit negative

  # The long-running integrity/migration suites only run in nightly CI.
  if [ $RUN_NIGHTLY -eq 1 ]; then
    timing_enter integrity_blk
    run_test ./test/vhost/spdk_vhost.sh --integrity-blk
    timing_exit integrity_blk

    timing_enter integrity
    run_test ./test/vhost/spdk_vhost.sh --integrity
    timing_exit integrity

    timing_enter fs_integrity_scsi
    run_test ./test/vhost/spdk_vhost.sh --fs-integrity-scsi
    timing_exit fs_integrity_scsi

    timing_enter fs_integrity_blk
    run_test ./test/vhost/spdk_vhost.sh --fs-integrity-blk
    timing_exit fs_integrity_blk

    timing_enter integrity_lvol_scsi_nightly
    run_test ./test/vhost/spdk_vhost.sh --integrity-lvol-scsi-nightly
    timing_exit integrity_lvol_scsi_nightly

    timing_enter integrity_lvol_blk_nightly
    run_test ./test/vhost/spdk_vhost.sh --integrity-lvol-blk-nightly
    timing_exit integrity_lvol_blk_nightly

    timing_enter vhost_migration
    run_test ./test/vhost/spdk_vhost.sh --migration
    timing_exit vhost_migration

    timing_enter readonly
    run_test ./test/vhost/spdk_vhost.sh --readonly
    timing_exit readonly
  fi

  timing_enter integrity_lvol_scsi
  run_test ./test/vhost/spdk_vhost.sh --integrity-lvol-scsi
  timing_exit integrity_lvol_scsi

  timing_enter integrity_lvol_blk
  run_test ./test/vhost/spdk_vhost.sh --integrity-lvol-blk
  timing_exit integrity_lvol_blk

  timing_exit vhost
fi

if [ $SPDK_TEST_LVOL -eq 1 ]; then
  timing_enter lvol
  test_cases="1,50,51,52,53,100,101,102,150,200,201,250,251,252,253,254,255,"
  test_cases+="300,301,450,451,452,550,600,601,650,651,652,654,655,"
  test_cases+="700,701,750,751,752,753,754,755,800,801,802,803,804,10000"
  run_test ./test/lvol/lvol.sh --test-cases=$test_cases
  report_test_completion "lvol"
  timing_exit lvol
fi

if [ $SPDK_TEST_VHOST_INIT -eq 1 ]; then
  run_test ./test/vhost/initiator/blockdev.sh
  report_test_completion "vhost_initiator"
fi

if [ $SPDK_TEST_PMDK -eq 1 ]; then
  run_test ./test/pmem/pmem.sh -x
fi

timing_enter cleanup
autotest_cleanup
timing_exit cleanup

timing_exit autotest
chmod a+r $output_dir/timing.txt

# Normal completion: clear the failure trap set at the top.
trap - SIGINT SIGTERM EXIT

# catch any stray core files
process_core

if hash lcov; then
  # generate coverage data and combine with baseline
  $LCOV -q -c -d $src -t "$(hostname)" -o cov_test.info
  $LCOV -q -a cov_base.info -a cov_test.info -o $out/cov_total.info
  $LCOV -q -r $out/cov_total.info '*/dpdk/*' -o $out/cov_total.info
  $LCOV -q -r $out/cov_total.info '/usr/*' -o $out/cov_total.info
  git clean -f "*.gcda"
  rm -f cov_base.info cov_test.info OLD_STDOUT OLD_STDERR
fi
|
import java.time.LocalDate;
import java.time.Period;
public class AgeCalculator {
    /**
     * Computes the age in completed years for an ISO-8601 birth-date string
     * (e.g. "1990-05-17"), measured against today's date.
     *
     * @param dateString birth date in ISO-8601 (yyyy-MM-dd) form
     * @return the number of whole years elapsed since the birth date
     */
    public static int getAge(String dateString) {
        return Period.between(LocalDate.parse(dateString), LocalDate.now()).getYears();
    }
}
const mongoose = require('mongoose');

// Per-user "harass" record with a name and an expiry timestamp; mongoose
// timestamps add createdAt/updatedAt automatically.
const harassSchema = mongoose.Schema(
  {
    name: { type: String, required: true },
    userID: { type: String, required: true },
    expiry: { type: String, required: true }
  },
  { timestamps: true }
);

module.exports = mongoose.model('Harass', harassSchema);
#!/bin/bash
# Zenity launcher menu: re-opens after each launch until the dialog is
# cancelled (zenity returns non-zero, ending the while condition).

title="Node project start menu"
text="Please pick an application to run"
# NOTE(review): currently unused — kept for a future invalid-selection branch.
errormsg="Invalid option. Try again."
# Label typo fixed ("MongoBD" -> "MongoDB"); the case arms below match by
# array index, so the rename stays consistent automatically.
options=("Chrome" "Firefox" "Atom" "MongoDB" "development BackEnd" "testing BackEnd" "Frontend")
windowHeight=600

while opt=$(zenity --title="$title" --text="$text" --height=$windowHeight --list \
    --column="Options" "${options[@]}"); do
  case "$opt" in
    "${options[0]}" )
      google-chrome & disown
      ;;
    "${options[1]}" )
      firefox & disown
      ;;
    "${options[2]}" )
      atom & disown
      ;;
    "${options[3]}" )
      # Plain quoting; the previous \"mongo\" passed literal quote characters
      # into the bash -c command string.
      gnome-terminal -- bash -c "mongo" & disown
      ;;
    "${options[4]}" )
      cd ~/Documents/projects/Webpage/backEnd
      gnome-terminal -- bash -c "npm run-script developmentBackEnd" & disown
      ;;
    "${options[5]}" )
      cd ~/Documents/projects/Webpage/backEnd
      gnome-terminal -- bash -c "npm run-script testingBackEnd" & disown
      ;;
    "${options[6]}" )
      cd ~/Documents/projects/Webpage/frontEnd
      gnome-terminal -- bash -c "http-server" & disown
      ;;
  esac
done
|
<filename>Week3/homework/7-step3.js
'use strict';

const x = 9;

function f1(val) {
  val = val + 1;
  return val;
}

f1(x);
console.log(x);

/*
Numbers are passed by value: 'val' receives a copy of x, so incrementing it
inside f1 leaves x untouched — this logs 9.
'val' is a local variable; it starts with the same value as x but has no
connection to it afterwards. To actually change x, it would have to be
declared with let (not const) and mutated directly, e.g.:
let x = 9;
function f1() {
x ++ ;
}
f1();
or
let x = 9;
function f1() {
x = x + 1;
return x;
}
f1(x);
*/

const y = { x: 9 };

function f2(val) {
  val.x = val.x + 1;
  return val;
}

f2(y);
console.log(y);

// Add your explanation as a comment here
/*
Objects are passed by reference: 'val' and 'y' point at the same object, so
assigning to val.x mutates the object that y refers to — this logs { x: 10 }.
(const y only freezes the binding, not the object's properties.)
*/
#!/bin/bash
set -e # exit if anything returns a non-zero status

# Retry EF Core migrations until SQL Server accepts connections (the app
# container can start before the database is ready). A failing command in an
# 'until' condition does not trigger 'set -e'.
until dotnet ef database update; do
  >&2 echo "SQL Server is starting up"
  sleep 1
done
|
let array = ["Jesus", "David", "Marcano", "Mora"];
let objeto = {
"Nombre": "Jesus",
"Apellido": "Marcano",
"Edad": 23
}
// const recibirArray = (a) => {
// console.log(a[0]);
// }
// recibirArray(array);
// const imprimerElementos = (a) => {
// a.forEach(element => {
// console.log(element);
// });
// };
// imprimerElementos(array);
const imprimirElementosDeUnObjeto = (o) => {
for(element in o) {
console.log(`${element}: ${o[element]}`);
}
console.log(Object.keys(o));
console.log(Object.values(o));
}
imprimirElementosDeUnObjeto(objeto); |
<reponame>veryaustin/veryaustin-frontend-2017
import React from "react";
import { Route, IndexRoute } from "react-router";
import App from "./components/App";
import Home from "./containers/Home";
import Work from "./containers/Work";
import About from "./containers/About";
import Contact from "./containers/Contact";
// Route table for the single-page app; <App> wraps every page component.
// NOTE(review): giving <IndexRoute> a `path` prop looks unintentional — in
// react-router v3 an IndexRoute matches the parent path and takes no path of
// its own. Confirm whether this should be <IndexRoute component={Home} /> or
// a plain <Route path="home">.
export default (
  <Route path="/" component={App}>
    <IndexRoute path="home" component={Home} />
    <Route path="work" component={Work} />
    <Route path="about" component={About} />
    <Route path="contact" component={Contact} />
  </Route>
);
|
#!/bin/bash
# Help text shown for the "help" argument (see argument handling below).
usage="$(basename "$0") [help] [movies] [kids]-- program to rename movie files
where:
help show this help text
movies sets a specific source and destination for adult content
kids sets a specific source and destination for kids content
The last 2 options then continue processing through the following steps:
- files in source folder are moved from subfolders if any to the root source folder
- empty directories and non-media files are removed using hardcoded rules
- remaining files are renamed using Filebot in a human-friendly format
- remaining files are moved to the destination"
if [[ $# -eq 0 ]] ; then
  # Quote fixed: the message previously read '..."help", movies"...'.
  echo 'Please provide either "help", "movies" or "kids" argument'
  exit 0
fi

if [ "$1" == "help" ]; then
  echo "$usage"
  exit 0
elif [ "$1" = "movies" ]; then
  sourceConfig="configMovies.xml"
  sourceDirectory="/media/Other/Transfer/Movies/"
  targetDirectory="/media/Media/Movies2/"
elif [ "$1" = "kids" ]; then
  sourceConfig="configKids.xml"
  sourceDirectory="/media/Other/Transfer/Kids/"
  targetDirectory="/media/Media/Kids2/"
else
  # Previously an unrecognized argument fell through with sourceDirectory
  # unset, so the later 'cd $sourceDirectory' landed in $HOME and the cleanup
  # steps deleted files there. Refuse to continue instead.
  echo "Unknown argument: $1"
  echo "$usage"
  exit 1
fi
#First go into our source directory
cd $sourceDirectory
#Move all files from subfolders into root folder
find . -type f -mindepth 2 -exec mv -i -f -- {} . \;
#Remove stuff we don't care about
#Based on extensions txt|nfo|png|jpg|url|sfv|srt
#NOTE(review): "*.nfo" appears twice in this list — harmless but redundant.
find . -type f -name "*.txt" -exec rm -f {} \;
find . -type f -name "*.nfo" -exec rm -f {} \;
find . -type f -name "*.png" -exec rm -f {} \;
find . -type f -name "*.jpg" -exec rm -f {} \;
find . -type f -name "*.url" -exec rm -f {} \;
find . -type f -name "*.sfv" -exec rm -f {} \;
find . -type f -name "*.srt" -exec rm -f {} \;
find . -type f -name "*.nfo" -exec rm -f {} \;
#Based on names sample|trailer|extras|
find . -type f -name "*sample*" -exec rm -f {} \;
find . -type f -name "*Sample*" -exec rm -f {} \;
find . -type f -name "*trailer*" -exec rm -f {} \;
find . -type f -name "*extras*" -exec rm -f {} \;
find . -type f -name "ETRG.mp4" -exec rm -f {} \;
#Remove subfolders
find . -type d -empty -delete
#Rename
echo "Renaming"
#Switching from filebot to tmm
/media/Other-Local/Apps/tmm/tinyMediaManagerCMD.sh -config $sourceConfig -update -scrapeNew -renameNew
#Now to clean up because TMM creates subfolders and we don't need them
#Move all files from subfolders into root folder
find . -type f -mindepth 2 -exec mv -i -f -- {} . \;
#Remove stuff we don't care about
#Based on extensions txt|nfo|png|jpg|url|sfv|srt
find . -type f -name "*.txt" -exec rm -f {} \;
find . -type f -name "*.nfo" -exec rm -f {} \;
find . -type f -name "*.png" -exec rm -f {} \;
find . -type f -name "*.jpg" -exec rm -f {} \;
find . -type f -name "*.url" -exec rm -f {} \;
find . -type f -name "*.sfv" -exec rm -f {} \;
find . -type f -name "*.srt" -exec rm -f {} \;
find . -type f -name "*.nfo" -exec rm -f {} \;
#Based on names sample|trailer|extras|
find . -type f -name "*sample*" -exec rm -f {} \;
find . -type f -name "*Sample*" -exec rm -f {} \;
find . -type f -name "*trailer*" -exec rm -f {} \;
find . -type f -name "*extras*" -exec rm -f {} \;
find . -type f -name "ETRG.mp4" -exec rm -f {} \;
#Remove subfolders
find . -type d -empty -delete
#NOTE(review): usage promises a final move to $targetDirectory, but no such
#step is visible here — the script may continue beyond this excerpt; confirm.
#Reset TMM DB
rm /media/Other-Local/Apps/tmm/data/movies.db
cp /media/Other-Local/Apps/tmm/data/moviesEmpty.db /media/Other-Local/Apps/tmm/data/movies.db
echo "Rename Complete"
#Move
if [ $1 = "kids" ]; then
echo "Moving to Kids directory"
echo `mv $sourceDirectory* $targetDirectory`
elif [ $1 = "movies" ]; then
echo "Moving to Movies directory"
echo `mv $sourceDirectory* $targetDirectory`
fi
echo "Move Complete"
|
<filename>codes/src/main/java/org/glamey/training/codes/leetcode/FindRepeatNumber.java<gh_stars>0
package org.glamey.training.codes.leetcode;
import java.util.Arrays;
import java.util.HashSet;
/**
* 找出数组中重复的数字。
* <p>
* <p>
* 在一个长度为 n 的数组 nums 里的所有数字都在 0~n-1 的范围内。数组中某些数字是重复的,但不知道有几个数字重复了,也不知道每个数字重复了几次。请找出数组中任意一个重复的数字。
* <p>
* 示例 1:
* <p>
* 输入:
* [2, 3, 1, 0, 2, 5, 3]
* 输出:2 或 3
*
* <p>
* 限制:
* <p>
* 2 <= n <= 100000
* <p>
* 通过次数42,201提交次数62,962
* <p>
* 来源:力扣(LeetCode)
* 链接:https://leetcode-cn.com/problems/shu-zu-zhong-zhong-fu-de-shu-zi-lcof
*/
public class FindRepeatNumber {
    public static void main(String[] args) {
        System.out.println(findRepeatNumber(new int[] {2, 3, 1, 0, 2, 5, 3}));
        System.out.println(findRepeatNumber_v2(new int[] {2, 3, 1, 0, 2, 5, 3}));
        System.out.println(findRepeatNumber_v3(new int[] {2, 3, 1, 0, 2, 5, 3}));
    }
    /**
     * Time: O(N), Space: O(1) — mutates the input array.
     *
     * If the array were sorted we would have index == nums[index]. Whenever
     * i != nums[i], swap the current value into its "home" slot
     * nums[nums[i]]; if that slot already holds the same value, that value
     * is a duplicate.
     */
    private static int findRepeatNumber_v3(int[] nums) {
        if (nums == null || nums.length == 1) {
            return -1;
        }
        /*
         * Trace for {2, 3, 1, 0, 2, 5, 3}:
         * 1, 3, 2, 0, 2, 5, 3
         * 3, 1, 2, 0, 2, 5, 3
         * 0, 1, 2, 3, 2, 5, 3  -> nums[4]=2 collides with nums[2]=2
         */
        for (int i = 0; i < nums.length; i++) {
            while (i != nums[i]) {
                int tmp = nums[nums[i]];
                if (nums[i] == tmp) {
                    return tmp;
                }
                nums[nums[i]] = nums[i];
                nums[i] = tmp;
            }
        }
        return -1;
    }
    /**
     * Time: O(N*logN) — sort, then scan for two equal neighbours.
     *
     * Bug fix: the original compared index with value ({@code i != nums[i]}),
     * which misses duplicates such as {1, 1, 0} (it returned -1). After
     * sorting, any duplicate must appear as an adjacent pair, so compare
     * nums[i] with nums[i + 1].
     */
    private static int findRepeatNumber_v2(int[] nums) {
        if (nums == null || nums.length <= 1) {
            return -1;
        }
        Arrays.sort(nums);
        for (int i = 0; i < nums.length - 1; i++) {
            if (nums[i] == nums[i + 1]) {
                return nums[i];
            }
        }
        return -1;
    }
    /**
     * Time: O(N), Space: O(N). Returns the first value seen twice, or -1
     * when there is no duplicate (or the input is null/too short).
     */
    public static int findRepeatNumber(int[] nums) {
        if (nums == null || nums.length <= 1) {
            return -1;
        }
        HashSet<Integer> set = new HashSet<>(nums.length);
        for (Integer integer : nums) {
            if (!set.add(integer)) {
                return integer;
            }
        }
        return -1;
    }
}
|
#!/bin/bash
# Bubble-sorts its numeric arguments and prints them space-separated.
sort_numbers() {
    # Bug fix: the original captured only "$1" into a scalar, so __sort was
    # never an array, ${#__sort} was a *string* length, and the swap stored
    # the literal text "__temp". Collect all arguments into a real array.
    local __sort=("$@")
    local __len=${#__sort[@]}
    local __sorted=0
    # Classic bubble sort: repeat passes until no swap happens.
    while [ "$__sorted" -eq 0 ]; do
        __sorted=1
        local i
        for (( i=0; i<__len-1; i++ )); do
            if [ "${__sort[$i]}" -gt "${__sort[$((i+1))]}" ]; then
                local __temp="${__sort[$i]}"
                __sort[$i]="${__sort[$((i+1))]}"
                # Expand the variable — not the literal string "__temp".
                __sort[$((i+1))]="$__temp"
                __sorted=0
            fi
        done
    done
    echo "${__sort[@]}"
}
# Example usage
numbers=(5 3 8 2 1)
sorted_numbers=($(sort_numbers "${numbers[@]}"))
echo "Sorted numbers: ${sorted_numbers[@]}"
def strStr(haystack: str, needle: str) -> int:
    """Return the index of the first occurrence of needle in haystack, or -1.

    An empty needle matches at index 0 (mirroring str.find semantics).
    Uses a naive scan: on mismatch the cursor rewinds to one past the
    position where the current partial match started.
    """
    if not needle:
        return 0
    matched = 0   # how many needle characters matched so far
    j = 0         # next needle position to compare
    pos = 0       # current haystack position
    while pos < len(haystack):
        if haystack[pos] == needle[j]:
            matched += 1
            if matched == len(needle):
                # Full match: report where it started.
                return pos - matched + 1
            j += 1
        else:
            # Rewind to the character after the previous start position.
            pos -= matched
            j = 0
            matched = 0
        pos += 1
    return -1
def strStr2(haystack: str, needle: str) -> int:
    """Return the index of the first occurrence of needle in haystack, or -1.

    An empty needle matches at index 0. Implemented by comparing each
    len(needle)-sized slice of haystack against needle.
    """
    if not needle:
        return 0
    if not haystack:
        # Bug fix: the original fell off the end of the function (returning
        # None) when the haystack was empty but the needle was not.
        return -1
    len_n = len(needle)
    len_h = len(haystack)
    # When needle is longer than haystack the range is empty -> -1.
    for i in range(len_h - len_n + 1):
        if haystack[i:i + len_n] == needle:
            return i
    return -1
# Quick manual checks of strStr2 (expected outputs: 2, -1, 4, 1).
print(strStr2(haystack="hello", needle="ll"))
print(strStr2(haystack="aaaaa", needle="bba"))
print(strStr2("mississippi", "issip"))
print(strStr2("mississippi", "issi"))
|
/* Copyright 2020 The TensorFlow Quantum Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include <string>
#include "cirq/google/api/v2/program.pb.h"
#include "tensorflow/core/framework/op_kernel.h"
#include "tensorflow/core/framework/shape_inference.h"
#include "tensorflow/core/framework/tensor_shape.h"
#include "tensorflow/core/lib/core/error_codes.pb.h"
#include "tensorflow/core/lib/core/status.h"
#include "tensorflow/core/lib/core/threadpool.h"
#include "tensorflow_quantum/core/ops/parse_context.h"
#include "tensorflow_quantum/core/ops/tfq_simulate_utils.h"
#include "tensorflow_quantum/core/qsim/mux.h"
#include "tensorflow_quantum/core/qsim/unitary_space.h"
#include "tensorflow_quantum/core/src/circuit_parser.h"
#include "tensorflow_quantum/core/src/program_resolution.h"
namespace tfq {
using ::cirq::google::api::v2::Program;
using ::tensorflow::Status;
using ::tfq::Circuit;
using ::tfq::CircuitFromProgram;
using ::tfq::qsim_old::GetUnitarySpace;
using ::tfq::qsim_old::UnitarySpace;
// Computes the full unitary matrix of each input circuit (with symbols
// resolved from the given values) and writes them into one complex64 output
// tensor of shape [batch, 2^max_qubits, 2^max_qubits]. Circuits with fewer
// qubits than the batch maximum have their unused rows/columns padded with
// the sentinel value -2.
class TfqSimulateStateOp : public tensorflow::OpKernel {
 public:
  explicit TfqSimulateStateOp(tensorflow::OpKernelConstruction *context)
      : OpKernel(context) {}
  void Compute(tensorflow::OpKernelContext *context) override {
    // Inputs: programs, symbol_names, symbol_values.
    // TODO (mbbrough): add more dimension checks for other inputs here.
    DCHECK_EQ(3, context->num_inputs());
    std::vector<Program> programs;
    std::vector<int> num_qubits;
    OP_REQUIRES_OK(context,
                   GetProgramsAndNumQubits(context, &programs, &num_qubits));
    std::vector<SymbolMap> maps;
    OP_REQUIRES_OK(context, GetSymbolMaps(context, &maps));
    // Each circuit needs exactly one symbol-value map.
    OP_REQUIRES(
        context, maps.size() == programs.size(),
        tensorflow::errors::InvalidArgument(absl::StrCat(
            "Number of circuits and values do not match. Got ", programs.size(),
            " circuits and ", maps.size(), " values.")));
    // Output matrices are sized for the widest circuit in the batch.
    int max_num_qubits = 0;
    for (const int num : num_qubits) {
      max_num_qubits = std::max(max_num_qubits, num);
    }
    // TODO(pmassey): Investigate creating a matrix that isn't just the maximum
    // required size.
    const int output_dim_size = maps.size();
    tensorflow::TensorShape output_shape;
    output_shape.AddDim(output_dim_size);
    output_shape.AddDim(1 << max_num_qubits);
    output_shape.AddDim(1 << max_num_qubits);
    tensorflow::Tensor *output = nullptr;
    OP_REQUIRES_OK(context, context->allocate_output(0, output_shape, &output));
    auto output_tensor = output->tensor<std::complex<float>, 3>();
    // Per-circuit worker: resolve symbols, build the circuit, evolve an
    // identity unitary, then copy + pad the result into the output tensor.
    // NOTE(review): OP_REQUIRES_OK inside this lambda returns from the
    // lambda only, not from Compute — confirm that error propagation from
    // the worker threads is the intended behaviour.
    auto DoWork = [&](int start, int end) {
      std::unique_ptr<UnitarySpace> state = GetUnitarySpace(1, 1);
      int old_num_qubits = -1;
      auto pad = std::complex<float>(-2, 0);
      for (int i = start; i < end; i++) {
        Program program = programs[i];
        const int num = num_qubits[i];
        OP_REQUIRES_OK(context, ResolveSymbols(maps[i], &program));
        // QSim work below
        Circuit circuit;
        OP_REQUIRES_OK(context, CircuitFromProgram(program, num, &circuit));
        // TODO(mbbrough): Update this allocation hack so that a StateSpace
        // object can grow it's memory dynamically to larger and larger size
        // without ever having to call free (until the very end). This is
        // tricky to implement because right now certain statespaces can't
        // simulate all states and we use StateSpaceSlow for smaller circuits.
        if (num != old_num_qubits) {
          state = GetUnitarySpace(num, 1);
          state->CreateUnitary();
        }
        state->SetIdentity();
        OP_REQUIRES_OK(context, state->Update(circuit));
        uint64_t state_size = uint64_t(1) << state->GetNumQubits();
        for (uint64_t j = 0; j < state_size; j++) {
          for (uint64_t k = 0; k < state_size; k++) {
            // Cast to size_t to keep windows compiler happy.
            // We run less of a risk of overflowing size_t since
            // this is a unitary and not a state.
            output_tensor(static_cast<ptrdiff_t>(i), static_cast<ptrdiff_t>(j),
                          static_cast<ptrdiff_t>(k)) = state->GetEntry(j, k);
          }
        }
        // -2 padding for lower portion.
        for (uint64_t j = state_size; j < (uint64_t(1) << max_num_qubits);
             j++) {
          for (uint64_t k = 0; k < (uint64_t(1) << max_num_qubits); k++) {
            output_tensor(static_cast<ptrdiff_t>(i), static_cast<ptrdiff_t>(j),
                          static_cast<ptrdiff_t>(k)) = pad;
          }
        }
        // -2 padding for right portion.
        for (uint64_t j = 0; j < state_size; j++) {
          for (uint64_t k = state_size; k < (uint64_t(1) << max_num_qubits);
               k++) {
            output_tensor(static_cast<ptrdiff_t>(i), static_cast<ptrdiff_t>(j),
                          static_cast<ptrdiff_t>(k)) = pad;
          }
        }
        old_num_qubits = num;
      }
    };
    // Fan the per-circuit work out over the CPU worker thread pool.
    const int block_size = GetBlockSize(context, output_dim_size);
    context->device()
        ->tensorflow_cpu_worker_threads()
        ->workers->TransformRangeConcurrently(block_size, output_dim_size,
                                              DoWork);
  }
};
// Register the CPU kernel for the public op name "TfqCalculateUnitary".
REGISTER_KERNEL_BUILDER(
    Name("TfqCalculateUnitary").Device(tensorflow::DEVICE_CPU),
    TfqSimulateStateOp);
// Op signature: batched circuit strings plus symbol bindings in, one
// (possibly padded) unitary matrix per circuit out. Only the batch
// dimension is known at graph-construction time.
REGISTER_OP("TfqCalculateUnitary")
    .Input("programs: string")
    .Input("symbol_names: string")
    .Input("symbol_values: float")
    .Output("unitary: complex64")
    .SetShapeFn([](tensorflow::shape_inference::InferenceContext *c) {
      tensorflow::shape_inference::ShapeHandle programs_shape;
      TF_RETURN_IF_ERROR(c->WithRank(c->input(0), 1, &programs_shape));
      tensorflow::shape_inference::ShapeHandle symbol_names_shape;
      TF_RETURN_IF_ERROR(c->WithRank(c->input(1), 1, &symbol_names_shape));
      tensorflow::shape_inference::ShapeHandle symbol_values_shape;
      TF_RETURN_IF_ERROR(c->WithRank(c->input(2), 2, &symbol_values_shape));
      // Output is [batch, ?, ?]: matrix dims depend on the widest circuit.
      c->set_output(
          0, c->MakeShape(
                 {c->Dim(programs_shape, 0),
                  tensorflow::shape_inference::InferenceContext::kUnknownDim,
                  tensorflow::shape_inference::InferenceContext::kUnknownDim}));
      return tensorflow::Status::OK();
    });
} // namespace tfq
|
<reponame>andreapatri/cms_journal
import React from 'react';
import PropTypes from 'prop-types';
const Sync = ({ fill, ...rest }) => (
<svg
viewBox="0 0 11 11"
width="11"
height="11"
{...rest}
xmlns="http://www.w3.org/2000/svg"
>
<path
d="M11 .917v3.208a.44.44 0 01-.136.322.44.44 0 01-.322.136H7.333c-.2 0-.341-.095-.422-.286-.081-.186-.048-.351.1-.494L8 2.814a3.558 3.558 0 00-2.499-.98c-.497 0-.97.096-1.422.29a3.695 3.695 0 00-1.17.784c-.33.329-.591.72-.785 1.17-.193.452-.29.925-.29 1.422 0 .497.097.97.29 1.422.194.45.455.841.785 1.17.329.33.72.591 1.17.785.452.193.925.29 1.422.29.568 0 1.105-.124 1.611-.373.506-.248.934-.599 1.282-1.052.034-.048.089-.077.165-.086.072 0 .131.021.179.064l.981.988a.199.199 0 01.068.147.235.235 0 01-.054.161 5.377 5.377 0 01-1.89 1.465c-.74.346-1.52.519-2.342.519a5.351 5.351 0 01-2.134-.437A5.549 5.549 0 011.61 9.39 5.549 5.549 0 01.437 7.634 5.351 5.351 0 010 5.5c0-.745.146-1.456.437-2.134A5.549 5.549 0 011.61 1.61 5.549 5.549 0 013.366.437 5.351 5.351 0 015.5 0a5.56 5.56 0 013.788 1.518l.931-.924c.139-.148.306-.181.502-.1.186.081.279.222.279.423z"
fill={fill}
fillRule="nonzero"
/>
</svg>
);
Sync.defaultProps = {
fill: '#fff',
};
Sync.propTypes = {
fill: PropTypes.string,
};
export default Sync;
|
import java.util.Random;
/**
 * Rolls two six-sided dice and prints each face value plus their total.
 */
public class DiceGame {
    public static void main(String[] args) {
        Random rng = new Random();
        int firstDie = rollDie(rng);
        int secondDie = rollDie(rng);
        // Report both faces and their sum.
        System.out.println("Die 1: " + firstDie);
        System.out.println("Die 2: " + secondDie);
        System.out.println("Total: " + (firstDie + secondDie));
    }

    /** Returns a uniformly random face value in [1, 6]. */
    private static int rollDie(Random rng) {
        return rng.nextInt(6) + 1;
    }
}
<filename>source/starter/nextjs-redux-starter/src/components/Layout.js
import React, { PureComponent } from 'react'
import Link from 'next/link'
// Shared page chrome: a header with the Next.js logo, the app title and two
// nav links, followed by the routed page content (this.props.children).
// Styles are component-scoped via styled-jsx.
export default class Layout extends PureComponent {
  // Renders the header plus whatever children the current page supplies.
  render () {
    return (
      <div className='layout'>
        <header>
          <img src='/static/next-logo.png' />
          <h3>nextjs redux starter</h3>
          <menu>
            <Link href='/about'>
              <a>About</a>
            </Link>
            <Link href='/'>
              <a>Redux demo</a>
            </Link>
          </menu>
          <style jsx>{`
            header {
              display: flex;
              align-items: center;
              font-family: Monospace;
              margin: 10px 20px;
            }
            h3 {
              margin-left: 10px
            }
            img {
              width: 30px;
            }
            menu > a {
              margin-right: 16px;
            }
          `}</style>
        </header>
        { this.props.children }
      </div>
    )
  }
}
|
#!/bin/bash
# Launch the NodejsWebApp1 dev server.
# Bug fix: abort if the project directory is missing instead of running
# `npm run server` in whatever directory we happen to be in.
cd NodejsWebApp1/NodejsWebApp1 || exit 1
npm run server
|
/*=========================================================================
Program: Visualization Toolkit
Module: vtkPCLRadiusOutlierRemoval.cxx
Copyright (c) <NAME>, <NAME>, <NAME>
All rights reserved.
See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
This software is distributed WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the above copyright notice for more information.
=========================================================================*/
#include "vtkPCLRadiusOutlierRemoval.h"
#include "vtkPCLConversions.h"
#include "vtkPolyData.h"
#include "vtkPointData.h"
#include "vtkInformation.h"
#include "vtkInformationVector.h"
#include "vtkObjectFactory.h"
#include "vtkStreamingDemandDrivenPipeline.h"
#include "vtkSmartPointer.h"
#include "vtkNew.h"
#include <pcl/filters/radius_outlier_removal.h>
namespace {
//-----------------------------------------------------------------------------
// Runs PCL's radius-based outlier removal over `cloud` and returns the
// indices of the points that were REMOVED (i.e. the outliers). A null or
// empty input yields an empty index list.
pcl::IndicesConstPtr ApplyRadiusOutlierRemoval(pcl::PointCloud<pcl::PointXYZ>::ConstPtr cloud,
                                          double searchRadius,
                                          int neighborsInSearchRadius)
{
  if (!cloud || !cloud->points.size())
    {
    return pcl::IndicesConstPtr(new std::vector<int>);
    }

  // The destination cloud for the kept points; we only need it so the
  // filter can run — the removed-index list is what we return.
  pcl::PointCloud<pcl::PointXYZ>::Ptr keptPoints(new pcl::PointCloud<pcl::PointXYZ>);

  // Passing 'true' asks the filter to record the indices it removes.
  pcl::RadiusOutlierRemoval<pcl::PointXYZ> removalFilter(true);
  removalFilter.setInputCloud(cloud);
  removalFilter.setRadiusSearch(searchRadius);
  removalFilter.setMinNeighborsInRadius(neighborsInSearchRadius);
  removalFilter.filter(*keptPoints);
  return removalFilter.getRemovedIndices();
}
}
//----------------------------------------------------------------------------
vtkStandardNewMacro(vtkPCLRadiusOutlierRemoval);
//----------------------------------------------------------------------------
// Default-constructs the filter with a 0.3 search radius and a minimum of
// 10 neighbours, wired up as a single-input, single-output VTK filter.
vtkPCLRadiusOutlierRemoval::vtkPCLRadiusOutlierRemoval()
  : SearchRadius(0.3),
    NeighborsInSearchRadius(10)
{
  this->SetNumberOfInputPorts(1);
  this->SetNumberOfOutputPorts(1);
}
//----------------------------------------------------------------------------
vtkPCLRadiusOutlierRemoval::~vtkPCLRadiusOutlierRemoval()
{
  // No owned resources beyond plain-value members; nothing to release.
}
//----------------------------------------------------------------------------
// Pipeline entry point: converts the input polydata to a PCL cloud, finds
// radius outliers, and emits the input unchanged with an added integer
// point-data array named "is_outlier" labelling each point.
// Returns 1 on success (VTK convention).
int vtkPCLRadiusOutlierRemoval::RequestData(
  vtkInformation* vtkNotUsed(request),
  vtkInformationVector **inputVector,
  vtkInformationVector *outputVector)
{
  // get input and output data objects
  vtkInformation *inInfo = inputVector[0]->GetInformationObject(0);
  vtkPolyData *input = vtkPolyData::SafeDownCast(inInfo->Get(vtkDataObject::DATA_OBJECT()));
  vtkInformation *outInfo = outputVector->GetInformationObject(0);
  vtkPolyData *output = vtkPolyData::SafeDownCast(outInfo->Get(vtkDataObject::DATA_OBJECT()));
  // perform outlier removal
  // (the unused local `inlierIndices` from the original has been removed)
  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud = vtkPCLConversions::PointCloudFromPolyData(input);
  pcl::IndicesConstPtr outlierIndices = ApplyRadiusOutlierRemoval(cloud,
                                          this->SearchRadius,
                                          this->NeighborsInSearchRadius);
  // pass thru input add labels
  vtkSmartPointer<vtkIntArray> labels = vtkPCLConversions::NewLabelsArray(outlierIndices, input->GetNumberOfPoints());
  labels->SetName("is_outlier");
  output->ShallowCopy(input);
  output->GetPointData()->AddArray(labels);
  return 1;
}
//----------------------------------------------------------------------------
void vtkPCLRadiusOutlierRemoval::PrintSelf(ostream& os, vtkIndent indent)
{
  // Delegate to the VTK base class; SearchRadius and
  // NeighborsInSearchRadius are not printed here.
  this->Superclass::PrintSelf(os,indent);
}
|
"""Compute and report the area of a circle with a fixed radius of 5."""
import math

radius = 5
# Area formula: A = pi * r^2.
area = (radius ** 2) * math.pi
print(f"The area of the circle is {area}")
# Line counter shown in the interactive prompt (%L); incremented by
# interactive:read-powscript on every line read.
InteractiveFileLineNumber=0
# Interactive powscript REPL. Spawns a backend shell connected via two
# FIFOs, reads powscript lines (with readline history), compiles complete
# constructs and streams the compiled code to the backend, echoing its
# output. Dot-commands (.help, .ast, ...) toggle debugging output.
interactive:start() {
  local ast code compiled_code line="" state=none
  local proc rfifo wfifo end_token result
  local powhistory="${POWSCRIPT_HISTORY_FILE-$HOME/.powscript_history}"
  local extra_line=''
  local compile_flag=false ast_flag=false echo_flag=false incomplete_flag=false lower_flag=false
  # Load (creating if necessary) the readline history file.
  [ ! -f "$powhistory" ] && echo >"$powhistory"
  history -c
  history -r "$powhistory"
  # wfifo carries compiled code TO the backend; rfifo carries output back.
  powscript:make-fifo ".interactive.wfifo" wfifo
  powscript:make-fifo ".interactive.rfifo" rfifo
  powscript:temp-name ".end" end_token
  backend:interactive "$wfifo" "$rfifo" "$end_token" &
  proc="$!"
  PowscriptGuestProcess="$proc"
  # Keep both FIFO ends open for the whole session.
  exec 3<>"$wfifo"
  exec 4<>"$rfifo"
  # Preload the standard library and the user's ~/.powrc, if present.
  if ${PowscriptIncludeStd-true}; then
    echo "$(cache:library std)" >>"$wfifo"
  fi
  if [ -f "$HOME/.powrc" ]; then
    files:compile-file "$wfifo" <"$HOME/.powrc"
  fi
  # Main REPL loop: runs while the backend process is alive.
  while pgrep -P $proc >/dev/null; do
    result=
    # Leftover text from a previous multi-statement line takes priority.
    if [ -n "${extra_line// /}" ]; then
      line="$extra_line"
      extra_line=""
    else
      interactive:read-powscript top line
    fi
    code="$line"
    case "$code" in
      '.compile')
        interactive:toggle-flag compile_flag
        ;;
      '.ast')
        interactive:toggle-flag ast_flag
        ;;
      '.lower')
        interactive:toggle-flag lower_flag
        ;;
      '.echo')
        interactive:toggle-flag echo_flag
        ;;
      '.incomplete')
        interactive:toggle-flag incomplete_flag
        ;;
      '.show '*)
        interactive:show-ast "${code//.show /}"
        echo
        ;;
      '.tokens '*)
        interactive:show-tokens "${code//.tokens /}"
        ;;
      '.help'*)
        interactive:help
        ;;
      *)
        # Regular powscript input: keep reading continuation lines until the
        # parser reports a complete top-level construct.
        state=none
        while [ ! "$state" = top ]; do
          interactive:clear-compilation
          state="$( { stream:init; POWSCRIPT_SHOW_INCOMPLETE_MESSAGE=$incomplete_flag ast:parse:try; } <<< "$code" )"
          [ -z "$line" ] && state=top
          case "$state" in
            top)
              interactive:clear-compilation
              { stream:init; ast:parse ast; } <<< "$code"$'\n'
              ;;
            error*)
              # Parse error: report and discard the pending input.
              >&2 echo "$state"
              state=none
              code=
              line=
              ;;
            *)
              # Incomplete construct: prompt for another line.
              interactive:read-powscript "$state" line
              code="$code"$'\n'"$line"
              ;;
          esac
        done
        # Append the accepted lines to the readline history.
        while IFS= read -r codeline; do
          [ -n "$codeline" ] && history -s "$codeline"
        done <<<"$code"
        if $echo_flag; then
          echo "---- CODE ECHO -----"
          echo "$code"
          echo "---------------------"
        fi
        # If the stream was not fully consumed, stash the remainder for the
        # next loop iteration.
        if ! stream:end; then
          interactive:get-remaining-input extra_line
          code="${code:0:$(($# - ${#extra_line}))}"
        fi
        if $ast_flag; then
          echo "---- SYNTAX TREE ----"
          interactive:show-ast $ast
          echo "---------------------"
        fi
        ast:lower $ast ast
        if $lower_flag; then
          echo "---- LOWERED TREE ---"
          interactive:show-ast $ast
          echo "---------------------"
        fi
        backend:compile $ast compiled_code
        if $compile_flag; then
          echo "--- COMPILED CODE ---"
          echo "$compiled_code"
          echo "---------------------"
        fi
        # Ship the compiled code to the backend, then echo its output until
        # the end-token sentinel arrives.
        echo "$compiled_code" >>"$wfifo"
        echo "#<<END>>" >>"$wfifo"
        while [ ! "$result" = "#<<END.$end_token>>" ]; do
          IFS= read -r result <"$rfifo"
          [ ! "$result" = "#<<END.$end_token>>" ] && echo "$result"
        done
        echo
        ;;
    esac
  done
  # Persist history and remove the FIFOs on shutdown.
  history -w "$powhistory"
  [ -p "$wfifo" ] && rm $wfifo
  [ -p "$rfifo" ] && rm $rfifo
}
# Print the list of REPL dot-commands (see the dispatch in interactive:start).
interactive:help() {
  echo '
  Special Commands:
    .help           Display this message
    .ast            Toggle the display of the abstract syntax tree
    .lower          Toggle the display of the lowered ast
    .compile        Toggle the display of compilated code
    .tokens t*      Display information about the given tokens
    .echo           Toggle echoing the code
    .incomplete     Toggle allowing incomplete code
    .show ast       Display information about the ast with the given ID
  '
}
# Capture any unconsumed text remaining on the current input line into the
# variable named by $1 (used when several statements share one line).
interactive:get-remaining-input() { #<<NOSHADOW>>
  local collumn out="$1"
  token:peek -cs collumn <<< ""
  stream:jump-to-collumn $collumn
  stream:get-rest-of-line "$out"
}
noshadow interactive:get-remaining-input
# Reset all tokenizer and parser state between parse attempts.
interactive:clear-compilation() {
  token:clear-all
  token:clear-states
  ast:clear-all
  ast:clear-states
}
# Dump the head/value/children of the AST node with ID $1, then pretty-print it.
interactive:show-ast() {
  echo "id: $1"
  echo "head: $(ast:from $1 head)"
  echo "value: $(ast:from $1 value)"
  echo "children: $(ast:from $1 children)"
  ast:print $1
}
# Tokenize the text in $1 and print each token as "value :: class".
interactive:show-tokens() {
  local value class
  {
    interactive:clear-compilation
    stream:init
    while ! stream:end; do
      token:get -v value -c class
      echo "-----------------"
      echo "$value :: $class"
    done
  } <<< "$1"
  echo
}
# Flip the boolean variable whose NAME is passed as $1 (via the setvar helper).
interactive:toggle-flag() {
  if ${!1}; then
    setvar "$1" false
  else
    setvar "$1" true
  fi
}
# Prompt (showing parser state $1) and read one raw line into the variable
# named by $2, bumping the prompt's line counter.
interactive:read-powscript() {
  IFS="" read -r -e -p "$(interactive:format-powscript-prompt "$1")" "$2"
  InteractiveFileLineNumber=$((InteractiveFileLineNumber+1))
}
# Render the interactive prompt. %L expands to the zero-padded current line
# number, %S to a 4-character code for the parser state passed as $1.
# The template is POWSCRIPT_PS1 when set, else 'pow[%L]%S> '.
interactive:format-powscript-prompt() {
  local state_label
  case "$1" in
    top)           state_label="--" ;;
    double-quotes) state_label='""' ;;
    single-quotes) state_label="''" ;;
    *)             state_label="$1" ;;
  esac
  local prompt="${POWSCRIPT_PS1-pow[%L]%S> }"
  prompt="${prompt//%L/$(printf '%.3d' $InteractiveFileLineNumber)}"
  prompt="${prompt//%S/$(printf '%4s' $state_label)}"
  echo "$prompt"
}
|
//
// INDCollectionVideoPlayerView.h
// <NAME>
//
// Created by <NAME> on 2014-04-10.
// Copyright (c) 2014 <NAME>. All rights reserved.
//
#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIKit.h>
/**
* A view that plays a video using `AVPlayerLayer` and repeats it when playback
* has ended.
*/
@interface INDCollectionVideoPlayerView : UIView
/**
 *  Player used for video playback.
 *
 *  NOTE(review): the class comment above says playback repeats on end; the
 *  looping logic lives in the implementation file, not visible here.
 */
@property (nonatomic, strong) AVPlayer *player;
@end
|
import os
class FileMapper:
    """Derives alternative display names for file-like objects.

    The objects are expected to expose a Java ``File``-style API:
    ``getName()`` and ``getParent()``.
    """

    @staticmethod
    def mapAlternativeName(file):
        """Return ``"<name>__<parent name>"``, or just the name when the
        file has no parent."""
        if file.getParent() is None:
            return file.getName()
        return file.getName() + "__" + file.getParent().getName()
# Example usage
# Assuming a Java File object named javaFile
# NOTE(review): `javaFile` is not defined anywhere in this file — running
# this module as-is raises NameError here. Looks like illustrative code only.
alternativeName = FileMapper.mapAlternativeName(javaFile)
print(alternativeName)
def average(numbers: list):
    """Return the arithmetic mean of ``numbers``, or 0 for an empty list."""
    if not numbers:
        return 0
    return sum(numbers) / len(numbers)
# Demo: prints "Average of [2, 3, 4, 5, 6] is 4.0".
numbers = [2, 3, 4, 5, 6]
print(f"Average of {numbers} is {average(numbers)}")
<filename>src/lib/tx-parser/proxy-abi.ts
import { AbiItem } from "web3-utils"
import { deployedBytecode as proxyBytecodeV1, abi as proxyAbiV1 } from "../core-contracts/Proxy-v1.json"
/**
 * Descriptor for a proxy contract that the tx parser can recognize by its
 * deployed bytecode.
 */
export interface KnownProxy {
  /** Human-readable verified contract name (e.g. "CoreContract:Proxy"). */
  verifiedName: string,
  /** ABI of the proxy contract itself (not of the implementation behind it). */
  abi: AbiItem[],
  bytecode: string, // Deployed bytecode.
  /** Name of the proxy method that yields the implementation address. */
  implementationMethod: string,
}
// Registry of proxies we can identify on-chain. Currently only the v1 core
// Proxy contract (ABI + deployed bytecode bundled from Proxy-v1.json).
export const KnownProxies: KnownProxy[] = [
  {
    verifiedName: "CoreContract:Proxy",
    abi: proxyAbiV1 as AbiItem[],
    bytecode: proxyBytecodeV1,
    implementationMethod: "_getImplementation",
  },
]
|
<gh_stars>1-10
Rails.application.routes.draw do
  # For details on the DSL available within this file, see http://guides.rubyonrails.org/routing.html
  devise_for :users
  root :to => 'pages#index'
  # Frontend
  resources :users, :except => :show
  # Public JSON API (v3): areas, scenario CRUD + exports/curves, nodes, inputs.
  namespace :api do
    namespace :v3 do
      resources :areas, :only => [:index, :show]
      resources :scenarios, :only => [:show, :create, :update] do
        member do
          get :batch
          get :application_demands, to: 'export#application_demands'
          get :production_parameters, to: 'export#production_parameters'
          get :energy_flow, to: 'export#energy_flow'
          get :molecule_flow, to: 'export#molecule_flow'
          get :merit
          put :dashboard
          post :interpolate
        end
        collection do
          post :merge
        end
        get :templates, :on => :collection
        resources :nodes, :only => :show do
          get :topology, :on => :collection
          post :stats, :on => :collection
        end
        # Legacy "converters" paths redirect to the renamed "nodes" routes.
        get 'converters', to: redirect('/api/v3/scenarios/%{scenario_id}/nodes')
        get 'converters/:id', to: redirect('/api/v3/scenarios/%{scenario_id}/nodes/%{id}')
        resources :inputs, :only => [:index, :show]
        resource :flexibility_order, only: [:show, :update],
          controller: :user_sortables, sortable_type: :flexibility
        resource :heat_network_order, only: [:show, :update],
          controller: :user_sortables, sortable_type: :heat_network
        resources :custom_curves, only: %i[index show update destroy],
          constraints: { id: %r{[a-z\d_\-/]+} }
        resource :esdl_file, only: %i[show update]
        # CSV/curve downloads scoped to a scenario.
        get 'curves/buildings_heat',
          to: 'curves#buildings_heat_curves',
          as: :curves_buildings_heat_download
        get 'curves/merit_order',
          to: 'curves#merit_order',
          as: :curves_merit_order_download
        get 'curves/electricity_price',
          to: 'curves#electricity_price',
          as: :curves_electricity_price_download
        get 'curves/heat_network',
          to: 'curves#heat_network',
          as: :curves_heat_network_download
        get 'curves/household_heat',
          to: 'curves#household_heat_curves',
          as: :curves_household_heat_download
        get 'curves/hydrogen',
          to: 'curves#hydrogen',
          as: :curves_hydrogen_download
        get 'curves/network_gas',
          to: 'curves#network_gas',
          as: :curves_network_gas_download
      end
      # Scenario-independent node/input lookups.
      resources :nodes, :only => :show do
        get :topology, :on => :collection
      end
      get 'converters', to: redirect('/api/v3/nodes')
      get 'converters/*rest', to: redirect('/api/v3/nodes/%{rest}')
      resources :inputs, :only => [:index, :show] do
        get :list, :on => :collection
      end
    end
  end
  namespace :mechanical_turk do
    root :to => 'turks#index'
    resource :factory, :only => [:new, :create, :show]
    resources :turks, :only => [:index, :show]
  end
  # Internal inspection/debugging UI, scoped to an API scenario id.
  namespace :inspect do
    get '/' => 'pages#start_inspect'
    get '/redirect' => "base#redirect", :as => 'redirect'
    get 'search.js' => 'search#index', as: :search_autocomplete
    scope '/:api_scenario_id' do
      root :to => "pages#index"
      post '/clear_cache' => 'pages#clear_cache', :as => 'clear_cache'
      # The Graphviz
      resources :layouts, :except => [:new, :index, :create, :destroy] do
        member { get 'yaml' }
      end
      get 'layout', to: redirect("api/v3/scenarios/%{api_scenario_id}/layout/energy")
      resources :gqueries, :only => [:index, :show] do
        get :result, :on => :member
        collection do
          get :test
          post :test
          get :result
        end
      end
      scope '/graphs/:graph_name' do
        resources :nodes, :only => [:index, :show]
      end
      resources :carriers, :only => [:index, :show]
      resource :area, :as => :area, :only => :show
      resources :query_tables
      resources :query_table_cells, :except => [:show, :index]
      resources :inputs, :only => [:index, :show]
      resources :scenarios, :only => [:index, :show, :edit, :update, :new, :create] do
        put :fix, :on => :member
      end
      get '/checks/share_groups' => 'checks#share_groups'
      get '/checks/gquery_results' => 'checks#gquery_results'
      get '/checks/loops' => 'checks#loops'
      get '/checks/expected_demand' => 'checks#expected_demand'
      get '/checks/index' => 'checks#index'
      get '/debug/merit_order' => 'debug#merit_order'
      get '/debug/calculation' => 'debug#calculation'
      get '/debug/gquery' => 'debug#gquery', :as => :debug_gql
      get '/gql' => "gql#index"
      get '/gql/search' => "gql#search", :as => :gql_search
      get '/gql/log' => "gql#log", :as => :gql_log
      get '/gql/warnings' => "gql#warnings", :as => :gql_warnings
      get '/merit' => 'merit#index'
      get '/merit/download',
        to: redirect("api/v3/scenarios/%{api_scenario_id}/merit/loads.csv")
      get '/merit/download_prices',
        to: redirect("api/v3/scenarios/%{api_scenario_id}/merit/price.csv")
      get 'converters', to: redirect('/inspect/%{api_scenario_id}/nodes')
      get 'converters/:id', to: redirect('/inspect/%{api_scenario_id}/nodes/%{id}')
      get 'search' => 'search#index', :as => :search
    end
  end
  # Old /data URLs permanently moved to /inspect.
  get '/data', to: redirect('/inspect', status: 302)
  get '/data/*rest',
    to: redirect(status: 302) { |params| "/inspect/#{params[:rest]}" }
  namespace :etsource do
    root :to => 'commits#index'
    resources :commits, :only => [:index, :show] do
      get :import, :on => :member
    end
  end
end
|
<reponame>BUGBOUNTYchrisg8691/BugTracker-1
package com.portfolio.bugtracker.models;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
/**
 * The type Login credentials.
 *
 * Request payload carrying the username/password pair submitted at login.
 * Getters, setters and both constructors are generated by Lombok.
 */
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public class LoginCredentials
{
    // Login name of the user attempting to authenticate.
    private String username;
    // Password exactly as received in the login request.
    private String password;
}
#!/bin/bash
#
# script that passes password from stdin to ssh.
#
# Copyright (C) 2010 André Frimberger <andre OBVIOUS_SIGN frimberger.de>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
#

# When ssh re-invokes this script as SSH_ASKPASS, just print the stored
# password and exit.
if [ -n "$SSH_ASKPASS_TMPFILE" ]; then
    cat "$SSH_ASKPASS_TMPFILE"
    exit 0
elif [ $# -lt 1 ]; then
    echo "Usage: echo password | $0 <ssh command line options>" >&2
    exit 1
fi

# Remove the temporary password file when we are interrupted.
sighandler() {
    rm -f "$TMP_PWD"
}

TMP_PWD=$(mktemp)
chmod 600 "$TMP_PWD"
# Bug fix: SIGKILL cannot be trapped, so it was dropped from the list.
trap 'sighandler' SIGHUP SIGINT SIGQUIT SIGABRT SIGALRM SIGTERM

export SSH_ASKPASS=$0
export SSH_ASKPASS_TMPFILE=$TMP_PWD

# ssh only consults SSH_ASKPASS when a DISPLAY is set.
[ "$DISPLAY" ] || export DISPLAY=dummydisplay:0

# -r keeps backslashes in the password verbatim; printf avoids echo's
# escape/word-splitting pitfalls with unquoted expansion.
IFS= read -r password
printf '%s\n' "$password" >> "$TMP_PWD"

# use setsid to detach from tty.
# Bug fix: the original `exec setsid "$@"` replaced this shell, so the
# trailing cleanup never ran and the file holding the password leaked.
# Run the command as a child instead, then remove the temp file.
setsid "$@"
status=$?
rm -f "$TMP_PWD"
exit $status
|
// Re-export the application entry point as this module's default export.
export { AdonisApplication as default } from './src/adonis-app'
|
#!/bin/bash
# Post-install bootstrap for an Arch Linux desktop: builds the paru AUR
# helper, refreshes mirrors, then installs Xorg/NVIDIA, XFCE, fonts, dev
# tools and desktop apps, and finally pulls in dotfiles and wallpapers.
set -e
# Build and install paru from the AUR, then clean up the build directory.
git clone https://aur.archlinux.org/paru-bin
cd paru-bin/
makepkg -si --noconfirm
cd
rm -rf paru-bin
# Refresh the pacman mirror list (Australian mirrors, sorted by rate).
sudo pacman -S --noconfirm --needed reflector
sudo reflector -c AU -a 12 --sort rate --save /etc/pacman.d/mirrorlist --verbose
sudo pacman -Syu
sudo pacman -S --noconfirm --needed xorg-server xorg-apps xorg-xinit xorg-twm
sudo pacman -S --noconfirm --needed nvidia nvidia-utils nvidia-settings
# install x essentials
sudo pacman -S --noconfirm xorg-server xorg-apps xorg-xinit xorg-fonts-misc
# install only selected xfce goodies
sudo pacman -S --noconfirm xfce4 xfce4-weather-plugin xfce4-whiskermenu-plugin xfce4-pulseaudio-plugin xfce4-netload-plugin xfce4-mpc-plugin xfce4-datetime-plugin xfce4-cpufreq-plugin thunar-media-tags-plugin thunar-archive-plugin
# install windows manager
sudo pacman -S --noconfirm dunst arc-gtk-theme arc-icon-theme papirus-icon-theme
# install login manager
sudo pacman -S --noconfirm lightdm lightdm-gtk-greeter lightdm-gtk-greeter-settings
# Install fonts
sudo pacman -S ttf-dejavu ttf-liberation ttf-anonymous-pro terminus-font ttf-font-awesome
paru -S --noconfirm ttf-ms-fonts noto-fonts ttf-roboto ttf-inconsolata
# Install admin tools
paru -S --noconfirm pamac-aur
# install dev tools
sudo pacman -S --noconfirm stow zsh zsh-completions fzf composer
paru -S --noconfirm visual-studio-code-bin fnm-bin
# install useful apps
sudo pacman -S --noconfirm mpv vlc unrar neofetch alacritty vlc firefox
paru -S --noconfirm alacritty-themes timeshift timeshift-autosnap libreoffice-fresh numlockx unimatrix
echo "change shell"
# NOTE(review): username is hard-coded — confirm it matches the target machine.
chsh --shell /bin/zsh bknight2k
# NOTE(review): piping a remote script straight to sh executes unreviewed
# code; verify the starship installer URL before running.
curl -sS https://starship.rs/install.sh | sh
# wallpapers
git clone --recursive https://github.com/bknightInfo/wallpapers ~/.local/share/wallpapers
# Dotfiles
git clone --recursive https://github.com/bknightInfo/dotfiles ~/.dotfiles
sudo systemctl enable lightdm
|
# see issue #89
# Emits each word of a fixed four-word list on its own line.
loop_func() {
  local -a words=(none one two tree)
  local w
  for w in "${words[@]}"; do
    echo "$w"
  done
}
# Bats test: `run` captures loop_func's stdout into the 0-indexed ${lines[@]}
# array; running twice confirms the output order is deterministic.
@test "loop_func" {
    run loop_func
    [[ "${lines[3]}" == 'tree' ]]
    # second invocation must produce the same ordering
    run loop_func
    [[ "${lines[2]}" == 'two' ]]
}
|
<filename>src/routes/postRoute.js
// biblioteca de validações Joi que é compatível com o Hapi
import Joi from 'joi'
import { Schema } from 'mongoose'
import PostsController from '../controllers/posts'
import PostModel from '../models/posts'
// Controller instance shared by all /posts routes below.
const postsController = new PostsController(PostModel)

// Registers the CRUD routes for posts on the given Hapi server.
// `server` is a Hapi server object; each server.route() call configures one route.
const postRoute = (server) => {
  // GET /posts/{id?} — Hapi path parameters go in braces; the trailing "?"
  // makes id optional, so this route serves both "list all" and "fetch one".
  server.route({
    method: 'GET',
    path: '/posts/{id?}',
    handler: (request, h) => postsController.find(request, h)
  })
  // POST /posts — Joi validates the payload before the handler runs;
  // all three fields are mandatory on creation.
  server.route({
    method: 'POST',
    path: '/posts',
    handler: (request, h) => postsController.create(request, h),
    options: {
      validate: {
        payload: {
          title: Joi.string().required(),
          content: Joi.string().required(),
          author: Joi.string().required()
        }
      }
    }
  })
  // PUT /posts/{id} — partial update: title/content are optional, but
  // author is still required by the original contract.
  server.route({
    method: 'PUT',
    path: '/posts/{id}',
    handler: (request, h) => postsController.update(request, h),
    options: {
      validate: {
        payload: {
          title: Joi.string(),
          content: Joi.string(),
          author: Joi.string().required()
        }
      }
    }
  })
  // DELETE /posts/{id}
  server.route({
    method: 'DELETE',
    path: '/posts/{id}',
    handler: (request, h) => postsController.delete(request, h)
  })
}

// Use an ES module export to match the ESM `import` statements above instead
// of mixing in CommonJS `module.exports` (Babel's interop keeps both callers working).
export default postRoute
|
#!/bin/bash
# Switchboard-1 recipe customized for Edinburgh
# Author: Arnab Ghoshal (Jan 2013)
# Guard: this file documents a sequence of stages to run by hand; bail out
# immediately if someone executes it top-to-bottom.
exit 1;
# This is a shell script, but it's recommended that you run the commands one by
# one by copying and pasting into the shell.
# Caution: some of the graph creation steps use quite a bit of memory, so you
# should run this on a machine that has sufficient memory.
# cmd.sh defines $train_cmd/$decode_cmd (cluster submission wrappers);
# path.sh puts the Kaldi binaries on PATH.
. cmd.sh
. path.sh
# Data prep
# Here we make some Edinburgh-specific changes from the Kaldi recipe in
# trunk/egs/swbd/s5 (rev. 1841). The major differences are that everything is
# made lowercase since SRILM has an option to make the data lowercase, but not
# uppercase. [It is easy to change since SRILM uses the awk tolower function,
# but I prefered not to change SRILM]. The prefix in the names of the data
# processing scripts are changed to swbd1_ from swbd_p1_ since Switchboard-1
# Release 2 (LDC97S62) has two phases marked as p1_ and p2_ in the data. We
# are using both and so p1_ prefix in the scripts is confusing. There are a
# few minor changes related to where the scripts expect the data to be, which
# are Edinburgh-specific. --Arnab (Jan 2013)
local/swbd1_data_prep_edin.sh /exports/work/inf_hcrc_cstr_general/corpora/switchboard/switchboard1
local/swbd1_prepare_dict.sh
# Build the lang directory (L.fst etc.) with <unk> as the OOV symbol.
utils/prepare_lang.sh data/local/dict "<unk>" data/local/lang data/lang
# Now train the language models. We are using SRILM and interpolating with an
# LM trained on the Fisher transcripts (part 2 disk is currently missing; so
# only part 1 transcripts ~700hr are used)
local/swbd1_train_lms_edin.sh \
  --fisher /exports/work/inf_hcrc_cstr_general/corpora/fisher/transcripts \
  data/local/train/text data/local/dict/lexicon.txt data/local/lm
# We don't really need all these options for SRILM, since the LM training script
# does some of the same processings (e.g. -subset -tolower)
srilm_opts="-subset -prune-lowprobs -unk -tolower -order 3"
# Format the Switchboard-only trigram LM as G.fst.
LM=data/local/lm/sw1.o3g.kn.gz
utils/format_lm_sri.sh --srilm-opts "$srilm_opts" \
  data/lang $LM data/local/dict/lexicon.txt data/lang_sw1_tg
# Same for the Fisher-interpolated trigram LM.
LM=data/local/lm/sw1_fsh.o3g.kn.gz
utils/format_lm_sri.sh --srilm-opts "$srilm_opts" \
  data/lang $LM data/local/dict/lexicon.txt data/lang_sw1_fsh_tg
# For some funny reason we are still using IRSTLM for doing LM pruning :)
prune-lm --threshold=1e-7 data/local/lm/sw1_fsh.o3g.kn.gz /dev/stdout \
  | gzip -c > data/local/lm/sw1_fsh.o3g.pr1-7.kn.gz
LM=data/local/lm/sw1_fsh.o3g.pr1-7.kn.gz
utils/format_lm_sri.sh --srilm-opts "$srilm_opts" \
  data/lang $LM data/local/dict/lexicon.txt data/lang_sw1_fsh_tgpr
# Data preparation and formatting for eval2000 (note: the "text" file
# is not very much preprocessed; for actual WER reporting we'll use
# sclite.
local/eval2000_data_prep_edin.sh /exports/work/inf_hcrc_cstr_general/corpora/switchboard/hub5/2000 /exports/work/inf_hcrc_cstr_general/corpora/switchboard/hub5/2000/transcr
# mfccdir should be some place with a largish disk where you
# want to store MFCC features.
mfccdir=mfcc
steps/make_mfcc.sh --nj 20 --cmd "$train_cmd" data/train exp/make_mfcc/train $mfccdir || exit 1;
# Remove the small number of utterances that couldn't be extracted for some
# reason (e.g. too short; no such file).
utils/fix_data_dir.sh data/train || exit 1;
# Per-speaker cepstral mean/variance stats used by later training stages.
steps/compute_cmvn_stats.sh data/train exp/make_mfcc/train $mfccdir || exit 1
# Create MFCCs for the eval set
steps/make_mfcc.sh --cmd "$train_cmd" --nj 10 data/eval2000 exp/make_mfcc/eval2000 $mfccdir || exit 1;
utils/fix_data_dir.sh data/eval2000 || exit 1 # remove segments with problems
steps/compute_cmvn_stats.sh data/eval2000 exp/make_mfcc/eval2000 $mfccdir || exit 1;
# Use the first 4k sentences as dev set. Note: when we trained the LM, we used
# the 1st 10k sentences as dev set, so the 1st 4k won't have been used in the
# LM training data. However, they will be in the lexicon, plus speakers
# may overlap, so it's still not quite equivalent to a test set.
utils/subset_data_dir.sh --first data/train 4000 data/train_dev # 5hr 6min
n=$[`cat data/train/segments | wc -l` - 4000]
utils/subset_data_dir.sh --last data/train $n data/train_nodev
## To see the amount of speech in each set:
# perl -ne 'split; $s+=($_[3]-$_[2]); END{$h=int($s/3600); $r=($s-$h*3600); $m=int($r/60); $r-=$m*60; printf "%.1f sec -- %d:%d:%.1f\n", $s, $h, $m, $r;}' data/local/train/segments
# Now-- there are 260k utterances (313hr 23min), and we want to start the
# monophone training on relatively short utterances (easier to align), but not
# only the shortest ones (mostly uh-huh). So take the 100k shortest ones;
# remove most of the repeated utterances (these are the uh-huh type ones), and
# then take 10k random utterances from those (about 4hr 40mins)
utils/subset_data_dir.sh --shortest data/train_nodev 100000 data/train_100kshort
local/remove_dup_utts.sh 10 data/train_100kshort data/train_100kshort_nodup
utils/subset_data_dir.sh data/train_100kshort_nodup 10000 data/train_10k_nodup
# Take the first 30k utterances (about 1/8th of the data)
utils/subset_data_dir.sh --first data/train_nodev 30000 data/train_30k
local/remove_dup_utts.sh 200 data/train_30k data/train_30k_nodup # 33hr
# Take the first 100k utterances (just under half the data); we'll use
# this for later stages of training.
utils/subset_data_dir.sh --first data/train_nodev 100000 data/train_100k
local/remove_dup_utts.sh 200 data/train_100k data/train_100k_nodup # 110hr
# Finally, the full training set:
local/remove_dup_utts.sh 300 data/train_nodev data/train_nodup # 286hr
## Starting basic training on MFCC features
mkdir -p exp/mono
steps/train_mono.sh --nj 10 --cmd "$train_cmd" \
  data/train_10k_nodup data/lang exp/mono >& exp/mono/train.log || exit 1;
steps/align_si.sh --nj 30 --cmd "$train_cmd" \
  data/train_30k_nodup data/lang exp/mono exp/mono_ali || exit 1;
# tri1: first triphone pass (delta features) on the 30k-nodup subset.
mkdir -p exp/tri1
steps/train_deltas.sh --cmd "$train_cmd" \
  3200 30000 data/train_30k_nodup data/lang exp/mono_ali exp/tri1 \
  >& exp/tri1/train.log || exit 1;
# Build decoding graphs and decode eval2000 for both LMs; each iteration runs
# in a backgrounded subshell so the two LM conditions proceed in parallel.
for lm_suffix in tg fsh_tgpr; do
  (
    graph_dir=exp/tri1/graph_sw1_${lm_suffix}
    $train_cmd $graph_dir/mkgraph.log \
      utils/mkgraph.sh data/lang_sw1_${lm_suffix} exp/tri1 $graph_dir
    steps/decode.sh --nj 30 --cmd "$decode_cmd" --config conf/decode.config \
      $graph_dir data/eval2000 exp/tri1/decode_eval2000_sw1_${lm_suffix}
  ) &
done
steps/align_si.sh --nj 30 --cmd "$train_cmd" \
  data/train_30k_nodup data/lang exp/tri1 exp/tri1_ali || exit 1;
# tri2: second delta-feature triphone pass, trained off tri1 alignments.
mkdir -p exp/tri2
steps/train_deltas.sh --cmd "$train_cmd" \
  3200 30000 data/train_30k_nodup data/lang exp/tri1_ali exp/tri2 \
  >& exp/tri2/train.log || exit 1;
wait; # for the previous decoding (really the mkgraph) step to finish
for lm_suffix in tg fsh_tgpr; do
  (
    graph_dir=exp/tri2/graph_sw1_${lm_suffix}
    $train_cmd $graph_dir/mkgraph.log \
      utils/mkgraph.sh data/lang_sw1_${lm_suffix} exp/tri2 $graph_dir
    steps/decode.sh --nj 30 --cmd "$decode_cmd" --config conf/decode.config \
      $graph_dir data/eval2000 exp/tri2/decode_eval2000_sw1_${lm_suffix}
  ) &
done
steps/align_si.sh --nj 30 --cmd "$train_cmd" \
  data/train_30k_nodup data/lang exp/tri2 exp/tri2_ali || exit 1;
# Train tri3a, which is LDA+MLLT, on 30k_nodup data.
mkdir -p exp/tri3a
steps/train_lda_mllt.sh --cmd "$train_cmd" \
  --splice-opts "--left-context=3 --right-context=3" \
  3200 30000 data/train_30k_nodup data/lang exp/tri2_ali exp/tri3a \
  >& exp/tri3a/train.log || exit 1;
for lm_suffix in tg fsh_tgpr; do
  (
    graph_dir=exp/tri3a/graph_sw1_${lm_suffix}
    $train_cmd $graph_dir/mkgraph.log \
      utils/mkgraph.sh data/lang_sw1_${lm_suffix} exp/tri3a $graph_dir
    steps/decode.sh --nj 30 --cmd "$decode_cmd" --config conf/decode.config \
      $graph_dir data/eval2000 exp/tri3a/decode_eval2000_sw1_${lm_suffix}
  ) &
done
# From now, we start building a bigger system (on train_100k_nodup, which has
# 110hrs of data). We start with the LDA+MLLT system
steps/align_si.sh --nj 30 --cmd "$train_cmd" \
  data/train_100k_nodup data/lang exp/tri2 exp/tri2_ali_100k_nodup || exit 1;
# Train tri3b, which is LDA+MLLT, on 100k_nodup data.
mkdir -p exp/tri3b
steps/train_lda_mllt.sh --cmd "$train_cmd" \
  --splice-opts "--left-context=3 --right-context=3" \
  5500 90000 data/train_100k_nodup data/lang exp/tri2_ali_100k_nodup exp/tri3b \
  >& exp/tri3b/train.log || exit 1;
# Decode eval2000 with tri3b for both LM conditions (parallel subshells).
for lm_suffix in tg fsh_tgpr; do
  (
    graph_dir=exp/tri3b/graph_sw1_${lm_suffix}
    $train_cmd $graph_dir/mkgraph.log \
      utils/mkgraph.sh data/lang_sw1_${lm_suffix} exp/tri3b $graph_dir
    steps/decode.sh --nj 30 --cmd "$decode_cmd" --config conf/decode.config \
      $graph_dir data/eval2000 exp/tri3b/decode_eval2000_sw1_${lm_suffix}
  ) &
done
# Train tri4a, which is LDA+MLLT+SAT, on 100k_nodup data.
steps/align_fmllr.sh --nj 30 --cmd "$train_cmd" \
  data/train_100k_nodup data/lang exp/tri3b exp/tri3b_ali_100k_nodup || exit 1;
mkdir -p exp/tri4a
steps/train_sat.sh --cmd "$train_cmd" \
  5500 90000 data/train_100k_nodup data/lang exp/tri3b_ali_100k_nodup \
  exp/tri4a >& exp/tri4a/train.log || exit 1;
# SAT systems are decoded with decode_fmllr.sh (estimates fMLLR transforms).
for lm_suffix in tg fsh_tgpr; do
  (
    graph_dir=exp/tri4a/graph_sw1_${lm_suffix}
    $train_cmd $graph_dir/mkgraph.log \
      utils/mkgraph.sh data/lang_sw1_${lm_suffix} exp/tri4a $graph_dir
    steps/decode_fmllr.sh --nj 30 --cmd "$decode_cmd" --config conf/decode.config \
      $graph_dir data/eval2000 exp/tri4a/decode_eval2000_sw1_${lm_suffix}
  ) &
done
# Now train a LDA+MLLT+SAT model on the entire training data (train_nodup;
# 286 hours)
# Train tri4b, which is LDA+MLLT+SAT, on train_nodup data.
steps/align_fmllr.sh --nj 30 --cmd "$train_cmd" \
  data/train_nodup data/lang exp/tri3b exp/tri3b_ali_all || exit 1;
mkdir -p exp/tri4b
steps/train_sat.sh --cmd "$train_cmd" \
  11500 200000 data/train_nodup data/lang exp/tri3b_ali_all exp/tri4b \
  >& exp/tri4b/train.log || exit 1;
for lm_suffix in tg fsh_tgpr; do
  (
    graph_dir=exp/tri4b/graph_sw1_${lm_suffix}
    $train_cmd $graph_dir/mkgraph.log \
      utils/mkgraph.sh data/lang_sw1_${lm_suffix} exp/tri4b $graph_dir
    steps/decode_fmllr.sh --nj 30 --cmd "$decode_cmd" --config conf/decode.config \
      $graph_dir data/eval2000 exp/tri4b/decode_eval2000_sw1_${lm_suffix}
  ) &
done
# MMI training starting from the LDA+MLLT+SAT systems on both the
# train_100k_nodup (110hr) and train_nodup (286hr) sets
steps/align_fmllr.sh --nj 50 --cmd "$train_cmd" \
  data/train_100k_nodup data/lang exp/tri4a exp/tri4a_ali_100k_nodup || exit 1
steps/align_fmllr.sh --nj 100 --cmd "$train_cmd" \
  data/train_nodup data/lang exp/tri4b exp/tri4b_ali_all || exit 1
# Denominator lattices are needed for discriminative (MMI) training.
steps/make_denlats.sh --nj 50 --cmd "$decode_cmd" --config conf/decode.config \
  --transform-dir exp/tri4a_ali_100k_nodup \
  data/train_100k_nodup data/lang exp/tri4a exp/tri4a_denlats_100k_nodup \
  || exit 1;
steps/make_denlats.sh --nj 100 --cmd "$decode_cmd" --config conf/decode.config \
  --transform-dir exp/tri4b_ali_all \
  data/train_nodup data/lang exp/tri4b exp/tri4b_denlats_all || exit 1;
# 4 iterations of MMI seems to work well overall. The number of iterations is
# used as an explicit argument even though train_mmi.sh will use 4 iterations by
# default.
num_mmi_iters=4
steps/train_mmi.sh --cmd "$decode_cmd" --boost 0.1 --num-iters $num_mmi_iters \
  data/train_100k_nodup data/lang exp/tri4a_{ali,denlats}_100k_nodup \
  exp/tri4a_mmi_b0.1 || exit 1;
steps/train_mmi.sh --cmd "$decode_cmd" --boost 0.1 --num-iters $num_mmi_iters \
  data/train_nodup data/lang exp/tri4b_{ali,denlats}_all \
  exp/tri4b_mmi_b0.1 || exit 1;
# Decode each MMI training iteration on eval2000. The original loops
# referenced an undefined ${i} in decode_dir; iterate the MMI iterations
# explicitly (train_mmi.sh writes <iter>.mdl) and pass --iter to decode.sh,
# matching the per-iteration fMMI decoding loops below.
for i in 1 2 3 4; do
  for lm_suffix in tg fsh_tgpr; do
    (
      graph_dir=exp/tri4a/graph_sw1_${lm_suffix}
      decode_dir=exp/tri4a_mmi_b0.1/decode_eval2000_${i}.mdl_sw1_${lm_suffix}
      steps/decode.sh --nj 30 --cmd "$decode_cmd" --config conf/decode.config \
        --iter $i --transform-dir exp/tri4a/decode_eval2000_sw1_${lm_suffix} \
        $graph_dir data/eval2000 $decode_dir
    ) &
  done
done
for i in 1 2 3 4; do
  for lm_suffix in tg fsh_tgpr; do
    (
      graph_dir=exp/tri4b/graph_sw1_${lm_suffix}
      decode_dir=exp/tri4b_mmi_b0.1/decode_eval2000_${i}.mdl_sw1_${lm_suffix}
      steps/decode.sh --nj 30 --cmd "$decode_cmd" --config conf/decode.config \
        --iter $i --transform-dir exp/tri4b/decode_eval2000_sw1_${lm_suffix} \
        $graph_dir data/eval2000 $decode_dir
    ) &
  done
done
#TODO(arnab): add lmrescore here
# ./steps/lmrescore.sh --mode 3 --cmd "$highmem_cmd" data/lang_sw1_fsh_tgpr data/lang_sw1_fsh_tg data/eval2000 exp/tri3a/decode_eval2000_sw1_fsh_tgpr exp/tri3a/decode_eval2000_sw1_fsh_tg.3 &
# Now do fMMI+MMI training
# First train 700-Gaussian diagonal UBMs used by the fMMI feature transform.
steps/train_diag_ubm.sh --silence-weight 0.5 --nj 50 --cmd "$train_cmd" \
  700 data/train_100k_nodup data/lang exp/tri4a_ali_100k_nodup exp/tri4a_dubm
steps/train_diag_ubm.sh --silence-weight 0.5 --nj 100 --cmd "$train_cmd" \
  700 data/train_nodup data/lang exp/tri4b_ali_all exp/tri4b_dubm
steps/train_mmi_fmmi.sh --learning-rate 0.005 --boost 0.1 --cmd "$train_cmd" \
  data/train_100k_nodup data/lang exp/tri4a_ali_100k_nodup exp/tri4a_dubm \
  exp/tri4a_denlats_100k_nodup exp/tri4a_fmmi_b0.1 || exit 1;
steps/train_mmi_fmmi.sh --learning-rate 0.005 --boost 0.1 --cmd "$train_cmd" \
  data/train_nodup data/lang exp/tri4b_ali_all exp/tri4b_dubm \
  exp/tri4b_denlats_all exp/tri4b_fmmi_b0.1 || exit 1;
# Decode several fMMI iterations to pick the best one from the results.
for iter in 4 5 6 7 8; do
  for lm_suffix in tg fsh_tgpr; do
    (
      graph_dir=exp/tri4a/graph_sw1_${lm_suffix}
      decode_dir=exp/tri4a_fmmi_b0.1/decode_eval2000_it${iter}_sw1_${lm_suffix}
      steps/decode_fmmi.sh --nj 30 --cmd "$decode_cmd" --iter $iter \
        --transform-dir exp/tri4a/decode_eval2000_sw1_${lm_suffix} \
        --config conf/decode.config $graph_dir data/eval2000 $decode_dir
    ) &
  done
done
for iter in 4 5 6 7 8; do
  for lm_suffix in tg fsh_tgpr; do
    (
      graph_dir=exp/tri4b/graph_sw1_${lm_suffix}
      decode_dir=exp/tri4b_fmmi_b0.1/decode_eval2000_it${iter}_sw1_${lm_suffix}
      steps/decode_fmmi.sh --nj 30 --cmd "$decode_cmd" --iter $iter \
        --transform-dir exp/tri4b/decode_eval2000_sw1_${lm_suffix} \
        --config conf/decode.config $graph_dir data/eval2000 $decode_dir
    ) &
  done
done
# TODO(arnab): add SGMM and hybrid
# local/run_sgmm.sh
# # Recipe with DNN system on top of fMLLR features
# local/run_hybrid.sh
# # getting results (see RESULTS file)
# for x in 1 2 3a 3b 4a; do grep 'Percent Total Error' exp/tri$x/decode_eval2000_sw1_tg/score_*/eval2000.ctm.filt.dtl | sort -k5 -g | head -1; done
import React, { Component } from 'react';
import { Text, View, ScrollView } from 'react-native';
class App extends Component {
state = {
products: [],
orders: [],
payments: []
}
componentDidMount() {
// Fetch products
// Fetch orders
// Fetch payments
}
render() {
return (
<View>
<Text>Products</Text>
{this.state.products.map(product => (
<View>
<Text>{product.name}</Text>
<Text>{product.price}</Text>
</View>
))}
<Text>Orders</Text>
{this.state.orders.map(order => (
<View>
<Text>{order.customerName}</Text>
<Text>{order.totalPrice}</Text>
</View>
))}
<Text>Payments</Text>
{this.state.payments.map(payment => (
<View>
<Text>{payment.paymentType}</Text>
<Text>{payment.status}</Text>
</View>
))}
</View>
);
}
}
export default App; |
///////////////////////////////////////////////////////////////////////////////
// Name: src/unix/secretstore.cpp
// Purpose: wxSecretStore implementation using libsecret.
// Author: <NAME>
// Created: 2016-05-27
// Copyright: (c) 2016 <NAME> <<EMAIL>>
// Licence: wxWindows licence
///////////////////////////////////////////////////////////////////////////////
// ============================================================================
// declarations
// ============================================================================
// ----------------------------------------------------------------------------
// headers
// ----------------------------------------------------------------------------
// for compilers that support precompilation, includes "wx.h".
#include "wx/wxprec.h"
#if wxUSE_SECRETSTORE
#include "wx/secretstore.h"
#include "wx/private/secretstore.h"
// Older versions of libsecret, such as 0.16 from Ubuntu 14.04 LTS, require
// predefining this symbol in order to have access to the APIs we use below.
#define SECRET_API_SUBJECT_TO_CHANGE
#include <libsecret/secret.h>
#include "wx/gtk/private/error.h"
#include "wx/gtk/private/list.h"
#include "wx/gtk/private/object.h"
namespace
{
// Tiny helper to ensure freeing GHashTable: RAII wrapper around GLib's
// reference-counted hash table.
class wxGHashTable
{
public:
    // Ctor takes ownership of its argument.
    explicit wxGHashTable(GHashTable* hash) : m_hash(hash) { }
    // Must have a copy ctor to allow returning these objects from functions in
    // C++98, this class could/should be move-only in C++11.
    wxGHashTable(const wxGHashTable& other)
        : m_hash(other.m_hash)
    {
        // Bump the GLib refcount so both copies own the table; the last
        // unref in a dtor actually destroys it.
        g_hash_table_ref(m_hash);
    }
    ~wxGHashTable() { g_hash_table_unref(m_hash); }
    // Implicit conversion lets the wrapper be passed directly to GLib/libsecret APIs.
    operator GHashTable *() const { return m_hash; }
private:
    GHashTable* const m_hash;
    wxDECLARE_NO_ASSIGN_CLASS(wxGHashTable);
};
// ============================================================================
// wxSecretStoreImpl using libsecret
// ============================================================================
// Wraps a libsecret SecretValue, owning exactly one reference to it.
class wxSecretValueLibSecretImpl : public wxSecretValueImpl
{
public:
    // Create a new secret value.
    //
    // Notice that we have to use text/plain as content type and not
    // application/octet-stream which would have been more logical because
    // libsecret accepts only valid UTF-8 strings for the latter, while our
    // data is not necessarily UTF-8 (nor even text at all...).
    wxSecretValueLibSecretImpl(size_t size, const void* data)
        : m_value(secret_value_new(static_cast<const gchar*>(data), size,
                                   "text/plain"))
    {
    }
    // Adopt an existing secret value.
    //
    // This ctor takes ownership of the provided pointer and will release it
    // when this object is destroyed.
    explicit wxSecretValueLibSecretImpl(SecretValue* value)
        : m_value(value)
    {
    }
    virtual ~wxSecretValueLibSecretImpl()
    {
        // No need to wipe memory, this will happen by default.
        secret_value_unref(m_value);
    }
    // Returns the length of the stored secret in bytes.
    virtual size_t GetSize() const wxOVERRIDE
    {
        gsize length = 0;
        (void)secret_value_get(m_value, &length);
        return length;
    }
    // Returns a pointer to the raw secret bytes, still owned by libsecret.
    virtual const void *GetData() const wxOVERRIDE
    {
        return secret_value_get(m_value, NULL);
    }
    // Non-owning accessor used when passing the value back to libsecret calls.
    SecretValue* GetValue() const
    {
        return m_value;
    }
private:
    SecretValue* const m_value;
};
// Dummy implementation used when secret service is not available.
class wxSecretStoreNotAvailableImpl : public wxSecretStoreImpl
{
public:
explicit wxSecretStoreNotAvailableImpl(const wxString& error)
: m_error(error)
{
}
virtual bool IsOk(wxString* errmsg) const wxOVERRIDE
{
if ( errmsg )
*errmsg = m_error;
return false;
}
virtual bool Save(const wxString& WXUNUSED(service),
const wxString& WXUNUSED(user),
const wxSecretValueImpl& WXUNUSED(secret),
wxString& errmsg) wxOVERRIDE
{
errmsg = m_error;
return false;
}
virtual bool Load(const wxString& WXUNUSED(service),
wxString* WXUNUSED(user),
wxSecretValueImpl** WXUNUSED(secret),
wxString& errmsg) const wxOVERRIDE
{
errmsg = m_error;
return false;
}
virtual bool Delete(const wxString& WXUNUSED(service),
wxString& errmsg) wxOVERRIDE
{
errmsg = m_error;
return false;
}
private:
const wxString m_error;
};
// This implementation uses synchronous libsecret functions which is supposed
// to be a bad idea, but doesn't seem to be a big deal in practice and as there
// is no simple way to implement asynchronous API under the other platforms, it
// doesn't seem to be worth it to use it just under Unix, so keep things simple
// (even if blocking) for now.
class wxSecretStoreLibSecretImpl : public wxSecretStoreImpl
{
public:
    // Factory: connects to the secret service synchronously; returns NULL and
    // fills errmsg on failure.
    static wxSecretStoreLibSecretImpl* Create(wxString& errmsg)
    {
        wxGtkError error;
        SecretService* const service = secret_service_get_sync
            (
                SECRET_SERVICE_OPEN_SESSION,
                NULL, // No cancellation
                error.Out()
            );
        if ( !service )
        {
            errmsg = error.GetMessage();
            return NULL;
        }
        // This passes ownership of service to the new object.
        return new wxSecretStoreLibSecretImpl(service);
    }
    virtual bool Save(const wxString& service,
                      const wxString& user,
                      const wxSecretValueImpl& secret,
                      wxString& errmsg) wxOVERRIDE
    {
        // We don't have any argument for the user-visible secret description
        // supported by libsecret, so we just reuse the service string. It
        // might be a good idea to add a possibility to specify a more
        // informative description later.
        // Notice that we can't use secret_password_store_sync() here because
        // our secret can contain NULs, so we must pass by the lower level API.
        wxGtkError error;
        if ( !secret_service_store_sync
              (
                m_service,
                GetSchema(),
                BuildAttributes(service, user),
                SECRET_COLLECTION_DEFAULT,
                service.utf8_str(),
                static_cast<const wxSecretValueLibSecretImpl&>(secret).GetValue(),
                NULL, // Can't be cancelled
                error.Out()
              ) )
        {
            errmsg = error.GetMessage();
            return false;
        }
        return true;
    }
    virtual bool Load(const wxString& service,
                      wxString* user,
                      wxSecretValueImpl** secret,
                      wxString& errmsg) const wxOVERRIDE
    {
        wxGtkError error;
        GList* const found = secret_service_search_sync
            (
                m_service,
                GetSchema(),
                BuildAttributes(service),
                static_cast<SecretSearchFlags>
                (
                    SECRET_SEARCH_UNLOCK |
                    SECRET_SEARCH_LOAD_SECRETS
                ),
                NULL, // Can't be cancelled
                error.Out()
            );
        if ( !found )
        {
            // There can be no error message if the secret was just not found
            // and no other error occurred -- just leave the error message
            // empty in this case, this is exactly how our API is supposed to
            // behave.
            if ( error )
                errmsg = error.GetMessage();
            return false;
        }
        // RAII guards: the list, the item and the attribute table returned by
        // libsecret must all be released before returning.
        wxGtkList ensureListFreed(found);
        SecretItem* const item = static_cast<SecretItem*>(found->data);
        wxGtkObject<SecretItem> ensureItemFreed(item);
        const wxGHashTable attrs(secret_item_get_attributes(item));
        // Only the first matching item is used; the stored user attribute is
        // reported back to the caller if it was present.
        const gpointer field = g_hash_table_lookup(attrs, FIELD_USER);
        if ( field )
            *user = wxString::FromUTF8(static_cast<char*>(field));
        // Ownership of the SecretValue reference passes to the new impl object.
        *secret = new wxSecretValueLibSecretImpl(secret_item_get_secret(item));
        return true;
    }
    virtual bool Delete(const wxString& service,
                        wxString& errmsg) wxOVERRIDE
    {
        wxGtkError error;
        if ( !secret_service_clear_sync
              (
                m_service,
                GetSchema(),
                BuildAttributes(service),
                NULL, // Can't be cancelled
                error.Out()
              ) )
        {
            if ( error )
                errmsg = error.GetMessage();
            return false;
        }
        return true;
    }
private:
    // Constants for the schema fields.
    static const char* FIELD_SERVICE;
    static const char* FIELD_USER;
    // Currently we use a hard-coded schema, but we might allow customizing it
    // (or at least its name?) in the future, so wrap access to it in this
    // helper function to make changing the code later simpler.
    static SecretSchema* GetSchema()
    {
        // SecretSchema struct has some "reserved" fields in it which we don't
        // want to initialize, but this results in this warning if it's
        // enabled, so just suppress it here.
        wxGCC_WARNING_SUPPRESS(missing-field-initializers)
        static SecretSchema s_schema =
            {
                "org.freedesktop.Secret.Generic",
                SECRET_SCHEMA_NONE,
                {
                    { FIELD_SERVICE, SECRET_SCHEMA_ATTRIBUTE_STRING },
                    { FIELD_USER, SECRET_SCHEMA_ATTRIBUTE_STRING },
                    { NULL }
                }
            };
        wxGCC_WARNING_RESTORE(missing-field-initializers)
        return &s_schema;
    }
    // Return attributes for the schema defined above.
    // Overload used for lookups/deletion, which match on the service only.
    static wxGHashTable BuildAttributes(const wxString& service)
    {
        return wxGHashTable(secret_attributes_build
                            (
                                GetSchema(),
                                FIELD_SERVICE, service.utf8_str().data(),
                                NULL
                            ));
    }
    // Overload used when storing, which records the user name as well.
    static wxGHashTable BuildAttributes(const wxString& service,
                                        const wxString& user)
    {
        return wxGHashTable(secret_attributes_build
                            (
                                GetSchema(),
                                FIELD_SERVICE, service.utf8_str().data(),
                                FIELD_USER, user.utf8_str().data(),
                                NULL
                            ));
    }
    // Ctor is private, Create() should be used for creating objects of this
    // class.
    explicit wxSecretStoreLibSecretImpl(SecretService* service)
        : m_service(service)
    {
    }
    wxGtkObject<SecretService> m_service;
};
// Out-of-class definitions of the schema attribute name constants.
const char* wxSecretStoreLibSecretImpl::FIELD_SERVICE = "service";
const char* wxSecretStoreLibSecretImpl::FIELD_USER = "user";
} // anonymous namespace
// ============================================================================
// LibSecret-specific implementation of common methods
// ============================================================================
/* static */
// Platform factory: creates the libsecret-backed value implementation.
wxSecretValueImpl* wxSecretValue::NewImpl(size_t size, const void *data)
{
    return new wxSecretValueLibSecretImpl(size, data);
}
/* static */
wxSecretStore wxSecretStore::GetDefault()
{
    // Prefer the real libsecret-backed store; if it can't be created, hand
    // back a stub whose only job is to report the creation error to callers.
    wxString errmsg;
    wxSecretStoreImpl* const real = wxSecretStoreLibSecretImpl::Create(errmsg);
    return wxSecretStore(real
                            ? real
                            : static_cast<wxSecretStoreImpl*>(
                                new wxSecretStoreNotAvailableImpl(errmsg)));
}
#endif // wxUSE_SECRETSTORE
|
// Doxygen-generated navigation-tree data for the droid.Runtime.Managers
// namespace; regenerated by Doxygen, do not edit by hand.
// Each entry is [display name, target page, sub-tree variable name].
var namespacedroid_1_1_runtime_1_1_managers =
[
    [ "Experimental", "namespacedroid_1_1_runtime_1_1_managers_1_1_experimental.html", "namespacedroid_1_1_runtime_1_1_managers_1_1_experimental" ],
    [ "AbstractNeodroidManager", "classdroid_1_1_runtime_1_1_managers_1_1_abstract_neodroid_manager.html", "classdroid_1_1_runtime_1_1_managers_1_1_abstract_neodroid_manager" ],
    [ "PausableManager", "classdroid_1_1_runtime_1_1_managers_1_1_pausable_manager.html", "classdroid_1_1_runtime_1_1_managers_1_1_pausable_manager" ]
];
# Integration-test driver.
# Args: $1 = core X.Y version, $2 = supertokens-website branch/tag,
#       $3 = supertokens-node branch. Requires SUPERTOKENS_API_KEY in the env.
# Resolve the latest X.Y.Z core release for the requested X.Y version.
coreInfo=`curl -s -X GET \
  "https://api.supertokens.io/0/core/latest?password=$SUPERTOKENS_API_KEY&planType=FREE&mode=DEV&version=$1" \
  -H 'api-version: 0'`
if [[ `echo $coreInfo | jq .tag` == "null" ]]
then
    echo "fetching latest X.Y.Z version for core, X.Y version: $1, planType: FREE gave response: $coreInfo"
    exit 1
fi
coreTag=$(echo $coreInfo | jq .tag | tr -d '"')
coreVersion=$(echo $coreInfo | jq .version | tr -d '"')
# Resolve the plugin-interface X.Y version matching that core version.
pluginInterfaceVersionXY=`curl -s -X GET \
  "https://api.supertokens.io/0/core/dependency/plugin-interface/latest?password=$SUPERTOKENS_API_KEY&planType=FREE&mode=DEV&version=$1" \
  -H 'api-version: 0'`
if [[ `echo $pluginInterfaceVersionXY | jq .pluginInterface` == "null" ]]
then
    echo "fetching latest X.Y version for plugin-interface, given core X.Y version: $1, planType: FREE gave response: $pluginInterfaceVersionXY"
    exit 1
fi
pluginInterfaceVersionXY=$(echo $pluginInterfaceVersionXY | jq .pluginInterface | tr -d '"')
# Resolve the latest X.Y.Z plugin-interface release for that X.Y version.
pluginInterfaceInfo=`curl -s -X GET \
  "https://api.supertokens.io/0/plugin-interface/latest?password=$SUPERTOKENS_API_KEY&planType=FREE&mode=DEV&version=$pluginInterfaceVersionXY" \
  -H 'api-version: 0'`
if [[ `echo $pluginInterfaceInfo | jq .tag` == "null" ]]
then
    echo "fetching latest X.Y.Z version for plugin-interface, X.Y version: $pluginInterfaceVersionXY, planType: FREE gave response: $pluginInterfaceInfo"
    exit 1
fi
pluginInterfaceTag=$(echo $pluginInterfaceInfo | jq .tag | tr -d '"')
pluginInterfaceVersion=$(echo $pluginInterfaceInfo | jq .version | tr -d '"')
echo "Testing with frontend website: $2, FREE core: $coreVersion, plugin-interface: $pluginInterfaceVersion"
# Check out supertokens-root with the resolved core + plugin-interface tags.
# NOTE(review): the cd chain below assumes this script runs two levels deep
# inside the repository checkout -- confirm before relocating it.
cd ../../
git clone git@github.com:supertokens/supertokens-root.git
cd supertokens-root
echo -e "core,$1\nplugin-interface,$pluginInterfaceVersionXY" > modules.txt
./loadModules --ssh
cd supertokens-core
git checkout $coreTag
cd ../supertokens-plugin-interface
git checkout $pluginInterfaceTag
cd ../
echo $SUPERTOKENS_API_KEY > apiPassword
./utils/setupTestEnvLocal
cd ../
# Check out the frontend test suite at the requested branch/tag.
git clone git@github.com:supertokens/supertokens-website.git
cd supertokens-website
git checkout $2
cd ../project/tests/frontendIntegration/django3x
# Start the two django test backends in the background; their PIDs are kept
# so they can be killed whether the tests pass or fail.
uvicorn mysite.asgi:application --port 8080 &
pid=$!
uvicorn mysite.asgi:application --port 8082 &
pid2=$!
cd ../../../../supertokens-website/test/server
npm i -d --quiet --no-progress
npm i git+https://github.com:supertokens/supertokens-node.git#$3 --quiet --no-progress
cd ../../
npm i -d --quiet --no-progress
SUPERTOKENS_CORE_TAG=$coreTag NODE_PORT=8081 INSTALL_PATH=../supertokens-root npm test
# On failure: kill the backends, restore package.json, and propagate failure.
if [[ $? -ne 0 ]]
then
    echo "test failed... killing $pid, $pid2 and exiting!"
    kill -9 $pid
    kill -9 $pid2
    rm -rf ./test/server/node_modules/supertokens-node
    git checkout HEAD -- ./test/server/package.json
    exit 1
fi
echo "all tests passed, killing processes: $pid and $pid2"
kill -9 $pid
kill -9 $pid2
rm -rf ./test/server/node_modules/supertokens-node
git checkout HEAD -- ./test/server/package.json
<reponame>TeamSAIDA/StarcraftAITournamentManager<filename>src/server/KeepAliveTask.java
package server;
import java.util.TimerTask;
/**
 * Timer task that invokes the server singleton's {@code keepAlive()} on each
 * scheduled tick; intended to be registered with a {@link java.util.Timer}.
 */
public class KeepAliveTask extends TimerTask {
    @Override
    public void run() {
        // Delegate to the singleton server instance on every timer tick.
        Server.Instance().keepAlive();
    }
}
|
#!/bin/bash
set -e
# Tell the build (via compiler flags) that curl is linked statically.
COMMON_FLAGS="-DCURL_IS_STATICALLY_LINKED"
export EXTRA_CFLAGS="$COMMON_FLAGS"
export EXTRA_CXXFLAGS="$COMMON_FLAGS"
# NOTE(review): assumes FORCE_GEM_HOME_AND_PATH is exported by the caller;
# if unset, an empty "/bin" segment is prepended to PATH -- confirm.
export PATH="$FORCE_GEM_HOME_AND_PATH/bin:$PATH"
# Replace this shell with the wrapped command so signals and exit status
# pass straight through to the caller.
exec "$@"
|
<gh_stars>0
package dao;
import java.util.List;
import model.Order;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Transactional;
@Repository
public interface OrderDao extends JpaRepository<Order, Long> {
    /** Returns all non-archived orders. */
    // NOTE(review): the LEFT JOIN to o.shop is not a fetch join and its alias
    // is unused in this query -- verify whether "JOIN FETCH" was intended.
    @Query("SELECT o FROM Order o LEFT JOIN o.shop s WHERE o.isArchived = false")
    public List<Order> findAllOrders();
    /**
     * Soft-deletes an order: flags it archived and stamps the modification
     * time (epoch millis, UTC) supplied by the caller.
     */
    @Modifying
    @Transactional
    @Query("UPDATE Order o SET o.isArchived = true, o.modifiedAt = :modifiedAtUtcTime WHERE o.orderKey = :orderKey")
    public void archiveOrder(@Param("orderKey") long orderKey,
            @Param("modifiedAtUtcTime") long modifiedAtUtcTime);
    /** Returns orders (archived or not) modified at or after the given UTC time. */
    @Query("SELECT o FROM Order o LEFT JOIN o.shop s WHERE o.modifiedAt >= :utcTime")
    public List<Order> findOrdersNewerThan(@Param("utcTime") long utcTime);
    /**
     * Returns the single non-archived order placed at the given time.
     * NOTE(review): throws if more than one row matches orderAt -- confirm
     * that orderAt is unique among non-archived orders.
     */
    @Query("SELECT o FROM Order o LEFT JOIN o.shop s WHERE o.isArchived = false AND o.orderAt = :orderAtUtcTime")
    public Order findOrderByOrderAt(@Param("orderAtUtcTime") long orderAtUtcTime);
}
|
require File.dirname(__FILE__) + '/../../spec_helper'
# include Remote
require File.dirname(__FILE__) + '/ec2_mocks_and_stubs.rb'
# Specs for the EC2 remote base. Exercises the remoter interface surface,
# helper string conversions, launch/terminate/describe flows and the
# elastic-IP / keypair / snapshot helpers against stubbed AWS responses
# (see ec2_mocks_and_stubs.rb for TestCloud / TestEC2Class).
describe "ec2 remote base" do
  before(:each) do
    @cloud = TestCloud.new :test_remoter_base_cloud
    @tr = TestEC2Class.new(@cloud)
    stub_remoter_for(@tr)
    # @tr.stub!(:get_instances_description).and_return response_list_of_instances
  end
  # Every remoter-base entry point must be exposed by the EC2 implementation;
  # the eval heredoc generates one example per method name.
  %w(launch_new_instance! terminate_instance! describe_instance describe_instances create_snapshot).each do |method|
    eval <<-EOE
      it "should have the method #{method}" do
        @tr.respond_to?(:#{method}).should == true
      end
    EOE
  end
  describe "helpers" do
    it "should be able to convert an ec2 ip to a real ip" do
      "ec2-72-44-36-12.compute-1.amazonaws.com".convert_from_ec2_to_ip.should == "172.16.58.3"
    end
    it "should not throw an error if another string is returned" do
      "172.16.58.3".convert_from_ec2_to_ip.should == "172.16.58.3"
    end
    it "should be able to parse the date from the timestamp" do
      "2008-11-13T09:33:09+0000".parse_datetime.should == DateTime.parse("2008-11-13T09:33:09+0000")
    end
    it "should rescue itself and just return the string if it fails" do
      "thisisthedate".parse_datetime.should == "thisisthedate"
    end
  end
  describe "launching" do
    before(:each) do
      @ret_hash = {:instance_id => "1", :name => "instance"}
      @tr.ec2({}).stub!(:run_instances).and_return @ret_hash
    end
    it "should call run_instances on the ec2 Base class when asking to launch_new_instance!" do
      # @tr.ec2.should_receive(:run_instances).and_return true
      @tr.launch_new_instance!
    end
    it "should use a specific security group if one is specified" do
      @tr.stub!(:security_group).and_return "web"
      @tr.ec2.should_receive(:run_instances).and_return @ret_hash
      @tr.launch_new_instance!
    end
    # it "should use the default security group if none is specified" do
    #   @tr.ec2.should_receive(:run_instances).with(hash_including(:group_id => ['default'])).and_return @ret_hash
    #   @tr.launch_new_instance!
    # end
    it "should get the hash response from EC2ResponseObject" do
      EC2ResponseObject.should_receive(:get_hash_from_response).and_return @ret_hash
      @tr.launch_new_instance! :keypair => "keys"
    end
  end
  describe "terminating" do
    it "should call terminate_instance! on ec2 when asking to terminate_instance!" do
      @tr.ec2.should_receive(:terminate_instances).with(:instance_id => "abc-123").and_return true
      @tr.terminate_instance!({:instance_id => "abc-123"})
    end
  end
  describe "describe_instance" do
    it "should return a default instance if called with no paramters" do
      @tr.describe_instances(:id => "i-1234").size.should > 0
    end
    it "should return nil if the cloud has no instances" do
      @tr.stub!(:describe_instances).and_return []
      @tr.describe_instance.nil?.should == true
    end
  end
  describe "get_instances_description" do #NOTE MF: this test is sorta bogus since it is just checking what we stubbed
    it "should return a hash" do
      @tr.describe_instances.class.should == Array
    end
    it "should call the first node master" do
      @tr.describe_instances.first[:name].should == "master"
    end
    it "should call the second one node1" do
      @tr.describe_instances[1][:name].should == "node1"
    end
    it "should call the third node2" do
      @tr.describe_instances[2][:name].should == "node2"
    end
  end
  describe "elastic_ips" do
    before(:each) do
      # Canned describe_addresses payload: two unattached elastic IPs.
      @resp = {"requestId"=>"be9bd2e9-4f8c-448f-993d-c21fe537e322", "addressesSet"=>{"item"=>[{"instanceId"=>nil, "publicIp"=>"192.168.127.12"}, {"instanceId"=>nil, "publicIp"=>"192.168.3.11"}]}, "xmlns"=>"http://ec2.amazonaws.com/doc/2008-12-01/"}
      @tr.ec2.stub!(:describe_addresses).and_return @resp
    end
    it "should have the next available elastic_ip" do
      @tr.next_unused_elastic_ip.should == "192.168.127.12"
    end
    it "should use only the elastic ips set on the cloud" do
      @cloud.stub!(:elastic_ips?).and_return true
      @cloud.stub!(:elastic_ips).and_return ["192.168.3.11"]
      @tr.stub!(:cloud).and_return @cloud
      @tr.next_unused_elastic_ip.should == "192.168.3.11"
    end
  end
  describe "create_keypair" do
    before(:each) do
      Kernel.stub!(:system).with("ec2-add-keypair fake_keypair > #{Default.base_keypair_path}/id_rsa-fake_keypair && chmod 600 #{Default.base_keypair_path}/id_rsa-fake_keypair").and_return true
      # @tr.stub!(:base_keypair_path).and_return "#{ENV["HOME"]}/.ec2"
    end
    it "should try to create the directory when making a new keypair" do
      # FileUtils.should_receive(:mkdir_p).and_return true
      # ::File.stub!(:directory?).and_return false
      # @tr.create_keypair
      pending
      #TODO Fix with new remoter branch
    end
    it "should not create a keypair if the keypair is nil" do
      pending
      #TODO Fix with new remoter branch
      # Kernel.should_not_receive(:system)
      # @tr.stub!(:keypair).and_return nil
      # @tr.create_keypair
    end
  end
  describe "create_snapshot" do
    # We can assume that create_snapshot on the ec2 gem works
    before(:each) do
      @tr.ec2.stub!(:create_snapshot).and_return nil
    end
    it "should create a snapshot of the current EBS volume" do
      # NOTE(review): the outer braces are parsed as a block passed to
      # and_return (old RSpec supports this); confirm this is intentional.
      @tr.ec2.stub!(:create_snapshot).and_return {{"snapshotId" => "snap-123"}}
      @tr.stub!(:ebs_volume_id).and_return "vol-123"
      @tr.create_snapshot.should == {"snapshotId" => "snap-123"}
    end
    it "should not create a snapshot if there is no EBS volume" do
      @tr.create_snapshot.should == nil
    end
  end
end
|
from typing import List
from win32com import client
def convert_excel_to_pdf(excel_files: List[str]) -> None:
    """Export the active sheet of each .xlsx workbook to a PDF next to it.

    Uses Excel COM automation (Windows only). The PDF path is derived by
    replacing the ``.xlsx`` suffix, so inputs are expected to end in
    ``.xlsx`` -- other extensions would silently produce no ``.pdf`` name.

    Args:
        excel_files: Absolute paths of the workbooks to convert.
    """
    excel = client.Dispatch("Excel.Application")
    try:
        for file in excel_files:
            workbook = excel.Workbooks.Open(file)
            try:
                # 0 == xlTypePDF
                workbook.ActiveSheet.ExportAsFixedFormat(0, file.replace(".xlsx", ".pdf"))
            finally:
                # SaveChanges=False: never block automation on a save prompt.
                workbook.Close(False)
    finally:
        # Always release the Excel process, even if a conversion fails.
        excel.Quit()
#!/bin/bash
# Cross-compiles a minimal shared FFmpeg 4.4 (adpcm_ima_qt codec only)
# for Android armeabi-v7a and arm64-v8a using the NDK LLVM toolchain.
NDK_PATH=~/Library/Android/sdk/ndk/22.1.7171670
# linux-x86_64
HOST_TAG=darwin-x86_64
MIN_SDK_VER=21
# ==================================
TOOLCHAINS=${NDK_PATH}/toolchains/llvm/prebuilt/${HOST_TAG}
SYSROOT=${TOOLCHAINS}/sysroot
# Configure and build FFmpeg for the target selected via the ARCH,
# OPTIMIZE_CFLAGS, PREFIX, CC and CXX variables set by the caller.
function build_one
{
    # Pick the binutils-style cross prefix matching the target arch.
    if [ $ARCH == "arm" ]
    then
        CROSS_PREFIX=$TOOLCHAINS/bin/arm-linux-androideabi-
    elif [ $ARCH == "aarch64" ]
    then
        CROSS_PREFIX=$TOOLCHAINS/bin/aarch64-linux-android-
    elif [ $ARCH == "x86_32" ]
    then
        CROSS_PREFIX=$TOOLCHAINS/bin/i686-linux-android-
    else
        CROSS_PREFIX=$TOOLCHAINS/bin/x86_64-linux-android-
    fi
    pushd ffmpeg-4.4
    # --disable-everything keeps the build tiny; only the IMA QT ADPCM
    # decoder/encoder is re-enabled below.
    ./configure \
        --prefix=$PREFIX \
        --extra-cflags="$OPTIMIZE_CFLAGS" \
        --cross-prefix=$CROSS_PREFIX \
        --sysroot=$SYSROOT \
        --enable-cross-compile \
        --target-os=android \
        --arch=$ARCH \
        --cc=${CC} \
        --cxx=${CC}++ \
        --ld=${CC} \
        --ar=${TOOLCHAINS}/bin/llvm-ar \
        --as=${CC} \
        --nm=${TOOLCHAINS}/bin/llvm-nm \
        --ranlib=${TOOLCHAINS}/bin/llvm-ranlib \
        --strip=${TOOLCHAINS}/bin/llvm-strip \
        --disable-everything \
        --disable-programs \
        --disable-x86asm \
        --disable-inline-asm \
        --disable-swresample \
        --disable-swscale \
        --disable-avfilter \
        --disable-avdevice \
        --disable-avformat \
        --disable-static \
        --enable-decoder=adpcm_ima_qt \
        --enable-encoder=adpcm_ima_qt \
        --enable-shared \
        --enable-small \
        --enable-pic
    make clean
    make -j6
    make install
    popd
}
#armeabi-v7a
ARCH=arm
OPTIMIZE_CFLAGS="-g -DANDROID -fdata-sections -ffunction-sections -funwind-tables -fstack-protector-strong -no-canonical-prefixes -D_FORTIFY_SOURCE=2 -march=armv7-a -mthumb -Wformat -Werror=format-security -Oz -DNDEBUG -fPIC --target=armv7-none-linux-androideabi$MIN_SDK_VER --gcc-toolchain=$TOOLCHAINS"
PREFIX=`pwd`/prebuilt/armeabi-v7a
export CC=$TOOLCHAINS/bin/armv7a-linux-androideabi$MIN_SDK_VER-clang
export CXX=$TOOLCHAINS/bin/armv7a-linux-androideabi$MIN_SDK_VER-clang++
build_one
#arm64-v8a
ARCH=aarch64
OPTIMIZE_CFLAGS="-g -DANDROID -fdata-sections -ffunction-sections -funwind-tables -fstack-protector-strong -no-canonical-prefixes -D_FORTIFY_SOURCE=2 -Wformat -Werror=format-security -O2 -DNDEBUG -fPIC --target=aarch64-none-linux-android$MIN_SDK_VER --gcc-toolchain=$TOOLCHAINS"
PREFIX=`pwd`/prebuilt/arm64-v8a
export CC=$TOOLCHAINS/bin/aarch64-linux-android$MIN_SDK_VER-clang
export CXX=$TOOLCHAINS/bin/aarch64-linux-android$MIN_SDK_VER-clang++
build_one
##x86_32
#ARCH=x86_32
#OPTIMIZE_CFLAGS="-g -DANDROID -fdata-sections -ffunction-sections -funwind-tables -fstack-protector-strong -no-canonical-prefixes -mstackrealign -D_FORTIFY_SOURCE=2 -Wformat -Werror=format-security -O2 -DNDEBUG -fPIC --target=i686-none-linux-android$MIN_SDK_VER --gcc-toolchain=$TOOLCHAINS"
#PREFIX=`pwd`/prebuilt/x86
#export CC=$TOOLCHAINS/bin/i686-linux-android$MIN_SDK_VER-clang
#export CXX=$TOOLCHAINS/bin/i686-linux-android$MIN_SDK_VER-clang++
#build_one
#
##x86_64
#ARCH=x86_64
#OPTIMIZE_CFLAGS="-g -DANDROID -fdata-sections -ffunction-sections -funwind-tables -fstack-protector-strong -no-canonical-prefixes -D_FORTIFY_SOURCE=2 -Wformat -Werror=format-security -O2 -DNDEBUG -fPIC --target=x86_64-none-linux-android$MIN_SDK_VER --gcc-toolchain=$TOOLCHAINS"
#PREFIX=`pwd`/prebuilt/x86_64
#export CC=$TOOLCHAINS/bin/x86_64-linux-android$MIN_SDK_VER-clang
#export CXX=$TOOLCHAINS/bin/x86_64-linux-android$MIN_SDK_VER-clang++
#build_one
#pragma once
#include <iostream>
#include <cmath>
namespace lio
{
/**
* @brief A 2D vector class
*
* @tparam T Type for storing the coordinates
*/
template <typename T>
struct Vec2
{
    T x = 0.0;  // X component
    T y = 0.0;  // Y component
    /**
     * @brief Construct a new Vec2 at (0, 0)
     *
     */
    Vec2() = default;
    /**
     * @brief Construct a new Vec2 from another Vec2
     *
     */
    Vec2(const Vec2&) = default;
    /**
     * @brief Construct a new Vec2 given the coordinates
     *
     * @param x X coordinate
     * @param y Y coordinate
     */
    Vec2(T x, T y) : x(x), y(y) {}
    /**
     * @brief Cast to Vec2\<U\>
     *
     * NOTE(review): this implicit conversion allows narrowing assignments
     * (e.g. Vec2<double> -> Vec2<int>) without a diagnostic.
     *
     * @tparam U The type to cast to
     * @return Vec2\<U\> The casted Vec2
     */
    template <typename U>
    operator Vec2<U>() const
    {
        return Vec2<U>(x, y);
    }
    /**
     * @brief Cast to Vec2\<U\>
     *
     * @tparam U The type to cast to
     * @return Vec2\<U\> The casted Vec2
     */
    template <typename U>
    Vec2<U> Cast() const
    {
        return Vec2<U>(x, y);
    }
    /**
     * @brief Set the coordinates of this (lvalue-qualified: not callable
     * on temporaries)
     *
     * @param x X coordinate
     * @param y Y coordinate
     * @return Vec2& This Vec2
     */
    Vec2& Set(T x, T y) &
    {
        this->x = x;
        this->y = y;
        return *this;
    }
    /**
     * @brief Get the rotated Vec2 of v by theta radian
     *
     * @param v The Vec2 to be rotated
     * @param theta The angle to be rotated in radian
     * @return Vec2 The Vec2 rotated
     */
    template <typename U>
    static Vec2 Rotated(Vec2 v, U theta)
    {
        return v.Rotated(theta);
    }
    /**
     * @brief Get the rotated Vec2 of this by theta radian
     *
     * NOTE(review): for integral T the rotated components are truncated
     * back to T -- confirm that loss of precision is acceptable.
     *
     * @param theta The angle to be rotated in radian
     * @return Vec2 The Vec2 rotated
     */
    template <typename U>
    Vec2<T> Rotated(U theta) const
    {
        U c = std::cos(theta);
        U s = std::sin(theta);
        T tx = c * x - s * y;
        T ty = s * x + c * y;
        return Vec2<T>(tx, ty);
    }
    /**
     * @brief Rotate this by theta radian
     *
     * @param theta The angle to be rotated in radian
     */
    template <typename U>
    Vec2& Rotate(U theta) &
    {
        *this = Rotated(theta);
        return *this;
    }
    /**
     * @brief Get the normalized Vec2 of v
     *
     * @param v The Vec2 to be normalized
     * @return Vec2 The normalized Vec2
     */
    static Vec2 Normalized(const Vec2& v)
    {
        return v.Normalized();
    }
    /**
     * @brief Get the normalized Vec2 of this
     *
     * Returns *this unchanged for the zero vector (avoids division by 0).
     *
     * @return Vec2 The normalized Vec2
     */
    Vec2 Normalized() const
    {
        if (Magnitude() == 0) return *this;
        return Vec2(*this / Magnitude());
    }
    /**
     * @brief Normalize this
     *
     * No-op for the zero vector. NOTE(review): for integral T the scaled
     * components are truncated back to T, typically yielding (0, 0).
     *
     * @return Vec2& This Vec2
     */
    Vec2& Normalize() &
    {
        if (Magnitude() == 0) return *this;
        *this *= (1.0 / Magnitude());
        return *this;
    }
    /**
     * @brief Get the orthogonal Vec2 of v
     *
     * @param v The Vec2 to be used
     * @return Vec2 The Vec2 orthogonal to v
     */
    static Vec2 Ortho(const Vec2& v)
    {
        return v.Ortho();
    }
    /**
     * @brief Get the orthogonal Vec2 of this
     *
     * @return Vec2 The Vec2 orthogonal to this
     */
    Vec2 Ortho() const
    {
        return Vec2(y, -x);
    }
    /**
     * @brief Get the distance between v1 and v2
     *
     * @param v1 The first Vec2
     * @param v2 The second Vec2
     * @return auto The distance
     */
    template <typename U = T>
    static auto Distance(const Vec2& v1, const Vec2<U>& v2)
    {
        return v1.Distance(v2);
    }
    /**
     * @brief Get the distance between this and v
     *
     * @param v The Vec2 to be used with this
     * @return auto The distance
     */
    template <typename U = T>
    auto Distance(const Vec2<U>& v) const
    {
        Vec2<decltype(std::declval<U&>() - std::declval<T&>())> d(v.x - x, v.y - y);
        return d.Magnitude();
    }
    /**
     * @brief Get the square of magnitude of v
     *
     * @param v The Vec2 to be used
     * @return auto The square of magnitude
     */
    static auto SqrMagnitude(const Vec2& v)
    {
        return v.SqrMagnitude();
    }
    /**
     * @brief Get the square of magnitude of this
     *
     * @return auto The square of magnitude
     */
    auto SqrMagnitude() const
    {
        return x * x + y * y;
    }
    /**
     * @brief Get the magnitude of v
     *
     * @param v The Vec2 to be used
     * @return auto The magnitude
     */
    static auto Magnitude(const Vec2& v)
    {
        return v.Magnitude();
    }
    /**
     * @brief Get the magnitude of this
     *
     * Uses std::hypot, which avoids intermediate overflow/underflow.
     *
     * @return auto The magnitude
     */
    auto Magnitude() const
    {
        return std::hypot(x, y);
    }
    /**
     * @brief Get the absolute values of v
     *
     * @param v The Vec2 to be used
     * @return Vec2 The absolute values
     */
    static Vec2 Abs(const Vec2& v)
    {
        return v.Abs();
    }
    /**
     * @brief Get the absolute values of this
     *
     * @return Vec2 The absolute values
     */
    Vec2 Abs() const
    {
        return Vec2(x < 0 ? x * -1 : x, y < 0 ? y * -1 : y);
    }
    /**
     * @brief Get the dot product of v1 and v2
     *
     * @param v1 The first Vec2
     * @param v2 The second Vec2
     * @return auto The dot product
     */
    template <typename U = T>
    static auto Dot(const Vec2& v1, const Vec2<U>& v2)
    {
        return v1.Dot(v2);
    }
    /**
     * @brief Get the dot product of this and v
     *
     * @param v The Vec2 to be used
     * @return auto The dot product
     */
    template <typename U = T>
    auto Dot(const Vec2<U>& v) const
    {
        return x * v.x + y * v.y;
    }
    /**
     * @brief Get the cross product of v1 and v2
     *
     * @param v1 The first Vec2
     * @param v2 The second Vec2
     * @return auto The cross product
     */
    template <typename U = T>
    static auto Cross(const Vec2& v1, const Vec2<U>& v2)
    {
        return v1.Cross(v2);
    }
    /**
     * @brief Get the cross product of this and v (scalar z-component of
     * the 3D cross product)
     *
     * @param v The Vec2 to be used
     * @return auto The cross product
     */
    template <typename U = T>
    auto Cross(const Vec2<U>& v) const
    {
        return (x * v.y) - (y * v.x);
    }
    /**
     * @brief Get the Hadamard (element-wise) multiplication product of v1 and v2
     *
     * @param v1 The first Vec2
     * @param v2 The second Vec2
     * @return Vec2 The Hadamard multiplication product
     */
    template <typename U = T>
    static auto HadamardMultiplication(const Vec2& v1, const Vec2<U>& v2)
    {
        return v1.HadamardMultiplication(v2);
    }
    /**
     * @brief Get the Hadamard (element-wise) multiplication product of this and v
     *
     * @param v The Vec2 to be used
     * @return Vec2 The Hadamard multiplication product
     */
    template <typename U = T>
    auto HadamardMultiplication(const Vec2<U>& v) const
    {
        return Vec2<decltype(std::declval<T&>() * std::declval<U&>())>(x * v.x, y * v.y);
    }
    /**
     * @brief Get the Hadamard (element-wise) division quotient of v1 and v2
     *
     * @param v1 The first Vec2
     * @param v2 The second Vec2
     * @return Vec2 The Hadamard division quotient
     */
    template <typename U = T>
    static auto HadamardDivision(const Vec2& v1, const Vec2<U>& v2)
    {
        return v1.HadamardDivision(v2);
    }
    /**
     * @brief Get the Hadamard (element-wise) division quotient of this and v
     *
     * No zero-division guard: components of v must be non-zero.
     *
     * @param v The Vec2 to be used
     * @return Vec2 The Hadamard division quotient
     */
    template <typename U = T>
    auto HadamardDivision(const Vec2<U>& v) const
    {
        return Vec2<decltype(std::declval<T&>() / std::declval<U&>())>(x / v.x, y / v.y);
    }
    /**
     * @brief Get the Hadamard (element-wise) modulo remainder of v1 and v2
     *
     * @param v1 The first Vec2
     * @param v2 The second Vec2
     * @return Vec2 The Hadamard modulo remainder
     */
    template <typename U = T>
    static auto HadamardModulo(const Vec2& v1, const Vec2<U>& v2)
    {
        return v1.HadamardModulo(v2);
    }
    /**
     * @brief Get the Hadamard (element-wise) modulo remainder of this and v
     *
     * Uses std::fmod, so the result is floating-point even for integral T.
     *
     * @param v The Vec2 to be used
     * @return Vec2 The Hadamard modulo remainder
     */
    template <typename U = T>
    auto HadamardModulo(const Vec2<U>& v) const
    {
        return Vec2<decltype(std::fmod(std::declval<T&>(), std::declval<U&>()))>(std::fmod(x, v.x), std::fmod(y, v.y));
    }
    /**
     * @brief Get the orientation of v1, v2, v3. Equivalent to v3.Cross(v2 - v1).
     * 0: collinear.
     * >0: v3 to rotate clockwise to become collinear
     * <0: v3 to rotate anticlockwise to become collinear
     *
     * @param v1 The first Vec2
     * @param v2 The second Vec2
     * @param v3 The third Vec2
     * @return auto The orientation
     */
    template <typename U = T, typename S = T>
    static auto OrientationEx(const Vec2& v1, const Vec2<U>& v2, const Vec2<S>& v3)
    {
        return v1.OrientationEx(v2, v3);
    }
    /**
     * @brief Get the orientation of this, v2, v3. Equivalent to v3.Cross(v2 - *this).
     * 0: collinear.
     * >0: v3 to rotate clockwise to become collinear
     * <0: v3 to rotate anticlockwise to become collinear
     *
     * @param v2 The second Vec2
     * @param v3 The third Vec2
     * @return auto The orientation
     */
    template <typename U = T, typename S = T>
    auto OrientationEx(const Vec2<U>& v2, const Vec2<S>& v3) const
    {
        return v3.Cross(v2 - *this);
    }
    /**
     * @brief Get the orientation of v1, v2, v3.
     * 0: collinear.
     * 1: v3 to rotate clockwise to become collinear
     * -1: v3 to rotate anticlockwise to become collinear
     *
     * @param v1 The first Vec2
     * @param v2 The second Vec2
     * @param v3 The third Vec2
     * @return int The orientation
     */
    template <typename U = T, typename S = T>
    static int Orientation(const Vec2& v1, const Vec2<U>& v2, const Vec2<S>& v3)
    {
        return v1.Orientation(v2, v3);
    }
    /**
     * @brief Get the orientation of this, v2, v3.
     * 0: collinear.
     * 1: v3 to rotate clockwise to become collinear
     * -1: v3 to rotate anticlockwise to become collinear
     *
     * @param v2 The second Vec2
     * @param v3 The third Vec2
     * @return int The orientation
     */
    template <typename U = T, typename S = T>
    int Orientation(const Vec2<U>& v2, const Vec2<S>& v3) const
    {
        auto val = OrientationEx(v2, v3);
        if (val == 0) return 0;
        return (val > 0) ? 1 : -1;
    }
    // Named constant vectors; defined after the struct.
    static const Vec2 One;    // (1, 1)
    static const Vec2 Zero;   // (0, 0)
    static const Vec2 Up;     // (0, 1)
    static const Vec2 Down;   // (0, -1)
    static const Vec2 Right;  // (1, 0)
    static const Vec2 Left;   // (-1, 0)
    static const Vec2 i;      // x unit vector (1, 0)
    static const Vec2 j;      // y unit vector (0, 1)
};
// Out-of-class definitions of the named constant vectors.
template <typename T> const Vec2<T> Vec2<T>::One = Vec2<T>(1, 1);
template <typename T> const Vec2<T> Vec2<T>::Zero = Vec2<T>(0, 0);
template <typename T> const Vec2<T> Vec2<T>::Up = Vec2<T>(0, 1);
template <typename T> const Vec2<T> Vec2<T>::Down = Vec2<T>(0, -1);
template <typename T> const Vec2<T> Vec2<T>::Right = Vec2<T>(1, 0);
template <typename T> const Vec2<T> Vec2<T>::Left = Vec2<T>(-1, 0);
template <typename T> const Vec2<T> Vec2<T>::i = Vec2<T>(1, 0);
template <typename T> const Vec2<T> Vec2<T>::j = Vec2<T>(0, 1);
// Component-wise arithmetic between two (possibly differently typed)
// vectors; the result type follows the usual arithmetic promotions.
template <typename T, typename U>
auto operator+(const Vec2<T>& v1, const Vec2<U>& v2)
{
    return Vec2<decltype(std::declval<T&>() + std::declval<U&>())>(v1.x + v2.x, v1.y + v2.y);
}
template <typename T, typename U>
auto operator-(const Vec2<T>& v1, const Vec2<U>& v2)
{
    return Vec2<decltype(std::declval<T&>() - std::declval<U&>())>(v1.x - v2.x, v1.y - v2.y);
}
// Note: operator* on two vectors is Hadamard (element-wise), not dot.
template <typename T, typename U>
auto operator*(const Vec2<T>& v1, const Vec2<U>& v2)
{
    return Vec2<decltype(std::declval<T&>() * std::declval<U&>())>(v1.x * v2.x, v1.y * v2.y);
}
template <typename T, typename U>
auto operator/(const Vec2<T>& v1, const Vec2<U>& v2)
{
    return Vec2<decltype(std::declval<T&>() / std::declval<U&>())>(v1.x / v2.x, v1.y / v2.y);
}
template <typename T, typename U>
auto operator%(const Vec2<T>& v1, const Vec2<U>& v2)
{
    return Vec2<decltype(std::fmod(std::declval<T&>(), std::declval<U&>()))>(std::fmod(v1.x, v2.x), std::fmod(v1.y, v2.y));
}
// Compound assignments delegate to the binary operators; mixed-type
// results are narrowed back to Vec2<T> via the implicit cast operator.
template <typename T, typename U>
Vec2<T>& operator+=(Vec2<T>& v1, const Vec2<U>& v2)
{
    return v1 = v1 + v2;
}
template <typename T, typename U>
Vec2<T>& operator-=(Vec2<T>& v1, const Vec2<U>& v2)
{
    return v1 = v1 - v2;
}
template <typename T, typename U>
Vec2<T>& operator*=(Vec2<T>& v1, const Vec2<U>& v2)
{
    return v1 = v1 * v2;
}
template <typename T, typename U>
Vec2<T>& operator/=(Vec2<T>& v1, const Vec2<U>& v2)
{
    return v1 = v1 / v2;
}
template <typename T, typename U>
Vec2<T>& operator%=(Vec2<T>& v1, const Vec2<U>& v2)
{
    return v1 = v1 % v2;
}
// Scalar-on-the-left forms.
template <typename T, typename U>
auto operator*(U s, const Vec2<T>& v)
{
    return Vec2<decltype(s * std::declval<T&>())>(v.x * s, v.y * s);
}
template <typename T, typename U>
auto operator/(U s, const Vec2<T>& v)
{
    return Vec2<decltype(s / std::declval<T&>())>(s / v.x, s / v.y);
}
template <typename T, typename U>
auto operator%(U s, const Vec2<T>& v)
{
    // NOTE(review): the decltype argument order differs from the computed
    // expression (fmod(T, s) vs fmod(s, v.x)); the promoted result type is
    // the same either way, but confirm this is intentional.
    return Vec2<decltype(std::fmod(std::declval<T&>(), s))>(std::fmod(s, v.x), std::fmod(s, v.y));
}
// Scalar-on-the-right forms.
template <typename T, typename U>
auto operator*(const Vec2<T>& v, U s)
{
    return Vec2<decltype(s * std::declval<T&>())>(v.x * s, v.y * s)
;
}
template <typename T, typename U>
auto operator/(const Vec2<T>& v, U s)
{
    return Vec2<decltype(s / std::declval<T&>())>(v.x / s, v.y / s);
}
template <typename T, typename U>
auto operator%(const Vec2<T>& v, U s)
{
    return Vec2<decltype(std::fmod(s, std::declval<T&>()))>(std::fmod(v.x, s), std::fmod(v.y, s));
}
template <typename T, typename U>
Vec2<T>& operator*=(Vec2<T>& v, U s)
{
    return v = v * s;
}
template <typename T, typename U>
Vec2<T>& operator/=(Vec2<T>& v, U s)
{
    return v = v / s;
}
template <typename T, typename U>
Vec2<T>& operator%=(Vec2<T>& v, U s)
{
    return v = v % s;
}
// Unary sign operators.
template <typename T>
Vec2<T> operator-(const Vec2<T>& v)
{
    return Vec2<T>(-v.x, -v.y);
}
template <typename T>
Vec2<T> operator+(const Vec2<T>& v)
{
    return Vec2<T>(v.x, v.y);
}
// Exact component-wise comparison (beware with floating-point T).
template <typename T, typename U>
bool operator==(const Vec2<T>& v1, const Vec2<U>& v2)
{
    return v1.x == v2.x && v1.y == v2.y;
}
template <typename T, typename U>
bool operator!=(const Vec2<T>& v1, const Vec2<U>& v2)
{
    return !(v1.x == v2.x && v1.y == v2.y);
}
// Stream output in "(x, y)" form.
template <typename T>
std::ostream& operator<<(std::ostream& os, const Vec2<T>& v)
{
    os << "(" << v.x << ", " << v.y << ")";
    return os;
}
// Convenience aliases for the common element types.
using Vec2d = Vec2<double>;
using Vec2f = Vec2<float>;
using Vec2i = Vec2<int>;
using Vec2u = Vec2<unsigned int>;
} |
#
# Copyright SecureKey Technologies Inc. All Rights Reserved.
#
# SPDX-License-Identifier: Apache-2.0
#
#!/usr/bin/env bash
# Starts a local minikube cluster and patches the kube-system coredns
# ConfigMap so that <service>.<DOMAIN> names resolve to the minikube IP.

# Set default values, which may be overriden by the environment variables
: ${DOMAIN:=trustbloc.dev}
: ${MEMORY:=6g}
: ${CPUS:=4}
: ${ADDONS:=ingress,ingress-dns,dashboard}

PATCH=.ingress_coredns.patch

# List of services used to generate domain names
SERVICES=$( cat service_list.txt )
OS=$( uname -s | tr '[:upper:]' '[:lower:]' )

# Use specified driver if set, otherwise minikube will auto-detect the best default driver for a given platform
if [[ -n $DRIVER ]]; then
    DRIVER="--driver=$DRIVER"
else
    # MacOS requires hyperkit driver instead of the auto-detected for compatibility with ingress addon
    if [[ $OS == darwin ]]; then
        DRIVER='--driver=hyperkit'
    fi
fi

minikube start --memory=$MEMORY --cpus=$CPUS --addons=$ADDONS $DRIVER $MINIKUBE_OPTIONS

MINIKUBE_IP=$( minikube ip )

# Patch coredns configMap
if ! kubectl get cm coredns -n kube-system -o yaml | grep -q hosts; then
    # No hosts block yet: generate a full one and insert it after 'loadbalance'.
    echo '        hosts {' > $PATCH
    for service in $SERVICES; do
        echo "            $MINIKUBE_IP $service.$DOMAIN" >> $PATCH
    done
    echo '            fallthrough' >> $PATCH
    echo '        }' >> $PATCH
    echo 'Patching coredns ConfigMap'
    EDITOR='sed -i "/loadbalance/r.ingress_coredns.patch"' kubectl edit cm coredns -n kube-system
    kubectl delete po -l k8s-app=kube-dns -n kube-system # apply new configmap changes
else
    # A hosts block already exists: generate only the host entries and insert
    # them inside it.
    # rm -f: the patch file may not exist yet (fresh checkout), and plain
    # `rm` would error out in that case.
    rm -f "$PATCH"
    for service in $SERVICES; do
        echo "            $MINIKUBE_IP $service.$DOMAIN" >> $PATCH
    done
    echo 'Patching coredns ConfigMap'
    EDITOR='sed -i "/hosts {/r.ingress_coredns.patch"' kubectl edit cm coredns -n kube-system
    kubectl delete po -l k8s-app=kube-dns -n kube-system # apply new configmap changes
fi

echo '!!! Make sure you have these entries added to your /etc/hosts !!!'
echo '=========================== CUT =========================='
for service in $SERVICES; do
    echo "$MINIKUBE_IP $service.$DOMAIN"
done
echo '=========================== CUT =========================='
|
class ErrorFormatter {
    /// Prints `message` prefixed with a color-coded severity tag.
    ///
    /// - Parameters:
    ///   - message: The text to display.
    ///   - severity: One of "error" (red), "warning" (yellow) or "info"
    ///     (green); anything else prints an uncolored "[Unknown]" tag.
    func formatError(message: String, severity: String) {
        // Fix: append the SGR reset sequence so the chosen color does not
        // leak into all subsequent terminal output.
        let reset = "\u{001B}[0m"
        var formattedMessage = ""
        switch severity {
        case "error":
            formattedMessage = "\u{001B}[31m[Error] \(message)\(reset)"
        case "warning":
            formattedMessage = "\u{001B}[33m[Warning] \(message)\(reset)"
        case "info":
            formattedMessage = "\u{001B}[32m[Info] \(message)\(reset)"
        default:
            formattedMessage = "[Unknown] \(message)"
        }
        print(formattedMessage)
    }
}
// Example usage: prints the message with a red "[Error]" prefix.
let errorFormatter = ErrorFormatter()
errorFormatter.formatError(message: "authors: manage_add, manage_update, manage_delete", severity: "error")
rexdep
|
import {Ts} from "./types"
import * as zjson from "./zjson"
/** Union of all server-push message payloads handled by the client. */
export type Payload =
  | SearchRecords
  | SearchWarnings
  | SearchStats
  | SearchEnd
  | TaskStart
  | TaskEnd
  | PcapPostStatus

/** A batch of result records for one search channel. */
export type SearchRecords = {
  type: "SearchRecords"
  records: zjson.Items
  channel_id: number
}

/** Non-fatal warnings emitted while a search runs. */
export type SearchWarnings = {
  type: "SearchWarnings"
  warnings: string[]
}

/** Periodic progress counters for an in-flight search. */
export type SearchStats = {
  type: "SearchStats"
  update_time: Ts
  start_time: Ts
  bytes_read: number
  bytes_matched: number
  records_read: number
  records_matched: number
}

/** Marks the end of results on a channel, with a reason string. */
export type SearchEnd = {
  type: "SearchEnd"
  channel_id: number
  reason: string
}

/** Signals that the server started the task with the given id. */
export type TaskStart = {
  type: "TaskStart"
  task_id: number
}

/** Signals that the task with the given id finished. */
export type TaskEnd = {
  type: "TaskEnd"
  task_id: number
}

/** Progress of a pcap file upload/ingest (sizes in bytes). */
export type PcapPostStatus = {
  type: "PcapPostStatus"
  snapshot_count: number
  start_time: Ts
  update_time: Ts
  pcap_total_size: number
  pcap_read_size: number
}
|
def detect_anomalies(data: pd.DataFrame) -> pd.Series:
    """Score each row of ``data`` for anomalousness.

    Pipeline: standardize features, project onto 2 principal components,
    then fit an IsolationForest with contamination fixed at 10%.

    Returns a Series of decision-function scores aligned to ``data``'s
    index (lower scores = more anomalous).

    NOTE(review): IsolationForest is created without random_state, so
    scores are not reproducible across runs -- confirm whether
    determinism matters here.
    """
    # Step 1: Standardize the data
    scaler = StandardScaler()
    scaled_data = scaler.fit_transform(data)
    # Step 2: Reduce dimensionality using PCA
    pca = PCA(n_components=2)
    reduced_data = pca.fit_transform(scaled_data)
    # Step 3: Apply Isolation Forest for anomaly detection
    clf = IsolationForest(contamination=0.1)  # Assuming 10% of the data are anomalies
    clf.fit(reduced_data)
    anomaly_scores = clf.decision_function(reduced_data)
    # Step 4: Return anomaly scores as a pandas Series
    return pd.Series(anomaly_scores, index=data.index)
/**
* Layout component that queries for data
* with Gatsby's StaticQuery component
*
* See: https://www.gatsbyjs.org/docs/static-query/
*/
import React from "react"
import PropTypes from "prop-types"
import { StaticQuery, graphql } from "gatsby"
import { Helmet } from "react-helmet"
import Header from "./header"
import "./layout.css"
// Site-wide layout: wraps pages with the header, footer and third-party
// stylesheet links injected via react-helmet.
function Layout({ children }) {
  // media="none" + onload="this.media='all'" is a non-blocking CSS load
  // trick: the sheet downloads without blocking render, then activates.
  // NOTE(review): onload here is a plain string attribute inside JSX --
  // React normally ignores string event handlers; confirm react-helmet
  // forwards it to the real DOM <link> element as intended.
  return (
    <StaticQuery
      query={graphql`
        query SiteTitleQuery {
          site {
            siteMetadata {
              title
            }
          }
        }
      `}
      render={data => (
        <>
          <Helmet>
            <link rel="stylesheet" media="none" type="text/css" href="https://maxcdn.bootstrapcdn.com/font-awesome/4.7.0/css/font-awesome.min.css" onload="this.media='all';" />
            <link rel="stylesheet" media="all" href="https://cdnjs.cloudflare.com/ajax/libs/MaterialDesign-Webfont/5.3.45/css/materialdesignicons.min.css" />
            <link rel="stylesheet" media="none" type="text/css" href="https://cdn.rawgit.com/konpa/devicon/df6431e323547add1b4cf45992913f15286456d3/devicon.min.css" onload="this.media='all';" />
            <link rel="stylesheet" href="https://fonts.googleapis.com/css?family=Anonymous+Pro&display=swap" />
          </Helmet>
          <Header />
          <main>{children}</main>
          <footer className="footer">
            © {new Date().getFullYear()}, Built with
            {` `}
            <a href="https://www.gatsbyjs.org">Gatsby</a>
          </footer>
        </>
      )}
    />
  )
}
Layout.propTypes = {
  children: PropTypes.node.isRequired,
}
export default Layout
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for RHSA-2014:0377
#
# Security announcement date: 2014-04-08 07:07:23 UTC
# Script generation date: 2017-01-01 21:15:14 UTC
#
# Operating System: Red Hat 6
# Architecture: x86_64
#
# Vulnerable packages fix on version:
#   - openssl.x86_64:1.0.1e-16.el6_5.7
#   - openssl-debuginfo.x86_64:1.0.1e-16.el6_5.7
#   - openssl-devel.x86_64:1.0.1e-16.el6_5.7
#   - openssl-perl.x86_64:1.0.1e-16.el6_5.7
#   - openssl-static.x86_64:1.0.1e-16.el6_5.7
#
# Last versions recommanded by security team:
#   - openssl.x86_64:1.0.1e-30.el6_6.13
#   - openssl-debuginfo.x86_64:1.0.1e-30.el6_6.13
#   - openssl-devel.x86_64:1.0.1e-30.el6_6.13
#   - openssl-perl.x86_64:1.0.1e-30.el6_6.13
#   - openssl-static.x86_64:1.0.1e-30.el6_6.13
#
# CVE List:
#   - CVE-2014-0160
#
# More details:
#   - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE

# Fix: the generated "name.arch-version" spelling is not valid yum syntax
# and omitted the release; use the name-version-release.arch form matching
# the fix versions listed above.
sudo yum install openssl-1.0.1e-16.el6_5.7.x86_64 -y
sudo yum install openssl-debuginfo-1.0.1e-16.el6_5.7.x86_64 -y
sudo yum install openssl-devel-1.0.1e-16.el6_5.7.x86_64 -y
sudo yum install openssl-perl-1.0.1e-16.el6_5.7.x86_64 -y
sudo yum install openssl-static-1.0.1e-16.el6_5.7.x86_64 -y
# Scrabble point value of each lowercase letter.
SCRABBLE_LETTER_VALUES = {
    'a': 1, 'b': 3, 'c': 3, 'd': 2, 'e': 1, 'f': 4, 'g': 2, 'h': 4, 'i': 1, 'j': 8, 'k': 5, 'l': 1, 'm': 3, 'n': 1,
    'o': 1, 'p': 3, 'q': 10, 'r': 1, 's': 1, 't': 1, 'u': 1, 'v': 4, 'w': 4, 'x': 8, 'y': 4, 'z': 10
}


def getWordScore(word, n):
    """
    Returns the score for a word. Assumes the word is a valid word.

    The score is the sum of the Scrabble point values of the word's
    letters, multiplied by the word's length, plus a 50-point bonus
    when the word uses all n letters of the hand on the first turn.

    word: string (lowercase letters)
    n: integer (HAND_SIZE; i.e., hand size required for additional points)
    returns: int >= 0
    """
    letter_sum = sum(SCRABBLE_LETTER_VALUES[ch] for ch in word)
    score = letter_sum * len(word)
    return score + 50 if len(word) == n else score


print(getWordScore('waybill', 7))
|
package net.dean.jraw.fluent;
import net.dean.jraw.ApiException;
import net.dean.jraw.models.Message;
import net.dean.jraw.paginators.InboxPaginator;
/**
 * A reference to an authenticated-user's inbox. All operations delegate to
 * the inbox manager held by the {@link ManagerAggregation}.
 */
public final class InboxReference extends ElevatedAbstractReference {
    /**
     * Instantiates a new InboxReference
     *
     * @param managers A manager aggregation. Must not be null.
     */
    protected InboxReference(ManagerAggregation managers) {
        super(managers);
    }
    /**
     * Creates a new Paginator that will iterate through unread messages. Equivalent to
     * {@code read("unread")}.
     */
    public InboxPaginator read() {
        return read("unread");
    }
    /**
     * Creates a new Paginator that will iterate through the inbox.
     * @param what One of "inbox", "unread", "messages", "sent", "moderator", or "moderator/unread"
     */
    public InboxPaginator read(String what) {
        return new InboxPaginator(reddit, what);
    }
    /**
     * Composes a message
     * @param to Recipient username
     * @param subject Message subject
     * @param body Message body
     * @throws ApiException If the reddit API returned an error
     */
    @NetworkingCall
    public void compose(String to, String subject, String body) throws ApiException {
        managers.inbox().compose(to, subject, body);
    }
    /**
     * Mark the given message(s) as read or unread.
     * @param read true to mark as read, false to mark as unread
     * @param m The first message
     * @param more Any additional messages
     */
    @NetworkingCall
    public void readMessage(boolean read, Message m, Message... more) {
        managers.inbox().setRead(read, m, more);
    }
    /** Mark all unread messages as 'read' */
    @NetworkingCall
    public void readAllMessages() {
        managers.inbox().setAllRead();
    }
}
|
<gh_stars>0
/*
* Copyright (C) 2015-2017 Uber Technologies, Inc. (<EMAIL>)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.uber.stream.kafka.mirrormaker.controller.core;
import com.uber.stream.kafka.mirrormaker.controller.ControllerConf;
import com.uber.stream.kafka.mirrormaker.controller.ControllerInstance;
import com.uber.stream.kafka.mirrormaker.controller.utils.HelixUtils;
import org.apache.helix.HelixManager;
import org.apache.helix.HelixManagerFactory;
import org.apache.helix.InstanceType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Helix between uReplicator Manager and Controllers.
* @author <NAME>
*/
public class ManagerControllerHelix {
private static final Logger LOGGER = LoggerFactory.getLogger(ManagerControllerHelix.class);

// Helix cluster name prefixes and config key prefix used by this deployment.
private static final String MANAGER_CONTROLLER_HELIX_PREFIX = "manager-controller-";
private static final String CONTROLLER_WORKER_HELIX_PREFIX = "controller-worker-";
private static final String CONFIG_KAFKA_CLUSTER_KEY_PREFIX = "kafka.cluster.zkStr.";

private final ControllerConf _controllerConf;
// Name of the manager-controller Helix cluster this participant joins.
private final String _helixClusterName;
private final String _helixZkURL;
private final String _instanceId;
private HelixManager _helixZkManager;

// Guards the route-assignment state below (see handleRouteAssignmentEvent).
private final Object _handlerLock = new Object();
// Currently assigned route, or null when no route is active.
private ControllerInstance _currentControllerInstance = null;
private String _currentSrcCluster = null;
private String _currentDstCluster = null;
private String _currentRoutePartition = null;
/**
 * Builds the Helix participant identity from the controller configuration:
 * cluster name = "manager-controller-" + deployment name.
 *
 * @param controllerConf controller configuration (ZK address, deployment name, instance id)
 */
public ManagerControllerHelix(ControllerConf controllerConf) {
    _controllerConf = controllerConf;
    _helixClusterName = MANAGER_CONTROLLER_HELIX_PREFIX + _controllerConf.getDeploymentName();
    _helixZkURL = HelixUtils.getAbsoluteZkPathForHelix(_controllerConf.getZkStr());
    _instanceId = controllerConf.getInstanceId();
}
/**
 * Connects this instance to the manager-controller Helix cluster as a
 * PARTICIPANT using the OnlineOffline state model.
 *
 * NOTE(review): a failed connect is only logged, not rethrown, so callers
 * cannot tell that startup failed -- confirm this is intentional.
 */
public synchronized void start() {
    LOGGER.info("Trying to start ManagerControllerHelix!");
    _helixZkManager = HelixManagerFactory.getZKHelixManager(_helixClusterName,
        _instanceId,
        InstanceType.PARTICIPANT,
        _helixZkURL);
    // Route ONLINE/OFFLINE transitions are delivered through this factory.
    _helixZkManager.getStateMachineEngine().registerStateModelFactory("OnlineOffline",
        new ControllerStateModelFactory(this));
    try {
        _helixZkManager.connect();
    } catch (Exception e) {
        LOGGER.error("Failed to start ManagerControllerHelix " + _helixClusterName, e);
    }
}
public synchronized void stop() {
LOGGER.info("Trying to stop ManagerControllerHelix!");
_helixZkManager.disconnect();
}
public void handleRouteAssignmentEvent(String srcCluster, String dstCluster, String routePartition, String toState) {
synchronized (_handlerLock) {
if (toState.equals("ONLINE")) {
handleRouteAssignmentOnline(srcCluster, dstCluster, routePartition);
} else if (toState.equals("OFFLINE")) {
handleRouteAssignmentOffline(srcCluster, dstCluster, routePartition);
} else if (toState.equals("DROPPED")) {
handleRouteAssignmentDropped(srcCluster, dstCluster, routePartition);
} else {
LOGGER.error("Invalid route assignement state {}", toState);
}
}
}
private void handleRouteAssignmentOnline(String srcCluster, String dstCluster, String routePartition) {
if (_currentControllerInstance != null) {
if (!(srcCluster.equals(_currentSrcCluster) && dstCluster.equals(_currentDstCluster)
&& routePartition.equals(_currentRoutePartition))) {
String msg = String.format(
"Invalid route partition assignment. Current route src=%s, dst=%s, partition=%s; new route src=%s, dst=%s, partition=%s",
_currentSrcCluster, _currentDstCluster, _currentRoutePartition, srcCluster, dstCluster, routePartition);
LOGGER.error(msg);
throw new IllegalArgumentException(msg);
} else {
if (_currentControllerInstance.isStarted()) {
LOGGER.info("Controller has already been started");
} else {
String msg = "Controller has already been initiated but not started yet";
LOGGER.error(msg);
throw new IllegalStateException(msg);
}
}
return;
}
// validate src and dst clusters in configuration
if (srcCluster.equals(dstCluster)) {
String msg = String.format("The source cluster %s cannot be the same as destination cluster", srcCluster);
LOGGER.error(msg);
throw new IllegalArgumentException(msg);
}
if (!_controllerConf.getSourceClusters().contains(srcCluster)) {
String msg = String.format("The cluster %s is not a valid source cluster", srcCluster);
LOGGER.error(msg);
throw new IllegalArgumentException(msg);
}
if (!_controllerConf.getDestinationClusters().contains(dstCluster)) {
String msg = String.format("The cluster %s is not a valid destination cluster", dstCluster);
LOGGER.error(msg);
throw new IllegalArgumentException(msg);
}
// set corresponding zkpath for src and dst clusters
String srcKafkaZkPath = (String)_controllerConf.getProperty(CONFIG_KAFKA_CLUSTER_KEY_PREFIX + srcCluster);
if (srcKafkaZkPath == null) {
String msg = "Failed to find configuration of ZooKeeper path for source cluster " + srcCluster;
LOGGER.error(msg);
throw new IllegalArgumentException(msg);
}
_controllerConf.setSrcKafkaZkPath(srcKafkaZkPath);
String destKafkaZkPath = (String)_controllerConf.getProperty(CONFIG_KAFKA_CLUSTER_KEY_PREFIX + dstCluster);
if (destKafkaZkPath == null) {
String msg = "Failed to find configuration of ZooKeeper path for destination cluster " + dstCluster;
LOGGER.error(msg);
throw new IllegalArgumentException(msg);
}
_controllerConf.setDestKafkaZkPath(destKafkaZkPath);
String clusterName = CONTROLLER_WORKER_HELIX_PREFIX + srcCluster + "-" + dstCluster + "-" + routePartition;
_controllerConf.setHelixClusterName(clusterName);
_controllerConf.setEnableSrcKafkaValidation("true");
_controllerConf.setGroupId("ureplicator-" + srcCluster + "-" + dstCluster);
_currentControllerInstance = new ControllerInstance(this, _controllerConf);
LOGGER.info("Starting controller instance for route {}", clusterName);
try {
_currentControllerInstance.start();
} catch (Exception e) {
String msg = "Failed to start controller instance. Roll back.";
LOGGER.error(msg);
if (_currentControllerInstance.stop()) {
_currentControllerInstance = null;
} else {
LOGGER.error("Failed to stop the controller instance.");
}
throw new RuntimeException(msg);
}
_currentSrcCluster = srcCluster;
_currentDstCluster = dstCluster;
_currentRoutePartition = routePartition;
LOGGER.info("Successfully started controller instance for route {}", clusterName);
}
private boolean handleRouteAssignmentOffline(String srcCluster, String dstCluster, String routePartition) {
if (_currentControllerInstance == null) {
String msg = "Controller instance is not started yet";
LOGGER.info(msg);
return false;
}
if (!(srcCluster.equals(_currentSrcCluster) && dstCluster.equals(_currentDstCluster)
&& routePartition.equals(_currentRoutePartition))) {
String msg = String.format(
"Invalid route to offline. Current route src=%s, dst=%s, routeId=%s; new route src=%s, dst=%s, routeId=%s",
_currentSrcCluster, _currentDstCluster, _currentRoutePartition, srcCluster, dstCluster, routePartition);
LOGGER.error(msg);
throw new IllegalArgumentException(msg);
}
String clusterName = CONTROLLER_WORKER_HELIX_PREFIX + srcCluster + "-" + dstCluster + "-" + routePartition;
LOGGER.info("Stopping controller instance for cluster: " + clusterName);
if (!_currentControllerInstance.stop()) {
LOGGER.error("Failed to stop controller instance. Shutdown JVM instead.");
System.exit(-1);
}
_currentSrcCluster = null;
_currentDstCluster = null;
_currentRoutePartition = null;
_currentControllerInstance = null;
LOGGER.info("Successfully stopped controller instance for route {}", clusterName);
return true;
}
private boolean handleRouteAssignmentDropped(String srcCluster, String dstCluster, String routePartition) {
return handleRouteAssignmentOffline(srcCluster, dstCluster, routePartition);
}
public boolean handleTopicAssignmentEvent(String topic, String srcCluster, String dstCluster, String routePartition, String toState) {
synchronized (_handlerLock) {
if (_currentControllerInstance == null) {
if (toState.equals("OFFLINE") || toState.equals("DROPPED")) {
LOGGER.error(
"Controller is not started yet. Failed to action={} topic={} for srcCluster={}, dstCluster={}, routePartition={}",
toState, topic, srcCluster, dstCluster, routePartition);
return false;
}
LOGGER.info(
"Controller is not started yet. Start a new instance: srcCluster={}, dstCluster={}, routePartition={}",
srcCluster, dstCluster, routePartition);
handleRouteAssignmentOnline(srcCluster, dstCluster, routePartition);
}
if (!(srcCluster.equals(_currentSrcCluster) && dstCluster.equals(_currentDstCluster))) {
String msg = String.format("Inconsistent route assignment: expected src=%s, dst=%s, but given src=%s, dst=%s, toState=%s",
_currentSrcCluster, _currentDstCluster, srcCluster, dstCluster, toState);
LOGGER.error(msg);
if (!toState.equals("OFFLINE") && !toState.equals("DROPPED")) {
throw new IllegalArgumentException(msg);
} else {
return false;
}
}
if (toState.equals("ONLINE")) {
return handleTopicAssignmentOnline(topic, srcCluster, dstCluster);
} else if (toState.equals("OFFLINE")) {
return handleTopicAssignmentOffline(topic, srcCluster, dstCluster);
} else if (toState.equals("DROPPED")) {
return handleTopicAssignmentDropped(topic, srcCluster, dstCluster);
} else {
String msg = "Invalid topic assignement state: " + toState;
LOGGER.error(msg);
throw new IllegalArgumentException(msg);
}
}
}
private boolean handleTopicAssignmentOnline(String topic, String srcCluster, String dstCluster) {
HelixMirrorMakerManager helixManager = _currentControllerInstance.getHelixResourceManager();
if (helixManager.isTopicExisted(topic)) {
LOGGER.warn("Topic {} already exists from cluster {} to {}", topic, srcCluster, dstCluster);
return false;
}
TopicPartition topicPartitionInfo = null;
KafkaBrokerTopicObserver topicObserver = _currentControllerInstance.getSourceKafkaTopicObserver();
if (topicObserver == null) {
// no source partition information, use partitions=1 and depend on auto-expanding later
topicPartitionInfo = new TopicPartition(topic, 1);
} else {
topicPartitionInfo = topicObserver.getTopicPartitionWithRefresh(topic);
if (topicPartitionInfo == null) {
String msg = String.format(
"Failed to whitelist topic %s on controller because topic does not exists in src cluster %s",
topic, srcCluster);
LOGGER.error(msg);
throw new IllegalArgumentException(msg);
}
}
helixManager.addTopicToMirrorMaker(topicPartitionInfo);
LOGGER.info("Whitelisted topic {} from cluster {} to {}", topic, srcCluster, dstCluster);
return true;
}
private boolean handleTopicAssignmentOffline(String topic, String srcCluster, String dstCluster) {
HelixMirrorMakerManager helixManager = _currentControllerInstance.getHelixResourceManager();
if (!helixManager.isTopicExisted(topic)) {
LOGGER.warn("Topic {} does not exist from cluster {} to {}", topic, srcCluster, dstCluster);
return false;
}
helixManager.deleteTopicInMirrorMaker(topic);
LOGGER.info("Blacklisted topic {} from {} to {}", topic, srcCluster, dstCluster);
return true;
}
private boolean handleTopicAssignmentDropped(String topic, String srcCluster, String dstCluster) {
return handleTopicAssignmentOffline(topic, srcCluster, dstCluster);
}
public ControllerInstance getControllerInstance() {
return _currentControllerInstance;
}
}
|
<filename>client_test.go<gh_stars>1-10
package cdek
import (
"crypto/md5"
"encoding/hex"
"reflect"
"testing"
"time"
)
// TestNewClient checks that NewClient wires the given API URL into the client
// and falls back to the default calculator URL.
func TestNewClient(t *testing.T) {
	cases := map[string]struct {
		apiURL string
		want   *Client
	}{
		"Client created": {
			apiURL: "apiURL",
			want: &Client{
				apiURL:        "apiURL",
				calculatorURL: calculatorURLDefault,
			},
		},
	}
	for name, tc := range cases {
		t.Run(name, func(t *testing.T) {
			if got := NewClient(tc.apiURL); !reflect.DeepEqual(got, tc.want) {
				t.Errorf("NewClient() = %v, want %v", got, tc.want)
			}
		})
	}
}
// TestAuth_EncodedSecure verifies auth.encodedSecure: it must return today's
// date formatted as "2006-01-02" together with the lowercase-hex MD5 digest of
// "<date>&<secure>". The expected digest is recomputed here with the same
// formula, so the test pins the algorithm rather than a fixed hash value.
// NOTE(review): `now` is captured before the call under test, so the test can
// flake if the date rolls over between the two — confirm this is acceptable.
func TestAuth_EncodedSecure(t *testing.T) {
	now := time.Now().Format("2006-01-02")
	encoder := md5.New()
	_, _ = encoder.Write([]byte(now + "&" + "testSecure"))
	testSecureEncoded := hex.EncodeToString(encoder.Sum(nil))
	type fields struct {
		Account string
		Secure  string
	}
	tests := []struct {
		name              string
		fields            fields
		wantDate          string
		wantEncodedSecure string
	}{
		{
			name: "successful encoding",
			fields: fields{
				Account: "testAccount",
				Secure:  "testSecure",
			},
			wantDate:          now,
			wantEncodedSecure: testSecureEncoded,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			a := auth{
				account: tt.fields.Account,
				secure:  tt.fields.Secure,
			}
			gotDate, gotEncodedSecure := a.encodedSecure()
			if gotDate != tt.wantDate {
				t.Errorf("encodedSecure() gotDate = %v, want %v", gotDate, tt.wantDate)
			}
			if gotEncodedSecure != tt.wantEncodedSecure {
				t.Errorf("encodedSecure() gotEncodedSecure = %v, want %v", gotEncodedSecure, tt.wantEncodedSecure)
			}
		})
	}
}
func TestClient_SetAuth(t *testing.T) {
type fields struct {
Auth *auth
CdekAPIURL string
CalculatorURL string
}
type args struct {
account string
secure string
}
tests := []struct {
name string
fields fields
args args
want *Client
}{
{
name: "auth set",
fields: fields{
Auth: nil,
},
args: args{
account: "testAccount",
secure: "testSecure",
},
want: &Client{
auth: &auth{
account: "testAccount",
secure: "testSecure",
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
clientConf := &Client{
auth: tt.fields.Auth,
apiURL: tt.fields.CdekAPIURL,
calculatorURL: tt.fields.CalculatorURL,
}
if got := clientConf.SetAuth(tt.args.account, tt.args.secure); !reflect.DeepEqual(got, tt.want) {
t.Errorf("SetAuth() = %v, want %v", got, tt.want)
}
})
}
}
// TestClient_SetCalculatorURL checks that SetCalculatorURL stores the given
// URL on the client — both when no URL is set yet and when overwriting a
// previously configured one — and returns the client itself.
func TestClient_SetCalculatorURL(t *testing.T) {
	type fields struct {
		Auth          *auth
		CdekAPIURL    string
		CalculatorURL string
	}
	type args struct {
		calculatorURL string
	}
	tests := []struct {
		name   string
		fields fields
		args   args
		want   *Client
	}{
		{
			name: "set url",
			fields: fields{
				CalculatorURL: "",
			},
			args: args{
				calculatorURL: "testCalcUrl",
			},
			want: &Client{
				calculatorURL: "testCalcUrl",
			},
		},
		{
			name: "rewrite url",
			fields: fields{
				// Start from a non-empty URL so this case actually exercises
				// overwriting (previously it started from "" exactly like the
				// "set url" case, making the two cases identical).
				CalculatorURL: "testCalcUrl",
			},
			args: args{
				calculatorURL: "testCalcUrl_rewritten",
			},
			want: &Client{
				calculatorURL: "testCalcUrl_rewritten",
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			clientConf := &Client{
				auth:          tt.fields.Auth,
				apiURL:        tt.fields.CdekAPIURL,
				calculatorURL: tt.fields.CalculatorURL,
			}
			if got := clientConf.SetCalculatorURL(tt.args.calculatorURL); !reflect.DeepEqual(got, tt.want) {
				t.Errorf("SetCalculatorURL() = %v, want %v", got, tt.want)
			}
		})
	}
}
// ExampleNewClient demonstrates the typical bootstrap: construct a client
// against the CDEK integration endpoint, then attach credentials.
// ("<KEY>" stands in for a real account/secure pair.)
func ExampleNewClient() {
	client := NewClient("https://integration.edu.cdek.ru/")
	client.SetAuth("<KEY>", "<KEY>")
}
|
#!/bin/sh
#
# Copyright (c) 2007, Cameron Rich
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the axTLS project nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
#
# Test the various axssl bindings. To run it, got to the _install directory
# and run this script from there.
#
# Select platform-specific interpreter paths and process-kill commands based on
# the axTLS build configuration. On Win32 the samples run as background jobs
# killed via job control; on Unix they are killed by process name.
if grep "CONFIG_PLATFORM_WIN32=y" "../config/.config" > /dev/null; then
    JAVA_EXE="$JAVA_HOME/bin/java.exe"
    PERL_BIN="/cygdrive/c/Perl/bin/perl"
    KILL_AXSSL="kill %1"
    KILL_CSHARP="kill %1"
    KILL_PERL="kill %1"
    KILL_JAVA="kill %1"
    KILL_LUA="kill %1"
else
    if grep "CONFIG_PLATFORM_CYGWIN=y" "../config/.config" > /dev/null; then
        # no .net or java on cygwin
        PERL_BIN=/usr/bin/perl
        KILL_AXSSL="killall axssl"
        KILL_PERL="killall /usr/bin/perl"
        KILL_LUA="killall /usr/local/bin/lua"
    else # Linux
        JAVA_EXE=/usr/java/default/bin/java
        PERL_BIN=/usr/bin/perl
        KILL_AXSSL="killall axssl"
        KILL_CSHARP="killall mono"
        KILL_PERL="killall /usr/bin/perl"
        RUN_CSHARP="mono"
        KILL_JAVA="killall $JAVA_EXE"
        KILL_LUA="killall /usr/local/bin/lua"
    fi
fi

BASE=..

# Common server/client argument sets (DER-encoded certs/keys).
SERVER_ARGS="s_server -accept 15001 -verify -CAfile $BASE/ssl/test/axTLS.ca_x509.cer"
CLIENT_ARGS="s_client -reconnect -connect localhost:15001 -verify -CAfile $BASE/ssl/test/axTLS.ca_x509.cer -key $BASE/ssl/test/axTLS.key_1024 -cert $BASE/ssl/test/axTLS.x509_1024.cer"

# check pem arguments
SERVER_PEM_ARGS="s_server -accept 15001 -pass abcd -key $BASE/ssl/test/axTLS.key_aes128.pem -cert $BASE/ssl/test/axTLS.x509_aes128.pem"
CLIENT_PEM_ARGS="s_client -connect localhost:15001 -CAfile $BASE/ssl/test/axTLS.ca_x509.pem -key $BASE/ssl/test/axTLS.key_1024.pem -cert $BASE/ssl/test/axTLS.x509_1024.pem"

# Make the Perl binding's shared objects resolvable at runtime.
export LD_LIBRARY_PATH=.:`perl -e 'use Config; print $Config{archlib};'`/CORE

# Each section below starts a sample server in the background, pipes a message
# through the matching client, kills the server, and repeats with PEM files.
if [ -x ./axssl ]; then
    echo "############################# C SAMPLE ###########################"
    ./axssl $SERVER_ARGS &
    echo "C Test passed" | ./axssl $CLIENT_ARGS
    $KILL_AXSSL
    sleep 1
    ./axssl $SERVER_PEM_ARGS &
    echo "C Test passed" | ./axssl $CLIENT_PEM_ARGS
    $KILL_AXSSL
    sleep 1
    echo "### C tests complete"
fi

if [ -f ./axtls.jar ]; then
    echo "########################## JAVA SAMPLE ###########################"
    "$JAVA_EXE" -jar ./axtls.jar $SERVER_ARGS &
    echo "Java Test passed" | "$JAVA_EXE" -jar ./axtls.jar $CLIENT_ARGS
    $KILL_JAVA
    sleep 1
    "$JAVA_EXE" -jar ./axtls.jar $SERVER_PEM_ARGS &
    echo "Java Test passed" | "$JAVA_EXE" -jar ./axtls.jar $CLIENT_PEM_ARGS
    $KILL_JAVA
    sleep 1
    echo "### Java tests complete"
fi

if [ -x ./axssl.csharp.exe ]; then
    echo "############################ C# SAMPLE ###########################"
    $RUN_CSHARP ./axssl.csharp.exe $SERVER_ARGS &
    echo "C# Test passed" | $RUN_CSHARP ./axssl.csharp.exe $CLIENT_ARGS
    $KILL_CSHARP
    sleep 1
    $RUN_CSHARP ./axssl.csharp.exe $SERVER_PEM_ARGS &
    echo "C# Test passed" | $RUN_CSHARP ./axssl.csharp.exe $CLIENT_PEM_ARGS
    $KILL_CSHARP
    sleep 1
    echo "### C# tests complete"
fi

if [ -x ./axssl.vbnet.exe ]; then
    echo "######################## VB.NET SAMPLE ###########################"
    echo $SERVER_ARGS
    echo $CLIENT_ARGS
    # Fix: launch the same .exe that the guard above checks for (the server
    # was previously started as "./axssl.vbnet", which the guard proves does
    # not necessarily exist).
    ./axssl.vbnet.exe $SERVER_ARGS &
    echo "VB.NET Test passed" | ./axssl.vbnet.exe $CLIENT_ARGS
    kill %1
    sleep 1
    ./axssl.vbnet.exe $SERVER_PEM_ARGS &
    echo "VB.NET Test passed" | ./axssl.vbnet.exe $CLIENT_PEM_ARGS
    kill %1
    sleep 1
    echo "### VB.NET tests complete"
fi

if [ -f ./axssl.pl ]; then
    echo "########################## PERL SAMPLE ###########################"
    "$PERL_BIN" ./axssl.pl $SERVER_ARGS &
    echo "Perl Test passed" | "$PERL_BIN" ./axssl.pl $CLIENT_ARGS
    $KILL_PERL
    sleep 1
    "$PERL_BIN" ./axssl.pl $SERVER_PEM_ARGS &
    echo "Perl Test passed" | "$PERL_BIN" ./axssl.pl $CLIENT_PEM_ARGS
    $KILL_PERL
    sleep 1
    echo "### Perl tests complete"
fi

if [ -f ./axssl.lua ]; then
    echo "########################## LUA SAMPLE ###########################"
    ./axssl.lua $SERVER_ARGS &
    echo "Lua Test passed" | ./axssl.lua $CLIENT_ARGS
    $KILL_LUA
    sleep 1
    ./axssl.lua $SERVER_PEM_ARGS &
    echo "Lua Test passed" | ./axssl.lua $CLIENT_PEM_ARGS
    $KILL_LUA
    sleep 1
    echo "### Lua tests complete"
fi

echo "########################## ALL TESTS COMPLETE ###########################"
|
class MyAnimeListScraper
  # Mixin for parsing MyAnimeList date strings and date ranges.
  module DateRangeParser
    extend ActiveSupport::Concern

    # Parses a single MAL-formatted date string.
    #
    # Tries "%b %d, %Y" (e.g. "Apr 3, 1998") first, then falls back to a bare
    # year ("%Y") and finally month-year ("%b %Y"); each fallback only fires
    # when the previous format raises ArgumentError.
    # Returns nil for unknown dates (any string containing '?').
    # NOTE(review): the final "%b %Y" attempt is not rescued, so a completely
    # unparseable string raises ArgumentError — confirm callers expect that.
    def parse_date(date_str)
      return if date_str.include?('?')
      Date.strptime(date_str, '%b %d, %Y')
    rescue ArgumentError
      begin
        Date.strptime(date_str, '%Y')
      rescue ArgumentError
        Date.strptime(date_str, '%b %Y')
      end
    end

    # Parses a MAL date range such as "Apr 3, 1998 to Apr 24, 1999".
    #
    # Returns a two-element [start, end] array: "Not available" maps to
    # [nil, nil], and a single date is duplicated into both endpoints.
    def parse_date_range(date_range_str)
      if date_range_str.include?(' to ')
        start_date, end_date = date_range_str.split(' to ').map(&:strip)
        [parse_date(start_date), parse_date(end_date)]
      elsif date_range_str.include?('Not available')
        [nil, nil]
      else
        [parse_date(date_range_str.strip)] * 2
      end
    end
  end
end
|
import React, { Component } from 'react'
import {Link} from 'react-router-dom';
import axios from './axiosConfig';
import "./index.css";
export default class LoginForm extends Component {
constructor(props){
super(props)
this.state = {
username: '',
password: '',
auth: null,
endpoint: null
}
}
setCSRF = () => {
axios.get('api/set-csrf/').then(res => console.log(res))
}
handleChange = (e) => {
this.setState({[e.target.name]: e.target.value})
}
handleSubmit = (event) => {
event.preventDefault();
axios.post('/api/login/',
{username: this.state.username,
password: <PASSWORD>}
).then(res => {
this.setState({auth: true})
}).catch(res => this.setState({auth: false}))
}
testEndpoint = () => {
axios.get('/api/test-auth/').then(res => this.setState(
{endpoint: true}))
.catch(res => this.setState({endpoint: false}))
}
render() {
return (
<div className='Login'>
<form onSubmit={this.handleSubmit}>
<div className='form-inner'>
<h2>Login</h2>
<div className='form-group'>
<label htmlFor='username'>Username</label>
<input name='username' id='username' value={this.state.username} onChange={this.handleChange}></input>
</div>
<div className='form-group'>
<label htmlFor='password'>Password</label>
<input type='password'name='password' id='password' value={this.state.password} onChange={this.handleChange}></input>
</div>
<Link to="/">
<input type='submit' value='Login' onClick={this.setCSRF}></input>
</Link>
</div>
</form>
<div style={{marginTop: '20px'}}>
{this.state.auth === null ? '' : (this.state.auth ? 'Login successful' : 'Login Failed' )}
</div>
<div className='other-button'>
<button onClick={this.testEndpoint}>Continue</button>
</div>
<div>{this.state.endpoint === null ? '' : (this.state.endpoint ? 'Successful Request' : 'Request Rejected')}</div>
</div>
)
}
}
|
package net.dodogang.plume.ash;
import dev.architectury.injectables.annotations.ExpectPlatform;
import java.nio.file.Path;
/**
 * Platform-agnostic environment facade.
 *
 * <p>Every method is annotated with {@code @ExpectPlatform} (Architectury):
 * the body here is a stub that is swapped for a platform-specific
 * implementation at build time, so the {@code AssertionError} is only
 * reachable if that replacement did not happen.
 */
public final class Environment {
    // Static utility holder; never instantiated.
    private Environment() {}

    /** Whether the game is running in a development environment. */
    @ExpectPlatform
    public static boolean isDevelopmentEnvironment() {
        throw new AssertionError();
    }

    /**
     * Gets the platform during runtime.
     * Quilt will be supported in the future.
     *
     * @return the runtime platform
     */
    @ExpectPlatform
    public static Platform getPlatform() {
        throw new AssertionError();
    }

    /** Path of the platform's configuration directory. */
    @ExpectPlatform
    public static Path getConfigDir() {
        throw new AssertionError();
    }
}
|
# Run the seatsInTheater script with the `python` interpreter on PATH.
python seatsInTheater.py
#! /usr/bin/env bash
# BuildCompatible: KitCreator

# Package identifier; also forms part of the download URL below.
pkg='tcl-socketserver'
url_prefix="https://github.com/Dash-OS/${pkg}"

# Release to build; must match a release published at
# ${url_prefix}/releases
version='1.0.1'

# Version string reported by the Tcl package, when it differs from the
# release tag used to fetch the archive.
pkg_version="${version}"

# Name of the Tcl package.
pkg_name='socketserver'

# Name of the tclpkglib loaded via `load {} $tclpkglib`.
# Falls back to $pkg_name when left undefined.
#pkg_lib_name="${pkg_name}"

# Uncomment to override the user's choice and force a static build.
# pkg_always_static='1'

# Release archive to download, plus its SHA-256 checksum for verification.
# https://hash.online-convert.com/sha256-generator
url="${url_prefix}/archive/${version}.tar.gz"
sha256='f34446641083c59897bbe2161dce59588ca2d35215e9c51108e9c3524c87c4b5'

# To track the master branch instead:
# url="${url_prefix}/archive/master.tar.gz"
# sha256='-'
|
<reponame>lgoldstein/communitychest
package com.vmware.spring.workshop.facade.web;
import java.io.IOException;
import java.util.Collection;
import javax.inject.Inject;
import javax.servlet.http.HttpServletRequest;
import javax.validation.Valid;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.util.Assert;
import org.springframework.validation.BindingResult;
import org.springframework.validation.ObjectError;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.multipart.MultipartFile;
import org.springframework.web.servlet.ModelAndView;
import com.vmware.spring.workshop.dto.user.UserDTO;
import com.vmware.spring.workshop.dto.user.UserRoleTypeDTO;
import com.vmware.spring.workshop.facade.support.UserDTOImportExport;
import com.vmware.spring.workshop.services.facade.UsersFacade;
/**
* @author lgoldstein
*/
@Controller("usersController")
@RequestMapping("/users")
public class UsersController extends AbstractWebController {
private final UsersFacade _usrFacade;
private final UserDTOImportExport _importer;
@Inject
public UsersController(final UsersFacade usrFacade,
final UserDTOImportExport importer) {
_usrFacade = usrFacade;
_importer = importer;
}
@RequestMapping(method=RequestMethod.GET)
public String listUsers (final Model model) {
model.addAttribute("usersList", _usrFacade.findAll());
return getTopLevelViewPath("list");
}
@RequestMapping(method=RequestMethod.GET, value="/export")
public ModelAndView exportUsers () {
return exportDTOList(_importer, _usrFacade.findAll());
}
@RequestMapping(method=RequestMethod.POST, value="/import")
public String importUsers (@RequestParam("file") final MultipartFile file, final Model model)
throws IOException {
importDTOList(file, _importer, _usrFacade);
return listUsers(model); // refresh view to reflect changes after import
}
@RequestMapping(method=RequestMethod.GET, value="/create")
public String prepareUserCreateForm (final Model model) {
return prepareUserForm(model, new UserDTO(), "create");
}
@RequestMapping(method=RequestMethod.GET, value="/edit/" + BY_ID_TEMPLATE)
public String prepareUserEditForm (final Model model, @PathVariable(ID_PARAM_NAME) final Long id) {
return prepareUserForm(model, _usrFacade.findById(id), "edit");
}
@RequestMapping(method=RequestMethod.POST, value="/edit/" + BY_ID_TEMPLATE)
public String updateUser (@Valid final UserDTO userData,
@PathVariable(ID_PARAM_NAME) final Long id,
final BindingResult result,
final HttpServletRequest request,
final Model model) {
handleBindingResultErrors(userData, result);
final UserDTO curData=_usrFacade.findById(id);
Assert.state(curData != null, "Referenced DTO no longer exists");
final Long idData=userData.getId();
if (idData == null)
userData.setId(id);
else
Assert.isTrue(id.equals(idData), "Mismatched user ID(s)");
// password field is left null by the form if not edited by the user
final String curPassword=<PASSWORD>(),
dataPassword=userData.getPassword();
if (StringUtils.isBlank(dataPassword))
userData.setPassword(<PASSWORD>);
_usrFacade.update(userData);
return listUsers(model);
}
@RequestMapping(method=RequestMethod.POST, value="/create")
public String createUser (@Valid final UserDTO userData,
final BindingResult result,
final HttpServletRequest request,
final Model model) {
if (result.hasErrors()) {
handleBindingResultErrors(userData, result);
throw new IllegalStateException("Bad DTO: " + userData);
}
_usrFacade.create(userData);
return listUsers(model);
}
@RequestMapping(method=RequestMethod.GET, value="/admin")
public String adminView () {
return getTopLevelViewPath("admin");
}
@RequestMapping(method=RequestMethod.GET, value="/guest")
public String guestView () {
return getTopLevelViewPath("guest");
}
private void handleBindingResultErrors (final UserDTO userData, final BindingResult result) {
if (!result.hasErrors())
return;
final Collection<? extends ObjectError> errsList=result.getAllErrors();
for (final ObjectError err : errsList) {
_logger.warn(err.getObjectName() + "[" + err.getCode() + "]: " + err.getDefaultMessage());
}
_logger.error("Bad DTO: " + userData);
}
private String prepareUserForm (final Model model, final UserDTO userData, final String actionName) {
Assert.notNull(userData, "No user DTO");
model.addAttribute("actionName", actionName);
model.addAttribute("userData", userData);
model.addAttribute("rolesList", UserRoleTypeDTO.VALUES);
return getTopLevelViewPath("manageUser");
}
}
|
/***********************************************************************
* Copyright (c) 2011:
* Istituto Nazionale di Fisica Nucleare (INFN), Italy
* Consorzio COMETA (COMETA), Italy
*
* See http://www.infn.it and and http://www.consorzio-cometa.it for details on
* the copyright holders.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
***********************************************************************/
package it.infn.ct.security.actions;
import com.opensymphony.xwork2.ActionContext;
import com.opensymphony.xwork2.ActionSupport;
import it.infn.ct.security.entities.UserReActivateRequest;
import it.infn.ct.security.utilities.LDAPUser;
import java.io.UnsupportedEncodingException;
import javax.mail.Message;
import javax.mail.MessagingException;
import javax.mail.Transport;
import javax.mail.internet.InternetAddress;
import javax.mail.internet.MimeMessage;
import javax.naming.Context;
import javax.naming.InitialContext;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.struts2.ServletActionContext;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.criterion.Restrictions;
/**
*
* @author <NAME> <<EMAIL>>
*/
/**
 * Struts action that records a re-activation request for the logged-in LDAP
 * user (if one is not already open) and notifies an operator by e-mail.
 */
public class ReactivateUser extends ActionSupport{
    private Log _log= LogFactory.getLog(ReactivateUser.class);

    // Mail parameters, injected by the Struts configuration via the setters below.
    private String mailFrom;
    private String mailTo;
    private String mailSubject;
    private String mailBody;
    private String username;

    public String getMailFrom() {
        return mailFrom;
    }

    public void setMailFrom(String mailFrom) {
        this.mailFrom = mailFrom;
    }

    public String getMailTo() {
        return mailTo;
    }

    public void setMailTo(String mailTo) {
        this.mailTo = mailTo;
    }

    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    public String getMailSubject() {
        return mailSubject;
    }

    public void setMailSubject(String mailSubject) {
        this.mailSubject = mailSubject;
    }

    public String getMailBody() {
        return mailBody;
    }

    public void setMailBody(String mailBody) {
        this.mailBody = mailBody;
    }

    /**
     * Persists a UserReActivateRequest for the session's LDAP user unless an
     * open one already exists, then sends the notification mail.
     *
     * @return SUCCESS when a session user was found, ERROR otherwise
     */
    // NOTE(review): the Hibernate session is not closed in a finally block, so
    // an exception between openSession() and close() leaks it — confirm.
    @Override
    public String execute() throws Exception {
        if(ActionContext.getContext().getSession().containsKey("ldapUser")){
            LDAPUser user = (LDAPUser) ActionContext.getContext().getSession().get("ldapUser");
            UserReActivateRequest urear= new UserReActivateRequest(user.getUsername());

            SessionFactory factory = (SessionFactory) ServletActionContext.getServletContext().getAttribute("IDPPublic.hibernatefactory");
            Session session = factory.openSession();
            session.beginTransaction();
            // Only record a new request if no open one exists for this user.
            if(session.createCriteria(UserReActivateRequest.class)
                    .add(Restrictions.eq("username", user.getUsername()))
                    .add(Restrictions.eq("open", Boolean.TRUE))
                    .list()
                    .isEmpty()){
                session.save(urear);
            }
            session.getTransaction().commit();
            session.close();

            sendMail(user);

            return SUCCESS;
        }
        return ERROR;
    }

    /**
     * Sends the notification mail using the JNDI-configured "mail/Users"
     * session, substituting the _USER_ placeholder in the body with the user's
     * display name.
     *
     * @throws MailException when the mail resource is unavailable, an address
     *                       is malformed, or sending fails
     */
    // NOTE(review): mailBody is overwritten in place (field mutation), so a
    // second invocation of this action instance would substitute into the
    // already-substituted text — confirm action instances are per-request.
    private void sendMail(LDAPUser user) throws MailException{
        javax.mail.Session session=null;
        try {
            Context initCtx = new InitialContext();
            Context envCtx = (Context) initCtx.lookup("java:comp/env");
            session = (javax.mail.Session) envCtx.lookup("mail/Users");
        } catch (Exception ex) {
            _log.error("Mail resource lookup error");
            _log.error(ex.getMessage());
            throw new MailException("Mail Resource not available");
        }

        Message mailMsg = new MimeMessage(session);
        try {
            mailMsg.setFrom(new InternetAddress(mailFrom,mailFrom));

            InternetAddress mailTos[] = new InternetAddress[1];
            mailTos[0] = new InternetAddress(mailTo);
            mailMsg.setRecipients(Message.RecipientType.TO, mailTos);

            mailMsg.setSubject(mailSubject);
            mailBody = mailBody.replaceAll("_USER_", user.getTitle() +" "+user.getGivenname()+" "+user.getSurname()+" ("+user.getUsername()+")");
            mailMsg.setText(mailBody);

            Transport.send(mailMsg);
        } catch (UnsupportedEncodingException ex) {
            _log.error(ex);
            throw new MailException("Mail address format not valid");
        } catch (MessagingException ex) {
            _log.error(ex);
            throw new MailException("Mail message has problems");
        }
    }
}
|
def detect_fraud(input_data):
    """Classify a credit card record as normal or fraudulent.

    Runs the record through the pre-trained fraud-detection model and applies
    a 0.5 probability cut-off.

    Args:
        input_data (list): The list of credit card data.

    Returns:
        int: 0 for normal activity, 1 for fraudulent activity.
    """
    # Turn the raw record into the feature vector expected by the model.
    feature_vector = extract_features(input_data)

    # Pre-trained model loaded from the bundled HDF5 file.
    classifier = load_model("fraud_detection.hdf5")

    # The model yields a fraud probability; threshold it into a binary label.
    fraud_probability = classifier.predict(feature_vector)[0]
    return 1 if fraud_probability > 0.5 else 0
<filename>src/defaults/templates/controller.tpl.js
{{!-- Template for a generated AngularJS controller spec (Jasmine style).
Rendered once per scaffolded controller: every mustache expression is resolved
at generation time and the emitted file is plain JavaScript. Standalone
Handlebars comments such as this one are stripped from the rendered output. --}}
{{#if opts.useStrict}}
'use strict';
{{/if}}
describe('Controller: {{name}}', function () {
var $scope, {{name}}{{and arg.deps}};
beforeEach(function () {
module('{{module}}');
module(function ($provide) {
{{!-- One mock registration per declared dependency, using its partial. --}}
{{#each deps}}
{{> (this.partial) this}}
{{/each}}
});
inject(function ($controller{{and arg._deps_}}) {
$scope = {};
{{#each deps}}
{{this.name}} = _{{this.name}}_;
{{/each}}
{{name}} = $controller('{{name}}', {
$scope: $scope
});
});
});
// Specs here
{{#if opts.includeSamples }}
/*
it('should return a property value', function () {
expect($scope.foo).toBe('bar');
});
it('should return a method value', function () {
expect($scope.baz()).toBe('qux');
});
*/
{{/if}}
});
package com.yoga.utility.qr.dto;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import javax.validation.constraints.NotBlank;
/**
 * Request DTO describing how to render a QR code image.
 * Lombok {@code @Data} generates getters/setters/equals/hashCode/toString.
 */
@Data
public class ChartDto {
    // The QR payload to encode (required, must be non-blank).
    @ApiModelProperty(value = "需要生成图片的二维码", required = true)
    @NotBlank(message = "二维码不能为空")
    private String code;
    // Output width in pixels; defaults to 512.
    @ApiModelProperty(value = "二维码宽度,默认值512px")
    private int width = 512;
    // Output height in pixels; defaults to 512.
    @ApiModelProperty(value = "二维码高度,默认512px")
    private int height = 512;
    // Color packed as 0xAARRGGBB; default 0xff000000 is opaque black.
    @ApiModelProperty(value = "二维码颜色")
    private long color = 0xff000000;
}
|
# Import necessary libraries
import dash
# NOTE(review): dash_core_components / dash_html_components are the legacy
# top-level packages (replaced by dash.dcc / dash.html in Dash >= 2) --
# confirm which Dash version this targets.
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output  # imported but not used in this snippet
# Initialize the Dash app
app = dash.Dash(__name__)
# Sales data: categories and figures are index-aligned lists.
product_categories = ['Electronics', 'Clothing', 'Books', 'Home & Kitchen']
sales_figures = [35000, 28000, 40000, 32000]
# Create the bar chart layout (one bar trace, titled axes).
app.layout = html.Div(children=[
    html.H1('Sales Data Visualization'),
    dcc.Graph(
        id='sales-bar-chart',
        figure={
            'data': [
                {'x': product_categories, 'y': sales_figures, 'type': 'bar', 'name': 'Sales'},
            ],
            'layout': {
                'title': 'Sales Figures by Product Category',
                'xaxis': {'title': 'Product Category'},
                'yaxis': {'title': 'Sales Figures'}
            }
        }
    )
])
# Run the Dash app (debug=True enables hot reload; disable in production).
if __name__ == '__main__':
    app.run_server(debug=True)
#!/bin/bash
set -e
# Start the data coordinator locally, building it first if necessary.

# Resolve this script's directory, following symlinks.
REALPATH=$(python -c "import os; print(os.path.realpath('$0'))")
BINDIR=$(dirname "$REALPATH")

# First argument selects the config file; an absent or empty argument
# falls back to the default (same semantics as the original -z check).
CONFIG=${1:-"$BINDIR/../configs/application.conf"}

# Build (or reuse) the jar, run pending migrations, then launch the service.
JARFILE=$("$BINDIR"/build.sh "$@")
"$BINDIR"/run_migrations.sh "$CONFIG" migrate
java -Djava.net.preferIPv4Stack=true -Dconfig.file="$CONFIG" -jar "$JARFILE" com.socrata.datacoordinator.service.Main
#!/usr/bin/env bash
koopa_gnu_mirror_url() {
    # """
    # Get GNU FTP mirror URL.
    # @note Updated 2020-04-16.
    # Takes no arguments; resolves the configured 'gnu-mirror-url'
    # koopa variable and prints it.
    # """
    koopa_assert_has_no_args "$#"
    koopa_variable 'gnu-mirror-url'
    return 0
}
<gh_stars>0
package com.player.db.dto;
import lombok.Data;
import lombok.experimental.Accessors;
import java.util.List;
/**
 * Data-transfer object for a player, extending the common contact fields.
 * {@code @Accessors(chain = true)} makes the Lombok-generated setters
 * return {@code this} so calls can be chained.
 */
@Data
@Accessors(chain = true)
public class PlayerDTO extends ContactDTO {
    // Associated manager, license and nationality DTOs.
    private ManagerDTO manager;
    private LicenseDTO license;
    private NationalityDTO nationality;
    // Previous teams -- TODO confirm whether any ordering is guaranteed.
    private List<TeamDTO> oldTeams;
}
|
<reponame>andremasson/reactnd-project-myreads
import React, { Component } from 'react';
import PropTypes from 'prop-types';
/**
 * @class
 * @classdesc Component that changes the shelf a book is on.
 * @prop {object} book - Book this component belongs to
 * @prop {func} onMoveShelf - Action executed when moving the book to another shelf
 */
class BookActions extends Component {
    static propTypes = {
        book: PropTypes.object.isRequired,
        onMoveShelf: PropTypes.func.isRequired
    };
    render() {
        const { book, onMoveShelf } = this.props;
        /**
         * @description Actions available for selection.
         * @member {string} value - Shelf value as stored in the DB
         * @member {string} label - Display name
         * @member {bool} enabled - Whether the option is selectable
         */
        const actions = [
            {value: 'move', label: 'Move to...', enabled: false},
            {value: 'currentlyReading', label: 'Currently Reading', enabled: true},
            {value: 'wantToRead', label: 'Want to Read', enabled: true},
            {value: 'read', label: 'Read', enabled: true},
            {value: 'none', label: 'None', enabled: true}
        ];
        /**
         * The select reflects the book's current shelf, defaulting to 'none'.
         */
        return (
            <div className='book-shelf-changer'>
                <select
                    value={book.shelf || 'none'}
                    onChange={(e) => onMoveShelf(book, e.target.value)}
                >
                    {actions.map((action) => (
                        <option
                            key={action.value}
                            value={action.value}
                            disabled={!action.enabled}
                        >{action.label}</option>
                    ))}
                </select>
            </div>
        );
    }
}
export default BookActions;
// Scrape the public profile page and extract the follower count from the
// embedded "followers_count" field. NOTE(review): this depends on the
// page's markup and will break if that field changes; the official API is
// the robust alternative.
const fetchUserFollowers = async (username) => {
    const response = await fetch(`https://twitter.com/${username}`);
    if (!response.ok) {
        throw new Error(`Request failed with status ${response.status}`);
    }
    const html = await response.text();
    const match = /followers_count":(\d+)/.exec(html);
    // Fail loudly instead of crashing on match[1] when the pattern is absent.
    if (!match) {
        throw new Error(`Could not find follower count for ${username}`);
    }
    return match[1];
};
// Top-level await is only valid in ES modules; wrap the demo call in an
// async IIFE so this also runs as a classic script.
(async () => {
    const numFollowers = await fetchUserFollowers('realDonaldTrump');
    console.log(numFollowers); // Outputs the number of followers
})().catch((err) => console.error(err));
<reponame>abanicaisse/travel-advisor
import React, { useEffect, useState } from "react";
import Header from "./components/Header/Header";
import List from "./components/List/List";
import Map from "./components/Map/Map";
import { getPlacesData } from "./api";
const App = () => {
const [places, setPlaces] = useState([]);
const [coordinates, setCoordinates] = useState({});
const [boundaries, setBoundaries] = useState({
ne: {
lat: 0.5077264679633942,
lng: 32.80528619843784,
},
sw: {
lat: 0.1476302962044258,
lng: 32.37681940156267,
},
});
// user live location
useEffect(() => {
navigator.geolocation.getCurrentPosition(
({ coords: { latitude, longitude } }) => {
setCoordinates({ lng: longitude, lat: latitude });
}
);
}, []);
useEffect(() => {
getPlacesData(boundaries.ne, boundaries.sw).then((data) => {
setPlaces(data);
});
}, []);
return (
<>
<Header />
<main className="layout">
<List className="list" places={places} />
<Map
className="map"
setCoordinates={setCoordinates}
setBoundaries={setBoundaries}
coordinates={coordinates}
/>
</main>
</>
);
};
export default App;
|
#!/bin/sh
# VERSION defines the graphloader version
# LDR defines the graphloader path
# TYPE defines the input type. Values are: TEXT, CSV, JSON, TEXTXFORM
# INPUTEXAMPLE defines the mapping example
# INPUTBASEDIR defines the main directory of the examples
# INPUTFILEDIR defines the directory of the input files
# SCRIPTNAME defines the name of the mapping script
# GRAPHNAME defines the name of the graph loaded.
# It does not have to exist prior to loading.
VERSION=dse-graph-loader-5.0.5
# NOTE: replace <location> with the actual install directory before running.
LDR=<location>/$VERSION/graphloader
TYPE=CSV
INPUTEXAMPLE='authorBook'
INPUTBASEDIR='/graph-examples/documentation-examples'
INPUTFILEDIR=$INPUTBASEDIR/$TYPE/$INPUTEXAMPLE
SCRIPTNAME='authorBookMapping'$TYPE'.groovy'
GRAPHNAME='test'$INPUTEXAMPLE
# Quote the expansions so paths containing spaces don't word-split.
"$LDR" "$INPUTFILEDIR/$SCRIPTNAME" -graph "$GRAPHNAME" -address localhost
$(document).ready(function ()
{
    // Submit the login form via AJAX instead of a full-page post.
    $("#loginForm").submit(function (event)
    {
        // Keep the browser from performing the default form submission.
        event.preventDefault();
        $.ajax({
            url: "/authenticate",
            type: "POST",
            cache: false,
            // 'async: false' was removed: synchronous XHR blocks the UI
            // thread and is deprecated, and no code after this call
            // depends on the request having completed.
            data: $(this).serialize(),
            success: function (result)
            {
                alert(result);
                location.reload();
            },
            error: function (xhr, status, error)
            {
                alert("Login request could not be processed. \nStatus: " + status + "\n: " + error + "\n" + xhr.responseText);
            }
        });
    });
});
<reponame>bsisa/hbUi.geo<filename>src/main/js/hbGeoLeafletService.js
/**
* Provides helper functions for Leaflet related objects.
* `L` is freely used as abbreviation for `Leaflet`
*
* Useful references to Leaflet documentation:
* <ul>
* <li>Leaflet API Reference: http://leafletjs.com/reference.html</li>
* <li>LatLng: Represents a geographical point with a certain latitude and longitude: http://leafletjs.com/reference.html#latlng</li>
* </ul>
*
*/
(function() {
angular.module('hbUi.geo').factory('hbGeoLeafletService', [
'$log', 'hbGeoService', function($log, hbGeoService) {
// =================================================================
// Leaflet objects from/to GeoXml utilities
// =================================================================
/**
 * Converts a GeoXml POINT into a Leaflet-style {lat, lng} literal.
 * Reminder: XG carries the longitude and YG the latitude.
 */
var getLeafletPoint = function(geoxmlPoint) {
    var latitude = parseFloat(geoxmlPoint.YG);
    var longitude = parseFloat(geoxmlPoint.XG);
    return { lat: latitude, lng: longitude };
};
/**
 * Builds an L.circleMarker at the `elfin` base point, translated from
 * Swiss coordinates to latitude/longitude, styled with `style`.
 * Returns null when the elfin has no base point.
 */
var getPointLayer = function(elfin, style) {
    var point = hbGeoService.getElfinBasePoint(elfin);
    if (!point) {
        return null;
    }
    // TODO: Move to POINT.{XG, YG, ZG}:
    //var coords = { lat : point.XG, lng : point.YG };
    var coords = getLeafletPoint(point);
    //var coords = hbGeoService.getLongitudeLatitudeCoordinates(point.X, point.Y);
    var circleMarker = L.circleMarker(L.latLng(coords.lat, coords.lng), style);
    return circleMarker;
};
/**
 * Builds an L.marker at the `elfin` base point, translated from Swiss
 * coordinates to latitude/longitude, with `style` passed as marker options.
 * Returns null when the elfin has no base point.
 */
var getMarkerLayer = function(elfin, style) {
    var point = hbGeoService.getElfinBasePoint(elfin);
    if (!point) {
        return null;
    }
    // TODO: Move to POINT.{XG, YG, ZG}:
    var coords = getLeafletPoint(point);
    // var coords2 = { lat : parseFloat(point.XG), lng : parseFloat(point.YG) };
    // var coords = hbGeoService.getLongitudeLatitudeCoordinates(point.X, point.Y);
    //$log.debug("coords = " + angular.toJson(coords));
    //$log.debug("coords2 = " + angular.toJson(coords2));
    var marker = L.marker(L.latLng(coords.lat, coords.lng), style);
    return marker;
};
/**
 * Returns an array of L.latLng defining a polygon corresponding to
 * the `elfin` FORME.ZONE at POS=1.
 */
var getPolygonCoords = function(elfin) {
    var points = hbGeoService.getElfinZone1Points(elfin);
    // Transform each GeoXml POINT to a Leaflet L.latLng.
    var latLngs = _.map(points, function(point){
        // TODO: Move to POINT.{XG, YG, ZG}:
        var coords = getLeafletPoint(point);
        //var coords = { lat : point.XG, lng : point.YG };
        //var coords = hbGeoService.getLongitudeLatitudeCoordinates(point.X,point.Y);
        return L.latLng(coords.lat, coords.lng);
    });
    return latLngs;
};
/**
 * Builds an L.polygon with the provided `style` for the given `elfin`,
 * or null when the elfin has no ZONE points.
 */
var getPolygonLayer = function(elfin, style) {
    var coords = getPolygonCoords(elfin);
    if (!coords || coords.length === 0) {
        return null;
    }
    return L.polygon(coords, style);
};
/**
 * Returns an ILayer object {L.circleMarker, L.marker, L.polygon} given
 * @param elfin
 * @param representation {'point', 'marker', 'polygon'} (case-insensitive;
 *        any other value yields null)
 * @param style
 *
 * TODO: review, pitfall with marker management.
 * See monkey patching while used in hbMapController.
 */
var getObjectLayer = function(elfin, representation, style) {
    var result = null;
    switch (representation.toLowerCase()) {
    case 'point': result = getPointLayer(elfin, style); break;
    case 'marker': result = getMarkerLayer(elfin, style); break;
    case 'polygon': result = getPolygonLayer(elfin, style); break;
    }
    if (result !== null) {
        //$log.debug(">>>> getObjectLayer *** START <<<<");
        // Attach the popup and remember the representation on the layer.
        result.bindPopup(getPopupContent(elfin));
        // TODO: Test whether this was necessary !?
        //angular.extend(result, {elfin:elfin});
        // TODO: CURRENT
        angular.extend(result, {representation:representation.toLowerCase()});
    }
    return result;
};
/**
 * Returns a custom L.Icon style object intended for the selected object's
 * marker (purple icon). Alternative colours are kept commented out below.
 */
var getSelectedObjectMarkerStyle = function() {
    // zIndexOffset only effective in higher version...
    var CustomIcon = L.Icon.extend({
        options: {
            iconSize: [25, 41],
            iconAnchor: [12, 41],
            popupAnchor: [1, -34],
            shadowSize: [41, 41],
            zIndexOffset: 1000
        }
    });
    // var selectedIcon = new CustomIcon({iconUrl: '/assets/lib/leaflet/custom/markers/marker-icon-orange.png'});
    var selectedIcon = new CustomIcon({iconUrl: '/assets/lib/leaflet/custom/markers/marker-icon-purple.png'});
    // var selectedIcon = new CustomIcon({iconUrl: '/assets/lib/leaflet/custom/markers/marker-icon-red.png'});
    // var selectedIcon = new CustomIcon({iconUrl: '/assets/lib/leaflet/custom/markers/marker-icon-yellow.png'});
    // var selectedIcon = new CustomIcon({iconUrl: '/assets/lib/leaflet/custom/markers/marker-icon-green.png'});
    var customStyle = {icon: selectedIcon};
    return customStyle;
};
/**
 * Returns a custom L.Icon style object intended for standard (unselected)
 * object markers, using the default marker icon.
 */
var getStandardObjectMarkerStyle = function() {
    // zIndexOffset only effective in higher version...
    var CustomIcon = L.Icon.extend({
        options: {
            iconSize: [25, 41],
            iconAnchor: [12, 41],
            popupAnchor: [1, -34],
            shadowSize: [41, 41],
            zIndexOffset: 999
        }
    });
    var selectedIcon = new CustomIcon({iconUrl: '/assets/lib/leaflet/custom/markers/marker-icon.png'});
    var customStyle = {icon: selectedIcon};
    return customStyle;
};
/**
 * Returns the bounds of the layer identified by
 * @param elfin
 * @param representation
 * NOTE(review): getBounds() is available on L.polygon layers; for 'point'
 * or 'marker' representations (or when getObjectLayer returns null) this
 * call would fail -- confirm callers only pass polygon representations.
 */
var getObjectBounds = function (elfin, representation) {
    // No need for style when computing bounds
    var style = {};
    var elfinLayer = getObjectLayer(elfin, representation, style);
    return elfinLayer.getBounds();
};
/**
 * Refreshes the popup content of `layer` from `elfin` data, provided the
 * layer supports popups and one is currently bound.
 */
var updateLayerPopupContent = function(elfin, layer) {
    var hasBoundPopup = angular.isDefined(layer.getPopup) && layer.getPopup();
    if (hasBoundPopup) {
        layer.getPopup().setContent(getPopupContent(elfin));
    }
};
/**
 * Updates `layer` latitude/longitude coordinates from elfin.FORME.ZONE.
 * No-op when the layer has no setLatLngs or the elfin has no zone points.
 */
var updatePolygonCoords = function(elfin, layer) {
    if (angular.isDefined(layer.setLatLngs)) {
        var coords = getPolygonCoords(elfin);
        if (coords && coords.length > 0) {
            //$log.debug("Map service: updatePolygonCoords - layer.setLatLngs(coords)\ncoords =\n" + angular.toJson(coords));
            layer.setLatLngs(coords);
        }
    }
};
/**
 * Updates a point-like leaflet layer with the ELFIN FORME BASE POINT
 * coordinates. No-op when the layer has no setLatLng or the elfin has
 * no base point.
 */
var updateLayerCoords = function(elfin, layer) {
    if (angular.isDefined(layer.setLatLng)) {
        var point = hbGeoService.getElfinBasePoint(elfin);
        if (point) {
            // TODO: Move to POINT.{XG, YG, ZG}:
            var coords = getLeafletPoint(point);
            //var coords = hbGeoService.getLongitudeLatitudeCoordinates(point.X, point.Y);
            layer.setLatLng(L.latLng(coords.lat, coords.lng));
        }
    }
};
// =================================================================
// Leaflet configuration objects
// Might move in a hbGeoConfig service in the future
// =================================================================
/**
 * Provides defaults for the angular-leaflet-directive variables used by
 * controllers extending their scope with leaflet configuration: marker
 * icon styles, initial center/zoom, draw control and base layers.
 */
var getDefaultLeafletScopeVars = function() {
    var defaultLeafletAdditionToScope = {
        iconStyles: {
            selectedMarker: getSelectedObjectMarkerStyle(),
            standardMarker: getStandardObjectMarkerStyle()
        },
        center: {
            lat: 0,
            lng: 0,
            zoom: 10
        },
        defaults: {
            drawControl: true
        },
        layers: {
            baselayers: {
                standard: {
                    name: 'Standard',
                    url: 'http://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png',
                    type: 'xyz'
                }
                // Image overlay test (WIP. GIS funct. dev.)
                // ,
                // gespatri: {
                // name: 'Gespatri',
                // type: 'imageOverlay',
                // url: '/assets/images/abribus.jpeg',
                // bounds: [[46.992737010038404, 6.931471483187033], [46.99302251894073, 6.931947239697114]],
                // //{"_southWest":{"lat":46.992737010038404,"lng":6.931471483187033},"_northEast":{"lat":46.99302251894073,"lng":6.931947239697114}}
                // layerParams: {
                // noWrap: true,
                // attribution: 'Custom map <a href="http://www.bsisa.ch/">by BSI SA</a>'
                // }
                // }
                ,
                transport: {
                    name: 'Transport',
                    url: 'http://{s}.tile.thunderforest.com/transport/{z}/{x}/{y}.png',
                    type: 'xyz'
                }
                // ,
                // landscape: {
                // name: 'Paysage',
                // url: 'http://{s}.tile.thunderforest.com/landscape/{z}/{x}/{y}.png',
                // type: 'xyz'
                // },
                // grayscale: {
                // name: 'Routes - Gris',
                // url: 'http://openmapsurfer.uni-hd.de/tiles/roadsg/x={x}&y={y}&z={z}',
                // type: 'xyz'
                // },
                // watercoloe: {
                // name: 'Aquarelle',
                // url: 'http://{s}.tile.stamen.com/watercolor/{z}/{x}/{y}.png',
                // type: 'xyz'
                // },
            },
            overlays: {
            }
        }
    };
    // Hack: Serialisation prevents modification by reference, but breaks
    // $scope.drawControl.draw.marker.icon definition in caller i.e. hbMapController
    //return angular.fromJson(angular.toJson(defaultLeafletAdditionToScope));
    return defaultLeafletAdditionToScope;
};
/**
 * Returns tooltips, buttons, actions, toolbar,... labels translated
 * properties to extend/merge with L.drawLocal.
 *
 * (Currently hardcoded to french only, can be extended to database
 * configured per language locale as needed).
 * Note: several entries below are still in English; all string values are
 * user-facing at runtime and are deliberately left untouched here.
 */
var getDrawLocalTranslation = function() {
    var drawLocalTranslation_fr = {
        draw: {
            toolbar: {
                actions: {
                    title: 'Annuler le dessin',
                    text: 'Annuler'
                },
                undo : {
                    title: 'Effacer le dernier point dessiné',
                    text: 'Effacer le dernier point'
                },
                buttons: {
                    polyline: 'Dessiner une polyligne',
                    polygon: 'Dessiner un polygone',
                    marker: 'Dessiner un point',
                    rectangle: 'Draw a rectangle',
                    circle: 'Draw a circle'
                }
            },
            handlers: {
                circle: {
                    tooltip: {
                        start: 'Click and drag to draw circle.'
                    }
                },
                rectangle: {
                    tooltip: {
                        start: 'Click and drag to draw rectangle.'
                    }
                },
                simpleshape: {
                    tooltip: {
                        end: 'Release mouse to finish drawing.'
                    }
                },
                marker: {
                    tooltip: {
                        start: 'Click sur le plan pour placer un point.'
                    }
                },
                polygon: {
                    tooltip: {
                        start: 'Click pour commencer à dessiner la forme.',
                        cont: 'Click pour continuer de dessiner.',
                        end: 'Click sur le premier point pour fermer la forme.'
                    }
                },
                polyline: {
                    error: '<strong>Erreur:</strong> les côtés ne peuvent pas se croiser!',
                    tooltip: {
                        start: 'Click pour commencer à dessiner la ligne.',
                        cont: 'Click pour continuer de dessiner.',
                        end: 'Click sur le dernier point pour terminer.'
                    }
                }
            }
        },
        edit: {
            toolbar: {
                actions: {
                    save: {
                        title: 'Save changes.',
                        text: 'Save'
                    },
                    cancel: {
                        title: 'Cancel editing, discards all changes.',
                        text: 'Cancel'
                    }
                },
                buttons: {
                    edit: 'Edit layers.',
                    editDisabled: 'No layers to edit.',
                    remove: 'Delete layers.',
                    removeDisabled: 'No layers to delete.'
                }
            },
            handlers: {
                edit: {
                    tooltip: {
                        text: 'Drag handles, or marker to edit feature.',
                        subtext: 'Click cancel to undo changes.'
                    }
                },
                remove: {
                    tooltip: {
                        text: 'Click on a feature to remove'
                    }
                }
            }
        }
    };
    return drawLocalTranslation_fr;
};
/**
 * Builds the HTML popup content for an elfin.
 * TODO: This is not generic: Re-design and refactor.
 * (I.e.: Have template per CLASSE and template list loaded from database at startup.)
 * Microservice architecture: => geo database accessed by hbGeoService...
 */
var getPopupContent = function(elfin) {
    var ident = elfin.IDENTIFIANT;
    var detailsHref = '/elfin/' + elfin.ID_G + '/' + elfin.CLASSE + '/' + elfin.Id;
    return '<b>' + ident.NOM + ' ' + ident.ALIAS + '</b><br>' +
        'No SAI <b>' + ident.OBJECTIF + '</b> - ' + elfin.CLASSE + '<br>' +
        '<a href="' + detailsHref + '">Détails</a>';
};
return {
getDefaultLeafletScopeVars: getDefaultLeafletScopeVars,
getDrawLocalTranslation: getDrawLocalTranslation,
getObjectBounds: getObjectBounds,
getObjectLayer: getObjectLayer,
getSelectedObjectMarkerStyle: getSelectedObjectMarkerStyle,
getStandardObjectMarkerStyle: getStandardObjectMarkerStyle,
updateLayerCoords: updateLayerCoords,
updateLayerPopupContent: updateLayerPopupContent,
updatePolygonCoords: updatePolygonCoords
}
}]);
})(); |
#!/usr/bin/env bash
# Bootstrap a client website bundle: generates the bundle, rebuilds the
# database, and builds the front-end assets.
if [[ $# -eq 0 ]] ; then
    echo 'No arguments provided. Please enter the ClientName'
    exit 1
fi
CLIENT_NAME=$1
# $(...) instead of legacy backticks; character classes instead of A-Z/a-z
# ranges, which are locale-dependent.
LOWER_CLIENT_NAME=$(echo "$CLIENT_NAME" | tr '[:upper:]' '[:lower:]')
composer install
php app/console kuma:generate:bundle --namespace="$CLIENT_NAME"/WebsiteBundle --dir=/var/www/src --no-interaction
php app/console bbg:generate:default-site --namespace="$CLIENT_NAME"/WebsiteBundle --prefix="${LOWER_CLIENT_NAME}"_website_
php app/console doctrine:database:drop --force
php app/console doctrine:database:create
php app/console doctrine:migrations:diff
php app/console doctrine:migrations:migrate --no-interaction
php app/console doctrine:fixtures:load --no-interaction
bundle install
npm install --save bower gulp uglify-js uglifycss
npm install
npm shrinkwrap
node_modules/.bin/bower install --config.interactive=false
node_modules/.bin/gulp build
php app/console assets:install --symlink
php app/console assetic:dump
#include <stdarg.h>
#include <stddef.h>
#include <setjmp.h>
#include "cmockery.h"
#include "c.h"
#include "../checkpointer.c"
#include "postgres.h"
/* Queue capacity used by these tests. */
#define MAX_BGW_REQUESTS 5
/*
 * Builds a fake CheckpointerShmemStruct with room for MAX_BGW_REQUESTS
 * requests and stubs the globals (IsUnderPostmaster, ProcGlobal) read by
 * the code under test. malloc results are deliberately unchecked in this
 * test fixture.
 */
static void
init_request_queue(void)
{
	size_t size = sizeof(CheckpointerShmemStruct) + sizeof(CheckpointerRequest)*MAX_BGW_REQUESTS;
	CheckpointerShmem = (CheckpointerShmemStruct *) malloc(size);
	memset(CheckpointerShmem, 0, size);
	CheckpointerShmem->checkpointer_pid = 1234;
	CheckpointerShmem->max_requests = MAX_BGW_REQUESTS;
	IsUnderPostmaster = true;
	ProcGlobal = (PROC_HDR *) malloc(sizeof(PROC_HDR));
	ProcGlobal->checkpointerLatch = NULL;
}
/*
 * Basic enqueue tests, including compaction upon enqueuing into a
 * full queue.
 */
static void
test__ForwardFsyncRequest_enqueue(void **state)
{
	bool ret;
	int i;
	RelFileNode dummy = {1,1,1};
	init_request_queue();
	ProcGlobal->checkpointerLatch = NULL;
	/* Mock one acquire/release cycle of CheckpointerCommLock. */
	expect_value(LWLockAcquire, l, CheckpointerCommLock);
	expect_value(LWLockAcquire, mode, LW_EXCLUSIVE);
	will_return(LWLockAcquire, true);
	expect_value(LWLockRelease, lock, CheckpointerCommLock);
	will_be_called(LWLockRelease);
	/* basic enqueue */
	ret = ForwardFsyncRequest(dummy, MAIN_FORKNUM, 1);
	assert_true(ret);
	assert_true(CheckpointerShmem->num_requests == 1);
	/* fill up the queue (each call needs its own lock expectations) */
	for (i=2; i<=MAX_BGW_REQUESTS; i++)
	{
		expect_value(LWLockAcquire, l, CheckpointerCommLock);
		expect_value(LWLockAcquire, mode, LW_EXCLUSIVE);
		will_return(LWLockAcquire, true);
		expect_value(LWLockRelease, lock, CheckpointerCommLock);
		will_be_called(LWLockRelease);
		ret = ForwardFsyncRequest(dummy, MAIN_FORKNUM, i);
		assert_true(ret);
	}
	expect_value(LWLockAcquire, l, CheckpointerCommLock);
	expect_value(LWLockAcquire, mode, LW_EXCLUSIVE);
	will_return(LWLockAcquire, true);
	expect_value(LWLockRelease, lock, CheckpointerCommLock);
	will_be_called(LWLockRelease);
#ifdef USE_ASSERT_CHECKING
	expect_value(LWLockHeldByMe, l, CheckpointerCommLock);
	will_return(LWLockHeldByMe, true);
#endif
	/*
	 * This enqueue request should trigger compaction but no
	 * duplicates are in the queue. So the queue should remain
	 * full.
	 */
	ret = ForwardFsyncRequest(dummy, MAIN_FORKNUM, 0);
	assert_false(ret);
	assert_true(CheckpointerShmem->num_requests == CheckpointerShmem->max_requests);
	free(CheckpointerShmem);
}
/* Test driver: runs the enqueue test under cmockery. */
int
main(int argc, char* argv[]) {
	cmockery_parse_arguments(argc, argv);
	/* Initialize memory contexts before exercising backend code. */
	MemoryContextInit();
	const UnitTest tests[] = {
		unit_test(test__ForwardFsyncRequest_enqueue)
	};
	return run_tests(tests);
}
|
using System;
using System.Collections.Generic;

/// <summary>
/// Maintains the collection of known <see cref="ClassInfo"/> entries.
/// </summary>
public class ClassManager
{
    // Backing list of registered class infos. Kept public for backward
    // compatibility with existing callers.
    public List<ClassInfo> infos = new List<ClassInfo>();

    private IViewBase mView;

    /// <summary>Associated view; only assignable from within this class.</summary>
    public IViewBase View
    {
        get { return mView; }
        private set { mView = value; }
    }

    public ClassManager()
    {
        // The field initializer above already creates the list; the
        // previous constructor re-created it redundantly.
    }

    /// <summary>Registers a class info entry.</summary>
    public void AddClassInfo(ClassInfo info)
    {
        infos.Add(info);
    }

    /// <summary>Removes the first entry whose ClassName matches, if any.</summary>
    public void RemoveClassInfo(string className)
    {
        ClassInfo infoToRemove = infos.Find(c => c.ClassName == className);
        if (infoToRemove != null)
        {
            infos.Remove(infoToRemove);
        }
    }
}
import { NgModule } from '@angular/core';
import { RouterModule, Routes } from '@angular/router';
import { HomeComponent } from '../desktop-web/pages/home/home.component';
import { DesktopWebComponent } from './desktop-web/desktop-web.component';
import { BlogComponent } from './pages/blog/blog.component';
import { SingleComponent } from './pages/blog/single/single.component';
import { EquipoComponent } from './pages/equipo/equipo.component';
import { ContactoComponent } from './pages/experiencia/contacto/contacto.component';
import { LaboratorioComponent } from './pages/experiencia/laboratorio/laboratorio.component';
import { PageCasosComponent } from './pages/page-casos/page-casos.component';
import { PageSolucionesComponent } from './pages/page-soluciones/page-soluciones.component';
import { PresenciaComponent } from './pages/somos/presencia/presencia.component';
import { SedeColombiaComponent } from './pages/somos/sedes/colombia/sede-colombia.component';
import { sedeEspanaComponent } from './pages/somos/sedes/espana/sede-espana.component';
import { SedePanamaComponent } from './pages/somos/sedes/sede-panama/sede-panama.component';
import { SomosComponent } from './pages/somos/somos/somos.component';
import { TrayectoriaComponent } from './pages/trayectoria/trayectoria.component';
// Desktop routes: every page renders inside DesktopWebComponent's outlet;
// any unknown path redirects to /home via the '**' wildcard.
const routes: Routes = [
  {
    path: '', component:DesktopWebComponent,
    children: [
      { path: 'home', component: HomeComponent },
      { path: 'nosotros', component: SomosComponent, },
      { path: 'nosotros/trayectoria', component:TrayectoriaComponent},
      { path: 'nosotros/equipo', component:EquipoComponent},
      { path: 'nosotros/presencia', component: PresenciaComponent },
      { path: 'nosotros/sedes/panama', component: SedePanamaComponent },
      { path: 'nosotros/sedes/colombia', component: SedeColombiaComponent },
      { path: 'nosotros/sedes/espana', component: sedeEspanaComponent },
      { path: 'soluciones', component: PageSolucionesComponent },
      { path: 'casos', component: PageCasosComponent },
      { path: 'experiencia/contacto', component: ContactoComponent },
      { path: 'experiencia/itm', component: LaboratorioComponent },
      { path: 'blog', component: BlogComponent },
      { path: 'blog/:id', component: SingleComponent },
      { path: '**', redirectTo: '/home', }
    ]
  },
]
// Child routing module: registered with forChild and re-exported so the
// feature module gains the router directives.
@NgModule({
  imports:[
    RouterModule.forChild(routes)
  ],
  exports:[
    RouterModule
  ]
})
export class DesktopRoutingModule { }
|
import requests
from bs4 import BeautifulSoup

# Front page to scrape; headline markup is site-specific and may change.
url = 'https://www.nytimes.com/'
response = requests.get(url)
if response.status_code == 200:
    soup = BeautifulSoup(response.text, 'html.parser')
    # 'e1voiwgp0' is a generated CSS class -- NOTE(review): brittle; confirm
    # it still matches the live markup before relying on this.
    headlines = soup.find_all('h2', {'class': 'e1voiwgp0'})
    # Print at most the first five headlines.
    for headline in headlines[:5]:
        print(headline.text)
else:
    # Previously a non-200 response was silently ignored; report it instead.
    print('Request failed with status %s' % response.status_code)
#!/bin/bash -e
# Generates a SAS-signed VHD URL and writes vhd-publishing-info.json with
# the metadata needed to publish the image.
required_env_vars=(
    "CLASSIC_SA_CONNECTION_STRING"
    "STORAGE_ACCT_BLOB_URL"
    "VHD_NAME"
    "OS_NAME"
    "OFFER_NAME"
    "SKU_NAME"
    "HYPERV_GENERATION"
    "IMAGE_VERSION"
)
for v in "${required_env_vars[@]}"
do
    if [ -z "${!v}" ]; then
        if [ "$v" == "IMAGE_VERSION" ]; then
            # Default to today's date. The original assigned the literal
            # string "date +%Y.%m.%d" instead of running the command.
            IMAGE_VERSION=$(date +%Y.%m.%d)
            echo "$v was not set, set it to ${!v}"
        else
            echo "$v was not set!"
            exit 1
        fi
    fi
done
# SAS token valid from yesterday (clock-skew margin) through one year out.
start_date=$(date +"%Y-%m-%dT00:00Z" -d "-1 day")
expiry_date=$(date +"%Y-%m-%dT00:00Z" -d "+1 year")
sas_token=$(az storage container generate-sas --name vhds --permissions lr --connection-string "${CLASSIC_SA_CONNECTION_STRING}" --start "${start_date}" --expiry "${expiry_date}" | tr -d '"')
vhd_url="${STORAGE_ACCT_BLOB_URL}/${VHD_NAME}?$sas_token"
echo "COPY ME ---> ${vhd_url}"
# Strip dots from the SKU name -- presumably required downstream; confirm.
sku_name=$(echo "$SKU_NAME" | tr -d '.')
cat <<EOF > vhd-publishing-info.json
{
    "vhd_url" : "$vhd_url",
    "os_name" : "$OS_NAME",
    "sku_name" : "$sku_name",
    "offer_name" : "$OFFER_NAME",
    "hyperv_generation": "${HYPERV_GENERATION}",
    "image_version": "${IMAGE_VERSION}"
}
EOF
cat vhd-publishing-info.json
import functools
import logging
import os
def check_parameter(required_params):
    """Decorator factory validating that a dict-like argument contains keys.

    The wrapped callable must have the signature ``(self, input)`` where
    ``input`` supports the ``in`` operator (typically a dict). Before the
    callable runs, a ValueError listing every missing key is raised.
    """
    def decorator(func):
        @functools.wraps(func)  # preserve __name__/__doc__ of the wrapped callable
        def func_wrapper(self, input):
            missing_params = [param for param in required_params if param not in input]
            if missing_params:
                raise ValueError("Missing parameters: %s" % ", ".join(missing_params))
            return func(self, input)
        return func_wrapper
    return decorator
@check_parameter(['ip_dir'])
def get_task_logger(func, ip_dir):
    """
    Task logger (creates log file in information package directory)

    NOTE(review): the decorator wraps this as ``func_wrapper(self, input)``,
    so the required-key check runs ``'ip_dir' in input`` where ``input`` is
    the second positional argument -- for a string path that is a substring
    test, not a key lookup. The decorator looks intended for callables
    receiving a dict; confirm the intended calling convention.
    """
    logfile = os.path.join(ip_dir, "processing.log")
    # Create an empty log file on first use.
    if not os.path.exists(logfile):
        with open(logfile, 'w'):
            pass
    # NOTE(review): returns the first argument unchanged rather than a
    # logging.Logger; the logging import is unused here -- verify intent.
    return func
//index.js
const requestUrl = require('../../config').requestUrl
// Paging state shared by all handlers on this page.
var pageIndex = 1;
var pageSize = 20;
// Re-entrancy guard: true while a request is in flight.
var loadFlag = false;
// Fetches the next page of entries and appends it to that.data.indexList.
var getDataList = function (that) {
  if (loadFlag == false) {
    loadFlag = true
    wx.request({
      url: requestUrl + 'wxIndex.ashx',
      data: {
        pageIndex: pageIndex,
        pageSize: pageSize
      },
      success: function (res) {
        var indexList = that.data.indexList;
        // Append the new page to the existing list.
        for (var i = 0; i < res.data.ChinaValue.length; i++) {
          indexList.push(res.data.ChinaValue[i]);
        }
        that.setData({
          indexList: indexList
        });
        pageIndex++
        loadFlag = false
        // NOTE(review): loadFlag is only reset on success, so a failed
        // request permanently blocks further loads -- confirm intended.
      }
    })
  }
}
Page({
  data: {
    userInfo: {},
    indexList: [],
    scrollHeight: 0
  },
  // Navigate to the "new bill" page.
  bindNewTap: function () {
    wx.navigateTo({
      url: '../new/new'
    })
  },
  // Long-press on the cover image: reset paging and reload the list.
  bindRefresh: function () {
    pageIndex = 1
    this.setData({
      indexList: []
    })
    getDataList(this)
  },
  // Tap on a tag: open the result page filtered by that keyword.
  bindTagTap: function (e) {
    wx.navigateTo({
      url: '../result/result?KeyWord=' + e.currentTarget.dataset.id
    })
  },
  // Full-screen image preview.
  bindShowImage: function (e) {
    wx.previewImage({
      urls: [e.target.dataset.url]
    })
  },
  // Bottom action sheet: view / edit / delete the selected item.
  bindItemTap: function (e) {
    var that = this
    wx.showActionSheet({
      itemList: ['查看', '修改', '删除'],
      success: function (res) {
        if (res.tapIndex == 0) {
          wx.navigateTo({
            url: '../result/result?ID=' + e.currentTarget.dataset.id
          })
        }
        if (res.tapIndex == 1) {
          wx.navigateTo({
            url: '../new/new?ID=' + e.currentTarget.dataset.id
          })
        }
        if (res.tapIndex == 2) {
          // Confirm, then delete via the backend and reload on success.
          wx.showModal({
            title: '提示',
            content: '该操作不可恢复,确认删除该账单?',
            success: function (res) {
              if (res.confirm) {
                wx.request({
                  url: requestUrl + 'wxDelete.ashx?ID=' + e.currentTarget.dataset.id,
                  success: function (res) {
                    if (res.data.ChinaValue[0].Result == 'True') {
                      wx.showToast({
                        title: '已删除',
                        mask: true,
                        duration: 500
                      })
                      // Deletion succeeded: reset paging and reload.
                      pageIndex = 1
                      that.setData({
                        indexList: []
                      })
                      getDataList(that)
                    }
                  }
                })
              }
            }
          })
        }
      }
    })
  },
  onLoad: function () {
    var that = this
    // Get global data via the app instance.
    var app = getApp()
    app.getUserInfo(function (userInfo) {
      // Update the page data with the fetched user info.
      that.setData({
        userInfo: userInfo
      })
    })
    // Size the scroll area to the device screen height.
    wx.getSystemInfo({
      success: function (res) {
        that.setData({
          scrollHeight: res.screenHeight
        })
      }
    })
    getDataList(that)
  },
  // Auto-load the next page when scrolled to the bottom.
  bindDownLoad: function () {
    var that = this
    getDataList(that)
  },
  onShow: function (options) {
    var that = this
    // Reload the list when another page flagged the data as updated,
    // then clear the flag.
    wx.getStorage({
      key: 'IsUpdate',
      success: function (res) {
        if (res.data) {
          pageIndex = 1
          that.setData({
            indexList: []
          })
          getDataList(that)
        }
        wx.setStorage({
          key: "IsUpdate",
          data: false
        })
      }
    })
  }
})
/// Entry point: prints a greeting to stdout.
fn main() {
    println!("Hello World!");
}
// Example build & run (shell transcript, kept as a comment so the file compiles):
//   $ rustc hello_world.rs
//   $ ./hello_world
//   Hello World!
<gh_stars>0
// Babel-generated runtime helpers (auto-generated transpiler output; do not hand-edit).
function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
import React from 'react';
import { storiesOf } from '@storybook/react';
import { Grommet, Box, Meter } from 'grommet';
import { grommet } from 'grommet/themes';
// Babel-transpiled storybook class component: a circular Meter whose value
// starts at 20, advances by 8 every 2 seconds, and wraps back to 20 once
// it passes 100.
var CircleMeter =
/*#__PURE__*/
function (_React$Component) {
  _inheritsLoose(CircleMeter, _React$Component);
  function CircleMeter() {
    var _this;
    for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
      args[_key] = arguments[_key];
    }
    _this = _React$Component.call.apply(_React$Component, [this].concat(args)) || this;
    _defineProperty(_assertThisInitialized(_this), "state", {
      value: 20
    });
    return _this;
  }
  var _proto = CircleMeter.prototype;
  _proto.componentDidMount = function componentDidMount() {
    var _this2 = this;
    // Start the 2-second animation timer.
    this.timer = setInterval(function () {
      var value = _this2.state.value;
      _this2.setState({
        value: value < 100 ? value + 8 : 20
      });
    }, 2000);
  };
  _proto.componentWillUnmount = function componentWillUnmount() {
    // Stop the timer to avoid setState on an unmounted component.
    clearInterval(this.timer);
  };
  _proto.render = function render() {
    var value = this.state.value;
    return React.createElement(Grommet, {
      theme: grommet
    }, React.createElement(Box, {
      align: "center",
      pad: "large"
    }, React.createElement(Meter, {
      type: "circle",
      background: "light-2",
      values: [{
        value: value,
        color: value > 50 ? 'accent-2' : 'accent-1'
      }]
    })));
  };
  return CircleMeter;
}(React.Component);
storiesOf('Meter', module).add('Circle', function () {
  return React.createElement(CircleMeter, null);
});
<reponame>xcfox/react-tile-pane<gh_stars>1-10
import { TileNodeRect } from '../../../../../..'
/**
 * Computes absolute-positioning CSS for a pane splitter bar.
 *
 * Fractional rect values (0..1) become percentage strings; the length
 * along the bar's own axis is left undefined, and the bar is nudged along
 * the split axis by `offset` percent via a CSS transform.
 */
export function calcBarStyles(
  rect: TileNodeRect,
  offset: number,
  isRow?: boolean
) {
  const pct = (fraction: number) => fraction * 100 + '%'
  return {
    top: pct(rect.top),
    left: pct(rect.left),
    width: isRow ? undefined : pct(rect.width),
    height: isRow ? pct(rect.height) : undefined,
    transform: `translate${isRow ? 'X' : 'Y'}(${offset}%)`,
  }
}
|
<gh_stars>0
import { Component, OnInit } from "@angular/core";
import { ActivatedRoute, Router } from "@angular/router";
import { UrlMapService } from "src/app/Services/url-map.service";

/**
 * Resolves a short-URL key from the route and redirects the browser to
 * the mapped long URL, falling back to the home page when the key is
 * missing or unknown.
 */
@Component({
  selector: "app-access-url",
  templateUrl: "./access-url.component.html",
  styleUrls: ["./access-url.component.scss"],
})
export class AccessURLComponent implements OnInit {
  // Short-URL key taken from the ':sKey' route parameter. Use the
  // primitive 'string' type, not the boxed 'String' wrapper.
  shortUrlKey: string | undefined;

  redirectToHome = () => {
    // TODO: pass info to home that valid short url not found
    this.router.navigateByUrl("/");
  };

  constructor(
    private route: ActivatedRoute,
    private urlMapService: UrlMapService,
    private router: Router
  ) {}

  ngOnInit(): void {
    // NOTE(review): this relies on route.params emitting synchronously so
    // shortUrlKey is set before the check below -- confirm for all
    // navigation scenarios.
    this.route.params.subscribe((params) => {
      this.shortUrlKey = params["sKey"];
    });
    if (this.shortUrlKey) {
      this.urlMapService.accessThisUrl(this.shortUrlKey).subscribe(
        (data: any) => {
          // Found in the map -> redirect the browser to the final location.
          window.location.replace(data.longUrl);
        },
        (error: any) => {
          // Valid key not found -> redirect to the home page.
          this.redirectToHome();
        }
      );
    } else {
      // Valid key not found -> redirect to the home page.
      this.redirectToHome();
    }
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.