text
stringlengths 1
1.05M
|
|---|
def add_num_to_list(list, num):
    """Return a NEW list with ``num`` added to every element of ``list``.

    The input list is not modified.
    NOTE(review): the parameter name ``list`` shadows the builtin; it is
    kept unchanged so keyword callers (``add_num_to_list(list=..., num=...)``)
    keep working.
    """
    # Comprehension replaces the manual append loop; same result.
    return [x + num for x in list]


list = [1, 2, 3, 4, 5]
result = add_num_to_list(list, 10)
print(result)
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package bean;
import Models.Alimento;
import java.sql.Date;
import java.util.ArrayList;
/**
*
* @author Marcelo
*/
public class TurnoBean {
// Plain mutable JavaBean holding one "turno" (shift): id, display name,
// the foods (Alimento) served during it, and its date. Setters perform
// no validation; fields may be null until set.
// Database identifier of the shift.
private int id;
// Human-readable shift name.
private String nomeTurno;
// Foods associated with this shift; may be null before setAlimentos is called.
private ArrayList<Alimento> alimentos;
// java.sql.Date — presumably the day the shift occurs; confirm against callers.
private Date date;
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public String getNomeTurno() {
return nomeTurno;
}
public void setNomeTurno(String nomeTurno) {
this.nomeTurno = nomeTurno;
}
public ArrayList<Alimento> getAlimentos() {
return alimentos;
}
public void setAlimentos(ArrayList<Alimento> alimentos) {
this.alimentos = alimentos;
}
public Date getDate() {
return date;
}
public void setDate(Date date) {
this.date = date;
}
}
|
<reponame>conductiveresearch/abstractions
-- Incremental refresh of the aave schema: each call backfills the last
-- 3 days of data up to now / the start of today.
-- Token metadata (raw NOW() bounds, not day-truncated like the rest).
SELECT aave.insert_aave_tokens(
NOW() - interval '3 days',
NOW());
-- Daily interest rates over the trailing 3 whole days.
SELECT aave.insert_aave_daily_interest_rates(
DATE_TRUNC('day',NOW()) - interval '3 days',
DATE_TRUNC('day',NOW()) );
-- Daily aToken balances.
SELECT aave.insert_aave_daily_atoken_balances(
DATE_TRUNC('day',NOW()) - interval '3 days',
DATE_TRUNC('day',NOW()) );
-- Daily liquidity-mining rates.
SELECT aave.insert_aave_daily_liquidity_mining_rates(
DATE_TRUNC('day',NOW()) - interval '3 days',
DATE_TRUNC('day',NOW()) );
-- Daily treasury fees.
SELECT aave.insert_aave_daily_treasury_fees(
DATE_TRUNC('day',NOW()) - interval '3 days',
DATE_TRUNC('day',NOW()) );
-- Daily treasury events.
SELECT aave.insert_aave_daily_treasury_events(
DATE_TRUNC('day',NOW()) - interval '3 days',
DATE_TRUNC('day',NOW()) );
|
class GeneticDataProcessor:
    """Thin wrapper around a dataset handle that supports SNP queries.

    ``dataset`` must expose ``search(query_dict)``; ``server`` is stored
    but not used by this class (kept for callers that read it).
    """

    def __init__(self, server, dataset):
        self.server = server
        self.dataset = dataset

    def query_snp(self, rsid_list, verbose=False):
        """Search the dataset for the given rsIDs.

        :param rsid_list: list of rsID identifiers to filter on.
        :param verbose: when True, echo the query parameter to stdout.
        :return: the raw response object returned by ``dataset.search``.
        """
        if verbose:
            print("query_snp parameter: %s" % rsid_list)
        # The dataset API takes a single dict with a 'filters' mapping.
        response = self.dataset.search({
            'filters': {
                'rsid_list': rsid_list
            }
        })
        return response
|
#!/bin/bash
# Copyright 2015 The Kubernetes Authors All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Script that creates a Kubemark cluster with Master running on GCE.
# Resolve the repo root relative to this script and pull in shared helpers
# (color vars, run-gcloud-compute-with-retries, ensure-temp-dir, ...).
KUBE_ROOT=$(dirname "${BASH_SOURCE}")/../..
source "${KUBE_ROOT}/test/kubemark/common.sh"
# Writes per-component flag files (apiserver/scheduler/controller-manager)
# into ${RESOURCE_DIRECTORY}; the sed passes strip literal double quotes
# so the flags can be consumed verbatim on the master.
function writeEnvironmentFiles() {
cat > "${RESOURCE_DIRECTORY}/apiserver_flags" <<EOF
${APISERVER_TEST_ARGS}
--service-cluster-ip-range="${SERVICE_CLUSTER_IP_RANGE}"
EOF
sed -i'' -e "s/\"//g" "${RESOURCE_DIRECTORY}/apiserver_flags"
cat > "${RESOURCE_DIRECTORY}/scheduler_flags" <<EOF
${SCHEDULER_TEST_ARGS}
EOF
sed -i'' -e "s/\"//g" "${RESOURCE_DIRECTORY}/scheduler_flags"
cat > "${RESOURCE_DIRECTORY}/controllers_flags" <<EOF
${CONTROLLER_MANAGER_TEST_ARGS}
--allocate-node-cidrs="${ALLOCATE_NODE_CIDRS}"
--cluster-cidr="${CLUSTER_IP_RANGE}"
--service-cluster-ip-range="${SERVICE_CLUSTER_IP_RANGE}"
--terminated-pod-gc-threshold="${TERMINATED_POD_GC_THRESHOLD}"
EOF
sed -i'' -e "s/\"//g" "${RESOURCE_DIRECTORY}/controllers_flags"
}
# Obtain the kubemark binary: either copy it from a local build tree, or
# extract it from a release tarball, then build its docker image.
RUN_FROM_DISTRO=${RUN_FROM_DISTRO:-false}
MAKE_DIR="${KUBE_ROOT}/cluster/images/kubemark"
if [ "${RUN_FROM_DISTRO}" == "false" ]; then
# Running from repository
cp "${KUBE_ROOT}/_output/release-stage/server/linux-amd64/kubernetes/server/bin/kubemark" "${MAKE_DIR}"
else
# Running from a release distro: unpack the server tarball locally.
cp "${KUBE_ROOT}/server/kubernetes-server-linux-amd64.tar.gz" "."
tar -xzf kubernetes-server-linux-amd64.tar.gz
cp "kubernetes/server/bin/kubemark" "${MAKE_DIR}"
rm -rf "kubernetes-server-linux-amd64.tar.gz" "kubernetes"
fi
CURR_DIR=`pwd`
cd "${MAKE_DIR}"
# Retry `make` (the image build) a few times with a linear backoff.
RETRIES=3
for attempt in $(seq 1 ${RETRIES}); do
if ! make; then
if [[ $((attempt)) -eq "${RETRIES}" ]]; then
echo "${color_red}Make failed. Exiting.${color_norm}"
exit 1
fi
echo -e "${color_yellow}Make attempt $(($attempt)) failed. Retrying.${color_norm}" >& 2
sleep $(($attempt * 5))
else
break
fi
done
# Remove the copied binary now that the image is built.
rm kubemark
cd $CURR_DIR
GCLOUD_COMMON_ARGS="--project ${PROJECT} --zone ${ZONE}"
# Create the master's persistent disk and VM instance on GCE.
run-gcloud-compute-with-retries disks create "${MASTER_NAME}-pd" \
${GCLOUD_COMMON_ARGS} \
--type "${MASTER_DISK_TYPE}" \
--size "${MASTER_DISK_SIZE}"
run-gcloud-compute-with-retries instances create "${MASTER_NAME}" \
${GCLOUD_COMMON_ARGS} \
--machine-type "${MASTER_SIZE}" \
--image-project="${MASTER_IMAGE_PROJECT}" \
--image "${MASTER_IMAGE}" \
--tags "${MASTER_TAG}" \
--network "${NETWORK}" \
--scopes "storage-ro,compute-rw,logging-write" \
--disk "name=${MASTER_NAME}-pd,device-name=master-pd,mode=rw,boot=no,auto-delete=no"
# Open HTTPS (443) to the master from anywhere.
run-gcloud-compute-with-retries firewall-rules create "${INSTANCE_PREFIX}-kubemark-master-https" \
--project "${PROJECT}" \
--network "${NETWORK}" \
--source-ranges "0.0.0.0/0" \
--target-tags "${MASTER_TAG}" \
--allow "tcp:443"
# Scrape the master's external IP out of `gcloud ... describe` output.
MASTER_IP=$(gcloud compute instances describe ${MASTER_NAME} \
--zone="${ZONE}" --project="${PROJECT}" | grep natIP: | cut -f2 -d":" | sed "s/ //g")
# Optionally provision a separate etcd instance dedicated to events.
if [ "${SEPARATE_EVENT_MACHINE:-false}" == "true" ]; then
EVENT_STORE_NAME="${INSTANCE_PREFIX}-event-store"
run-gcloud-compute-with-retries disks create "${EVENT_STORE_NAME}-pd" \
${GCLOUD_COMMON_ARGS} \
--type "${MASTER_DISK_TYPE}" \
--size "${MASTER_DISK_SIZE}"
run-gcloud-compute-with-retries instances create "${EVENT_STORE_NAME}" \
${GCLOUD_COMMON_ARGS} \
--machine-type "${MASTER_SIZE}" \
--image-project="${MASTER_IMAGE_PROJECT}" \
--image "${MASTER_IMAGE}" \
--tags "${EVENT_STORE_NAME}" \
--network "${NETWORK}" \
--scopes "storage-ro,compute-rw,logging-write" \
--disk "name=${EVENT_STORE_NAME}-pd,device-name=master-pd,mode=rw,boot=no,auto-delete=no"
# Internal (network) IP of the event store, used by the master.
EVENT_STORE_IP=$(gcloud compute instances describe ${EVENT_STORE_NAME} \
--zone="${ZONE}" --project="${PROJECT}" | grep networkIP: | cut -f2 -d":" | sed "s/ //g")
# Wait until SSH to the event store works before starting etcd on it.
until gcloud compute ssh --zone="${ZONE}" --project="${PROJECT}" "${EVENT_STORE_NAME}" --command="ls" &> /dev/null; do
sleep 1
done
gcloud compute ssh "${EVENT_STORE_NAME}" --zone="${ZONE}" --project="${PROJECT}" \
--command="sudo docker run --net=host -d gcr.io/google_containers/etcd:2.0.12 /usr/local/bin/etcd \
--listen-peer-urls http://127.0.0.1:2380 \
--addr=127.0.0.1:4002 \
--bind-addr=0.0.0.0:4002 \
--data-dir=/var/etcd/data"
fi
# Generate certificates and auth tokens locally (helpers from common.sh).
ensure-temp-dir
gen-kube-bearertoken
create-certs ${MASTER_IP}
KUBELET_TOKEN=$(dd if=/dev/urandom bs=128 count=1 2>/dev/null | base64 | tr -d "=+/" | dd bs=32 count=1 2>/dev/null)
KUBE_PROXY_TOKEN=$(dd if=/dev/urandom bs=128 count=1 2>/dev/null | base64 | tr -d "=+/" | dd bs=32 count=1 2>/dev/null)
echo "${CA_CERT_BASE64}" | base64 --decode > "${RESOURCE_DIRECTORY}/ca.crt"
echo "${KUBECFG_CERT_BASE64}" | base64 --decode > "${RESOURCE_DIRECTORY}/kubecfg.crt"
echo "${KUBECFG_KEY_BASE64}" | base64 --decode > "${RESOURCE_DIRECTORY}/kubecfg.key"
# Wait for the master to accept SSH.
until gcloud compute ssh --zone="${ZONE}" --project="${PROJECT}" "${MASTER_NAME}" --command="ls" &> /dev/null; do
sleep 1
done
# Random 16-char basic-auth password for the admin user.
password=$(python -c 'import string,random; print("".join(random.SystemRandom().choice(string.ascii_letters + string.digits) for _ in range(16)))')
# Install certs, tokens and auth files under /srv/kubernetes on the master.
gcloud compute ssh --zone="${ZONE}" --project="${PROJECT}" "${MASTER_NAME}" \
--command="sudo mkdir /srv/kubernetes -p && \
sudo bash -c \"echo ${MASTER_CERT_BASE64} | base64 --decode > /srv/kubernetes/server.cert\" && \
sudo bash -c \"echo ${MASTER_KEY_BASE64} | base64 --decode > /srv/kubernetes/server.key\" && \
sudo bash -c \"echo ${CA_CERT_BASE64} | base64 --decode > /srv/kubernetes/ca.crt\" && \
sudo bash -c \"echo ${KUBECFG_CERT_BASE64} | base64 --decode > /srv/kubernetes/kubecfg.crt\" && \
sudo bash -c \"echo ${KUBECFG_KEY_BASE64} | base64 --decode > /srv/kubernetes/kubecfg.key\" && \
sudo bash -c \"echo \"${KUBE_BEARER_TOKEN},admin,admin\" > /srv/kubernetes/known_tokens.csv\" && \
sudo bash -c \"echo \"${KUBELET_TOKEN},kubelet,kubelet\" >> /srv/kubernetes/known_tokens.csv\" && \
sudo bash -c \"echo \"${KUBE_PROXY_TOKEN},kube_proxy,kube_proxy\" >> /srv/kubernetes/known_tokens.csv\" && \
sudo bash -c \"echo ${password},admin,admin > /srv/kubernetes/basic_auth.csv\""
writeEnvironmentFiles
# Ship the server tarball, startup scripts and flag files to the master,
# then launch the master components.
if [ "${RUN_FROM_DISTRO}" == "false" ]; then
gcloud compute copy-files --zone="${ZONE}" --project="${PROJECT}" \
"${KUBE_ROOT}/_output/release-tars/kubernetes-server-linux-amd64.tar.gz" \
"${KUBEMARK_DIRECTORY}/start-kubemark-master.sh" \
"${KUBEMARK_DIRECTORY}/configure-kubectl.sh" \
"${RESOURCE_DIRECTORY}/apiserver_flags" \
"${RESOURCE_DIRECTORY}/scheduler_flags" \
"${RESOURCE_DIRECTORY}/controllers_flags" \
"${MASTER_NAME}":~
else
gcloud compute copy-files --zone="${ZONE}" --project="${PROJECT}" \
"${KUBE_ROOT}/server/kubernetes-server-linux-amd64.tar.gz" \
"${KUBEMARK_DIRECTORY}/start-kubemark-master.sh" \
"${KUBEMARK_DIRECTORY}/configure-kubectl.sh" \
"${RESOURCE_DIRECTORY}/apiserver_flags" \
"${RESOURCE_DIRECTORY}/scheduler_flags" \
"${RESOURCE_DIRECTORY}/controllers_flags" \
"${MASTER_NAME}":~
fi
gcloud compute ssh "${MASTER_NAME}" --zone="${ZONE}" --project="${PROJECT}" \
--command="chmod a+x configure-kubectl.sh && chmod a+x start-kubemark-master.sh && sudo ./start-kubemark-master.sh ${EVENT_STORE_IP:-127.0.0.1}"
# create kubeconfig for Kubelet:
# (base64-encoded so it can be embedded in a Secret below)
KUBECONFIG_CONTENTS=$(echo "apiVersion: v1
kind: Config
users:
- name: kubelet
user:
client-certificate-data: "${KUBELET_CERT_BASE64}"
client-key-data: "${KUBELET_KEY_BASE64}"
clusters:
- name: kubemark
cluster:
certificate-authority-data: "${CA_CERT_BASE64}"
server: https://${MASTER_IP}
contexts:
- context:
cluster: kubemark
user: kubelet
name: kubemark-context
current-context: kubemark-context" | base64 | tr -d "\n\r")
# Secret carrying the kubelet kubeconfig into the hollow-node pods.
KUBECONFIG_SECRET="${RESOURCE_DIRECTORY}/kubeconfig_secret.json"
cat > "${KUBECONFIG_SECRET}" << EOF
{
"apiVersion": "v1",
"kind": "Secret",
"metadata": {
"name": "kubeconfig"
},
"type": "Opaque",
"data": {
"kubeconfig": "${KUBECONFIG_CONTENTS}"
}
}
EOF
# ConfigMap with the API content type the test cluster should use.
NODE_CONFIGMAP="${RESOURCE_DIRECTORY}/node_config_map.json"
cat > "${NODE_CONFIGMAP}" << EOF
{
"apiVersion": "v1",
"kind": "ConfigMap",
"metadata": {
"name": "node-configmap"
},
"data": {
"content.type": "${TEST_CLUSTER_API_CONTENT_TYPE}"
}
}
EOF
# Local admin kubeconfig used by this script to talk to the kubemark master.
LOCAL_KUBECONFIG="${RESOURCE_DIRECTORY}/kubeconfig.loc"
cat > "${LOCAL_KUBECONFIG}" << EOF
apiVersion: v1
kind: Config
users:
- name: admin
user:
client-certificate-data: "${KUBECFG_CERT_BASE64}"
client-key-data: "${KUBECFG_KEY_BASE64}"
username: admin
password: admin
clusters:
- name: kubemark
cluster:
certificate-authority-data: "${CA_CERT_BASE64}"
server: https://${MASTER_IP}
contexts:
- context:
cluster: kubemark
user: admin
name: kubemark-context
current-context: kubemark-context
EOF
# Render templates: replica count, project, and heapster memory sized
# from the number of hollow nodes.
sed "s/##numreplicas##/${NUM_NODES:-10}/g" "${RESOURCE_DIRECTORY}/hollow-node_template.json" > "${RESOURCE_DIRECTORY}/hollow-node.json"
sed -i'' -e "s/##project##/${PROJECT}/g" "${RESOURCE_DIRECTORY}/hollow-node.json"
mkdir "${RESOURCE_DIRECTORY}/addons" || true
sed "s/##MASTER_IP##/${MASTER_IP}/g" "${RESOURCE_DIRECTORY}/heapster_template.json" > "${RESOURCE_DIRECTORY}/addons/heapster.json"
metrics_mem_per_node=4
metrics_mem=$((200 + ${metrics_mem_per_node}*${NUM_NODES:-10}))
sed -i'' -e "s/##METRICS_MEM##/${metrics_mem}/g" "${RESOURCE_DIRECTORY}/addons/heapster.json"
eventer_mem_per_node=500
eventer_mem=$((200 * 1024 + ${eventer_mem_per_node}*${NUM_NODES:-10}))
sed -i'' -e "s/##EVENTER_MEM##/${eventer_mem}/g" "${RESOURCE_DIRECTORY}/addons/heapster.json"
# Create the kubemark namespace and all resources inside it.
"${KUBECTL}" create -f "${RESOURCE_DIRECTORY}/kubemark-ns.json"
"${KUBECTL}" create -f "${KUBECONFIG_SECRET}" --namespace="kubemark"
"${KUBECTL}" create -f "${NODE_CONFIGMAP}" --namespace="kubemark"
"${KUBECTL}" create -f "${RESOURCE_DIRECTORY}/addons" --namespace="kubemark"
"${KUBECTL}" create -f "${RESOURCE_DIRECTORY}/hollow-node.json" --namespace="kubemark"
# Generated manifests contain secrets/tokens — remove the local copies.
rm "${KUBECONFIG_SECRET}"
rm "${NODE_CONFIGMAP}"
echo "Waiting for all HollowNodes to become Running..."
start=$(date +%s)
nodes=$("${KUBECTL}" --kubeconfig="${RESOURCE_DIRECTORY}/kubeconfig.loc" get node) || true
# Subtract 1 for the `kubectl get node` header line.
ready=$(($(echo "${nodes}" | grep -v "NotReady" | wc -l) - 1))
until [[ "${ready}" -ge "${NUM_NODES}" ]]; do
echo -n .
sleep 1
now=$(date +%s)
# Fail it if it already took more than 15 minutes.
if [ $((now - start)) -gt 900 ]; then
echo ""
echo "Timeout waiting for all HollowNodes to become Running"
# Try listing nodes again - if it fails it means that API server is not responding
if "${KUBECTL}" --kubeconfig="${RESOURCE_DIRECTORY}/kubeconfig.loc" get node &> /dev/null; then
echo "Found only ${ready} ready Nodes while waiting for ${NUM_NODES}."
exit 1
fi
echo "Got error while trying to list Nodes. Probably API server is down."
exit 1
fi
nodes=$("${KUBECTL}" --kubeconfig="${RESOURCE_DIRECTORY}/kubeconfig.loc" get node) || true
ready=$(($(echo "${nodes}" | grep -v "NotReady" | wc -l) - 1))
done
echo ""
echo "Password to kubemark master: ${password}"
|
<gh_stars>100-1000
// WeChat mini-program page: a picker over a fixed list of cruise-ship names.
Page({
data: {
// Cruise-ship names shown as picker options (Chinese labels).
array:["天海新世纪号","歌诗达邮轮•大西洋号","歌诗达邮轮•幸运号","歌诗达邮轮•维多利亚号","歌诗达邮轮•赛琳娜号","海洋量子号","海洋水手号","蓝宝石公主号"],
// Index of the currently selected ship in `array`.
index:0
},
// Fired when the user confirms a picker selection; stores the new index.
bindPickerChange: function(e) {
console.log('picker发送选择改变,携带值为', e.detail.value)
this.setData({
index: e.detail.value
})
}
})
|
<gh_stars>0
import nc from 'next-connect'
import cache from 'memory-cache'
import imageToBase64 from 'image-to-base64'
import { renderToStaticMarkup } from 'react-dom/server'
import fortyTwo from '../../middlewares/42'
import Profile from '../../components/cards/Profile'
// next-connect API route: renders a 42-intranet profile card as an SVG.
// User data and avatar images are cached in-memory for 2h (7200000 ms).
const handler = nc()
// Middleware that attaches an authenticated 42-API client as req.fortyTwo.
handler.use(fortyTwo)
handler.get(async (req, res) => {
// Query-string options controlling what the card shows.
const {
login,
cursus,
email: queryEmail,
leet_logo,
forty_two_network_logo,
dark,
} = req.query
let userData
// Serve user data from cache when present; otherwise fetch and cache it.
const userCache = cache.get(`login:${login}`)
if (userCache) userData = userCache
else {
userData = await req.fortyTwo.getUser(login)
cache.put(`login:${login}`, userData, 7200000)
}
const {
email,
first_name,
last_name,
image_url,
'staff?': isStaff,
cursus_users: cursuses,
} = userData
// Pick the cursus requested via the query string, if the user has it.
const getCursus = cursuses.find(({ cursus: { slug } }) => slug === cursus)
// NOTE(review): cursus_id 6 is presumably the piscine cursus — confirm
// against the 42 API before relying on this elsewhere.
const isPisciner = cursuses.length === 1 && cursuses[0].cursus_id === 6
let image
// Avatar is embedded as base64, cached under a separate key.
const imageCache = cache.get(`image:${login}`)
if (imageCache) image = imageCache
else {
image = await imageToBase64(image_url)
cache.put(`image:${login}`, image, 7200000)
}
// Shape consumed by the <Profile> card component.
const user = {
login,
fullName: `${first_name} ${last_name}`,
email: queryEmail !== 'hide' && email,
image,
isPisciner,
isStaff,
cursus: !isStaff &&
getCursus && {
grade: getCursus.grade,
name: getCursus.cursus.name,
level: getCursus.level,
},
leetLogo: leet_logo !== 'hide',
fortyTwoLogo: forty_two_network_logo !== 'hide',
isDark: dark === 'true',
}
// The card is served as a static SVG document.
res.setHeader('Content-Type', 'image/svg+xml')
res.end(renderToStaticMarkup(<Profile user={user} />))
})
export default handler
|
<filename>app/models/service.rb
# A service registered by a user; owns many domain names.
class Service < ActiveRecord::Base
  validates :service_name, :user_id, presence: true
  has_many :domain_names
  # FIX: class_name must be the constant name "User" — the original
  # lowercase "user" would make Rails constantize a nonexistent `user`
  # class and raise NameError when the association is loaded.
  belongs_to :user, class_name: "User", foreign_key: "user_id"
end
|
<gh_stars>0
# Join model describing one concrete variant of a product:
# a (product, color, size) combination.
class ProductCharacteristic < ApplicationRecord
belongs_to :product
belongs_to :color
belongs_to :size
end
|
import { Component } from '@angular/core';
@Component({
selector: 'my-app',
templateUrl: './app.component.html',
styleUrls: [ './app.component.css' ]
})
export class AppComponent {
name = 'Angular';
// Fruit names rendered by the template's *ngFor.
fruits = ['apple', 'banana', 'orange', 'strawberry', 'grape'];
// Sorts the list alphabetically in place; the template re-renders
// because the same array instance is mutated.
sortFruits() {
this.fruits.sort();
}
}
<!-- Template for AppComponent: a sort button and the fruit list. -->
<div>
<button (click)="sortFruits()">Sort</button>
<ul>
<li *ngFor="let fruit of fruits">{{fruit}}</li>
</ul>
</div>
|
// Copyright 2014 <NAME>, <NAME>.
// Copyright 2015-2021 <NAME>.
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt
// or copy at http://www.boost.org/LICENSE_1_0.txt)
#include "../b2_workarounds.hpp"
//[callplugcpp_tutorial3
#include <boost/dll/import.hpp> // for import_alias
#include <boost/make_shared.hpp>
#include <boost/function.hpp>
#include <iostream>
#include "../tutorial_common/my_plugin_api.hpp"
namespace dll = boost::dll;
// Loads each candidate shared library and, if it exports "create_plugin",
// imports that factory, prints the plugin's name, and counts it.
// Returns the number of libraries that exposed the symbol.
std::size_t search_for_symbols(const std::vector<boost::dll::fs::path>& plugins) {
std::size_t plugins_found = 0;
for (std::size_t i = 0; i < plugins.size(); ++i) {
std::cout << "Loading plugin: " << plugins[i] << '\n';
// append_decorations lets us pass "plugin" and have lib/.so/.dll added.
dll::shared_library lib(plugins[i], dll::load_mode::append_decorations);
if (!lib.has("create_plugin")) {
// no such symbol
continue;
}
// library has symbol, importing...
typedef boost::shared_ptr<my_plugin_api> (pluginapi_create_t)();
// import_alias takes ownership of `lib`, keeping it loaded as long as
// `creator` is alive.
boost::function<pluginapi_create_t> creator
= dll::import_alias<pluginapi_create_t>(boost::move(lib), "create_plugin");
std::cout << "Matching plugin name: " << creator()->name() << std::endl;
++ plugins_found;
}
return plugins_found;
}
//]
// Test driver: argv[1..] are candidate binaries; only those that look like
// shared libraries are searched, and exactly one is expected to match.
int main(int argc, char* argv[]) {
BOOST_ASSERT(argc >= 3);
std::vector<boost::dll::fs::path> plugins;
plugins.reserve(argc - 1);
for (int i = 1; i < argc; ++i) {
// b2_workarounds filters out non-library build artifacts.
if (b2_workarounds::is_shared_library(argv[i])) {
plugins.push_back(argv[i]);
}
}
const std::size_t res = search_for_symbols(plugins);
BOOST_ASSERT(res == 1);
(void)res;
}
|
import React from 'react'
import './styles.css'
import Banner from '../../components/Banner'
import Card from '../../components/Card'
// Static card data for the home page. The first three entries are
// "explore" cards (no price); the last three are priced listings.
const cardInfos = [
{
src: 'https://a0.muscache.com/im/pictures/eb9c7c6a-ee33-414a-b1ba-14e8860d59b3.jpg?im_w=720',
title: 'Online Experiences',
description: 'Unique activities we can do together, led by a world of hosts.',
},
{
src: 'https://a0.muscache.com/im/pictures/15159c9c-9cf1-400e-b809-4e13f286fa38.jpg?im_w=720',
title: 'Unique stays',
description: 'Spaces that are more than just a place to sleep.',
},
{
src: 'https://a0.muscache.com/im/pictures/fdb46962-10c1-45fc-a228-d0b055411448.jpg?im_w=720',
title: 'Entire homes',
description: 'Comfortable private places, with room for friends or family.',
},
// Priced listings below.
{
src: 'https://media.nomadicmatt.com/2019/airbnb_breakup3.jpg',
title: '3 Bedroom Flat in Bournemouth',
description: 'Superhost with a stunning view of the beachside in Sunny Bournemouth',
price: '£130/night',
},
{
src: 'https://thespaces.com/wp-content/uploads/2017/08/Courtesy-of-Airbnb.jpg',
title: 'Penthouse in London',
description: 'Enjoy the amazing sights of London with this stunning penthouse',
price: '£350/night',
},
{
src: 'https://media.nomadicmatt.com/2018/apartment.jpg',
title: '1 Bedroom apartment',
description: 'Superhost with great amenities and a fabolous shopping complex nearby',
price: '£70/night',
}
]
function Home() {
return (
<div className='homepage'>
<Banner />
<div className='homepage_section'>
<Card
src={cardInfos[0].src}
title={cardInfos[0].title}
description={cardInfos[0].description}
/>
<Card
src={cardInfos[1].src}
title={cardInfos[1].title}
description={cardInfos[1].description}
/>
<Card
src={cardInfos[2].src}
title={cardInfos[2].title}
description={cardInfos[2].description}
/>
</div>
<div className='homepage_section'>
<Card
src={cardInfos[3].src}
title={cardInfos[3].title}
description={cardInfos[3].description}
price={cardInfos[3].price}
/>
<Card
src={cardInfos[4].src}
title={cardInfos[4].title}
description={cardInfos[4].description}
price={cardInfos[4].price}
/>
<Card
src={cardInfos[5].src}
title={cardInfos[5].title}
description={cardInfos[5].description}
price={cardInfos[5].price}
/>
</div>
</div>
)
}
export default Home
|
using System;
using System.Text;
public class UniqueIdGenerator
{
    // FIX: the original created `new Random()` inside the method. On
    // runtimes where Random is time-seeded (pre-.NET 6), rapid successive
    // calls can get identical seeds and return duplicate "unique" ids.
    // A single shared instance avoids that; Random is not thread-safe,
    // so access is serialized with a lock.
    private static readonly Random Rng = new Random();
    private static readonly object RngLock = new object();

    /// <summary>
    /// Generates a pseudo-random request id of the form "REQ-XXXXXX-2022",
    /// where each X is an uppercase letter or digit (15 chars total).
    /// Not cryptographically secure — do not use for security tokens.
    /// </summary>
    public string GenerateRequestId()
    {
        const string prefix = "REQ-";
        const string suffix = "-2022";
        const string chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
        StringBuilder requestIdBuilder = new StringBuilder(prefix);
        lock (RngLock)
        {
            for (int i = 0; i < 6; i++)
            {
                requestIdBuilder.Append(chars[Rng.Next(chars.Length)]);
            }
        }
        requestIdBuilder.Append(suffix);
        return requestIdBuilder.ToString();
    }
}
|
#!/usr/bin/env bash
# noclobber/noglob/nounset/pipefail: strict mode suited to a preview
# script that must never overwrite files or expand globs by accident.
set -o noclobber -o noglob -o nounset -o pipefail
IFS=$'\n'
# If the option `use_preview_script` is set to `true`,
# then this script will be called and its output will be displayed in ranger.
# ANSI color codes are supported.
# STDIN is disabled, so interactive scripts won't work properly
# This script is considered a configuration file and must be updated manually.
# It will be left untouched if you upgrade ranger.
# Meanings of exit codes:
# code | meaning    | action of ranger
# -----+------------+-------------------------------------------
# 0    | success    | Display stdout as preview
# 1    | no preview | Display no preview at all
# 2    | plain text | Display the plain content of the file
# 3    | fix width  | Don't reload when width changes
# 4    | fix height | Don't reload when height changes
# 5    | fix both   | Don't ever reload
# 6    | image      | Display the image `$IMAGE_CACHE_PATH` points to as an image preview
# 7    | image      | Display the file directly as an image
# Script arguments
FILE_PATH="${1}"         # Full path of the highlighted file
PV_WIDTH="${2}"          # Width of the preview pane (number of fitting characters)
PV_HEIGHT="${3}"         # Height of the preview pane (number of fitting characters)
IMAGE_CACHE_PATH="${4}"  # Full path that should be used to cache image preview
PV_IMAGE_ENABLED="${5}"  # 'True' if image previews are enabled, 'False' otherwise.
# Lowercased extension drives handle_extension's case dispatch.
FILE_EXTENSION="${FILE_PATH##*.}"
FILE_EXTENSION_LOWER="$(printf "%s" "${FILE_EXTENSION}" | tr '[:upper:]' '[:lower:]')"
# Settings
HIGHLIGHT_SIZE_MAX=262143  # 256KiB
HIGHLIGHT_TABWIDTH=8
HIGHLIGHT_STYLE='pablo'
PYGMENTIZE_STYLE='autumn'
# Previews chosen by file extension. Each branch tries converters in order;
# a successful one exits with the appropriate ranger code, a failing branch
# either exits 1 (no preview) or falls through to the mimetype handlers.
handle_extension() {
case "${FILE_EXTENSION_LOWER}" in
# Archive
a|ace|alz|arc|arj|bz|bz2|cab|cpio|deb|gz|jar|lha|lz|lzh|lzma|lzo|\
rpm|rz|t7z|tar|tbz|tbz2|tgz|tlz|txz|tZ|tzo|war|xpi|xz|Z|zip)
atool --list -- "${FILE_PATH}" && exit 5
bsdtar --list --file "${FILE_PATH}" && exit 5
exit 1;;
rar)
# Avoid password prompt by providing empty password
unrar lt -p- -- "${FILE_PATH}" && exit 5
exit 1;;
7z)
# Avoid password prompt by providing empty password
7z l -p -- "${FILE_PATH}" && exit 5
exit 1;;
# PDF
pdf)
# Preview as text conversion
pdftotext -l 10 -nopgbrk -q -- "${FILE_PATH}" - | fmt -w ${PV_WIDTH} && exit 5
mutool draw -F txt -i -- "${FILE_PATH}" 1-10 | fmt -w ${PV_WIDTH} && exit 5
exiftool "${FILE_PATH}" && exit 5
exit 1;;
# BitTorrent
torrent)
transmission-show -- "${FILE_PATH}" && exit 5
exit 1;;
# OpenDocument
odt|ods|odp|sxw)
# Preview as text conversion
odt2txt "${FILE_PATH}" && exit 5
exit 1;;
# HTML
htm|html|xhtml)
# Preview as text conversion
w3m -dump "${FILE_PATH}" && exit 5
lynx -dump -- "${FILE_PATH}" && exit 5
elinks -dump "${FILE_PATH}" && exit 5
;; # Continue with next handler on failure
# JSON
json)
jq --color-output . "${FILE_PATH}" && exit 5
python -m json.tool -- "${FILE_PATH}" && exit 5
;;
esac
}
# Image previews keyed on mimetype. Only the image/* and font branches are
# active; the commented-out branches (SVG, DjVu, video, PDF, ebooks,
# archive covers) are optional features the user can enable by uncommenting.
handle_image() {
# Size of the preview if there are multiple options or it has to be rendered
# from vector graphics. If the conversion program allows specifying only one
# dimension while keeping the aspect ratio, the width will be used.
local DEFAULT_SIZE="1920x1080"
local mimetype="${1}"
case "${mimetype}" in
# SVG
# image/svg+xml)
# convert -- "${FILE_PATH}" "${IMAGE_CACHE_PATH}" && exit 6
# exit 1;;
# DjVu
# image/vnd.djvu)
# ddjvu -format=tiff -quality=90 -page=1 -size="${DEFAULT_SIZE}" \
# - "${IMAGE_CACHE_PATH}" < "${FILE_PATH}" \
# && exit 6 || exit 1;;
# Image
image/*)
local orientation
orientation="$( identify -format '%[EXIF:Orientation]\n' -- "${FILE_PATH}" )"
# If orientation data is present and the image actually
# needs rotating ("1" means no rotation)...
if [[ -n "$orientation" && "$orientation" != 1 ]]; then
# ...auto-rotate the image according to the EXIF data.
convert -- "${FILE_PATH}" -auto-orient "${IMAGE_CACHE_PATH}" && exit 6
fi
# `w3mimgdisplay` will be called for all images (unless overridden as above),
# but might fail for unsupported types.
exit 7;;
# Video
# video/*)
# # Thumbnail
# ffmpegthumbnailer -i "${FILE_PATH}" -o "${IMAGE_CACHE_PATH}" -s 0 && exit 6
# exit 1;;
# PDF
# application/pdf)
# pdftoppm -f 1 -l 1 \
# -scale-to-x "${DEFAULT_SIZE%x*}" \
# -scale-to-y -1 \
# -singlefile \
# -jpeg -tiffcompression jpeg \
# -- "${FILE_PATH}" "${IMAGE_CACHE_PATH%.*}" \
# && exit 6 || exit 1;;
# ePub, MOBI, FB2 (using Calibre)
# application/epub+zip|application/x-mobipocket-ebook|application/x-fictionbook+xml)
# ebook-meta --get-cover="${IMAGE_CACHE_PATH}" -- "${FILE_PATH}" > /dev/null \
# && exit 6 || exit 1;;
# ePub (using <https://github.com/marianosimone/epub-thumbnailer>)
# application/epub+zip)
# epub-thumbnailer \
# "${FILE_PATH}" "${IMAGE_CACHE_PATH}" "${DEFAULT_SIZE%x*}" \
# && exit 6 || exit 1;;
# Font: render a specimen sheet to PNG, then convert into the cache path.
application/font*|application/*opentype)
preview_png="/tmp/$(basename "${IMAGE_CACHE_PATH%.*}").png"
if fontimage -o "${preview_png}" \
--pixelsize "120" \
--fontname \
--pixelsize "80" \
--text " ABCDEFGHIJKLMNOPQRSTUVWXYZ " \
--text " abcdefghijklmnopqrstuvwxyz " \
--text " 0123456789.:,;(*!?') ff fl fi ffi ffl " \
--text " The quick brown fox jumps over the lazy dog. " \
"${FILE_PATH}";
then
convert -- "${preview_png}" "${IMAGE_CACHE_PATH}" \
&& rm "${preview_png}" \
&& exit 6
else
exit 1
fi
;;
# Preview archives using the first image inside.
# (Very useful for comic book collections for example.)
# application/zip|application/x-rar|application/x-7z-compressed|\
# application/x-xz|application/x-bzip2|application/x-gzip|application/x-tar)
# local fn=""; local fe=""
# local zip=""; local rar=""; local tar=""; local bsd=""
# case "${mimetype}" in
# application/zip) zip=1 ;;
# application/x-rar) rar=1 ;;
# application/x-7z-compressed) ;;
# *) tar=1 ;;
# esac
# { [ "$tar" ] && fn=$(tar --list --file "${FILE_PATH}"); } || \
# { fn=$(bsdtar --list --file "${FILE_PATH}") && bsd=1 && tar=""; } || \
# { [ "$rar" ] && fn=$(unrar lb -p- -- "${FILE_PATH}"); } || \
# { [ "$zip" ] && fn=$(zipinfo -1 -- "${FILE_PATH}"); } || return
#
# fn=$(echo "$fn" | python -c "import sys; import mimetypes as m; \
# [ print(l, end='') for l in sys.stdin if \
# (m.guess_type(l[:-1])[0] or '').startswith('image/') ]" |\
# sort -V | head -n 1)
# [ "$fn" = "" ] && return
# [ "$bsd" ] && fn=$(printf '%b' "$fn")
#
# [ "$tar" ] && tar --extract --to-stdout \
# --file "${FILE_PATH}" -- "$fn" > "${IMAGE_CACHE_PATH}" && exit 6
# fe=$(echo -n "$fn" | sed 's/[][*?\]/\\\0/g')
# [ "$bsd" ] && bsdtar --extract --to-stdout \
# --file "${FILE_PATH}" -- "$fe" > "${IMAGE_CACHE_PATH}" && exit 6
# [ "$bsd" ] || [ "$tar" ] && rm -- "${IMAGE_CACHE_PATH}"
# [ "$rar" ] && unrar p -p- -inul -- "${FILE_PATH}" "$fn" > \
# "${IMAGE_CACHE_PATH}" && exit 6
# [ "$zip" ] && unzip -pP "" -- "${FILE_PATH}" "$fe" > \
# "${IMAGE_CACHE_PATH}" && exit 6
# [ "$rar" ] || [ "$zip" ] && rm -- "${IMAGE_CACHE_PATH}"
# ;;
esac
}
# Text previews keyed on mimetype: syntax-highlighted text, DjVu text dump,
# EXIF metadata for images, and mediainfo for audio/video.
handle_mime() {
local mimetype="${1}"
case "${mimetype}" in
# Text
text/* | */xml)
# Syntax highlight
# Files above HIGHLIGHT_SIZE_MAX are shown plain (exit 2) to stay fast.
if [[ "$( stat --printf='%s' -- "${FILE_PATH}" )" -gt "${HIGHLIGHT_SIZE_MAX}" ]]; then
exit 2
fi
# Pick 256-color output formats when the terminal supports them.
if [[ "$( tput colors )" -ge 256 ]]; then
local pygmentize_format='terminal256'
local highlight_format='xterm256'
else
local pygmentize_format='terminal'
local highlight_format='ansi'
fi
highlight --replace-tabs="${HIGHLIGHT_TABWIDTH}" --out-format="${highlight_format}" \
--style="${HIGHLIGHT_STYLE}" --force -- "${FILE_PATH}" && exit 5
# pygmentize -f "${pygmentize_format}" -O "style=${PYGMENTIZE_STYLE}" -- "${FILE_PATH}" && exit 5
exit 2;;
# DjVu
image/vnd.djvu)
# Preview as text conversion (requires djvulibre)
djvutxt "${FILE_PATH}" | fmt -w ${PV_WIDTH} && exit 5
exiftool "${FILE_PATH}" && exit 5
exit 1;;
# Image
image/*)
# Preview as text conversion
# img2txt --gamma=0.6 --width="${PV_WIDTH}" -- "${FILE_PATH}" && exit 4
exiftool "${FILE_PATH}" && exit 5
exit 1;;
# Video and audio
video/* | audio/*)
mediainfo "${FILE_PATH}" && exit 5
exiftool "${FILE_PATH}" && exit 5
exit 1;;
esac
}
# Last-resort preview: print a generic classification of the file.
handle_fallback() {
    if echo '----- File Type Classification -----' && file --dereference --brief -- "${FILE_PATH}"; then
        exit 5
    fi
    exit 1
}
# Main dispatch: image handlers (when enabled) first, then extension-based,
# then mimetype-based, finally the generic fallback. Each handler exits on
# success, so reaching a later line means the previous handler declined.
MIMETYPE="$( file --dereference --brief --mime-type -- "${FILE_PATH}" )"
if [[ "${PV_IMAGE_ENABLED}" == 'True' ]]; then
handle_image "${MIMETYPE}"
fi
handle_extension
handle_mime "${MIMETYPE}"
handle_fallback
exit 1
|
import TodoList from '../components/TodoList';
import {Link as RouterLink} from 'react-router-dom';
import React from 'react';
import FirebaseTodoListStore from '../stores/FirebaseTodoListStore';
import {Link, Typography} from '@mui/material';
// Page showing the Firebase-backed todo list, with a nav link back to the
// in-memory variant at "/".
function FirebasePage() {
return (
<>
<main>
<Typography variant="h4">Firebase Todo List</Typography>
{/* TodoList is store-agnostic; here it is wired to the Firebase store. */}
<TodoList store={FirebaseTodoListStore}/>
</main>
<nav>
<Link component={RouterLink} to="/">In Memory Todo List</Link>
</nav>
</>
);
}
export default FirebasePage;
|
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
typeof define === 'function' && define.amd ? define(['exports'], factory) :
(global = global || self, factory(global.Froalacharts = {}));
}(this, (function (exports) { 'use strict';
// --- Generated Svelte runtime: small functional/DOM helpers. ---
// (Vendor bundle — documented only; do not hand-edit logic.)
function noop() { }
function run(fn) {
return fn();
}
function blank_object() {
return Object.create(null);
}
function run_all(fns) {
fns.forEach(run);
}
function is_function(thing) {
return typeof thing === 'function';
}
// Svelte's change check: NaN-safe inequality, with objects/functions always
// treated as "changed" since they may be mutated in place.
function safe_not_equal(a, b) {
return a != a ? b == b : a !== b || ((a && typeof a === 'object') || typeof a === 'function');
}
function is_empty(obj) {
return Object.keys(obj).length === 0;
}
// DOM helpers used by compiled component fragments.
function insert(target, node, anchor) {
target.insertBefore(node, anchor || null);
}
function detach(node) {
node.parentNode.removeChild(node);
}
function element(name) {
return document.createElement(name);
}
// Sets/removes an attribute, skipping the write when the value is unchanged.
function attr(node, attribute, value) {
if (value == null)
node.removeAttribute(attribute);
else if (node.getAttribute(attribute) !== value)
node.setAttribute(attribute, value);
}
function children(element) {
return Array.from(element.childNodes);
}
// Legacy CustomEvent construction (works in older browsers without the
// CustomEvent constructor).
function custom_event(type, detail) {
const e = document.createEvent('CustomEvent');
e.initCustomEvent(type, false, false, detail);
return e;
}
// --- Generated Svelte runtime: current-component tracking & lifecycle. ---
// `current_component` is set while a component initializes so that the
// lifecycle functions below know which component to register against.
let current_component;
function set_current_component(component) {
current_component = component;
}
function get_current_component() {
if (!current_component)
throw new Error(`Function called outside component initialization`);
return current_component;
}
// Lifecycle registration: each pushes the callback onto the appropriate
// queue of the component currently being initialized.
function beforeUpdate(fn) {
get_current_component().$$.before_update.push(fn);
}
function onMount(fn) {
get_current_component().$$.on_mount.push(fn);
}
function afterUpdate(fn) {
get_current_component().$$.after_update.push(fn);
}
function onDestroy(fn) {
get_current_component().$$.on_destroy.push(fn);
}
// Returns a dispatch(type, detail) bound to the initializing component;
// fires the component's registered callbacks with a CustomEvent.
function createEventDispatcher() {
const component = get_current_component();
return (type, detail) => {
const callbacks = component.$$.callbacks[type];
if (callbacks) {
// TODO are there situations where events could be dispatched
// in a server (non-DOM) environment?
const event = custom_event(type, detail);
callbacks.slice().forEach(fn => {
fn.call(component, event);
});
}
};
}
const dirty_components = [];
const binding_callbacks = [];
const render_callbacks = [];
const flush_callbacks = [];
const resolved_promise = Promise.resolve();
let update_scheduled = false;
function schedule_update() {
if (!update_scheduled) {
update_scheduled = true;
resolved_promise.then(flush);
}
}
function add_render_callback(fn) {
render_callbacks.push(fn);
}
// Guards: `flushing` prevents re-entrant flushes; `seen_callbacks`
// prevents the same afterUpdate callback from running twice in one flush.
let flushing = false;
const seen_callbacks = new Set();
// Runs the synchronous update cycle: updates every dirty component, then
// runs binding and render callbacks, looping while new components become
// dirty; finally runs flush_callbacks and resets scheduler state.
function flush() {
    if (flushing)
        return;
    flushing = true;
    do {
        // first, call beforeUpdate functions
        // and update components
        for (let i = 0; i < dirty_components.length; i += 1) {
            const component = dirty_components[i];
            set_current_component(component);
            update(component.$$);
        }
        dirty_components.length = 0;
        while (binding_callbacks.length)
            binding_callbacks.pop()();
        // then, once components are updated, call
        // afterUpdate functions. This may cause
        // subsequent updates...
        for (let i = 0; i < render_callbacks.length; i += 1) {
            const callback = render_callbacks[i];
            if (!seen_callbacks.has(callback)) {
                // ...so guard against infinite loops
                seen_callbacks.add(callback);
                callback();
            }
        }
        render_callbacks.length = 0;
    } while (dirty_components.length);
    while (flush_callbacks.length) {
        flush_callbacks.pop()();
    }
    update_scheduled = false;
    flushing = false;
    seen_callbacks.clear();
}
// Updates one component's internals: runs its reactive update, the
// beforeUpdate callbacks, patches the fragment with the dirty bitmask
// (reset to [-1] first so changes during patching re-mark the component),
// and schedules afterUpdate callbacks for the render phase.
function update($$) {
    if ($$.fragment !== null) {
        $$.update();
        run_all($$.before_update);
        const dirty = $$.dirty;
        $$.dirty = [-1];
        $$.fragment && $$.fragment.p($$.ctx, dirty);
        $$.after_update.forEach(add_render_callback);
    }
}
// Blocks currently playing their outro transition.
const outroing = new Set();
// Plays a block's intro transition (block.i), cancelling any pending
// outro for it; no-op when the block has no intro.
function transition_in(block, local) {
    if (block && block.i) {
        outroing.delete(block);
        block.i(local);
    }
}
// Mounts a component's fragment into `target` before `anchor`, then
// schedules its onMount callbacks for the render phase; onMount return
// values that are functions become destroy callbacks (run immediately if
// the component was already destroyed).
function mount_component(component, target, anchor) {
    const { fragment, on_mount, on_destroy, after_update } = component.$$;
    fragment && fragment.m(target, anchor);
    // onMount happens before the initial afterUpdate
    add_render_callback(() => {
        const new_on_destroy = on_mount.map(run).filter(is_function);
        if (on_destroy) {
            on_destroy.push(...new_on_destroy);
        }
        else {
            // Edge case - component was destroyed immediately,
            // most likely as a result of a binding initialising
            run_all(new_on_destroy);
        }
        component.$$.on_mount = [];
    });
    after_update.forEach(add_render_callback);
}
// Tears a component down: runs its on_destroy callbacks, destroys the
// fragment (optionally detaching DOM), and nulls internals so repeated
// destroys are no-ops.
function destroy_component(component, detaching) {
    const $$ = component.$$;
    if ($$.fragment !== null) {
        run_all($$.on_destroy);
        $$.fragment && $$.fragment.d(detaching);
        // TODO null out other refs, including component.$$ (but need to
        // preserve final state?)
        $$.on_destroy = $$.fragment = null;
        $$.ctx = [];
    }
}
// Marks ctx slot `i` of a component as dirty and schedules a flush. The
// dirty mask is an array of 31-bit words: (i / 31) selects the word,
// (1 << (i % 31)) the bit. A mask of [-1] means "not currently queued",
// so the first change enqueues the component and clears the mask.
function make_dirty(component, i) {
    if (component.$$.dirty[0] === -1) {
        dirty_components.push(component);
        schedule_update();
        component.$$.dirty.fill(0);
    }
    component.$$.dirty[(i / 31) | 0] |= (1 << (i % 31));
}
// Shared initialization path for compiled Svelte components: builds the
// $$ internals, runs the instance function to produce the ctx array,
// creates the fragment, and — when options.target is given — renders
// into the DOM (hydrating existing markup if options.hydrate is set).
function init(component, options, instance, create_fragment, not_equal, props, dirty = [-1]) {
    const parent_component = current_component;
    set_current_component(component);
    const prop_values = options.props || {};
    const $$ = component.$$ = {
        fragment: null,
        ctx: null,
        // state
        props,
        update: noop,
        not_equal,
        bound: blank_object(),
        // lifecycle
        on_mount: [],
        on_destroy: [],
        before_update: [],
        after_update: [],
        context: new Map(parent_component ? parent_component.$$.context : []),
        // everything else
        callbacks: blank_object(),
        dirty,
        skip_bound: false
    };
    let ready = false;
    // The instance function reports ctx changes through this callback;
    // changes made before `ready` (i.e. during the initial run) do not
    // mark the component dirty.
    $$.ctx = instance
        ? instance(component, prop_values, (i, ret, ...rest) => {
            const value = rest.length ? rest[0] : ret;
            if ($$.ctx && not_equal($$.ctx[i], $$.ctx[i] = value)) {
                if (!$$.skip_bound && $$.bound[i])
                    $$.bound[i](value);
                if (ready)
                    make_dirty(component, i);
            }
            return ret;
        })
        : [];
    $$.update();
    ready = true;
    run_all($$.before_update);
    // `false` as a special case of no DOM component
    $$.fragment = create_fragment ? create_fragment($$.ctx) : false;
    if (options.target) {
        if (options.hydrate) {
            const nodes = children(options.target);
            // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
            $$.fragment && $$.fragment.l(nodes);
            nodes.forEach(detach);
        }
        else {
            // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
            $$.fragment && $$.fragment.c();
        }
        if (options.intro)
            transition_in(component.$$.fragment);
        mount_component(component, options.target, options.anchor);
        flush();
    }
    set_current_component(parent_component);
}
// Base class of every compiled Svelte component; exposes the public
// $destroy / $on / $set API on top of the $$ internals built by init().
class SvelteComponent {
    // Destroys the component (detaching its DOM) and makes further
    // $destroy calls no-ops.
    $destroy() {
        destroy_component(this, 1);
        this.$destroy = noop;
    }
    // Subscribes `callback` to component events of `type`; returns an
    // unsubscribe function.
    $on(type, callback) {
        const callbacks = (this.$$.callbacks[type] || (this.$$.callbacks[type] = []));
        callbacks.push(callback);
        return () => {
            const index = callbacks.indexOf(callback);
            if (index !== -1)
                callbacks.splice(index, 1);
        };
    }
    // Applies new prop values; skip_bound suppresses re-notifying bound
    // parents about changes they themselves initiated.
    $set($$props) {
        if (this.$$set && !is_empty($$props)) {
            this.$$.skip_bound = true;
            this.$$set($$props);
            this.$$.skip_bound = false;
        }
    }
}
// Names of every FroalaCharts event the Svelte wrapper re-dispatches to
// its consumers (registered in onMount of instance(), removed in
// onDestroy).
var Events = [
    "beforeLinkedItemOpen",
    "linkedItemOpened",
    "beforeLinkedItemClose",
    "linkedItemClosed",
    "printReadyStateChange",
    "dataLoadRequestCompleted",
    "dataLoadError",
    "dataLoadCancelled",
    "dataLoadRequestCancelled",
    "dataUpdated",
    "dataUpdateCancelled",
    "dataLoadRequested",
    "beforeDataUpdate",
    "realTimeUpdateComplete",
    "chartCleared",
    "slicingEnd",
    "slicingStart",
    "entityRollOut",
    "entityRollOver",
    "entityClick",
    "connectorRollOver",
    "connectorRollOut",
    "connectorClick",
    "markerRollOver",
    "markerRollOut",
    "markerClick",
    "pageNavigated",
    "rotationEnd",
    "rotationStart",
    "centerLabelRollover",
    "centerLabelRollout",
    "centerLabelClick",
    "centerLabelChanged",
    "chartClick",
    "chartMouseMove",
    "chartRollOver",
    "chartRollOut",
    "backgroundLoaded",
    "backgroundLoadError",
    "legendItemClicked",
    "legendItemRollover",
    "legendItemRollout",
    "logoRollover",
    "logoRollout",
    "logoClick",
    "logoLoaded",
    "logoLoadError",
    "beforeExport",
    "exported",
    "exportCancelled",
    "beforePrint",
    "printComplete",
    "printCancelled",
    "dataLabelClick",
    "dataLabelRollOver",
    "dataLabelRollOut",
    "scrollStart",
    "scrollEnd",
    "onScroll",
    "zoomReset",
    "zoomedOut",
    "zoomedIn",
    "zoomed",
    "zoomModeChanged",
    "pinned",
    "dataRestored",
    "beforeDataSubmit",
    "dataSubmitError",
    "dataSubmitted",
    "dataSubmitCancelled",
    "chartUpdated",
    "nodeAdded",
    "nodeUpdated",
    "nodeDeleted",
    "connectorAdded",
    "connectorUpdated",
    "connectorDeleted",
    "labelAdded",
    "labelDeleted",
    "selectionRemoved",
    "selectionStart",
    "selectionEnd",
    "labelClick",
    "labelRollOver",
    "labelRollOut",
    "labelDragStart",
    "labelDragEnd",
    "dataplotDragStart",
    "dataplotDragEnd",
    "processClick",
    "processRollOver",
    "processRollOut",
    "categoryClick",
    "categoryRollOver",
    "categoryRollOut",
    "milestoneClick",
    "milestoneRollOver",
    "milestoneRollOut",
    "chartTypeChanged",
    "overlayButtonClick",
    "loaded",
    "rendered",
    "drawComplete",
    "renderComplete",
    "dataInvalid",
    "dataXMLInvalid",
    "dataLoaded",
    "noDataToDisplay",
    "legendPointerDragStart",
    "legendPointerDragStop",
    "legendRangeUpdated",
    "alertComplete",
    "realTimeUpdateError",
    "dataplotRollOver",
    "dataplotRollOut",
    "dataplotClick",
    "linkClicked",
    "beforeRender",
    "renderCancelled",
    "beforeResize",
    "resized",
    "resizeCancelled",
    "beforeDispose",
    "disposed",
    "disposeCancelled",
    "linkedChartInvoked",
    "beforeDrillDown",
    "drillDown",
    "beforeDrillUp",
    "drillUp",
    "drillDownCancelled",
    "drillUpCancelled"
];
// typeof results treated as atomic: cloneObject returns such values as-is.
const ATOMIC_DATA_TYPE = ['string', 'number', 'function', 'boolean', 'undefined'],
    // Alphabet used by createUniqueId.
    charSet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789',
    // True when width or height differ between two chart configs.
    isResizeRequired = (oldConfig, newConfig) => {
        let { width, height } = oldConfig,
            newWidth = newConfig.width,
            newHeight = newConfig.height;
        if (width !== newWidth || height !== newHeight) {
            return true;
        }
        return false;
    },
    // True when the chart type changed between two configs.
    isChartTypeChanged = (oldConfig, newConfig) => {
        return (oldConfig.type !== newConfig.type);
    },
    // Deep-copies arrays/plain objects; atomic values (and null) are
    // returned as-is. With purpose !== 'clone' (used for diffing), any
    // 'data' value carrying an internal _dataStore is replaced by '-' so
    // large data stores are excluded from JSON comparison.
    cloneObject = (arg, purpose = 'clone') => {
        if ((ATOMIC_DATA_TYPE.indexOf(typeof arg) > -1) || arg === null) {
            return arg;
        }
        if (Array.isArray(arg)) {
            let i,
                len,
                arr = [];
            for (i = 0, len = arg.length; i < len; i++) {
                arr.push(cloneObject(arg[i], purpose));
            }
            return arr;
        } else if (typeof arg === 'object') {
            let cloneObj = {},
                key;
            for (key in arg) {
                if (key === 'data') {
                    if (arg[key] && arg[key]._dataStore) {
                        cloneObj[key] = (purpose === 'clone') ? arg[key] : '-';
                    } else {
                        cloneObj[key] = cloneObject(arg[key], purpose);
                    }
                } else {
                    cloneObj[key] = cloneObject(arg[key], purpose);
                }
            }
            return cloneObj;
        }
    },
    // True when the two configs' dataSource differ, compared via JSON of
    // their diff-clones (data stores elided — see cloneObject).
    isDataSourceUpdated = (oldConfig, newConfig) => {
        return JSON.stringify(cloneObject(oldConfig.dataSource, 'diff')) !== JSON.stringify(cloneObject(newConfig.dataSource, 'diff'));
    },
    // Random alphanumeric id of the given length (default 20); used for
    // the wrapper div id. Not cryptographically secure (Math.random).
    createUniqueId = (length = 20) => {
        let i,
            result = '',
            charactersLength = charSet.length;
        for (i = 0; i < length; i++) {
            result += charSet.charAt(Math.floor(Math.random() * charactersLength));
        }
        return result;
    };
/* src/index.svelte generated by Svelte v3.24.1 */
// Fragment for the wrapper: a single <div> whose class/style/id come from
// ctx slots 0 (className), 1 (inlineStyle) and 2 (uniqueDivId). The patch
// function updates class/style when their dirty bits are set; the id is
// never patched (it is generated once).
function create_fragment(ctx) {
    let div;
    return {
        c() {
            div = element("div");
            attr(div, "class", /*className*/ ctx[0]);
            attr(div, "style", /*inlineStyle*/ ctx[1]);
            attr(div, "id", /*uniqueDivId*/ ctx[2]);
        },
        m(target, anchor) {
            insert(target, div, anchor);
        },
        p(ctx, [dirty]) {
            if (dirty & /*className*/ 1) {
                attr(div, "class", /*className*/ ctx[0]);
            }
            if (dirty & /*inlineStyle*/ 2) {
                attr(div, "style", /*inlineStyle*/ ctx[1]);
            }
        },
        i: noop,
        o: noop,
        d(detaching) {
            if (detaching) detach(div);
        }
    };
}
// Holds the FroalaCharts core constructor supplied via fcRoot().
let FroalaCharts;
/**
 * Stores the FroalaCharts core and registers optional modules with it.
 * A module that exposes getName/getType (or name/type) members is treated
 * as a dependency and registered via core.addDep; anything else is
 * assumed to be an initializer function and is invoked with the core.
 */
function fcRoot(core, ...modules) {
    FroalaCharts = core;
    for (const mod of modules) {
        const isDep = (mod.getName && mod.getType) || (mod.name && mod.type);
        if (isDep) {
            core.addDep(mod);
        } else {
            mod(core);
        }
    }
}
// Compiled <script> of src/index.svelte: manages the FroalaCharts chart
// behind the wrapper <div> (creation on mount, resize/type/data updates
// on re-render, disposal on destroy).
function instance($$self, $$props, $$invalidate) {
    let { id } = $$props,
        { className = "" } = $$props,
        { inlineStyle = "" } = $$props,
        { type } = $$props,
        { renderAt } = $$props,
        { width } = $$props,
        { height } = $$props,
        { dataFormat = "json" } = $$props,
        { dataSource } = $$props,
        { chart } = $$props;
    let oldChartConfig, chartConfig, eventListerners = [];
    const dispatch = createEventDispatcher(), uniqueDivId = createUniqueId();
    /**
     * Life cycle method sequence
     * beforeUpdate -> onMount -> afterUpdate (during intial render)
     * beforeUpdate -> afterUpdate (during re-render)
     */
    beforeUpdate(() => {
        // Always render into the generated wrapper div, overriding any
        // renderAt value passed by the consumer.
        $$invalidate(3, renderAt = uniqueDivId);
        chartConfig = {
            id,
            type,
            renderAt,
            width,
            height,
            dataFormat,
            dataSource: cloneObject(dataSource)
        };
    });
    onMount(() => {
        if (!FroalaCharts) {
            console.warn("Invalid FroalaCharts constructor");
        } else {
            FroalaCharts.ready(function () {
                $$invalidate(4, chart = new FroalaCharts(chartConfig));
                chart.render();
            });
            // Listeners are attached to the FroalaCharts global (not the
            // chart instance) and re-dispatched as Svelte events; they
            // are detached again in onDestroy.
            Events.forEach((event, index) => {
                eventListerners.push(e => {
                    dispatch(event, e);
                });
                FroalaCharts.addEventListener(event, eventListerners[index]);
            });
        }
    });
    afterUpdate(() => {
        // If not the first render
        if (oldChartConfig) {
            if (isResizeRequired(oldChartConfig, chartConfig)) {
                chart.resizeTo(chartConfig.width, chartConfig.height);
            }
            // A type change re-creates the chart with the full config;
            // otherwise only a changed dataSource is pushed.
            if (isChartTypeChanged(oldChartConfig, chartConfig)) {
                chart.chartType(chartConfig.type, chartConfig);
            } else if (isDataSourceUpdated(oldChartConfig, chartConfig)) {
                chart.setJSONData(chartConfig.dataSource);
            }
        }
        oldChartConfig = cloneObject(chartConfig);
    });
    onDestroy(() => {
        chart.dispose();
        Events.forEach((event, index) => {
            FroalaCharts.removeEventListener(event, eventListerners[index]);
        });
    });
    $$self.$$set = $$props => {
        if ("id" in $$props) $$invalidate(5, id = $$props.id);
        if ("className" in $$props) $$invalidate(0, className = $$props.className);
        if ("inlineStyle" in $$props) $$invalidate(1, inlineStyle = $$props.inlineStyle);
        if ("type" in $$props) $$invalidate(6, type = $$props.type);
        if ("renderAt" in $$props) $$invalidate(3, renderAt = $$props.renderAt);
        if ("width" in $$props) $$invalidate(7, width = $$props.width);
        if ("height" in $$props) $$invalidate(8, height = $$props.height);
        if ("dataFormat" in $$props) $$invalidate(9, dataFormat = $$props.dataFormat);
        if ("dataSource" in $$props) $$invalidate(10, dataSource = $$props.dataSource);
        if ("chart" in $$props) $$invalidate(4, chart = $$props.chart);
    };
    // ctx array: indices here must match the slot numbers used above and
    // the prop map in the Src constructor.
    return [
        className,
        inlineStyle,
        uniqueDivId,
        renderAt,
        chart,
        id,
        type,
        width,
        height,
        dataFormat,
        dataSource
    ];
}
class Src extends SvelteComponent {
constructor(options) {
super();
init(this, options, instance, create_fragment, safe_not_equal, {
id: 5,
className: 0,
inlineStyle: 1,
type: 6,
renderAt: 3,
width: 7,
height: 8,
dataFormat: 9,
dataSource: 10,
chart: 4
});
}
}
// Public API of the UMD bundle: the Svelte component (default export) and
// the fcRoot initializer used to supply the FroalaCharts core library.
exports.default = Src;
exports.fcRoot = fcRoot;
Object.defineProperty(exports, '__esModule', { value: true });
})));
|
package pepperlint
import (
"go/ast"
)
// RangeStmtRules is a list of RangeStmtRule values that can be validated
// as a group via ValidateRangeStmt.
type RangeStmtRules []RangeStmtRule
// ValidateRangeStmt will iterate through the list of array types and call
// ValidateRangeStmt. If an error is returned, then that error will be added
// to the batch of errors.
func (rules RangeStmtRules) ValidateRangeStmt(stmt *ast.RangeStmt) error {
batchError := NewBatchError()
for _, rule := range rules {
if err := rule.ValidateRangeStmt(stmt); err != nil {
batchError.Add(err)
}
}
return batchError.Return()
}
// RangeStmtRule is implemented by rules that validate an ast.RangeStmt
// (a `for ... range` statement), returning an error on violation.
type RangeStmtRule interface {
    ValidateRangeStmt(*ast.RangeStmt) error
}
|
<gh_stars>0
package models
import (
"github.com/astaxie/beego/orm"
"log"
"time"
)
// SdtBdiResult maps one row of the sdt_bdi_result table: a result table
// recorded for a business data indicator (BDI).
type SdtBdiResult struct {
    Id int `form:"id"` //primary key
    BdiId int `form:"bdiId"` //indicator id
    TableName string `form:"tableName"` //result table name
    TableLabel string `form:"tableLabel"` //result table display name
    BdiTypeId int `form:"bdiTypeId"` //indicator category id
    UserCode string `form:"userCode"` //creator's user id
    CreateTime time.Time `form:"createTime"` //creation time
    EditTime time.Time `form:"editTime"` //last-modified time
    // The fields below are not table columns; they are filled by joins
    // for datagrid display.
    BdiTypeName string
    BdiName string
}
// GetTableName returns the name of the database table backing this model.
func (u *SdtBdiResult) GetTableName() string {
    return "sdt_bdi_result"
}
// GetAllSdtBdiResult returns one page of result rows for the indicator
// identified by this.BdiId (joined with its type and indicator names),
// together with the total row count for that indicator.
// rows is the page size; page is the 1-based page number.
func (this *SdtBdiResult) GetAllSdtBdiResult(rows int, page int) ([]SdtBdiResult, int, error) {
	o := orm.NewOrm()
	sdtBdiResultSlice := make([]SdtBdiResult, 0)
	var querySql = "select r.*, t.type_name as bdi_type_name, bdi.bdi_name as bdi_name from sdt_bdi_result r " +
		" left join sdt_bdi_type t on r.bdi_type_id = t.id " +
		" left join sdt_bdi bdi on bdi.id = r.bdi_id " +
		" where r.bdi_id = ? limit ?, ? "
	// LIMIT offset, count: the count must be the page size. The original
	// passed page*rows, so each successive page grew larger.
	_, err := o.Raw(querySql, this.BdiId, (page-1)*rows, rows).QueryRows(&sdtBdiResultSlice)
	if err != nil {
		// Log and return instead of log.Fatal: Fatal calls os.Exit and
		// made the original `return nil, 0, err` unreachable.
		log.Printf("query on table %s failed: %v", this.GetTableName(), err)
		return nil, 0, err
	}
	// QueryRow needs a *int; the original passed &num where num was
	// already a *int (i.e. a **int).
	var num int
	var countSql = "select count(*) as counts from sdt_bdi_result where bdi_id = ? "
	err = o.Raw(countSql, this.BdiId).QueryRow(&num)
	if err != nil {
		log.Printf("query on table %s failed: %v", this.GetTableName(), err)
		return nil, 0, err
	}
	return sdtBdiResultSlice, num, nil
}
// GetSdtBdiResultById loads the row whose primary key equals this.Id into
// the receiver.
func (this *SdtBdiResult) GetSdtBdiResultById() error {
	o := orm.NewOrm()
	var querySql = " select r.* from sdt_bdi_result r where r.id = ? "
	if err := o.Raw(querySql, this.Id).QueryRow(this); err != nil {
		// Log and return instead of log.Fatal: Fatal calls os.Exit and
		// made the original `return err` unreachable.
		log.Printf("query on table %s failed: %v", this.GetTableName(), err)
		return err
	}
	return nil
}
// Add inserts this result row (bdi_id, table_name, table_label,
// bdi_type_id, user_code, create_time) inside a transaction.
func (this *SdtBdiResult) Add() error {
	o := orm.NewOrm()
	// Begin can fail (e.g. lost connection); the original ignored it.
	if err := o.Begin(); err != nil {
		return err
	}
	var insertSdtBdiResultSql = " insert into sdt_bdi_result ( " +
		" bdi_id, " +
		" table_name, " +
		" table_label, " +
		" bdi_type_id, " +
		" user_code, " +
		" create_time " +
		") " +
		" values (?, ?, ?, ?, ?, ?) "
	// NOTE(review): user_code is stored as the literal 0 rather than
	// this.UserCode — kept as-is; verify this is intentional.
	_, err := o.Raw(insertSdtBdiResultSql, this.BdiId, this.TableName, this.TableLabel, this.BdiTypeId, 0, time.Now()).Exec()
	if err != nil {
		o.Rollback()
		return err
	}
	// Surface commit failures instead of silently returning nil.
	return o.Commit()
}
// Update rewrites table_name, table_label, user_code and edit_time for
// the row whose primary key equals this.Id, inside a transaction.
func (this *SdtBdiResult) Update() error {
	o := orm.NewOrm()
	// Begin can fail (e.g. lost connection); the original ignored it.
	if err := o.Begin(); err != nil {
		return err
	}
	var updateSdtBdiResultSql = " update sdt_bdi_result " +
		" set table_name = ?, " +
		" table_label = ?, " +
		" user_code = ?, " +
		" edit_time = ? " +
		" where id = ?"
	// NOTE(review): user_code is overwritten with the literal 0 rather
	// than this.UserCode — kept as-is; verify this is intentional.
	_, err := o.Raw(updateSdtBdiResultSql, this.TableName, this.TableLabel, 0, time.Now(), this.Id).Exec()
	if err != nil {
		o.Rollback()
		return err
	}
	// Surface commit failures instead of silently returning nil.
	return o.Commit()
}
|
# Symlink the pre-trained word2vec binary into the current directory so it
# can be loaded by a fixed relative name.
ln -s ~/notebooks/home/ksong/GoogleNews-vectors-negative300.bin GoogleNews-vectors-negative300.bin
|
<reponame>duraspace/lambdora
package org.fcrepo.lambdora.service.aws;
import org.fcrepo.lambdora.service.api.Container;
import org.fcrepo.lambdora.service.api.FedoraResourceImpl;
import org.fcrepo.lambdora.service.dao.ResourceTripleDao;
import java.net.URI;
/**
* AWS-based concrete implementation of the Container interface.
*
* @author dbernstein
*/
public class ContainerImpl extends FedoraResourceImpl implements Container {
    /**
     * Constructs a container resource; all behavior is inherited from
     * {@link FedoraResourceImpl}.
     *
     * @param identifier URI identifying this container resource
     * @param dao        data-access object used to read and write the
     *                   resource's triples
     */
    public ContainerImpl(final URI identifier, final ResourceTripleDao dao) {
        super(identifier, dao);
    }
}
|
<reponame>xwf20050250/SmallUtils
package com.smallcake.utils;
import java.math.BigDecimal;
import java.math.MathContext;
/**
* MyApplication -- com.smallcake.utils
* Created by <NAME> on 2018/3/17 15:04.
*/
/**
 * Float arithmetic helpers that route through {@link BigDecimal}
 * (constructed from the float's decimal string form) to avoid binary
 * floating-point rounding surprises.
 */
public class FloatUtils {
    /**
     * Multiplies two floats with decimal precision.
     *
     * @param f1 first factor
     * @param f2 second factor
     * @return the decimal-exact product, narrowed back to float
     */
    public static float multiply(float f1, float f2) {
        BigDecimal b1 = new BigDecimal(Float.toString(f1));
        BigDecimal b2 = new BigDecimal(Float.toString(f2));
        return b1.multiply(b2).floatValue();
    }

    /**
     * Divides f1 by f2 with decimal precision.
     *
     * <p>Bug fix: the original called {@code BigDecimal.divide(BigDecimal)}
     * without a rounding mode, which throws {@link ArithmeticException}
     * for any non-terminating quotient such as 1/3. DECIMAL64 (16
     * significant digits) is more than enough precision for a float
     * result and makes every non-zero division succeed.
     *
     * @param f1 dividend
     * @param f2 divisor, must be non-zero
     * @return the quotient, narrowed back to float
     * @throws ArithmeticException if f2 is zero
     */
    public static float divide(float f1, float f2) {
        BigDecimal b1 = new BigDecimal(Float.toString(f1));
        BigDecimal b2 = new BigDecimal(Float.toString(f2));
        return b1.divide(b2, MathContext.DECIMAL64).floatValue();
    }
}
|
#!/usr/bin/env bash
# Dependencies:
# LGWebOSRemote: https://github.com/klattimer/LGWebOSRemote
# Recommended installation:
# Use pipx (https://github.com/pypa/pipx) to install the package system-wide:
# pipx install git+https://github.com/klattimer/LGWebOSRemote
# Required parameters:
# @raycast.schemaVersion 1
# @raycast.title Show Audio Volume
# @raycast.mode fullOutput
# Optional parameters:
# @raycast.packageName LG TV
# @raycast.icon images/lg.png
# Documentation:
# @raycast.author Jakub Lanski
# @raycast.authorURL https://github.com/jaklan
# @raycast.description Show the audio volume.
# Modified PATH to include pipx-installed packages. If you used a different installation method, adjust the variable properly to make the 'lgtv' package detectable.
PATH="$HOME/.local/bin:$PATH"
# Query the TV (connection name "tv") for its current audio volume.
lgtv tv audioVolume
|
import React from "react";
import Img from "gatsby-image";
import { useStaticQuery, graphql } from "gatsby";
const Spinner = ({ direction }) => {
const data = useStaticQuery(graphql`
query {
placeholderImage: file(relativePath: { eq: "spinner.png" }) {
childImageSharp {
fluid(quality: 90, maxWidth: 1000) {
...GatsbyImageSharpFluid
}
}
}
}
`);
return (
<div className={`spinner ${direction === "left" && "left"}`}>
<Img fluid={data.placeholderImage.childImageSharp.fluid} />
</div>
);
};
export default Spinner;
|
#!/usr/bin/with-contenv bashio
# Add-on init script: rewrites /config/qBittorrent/qBittorrent.conf from
# the Home Assistant add-on configuration before the daemon starts.
##########
# INIT #
##########
# Define preferences line
# All generated settings are inserted at $LINE, i.e. directly below the
# [Preferences] section header of qBittorrent.conf.
cd /config/qBittorrent/
LINE=$(sed -n '/Preferences/=' qBittorrent.conf)
LINE=$((LINE + 1))
##################
# Default folder #
##################
if bashio::config.has_value 'SavePath'; then
    DOWNLOADS=$(bashio::config 'SavePath')
    sed -i '/SavePath/d' qBittorrent.conf
    sed -i "$LINE i\Downloads\\\SavePath=$DOWNLOADS" qBittorrent.conf
    mkdir -p $DOWNLOADS || true
    chown -R abc:abc $DOWNLOADS || bashio::log.info "Error, please check default save folder configuration in addon"
    bashio::log.info "Downloads can be found in $DOWNLOADS"
else
    # No SavePath configured: fall back to /share/qBittorrent.
    mkdir -p /share/qBittorrent || true
    chown -R abc:abc /share/qBittorrent
fi
################
# Correct Port #
################
sed -i '/PortRangeMin/d' qBittorrent.conf
sed -i "$LINE i\Connection\\\PortRangeMin=6881" qBittorrent.conf
################
# SSL CONFIG #
################
# Clean data
sed -i '/HTTPS/d' qBittorrent.conf
bashio::config.require.ssl
if bashio::config.true 'ssl'; then
    bashio::log.info "ssl enabled. If webui don't work, disable ssl or check your certificate paths"
    #set variables
    CERTFILE=$(bashio::config 'certfile')
    KEYFILE=$(bashio::config 'keyfile')
    sed -i "$LINE i\WebUI\\\HTTPS\\\Enabled=True" qBittorrent.conf
    sed -i "$LINE i\WebUI\\\HTTPS\\\CertificatePath=/ssl/$CERTFILE" qBittorrent.conf
    sed -i "$LINE i\WebUI\\\HTTPS\\\KeyPath=/ssl/$KEYFILE" qBittorrent.conf
fi
################
# WHITELIST #
################
cd /config/qBittorrent/
if bashio::config.has_value 'whitelist'; then
    WHITELIST=$(bashio::config 'whitelist')
    #clean data
    sed -i '/AuthSubnetWhitelist/d' qBittorrent.conf
    sed -i "$LINE i\WebUI\\\AuthSubnetWhitelistEnabled=true" qBittorrent.conf
    sed -i "$LINE i\WebUI\\\AuthSubnetWhitelist=$WHITELIST" qBittorrent.conf
    bashio::log.info "Whitelisted subsets will not require a password : $WHITELIST"
fi
###############
# USERNAME #
###############
cd /config/qBittorrent/
if bashio::config.has_value 'Username'; then
    USERNAME=$(bashio::config 'Username')
    #clean data
    # NOTE(review): this delete pattern contains literal backslashes
    # ('WebUI\\\Username' inside single quotes) and likely never matches
    # the 'WebUI\Username=' line, unlike the bare-keyword deletes used in
    # the other sections — verify, or stale Username lines may accumulate.
    sed -i '/WebUI\\\Username/d' qBittorrent.conf
    #add data
    sed -i "$LINE i\WebUI\\\Username=$USERNAME" qBittorrent.conf
    bashio::log.info "WEBUI username set to $USERNAME"
fi
################
# Alternate UI #
################
# Clean data
sed -i '/AlternativeUIEnabled/d' qBittorrent.conf
sed -i '/RootFolder/d' qBittorrent.conf
rm -f -r /webui
mkdir -p /webui
chown abc:abc /webui
if bashio::config.has_value 'customUI'; then
    ### Variables
    CUSTOMUI=$(bashio::config 'customUI')
    bashio::log.info "Alternate UI enabled : $CUSTOMUI. If webui don't work, disable this option"
    ### Download WebUI
    case $CUSTOMUI in
        "vuetorrent")
            curl -s -S -J -L -o /webui/release.zip $(curl -s https://api.github.com/repos/WDaan/VueTorrent/releases/latest | grep -o "http.*vuetorrent.zip") >/dev/null
            ;;
        "qbit-matUI")
            curl -s -S -J -L -o /webui/release.zip $(curl -s https://api.github.com/repos/bill-ahmed/qbit-matUI/releases/latest | grep -o "http.*Unix.*.zip") >/dev/null
            ;;
        "qb-web")
            curl -s -S -J -L -o /webui/release.zip $(curl -s https://api.github.com/repos/CzBiX/qb-web/releases | grep -o "http.*qb-web-.*zip") >/dev/null
            ;;
    esac
    ### Install WebUI
    # The unzipped layout varies per UI; locate the directory containing
    # "public" and point qBittorrent's RootFolder at its parent.
    mkdir -p /webui/$CUSTOMUI
    unzip -q /webui/release.zip -d /webui/$CUSTOMUI
    rm /webui/*.zip
    CUSTOMUIDIR="$(dirname "$(find /webui/$CUSTOMUI -iname "public" -type d)")"
    sed -i "$LINE i\WebUI\\\AlternativeUIEnabled=true" /config/qBittorrent/qBittorrent.conf
    sed -i "$LINE i\WebUI\\\RootFolder=$CUSTOMUIDIR" /config/qBittorrent/qBittorrent.conf
fi
##########
# CLOSE #
##########
bashio::log.info "Default username/password : admin/adminadmin"
bashio::log.info "Configuration can be found in /config/qBittorrent"
|
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
public class MainActivity extends Activity {

    /**
     * Wires up the main screen: one button opens the note-creation screen
     * and one opens the folder list.
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        // Navigate to the "add note" screen.
        findViewById(R.id.addNoteButton).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View clicked) {
                startActivity(new Intent(MainActivity.this, AddNoteActivity.class));
            }
        });

        // Navigate to the folder list screen.
        findViewById(R.id.viewFoldersButton).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View clicked) {
                startActivity(new Intent(MainActivity.this, FolderListActivity.class));
            }
        });
    }
}
|
<reponame>softls/FogFrame-2.0
package at.ac.tuwien.infosys;
import at.ac.tuwien.infosys.database.IDatabaseService;
import at.ac.tuwien.infosys.fogactioncontrol.IFogActionControlService;
import at.ac.tuwien.infosys.model.*;
import at.ac.tuwien.infosys.reasoner.IReasonerService;
import at.ac.tuwien.infosys.sharedstorage.impl.SharedRedisService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.CrossOrigin;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import javax.annotation.PostConstruct;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Created by <NAME> on 27/10/2016.
 *
 * Serves a plain-HTML status page for a fog control node, summarizing the
 * local database, the shared Redis store, running service containers and
 * (on the root fog node) application assignments and evaluation results.
 */
@RestController
@CrossOrigin(origins = "*")
@RequestMapping("/")
public class StatusController {

    @Autowired
    private IDatabaseService dbService;

    @Autowired
    private SharedRedisService sharedRedisService;

    @Autowired
    private IReasonerService reasonerService;

    @Autowired
    private IFogActionControlService fogActionControlService;

    @PostConstruct
    public void init(){ }

    /**
     * requests the status of the Service
     *
     * @return a String with HTML code for the display of availability
     */
    @RequestMapping(method = RequestMethod.GET)
    public String getPage(){
        // Build via StringBuilder: the original concatenated immutable
        // Strings inside several loops, which is O(n^2) in page size.
        // The produced markup is identical.
        StringBuilder html = new StringBuilder();
        html.append("<html><head></head><body style='background: white; color: black; font-family: Verdana'>")
            .append("<h1>Fog Control Node Status-Page</h1>");
        // ---- local database ----
        html.append("<p> LOCAL DB ------------------------------------------------------------------------------------------------------------------------</p>");
        html.append("<ul>");
        html.append("<li><b>Id:</b> ").append(dbService.getDeviceId()).append("</li>");
        html.append("<li><b>Device Type:</b> ").append(dbService.getDeviceType()).append("</li>");
        Location loc = dbService.getLocation();
        html.append("<li><b>Location:</b> (").append(loc.getLatitude()).append("/").append(loc.getLongitude()).append(")</li>");
        html.append("<li><b>Utilization:</b> ").append(dbService.getUtilization()).append("</li>");
        html.append("<li><b>IP:Port:</b> ").append(dbService.getIp()).append(":").append(dbService.getPort()).append("</li>");
        html.append("<li><b>Cloud:</b> ").append(dbService.getCloudIp()).append(":").append(dbService.getCloudPort()).append("</li>");
        Fogdevice parent = dbService.getParent();
        if(parent != null)
            html.append("<li><b>Parent:</b> ").append(parent.getIp()).append(":").append(parent.getPort()).append(", Type:").append(parent.getType()).append("</li>");
        Fogdevice neighbor = dbService.getClosestNeighborFN();
        if(neighbor != null)
            html.append("<li><b>Closest neighbor:</b> ").append(neighbor.getIp()).append(":").append(neighbor.getPort()).append(", Type:").append(neighbor.getType()).append("</li>");
        html.append("<li><b>Service Types:</b> ").append(dbService.getServiceTypes()).append("</li>");
        html.append("<li><b>Child Devices:</b><ul>");
        for(Fogdevice c : dbService.getChildren()){
            html.append("<li>").append(c).append("</li>");
        }
        html.append("</ul></li></ul>");
        html.append("<hr/>");
        // ---- shared (Redis) key/value store ----
        html.append("<p> SHARED DB ------------------------------------------------------------------------------------------------------------------------</p>");
        html.append("<ul>");
        for (Map.Entry<String,String> entry : sharedRedisService.getAll().entrySet()) {
            html.append("<li>").append(entry.getKey()).append(": ").append(entry.getValue()).append("</li>");
        }
        html.append("</ul>");
        html.append("<hr/>");
        // ---- service containers started on this node ----
        html.append("<p> SERVICES ------------------------------------------------------------------------------------------------------------------------</p>");
        Set<DockerContainer> createdContainers = fogActionControlService.getCreatedContainers();
        html.append("<p>Count: ").append(createdContainers.size()).append("</p>");
        html.append("<ol>");
        for(DockerContainer c : createdContainers){
            html.append("<li>").append(c).append("</li>");
        }
        html.append("</ol>");
        // ---- sections rendered only on the root fog node ----
        if(reasonerService.isRootFN()) {
            html.append("<hr/>");
            html.append("<p> APPLICATIONS ------------------------------------------------------------------------------------------------------------------------</p>");
            List<ApplicationAssignment> asslist = reasonerService.getApplicationAssignments();
            html.append("<p>Count: ").append(asslist.size()).append("</p>");
            html.append("<ol>");
            for (ApplicationAssignment a : asslist) {
                html.append("<li>");
                html.append("<ul>");
                for(TaskAssignment t : a.getAssignedTasks()) {
                    html.append("<li>");
                    html.append(t.getFogdevice().getIp()).append(":").append(t.getFogdevice().getPort()).append(" | ")
                        .append(t.getTaskRequest().getServiceKey()).append(":").append(t.getTaskRequest().getServiceType()).append(" | ")
                        .append(t.getContainer().getContainerId().substring(0,6));
                    html.append("</li>");
                }
                html.append("</ul>");
                html.append("</li>");
            }
            html.append("</ol>");
            html.append("<hr/>");
            html.append("<p> EVALUATION ------------------------------------------------------------------------------------------------------------------------</p>");
            html.append("<p>").append(reasonerService.getEvaluationSummary()).append("</p>");
        }
        html.append("</body></html>");
        return html.toString();
    }
}
|
<filename>applications/physbam/physbam-lib/Public_Library/PhysBAM_Solids/PhysBAM_Deformables/Collisions_And_Interactions/TRIANGLE_REPULSIONS_AND_COLLISIONS_GEOMETRY.cpp
//#####################################################################
// Copyright 2002-2007, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>.
// This file is part of PhysBAM whose distribution is governed by the license contained in the accompanying file PHYSBAM_COPYRIGHT.txt.
//#####################################################################
// Class TRIANGLE_REPULSIONS_AND_COLLISIONS_GEOMETRY
//#####################################################################
#include <PhysBAM_Tools/Arrays/INDIRECT_ARRAY.h>
#include <PhysBAM_Tools/Log/LOG.h>
#include <PhysBAM_Geometry/Basic_Geometry/SEGMENT_2D.h>
#include <PhysBAM_Geometry/Basic_Geometry/TRIANGLE_3D.h>
#include <PhysBAM_Geometry/Implicit_Objects/IMPLICIT_OBJECT_TRANSFORMED.h>
#include <PhysBAM_Geometry/Spatial_Acceleration/SEGMENT_HIERARCHY.h>
#include <PhysBAM_Geometry/Spatial_Acceleration/TRIANGLE_HIERARCHY.h>
#include <PhysBAM_Geometry/Topology_Based_Geometry/HEXAHEDRALIZED_VOLUME.h>
#include <PhysBAM_Geometry/Topology_Based_Geometry/TETRAHEDRALIZED_VOLUME.h>
#include <PhysBAM_Solids/PhysBAM_Deformables/Bindings/LINEAR_BINDING.h>
#include <PhysBAM_Solids/PhysBAM_Deformables/Collisions_And_Interactions/INTERSECTING_PAIRS_VISITOR.h>
#include <PhysBAM_Solids/PhysBAM_Deformables/Collisions_And_Interactions/STRUCTURE_INTERACTION_GEOMETRY.h>
#include <PhysBAM_Solids/PhysBAM_Deformables/Collisions_And_Interactions/TRIANGLE_COLLISION_PARAMETERS.h>
#include <PhysBAM_Solids/PhysBAM_Deformables/Collisions_And_Interactions/TRIANGLE_REPULSIONS_AND_COLLISIONS_GEOMETRY.h>
#include <PhysBAM_Solids/PhysBAM_Deformables/Deformable_Objects/DEFORMABLE_BODY_COLLECTION.h>
#include <PhysBAM_Solids/PhysBAM_Deformables/Fracture/EMBEDDING.h>
#include <PhysBAM_Solids/PhysBAM_Deformables/Parallel_Computation/MPI_SOLIDS.h>
using namespace PhysBAM;
//#####################################################################
// Constructor
//#####################################################################
template<class TV> TRIANGLE_REPULSIONS_AND_COLLISIONS_GEOMETRY<TV>::
TRIANGLE_REPULSIONS_AND_COLLISIONS_GEOMETRY(DEFORMABLE_BODY_COLLECTION<TV>& deformable_body_collection)
:mpi_solids(0),deformable_body_collection(deformable_body_collection),mass_modifier(0)
{
// set parameters
Allow_Intersections(false);Set_Allow_Intersections_Tolerance();
// output
Output_Number_Checked(false);
}
//#####################################################################
// Destructor
//#####################################################################
// Owns the per-structure interaction geometries; delete them all.
template<class TV> TRIANGLE_REPULSIONS_AND_COLLISIONS_GEOMETRY<TV>::
~TRIANGLE_REPULSIONS_AND_COLLISIONS_GEOMETRY()
{
    structure_geometries.Delete_Pointers_And_Clean_Memory();
}
//#####################################################################
// Function Initialize
//#####################################################################
// Builds the collision geometry for all registered structures and copies
// the settings from the given collision parameters; optionally saves the
// current state as the self-collision-free reference state.
template<class TV> void TRIANGLE_REPULSIONS_AND_COLLISIONS_GEOMETRY<TV>::
Initialize(TRIANGLE_COLLISION_PARAMETERS<TV>& triangle_collision_parameters)
{
    LOG::Stat("self collision structures",structures.m);
    Build_Collision_Geometry();
    Allow_Intersections(triangle_collision_parameters.allow_intersections);
    Set_Allow_Intersections_Tolerance(triangle_collision_parameters.allow_intersections_tolerance);
    Set_Small_Number(triangle_collision_parameters.collisions_small_number);
    Output_Number_Checked(triangle_collision_parameters.collisions_output_number_checked);
    Set_Gauss_Jacobi(triangle_collision_parameters.use_gauss_jacobi);
    if(triangle_collision_parameters.collisions_disable_repulsions_based_on_proximity_factor)
        Save_Self_Collision_Free_State();
}
//#####################################################################
// Function Build_Collision_Geometry
//#####################################################################
// Rebuilds the per-structure interaction geometries from scratch and
// enumerates every unordered structure pair (including each structure
// with itself) as an interacting pair; clears cached intersecting pairs.
template<class TV> void TRIANGLE_REPULSIONS_AND_COLLISIONS_GEOMETRY<TV>::
Build_Collision_Geometry()
{
    structure_geometries.Delete_Pointers_And_Clean_Memory();
    structure_geometries.Resize(structures.m);
    interacting_structure_pairs.Remove_All();
    for(int k=1;k<=structures.m;k++){
        structure_geometries(k)=new STRUCTURE_INTERACTION_GEOMETRY<TV>(deformable_body_collection.particles);
        structure_geometries(k)->Build_Collision_Geometry(*structures(k));}
    for(int i=1;i<=structures.m;i++) for(int j=i;j<=structures.m;j++) interacting_structure_pairs.Append(VECTOR<int,2>(i,j));
    intersecting_point_face_pairs.Remove_All();
    intersecting_edge_edge_pairs.Remove_All();
}
//#####################################################################
// Function Build_Topological_Structure_Of_Hierarchies
//#####################################################################
// Builds the spatial-acceleration hierarchies of every structure
// geometry; when running under MPI, also refreshes each geometry's
// processor masks from the solids partition.
template<class TV> void TRIANGLE_REPULSIONS_AND_COLLISIONS_GEOMETRY<TV>::
Build_Topological_Structure_Of_Hierarchies()
{
    for(int k=1;k<=structure_geometries.m;k++){
        structure_geometries(k)->Build_Topological_Structure_Of_Hierarchies();
        if(mpi_solids) structure_geometries(k)->Update_Processor_Masks(mpi_solids->Partition(),
            mpi_solids->partition_id_from_particle_index);}
}
//#####################################################################
// Function Allow_Intersections
//#####################################################################
template<class TV> void TRIANGLE_REPULSIONS_AND_COLLISIONS_GEOMETRY<TV>::
Allow_Intersections(const bool allow_intersections_input)
{
    allow_intersections=allow_intersections_input;
    // Intersection handling needs element-edge adjacency on each triangulated
    // surface mesh; initialize it lazily where it is still missing.
    if(allow_intersections)
        for(int k=1;k<=structure_geometries.m;k++)
            if(!structure_geometries(k)->triangulated_surface->mesh.element_edges) structure_geometries(k)->triangulated_surface->mesh.Initialize_Element_Edges();
}
//#####################################################################
// Function Check_For_Intersection
//#####################################################################
// 1D overload (selected by VECTOR<T,1>): self-intersection tests are not
// defined for point-only geometry, so this always aborts.
template<class T,class TV> bool Check_For_Intersection_Helper(const ARRAY<STRUCTURE_INTERACTION_GEOMETRY<TV>*>& structure_geometries,const VECTOR<int,2>& pair,
    ARRAY_VIEW<const VECTOR<T,1> > X,const bool grow_thickness_to_find_first_self_intersection,const T threshold)
{PHYSBAM_NOT_IMPLEMENTED();}
// 2D overload: tests the two structures' segmented curves against each other.
// Returns true (after logging) if any segment-segment intersection lies within
// the given threshold; both structures must actually have a segmented curve.
template<class T,class TV> bool Check_For_Intersection_Helper(const ARRAY<STRUCTURE_INTERACTION_GEOMETRY<TV>*>& structure_geometries,const VECTOR<int,2>& pair,
    ARRAY_VIEW<const VECTOR<T,2> > X,const bool grow_thickness_to_find_first_self_intersection,const T threshold)
{
    SEGMENTED_CURVE_2D<T>* segmented_curve1=structure_geometries(pair[1])->segmented_curve;
    SEGMENTED_CURVE_2D<T>* segmented_curve2=structure_geometries(pair[2])->segmented_curve;
    if(segmented_curve1 && segmented_curve2){
        if(segmented_curve1->Segment_Segment_Intersection(segmented_curve2->mesh,X,threshold)){
            {std::stringstream ss;ss<<"intersections found, pair = "<<pair<<", threshold = "<<threshold<<std::endl;LOG::filecout(ss.str());}return true;}
        // Diagnostic mode: search with growing thickness (up to 10 attempts) to
        // locate and report the first intersection rather than just detect one.
        else if(grow_thickness_to_find_first_self_intersection) segmented_curve1->Find_First_Segment_Segment_Intersection(segmented_curve2->mesh,X,threshold,10);}
    return false;
}
// 3D overload: tests each structure's segmented curve (edges) against the
// other structure's triangulated surface, in both directions (i and 3-i);
// the second direction is skipped for a self-pair (pair[1]==pair[2]).
// Logs every intersecting segment/triangle pair before returning true.
template<class T,class TV> bool Check_For_Intersection_Helper(const ARRAY<STRUCTURE_INTERACTION_GEOMETRY<TV>*>& structure_geometries,const VECTOR<int,2>& pair,
    ARRAY_VIEW<const VECTOR<T,3> > X,const bool grow_thickness_to_find_first_self_intersection,const T threshold)
{
    ARRAY<VECTOR<int,2> > intersecting_segment_triangle_pairs;
    for(int i=1;i<=2;i++){if(i==2 && pair[1]==pair[2]) break;
        SEGMENTED_CURVE<TV>* segmented_curve=structure_geometries(pair[i])->segmented_curve;
        TRIANGULATED_SURFACE<T>* triangulated_surface=structure_geometries(pair[3-i])->triangulated_surface;
        if(segmented_curve && triangulated_surface){
            if(triangulated_surface->Segment_Triangle_Intersection(segmented_curve->mesh,X,threshold,true,&intersecting_segment_triangle_pairs)){
                {std::stringstream ss;ss<<"intersections found, pair = "<<pair<<", threshold = "<<threshold<<std::endl;LOG::filecout(ss.str());}
                // Report each offending segment/triangle with its mesh nodes.
                for(int k=1;k<=intersecting_segment_triangle_pairs.m;k++){
                    int s,t;intersecting_segment_triangle_pairs(k).Get(s,t);
                    {std::stringstream ss;ss<<"segment "<<s<<", triangle "<<t<<", segment nodes = "<<segmented_curve->mesh.elements(s)<<", triangle nodes = "<<triangulated_surface->mesh.elements(t)<<std::endl;LOG::filecout(ss.str());}}
                return true;}
            // Diagnostic mode: grow the thickness (up to 10 attempts) to find
            // and report the first intersection.
            else if(grow_thickness_to_find_first_self_intersection) triangulated_surface->Find_First_Segment_Triangle_Intersection(segmented_curve->mesh,X,threshold,10);}}
    return false;
}
// Scans all interacting structure pairs for a self-intersection, stopping at
// the first one found. Optionally reports the offending pair through
// interaction_pair. Returns false immediately when intersections are allowed.
template<class TV> bool TRIANGLE_REPULSIONS_AND_COLLISIONS_GEOMETRY<TV>::
Check_For_Intersection(const bool grow_thickness_to_find_first_self_intersection,const T thickness,VECTOR<int,2>* interaction_pair) const
{
    if(allow_intersections) return false;
    LOG::SCOPE scope("checking for self intersection");
    ARRAY_VIEW<const TV> X(deformable_body_collection.particles.X);
    T threshold=small_number;if(thickness) threshold=thickness; // caller-supplied tolerance overrides the default
    for(int k=1;k<=interacting_structure_pairs.m;k++){const VECTOR<int,2>& pair=interacting_structure_pairs(k);
        if(Check_For_Intersection_Helper(structure_geometries,pair,X,grow_thickness_to_find_first_self_intersection,threshold)){
            if(interaction_pair) *interaction_pair=pair;
            return true;}}
    return false;
}
// Variant that collects ALL intersecting structure pairs instead of stopping
// at the first; returns true when at least one pair was appended.
template <class TV> bool TRIANGLE_REPULSIONS_AND_COLLISIONS_GEOMETRY<TV>::
Check_For_Intersection(const bool grow_thickness_to_find_first_self_intersection,const T thickness,ARRAY<VECTOR<int,2> >& interaction_pairs) const
{
    if(allow_intersections) return false;
    LOG::SCOPE scope("checking for self intersection");
    ARRAY_VIEW<const TV> X(deformable_body_collection.particles.X);
    T threshold=small_number;if(thickness) threshold=thickness; // caller-supplied tolerance overrides the default
    for(int k=1;k<=interacting_structure_pairs.m;k++){const VECTOR<int,2>& pair=interacting_structure_pairs(k);
        if(Check_For_Intersection_Helper(structure_geometries,pair,X,grow_thickness_to_find_first_self_intersection,threshold)){
            interaction_pairs.Append(pair);}}
    return interaction_pairs.Size()>0;
}
//#####################################################################
// Function Save_Self_Collision_Free_State
//#####################################################################
template<class TV> void TRIANGLE_REPULSIONS_AND_COLLISIONS_GEOMETRY<TV>::
Save_Self_Collision_Free_State() // assumes mass does not change
{
    // Snapshot positions and velocities of a configuration known to be free of
    // self-collisions, so it can later be restored by
    // Restore_Self_Collision_Free_State().
    PARTICLES<TV>& full_particles=deformable_body_collection.particles;
    X_self_collision_free=full_particles.X;
    V_self_collision_free=full_particles.V;
}
//#####################################################################
// Function Restore_Self_Collision_Free_State
//#####################################################################
template<class TV> void TRIANGLE_REPULSIONS_AND_COLLISIONS_GEOMETRY<TV>::
Restore_Self_Collision_Free_State()
{
    // Roll particle positions and velocities back to the snapshot taken by
    // Save_Self_Collision_Free_State().
    PARTICLES<TV>& full_particles=deformable_body_collection.particles;
    full_particles.X=X_self_collision_free;
    full_particles.V=V_self_collision_free;
}
//#####################################################################
// Function Compute_Intersecting_Segment_Face_Pairs
//#####################################################################
// 1D variant selected via the IF<INTS_EQUAL<d,1>> SFINAE parameter type:
// computing intersecting segment/face pairs is undefined in 1D, so abort.
template<class TV> template<class S> void TRIANGLE_REPULSIONS_AND_COLLISIONS_GEOMETRY<TV>::
Compute_Intersecting_Pairs_Helper(typename IF<INTS_EQUAL<d,1>::value,const TV*,UNUSABLE>::TYPE input)
{
    PHYSBAM_ASSERT(d==1);
    PHYSBAM_NOT_IMPLEMENTED();
}
// 2D/3D variant: records, for every interacting structure pair, the
// point-face and edge-edge pairs that intersect in the saved self-collision-
// free state (so later collision handling can tolerate these pre-existing
// intersections). Uses the bounding-box hierarchies for broad-phase culling.
template<class TV> template<class S> void TRIANGLE_REPULSIONS_AND_COLLISIONS_GEOMETRY<TV>::
Compute_Intersecting_Pairs_Helper(typename IF<NOT<INTS_EQUAL<d,1>::value>::value,const TV*,UNUSABLE>::TYPE input)
{
    {std::stringstream ss;ss<<"allowing intersections!!!"<<std::endl;LOG::filecout(ss.str());}
    intersecting_point_face_pairs.Remove_All();intersecting_edge_edge_pairs.Remove_All();
    // update hierarchies to the saved self-collision-free positions
    for(int k=1;k<=structure_geometries.m;k++){
        STRUCTURE_INTERACTION_GEOMETRY<TV>& structure=*structure_geometries(k);
        if(structure.Face_Mesh_Object()) structure.Face_Hierarchy().Update_Boxes(X_self_collision_free);
        if(d==3 && structure.segmented_curve) structure.segmented_curve->hierarchy->Update_Boxes(X_self_collision_free);}
    // find intersecting pairs: segments of one structure vs faces of the other,
    // in both directions; the reverse direction is skipped in 2D and for self-pairs
    for(int pair_index=1;pair_index<=interacting_structure_pairs.m;pair_index++){const VECTOR<int,2>& pair=interacting_structure_pairs(pair_index);
        for(int i=1;i<=2;i++){if(i==2 && (d==2 || pair[1]==pair[2])) break;
            STRUCTURE_INTERACTION_GEOMETRY<TV>& segment_structure=*structure_geometries(pair[i]);
            STRUCTURE_INTERACTION_GEOMETRY<TV>& face_structure=*structure_geometries(pair[3-i]);
            if(!segment_structure.segmented_curve || !face_structure.Face_Mesh_Object()) continue;
            int count=0;
            // The visitor appends intersecting pairs and counts them via `count`.
            INTERSECTING_PAIRS_VISITOR<TV> visitor(intersecting_point_face_pairs,intersecting_edge_edge_pairs,segment_structure,face_structure,X_self_collision_free,
                allow_intersections_tolerance,count);
            if(mpi_solids){
                BOX_VISITOR_MPI<INTERSECTING_PAIRS_VISITOR<TV> > mpi_visitor(visitor,segment_structure.segmented_curve_processor_masks,face_structure.Face_Processor_Masks());
                segment_structure.segmented_curve->hierarchy->Intersection_List(face_structure.Face_Hierarchy(),mpi_visitor,allow_intersections_tolerance);}
            else segment_structure.segmented_curve->hierarchy->Intersection_List(face_structure.Face_Hierarchy(),visitor,allow_intersections_tolerance);
            if(count) {std::stringstream ss;ss<<"pair "<<pair<<" has "<<count<<" intersecting segment triangle pairs"<<std::endl;LOG::filecout(ss.str());}}} // TODO: correct for mpi
    // synchronize if necessary so every rank sees the full pair lists
    if(mpi_solids) mpi_solids->All_Gather_Intersecting_Pairs(intersecting_point_face_pairs,intersecting_edge_edge_pairs);
}
template<class TV> void TRIANGLE_REPULSIONS_AND_COLLISIONS_GEOMETRY<TV>::
Compute_Intersecting_Segment_Face_Pairs()
{
    // Dispatch to the dimension-appropriate helper overload; the dummy null
    // pointer argument exists only to drive overload selection.
    Compute_Intersecting_Pairs_Helper<void>((TV*)NULL);
}
//####################################################################
// Explicit template instantiations for all supported scalar/dimension combos.
#define INSTANTIATION_HELPER(T,d) \
    template class TRIANGLE_REPULSIONS_AND_COLLISIONS_GEOMETRY<VECTOR<T,d> >;
INSTANTIATION_HELPER(float,1);
INSTANTIATION_HELPER(float,2);
INSTANTIATION_HELPER(float,3);
#ifndef COMPILE_WITHOUT_DOUBLE_SUPPORT
INSTANTIATION_HELPER(double,1);
INSTANTIATION_HELPER(double,2);
INSTANTIATION_HELPER(double,3);
#endif
|
<reponame>aoxiangflysky/onedata
"""Package with performance tests of onedata.
"""
__author__ = "<NAME>"
__copyright__ = "Copyright (C) 2016 ACK CYFRONET AGH"
__license__ = "This software is released under the MIT license cited in " \
"LICENSE.txt"
|
# Copyright (c) 2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Shapes outbound traffic on ${IF}: protocol traffic on port 7933 to hosts
# outside the local networks is marked by iptables/ip6tables and throttled to
# ${LIMIT} by the tc htb class 1:11; everything else uses the full link rate.
export LC_ALL=C

#network interface on which to limit traffic
IF="eth0"
#limit of the network interface in question
LINKCEIL="1gbit"
#limit outbound Bitcoin protocol traffic to this rate
LIMIT="160kbit"
#defines the IPv4 address space for which you wish to disable rate limiting
LOCALNET_V4="192.168.0.0/16"
#defines the IPv6 address space for which you wish to disable rate limiting
LOCALNET_V6="fe80::/10"

#delete existing rules (errors if no qdisc is installed yet; harmless)
tc qdisc del dev "${IF}" root

#add root class
tc qdisc add dev "${IF}" root handle 1: htb default 10

#add parent class
tc class add dev "${IF}" parent 1: classid 1:1 htb rate "${LINKCEIL}" ceil "${LINKCEIL}"

#add our two classes. one unlimited, another limited
tc class add dev "${IF}" parent 1:1 classid 1:10 htb rate "${LINKCEIL}" ceil "${LINKCEIL}" prio 0
tc class add dev "${IF}" parent 1:1 classid 1:11 htb rate "${LIMIT}" ceil "${LIMIT}" prio 1

#add handles to our classes so packets marked with <x> go into the class with "... handle <x> fw ..."
tc filter add dev "${IF}" parent 1: protocol ip prio 1 handle 1 fw classid 1:10
tc filter add dev "${IF}" parent 1: protocol ip prio 2 handle 2 fw classid 1:11
if [ -n "${LOCALNET_V6}" ] ; then
    # v6 cannot have the same priority value as v4
    tc filter add dev "${IF}" parent 1: protocol ipv6 prio 3 handle 1 fw classid 1:10
    tc filter add dev "${IF}" parent 1: protocol ipv6 prio 4 handle 2 fw classid 1:11
fi

#delete any existing rules
#disable for now
#ret=0
#while [ $ret -eq 0 ]; do
#	iptables -t mangle -D OUTPUT 1
#	ret=$?
#done

#limit outgoing traffic to and from port 7933. but not when dealing with a host on the local network
#   (defined by $LOCALNET_V4 and $LOCALNET_V6)
#   --set-mark marks packages matching these criteria with the number "2" (v4)
#   --set-mark marks packages matching these criteria with the number "4" (v6)
#   these packets are filtered by the tc filter with "handle 2"
#   this filter sends the packages into the 1:11 class, and this class is limited to ${LIMIT}
iptables -t mangle -A OUTPUT -p tcp -m tcp --dport 7933 ! -d "${LOCALNET_V4}" -j MARK --set-mark 0x2
iptables -t mangle -A OUTPUT -p tcp -m tcp --sport 7933 ! -d "${LOCALNET_V4}" -j MARK --set-mark 0x2
if [ -n "${LOCALNET_V6}" ] ; then
    ip6tables -t mangle -A OUTPUT -p tcp -m tcp --dport 7933 ! -d "${LOCALNET_V6}" -j MARK --set-mark 0x4
    ip6tables -t mangle -A OUTPUT -p tcp -m tcp --sport 7933 ! -d "${LOCALNET_V6}" -j MARK --set-mark 0x4
fi
|
#!/bin/bash
# Download the example sources and extract the data files we need into data/.
# Abort on any error (failed download, missing archive member, ...) so we do
# not continue with a half-built tree or silently move non-existent files.
set -euo pipefail

mkdir -p data
mkdir -p tmp

# Download source file (skipped when a previous download is still present).
if [ ! -f tmp/source.zip ]; then
    wget -O "tmp/source.zip" "http://hanbit.co.kr/lib/examFileDown.php?hed_idx=3801"
fi

cd tmp
# -o: overwrite without prompting in case a previous extraction was interrupted.
unzip -o "source.zip"

# Move data files into data directory.
data_dir="../data"
mkdir -p "${data_dir}"
mv "2.MovAvgFilter/SonarAlt.mat" "${data_dir}"
mv "10.TrackKalman/Img" "${data_dir}"
mv "11.ARS/ArsAccel.mat" "${data_dir}"
mv "11.ARS/ArsGyro.mat" "${data_dir}"

# Remove tmp directory.
cd ..
rm -rf tmp
|
/***************************************************************************
* (C) Copyright 2007-2012 - Marauroa *
***************************************************************************
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
package marauroa.server.db;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import marauroa.common.Log4J;
import marauroa.common.Logger;
import marauroa.common.Pair;
import marauroa.common.Utility;
import marauroa.server.db.adapter.DatabaseAdapter;
/**
* Connection Pool.
*
* @author hendrik
*/
public class TransactionPool {
    private static Logger logger = Log4J.getLogger(TransactionPool.class);

    /** the globally registered pool, if any */
    private static TransactionPool dbtransactionPool = null;

    private AdapterFactory factory = null;

    /** monitor guarding the pool lists; waiting threads park on it */
    private final Object wait = new Object();

    private Properties params = new Properties();

    /** minimum number of pooled transactions to keep alive */
    private int count = 10;

    /** all transactions known to this pool (reserved and free) */
    private final List<DBTransaction> dbtransactions = Collections.synchronizedList(new LinkedList<DBTransaction>());

    /** transactions currently available for reservation */
    private final List<DBTransaction> freeDBTransactions = Collections.synchronizedList(new LinkedList<DBTransaction>());

    /** transactions reserved by the current thread */
    private final ThreadLocal<Set<DBTransaction>> threadTransactions = new ThreadLocal<Set<DBTransaction>>();

    /** thread name and stacktrace of the code which reserved each transaction, for diagnostics */
    private final Map<DBTransaction, Pair<String, StackTraceElement[]>> callers;

    private boolean closed = false;

    /**
     * creates a DBTransactionPool
     *
     * @param configuration configuration; the "count" property defines the pool size (default 4)
     */
    public TransactionPool(Properties configuration) {
        params = configuration;
        count = Integer.parseInt(params.getProperty("count", "4"));
        callers = Collections.synchronizedMap(new HashMap<DBTransaction, Pair<String, StackTraceElement[]>>());
        factory = new AdapterFactory(configuration);
    }

    /**
     * registers this TransactionPool as the global one.
     */
    public void registerGlobally() {
        registerGlobal(this);
    }

    /**
     * registers a TransactionPool as the global one.
     *
     * @param transactionPool the pool to register globally
     */
    private static void registerGlobal(TransactionPool transactionPool) {
        TransactionPool.dbtransactionPool = transactionPool;
    }

    /**
     * gets the TransactionPool
     *
     * @return TransactionPool
     */
    public static synchronized TransactionPool get() {
        return dbtransactionPool;
    }

    /**
     * tops the pool up to the configured minimum size, retrying every second
     * while the database adapter cannot be created (e. g. database down).
     */
    private void createMinimumDBTransactions() {
        synchronized (wait) {
            while (dbtransactions.size() < count) {
                DatabaseAdapter adapter = factory.create();
                if (adapter == null) {
                    // database unreachable; back off before retrying
                    Utility.sleep(1000);
                    continue;
                }
                DBTransaction dbtransaction = new DBTransaction(adapter);
                dbtransactions.add(dbtransaction);
                freeDBTransactions.add(dbtransaction);
            }
        }
    }

    /**
     * starts a transaction and marks it as reserved
     *
     * @return DBTransaction
     */
    public DBTransaction beginWork() {
        if (closed) {
            throw new RuntimeException("transaction pool has been closed");
        }
        DBTransaction dbtransaction = null;
        while (dbtransaction == null) {
            synchronized (wait) {
                createMinimumDBTransactions();
                if (freeDBTransactions.size() == 0) {
                    try {
                        // pool exhausted; block until another thread frees a transaction
                        logger.info("Waiting for a DBTransaction", new Throwable());
                        dumpOpenTransactions();
                        wait.wait();
                    } catch (InterruptedException e) {
                        logger.error(e, e);
                    }
                } else {
                    dbtransaction = freeDBTransactions.remove(0);
                    addThreadTransaction(dbtransaction);
                }
                // TODO: check that the connection is still alive
            }
        }
        logger.debug("getDBTransaction: " + dbtransaction, new Throwable());
        // remember who reserved the transaction so hangs can be diagnosed
        Thread currentThread = Thread.currentThread();
        callers.put(dbtransaction, new Pair<String, StackTraceElement[]>(currentThread.getName(), currentThread.getStackTrace()));
        dbtransaction.setThread(Thread.currentThread());
        return dbtransaction;
    }

    /**
     * dumps a list of open transactions with their threads and stacktraces to the log file.
     */
    public void dumpOpenTransactions() {
        for (Pair<String, StackTraceElement[]> pair : callers.values()) {
            logger.warn(" * " + pair.first() + " " + Arrays.asList(pair.second()));
        }
    }

    /**
     * commits this transaction and frees its reservation
     *
     * @param dbtransaction transaction
     * @throws SQLException in case of an database error
     */
    public void commit(DBTransaction dbtransaction) throws SQLException {
        try {
            dbtransaction.commit();
        } catch (SQLException e) {
            // a failed commit leaves the connection in an unknown state; discard it
            killTransaction(dbtransaction);
            throw e;
        }
        freeDBTransaction(dbtransaction);
    }

    /**
     * rolls this transaction back and frees the reservation
     *
     * @param dbtransaction transaction
     */
    public void rollback(DBTransaction dbtransaction) {
        try {
            dbtransaction.rollback();
            freeDBTransaction(dbtransaction);
        } catch (SQLException e) {
            killTransaction(dbtransaction);
            logger.warn(e, e);
        }
    }

    /**
     * returns a transaction to the free list and wakes up waiting threads.
     *
     * @param dbtransaction transaction to release
     */
    private void freeDBTransaction(DBTransaction dbtransaction) {
        logger.debug("freeDBTransaction: " + dbtransaction, new Throwable());
        synchronized (wait) {
            // The transaction may be freed by a different thread than the one
            // which reserved it; that thread has no thread-local set yet.
            Set<DBTransaction> set = threadTransactions.get();
            if (set != null) {
                set.remove(dbtransaction);
            }
            callers.remove(dbtransaction);
            dbtransaction.setThread(null);

            if (dbtransactions.contains(dbtransaction)) {
                freeDBTransactions.add(dbtransaction);
            } else {
                logger.error("Unknown DBTransaction " + dbtransaction + " was not freed.", new Throwable());
            }
            wait.notifyAll();
        }
    }

    /**
     * records a transaction as reserved by the current thread.
     *
     * @param dbtransaction reserved transaction
     */
    private void addThreadTransaction(DBTransaction dbtransaction) {
        Set<DBTransaction> set = threadTransactions.get();
        if (set == null) {
            set = new HashSet<DBTransaction>();
            threadTransactions.set(set);
        }
        set.add(dbtransaction);
    }

    /**
     * Kicks all transactions which were started in the current thread
     */
    public void kickHangingTransactionsOfThisThread() {
        Set<DBTransaction> set = threadTransactions.get();
        if ((set == null) || set.isEmpty()) {
            return;
        }

        synchronized (wait) {
            for (DBTransaction dbtransaction : set) {
                killTransaction(dbtransaction);
                logger.error("Hanging transaction " + dbtransaction + " was kicked.");
            }
            wait.notifyAll();
        }
        set.clear();
    }

    /**
     * removes transactions from the pool that are not connected to the database anymore
     */
    public void refreshAvailableTransaction() {
        synchronized (wait) {
            // iterate over a snapshot because killTransaction() modifies freeDBTransactions
            for (DBTransaction dbtransaction : new HashSet<DBTransaction>(freeDBTransactions)) {
                try {
                    dbtransaction.setThread(Thread.currentThread());
                    dbtransaction.querySingleCellInt("SELECT 1", null);
                    dbtransaction.setThread(null);
                } catch (SQLException e) {
                    logger.warn("Killing dead transaction " + dbtransaction + ".");
                    killTransaction(dbtransaction);
                }
            }
        }
    }

    /**
     * kills a transaction by rolling it back and closing it;
     * it will be removed from the pool
     *
     * @param dbtransaction DBTransaction
     */
    public void killTransaction(DBTransaction dbtransaction) {
        try {
            dbtransaction.setThread(Thread.currentThread());
            dbtransaction.rollback();
        } catch (SQLException e) {
            logger.debug(e, e);
        }
        dbtransaction.close();
        dbtransactions.remove(dbtransaction);
        freeDBTransactions.remove(dbtransaction);
        callers.remove(dbtransaction);
    }

    /**
     * verifies all available transactions
     */
    public void verifyAllAvailableConnections() {
        synchronized (wait) {
            // Iterate over a snapshot: killTransaction() removes the element
            // from freeDBTransactions itself, and combining that with the live
            // iterator's remove() would throw ConcurrentModificationException.
            for (DBTransaction transaction : new LinkedList<DBTransaction>(freeDBTransactions)) {
                if (!transaction.verifyConnection()) {
                    killTransaction(transaction);
                }
            }
        }
    }

    /**
     * closes the transaction pool
     */
    public void close() {
        closed = true;
        synchronized (wait) {
            // snapshot: a synchronizedList must not be iterated while other
            // threads may still be modifying it
            for (DBTransaction transaction : new LinkedList<DBTransaction>(dbtransactions)) {
                transaction.close();
            }
        }
    }
}
|
<gh_stars>0
/* * -* *- *- *- *- *- *- * * ** -* -* -* - *- *- *-* - ** - *- - * *- */
/* * _ _ +\ */
/* - | |_ ___ ___ ___ ___ ___ ___ ___ _| |___ ___ ___ ___ + */
/* + | _| _| .'| |_ -| _| -_| | . | -_| | _| -_| /* */
/* * |_| |_| |__,|_|_|___|___|___|_|_|___|___|_|_|___|___| + */
/* - ~--~--~--~--~--~--~--~--~--~--~--~--~--~--~--~--~--~ * */
/* * <NAME> | okruitho | Alpha_1337k *- */
/* -* <NAME> | rvan-hou | robijnvh -+ */
/* * / <NAME> | jbennink | JonasDBB /- */
/* / <NAME> | tvan-cit | Tjobo-Hero * */
/* + <NAME> | rbraaksm | rbraaksm - */
/* *. ._ */
/* *. database.provider.ts | Created: 2021-10-06 17:48:04 ._ */
/* - Edited on 2021-10-06 17:48:04 by alpha .- */
/* -* *- *- * -* -* -* ** - *-* -* * / -* -*- * /- - -* --*-*++ * -* * */
import { createConnection } from 'typeorm';

// Nest-style provider that opens the application's TypeORM connection to the
// "pongping" Postgres database; injected under the 'DATABASE_CONNECTION' token.
export const databaseProvider = [
	{
		provide: 'DATABASE_CONNECTION',
		useFactory: async () =>
			await createConnection({
				type: 'postgres',
				// host matches the docker-compose service name
				host: 'postgres',
				port: 5432,
				username: 'postgres',
				password: '<PASSWORD>',
				database: 'pongping',
				// register every compiled entity file found next to this module
				entities: [__dirname + '/../**/*.entity{.ts,.js}'],
				// NOTE(review): synchronize:true auto-migrates the schema on every
				// boot - convenient in development, destructive in production; confirm.
				synchronize: true,
			}),
	},
];
|
import React, { Component } from 'react';
import { connect } from 'react-redux';
import { fetchFlickrData } from './../actions/flickDataActions';
import './../css/flickrData.css';
class FlickrData extends Component {
componentDidMount() {
this.props.fetchFlickrData();
}
render() {
const flickrData = this.props.flickrData.items || [];
const flickrDataCards = flickrData.map((item, i) => {
const imageStyles = {
background: `url("${item.media.m}") center / cover`,
};
return (
<div key={i} className="demo-card-wide mdl-card mdl-shadow--2dp">
<div className="mdl-card__title flickr-images" style={imageStyles}>
<h2 className="mdl-card__title-text">
{ item.title.length < 25 ? item.title : `${item.title.substr(0, 25)}...` }
</h2>
</div>
<div className="mdl-card__supporting-text">
<a href={`https://www.flickr.com/people/${item.author_id}`} target="_blank">
{ item.author.substr(20, item.author.length).replace(/['"]+/g, '').split(')') }
</a> posted a <a href={item.link} target="_blank">photo</a>
</div>
<div className="mdl-card__actions mdl-card--border">
Tags: { item.tags.length > 0 ? item.tags : 'No tags'}
</div>
</div>
);
});
return (
<div id="flickr-data-container">
{ flickrDataCards }
</div>
);
}
}
// Expose the flickr slice of the redux store as this component's props.
const mapStateToProps = state => ({
  flickrData: state.flickrDataReducer,
});

// Expose the fetch action as a prop so the component can trigger loading.
const mapDispatchToProps = dispatch => ({
  fetchFlickrData: () => dispatch(fetchFlickrData()),
});

export default connect(mapStateToProps, mapDispatchToProps)(FlickrData);
|
import { parseString } from "xml2js";
import Constants from "../Constants";
import dispatcher from "../dispatcher";
import { checkStatus } from "../FetchHelpers";
/**
 * Fetches today's BART service advisories, dispatching FETCH_ADVISORIES
 * first, then RECEIVED_ADVISORIES with the parsed advisories on success or
 * FETCH_ADVISORIES_ERROR on any failure.
 */
export function updateAdvisories() {
  const url = "https://api.bart.gov/api/bsa.aspx?cmd=bsa&key=MW9S-E7SL-26DU-VV8V&date=today"
  const options = {
    method: "GET",
    mode: "cors"
  }

  // Promise wrapper around the callback-style xml2js parser. parseString is
  // synchronous, so a parse error rejects exactly where the chain expects it.
  const parseXml = (xml) =>
    new Promise((resolve, reject) => {
      parseString(xml, (err, result) => {
        if (err) {
          reject(new Error(err));
        } else {
          resolve(result);
        }
      });
    });

  dispatcher.dispatch({type: Constants.FETCH_ADVISORIES});

  fetch(url, options)
    .then(checkStatus)
    .then((response) => response.text())
    .then(parseXml)
    .then((parsed) => {
      dispatcher.dispatch({
        type: Constants.RECEIVED_ADVISORIES,
        advisories: parsed.root.bsa
      });
    })
    .catch((err) => {
      dispatcher.dispatch({
        type: Constants.FETCH_ADVISORIES_ERROR,
        message: err
      })
      console.error("Failed to fetch advisories! Reason:", err)
    });
}
|
package org.rs2server.rs2.domain.service.impl.skill;
import org.rs2server.rs2.domain.service.api.skill.MiningService;
import org.rs2server.rs2.model.Item;
import org.rs2server.rs2.model.container.Container;
import org.rs2server.rs2.model.container.Equipment;
import org.rs2server.rs2.model.player.Player;
import org.rs2server.rs2.model.skills.Mining;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Objects;
import java.util.Random;
/**
* @author tommo
*/
/**
 * Default {@link MiningService} implementation: pickaxe lookup, prospector
 * outfit experience bonuses and random gem drops.
 *
 * @author tommo
 */
public class MiningServiceImpl implements MiningService {

    private static final Random rand = new Random();

    @Override
    public boolean hasPickaxe(@Nonnull Player player) {
        // A player "has" a pickaxe when any known pickaxe type is carried or worn.
        return getPickaxe(player) != null;
    }

    /**
     * Returns the first pickaxe (in enum declaration order) found in the
     * player's inventory or equipment, or null if none is present.
     */
    @Nullable
    @Override
    public Mining.PickAxe getPickaxe(@Nonnull Player player) {
        Objects.requireNonNull(player, "player");
        for (final Mining.PickAxe pickaxe : Mining.PickAxe.values()) {
            final int itemId = pickaxe.getId();
            if (player.getInventory().contains(itemId) || player.getEquipment().contains(itemId)) {
                return pickaxe;
            }
        }
        return null;
    }

    /**
     * Sums the experience bonus of each worn prospector piece on top of the
     * neutral 1.0 modifier (helmet, jacket, legs, boots - in that order).
     */
    @Override
    public float getProspectorKitExperienceModifier(@Nonnull Player player) {
        final Container eq = player.getEquipment();
        float modifier = 1f;
        modifier += eq.contains(12013) ? 0.04f : 0f; // helmet
        modifier += eq.contains(12014) ? 0.08f : 0f; // jacket
        modifier += eq.contains(12015) ? 0.06f : 0f; // legs
        modifier += eq.contains(12016) ? 0.02f : 0f; // boots
        return modifier;
    }

    /**
     * With 3% probability returns one uncut gem (diamond, ruby, emerald or
     * sapphire, equally likely); otherwise returns null.
     */
    @Override
    public Item getRandomChanceGem() {
        if (rand.nextFloat() > 0.03f) {
            return null;
        }
        // Uncut diamond, ruby, emerald, sapphire.
        final int[] gemIds = {1617, 1619, 1621, 1623};
        return new Item(gemIds[rand.nextInt(4)], 1);
    }
}
|
#!/bin/bash
# Abort every in-progress multipart upload in the given S3 bucket.
# Usage: ./abort-multipart.sh <bucket-name>
set -euo pipefail

# Fail with a usage message when the bucket argument is missing.
BUCKET="${1:?usage: $0 <bucket-name>}"

# `.Uploads[]?` (instead of `.Uploads[]`) makes jq emit nothing when the
# bucket has no pending uploads, rather than erroring on null.
aws s3api list-multipart-uploads --bucket "$BUCKET" |
jq -r '.Uploads[]? | "\(.UploadId) \(.Key)"' |
while read -r id key; do
    echo "KEY: $key, ID $id"
    aws s3api abort-multipart-upload --bucket "$BUCKET" --key "$key" --upload-id "$id"
done
|
<gh_stars>0
/**
* Package for calculate test.
*
* @author <NAME> ( https://vk.com/id428714363)
* @version 1.5
* @since 16.03.2019
*/
package ru.job4j.calculate;
|
// @flow
type BoolType = { uid: string, name: "bool" };
export const boolT = (uid: string): BoolType => ({ name: "bool", uid });
export const isBoolT = (ty: Type) => ty.name === "bool";
type ErrType = { uid: string, name: "err", lower: Type, upper: Type };
export const errT = (uid: string, lower: Type, upper: Type): ErrType => ({
name: "err",
uid,
lower,
upper
});
export const isErrT = (ty: Type) => ty.name === "err";
type FuncType = { uid: string, name: "func", params: Type[], returns: Type };
export const funcT = (
uid: string,
params: Type[],
returns: Type
): FuncType => ({
name: "func",
uid,
params,
returns
});
export const isFuncT = (ty: Type) => ty.name === "func";
type IntType = { uid: string, name: "int" };
export const intT = (uid: string): IntType => ({ name: "int", uid });
export const isIntT = (ty: Type) => ty.name === "int";
type ObjType = { uid: string, name: "obj", properties: [string, Type][] };
export const objT = (uid: string, properties: [string, Type][]): ObjType => ({
name: "obj",
uid,
properties
});
export const isObjT = (ty: Type) => ty.name === "obj";
type StrType = { uid: string, name: "str" };
export const strT = (uid: string): StrType => ({ name: "str", uid });
export const isStrT = (ty: Type) => ty.name === "str";
type VarType = { uid: string, name: "var" };
export const varT = (uid: string): VarType => ({ name: "var", uid });
export const isVarT = (ty: Type) => ty.name === "var";
type VoidType = { uid: string, name: "void" };
export const voidT = (uid: string): VoidType => ({ name: "void", uid });
export const isVoidT = (ty: Type) => ty.name === "void";
export type Type =
| BoolType
| ErrType
| FuncType
| IntType
| ObjType
| StrType
| VarType
| VoidType;
export const getTypes = (nextId: () => string) => ({
boolT: (...args) => boolT(nextId(), ...args),
errT: (...args) => errT(nextId(), ...args),
funcT: (...args) => funcT(nextId(), ...args),
intT: (...args) => intT(nextId(), ...args),
objT: (...args) => objT(nextId(), ...args),
strT: (...args) => strT(nextId(), ...args),
varT: (...args) => varT(nextId(), ...args),
voidT: (...args) => voidT(nextId(), ...args)
});
|
import pandas as pd
from sklearn.feature_extraction.text import CountVectorizer

# Load the training data.
data = pd.read_csv('input.csv')

# Feature extraction: word-level bigram counts.
# NOTE(review): passing a DataFrame to fit_transform iterates over its COLUMN
# NAMES, not its rows - this almost certainly should be a text column (e.g.
# data['text']); confirm against input.csv's schema.
vectorizer = CountVectorizer(ngram_range=(2,2))
X = vectorizer.fit_transform(data)

# Training the model: naive Bayes over the bigram counts,
# with the "language" column as the label.
from sklearn.naive_bayes import MultinomialNB
clf = MultinomialNB().fit(X, data["language"])

# Testing the model on a single held-out sentence.
test_data = ["Hola me llamo Ana"]

# Convert the test sentence with the already-fitted vectorizer (transform,
# not fit_transform, so the vocabulary stays that of the training data).
Y = vectorizer.transform(test_data)
predicted_language = clf.predict(Y)
print(predicted_language[0]) # Spanish
|
<reponame>hispindia/BIHAR-2.7
package org.hisp.dhis.common;
import org.hisp.dhis.DhisSpringTest;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.dataelement.DataElementGroup;
import org.hisp.dhis.dataelement.DataElementService;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import static junit.framework.Assert.*;
// Integration test for IdentifiableObjectManager.getObject( id, simpleName ):
// identifiable objects of different types must be retrievable generically by
// their database id plus the simple class name.
public class IdentifiableObjectManagerTest
    extends DhisSpringTest
{
    @Autowired
    private DataElementService dataElementService;

    @Autowired
    private IdentifiableObjectManager identifiableObjectManager;

    // Persists two data elements and two data element groups, then verifies
    // each can be looked up via its generated id and simple class name.
    @Test
    public void testGetObject()
    {
        DataElement dataElementA = createDataElement( 'A' );
        DataElement dataElementB = createDataElement( 'B' );

        int dataElementIdA = dataElementService.addDataElement( dataElementA );
        int dataElementIdB = dataElementService.addDataElement( dataElementB );

        DataElementGroup dataElementGroupA = createDataElementGroup( 'A' );
        DataElementGroup dataElementGroupB = createDataElementGroup( 'B' );

        int dataElementGroupIdA = dataElementService.addDataElementGroup( dataElementGroupA );
        int dataElementGroupIdB = dataElementService.addDataElementGroup( dataElementGroupB );

        assertEquals( dataElementA, identifiableObjectManager.getObject( dataElementIdA, DataElement.class.getSimpleName() ) );
        assertEquals( dataElementB, identifiableObjectManager.getObject( dataElementIdB, DataElement.class.getSimpleName() ) );

        assertEquals( dataElementGroupA, identifiableObjectManager.getObject( dataElementGroupIdA, DataElementGroup.class.getSimpleName() ) );
        assertEquals( dataElementGroupB, identifiableObjectManager.getObject( dataElementGroupIdB, DataElementGroup.class.getSimpleName() ) );
    }
}
|
<reponame>pathawks/podcast<filename>lib/jekyll-podcast/episode.rb
# frozen_string_literal: true
require "mp3info"
require "yaml"
module JekyllPodcast
  # A single podcast episode built from the ID3 tags of an MP3 file; the
  # collected fields are exposed as YAML via #to_yaml.
  class Episode
    # @param e_filename [String] path to the episode's MP3 file
    def initialize(e_filename)
      @episode = {"audio"=>{}}
      Mp3Info.open(e_filename) do |mp3|
        # ID3v1 title and ID3v2 comment (COMM) become title/excerpt.
        @episode["title"] = mp3.tag.title
        @episode["excerpt"] = mp3.tag2.COMM
        @episode["audio"]["mp3"] = File.basename(e_filename)
        if (mp3.tag2.TYER && mp3.tag2.TDAT && mp3.tag2.TIME)
          # ID3v2.3 date frames: TYER = YYYY, TDAT = DDMM, TIME = HHMM.
          year = mp3.tag2.TYER
          month = mp3.tag2.TDAT[2..3]
          date = mp3.tag2.TDAT[0..1]
          time = mp3.tag2.TIME
          @episode["date"] = "#{year}-#{month}-#{date} #{time}"
        else
          # NOTE(review): fallback yields a Time object (ctime = inode change
          # time) rather than the formatted string above - confirm downstream
          # consumers accept both forms.
          @episode["date"] = File.ctime(e_filename)
        end
      end
    end

    # Serializes the collected episode metadata as YAML.
    def to_yaml
      @episode.to_yaml
    end
  end
end
|
<gh_stars>0
import { createStyled } from "./styled";
const styled = createStyled();

// Props accepted by Button; `type` selects the color variant.
export interface ButtonProps {
  type?: "primary" | "secondary";
}

// Base button is black; the attribute selectors recolor it when a `type`
// prop is rendered onto the element.
export const Button = styled.button<ButtonProps>`
  background: black;
  border: none;
  padding: 4px;
  &[type="primary"] {
    background: blue;
  }
  &[type="secondary"] {
    background: grey;
  }
`;
|
/*
* MIT License
*
* Copyright (c) 2018 <NAME> (@smallcreep) <<EMAIL>>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.github.smallcreep.cucumber.seeds.context;
import org.hamcrest.CoreMatchers;
import org.hamcrest.MatcherAssert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
/**
* Test Case for {@link CxProperties}.
* @since 0.1.1
*/
public class CxPropertiesTest {
    /**
     * Key known to be present in the default
     * "cucumber.seeds.properties" resource.
     */
    private static final String KEY = "test.prop";
    /**
     * Rule that lets a test declare the exception it expects.
     */
    @Rule
    public final ExpectedException exception = ExpectedException.none();
    /**
     * The value behind {@link #KEY} must be readable from the default file.
     */
    @Test
    public void checkValueReadFromDefaultFile() {
        final CxProperties props = new CxProperties();
        MatcherAssert.assertThat(
            props.value(CxPropertiesTest.KEY),
            CoreMatchers.equalTo("test2")
        );
    }
    /**
     * The context is read-only, so adding a property must throw
     * {@link UnsupportedOperationException}.
     */
    @Test
    public void checkNotAddingNewProperty() {
        this.exception.expect(UnsupportedOperationException.class);
        this.exception.expectMessage(
            "#put() is not supported, it's a read-only map"
        );
        final CxProperties props = new CxProperties();
        props.add("KEY", "value");
    }
    /**
     * A key present in the default file must be reported as contained.
     */
    @Test
    public void checkContains() {
        final CxProperties props = new CxProperties();
        MatcherAssert.assertThat(
            props.contains(CxPropertiesTest.KEY),
            CoreMatchers.equalTo(true)
        );
    }
}
|
#!/bin/bash
set -euxo pipefail
# Connector jars may live in either of these directories depending on the
# Dataproc image version; the Dataproc dir takes precedence when present.
VM_CONNECTORS_HADOOP_DIR=/usr/lib/hadoop/lib
VM_CONNECTORS_DATAPROC_DIR=/usr/local/share/google/dataproc/lib
# Oldest connector versions this script accepts for each connector.
declare -A MIN_CONNECTOR_VERSIONS
MIN_CONNECTOR_VERSIONS=(
  ["bigquery"]="0.11.0"
  ["gcs"]="1.7.0")
# Starting from these versions connectors name changed:
# "...-<version>-hadoop2.jar" -> "...-hadoop2-<version>.jar"
declare -A NEW_NAME_MIN_CONNECTOR_VERSIONS
NEW_NAME_MIN_CONNECTOR_VERSIONS=(
  ["bigquery"]="0.13.5"
  ["gcs"]="1.9.5")
# Requested versions come from cluster metadata; empty when not set.
BIGQUERY_CONNECTOR_VERSION=$(/usr/share/google/get_metadata_value attributes/bigquery-connector-version || true)
GCS_CONNECTOR_VERSION=$(/usr/share/google/get_metadata_value attributes/gcs-connector-version || true)
# Tracks whether the GCS connector changed, which requires service restarts.
UPDATED_GCS_CONNECTOR=false
# Succeed (exit status 0) when this node is not the cluster master.
is_worker() {
  local role
  role="$(/usr/share/google/get_metadata_value attributes/dataproc-role || true)"
  [[ ${role} != Master ]]
}
# Print the lesser of two dotted version strings.
# sort -V implements proper version ordering (handles multi-digit
# components like 0.13.5 vs 0.9.0 correctly).
min_version() {
  printf '%s\n%s\n' "$1" "$2" | sort -V | head -n1
}
# Fail when the requested connector version is older than the supported minimum.
# $1 - connector name: "bigquery" or "gcs"
# $2 - requested connector version
validate_version() {
  local connector=$1
  local requested=$2
  local minimum=${MIN_CONNECTOR_VERSIONS[$connector]}
  # Valid only if the supported minimum is still the smaller of the two.
  if [[ "$(min_version "$minimum" "$requested")" != "$minimum" ]]; then
    echo "ERROR: $connector-connector version should be greater than or equal to $minimum, but was $requested"
    return 1
  fi
}
# Replace an installed connector jar with the requested version.
# No-op when the requested version is empty.
# $1 - connector name: "bigquery" or "gcs"
# $2 - connector version (may be empty)
update_connector() {
  local name=$1    # connector name: "bigquery" or "gcs"
  local version=$2 # connector version
  if [[ $version ]]; then
    if [[ $name == gcs ]]; then
      UPDATED_GCS_CONNECTOR=true
    fi
    # validate new connector version (exits the script on failure via set -e)
    validate_version "$name" "$version"
    # Prefer the Dataproc-specific lib dir when it exists.
    if [[ -d ${VM_CONNECTORS_DATAPROC_DIR} ]]; then
      local vm_connectors_dir=${VM_CONNECTORS_DATAPROC_DIR}
    else
      local vm_connectors_dir=${VM_CONNECTORS_HADOOP_DIR}
    fi
    # remove old connector
    rm -f "${vm_connectors_dir}/${name}-connector-"*
    # download new connector
    # connector name could be in one of 2 formats:
    # 1) gs://hadoop-lib/${name}/${name}-connector-hadoop2-${version}.jar
    # 2) gs://hadoop-lib/${name}/${name}-connector-${version}-hadoop2.jar
    local new_name_min_version=${NEW_NAME_MIN_CONNECTOR_VERSIONS[$name]}
    if [[ "$(min_version "$new_name_min_version" "$version")" == "$new_name_min_version" ]]; then
      local jar_name="${name}-connector-hadoop2-${version}.jar"
    else
      local jar_name="${name}-connector-${version}-hadoop2.jar"
    fi
    gsutil cp "gs://hadoop-lib/${name}/${jar_name}" "${vm_connectors_dir}/"
    # Update or create version-less connector link
    ln -s -f "${vm_connectors_dir}/${jar_name}" "${vm_connectors_dir}/${name}-connector.jar"
  fi
}
# At least one connector version must be requested via metadata.
if [[ -z $BIGQUERY_CONNECTOR_VERSION ]] && [[ -z $GCS_CONNECTOR_VERSION ]]; then
  echo "ERROR: None of connector versions are specified"
  exit 1
fi
# because connectors from 1.7 branch are not compatible with previous connectors
# versions (they have the same class relocation paths) we need to update both
# of them, even if only one connector version is set
if [[ -z $BIGQUERY_CONNECTOR_VERSION ]] && [[ $GCS_CONNECTOR_VERSION == "1.7.0" ]]; then
  BIGQUERY_CONNECTOR_VERSION="0.11.0"
fi
if [[ $BIGQUERY_CONNECTOR_VERSION == "0.11.0" ]] && [[ -z $GCS_CONNECTOR_VERSION ]]; then
  GCS_CONNECTOR_VERSION="1.7.0"
fi
update_connector "bigquery" "$BIGQUERY_CONNECTOR_VERSION"
update_connector "gcs" "$GCS_CONNECTOR_VERSION"
# Only a GCS connector change requires restarting services below.
if [[ $UPDATED_GCS_CONNECTOR != true ]]; then
  echo "GCS connector wasn't updated - no need to restart any services"
  exit 0
fi
# Restart YARN NodeManager service on worker nodes so they can pick up updated GCS connector
if is_worker; then
  systemctl kill -s KILL hadoop-yarn-nodemanager
fi
# Restarts Dataproc Agent after successful initialization
# WARNING: this function relies on undocumented and not officially supported Dataproc Agent
# "sentinel" files to determine successful Agent initialization and not guaranteed
# to work in the future. Use at your own risk!
restart_dataproc_agent() {
  # Because Dataproc Agent should be restarted after initialization, we need to wait until
  # it will create a sentinel file that signals initialization completion (success or failure)
  while [[ ! -f /var/lib/google/dataproc/has_run_before ]]; do
    sleep 1
  done
  # If Dataproc Agent didn't create a sentinel file that signals initialization
  # failure then it means that initialization succeeded and it should be restarted
  if [[ ! -f /var/lib/google/dataproc/has_failed_before ]]; then
    systemctl kill -s KILL google-dataproc-agent
  fi
}
# Export so the function is visible to the child bash process below.
export -f restart_dataproc_agent
# Schedule asynchronous Dataproc Agent restart so it will use updated connectors.
# It could not be restarted synchronously because Dataproc Agent should be restarted
# after its initialization, including init actions execution, has been completed.
bash -c restart_dataproc_agent &
# Detach the background job so the init action can exit cleanly.
disown
|
<filename>node_modules/react-map-gl/node_modules/mapbox-gl/js/util/util.js
'use strict';
var UnitBezier = require('unitbezier');
var Coordinate = require('../geo/coordinate');
/**
 * Given a value `t` that varies between 0 and 1, return
 * an interpolation function that eases between 0 and 1 in a pleasing
 * cubic in-out fashion.
 *
 * @param {number} t input
 * @returns {number} eased output in [0, 1]
 * @private
 */
exports.easeCubicInOut = function (t) {
    if (t <= 0) return 0;
    if (t >= 1) return 1;
    var t2 = t * t,
        t3 = t2 * t;
    // Piecewise cubic: accelerate for t < 0.5, decelerate afterwards.
    return 4 * (t < 0.5 ? t3 : 3 * (t - t2) + t3 - 0.75);
};
/**
 * Given (p1x, p1y), (p2x, p2y) control points for a bezier curve,
 * return a function that interpolates along that curve.
 *
 * @param {number} p1x control point 1 x coordinate
 * @param {number} p1y control point 1 y coordinate
 * @param {number} p2x control point 2 x coordinate
 * @param {number} p2y control point 2 y coordinate
 * @returns {Function} interpolator: receives number value, returns
 * number value.
 * @private
 */
exports.bezier = function(p1x, p1y, p2x, p2y) {
    var bezier = new UnitBezier(p1x, p1y, p2x, p2y);
    return function(t) {
        return bezier.solve(t);
    };
};
/**
 * A default bezier-curve powered easing function with
 * control points (0.25, 0.1) and (0.25, 1)
 *
 * @param {number} t
 * @returns {number} output
 * @private
 */
exports.ease = exports.bezier(0.25, 0.1, 0.25, 1);
/**
 * Given a four-element array of numbers that represents a color in
 * RGBA, return a version for which the RGB components are multiplied
 * by the A (alpha) component
 *
 * @param {Array<number>} color color array
 * @returns {Array<number>} premultiplied color array, or null when
 * no color is given
 * @private
 */
exports.premultiply = function (color) {
    if (!color) return null;
    var opacity = color[3];
    return [
        color[0] * opacity,
        color[1] * opacity,
        color[2] * opacity,
        opacity
    ];
};
/**
 * constrain n to the given range via min + max
 *
 * @param {number} n value
 * @param {number} min the minimum value to be returned
 * @param {number} max the maximum value to be returned
 * @returns {number} the clamped value
 * @private
 */
exports.clamp = function (n, min, max) {
    return Math.min(max, Math.max(min, n));
};
/*
 * constrain n to the given range, excluding the minimum, via modular arithmetic
 * @param {number} n value
 * @param {number} min the minimum value to be returned, exclusive
 * @param {number} max the maximum value to be returned, inclusive
 * @returns {number} constrained number
 * @private
 */
exports.wrap = function (n, min, max) {
    var d = max - min;
    // Double-modulo keeps the result non-negative even when n < min.
    var w = ((n - min) % d + d) % d + min;
    return (w === min) ? max : w;
};
/*
 * return the first non-null and non-undefined argument to this function.
 * Returns undefined when every argument is null/undefined.
 * @returns {*} argument
 * @private
 */
exports.coalesce = function() {
    for (var i = 0; i < arguments.length; i++) {
        var arg = arguments[i];
        if (arg !== null && arg !== undefined)
            return arg;
    }
};
/*
 * Call an asynchronous function on an array of arguments,
 * calling `callback` with the completed results of all calls.
 *
 * @param {Array<*>} array input to each call of the async function.
 * @param {Function} fn an async function with signature (data, callback)
 * @param {Function} callback a callback run after all async work is done.
 * called with an array, containing the results of each async call.
 * Note: if several calls fail, only the last error is reported.
 * @returns {undefined}
 * @private
 */
exports.asyncAll = function (array, fn, callback) {
    if (!array.length) { return callback(null, []); }
    var remaining = array.length;
    var results = new Array(array.length);
    var error = null;
    array.forEach(function (item, i) {
        fn(item, function (err, result) {
            if (err) error = err;
            // Results are stored by index, so completion order doesn't matter.
            results[i] = result;
            if (--remaining === 0) callback(error, results);
        });
    });
};
/*
 * Compute the difference between the keys in one object and the keys
 * in another object.
 *
 * @param {Object} obj
 * @param {Object} other
 * @returns {Array<string>} keys present in obj but absent from other
 * @private
 */
exports.keysDifference = function (obj, other) {
    var difference = [];
    for (var i in obj) {
        if (!(i in other)) {
            difference.push(i);
        }
    }
    return difference;
};
/**
 * Given a destination object and optionally many source objects,
 * copy all properties from the source objects into the destination.
 * The last source object given overrides properties from previous
 * source objects.
 * @param {Object} dest destination object
 * @param {...Object} sources sources from which properties are pulled
 * @returns {Object} dest
 * @private
 */
exports.extend = function (dest) {
    // arguments[0] is dest itself; start copying from arguments[1].
    for (var i = 1; i < arguments.length; i++) {
        var src = arguments[i];
        for (var k in src) {
            dest[k] = src[k];
        }
    }
    return dest;
};
/**
 * Extend a destination object with all properties of the src object,
 * using defineProperty instead of simple assignment so getters,
 * setters, and property flags are preserved.
 * @param {Object} dest
 * @param {Object} src
 * @returns {Object} dest
 * @private
 */
exports.extendAll = function (dest, src) {
    for (var i in src) {
        Object.defineProperty(dest, i, Object.getOwnPropertyDescriptor(src, i));
    }
    return dest;
};
/**
 * Extend a parent's prototype with all properties in a properties
 * object.
 *
 * @param {Object} parent constructor function or prototype object
 * @param {Object} props
 * @returns {Object} new prototype object inheriting from parent
 * @private
 */
exports.inherit = function (parent, props) {
    var parentProto = typeof parent === 'function' ? parent.prototype : parent,
        proto = Object.create(parentProto);
    exports.extendAll(proto, props);
    return proto;
};
/**
 * Given an object and a number of properties as strings, return version
 * of that object with only those properties.
 *
 * @param {Object} src the object
 * @param {Array<string>} properties an array of property names chosen
 * to appear on the resulting object.
 * @returns {Object} object with limited properties.
 * @example
 * var foo = { name: 'Charlie', age: 10 };
 * var justName = pick(foo, ['name']);
 * // justName = { name: 'Charlie' }
 * @private
 */
exports.pick = function (src, properties) {
    var result = {};
    for (var i = 0; i < properties.length; i++) {
        var k = properties[i];
        if (k in src) {
            result[k] = src[k];
        }
    }
    return result;
};
// Module-level counter backing uniqueId().
var id = 1;
/**
 * Return a unique numeric id, starting at 1 and incrementing with
 * each call.
 *
 * @returns {number} unique numeric id.
 * @private
 */
exports.uniqueId = function () {
    return id++;
};
/**
 * Create a version of `fn` that is only called `time` milliseconds
 * after its last invocation
 *
 * @param {Function} fn the function to be debounced
 * @param {number} time milliseconds after which the function will be invoked
 * @returns {Function} debounced function
 * @private
 */
exports.debounce = function(fn, time) {
    var timer, args;
    return function() {
        // Remember only the most recent arguments; earlier calls are dropped.
        args = arguments;
        clearTimeout(timer);
        timer = setTimeout(function() {
            fn.apply(null, args);
        }, time);
    };
};
/**
 * Given an array of member function names as strings, replace all of them
 * with bound versions that will always refer to `context` as `this`. This
 * is useful for classes where otherwise event bindings would reassign
 * `this` to the evented object or some other value: this lets you ensure
 * the `this` value always.
 *
 * @param {Array<string>} fns list of member function names
 * @param {*} context the context value
 * @returns {undefined} changes functions in-place
 * @example
 * function MyClass() {
 *   bindAll(['ontimer'], this);
 *   this.name = 'Tom';
 * }
 * MyClass.prototype.ontimer = function() {
 *   alert(this.name);
 * };
 * var myClass = new MyClass();
 * setTimeout(myClass.ontimer, 100);
 * @private
 */
exports.bindAll = function(fns, context) {
    fns.forEach(function(fn) {
        context[fn] = context[fn].bind(context);
    });
};
/**
 * Given a class, bind all of the methods that look like handlers: that
 * begin with _on, and bind them to the class.
 *
 * @param {Object} context an object with methods
 * @private
 */
exports.bindHandlers = function(context) {
    for (var i in context) {
        if (typeof context[i] === 'function' && i.indexOf('_on') === 0) {
            context[i] = context[i].bind(context);
        }
    }
};
/**
 * Set the 'options' property on `obj` with properties
 * from the `options` argument. Properties in the `options`
 * object will override existing properties.
 *
 * @param {Object} obj destination object
 * @param {Object} options object of override options
 * @returns {Object} derived options object.
 * @private
 */
exports.setOptions = function(obj, options) {
    // Give obj its own options object (prototype-chained to any inherited
    // one) so overrides never mutate shared defaults.
    if (!obj.hasOwnProperty('options')) {
        obj.options = obj.options ? Object.create(obj.options) : {};
    }
    for (var i in options) {
        obj.options[i] = options[i];
    }
    return obj.options;
};
/**
 * Given a list of coordinates, get their center as a coordinate,
 * zoomed out far enough that the bounding box fits one unit.
 * @param {Array<Coordinate>} coords
 * @returns {Coordinate} centerpoint
 * @private
 */
exports.getCoordinatesCenter = function(coords) {
    var minX = Infinity;
    var minY = Infinity;
    var maxX = -Infinity;
    var maxY = -Infinity;
    for (var i = 0; i < coords.length; i++) {
        minX = Math.min(minX, coords[i].column);
        minY = Math.min(minY, coords[i].row);
        maxX = Math.max(maxX, coords[i].column);
        maxY = Math.max(maxY, coords[i].row);
    }
    var dx = maxX - minX;
    var dy = maxY - minY;
    var dMax = Math.max(dx, dy);
    // -log2(dMax) chooses the zoom at which the larger extent spans one tile.
    return new Coordinate((minX + maxX) / 2, (minY + maxY) / 2, 0)
        .zoomTo(Math.floor(-Math.log(dMax) / Math.LN2));
};
/**
 * Determine if a string ends with a particular substring
 * @param {string} string
 * @param {string} suffix
 * @returns {boolean}
 * @private
 */
exports.endsWith = function(string, suffix) {
    return string.indexOf(suffix, string.length - suffix.length) !== -1;
};
/**
 * Determine if a string starts with a particular substring
 * @param {string} string
 * @param {string} prefix
 * @returns {boolean}
 * @private
 */
exports.startsWith = function(string, prefix) {
    return string.indexOf(prefix) === 0;
};
/**
 * Create an object by mapping all the values of an existing object while
 * preserving their keys.
 * @param {Object} input
 * @param {Function} iterator called with (value, key, input)
 * @param {*} [context] optional `this` for the iterator
 * @returns {Object}
 * @private
 */
exports.mapObject = function(input, iterator, context) {
    var output = {};
    for (var key in input) {
        output[key] = iterator.call(context || this, input[key], key, input);
    }
    return output;
};
/**
 * Create an object by filtering out values of an existing object
 * @param {Object} input
 * @param {Function} iterator predicate called with (value, key, input)
 * @param {*} [context] optional `this` for the iterator
 * @returns {Object}
 * @private
 */
exports.filterObject = function(input, iterator, context) {
    var output = {};
    for (var key in input) {
        if (iterator.call(context || this, input[key], key, input)) {
            output[key] = input[key];
        }
    }
    return output;
};
/**
* Deeply compares two object literals.
* @param {Object} obj1
* @param {Object} obj2
* @returns {boolean}
* @private
*/
exports.deepEqual = function deepEqual(a, b) {
if (Array.isArray(a)) {
if (!Array.isArray(b) || a.length !== b.length) return false;
for (var i = 0; i < a.length; i++) {
if (!deepEqual(a[i], b[i])) return false;
}
return true;
}
if (typeof a === 'object') {
if (!(typeof b === 'object')) return false;
var keys = Object.keys(a);
if (keys.length !== Object.keys(b).length) return false;
for (var key in a) {
if (!deepEqual(a[key], b[key])) return false;
}
return true;
}
return a === b;
};
/**
* Deeply clones two objects.
* @param {Object} obj1
* @param {Object} obj2
* @returns {boolean}
* @private
*/
exports.clone = function deepEqual(input) {
if (Array.isArray(input)) {
return input.map(exports.clone);
} else if (typeof input === 'object') {
return exports.mapObject(input, exports.clone);
} else {
return input;
}
};
/**
 * Check if two arrays have at least one common element.
 * @param {Array} a
 * @param {Array} b
 * @returns {boolean}
 * @private
 */
exports.arraysIntersect = function(a, b) {
    for (var l = 0; l < a.length; l++) {
        if (b.indexOf(a[l]) >= 0) return true;
    }
    return false;
};
|
#!/bin/sh
# ----------------------------------------------------------------------------
# --
# -- Copyright (c) 2018-2022 Silvio Brandani <support@tcapture.net>. All rights reserved.
# --
# ----------------------------------------------------------------------------
runJREApplication()
{
    # Launch the application with the configured JRE when it passes the
    # version/bitness verification; otherwise print the error message
    # that verifyJRE left in $ErrorMessage.
    if verifyJRE "$JAVA_EXECUTABLE_PATH" $JAVA_MINIMUM_VERSION $JAVA_BITNESS_REQUIRED
    then
        "$JAVA_EXECUTABLE_PATH" "$@"
    else
        echo $ErrorMessage
    fi
}
# Verify that the JRE at $1 meets minimum version $2 and bitness $3
# (0 = any bitness). Returns 0 on success; on failure sets $ErrorMessage
# and returns 1 (wrong bitness), 2 (version too old) or 3 (no JRE found).
verifyJRE()
{
    JAVA_PATH="$1"
    JAVA_MIN_VERSION="$2"
    JAVA_BITNESS="$3"
    IS_VALID_JRE=0
    # Solaris' default awk lacks POSIX features; use the xpg4 one there.
    if [ "`uname`" = "SunOS" ]; then
        AWK=/usr/xpg4/bin/awk # awk utility
    else
        AWK=awk
    fi
    if [ -f "$JAVA_PATH" ];
    then
        JRE_BITNESS_FOUND=32 # Set default to 32-bit
        JAVA_VERSION_OUTPUT=`"$JAVA_PATH" -version 2>&1`
        # Extract "major.minor" from the quoted version string.
        # NOTE(review): expr compares decimals like "1.8" as strings, which
        # works for single-digit minors only — confirm for versions >= x.10.
        JRE_VERSION_FOUND=`echo $JAVA_VERSION_OUTPUT | $AWK -F '"' '/version/ {print $2}' | cut -f2 -d"\"" | cut -f1,2 -d"."`
        st=`expr $JAVA_MIN_VERSION '<=' $JRE_VERSION_FOUND`
        if [ $st -eq 1 ];
        then
            # "&>" is a bashism; under #!/bin/sh it backgrounds the command
            # instead of redirecting, so use the portable "> file 2>&1" form.
            if echo $JAVA_VERSION_OUTPUT | grep "64-Bit" > /dev/null 2>&1
            then
                JRE_BITNESS_FOUND=64
            elif echo $JAVA_VERSION_OUTPUT | grep "ppc64-64" > /dev/null 2>&1
            then
                JRE_BITNESS_FOUND=64
            elif echo $JAVA_VERSION_OUTPUT | grep "ppc64le-64" > /dev/null 2>&1
            then
                JRE_BITNESS_FOUND=64
            fi
            if [ "$JAVA_BITNESS" = "$JRE_BITNESS_FOUND" ];
            then
                IS_VALID_JRE=1
            elif [ "$JAVA_BITNESS" = 0 ];
            then
                # Bitness 0 means "accept any bitness".
                IS_VALID_JRE=1
            else
                # Error for incorrect Java bitness
                ErrorMessage="$JAVA_BITNESS bit java not found."
                return 1
            fi
        else
            # Error for Minimum Java version not found
            ErrorMessage="Java $JAVA_MIN_VERSION or greater is not found on your machine."
            return 2
        fi
    fi
    if [ $IS_VALID_JRE = "0" ];
    then
        ErrorMessage="Unable to find JRE in path."
        return 3
    fi
    return 0
}
|
#!/bin/bash
set -e

# This entrypoint is used to play nicely with the current cookiecutter configuration.
# Since docker-compose relies heavily on environment variables itself for configuration, we'd have to define multiple
# environment variables just to support cookiecutter out of the box. That makes no sense, so this little entrypoint
# does all this for us.
export REDIS_URL=redis://redis:6379

# the official postgres image uses 'postgres' as default user if not set explicitly.
if [ -z "$POSTGRES_USER" ]; then
    export POSTGRES_USER=postgres
fi

export DATABASE_URL=postgres://$POSTGRES_USER:$POSTGRES_PASSWORD@postgres:5432/$POSTGRES_DB

# Exit status 0 once postgres accepts connections, non-zero otherwise.
function postgres_ready(){
python << END
import sys
import psycopg2
try:
    conn = psycopg2.connect(dbname="$POSTGRES_DB", user="$POSTGRES_USER", password="$POSTGRES_PASSWORD", host="postgres")
except psycopg2.OperationalError:
    sys.exit(-1)
sys.exit(0)
END
}

until postgres_ready; do
  >&2 echo "Postgres is unavailable - sleeping"
  sleep 1
done

>&2 echo "Postgres is up - continuing..."

# exec "$@" directly: the previous cmd="$@" / exec $cmd pattern flattened
# the argument list and word-split it again unquoted, breaking any command
# argument containing whitespace.
exec "$@"
|
<filename>core/src/main/java/brooklyn/entity/rebind/PersisterDeltaImpl.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package brooklyn.entity.rebind;
import java.util.Collection;
import java.util.Collections;
import java.util.Set;
import brooklyn.mementos.BrooklynMementoPersister.Delta;
import brooklyn.mementos.BrooklynMementoPersister.MutableDelta;
import brooklyn.mementos.CatalogItemMemento;
import brooklyn.mementos.EnricherMemento;
import brooklyn.mementos.EntityMemento;
import brooklyn.mementos.FeedMemento;
import brooklyn.mementos.LocationMemento;
import brooklyn.mementos.Memento;
import brooklyn.mementos.PolicyMemento;
import com.google.common.annotations.Beta;
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
/**
 * Mutable accumulator of a persistence delta: the mementos added/updated
 * and the ids removed since the last persister write, grouped by
 * {@link BrooklynObjectType}. Read accessors return unmodifiable views.
 */
public class PersisterDeltaImpl implements Delta, MutableDelta {
    
    // use multiset?
    // Insertion-ordered sets so repeated adds of the same memento collapse.
    Collection<LocationMemento> locations = Sets.newLinkedHashSet();
    Collection<EntityMemento> entities = Sets.newLinkedHashSet();
    Collection<PolicyMemento> policies = Sets.newLinkedHashSet();
    Collection<EnricherMemento> enrichers = Sets.newLinkedHashSet();
    Collection<FeedMemento> feeds = Sets.newLinkedHashSet();
    Collection<CatalogItemMemento> catalogItems = Sets.newLinkedHashSet();
    
    // Ids of objects removed since the last write, per type.
    Collection<String> removedLocationIds = Sets.newLinkedHashSet();
    Collection<String> removedEntityIds = Sets.newLinkedHashSet();
    Collection<String> removedPolicyIds = Sets.newLinkedHashSet();
    Collection<String> removedEnricherIds = Sets.newLinkedHashSet();
    Collection <String> removedFeedIds = Sets.newLinkedHashSet();
    Collection<String> removedCatalogItemIds = Sets.newLinkedHashSet();
    
    @Override
    public Collection<LocationMemento> locations() {
        return Collections.unmodifiableCollection(locations);
    }
    @Override
    public Collection<EntityMemento> entities() {
        return Collections.unmodifiableCollection(entities);
    }
    @Override
    public Collection<PolicyMemento> policies() {
        return Collections.unmodifiableCollection(policies);
    }
    @Override
    public Collection<EnricherMemento> enrichers() {
        return Collections.unmodifiableCollection(enrichers);
    }
    @Override
    public Collection<FeedMemento> feeds() {
        return Collections.unmodifiableCollection(feeds);
    }
    @Override
    public Collection<CatalogItemMemento> catalogItems() {
        return Collections.unmodifiableCollection(catalogItems);
    }
    @Override
    public Collection<String> removedLocationIds() {
        return Collections.unmodifiableCollection(removedLocationIds);
    }
    @Override
    public Collection<String> removedEntityIds() {
        return Collections.unmodifiableCollection(removedEntityIds);
    }
    @Override
    public Collection<String> removedPolicyIds() {
        return Collections.unmodifiableCollection(removedPolicyIds);
    }
    @Override
    public Collection<String> removedEnricherIds() {
        return Collections.unmodifiableCollection(removedEnricherIds);
    }
    @Override
    public Collection<String> removedFeedIds() {
        return Collections.unmodifiableCollection(removedFeedIds);
    }
    @Override
    public Collection<String> removedCatalogItemIds() {
        return Collections.unmodifiableCollection(removedCatalogItemIds);
    }
    
    /** Returns an unmodifiable view of the mementos collected for {@code type}. */
    @Override
    public Collection<? extends Memento> getObjectsOfType(BrooklynObjectType type) {
        return Collections.unmodifiableCollection(getMutableObjectsOfType(type));
    }
    
    // Unchecked widening of the per-type collection so callers can add any Memento.
    @SuppressWarnings("unchecked")
    @Beta
    private Collection<Memento> getMutableUncheckedObjectsOfType(BrooklynObjectType type) {
        return (Collection<Memento>)getMutableObjectsOfType(type);
    }
    
    // Maps a BrooklynObjectType to its backing (mutable) memento collection.
    private Collection<? extends Memento> getMutableObjectsOfType(BrooklynObjectType type) {
        switch (type) {
        case ENTITY: return entities;
        case LOCATION: return locations;
        case POLICY: return policies;
        case ENRICHER: return enrichers;
        case FEED: return feeds;
        case CATALOG_ITEM: return catalogItems;
        case UNKNOWN: 
        default:
            throw new IllegalArgumentException(type+" not supported");
        }
    }
    
    /** Returns an unmodifiable view of the removed ids collected for {@code type}. */
    @Override
    public Collection<String> getRemovedIdsOfType(BrooklynObjectType type) {
        return Collections.unmodifiableCollection(getRemovedIdsOfTypeMutable(type));
    }
    
    // Maps a BrooklynObjectType to its backing (mutable) removed-ids collection.
    private Collection<String> getRemovedIdsOfTypeMutable(BrooklynObjectType type) {
        switch (type) {
        case ENTITY: return removedEntityIds;
        case LOCATION: return removedLocationIds;
        case POLICY: return removedPolicyIds;
        case ENRICHER: return removedEnricherIds;
        case FEED: return removedFeedIds;
        case CATALOG_ITEM: return removedCatalogItemIds;
        case UNKNOWN: 
        default:
            throw new IllegalArgumentException(type+" not supported");
        }
    }
    
    /** Records a single memento under its type. */
    public void add(BrooklynObjectType type, Memento memento) {
        getMutableUncheckedObjectsOfType(type).add(memento);
    }
    
    /** Records several mementos under the given type. */
    @Override
    public void addAll(BrooklynObjectType type, Iterable<? extends Memento> mementos) {
        Iterables.addAll(getMutableUncheckedObjectsOfType(type), mementos);
    }
    
    /** Records ids removed for the given type. */
    public void removed(BrooklynObjectType type, Set<String> removedIdsOfType) {
        getRemovedIdsOfTypeMutable(type).addAll(removedIdsOfType);
    }
}
|
import http from "@/common/http";
import authHeader from "@/common/auth.header";
const SERVICE_API_URL = "/api/oauth";
// Thin wrapper around the OAuth HTTP endpoints; every request carries
// the auth header produced by authHeader().
class OauthService {
  // POST credentials in the request body to obtain a session/token.
  login(params = {}) {
    return http.post(SERVICE_API_URL + "/login", params, {
      headers: authHeader()
    });
  }
  // Terminate the current session; params are sent as query string.
  logout(params = {}) {
    return http.get(SERVICE_API_URL + "/logout", {
      params: params,
      headers: authHeader()
    });
  }
  // NOTE(review): register issues a GET with params in the query string,
  // which exposes registration data in URLs/logs; registration endpoints
  // typically expect POST — confirm against the backend API.
  register(params = {}) {
    return http.get(SERVICE_API_URL + "/register", {
      params: params,
      headers: authHeader()
    });
  }
}
export default new OauthService();
|
# CocoaPods shortcuts.
alias p='pod'
alias pi='pod install'
# Remove all installed pods (the Pods/ directory).
alias pclean='rm -rf Pods/'
# Open the Podfile in the editor behind the `e` alias/function.
alias pedit='e Podfile'
|
#!/usr/bin/env bash
# Copyright 2020 The Jetstack cert-manager contributors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -o nounset
set -o errexit
set -o pipefail
# This script will build an entirely new testing environment using kind.
# This is intended to be run in a CI environment and *not* for development.
# It is not optimised for quick, iterative development.
# Collect cluster logs into the CI artifacts directory (used on failure too).
export_logs() {
  echo "Exporting cluster logs to artifacts..."
  "${SCRIPT_ROOT}/cluster/export-logs.sh"
}
SCRIPT_ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" > /dev/null && pwd )"
export REPO_ROOT="${SCRIPT_ROOT}/.."
source "${SCRIPT_ROOT}/lib/lib.sh"
# Configure PATH to use bazel provided e2e tools
setup_tools
echo "Ensuring a kind cluster exists..."
"${SCRIPT_ROOT}/cluster/create.sh"
# Still export logs if any later step fails (errexit aborts the script).
trap "export_logs" ERR
echo "Ensuring all e2e test dependencies are installed..."
"${SCRIPT_ROOT}/setup-e2e-deps.sh"
echo "Running e2e test suite..."
# Skip Venafi end-to-end tests in CI
FLAKE_ATTEMPTS=2 "${SCRIPT_ROOT}/run-e2e.sh" \
  --ginkgo.skip=Venafi \
  "$@"
export_logs
|
<reponame>gtm7712/team-project-d-2191-swen-261-11-d
package com.webcheckers.appl;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import java.util.HashMap;
import com.webcheckers.model.Player;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
/**
* Unit test suite for {@link PlayerLobby} component
*
* @author: <NAME>
*/
@Tag("Applicaion-tier")
public class PlayerLobbyTest{
    // JUnit 5's assertEquals takes (expected, actual); the expected value
    // is passed first throughout so failure messages read correctly.

    /**
     * A username containing only digits is rejected (code 0).
     */
    @Test
    public void numericOnly(){
        final PlayerLobby CuT = new PlayerLobby();
        assertEquals(0, CuT.checkUsername("123"));
    }

    /**
     * A username containing only special characters is rejected (code 0).
     */
    @Test
    public void specialCharOnly(){
        final PlayerLobby CuT = new PlayerLobby();
        assertEquals(0, CuT.checkUsername("*"));
    }

    /**
     * A blank username is rejected (code 0).
     */
    @Test
    public void emptyName(){
        final PlayerLobby CuT = new PlayerLobby();
        assertEquals(0, CuT.checkUsername(""));
    }

    /**
     * A null username is rejected (code 0).
     */
    @Test
    public void nullName(){
        final PlayerLobby CuT = new PlayerLobby();
        assertEquals(0, CuT.checkUsername(null));
    }

    /**
     * A username already taken by another user is unavailable (code 1).
     */
    @Test
    public void notAvailable(){
        final PlayerLobby CuT = new PlayerLobby();
        CuT.addUsername("test");
        assertEquals(1, CuT.checkUsername("test"));
    }

    /**
     * A well-formed, unused username is accepted (code 2).
     */
    @Test
    public void goodName(){
        final PlayerLobby CuT = new PlayerLobby();
        assertEquals(2, CuT.checkUsername("a123"));
    }

    /**
     * Adding a username populates the lobby's username map.
     */
    @Test
    public void addUsernameTest(){
        final PlayerLobby CuT = new PlayerLobby();
        CuT.addUsername("Brandon");
        assertNotNull(CuT.getUsernames(), "Player Lobby should not be null");
    }

    /**
     * The player count matches the number of registered usernames.
     */
    @Test
    public void countPlayerTest(){
        final PlayerLobby CuT = new PlayerLobby();
        HashMap<String, Player> test = new HashMap<>();
        test.put("Brandon", new Player("Brandon"));
        CuT.addUsername("Brandon");
        assertEquals(test.size(), CuT.countPlayers());
    }

    /**
     * getPlayer returns a player equal to one constructed with the same name.
     */
    @Test
    public void getPlayerTest(){
        final PlayerLobby CuT = new PlayerLobby();
        CuT.addUsername("Brandon");
        assertEquals(new Player("Brandon"), CuT.getPlayer("Brandon"));
    }

    /**
     * getUsernames returns the expected name-to-player map.
     */
    @Test
    public void getUsernamesTest(){
        final PlayerLobby CuT = new PlayerLobby();
        HashMap<String, Player> test = new HashMap<>();
        test.put("Brandon", new Player("Brandon"));
        CuT.addUsername("Brandon");
        assertEquals(test, CuT.getUsernames());
    }
}
|
import css from 'styled-jsx/css';
// eslint-disable-next-line no-unused-vars
import React from 'react';
import { colors, mq } from '../../tokens';
// Footer styles. Fix: the @supports variable-font block previously targeted
// the class selector ".h4", but the rule it refines styles the h4 element
// (there is no .h4 class anywhere in this sheet); it now targets "h4",
// matching the parallel ".label" @supports block pattern.
export default css`
  footer {
    background-color: ${colors.blue};
    color: white;
    padding-bottom: 4rem;
    padding-top: 1rem;
  }
  @media ${mq.phone} {
    footer {
      padding-bottom: 6rem;
      padding-top: 2rem;
    }
  }
  h4 {
    font-size: 1rem;
    font-weight: 700;
    letter-spacing: 0.075rem;
    margin-bottom: 0;
  }
  @supports (font-variation-settings: normal) {
    h4 {
      font-variation-settings: 'wght' 700;
      font-weight: inherit;
    }
  }
  @media ${mq.phone} {
    h4 {
      font-size: 1.5rem;
    }
  }
  .columns {
    display: flex;
    flex-direction: column;
    flex-wrap: wrap;
    list-style: none;
    margin: 0;
    padding: 0;
  }
  @media ${mq.phone} {
    .columns {
      flex-direction: row;
    }
    ul.columns {
      width: 60%;
    }
  }
  .column {
    display: flex;
    flex-direction: column;
    font-size: 0.8rem;
    width: 100%;
  }
  @media ${mq.phone} {
    .column {
      font-size: 1.125rem;
      width: 50%;
    }
    .column--logo {
      width: 40%;
    }
  }
  .label {
    align-self: flex-start;
    font-weight: 700;
    margin-bottom: 0.3rem;
    margin-top: 1.5rem;
  }
  @media ${mq.phone} {
    .label {
      margin-top: 2rem;
    }
  }
  @supports (font-variation-settings: normal) {
    .label {
      font-variation-settings: 'wght' 700;
      font-weight: inherit;
    }
  }
  .link {
    font-weight: 400;
  }
  @supports (font-variation-settings: normal) {
    .link {
      font-variation-settings: 'wght' 400;
      font-weight: inherit;
    }
  }
  .link + .link {
    margin-left: 0.5rem;
  }
  a {
    color: currentColor;
    text-decoration: none;
  }
  a:hover,
  a:focus {
    text-decoration: underline;
  }
  .social-container {
    margin-top: 0.5rem;
  }
`;
// Scoped (css.resolve) styles for the footer logo SVG; the resolved
// className must be applied to the svg element by the consumer.
export const logoStyles = css.resolve`
  svg {
    color: white;
    height: 4rem;
    margin-top: 1.5rem;
    width: 6rem;
  }
  @media ${mq.phone} {
    svg {
      height: 8rem;
      margin-top: 2rem;
      width: 10rem;
    }
  }
  path {
    fill: white;
  }
`;
// Resolved (scoped) styles for the social-media icon SVGs; slightly larger
// at the ${mq.phone} media query.
export const socialStyles = css.resolve`
  svg {
    height: 1.25rem;
    vertical-align: middle;
    width: 1.25rem;
  }
  @media ${mq.phone} {
    svg {
      height: 1.5rem;
      width: 1.5rem;
    }
  }
`;
|
// AngularJS controller that draws a polar bar chart of monthly rent figures
// (lowest / highest / average) for 19 cities onto the #palette canvas.
// Relies on a global `$$` drawing helper library — its API (painter,
// rotate, animation) is not defined in this file.
ctrlapp.register.controller('barPolarRealEstateController', ['$remote', '$scope', function ($remote, $scope) {
    $scope.initMethod = function () {
        var size = 600;
        var painter = $$("#palette")
            // Set the canvas size
            .attr({
                width: size,
                height: size
            })
            // Obtain the painter (drawing-context wrapper)
            .painter();
        var data = [
            // lowest, highest, average
            [5000, 10000, 6785.71],
            [4000, 10000, 6825],
            [3000, 6500, 4463.33],
            [2500, 5600, 3793.83],
            [2000, 4000, 3060],
            [2000, 4000, 3222.33],
            [2500, 4000, 3133.33],
            [1800, 4000, 3100],
            [2000, 3500, 2750],
            [2000, 3000, 2500],
            [1800, 3000, 2433.33],
            [2000, 2700, 2375],
            [1500, 2800, 2150],
            [1500, 2300, 2100],
            [1600, 3500, 2057.14],
            [1500, 2600, 2037.5],
            [1500, 2417.54, 1905.85],
            [1500, 2000, 1775],
            [1500, 1800, 1650]
        ];
        var cities = [
            "北京",
            "上海",
            "深圳",
            "广州",
            "苏州",
            "杭州",
            "南京",
            "福州",
            "青岛",
            "济南",
            "长春",
            "大连",
            "温州",
            "郑州",
            "武汉",
            "成都",
            "东莞",
            "沈阳",
            "烟台"
        ];
        // Draw the outer circular scale ring
        painter.strokeCircle(size / 2, size / 2, size / 2 - 50).config({
            textAlign: "center"
        });
        for (var i = 0; i < cities.length; i++) {
            // Label position, rotated around the chart center; the angle
            // offsets by (i + 0.5) so the text sits between two tick marks.
            var p = $$.rotate(
                size / 2,
                size / 2,
                Math.PI / cities.length * 2 * (i + 0.5) - Math.PI / 2,
                size - 30,
                size / 2
            );
            painter
                // Tick mark on the ring
                .fillArc(
                    size / 2,
                    size / 2,
                    size / 2 - 50,
                    size / 2 - 40,
                    Math.PI / cities.length * 2 * i - Math.PI / 2 - 0.003,
                    0.006
                )
                // City label text
                .fillText(cities[i], p[0], p[1]);
        }
        // Draw a few gray reference circles
        painter.config("strokeStyle", "gray");
        for (var i = 1; i <= 4; i++) {
            painter.strokeCircle(
                size / 2,
                size / 2,
                i * (size / 2 - 50) / 5,
                0,
                Math.PI * 2
            );
        }
        painter
            .config("strokeStyle", "black")
            // Draw the vertical scale axis
            .beginPath()
            .moveTo(size / 2, size / 2)
            .lineTo(size / 2, 50)
            .stroke();
        var rules = ["0", "2,000", "4,000", "6,000", "8,000", "10,000"];
        painter.config({
            "font-size": 12,
            textAlign: "right"
        });
        for (var i = 0; i < rules.length; i++) {
            painter
                // Axis label text
                .fillText(
                    rules[i],
                    size / 2 - 15,
                    size / 2 - (size / 2 - 50) / (rules.length - 1) * i
                )
                // Tick mark on the axis
                .beginPath()
                .moveTo(
                    size / 2 - 10,
                    size / 2 - (size / 2 - 50) / (rules.length - 1) * i
                )
                .lineTo(size / 2, size / 2 - (size / 2 - 50) / (rules.length - 1) * i)
                .stroke();
        }
        $$.animation(
            deep => {
                // Draw the value sectors; `deep` (0..1, presumably — verify
                // against the $$ library) animates the outer radius growth.
                for (var i = 0; i < data.length; i++) {
                    painter
                        // Band from the lowest to the (animated) highest value
                        .config("fillStyle", "rgb(194, 55, 54)")
                        .fillArc(
                            size / 2,
                            size / 2,
                            data[i][0] / 10000 * (size / 2 - 50),
                            (data[i][1] * deep > data[i][0]
                                ? data[i][1] * deep
                                : data[i][0]) /
                            10000 *
                            (size / 2 - 50),
                            Math.PI / cities.length * 2 * i - Math.PI / 2 + 0.04,
                            Math.PI * 2 / data.length - 0.08
                        );
                }
            },
            700,
            () => {
                // After the animation: draw a thin black arc at each
                // average value (±2px band).
                for (var i = 0; i < data.length; i++) {
                    painter
                        .config("fillStyle", "#000")
                        .fillArc(
                            size / 2,
                            size / 2,
                            data[i][2] / 10000 * (size / 2 - 50) - 2,
                            data[i][2] / 10000 * (size / 2 - 50) + 2,
                            Math.PI / cities.length * 2 * i - Math.PI / 2 + 0.04,
                            Math.PI * 2 / data.length - 0.08
                        );
                }
            }
        );
    };
}]);
|
import { Field, Float, InputType } from '@nestjs/graphql';
import { GraphQLUpload, FileUpload } from 'graphql-upload';
/**
 * GraphQL input type for creating a "filling": a name, price, weight and an
 * uploaded image (multipart file upload via graphql-upload).
 */
@InputType()
export class CreateFillingInput {
  /** Display name of the filling. */
  @Field(() => String)
  name: string;
  /** Price value. Units/currency are not specified here — confirm with callers. */
  @Field(() => Float)
  price: number;
  /** Weight value. Units are not specified here — confirm with callers. */
  @Field(() => Float)
  weight: number;
  /** Uploaded image file stream (resolved by the graphql-upload middleware). */
  @Field(() => GraphQLUpload)
  image: FileUpload;
}
|
<reponame>agus-setiawan-desu/brapi-Java-TestServer
package org.brapi.test.BrAPITestServer.model.entity.germ;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.OneToOne;
import javax.persistence.Table;
import org.brapi.test.BrAPITestServer.model.entity.BrAPIBaseEntity;
import io.swagger.model.germ.ParentType;
/**
 * JPA entity for a pedigree record: links a germplasm to its two parents
 * (each itself a PedigreeEntity), the crossing project/year, and the textual
 * pedigree/notation strings. Maps to the {@code pedigree} table.
 */
@Entity
@Table(name="pedigree")
public class PedigreeEntity extends BrAPIBaseEntity{
	// Germplasm this pedigree record describes; cascaded so saving the
	// pedigree persists the germplasm as well.
	@OneToOne(cascade = CascadeType.ALL)
	private GermplasmEntity germplasm;
	// Crossing project that produced this germplasm (not cascaded).
	@OneToOne
	private CrossingProjectEntity crossingProject;
	@Column
	private Integer crossingYear;
	@Column
	private String familyCode;
	// Free-text pedigree string (e.g. a cross expression).
	@Column
	private String pedigree;
	// First parent; cascaded, forming a recursive pedigree tree.
	@OneToOne(cascade = CascadeType.ALL)
	private PedigreeEntity parent1;
	@Column
	private ParentType parent1Type;
	// Second parent; cascaded, forming a recursive pedigree tree.
	@OneToOne(cascade = CascadeType.ALL)
	private PedigreeEntity parent2;
	@Column
	private ParentType parent2Type;
	// Notation system used for the pedigree string.
	@Column
	private String notation;
	public CrossingProjectEntity getCrossingProject() {
		return crossingProject;
	}
	public void setCrossingProject(CrossingProjectEntity crossingProject) {
		this.crossingProject = crossingProject;
	}
	public Integer getCrossingYear() {
		return crossingYear;
	}
	public void setCrossingYear(Integer crossingYear) {
		this.crossingYear = crossingYear;
	}
	public String getFamilyCode() {
		return familyCode;
	}
	public void setFamilyCode(String familyCode) {
		this.familyCode = familyCode;
	}
	public ParentType getParent1Type() {
		return parent1Type;
	}
	public void setParent1Type(ParentType parent1Type) {
		this.parent1Type = parent1Type;
	}
	public ParentType getParent2Type() {
		return parent2Type;
	}
	public void setParent2Type(ParentType parent2Type) {
		this.parent2Type = parent2Type;
	}
	public GermplasmEntity getGermplasm() {
		return germplasm;
	}
	public void setGermplasm(GermplasmEntity germplasm) {
		this.germplasm = germplasm;
	}
	public String getNotation() {
		return notation;
	}
	public void setNotation(String notation) {
		this.notation = notation;
	}
	public String getPedigree() {
		return pedigree;
	}
	public void setPedigree(String pedigree) {
		this.pedigree = pedigree;
	}
	public PedigreeEntity getParent1() {
		return parent1;
	}
	public void setParent1(PedigreeEntity parent1) {
		this.parent1 = parent1;
	}
	public PedigreeEntity getParent2() {
		return parent2;
	}
	public void setParent2(PedigreeEntity parent2) {
		this.parent2 = parent2;
	}
}
|
<reponame>rcarlosdasilva/weixin
package io.github.rcarlosdasilva.weixin.api.weixin.impl;
import java.io.File;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.gson.JsonObject;
import com.google.gson.JsonSyntaxException;
import io.github.rcarlosdasilva.weixin.api.BasicApi;
import io.github.rcarlosdasilva.weixin.api.weixin.MaterialApi;
import io.github.rcarlosdasilva.weixin.common.Convention;
import io.github.rcarlosdasilva.weixin.common.dictionary.MediaType;
import io.github.rcarlosdasilva.weixin.core.http.ContentType;
import io.github.rcarlosdasilva.weixin.core.http.FormData;
import io.github.rcarlosdasilva.weixin.core.http.Http;
import io.github.rcarlosdasilva.weixin.core.http.HttpMethod;
import io.github.rcarlosdasilva.weixin.core.parser.ResponseParser;
import io.github.rcarlosdasilva.weixin.model.request.media.MediaAddMassImageRequest;
import io.github.rcarlosdasilva.weixin.model.request.media.MediaAddMassNewsRequest;
import io.github.rcarlosdasilva.weixin.model.request.media.MediaAddTemporaryRequest;
import io.github.rcarlosdasilva.weixin.model.request.media.MediaAddTimelessNewsRequest;
import io.github.rcarlosdasilva.weixin.model.request.media.MediaAddTimelessRequest;
import io.github.rcarlosdasilva.weixin.model.request.media.MediaCountTimelessRequest;
import io.github.rcarlosdasilva.weixin.model.request.media.MediaDeleteTimelessRequest;
import io.github.rcarlosdasilva.weixin.model.request.media.MediaGetTemporaryRequest;
import io.github.rcarlosdasilva.weixin.model.request.media.MediaGetTemporaryWithHqAudioRequest;
import io.github.rcarlosdasilva.weixin.model.request.media.MediaGetTimelessRequest;
import io.github.rcarlosdasilva.weixin.model.request.media.MediaListTimelessRequest;
import io.github.rcarlosdasilva.weixin.model.request.media.MediaTransformMassVideoRequest;
import io.github.rcarlosdasilva.weixin.model.request.media.MediaUpdateTimelessNewsRequest;
import io.github.rcarlosdasilva.weixin.model.request.media.bean.Article;
import io.github.rcarlosdasilva.weixin.model.response.media.MediaAddMassResponse;
import io.github.rcarlosdasilva.weixin.model.response.media.MediaAddTemporaryResponse;
import io.github.rcarlosdasilva.weixin.model.response.media.MediaAddTimelessResponse;
import io.github.rcarlosdasilva.weixin.model.response.media.MediaCountTimelessResponse;
import io.github.rcarlosdasilva.weixin.model.response.media.MediaGetTemporaryWithVideoResponse;
import io.github.rcarlosdasilva.weixin.model.response.media.MediaGetTimelessResponse;
import io.github.rcarlosdasilva.weixin.model.response.media.MediaListTimelessResponse;
import io.github.rcarlosdasilva.weixin.model.response.media.MediaTransformMassVideoResponse;
/**
 * Implementation of the WeChat material (media asset) management APIs:
 * temporary media, timeless (permanent) media, and mass-message media.
 *
 * @author <a href="mailto:<EMAIL>"><NAME></a>
 */
public class MaterialApiImpl extends BasicApi implements MaterialApi {

  private final Logger logger = LoggerFactory.getLogger(getClass());

  public MaterialApiImpl(String accountKey) {
    super(accountKey);
  }

  /** Uploads a temporary media file. The NEWS media type is not supported here. */
  @Override
  public MediaAddTemporaryResponse addTemporaryMedia(MediaType type, String fileName, File file) {
    Preconditions.checkArgument(MediaType.NEWS != type,
        "Not supported media type of News when add temporary");
    MediaAddTemporaryRequest requestModel = new MediaAddTemporaryRequest();
    requestModel.setType(type.getText());
    return upload(MediaAddTemporaryResponse.class, requestModel, Convention.MEDIA_FILE_UPLOAD_KEY,
        fileName, file, null);
  }

  /**
   * Downloads a temporary media item. The endpoint returns a raw file stream
   * for images, but a JSON payload (carrying a video URL) for videos; in the
   * latter case the actual video is fetched from that URL.
   */
  @Override
  public byte[] getTemporaryMedia(String mediaId) {
    MediaGetTemporaryRequest requestModel = new MediaGetTemporaryRequest();
    requestModel.setMediaId(mediaId);
    // The endpoint returns a file stream for images, but a JSON string for videos.
    InputStream is = getStream(requestModel);
    byte[] result = readStream(is);
    // Tentatively decode the response as a JSON string. Fix: decode as UTF-8
    // explicitly — the previous code used the platform default charset, which
    // can garble the JSON (and the video URL in it) on non-UTF-8 platforms.
    String text = new String(result, StandardCharsets.UTF_8);
    try {
      is.close();
      // Try to parse the response as the video JSON payload.
      MediaGetTemporaryWithVideoResponse videoResponse = ResponseParser
          .parse(MediaGetTemporaryWithVideoResponse.class, text);
      // Reaching this point means the media is a video; otherwise the parse
      // above throws and the bytes of the original stream are returned as-is.
      is = Http.requestStreamWithBodyContent(videoResponse.getVideoUrl(), HttpMethod.GET, null,
          ContentType.JSON);
      result = readStream(is);
      is.close();
    } catch (Exception ex) {
      // Expected path for non-video media: keep the original bytes.
      logger.debug("media api get temporary media", ex);
    }
    return result;
  }

  /** Downloads a temporary media item using the high-quality audio endpoint. */
  @Override
  public byte[] getTemporaryMediaWithHqAudio(String mediaId) {
    MediaGetTemporaryWithHqAudioRequest requestModel = new MediaGetTemporaryWithHqAudioRequest();
    requestModel.setMediaId(mediaId);
    return readStream(getStream(requestModel));
  }

  /** Uploads a timeless (permanent) media file. Videos must use {@link #addTimelessMediaVideo}. */
  @Override
  public MediaAddTimelessResponse addTimelessMedia(MediaType type, String fileName, File file) {
    Preconditions.checkArgument(MediaType.VIDEO != type,
        "Please invoke addTimelessMediaVideo for upload vedio");
    MediaAddTimelessRequest requestModel = new MediaAddTimelessRequest();
    requestModel.setType(type.getText());
    return upload(MediaAddTimelessResponse.class, requestModel, Convention.MEDIA_FILE_UPLOAD_KEY,
        fileName, file, null);
  }

  /** Uploads a timeless video, attaching its title and description as a form field. */
  @Override
  public MediaAddTimelessResponse addTimelessMediaVideo(String fileName, File file, String title,
      String description) {
    MediaAddTimelessRequest requestModel = new MediaAddTimelessRequest();
    requestModel.setType(MediaType.VIDEO.getText());
    // The video endpoint requires title/introduction as an extra JSON form part.
    JsonObject obj = new JsonObject();
    obj.addProperty(Convention.MEDIA_VIDEO_FORM_TITLE, title);
    obj.addProperty(Convention.MEDIA_VIDEO_FORM_INTRODUCTION, description);
    FormData data = new FormData(Convention.MEDIA_VIDEO_FORM_KEY, obj.toString());
    return upload(MediaAddTimelessResponse.class, requestModel, Convention.MEDIA_FILE_UPLOAD_KEY,
        fileName, file, Lists.newArrayList(data));
  }

  /** Creates a timeless news (article) media item; returns its media id, or null on failure. */
  @Override
  public String addTimelessMediaNews(List<Article> articles) {
    MediaAddTimelessNewsRequest requestModel = new MediaAddTimelessNewsRequest();
    requestModel.setArticles(articles);
    MediaAddTimelessResponse responseModel = post(MediaAddTimelessResponse.class, requestModel);
    return null == responseModel ? null : responseModel.getMediaId();
  }

  /**
   * Fetches a timeless media item. News/video come back as JSON; image/audio
   * come back as a raw stream, detected via the JSON parse failure.
   */
  @Override
  public MediaGetTimelessResponse getTimelessMedia(String mediaId) {
    MediaGetTimelessRequest requestModel = new MediaGetTimelessRequest();
    requestModel.setMediaId(mediaId);
    MediaGetTimelessResponse responseModel = null;
    try {
      responseModel = post(MediaGetTimelessResponse.class, requestModel);
    } catch (JsonSyntaxException ex) {
      logger.info("Json字符串解析错误,尝试获取二进制流(文件),可能是在获取永久图片、音频素材");
      responseModel = new MediaGetTimelessResponse();
      responseModel.setStream(readStream(postStream(requestModel)));
    }
    return responseModel;
  }

  /** Deletes a timeless media item; returns true on success. */
  @Override
  public boolean deleteTimelessMedia(String mediaId) {
    MediaDeleteTimelessRequest requestModel = new MediaDeleteTimelessRequest();
    requestModel.setMediaId(mediaId);
    return post(Boolean.class, requestModel);
  }

  /** Replaces the article at {@code index} of a timeless news media item. */
  @Override
  public boolean updateTimelessMedia(String mediaId, int index, Article article) {
    MediaUpdateTimelessNewsRequest requestModel = new MediaUpdateTimelessNewsRequest();
    requestModel.setMediaId(mediaId);
    requestModel.setIndex(index);
    requestModel.setArticle(article);
    return post(Boolean.class, requestModel);
  }

  /** Returns the per-type counts of timeless media. */
  @Override
  public MediaCountTimelessResponse countTimelessMedia() {
    MediaCountTimelessRequest requestModel = new MediaCountTimelessRequest();
    return get(MediaCountTimelessResponse.class, requestModel);
  }

  /**
   * Lists timeless media of the given type, paging from {@code offset}.
   * {@code count} is clamped to the API-accepted range [1, 20].
   * The THUMBNAIL type is not supported by the listing endpoint.
   */
  @Override
  public MediaListTimelessResponse listTimelessMedia(MediaType type, int offset, int count) {
    Preconditions.checkArgument(MediaType.THUMBNAIL != type,
        "Not supported media type of Thumb when list timeless");
    if (count > 20) {
      count = 20;
    }
    if (count < 1) {
      count = 1;
    }
    MediaListTimelessRequest requestModel = new MediaListTimelessRequest();
    requestModel.setType(type.getText());
    requestModel.setOffset(offset);
    requestModel.setCount(count);
    return post(MediaListTimelessResponse.class, requestModel);
  }

  /** Uploads an image for mass messages; returns its URL, or null on failure. */
  @Override
  public String addMassMediaImage(String fileName, File file) {
    MediaAddMassImageRequest requestModel = new MediaAddMassImageRequest();
    MediaAddMassResponse responseModel = upload(MediaAddMassResponse.class, requestModel,
        Convention.MEDIA_FILE_UPLOAD_KEY, fileName, file, null);
    return null == responseModel ? null : responseModel.getUrl();
  }

  /** Creates a news (article) media item for mass messages. */
  @Override
  public MediaAddMassResponse addMassMediaNews(List<Article> articles) {
    MediaAddMassNewsRequest requestModel = new MediaAddMassNewsRequest();
    requestModel.setArticles(articles);
    return post(MediaAddMassResponse.class, requestModel);
  }

  /** Transforms an uploaded video into a mass-message video with title/description. */
  @Override
  public MediaTransformMassVideoResponse transformMassMediaVideo(String mediaId, String title,
      String description) {
    MediaTransformMassVideoRequest requestModel = new MediaTransformMassVideoRequest();
    requestModel.setMediaId(mediaId);
    requestModel.setTitle(title);
    requestModel.setDescription(description);
    return post(MediaTransformMassVideoResponse.class, requestModel);
  }
}
|
#!/bin/bash -eu
# Remove the legacy rsync configuration file left behind by older versions;
# -f makes this a no-op when the file is already gone.
rm -f /root/.teslaCamRsyncConfig
|
// Factory producing WalletInvoice records bound to a single wallet.
// `create` marks invoices the wallet generated for itself; `createForRecipient`
// marks invoices generated on behalf of a recipient. The two previously
// duplicated the whole object literal and differed only in `selfGenerated`,
// so the construction is now shared in one builder.
export const WalletInvoiceFactory = (walletId: WalletId): WalletInvoiceFactory => {
  // Shared builder: all fields except `selfGenerated` are identical.
  const fromRegisteredInvoice = (
    registeredInvoice: RegisteredInvoice,
    selfGenerated: boolean,
  ): WalletInvoice => ({
    paymentHash: registeredInvoice.invoice.paymentHash,
    walletId,
    selfGenerated,
    pubkey: registeredInvoice.pubkey,
    paid: false,
  })

  const create = ({
    registeredInvoice,
  }: {
    registeredInvoice: RegisteredInvoice
  }): WalletInvoice => fromRegisteredInvoice(registeredInvoice, true)

  const createForRecipient = ({
    registeredInvoice,
  }: {
    registeredInvoice: RegisteredInvoice
  }): WalletInvoice => fromRegisteredInvoice(registeredInvoice, false)

  return {
    create,
    createForRecipient,
  }
}
|
/**
 * FIFO queue implemented with two LIFO stacks. Enqueues push onto an inbox
 * stack; dequeues pop from an outbox stack, refilling it (reversed) from the
 * inbox only when it runs dry. Amortized O(1) per operation.
 */
public class QueueUsingStacks<T> {
    /** Receives newly enqueued items (newest on top). */
    private Stack<T> inbox;
    /** Serves dequeues in arrival order (oldest on top). */
    private Stack<T> outbox;

    public QueueUsingStacks() {
        inbox = new Stack<T>();
        outbox = new Stack<T>();
    }

    /** Adds an item to the back of the queue. */
    public void enqueue(T item) {
        inbox.push(item);
    }

    /** Removes and returns the item at the front of the queue. */
    public T dequeue() {
        refillOutboxIfEmpty();
        return outbox.pop();
    }

    /** Moves every inbox item to the outbox, but only once the outbox is drained. */
    private void refillOutboxIfEmpty() {
        if (!outbox.isEmpty()) {
            return;
        }
        while (!inbox.isEmpty()) {
            outbox.push(inbox.pop());
        }
    }
}
|
/*
* Copyright 2016 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.deianvn.pgnparse;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
/**
 * Mutable model of a chess position parsed from FEN: piece placement keyed by
 * square name, side to move, castling rights, en-passant square, and the
 * half-move/full-move counters.
 */
public class FENPosition {

	private int halfMoves;
	private int fullMoves;
	private String playerToMove;
	private boolean whiteKingCastleAvailable = false;
	private boolean whiteQueenCastleAvailable = false;
	private boolean blackKingCastleAvailable = false;
	private boolean blackQueenCastleAvailable = false;
	// Square name of the en-passant target, if any.
	private String enpassantSquare;
	// Piece placement: square name -> piece occupying it.
	private final Map<String, Piece> occupiedSquares = new HashMap<>();

	/** Returns an unmodifiable view of all occupied square names. */
	public Set<String> getOccupiedSquares() {
		return Collections.unmodifiableSet(occupiedSquares.keySet());
	}

	/** Returns the piece on the given square, or null when the square is empty. */
	public Piece getPiece(String square) {
		return occupiedSquares.get(square);
	}

	/** Places a piece on the given square, replacing any previous occupant. */
	public void addPiece(String square, Piece piece) {
		occupiedSquares.put(square, piece);
	}

	public int getHalfMoves() {
		return halfMoves;
	}

	public void setHalfMoves(int halfMoves) {
		this.halfMoves = halfMoves;
	}

	public int getFullMoves() {
		return fullMoves;
	}

	public void setFullMoves(int fullMoves) {
		this.fullMoves = fullMoves;
	}

	public String getPlayerToMove() {
		return playerToMove;
	}

	public void setPlayerToMove(String playerToMove) {
		this.playerToMove = playerToMove;
	}

	public boolean isWhiteKingCastleAvailable() {
		return whiteKingCastleAvailable;
	}

	public void setWhiteKingCastleAvailable(boolean whiteKingCastleAvailable) {
		this.whiteKingCastleAvailable = whiteKingCastleAvailable;
	}

	public boolean isWhiteQueenCastleAvailable() {
		return whiteQueenCastleAvailable;
	}

	public void setWhiteQueenCastleAvailable(boolean whiteQueenCastleAvailable) {
		this.whiteQueenCastleAvailable = whiteQueenCastleAvailable;
	}

	public boolean isBlackKingCastleAvailable() {
		return blackKingCastleAvailable;
	}

	public void setBlackKingCastleAvailable(boolean blackKingCastleAvailable) {
		this.blackKingCastleAvailable = blackKingCastleAvailable;
	}

	public boolean isBlackQueenCastleAvailable() {
		return blackQueenCastleAvailable;
	}

	public void setBlackQueenCastleAvailable(boolean blackQueenCastleAvailable) {
		this.blackQueenCastleAvailable = blackQueenCastleAvailable;
	}

	public String getEnpassantSquare() {
		return enpassantSquare;
	}

	public void setEnpassantSquare(String enpassantSquare) {
		this.enpassantSquare = enpassantSquare;
	}
}
|
#! /bin/bash
# Launches NeRF training (main_nerf.py) on the Blender "lego" scene with GUI,
# pinned to one GPU and 8 OpenMP threads. The commented lines below are
# alternative invocations (CLIP-guided finetuning, LLFF "orchids" scene)
# kept for reference.
OMP_NUM_THREADS=8 CUDA_VISIBLE_DEVICES=1 python main_nerf.py data/nerf_synthetic/lego --workspace trial_nerf_lego -O --bound 1.0 --scale 0.67 --dt_gamma 0 --mode blender --gui
#OMP_NUM_THREADS=8 CUDA_VISIBLE_DEVICES=1 python main_nerf.py data/nerf_synthetic/lego --workspace trial_nerf_lego -O --bound 1.0 --scale 0.67 --dt_gamma 0 --mode blender --gui --rand_pose 0 --clip_text "red" --lr 1e-3 --ckpt latest_model
#OMP_NUM_THREADS=8 CUDA_VISIBLE_DEVICES=1 python main_nerf.py data/nerf_llff_data/orchids --workspace trial_nerf_orchids -O --gui --bound 2.0 --scale 0.6
#OMP_NUM_THREADS=8 CUDA_VISIBLE_DEVICES=1 python main_nerf.py data/nerf_llff_data/orchids --workspace trial_nerf_orchids -O --gui --bound 2.0 --scale 0.6 --rand_pose 0 --clip_text "blue flower" --lr 1e-3 --ckpt latest_model
|
<gh_stars>0
import { JsonApiService } from "./json-api.service";
import { WebStorageService } from "./web-storage.service";
import { Injectable } from '@angular/core';
import { Observable } from "rxjs/Rx";
import { CustomRequestOptions } from "./interfaces";
/**
 * Thin request dispatcher: GET requests flagged for caching are served
 * through the web-storage cache; everything else goes straight to the JSON
 * API service. Fixes: loose `==` comparisons replaced with strict `===`,
 * missing statement terminator added.
 */
@Injectable()
export class AjaxService{
    constructor(private webStorageService: WebStorageService, private jsonApiService: JsonApiService){}

    /**
     * Executes the HTTP request described by `config`. When the method is
     * GET and `keepInLocalStorage`/`keepInSessionStorage` is set, the result
     * is looked up in (or refreshed into) the corresponding web storage,
     * keyed by `keyForWebStorage` with freshness window `config.time`.
     */
    execute(config: CustomRequestOptions): Observable<any>{
        const method = config.method ? config.method : 'GET';
        if(method === 'GET' && config.keepInLocalStorage){
            return this.webStorageService.getItemFromLocalStorage(config.keyForWebStorage, config.url, config.time);
        }else if(method === 'GET' && config.keepInSessionStorage){
            return this.webStorageService.getItemFromSessionStorage(config.keyForWebStorage, config.url, config.time);
        }
        return this.jsonApiService.request(config);
    }
}
|
#!/usr/bin/env sh
# Provision an Alpine container for the app: Elixir toolchain, Postgres
# client and git, without keeping the package index cache.
apk add --update --no-cache elixir postgresql-client git
# Install the Hex and Rebar build tools non-interactively.
mix local.hex --force
mix local.rebar --force
# Application directory.
mkdir /opencov
|
from lorenz import functions
import numpy as np
class Reservoir:
    """Echo-state-network style reservoir with conceptor loading.

    The reservoir is driven by one or more input patterns; per-pattern
    conceptor matrices are computed (either in closed form via SVD or by a
    gradient rule), the readout and internal weights are (re-)learned by
    ridge regression, and stored patterns can then be autonomously recalled.
    """

    def __init__(self, N=500, alpha=400, NetSR=0.6, bias_scale=0.4, inp_scale=1.2, conn=None):
        # N: number of reservoir neurons.
        # alpha: conceptor aperture parameter.
        # NetSR: scaling applied to the raw recurrent weight matrix
        #   (presumably targets this spectral radius — depends on
        #   functions.IntWeights; confirm there).
        # bias_scale / inp_scale: std-dev scaling of bias and input weights.
        # conn: connection density; defaults to ~10 incoming links per neuron.
        self.N = N
        self.alpha = alpha
        self.NetSR = NetSR
        self.bias_scale = bias_scale
        self.inp_scale = inp_scale
        if not conn:
            self.conn = 10.0 / self.N
        else:
            self.conn = conn
        self.W_raw = self.NetSR * functions.IntWeights(self.N, self.N, self.conn)
        self.W_bias = self.bias_scale * np.random.randn(self.N)

    def run(self, patterns, t_learn=2000, t_wash=500, TyA_wout=0.01, TyA_wload=0.0001,
            gradient_load=False, gradient_c=False, load=True, gradient_window=1, c_adapt_rate=0.01, gradient_cut=2.0):
        """Drive the reservoir with each pattern, compute conceptors, and
        (optionally) learn the readout W_out and loaded weights W.

        patterns: list of either arrays indexed by time step or callables
            p(t) returning the input at step t.
        t_learn / t_wash: number of collected vs. discarded (washout) steps.
        TyA_wout / TyA_wload: ridge-regression regularizers.
        gradient_c: if True, adapt conceptors online by a clipped gradient
            rule instead of the closed-form SVD solution.
        """
        self.patterns = patterns
        self.t_learn = t_learn
        self.t_wash = t_wash
        self.TyA_wout = TyA_wout
        self.TyA_wload = TyA_wload
        self.gradient_load = gradient_load
        self.gradient_c = gradient_c
        # NOTE(review): attribute name is misspelled ("gradien_cut") but is
        # used consistently below; renaming would touch behavior-relevant code.
        self.gradien_cut = gradient_cut
        self.c_adapt_rate = c_adapt_rate
        self.n_patterns = len(self.patterns)
        # Infer the input dimensionality from the first pattern (array row
        # length, or the shape of the callable's return value).
        if type(self.patterns[0]) == np.ndarray:
            self.n_ip_dim = len(patterns[0][0])
        else:
            if type(self.patterns[0](0)) == np.float64:
                self.n_ip_dim = 1
            else:
                self.n_ip_dim = len(self.patterns[0](0))
        self.W_in = self.inp_scale * np.random.randn(self.N, self.n_ip_dim)
        # One conceptor matrix per pattern.
        self.C = []
        # State collections across all patterns (current and previous step).
        self.TrainArgs = np.zeros([self.N, self.n_patterns * self.t_learn])
        self.TrainOldArgs = np.zeros([self.N, self.n_patterns * self.t_learn])
        TrainOuts = np.zeros([self.n_ip_dim, self.n_patterns * self.t_learn])
        I = np.eye(self.N)
        for i, p in zip(range(self.n_patterns), self.patterns):
            x = np.zeros([self.N])
            xOld = np.zeros([self.N])
            xColl = np.zeros([self.N, self.t_learn])
            xOldColl = np.zeros([self.N, self.t_learn])
            uColl = np.zeros([self.n_ip_dim, self.t_learn])
            Cc = np.zeros([self.N, self.N])
            for t in range(self.t_learn + self.t_wash):
                # Fetch the input for this step (callable or array pattern).
                if not type(p) == np.ndarray:
                    u = np.reshape(p(t), self.n_ip_dim)
                else:
                    u = p[t]
                xOld = x
                # Standard tanh reservoir update.
                x = np.tanh(np.dot(self.W_raw, x) + np.dot(self.W_in, u) + self.W_bias)
                if gradient_c:
                    # Online conceptor adaptation with gradient-norm clipping.
                    grad = x - np.dot(Cc, x)
                    norm = np.linalg.norm(grad)
                    if (norm > self.gradien_cut):
                        grad = self.gradien_cut / norm * grad
                    Cc = Cc + self.c_adapt_rate * (np.outer(grad, x.T) - (self.alpha ** -2) * Cc)
                if (t >= self.t_wash):
                    # Record states only after the washout period.
                    xColl[:, t - self.t_wash] = x
                    xOldColl[:, t - self.t_wash] = xOld
                    uColl[:, t - self.t_wash] = u
            if not gradient_c:
                # Closed-form conceptor: C = R (R + alpha^-2 I)^-1 via SVD of
                # the state correlation matrix R.
                R = np.dot(xColl, np.transpose(xColl)) / self.t_learn
                U, S, V = np.linalg.svd(R, full_matrices=True)
                S = np.diag(S)
                S = (np.dot(S, np.linalg.inv(S + (self.alpha ** -2) * I)))
                self.C.append(np.dot(U, np.dot(S, U.T)))
            else:
                self.C.append(Cc)
            self.TrainArgs[:, i * self.t_learn:(i + 1) * self.t_learn] = xColl
            self.TrainOldArgs[:, i * self.t_learn:(i + 1) * self.t_learn] = xOldColl
            TrainOuts[:, i * self.t_learn:(i + 1) * self.t_learn] = uColl
        if load:
            """ Output Training """
            # Ridge regression of inputs from states -> readout weights.
            self.W_out = functions.RidgeWout(self.TrainArgs, TrainOuts, self.TyA_wout)
            self.NRMSE_readout = functions.NRMSE(np.dot(self.W_out, self.TrainArgs), TrainOuts);
            print("NRMSE for training of output connections: ")
            print(self.NRMSE_readout)
            """ Loading """
            # Re-learn recurrent weights so the network reproduces its own
            # driven dynamics (targets are pre-activation states minus bias).
            W_bias_rep = np.tile(self.W_bias, (self.n_patterns * self.t_learn, 1)).T
            W_targets = (np.arctanh(self.TrainArgs) - W_bias_rep)
            self.W = functions.RidgeWload(self.TrainOldArgs, W_targets, self.TyA_wload)
            self.NRMSE_load = functions.NRMSE(np.dot(self.W, self.TrainOldArgs), W_targets)
            print("NRMSE avg. per neuron for re-learning of connecivity matrix (loading): ")
            print(np.mean(self.NRMSE_load))

    def recall(self, t_recall=200):
        """Autonomously run the loaded network under each stored conceptor
        and collect the readout outputs in self.Y_recalls.

        Requires run(..., load=True) to have been called first (uses
        self.W, self.W_out, self.C and the final training state).
        """
        self.Y_recalls = []
        self.t_recall = t_recall
        for i in range(self.n_patterns):
            Cc = self.C[i]
            # start with a random state
            # x = 0.5*np.random.randn(self.N)
            # start from the last network state from training
            x = self.TrainArgs[:, -1]
            y_recall = np.zeros([self.t_recall, self.n_ip_dim])
            for t in range(self.t_recall):
                # Conceptor-filtered autonomous update, then linear readout.
                x = np.dot(Cc, np.tanh(np.dot(self.W, x) + self.W_bias))
                y = np.dot(self.W_out, x)
                y_recall[t] = y
            self.Y_recalls.append(y_recall)
|
#!/usr/bin/env bash
# Vagrant provisioning script for the "wordtori" Django project: installs
# build tools, Python 3, nginx + uWSGI, Redis and PostgreSQL, then migrates
# the database and daemonizes the app server.
# Variables
PG_VERSION=9.5
PROJECT_NAME=wordtori
ENV_USERNAME=ubuntu
#
# Create a symbolic link to the working directory
#
ln -s /vagrant /$PROJECT_NAME
#
# Keep package list information up to date
#
sudo apt-get update
#
# Utilities
#
sudo apt-get install -y build-essential # Required for building ANYTHING on ubuntu
sudo apt-get install -y git
#
# Setup Python
#
sudo apt-get install -y python3-pip
sudo pip3 install --upgrade pip
#
# Install Nginx
#
sudo apt-get install -y nginx
sudo cp /$PROJECT_NAME/deployment/nginx_conf_vagrant /etc/nginx/sites-available/$PROJECT_NAME
sudo ln -s /etc/nginx/sites-available/$PROJECT_NAME /etc/nginx/sites-enabled/
sudo pip3 install uwsgi
#
# Setup Redis
#
sudo apt-get install -y redis-server
#
# Setup Database
#
sudo apt-get install -y postgresql # Installs the supported version (9.5)
sudo apt-get install -y libpq-dev # required for psycopg2
sudo apt-get install -y python-psycopg2
sudo cp /$PROJECT_NAME/deployment/pg_hba.conf /etc/postgresql/$PG_VERSION/main/
sudo service postgresql restart
sudo -u postgres createuser -s $ENV_USERNAME
sudo createdb -U $ENV_USERNAME $PROJECT_NAME
#
# Install Python packages
#
sudo pip3 install -r /$PROJECT_NAME/requirements.txt
#
# TODO: Create Django admin - Currently Django doesn't support non-interactive setting for superuser
#
# python3 manage.py creatsuperuser #--username admin --email GetOutIsland@gmail.com
#
# Migrate Django
#
cd /$PROJECT_NAME && python3 manage.py makemigrations
cd /$PROJECT_NAME && python3 manage.py migrate
#
# Populate local database
#
# cd /$PROJECT_NAME && python3 manage.py loaddata deployment/initial_data.json
# sudo createuser -U postgres -d hjgblmqzztzppf
# sudo pg_restore -U hjgblmqzztzppf -d $PROJECT_NAME --clean /$PROJECT_NAME/deployment/initial_dataset_feb_8_2017
# sudo psql -d $PROJECT_NAME -U postgres -c "REASSIGN OWNED BY hjgblmqzztzppf TO vagrant"
#
# Restart the server
#
sudo service postgresql restart
sudo service nginx restart
#
# Daemonize uWSGI module
#
cd /$PROJECT_NAME && sudo uwsgi --daemonize /var/log/uwsgi-daemon.log --socket :8001 --module $PROJECT_NAME.wsgi --touch-reload=/$PROJECT_NAME/reload.ini
#
# Prepopulate the database
#
cd /$PROJECT_NAME && python3 manage.py prepopulate
|
#!/bin/bash -e
# Decodes a base64-encoded PKCS#12 key from $P12KEY into $TARGET_DIR/gcp.p12.
# LOGFILE and TARGET_DIR have defaults but can be overridden via environment.
: ${LOGFILE:=/var/log/recipes/gcs-p12.log}
: ${TARGET_DIR:=/usr/lib/hadoop/lib}
main(){
  # Ensure the target directory exists, then write the decoded key material.
  mkdir -p $TARGET_DIR
  echo $P12KEY | base64 -d > $TARGET_DIR/gcp.p12
  echo "p12 file successfully saved to $TARGET_DIR/gcp.p12"
}
# Append all of the script's output (stdout and stderr) to the log file.
exec &>> "$LOGFILE"
# Run main only when executed directly, not when sourced.
[[ "$0" == "$BASH_SOURCE" ]] && main "$@"
|
<filename>src/components/ConnectedStep.tsx
import * as React from 'react'
import { BorderRadiusObject, Shape } from '../types'
import { ITourGuideContext } from './TourGuideContext'
declare var __TEST__: boolean
// Props for a single tour step. `name` identifies the step in the tour
// context; `order` controls its position in the walkthrough; the remaining
// fields configure the tooltip/highlight appearance and are forwarded to the
// context on registration.
interface Props {
  name: string
  text: string
  title?: string
  order: number
  active?: boolean
  shape?: Shape
  image?: React.ReactNode
  context: ITourGuideContext
  children?: any
  maskOffset?: number
  borderRadiusObject?: BorderRadiusObject
  borderRadius?: number
  keepTooltipPosition?: boolean
  tooltipBottomOffset?: number
}
/**
 * Wraps a single child element as a tour step: registers itself with the
 * tour-guide context while `active`, and exposes a `measure()` helper that
 * resolves the wrapped element's layout for the highlight overlay.
 */
export class ConnectedStep extends React.Component<Props> {
  static defaultProps = {
    active: true,
  }

  // Ref to the wrapped (cloned) child element; used for measuring.
  wrapper: any

  componentDidMount() {
    // Only active steps participate in the tour.
    if (this.props.active) {
      this.register()
    }
  }

  componentDidUpdate(prevProps: Props) {
    // Register/unregister when the `active` flag toggles.
    if (this.props.active !== prevProps.active) {
      if (this.props.active) {
        this.register()
      } else {
        this.unregister()
      }
    }
  }

  componentWillUnmount() {
    this.unregister()
  }

  // Forwards native prop updates to the wrapped element.
  setNativeProps(obj: any) {
    this.wrapper.setNativeProps(obj)
  }

  // Registers this step (and its wrapper ref) with the tour context.
  register() {
    if (this.props.context && this.props.context.registerStep) {
      this.props.context.registerStep({
        target: this,
        wrapper: this.wrapper,
        ...this.props,
      })
    } else {
      console.warn('context undefined')
    }
  }

  // Removes this step from the tour context by name.
  unregister() {
    if (this.props.context.unregisterStep) {
      this.props.context.unregisterStep(this.props.name)
    } else {
      console.warn('unregisterStep undefined')
    }
  }

  /**
   * Resolves {x, y, width, height} of the wrapped element. Under __TEST__ a
   * zero rect is returned immediately; otherwise measurement is retried on
   * animation frames until the wrapper ref is mounted and measurable.
   */
  measure() {
    if (typeof __TEST__ !== 'undefined' && __TEST__) {
      return new Promise((resolve) =>
        resolve({
          x: 0,
          y: 0,
          width: 0,
          height: 0,
        }),
      )
    }

    return new Promise((resolve, reject) => {
      const measure = () => {
        // Wait until the wrapper element appears
        if (this.wrapper && this.wrapper.measure) {
          this.wrapper.measure(
            (
              _ox: number,
              _oy: number,
              width: number,
              height: number,
              x: number,
              y: number,
            ) =>
              resolve({
                x,
                y,
                width,
                height,
              }),
            reject,
          )
        } else {
          requestAnimationFrame(measure)
        }
      }
      requestAnimationFrame(measure)
    })
  }

  render() {
    // Inject a `copilot` prop (ref capture + Android layout hack) into the
    // single child instead of rendering extra wrapper markup.
    const copilot = {
      ref: (wrapper: any) => {
        this.wrapper = wrapper
      },
      onLayout: () => {}, // Android hack
    }

    return React.cloneElement(this.props.children, { copilot })
  }
}
|
#!/bin/bash
# -*- Mode: Bash; tab-width: 2; indent-tabs-mode: nil -*- vim:sta:et:sw=2:ts=2:syntax=sh
#
SCRIPT=$(basename "$0")
# Aborts the script when the given command is not available on PATH.
# Uses the POSIX `command -v` check instead of grepping `which` output,
# whose "no ..." error text varies between platforms and shells.
function check_dependency {
  typeset D="$1"
  if ! command -v "$D" >/dev/null 2>&1
  then
    echo "${SCRIPT}: Aborting - unable to resolve dependency: $D" >&2
    exit 1
  fi
}
|
import json
import math
import os
from matplotlib.pyplot import imshow, show
from matplotlib import cm
from numpy import arange, zeros
from scipy import ndimage
from scipy import misc
# NOTE(review): Python 2 code (print statements, scipy.misc.imread, which was
# removed in modern SciPy) — keep running under a py2/old-scipy environment.
def pixelate_image(img_fn,bins=20) :
    """Downsample an image into `bins` square blocks per row, averaging each
    block to a grayscale value; shows the result and dumps it as JSON
    ({x, y, v} per block) next to the input file."""
    img = misc.imread(img_fn)
    print "img is [h,w]:",img.shape

    # convert to single channel
    img = img.mean(axis=2)

    h,w = img.shape
    # Width of one square block; row count follows from the image height.
    box_width = 1.*w/bins
    h_bins = math.ceil(1.*h/box_width)
    pixel_img = zeros(shape=(h_bins,bins))
    print "binned to",pixel_img.shape,"by",bins,"columns, box width",box_width

    pixel_img_d = []
    for i,x in enumerate(arange(0,h,box_width)) :
        for j,y in enumerate(arange(0,w,box_width)) :
            # Average intensity of the block; also record it as a dict for JSON.
            pixel_img[i,j] = img[x:x+box_width,y:y+box_width].mean()
            pixel_img_d.append({"x":j,"y":i,"v":pixel_img[i,j]})

    imshow(pixel_img,cmap=cm.gray,interpolation='none')
    show()

    # Write the block values alongside the source image as <name>.json.
    base,ext = os.path.splitext(img_fn)
    json.dump(pixel_img_d,open('%s.json'%base,'w'))

if __name__ == '__main__' :
    pixelate_image('mona_lisa.jpg')
    pixelate_image('C-3PO_droid.jpg',bins=50)
|
#!/bin/sh
#
# $Id$
#
# mkgdaldist.sh - prepares GDAL source distribution package
#
# Doxygen 1.7.1 has a bug related to man pages. See https://trac.osgeo.org/gdal/ticket/6048
echo $(doxygen --version) | xargs python -c "import sys; v = sys.argv[1].split('.'); v=int(v[0])*10000+int(v[1])*100+int(v[2]); sys.exit(v < 10704)"
rc=$?
if test $rc != 0; then
echo "Wrong Doxygen version. 1.7.4 or later required"
exit $rc;
fi
if [ $# -lt 1 ] ; then
echo "Usage: mkgdaldist.sh <version> [-date date] [-branch branch] [-rc n]"
echo " <version> - version number used in name of generated archive."
echo " -date - date of package generation, current date used if not provided"
echo " -branch - path to SVN branch, trunk is used if not provided"
echo " -rc - gives a release candidate id to embed in filenames"
echo "Example: mkgdaldist.sh 1.8.0 -branch branches/1.8 -rc RC2"
echo "or mkgdaldist.sh 1.10.0beta2"
exit
fi
#
# Processing script input arguments
#
GDAL_VERSION=$1
COMPRESSED_VERSION=`echo $GDAL_VERSION | tr -d .`
if test "$2" = "-date" ; then
forcedate=$3
shift
shift
else
forcedate=no
fi
if test "$2" = "-branch"; then
forcebranch=$3
shift
shift
else
forcebranch="trunk"
fi
if test "$2" = "-rc"; then
RC=$3
shift
shift
else
RC=""
fi
#
# Checkout GDAL sources from the repository
#
echo "* Downloading GDAL sources from SVN..."
rm -rf dist_wrk
mkdir dist_wrk
cd dist_wrk
SVNURL="http://svn.osgeo.org/gdal"
SVNBRANCH=${forcebranch}
SVNMODULE="gdal"
echo "Generating package '${GDAL_VERSION}' from '${SVNBRANCH}' branch"
echo
# Disable for now, seems to depend on modern SVN versions.
#SVN_CONFIG="--config-option config:miscellany:use-commit-times=yes"
svn checkout -q ${SVNURL}/${SVNBRANCH}/${SVNMODULE} ${SVNMODULE} ${SVN_CONFIG}
if [ \! -d gdal ] ; then
echo "svn checkout reported an error ... abandoning mkgdaldist"
cd ..
rm -rf dist_wrk
exit
fi
#
# Make some updates and cleaning
#
echo "* Updating release date..."
if test "$forcedate" != "no" ; then
echo "Forcing Date To: $forcedate"
rm -f gdal/gcore/gdal_new.h
sed -e "/define GDAL_RELEASE_DATE/s/20[0-9][0-9][0-9][0-9][0-9][0-9]/$forcedate/" gdal/gcore/gdal.h > gdal/gcore/gdal_new.h
mv gdal/gcore/gdal_new.h gdal/gcore/gdal.h
fi
echo "* Cleaning .svn directories under $PWD..."
find gdal -name .svn | xargs rm -rf
rm -f gdal/.gitignore
#
# Generate man pages
#
echo "* Generating man pages..."
CWD=${PWD}
cd gdal
if test -d "man"; then
rm -rf man
fi
(cat Doxyfile ; echo "ENABLED_SECTIONS=man"; echo "INPUT=apps swig/python/scripts"; echo "FILE_PATTERNS=*.cpp *.dox"; echo "GENERATE_HTML=NO"; echo "GENERATE_MAN=YES"; echo "QUIET=YES") | doxygen -
if test ! -d "man"; then
echo " make man failed"
fi
cd ${CWD}
#
# Generate SWIG interface for C#
#
echo "* Generating SWIG C# interfaces..."
CWD=${PWD}
cd gdal/swig/csharp
./mkinterface.sh
cd ${CWD}
#
# Generate SWIG interface for Perl
#
echo "* Generating SWIG Perl interfaces..."
CWD=${PWD}
cd gdal/swig/perl
rm *wrap*
touch ../../GDALmake.opt
make generate
rm -f ../../GDALmake.opt
cd ${CWD}
#
# Make distribution packages
#
echo "* Making distribution packages..."
rm -f gdal/VERSION
echo $GDAL_VERSION > gdal/VERSION
mv gdal gdal-${GDAL_VERSION}
rm -f ../gdal-${GDAL_VERSION}${RC}.tar.gz ../gdal${COMPRESSED_VERSION}${RC}.zip
tar cf ../gdal-${GDAL_VERSION}${RC}.tar gdal-${GDAL_VERSION}
xz -k9e ../gdal-${GDAL_VERSION}${RC}.tar
gzip -9 ../gdal-${GDAL_VERSION}${RC}.tar
zip -qr ../gdal${COMPRESSED_VERSION}${RC}.zip gdal-${GDAL_VERSION}
echo "* Generating MD5 sums ..."
OSTYPE=`uname -s`
if test "$OSTYPE" = "Darwin" ; then
MD5=md5
else
MD5=md5sum
fi
cd ..
$MD5 gdal-${GDAL_VERSION}${RC}.tar.xz > gdal-${GDAL_VERSION}${RC}.tar.xz.md5
$MD5 gdal-${GDAL_VERSION}${RC}.tar.gz > gdal-${GDAL_VERSION}${RC}.tar.gz.md5
$MD5 gdal${COMPRESSED_VERSION}${RC}.zip > gdal${COMPRESSED_VERSION}${RC}.zip.md5
echo "* Cleaning..."
rm -rf dist_wrk
echo "*** The End ***"
|
<filename>splashpy/routers/objects.py
#
# Copyright (C) 2015-2020 Splash Sync <www.splashsync.com>
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# For the full copyright and license information, please view the LICENSE
# file that was distributed with this source code.
#
from splashpy import const
from splashpy.routers.base import BaseRouter
from splashpy.core.framework import Framework
class ObjectsRouter(BaseRouter):
    """Objects WebService Actions Router.

    Dispatches incoming Splash object tasks to the object class registered
    on the Framework: listing, description/fields/list (admin actions) and
    get/set/delete (sync actions).
    """

    def execute(self, task):
        """Execute Objects Actions.

        :param task: task dict with at least 'name' and 'params' keys
        :return: response dict ('result' flag plus 'data' payload)
        """
        response = self.empty_response(task)
        # READING OF SERVER OBJECT LIST
        if task['name'] == const.__SPL_F_OBJECTS__:
            response['data'] = Framework.getObjects()
            response['result'] = True
            return response
        # Validate Received Task
        if not self.isValidTask(task):
            return response
        # Execute Admin Actions
        if task['name'] in [const.__SPL_F_DESC__, const.__SPL_F_FIELDS__, const.__SPL_F_LIST__]:
            return self.doAdminActions(task)
        # Execute Sync Actions
        if task['name'] in [const.__SPL_F_GET__, const.__SPL_F_SET__, const.__SPL_F_DEL__]:
            return self.doSyncActions(task)
        # Wrong Request Task
        Framework.log().error("Object Router - Requested task not found => " + task['name'])
        return response

    def doAdminActions(self, task):
        """Execute Admin Objects Actions (describe, fields, objects list)."""
        response = self.empty_response(task)
        # Load Object Class
        ws_object = Framework.getObject(task["params"]['type'])
        # READING OF OBJECT DESCRIPTION
        if task['name'] == const.__SPL_F_DESC__:
            if ws_object:
                response['data'] = ws_object.description()
                response['result'] = bool(response['data'].__len__())
        # READING OF OBJECT FIELDS
        if task['name'] == const.__SPL_F_FIELDS__:
            if ws_object:
                response['data'] = ws_object.fields()
                response['result'] = bool(response['data'].__len__())
        # READING OF OBJECTS LIST
        if task['name'] == const.__SPL_F_LIST__:
            # Filters and pagination params are optional in the request;
            # catch only KeyError (a bare 'except:' would also swallow
            # SystemExit/KeyboardInterrupt).
            try:
                filters = task["params"]["filters"]
            except KeyError:
                filters = None
            try:
                parameters = task["params"]["params"]
            except KeyError:
                parameters = None
            if ws_object:
                response['data'] = ws_object.objectsList(filters, parameters)
                response['result'] = bool(response['data'].__len__())
        return response

    def doSyncActions(self, task):
        """Execute Sync Objects Actions (read, write, delete one object)."""
        from splashpy.componants.validator import Validator
        response = self.empty_response(task)
        # Load Object Class
        ws_object = Framework.getObject(task["params"]['type'])
        ws_object_id = task["params"]['id']
        # Verify Object Id
        if not Validator.isValidObjectId(ws_object_id):
            return response
        # READING OF OBJECT DATA
        if task['name'] == const.__SPL_F_GET__:
            ws_fields = task["params"]['fields']
            if ws_object and Validator.isValidObjectFieldsList(ws_fields):
                response['data'] = ws_object.get(ws_object_id, ws_fields)
                response['result'] = (response['data'] != False)
        # WRITING OF OBJECT DATA
        if task['name'] == const.__SPL_F_SET__:
            ws_fields = task["params"]['fields']
            if ws_object and Validator.isValidObjectFieldsList(ws_fields):
                # Take Lock for this object => No Commit Allowed for this Object
                ws_object.lock(ws_object_id)
                # Write Data on local system
                response['data'] = ws_object.set(ws_object_id, ws_fields)
                # Release Lock for this object
                ws_object.unlock(ws_object_id)
                response['result'] = (response['data'] != False)
        # DELETE OBJECT
        if task['name'] == const.__SPL_F_DEL__:
            if ws_object:
                # Take Lock for this object => No Commit Allowed for this Object
                ws_object.lock(ws_object_id)
                response['data'] = ws_object.delete(ws_object_id)
                response['result'] = response['data']
        return response

    @staticmethod
    def isValidTask(task):
        """Verify Received Task: params must be an iterable carrying a
        known object 'type'. Returns True, or the (falsy) result of the
        error logger."""
        # Verify Requested Object Type is Available
        if not hasattr(task["params"], '__iter__'):
            return Framework.log().error("Object Router - Missing Task Parameters... ")
        # Verify Requested Object Type is Available
        if 'type' not in task["params"]:
            return Framework.log().error("Object Router - Missing Object Type... ")
        # Verify Requested Object Type is Valid
        if not task["params"]['type'] in Framework.getObjects():
            return Framework.log().error("Object Router - Object Type is Invalid... ")
        return True
|
<gh_stars>0
import { NextFSDirectory } from './NextFSDirectory';
import { NextFSFile } from './NextFSFile';
import { NextFSObject } from './NextFSObject';
/**
 * File-system manager facade.
 *
 * NOTE(review): every method below is an unimplemented stub resolving to
 * null/false — presumably placeholders for a concrete backend; confirm
 * before relying on any of them.
 */
export class NextFSManager {
    /** Resolve the object (file or directory) at `path`. Stub: returns null. */
    public async get(path: string): Promise<NextFSObject> {
        return null;
    }
    /** Whether anything exists at `path`. Stub: returns false. */
    public async exists(path: string): Promise<boolean> {
        return false;
    }
    /** Create a file at `path`. Stub: returns null. */
    public async createFile(path: string): Promise<NextFSObject> {
        return null;
    }
    /** Create a directory at `path`. Stub: returns null. */
    public async createDirectory(path: string): Promise<NextFSObject> {
        return null;
    }
    /** Delete the object at `path`. Stub: returns false. */
    public async delete(path: string): Promise<boolean> {
        return false;
    }
    /** Move the object at `path` to `newPath`. Stub: returns false. */
    public async move(path: string, newPath: string): Promise<boolean> {
        return false;
    }
    /** Copy the object at `path` to `newPath`. Stub: returns false. */
    public async copy(path: string, newPath: string): Promise<boolean> {
        return false;
    }
    /** Rename the object at `path` to `newName`. Stub: returns false. */
    public async rename(path: string, newName: string): Promise<boolean> {
        return false;
    }
    /**
     * Upload `file` content to `path`. Stub: returns false.
     * NOTE(review): static while every other method is an instance method —
     * confirm this asymmetry is intended.
     */
    public static async upload(path: string, file: string | Buffer): Promise<boolean> {
        return false;
    }
}
|
<reponame>hugleMr/Eazax-cases<gh_stars>0
const { ccclass, property, executionOrder } = cc._decorator;
/**
 * Post-processing: renders a camera into a RenderTexture and displays the
 * result on a target sprite.
 * @author 陈皮皮 (ifaswind)
 * @version 20211128
 * @see PostProcessing.ts https://gitee.com/ifaswind/eazax-ccc/blob/master/components/effects/PostProcessing.ts
 */
@ccclass
@executionOrder(-1)
export default class PostProcessing extends cc.Component {

    @property({ type: cc.Camera, tooltip: CC_DEV && '输入摄像机' })
    protected camera: cc.Camera = null;

    @property({ type: cc.Sprite, tooltip: CC_DEV && '输出目标精灵' })
    protected targetSprite: cc.Sprite = null;

    /**
     * Output texture the camera renders into.
     */
    protected texture: cc.RenderTexture = null;

    /**
     * Lifecycle: node loaded.
     */
    protected onLoad() {
        this.init();
        this.registerEvent();
    }

    /**
     * Lifecycle: node destroyed.
     */
    protected onDestroy() {
        this.unregisterEvent();
        this.release();
    }

    /**
     * Register event listeners.
     */
    protected registerEvent() {
        cc.Canvas.instance.node.on(cc.Node.EventType.SIZE_CHANGED, this.onCanvasSizeChanged, this);
    }

    /**
     * Unregister event listeners.
     */
    protected unregisterEvent() {
        cc.Canvas.instance.node.off(cc.Node.EventType.SIZE_CHANGED, this.onCanvasSizeChanged, this);
    }

    /**
     * Initialize the render-to-texture pipeline.
     */
    protected init() {
        // Create and initialize the RenderTexture at screen size
        const texture = this.texture = new cc.RenderTexture(),
            screenSize = cc.view.getVisibleSizeInPixel();
        texture.initWithSize(screenSize.width, screenSize.height);
        // Render the camera output onto the target texture
        this.camera.targetTexture = texture;
        // Build a sprite frame from the texture and assign it to the sprite
        const sprite = this.targetSprite;
        sprite.spriteFrame = new cc.SpriteFrame(texture);
        // Flip on the Y axis (render textures come out upside down);
        // done via a negative scale because setFlipY did not work here:
        // texture.setFlipY(true); // not working
        sprite.node.scaleY = -Math.abs(sprite.node.scaleY);
    }

    /**
     * Release resources created by this component.
     */
    protected release() {
        this.camera.destroy();
        this.texture.destroy();
    }

    /**
     * Canvas size-changed callback: keep the texture matched to the screen.
     */
    protected onCanvasSizeChanged() {
        const screenSize = cc.view.getVisibleSizeInPixel();
        this.texture.updateSize(screenSize.width, screenSize.height);
    }
}
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ic_snooze_outline = void 0;
var ic_snooze_outline = {
"viewBox": "0 0 24 24",
"children": [{
"name": "path",
"attribs": {
"d": "M0 0h24v24H0V0z",
"fill": "none"
},
"children": []
}, {
"name": "path",
"attribs": {
"d": "M9 11h3.63L9 15.2V17h6v-2h-3.63L15 10.8V9H9v2zm7.056-7.654l1.282-1.535 4.607 3.85-1.28 1.54zM3.336 7.19l-1.28-1.536L6.662 1.81l1.28 1.536zM12 6c3.86 0 7 3.14 7 7s-3.14 7-7 7-7-3.14-7-7 3.14-7 7-7m0-2c-4.97 0-9 4.03-9 9s4.03 9 9 9 9-4.03 9-9-4.03-9-9-9z"
},
"children": []
}]
};
exports.ic_snooze_outline = ic_snooze_outline;
|
// Root Vue instance mounted on #home; exposes `message` to the template.
let app = new Vue({
    el: "#home",
    data: {
        message: "REFACE",
    },
});
|
python test_widerface_40_100.py --gpu=3 --network=net5 --thresh=0.5 --prefix=model/resnet_retina/resnet_retina --epoch=999 --output=./test_out_dic/resnet_retina_epoch_999
|
# frozen_string_literal: true

require 'spec_helper'

# Specs for ScenarioAttachment: validity of source-scenario metadata
# (all-or-nothing), per-scenario uniqueness of the attachment key, and
# #update_or_remove_metadata behaviour.
describe ScenarioAttachment do
  let(:scenario) { FactoryBot.create(:scenario) }
  let(:source) { FactoryBot.create(:scenario) }

  # The full set of source-scenario metadata attributes.
  let(:metadata) do
    {
      source_scenario_id: source.id,
      source_saved_scenario_id: 1,
      source_scenario_title: 'a',
      source_dataset_key: 'nl',
      source_end_year: 2050
    }
  end

  context 'with all source scenario attributes set' do
    subject do
      described_class.new(
        metadata.merge(key: 'interconnector_1_price_curve')
      )
    end

    it { is_expected.to be_valid }
  end

  # A partial metadata set is invalid: either all attributes or none.
  context 'with some source scenario attributes set' do
    subject do
      described_class.new(
        key: 'interconnector_1_price_curve',
        source_dataset_key: 'nl',
        source_end_year: 2050
      )
    end

    it { is_expected.not_to be_valid }
  end

  context 'with no source scenario attributes set' do
    subject do
      described_class.new(
        key: 'interconnector_1_price_curve'
      )
    end

    it { is_expected.to be_valid }
  end

  # The key must be unique per scenario, not globally.
  context 'when an interconnector_1_price curve attachment already exists' do
    before do
      described_class.create!(
        scenario_id: scenario.id,
        key: 'interconnector_1_price_curve'
      )
    end

    context 'with a new "interconnector_1_price_curve" attachment' do
      let(:attachment) do
        described_class.new(
          scenario_id: scenario.id,
          key: 'interconnector_1_price_curve'
        )
      end

      it 'is invalid' do
        expect(attachment).not_to be_valid
      end

      it 'has an error on key' do
        attachment.valid?
        expect(attachment.errors[:key])
          .to include('already exists for this scenario')
      end
    end

    context 'with a new "interconnector_1_price_curve" attachment for a ' \
            'different scenario' do
      it 'is valid' do
        attachment = described_class.new(
          scenario_id: FactoryBot.create(:scenario).id,
          key: 'interconnector_1_price_curve'
        )
        expect(attachment).to be_valid
      end
    end

    context 'with a new "interconnector_2_price_curve" attachment' do
      it 'is valid' do
        attachment = described_class.new(
          scenario_id: scenario.id,
          key: 'interconnector_2_price_curve'
        )
        expect(attachment).to be_valid
      end
    end
  end

  describe '#update_or_remove_metadata' do
    context 'when all metadata is set' do
      let(:attachment) do
        described_class.new(
          metadata.merge(
            scenario_id: scenario.id,
            key: 'interconnector_1_price_curve'
          )
        )
      end

      it 'removes the metadata when no new metadata is supplied' do
        expect { attachment.update_or_remove_metadata({}) }
          .to change(attachment, :source_scenario?)
          .from(true).to(false)
      end

      it 'updates the metadata when new metadata is given' do
        expect { attachment.update_or_remove_metadata(metadata) }
          .not_to change(attachment, :source_scenario?)
          .from(true)
      end
    end

    context 'when no metadata is set' do
      let(:attachment) do
        described_class.new(
          key: 'interconnector_1_price_curve',
          scenario_id: scenario.id
        )
      end

      it 'does nothing when no new metadata is supplied' do
        expect { attachment.update_or_remove_metadata({}) }
          .not_to change(attachment, :source_scenario?)
          .from(false)
      end

      it 'updates the metadata when new metadata is given' do
        expect { attachment.update_or_remove_metadata(metadata) }
          .to change(attachment, :source_scenario?)
          .from(false).to(true)
      end
    end
  end
end
|
package orbidder

import (
    "encoding/json"
    "github.com/eugene-fedorenko/prebid-server/openrtb_ext"
    "testing"
)

// This file actually intends to test static/bidder-params/orbidder.json
//
// These also validate the format of the external API: request.imp[i].ext.orbidder

// TestValidParams makes sure that the orbidder schema accepts all imp.ext fields which we intend to support.
func TestValidParams(t *testing.T) {
    validator, err := openrtb_ext.NewBidderParamsValidator("../../static/bidder-params")
    if err != nil {
        t.Fatalf("Failed to fetch the json-schemas. %v", err)
    }
    for _, validParam := range validParams {
        if err := validator.Validate(openrtb_ext.BidderOrbidder, json.RawMessage(validParam)); err != nil {
            t.Errorf("Schema rejected orbidder params: %s", validParam)
        }
    }
}

// TestInvalidParams makes sure that the orbidder schema rejects all the imp.ext fields we don't support.
func TestInvalidParams(t *testing.T) {
    validator, err := openrtb_ext.NewBidderParamsValidator("../../static/bidder-params")
    if err != nil {
        t.Fatalf("Failed to fetch the json-schemas. %v", err)
    }
    for _, invalidParam := range invalidParams {
        if err := validator.Validate(openrtb_ext.BidderOrbidder, json.RawMessage(invalidParam)); err == nil {
            t.Errorf("Schema allowed unexpected params: %s", invalidParam)
        }
    }
}

// validParams: placementId and accountId are required strings; bidfloor is an optional number.
var validParams = []string{
    `{"placementId":"123","accountId":"orbidder-test"}`,
    `{"placementId":"123","accountId":"orbidder-test","bidfloor":0.5}`,
}

// invalidParams: wrong types, wrong key casing, missing required keys, non-object values.
var invalidParams = []string{
    ``,
    `null`,
    `true`,
    `5`,
    `4.2`,
    `[]`,
    `{}`,
    `{"placement_id":"123"}`,
    `{"placementId":123}`,
    `{"placementId":"123"}`,
    `{"account_id":"orbidder-test"}`,
    `{"accountId":123}`,
    `{"accountId":"orbidder-test"}`,
    `{"placementId":123,"account_id":"orbidder-test"}`,
    `{"placementId":"123","account_id":123}`,
    `{"placementId":"123","accountId":"orbidder-test","bidfloor":"0.5"}`,
    `{"placementId":"123","bidfloor":"0.5"}`,
    `{"accountId":"orbidder-test","bidfloor":"0.5"}`,
}
|
# Greet the user and ask which shift (turno) they want.
echo 'Bem vindo Usuario'
echo
echo
echo 'Porfavor digite a opcao do turno'
echo
echo '[1] Manha'
echo '[2] Tarde'
echo '[3] Noite'
echo
read turno
# BUG FIX: '[ $turno='1' ]' tested a single non-empty string ("<value>=1"),
# which is always true, so every answer fell into the first branch; the
# second test was also missing the required space before ']'. Use a proper
# string comparison with the variable quoted.
if [ "$turno" = '1' ]; then
    echo 'Camila pode atender na parte da manha'
elif [ "$turno" = '2' ]; then
    echo 'Joao pode atender na parte da tarde'
else
    echo 'Marcos pode atender na parte da noite'
fi
sleep 4
echo 'consulta marcada!'
|
#!/bin/bash

# Installs the chia timelord components (vdf_client / vdf_bench) by building
# chiavdf from source inside an already-created virtual environment.
echo "This requires the chia python virtual environment."
echo "Execute '. ./activate' if you have not already, before running."
echo "This version of Timelord requires CMake 3.14+ to compile vdf_client"

# e.g. "python3.9" — reused for dev headers and site-packages paths below.
PYTHON_VERSION=$(python -c 'import sys; print(f"python{sys.version_info.major}.{sys.version_info.minor}")')
echo "Python version: $PYTHON_VERSION"

export BUILD_VDF_BENCH=Y # Installs the useful vdf_bench test of CPU squaring speed
# Expected location of vdf_client inside the installed chiavdf distribution.
THE_PATH=$(python -c 'import pkg_resources; print( pkg_resources.get_distribution("chiavdf").location)' 2> /dev/null)/vdf_client
# Pinned chiavdf requirement string taken from setup.py's dependency list.
CHIAVDF_VERSION=$(python -c 'from setup import dependencies; t = [_ for _ in dependencies if _.startswith("chiavdf")][0]; print(t)')

# On Ubuntu older than 20.04 the apt cmake is too old; install via snap instead.
ubuntu_cmake_install() {
UBUNTU_PRE_2004=$(python -c 'import subprocess; process = subprocess.run(["lsb_release", "-rs"], stdout=subprocess.PIPE); print(float(process.stdout) < float(20.04))')
if [ "$UBUNTU_PRE_2004" = "True" ]; then
echo "Ubuntu version is pre 20.04LTS - installing CMake with snap"
sudo apt-get install snap -y
sudo apt-get remove --purge cmake -y
hash -r
sudo snap install cmake --classic
else
echo "Ubuntu 20.04LTS and newer support CMake 3.16+"
sudo apt-get install cmake -y
fi
}

# Symlink the built vdf_bench binary from site-packages into the current dir.
symlink_vdf_bench() {
if [ ! -e vdf_bench ] && [ -e venv/lib/"$1"/site-packages/vdf_bench ]; then
echo ln -s venv/lib/"$1"/site-packages/vdf_bench
ln -s venv/lib/"$1"/site-packages/vdf_bench .
elif [ ! -e venv/lib/"$1"/site-packages/vdf_bench ]; then
echo "ERROR: Could not find venv/lib/$1/site-packages/vdf_bench"
else
echo "./vdf_bench link exists"
fi
}

# Detect the platform (Debian-family Linux vs macOS) to pick the install path.
if [ "$(uname)" = "Linux" ] && type apt-get; then
UBUNTU_DEBIAN=true
echo "Found Ubuntu/Debian"
elif [ "$(uname)" = "Darwin" ]; then
MACOS=true
echo "Found MacOS"
fi

# Skip the build entirely when vdf_client is already installed.
if [ -e "$THE_PATH" ]; then
echo "$THE_PATH"
echo "vdf_client already exists, no action taken"
else
if [ -e venv/bin/python ] && test $UBUNTU_DEBIAN; then
echo "Installing chiavdf from source on Ubuntu/Debian"
# If Ubuntu version is older than 20.04LTS then upgrade CMake
ubuntu_cmake_install
# Install remaining needed development tools - assumes venv and prior run of install.sh
echo apt-get install libgmp-dev libboost-python-dev lib"$PYTHON_VERSION"-dev libboost-system-dev -y
sudo apt-get install libgmp-dev libboost-python-dev lib"$PYTHON_VERSION"-dev libboost-system-dev -y
echo venv/bin/python -m pip install --force --no-binary chiavdf "$CHIAVDF_VERSION"
venv/bin/python -m pip install --force --no-binary chiavdf "$CHIAVDF_VERSION"
symlink_vdf_bench "$PYTHON_VERSION"
elif [ -e venv/bin/python ] && test $MACOS && brew info boost | grep -q 'Not installed'; then
echo "Installing chiavdf requirement boost for MacOS"
brew install boost
echo "installing chiavdf from source"
# User needs to provide required packages
echo venv/bin/python -m pip install --force --no-binary chiavdf "$CHIAVDF_VERSION"
venv/bin/python -m pip install --force --no-binary chiavdf "$CHIAVDF_VERSION"
symlink_vdf_bench "$PYTHON_VERSION"
elif [ -e venv/bin/python ]; then
echo "installing chiavdf from source"
# User needs to provide required packages
echo venv/bin/python -m pip install --force --no-binary chiavdf "$CHIAVDF_VERSION"
venv/bin/python -m pip install --force --no-binary chiavdf "$CHIAVDF_VERSION"
symlink_vdf_bench "$PYTHON_VERSION"
else
echo "no venv created yet, please run install.sh"
fi
fi

echo "To estimate a timelord on this CPU try './vdf_bench square_asm 400000' for an ips estimate"
|
# Train a bag-of-words + logistic-regression sentiment classifier.
import pandas as pd
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.linear_model import LogisticRegression

# Read the data (expects 'review' and 'sentiment' columns in data.csv)
data = pd.read_csv('data.csv')

# Convert the data to a matrix of feature vectors
# (min_df=0 keeps all terms; lowercase=False preserves case distinctions)
vectorizer = CountVectorizer(min_df=0, lowercase=False)
x = vectorizer.fit_transform(data['review'])

# Train the logistic regression model on the vectorized reviews
model = LogisticRegression()
model.fit(x, data['sentiment'])
|
import sys
from settings import YamlReader
from simulation import Pdb2gmx


def main():
    """CLI entry point: run the Pdb2gmx step configured in a YAML properties
    file for a given system/step, converting a PDB structure into GROMACS
    .gro/.top outputs."""
    # Six positional arguments are required (argv[0] is the script name).
    if len(sys.argv) != 7:
        print("Usage: python setup_simulation.py <system> <step> <properties_file> <input_structure_pdb_path> <output_gro_path> <output_top_zip_path>")
        return
    system = sys.argv[1]
    step = sys.argv[2]
    properties_file = sys.argv[3]
    # Properties for this step only, read from the per-system YAML config.
    prop = YamlReader(properties_file, system).get_prop_dic()[step]
    Pdb2gmx(input_structure_pdb_path=sys.argv[4],
            output_gro_path=sys.argv[5],
            output_top_zip_path=sys.argv[6],
            properties=prop).launch()


if __name__ == '__main__':
    main()
|
#!/bin/bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
set -e
set -x

# Create the system group 'admin' (-f: succeed even if it already exists).
function add_admin_group() {
    groupadd -f -r admin
}

# Add the 'cloud' user to the admin group, prepare its ~/.ssh dir and set
# the root password.
function configure_user() {
    usermod -a -G admin cloud
    mkdir -p /home/cloud/.ssh
    chmod 700 /home/cloud/.ssh
    echo "root:password" | chpasswd
}

# Append serial (ttyS0) and hypervisor (hvc0) console gettys to /etc/inittab.
function configure_inittab() {
    # Fix inittab
    cat >> /etc/inittab << EOF
0:2345:respawn:/sbin/getty -L 115200 ttyS0 vt102
vc:2345:respawn:/sbin/getty 38400 hvc0
EOF
}

# Top-level driver: console config, admin group, user setup.
function configure_login() {
    configure_inittab
    add_admin_group
    configure_user
}

# When sourced, 'return' succeeds and only the functions are defined;
# when executed directly, 'return' fails and configure_login runs.
return 2>/dev/null || configure_login
|
#!/bin/bash
# Run all wallet resets in parallel: KMD first, then one reset per asset
# chain reported by ./listassetchains (each launched in the background).
cd "${BASH_SOURCE%/*}" || exit
./walletresetkmd.sh &
./listassetchains | while read chain; do
    ./walletresetac.sh ${chain} &
done
|
#!/bin/bash
# Copyright
# 2018 Johns Hopkins University (Author: Jesus Villalba)
# 2017 David Snyder
# 2017 Johns Hopkins University (Author: Daniel Garcia-Romero)
# 2017 Johns Hopkins University (Author: Daniel Povey)
# Apache 2.0.
#
. ./cmd.sh
. ./path.sh
set -e
stage=1
config_file=default_config.sh
. parse_options.sh || exit 1;
. $config_file
xvector_dir=exp/xvectors/$nnet_name
if [ $stage -le 1 ]; then
# Extract xvectors for training LDA/PLDA
for name in sre_tel sre_phnmic voxceleb
do
steps_kaldi_xvec/extract_xvectors.sh --cmd "$train_cmd --mem 12G" --nj 60 \
$nnet_dir data/${name}_combined \
$xvector_dir/${name}_combined
done
fi
if [ $stage -le 2 ]; then
# Extracts x-vectors for evaluation
for name in sre16_eval_enroll sre16_eval_test sre16_major sre16_minor \
sitw_train_dev sitw_dev_enroll sitw_dev_test sitw_eval_enroll sitw_eval_test \
sre18_dev_unlabeled sre18_dev_enroll_cmn2 sre18_dev_test_cmn2 \
sre18_eval_enroll_cmn2 sre18_eval_test_cmn2 \
sre18_eval_enroll_vast sre18_eval_test_vast
do
steps_kaldi_xvec/extract_xvectors.sh --cmd "$train_cmd --mem 6G" --nj 40 \
$nnet_dir data/$name \
$xvector_dir/$name
done
for name in sre16_dev_enroll sre16_dev_test sre18_dev_enroll_vast sre18_dev_test_vast
do
steps_kaldi_xvec/extract_xvectors.sh --cmd "$train_cmd --mem 6G" --nj 1 \
$nnet_dir data/$name \
$xvector_dir/$name
done
fi
if [ $stage -le 3 ]; then
mkdir -p $xvector_dir/train_combined
cat $xvector_dir/{sre_phnmic,voxceleb}_combined/xvector.scp > $xvector_dir/train_combined/xvector.scp
fi
exit
|
package com.genersoft.iot.vmp.vmanager.service;
import com.alibaba.fastjson.JSONObject;
/**
 * Play / on-demand stream handling service (点播处理).
 */
public interface IPlayService {

    /**
     * Callback invoked when the media server reports a publish event for a
     * playback (recorded) stream.
     *
     * @param response  media-server response payload (renamed from the typo "resonse";
     *                  parameter names are not part of the interface contract)
     * @param deviceId  device identifier
     * @param channelId channel identifier on the device
     * @param uuid      request correlation id
     */
    void onPublishHandlerForPlayBack(JSONObject response, String deviceId, String channelId, String uuid);

    /**
     * Callback invoked when the media server reports a publish event for a
     * live play stream.
     *
     * @param response  media-server response payload
     * @param deviceId  device identifier
     * @param channelId channel identifier on the device
     * @param uuid      request correlation id
     */
    void onPublishHandlerForPlay(JSONObject response, String deviceId, String channelId, String uuid);
}
|
#!/bin/bash -e

# Profile written earlier in the CycleCloud bootstrap sequence.
cyclecloud_profile=/tmp/cyclecloud.profile

# Source the profile if present; tolerate a missing file inside the set +e window.
set +e
source $cyclecloud_profile
set -e

# Read the custom script URI from jetpack config; '0' is the "not set" default.
custom_script_uri=$(jetpack config lsf.custom_script_uri 0)
# FIX: quote the expansion — with the unquoted form, an empty value or a value
# containing whitespace makes the [ ... ] test a syntax error under set -e.
if [ "$custom_script_uri" == "0" ]; then
    echo lsf.custom_script_uri is not defined, exiting.
    exit
fi

# Download and execute the custom script.
echo running "$custom_script_uri"...
curl -L "$custom_script_uri" > custom_script_uri_tmp.sh
chmod +x custom_script_uri_tmp.sh
./custom_script_uri_tmp.sh
|
"""
Django settings for tr_sys project.
Generated by 'django-admin startproject' using Django 1.11.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '<KEY>'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['arsserver','localhost', 'ars.transltr.io', 'ars-dev.transltr.io']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'corsheaders',
# 'channels',
'tr_ars.apps.ARSConfig',
#'tr_ars.default_ars_app.ars_app.AppConfig',
'tr_ara_aragorn.aragorn_app.AppConfig',
'tr_ara_arax.arax_app.AppConfig',
'tr_ara_bte.bte_app.AppConfig',
'tr_ara_explanatory.explanatory_app.AppConfig',
'tr_ara_improving.improving_app.AppConfig',
'tr_ara_ncats.ncats_app.AppConfig',
'tr_ara_robokop.robokop_app.AppConfig',
'tr_ara_unsecret.unsecret_app.AppConfig',
'tr_ara_wfr.wfr_app.AppConfig',
'tr_kp_genetics.genetics_app.AppConfig',
'tr_kp_molecular.molecular_app.AppConfig',
'tr_kp_cam.cam_app.AppConfig',
'tr_kp_textmining.textmining_app.AppConfig',
'tr_kp_openpredict.openpredict_app.AppConfig',
'tr_kp_cohd.cohd_app.AppConfig',
'tr_ara_aragorn_exp.aragorn_exp_app.AppConfig',
'tr_kp_chp.chp_app.AppConfig',
'tr_kp_icees.icees_app.AppConfig',
'tr_kp_icees_dili.icees_dili_app.AppConfig',
'django_celery_results',
'markdownify',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'corsheaders.middleware.CorsMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'tr_sys.urls'
CORS_ORIGIN_ALLOW_ALL = True # for further customization see https://pypi.org/project/django-cors-headers/
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'tr_ars','templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'tr_sys.wsgi.application'
# Channels
ASGI_APPLICATION = 'tr_sys.routing.application'
CHANNEL_LAYERS = {
'default': {
'BACKEND': 'channels_redis.core.RedisChannelLayer',
'CONFIG': {
"hosts": [('127.0.0.1', 6379)],
},
},
}
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
DJANGO_LOG_LEVEL = 'DEBUG'
LOGGING = {
'formatters': {
'simple': {
'format': '{levelname} {asctime} {module} {process:d} {thread:d} {message}',
'style': '{',
}
},
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'formatter': 'simple'
}
},
'root': {
'handlers': ['console'],
'level': 'DEBUG',
},
'loggers': {
'tr_ars.tasks': {
'level': 'DEBUG',
'handlers': ['console'],
},
'tr_ars.default_ars_app.api': {
'level': 'DEBUG',
'handlers': ['console']
}
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# markdowninfy
MARKDOWNIFY_STRIP = False
MARKDOWNIFY_WHITELIST_TAGS = {
'a', 'p',
'h1', 'h2', 'h3','h4', 'h5', 'h6', 'h7',
'ul', 'li', 'span',
}
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, "static"),
)
# Celery settings
CELERY_RESULT_BACKEND = 'django-db'
CELERY_CACHE_BACKEND = 'django-cache'
CELERY_BROKER_URL = 'amqp://localhost'
# Other important shared settings
DATA_UPLOAD_MAX_MEMORY_SIZE=1073741824
USE_CELERY = True
DEFAULT_HOST = 'http://localhost:8000'
|
<filename>app/models/utils.go
package models

import (
    "aahframework.org/aah.v0"
    "aahframework.org/log.v0"
    "github.com/nats-io/go-nats"
)

// Conn is the shared NATS connection, established by ConnectNats.
var (
    Conn *nats.Conn
)

// Response is the generic JSON envelope returned to clients.
type Response struct {
    // MessageJSON - json data for outputting
    Success bool        `json:"success"` // Whether an error occured or not
    Message string      `json:"message"` // The message
    Data    interface{} `json:"data"`    // Extra data, generally it will contain a struct
}

// EditRequest is a generic "set option X of entity ID to value" request.
// TODO: Consider about being more specific about editing requests rather then going with this option.
type EditRequest struct {
    ID     string      `json:"id"`
    Option string      `json:"option"`
    Value  interface{} `json:"value"`
}

// ConnectNats opens the shared NATS connection using the configured URL
// (config key "nats.url", falling back to the NATS default). Intended as an
// aah application event callback; exits the process on failure.
func ConnectNats(_ *aah.Event) {
    conn, err := nats.Connect(aah.AppConfig().StringDefault("nats.url", nats.DefaultURL))
    if err != nil {
        log.Fatalf("error when connecting to nats: %v", err)
    }
    Conn = conn
}

// DisconnectNats closes the shared NATS connection (aah shutdown callback).
func DisconnectNats(_ *aah.Event) {
    Conn.Close()
}
#!/bin/bash
#
# Copyright (C) 2013 Intel Corporation
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of works must retain the original copyright notice, this list
# of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the original copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of Intel Corporation nor the names of its contributors
# may be used to endorse or promote products derived from this work without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors:
# Zhang, GeX <gex.zhang@intel.com>
# Helper functions (func_install / func_launch / func_uninstall) come from Common.
source $(dirname $0)/Common

# Install the widget; abort the test if installation fails.
func_install lifecycle-exit-running-app.wgt
if [ $? -eq 1 ];then
echo "The installation is failed"
exit 1
fi
# Launch it: the widget is expected to exit on its own, so a successful
# (still-running) launch is a test failure.
func_launch lifecycle-exit-running-app.wgt
if [ $? -eq 0 ];then
echo "The widget should be exit"
exit 1
fi
# Clean up and report success.
func_uninstall lifecycle-exit-running-app.wgt
exit 0
|
<gh_stars>100-1000
require 'spec_helper'

# Specs for Travis::RedisPool: metric instrumentation, delegation to a real
# Redis connection, and failure on unknown commands.
describe Travis::RedisPool do
  let(:redis) {Travis::RedisPool.new}
  # Plain Redis client used to observe effects outside the pool.
  let(:unpooled_redis) {Redis.new}

  it "increases the metric for number of operations" do
    expect {
      redis.get('test')
    }.to change {Metriks.timer('redis.operations').count}.by(1)
  end

  it "forwards operations to redis" do
    redis.set("some-key", 100)
    unpooled_redis.get('some-key').should == "100"
  end

  it "fails when a non-supported operation is called" do
    # NOTE(review): bare raise_error without an error class — presumably
    # NoMethodError is expected here; consider tightening the matcher.
    expect {
      redis.setssss
    }.to raise_error
  end

  it "adds a wait time for the pool checkout" do
    expect {
      redis.get('test')
    }.to change{Metriks.timer('redis.pool.wait').count}.by(1)
  end
end
|
#!/bin/bash
# Start the quickstart stack in detached mode via docker-compose.
docker-compose -f docker/quickstart.yml up -d
|
def keep_track():
    """Maintain three counters inside an endless loop.

    NOTE(review): the loop body is a stub (``...``); as written, x, y and z
    are initialised but never updated or returned — confirm intended use.
    """
    # All three counters start from zero.
    x = y = z = 0
    while True:
        # do something to x, y and z
        ...
|
# Run the PDFWriter test playground binary.
# NOTE(review): flags assumed from usage — presumably -b sets the base/output
# path and -c selects the test category ("PDF ObjectContext"); confirm against
# the playground's usage text.
./build/PDFWriterTestPlayground/PDFWriterTestPlayground -b . -c PDF ObjectContext
|
/**
 * @class Router
 * @classdesc Routes all incoming messages to the consumers
 */
declare class Router {
    // Internal state. No types are declared here, so these remain opaque
    // to consumers of this declaration file.
    private config;
    private notifications;
    private subscriptions;
    constructor(params: any);
    /**
     * Entry point for all incoming messages
     * @param {String} type - Type of incoming message
     * @param {Object} message - Message to route
     */
    onMessage(type: string, message: any): void;
    /**
     * Subscribe for events
     * @param sid - Subscription identifier.
     * @param entity - Entity whose events are of interest — TODO confirm
     *     exact shape against the implementation.
     * @returns Promise resolving once the subscription is registered.
     */
    subscribe(sid: string, entity: any): Promise<void>;
    /**
     * Unsubscribe from events
     * @param sid - Subscription identifier previously passed to `subscribe`.
     * @param entity - Entity to stop receiving events for.
     * @returns Promise resolving once the subscription is removed.
     */
    unsubscribe(sid: string, entity: any): Promise<void>;
    /**
     * Handle transport establishing event
     * If we have any subscriptions - we should check object for modifications
     * @param isConnected - true when the transport (re)connects, false on drop.
     */
    onConnectionStateChanged(isConnected: boolean): void;
}
export { Router };
export default Router;
|
import copy
from enum import Enum
from ayeaye.connectors import connector_factory
from ayeaye.connectors.models_connector import ModelsConnector
from ayeaye.connectors.multi_connector import MultiConnector
from ayeaye.connectors.placeholder import PlaceholderDataConnector
class Connect:
    """
    Connect to a dataset or modelset.

    A dataset is a grouping of related data. What makes data 'related' will be down to the
    subject-domain. Datasets have a type - it could be a JSON document, a CSV file or a database.

    A modelset is one or more :class:`Model`(s).

    The main responsibility of :class:`Connect` is to provide a concrete subclass of
    :class:`DataConnector` or :class:`ModelConnector`. :class:`DataConnector` in turn provides
    access to operations on that dataset type. e.g. read, write etc.

    :class:`Connect` can be used standalone (see below) but is really designed to be used as a
    class variable in an :class:`ayeaye.Model`:

    ```
    class FavouriteColours(ayeaye.Model):
        favourite_colours = ayeaye.Connect(engine_url='csv://data/favourite_colours.csv')
    ```

    An instance of :class:`Connect` as a class variable in an :class:`ayeaye.Model` is a
    declaration of a model's use of a dataset. In Python, class variables shouldn't be used as
    'dynamic' instance variables so :class:`Connect` is a descriptor
    (https://docs.python.org/3/howto/descriptor.html) and the instantiation of the underlying
    connection (concrete subclass of :class:`DataConnector`) is evaluated on demand.

    It is also possible to use :class:`Connect` in *standalone* mode. This is as a convenience, in
    particular for evaluating datasets, for example in a Jupyter notepad:

    ```
    for row in ayeaye.Connect(engine_url='csv://data/favourite_colours.csv'):
        print(row.colour)
    ```

    For secrets management @see :class:`ConnectorResolver`.
    """

    # Exactly one of these kwargs may be used to select the target dataset/models.
    mutually_exclusive_selectors = ["ref", "engine_url", "models"]

    class ConnectBind(Enum):
        # How this Connect instance is being used; see :method:`connection_bind`.
        MODEL = "MODEL"
        STANDALONE = "STANDALONE"
        NEW = "NEW"

    def __init__(self, **kwargs):
        """
        typical kwargs are 'ref', 'engine_url', 'access' TODO
        """
        # keep the original kwargs so this instance can be cloned or re-constructed later
        self.base_constructor_kwargs = copy.copy(kwargs)
        self._construct(**kwargs)

    def _construct(self, **kwargs):
        """
        setup -- can be called either on true construction or when variables are overlaid on a
        (possibly cloned) instance of :class:`Connect`.
        """
        # :class:`Connect` is responsible for resolving 'ref' into an engine_url via a
        # data catalogue. So 'ref' isn't passed to data type specific connectors (i.e.
        # subclasses of :class:`DataConnector`)

        # these are passed to the data type specific connectors
        self.relayed_kwargs = {**self.base_constructor_kwargs, **kwargs}

        # check construction args are valid
        # mutually exclusive args
        a = [self.relayed_kwargs.get(s) is not None for s in self.mutually_exclusive_selectors]
        mandatory_args_count = sum(a)
        if mandatory_args_count > 1:
            raise ValueError("The kwargs ref, engine_url and models are mutually exclusive.")

        self.ref = self.relayed_kwargs.pop("ref", None)
        self._standalone_connection = None  # see :method:`data`
        self._parent_model = None

    def __repr__(self):
        args = ", ".join([f"{k}={v}" for k, v in self.base_constructor_kwargs.items()])
        return f"<Connect({args})>"

    def update(self, **kwargs):
        """
        Overlay (and overwrite) kwarg on existing instance.

        @returns None
        """
        # reminder to me: I've gone in circles here on returning a copy (factory style) and it's
        # not a good idea
        self._construct(**kwargs)

    def clone(self, **kwargs):
        """
        Overlay (and overwrite) kwarg onto a copy of the existing instance.

        This is typically used when :class:`Connect` objects are being used as class variables to
        refer to the same dataset multiple times in a single model.
        @see :method:`TestModels.test_double_usage`

        @return (instance of :class:`Connect`)
        """
        new_instance = copy.copy(self)
        new_instance._construct(**{**self.relayed_kwargs, **kwargs})
        return new_instance

    def connect_id(self):
        """
        Create an identity reference which is used when examining if separate Connect instances
        are actually referring to the same dataset/models.

        @return: (str)
        """
        for s in self.mutually_exclusive_selectors:
            if s in self.relayed_kwargs:
                # note, self.relayed_kwargs[s] could be a callable. Whatever it is it needs to
                # deterministically cast to a string.
                return f"{s}:{self.relayed_kwargs[s]}"
        return "empty:"

    def __hash__(self):
        return hash(self.connect_id())

    def __eq__(self, other):
        if type(self) is type(other):
            return self.connect_id() == other.connect_id()
        return False

    def __copy__(self):
        c = self.__class__(**self.base_constructor_kwargs)
        c.relayed_kwargs = copy.copy(self.relayed_kwargs)
        return c

    def __get__(self, instance, instance_class):
        if instance is None:
            # class method called.
            # This means `self` is currently an attribute of the class (so NOT an instance
            # variable).
            #
            # Class variables are kind of like constants because they are shared between multiple
            # instances of the class and you shouldn't mutate them so just return self.
            return self

        if self.connection_bind == Connect.ConnectBind.STANDALONE:
            raise ValueError("Attempt to connect as a model when already initiated as standalone")

        ident = id(self)
        if ident not in instance._connections:
            instance._connections[ident] = self._prepare_connection()
            # a Connect belongs to zero or one ayeaye.Model. Link in both directions.
            instance._connections[ident]._parent_model = instance
            self._parent_model = instance

        return instance._connections[ident]

    def __set__(self, instance, new_connection):
        """
        Replace an instance of :class:`ayeaye.Model`'s :class:`ayeaye.Connect` with another
        instance of `Connect`.
        """
        if not isinstance(new_connection, self.__class__):
            my_class = self.__class__.__name__
            raise ValueError(f"Only {my_class} instances can be set")

        self.__init__(**new_connection.relayed_kwargs)
        ident = id(self)
        instance._connections[ident] = self._prepare_connection()

    def _prepare_connection(self):
        """
        Resolve everything apart from secrets needed to access the engine behind this dataset.

        @return: (instance subclass of :class:`DataConnector`) or (None) when resolve not yet
            possible.
        """
        if self.ref is not None:
            raise NotImplementedError(
                ("Sorry! Dataset discovery (looking up engine_url from ref) " "hasn't been written yet.")
            )

        engine_url = self.relayed_kwargs.get("engine_url")
        if callable(engine_url):
            engine_url = engine_url()

        if "models" in self.relayed_kwargs:
            # could be a callable but shouldn't be instantiated yet, ModelsConnector does that
            connector_cls = ModelsConnector
        elif engine_url is None:
            # engine_url not yet available
            connector_cls = PlaceholderDataConnector
        else:
            if isinstance(engine_url, list):
                # compile time list of engine_url strings
                # might be callable or a dict or set in the future
                connector_cls = MultiConnector
            else:
                connector_cls = connector_factory(engine_url)

        # Make an independent copy of relay_kwargs because these come from class variables
        # so could be resolved again under a different context.
        # TODO: when a list of callables is needed to populate MultiConnector for example then make
        # this recursive
        detached_kwargs = {}
        for k, v in self.relayed_kwargs.items():
            if callable(v) and k != "models":
                if k in connector_cls.preserve_callables:
                    # reference to the callable is preserved for the `connector_cls` to call. This
                    # is needed when the `connector_cls` supplies arguments to the callable.
                    detached_kwargs[k] = v
                elif k == "engine_url":
                    # BUG FIX: a callable engine_url was already resolved above (to choose
                    # `connector_cls`). Re-use that result instead of invoking the callable a
                    # second time, which previously both repeated any side effects and could
                    # hand the connector a different engine_url than the one used to pick
                    # `connector_cls`.
                    detached_kwargs[k] = copy.deepcopy(engine_url)
                else:
                    # any kwarg arg could be a simple callable (i.e. it's called without any
                    # arguments). The callable isn't expected to have __deep_copy__ method. It's
                    # time to use the results of the callable so call it now. Note, the callable
                    # is left in place in `relay_kwargs`.
                    detached_kwargs[k] = v()
            else:
                detached_kwargs[k] = copy.deepcopy(v)

        connector = connector_cls(**detached_kwargs)
        connector._connect_instance = self
        return connector

    @property
    def connection_bind(self):
        """
        Raises a ValueError if an indeterminate state is found.

        @return: (ConnectBind)

        @see class's docstring.
        An instance of :class:`Connect` can be a class variable for an :class:`ayeaye.Model`
        or
        Standalone mode
        or
        Not yet determined
        """
        if self._parent_model is None and self._standalone_connection is None:
            return Connect.ConnectBind.NEW

        if self._standalone_connection is not None:
            return Connect.ConnectBind.STANDALONE

        if self._parent_model is not None:
            return Connect.ConnectBind.MODEL

        msg = (
            "Parent already attached and standalone connection is present. This"
            " shouldn't ever happen. Please let us know how it did!"
        )
        raise ValueError(msg)

    def connect_standalone(self):
        """
        Make a standalone Connect.

        Connect is normally used as part of an class:`ayeaye.Model` and will connect to the target
        dataset on demand. It's also possible to use Connect as a standalone instance which
        proxies to the target dataset's instance. The standalone version is also stood up on demand
        but it can be done explicitly with this method if Connect is short cutting the as-a-proxy
        incorrectly. See :method:`__getattr__`.
        """
        if self.connection_bind == Connect.ConnectBind.MODEL:
            raise ValueError("Attempt to connect as standalone when already bound to a model")

        if self.connection_bind == Connect.ConnectBind.NEW:
            self._standalone_connection = self._prepare_connection()

    def __getattr__(self, attr):
        """
        proxy through to subclass of :class:`DataConnector` when used as a standalone Connect
        (i.e. not a class variable on :class:`ayeaye.Model`).
        """
        if self.connection_bind == Connect.ConnectBind.MODEL:
            cls_name = self.__class__.__name__
            attrib_error_msg = f"'{cls_name}' object has no attribute '{attr}'"
            raise AttributeError(attrib_error_msg)

        # Short cut to proxy
        # avoid instantiating the target DataConnector if the attribute that is being accessed
        # is known to Connect.
        # TODO - maybe make a list of permitted non-proxy attribs because the target DataConnector
        # might do things to certain attribs on construction so the real version would differ.
        # e.g. ConnectorResolver with engine_urls
        if self.connection_bind == Connect.ConnectBind.NEW and attr in self.relayed_kwargs:
            return self.relayed_kwargs[attr]

        self.connect_standalone()
        return getattr(self._standalone_connection, attr)

    def __len__(self):
        raise NotImplementedError("TODO")

    def __getitem__(self, key):
        raise NotImplementedError("TODO")

    def __iter__(self):
        """
        more intuitive use of the data behind this Connect. i.e. proxy to a DataConnector.
        e.g.
        ...
        for record in Connect(ref="my_dataset"):
            print(record)
        """
        self.connect_standalone()
        yield from self._standalone_connection
|
function removeDuplicates(arr) {
let seen = new Set();
let output = [];
arr.forEach(function(item) {
if(!seen.has(item)) {
seen.add(item);
output.push(item);
}
});
return output;
}
// Demo: strip the repeated 5 and 1 from a sample input.
const myArray = [5, 1, 2, 3, 4, 5, 1, 6];
const res = removeDuplicates(myArray);
console.log(res); // [5, 1, 2, 3, 4, 6]
|
package application;
import javafx.scene.control.TextArea;
import modele.Console;
import modele.Object;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import static org.junit.Assert.*;
/** Unit tests for the {@code Console} singleton's line buffer behaviour. */
public class ConsoleTest {

    /** Runs once before the first test: prints the suite's start banner. */
    @BeforeClass
    public static void setUpClass() {
        System.out.println("==================CONSOLE TEST==================");
    }

    /** Runs once after all tests have finished: prints the suite's end banner. */
    @AfterClass
    public static void tearDownClass() {
        System.out.println("=================FIN TEST CONSOLE================");
    }

    /** Runs before each test: every test starts from an empty console buffer. */
    @Before
    public void beforeTests() {
        Console.getInstance().getLines().clear();
    }

    /** Every lookup of the console must yield the same instance. */
    @Test
    public void singletonTest() {
        System.out.println("Début du test de singleton ...");
        assertEquals(Console.getInstance(), Console.getInstance());
    }

    /** Adding a single line grows the buffer by one. */
    @Test
    public void addLine() {
        System.out.println("Début du test d'ajout de ligne ...");
        Console console = Console.getInstance();
        assertEquals(0, console.getLines().size());
        console.addLine("Test");
        assertEquals(1, console.getLines().size());
    }

    /** Bulk addition accepts both a collection and an array of lines. */
    @Test
    public void addLines() {
        System.out.println("Début du test d'ajout de lignes ...");
        Console console = Console.getInstance();
        assertEquals(0, console.getLines().size());

        ArrayList<String> asList = new ArrayList<>(Arrays.asList("Test1", "Test2"));
        console.addLines(asList);
        assertEquals(2, console.getLines().size());

        String[] asArray = {"Test1", "Test2"};
        console.addLines(asArray);
        assertEquals(4, console.getLines().size());
    }

    /** printHelloWorld emits its banner — 9 lines as implemented. */
    @Test
    public void helloWorld() {
        System.out.println("Début du test d'Hello World ...");
        Console console = Console.getInstance();
        console.printHelloWorld();
        assertEquals(9, console.getLines().size());
    }

    /** Overfilling the buffer caps it at maxLines + 1 entries. */
    @Test
    public void overflow() {
        System.out.println("Début du test d'overflow ...");
        Console console = Console.getInstance();
        int total = console.maxLines + 10;
        for (int i = 0; i < total; i++) {
            console.addLine("Garbage");
        }
        assertEquals(console.maxLines + 1, console.getLines().size());
    }

    /** Attaching a null TextArea leaves no attached component. */
    @Test
    public void attachTA() {
        System.out.println("Début du test d'attache de TextArea ...");
        Console console = Console.getInstance();
        console.attachTextarea(null);
        assertNull(console.getAttachedTextArea());
    }
}
|
package org.fade.demo.springframework.transaction;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;

import javax.sql.DataSource;
import java.sql.Types;

/**
 * Demonstrates {@code PROPAGATION_NESTED} transaction behaviour: one method
 * inserts a row and completes normally, the other inserts a row and then
 * throws, so the nested transaction's savepoint rollback can be observed.
 *
 * @author fade
 * @date 2022/01/03
 */
@Service
public class PropagationServiceImpl implements PropagationService {

    /** Shared insert statement; both demo methods write the same row shape. */
    private static final String INSERT_USER_SQL =
            "insert into user(name, age, sex) values(?, ?, ?)";

    private JdbcTemplate jdbcTemplate;

    /**
     * Setter injection of the {@link DataSource}; wraps it in a
     * {@link JdbcTemplate} for the insert operations below.
     */
    @Autowired
    public void setDataSource(DataSource dataSource) {
        this.jdbcTemplate = new JdbcTemplate(dataSource);
    }

    /**
     * Inserts the user inside a NESTED transaction, then throws so the nested
     * scope rolls back to its savepoint.
     *
     * @throws RuntimeException always, after the insert ("test PROPAGATION_NESTED")
     */
    @Override
    @Transactional(propagation = Propagation.NESTED)
    public void nestedThrowException(User user) {
        insertUser(user);
        throw new RuntimeException("test PROPAGATION_NESTED");
    }

    /** Inserts the user inside a NESTED transaction and completes normally. */
    @Override
    @Transactional(propagation = Propagation.NESTED)
    public void nestedOk(User user) {
        insertUser(user);
    }

    /** Single place for the duplicated insert logic shared by both demo methods. */
    private void insertUser(User user) {
        jdbcTemplate.update(INSERT_USER_SQL,
                new Object[] {user.getName(), user.getAge(), user.getSex()},
                new int[] {Types.VARCHAR, Types.INTEGER, Types.VARCHAR});
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.