text stringlengths 1 1.05M |
|---|
package com.fantasy.smallweather.util;
import android.app.Activity;
import android.os.Bundle;
/**
 * Base activity class that overrides onCreate() and onDestroy() to
 * register/unregister itself with ActivityCollector, so every live
 * activity can be finished in a single call elsewhere in the app.
 *
 * @author Fantasy
 * @version 1.0, 2016/8/31
 */
public class BaseActivity extends Activity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Track this activity so ActivityCollector can finish it later.
        ActivityCollector.addActivity(this);
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // Stop tracking once the activity is destroyed.
        ActivityCollector.removeActivity(this);
    }
}
|
<reponame>daffupman/rabbitmq-guide
package io.daff.rabbit._03_advanced_feature.limitstream;
import com.rabbitmq.client.Channel;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.ConnectionFactory;
import com.rabbitmq.client.QueueingConsumer;
import com.rabbitmq.client.QueueingConsumer.Delivery;
import io.daff.rabbit.Constants;
public class Consumer {
    /**
     * Consumes messages from queue "test001" one at a time.
     *
     * Flow limiting: basicQos(prefetchCount=1) tells the broker to deliver
     * at most one unacknowledged message to this consumer, and auto-ack is
     * disabled so each message is acked manually after processing.
     */
    public static void main(String[] args) throws Exception {
        ConnectionFactory connectionFactory = new ConnectionFactory() ;
        connectionFactory.setHost(Constants.RABBIT_MQ_HOST);
        connectionFactory.setPort(5672);
        connectionFactory.setVirtualHost("/");
        // Reconnect automatically on network failure, retrying every 3 s.
        connectionFactory.setAutomaticRecoveryEnabled(true);
        connectionFactory.setNetworkRecoveryInterval(3000);
        Connection connection = connectionFactory.newConnection();
        Channel channel = connection.createChannel();
        String queueName = "test001";
        // durable=true: the queue survives a broker restart.
        channel.queueDeclare(queueName, true, false, false, null);
        QueueingConsumer consumer = new QueueingConsumer(channel);
        // prefetchSize=0 (no byte limit), prefetchCount=1, global=false.
        channel.basicQos(0, 1, false);
        // Args: queue name, auto-ack (false = manual ack), consumer.
        channel.basicConsume(queueName, false, consumer);
        // Consume messages forever.
        while(true){
            // Blocks until a message is available.
            Delivery delivery = consumer.nextDelivery();
            String msg = new String(delivery.getBody());
            System.out.println("收到消息:" + msg);
            // Ack exactly this delivery (multiple=false).
            channel.basicAck(delivery.getEnvelope().getDeliveryTag(), false);
        }
    }
}
|
#!/bin/sh
# Regenerate the CRL to avoid the expired CRL error
openssl ca -config CA/estExampleCA.cnf -gencrl -out CA/estCA/crl.pem
# Build the trust bundle: trusted certs followed by the freshly generated CRL.
cat CA/trustedcerts.crt CA/estCA/crl.pem > US901/trustedcertsandcrl.crt
# Setup the trust anchor
export EST_TRUSTED_CERTS=US901/trustedcertsandcrl.crt
export EST_CACERTS_RESP=CA/estCA/cacert.crt
export EST_OPENSSL_CACONFIG=CA/estExampleCA.cnf
# Start the EST server on port 8089 using the example CA's combined
# cert/key file for both server cert (-c) and key (-k), realm "estrealm".
# NOTE(review): remaining flag semantics (-n, -l, -d 60, -v) come from the
# estserver usage text -- confirm against the libest example server docs.
../../example/server/estserver -n -l -p 8089 -c CA/estCA/private/estservercertandkey.pem -k CA/estCA/private/estservercertandkey.pem -r estrealm -d 60 -v
|
#!/bin/bash
# Load the "GitHub Dark" color scheme into Pantheon Terminal via dconf.
# Everything between <<COLORS and COLORS is literal dconf keyfile data
# written verbatim to /org/pantheon/terminal/settings/ -- do not edit
# the heredoc casually (including the 16-entry palette string).
dconf load /org/pantheon/terminal/settings/ <<COLORS
[/]
name='GitHub Dark'
cursor-color='#c9d1d9'
foreground='#8b949e'
background='rgba(16,18,22,.95)'
palette='#000000:#f78166:#56d364:#e3b341:#6ca4f8:#db61a2:#2b7489:#ffffff:#4d4d4d:#f78166:#56d364:#e3b341:#6ca4f8:#db61a2:#2b7489:#ffffff'
COLORS
|
/// Draws `text` inside `rect`, inset by 5 pt on every side, using a 12 pt
/// system font in black. Must be called within a valid graphics context.
func drawText(_ text: String, in rect: CGRect) {
    // Keep the text away from the edges of the target rect.
    let insets = UIEdgeInsets(top: 5, left: 5, bottom: 5, right: 5)
    let drawingRect = rect.inset(by: insets)

    // Rendering attributes: 12 pt system font, black fill.
    let attributes: [NSAttributedString.Key: Any] = [
        .font: UIFont.systemFont(ofSize: 12),
        .foregroundColor: UIColor.black
    ]

    text.draw(
        with: drawingRect,
        options: [.usesLineFragmentOrigin, .usesFontLeading],
        attributes: attributes,
        context: NSStringDrawingContext()
    )
}
package keeper_test
import (
"fmt"
"time"
"github.com/tendermint/farming/x/farming"
)
// TestLastEpochTime verifies that the last-epoch-time record is absent on a
// fresh context and round-trips exactly through Set/Get.
func (suite *KeeperTestSuite) TestLastEpochTime() {
	// Nothing has been stored yet.
	_, found := suite.keeper.GetLastEpochTime(suite.ctx)
	suite.Require().False(found)

	t := mustParseRFC3339("2021-07-23T05:01:02Z")
	suite.keeper.SetLastEpochTime(suite.ctx, t)

	// The stored value must come back unchanged.
	t2, found := suite.keeper.GetLastEpochTime(suite.ctx)
	suite.Require().True(found)
	suite.Require().Equal(t, t2)
}
// TestFirstEpoch checks that the very first epoch can advance after less
// than a full epoch_days interval.
func (suite *KeeperTestSuite) TestFirstEpoch() {
	// The first epoch may run very quickly depending on when
	// the farming module is deployed,
	// meaning that (block time) - (last epoch time) may be smaller
	// than epoch_days parameter on the first epoch.
	params := suite.keeper.GetParams(suite.ctx)
	suite.Require().Equal(uint32(1), params.EpochDays)

	// First EndBlocker establishes the initial last-epoch-time.
	suite.ctx = suite.ctx.WithBlockTime(mustParseRFC3339("2021-08-11T23:59:59Z"))
	farming.EndBlocker(suite.ctx, suite.keeper)
	lastEpochTime, found := suite.keeper.GetLastEpochTime(suite.ctx)
	suite.Require().True(found)

	// Only one second later, but past midnight: the epoch still advances.
	suite.ctx = suite.ctx.WithBlockTime(mustParseRFC3339("2021-08-12T00:00:00Z"))
	farming.EndBlocker(suite.ctx, suite.keeper)
	t, _ := suite.keeper.GetLastEpochTime(suite.ctx)
	suite.Require().True(t.After(lastEpochTime)) // Indicating that the epoch advanced.
}
// TestEpochDays simulates ~35 days of 5-minute blocks for several epoch_days
// settings and asserts that consecutive epoch advances are never closer
// together than epoch_days*24 hours.
func (suite *KeeperTestSuite) TestEpochDays() {
	for _, epochDays := range []uint32{1, 2, 3} {
		suite.Run(fmt.Sprintf("epoch days = %d", epochDays), func() {
			// Fresh state per sub-test so runs don't interfere.
			suite.SetupTest()

			params := suite.keeper.GetParams(suite.ctx)
			params.EpochDays = epochDays
			suite.keeper.SetParams(suite.ctx, params)

			// Seed the initial last-epoch-time with one EndBlocker run.
			t := mustParseRFC3339("2021-08-11T00:00:00Z")
			suite.ctx = suite.ctx.WithBlockTime(t)
			farming.EndBlocker(suite.ctx, suite.keeper)
			lastEpochTime, _ := suite.keeper.GetLastEpochTime(suite.ctx)

			// 10000 blocks x 5 min ≈ 34.7 days of simulated chain time.
			for i := 0; i < 10000; i++ {
				t = t.Add(5 * time.Minute)
				suite.ctx = suite.ctx.WithBlockTime(t)
				farming.EndBlocker(suite.ctx, suite.keeper)
				t2, _ := suite.keeper.GetLastEpochTime(suite.ctx)
				if t2.After(lastEpochTime) {
					// Whenever the epoch advanced, at least epoch_days
					// worth of hours must have elapsed.
					suite.Require().GreaterOrEqual(t2.Sub(lastEpochTime).Hours(), float64(epochDays*24))
					lastEpochTime = t2
				}
			}
		})
	}
}
|
<reponame>pedroalbanese/gostpass
// Copyright 2016 The Sandpass Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package uuids
import (
"bytes"
"strings"
"testing"
)
// hexTests pairs UUID values with their canonical hyphenated hex string
// form. Shared by TestParse, TestAppendHex, and TestString. The second
// entry is the example UUID from RFC 4122.
var hexTests = []struct {
	u UUID
	s string
}{
	{
		UUID{},
		"00000000-0000-0000-0000-000000000000",
	},
	{
		UUID{0xf8, 0x1d, 0x4f, 0xae, 0x7d, 0xec, 0x11, 0xd0, 0xa7, 0x65, 0x00, 0xa0, 0xc9, 0x1e, 0x6b, 0xf6},
		"f81d4fae-7dec-11d0-a765-00a0c91e6bf6",
	},
}
// TestParse checks Parse against the canonical hyphenated forms in hexTests,
// then against un-hyphenated and malformed inputs.
func TestParse(t *testing.T) {
	for _, test := range hexTests {
		u, err := Parse(test.s)
		if err != nil {
			t.Errorf("Parse(%q) unexpected error: %v", test.s, err)
		}
		if u != test.u {
			t.Errorf("Parse(%q) = %v; want %v", test.s, u, test.u)
		}
	}
	parseTests := []struct {
		s    string
		u    UUID
		fail bool
	}{
		{
			// Bare 32-hex-digit form (no hyphens) must also parse.
			s: "f81d4fae7dec11d0a76500a0c91e6bf6",
			u: UUID{0xf8, 0x1d, 0x4f, 0xae, 0x7d, 0xec, 0x11, 0xd0, 0xa7, 0x65, 0x00, 0xa0, 0xc9, 0x1e, 0x6b, 0xf6},
		},
		{
			// Empty input.
			s:    "",
			fail: true,
		},
		{
			// Right length, non-hex characters.
			s:    "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX",
			fail: true,
		},
		{
			// 31 hex digits: one short.
			s:    "f81d4fae7dec11d0a76500a0c91e6bf",
			fail: true,
		},
		{
			// 34 hex digits: too long.
			s:    "f81d4fae7dec11d0a76500a0c91e6bf6ad",
			fail: true,
		},
		{
			// 33 hex digits: odd length.
			s:    "f81d4fae7dec11d0a76500a0c91e6bf6a",
			fail: true,
		},
	}
	for _, test := range parseTests {
		u, err := Parse(test.s)
		if (err != nil) != test.fail {
			if test.fail {
				t.Errorf("Parse(%q) should return an error", test.s)
			} else {
				t.Errorf("Parse(%q) unexpected error: %v", test.s, err)
			}
		}
		// On failure cases test.u is the zero UUID, which Parse should
		// also return (or leave) on error.
		if u != test.u {
			t.Errorf("Parse(%q) = %v; want %v", test.s, u, test.u)
		}
	}
}
// TestAppendHex checks that AppendHex appends the canonical 36-byte
// hyphenated hex form of a UUID: when the destination must grow, when it
// has sufficient capacity, and when it already holds bytes that must be
// preserved.
func TestAppendHex(t *testing.T) {
	for _, test := range hexTests {
		{
			// Zero-capacity destination: forces AppendHex to grow the
			// slice. (Previously this case passed make([]byte, 0, 36),
			// duplicating the case below and contradicting its own
			// failure message.)
			s := test.u.AppendHex(make([]byte, 0))
			if !bytes.Equal(s, []byte(test.s)) {
				t.Errorf("UUID(%v).AppendHex(make([]byte, 0)) = %q; want %q", [16]byte(test.u), s, test.s)
			}
		}
		{
			// Pre-sized destination: exactly enough capacity, no growth.
			s := test.u.AppendHex(make([]byte, 0, 36))
			if !bytes.Equal(s, []byte(test.s)) {
				t.Errorf("UUID(%v).AppendHex(make([]byte, 0, 36)) = %q; want %q", [16]byte(test.u), s, test.s)
			}
		}
		{
			// Destination with existing content: AppendHex must append
			// after "foo", not overwrite it.
			b := make([]byte, 0, 39)
			b = append(b, "foo"...)
			s := test.u.AppendHex(b)
			if !bytes.Equal(s, []byte("foo"+test.s)) {
				t.Errorf("UUID(%v).AppendHex(\"foo\") = %q; want %q", [16]byte(test.u), s, "foo"+test.s)
			}
		}
	}
}
// TestString checks that String returns the canonical hyphenated hex form.
func TestString(t *testing.T) {
	for _, test := range hexTests {
		s := test.u.String()
		if s != test.s {
			t.Errorf("UUID(%v).String() = %q; want %q", [16]byte(test.u), s, test.s)
		}
	}
}
// TestNew3 checks name-based (MD5, version 3) UUID generation against
// fixed expected values for names in the DNS namespace.
func TestNew3(t *testing.T) {
	tests := []struct {
		namespace UUID
		id        []byte
		u         UUID
	}{
		{
			DNS, []byte("www.example.com"),
			UUID{0x5d, 0xf4, 0x18, 0x81, 0x3a, 0xed, 0x35, 0x15, 0x88, 0xa7, 0x2f, 0x4a, 0x81, 0x4c, 0xf0, 0x9e},
		},
		{
			DNS, []byte("www.widgets.com"),
			UUID{0x3d, 0x81, 0x3c, 0xbb, 0x47, 0xfb, 0x32, 0xba, 0x91, 0xdf, 0x83, 0x1e, 0x15, 0x93, 0xac, 0x29},
		},
	}
	for _, test := range tests {
		u := New3(test.namespace, test.id)
		if u != test.u {
			t.Errorf("New3(%v, %q) = %v; want %v", test.namespace, test.id, u, test.u)
		}
	}
}
// TestNew4 feeds a fixed byte stream to New4 and verifies the version and
// variant bits of the resulting random (version 4) UUID. The payload bytes
// are deliberately all-ones-ish so the masking of version/variant bits is
// actually exercised.
func TestNew4(t *testing.T) {
	const random = "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff\xe0\xe1"
	u, err := New4(strings.NewReader(random))
	if err != nil {
		t.Fatal("New4 error:", err)
	}
	// High nibble of octet 6 encodes the version; must be 4.
	if version := u[6] >> 4; version != 4 {
		t.Errorf("New4() = %v, version = %d; want 4", u, version)
	}
	// Top bits of octet 8 encode the variant; masking with 6 checks the
	// two high bits are "10" (RFC 4122 variant).
	if variant := u[8] >> 5; variant&6 != 4 {
		t.Errorf("New4() = %v, variant = %d; want 4", u, variant&6)
	}
}
// TestNew5 checks name-based (SHA-1, version 5) UUID generation against a
// fixed expected value for a name in the DNS namespace.
func TestNew5(t *testing.T) {
	tests := []struct {
		namespace UUID
		id        []byte
		u         UUID
	}{
		{
			DNS, []byte("www.example.com"),
			UUID{0x2e, 0xd6, 0x65, 0x7d, 0xe9, 0x27, 0x56, 0x8b, 0x95, 0xe1, 0x26, 0x65, 0xa8, 0xae, 0xa6, 0xa2},
		},
	}
	for _, test := range tests {
		u := New5(test.namespace, test.id)
		if u != test.u {
			t.Errorf("New5(%v, %q) = %v; want %v", test.namespace, test.id, u, test.u)
		}
	}
}
|
import React, { useState,useMemo } from "react";
import { makeStyles } from "@material-ui/core/styles";
import { IoMdAddCircleOutline } from "react-icons/io";
import {
Grid,
Button,
Paper,
TextField,
IconButton,
Table,
TableHead,
TableRow,
TableCell,
TableBody
} from "@material-ui/core";
import { CirclePicker } from "react-color";
import SubItem from "./SubItem";
import { MdUpdate } from "react-icons/md";
import gql from "graphql-tag";
import { useQuery, useMutation } from "@apollo/react-hooks";
// Material-UI style hook for the departemant-update screen.
const useStyles = makeStyles(theme => ({
  button: {
    margin: theme.spacing(1)
  },
  // Right-aligns the submit button row.
  sendToRight: {
    textAlign: "right"
  },
  containerGrid: {
    marginTop: "50px"
  },
  paper: {
    padding: "50px"
  },
  alignPaper: {
    padding: "65px"
  },
  textInPut: {
    marginTop: "30px"
  },
  // Bottom-aligns the "add" button next to the sub-name text field.
  alignCenter: {
    display: "flex",
    alignItems: "flex-end"
  }
}));
const UpdateNewDepartemant = (props) => {
const classes = useStyles();
const [departemant,setDepartemant] = useState({
id:'',
name:'',
color:'',
active:true,
subDepartemant:[]
});
const [subState,setSubState] = useState({
id:'',
name:'',
active:true
});
const {loading,data} = useQuery(GET_A_DEPARTEMANT,{
variables:{id:props.match.params.id}
});
useMemo(() => {
if(data){
setDepartemant(data.getADepartemant);
}
},[data])
const [updateDepartemant,{error}] = useMutation(UPDATE_DEPARTEMANT,{
update(_,result){
console.log(result.data.updateDepartemant);
},
onError(err){
console.log(err.graphQLErrors[0].message);
},
variables:{...departemant,subDepartemant:departemant.subDepartemant.map(p => {
return {
name:p.name,
active:p.active
}
})}
})
const changeSubActive = (item,active) => {
const result = departemant.subDepartemant.map(departemantt => departemantt.name == item.name ?
departemantt = {...departemantt,active}
: departemantt
);
setDepartemant({...departemant,subDepartemant:[...result]});
}
const updateSub = (item,name) => {
const result = departemant.subDepartemant.map(departemantt => departemantt.name == item.name ?
departemantt = {...departemantt,name}
: departemantt
);
setDepartemant({...departemant,subDepartemant:[...result]});
}
const deleteSubItem = item =>{
const result = departemant.subDepartemant.filter(subitem => subitem.name != item.name);
setDepartemant({...departemant,subDepartemant:[...result]});
}
const onSubmit = e =>{
e.preventDefault();
updateDepartemant();
}
const changeHandlerDep = e =>{
setDepartemant({...departemant,name:e.target.value});
}
const changeColorHandler = color => {
setDepartemant({...departemant,color:color.hex});
}
const changeSubStateHandler = e =>{
setSubState({...subState,name:e.target.value});
}
const addSubHandler = () => {
setDepartemant({...departemant,subDepartemant:[...departemant.subDepartemant,subState]})
}
return (
<form onSubmit={onSubmit}>
<Grid container spacing={3}>
<Grid item xs={12} className={classes.sendToRight}>
<Button
variant="contained"
color="primary"
size="large"
className={classes.button}
startIcon={<MdUpdate />}
type="submit"
>
Update Departemant
</Button>
</Grid>
</Grid>
<Grid container spacing={3}>
<Grid item xs={8}>
<Paper
elevation={3}
className={`${classes.alignPaper} ${classes.textInPut}`}
>
<h4>Add Departemant Name</h4>
<TextField
name="name"
placeholder="Add Departemant Name"
label="Add Name"
fullWidth
variant="outlined"
className={classes.textInPut}
required
value={departemant.name}
onChange={changeHandlerDep}
/>
</Paper>
<Paper
elevation={3}
className={`${classes.paper} ${classes.textInPut}`}
>
<h4>Add Sub Departemant</h4>
<Grid container spacing={0}>
<Grid item xs={8}>
<TextField
name="subname"
placeholder="Add Sub Departemant Name"
label="Add Sub Departemant Name"
fullWidth
variant="outlined"
className={classes.textInPut}
value={subState.name}
onChange={changeSubStateHandler}
id="subname"
/>
</Grid>
<Grid item xs={4} className={classes.alignCenter}>
<Button
variant="contained"
color="primary"
size="large"
className={classes.button}
startIcon={<IoMdAddCircleOutline />}
style={{padding:"10px"}}
onClick={addSubHandler}
>
Add Sub Departemant
</Button>
</Grid>
<Grid item xs={12} className={classes.alignPaper}>
<Table>
<TableHead>
<TableRow>
<TableCell>No.</TableCell>
<TableCell>Name</TableCell>
<TableCell>Active</TableCell>
<TableCell>Action</TableCell>
</TableRow>
</TableHead>
<TableBody>
{departemant && departemant.subDepartemant.length > 0 && departemant.subDepartemant.map((dep,index) => (<SubItem key={index} index={++index} departemant={dep} deleteItem={deleteSubItem} updateSub={updateSub} changeSubActive={changeSubActive}/>))}
</TableBody>
</Table>
</Grid>
</Grid>
</Paper>
</Grid>
<Grid item xs={4}>
<Paper
elevation={3}
className={`${classes.paper} ${classes.textInPut}`}
>
<h4>Departemant Color</h4>
<CirclePicker color={departemant.color} onChangeComplete={changeColorHandler}/>
</Paper>
</Grid>
</Grid>
</form>
);
};
// Query: fetch one departemant (including its sub-departemants) by id.
const GET_A_DEPARTEMANT = gql`
query getADepartemant($id:ID!) {
getADepartemant(id:$id){
id
name
active
color
subDepartemant{
id
name
active
}
}
}
`;
// Mutation: persist name/color/active and the full sub-departemant list.
// Takes ISubDepartemant inputs (name + active only, no ids).
const UPDATE_DEPARTEMANT = gql`
mutation updateDepartemant($id:ID!,$name:String!,$color:String,$subDepartemant:[ISubDepartemant],$active:Boolean){
updateDepartemant(id:$id,name:$name,color:$color,subDepartemant:$subDepartemant,active:$active){
id
name
color
active
subDepartemant{
id
name
active
}
}
}
`;
export default UpdateNewDepartemant;
|
#!/bin/bash
# Copyright 2020 The gVisor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Run a packetimpact test. Two docker containers are made, one for the
# Device-Under-Test (DUT) and one for the test bench. Each is attached with
# two networks, one for control packets that aid the test and one for test
# packets which are sent as part of the test and observed for correctness.
set -euxo pipefail

# Print the failing file, line, and command; installed as the ERR trap below
# so any command failure under `set -e` is reported before the script exits.
function failure() {
  local lineno=$1
  local msg=$2
  local filename="$0"
  echo "FAIL: $filename:$lineno: $msg"
}
trap 'failure ${LINENO} "$BASH_COMMAND"' ERR
# Command-line options accepted by this script (getopt long-option spec).
declare -r LONGOPTS="dut_platform:,posix_server_binary:,testbench_binary:,runtime:,tshark,extra_test_arg:"

# Don't use declare below so that the error from getopt will end the script.
PARSED=$(getopt --options "" --longoptions=$LONGOPTS --name "$0" -- "$@")
eval set -- "$PARSED"

declare -a EXTRA_TEST_ARGS

while true; do
  case "$1" in
    --dut_platform)
      # Either "linux" or "netstack".
      declare -r DUT_PLATFORM="$2"
      shift 2
      ;;
    --posix_server_binary)
      declare -r POSIX_SERVER_BINARY="$2"
      shift 2
      ;;
    --testbench_binary)
      declare -r TESTBENCH_BINARY="$2"
      shift 2
      ;;
    --runtime)
      # Not readonly because there might be multiple --runtime arguments and we
      # want to use just the last one. Only used if --dut_platform is
      # "netstack".
      declare RUNTIME="$2"
      shift 2
      ;;
    --tshark)
      declare -r TSHARK="1"
      shift 1
      ;;
    --extra_test_arg)
      # BUG FIX: append as a new array element. The previous
      # EXTRA_TEST_ARGS+="$2" concatenated onto element 0 as a string,
      # mangling multiple --extra_test_arg values into one word.
      EXTRA_TEST_ARGS+=("$2")
      shift 2
      ;;
    --)
      shift
      break
      ;;
    *)
      echo "Programming error"
      exit 3
  esac
done
# All the other arguments are scripts.
declare -r scripts="$@"

# Check that the required flags are defined in a way that is safe for "set -u".
if [[ "${DUT_PLATFORM-}" == "netstack" ]]; then
  if [[ -z "${RUNTIME-}" ]]; then
    echo "FAIL: Missing --runtime argument: ${RUNTIME-}"
    exit 2
  fi
  declare -r RUNTIME_ARG="--runtime ${RUNTIME}"
elif [[ "${DUT_PLATFORM-}" == "linux" ]]; then
  declare -r RUNTIME_ARG=""
else
  echo "FAIL: Bad or missing --dut_platform argument: ${DUT_PLATFORM-}"
  exit 2
fi
# NOTE(review): the message below expands ${POSIX_SERVER-} but the flag
# variable is POSIX_SERVER_BINARY -- the error text will always print empty.
if [[ ! -f "${POSIX_SERVER_BINARY-}" ]]; then
  echo "FAIL: Bad or missing --posix_server_binary: ${POSIX_SERVER-}"
  exit 2
fi
if [[ ! -f "${TESTBENCH_BINARY-}" ]]; then
  echo "FAIL: Bad or missing --testbench_binary: ${TESTBENCH_BINARY-}"
  exit 2
fi

# Variables specific to the control network and interface start with CTRL_.
# Variables specific to the test network and interface start with TEST_.
# Variables specific to the DUT start with DUT_.
# Variables specific to the test bench start with TESTBENCH_.
# Use random numbers so that test networks don't collide.
declare -r CTRL_NET="ctrl_net-${RANDOM}${RANDOM}"
declare -r TEST_NET="test_net-${RANDOM}${RANDOM}"
# On both DUT and test bench, testing packets are on the eth2 interface.
declare -r TEST_DEVICE="eth2"
# Number of bits in the *_NET_PREFIX variables.
declare -r NET_MASK="24"

# Emit a random /24 network prefix, e.g. "203.17.88".
function new_net_prefix() {
  # Class C, 192.0.0.0 to 223.255.255.255, transitionally has mask 24.
  echo "$(shuf -i 192-223 -n 1).$(shuf -i 0-255 -n 1).$(shuf -i 0-255 -n 1)"
}

# Last bits of the DUT's IP address.
declare -r DUT_NET_SUFFIX=".10"
# Control port.
declare -r CTRL_PORT="40000"
# Last bits of the test bench's IP address.
declare -r TESTBENCH_NET_SUFFIX=".20"
declare -r TIMEOUT="60"
declare -r IMAGE_TAG="gcr.io/gvisor-presubmit/packetimpact"

# Make sure that docker is installed.
docker --version
# EXIT-trap cleanup: flush the capture tool, remove all containers attached
# to the test networks, and delete the networks themselves. Individual
# failures are collected into cleanup_success so every step is attempted.
function finish {
  local cleanup_success=1
  if [[ -z "${TSHARK-}" ]]; then
    # Kill tcpdump so that it will flush output.
    docker exec -t "${TESTBENCH}" \
      killall tcpdump || \
      cleanup_success=0
  else
    # Kill tshark so that it will flush output.
    docker exec -t "${TESTBENCH}" \
      killall tshark || \
      cleanup_success=0
  fi
  for net in "${CTRL_NET}" "${TEST_NET}"; do
    # Kill all processes attached to ${net}.
    for docker_command in "kill" "rm"; do
      (docker network inspect "${net}" \
        --format '{{range $key, $value := .Containers}}{{$key}} {{end}}' \
        | xargs -r docker "${docker_command}") || \
        cleanup_success=0
    done
    # Remove the network.
    docker network rm "${net}" || \
      cleanup_success=0
  done
  if ((!$cleanup_success)); then
    echo "FAIL: Cleanup command failed"
    exit 4
  fi
}
trap finish EXIT
# Subnet for control packets between test bench and DUT.
# Retry with a fresh random prefix until the subnet doesn't collide with an
# existing docker network.
declare CTRL_NET_PREFIX=$(new_net_prefix)
while ! docker network create \
  "--subnet=${CTRL_NET_PREFIX}.0/${NET_MASK}" "${CTRL_NET}"; do
  sleep 0.1
  declare CTRL_NET_PREFIX=$(new_net_prefix)
done

# Subnet for the packets that are part of the test.
declare TEST_NET_PREFIX=$(new_net_prefix)
while ! docker network create \
  "--subnet=${TEST_NET_PREFIX}.0/${NET_MASK}" "${TEST_NET}"; do
  sleep 0.1
  declare TEST_NET_PREFIX=$(new_net_prefix)
done
docker pull "${IMAGE_TAG}"

# Create the DUT container and connect to network.
# On connect failure, kill/rm the half-created container and fail the script.
DUT=$(docker create ${RUNTIME_ARG} --privileged --rm \
  --stop-timeout ${TIMEOUT} -it ${IMAGE_TAG})
docker network connect "${CTRL_NET}" \
  --ip "${CTRL_NET_PREFIX}${DUT_NET_SUFFIX}" "${DUT}" \
  || (docker kill ${DUT}; docker rm ${DUT}; false)
docker network connect "${TEST_NET}" \
  --ip "${TEST_NET_PREFIX}${DUT_NET_SUFFIX}" "${DUT}" \
  || (docker kill ${DUT}; docker rm ${DUT}; false)
docker start "${DUT}"

# Create the test bench container and connect to network.
TESTBENCH=$(docker create --privileged --rm \
  --stop-timeout ${TIMEOUT} -it ${IMAGE_TAG})
docker network connect "${CTRL_NET}" \
  --ip "${CTRL_NET_PREFIX}${TESTBENCH_NET_SUFFIX}" "${TESTBENCH}" \
  || (docker kill ${TESTBENCH}; docker rm ${TESTBENCH}; false)
docker network connect "${TEST_NET}" \
  --ip "${TEST_NET_PREFIX}${TESTBENCH_NET_SUFFIX}" "${TESTBENCH}" \
  || (docker kill ${TESTBENCH}; docker rm ${TESTBENCH}; false)
docker start "${TESTBENCH}"

# Start the posix_server in the DUT.
declare -r DOCKER_POSIX_SERVER_BINARY="/$(basename ${POSIX_SERVER_BINARY})"
docker cp -L ${POSIX_SERVER_BINARY} "${DUT}:${DOCKER_POSIX_SERVER_BINARY}"
# Runs in the background; the EXIT trap tears down the container.
docker exec -t "${DUT}" \
  /bin/bash -c "${DOCKER_POSIX_SERVER_BINARY} \
  --ip ${CTRL_NET_PREFIX}${DUT_NET_SUFFIX} \
  --port ${CTRL_PORT}" &

# Because the Linux kernel receives the SYN-ACK but didn't send the SYN it will
# issue a RST. To prevent this IPtables can be used to filter those out.
docker exec "${TESTBENCH}" \
  iptables -A INPUT -i ${TEST_DEVICE} -j DROP

# Wait for the DUT server to come up. Attempt to connect to it from the test
# bench every 100 milliseconds until success.
while ! docker exec "${TESTBENCH}" \
  nc -zv "${CTRL_NET_PREFIX}${DUT_NET_SUFFIX}" "${CTRL_PORT}"; do
  sleep 0.1
done

# Extract the MAC addresses of the test interfaces from `ip link show`.
declare -r REMOTE_MAC=$(docker exec -t "${DUT}" ip link show \
  "${TEST_DEVICE}" | tail -1 | cut -d' ' -f6)
declare -r LOCAL_MAC=$(docker exec -t "${TESTBENCH}" ip link show \
  "${TEST_DEVICE}" | tail -1 | cut -d' ' -f6)

declare -r DOCKER_TESTBENCH_BINARY="/$(basename ${TESTBENCH_BINARY})"
docker cp -L "${TESTBENCH_BINARY}" "${TESTBENCH}:${DOCKER_TESTBENCH_BINARY}"

if [[ -z "${TSHARK-}" ]]; then
  # Run tcpdump in the test bench unbuffered, without dns resolution, just on
  # the interface with the test packets.
  docker exec -t "${TESTBENCH}" \
    tcpdump -S -vvv -U -n -i "${TEST_DEVICE}" net "${TEST_NET_PREFIX}/24" &
else
  # Run tshark in the test bench unbuffered, without dns resolution, just on the
  # interface with the test packets.
  docker exec -t "${TESTBENCH}" \
    tshark -V -l -n -i "${TEST_DEVICE}" \
    -o tcp.check_checksum:TRUE \
    -o udp.check_checksum:TRUE \
    host "${TEST_NET_PREFIX}${TESTBENCH_NET_SUFFIX}" &
fi

# tcpdump and tshark take time to startup
sleep 3

# Start a packetimpact test on the test bench. The packetimpact test sends and
# receives packets and also sends POSIX socket commands to the posix_server to
# be executed on the DUT.
docker exec -t "${TESTBENCH}" \
  /bin/bash -c "${DOCKER_TESTBENCH_BINARY} \
  ${EXTRA_TEST_ARGS[@]-} \
  --posix_server_ip=${CTRL_NET_PREFIX}${DUT_NET_SUFFIX} \
  --posix_server_port=${CTRL_PORT} \
  --remote_ipv4=${TEST_NET_PREFIX}${DUT_NET_SUFFIX} \
  --local_ipv4=${TEST_NET_PREFIX}${TESTBENCH_NET_SUFFIX} \
  --remote_mac=${REMOTE_MAC} \
  --local_mac=${LOCAL_MAC} \
  --device=${TEST_DEVICE}"

echo PASS: No errors.
|
#!/bin/bash
# Interactive region screenshot (-s). The single quotes are intentional:
# scrot itself expands %Y-%m-%d (strftime), $w/$h (dimensions), and $f
# (filename) -- the shell must not touch them. The -e command uploads the
# file and then archives it locally.
# NOTE(review): the trailing pipe receives scrot's stdout, not curl's JSON
# response (curl runs inside -e), so jq/xclip likely see no input -- verify
# the upload URL actually reaches the clipboard.
scrot -s '%Y-%m-%d_$wx$h_scrot.png' -e 'curl --location --request POST https://screenshots.local.setkeh.com/image --form file=@$f && mv $f /home/setkeh/storage/screenshots/' | jq -r '.url' | xclip -selection c
const { logger } = require('../helpers');
const { UserModel, AdvertModel } = require('../models');
/**
 * Create and save User object
 *
 * @async
 * @param {Number} _id - telegram ID
 * @param {Object} location - object with properties latitude and longitude
 * @param {Number} searchRadius - in metres
 * @throws {Error} if the document cannot be saved (original error is logged)
 */
const createUser = async (_id, { latitude, longitude }, searchRadius) => {
  try {
    const model = new UserModel({
      _id,
      // GeoJSON convention: coordinates are stored [longitude, latitude].
      location: { coordinates: [longitude, latitude] },
      searchRadius
    });
    await model.save();
  } catch (e) {
    logger.error(e);
    // Fixed message grammar ("Unable save user" -> "Unable to save user").
    throw new Error('Unable to save user');
  }
};
/**
 * Create and save Advertisement object
 *
 * @param {Object} advertisement
 * @property {Number} author - telegram ID
 * @property {string} title
 * @property {string} description
 * @property {Object} location - object with properties latitude and longitude
 * @property {string} category
 * @property {string} remuneration
 * @throws {Error} if the document cannot be saved (original error is logged)
 */
const createAdvertisement = async ({
  author,
  title,
  description,
  location: { latitude, longitude },
  category,
  remuneration
}) => {
  try {
    const model = new AdvertModel({
      author,
      title,
      description,
      // GeoJSON convention: coordinates are stored [longitude, latitude].
      location: { coordinates: [longitude, latitude] },
      category,
      remuneration,
      // New adverts are active until explicitly deactivated.
      isActive: true
    });
    await model.save();
  } catch (e) {
    logger.error(e);
    // Fixed message grammar ("Unable save ..." -> "Unable to save ...").
    throw new Error('Unable to save advertisement');
  }
};
module.exports = { createUser, createAdvertisement };
|
/*
* Copyright (C) 2020 Open Source Robotics Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#include <gtest/gtest.h>
#include <string>
#include <ignition/common/Console.hh>
#include "test_config.h" // NOLINT(build/include)
#include "ignition/rendering/Node.hh"
#include "ignition/rendering/RenderEngine.hh"
#include "ignition/rendering/RenderingIface.hh"
#include "ignition/rendering/Scene.hh"
#include "ignition/rendering/Visual.hh"
using namespace ignition;
using namespace rendering;
// Parameterized test fixture; the parameter is the render engine name.
class NodeTest : public testing::Test,
                 public testing::WithParamInterface<const char *>
{
  /// \brief Test node local/world pose, position, and rotation accessors
  /// (the previous comment, "Test visual material", was stale).
  public: void Pose(const std::string &_renderEngine);
};
/////////////////////////////////////////////////
void NodeTest::Pose(const std::string &_renderEngine)
{
  RenderEngine *engine = rendering::engine(_renderEngine);
  if (!engine)
  {
    // Skip gracefully when this engine isn't built in.
    igndbg << "Engine '" << _renderEngine
           << "' is not supported" << std::endl;
    return;
  }

  ScenePtr scene = engine->CreateScene("scene");

  // create visual
  NodePtr node = scene->CreateVisual();
  ASSERT_NE(nullptr, node);

  // check initial pose
  EXPECT_EQ(math::Pose3d(), node->LocalPose());
  EXPECT_EQ(math::Vector3d(), node->LocalPosition());
  EXPECT_EQ(math::Quaterniond(), node->LocalRotation());
  EXPECT_EQ(math::Pose3d(), node->WorldPose());
  EXPECT_EQ(math::Vector3d(), node->WorldPosition());
  EXPECT_EQ(math::Quaterniond(), node->WorldRotation());

  // set node pose, position, and quaternion. The node has no parent, so
  // local and world values are expected to coincide throughout.
  node->SetLocalPose(math::Pose3d(1, 2, 3, 0, 1.57, 1.57));
  EXPECT_EQ(math::Pose3d(1, 2, 3, 0, 1.57, 1.57), node->LocalPose());
  EXPECT_EQ(math::Pose3d(1, 2, 3, 0, 1.57, 1.57), node->WorldPose());

  node->SetLocalPosition(math::Vector3d(3, 4, 5));
  EXPECT_EQ(math::Vector3d(3, 4, 5), node->LocalPosition());
  EXPECT_EQ(math::Vector3d(3, 4, 5), node->WorldPosition());

  node->SetLocalRotation(math::Quaterniond(math::Vector3d(0.3, 0.1, 0.2)));
  EXPECT_EQ(math::Quaterniond(math::Vector3d(0.3, 0.1, 0.2)),
      node->LocalRotation());
  EXPECT_EQ(math::Quaterniond(math::Vector3d(0.3, 0.1, 0.2)),
      node->WorldRotation());

  node->SetWorldPose(math::Pose3d(-1, -2, -3, 0, -1.57, -1.57));
  EXPECT_EQ(math::Pose3d(-1, -2, -3, 0, -1.57, -1.57), node->WorldPose());
  EXPECT_EQ(math::Pose3d(-1, -2, -3, 0, -1.57, -1.57), node->LocalPose());

  node->SetWorldPosition(math::Vector3d(-3, -4, -5));
  EXPECT_EQ(math::Vector3d(-3, -4, -5), node->WorldPosition());
  EXPECT_EQ(math::Vector3d(-3, -4, -5), node->LocalPosition());

  node->SetWorldRotation(math::Quaterniond(math::Vector3d(-0.3, -0.1, -0.2)));
  EXPECT_EQ(math::Quaterniond(math::Vector3d(-0.3, -0.1, -0.2)),
      node->WorldRotation());
  EXPECT_EQ(math::Quaterniond(math::Vector3d(-0.3, -0.1, -0.2)),
      node->LocalRotation());

  // set NAN and inf values. verify they are not set: every expectation
  // below checks the previously-set (-3, -4, -5 / -0.3, -0.1, -0.2) values
  // are still in place.
  node->SetLocalPose(math::Pose3d(1, NAN, 3, 0, math::INF_D, 1.57));
  EXPECT_EQ(math::Pose3d(-3, -4, -5, -0.3, -0.1, -0.2), node->LocalPose());
  EXPECT_EQ(math::Pose3d(-3, -4, -5, -0.3, -0.1, -0.2), node->WorldPose());

  node->SetWorldPose(math::Pose3d(1, NAN, 3, 0, math::INF_D, 1.57));
  EXPECT_EQ(math::Pose3d(-3, -4, -5, -0.3, -0.1, -0.2), node->LocalPose());
  EXPECT_EQ(math::Pose3d(-3, -4, -5, -0.3, -0.1, -0.2), node->WorldPose());

  node->SetLocalPosition(math::Vector3d(NAN, 4, 5));
  EXPECT_EQ(math::Vector3d(-3, -4, -5), node->WorldPosition());
  EXPECT_EQ(math::Vector3d(-3, -4, -5), node->LocalPosition());

  node->SetWorldPosition(math::Vector3d(NAN, 4, 5));
  EXPECT_EQ(math::Vector3d(-3, -4, -5), node->WorldPosition());
  EXPECT_EQ(math::Vector3d(-3, -4, -5), node->LocalPosition());

  node->SetLocalRotation(math::Quaterniond(math::Vector3d(NAN, 0.4, 1.5)));
  EXPECT_EQ(math::Quaterniond(math::Vector3d(-0.3, -0.1, -0.2)),
      node->LocalRotation());
  EXPECT_EQ(math::Quaterniond(math::Vector3d(-0.3, -0.1, -0.2)),
      node->WorldRotation());

  node->SetWorldRotation(math::Quaterniond(math::Vector3d(NAN, 0.4, 1.5)));
  EXPECT_EQ(math::Quaterniond(math::Vector3d(-0.3, -0.1, -0.2)),
      node->WorldRotation());
  EXPECT_EQ(math::Quaterniond(math::Vector3d(-0.3, -0.1, -0.2)),
      node->LocalRotation());

  // Clean up
  engine->DestroyScene(scene);
  rendering::unloadEngine(engine->Name());
}
/////////////////////////////////////////////////
// Run the Pose test against the render engine given by the test parameter.
TEST_P(NodeTest, Pose)
{
  Pose(GetParam());
}

// Instantiate the test suite once per available render engine.
INSTANTIATE_TEST_CASE_P(Node, NodeTest,
    RENDER_ENGINE_VALUES,
    ignition::rendering::PrintToStringParam());

int main(int argc, char **argv)
{
  ::testing::InitGoogleTest(&argc, argv);
  return RUN_ALL_TESTS();
}
|
<reponame>permanentCH5/GeoInfoNet<filename>train/train_gin.py<gh_stars>10-100
import sys
sys.path.append('../dataset')
sys.path.append('../datatransfer')
sys.path.append('../network')
sys.path.append('../test')
sys.path.append('../util')
import dataset_si as LevirCS
import torch
import torch.nn as nn
import torch.optim as optim
import dataTransfer as inputTransform
import dataTransferJoint as jointTransform
import cv2 as cv
import gin as gin
#import myRes50 as res
import math
import os
import torch.nn.functional as F
from torch.utils.data import DataLoader
import datetime
from misc import check_mkdir, AverageMeter, BatchStat
import numpy as np
# Training hyper-parameters and bookkeeping paths -- edit before launching.
args = {
    'train_batch_size': 4,
    'lr': 0.001,                     # initial SGD learning rate
    'lr_decay': 0.9,                 # exponent of the poly LR schedule
    'crop_size': 240,                # random crop edge; TODO: still provisional
    'weight_decay': 1e-4,
    'momentum': 0.9,
    'display': 10,                   # log every N iterations
    'max_epoch': 250,
    'max_iter': 200000,              # hard stop in iterations
    'snapshot': 'gin',               # checkpoint/log file name stem
    'snapshot_freq': 20000,          # save a checkpoint every N iterations
    'snapshotInitName': 'densenet169.pth',  # backbone weights for transfer init
    'model_dir': '../models/',
    'log_dir': '../log/',
    'exp_name': 'gin',
    'print_freq': 1,
    'is_transfer': True,             # initialize from snapshotInitName
    'curr_epoch': 1,                 # starting epoch (1-based)
    'trainTxtPath': './train_levir_cloud_snow_dataset_version3_withdem.txt',
}
# Pin training to the first GPU.
os.environ["CUDA_VISIBLE_DEVICES"] = '0'
device = torch.device('cuda:0')
def main():
    """Build the dataset, network, loss, and optimizer, then run training.

    Reads all configuration from the module-level ``args`` dict and logs the
    run configuration to a timestamped file under ``args['log_dir']``.
    """
    # Joint transforms are applied to image and label together.
    train_joint_transform = jointTransform.Compose([
        jointTransform.RandomImgCrop(args['crop_size']),  # imgcrop: h, w
        jointTransform.RotateTransform(),  # random multiple-of-90 rotation
    ])
    train_set = LevirCS.LevirCS_MSS_CloudSnowDatasetV3(trainTxtPath=args['trainTxtPath'], rootPath='',
        joint_transform=train_joint_transform, image_transform=None)
    train_loader = DataLoader(train_set, batch_size=args['train_batch_size'], num_workers=2, shuffle=True)
    curr_epoch = args['curr_epoch']
    net = gin.gin(is_transfer=args['is_transfer'], curr_epoch=args['curr_epoch'], snapshotInitPath=args['model_dir']+args['snapshotInitName']).to(device)
    print(net)
    net.train()
    criterion_cls = nn.CrossEntropyLoss()
    optimizer = optim.SGD(net.parameters(), lr=args['lr'], weight_decay=args['weight_decay'], momentum=args['momentum'], nesterov=True)
    print( optimizer)
    check_mkdir(args['model_dir'])
    check_mkdir(args['log_dir'])
    # Timestamp identifies this run's log file.
    args['time'] = str(datetime.datetime.now().timestamp())
    open(os.path.join(args['log_dir'], args['time']+ '_'+args['snapshot']+'.txt'), 'a').write(str(args) + '\n\n')
    train(train_loader, net, criterion_cls, optimizer, curr_epoch, args)
def train(train_loader, net, criterion_cls, optimizer, curr_epoch, train_args):
    """Run the training loop until max_epoch or max_iter is reached.

    Args:
        train_loader: DataLoader yielding (inputs, segmentation ground truth).
        net: model, already in train mode and on `device`.
        criterion_cls: pixel-wise cross-entropy loss.
        optimizer: SGD optimizer; its lr is overwritten every iteration.
        curr_epoch: 1-based epoch to start from (supports resuming).
        train_args: the global `args` dict.
    """
    while curr_epoch <= train_args['max_epoch']:
        main_loss_recoder = AverageMeter()
        cls_loss_recoder = AverageMeter()  # NOTE(review): never updated below — confirm if dead
        batch_stat = BatchStat()
        # Global iteration counter, continued across epochs.
        curr_iter = (curr_epoch - 1) * len(train_loader)
        for i, data in enumerate(train_loader):
            # Poly learning-rate schedule: lr * (1 - iter/max_iter) ** lr_decay.
            optimizer.param_groups[0]['lr'] = train_args['lr'] * (1 - float(curr_iter) / train_args['max_iter']
                ) ** train_args['lr_decay']
            inputs, gts_seg = data
            inputs = inputs.to(device)
            optimizer.zero_grad()
            outputs_cls = net(inputs)
            gts_seg = gts_seg.to(device)
            cls_loss = criterion_cls(outputs_cls, gts_seg)
            loss = cls_loss
            loss.backward()
            optimizer.step()
            # Running loss, weighted by the number of pixels per image.
            main_loss_recoder.update(loss.data.cpu().numpy(), inputs.size(2) * inputs.size(3))
            batch_stat.stat_update(outputs_cls, gts_seg)
            curr_iter += 1
            if (i + 1) % train_args['display'] == 0:
                # Periodic console + file logging of loss, lr and per-class metrics.
                mainLossOutput = '[epoch %d], [iter %d / %d], [train main loss %.5f], [lr %.10f]' % (
                    curr_epoch, i + 1, len(train_loader), main_loss_recoder.avg,
                    optimizer.param_groups[0]['lr'])
                print(mainLossOutput)
                open(os.path.join(args['log_dir'], args['time'] + '_' + args['snapshot'] + '.txt'), 'a').write(mainLossOutput)
                batch_statOutput = '[iter %d] [recall %.5f,%.5f,%.5f], [precision %.5f,%.5f,%.5f], [iou %.5f,%.5f,%.5f]\n' % (i + 1,
                    batch_stat.recall[0], batch_stat.recall[1], batch_stat.recall[2], batch_stat.precision[0],
                    batch_stat.precision[1], batch_stat.precision[2], batch_stat.iou[0], batch_stat.iou[1], batch_stat.iou[2])
                print(batch_statOutput)
                open(os.path.join(args['log_dir'], args['time'] + '_' + args['snapshot'] + '.txt'), 'a').write(batch_statOutput)
            if curr_iter >= train_args['max_iter']:
                # Hard stop once the lr schedule is exhausted.
                return
            if curr_iter % train_args['snapshot_freq'] == 0 and curr_iter / train_args['snapshot_freq'] > 0:
                # Periodic snapshot of model weights, tagged with iter and epoch.
                torch.save(net.state_dict(), train_args['model_dir'] + train_args['snapshot'] + '_' + str(curr_iter) + '_epoch' + str(curr_epoch) + '.pth')
        curr_epoch += 1
    return
if __name__ == '__main__':
    # Run training only when executed as a script, not on import.
    main()
|
#!/usr/bin/env bash
# Interactive-shell prompt configuration (standard Debian .bashrc fragment):
# chroot tag, optional color prompt, xterm title, and colored ls/grep aliases.

# set variable identifying the chroot you work in (used in the prompt below)
if [ -z "${debian_chroot:-}" ] && [ -r /etc/debian_chroot ]; then
    debian_chroot=$(cat /etc/debian_chroot)
fi

# set a fancy prompt (non-color, unless we know we "want" color)
case "$TERM" in
    xterm-color|*-256color) color_prompt=yes;;
esac

# uncomment for a colored prompt, if the terminal has the capability; turned
# off by default to not distract the user: the focus in a terminal window
# should be on the output of commands, not on the prompt
#force_color_prompt=yes

if [ -n "$force_color_prompt" ]; then
    # Probe terminal color capability via tput before enabling colors.
    if [ -x /usr/bin/tput ] && tput setaf 1 >&/dev/null; then
        # We have color support; assume it's compliant with Ecma-48
        # (ISO/IEC-6429). (Lack of such support is extremely rare, and such
        # a case would tend to support setf rather than setaf.)
        color_prompt=yes
    else
        color_prompt=
    fi
fi

if [ "$color_prompt" = yes ]; then
    PS1='${debian_chroot:+($debian_chroot)}\[\033[01;32m\]\u@\h\[\033[00m\]:\[\033[01;34m\]\w\[\033[00m\]\$ '
else
    PS1='${debian_chroot:+($debian_chroot)}\u@\h:\w\$ '
fi
unset color_prompt force_color_prompt

# If this is an xterm set the title to user@host:dir
case "$TERM" in
xterm*|rxvt*)
    PS1="\[\e]0;${debian_chroot:+($debian_chroot)}\u@\h: \w\a\]$PS1"
    ;;
*)
    ;;
esac

# enable color support of ls and also add handy aliases
if [ -x /usr/bin/dircolors ]; then
    test -r ~/.dircolors && eval "$(dircolors -b ~/.dircolors)" || eval "$(dircolors -b)"
    alias ls='ls --color=auto'
    #alias dir='dir --color=auto'
    #alias vdir='vdir --color=auto'
    alias grep='grep --color=auto'
    alias fgrep='fgrep --color=auto'
    alias egrep='egrep --color=auto'
fi

# colored GCC warnings and errors
#export GCC_COLORS='error=01;31:warning=01;35:note=01;36:caret=01;32:locus=01:quote=01'
|
#!/bin/bash
# Container entrypoint helper: create a local user whose UID matches the
# owner of the current directory, so files written to the mounted volume
# keep the host owner's UID.
DIR=${PWD}
USER_ID=$(stat -c "%u" "${PWD}")
echo "Using uid=${USER_ID} from owner of dir ${DIR}"
# verify the user exists; USER_NAME must be set (abort with a message if not),
# and id's own output is discarded — only its exit status matters
if ! id "${USER_NAME:?USER_NAME must be set}" >/dev/null 2>&1; then
    echo "Creating user ${USER_NAME} with UID ${USER_ID}"
    # create a new user, without home directory and without a password
    adduser -u "${USER_ID}" -D -s /bin/bash "${USER_NAME}"
fi
|
<reponame>wmn7/Traffic-Classification
'''
@Author: <NAME>
@Date: 2021-01-05 11:08:45
@Description: 将原始的数据划分训练集与测试集, 返回训练集和测试集的路径地址
@LastEditTime: 2021-01-05 18:02:58
'''
import os
from sklearn.model_selection import train_test_split
from TrafficFlowClassification.TrafficLog.setLog import logger
def get_file_path(folder_path):
    """Collect pcap file paths under ``folder_path`` into a nested dict:
    ``{top_category: {second_category: [pcap_path, ...]}}``.

    The two category levels are the last two directory components of each
    folder that directly contains files, e.g.::

        {
            'Chat': {
                'AIMchat1': ['...\\Chat\\AIMchat1\\AIMchat1.pcap....pcap', ...],
                'aim_chat_3a': [...],
            },
            'Email': {...},
        }

    Args:
        folder_path (str): root directory containing the pcap files
    """
    collected = {}
    for root, _, file_names in os.walk(folder_path):
        if not file_names:
            continue
        logger.info('正在记录 {} 下的 pcap 文件'.format(root))
        # Normalize then split the path, e.g. 'D:\\a\\b\\c' -> ['D:', 'a', 'b', 'c'];
        # the last two components name the big and small categories.
        parts = os.path.normpath(root).split(os.sep)
        top_category, second_category = parts[-2], parts[-1]
        bucket = collected.setdefault(top_category, {}).setdefault(second_category, [])
        for file_name in file_names:
            bucket.append(os.path.join(root, file_name))  # full path of one pcap
    logger.info('将所有的 pcap 文件整理为 dict !')
    logger.info('==========\n')
    return collected
def get_train_test(folder_path, train_size):
    """Split the pcap paths found under ``folder_path`` into train/test sets.

    Returns two dicts keyed by top-level category, each mapping to a flat
    list of pcap paths, e.g. ``{'Chat': [path, ...], 'Email': [...], ...}``.

    Args:
        folder_path (str): root directory containing the pcap files
        train_size (float): fraction in (0, 1) assigned to the training set
    """
    train_dict = {}  # training-set pcap paths per category
    test_dict = {}   # test-set pcap paths per category
    for category, sub_dict in get_file_path(folder_path=folder_path).items():
        train_dict[category] = []
        test_dict[category] = []
        for pcap_paths in sub_dict.values():
            if len(pcap_paths) <= 3:
                # too few samples to split meaningfully; skipped entirely
                continue
            train_part, test_part = train_test_split(pcap_paths, train_size=train_size, shuffle=True)
            train_dict[category].extend(train_part)
            test_dict[category].extend(test_part)
    return train_dict, test_dict
|
import './add-stuff-page.html';
import './add-stuff-page.js';
import './edit-stuff-page.html';
import './edit-stuff-page.js';
import './home-page.html';
import './list-stuff-page.html';
import './list-stuff-page.js';
import './sign-up-page.html';
import './sign-up-page.js';
import './login-page.html';
import './login-page.js';
import './user-home-page.html';
import './admin-page.html';
import './sell-page.html';
import './sell-page.js';
import './list-item.html';
import './list-item.js'; |
# flume-env.sh: put the Hadoop client jars on Flume's classpath
FLUME_CLASSPATH="/opt/hadoop/libs/*"
|
#ifdef __cplusplus
extern "C" {
#endif
#include "qcloud.h"
/**
 * @brief Map an ACK type plus its outcome (nack / timeout / success) to the
 *        MQTT event type reported to the user callback.
 *
 * @param ack_type          which ACK the event relates to (PUBACK/SUBACK/UNSUBACK)
 * @param is_nack           non-zero if the server rejected the request
 * @param is_timer_expired  non-zero if the ACK wait timer expired
 * @return the event type to deliver
 */
__QCLOUD_STATIC__ mqtt_event_type_t mqtt_glue_event_type_get(qcloud_mqtt_ack_type_t ack_type, int is_nack, int is_timer_expired)
{
    mqtt_event_type_t event_type;

    if (is_nack) {
        switch (ack_type) {
            case QCLOUD_MQTT_ACK_TYPE_SUBACK:
                event_type = MQTT_EVENT_SUBCRIBE_NACK;
                break;

            case QCLOUD_MQTT_ACK_TYPE_UNSUBACK:
                event_type = MQTT_EVENT_UNSUBCRIBE_NACK;
                break;

            case QCLOUD_MQTT_ACK_TYPE_PUBACK:
            default:
                /* default keeps event_type from being read uninitialized
                   should an unexpected ack_type ever be passed in */
                event_type = MQTT_EVENT_PUBLISH_NACK;
                break;
        }
    } else if (is_timer_expired) {
        switch (ack_type) {
            case QCLOUD_MQTT_ACK_TYPE_SUBACK:
                event_type = MQTT_EVENT_SUBCRIBE_TIMEOUT;
                break;

            case QCLOUD_MQTT_ACK_TYPE_UNSUBACK:
                event_type = MQTT_EVENT_UNSUBCRIBE_TIMEOUT;
                break;

            case QCLOUD_MQTT_ACK_TYPE_PUBACK:
            default:
                event_type = MQTT_EVENT_PUBLISH_TIMEOUT;
                break;
        }
    } else {
        switch (ack_type) {
            case QCLOUD_MQTT_ACK_TYPE_SUBACK:
                event_type = MQTT_EVENT_SUBCRIBE_SUCCESS;
                break;

            case QCLOUD_MQTT_ACK_TYPE_UNSUBACK:
                event_type = MQTT_EVENT_UNSUBCRIBE_SUCCESS;
                break;

            case QCLOUD_MQTT_ACK_TYPE_PUBACK:
            default:
                event_type = MQTT_EVENT_PUBLISH_SUCCESS;
                break;
        }
    }

    return event_type;
}
/**
 * @brief Duplicate a (possibly non-NUL-terminated) string into a freshly
 *        allocated, NUL-terminated mutable buffer.
 *
 * @param orig_str      source characters
 * @param orig_str_len  number of characters to copy
 * @return heap copy on success, NULL on allocation failure
 */
__QCLOUD_INTERNAL__ char *mqtt_glue_string_const2mutable(const char *orig_str, size_t orig_str_len)
{
    char *copy = osal_malloc(orig_str_len + 1);

    if (copy) {
        strncpy(copy, orig_str, orig_str_len);
        copy[orig_str_len] = 0;  /* always terminate */
    }

    return copy;
}
/* Release a string previously allocated by mqtt_glue_string_const2mutable(). */
__QCLOUD_INTERNAL__ void mqtt_glue_string_mutable_free(char *str_mutable)
{
    osal_free(str_mutable);
}
/**
 * @brief Generate the next MQTT packet id in 1..65535 (0 is not a valid
 *        MQTT packet id), wrapping back to 1 after 65535.
 *
 * The whole read-modify-write-return sequence runs under the client's
 * global lock; the original read the counter before taking the lock and
 * returned it after releasing, so two concurrent callers could obtain the
 * same id.
 */
__QCLOUD_INTERNAL__ uint16_t mqtt_glue_packet_id_generate(qcloud_mqtt_client_t *client)
{
#define PACKET_ID_MAX   (65535)
    uint16_t packet_id;

    osal_mutex_lock(client->global_lock);
    packet_id = (client->packet_id == PACKET_ID_MAX) ? 1 : (uint16_t)(client->packet_id + 1);
    client->packet_id = packet_id;
    osal_mutex_unlock(client->global_lock);

    return packet_id;
}
/**
 * @brief Fill conn_id with QCLOUD_MQTT_CONNECT_ID_MAX-1 random alphanumeric
 *        characters followed by a NUL terminator.
 *
 * NOTE(review): srand() is re-seeded on every call with second resolution,
 * so two calls within the same second produce identical ids — confirm
 * whether that matters for reconnect bursts.
 *
 * @param conn_id output buffer of at least QCLOUD_MQTT_CONNECT_ID_MAX bytes
 */
__QCLOUD_INTERNAL__ void mqtt_glue_connect_id_generate(char *conn_id)
{
    int i, flag;

    srand((unsigned)osal_timer_current_sec());

    for (i = 0; i < QCLOUD_MQTT_CONNECT_ID_MAX - 1; ++i) {
        flag = rand() % 3;  /* pick a character class: 0 lower, 1 upper, 2 digit */

        switch (flag) {
            case 0:
                conn_id[i] = (rand() % 26) + 'a';   /* a-z */
                break;

            case 1:
                conn_id[i] = (rand() % 26) + 'A';   /* A-Z */
                break;

            case 2:
                conn_id[i] = (rand() % 10) + '0';   /* 0-9 */
                break;
        }
    }

    conn_id[QCLOUD_MQTT_CONNECT_ID_MAX - 1] = '\0';
}
/**
 * @brief Send the first `length` bytes of the client's tx buffer, retrying
 *        partial writes until everything is sent or the timer expires.
 *
 * @param client MQTT client
 * @param length number of bytes of tx_buffer to send
 * @param timer  overall deadline for the whole send
 * @return QCLOUD_ERR_SUCCESS when all bytes were sent, otherwise the
 *         transport error (or the last rc when the timer ran out)
 */
__QCLOUD_INTERNAL__ qcloud_err_t mqtt_glue_packet_send(qcloud_mqtt_client_t *client, size_t length, osal_timer_t *timer)
{
    QCLOUD_FUNC_ENTRY;

    QCLOUD_POINTER_SANITY_CHECK(client, QCLOUD_ERR_INVAL);
    QCLOUD_POINTER_SANITY_CHECK(timer, QCLOUD_ERR_INVAL);

    qcloud_err_t rc = QCLOUD_ERR_SUCCESS;
    size_t cur_bytes_send = 0, total_bytes_send = 0;

    if (length >= sizeof(client->tx_buffer)) {
        QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_BUF_TOO_SHORT);
    }

    while (total_bytes_send < length && !osal_timer_is_expired(timer)) {
        /* write only the bytes not yet sent — the original passed `length`
           here, so after a partial write the retry started at the offset
           but still wrote `length` bytes, running past the packet end */
        rc = client->network.write(&(client->network),
                                   &client->tx_buffer[total_bytes_send],
                                   length - total_bytes_send,
                                   osal_timer_remain(timer), &cur_bytes_send);
        if (rc != QCLOUD_ERR_SUCCESS) {
            /* there was an error writing the data */
            break;
        }

        total_bytes_send += cur_bytes_send;
    }

    if (total_bytes_send == length) {
        /* record the fact that we have successfully sent the packet */
        QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_SUCCESS);
    }

    QCLOUD_FUNC_EXIT_RC(rc);
}
/**
 * @brief Decode the MQTT "remaining length" field from the network.
 *
 * Reads one byte at a time and accumulates the variable-length value as
 * specified by MQTT: 7 data bits per byte, MSB set = more bytes follow.
 *
 * @param client  MQTT client
 * @param value   decoded remaining length (output)
 * @param timeout per-read timeout in ms
 * @return QCLOUD_ERR_SUCCESS on success; QCLOUD_ERR_MQTT_PACKET_READ when
 *         the field exceeds 4 bytes (malformed); QCLOUD_ERR_FAILURE on a
 *         network read error
 */
__QCLOUD_STATIC__ qcloud_err_t mqtt_glue_packet_decode_from_network(qcloud_mqtt_client_t *client, uint32_t *value, uint32_t timeout)
{
    QCLOUD_FUNC_ENTRY;

    QCLOUD_POINTER_SANITY_CHECK(client, QCLOUD_ERR_INVAL);
    QCLOUD_POINTER_SANITY_CHECK(value, QCLOUD_ERR_INVAL);

    uint8_t byte;
    uint32_t len = 0, multiplier = 1;
    size_t read_len = 0;

    /* The value argument is the important value. len is just used temporarily
     * and never used by the calling function for anything else */
    *value = 0;

    do {
#define MAX_NO_OF_REMAINING_LENGTH_BYTES    4
        if (++len > MAX_NO_OF_REMAINING_LENGTH_BYTES) {
            /* bad data: the spec caps the field at 4 bytes */
            QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_MQTT_PACKET_READ)
        }

        if ((client->network.read(&(client->network), &byte, 1, timeout, &read_len)) !=
            QCLOUD_ERR_SUCCESS) {
            QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_FAILURE);
        }

        /* low 7 bits carry data; each subsequent byte weighs 128x more */
        *value += (byte & 127) * multiplier;
        multiplier *= 128;
    } while ((byte & 128) != 0);  /* MSB set means another length byte follows */

    QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_SUCCESS);
}
/**
 * @brief Read and discard `packet_len` bytes of an oversized packet so the
 *        input stream stays in sync, reusing rx_buffer as scratch space.
 *
 * @param client     MQTT client
 * @param timer      deadline timer; extended by QCLOUD_MQTT_REMAIN_WAIT_MS_MAX
 * @param packet_len number of bytes to discard
 */
__QCLOUD_STATIC__ void mqtt_glue_packet_drain(qcloud_mqtt_client_t *client, osal_timer_t *timer, uint32_t packet_len)
{
    qcloud_err_t rc;
    uint32_t deadline_ms;
    size_t drained = 0, got = 0, chunk = 0;

    deadline_ms = osal_timer_remain(timer) + QCLOUD_MQTT_REMAIN_WAIT_MS_MAX;

    /* read at most one rx_buffer's worth per iteration */
    chunk = (packet_len < sizeof(client->rx_buffer)) ? packet_len : sizeof(client->rx_buffer);

    do {
        rc = client->network.read(&(client->network), client->rx_buffer,
                                  chunk, deadline_ms, &got);
        if (rc == QCLOUD_ERR_SUCCESS) {
            drained += got;
            if ((packet_len - drained) >= sizeof(client->rx_buffer)) {
                chunk = sizeof(client->rx_buffer);
            } else {
                chunk = packet_len - drained;
            }
        }
    } while (drained < packet_len && rc == QCLOUD_ERR_SUCCESS);
}
/**
 * @brief Read one MQTT packet from the underlying SSL/TCP transport.
 *
 * 1. Read the first byte to learn the packet type;
 * 2. Read the "remaining length" field (up to 4 bytes), which covers the
 *    variable header plus payload;
 * 3. Read the rest of the packet (variable header and payload) into
 *    rx_buffer; packets larger than rx_buffer are drained and rejected.
 *
 * @param client      MQTT client
 * @param timer       deadline timer
 * @param packet_type decoded packet type (output)
 * @return QCLOUD_ERR_SUCCESS, QCLOUD_ERR_MQTT_NOTHING_TO_READ when no data
 *         is pending, QCLOUD_ERR_BUF_TOO_SHORT on oversized packets, or a
 *         transport error
 */
__QCLOUD_STATIC__ qcloud_err_t mqtt_glue_packet_read(qcloud_mqtt_client_t *client, osal_timer_t *timer, uint8_t *packet_type)
{
    QCLOUD_FUNC_ENTRY;

    QCLOUD_POINTER_SANITY_CHECK(client, QCLOUD_ERR_INVAL);
    QCLOUD_POINTER_SANITY_CHECK(timer, QCLOUD_ERR_INVAL);
    QCLOUD_POINTER_SANITY_CHECK(packet_type, QCLOUD_ERR_INVAL);

    qcloud_err_t rc;
    size_t read_len = 0;
    mqtt_header_t header = {0};
    uint32_t len = 0, remain_len = 0, time_remain = 0;

    time_remain = osal_timer_remain(timer);
    if (time_remain == 0) {
        // QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_MQTT_REQUEST_TIMEOUT);
        time_remain = 1;  /* still attempt a minimal 1 ms read */
    }

    // 1. read the first byte of the fixed header (type + flags)
    rc = client->network.read(&(client->network), client->rx_buffer, (len = 1), time_remain, &read_len);
    if (rc == QCLOUD_ERR_SSL_NOTHING_TO_READ || rc == QCLOUD_ERR_TCP_NOTHING_TO_READ) {
        QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_MQTT_NOTHING_TO_READ);
    }

    if (rc != QCLOUD_ERR_SUCCESS) {
        QCLOUD_FUNC_EXIT_RC(rc);
    }

    // 2. read the "remaining length" field of the fixed header;
    //    the deadline is extended to make sure a whole MQTT packet is received
    time_remain = osal_timer_remain(timer) + QCLOUD_MQTT_REMAIN_WAIT_MS_MAX;
    rc = mqtt_glue_packet_decode_from_network(client, &remain_len, time_remain);
    if (QCLOUD_ERR_SUCCESS != rc) {
        QCLOUD_FUNC_EXIT_RC(rc);
    }

    // if the read buffer is smaller than the remaining length, the packet
    // is drained from the stream and discarded
    if (remain_len >= sizeof(client->rx_buffer)) {
        mqtt_glue_packet_drain(client, timer, remain_len);
        QCLOUD_LOG_E("rx buffer insufficient[%d] < [%d]", sizeof(client->rx_buffer), remain_len);
        QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_BUF_TOO_SHORT);
    }

    // re-encode the remaining length into the read buffer, right after the
    // first header byte, so rx_buffer holds a complete serialized packet
    len += mqtt_common_packet_encode(client->rx_buffer + len, remain_len);

    // 3. read the rest of the packet (variable header + payload)
    if ((len + remain_len) > sizeof(client->rx_buffer)) {
        mqtt_glue_packet_drain(client, timer, remain_len);
        QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_BUF_TOO_SHORT);
    }

    time_remain = osal_timer_remain(timer) + QCLOUD_MQTT_REMAIN_WAIT_MS_MAX;
    rc = client->network.read(&(client->network), client->rx_buffer + len, remain_len, time_remain, &read_len);
    if (rc != QCLOUD_ERR_SUCCESS) {
        QCLOUD_FUNC_EXIT_RC(rc);
    }

    header.byte = client->rx_buffer[0];
    *packet_type = header.bits.type;

    QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_SUCCESS);
}
/**
 * @brief Exact (byte-for-byte) topic comparison.
 *
 * @param topic_filter subscription topic filter
 * @param topic        topic name to compare against
 * @return QCLOUD_TRUE when identical, QCLOUD_FALSE otherwise
 */
__QCLOUD_INTERNAL__ int mqtt_glue_is_topic_equals(const char *topic_filter, char *topic)
{
    size_t topic_len = strlen(topic);

    /* equal only when lengths match and every byte matches */
    if (strlen(topic_filter) == topic_len &&
        strncmp(topic_filter, topic, topic_len) == 0) {
        return QCLOUD_TRUE;
    }

    return QCLOUD_FALSE;
}
/**
 * @brief Wildcard topic matching.
 *
 * Assumes the filter and the name are well formed:
 * '#' can only appear at the end; '+' and '#' can only sit next to a '/'.
 *
 * @param topic_filter subscription topic filter (may contain '+' / '#')
 * @param topic        received topic name (no wildcards)
 * @param topic_len    length of the topic name
 * @return QCLOUD_TRUE on match, QCLOUD_FALSE otherwise
 */
__QCLOUD_INTERNAL__ int mqtt_glue_is_topic_matches(const char *topic_filter, char *topic, uint16_t topic_len)
{
    // it's ugly, but it works. keep it, optimize later
    char *curf, *curn, *curn_end, *nextpos;

    curf = (char *)topic_filter;   // cursor into the filter
    curn = topic;                  // cursor into the topic name
    curn_end = curn + topic_len;   // one past the last topic byte

    while (*curf && (curn < curn_end)) {
        // '+' has consumed a whole level; step the filter past it at the '/'
        if (*curf == '+' && *curn == '/') {
            curf++;
            continue;
        }

        // topic descends a level where the filter does not: mismatch
        if (*curn == '/' && *curf != '/') {
            break;
        }

        // literal characters must match exactly
        if (*curf != '+' && *curf != '#' && *curf != *curn) {
            break;
        }

        if (*curf == '+') {
            /* skip until we meet the next separator, or end of string */
            nextpos = curn + 1;
            while (nextpos < curn_end && *nextpos != '/') {
                nextpos = ++curn + 1;
            }
        } else if (*curf == '#') {
            /* skip until end of string */
            curn = curn_end - 1;
        }

        curf++;
        curn++;
    };

    if (*curf == '\0') {
        // filter fully consumed: match only if the topic is fully consumed too
        return curn == curn_end;
    } else {
        // trailing '#' (or '/#') matches the rest; '+' at end matches an
        // empty final level
        return (*curf == '#') || *(curf + 1) == '#' || (*curf == '+' && *(curn - 1) == '/');
    }
}
#if (QCLOUD_CFG_DUPLICATED_MSG_REMOVE_EN > 0u)

#define MQTT_MAX_REPEAT_BUF_LEN     50

/* circular cache of recently seen packet ids, used to drop duplicate
   incoming PUBLISH messages (QoS > 0 redeliveries) */
static uint16_t sg_repeat_packet_id_buf[MQTT_MAX_REPEAT_BUF_LEN];

/**
 * @brief Check whether packet_id is already present in the duplicate cache.
 * @return QCLOUD_TRUE if cached, QCLOUD_FALSE otherwise
 */
__QCLOUD_STATIC__ int mqtt_glue_packet_id_is_exist(uint16_t packet_id)
{
    int i;

    for (i = 0; i < MQTT_MAX_REPEAT_BUF_LEN; ++i) {
        if (packet_id == sg_repeat_packet_id_buf[i]) {
            return QCLOUD_TRUE;
        }
    }

    return QCLOUD_FALSE;
}
__QCLOUD_STATIC__ static void mqtt_glue_packet_id_cache_add(uint16_t packet_id)
{
static uint16_t current_packet_id_cnt = 0;
if (mqtt_glue_packet_id_is_exist(packet_id)) {
return;
}
sg_repeat_packet_id_buf[current_packet_id_cnt++] = packet_id;
if (current_packet_id_cnt >= MQTT_MAX_REPEAT_BUF_LEN) {
current_packet_id_cnt = current_packet_id_cnt % 50;
}
}
/**
 * @brief Clear the duplicate-packet-id cache (zero every slot), e.g. when a
 *        new session starts.
 */
__QCLOUD_INTERNAL__ void mqtt_glue_packet_id_cache_reset(void)
{
    memset(sg_repeat_packet_id_buf, 0, sizeof(sg_repeat_packet_id_buf));
}
#endif
/**
 * @brief Find the installed message handler whose topic filter matches the
 *        given topic (exactly, or via '+'/'#' wildcards).
 *
 * @param client    MQTT client
 * @param topic     incoming topic name
 * @param topic_len length of the topic name
 * @return the matching handler, or NULL when none is installed
 */
__QCLOUD_STATIC__ qcloud_mqtt_msg_handler_t *mqtt_glue_msg_handler_get(qcloud_mqtt_client_t *client, char *topic, uint16_t topic_len)
{
    QCLOUD_FUNC_ENTRY;

    qcloud_list_t *curr, *next;
    qcloud_mqtt_msg_handler_t *msg_handler;

    if (!client || !topic) {
        return NULL;
    }

    /* unlocked emptiness pre-check; the walk below is done under the lock */
    if (qcloud_list_empty(&client->msg_handler_list)) {
        return NULL;
    }

    osal_mutex_lock(client->msg_handler_list_lock);

    QCLOUD_LIST_FOR_EACH_SAFE(curr, next, &client->msg_handler_list) {
        msg_handler = QCLOUD_LIST_ENTRY(curr, qcloud_mqtt_msg_handler_t, list);

        if (mqtt_glue_is_topic_equals(msg_handler->topic_filter_mutable, topic) ||
            mqtt_glue_is_topic_matches(msg_handler->topic_filter_mutable, topic, topic_len)) {
            /* first match wins; unlock before handing it back */
            osal_mutex_unlock(client->msg_handler_list_lock);
            return msg_handler;
        }
    }

    osal_mutex_unlock(client->msg_handler_list_lock);

    return NULL;
}
/**
 * @brief After a PUBLISH is received from the server, deliver the message to
 *        the matching subscription callback (or the default event callback
 *        when no subscription matches).
 *
 * @param client    MQTT client
 * @param topic     topic name of the incoming message
 * @param topic_len length of the topic name
 * @param message   deserialized incoming message
 * @return QCLOUD_ERR_SUCCESS
 */
__QCLOUD_STATIC__ qcloud_err_t mqtt_glue_incoming_message_deliver(qcloud_mqtt_client_t *client,
                                                                  char *topic,
                                                                  uint16_t topic_len,
                                                                  mqtt_incoming_msg_t *message)
{
    QCLOUD_FUNC_ENTRY;

    QCLOUD_POINTER_SANITY_CHECK(client, QCLOUD_ERR_INVAL);
    QCLOUD_POINTER_SANITY_CHECK(topic, QCLOUD_ERR_INVAL);
    QCLOUD_POINTER_SANITY_CHECK(message, QCLOUD_ERR_INVAL);

    qcloud_mqtt_msg_handler_t *msg_handler = NULL;

    message->topic = topic;
    message->topic_len = (size_t)topic_len;

    msg_handler = mqtt_glue_msg_handler_get(client, topic, topic_len);
    if (!msg_handler) {
        /* message handler not found for topic */
        /* call default handler */
        mqtt_glue_callback_involve(client, MQTT_EVENT_PUBLISH_RECVEIVED, message);
        QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_SUCCESS);
    }

    msg_handler->handler(client, message, msg_handler->private_data);

    QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_SUCCESS);
}
/**
 * @brief Unlink and free a pending-ACK record; a SUBACK record also owns its
 *        not-yet-installed message handler, which is destroyed with it.
 */
__QCLOUD_STATIC__ void mqtt_glue_ack_info_destroy(qcloud_mqtt_ack_info_t *ack_info)
{
    qcloud_list_del(&ack_info->list);

    if (ack_info->type == QCLOUD_MQTT_ACK_TYPE_SUBACK && ack_info->handler) {
        mqtt_glue_msg_handler_destory(ack_info->handler);
    }

    osal_free(ack_info);
}
/**
 * @brief Walk the pending-ACK list and expire records whose wait timer has
 *        run out, firing the corresponding *_TIMEOUT event for each.
 *
 * @param client MQTT client
 * @return QCLOUD_ERR_SUCCESS
 */
__QCLOUD_INTERNAL__ qcloud_err_t mqtt_glue_ack_list_scan(qcloud_mqtt_client_t *client)
{
    QCLOUD_FUNC_ENTRY;

    QCLOUD_POINTER_SANITY_CHECK(client, QCLOUD_ERR_INVAL);

    qcloud_list_t *curr, *next;
    mqtt_event_type_t event_type;
    qcloud_mqtt_ack_info_t *ack_info;

    if (qcloud_list_empty(&client->ack_pend_list)) {
        QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_SUCCESS);
    }

    osal_mutex_lock(client->ack_pend_list_lock);

    QCLOUD_LIST_FOR_EACH_SAFE(curr, next, &client->ack_pend_list) {
        ack_info = QCLOUD_LIST_ENTRY(curr, qcloud_mqtt_ack_info_t, list);

        /* only expire acks while connected */
        if (!qcloud_mqtt_client_is_connected(client)) {
            continue;
        }

        // check whether the ack is timeout
        if (!osal_timer_is_expired(&ack_info->ack_timer)) {
            continue;
        }

        event_type = mqtt_glue_event_type_get(ack_info->type, QCLOUD_FALSE, QCLOUD_TRUE);

        /* deliver the timeout event for this pending ack to the user callback */
        mqtt_glue_callback_involve(client, event_type, &ack_info->packet_id);

        mqtt_glue_ack_info_destroy(ack_info);
    }

    osal_mutex_unlock(client->ack_pend_list_lock);

    QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_SUCCESS);
}
/**
 * @brief Destroy every pending-ACK record (used on client teardown).
 *
 * @param client MQTT client
 * @return QCLOUD_ERR_SUCCESS
 */
__QCLOUD_INTERNAL__ qcloud_err_t mqtt_glue_ack_list_destroy(qcloud_mqtt_client_t *client)
{
    QCLOUD_FUNC_ENTRY;

    QCLOUD_POINTER_SANITY_CHECK(client, QCLOUD_ERR_INVAL);

    qcloud_list_t *cursor, *backup;
    qcloud_mqtt_ack_info_t *pending;

    if (qcloud_list_empty(&client->ack_pend_list)) {
        QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_SUCCESS);
    }

    osal_mutex_lock(client->ack_pend_list_lock);

    /* safe iteration: each record is unlinked and freed as we go */
    QCLOUD_LIST_FOR_EACH_SAFE(cursor, backup, &client->ack_pend_list) {
        pending = QCLOUD_LIST_ENTRY(cursor, qcloud_mqtt_ack_info_t, list);
        mqtt_glue_ack_info_destroy(pending);
    }

    osal_mutex_unlock(client->ack_pend_list_lock);

    QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_SUCCESS);
}
/**
 * @brief Allocate and initialize a pending-ACK record; the serialized packet
 *        is copied into trailing storage of the same allocation.
 *
 * @param type        ACK type expected
 * @param handler     message handler to install on SUBACK (may be NULL)
 * @param timeout     ack wait timeout in ms
 * @param packet_id   packet id awaiting acknowledgement
 * @param packet_len  length of the serialized packet
 * @param packet_body serialized packet bytes to retain for retransmission
 * @return the new record, or NULL on allocation failure
 */
__QCLOUD_STATIC__ qcloud_mqtt_ack_info_t *mqtt_glue_ack_info_create(qcloud_mqtt_ack_type_t type,
                                                                    qcloud_mqtt_msg_handler_t *handler,
                                                                    uint32_t timeout,
                                                                    uint16_t packet_id,
                                                                    uint16_t packet_len,
                                                                    uint8_t *packet_body)
{
    qcloud_mqtt_ack_info_t *info;

    /* single allocation: the struct followed by packet_len payload bytes */
    info = (qcloud_mqtt_ack_info_t *)osal_malloc(sizeof(qcloud_mqtt_ack_info_t) + packet_len);
    if (!info) {
        QCLOUD_LOG_E("memory alloc failed!");
        return NULL;
    }

    qcloud_list_init(&info->list);

    osal_timer_init(&info->ack_timer);
    osal_timer_countdown_ms(&info->ack_timer, timeout);

    info->type        = type;
    info->handler     = handler;
    info->packet_id   = packet_id;
    info->packet_len  = packet_len;
    info->packet_body = (uint8_t *)info + sizeof(qcloud_mqtt_ack_info_t);
    memcpy(info->packet_body, packet_body, packet_len);

    return info;
}
/**
 * @brief Unlink and free one pending-ACK record WITHOUT destroying its
 *        handler (ownership of the handler passes to the caller).
 *        The `client` parameter is currently unused.
 */
__QCLOUD_STATIC__ void mqtt_glue_ack_list_do_unrecord(qcloud_mqtt_client_t *client, qcloud_mqtt_ack_info_t *ack_info)
{
    qcloud_list_del(&ack_info->list);
    osal_free(ack_info);
}
/**
 * @brief Remove the pending-ACK record(s) matching (type, packet_id) and
 *        fire the corresponding event to the user callback.
 *
 * @param client    MQTT client
 * @param type      ACK type to match
 * @param packet_id packet id to match
 * @param is_nack   non-zero when the server rejected the request
 * @param handler   output (may be NULL): receives the record's message
 *                  handler so the caller can install or destroy it
 */
__QCLOUD_STATIC__ void mqtt_glue_ack_list_unrecord(qcloud_mqtt_client_t *client,
                                                   qcloud_mqtt_ack_type_t type,
                                                   uint16_t packet_id,
                                                   int is_nack,
                                                   qcloud_mqtt_msg_handler_t **handler)
{
    QCLOUD_FUNC_ENTRY;

    qcloud_list_t *curr, *next;
    mqtt_event_type_t event_type;
    qcloud_mqtt_ack_info_t *ack_info;

    if (!client) {
        return;
    }

    /* nothing to do when no ack is pending or the connection is gone */
    if (qcloud_list_empty(&client->ack_pend_list) ||
        !qcloud_mqtt_client_is_connected(client)) {
        return;
    }

    osal_mutex_lock(client->ack_pend_list_lock);

    QCLOUD_LIST_FOR_EACH_SAFE(curr, next, &client->ack_pend_list) {
        ack_info = QCLOUD_LIST_ENTRY(curr, qcloud_mqtt_ack_info_t, list);

        if (ack_info->packet_id != packet_id || ack_info->type != type) {
            continue;
        }

        /* map the outcome (nack/timeout/success) to an event and notify */
        event_type = mqtt_glue_event_type_get(type, is_nack, osal_timer_is_expired(&ack_info->ack_timer));
        mqtt_glue_callback_involve(client, event_type, &ack_info->packet_id);

        if (handler) {
            /* hand the handler back before the record is freed */
            *handler = ack_info->handler;
        }

        mqtt_glue_ack_list_do_unrecord(client, ack_info);
    }

    osal_mutex_unlock(client->ack_pend_list_lock);
}
/* Append a pending-ACK record to the client's list under the list lock. */
__QCLOUD_STATIC__ void mqtt_glue_ack_list_do_record(qcloud_mqtt_client_t *client, qcloud_mqtt_ack_info_t *ack_info)
{
    osal_mutex_lock(client->ack_pend_list_lock);
    qcloud_list_add_tail(&ack_info->list, &client->ack_pend_list);
    osal_mutex_unlock(client->ack_pend_list_lock);
}
/**
 * @brief Create a pending-ACK record from the packet currently held in the
 *        client's tx buffer and append it to the pending list.
 *
 * @param client     MQTT client (its command_timeout arms the ack timer)
 * @param type       ACK type expected
 * @param handler    handler to install on SUBACK (may be NULL)
 * @param packet_id  packet id awaiting acknowledgement
 * @param packet_len serialized packet length in tx_buffer
 * @return QCLOUD_ERR_SUCCESS, or QCLOUD_ERR_FAILURE on allocation failure
 */
__QCLOUD_INTERNAL__ qcloud_err_t mqtt_glue_ack_list_record(qcloud_mqtt_client_t *client,
                                                           qcloud_mqtt_ack_type_t type,
                                                           qcloud_mqtt_msg_handler_t *handler,
                                                           uint16_t packet_id,
                                                           uint16_t packet_len)
{
    QCLOUD_FUNC_ENTRY;

    QCLOUD_POINTER_SANITY_CHECK(client, QCLOUD_ERR_INVAL);

    qcloud_mqtt_ack_info_t *record = mqtt_glue_ack_info_create(type, handler,
                                                               client->command_timeout,
                                                               packet_id, packet_len,
                                                               client->tx_buffer);
    if (!record) {
        QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_FAILURE);
    }

    mqtt_glue_ack_list_do_record(client, record);

    QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_SUCCESS);
}
/**
 * @brief Deliver an event to the user-registered event callback.
 *
 * @param client     MQTT client
 * @param event_type event to report
 * @param message    event payload (type depends on event_type)
 * @return QCLOUD_ERR_SUCCESS, or QCLOUD_ERR_FAILURE when no callback is set
 */
__QCLOUD_INTERNAL__ qcloud_err_t mqtt_glue_callback_involve(qcloud_mqtt_client_t *client, mqtt_event_type_t event_type, void *message)
{
    mqtt_event_t event;
    mqtt_event_handler_t *event_handler;

    event_handler = &client->event_handler;

    /* the original tested `!event_handler`, i.e. the address of a struct
       member, which is never NULL; guard the function pointer itself so an
       unset callback does not crash */
    if (!event_handler->handler) {
        QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_FAILURE);
    }

    event.type    = event_type;
    event.message = message;

    event_handler->handler(client, event_handler->context, &event);

    QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_SUCCESS);
}
/**
 * @brief Handle an incoming PUBACK: deserialize it and release the matching
 *        pending-ACK record (firing the publish-success event).
 *
 * @param client MQTT client (PUBACK bytes are in rx_buffer)
 * @param timer  deadline timer (unused beyond sanity check)
 * @return QCLOUD_ERR_SUCCESS, or the deserialization error
 */
__QCLOUD_STATIC__ qcloud_err_t mqtt_glue_puback_packet_handle(qcloud_mqtt_client_t *client, osal_timer_t *timer)
{
    QCLOUD_FUNC_ENTRY;

    QCLOUD_POINTER_SANITY_CHECK(client, QCLOUD_ERR_INVAL);
    QCLOUD_POINTER_SANITY_CHECK(timer, QCLOUD_ERR_INVAL);

    uint16_t packet_id;
    uint8_t dup, type;
    qcloud_err_t rc = mqtt_common_deserialize_ack_packet(&type, &dup, &packet_id,
                                                         client->rx_buffer, sizeof(client->rx_buffer));
    if (rc != QCLOUD_ERR_SUCCESS) {
        QCLOUD_FUNC_EXIT_RC(rc);
    }

    mqtt_glue_ack_list_unrecord(client, QCLOUD_MQTT_ACK_TYPE_PUBACK, packet_id, QCLOUD_FALSE, NULL);

    QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_SUCCESS);
}
/**
 * @brief Unlink a message handler and free it together with its owned topic
 *        filter string. (Name keeps the existing "destory" spelling because
 *        external callers reference it.)
 */
__QCLOUD_INTERNAL__ void mqtt_glue_msg_handler_destory(qcloud_mqtt_msg_handler_t *msg_handler)
{
    qcloud_list_del(&msg_handler->list);
    mqtt_glue_string_mutable_free(msg_handler->topic_filter_mutable);
    osal_free(msg_handler);
}
/**
 * @brief Allocate a message-handler node for a subscription.
 *
 * Ownership of topic_filter_mutable transfers to the returned handler
 * (freed later by mqtt_glue_msg_handler_destory()).
 *
 * @param topic_filter_mutable heap-owned topic filter string
 * @param subscribe_opt        subscription options (qos, callback, user data)
 * @return the new handler, or NULL on allocation failure
 */
__QCLOUD_INTERNAL__ qcloud_mqtt_msg_handler_t * mqtt_glue_msg_handler_create(char *topic_filter_mutable, mqtt_subscribe_opt_t *subscribe_opt)
{
    qcloud_mqtt_msg_handler_t *node;

    node = (qcloud_mqtt_msg_handler_t *)osal_malloc(sizeof(qcloud_mqtt_msg_handler_t));
    if (!node) {
        QCLOUD_LOG_E("malloc failed");
        return NULL;
    }

    qcloud_list_init(&node->list);

    node->private_data         = subscribe_opt->private_data;
    node->handler              = subscribe_opt->message_handler;
    node->qos                  = subscribe_opt->qos;
    node->topic_filter_mutable = topic_filter_mutable;

    return node;
}
/**
 * @brief Destroy every installed message handler (used on client teardown).
 *
 * @param client MQTT client
 * @return QCLOUD_ERR_SUCCESS
 */
__QCLOUD_INTERNAL__ qcloud_err_t mqtt_glue_msg_handler_list_destroy(qcloud_mqtt_client_t *client)
{
    QCLOUD_FUNC_ENTRY;

    QCLOUD_POINTER_SANITY_CHECK(client, QCLOUD_ERR_INVAL);

    qcloud_list_t *curr, *next;
    qcloud_mqtt_msg_handler_t *msg_handler;

    if (qcloud_list_empty(&client->msg_handler_list)) {
        QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_SUCCESS);
    }

    osal_mutex_lock(client->msg_handler_list_lock);

    /* safe iteration: each handler is unlinked and freed as we go */
    QCLOUD_LIST_FOR_EACH_SAFE(curr, next, &client->msg_handler_list) {
        msg_handler = QCLOUD_LIST_ENTRY(curr, qcloud_mqtt_msg_handler_t, list);
        mqtt_glue_msg_handler_destory(msg_handler);
    }

    osal_mutex_unlock(client->msg_handler_list_lock);

    QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_SUCCESS);
}
/**
 * @brief Check whether an identical handler (same topic filter AND same
 *        callback) is already installed.
 *
 * @param client       MQTT client
 * @param that_handler candidate handler
 * @return QCLOUD_TRUE when a duplicate exists, QCLOUD_FALSE otherwise
 */
__QCLOUD_STATIC__ int mqtt_glue_handler_is_exist(qcloud_mqtt_client_t *client, qcloud_mqtt_msg_handler_t *that_handler)
{
    QCLOUD_FUNC_ENTRY;

    qcloud_list_t *curr, *next;
    qcloud_mqtt_msg_handler_t *this_handler;
    int is_exist = QCLOUD_FALSE;

    if (!client || !that_handler) {
        return QCLOUD_FALSE;
    }

    if (qcloud_list_empty(&client->msg_handler_list)) {
        return QCLOUD_FALSE;
    }

    osal_mutex_lock(client->msg_handler_list_lock);

    QCLOUD_LIST_FOR_EACH_SAFE(curr, next, &client->msg_handler_list) {
        this_handler = QCLOUD_LIST_ENTRY(curr, qcloud_mqtt_msg_handler_t, list);

        if (mqtt_glue_is_topic_equals(this_handler->topic_filter_mutable, that_handler->topic_filter_mutable) &&
            this_handler->handler == that_handler->handler) {
            /* the original returned here while still holding the mutex,
               leaking the lock; record the result and break instead */
            is_exist = QCLOUD_TRUE;
            break;
        }
    }

    osal_mutex_unlock(client->msg_handler_list_lock);

    return is_exist;
}
/**
 * @brief Install a message handler into the client's handler list, dropping
 *        it (and freeing it) when an identical one is already installed.
 *
 * NOTE(review): the list insertion below is done without taking
 * msg_handler_list_lock, unlike every other accessor of this list —
 * confirm whether the caller guarantees exclusivity here.
 */
__QCLOUD_STATIC__ qcloud_err_t mqtt_glue_msg_handler_install(qcloud_mqtt_client_t *client, qcloud_mqtt_msg_handler_t *msg_handler)
{
    if (mqtt_glue_handler_is_exist(client, msg_handler)) {
        QCLOUD_LOG_W("duplicated handler");
        mqtt_glue_msg_handler_destory(msg_handler);
        QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_SUCCESS);
    }

    qcloud_list_add(&msg_handler->list, &client->msg_handler_list);

    QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_SUCCESS);
}
/**
 * @brief Remove every installed handler whose topic matches topic_filter
 *        (exactly, or by wildcard when the filter contains "/#" or "/+").
 *
 * @param client        MQTT client
 * @param topic_filter  filter being unsubscribed
 * @param is_subscribed output: QCLOUD_TRUE if at least one handler was removed
 */
__QCLOUD_INTERNAL__ void mqtt_glue_msg_handler_uninstall(qcloud_mqtt_client_t *client, const char *topic_filter, int *is_subscribed)
{
    char *this_topic = NULL;
    int is_topic_filter_wildcard = QCLOUD_FALSE;
    qcloud_list_t *curr, *next;
    qcloud_mqtt_msg_handler_t *msg_handler = NULL;

    *is_subscribed = QCLOUD_FALSE;

    is_topic_filter_wildcard = (strstr(topic_filter, "/#") || strstr(topic_filter, "/+"));

    if (qcloud_list_empty(&client->msg_handler_list)) {
        return;
    }

    osal_mutex_lock(client->msg_handler_list_lock);

    QCLOUD_LIST_FOR_EACH_SAFE(curr, next, &client->msg_handler_list) {
        msg_handler = QCLOUD_LIST_ENTRY(curr, qcloud_mqtt_msg_handler_t, list);

        /* the original never assigned this_topic, leaving it NULL and
           crashing inside strlen(); compare against this handler's filter */
        this_topic = msg_handler->topic_filter_mutable;

        if (mqtt_glue_is_topic_equals(topic_filter, this_topic) ||
            (is_topic_filter_wildcard &&
             mqtt_glue_is_topic_matches(topic_filter, this_topic, strlen(this_topic)))) {
            // completely the same, or match the wildcard
            mqtt_glue_msg_handler_destory(msg_handler);
            *is_subscribed = QCLOUD_TRUE;
        }
    }

    osal_mutex_unlock(client->msg_handler_list_lock);
}
/**
 * @brief Handle an incoming SUBACK: release the matching pending-ACK record
 *        and, on success, install the subscription's message handler.
 *
 * @param client MQTT client (SUBACK bytes are in rx_buffer)
 * @param timer  deadline timer (unused beyond sanity check)
 * @return QCLOUD_ERR_SUCCESS; QCLOUD_ERR_FAILURE when no matching pending
 *         record exists; QCLOUD_ERR_MQTT_SUB when the broker rejected the
 *         subscription
 */
__QCLOUD_STATIC__ qcloud_err_t mqtt_glue_suback_packet_handle(qcloud_mqtt_client_t *client, osal_timer_t *timer)
{
    QCLOUD_FUNC_ENTRY;

    QCLOUD_POINTER_SANITY_CHECK(client, QCLOUD_ERR_INVAL);
    QCLOUD_POINTER_SANITY_CHECK(timer, QCLOUD_ERR_INVAL);

    int qos = 0;
    qcloud_err_t rc;
    uint32_t count = 0;
    uint16_t packet_id = 0;
    int is_nack = QCLOUD_FALSE;
    qcloud_mqtt_msg_handler_t *msg_handler = NULL;

    // deserialize the SUBACK packet (a single granted-QoS entry is expected)
    rc = mqtt_common_deserialize_suback_packet(&packet_id, 1, &count, &qos, client->rx_buffer, sizeof(client->rx_buffer));
    if (rc != QCLOUD_ERR_SUCCESS) {
        QCLOUD_FUNC_EXIT_RC(rc);
    }

    /* check the return code of suback packet:
        0x00(QOS0, SUCCESS), 0x01(QOS1, SUCCESS), 0x02(QOS2, SUCCESS), 0x80(Failure)
     */
    is_nack = (qos == MQTT_PACKET_SUBACK_RC_FAILURE);

    /* release the pending record; on SUBACK it hands back the handler that
       was created at subscribe time */
    mqtt_glue_ack_list_unrecord(client, QCLOUD_MQTT_ACK_TYPE_SUBACK, packet_id, is_nack, &msg_handler);

    if (!msg_handler) {
        QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_FAILURE);
    }

    if (is_nack) {
        /* broker refused the subscription: the handler is no longer needed */
        mqtt_glue_msg_handler_destory(msg_handler);
        QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_MQTT_SUB);
    }

    mqtt_glue_msg_handler_install(client, msg_handler);

    QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_SUCCESS);
}
/**
 * @brief Handle an incoming UNSUBACK: deserialize it and release the
 *        matching pending-ACK record (firing the unsubscribe event).
 *
 * @param client MQTT client (UNSUBACK bytes are in rx_buffer)
 * @param timer  deadline timer (unused beyond sanity check)
 * @return QCLOUD_ERR_SUCCESS, or the deserialization error
 */
__QCLOUD_STATIC__ qcloud_err_t mqtt_glue_unsuback_packet_handle(qcloud_mqtt_client_t *client, osal_timer_t *timer)
{
    QCLOUD_FUNC_ENTRY;

    QCLOUD_POINTER_SANITY_CHECK(client, QCLOUD_ERR_INVAL);
    QCLOUD_POINTER_SANITY_CHECK(timer, QCLOUD_ERR_INVAL);

    uint16_t packet_id = 0;
    qcloud_err_t rc = mqtt_common_deserialize_unsuback_packet(&packet_id,
                                                              client->rx_buffer,
                                                              sizeof(client->rx_buffer));
    if (rc != QCLOUD_ERR_SUCCESS) {
        QCLOUD_FUNC_EXIT_RC(rc);
    }

    mqtt_glue_ack_list_unrecord(client, QCLOUD_MQTT_ACK_TYPE_UNSUBACK, packet_id, QCLOUD_FALSE, NULL);

    QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_SUCCESS);
}
/**
 * @brief Handle an incoming PUBLISH: deliver the message to the matching
 *        subscription callback and, for QoS1/QoS2, send the PUBACK/PUBREC
 *        reply. With QCLOUD_CFG_DUPLICATED_MSG_REMOVE_EN, duplicate
 *        redeliveries (same packet id) are delivered only once.
 *
 * @param client MQTT client (PUBLISH bytes are in rx_buffer)
 * @param timer  deadline timer for the reply send
 * @return QCLOUD_ERR_SUCCESS or an error from deserialization/serialization/send
 */
__QCLOUD_STATIC__ qcloud_err_t mqtt_glue_publish_packet_handle(qcloud_mqtt_client_t *client, osal_timer_t *timer)
{
    QCLOUD_FUNC_ENTRY;

    qcloud_err_t rc;
    char *topic_name;
    uint16_t topic_len;
    uint32_t len = 0;
    char topic_mutable[QCLOUD_MQTT_TOPIC_SIZE_MAX + 1] = {0};
    mqtt_incoming_msg_t msg;

    rc = mqtt_common_deserialize_publish_packet(&msg.dup, (int *)&msg.qos, &msg.retained, &msg.id,
                                                &topic_name, &topic_len,
                                                (uint8_t **) &msg.payload, &msg.payload_len,
                                                client->rx_buffer, sizeof(client->rx_buffer));
    if (rc != QCLOUD_ERR_SUCCESS) {
        QCLOUD_FUNC_EXIT_RC(rc);
    }

    if (topic_len > QCLOUD_MQTT_TOPIC_SIZE_MAX) {
        QCLOUD_LOG_E("incoming topic len overflow");
        QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_FAILURE);
    }

    // topic_name points into rx_buffer and is NOT NUL-terminated (the
    // payload follows immediately), so copy it into a terminated buffer
    memset(topic_mutable, 0, sizeof(topic_mutable));
    memcpy(topic_mutable, topic_name, topic_len);

    if (msg.qos == MQTT_QOS0) {
        rc = mqtt_glue_incoming_message_deliver(client, topic_mutable, topic_len, &msg);
        /* no further processing required for QOS0 */
        QCLOUD_FUNC_EXIT_RC(rc);
    }

#if (QCLOUD_CFG_DUPLICATED_MSG_REMOVE_EN > 0u)
    // deliver only if this packet id has not been seen recently
    if (!mqtt_glue_packet_id_is_exist(msg.id)) {
#endif

    rc = mqtt_glue_incoming_message_deliver(client, topic_mutable, topic_len, &msg);
    if (QCLOUD_ERR_SUCCESS != rc) {
        QCLOUD_FUNC_EXIT_RC(rc);
    }

#if (QCLOUD_CFG_DUPLICATED_MSG_REMOVE_EN > 0u)
    }
    mqtt_glue_packet_id_cache_add(msg.id);
#endif

    // QoS1 requires a PUBACK reply, QoS2 a PUBREC; serialize under the tx lock
    osal_mutex_lock(client->tx_lock);

    if (msg.qos == MQTT_QOS1) {
        rc = mqtt_common_serialize_puback_packet(client->tx_buffer, sizeof(client->tx_buffer), msg.id, &len);
    } else if (msg.qos == MQTT_QOS2) {
        rc = mqtt_common_serialize_pubrec_packet(client->tx_buffer, sizeof(client->tx_buffer), msg.id, &len);
    }

    if (QCLOUD_ERR_SUCCESS != rc) {
        osal_mutex_unlock(client->tx_lock);
        QCLOUD_FUNC_EXIT_RC(rc);
    }

    rc = mqtt_glue_packet_send(client, len, timer);

    osal_mutex_unlock(client->tx_lock);

    QCLOUD_FUNC_EXIT_RC(rc);
}
/**
 * @brief Handle a PUBREC packet and answer with PUBREL (step 2 of the
 *        QoS2 outbound publish handshake).
 *
 * @param client MQTT client whose rx_buffer holds the raw PUBREC packet
 * @param timer  deadline timer for the PUBREL send
 * @return QCLOUD_ERR_SUCCESS, or the first deserialize/serialize/send error
 */
__QCLOUD_STATIC__ qcloud_err_t mqtt_glue_pubrec_packet_handle(qcloud_mqtt_client_t *client, osal_timer_t *timer)
{
    QCLOUD_FUNC_ENTRY;

    qcloud_err_t rc;
    uint32_t len;
    uint16_t packet_id;
    uint8_t dup, type;

    /* pull the packet id (and dup/type flags) out of the PUBREC */
    rc = mqtt_common_deserialize_ack_packet(&type, &dup, &packet_id, client->rx_buffer, sizeof(client->rx_buffer));
    if (QCLOUD_ERR_SUCCESS != rc) {
        QCLOUD_FUNC_EXIT_RC(rc);
    }

    /* build the PUBREL under tx_lock (dup flag = 0) */
    osal_mutex_lock(client->tx_lock);

    rc = mqtt_common_serialize_pubrel_packet(client->tx_buffer, sizeof(client->tx_buffer), 0, packet_id, &len);
    if (QCLOUD_ERR_SUCCESS != rc) {
        osal_mutex_unlock(client->tx_lock);
        QCLOUD_FUNC_EXIT_RC(rc);
    }

    /* send the PUBREL packet */
    rc = mqtt_glue_packet_send(client, len, timer);
    if (QCLOUD_ERR_SUCCESS != rc) {
        osal_mutex_unlock(client->tx_lock);
        /* there was a problem */
        QCLOUD_FUNC_EXIT_RC(rc);
    }

    osal_mutex_unlock(client->tx_lock);

    QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_SUCCESS);
}
/**
 * @brief Handle the server's PINGRESP (keep-alive reply): clear the
 *        outstanding-ping flag and restart the keep-alive countdown.
 *
 * @param client MQTT client whose keep-alive state is updated
 */
__QCLOUD_STATIC__ void mqtt_glue_pingresp_packet_handle(qcloud_mqtt_client_t *client)
{
    QCLOUD_FUNC_ENTRY;

    /* ping state is shared with the keep-alive sender, hence global_lock */
    osal_mutex_lock(client->global_lock);
    client->ping_outstanding = 0;
    osal_timer_countdown(&client->ping_timer, client->keep_alive_interval);
    osal_mutex_unlock(client->global_lock);

    QCLOUD_FUNC_EXIT;
}
/* Dispatch one received packet to its type-specific handler, then update
 * keep-alive bookkeeping: any acknowledgement from the server also counts
 * as proof the connection is alive. */
__QCLOUD_STATIC__ qcloud_err_t mqtt_glue_packet_handle(qcloud_mqtt_client_t *client, osal_timer_t *timer, uint8_t packet_type)
{
    qcloud_err_t rc = QCLOUD_ERR_SUCCESS;

    /* first dispatch: per-type protocol handling */
    switch (packet_type) {
        case MQTT_PACKET_TYPE_CONNACK:
            break;

        case MQTT_PACKET_TYPE_PUBACK:
            rc = mqtt_glue_puback_packet_handle(client, timer);
            break;

        case MQTT_PACKET_TYPE_SUBACK:
            rc = mqtt_glue_suback_packet_handle(client, timer);
            break;

        case MQTT_PACKET_TYPE_UNSUBACK:
            rc = mqtt_glue_unsuback_packet_handle(client, timer);
            break;

        case MQTT_PACKET_TYPE_PUBLISH:
            rc = mqtt_glue_publish_packet_handle(client, timer);
            break;

        case MQTT_PACKET_TYPE_PUBREC:
            rc = mqtt_glue_pubrec_packet_handle(client, timer);
            break;

        case MQTT_PACKET_TYPE_PUBREL:
            /* incoming QoS2 PUBREL is intentionally not implemented here */
            QCLOUD_LOG_E("PUBREL NOT handled!");
            break;

        case MQTT_PACKET_TYPE_PUBCOMP:
        case MQTT_PACKET_TYPE_PINGRESP:
            break;

        default:
            /* either unknown packet type or failure occurred should not happen */
            QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_RX_MESSAGE_INVAL);
    }

    /* second dispatch: keep-alive state */
    switch (packet_type) {
        /* recv below msgs are all considered as PING OK */
        case MQTT_PACKET_TYPE_PUBACK:
        case MQTT_PACKET_TYPE_SUBACK:
        case MQTT_PACKET_TYPE_UNSUBACK:
        case MQTT_PACKET_TYPE_PINGRESP:
            /* treated exactly like a PINGRESP: clears ping_outstanding and
             * restarts the keep-alive timer */
            mqtt_glue_pingresp_packet_handle(client);
            break;

        /* Recv downlink pub means link is OK but we still need to send PING request */
        case MQTT_PACKET_TYPE_PUBLISH:
            osal_mutex_lock(client->global_lock);
            client->ping_outstanding = 0;
            osal_mutex_unlock(client->global_lock);
            break;
    }

    QCLOUD_FUNC_EXIT_RC(rc);
}
/* Read at most one packet from the network and dispatch it. "Nothing to
 * read" is not an error and maps to QCLOUD_ERR_SUCCESS. On return,
 * *packet_type holds the type of the packet that was read (if any). */
__QCLOUD_INTERNAL__ qcloud_err_t mqtt_glue_spin(qcloud_mqtt_client_t *client, osal_timer_t *timer, uint8_t *packet_type)
{
    QCLOUD_FUNC_ENTRY;

    QCLOUD_POINTER_SANITY_CHECK(client, QCLOUD_ERR_INVAL);
    QCLOUD_POINTER_SANITY_CHECK(timer, QCLOUD_ERR_INVAL);
    QCLOUD_POINTER_SANITY_CHECK(packet_type, QCLOUD_ERR_INVAL);

    qcloud_err_t rc;

    /* read the socket, see what work is done */
    rc = mqtt_glue_packet_read(client, timer, packet_type);

    /* nothing to read, not a failure */
    QCLOUD_FUNC_EXIT_RC_IF(rc, QCLOUD_ERR_MQTT_NOTHING_TO_READ, QCLOUD_ERR_SUCCESS);

    QCLOUD_FUNC_EXIT_RC_IF_NOT(rc, QCLOUD_ERR_SUCCESS, rc);

    QCLOUD_FUNC_EXIT_RC(mqtt_glue_packet_handle(client, timer, *packet_type));
}
/* Spin the read loop until a packet of the expected ack type arrives, an
 * error occurs, or the timer expires (QCLOUD_ERR_MQTT_REQUEST_TIMEOUT). */
__QCLOUD_INTERNAL__ qcloud_err_t mqtt_glue_spin4ack(qcloud_mqtt_client_t *client, osal_timer_t *timer, uint8_t packet_type)
{
    QCLOUD_FUNC_ENTRY;

    qcloud_err_t rc;
    uint8_t ack_packet_type = 0;

    QCLOUD_POINTER_SANITY_CHECK(client, QCLOUD_ERR_INVAL);
    QCLOUD_POINTER_SANITY_CHECK(timer, QCLOUD_ERR_INVAL);

    do {
        /* give up once the caller's deadline has passed */
        if (osal_timer_is_expired(timer)) {
            QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_MQTT_REQUEST_TIMEOUT);
        }

        rc = mqtt_glue_spin(client, timer, &ack_packet_type);
    } while (QCLOUD_ERR_SUCCESS == rc && ack_packet_type != packet_type);

    QCLOUD_FUNC_EXIT_RC(rc);
}
#ifdef __cplusplus
}
#endif
|
<gh_stars>10-100
package io.opensphere.mantle.transformer.util;
import java.util.List;
import java.util.Set;
import java.util.concurrent.locks.ReentrantLock;
import gnu.trove.map.hash.TLongObjectHashMap;
import io.opensphere.core.geometry.AbstractRenderableGeometry;
import io.opensphere.core.geometry.Geometry;
import io.opensphere.core.util.collections.New;
import io.opensphere.core.util.rangeset.RangedLongSet;
/**
* The Class GeometrySetUtil.
*/
public final class GeometrySetUtil
{
    /** Mask that keeps every bit of a data model id. */
    public static final long ALL_BITS_MASK = -1;

    /**
     * Find geometries whose masked data model ids are in the given id set.
     *
     * @param geomSet the geom set
     * @param geomSetLock the geom set lock
     * @param idSet the id set
     * @param dmGeomIdMask the dm geom id mask
     * @return map of masked id to geometry
     */
    public static TLongObjectHashMap<AbstractRenderableGeometry> findGeometriesWithIds(Set<AbstractRenderableGeometry> geomSet,
            ReentrantLock geomSetLock, RangedLongSet idSet, long dmGeomIdMask)
    {
        return findGeometriesWithIds(geomSet, geomSetLock, idSet, null, dmGeomIdMask);
    }

    /**
     * Find geometries whose masked data model ids are in the given id set.
     *
     * @param geomSet the geom set
     * @param geomSetLock the geom set lock
     * @param idSet the id set
     * @param foundIdSet the found id set ( if passed in not null any found ids
     *            will be added to this set )
     * @param dmGeomIdMask the dm geom id mask
     * @return map of masked id to geometry
     */
    public static TLongObjectHashMap<AbstractRenderableGeometry> findGeometriesWithIds(Set<AbstractRenderableGeometry> geomSet,
            ReentrantLock geomSetLock, RangedLongSet idSet, Set<Long> foundIdSet, long dmGeomIdMask)
    {
        TLongObjectHashMap<AbstractRenderableGeometry> foundSet = new TLongObjectHashMap<>();
        geomSetLock.lock();
        try
        {
            for (AbstractRenderableGeometry geom : geomSet)
            {
                // Compute the masked id once per geometry (the original
                // re-evaluated getDataModelId() & mask up to three times).
                long maskedId = geom.getDataModelId() & dmGeomIdMask;
                if (idSet.contains(Long.valueOf(maskedId)))
                {
                    if (foundIdSet != null)
                    {
                        foundIdSet.add(Long.valueOf(maskedId));
                    }
                    foundSet.put(maskedId, geom);
                }
            }
        }
        finally
        {
            geomSetLock.unlock();
        }
        return foundSet;
    }

    /**
     * Find the subset of geometries whose masked ids occur in the id list.
     *
     * @param geomSet the geom set
     * @param geomSetLock the geom set lock
     * @param idList the id list
     * @param dmGeomIdMask the dm geom id mask
     * @return the matching geometries
     */
    public static Set<Geometry> findGeometrySetWithIds(Set<Geometry> geomSet, ReentrantLock geomSetLock, List<Long> idList,
            long dmGeomIdMask)
    {
        return findGeometrySetWithIds(geomSet, geomSetLock, idList, null, dmGeomIdMask);
    }

    /**
     * Find the subset of geometries whose masked ids occur in the id list.
     *
     * @param geomSet the geom set
     * @param geomSetLock the geom set lock
     * @param idList the id list of cache ids.
     * @param foundIdSet the found id set ( if passed in not null any found ids
     *            will be added to this set )
     * @param dmGeomIdMask the dm geom id mask
     * @return the matching geometries
     */
    public static Set<Geometry> findGeometrySetWithIds(Set<Geometry> geomSet, ReentrantLock geomSetLock, List<Long> idList,
            Set<Long> foundIdSet, long dmGeomIdMask)
    {
        // Copy the list into a set for O(1) membership tests.
        Set<Long> idSet = New.set(idList);
        Set<Geometry> foundSet = New.set();
        geomSetLock.lock();
        try
        {
            for (Geometry geom : geomSet)
            {
                // Compute the masked id once per geometry.
                long maskedId = geom.getDataModelId() & dmGeomIdMask;
                if (idSet.contains(Long.valueOf(maskedId)))
                {
                    if (foundIdSet != null)
                    {
                        foundIdSet.add(Long.valueOf(maskedId));
                    }
                    foundSet.add(geom);
                }
            }
        }
        finally
        {
            geomSetLock.unlock();
        }
        return foundSet;
    }

    /**
     * Find the first geometry whose masked data model id equals the cache id.
     *
     * @param geomSet the geom set
     * @param geomSetLock the geom set lock
     * @param cacheId the id
     * @param dmGeomIdMask the dm geom id mask
     * @return the geometry, or null if none matches
     */
    public static Geometry findGeometryWithId(Set<Geometry> geomSet, ReentrantLock geomSetLock, long cacheId, long dmGeomIdMask)
    {
        Geometry found = null;
        geomSetLock.lock();
        try
        {
            for (Geometry geom : geomSet)
            {
                if ((geom.getDataModelId() & dmGeomIdMask) == cacheId)
                {
                    found = geom;
                    break;
                }
            }
        }
        finally
        {
            geomSetLock.unlock();
        }
        return found;
    }

    /**
     * Instantiates a new geometry set util.
     */
    private GeometrySetUtil()
    {
        // Disallow
    }
}
|
<filename>src/site/components/QuestItemPrecondition.tsx
import React from 'react';
import { ItemWithCount } from '../../../typings/custom';
import { ItemData } from '../../items/itemData';
import { Input } from './Input';
import {
CloseButton,
ItemIdContainer,
ItemsContainer,
LargeDropdown,
ScriptBlockContainer,
TextWrapper,
Wrapper,
} from './StyledElements';
/** Props for a quest item-precondition editor row. */
export interface QuestRewardProps {
  onRemove: () => void;
  preconditionBlock: ItemWithCount;
  onItemIdChange: (value: string) => void;
  onItemCountChange: (value: number) => void;
}

/**
 * Editor row for a quest item precondition: pick an item id from the known
 * item data and enter the required quantity. The close button removes the row.
 */
export const QuestItemPrecondition: (props: QuestRewardProps) => JSX.Element = ({
  onRemove,
  preconditionBlock,
  onItemIdChange,
  onItemCountChange,
}) => {
  return (
    <ScriptBlockContainer>
      <CloseButton onClick={onRemove} />
      <ItemIdContainer>
        <Wrapper>
          <TextWrapper>Item Id</TextWrapper>
          <LargeDropdown
            value={preconditionBlock.id}
            onChange={(e: any) => onItemIdChange(e.target.value)}
          >
            <option value="none"></option>
            {/* key is required by React when rendering a list; itemKey is
                unique per item, so it is a stable key. */}
            {Object.entries(ItemData).map(([itemKey, itemData]) => (
              <option key={itemKey} value={itemKey}>{itemData.name}</option>
            ))}
          </LargeDropdown>
        </Wrapper>
      </ItemIdContainer>
      <ItemsContainer>
        <Wrapper>
          <TextWrapper>Item Quantity</TextWrapper>
          <Input
            value={preconditionBlock.count}
            onChange={(e: any) => onItemCountChange(e.target.value)}
          />
        </Wrapper>
      </ItemsContainer>
    </ScriptBlockContainer>
  );
};
|
use base64;
pub enum Encoding {
ASCII,
Base64,
}
impl Encoding {
fn encode(&self, input: &str) -> String {
match self {
Encoding::ASCII => {
input.chars().map(|c| c as u8).collect()
}
Encoding::Base64 => {
base64::encode(input)
}
}
}
fn decode(&self, input: &str) -> String {
match self {
Encoding::ASCII => {
input.chars().map(|c| c as char).collect()
}
Encoding::Base64 => {
match base64::decode(input) {
Ok(decoded) => String::from_utf8_lossy(&decoded).into_owned(),
Err(_) => String::from("Invalid Base64 input"),
}
}
}
}
} |
import pyspark
from pyspark.ml import Pipeline
from pyspark.ml.feature import StringIndexer, Tokenizer, CountVectorizer
from pyspark.ml.classification import LogisticRegression

# Set up the pipeline stages.
# CountVectorizer requires an array-of-strings column, so the raw text must be
# tokenized first; the original wired CountVectorizer to the numeric output of
# StringIndexer, which fails at fit time. The string label column is indexed
# into the numeric label LogisticRegression expects.
tokenizer = Tokenizer(inputCol="document", outputCol="words")
vectorizer = CountVectorizer(inputCol="words", outputCol="features")
label_indexer = StringIndexer(inputCol="language", outputCol="label")
classifier = LogisticRegression(featuresCol="features", labelCol="label")

# Build and run the pipeline
pipeline = Pipeline(stages=[tokenizer, label_indexer, vectorizer, classifier])
model = pipeline.fit(documentsDf)
import matplotlib.pyplot as plt

# Load (x, y, cluster) triples from the results file.
points = []
with open('cluster_results.txt', 'r') as fh:
    for row in fh:
        px, py, label = map(float, row.split())
        points.append((px, py, label))

# Group coordinates by cluster label.
grouped = {}
for px, py, label in points:
    bucket = grouped.setdefault(label, {'x': [], 'y': []})
    bucket['x'].append(px)
    bucket['y'].append(py)

# Draw one scatter series per cluster.
plt.figure()
for label, coords in grouped.items():
    plt.scatter(coords['x'], coords['y'], label=f'Cluster {int(label)}')

# Axis labels, title, and legend.
plt.xlabel('X')
plt.ylabel('Y')
plt.title('Cluster Results')
plt.legend()
plt.show()
//给你一份旅游线路图,该线路图中的旅行线路用数组 paths 表示,其中 paths[i] = [cityAi, cityBi] 表示该线路将会从
//cityAi 直接前往 cityBi 。请你找出这次旅行的终点站,即没有任何可以通往其他城市的线路的城市。
//
// 题目数据保证线路图会形成一条不存在循环的线路,因此恰有一个旅行终点站。
//
//
//
// 示例 1:
//
//
//输入:paths = [["London","New York"],["New York","Lima"],["Lima","Sao Paulo"]]
//输出:"Sao Paulo"
//解释:从 "London" 出发,最后抵达终点站 "Sao Paulo" 。本次旅行的路线是 "London" -> "New York" ->
//"Lima" -> "Sao Paulo" 。
//
//
// 示例 2:
//
//
//输入:paths = [["B","C"],["D","B"],["C","A"]]
//输出:"A"
//解释:所有可能的线路是:
//"D" -> "B" -> "C" -> "A".
//"B" -> "C" -> "A".
//"C" -> "A".
//"A".
//显然,旅行终点站是 "A" 。
//
//
// 示例 3:
//
//
//输入:paths = [["A","Z"]]
//输出:"Z"
//
//
//
//
// 提示:
//
//
// 1 <= paths.length <= 100
// paths[i].length == 2
// 1 <= cityAi.length, cityBi.length <= 10
// cityAi != cityBi
// 所有字符串均由大小写英文字母和空格字符组成。
//
// Related Topics 哈希表 字符串 👍 97 👎 0
package algorithm_1400
import "testing"
// Test_destCity covers all three examples from the problem statement:
// a multi-hop chain, a shuffled chain, and a single edge.
func Test_destCity(t *testing.T) {
	type args struct {
		paths [][]string
	}
	tests := []struct {
		name string
		args args
		want string
	}{
		{"t1", args{[][]string{{"B", "C"}, {"D", "B"}, {"C", "A"}}}, "A"},
		{"t2", args{[][]string{{"London", "New York"}, {"New York", "Lima"}, {"Lima", "Sao Paulo"}}}, "Sao Paulo"},
		{"t3", args{[][]string{{"A", "Z"}}}, "Z"},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			if got := destCity(tt.args.paths); got != tt.want {
				t.Errorf("destCity() = %v, want %v", got, tt.want)
			}
		})
	}
}
|
<reponame>ops-class/test161<filename>version.go
package test161
import (
"fmt"
)
// ProgramVersion is a semantic major/minor/revision version triple.
type ProgramVersion struct {
	Major    uint `yaml:"major"`
	Minor    uint `yaml:"minor"`
	Revision uint `yaml:"revision"`
}

// Version is the current test161 program version.
var Version = ProgramVersion{
	Major:    1,
	Minor:    3,
	Revision: 2,
}

// String renders the version as "major.minor.revision".
func (v ProgramVersion) String() string {
	return fmt.Sprintf("%v.%v.%v", v.Major, v.Minor, v.Revision)
}

// CompareTo returns 1 if v > other, 0 if v == other, and -1 if v < other.
// Components are compared in major, minor, revision order.
// (Receiver renamed from the non-idiomatic "this"; the if/else chain is
// now a switch.)
func (v ProgramVersion) CompareTo(other ProgramVersion) int {
	switch {
	case v.Major > other.Major:
		return 1
	case v.Major < other.Major:
		return -1
	case v.Minor > other.Minor:
		return 1
	case v.Minor < other.Minor:
		return -1
	case v.Revision > other.Revision:
		return 1
	case v.Revision < other.Revision:
		return -1
	default:
		return 0
	}
}
|
// Copyright 2021 99cloud
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import { metadataListUrl } from '../../../support/constants';
// E2E lifecycle coverage for the metadata-namespace admin pages:
// create (from an uploaded JSON file), detail view, manage resource types,
// edit, and delete. Each spec starts from a fresh admin login on the list page.
describe('The Metadata Page', () => {
  const listUrl = metadataListUrl;
  const name = 'An Example Namespace';
  const filename = 'metadata.json';

  beforeEach(() => {
    // Authenticate as admin and land on the metadata list before every spec.
    cy.loginAdmin(listUrl);
  });

  it('successfully create', () => {
    // Create a namespace by attaching the metadata.json fixture in the modal.
    cy.clickHeaderButton(1)
      .formAttachFile('metadata', filename)
      .clickModalActionSubmitButton();
  });

  it('successfully detail', () => {
    // Search for the created namespace, open its detail page, then go back.
    cy.tableSimpleSearchText(name).checkTableFirstRow(name).goToDetail();
    cy.goBackToList(listUrl);
  });

  it('successfully manage resource type', () => {
    // Associate the namespace with the OS::Cinder::Volume resource type.
    cy.tableSimpleSearchText(name)
      .clickActionInMore('Manage Resource Types')
      .formTableSelectBySearch('associations', 'OS::Cinder::Volume')
      .clickModalActionSubmitButton();
  });

  it('successfully edit', () => {
    // Toggle the first options checkbox in the edit form and submit.
    cy.tableSimpleSearchText(name)
      .clickFirstActionButton()
      .formCheckboxClick('options', 0)
      .clickModalActionSubmitButton();
  });

  it('successfully delete', () => {
    // Delete the namespace and verify the table is empty afterwards.
    cy.tableSimpleSearchText(name)
      .clickConfirmActionInMore('Delete')
      .checkEmptyTable();
  });
});
|
// Configures character movement for face-the-movement-direction control and
// creates the spring-arm camera boom attached to the character's root.
//
// NOTE(review): the original took CameraBoom by value, so the NewObject<>
// result was assigned to a local copy and silently lost by the caller. The
// parameter is now a reference to the pointer; call sites passing an lvalue
// pointer variable compile unchanged and now receive the created component.
void SetupCharacterMovementAndCamera(USpringArmComponent*& CameraBoom, USceneComponent* RootComponent, UCharacterMovementComponent* CharacterMovement) {
    // Rotate the character to face its movement direction.
    CharacterMovement->bOrientRotationToMovement = true;
    CharacterMovement->RotationRate = FRotator(0.f, 640.f, 0.f);
    // Keep movement constrained to a plane from the start.
    CharacterMovement->bConstrainToPlane = true;
    CharacterMovement->bSnapToPlaneAtStart = true;

    // Create a camera boom and attach it to the character's root component.
    CameraBoom = NewObject<USpringArmComponent>(RootComponent, TEXT("CameraBoom"));
    CameraBoom->SetupAttachment(RootComponent);
}
treevalue -v |
package com.youngki.memory_project;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Typeface;
import android.os.Bundle;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
public class showHelp extends AppCompatActivity {
private TextView howToPlay;
private Button button;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_show_help);
howToPlay = (TextView)findViewById(R.id.textView10);
howToPlay.setTypeface(Typeface.createFromAsset(getAssets(),"fonts/orange juice 2.0.ttf"));
button = (Button)findViewById(R.id.button7);
button.setTypeface(Typeface.createFromAsset(getAssets(), "fonts/orange juice 2.0.ttf"));
button = (Button)findViewById(R.id.button);
button.setTypeface(Typeface.createFromAsset(getAssets(), "fonts/orange juice 2.0.ttf"));
button = (Button)findViewById(R.id.button1);
button.setTypeface(Typeface.createFromAsset(getAssets(), "fonts/orange juice 2.0.ttf"));
button = (Button)findViewById(R.id.button2);
button.setTypeface(Typeface.createFromAsset(getAssets(), "fonts/orange juice 2.0.ttf"));
button = (Button)findViewById(R.id.button3);
button.setTypeface(Typeface.createFromAsset(getAssets(), "fonts/orange juice 2.0.ttf"));
button = (Button)findViewById(R.id.button4);
button.setTypeface(Typeface.createFromAsset(getAssets(), "fonts/orange juice 2.0.ttf"));
button = (Button)findViewById(R.id.button5);
button.setTypeface(Typeface.createFromAsset(getAssets(), "fonts/orange juice 2.0.ttf"));
button = (Button)findViewById(R.id.button6);
button.setTypeface(Typeface.createFromAsset(getAssets(), "fonts/orange juice 2.0.ttf"));
}
public void onGenerateMapClick(View v){
Intent windowOpener = new Intent(this,createMap.class);
startActivity(windowOpener);
}
public void onGenerateHelpClick(View v){
AlertDialog.Builder builder1 = new AlertDialog.Builder(showHelp.this);
builder1.setMessage("Select easy, medium, or hard, then click generate and the map will show " +
"on the screen. Click the + or - buttons to increase or decrease the amount of letters" +
" or numbers shown. Click train to save the map you've created.");
builder1.setCancelable(false);
builder1.setNeutralButton(
"Ok",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
dialog.cancel();
}
});
AlertDialog alert11 = builder1.create();
alert11.show();
}
public void onHandwritingHelpClick(View v){
AlertDialog.Builder builder1 = new AlertDialog.Builder(showHelp.this);
builder1.setMessage("In the open space, write the number shown at the bottom, and hear the " +
"number read back to you.");
builder1.setCancelable(false);
builder1.setNeutralButton(
"Ok",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
dialog.cancel();
}
});
AlertDialog alert11 = builder1.create();
alert11.show();
}
public void onRepetitionHelpClick(View v){
AlertDialog.Builder builder1 = new AlertDialog.Builder(showHelp.this);
builder1.setMessage("Press the button corresponding to the letters and numbers shown on the " +
"screen, and hear it played back to you. At the bottom of the screen, choose from " +
"the different voices we have available.");
builder1.setCancelable(false);
builder1.setNeutralButton(
"Ok",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
dialog.cancel();
}
});
AlertDialog alert11 = builder1.create();
alert11.show();
}
public void onMatchingHelpClick(View v){
AlertDialog.Builder builder1 = new AlertDialog.Builder(showHelp.this);
builder1.setMessage("First, click on a number to select it, then click on the letter that " +
"is paired with the number. When you have matched all letters and numbers, click " +
"submit to check your answers. Click next to continue to test on the remaining " +
"items. When there are no items left, you will be taken to the results screen.");
builder1.setCancelable(false);
builder1.setNeutralButton(
"Ok",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
dialog.cancel();
}
});
AlertDialog alert11 = builder1.create();
alert11.show();
}
public void onLetterToNumberHelpClick(View v){
AlertDialog.Builder builder1 = new AlertDialog.Builder(showHelp.this);
builder1.setMessage("Click on the number that is paired with the letter shown. On the next " +
"screen, the upper right hand corner will let you know if you answered right or " +
"wrong. When there are no items left, you will be taken to the results screen.");
builder1.setCancelable(false);
builder1.setNeutralButton(
"Ok",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
dialog.cancel();
}
});
AlertDialog alert11 = builder1.create();
alert11.show();
}
public void onMatching2HelpClick(View v){
AlertDialog.Builder builder1 = new AlertDialog.Builder(showHelp.this);
builder1.setMessage("First, click on a number to select it, then click on the letter that " +
"is paired with the number. When you have matched all letters and numbers, click " +
"submit. You will then be taken to the results screen to see how you did.");
builder1.setCancelable(false);
builder1.setNeutralButton(
"Ok",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
dialog.cancel();
}
});
AlertDialog alert11 = builder1.create();
alert11.show();
}
public void onViewMapClick(View view){
Boolean hasMap = false;
SharedPreferences prefs = getSharedPreferences("MyPref", MODE_PRIVATE);
String wrapperStr = prefs.getString("memMap", null);
if (wrapperStr != null){
//TODO: Also check that the user trained today via userHasTrainedToday boolean
// a map was detected
hasMap = true;
startActivity(new Intent(this,ViewMapActivity.class));
}
if(hasMap == false){
AlertDialog.Builder builder1 = new AlertDialog.Builder(showHelp.this);
builder1.setMessage("Please Create a Map First.");
builder1.setCancelable(false);
builder1.setNeutralButton(
"Ok",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
dialog.cancel();
Intent generateMapWindowOpener = new Intent(showHelp.this, createMap.class);
startActivity(generateMapWindowOpener);
}
});
AlertDialog alert11 = builder1.create();
alert11.show();
return;
}
//Intent startWindowOpener = new Intent(this,showMap.class);
//startActivity(startWindowOpener);
}
@Override
public void onBackPressed(){
Intent windowOpener = new Intent(this, MainActivity.class);
startActivity(windowOpener);
}
}
|
package crawler
import (
"encoding/json"
"log"
"net/http"
"github.com/gocolly/colly/v2"
)
// Ping is a health-check handler: it logs the hit and responds with the
// literal body "Pong" (implicit HTTP 200).
func Ping(w http.ResponseWriter, r *http.Request) {
	log.Println("Pong")
	w.Write([]byte("Pong"))
}
// GetUrls crawls the page given by the "url" query parameter and responds
// with a JSON array of every absolute link found in its anchor tags.
// Error paths now report proper HTTP status codes instead of an implicit
// 200 with an empty body, and the crawl error from Visit is checked.
func GetUrls(w http.ResponseWriter, r *http.Request) {
	url := r.URL.Query().Get("url")
	if url == "" {
		log.Println("Missing 'url' argument")
		http.Error(w, "missing 'url' query parameter", http.StatusBadRequest)
		return
	}
	log.Println("Visiting", url)

	c := colly.NewCollector()

	var urls []string
	c.OnHTML("a[href]", func(e *colly.HTMLElement) {
		// Resolve each href against the page URL; skip unresolvable links.
		link := e.Request.AbsoluteURL(e.Attr("href"))
		if link != "" {
			urls = append(urls, link)
		}
	})

	if err := c.Visit(url); err != nil {
		log.Println("Failed to visit page: ", err)
		http.Error(w, "failed to fetch page", http.StatusBadGateway)
		return
	}

	b, err := json.Marshal(urls)
	if err != nil {
		log.Println("Failed to serialize response: ", err)
		http.Error(w, "internal error", http.StatusInternalServerError)
		return
	}

	w.Header().Add("content-type", "application/json")
	w.Write(b)
}
|
<gh_stars>0
import { Component, OnInit } from '@angular/core';
import { ActivatedRoute } from '@angular/router';
import { Album } from '../model';
@Component({
selector: 'app-song-list',
templateUrl: './song-list.component.html',
styleUrls: ['./song-list.component.css']
})
export class SongListComponent implements OnInit {
  // Album resolved by the route; undefined until the route data arrives.
  album: Album;
  // Track numbers of the album's songs, sorted ascending.
  trackKeys: number[];

  constructor(private route: ActivatedRoute) { }

  ngOnInit(): void {
    this.route.data.subscribe((data) => {
      if (!data.album) {
        return; // no album resolved; leave component state untouched
      }
      this.album = data.album;
      const keys = Object.keys(this.album.songs) as unknown as number[];
      keys.sort((a, b) => a - b); // numeric ascending, not lexicographic
      this.trackKeys = keys;
    });
  }
}
|
#!/bin/bash
#
# This script starts the RayMaze

####
# VARIABLES
####
STARTER_CLASS="de.tarent.mica.maze.Starter"
# Directory containing this script: the application home. Replaces the
# original append-basename-then-sed dance with a direct dirname/pwd.
UNIX_STYLE_HOME=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)

####
# MAIN
####
# Build the classpath from every entry in lib/. Seeding with the first
# entry avoids the original's leading ":" (an empty classpath element
# silently adds the current directory to the classpath).
CP=""
for a in "$UNIX_STYLE_HOME"/lib/*; do
    if [ -z "$CP" ]; then
        CP="$a"
    else
        CP="$CP:$a"
    fi
done

# Quote the classpath so paths containing spaces survive word splitting
# (the original passed $CP unquoted).
java -cp "$CP" "$STARTER_CLASS" "$@"
|
import numpy as np
class MarkerMeasurement:
    """Measurement of a 2D landmark: a position plus the marker's tag id."""

    def __init__(self, position, tag, initial_covariance=None):
        # position: 2D landmark position observation
        # tag: marker/tag identifier
        # initial_covariance: optional 2x2 covariance; defaults to 0.2 * I.
        # A None sentinel is used instead of `0.2 * np.eye(2)` as the default:
        # the original default array was created once at definition time, so
        # every instance shared (and could mutate) the same matrix.
        self.position = position
        self.tag = tag
        if initial_covariance is None:
            initial_covariance = 0.2 * np.eye(2)
        self.covariance = initial_covariance
class DriveMeasurement:
    """Measurement of the robot's wheel velocities over a time step."""

    def __init__(self, left_speed, right_speed, dt, left_cov=5, right_cov=5):
        # Wheel speed readings for the left and right wheels.
        self.left_speed, self.right_speed = left_speed, right_speed
        # Duration of the measurement interval.
        self.dt = dt
        # Measurement variances for each wheel speed (default 5).
        self.left_cov, self.right_cov = left_cov, right_cov
|
#! @RCD_SCRIPTS_SHELL@
#
# $NetBSD: pgbouncer.sh,v 1.3 2016/03/15 11:29:35 fhajny Exp $
#
# PROVIDE: pgbouncer
# REQUIRE: DAEMON
#
# rc.d control script for the pgbouncer PostgreSQL connection pooler.
# Runs the daemon as @PGB_USER@ and supports the standard rc commands
# plus "reload".

if [ -r /etc/rc.subr ]
then
	. /etc/rc.subr
fi

name="pgbouncer"
rcvar=${name}
command="@PREFIX@/bin/${name}"
pgbouncer_conf="@PKG_SYSCONFDIR@/${name}.ini"
required_files="${pgbouncer_conf}"
command_args="-d ${pgbouncer_conf}"
extra_commands="reload"
pgsql_user="@PGB_USER@"
start_cmd="pgbouncer_start"

# Default pidfile location; overridden by the "pidfile" setting from the
# pgbouncer ini file when that file is readable.
pidfile="@VARBASE@/run/${name}/${name}.pid"
if [ -r ${pgbouncer_conf} ] ; then
	pidfile=`grep -i pidfile ${pgbouncer_conf} | cut -d= -f2`
fi

# Custom start routine: ensure the pid directory exists and is owned by the
# pgbouncer user, then launch the daemon under that account.
pgbouncer_start()
{
	@ECHO@ "Starting ${name}."
	piddir=`dirname ${pidfile}`
	if [ ! -d ${piddir} ] ; then
		@MKDIR@ -p ${piddir}
		@CHOWN@ ${pgsql_user} ${piddir}
	fi
	doit="@SU@ -m ${pgsql_user} -c '${command} ${command_args}'"
	eval $doit
}

load_rc_config $name
run_rc_command "$1"
|
def sum_of_even_numbers(numbers):
    """Return the sum of all even values in `numbers` (0 for an empty list)."""
    # Generator expression: keep only values divisible by two and sum them.
    return sum(value for value in numbers if value % 2 == 0)


# Test the function with the provided example
input_list = [1, 2, 3, 4, 5, 6]
print(sum_of_even_numbers(input_list))  # Output: 12

# Additional test cases
print(sum_of_even_numbers([2, 4, 6, 8, 10]))  # Output: 30
print(sum_of_even_numbers([1, 3, 5, 7, 9]))  # Output: 0
print(sum_of_even_numbers([11, 22, 33, 44, 55, 66]))  # Output: 132
/*
Jameleon - An automation testing tool..
Copyright (C) 2006 <NAME> (<EMAIL>)
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package net.sf.jameleon.data;
import java.io.IOException;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
/**
 * JUnit tests for CollectionDataDriver: construction, open/close lifecycle,
 * key handling, row iteration, and error cases (null data, null key).
 */
public class CollectionDataDriverTest extends TestCase {
    // NOTE(review): dd appears unused by these tests — confirm before removing.
    protected DataExecuter dd;
    protected CollectionDataDriver cdd;
    public static final String KEY = "key";

    public static void main(String args[]) {
        junit.textui.TestRunner.run(suite());
    }

    public static Test suite() {
        return new TestSuite(CollectionDataDriverTest.class);
    }

    public CollectionDataDriverTest(String name) {
        super(name);
    }

    // Fresh driver with the default key before every test.
    public void setUp() {
        cdd = new CollectionDataDriver();
        cdd.setKey(KEY);
    }

    public void tearDown(){
        cdd.close();
        cdd = null;
    }

    // The (key, items) constructor should populate both fields.
    public void testConstructor(){
        Collection items = createCollection();
        cdd = new CollectionDataDriver(KEY, items);
        assertEquals("key field", KEY, cdd.key);
        assertEquals("# of items", 3, cdd.items.size());
    }

    // open() on a non-empty collection yields a live iterator.
    public void testOpenValid () throws Exception{
        cdd.setItems(createCollection());
        try {
            cdd.open();
            assertNotNull("Connection should not be null", cdd.iterator);
            assertTrue("Connection should be open", cdd.iterator.hasNext());
        } catch (IOException ioe ) {
            fail("No exception should have been thrown");
        }
    }

    // close() must release the iterator.
    public void testClose() throws IOException{
        cdd.setItems(createCollection());
        cdd.open();
        cdd.close();
        assertNull("the Collection's iterator should now be null", cdd.iterator);
    }

    public void testSetKey () throws Exception {
        cdd.setKey("key2");
        assertEquals("key", "key2", cdd.key);
    }

    // Rows come back in collection order, each keyed under KEY; a null row
    // signals the end of the data.
    public void testGetNextRowDefaultConstructor() throws IOException{
        cdd.setItems(createCollection());
        cdd.open();
        Map m = cdd.getNextRow();
        assertNotNull ( "A row should have been returned", m );
        assertEquals ("one", m.get(KEY).toString());
        m = cdd.getNextRow();
        assertNotNull ( "A row should have been returned", m );
        assertEquals ("two", m.get(KEY).toString());
        m = cdd.getNextRow();
        assertNotNull ( "A row should have been returned", m );
        assertEquals ("three", m.get(KEY).toString());
        m = cdd.getNextRow();
        assertNull ( "A row should not have been returned", m );
        cdd.close();
    }

    // An empty collection yields no rows at all.
    public void testGetNextRowEmptyResultSet() throws IOException{
        cdd.setItems(new LinkedList());
        cdd.open();
        assertNull ( "There should be ZERO row found", cdd.getNextRow() );
        cdd.close();
    }

    // open() without items must fail with an IOException.
    public void testOpenNullData() throws IOException{
        boolean exceptionThrown = false;
        try{
            cdd.open();
        }catch (IOException ioe){
            exceptionThrown = true;
        }
        assertTrue ( "IOException should have been thrown", exceptionThrown );
    }

    // open() with a null key must fail with an IOException.
    public void testGetNextRowNullKey() throws IOException{
        boolean exceptionThrown = false;
        cdd.setItems(new LinkedList());
        cdd.setKey(null);
        try{
            cdd.open();
        }catch (IOException ioe){
            exceptionThrown = true;
        }
        assertTrue ( "IOException should have been thrown", exceptionThrown );
    }

    public void testHasMoreRowsDefaultConstructor () throws IOException {
        cdd.setItems(createCollection());
        cdd.setKey("key");
        cdd.open();
        assertTrue("Should have rows. ", cdd.hasMoreRows());
    }

    public void testHasMoreRowsNo () throws IOException {
        cdd.setItems(new LinkedList());
        cdd.setKey("key");
        cdd.open();
        assertFalse("Should have rows. ", cdd.hasMoreRows());
    }

    // Shared fixture: a three-element string collection.
    private Collection createCollection(){
        List strings = new LinkedList();
        strings.add("one");
        strings.add("two");
        strings.add("three");
        return strings;
    }
}
|
// Marks for each student across three assessments.
int[][] marks = {{85,90,95},{75,80,90},{95,100,105}};

// Per-student totals, one entry per row of marks.
int totals[] = new int[marks.length];
for (int i = 0; i < marks.length; i++) {
    int total = 0;
    for (int mark : marks[i]) {
        total += mark;
    }
    totals[i] = total;
}

// Average the totals across all students.
float average = 0;
for (int total : totals) {
    average += total;
}
average /= totals.length;

// Print the average
System.out.println("Average marks: "+average);
#Powerline from @powerline/powerline
local _POWERLINE_SOURCED="$0:A"
# Best-effort terminal width: parse `stty size` output ("rows cols") and
# print the columns half; print 0 when stty is unavailable or gives nothing.
_powerline_columns_fallback() {
	if which stty &>/dev/null ; then
		local cols="$(stty size 2>/dev/null)"
		if ! test -z "$cols" ; then
			# Strip everything up to the first space, leaving the columns.
			echo "${cols#* }"
			return 0
		fi
	fi
	echo 0
	return 0
}
# Append $1 to zsh's precmd_functions array, but only if it is not already
# present ((re) does an exact reverse-subscript match).
_powerline_append_precmd_function() {
	if test -z "${precmd_functions[(re)$1]}" ; then
		precmd_functions+=( $1 )
	fi
}
integer -g _POWERLINE_JOBNUM=0
# Print the current tmux pane id with spaces and the leading "%" stripped,
# using the saved $_POWERLINE_TMUX socket. Falls back to asking tmux when
# $TMUX_PANE is unset.
# NOTE: the original source defined this function twice, byte-for-byte
# identically, back to back; the redundant second definition was removed.
_powerline_tmux_pane() {
	local -x TMUX="$_POWERLINE_TMUX"
	echo "${TMUX_PANE:-`tmux display -p "#D"`}" | tr -d ' %'
}
# When running inside tmux, install hooks that mirror the shell's $PWD and
# terminal width into tmux global environment variables (keyed by pane id),
# and keep them updated on directory changes and window resizes.
_powerline_init_tmux_support() {
	emulate -L zsh
	if test -n "$TMUX" && tmux refresh -S &>/dev/null ; then
		# TMUX variable may be unset to create new tmux session inside this one
		typeset -g _POWERLINE_TMUX="$TMUX"

		# Set tmux global env var TMUX_<name>_<pane-id> and refresh the status.
		function -g _powerline_tmux_setenv() {
			emulate -L zsh
			local -x TMUX="$_POWERLINE_TMUX"
			tmux setenv -g TMUX_"$1"_$(_powerline_tmux_pane) "$2"
			tmux refresh -S
		}

		function -g _powerline_tmux_set_pwd() {
			_powerline_tmux_setenv PWD "$PWD"
		}

		function -g _powerline_tmux_set_columns() {
			_powerline_tmux_setenv COLUMNS "${COLUMNS:-$(_powerline_columns_fallback)}"
		}

		# Mirror PWD on every directory change and COLUMNS on every resize,
		# then push the initial values.
		chpwd_functions+=( _powerline_tmux_set_pwd )
		trap '_powerline_tmux_set_columns' SIGWINCH
		_powerline_tmux_set_columns
		_powerline_tmux_set_pwd
	fi
}
# Track the active ZLE keymap (vi insert/command etc.) in $_POWERLINE_MODE so
# the prompt can show a mode indicator. Silently no-ops on zsh < 4.3.11.
_powerline_init_modes_support() {
emulate -L zsh
test -z "$ZSH_VERSION" && return 0
local -a vs
vs=( ${(s:.:)ZSH_VERSION} )
# Mode support requires >=zsh-4.3.11
if (( vs[1] < 4 || (vs[1] == 4 && (vs[2] < 3 || (vs[2] == 3 && vs[3] < 11))) )) ; then
return 0
fi
# Resolve the keymap currently linked to `main` (".safe" when unlinked).
function -g _powerline_get_main_keymap_name() {
REPLY="${${(Q)${${(z)${"$(bindkey -lL main)"}}[3]}}:-.safe}"
}
# Follow `bindkey -A` keymap aliases until a concrete keymap name remains.
function -g _powerline_set_true_keymap_name() {
typeset -g _POWERLINE_MODE="${1}"
local plm_bk="$(bindkey -lL ${_POWERLINE_MODE})"
if [[ $plm_bk = 'bindkey -A'* ]] ; then
_powerline_set_true_keymap_name ${(Q)${${(z)plm_bk}[3]}}
fi
}
# On keymap change: record the mode, redraw the prompt, then chain to any
# previously installed zle-keymap-select widget saved by _powerline_add_widget.
function -g _powerline_zle_keymap_select() {
_powerline_set_true_keymap_name $KEYMAP
zle reset-prompt
test -z "$_POWERLINE_SAVE_WIDGET" || zle $_POWERLINE_SAVE_WIDGET
}
function -g _powerline_set_main_keymap_name() {
local REPLY
_powerline_get_main_keymap_name
_powerline_set_true_keymap_name "$REPLY"
}
_powerline_add_widget zle-keymap-select _powerline_zle_keymap_select
_powerline_set_main_keymap_name
# Remember the non-vi startup keymap as the "default" mode for the renderer.
if [[ "$_POWERLINE_MODE" != vi* ]] ; then
typeset -g _POWERLINE_DEFAULT_MODE="$_POWERLINE_MODE"
fi
_powerline_append_precmd_function _powerline_set_main_keymap_name
}
# Record the number of background jobs for prompt rendering, using the %j
# prompt escape rather than counting `jobs` output lines.
_powerline_set_jobnum() {
# If you are wondering why I am not using the same code as I use for bash
# ($(jobs|wc -l)): consider the following test:
# echo abc | less
# <C-z>
# . This way jobs will print
# [1] + done echo abc |
# suspended less -M
# ([ is in first column). You see: any line counting thingie will return
# wrong number of jobs. You need to filter the lines first. Or not use
# jobs built-in at all.
integer -g _POWERLINE_JOBNUM=${(%):-%j}
}
# precmd hook used with the zpython bindings: tell powerline a new prompt is
# about to be drawn.
_powerline_update_counter() {
zpython '_powerline.precmd()'
}
# Install the powerline prompts. Prefers the in-process zpython bindings when
# the zsh/zpython module loads; otherwise builds PS1/RPS1/PS2/RPS2/PS3 strings
# that shell out to $POWERLINE_COMMAND on every prompt redraw (promptsubst).
_powerline_setup_prompt() {
emulate -L zsh
_powerline_append_precmd_function _powerline_set_jobnum
typeset -g VIRTUAL_ENV_DISABLE_PROMPT=1
if test -z "${POWERLINE_NO_ZSH_ZPYTHON}" && { zmodload libzpython || zmodload zsh/zpython } &>/dev/null ; then
_powerline_append_precmd_function _powerline_update_counter
zpython 'from powerline.bindings.zsh import setup as _powerline_setup'
zpython '_powerline_setup(globals())'
zpython 'del _powerline_setup'
powerline-reload() {
zpython 'from powerline.bindings.zsh import reload as _powerline_reload'
zpython '_powerline_reload()'
zpython 'del _powerline_reload'
}
powerline-reload-config() {
zpython 'from powerline.bindings.zsh import reload_config as _powerline_reload_config'
zpython '_powerline_reload_config()'
zpython 'del _powerline_reload_config'
}
else
if test -z "${POWERLINE_COMMAND}" ; then
typeset -g POWERLINE_COMMAND="$($POWERLINE_CONFIG_COMMAND shell command)"
fi
# Shared arguments; kept single-quoted so $?/$pipestatus etc. expand at
# prompt-render time, not now.
local add_args='-r .zsh'
add_args+=' --last-exit-code=$?'
add_args+=' --last-pipe-status="$pipestatus"'
add_args+=' --renderer-arg="client_id=$$"'
add_args+=' --renderer-arg="shortened_path=${(%):-%~}"'
add_args+=' --jobnum=$_POWERLINE_JOBNUM'
add_args+=' --renderer-arg="mode=$_POWERLINE_MODE"'
add_args+=' --renderer-arg="default_mode=$_POWERLINE_DEFAULT_MODE"'
# Extra args for the continuation (PS2/RPS2) and select (PS3) prompts.
local new_args_2=' --renderer-arg="parser_state=${(%%):-%_}"'
new_args_2+=' --renderer-arg="local_theme=continuation"'
local add_args_3=$add_args' --renderer-arg="local_theme=select"'
local add_args_2=$add_args$new_args_2
# --width is appended AFTER add_args_2/add_args_3 were captured, so only
# PS1/RPS1 (and RPS2 via add_args_r2 below) are width-constrained.
add_args+=' --width=$(( ${COLUMNS:-$(_powerline_columns_fallback)} - ${ZLE_RPROMPT_INDENT:-1} ))'
local add_args_r2=$add_args$new_args_2
typeset -g PS1='$("$POWERLINE_COMMAND" $=POWERLINE_COMMAND_ARGS shell aboveleft '$add_args')'
typeset -g RPS1='$("$POWERLINE_COMMAND" $=POWERLINE_COMMAND_ARGS shell right '$add_args')'
typeset -g PS2='$("$POWERLINE_COMMAND" $=POWERLINE_COMMAND_ARGS shell left '$add_args_2')'
typeset -g RPS2='$("$POWERLINE_COMMAND" $=POWERLINE_COMMAND_ARGS shell right '$add_args_r2')'
typeset -g PS3='$("$POWERLINE_COMMAND" $=POWERLINE_COMMAND_ARGS shell left '$add_args_3')'
fi
}
# Install $2 as the zle widget $1, preserving any pre-existing widget under a
# uniquely named _powerline_save_* alias (recorded in _POWERLINE_SAVE_WIDGET)
# so the replacement can chain to it.
_powerline_add_widget() {
local widget="$1"
local function="$2"
local old_widget_command="$(zle -l -L $widget)"
# Already installed exactly as requested: nothing to do.
if [[ "$old_widget_command" = "zle -N $widget $function" ]] ; then
return 0
elif [[ -z "$old_widget_command" ]] ; then
zle -N $widget $function
else
# Find an unused _powerline_save_<widget>[_N] name for the old widget.
local save_widget="_powerline_save_$widget"
local -i i=0
while ! test -z "$(zle -l -L $save_widget)" ; do
save_widget="${save_widget}_$i"
(( i++ ))
done
# If widget was defined with `zle -N widget` (without `function`
# argument) then this function will be handy.
eval "function $save_widget() { emulate -L zsh; $widget \$@ }"
eval "${old_widget_command/$widget/$save_widget}"
zle -N $widget $function
typeset -g _POWERLINE_SAVE_WIDGET="$save_widget"
fi
}
# Locate powerline-config: prefer $PATH, else resolve relative to this script.
if test -z "${POWERLINE_CONFIG_COMMAND}" ; then
if which powerline-config >/dev/null ; then
typeset -g POWERLINE_CONFIG_COMMAND=powerline-config
else
typeset -g POWERLINE_CONFIG_COMMAND="${_POWERLINE_SOURCED:h:h:h:h}/scripts/powerline-config"
fi
fi
# The prompt strings above rely on % escapes and $(...) substitution.
setopt promptpercent
setopt promptsubst
# Enable only the integrations the powerline config asks for.
if "${POWERLINE_CONFIG_COMMAND}" shell --shell=zsh uses prompt ; then
_powerline_setup_prompt
_powerline_init_modes_support
fi
if "${POWERLINE_CONFIG_COMMAND}" shell --shell=zsh uses tmux ; then
_powerline_init_tmux_support
fi
|
<filename>node_modules/react-icons-kit/fa/videoCamera.js
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.videoCamera = void 0;

// Icon descriptor for FontAwesome's "video-camera" glyph: a viewBox plus a
// single <path> child, consumed by react-icons-kit's Icon component.
var videoCamera = {
  viewBox: "0 0 1792 1792",
  children: [
    {
      name: "path",
      attribs: {
        d: "M1792 352v1088q0 42-39 59-13 5-25 5-27 0-45-19l-403-403v166q0 119-84.5 203.5t-203.5 84.5h-704q-119 0-203.5-84.5t-84.5-203.5v-704q0-119 84.5-203.5t203.5-84.5h704q119 0 203.5 84.5t84.5 203.5v165l403-402q18-19 45-19 12 0 25 5 39 17 39 59z"
      }
    }
  ]
};

exports.videoCamera = videoCamera;
<filename>packages/axyz-react/src/logos/Walletconnect.tsx
import React from 'react';
// Width/height of the rendered logo (px or any CSS length); both default 40.
interface Props {
width?: number | string;
height?: number | string;
}
// Static WalletConnect logo rendered as an inline SVG.
// NOTE(review): the <radialGradient> uses the hard-coded id "a"; if several
// SVGs on one page reuse that id, fills can resolve to the wrong gradient —
// consider a per-instance unique id. TODO confirm whether multiple instances
// are ever rendered together.
const Walletconnect: React.FC<Props> = ({ width = 40, height = 40 }) => (
<svg width={width} height={height} viewBox="0 0 512 512" xmlns="http://www.w3.org/2000/svg">
<radialGradient id="a" cx="0%" cy="50%" r="100%">
<stop offset={0} stopColor="#5d9df6" />
<stop offset={1} stopColor="#006fff" />
</radialGradient>
<g fill="none" fillRule="evenodd">
<path
d="M256 0c141.385 0 256 114.615 256 256S397.385 512 256 512 0 397.385 0 256 114.615 0 256 0z"
fill="url(#a)"
/>
<path
d="M162.692 197.709c51.533-50.279 135.084-50.279 186.617 0l6.202 6.05a6.327 6.327 0 0 1 0 9.105l-21.216 20.7a3.357 3.357 0 0 1-4.666 0l-8.535-8.328c-35.95-35.075-94.238-35.075-130.188 0l-9.14 8.918a3.357 3.357 0 0 1-4.666 0l-21.216-20.7a6.327 6.327 0 0 1 0-9.104zm230.493 42.809 18.883 18.422a6.327 6.327 0 0 1 0 9.104l-85.142 83.07c-2.577 2.514-6.754 2.514-9.33 0l-60.43-58.957a1.679 1.679 0 0 0-2.332 0l-60.427 58.958c-2.576 2.513-6.754 2.514-9.33 0l-85.145-83.072a6.327 6.327 0 0 1 0-9.104l18.883-18.422c2.576-2.514 6.754-2.514 9.33 0l60.43 58.958a1.679 1.679 0 0 0 2.332 0l60.427-58.958c2.576-2.514 6.754-2.514 9.33 0l60.43 58.958a1.679 1.679 0 0 0 2.332 0l60.428-58.957c2.577-2.514 6.755-2.514 9.331 0z"
fill="#fff"
fillRule="nonzero"
/>
</g>
</svg>
);
export default Walletconnect;
|
#!/usr/bin/env bash
# ENVIRONMENT SETUP
# Install Miniconda into ~/miniconda and create the project's conda env.
initEnv() {
    echo -e "\e[36m \e[1m [01] Installing Miniconda \e[0m"
    # -y: run unattended — the original prompted for confirmation and would
    # hang in non-interactive shells (e.g. CI or docker build).
    apt-get install -y wget
    wget https://repo.anaconda.com/miniconda/Miniconda3-py38_4.11.0-Linux-x86_64.sh -O ~/miniconda.sh
    bash ~/miniconda.sh -b -p ~/miniconda
    rm ~/miniconda.sh
    # Quote the existing $PATH so entries containing spaces survive.
    export PATH="$HOME/miniconda/bin:$PATH"
    echo -e "\e[36m \e[1m [02] Creating Conda Env \e[0m"
    conda env create -f virtualnet.yml
}
initEnv
TERMUX_PKG_HOMEPAGE=https://tukaani.org/xz/
TERMUX_PKG_DESCRIPTION="XZ-format compression library"
TERMUX_PKG_LICENSE="GPL-2.0"
TERMUX_PKG_VERSION=5.2.4
TERMUX_PKG_REVISION=4
TERMUX_PKG_SRCURL=https://fossies.org/linux/misc/xz-${TERMUX_PKG_VERSION}.tar.xz
TERMUX_PKG_SHA256=9717ae363760dedf573dad241420c5fea86256b65bc21d2cf71b2b12f0544f4b
TERMUX_PKG_BREAKS="liblzma-dev"
TERMUX_PKG_REPLACES="liblzma-dev"
TERMUX_PKG_ESSENTIAL=yes
|
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt  # bug fix: plt was used below but never imported
from sklearn.linear_model import LinearRegression
from sklearn.svm import SVR  # imported but unused in this script; kept as-is
from sklearn.model_selection import train_test_split

# Load the historical stock data; the last column is the target value.
df = pd.read_csv("stock_data.csv")

# Split features (all but the last column) and target (last column).
X = df.iloc[:, :-1].values
y = df.iloc[:, -1].values
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=0)

# Train an ordinary least-squares linear regression model.
regressor = LinearRegression()
regressor.fit(X_train, y_train)

# Predict on the held-out test set (the original computed this twice).
predictions = regressor.predict(X_test)

# Plot predicted vs. true values.
plt.scatter(y_test, predictions)
plt.xlabel("True Values")
plt.ylabel("Predictions")
plt.show()

# Evaluate the model. Bug fix: LinearRegression has no `evaluate` method;
# `score` returns the R^2 coefficient of determination on the test set.
print(regressor.score(X_test, y_test))
<reponame>valenpo/postfix-jilter
/*
* Copyright (c) 2001-2004 Sendmail, Inc. All Rights Reserved
*/
package com.sendmail.jilter;
import com.sendmail.jilter.internal.JilterConstants;
import java.io.IOException;
import java.nio.channels.WritableByteChannel;
/**
* Status class for methods in {@link JilterHandler}.
*/
public abstract class JilterStatus
{
/** Subclass-only constructor; instances come from the constants and factory below. */
protected JilterStatus()
{
}
/**
 * Writes this status to the milter channel as a reply packet.
 * NOTE(review): implemented by internal subclasses (SimpleJilterStatus /
 * CustomJilterStatus) that are not visible in this file.
 *
 * @param writeChannel channel connected to the MTA
 * @throws IOException if the write fails
 */
abstract public void sendReplyPacket(WritableByteChannel writeChannel)
throws IOException;
/**
 * Creates a new JilterStatus with a custom error reply code.
 *
 * @param rcode The three-digit (RFC 821/2821) SMTP reply code. rcode cannot be null, and must
 * be a valid 4XX or 5XX reply code.
 * @param xcode The extended (RFC 1893/2034) reply code. If xcode is null, no extended code is
 * used. Otherwise, xcode must conform to RFC 1893/2034.
 * @param messageLines An array of single lines of text which will be used as the text part of
 * the SMTP reply. If messageLines has zero lines, an empty message is used.
 * @return the new status
 * @throws IllegalArgumentException if rcode or xcode is invalid
 */
public static JilterStatus makeCustomStatus(String rcode, String xcode, String[] messageLines)
throws IllegalArgumentException
{
return new CustomJilterStatus(rcode, xcode, messageLines);
}
/**
 * Continue processing the current connection, message, or recipient.
 */
public static final JilterStatus SMFIS_CONTINUE = new SimpleJilterStatus(JilterConstants.SMFIR_CONTINUE);
/**
 * Rejection.
 * For a connection-oriented routine, reject this connection; call {@link JilterHandler#close close}.
 * For a message-oriented routine (except {@link JilterHandler#eom eom} or {@link JilterHandler#abort abort}),
 * reject this message.
 * For a recipient-oriented routine, reject the current recipient (but continue processing the current message).
 */
public static final JilterStatus SMFIS_REJECT = new SimpleJilterStatus(JilterConstants.SMFIR_REJECT);
/**
 * Message discard. For a message- or recipient-oriented routine, accept this message, but silently discard it.
 * {@link #SMFIS_DISCARD SMFIS_DISCARD} should not be returned by a connection-oriented routine.
 */
public static final JilterStatus SMFIS_DISCARD = new SimpleJilterStatus(JilterConstants.SMFIR_DISCARD);
/**
 * Acceptance. For a connection-oriented routine, accept this connection without further filter processing;
 * call {@link JilterHandler#close close}.
 * For a message- or recipient-oriented routine, accept this message without further filtering.
 */
public static final JilterStatus SMFIS_ACCEPT = new SimpleJilterStatus(JilterConstants.SMFIR_ACCEPT);
/**
 * Return a temporary failure, i.e., the corresponding SMTP command will return an appropriate 4xx status code.
 * For a message-oriented routine (except {@link JilterHandler#envfrom envfrom}), fail for this message.
 * For a connection-oriented routine, fail for this connection; call {@link JilterHandler#close close}.
 * For a recipient-oriented routine, only fail for the current recipient; continue message processing.
 */
public static final JilterStatus SMFIS_TEMPFAIL = new SimpleJilterStatus(JilterConstants.SMFIR_TEMPFAIL);
}
|
//
// Created by eduardo on 20/03/2021.
//
#include "ohlc_prices.h"
#include "portfolio/common/algorithm.h"

#include <iomanip>
#include <iterator>
#include <sstream>
#include <string>
#include <vector>
namespace portfolio {
    // Accessors for the four stored price points.
    double ohlc_prices::open() const { return open_price_; }
    double ohlc_prices::high() const { return high_price_; }
    double ohlc_prices::low() const { return low_price_; }
    double ohlc_prices::close() const { return close_price_; }

    // Construct from explicit open/high/low/close values.
    ohlc_prices::ohlc_prices(double open, double high, double low,
                             double close) {
        set_prices(open, high, low, close);
    }

    // Default-construct with all prices zero.
    ohlc_prices::ohlc_prices() { set_prices(0.0, 0.0, 0.0, 0.0); }

    // Overwrite all four price points at once.
    void ohlc_prices::set_prices(double open, double high, double low,
                                 double close) {
        open_price_ = open;
        high_price_ = high;
        low_price_ = low;
        close_price_ = close;
    }

    // Serialize as "open high low close" (std::to_string fixed notation).
    std::string ohlc_prices::to_string() const {
        return std::to_string(open_price_) + " " + std::to_string(high_price_) +
               " " + std::to_string(low_price_) + " " +
               std::to_string(close_price_);
    }

    // Parse "open high low close" from a whitespace-separated string.
    // Returns true and assigns all four prices on success; returns false and
    // leaves the object UNMODIFIED otherwise.
    // Bug fix: the original indexed the first four tokens unconditionally,
    // which is undefined behavior when the input has fewer than four tokens;
    // it also partially assigned members before rejecting a bad later token.
    bool ohlc_prices::from_string(std::string_view str_ohlc) {
        std::string str(str_ohlc);
        std::istringstream iss(str);
        std::vector<std::string> tokens(std::istream_iterator<std::string>{iss},
                                        std::istream_iterator<std::string>());
        if (tokens.size() < 4) {
            return false;
        }
        double values[4];
        std::string::size_type sz; // alias of size_t
        for (int i = 0; i < 4; ++i) {
            if (!is_floating(tokens[i])) {
                return false;
            }
            values[i] = std::stod(tokens[i], &sz);
        }
        set_prices(values[0], values[1], values[2], values[3]);
        return true;
    }

    // Exact (IEEE ==) comparison of all four prices.
    bool ohlc_prices::operator==(const ohlc_prices &rhs) const {
        return open_price_ == rhs.open_price_ &&
               high_price_ == rhs.high_price_ && low_price_ == rhs.low_price_ &&
               close_price_ == rhs.close_price_;
    }
    bool ohlc_prices::operator!=(const ohlc_prices &rhs) const {
        return !(rhs == *this);
    }
} // namespace portfolio
import java.util.*;

/**
 * Simple mutable value object holding a user's id, name and age, plus a demo
 * main() that sorts a list of users by id and then by age.
 */
class User {
    private int id;
    private String name;
    private int age;

    public User(int id, String name, int age) {
        this.id = id;
        this.name = name;
        this.age = age;
    }

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public int getAge() {
        return age;
    }

    public void setAge(int age) {
        this.age = age;
    }

    /**
     * Human-readable form. Bug fix: without this override the demo below
     * printed default Object identity strings (e.g. "User@1b6d3586"),
     * making the "sorted list" output unreadable.
     */
    @Override
    public String toString() {
        return "User{id=" + id + ", name=" + name + ", age=" + age + "}";
    }

    public static void main(String[] args) {
        List<User> userList = new ArrayList<>();
        userList.add(new User(2, "John", 20));
        userList.add(new User(4, "Jack", 22));
        userList.add(new User(3, "Peter", 25));
        userList.add(new User(1, "Jack", 18));

        // Sort ascending by id, print, then re-sort ascending by age.
        userList.sort(Comparator.comparing(User :: getId));
        System.out.println("Sorted list by User id: " + userList);
        userList.sort(Comparator.comparing(User :: getAge));
        System.out.println("Sorted list by User age: " + userList);
    }
}
#!/bin/sh
# Container entrypoint: prepare config/permissions and start Elasticsearch.
echo "Starting Elasticsearch ${ES_VERSION}"
BASE=/usr/share/elasticsearch
# Allow for memlock if enabled
# if [ "${MEMORY_LOCK}" == "true" ]; then
# if [[ $(whoami) == "root" ]]; then
# # ulimit -l unlimited
# echo "Adding Memory Lock"
# mkdir /etc/systemd/system/elasticsearch.service.d
# touch /etc/systemd/system/elasticsearch.service.d/override.conf
# echo $"[Service]\n" > /etc/systemd/system/elasticsearch.service.d/override.conf
# echo "LimitMEMLOCK=infinity" >> /etc/systemd/system/elasticsearch.service.d/override.conf
# systemctl daemon-reload
# # echo "elasticsearch soft memlock unlimited" >> /etc/security/limits.conf
# # echo "elasticsearch hard memlock unlimited" >> /etc/security/limits.conf
# # su elasticsearch
# fi
# fi
# Set a random node name if not set
if [ -z "${NODE_NAME}" ]; then
NODE_NAME="$(uuidgen)"
fi
# Create a temporary folder for Elasticsearch ourselves
# ref: https://github.com/elastic/elasticsearch/pull/27659
export ES_TMPDIR="$(mktemp -d -t elasticsearch.XXXXXXXX)"
# Prevent "Text file busy" errors
sync
# if [ ! -z "${ES_PLUGINS_INSTALL}" ]; then
# OLDIFS="${IFS}"
# IFS=","
# for plugin in ${ES_PLUGINS_INSTALL}; do
# if ! "${BASE}"/bin/elasticsearch-plugin list | grep -qs ${plugin}; then
# until "${BASE}"/bin/elasticsearch-plugin install --batch ${plugin}; do
# echo "Failed to install ${plugin}, retrying in 3s"
# sleep 3
# done
# fi
# done
# IFS="${OLDIFS}"
# fi
if [ ! -z "${SHARD_ALLOCATION_AWARENESS_ATTR}" ]; then
# this will map to a file like /etc/hostname => /dockerhostname so reading that file will get the
# container hostname
if [ -f "${SHARD_ALLOCATION_AWARENESS_ATTR}" ]; then
ES_SHARD_ATTR="$(cat "${SHARD_ALLOCATION_AWARENESS_ATTR}")"
else
ES_SHARD_ATTR="${SHARD_ALLOCATION_AWARENESS_ATTR}"
fi
NODE_NAME="${ES_SHARD_ATTR}-${NODE_NAME}"
echo "node.attr.${SHARD_ALLOCATION_AWARENESS}: ${ES_SHARD_ATTR}" >> "${BASE}"/config/elasticsearch.yml
# POSIX fix: `==` inside `[ ]` is a bashism and fails under dash; use `=`.
if [ "$NODE_MASTER" = "true" ]; then
echo "cluster.routing.allocation.awareness.attributes: ${SHARD_ALLOCATION_AWARENESS}" >> "${BASE}"/config/elasticsearch.yml
fi
fi
export NODE_NAME="${NODE_NAME}"
# remove x-pack-ml module
# rm -rf /elasticsearch/modules/x-pack/x-pack-ml
# rm -rf /elasticsearch/modules/x-pack-ml
# Run
# POSIX fix: `[[ ... ]]` is not available in /bin/sh (dash); use `[ ]`.
if [ "$(whoami)" = "root" ]; then
if [ ! -d "/data/data/nodes/0" ]; then
echo "Changing ownership of /data folder"
chown -R elasticsearch:elasticsearch /data
echo "Changing ownership of ${ES_TMPDIR} folder"
chmod -R a+w "${ES_TMPDIR}"
chown -R elasticsearch:elasticsearch "${ES_TMPDIR}"
# Create keystore for secure_url etc.
echo "Keystore creation for secure_url"
"${BASE}"/bin/elasticsearch-keystore create
echo "${XPACK_SECURE_URL_SLACK}" | "${BASE}"/bin/elasticsearch-keystore add --stdin xpack.notification.slack.account.monitoring.secure_url
fi
# Bug fix: the original passed $ES_EXTRA_ARGS after the username, where su
# treats them as shell positional parameters and the command never sees
# them; include them in the -c command string instead.
exec su -c "$BASE/bin/elasticsearch $ES_EXTRA_ARGS" elasticsearch
else
# The container's first process is not running as 'root',
# it does not have the rights to chown. However, we may
# assume that it is being ran as 'elasticsearch', and that
# the volumes already have the right permissions. This is
# the case for Kubernetes, for example, when 'runAsUser: 1000'
# and 'fsGroup:100' are defined in the pod's security context.
"${BASE}"/bin/elasticsearch ${ES_EXTRA_ARGS}
fi
|
#!/usr/bin/env bash
# Container entrypoint for the Laravel backend: fix permissions, install
# dependencies, bootstrap .env, warm caches, then run php-fpm in foreground.
set -e

chown -R www-data:www-data storage bootstrap/cache
chmod -R 755 storage bootstrap/cache

composer install --no-scripts --no-autoloader --ansi --no-interaction -d /var/www/back
composer dump-autoload -d /var/www/back

cd /var/www/back

if [ ! -f ".env" ]
then
    cp .env.example .env
    # Bug fix: only generate APP_KEY for a brand-new .env. The original ran
    # key:generate on EVERY container start, rotating the key and thereby
    # invalidating existing sessions and encrypted data.
    php artisan key:generate
fi

php artisan optimize && php artisan config:cache && php artisan route:cache

chown -R www-data:www-data storage bootstrap/cache
chmod -R 755 storage bootstrap/cache

php-fpm
|
<gh_stars>0
import { useFormikContext } from 'formik';
import React, { useCallback } from 'react';
import Button, { ButtonProps } from 'shared/view/components/Button';
// Button that runs the optional onPress handler and then submits the
// enclosing Formik form.
const SubmitButton: React.FC<ButtonProps> = ({ onPress, ...props }) => {
  const formik = useFormikContext();

  // Bug fix: the original passed `[]` as the dependency list, so the callback
  // captured the first render's `onPress` and formik context forever (stale
  // closure; flagged by react-hooks/exhaustive-deps).
  const handlePress = useCallback(() => {
    onPress?.();
    formik.handleSubmit();
  }, [onPress, formik]);

  return <Button {...props} onPress={handlePress} />;
};
export default SubmitButton;
|
<filename>src/reducers/application.js
import { buildActions } from 'utils';
export const types = buildActions('application', [
'INIT_APP',
'LOGIN_USER',
'REQUEST_TOKEN',
'REQUEST_TOKEN_SUCCESS',
'REQUEST_TOKEN_FAILURE',
'REQUEST_CURRENT_USER',
'REQUEST_CURRENT_USER_SUCCESS',
'REQUEST_CURRENT_USER_FAILURE',
'LOGIN_USER_SUCCESS',
'LOGIN_USER_FAILURE',
'LOGOUT_USER',
'LOGOUT_USER_SUCCESS',
'LOGOUT_USER_FAILURE',
'REGISTER_USER',
'REGISTER_USER_SUCCESS',
'REGISTER_USER_FAILURE'
]);
const initApp = () => ({
type: types.INIT_APP
});
const loginUser = (emailAddress, password) => ({
type: types.LOGIN_USER,
emailAddress,
password
});
const requestToken = (emailAddress, password) => ({
type: types.REQUEST_TOKEN,
emailAddress,
password
});
const requestTokenSuccess = ({ token, expiration }) => ({
type: types.REQUEST_TOKEN_SUCCESS,
expiration,
token
});
const requestTokenFailure = error => ({
type: types.REQUEST_TOKEN_FAILURE,
error
});
const requestCurrentUser = () => ({
type: types.REQUEST_CURRENT_USER
});
const requestCurrentUserSuccess = user => ({
type: types.REQUEST_CURRENT_USER_SUCCESS,
user
});
const requestCurrentUserFailure = error => ({
type: types.REQUEST_CURRENT_USER_FAILURE,
error
});
const loginUserSuccess = () => ({
type: types.LOGIN_USER_SUCCESS
});
const loginUserFailure = error => ({
type: types.LOGIN_USER_FAILURE,
error
});
const logoutUser = () => ({
type: types.LOGOUT_USER
});
const logoutUserSuccess = () => ({
type: types.LOGOUT_USER_SUCCESS
});
const logoutUserFailure = error => ({
type: types.LOGOUT_USER_FAILURE,
error
});
const registerUser = details => ({
type: types.REGISTER_USER,
details
});
const registerUserSuccess = () => ({
type: types.REGISTER_USER_SUCCESS
});
const registerUserFailure = error => ({
type: types.REGISTER_USER_FAILURE,
error
});
export const actions = {
initApp,
loginUser,
requestToken,
requestTokenSuccess,
requestTokenFailure,
requestCurrentUser,
requestCurrentUserSuccess,
requestCurrentUserFailure,
loginUserSuccess,
loginUserFailure,
logoutUser,
logoutUserSuccess,
logoutUserFailure,
registerUser,
registerUserSuccess,
registerUserFailure
};
// Initial application state: no user, no tokens, no in-flight auth requests.
export const initialState = {
loggingIn: false,
loggingOut: false,
user: null,
error: null,
// Session tokens; expiration is stored alongside the access token.
authorization: {
accessToken: null,
refreshToken: null,
expiration: null
},
// Signup flow tracking.
registration: {
registering: false,
complete: false,
error: null
}
};
// Application reducer: tracks authentication, the current user, and the
// registration flow. Unhandled action types fall through to the current state.
export const reducer = (state = initialState, action = {}) => {
switch (action.type) {
// Mark the corresponding request as in flight.
case types.LOGIN_USER:
return {
...state,
loggingIn: true
};
case types.LOGOUT_USER:
return {
...state,
loggingOut: true
};
case types.REGISTER_USER:
return {
...state,
registration: {
...state.registration,
registering: true,
details: action.details
}
};
// Store the freshly issued access token and its expiration.
case types.REQUEST_TOKEN_SUCCESS:
return {
...state,
authorization: {
...state.authorization,
accessToken: action.token,
expiration: action.expiration
}
};
case types.REQUEST_TOKEN_FAILURE:
return {
...state,
authorization: {
...state.authorization,
error: action.error
}
};
case types.REQUEST_CURRENT_USER_SUCCESS:
return {
...state,
user: action.user
};
// Logout clears the user and resets the authorization slice entirely.
case types.LOGOUT_USER_SUCCESS:
return {
...state,
loggingOut: false,
user: null,
authorization: initialState.authorization
};
case types.LOGIN_USER_SUCCESS:
return {
...state,
loggingIn: false
};
// Both auth failures share the same shape: stop spinners, record the error.
case types.LOGIN_USER_FAILURE:
case types.LOGOUT_USER_FAILURE:
return {
...state,
loggingIn: false,
loggingOut: false,
error: action.error
};
case types.REGISTER_USER_SUCCESS:
return {
...state,
registration: {
registering: false,
complete: true,
error: null
}
};
// NOTE(review): failure also sets complete: true — confirm the UI relies on
// `complete && error` to distinguish success from failure.
case types.REGISTER_USER_FAILURE:
return {
...state,
registration: {
registering: false,
complete: true,
error: action.error
}
};
default:
return state;
}
};
|
def longest_increasing_sequence(arr):
    """Return the length of the longest strictly increasing subsequence.

    Classic O(n^2) dynamic programme: dp[i] is the length of the longest
    increasing subsequence ending at index i.

    Bug fix: the original initialized its running maximum to -inf and only
    updated it inside the loop, so empty and single-element inputs returned
    -inf instead of 0 and 1 respectively.
    """
    n = len(arr)
    if n == 0:
        return 0
    dp = [1] * n
    for i in range(1, n):
        for j in range(i):
            if arr[i] > arr[j]:
                dp[i] = max(dp[i], dp[j] + 1)
    # The best subsequence may end at any index, including index 0.
    return max(dp)

# Demo (the original referenced an undefined name `array`).
print(longest_increasing_sequence([10, 9, 2, 5, 3, 7, 101, 18]))  # Output: 4
<filename>src/viewBuilder.tsx
import React from 'react';
import { IOutlet } from './@types/outlet.type';
import { ViewBuilder } from './component.service';
import { Outlet } from './components/outlet.component';
// Wraps a route view in an <Outlet> and injects that outlet into
// `params.component`. Non-function views fall back to a bare named outlet.
export const viewBuilder: ViewBuilder = (view, params) => {
let outlet: JSX.Element;
if (typeof view !== 'function') {
outlet = <Outlet name={params.name} />;
} else {
outlet = <Outlet name={params.name} routeComponent={view} />;
}
// `element` is created once per viewBuilder call; the component returned
// below always renders this same element instance.
const element = <params.component {...params.data} {...({ Outlet: () => outlet } as IOutlet)} />;
view = () => element;
return view;
};
|
<reponame>danielavellar15/ckeditor5
/**
* @license Copyright (c) 2003-2021, CKSource - <NAME>. All rights reserved.
* For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license
*/
/**
* @module ui/bindings/injectcsstransitiondisabler
*/
/**
* A decorator that brings the possibility to temporarily disable CSS transitions using
* {@link module:ui/view~View} methods. It is helpful when, for instance, the transitions should not happen
* when the view is first displayed but they should work normal in other cases.
*
* The methods to control the CSS transitions are:
* * `disableCssTransitions()` – Adds the `.ck-transitions-disabled` class to the
* {@link module:ui/view~View#element view element}.
* * `enableCssTransitions()` – Removes the `.ck-transitions-disabled` class from the
* {@link module:ui/view~View#element view element}.
*
* **Note**: This helper extends the {@link module:ui/view~View#template template} and must be used **after**
* {@link module:ui/view~View#setTemplate} is called:
*
* import injectCssTransitionDisabler from '@ckeditor/ckeditor5-ui/src/bindings/injectcsstransitiondisabler';
*
* class MyView extends View {
* constructor() {
* super();
*
* // ...
*
* this.setTemplate( { ... } );
*
* // ...
*
* injectCssTransitionDisabler( this );
*
* // ...
* }
* }
*
* The usage comes down to:
*
* const view = new MyView();
*
* // ...
*
* view.disableCssTransitions();
* view.show();
* view.enableCssTransitions();
*
* @param {module:ui/view~View} view View instance that should get this functionality.
*/
export default function injectCssTransitionDisabler( view ) {
	// Observable flag backing the CSS-class template binding added below.
	view.set( '_isCssTransitionsDisabled', false );

	// Both public toggles simply flip the observable flag.
	const makeToggle = value => () => {
		view._isCssTransitionsDisabled = value;
	};

	view.disableCssTransitions = makeToggle( true );
	view.enableCssTransitions = makeToggle( false );

	// Bind the flag to the element's class list so toggling it adds or
	// removes the .ck-transitions-disabled class.
	view.extendTemplate( {
		attributes: {
			class: [
				view.bindTemplate.if( '_isCssTransitionsDisabled', 'ck-transitions-disabled' )
			]
		}
	} );
}
|
import flask
from flask import Flask, request, jsonify
import json

# Bug fix: the original used Flask(name), an undefined name; the Flask
# convention is the module's __name__.
app = Flask(__name__)


@app.route('/countries', methods=['GET'])
def get_countries():
    """Return the list of countries loaded from data.json as a JSON response."""
    with open('data.json', 'r') as f:
        countries = json.load(f)
    return jsonify(countries)


# Bug fix: was `if name == 'main'`, which raised NameError and never ran.
if __name__ == '__main__':
    app.run()
# Case-insensitive globbing (used in pathname expansion)
shopt -s nocaseglob
# Check the window size after each command and, if necessary,
# update the values of LINES and COLUMNS.
shopt -s checkwinsize
#export GREP_OPTIONS='--color=auto'
# Prevent less from clearing the screen while still showing colors.
export LESS=-XR
# Set the terminal's title bar.
function titlebar() {
echo -n $'\e]0;'"$*"$'\a'
}
# SSH auto-completion based on entries in known_hosts.
if [[ -e ~/.ssh/known_hosts ]]; then
complete -o default -W "$(cat ~/.ssh/known_hosts | sed 's/[, ].*//' | sort | uniq | grep -v '[0-9]')" ssh scp sftp
fi
|
import { createSelector } from "reselect";
// Raw slices of the users branch of the store.
export const getUsers = state => state.users.users;
export const getCurrentUserKey = state => state.users.currentUserKey;

// Memoized: whether any users exist at all.
export const getHasUsers = createSelector(getUsers, users => users.length > 0);

// Memoized: the user record whose key matches the current key
// (undefined when there is no match).
export const getCurrentUser = createSelector(
  [getUsers, getCurrentUserKey],
  (users, key) => users.find(candidate => candidate.key === key)
);
#!/usr/bin/env bash
set -e
# Print usage information for the `add issues` extension command.
help() {
  cat <<EOF
Manage GitHub Projects(beta) seamlessly from the command line.
USAGE
  gh project-manager add issues <command> <subcommand> [flags]
FLAGS
  --help, -h      Show help for command
  --path          Set json file path
  --project-num   Set project number
  --project-type  Set project type (org or user)
EXAMPLES
  $ gh project-manager add issues --project-type org --project-num 1 --path ./data.json
  $ gh project-manager add issues --project-type user --project-num 98 --path /etc/data.json
LEARN MORE
  Use 'gh project-manager add issues <command> --help' for more information about a command.
  Read the documentation at https://github.com/rise8-us/gh-project-manager
EOF
}
PROJECT_TYPE=
PROJECT_NUM=
ISSUES=
showPathPrompt() {
echo "Please enter path to issues JSON data or enter 'q' to quit: "
read -r
if [ "$REPLY" == q ]; then
exit 0
fi
ISSUES=$(jq -c -r ".[]" "${REPLY}")
}
showProjectTypeMenu() {
PS3="#: "
options=("User" "Organization" "Quit")
echo "Select Project Type"
select opt in "${options[@]}"
do
case $opt in
"User")
PROJECT_TYPE=user
break
;;
"Organization")
PROJECT_TYPE=organization
break
;;
"Quit")
exit 0
;;
*)
echo "invalid option $REPLY"
exit 0
;;
esac
done
}
showProjectNumberPrompt() {
echo "Please enter project number or enter 'q' to quit:"
read -r
if [ "$REPLY" == q ]; then
exit 0
fi
PROJECT_NUM=${REPLY}
}
while [ $# -gt 0 ]; do
case "$1" in
--path)
ISSUES=$(jq -c -r ".[]" "${2}")
shift
;;
--project-type)
if [ "$2" == org ]; then
PROJECT_TYPE=organization
shift
elif [ "$2" == user ]; then
PROJECT_TYPE=user
shift
else
help
exit 0
fi
;;
--project-num)
PROJECT_NUM=$2
shift
;;
-h|--help)
help
exit 0
;;
*)
help >&2
exit 1
;;
esac
shift
done
if [ -z $PROJECT_TYPE ]; then
showProjectTypeMenu
fi
if [ -z "$PROJECT_NUM" ]; then
showProjectNumberPrompt
fi
if [ -z "$ISSUES" ]; then
showPathPrompt
fi
OWNER=$(gh repo view --json owner --jq .owner.login)
OPTIONS_QUERY="
query(\$org: String!, \$projectNum: Int!) {
${PROJECT_TYPE}(login: \$org) {
projectNext(number: \$projectNum) {
id
fields(first:100) {
nodes {
name,
id,
settings
}
}
}
}
}
"
QUERIED_PROJECT=$(exec gh api graphql -f query="${OPTIONS_QUERY}" -F org="$OWNER" -F projectNum="$PROJECT_NUM")
PROJECT_ID=$(echo "$QUERIED_PROJECT" | jq ".data.${PROJECT_TYPE}.projectNext.id")
FIELD_ID=$(echo "$QUERIED_PROJECT" | jq ".data.${PROJECT_TYPE}.projectNext.fields.nodes[] | select(.name == \"Status\") | .id")
OPTIONS=$(echo "$QUERIED_PROJECT" | jq ".data.${PROJECT_TYPE}.projectNext.fields.nodes[] | select(.name == \"Status\") | .settings | fromjson | .options")
ADD_ISSUE_MUTATION="
mutation(\$projectId: ID!, \$contentId: ID!) {
addProjectNextItem(input: {
projectId: \$projectId
contentId: \$contentId
}) {
projectNextItem {
id
}
}
}
"
UPDATE_ISSUE_MUTATION="
mutation(\$projectId: ID!, \$itemId: ID!, \$fieldId: ID!, \$fieldVal: String!) {
updateProjectNextItemField(input: {
projectId: \$projectId
itemId: \$itemId
fieldId: \$fieldId
value: \$fieldVal
}) {
projectNextItem {
id
}
}
}
"
IFS=$'\n'
# shellcheck disable=SC2068
echo -ne "Adding Issues"
for issue in $ISSUES; do
echo -ne "."
CONTENT_ID=$(echo "$issue" | jq -r ".id")
STATUS=$(echo "$issue" | jq -r ".status")
FIELD_VAL=$(echo "$OPTIONS" | jq ".[] | select(.name == \"${STATUS}\") | .id" | sed -e 's/^"//' -e 's/"$//')
ITEM_ID=$(gh api graphql -f query="${ADD_ISSUE_MUTATION}" -F projectId="$PROJECT_ID" -F contentId="$CONTENT_ID" -q ".data.addProjectNextItem.projectNextItem.id")
gh api graphql -f query="${UPDATE_ISSUE_MUTATION}" -F projectId="$PROJECT_ID" -F itemId="\"$ITEM_ID\"" -F fieldId="$FIELD_ID" -F fieldVal="$FIELD_VAL" --silent
done
echo " Success!"
|
def drop_cap(string: str) -> str:
    """Capitalize every word longer than two characters and always capitalize
    the first word; other short words are left untouched.

    Bug fix: the original raised IndexError for empty or all-whitespace input
    because it unconditionally indexed the first word.

    Note: str.split() collapses runs of whitespace, so the result is joined
    with single spaces regardless of the original spacing (original behavior).
    """
    words = string.split()
    if not words:
        return ''
    modified_words = [word.capitalize() if len(word) > 2 else word for word in words]
    # The first word is capitalized unconditionally, even when short.
    modified_words[0] = modified_words[0].capitalize()
    return ' '.join(modified_words)
<reponame>HargurBedi/LSDP_Lab
import java.io.*;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.input.*;
import org.apache.hadoop.mapreduce.lib.output.*;
// Hadoop MapReduce job that keeps only (key, value) records whose second
// comma-separated field of the key matches the student name given in args[2].
public class student{
// Filtering mapper: emits the record unchanged when the name matches.
public static class Map extends Mapper<Text,Text,Text,Text>
{
public void map(Text key, Text value, Context context) throws IOException, InterruptedException
{
// Name to match, passed via the job Configuration (set in main below).
String name = context.getConfiguration().get("name");
String[] elements = key.toString().split(",");
if(name.equalsIgnoreCase(elements[1]))
context.write(key,value);
}
}
// args: [0] input path, [1] output path, [2] student name to filter on.
public static void main(String[] args) throws Exception
{
Configuration conf=new Configuration();
conf.set("name",args[2]);
// NOTE(review): `new Job(conf, ...)` is deprecated in modern Hadoop;
// Job.getInstance(conf, "student") is the replacement — confirm the
// Hadoop version targeted by this lab.
Job job=new Job(conf,"student");
job.setInputFormatClass(KeyValueTextInputFormat.class);
job.setJarByClass(student.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(Text.class);
job.setMapperClass(Map.class);
job.setOutputFormatClass(TextOutputFormat.class);
FileInputFormat.addInputPath(job,new Path(args[0]));
FileOutputFormat.setOutputPath(job,new Path(args[1]));
// NOTE(review): the boolean result is ignored, so the JVM exits 0 even
// when the job fails; System.exit(job.waitForCompletion(true)?0:1) is
// the usual pattern.
job.waitForCompletion(true);
}
}
import Vue from 'vue';
import Vuex from 'vuex';
Vue.use(Vuex);
// Root Vuex store: a single numeric counter plus the mutation that bumps it.
export default new Vuex.Store({
    state: {
        count: 0
    },
    mutations: {
        // INCREMENT: advance the counter by one (commit('INCREMENT')).
        INCREMENT(state) {
            state.count += 1
        }
    }
});
|
<reponame>jumbo4213/nest-serve-template
export * from './device.entity';
export * from './user.entity';
export * from './session.entity';
export * from './wx-account.entity';
|
import config from '@config/index'
import Ipfs from 'ipfs-mini' // https://github.com/silentcicero/ipfs-mini
// Shared ipfs-mini client pointing at the configured gateway.
const IPFS = new Ipfs(config.ipfsGateway)

// Build a public gateway URL for a given content identifier.
IPFS.getLink = function (CID) {
    return IPFS.provider.protocol + '://' + IPFS.provider.host + '/ipfs/' + CID
}

// Wrap a data URI in a minimal SVG document and upload it to IPFS.
// Returns both the new CID and its gateway link.
IPFS.uploadImageAsSvg = async function (dataURI:string) {
    const fileContents = `<svg xmlns="http://www.w3.org/2000/svg"><image href="${dataURI}" /></svg>`
    const CID = await IPFS.add(fileContents)
    const link = IPFS.getLink(CID)
    return { CID, link }
}

// Fetch an SVG previously stored by uploadImageAsSvg and extract the embedded
// data URI; returns '' when the content does not have that exact shape.
IPFS.getImage = async function (CID) {
    const svg = await IPFS.cat(CID)
    return svg?.split('<image href="')[1]?.split('"')[0] || ''
}

export default IPFS
|
<filename>app/library/actors/RefreshActor.scala
package library.actors
import akka.actor.{Actor, Props}
import library.Engine
import library.utils.dateTimeNow
import repositories.StateRepository
import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{Failure, Success}
// Companion: Props factory and the message protocol for RefreshActor.
object RefreshActor {
  def props = Props[RefreshActor]

  // Message asking the actor to refresh the library state via the external API.
  case class Refresh()
}

// Actor that triggers an API refresh unless a refresh ("maj") started less
// than 8 hours ago is still recorded as in progress.
class RefreshActor extends Actor {
  import RefreshActor._

  // Child actor responsible for persisting state transitions.
  val stateUpdateActor = context.actorOf(StateUpdateActor.props, "stateupdate-actor")

  def receive = {
    case Refresh() =>
      // Skip when a recent refresh is still in progress; otherwise mark the
      // state as "maj", fire the API request, and report success/failure.
      if (StateRepository.state.message == "state.majinprogress" && StateRepository.state.date.plusHours(8).isAfter(dateTimeNow.now()) ) {}
      else {
        stateUpdateActor ! "maj"
        Engine.sendRequestToApi().onComplete {
          case Success(e) => stateUpdateActor ! "success"
          case Failure(e) => {
            println(e)
            stateUpdateActor ! "failure"}
        }
      }
  }
}
|
#!/bin/bash
# Dependency-installation section: installs build tooling, LLVM/clang 9,
# Go and assorted libraries via apt for building polycube.

# Resolve the directory of this script unless the caller already set DIR.
if [ -z "$DIR" ]
then
    DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
fi

mkdir -p $WORKDIR

$SUDO apt update
$SUDO bash -c "apt install --allow-unauthenticated -y wget gnupg2 software-properties-common"

# Release in which has been natively introduced support for golang-go (Ubuntu >= 20.04)
os_limit_major="20"
os_limit_minor="04"
# Parse VERSION_ID (e.g. "18.04") from /etc/os-release into two fields.
read -r os_major os_minor<<<$(grep -Po 'VERSION_ID="\K.*?(?=")' /etc/os-release | sed 's/\./ /g')

# Checking whether the major release is lower or the minor
if (( os_major < os_limit_major || ( os_major == os_limit_major && os_minor < os_limit_minor ) ))
then
    # Older releases need the backports PPA for a recent Go toolchain.
    $SUDO add-apt-repository ppa:longsleep/golang-backports -y || true
fi

$SUDO apt update

PACKAGES=""
PACKAGES+=" git" # needed to clone dependencies
PACKAGES+=" build-essential cmake" # provides compiler and other compilation tools
PACKAGES+=" bison flex libelf-dev" # bcc dependencies
PACKAGES+=" libllvm-9-ocaml-dev libllvm9 llvm-9 llvm-9-dev llvm-9-doc llvm-9-examples llvm-9-runtime clang-9 lldb-9 lld-9 llvm-9-tools libclang-9-dev"
# NOTE(review): libllvm9/llvm-9-dev/libclang-9-dev repeat the line above.
PACKAGES+=" libllvm9 llvm-9-dev libclang-9-dev" # bpf tools compilation tool chain
PACKAGES+=" libnl-route-3-dev libnl-genl-3-dev" # netlink library
PACKAGES+=" uuid-dev"
PACKAGES+=" golang-go" # needed for polycubectl and pcn-k8s
PACKAGES+=" pkg-config"
# Removed because of the comment at line L76 (GPG key expired); we had to install this manually
#PACKAGES+=" libyang-dev"
PACKAGES+=" autoconf libtool m4 automake"
PACKAGES+=" libssl-dev" # needed for certificate based security
PACKAGES+=" sudo" # needed for pcn-iptables, when building docker image
PACKAGES+=" kmod" # needed for pcn-iptables, when using lsmod to unload conntrack if not needed
PACKAGES+=" jq bash-completion" # needed for polycubectl bash autocompletion
PACKAGES+=" libpcre3-dev" # needed for libyang
PACKAGES+=" libpcap-dev" # needed for packetcapture filter

if [ "$MODE" == "pcn-k8s" ]; then
    PACKAGES+=" curl" # needed for pcn-k8s to download a binary
    PACKAGES+=" iptables" # only for pcn-k8s
    PACKAGES+=" iproute2" # provides bridge command that is used to add entries in vxlan device
fi

# use non interactive to avoid blocking the install script
$SUDO bash -c "DEBIAN_FRONTEND=noninteractive apt-get install -yq $PACKAGES"
# Build and install libyang v0.14-r1 from source.
# Fix: this cd was accidentally commented out as "# licd $WORKDIR", so the
# clone/build happened in whatever directory the script was run from.
cd $WORKDIR
set +e
if [ ! -d libyang ]; then
    git clone https://github.com/CESNET/libyang.git
fi
cd libyang
git checkout v0.14-r1
mkdir -p build && cd build
cmake ..
make -j $(getconf _NPROCESSORS_ONLN)
$SUDO make install
echo "Install pistache"
# Build and install pistache (REST framework) at a pinned known-good commit.
cd $WORKDIR
set +e
if [ ! -d pistache ]; then
    git clone https://github.com/oktal/pistache.git
fi
cd pistache
# use last known working version
git checkout 117db02eda9d63935193ad98be813987f6c32b33
git submodule update --init
mkdir -p build && cd build
cmake -G "Unix Makefiles" -DCMAKE_BUILD_TYPE=Release -DPISTACHE_USE_SSL=ON ..
make -j $(getconf _NPROCESSORS_ONLN)
$SUDO make install

echo "Install libtins"
# Build and install libtins v3.5 with only the features needed here.
cd $WORKDIR
set +e
if [ ! -d libtins ]; then
    git clone --branch v3.5 https://github.com/mfontanini/libtins.git
fi
cd libtins
mkdir -p build && cd build
cmake -DLIBTINS_ENABLE_CXX11=1 \
    -DLIBTINS_BUILD_EXAMPLES=OFF -DLIBTINS_BUILD_TESTS=OFF \
    -DLIBTINS_ENABLE_DOT11=OFF -DLIBTINS_ENABLE_PCAP=OFF \
    -DLIBTINS_ENABLE_WPA2=OFF -DLIBTINS_ENABLE_WPA2_CALLBACKS=OFF ..
make -j $(getconf _NPROCESSORS_ONLN)
$SUDO make install
# Refresh the dynamic linker cache so the freshly installed libs are found.
$SUDO ldconfig

# Set $GOPATH, if not already set
if [[ -z "${GOPATH}" ]]; then
    mkdir -p $HOME/go
    export GOPATH=$HOME/go
fi
<reponame>eengineergz/Lambda
"use strict";
exports.__esModule = true;
/**
* ES6 Number.isNaN
* https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isNaN
*/
function isNaN(val) {
return typeof val === 'number' && val != val;
}
exports["default"] = isNaN;
|
/**
 * Clockwise spiral-order traversal of a rectangular 2D int array.
 */
public class SpiralTraverse {

    /**
     * Returns the elements of {@code arr} in clockwise spiral order,
     * starting at the top-left corner.
     *
     * <p>Types are fully qualified because this file declares no imports;
     * the original referenced {@code List}/{@code ArrayList} without
     * importing them and therefore did not compile.
     *
     * @param arr rectangular 2D array (rows of equal, non-zero length)
     * @return elements in spiral order
     */
    public static java.util.List<Integer> traverse(int[][] arr) {
        java.util.List<Integer> list = new java.util.ArrayList<>();
        // Set up the four boundaries
        int top = 0;
        int bottom = arr.length - 1;
        int left = 0;
        int right = arr[0].length - 1;
        // Traverse until all of the cells have been visited
        while (top <= bottom && left <= right) {
            // Traverse from left to right
            for (int i = left; i <= right; i++) {
                list.add(arr[top][i]);
            }
            top++;
            // Traverse from top to bottom
            for (int i = top; i <= bottom; i++) {
                list.add(arr[i][right]);
            }
            right--;
            // Traverse from right to left (skip if this row was already consumed)
            if (top <= bottom) {
                for (int i = right; i >= left; i--) {
                    list.add(arr[bottom][i]);
                }
                bottom--;
            }
            // Traverse from bottom to top (skip if this column was already consumed)
            if (left <= right) {
                for (int i = bottom; i >= top; i--) {
                    list.add(arr[i][left]);
                }
                left++;
            }
        }
        return list;
    }

    public static void main(String[] args) {
        // Fix: the original initializer was missing its closing brace
        // ("{7, 8, 9};") and did not compile.
        int[][] arr = {{1, 2, 3},
                       {4, 5, 6},
                       {7, 8, 9}};
        System.out.println(traverse(arr)); // [1, 2, 3, 6, 9, 8, 7, 4, 5]
    }
}
#!/bin/bash -e
#
# Copyright 2018-2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# The scripts creates a the KF Pipelines API python package.
# Requirements: jq and Java
# To install the prerequisites run the following:
#
# # Debian / Ubuntu:
# sudo apt-get install --no-install-recommends -y -q default-jdk jq
#
# # OS X
# brew tap caskroom/cask
# brew cask install caskroom/versions/java8
# brew install jq
# Resolve script location and repo root; the package version comes from the
# repository's top-level VERSION file.
DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null && pwd)"
REPO_ROOT="$DIR/../.."
VERSION="$(cat $REPO_ROOT/VERSION)"
if [ -z "$VERSION" ]; then
    echo "ERROR: $REPO_ROOT/VERSION is empty"
    exit 1
fi

# Download the OpenAPI generator CLI once and cache it in /tmp.
codegen_file=/tmp/openapi-generator-cli.jar
# Browse all versions in: https://repo1.maven.org/maven2/org/openapitools/openapi-generator-cli/
codegen_uri="https://repo1.maven.org/maven2/org/openapitools/openapi-generator-cli/4.3.1/openapi-generator-cli-4.3.1.jar"
if ! [ -f "$codegen_file" ]; then
    curl -L "$codegen_uri" -o "$codegen_file"
fi

pushd "$(dirname "$0")"
CURRENT_DIR="$(pwd)"
DIR="$CURRENT_DIR/python_http_client"
swagger_file="$CURRENT_DIR/swagger/kfp_api_single_file.swagger.json"

echo "Removing old content in DIR first."
rm -rf "$DIR"

echo "Generating python code from swagger json in $DIR."
# Generator config is passed inline via process substitution; $VERSION is
# spliced into the single-quoted JSON.
java -jar "$codegen_file" generate -g python -t "$CURRENT_DIR/python_http_client_template" -i "$swagger_file" -o "$DIR" -c <(echo '{
"packageName": "kfp_server_api",
"packageVersion": "'"$VERSION"'",
"packageUrl": "https://github.com/kubeflow/pipelines"
}')

echo "Copying LICENSE to $DIR"
cp "$CURRENT_DIR/../../LICENSE" "$DIR"

echo "Building the python package in $DIR."
pushd "$DIR"
python3 setup.py --quiet sdist
popd

echo "Adding license header for generated python files in $DIR."
# NOTE(review): assumes the Go bin directory is on PATH so the freshly
# installed addlicense binary is found -- verify in the CI image.
go install github.com/google/addlicense
addlicense "$DIR"

echo "Run the following commands to update the package on PyPI"
echo "python3 -m pip install twine"
echo "python3 -m twine upload --username kubeflow-pipelines $DIR/dist/*"
echo "Please also push local changes to github.com/kubeflow/pipelines"

popd
|
#!/bin/bash
# Uninstall apktool from a macOS system: remove the launcher, the install
# directory, and the pkg receipt.
sudo rm -f /usr/local/bin/apktool
sudo rm -rf /usr/local/apktool
# NOTE(review): "{replace}" is a template placeholder that must be substituted
# with the real package identifier before this script is shipped.
sudo pkgutil --forget {replace}
|
<reponame>mwkrentel/libmonitor<filename>src/mpi_comm_c.c<gh_stars>10-100
/*
* Override MPI_Comm_rank in C/C++.
*
* Copyright (c) 2007-2021, Rice University.
* See the file LICENSE for details.
*
* $Id$
*/
#include "config.h"
#include "common.h"
#include "monitor.h"
/* Signature shared by MPI_Comm_size and MPI_Comm_rank. */
typedef int mpi_comm_fcn_t(void *, int *);

#ifdef MONITOR_STATIC
extern mpi_comm_fcn_t MPI_Comm_size;
extern mpi_comm_fcn_t __real_MPI_Comm_rank;
#endif

/* Lazily-resolved pointers to the underlying MPI implementations. */
static mpi_comm_fcn_t *real_mpi_comm_size = NULL;
static mpi_comm_fcn_t *real_mpi_comm_rank = NULL;

/*
 * In C, MPI_Comm is not always void *, but that seems to be
 * compatible with most libraries.
 */
/*
 * Wrapper around MPI_Comm_rank: forwards to the real function, also queries
 * the communicator size, and reports both via monitor_set_mpi_size_rank().
 * Returns the status of the real MPI_Comm_rank call.
 */
int
MONITOR_WRAP_NAME(MPI_Comm_rank)(void *comm, int *rank)
{
    int size = -1, ret;

    MONITOR_DEBUG("comm = %p\n", comm);
    /* Resolve the real symbols on first use. */
    MONITOR_GET_REAL_NAME(real_mpi_comm_size, MPI_Comm_size);
    MONITOR_GET_REAL_NAME_WRAP(real_mpi_comm_rank, MPI_Comm_rank);
    ret = (*real_mpi_comm_size)(comm, &size);
    /* NOTE(review): the size call's return status is overwritten here. */
    ret = (*real_mpi_comm_rank)(comm, rank);
    monitor_set_mpi_size_rank(size, *rank);
    return (ret);
}
|
<reponame>xeon225/CyanMD
import '../../src/pageComponent/head/index.js'
// Counter used to number dynamically added tabs.
var count=0

// Demo page for the tabbar component: five initial tabs plus controls that
// exercise the component's props and event callbacks. Relies on global Vue
// and lodash (_).
window.vm=new Vue({
    el:'#main',
    data:{
        // Initial tabs: tab0 .. tab4.
        tabbarData:_.times(5,index=>{
            return {
                title:'tab'+index
            }
        }),
        tabPosition:'top',
        tabNav:[false,false],
        tabCol:'auto'
    },
    methods:{
        // Append a new tab with a running number in its title.
        add(){
            this.tabbarData.push({
                title:'new Tab'+(++count)
            })
        },
        // The hooks below log their call arguments; note they are method
        // shorthands (not arrow functions), so `arguments` is available.
        extra(vm,item,index){
            console.log(arguments)
        },
        navItem(){
            console.log(arguments)
        }
    }
})
<filename>hybrid_robot/src/coordinate_transform.cpp
#include <ros/ros.h>
#include <stdio.h>
#include <tf/transform_listener.h>
#include <Eigen/Dense>
/*
 * TF listener utility with three modes, selected by private ROS parameters:
 *   calibration=true, calibrationStep=1:
 *       print the camera_frame1 <- base_link transform once per second,
 *       plus its Euler angles and 4x4 homogeneous matrix.
 *   calibration=true, calibrationStep=2:
 *       accumulate 100 samples of the camera <- robot-base transform and
 *       print the averaged origin and quaternion.
 *   calibration=false:
 *       print the grasp_aubo_frame pose expressed in fix_robotBase_frame
 *       (used for displaying grasps in OpenRave).
 */
int main(int argc, char** argv)
{
    ros::init(argc, argv, "my_tf_listener");
    ros::NodeHandle node("~");
    //ros::NodeHandle node;
    tf::TransformListener listener;
    ros::Rate rate(1); // 1 Hz loop

    // Accumulators used in step 2 to average rotation and translation.
    float r_x = 0.0f;
    float r_y = 0.0f;
    float r_z = 0.0f;
    float r_w = 0.0f;
    float t_x = 0.0f;
    float t_y = 0.0f;
    float t_z = 0.0f;
    int count = 0;

    bool cali;
    int caliStep;
    // Mode selection via private parameters (defaults: calibration step 1).
    node.param("calibration", cali, true);
    node.param("calibrationStep", caliStep, 1);
    // std::cout << "The value of calibration " << cali << std::endl;

    //while (node.ok())
    while (ros::ok())
    {
        // hold the transformation parameters between two coordinate frames
        //tf::StampedTransform result_cali;
        tf::StampedTransform transformPara;
        try
        {
            if (cali)
            {
                if (caliStep == 1)
                {
                    // listener.waitForTransform("inter_robotBase_frame", "endEffectorI5_frame", ros::Time(0), ros::Duration(3.0));
                    listener.waitForTransform("base_link", "camera_frame1", ros::Time(0), ros::Duration(4.0));
                    // For calibration - calculate the transformation between the two frames
                    // (note the lookup is the inverse direction of the wait above).
                    listener.lookupTransform("camera_frame1", "base_link", ros::Time(0), transformPara);
                    // listener.lookupTransform("base_link", "camera_frame1", ros::Time(0), transformPara);
                    // get the value
                    tf::Quaternion rotation = transformPara.getRotation();
                    tf::Vector3 tfVect = transformPara.getOrigin();
                    std::cout << "Calibration at Step 1 - Inverse Quaternion from Auto I5 Robot" << std::endl;
                    ROS_INFO("Origin & Quat: %f %f %f %f %f %f %f", tfVect.x(), tfVect.y(), tfVect.z(), rotation.x(), rotation.y(), rotation.z(), rotation.w());
                    tf::Matrix3x3 mat(rotation);
                    tfScalar y, p, r;
                    mat.getEulerYPR(y, p, r);
                    ROS_INFO("Three Angles: %f %f %f ", r, p, y);
                    // btQuaternion::btMatrix3x3 tfMatrix; // = transformPara.getBasis();
                    // geometry_msgs::Pose robot_pose;
                    // Build the 4x4 homogeneous transform by expanding the
                    // quaternion into a rotation matrix.
                    Eigen::Matrix4f hete_matrix;
                    // for translation
                    hete_matrix(0,3) = tfVect.x();
                    hete_matrix(1,3) = tfVect.y();
                    hete_matrix(2,3) = tfVect.z();
                    hete_matrix(3,3) = 1;
                    // the last row
                    hete_matrix(3,0) = 0;
                    hete_matrix(3,1) = 0;
                    hete_matrix(3,2) = 0;
                    // for rotation part
                    hete_matrix(0,0) = 1 - 2*pow(rotation.y(),2) - 2*pow(rotation.z(),2);
                    hete_matrix(1,0) = 2*(rotation.x()*rotation.y() + rotation.z()*rotation.w());
                    hete_matrix(2,0) = 2*(rotation.x()*rotation.z() - rotation.y()*rotation.w());
                    hete_matrix(0,1) = 2*(rotation.x()*rotation.y() - rotation.z()*rotation.w());
                    hete_matrix(1,1) = 1 - 2*pow(rotation.x(),2) - 2*pow(rotation.z(),2);
                    hete_matrix(2,1) = 2*(rotation.x()*rotation.w() + rotation.y()*rotation.z());
                    hete_matrix(0,2) = 2*(rotation.x()*rotation.z() + rotation.y()*rotation.w());
                    hete_matrix(1,2) = 2*(rotation.y()*rotation.z() - rotation.x()*rotation.w());
                    hete_matrix(2,2) = 1 - 2*pow(rotation.x(),2) - 2*pow(rotation.y(),2);
                    ROS_INFO("Matrix: %f %f %f %f ", hete_matrix(0,0), hete_matrix(0,1), hete_matrix(0,2), hete_matrix(0,3));
                    ROS_INFO("Matrix: %f %f %f %f ", hete_matrix(1,0), hete_matrix(1,1), hete_matrix(1,2), hete_matrix(1,3));
                    ROS_INFO("Matrix: %f %f %f %f ", hete_matrix(2,0), hete_matrix(2,1), hete_matrix(2,2), hete_matrix(2,3));
                    ROS_INFO("Matrix: %f %f %f %f ", hete_matrix(3,0), hete_matrix(3,1), hete_matrix(3,2), hete_matrix(3,3));
                    // ROS_INFO("Matrix: %f %f %f ", tfMatrix.xz(), tfMatrix.yz(), tfMatrix.zz());
                }
                if (caliStep == 2)
                {
                    count++;
                    // wait until the two frames of camera and robot base are available
                    listener.waitForTransform("camera_depth_optical_frame", "depend_robotBase_frame", ros::Time(0), ros::Duration(3.0));
                    // For calibration - calculate the transformation between the two frames
                    listener.lookupTransform("camera_depth_optical_frame", "depend_robotBase_frame", ros::Time(0), transformPara);
                    // get the angle/quaternion
                    tf::Quaternion rotation = transformPara.getRotation();
                    // get the distance between the two frames
                    tf::Vector3 tfVect = transformPara.getOrigin();
                    // Accumulate; the component-wise quaternion mean is only an
                    // approximation of the average rotation.
                    r_x += rotation.x();
                    r_y += rotation.y();
                    r_z += rotation.z();
                    r_w += rotation.w();
                    t_x += tfVect.x();
                    t_y += tfVect.y();
                    t_z += tfVect.z();
                    ROS_INFO("Gathering data %d", count);
                    // After 100 samples, print the averages and reset.
                    if (count == 100)
                    {
                        std::cout << "Calibration at Step 2 - Quaternion of robotBase frame comapring to camera_frame" << std::endl;
                        //ROS_INFO("Quat: %f %f %f %f", );
                        ROS_INFO("Origin & Quat: %f %f %f %f %f %f %f", t_x/float(count), t_y/float(count), t_z/float(count), r_x/float(count), r_y/float(count), r_z/float(count), r_w/float(count));
                        count = 0;
                        t_x = 0.0;
                        t_y = 0.0;
                        t_z = 0.0;
                        r_x = 0.0;
                        r_y = 0.0;
                        r_z = 0.0;
                        r_w = 0.0;
                    }
                }
            }
            else
            {
                // For xtion_frame.txt & displaying axes in OpenRave
                // wait for two frames: estimate_robot_base_frame and grasp_aubo_frame available
                listener.waitForTransform("fix_robotBase_frame", "grasp_aubo_frame", ros::Time(0), ros::Duration(3.0));
                // transform the coordiantes from grasp_aubo_frame to estimate_robot_base_frame
                // For display grasp in OpenRave
                listener.lookupTransform("fix_robotBase_frame", "grasp_aubo_frame", ros::Time(0), transformPara);
                // get the value
                tf::Quaternion rotation = transformPara.getRotation();
                tf::Vector3 tfVect = transformPara.getOrigin();
                ROS_INFO("Origin & Quat: %f %f %f %f %f %f %f", tfVect.x(), tfVect.y(), tfVect.z(), rotation.x(), rotation.y(), rotation.z(), rotation.w());
            }
        }
        catch (tf::TransformException &ex)
        {
            // Frame not (yet) available: report, back off one second, retry.
            ROS_ERROR("%s",ex.what());
            ros::Duration(1.0).sleep();
            continue;
        }
        rate.sleep();
    }
    return 0;
}
<gh_stars>0
package com.nortal.spring.cw.core.web.component.list;
/**
* @author <NAME>
*/
public class ListPagination {

    // Currently selected page number of a paginated list view.
    private int currentPage;

    /** Returns the current page number. */
    public int getPageNr() {
        return currentPage;
    }

    /** Sets the current page number. */
    public void setPageNr(int pageNr) {
        currentPage = pageNr;
    }
}
|
<gh_stars>10-100
import { Injectable } from '@angular/core';
import { ButtonLayoutDisplay, DialogLayoutDisplay } from './enums';
@Injectable({
    providedIn: 'root'
})
export class LayoutHelperService {

    /**
     * Returns the CSS classes for a dialog's icon. An explicit
     * iconStyleClass overrides the layout-derived default; when neither
     * applies (e.g. DialogLayoutDisplay.NONE), an empty string is returned.
     */
    getIconClasses(layoutType: DialogLayoutDisplay, iconStyleClass: string): string {
        let returnString = '';
        if (iconStyleClass) {
            returnString += iconStyleClass;
            return returnString;
        }
        switch (layoutType) {
            case DialogLayoutDisplay.SUCCESS: {
                returnString += 'ap-icon-success icon-check-circle';
                break;
            }
            case DialogLayoutDisplay.INFO: {
                returnString += 'ap-icon-info icon-info-circle';
                break;
            }
            case DialogLayoutDisplay.WARNING: {
                returnString += 'ap-icon-warning icon-warning';
                break;
            }
            case DialogLayoutDisplay.DANGER: {
                returnString += 'ap-icon-danger icon-times-circle';
                break;
            }
        }
        return returnString;
    }

    /**
     * Returns the CSS classes for a button, prefixed with `perm`.
     * For auto-generated buttons a NONE dialog layout falls back to the
     * PRIMARY button style. Note the switch mixes ButtonLayoutDisplay and
     * DialogLayoutDisplay cases -- assumed to share compatible enum values;
     * TODO confirm against ./enums.
     */
    getButtonClasses(layoutType: ButtonLayoutDisplay | DialogLayoutDisplay | null, perm: string = '', type?: 'auto-button'): string {
        let returnString = perm + ' ';
        if (type === 'auto-button' && layoutType === DialogLayoutDisplay.NONE) {
            layoutType = ButtonLayoutDisplay.PRIMARY;
        }
        switch (layoutType) {
            case ButtonLayoutDisplay.SUCCESS: {
                returnString += 'ed-btn-success';
                break;
            }
            case ButtonLayoutDisplay.INFO: {
                returnString += 'ed-btn-info';
                break;
            }
            case ButtonLayoutDisplay.WARNING: {
                returnString += 'ed-btn-warning';
                break;
            }
            case ButtonLayoutDisplay.DANGER: {
                returnString += 'ed-btn-danger';
                break;
            }
            case ButtonLayoutDisplay.DARK: {
                returnString += 'ed-btn-dark';
                break;
            }
            case ButtonLayoutDisplay.LIGHT: {
                returnString += 'ed-btn-light';
                break;
            }
            case ButtonLayoutDisplay.PRIMARY: {
                returnString += 'ed-btn-primary';
                break;
            }
            case ButtonLayoutDisplay.SECONDARY: {
                returnString += 'ed-btn-secondary';
                break;
            }
            case ButtonLayoutDisplay.LINK: {
                returnString += 'ed-btn-link';
                break;
            }
            case ButtonLayoutDisplay.CUSTOM_ONE: {
                returnString += 'ed-btn-customone';
                break;
            }
            case ButtonLayoutDisplay.CUSTOM_TWO: {
                returnString += 'ed-btn-customtwo';
                break;
            }
            case ButtonLayoutDisplay.CUSTOM_THREE: {
                returnString += 'ed-btn-customthree';
                break;
            }
            case ButtonLayoutDisplay.CUSTOM_FOUR: {
                returnString += 'ed-btn-customfour';
                break;
            }
            case ButtonLayoutDisplay.CUSTOM_FIVE: {
                returnString += 'ed-btn-customfive';
                break;
            }
        }
        return returnString;
    }

    /**
     * Returns the CSS classes for the dialog box itself, prefixed with
     * `perm`. Unknown layout types yield just the prefix plus a space.
     */
    getBoxClasses(layoutType: DialogLayoutDisplay, perm: string = ''): string {
        let returnString = perm + ' ';
        switch (layoutType) {
            case DialogLayoutDisplay.NONE: {
                returnString += 'standard-dialog';
                break;
            }
            case DialogLayoutDisplay.SUCCESS: {
                returnString += 'success-dialog';
                break;
            }
            case DialogLayoutDisplay.INFO: {
                returnString += 'info-dialog';
                break;
            }
            case DialogLayoutDisplay.WARNING: {
                returnString += 'warning-dialog';
                break;
            }
            case DialogLayoutDisplay.DANGER: {
                returnString += 'danger-dialog';
                break;
            }
            case DialogLayoutDisplay.CUSTOM_ONE: {
                returnString += 'customone-dialog';
                break;
            }
            case DialogLayoutDisplay.CUSTOM_TWO: {
                returnString += 'customtwo-dialog';
                break;
            }
            case DialogLayoutDisplay.CUSTOM_THREE: {
                returnString += 'customthree-dialog';
                break;
            }
            case DialogLayoutDisplay.CUSTOM_FOUR: {
                returnString += 'customfour-dialog';
                break;
            }
            case DialogLayoutDisplay.CUSTOM_FIVE: {
                returnString += 'customfive-dialog';
                break;
            }
        }
        return returnString;
    }
}
|
// (C) 2019-2020 GoodData Corporation
import { IWorkspaceDatasetsService, IDataset } from "@gooddata/sdk-backend-spi";
import { TigerAuthenticatedCallGuard } from "../../../types";
/**
 * Tiger backend implementation of the workspace datasets service.
 */
export class TigerWorkspaceDataSets implements IWorkspaceDatasetsService {
    constructor(private readonly authCall: TigerAuthenticatedCallGuard, public readonly workspace: string) {}

    /**
     * Returns the datasets of this workspace.
     * NOTE(review): currently a stub -- always resolves to an empty array
     * (the authenticated call is made only to enforce authentication).
     */
    public async getDatasets(): Promise<IDataset[]> {
        return this.authCall(async () => []);
    }
}
|
def _histogram_plot_transf(self):
    """Build and save a histogram plot of this object's data after applying
    the configured transformations.

    Assumes ``perform_transformations`` and ``HistogramPlotTransf`` are
    available in the enclosing module -- TODO confirm their contracts
    (neither is visible here).
    """
    # Apply the transformations to the raw data before plotting.
    transformed_data = perform_transformations(self.data)  # Perform necessary transformations
    # ax=None: presumably the plot class creates its own axes -- verify.
    plot = HistogramPlotTransf(ax=None, path=self.path, data=transformed_data)  # Create histogram plot with transformed data
    plot.save()  # Save the generated histogram plot to the specified path
#!/bin/bash
#
# Description : Arx Libertatis (AKA Arx Fatalis)
# Author : Jose Cerrejon Gonzalez (ulysess@gmail_dot._com)
# Version : 1.0.2 (17/Aug/20)
# Compatible : Raspberry Pi 4 (fail)
#
# Help : https://wiki.arx-libertatis.org/Downloading_and_Compiling_under_Linux
# For fans : https://www.reddit.com/r/ArxFatalis/
# Issue : ../sysdeps/unix/sysv/linux/read.c: No such file or directory
#
# Load shared helpers (local checkout first, repo root next, then fetch).
. ./scripts/helper.sh || . ./helper.sh || wget -q 'https://github.com/jmcerrejon/PiKISS/raw/master/scripts/helper.sh'
clear
check_board || { echo "Missing file helper.sh. I've tried to download it for you. Try to run the script again." && exit 1; }

# Install locations, package lists, and upstream artifact URLs.
INSTALL_DIR="$HOME/games"
PACKAGES=( libglew-dev )
PACKAGES_DEV=( zlib1g-dev libfreetype6-dev libopenal1 libopenal-dev mesa-common-dev libgl1-mesa-dev libboost-dev libepoxy-dev libglm-dev libcppunit-dev libglew-dev libsdl2-dev )
CONFIG_DIR="$HOME/.local/share/arx"
BINARY_URL="https://www.littlecarnage.com/arx_rpi2.tar.gz"
SOURCE_CODE_URL="https://github.com/ptitSeb/ArxLibertatis.git"
SOURCE_CODE_OFFICIAL_URL="https://github.com/arx/ArxLibertatis.git" # Doesn't work for now
DATA_URL="https://archive.org/download/rpi_share/arx_demo_en.tgz"
ICON_URL="https://github.com/arx/ArxLibertatisData/blob/master/icons/arx-libertatis-32.png?raw=true"
# Temp file (PID-suffixed) that captures dialog's menu selection.
INPUT=/tmp/arx.$$
# Launch the installed game after confirming the binary exists.
runme() {
    if [ ! -f "$INSTALL_DIR"/arx/arx ]; then
        echo -e "\nFile does not exist.\n· Something is wrong.\n· Try to install again."
        exit_message
    fi
    echo
    read -p "Press [ENTER] to run the game..."
    cd "$INSTALL_DIR"/arx && ./arx
    exit_message
}

# Delete every file/directory the installer created.
remove_files() {
    # TODO Remove files installed with sudo make install (maybe if I make a .deb dpkg, easier)
    rm -rf ~/.local/share/applications/arx.desktop ~/.local/share/arx "$CONFIG_DIR"/arx-libertatis-32.png \
        "$INSTALL_DIR"/arx /usr/local/share/blender/scripts/addons/arx /usr/local/share/games/arx
}

# Interactive uninstall with a final check that the directory is gone.
uninstall() {
    read -p "Do you want to uninstall Arx Libertatis (y/N)? " response
    if [[ $response =~ [Yy] ]]; then
        remove_files
        if [[ -e "$INSTALL_DIR"/arx ]]; then
            echo -e "I hate when this happens. I could not find the directory, Try to uninstall manually. Apologies."
            exit_message
        fi
        echo -e "\nSuccessfully uninstalled."
        exit_message
    fi
    exit_message
}

# If a previous install exists, offer to uninstall instead of reinstalling.
if [[ -d "$INSTALL_DIR"/arx ]]; then
    echo -e "Arx Libertatis already installed.\n"
    uninstall
    exit 1
fi
# Download the app icon and create a desktop launcher (if none exists yet).
# The heredoc delimiter is unquoted, so ${INSTALL_DIR}/${CONFIG_DIR} expand.
generate_icon() {
    echo -e "\nGenerating icon..."
    mkdir -p "$CONFIG_DIR"
    wget -q "$ICON_URL" -O "$CONFIG_DIR"/arx-libertatis-32.png
    if [[ ! -e ~/.local/share/applications/arx.desktop ]]; then
        cat <<EOF >~/.local/share/applications/arx.desktop
[Desktop Entry]
Name=Arx Fatalis (AKA Arx Libertatis)
Exec=${INSTALL_DIR}/arx/arx
Icon=${CONFIG_DIR}/arx-libertatis-32.png
Type=Application
Comment=Arx Fatalis is set on a world whose sun has failed, forcing the above-ground creatures to take refuge in caverns.
Categories=Game;ActionGame;
EOF
    fi
}
# Fix: Exec= previously hardcoded /home/pi/games/arx/arx, which broke the
# launcher for any user other than "pi"; INSTALL_DIR is $HOME/games.
# Recreate the libndi symlinks so the linker resolves the 4.0.0 library.
fix_libndi() {
    echo -e "\nFixing library libndi.so\n"
    sudo rm -f /usr/lib/libndi.so
    sudo ln -r -s /usr/lib/libndi.so.4.0.0 /usr/lib/libndi.so
    sudo rm -f /usr/lib/libndi.so.4
    sudo ln -r -s /usr/lib/libndi.so.4.0.0 /usr/lib/libndi.so.4
}

# The prebuilt binary expects libGLEW.so.1.7; provide it as a symlink to the
# distro's libGLEW when it is not already present.
fix_libGLEW1.7() {
    if [[ -f /usr/lib/arm-linux-gnueabihf/libGLEW.so.1.7 ]]; then
        return 0
    fi
    echo -e "\nLinking libGLEW.so -> libGLEW.so.1.7\n"
    sudo ln -s /usr/lib/arm-linux-gnueabihf/libGLEW.so /usr/lib/arm-linux-gnueabihf/libGLEW.so.1.7
}

# Build Arx Libertatis from source (known to fail on RPi 4 per the header
# comment; kept for reference). Flags target a Cortex-A72 with NEON.
compile() {
    installPackagesIfMissing "${PACKAGES_DEV[@]}"
    fix_libndi
    mkdir -p ~/sc && cd "$_"
    git clone "$SOURCE_CODE_URL" arx && cd "$_"
    mkdir build && cd "$_"
    CFLAGS="-fsigned-char -marm -march=armv8-a+crc -mtune=cortex-a72 -mfpu=neon-fp-armv8 -mfloat-abi=hard" CXXFLAGS="-fsigned-char" cmake .. -DBUILD_TOOLS=off -DBUILD_IO_LIBRARY=off -DBUILD_CRASHREPORTER=off -DICON_TYPE=none
    if [[ -f ~/sc/arx/build/CMakeFiles/CMakeError.log ]]; then
        echo -e "\n\nERROR!!. I can't continue with the command make. Check ~/sc/arx/build/CMakeFiles/CMakeError.log\n"
        exit 1
    fi
    make -j"$(getconf _NPROCESSORS_ONLN)"
}

# Download and unpack the prebuilt game binary, then fix its libGLEW link.
install_binaries() {
    echo -e "\nInstalling binary files..."
    download_and_extract "$BINARY_URL" "$INSTALL_DIR"
    rm "$INSTALL_DIR/Arx Fatalis.sh"
    chmod +x "$INSTALL_DIR"/arx/arx*
    fix_libGLEW1.7
}

end_message() {
    echo -e "\nDone!. Click on Menu > Games > Arx Libertatis."
    runme
}

# Fetch the game data (demo by default, full data if chosen) into $HOME.
download_data_files() {
    download_and_extract "$DATA_URL" ~
}
# Let the user pick the language of the full-game data files, or keep the
# shareware demo. On English/Spanish, DATA_URL is replaced with the chosen
# URL; on Exit the default (demo) DATA_URL is kept.
choose_data_files() {
    while true; do
        dialog --clear \
            --title "[ Arx Libertatis Data files ]" \
            --menu "Choose language:" 11 68 3 \
            English "Install the game with English text and voices." \
            Spanish "Install the game with Spanish text and voices." \
            Exit "Continue with Shareware version" 2>"${INPUT}"
        menuitem=$(<"${INPUT}")
        case $menuitem in
        English) clear && DATA_URL=$(extract_url_from_file 7) && return 0 ;;
        Spanish) clear && DATA_URL=$(extract_url_from_file 6) && return 0 ;;
        # Fix: the Exit branch previously fell through and re-opened the menu
        # forever instead of continuing with the shareware version.
        Exit) clear && return 0 ;;
        esac
    done
}
# Main install flow: binaries + icon first, then optionally the full-game
# data files (otherwise the shareware demo is downloaded).
install() {
    mkdir -p "$INSTALL_DIR"
    installPackagesIfMissing "${PACKAGES[@]}"
    install_binaries
    generate_icon
    echo
    read -p "Do you have an original copy of Arx Fatalis (If not, a Shareware version will be installed) (y/N)?: " response
    if [[ $response =~ [Yy] ]]; then
        choose_data_files
        message_magic_air_copy
    fi
    download_data_files
    end_message
}

echo "Install Arx Libertatis (Port of Arx Fatalis)"
echo "============================================"
echo
echo " · Install path: $INSTALL_DIR/arx"
echo " · NOTE: It's NOT the latest compiled from source. This binary proceed from https://www.littlecarnage.com/"
echo " · I've tried to compile Arx Libertatis for 3 days with no success. I'll try it (or ptitSeb) in a long time."
echo
read -p "Press [Enter] to continue..."
install
|
<filename>policy/src/main/java/brooklyn/enricher/DeltaEnricher.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package brooklyn.enricher;
import static brooklyn.util.JavaGroovyEquivalents.elvis;
import brooklyn.catalog.Catalog;
import brooklyn.enricher.basic.AbstractTransformingEnricher;
import brooklyn.entity.Entity;
import brooklyn.event.AttributeSensor;
import brooklyn.event.Sensor;
import brooklyn.event.SensorEvent;
import brooklyn.util.flags.TypeCoercions;
/**
* Converts an absolute sensor into a delta sensor (i.e. the diff between the current and previous value)
*/
//@Catalog(name="Delta", description="Converts an absolute sensor into a delta sensor "
// + "(i.e. the diff between the current and previous value)")
public class DeltaEnricher<T extends Number> extends AbstractTransformingEnricher<T> {
    // Previous sensor value; the first event is diffed against 0.
    // NOTE(review): access is not synchronized -- assumes events are
    // delivered to this enricher serially; confirm the delivery model.
    Number last = 0;

    public DeltaEnricher() { // for rebinding
    }

    /**
     * @param producer entity whose sensor is observed
     * @param source   absolute-valued sensor to transform
     * @param target   sensor on which the per-event delta is published
     */
    public DeltaEnricher(Entity producer, Sensor<T> source, AttributeSensor<T> target) {
        super(producer, source, target);
    }

    // Publishes (current - previous) on the target sensor; null event
    // values are treated as 0 via elvis().
    @Override
    public void onEvent(SensorEvent<T> event) {
        Number current = elvis(event.getValue(), 0);
        double newVal = current.doubleValue() - last.doubleValue();
        entity.setAttribute((AttributeSensor<T>)target, TypeCoercions.coerce(newVal, target.getTypeToken()));
        last = current;
    }
}
|
<gh_stars>0
/**
 * Validate a license-plate string: an optional CJK (Chinese) character,
 * one ASCII letter, a hyphen, then exactly five word characters,
 * e.g. "京A-12345" or "A-12345".
 */
function check_car_number(plate) {
    var pattern = /^[\u4E00-\u9FA5]?[a-zA-Z]-\w{5}$/;
    return pattern.test(plate);
}
// Validate the vehicle-registration form (global `form1`): username, plate
// number, national ID, phone, address, vehicle info and route must all be
// present and well-formed. Returns false on the first invalid field after
// alerting and focusing it; falls through (undefined) when all checks pass.
function check_car(){
    if (form1.username.value == ""){
        alert("用户名不能为空!");
        form1.username.focus();
        return (false);
    }
    // Validate the license plate number
    if (form1.car_number.value==""){
        alert("车牌号码不能为空!");
        form1.car_number.focus();
        return (false);
    }
    // Plate must be exactly 8 characters with digits in positions 4-7.
    if(form1.car_number.value.length!=8||isNaN(form1.car_number.value.substr(3,4))){
        alert("您输入的车牌号码格式不正确!");
        form1.car_number.focus();
        return (false);
    }
    // Full pattern check; note this branch returns undefined, not false.
    if(!check_car_number(form1.car_number.value)){
        alert("您输入的车牌号码格式不正确!");
        form1.car_number.focus();return;
    }
    // Validate the national ID number
    if(form1.user_number.value==""){
        alert("请输入身份证号码!");
        form1.user_number.focus();
        return (false);
    }
    // Chinese ID numbers are 15 (legacy) or 18 characters long.
    if(!(form1.user_number.value.length==15 || form1.user_number.value.length==18)){
        alert("身份证号只能为15位或18位!");
        form1.user_number.focus();
        return (false);
    }
    // Validate the phone number
    if(form1.user_tel.value==""){
        alert("请输入电话号码!");
        form1.user_tel.focus();
        return (false);
    }
    // Validate the home address
    if(form1.address.value==""){
        alert("请输入家庭地址!");
        form1.address.focus();
        return (false);
    }
    // Vehicle description
    if(form1.car_content.value==""){
        alert("请输入车辆信息!");
        form1.car_content.focus();
        return (false);
    }
    // Driving route
    if(form1.car_road.value==""){
        alert("请输入车辆行驶路线!");
        form1.car_road.focus();
        return (false);
    }
}
// Validate the admin password-change form (global `form1`): username, old
// password, and two matching new passwords are required. Returns false on
// the first invalid field after alerting and focusing it; falls through
// (undefined) when everything is valid.
function check_admin(){
    if(form1.admin_user.value==""){
        alert("请输入用户名!");
        form1.admin_user.focus();
        return (false);
    }
    if(form1.admin_pass.value==""){
        alert("请输入旧密码!");
        form1.admin_pass.focus();
        return (false);
    }
    if(form1.admin_new_pass.value==""){
        alert("请输入新密码!");
        form1.admin_new_pass.focus();
        return (false);
    }
    if(form1.admin_new_pass2.value==""){
        alert("请输入新密码!");
        form1.admin_new_pass2.focus();
        return (false);
    }
    // Fix: the original referenced "forms1.admin_new_pass2" here, an
    // undefined identifier that threw a ReferenceError on every comparison.
    if(form1.admin_new_pass.value!=form1.admin_new_pass2.value){
        alert("您输入的新密码与确认密码不符!");
        form1.admin_new_pass2.focus();
        return (false);
    }
}
// Require both route selectors (global `form1`) to be non-empty; returns
// false on the first empty one after alerting and focusing it, otherwise
// falls through (undefined).
function check_select_car(){
    var fields = ['select1', 'select2'];
    for (var i = 0; i < fields.length; i++) {
        var input = form1[fields[i]];
        if (input.value === "") {
            alert("请输入车辆路线!");
            input.focus();
            return (false);
        }
    }
}
#!/usr/bin/env bash
# Run the wrapped command; if its output mentions circlize's "reduce"
# failure, patch vj_pairing_plot.r to pass reduce = 0 and re-run the
# command line reported in the [RUtil] log line.

# run the command and capture the stdout
# Fix: "$@" (quoted) preserves arguments that contain spaces; the unquoted
# $@ word-split them.
out=$(command "$@")
echo "$out"
if [[ $(echo "$out" | grep -c reduce) -gt 0 ]]; then
    echo ""
    echo "Original Rscript failed, try reduce = 0 ..."
    echo "Modifying vj_pairing_plot.r ..."
    sed -i 's/mat, annotationTrack/mat, reduce = 0, annotationTrack/' vj_pairing_plot.r
    cmd=$(echo "$out" | grep "\\[RUtil\\] Executing" | sed 's/\[RUtil\] Executing //')
    echo "Run again: $cmd"
    # $cmd is deliberately unquoted: it holds a whole command line that
    # must be word-split into program + arguments.
    command $cmd
else
    echo ""
    echo "Not a 'reduce' failure for original script, nothing to do."
fi
|
# Read-only API endpoint: lists the runs recorded for a game's category.
class Api::V3::Games::Categories::RunsController < Api::V3::ApplicationController
  before_action :set_game, only: [:index]
  before_action :set_category, only: [:index]
  before_action :set_runs, only: [:index]

  # GET /games/:game_id/categories/:category_id/runs (paginated).
  def index
    runs = paginate @runs
    render json: Api::V3::RunBlueprint.render(runs, root: :runs)
  end

  private

  # Resolve the game by its speedrun.com shortname first, falling back to
  # the primary key; renders a 404 payload when neither matches.
  def set_game
    @game = Game.joins(:srdc).find_by(speedrun_dot_com_games: {shortname: params[:game_id]})
    @game ||= Game.find(params[:game_id])
  rescue ActiveRecord::RecordNotFound
    render status: :not_found, json: {
      status: 404,
      message: "Game with shortname or id '#{params[:game_id]}' not found."
    }
  end

  # Find the requested category within the resolved game (404 on miss).
  def set_category
    @category = @game.categories.find(params[:category_id])
  rescue ActiveRecord::RecordNotFound
    render status: :not_found, json: {
      status: 404,
      message: "Category with id '#{params[:category_id]}' not found for game '#{params[:game_id]}'."
    }
  end

  # Preload associations used by the blueprint to avoid N+1 queries.
  def set_runs
    @runs = @category.runs.includes(:game, :category, :user)
  end
end
|
<gh_stars>0
import React, { useState, useEffect, useRef } from 'react';
import { Platform, StyleSheet, View, Text, TextInput, TouchableOpacity, SafeAreaView, KeyboardAvoidingView, Image, Alert, ActivityIndicator } from 'react-native';
import { AntDesign, Ionicons } from '@expo/vector-icons';
import Colors from '../constants/Colors';
import { useNavigation } from '@react-navigation/core';
import * as ImagePicker from 'expo-image-picker';
import { useDispatch, useSelector } from 'react-redux';
import { StoreStateType } from '../store/types';
import { axiosHandler, getData, tokenName, tokenType } from '../helper';
import { CATEGORY_POSTS_URL, CATEGORY_URL, POST_URL, TIMELINE_POSTS_URL } from '../urls';
import { CategoryType } from '../types';
import { updatePostsList } from '../store/posts/actionCreators';
// Screen for composing a new post: choose a category, enter text and
// optionally attach an image from the media library or the camera.
export default function NewPostScreen() {
  const navigation = useNavigation();
  // Redux state: the authenticated user record.
  const { user } = useSelector(mapStateToProps);
  const [postText, setPostText] = useState<string>('');
  // Local URI of the attached image ('' means none).
  const [image, setImage] = useState<string>('');
  const [error, setError] = useState<string | null>(null);
  const [submitting, setSubmitting] = useState<boolean>(false);
  const [categories, setCategories] = useState<CategoryType[]>([]);
  const [selectedCategory, setSelectedCategory] = useState<CategoryType | null>(null);
  // NOTE(review): dispatch is declared but not used in this component as
  // shown — confirm whether updatePostsList was meant to be dispatched.
  const dispatch = useDispatch();

  // Let the user pick an image from the media library.
  const onPickImage = async () => {
    // Ask for permission
    // NOTE(review): this async IIFE is not awaited, so the picker below can
    // launch before the permission request resolves — confirm intended.
    (async () => {
      if (Platform.OS !== 'web') {
        const { status } = await ImagePicker.requestMediaLibraryPermissionsAsync();
        if (status !== 'granted') {
          alert('Sorry, we need camera roll permissions to make this work!');
        }
      }
    })();

    let result = await ImagePicker.launchImageLibraryAsync({
      mediaTypes: ImagePicker.MediaTypeOptions.Images,
      allowsEditing: true,
      aspect: [4, 3],
      quality: 1,
    });

    if (!result.cancelled) {
      setImage(result.uri);
    }
  }

  // Let the user take a new photo with the camera.
  const onOpenCamera = async () => {
    // Ask for permission
    // NOTE(review): same un-awaited permission IIFE as in onPickImage.
    (async () => {
      if (Platform.OS !== 'web') {
        const { status } = await ImagePicker.requestCameraPermissionsAsync();
        if (status !== 'granted') {
          alert('Sorry, we need camera roll permissions to make this work!');
        }
      }
    })();

    let result = await ImagePicker.launchCameraAsync({
      mediaTypes: ImagePicker.MediaTypeOptions.Images,
      allowsEditing: true,
      aspect: [4, 3],
      quality: 1,
    });

    if (!result.cancelled) {
      setImage(result.uri);
    }
  }

  // Uploads the post (text, category id, optional image) as multipart form
  // data; on success alerts and navigates back to the home screen.
  const onSubmitPost = async () => {
    setSubmitting(true);
    const formData = new FormData();

    if (image && image !== '') {
      // Infer the type of the image
      let filename: string = image.split('/').pop() || '';
      let match = /\.(\w+)$/.exec(filename);
      let type = match ? `image/${match[1]}` : `image`;
      formData.append('image', {uri: image, name: filename, type});
    }
    formData.append('text', postText);
    // NOTE(review): with no selection this falls back to categories[0] and
    // will throw if the category list is still empty — confirm guarded.
    formData.append('category_id', selectedCategory?.id || categories[0].id);

    const tokenString = await getData(tokenName);
    if (!tokenString) {
      // No stored token: force re-authentication.
      navigation.navigate('Login');
      return;
    }
    const token: tokenType = JSON.parse(tokenString);
    const response = await axiosHandler({
      url: POST_URL,
      method: 'POST',
      data: formData,
      extra: { 'Content-Type': 'multipart/form-data' },
      token: token.access_token,
    })?.catch(e => {
      setError(e.message);
    });

    if (response) {
      Alert.alert(
        'Success',
        'Your post has been shared.',
        [{
          text: 'Ok',
          onPress: () => navigation.navigate('HomeScreen')
        }]
      );
    }
    setSubmitting(false);
  }

  // Fetches the categories a post can be filed under.
  const getAllCategories = async () => {
    const tokenString = await getData(tokenName);
    if (!tokenString) {
      navigation.navigate('Login');
      return;
    }
    const token: tokenType = JSON.parse(tokenString);
    const response = await axiosHandler({
      url: CATEGORY_URL,
      method: 'GET',
      token: token.access_token,
    })?.catch(e => {
      setError('Error occurred!');
    });

    if (response) {
      setCategories(response.data);
    } else {
      setCategories([]);
      setError('Error occurred!');
    }
  };

  // Dismisses the screen without posting.
  const onCancel = () => {
    navigation.goBack();
  }

  // Load categories once on mount.
  useEffect(() => {
    getAllCategories();
  }, []);

  // Surface any stored error through a native alert, then clear it.
  useEffect(() => {
    if (error) {
      Alert.alert(
        'Error',
        error,
        [{
          text: 'Ok',
          onPress: () => setError(null)
        }]
      );
    }
  }, [error])

  return (
    <SafeAreaView style={styles.container}>
      <View style={styles.header}>
        <TouchableOpacity onPress={onCancel} activeOpacity={0.8}>
          <AntDesign color={Colors.light.tabIconDefault} size={30} name='closecircle' />
        </TouchableOpacity>
        <Text style={styles.username}>{user?.user?.username}</Text>
        <TouchableOpacity onPress={onSubmitPost} activeOpacity={0.8}>
          {
            submitting ?
              <ActivityIndicator size="large" color="blue" />
              :
              <Ionicons color={postText === '' && image === '' ? Colors.light.tabIconDefault : Colors.light.tabIconSelected} size={35} name='arrow-up-circle' />
          }
        </TouchableOpacity>
      </View>
      <Text style={styles.catHeader}>Category</Text>
      <View style={styles.catContainer}>
        {
          categories.map(cat => <TouchableOpacity key={cat.id} style={{ ...styles.catItem, backgroundColor: selectedCategory?.name === cat.name ? 'blue' : '#eeeeee' }} onPress={() => setSelectedCategory(cat)}>
            <Text style={{ color: selectedCategory?.name === cat.name ? '#fff' : 'black' }}>{cat.name}</Text>
          </TouchableOpacity>)
        }
      </View>
      <KeyboardAvoidingView
        style={styles.bottomPart}
        behavior={Platform.OS === 'ios' ? 'padding' : undefined}
        // keyboardVerticalOffset={Platform.OS === 'ios' ? 40 : 0}
      >
        <TextInput value={postText} onChangeText={setPostText} multiline={true} style={styles.postInput} placeholder={"What's new?"} />
        <View style={styles.footer}>
          <TouchableOpacity style={styles.pickImageButton} onPress={onPickImage} activeOpacity={0.8}>
            <Ionicons color={postText === '' && image === '' ? '#a6a6a6' : 'blue'} size={30} name='image-outline' />
          </TouchableOpacity>
          <TouchableOpacity style={styles.pickImageButton} onPress={onOpenCamera} activeOpacity={0.8}>
            <Ionicons color={postText === '' && image === '' ? '#a6a6a6' : 'blue'} size={30} name='camera-outline' />
          </TouchableOpacity>
        </View>
        {
          image !== '' &&
          <View style={styles.inputContainer}>
            <Image style={styles.image} source={{ uri: image }} />
          </View>
        }
      </KeyboardAvoidingView>
    </SafeAreaView>
  );
}
// Selects the authenticated-user slice of the redux store for useSelector.
function mapStateToProps(state: StoreStateType) {
  return {
    user: state.user.user,
  };
}
// Static styles for NewPostScreen.
const styles = StyleSheet.create({
  container: {
    flex: 1,
    alignItems: 'flex-start',
    backgroundColor: '#fff'
  },
  // Top bar: cancel button, username, submit button.
  header: {
    flexDirection: 'row',
    width: '100%',
    padding: 10,
    justifyContent: 'space-between',
    alignItems: 'center',
    borderBottomWidth: 0.3,
    borderColor: '#d0d0d0',
  },
  username: {
    fontWeight: 'bold',
    fontSize: 20
  },
  // Keyboard-avoiding area containing the text input and footer.
  bottomPart: {
    flex: 1,
    width: '100%'
  },
  postInput: {
    padding: 15,
    fontSize: 22,
    width: '100%',
    flex: 1,
  },
  // Row of image/camera pickers under the text input.
  footer: {
    width: '100%',
    flexDirection: 'row',
    borderTopWidth: 0.5,
    padding: 5,
    borderTopColor: '#d0d0d0'
  },
  pickImageButton: {
    marginRight: 15
  },
  // One pill per category in the horizontal category strip.
  catItem: {
    marginLeft: 10,
    paddingVertical: 10,
    paddingHorizontal: 20,
    borderRadius: 15
  },
  catHeader: {
    marginLeft: 20,
    fontSize: 20,
    fontWeight: 'bold'
  },
  catContainer: {
    width: '100%',
    flexDirection: 'row',
    marginVertical: 5,
    marginHorizontal: 10,
    paddingVertical: 6,
    borderBottomWidth: 0.5,
    borderBottomColor: 'grey'
  },
  // Preview area for the attached image.
  inputContainer: {
    marginBottom: 20,
    flexDirection: 'row',
    justifyContent: 'center'
  },
  image: {
    width: '50%',
    height: 100,
    resizeMode: 'contain'
  },
});
|
<filename>sha256x4-neon/sha256_neon.h
#ifndef SHA256_NEON_H
#define SHA256_NEON_H

#include <stddef.h>
#include <stdint.h>
#include <arm_neon.h>

/* Four-lane parallel SHA-256 interface (ARM NEON implementation).

   The incremental API allows hashing of individual input blocks; these blocks
   must be exactly 64 bytes each.
   Use the 'finalize' functions for any remaining bytes (possibly over 64). */

/* Per-lane state bytes / size of one SHA-256 digest. */
#define PQC_sha256ctx2_BYTES 32
#define SPX_SHA256_OUTPUT_BYTES 32

/* Structure for the incremental API.
   NOTE(review): sized as four per-lane states plus 8 extra bytes —
   presumably a message-length counter; confirm in the implementation. */
typedef struct {
    uint8_t ctx[4*PQC_sha256ctx2_BYTES + 8];
} sha256ctx2;

/* ====== SHA256 API ==== */

/**
 * Initialize the incremental hashing API.
 */
void sha256x4_inc_init(sha256ctx2 *state);

/**
 * Absorb blocks.
 * 'in' must contain inblocks * 64 bytes (blocks are exactly 64 bytes each).
 */
void sha256x4_inc_blocks(sha256ctx2 *state, const uint8_t *in, size_t inblocks);

/**
 * Finalize and obtain the digest of each of the four lanes.
 * Hashes the trailing in0..in3 bytes (inlen may exceed 64) on top of the
 * absorbed state.
 *
 * If applicable, this function will free the memory associated with the
 * sha256ctx2.
 * NOTE(review): 'state' is a raw byte pointer here rather than sha256ctx2* —
 * confirm callers are expected to pass state->ctx.
 */
void sha256x4_inc_finalize(
    uint8_t *out0,
    uint8_t *out1,
    uint8_t *out2,
    uint8_t *out3,
    uint8_t *state,
    const uint8_t *in0,
    const uint8_t *in1,
    const uint8_t *in2,
    const uint8_t *in3,
    size_t inlen);

/**
 * All-in-one sha256 function: hashes four independent inputs of equal
 * length inlen and writes one digest per lane.
 */
void sha256x42(
    uint8_t *out0,
    uint8_t *out1,
    uint8_t *out2,
    uint8_t *out3,
    const uint8_t *in0,
    const uint8_t *in1,
    const uint8_t *in2,
    const uint8_t *in3,
    size_t inlen);

/**
 * Four-lane MGF1 mask generation built on the parallel SHA-256: expands
 * each inlen-byte seed into outlen bytes of output per lane.
 */
void mgf1x42(
    unsigned char *out0,
    unsigned char *out1,
    unsigned char *out2,
    unsigned char *out3,
    unsigned long outlen,
    const unsigned char *in0,
    const unsigned char *in1,
    const unsigned char *in2,
    const unsigned char *in3,
    unsigned long inlen);

#endif
module.exports = {
up: (queryInterface, Sequelize) =>
queryInterface.bulkInsert(
'Users',
[
{
name: '<NAME>',
email: '<EMAIL>',
createdAt: new Date(),
updatedAt: new Date()
},
{
name: '<NAME>',
email: '<EMAIL>',
createdAt: new Date(),
updatedAt: new Date()
}
],
{}
),
down: (queryInterface, Sequelize) => queryInterface.bulkDelete('Users', null, {})
};
|
package academy.devonline.java.section001_classes;

import java.util.Arrays;

/**
 * Manual test driver for {@link DynaArrayVer3}: prints the state of two
 * independent instances before and after filling the first one, showing
 * that the instances do not share state.
 */
public class DynaArrayTestVer3 {

    public static void main(String[] args) {
        DynaArrayVer3 dynaArray1 = new DynaArrayVer3();
        DynaArrayVer3 dynaArray2 = new DynaArrayVer3();

        printState("dynaArray1", dynaArray1);
        printState("dynaArray2", dynaArray2);

        // Fill only the first array; the second must stay empty.
        for (int i = 0; i < 6; i++) {
            dynaArray1.add(5);
        }

        System.out.println("------------------------------------");
        printState("dynaArray1", dynaArray1);
        printState("dynaArray2", dynaArray2);
    }

    /**
     * Prints "<label>: <contents> <count>" exactly as the original
     * three-statement sequence did.
     */
    private static void printState(String label, DynaArrayVer3 array) {
        System.out.print(label + ": ");
        System.out.print(Arrays.toString(array.result) + " ");
        System.out.println(array.count);
    }
}
|
<gh_stars>1-10
from django.db import models
from cms.models.pluginmodel import CMSPlugin
from thewall.models import Unconference
class SponsorPlugin(CMSPlugin):
    """
    CMS plugin configuration: the unconference to use when displaying
    sponsorships.
    """
    # on_delete is explicit: CASCADE matches the implicit default of
    # Django < 2.0 and is mandatory from Django 2.0 onwards.
    unconference = models.ForeignKey(Unconference, on_delete=models.CASCADE)
|
<reponame>seangreathouse/inventory-hunter
import scraper.amazon
import scraper.bestbuy
import scraper.bhphotovideo
import scraper.microcenter
import scraper.newegg
from scraper.common import ScraperFactory
def init_scrapers(driver, urls: list):
    """Create one scraper per URL via the ScraperFactory and return them."""
    scrapers = []
    for url in urls:
        scrapers.append(ScraperFactory.create(driver, url))
    return scrapers
|
/*
* Bulldozer Framework
* Copyright (C) DesertBit
*/
package tr
import (
"bufio"
"encoding/json"
"fmt"
"github.com/desertbit/bulldozer/log"
"io"
"io/ioutil"
"os"
"path/filepath"
"strings"
"sync"
"time"
)
const (
	// Suffix is the file-name extension of translation files.
	Suffix = ".tr"

	// defaultLocale is the fallback when the current locale has no files.
	defaultLocale = "en"

	// reloadTimerTimeout debounces reload requests.
	reloadTimerTimeout = 1 * time.Second
)

var (
	currentLocale string            = defaultLocale             // active locale
	messages      map[string]string = make(map[string]string)   // translation ID -> text
	directories   []string                                      // registered translation directories
	mutex         sync.Mutex                                    // guards the state above
	reloadTimer   *time.Timer                                   // debounce timer driving the reload loop
	reloadTimerStop chan struct{} = make(chan struct{})         // closed in Release to stop the loop
)

// init arms the debounce timer and starts the background reload goroutine.
func init() {
	reloadTimer = time.NewTimer(reloadTimerTimeout)
	startReloadLoop()
}
//##############//
//### Loops ####//
//##############//
// startReloadLoop spawns the goroutine that waits for the debounce timer
// to fire and then reloads all translation files. The goroutine exits
// when reloadTimerStop is closed (see Release).
func startReloadLoop() {
	go func() {
		defer func() {
			// Stop the timer on exit.
			reloadTimer.Stop()
		}()

		for {
			select {
			case <-reloadTimer.C:
				// Debounce period elapsed: reload the translations.
				_reload()
			case <-reloadTimerStop:
				// Release was called: exit the loop.
				return
			}
		}
	}()
}
//##############//
//### Public ###//
//##############//
// Load loads all translation files synchronously.
// If already loaded, they will be reloaded.
func Load() {
	// Stop the timer so a pending debounced reload does not fire as well.
	reloadTimer.Stop()

	// Reload the translations now.
	_reload()
}
// Release releases all goroutines and performs a cleanup.
func Release() {
	// Stop the filewatcher.
	// NOTE(review): fileWatcher is declared elsewhere in this package.
	fileWatcher.Close()

	// Stop the reload goroutine by closing its stop channel.
	close(reloadTimerStop)
}
// SetLocale sets the current locale.
// The translation files are all reloaded (debounced).
func SetLocale(locale string) {
	// Lock the mutex.
	mutex.Lock()
	defer mutex.Unlock()

	// Set the new locale.
	currentLocale = locale

	// Schedule a reload. reload only resets the debounce timer, so calling
	// it while holding the mutex is safe (_reload runs later on the timer
	// goroutine and takes the mutex itself).
	reload()
}
// S returns the translation for the given message ID, formatted with
// fmt.Sprintf using the optional arguments. For an unknown ID a warning
// is logged and a "???id???" placeholder is returned.
func S(id string, args ...interface{}) string {
	if msg, found := getMessage(id); found {
		return fmt.Sprintf(msg, args...)
	}

	log.L.Warning("no translated string found for ID '%s'", id)
	return "???" + id + "???"
}
// Add registers a translation directory. Registering the same path twice
// is a no-op; otherwise the directory is added to the file watcher and a
// debounced reload of all translation messages is scheduled.
func Add(dirPath string) {
	mutex.Lock()
	defer mutex.Unlock()

	dirPath = filepath.Clean(dirPath)

	// Already registered? Nothing to do.
	for _, known := range directories {
		if known == dirPath {
			return
		}
	}

	directories = append(directories, dirPath)

	// Watch the directory so edits trigger automatic reloads.
	if err := fileWatcher.Add(dirPath); err != nil {
		log.L.Error("translation: failed to add path '%s' to file watcher: %v", dirPath, err)
	}

	reload()
}
// Remove unregisters a translation directory, stops watching it and
// schedules a debounced reload. Unknown paths are ignored.
func Remove(dirPath string) {
	mutex.Lock()
	defer mutex.Unlock()

	dirPath = filepath.Clean(dirPath)

	// Locate the path among the registered directories.
	index := -1
	for i, known := range directories {
		if known == dirPath {
			index = i
			break
		}
	}
	if index < 0 {
		// Not registered: nothing to remove.
		return
	}

	// Swap-remove: the order of the directories slice is not significant.
	last := len(directories) - 1
	directories[index] = directories[last]
	directories = directories[:last]

	// Stop watching the removed directory.
	fileWatcher.Remove(dirPath)

	reload()
}
//###############//
//### Private ###//
//###############//
// getMessage looks up the translated text for id under the package mutex.
func getMessage(id string) (string, bool) {
	mutex.Lock()
	defer mutex.Unlock()

	msg, found := messages[id]
	return msg, found
}
// reload schedules a debounced reload: it merely re-arms the timer that
// the background loop (startReloadLoop) listens on, so rapid successive
// calls collapse into one _reload.
func reload() {
	// Reset the timer.
	reloadTimer.Reset(reloadTimerTimeout)
}
// _reload rebuilds the messages map from every registered directory.
// For each directory it loads <dir>/<currentLocale>, falling back to
// <dir>/<defaultLocale> when the current locale has no files. Per-directory
// errors are logged and skipped so one bad directory cannot abort the rest.
func _reload() {
	// Lock the mutex: messages and directories are shared state.
	mutex.Lock()
	defer mutex.Unlock()

	log.L.Info("translation: reloading translation files")

	// Empty the current messages map.
	messages = make(map[string]string)

	// Go through all directories.
	for _, d := range directories {
		// Skip if the base translation directory contains no directories.
		entries, err := ioutil.ReadDir(d)
		if err != nil {
			log.L.Error("failed to obtain entry list of directory '%s': %v", d, err)
			continue
		}
		empty := true
		for _, e := range entries {
			if e.IsDir() {
				empty = false
				break
			}
		}
		if empty {
			continue
		}

		dirPath := d + "/" + currentLocale

		// Check if the current locale translation folder exists.
		ok, err := dirExists(dirPath)
		if err != nil {
			log.L.Error("translate reload error: %v", err)
			continue
		}

		// If not, try to load the default locale instead.
		if !ok {
			log.L.Warning("translation: missing translation files '%s' for current locale '%s'", d, currentLocale)

			dirPath = d + "/" + defaultLocale

			// Check if the default locale translation folder exists.
			ok, err := dirExists(dirPath)
			if err != nil {
				log.L.Error("translate reload error: %v", err)
				continue
			}
			if !ok {
				log.L.Error("translation: missing translation files '%s' for default locale '%s'", d, defaultLocale)
				continue
			}
		}

		// Parse every *.tr file below the chosen locale directory.
		err = filepath.Walk(dirPath, loadFile)
		if err != nil {
			log.L.Error("translation: filepath walk error: %v", err)
		}
	}
}
// dirExists reports whether path exists and is a directory.
// A missing path yields (false, nil); any other stat failure is returned
// as an error.
func dirExists(path string) (bool, error) {
	info, err := os.Stat(path)
	if os.IsNotExist(err) {
		return false, nil
	}
	if err != nil {
		return false, err
	}

	return info.IsDir(), nil
}
// loadFile is the filepath.Walk callback that parses one *.tr file.
// Each file is a stream of JSON objects with fields ID and Text; entries
// are merged into the package-level messages map (the caller holds the
// mutex). Parse errors are logged and swallowed so one bad file does not
// abort the walk.
func loadFile(path string, f os.FileInfo, err error) error {
	if f == nil {
		log.L.Error("filepath walk: file info object is nil!")
		return nil
	}

	// Skip if this is a directory or the path is missing the translation suffix.
	if f.IsDir() || !strings.HasSuffix(path, Suffix) {
		return nil
	}

	// Shape of one record in the .tr JSON stream.
	type Message struct {
		ID, Text string
	}

	// Open the file.
	file, err := os.Open(path)
	if err != nil {
		log.L.Error("failed to open translation file '%s': %v", path, err)
		return nil
	}
	defer file.Close()

	// Parse the JSON translation file record by record.
	var ok bool
	dec := json.NewDecoder(bufio.NewReader(file))
	for {
		var m Message
		if err = dec.Decode(&m); err == io.EOF {
			break
		} else if err != nil {
			log.L.Error("failed to parse translation file '%s': %v", path, err)
			return nil
		}

		// Warn when a later file overwrites an already-registered ID.
		_, ok = messages[m.ID]
		if ok {
			log.L.Warning("%s: overwriting duplicate translation message with ID '%s'!", path, m.ID)
		}

		// Add the text to the message map.
		messages[m.ID] = m.Text
	}

	return nil
}
|
/// Unwraps an optional character.
///
/// Returns `(c, true)` when `input` is `Some(c)`, and a NUL placeholder
/// `('\0', false)` when it is `None`.
fn process_option(input: Option<char>) -> (char, bool) {
    match input {
        Some(c) => (c, true),
        // BUG FIX: the original wrote the multi-character literal 'child',
        // which is not a valid char literal and does not compile; '\0' is
        // used as the "no value" placeholder instead.
        None => ('\0', false),
    }
}
package org.hisp.dhis.mapgenerator;
import static org.junit.Assert.assertEquals;
import java.awt.Color;
import org.hisp.dhis.DhisSpringTest;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.dataelement.DataElementGroup;
import org.hisp.dhis.dataelement.DataElementService;
import org.hisp.dhis.indicator.Indicator;
import org.hisp.dhis.indicator.IndicatorGroup;
import org.hisp.dhis.indicator.IndicatorService;
import org.hisp.dhis.indicator.IndicatorType;
import org.hisp.dhis.mapgeneration.InternalMapLayer;
import org.hisp.dhis.mapping.MapLegendSet;
import org.hisp.dhis.mapping.MappingService;
import org.hisp.dhis.organisationunit.OrganisationUnit;
import org.hisp.dhis.organisationunit.OrganisationUnitLevel;
import org.hisp.dhis.organisationunit.OrganisationUnitService;
import org.hisp.dhis.period.MonthlyPeriodType;
import org.hisp.dhis.period.Period;
import org.hisp.dhis.period.PeriodService;
import org.hisp.dhis.period.PeriodType;
import org.junit.Ignore;
import org.junit.Test;
/**
 * Tests for {@link InternalMapLayer} accessors; the map-generation test
 * cases are unimplemented placeholders.
 *
 * @author <NAME> <<EMAIL>>
 */
public class GeoToolsMapLayerTest
    extends DhisSpringTest
{
    // Layer under test, configured in setUpTest.
    private InternalMapLayer internalMapLayer;

    private MappingService mappingService;

    // Fixture metadata persisted so the layer has real objects to refer to.
    private OrganisationUnit organisationUnit;

    private OrganisationUnitLevel organisationUnitLevel;

    private IndicatorGroup indicatorGroup;

    private IndicatorType indicatorType;

    private Indicator indicator;

    private DataElement dataElement;

    private DataElementGroup dataElementGroup;

    private PeriodType periodType;

    private Period period;

    private MapLegendSet mapLegendSet;

    /**
     * Builds and persists the fixture graph (org unit, indicator, data
     * element, period, legend set) and configures the layer under test.
     * NOTE(review): organisationUnitService, indicatorService,
     * dataElementService and periodService are not declared here, so they
     * are presumably fields inherited from DhisSpringTest — confirm.
     */
    @Override
    public void setUpTest()
    {
        mappingService = (MappingService) getBean( MappingService.ID );
        organisationUnitService = (OrganisationUnitService) getBean( OrganisationUnitService.ID );
        indicatorService = (IndicatorService) getBean( IndicatorService.ID );
        dataElementService = (DataElementService) getBean( DataElementService.ID );
        periodService = (PeriodService) getBean( PeriodService.ID );

        // Organisation unit fixtures.
        organisationUnit = createOrganisationUnit( 'A' );
        organisationUnitLevel = new OrganisationUnitLevel( 1, "Level" );
        organisationUnitService.addOrganisationUnit( organisationUnit );
        organisationUnitService.addOrganisationUnitLevel( organisationUnitLevel );

        // Indicator fixtures.
        indicatorGroup = createIndicatorGroup( 'A' );
        indicatorService.addIndicatorGroup( indicatorGroup );
        indicatorType = createIndicatorType( 'A' );
        indicatorService.addIndicatorType( indicatorType );
        indicator = createIndicator( 'A', indicatorType );
        indicatorService.addIndicator( indicator );

        // Data element fixtures.
        dataElement = createDataElement( 'A' );
        dataElementService.addDataElement( dataElement );
        dataElementGroup = createDataElementGroup( 'A' );
        dataElementGroup.getMembers().add( dataElement );
        dataElementService.addDataElementGroup( dataElementGroup );

        // One monthly period.
        periodType = periodService.getPeriodTypeByName( MonthlyPeriodType.NAME );
        period = createPeriod( periodType, getDate( 2000, 1, 1 ), getDate( 2000, 2, 1 ) );
        periodService.addPeriod( period );

        mapLegendSet = createMapLegendSet( 'A', indicator );
        mappingService.addMapLegendSet( mapLegendSet );

        // The layer under test with an initial radius/colour/opacity setup.
        internalMapLayer = new InternalMapLayer();
        internalMapLayer.setRadiusLow( 15 );
        internalMapLayer.setRadiusHigh( 35 );
        internalMapLayer.setColorLow( Color.YELLOW );
        internalMapLayer.setColorHigh( Color.RED );
        internalMapLayer.setOpacity( 0.5f );
    }

    @Test
    @Ignore
    public void testBuildGeometryForOrganisationUnit()
    {
        //TODO
    }

    @Test
    @Ignore
    public void testGetAllMapObjects()
    {
        //TODO
    }

    /**
     * NOTE(review): the expected values (45.8F, 82.023984F) do not match the
     * values passed to the setter (45, 82) for a plain accessor — presumably
     * why the test is ignored; confirm the intended behaviour before
     * re-enabling.
     */
    @Ignore
    @Test
    public void testSetGetRadiusHigh()
    {
        internalMapLayer.setRadiusHigh( 45 );
        assertEquals( 45.8F, internalMapLayer.getRadiusHigh(), 0.00001F );

        internalMapLayer.setRadiusHigh( 82 );
        assertEquals( 82.023984F, internalMapLayer.getRadiusHigh(), 0.00001F );
    }

    /**
     * NOTE(review): same expected-value mismatch as testSetGetRadiusHigh.
     */
    @Ignore
    @Test
    public void testSetGetRadiusLow()
    {
        internalMapLayer.setRadiusLow( 45 );
        assertEquals( 45.8F, internalMapLayer.getRadiusLow(), 0.00001F );

        internalMapLayer.setRadiusLow( 82 );
        assertEquals( 82.023984F, internalMapLayer.getRadiusLow(), 0.00001F );
    }

    @Ignore
    @Test
    public void testSetGetColorHigh()
    {
        internalMapLayer.setColorHigh( Color.YELLOW );
        assertEquals( Color.YELLOW, internalMapLayer.getColorHigh() );

        internalMapLayer.setColorHigh( Color.BLUE );
        assertEquals( Color.BLUE, internalMapLayer.getColorHigh() );
    }

    @Ignore
    @Test
    public void testSetGetColorLow()
    {
        internalMapLayer.setColorLow( Color.YELLOW );
        assertEquals( Color.YELLOW, internalMapLayer.getColorLow() );

        internalMapLayer.setColorLow( Color.BLUE );
        assertEquals( Color.BLUE, internalMapLayer.getColorLow() );
    }

    @Ignore
    @Test
    public void testSetGetOpacity()
    {
        internalMapLayer.setOpacity( 34.8F );
        assertEquals( 34.8F, internalMapLayer.getOpacity(), 0.00001 );

        internalMapLayer.setOpacity( 14.5F );
        assertEquals( 14.5F, internalMapLayer.getOpacity(), 0.00001 );
    }

    @Ignore
    @Test
    public void testGetIntervalSet()
    {
        //TODO
    }
}
|
<gh_stars>1-10
package io.github.hotspacode.neeza.test.source;
import io.github.hotspacode.neeza.base.core.MockSpy;
import io.github.hotspacode.neeza.base.dto.MockTransport;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
public class ObjectSource3 {

    /**
     * Returns the mocked value when a mock is registered for this call
     * site, otherwise runs the normal path and returns null.
     */
    public Map createSource2(String a, String b) {
        List<Object> params = new ArrayList<>();

        // Look up mock data registered for the current stack frame.
        MockTransport transport = MockSpy.getMockData(Thread.currentThread().getStackTrace()[1], params, null);
        if (transport.isMocked()) {
            return transport.getObject(transport);
        }

        System.out.println("我在正常执行...");
        return null;
    }
}
|
#!/bin/sh
# Unless explicitly stated otherwise all files in this repository are licensed
# under the Apache License Version 2.0.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2018 Datadog, Inc.

##### Core config #####

# BUG FIX: all variable expansions inside [ ... ] are quoted; unquoted
# expansions break the test when the value is empty or contains whitespace.
if [ -z "$DD_API_KEY" ]; then
    echo "You must set DD_API_KEY environment variable to run the Datadog Agent container"
    exit 1
fi

# Default to the main Datadog intake when no URL override is given.
if [ -z "$DD_DD_URL" ]; then
    export DD_DD_URL="https://app.datadoghq.com"
fi

# Without a UDS socket path, listen for non-local UDP traffic instead.
if [ -z "$DD_DOGSTATSD_SOCKET" ]; then
    export DD_DOGSTATSD_NON_LOCAL_TRAFFIC=1
else
    # Remove a stale socket so dogstatsd can re-bind; refuse to clobber
    # anything that is not a socket.
    if [ -e "$DD_DOGSTATSD_SOCKET" ]; then
        if [ -S "$DD_DOGSTATSD_SOCKET" ]; then
            echo "Deleting existing socket at ${DD_DOGSTATSD_SOCKET}"
            rm "$DD_DOGSTATSD_SOCKET"
        else
            echo "${DD_DOGSTATSD_SOCKET} exists and is not a socket, please check your volume options"
            exit 1
        fi
    fi
fi

##### Starting up dogstatsd #####

chmod +x /dogstatsd
sync # Fix for 'Text file busy' error
exec "$@"
|
/** Base class shared by every component below. */
class Component {
  // Common properties and methods for all components
}

/** Component for the target table. */
class TargetTableComponent extends Component {
  // Properties and methods specific to TargetTableComponent
}

/** Component for the target card. */
class TargetCardComponent extends Component {
  // Properties and methods specific to TargetCardComponent
}

/** Long variant of the target card; extends TargetCardComponent. */
class LongTargetCardComponent extends TargetCardComponent {
  // Properties and methods specific to LongTargetCardComponent
}

/** Component for gene details. */
class GeneDetailsComponent extends Component {
  // Properties and methods specific to GeneDetailsComponent
}

/** Component for interaction details. */
class InteractionDetailsComponent extends Component {
  // Properties and methods specific to InteractionDetailsComponent
}

/** Component for disease-association details. */
class DiseaseAssociationDetailsComponent extends Component {
  // Properties and methods specific to DiseaseAssociationDetailsComponent
}

/** Component for knowledge metrics. */
class KnowledgeMetricsComponent extends Component {
  // Properties and methods specific to KnowledgeMetricsComponent
}
<reponame>jinahya/datagokr-api-b090041-lunphinfoservice-client-spring
package com.github.jinahya.datagokr.api.b090041_.lunphinfoservice.client;
/**
 * Intentionally empty placeholder test class; it declares no test methods
 * and performs no assertions.
 */
public class _NoOpTest {
}
<filename>blingfireclient.library/inc/FAState2Ow_pack_triv.h
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License.
 */

#ifndef _FA_STATE2OW_PACK_TRIV_H_
#define _FA_STATE2OW_PACK_TRIV_H_

#include "FAConfig.h"
#include "FASetImageA.h"
#include "FAState2OwCA.h"

namespace BlingFire
{

///
/// This class is able to interpret automaton image stored by FAAutPack_triv.
/// It resolves a State to its output weight (Ow) directly from the packed
/// image supplied via SetImage.
///
class FAState2Ow_pack_triv : public FASetImageA,
                             public FAState2OwCA {
public:
    FAState2Ow_pack_triv ();

//// from FASetImageA
public:
    // Attaches the packed automaton image that GetOw will read from.
    void SetImage (const unsigned char * pAutImage);

/// from FAState2OwCA
public:
    // Returns the output weight stored for the given State.
    const int GetOw (const int State) const;

private:
    // pointer to the automaton image
    const unsigned char * m_pAutImage;
    // dst size
    // NOTE(review): presumably the byte width of destination fields in the
    // packed image — confirm against FAAutPack_triv's packing format.
    int m_DstSize;
};

}

#endif
#!/bin/sh
# ** AUTO GENERATED **
# 4.1.14 - Ensure changes to system administration scope (sudoers) is collected (Automated)

# Append both sudoers watch rules in one redirection, then reload auditd.
{
    echo "-w /etc/sudoers -p wa -k scope"
    echo "-w /etc/sudoers.d/ -p wa -k scope"
} >> /etc/audit/rules.d/audit.rules

service auditd restart
|
<reponame>mothguib/pytrol
# -*- coding: utf-8 -*-
import numpy as np
from pytrol.control.agent.HPMEstimator import HPMEstimator
from pytrol.model.knowledge.EnvironmentKnowledge import EnvironmentKnowledge
from pytrol.util.net.Connection import Connection
from pytrol.util.randidlenest import draw_rand_idls
# Random Heuristic Pathfinder Mean Predictor: an HPMEstimator whose
# idleness estimates are randomised around the parent's best estimate.
class RHPMEstimator(HPMEstimator):

    def __init__(self,
                 id_: int,
                 original_id: str,
                 env_knl: EnvironmentKnowledge,
                 connection: Connection,
                 agts_addrs: list,
                 variant: str = '',
                 depth: float = 3.0,
                 interaction: bool = True):
        r"""Initialise the estimator.

        All arguments are forwarded unchanged to
        :class:`HPMEstimator`.

        Args:
            id_ (int): agent identifier
            original_id (str): original (string) agent identifier
            env_knl (EnvironmentKnowledge): the agent's knowledge of the
                environment
            connection (Connection): connection used by the agent
            agts_addrs (list): addresses of the agents
            variant (str): estimator variant name
            depth (float): depth parameter forwarded to the parent
            interaction (bool): whether agents interact
        """
        HPMEstimator.__init__(self, id_=id_, original_id=original_id,
                              env_knl=env_knl, connection=connection,
                              agts_addrs=agts_addrs, variant=variant,
                              depth=depth, interaction=interaction)

    def estimate_idls(self) -> np.ndarray:
        r"""Predictor function: return the model's estimation of idlenesses,
        randomised around the parent's best estimate."""
        # For each node the best idleness between the estimated,
        # the individual and the previous estimated incremented of 1 is
        # selected
        best_iidl_estm = super().estimate_idls()

        # Draw randomised idlenesses from the best estimate and the
        # idlenesses shared by the other agents.
        eidls = draw_rand_idls(best_iidl_estm, self.env_knl.shared_idls)

        return eidls
|
#pragma once
#include "json/json.h"
//stl
#include <iostream>
#include <fstream>
#include <sstream>
#include <vector>
#include <unordered_map>
#include <set>
#include <algorithm>
#include <cassert>
#include <chrono>
#include <memory>
#include <thread>
#include <tuple>
#include <string>
|
import functools
@functools.lru_cache(maxsize=None)
def add_two_numbers(n1, n2):
    """Return the sum of ``n1`` and ``n2``.

    Results are memoised without bound via ``lru_cache``.
    """
    total = n1 + n2
    return total
package main
import (
"fmt"
"path/filepath"
"encoding/json"
"os"
)
// main is the entry point; it currently exercises the file-removal helper.
func main() {
	testRemoveFiles()
}
// testRemoveFiles deletes every entry found in the image directory,
// logging success or failure per path.
// NOTE(review): the Windows path is hard-coded — confirm intended.
func testRemoveFiles() {
	list := listFiles("D:\\data\\tmp\\img")
	for _, v := range list {
		if deleteFile(v) {
			fmt.Println("delete " + v)
		} else {
			fmt.Println("fail to delete " + v)
		}
	}
}
// deleteFile removes the regular file at path.
// It returns true on success, and false when path is not a regular file
// or the removal fails (the failure is logged).
func deleteFile(path string) bool {
	// Only regular files are removed; directories and missing paths fail.
	if !isFile(path) {
		return false
	}

	if err := os.Remove(path); err != nil {
		fmt.Println("fail to remove file[" + path + "]: " + err.Error())
		return false
	}

	fmt.Println("remove file: " + path)
	return true
}
// isFile reports whether path refers to an existing regular file
// (it exists and is not a directory).
func isFile(path string) bool {
	if len(path) == 0 {
		fmt.Println("without path")
		return false
	}

	info, err := os.Stat(path)
	// BUG FIX: the original dereferenced info (info.IsDir()) before
	// checking err, which panics with a nil FileInfo when the path does
	// not exist. The error must be checked first.
	if err != nil {
		return false
	}

	return !info.IsDir()
}
// testListFiles exercises listFiles, subArr, listAll and response against
// a hard-coded wallpaper directory.
func testListFiles() {
	list := listFiles("E:/image/wallpaper")
	size := 10
	count := len(list) / size

	// Print the two pages just past the last full page.
	listAll(subArr(list, count+1, size))
	listAll(subArr(list, count+2, size))

	response(list)
}
// listFiles returns every path matching <path>/*.
// NOTE(review): on a Glob error it returns the placeholder slice {"1"}
// instead of reporting the error — callers cannot distinguish this from a
// real entry named "1"; confirm whether this fallback is intentional.
func listFiles(path string) []string {
	list, err := filepath.Glob(filepath.Join(path, "*"))
	if err != nil {
		list = []string{"1"}
	}
	return list
}
// subArr returns the page-th chunk (1-based) of size elements from list.
// Pages past the end yield an empty slice; a negative start is clamped to
// the beginning, and an empty input is returned unchanged.
func subArr(list []string, page int, size int) []string {
	total := len(list)
	if total == 0 {
		return list
	}

	start := (page - 1) * size
	if start < 0 {
		start = 0
	}
	if start > total {
		return []string{}
	}

	end := page * size
	if end > total {
		end = total
	}

	return list[start:end]
}
// listAll prints a separator line followed by each entry on its own line.
func listAll(list []string) {
	fmt.Println("--------------------")
	for _, entry := range list {
		fmt.Println(entry)
	}
}
// response wraps the file list in an APIResponse and prints it as JSON;
// marshalling errors are printed instead of the payload.
func response(list []string) {
	payload := &APIResponse{1, 10, len(list), "_30x30", list}

	encoded, err := json.Marshal(payload)
	if err != nil {
		fmt.Println(err.Error())
		return
	}

	fmt.Println(string(encoded))
}
// APIResponse is the JSON payload describing one page of file paths.
type APIResponse struct {
	Page   int      `json:"page"`   // 1-based page number
	Size   int      `json:"size"`   // page size
	Count  int      `json:"count"`  // total number of entries
	Suffix string   `json:"suffix"` // filename suffix, e.g. "_30x30"
	List   []string `json:"list"`   // file paths
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.