text stringlengths 1 1.05M |
|---|
<filename>src/main/java/hekiyou/academy/unsafemaps/mixin/JustReferenceTheColorDataMixin.java
package hekiyou.academy.unsafemaps.mixin;
import net.minecraft.item.map.MapState;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.Shadow;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
@Mixin(MapState.UpdateData.class)
public class JustReferenceTheColorDataMixin {

    // Raw 1D pixel buffer of the incoming map-update packet; for a full map
    // update this is exactly 128*128 bytes.
    @Shadow public byte[] colors;

    /**
     * @reason For whatever reason, the normal vanilla procedure for updating a map is as follows:
     * - Receive 1D array of pixel data
     * - Iterate over 1D as 2D array
     * - For every pixel at (x, y), call MapState.setColor(x, y, pixel)
     * - ... which then takes the 2D coordinates, and then converts it _back_ into a 1D index
     * - With the 1D index, write the pixel color
     *
     * The obvious technical inefficiency is iterating the 1D as 2D, then using those 2D coordinates to go back to 1D.
     * However, the slightly less obvious issue is the fact that we do this for every pixel regardless of how much
     * has changed. If incoming colors are 128 x 128, why bother copying?
     *
     * With this Mixin, we just set target.colors to our input colors iff we're updating
     * the whole map. Otherwise, we fallback to the old slower loop with support for offsets.
     * @param target The MapState to update using the new colors
     * @author tsunko
     */
    @Inject(method = "setColorsTo(Lnet/minecraft/item/map/MapState;)V", at = @At(value = "HEAD"), cancellable = true)
    public void setColorsTo(MapState target, CallbackInfo info) {
        if(colors.length == 128*128) {
            // cool, we can just simply set target.colors instead of doing a costly copy
            // NOTE(review): this aliases the packet's array into the MapState — any later
            // mutation of the packet buffer would write through to the map. Presumably
            // update packets are discarded after this call; confirm before relying on it.
            target.colors = colors;
            info.cancel();
        }
    }
}
|
#!/bin/bash
# Script prime number V.0.1
# Check whether $1 is a prime number via trial division up to sqrt($1).
# Prints a colored verdict; returns 0 when $1 is prime, 1 otherwise.
function pnum()
{
    echo "Start pnum..."
    echo "Param $1"
    red='\e[0;31m'
    green='\e[0;32m'
    noColor='\e[0m'
    i=2
    if [ "$1" -lt 2 ] ;
    then
        echo -e "${red}$1 is not primary number!${noColor}"
        return 1
    elif [ "$1" -eq 2 ] ;
    then
        echo -e "${green}$1 is primary number!${noColor}"
        return 0
    fi
    # Only need to test divisors while i*i <= n.
    while [ $((i*i)) -le "$1" ] ; do
        if [ "$(($1%i))" -eq 0 ] ; then
            echo -e "${red}$1 is not primary number!${noColor}"
            # Bug fix: this branch previously returned 0 (success) for composite
            # numbers, contradicting the "< 2" branch which returns 1 for non-primes.
            return 1
        fi
        i=$((i+1))
    done
    echo -e "${green}$1 is primary number!${noColor}"
    echo "End pnum"
    return 0
}
package com.telenav.osv.manager.network.parser;
import org.json.JSONException;
import org.json.JSONObject;
import com.telenav.osv.item.AccountData;
import com.telenav.osv.item.network.AuthData;
/**
* Created by kalmanb on 8/3/17.
*/
/** Parses the authentication API response into an {@link AuthData} holder. */
public class AuthDataParser extends ApiResponseParser<AuthData> {

    @Override
    public AuthData getHolder() {
        return new AuthData();
    }

    /**
     * Extracts token, id, username, display name and user type from the "osv"
     * object of the JSON response, on top of whatever the base-class parse
     * already populated.
     * NOTE(review): if any field is missing, the JSONException is only printed
     * and a partially-filled AuthData is returned — confirm callers tolerate that.
     */
    public AuthData parse(String json) {
        AuthData authData = super.parse(json);
        try {
            JSONObject obj;
            obj = new JSONObject(json);
            JSONObject osv = obj.getJSONObject("osv");
            authData.setAccessToken(osv.getString("access_token"));
            authData.setId(osv.getString("id"));
            authData.setUsername(osv.getString("username"));
            authData.setDisplayName(osv.getString("full_name"));
            // "type" is a server-side string mapped to an internal numeric code.
            String type = osv.getString("type");
            int typeNum = AccountData.getUserTypeForString(type);
            authData.setUserType(typeNum);
        } catch (JSONException e) {
            e.printStackTrace();
        }
        return authData;
    }
}
|
<filename>java/src/main/test/Test1.java
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import java.io.*;
import java.net.URL;
/**
* @author zs
*@date 2021/9/14.
*/
/**
 * Downloads a remote image and saves it to a local file.
 *
 * Fixes over the original:
 * - The output path pointed at a directory ("...\Desktop"), which makes
 *   FileOutputStream fail; it now includes a file name.
 * - The path began with an invisible U+202A (left-to-right embedding)
 *   character pasted from Windows Explorer, which corrupts the path.
 * - The output stream is closed via try-with-resources even on failure.
 * - IOUtils.toByteArray has no String overload; a java.net.URL is passed.
 * - The pointless ByteArrayInputStream copy loop is gone: the bytes are
 *   already in memory and can be written in one call.
 */
public class Test1 {
    public static void main(String[] args) throws IOException {
        String url = "https://simeitol-app.oss-cn-hangzhou.aliyuncs.com/ecommerce/pro/pcm/img/微信图片_20211216111414.jpg";
        byte[] bytes = IOUtils.toByteArray(new URL(url));
        try (OutputStream out = new FileOutputStream("C:\\Users\\simeitol\\Desktop\\微信图片_20211216111414.jpg")) {
            out.write(bytes);
        }
    }
}
|
#! /usr/bin/env bats
# Bats tests for lib/trace.sh: `frame N` must print the stack frame N levels
# up, and `callstack` must print the whole chain down to "main".
SRC_DIR=$BATS_TEST_DIRNAME/../..
source $SRC_DIR/lib/trace.sh
@test "test frame" {
    # Three nested helpers give the stack known function names at each depth.
    one() {
        two $1
    }
    two() {
        three $1
    }
    three() {
        frame $1
    }
    # frame 0 = caller of `three` (two); 1 = one; 2 = bats' own `run` wrapper.
    run one 0
    [[ $output =~ "two" ]]
    run one 1
    [[ $output =~ "one" ]]
    run one 2
    [[ $output =~ "run" ]]
}
@test "test callstack" {
    one() {
        two $1
    }
    two() {
        three $1
    }
    three() {
        callstack $1
    }
    # The full stack should bottom out at bats' "main".
    run one 0
    [[ $output =~ "main" ]]
}
|
#!/bin/bash
# Copyright 2017-present Open Networking Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Output directory for the generated state machines; defaults to the cwd.
odir="$1"
if [ -z "$odir" ]; then
    # Bug fix: `odir = "./"` (with spaces around =) runs a command named `odir`
    # instead of assigning the variable; shell assignments must have no spaces.
    odir="./"
fi
##Generate TLS authentication Test state machine
python yamlFsm.py -p TlsAuthHolder -f noseTlsAuthTest.yaml > "${odir}/noseTlsAuthHolder.py"
##Generate PAP authentication state machine
python yamlFsm.py -p PAPAuthHolder -f nosePAPTest.yaml > "${odir}/nosePAPAuthHolder.py"
##Generate DNS test state machine
#python yamlFsm.py -p DnsHolder -f noseDnsTest.yaml > ${odir}/noseDnsHolder.py
#Generate EAP MD5 authentication state machine
python yamlFsm.py -p Md5AuthHolder -f noseMD5AuthTest.yaml > "${odir}/noseMd5AuthHolder.py"
|
# Load the EL8 + oneAPI CI environment script that lives next to this file;
# readlink -f resolves symlinks so sourcing works regardless of the cwd.
source $(dirname $(readlink -f ${BASH_SOURCE}))/ci-el8-oneapi.sh
|
<filename>app/src/views/stations/StationDetails.js<gh_stars>0
import React, { useState, useEffect, useRef } from 'react';
import PropTypes from 'prop-types';
import { connect } from 'react-redux';
import { Link } from 'react-router-dom';
import { Helmet } from 'react-helmet';
import { resolvePath as urlResolve } from 'tg-named-routes';
import { Container } from 'reactstrap';
import { selectStation } from 'schemas/stations';
import { StationShape, EflowsResponseShape } from 'utils/types';
import { gettext } from 'utils/text';
import withView from 'decorators/withView';
import { fetchEflowsAction } from 'sagas/stations/fetchEflows';
import { Configuration } from 'components/stations/Configuration';
import { Graphs } from 'components/stations/Graphs';
import ServerErrorToaster from 'components/ServerErrorToaster';
// Scroll the window to the top edge of the referenced element, or back to
// the top of the page when the ref has not been attached yet.
const scrollToRef = (ref) => {
    const top = ref.current !== null ? ref.current.offsetTop : 0;
    window.scrollTo(0, top);
};
// Detail view for one station: lets the user configure an eflows estimation
// (date range, secondary axis, mean-low-flow method) and renders the graphs
// once the fetched `eflows` data arrives via redux.
const StationDetails = ({ station, eflows, onFetchEflows }) => {
    const graphRef = useRef(null);
    const [showGraphs, setShowGraphs] = useState(false);
    // Date range picker
    const [startDate, setStartDate] = useState(new Date(2016, 0, 1));
    const [endDate, setEndDate] = useState(new Date(2016, 11, 31));
    // Secondary axis
    const [showSecondaryAxis, setShowSecondaryAxis] = useState(true);
    const [secondAxisTimeSeriesType, setSecondAxisTimeSeriesType] = useState(
        'TW',
    );
    const [secondAxisThreshold, setSecondAxisThreshold] = useState(16);
    const [secondAxisMeasurementType, setSecondAxisMeasurementType] = useState(
        'avg',
    );
    // Eflow configuration
    const [eflowMeasurementType, setEflowMeasurementType] = useState('avg');
    const [showProcessingBar, setShowProcessingBar] = useState(false);
    // Mean low flow configuration
    const [meanLowFlowMethod, setMeanLowFlowMethod] = useState('TNT30');
    const [
        meanLowFlowMethodFrequency,
        setMeanLowFlowMethodFrequency,
    ] = useState('BIOPERIOD');
    // Hide the processing bar as soon as data lands.
    useEffect(() => {
        if (eflows !== null && eflows.length !== 0) {
            setShowProcessingBar(false);
        }
    }, [eflows]);
    // NOTE(review): no dependency array — this runs after *every* render and
    // re-scrolls to the graphs while eflows is non-empty; confirm intentional.
    useEffect(() => {
        if (eflows !== null && eflows.length !== 0 && graphRef !== null) {
            scrollToRef(graphRef);
        }
    });
    useEffect(() => {
        if (showProcessingBar === true && graphRef !== null) {
            scrollToRef(graphRef);
        }
    }, [showProcessingBar]);
    // Any configuration change invalidates the previously fetched data.
    // NOTE(review): reassigning the `eflows` prop only mutates the local
    // binding, not the redux store — the real reset presumably happens when
    // onFetchEflows dispatches; verify.
    useEffect(() => {
        eflows = null; // eslint-disable-line no-param-reassign
        setShowGraphs(false);
    }, [
        startDate,
        endDate,
        secondAxisTimeSeriesType,
        secondAxisThreshold,
        meanLowFlowMethod,
        meanLowFlowMethodFrequency,
        station,
    ]);
    // Kick off an estimation run once the configuration is complete.
    const onRunEstimation = () => {
        if (
            startDate &&
            endDate &&
            secondAxisThreshold !== undefined &&
            secondAxisTimeSeriesType
        ) {
            eflows = null; // eslint-disable-line no-param-reassign
            setShowProcessingBar(true);
            setShowGraphs(true);
            onFetchEflows(
                startDate,
                endDate,
                secondAxisTimeSeriesType,
                meanLowFlowMethod,
                meanLowFlowMethodFrequency,
            );
        }
    };
    const onSetDateRange = ([from, to]) => {
        setStartDate(from);
        setEndDate(to);
    };
    const onSetMeanLowFlowMethod = (_meanLowFlowMethod) => {
        setMeanLowFlowMethod(_meanLowFlowMethod);
    };
    // Coerce the text input to a number, falling back to 0 on NaN.
    const onSetSecondAxisThreshold = (thresholdValue) => {
        let thresholdNumber = parseFloat(thresholdValue);
        if (!thresholdNumber) {
            thresholdNumber = 0;
        }
        setSecondAxisThreshold(thresholdNumber);
    };
    const eflowsTS = eflows !== null ? eflows.eflows_ts : [];
    const bioperiodsBoundaries = eflows ? eflows.bioperiods_boundaries : [];
    return (
        <>
            <Helmet>
                <title>{gettext('Station details')}</title>
                <body className="station-details" />
            </Helmet>
            <Container className="station-container">
                <ServerErrorToaster />
                <div className="content">
                    <div className="station">
                        <div className="title-block">
                            <Link
                                to={urlResolve('landing')}
                                className="btn-back"
                            />
                            <div className="title">{station.name}</div>
                        </div>
                        <Configuration
                            startDate={startDate}
                            endDate={endDate}
                            onSetDateRange={onSetDateRange}
                            secondAxisTimeSeriesType={secondAxisTimeSeriesType}
                            onSetSecondAxisTimeSeriesType={
                                setSecondAxisTimeSeriesType
                            }
                            secondAxisThreshold={secondAxisThreshold}
                            onSetSecondAxisThreshold={onSetSecondAxisThreshold}
                            meanLowFlowMethod={meanLowFlowMethod}
                            onSetMeanLowFlowMethod={onSetMeanLowFlowMethod}
                            meanLowFlowMethodFrequency={
                                meanLowFlowMethodFrequency
                            }
                            onSetMeanLowFlowMethodFrequency={
                                setMeanLowFlowMethodFrequency
                            }
                            onRunEstimation={onRunEstimation}
                        />
                        {(eflows !== null || showProcessingBar === true) &&
                            showGraphs && (
                                <Graphs
                                    stationName={station.name}
                                    startDate={startDate}
                                    endDate={endDate}
                                    showSecondaryAxis={showSecondaryAxis}
                                    secondAxisTimeSeriesType={
                                        secondAxisTimeSeriesType
                                    }
                                    showProcessingBar={showProcessingBar}
                                    eflowsTS={eflowsTS}
                                    eflowMeasurementType={eflowMeasurementType}
                                    secondAxisMeasurementType={
                                        secondAxisMeasurementType
                                    }
                                    secondAxisThreshold={secondAxisThreshold}
                                    bioperiodsBoundaries={bioperiodsBoundaries}
                                    graphRef={graphRef}
                                    onSetEflowMeasurementType={
                                        setEflowMeasurementType
                                    }
                                    onSetSecondAxisMeasurementType={
                                        setSecondAxisMeasurementType
                                    }
                                    setShowSecondaryAxis={setShowSecondaryAxis}
                                />
                            )}
                    </div>
                </div>
            </Container>
        </>
    );
};
StationDetails.propTypes = {
    station: StationShape.isRequired,
    // null until an estimation run has been fetched.
    eflows: EflowsResponseShape,
    onFetchEflows: PropTypes.func.isRequired,
};
StationDetails.defaultProps = {
    eflows: null,
};
// Resolve the station from the :stationId route param; eflows comes from the saga.
const mapStateToProps = (state, ownProps) => ({
    station: selectStation(state, ownProps.match.params.stationId),
    eflows: state.station.eflows,
});
// Binds the fetch saga, injecting the route's stationId ahead of the UI args.
const mapDispatchToProps = (dispatch, ownProps) => ({
    onFetchEflows: (
        from,
        to,
        secondAxisType,
        meanLowFlowMethod,
        meanLowFlowMethodFrequency,
    ) =>
        dispatch(
            fetchEflowsAction(
                ownProps.match.params.stationId,
                from,
                to,
                secondAxisType,
                meanLowFlowMethod,
                meanLowFlowMethodFrequency,
            ),
        ),
});
const StationDetailsViewConnector = connect(
    mapStateToProps,
    mapDispatchToProps,
)(StationDetails);
export default withView()(StationDetailsViewConnector);
|
<reponame>pvormste/graphql-go-tools<filename>pkg/document/argumentsdefinition.go
package document
import "github.com/jensneuse/graphql-go-tools/pkg/lexing/position"
// ArgumentsDefinition as specified in:
// http://facebook.github.io/graphql/draft/#ArgumentsDefinition
type ArgumentsDefinition struct {
	InputValueDefinitions InputValueDefinitions
	Position              position.Position
}

// NOTE(review): the panic("implement me") methods below appear to exist only
// to satisfy a broad shared Node interface; only NodeInputValueDefinitions
// and NodePosition carry real data for an arguments definition — confirm
// against the interface declaration before calling any of the others.

func (a ArgumentsDefinition) NodeSelectionSet() int {
	panic("implement me")
}
func (a ArgumentsDefinition) NodeInputFieldsDefinition() int {
	panic("implement me")
}

// NodeInputValueDefinitions returns the argument list itself.
func (a ArgumentsDefinition) NodeInputValueDefinitions() InputValueDefinitions {
	return a.InputValueDefinitions
}
func (a ArgumentsDefinition) NodeName() ByteSliceReference {
	panic("implement me")
}
func (a ArgumentsDefinition) NodeAlias() ByteSliceReference {
	panic("implement me")
}
func (a ArgumentsDefinition) NodeDescription() ByteSliceReference {
	panic("implement me")
}
func (a ArgumentsDefinition) NodeArgumentSet() int {
	panic("implement me")
}
func (a ArgumentsDefinition) NodeArgumentsDefinition() int {
	panic("implement me")
}
func (a ArgumentsDefinition) NodeDirectiveSet() int {
	panic("implement me")
}
func (a ArgumentsDefinition) NodeEnumValuesDefinition() EnumValueDefinitions {
	panic("implement me")
}
func (a ArgumentsDefinition) NodeFields() []int {
	panic("implement me")
}
func (a ArgumentsDefinition) NodeFieldsDefinition() FieldDefinitions {
	panic("implement me")
}
func (a ArgumentsDefinition) NodeFragmentSpreads() []int {
	panic("implement me")
}
func (a ArgumentsDefinition) NodeInlineFragments() []int {
	panic("implement me")
}
func (a ArgumentsDefinition) NodeVariableDefinitions() []int {
	panic("implement me")
}
func (a ArgumentsDefinition) NodeType() int {
	panic("implement me")
}
func (a ArgumentsDefinition) NodeOperationType() OperationType {
	panic("implement me")
}
func (a ArgumentsDefinition) NodeValue() int {
	panic("implement me")
}
func (a ArgumentsDefinition) NodeDefaultValue() int {
	panic("implement me")
}
func (a ArgumentsDefinition) NodeImplementsInterfaces() ByteSliceReferences {
	panic("implement me")
}
func (a ArgumentsDefinition) NodeSchemaDefinition() SchemaDefinition {
	panic("implement me")
}
func (a ArgumentsDefinition) NodeScalarTypeDefinitions() []int {
	panic("implement me")
}
func (a ArgumentsDefinition) NodeObjectTypeDefinitions() []int {
	panic("implement me")
}
func (a ArgumentsDefinition) NodeInterfaceTypeDefinitions() []int {
	panic("implement me")
}
func (a ArgumentsDefinition) NodeUnionTypeDefinitions() []int {
	panic("implement me")
}
func (a ArgumentsDefinition) NodeEnumTypeDefinitions() []int {
	panic("implement me")
}
func (a ArgumentsDefinition) NodeInputObjectTypeDefinitions() []int {
	panic("implement me")
}
func (a ArgumentsDefinition) NodeDirectiveDefinitions() []int {
	panic("implement me")
}
func (a ArgumentsDefinition) NodeUnionMemberTypes() []int {
	panic("implement me")
}
func (a ArgumentsDefinition) NodeValueType() ValueType {
	panic("implement me")
}
func (a ArgumentsDefinition) NodeValueReference() int {
	panic("implement me")
}

// NodePosition reports where this definition appears in the source document.
func (a ArgumentsDefinition) NodePosition() position.Position {
	return a.Position
}

// ArgumentsDefinitions is a collection of ArgumentsDefinition nodes.
type ArgumentsDefinitions []ArgumentsDefinition
|
#!/bin/bash
# Copyright 2020 the Velero contributors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script is run by CI on pull requests: it fails the build when the PR
# has not added a changelog file named after its PR number under
# changelogs/unreleased. (The previous header here described docker-push,
# which is a different script.)
set +x
if [[ -z "$CI" ]]; then
    echo "This script is intended to be run only on Github Actions." >&2
    exit 1
fi
CHANGELOG_PATH='changelogs/unreleased'
# https://help.github.com/en/actions/reference/events-that-trigger-workflows#pull-request-event-pull_request
# GITHUB_REF is something like "refs/pull/:prNumber/merge"
pr_number=$(echo $GITHUB_REF | cut -d / -f 3)
change_log_file="${CHANGELOG_PATH}/${pr_number}-*"
# `ls` with an unquoted glob succeeds iff at least one matching file exists.
if ls ${change_log_file} 1> /dev/null 2>&1; then
    echo "changelog for PR ${pr_number} exists"
    exit 0
else
    echo "PR ${pr_number} is missing a changelog. Please refer https://velero.io/docs/master/code-standards/#adding-a-changelog and add a changelog."
    exit 1
fi
|
<reponame>felipe-ga/orders
package com.evoluta.orders.domain.repository;
import com.evoluta.orders.infrastructure.entity.OrderEntity;
import org.springframework.data.repository.CrudRepository;
import java.util.List;
/** Spring Data CRUD repository for {@link OrderEntity}. */
public interface OrderRepositoryCrud extends CrudRepository<OrderEntity, Integer> {
    // Narrows CrudRepository's Iterable<OrderEntity> return type to List.
    List<OrderEntity> findAll();
    // NOTE(review): redundant — CrudRepository already declares save(); kept
    // only as an explicit contract for this entity.
    OrderEntity save(OrderEntity order);
}
|
/*
* Copyright 2002-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.webauthn4j.data.attestation.statement;
import com.webauthn4j.converter.util.JsonConverter;
import com.webauthn4j.converter.util.ObjectConverter;
import org.junit.jupiter.api.Test;
import static org.assertj.core.api.Assertions.assertThat;
/** Verifies JSON deserialization of the {@code Response} DTO. */
class ResponseTest {
    // Shared JSON converter; construction is cheap enough per test class.
    private final JsonConverter jsonConverter = new ObjectConverter().getJsonConverter();
    @SuppressWarnings("ConstantConditions")
    @Test
    void error_test() {
        // An {"error": ...} payload must surface through Response.getError().
        Response response = jsonConverter.readValue("{\"error\": \"message\"}", Response.class);
        assertThat(response.getError()).isEqualTo("message");
    }
}
<reponame>snatchev/branding.rb
require 'optparse'
require 'ostruct'
module Branding
  # Command-line front end: parses options from ARGV and renders a logo file.
  class CLI
    # args - ARGV-style array; the last element is treated as the logo FILE.
    def initialize(args)
      @options = OpenStruct.new
      @options.file = args.last
      @parser = OptionParser.new do |opts|
        opts.banner = 'Usage: branding FILE'
        # OptionParser coerces -p to one of the listed symbols and rejects others.
        opts.on('-p PIXEL',
          '--pixel=PIXEL',
          [:normal, :hires, :hicolor],
          'The pixel rendering algorithm (`normal`, `hires`, or `hicolor`)') do |pixel_algo|
          @options.algo = pixel_algo.to_sym
        end
        opts.on_tail('-h', '--help', 'Show this message') do
          puts opts
          exit
        end
      end
      @parser.parse!(args)
      @options
    end

    # Renders the logo, overriding its algorithm when -p was given.
    def run
      logo = Branding::Logo.new(@options.file)
      logo.algo = @options.algo if @options.algo
      logo.print
    end
  end
end
|
#!/usr/bin/env bash
# Runs one unit's test suite inside the project builder container, optionally
# spinning up a throwaway EVM chain first when a chain id is supplied.
set -e
root=$( cd "$( dirname "${BASH_SOURCE[0]}" )/.." >/dev/null 2>&1 && pwd )
project=$(grep -m 1 '"name":' "$root/package.json" | cut -d '"' -f 4)
# make sure a network for this project has been created
docker swarm init 2> /dev/null || true
docker network create --attachable --driver overlay "$project" 2> /dev/null || true
unit=$1
cmd=$2
chain_id=$3
# If file descriptors 0-2 exist, then we're prob running via interactive shell instead of on CD/CI
if [[ -t 0 && -t 1 && -t 2 ]]
then interactive=(--interactive --tty)
else echo "Running in non-interactive mode"
fi
# Merge node + router default configs into a single JSON blob.
config=$(
    cat "$root/ops/config/node.default.json" "$root/ops/config/router.default.json" |\
    jq -s '.[0] + .[1]'
)
########################################
# If we need a chain for these tests, start the evm & stop it when we're done
eth_mnemonic="candy maple cake sugar pudding cream honey rich smooth crumble sweet treat"
if [[ -n "$chain_id" ]]
then
    # Default host port is derived from the chain id (1337 -> 8545, etc.).
    port="${VECTOR_CHAIN_PORT:-$(( 8545 - 1337 + chain_id ))}"
    ethprovider_host="evm_$chain_id"
    chain_data="$root/.chaindata/$chain_id"
    mkdir -p "$chain_data"
    function cleanup {
        echo "Tests finished, stopping evm.."
        docker container stop "$ethprovider_host" 2> /dev/null || true
    }
    trap cleanup EXIT SIGINT SIGTERM
    docker run \
        --detach \
        --entrypoint bash \
        --env "CHAIN_ID=$chain_id" \
        --env "EVM=hardhat" \
        --env "MNEMONIC=$eth_mnemonic" \
        --mount "type=bind,source=$chain_data,target=/data" \
        --mount "type=bind,source=$root,target=/root" \
        --name "$ethprovider_host" \
        --network "$project" \
        --publish "$port:8545" \
        --rm \
        --tmpfs "/tmp" \
        "${project}_builder" modules/contracts/ops/entry.sh
    # Poll until contract deployment has written the registry address,
    # bailing out if the provider container died in the meantime.
    chain_addresses="$chain_data/chain-addresses.json"
    while ! grep -qs "transferRegistryAddress" "$chain_addresses"
    do
        if [[ -z $(docker container ls -f "name=$ethprovider_host" -q) ]]
        then echo "$ethprovider_host was not able to start up successfully" && exit 1
        else sleep 1
        fi
    done
    echo "Provider for chain ${chain_id} is awake & ready to go on port ${port}!"
    CHAIN_ADDRESSES=$(cat "$chain_addresses")
    CHAIN_PROVIDERS="{\"$chain_id\":\"http://$ethprovider_host:8545\"}"
    echo "CHAIN_PROVIDERS=${CHAIN_PROVIDERS}"
    echo "CHAIN_ADDRESSES=${CHAIN_ADDRESSES}"
    # Fold the chain info into the merged config blob.
    config=$(echo "$config" '{"chainProviders":'"$CHAIN_PROVIDERS"'}' | jq -s '.[0] + .[1]')
    config=$(echo "$config" '{"chainAddresses":'"$CHAIN_ADDRESSES"'}' | jq -s '.[0] + .[1]')
else
    CHAIN_PROVIDERS="{}"
    CHAIN_ADDRESSES="{}"
fi
docker run \
    "${interactive[@]}" \
    --entrypoint="bash" \
    --env="CI=$CI" \
    --env="CHAIN_ADDRESSES=$CHAIN_ADDRESSES" \
    --env="CHAIN_PROVIDERS=$CHAIN_PROVIDERS" \
    --env="LOG_LEVEL=$LOG_LEVEL" \
    --env="SUGAR_DADDY=$eth_mnemonic" \
    --env="VECTOR_CONFIG=$config" \
    --name="${project}_test_$unit" \
    --network "$project" \
    --rm \
    --tmpfs="/tmp" \
    --volume="$root:/root" \
    "${project}_builder" "/test.sh" "$unit" "$cmd"
|
#!/bin/bash
# Generates example configs via $1 (CONFIGGEN) into $2 (OUT_DIR), copies the
# remaining argument files in (.pem certs into certs/), and tars the result.
set -e
CONFIGGEN="$1"
shift
OUT_DIR="$1"
shift
mkdir -p "$OUT_DIR/certs"
"$CONFIGGEN" "$OUT_DIR"
# Bug fix: iterate over "$@" instead of $*; an unquoted $* re-splits every
# argument on whitespace, breaking any file path that contains spaces.
for FILE in "$@"; do
    case "$FILE" in
        *.pem)
            cp "$FILE" "$OUT_DIR/certs"
            ;;
        *)
            cp "$FILE" "$OUT_DIR"
            ;;
    esac
done
# tar is having issues with -C for some reason so just cd into OUT_DIR.
(cd "$OUT_DIR"; tar -hcvf example_configs.tar *.json *.yaml certs/*.pem)
|
#!/bin/sh
# Environment setup for the isucon user: prepend every locally-installed
# language runtime to PATH and export per-language variables.
export PATH=/usr/local/bin:$PATH
export PATH=/usr/local/go/bin:$PATH
export PATH=/home/isucon/.local/ruby/bin:$PATH
export PATH=/home/isucon/.local/python/bin:$PATH
export PATH=/home/isucon/.local/perl/bin:$PATH
export PATH=/home/isucon/.local/php/bin:$PATH
export PATH=/home/isucon/.local/php/sbin:$PATH
export PATH=/home/isucon/.local/redis/bin:$PATH
export GOPATH=/home/isucon/gocode
export PERL_CARTON_PATH=/home/isucon/.local/carton
# Placeholder key; real deployments are expected to override this.
export ISUCON_API_KEY=NO_API_KEY
|
import React, { useState, useRef, useEffect } from 'react'
import * as d3 from 'd3'
import { throttle } from 'lodash'
import { useToggleEffect, useConditionalEffect } from 'react-svg-utils'
// Animated Monte-Carlo sampler: scatters `data` random points over a 500x500
// square containing a quarter circle of radius 500 (centred at (0, 500)) and
// reports running in/out counts to the parent via updateCount.
const MonteCarlo = ({ data, updateCount }:
    {
        data: number | null,
        updateCount: (counts: { countIn: number, countOut: number }) => void
    }) => {
    const svg = useRef<SVGSVGElement>(null),
        // Mutable scratch state shared across effects without triggering renders.
        { current: ctx } = useRef<any>({})
    // init and clear
    useToggleEffect(() => data !== null, () => {
        ctx.points = d3.select(svg.current).select('.points')
        ctx.countIn = 0
        ctx.countOut = 0
        console.log('init')
        return () => {
            console.log('clear')
            ctx.countIn = 0
            ctx.countOut = 0
        }
    }, [data])
    // draw
    useConditionalEffect(() => data !== null, () => {
        console.log('draw', data)
        // Inside test against the quarter circle; update is throttled so the
        // parent isn't re-rendered for every single point's transition end.
        const pointIsIn = (p: [number, number]) => p[0]**2 + (500-p[1])**2 <= 500*500,
            update = throttle(updateCount, 20)
        ctx.points.selectAll("*").interrupt()
        ctx.countIn = 0
        ctx.countOut = 0
        ctx.points.selectAll('.point').remove()
        ctx.points
            .selectAll('.point')
            .data(d3.range(data || 0).map(() => [Math.random()*500,Math.random()*500]))
            .enter()
            .append('circle')
            .attr('class', (d:[number, number]) => pointIsIn(d) ? 'point point-in' : 'point point-out')
            .attr('cx', (d:any) => d[0])
            .attr('cy', (d:any) => d[1])
            .attr('r', 1)
            .attr('opacity', 0)
            .transition()
            .delay((_d:any, i:number) => i * 1)
            .attr('opacity', 1)
            .on('end', (d: [number, number], i:number) => {
                // Counts are accumulated as each point's fade-in finishes.
                if (pointIsIn(d)) {
                    ctx.countIn++
                } else {
                    ctx.countOut++
                }
                update({
                    countIn: ctx.countIn,
                    countOut: ctx.countOut
                })
            })
    }, [data, updateCount])
    if (data === null) {
        return null
    }
    // render
    return (
        <svg width="500" height="500" ref={svg}>
            <circle cx="0" cy="500" r="500" fill="#006" fillOpacity="20%" stroke="none" />
            <rect fill="none" stroke="#000" strokeWidth="5" width="500" height="500" />
            <g className="points" />
        </svg>
    )
}
// Interactive π estimator: the slider picks the sample count, "Go" starts a
// MonteCarlo run, and π is approximated as 4 * in / (in + out).
export const MonteCarloPi = () => {
    const [iterations, setIterations] = useState(100),
        // null means "no run active"; set to the iteration count to start one.
        [data, setData] = useState<number | null>(null),
        [counts, setCounts] = useState({ countIn: 0, countOut: 0 })
    return (
        <div>
            <div>
                <input type="range" step="10" min="100" max="10000"
                    defaultValue={iterations}
                    onChange={e => setIterations(+e.target.value)}
                />
                <div>{iterations} Iterations</div>
                <div>Samples In: {counts.countIn}</div>
                <div>Samples Out: {counts.countOut}</div>
                <div>Total: {counts.countIn+counts.countOut}</div>
                { counts.countIn+counts.countOut > 0 &&
                    <div>π ≈ {4 * counts.countIn / (counts.countIn+counts.countOut)}</div>
                }
                <button onClick={() => setData(iterations)}>Go</button>
                <button onClick={() => setData(null)}>Reset</button>
            </div>
            <MonteCarlo
                data={data}
                updateCount={setCounts}
            />
        </div>
    )
}
# Bug fix: `def` is only valid inside a module — the original top-level
# definition fails to compile. Wrap it in a module and call it through it.
defmodule MathUtil do
  @doc "Returns the square root of `num` as a float."
  def square_root(num) do
    :math.sqrt(num)
  end
end

result = MathUtil.square_root(16)
IO.inspect(result)
/* Comparison with logical AND: true because both operands are true */
console.log((5>3) && (2<3))
/* Comparison with logical OR: true because the strict equality on the right holds */
console.log('Andre'=='Fernanda' || "123"==="123")
/* Comparison with NOT: negates a true equality, printing false */
console.log(!("sara" == "sara"))
#!/bin/bash
# SLURM batch job: trains the IIC two-head clustering model on MNIST with the
# Sinkhorn variant, on 4 V100 GPUs for up to 2 days.
#SBATCH -N 1
#SBATCH --partition=batch
#SBATCH -J Sinkhorn
#SBATCH -o ./SLURM_jobs/Sinkhorn.%J.out
#SBATCH -e ./SLURM_jobs/Sinkhorn.%J.err
#SBATCH --time=2-00:00:00
#SBATCH --gres=gpu:v100:4
#SBATCH --mem=300G
#SBATCH --constraint=[gpu]
#run the application:
module load anaconda3/4.4.0
source /home/hinnertr/.bashrc
conda activate ~/.conda/envs/IIC-Clustering/
module load cuda/10.0.130
PYTHONPATH='.' python3 src/scripts/cluster/cluster_greyscale_twohead_sinkhorn.py --model_ind 686 --arch ClusterNet6cTwoHead --mode IID --dataset MNIST --dataset_root datasets/MNIST_twohead --gt_k 10 --output_k_A 50 --output_k_B 10 --lamb_A 1.0 --lamb_B 1.0 --lr 0.0001 --num_epochs 50 --batch_sz 4000 --num_dataloaders 5 --num_sub_heads 5 --num_sinkhorn_dataloaders 5 --sinkhorn_batch_size 4096 --sinkhorn_WS_radius 0.01 --crop_orig --crop_other --tf1_crop centre_half --tf2_crop random --tf1_crop_sz 20 --tf2_crop_szs 16 20 24 --input_sz 24 --rot_val 25 --no_flip --head_B_epochs 2 --out_root out/MNIST_twohead_Sinkhorn
require "minitest/autorun"
require "imazen_licensing"
require_relative "support/license_test_base"
require "digest"
module ImazenLicensing
  # Licensing tests used for store-issued licenses: domain normalization,
  # domain validation, and a golden-file comparison of a generated license.
  class TestsForStoreUse < ImazenLicensing::LicenseTestBase
    # Lazily loads the test signing key from the support directory.
    def key
      @key ||= File.read("#{File.dirname(__FILE__)}/support/test_private_key.pem")
    end

    # NOTE(review): '<PASSWORD>' looks like a dataset-redaction placeholder,
    # not a real passphrase — confirm the expected value for this test key.
    def passphrase
      '<PASSWORD>'
    end

    def test_normalize_domain
      v = ImazenLicensing::DomainValidator.new
      # Scheme, trailing slash, surrounding dots/whitespace are all stripped.
      assert_equal "domain.com", v.normalize_domain_string("https://domain.com/")
      assert_equal "domain.com", v.normalize_domain_string("http://domain.com/")
      assert_equal "domain.com", v.normalize_domain_string(".domain.com.")
      assert_equal "mydomain.domain.domain.com", v.normalize_domain_string(" mydomain.domain.domain.com ")
    end

    def test_validate_domain
      v = ImazenLicensing::DomainValidator.new
      # Bare public suffixes are rejected outright.
      [".com", ".net", ".co.uk", "blogspot.com"].each do |domain|
        assert_includes v.domain_error(domain), "is not a valid domain (per publicsuffix.org)"
      end
      # Shared hosting platforms are rejected as license domains.
      ["apphb.com", "cloudapp.net", "azurewebsites.net"].each do |domain|
        assert_includes v.domain_error(domain), "is not a valid domain"
      end
      # Ports are not allowed in license domains.
      ["domain.com:1215"].each do |domain|
        assert_includes v.domain_error(domain), "format invalid"
      end
    end

    # Generates (or compares against) the exported golden license file.
    def test_v4_domain_offline_creative_for_store
      h = {
        kind: 'v4-domain-offline',
        sku: 'R4Creative',
        domain: 'acme.com',
        owner: 'Acme Corp',
        issued: DateTime.parse('2017-04-21'),
        features: ['R4Creative', 'R4Performance']
      }
      license_compare_or_export(__method__.to_s, h)
    end
  end
end
<gh_stars>0
import {useDispatch, useSelector} from "react-redux"
import React, {useEffect} from "react"
import {LeftPane} from "./LeftPane"
import {XRightPane} from "./RightPane"
import Handlers from "../state/Handlers"
import StatusBar from "./StatusBar"
import TabBar from "./TabBar/TabBar"
import TabContent from "./TabContent"
import Tabs from "../state/Tabs"
import useSearchShortcuts from "./useSearchShortcuts"
import styled from "styled-components"
// Layout primitives for the search page (flex column/row shells).
const ColumnLayout = styled.div`
    display: flex;
    overflow: hidden;
    flex-grow: 1;
    flex-flow: column;
    position: relative;
`
const RowLayout = styled.div`
    display: flex;
    overflow: hidden;
    flex-grow: 1;
    flex-flow: row;
    position: relative;
    padding-top: 1px;
`
const SearchPageMain = styled.div`
    display: flex;
    height: 100%;
`
const SearchPageWrapper = styled.div`
    height: 100%;
    display: flex;
    flex-direction: column;
    position: relative;
    animation: fadein 300ms;
`
// Top-level search page: tab bar + left pane + active tab content + right
// pane + status bar. Keyboard shortcuts are registered via useSearchShortcuts.
export default function SearchPage() {
    const dispatch = useDispatch()
    const tabId = useSelector(Tabs.getActive)
    useSearchShortcuts()
    // On unmount only: abort any in-flight request handlers.
    useEffect(() => () => dispatch(Handlers.abortAll()), [])
    return (
        <SearchPageWrapper>
            <SearchPageMain>
                <ColumnLayout>
                    <div id="modal-dialog-root" />
                    <TabBar />
                    <RowLayout>
                        <LeftPane />
                        <ColumnLayout>
                            {/* key=tabId forces a remount when the active tab changes */}
                            <TabContent key={tabId} />
                        </ColumnLayout>
                        <XRightPane key={tabId} />
                    </RowLayout>
                    <StatusBar />
                </ColumnLayout>
            </SearchPageMain>
        </SearchPageWrapper>
    )
}
|
<gh_stars>0
# The Book of Ruby - http://www.sapphiresteel.com
# Wraps an arbitrary collection and exposes the full Enumerable API
# (max, min, collect, include?, ...) by delegating iteration to it.
class MyCollection
  include Enumerable

  def initialize( someItems )
    @items = someItems
  end

  # Enumerable's single required hook: yield each wrapped item in turn.
  def each(&block)
    @items.each(&block)
  end
end
# Demo data: a plain array plus a hash's keys and values, each wrapped in
# MyCollection to exercise the Enumerable methods it gains via `each`.
things = MyCollection.new(['z','xy','defgh','ij','abc','klmnopqr'])
h = {'one'=>'for sorrow',
  'two'=>'for joy',
  'three'=>'for a girl',
  'four'=>'for a boy',
  'five'=>'for sorrow',
  'six'=>'for gold',
  'seven'=>'for a secret never to be told' }
h_keys = MyCollection.new( h.keys )
h_vals = MyCollection.new( h.values )
puts( '=========== things =============' )
x = things.collect{ |i| i }
p( x )
y = things.max
p( y )
z = things.min
p( z )
# 'def' is deliberately absent (only 'defgh' exists) => false.
p( things.include?('def') )
p( things.include?('xy') )
puts( '=========== h_keys =============' )
x = h_keys.collect{ |i| i }
p( x )
y = h_keys.max
p( y )
z = h_keys.min
p( z )
p( h_keys.include?('two') )
p( h_keys.include?('twenty') )
puts( '=========== h_vals =============' )
x = h_vals.collect{ |i| i }
p( x )
y = h_vals.max
p( y )
z = h_vals.min
p( z )
p( h_vals.include?('for joy') )
p( h_vals.include?('for misery') )
#!/usr/bin/env bash
set -euo pipefail
# $1 - JSON file path
# $2 - key/path within JSON file to return the value of
# $1 - JSON file path
# $2 - key/path within JSON file to return the value of
# Prints the value found at jq path $2 inside file $1. Exits non-zero with a
# message on stderr when an argument is missing, the file is unreadable, or
# the key is absent/null (jq --exit-status).
function tyk::get_from_json() {
    if [ -z "${1:-}" ]; then
        echo >&2 "must provide a file path to ${FUNCNAME[0]}"
        exit 1
    fi
    if [ -z "${2:-}" ]; then
        echo >&2 "must provide a JSON key/path to ${FUNCNAME[0]}"
        exit 1
    fi
    if [ ! -r "$1" ]; then
        # Robustness fix: fall back to $0 when script_name is unset — the old
        # ${script_name:?} expansion aborted with its own error before this
        # message could ever be printed.
        echo >&2 "${script_name:-$0} cannot read '$1'."
        exit 1
    fi
    # Idiom fix: the original quoted the path as ''"$2"'' — the empty ''
    # pairs are no-ops; a plain quoted expansion is equivalent and clearer.
    if ! value=$(jq --exit-status --raw-output "$2" "$1"); then
        echo >&2 "'$1' did not contain a value under the '$2' key."
        exit 1
    fi
    echo "$value"
}
# Convenience accessors: each reads one well-known value from the JSON files
# under $TYK_ROOT/lib. TYK_ROOT must be set (':?' aborts otherwise).
function tyk::key() {
    tyk::get_from_json "${TYK_ROOT:?}/lib/key.json" ".key"
}
function tyk::org_id() {
    tyk::get_from_json "$TYK_ROOT/lib/org_id.json" ".org_id"
}
function tyk::token() {
    tyk::get_from_json "$TYK_ROOT/lib/token.json" ".token"
}
|
<reponame>hapramp/1Rramp-Android
package com.hapramp.views;
import android.content.Context;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.util.AttributeSet;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.hapramp.R;
import com.hapramp.utils.ImageHandler;
import java.util.List;
import butterknife.BindView;
import butterknife.ButterKnife;
/**
 * Dropdown-style view listing username suggestions for an @-mention being
 * typed. Shows a "searching" row while a lookup is in flight, then up to
 * {@link #MAX_SUGGESTIONS} tappable rows; taps are reported through
 * {@link MentionsSuggestionPickListener}.
 */
public class UserMentionSuggestionListView extends FrameLayout {
    /** Upper bound on rendered suggestion rows. */
    private static final int MAX_SUGGESTIONS = 8;
    @BindView(R.id.mentions_container)
    LinearLayout mentionsContainer;
    private Context context;
    private MentionsSuggestionPickListener mentionsSuggestionPickListener;

    public UserMentionSuggestionListView(@NonNull Context context) {
        super(context);
        init(context);
    }

    public UserMentionSuggestionListView(@NonNull Context context, @Nullable AttributeSet attrs) {
        super(context, attrs);
        init(context);
    }

    public UserMentionSuggestionListView(@NonNull Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        init(context);
    }

    /** Inflates the container layout and binds views; shared by all constructors. */
    private void init(Context context) {
        this.context = context;
        View rootView = LayoutInflater.from(context).inflate(R.layout.user_mention_suggestion_container_view, this);
        ButterKnife.bind(this, rootView);
    }

    /** Replaces any current rows with a single "searching…" placeholder row. */
    public void onSearching() {
        try {
            mentionsContainer.removeAllViews();
            View view = LayoutInflater.from(context).inflate(R.layout.user_mention_suggestion_item_row_searching,
                null);
            mentionsContainer.addView(view, 0, new LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT));
        }
        catch (Exception e) {
            // Best-effort UI update; inflating a placeholder must never crash typing.
            e.printStackTrace();
        }
    }

    /**
     * Renders up to {@link #MAX_SUGGESTIONS} of the given usernames as rows
     * with avatar + name; tapping a row notifies the pick listener.
     *
     * @param suggestions candidate usernames, best match first
     */
    public void addSuggestions(List<String> suggestions) {
        // Idiom: Math.min replaces the ternary size() > 8 ? 8 : size().
        int max = Math.min(suggestions.size(), MAX_SUGGESTIONS);
        mentionsContainer.removeAllViews();
        for (int i = 0; i < max; i++) {
            String u = suggestions.get(i);
            View view = LayoutInflater.from(context).inflate(R.layout.user_mention_suggestion_item_row,
                null);
            // The username rides along as the view tag so the shared click
            // listener can recover it.
            view.setTag(u);
            view.setOnClickListener(new OnClickListener() {
                @Override
                public void onClick(View view) {
                    if (mentionsSuggestionPickListener != null) {
                        mentionsSuggestionPickListener.onUserPicked((String) view.getTag());
                    }
                }
            });
            ImageView im = view.findViewById(R.id.user_pic);
            TextView tv = view.findViewById(R.id.username);
            tv.setText(u);
            ImageHandler.loadCircularImage(context, im, String.format(context.getResources().getString(R.string.steem_user_profile_pic_format), u));
            mentionsContainer.addView(view, i, new LinearLayout.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));
        }
    }

    public void setMentionsSuggestionPickListener(MentionsSuggestionPickListener mentionsSuggestionPickListener) {
        this.mentionsSuggestionPickListener = mentionsSuggestionPickListener;
    }

    /** Callback for when the user taps one of the suggested usernames. */
    public interface MentionsSuggestionPickListener {
        void onUserPicked(String username);
    }
}
|
# Computes n! (factorial of a non-negative integer).
# Iterative (reduce) instead of recursive, so large n cannot blow the stack,
# and negative input raises instead of recursing forever as before.
def factorial(n)
  raise ArgumentError, "n must be non-negative" if n < 0
  (1..n).reduce(1, :*)
end
#!/bin/bash
THREADS=1
SKIP_PRE=0
# Print command-line usage to stderr (one heredoc instead of per-line echos;
# output is byte-identical).
function print_usage() {
    cat >&2 <<EOF
Usage: $0 [-i /path/to/file] [-o /path/to/folder] [-r /path/to/file] [-s insert size] [-d standard deviation] [-l read length] [-c coverage] [-t threads]
 -h, --help Help
 -i, --inp Path to input file
 -o, --out Path to output folder
 -r, --ref Path to reference file
 -s, --insSz Median Insert size
 -d, --stdDev Median Standard deviation
 -l, --readLen Read length
 -c, --cov Coverage
 -t, --threads Threads
EOF
}
# Parse CLI options into the global INP_FILE / OUT_FOLDER / REF_FILE / INS_SZ /
# STD_DEV / READ_LEN / COVERAGE / THREADS / SKIP_PRE variables.
# With no arguments, prints usage and exits. Unknown options are silently
# skipped (unchanged behavior).
function parse_arguments() {
    if [[ -z ${1:-} ]]; then
        print_usage
        exit
    else
        while [ "${1:-}" != "" ]; do
            case "$1" in
            -h | --help)
                print_usage
                exit
                ;;
            -i | --inp)
                shift
                INP_FILE=$1
                ;;
            -o | --out)
                shift
                OUT_FOLDER=$1
                # Ensure a trailing slash: later code concatenates file names
                # directly onto OUT_FOLDER.
                # (Fixed: the test below quoted its operand; unquoted it broke
                # for folder names containing whitespace.)
                LAST_CHAR_OUT_FOLDER="${OUT_FOLDER: -1}"
                if [ "$LAST_CHAR_OUT_FOLDER" != "/" ]; then
                    OUT_FOLDER="${OUT_FOLDER}/"
                fi
                ;;
            -r | --ref)
                shift
                REF_FILE=$1
                ;;
            -s | --insSz)
                shift
                INS_SZ=$1
                ;;
            -d | --stdDev)
                shift
                STD_DEV=$1
                ;;
            -l | --readLen)
                shift
                READ_LEN=$1
                ;;
            -c | --cov)
                shift
                COVERAGE=$1
                ;;
            -t | --threads)
                shift
                THREADS=$1
                ;;
            --skip)
                shift
                SKIP_PRE=$1
                ;;
            esac
            shift
        done
    fi
}
# Validate and echo (to stderr) the parsed arguments. Missing options set
# INVALID_ARG and usage is printed once at the end; values that are present
# but invalid exit immediately. Creates OUT_FOLDER when it does not exist.
function print_arguments() {
    INVALID_ARG=0
    if [[ -z $INP_FILE ]]; then
        echo "ERROR: -i missing" >&2
        INVALID_ARG=1
    else
        if [[ -e $INP_FILE ]]; then
            echo " Input file:" $INP_FILE >&2
        else
            echo "ERROR: " $INP_FILE "does not exist" >&2
            exit
        fi
    fi
    if [[ -z $OUT_FOLDER ]]; then
        echo "ERROR: -o missing" >&2
        INVALID_ARG=1
    else
        if [[ -d $OUT_FOLDER ]]; then
            echo " Output folder:" $OUT_FOLDER >&2
        else
            # Create folder if it doesn't exist
            mkdir -p $OUT_FOLDER
            echo " Output folder:" $OUT_FOLDER "created" >&2
        fi
    fi
    if [[ -z $REF_FILE ]]; then
        echo "ERROR: -r missing" >&2
        INVALID_ARG=1
    else
        if [[ -e $REF_FILE ]]; then
            echo " Reference file:" $REF_FILE >&2
        else
            echo "ERROR: " $REF_FILE "does not exist" >&2
            exit
        fi
    fi
    if [[ -z $INS_SZ ]]; then
        echo "ERROR: -s missing" >&2
        INVALID_ARG=1
    else
        if [[ $INS_SZ -gt 0 ]]; then
            echo " Insert size:" $INS_SZ >&2
        else
            echo "ERROR: Insert size" $INS_SZ "is invalid" >&2
            exit
        fi
    fi
    if [[ -z $STD_DEV ]]; then
        echo "ERROR: -d missing" >&2
        INVALID_ARG=1
    else
        if [[ $STD_DEV -gt 0 ]]; then
            echo " Standard deviation:" $STD_DEV >&2
        else
            echo "ERROR: Standard deviation" $STD_DEV "is invalid" >&2
            exit
        fi
    fi
    # Fixed copy-paste bug: this block previously re-tested INS_SZ, so a
    # missing -l (READ_LEN) was never reported.
    if [[ -z $READ_LEN ]]; then
        echo "ERROR: -l missing" >&2
        INVALID_ARG=1
    else
        if [[ $READ_LEN -gt 0 ]]; then
            echo " Read length:" $READ_LEN >&2
        else
            echo "ERROR: Read length" $READ_LEN "is invalid" >&2
            exit
        fi
    fi
    if [[ -z $COVERAGE ]]; then
        echo "ERROR: -c missing" >&2
        INVALID_ARG=1
    else
        if [[ $COVERAGE -gt 0 ]]; then
            echo " Coverage:" $COVERAGE >&2
        else
            echo "ERROR: Coverage" $COVERAGE "is invalid" >&2
            exit
        fi
    fi
    if [[ $THREADS -gt 0 ]]; then
        echo " Threads:" $THREADS >&2
    else
        echo "ERROR: Threads" $THREADS "is invalid" >&2
        exit
    fi
    if [[ $INVALID_ARG -eq 1 ]]; then
        print_usage
        exit
    fi
}
# --- main pipeline -----------------------------------------------------------
# Fixed: "$@" is now quoted so arguments containing spaces survive parsing,
# and all path expansions are quoted for the same reason.
parse_arguments "$@"
print_arguments
# Per-stage working directories (OUT_FOLDER is guaranteed to end in "/").
mkdir -p "$OUT_FOLDER"tmp/pre "$OUT_FOLDER"tmp/dels "$OUT_FOLDER"tmp/ins "$OUT_FOLDER"tmp/post
# Run the pipeline stages: preprocessing, deletions, postprocessing,
# assembly, then insertions on the assembled contigs.
"$(dirname "$0")"/HyINDEL_pre -i "$INP_FILE" -o "$OUT_FOLDER" --skip "$SKIP_PRE"
"$(dirname "$0")"/HyINDEL_dels -i "$INP_FILE" -o "$OUT_FOLDER" -s "$INS_SZ" -d "$STD_DEV" -l "$READ_LEN" -c "$COVERAGE" -t "$THREADS"
"$(dirname "$0")"/HyINDEL_post -i "$INP_FILE" -o "$OUT_FOLDER" -c "$COVERAGE"
"$(dirname "$0")"/HyINDEL_assembly -i "$INP_FILE" -o "$OUT_FOLDER" -r "$REF_FILE"
"$(dirname "$0")"/HyINDEL_ins -i "${OUT_FOLDER}tmp/ins/31_contigs_sort.bam" -o "$OUT_FOLDER"
# Merge deletion and insertion calls, sort by chromosome/position, renumber
# the ID column, and prepend the VCF header.
DELS_FILE="${OUT_FOLDER}tmp/deletions.vcf"
INS_FILE="${OUT_FOLDER}tmp/insertions.vcf"
OUT_FILE="${OUT_FOLDER}tmp/out_1.vcf"
OUT_SORT_FILE="${OUT_FOLDER}tmp/out_2.vcf"
OUT_ID_FILE="${OUT_FOLDER}tmp/out_3.vcf"
FINAL_FILE="${OUT_FOLDER}output.vcf"
cat "$DELS_FILE" "$INS_FILE" >"$OUT_FILE"
sort -k1,1 -k2,2n "$OUT_FILE" >"$OUT_SORT_FILE"
cat "$OUT_SORT_FILE" | awk '{ co+=1; printf("%s\t%s\t%d\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n", $1, $2, co, $4, $5, $6, $7, $8, $9, $10);}' >"$OUT_ID_FILE"
cat "${OUT_FOLDER}tmp/pre/header.vcf" >"$FINAL_FILE"
echo "#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT sample" >>"$FINAL_FILE"
cat "$OUT_ID_FILE" >>"$FINAL_FILE"
rm "$OUT_FILE" "$OUT_SORT_FILE" "$OUT_ID_FILE"
|
<reponame>briefgw/brief_schunklwa4p_simulation<filename>schunk_modular_robotics/schunk_libm5api/src/Device/SocketCANDevice.h
/*****************************************************************************
*
* Copyright 2016 Intelligent Industrial Robotics (IIROB) Group,
* Institute for Anthropomatics and Robotics (IAR) -
* Intelligent Process Control and Robotics (IPR),
* Karlsruhe Institute of Technology (KIT)
*
* Author: <NAME>, email: <EMAIL>
* <NAME>, email: <EMAIL>
*
* Date of creation: 03.2016
*
* +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
*
* Redistribution and use in source and binary forms,
* with or without modification, are permitted provided
* that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* This package is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This package is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this package. If not, see <http://www.gnu.org/licenses/>.
*****************************************************************************/
#ifndef SocketCANDevice_INCLUDEDEF_H
#define SocketCANDevice_INCLUDEDEF_H
//-----------------------------------------------
#include <sys/types.h>
#include <sys/stat.h>
#include <sys/socket.h>
#include <sys/ioctl.h>
#include <fcntl.h>
#include <cerrno>
#include <cstring>
#include <linux/can.h>
#include <linux/can/bcm.h>
#include <linux/can/raw.h>
#include <net/if.h>
#include <string.h>
#ifndef PF_CAN
#define PF_CAN 29
#endif
#ifndef AF_CAN
#define AF_CAN PF_CAN
#endif
//-----------------------------------------------
#include <iostream>
#include <cstdio>
#include <libpcan/libpcan.h>
#include "ProtocolDevice.h"
//-----------------------------------------------
/**
 * CAN communication device backed by Linux SocketCAN, plugged into the
 * Schunk m5api CProtocolDevice hierarchy (declaration only; definitions
 * live in the corresponding .cpp).
 *
 * NOTE(review): a copy constructor and assignment operator are declared;
 * their semantics for the underlying socket handle are defined elsewhere —
 * verify before copying instances.
 */
class SocketCANDevice: public CProtocolDevice {
	private:
		// ---- private data ---------------------------------------------------- ;
		// ---- private auxiliary functions ------------------------------------- ;
	protected:
		// ---- protected data ---------------------------------------------------- ;
		bool m_bInitialized;          // set after a successful init()
		int m_iDeviceId;
		int m_iNoOfRetries;           // read retry budget before giving up
		unsigned short m_uiQueueSize;
		unsigned long m_uiTimeOut;
		char * m_DeviceName;          // CAN interface name, e.g. "can0" — TODO confirm
		// ---- protected auxiliary functions ------------------------------------- ;
		int getDeviceError(int iErrorState);
		int setBaudRate();
		int setBaudRate(unsigned char iBaudRate);
		int setMessageId(unsigned long uiMessageId);
		int clearReadQueue();
		int reinit(unsigned char ucBaudRateId);
		int readDevice(CProtocolMessage& rclProtocolMessage);
		int writeDevice(CProtocolMessage& rclProtocolMessage);
	public:
		// ---- public data ----------------------------------------------------- ;
		// ---- constructors / destructor --------------------------------------- ;
		// default constructor
		SocketCANDevice();
		SocketCANDevice(const SocketCANDevice& rclSocketCANDevice);
		~SocketCANDevice();
		// ---- operators ------------------------------------------------------- ;
		// assignment operator
		SocketCANDevice& operator=(const SocketCANDevice& rclSocketCANDevice);
		// ---- query functions ------------------------------------------------- ;
		// ---- modify functions ------------------------------------------------ ;
		void setQueueSize(unsigned short uiQueueSize);
		void setTimeOut(unsigned long uiTimeOut);
		// ---- I/O functions --------------------------------------------------- ;
		// ---- exec functions -------------------------------------------------- ;
		int init();
		int init(unsigned long baudRate);
		int init(const char* acInitString);
		int exit();
		int waitForStartMotionAll();
};
#endif
|
from django import test
from model_bakery import baker
from devilry.devilry_dbcache.customsql import AssignmentGroupDbCacheCustomSql
class TestAssignmentGroupInsertTriggers(test.TestCase):
    """Tests for the database triggers that fire on AssignmentGroup insert."""

    def setUp(self):
        # The cache triggers are plain SQL, not migrations — install them
        # before each test so the trigger under test actually exists.
        AssignmentGroupDbCacheCustomSql().initialize()

    def test_create_group_creates_first_feedbackset(self):
        # Inserting a group should trigger creation of exactly one initial
        # FeedbackSet, without any explicit Python-side creation.
        group = baker.make('core.AssignmentGroup')
        self.assertEqual(group.feedbackset_set.count(), 1)
|
#!/bin/bash
# Read whitespace-separated URLs from stdin, fetch each one silently with
# curl (one URL per invocation), and decompress the concatenated gzip
# stream to stdout.
xargs -n1 curl -s |
gunzip
|
package org.dimdev.rift.mixin.core.client;
import net.minecraft.client.ClientBrandRetriever;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.Overwrite;
@Mixin(ClientBrandRetriever.class)
public class MixinClientBrandRetriever {
    /**
     * @author Rift project — TODO confirm attribution
     * @reason Report "rift" as the client brand instead of the vanilla
     * default, so servers and crash reports can identify modded clients.
     */
    @Overwrite
    public static String getClientModName() {
        return "rift";
    }
}
|
#!/bin/bash
# Print the help/usage text (author, description, usage line) to stdout.
usage(){
echo "
Written by Brian Bushnell
Last modified January 21, 2015
Description: Prints time elapsed since last called on the same file.
Usage: printtime.sh <filename>
Please contact Brian Bushnell at bbushnell@lbl.gov if you encounter any problems.
"
}
#This block allows symlinked shellscripts to correctly set classpath.
pushd . > /dev/null
DIR="${BASH_SOURCE[0]}"
# Follow symlinks until DIR is the real script location.
while [ -h "$DIR" ]; do
  cd "$(dirname "$DIR")"
  DIR="$(readlink "$(basename "$DIR")")"
done
cd "$(dirname "$DIR")"
DIR="$(pwd)/"
popd > /dev/null
#DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/"
# Classpath points at the compiled classes directory next to this script.
CP="$DIR""current/"
EA="-ea"
EOOM=""
set=0
# With no argument or -h/--help, print help and quit before doing anything.
if [ -z "$1" ] || [[ $1 == -h ]] || [[ $1 == --help ]]; then
	usage
	exit
fi
# Load site-specific modules (JGI/NERSC hosts are special-cased; Shifter
# containers skip module loading), then run the Java align2.PrintTime class
# with a tiny 8 MB heap on the given file.
function printtime() {
	if [[ $SHIFTER_RUNTIME == 1 ]]; then
		#Ignore NERSC_HOST
		shifter=1
	elif [[ $NERSC_HOST == genepool ]]; then
		module unload oracle-jdk
		module load oracle-jdk/1.8_144_64bit
		module load samtools/1.4
		module load pigz
	elif [[ $NERSC_HOST == denovo ]]; then
		module unload java
		module load java/1.8.0_144
		module load PrgEnv-gnu/7.1
		module load samtools/1.4
		module load pigz
	elif [[ $NERSC_HOST == cori ]]; then
		module use /global/common/software/m342/nersc-builds/denovo/Modules/jgi
		module use /global/common/software/m342/nersc-builds/denovo/Modules/usg
		module unload java
		module load java/1.8.0_144
		module unload PrgEnv-intel
		module load PrgEnv-gnu/7.1
		module load samtools/1.4
		module load pigz
	fi
	# NOTE(review): $@ is interpolated unquoted into CMD and then eval'd, so
	# arguments containing spaces are re-split. This matches the other BBTools
	# wrappers but is fragile — confirm before changing.
	local CMD="java $EA $EOOM -Xmx8m -cp $CP align2.PrintTime $@"
	echo $CMD >&2
	eval $CMD
}
printtime "$@"
|
import JsonP from 'jsonp'
import axios from 'axios'
import {message} from 'ant-design-vue'
/**
 * Thin promise-based HTTP helpers (JSONP + axios) with an optional global
 * "#ajaxLoading" spinner element.
 */
export default class Axios {
  /**
   * Perform a JSONP request.
   * Resolves with the raw response when response.status === 'success';
   * rejects with response.message otherwise, or with the transport error.
   */
  static jsonp (options) {
    return new Promise((resolve, reject) => {
      JsonP(options.url, {
        param: 'callback',
        timeout: '5000'
      }, function (err, response) {
        // Fixed: previously `throw err` inside this async callback was
        // unhandled and left the promise pending forever — reject instead.
        if (err) {
          reject(err)
          return
        }
        if (response.status === 'success') {
          resolve(response)
        } else {
          reject(response.message)
        }
      })
    })
  }
  /**
   * Call `self[options.method](options.url, params)` and resolve with
   * res.data.result on success. Shows the spinner unless
   * options.isShowLoading === false.
   */
  static getData (self, options, params) {
    let loading
    let axios = self
    if (options.isShowLoading !== false) {
      loading = document.getElementById('ajaxLoading')
      loading.style.display = 'block'
    }
    // Fixed: the spinner is now only hidden when it was shown; previously
    // `loading.style` threw a TypeError whenever isShowLoading === false.
    const hideLoading = () => {
      if (loading) {
        loading.style.display = 'none'
      }
    }
    return new Promise((resolve, reject) => {
      axios[options.method](options.url, params).then(res => {
        hideLoading()
        if (res.status === 200) {
          if (res.data.success === true) {
            resolve(res.data.result)
          } else {
            reject(res)
          }
        }
        // NOTE(review): non-200 responses are silently ignored here,
        // matching the original behavior — confirm whether they should reject.
      }).catch((err) => {
        hideLoading()
        // Fixed: err.response is undefined for network-level failures; guard
        // before reading its status.
        if (err.response && err.response.status === 401) {
          message.info('获取数据失败,没有权限,请重新登录')
        }
      })
    })
  }
  /**
   * Fetch a list via ajax() and assign it (with per-row `key`s, as Ant
   * Design tables require) to the caller's `dataSource`.
   */
  static requestList (_this, url, params, isMock) {
    var data = {
      params,
      isMock
    }
    this.ajax({
      url,
      data
    }).then((res) => {
      let list = res.result.item_list.map((item, index) => {
        item.key = index
        return item
      })
      _this.dataSource = list
    })
  }
  /**
   * Low-level GET against the easy-mock API. Resolves with the payload when
   * HTTP 200 and res.code == 0; rejects on non-200 or transport error.
   */
  static ajax (options) {
    let loading
    const showLoading = options.data && options.data.isShowLoading !== false
    if (showLoading) {
      loading = document.getElementById('ajaxLoading')
      loading.style.display = 'block'
    }
    const hideLoading = () => {
      if (showLoading && loading) {
        loading.style.display = 'none'
      }
    }
    // NOTE(review): both branches use the same base URL, so isMock currently
    // has no effect here — confirm the intended real-API URL.
    let baseApi = ''
    if (options.isMock) {
      baseApi = 'https://www.easy-mock.com/mock/5a7278e28d0c633b9c4adbd7/api'
    } else {
      baseApi = 'https://www.easy-mock.com/mock/5a7278e28d0c633b9c4adbd7/api'
    }
    return new Promise((resolve, reject) => {
      axios({
        url: options.url,
        method: 'get',
        baseURL: baseApi,
        timeout: 5000,
        params: (options.data && options.data.params) || ''
      }).then((response) => {
        hideLoading()
        if (response.status === 200) {
          let res = response.data
          // eslint-disable-next-line
          if (res.code == 0) {
            resolve(res)
          }
        } else {
          reject(response.data)
        }
      }).catch((err) => {
        // Fixed: transport errors previously left the promise pending and the
        // spinner visible forever.
        hideLoading()
        reject(err)
      })
    })
  }
}
|
import { KeysRdfUpdateQuads } from '@comunica/context-entries';
import { ActionContext, Bus } from '@comunica/core';
import { ActorRdfUpdateHypermediaSparql } from '../lib/ActorRdfUpdateHypermediaSparql';
import { QuadDestinationSparql } from '../lib/QuadDestinationSparql';
// Unit tests for ActorRdfUpdateHypermediaSparql. They verify that test()
// accepts a destination only when SPARQL support is detectable (service
// description metadata, a forced destination type, or a /sparql //update URL
// suffix when the corresponding check flag is enabled), and that run() wires
// up a QuadDestinationSparql against the correct endpoint URL.
describe('ActorRdfUpdateHypermediaSparql', () => {
  let bus: any;
  let mediatorHttp: any;
  beforeEach(() => {
    bus = new Bus({ name: 'bus' });
    // Stubbed HTTP mediator; the response body is never inspected by these tests.
    mediatorHttp = {
      mediate: jest.fn(() => ({
        body: 'BODY',
      })),
    };
  });
  describe('An ActorRdfUpdateHypermediaSparql instance', () => {
    let actor: ActorRdfUpdateHypermediaSparql;
    beforeEach(() => {
      // Default actor: both URL-suffix heuristics enabled.
      actor = new ActorRdfUpdateHypermediaSparql({
        name: 'actor',
        bus,
        mediatorHttp,
        checkUrlSuffixSparql: true,
        checkUrlSuffixUpdate: true,
      });
    });
    it('should test', () => {
      const context = ActionContext({ [KeysRdfUpdateQuads.destination]: 'abc' });
      const url = 'abc';
      const metadata = { sparqlService: true };
      const exists = true;
      return expect(actor.test({ context, url, metadata, exists })).resolves.toBeTruthy();
    });
    it('should not test on invalid metadata', () => {
      const context = ActionContext({ [KeysRdfUpdateQuads.destination]: 'abc' });
      const url = 'abc';
      const metadata = { somethingElse: true };
      const exists = true;
      return expect(actor.test({ context, url, metadata, exists })).rejects
        .toThrow(`Actor actor could not detect a SPARQL service description or URL ending on /sparql or /update.`);
    });
    it('should test on invalid metadata with forced destination type', () => {
      const context = ActionContext({ [KeysRdfUpdateQuads.destination]: 'abc' });
      const url = 'abc';
      const metadata = { somethingElse: true };
      const exists = true;
      return expect(actor.test({ context, url, metadata, exists, forceDestinationType: 'sparql' }))
        .resolves.toBeTruthy();
    });
    it('should test on invalid metadata when URL ends with /sparql', () => {
      const context = ActionContext({ [KeysRdfUpdateQuads.destination]: 'abc' });
      const url = 'abc/sparql';
      const metadata = { somethingElse: true };
      const exists = true;
      return expect(actor.test({ context, url, metadata, exists }))
        .resolves.toBeTruthy();
    });
    it('should not test on invalid metadata when URL ends with /sparql when checkUrlSuffix is false', () => {
      // Rebuild the actor with the /sparql suffix heuristic disabled.
      actor = new ActorRdfUpdateHypermediaSparql({
        name: 'actor',
        bus,
        mediatorHttp,
        checkUrlSuffixSparql: false,
        checkUrlSuffixUpdate: true,
      });
      const context = ActionContext({ [KeysRdfUpdateQuads.destination]: 'abc' });
      const url = 'abc/sparql';
      const metadata = { somethingElse: true };
      const exists = true;
      return expect(actor.test({ context, url, metadata, exists })).rejects
        .toThrow(`Actor actor could not detect a SPARQL service description or URL ending on /sparql or /update.`);
    });
    it('should not test on invalid metadata when URL ends with /update when checkUrlSuffix is false', () => {
      // Rebuild the actor with the /update suffix heuristic disabled.
      actor = new ActorRdfUpdateHypermediaSparql({
        name: 'actor',
        bus,
        mediatorHttp,
        checkUrlSuffixSparql: true,
        checkUrlSuffixUpdate: false,
      });
      const context = ActionContext({ [KeysRdfUpdateQuads.destination]: 'abc' });
      const url = 'abc/update';
      const metadata = { somethingElse: true };
      const exists = true;
      return expect(actor.test({ context, url, metadata, exists })).rejects
        .toThrow(`Actor actor could not detect a SPARQL service description or URL ending on /sparql or /update.`);
    });
    it('should not test on invalid metadata with forced destination type for different destination type', () => {
      const context = ActionContext({ [KeysRdfUpdateQuads.destination]: 'abc' });
      const url = 'abc';
      const metadata = { somethingElse: true };
      const exists = true;
      return expect(actor.test({ context, url, metadata, exists, forceDestinationType: 'different' }))
        .rejects.toThrow('Actor actor is not able to handle destination type different.');
    });
    it('should run', async() => {
      const context = ActionContext({ [KeysRdfUpdateQuads.destination]: 'abc' });
      const url = 'abc';
      const metadata = { sparqlService: 'service' };
      const exists = true;
      // The sparqlService metadata URL takes precedence over the page URL.
      const { destination } = await actor.run({ context, url, metadata, exists });
      expect(destination).toEqual(expect.any(QuadDestinationSparql));
      expect((<any> destination).url).toEqual('service');
    });
    it('should run without sparqlService metadata', async() => {
      const context = ActionContext({ [KeysRdfUpdateQuads.destination]: 'abc' });
      const url = 'abc';
      const metadata = {};
      const exists = true;
      // Without service metadata, the destination falls back to the page URL.
      const { destination } = await actor.run({ context, url, metadata, exists });
      expect(destination).toEqual(expect.any(QuadDestinationSparql));
      expect((<any> destination).url).toEqual('abc');
    });
  });
});
|
package org.glamey.training.codes.sort;
import java.util.ArrayList;
import java.util.List;
import java.util.ListIterator;
import java.util.Random;
import java.util.RandomAccess;
import org.glamey.training.codes.Utils;
import com.google.common.collect.Lists;
/**
* 针对集合的洗牌函数
*
* @author yang.zhou 2019.11.04.14
*/
/**
 * 针对集合的洗牌函数 — Fisher–Yates shuffle for lists, mirroring the JDK's
 * Collections.shuffle: random-access lists are swapped in place, while
 * sequential lists are shuffled via a temporary array to avoid quadratic
 * positional access, then written back through a ListIterator.
 *
 * @author yang.zhou 2019.11.04.14
 */
public class ShuffleCollection {
    /** Lazily created shared RNG; the benign init race matches java.util.Collections. */
    private static Random r;

    public static void main(String[] args) {
        testNoRandomAccessArray();
        testRandomAccessArray();
    }

    /** Builds the fixed sample list and prints it. */
    private static List<Integer> generateList() {
        ArrayList<Integer> list = Lists.newArrayList(1, 3, 45, 6, 8, 1, 19, 30);
        System.out.println("origin list is \n" + list);
        return list;
    }

    /** Shuffles an ArrayList (implements RandomAccess) ten times, printing each permutation. */
    public static void testRandomAccessArray() {
        System.out.println("random access list");
        List<Integer> list = generateList();
        for (int i = 0; i < 10; i++) {
            shuffle(list);
            System.out.println(list);
        }
    }

    /** Shuffles a LinkedList (no RandomAccess) ten times, printing each permutation. */
    public static void testNoRandomAccessArray() {
        System.out.println("no random access list");
        List<Integer> list = Lists.newLinkedList(generateList());
        for (int i = 0; i < 10; i++) {
            shuffle(list);
            System.out.println(list);
        }
    }

    /** Shuffles {@code list} in place with the shared, lazily initialized Random. */
    public static void shuffle(List<?> list) {
        Random rnd = r;
        if (rnd == null) {
            r = rnd = new Random();
        }
        shuffle(list, rnd);
    }

    /**
     * Fisher–Yates: walk from the end, swapping each slot with a uniformly
     * chosen slot at or before it. Sequential lists are copied to an array so
     * each swap is O(1), then the result is written back in one pass.
     */
    @SuppressWarnings({"rawtypes", "unchecked"})
    private static void shuffle(List<?> list, Random rnd) {
        int size = list.size();
        if (list instanceof RandomAccess) {
            for (int i = size; i > 1; i--) {
                swap(list, i - 1, rnd.nextInt(i));
            }
        } else {
            Object[] array = list.toArray();
            for (int i = size; i > 1; i--) {
                Utils.swap(array, i - 1, rnd.nextInt(i));
            }
            ListIterator iterator = list.listIterator();
            for (int i = 0; i < array.length; i++) {
                iterator.next();
                iterator.set(array[i]);
            }
        }
    }

    /** Swaps elements i and j in place (raw cast is safe: both values originate from the list). */
    @SuppressWarnings({"rawtypes", "unchecked"})
    private static void swap(List<?> list, int i, int j) {
        final List l = list;
        l.set(i, l.set(j, l.get(i)));
    }
}
|
/*
* Copyright © 2021 Apple Inc. and the ServiceTalk project authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.servicetalk.opentracing.internal;
import javax.annotation.Nullable;
/**
 * Helpers for rendering tracing IDs in textual form.
 */
public final class TracingIdUtils {

    private static final String NULL_VALUE = "null";

    private TracingIdUtils() {
        // static utility class; never instantiated
    }

    /**
     * Renders an ID, substituting the literal string {@code "null"} when the
     * ID is absent.
     *
     * @param id The ID to be evaluated.
     * @return the ID itself, or {@code "null"} if it was {@code null}.
     */
    public static String idOrNullAsValue(@Nullable final String id) {
        return id != null ? id : NULL_VALUE;
    }
}
|
import { Schema } from "mongoose";
import "reflect-metadata";
import { isPrimitive } from "util";
// Merge a single property's schema fragment into the class-level
// "mongonation:schema" metadata map, creating the map on first use.
function defineProperty(metadataValue: any, target: any, propertyKey: string | symbol) {
    const metadataKey = "mongonation:schema";
    const existing = Reflect.getMetadata(metadataKey, target);
    const merged = existing === undefined
        ? { [propertyKey]: metadataValue }
        : { ...existing, [propertyKey]: metadataValue };
    Reflect.defineMetadata(metadataKey, merged, target);
}
// Property decorator: records this field in the class's schema metadata.
// Non-primitive design types are stored as an ObjectId reference keyed by
// the constructor's name; primitives are stored directly.
export function Property(target: any, propertyKey: string | symbol) {
    // design:type is the constructor emitted by TS's emitDecoratorMetadata.
    let type = Reflect.getMetadata("design:type", target, propertyKey);
    // NOTE(review): util.isPrimitive is deprecated and tests the VALUE — a
    // constructor function is never a primitive, so even String/Number fields
    // appear to take the ref branch here. Verify the intended semantics.
    if (isPrimitive(type) === false) {
        type = {
            ref: type.name,
            type: Schema.Types.ObjectId,
        };
    } else {
        type = { type };
    }
    return defineProperty(type, target, propertyKey);
}
// Decorator factory for array-valued fields. `typename` names the element
// type; the decorated field's design type must be Array.
export function Array(typename: string) {
    return (target: any, propertyKey: string | symbol) => {
        let type = Reflect.getMetadata("design:type", target, propertyKey);
        if (type === global.Array) {
            // NOTE(review): `typename` is always a string, and
            // util.isPrimitive(<string>) is always true — so the ObjectId-ref
            // branch below is unreachable. Probably meant to test whether
            // `typename` NAMES a primitive type; confirm before fixing.
            if (isPrimitive(typename)) {
                type = [{
                    type: typename,
                }];
            } else {
                type = [{
                    ref: typename,
                    type: Schema.Types.ObjectId,
                }];
            }
        } else {
            throw new Error(`${propertyKey} is not an Array. Please use @Property instead`);
        }
        return defineProperty(type, target, propertyKey);
    };
}
|
# Habitat build plan for xz/liblzma 5.2.2.
pkg_name=xz
pkg_distname=$pkg_name
pkg_origin=core
pkg_version=5.2.2
pkg_maintainer="The Habitat Maintainers <humans@habitat.sh>"
pkg_license=('gpl2+' 'lgpl2+')
pkg_source=http://tukaani.org/${pkg_distname}/${pkg_distname}-${pkg_version}.tar.gz
pkg_shasum=73df4d5d34f0468bd57d09f2d8af363e95ed6cc3a4a86129d2f2c366259902a2
pkg_dirname=${pkg_distname}-${pkg_version}
pkg_deps=(core/glibc)
pkg_build_deps=(core/coreutils core/diffutils core/patch core/make core/gcc core/sed)
pkg_bin_dirs=(bin)
pkg_include_dirs=(include)
pkg_lib_dirs=(lib)
# Run the upstream test suite during `build --check`.
do_check() {
  make check
}
# ----------------------------------------------------------------------------
# **NOTICE:** What follows are implementation details required for building a
# first-pass, "stage1" toolchain and environment. It is only used when running
# in a "stage1" Studio and can be safely ignored by almost everyone. Having
# said that, it performs a vital bootstrapping process and cannot be removed or
# significantly altered. Thank you!
# ----------------------------------------------------------------------------
if [[ "$STUDIO_TYPE" = "stage1" ]]; then
  # Minimal bootstrap build deps (no patch/make: provided by the stage1 image).
  pkg_build_deps=(core/gcc core/coreutils core/sed core/diffutils)
fi
|
#!/bin/sh
# Assemble the perm Docker build context: unpack the release tarball, copy in
# the Dockerfile, and stage the TLS certificates.
set -eu
tar -zxf ./perm-bin-dir/perm-*.tgz -C perm-image-workspace
cp perm-image-src/images/perm/Dockerfile perm-image-workspace
mkdir -p perm-image-workspace/certs
cp perm-certs/certs/* perm-image-workspace/certs/
|
#!/bin/bash
set -euo pipefail
# Download SonarSource's shared travis-utils helpers into ~/.local and source
# them (provides set_maven_build_version, maven_expression, installJDK8, ...).
function configureTravis {
  mkdir ~/.local
  curl -sSL https://github.com/SonarSource/travis-utils/tarball/v29 | tar zx --strip-components 1 -C ~/.local
  source ~/.local/bin/install
  #echo "$ARTIFACTORY_URL/npmjs/" > .npmrc
}
configureTravis
. installJDK8
# Print a visually separated section header to stdout.
function strongEcho {
  echo ""
  echo "================ $1 ================="
}
case "$TARGET" in
CI)
if [ "${TRAVIS_BRANCH}" == "master" ] && [ "$TRAVIS_PULL_REQUEST" == "false" ]; then
strongEcho 'Build and deploy'
# Do not deploy a SNAPSHOT version but the release version related to this build
set_maven_build_version $TRAVIS_BUILD_NUMBER
# analysis is currently executed by SonarSource internal infrastructure
mvn deploy \
-Pdeploy-sonarsource \
-B -e -V
elif [[ "${TRAVIS_BRANCH}" == "branch-"* ]] && [ "$TRAVIS_PULL_REQUEST" == "false" ]; then
strongEcho 'Build and deploy'
# get current version from pom
CURRENT_VERSION=`maven_expression "project.version"`
if [[ $CURRENT_VERSION =~ "-SNAPSHOT" ]]; then
echo "======= Found SNAPSHOT version ======="
# Do not deploy a SNAPSHOT version but the release version related to this build
set_maven_build_version $TRAVIS_BUILD_NUMBER
else
echo "======= Found RELEASE version ======="
fi
# analysis is currently executed by SonarSource internal infrastructure
mvn deploy \
-Pdeploy-sonarsource \
-B -e -V
#elif [ "$TRAVIS_PULL_REQUEST" != "false" ] && [ -n "${GITHUB_TOKEN:-}" ]; then
elif [ -n "${GITHUB_TOKEN:-}" ]; then
strongEcho 'Build and analyze pull request, no deploy'
# No need for Maven phase "install" as the generated JAR file does not need to be installed
# in Maven local repository. Phase "verify" is enough.
export MAVEN_OPTS="-Xmx1G -Xms128m"
mvn org.jacoco:jacoco-maven-plugin:prepare-agent verify sonar:sonar \
-Dsonar.analysis.mode=issues \
-Dsonar.github.pullRequest=$TRAVIS_PULL_REQUEST \
-Dsonar.github.repository=$TRAVIS_REPO_SLUG \
-Dsonar.github.oauth=$GITHUB_TOKEN \
-Dsonar.host.url=$SONAR_HOST_URL \
-Dsonar.login=$SONAR_TOKEN \
-B -e -V
else
strongEcho 'Build, no analysis, no deploy'
echo $TRAVIS_PULL_REQUEST
# No need for Maven phase "install" as the generated JAR file does not need to be installed
# in Maven local repository. Phase "verify" is enough.
mvn verify \
-Dmaven.test.redirectTestOutputToFile=false \
-B -e -V
fi
;;
WEB)
set +eu
source ~/.nvm/nvm.sh && nvm install 4
npm install -g npm@3.5.2
cd server/sonar-web && npm install && npm test
;;
IT)
start_xvfb
mvn install -DskipTests=true -Dsource.skip=true -Denforcer.skip=true -B -e -V
./run-integration-tests.sh "$IT_CATEGORY" "" -Dmaven.test.redirectTestOutputToFile=false -Dexclude-qa-tests=true
;;
*)
echo "Unexpected TARGET value: $TARGET"
exit 1
;;
esac
|
export PATH="./bin:/usr/local/opt/python/libexec/bin:/usr/local/bin:/usr/local/sbin:$HOME/.sfs:$DOT/bin:$PATH"
export MANPATH="/usr/local/man:/usr/local/mysql/man:/usr/local/git/man:$MANPATH"
export PYTHONPATH="$HOME/Library/Python/2.7/lib/python/site-packages:$PYTHONPATH"
|
<filename>common-kafka/src/test/java/com/atjl/kafka/core/thread/TimingCommitThreadTest.java
package com.atjl.kafka.core.thread;
import com.atjl.kafka.consumer.TConsumer;
import com.atjl.kafka.consumer.TopicHandler;
import com.atjl.kafka.domain.OffsetRange;
import com.atjl.util.queue.IQueue;
import com.atjl.util.queue.QueueManager;
import com.atjl.kafka.api.event.BatchEventMC;
import com.atjl.kafka.api.event.Event;
import com.atjl.kafka.core.KafkaConsumeContext;
import com.atjl.kafka.domain.constant.KafkaInnerConstant;
import kafka.javaapi.consumer.ConsumerConnector;
import org.junit.Test;
import org.junit.Before;
import org.junit.After;
import org.mockito.Mockito;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
/**
* CommitThread
*
* @since 1.0
*/
public class TimingCommitThreadTest {
    @Before
    public void before() throws Exception {
        // no per-test setup required
    }
    @After
    public void after() throws Exception {
        // no per-test teardown required
    }
    // Shared fixture: the consume context under test plus a mocked Kafka
    // connector (no commits are actually sent anywhere).
    KafkaConsumeContext context = new KafkaConsumeContext();
    ConsumerConnector connector = Mockito.mock(ConsumerConnector.class);
    /**
     * Method: run() — schedules a TimingCommitThread at a fixed rate against a
     * live consumer (manual/interactive test, not a unit test).
     */
    @Test
    public void testRun() throws Exception {
        TConsumer km = new TConsumer();
        ConsumerConnector cc = km.getConsumer();
        ScheduledThreadPoolExecutor executor = new ScheduledThreadPoolExecutor(10);
        executor.scheduleAtFixedRate(new TimingCommitThread(0,2000,"test1",cc,context),2000,2000,TimeUnit.SECONDS);
        // NOTE(review): busy-spin keeps the JVM alive so the scheduled thread
        // can run — this test never terminates on its own.
        while (true);
    }
    /**
     * Method: commitPartition(Map<OffsetRange, Boolean> offsetRangeBooleanMap)
     * All offsets 2..9 are marked processed (fully contiguous) before committing.
     */
    @Test
    public void testCommitSeq() throws Exception {
        TimingCommitThread ct = new TimingCommitThread(0,1000,"test1",connector,context);
        //2->3, 5->9
        generateAndAdd2Context(1,new long[]{2, 3, 4,5, 6, 7,8, 9});
        //BatchEvent rawDatas = generateBatchCommitData(new long[]{2, 3, 5, 6, 7, 9});
        //System.out.println(rawDatas);
        ConcurrentHashMap<OffsetRange, Boolean> map = context.getOffsetProcessCache().get(1);
        ct.commitPartition(1,map);
        // "AFC" = after commit; printed for manual inspection.
        context.showContextStatus("AFC");
    }
    /**
     * Commit a non-contiguous set of offsets (gaps at 6 and 8).
     * NOTE(review): presumably only the leading contiguous range is committed —
     * verify against TimingCommitThread.commitPartition.
     * @throws Exception
     */
    @Test
    public void testCommitNotSeq() throws Exception {
        //KafkaManagerHL km = new KafkaManagerHL();
        //ConsumerConnector cc = km.getConsumer();
        TimingCommitThread ct = new TimingCommitThread(0,1000,"test1",null,context);//"test1",cc);
        generateAndAdd2Context(1,new long[]{2, 3, 4, 5, 7, 9});
        // Map<OffsetRange, Boolean> offsetRangeBooleanMap = new HashMap<>();
        // offsetRangeBooleanMap.put(new OffsetRange(0,3),true);
        // offsetRangeBooleanMap.put(new OffsetRange(4,5),true);
        // non-contiguous point
        // offsetRangeBooleanMap.put(new OffsetRange(8,9),true);
        // offsetRangeBooleanMap.put(new OffsetRange(10,14),true);
        ConcurrentHashMap<OffsetRange, Boolean> map = context.getOffsetProcessCache().get(1);
        ct.commitPartition(1,map);
        context.showContextStatus("AFC");
    }
    /**
     * Unprocessed offset ranges reach the maximum check count: ranges that stay
     * unprocessed are re-inspected on every commitPartition call (5 iterations).
     */
    @Test
    public void testNotProcessOffsetRangeReachMaxCount(){
        TimingCommitThread ct = new TimingCommitThread(0,1000,"test1",null,context);
        //2->3, 5->9
        BatchEventMC batchRawData = generateAndAddFreshData2Context(1,new long[]{2, 3, 5, 6, 7,9});
        BatchEventMC batchRawData1 = generateAndAddFreshData2Context(1,new long[]{11,16,5,20,30,6});
        System.out.println(batchRawData.getOffsetMap()+String.valueOf(batchRawData1.getOffsetMap()));
        // Only the second batch is marked processed; the first stays pending.
        context.addProcessedData(batchRawData1);
        context.showContextStatus();
        //processSomeDataUpdateContext(new long[]{5,6,7,9});
        ConcurrentHashMap<OffsetRange, Boolean> map = context.getOffsetProcessCache().get(1);
        for(int i=0;i<5;i++) {
            ct.commitPartition(1, map);
            context.showContextStatus("AFC");
        }
    }
    /**
     * When the context's offset-status map reaches its maximum size, the
     * smallest offset range is force-committed.
     */
    @Test
    public void testContextMapReachMaxSize(){
        //KafkaInnerConstant.OFFSET_PROCESS_STATUS_MAP_MAXSIZE = 3;
        TimingCommitThread ct = new TimingCommitThread(0,1000,"test1",null,context);
        //2->3, 5->9
        BatchEventMC batchRawData = generateAndAddFreshData2Context(1,new long[]{2, 3, 5, 6, 7, 9, 10,11,15,16});
        processSomeData(batchRawData,1);
        context.showContextStatus("BF");
        //ConcurrentHashMap<OffsetRange, Boolean> map = KafkaConsumeContext.getOffsetProcessCache().get(1);
        // Exercise the full run() path rather than commitPartition directly.
        ct.run();
        context.showContextStatus("AFC");
        /**
        for(int i=0;i<4;i++) {
            ct.commitPartition(1, map);
            KafkaConsumeContext.showContextStatus("AFC");
        }*/
    }
/**
 * Fabricates an unprocessed batch for the given offsets and registers it
 * with the context, dumping the context state with tag "BF".
 * @param partition target partition id
 * @param arr offsets to generate events for
 * @return the generated batch, for further use by the caller
 */
private BatchEventMC generateAndAddFreshData2Context(int partition, long[] arr) {
    BatchEventMC rawDatas = generateBatchCommitData(partition,arr);
    context.addUnProcessData(rawDatas);
    context.showContextStatus("BF");
    return rawDatas;
}
/**
 * Builds a batch for the given offsets, registers it as unprocessed, and
 * then immediately marks the very same batch as processed — dumping the
 * context state before ("BF") and after processing ("AFP").
 */
private void generateAndAdd2Context(int partition, long[] arr) {
    final BatchEventMC batch = generateBatchCommitData(partition, arr);
    context.addUnProcessData(batch);
    context.showContextStatus("BF");
    context.addProcessedData(batch);
    context.showContextStatus("AFP");
}
/**
 * Creates a BatchEventMC containing one generated Event per requested
 * offset, all assigned to the given partition.
 */
private BatchEventMC generateBatchCommitData(int partition, long[] offsets) {
    final BatchEventMC batch = new BatchEventMC();
    for (Event event : generateList(partition, offsets)) {
        batch.addEvent2Partition(partition, event);
    }
    return batch;
}
/**
 * Marks the given raw batch as processed in the context and dumps state.
 * NOTE(review): the {@code partition} argument is currently unused; the
 * commented lookup below suggests per-partition verification was planned
 * — confirm intent before removing the parameter.
 */
private void processSomeData(BatchEventMC batchRawData, int partition ){
    //batchRawData.getOffsetMap().get(partition);//TODO:return ignored
    context.addProcessedData(batchRawData);
    context.showContextStatus();
}
/**
 * Generates one Event per offset for topic "test1", using the offset
 * rendered as a string for both the key and the value.
 */
private List<Event> generateList(int partition, long[] offsets) {
    final List<Event> events = new ArrayList<>();
    for (long offset : offsets) {
        final String payload = String.valueOf(offset);
        // Event(topic, partition, offset, key, value)
        events.add(new Event("test1", partition, offset, payload, payload));
    }
    System.out.println("generateConfigModel raw data list " + events);
    return events;
}
/**
 * End-to-end smoke test wiring all three worker threads together:
 * a fetch thread draining a Kafka stream into the data queue, a process
 * thread consuming that queue, a scheduled commit thread, and a
 * statistics thread. The test then blocks forever so the background
 * threads keep running until the process is killed externally.
 */
@SuppressWarnings("rawtypes")
@Test
public void testAll(){
    TConsumer km = new TConsumer();
    context = km.getContext();
    TopicHandler handler = da -> System.out.println("GET DATA "+da);
    km.subscribeTopic("test1", handler);
    km.constructStream();
    ConsumerConnector connector = km.getConsumer();
    int id = 1;
    IQueue dataQ = QueueManager.getQueue(KafkaInnerConstant.DATA_QUEUE_KEY);
    // fetch thread: drains the first Kafka stream into dataQ
    Thread rt = new Thread(new FetchDataMCThread(id,km.getConfig() , context,km.getStreams().get(0),dataQ));
    rt.start();
    id++;
    // process thread: consumes dataQ and hands events to the handler
    Thread pt = new Thread(new ProcessMCThread(id,context,dataQ,handler));
    pt.start();
    // commit thread: runs every 5 seconds after an initial 2 second delay
    TimingCommitThread ct = new TimingCommitThread(0,1000,"test1",connector,context);
    ScheduledThreadPoolExecutor executor = new ScheduledThreadPoolExecutor(10);
    executor.scheduleAtFixedRate(ct,2,5,TimeUnit.SECONDS);
    Thread st = new Thread(new TimingStatisticsThread(1,1000));
    st.start();
    // BUGFIX: was `while (true);` — a busy-wait that pegged one CPU core.
    // Joining the current thread blocks forever without burning CPU.
    try {
        Thread.currentThread().join();
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
    }
}
}
|
<reponame>PysX/home-assistant-conf<gh_stars>100-1000
import copy
import logging
import re
from homeassistant.const import SUN_EVENT_SUNRISE, SUN_EVENT_SUNSET
from .const import CONDITION_TYPE_AND, CONDITION_TYPE_OR
# Markers used inside the legacy compact entry-string format.
ENTRY_PATTERN_SUNRISE = "SR"
ENTRY_PATTERN_SUNSET = "SS"
ENTRY_PATTERN_DAILY = "0"
ENTRY_PATTERN_WORKDAY = "15"
ENTRY_PATTERN_WEEKEND = "67"
_LOGGER = logging.getLogger(__name__)
# Overall legacy entry layout:
#   [days]D[day list]T<time>[T<end time>]A<actions>[C<conditions>][F<flags>]
EntryPattern = re.compile(
    "^([0-9]+)?D([0-9]+)?T([0-9SRDUW]+)T?([0-9SRDUW]+)?A([A0-9]+)+(C([C0-9]+))?(F([F0-9]+))?$"
)
# Fixed wall-clock time encoded as "HHMM".
FixedTimePattern = re.compile("^([0-9]{2})([0-9]{2})$")
# Sun-relative time: optional "HHMM" offset before (negative) or after
# (positive) a two-letter sun-event marker.
SunTimePattern = re.compile(
    "^(([0-9]{2})([0-9]{2}))?([SRDUW]{2})(([0-9]{2})([0-9]{2}))?$"
)
def migrate_old_entity(data: dict, entity_id: str):
    """Import datacollection from restored entity.

    Parses the compact legacy entry strings (day pattern, start/stop time,
    action indexes, optional condition indexes) stored on the old entity
    and rebuilds them as structured timeslot dicts.

    Returns the new-format dict, or False when an entry string does not
    match EntryPattern. Raises on unparseable time tokens.
    """

    def import_time_input(time_str):
        # Parse a fixed "HHMM" token or a sun-relative token into a string.
        fixed_time_pattern = FixedTimePattern.match(time_str)
        sun_time_pattern = SunTimePattern.match(time_str)
        res = {}
        if fixed_time_pattern:
            res = "{}:{}:00".format(
                fixed_time_pattern.group(1),
                fixed_time_pattern.group(2),
            )
        elif sun_time_pattern:
            # NOTE(review): SunTimePattern also admits markers other than
            # "SR"/"SS" (letters D/U/W); in that case `event` is never bound
            # and the format call below raises NameError — confirm and guard.
            if sun_time_pattern.group(4) == ENTRY_PATTERN_SUNRISE:
                event = SUN_EVENT_SUNRISE
            elif sun_time_pattern.group(4) == ENTRY_PATTERN_SUNSET:
                event = SUN_EVENT_SUNSET
            if sun_time_pattern.group(1) is not None:  # negative offset
                offset = "-{}:{}:00".format(
                    sun_time_pattern.group(2),
                    sun_time_pattern.group(3),
                )
            else:
                offset = "+{}:{}:00".format(
                    sun_time_pattern.group(6),
                    sun_time_pattern.group(7),
                )
            res = "{}{}".format(event, offset)
        else:
            raise Exception("failed to parse time {}".format(time_str))
        return res

    entries = []
    weekdays = []
    for entry in data["entries"]:
        res = EntryPattern.match(entry)
        if not res:
            return False
        # split the entry string in parts
        days_setting = res.group(1)
        days_list = res.group(2)
        time_str = res.group(3)
        end_time_str = res.group(4)
        action_list = res.group(5).split("A")
        condition_list = res.group(7)
        my_entry = {}
        # parse days
        if days_setting:
            if days_setting == ENTRY_PATTERN_DAILY:
                weekdays = ["daily"]
            elif days_setting == ENTRY_PATTERN_WORKDAY:
                weekdays = ["workday"]
            elif days_setting == ENTRY_PATTERN_WEEKEND:
                weekdays = ["weekend"]
        elif days_list:
            days_list = list(res.group(2))
            days_list = [int(i) for i in days_list]
            if len(days_list) == 1 and days_list[0] == 0:  # for backwards compatibility
                weekdays = ["mon", "tue", "wed", "thu", "fri", "sat", "sun"]
            else:
                # Day numbers: 1=mon .. 7=sun.
                weekdays = []
                if 1 in days_list:
                    weekdays.append("mon")
                if 2 in days_list:
                    weekdays.append("tue")
                if 3 in days_list:
                    weekdays.append("wed")
                if 4 in days_list:
                    weekdays.append("thu")
                if 5 in days_list:
                    weekdays.append("fri")
                if 6 in days_list:
                    weekdays.append("sat")
                if 7 in days_list:
                    weekdays.append("sun")
        # parse time
        my_entry["start"] = import_time_input(str(time_str))
        if end_time_str:
            my_entry["stop"] = import_time_input(str(end_time_str))
        # parse action
        actions = []
        action_list = list(filter(None, action_list))
        action_list = [int(i) for i in action_list]
        for num in action_list:
            if num < len(data["actions"]):
                action = {}
                item = copy.copy(data["actions"][num])
                if "entity" in item:
                    action["entity_id"] = item["entity"]
                    del item["entity"]
                    # NOTE(review): at this point "service" has not yet been
                    # copied into `action`, so this domain-prefix guard can
                    # never fire here — verify the intended ordering.
                    if (
                        "." not in action["entity_id"]
                        and "service" in action
                        and "." in action["service"]
                    ):
                        action["entity_id"] = "{}.{}".format(
                            action["service"].split(".").pop(0), action["entity_id"]
                        )
                if "service" in item:
                    service = item["service"]
                    # NOTE(review): assumes "entity" was present whenever the
                    # service lacks a domain; otherwise this raises KeyError.
                    if "." not in service and "." in action["entity_id"]:
                        service = "{}.{}".format(
                            action["entity_id"].split(".").pop(0), service
                        )
                    action["service"] = service
                    del item["service"]
                if item:
                    # Whatever remains on the legacy action dict is payload.
                    action["service_data"] = item
                actions.append(action)
        my_entry["actions"] = actions
        # parse condition
        if condition_list:
            conditions = []
            condition_type = CONDITION_TYPE_OR
            conditions_list = []
            conditions_or = condition_list.split("C")
            # A multi-digit group means all its conditions must hold (AND).
            for group in conditions_or:
                if len(group) > 1:
                    condition_type = CONDITION_TYPE_AND
                    conditions_list = [int(i) for i in group]
            if condition_type == CONDITION_TYPE_OR:
                # NOTE(review): with several OR groups only the last group's
                # indexes survive this loop — confirm against the original.
                for group in conditions_or:
                    conditions_list = [int(i) for i in group]
            for num in conditions_list:
                if num < len(data["conditions"]):
                    item = data["conditions"][num]
                    condition = {
                        "entity_id": item["entity"],
                        "attribute": "state",
                        "value": item["state"],
                        "match_type": item["match_type"],
                    }
                    conditions.append(condition)
            my_entry["conditions"] = conditions
            my_entry["condition_type"] = condition_type
        entries.append(my_entry)
    # "run_once" in the legacy options maps to the new "pause" repeat type.
    repeat_type = "repeat"
    if "options" in data:
        if "run_once" in data["options"]:
            repeat_type = "pause"
    # Auto-generated names contain "#"; only keep user-chosen names.
    name = None
    if "friendly_name" in data and "#" not in data["friendly_name"]:
        name = data["friendly_name"]
    return {
        "schedule_id": entity_id.replace("schedule_", ""),
        "weekdays": weekdays,
        "timeslots": entries,
        "repeat_type": repeat_type,
        "name": name,
    }
|
const request = require('request-promise');
const fs = require('fs')
exports.scanStream = async function(stream) {
return await uploadFile(stream)
}
exports.scanFileLocation = async function(fileLocation) {
if(!fileLocation) throw new Error('scanFileLocation expects a file location')
const stats = fs.statSync(fileLocation);
const fileSizeInBytes = stats.size;
let readStream = fs.createReadStream(fileLocation);
return await uploadFile(readStream)
}
// POST the content as multipart form data and return the parsed JSON body.
// Rejects with the API-provided message when the scan did not succeed.
async function uploadFile(content) {
  const body = await request.post({
    url: 'https://api.unscan.co/nsfw',
    formData: { file: content },
  })
  const parsed = JSON.parse(body)
  if (!parsed.success) throw new Error(parsed.message)
  return parsed
}
// <NAME>, Geometric Tools, Redmond WA 98052
// Copyright (c) 1998-2016
// Distributed under the Boost Software License, Version 1.0.
// http://www.boost.org/LICENSE_1_0.txt
// http://www.geometrictools.com/License/Boost/LICENSE_1_0.txt
// File Version: 3.0.0 (2016/06/19)
#include <GTEnginePCH.h>
#include <Physics/GteFluid2UpdateState.h>
#include <Graphics/GteGraphicsEngine.h>
#include <Graphics/GteProgramFactory.h>
using namespace gte;
// Builds the compute resources for one fluid-state update pass: the output
// texture, the bilinear clamp sampler used by the advection lookup, and the
// compute shader compiled for the active graphics API.
Fluid2UpdateState::Fluid2UpdateState(std::shared_ptr<ProgramFactory> const& factory,
    int xSize, int ySize, int numXThreads, int numYThreads,
    std::shared_ptr<ConstantBuffer> const& parameters)
    :
    mNumXGroups(xSize/numXThreads),
    mNumYGroups(ySize/numYThreads)
{
    // Output state texture, written by the compute shader.
    mUpdateState = std::make_shared<Texture2>(DF_R32G32B32A32_FLOAT, xSize, ySize);
    mUpdateState->SetUsage(Resource::SHADER_OUTPUT);

    // Bilinear filtering with clamped addressing for sampling stateTm1.
    mAdvectionSampler = std::make_shared<SamplerState>();
    mAdvectionSampler->filter = SamplerState::MIN_L_MAG_L_MIP_P;
    mAdvectionSampler->mode[0] = SamplerState::CLAMP;
    mAdvectionSampler->mode[1] = SamplerState::CLAMP;

    // Create the shader for generating velocity from vortices.
    int i = factory->GetAPI();  // selects GLSL or HLSL source in msSource[]
    factory->PushDefines();
    factory->defines.Set("NUM_X_THREADS", numXThreads);
    factory->defines.Set("NUM_Y_THREADS", numYThreads);
    mComputeUpdateState = factory->CreateFromSource(*msSource[i]);
    if (mComputeUpdateState)
    {
        std::shared_ptr<ComputeShader> cshader = mComputeUpdateState->GetCShader();
        cshader->Set("Parameters", parameters);
#if defined(GTE_DEV_OPENGL)
        // GLSL binds the sampler through the texture's uniform name.
        cshader->Set("stateTm1", mAdvectionSampler);
#else
        cshader->Set("advectionSampler", mAdvectionSampler);
#endif
        cshader->Set("updateState", mUpdateState);
    }
    factory->PopDefines();
}
// Dispatches the update shader: combines the advected previous state
// (stateTm1), diffusion of the current state (stateT), and the source
// contribution, writing the result into mUpdateState.
void Fluid2UpdateState::Execute(std::shared_ptr<GraphicsEngine> const& engine,
    std::shared_ptr<Texture2> const& source,
    std::shared_ptr<Texture2> const& stateTm1,
    std::shared_ptr<Texture2> const& stateT)
{
    std::shared_ptr<ComputeShader> cshader = mComputeUpdateState->GetCShader();
    cshader->Set("source", source);
    cshader->Set("stateTm1", stateTm1);
    cshader->Set("stateT", stateT);
    engine->Execute(mComputeUpdateState, mNumXGroups, mNumYGroups, 1);
}
std::string const Fluid2UpdateState::msGLSLSource =
"uniform Parameters\n"
"{\n"
" vec4 spaceDelta; // (dx, dy, 0, 0)\n"
" vec4 halfDivDelta; // (0.5/dx, 0.5/dy, 0, 0)\n"
" vec4 timeDelta; // (dt/dx, dt/dy, 0, dt)\n"
" vec4 viscosityX; // (velVX, velVX, 0, denVX)\n"
" vec4 viscosityY; // (velVX, velVY, 0, denVY)\n"
" vec4 epsilon; // (epsilonX, epsilonY, 0, epsilon0)\n"
"};\n"
"\n"
"layout(rgba32f) uniform readonly image2D source;\n"
"layout(rgba32f) uniform readonly image2D stateT;\n"
"uniform sampler2D stateTm1;\n"
"layout(rgba32f) uniform writeonly image2D updateState;\n"
"\n"
"layout (local_size_x = NUM_X_THREADS, local_size_y = NUM_Y_THREADS, local_size_z = 1) in;\n"
"void main()\n"
"{\n"
" ivec2 c = ivec2(gl_GlobalInvocationID.xy);\n"
" ivec2 dim = imageSize(stateT);\n"
"\n"
" int x = int(c.x);\n"
" int y = int(c.y);\n"
" int xm = max(x - 1, 0);\n"
" int xp = min(x + 1, dim.x - 1);\n"
" int ym = max(y - 1, 0);\n"
" int yp = min(y + 1, dim.y - 1);\n"
"\n"
" // Sample states at (x,y), (x+dx,y), (x-dx,y), (x,y+dy), (x,y-dy).\n"
" vec4 stateZZ = imageLoad(stateT, c);\n"
" vec4 statePZ = imageLoad(stateT, ivec2(xp, y));\n"
" vec4 stateMZ = imageLoad(stateT, ivec2(xm, y));\n"
" vec4 stateZP = imageLoad(stateT, ivec2(x, yp));\n"
" vec4 stateZM = imageLoad(stateT, ivec2(x, ym));\n"
"\n"
" // Sample the source state at (x,y).\n"
" vec4 src = imageLoad(source, c);\n"
"\n"
" // Estimate second-order derivatives of state at (x,y).\n"
" vec4 stateDXX = statePZ - 2.0f*stateZZ + stateMZ;\n"
" vec4 stateDYY = stateZP - 2.0f*stateZZ + stateZM;\n"
"\n"
" // Compute advection.\n"
" vec2 tcd = spaceDelta.xy*(c.xy - timeDelta.xy*stateZZ.xy + 0.5f);\n"
" vec4 advection = textureLod(stateTm1, tcd, 0.0f);\n"
"\n"
" // Update the state.\n"
" imageStore(updateState, c, advection +\n"
" (viscosityX*stateDXX + viscosityY*stateDYY + timeDelta.w*src));\n"
"}\n";
std::string const Fluid2UpdateState::msHLSLSource =
"cbuffer Parameters\n"
"{\n"
" float4 spaceDelta; // (dx, dy, 0, 0)\n"
" float4 halfDivDelta; // (0.5/dx, 0.5/dy, 0, 0)\n"
" float4 timeDelta; // (dt/dx, dt/dy, 0, dt)\n"
" float4 viscosityX; // (velVX, velVX, 0, denVX)\n"
" float4 viscosityY; // (velVX, velVY, 0, denVY)\n"
" float4 epsilon; // (epsilonX, epsilonY, 0, epsilon0)\n"
"};\n"
"\n"
"Texture2D<float4> source;\n"
"Texture2D<float4> stateTm1;\n"
"Texture2D<float4> stateT;\n"
"SamplerState advectionSampler; // bilinear, clamp\n"
"RWTexture2D<float4> updateState;\n"
"\n"
"[numthreads(NUM_X_THREADS, NUM_Y_THREADS, 1)]\n"
"void CSMain(uint2 c : SV_DispatchThreadID)\n"
"{\n"
" uint2 dim;\n"
" stateT.GetDimensions(dim.x, dim.y);\n"
"\n"
" int x = int(c.x);\n"
" int y = int(c.y);\n"
" int xm = max(x - 1, 0);\n"
" int xp = min(x + 1, dim.x - 1);\n"
" int ym = max(y - 1, 0);\n"
" int yp = min(y + 1, dim.y - 1);\n"
"\n"
" // Sample states at (x,y), (x+dx,y), (x-dx,y), (x,y+dy), (x,y-dy).\n"
" float4 stateZZ = stateT[int2(x, y)];\n"
" float4 statePZ = stateT[int2(xp, y)];\n"
" float4 stateMZ = stateT[int2(xm, y)];\n"
" float4 stateZP = stateT[int2(x, yp)];\n"
" float4 stateZM = stateT[int2(x, ym)];\n"
"\n"
" // Sample the source state at (x,y).\n"
" float4 src = source[int2(x, y)];\n"
"\n"
" // Estimate second-order derivatives of state at (x,y).\n"
" float4 stateDXX = statePZ - 2.0f*stateZZ + stateMZ;\n"
" float4 stateDYY = stateZP - 2.0f*stateZZ + stateZM;\n"
"\n"
" // Compute advection.\n"
" float2 tcd = spaceDelta.xy*(c - timeDelta.xy*stateZZ.xy + 0.5f);\n"
" float4 advection = stateTm1.SampleLevel(advectionSampler, tcd, 0.0f);\n"
"\n"
" // Update the state.\n"
" updateState[c] = advection +\n"
" (viscosityX*stateDXX + viscosityY*stateDYY + timeDelta.w*src);\n"
"}\n";
// Index 0 = GLSL (OpenGL), index 1 = HLSL (Direct3D); selected with
// ProgramFactory::GetAPI() in the constructor.
std::string const* Fluid2UpdateState::msSource[] =
{
    &msGLSLSource,
    &msHLSLSource
};
|
#!/bin/bash
##################
# before
##################
# Shared helpers: appName, printScriptPart, printItem, waitEnter, etc.
. .utils.sh
printScriptPart
printItem "show your application logs"
printSubitem "no errors are expected this time"
waitEnter
##################
# do
##################
# Abort on the first failing command.
set -e
APP_NAME=$(appName)
# Show the last 8 checkEnv/checkServices lines from the app's recent logs.
tsuru app-log -a $APP_NAME -l 1000 | grep "checkEnv\|checkServices" | tail -n 8
##################
# after
##################
printUserPart
printItem "read the logs above"
printUserPartContinue
echo
|
#!/bin/bash
# failure is a natural part of life
set -e
# R package task: lint, build the source tarball, then run CRAN checks on it.
if [[ "$TASK" == "rpkg" ]]; then
    Rscript .ci/lint_r_code.R $(pwd)
    R CMD build $(pwd)/r-pkg
    # Skip the CRAN "incoming feasibility" checks (network/maintainer checks).
    export _R_CHECK_CRAN_INCOMING_=false
    R CMD check \
        --as-cran \
        *.tar.gz
fi
# Python package task: install build deps and run the test suite.
if [[ "$TASK" == "pypkg" ]]; then
    pip install wheel
    pytest \
        --verbose \
        $(pwd)/py-pkg
fi
|
import { ChangeDetectionStrategy, ChangeDetectorRef, Component, ElementRef, OnInit } from '@angular/core'
import { IGridCellValue } from '@blueshiftone/ngx-grid-core'
import { ToolbarService } from '../../../../toolbar.service'
import { EToolbarItemPlacement } from '../../../../typings/enums/toolbar-item-placement.enum'
import { IToolbarComponent } from '../../../../typings/interfaces/toolbar-component.interface'
import { AutoUnsubscribe } from '../../../../utils/auto-unsubscribe'
/**
 * Toolbar button for committing dirty grid records.
 * Visible only while the grid has dirty rows; with OnPush change detection
 * it re-renders manually whenever the toolbar's selection slice changes.
 */
@Component({
  selector: 'data-grid-toolbar-commit-records',
  templateUrl: './commit-records.component.html',
  styleUrls: [
    '../common-toolbar-item-styles.scss',
    './commit-records.component.scss',
  ],
  changeDetection: ChangeDetectionStrategy.OnPush
})
export class CommitRecordsComponent extends AutoUnsubscribe implements IToolbarComponent, OnInit {

  public readonly isEnabled: boolean = true
  public readonly sortOrder: number = 1
  public readonly placement: EToolbarItemPlacement = EToolbarItemPlacement.Primary

  constructor(
    private readonly elRef          : ElementRef<HTMLElement>,
    private readonly changeDetection: ChangeDetectorRef,
    public readonly toolbarService  : ToolbarService
  ) { super() }

  ngOnInit(): void {
    // OnPush: trigger change detection whenever the selection changes.
    this.addSubscription(this.toolbarService.selectionSlice.subscribe(_ => this.changeDetection.detectChanges()))
  }

  /** True when any cell in a dirty row failed its next-value validation. */
  public get hasInvalidValues(): boolean {
    const controller = this.toolbarService.gridController
    if (!controller) return false
    const meta = controller.cell.GetAllCellMetaForDirtyRows.run() ?? []
    const values = meta.map(m => controller.cell.GetCellValue.run(m.coords)).filter(c => c) as IGridCellValue[]
    return typeof values.find(val => val.validationState?.nextIsValid === false) !== 'undefined'
  }

  /** Show the button only when there is at least one dirty row to commit. */
  public get isVisible(): boolean {
    return (this.toolbarService.gridController?.row.dirtyRowsMap.size ?? 0) > 0
  }

  public get element() { return this.elRef.nativeElement }

  public detectChanges = () => this.changeDetection.detectChanges()

  /** Number of dirty rows within the current selection. */
  public get selectionDirtyRecordsCount(): number {
    const meta = this.toolbarService.currentMeta
    return meta.rows.filter(row => row.isDirty).length
  }

  public commitSelected = () => this.toolbarService.gridController?.grid.CommitSelected.run()
  public commitAll = () => this.toolbarService.gridController?.grid.CommitAll.run()
}
|
<filename>front-material/src/Services/wordService.js
import http from "./httpService";
import { APIEndpoint } from "./config.json";
const url = APIEndpoint + "/word";
// Create a new word; resolves with the created resource payload.
export async function saveWord(word) {
  const { data } = await http.post(url, word);
  return data;
}
// Fetch all words; resolves with the full HTTP response object
// (note: unlike getPracticeWords, callers receive the whole response).
export async function getWords() {
  return await http.get(url);
}
// Fetch the practice word list; resolves with the response body only.
export async function getPracticeWords() {
  const response = await http.get(`${url}/practice/`);
  return response.data;
}
// Delete a word by id; resolves with the full HTTP response object.
export async function deleteWord(id) {
  return await http.delete(`${url}/${id}`);
}
|
<filename>stylization/stylization.py
"""This module contains various stylization functions of text appearance."""
import sys
# ANSI SGR escape codes for text attributes, keyed by human-readable name.
_style_dict = {
    "reset": "\033[0m",
    "bold": "\033[01m",
    "disable": '\033[02m',
    "underline": '\033[04m',
    "reverse": '\033[07m',
    "strikethrough": '\033[09m',
    "invisible": '\033[08m'
}
# ANSI escape codes for foreground (text) colors.
_fg_dict = {
    "black": "\033[30m",
    "red": "\033[31m",
    "green": "\033[32m",
    "orange": "\033[33m",
    "blue": "\033[34m",
    "purple": "\033[35m",
    "cyan": "\033[36m",
    "lightgrey": "\033[37m",
    "darkgrey": "\033[90m",
    "lightred": "\033[91m",
    "lightgreen": "\033[92m",
    "yellow": "\033[93m",
    "lightblue": "\033[94m",
    "pink": "\033[95m",
    "lightcyan": "\033[96m"
}
# ANSI escape codes for background colors (smaller palette than foreground).
_bg_dict = {
    "black": "\033[40m",
    "red": "\033[41m",
    "green": "\033[42m",
    "orange": "\033[43m",
    "blue": "\033[44m",
    "purple": "\033[45m",
    "cyan": "\033[46m",
    "lightgrey": "\033[47m"
}
def _names2ascii(fg=None, stylename=None, bg=None) -> str:
    """Build the ANSI escape prefix for the given style/color names.

    ``stylename`` may hold several space-separated style keys; ``fg`` and
    ``bg`` are single color names. Order of the result is styles, then
    foreground, then background. Unknown names raise KeyError.
    """
    parts = []
    if stylename is not None:
        parts.extend(_style_dict[item] for item in stylename.split(" "))
    if fg is not None:
        parts.append(_fg_dict[fg])
    if bg is not None:
        parts.append(_bg_dict[bg])
    return "".join(parts)
def style_string(string: str, fg=None, stylename=None, bg=None) -> str:
    """Return ``string`` wrapped in ANSI codes for the requested style,
    foreground and background, terminated by a reset sequence so later
    output is unaffected."""
    prefix = _names2ascii(fg, stylename, bg)
    return prefix + string + _style_dict["reset"]
def style_func_stream(stream=sys.stdout, fg=None, stylename=None, bg=None):
    """Decorator factory: style everything the wrapped function writes to
    ``stream``, then reset afterwards.

    It can change style (bold, underline, ...), foreground and background
    colors of the wrapped call's output.

    Example usage:
        style_func_stream(_stream, fg=fg, stylename=stylename, bg=bg)\\
            (sys.print_exception)\\
            (e, _stream)

    It may also be used as a plain decorator.
    """
    def decorator(func):
        def wrapper(*args, **kwds):
            # Emit the style prefix, run the function, then reset the stream.
            # NOTE(review): wrapper discards func's return value — confirm
            # that wrapped functions are used for their side effects only.
            ascii_str = _names2ascii(fg, stylename, bg)
            stream.write(ascii_str)
            func(*args, **kwds)
            stream.write(_style_dict["reset"])
        return wrapper
    return decorator
def _chunks(l: bytearray, n: int):
"""Yield successive n-sized chunks from l."""
for i in range(0, len(l), n):
yield l[i:i + n]
def hexdump(bytebuffer: bytearray, offset: int = 0):
    """Print hexdump of bytearray from offset.

    Classic layout: 8-digit hex address, two groups of 8 hex bytes, then
    an ASCII column with '.' substituted for non-printable bytes.
    """
    for i, chunk in enumerate(_chunks(bytebuffer, 16)):
        print("%08X: " % (i * 16 + offset), end="")
        for byte in chunk[:8]:
            print('%02X ' % byte, end="")
        print(' ', end="")  # visual gap between the two 8-byte groups
        for byte in chunk[8:]:
            print('%02X ' % byte, end="")
        for k in range(16 - len(chunk)):
            print('%2s ' % " ", end="")  # pad a short final line to align
        print(' | ', end="")
        for byte in chunk:
            if 0x20 <= byte <= 0x7F:
                print("%c" % chr(byte), end="")
            else:
                print(".", end="")
        print()
|
import { Locale } from '../types'
const enLocale: Locale = {
everyText: 'Every',
periodLabel: 'Period',
atEveryText: 'At/Every',
minuteLabel: 'Minute(s)',
atOptionLabel: 'at',
everyOptionLabel: 'every',
betweenText: 'between',
inText: 'in',
onText: 'on',
andText: 'and',
monthLabel: 'Month(s)',
dayOfWeekLabel: 'Day of the week',
onEveryText: 'On/Every',
multiDayOfMonthLabel: 'Day of the Month',
dayOfMonthLabel: 'Days',
hourLabel: 'Hour(s)',
weekDaysOptions: [
'SUNDAY',
'MONDAY',
'TUESDAY',
'WEDNESDAY',
'THURSDAY',
'FRIDAY',
'SATURDAY',
],
periodOptions: ['hour', 'day', 'week', 'month', 'year'],
shortMonthOptions: [
'Jan',
'Feb',
'Mar',
'Apr',
'May',
'Jun',
'Jul',
'Aug',
'Sep',
'Oct',
'Nov',
'Dec',
],
onOptionLabel: 'on',
lastDayOfMonthLabel: 'Last day of month',
cronDescriptionText: 'en',
}
export default enLocale
|
#!/bin/bash
# Create the dist layout for server views and static assets in one call;
# `mkdir -p` creates all missing parents, so a single invocation suffices.
mkdir -p ./dist/server/{views,assets/{js,css,images}}
|
<filename>app/sso/tests/test_queries.py<gh_stars>10-100
# -*- coding: utf-8 -*-
import testutils.cases as cases
import testutils.factories as factories
import testutils.helpers as helpers
import testutils.decorators as decorators
from django.test import override_settings
from django.utils import timezone
from app.sso.models import SSOConnection
class TestGetSSOConnections(cases.GraphQLTestCase):
"""Test cases for the fetching sso connections associated
with the current workspace.
"""
factory = factories.SSOConnectionFactory
operation = 'ssoConnections'
statement = '''
query {
ssoConnections {
edges {
node {
id
entityId
ssoUrl
extras
}
}
totalCount
}
}
'''
def setUp(self):
super(TestGetSSOConnections, self).setUp()
self.count = 5
self.connections = self.factory.create_batch(self.count, workspace=self.workspace)
self.other_connection = self.factory(workspace=self.other_workspace)
@decorators.as_someone(['MEMBER', 'READONLY', 'OWNER'])
def test_query_when_authorized(self):
results = self.execute(self.statement)
results = results['data'][self.operation]
self.assertEqual(
first=len(results['edges']),
second=self.count,
msg="Node count should equal number of active domains."
)
self.assertEqual(
first=len(results['edges']),
second=results['totalCount'],
msg="Node count should equal totalCount field."
)
@decorators.as_someone(['OUTSIDER', 'ANONYMOUS'])
def test_query_when_not_authorized(self):
"""Outside users should not be able to access this resource.
"""
self.assertPermissionDenied(self.execute(self.statement))
class TestGetSSOConnection(cases.GraphQLTestCase):
"""Test cases for the fetching a specific SSO Connection.
"""
factory = factories.SSOConnectionFactory
operation = 'ssoConnection'
statement = '''
query GetSSOConnection($id: ID!) {
ssoConnection(id: $id) {
id
pk
name
provider
protocol
}
}
'''
def setUp(self):
super(TestGetSSOConnection, self).setUp()
self.connection_kwargs = {
'provider': SSOConnection.GOOGLE,
'workspace': self.workspace,
}
self.resource = self.factory(**self.connection_kwargs)
self.global_id = helpers.to_global_id('SSOConnectionType', self.resource.pk)
@decorators.as_someone(['MEMBER', 'READONLY', 'OWNER'])
def test_query(self):
"""It should return the requested resource.
"""
results = self.execute(self.statement, variables={'id': self. global_id})
results = results['data'][self.operation]
self.assertEqual(results, {
'id': self.global_id,
'pk': self.resource.pk,
'name': 'google-%s' % self.resource.pk,
'provider': 'GOOGLE',
'protocol': 'oauth2',
})
@decorators.as_someone(['OUTSIDER'])
def test_query_when_not_authorized(self):
"""Outside users should not be able to access this resource.
"""
results = self.execute(self.statement, variables={'id': self.global_id})
self.assertPermissionDenied(results)
class TestGetSSODomains(cases.GraphQLTestCase):
"""Test cases for the fetching sso domains for a workspace.
"""
factory = factories.SSODomainFactory
operation = 'ssoDomains'
statement = '''
query GetSSODomains {
ssoDomains {
edges {
node {
id
pk
domain
verificationStatus
verificationToken
}
}
totalCount
}
}
'''
def setUp(self):
super(TestGetSSODomains, self).setUp()
self.count = 5
self.domains = self.factory.create_batch(self.count, workspace=self.workspace)
self.other_domain = self.factory(workspace=self.other_workspace)
@decorators.as_someone(['MEMBER', 'READONLY', 'OWNER'])
def test_query_when_authorized(self):
results = self.execute(self.statement)
results = results['data'][self.operation]
self.assertEqual(
first=len(results['edges']),
second=self.count,
msg="Node count should equal number of active domains."
)
self.assertEqual(
first=len(results['edges']),
second=results['totalCount'],
msg="Node count should equal totalCount field."
)
@decorators.as_someone(['OUTSIDER', 'ANONYMOUS'])
def test_query_when_not_authorized(self):
"""Outside users should not be able to access this resource.
"""
self.assertPermissionDenied(self.execute(self.statement))
class TestGetSSODomain(cases.GraphQLTestCase):
"""Test cases for the fetching a specific SSO Domain.
"""
factory = factories.SSODomainFactory
operation = 'ssoDomain'
statement = '''
query GetSSODomain($id: ID!) {
ssoDomain(id: $id) {
id
pk
domain
verificationStatus
verificationToken
}
}
'''
def setUp(self):
super(TestGetSSODomain, self).setUp()
self.domain_kwargs = {
'domain': 'metamapper.io',
'verified_at': timezone.now(),
'workspace': self.workspace,
}
self.resource = self.factory(**self.domain_kwargs)
self.resource.save()
self.global_id = helpers.to_global_id('SSODomainType', self.resource.pk)
@decorators.as_someone(['MEMBER', 'READONLY', 'OWNER'])
def test_query(self):
"""It should return the requested resource.
"""
results = self.execute(self.statement, variables={'id': self. global_id})
results = results['data'][self.operation]
self.assertEqual(results, {
'id': self.global_id,
'pk': self.resource.pk,
'domain': 'metamapper.io',
'verificationStatus': 'SUCCESS',
'verificationToken': self.resource.verification_token,
})
@decorators.as_someone(['OUTSIDER'])
def test_query_when_not_authorized(self):
"""Outside users should not be able to access this resource.
"""
results = self.execute(self.statement, variables={'id': self.global_id})
self.assertPermissionDenied(results)
@override_settings(GOOGLE_ENABLED=True, GITHUB_ENABLED=True, GOOGLE_CLIENT_ID='meow', GITHUB_CLIENT_ID='wuph')
class TestGetSSOProviders(cases.GraphQLTestCase):
"""Test cases for the fetching the list of supported SSO providers.
"""
factory = None
operation = 'ssoProviders'
statement = '''
query GetSSOProviders {
ssoProviders
}
'''
@decorators.as_someone(['MEMBER', 'OWNER', 'OUTSIDER', 'READONLY', 'ANONYMOUS'])
def test_valid(self):
"""Anyone can access this endpoint.
"""
response = self.execute(self.statement)
response = response['data'][self.operation]
self.assertEqual(response, [
{
'clientId': 'wuph',
'provider': 'GITHUB',
'label': 'Github',
'protocol': 'oauth2',
},
{
'clientId': 'meow',
'provider': 'GOOGLE',
'label': 'Google for Work',
'protocol': 'oauth2',
},
{
'clientId': None,
'provider': 'GENERIC',
'label': 'SAML2',
'protocol': 'saml2',
},
])
class TestGetSSOPrimaryKey(cases.GraphQLTestCase):
    """Test cases for fetching a generated SSO connection primary key.

    (The previous docstring was copy-pasted from the providers test.)
    """
    factory = None
    operation = 'ssoPrimaryKey'
    statement = '''
    query GetSSOConnectionPrimaryKey {
      ssoPrimaryKey
    }
    '''

    @decorators.as_someone(['MEMBER', 'OWNER', 'OUTSIDER', 'READONLY'])
    def test_valid(self):
        """Any authenticated user can access this endpoint.
        """
        # Two consecutive fetches: both 12 characters long and distinct,
        # i.e. the key is randomly generated per request.
        response1 = self.execute(self.statement)
        response1 = response1['data'][self.operation]
        self.assertEqual(len(response1), 12)
        response2 = self.execute(self.statement)
        response2 = response2['data'][self.operation]
        self.assertEqual(len(response2), 12)
        self.assertNotEqual(response1, response2)

    @decorators.as_someone(['ANONYMOUS'])
    def test_query_when_not_authorized(self):
        """Unauthenticated users should not be able to access this resource.
        """
        self.assertPermissionDenied(
            self.execute(self.statement)
        )
|
#!/bin/bash
# Example batch script for running tlbr example on a SLURM cluster
# set the number of nodesT
#SBATCH --nodes=4
# set the number of cpus per node.
#SBATCH --mincpus=32
# set max wallclock time for the entire fitting job (1 day)
#SBATCH --time=1-00:00:00
# set name of job
#SBATCH --job-name=pybnf
# Enable Anaconda Python 3.5
# Your cluster might require something different here, or might not require anything.
module load anaconda/Anaconda3
# Run PyBNF
pybnf -c tlbr.conf -t SLURM -o
|
<gh_stars>0
/*
Siesta 4.2.2
Copyright(c) 2009-2016 Bryntum AB
http://bryntum.com/contact
http://bryntum.com/products/siesta/license
*/
Class('Ariadne.DomQueryFinder.Identifier.CssClass', {
isa : Ariadne.QueryFinder.Identifier,
does : Ariadne.DomQueryFinder.Role.DomHelper,
has : {
// an array of strings, which will be join with "|" and converted to RegExp
// values from the class definition will be combined with the values provided to the instance
ignoreCssClasses : function () {
return [
'^null$',
'^undefined$'
]
},
ignoreCssClassesRegExp : null
},
methods : {
initialize : function (cfg) {
var ignoreCssClasses = this.mergeArrayAttributeFromClassHierarchy('ignoreCssClasses', cfg)
this.ignoreCssClassesRegExp = ignoreCssClasses.length ? new RegExp(ignoreCssClasses.join('|')) : /\0/
},
identify : function (target, root, maze) {
var classes = this.processCssClasses(this.getCssClasses(target), target)
var segments = []
for (var i = 0; i < classes.length; i++) {
// use object notation for segment, so subclasses can call SUPER and assign custom weights
segments.push({
query : '.' + this.escapeDomSelector(classes[ i ], true),
weight : this.getWeightForCssClass(classes[ i ], target)
})
}
return segments
},
getWeightForCssClass : function (cls, target) {
return 1000
},
ignoreCssClass : function (cls, dom) {
return this.ignoreCssClassesRegExp.test(cls)
},
processCssClasses : function (classes, target) {
return classes
}
}
// eof methods
});
|
import asyncHandler from 'express-async-handler';
import User from '../models/User.js';
//@DESC Get All Users
//@ROUTE /api/v1/users
//@METHOD GET
// NOTE(review): responds 201 (Created) on a read — 200 is conventional;
// confirm before changing, clients may depend on the current status.
export const getAll = asyncHandler(async (req, res) => {
  const users = await User.find({});
  res.status(201).json({ success: true, count: users.length, data: users });
});
//@DESC Get Single User
//@ROUTE /api/v1/users/:id
//@METHOD GET
// Responds 404 when no user exists for the given id.
export const getUser = asyncHandler(async (req, res) => {
  const user = await User.findById(req.params.id);
  if (!user) {
    res.status(404);
    throw new Error('User not found');
  }
  res.status(201).json({ success: true, data: user });
});
//@DESC Add User
//@ROUTE /api/v1/users
//@METHOD POST
// Creates a user straight from the request body; schema validation is
// delegated to the User model.
export const addUser = asyncHandler(async (req, res) => {
  const user = await User.create(req.body);
  res.status(201).json({ success: true, data: user });
});
//@DESC Update User
//@ROUTE /api/v1/users/:id
//@METHOD PUT
// Only the user themselves may update their record: 404 when the id is
// unknown, 401 when the authenticated user does not own the record.
export const updateUser = asyncHandler(async (req, res) => {
  let user = await User.findById(req.params.id);
  if (!user) {
    res.status(404);
    throw new Error('User not found');
  }
  if (req.user.id !== user._id.toString()) {
    res.status(401);
    // BUGFIX: corrected "authrized" typo in the client-facing message.
    throw new Error('User not authorized');
  }
  user = await User.findByIdAndUpdate(req.params.id, req.body, {
    new: true, // return the updated document, not the original
    runValidators: true, // re-run schema validators on the update payload
  });
  res.status(201).json({ success: true, data: user });
});
//@DESC Delete User
//@ROUTE /api/v1/users/:id
//@METHOD DELETE
// Only the user themselves may delete their record: 404 when the id is
// unknown, 401 when the authenticated user does not own the record.
export const deleteUser = asyncHandler(async (req, res) => {
  let user = await User.findById(req.params.id);
  if (!user) {
    res.status(404);
    throw new Error('User not found');
  }
  if (req.user.id !== user._id.toString()) {
    res.status(401);
    // BUGFIX: corrected "authrized" typo in the client-facing message.
    throw new Error('User not authorized');
  }
  await user.delete();
  res.status(201).json({ success: true, data: {} });
});
|
#!/usr/bin/env bash
#
# Copyright (c) 2018-2020 The Beans Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.

# Wrap every built test/benchmark binary so it runs through QEMU user-mode
# emulation (cross-architecture CI): each executable is renamed to
# "<name>_orig" and replaced by a shim that invokes $QEMU_USER_CMD on it,
# forwarding all arguments.
export LC_ALL=C.UTF-8
for b_name in {"${BASE_OUTDIR}/bin"/*,src/secp256k1/*tests,src/univalue/{no_nul,test_json,unitester,object}}; do
    # shellcheck disable=SC2044
    for b in $(find "${BASE_ROOT_DIR}" -executable -type f -name $(basename $b_name)); do
        echo "Wrap $b ..."
        mv "$b" "${b}_orig"
        echo '#!/usr/bin/env bash' > "$b"
        echo "$QEMU_USER_CMD \"${b}_orig\" \"\$@\"" >> "$b"
        chmod +x "$b"
    done
done
|
<reponame>wani-hackase/wani-writeup
import gmpy2

# Unpadded RSA where m^e < n: the ciphertext is simply m^e over the
# integers, so the plaintext is recovered as the exact e-th integer root.
e = 0x10001

with open('flag.enc', 'rb') as f:
    ciphertext = f.read()

c = int.from_bytes(ciphertext, byteorder='little')
root, is_exact = gmpy2.iroot(c, e)

if is_exact:
    root = int(root)
    print(root.to_bytes(root.bit_length() // 8 + 1, byteorder='little'))
else:
    print('Failed')
|
import selectors
import socket
import types
from typing import Callable
def create_echo_server(port: int) -> Callable[[str], None]:
    """Build a non-blocking echo server bound to localhost:``port``.

    Returns a control callable accepting "start" or "stop"; any other action
    raises ValueError. NOTE(review): nothing here drives ``sel.select()`` —
    the caller appears responsible for running the event loop; confirm.
    """
    sel = selectors.DefaultSelector()
    num_connections = 0

    def start_server():
        # Listening socket: non-blocking, SO_REUSEADDR so quick restarts work.
        server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        server_socket.bind(('localhost', port))
        server_socket.listen()
        server_socket.setblocking(False)
        sel.register(server_socket, selectors.EVENT_READ, accept_connection)

    def stop_server():
        # Close every socket still registered before discarding the selector.
        # Previously only the selector was closed, leaking the listening
        # socket (and any live connections) until garbage collection.
        for key in list(sel.get_map().values()):
            sel.unregister(key.fileobj)
            key.fileobj.close()
        sel.close()

    def accept_connection(sock, mask):
        nonlocal num_connections
        conn, addr = sock.accept()
        conn.setblocking(False)
        num_connections += 1  # previously declared but never updated
        sel.register(conn, selectors.EVENT_READ, read_data)

    def read_data(conn, mask):
        data = conn.recv(1024)
        if data:
            conn.send(data)  # echo back
        else:
            # Peer closed the connection.
            sel.unregister(conn)
            conn.close()

    def server_control(action: str):
        if action == "start":
            start_server()
        elif action == "stop":
            stop_server()
        else:
            raise ValueError("Invalid action")

    return server_control
#!/bin/bash
#
# CoreOS specific functions
#
# (c) 2014-2016, Hetzner Online GmbH
#
# This file isn't ready for production!
#
# SCRIPTPATH will be exported in config.sh
# shellcheck disable=SC2153
export IMAGE_PUBKEY="$SCRIPTPATH/gpg/coreos-pubkey.asc"
# create partitons on the given drive
# create_partitions "DRIVE"
# CoreOS images ship pre-partitioned, so the usual disk-preparation steps
# are deliberate no-ops; only an empty fstab is created for later steps.
create_partitions() {
  touch "$FOLD/fstab"
  return 0
}
# no-op: software RAID is not assembled for CoreOS installs
make_swraid() {
  return 0
}
# no-op: the raw image is written directly to disk; nothing to mount here
mount_partitions() {
  return 0
}
# no-op: LVM is not used for CoreOS installs
make_lvm() {
  return 0
}
# no-op: filesystems come pre-formatted inside the image
format_partitions() {
  return 0
}
# validate image with detached signature
#validate_image() {
# # no detached sign found
# return 2
#}
# extract image file to hdd
# extract_image <image-given-flag> <image-type-suffix>
# Writes the (possibly compressed) image straight onto $DRIVE1 and asks the
# kernel to re-read the partition table. Returns 1 on unknown type/failure.
extract_image() {
  local COMPRESSION=""
  if [ -n "$1" ] && [ -n "$2" ]; then
    # Map the image suffix to its decompressor; "bin" is a raw image.
    case "$2" in
      bin) COMPRESSION="" ;;
      bgz) COMPRESSION="gzip" ;;
      bbz) COMPRESSION="bzip2" ;;
      bxz) COMPRESSION="xz" ;;
      *) return 1 ;;
    esac
    if [ -n "$COMPRESSION" ]; then
      # Decompress straight onto the target disk.
      "$COMPRESSION" -d --stdout "$EXTRACTFROM" > "${DRIVE1}"
      EXITCODE=$?
    else
      # Or write the raw binary image directly to disk.
      dd if="$EXTRACTFROM" of="${DRIVE1}" bs=1M
      EXITCODE=$?
    fi
    if [ "$EXITCODE" -eq "0" ]; then
      debug "# success"   # log message fixed (was "sucess")
      # inform the OS of partition table changes
      blockdev --rereadpt "${DRIVE1}"
      return 0
    else
      # wipefs --all "${DRIVE1}"
      return 1
    fi
  fi
}
# no-op: networking is configured through cloud-init instead
setup_network_config() {
  return 0
}
# generate_config_mdadm "NIL"
# no-op: mdadm configuration does not apply to CoreOS installs
generate_config_mdadm() {
  return 0
}
# generate_new_ramdisk "NIL"
# no-op: the image ships with its own initramfs
generate_new_ramdisk() {
  return 0
}
# no-op: no custom udev rules are installed at this stage
set_udev_rules() {
  return 0
}
# copy_mtab "NIL"
# no-op: mtab handling is not required for the image-based install
copy_mtab() {
  return 0
}
# no-op: SSH host keys are generated by the OS on first boot
generate_new_sshkeys() {
  return 0
}
# Append a write_files section for /etc/ntp.conf (Hetzner NTP servers,
# restricted to time queries) to the cloud-init user data.
# Returns 1 when the cloud-init file does not exist yet.
generate_ntp_config() {
  if [ -f "$CLOUDINIT" ]; then
    printf 'write_files:
- path: /etc/ntp.conf
content: |
# hetzner ntp servers
server ntp1.hetzner.de iburst
server ntp2.hetzner.com iburst
server ntp3.hetzner.net iburst
# - Allow only time queries, at a limited rate.
# - Allow all local queries (IPv4, IPv6)
restrict default nomodify nopeer noquery limited kod
restrict 127.0.0.1
restrict [::1]\n' >> "$CLOUDINIT"
    return 0
  else
    return 1
  fi
}
# set_hostname <hostname>
# Appends the hostname directive plus a separating blank line to the
# cloud-init user data; fails when the cloud-init file is missing.
set_hostname() {
  [ -f "$CLOUDINIT" ] || return 1
  printf 'hostname: %s\n\n' "$1" >> "$CLOUDINIT"
  return 0
}
# no-op: the cpufreq governor is left at the image default
setup_cpufreq() {
  return 0
}
# Append a write_files entry for /etc/resolv.conf to the cloud-init data,
# listing the configured IPv4 (unless V6ONLY) and IPv6 resolvers.
generate_resolvconf() {
  {
    echo "write_files:"
    echo " - path: /etc/resolv.conf"
    echo " permissions: 0644"
    echo " owner: root"
    echo " content: |"
    # IPV4
    if [ "$V6ONLY" -eq 1 ]; then
      debug "# skipping IPv4 DNS resolvers"
    else
      # Shuffle the resolver order so load spreads across the servers.
      for index in $(shuf --input-range=0-$(( ${#NAMESERVER[*]} - 1 )) | tr '\n' ' ') ; do
        echo " nameserver ${NAMESERVER[$index]}"
      done
    fi
    # IPv6
    if [ -n "$DOIPV6" ]; then
      for index in $(shuf --input-range=0-$(( ${#DNSRESOLVER_V6[*]} - 1 )) | tr '\n' ' ') ; do
        echo " nameserver ${DNSRESOLVER_V6[$index]}"
      done
    fi
  } >> "$CLOUDINIT"
  return 0
}
# no-op: /etc/hosts is managed by the OS/cloud-init defaults
generate_hosts() {
  return 0
}
# no-op: no sysctl tuning is applied for CoreOS installs
generate_sysctlconf() {
  return 0
}
# set_rootpassword <shadow-file> <crypted-password>
# Appends a cloud-init users section assigning the same password hash to the
# "core" and "root" users. A literal '*' hash (password login disabled) is
# skipped. Returns 1 when arguments are missing.
set_rootpassword() {
  if [ -n "$1" ] && [ -n "$2" ]; then
    if [ "$2" != '*' ]; then
      {
        echo "users:"
        echo " - name: core"
        echo " passwd: $2"
        echo " - name: root"
        echo " passwd: $2"
      } >> "$CLOUDINIT"
    fi
    return 0
  else
    return 1
  fi
}
# set sshd PermitRootLogin
# set_ssh_rootlogin <yes|no|without-password|forced-commands-only>
# Appends a cloud-init write_files entry replacing /etc/ssh/sshd_config with
# the given PermitRootLogin policy. Returns 1 on a missing/invalid argument.
# NOTE(review): the unquoted heredoc expands "$permit" at generation time,
# so the written sshd_config contains the value wrapped in double quotes —
# confirm sshd accepts the quoted form.
set_ssh_rootlogin() {
  if [ -n "$1" ]; then
    local permit="$1"
    case "$permit" in
      yes|no|without-password|forced-commands-only)
        cat << EOF >> "$CLOUDINIT"
write_files:
- path: /etc/ssh/sshd_config
permissions: 0600
owner: root:root
content: |
# Use most defaults for sshd configuration.
UsePrivilegeSeparation sandbox
Subsystem sftp internal-sftp
PermitRootLogin "$permit"
PasswordAuthentication yes
EOF
      ;;
      *)
        debug "invalid option for PermitRootLogin"
        return 1
      ;;
    esac
  else
    return 1
  fi
}
# copy_ssh_keys "$OPT_SSHKEYS_URL"
# copy_ssh_keys <url-or-path>
# Appends an ssh_authorized_keys section to the cloud-init user data, reading
# keys either from an http/https/ftp URL (fetched with wget) or a local file.
# Returns 1 when no source is given.
copy_ssh_keys() {
  if [ -n "$1" ]; then
    local key_url="$1"
    echo "ssh_authorized_keys:" >> "$CLOUDINIT"
    case "$key_url" in
      https:*|http:*|ftp:*)
        # NOTE(review): a failed wget is not detected here, which would
        # yield an empty key section — confirm this is acceptable.
        wget "$key_url" -O "$FOLD/authorized_keys"
        local line
        while read -r line; do
          echo " - $line" >> "$CLOUDINIT"
        done < "$FOLD/authorized_keys"
      ;;
      *)
        local line
        while read -r line; do
          echo " - $line" >> "$CLOUDINIT"
        done < "$key_url"
      ;;
    esac
  else
    return 1
  fi
}
# generate_config_grub <version>
# no-op: GRUB is not installed; the image boots with its own loader
generate_config_grub() {
  return 0
}
# no-op: see generate_config_grub
write_grub() {
  return 0
}
# add_coreos_oem_scripts <oem-mount-path>
# Installs helper scripts into the OEM partition: netname.sh (maps a udev
# IFINDEX to a simple ethN name) and rename-interfaces.sh.
add_coreos_oem_scripts() {
  if [ -n "$1" ]; then
    local mntpath=$1
    # add netname simplify script (use eth names)
    local scriptpath="$mntpath/bin"
    local scriptfile="$scriptpath/netname.sh"
    if [ ! -d "$scriptpath" ]; then
      mkdir -p "$scriptpath"
    fi
    # The heredoc delimiter is now quoted ('EOF') so $1 and the arithmetic
    # expansion are written literally into the generated script. Previously
    # they were expanded at generation time, so the script hard-coded the
    # mount path and always printed "eth-2" instead of using the IFINDEX
    # argument passed by the udev rule. Also write with ">" instead of ">>"
    # so re-running the installer does not duplicate the script body.
    cat << 'EOF' > "$scriptfile"
#! /bin/bash
IFINDEX=$1
echo "ID_NET_NAME_SIMPLE=eth$(( IFINDEX - 2 ))"
EOF
    chmod a+x "$scriptfile"
    scriptfile="$scriptpath/rename-interfaces.sh"
    cat << 'EOF' > "$scriptfile"
#! /bin/bash
INTERFACES=$(ip link show | gawk -F ':' '/^[0-9]+/ { print $2 }' | tr -d ' ' | sed 's/lo//')
for iface in ${INTERFACES}; do
ip link set ${iface} down
udevadm test /sys/class/net/${iface}
done
EOF
    chmod a+x "$scriptfile"
  fi
}
# add_coreos_oem_cloudconfig <oem-mount-path>
# Writes the OEM cloud-config: on bare metal it installs a udev rule plus a
# one-shot unit that renames interfaces via the OEM scripts; on a vServer it
# only writes the OEM identification block.
add_coreos_oem_cloudconfig() {
  if [ -n "$1" ]; then
    local mntpath=$1
    local cloudconfig="$mntpath/cloud-config.yml"
    echo "#cloud-config" > "$cloudconfig"
    if ! isVServer; then
      cat << EOF >> "$cloudconfig"
write_files:
- path: /run/udev/rules.d/79-netname.rules
permissions: 444
content: |
SUBSYSTEM!="net", GOTO="netname_end"
ACTION!="add", GOTO="netname_end"
ENV{ID_BUS}!="pci", GOTO="netname_end"
IMPORT{program}="/usr/share/oem/bin/netname.sh \$env{IFINDEX}"
NAME=="", ENV{ID_NET_NAME_SIMPLE}!="", NAME="\$env{ID_NET_NAME_SIMPLE}"
LABEL="netname_end"
coreos:
units:
- name: rename-network-interfaces.service
command: start
runtime: yes
content: |
[Unit]
Before=user-config.target
[Service]
Type=oneshot
RemainAfterExit=yes
ExecStart=/usr/bin/systemctl stop systemd-networkd
ExecStart=/usr/share/oem/bin/rename-interfaces.sh
ExecStart=/usr/bin/systemctl start systemd-networkd
oem:
id: baremetal
name: Hetzner Cloud on Root
home-url: http://www.hetzner.com
bug-report-url: https://github.com/coreos/bugs/issues
EOF
    else
      cat << EOF >> "$cloudconfig"
oem:
id: vserver
name: Hetzner vServer
home-url: http://www.hetzner.com
bug-report-url: https://github.com/coreos/bugs/issues
EOF
    fi
  fi
}
#
# os specific functions
# for purpose of e.g. debian-sys-maint mysql user password in debian/ubuntu LAMP
#
# Mounts the installed image's ROOT and OEM filesystems, drops the OEM
# helper scripts/cloud-config in place, and stages the cloud-init user data
# where coreos-install expects it. Returns 1 if either mount fails.
run_os_specific_functions() {
  local ROOT_DEV; ROOT_DEV=$(blkid -t "LABEL=ROOT" -o device "${DRIVE1}"*)
  local OEM_DEV; OEM_DEV=$(blkid -t "LABEL=OEM" -o device "${DRIVE1}"*)
  local is_ext4; is_ext4=$(blkid -o value "$ROOT_DEV" | grep ext4)
  # Use PIPESTATUS[0] so we test the mount's own exit code; "$?" after the
  # pipeline previously reported debugoutput's status instead.
  if [ -n "$is_ext4" ]; then
    mount "${ROOT_DEV}" "$FOLD/hdd" 2>&1 | debugoutput ; EXITCODE=${PIPESTATUS[0]}
  else
    mount -t btrfs -o subvol=root "${ROOT_DEV}" "$FOLD/hdd" 2>&1 | debugoutput ; EXITCODE=${PIPESTATUS[0]}
  fi
  [ "$EXITCODE" -ne "0" ] && return 1
  # mount OEM partition as well
  mount "${OEM_DEV}" "$FOLD/hdd/usr" 2>&1 | debugoutput ; EXITCODE=${PIPESTATUS[0]}
  [ "$EXITCODE" -ne "0" ] && return 1
  if ! isVServer; then
    add_coreos_oem_scripts "$FOLD/hdd/usr"
  fi
  add_coreos_oem_cloudconfig "$FOLD/hdd/usr"
  mkdir -p "$FOLD/hdd/var/lib/coreos-install"
  debugoutput < "$CLOUDINIT"
  cp "$CLOUDINIT" "$FOLD/hdd/var/lib/coreos-install/user_data"
  return 0
}
# vim: ai:ts=2:sw=2:et
|
extension WLPublishTableHeaderView {
    /// Re-pins the text field to the header: full width, with a 1pt inset
    /// from the top and bottom edges.
    public override func layoutSubviews() {
        super.layoutSubviews()
        tf.snp.makeConstraints { maker in
            maker.left.right.equalToSuperview()
            maker.top.equalTo(1)
            maker.bottom.equalTo(-1)
        }
    }
}
import linecache
class BreakpointLocation:
    """A resolved breakpoint position: a source file plus a line number."""

    def __init__(self, file_name, line_number):
        # Path of the source file the breakpoint refers to.
        self.file_name = file_name
        # 1-based line number within that file.
        self.line_number = line_number
def run_break_set_by_file_and_line(file_name, line_number, num_expected_locations, loc_exact):
    """Simulate setting a breakpoint at ``file_name:line_number``.

    Returns a BreakpointLocation when the (1-based) line number falls inside
    the file, otherwise None. ``num_expected_locations`` and ``loc_exact``
    are accepted for API compatibility but unused by this simulation.
    """
    if line_number < 1:
        # Line numbers are 1-based; previously 0/negative values "matched".
        return None
    with open(file_name, 'r') as file:
        # Count lines without materialising the whole file as a list.
        total_lines = sum(1 for _ in file)
    if line_number <= total_lines:
        return BreakpointLocation(file_name, line_number)
    return None
# Test the function
# NOTE(review): this expects a "main.cpp" in the working directory; open()
# raises FileNotFoundError otherwise. num_expected_locations/loc_exact are
# passed through but ignored by the simulation above.
file_name = "main.cpp"
line_number = 10
num_expected_locations = -1
loc_exact = True
result = run_break_set_by_file_and_line(file_name, line_number, num_expected_locations, loc_exact)
print(result)  # e.g. <__main__.BreakpointLocation object at 0x...> or None
<reponame>qubitdigital/response
from django.conf.urls import url, include
from django.urls import path
import response.slack.views as views
# Slack integration endpoints; the route names are used for reverse() lookups.
urlpatterns = [
    path('slash_command', views.slash_command, name='slash_command'),
    path('action', views.action, name='action'),
    path('event', views.event, name='event'),
    # Hit by an external scheduler once a minute to drive periodic work.
    path('cron_minute', views.cron_minute, name='cron_minute'),
]
|
import re
from typing import Callable
class Message:
    """Minimal message wrapper used by the templating example below."""

    def __init__(self, content: str):
        # Raw message text.
        self.content = content

    @staticmethod
    def make_fake_message() -> 'Message':
        """Return a stand-in message with canned content (for examples/tests)."""
        return Message("Fake message content")

    def template(self, template_str: str) -> 'Template':
        """Create a Template from ``template_str`` (the message itself is unused)."""
        return Template(template_str)
class Template:
    """String template supporting ``{key}`` and ``{key:spec}`` placeholders.

    A spec naming a registered format function is applied to the field value;
    an unknown spec falls back to the plain value; placeholders without a
    matching keyword are left untouched.
    """

    def __init__(self, template_str: str):
        self.template_str = template_str
        # Maps function __name__ -> callable applied to the field value.
        self.format_specs = {}

    def add_format_spec(self, func: Callable[[str], str]):
        """Register ``func`` under its ``__name__``; duplicates are rejected."""
        func_name = func.__name__
        if func_name in self.format_specs:
            raise ValueError(f"Function '{func_name}' already registered as a format spec")
        self.format_specs[func_name] = func

    def format(self, **kwargs) -> str:
        """Substitute keyword values into the template.

        Fixes the original implementation, which (a) never matched
        "{key:spec}" placeholders because it only replaced bare "{key}", and
        (b) passed the whole partially-formatted string to spec functions
        instead of the field value.
        """
        def substitute(match):
            key, spec = match.group(1), match.group(2)
            if key not in kwargs:
                return match.group(0)  # leave unknown placeholders untouched
            value = str(kwargs[key])
            if spec and spec in self.format_specs:
                return self.format_specs[spec](value)
            return value

        return re.sub(r'\{(\w+)(?::(\w+))?\}', substitute, self.template_str)
# Example usage
def custom(input: str) -> str:
    """Example format spec: tags the value so its application is visible."""
    return input + "-custom!"

# Bind the instance to its own name; the original rebound `Message` itself
# (Message = Message.make_fake_message()), shadowing the class for any later
# code in this module.
fake_message = Message.make_fake_message()
template = fake_message.template("{a:custom}{b:text}{c:image}")
template.add_format_spec(custom)
formatted = template.format(a="Custom", b="Text", c="Image")
print(formatted)  # expected "Custom-custom!TextImage" once Template handles specs
<!-- Contact form: POSTs name + email to the site root.
     Both fields are required; browser-native validation applies. -->
<form action="/" method="POST">
  <label for="name">Name:</label>
  <input type="text" name="name" id="name" required>
  <label for="email">Email:</label>
  <input type="email" name="email" id="email" required>
  <input type="submit" value="Submit">
</form>
<filename>Backend_scraper/updater.py
filePath = './data.csv'
import datetime
datetime_str = str(datetime.datetime.now())#https://docs.python.org/3/library/datetime.html
def log(l):
    """Append a timestamped line to log.txt.

    The timestamp is now taken at call time; previously the module-level
    datetime_str (frozen at import) was reused, so every entry carried the
    process start time. The file handle is managed by a with-block.
    """
    with open('log.txt', 'a') as f:
        f.write('[' + str(datetime.datetime.now()) + ']')
        f.write(str(l) + '\n')
def getQuery(query):
    """Run a query against the DBDB database and return all rows.

    On any error the exception is logged and the process exits. Connection
    and cursor are pre-initialised to None so the finally block can no
    longer raise NameError when connect() itself fails (the original
    referenced `cnx` unconditionally).
    """
    import mysql.connector
    data = []
    cnx = None
    cursor = None
    try:
        cnx = mysql.connector.connect(user='urur', password='<PASSWORD>', database='DBDB')
        cursor = cnx.cursor()
        # print(query)
        # input()
        cursor.execute(query)
        data = cursor.fetchall()
    except Exception as e:
        log(e)
        exit()
    finally:  # https://www.programiz.com/python-programming/exception-handling
        if cnx is not None and cnx.is_connected():
            if cursor is not None:
                cursor.close()
            cnx.close()
    return data
def update():
    """Append the next hour's rows from the database to data.csv, recursing
    until the DB has no complete next hour; then commit/push the CSV to the
    KatuSatu git repo and exit. Streams via an append handle on filePath.
    """
    f_ = open(filePath, 'r')
    data = f_.readlines()
    f_.close()
    f_ = open(filePath, 'a')
    # Last recorded timestamp is the first CSV column of the final line.
    lastTime_str = data[-1].split(',')[0]
    nextTime = datetime.datetime.strptime(lastTime_str, '%Y-%m-%d %H:%M:%S')  # SQL datetime format
    nextTime = nextTime.replace(minute = 0, second = 0)  # normalise to the hour
    nextTime = nextTime + datetime.timedelta(hours=1)  # advance to the next hour
    nextTime_str = nextTime.strftime('%Y-%m-%d %H:%M:%S')
    Query = "SELECT dateandtime, month_idx, jeopsu_pcnt FROM number_record WHERE dateandtime >= '{}' LIMIT 11;".format(nextTime_str)
    result = getQuery(Query)
    if(len(result) <= 10):
        # Fewer than 11 rows: the next hour is incomplete (or nothing new).
        # Publish what we have and stop — this is the recursion's exit path.
        f_.close()
        import os
        os.system("cp ./data.csv ./KatuSatu/data.csv")  # copy into the repo
        os.chdir("./KatuSatu")
        os.system("git add ./*")
        os.system("git commit -m \"Data Uploaddd...\"")
        os.system("git pull origin")  # in case origin changed (usually not data.csv)
        os.system("git push origin")
        exit()
    elif(len(result) == 11):
        # Exactly 11 rows: timestamp + 11 percentage samples -> one CSV line.
        lines = str(result[0][0].replace(minute = 0, second = 0))  # normalised hour stamp
        for i in range(11):
            lines += (', ' + str(result[i][2]))
        lines += '\n'
        f_.write(lines)
        f_.close()
        log('Added line: ' + lines.rstrip())
    # Tail recursion: keep appending until the <=10 branch exits the process.
    update()
# Entry point: if data.csv is readable, start the update loop; otherwise
# bootstrap it via updater_init.py and retry once. Any failure after that is
# logged and the process exits.
try:
    f = open(filePath, 'r')
    f.close()
    update()
except Exception as e:
    import os
    os.system('python updater_init.py')  # (re)create data.csv
    #os.system('python updater.py')#Relive.
    try:
        f = open(filePath, 'r')
        f.close()
        update()
    except Exception as e:
        log(e)
        exit()
def lowestCommonAncestor(root, p, q):
    """Return the lowest common ancestor of p and q in a BST rooted at root.

    Walks down iteratively: while both targets lie strictly on the same side
    of the current node, descend that way; the first node where they split
    (or equal one of them) is the LCA. Returns None for an empty tree.
    """
    node = root
    while node is not None:
        if node.data > p.data and node.data > q.data:
            node = node.left      # both targets in the left subtree
        elif node.data < p.data and node.data < q.data:
            node = node.right     # both targets in the right subtree
        else:
            return node           # split point: this is the LCA
    return None
/*
* Copyright 2017-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.artemis.impl.moas;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.annotations.Beta;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelHandler.Sharable;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.util.CharsetUtil;
import org.onlab.packet.IpAddress;
import org.onlab.packet.IpPrefix;
import org.onosproject.artemis.ArtemisPacketProcessor;
import org.onosproject.artemis.impl.objects.ArtemisMessage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
/**
* MOAS Client channel handler.
*/
@Sharable
@Beta
@Sharable
@Beta
public class MoasClientHandler extends ChannelInboundHandlerAdapter {
    private static final Logger log =
            LoggerFactory.getLogger(MoasClientHandler.class);

    /** Local IP announced to the MOAS server on connect. */
    private final IpAddress localIp;
    /** Local prefix announced to the MOAS server on connect. */
    private final IpPrefix localPrefix;
    /** Consumer of decoded MOAS messages. */
    private final ArtemisPacketProcessor packetProcessor;

    MoasClientHandler(IpAddress localIp, IpPrefix localPrefix, ArtemisPacketProcessor packetProcessor) {
        this.localIp = localIp;
        this.packetProcessor = packetProcessor;
        this.localPrefix = localPrefix;
    }

    @Override
    public void channelActive(ChannelHandlerContext ctx) throws Exception {
        log.info("Connected to server {}", ctx.channel().remoteAddress());

        // Announce ourselves to the server with our IP and prefix.
        ArtemisMessage message = new ArtemisMessage();
        message.setType(ArtemisMessage.Type.INITIATE_FROM_CLIENT);
        message.setLocalIp(localIp.toString());
        message.setLocalPrefix(localPrefix.toString());

        ObjectMapper mapper = new ObjectMapper();
        try {
            String jsonInString = mapper.writeValueAsString(message);
            ByteBuf buffer = Unpooled.copiedBuffer(jsonInString, CharsetUtil.UTF_8);
            ctx.writeAndFlush(buffer);
        } catch (JsonProcessingException e) {
            log.warn("channelActive()", e);
        }
    }

    @Override
    public void channelRead(ChannelHandlerContext ctx, Object msg) throws IOException {
        ByteBuf in = (ByteBuf) msg;
        try {
            // Decode as UTF-8 to match the encoding used when sending
            // (previously US_ASCII, which garbles non-ASCII JSON content).
            String strMsg = in.toString(CharsetUtil.UTF_8);
            ObjectMapper mapper = new ObjectMapper();
            ArtemisMessage actObj = mapper.readValue(strMsg, ArtemisMessage.class);
            packetProcessor.processMoasPacket(actObj, ctx);
        } finally {
            // Release the inbound buffer; the original never did, leaking
            // pooled buffer memory on every message.
            in.release();
        }
    }

    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
        log.error("exceptionCaught()", cause);
        ctx.close();
    }
}
|
<reponame>rnwood/swayforms
import { Service } from "@prisma/client";
import { useStaticRendering } from "mobx-react";
import { GetServerSidePropsContext, GetServerSidePropsResult } from "next";
import React from "react";
import { ApiClient } from "../shared/ApiClient";
import { PublicPage } from "../shared/PublicPage";
interface IProps {
services: Service[];
}
// Public index page listing every service with links to the public form and
// its admin designer view.
export default class ServiceIndexPage extends PublicPage<IProps> {
    // Page title rendered by PublicPage's chrome.
    renderTitle() {
        return "Services";
    }

    renderBody() {
        return <div>
            <h1 className="govuk-heading-xl">Services</h1>
            <ul className="govuk-list">
                {this.props.services.map(s =>
                    <li key={s.id} className="govuk-link"><a href={`/service/${s.url}`} >{s.name}</a> <a href={`/admin/service/${s.url}`}>[Design]</a></li>
                )}
            </ul>
        </div>
    }
}
// Server-side data fetch: loads the service list before rendering.
// useStaticRendering(true) tells mobx-react not to track observers during
// SSR (avoids per-request memory leaks).
export async function getServerSideProps(context: GetServerSidePropsContext<{}>) :Promise<GetServerSidePropsResult<IProps>> {
    useStaticRendering(true);
    const services = await ApiClient.get<Service[]>("/api/service", context.req);
    return {props:{services}};
}
# Evaluate PCL loss from a saved experiment checkpoint.
# --ITER 200001: checkpoint iteration to load; --display: show progress output.
python pcl_loss_saved.py \
	--exp '../../expts/temp'\
	--gpu 1 \
	--ITER 200001 \
	--display \
	--batch_size 1
|
<reponame>ginnun/configurator-example
import {Component, Input, OnInit} from '@angular/core';
import { WVM} from '../../../../typescript-generator/configurator';
@Component({
selector: 'app-onshape-button',
templateUrl: './onshape-button.component.html',
styleUrls: ['./onshape-button.component.scss']
})
export class OnshapeButtonComponent implements OnInit {
  @Input() documentId: string;
  @Input() wvmType: WVM;
  @Input() wvmId: string;
  @Input() elementId: string;

  constructor() { }

  ngOnInit() {
  }

  /**
   * Opens the referenced Onshape element in a new browser tab.
   * URL shape: /documents/<doc>/(w|v|m)/<wvmId>/e/<elementId>.
   */
  openOnshape() {
    // Path segment keyed by workspace/version/microversion type; an unknown
    // type contributes no segment (same as the original switch fall-through).
    const segmentForType: { [type: string]: string } = {
      Workspace: '/w/',
      Version: '/v/',
      Microversion: '/m/'
    };
    const segment = segmentForType[this.wvmType as string] || '';
    const url = 'https://cad.onshape.com/documents/' + this.documentId
      + segment + this.wvmId + '/e/' + this.elementId;
    window.open(url, '_blank');
  }
}
|
import React from 'react';
import { Modal } from '@design-system';
import * as API from '@modules/api';
import { snackBar } from '@modules/ui/snack-bar';
import { authStore } from '@stores/auth';
import { modalStore } from '@stores/modal';
interface Props {
isOpen: boolean;
onClose: () => void;
}
interface State {
token: string;
}
// Modal that enables two-factor authentication for the current account.
// Prerequisites (listed in the body): a linked Telegram account and a valid
// e-mail address (a recovery key is mailed on registration).
export class TwoFactorAuthSyncModal extends React.Component<Props, State> {
    constructor(props: Props) {
        super(props);
        this.state = {
            token: '',
        };
    }

    // Registers 2FA via the API. Each known error code maps to a snackbar
    // message (login required / Telegram link required / already enabled);
    // on success the auth store's is2faSync flag is set and the modal closes.
    async onCreateTwoFactorAuth() {
        const { data } = await API.postSecurity();
        if (data.status === 'ERROR') {
            if (data.errorCode === API.ERROR.NOT_LOGIN) {
                snackBar('😥 로그인이 필요합니다.');
                return;
            }
            if (data.errorCode === API.ERROR.NEED_TELEGRAM) {
                // Offer a shortcut to the Telegram sync modal.
                snackBar('😥 텔레그램 연동이 필요합니다.', {
                    onClick: () => modalStore.onOpenModal('isTelegramSyncModalOpen')
                });
                return;
            }
            if (data.errorCode === API.ERROR.ALREADY_EXISTS) {
                snackBar('😥 이미 등록되어 있습니다.');
                return;
            }
        }
        if (data.status === 'DONE') {
            snackBar('😀 2차 인증이 등록되었습니다.');
            authStore.set((prevState) => ({
                ...prevState,
                is2faSync: true,
            }));
            this.props.onClose();
            return;
        }
        // Unknown error code or unexpected status: generic failure message.
        snackBar('😥 등록중 오류가 발생했습니다.');
    }

    render() {
        return (
            <Modal
                title="2차 인증을 사용할까요?"
                isOpen={this.props.isOpen}
                onClose={this.props.onClose}
                submitText="네 사용할게요."
                onSubmit={this.onCreateTwoFactorAuth.bind(this)}
            >
                <>
                    다음과 같은 요구사항이 필요합니다.
                    <ul>
                        <li>
                            텔레그램 연동이 선행되어야 합니다.
                        </li>
                        <li>
                            계정에 등록된 이메일이 유효해야 합니다.
                            등록된 이메일로 복구키를 전송하며
                            복구키는 핸드폰을 소지하지 않았거나
                            기술적인 문제로 인증코드가 전달되지 않았을 때
                            사용할 수 있습니다.
                        </li>
                    </ul>
                    연동 후 최소 하루동안 유지해야 하므로 신중하게 연동하여 주십시오.
                </>
            </Modal>
        );
    }
}
<reponame>Larhard/Maildir-Notifier<gh_stars>0
import logging
logger = logging.getLogger('config')
# Snapshot the module's global names *before* the config variables are
# declared so CONFIG_VARS can be computed as the difference. __old_globals
# is assigned None first so its own name is included in the snapshot and
# therefore excluded from CONFIG_VARS.
__old_globals = None
__old_globals = set(globals().keys())

# Configuration variables (overridable via override()):
sounds = None
sound_cmd = None

# Every name declared between the snapshot and this line = the config surface.
CONFIG_VARS = globals().keys() - __old_globals

def override(config):
    """Overwrite known config variables from ``config`` (a mapping).

    Only names present in CONFIG_VARS are honoured; unknown keys are ignored.
    """
    for variable in CONFIG_VARS & config.keys():
        globals()[variable] = config[variable]
        logger.debug('set "{}" to "{}"'.format(variable, config[variable]))
|
package seedu.planner.logic.commands;
import static java.util.Objects.requireNonNull;
import javafx.collections.ObservableMap;
import seedu.planner.commons.core.EventsCenter;
import seedu.planner.commons.events.ui.StatusEvent;
import seedu.planner.logic.CommandHistory;
import seedu.planner.model.Model;
import seedu.planner.model.course.DegreeRequirement;
/**
* Display the credit count status of the user in the planner
*/
/**
 * Display the credit count status of the user in the planner.
 */
public class StatusCommand extends Command {

    public static final String COMMAND_WORD = "status";

    // Trailing spaces added inside the concatenated fragments: the original
    // message rendered as "ofthe user" and "creditin each".
    public static final String MESSAGE_USAGE = COMMAND_WORD + ": Shows the progress of "
            + "the user and the required credit "
            + "in each degree requirements\n"
            + "Example: University Level Requirement: 8/20\n"
            + "Foundation: 4/36...";

    public static final String MESSAGE_SUCCESS = "Status displayed";

    /**
     * Collects the credit-count status per degree requirement from the model
     * and publishes it on the event bus so the UI can render it.
     */
    @Override
    public CommandResult execute(Model model, CommandHistory history) {
        requireNonNull(model);
        ObservableMap<DegreeRequirement, int[]> statusMap = model.getStatus();
        EventsCenter.getInstance().post(new StatusEvent(statusMap));
        return new CommandResult(MESSAGE_SUCCESS);
    }
}
|
import javax.crypto.Cipher;
import javax.crypto.IllegalBlockSizeException;
import javax.crypto.Key;
import javax.crypto.NoSuchPaddingException;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
public class KeyWrapper {
/**
* Wraps the extractable key using the specified wrapping key.
*
* @param wrappingKey the key used for wrapping
* @param extractableKey the key to be wrapped
* @throws InvalidKeyException if the specified key is invalid
* @throws NoSuchAlgorithmException if the requested cryptographic algorithm is not available
* @throws NoSuchProviderException if the specified provider is not available
* @throws NoSuchPaddingException if the specified padding mechanism is not available
* @throws IllegalBlockSizeException if the block size is invalid
*/
private static void wrap(Key wrappingKey, Key extractableKey)
throws InvalidKeyException, NoSuchAlgorithmException, NoSuchProviderException, NoSuchPaddingException, IllegalBlockSizeException {
try {
Cipher cipher = Cipher.getInstance("AES/CBC/NoPadding", "Cavium");
cipher.init(Cipher.WRAP_MODE, wrappingKey);
byte[] wrappedKey = cipher.wrap(extractableKey);
// Further processing with the wrappedKey if needed
} catch (NoSuchAlgorithmException | NoSuchProviderException | NoSuchPaddingException | InvalidKeyException e) {
// Handle specific exceptions
e.printStackTrace(); // Example: Log or handle the exception
throw e; // Rethrow the exception
} catch (IllegalBlockSizeException e) {
// Handle IllegalBlockSizeException
e.printStackTrace(); // Example: Log or handle the exception
throw e; // Rethrow the exception
}
}
} |
<reponame>smagill/opensphere-desktop<gh_stars>10-100
/**
* Contains the model classes used by the editor.
*/
package io.opensphere.featureactions.editor.model;
|
<filename>Lab7/FormaPagamento.java
package usapedidos;
/**
 * Represents a payment method by its textual description.
 */
public class FormaPagamento {
    /** Payment method description; immutable once constructed. */
    final String forma;

    /**
     * @param forma description of the payment method, stored as-is
     */
    FormaPagamento(String forma) {
        this.forma = forma;
    }

    /** Returns the payment method description supplied at construction. */
    String getDescricao() {
        return forma;
    }
}
|
// Copyright 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef LayerPainterChromium_h
#define LayerPainterChromium_h
class SkCanvas;
namespace gfx {
class Rect;
class RectF;
}
namespace cc {
// Interface for painting a layer's contents into an SkCanvas.
class LayerPainter {
public:
    virtual ~LayerPainter() { }
    // Paints contentRect into the canvas. Implementations report the region
    // they covered opaquely through the in/out `opaque` rect.
    virtual void paint(SkCanvas*, const gfx::Rect& contentRect, gfx::RectF& opaque) = 0;
};
} // namespace cc
#endif // LayerPainterChromium_h
|
<gh_stars>0
"use strict";
// Markup for the "Form Input Transaksi" modal body.
// NOTE(review): the date input's value still contains a PHP tag
// ("<?php echo $date; ?>") that is never executed inside a JS string —
// confirm whether this should be interpolated client-side instead.
let body_modal_8 = '<div class="card">'+
'    <div class="card-header">'+
'     <h4>Form Input Transaksi</h4>'+
'   </div>'+
'   <div class="card-body">'+
'     '+
'     <div class="form-group">'+
'       <label>Date</label>'+
'       <input type="date" name="tanggal" value="<?php echo $date; ?>" class="form-control">'+
'     </div>'+
'     <div class="form-group">'+
'       <label>Lokasi</label>'+
'       <select class="form-control">'+
'         <option>Option 1</option>'+
'         <option>Option 2</option>'+
'         <option>Option 3</option>'+
'       </select>'+
'     </div>'+
'     <div class="form-group">'+
'       <label>Item</label>'+
'       <select class="form-control">'+
'         <option>Option 1</option>'+
'         <option>Option 2</option>'+
'         <option>Option 3</option>'+
'       </select>'+
'     </div>'+
'     <div class="form-group">'+
'       <label>Qty</label>'+
'       <input type="text" class="form-control">'+
'     </div>'+
'     <div class="card-footer text-right">'+
'       <button class="btn btn-primary mr-1" type="submit">Submit</button>'+
'       <button class="btn btn-secondary" type="reset">Reset</button>'+
'     </div>'+
'   </div>'+
' </div>'+
' </div>'+
' </div>';
// Demo wiring for the fireModal jQuery plugin: each selector below gets a
// modal with progressively more options (title, custom body, buttons,
// footer class, form handling, created-hook).
$("#modal-1").fireModal({body: 'Modal body text goes here.'});
$("#modal-8").fireModal({
  title: 'Form Input Transaksi',
  body: body_modal_8
});
$("#modal-2").fireModal({body: 'Modal body text goes here.', center: true});

// modal-3's body embeds a highlighted code sample showing the buttons API.
let modal_3_body = '<p>Object to create a button on the modal.</p><pre class="language-javascript"><code>';
modal_3_body += '[\n';
modal_3_body += '  {\n';
modal_3_body += "    text: 'Login',\n";
modal_3_body += "    submit: true,\n";
modal_3_body += "    class: 'btn btn-primary btn-shadow',\n";
modal_3_body += "    handler: function(modal) {\n";
modal_3_body += "      alert('Hello, you clicked me!');\n"
modal_3_body += "    }\n"
modal_3_body += '  }\n';
modal_3_body += ']';
modal_3_body += '</code></pre>';
$("#modal-3").fireModal({
  title: 'Modal with Buttons',
  body: modal_3_body,
  buttons: [
    {
      text: 'Click, me!',
      class: 'btn btn-primary btn-shadow',
      handler: function(modal) {
        alert('Hello, you clicked me!');
      }
    }
  ]
});
$("#modal-4").fireModal({
  footerClass: 'bg-whitesmoke',
  body: 'Add the <code>bg-whitesmoke</code> class to the <code>footerClass</code> option.',
  buttons: [
    {
      text: 'No Action!',
      class: 'btn btn-primary btn-shadow',
      handler: function(modal) {
      }
    }
  ]
});
// modal-5: form submission is intercepted; the AJAX call is simulated with
// a timeout, after which the progress indicator is stopped.
$("#modal-5").fireModal({
  title: 'Login',
  body: $("#modal-login-part"),
  footerClass: 'bg-whitesmoke',
  autoFocus: false,
  onFormSubmit: function(modal, e, form) {
    // Form Data
    let form_data = $(e.target).serialize();
    console.log(form_data)
    // DO AJAX HERE
    let fake_ajax = setTimeout(function() {
      form.stopProgress();
      modal.find('.modal-body').prepend('<div class="alert alert-info">Please check your browser console</div>')
      clearInterval(fake_ajax);
    }, 1500);
    e.preventDefault();
  },
  shown: function(modal, form) {
    console.log(form)
  },
  buttons: [
    {
      text: 'Login',
      submit: true,
      class: 'btn btn-primary btn-shadow',
      handler: function(modal) {
      }
    }
  ]
});
// modal-6: demonstrates the created-hook by injecting a link into the footer.
$("#modal-6").fireModal({
  body: '<p>Now you can see something on the left side of the footer.</p>',
  created: function(modal) {
    modal.find('.modal-footer').prepend('<div class="mr-auto"><a href="#">I\'m a hyperlink!</a></div>');
  },
  buttons: [
    {
      text: 'No Action',
      submit: true,
      class: 'btn btn-primary btn-shadow',
      handler: function(modal) {
      }
    }
  ]
});
$('.oh-my-modal').fireModal({
  title: 'My Modal',
  body: 'This is cool plugin!'
});
/*
This file is part of the JitCat library.
Copyright (C) <NAME> 2020
Distributed under the MIT License (license terms are at http://opensource.org/licenses/MIT).
*/
#include "jitcat/StaticConstMemberInfo.h"
#include "jitcat/Tools.h"
using namespace jitcat;
using namespace jitcat::Reflection;
// Stores a named static constant: the name (plus a cached lower-case form
// for case-insensitive lookup), its JitCat type, and its value.
StaticConstMemberInfo::StaticConstMemberInfo(const std::string& name, const CatGenericType& type, const std::any& value):
	name(name),
	lowerCaseName(Tools::toLowerCase(name)),
	type(type),
	value(value)
{
}

// Trivial const accessors below.
const std::string& StaticConstMemberInfo::getName() const
{
	return name;
}

const std::string& StaticConstMemberInfo::getLowerCaseName() const
{
	return lowerCaseName;
}

const CatGenericType& StaticConstMemberInfo::getType() const
{
	return type;
}

const std::any& StaticConstMemberInfo::getValue() const
{
	return value;
}
|
package com.codepath.apps.Twitter;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.Window;
import android.view.WindowManager;
import android.widget.ProgressBar;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import androidx.fragment.app.FragmentManager;
import androidx.localbroadcastmanager.content.LocalBroadcastManager;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import androidx.swiperefreshlayout.widget.SwipeRefreshLayout;
import com.codepath.apps.Twitter.models.Tweet;
import com.codepath.asynchttpclient.callback.JsonHttpResponseHandler;
import org.json.JSONArray;
import org.json.JSONException;
import org.parceler.Parcels;
import java.util.ArrayList;
import java.util.List;
import okhttp3.Headers;
import static com.codepath.apps.Twitter.models.Tweet.fromJson;
import static com.codepath.apps.Twitter.models.Tweet.fromJsonArray;
public class TimelineActivity extends AppCompatActivity {
TwitterClient client;              // REST client for the Twitter API
TweetAdapter adapter;              // binds tweets to the RecyclerView
RecyclerView rvt;                  // the timeline list
ProgressBar pb;                    // shown while tweets load
List<Tweet> tweets;                // backing data for the adapter
EndlessRecyclerViewScrollListener scrollListener; // triggers paging near list end
private final int REQUEST_CODE=30; // request id for the compose activity flow
private SwipeRefreshLayout swipeContainer; // pull-to-refresh container
public static final String TAG="TimelineActivity"; // log tag
// Sets up the timeline screen: pull-to-refresh, status-bar tint, logo-only
// action bar, local broadcast receivers for compose updates, the
// RecyclerView with endless scrolling, and the initial timeline fetch.
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_timeline);
    // Pull-to-refresh reloads the timeline from scratch.
    swipeContainer = (SwipeRefreshLayout) findViewById(R.id.swipeContainer);
    swipeContainer.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() {
        @Override
        public void onRefresh() {
            fetchTimelineAsync(0);
        }
    });
    swipeContainer.setColorSchemeResources(R.color.blue);
    // Tint the status bar on Lollipop (API 21) and above.
    if (android.os.Build.VERSION.SDK_INT >= 21){
        Window window = this.getWindow();
        window.addFlags(WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS);
        window.clearFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS);
        window.setStatusBarColor(this.getResources().getColor(R.color.status));
    }
    // Action bar shows only the app logo, no title.
    getSupportActionBar().setTitle("");
    getSupportActionBar().setDisplayShowHomeEnabled(true);
    getSupportActionBar().setLogo(R.mipmap.ic_launcher);
    getSupportActionBar().setDisplayUseLogoEnabled(true);
    // Local broadcasts let the compose flow push new/updated tweets in.
    LocalBroadcastManager.getInstance(this).registerReceiver(updateReceiver, new IntentFilter("update"));
    LocalBroadcastManager.getInstance(this).registerReceiver(addReceiver, new IntentFilter("add"));
    client = TwitterApp.getRestClient(this);
    rvt = findViewById(R.id.rv_tweets);
    pb = findViewById(R.id.progressBar);
    tweets = new ArrayList<>();
    adapter = new TweetAdapter(this, tweets);
    LinearLayoutManager layoutManager = new LinearLayoutManager(this);
    rvt.setLayoutManager(layoutManager);
    rvt.setAdapter(adapter);
    // Endless scroll: fetch the next page when the user nears the bottom.
    scrollListener = new EndlessRecyclerViewScrollListener(layoutManager) {
        @Override
        public void onLoadMore(int page, int totalItemsCount, RecyclerView view) {
            Log.i(TAG, "onLoadMore: "+ page);
            loadMoreData();
        }
    };
    rvt.addOnScrollListener(scrollListener);
    populateHomeTimeline();
}
private void loadMoreData() {
client.getNextPageOfTweets(new JsonHttpResponseHandler() {
@Override
public void onSuccess(int statusCode, Headers headers, JSON json) {
Log.i(TAG, "onSuccess for loading more data: "+ json.toString());
JSONArray jsonArray = json.jsonArray;
try {
List<Tweet> tweets = fromJsonArray(jsonArray,pb);
adapter.addAll(tweets);
} catch (JSONException e) {
e.printStackTrace();
}
}
@Override
public void onFailure(int statusCode, Headers headers, String response, Throwable throwable) {
Log.i(TAG, "onFailure for loading more data");
}
}, tweets.get(tweets.size()-1).id);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
/*if (item.getItemId() == R.id.etcompose){
Intent i = new Intent(this, ComposeActivity.class);
startActivityForResult(i,REQUEST_CODE);
return true;
}
return super.onOptionsItemSelected(item);*/
FragmentManager fm = getSupportFragmentManager();
ComposeFragment compose = ComposeFragment.newInstance("");
Bundle bundle = new Bundle();
bundle.putString("username", "");
compose.setArguments(bundle);
compose.show(fm, "composeFragment");
return true;
}
@Override
protected void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
if(requestCode==REQUEST_CODE&& resultCode==RESULT_OK){
Tweet tweet = Parcels.unwrap(data.getParcelableExtra("tweet"));
tweets.add(0,tweet);
adapter.notifyItemInserted(0);
rvt.smoothScrollToPosition(0);
}
super.onActivityResult(requestCode, resultCode, data);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.menu_main, menu);
return super.onCreateOptionsMenu(menu);
}
public void fetchTimelineAsync(int page) {
client.getHomeTimeline(new JsonHttpResponseHandler() {
@Override
public void onSuccess(int statusCode, Headers headers, JSON json) {
adapter.clear();
JSONArray jsonArray = json.jsonArray;
try {
tweets.addAll(fromJsonArray(jsonArray, pb));
Log.d(TAG, "tweets size : "+tweets.size());
adapter.notifyDataSetChanged();
Log.d(TAG, "gets JSON");
} catch (JSONException e) {
Log.d(TAG, "JSON Exception");
e.printStackTrace();
}
swipeContainer.setRefreshing(false);
}
@Override
public void onFailure(int statusCode, Headers headers, String response, Throwable throwable) {
Log.d("DEBUG", "Fetch timeline error");
}
});
}
private void populateHomeTimeline() {
pb.setVisibility(ProgressBar.VISIBLE);
client.getHomeTimeline(new JsonHttpResponseHandler() {
@Override
public void onSuccess(int statusCode, Headers headers, JSON json) {
Log.i(TAG, "onSuccess");
JSONArray jsonArray = json.jsonArray;
try {
tweets.addAll(fromJsonArray(jsonArray, pb));
Log.d(TAG, "tweets size : "+tweets.size());
adapter.notifyDataSetChanged();
Log.d(TAG, "gets JSON");
} catch (JSONException e) {
Log.d(TAG, "JSON Exception");
e.printStackTrace();
}
}
@Override
public void onFailure(int statusCode, Headers headers, String response, Throwable throwable) {
Log.i(TAG, "onFailure", throwable);
}
});
}
public BroadcastReceiver updateReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
final int position = intent.getIntExtra("pos",0);
client.updateTweet(tweets.get(position).id, new JsonHttpResponseHandler() {
@Override
public void onSuccess(int statusCode, Headers headers, JSON json) {
Log.i(TAG, "onSuccess");
try {
tweets.set(position,fromJson(json.jsonObject));
adapter.notifyItemChanged(position);
} catch (JSONException e) {
e.printStackTrace();
}
}
@Override
public void onFailure(int statusCode, Headers headers, String response, Throwable throwable) {
Log.i(TAG, "onFailure", throwable);
}
});
}
};
public BroadcastReceiver addReceiver= new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
client.myRetweet(new JsonHttpResponseHandler() {
@Override
public void onSuccess(int statusCode, Headers headers, JSON json) {
try {
tweets.add(0,fromJsonArray(json.jsonArray, null).get(0));
adapter.notifyItemChanged(0);
rvt.smoothScrollToPosition(0);
} catch (JSONException e) {
e.printStackTrace();
}
}
@Override
public void onFailure(int statusCode, Headers headers, String response, Throwable throwable) {
Log.i(TAG, "onFailure", throwable);
}
});
}
};
} |
<gh_stars>1-10
package io.sjitech.demo.model;
import org.nem.core.model.mosaic.MosaicTransferFeeType;
/**
 * Plain data holder describing a Mijin/NEM mosaic definition: its namespace,
 * supply properties and (optionally) a transfer levy.
 *
 * Created by wang on 2016/07/27.
 */
public class MijinMosaic {

    private String namespaceId;
    private String name;
    private String creator;
    private long id;
    private String description;
    private int divisibility;
    private long initialSupply;
    private boolean supplyMutable;
    private boolean transferable;
    private boolean hasLevy;
    private MosaicTransferFeeType levyType;
    private String recipientAddress;
    private long levyFee;

    public String getNamespaceId() { return namespaceId; }

    public void setNamespaceId(String namespaceId) { this.namespaceId = namespaceId; }

    public String getName() { return name; }

    public void setName(String name) { this.name = name; }

    public String getCreator() { return creator; }

    public void setCreator(String creator) { this.creator = creator; }

    public long getId() { return id; }

    public void setId(long id) { this.id = id; }

    public String getDescription() { return description; }

    public void setDescription(String description) { this.description = description; }

    public int getDivisibility() { return divisibility; }

    public void setDivisibility(int divisibility) { this.divisibility = divisibility; }

    public long getInitialSupply() { return initialSupply; }

    public void setInitialSupply(long initialSupply) { this.initialSupply = initialSupply; }

    public boolean isSupplyMutable() { return supplyMutable; }

    public void setSupplyMutable(boolean supplyMutable) { this.supplyMutable = supplyMutable; }

    public boolean isTransferable() { return transferable; }

    public void setTransferable(boolean transferable) { this.transferable = transferable; }

    public boolean isHasLevy() { return hasLevy; }

    public void setHasLevy(boolean hasLevy) { this.hasLevy = hasLevy; }

    public MosaicTransferFeeType getLevyType() { return levyType; }

    public void setLevyType(MosaicTransferFeeType levyType) { this.levyType = levyType; }

    public String getRecipientAddress() { return recipientAddress; }

    public void setRecipientAddress(String recipientAddress) { this.recipientAddress = recipientAddress; }

    public long getLevyFee() { return levyFee; }

    public void setLevyFee(long levyFee) { this.levyFee = levyFee; }
}
|
# An invalid example.
#
# This model is *deliberately* broken: it declares events outside the
# state-machine block. Keep it broken — it exists to exercise the error path.
class InvalidEvents
  include DataMapper::Resource
  # Auto-incrementing primary key.
  property :id, Serial
  is :state_machine do
    state :day
    state :night
  end
  # The next lines are intentionally incorrect.
  #
  # 'event' only makes sense in a block under 'is :state_machine'
  event :sunrise
  event :sunset
end
# Running the migration triggers evaluation of the (invalid) model.
InvalidEvents.auto_migrate!
|
import LocalizedStrings from 'react-localization';

// English translations.
const en = {
  year: 'Year',
  contribPerYear: 'Contributions per year',
  contribPer: 'Contributions per {term}',
  bottomNote: '* Contributions for the current year are counted from today until the end of the year. It is therefore normal that contributions for the current year are lower than other years.'
};

// French translations.
const fr = {
  year: 'Année',
  contribPerYear: 'Contribution par année',
  contribPer: 'Contribution par {term}',
  bottomNote: '* Les contributions de l\'année en cours sont comptées à partir d\'aujourd\'hui jusqu\'à la fin de l\'année. Il est donc normale que les contributions de l\'année en cours soient plus basses que les autres années.'
};

// Localized UI strings for the contributions chart.
export const strings = new LocalizedStrings({ en, fr });
import { mkdtempSync, writeFileSync, existsSync, mkdirSync } from 'fs';
import { execSync } from 'child_process';
import { join, resolve } from 'path';
import { tmpdir } from 'os';
import { buildPiral } from './build-piral';
import { setBundler } from '../bundler';
// Generous timeout for the integration tests below (they run real builds).
const twoMinutes = 2 * 60 * 1000;

// Minimal package.json for a scaffolded Piral instance; `files` is injected
// into the `pilets.files` array verbatim.
const defaultPackageJson = (files: Array<any>) => `
  {
    "name": "piral-local-test",
    "version": "1.0.0",
    "description": "",
    "keywords": [
      "piral"
    ],
    "dependencies": {
      "piral": "*"
    },
    "scripts": {
      "start": "piral debug",
      "build": "piral build"
    },
    "app": "./src/index.html",
    "pilets": {
      "files": ${JSON.stringify(files)},
      "externals": [],
      "scripts": {
        "build": "npm run build-pilet",
        "start": "npm run debug-pilet"
      },
      "validators": {},
      "devDependencies": {},
      "preScaffold": "",
      "postScaffold": "",
      "preUpgrade": "",
      "postUpgrade": ""
    },
    "devDependencies": {
      "@types/node": "latest",
      "@types/react": "latest",
      "@types/react-dom": "latest",
      "@types/react-router": "latest",
      "@types/react-router-dom": "latest",
      "piral-cli": "*",
      "typescript": "latest"
    }
  }
`;

// Entry HTML for the scaffolded instance; the bundler resolves ./index.tsx.
const defaultIndexHtml = `
<!DOCTYPE html>
<html lang="en">
<meta charset="UTF-8">
<title>Test Piral Instance</title>
<div id="app"></div>
<script src="./index.tsx"></script>
</html>
`;

// Entry module: renders an instance that serves no pilets.
const defaultIndexTsx = `
import { renderInstance } from 'piral';

renderInstance({
  requestPilets() {
    return Promise.resolve([]);
  },
});
`;

// TypeScript configuration for the scaffolded instance.
const tsConfigJson = `
{
  "compilerOptions": {
    "declaration": true,
    "target": "es6",
    "sourceMap": true,
    "outDir": "./lib",
    "skipLibCheck": true,
    "lib": ["dom", "es2018"],
    "moduleResolution": "node",
    "module": "esnext",
    "jsx": "react",
    "importHelpers": true
  },
  "include": [
    "./src"
  ],
  "exclude": [
    "node_modules"
  ]
}
`;
/**
 * Creates a fresh, uniquely named temporary directory for one test run.
 */
function createTempDir() {
  const prefix = join(tmpdir(), 'piral-tests-build-piral-');
  return mkdtempSync(prefix);
}
/**
 * Scaffolds a minimal Piral instance in a temp directory and installs its
 * dependencies. `files` is forwarded into the generated package.json
 * (`pilets.files`). NOTE: runs a real `npm i`, so this is slow and needs
 * network access.
 */
function scaffoldNewPiralInstance(files: Array<any> = []) {
  const dir = createTempDir();
  mkdirSync(resolve(dir, 'src'));
  console.log('Created temporary directory ...', dir);
  writeFileSync(resolve(dir, 'package.json'), defaultPackageJson(files), 'utf8');
  writeFileSync(resolve(dir, 'tsconfig.json'), tsConfigJson, 'utf8');
  writeFileSync(resolve(dir, 'src/index.html'), defaultIndexHtml, 'utf8');
  writeFileSync(resolve(dir, 'src/index.tsx'), defaultIndexTsx, 'utf8');
  execSync('npm i', {
    cwd: dir,
  });
  return dir;
}
// Integration tests for `buildPiral` using the parcel bundler. Each test
// scaffolds a real instance (slow — see the twoMinutes timeout).
describe('Build Piral Command', () => {
  beforeEach(() => {
    // Register the parcel bundler actions before every test.
    setBundler({
      name: 'parcel',
      actions: require(resolve(__dirname, '../../../piral-cli-parcel/lib/actions')),
    });
  });

  // An empty directory has no app entry, so the build must reject.
  it('missing source should result in an error', async () => {
    const dir = createTempDir();
    let error = false;
    try {
      await buildPiral(dir);
    } catch {
      error = true;
    }
    expect(error).toBeTruthy();
  });

  // Emulator-only build: produces the tarball but no release output.
  it(
    'can create an emulator build without files',
    async () => {
      const dir = scaffoldNewPiralInstance();
      let error = false;
      try {
        await buildPiral(dir, {
          type: 'emulator',
        });
      } catch {
        error = true;
      }
      expect(error).toBeFalsy();
      expect(existsSync(resolve(dir, 'dist/emulator/piral-local-test-1.0.0.tgz'))).toBeTruthy();
      expect(existsSync(resolve(dir, 'dist/release'))).toBeFalsy();
    },
    twoMinutes,
  );

  // Release-only build: produces index.html but no emulator tarball.
  it(
    'can create a release build',
    async () => {
      const dir = scaffoldNewPiralInstance();
      let error = false;
      try {
        await buildPiral(dir, {
          type: 'release',
        });
      } catch {
        error = true;
      }
      expect(error).toBeFalsy();
      expect(existsSync(resolve(dir, 'dist/emulator/piral-local-test-1.0.0.tgz'))).toBeFalsy();
      expect(existsSync(resolve(dir, 'dist/release/index.html'))).toBeTruthy();
    },
    twoMinutes,
  );

  // Default build type: both outputs, with extra pilet files declared both
  // as a plain name and as a {from, to} mapping.
  it(
    'can create all builds with files',
    async () => {
      const dir = scaffoldNewPiralInstance([
        'foo.txt',
        {
          from: 'src/bar.txt',
          to: 'bar.txt',
        },
      ]);
      writeFileSync(resolve(dir, 'foo.txt'), 'foo!', 'utf8');
      writeFileSync(resolve(dir, 'src/bar.txt'), 'bar!', 'utf8');
      let error = false;
      try {
        await buildPiral(dir);
      } catch {
        error = true;
      }
      expect(error).toBeFalsy();
      expect(existsSync(resolve(dir, 'dist/emulator/piral-local-test-1.0.0.tgz'))).toBeTruthy();
      expect(existsSync(resolve(dir, 'dist/release'))).toBeTruthy();
    },
    twoMinutes,
  );
});
|
#!/bin/bash
# Runs AmberTools' `resp` charge fitting for one ZINC ligand, reusing the
# charges (qout) and electrostatic potential (.ESP) produced by the earlier
# stage in ../mod1_remake — apparently the second RESP stage.
export AMBERHOME="/usr/local/programs/custom/amber/amber14/arch/gcc-mpich3.1.3/amber14"
# NOTE(review): absolute, user-specific path — must be adjusted per machine.
WORKDIR="/home/leskoura/coronaZINC/ZINC/I/RESP-FULL-HIGH-MONOHAL/ZINC00033532/mod2_remake/"
cd $WORKDIR
input="ANTECHAMBER_RESP2_mod.IN"   # resp input deck
output="output"                    # main resp output file
punch="punch"                      # punch file with the fitted charges
qin="../mod1_remake/qout"          # input charges from the previous stage
qout="qout"                        # output charges written by this stage
espot="../mod1_remake/ANTECHAMBER_modifikovany.ESP"  # ESP grid from the previous stage
esout="esout"                      # ESP values evaluated with fitted charges
${AMBERHOME}/bin/resp -i $input -o $output -p $punch -q $qin -t $qout -e $espot -s $esout
#!/bin/bash
#
# Author: Pierre Dahmani
# Created: 23.05.2021
#
# Description: test script to start synapse. Requires gen-conf.sh to be run
# first (it creates the configuration in the synapse-data volume).
# Run detached, persisting state in the named volume and exposing the
# client/federation HTTP port.
docker run -d --name synapse \
    --mount type=volume,src=synapse-data,dst=/data \
    -p 8008:8008 \
    matrixdotorg/synapse:latest
# Tail the log of the latest container to see if the start was successful.
docker container logs -f "$(docker ps -ql)"
|
require "s3_file_uploader"
# Sidekiq worker that generates a global feedback-export CSV for a date
# range, uploads it to S3 and emails a download link to the requester.
class GenerateGlobalExportCsvWorker
  include Sidekiq::Worker

  # export_params: string-keyed hash with "from_date"/"to_date" (YYYY-MM-DD),
  # "notification_email" and "exclude_spam".
  def perform(export_params)
    feedback_export_request = FeedbackExportRequest.new(notification_email: export_params["notification_email"])
    # Date strings are expanded to cover the full days at both ends.
    filename, contents = GlobalExportCsvGenerator.new(
      Date.strptime(export_params["from_date"], "%Y-%m-%d").beginning_of_day,
      Date.strptime(export_params["to_date"], "%Y-%m-%d").end_of_day,
      export_params["exclude_spam"],
    ).call
    feedback_export_request.filename = filename
    # NOTE(review): the file is uploaded before the record is saved — if
    # save! raises, an orphaned file remains in S3. Confirm this is intended.
    S3FileUploader.save_file_to_s3(filename, contents)
    feedback_export_request.save!
    # Stamp generated_at separately so it reflects upload completion.
    feedback_export_request.touch(:generated_at)
    GlobalExportNotification.notification_email(export_params["notification_email"], feedback_export_request.url).deliver_now
  end
end
|
<gh_stars>1000+
// Copyright 2017-2021 @polkadot/rpc-core authors & contributors
// SPDX-License-Identifier: Apache-2.0
import '@polkadot/rpc-augment';
|
package com.touch.air.mall.auth.config;

import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.ViewControllerRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;

/**
 * MVC configuration for the auth service.
 *
 * <p>Maps static view names directly to URL paths, replacing what would
 * otherwise be trivial {@code @GetMapping} controller methods (e.g. a
 * {@code loginPage()} handler returning {@code "login"}).</p>
 *
 * @author bin.wang
 * @date 2021/1/22 13:16
 */
@Configuration
public class WebConfig implements WebMvcConfigurer {

    /** URL-path to view-name pairs served without a dedicated controller. */
    private static final String[][] VIEW_MAPPINGS = {
            {"/login.html", "login"},
            {"/register.html", "register"},
    };

    /**
     * Registers the direct view mappings.
     *
     * @param registry Spring's view-controller registry
     */
    @Override
    public void addViewControllers(ViewControllerRegistry registry) {
        for (String[] mapping : VIEW_MAPPINGS) {
            registry.addViewController(mapping[0]).setViewName(mapping[1]);
        }
    }
}
|
import * as fs from 'fs';
import { join } from 'path';
import * as Benchmark from 'benchmark';
import ndarray from 'ndarray';
import * as PNGJS from 'pngjs';
import AsyncAstar, {
AsyncAstarResult,
AsyncAstarStatus
} from '../lib/asyncastar';
import { copyNdarray, createPlanner, NodeData } from '../lib/util';
import { MAZE_BENCHMARKS, MAZES } from './fixtures/data';
import { saveImage } from './helper';
// import { performance } from 'perf_hooks';
// Require imports necessitate this
// Require imports necessitate this
const PNG = PNGJS.default.PNG;

// NOTE(review): tempPNG appears unused in this file — confirm before removing.
const tempPNG = 'tmp.png';
const FIXTURE_FOLDER = 'src/tests/fixtures';

// Import the maze data (read PNG images). Fixtures either carry inline
// `data` or reference a PNG file that is decoded into an ndarray of
// shape [width, height, 4] (RGBA).
const MAZE_DATA = MAZES.map(maze2 => {
  const fname = join(FIXTURE_FOLDER, maze2.file || maze2.name);
  // Reading raw data
  if (maze2.data) {
    return { ...maze2, fname };
  }
  // Reading an actual picture (PNG)
  const buf = fs.readFileSync(fname);
  const img = PNG.sync.read(buf);
  const data = ndarray(
    new Uint8Array(img.data),
    [img.width | 0, img.height | 0, 4],
    [4, (4 * img.width) | 0, 1],
    0
  );
  return { ...maze2, data, fname };
});

// Planner factories keyed by algorithm name; each builds a planner for the
// maze referenced by the benchmark description.
const planners = {
  AsyncAstar(mazeTest) {
    const mInfo = MAZE_DATA.find(mzData => mazeTest.maze === mzData.name);
    const maze1 = mInfo.data;
    const planner1 = createPlanner(
      maze1,
      mazeTest.start,
      mazeTest.goal,
      mazeTest.allowDiag,
      mazeTest.heuristic
    );
    return {planner: planner1, mazeInfo: mInfo, maze: maze1};
  }
};

// Run the first benchmark case, extract the (x, y, z) path and render the
// solved maze next to the fixture as "<name>_solved.png".
const test = MAZE_BENCHMARKS[0];
const {planner, mazeInfo, maze} = planners.AsyncAstar(test);
const result = planner.searchAsync();
const pathData = result.path.map(node => [
  node.data.x,
  node.data.y,
  node.data.z
]);
saveImage(
  maze,
  pathData,
  mazeInfo.fname.slice(0, -4) + `_solved.png`,
  planner,
  mazeInfo.is3D
);
console.log(result)
|
<filename>src/Server/Entity/Resource/User.js
'use strict';
const Crypto = require('crypto');
/**
 * A stored password
 *
 * Holds only the salted hash and the salt — never the plain text.
 *
 * @memberof HashBrown.Server.Entity
 */
class Password extends HashBrown.Entity.EntityBase {
    structure() {
        // 'hash' is produced by User.sha512(plainText, salt);
        // 'salt' is random hex (see User.setPassword / createPasswordHashSalt).
        this.def(String, 'hash');
        this.def(String, 'salt');
    }
}
/**
 * A model for Users
 *
 * Server-side user resource carrying the salted password hash and the list
 * of active access tokens.
 *
 * @memberof HashBrown.Server.Entity.Resource
 */
class User extends require('Common/Entity/Resource/User') {
    /**
     * @param {Object} params Optional initial values; `params.password` is
     * coerced into a Password entity so the stored type is consistent.
     */
    constructor(params) {
        if(params && params.password) {
            // Ensure correct object type
            params.password = new Password({
                hash: params.password.hash,
                salt: params.password.salt
            });
        }

        super(params);
    }

    /**
     * Defines the server-only fields on top of the common User structure.
     */
    structure() {
        super.structure();

        this.def(Password, 'password', new Password());
        this.def(Array, 'tokens', []);
    }

    /**
     * Removes a token
     *
     * @param {String} token
     */
    removeToken(token) {
        for(let i in this.tokens) {
            if(this.tokens[i].key === token) {
                this.tokens.splice(i, 1);
                break;
            }
        }
    }

    /**
     * Clears all sensitive data (before sending the user over the wire)
     */
    clearSensitiveData() {
        this.password = null;
        this.tokens = null;
    }

    /**
     * Sets all project scopes
     *
     * @param {String} project
     * @param {Array} scopes
     */
    setScopes(project, scopes) {
        if(!this.scopes[project]) {
            this.scopes[project] = [];
        }

        this.scopes[project] = scopes;
    }

    /**
     * Creates a new access token
     *
     * @param {Boolean} persist If true, the token never expires
     *
     * @returns {String} The new token key
     */
    generateToken(persist) {
        let key = Crypto.randomBytes(20).toString('hex');
        let validDuration =
            8 * // Hours
            60 * // Minutes
            60 * // Seconds
            1000; // Milliseconds

        // `expires === false` marks a persistent token (see validateToken).
        let expires = persist ? false : Date.now() + validDuration;

        let token = {
            key: key,
            expires: expires
        };

        this.tokens.push(token);

        return key;
    }

    /**
     * Validate token
     *
     * Expired tokens encountered during the scan are pruned as a side effect
     * (hence the reverse iteration, which keeps splicing safe).
     *
     * @param {String} token
     *
     * @returns {Boolean} valid
     */
    validateToken(token) {
        for(let i = this.tokens.length - 1; i >= 0; i--) {
            let existingToken = this.tokens[i];
            let isExpired = existingToken.expires != false && existingToken.expires < Date.now();

            if(isExpired) {
                this.tokens.splice(i, 1);
            } else if(existingToken.key == token) {
                return true;
            }
        }

        return false;
    }

    /**
     * Cleans up expired tokens
     */
    cleanUpTokens() {
        for(let i = this.tokens.length - 1; i >= 0; i--) {
            let existingToken = this.tokens[i];
            let isExpired = existingToken.expires != false && existingToken.expires < Date.now();

            if(isExpired) {
                this.tokens.splice(i, 1);
            }
        }
    }

    /**
     * Validate password
     *
     * @param {String} password Plain-text candidate
     *
     * @returns {Boolean} valid
     */
    validatePassword(password) {
        let hashedPassword = User.sha512(password, this.password.salt);

        return this.password.hash == hashedPassword;
    }

    /**
     * Set new password
     *
     * @param {String} password Plain text; only hash and salt are stored
     */
    setPassword(password) {
        // Delegate to the shared helper so the hashing scheme lives in one place.
        let { hash, salt } = User.createPasswordHashSalt(password);

        this.password.salt = salt;
        this.password.hash = hash;
    }

    /**
     * Creates a password hash and salt
     *
     * @param {String} password
     *
     * @returns {Object} Hash and salt
     */
    static createPasswordHashSalt(password) {
        let salt = Crypto.randomBytes(128).toString('hex');
        let hashedPassword = User.sha512(password, salt);

        return {
            salt: salt,
            hash: hashedPassword
        };
    }

    /**
     * Creates a sha512 hash
     *
     * @param {String} string
     * @param {String} salt HMAC key
     *
     * @returns {String} hash (hex)
     */
    static sha512(string, salt) {
        let hash = Crypto.createHmac('sha512', salt);

        hash.update(string);

        return hash.digest('hex');
    }

    /**
     * Creates a new user object
     *
     * @param {String} username
     * @param {String} password Optional plain-text password
     *
     * @returns {User} user
     */
    static create(username, password) {
        // Fix: the original assigned the literal `<PASSWORD>` here — an
        // artefact of secret stripping and not valid JavaScript. Build the
        // hash/salt via the canonical helper instead of duplicating it.
        let passwordObj = password ? User.createPasswordHashSalt(password) : {};

        let user = new User({
            id: HashBrown.Entity.EntityBase.createId(),
            username: username,
            password: passwordObj
        });

        return user;
    }
}
module.exports = User;
|
import psutil
def info():
    """Return the system's disk partitions as reported by psutil.

    Thin wrapper around :func:`psutil.disk_partitions` called with its
    default arguments.
    """
    partitions = psutil.disk_partitions()
    return partitions
<gh_stars>0
/*
mybot - Illustrative Slack bot in Go
Copyright (c) 2015 RapidLoop
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
package main
import (
"encoding/csv"
"fmt"
"log"
"net/http"
"os"
"strings"
"bufio"
_ "text/scanner"
)
var emojifile = "emoji.csv"
// main connects to Slack's Real Time API with the bot token given as the
// single CLI argument and services two commands addressed to the bot:
//
//	@bot emoji <name> <url>  — store a custom emoji mapping (restricted channel)
//	@bot gbf <name>          — post the stored sticker URL (restricted channel)
//
// NOTE(review): channel IDs below are hard-coded; confirm they still match
// the intended workspace before deploying.
func main() {
	if len(os.Args) != 2 {
		fmt.Fprintf(os.Stderr, "usage: mybot slack-bot-token\n")
		os.Exit(1)
	}
	// Make sure the emoji CSV exists before serving lookups.
	if !FileExists(emojifile) {
		CreateFile(emojifile)
	}
	// start a websocket-based Real Time API session
	ws, id := slackConnect(os.Args[1])
	fmt.Println("mybot ready, ^C exits")
	for { // main loop, read each incoming message
		m, err := getMessage(ws)
		if err != nil {
			log.Fatal(err)
		}
		// see if bot is mentioned
		if m.Type == "message" && strings.HasPrefix(m.Text, "<@"+id+">") {
			// if so try to parse it
			parts := strings.Fields(m.Text)
			if len(parts) == 4 && parts[1] == "emoji" {
				// looks good, add the mapping and reply with the result
				go func(m Message) {
					// Only allowed in this specific (private) channel.
					if m.Channel == "G5GSTPRPZ" {
						i := AddEmojiToCSV(parts[2], parts[3])
						if i {
							print(parts)
							m.Text = fmt.Sprintf("Added " + parts[2])
							postMessage(ws, m)
						} else {
							m.Text = fmt.Sprintf("Failed to add.")
							postMessage(ws, m)
						}
					}
				}(m)
				// NOTE: the Message object is copied, this is intentional
			} else if len(parts) == 3 && parts[1] == "gbf" { // gbf stickers
				go func(m Message) {
					if m.Channel == "C09HBS03F" { // hardcode mobile game channel ID
						m.Text = fmt.Sprintf(getEmoji(parts[2]))
						if m.Text != "" {
							postMessage(ws, m)
						}
					}
				}(m)
			} else { // unrecognized command
				m.Text = fmt.Sprintf("Sorry, that's not a recognized command.\n")
				postMessage(ws, m)
			}
		}
	}
}
// getEmoji looks up the Slack-embeddable URL stored for the given emoji
// symbol in the CSV file. Symbols are stored upper-cased, so the lookup is
// case-insensitive. Returns an empty string when the symbol is unknown (the
// caller skips posting on "") or a short error description on I/O failure.
func getEmoji(sym string) string {
	sym = strings.ToUpper(sym)
	if !FileExists(emojifile) {
		return "error: No emoji file"
	}
	file, err := os.Open(emojifile)
	if err != nil {
		return fmt.Sprintf("error: %v", err)
	}
	// Fix: rely solely on the deferred close — the original also called
	// file.Close() explicitly, closing the file twice.
	defer file.Close()
	rows, err := csv.NewReader(file).ReadAll()
	if err != nil {
		return fmt.Sprintf("error: %v", err)
	}
	for _, row := range rows {
		// Guard against ragged rows before indexing column 1.
		if len(row) >= 2 && row[0] == sym {
			return row[1] // emoji URL for Slack to embed
		}
	}
	// Fix: the original returned fmt.Sprintf("", sym), which produces the
	// NON-empty string "%!(EXTRA string=...)" and defeated the caller's
	// `if m.Text != ""` check — post nothing for unknown symbols.
	return ""
}
// AddEmojiToCSV appends a (key, URL) pair to the emoji CSV file. The key is
// stored upper-cased; the value is expected to be a Slack-formatted URL
// ("<http://...>") whose surrounding angle brackets are stripped. Returns
// true when the row was written.
// TODO: Permission system? Password?
func AddEmojiToCSV(key string, value string) bool {
	key = strings.ToUpper(key)
	// Fix: guard the slice below — the original panicked on values shorter
	// than two characters. TODO: real input validation of the URL.
	if len(value) < 2 {
		return false
	}
	value = value[1 : len(value)-1] // trim slack formatting for URLs
	if !FileExists(emojifile) {
		return false
	}
	// Fix: use the emojifile variable instead of a second hard-coded
	// "emoji.csv" literal so the two paths cannot drift apart.
	file, err := os.OpenFile(emojifile, os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0644)
	if err != nil {
		return false
	}
	defer file.Close()
	// TODO: Look for duplicate emoji names before appending.
	b := []string{key, value}
	fmt.Println(b)
	w := csv.NewWriter(file)
	if err := w.Write(b); err != nil {
		println(fmt.Sprintf("error: %v", err))
		return false
	}
	// Flush errors are ignored, as in the original.
	w.Flush()
	return true
}
// GetStringFromFile scans the named file line by line and returns the LAST
// line that starts with the given key prefix. Returns "String not found"
// when the file is missing or no line matches; exits the process (log.Fatal)
// on open/scan errors, matching the original behaviour.
func GetStringFromFile(name string, key string) string {
	result := "String not found"
	if !FileExists(name) {
		return result
	}
	file, err := os.Open(name)
	if err != nil {
		log.Fatal(err)
	}
	defer file.Close()
	scanner := bufio.NewScanner(file)
	for scanner.Scan() {
		line := scanner.Text()
		if strings.HasPrefix(line, key) {
			result = line
		}
	}
	if err := scanner.Err(); err != nil {
		log.Fatal(err)
	}
	return result
}
func FileExists(name string) bool {
if _, err := os.Stat(name); err != nil {
if os.IsNotExist(err) {
return false
}
}
return true
}
func CreateFile(name string) error {
fo, err := os.Create(name)
if err != nil {
return err
}
defer func() {
fo.Close()
}()
return nil
} |
#!/bin/sh
# This file is licensed under the BSD-3-Clause license.
# See the AUTHORS and LICENSE files for more information.
# Load the spec harness and the zfsnap core library under test.
. ../spec_helper.sh
. ../../share/zfsnap/core.sh

# TrimToPool is expected to reduce a dataset/snapshot name to its pool name
# (exit 0), or emit nothing and exit 1 for invalid input.

# These contain a valid pool, and should be trimmed accordingly
POOLS='zpool logs var z--pool'
ItsRetvalIs "TrimToPool 'zpool'" "zpool" 0                                               # w/ child w/o snapshot
ItsRetvalIs "TrimToPool 'logs/child'" "logs" 0                                           # w/ child w/o snapshot
ItsRetvalIs "TrimToPool 'var/child/grandchild'" "var" 0                                  # w/ grandchild w/o snapshot
ItsRetvalIs "TrimToPool 'zpool/child/grandchild'" "zpool" 0                              # w/ grandchild w/o snapshot
ItsRetvalIs "TrimToPool 'zpool@2011-04-05_02.06.00--1y'" "zpool" 0                       # pool w/o child w/ snapshot
ItsRetvalIs "TrimToPool 'z--pool@2011-04-05_02.06.00--1y'" "z--pool" 0                   # special characters in poolname w/ snapshot
ItsRetvalIs "TrimToPool 'zpool/child@2010-04-05_02.06.00--1m'" "zpool" 0                 # w/ child w/ snapshot
ItsRetvalIs "TrimToPool 'zpool/child/grandchild@2009-06-08_02.06.00--3d'" "zpool" 0      # w/ grandchild w/ snapshot

# These don't contain a valid pool, and should return an empty string
ItsRetvalIs "TrimToPool ''" "" 1             # empty
ItsRetvalIs "TrimToPool 'zpool_fake'" "" 1   # special character in poolname

ExitTests
|
/**
 * Maps a query result type to the list of schema column types it occupies.
 *
 * @param type the expression/result type to convert
 * @return one schema type per tuple member for a Product, a single converted
 *     type for an OpenSearch data type, otherwise the default column type
 */
private List<Schema.Type> convertOutputColumnType(Type type) {
    // A Product (tuple of types) expands to one schema type per member.
    if (type instanceof Product) {
        List<Type> memberTypes = ((Product) type).getTypes();
        return memberTypes.stream().map(this::convertType).collect(Collectors.toList());
    }
    // A single OpenSearch data type becomes a single-element list.
    if (type instanceof OpenSearchDataType) {
        return ImmutableList.of(convertType(type));
    }
    // Anything else falls back to the default column type.
    return ImmutableList.of(COLUMN_DEFAULT_TYPE);
}
/**
 * Converts a single type to its schema column type.
 *
 * <p>Currently a stub: every input maps to {@code COLUMN_DEFAULT_TYPE}.
 * Add per-type branches here as real conversions become necessary.</p>
 *
 * @param type the type to convert
 * @return the schema type (currently always the default)
 */
private Schema.Type convertType(Type type) {
    // Implement the conversion logic for individual types if needed
    // Example:
    // if (type instanceof SomeType) {
    //     return Schema.Type.SOME_TYPE;
    // }
    // Add similar conversion logic for other types as required
    // If no specific conversion is needed, return a default type
    return COLUMN_DEFAULT_TYPE;
}
import {Component, Inject, ViewChild} from '@angular/core';
import {FormBuilder, FormGroup, Validators} from '@angular/forms';
import {MAT_DIALOG_DATA, MatDialogRef} from '@angular/material/dialog';
import {LocalStorageService} from 'projects/tools/src/lib/local-storage.service';
import {TaskType} from 'projects/runtime/src/lib/entities/task-type';
import {Host} from 'projects/runtime/src/lib/entities/host';
import {PrettyStringPipe} from 'projects/tools/src/lib/pretty-string.pipe';
import {DescriptionInputComponent} from 'projects/gatling/src/app/simulations/simulation-dialogs/description-input/description-input.component';
import {EnvironmentVariablesListComponent} from 'projects/runtime/src/lib/runtime-host/environment-variables-list/environment-variables-list.component';
import {ExecutionContext} from 'projects/runtime/src/lib/entities/execution-context';
/**
 * Input data injected into ExecuteSimulationDialogComponent: which Gatling
 * simulation to run and how the task should be executed.
 */
export interface ExecuteSimulationDialogData {
  simulationPackage: string;
  simulationClass: string;
  type: TaskType;
  // NOTE(review): not read by the dialog component in this file — confirm
  // external usage before removing.
  atOnce: boolean;
}
@Component({
  selector: 'app-execute-simulation-dialog',
  templateUrl: './execute-simulation-dialog.component.html',
})
export class ExecuteSimulationDialogComponent {
  // Validated form holding the fully-qualified simulation class name.
  simulationForm: FormGroup;
  @ViewChild('descriptionInput', {static: true})
  descriptionInput: DescriptionInputComponent;
  @ViewChild('envVarList', {static: true})
  envVarList: EnvironmentVariablesListComponent;

  constructor(public dialogRef: MatDialogRef<ExecuteSimulationDialogComponent>,
              @Inject(MAT_DIALOG_DATA) public data: ExecuteSimulationDialogData,
              private fb: FormBuilder) {
    this.simulationForm = this.fb.group({
      // Pre-fill with "package.Class"; the pattern enforces a dotted
      // identifier path (e.g. com.example.MySimulation).
      simulationName: [data.simulationPackage + '.' + data.simulationClass, [
        Validators.required,
        Validators.pattern(/^(\w+\.)*\w+$/),
      ]],
    });
  }

  // Convenience accessor for the form control.
  get simulationName() {
    return this.simulationForm.get('simulationName');
  }

  /**
   * Builds the ExecutionContext from the form, the description input and the
   * environment-variable list, then closes the dialog returning it.
   */
  run() {
    const env = this.envVarList.environment;
    // The simulation to run is passed to Gatling via this variable.
    env.KRAKEN_GATLING_SIMULATION = this.simulationName.value;
    const context = new ExecutionContext(
      this.data.type,
      this.descriptionInput.description.value,
      env,
      this.envVarList.hosts
    );
    this.dialogRef.close(context);
  }
}
|
def ascii_to_binary(message):
    """Encode a string as the concatenation of the 8-bit binary codes of its
    characters.

    Fix: the original used ``bin(ord(char))[2:]``, which drops leading zeros
    (e.g. a newline became ``1010`` instead of ``00001010``), so the output
    was ambiguous and could not be decoded back. Each character is now
    zero-padded to 8 bits.
    """
    return "".join(format(ord(char), "08b") for char in message)
<filename>python/demo/yuesefu.py
#!/usr/bin/env python
# -*- coding:utf-8 -*-
author = "夜微凉"
"""
Python实现约瑟夫环
用for遍历lists, 同时设定一个游标,当游标为7时把该数加到删除列表中
在一次循环结束后,计算新的lists数据和长度
注意点:
1.如果在for循环中修改count的值,会导致先判断while,致使for循环重新开始
2.如果判断要删除的数值之后直接在for循环中删除,会导致循环索引偏差,所以采用一个删除数组
3.lists和del_list如果采用求差集的方式,注意最后的lists数据顺序是否符合预期
"""
def yue_se_fu(lists):
    """Josephus-style elimination: repeatedly count through ``lists`` and
    remove every 7th element (the count continues across rounds) until only
    one element remains.

    Returns a tuple ``(survivors, eliminated, eliminated_count)`` where
    ``eliminated`` lists the removed elements in removal order.
    """
    count = len(lists)
    del_list = []
    if count <= 1:
        # Fix: the original returned the bare list here while every other
        # path returns a 3-tuple, breaking callers that unpack the result.
        return lists, del_list, 0
    cursor = 0
    while count > 1:
        for i in lists:
            cursor += 1
            if cursor % 7 == 0:
                # Mark the 7th element for removal and restart the count.
                del_list.append(i)
                cursor = 0
        # Drop everything eliminated so far; del_list accumulates across
        # rounds, so re-applying the filter is safe.
        lists = [i for i in lists if i not in del_list]
        print(lists)
        print(cursor)
        count = len(lists)
    return lists, del_list, len(del_list)
# Demo: run the elimination for the numbers 1..50 and print the result tuple.
L = list(range(1, 51))
print(yue_se_fu(L))
|
import { FieldOfType } from '../key';
export type EmptyObject = Record<string, never>;

/**
 * Type guard: true when the object has no own enumerable string keys.
 */
export function objectHasNoKeys(obj: object): obj is EmptyObject {
  const keyCount = Object.keys(obj).length;
  return keyCount === 0;
}
/**
 * Own-property check. Goes through Object.prototype.hasOwnProperty so it
 * also behaves for objects that shadow hasOwnProperty.
 */
export function objectHasKey<T, K extends keyof T = keyof T>(obj: T, key: K): boolean;
export function objectHasKey<T>(obj: T, key: string): boolean;
export function objectHasKey<T>(obj: T, key: string): boolean {
  const hasOwn = Object.prototype.hasOwnProperty;
  return hasOwn.call(obj, key);
}
/**
 * Builds a partial object that assigns the same value to each listed field.
 */
export function applyToMultipleFields<T extends object, X = unknown>(value: X, fields: FieldOfType<T>[]): Partial<{ [K in keyof T]: X }> {
  const result = {} as { [K in keyof T]: X };
  for (const field of fields) {
    result[field] = value;
  }
  return result;
}
/**
 * Converts a Map into a plain object with the same key/value pairs
 * (insertion order preserved).
 */
export function mapToObject<T, K extends PropertyKey>(map: Map<K, T>): { [key: PropertyKey]: T } {
  const object = {} as { [key: PropertyKey]: T };
  for (const [key, value] of map.entries()) {
    object[key] = value;
  }
  return object;
}
export type CopyObjectFunction<T> = (input: T) => T;

/**
 * Creates a shallow copy of an object (own enumerable properties only).
 *
 * @param input the object to copy
 * @returns a new object with the same top-level entries
 */
export function copyObject<T extends object>(input: T): T {
  return Object.assign({}, input) as T;
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.