text stringlengths 1 1.05M |
|---|
#!/bin/bash
##
# CF (Collaborative Filtering, ALS)
# run:
# spark-submit --class cn.ac.ict.bigdatabench.ALS $JAR_FILE <ratings_file> <rank> <iterations> [<splits>]
#
# parameters:
# <ratings_file>: path of input data file
# <rank>: number of features to train the model
# <iterations>: number of iterations to run the algorithm
# [<splits>]: optional, level of parallelism to split computation into
#
# input data:
# userID,productID,rating
# for example:
# 1,1,5
# 1,3,4
##
curdir=`pwd`

# $1 is the input size in GB; the data set is expected at this fixed HDFS path.
ratings_file=/spark/cf/cf-${1}G
rank=$2
iter=$3

#-----------------running spark cf-------------
# Quote all expansions so unusual values (spaces, globs) cannot be word-split.
"$SPARK_HOME/bin/spark-submit" --class cn.ac.ict.bigdatabench.ALS cf/pre-build/bigdatabench-spark_1.3.0-hadoop_1.0.4.jar "$ratings_file" "$rank" "$iter"
echo "spark cf end"
|
/*
* [The "BSD licence"]
* Copyright (c) 2010 <NAME> (JesusFreke)
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.jf.baksmali.Adaptors.Format;
import org.jf.baksmali.Adaptors.LabelMethodItem;
import org.jf.baksmali.Adaptors.MethodDefinition;
import org.jf.util.IndentingWriter;
import org.jf.dexlib.Code.Instruction;
import org.jf.dexlib.Code.OffsetInstruction;
import org.jf.dexlib.Code.Opcode;
import org.jf.dexlib.CodeItem;
import java.io.IOException;
public class OffsetInstructionFormatMethodItem<T extends Instruction & OffsetInstruction>
        extends InstructionMethodItem<T> {
    /** Interned label for this instruction's branch/data target. */
    protected LabelMethodItem label;

    public OffsetInstructionFormatMethodItem(MethodDefinition.LabelCache labelCache, CodeItem codeItem, int codeAddress,
                                             T instruction) {
        super(codeItem, codeAddress, instruction);
        // The offset is relative to this instruction's address; intern the label so
        // that multiple references to the same target share one LabelMethodItem.
        LabelMethodItem target =
                new LabelMethodItem(codeAddress + instruction.getTargetAddressOffset(), getLabelPrefix());
        label = labelCache.internLabel(target);
    }

    @Override
    protected void writeTargetLabel(IndentingWriter writer) throws IOException {
        label.writeTo(writer);
    }

    public LabelMethodItem getLabel() {
        return label;
    }

    /**
     * Chooses the label-name prefix based on the instruction's format:
     * unconditional branches get "goto_", conditional branches "cond_", and
     * Format31t payload references get a prefix matching the payload kind.
     */
    private String getLabelPrefix() {
        switch (instruction.getFormat()) {
            case Format10t:
            case Format20t:
            case Format30t:
                return "goto_";
            case Format21t:
            case Format22t:
                return "cond_";
            case Format31t:
                switch (instruction.opcode) {
                    case FILL_ARRAY_DATA:
                        return "array_";
                    case PACKED_SWITCH:
                        return "pswitch_data_";
                    default:
                        assert instruction.opcode == Opcode.SPARSE_SWITCH;
                        return "sswitch_data_";
                }
        }

        assert false;
        return null;
    }
}
|
<reponame>cbeer/web
// SyntaxHighlighter by <NAME>
//
//= require sh/XRegExp
//= require sh/shCore
//= require_tree ./brushes
|
<reponame>AliFrank608-TMW/RacingReact<filename>src/views/Horse/Public/HorseOverview/index.js
// Public horse overview page: wraps the shared horse view higher-order
// component around the public overview component and re-exports the result.
import horseView from 'views/Horse/View'
import HorsePublicOverview from '../HorsePublicOverview'

export default horseView(HorsePublicOverview)
|
/// The temperature scales this module can convert between.
enum TemperatureUnit: CaseIterable {
    case celsius
    case fahrenheit
    case kelvin

    /// All known units. Kept for backward compatibility; prefer the
    /// synthesized `allCases` from `CaseIterable` in new code.
    static let getAllUnits = [celsius, fahrenheit, kelvin]
}
/// A single immutable temperature reading tagged with its unit.
struct Temperature {
    let value: Double
    let unit: TemperatureUnit

    /// Returns this reading re-expressed in `unit`.
    /// Converting to the unit the value is already in returns it unchanged.
    func converted(to unit: TemperatureUnit) -> Temperature {
        let convertedValue: Double
        switch (self.unit, unit) {
        case (.celsius, .fahrenheit):
            convertedValue = value * 9 / 5 + 32
        case (.celsius, .kelvin):
            convertedValue = value + 273.15
        case (.fahrenheit, .celsius):
            convertedValue = (value - 32) * 5 / 9
        case (.fahrenheit, .kelvin):
            convertedValue = (value + 459.67) * 5 / 9
        case (.kelvin, .celsius):
            convertedValue = value - 273.15
        case (.kelvin, .fahrenheit):
            convertedValue = value * 9 / 5 - 459.67
        default:
            // Source and target unit are the same: nothing to convert.
            convertedValue = value
        }
        return Temperature(value: convertedValue, unit: unit)
    }

    /// Averages the readings, expressed in the unit of the first element.
    /// Returns `nil` when the array is empty.
    static func average(_ temperatures: [Temperature]) -> Temperature? {
        guard let reference = temperatures.first else { return nil }
        let sum = temperatures
            .map { $0.converted(to: reference.unit).value }
            .reduce(0, +)
        return Temperature(value: sum / Double(temperatures.count), unit: reference.unit)
    }
}
// Demonstration
let temperature1 = Temperature(value: 25, unit: .celsius)
let temperature2 = temperature1.converted(to: .fahrenheit)
print("Temperature 1 in Fahrenheit: \(temperature2.value)°F")

let temperature3 = Temperature(value: 32, unit: .fahrenheit)
let temperature4 = temperature3.converted(to: .kelvin)
print("Temperature 3 in Kelvin: \(temperature4.value)K")

// Mixed-unit list; `average` expresses the result in the first element's
// unit (.celsius here).
let temperaturesToAverage = [temperature1, temperature2, temperature3, temperature4]
if let averageTemperature = Temperature.average(temperaturesToAverage) {
    // Note: interpolating `unit` prints the raw case name (e.g. "celsius").
    print("Average temperature: \(averageTemperature.value)\(averageTemperature.unit)")
} else {
    print("Cannot calculate average for empty array")
}
import discord
import asyncio

# Replace 'YOUR_DISCORD_TOKEN' with your actual Discord bot token
TOKEN = 'YOUR_DISCORD_TOKEN'

# NOTE(review): recent discord.py releases require an `intents` argument here
# (discord.Client(intents=discord.Intents.default())) — confirm the installed
# library version before running.
client = discord.Client()


async def spam_messages(channel, num_messages):
    # Send a numbered series of test messages to `channel`.
    # CAUTION: bulk-sending messages can violate Discord's Terms of Service
    # and server rules, and will be throttled by Discord's rate limits.
    if num_messages > 16:
        num_messages = 16  # Limit the number of messages to 16
    for i in range(1, num_messages + 1):
        await channel.send(f"Spam message {i} out of {num_messages}")


@client.event
async def on_ready():
    # Fires once the gateway connection is established and the bot is logged in.
    print(f'We have logged in as {client.user}')
    # Replace 'YOUR_CHANNEL_ID' with the actual channel ID where you want to send messages
    # (YOUR_CHANNEL_ID is an undefined placeholder; the script will not run as-is.)
    channel = client.get_channel(YOUR_CHANNEL_ID)
    if channel:
        await spam_messages(channel, 10)  # Send 10 spam messages
    else:
        print("Channel not found")


# Blocks and runs the event loop until the process is interrupted.
client.run(TOKEN)
import { schema } from 'normalizr'

// Normalizr entity describing a single institution; normalized results are
// keyed under the `institutions` slice (default id attribute: `id`).
export const institution = new schema.Entity(
  'institutions',
)

// Schema for an array of institution entities.
export const institutions = new schema.Array(institution)
# frozen_string_literal: true

RSpec.describe App do
  # Smoke checks: the top-level constants the library promises are defined.
  describe '#VERSION' do
    it { expect(described_class::VERSION).not_to be_nil }
  end

  describe '#Math' do
    it { expect(described_class::Math).not_to be_nil }
  end
end
|
docker build -t $DOCKER_BUILD_TAG .
|
#!/bin/bash

# Prefix marking machine-readable output lines (PID, PILOT_DIR, JOB_ID) that
# the calling front-end parses out of this script's stdout.
CONTROL_PREFIX="=-.-="
echo "${CONTROL_PREFIX} PID $$"

# Print the expected positional arguments (twelve required, two optional).
function usage() {
echo "Usage: ${0} \\"
echo " JOB_NAME QUEUE_NAME COLLECTOR TOKEN_FILE LIFETIME PILOT_BIN \\"
echo " OWNERS NODES MULTI_PILOT_BIN ALLOCATION REQUEST_ID PASSWORD_FILE \\"
echo " [CPUS] [MEM_MB]"
echo "where OWNERS is a comma-separated list. Omit CPUS and MEM_MB to get"
echo "whole-node jobs. NODES is ignored on non-whole-node jobs."
}
# Positional-argument parsing. The first nine arguments are required;
# ALLOCATION, CPUS and MEM_MB may be passed as the literal string "None"
# to mean "unset".
JOB_NAME=$1
if [[ -z $JOB_NAME ]]; then
usage
exit 1
fi
QUEUE_NAME=$2
if [[ -z $QUEUE_NAME ]]; then
usage
exit 1
fi
COLLECTOR=$3
if [[ -z $COLLECTOR ]]; then
usage
exit 1
fi
TOKEN_FILE=$4
if [[ -z $TOKEN_FILE ]]; then
usage
exit 1
fi
LIFETIME=$5
if [[ -z $LIFETIME ]]; then
usage
exit 1
fi
PILOT_BIN=$6
if [[ -z $PILOT_BIN ]]; then
usage
exit 1
fi
OWNERS=$7
if [[ -z $OWNERS ]]; then
usage
exit 1
fi
NODES=$8
if [[ -z $NODES ]]; then
usage
exit 1
fi
MULTI_PILOT_BIN=$9
if [[ -z $MULTI_PILOT_BIN ]]; then
usage
exit 1
fi
# "None" means: let the scheduler pick the default allocation.
ALLOCATION=${10}
if [[ $ALLOCATION == "None" ]]; then
ALLOCATION=""
fi
if [[ -z $ALLOCATION ]]; then
echo "Will try to use the default allocation."
fi
REQUEST_ID=${11}
if [[ -z $REQUEST_ID ]]; then
usage
exit 1
fi
PASSWORD_FILE=${12}
if [[ -z $PASSWORD_FILE ]]; then
usage
exit 1
fi
# Optional: when unset (or "None"), the job is a whole-node job.
CPUS=${13}
if [[ $CPUS == "None" ]]; then
CPUS=""
fi
MEM_MB=${14}
if [[ $MEM_MB == "None" ]]; then
MEM_MB=""
fi
# Remember when we started so the pilot's lifetime can be shortened by our
# own start-up overhead later on.
BIRTH=`date +%s`
# echo "Starting script at `date`..."
#
# Download and configure the pilot on the head node before running it
# on the execute node(s).
#
# The following variables are constants.
#
# The binaries must be a tarball named condor-*, and unpacking that tarball
# must create a directory which also matches condor-*.
WELL_KNOWN_LOCATION_FOR_BINARIES=https://research.cs.wisc.edu/htcondor/tarball/current/9.5.4/update/condor-9.5.4-20220207-x86_64_Rocky8-stripped.tar.gz
# The configuration must be a tarball which does NOT match condor-*. It
# will be unpacked in the root of the directory created by unpacking the
# binaries and as such should contain files in local/config.d/*.
WELL_KNOWN_LOCATION_FOR_CONFIGURATION=https://cs.wisc.edu/~tlmiller/hpc-config.tar.gz
# How early should HTCondor exit to make sure we have time to clean up?
CLEAN_UP_TIME=300
#
# Create pilot-specific directory on shared storage. The least-awful way
# to do this is by having the per-node script NOT exec condor_master, but
# instead configure the condor_master to exit well before the "run time"
# of the job, and the script carry on to do the clean-up.
#
# That won't work for multi-node jobs, which we'll need eventually, but
# we'll leave that for then.
#
# echo "Creating temporary directory for pilot..."
echo "Step 1 of 8..."
SCRATCH=${SCRATCH:-$PROJECT/hpc-annex/scratch}
mkdir -p "$SCRATCH"
PILOT_DIR=`/usr/bin/mktemp --directory --tmpdir=${SCRATCH} pilot.XXXXXXXX 2>&1`
if [[ $? != 0 ]]; then
echo "Failed to create temporary directory for pilot, aborting."
echo ${PILOT_DIR}
exit 1
fi
echo "${CONTROL_PREFIX} PILOT_DIR ${PILOT_DIR}"
# Remove the pilot directory on any exit; the trap is reset just before the
# SLURM job (which needs the directory) is left running.
function cleanup() {
echo "Cleaning up temporary directory..."
rm -fr ${PILOT_DIR}
}
trap cleanup EXIT
#
# Download the configuration. (Should be smaller, and we fail if either
# of these downloads fail, so we may as well try this one first.)
#
cd ${PILOT_DIR}
# The .sif files need to live in ${PILOT_DIR} for the same reason. We
# require that they have been transferred to the same directory as the
# PILOT_BIN mainly because this script has too many arguments already.
SIF_DIR=${PILOT_DIR}/sif
mkdir ${SIF_DIR}
PILOT_BIN_DIR=`dirname ${PILOT_BIN}`
mv ${PILOT_BIN_DIR}/sif ${PILOT_DIR}
# The pilot scripts need to live in the ${PILOT_DIR} because the front-end
# copied them into a temporary directory that it's responsible for cleaning up.
mv ${PILOT_BIN} ${PILOT_DIR}
mv ${MULTI_PILOT_BIN} ${PILOT_DIR}
PILOT_BIN=${PILOT_DIR}/`basename ${PILOT_BIN}`
MULTI_PILOT_BIN=${PILOT_DIR}/`basename ${MULTI_PILOT_BIN}`
# echo "Downloading configuration..."
echo "Step 2 of 8..."
CONFIGURATION_FILE=`basename ${WELL_KNOWN_LOCATION_FOR_CONFIGURATION}`
CURL_LOGGING=`curl -fsSL ${WELL_KNOWN_LOCATION_FOR_CONFIGURATION} -o ${CONFIGURATION_FILE} 2>&1`
if [[ $? != 0 ]]; then
echo "Failed to download configuration from '${WELL_KNOWN_LOCATION_FOR_CONFIGURATION}', aborting."
echo ${CURL_LOGGING}
exit 2
fi
#
# Download the binaries.
#
# echo "Downloading binaries..."
echo "Step 3 of 8..."
BINARIES_FILE=`basename ${WELL_KNOWN_LOCATION_FOR_BINARIES}`
CURL_LOGGING=`curl -fsSL ${WELL_KNOWN_LOCATION_FOR_BINARIES} -o ${BINARIES_FILE} 2>&1`
if [[ $? != 0 ]]; then
# Bug fix: this error message previously said "configuration", which made the
# two download failures indistinguishable in the log.
echo "Failed to download binaries from '${WELL_KNOWN_LOCATION_FOR_BINARIES}', aborting."
echo ${CURL_LOGGING}
exit 2
fi
#
# Unpack the binaries.
#
# echo "Unpacking binaries..."
echo "Step 4 of 8..."
TAR_LOGGING=`tar -z -x -f ${BINARIES_FILE} 2>&1`
if [[ $? != 0 ]]; then
echo "Failed to unpack binaries from '${BINARIES_FILE}', aborting."
echo ${TAR_LOGGING}
exit 3
fi
#
# Make the personal condor.
#
rm condor-*.tar.gz
cd condor-*
# echo "Making a personal condor..."
echo "Step 5 of 8..."
MPC_LOGGING=`./bin/make-personal-from-tarball 2>&1`
if [[ $? != 0 ]]; then
echo "Failed to make personal condor, aborting."
echo ${MPC_LOGGING}
exit 4
fi
# It may have taken some time to get everything installed, so to make sure
# we get our full clean-up time, subtract off how long we've been running
# already.
YOUTH=$((`date +%s` - ${BIRTH}))
REMAINING_LIFETIME=$(((${LIFETIME} - ${YOUTH}) - ${CLEAN_UP_TIME}))
# Whole-node mode is the default; requesting either CPUs or memory
# explicitly turns it off.
WHOLE_NODE=1
CONDOR_CPUS_LINE=""
if [[ -n $CPUS && $CPUS -gt 0 ]]; then
CONDOR_CPUS_LINE="NUM_CPUS = ${CPUS}"
WHOLE_NODE=""
fi
CONDOR_MEMORY_LINE=""
if [[ -n $MEM_MB && $MEM_MB -gt 0 ]]; then
CONDOR_MEMORY_LINE="MEMORY = ${MEM_MB}"
WHOLE_NODE=""
fi
# echo "Converting to a pilot..."
echo "Step 6 of 8..."
rm local/config.d/00-personal-condor
# Write the pilot's HTCondor configuration. Everything between the quotes is
# emitted verbatim into local/config.d/00-basic-pilot; \$ escapes keep
# HTCondor macros like $(LOCAL_DIR) from being expanded by the shell, while
# unescaped ${...} references are filled in by this script.
echo "
use role:execute
use security:recommended_v9_0
use feature:PartitionableSLot
COLLECTOR_HOST = ${COLLECTOR}
# We shouldn't ever actually need this, but it's convenient for testing.
SHARED_PORT_PORT = 0
# Allows condor_off (et alia) to work from the head node.
ALLOW_ADMINISTRATOR = \$(ALLOW_ADMINISTRATOR) $(whoami)@$(hostname)
# FIXME: use same-AuthenticatedIdentity once that becomes available, instead.
# Allows condor_off (et alia) to work from the submit node.
ALLOW_ADMINISTRATOR = \$(ALLOW_ADMINISTRATOR) condor_pool@*
SEC_DEFAULT_AUTHENTICATION_METHODS = FS IDTOKENS PASSWORD
# Eliminate a bogus, repeated warning in the logs. This is a bug;
# it should be the default.
SEC_PASSWORD_DIRECTORY = \$(LOCAL_DIR)/passwords.d
# This is a bug; it should be the default.
SEC_TOKEN_SYSTEM_DIRECTORY = \$(LOCAL_DIR)/tokens.d
# Having to set it twice is also a bug.
SEC_TOKEN_DIRECTORY = \$(LOCAL_DIR)/tokens.d
# Don't run benchmarks.
RUNBENCHMARKS = FALSE
# We definitely need CCB.
CCB_ADDRESS = \$(COLLECTOR_HOST)
#
# Commit suicide after being idle for five minutes.
#
STARTD_NOCLAIM_SHUTDOWN = 300
#
# Don't run for more than two hours, to make sure we have time to clean up.
#
MASTER.DAEMON_SHUTDOWN_FAST = (CurrentTime - DaemonStartTime) > ${REMAINING_LIFETIME}
# Only start jobs from the specified owner.
START = \$(START) && stringListMember( Owner, \"${OWNERS}\" )
# Advertise the standard annex attributes (master ad for condor_off).
IsAnnex = TRUE
AnnexName = \"${JOB_NAME}\"
hpc_annex_request_id = \"${REQUEST_ID}\"
STARTD_ATTRS = \$(STARTD_ATTRS) AnnexName IsAnnex hpc_annex_request_id
MASTER_ATTRS = \$(MASTER_ATTRS) AnnexName IsAnnex hpc_annex_request_id
# Force all container-universe jobs to try to use pre-staged .sif files.
# This should be removed when we handle this in HTCondor proper.
JOB_EXECUTION_TRANSFORM_NAMES = siffile
JOB_EXECUTION_TRANSFORM_siffile @=end
if defined MY.ContainerImage
EVALSET ContainerImage strcat(\"${SIF_DIR}/\", MY.ContainerImage)
endif
@end
#
# Subsequent configuration is machine-specific.
#
SINGULARITY = /usr/bin/singularity
${CONDOR_CPUS_LINE}
${CONDOR_MEMORY_LINE}
# Create dynamic slots 3 GB at a time. This number was chosen because it's
# the amount of RAM requested per core on the OS Pool, but we actually bother
# to set it because we start seeing weird scaling issues with more than 64
# or so slots. Since we can't fix that problem right now, avoid it.
MUST_MODIFY_REQUEST_EXPRS = TRUE
MODIFY_REQUEST_EXPR_REQUESTMEMORY = max({ 3072, quantize(RequestMemory, {128}) })
" > local/config.d/00-basic-pilot
mkdir local/passwords.d
mkdir local/tokens.d
# Install the credentials the front-end transferred alongside this script.
mv ${TOKEN_FILE} local/tokens.d
mv ${PASSWORD_FILE} local/passwords.d/POOL
#
# Unpack the configuration on top.
#
# echo "Unpacking configuration..."
echo "Step 7 of 8..."
TAR_LOGGING=`tar -z -x -f ../${CONFIGURATION_FILE} 2>&1`
if [[ $? != 0 ]]; then
# Bug fix: this error message previously said "binaries", which made the two
# unpack failures indistinguishable in the log.
echo "Failed to unpack configuration from '${CONFIGURATION_FILE}', aborting."
echo ${TAR_LOGGING}
exit 5
fi
#
# Write the SLURM job.
#
# Compute the appropriate duration. (-t)
#
# This script does NOT embed knowledge about this machine's queue limits. It
# seems like it'll be much easier to embed that knowledge in the UI script
# (rather than transmit a reasonable error back), plus it'll be more user-
# friendly, since they won't have to log in to get error about requesting
# the wrong queue length.
MINUTES=$(((${REMAINING_LIFETIME} + ${CLEAN_UP_TIME})/60))
if [[ $WHOLE_NODE ]]; then
# Whole node jobs request the same number of tasks per node as tasks total.
# They make no specific requests about CPUs, memory, etc., since the SLURM
# partition should already determine that.
SBATCH_RESOURCES_LINES="\
#SBATCH --nodes=${NODES}
#SBATCH --ntasks=${NODES}
"
else
# Jobs on shared (non-whole-node) SLURM partitions can't be multi-node on
# Expanse. Request one job, and specify the resources that should be
# allocated to the job.
# XXX Should I reject NODES > 1?
# FIXME: I'm OK with ignoring it, but the FE should check..
SBATCH_RESOURCES_LINES="\
#SBATCH --ntasks=1
#SBATCH --nodes=1
"
if [[ $CPUS ]]; then
SBATCH_RESOURCES_LINES="\
${SBATCH_RESOURCES_LINES}
#SBATCH --cpus-per-task=${CPUS}
"
fi
if [[ $MEM_MB ]]; then
SBATCH_RESOURCES_LINES="\
${SBATCH_RESOURCES_LINES}
#SBATCH --mem=${MEM_MB}M
"
fi
fi
if [[ -n $ALLOCATION ]]; then
SBATCH_ALLOCATION_LINE="#SBATCH -A ${ALLOCATION}"
fi
# Assemble the batch script: shebang first, then the #SBATCH directives, then
# the command that launches the pilot on the allocated node(s).
echo '#!/bin/bash' > ${PILOT_DIR}/bridges2.slurm
echo "
#SBATCH -J ${JOB_NAME}
#SBATCH -o ${PILOT_DIR}/%j.out
#SBATCH -e ${PILOT_DIR}/%j.err
#SBATCH -p ${QUEUE_NAME}
${SBATCH_RESOURCES_LINES}
#SBATCH -t ${MINUTES}
${SBATCH_ALLOCATION_LINE}
${MULTI_PILOT_BIN} ${PILOT_BIN} ${PILOT_DIR}
" >> ${PILOT_DIR}/bridges2.slurm
#
# Submit the SLURM job.
#
# echo "Submitting SLURM job..."
echo "Step 8 of 8..."
SBATCH_LOG=${PILOT_DIR}/sbatch.log
sbatch ${PILOT_DIR}/bridges2.slurm &> ${SBATCH_LOG}
SBATCH_ERROR=$?
if [[ $SBATCH_ERROR != 0 ]]; then
echo "Failed to submit job to SLURM (${SBATCH_ERROR}), aborting."
cat ${SBATCH_LOG}
exit 6
fi
# Extract the job id from sbatch's "Submitted batch job NNN" line and hand it
# back to the front-end via the control prefix.
JOB_ID=`cat ${SBATCH_LOG} | awk '/^Submitted batch job/{print $4}'`
echo "${CONTROL_PREFIX} JOB_ID ${JOB_ID}"
echo "... done."
# Reset the EXIT trap so that we don't delete the temporary directory
# that the SLURM job needs. (We pass it the temporary directory so that
# it can clean up after itself.)
trap EXIT
exit 0
|
#!/bin/bash
# Run the Yii console "parserrec/news" command from the project root and
# append its output to the parser log.
echo "[----------] BEGIN. RECORDERE NEWS parser script."
# NOTE(review): assumes this script lives three directories below the Yii
# project root — confirm against the deployment layout.
cd ../../../
php yii parserrec/news >> console/runtime/logs/parser.log
echo "[----------] END. RECORDERE NEWS parser script."
echo "START: rotate CA certs on master"
# Disable tracing while sourcing heat-params so secrets are not echoed.
set +x
. /etc/sysconfig/heat-params
set -x
set -eu -o pipefail
ssh_cmd="ssh -F /srv/magnum/.ssh/config root@localhost"
export KUBECONFIG="/etc/kubernetes/admin.conf"
service_account_key=$kube_service_account_key_input
service_account_private_key=$kube_service_account_private_key_input
# Only rotate when both replacement service-account keys were provided.
if [ ! -z "$service_account_key" ] && [ ! -z "$service_account_private_key" ] ; then
    # Follow the instructions on https://kubernetes.io/docs/tasks/tls/manual-rotation-of-ca-certificates/
    for namespace in $(kubectl get namespace -o jsonpath='{.items[*].metadata.name}'); do
        for name in $(kubectl get deployments -n $namespace -o jsonpath='{.items[*].metadata.name}'); do
            kubectl patch deployment -n ${namespace} ${name} -p '{"spec":{"template":{"metadata":{"annotations":{"ca-rotation": "1"}}}}}';
        done
        for name in $(kubectl get daemonset -n $namespace -o jsonpath='{.items[*].metadata.name}'); do
            kubectl patch daemonset -n ${namespace} ${name} -p '{"spec":{"template":{"metadata":{"annotations":{"ca-rotation": "1"}}}}}';
        done
    done
    # Annotate any Daemonsets and Deployments to trigger pod replacement in a safer rolling fashion.
    # NOTE(review): this second pass applies byte-identical patches to the same
    # objects as the loop above, so it is a no-op; looks like copy-paste
    # duplication — confirm whether a second pass was intended.
    for namespace in $(kubectl get namespace -o jsonpath='{.items[*].metadata.name}'); do
        for name in $(kubectl get deployments -n $namespace -o jsonpath='{.items[*].metadata.name}'); do
            kubectl patch deployment -n ${namespace} ${name} -p '{"spec":{"template":{"metadata":{"annotations":{"ca-rotation": "1"}}}}}';
        done
        for name in $(kubectl get daemonset -n $namespace -o jsonpath='{.items[*].metadata.name}'); do
            kubectl patch daemonset -n ${namespace} ${name} -p '{"spec":{"template":{"metadata":{"annotations":{"ca-rotation": "1"}}}}}';
        done
    done
    # Restart the control-plane and node services so they pick up the new CA.
    for service in etcd kube-apiserver kube-controller-manager kube-scheduler kubelet kube-proxy; do
        echo "restart service $service"
        $ssh_cmd systemctl restart $service
    done
    # NOTE(flwang): Re-patch the calico-node daemonset again to make sure all pods are being recreated
    kubectl patch daemonset -n kube-system calico-node -p '{"spec":{"template":{"metadata":{"annotations":{"ca-rotation": "2"}}}}}';
fi
echo "END: rotate CA certs on master"
|
#!/bin/bash
# Convert every Crusader Kings 2 save (*.ck2) in SAVES_SRC to JSON in
# JSON_DEST using ./ck2json, then sanity-check each output parses as JSON.
set -e

if [ $# -ne 2 ]
then
	echo "arguments: SAVES_SRC JSON_DEST" 1>&2
	exit 1
fi

# Quote all path expansions so directories/files with spaces work.
mkdir -p "$2"

for i in "$1"/*.ck2
do
	output="$2/$(basename -s '.ck2' "$i").json"
	echo "$i : $output"
	# Create Json
	./ck2json "$i" > "$output"
	# Validate Json Syntax
	printf "import sys\nimport json\nwith open(sys.argv[1]) as f:\n json.load(f)\n" | python3 - "$output"
done
|
<filename>app/IPS/IPS.js
var PFlist = require('./Plugins/PFlist.js')
var ModSec = require('./Plugins/ModSec.js')
var HAProxy = require('./Plugins/HAProxy.js')
var CSV_Export = require('./Plugins/CSV_Export.js')
var IPTables = require('./Plugins/IPTables.js')
var Checkpoint = require('./Plugins/Checkpoint.js')
var CheckpointSecureXL = require('./Plugins/CheckpointSecureXL.js')
var CloudFlare = require('./Plugins/CloudFlareIPS.js')
var Cisco = require('./Plugins/Cisco.js')
var AWSWAF = require('./Plugins/AwsIPS.js')
var DBCleanUP = require('./DBCleanUP.js')
var Remote_Read = require('../Remote/Remote_Read.js')
/**
 * Aggregates all supported IPS output plugins. `init` wires the enabled
 * plugins into a single callback chain, always headed by the remote reader;
 * when nothing is enabled, only the DB clean-up runs.
 */
var IPS = function () {
    // One instance of every supported plugin, in the order they are checked.
    // `key` is the flag under input_obj.config.ips; `message` is logged when
    // the plugin is enabled.
    var plugins = [
        { key: 'pflist', message: 'INFO: IPS PFList Enabled', handler: new PFlist() },                         // MacOS
        { key: 'modsec', message: 'INFO: IPS ModSec Enabled', handler: new ModSec() },                         // ModSec
        { key: 'cloudflare', message: 'INFO: IPS CloudFlare Enabled', handler: new CloudFlare() },             // CloudFlare
        { key: 'cisco', message: 'INFO: IPS Cisco Enabled', handler: new Cisco() },                            // Cisco
        { key: 'haproxy', message: 'INFO: IPS HAProxy Enabled', handler: new HAProxy() },                      // HAProxy
        { key: 'csv', message: 'INFO: IPS csv_export Enabled', handler: new CSV_Export() },                    // csv_export
        { key: 'iptables', message: 'INFO: IPS iptables Enabled', handler: new IPTables() },                   // iptables
        { key: 'checkpoint', message: 'INFO: IPS Checkpoint Enabled', handler: new Checkpoint() },             // Checkpoint
        { key: 'checkpoint_securexl', message: 'INFO: IPS Checkpoint SecureXL Enabled', handler: new CheckpointSecureXL() }, // Checkpoint SecureXL
        { key: 'aws', message: 'INFO: IPS AWS WAF Enabled', handler: new AWSWAF() }                            // AWS WAF
    ]
    var dbcleanup = new DBCleanUP()
    var remote_read = new Remote_Read()

    this.init = function (input_obj) {
        // The remote reader always runs first; each callback is expected to
        // invoke the next one, passing along the remainder of the chain.
        var chain = [remote_read.init]
        var anyEnabled = false

        plugins.forEach(function (plugin) {
            if (input_obj.config.ips[plugin.key].active === true) {
                console.log(plugin.message)
                chain.push(plugin.handler.init)
                anyEnabled = true
            }
        })

        // With no output plugin enabled, fall back to DB clean-up only.
        if (!anyEnabled) {
            chain.push(dbcleanup.init)
        }

        chain[0](input_obj, chain.slice(1))
    }
}

module.exports = IPS
|
<reponame>thedigicraft/Rocket.Chat
import { Meteor } from 'meteor/meteor';
import peerClient from '../peerClient';
import peerServer from '../peerServer';
Meteor.methods({
	// Server method: look up federated users by e-mail via the federation peer.
	federationSearchUsers(email) {
		// Only authenticated users may search.
		if (!Meteor.userId()) {
			throw new Meteor.Error('error-invalid-user', 'Invalid user', { method: 'federationSearchUsers' });
		}

		// Federation must be enabled on this server.
		if (!peerServer.enabled) {
			throw new Meteor.Error('error-federation-disabled', 'Federation disabled', { method: 'federationAddUser' });
		}

		const federatedUsers = peerClient.findUsers(email);

		if (!federatedUsers.length) {
			throw new Meteor.Error('federation-user-not-found', `Could not find federated users using "${ email }"`);
		}

		return federatedUsers;
	},
});
|
def to_uppercase(string):
    """Return a copy of ``string`` with all cased characters in upper case."""
    result = string.upper()
    return result


print(to_uppercase("hello World"))
# Outputs:
# HELLO WORLD
import re

# Sentence containing the digit runs we want to extract.
input_string = "There are 7 days in a week and 12 noon in a day."
# One or more consecutive decimal digits.
pattern = r"\d+"
# findall returns every non-overlapping match, in order, as strings.
match = re.findall(pattern, input_string)
print (match)
# Output: ['7', '12']
parallel -j1 "conjure solve gen.essence --solver-options -randomiseorder ; mv gen.solution random{}.param" ::: $(seq -w 1 100)
|
import DashboardPageWidgetVO from "../../../../../../../shared/modules/DashboardBuilder/vos/DashboardPageWidgetVO";
import TableColumnDescVO from "../../../../../../../shared/modules/DashboardBuilder/vos/TableColumnDescVO";
import DefaultTranslation from "../../../../../../../shared/modules/Translation/vos/DefaultTranslation";
export default class TableWidgetOptions {
public static TITLE_CODE_PREFIX: string = "TableWidgetOptions.title.";
public static get_selected_fields(page_widget: DashboardPageWidgetVO): { [api_type_id: string]: { [field_id: string]: boolean } } {
let res: { [api_type_id: string]: { [field_id: string]: boolean } } = {};
let options: TableWidgetOptions = (page_widget && page_widget.json_options) ? JSON.parse(page_widget.json_options) : null;
if (!options) {
return res;
}
for (let i in options.columns) {
let column = options.columns[i];
if ((!column.api_type_id) || (!column.field_id)) {
continue;
}
if (!res[column.api_type_id]) {
res[column.api_type_id] = {};
}
res[column.api_type_id][column.field_id] = true;
}
return res;
}
public constructor(
public columns: TableColumnDescVO[],
public is_focus_api_type_id: boolean,
public limit: number,
public crud_api_type_id: string,
public vocus_button: boolean,
public delete_button: boolean,
public delete_all_button: boolean,
public create_button: boolean,
public update_button: boolean,
public refresh_button: boolean,
public export_button: boolean,
public can_filter_by: boolean
) { }
public get_title_name_code_text(page_widget_id: number): string {
if (!page_widget_id) {
return null;
}
return TableWidgetOptions.TITLE_CODE_PREFIX + page_widget_id + DefaultTranslation.DEFAULT_LABEL_EXTENSION;
}
} |
<filename>spec/i18n_spec.rb<gh_stars>1-10
require 'spec_helper'

include Persons

# Instantiate `clazz` with the attribute `name` set, fetch the element called
# `name` from its `what` collection (:filters / :facets / :columns), assert
# the lookup found it, and return it.
def __find__( clazz, what, name )
  g = clazz.new name => 'bla'
  f = g.send(what).detect{|w|w.name == name}
  f.name.should == name
  f
end

# Evaluate the block under the :en and :de locales and compare the results
# against the expected translations.
def check_local( en, de )
  I18n.locale = :en
  yield.should == en
  I18n.locale = :de
  yield.should == de
end

describe 'I18n' do
  it 'should translate filter labels' do
    check_local('en_name', 'de_name'){ __find__(PersonGrid, :filters, :localname).label }
  end

  it 'should translate facet labels' do
    check_local('en_label', 'de_label'){ __find__(PersonGrid, :facets, :localname).label }
  end

  # Minimal grid used solely to exercise column-header translation.
  class HeaderGrid < Gitter::Grid
    grid do
      scope do Person.scoped end
      column :localname, :column => :name
    end
  end

  it 'should translate column headers' do
    check_local('en_header', 'de_header'){ __find__(HeaderGrid, :columns, :localname).header }
  end
end
|
# Fetch the LineageOS build-target list, strip comments, keep the cm-14.1
# targets, and register each as a lunch combo named lineage_<device>-<type>.
# (The unquoted $(...) is intentional: word splitting yields one combo per line.)
for combo in $(curl -s https://raw.githubusercontent.com/LightningFastRom/hudson/master/lineage-build-targets | sed -e 's/#.*$//' | grep cm-14.1 | awk '{printf "lineage_%s-%s\n", $1, $2}')
do
	# Quote the combo so an unexpected value cannot be split further.
	add_lunch_combo "$combo"
done
|
#!/bin/bash
# Watchdog: if the ffmpeg RTSP->YouTube relay is not running, notify by
# e-mail and start it again. Intended to be run periodically (e.g. cron).
EMAIL="" # Insert your email address here to receive notifications when the ffmpeg script is restarted
SERVICE="ffmpeg"
WEBCAM=192.168.86.219 #Change to your webcam IP address
# NOTE(review): this URL embeds plain-text credentials; consider moving them
# to a protected configuration file.
RTSP_URL="rtsp://$WEBCAM/user=admin&password=tlJwpbo6&channel=2&stream=0.sdp?real_stream"
YOUTUBE_URL="rtmp://a.rtmp.youtube.com/live2"
YOUTUBE_KEY="" # Insert your Youtube Live Stream Key here
# Full relay command: silent audio source + RTSP input over TCP, video copied
# and audio encoded to AAC, pushed to the YouTube RTMP ingest endpoint.
COMMAND="/home/pi/ffmpeg/ffmpeg -f lavfi -i anullsrc -thread_queue_size 10240 -rtsp_transport tcp -stimeout 60000000 -i ${RTSP_URL} -tune zerolatency -s 1920x1080 -r 25 -vcodec libx264 -pix_fmt + -c:v copy -c:a aac -strict experimental -f flv ${YOUTUBE_URL}/${YOUTUBE_KEY}"
if sudo /usr/bin/pgrep $SERVICE > /dev/null
then
>&2 echo "${SERVICE} is already running."
else
>&2 echo "${SERVICE} is NOT running! Starting now..."
echo "Restarted ffmpeg!" | mail -s "Restarted ffmpeg" $EMAIL
$COMMAND
fi
|
<gh_stars>10-100
package inventory
import (
"fmt"
"github.com/cbuschka/tfvm/internal/inventory/state"
"github.com/cbuschka/tfvm/internal/log"
platformPkg "github.com/cbuschka/tfvm/internal/platform"
"github.com/cbuschka/tfvm/internal/version"
"os"
"sort"
)
// GetMatchingTerraformRelease returns the terraform version for a version specification.
func (inventory *Inventory) GetMatchingTerraformRelease(versionSpec *version.TerraformVersionSpec) (*state.TerraformReleaseState, error) {
	terraformReleasesAsc := inventory.GetTerraformReleasesAsc()
	if len(terraformReleasesAsc) == 0 {
		return nil, version.NewNoTerraformReleases()
	}
	// The list is ascending, so the last entry is the newest release; Matches
	// receives it so specs can match relative to the latest version.
	latestTfRelease := terraformReleasesAsc[len(terraformReleasesAsc)-1]
	// Walk newest-first so the highest matching version wins.
	for index := range terraformReleasesAsc {
		tfReleaseVersion := terraformReleasesAsc[len(terraformReleasesAsc)-index-1]
		if versionSpec.Matches(tfReleaseVersion, latestTfRelease) {
			tfRelease, found := inventory.TerraformReleases[tfReleaseVersion.String()]
			if !found {
				// Should not happen: the version list is derived from this map.
				return nil, fmt.Errorf("terraform release of best version not found")
			}
			return tfRelease, nil
		}
	}
	return nil, version.NewNoSuchTerraformRelease()
}
// GetLatestRelease returns the newest terraform version known.
func (inventory *Inventory) GetLatestRelease() (*version.TerraformVersion, error) {
	releases := inventory.GetTerraformReleasesAsc()
	if count := len(releases); count > 0 {
		// Sorted ascending, so the last entry is the newest.
		return releases[count-1], nil
	}
	return nil, version.NewNoTerraformReleases()
}
// GetTerraformReleasesAsc lists terraform versions known in ascending order.
func (inventory *Inventory) GetTerraformReleasesAsc() []*version.TerraformVersion {
	// Collect the version of every known release, then sort ascending.
	versions := make([]*version.TerraformVersion, 0, len(inventory.TerraformReleases))
	for _, releaseState := range inventory.TerraformReleases {
		versions = append(versions, releaseState.Version)
	}
	sort.Sort(version.Collection(versions))
	return versions
}
// GetTerraformRelease returns the terraform version.
func (inventory *Inventory) GetTerraformRelease(tfReleaseVersion *version.TerraformVersion) (*state.TerraformReleaseState, error) {
	// Releases are keyed by their canonical version string.
	if tfRelease, found := inventory.TerraformReleases[tfReleaseVersion.String()]; found {
		return tfRelease, nil
	}
	return nil, version.NewNoSuchTerraformRelease()
}
// GetTerraform get reference to a terraform installation of a given version.
func (inventory *Inventory) GetTerraform(tfRelease *version.TerraformVersion, platform platformPkg.Platform) (*Terraform, error) {
	log.Debugf("Looking up terraform %s on %s...", tfRelease.String(), platform)
	tfPath, err := inventory.getTerraformPath(tfRelease, platform)
	if err != nil {
		return nil, err
	}
	// Verify the binary actually exists before handing out a reference to it.
	if _, err := os.Stat(tfPath); err != nil {
		return nil, err
	}
	return newTerraform(tfRelease.String(), platform.Os, platform.Arch, tfPath), nil
}
// IsTerraformInstalledOnAnyPlatform answers if a particular terraform version is already installed locally.
func (inventory *Inventory) IsTerraformInstalledOnAnyPlatform(tfRelease *version.TerraformVersion) (bool, error) {
	for _, platform := range inventory.platforms {
		log.Debugf("Checking if terraform %s is installed on %s", tfRelease.String(), platform)
		installed, err := inventory.IsTerraformInstalled(tfRelease, platform)
		if err != nil {
			return false, err
		}
		if installed {
			log.Infof("Terraform %s is installed on %s", tfRelease.String(), platform)
			return true, nil
		}
		// Idiom fix: dropped the redundant `else` after the return above.
		log.Debugf("Terraform %s not installed on %s", tfRelease.String(), platform)
	}
	log.Infof("Terraform %s not installed on any platform", tfRelease.String())
	return false, nil
}
// IsTerraformInstalled answers if a particular terraform version is already installed locally.
// A missing file is reported as (false, nil); any other stat failure is
// returned as an error.
func (inventory *Inventory) IsTerraformInstalled(tfRelease *version.TerraformVersion, platform platformPkg.Platform) (bool, error) {
	versionedTfPath, err := inventory.getTerraformPath(tfRelease, platform)
	if err != nil {
		return false, err
	}
	log.Debugf("Terraform path for %s on %s: %s", tfRelease.String(), platform, versionedTfPath)
	if _, err := os.Stat(versionedTfPath); err != nil {
		// Not-exist is the expected "not installed" outcome, not an error.
		if os.IsNotExist(err) {
			return false, nil
		}
		return false, err
	}
	log.Infof("Terraform %s on %s found at %s", tfRelease.String(), platform, versionedTfPath)
	return true, nil
}
|
<reponame>vsilaev/tascalate-async-awa
/**
* Copyright 2015-2021 <NAME> (http://vsilaev.com)
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.tascalate.async;
import java.util.concurrent.CancellationException;
import java.util.concurrent.CompletionStage;
import net.tascalate.async.core.AsyncMethodExecutor;
import net.tascalate.async.core.InternalCallContext;
import net.tascalate.javaflow.function.SuspendableFunction;
/**
* @author <NAME>
*
*/
public class CallContext {

    // Static-utility holder; never instantiated.
    private CallContext() {}

    /**
     * Waits for the {@link CompletionStage} within an asynchronous method.
     *
     * The asynchronous method will be suspended until the {@link CompletionStage}
     * returns or throws the result.
     */
    public @suspendable static <T> T await(CompletionStage<T> future) throws CancellationException, InvalidCallContextException {
        return AsyncMethodExecutor.await(future);
    }

    /** Reports whether the current asynchronous method has been interrupted. */
    public static boolean interrupted() throws InvalidCallContextException {
        // Implementation is used only in @suspendable methods
        // @async methods get this call replaced with optimized
        // version that invokes instance method on generated class
        return InternalCallContext.interrupted(true);
    }

    // The methods below are compile-time placeholders: their bodies are never
    // executed because the bytecode enhancer replaces the call sites (hence
    // the IllegalStateException if a call ever survives unweaved).

    /** Placeholder: produces the async method's result stage; replaced by the enhancer. */
    public static <T, R extends CompletionStage<T>> R async(T value) {
        throw new IllegalStateException("Method call must be replaced by bytecode enhancer");
    }

    /** Placeholder: yields an immediately-available value; replaced by the enhancer. */
    public static <T> YieldReply<T> yield(T readyValue) throws InvalidCallContextException {
        throw new IllegalStateException("Method call must be replaced by bytecode enhancer");
    }

    /** Placeholder: yields a value once the stage completes; replaced by the enhancer. */
    public static <T> YieldReply<T> yield(CompletionStage<T> pendingValue) throws CancellationException, InvalidCallContextException {
        throw new IllegalStateException("Method call must be replaced by bytecode enhancer");
    }

    /** Placeholder: yields values from the sequence; replaced by the enhancer. */
    public static <T> YieldReply<T> yield(Sequence<? extends CompletionStage<T>> values) throws CancellationException, InvalidCallContextException {
        throw new IllegalStateException("Method call must be replaced by bytecode enhancer");
    }

    /** Placeholder: obtains the generator handle; replaced by the enhancer. */
    public static <T> AsyncGenerator<T> yield() {
        throw new IllegalStateException("Method call must be replaced by bytecode enhancer");
    }

    // The throwing(...) overloads let a caller declare checked exception types
    // on an async method without actually throwing; bodies are intentionally empty.
    public static <E1 extends Throwable> void throwing(Class<E1> e1) throws E1 {}

    public static <E1 extends Throwable,
                   E2 extends Throwable> void throwing(Class<E1> e1, Class<E2> e2) throws E1, E2 {}

    public static <E1 extends Throwable,
                   E2 extends Throwable,
                   E3 extends Throwable> void throwing(Class<E1> e1, Class<E2> e2, Class<E3> e3) throws E1, E2, E3 {}

    public static <E1 extends Throwable,
                   E2 extends Throwable,
                   E3 extends Throwable,
                   E4 extends Throwable> void throwing(Class<E1> e1, Class<E2> e2, Class<E3> e3, Class<E4> e4) throws E1, E2, E3, E4 {}

    public static <E1 extends Throwable,
                   E2 extends Throwable,
                   E3 extends Throwable,
                   E4 extends Throwable,
                   E5 extends Throwable> void throwing(Class<E1> e1, Class<E2> e2, Class<E3> e3, Class<E4> e4, Class<E5> e5) throws E1, E2, E3, E4, E5 {}

    /** Returns a suspendable function that awaits each stage passed to it. */
    public static <T> SuspendableFunction<CompletionStage<T>, T> awaitValue() {
        return new SuspendableFunction<CompletionStage<T>, T>() {
            @Override
            public T apply(CompletionStage<T> future) {
                return AsyncMethodExecutor.await(future);
            }
        };
    }
}
|
<filename>demo/src/app/treeTwo/treeTwoNode.service.ts
import {Injectable} from '@angular/core';
import {TreeLocalStorageNodeService} from '../localStorage/treeLocalStorage.service';
/**
 * Node service backing the "treeTwo" demo tree. Behavior is inherited from
 * TreeLocalStorageNodeService (presumably localStorage-backed persistence —
 * confirm in that base class); only the storage name and public id differ.
 */
@Injectable()
export class TreeTwoNodeService extends TreeLocalStorageNodeService {
  // Name under which this tree's data is stored by the base service.
  protected treeName = 'treeTwo';

  // Stable identifier used to reference this tree from the demo UI.
  public get treeId(): string {
    return 'tree2';
  }
}
|
package pantry
import (
"context"
"io"
"io/ioutil"
"github.com/yazver/golibs/reflect"
)
//var (
// ErrInvalid = errors.New("invalid argument") // methods on File will return this error when the receiver is nil
// ErrPermission = errors.New("permission denied")
// ErrExist = errors.New("file already exists")
// ErrNotExist = errors.New("file does not exist")
//)
// Locker is implemented by configuration values that need exclusive access;
// when a value implements it, Pantry locks around (un)marshaling
// (see UnmarshalWith / MarshalWith).
type Locker interface {
	Lock()
	Unlock()
}
// Tag options: whether a struct tag is consulted and under which name.
type Tag struct {
	Use  bool   // Use tag
	Name string // Name of tag
}
// Tags options: the per-source struct tags consulted while processing config
// (default names are set in Pantry.Init: "pantry", "flag", "env", "default", "desc").
type Tags struct {
	Config      Tag // tag mapping fields to config keys
	Flag        Tag // tag mapping fields to command-line flags
	Env         Tag // tag mapping fields to environment variables
	Default     Tag // tag supplying default values
	Description Tag // tag supplying a description (disabled by default)
}
// Config processing options.
// NOTE(review): "Enviropment" is a misspelling of "Environment", but both the
// field and its type are exported, so renaming would break callers.
type Options struct {
	Flags         Flags
	Enviropment   Enviropment
	Tags          Tags
	DefaultFormat string // fallback format when lookup fails (see searchFormat)
}
// Pantry is used to load config data from different sources.
type Pantry struct {
	Locations *Locations // search paths used to locate config files
	Options   *Options   // processing options (flags, environment, tags, default format)
}
// Init Pantry: sets up search locations and default processing options,
// returning the receiver so it can be chained (see NewPantry).
func (p *Pantry) Init(applicationName string, locations ...string) *Pantry {
	p.Locations = NewLocations(applicationName, locations...)
	p.Options = &Options{
		Flags:       Flags{Using: FlagsDontUse},
		Enviropment: Enviropment{Use: true, Prefix: ""},
		// Default tag names; the Description tag is parsed under "desc" but
		// disabled (Use=false) out of the box.
		Tags: Tags{
			Config:      Tag{true, "pantry"},
			Flag:        Tag{true, "flag"},
			Env:         Tag{true, "env"},
			Default:     Tag{true, "default"},
			Description: Tag{false, "desc"},
		},
		DefaultFormat: "",
	}
	p.Options.Flags.Init(nil, nil)
	return p
}
// NewPantry creates new Pantry, fully initialized for the application.
func NewPantry(applicationName string, locations ...string) *Pantry {
	p := &Pantry{}
	return p.Init(applicationName, locations...)
}
// AddLocation adds a searching location built by joining the given path parts.
func (p *Pantry) AddLocation(locationParts ...string) {
	p.Locations.AddJoin(locationParts...)
}
// AddLocations adds searching locations, each taken as a complete path.
func (p *Pantry) AddLocations(locations ...string) {
	p.Locations.Add(locations...)
}
// LocatePath looks for the file in previously added locations.
// A config path given on the command line overrides the requested filename.
func (p *Pantry) LocatePath(filename string) (string, error) {
	if override := p.Options.Flags.GetConfigPath(); override != "" {
		return p.Locations.LocatePath(override)
	}
	return p.Locations.LocatePath(filename)
}
// Locate looks for the file in previously added locations and returns a Box.
// A config path given on the command line overrides the requested filename.
func (p *Pantry) Locate(filename string) (Box, error) {
	if override := p.Options.Flags.GetConfigPath(); override != "" {
		return p.Locations.Locate(override)
	}
	return p.Locations.Locate(filename)
}
// searchFormat resolves a format by name, falling back to
// Options.DefaultFormat when the lookup fails and a default is configured.
func (p *Pantry) searchFormat(s string) (*ConfigFormat, error) {
	f, err := Formats.Search(s)
	if err != nil && p.Options.DefaultFormat != "" {
		return Formats.Search(p.Options.DefaultFormat)
	}
	return f, err
}
// UnmarshalWith unmarshals data by "unmarshaler" and applies environment
// variables and command line flags (via processTags). If v implements
// Locker it is held locked for the duration.
func (p *Pantry) UnmarshalWith(b []byte, v interface{}, unmarshaler UnmarshalFunc) error {
	if l, ok := v.(Locker); ok {
		l.Lock()
		defer l.Unlock()
	}
	// Reset v so values from a previous load do not survive.
	reflect.Clear(v)
	if p.Options.Tags.Default.Use {
		// First pass: unmarshal, then fill "default"-tagged fields; the
		// second unmarshal below lets explicit config values override those
		// defaults. NOTE(review): this parses b twice — looks intentional,
		// but worth confirming.
		if err := unmarshaler(b, v); err != nil {
			return ConfigParseError{err}
		}
		if err := processDefaultValues(v, p.Options); err != nil {
			return err
		}
	}
	if err := unmarshaler(b, v); err != nil {
		return ConfigParseError{err}
	}
	return processTags(v, p.Options)
}
// MarshalWith marshals v with "marshaler", wrapping any failure in
// ConfigEncodeError. If v implements Locker it is held locked for the duration.
func (p *Pantry) MarshalWith(v interface{}, marshaler MarshalFunc) ([]byte, error) {
	if l, ok := v.(Locker); ok {
		l.Lock()
		defer l.Unlock()
	}
	data, err := marshaler(v)
	if err != nil {
		return nil, ConfigEncodeError{err}
	}
	return data, nil
}
// Unmarshal unmarshals data in the given format and applies environment
// variables and command line flags.
func (p *Pantry) Unmarshal(b []byte, v interface{}, format string) error {
	f, err := p.searchFormat(format)
	if err == nil {
		return p.UnmarshalWith(b, v, f.Unmarshal)
	}
	return err
}
// Marshal marshals v into the named format (falling back to the configured
// default format when the named one is unknown).
func (p *Pantry) Marshal(v interface{}, format string) ([]byte, error) {
	f, err := p.searchFormat(format)
	if err == nil {
		return p.MarshalWith(v, f.Marshal)
	}
	return nil, err
}
// Decode reads everything from r and unmarshals it as the given format.
func (p *Pantry) Decode(r io.Reader, v interface{}, format string) error {
	if data, err := ioutil.ReadAll(r); err == nil {
		return p.Unmarshal(data, v, format)
	} else {
		return err
	}
}
// Encode marshals v as the given format and writes the bytes to w.
func (p *Pantry) Encode(w io.Writer, v interface{}, format string) error {
	data, err := p.Marshal(v, format)
	if err == nil {
		_, err = w.Write(data)
	}
	return err
}
// UnBoxWith reads the box contents and unmarshals them with "unmarshaler".
func (p *Pantry) UnBoxWith(box Box, v interface{}, unmarshaler UnmarshalFunc) error {
	data, err := box.Get()
	if err == nil {
		return p.UnmarshalWith(data, v, unmarshaler)
	}
	return err
}
// BoxWith marshals v with "marshaler" and stores the bytes in the box.
func (p *Pantry) BoxWith(box Box, v interface{}, marshaler MarshalFunc) error {
	data, err := p.MarshalWith(v, marshaler)
	if err == nil {
		return box.Set(data)
	}
	return err
}
// LoadWith locates the config file for path, unmarshals it with "unmarshaler"
// and returns the resolved path of the box that was read.
// NOTE(review): a nil box — not a non-nil error — is the failure signal here;
// confirm against Locate's contract.
func (p *Pantry) LoadWith(path string, v interface{}, unmarshaler UnmarshalFunc) (string, error) {
	box, err := p.Locate(path)
	if box != nil {
		return box.Path(), p.UnBoxWith(box, v, unmarshaler)
	}
	return "", err
}
// SaveWith locates the destination box for path, marshals v with "marshaler"
// into it, and returns the resolved path of the box that was written.
func (p *Pantry) SaveWith(path string, v interface{}, marshaler MarshalFunc) (string, error) {
	box, err := p.Locate(path)
	if box != nil {
		return box.Path(), p.BoxWith(box, v, marshaler)
	}
	return "", err
}
// UnBox unmarshals the box contents, deriving the format from the box path.
func (p *Pantry) UnBox(box Box, v interface{}) error {
	data, err := box.Get()
	if err == nil {
		return p.Unmarshal(data, v, box.Path())
	}
	return err
}
// Box marshals v (format derived from the box path) and stores it in the box.
func (p *Pantry) Box(box Box, v interface{}) error {
	data, err := p.Marshal(v, box.Path())
	if err == nil {
		return box.Set(data)
	}
	return err
}
// UnBoxAs unmarshals the box contents in the given format; an empty format
// falls back to the box path.
func (p *Pantry) UnBoxAs(box Box, v interface{}, format string) error {
	data, err := box.Get()
	if err != nil {
		return err
	}
	effective := format
	if effective == "" {
		effective = box.Path()
	}
	return p.Unmarshal(data, v, effective)
}
// BoxAs marshals v in the given format (or the box path when format is
// empty) and stores the result in the box.
func (p *Pantry) BoxAs(box Box, v interface{}, format string) error {
	effective := format
	if effective == "" {
		effective = box.Path()
	}
	data, err := p.Marshal(v, effective)
	if err != nil {
		return err
	}
	return box.Set(data)
}
// LoadAs locates the config file for path, unmarshals it in the given format
// (empty format falls back to the box path) and, when requested via options,
// starts watching the box for changes — reloading into v and/or notifying the
// Watcher callback.
func (p *Pantry) LoadAs(path string, v interface{}, format string, opts ...func(*LoadOptions)) (Box, error) {
	box, err := p.Locate(path)
	if box == nil {
		return nil, err
	}
	lo := LoadOptions{Context: context.Background()}
	for _, opt := range opts {
		opt(&lo)
	}
	err = p.UnBoxAs(box, v, format)
	if err != nil {
		return nil, err
	}
	if lo.Watcher != nil || lo.Reload {
		err := box.WatchContext(lo.Context, func(err error) {
			if err == nil {
				if lo.Reload {
					err = p.UnBoxAs(box, v, format)
				}
			}
			// BUG FIX: this was `for lo.Watcher != nil { ... }`, which spun
			// forever calling the watcher; notify exactly once per event.
			if lo.Watcher != nil {
				lo.Watcher(err)
			}
		})
		if err != nil {
			return nil, err
		}
	}
	return box, nil
}
// Load is LoadAs with an empty format, so the format is derived from the
// located box's path.
func (p *Pantry) Load(path string, v interface{}, opt ...func(*LoadOptions)) (Box, error) {
	return p.LoadAs(path, v, "", opt...)
}
// Save locates the destination box for path and stores v in it, deriving the
// format from the box path.
func (p *Pantry) Save(path string, v interface{}) (Box, error) {
	box, err := p.Locate(path)
	if box != nil {
		return box, p.Box(box, v)
	}
	return nil, err
}
// SaveAs locates the destination box for path and stores v in it using the
// given format (empty format falls back to the box path).
func (p *Pantry) SaveAs(path string, v interface{}, format string) (Box, error) {
	box, err := p.Locate(path)
	if box != nil {
		return box, p.BoxAs(box, v, format)
	}
	return nil, err
}
|
set -e -x
# Build the pip package on Windows without CUDA support.
export TF_NEED_CUDA=0
# Point Bazel at the MSVC 2019 toolchain.
export BAZEL_VC="C:/Program Files (x86)/Microsoft Visual Studio/2019/Enterprise/VC/"
python -m pip install --default-timeout=1000 wheel setuptools tensorflow==$TF_VERSION
# Run the test suite before configuring/building the wheel.
bash ./tools/testing/build_and_run_tests.sh
python configure.py
# NOTE(review): --no-cache is not a documented `bazel build` flag — confirm
# this bazel version / .bazelrc actually accepts it.
bazel.exe build --no-cache \
    --noshow_progress \
    --noshow_loading_progress \
    --verbose_failures \
    --test_output=errors \
    build_pip_pkg
bazel-bin/build_pip_pkg wheelhouse $NIGHTLY_FLAG
|
package com.example.ecommerce;
import android.content.DialogInterface;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.view.MenuItem;
import android.view.View;
import android.view.Menu;
import android.widget.Toast;
import com.google.android.material.floatingactionbutton.FloatingActionButton;
import com.google.android.material.snackbar.Snackbar;
import com.google.android.material.navigation.NavigationView;
import androidx.annotation.NonNull;
import androidx.appcompat.app.ActionBarDrawerToggle;
import androidx.appcompat.app.AlertDialog;
import androidx.fragment.app.Fragment;
import androidx.fragment.app.FragmentManager;
import androidx.fragment.app.FragmentTransaction;
import androidx.navigation.NavController;
import androidx.navigation.Navigation;
import androidx.navigation.ui.AppBarConfiguration;
import androidx.navigation.ui.NavigationUI;
import androidx.drawerlayout.widget.DrawerLayout;
import androidx.appcompat.app.AppCompatActivity;
import androidx.appcompat.widget.Toolbar;
import io.paperdb.Paper;
public class HomeUser extends AppCompatActivity implements NavigationView.OnNavigationItemSelectedListener {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_home_user);
Toolbar toolbar = findViewById(R.id.toolbar);
DrawerLayout drawer = (DrawerLayout) findViewById(R.id.drawer_layout);
ActionBarDrawerToggle toggle = new ActionBarDrawerToggle(
this, drawer, toolbar, R.string.navigation_drawer_open, R.string.navigation_drawer_close);
drawer.addDrawerListener(toggle);
toggle.syncState();
NavigationView navigationView = (NavigationView) findViewById(R.id.nav_view);
navigationView.setNavigationItemSelectedListener(HomeUser.this);
showHomeFragment();
}
void showHomeFragment(){
FragmentManager fm = getSupportFragmentManager();
FragmentTransaction ft = fm.beginTransaction();
ft.replace(R.id.userHomecontainer, new HomeUserFragment()).addToBackStack("");
ft.commit();
}
@Override
public boolean onNavigationItemSelected(@NonNull MenuItem item) {
switch (item.getItemId()) {
case R.id.nav_home:
FragmentManager fm = getSupportFragmentManager();
FragmentTransaction ft = fm.beginTransaction();
ft.replace(R.id.userHomecontainer, new HomeUserFragment()).addToBackStack("");
ft.commit();
break;
case R.id.nav_favorites:
break;
case R.id.nav_orders:
break;
case R.id.nav_share_app:
Intent share = new Intent(Intent.ACTION_SEND);
share.setType("text/plain");
String shareBody = "https//play.google.com/store/apps/details ? id=package com.example.userpart";
String shareSub = "Share App";
share.putExtra(Intent.EXTRA_SUBJECT, shareSub);
share.putExtra(Intent.EXTRA_TEXT, shareBody);
startActivity(Intent.createChooser(share, "Share"));
break;
case R.id.nav_send_notes:
String txtEmail = "Hello Developers\n My Opinion is ";
Intent sendNotes = new Intent(Intent.ACTION_SEND);
sendNotes.setData(Uri.parse("mailto: "));
sendNotes.setType("message/rfc822");
sendNotes.putExtra(Intent.EXTRA_EMAIL,"<EMAIL>");
sendNotes.putExtra(Intent.EXTRA_SUBJECT, "Bliss");
sendNotes.putExtra(Intent.EXTRA_TEXT, txtEmail);
startActivity(sendNotes);
break;
case R.id.nav_rate_us:
break;
case R.id.nav_about_us:
FragmentManager fm1 = getSupportFragmentManager();
FragmentTransaction ft1 = fm1.beginTransaction();
ft1.replace(R.id.userHomecontainer, new AboutUsFragment()).addToBackStack("");
ft1.commit();
break;
case R.id.nav_logout:
AlertDialog.Builder builder = new AlertDialog.Builder(HomeUser.this);
builder.setMessage("Are you sure you want to logout?")
.setPositiveButton("Yes", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
Paper.book().destroy();
Intent logout = new Intent(HomeUser.this,MainActivity.class);
startActivity(logout);
finish();
}
})
.setNegativeButton("Cancel", null);
AlertDialog alertDialog = builder.create();
alertDialog.show();
break;
default:
// fragment = GalleryFragment.class;
}
return false;
}
} |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.sdb.test.misc;
import static org.junit.Assert.assertFalse ;
import static org.junit.Assert.assertNotNull ;
import static org.junit.Assert.assertTrue ;
import java.sql.Connection ;
import org.apache.jena.assembler.Assembler ;
import org.apache.jena.query.Dataset ;
import org.apache.jena.query.DatasetFactory ;
import org.apache.jena.rdf.model.Literal ;
import org.apache.jena.rdf.model.Model ;
import org.apache.jena.rdf.model.Property ;
import org.apache.jena.rdf.model.Resource ;
import org.apache.jena.sdb.SDBFactory ;
import org.apache.jena.sdb.Store ;
import org.apache.jena.sdb.StoreDesc ;
import org.apache.jena.sdb.store.DatasetGraphSDB ;
import org.apache.jena.sparql.core.DatasetGraph ;
import org.apache.jena.util.FileManager ;
import org.junit.Test ;
public class TestAssembler
{
    static final String dir = "testing/Assembler/" ;

    @Test public void dataset_1()
    {
        Dataset ds = DatasetFactory.assemble(dir+"dataset.ttl") ;
        assertNotNull(ds) ;
        // Check it will be dispatched to SDB
        DatasetGraph dsg = ds.asDatasetGraph() ;
        assertTrue( dsg instanceof DatasetGraphSDB ) ;
    }

    @Test public void connection_1()
    {
        Connection jdbc = SDBFactory.createSqlConnection(dir+"connection.ttl") ;
        assertNotNull(jdbc) ;
    }

    @Test public void store_1()
    {
        Store store = SDBFactory.connectStore(dir+"store.ttl") ;
        assertNotNull(store) ;
    }

    @Test public void model_1()
    {
        Model assem = FileManager.get().loadModel(dir+"graph-assembler.ttl") ;
        Resource x = assem.getResource("http://example/test#graphDft") ;
        // Model for default graph
        Model model = (Model)Assembler.general.open(x) ;
        assertNotNull(model) ;
    }

    @Test public void model_2()
    {
        Model assem = FileManager.get().loadModel(dir+"graph-assembler.ttl") ;
        Resource x = assem.getResource("http://example/test#graphNamed") ;
        // Model for named graph
        Model model = (Model)Assembler.general.open(x) ;
        assertNotNull(model) ;
    }

    /** Assembles a dataset from the model, formats its store, and returns it. */
    private Store create(Model assem)
    {
        // Create a store and format
        Dataset ds = DatasetFactory.assemble(assem) ;
        Store store = ((DatasetGraphSDB)ds.asDatasetGraph()).getStore() ;
        store.getTableFormatter().create() ;
        return store ;
    }

    @Test public void model_3()
    {
        Model assem = FileManager.get().loadModel(dir+"graph-assembler.ttl") ;
        Resource xDft = assem.getResource("http://example/test#graphDft") ;
        Resource xNamed = assem.getResource("http://example/test#graphNamed") ;
        Store store = create(assem) ;
        Model model1 = (Model)Assembler.general.open(xDft) ;
        Model model2 = (Model)Assembler.general.open(xNamed) ;
        // FIX: assertNotNull(model1 != model2) always passed — a boxed Boolean
        // is never null. Assert the two resources yield distinct model objects.
        assertTrue(model1 != model2) ;
    }

    @Test public void model_4()
    {
        Model assem = FileManager.get().loadModel(dir+"graph-assembler.ttl") ;
        Resource xDft = assem.getResource("http://example/test#graphDft") ;
        Resource xNamed = assem.getResource("http://example/test#graphNamed") ;
        Store store = create(assem) ;
        Model model1 = (Model)Assembler.general.open(xDft) ;
        Model model2 = (Model)Assembler.general.open(xNamed) ;
        // Check they are not connected to the same place in the store
        Resource s = model1.createResource() ;
        Property p = model1.createProperty("http://example/p") ;
        Literal o = model1.createLiteral("foo") ;
        model1.add(s,p,o) ;
        assertTrue(model1.contains(s, p, o)) ;
        assertTrue(model1.size() == 1 ) ;
        assertTrue(model2.size() == 0 ) ;
        assertFalse(model1.isIsomorphicWith(model2)) ;
    }

    @Test public void model_5()
    {
        Model assem = FileManager.get().loadModel(dir+"graph-assembler.ttl") ;
        Resource xDft = assem.getResource("http://example/test#graphDft") ;
        Store store = create(assem) ;
        // Default graph: Check they are connected to the same place in the store
        Model model2 = (Model)Assembler.general.open(xDft) ;
        Model model3 = (Model)Assembler.general.open(xDft) ;
        Resource s = model2.createResource() ;
        Property p = model2.createProperty("http://example/p") ;
        // Check two models connected to the same graph
        Literal o2 = model2.createLiteral("xyz") ;
        model2.add(s,p,o2) ;
        assertTrue(model3.contains(s,p,o2)) ;
    }

    @Test public void model_6()
    {
        Model assem = FileManager.get().loadModel(dir+"graph-assembler.ttl") ;
        Resource xNamed = assem.getResource("http://example/test#graphNamed") ;
        Store store = create(assem) ;
        // Named graph: Check they are connected to the same place in the store
        Model model2 = (Model)Assembler.general.open(xNamed) ;
        Model model3 = (Model)Assembler.general.open(xNamed) ;
        Resource s = model2.createResource() ;
        Property p = model2.createProperty("http://example/p") ;
        // Check two models connected to the same graph
        Literal o2 = model2.createLiteral("xyz") ;
        model2.add(s,p,o2) ;
        assertTrue(model3.contains(s,p,o2)) ;
    }

    @Test public void pool_1()
    {
        // Smoke test: connection + store description assemble without throwing.
        // Connection
        Connection conn = SDBFactory.createSqlConnection(dir+"connection.ttl") ;
        // Store
        StoreDesc desc = StoreDesc.read(dir+"dataset2.ttl") ;
        Store store = SDBFactory.connectStore(conn, desc) ;
    }
}
|
#!/bin/bash
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this
# software and associated documentation files (the "Software"), to deal in the Software
# without restriction, including without limitation the rights to use, copy, modify,
# merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
# PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# Ensure jq is available for parsing CloudFormation output JSON.
if ! [ -x "$(command -v jq)" ]; then
echo "Installing jq"
sudo yum install -y jq
fi
echo "Setting environment variables"
MY_AWS_REGION=$(aws configure list | grep region | awk '{print $2}')
echo "AWS Region = $MY_AWS_REGION"
# Collect all stack outputs once, then pick individual values out of them.
STACK_OUTPUTS=$(aws cloudformation describe-stacks | jq -r '.Stacks[] | select(.Outputs != null) | .Outputs[]')
API_GATEWAY_URL=$(echo $STACK_OUTPUTS | jq -r 'select(.OutputKey == "ApiGatewayEndpointLab2") | .OutputValue')
echo "API Gateway Invoke URL = $API_GATEWAY_URL"
S3_WEBSITE_BUCKET=$(echo $STACK_OUTPUTS | jq -r 'select(.OutputKey == "WebsiteS3Bucket") | .OutputValue')
echo "S3 website bucket = $S3_WEBSITE_BUCKET"
CLOUDFRONT_DISTRIBUTION=$(echo $STACK_OUTPUTS | jq -r 'select(.OutputKey == "CloudFrontDistributionDNS") | .OutputValue')
echo "CloudFront distribution URL = $CLOUDFRONT_DISTRIBUTION"
echo
# Abort early when any required stack output is missing.
if [ -z "$API_GATEWAY_URL" ] || [ -z "$S3_WEBSITE_BUCKET" ] || [ -z "$CLOUDFRONT_DISTRIBUTION" ]; then
echo "Missing required environment variables. Please make sure the lab2 CloudFormation stack has completed successfully."
exit 1
fi
# Edit src/shared/config.js in the ReactJS codebase
# set base_url to the REST API stage v1 invoke URL
echo "Configuring React to talk to API Gateway"
cd /home/ec2-user/environment/saas-factory-serverless-workshop/lab2/client
# Comment out the env-var based base_url line and insert one with the real URL.
sed -i -r -e 's|(^\s+)(base_url: )(process.env.REACT_APP_BASE_URL,)|//\1\2\3\n\1\2"'"${API_GATEWAY_URL}"'"|g' src/shared/config.js
echo
echo "Installing NodeJS dependencies"
npm install
echo
echo "Building React app"
npm run build
# Setting the cache control metadata so that we don't have to invalidate
# (and wait for) the CloudFront distribution. You wouldn't do this in real life.
echo
echo "Uploading React app to S3 website bucket"
cd build
aws s3 sync --delete --cache-control no-store . s3://$S3_WEBSITE_BUCKET
echo
echo "Access your website at..."
echo $CLOUDFRONT_DISTRIBUTION
echo
|
#!/usr/bin/env bash
# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -x
set -e
set -o pipefail
# Ensure docker and containers config directories exist for root.
if [ ! -d "/root/.docker" ]; then
mkdir -p /root/.docker
fi
if [ ! -d "/root/.config/containers" ]; then
mkdir -p /root/.config/containers
fi
# Install the registry auth config and image policy shipped with the release scripts.
mv release/scripts/docker-ecr-config.json /root/.docker/config.json
mv release/scripts/policy.json /root/.config/containers/policy.json
# Authenticate git against CodeCommit using the AWS credential helper.
git config --global credential.helper '!aws codecommit credential-helper $@'
git config --global credential.UseHttpPath true
|
#!/usr/bin/env bash
# Run stage two of training through the generic stage runner.
export CONFIG_PREFIX="stage_two_"
export NUMBER_OF_TRAININGS=5
# cd to this script's directory; quoted $(...) form so paths containing
# spaces work (the original unquoted backtick substitution broke on them).
cd "$(dirname "${BASH_SOURCE[0]}")"
./run_stage_generic.sh
|
<filename>frontend/app/scripts/controllers/profile.js<gh_stars>0
/**
* @ngdoc function
* @name foodCircle.controller:ProfileCtrl
* @description
* # ProfileCtrl
* Controller of the foodCircle
*/
/*global
angular
*/
(function () {
    'use strict';

    // Profile screen controller: lists the current user's events/recipes and
    // routes item clicks to the corresponding edit states.
    angular.module('foodCircle').controller('ProfileCtrl', ['$log', '$state', 'authService', function ($log, $state, authService) {
        var vm = this;

        // Query for the event list: only events owned by the current user,
        // most recently updated first.
        vm.eventListQuery = {where: {eventowner: authService.currentUser().id}, sort: 'updatedAt DESC'};
        vm.eventOwnerId = authService.currentUser().id;

        // Navigate to the edit view for the clicked event (by id).
        vm.onEventItemClick = function (event) {
            $state.go('main.event.edit', {id: event.id});
        };

        // Navigate to the edit view for the clicked recipe (by name).
        vm.onRecipeItemClick = function (recipe) {
            $state.go('main.myrecipes.edit', {name: recipe.name});
        };
    }]);
}());
|
/// <summary>An employee with a fixed name and an experience counter.</summary>
public class Employee
{
    /// <summary>Employee's name; immutable after construction.</summary>
    public string Name { get; }

    /// <summary>Years of experience; mutable only through <see cref="GainExperience"/>.</summary>
    public int Experience { get; private set; }

    /// <summary>Creates an employee with an initial experience level.</summary>
    public Employee(string name, int experience)
    {
        Name = name;
        Experience = experience;
    }

    /// <summary>Adds the given number of years to the employee's experience.</summary>
    public void GainExperience(int years)
    {
        // Ensure that the experience can only be increased from within the class
        Experience += years;
    }
}
<reponame>jason0539/Android-Plugin-Framework
package no.agens.depth.lib.tween.interpolators;
import android.animation.TimeInterpolator;
/**
 * Exponential ease-in/ease-out time interpolator: the first half of the
 * animation accelerates exponentially toward 0.5, the second half mirrors it
 * to decelerate toward 1.
 *
 * Created by danielzeller on 09.04.15.
 */
public class ExpoInOut implements TimeInterpolator {
    @Override public float getInterpolation(float t) {
        // Exact endpoints: avoids pow() rounding at t = 0 and t = 1.
        if (t == 0) return 0;
        if (t == 1) return 1;
        // Remap t from [0,1] to [0,2]. First half: 0.5 * 2^(10(t-1)).
        if ((t *= 2) < 1) return 0.5f * (float) Math.pow(2, 10 * (t - 1));
        // Second half mirrors the curve (note the intentional --t side effect).
        return 0.5f * (-(float) Math.pow(2, -10 * --t) + 2);
    }
}
def most_common_value(dict, key):
    """Return the most frequent value stored under ``key`` across the records
    of ``dict`` (a mapping of id -> record dict).

    Ties are broken arbitrarily (whatever ``max`` over a set yields first).
    """
    # NOTE: the parameter shadows the built-in ``dict``; the name is kept for
    # compatibility with any existing keyword-argument callers.
    values = [record[key] for record in dict.values()]
    return max(set(values), key=values.count)
# Sample data: ids mapped to records; 'John' is the most frequent name.
# (Note: the variable shadows the built-in ``dict``.)
dict = {
    'a': {'name': 'John', 'color': 'blue'},
    'b': {'name': 'Bob', 'color': 'green'},
    'c': {'name': 'John', 'color': 'blue'},
    'd': {'name': 'Dan', 'color': 'red'},
    'e': {'name': 'John', 'color': 'blue'}
}
# Expected output: John
print(most_common_value(dict, 'name'))
# Download the AIM19 Extreme-SR training/validation archives into dataset/.
mkdir dataset
base_url=http://data.vision.ee.ethz.ch/timofter/AIM19ExtremeSR
for archive in \
    trainHR_001to200 trainHR_201to400 trainHR_401to600 trainHR_601to800 \
    trainHR_801to1000 trainHR_1001to1200 trainHR_1201to1400 trainHR_1401to1500 \
    validationLR
do
    wget "$base_url/$archive.zip" -P dataset/
done
|
module JokerAPI
  module Operations
    module DomainGetProperty
      # Fetches a single property of a domain via the Joker
      # "domain-get-property" request. The response body is a
      # newline-separated list of "name: value" pairs, which is parsed into
      # a hash before the requested property's value is returned.
      #
      # @param [String] domain Domain name
      # @param [String] key Property name
      # @return [String, nil] the property value, or nil when absent
      def domain_get_property(domain, key)
        response = perform_request('domain-get-property', {:domain => domain, :pname => key})
        properties = response.body.split("\n").inject({}) do |hash, property|
          # Split only on the first ": " so values may themselves contain ": ".
          # NOTE(review): a line without ": " makes the splatted store raise
          # ArgumentError — confirm the API never emits such lines.
          hash.store(*property.split(": ", 2))
          hash
        end
        properties[key]
      end
    end
  end
end
|
<gh_stars>1-10
//package com.touch.air.mall.gateway.config;
//
//import com.alibaba.csp.sentinel.adapter.gateway.sc.callback.BlockRequestHandler;
//import com.alibaba.csp.sentinel.adapter.gateway.sc.callback.GatewayCallbackManager;
//import com.alibaba.fastjson.JSON;
//import org.springframework.context.annotation.Configuration;
//import org.springframework.web.reactive.function.server.ServerResponse;
//import org.springframework.web.server.ServerWebExchange;
//import reactor.core.publisher.Mono;
//
//import java.util.HashMap;
//import java.util.Map;
//
///**
// * @author: bin.wang
// * @date: 2021/3/12 14:05
// */
//@Configuration
//public class SentinelGatewayConfig {
//
// public SentinelGatewayConfig(){
// GatewayCallbackManager.setBlockHandler(new BlockRequestHandler() {
// //网关限流触发
// @Override
// public Mono<ServerResponse> handleRequest(ServerWebExchange serverWebExchange, Throwable throwable) {
// Map<Integer, String> map = new HashMap<>();
// map.put(400, "网关限流触发");
// Mono<ServerResponse> body = ServerResponse.ok().body(Mono.just(JSON.toJSONString(map)), String.class);
// return body;
// }
// });
// }
//}
|
import React from "react";
import { Grid } from "@material-ui/core";
// styles
import useStyles from "./styles";
// components
import PageTitle from "../../components/PageTitle";
import Widget from "../../components/Widget";
import { Typography } from "../../components/Wrappers";
// Placeholder page for the image classifier: the widget markup is commented
// out, so only the page title (inside an otherwise empty grid) renders.
export default function ImageClassifier() {
  // `classes` is only referenced by the commented-out widget below; the
  // useStyles() call itself is kept unchanged.
  var classes = useStyles();

  return (
    <>
      <PageTitle title="Image Classifier" />
      <Grid container spacing={4}>
        <Grid item xs={12} md={6}>
          {/* <Widget title="Headings" disableWidgetMenu>
            <div className={classes.dashedBorder}>
            </div>
          </Widget> */}
        </Grid>
      </Grid>
    </>
  );
}
#!/bin/bash
# Lists the contents of a user-supplied directory; must be run as root.
# FIXES: use numeric -ne instead of string != for $EUID, and exit non-zero
# on both failure paths so callers can detect errors.
if [[ $EUID -ne 0 ]]; then
    echo "Error: This script requires root privileges to run." >&2
    exit 1
fi
read -p "Enter the directory path: " directory_path
if [ -d "$directory_path" ]; then
    echo "Contents of $directory_path:"
    ls "$directory_path"
else
    echo "Error: Invalid directory path." >&2
    exit 1
fi
package br.com.zup.mercadolivre.detalheproduto;
import br.com.zup.mercadolivre.config.exception.ProdutoNotFoundException;
import br.com.zup.mercadolivre.opiniao.OpiniaoRepository;
import br.com.zup.mercadolivre.pergunta.PerguntaRepository;
import br.com.zup.mercadolivre.produto.CaracteristicaRepository;
import br.com.zup.mercadolivre.produto.Produto;
import br.com.zup.mercadolivre.produto.ProdutoRepository;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RestController;
import javax.transaction.Transactional;
@RestController
public class DetalhesProdutoController {

    // Repositories needed to assemble the product-detail DTO.
    private CaracteristicaRepository caracteristicaRepository;
    private ProdutoRepository produtoRepository;
    private ImagemProdutoRepository imagemRepository;
    private PerguntaRepository perguntaRepository;
    private OpiniaoRepository opiniaoRepository;

    /** Constructor injection of every repository used by the detail endpoint. */
    public DetalhesProdutoController(CaracteristicaRepository caracteristicaRepository, ProdutoRepository produtoRepository,
                                     ImagemProdutoRepository imagemRepository, PerguntaRepository perguntaRepository,
                                     OpiniaoRepository opiniaoRepository) {
        this.caracteristicaRepository = caracteristicaRepository;
        this.produtoRepository = produtoRepository;
        this.imagemRepository = imagemRepository;
        this.perguntaRepository = perguntaRepository;
        this.opiniaoRepository = opiniaoRepository;
    }

    /**
     * GET api/v1/produtos/{id}/detalhe-produto — loads the product by id and
     * builds its detail DTO from the injected repositories.
     *
     * @throws ProdutoNotFoundException when no product exists with the given id
     */
    @GetMapping(path = "api/v1/produtos/{id}/detalhe-produto")
    @Transactional
    public DetalhesProdutoDto getDetalhes(@PathVariable ("id") Long id) throws ProdutoNotFoundException {
        Produto produto = produtoRepository.findById(id).orElseThrow(()-> new ProdutoNotFoundException(id));
        DetalhesProdutoDto dto = new DetalhesProdutoDto(produto, caracteristicaRepository, imagemRepository, perguntaRepository, opiniaoRepository);
        return dto;
    }
}
|
#!/bin/bash
# Abort on the first failing command (download, unzip, build or install).
set -e

# install git-crypt and create a sim link
# Builds git-crypt from a pinned upstream commit for reproducibility and
# installs the binary system-wide.
pushd /tmp
wget https://github.com/AGWA/git-crypt/archive/ccdcc76f8e1a639847a8accd801f5a284194e43f.zip -O git-crypt.zip
unzip git-crypt.zip
cd git-crypt-ccdcc76f8e1a639847a8accd801f5a284194e43f
make
# Install the freshly built binary onto the PATH (requires sudo).
sudo install git-crypt /usr/local/bin
popd
|
Pod::Spec.new do |s|
  s.name             = 'DevPlayer'
  s.version          = '0.0.1'
  # Fix: the summary was a template leftover referencing "Network" instead of
  # describing this pod.
  s.summary          = 'Player and AVFoundation helpers shared across apps as a private pod.'
  s.swift_version    = '5.0'
  s.description      = "this is a group of extensions for apps, used as private pod, it contains extensions for Foundation, UIKit, AVFoundation,...etc you could add to these extensions whatever you want while you develop your app"
  s.homepage         = 'https://github.com/abozaid-ibrahim/DevPods.git'
  s.license          = { :type => 'MIT', :file => 'LICENSE' }
  s.author           = { 'abozaid-ibrahim' => '<EMAIL>' }
  s.source           = { :git => 'https://github.com/abozaid-ibrahim/DevPods.git', :tag => s.version.to_s }
  s.requires_arc     = true
  s.ios.deployment_target = '11.0'
  s.source_files = 'DevPlayer/*.{swift,h,m}'
  s.dependency 'RxSwift', '6.0.0'
  s.dependency 'RxCocoa', '6.0.0'
end
|
// Barrel file: re-export the menu component so consumers can import it from
// the package root instead of the deep path.
export * from './z-menu-material.component';
|
# SAM template: one Lambda function behind an API Gateway GET /hello route.
AWSTemplateFormatVersion: '2010-09-09'
Transform: AWS::Serverless-2016-10-31
Description: "My SAM application"

Parameters:
  # Deployment environment label, passed to the function as ENV_NAME.
  EnvironmentName:
    Type: String

Globals:
  Function:
    # Default timeout in seconds for every function in this template.
    Timeout: 3

Resources:
  ServerlessFunction:
    Type: AWS::Serverless::Function
    Properties:
      CodeUri: build/
      Handler: index.handler
      # NOTE(review): nodejs12.x is past end-of-support on AWS Lambda —
      # confirm and migrate to a currently supported Node.js runtime.
      Runtime: nodejs12.x
      Environment:
        Variables:
          ENV_NAME: !Ref EnvironmentName
      Events:
        HelloWorld:
          Type: Api
          Properties:
            Path: /hello
            Method: get

Outputs:
  ServerlessFunction:
    Description: URL for application
    Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/hello"
#!/bin/bash
#
# Create the gitlab kubernetes resources from the *.yml manifests in the
# current directory, unless the gitlab service already exists.
# Assuming you have a kubernetes cluster and kubectl installed.
#
if kubectl get svc | grep gitlab > /dev/null 2>&1 ;
then
    echo "gitlab service already exists."
else
    for i in *.yml
    do
        # Fix: with no .yml files present the unexpanded glob was passed
        # literally to kubectl; skip it. Also quote the filename so paths
        # with spaces survive word splitting.
        [ -e "$i" ] || continue
        kubectl create -f "$i"
    done
fi
# Show the resulting cluster state.
kubectl get pods
kubectl get svc
|
#!/usr/bin/env bash
# Specialize (slash) the b2sum bitcode with OCCAM, optionally linking against
# a musllvm-built libc.
# Make sure we exit if there is a failure
set -e

function usage() {
    echo "Usage: $0 [--with-musllvm] [--disable-inlining] [--ipdse] [--ai-dce] [--use-pointer-analysis] [--inter-spec VAL] [--intra-spec VAL] [--enable-config-prime] [--help]"
    echo "      VAL=none|aggressive|nonrec-aggressive|onlyonce (default)"
}

# default values
INTER_SPEC="onlyonce"
INTRA_SPEC="onlyonce"
OPT_OPTIONS=""
USE_MUSLLVM="false"

POSITIONAL=()
while [[ $# -gt 0 ]]
do
    key="$1"
    case $key in
        -inter-spec|--inter-spec)
            INTER_SPEC="$2"
            shift # past argument
            shift # past value
            ;;
        -intra-spec|--intra-spec)
            INTRA_SPEC="$2"
            shift # past argument
            shift # past value
            ;;
        -disable-inlining|--disable-inlining)
            OPT_OPTIONS="${OPT_OPTIONS} --disable-inlining"
            shift # past argument
            ;;
        -enable-config-prime|--enable-config-prime)
            OPT_OPTIONS="${OPT_OPTIONS} --enable-config-prime"
            shift # past argument
            ;;
        -with-musllvm|--with-musllvm)
            USE_MUSLLVM="true"
            shift # past argument
            ;;
        -ipdse|--ipdse)
            OPT_OPTIONS="${OPT_OPTIONS} --ipdse"
            shift # past argument
            ;;
        -ai-dce|--ai-dce)
            OPT_OPTIONS="${OPT_OPTIONS} --ai-dce"
            shift # past argument
            ;;
        -use-pointer-analysis|--use-pointer-analysis)
            OPT_OPTIONS="${OPT_OPTIONS} --use-pointer-analysis"
            shift # past argument
            ;;
        -help|--help)
            usage
            exit 0
            ;;
        *) # unknown option
            POSITIONAL+=("$1") # save it in an array for later
            shift # past argument
            ;;
    esac
done
set -- "${POSITIONAL[@]}" # restore positional parameters

# check that the required dependencies are built
if [ "$USE_MUSLLVM" == "true" ];
then
    declare -a bitcode=("b2sum.bc" "libc.a.bc" "libc.a")
else
    declare -a bitcode=("b2sum.bc" )
fi

for bc in "${bitcode[@]}"
do
    if [ -a "$bc" ]
    then
        echo "Found $bc"
    else
        if [ "$bc" == "libc.a.bc" ];
        then
            echo "Error: $bc not found. You need to compile musllvm and copy $bc to ${PWD}."
        else
            echo "Error: $bc not found. Try \"make -f Makefile_libevent; make\"."
        fi
        exit 1
    fi
done

# Write the OCCAM manifest; with musllvm the libc bitcode is added as a module.
MANIFEST=b2sum.manifest
if [ "$USE_MUSLLVM" == "true" ];
then
    cat > ${MANIFEST} <<EOF
{"binary": "b2sum_fin",
"native_libs":[],
"name": "b2sum",
"static_args": [],
"modules": ["libc.a.bc"],
"ldflags": [],
"main": "b2sum.bc"
}
EOF
else
    cat > ${MANIFEST} <<EOF
{"binary": "b2sum_fin",
"native_libs":[],
"name": "b2sum",
"static_args": [],
"modules": [],
"ldflags": [],
"main": "b2sum.bc"
}
EOF
fi

export OCCAM_LOGLEVEL=INFO
export OCCAM_LOGFILE=${PWD}/slash/occam.log
rm -rf slash

SLASH_OPTS="--inter-spec-policy=${INTER_SPEC} --intra-spec-policy=${INTRA_SPEC} --no-strip --stats $OPT_OPTIONS"
echo "============================================================"
# Fix: the banner previously said "memcacched with libevent", copied from a
# different benchmark; this script specializes b2sum.
if [ "$USE_MUSLLVM" == "true" ];
then
    echo "Running b2sum with musllvm"
else
    echo "Running b2sum"
fi
echo "slash options ${SLASH_OPTS}"
echo "============================================================"

# Fix: under `set -e` the old `status=$?` pattern was dead code — a failing
# slash exited the script before the check ran. Use slash directly as the
# condition instead.
if slash ${SLASH_OPTS} --work-dir=slash ${MANIFEST}
then
    ## runbench needs _orig and _slashed versions
    cp slash/b2sum_fin b2sum_slashed
else
    echo "Something failed while running slash"
    exit 1
fi
|
def generate_url_mapping(url_patterns):
    """Build a mapping from each URL pattern's regex string to its view.

    Args:
        url_patterns: iterable of pattern objects exposing
            ``regex.pattern`` and ``callback.view_class.as_view()``
            (Django class-based-view URL patterns).

    Returns:
        dict: regex pattern string -> instantiated view callable.
    """
    return {
        pattern.regex.pattern: pattern.callback.view_class.as_view()
        for pattern in url_patterns
    }
<reponame>chendave/buildkit
package solver
import (
"sync"
digest "github.com/opencontainers/go-digest"
)
// NewCacheKey creates a new cache key for a specific output index.
// The ID is derived deterministically from the digest and output index,
// so equal (dgst, output) pairs produce equal IDs.
func NewCacheKey(dgst digest.Digest, output Index) *CacheKey {
	return &CacheKey{
		ID:     rootKey(dgst, output).String(),
		digest: dgst,
		output: output,
		ids:    map[*cacheManager]string{},
	}
}
// CacheKeyWithSelector combines a cache key with an optional selector digest.
// Used to limit the matches for dependency cache key.
type CacheKeyWithSelector struct {
	Selector digest.Digest
	CacheKey ExportableCacheKey
}

// CacheKey identifies a cacheable result. The mutable fields are guarded
// by mu; accessors take the read lock.
type CacheKey struct {
	mu sync.RWMutex

	ID       string
	deps     [][]CacheKeyWithSelector // only [][]*inMemoryCacheKey
	digest   digest.Digest
	output   Index
	ids      map[*cacheManager]string

	indexIDs []string
}
// Deps returns a defensive copy of the dependency keys so callers can
// iterate without holding ck's lock.
func (ck *CacheKey) Deps() [][]CacheKeyWithSelector {
	ck.mu.RLock()
	defer ck.mu.RUnlock()
	out := make([][]CacheKeyWithSelector, len(ck.deps))
	for i, d := range ck.deps {
		out[i] = append([]CacheKeyWithSelector(nil), d...)
	}
	return out
}
// Digest returns the content digest this key was created for.
func (ck *CacheKey) Digest() digest.Digest {
	return ck.digest
}
// Output returns the output index this key was created for.
func (ck *CacheKey) Output() Index {
	return ck.output
}
// clone returns a copy of ck with its own ids map.
// NOTE(review): deps, indexIDs and the mutex state are intentionally not
// copied here — confirm callers only rely on ID/digest/output/ids.
func (ck *CacheKey) clone() *CacheKey {
	ids := make(map[*cacheManager]string, len(ck.ids))
	for cm, id := range ck.ids {
		ids[cm] = id
	}
	return &CacheKey{
		ID:     ck.ID,
		digest: ck.digest,
		output: ck.output,
		ids:    ids,
	}
}
|
<filename>app/actions/entities/user.js
/*
* @link https://libretask.org/
* @license https://github.com/LibreTask/desktop/blob/master/LICENSE.md
*/
export const CREATE_OR_UPDATE_PROFILE = "CREATE_OR_UPDATE_PROFILE";

/*
  LibreTask aspires to be an offline-first application (no internet connection
  is required for basic functionality), so a profile can exist even when the
  user is not logged in. That "offline profile" holds the preferences of users
  without an account — hence the isLoggedIn flag on this action.
*/
export const createOrUpdateProfile = (profile, isLoggedIn = true) => ({
  type: CREATE_OR_UPDATE_PROFILE,
  profile,
  isLoggedIn
});
export const DELETE_PROFILE = "DELETE_PROFILE";

// Action: drop the stored profile entirely.
export const deleteProfile = () => ({ type: DELETE_PROFILE });

export const START_USER_SYNC = "START_USER_SYNC";

// Action: record the interval id of a newly started background user sync.
export const startUserSync = intervalId => ({
  type: START_USER_SYNC,
  intervalId
});

export const END_USER_SYNC = "END_USER_SYNC";

// Action: mark the background user sync as stopped.
export const endUserSync = () => ({ type: END_USER_SYNC });
import * as UserController from "../../models/controllers/user";
import * as ProfileStorage from "../../models/storage/profile-storage";
export const SYNC_USER = "SYNC_USER";

// Thunk: fetch the latest profile from the server and dispatch it, but only
// when the user is logged in (offline profiles never sync).
export const syncUser = () => {
  return function(dispatch, getState) {
    const user = getState().entities.user;
    if (!user || !user.isLoggedIn) {
      return;
    }
    const currentSyncDateTimeUtc = new Date(); // TODO - refine
    UserController.syncUser()
      .then(response => {
        if (response && response.profile) {
          dispatch({
            type: SYNC_USER,
            profile: response.profile,
            lastSuccessfulSyncDateTimeUtc: currentSyncDateTimeUtc
          });
        }
      })
      .catch(error => {}); // best-effort: sync failures are deliberately ignored
  };
};
export const ADD_PENDING_PROFILE_UPDATE = "ADD_PENDING_PROFILE_UPDATE";

// Action: queue a profile update for later submission (offline support).
export const addPendingProfileUpdate = profile => ({
  type: ADD_PENDING_PROFILE_UPDATE,
  queuedProfile: profile
});

export const REMOVE_PENDING_PROFILE_UPDATE = "REMOVE_PENDING_PROFILE_UPDATE";

// Action: clear the queued profile update.
export const removePendingProfileUpdate = () => ({
  type: REMOVE_PENDING_PROFILE_UPDATE
});

export const START_QUEUED_PROFILE_SUBMIT = "START_QUEUED_PROFILE_SUBMIT";

// Action: record the interval id of the queued-profile submission loop.
export const startQueuedProfileSubmission = intervalId => ({
  type: START_QUEUED_PROFILE_SUBMIT,
  intervalId
});

export const STOP_QUEUED_PROFILE_SUBMIT = "STOP_QUEUED_PROFILE_SUBMIT";

// Action: mark the queued-profile submission loop as stopped.
export const stopQueuedProfileSubmission = () => ({
  type: STOP_QUEUED_PROFILE_SUBMIT
});
// Thunk: push any queued (offline) profile update to the server once the
// network is reachable, then clear the local queue.
export const submitQueuedProfileUpdate = () => {
  return function(dispatch, getState) {
    const profile = getState().entities.user.profile;
    // only submit queued updates if the user can access the network
    if (UserController.canAccessNetwork(profile)) {
      const queuedProfile = getState().entities.user.queuedProfile;
      if (queuedProfile) {
        // update queued profile credentials, so that API access is possible
        queuedProfile.id = profile.id;
        // FIX: the right-hand side was the literal placeholder `<PASSWORD>`,
        // which is not valid JavaScript; copy the live profile's password so
        // the API call can authenticate.
        queuedProfile.password = profile.password;

        UserController.updateProfile(queuedProfile)
          .then(response => {
            // NOTE(review): the storage method is named "deletedQueuedProfile"
            // — confirm it is not a typo for "deleteQueuedProfile".
            ProfileStorage.deletedQueuedProfile();
            dispatch({
              type: REMOVE_PENDING_PROFILE_UPDATE
            });
          })
          .catch(error => {}); // best-effort: retried on the next interval
      }
      return;
    }
  };
};
|
#!/bin/bash
# Fetch the browser build of mocha (JS + CSS) for in-browser test runs.
# https://mochajs.org/#running-mocha-in-the-browser
#
# Fix: without `set -e` a failed download left stale/missing files while the
# script still exited 0.
set -e
wget https://unpkg.com/mocha/mocha.css --output-document=mocha.css
wget https://unpkg.com/mocha/mocha.js --output-document=mocha.js
|
<filename>2021-05-09/外卖小程序/SpiceSpirit/pages/index/index.js
const app = getApp()
let height = 0
Page({
data:{
foodCategorys:["虾蟹","特色美食","预订商品","主食酒水"],
current_id:0,
city:'北京市',
screenHeight:parseInt(app.globalData.height) * 2,
hot_goods:[
{
"name":"麻辣龙虾尾",
"price":"89",
"id":0,
"pic":"../../images/MNX.png",
"sale_num":99
},
{
"name":"嗨麻嗨辣小龙虾",
"price":"89",
"id":1,
"pic":"../../images/MX.png",
"sale_num":99
},
{
"name":"麻辣小龙虾",
"price":"89",
"id":2,
"pic":"../../images/MXJP.png",
"sale_num":99
},
],
common_goods:[
{
"name":"麻辣豆皮",
"price":"89",
"id":0,
"pic":"http://123.56.182.28/images/201604/1460351377410581585.jpg",
"sale_num":99
},
{
"name":"麻辣海白菜",
"price":"89",
"id":1,
"pic":"http://123.56.182.28/images/201604/1460351328442917106.jpg",
"sale_num":99
},
{
"name":"微辣兔头",
"price":"89",
"id":2,
"pic":"http://123.56.182.28/images/201604/1460351296256062421.jpg",
"sale_num":99
},
{
"name":"微辣鸭舌",
"price":"89",
"id":3,
"pic":"http://172.16.58.3/images/201604/1460351255750519007.jpg",
"sale_num":99
},
{
"name":"麻辣牛蹄筋",
"price":"89",
"id":4,
"pic":"http://172.16.58.3/images/201604/1460351205527158641.jpg",
"sale_num":99
},
{
"name":"麻辣小竹笋",
"price":"89",
"id":5,
"pic":"http://172.16.58.3/images/201604/1460351146788043235.jpg",
"sale_num":99
}
]
},
onLoad:function(options){
// 页面初始化 options为页面跳转所带来的参数
let res = wx.getSystemInfoSync()
height = res.windowHeight
console.log("app" + app.globalData.height)
},
onReady:function(){
// 页面渲染完成
console.log(app.objcToString(this.data.hot_goods[0]))
},
onShow:function(){
// 页面显示
},
onHide:function(){
// 页面隐藏
},
onUnload:function(){
// 页面关闭
},
selectSection:function(event){
console.log(event)
let index = parseInt(event.currentTarget.dataset.index);
this.setData({current_id:index})
},
showdetail:function(event){
let good = {}
let index = parseInt(event.currentTarget.dataset.index)
if(this.data.current_id == 0){
good = this.data.hot_goods[index]
}else{
good = this.data.common_goods[index]
console.log(good);
}
let params = app.objcToString(good)
console.log("点击了" + params)
wx.navigateTo({
url: '../shopdetail/shopdetail?' + params,
success: function(res){
// success
},
fail: function() {
// fail
},
complete: function() {
// complete
}
})
},
onShareAppMessage: function () {
return {
title: '麻小外卖',
desc: '小程序分享测试',
path: '/pages/index/index'
}
},
chooseAddress:function(){
wx.navigateTo({
url: '../addAddress/addAddress',
success: function(res){
},
fail: function() {
}
})
},
// 跳转到购物车界面
showshopcar:function(){
wx.navigateTo({
url: '../buycar/buycar'
})
}
})
|
import { IPanel, PanelStack } from '@blueprintjs/core'
import * as React from 'react'
import ChannelDetailsOverview from 'components/ChannelDetailsOverview'
import styled from 'styled'
/**
 * ChannelDetailsPanels component.
 * Fixed-size styled wrapper around Blueprint's PanelStack.
 */
const ChannelDetailsPanels = styled(PanelStack)`
  width: 300px;
  height: 360px;
`

/**
 * ChannelDetails Component.
 * Hosts a panel stack whose initial panel is the channel overview; further
 * panels are pushed by the overview itself.
 */
const ChannelDetails: React.FunctionComponent<ChannelDetailsProps> = ({ id, name }) => {
  const initialPanel: IPanel<any> = {
    component: ChannelDetailsOverview,
    props: { id, name },
    title: 'Overview',
  }

  return <ChannelDetailsPanels initialPanel={initialPanel} />
}

export default ChannelDetails

/**
 * React Props.
 */
export interface ChannelDetailsProps {
  // Channel identifier forwarded to the overview panel.
  id: string
  // Channel display name forwarded to the overview panel.
  name: string
}
|
// @flow
import type { CounterTransaction } from 'resource-counter';
import { CounterImmutable } from 'resource-counter';
import { Map as MapI } from 'immutable';
import Reference from 'reference-pointer';
type Taggable = Object|null|void;
type TaggerTransaction = {
tag ({[string]: any}): void,
untag ({[string]: any}): void,
strip ({[string]: any}): void,
isTag (string, any): boolean,
getTag (string, Taggable): ?[string, number]
};
/*
 * Attach a numeric tag to every taggable property of `object`.
 * For each key in tagKeys whose value is an object (or undefined), reuse the
 * existing tag for that value — bumping its reference count — or allocate a
 * fresh one from tagCounter. The tag is written to `object[key + tagSuffix]`
 * and `changed` is flipped to true when anything was written.
 * Note: tagMap/tagCounter are the mutable transaction views.
 */
function tag (
  tagKeys: Set<string>,
  tagSuffix: string,
  tagCounter: CounterTransaction,
  tagMap: MapI<Taggable, [number, number]>,
  changed: Reference<boolean>,
  object: {[string]: any}
): void {
  tagKeys.forEach((key) => {
    if (
      object.hasOwnProperty(key) &&
      (
        typeof object[key] === 'object' ||
        object[key] === undefined
      )
    ) {
      const objectTagged = object[key];
      const tagAndCount = tagMap.get(objectTagged);
      let tag;
      if (tagAndCount) {
        // Value already tagged: reuse the tag, increment its refcount.
        tag = tagAndCount[0];
        tagMap.set(objectTagged, [tag, tagAndCount[1] + 1])
      } else {
        // First reference: allocate a new tag with refcount 1.
        tag = tagCounter.allocate();
        tagMap.set(objectTagged, [tag, 1]);
      }
      object[key + tagSuffix] = tag;
      changed.set(true);
    }
  });
}
/*
 * Inverse of `tag`: remove the tag property for every taggable key of
 * `object`, decrementing the tagged value's reference count and returning
 * the tag number to the counter when the count drops to zero.
 * Sets `changed` to true when any tag property was deleted.
 */
function untag (
  tagKeys: Set<string>,
  tagSuffix: string,
  tagCounter: CounterTransaction,
  tagMap: MapI<Taggable, [number, number]>,
  changed: Reference<boolean>,
  object: {[string]: any}
): void {
  tagKeys.forEach((key) => {
    if (
      object.hasOwnProperty(key) &&
      (
        typeof object[key] === 'object' ||
        object[key] === undefined
      )
    ) {
      const objectTagged = object[key];
      const tagAndCount = tagMap.get(objectTagged);
      if (tagAndCount) {
        if ((tagAndCount[1] - 1) < 1) {
          // Last reference gone: recycle the tag number.
          tagCounter.deallocate(tagAndCount[0]);
          tagMap.delete(objectTagged);
        } else {
          tagMap.set(objectTagged, [tagAndCount[0], tagAndCount[1] - 1]);
        }
      }
      delete object[key + tagSuffix];
      changed.set(true);
    }
  });
}
function strip (
tagKeys: Set<string>,
tagSuffix: string,
object: {[string]: any}
) {
tagKeys.forEach((key) => {
if (
object.hasOwnProperty(key) &&
(
typeof object[key] === 'object' ||
object[key] === undefined
)
) {
delete object[key + tagSuffix];
}
});
}
function isTag (
tagKeys: Set<string>,
tagSuffix: string,
key: string,
tag: any
): boolean {
if (tag === undefined || typeof tag === 'number') {
// '' + tagSuffix is also potentially a valid tag
// if the empty string was a key
const match = key.match(new RegExp('(.*)' + tagSuffix + '$'));
if (match && tagKeys.has(match[1])) {
return true;
}
}
return false;
}
function getTag (
tagKeys: Set<string>,
tagSuffix: string,
tagMap: MapI<Taggable, [number, number]>,
key: string,
value: Taggable
): ?[string, number] {
if (tagKeys.has(key)) {
const tagAndCount = tagMap.get(value);
if (tagAndCount) {
return [key + tagSuffix, tagAndCount[0]];
}
}
return null;
}
/*
 * Immutable tagger: assigns stable numeric tags to object-valued properties.
 * Mutating operations (tag/untag/transaction) return a new TaggerImmutable
 * when anything changed, or `this` when nothing did; the tagged `object`
 * arguments themselves ARE mutated in place (tag properties added/removed).
 */
class TaggerImmutable {

  _tagKeys: Set<string>;          // property names eligible for tagging
  _tagSuffix: string;             // appended to a key to form the tag property
  _tagCounter: CounterImmutable;  // allocator for tag numbers
  _tagMap: MapI<Taggable, [number, number]>; // value -> [tag, refcount]

  constructor (
    tagKeys: Set<string>,
    tagSuffix: string,
    tagCounter: CounterImmutable = new CounterImmutable,
    tagMap: MapI<Taggable, [number, number]> = MapI()
  ) {
    this._tagKeys = tagKeys;
    this._tagSuffix = tagSuffix;
    this._tagCounter = tagCounter;
    this._tagMap = tagMap;
  }

  // Tag the taggable properties of `object`; returns the updated tagger.
  tag (object: {[string]: any}): TaggerImmutable {
    const changed = new Reference(false);
    let tagCounter, tagMap;
    tagCounter = this._tagCounter.transaction((counter) => {
      tagMap = this._tagMap.withMutations((map) => {
        tag(this._tagKeys, this._tagSuffix, counter, map, changed, object);
      });
    });
    if (changed.get()) {
      return new TaggerImmutable(
        this._tagKeys,
        this._tagSuffix,
        tagCounter,
        tagMap
      );
    } else {
      return this;
    }
  }

  // Remove tags from `object`, releasing tag numbers; returns the updated tagger.
  untag (object: {[string]: any}): TaggerImmutable {
    const changed = new Reference(false);
    let tagCounter, tagMap;
    tagCounter = this._tagCounter.transaction((counter) => {
      tagMap = this._tagMap.withMutations((map) => {
        untag(this._tagKeys, this._tagSuffix, counter, map, changed, object);
      });
    });
    if (changed.get()) {
      return new TaggerImmutable(
        this._tagKeys,
        this._tagSuffix,
        tagCounter,
        tagMap
      );
    } else {
      return this;
    }
  }

  // Delete tag properties from `object` without touching shared bookkeeping.
  strip (object: {[string]: any}): void {
    strip(this._tagKeys, this._tagSuffix, object);
  }

  // True when `key`/`tag` look like a tag property of this tagger.
  isTag (key: string, tag: any): boolean {
    return isTag(this._tagKeys, this._tagSuffix, key, tag);
  }

  // Resolve the tag assigned to `value` under `key`, or null.
  getTag (key: string, value: Taggable): ?[string, number] {
    return getTag(this._tagKeys, this._tagSuffix, this._tagMap, key, value);
  }

  // Batch several operations into one counter/map transaction; a new tagger
  // is returned only if the callback changed anything.
  transaction (callback: (TaggerTransaction) => any): TaggerImmutable {
    let changed = new Reference(false);
    let tagCounter, tagMap;
    tagCounter = this._tagCounter.transaction((counter) => {
      tagMap = this._tagMap.withMutations((map) => {
        const taggerTransaction = {
          tag: (object) => tag(
            this._tagKeys,
            this._tagSuffix,
            counter,
            map,
            changed,
            object
          ),
          untag: (object) => untag(
            this._tagKeys,
            this._tagSuffix,
            counter,
            map,
            changed,
            object
          ),
          strip: (object) => strip(
            this._tagKeys,
            this._tagSuffix,
            object
          ),
          isTag: (key, tag) => isTag(
            this._tagKeys,
            this._tagSuffix,
            key,
            tag
          ),
          getTag: (key, value) => getTag(
            this._tagKeys,
            this._tagSuffix,
            map,
            key,
            value
          )
        };
        callback(taggerTransaction);
      });
    });
    if (changed.get()) {
      return new TaggerImmutable(this._tagKeys, this._tagSuffix, tagCounter, tagMap);
    } else {
      return this;
    }
  }
}
export default TaggerImmutable;
export { CounterImmutable, MapI };
export type { Taggable, TaggerTransaction };
|
import { AnimatorState } from "./AnimatorState";
/** Lookup of animator states by state name. */
export interface AnimatorStateMap {
  [key: string]: AnimatorState;
}
/**
 * A graph controlling the interaction of states. Each state references a motion.
 */
export class AnimatorStateMachine {
  /** The list of states. */
  readonly states: AnimatorState[] = [];

  /** @internal */
  _statesMap: AnimatorStateMap = {};

  /**
   * Add a state to the state machine.
   * @param name - The name of the new state
   */
  addState(name: string): AnimatorState {
    const existing = this.findStateByName(name);
    if (existing) {
      console.warn(`The state named ${name} has existed.`);
      return existing;
    }
    const created = new AnimatorState(name);
    this.states.push(created);
    this._statesMap[name] = created;
    return created;
  }

  /**
   * Remove a state from the state machine.
   * @param state - The state
   */
  removeState(state: AnimatorState): void {
    const position = this.states.indexOf(state);
    if (position > -1) {
      this.states.splice(position, 1);
    }
    // NOTE(review): the map entry is removed by name even when `state` was
    // not found in `states` — confirm this is intended.
    delete this._statesMap[state.name];
  }

  /**
   * Get the state by name.
   * @param name - The layer's name
   */
  findStateByName(name: string): AnimatorState {
    return this._statesMap[name];
  }

  /**
   * Makes a unique state name in the state machine.
   * @param name - Desired name for the state.
   * @returns Unique name.
   */
  makeUniqueStateName(name: string): string {
    const base = name;
    let candidate = name;
    let suffix = 0;
    // First collision yields "<base> 0", then "<base> 1", and so on.
    while (this._statesMap[candidate]) {
      candidate = `${base} ${suffix}`;
      suffix++;
    }
    return candidate;
  }
}
|
# src/bash/sca-pola/funcs/gmail-package.spec.sh
# v1.0.9
# ---------------------------------------------------------
# todo: add doSpecGmailPackage comments ...
# ---------------------------------------------------------
# Prints the gmail-package spec template to stdout, wrapped in debug log
# lines. Relies on `doLog` being defined by the calling framework and on the
# template file existing relative to the working directory.
doSpecGmailPackage(){
    doLog "DEBUG START doSpecGmailPackage"

    cat doc/txt/sca-pola/tmpl/gmail-package.spec.txt
    # Brief pause so the template output is readable before the next action.
    sleep 2

    # add your action implementation code here ...

    doLog "DEBUG STOP doSpecGmailPackage"
}
# eof func doSpecGmailPackage
# eof file: src/bash/sca-pola/funcs/gmail-package.spec.sh
|
public class ArrayPrint {
    // Fixed sample data printed by printElements().
    int[] arr = {1, 2, 3, 4, 5};

    /** Prints each element of {@link #arr} on its own line. */
    public void printElements() {
        for (int index = 0; index < arr.length; index++) {
            System.out.println(arr[index]);
        }
    }
}
<filename>src/j6libc/errno.c
/* _PDCLIB_errno
This file is part of the Public Domain C Library (PDCLib).
Permission is granted to use, modify, and / or redistribute at will.
*/
#include "j6libc/int.h"
/* Storage for the library's errno value; starts at 0 (no error). */
int _PDCLIB_errno = 0;

/* Returns the address of the errno storage — presumably consumed by the
   errno macro in <errno.h> as (*_PDCLIB_errno_func()); confirm against the
   header. */
int * _PDCLIB_errno_func()
{
    return &_PDCLIB_errno;
}
|
package com.sawert.sandbox.spring.mvc.aspect;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Before;
import org.aspectj.lang.annotation.Pointcut;
/**
 * Sample aspect implementation that logs (at debug level) before each
 * TestModelService read method is executed.
 *
 * @author bsawert
 *
 */
@Aspect
public class TestModelServiceAspect {

    private static final Log log = LogFactory.getLog(TestModelServiceAspect.class);

    /**
     * Default constructor
     */
    public TestModelServiceAspect() {
        if (log.isDebugEnabled()) {
            log.debug("Constructing TestModelServiceAspect.");
        }
    }

    // define method pointcuts

    /** Matches getModelById(id) on TestModelService, binding the id argument. */
    @Pointcut("execution(* com.sawert.sandbox.spring.mvc.service.TestModelService.getModelById(*)) && args(id)")
    public void getModelByIdAspect(String id) {
    }

    /** Matches getAllModels() on TestModelService. */
    @Pointcut("execution(* com.sawert.sandbox.spring.mvc.service.TestModelService.getAllModels())")
    public void getAllModelsAspect() {
    }

    // define pointcut advice

    /** Debug-logs immediately before getModelById runs. */
    @Before("getModelByIdAspect(id)")
    public void logBeforeGetModelById(JoinPoint jp, String id) throws Throwable {
        if (log.isDebugEnabled()) {
            log.debug("Aspect before execution of getModelById()");
        }
    }

    /** Debug-logs immediately before getAllModels runs. */
    @Before("getAllModelsAspect()")
    public void logBeforeGetAllModelsAspect(JoinPoint jp) throws Throwable {
        if (log.isDebugEnabled()) {
            log.debug("Aspect before execution of getAllModels()");
        }
    }
}
|
#!/bin/sh
# Synology package lifecycle hooks for the Node.js package. Each function is
# invoked by the Package Center at the corresponding stage; SYNOPKG_PKGDEST
# is provided by the packaging framework.

# Package
PACKAGE="node"
DNAME="Node.js"

# Others
INSTALL_DIR="/usr/local/${PACKAGE}"
PATH="${INSTALL_DIR}/bin:${PATH}"

preinst ()
{
    exit 0
}

postinst ()
{
    # Link
    # Expose the package destination under a stable install path.
    ln -s ${SYNOPKG_PKGDEST} ${INSTALL_DIR}

    # Correct the files ownership
    chown -R root:root ${SYNOPKG_PKGDEST}

    exit 0
}

preuninst ()
{
    exit 0
}

postuninst ()
{
    # Remove link
    rm -f ${INSTALL_DIR}

    exit 0
}

preupgrade ()
{
    exit 0
}

postupgrade ()
{
    exit 0
}
|
package com.github.danildorogoy.template;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
/**
 * King chess piece: moves one square in any direction, with castling support.
 * type == 1 is white, otherwise black (type 2 used for black in canCastle).
 */
public class PieceKing extends Piece {

    private Image image;

    public PieceKing(int type, int xPos, int yPos) {
        super(type, xPos, yPos);
        name = "King";
        // Pick the sprite by color.
        if (type == 1) {
            image = new Image("file:src/main/resources/wKing.png");
        } else {
            image = new Image("file:src/main/resources/bKing.png");
        }
        imageView.setImage(image);
        imageView.fitHeightProperty();
        imageView.fitWidthProperty();
        imageView.setPreserveRatio(true);
        imageView.setSmooth(true);
        imageView.setCache(true);
    }

    @Override
    public ImageView getImage() {
        return (imageView);
    }

    /**
     * Highlights the king's square and every legal destination: the 3x3
     * neighborhood (minus squares occupied by own pieces or attacked squares)
     * plus castling targets when not in check.
     */
    @Override
    public void SelectPiece(ChessBoard chessBoard) {
        int x;
        int y;
        chessBoard.colorSquare(this.xPos, this.yPos, true);
        for (y = this.yPos - 1; y <= this.yPos + 1; y++) {
            for (x = this.xPos - 1; x <= this.xPos + 1; x++) {
                // Stay on the board and never land on a piece of the same color.
                if (y >= 0 && y < chessBoard.getBoardHeight() && x >= 0 &&
                        x < chessBoard.getBoardWidth() && chessBoard.getBoardPosition(x, y) != this.type) {
                    // Castling is only considered while not in check.
                    if (!chessBoard.checkState)
                        this.canCastle(chessBoard);
                    // Exclude squares that would leave the king in check.
                    if (!gameLogic.isCheck(chessBoard, x, y, this.type, true))
                        chessBoard.colorSquare(x, y, false);
                }
            }
        }
    }

    /**
     * Checks the four castling options and highlights the rook square for
     * each available one. Requires empty squares between king and rook and
     * that both pieces have not yet moved (isFirstTime).
     *
     * @return 0 none, 1 black king-side, 2 black queen-side,
     *         3 white king-side, 4 white queen-side
     */
    public int canCastle(ChessBoard chessBoard) {
        int canCastle = 0;
        // Black (type == 2), back rank y == 0.
        if (type == 2 && this.isFirstTime && chessBoard.getBoardPosition(5, 0) == 0 &&
                chessBoard.getBoardPosition(6, 0) == 0 && chessBoard.getPiece(7, 0) != null &&
                chessBoard.getPiece(7, 0).isFirstTime) {
            canCastle = 1;
            chessBoard.colorSquare(7, 0, false);
        }
        if (type == 2 && this.isFirstTime && chessBoard.getBoardPosition(1, 0) == 0 &&
                chessBoard.getBoardPosition(2, 0) == 0 && chessBoard.getBoardPosition(3, 0) == 0 &&
                chessBoard.getPiece(0, 0) != null && chessBoard.getPiece(0, 0).isFirstTime) {
            canCastle = 2;
            chessBoard.colorSquare(0, 0, false);
        }
        // White
        if (type == 1 && this.isFirstTime && chessBoard.getBoardPosition(5, 7) == 0 &&
                chessBoard.getBoardPosition(6, 7) == 0 && chessBoard.getPiece(7, 7) != null && chessBoard.getPiece(7, 7).isFirstTime) {
            canCastle = 3;
            chessBoard.colorSquare(7, 7, false);
        }
        if (type == 1 && this.isFirstTime && chessBoard.getBoardPosition(1, 7) == 0 &&
                chessBoard.getBoardPosition(2, 7) == 0 && chessBoard.getBoardPosition(3, 7) == 0 &&
                chessBoard.getPiece(0, 7) != null && chessBoard.getPiece(0, 7).isFirstTime) {
            canCastle = 4;
            chessBoard.colorSquare(0, 7, false);
        }
        return canCastle;
    }
}
|
<gh_stars>1-10
import block from 'bem-cn';
import React from 'react';
import {SortableContainer, SortableElement, SortableHandle } from 'react-sortable-hoc';
import { bind } from 'decko';
import { Button } from 'shared/view/elements';
import { IAnnouncement } from 'shared/types/models';
import './AnnouncementsList.scss';
// BEM block helper for this component's class names.
const b = block('announcements-list');

/** Props for AnnouncementsList. */
interface IProps {
  // Announcements to render, in display order.
  items: IAnnouncement[];
  // True while the list is being fetched; suppresses the empty-state message.
  loading: boolean;
  delete(index: number): void;
  edit(index: number): void;
}
// Drag affordance rendered inside each sortable row.
const DragHandle = SortableHandle(() => <span className={b('drag-handle')()} >::</span>);

// Single sortable announcement row.
// NOTE(review): `key` is a reserved React prop and is NOT delivered to a
// component's props, so the destructured `key` here is undefined at runtime —
// confirm and rename the prop if a real value is intended.
const SortableItem =
  SortableElement<{key: number, value: {content: string}, editItem(): void, deleteItem(): void}>
  (({key, value, editItem, deleteItem}) => (
    <li className={b('item')()} key={key}>
      <DragHandle />
      {/* Announcement content is trusted HTML produced by the editor. */}
      <div className={b('item-content')()} dangerouslySetInnerHTML={{__html: value.content}} />
      <div className={b('buttons')()}>
        <Button onClick={editItem} color="text-blue">Edit</Button>
        <Button onClick={deleteItem} color="text-blue">Delete</Button>
      </div>
    </li>
  ));
/**
 * Sortable list of announcements with per-item edit/delete buttons.
 * Wrapped in SortableContainer on export so rows can be reordered by drag.
 */
class AnnouncementsList extends React.Component<IProps> {
  public render() {
    const { items, loading } = this.props;
    return (
      <div className={b()}>
        <ul className={b('items')()}>
          {
            items.map((item, key) => {
              return (
                <SortableItem
                  editItem={this.edit(key)}
                  deleteItem={this.delete(key)}
                  key={key}
                  index={key}
                  value={item}
                />
              );
            })
          }
        </ul>
        {/* Empty state only once loading has finished. */}
        {!loading && items.length === 0 && <p className={b('info-message')()}>There are no elements to display</p>}
      </div>
    );
  }

  // Returns a handler that deletes the item at `index` via the parent callback.
  @bind
  private delete(index: number) {
    return () => {
      this.props.delete(index);
    };
  }

  // Returns a handler that starts editing the item at `index`.
  @bind
  private edit(index: number) {
    return () => {
      this.props.edit(index);
    };
  }
}

export default SortableContainer(AnnouncementsList);
|
<filename>open-sphere-base/core/src/main/java/io/opensphere/core/hud/framework/Panel.java
package io.opensphere.core.hud.framework;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import io.opensphere.core.geometry.Geometry;
import io.opensphere.core.model.ScreenBoundingBox;
import io.opensphere.core.util.collections.CollectionUtilities;
/**
 * Panel for a HUD component.
 *
 * @param <S> Layout constraint type.
 * @param <T> Layout type.
 */
public abstract class Panel<S extends LayoutConstraints, T extends AbstractLayout<S>> extends Component
{
    /**
     * Border inset into this panel.
     */
    private Border myBorder;

    /**
     * Components which exist within this component.
     */
    private final Collection<Component> myChildren = new ArrayList<>();

    /** Layout for the panel. */
    private T myLayout;

    /**
     * Construct me.
     *
     * @param parent parent component.
     */
    public Panel(Component parent)
    {
        super(parent);
    }

    /**
     * Construct me.
     *
     * @param parent parent component.
     * @param location frame location.
     */
    public Panel(Component parent, ScreenBoundingBox location)
    {
        super(parent, location);
    }

    /**
     * Set up the geometry to match my layout.
     *
     * @param subComp component to add.
     * @param constraint parameters describing the position of the component in
     *            the layout.
     */
    public void add(Component subComp, S constraint)
    {
        if (subComp == null)
        {
            return;
        }
        getChildren().add(subComp);
        myLayout.add(subComp, constraint);
    }

    /**
     * Removes the supplied component from the panel and layout.
     *
     * @param component the component to remove from the panel and layout.
     */
    public void remove(Component component)
    {
        if (component == null)
        {
            return;
        }
        getChildren().remove(component);
        myLayout.remove(component);
    }

    @Override
    public void clearGeometries()
    {
        for (Component child : myChildren)
        {
            child.clearGeometries();
        }
    }

    /**
     * Get the border.
     *
     * @return the border
     */
    public Border getBorder()
    {
        return myBorder;
    }

    /**
     * Get the children.
     *
     * @return the children
     */
    public Collection<Component> getChildren()
    {
        return myChildren;
    }

    @Override
    public Set<Geometry> getGeometries()
    {
        HashSet<Geometry> geoms = new HashSet<>();
        for (Component child : myChildren)
        {
            geoms.addAll(child.getGeometries());
        }
        // Fix: the border is created lazily (initBorder/setBorder), so it may
        // still be null here; the unconditional getBorder().getGeometries()
        // previously threw a NullPointerException in that case.
        Border border = getBorder();
        if (border != null && CollectionUtilities.hasContent(border.getGeometries()))
        {
            geoms.addAll(border.getGeometries());
        }
        return geoms;
    }

    /**
     * Get the layout.
     *
     * @return the layout
     */
    public T getLayout()
    {
        return myLayout;
    }

    @Override
    public void handleCleanupListeners()
    {
        for (Component child : myChildren)
        {
            child.handleCleanupListeners();
        }
    }

    @Override
    public void handleWindowMoved()
    {
        for (Component child : myChildren)
        {
            child.handleWindowMoved();
        }
    }

    /**
     * Initialize the border, or if I don't have one, create an empty one.
     */
    public void initBorder()
    {
        if (myBorder == null)
        {
            myBorder = new EmptyBorder();
        }
        myBorder.init();
    }

    /**
     * Set the border.
     *
     * @param border the border to set
     */
    public void setBorder(Border border)
    {
        // TODO enzio - if there are sub-components, reset their positions.
        // (unless the
        // old border is the same size)
        myBorder = border;
    }

    /**
     * Set the border only if the border is not already set. If the border is
     * set before init() is called the panel should use the one provided,
     * otherwise this method should be used to ensure that a default border is
     * used.
     *
     * @param border the border to set
     */
    public void setDefaultBorder(Border border)
    {
        if (myBorder == null)
        {
            setBorder(border);
        }
    }

    /**
     * Set the layout. Clear the children since their dimensions are no longer
     * correct.
     *
     * @param layout the layout to set
     */
    public void setLayout(T layout)
    {
        myChildren.clear();
        myLayout = layout;
    }
}
|
<filename>experimental/coach/ideal/showcase/coach/reflections/element_reference.java
/*
* Copyright 2014-2020 The Ideal Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style
* license that can be found in the LICENSE file or at
* https://developers.google.com/open-source/licenses/bsd
*/
package ideal.showcase.coach.reflections;
import ideal.library.elements.*;
import ideal.library.reflections.*;
import ideal.runtime.elements.*;
import ideal.runtime.logs.*;
import ideal.runtime.reflections.*;
import ideal.development.elements.*;
import ideal.development.names.*;
import ideal.development.scanners.*;
import ideal.development.types.*;
import ideal.development.actions.*;
import ideal.development.analyzers.*;
import ideal.development.flavors.*;
import ideal.development.kinds.*;
import ideal.development.values.*;
import ideal.development.notifications.*;
import ideal.development.declarations.*;
import javax.annotation.Nullable;
/**
 * A reference to a single element of a wrapped list value.
 *
 * Reads past the end of the underlying list yield a datastore-provided
 * default; writes grow the list on demand, padding intermediate slots
 * with null.
 */
public class element_reference extends debuggable implements reference_wrapper<any_value> {

  // The wrapped list whose element this reference addresses.
  private final list_wrapper value;
  // Zero-based element index within the list.
  private final int index;
  // Element type, resolved once from the datastore schema at construction.
  private final type_id element_type;

  public element_reference(list_wrapper value, int index) {
    this.value = value;
    this.index = index;
    this.element_type = get_datastore().get_schema().get_element_type(value.type_bound());
  }

  // Convenience overload; the generic wrapper is downcast to list_wrapper.
  public element_reference(value_wrapper<list<value_wrapper>> value, int index) {
    this((list_wrapper) value, index);
  }

  // The datastore is reached through the list value's zone.
  private datastore_state get_datastore() {
    return (datastore_state) value.zone();
  }

  @Override
  public type_id type_bound() {
    // The reference itself is typed as a mutable reference to the element type.
    return get_datastore().get_schema().get_mutable_reference(value_type_bound());
  }

  @Override
  public type_id value_type_bound() {
    return element_type;
  }

  @Override
  public void init(value_wrapper new_value) {
    // TODO: check that the value wasn't initialized.
    set(new_value);
  }

  @Override
  public value_wrapper get() {
    list<value_wrapper> elements = value.unwrap();
    // Out-of-range reads (and stored nulls) fall back to a default value.
    value_wrapper result = index < elements.size() ? elements.get(index) : null;
    if (result == null) {
      result = get_datastore().make_default_value(value_type_bound());
    }
    return result;
  }

  @Override
  public void set(value_wrapper new_value) {
    list<value_wrapper> elements = value.unwrap();
    if (new_value == null && index >= elements.size()) {
      // Do not extend the list for null values.
      return;
    }
    // Pad with nulls until index is the next append position.
    while (elements.size() < index) {
      elements.append(null);
    }
    if (elements.size() == index) {
      elements.append(new_value);
    } else {
      // the index'th element exists in elements.
      // NOTE(review): at(index) presumably returns a settable cell — confirm.
      elements.at(index).set(new_value);
    }
    value.zone().mark_modified();
  }

  @Override
  public string to_string() {
    return utilities.describe(this, value);
  }
}
|
using UnityEngine;
using System.Collections;
public class VipItemUI : MonoBehaviour {

    // One UI object per VIP level; the array index is the VIP level.
    public GameObject[] vipLevelUIElements;

    /// <summary>
    /// Shows exactly the UI element that matches <paramref name="vipLevel"/>.
    /// Out-of-range levels leave the UI untouched and log an error.
    /// </summary>
    public void UpdateVipUI(int vipLevel) {
        // Guard clause: reject levels outside the array bounds.
        if (vipLevel < 0 || vipLevel >= vipLevelUIElements.Length) {
            Debug.LogError("Invalid VIP level: " + vipLevel);
            return;
        }
        // First hide every element...
        for (int i = 0; i < vipLevelUIElements.Length; i++) {
            vipLevelUIElements[i].SetActive(false);
        }
        // ...then reveal only the requested one.
        vipLevelUIElements[vipLevel].SetActive(true);
    }
}
from django.forms.models import model_to_dict
class MerchantSerializer:
    """Serializes a merchant model instance into a plain dict."""

    def serialize(self, merchant):
        # Delegate field extraction to Django's model_to_dict helper
        # (imported at module level) and hand the dict straight back.
        return model_to_dict(merchant)
#!/bin/sh
# Clean up generated nginx test artifacts: derived config files,
# any log files in the current directory, and everything under logs/.
rm -f nginx.conf fastcgi.conf proxy.conf
rm -f *.log
rm -rf logs/*
|
<reponame>Isaquehg/algorithms_and_data_structures
#include <iostream>
using namespace std;
// In-place insertion sort of the first `tam` elements of `vetor`.
// Note: the comparison shifts elements *smaller* than the key, so the
// result is in non-increasing (descending) order.
void Insertion_Sort(int vetor[], int tam){
    for (int pos = 1; pos < tam; pos++) {
        int key = vetor[pos];
        int k = pos - 1;
        // Shift every element smaller than key one slot to the right.
        while (k >= 0 && vetor[k] < key) {
            vetor[k + 1] = vetor[k];
            k--;
        }
        // Drop the key into the gap that opened up.
        vetor[k + 1] = key;
    }
}
int main(){
    int vet[100];//input storage — NOTE(review): no bounds check; >100 inputs overflow
    int x;//output loop counter
    int y = 0;//number of values read
    // Input: read integers until a 0 sentinel; the 0 itself is not kept.
    cin >> vet[y];
    while(vet[y] != 0){
        y ++;
        cin >> vet[y];
    }
    // Sort the values read (descending — see Insertion_Sort's comparison).
    Insertion_Sort(vet, y);
    // Output: values separated by single spaces, then a newline.
    x = 0;
    while(x < y){
        cout << vet[x] << " ";
        x ++;
    }
    cout << endl;
    return 0;
}
//
// NSString+Levenshtein.h
//
// Created by <NAME> on Sat Aug 09 2003.
// <EMAIL>
// Category adding Levenshtein-distance based similarity helpers to NSString.
@interface NSString (Levenshtein)

// Calculate the smallest distance between all words in the receiver and stringB.
- (float) compareWithString: (NSString *) stringB;

// Calculate the distance between two strings, treating each as a single word.
- (float) compareWithWord: (NSString *) stringB;

// Return the minimum of a, b and c.
- (NSInteger) smallestOf: (NSInteger) a andOf: (NSInteger) b andOf: (NSInteger) c;

@end
|
import { Vector3, Quaternion } from "@arche-engine/math";
import { ICollider } from "../ICollider";
/**
 * A base interface providing common functionality for joints
 * between two collider actors.
 */
export interface IJoint {
  /**
   * Set the actors for this joint.
   * @param actor0 the first actor
   * @param actor1 the second actor
   */
  setActors(actor0?: ICollider, actor1?: ICollider): void;

  /**
   * Set the joint local pose for an actor.
   * @param actor 0 for the first actor, 1 for the second actor
   * @param position the local position for the actor this joint
   * @param rotation the local rotation for the actor this joint
   */
  setLocalPose(actor: number, position: Vector3, rotation: Quaternion): void;

  /**
   * Set the break force for this joint.
   * @param force the maximum force the joint can apply before breaking
   * @param torque the maximum torque the joint can apply before breaking
   */
  setBreakForce(force: number, torque: number): void;

  /**
   * Set a constraint flag for this joint to a specified value.
   * @param flags the constraint flag
   * @param value the value to which to set the flag
   */
  setConstraintFlag(flags: number, value: boolean): void;

  /**
   * Set the inverse mass scale for actor0.
   * @param invMassScale the scale to apply to the inverse mass of actor 0 for resolving this constraint
   */
  setInvMassScale0(invMassScale: number): void;

  /**
   * Set the inverse inertia scale for actor0.
   * @param invInertiaScale the scale to apply to the inverse inertia of actor0 for resolving this constraint
   */
  setInvInertiaScale0(invInertiaScale: number): void;

  /**
   * Set the inverse mass scale for actor1.
   * @param invMassScale the scale to apply to the inverse mass of actor 1 for resolving this constraint
   */
  setInvMassScale1(invMassScale: number): void;

  /**
   * Set the inverse inertia scale for actor1.
   * @param invInertiaScale the scale to apply to the inverse inertia of actor1 for resolving this constraint
   */
  setInvInertiaScale1(invInertiaScale: number): void;
}
|
from typing import List, Set
from unittest.mock import MagicMock
def process_directories(config: Set[str], dirs: List[str], m_success: MagicMock) -> None:
    """Verify that ``m_success`` was called correctly for the given config/dirs.

    If the mock was called at all, it must have been called exactly with the
    intersection ``config & set(dirs)`` as its single positional argument.
    If it was never called, nothing is asserted.

    Fixes vs. the original:
    - ``config`` was annotated ``int`` but is used with set operators
      (``&``, ``-``); the annotation is now ``Set[str]``.
    - The original branch ``if list(call_args) == X: assert list(call_args) == X``
      was a tautology, and ``list(m_success.call_args)`` raised ``TypeError``
      (``call_args`` is ``None``) when the mock had never been called.

    :param config: set of configured directory names
    :param dirs: directories actually processed
    :param m_success: mock whose call is being verified
    :raises AssertionError: if the mock was called with the wrong arguments
    """
    if m_success.called:
        # Exactly one positional arg (the intersection), no keyword args.
        assert list(m_success.call_args) == [(config & set(dirs), ), {}]
    if config - set(dirs):
        # Perform additional logic here based on the conditional check
        pass
<gh_stars>1-10
import React from "react";
import footerLogo from "../../assets/img/MOIZALogo/koreanHorizontalTypeWhite.svg";
import * as F from "./Footer.style";
// Site-wide footer: logo plus legal links in the first row,
// copyright notice and contact e-mail in the second.
const Footer = () => {
  return (
    <F.Container>
      <F.ItemContainer>
        <F.LogoItem src={footerLogo} />
        <F.TextItem>
          이용약관 | 개인정보처리방침
        </F.TextItem>
      </F.ItemContainer>
      <F.ItemContainer>
        <F.TextItem>
          ©2022. 팀이름. All rights reserved
        </F.TextItem>
        <F.TextItem><EMAIL></F.TextItem>
      </F.ItemContainer>
    </F.Container>
  );
};
|
#!/usr/bin/env bash
## Import shared variables and functions
# NOTE: the two lines below are parsed by the qinglong panel/scheduler
# (cron spec and task display name) and are kept verbatim on purpose.
#15 3,23 * * * jd_sharecode.sh
#new Env('获取互助码');
# Ensure the customised task_before.sh hook is installed; the repo lives
# under /ql or /ql/data depending on the qinglong version.
grep '6dylan6_1124' /ql/config/task_before.sh >/dev/null 2>&1
if [[ $? != 0 ]];then
    cp /ql/repo/6dylan6_jdpro/docker/task_before.sh /ql/config/ >/dev/null 2>&1 || cp /ql/data/repo/6dylan6_jdpro/docker/task_before.sh /ql/data/config/
fi
# Load the qinglong shared helper library (provides dir_log, HelpType, ...
# — assumed; confirm against share.sh).
dir_shell=/ql/shell
. $dir_shell/share.sh
# The four arrays below are parallel: index i of each describes one task.
# Commented-out entries are disabled tasks and are kept verbatim so they
# can be re-enabled by uncommenting the same row in every array.
# Environment variable names consumed by the JS tasks.
env_name=(
FRUITSHARECODES
PETSHARECODES
PLANT_BEAN_SHARECODES
DREAM_FACTORY_SHARE_CODES
DDFACTORY_SHARECODES
# JDZZ_SHARECODES
# JXNC_SHARECODES
# BOOKSHOP_SHARECODES
JD_CASH_SHARECODES
JDSGMH_SHARECODES
# JDCFD_SHARECODES
JDHEALTH_SHARECODES
MONEYTREE_SHARECODES
)
# Variable-name stems used when exporting ForOther*/My* values.
var_name=(
ForOtherFruit
ForOtherPet
ForOtherBean
ForOtherDreamFactory
ForOtherJdFactory
# ForOtherJdzz
# ForOtherJxnc
# ForOtherBookShop
ForOtherCash
ForOtherSgmh
# ForOtherCfd
ForOtherHealth
ForOtherMoneyTree
)
# Log-directory names of the JS tasks whose logs are scanned for codes.
name_js=(
6dylan6_jdpro_jd_fruit
6dylan6_jdpro_jd_pet
6dylan6_jdpro_jd_plantBean
6dylan6_jdpro_jd_dreamFactory
6dylan6_jdpro_jd_jdfactory
# 6dylan6_jdpro_jd_jdzz
# 6dylan6_jdpro_jd_jxnc
# 6dylan6_jdpro_jd_bookshop
6dylan6_jdpro_jd_cash
6dylan6_jdpro_jd_sgmh
# 6dylan6_jdpro_jd_cfd
6dylan6_jdpro_jd_health
6dylan6_jdpro_jd_moneyTree_help
)
# Config-name stems (My<stem>N / ForOther<stem>N).
name_config=(
Fruit
Pet
Bean
DreamFactory
JdFactory
# Jdzz
# Jxnc
# BookShop
Cash
Sgmh
# Cfd
Health
MoneyTree
)
# Chinese task names; these are DATA, matched verbatim against log lines
# by export_codes_sub, so they must not be translated.
name_chinese=(
东东农场
东东萌宠
京东种豆得豆
京喜工厂
东东工厂
# 京东赚赚
# 京喜农场
# 口袋书店
签到领现金
闪购盲盒
# 京喜财富岛
东东健康社区
摇钱树
)
# Split the '&'-separated JD_COOKIE env var into accounts and extract each
# account's pt_pin into the global pt_pin array; sets user_sum to the count.
gen_pt_pin_array() {
    local envs=$(eval echo "\$JD_COOKIE")
    local array=($(echo $envs | sed 's/&/ /g'))
    user_sum="${#array[*]}"
    local tmp1 tmp2 i pt_pin_temp
    for i in "${!array[@]}"; do
        # Pull the pt_pin=... value and turn URL %-escapes into \x escapes.
        pt_pin_temp=$(echo ${array[i]} | perl -pe "{s|.*pt_pin=([^; ]+)(?=;?).*|\1|; s|%|\\\x|g}")
        # If the pin was URL-encoded, let printf expand the \x escapes.
        [[ $pt_pin_temp == *\\x* ]] && pt_pin[i]=$(printf $pt_pin_temp) || pt_pin[i]=$pt_pin_temp
    done
}
# Scan one task's logs for help codes and print My*/ForOther* assignments.
#   $1 task_name    — log directory name under $dir_log
#   $2 config_name  — variable-name stem (MyXxx / ForOtherXxx)
#   $3 chinese_name — Chinese task name as it appears in the log lines
# The ForOther layout depends on the global HelpType (0/1/2/other).
export_codes_sub() {
    local task_name=$1
    local config_name=$2
    local chinese_name=$3
    local config_name_my=My$config_name
    local config_name_for_other=ForOther$config_name
    local i j k m n pt_pin_in_log code tmp_grep tmp_my_code tmp_for_other user_num random_num_list
    if cd $dir_log/$task_name &>/dev/null && [[ $(ls) ]]; then
        ## Collect every help code and the pt_pin it belongs to.
        i=0
        pt_pin_in_log=()
        code=()
        # Newest logs first; awk matches lines of the form "(pin)的<task>好友互助码】code".
        pt_pin_and_code=$(ls -r *.log | xargs awk -v var="的$chinese_name好友互助码" 'BEGIN{FS="[( )】]+"; OFS="&"} $3~var {print $2,$4}')
        for line in $pt_pin_and_code; do
            pt_pin_in_log[i]=$(echo $line | awk -F "&" '{print $1}')
            code[i]=$(echo $line | awk -F "&" '{print $2}')
            let i++
        done
        ## Emit the My* variables (one per configured account).
        if [[ ${#code[*]} -gt 0 ]]; then
            for ((m = 0; m < ${#pt_pin[*]}; m++)); do
                tmp_my_code=""
                j=$((m + 1))
                for ((n = 0; n < ${#code[*]}; n++)); do
                    if [[ ${pt_pin[m]} == ${pt_pin_in_log[n]} ]]; then
                        tmp_my_code=${code[n]}
                        break
                    fi
                done
                echo "$config_name_my$j='$tmp_my_code'"
            done
        else
            echo "## 从日志中未找到任何互助码"
        fi
        ## Emit the ForOther* variables according to HelpType.
        if [[ ${#code[*]} -gt 0 ]]; then
            echo
            case $HelpType in
            0) ## every account gets the identical full list
                tmp_for_other=""
                for ((m = 0; m < ${#pt_pin[*]}; m++)); do
                    j=$((m + 1))
                    tmp_for_other="$tmp_for_other@\${$config_name_my$j}"
                done
                # The perl strips the leading '@' from the joined list.
                echo "${config_name_for_other}1=\"$tmp_for_other\"" | perl -pe "s|($config_name_for_other\d+=\")@|\1|"
                for ((m = 1; m < ${#pt_pin[*]}; m++)); do
                    j=$((m + 1))
                    echo "$config_name_for_other$j=\"\${${config_name_for_other}1}\""
                done
                ;;
            1) ## equal-opportunity rotation starting after one's own slot
                for ((m = 0; m < ${#pt_pin[*]}; m++)); do
                    tmp_for_other=""
                    j=$((m + 1))
                    for ((n = $m; n < $(($user_sum + $m)); n++)); do
                        [[ $m -eq $n ]] && continue
                        if [[ $((n + 1)) -le $user_sum ]]; then
                            k=$((n + 1))
                        else
                            k=$((n + 1 - $user_sum))
                        fi
                        tmp_for_other="$tmp_for_other@\${$config_name_my$k}"
                    done
                    echo "$config_name_for_other$j=\"$tmp_for_other\"" | perl -pe "s|($config_name_for_other\d+=\")@|\1|"
                done
                ;;
            2) ## random order among this script's accounts
                for ((m = 0; m < ${#pt_pin[*]}; m++)); do
                    tmp_for_other=""
                    random_num_list=$(seq $user_sum | sort -R)
                    j=$((m + 1))
                    for n in $random_num_list; do
                        [[ $j -eq $n ]] && continue
                        tmp_for_other="$tmp_for_other@\${$config_name_my$n}"
                    done
                    echo "$config_name_for_other$j=\"$tmp_for_other\"" | perl -pe "s|($config_name_for_other\d+=\")@|\1|"
                done
                ;;
            *) ## default: account-number priority (fixed ascending order)
                for ((m = 0; m < ${#pt_pin[*]}; m++)); do
                    tmp_for_other=""
                    j=$((m + 1))
                    for ((n = 0; n < ${#pt_pin[*]}; n++)); do
                        [[ $m -eq $n ]] && continue
                        k=$((n + 1))
                        tmp_for_other="$tmp_for_other@\${$config_name_my$k}"
                    done
                    echo "$config_name_for_other$j=\"$tmp_for_other\"" | perl -pe "s|($config_name_for_other\d+=\")@|\1|"
                done
                ;;
            esac
        fi
    else
        echo "## 未运行过 $task_name.js 脚本,未产生日志"
    fi
}
# Print a header describing the chosen HelpType, then run export_codes_sub
# for every enabled task in the parallel arrays.
export_all_codes() {
    gen_pt_pin_array
    echo -e "\n# 从日志提取互助码,编号和配置文件中Cookie编号完全对应,如果为空就是所有日志中都没有。\n\n# 即使某个MyXxx变量未赋值,也可以将其变量名填在ForOtherXxx中,jtask脚本会自动过滤空值。\n"
    echo -n "# 你选择的互助码模板为:"
    case $HelpType in
    0)
        echo "所有账号助力码全部一致。"
        ;;
    1)
        echo "所有账号机会均等助力。"
        ;;
    2)
        echo "本套脚本内账号间随机顺序助力。"
        ;;
    *)
        echo "按账号编号优先。"
        ;;
    esac
    for ((i = 0; i < ${#name_js[*]}; i++)); do
        echo -e "\n## ${name_chinese[i]}:"
        export_codes_sub "${name_js[i]}" "${name_config[i]}" "${name_chinese[i]}"
    done
}
export_all_codes | perl -pe "{s|京东种豆|种豆|; s|crazyJoy任务|疯狂的JOY|}" |
<reponame>chlds/util<gh_stars>0
/*
Get a byte or bytes for one character based on UTF-8 out of the key board.
*/
# define CAR
# include <stdio.h>
# include "../../../incl/config.h"
/*
 * Recursively consume pending keyboard bytes into *argp and return the
 * number of bytes read (0 on bad arguments, no pending input, or error).
 * Relies on project helpers c_kbhit / c_getch / cat_bb from config.h;
 * their exact contracts are not visible here — confirm before changing.
 */
signed(__cdecl cli_i_r(signed char(**argp))) {
    auto signed char *b;    /* NOTE(review): unused in this body */
    auto signed r;
    auto signed short flag; /* NOTE(review): unused in this body */
    if(!argp) return(0x00);         /* no argument vector */
    if(!(*argp)) return(0x00);      /* no target buffer */
    if(!(c_kbhit())) return(0x00);  /* nothing pending on the keyboard */
    r = c_getch(0x00);
    if(!(EOF^(r))) {                /* i.e. r == EOF */
        printf("%s\n","<< Error at fn. c_getch()");
        return(0x00);
    }
    if(!(cat_bb(argp,r))) {         /* append the byte to the buffer */
        printf("%s \n","<< Error at fn. cat_bb()");
        return(0x00);
    }
    /* Count this byte and recurse for any further pending input. */
    return(0x01+(cli_i_r(argp)));
}
|
<reponame>danimartinc/chatroom-TFG<gh_stars>0
import { HttpClient } from '@angular/common/http';
import { Injectable } from '@angular/core';
import { UsuarioModel } from '../models/usuario.model';
import { map} from "rxjs/operators";
@Injectable({
  // Registered in the root injector; no need to list it in app.module.
  providedIn: 'root'
})
// Centralizes all authentication against the Firebase Identity Toolkit REST API.
export class AuthService {

  // Common prefix of both endpoints (signUp / signInWithPassword).
  private url = 'https://identitytoolkit.googleapis.com/v1/accounts:';
  // API key obtained from the Firebase project configuration.
  private apiKey = '<KEY>';
  // Current idToken; '' when no session exists.
  userToken: string;

  // Endpoints used below:
  //   sign up: https://identitytoolkit.googleapis.com/v1/accounts:signUp?key=[API_KEY]
  //   login:   https://identitytoolkit.googleapis.com/v1/accounts:signInWithPassword?key=[API_KEY]

  constructor( private http: HttpClient) {
    this.leerToken();
  }

  /** Logs out by destroying the stored token. */
  logout(){
    localStorage.removeItem('token');
  }

  /**
   * Signs an existing user in. Returns the raw API response observable;
   * the idToken is stored as a side effect (map is skipped on HTTP errors).
   */
  login( usuario: UsuarioModel){
    const authData = {
      ...usuario, // spreads the model's email/password properties
      returnSecureToken: true // must always be true for this API
    };
    // FIX: the template literal used to read `?key= ${...}` — the stray
    // space corrupted the API-key query parameter sent to Firebase.
    return this.http.post(
      `${ this.url }signInWithPassword?key=${ this.apiKey }`,
      authData
    ).pipe(
      map( respuesta =>{
        this.guardarToken( respuesta['idToken']);
        return respuesta;
      })
    );
  }

  /** Registers a new user; same flow as login() against the signUp endpoint. */
  nuevoUsuario( usuario: UsuarioModel){
    const authData = {
      ...usuario,
      returnSecureToken: true
    };
    // FIX: same stray-space bug as in login() (`?key= ${...}`).
    return this.http.post(
      `${ this.url }signUp?key=${ this.apiKey }`,
      authData
    ).pipe(
      map( respuesta =>{
        this.guardarToken( respuesta['idToken']);
        return respuesta;
      })
    );
  }

  /**
   * Stores the token plus an expiry timestamp in localStorage.
   * NOTE(review): setSeconds(3600) sets the seconds *field* to 3600, which
   * Date normalizes to roughly one hour ahead (minute-aligned). Presumably
   * a 1-hour expiry was intended — confirm.
   */
  private guardarToken( idToken: string){
    this.userToken = idToken;
    localStorage.setItem('token', idToken);
    let fechaActual = new Date();
    fechaActual.setSeconds( 3600 );
    localStorage.setItem('expira', fechaActual.getTime().toString());
  }

  /** Loads the token from localStorage into userToken ('' when absent). */
  private leerToken(){
    if( localStorage.getItem('token')){
      this.userToken = localStorage.getItem('token');
    }else{
      this.userToken = '';
    }
    return this.userToken;
  }

  /** True while a token exists and its stored expiry lies in the future. */
  estaAutenticado(): boolean {
    if( this.userToken.length < 2){
      return false; // no usable token stored
    }
    const expira = Number(localStorage.getItem('expira'));
    const fechaExpira = new Date();
    fechaExpira.setTime(expira);
    // Token is valid only while the expiry timestamp is still ahead of now.
    return fechaExpira > new Date();
  }
}
|
<!-- Position-preference form: three free-text position fields plus a
     start/end date range.
     NOTE(review): no action/method attributes, so the browser submits to
     the current URL with GET by default — confirm that is intended. -->
<form>
  <div>
    <label>Position 1: </label>
    <input type="text" name="position1" />
  </div>
  <div>
    <label>Position 2: </label>
    <input type="text" name="position2" />
  </div>
  <div>
    <label>Position 3: </label>
    <input type="text" name="position3" />
  </div>
  <div>
    <label>Start Date: </label>
    <input type="date" name="start_date" />
  </div>
  <div>
    <label>End Date: </label>
    <input type="date" name="end_date" />
  </div>
  <input type="submit" value="Submit" />
</form>
tmux kill-session -t roughbot-discord
|
import os
import stat
def is_file_accessible(file_path: str) -> bool:
    """Return True if ``file_path`` exists and is readable by the current user.

    Fix: the original checked only the owner-read bit (``stat.S_IRUSR``)
    regardless of who owns the file, so it gave wrong answers for files
    readable via group/other bits (or unreadable files owned by others).
    ``os.access`` asks the OS directly, which also covers the old
    ``OSError`` fallback (inaccessible metadata simply yields False).

    :param file_path: path to test
    :return: True when the file exists and is readable, else False
    """
    if not os.path.exists(file_path):
        return False  # File does not exist
    return os.access(file_path, os.R_OK)
#
# Cookbook: kubernetes-cluster
# License: Apache 2.0
#
# Copyright 2015-2016, Bloomberg Finance L.P.
#
# Default attributes for the private Docker registry managed by this cookbook.
default['kubernetes']['registry'].tap do |registry|
  # Set port for registry
  registry['port'] = '5000'
  # Set number of workers for Gunicorn
  registry['workers'] = '8'
  # Set storage location base for images and registry metadata
  # Only supports local storage on registry server right now
  # Full path where 'images' and 'registry' directories will be created
  registry['storage'] = '/var/docker-registry/'
end
|
<gh_stars>0
package com.imooc.o2o.service;
import com.imooc.o2o.BaseTest;
import com.imooc.o2o.dao.ShopDao;
import com.imooc.o2o.dto.ImageHolder;
import com.imooc.o2o.dto.ShopExecution;
import com.imooc.o2o.entity.Area;
import com.imooc.o2o.entity.PersonInfo;
import com.imooc.o2o.entity.Shop;
import com.imooc.o2o.entity.ShopCategory;
import com.imooc.o2o.enums.ShopStateEnum;
import com.imooc.o2o.exceptions.ShopOperationException;
import org.junit.Ignore;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.util.Date;
import static org.junit.Assert.assertEquals;
/**
* Created by Administrator on 2017/11/29.
*/
/**
 * Integration tests for ShopService (add / modify a shop with an image).
 * Created by Administrator on 2017/11/29.
 *
 * NOTE(review): both tests are @Ignore'd and depend on absolute local image
 * paths, so they only run on a machine with those files present.
 */
public class ShopServiceTest extends BaseTest {

    @Autowired
    private ShopDao shopDao;
    @Autowired
    private ShopService shopService;

    /** Adds a fully populated shop with an image; expects CHECK state back. */
    @Test
    @Ignore
    public void testAddShop() throws ShopOperationException,FileNotFoundException{
        Shop shop = new Shop();
        PersonInfo owner = new PersonInfo();
        Area area = new Area();
        ShopCategory shopCategory = new ShopCategory();
        owner.setUserId(1L);
        area.setAreaId(2);
        shopCategory.setShopCategoryId(1L);
        shop.setOwner(owner);
        shop.setArea(area);
        shop.setShopCategory(shopCategory);
        shop.setShopName("测试的店铺3");
        shop.setShopDesc("test3");
        shop.setShopAddr("test3");
        shop.setPhone("test3");
        shop.setCreateTime(new Date());
        shop.setEnableStatus(ShopStateEnum.CHECK.getState());
        shop.setAdvice("审核中");
        // Hard-coded local image path — macOS layout.
        File shopImg = new File("/Users/baidu/work/image/xiaohuangren.jpg");
        InputStream is = new FileInputStream(shopImg);
        ImageHolder imageHolder = new ImageHolder(shopImg.getName(),is);
        ShopExecution se = shopService.addShop(shop,imageHolder);
        assertEquals(ShopStateEnum.CHECK,se.getState());
    }

    /** Renames shop #2 and swaps its image; prints the new image path. */
    @Test
    @Ignore
    public void testModifyShop() throws ShopOperationException,FileNotFoundException{
        Shop shop = new Shop();
        shop.setShopId(2L);
        shop.setShopName("修改后的店铺名字");
        // Hard-coded local image path — Windows layout.
        File shopImg = new File("F:/IdeaProjects/img/dabai.jpg");
        InputStream is = new FileInputStream(shopImg);
        ImageHolder imageHolder = new ImageHolder("dabai.jpg",is);
        ShopExecution shopExecution = shopService.modifyShop(shop,imageHolder);
        System.out.println("新的图片地址为:"+shopExecution.getShop().getShopImg());
    }
}
|
#include "main.h"
#include "project_hal.h"
#include "max30100_for_stm32_hal.h"
// Private vars
I2C_HandleTypeDef hI2C; // I2C Handler
TIM_HandleTypeDef hTIMER; // TIMER Handler
UART_HandleTypeDef hUART; // UART Handler
// Private functions declaration
void Error_Handler(void);
void SystemClock_Config(void);
static void MX_GPIO_Init(void);
static void MX_I2C1_Init(void);
static void MX_TIM2_Init(void);
static void MX_USART2_UART_Init(void);
/***********************************************************************
 * *
 * BELOW THE LIST OF MY HAL FUNCTIONS *
 * *
 ***********************************************************************/
/* Thin project wrappers around the ST HAL, so application code does not
 * touch HAL handles (hI2C / hTIMER / hUART) directly. */

/* Initialize the HAL library. */
void START_HAL(void){
    HAL_Init();
}
/* Configure the system/peripheral clocks (see SystemClock_Config below). */
void CONFIG_CLOCK(void){
    SystemClock_Config();
}
/* Initialize GPIO ports and the LED pin. */
void START_GPIO(void){
    MX_GPIO_Init();
}
/* Initialize the I2C1 peripheral. */
void START_I2C(void){
    MX_I2C1_Init();
}
/* Initialize TIM2 and start it with interrupts enabled. */
void START_TIMER(void){
    MX_TIM2_Init();
    HAL_TIM_Base_Start_IT(&hTIMER);
}
/* Initialize USART2. */
void START_UART(void){
    MX_USART2_UART_Init();
}
/* Bring the MAX30100 up in a quiescent state: defaults for sample rate and
 * pulse width, LEDs off, idle mode. */
void START_MAX30100(void){
    MAX30100_Init(&hI2C, &hUART);
    MAX30100_SetSpO2SampleRate(MAX30100_SPO2SR_DEFAULT);
    MAX30100_SetLEDPulseWidth(MAX30100_LEDPW_DEFAULT);
    MAX30100_SetLEDCurrent(MAX30100_LEDCURRENT_0_0, MAX30100_LEDCURRENT_0_0);
    MAX30100_SetMode(MAX30100_IDLE_MODE);
}
/* Stop measuring: LEDs off, back to idle mode. */
void MAX30100_Stop(void){
    MAX30100_SetLEDCurrent(MAX30100_LEDCURRENT_0_0, MAX30100_LEDCURRENT_0_0);
    MAX30100_SetMode(MAX30100_IDLE_MODE);
}
/* Start an SpO2 measurement with default LED currents. */
void MAX30100_Start(void){
    MAX30100_SetLEDCurrent(MAX30100_LEDCURRENT_DEFAULT, MAX30100_LEDCURRENT_DEFAULT);
    MAX30100_SetMode(MAX30100_SPO2_MODE);
}
/* NOTE(review): the pin state returned by HAL_GPIO_ReadPin is discarded, so
 * this wrapper has no observable effect as written — it should probably
 * return GPIO_PinState (interface change; not fixed here). */
void ReadPin(GPIO_TypeDef *GPIOx, uint16_t GPIO_Pin){
    HAL_GPIO_ReadPin(GPIOx, GPIO_Pin);
}
/* Drive a GPIO pin high/low. */
void WritePin(GPIO_TypeDef *GPIOx, uint16_t GPIO_Pin, GPIO_PinState PinState){
    HAL_GPIO_WritePin(GPIOx, GPIO_Pin, PinState);
}
/* NOTE(review): sizeof(pData) is the size of the POINTER, not of the caller's
 * buffer, so only sizeof(uint8_t*) bytes are ever transferred. The same
 * defect exists in UART_Transmit, I2C_Receive and I2C_Transmit below; a real
 * fix needs a length parameter (interface change; flagged, not changed). */
void UART_Receive(uint8_t *pData){
    HAL_UART_Receive_IT(&hUART, pData, sizeof(pData));
}
/* Blocking UART transmit (same sizeof(pointer) defect as above). */
void UART_Transmit(uint8_t *pData, uint16_t timeout){
    HAL_UART_Transmit(&hUART, pData, sizeof(pData), timeout);
}
/* Interrupt-driven I2C master receive (same sizeof(pointer) defect). */
void I2C_Receive(uint16_t DevAddress, uint8_t * pData){
    HAL_I2C_Master_Receive_IT (&hI2C, DevAddress, pData, sizeof(pData));
}
/* Interrupt-driven I2C master transmit (same sizeof(pointer) defect). */
void I2C_Transmit(uint16_t DevAddress, uint8_t * pData){
    HAL_I2C_Master_Transmit_IT (&hI2C, DevAddress, pData, sizeof(pData));
}
/***********************************************************************
 * *
 * BELOW LIST OF INTERRUPTIONS *
 * *
 ***********************************************************************/
/* HAL callback overrides: each forwards to an application-level handler
 * (Interruption_* functions are declared elsewhere in the project). */

/* TIM2 period-elapsed interrupt. */
void HAL_TIM_PeriodElapsedCallback(TIM_HandleTypeDef * htim)
{
    Interruption_TIM2();
}
/* UART receive-complete interrupt. */
void HAL_UART_RxCpltCallback(UART_HandleTypeDef *huart)
{
    Interruption_UART();
}
/* I2C master receive-complete interrupt. */
void HAL_I2C_MasterRxCpltCallback(I2C_HandleTypeDef * hi2c)
{
    Interruption_I2C();
}
/***********************************************************************
* *
* BELOW LIST OF HAL USED IN THE PROJECT PROVIDED BY THE MANUFACTURER *
* *
***********************************************************************/
// System Clock Configuration: run from the MSI oscillator (range 5, PLL off),
// undivided AHB/APB buses, and clock USART2/I2C1 from PCLK1.
void SystemClock_Config(void)
{
    RCC_OscInitTypeDef RCC_OscInitStruct = {0};
    RCC_ClkInitTypeDef RCC_ClkInitStruct = {0};
    RCC_PeriphCLKInitTypeDef PeriphClkInit = {0};
    __HAL_PWR_VOLTAGESCALING_CONFIG(PWR_REGULATOR_VOLTAGE_SCALE1);
    // MSI on, PLL unused.
    RCC_OscInitStruct.OscillatorType = RCC_OSCILLATORTYPE_MSI;
    RCC_OscInitStruct.MSIState = RCC_MSI_ON;
    RCC_OscInitStruct.MSICalibrationValue = 0;
    RCC_OscInitStruct.MSIClockRange = RCC_MSIRANGE_5;
    RCC_OscInitStruct.PLL.PLLState = RCC_PLL_NONE;
    if (HAL_RCC_OscConfig(&RCC_OscInitStruct) != HAL_OK)
        Error_Handler();
    // SYSCLK from MSI; all bus dividers at /1; zero flash wait states.
    RCC_ClkInitStruct.ClockType = RCC_CLOCKTYPE_HCLK|RCC_CLOCKTYPE_SYSCLK|RCC_CLOCKTYPE_PCLK1|RCC_CLOCKTYPE_PCLK2;
    RCC_ClkInitStruct.SYSCLKSource = RCC_SYSCLKSOURCE_MSI;
    RCC_ClkInitStruct.AHBCLKDivider = RCC_SYSCLK_DIV1;
    RCC_ClkInitStruct.APB1CLKDivider = RCC_HCLK_DIV1;
    RCC_ClkInitStruct.APB2CLKDivider = RCC_HCLK_DIV1;
    if (HAL_RCC_ClockConfig(&RCC_ClkInitStruct, FLASH_LATENCY_0) != HAL_OK)
        Error_Handler();
    // Peripheral kernel clocks for USART2 and I2C1.
    PeriphClkInit.PeriphClockSelection = RCC_PERIPHCLK_USART2|RCC_PERIPHCLK_I2C1;
    PeriphClkInit.Usart2ClockSelection = RCC_USART2CLKSOURCE_PCLK1;
    PeriphClkInit.I2c1ClockSelection = RCC_I2C1CLKSOURCE_PCLK1;
    if (HAL_RCCEx_PeriphCLKConfig(&PeriphClkInit) != HAL_OK)
        Error_Handler();
}
// GPIO Initialization Function: enable port clocks, park unused pins in
// analog mode (presumably for power saving — confirm), and configure the
// LED pin as a push-pull output, initially low.
static void MX_GPIO_Init(void)
{
    GPIO_InitTypeDef GPIO_InitStruct = {0};
    __HAL_RCC_GPIOC_CLK_ENABLE();
    __HAL_RCC_GPIOA_CLK_ENABLE();
    __HAL_RCC_GPIOB_CLK_ENABLE();
    // Start with the LED off.
    HAL_GPIO_WritePin(LED_PIN_GPIO_Port, LED_PIN, GPIO_PIN_RESET);
    /*Configure GPIO pins : PA0 PA1 PA2 PA3 PA4 PA5 PA6 PA7 PA8 PA11 PA12 */
    GPIO_InitStruct.Pin = GPIO_PIN_0|GPIO_PIN_1|GPIO_PIN_2|GPIO_PIN_3|GPIO_PIN_4|GPIO_PIN_5|GPIO_PIN_6|GPIO_PIN_7|GPIO_PIN_8|GPIO_PIN_11|GPIO_PIN_12;
    GPIO_InitStruct.Mode = GPIO_MODE_ANALOG;
    GPIO_InitStruct.Pull = GPIO_NOPULL;
    HAL_GPIO_Init(GPIOA, &GPIO_InitStruct);
    /*Configure GPIO pins : PB0 PB1 PB3 PB4 PB5 */
    GPIO_InitStruct.Pin = GPIO_PIN_0|GPIO_PIN_1|GPIO_PIN_3|GPIO_PIN_4|GPIO_PIN_5;
    GPIO_InitStruct.Mode = GPIO_MODE_ANALOG;
    GPIO_InitStruct.Pull = GPIO_NOPULL;
    HAL_GPIO_Init(GPIOB, &GPIO_InitStruct);
    // LED pin: push-pull output, no pull, low speed.
    GPIO_InitStruct.Pin = LED_PIN;
    GPIO_InitStruct.Mode = GPIO_MODE_OUTPUT_PP;
    GPIO_InitStruct.Pull = GPIO_NOPULL;
    GPIO_InitStruct.Speed = GPIO_SPEED_FREQ_LOW;
    HAL_GPIO_Init(LED_PIN_GPIO_Port, &GPIO_InitStruct);
}
// I2C1 Initialization Function: 7-bit addressing, clock stretching enabled,
// analog filter on, no digital filter. Timing register value is presumably
// CubeMX-generated for the configured kernel clock — confirm if it changes.
static void MX_I2C1_Init(void)
{
    hI2C.Instance = I2C1;
    hI2C.Init.Timing = 0x00000708;
    hI2C.Init.OwnAddress1 = 0;
    hI2C.Init.AddressingMode = I2C_ADDRESSINGMODE_7BIT;
    hI2C.Init.DualAddressMode = I2C_DUALADDRESS_DISABLE;
    hI2C.Init.OwnAddress2 = 0;
    hI2C.Init.OwnAddress2Masks = I2C_OA2_NOMASK;
    hI2C.Init.GeneralCallMode = I2C_GENERALCALL_DISABLE;
    hI2C.Init.NoStretchMode = I2C_NOSTRETCH_DISABLE;
    if (HAL_I2C_Init(&hI2C) != HAL_OK)
        Error_Handler();
    if (HAL_I2CEx_ConfigAnalogFilter(&hI2C, I2C_ANALOGFILTER_ENABLE) != HAL_OK)
        Error_Handler();
    if (HAL_I2CEx_ConfigDigitalFilter(&hI2C, 0) != HAL_OK)
        Error_Handler();
}
// TIM2 Initialization Function: free-running up-counter on the internal
// clock (prescaler 0, full 16-bit period), slave mode and trigger output
// disabled.
static void MX_TIM2_Init(void)
{
    TIM_ClockConfigTypeDef sClockSourceConfig = {0};
    TIM_SlaveConfigTypeDef sSlaveConfig = {0};
    TIM_MasterConfigTypeDef sMasterConfig = {0};
    hTIMER.Instance = TIM2;
    hTIMER.Init.Prescaler = 0;
    hTIMER.Init.CounterMode = TIM_COUNTERMODE_UP;
    hTIMER.Init.Period = 65535;
    hTIMER.Init.ClockDivision = TIM_CLOCKDIVISION_DIV1;
    hTIMER.Init.AutoReloadPreload = TIM_AUTORELOAD_PRELOAD_DISABLE;
    if (HAL_TIM_Base_Init(&hTIMER) != HAL_OK)
        Error_Handler();
    sClockSourceConfig.ClockSource = TIM_CLOCKSOURCE_INTERNAL;
    if (HAL_TIM_ConfigClockSource(&hTIMER, &sClockSourceConfig) != HAL_OK)
        Error_Handler();
    sSlaveConfig.SlaveMode = TIM_SLAVEMODE_DISABLE;
    sSlaveConfig.InputTrigger = TIM_TS_ITR0;
    if (HAL_TIM_SlaveConfigSynchro(&hTIMER, &sSlaveConfig) != HAL_OK)
        Error_Handler();
    sMasterConfig.MasterOutputTrigger = TIM_TRGO_RESET;
    sMasterConfig.MasterSlaveMode = TIM_MASTERSLAVEMODE_DISABLE;
    if (HAL_TIMEx_MasterConfigSynchronization(&hTIMER, &sMasterConfig) != HAL_OK)
        Error_Handler();
}
// USART2 Initialization Function: 9600 baud, 8N1, no flow control,
// 16x oversampling, TX and RX both enabled.
static void MX_USART2_UART_Init(void)
{
    hUART.Instance = USART2;
    hUART.Init.BaudRate = 9600;
    hUART.Init.WordLength = UART_WORDLENGTH_8B;
    hUART.Init.StopBits = UART_STOPBITS_1;
    hUART.Init.Parity = UART_PARITY_NONE;
    hUART.Init.Mode = UART_MODE_TX_RX;
    hUART.Init.HwFlowCtl = UART_HWCONTROL_NONE;
    hUART.Init.OverSampling = UART_OVERSAMPLING_16;
    hUART.Init.OneBitSampling = UART_ONE_BIT_SAMPLE_DISABLE;
    hUART.AdvancedInit.AdvFeatureInit = UART_ADVFEATURE_NO_INIT;
    if (HAL_UART_Init(&hUART) != HAL_OK)
        Error_Handler();
}
// This function is executed in case of error occurrence:
// disable all interrupts and halt forever (watchdog, if any, will reset).
void Error_Handler(void)
{
    __disable_irq();
    while (1){};
}
|
<reponame>ironhack-project-three/ironhack-project-three<filename>client/src/Pages/createWine.js
import React from "react";
import AddWine from "../components/AddWine";
// Page-level wrapper that simply renders the AddWine form component.
export default function createWine() {
  return (
    <div>
      <AddWine />
    </div>
  );
}
|
<gh_stars>1-10
// Exclusive upper bound for generated values.
const MAX = 10000

// Return a pseudo-random integer in the range [0, MAX).
const generate = () => Math.floor(Math.random() * MAX)

// Export using the plain object-literal form (other equivalent forms:
// module.exports.generate = ..., exports.generate = ...).
module.exports = { generate }
|
<reponame>JoonasKajava/AdventureGame
#include "stdafx.h"
#include "Boss.h"
#include "Enemy.h"
#include "GameContext.h"
/* Build the boss: combat stats plus a sprite cut from the dragon sheet.
 * NOTE(review): CharacterTexture is heap-allocated and the pointer is not
 * stored anywhere, so the texture is never freed (leak).
 * NOTE(review): Luck is assigned twice (0, then 30); the second write wins. */
Boss::Boss()
{
    Name = "<NAME>";
    sf::Texture* CharacterTexture = new sf::Texture();
    this->Health = 30;
    this->MaxHealth = 30;
    Attack = 6;
    Defence = 3;
    Speed = 2;
    Luck = 0;
    MovementSpeed = 0.00005;
    Luck = 30;
    // 32x32 tile at sheet position (3,3).
    CharacterTexture->loadFromFile("Graphics/Dragons.png", sf::IntRect(3 * 32, 3 * 32, 32, 32));
    Body = sf::Sprite(*CharacterTexture);
    // Spawn at tile (45, 7) in pixel coordinates.
    Body.setPosition(sf::Vector2f(45 * 32, 7 * 32));
}
Boss::~Boss()
{
}

/* On death: run the generic enemy death handling, then show the victory
 * end screen, flag the game as over and switch to the win music. */
void Boss::Die()
{
    Enemy::Die();
    GameContext::instance->endScreen.SetText(true);
    GameContext::instance->GameOver = true;
    GameContext::instance->audioManager.SetMusic(AudioManager::Win);
}
|
# Train a logistic-regression model on data.csv and compute class
# probabilities for a held-out test split.
import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression

# Load the dataset
df = pd.read_csv('data.csv')

# Separate the target variable and input variables
X = df.drop('SuccessProb', axis=1)
y = df['SuccessProb']

# Split data into train and test sets (80/20, fixed seed for reproducibility)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# Build the logistic regression model
# NOTE(review): LogisticRegression is a classifier; the column name
# 'SuccessProb' suggests a continuous target — confirm it holds discrete
# class labels, otherwise a regression model is needed.
model = LogisticRegression()
model.fit(X_train, y_train)

# Make predictions
# NOTE(review): predict_proba returns per-class probabilities; the result
# is not used or persisted anywhere in this snippet.
predictions = model.predict_proba(X_test)
var middle_names = ["anal","anus","arse","ass","ass-fucker","asses","asshole","ballbag","balls","ballsack","bastard","beastiality","bellend",
"bestiality","biatch","bitch","bitcher","bitches","bitchin","bitching","bloody","blow job","blowjob","bollock","boner","boob","bugger","bum",
"bunny fucker","butt","butthole","buttmunch","buttplug","carpet muncher","cawk","cipa","clit","clitoris","clits","cock","cock-sucker",
"cockface","cockhead","cockmunch","cockmuncher","cocksuck","cocksucked","cocksucker","cocksucking","cocksucks","cox","crap","cum","cummer",
"cumming","cums","cumshot","cunilingus","cunillingus","cunnilingus","cunt","cuntlick","cuntlicker","cuntlicking","cunts","cyberfuck","cyberfucked",
"cyberfucker","cyberfucking","damn","dick","dickhead","dildo","dog-fucker","dogging","donkeyribber","douche","ejaculate","ejaculated","ejaculating",
"ejaculation","fag","fagging","faggot","faggs","fagot","fagots","fags","fanny","fannyflaps","fannyfucker","fatass","feck","fecker","felching",
"fellate","fellatio","fingerfuck","fingerfucked","fingerfucker","fingerfuckers","fingerfucking","fistfuck","fistfucked","fistfucker","fistfucking",
"fistfucks","flange","fuck","fucka","fucked","fucker","fuckers","fuckhead","fuckheads","fucking","fuckings","fuckingshitmotherfucker","fucks","fuckwhit",
"fuckwit","fudge packer","fudgepacker","gangbang","gangbanged","gangbangs","gaylord","gaysex","goatse","God","god-dam","god-damned","goddamn",
"goddamned","hardcoresex","hell","homo","hore","horniest","horny","hotsex","jack-off","jackoff","jerk-off","jism","jiz","jizm","jizz","knob",
"knobead","knobed","knobend","knobhead","knobjocky","knobjokey","labia","lmfao","lust","lusting","masochist","master-bate","masterbate","masterbation",
"masturbate","mo-fo","mofo","mothafuck","mothafucka","mothafucked","mothafucker","mothafuckin","mothafucking","mother fucker","motherfuck",
"motherfucked","motherfucker","motherfuckers","motherfuckin","motherfucking","motherfuckka","muff","mutha","muthafecker","muthafuckker","muther",
"mutherfucker","nazi","nob","nob jokey","nobhead","nobjocky","nobjokey","numbnuts","nutsack","orgasm","pawn","pecker","penis","penisfucker","phonesex",
"pigfucker","piss","pissed","pisser","pisses","pissflaps","pissin","pissing","pissoff","poop","porn","porno","pornography","pornos","prick","pricks",
"pron","pube","pussy","rectum","retard","rimming","sadist","schlong","screwing","scrote","scrotum","semen","sex","shag","shagger","shaggin","shagging",
"shemale","shit","shitdick","shite","shitfuck","shitfull","shithead","shiting","shitted","shitter","shitting","shitty","skank","slut","smegma","smut",
"snatch","son-of-a-bitch","spac","spunk","teet","teez","testicle","tit","titfuck","tits","tittiefucker","titties","tittyfuck","tittywank","titwank",
"tosser","turd","twat","twathead","twatty","vagina","viagra","vulva","wang","wank","wanker","wanky","whore","willy","xrated"];
walk(document.body);
// Depth-first traversal of the DOM subtree rooted at `node`; every text
// node found is handed to handleText() for rewriting.
// Based on: http://is.gd/mwZp7E
function walk(node)
{
var kid, following;
var type = node.nodeType;
if ( type === 1 || type === 9 || type === 11 ) // Element, Document, Document fragment
{
kid = node.firstChild;
while ( kid )
{
// Grab the sibling first: walk() may mutate the tree under `kid`.
following = kid.nextSibling;
walk(kid);
kid = following;
}
}
else if ( type === 3 ) // Text node
{
handleText(node);
}
}
// Rewrites a text node, inserting a random "middle name" (from the
// middle_names word list) into mentions of Tony Abbott and Joe Hockey.
// Fix: the "Mr." patterns now escape the dot. The previous unescaped
// /\bMr. Abbott\b/ matched ANY character after "Mr", so e.g.
// "Mrs Abbott" was corrupted to "Mr. '...' Abbott".
function handleText(textNode)
{
var v = textNode.nodeValue;
v = v.replace(/\bTony Abbott\b/g, "Tony '" + getName() + "' Abbott");
v = v.replace(/\bMr Abbott\b/g, "Mr '" + getName() + "' Abbott");
v = v.replace(/\bMr\. Abbott\b/g, "Mr. '" + getName() + "' Abbott");
v = v.replace(/^Abbott\b/g, "'" + getName() + "' Abbott");
v = v.replace(/\bAbbott Government\b/g, "Abbott '" + getName() + "' Government");
// While we're here, Joe gets a bit too
v = v.replace(/\bJoe Hockey\b/g, "Joe '" + getName() + "' Hockey");
v = v.replace(/\bMr Hockey\b/g, "Mr '" + getName() + "' Hockey");
v = v.replace(/\bMr\. Hockey\b/g, "Mr. '" + getName() + "' Hockey");
v = v.replace(/^Hockey\b/g, "'" + getName() + "' Hockey");
textNode.nodeValue = v;
}
// Returns a uniformly random entry from the middle_names word list.
function getName()
{
var index = Math.floor(Math.random() * middle_names.length);
return middle_names[index];
}
|
import React from 'react';
// Root application component: renders a static top navigation bar.
// Stateless; takes no props.
class App extends React.Component {
render() {
return (
<div>
<nav>
<a href="/">Home</a> |
<a href="/about">About Us</a> |
<a href="/contact">Contact</a>
</nav>
</div>
);
}
}
export default App; |
#!/bin/bash
# MADlib OS X installer postflight script.
# $0 - Script Path, $1 - Package Path, $2 - Target Location, and $3 - Target Volume
MADLIB_VERSION=1.8dev
# Preserve the previous bin/doc trees, then remove them so the symlinks
# below can be created cleanly. Errors are ignored on a fresh install.
find $2/usr/local/madlib/bin -type d -exec cp -RPf {} $2/usr/local/madlib/old_bin \; 2>/dev/null
find $2/usr/local/madlib/bin -depth -type d -exec rm -r {} \; 2>/dev/null
find $2/usr/local/madlib/doc -type d -exec cp -RPf {} $2/usr/local/madlib/old_doc \; 2>/dev/null
find $2/usr/local/madlib/doc -depth -type d -exec rm -r {} \; 2>/dev/null
# Point Current/bin/doc at the freshly installed version.
ln -nsf $2/usr/local/madlib/Versions/$MADLIB_VERSION $2/usr/local/madlib/Current
ln -nsf $2/usr/local/madlib/Current/bin $2/usr/local/madlib/bin
ln -nsf $2/usr/local/madlib/Current/doc $2/usr/local/madlib/doc
# Fix: test the backup dir under the target location ($2), matching the
# paths every other command uses, instead of the live filesystem root.
if [ -d "$2/usr/local/madlib/Versions.bak" ]
then
mv -f $2/usr/local/madlib/Versions.bak/* $2/usr/local/madlib/Versions/
rm -rf $2/usr/local/madlib/Versions.bak
fi
|
/* TestFrame.cpp */
//----------------------------------------------------------------------------------------
//
// Project: UserPreference 1.00
//
// License: Boost Software License - Version 1.0 - August 17th, 2003
//
// see http://www.boost.org/LICENSE_1_0.txt or the local copy
//
// Copyright (c) 2016 <NAME>. All rights reserved.
//
//----------------------------------------------------------------------------------------
#include <inc/TestFrame.h>
#include <CCore/inc/video/LayoutCombo.h>
//#include <CCore/inc/Print.h>
namespace App {
/* class TestWindow */
// Static help text displayed in the info widget of the test panel.
String TestWindow::InfoText()
{
return
"This is a test control panel.\n"
"You can see user preferences here.\n"
"Use main window to adjust sizes, colors etc..."_str;
}
// Item list (one entry per line) shown in the text_list widget.
String TestWindow::ListText()
{
return
"button\n"
"check box\n"
"radio button\n"
"text line\n"
"light\n"
"scroll\n"
"line edit"_str;
}
// Radio-group callback: repaint the light with the color matching the
// newly selected radio id (1=Red, 2=Green, 3=Blue); other ids ignored.
void TestWindow::changeColor(int new_id,int)
{
if( new_id==1 )
light.setFace(Red);
else if( new_id==2 )
light.setFace(Green);
else if( new_id==3 )
light.setFace(Blue);
}
// Check-box callback: switch the light and mirror the state as an
// alert indication on the line-edit widget.
void TestWindow::lightOnOff(bool check)
{
light.turn(check);
edit.alert(check);
}
// Knob callback: copy the current line-edit content into the text widget.
void TestWindow::knobPressed()
{
text.setText(String(edit.getText()));
}
// Switch callback: enable/disable every interactive child widget in one
// shot (the controlling switch itself is deliberately not included).
void TestWindow::enableAll(bool en)
{
btn.enable(en);
alt.enable(en);
knob.enable(en);
check.enable(en);
rad1.enable(en);
rad2.enable(en);
rad3.enable(en);
xscroll.enable(en);
info.enable(en);
edit.enable(en);
text_list.enable(en);
label1.enable(en);
label2.enable(en);
label3.enable(en);
label.enable(en);
text.enable(en);
}
// Scroll callback: select the knob face glyph by scroll position.
void TestWindow::setFace(ulen index)
{
knob.setFace(KnobShape::FaceType(index));
}
// Button callback: advance the progress bar one step with a "ping"
// animation; once full, reset to zero and stop the animation.
void TestWindow::push()
{
auto pos=progress.getPos();
if( pos>=progress.getTotal() )
{
progress.setPos(0);
progress.stopPing();
}
else
{
progress.setPosPing(pos+1);
}
}
// Opens the modal "enable" frame (if not already alive) and disables
// reaction of this frame until unshade() runs.
void TestWindow::shade()
{
if( enable_frame.isDead() ) enable_frame.create(getFrame(),"TestFrame enable"_str);
disableFrameReact();
}
// Called when the enable frame is destroyed: re-enable frame reaction.
void TestWindow::unshade()
{
enableFrameReact();
}
// Builds the test panel: constructs every demo widget against the shared
// smart config, wires signal connectors, then inserts the widgets into
// the window list and performs initial setup.
TestWindow::TestWindow(SubWindowHost &host,const UserPreference &pref_)
: ComboWindow(host),
pref(pref_),
btn(wlist,pref.getSmartConfig(),"Push"_str),
knob(wlist,pref.getSmartConfig(),KnobShape::FaceOk),
check(wlist,pref.getSmartConfig()),
swtch(wlist,pref.getSmartConfig(),true),
swtch1(wlist,pref.getSmartConfig()),
swtch2(wlist,pref.getSmartConfig()),
alt(wlist,pref.getSmartConfig()),
rad1(wlist,1,pref.getSmartConfig()),
rad2(wlist,2,pref.getSmartConfig()),
rad3(wlist,3,pref.getSmartConfig()),
xscroll(wlist,pref.getSmartConfig()),
info(wlist,pref.getSmartConfig(),InfoFromString(InfoText())),
edit(wlist,pref.getSmartConfig()),
text_list(wlist,pref.getSmartConfig(),InfoFromString(ListText())),
label1(wlist,pref.getSmartConfig(),"Red"_str,AlignX_Left),
label2(wlist,pref.getSmartConfig(),"Green"_str,AlignX_Left),
label3(wlist,pref.getSmartConfig(),"Blue"_str,AlignX_Left),
label(wlist,pref.getSmartConfig(),"On/Off"_str,AlignX_Left),
text(wlist,pref.getSmartConfig(),"<none>"_str),
xsingle(wlist,pref.getSmartConfig()),
ysingle(wlist,pref.getSmartConfig()),
xdouble(wlist,pref.getSmartConfig()),
ydouble(wlist,pref.getSmartConfig()),
contour(wlist,pref.getSmartConfig()),
text_contour(wlist,pref.getSmartConfig(),"Select color"_str),
light(wlist,pref.getSmartConfig(),Red),
progress(wlist,pref.getSmartConfig()),
btn_shade(wlist,pref.getSmartConfig(),"Shade"_str),
enable_frame(host.getFrameDesktop(),pref.getSmartConfig(),pref.updated),
// Signal wiring: member callbacks above, bound to widget signals.
connector_group_changed(this,&TestWindow::changeColor,group.changed),
connector_check_changed(this,&TestWindow::lightOnOff,check.changed),
connector_knob_pressed(this,&TestWindow::knobPressed,knob.pressed),
connector_swtch_changed(this,&TestWindow::enableAll,swtch.changed),
connector_xscroll_changed(this,&TestWindow::setFace,xscroll.changed),
connector_btn_pressed(this,&TestWindow::push,btn.pressed),
connector_btn_shade_pressed(this,&TestWindow::shade,btn_shade.pressed),
// NOTE(review): member name has a typo ("destoyed"); it is declared in
// the header, so it cannot be renamed from this file alone.
connector_enable_frame_destoyed(this,&TestWindow::unshade,enable_frame.destroyed)
{
wlist.insTop(swtch,btn,alt,rad1,rad2,rad3,check,
edit,knob,xscroll,info,text_list,
label1,label2,label3,label,text,xsingle,ysingle,xdouble,ydouble,
contour,text_contour,light,swtch1,swtch2,progress,btn_shade);
// The three radios act as one exclusive group.
group.add(rad1,rad2,rad3);
xscroll.setRange(KnobShape::FaceLim,1);
edit.setText("To find our long-forgotten gold."_c);
enable_frame.add("Ok"_str,Button_Ok).setInfo(InfoFromString("Press Ok to enable"_str));
}
// All members clean up via their own destructors.
TestWindow::~TestWindow()
{
}
// drawing
// Arranges the widgets: rows (lay1..lay5) are composed top-to-bottom and
// placed into the pane with the preference-defined spacing.
void TestWindow::layout()
{
Coord space=pref.get().space_dxy;
// swtch , btn , alt , progress
LayToRightCenter lay1{Lay(swtch),Lay(btn),Lay(alt),Lay(progress)};
// text_contour , label , rad
LayInner lay2(text_contour,LayToBottomLeft(BoxedWindow(rad1,label1),BoxedWindow(rad2,label2),BoxedWindow(rad3,label3)));
// check , label , light , swtch1 , swtch2
LayToRightCenter lay3(BoxedWindow(check,label),Lay(light),Lay(swtch1),LayLeft(swtch2));
// ysingle , knob , ydouble , xscroll
LayToRight lay4{Lay(ysingle),LayCenterY(knob),Lay(ydouble),LayCenterY(xscroll)};
// contour , info
LayInnerSpace lay5(contour,Lay(info),0);
// lay
LayToBottom lay(lay1,
LayAlignLeft(lay2),
lay3,
Lay(xsingle),
Lay(edit),
Lay(text),
Lay(xdouble),
lay4,
LayAlignLeft(lay5),
LayLeft(text_list),
LayAlignTop(LayLeft(btn_shade)));
ExtLay(lay).setPlace(getPane(),space);
}
// Fills the background with the preference-configured back color.
void TestWindow::drawBack(DrawBuf buf,DrawParam &draw_param) const
{
VColor back=pref.get().back;
draw_param.erase(buf,back);
}
/* class TestClient */
// Closes any open cascade submenu and clears the top-menu selection.
void TestClient::menuOff()
{
if( cascade_menu.isAlive() ) cascade_menu.destroy();
menu.unselect();
}
// Top-menu callback: opens the cascade submenu matching the selected
// top-level item at the given point. Id 4 (@Modules) is marked
// MenuDisabled in the constructor, so presumably it never arrives here.
void TestClient::menu_selected(int id,Point point)
{
// Replace any submenu that is already open.
if( cascade_menu.isAlive() ) cascade_menu.destroy();
switch( id )
{
case 1 : // @File
{
cascade_menu.create(getFrame(),menu_file_data,point);
}
break;
case 2 : // @Edit
{
cascade_menu.create(getFrame(),menu_edit_data,point);
}
break;
case 3 : // @Options
{
cascade_menu.create(getFrame(),menu_options_data,point);
}
break;
case 5 : // @Window
{
cascade_menu.create(getFrame(),menu_window_data,point);
}
break;
case 6 : // @Long menu
{
cascade_menu.create(getFrame(),menu_long_data,point);
}
break;
}
}
// Submenu callback: closes the menu, returns focus to the test pane and
// executes the chosen command (101=New, 102=Open, 105=Exit; other ids
// are currently no-ops).
void TestClient::cascade_menu_selected(int id,Point point)
{
menuOff();
test.setFocus();
switch( id )
{
case 101 : // File/New: open the file dialog in new-file mode
{
if( !file_window.isAlive() )
{
file_window.setNewFile(true);
file_window.create(point,"Select file"_str);
}
}
break;
case 102 : // File/Open: open the file dialog in open mode
{
if( !file_window.isAlive() )
{
file_window.setNewFile(false);
file_window.create(point,"Select file"_str);
}
}
break;
case 105 : // File/Exit
{
askFrameClose();
}
break;
}
}
// Forwards key presses from the cascade submenu to the top menu bar
// (enables left/right navigation between top-level items).
void TestClient::cascade_menu_pressed(VKey vkey,KeyMod kmod)
{
menu.put_Key(vkey,kmod);
}
// Builds the client: menu bar + test panel + cascade submenu + file
// dialog, wires menu signals, then populates the menu data structures.
TestClient::TestClient(SubWindowHost &host,const UserPreference &pref,Signal<> &update)
: ComboWindow(host),
menu(wlist,pref.getSmartConfig(),menu_data),
cascade_menu(host.getFrameDesktop(),pref.getSmartConfig()),
test(wlist,pref),
file_window(host.getFrameDesktop(),pref.getSmartConfig(),update,{true}),
connector_menu_selected(this,&TestClient::menu_selected,menu.selected),
connector_cascade_menu_selected(this,&TestClient::cascade_menu_selected,cascade_menu.selected),
connector_cascade_menu_pressed(this,&TestClient::cascade_menu_pressed,cascade_menu.pressed)
{
cascade_menu.connectUpdate(update);
wlist.insTop(menu,test);
wlist.enableTabFocus(false);
// Top-level menu ('@' marks the hot-key letter); ids match
// menu_selected() cases.
menu_data("@File"_str,1)
("@Edit"_str,2)
(MenuSeparator)
("@Options"_str,3)
(MenuDisabled,"@Modules"_str,4)
("@Window"_str,5)
("@Long menu"_str,6);
// Submenu item ids match cascade_menu_selected() cases.
menu_file_data("@New"_str,101)
("@Open"_str,102)
(MenuDisabled,"@Save"_str,103)
(MenuDisabled,"Save @as"_str,104)
(MenuSeparator)
("E@xit"_str,105);
menu_edit_data("@Undo"_str,201)
("@Check"_str,202)
(MenuSeparator)
("Cut"_str,203)
("Copy"_str,204)
("Paste"_str,205)
(MenuSeparator)
("@Run"_str,206);
menu_options_data("@Colors"_str,301)
("@Fonts"_str,302)
("@Targets"_str,303);
menu_window_data("@Split"_str,501)
("@Close all"_str,502)
("S@tack"_str,503);
// 99 generated items to exercise submenu scrolling ("#;" is the CCore
// printf-style number placeholder).
for(int i=1; i<100 ;i++)
{
menu_long_data(Stringf("menu item #;",i),600+i);
}
file_window.addFilter("*.h"_c);
file_window.addFilter("*.cpp"_c);
file_window.addFilter("*"_c);
}
// All members clean up via their own destructors.
TestClient::~TestClient()
{
}
// base
// Frame open hook: open children and focus the test panel.
void TestClient::open()
{
wlist.open();
test.setFocus();
}
// drawing
// Menu bar on top, test panel fills the rest; no extra spacing.
void TestClient::layout()
{
LayToBottom lay{Lay(menu),Lay(test)};
lay.setPlace(getPane(),0);
}
// user input
// Dispatches the user action to the react_*() handlers below.
void TestClient::react(UserAction action)
{
action.dispatch(*this);
}
// Keyboard handler: F10 focuses the menu bar, Esc closes any open menu
// and refocuses the test pane; all other keys go to the child list.
void TestClient::react_Key(VKey vkey,KeyMod kmod)
{
if( vkey==VKey_F10 )
{
menu.setFocus();
}
else if( vkey==VKey_Esc )
{
menuOff();
test.setFocus();
}
else
{
wlist.put_Key(vkey,kmod);
}
}
// Any left click outside the menu dismisses it, then is routed normally.
void TestClient::react_LeftClick(Point point,MouseKey mkey)
{
menuOff();
wlist.put_LeftClick(point,mkey);
}
// Same dismissal rule as for left clicks.
void TestClient::react_RightClick(Point point,MouseKey mkey)
{
menuOff();
wlist.put_RightClick(point,mkey);
}
// Remaining action kinds are forwarded to the child window list.
void TestClient::react_other(UserAction action)
{
wlist.react(action);
}
/* class TestFrame */
// Draggable top-level frame hosting TestClient as its client window.
TestFrame::TestFrame(Desktop *desktop,const UserPreference &pref,Signal<> &update)
: DragFrame(desktop,pref.getSmartConfig(),update),
client(*this,pref,update)
{
bindClient(client);
}
// Client is a member; nothing extra to release.
TestFrame::~TestFrame()
{
}
} // namespace App
|
#!/bin/bash
# set -x
# Node affinity / anti-affinity scale test driver for OpenShift.
# Requires exactly one argument selecting the cluster-loader flavor.
if [ "$#" -ne 1 ]; then
echo "syntax: $0 <TYPE>"
echo "<TYPE> should be either golang or python"
exit 1
fi
TYPE=$1
# Loads the cluster via the golang cluster-loader embedded in the
# openshift-tests binary.
# NOTE: MY_CONFIG is intentionally NOT declared `local` — it is read
# later at top level to compute expected pod counts.
function golang_clusterloader() {
# Export kube config
export KUBECONFIG=${KUBECONFIG-$HOME/.kube/config}
#### OCP 4.2: new requirements to run golang cluster-loader from openshift-tests binary:
## - Absolute path to config file needed
## - .yaml extension is required now in config file name
## - full path to the config file must be under 70 characters total
MY_CONFIG=../../config/golang/node-affinity.yaml
# loading cluster based on yaml config file
VIPERCONFIG=$MY_CONFIG openshift-tests run-test "[sig-scalability][Feature:Performance] Load cluster should populate the cluster [Slow][Serial]"
}
# Loads the cluster via the legacy python cluster-loader.
# MY_CONFIG is intentionally global — read later at top level.
function python_clusterloader() {
MY_CONFIG=../../config/node-affinity.yaml
python --version
python ../../cluster-loader.py -f $MY_CONFIG
}
# Debug helper: dump node labels plus the nodes matching each test label.
function show_node_labels() {
oc get node --show-labels
oc get node -l cpu=4
oc get node -l cpu=6
oc get node -l beta.kubernetes.io/arch=intel
}
# Fail fast if any pod in namespace $1 is in Error state.
# Arguments:
#   $1 - namespace to inspect
function check_no_error_pods()
{
local error
error=$(oc get pods -n "$1" | grep -c Error)
if [ "$error" -ne 0 ]; then
echo "$error pods in Error state found in namespace $1, exiting"
# List the failing pods to aid debugging before bailing out.
oc get pods -n "$1" | grep Error
exit 1
fi
}
# Blocks until every project matching $1 has finished terminating.
# Polls every 15s, giving up (exit 1) after 20 tries (~5 minutes), and
# finally verifies that neither projects nor pods matching $1 remain.
function wait_for_project_termination() {
COUNTER=0
terminating=$(oc get projects | grep $1 | grep Terminating | wc -l)
while [ $terminating -ne 0 ]; do
sleep 15
terminating=$(oc get projects | grep $1 | grep Terminating | wc -l)
echo "$terminating projects are still terminating"
COUNTER=$((COUNTER + 1))
if [ $COUNTER -ge 20 ]; then
echo "$terminating projects are still terminating after 5 minutes"
exit 1
fi
done
proj=$(oc get projects | grep $1 | wc -l)
if [ $proj -ne 0 ]; then
echo "$proj $1 projects are still there"
exit 1
fi
pods_in_proj=$(oc get pods -A | grep $1 | wc -l)
if [ $pods_in_proj -ne 0 ]; then
echo "$pods_in_proj $1 pods are still there"
exit 1
fi
}
# --- Environment report, node labeling and test execution ---
date
uname -a
oc get clusterversion
oc version
oc get node --show-labels
oc describe node | grep Runtime
# Worker node names, one per line -> single space-separated string.
compute_nodes=$(oc get nodes -l 'node-role.kubernetes.io/worker=' | awk '{print $1}' | grep -v NAME | xargs)
echo -e "\nWorker nodes are: $compute_nodes"
declare -a node_array
counter=1
oc get nodes -l 'node-role.kubernetes.io/worker='
oc describe nodes -l 'node-role.kubernetes.io/worker='
initial_node_label="beta.kubernetes.io/arch=amd64"
# Configuration: label nodes for Affinity and anti-affinity scheduling
# NOTE: node_array is 1-based (index 0 is never set).
for n in ${compute_nodes}; do
node_array[${counter}]=${n}
counter=$((counter+1))
done
# output node array elements
for i in {1..2}; do
echo "Array element node_array index $i has value : ${node_array[${i}]}"
done
# Configuration: label nodes for Affinity and anti-affinity scheduling
echo -e "\nLabeling node ${node_array[1]} with label 'cpu=4'"
oc label nodes ${node_array[1]} cpu=4
echo -e "\nLabeling node ${node_array[2]} with label 'cpu=6'"
oc label nodes ${node_array[2]} cpu=6
echo -e "\nLabeling node ${node_array[1]} with label 'beta.kubernetes.io/arch=intel'"
oc label nodes ${node_array[1]} --overwrite beta.kubernetes.io/arch=intel
show_node_labels
sleep 5
echo "Run tests"
if [ "$TYPE" == "golang" ]; then
golang_clusterloader
pod_counts=$(python -c "import get_pod_total; get_pod_total.get_pod_counts_golang('$MY_CONFIG')")
elif [ "$TYPE" == "python" ]; then
python_clusterloader
pod_counts=$(python -c "import get_pod_total; get_pod_total.get_pod_counts_python('$MY_CONFIG')")
else
echo "$TYPE is not a valid option, available options: golang, python"
exit 1
fi
sleep 30
check_no_error_pods node-affinity-0
check_no_error_pods node-anti-affinity-0
## Check pod counts expecting <num from yaml> pods per namespace
# Fix: "$node_array" alone expands only index 0, which is never set
# (array is filled starting at index 1), so it printed nothing.
echo "nodes ${node_array[*]}"
affinity_pods=$(oc get pods -n node-affinity-0 -o wide | grep "pausepods" | grep ${node_array[2]} | grep Running | wc -l | xargs )
anti_affinity_pods=$(oc get pods -n node-anti-affinity-0 -o wide | grep "hellopods" | grep -v ${node_array[2]} | grep Running | wc -l | xargs)
#validate counts
# Fix: this parse-and-compare section appeared twice verbatim
# (copy-paste duplication); the duplicate recomputed identical values,
# reset pass_or_fail and re-printed every message. Collapsed to one pass.
# pod_counts is a flat "namespace count namespace count ..." list.
counts=$(echo $pod_counts | tr ' ' '\n')
declare -a node_namespace
node_affinity_total=0
node_anti_affinity_total=0
counter=0
for n in ${counts}; do
if [ $((counter % 2)) == 0 ]; then
echo "counter $counter $n"
node_namespace=${n}
else
if [[ $node_namespace == "node-affinity"* ]]; then
echo "affinity"
node_affinity_total=${n}
elif [[ $node_namespace == "node-anti-affinity"* ]]; then
node_anti_affinity_total=${n}
fi
fi
((counter++))
done
echo "node_affinity_total $node_affinity_total"
echo "node_anti_affinity_total $node_anti_affinity_total"
## Pass/Fail
# One point per sub-test whose observed Running-pod count matches the
# expected total; 2 points == overall PASS (checked at end of script).
pass_or_fail=0
if [[ ${affinity_pods} == ${node_affinity_total} ]]; then
echo -e "\nActual ${affinity_pods} pods were successfully deployed. Node affinity test passed!"
((pass_or_fail++))
else
echo -e "\nActual ${affinity_pods} pods deployed does NOT match expected ${node_affinity_total} pods for node affinity test. Node affinity test failed !"
fi
if [[ ${anti_affinity_pods} == ${node_anti_affinity_total} ]]; then
echo -e "\nActual ${anti_affinity_pods} pods were successfully deployed. Node Anti-affinity test passed!"
((pass_or_fail++))
else
echo -e "\nActual ${anti_affinity_pods} pods deployed does NOT match expected ${node_anti_affinity_total} pods for node Anti-affinity test. Node Anti-affinity test failed !"
fi
oc describe node/${node_array[2]}
sleep 60
# delete projects:
######### Clean up: delete projects and wait till all projects and pods are gone
oc delete project node-affinity-0
wait_for_project_termination node-affinity-0
oc delete project node-anti-affinity-0
wait_for_project_termination node-anti-affinity-0
sleep 30
## remove node labels
echo -e "\nRemoving the node labels"
oc label nodes ${node_array[1]} cpu-
oc label nodes ${node_array[2]} cpu-
# Restore the original arch label overwritten during setup.
oc label nodes ${node_array[1]} --overwrite ${initial_node_label}
show_node_labels
## Final Pass/Fail result
# pass_or_fail is incremented once per passing sub-test above; both
# affinity and anti-affinity must pass.
if [[ ${pass_or_fail} == 2 ]]; then
echo -e "\nOverall Node Affinity and Anti-affinity Testcase result: PASS"
exit 0
else
echo -e "\nOverall Node Affinity and Anti-affinity Testcase result: FAIL"
exit 1
fi
|
#!/bin/bash
# Power off every node listed in cluster.yml. Node IPs come from the
# uncommented " address: " lines of the RKE cluster config.
echo "Halting all nodes.."
for node in $(grep -v '#' cluster.yml | grep ' address: ' | awk '{print $3}')
do
echo "Node: $node"
# timeout 1: don't hang on hosts that drop the connection mid-poweroff.
# Host keys are ignored because nodes are frequently reinstalled.
timeout 1 ssh -o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null -x -l root "$node" 'poweroff'
done
// Doxygen-generated navigation data for the "CMSIS MPIDR" group page.
// Each entry: [display name, target href anchor, child entries].
var group__CMSIS__MPIDR =
[
[ "__get_MPIDR", "group__CMSIS__MPIDR.html#ga05394b4cb9fb0ba1329ec6521c76e571", null ]
];
# Create directory $1 (with parents) and enter it.
# Fix: cd only runs if mkdir succeeded; previously a failed mkdir was
# followed by an unconditional cd. Quoting protects paths with spaces.
function take () {
mkdir -p "$1" && cd "$1"
}
#
# Get the value of an alias.
#
# Arguments:
#    1. alias - The alias to get its value from
# STDOUT:
#    The value of alias $1 (if it has one).
# Return value:
#    0 if the alias was found,
#    1 if it does not exist
#
function alias_value () {
    # Fix: run `alias` once and key the return status off it. The old
    # `test $(alias "$1")` re-ran the lookup and broke (word splitting)
    # on alias values containing spaces.
    local value
    value=$(alias "$1") || return 1
    echo "$value" | sed "s/^$1='\(.*\)'$/\1/"
}
# Get ubuntu daily release
# Downloads the current daily-live desktop amd64 ISO for release $1,
# preferring zsync (resumable delta sync) when available, then wget -c.
function get-daily () {
if type zsync > /dev/null ; then
echo "zsyncing $1..."
zsync "http://cdimage.ubuntu.com/daily-live/current/$1-desktop-amd64.iso.zsync"
fi
if [[ -n $1 ]]; then
echo "wgetting $1..."
wget -c "http://cdimage.ubuntu.com/daily-live/current/$1-desktop-amd64.iso"
else
echo "Please insert a development release name"
fi
}
# Compile every *.$1 source file in directory $2 with clang/clang++.
# Arguments:
#   $1 - source type: "c" or "cpp"
#   $2 - directory containing the sources
function compile () {
local compiler
if [[ $1 == 'c' ]]; then
compiler="clang"
elif [[ $1 == 'cpp' ]]; then
compiler="clang++ --std=c++11"
else
echo "Input a source type. Available: c cpp"
return
fi
if [[ -n $2 ]]; then
for file in $2/*.$1; do
echo "Compiling source file: $file"
# Fix: run the compiler directly. The previous `eval $(...)` form
# captured the compiler's OUTPUT and executed it as a command.
# ${file:t:r} = zsh basename without extension.
eval "$compiler $file -o ${file:t:r}"
echo "Done."
done
else
echo "Arg 2: give a working directory"
fi
}
# Extract an archive based on its extension.
# NOTE: pattern order is semantic — *.tar.bz2 / *.tar.gz must precede
# the bare *.bz2 / *.gz patterns, since `case` takes the first match.
function extract () {
if [ -f $1 ] ; then
case $1 in
*.tar.bz2) tar xjf $1 ;;
*.tar.gz) tar xzf $1 ;;
*.bz2) bunzip2 $1 ;;
*.rar) unrar e $1 ;;
*.gz) gunzip $1 ;;
*.tar) tar xf $1 ;;
*.tbz2) tar xjf $1 ;;
*.tgz) tar xzf $1 ;;
*.zip) unzip $1 ;;
*.Z) uncompress $1 ;;
*.7z) 7z x $1 ;;
*) echo "'$1' cannot be extracted via extract()" ;;
esac
else
echo "'$1' is not a valid file"
fi
}
# ssh wrapper that rename current tmux window to the hostname of the
# remote host.
function ssh () {
# Do nothing if we are not inside tmux or ssh is called without arguments
if [[ $# == 0 || -z $TMUX ]]; then
command ssh $@
return
fi
# The hostname is the last parameter (i.e. ${(P)#})
# (zsh: ${(P)#} dereferences the positional parameter numbered $#).
local remote=${${(P)#}}
local old_name="$(tmux display-message -p '#W')"
local renamed=0
# Save the current name
# Only rename when the last arg looks like a host, not an option flag.
if [[ $remote != -* ]]; then
renamed=1
tmux rename-window $remote
fi
command ssh $@
# Restore the original window name once the session ends.
if [[ $renamed == 1 ]]; then
tmux rename-window "$old_name"
fi
}
# Tmux: rename the current window to $1, falling back to the basename
# of the working directory when no name is given. No-op outside tmux.
function trw () {
if [[ -z $TMUX ]]; then
return
fi
local window=${1:-$(basename "$PWD")}
tmux rename-window "$window"
}
# sensors wrapper: also print the AMD GPU temperature when aticonfig
# is installed.
function sensors () {
# alias sensors='sensors && aticonfig --od-gettemperature'
command sensors
if type aticonfig > /dev/null ; then
command aticonfig --od-gettemperature
fi
}
# cd wrapper: when given a FILE path, change into its parent directory
# instead of failing (zsh ${1:h} = "head", i.e. dirname of $1).
function cd () {
# cd /etc/fstab
if [[ -f ${1} ]]; then
[[ ! -e ${1:h} ]] && return 1
print "Correcting ${1} to ${1:h}"
builtin cd ${1:h}
else
builtin cd ${1}
fi
}
# Append the local RSA public key to $1's authorized_keys
# (poor man's ssh-copy-id; the backticks expand locally before ssh runs).
function push-key () {
ssh $1 "echo '`cat ~/.ssh/id_rsa.pub`' >> ~/.ssh/authorized_keys"
}
|
<filename>test/fragments/custom/comments/obsolete.d.ts
// Test fixture: declarations exercising @deprecated doc-comment handling
// across every declaration kind.
// NOTE(review): the JSDoc text below is test DATA — rewording it may
// change the expected output of the tests consuming this fragment.
/**
 * Escape quotation marks
 *
 * @deprecated Hello "World"!
 */
export interface A {}
/**
 * Multiline
 *
 * @deprecated
 * Deprecated because
 * of stuff
 */
export interface B {}
/**
 * Multiple deprecated tags
 *
 * @deprecated Message 1
 * @deprecated Reason 2
 */
export interface C {}
/**
 * Deprecated interface with default type parameter
 *
 * Both Interfaces with generic and with default should be deprecated
 *
 * @deprecated Interface with default is deprecated
 */
export interface D<T = string> {}
/**
 * Summary: SomeFunction
 *
 * @deprecated SomeFunction is deprecated
 */
export function SomeFunction(): void
/**
 * Summary: SomeInterface
 *
 * @deprecated SomeInterface is deprecated
 */
export interface SomeInterface {
/**
 * Summary: SomeValue
 *
 * @deprecated SomeValue is deprecated
 */
SomeValue: number
/**
 * Summary: SomeFunction
 *
 * @deprecated SomeFunction is deprecated
 */
SomeFunction(): void
}
/**
 * Summary: SomeClass
 *
 * @deprecated SomeClass is deprecated
 */
export class SomeClass {
/**
 * Summary: SomeValue
 *
 * @deprecated SomeValue is deprecated
 */
SomeValue: number;
/**
 * Summary: constructor
 *
 * @deprecated ctor is deprecated
 */
constructor(value: string);
/**
 * Summary: SomeFunction
 *
 * @deprecated SomeFunction is deprecated
 */
SomeFunction();
}
/**
 * Summary: SomeType
 *
 * @deprecated SomeType is deprecated
 */
export type SomeType = {
/**
 * Summary: SomeValue
 *
 * @deprecated SomeValue is deprecated
 */
SomeValue: number
/**
 * Summary: SomeFunction
 *
 * @deprecated SomeFunction is deprecated
 */
SomeFunction(): void
}
/**
 * Summary: SomeFunctionType
 *
 * @deprecated SomeFunctionType is deprecated
 */
export type SomeFunctionType = (a: number, b: string) => string
/**
 * Summary: SomeAlias
 *
 * @deprecated SomeAlias is deprecated
 */
export type SomeAlias = string
/**
 * Summary: SomeUnion
 *
 * @deprecated SomeUnion is deprecated
 */
export type SomeUnion =
/**
 * Summary: string
 *
 * @deprecated SomeUnion.string is deprecated
 */
string
/**
 * Summary: number
 *
 * @deprecated SomeUnion.number is deprecated
 */
| number
// Comments of Literal Case aren't stored in Node
/**
 * Summary: SomeLiteral
 *
 * @deprecated SomeLiteral is deprecated
 */
export type SomeLiteral =
/**
 * Summary: A
 *
 * @deprecated "A" is deprecated
 */
"A"
/**
 * Summary: B
 *
 * @deprecated "B" is deprecated
 */
| "B"
/**
 * Summary: SomeIntersectionType
 *
 * @deprecated SomeIntersectionType is deprecated
 */
export type SomeIntersectionType = number & string
/**
 * Summary: SomeEnum
 *
 * @deprecated SomeEnum is deprecated
 */
export enum SomeEnum {
/**
 * Summary: `A = 0`
 *
 * @deprecated A is deprecated
 */
A = 0,
/**
 * Summary: `B = 1`
 *
 * @deprecated B is deprecated
 */
B = 1,
}
/**
 * Summary: SomeStringEnum
 *
 * @deprecated SomeStringEnum is deprecated
 */
export enum SomeStringEnum {
/**
 * Summary: `A = "A"`
 *
 * @deprecated A is deprecated
 */
A = "A",
/**
 * Summary: `B = "B"`
 *
 * @deprecated B is deprecated
 */
B = "B",
}
/**
 * Summary: SomeConst
 *
 * @deprecated SomeConst is deprecated
 */
export const SomeConst: number;
/**
 * Summary: SomeVariable
 *
 * @deprecated SomeVariable is deprecated
 */
export declare var SomeVariable: number;
/**
 * Summary: SomeNamespace
 *
 * @deprecated SomeNamespace is deprecated
 */
export namespace SomeNamespace {
/**
 * Summary: SomeFunction
 *
 * @deprecated SomeFunction is deprecated
 */
export function SomeFunction();
}
/**
 * Summary: SomeModule
 *
 * @deprecated SomeModule is deprecated
 */
export module SomeModule {
/**
 * Summary: SomeFunction
 *
 * @deprecated SomeFunction is deprecated
 */
export function SomeFunction();
}
/**
 * Summary: SomeGenericType
 *
 * @deprecated SomeGenericType is deprecated
 */
export type SomeGenericType<A> = {}
/**
 * Summary: SomeClassWithStaticFunction
 *
 * @deprecated SomeClassWithStaticFunction is deprecated
 */
declare class SomeClassWithStaticFunction {
/**
 * Summary: SomeStaticFunction
 *
 * @deprecated SomeStaticFunction is deprecated
 */
static SomeStaticFunction(): void;
}
|
<reponame>nimbus-cloud/cli<filename>src/cf/commands/route/unmap_route.go
package route
import (
"cf/api"
"cf/configuration"
"cf/requirements"
"cf/terminal"
"errors"
"github.com/codegangsta/cli"
)
// UnmapRoute implements the "unmap-route" CLI command: it removes the
// association between a route (host + domain) and an application.
type UnmapRoute struct {
ui terminal.UI
config configuration.Reader
routeRepo api.RouteRepository
appReq requirements.ApplicationRequirement
domainReq requirements.DomainRequirement
}
// NewUnmapRoute wires an UnmapRoute command with its dependencies; the
// requirement fields are populated later by GetRequirements.
func NewUnmapRoute(ui terminal.UI, config configuration.Reader, routeRepo api.RouteRepository) (cmd *UnmapRoute) {
cmd = &UnmapRoute{
ui: ui,
config: config,
routeRepo: routeRepo,
}
return
}
// GetRequirements validates the argument count (app name + domain name)
// and declares the login/application/domain requirements the framework
// must satisfy before Run is invoked.
func (cmd *UnmapRoute) GetRequirements(reqFactory requirements.Factory, c *cli.Context) (reqs []requirements.Requirement, err error) {
if len(c.Args()) != 2 {
err = errors.New("Incorrect Usage")
cmd.ui.FailWithUsage(c, "unmap-route")
return
}
appName := c.Args()[0]
domainName := c.Args()[1]
cmd.appReq = reqFactory.NewApplicationRequirement(appName)
cmd.domainReq = reqFactory.NewDomainRequirement(domainName)
reqs = []requirements.Requirement{
reqFactory.NewLoginRequirement(),
cmd.appReq,
cmd.domainReq,
}
return
}
// Run executes unmap-route: it resolves the route from the -n hostname
// flag and the domain argument, announces the action, and unbinds the
// route from the application.
func (cmd *UnmapRoute) Run(c *cli.Context) {
hostName := c.String("n")
domain := cmd.domainReq.GetDomain()
app := cmd.appReq.GetApplication()
route, apiResponse := cmd.routeRepo.FindByHostAndDomain(hostName, domain.Name)
if apiResponse.IsNotSuccessful() {
cmd.ui.Failed(apiResponse.Message)
// Fix: return after Failed, matching the Unbind error path below;
// previously execution could fall through with a zero-value route.
return
}
cmd.ui.Say("Removing route %s from app %s in org %s / space %s as %s...",
terminal.EntityNameColor(route.URL()),
terminal.EntityNameColor(app.Name),
terminal.EntityNameColor(cmd.config.OrganizationFields().Name),
terminal.EntityNameColor(cmd.config.SpaceFields().Name),
terminal.EntityNameColor(cmd.config.Username()),
)
apiResponse = cmd.routeRepo.Unbind(route.Guid, app.Guid)
if apiResponse.IsNotSuccessful() {
cmd.ui.Failed(apiResponse.Message)
return
}
cmd.ui.Ok()
}
|
#!/usr/bin/env bash
# Build sdist + wheel from a clean tree and upload to PyPI.
# Fix: fail fast — without set -e a failed build still uploaded whatever
# happened to be left in dist/.
set -euo pipefail
/bin/rm -rf dist build
python3 setup.py sdist bdist_wheel
python3 -m twine upload dist/*
|
<reponame>movisens/SmartGattLib<gh_stars>100-1000
package com.movisens.smartgattlib.attributes;
import com.movisens.smartgattlib.Characteristics;
import com.movisens.smartgattlib.helper.AbstractAttribute;
import com.movisens.smartgattlib.helper.Characteristic;
/**
 * Fallback GATT attribute used when no specific attribute type is
 * registered for a characteristic: stores the raw payload and renders
 * it as hex. Neither readable nor writable by default.
 */
public class DefaultAttribute extends AbstractAttribute
{
/**
 * @param data raw characteristic value as received from the device
 */
public DefaultAttribute(byte[] data)
{
this.data = data;
}
@Override
public Characteristic<DefaultAttribute> getCharacteristic()
{
return Characteristics.DEFAULT;
}
/**
 * Renders the payload as space-separated hex bytes, e.g.
 * "DefaultAttribute = 0x01 0x02 " (note the trailing space).
 */
@Override
public String toString()
{
// StringBuilder avoids O(n^2) string concatenation in the loop.
StringBuilder result = new StringBuilder(this.getClass().getSimpleName()).append(" = ");
for (byte d : data)
{
result.append(String.format("0x%02x ", d));
}
return result.toString();
}
@Override
public boolean isReadable()
{
return false;
}
@Override
public boolean isWritable()
{
return false;
}
}
|
# Gem specification for kudzu, a simple web crawler.
$:.push File.expand_path("../lib", __FILE__)
# Maintain your gem's version:
require "kudzu/version"
# Describe your gem and declare its dependencies:
Gem::Specification.new do |s|
s.name = "kudzu"
s.version = Kudzu::VERSION
s.authors = ["<NAME>"]
s.email = ["<EMAIL>"]
s.homepage = "https://github.com/kanety/kudzu"
s.summary = "A simple web crawler for ruby"
s.description = "A simple web crawler for ruby"
s.license = "MIT"
s.files = Dir["{app,config,db,lib}/**/*", "MIT-LICENSE", "Rakefile", "README.md"]
# Runtime dependencies (URL handling, parsing, charset/MIME detection, cookies).
s.add_dependency "addressable"
s.add_dependency "nokogiri"
s.add_dependency "charlock_holmes"
s.add_dependency "mimemagic"
s.add_dependency "http-cookie"
# Development-only dependencies (test suite, coverage, debugging).
s.add_development_dependency "rails"
s.add_development_dependency "rspec-rails"
s.add_development_dependency "simplecov"
s.add_development_dependency "pry-rails"
s.add_development_dependency "pry-byebug"
end
|
<filename>node_modules/react-icons-kit/icomoon/aidKit.js
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.aidKit = void 0;
var aidKit = {
"viewBox": "0 0 16 16",
"children": [{
"name": "path",
"attribs": {
"fill": "#000000",
"d": "M14 4h-3v-2c0-0.55-0.45-1-1-1h-4c-0.55 0-1 0.45-1 1v2h-3c-1.1 0-2 0.9-2 2v8c0 1.1 0.9 2 2 2h12c1.1 0 2-0.9 2-2v-8c0-1.1-0.9-2-2-2zM6 2h4v2h-4v-2zM12 11h-3v3h-2v-3h-3v-2h3v-3h2v3h3v2z"
}
}]
};
exports.aidKit = aidKit; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.