text stringlengths 1 1.05M |
|---|
<filename>src/components/managerdashboard/ManagerdashboardBilling/InvoiceTable/index.js
import React, { PureComponent } from 'react'
import PropTypes from 'prop-types'
import TitleDescriptionSection from 'components/common/TitleDescriptionSection'
import Table from 'components/gui/Table'
class InvoiceTable extends PureComponent {
constructor (props) {
super(props)
}
render () {
const {
title,
data
} = this.props
return (
<TitleDescriptionSection
colorModifier='blue'
title={title}
>
<Table
{...data}
delete={true}
sortable={['File name', 'Data of upload']}/>
</TitleDescriptionSection>
)
}
}
// Runtime prop validation; `data` is spread onto Table, so its expected
// shape is defined by that component's own props.
InvoiceTable.propTypes = {
  title: PropTypes.string,
  data: PropTypes.object
}

export default InvoiceTable
|
# Replaces the legacy account-SID placeholder ("WFXXX…") with the new one
# ("WWXXX…") inside the given file, rewriting the file only when a
# substitution actually occurred.
#
# file_path - String path to the file; expanded with File.expand_path.
#
# The original implementation opened an explicit File handle and leaked it
# whenever read/gsub raised (the rescue clause skipped file.close); using
# File.read / File.write avoids holding any handle across the substitution.
def replace_account_sid(file_path)
  path = File.expand_path(file_path)
  original = File.read(path)
  replaced = original.gsub('WFXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX', 'WWXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX')
  unless original == replaced
    puts "replaced: #{file_path}"
    File.write(path, replaced)
  end
rescue => e
  # Report which file failed and why instead of swallowing the error detail.
  puts "Error replacing content in file: #{file_path} (#{e.class}: #{e.message})"
end
# Walk the whole tree and rewrite account SIDs in every regular file,
# skipping tool, package and vendored directories.
Dir.glob('**/*') do |entry|
  skipped = entry.index(/^tools\//) ||
            entry.include?('nuget/') ||
            entry.include?('vendor/') ||
            entry.include?('testable_snippets/')
  next if skipped
  replace_account_sid(entry) unless File.directory?(entry)
end
|
<filename>open-sphere-base/auxiliary/src/main/java/io/opensphere/auxiliary/video/ReencodingChunkProvider.java
package io.opensphere.auxiliary.video;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.log4j.Logger;
import com.xuggle.xuggler.ICodec;
import com.xuggle.xuggler.IContainer;
import com.xuggle.xuggler.IContainerFormat;
import com.xuggle.xuggler.IError;
import com.xuggle.xuggler.IPacket;
import com.xuggle.xuggler.IStream;
import com.xuggle.xuggler.IStreamCoder;
import com.xuggle.xuggler.io.XugglerIO;
import gnu.trove.list.TLongList;
import gnu.trove.list.array.TLongArrayList;
import io.opensphere.core.util.collections.New;
import io.opensphere.core.util.io.CancellableInputStream;
import io.opensphere.core.util.io.ExceptionCapturingCancellableInputStream;
import io.opensphere.core.util.io.ListOfBytesOutputStream;
import io.opensphere.core.video.ChunkException;
import io.opensphere.core.video.VideoContentHandler;
/**
 * Provides approximately five second chunks of video data for a video stream by
 * taking the existing video data and breaking it into 5 second chunks with
 * changing the video format.
 */
public class ReencodingChunkProvider implements VideoChunkProvider
{
    /** Logger reference. */
    private static final Logger LOGGER = Logger.getLogger(ReencodingChunkProvider.class);

    /**
     * The approximate chunk size in milliseconds, the actual size will extend
     * past this size to the next frame which precedes a key frame or until the
     * end of the stream is reached.
     */
    private final long myApproxSizeMS;

    /** The locations of the key frames. */
    private final TLongList myChunkKeyFrames = new TLongArrayList();

    /** A re-usable stream for storing chunks of video data. */
    private final ListOfBytesOutputStream myChunkStream = new ListOfBytesOutputStream();

    /**
     * The container to read the video from.
     */
    private final IContainer myInputContainer;

    /** The stream which will be read to produce the video chunks. */
    private final CancellableInputStream myStream;

    /**
     * The start time of the stream (milliseconds since epoch). Using the start
     * time of the stream to determine a tag's time in combination with the
     * tag's time stamp (time since stream start) insures that we do not
     * experience time drift.
     */
    private final long myStreamStartTime;

    /**
     * Constructs a new re-encoding chunker.
     *
     * @param inStream The video stream to chunk.
     * @param inputContainer An already constructed and opened
     *            {@link IContainer} to read the video from.
     * @param streamStart The start time of the video.
     * @param approxSizeMS The approximate size of the chunks. A non-positive
     *            value disables splitting (one unbounded chunk).
     */
    public ReencodingChunkProvider(CancellableInputStream inStream, IContainer inputContainer, long streamStart,
            long approxSizeMS)
    {
        myStream = inStream;
        myInputContainer = inputContainer;
        myStreamStartTime = streamStart;
        // Long.MAX_VALUE means the size check below can never trigger a cut.
        myApproxSizeMS = approxSizeMS <= 0 ? Long.MAX_VALUE : approxSizeMS;
    }

    @Override
    public long getApproxSizeMS()
    {
        return myApproxSizeMS;
    }

    @Override
    public IContainer getInputContainer()
    {
        return myInputContainer;
    }

    @Override
    public long getStreamStart()
    {
        return myStreamStartTime;
    }

    @Override
    public CancellableInputStream getVideoStream()
    {
        return myStream;
    }

    /**
     * Reads packets from the input container and hands completed chunks to the
     * consumer. Video packets are copied into a per-chunk output container;
     * metadata packets are forwarded to {@code contentHandler} when present.
     * Runs until end of stream, cancellation, or a write failure.
     */
    @Override
    public boolean provideChunks(VideoChunkConsumer chunkConsumer, VideoContentHandler<ByteBuffer> contentHandler)
        throws ChunkException
    {
        boolean success = true;
        XugglerNativeUtilities.explodeXugglerNatives();
        IContainer outContainer = null;
        IPacket packet = null;
        long startTimeEpoch = myStreamStartTime;
        long timeInChunk = 0;
        long firstPacketMS = -1;
        long previousChunkLastEpoch = -1;
        try
        {
            outContainer = setupOutputContainer(myInputContainer);
            packet = IPacket.make();
            long previousPacketEpoch = myStreamStartTime;
            byte[] buf = null;
            while (outContainer != null && myInputContainer.readNextPacket(packet) >= 0 && !myStream.isCancelled())
            {
                ICodec.Type codecType = myInputContainer.getStream(packet.getStreamIndex()).getStreamCoder().getCodecType();
                boolean isVideo = codecType == ICodec.Type.CODEC_TYPE_VIDEO;
                boolean isMetadata = codecType == ICodec.Type.CODEC_TYPE_UNKNOWN;
                if (isVideo && packet.isComplete())
                {
                    /* There is only one stream being written, so the index will
                     * be 0. */
                    packet.setStreamIndex(0);
                    long ptsMS = XugglerUtilities.getPtsMillis(packet);
                    if (LOGGER.isTraceEnabled())
                    {
                        LOGGER.trace("Writing packet to chunk, pts: " + ptsMS + " dts: " + XugglerUtilities.getDtsMillis(packet));
                    }
                    if (firstPacketMS == -1)
                    {
                        firstPacketMS = ptsMS;
                    }
                    else
                    {
                        // Elapsed time within the current chunk, relative to
                        // its first packet.
                        timeInChunk = ptsMS - firstPacketMS;
                    }
                    long ptsEpoch = startTimeEpoch + timeInChunk;
                    // Precedence is (over size AND key frame) OR way over size:
                    // prefer to cut on a key frame, but force a cut at 3x the
                    // target size even without one.
                    if (timeInChunk > myApproxSizeMS && packet.isKey() || timeInChunk > myApproxSizeMS * 3)
                    {
                        outContainer.writeTrailer();
                        myChunkStream.flush();
                        /* Shift the times for the chunk to include the duration
                         * of the last packet before this chunk and exclude the
                         * duration for this chunk's last packet. This will
                         * allow the queried chunk to find the correct display
                         * frame when the desired seek time is beyond the last
                         * packet's PTS but before the first one in the next
                         * chunk. */
                        long chunkStart = previousChunkLastEpoch == -1 ? startTimeEpoch : previousChunkLastEpoch;
                        chunkConsumer.consumeVideoChunk(chunkStart, previousPacketEpoch, myChunkStream, myChunkKeyFrames);
                        // Reset everything to prepare for the next chunk.
                        previousChunkLastEpoch = previousPacketEpoch;
                        startTimeEpoch = ptsEpoch;
                        myChunkKeyFrames.clear();
                        outContainer.close();
                        outContainer = setupOutputContainer(myInputContainer);
                        firstPacketMS = ptsMS;
                        timeInChunk = 0;
                    }
                    success = addPacketToChunk(outContainer, packet, ptsMS);
                    if (!success)
                    {
                        outContainer.writeTrailer();
                        break;
                    }
                    previousPacketEpoch = ptsEpoch;
                }
                else if (isMetadata && packet.isComplete() && contentHandler != null)
                {
                    // Reuse buf across packets to avoid reallocation.
                    buf = XugglerUtilities.handleMetadataPacket(packet, buf, New.list(contentHandler));
                }
            }
        }
        catch (IOException e)
        {
            if (!myStream.isCancelled())
            {
                myStream.cancel();
                throw new ChunkException("Error while segmenting video: " + e, e);
            }
        }
        finally
        {
            // The input container is deliberately NOT closed here; see below.
            cleanResources(null, outContainer, packet);
        }
        if (myStream instanceof ExceptionCapturingCancellableInputStream)
        {
            success = ((ExceptionCapturingCancellableInputStream)myStream).getExceptions().isEmpty();
        }
        if (success)
        {
            /* Moved this out of a try finally because if this fails to chunkify
             * a video we want to reuse the input container in a fail over chunk
             * provider. */
            try
            {
                chunkConsumer.consumeLastChunk(startTimeEpoch, startTimeEpoch + timeInChunk, previousChunkLastEpoch,
                        myChunkStream, myChunkKeyFrames);
            }
            finally
            {
                cleanResources(myInputContainer, null, null);
            }
        }
        return success;
    }

    /**
     * Write the packet to the container and the index to the key frame list if
     * applicable.
     *
     * @param outContainer The output container which contains the chunk.
     * @param packet The packet to add to the chunk
     * @param ptsMS The time stamp of the packet converted to milliseconds. This
     *            time may be any value which is consistent with the difference
     *            from the first packet in the stream.
     * @return True if the packet was successfully added to the container, false
     *         otherwise.
     * @throws ChunkException If the packet cannot be written to the container.
     */
    private boolean addPacketToChunk(IContainer outContainer, IPacket packet, long ptsMS) throws ChunkException
    {
        boolean success = true;
        if (outContainer != null)
        {
            /* There is only one stream, so no interleaving is possible. */
            int status;
            if ((status = outContainer.writePacket(packet, false)) < 0)
            {
                IError err = IError.make(status);
                LOGGER.error("Failed to write video packet to chunk. " + err.toString());
                success = false;
            }
            /* If we are ending a chunk, do this after depositing the chunk so
             * that the new chunk will start with a key frame. */
            if (packet.isKey())
            {
                myChunkKeyFrames.add(ptsMS);
            }
        }
        return success;
    }

    /**
     * Cleanup the provided resources. Nested try/finally guarantees each
     * resource is released even when an earlier close throws.
     *
     * @param inputContainer Input container.
     * @param outContainer Output container.
     * @param packet The packet which was used during chunking.
     */
    private void cleanResources(IContainer inputContainer, IContainer outContainer, IPacket packet)
    {
        try
        {
            if (inputContainer != null)
            {
                inputContainer.close();
            }
        }
        finally
        {
            try
            {
                if (outContainer != null)
                {
                    outContainer.close();
                }
            }
            finally
            {
                if (packet != null)
                {
                    packet.delete();
                }
            }
        }
    }

    /**
     * Setup an output container for generating video chunks which match the
     * format of the input container.
     *
     * @param inputContainer The input container which will provide the video
     *            data.
     * @return The output container.
     * @throws ChunkException If the stream does not contain video or there's an
     *             error writing to the output container.
     */
    private IContainer setupOutputContainer(IContainer inputContainer) throws ChunkException
    {
        // Find the (last) video stream's coder; non-video streams are ignored.
        IStreamCoder coder = null;
        for (int i = 0; i < inputContainer.getNumStreams(); ++i)
        {
            IStream stream = inputContainer.getStream(i);
            if (stream.getStreamCoder().getCodecType() == ICodec.Type.CODEC_TYPE_VIDEO)
            {
                coder = stream.getStreamCoder();
            }
        }
        if (coder == null)
        {
            throw new ChunkException("Stream does not contain video.");
        }
        /* Whenever we have FLV video, put in in an FLV container even if the
         * original video is another type (like SWF). For H.264 video force it
         * to be in an MPEG container. Using an H.264 container will not cause
         * any errors, but no video frames can be produced from the resulting
         * chunks. */
        IContainerFormat outFormat = IContainerFormat.make();
        if ("flv".equalsIgnoreCase(coder.getCodec().getName()))
        {
            outFormat.setOutputFormat("flv", null, "video/x-flv");
        }
        else if ("h264".equalsIgnoreCase(coder.getCodec().getName()))
        {
            outFormat.setOutputFormat("mpeg", null, "video/mpeg");
        }
        else
        {
            /* If it is not FLV or H.264, try to initialize the output format to
             * match the input format. */
            IContainerFormat inFormat = inputContainer.getContainerFormat();
            outFormat.setOutputFormat(inFormat.getInputFormatShortName(), null, null);
        }
        IContainer outContainer = IContainer.make();
        outContainer.setFormat(outFormat);
        if (LOGGER.isDebugEnabled())
        {
            LOGGER.debug("Starting to write to chunk");
        }
        // The chunk buffer is reused; reset it before mapping it as the
        // container's output target.
        myChunkStream.reset();
        String outputStreamUrl = XugglerIO.map(myChunkStream);
        if (outContainer.open(outputStreamUrl, IContainer.Type.WRITE, outFormat, true, false) < 0)
        {
            throw new ChunkException("Could not open output container for video chunk.");
        }
        IStream outStream = outContainer.addNewStream(coder.getCodec());
        IStreamCoder outCoder = outStream.getStreamCoder();
        /* The doc for getTimeBase() says that the returned object needs to be
         * released, but this will cause vm crash. */
        outCoder.setTimeBase(coder.getTimeBase());
        outCoder.setWidth(coder.getWidth());
        outCoder.setHeight(coder.getHeight());
        outCoder.setPixelType(coder.getPixelType());
        outCoder.setFlags(coder.getFlags());
        outCoder.setSampleFormat(coder.getSampleFormat());
        outCoder.setCodecTag(coder.getCodecTag());
        if (outContainer.writeHeader() < 0)
        {
            throw new ChunkException("Failed to write header for video chunk stream.");
        }
        return outContainer;
    }
}
|
# Start the TensorFlow Serving MNIST inference server in the background on
# port 9090, logging to nohup-mnist.out so the shell can be closed.
echo '...Starting TensorFlow Serving for MNIST Image Classification Service...'
nohup $TENSORFLOW_SERVING_HOME/bazel-bin/tensorflow_serving/example/mnist_inference_2 --port=9090 $DATASETS_HOME/tensorflow/serving/mnist_model > $LOGS_HOME/serving/tensorflow/nohup-mnist.out &
# Single quotes keep $LOGS_HOME literal in this hint message — presumably
# intentional so the reader substitutes their own path; confirm.
echo '...tail -f $LOGS_HOME/serving/tensorflow/nohup-mnist.out...'
|
# Controller for the wikicipher plugin: re-displays the current project's
# wiki page with decoding turned on.
class RedmineWikicipherController < ApplicationController
  # Redirect back to the wiki 'show' action, passing decode=1 so the page
  # content is rendered decoded.
  def decode
    redirect_to controller: 'wiki',
                action: 'show',
                decode: '1',
                project_id: @project
  end
end
|
#!/bin/bash
# Container entrypoint: materialize the Google credentials and hauser config
# from environment variables, then start hauser.
set -e

echo "Generating config..."
# Quote the expansion: unquoted, word splitting and globbing would collapse
# whitespace/newlines inside the JSON key and corrupt the credentials file.
echo "$GOOGLE_KEY_JSON" > /server-conf/google_key.json
export GOOGLE_APPLICATION_CREDENTIALS=/server-conf/google_key.json
# Substitute environment variables into the config template.
envsubst < /server-conf/config.toml.tmpl > /server-conf/config.toml
# cat /server-conf/config.toml
# cat /server-conf/google_key.json

echo "Starting hauser..."
hauser -c /server-conf/config.toml
<reponame>CSCfi/pebbles
import json
from random import randint
import os
import time
from pebbles.drivers.provisioning import base_driver
from pebbles.client import PBClient
class DummyDriver(base_driver.ProvisioningDriverBase):
    """ Dummy driver mostly pretends to be a real driver for system testing
    and development purposes.

    It runs a time-consuming process (ping) using run_logged_process, writes the public SSH
    key to the user for the user to a file and logs from the ping to the right places.
    It reports a random IP address.
    """

    def get_configuration(self):
        # Lazy import: the config module is only loaded when configuration
        # is actually requested.
        from pebbles.drivers.provisioning.dummy_driver_config import CONFIG
        return CONFIG

    def get_running_instance_logs(self, token, instance_id):
        # Upload a fixed line to the instance's 'running' log channel.
        running_log_uploader = self.create_prov_log_uploader(token, instance_id, log_type='running')
        running_log_uploader.info('dummy running logs')

    def do_update_connectivity(self, token, instance_id):
        # The dummy driver has no connectivity to manage.
        pass

    def do_provision(self, token, instance_id):
        # Simulate provisioning: log, wait 5 seconds, then report a random
        # public IP and fake endpoints back to the internal API.
        pbclient = PBClient(token, self.config['INTERNAL_API_BASE_URL'], ssl_verify=False)
        log_uploader = self.create_prov_log_uploader(token, instance_id, log_type='provisioning')
        self.logger.info('faking provisioning')
        log_uploader.info('dummy provisioning for 5 seconds\n')
        time.sleep(5)
        log_uploader.info('dummy provisioning completed\n')
        # Random IPv4 with octets in 1-254 (avoids .0 and .255).
        public_ip = '%s.%s.%s.%s' % (randint(1, 254), randint(1, 254), randint(1, 254), randint(1, 254))
        instance_data = {
            'endpoints': [
                {'name': 'SSH', 'access': 'ssh cloud-user@%s' % public_ip},
                {'name': 'Some Web Interface', 'access': 'http://%s/service-x' % public_ip},
            ]
        }
        pbclient.do_instance_patch(
            instance_id,
            {
                'public_ip': public_ip, 'instance_data': json.dumps(instance_data)
            }
        )

    def do_deprovision(self, token, instance_id):
        # Simulate deprovisioning: wait 5 seconds, then tombstone the
        # instance's data directory instead of deleting it.
        pbclient = PBClient(token, self.config['INTERNAL_API_BASE_URL'], ssl_verify=False)
        instance = pbclient.get_instance_description(instance_id)
        cluster_name = instance['name']
        instance_dir = '%s/%s' % (self.config['INSTANCE_DATA_DIR'], cluster_name)
        log_uploader = self.create_prov_log_uploader(token, instance_id, log_type='deprovisioning')
        self.logger.info('faking deprovisioning\n')
        log_uploader.info('dummy deprovisioning for 5 seconds\n')
        time.sleep(5)
        log_uploader.info('dummy deprovisioning completed\n')
        # use instance id as a part of the name to make tombstones always unique
        if os.path.isdir(instance_dir):
            os.rename(instance_dir, '%s.deleted.%s' % (instance_dir, instance_id))

    def do_housekeep(self, token):
        # Nothing to clean up for the dummy driver.
        pass
|
#!/usr/bin/env bash
# run misc/deploy.sh from project root
# Deploys the gxydb-api binary and DB migrations to the production host.
set -e
set -x

# Ship the freshly built binary (staged as .new) and the migrations.
scp gxydb-api-linux root@gxydb.kli.one:/opt/gxydb/gxydb-api-linux.new
scp -r migrations root@gxydb.kli.one:/opt/gxydb

# Apply migrations, sourcing DB_URL from the remote .env file.
ssh root@gxydb.kli.one "cd /opt/gxydb && export \$(cat .env | xargs) && ./migrate -database \$DB_URL -path migrations up"

# Keep the previous binary as .old, then swap in the new one around a
# service stop/start to minimize downtime.
ssh root@gxydb.kli.one "/bin/cp -f /opt/gxydb/gxydb-api-linux /opt/gxydb/gxydb-api-linux.old"
ssh root@gxydb.kli.one "systemctl stop gxydb"
ssh root@gxydb.kli.one "mv /opt/gxydb/gxydb-api-linux.new /opt/gxydb/gxydb-api-linux"
ssh root@gxydb.kli.one "systemctl start gxydb"
|
package com.neusoft.service.impl;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.neusoft.entity.vo.AnalServiceVo;
import com.neusoft.mapper.AnalServiceMapper;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import java.util.List;
import static org.junit.Assert.*;
/**
 * Integration test for the paged analysis-service query, run against the
 * real Spring/MyBatis-Plus context.
 *
 * @author shkstart
 * @create 2018/11/28 - 9:23
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration("classpath:spring/spring-mybatisplus.xml")
public class AnalServiceServiceImplTest {

    /** Mapper under test, injected from the Spring context. */
    @Autowired
    AnalServiceMapper analServiceMapper;

    /**
     * Smoke test: runs the paged query with default paging and no filter and
     * prints the result. NOTE(review): contains no assertions, so it only
     * verifies the query executes without throwing.
     */
    @Test
    public void analServicePage() {
        Page page = new Page();
        List<AnalServiceVo> analServiceVos=analServiceMapper.analServicePage(page,null);
        System.out.println(""+analServiceVos);
    }
}
def sum_of_digits(n):
    """Return the sum of the decimal digits of ``n``.

    Works for negative inputs by summing the digits of ``abs(n)`` (the
    original implementation silently returned 0 for any ``n <= 0``).
    Also avoids shadowing the builtin ``sum``.

    :param n: an integer
    :return: the non-negative sum of its decimal digits
    """
    total = 0
    n = abs(n)
    # Peel off the least-significant digit until nothing is left.
    while n > 0:
        total += n % 10
        n //= 10
    return total
package com.supernoob.atfmd.registry;
import com.supernoob.atfmd.ATFMD;
import com.supernoob.atfmd.object.items.MusicDisc;
import net.minecraft.item.Item;
import net.minecraft.sound.SoundEvent;
import net.minecraft.util.Identifier;
import net.minecraft.util.Rarity;
import net.minecraft.util.registry.Registry;
/**
 * Registers every custom music disc item for the mod. Each constant triggers
 * registration at class-initialization time via {@link #register}.
 */
public class ModItems {
    public static final Item TWELVE = register("music_disc_12", ModSounds.SOUND_TWELVE);
    public static final Item FOURTEEN = register("music_disc_14", ModSounds.SOUND_FOURTEEN);
    public static final Item FOURTEEN_REVAMPED = register("music_disc_14_revamped", ModSounds.SOUND_FOURTEEN_REVAMPED);
    public static final Item EIGHTEEN = register("music_disc_18", ModSounds.SOUND_EIGHTEEN);
    public static final Item ANTI = register("music_disc_anti", ModSounds.SOUND_ANTI);
    public static final Item ARCADE = register("music_disc_arcade", ModSounds.SOUND_ARCADE);
    public static final Item AXOLOTL = register("music_disc_axolotl", ModSounds.SOUND_AXOLOTL);
    public static final Item BEDROCK = register("music_disc_bedrock", ModSounds.SOUND_BEDROCK);
    public static final Item BIRCH_BOP = register("music_disc_birch_bop", ModSounds.SOUND_BIRCH_BOP);
    public static final Item BRAIN_SPLOSHED = register("music_disc_brain_sploshed", ModSounds.SOUND_BRAIN_SPLOSHED);
    public static final Item BRICKS = register("music_disc_bricks", ModSounds.SOUND_BRICKS);
    public static final Item CARROT = register("music_disc_carrot", ModSounds.SOUND_CARROT);
    public static final Item CHAOS = register("music_disc_chaos", ModSounds.SOUND_CHAOS);
    public static final Item CHARR = register("music_disc_charr", ModSounds.SOUND_CHARR);
    public static final Item CHICKENBOAT = register("music_disc_chickenboat", ModSounds.SOUND_CHICKENBOAT);
    public static final Item CHILL = register("music_disc_chill", ModSounds.SOUND_CHILL);
    public static final Item CHORUS = register("music_disc_chorus", ModSounds.SOUND_CHORUS);
    public static final Item CLAY = register("music_disc_clay", ModSounds.SOUND_CLAY);
    public static final Item CLOUD = register("music_disc_cloud", ModSounds.SOUND_CLOUD);
    public static final Item CORAL_LULLABY = register("music_disc_coral_lullaby", ModSounds.SOUND_CORAL_LULLABY);
    public static final Item CRADLE = register("music_disc_cradle", ModSounds.SOUND_CRADLE);
    public static final Item DEEP = register("music_disc_deep", ModSounds.SOUND_DEEP);
    public static final Item DEEPSLATE = register("music_disc_deepslate", ModSounds.SOUND_DEEPSLATE);
    public static final Item DRIFTER = register("music_disc_drifter", ModSounds.SOUND_DRIFTER);
    public static final Item ENDERWAVE = register("music_disc_enderwave", ModSounds.SOUND_ENDERWAVE);
    public static final Item ETHERAL = register("music_disc_etheral", ModSounds.SOUND_ETHERAL);
    public static final Item EVOKER = register("music_disc_evoker", ModSounds.SOUND_EVOKER);
    public static final Item FINALE = register("music_disc_finale", ModSounds.SOUND_FINALE);
    public static final Item FLEDGLING = register("music_disc_fledgling", ModSounds.SOUND_FLEDGLING);
    public static final Item FLOAT = register("music_disc_float", ModSounds.SOUND_FLOAT);
    public static final Item FORT_STRESS = register("music_disc_fort_stress", ModSounds.SOUND_FORT_STRESS);
    public static final Item FROZEN = register("music_disc_frozen", ModSounds.SOUND_FROZEN);
    public static final Item GHOST = register("music_disc_ghost", ModSounds.SOUND_GHOST);
    public static final Item ICE = register("music_disc_ice", ModSounds.SOUND_ICE);
    public static final Item ICE_AND_RAIN = register("music_disc_ice_and_rain", ModSounds.SOUND_ICE_AND_RAIN);
    public static final Item INDUSTRIAL_GREEN = register("music_disc_industrial_green", ModSounds.SOUND_INDUSTRIAL_GREEN);
    public static final Item INTO_THE_JUNGLE = register("music_disc_into_the_jungle", ModSounds.SOUND_INTO_THE_JUNGLE);
    public static final Item JUKE = register("music_disc_juke", ModSounds.SOUND_JUKE);
    public static final Item LUSH = register("music_disc_lush", ModSounds.SOUND_LUSH);
    public static final Item MELLOWFRUIT = register("music_disc_mellowfruit", ModSounds.SOUND_MELLOWFRUIT);
    public static final Item MOOBLOOM = register("music_disc_moobloom", ModSounds.SOUND_MOOBLOOM);
    public static final Item MOONTHICAL = register("music_disc_moonthical", ModSounds.SOUND_MOONTHICAL);
    public static final Item MUSHROOM = register("music_disc_mushroom", ModSounds.SOUND_MUSHROOM);
    public static final Item MUSHROOM_ISLAND = register("music_disc_mushroom_island", ModSounds.SOUND_MUSHROOM_ISLAND);
    public static final Item OCEAN_DISC = register("music_disc_ocean_disc", ModSounds.SOUND_OCEAN_DISC);
    public static final Item OMEN = register("music_disc_omen", ModSounds.SOUND_OMEN);
    public static final Item ORESTEP = register("music_disc_orestep", ModSounds.SOUND_ORESTEP);
    public static final Item OVER = register("music_disc_over", ModSounds.SOUND_OVER);
    public static final Item OXIDIZATION = register("music_disc_oxidization", ModSounds.SOUND_OXIDIZATION);
    public static final Item PETIOLE = register("music_disc_petiole", ModSounds.SOUND_PETIOLE);
    public static final Item PHANTOM = register("music_disc_phantom", ModSounds.SOUND_PHANTOM);
    public static final Item PILLAGED = register("music_disc_pillaged", ModSounds.SOUND_PILLAGED);
    public static final Item PRISM = register("music_disc_prism", ModSounds.SOUND_PRISM);
    public static final Item PRISMARINE = register("music_disc_prismarine", ModSounds.SOUND_PRISMARINE);
    public static final Item PUMPKIN = register("music_disc_pumpkin", ModSounds.SOUND_PUMPKIN);
    public static final Item RANGE = register("music_disc_range", ModSounds.SOUND_RANGE);
    public static final Item REMNANT = register("music_disc_remnant", ModSounds.SOUND_REMNANT);
    public static final Item SCOPOPHOBIA = register("music_disc_scopophobia", ModSounds.SOUND_SCOPOPHOBIA);
    public static final Item SHOCK = register("music_disc_shock", ModSounds.SOUND_SHOCK);
    public static final Item SHULK = register("music_disc_shulk", ModSounds.SOUND_SHULK);
    public static final Item STEVE = register("music_disc_steve", ModSounds.SOUND_STEVE);
    public static final Item STEW = register("music_disc_stew", ModSounds.SOUND_STEW);
    public static final Item STORM = register("music_disc_storm", ModSounds.SOUND_STORM);
    public static final Item STRING = register("music_disc_string", ModSounds.SOUND_STRING);
    public static final Item TALL = register("music_disc_tall", ModSounds.SOUND_TALL);
    public static final Item TIME = register("music_disc_time", ModSounds.SOUND_TIME);
    public static final Item TRICKS = register("music_disc_tricks", ModSounds.SOUND_TRICKS);
    public static final Item TROUBLE_IN_THE_MANSION = register("music_disc_trouble_in_the_mansion", ModSounds.SOUND_TROUBLE_IN_THE_MANSION);
    public static final Item UNDER_A_BEAUTIFULLY_DARK_SKY = register("music_disc_under_a_beautifully_dark_sky", ModSounds.SOUND_UNDER_A_BEAUTIFULLY_DARK_SKY);
    public static final Item VIBRATE = register("music_disc_vibrate", ModSounds.SOUND_VIBRATE);
    public static final Item VOYAGE = register("music_disc_voyage", ModSounds.SOUND_VOYAGE);
    public static final Item WALDEINSAMKEIT = register("music_disc_waldeinsamkeit", ModSounds.SOUND_WALDEINSAMKEIT);
    public static final Item WANDERING_SOUL = register("music_disc_wandering_soul", ModSounds.SOUND_WANDERING_SOUL);
    public static final Item WARPED = register("music_disc_warped", ModSounds.SOUND_WARPED);
    public static final Item WEIRD = register("music_disc_weird", ModSounds.SOUND_WEIRD);
    public static final Item WITHERING = register("music_disc_withering", ModSounds.SOUND_WITHERING);
    public static final Item WOODLAND = register("music_disc_woodland", ModSounds.SOUND_WOODLAND);
    public static final Item WOZZY = register("music_disc_wozzy", ModSounds.SOUND_WOZZY);

    /**
     * Creates and registers a rare, non-stackable music disc under the mod's
     * namespace. The literal 14 is passed as the MusicDisc's first argument
     * for every disc — presumably the comparator output strength; confirm
     * against the MusicDisc constructor.
     */
    public static Item register(String id, SoundEvent sound) {
        Item.Settings settings = new Item.Settings().rarity(Rarity.RARE).maxCount(1);
        return Registry.register(Registry.ITEM, new Identifier(ATFMD.MOD_ID, id), new MusicDisc(14, sound, settings));
    }

    /** No-op; calling it forces this class (and all registrations) to load. */
    public static void init() { }
}
<gh_stars>0
/**
 * A class that holds the start-end indexes and name of a field in a line of text.
 *
 * Uses TypeScript parameter properties: each `public` constructor argument
 * is automatically declared and assigned as a public instance field.
 */
export class FieldToLineSubstringModel
{
    constructor(public fieldName: string,
                public fieldStartLocAtIndex: number,
                public fieldEndLocAtIndex: number)
    {
    }
}
/**
* Created by sidchik on 28.03.17.
*/
import React from 'react';
import { connect } from 'react-redux';
import { createHashHistory, createBrowserHistory, createMemoryHistory } from 'history';
import { _setRouting } from '../actions/routing';
// Factory that builds a history-provider React component bound to the given
// history type ('hash' | 'browser' | 'memory'; anything else falls back to
// hash). The returned connected component creates the history instance,
// mirrors every location change into the redux store via _setRouting, and
// renders its single child only once a location is available.
const createHistory = (historyType) => {
  let historyCreator;
  switch (historyType) {
    case 'hash':
      historyCreator = createHashHistory;
      break;
    case 'browser':
      historyCreator = createBrowserHistory;
      break;
    case 'memory':
      historyCreator = createMemoryHistory;
      break;
    default:
      // Unknown type: default to hash history.
      historyCreator = createHashHistory;
  }

  class History extends React.Component {
    // One history instance per component instance.
    history = historyCreator();

    // Stub: always returns an empty object. The `query` parameter is
    // currently ignored and callers invoke it without arguments.
    extractQuery(query) {
      return {};
    }

    constructor(props) {
      super(props);
      this.state = {
        location: null,
        action: null
      };
      // Publish the initial routing state (history type, instance, location).
      props.dispatch(_setRouting({
        historyType: historyType,
        history: this.history,
        location: this.history.location,
        query: this.extractQuery(),
      }));
      // Subscribe to history changes; `unlisten` tears this down on unmount.
      this.unlisten = this.history.listen((location, action) => {
        // location is an object like window.location
        this.setState({
          location: location,
          action: action
        }, function () {
          // NOTE(review): this callback is a plain function and reads
          // `this.state` — it relies on React binding the setState callback
          // to the component instance; confirm.
          props.dispatch(_setRouting({
            location: this.state.location,
            query: this.extractQuery(),
          }));
        });
      })
    }

    // Intentionally empty (deprecated lifecycle; left as-is).
    componentWillMount() {
    }

    componentWillUnmount() {
      this.unlisten();
    }

    render() {
      // # render only on ready
      const {routing} = this.props;
      if (!routing.location) return null;
      return React.Children.only(this.props.children);
    }
  }

  const mapStateToProps = (state /*, ownProps*/) => {
    return {
      routing: state.routing
    }
  };

  return connect(mapStateToProps)(History);
};
// Pre-built provider components, one per supported history type.
const HashHistory = createHistory('hash');
const BrowserHistory = createHistory('browser');
const MemoryHistory = createHistory('memory');

export { HashHistory, BrowserHistory, MemoryHistory }
export default createHistory;
<reponame>1b8/schematic<gh_stars>10-100
var fs = require('fs');
var Schematic = require('../')('1.8');
// Smoke-test the schematic parser: read the sample file, parse it, then
// look up the block at the origin and its dig time.
fs.readFile(__dirname+'/plane.schematic', function (err, data) {
  if (err) throw err;
  Schematic.parse(data, function (err, schem) {
    if (err) throw err;
    l('Hello');
    // Dig time of the block at (0,0,0) using tool id 42.
    l(schem.getBlock(0,0,0).digTime(42));
    // TODO tests
  });
});
function l() {console.log.apply(null, arguments);}
|
#!/bin/sh
# Regression test: the first 100 lines produced by our ./yes implementation
# must match the system yes(1) for the same argument ($$ is just an
# arbitrary token). Exits 1 on any difference.
tmp=/tmp/$$
./yes $$ 2>/dev/null |head -n 100 >$tmp
yes $$ 2>/dev/null |head -n 100|diff - $tmp >/dev/null || exit 1
rm -f $tmp
|
<filename>src/main/java/net/anatolich/subscriptions/currency/infrastructure/rest/ExchangeRateViewPayload.java
package net.anatolich.subscriptions.currency.infrastructure.rest;
import io.swagger.v3.oas.annotations.media.Schema;
import java.time.LocalDateTime;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import net.anatolich.subscriptions.currency.domain.model.ExchangeRate;
@Schema(name = "ExchangeRateView", description = "exchange rate of two currencies")
@Data
@NoArgsConstructor
@AllArgsConstructor
public class ExchangeRateViewPayload {

    @Schema(description = "source currency of exchange rate")
    private String from;

    @Schema(description = "target currency of exchange rate")
    private String to;

    @Schema(description = "conversion rate between source and target currency")
    private double rate;

    // When the rate was last refreshed. NOTE(review): unlike the other
    // fields this one has no @Schema annotation — confirm whether it should
    // be documented in the API schema too.
    private LocalDateTime updatedOn;

    /**
     * Maps a domain {@link ExchangeRate} to its REST view, flattening the
     * currencies to their ISO currency codes and the rate to a double.
     */
    public static ExchangeRateViewPayload from(ExchangeRate exchangeRate) {
        return new ExchangeRateViewPayload(
            exchangeRate.getSourceCurrency().getCurrencyCode(),
            exchangeRate.getTargetCurrency().getCurrencyCode(),
            exchangeRate.getRate().doubleValue(),
            exchangeRate.getUpdatedOn()
        );
    }
}
|
#!/bin/bash
# Build driver: wraps `dotnet msbuild` invocations of build/build.proj with
# retry and timeout protection, and dispatches on a subcommand.

SCRIPT_FILE=$(readlink -f $0)
SCRIPT_DIR=$(dirname $SCRIPT_FILE)

OUTDIR=$SCRIPT_DIR/Artifacts

RETRY_CMD="$SCRIPT_DIR/tools/scripts/retry.sh"
TIMEOUT_CMD="$SCRIPT_DIR/tools/scripts/timeout.sh"
# Every dotnet call is retried on failure and killed after 600 seconds.
DOTNET_CMD="$RETRY_CMD $TIMEOUT_CMD 600 dotnet"
RUN_BUILD="$DOTNET_CMD msbuild $SCRIPT_DIR/build/build.proj /nologo"

usage() {
  # Fix: the original printed a literal "%0" (printf-style leftover);
  # "$0" is the script name.
  echo "Usage: $0 [command] [args]"
  echo "Commands:"
  echo " build [module] Build a specific module"
  echo " full Build all modules in src/ directory"
  echo " ext Build external modules in externals/ directory"
  echo " dummy Generate dummy assemblies of all modules"
  echo " pack [version] Make a NuGet package with build artifacts"
  echo " clean Clean all artifacts"
}

# Restore and build a single module. $1 = module/project name.
cmd_build() {
  if [ -z "$1" ]; then
    echo "No module specified."
    exit 1
  fi
  # Inside the build container a local NuGet feed may be mounted at /nuget.
  if [ -d /nuget ]; then
    NUGET_SOURCE_OPT="/p:RestoreSources=/nuget"
  fi
  $RUN_BUILD /t:restore /p:Project=$1 $NUGET_SOURCE_OPT
  $RUN_BUILD /t:build /p:Project=$1
}

# Clean, restore and build every module.
cmd_full_build() {
  if [ -d /nuget ]; then
    NUGET_SOURCE_OPT="/p:RestoreSources=/nuget"
  fi
  $RUN_BUILD /t:clean
  $RUN_BUILD /t:restore $NUGET_SOURCE_OPT
  $RUN_BUILD /t:build
}

# Generate dummy (reference-only) assemblies for all modules.
cmd_dummy_build() {
  if [ -d /nuget ]; then
    NUGET_SOURCE_OPT="/p:RestoreSources=/nuget"
  fi
  $RUN_BUILD /t:restore $NUGET_SOURCE_OPT
  $RUN_BUILD /t:dummy $NUGET_SOURCE_OPT
}

# Build each external project under externals/, resolving packages from the
# local feed plus previously built artifacts.
cmd_ext_build() {
  if [ -d /nuget ]; then
    NUGET_SOURCE_OPT="/p:RestoreSources=/nuget;$SCRIPT_DIR/packages;$SCRIPT_DIR/Artifacts"
  fi
  PROJECTS=$(ls -1 $SCRIPT_DIR/externals/*.proj)
  for p in $PROJECTS; do
    $DOTNET_CMD msbuild $p /t:Build $NUGET_SOURCE_OPT /nologo
  done
}

# Pack a NuGet package. $1 = version; defaults to a timestamped local version.
cmd_pack() {
  VERSION=$1
  if [ -z "$VERSION" ]; then
    TIMESTAMP=$(date +"%s")
    VERSION="5.0.0-local-$TIMESTAMP"
  fi
  $RUN_BUILD /t:pack /p:Version=$VERSION
}

cmd_clean() {
  $RUN_BUILD /t:clean
}

# Dispatch: first argument selects the command, the rest are forwarded.
# "$@" is now quoted so arguments with spaces survive word splitting.
cmd=$1; shift;
case "$cmd" in
  build|--build|-b) cmd_build "$@" ;;
  full |--full |-f) cmd_full_build "$@" ;;
  dummy|--dummy|-d) cmd_dummy_build "$@" ;;
  ext  |--ext  |-e) cmd_ext_build "$@" ;;
  pack |--pack |-p) cmd_pack "$@" ;;
  clean|--clean|-c) cmd_clean "$@" ;;
  *) usage ;;
esac
|
#!/bin/bash
# Run OpenPose over every video in videos/, writing JSON keypoints to
# output/<video-name> with all rendering and display disabled.
#
# Fix: the glob already yields paths like "videos/foo.mp4", so the original
# `--video videos/$filename` pointed at the non-existent
# "videos/videos/foo.mp4" (and wrote JSON to "output/videos/foo.mp4").
for filepath in videos/*; do
  name=$(basename "$filepath")
  ./build/examples/openpose/openpose.bin --video "$filepath" --write_json "output/$name" --display 0 --render_pose 0
done
|
# Sum only the positive entries of the list (negatives are clipped to 0).
# Fix: the original duplicated the list and the accumulation loop verbatim
# inside the __main__ guard; compute once and reuse.
xx = [4, -4, 2, -2]
s = sum(max(i, 0) for i in xx)

if __name__ == "__main__":
    print(s)  # Output: 6
#!/bin/sh
# Integration test for datalackey's "run" command: a ruby child process
# echoes its JSON input back with '-out' appended to every key, and the
# (pid-normalized) transcript is compared against the expected output.
if [ $# -ne 1 ]; then
    echo "Usage: $(basename $0) datalackey-executable"
    exit 100
fi
B=$(basename $0 .sh)
DL=$1
OUT="${B}_out.txt"
EXP="${B}_expected.txt"
# Child program: parse JSON from stdin, rename keys k -> k-out, emit JSON.
# The ruby globals ($stdin, $stderr) are backslash-escaped so the unquoted
# heredoc does not expand them as shell variables.
cat > _script.sh << EOF
#!/usr/bin/env ruby
require 'json'
raw = \$stdin.read
begin
data = JSON.parse(raw)
rescue JSON::ParserError => e
\$stderr.puts e.to_s
\$stderr.puts raw
exit 1
end
\$stderr.puts JSON.generate(ARGV)
dada = {}
data.each_pair { |k, v| dada[k + '-out'] = v }
puts JSON.generate(dada)
exit 0
EOF
chmod a+x _script.sh
# Drive datalackey: store a labeled value, launch the child, feed it the
# stored label, then close the feed. replace-pid normalizes pids so the
# transcript is deterministic.
(
echo '{"label":123}'
echo '["1","run","in","JSON","stdin","out","JSON","stdout","output-prefix","fed-","program","./_script.sh"]'
echo '[null,"feed","1","input","label","foo"]'
echo '[null,"end-feed","1"]'
) | $DL -m -i stdin JSON -o stdout JSON |
replace-pid > $OUT
cat > $EXP <<EOF
[null,"data","stored","label",1]
["1","run","running","pid"]
[null,"process","started","1","pid"]
["1","run","input","closed"]
set
["1","run","exit",0]
[null,"data","stored","fed-foo-out",2]
end
[null,"process","ended","1","pid"]
["1","run","finished"]
["1","done",""]
EOF
# On success remove all temp files; on mismatch leave them for inspection.
compare-output $OUT $EXP && rm -f $OUT $EXP _script.sh
|
<reponame>m-nakagawa/sample<filename>jena-3.0.1/jena-permissions/src/main/java/org/apache/jena/permissions/graph/SecuredCapabilities.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.permissions.graph;
import org.apache.jena.graph.Capabilities ;
import org.apache.jena.graph.Node;
import org.apache.jena.graph.NodeFactory;
import org.apache.jena.graph.Triple;
import org.apache.jena.permissions.SecurityEvaluator;
import org.apache.jena.permissions.SecurityEvaluator.Action;
/**
* The interface for secured Capabilities instances.
*
*/
public class SecuredCapabilities implements Capabilities
{
// the security evaluator in use
private final SecurityEvaluator securityEvaluator;
// the graphIRI that the capabilities belong to.
private final Node graphIRI;
// the unsecured capabilities.
private final Capabilities capabilities;
/**
* Constructor.
*
* @param securityEvaluator
* The security evaluator in use.
* @param graphURI
* The graphIRI that the capabilities describe.
* @param capabilities
* The unsecured capabilities.
*/
public SecuredCapabilities( final SecurityEvaluator securityEvaluator,
final String graphURI, final Capabilities capabilities )
{
this.securityEvaluator = securityEvaluator;
this.graphIRI = NodeFactory.createURI(graphURI);
this.capabilities = capabilities;
}
/**
* @sec.graph Update
*/
@Override
public boolean addAllowed()
{
return securityEvaluator.evaluate(securityEvaluator.getPrincipal(), Action.Update, graphIRI)
&& capabilities.addAllowed();
}
/**
* @sec.graph Update
* @sec.triple Create (if everyTriple is true)
*/
@Override
public boolean addAllowed( final boolean everyTriple )
{
Object principal = securityEvaluator.getPrincipal();
boolean retval = securityEvaluator.evaluate(principal, Action.Update, graphIRI)
&& capabilities.addAllowed(everyTriple);
if (retval && everyTriple)
{
// special security check
retval = securityEvaluator.evaluate(principal, Action.Create, graphIRI,
Triple.ANY);
}
return retval;
}
@Override
public boolean canBeEmpty()
{
return capabilities.canBeEmpty();
}
/**
* @sec.graph Update
*/
@Override
public boolean deleteAllowed()
{
return securityEvaluator.evaluate(securityEvaluator.getPrincipal(), Action.Update, graphIRI)
&& capabilities.deleteAllowed();
}
/**
* @sec.graph Update
* @sec.triple Delete (if everyTriple is true)
*/
@Override
public boolean deleteAllowed( final boolean everyTriple )
{
Object principal = securityEvaluator.getPrincipal();
boolean retval = securityEvaluator.evaluate(principal, Action.Update, graphIRI)
&& capabilities.addAllowed(everyTriple);
if (retval && everyTriple)
{
// special security check
retval = securityEvaluator.evaluate(principal, Action.Delete, graphIRI,
Triple.ANY);
}
return retval;
}
@Override
public boolean findContractSafe()
{
return capabilities.findContractSafe();
}
@Override
public boolean handlesLiteralTyping()
{
return capabilities.handlesLiteralTyping();
}
/**
* @sec.graph Update
*/
@Override
public boolean iteratorRemoveAllowed()
{
return securityEvaluator.evaluate(securityEvaluator.getPrincipal(), Action.Update, graphIRI)
&& capabilities.iteratorRemoveAllowed();
}
@Override
public boolean sizeAccurate()
{
return capabilities.sizeAccurate();
}
} |
import {app, autoUpdater, dialog} from "electron"
import log from "electron-log"
import get from "lodash/get"
import semver from "semver"
import got from "got"
import open from "../lib/open"
// URL of the update feed served by update.electronjs.org for the current
// app version and the given platform identifier (e.g. "darwin-x64").
const getFeedURLForPlatform = (platform) =>
  `https://update.electronjs.org/brimsec/brim/${platform}/${app.getVersion()}`
// Resolves the newest published version string. Uses the darwin feed as a
// proxy for every platform: a 204 means we are already current, so our own
// version is returned; otherwise the JSON body's "name" field is used.
const getLatestVersion = async (): Promise<string> => {
  // Check for updates for MacOS and if there are then we assume there is also one for our other supported OSs
  const url = getFeedURLForPlatform("darwin-x64")
  const resp = await got(url)
  // the update server responds with a 204 and no body if the current version is the same as the
  // latest version, but will otherwise return json naming the latest version published on github
  // (even if it is behind the current version)
  if (resp.statusCode === 204) return app.getVersion()
  const body = JSON.parse(resp.body)
  const latestVersion = get(body, "name", "")
  if (!semver.valid(latestVersion))
    // NOTE(review): the invalid value is still returned below, so the caller's
    // semver.gte will throw and be caught by its .catch — confirm whether a
    // fallback to app.getVersion() was intended instead.
    log.error(new Error(`Invalid latest version format: ${latestVersion}`))
  return latestVersion
}
// Linux flavor of the update check: Electron's autoUpdater is not used here;
// instead the user is prompted and the download page is opened in a browser.
const autoUpdateLinux = async () => {
  const latestVersion = await getLatestVersion()
  // up to date
  if (semver.gte(app.getVersion(), latestVersion)) return
  const dialogOpts = {
    type: "info",
    buttons: ["Get Update", "Later"],
    title: "Application Update",
    message: "A new version of Brim is available.",
    detail: `Brim version ${latestVersion} is available for download; you are running v${app.getVersion()}.`
  }
  // Intentionally not awaited: the caller does not need to wait for the
  // user's dialog choice.
  dialog.showMessageBox(dialogOpts).then((returnValue) => {
    const navUrl = "https://www.brimsecurity.com/download/"
    // Button index 0 is "Get Update".
    if (returnValue.response === 0) open(navUrl)
  })
}
// Wires up periodic update checks. Linux gets the notify-and-link flow above;
// other platforms use Electron's autoUpdater against the update feed.
export async function setupAutoUpdater() {
  if (process.platform === "linux") {
    setUpdateRepeater(() => {
      autoUpdateLinux().catch((err) => log.error(err))
    })
    return
  }
  const feedURL = getFeedURLForPlatform(process.platform)
  // @ts-ignore
  autoUpdater.setFeedURL(feedURL)
  // Once a new release has been downloaded, offer an immediate restart.
  autoUpdater.on("update-downloaded", (event, releaseNotes, releaseName) => {
    const dialogOpts = {
      type: "info",
      buttons: ["Restart", "Later"],
      title: "Application Update",
      // releaseNotes are not available for windows, so use name instead
      message: process.platform === "win32" ? releaseNotes : releaseName,
      detail:
        "A new version of Brim has been downloaded. Restart the application to apply the update."
    }
    dialog.showMessageBox(dialogOpts).then((returnValue) => {
      // Button index 0 is "Restart".
      if (returnValue.response === 0) autoUpdater.quitAndInstall()
    })
  })
  autoUpdater.on("error", (err) => {
    log.error("There was a problem updating the application: " + err)
  })
  setUpdateRepeater(() => {
    // Only hit the feed when the published version is actually ahead of us;
    // per getLatestVersion, the feed may name a release behind the current one.
    getLatestVersion()
      .then((latestVersion) => {
        if (semver.gte(app.getVersion(), latestVersion)) return
        autoUpdater.checkForUpdates()
      })
      .catch((err) => log.error(err))
  })
}
// Schedules the given update-check callback: once shortly after startup,
// then repeatedly once per day.
const setUpdateRepeater = (updateCb) => {
  const startupDelayMs = 30 * 1000
  const dailyIntervalMs = 24 * 60 * 60 * 1000
  // check for updates 30s after startup
  setTimeout(updateCb, startupDelayMs)
  // then check for updates once a day
  setInterval(updateCb, dailyIntervalMs)
}
|
/**
* Copyright (c) 2017. Lorem ipsum dolor sit amet, consectetur adipiscing elit.
* Morbi non lorem porttitor neque feugiat blandit. Ut vitae ipsum eget quam lacinia accumsan.
* Etiam sed turpis ac ipsum condimentum fringilla. Maecenas magna.
* Proin dapibus sapien vel ante. Aliquam erat volutpat. Pellentesque sagittis ligula eget metus.
* Vestibulum commodo. Ut rhoncus gravida arcu.
*/
package com.app.wechat.response;
import com.alibaba.fastjson.annotation.JSONField;
import com.app.wechat.domain.WxTempIndModel;
/**
 * <p>Purpose: response payload for the template-message "get configured
 * industry" API (primary/secondary industry settings).</p>
 * <p>Copyright: Copyright (c) 2017</p>
 * <p>Created: 2017-07-05 15:45:34</p>
 *
 * @author Wang Jian
 * @version 1.0
 */
public class WxTempGetIndResponse extends AbstractWxResponse {

    private static final long serialVersionUID = 1L;

    /**
     * Primary industry (JSON field "primary_industry").
     */
    @JSONField(name = "primary_industry")
    private WxTempIndModel primaryInd;

    /**
     * Secondary industry (JSON field "secondary_industry").
     */
    @JSONField(name = "secondary_industry")
    private WxTempIndModel secondaryInd;

    public WxTempIndModel getPrimaryInd() {
        return primaryInd;
    }

    public void setPrimaryInd(WxTempIndModel primaryInd) {
        this.primaryInd = primaryInd;
    }

    public WxTempIndModel getSecondaryInd() {
        return secondaryInd;
    }

    public void setSecondaryInd(WxTempIndModel secondaryInd) {
        this.secondaryInd = secondaryInd;
    }
}
package US.bittiez.baseReplace;
import org.bukkit.Bukkit;
import org.bukkit.Material;
import org.bukkit.command.Command;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.Listener;
import org.bukkit.event.block.Action;
import org.bukkit.event.player.PlayerInteractEvent;
import org.bukkit.event.player.PlayerItemBreakEvent;
import org.bukkit.event.player.PlayerItemConsumeEvent;
import org.bukkit.inventory.EquipmentSlot;
import org.bukkit.inventory.Inventory;
import org.bukkit.inventory.ItemStack;
import org.bukkit.plugin.java.JavaPlugin;
import java.util.logging.Logger;
/**
 * Bukkit plugin that automatically refills the active hotbar slot after a
 * stack is placed, consumed, or broken, by scheduling a {@code Refill} task
 * one tick later.
 */
public class main extends JavaPlugin implements Listener {

    private static Logger log;

    /** Plugin entry point: capture the logger and register our listeners. */
    @Override
    public void onEnable() {
        log = getLogger();
        getServer().getPluginManager().registerEvents(this, this);
    }

    /** This plugin handles no commands. (@Override added — this overrides JavaPlugin.) */
    @Override
    public boolean onCommand(CommandSender who, Command cmd, String label, String[] args) {
        return false;
    }

    /**
     * When a player right-clicks a block holding a non-edible item, schedule
     * a refill of that hotbar slot on the next tick.
     */
    @EventHandler
    public void onPlayerInteract(PlayerInteractEvent e) {
        if (!e.getAction().equals(Action.RIGHT_CLICK_BLOCK)) // They right clicked a block
            return;
        if (e.isAsynchronous() || e.getItem() == null || e.getItem().getType().isEdible()) // Item isn't null, or edible.
            return;
        final int hotBarSlot = getItemSlot(e.getPlayer(), e.getHand());
        if (hotBarSlot == -1 || hotBarSlot > 8) // Make sure their active selection is in their hand or offhand
            return;
        final ItemStack itemClone = e.getItem().clone();
        Refill refill = new Refill(e.getPlayer().getInventory(), hotBarSlot, itemClone);
        // 1L, not lowercase 1l (easily misread as 11).
        Bukkit.getScheduler().runTaskLater(this, refill::run, 1L);
    }

    /** Refill the held slot after the last item of a stack is eaten/drunk. */
    @EventHandler
    public void onPlayerItemConsumeEvent(PlayerItemConsumeEvent e) {
        if (e.isAsynchronous() || e.isCancelled() || e.getItem().getAmount() > 1)
            return;
        final int hotBarSlot = e.getPlayer().getInventory().getHeldItemSlot();
        if (hotBarSlot < 0 || hotBarSlot > 8)
            return;
        Refill refill = new Refill(e.getPlayer().getInventory(), hotBarSlot, e.getItem().clone(), 1);
        Bukkit.getScheduler().runTaskLater(this, refill::run, 1L);
    }

    /** Refill the held slot after a tool/armor item breaks from durability loss. */
    @EventHandler
    public void onItemBreak(PlayerItemBreakEvent e) {
        if (e.isAsynchronous())
            return;
        final int hotBarSlot = e.getPlayer().getInventory().getHeldItemSlot();
        if (hotBarSlot < 0 || hotBarSlot > 8)
            return;
        Refill refill = new Refill(e.getPlayer().getInventory(), hotBarSlot, e.getBrokenItem().clone(), 1);
        Bukkit.getScheduler().runTaskLater(this, refill::run, 1L);
    }

    /** Held slot index for main/off hand interactions; -1 for other equipment slots. */
    private int getItemSlot(Player player, EquipmentSlot hand) {
        if (!hand.equals(EquipmentSlot.HAND) && !hand.equals(EquipmentSlot.OFF_HAND))
            return -1;
        return player.getInventory().getHeldItemSlot();
    }
}
|
'use strict';

// NOTE: compiled (Babel CommonJS) output — prefer editing the ES-module
// source over this artifact.

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.reducePeer = undefined;

var _extends2 = require('babel-runtime/helpers/extends');

var _extends3 = _interopRequireDefault(_extends2);

exports.default = peerReducerEnhancer;

// NOTE(review): _peerAPI is required but never referenced below — possibly
// kept only for its side effects; confirm before removing.
var _peerAPI = require('../peer/peerAPI');

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

// Wraps a root reducer so that every dispatched action also updates the
// `peer` slice of the state tree.
function peerReducerEnhancer(rootReducer) {
  return function (state, action) {
    return (0, _extends3.default)({}, rootReducer(state, action), {
      peer: reducePeer(state.peer, action)
    });
  };
}

// Reducer for the `peer` slice: @@PEER_INIT installs the payload; the
// connection-lifecycle actions return a shallow copy; anything else is a no-op.
var reducePeer = exports.reducePeer = function reducePeer(peer, action) {
  switch (action.type) {
    case '@@PEER_INIT':
      return action.peer;
    case '@@PEER_OPEN':
    case '@@PEER_CONNECTION':
    case '@@PEER_CONNECTING':
      return (0, _extends3.default)({}, peer);
    default:
      return peer;
  }
};
def rotate_matrix(matrix):
    """Rotate a rectangular matrix 90 degrees clockwise.

    The previous implementation only transposed the matrix (rows became
    columns in the same order), which is not a rotation: for
    [[1, 2], [3, 4]] it returned [[1, 3], [2, 4]] instead of
    [[3, 1], [4, 2]].  A clockwise quarter-turn is the transpose with each
    resulting row reversed, i.e. output row c is column c of the input read
    from the bottom row up.

    Args:
        matrix: non-empty list of equal-length rows.

    Returns:
        A new list of lists; the input matrix is not modified.
    """
    rotated = []
    for col in range(len(matrix[0])):
        # Walk source rows bottom-up so the first element of each output
        # row comes from the last input row.
        rotated.append([matrix[row][col] for row in range(len(matrix) - 1, -1, -1)])
    return rotated
# Example usage: rotate a 3x3 matrix.
matrix = [
    [1, 2, 3],
    [4, 5, 6],
    [7, 8, 9]
]
rotated = rotate_matrix(matrix)
# Create a unique username: keep drawing 10-character alphanumeric strings
# until one is not already taken, then suffix the space-stripped name.
username = ""
while True:
    candidate = ''.join(random.choices(string.ascii_lowercase + string.ascii_uppercase + string.digits, k=10))
    if candidate not in username_exists:
        username = candidate
        break
# Append the name (with spaces removed) to the random prefix.
username = username + "_" + ''.join(name.split(' '))
'use strict';

// Express middleware: forwards the uploaded file (req.files.content) to the
// backing image API via the axios instance attached to the request by
// earlier middleware, and replies with the API's inner data payload.
module.exports = function uploadImage(req, res, next) {
  const {axios} = req;
  axios.post('image', req.files.content, {
    headers: {
      // NOTE(review): setting this header manually means axios does not add
      // a multipart boundary — confirm the backing API accepts this.
      'Content-Type': 'multipart/form-data'
    }
  }).then(response => {
    // Success: unwrap the API envelope and send its data field.
    res.send(response.data.data);
  }, (err) => {
    // Mirror the upstream status code; 500 when no response arrived at all.
    res.sendStatus(err.response ? err.response.status : 500);
  }).then(() => {
    // Runs after either branch above, so the chain always advances to the
    // next middleware once a response has been sent.
    next();
  });
};
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import unittest
import mock
from py_utils import tempfile_ext
from telemetry import page
from telemetry.value import trace
from tracing.trace_data import trace_data
class ValueTest(unittest.TestCase):
  """Unit tests for telemetry.value.trace.TraceValue."""

  def testRepr(self):
    # str() of a TraceValue names the owning story and the value name.
    with trace.TraceValue(
        page.Page('http://www.bar.com/', name='load:story:bar'),
        trace_data.CreateTestTrace(), important=True, description='desc') as v:
      # assertEquals is a deprecated alias of assertEqual; use the latter.
      self.assertEqual("TraceValue('load:story:bar', 'trace')", str(v))

  @mock.patch('telemetry.value.trace.cloud_storage.Insert')
  def testAsDictWhenTraceSerializedAndUploaded(self, insert_mock):
    # Serializing then uploading should surface both the file id and the
    # cloud URL in AsDict(), and pass the serialized file to cloud storage.
    with tempfile_ext.TemporaryFileName('test.html') as file_path:
      with trace.TraceValue(
          None, trace_data.CreateTestTrace(),
          file_path=file_path,
          upload_bucket=trace.cloud_storage.PUBLIC_BUCKET,
          remote_path='a.html',
          cloud_url='http://example.com/a.html') as v:
        v.SerializeTraceData()
        fh = v.Serialize()
        cloud_url = v.UploadToCloud()
        d = v.AsDict()
        self.assertTrue(os.path.exists(file_path))
        self.assertEqual(d['file_id'], fh.id)
        self.assertEqual(d['cloud_url'], cloud_url)
        insert_mock.assert_called_with(
            trace.cloud_storage.PUBLIC_BUCKET, 'a.html', file_path)

  @mock.patch('telemetry.value.trace.cloud_storage.Insert')
  def testAsDictWhenTraceIsNotSerializedAndUploaded(self, insert_mock):
    # Without an explicit Serialize() call, UploadToCloud still works and
    # the upload uses the value's own temp filename.
    with trace.TraceValue(
        None, trace_data.CreateTestTrace(),
        upload_bucket=trace.cloud_storage.PUBLIC_BUCKET,
        remote_path='a.html',
        cloud_url='http://example.com/a.html') as v:
      v.SerializeTraceData()
      cloud_url = v.UploadToCloud()
      d = v.AsDict()
      self.assertEqual(d['cloud_url'], cloud_url)
      insert_mock.assert_called_with(
          trace.cloud_storage.PUBLIC_BUCKET, 'a.html', v.filename)

  def testNoLeakedTempFiles(self):
    # Redirect tempfile into a scratch dir and verify the value cleans up
    # everything it created once its context exits.
    with tempfile_ext.NamedTemporaryDirectory() as tempdir:
      with mock.patch('tempfile.tempdir', new=tempdir):
        with trace.TraceValue(None, trace_data.CreateTestTrace()) as v:
          v.SerializeTraceData()
      self.assertTrue(os.path.exists(tempdir))
      self.assertFalse(os.listdir(tempdir))
|
package com.ctrip.persistence.repository;
import java.util.List;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.JpaSpecificationExecutor;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import com.ctrip.persistence.entity.MLFlowHistory;
/**
 * Spring Data JPA repository for {@code MLFlowHistory} entities.
 *
 * @author <NAME>
 */
public interface MLFlowHistoryRepository
        extends JpaRepository<MLFlowHistory, Long>, JpaSpecificationExecutor<MLFlowHistory> {

    /** All history entries recorded for the given ML flow id (derived query). */
    List<MLFlowHistory> findByMlFlowId(Long id);

    /**
     * The history entry for the given runtime id (derived query; presumably
     * unique per runtime — returns null when no row matches).
     */
    MLFlowHistory findByRuntimeId(String runtimeId);
}
|
class Node:
    """A node in a binary expression tree."""

    def __init__(self, data):
        self.data = data
        self.left = None
        self.right = None


def generateTree(expression):
    """Build a binary expression tree from an infix expression string.

    The previous implementation ignored ``expression`` entirely and always
    returned the hard-coded tree for "A+B*C"; this version actually parses
    its input.  Operands are single non-space characters; the binary
    operators ``+ - * /`` are supported with conventional precedence
    ('*' and '/' bind tighter) and left associativity.  For "A+B*C" the
    result is identical to the old hard-coded tree: '+' at the root with
    'A' on the left and ('*' over 'B', 'C') on the right.
    """
    tokens = [ch for ch in expression if not ch.isspace()]
    pos = 0

    def parse_operand():
        # A single-character operand leaf.
        nonlocal pos
        node = Node(tokens[pos])
        pos += 1
        return node

    def parse_binary(ops, parse_next):
        # Left-associative chain of operators drawn from `ops`.
        nonlocal pos
        node = parse_next()
        while pos < len(tokens) and tokens[pos] in ops:
            op = Node(tokens[pos])
            pos += 1
            op.left = node
            op.right = parse_next()
            node = op
        return node

    def parse_term():
        # '*' and '/' bind tighter than '+' and '-'.
        return parse_binary('*/', parse_operand)

    return parse_binary('+-', parse_term)
<filename>kernel/security/fivm/fivm.h
/*
* Copyright (C) 2014 Allwinner Ltd.
*
* Author:
* <NAME> <<EMAIL>>
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License as
* published by the Free Software Foundation, version 2 of the
* License.
*
* File: fima.h
* File Integrity Measurement Architecture definitions
*/
#ifndef __LINUX_FIMA_H
#define __LINUX_FIMA_H

#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/security.h>
#include <linux/hash.h>

/*FIMV LKM debug mode*/
/*#define FIVM_LKM_DEBUG*/
/*#define FIVM_DEBUG_TIMMNG *//*To summary the time consume*/

/* Runtime switch for the debug printers below. */
extern int fivm_debug;

/* Debug printk with "function:" prefix, gated at runtime by fivm_debug. */
#undef dprintk
#define dprintk(format, arg...) \
	do { \
		if (fivm_debug) \
			printk(KERN_DEBUG "%s:" format, \
				__func__, ## arg); \
	} while (0)

/* Raw (unprefixed) debug printk, also gated by fivm_debug. */
#define fprintk(format, arg...) \
	do { \
		if (fivm_debug) \
			printk(format, ## arg); \
	} while (0)

/* Error printk with "function: " prefix (always emitted).
 * NOTE(review): the expansion ends in ';', so `derr(...);` leaves an empty
 * extra statement and the macro is unsafe as the sole body of an un-braced
 * if/else.  Left as-is to avoid breaking call sites that omit their own
 * semicolon. */
#define derr(format, arg...) \
	printk(KERN_ERR "%s: " format, \
		__func__, ## arg);

/* Digest and RSA-signature size limits (bytes). */
#define SHA256_DIGEST_LENGTH 32
#define SHA512_DIGEST_LENGTH 64
#define SHA_DIG_MAX SHA512_DIGEST_LENGTH
#define RSA2048_SIG_LENGTH 256
#define RSA3072_SIG_LENGTH 384
#define RSA_SIG_MAX RSA3072_SIG_LENGTH

#define FILE_NAME_LEN 512
#define DIR_MAX_FILE_NUM 10000
#define DIR_BITMAP_BYTES (DIR_MAX_FILE_NUM/8)
#define BLK_SIZE 4096
#define FILE_SIG_MAGIC 0x78381729

/* Header describing a signed list of files under one root directory. */
struct FILE_LIST_HEAD {
	unsigned int magic;
	unsigned int version;
	unsigned char sha_alg[8];
	unsigned char sig[RSA_SIG_MAX];
	unsigned char root_dir[64];
	unsigned int file_cnt;
	unsigned int file_name_len;
	unsigned int reserved[4];
};

/* Header preceding per-file signature entries. */
struct FILE_SIG_HEAD {
	unsigned char sig[RSA_SIG_MAX];
	unsigned int magic;
	char sha_alg[8];
	char rsa_alg[8];
	unsigned char root_dir[64];
	unsigned int total_len;
	unsigned int actual_cnt; /*actual file count*/
};

/* One measured file: hash plus its (crc-indexed) path name. */
struct FILE_SIG {
	unsigned int crc; /*name crc*/
	unsigned int flag;
	unsigned char sha[SHA_DIG_MAX];
	unsigned int name_len;
	unsigned char name[FILE_NAME_LEN];
};

#ifdef CONFIG_COMPAT
/* 32-bit userspace layout of struct fivm_param for compat ioctls. */
struct fivm_param_t32 {
	compat_uptr_t sig_head;
	compat_size_t sig_head_size;
	compat_uptr_t sig_table;
	compat_size_t sig_table_size;
};
#endif

/* Userspace-supplied signature head + table buffers. */
struct fivm_param {
	void *sig_head;
	unsigned int sig_head_size;
	void *sig_table;
	unsigned int sig_table_size;
};

/* Path-matching classification flags for fivm_path. */
enum {
	FIVM_FULL_PATH = 1,
	FIVM_PART_PATH,
	FIVM_FAIL_PATH,
};

struct fivm_path {
	char *path;
	int flag;
};

#define MAX_NAME_LEN 64

int fivm_init(void);
/* Note: fivm_cleanup() was previously declared twice; duplicate removed. */
int fivm_cleanup(void);
int fivm_enable(void);
int fivm_set(void *);
int fivm_disable(void);
int fivm_calc_hash(struct file *file, char *digest);
#endif
|
# Simple top-down maze game: steer the red dot from S to the green goal G.
import pygame
import sys

# Initialize PyGame library
pygame.init()

# Define colors
WHITE = (255, 255, 255)
BLACK = (0, 0, 0)
RED = (255, 0, 0)
GREEN = (0, 255, 0)

# Set up the game window
width, height = 800, 600
screen = pygame.display.set_mode((width, height))
pygame.display.set_caption('Maze Game')

# Define maze parameters
# Each character is one cell: '#' wall, ' ' floor, 'S' start, 'G' goal.
cell_size = 40
maze = [
    "####################",
    "# #",
    "# G #",
    "# #",
    "# ###### #",
    "# #",
    "# ###### #",
    "# #",
    "# ########## #",
    "# #",
    "# ###### #",
    "# #",
    "# S #",
    "# #",
    "####################"
]

# Define player position (grid coordinates, matching the 'S' cell)
player_x, player_y = 1, 12

# Define goal position (grid coordinates, matching the 'G' cell)
goal_x, goal_y = 10, 2

# Game loop
# NOTE(review): there is no pygame.time.Clock()/tick() call, so a held key
# moves the player once per loop iteration (i.e. extremely fast) — confirm
# whether a frame-rate cap is intended.
active = True
while active:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            active = False

    # elif chain: at most one direction is applied per frame, and moves are
    # only taken when the destination cell is not a wall.
    keys = pygame.key.get_pressed()
    if keys[pygame.K_UP]:
        if maze[player_y - 1][player_x] != '#':
            player_y -= 1
    elif keys[pygame.K_DOWN]:
        if maze[player_y + 1][player_x] != '#':
            player_y += 1
    elif keys[pygame.K_LEFT]:
        if maze[player_y][player_x - 1] != '#':
            player_x -= 1
    elif keys[pygame.K_RIGHT]:
        if maze[player_y][player_x + 1] != '#':
            player_x += 1

    # Check for goal reached
    if player_x == goal_x and player_y == goal_y:
        print("Goal reached!")
        active = False

    # Draw the maze
    screen.fill(WHITE)
    for y, row in enumerate(maze):
        for x, cell in enumerate(row):
            if cell == '#':
                pygame.draw.rect(screen, BLACK, (x * cell_size, y * cell_size, cell_size, cell_size))
            elif cell == 'G':
                pygame.draw.rect(screen, GREEN, (x * cell_size, y * cell_size, cell_size, cell_size))

    # Draw the player
    pygame.draw.circle(screen, RED, (player_x * cell_size + cell_size // 2, player_y * cell_size + cell_size // 2), cell_size // 3)

    pygame.display.flip()

pygame.quit()
sys.exit()
#! /bin/sh
#shell script to automate IPlug Project build, code-signing and packaging on OSX
BASEDIR=$(dirname $0)
cd $BASEDIR
#---------------------------------------------------------------------------------------------------------
#variables
# Extract the version macro value from AppConfig.h, then strip the macro
# name, quote characters and whitespace so only the bare number remains.
VERSION=`echo | grep "#define JucePlugin_Version " Source/AppConfig.h`
VERSION=${VERSION//\#define JucePlugin_Version }
VERSION=${VERSION//\'}
FULL_VERSION=$(echo "${VERSION}" | tr -d '[:space:]')
# Same extraction for the plug-in name (strips the surrounding quotes).
PLUGIN_NAME=`echo | grep "#define JucePlugin_Name " Source/AppConfig.h`
PLUGIN_NAME=${PLUGIN_NAME//\#define JucePlugin_Name }
PLUGIN_NAME=${PLUGIN_NAME//\"}
PLUGIN_NAME=$(echo "${PLUGIN_NAME}" | tr -d '[:space:]')
# work out the paths to the binaries
PKG="installer/build-mac/$PLUGIN_NAME Installer.pkg"
PKG_US="installer/build-mac/$PLUGIN_NAME Installer.unsigned.pkg"
echo "making $PLUGIN_NAME version $FULL_VERSION mac distribution..."
echo ""
#---------------------------------------------------------------------------------------------------------
#call python script to update version numbers
./update_version.py
#here you can use the touch command to force xcode to rebuild
#---------------------------------------------------------------------------------------------------------
# build xcode project. Change target to build individual formats
# The build is treated as failed if anything was written to stderr.
echo "Build"
xcodebuild -project Builds/MacOSX/$PLUGIN_NAME.xcodeproj -xcconfig $PLUGIN_NAME.xcconfig -target "$PLUGIN_NAME - All" -configuration Release 2> ./build-mac.log
if [ -s build-mac.log ]
then
echo "build failed due to following errors:"
echo ""
cat build-mac.log
exit 1
else
rm build-mac.log
fi
# Generate the PDF manual from the AsciiDoc source.
asciidoctor -r asciidoctor-pdf -b pdf manual/manual.adoc -o manual/${PLUGIN_NAME}_manual.pdf
#---------------------------------------------------------------------------------------------------------
# installer, uses Packages http://s.sudre.free.fr/Software/Packages/about.html
rm -R -f installer/$PLUGIN_NAME-mac.dmg
echo "building installer"
echo ""
chmod 0777 installer
packagesbuild installer/$PLUGIN_NAME.pkgproj
# CERT_ID is presumably exported by the calling environment — it is never
# set in this script.
echo "code-sign installer for Gatekeeper on 10.8"
echo ""
mv "${PKG}" "${PKG_US}"
productsign --sign "Developer ID Installer: ""${CERT_ID}" "${PKG_US}" "${PKG}"
rm -R -f "${PKG_US}"
#---------------------------------------------------------------------------------------------------------
# dmg, can use dmgcanvas http://www.araelium.com/dmgcanvas/ to make a nice dmg
echo "building dmg"
echo ""
# Prefer a dmgcanvas template when present; otherwise fall back to hdiutil.
if [ -d installer/$PLUGIN_NAME.dmgCanvas ]
then
dmgcanvas installer/$PLUGIN_NAME.dmgCanvas installer/$PLUGIN_NAME-mac.dmg
else
hdiutil create installer/$PLUGIN_NAME.dmg -srcfolder installer/build-mac/ -ov -anyowners -volname $PLUGIN_NAME
if [ -f installer/$PLUGIN_NAME-mac.dmg ]
then
rm -f installer/$PLUGIN_NAME-mac.dmg
fi
hdiutil convert installer/$PLUGIN_NAME.dmg -format UDZO -o installer/$PLUGIN_NAME-mac.dmg
rm -R -f installer/$PLUGIN_NAME.dmg
fi
rm -R -f installer/build-mac/
#---------------------------------------------------------------------------------------------------------
echo "done"
package com.cgfy.mybatis.bussApi.utils.excel.bean;
import com.alibaba.excel.annotation.ExcelProperty;
import com.alibaba.excel.converters.url.UrlImageConverter;
import lombok.Data;
import java.net.URL;
/**
 * Row bean used for EasyExcel template filling.  Lombok's {@code @Data}
 * already generates all getters/setters (plus equals/hashCode/toString), so
 * the previously hand-written accessors were exact duplicates and have been
 * removed; the public accessor interface is unchanged.
 */
@Data
public class FillData {
    private String test1;
    private String test2;
    private String test3;
    private String test4;
    private String test5;
    private String test6;
    /** Cell rendered as an image from this URL via UrlImageConverter. */
    @ExcelProperty(converter = UrlImageConverter.class)
    private URL imgUrl;
}
|
import Service from '@ember/service';
import Ember from 'ember';
// Builds the OAuth implicit-grant authorize URL for the given auth-server
// base URL and client id, redirecting back to the current page.
function _convertUrl(url, clientId){
  const redirectUri = `${window.location.origin}${window.location.pathname}`;
  return `${url}/oauth/authorize?client_id=${clientId}&response_type=token&redirect_uri=${redirectUri}`;
}
// Ember service implementing an OAuth implicit-grant flow: on init it either
// picks up an access token from the URL fragment or redirects the browser to
// the region's authorize endpoint.
export default Service.extend({
  restClientService: Ember.inject.service(),
  regionLocatorService: Ember.inject.service(),
  urlStateService: Ember.inject.service(),

  // Populated during init() when the redirect back carries a token.
  authToken: undefined,

  init(){
    // FIX: overridden init() must chain to the superclass per Ember's
    // EmberObject.init contract (was previously omitted).
    this._super(...arguments);
    let urlParams = new URLSearchParams(window.location.hash.substr(1));
    let token = urlParams.get('access_token');
    if (!token) {
      // Not authenticated yet: remember UI state, build the authorize URL
      // from the region/clientId query params, and redirect the browser.
      urlParams = new URLSearchParams(window.location.search);
      this.get('urlStateService').storeState();
      let clientId = urlParams.get('clientId');
      let regionUrl = this.get('regionLocatorService').getRegionAuthUrl(urlParams.get('region'));
      var url = _convertUrl(regionUrl,clientId);
      console.log('auth url: ', url);
      window.location = url;
    } else {
      console.log('TOKEN FOUND');
      this.authToken = token;
    }
  }
});
|
<filename>test/lib/service/FileServiceTest.js
const td = require('testdouble');
const chai = require('chai');
const tdChai = require('testdouble-chai');
chai.should();
chai.use(tdChai(td));
const fileServiceFactory = require('../../../lib/service/FileService');
// Unit tests for the FileService factory.  All filesystem interaction goes
// through the shared testdouble mock below, so tests assert on how the
// service drives the fs API rather than touching the real filesystem.
// NOTE(review): fsMock and its td.when(...) stubbings are shared across all
// test cases — stubs configured in one test remain active in later ones;
// consider td.reset() between cases.
describe('FileService', () => {
  const expectedFilename = 'filename';
  const fsMock = {
    exists: td.function(),
    open: td.function(),
    write: td.function(),
    read: td.function(),
    stat: td.function()
  };

  it ('should call fs.exists when checking if a file exists', () => {
    const fileService = fileServiceFactory(fsMock);
    fileService.exists(expectedFilename, () => {});
    fsMock.exists.should.have.been.calledWith(expectedFilename, td.callback);
  });

  it ('should write contents to a file on writeFile', () => {
    const expectedFd = 1;
    const expectedContent = "some content";
    // Stub open -> fd, then write -> success (42 bytes).
    td.when(fsMock.open(
      td.matchers.isA(String),
      'w',
      td.callback
    )).thenCallback(null, expectedFd);
    td.when(fsMock.write(
      expectedFd,
      td.matchers.isA(Buffer),
      0,
      td.matchers.isA(Number),
      0,
      td.callback
    )).thenCallback(null, 42);
    const fileService = fileServiceFactory(fsMock);
    fileService.writeFile(expectedFilename, expectedContent, (bytesWritten) => {
      fsMock.open.should.have.been.calledWith(
        expectedFilename,
        'w',
        td.callback
      );
      fsMock.write.should.have.been.calledWith(
        expectedFd,
        td.matchers.isA(Buffer),
        0,
        expectedContent.length,
        0,
        td.callback
      );
    });
  });

  it ('should throw an error if writeFile cannot open file', () => {
    const expectedError = 'cannot open file';
    // NOTE(review): expectedFd is unused in this case.
    const expectedFd = 1;
    const expectedContent = "some content";
    td.when(fsMock.open(
      expectedFilename,
      'w',
      td.callback
    )).thenCallback(expectedError);
    const fileService = fileServiceFactory(fsMock);
    fileService.writeFile(expectedFilename, expectedContent, (bytesWritten, err) => {
      err.should.equal(expectedError);
    });
  });

  it ('should throw an error if writeFile cannot write to file', () => {
    const expectedError = 'cannot write file';
    const expectedFd = 1;
    const expectedContent = "some content";
    td.when(fsMock.open(
      td.matchers.isA(String),
      'w',
      td.callback
    )).thenCallback(null, expectedFd);
    td.when(fsMock.write(
      expectedFd,
      td.matchers.isA(Buffer),
      0,
      td.matchers.isA(Number),
      0,
      td.callback
    )).thenCallback(expectedError);
    const fileService = fileServiceFactory(fsMock);
    fileService.writeFile(expectedFilename, expectedContent, (bytesWritten, err) => {
      err.should.equal(expectedError);
    });
  });

  it ('should read contents of a file on getFileContent', () => {
    const expectedFd = 1;
    const expectedContent = "some content";
    // Stub open -> fd, stat -> file size, read -> content.
    td.when(fsMock.open(
      td.matchers.isA(String),
      'r',
      td.callback
    )).thenCallback(null, expectedFd);
    td.when(fsMock.stat(
      td.matchers.isA(String),
      td.callback
    )).thenCallback(null, {size: expectedContent.length});
    td.when(fsMock.read(
      expectedFd,
      td.matchers.isA(Buffer),
      0,
      td.matchers.isA(Number),
      td.matchers.isA(Number),
      td.callback
    )).thenCallback(null, expectedContent);
    const fileService = fileServiceFactory(fsMock);
    fileService.getFileContent(expectedFilename, (fileContents) => {
      fsMock.read.should.have.been.calledWith(
        expectedFd,
        td.matchers.isA(Buffer),
        0,
        expectedContent.length,
        0, // no offset specified, so start at 0
        td.callback
      );
    });
  });

  it ('should read contents of a file on getFileContent with start offset', () => {
    const expectedFd = 1;
    const expectedContent = "some content to be read";
    const expectedStartOffset = 12;
    td.when(fsMock.open(
      td.matchers.isA(String),
      'r',
      td.callback
    )).thenCallback(null, expectedFd);
    td.when(fsMock.stat(
      td.matchers.isA(String),
      td.callback
    )).thenCallback(null, {size: expectedContent.length});
    td.when(fsMock.read(
      expectedFd,
      td.matchers.isA(Buffer),
      0,
      td.matchers.isA(Number),
      td.matchers.isA(Number),
      td.callback
    )).thenCallback(null, expectedContent);
    const fileService = fileServiceFactory(fsMock);
    // The start offset is passed as getFileContent's third argument and
    // should be forwarded as fs.read's position argument.
    fileService.getFileContent(expectedFilename, (fileContents) => {
      fsMock.read.should.have.been.calledWith(
        expectedFd,
        td.matchers.isA(Buffer),
        0,
        expectedContent.length,
        expectedStartOffset,
        td.callback
      );
    },
    expectedStartOffset
    );
  });

  it ('should return error when getFileContent cannot stat', () => {
    const expectedError = 'cannot stat file';
    td.when(fsMock.stat(
      expectedFilename,
      td.callback
    )).thenCallback(expectedError);
    const fileService = fileServiceFactory(fsMock);
    fileService.getFileContent(expectedFilename, (fileContents, err) => {
      err.should.equal(expectedError);
    });
  });

  it ('should return error when getFileContent cannot open', () => {
    const expectedError = 'cannot open file';
    const expectedContent = "some content to be read";
    td.when(fsMock.open(
      expectedFilename,
      'r',
      td.callback
    )).thenCallback(expectedError);
    td.when(fsMock.stat(
      td.matchers.isA(String),
      td.callback
    )).thenCallback(null, {size: expectedContent.length});
    const fileService = fileServiceFactory(fsMock);
    fileService.getFileContent(expectedFilename, (fileContents, err) => {
      err.should.equal(expectedError);
    });
  });

  it ('should return error when getFileContent cannot read', () => {
    const expectedError = 'cannot read file';
    const expectedFd = 1;
    const expectedContent = "some content to be read";
    td.when(fsMock.open(
      td.matchers.isA(String),
      'r',
      td.callback
    )).thenCallback(null, expectedFd);
    td.when(fsMock.stat(
      td.matchers.isA(String),
      td.callback
    )).thenCallback(null, {size: expectedContent.length});
    td.when(fsMock.read(
      expectedFd,
      td.matchers.isA(Buffer),
      0,
      td.matchers.isA(Number),
      td.matchers.isA(Number),
      td.callback
    )).thenCallback(expectedError);
    const fileService = fileServiceFactory(fsMock);
    fileService.getFileContent(expectedFilename, (fileContents, err) => {
      err.should.equal(expectedError);
    });
  });

  it ('should make a directory when mkdir is called', () => {
    const expectedDirectory = 'expectedDirectory/';
    // mkdir is backed by mkdirp, injected as the factory's second argument.
    const mkdirpMock = td.function();
    td.when(mkdirpMock(
      td.matchers.isA(String),
      td.callback
    )).thenCallback();
    const fileService = fileServiceFactory(null, mkdirpMock);
    fileService.mkdir(expectedDirectory, () => {
      mkdirpMock.should.have.been.calledWith(
        expectedDirectory,
        td.callback
      );
    });
  });
});
#!/bin/bash
# BUG FIX: this script uses bash-only features — the `function` keyword,
# arrays (RSYNC_PROTECT_TMP_FILES below), `[[ ]]`, `set -o pipefail`, and
# `trap ... ERR` — none of which are POSIX sh, so the shebang must be bash.
set -e
set -u
set -o pipefail
function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR
if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
# Resolves the framework under BUILT_PRODUCTS_DIR (or as given), rsyncs it
# into the app's Frameworks folder, strips architectures the current build
# does not target, re-signs it, and (pre-Xcode 7) embeds Swift runtime dylibs.
install_framework()
{
  # Resolve the source: prefer the full path under BUILT_PRODUCTS_DIR, then
  # just its basename there, then the literal argument.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi
  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi
  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
  local basename
  basename="$(basename -s .framework "$1")"
  # Locate the actual Mach-O binary inside the copied framework, following a
  # symlinked binary (e.g. versioned macOS frameworks) if needed.
  binary="${destination}/${basename}.framework/${basename}"
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi
  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi
  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"
  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies and strips a vendored dSYM
# Copies the dSYM into DERIVED_FILES_DIR, strips non-target architectures,
# then either moves the stripped dSYM to DWARF_DSYM_FOLDER_PATH or touches a
# placeholder so Xcode's input/output tracking does not rerun the script.
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    # Copy the dSYM into a the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"
    local basename
    basename="$(basename -s .framework.dSYM "$source")"
    binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"
    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
      strip_invalid_archs "$binary"
    fi
    # STRIP_BINARY_RETVAL is set by strip_invalid_archs: 1 means the binary
    # was processed, 0 means no matching architectures were found.
    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
    fi
  fi
}
# Copies the bcsymbolmap files of a vendored framework
# BUG FIX: the echo line previously used unescaped inner double quotes, so the
# logged command dropped the quoting around the --filter patterns and paths;
# it now escapes them like the echoes in install_framework / install_dsym.
install_bcsymbolmap() {
    local bcsymbolmap_path="$1"
    local destination="${BUILT_PRODUCTS_DIR}"
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${bcsymbolmap_path}\" \"${destination}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}
# Signs a framework with the provided identity
# No-op unless code signing is both required and allowed by the build
# settings. When COCOAPODS_PARALLEL_CODE_SIGN is true, the codesign command
# is backgrounded; the script `wait`s for those jobs at the end.
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    # eval is needed so the trailing '&' (parallel mode) is interpreted.
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures
# Removes from the given Mach-O binary every architecture slice that is not
# in $ARCHS. Communicates its outcome via the global STRIP_BINARY_RETVAL:
# 1 = binary processed, 0 = no architecture overlap (binary left untouched).
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=1
}
# Embed the vendored Matrix framework for both build configurations.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/matrix-minlison/Matrix.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/matrix-minlison/Matrix.framework"
fi
# Wait for any code-signing jobs that were backgrounded in parallel mode.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
<filename>src/main/java/ru/vladimir/start/StartUI.java
package ru.vladimir.start;
import ru.vladimir.models.*;
/**
 * Console entry point for the tracker application. Builds the menu once,
 * then repeatedly shows it and dispatches the user's selection until the
 * user confirms exit by answering "y".
 */
public class StartUI {
    /** Source of user input (validated implementation in main). */
    private Input input;
    // Valid menu keys; handing this range to ValidateInput keeps the
    // application running smoothly on out-of-range input.
    private int[] range = {0, 1, 2, 3, 4, 5, 6};

    /**
     * @param input the input implementation used to interact with the user.
     */
    public StartUI(Input input) {
        this.input = input;
    }

    /**
     * Runs the interactive menu loop until the user chooses to exit.
     */
    public void init() {
        Tracker tracker = new Tracker();
        MenuTracker menu = new MenuTracker(this.input, tracker);
        menu.fillActions();
        do {
            menu.show();
            // ask is overloaded with a range argument for validated input.
            int key = this.input.ask("Select: ", this.range);
            menu.select(key);
        } while (!"y".equals(this.input.ask("Exit?(y): ")));
    }

    /**
     * Application entry point: wires validated console input into StartUI.
     */
    public static void main(String[] args) {
        Input source = new ValidateInput();
        new StartUI(source).init();
    }
}
|
package com.rollncode.bubbles.game.model;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.support.annotation.NonNull;
/**
* @author <NAME> <EMAIL>
* @since 21.07.16
*/
abstract class GameObject {
    // Centre coordinates of the object (presumably screen pixels — confirm
    // against the drawing code). Package-visible so subclasses mutate them
    // directly.
    float mX;
    float mY;
    // Radius of the object; used by subclasses for drawing/collision.
    int mRadius;

    /** Advances this object's state for the elapsed {@code time} interval. */
    public abstract void update(long time);

    /** Renders this object onto {@code canvas} with the given {@code paint}. */
    public abstract void draw(@NonNull Canvas canvas, @NonNull Paint paint);

    float getX() {
        return this.mX;
    }

    float getY() {
        return this.mY;
    }

    int getRadius() {
        return this.mRadius;
    }
}
|
#!/bin/bash
# conda-build install script: place the chromedriver binary into the
# environment's bin directory and mark it executable.
# BUG FIX: ${PREFIX} is now quoted everywhere so the script survives a
# build prefix containing spaces.
mkdir -vp "${PREFIX}/bin";
cp -v chromedriver "${PREFIX}/bin/" || exit 1;
chmod -v 755 "${PREFIX}/bin/chromedriver" || exit 1;
|
#!/bin/bash
# Pause the demo until the user presses ENTER.
function waitforenter() {
  echo
  echo "Press ENTER key to continue"
  # -r stops read from mangling backslashes; the input itself is discarded.
  read -r _
}
# Stop and remove every container and image whose name contains "jim".
# BUG FIX: xargs -r (GNU --no-run-if-empty) skips invoking docker when grep
# matches nothing, avoiding the spurious "requires at least 1 argument"
# errors this script previously produced on a clean host.
function cleanup() {
  docker ps -a | grep "jim" | awk '{print $1}' | xargs -r docker stop
  docker ps -a | grep "jim" | awk '{print $1}' | xargs -r docker rm
  docker images -a | grep "jim" | awk '{print $3}' | xargs -r docker rmi
}
echo "Cleaning up (might create errors if never executed before)"
cleanup
# Build the three-layer image chain (base -> middle -> last) and verify each
# container responds before the "bugfix" rebuild experiment below.
echo "Building base image and run on port 2999"
docker build -t jim:base -f Dockerfile.base .
docker run -d -p 2999:3000 --name jim-base jim:base
echo "Curl base image 2999, should return BASE"
sleep 1
curl localhost:2999
waitforenter
echo "Building middle image which uses base as foundation and run on port 3000"
docker build -t jim:middle -f Dockerfile.middle .
docker run -d -p 3000:3000 --name jim-middle jim:middle
echo "Curl middle image 3000, should return BASE"
sleep 1
curl localhost:3000
waitforenter
echo "Building last image which uses middle as foundation and run on port 3001"
docker build -t jim:last -f Dockerfile.last .
docker run -d -p 3001:3000 --name jim-last jim:last
# BUG FIX: this message previously said "base image 3001" although it is the
# last image being curled on port 3001.
echo "Curl last image 3001, should return BASE"
sleep 1
curl localhost:3001
waitforenter
# Experiment: rebuild the base image with a "bugfix", then rebuild last and
# middle to demonstrate which tag each dependent image layer actually uses.
echo "Stopping jim-base containers"
docker stop jim-base
docker rm jim-base
echo "Build new base image with 'bugfix' (new app.bugfix.base.js in CMD, changes return string)"
docker build -t jim:base -f Dockerfile.base.bugfix .
docker run -d -p 2999:3000 --name jim-base jim:base
echo "Curl base image 2999, should show BASE PATCHED"
sleep 1
curl localhost:2999
waitforenter
# Rebuild last WITHOUT rebuilding middle: shows whether last picks up the
# patched base through middle's cached layers.
echo "Stopping jim-last containers"
docker stop jim-last
docker rm jim-last
echo "Build new last image which uses middle as foundation and run on port 3001"
docker build -t jim:last -f Dockerfile.last .
docker run -d -p 3001:3000 --name jim-last jim:last
echo "Curl last image 3001, BASE means it took the original image (not what that base tag is pointing to). BASE PATCHED means it took most recent tag"
sleep 1
curl localhost:3001
waitforenter
# Now rebuild middle against the patched base, then last again.
echo "Stopping jim-middle containers"
docker stop jim-middle
docker rm jim-middle
echo "Building middle image which uses base as foundation and run on port 3000"
docker build -t jim:middle -f Dockerfile.middle .
docker run -d -p 3000:3000 --name jim-middle jim:middle
echo "Curl middle image 3000, should return BASE PATCHED"
sleep 1
curl localhost:3000
waitforenter
echo "Stopping jim-last containers"
docker stop jim-last
docker rm jim-last
echo "Build new last image which uses middle as foundation and run on port 3001"
docker build -t jim:last -f Dockerfile.last .
docker run -d -p 3001:3000 --name jim-last jim:last
echo "Curl last image 3001, BASE means it took the original image (not what that base tag is pointing to). BASE PATCHED means it took most recent tag"
sleep 1
curl localhost:3001
waitforenter
echo "Cleaning up"
cleanup
|
define([
'../core/file_system',
'./InMemory',
'../core/api_error',
'../core/node_fs',
'../libs/path',
'../core/util'
], function (file_system, InMemory, api_error, fs, path, util) {
'use strict';
const { BaseFileSystem } = file_system;
const {InMemoryFileSystem} = InMemory;
const { ApiError, ErrorCode } = api_error;
const { mkdirpSync } = util;
/**
* The MountableFileSystem allows you to mount multiple backend types or
* multiple instantiations of the same backend into a single file system tree.
* The file systems do not need to know about each other; all interactions are
* automatically facilitated through this interface.
*
* For example, if a file system is mounted at /mnt/blah, and a request came in
* for /mnt/blah/foo.txt, the file system would see a request for /foo.txt.
*
* You can mount file systems when you configure the file system:
* ```javascript
* BrowserFS.configure({
* fs: "MountableFileSystem",
* options: {
* '/data': { fs: 'HTTPRequest', options: { index: "http://mysite.com/files/index.json" } },
* '/home': { fs: 'LocalStorage' }
* }
* }, function(e) {
*
* });
* ```
*
* For advanced users, you can also mount file systems *after* MFS is constructed:
* ```javascript
* BrowserFS.FileSystem.HTTPRequest.Create({
* index: "http://mysite.com/files/index.json"
* }, function(e, xhrfs) {
* BrowserFS.FileSystem.MountableFileSystem.Create({
* '/data': xhrfs
* }, function(e, mfs) {
* BrowserFS.initialize(mfs);
*
* // Added after-the-fact...
* BrowserFS.FileSystem.LocalStorage.Create(function(e, lsfs) {
* mfs.mount('/home', lsfs);
* });
* });
* });
* ```
*
* Since MountableFileSystem simply proxies requests to mounted file systems, it supports all of the operations that the mounted file systems support.
*
* With no mounted file systems, `MountableFileSystem` acts as a simple `InMemory` filesystem.
*/
class MountableFileSystem extends BaseFileSystem {
    /**
     * Creates a new, empty MountableFileSystem.
     * @param rootFs backing FS for paths not covered by any mount point;
     *   also hosts the mount-point directories themselves.
     */
    constructor(rootFs) {
        super();
        // Contains the list of mount points in mntMap, sorted by string length in decreasing order.
        // Ensures that we scan the most specific mount points for a match first, which lets us
        // nest mount points.
        this.mountList = [];
        this.mntMap = {};
        this.rootFs = rootFs;
    }
    /**
     * Creates a MountableFileSystem instance with the given options.
     */
    static Create(opts, cb) {
        InMemoryFileSystem.Create({}, (e, imfs) => {
            if (imfs) {
                const fs = new MountableFileSystem(imfs);
                try {
                    Object.keys(opts).forEach((mountPoint) => {
                        fs.mount(mountPoint, opts[mountPoint]);
                    });
                }
                catch (e) {
                    return cb(e);
                }
                cb(null, fs);
            }
            else {
                cb(e);
            }
        });
    }
    static isAvailable() {
        return true;
    }
    /**
     * Mounts the file system at the given mount point.
     */
    mount(mountPoint, fs) {
        if (mountPoint[0] !== '/') {
            mountPoint = `/${mountPoint}`;
        }
        mountPoint = path.resolve(mountPoint);
        if (this.mntMap[mountPoint]) {
            throw new ApiError(ErrorCode.EINVAL, "Mount point " + mountPoint + " is already taken.");
        }
        // Materialize the mount point as a directory chain on the root FS.
        mkdirpSync(mountPoint, 0x1ff, this.rootFs);
        this.mntMap[mountPoint] = fs;
        this.mountList.push(mountPoint);
        this.mountList = this.mountList.sort((a, b) => b.length - a.length);
    }
    umount(mountPoint) {
        if (mountPoint[0] !== '/') {
            mountPoint = `/${mountPoint}`;
        }
        mountPoint = path.resolve(mountPoint);
        if (!this.mntMap[mountPoint]) {
            throw new ApiError(ErrorCode.EINVAL, "Mount point " + mountPoint + " is already unmounted.");
        }
        delete this.mntMap[mountPoint];
        this.mountList.splice(this.mountList.indexOf(mountPoint), 1);
        // Remove the now-empty directory chain created at mount time,
        // stopping at the first non-empty ancestor.
        while (mountPoint !== '/') {
            if (this.rootFs.readdirSync(mountPoint).length === 0) {
                this.rootFs.rmdirSync(mountPoint);
                mountPoint = path.dirname(mountPoint);
            }
            else {
                break;
            }
        }
    }
    /**
     * Returns the file system that the path points to.
     */
    _getFs(path) {
        const mountList = this.mountList, len = mountList.length;
        for (let i = 0; i < len; i++) {
            const mountPoint = mountList[i];
            // We know path is normalized, so the mount point must be a prefix of
            // it ending on a path-component boundary.
            // BUG FIX: the boundary check (next char is '/' or end-of-string)
            // prevents e.g. '/data2/x' from incorrectly matching a '/data'
            // mount; mountPoint.length === 1 covers a '/' mount.
            if (mountPoint.length <= path.length && path.indexOf(mountPoint) === 0 &&
                (mountPoint.length === 1 || path.length === mountPoint.length ||
                 path.charAt(mountPoint.length) === '/')) {
                path = path.substr(mountPoint.length > 1 ? mountPoint.length : 0);
                if (path === '') {
                    path = '/';
                }
                return { fs: this.mntMap[mountPoint], path: path, mountPoint: mountPoint };
            }
        }
        // Query our root file system.
        return { fs: this.rootFs, path: path, mountPoint: '/' };
    }
    // Global information methods
    getName() {
        return MountableFileSystem.Name;
    }
    diskSpace(path, cb) {
        cb(0, 0);
    }
    isReadOnly() {
        return false;
    }
    supportsLinks() {
        // I'm not ready for cross-FS links yet.
        return false;
    }
    supportsProps() {
        return false;
    }
    supportsSynch() {
        return true;
    }
    /**
     * Fixes up error messages so they mention the mounted file location relative
     * to the MFS root, not to the particular FS's root.
     * Mutates the input error, and returns it.
     */
    standardizeError(err, path, realPath) {
        const index = err.message.indexOf(path);
        if (index !== -1) {
            err.message = err.message.substr(0, index) + realPath + err.message.substr(index + path.length);
            err.path = realPath;
        }
        return err;
    }
    // The following methods involve multiple file systems, and thus have custom
    // logic.
    // Note that we go through the Node API to use its robust default argument
    // processing.
    rename(oldPath, newPath, cb) {
        // Scenario 1: old and new are on same FS.
        const fs1rv = this._getFs(oldPath);
        const fs2rv = this._getFs(newPath);
        if (fs1rv.fs === fs2rv.fs) {
            return fs1rv.fs.rename(fs1rv.path, fs2rv.path, (e) => {
                if (e) {
                    this.standardizeError(this.standardizeError(e, fs1rv.path, oldPath), fs2rv.path, newPath);
                }
                cb(e);
            });
        }
        // Scenario 2: Different file systems.
        // Read old file, write new file, delete old file.
        return fs.readFile(oldPath, function (err, data) {
            if (err) {
                return cb(err);
            }
            fs.writeFile(newPath, data, function (err) {
                if (err) {
                    return cb(err);
                }
                fs.unlink(oldPath, cb);
            });
        });
    }
    renameSync(oldPath, newPath) {
        // Scenario 1: old and new are on same FS.
        const fs1rv = this._getFs(oldPath);
        const fs2rv = this._getFs(newPath);
        if (fs1rv.fs === fs2rv.fs) {
            try {
                return fs1rv.fs.renameSync(fs1rv.path, fs2rv.path);
            }
            catch (e) {
                this.standardizeError(this.standardizeError(e, fs1rv.path, oldPath), fs2rv.path, newPath);
                throw e;
            }
        }
        // Scenario 2: Different file systems.
        const data = fs.readFileSync(oldPath);
        fs.writeFileSync(newPath, data);
        return fs.unlinkSync(oldPath);
    }
    readdirSync(p) {
        const fsInfo = this._getFs(p);
        // If null, rootfs did not have the directory
        // (or the target FS is the root fs).
        let rv = null;
        // Mount points are all defined in the root FS.
        // Ensure that we list those, too.
        if (fsInfo.fs !== this.rootFs) {
            try {
                rv = this.rootFs.readdirSync(p);
            }
            catch (e) {
                // Ignore.
            }
        }
        try {
            const rv2 = fsInfo.fs.readdirSync(fsInfo.path);
            if (rv === null) {
                return rv2;
            }
            else {
                // Filter out duplicates.
                return rv2.concat(rv.filter((val) => rv2.indexOf(val) === -1));
            }
        }
        catch (e) {
            if (rv === null) {
                throw this.standardizeError(e, fsInfo.path, p);
            }
            else {
                // The root FS had something.
                return rv;
            }
        }
    }
    readdir(p, cb) {
        const fsInfo = this._getFs(p);
        fsInfo.fs.readdir(fsInfo.path, (err, files) => {
            if (fsInfo.fs !== this.rootFs) {
                try {
                    const rv = this.rootFs.readdirSync(p);
                    if (files) {
                        // Filter out duplicates.
                        files = files.concat(rv.filter((val) => files.indexOf(val) === -1));
                    }
                    else {
                        files = rv;
                    }
                }
                catch (e) {
                    // Root FS and target FS did not have directory.
                    if (err) {
                        return cb(this.standardizeError(err, fsInfo.path, p));
                    }
                }
            }
            else if (err) {
                // Root FS and target FS are the same, and did not have directory.
                return cb(this.standardizeError(err, fsInfo.path, p));
            }
            cb(null, files);
        });
    }
    realpathSync(p, cache) {
        const fsInfo = this._getFs(p);
        try {
            const mountedPath = fsInfo.fs.realpathSync(fsInfo.path, {});
            // resolve is there to remove any trailing slash that may be present
            return path.resolve(path.join(fsInfo.mountPoint, mountedPath));
        }
        catch (e) {
            throw this.standardizeError(e, fsInfo.path, p);
        }
    }
    realpath(p, cache, cb) {
        const fsInfo = this._getFs(p);
        fsInfo.fs.realpath(fsInfo.path, {}, (err, rv) => {
            if (err) {
                cb(this.standardizeError(err, fsInfo.path, p));
            }
            else {
                // resolve is there to remove any trailing slash that may be present
                cb(null, path.resolve(path.join(fsInfo.mountPoint, rv)));
            }
        });
    }
    rmdirSync(p) {
        const fsInfo = this._getFs(p);
        if (this._containsMountPt(p)) {
            throw ApiError.ENOTEMPTY(p);
        }
        else {
            try {
                fsInfo.fs.rmdirSync(fsInfo.path);
            }
            catch (e) {
                throw this.standardizeError(e, fsInfo.path, p);
            }
        }
    }
    rmdir(p, cb) {
        const fsInfo = this._getFs(p);
        if (this._containsMountPt(p)) {
            cb(ApiError.ENOTEMPTY(p));
        }
        else {
            fsInfo.fs.rmdir(fsInfo.path, (err) => {
                cb(err ? this.standardizeError(err, fsInfo.path, p) : null);
            });
        }
    }
    /**
     * Returns true if the given path contains a mount point.
     */
    _containsMountPt(p) {
        const mountPoints = this.mountList, len = mountPoints.length;
        for (let i = 0; i < len; i++) {
            const pt = mountPoints[i];
            if (pt.length >= p.length && pt.slice(0, p.length) === p) {
                return true;
            }
        }
        return false;
    }
}
// Backend registry metadata: static name and (empty) option schema.
MountableFileSystem.Name = "MountableFileSystem";
MountableFileSystem.Options = {};
/**
 * Tricky: Define all of the functions that merely forward arguments to the
 * relevant file system, or return/throw an error.
 * Take advantage of the fact that the *first* argument is always the path, and
 * the *last* is the callback function (if async).
 * @todo Can use numArgs to make proxying more efficient.
 * @hidden
 */
function defineFcn(name, isSync, numArgs) {
    if (isSync) {
        return function (...args) {
            const path = args[0];
            const rv = this._getFs(path);
            // Rewrite the path so the mounted FS sees it relative to its root.
            args[0] = rv.path;
            try {
                return rv.fs[name].apply(rv.fs, args);
            }
            catch (e) {
                // Translate the error message back into MFS-rooted terms.
                this.standardizeError(e, rv.path, path);
                throw e;
            }
        };
    }
    else {
        return function (...args) {
            const path = args[0];
            const rv = this._getFs(path);
            args[0] = rv.path;
            if (typeof args[args.length - 1] === 'function') {
                const cb = args[args.length - 1];
                // Wrap the callback so any ApiError is rewritten before the
                // caller sees it.
                args[args.length - 1] = (...args) => {
                    if (args.length > 0 && args[0] instanceof ApiError) {
                        this.standardizeError(args[0], rv.path, path);
                    }
                    cb.apply(null, args);
                };
            }
            return rv.fs[name].apply(rv.fs, args);
        };
    }
}
/**
 * @hidden
 */
const fsCmdMap = [
    // 1 arg functions
    ['exists', 'unlink', 'readlink'],
    // 2 arg functions
    ['stat', 'mkdir', 'truncate'],
    // 3 arg functions
    ['open', 'readFile', 'chmod', 'utimes'],
    // 4 arg functions
    ['chown'],
    // 5 arg functions
    ['writeFile', 'appendFile']
];
// Install async and sync forwarding proxies for every listed fs method.
for (let i = 0; i < fsCmdMap.length; i++) {
    const cmds = fsCmdMap[i];
    for (const fnName of cmds) {
        MountableFileSystem.prototype[fnName] = defineFcn(fnName, false, i + 1);
        MountableFileSystem.prototype[fnName + 'Sync'] = defineFcn(fnName + 'Sync', true, i + 1);
    }
}
return MountableFileSystem;
}); |
#!/usr/bin/python3
def square_matrix_simple(matrix=[]):
    """Return a new matrix whose entries are the squares of ``matrix``'s.

    The input matrix is never modified (so the mutable default is safe),
    and an empty matrix yields an empty list.
    """
    # A nested comprehension is the idiomatic form of the original
    # map/lambda pipeline; it builds a fresh list for every row.
    return [[cell ** 2 for cell in row] for row in matrix]
|
#!/bin/bash
# BUG FIX: `set -o pipefail` (used below) is a bash feature, not POSIX sh,
# so the shebang must be bash.
# Let's make the script more robust:
# -e: fail fast if any command in the script fails
# -u: check that all variables used in this script are set (if not, exit)
# -o pipefail: fail even faster, if an error occurs in a pipeline
set -eu -o pipefail
# include parse_yaml function
. ./tools/libs/parse-yaml.sh
# read yaml file
eval $(parse_yaml app.yaml "")
# Don't set up memcached variables here if you are deploying to google app engine
NEW_RELIC_APP_NAME=$env_variables_NEW_RELIC_APP_NAME NEW_RELIC_LICENSE=$env_variables_NEW_RELIC_LICENSE LOGGING_LEVEL=$env_variables_LOGGING_LEVEL APP_WIT_TOKEN=$env_variables_APP_WIT_TOKEN APP_WIT_VERSION=$env_variables_APP_WIT_VERSION GOOGLE_PROJECT_ID=$env_variables_GOOGLE_PROJECT_ID FACEBOOK_PAGE_TOKEN=$env_variables_FACEBOOK_PAGE_TOKEN NODE_ENV=production nodemon dist/index.js
|
<filename>js/scripts.js
// Contact-form submit handler: basic presence validation plus a
// confirmation alert.
$('.submit').click(function () {
  var Name = $('#fname').val();
  var Email = $('#email').val();
  // BUG FIX: the original read $('message') (no '#' and no .val()), which
  // yields a jQuery object that never equals '', so an empty message was
  // never caught. Assumes the message field has id="message" — confirm
  // against the markup.
  var Message = $('#message').val();
  var key = '<KEY>';
  if (Name === '' || Email === '' || Message === '') {
    alert('Please fill in the missing information!');
  } else {
    alert(' Hello ' + Name + ', we have received your message. Thank you for reaching out to us.');
  }
});
// Returns the numeric price component encoded in the selected #size option.
function getSizeCost() {
  var selectedSize = document.getElementById("size").value;
  // Always pass a radix so the option value parses as decimal.
  return parseInt(selectedSize, 10);
}
// Returns the numeric price component encoded in the selected #materials
// option.
function getMaterialCost() {
  var selectedCrust = document.getElementById("materials").value;
  // Always pass a radix so the option value parses as decimal.
  return parseInt(selectedCrust, 10);
}
// Returns the requested number of cleaners from the #numberofcleaners field.
function getNumber() {
  var selectedNumber = document.getElementById("numberofcleaners").value;
  // Always pass a radix so the field value parses as decimal.
  return parseInt(selectedNumber, 10);
}
// Returns the surcharge for the optional deep-clean checkbox:
// 300 when #clean is checked, 0 otherwise.
function cleaning() {
  var surcharge = 0;
  // Cleaned up: the original call had a stray trailing comma argument.
  var addCleaning = document.getElementById("clean");
  if (addCleaning.checked) {
    surcharge = 300;
  }
  // surcharge is already a number, so the original parseInt round-trip
  // (which also lacked a radix) is unnecessary.
  return surcharge;
}
// Computes the quoted price from the form selections and confirms the
// booking details to the user.
function calctotalPrice(e) {
  // BUG FIX: use the declared event parameter instead of the implicit
  // global `event`, which is undefined in Firefox. The parameter was
  // already declared, so callers are expected to pass the event.
  e.preventDefault();
  var totalPrice = (getSizeCost() + getMaterialCost() + cleaning()) * (getNumber());
  var name = $("input#name").val();
  var number = $("input#number").val();
  var location = $("input#location").val();
  console.log(totalPrice);
  alert("Hello " + name + ". Thank you for chosing KFMP Cleaners Ltd. Your request of " + getNumber() + " Cleaner(s) has been processed successfully and we will be sending cleaner(s) to " + location + " . " + "Your total amount payable is " + totalPrice + ". We will call you shortly on " + number);
}
#!/bin/bash
# Docker base-image preparation script: applies container-specific APT/dpkg
# workarounds, then installs HTTPS transport, add-apt-repository, upgrades
# all packages, and fixes the locale. Mutates the image's filesystem; not
# intended to run outside a container build.
set -e
source /build/buildconfig
set -x
## Temporarily disable dpkg fsync to make building faster.
echo force-unsafe-io > /etc/dpkg/dpkg.cfg.d/02apt-speedup
## Prevent initramfs updates from trying to run grub and lilo.
## https://journal.paul.querna.org/articles/2013/10/15/docker-ubuntu-on-rackspace/
## http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=594189
export INITRD=no
mkdir -p /etc/container_environment
echo -n no > /etc/container_environment/INITRD
## enable wheezy backports
## echo "deb http://ftp.us.debian.org/debian wheezy-backports main" >>/etc/apt/sources.list
## Fix some issues with APT packages.
## See https://github.com/dotcloud/docker/issues/1024
dpkg-divert --local --rename --add /sbin/initctl
ln -sf /bin/true /sbin/initctl
## Replace the 'ischroot' tool to make it always return true.
## Prevent initscripts updates from breaking /dev/shm.
## https://journal.paul.querna.org/articles/2013/10/15/docker-ubuntu-on-rackspace/
## https://bugs.launchpad.net/launchpad/+bug/974584
dpkg-divert --local --rename --add /usr/bin/ischroot
ln -sf /bin/true /usr/bin/ischroot
## Workaround https://github.com/dotcloud/docker/issues/2267,
## not being able to modify /etc/hosts.
mkdir -p /etc/workaround-docker-2267
ln -s /etc/workaround-docker-2267 /cte
cp /build/bin/workaround-docker-2267 /usr/bin/
## Install HTTPS support for APT.
$minimal_apt_get_install apt-transport-https ca-certificates
## Install add-apt-repository
$minimal_apt_get_install software-properties-common
## Upgrade all packages.
apt-get dist-upgrade -y --no-install-recommends
## Fix locale.
$minimal_apt_get_install locales
locale-gen en_US
|
import React from "react";
const Display = () => {
return <div className="display">{/* Display any props data here */}</div>;
};
|
/*ckwg +29
* Copyright 2013-2019 by Kitware, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither name of Kitware, Inc. nor the names of any contributors may be used
* to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS''
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
* \file
* \brief test VXL image class functionality
*/
#include <vital/types/image.h>
#include <vital/types/image_container.h>
#include <gtest/gtest.h>
#include <cmath>
namespace {
// ----------------------------------------------------------------------------
// Synthetic test-pattern value in [0, 1]: the product of two sine waves
// (one per axis) whose frequency grows with the plane index p, shifted and
// scaled into the unit interval.
double value_at( int i, int j, int p )
{
  constexpr double pi = 3.14159265358979323846;
  double const freq = 0.1 * static_cast<double>( p + 1 );
  double const horizontal = std::sin( pi * static_cast<double>( i ) * freq );
  double const vertical = std::sin( pi * static_cast<double>( j ) * freq );
  return 0.5 * ( horizontal * vertical + 1.0 );
}
// ----------------------------------------------------------------------------
// Helper function to populate the image with a pattern; the dynamic range is
// stretched between minv and maxv
template <typename T>
void
populate_vital_image(kwiver::vital::image& img, T minv, T maxv)
{
  // value_at() yields [0, 1]; scale/offset maps that onto [minv, maxv].
  const double range = static_cast<double>(maxv) - static_cast<double>(minv);
  const double offset = - static_cast<double>(minv);
  // NOTE(review): offset is negated here, so the stored value is
  // value*range - minv rather than value*range + minv — presumably
  // intentional for the tests that consume it; confirm.
  for( unsigned int p=0; p<img.depth(); ++p )
  {
    for( unsigned int j=0; j<img.height(); ++j )
    {
      for( unsigned int i=0; i<img.width(); ++i )
      {
        img.at<T>(i,j,p) = static_cast<T>(value_at(i, j, p) * range + offset);
      }
    }
  }
}
// ----------------------------------------------------------------------------
// helper function to populate the image with a pattern
// Convenience overload: stretches the pattern over the full representable
// range for integer pixel types, or [0, 1] for floating-point types.
template <typename T>
void
populate_vital_image(kwiver::vital::image& img)
{
  const T minv = std::numeric_limits<T>::is_integer ? std::numeric_limits<T>::min() : T(0);
  const T maxv = std::numeric_limits<T>::is_integer ? std::numeric_limits<T>::max() : T(1);
  populate_vital_image<T>(img, minv, maxv);
}
// Parameters for common test of get_image function
// (full image dimensions, cropped sub-image dimensions, and the crop origin)
constexpr unsigned int full_width = 60;
constexpr unsigned int full_height = 40;
constexpr unsigned int cropped_width = 30;
constexpr unsigned int cropped_height = 20;
constexpr unsigned int x_offset = 10;
constexpr unsigned int y_offset = 5;
// ----------------------------------------------------------------------------
// helper function to generate common test of get_image function
// Verifies that a cropped get_image() view has the requested dimensions and
// that every cropped pixel equals the corresponding offset pixel of the
// full image.
template <typename T>
void test_get_image_crop( kwiver::vital::image_container_sptr img_cont )
{
  kwiver::vital::image cropped_img =
    img_cont->get_image(x_offset, y_offset, cropped_width, cropped_height);
  kwiver::vital::image full_img = img_cont->get_image();
  EXPECT_EQ( cropped_img.width(), cropped_width );
  EXPECT_EQ( cropped_img.height(), cropped_height );
  // FIX: unsigned loop counters avoid signed/unsigned comparison warnings
  // against the unsigned dimension constants and image accessors.
  for ( unsigned int c = 0; c < cropped_img.depth(); ++c )
  {
    for ( unsigned int i = 0; i < cropped_width; ++i )
    {
      for ( unsigned int j = 0; j < cropped_height; ++j )
      {
        ASSERT_EQ( cropped_img.at<T>( i, j, c ),
                   full_img.at<T>( i + x_offset, j + y_offset , c ) );
      }
    }
  }
}
} // end anonymous namespace
|
# Copyright 2021 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: <NAME>
import logging
import os
import time
from typing import Dict
import requests
from elasticsearch import Elasticsearch
from observatory.platform.docker.compose import ComposeRunner, ProcessOutput
from observatory.platform.utils.config_utils import module_file_path
class ElasticEnvironment(ComposeRunner):
    """Docker Compose environment that runs Elasticsearch and Kibana for testing."""

    HTTP_OK = 200
    # Seconds to sleep between readiness polls in wait_until_started.
    # Without this the loop busy-waited, hammering both services with
    # requests while they were still booting.
    POLL_INTERVAL_SECS = 1.0

    def __init__(
        self,
        build_path: str,
        elastic_port: int = 9200,
        kibana_port: int = 5601,
        wait: bool = True,
        wait_time_secs: int = 120,
    ):
        """Construct an Elasticsearch and Kibana environment.

        :param build_path: the path to the build directory.
        :param elastic_port: the Elastic port.
        :param kibana_port: the Kibana port.
        :param wait: whether to wait until Elastic and Kibana have started.
        :param wait_time_secs: the maximum wait time in seconds.
        """
        self.elastic_module_path = module_file_path("observatory.platform.elastic")
        self.wait = wait
        self.wait_time_secs = wait_time_secs
        self.elastic_uri = f"http://localhost:{elastic_port}/"
        self.kibana_uri = f"http://localhost:{kibana_port}/"
        super().__init__(
            compose_template_path=os.path.join(self.elastic_module_path, "docker-compose.yml.jinja2"),
            build_path=build_path,
            compose_template_kwargs={"elastic_port": elastic_port, "kibana_port": kibana_port},
            debug=True,
        )

        # Add files needed by the compose template
        self.add_file(
            path=os.path.join(self.elastic_module_path, "elasticsearch.yml"), output_file_name="elasticsearch.yml"
        )

        # Stop the awful unnecessary Elasticsearch connection warnings being logged
        logging.basicConfig()
        logging.getLogger().setLevel(logging.ERROR)

    def make_environment(self) -> Dict:
        """Make the environment when running the Docker Compose command.

        :return: the environment (a copy of the current process environment).
        """
        return os.environ.copy()

    def start(self) -> ProcessOutput:
        """Start the Elastic environment, stopping any previous instance first.

        :return: ProcessOutput.
        """
        self.stop()
        process_output = super().start()
        if self.wait:
            self.wait_until_started()
        return process_output

    def kibana_ping(self) -> bool:
        """Check if Kibana has started or not.

        :return: whether Kibana responded with HTTP 200.
        """
        try:
            response = requests.get(self.kibana_uri)
            return response.status_code == self.HTTP_OK
        except (ConnectionResetError, requests.exceptions.ConnectionError):
            # Connection errors simply mean Kibana is not up yet.
            pass
        return False

    def wait_until_started(self) -> bool:
        """Wait until both Elastic and Kibana have started or the timeout elapses.

        :return: whether both services were found within wait_time_secs.
        """
        es = Elasticsearch([self.elastic_uri])
        start = time.time()
        services_found = False
        while True:
            services_found = es.ping() and self.kibana_ping()
            if services_found:
                break
            # Give up once the total wait time has elapsed
            if time.time() - start > self.wait_time_secs:
                break
            # Bug fix: sleep between polls instead of busy-waiting
            time.sleep(self.POLL_INTERVAL_SECS)
        return services_found
|
const test = require( 'unit.js' );

// `fileId` is shared across tests; it was previously assigned without a
// declaration (an implicit global), which breaks under strict mode.
let guest, admin, config, user1, user2, fileId;
const filePath = './test/media/file.png';

describe( 'Testing file renaming', function() {

    before( function() {
        const header = require( '../header.js' );
        guest = header.users.guest;
        admin = header.users.admin;
        user1 = header.users.user1;
        user2 = header.users.user2;
        config = header.config;
    } )

    it( 'regular user did create a bucket dinosaurs', function( done ) {
        user1.post( `/buckets/user/${user1.username}/dinosaurs` )
            .then( res => {
                test.bool( res.body.error ).isNotTrue();
                done();
            } ).catch( err => done( err ) );
    } )

    it( 'regular user did upload a file to dinosaurs', function( done ) {
        user1
            .attach( 'small-image', filePath )
            .post( "/buckets/dinosaurs/upload" )
            .then( (res) => {
                test.bool( res.body.error ).isNotTrue();
                done();
            } ).catch( err => done( err ) );
    } )

    it( 'uploaded file has the name "file.png"', function( done ) {
        // Fix: the original also called .attach() here, a copy/paste slip —
        // a GET request carries no upload payload.
        user1
            .get( `/files/users/${user1.username}/buckets/dinosaurs` )
            .then( ( res ) => {
                fileId = res.body.data[0].identifier;
                test.string( res.body.message ).is( "Found [1] files" );
                test.string( res.body.data[0].name ).is( "file.png" );
                test.bool( res.body.error ).isNotTrue();
                done();
            } ).catch( err => done( err ) );
    } )

    it( 'regular user did not rename an incorrect file to testy', function( done ) {
        user1.put( `/files/123/rename-file`, { name: "testy" } )
            .then( res => {
                test.object( res.body ).hasProperty( "message" );
                test.string( res.body.message ).is( "File '123' does not exist" );
                test.bool( res.body.error ).isTrue();
                done();
            } ).catch( err => done( err ) );
    } )

    it( 'regular user regular user did not rename a correct file with an empty name', function( done ) {
        user1.put( `/files/${fileId}/rename-file`, { name: "" } )
            .then( res => {
                test.object( res.body ).hasProperty( "message" );
                test.string( res.body.message ).is( "Please specify the new name of the file" );
                test.bool( res.body.error ).isTrue();
                done();
            } ).catch( err => done( err ) );
    } )

    it( 'regular user did rename a correct file to testy', function( done ) {
        user1.put( `/files/${fileId}/rename-file`, { name: "testy" } )
            .then( res => {
                test.object( res.body ).hasProperty( "message" );
                test.string( res.body.message ).is( "Renamed file to 'testy'" );
                test.bool( res.body.error ).isNotTrue();
                done();
            } ).catch( err => done( err ) );
    } )

    it( 'did rename the file to "testy" as reflected in the GET', function( done ) {
        // Fix: stray .attach() removed from this GET request as well.
        user1
            .get( `/files/users/${user1.username}/buckets/dinosaurs` )
            .then( ( res ) => {
                test.string( res.body.data[0].name ).is( "testy" );
                test.bool( res.body.error ).isNotTrue();
                done();
            } ).catch( err => done( err ) );
    } )

    it( 'regular user did remove the bucket dinosaurs', function( done ) {
        user1.delete( `/buckets/dinosaurs` )
            .then( res => {
                test.bool( res.body.error ).isNotTrue();
                done();
            } ).catch( err => done( err ) );
    } )
} )
import requests
from bs4 import BeautifulSoup
def extract_blog_data(topics: list) -> list:
    """Scrape tag/date/link metadata for each blog topic.

    :param topics: list of topic slugs appended to the blog base URL.
    :return: list of dicts with keys ``topic``, ``date`` and ``link``;
        topics that fail to download or parse are skipped.
    """
    data = []
    for topic in topics:
        try:
            url = f"https://exampleblog.com/{topic}"  # Replace with actual blog URL structure
            # Timeout stops one slow host from hanging the whole run;
            # raise_for_status turns HTTP errors into exceptions so that
            # error pages are not parsed as blog content.
            response = requests.get(url, timeout=10)
            response.raise_for_status()
            soup = BeautifulSoup(response.content, 'html.parser')

            blog_tag = soup.find("tag-selector")  # Replace with actual tag selector
            blog_date = soup.find("date-selector")  # Replace with actual date selector
            blog_link = soup.find_all("link-selector")  # Replace with actual link selector

            blog_data = {
                "topic": blog_tag.text.strip().split(",")[0],
                "date": blog_date.text.lstrip("-").rstrip("."),
                "link": blog_link[0]['href'],  # find_all already returns a list
            }
            data.append(blog_data)
        except Exception as e:
            # Best-effort scrape: report the failure and move on.
            print(f"Error processing {topic}: {e}")
            continue
    return data
<gh_stars>1-10
import React, {
useState,
forwardRef,
useImperativeHandle,
useRef,
ReactNode,
FC
} from 'react'
import { Table } from 'antd'
import useService from '@/utils/tableHook'
import SearchView from '@/components/SearchForm'
/**
 * Wrapper around the list, pagination, multi-select and search components.
 * @param {RefType} ref Table instance ref, used to call internal methods
 * @param {object[]} columns Table column configuration
 * @param {function} apiFun Request method that loads the table data
 * @param {object[]} searchConfigList Search bar configuration
 * @param {function} beforeSearch Hook run before searching (e.g. to massage special data)
 * @param {function} onFieldsChange Handles linked-field events in the search form
 * @param {object} extraProps Extra search params (not part of the search config)
 * @param {function} onSelectRow Checkbox selection callback
 * @param {string} rowKey Key of each table row
 * @param {function} sortConfig Custom table sort fields
 * @param {function} expandedRowRender Extra expanded-row renderer
 * @param {function} onExpand Fired when the expand icon is clicked
 * @param {string} rowClassName Class name applied to table rows
 * @param {boolean} small Display size of the table and pagination
 * @param {string[]} extraPagation Extra page-size options
 */
// Props accepted by MyTable; only `columns` and `apiFun` are required.
interface TableProps {
  columns: object[]; // table column configuration
  apiFun: (arg0?: unknown[]) => Promise<{}>; // data-loading request method
  ref?: RefType;
  searchConfigList?: object[]; // search bar configuration
  extraProps?: object; // extra search params merged into every request
  rowKey?: string;
  rowClassName?: string;
  small?: boolean; // compact table + pagination
  showHeader?: boolean;
  extraPagation?: string[]; // extra page-size options
  beforeSearch?: (arg0?: unknown) => void;
  onSelectRow?: (arg0?: string[], arg1?: string[]) => void; // enables checkboxes when set
  onFieldsChange?: (arg0?: unknown, arg1?: unknown) => void;
  sortConfig?: (arg0?: object) => any;
  expandedRowRender?: () => ReactNode; // enables expandable rows when set
  onExpand?: () => void;
}
const MyTable: FC<TableProps> = forwardRef(
  (props: TableProps, ref: RefType) => {
    /**
     * @forwardRef
     * Receives the parent's ref instance as a parameter, so the parent's
     * ref can be bound to this component's internal nodes/methods.
     */
    const searchForm: RefType = useRef(null)
    const {
      columns,
      apiFun,
      searchConfigList,
      extraProps,
      rowKey,
      rowClassName,
      small,
      showHeader,
      extraPagation,
      beforeSearch,
      onSelectRow,
      onFieldsChange,
      sortConfig,
      expandedRowRender,
      onExpand
    } = props
    // Search params; apply each field's transform (fn) to its initial value if present
    const searchObj = searchConfigList.reduce(
      (prev: CommonObjectType, next: CommonObjectType) =>
        Object.assign(prev, {
          [next.key]: next.fn ? next.fn(next.initialValue) : next.initialValue
        }),
      {}
    )
    // Initial request params
    const initParams = {
      ...searchObj,
      ...extraProps,
      page: 1,
      pageSize: 20
    }
    // Selected keys of the checkbox column
    const [selectedKeys, setSelectedKeys] = useState([])
    // All current list filters (search, pagination, sorting, ...)
    const [tableParams, setTableParams] = useState(initParams)
    // Search params only
    const [searchParams, setSearchParams] = useState(searchObj)
    // Sort params only
    const [sortParams, setSortParams] = useState({})
    // Pagination params
    const [curPageNo, setCurPageNo] = useState(initParams.page)
    const [curPageSize, setCurPageSize] = useState(initParams.pageSize)
    const { loading = false, response = {} }: CommonObjectType = useService(
      apiFun,
      tableParams
    )
    const validData = response?.data ? response.data : {}
    const { rows: tableData = [], total } = validData
    // Run a search: store the params and jump back to page 1
    const handleSearch = (val: object): void => {
      setSearchParams(val)
      setTableParams({ ...tableParams, ...val, page: 1 })
    }
    // Reset part of the list state (optionally forcing a page number)
    const resetAction = (page?: number): void => {
      setSelectedKeys([])
      const nextPage = page || curPageNo
      const nextParmas = page === 1 ? {} : { ...searchParams, ...sortParams }
      setCurPageNo(nextPage)
      setTableParams({
        ...initParams,
        ...nextParmas,
        page: nextPage,
        pageSize: curPageSize
      })
    }
    // Checkbox selection changed
    const onSelectChange = (
      selectedRowKeys: any[],
      selectedRows: any[]
    ): void => {
      setSelectedKeys(selectedRowKeys)
      onSelectRow(selectedRowKeys, selectedRows)
    }
    // Checkbox configuration
    const rowSelection = {
      selectedRowKeys: selectedKeys,
      onChange: onSelectChange
    }
    // Only show checkboxes when an onSelectRow callback was supplied
    const showCheckbox = onSelectRow ? { rowSelection } : {}
    // Expandable-row configuration
    const expendConfig = {
      expandedRowRender,
      onExpand,
      rowClassName
    }
    // Only enable expandable rows when a renderer was supplied
    const showExpend = expandedRowRender ? expendConfig : {}
    // Display size of table and pagination
    const tableSize = small ? 'small' : 'middle'
    const pagationSize = small ? 'small' : 'default'
    // Triggered when pagination, filters or sorter change
    const onTableChange = (
      pagination: CommonObjectType,
      filters: CommonObjectType,
      sorter: object
    ): void => {
      // If a sort config exists and the sorter changed, sorting takes priority
      const sortObj = sortConfig ? sortConfig(sorter) : {}
      setSortParams(sortObj)
      const { current: page, pageSize } = pagination
      setCurPageNo(page)
      setCurPageSize(pageSize)
      setTableParams({
        ...initParams,
        ...searchParams,
        ...sortObj,
        page,
        pageSize
      })
    }
    /**
     * @useImperativeHandle
     * First argument: the ref forwarded from the parent component.
     * Second argument: a callback returning the object of properties and
     * methods to expose to the parent through that ref.
     */
    useImperativeHandle(ref, () => ({
      // Reload the list
      update(page?: number): void {
        resetAction(page)
      },
      // Reload the list and reset the search fields
      resetForm(page?: number): void {
        if (searchForm.current) searchForm.current.resetFields()
        setSearchParams({})
        resetAction(page)
      },
      // Only reset the search fields (all of them, or just the given ones)
      resetField(field?: string[]): void {
        return field
          ? searchForm.current.resetFields([...field])
          : searchForm.current.resetFields()
      },
      // Get the current table data
      getTableData(): CommonObjectType[] {
        return tableData
      }
    }))
    return (
      <div>
        {/* Search bar */}
        {searchConfigList.length > 0 && (
          <SearchView
            ref={searchForm}
            config={searchConfigList}
            beforeSearch={beforeSearch}
            handleSearch={handleSearch}
            onFieldsChange={onFieldsChange}
          />
        )}
        {/* Table */}
        <Table
          {...showCheckbox}
          {...showExpend}
          rowKey={rowKey}
          loading={loading}
          dataSource={tableData}
          columns={columns}
          onChange={onTableChange}
          size={tableSize}
          showHeader={showHeader}
          pagination={{
            size: pagationSize,
            total,
            pageSize: tableParams.pageSize,
            current: tableParams.page,
            showQuickJumper: true,
            showSizeChanger: true,
            pageSizeOptions: ['20', '50', '100', '200', ...extraPagation],
            showTotal: (all) => `共 ${all} 条`
          }}
        />
      </div>
    )
  }
)
// Defaults for optional props. Fix: `sortConfig` now returns an empty object
// (the original `() => {}` returned `undefined`), so onTableChange always
// receives a real object to spread into the request params.
MyTable.defaultProps = {
  searchConfigList: [],
  ref: null,
  extraProps: {},
  rowKey: 'id',
  rowClassName: '',
  small: false,
  showHeader: true,
  extraPagation: [],
  beforeSearch: () => {},
  onSelectRow: () => {},
  onFieldsChange: () => {},
  sortConfig: () => ({}),
  expandedRowRender: null,
  onExpand: () => {}
}
|
#!/busybox/sh
# Drone/kaniko build plugin entrypoint: assembles registry credentials, tag
# destinations and cache flags from PLUGIN_* environment variables, then
# invokes the kaniko executor.
set -euo pipefail
export PATH=$PATH:/kaniko/
REGISTRY=${PLUGIN_REGISTRY:-index.docker.io}
# Write Docker registry credentials if a username/password was supplied.
if [ "${PLUGIN_USERNAME:-}" ] || [ "${PLUGIN_PASSWORD:-}" ]; then
    DOCKER_AUTH=`echo -n "${PLUGIN_USERNAME}:${PLUGIN_PASSWORD}" | /busybox/base64 | tr -d "\n"`
    cat > /kaniko/.docker/config.json <<DOCKERJSON
{
    "auths": {
        "https://${REGISTRY}": {
            "auth": "${DOCKER_AUTH}"
        }
    }
}
DOCKERJSON
fi
# GCR auth: store the service-account key and point the Google SDK at it.
if [ "${PLUGIN_JSON_KEY:-}" ];then
    echo "${PLUGIN_JSON_KEY}" > /kaniko/gcr.json
    export GOOGLE_APPLICATION_CREDENTIALS=/kaniko/gcr.json
fi
DOCKERFILE=${PLUGIN_DOCKERFILE:-Dockerfile}
CONTEXT=${PLUGIN_CONTEXT:-$PWD}
LOG=${PLUGIN_LOG:-info}
EXTRA_OPTS=""
# Translate optional PLUGIN_* settings into kaniko CLI flags.
if [[ -n "${PLUGIN_TARGET:-}" ]]; then
    TARGET="--target=${PLUGIN_TARGET}"
fi
if [[ "${PLUGIN_SKIP_TLS_VERIFY:-}" == "true" ]]; then
    EXTRA_OPTS="--skip-tls-verify=true"
fi
if [[ "${PLUGIN_CACHE:-}" == "true" ]]; then
    CACHE="--cache=true"
fi
if [ -n "${PLUGIN_CACHE_REPO:-}" ]; then
    CACHE_REPO="--cache-repo=${REGISTRY}/${PLUGIN_CACHE_REPO}"
fi
if [ -n "${PLUGIN_CACHE_TTL:-}" ]; then
    CACHE_TTL="--cache-ttl=${PLUGIN_CACHE_TTL}"
fi
# Build args may come as a comma-separated list of KEY=VALUE pairs ...
if [ -n "${PLUGIN_BUILD_ARGS:-}" ]; then
    BUILD_ARGS=$(echo "${PLUGIN_BUILD_ARGS}" | tr ',' '\n' | while read build_arg; do echo "--build-arg=${build_arg}"; done)
fi
# ... or as a comma-separated list of env var names whose values are expanded.
if [ -n "${PLUGIN_BUILD_ARGS_FROM_ENV:-}" ]; then
    BUILD_ARGS_FROM_ENV=$(echo "${PLUGIN_BUILD_ARGS_FROM_ENV}" | tr ',' '\n' | while read build_arg; do echo "--build-arg ${build_arg}=$(eval "echo \$$build_arg")"; done)
fi
# auto_tag, if set auto_tag: true, auto generate .tags file
# support format Major.Minor.Release or start with `v`
# docker tags: Major, Major.Minor, Major.Minor.Release and latest
if [[ "${PLUGIN_AUTO_TAG:-}" == "true" ]]; then
    TAG=$(echo "${DRONE_TAG:-}" |sed 's/^v//g')
    part=$(echo "${TAG}" |tr '.' '\n' |wc -l)
    # expect number; tags containing letters/dashes are used verbatim
    echo ${TAG} |grep -E "[a-z-]" &>/dev/null && isNum=1 || isNum=0
    if [ ! -n "${TAG:-}" ];then
        echo "latest" > .tags
    elif [ ${isNum} -eq 1 -o ${part} -gt 3 ];then
        echo "${TAG},latest" > .tags
    else
        major=$(echo "${TAG}" |awk -F'.' '{print $1}')
        minor=$(echo "${TAG}" |awk -F'.' '{print $2}')
        release=$(echo "${TAG}" |awk -F'.' '{print $3}')
        # Missing version components default to 0
        major=${major:-0}
        minor=${minor:-0}
        release=${release:-0}
        echo "${major},${major}.${minor},${major}.${minor}.${release},latest" > .tags
    fi
fi
# Destination precedence: explicit PLUGIN_TAGS, then a .tags file
# (possibly generated above), then PLUGIN_REPO:latest, else build-only.
if [ -n "${PLUGIN_TAGS:-}" ]; then
    DESTINATIONS=$(echo "${PLUGIN_TAGS}" | tr ',' '\n' | while read tag; do echo "--destination=${REGISTRY}/${PLUGIN_REPO}:${tag} "; done)
elif [ -f .tags ]; then
    DESTINATIONS=$(cat .tags| tr ',' '\n' | while read tag; do echo "--destination=${REGISTRY}/${PLUGIN_REPO}:${tag} "; done)
elif [ -n "${PLUGIN_REPO:-}" ]; then
    DESTINATIONS="--destination=${REGISTRY}/${PLUGIN_REPO}:latest"
else
    DESTINATIONS="--no-push"
    # Cache is not valid with --no-push
    CACHE=""
fi
echo ${BUILD_ARGS:-}
# NOTE(review): the flag variables below are deliberately unquoted so that
# empty ones disappear and multi-flag ones word-split — confirm none of the
# values can contain spaces before quoting them.
/kaniko/executor -v ${LOG} \
    --context=${CONTEXT} \
    --dockerfile=${DOCKERFILE} \
    ${EXTRA_OPTS} \
    ${DESTINATIONS} \
    ${CACHE:-} \
    ${CACHE_TTL:-} \
    ${CACHE_REPO:-} \
    ${TARGET:-} \
    ${BUILD_ARGS:-} \
    ${BUILD_ARGS_FROM_ENV:-}
#!/bin/bash
# Bulk-provisions Clearwater subscriber records and streams the resulting
# SSTables into Cassandra once the node's shared config appears.
# NOTE(review): COMMAND, number_of_subscribers, public_domain,
# cassandra_hostname and local_ip are expected in the environment
# (apparently injected by the Cloudify `ctx` tooling) — confirm upstream.
ctx logger debug "${COMMAND}"
start_number=2010000000
# Subscriber numbers are provisioned in [start_number, end_number].
end_number=$(($start_number + $number_of_subscribers))
# Block until Clearwater's shared configuration has been distributed.
while [ ! -f /etc/clearwater/shared_config ]
do
sleep 10
done
cd /usr/share/clearwater/crest/tools/sstable_provisioning/
# Only generate the SSTables if they do not exist from a previous run.
if [ ! -d "/usr/share/clearwater/crest/tools/sstable_provisioning/homestead_cache" ]; then
sudo ./BulkProvision homestead-local $start_number $end_number $public_domain toto
sudo ./BulkProvision homestead-hss $start_number $end_number $public_domain toto
sudo ./BulkProvision homer $start_number $end_number $public_domain toto
fi
sleep 30
# Stream each generated table into Cassandra (falls back to local_ip when
# no cassandra_hostname is provided).
sstableloader -v -d ${cassandra_hostname:-$local_ip} homer/simservs
sstableloader -v -d ${cassandra_hostname:-$local_ip} homestead_cache/impi
sstableloader -v -d ${cassandra_hostname:-$local_ip} homestead_cache/impu
sstableloader -v -d ${cassandra_hostname:-$local_ip} homestead_provisioning/implicit_registration_sets
sstableloader -v -d ${cassandra_hostname:-$local_ip} homestead_provisioning/public
sstableloader -v -d ${cassandra_hostname:-$local_ip} homestead_provisioning/private
sstableloader -v -d ${cassandra_hostname:-$local_ip} homestead_provisioning/service_profiles
#!/bin/bash
cd "$(dirname "${BASH_SOURCE[0]}")" \
&& . "utils.sh"
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Append an SSH config entry for github.com that uses the key file given
# as $1 and silences non-error SSH logging.
add_ssh_configs() {
    printf "%s\n" \
        "Host github.com" \
        "  IdentityFile $1" \
        "  LogLevel ERROR" >> ~/.ssh/config

    print_result $? "Add SSH configs"
}
# Copy the public key file ($1) to the clipboard using whichever clipboard
# tool is available: pbcopy (macOS) or xclip (Linux). If neither exists,
# fall back to asking the user to copy it manually.
copy_public_ssh_key_to_clipboard () {

    local clipboardCmd=""

    if cmd_exists "pbcopy"; then
        clipboardCmd="pbcopy"
    elif cmd_exists "xclip"; then
        clipboardCmd="xclip -selection clip"
    fi

    if [ -n "$clipboardCmd" ]; then
        # Intentional word-splitting: $clipboardCmd may carry arguments.
        $clipboardCmd < "$1"
        print_result $? "Copy public SSH key to clipboard"
    else
        print_warning "Please copy the public SSH key ($1) to clipboard"
    fi

}
# Prompt for an email address (used as the key comment) and generate a
# 4096-bit RSA key pair at the path given as $1.
generate_ssh_keys() {
    ask "Please provide an email address: " && printf "\n"
    ssh-keygen -t rsa -b 4096 -C "$(get_answer)" -f "$1"

    print_result $? "Generate SSH keys"
}
# Open GitHub's SSH-key settings page in the user's browser, or print a
# reminder if no opener utility is available.
open_github_ssh_page() {

    declare -r GITHUB_SSH_URL="https://github.com/settings/ssh"

    # `xdg-open` must be checked before `open`: Ubuntu also ships an
    # unrelated utility called `open`.
    if cmd_exists "xdg-open"; then
        xdg-open "$GITHUB_SSH_URL"
        return
    fi

    if cmd_exists "open"; then
        open "$GITHUB_SSH_URL"
        return
    fi

    print_warning "Please add the public SSH key to GitHub ($GITHUB_SSH_URL)"

}
# Full key-setup workflow: generate a key, wire it into ~/.ssh/config,
# copy the public half to the clipboard, open GitHub's settings page, and
# wait for the key to start authenticating before cleaning up.
set_github_ssh_key() {

    local sshKeyFileName="$HOME/.ssh/github"

    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

    # If there is already a file with that
    # name, generate another, unique, file name.

    if [ -f "$sshKeyFileName" ]; then
        sshKeyFileName="$(mktemp -u "$HOME/.ssh/github_XXXXX")"
    fi

    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

    generate_ssh_keys "$sshKeyFileName"
    add_ssh_configs "$sshKeyFileName"
    copy_public_ssh_key_to_clipboard "${sshKeyFileName}.pub"
    open_github_ssh_page
    # Once the key authenticates, the local public copy is no longer needed
    # (the private key remains in place).
    test_ssh_connection \
        && rm "${sshKeyFileName}.pub"

}
# Poll GitHub until the SSH key authenticates. A successful authenticated
# `ssh -T` probe exits with status 1 (GitHub offers no shell), so status 1
# is the success condition; retry every 5 seconds until then.
test_ssh_connection() {

    ssh -T git@github.com &> /dev/null
    until [ $? -eq 1 ]; do
        sleep 5
        ssh -T git@github.com &> /dev/null
    done

}
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
main() {
    # Only run the GitHub key setup on macOS or "desk(top)" OS variants.
    if [ "$(get_os)" != "macos" ] && [ "$(get_os_dir_name | cut -d- -f3)" != "desk" ]; then
        print_in_purple "\n • Set up GitHub SSH keys ... skipping for non-desktop OS\n\n"
    else
        print_in_purple "\n • Set up GitHub SSH keys\n\n"

        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

        if ! is_git_repository; then
            print_error "Not a Git repository"
            exit 1
        fi

        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

        # `ssh -T` exits with 1 when a key already authenticates against
        # GitHub; any other status means no working key is set up yet.
        ssh -T git@github.com &> /dev/null

        if [ $? -ne 1 ]; then
            set_github_ssh_key
        fi

        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

        # NOTE(review): $? here reflects the preceding `[ ... ]` test (or
        # set_github_ssh_key), not the overall setup — confirm intended.
        print_result $? "Set up GitHub SSH keys"
    fi
}

main
|
/**
 * A point in 2-D space.
 */
class Point {
  /**
   * @param {number} x - Horizontal coordinate.
   * @param {number} y - Vertical coordinate.
   */
  constructor(x, y) {
    Object.assign(this, { x, y });
  }
}
import android.os.AsyncTask;
import android.os.Bundle;
import android.view.View;
import android.widget.TextView;
import androidx.appcompat.app.AppCompatActivity;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
public class MainActivity extends AppCompatActivity {
// TextViews for displaying weather information
TextView locationTextView, temperatureTextView;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
// Connect the TextView variables to their corresponding views
locationTextView = findViewById(R.id.locationTextView);
temperatureTextView = findViewById(R.id.temperatureTextView);
}
// Fetch weather data
public void fetchWeather(View view) {
new FetchWeather().execute("<Placeholder City Name>");
}
// Async task to fetch weather information
class FetchWeather extends AsyncTask<String, Void, String> {
@Override
protected String doInBackground(String... strings) {
String city = strings[0];
String openWeatherMapAPIKey = "<API Key>";
// Create URL string for this API
String URLString = "https://api.openweathermap.org/data/2.5/weather?q=" + city + "&APPID=" + openWeatherMapAPIKey;
try {
URL url = new URL(URLString);
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
BufferedReader reader = new BufferedReader(new InputStreamReader(connection.getInputStream()));
StringBuilder json = new StringBuilder(1024);
String temp;
while ((temp = reader.readLine()) != null) {
json.append(temp).append("\n");
}
reader.close();
return json.toString();
} catch (Exception e) {
return e.toString();
}
}
// Parses the json string returned by the API request
@Override
protected void onPostExecute(String s) {
super.onPostExecute(s);
try {
JSONObject jsonObject = new JSONObject(s);
// Parse the json object to get relevant information
if(jsonObject.getInt("cod") == 200) {
locationTextView.setText(jsonObject.getString("name"));
double currentTemp = jsonObject.getJSONObject("main")
.getDouble("temp") - 273.15;
temperatureTextView.setText(String.format("%.2f °C", currentTemp));
}
} catch (JSONException e) {
e.printStackTrace();
}
}
}
} |
#!/bin/bash
# ANSI color codes used by the log output below.
export RED='\033[0;31m' # red color
export NC='\033[0m' # no color
export YELLOW='\033[33m' # yellow color
# Runs one inference benchmark binary on GPU for a single model.
#   $1 exe_bin      benchmark executable name (resolved under $OUTPUT_BIN)
#   $2 model_name   model identifier used in logs and result file names
#   $3 model_path   path to the model's __model__ file
#   $4 params_path  path to the model's __params__ file
#   $5 accuracy     optional comparison tolerance (default 1e-5)
#   $6 image_shape  optional input shape C,H,W (default 3,640,640)
test_gpu(){
    exe_bin=$1 # ./build/clas_benchmark
    model_name=$2
    model_path=$3
    params_path=$4

    accuracy="1e-5"
    if [ $# -ge 5 ]; then
        accuracy=$5
    fi
    image_shape="3,640,640"
    if [ $# -ge 6 ]; then
        image_shape=$6
    fi
    printf "${YELLOW} ${model_name} input image_shape = ${image_shape} ${NC} \n";

    use_gpu=true;
    # Benchmark the model at each batch size, writing a junit XML per run.
    for batch_size in "1" "2"
    do
        echo " "
        printf "start ${YELLOW} ${model_name}, use_gpu: ${use_gpu}, batch_size: ${batch_size}${NC}\n"
        $OUTPUT_BIN/${exe_bin} --model_name=${model_name} \
                               --model_path=${model_path} \
                               --params_path=${params_path} \
                               --batch_size=${batch_size} \
                               --use_gpu=${use_gpu} \
                               --accuracy=${accuracy} \
                               --image_shape=${image_shape} \
                               --gtest_output=xml:test_${model_name}_gpu_${accuracy}_bz${batch_size}.xml
        # Rewrite the generic testsuite name in the junit XML
        # (presumably so results are grouped per model — confirm py_sed.py).
        python3.6 ${CASE_ROOT}/py_sed.py --input_file=test_${model_name}_gpu_${accuracy}_bz${batch_size}.xml \
                                         --testsuite_old_name="test_rcnn_model"
        printf "finish ${RED} ${model_name}, use_gpu: ${use_gpu}, batch_size: ${batch_size}${NC}\n"
        echo " "
    done
}
# Runs the GPU benchmark for every RCNN model found under $1.
# Each model directory is expected to contain __model__ and __params__.
main(){
    printf "${YELLOW} ==== start benchmark ==== ${NC} \n"

    local model_root="$1"
    local rcnn_models="mask_rcnn_r50_1x \
                       faster_rcnn_r50_1x \
                       faster_rcnn_dcn_r50_vd_fpn_3x_server_side"

    local model
    for model in ${rcnn_models}
    do
        test_gpu "test_rcnn_model" "${model}" \
                 ${model_root}/${model}/__model__ \
                 ${model_root}/${model}/__params__
    done

    printf "${YELLOW} ==== finish benchmark ==== ${NC} \n"
}
# Default model directory; an explicit path may be passed as $1.
model_root=${DATA_ROOT}/PaddleDetection/infer_static
if [ $# -ge 1 ]; then
    model_root=$1
fi
main ${model_root}
|
<reponame>vany152/FilesHash
// Copyright 2021 <NAME>.
// Distributed under the Boost Software License, Version 1.0.
// https://www.boost.org/LICENSE_1_0.txt
#include <boost/config/pragma_message.hpp>
#if defined(__GNUC__) && !defined(__clang__) && __cplusplus < 201100L
BOOST_PRAGMA_MESSAGE("Skipping test under GCC in C++98 mode")
int main() {}
#else
#if defined(__clang__)
# pragma clang diagnostic ignored "-Wlong-long"
#endif
#include <boost/container_hash/hash.hpp>
#include <boost/core/lightweight_test.hpp>
#include <string>
#include <vector>
#include <list>
#include <utility>
#include <complex>
#include <limits>
#include <climits>
#include <cfloat>
#include <cstddef>
// This test checks whether hash values have changed
// Convenience wrapper: the boost::hash value of t.
template<class T> std::size_t hv( T const& t )
{
    boost::hash<T> hasher;
    return hasher( t );
}
int main()
{
// char
BOOST_TEST_EQ( hv('\x00'), 0 );
BOOST_TEST_EQ( hv('A'), 'A' );
BOOST_TEST_EQ( hv('\x7F'), 0x7F );
// signed char
BOOST_TEST_EQ( hv((signed char)0), 0 );
BOOST_TEST_EQ( hv((signed char)+1), +1 );
BOOST_TEST_EQ( hv((signed char)-1), (std::size_t)-1 );
BOOST_TEST_EQ( hv((signed char)+127), 127 );
BOOST_TEST_EQ( hv((signed char)-128), (std::size_t)-128 );
// unsigned char
BOOST_TEST_EQ( hv((unsigned char)0), 0 );
BOOST_TEST_EQ( hv((unsigned char)1), 1 );
BOOST_TEST_EQ( hv((unsigned char)255), 255 );
// short
BOOST_TEST_EQ( hv((short)0), 0 );
BOOST_TEST_EQ( hv((short)+1), 1 );
BOOST_TEST_EQ( hv((short)-1), (std::size_t)-1 );
BOOST_TEST_EQ( hv((short)+32767), 32767 );
BOOST_TEST_EQ( hv((short)-32768), (std::size_t)-32768 );
// unsigned short
BOOST_TEST_EQ( hv((unsigned short)0), 0 );
BOOST_TEST_EQ( hv((unsigned short)1), 1 );
BOOST_TEST_EQ( hv((unsigned short)65535), 65535 );
// int
BOOST_TEST_EQ( hv(0), 0 );
BOOST_TEST_EQ( hv(+1), 1 );
BOOST_TEST_EQ( hv(-1), (std::size_t)-1 );
BOOST_TEST_EQ( hv(+32767), 32767 );
BOOST_TEST_EQ( hv(-32768), (std::size_t)-32768 );
// unsigned int
BOOST_TEST_EQ( hv((unsigned)0), 0 );
BOOST_TEST_EQ( hv((unsigned)1), 1 );
BOOST_TEST_EQ( hv((unsigned)65535), 65535 );
BOOST_TEST_EQ( hv((unsigned)-1), (std::size_t)(unsigned)-1 );
// long
BOOST_TEST_EQ( hv(0L), 0 );
BOOST_TEST_EQ( hv(+1L), 1 );
BOOST_TEST_EQ( hv(-1L), (std::size_t)-1 );
BOOST_TEST_EQ( hv(+32767L), 32767 );
BOOST_TEST_EQ( hv(-32768L), (std::size_t)-32768 );
// unsigned long
BOOST_TEST_EQ( hv(0UL), 0 );
BOOST_TEST_EQ( hv(1UL), 1 );
BOOST_TEST_EQ( hv(65535UL), 65535 );
BOOST_TEST_EQ( hv((unsigned long)-1), (std::size_t)(unsigned long)-1 );
// long long
BOOST_TEST_EQ( hv(0LL), 0 );
BOOST_TEST_EQ( hv(+1LL), 1 );
BOOST_TEST_EQ( hv(-1LL), (std::size_t)-1 );
BOOST_TEST_EQ( hv(+32767LL), 32767 );
BOOST_TEST_EQ( hv(-32768LL), (std::size_t)-32768 );
// unsigned long long
BOOST_TEST_EQ( hv(0ULL), 0 );
BOOST_TEST_EQ( hv(1ULL), 1 );
BOOST_TEST_EQ( hv(65535ULL), 65535 );
#if SIZE_MAX == 4294967295U
BOOST_TEST_EQ( hv((unsigned long long)-1), 3221225537U );
#else
BOOST_TEST_EQ( hv((unsigned long long)-1), (std::size_t)-1 );
#endif
#if defined(BOOST_HAS_INT128)
typedef boost::int128_type int128;
BOOST_TEST_EQ( hv((int128)0), 0 );
BOOST_TEST_EQ( hv((int128)1), 1 );
BOOST_TEST_EQ( hv((int128)-1), (std::size_t)-1 );
BOOST_TEST_EQ( hv((int128)+32767), 32767 );
BOOST_TEST_EQ( hv((int128)-32768), (std::size_t)-32768 );
typedef boost::uint128_type uint128;
BOOST_TEST_EQ( hv((uint128)0), 0 );
BOOST_TEST_EQ( hv((uint128)1), 1 );
BOOST_TEST_EQ( hv((uint128)65535), 65535 );
#if defined(BOOST_GCC) && BOOST_GCC < 100000
// This looks like some sort of miscompilation.
// Under CI, both GHA and Appveyor GCCs produce this value.
// But the exact same test on godbolt.org produces the correct
// value, below.
// BOOST_TEST_EQ( hv((uint128)-1), 18446744073709551615ULL );
#else
BOOST_TEST_EQ( hv((uint128)-1), 13835058055282163777ULL );
#endif
#endif
// float
BOOST_TEST_EQ( hv(0.0f), 0 );
BOOST_TEST_EQ( hv(-0.0f), 0 );
BOOST_TEST_EQ( hv(1.0f), 1065353216U );
BOOST_TEST_EQ( hv(-1.0f), 3212836864U );
BOOST_TEST_EQ( hv(3.14f), 1078523331U );
BOOST_TEST_EQ( hv(-3.14f), 3226006979U );
BOOST_TEST_EQ( hv(1e-38f), 7136238U );
BOOST_TEST_EQ( hv(-1e-38f), 2154619886U );
BOOST_TEST_EQ( hv(1e+38f), 2123789977U );
BOOST_TEST_EQ( hv(-1e+38f), 4271273625U );
#if !defined(__GLIBCXX__)
BOOST_TEST_EQ( hv(std::numeric_limits<float>::infinity()), 2139095040U );
BOOST_TEST_EQ( hv(-std::numeric_limits<float>::infinity()), 4286578688U );
#elif SIZE_MAX == 4294967295U
BOOST_TEST_EQ( hv(std::numeric_limits<float>::infinity()), 4294967295U );
BOOST_TEST_EQ( hv(-std::numeric_limits<float>::infinity()), 4294967294U );
#else
BOOST_TEST_EQ( hv(std::numeric_limits<float>::infinity()), 18446744073709551615ULL );
BOOST_TEST_EQ( hv(-std::numeric_limits<float>::infinity()), 18446744073709551614ULL );
#endif
// double
BOOST_TEST_EQ( hv(0.0), 0 );
BOOST_TEST_EQ( hv(-0.0), 0 );
#if SIZE_MAX == 4294967295U
BOOST_TEST_EQ( hv(1.0), 1072693248U );
BOOST_TEST_EQ( hv(-1.0), 3220176896U );
BOOST_TEST_EQ( hv(3.14), 2660156064U );
BOOST_TEST_EQ( hv(-3.14), 512672416U );
BOOST_TEST_EQ( hv(1e-308), 1553872728U );
BOOST_TEST_EQ( hv(-1e-308), 3701356376U );
BOOST_TEST_EQ( hv(1e+308), 2577739707U );
BOOST_TEST_EQ( hv(-1e+308), 430256059U );
#if !defined(__GLIBCXX__)
BOOST_TEST_EQ( hv(std::numeric_limits<double>::infinity()), 2146435072U );
BOOST_TEST_EQ( hv(-std::numeric_limits<double>::infinity()), 4293918720U );
#else
BOOST_TEST_EQ( hv(std::numeric_limits<double>::infinity()), 4294967295U );
BOOST_TEST_EQ( hv(-std::numeric_limits<double>::infinity()), 4294967294U );
#endif
#else
BOOST_TEST_EQ( hv(1.0), 4607182418800017408ULL );
BOOST_TEST_EQ( hv(-1.0), 13830554455654793216ULL );
BOOST_TEST_EQ( hv(3.14), 4614253070214989087ULL );
BOOST_TEST_EQ( hv(-3.14), 13837625107069764895ULL );
BOOST_TEST_EQ( hv(1e-308), 2024022533073106ULL );
BOOST_TEST_EQ( hv(-1e-308), 9225396059387848914ULL );
BOOST_TEST_EQ( hv(1e+308), 9214871658872686752ULL );
BOOST_TEST_EQ( hv(-1e+308), 18438243695727462560ULL );
#if !defined(__GLIBCXX__)
BOOST_TEST_EQ( hv(std::numeric_limits<double>::infinity()), 9218868437227405312ULL );
BOOST_TEST_EQ( hv(-std::numeric_limits<double>::infinity()), 18442240474082181120ULL );
#else
BOOST_TEST_EQ( hv(std::numeric_limits<double>::infinity()), 18446744073709551615ULL );
BOOST_TEST_EQ( hv(-std::numeric_limits<double>::infinity()), 18446744073709551614ULL );
#endif
#endif
// long double
BOOST_TEST_EQ( hv(0.0L), 0 );
BOOST_TEST_EQ( hv(-0.0L), 0 );
#if defined(_WIN32) && !defined(__GNUC__) // Under MS ABI, long double == double
#if SIZE_MAX == 4294967295U
BOOST_TEST_EQ( hv(1.0L), 1072693248U );
BOOST_TEST_EQ( hv(-1.0L), 3220176896U );
BOOST_TEST_EQ( hv(3.14L), 2660156064U );
BOOST_TEST_EQ( hv(-3.14L), 512672416U );
BOOST_TEST_EQ( hv(std::numeric_limits<long double>::infinity()), 2146435072U );
BOOST_TEST_EQ( hv(-std::numeric_limits<long double>::infinity()), 4293918720U );
#else
BOOST_TEST_EQ( hv(1.0L), 4607182418800017408ULL );
BOOST_TEST_EQ( hv(-1.0L), 13830554455654793216ULL );
BOOST_TEST_EQ( hv(3.14L), 4614253070214989087ULL );
BOOST_TEST_EQ( hv(-3.14L), 13837625107069764895ULL );
BOOST_TEST_EQ( hv(std::numeric_limits<long double>::infinity()), 9218868437227405312ULL );
BOOST_TEST_EQ( hv(-std::numeric_limits<long double>::infinity()), 18442240474082181120ULL );
#endif
#else
#if SIZE_MAX == 4294967295U
BOOST_TEST_EQ( hv(1.0L), 2684370943U );
BOOST_TEST_EQ( hv(-1.0L), 2684403711U );
BOOST_TEST_EQ( hv(3.14L), 83002659U );
BOOST_TEST_EQ( hv(-3.14L), 82969891U );
#if !defined(__GLIBCXX__)
BOOST_TEST_EQ( hv(std::numeric_limits<long double>::infinity()), 0xA0007FFFu );
BOOST_TEST_EQ( hv(-std::numeric_limits<long double>::infinity()), 0xA000FFFFu );
#else
BOOST_TEST_EQ( hv(std::numeric_limits<long double>::infinity()), 4294967295U );
BOOST_TEST_EQ( hv(-std::numeric_limits<long double>::infinity()), 4294967294U );
#endif
#else
BOOST_TEST_EQ( hv(1.0L), 11529215046068486143ULL );
BOOST_TEST_EQ( hv(-1.0L), 11529215046068518911ULL );
BOOST_TEST_EQ( hv(3.14L), 12059468778148142067ULL );
BOOST_TEST_EQ( hv(-3.14L), 12059468778147191795ULL );
#if !defined(__GLIBCXX__)
BOOST_TEST_EQ( hv(std::numeric_limits<long double>::infinity()), 11529215046068502527ULL );
BOOST_TEST_EQ( hv(-std::numeric_limits<long double>::infinity()), 11529215046068535295ULL );
#else
BOOST_TEST_EQ( hv(std::numeric_limits<long double>::infinity()), 18446744073709551615ULL );
BOOST_TEST_EQ( hv(-std::numeric_limits<long double>::infinity()), 18446744073709551614ULL );
#endif
#endif
#endif
// C array
{
int a1[] = { 0 };
int a2[] = { 0, 0 };
int a3[] = { 0, 0, 0 };
#if SIZE_MAX == 4294967295U
BOOST_TEST_EQ( hv(a1), 3864292196U );
BOOST_TEST_EQ( hv(a2), 2842917718U );
BOOST_TEST_EQ( hv(a3), 325752138U );
#else
BOOST_TEST_EQ( hv(a1), 3864292196ULL );
BOOST_TEST_EQ( hv(a2), 14642545639667855512ULL );
BOOST_TEST_EQ( hv(a3), 17867750819888810972ULL );
#endif
}
// string
#if SIZE_MAX == 4294967295U
BOOST_TEST_EQ( hv(std::string()), 0 );
BOOST_TEST_EQ( hv(std::string("abc")), 1849538372U );
BOOST_TEST_EQ( hv(std::string("\0", 1)), 3864292196U );
BOOST_TEST_EQ( hv(std::string("\0\0", 2)), 2842917718U );
BOOST_TEST_EQ( hv(std::string("\0\0\0", 3)), 325752138U );
#else
BOOST_TEST_EQ( hv(std::string()), 0 );
BOOST_TEST_EQ( hv(std::string("abc")), 6420922261882292859ULL );
BOOST_TEST_EQ( hv(std::string("\0", 1)), 3864292196ULL );
BOOST_TEST_EQ( hv(std::string("\0\0", 2)), 14642545639667855512ULL );
BOOST_TEST_EQ( hv(std::string("\0\0\0", 3)), 17867750819888810972ULL );
#endif
// pointer
BOOST_TEST_EQ( hv((void*)0), 0 );
BOOST_TEST_EQ( hv((void*)0x200014A0), 603985716U );
// complex<int>
BOOST_TEST_EQ( hv(std::complex<int>(0, 0)), 0U );
BOOST_TEST_EQ( hv(std::complex<int>(+1, 0)), 1U );
BOOST_TEST_EQ( hv(std::complex<int>(0, +1)), 65U );
#if SIZE_MAX == 4294967295U
BOOST_TEST_EQ( hv(std::complex<int>(-1, 0)), 4294967295U );
BOOST_TEST_EQ( hv(std::complex<int>(0, -1)), 3221225536U );
#else
BOOST_TEST_EQ( hv(std::complex<int>(-1, 0)), 18446744073709551615ULL );
BOOST_TEST_EQ( hv(std::complex<int>(0, -1)), 13835058055282163776ULL );
#endif
// complex<float>
BOOST_TEST_EQ( hv(std::complex<float>(0.0f, 0.0f)), 0U );
BOOST_TEST_EQ( hv(std::complex<float>(+1.0f, 0.0f)), 1065353216U );
BOOST_TEST_EQ( hv(std::complex<float>(-1.0f, 0.0f)), 3212836864U );
#if SIZE_MAX == 4294967295U
BOOST_TEST_EQ( hv(std::complex<float>(0.0f, +1.0f)), 3495952384U );
BOOST_TEST_EQ( hv(std::complex<float>(0.0f, -1.0f)), 2959081472U );
#else
BOOST_TEST_EQ( hv(std::complex<float>(0.0f, +1.0f)), 67920461824ULL );
BOOST_TEST_EQ( hv(std::complex<float>(0.0f, -1.0f)), 209117511680ULL );
#endif
// complex<double>
BOOST_TEST_EQ( hv(std::complex<double>(0.0, 0.0)), 0U );
#if SIZE_MAX == 4294967295U
BOOST_TEST_EQ( hv(std::complex<double>(+1.0, 0.0)), 1072693248U );
BOOST_TEST_EQ( hv(std::complex<double>(-1.0, 0.0)), 3220176896U );
BOOST_TEST_EQ( hv(std::complex<double>(0.0, +1.0)), 873201664U );
BOOST_TEST_EQ( hv(std::complex<double>(0.0, -1.0)), 2483814400U );
#else
BOOST_TEST_EQ( hv(std::complex<double>(+1.0, 0.0)), 4607182418800017408ULL );
BOOST_TEST_EQ( hv(std::complex<double>(-1.0, 0.0)), 13830554455654793216ULL );
BOOST_TEST_EQ( hv(std::complex<double>(0.0, +1.0)), 3750372589692780544ULL );
BOOST_TEST_EQ( hv(std::complex<double>(0.0, -1.0)), 10667901617333862400ULL );
#endif
// pair
#if SIZE_MAX == 4294967295U
BOOST_TEST_EQ( hv(std::make_pair(0, 0)), 2842917718U );
BOOST_TEST_EQ( hv(std::make_pair(1, 2)), 2507434894U );
BOOST_TEST_EQ( hv(std::make_pair(-1, -2)), 1874100199 );
#else
BOOST_TEST_EQ( hv(std::make_pair(0, 0)), 14642545639667855512ULL );
BOOST_TEST_EQ( hv(std::make_pair(1, 2)), 3370697991563800380ULL );
BOOST_TEST_EQ( hv(std::make_pair(-1, -2)), 4139767141999124554ULL );
#endif
// vector<char>
#if SIZE_MAX == 4294967295U
BOOST_TEST_EQ( hv(std::vector<char>(0)), 0 );
BOOST_TEST_EQ( hv(std::vector<char>(1)), 3864292196U );
BOOST_TEST_EQ( hv(std::vector<char>(2)), 2842917718U );
BOOST_TEST_EQ( hv(std::vector<char>(3)), 325752138U );
#else
BOOST_TEST_EQ( hv(std::vector<char>(0)), 0 );
BOOST_TEST_EQ( hv(std::vector<char>(1)), 3864292196ULL );
BOOST_TEST_EQ( hv(std::vector<char>(2)), 14642545639667855512ULL );
BOOST_TEST_EQ( hv(std::vector<char>(3)), 17867750819888810972ULL );
#endif
// vector<int>
#if SIZE_MAX == 4294967295U
BOOST_TEST_EQ( hv(std::vector<int>(0)), 0 );
BOOST_TEST_EQ( hv(std::vector<int>(1)), 3864292196U );
BOOST_TEST_EQ( hv(std::vector<int>(2)), 2842917718U );
BOOST_TEST_EQ( hv(std::vector<int>(3)), 325752138U );
#else
BOOST_TEST_EQ( hv(std::vector<int>(0)), 0 );
BOOST_TEST_EQ( hv(std::vector<int>(1)), 3864292196ULL );
BOOST_TEST_EQ( hv(std::vector<int>(2)), 14642545639667855512ULL );
BOOST_TEST_EQ( hv(std::vector<int>(3)), 17867750819888810972ULL );
#endif
// vector<vector<int>>
#if SIZE_MAX == 4294967295U
BOOST_TEST_EQ( hv(std::vector<std::vector<int> >(0)), 0 );
BOOST_TEST_EQ( hv(std::vector<std::vector<int> >(1)), 3864292196U );
BOOST_TEST_EQ( hv(std::vector<std::vector<int> >(2)), 2842917718U );
BOOST_TEST_EQ( hv(std::vector<std::vector<int> >(3)), 325752138U );
#else
BOOST_TEST_EQ( hv(std::vector<std::vector<int> >(0)), 0 );
BOOST_TEST_EQ( hv(std::vector<std::vector<int> >(1)), 3864292196ULL );
BOOST_TEST_EQ( hv(std::vector<std::vector<int> >(2)), 14642545639667855512ULL );
BOOST_TEST_EQ( hv(std::vector<std::vector<int> >(3)), 17867750819888810972ULL );
#endif
// list<char>
#if SIZE_MAX == 4294967295U
BOOST_TEST_EQ( hv(std::list<char>(0)), 0 );
BOOST_TEST_EQ( hv(std::list<char>(1)), 3864292196U );
BOOST_TEST_EQ( hv(std::list<char>(2)), 2842917718U );
BOOST_TEST_EQ( hv(std::list<char>(3)), 325752138U );
#else
BOOST_TEST_EQ( hv(std::list<char>(0)), 0 );
BOOST_TEST_EQ( hv(std::list<char>(1)), 3864292196ULL );
BOOST_TEST_EQ( hv(std::list<char>(2)), 14642545639667855512ULL );
BOOST_TEST_EQ( hv(std::list<char>(3)), 17867750819888810972ULL );
#endif
// list<int>
#if SIZE_MAX == 4294967295U
BOOST_TEST_EQ( hv(std::list<int>(0)), 0 );
BOOST_TEST_EQ( hv(std::list<int>(1)), 3864292196U );
BOOST_TEST_EQ( hv(std::list<int>(2)), 2842917718U );
BOOST_TEST_EQ( hv(std::list<int>(3)), 325752138U );
#else
BOOST_TEST_EQ( hv(std::list<int>(0)), 0 );
BOOST_TEST_EQ( hv(std::list<int>(1)), 3864292196ULL );
BOOST_TEST_EQ( hv(std::list<int>(2)), 14642545639667855512ULL );
BOOST_TEST_EQ( hv(std::list<int>(3)), 17867750819888810972ULL );
#endif
return boost::report_errors();
}
#endif
|
# Pin this run to GPU index 1 so other devices stay free for concurrent jobs.
export CUDA_VISIBLE_DEVICES=1
# Train a CAN (--can True, --wgan False) on MNIST 28x28 images, no cropping.
# NOTE(review): assumes main.py defines all of these flags -- confirm there.
python3 main.py \
--epoch 25 \
--learning_rate .0001 \
--beta 0.5 \
--batch_size 4 \
--sample_size 9 \
--input_height 28 \
--output_height 28 \
--lambda_val 1.0 \
--smoothing 1 \
--dataset mnist \
--input_fname_pattern */*.jpg \
--checkpoint_dir checkpoint \
--sample_dir samples \
--crop False \
--visualize False \
--can True \
--wgan False \
--train
|
public static int lis(int[] arr) {
int n = arr.length;
// array to store LIS
int[] lis = new int[n];
// initialze each lis element with 1
for (int i = 0; i < n; i++)
lis[i] = 1;
// find longest increasing subsequence
for (int i = 0; i < n; i++) {
for (int j = 0; j < i; j++) {
if (arr[j] < arr[i] && lis[i] < lis[j] + 1)
lis[i] = lis[j] + 1;
}
}
// longest increasing subsequence
int max_lis = 0;
for (int i = 0; i < n; i++)
max_lis = Math.max(max_lis, lis[i]);
return max_lis;
} |
import React, { useState, useEffect } from 'react';
import Button from '@material-ui/core/Button';
import { makeStyles } from '@material-ui/core/styles';
import DeleteIcon from '@material-ui/icons/Delete';
import CloudUploadIcon from '@material-ui/icons/CloudUpload';
import KeyboardVoiceIcon from '@material-ui/icons/KeyboardVoice';
import Icon from '@material-ui/core/Icon';
import SaveIcon from '@material-ui/icons/Save';
import AddIcon from '@material-ui/icons/Add';
import AddCircleIcon from '@material-ui/icons/AddCircle';
import CreateDialog from './CreateDialog.js';
import UploadExcelDialog from './UploadExcelDialog.js';
import PostAddIcon from '@material-ui/icons/PostAdd';
// Styles hook: both toolbar buttons share the same green action-button look,
// so the common declaration is defined once and reused for each rule name.
const useStyles = makeStyles((theme) => {
  const greenActionButton = {
    margin: theme.spacing(1),
    backgroundColor: 'green',
    color: 'white',
    borderColor: 'green',
    '&:hover': {
      backgroundColor: '#004d00',
    },
  };
  return {
    createButton: { ...greenActionButton },
    excelButton: { ...greenActionButton },
  };
});
function CreateButton({ showCreateModal, showSuccessSnackBar, prependDoituong, prepenDoituongs}) {
const classes = useStyles();
const [isOpen, setIsOpen] = useState(false);
const [isUploadExcelDialogOpen, setIsUploadExcelDialogOpen] = useState(false);
function showCreateDialog(e) {
e.preventDefault();
setIsOpen(true);
}
function hideCreateDialog() {
setIsOpen(false);
}
function showUploadExcelDialog(e) {
e.preventDefault();
setIsUploadExcelDialogOpen(true);
}
function hideUploadExcelDialog() {
setIsUploadExcelDialogOpen(false);
}
return (
<div>
<Button
variant="outlined"
className={classes.createButton}
startIcon={<AddCircleIcon />}
onClick={showCreateDialog}
>
Tạo Mới
</Button>
<Button
variant="outlined"
className={classes.excelButton}
startIcon={<PostAddIcon />}
onClick={showUploadExcelDialog}
>
Tải Excel
</Button>
<CreateDialog isOpen={isOpen} hideCreateDialog={hideCreateDialog} showSuccessSnackBar={showSuccessSnackBar} prependDoituong={prependDoituong}/>
<UploadExcelDialog isOpen={isUploadExcelDialogOpen} hideUploadExcelDialog={hideUploadExcelDialog} showSuccessSnackBar={showSuccessSnackBar} prepenDoituongs={prepenDoituongs}/>
</div>
);
}
export default CreateButton; |
<filename>modules/caas/backend/src/main/java/io/cattle/iaas/healthcheck/process/HealthcheckChangeLock.java
package io.cattle.iaas.healthcheck.process;
import io.cattle.platform.lock.definition.AbstractBlockingLockDefintion;
/**
 * Blocking lock keyed to a single instance's healthcheck state, so that
 * concurrent healthcheck changes for the same instance are serialized.
 */
public class HealthcheckChangeLock extends AbstractBlockingLockDefintion {
    public HealthcheckChangeLock(Long instanceId) {
        // The lock id embeds the instance id, so different instances never contend.
        super("healthcheck.change.instance." + instanceId);
    }
}
|
/**
 * Repeatedly invokes `fn` every `interval` milliseconds until `predicate()`
 * returns true, then stops the timer.
 *
 * Fix/generalization: despite its name, the old version returned nothing, so
 * callers could not await completion. It now returns a Promise that resolves
 * once the predicate is satisfied. Call order per tick is unchanged: `fn`
 * runs first, then the predicate is checked. Callers that ignored the return
 * value are unaffected.
 *
 * @param fn        side-effecting work to run on each tick
 * @param predicate stop condition, evaluated after each `fn()` call
 * @param interval  tick period in milliseconds
 * @returns promise resolved when `predicate()` first returns true
 */
export function untilAsync(
  fn: () => any,
  predicate: () => boolean,
  interval: number
): Promise<void> {
  // Wrapping setInterval (a callback API) in a Promise is the intended use
  // of the Promise constructor.
  return new Promise((resolve) => {
    const intv = setInterval(() => {
      fn();
      if (predicate()) {
        clearInterval(intv);
        resolve();
      }
    }, interval);
  });
}
|
const express = require('express');
const app = express();
const bodyParser = require('body-parser');
const Blockchain = require('./blockchain');
// NOTE(review): the 'uuid/v1' deep import is deprecated in newer uuid
// releases -- confirm the pinned version before upgrading.
const uuid = require('uuid/v1');
// Port comes from the environment or, failing that, the first CLI argument.
const port = process.env.PORT || process.argv[2];
const rp = require('request-promise');
// This node's identity: a v1 UUID with the dashes stripped.
const nodeAddress = uuid().split('-').join('');
const kycBlockchain = new Blockchain();
// Accept both JSON and form-encoded request bodies.
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: false }));
// Fully permissive CORS so browser clients on any origin can call the API.
app.use(function(req, res, next) {
  res.header("Access-Control-Allow-Origin", "*");
  res.header("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept");
  next();
});
// get entire blockchain (chain, pending transactions and known nodes)
app.get('/blockchain', function (req, res) {
  res.json(kycBlockchain);
});
// Accept a transaction already broadcast by a peer and queue it locally
// for inclusion in the next mined block.
app.post('/transaction', (req, res) => {
  const blockIndex = kycBlockchain.addTransactionToPendingTransactions(req.body);
  res.json({ note: `Transaction will be added in block ${blockIndex}.` });
});
// Create a KYC transaction from the request payload, queue it locally, then
// broadcast it to every registered network node in parallel.
app.post('/transaction/broadcast', function(req, res) {
  const newTransaction = kycBlockchain.createNewTransaction(req.body.documentName, req.body.documentId, req.body.fullName, req.body.address, req.body.DOB, req.body.kycId, req.body.kycApprovedOn);
  kycBlockchain.addTransactionToPendingTransactions(newTransaction);
  const requestPromises = [];
  kycBlockchain.networkNodes.forEach(networkNodeUrl => {
    const requestOptions = {
      uri: networkNodeUrl + '/transaction',
      method: 'POST',
      body: newTransaction,
      json: true
    };
    requestPromises.push(rp(requestOptions));
  });
  Promise.all(requestPromises)
    .then(data => {
      res.json({ note: 'Transaction created and broadcast successfully.' });
    })
    .catch(err => {
      // Fix: a failed peer request previously surfaced as an unhandled
      // promise rejection and the client never received a response. The
      // transaction is already queued locally at this point.
      res.status(500).json({ note: 'Transaction created but broadcast to the network failed.' });
    });
});
// Mine a block: run proof-of-work over the pending transactions, append the
// resulting block to our chain, then push it to every registered peer.
app.get('/mine', function(req, res) {
  const lastBlock = kycBlockchain.getLastBlock();
  const previousBlockHash = lastBlock['hash'];
  // Data sealed into the new block: all pending transactions + next index.
  const currentBlockData = {
    transactions: kycBlockchain.pendingTransactions,
    index: lastBlock['index'] + 1
  };
  const nonce = kycBlockchain.proofOfWork(previousBlockHash, currentBlockData);
  const blockHash = kycBlockchain.hashBlock(previousBlockHash, currentBlockData, nonce);
  const newBlock = kycBlockchain.createNewBlock(nonce, previousBlockHash, blockHash);
  const requestPromises = [];
  kycBlockchain.networkNodes.forEach(networkNodeUrl => {
    const requestOptions = {
      uri: networkNodeUrl + '/receive-new-block',
      method: 'POST',
      body: { newBlock: newBlock },
      json: true
    };
    requestPromises.push(rp(requestOptions));
  });
  Promise.all(requestPromises)
  // This piece of code is used to create a transaction for the mining reward.
  // .then(data => {
  //   const requestOptions = {
  //     uri: kycBlockchain.currentNodeUrl + '/transaction/broadcast',
  //     method: 'POST',
  //     body: {
  //       amount: 12.5,
  //       sender: "00",
  //       recipient: nodeAddress
  //     },
  //     json: true
  //   };
  //   return rp(requestOptions);
  // })
  .then(data => {
    res.json({
      note: "New block mined & broadcast successfully",
      block: newBlock
    });
  })
  .catch(err => {
    // Fix: an unreachable peer previously rejected Promise.all with no
    // handler -- the client never got a response and Node logged an
    // unhandled promise rejection. The block is already on our chain here.
    res.status(500).json({
      note: 'New block mined but broadcast to the network failed.',
      block: newBlock
    });
  });
});
// Validate a block pushed by a peer: it must reference our current tip's hash
// and carry the next index. Accepted blocks are appended and the local
// pending-transaction pool is cleared (they are now in the chain).
app.post('/receive-new-block', (req, res) => {
  const { newBlock } = req.body;
  const lastBlock = kycBlockchain.getLastBlock();
  const extendsChain =
    lastBlock.hash === newBlock.previousBlockHash &&
    lastBlock['index'] + 1 === newBlock['index'];
  if (!extendsChain) {
    res.json({
      note: 'New block rejected.',
      newBlock: newBlock
    });
    return;
  }
  kycBlockchain.chain.push(newBlock);
  kycBlockchain.pendingTransactions = [];
  res.json({
    note: 'New block received and accepted.',
    newBlock: newBlock
  });
});
// Register a node and broadcast it to the network: record the new node here,
// tell every existing peer about it, then hand the newcomer the full node list.
app.post('/register-and-broadcast-node', function(req, res) {
  const newNodeUrl = req.body.newNodeUrl;
  if (kycBlockchain.networkNodes.indexOf(newNodeUrl) == -1) kycBlockchain.networkNodes.push(newNodeUrl);
  const regNodesPromises = [];
  kycBlockchain.networkNodes.forEach(networkNodeUrl => {
    const requestOptions = {
      uri: networkNodeUrl + '/register-node',
      method: 'POST',
      body: { newNodeUrl: newNodeUrl },
      json: true
    };
    regNodesPromises.push(rp(requestOptions));
  });
  Promise.all(regNodesPromises)
  .then(data => {
    // Send the newcomer the complete membership list, including ourselves.
    const bulkRegisterOptions = {
      uri: newNodeUrl + '/register-nodes-bulk',
      method: 'POST',
      body: { allNetworkNodes: [ ...kycBlockchain.networkNodes, kycBlockchain.currentNodeUrl ] },
      json: true
    };
    return rp(bulkRegisterOptions);
  })
  .then(data => {
    res.json({ note: 'New node registered with network successfully.' });
  })
  .catch(err => {
    // Fix: any peer failure previously became an unhandled rejection and the
    // HTTP request hung with no response at all.
    res.status(500).json({ note: 'Failed to register new node with the network.' });
  });
});
// Record a single new peer URL (no re-broadcast). Duplicates and our own URL
// are silently skipped; the response is identical either way.
app.post('/register-node', (req, res) => {
  const { newNodeUrl } = req.body;
  const alreadyKnown = kycBlockchain.networkNodes.includes(newNodeUrl);
  const isSelf = kycBlockchain.currentNodeUrl === newNodeUrl;
  if (!alreadyKnown && !isSelf) {
    kycBlockchain.networkNodes.push(newNodeUrl);
  }
  res.json({ note: 'New node registered successfully.' });
});
// Bulk-register the membership list handed to a freshly joined node.
// Duplicates and this node's own URL are skipped, as in /register-node.
app.post('/register-nodes-bulk', (req, res) => {
  for (const networkNodeUrl of req.body.allNetworkNodes) {
    const alreadyKnown = kycBlockchain.networkNodes.includes(networkNodeUrl);
    const isSelf = kycBlockchain.currentNodeUrl === networkNodeUrl;
    if (!alreadyKnown && !isSelf) {
      kycBlockchain.networkNodes.push(networkNodeUrl);
    }
  }
  res.json({ note: 'Bulk registration successful.' });
});
// Consensus via the longest-chain rule: fetch every peer's chain and adopt
// the longest one that validates; otherwise keep our own.
app.get('/consensus', function(req, res) {
  const requestPromises = [];
  kycBlockchain.networkNodes.forEach(networkNodeUrl => {
    const requestOptions = {
      uri: networkNodeUrl + '/blockchain',
      method: 'GET',
      json: true
    };
    requestPromises.push(rp(requestOptions));
  });
  Promise.all(requestPromises)
  .then(blockchains => {
    const currentChainLength = kycBlockchain.chain.length;
    let maxChainLength = currentChainLength;
    let newLongestChain = null;
    let newPendingTransactions = null;
    blockchains.forEach(blockchain => {
      if (blockchain.chain.length > maxChainLength) {
        maxChainLength = blockchain.chain.length;
        newLongestChain = blockchain.chain;
        newPendingTransactions = blockchain.pendingTransactions;
      };
    });
    // Replace our chain only when a strictly longer chain exists AND it
    // passes validation.
    if (!newLongestChain || (newLongestChain && !kycBlockchain.chainIsValid(newLongestChain))) {
      res.json({
        note: 'Current chain has not been replaced.',
        chain: kycBlockchain.chain
      });
    }
    else {
      kycBlockchain.chain = newLongestChain;
      kycBlockchain.pendingTransactions = newPendingTransactions;
      res.json({
        note: 'This chain has been replaced.',
        chain: kycBlockchain.chain
      });
    }
  })
  .catch(err => {
    // Fix: a single unreachable peer used to reject Promise.all with no
    // handler, leaving the HTTP request hanging forever.
    res.status(500).json({ note: 'Consensus check failed: could not reach all network nodes.' });
  });
});
// Look up a KYC transaction by its kycId.
// NOTE(review): assumes the lookup always returns an object exposing a
// .transaction property; if a miss returns null/undefined this handler
// throws -- confirm against blockchain.js.
app.get('/kycDetailById/:kycId', function(req, res) {
  const kycId = req.params.kycId;
  const transactionData = kycBlockchain.getTransactionBykycId(kycId);
  res.json({
    transactionData: transactionData.transaction
  });
});
// Look up a KYC transaction by the subject's full name (same caveat as above).
app.get('/kycDetailByName/:name', function(req, res) {
  const name = req.params.name;
  const transactionData = kycBlockchain.getTransactionByName(name);
  res.json({
    transactionData: transactionData.transaction
  });
});
// Look up a KYC transaction by documentId (same caveat as above).
app.get('/kycDetailByDocumentId/:documentId', function(req, res) {
  const documentId = req.params.documentId;
  const transactionData = kycBlockchain.getTransactionBydocumentId(documentId);
  res.json({
    transactionData: transactionData.transaction
  });
});
// Expose the list of peer node URLs this node currently knows about.
app.get('/registeredNodes', function(req, res) {
  const transactionData = kycBlockchain.networkNodes;
  res.json({
    transactionData: transactionData
  });
});
// Start the HTTP server.
app.listen(port, function() {
  console.log(`Listening on port ${port}...`);
});
|
#!/bin/sh
# Top-level build: run each component's build script from its own directory.
echo "Running the value sets build script..."
# Subshells keep each cd local, so we return here automatically.
(cd valuesets ; ./build.sh)
echo "Running the tooling build script..."
(cd tooling ; ./build.sh)
|
#!/bin/bash
# Load environment settings, then open an interactive mysql client inside the
# running 'mariadb' container.
. ./env.sh
sudo docker exec -it mariadb mysql
|
<gh_stars>0
// A single named line item within a fee structure (e.g. one charge type).
export class FeeComponents {
  name: string;
  // Amount charged for this component. NOTE(review): currency/units are not
  // specified here -- confirm with the consuming code.
  amount: number;
}
// A complete fee structure document: a named collection of fee components
// with a precomputed total and audit timestamps.
export class FeeStructure {
  // Backing-store document id.
  docId: string;
  name: string;
  feeComponents: FeeComponents[];
  // NOTE(review): presumably the sum of feeComponents amounts -- confirm
  // where this is computed.
  total: number;
  active?: boolean;
  createdOn: Date;
  updatedOn: Date;
}
|
#!/bin/sh
# dash_places_menu.sh - a shell (hopefully dash!) places openbox pipe menu
# Copyright (C) 2010 John Crawley
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# Usage: add
# <menu id="places" label="Places" execute="/path/to/dash_places_menu.sh ~" />
# to your .config/openbox/menu.xml
# or, if you want the "recent files" menu incorporated at the top, use:
# <menu id="places" label="Places" execute="/path/to/dash_places_menu.sh --recent ~" />
# make sure you have recently_opened_menu.sh somewhere, and enter its path below.
# --- user configuration -------------------------------------------------
# path to your "recent files" script, if you want to incorporate it:
recent_script="$HOME"/scripts/recently_opened_menu.sh
# Command to open folders at "Browse here..." - any file manager
open_folder_cmd=thunar
# Default command to open files with - others might be xdg-open, gnome-open, pcmanfm...
default_open_cmd=exo-open # exo-open comes with thunar
# Text editor of choice
text_editor=gedit
# function to open files with default open command, or alternative command for certain files
# - add other conditions to choice
# Open the file given as $1. Executables and .desktop files go to the text
# editor (safer than executing them from a menu); everything else goes to the
# default opener. exec replaces this process, so the first matching line wins.
open_file() {
[ -x "$1" ] && exec "$text_editor" "$1" # comment out this line if you don't want to edit executables instead of executing
#[ -x "$1" ] && exec "terminator -e" "$1" # uncomment this and comment out previous line to run executables in terminal instead of editing
[ "${1##*.}" = desktop ] && exec "$text_editor" "$1" # comment out this line if you don't want to edit .desktop files instead of executing
exec "$default_open_cmd" "$1" # use default open command if above conditions not satisfied
}
# extra dotfiles to display in HOME folder (dotfiles are hidden by default)
# edit the list (space separated, surrounded by single quotes) or comment this line out, to taste:
shown_dotfiles='.config .local .Xdefaults .bash_aliases .bashrc .fonts.conf .gtkrc-2.0.mine .profile .xsession-errors'
# By default, this script will display directories separately, before files.
# To change this behaviour, see NOTE1, NOTE2 and NOTE3 below, near end of page.
#######################################################################
# Argument dispatch: --open FILE opens a file and exits; --recent prepends a
# "recently opened" submenu; otherwise $1 is the directory to list.
case $1 in
# if "--open" option is sent as $1, open file ($2) instead of generating menu
--open)
open_file "$2"
echo "$0 : failed to open $2" >&2
exit;; # in case exec command fails
# if "--recent" option is sent, incorporate "recent files" menu
--recent)
shift
output='<openbox_pipe_menu>
'
if [ -x "$recent_script" ]
then
output="$output"'<separator label="Recently opened..." />
<menu execute="'"$recent_script"'" id="recent" label="files" />
'
else
echo "$0 : cannot find executable script $recent_script" >&2
fi;;
*)
output='<openbox_pipe_menu>
';;
esac
path="${1:-$HOME}" # default starting place is ~, otherwise $1
path="$( echo "${path}"/ | tr -s '/' )" # ensure one final slash
[ -d "$path" ] || { echo "$0 : $path is not a directory" >&2; exit 1; }
# NOTE(review): the sed programs below appear corrupted -- the intent is
# clearly XML-escaping (& < > " ') but the replacement side is identical to
# the pattern, as if HTML entities (&amp; &lt; ...) were lost in transit.
# Restore the entity replacements before relying on this escaping.
case "$path" in # only escape if string needs it
*\&*|*\<*|*\>*|*\"*|*\'*) pathe=$(sed "s/\&/\&/g;s/</\</g;s/>/\>/g;s/\"/\"/g;s/'/\'/g;") <<XXX
$path
XXX
;;
*)pathe=$path;;
esac
case "$pathe" in
*\&apos\;*) pathe_apos=$(sed 's/\'/\'\"\'\"\'/g;')<<XXX
$pathe
XXX
;;
*) pathe_apos=$pathe;;
esac
output="$output"'<separator label="'$pathe'" />
<item label="Browse here...">
<action name="Execute">
<command>
''"$open_folder_cmd"'' ''"$pathe_apos"''
</command>
</action>
</item>
<separator />
'
unset extra_entries directories_menu files_menu
[ "$path" = "$HOME"/ ] && extra_entries="$shown_dotfiles"
# Emit one menu entry per directory entry (plus the selected dotfiles when
# listing $HOME). Directories and files accumulate in separate variables.
for i in "$path"* $extra_entries
do
[ -e "$i" ] || continue # only output code if file exists
shortname="${i##*/}"
case $shortname in
*\&*|*\<*|*\>*|*\"*|*\'*) shortnamee=$(sed "s/\&/\&/g;s/</\</g;s/>/\>/g;s/\"/\"/g;s/'/\'/g;") <<XXX
$shortname
XXX
;;
*) shortnamee=$shortname;;
esac
case $shortnamee in
*\&apos\;*) shortnamee_apos=$(sed 's/\'/\'\"\'\"\'/g;')<<XXX
$shortnamee
XXX
;;
*) shortnamee_apos=$shortnamee;;
esac
[ -d "$i" ] && {
# NOTE1 If you want directories and files listed together
# change next line (directories_menu="$directories_menu"') to read: files_menu="$files_menu"' (note the one single quote at the end)
directories_menu="$directories_menu"'
<menu id="'"${pathe_apos}${shortnamee_apos}"'" label="'"$shortnamee"'" execute="''"$0"'' ''"${pathe_apos}${shortnamee_apos}"''" />'; continue; }
files_menu="$files_menu"'
<item label="'"$shortnamee"'">
<action name="Execute">
<command>
''"$0"'' --open ''"${pathe_apos}${shortnamee_apos}"''
</command>
</action>
</item>'
done
[ -n "$directories_menu" ] && {
# NOTE2 comment out next 2 lines if you don't want "Directories" label
output="${output}"'<separator label="Directories" />
'
output="${output}${directories_menu}"'
'; }
[ -n "$files_menu" ] && {
# NOTE3 comment out next 2 lines if you don't want "Files" label
output="${output}"'<separator label="Files" />
'
output="${output}${files_menu}"'
'; }
output="${output}"'</openbox_pipe_menu>
'
printf '%s' "$output"
exit
|
# Import the MNIST training rows into digits.digits_train over TLS, dropping
# any existing collection first; credentials come from the environment.
mongoimport --host ${MONGODB_IMPORT_URL} --drop --ssl --username ${MONGODB_ADMIN} --password ${MONGODB_PASSWORD} --authenticationDatabase admin --db digits --collection digits_train --type csv --file ../../data/mnist_train.csv --headerline
# Same for the (halved) test set into digits.digits_test.
mongoimport --host ${MONGODB_IMPORT_URL} --drop --ssl --username ${MONGODB_ADMIN} --password ${MONGODB_PASSWORD} --authenticationDatabase admin --db digits --collection digits_test --type csv --file ../../data/mnist_test_halfed.csv --headerline
#!/bin/sh
# Build a custom geth fork inside a throwaway GOPATH, then (re)launch a
# private Ethereum chain (network id 7888) for testing.
wd=`pwd`
echo $wd
mkdir -p gethworkdir/src/github.com/ethereum/
export GOPATH=$wd/gethworkdir
export PATH=$GOPATH/bin:$PATH
# Download the customized geth version
cd $GOPATH/src/github.com/ethereum/
git clone https://github.com/nkbai/go-ethereum.git
cd go-ethereum
go get ./...
git pull
cd cmd/geth
go install
cd $wd
# A previous node may still be running without having exited -- kill it
ps -ef | grep geth |grep 7888| grep -v grep | awk '{print $2}' |xargs kill -9
## Prepare to set up the private chain
geth version
rm -rf privnet/geth
geth --datadir privnet init baipoatestnetmatrix.json
# Keep unnecessary log output to a minimum so it does not drown out photon messages
geth --datadir=./privnet --unlock 3de45febbd988b6e417e4ebd2c69e42630fefbf0 --password ./privnet/keystore/pass --port 40404 --networkid 7888 --ws --wsaddr 0.0.0.0 --wsorigins "*" --wsport 30306 --rpc --rpccorsdomain "*" --rpcapi eth,admin,web3,net,debug,personal --rpcport 30307 --rpcaddr 127.0.0.1 --mine --verbosity 1 --nodiscover &
# newtestenv always uses fixed accounts, so the contract address is fixed
## 0x50839B01D28390048616C8f28dD1A21CF3CacbfF
|
#!/bin/bash
# Assemble the flat-binary OS image with the bundled NASM.
mkdir -p "bin"
# Build from inside src/ so relative paths resolve; popd restores the cwd.
pushd "src"
../deps/linux/nasm -f bin -o ../bin/9os.com 9os.asm
popd
|
#pragma once
#include <PlayFabComboSdk/PlayFabError.h>
namespace PlayFabComboSdk
{
// Process-wide configuration for the PlayFab SDK: title identity, endpoint
// selection, advertising identifiers, and the global error handler. A single
// instance is shared by all API wrappers via the 'playFabSettings' pointer.
class PlayFabSettings
{
public:
    static PlayFabSettings *playFabSettings; // Global settings for all Apis
    const AZStd::string playFabVersionString;
    const AZStd::string buildIdentifier;
    // When true, getURL targets developmentEnvironmentURL instead of production.
    bool useDevelopmentEnvironment;
    AZStd::string developmentEnvironmentURL;
    AZStd::string productionEnvironmentURL;
    AZStd::string titleId; // You must set this value for PlayFab to work properly (Found in the Game Manager for your title, at the PlayFab Website)
    // Fallback error handler used by API calls; see ErrorCallback in PlayFabError.h.
    ErrorCallback globalErrorHandler;
    AZStd::string developerSecretKey; // You must set this value for PlayFab to work properly (Found in the Game Manager for your title, at the PlayFab Website)
    AZStd::string advertisingIdType; // Set this to the appropriate AD_TYPE_X constant below
    AZStd::string advertisingIdValue; // Set this to corresponding device value
    // DisableAdvertising is provided for completeness, but changing it is not suggested
    // Disabling this may prevent your advertising-related PlayFab marketplace partners from working correctly
    bool disableAdvertising;
    const AZStd::string AD_TYPE_IDFA;
    const AZStd::string AD_TYPE_ANDROID_ID;

    PlayFabSettings();

    // Builds the full endpoint URL for a call path. The scheme+host prefix is
    // computed lazily from titleId and cached in serverURL.
    // NOTE(review): the cache is never invalidated, so changing titleId or the
    // environment flags after the first call has no effect -- confirm intended.
    AZStd::string getURL(const AZStd::string& callPath)
    {
        if (serverURL.length() == 0)
            serverURL = "https://" + titleId + (useDevelopmentEnvironment ? developmentEnvironmentURL : productionEnvironmentURL);
        return serverURL + callPath;
    }

private:
    AZStd::string serverURL; // A cache of the constructed url string
};
}
|
<reponame>yupiik/bundlebee
/*
* Copyright (c) 2021 - <NAME> - https://www.yupiik.com
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.yupiik.bundlebee.core.kube.model;
import lombok.Data;
import javax.json.bind.annotation.JsonbProperty;
import java.util.List;
/**
 * JSON-B model of a Kubernetes APIResourceList response: the set of resource
 * types served under one API group/version. Getters/setters are generated by
 * Lombok's {@code @Data}.
 */
@Data
public class APIResourceList {
    @JsonbProperty
    private String kind;
    private String apiVersion;
    // One entry per resource type available in this group/version.
    private List<Item> resources;

    /** A single API resource type descriptor. */
    @Data
    public static class Item {
        @JsonbProperty
        private String group;
        private String kind;
        private String name; // plural name
        private String singularName;
        private String version;
        private boolean namespaced;
        // NOTE(review): the Kubernetes wire format names this field "verbs";
        // as declared it binds to a "verb" property -- confirm this matches
        // the payloads actually consumed.
        private List<String> verb;
    }
}
|
/*
  Client main js script: mounts all Riot tags and wires them to the shared
  websocket. The two script editors get their default code templates here.
*/
// Fill in to get ES6:ish environment
require('babel-polyfill');
// Riot + tags (requiring a .tag file registers it with riot)
var riot = require('riot');
var liveview = require('./tags/liveview.tag');
var statusview = require('./tags/statusview.tag');
var modeselector = require('./tags/modeselector.tag');
var modeeditor = require('./tags/modeeditor.tag');
var scripteditor = require('./tags/scripteditor.tag');
var setup = require('./tags/setup.tag');
// Get socket from the window global
var socket = window.socket;
// Mount all tags; each receives the socket as its opts.
riot.mount('liveview, statusview, modeselector, modeeditor, setup', socket);
// Display-script editor: the template string below is the starter code shown
// for a new script (it is data, not code executed here).
riot.mount('#dsp-scripteditor', {
  "socket": socket,
  "title": "Display scripts",
  "events": {
    "update": "scriptschanged",
    "set": "setscript",
    "delete": "deletescript"
  },
  "template":
`"use strict";
var code = class {
onSetup(configuration, dataSource){
// set properties of this animation script
// pull data from data source
// set up animation
}
onFrame(oldFrame, timePassedInSeconds){
// calculate one frame of animation,
// update oldFrame
// and return ms to next callback.
// Return 0 to end the script.
//
// oldFrame.fill(0);
// return 1000;
}
};
`
});
// Data-script editor: same idea, different event names and starter template.
riot.mount('#data-scripteditor', {
  "socket": socket,
  "title": "Data scripts",
  "events": {
    "update": "datascriptschanged",
    "set": "setdatascript",
    "delete": "deletedatascript",
    "result": "data"
  },
  "template":
`"use strict";
var code = class {
constructor(){
// set up stuff
}
onUpdate(){
// get fresh data
// ...
// return data object
}
};
`
});
|
#!/bin/bash
# Build, test and install procps-ng 3.3.17 (LFS-style layout).
set -e
./configure --prefix=/usr \
            --exec-prefix= \
            --libdir=/usr/lib \
            --docdir=/usr/share/doc/procps-ng-3.3.17 \
            --disable-static \
            --disable-kill \
            --with-systemd
make
make check
make install
# The shared library must be reachable early in boot (before /usr may be
# mounted), so move it to /lib and re-point the /usr/lib symlink at it.
mv -v /usr/lib/libprocps.so.* /lib
ln -sfv ../../lib/$(readlink /usr/lib/libprocps.so) /usr/lib/libprocps.so
|
/**
* Directive that allows a user to enter a YouTube Url or Id which will be
* verified and then used to get the video information from YouTube, which
* is then passed to the optional cmsOnVideoSelected scope function.
* Does not support non-edit mode since so far it's only used in the
* YouTubePickerDialog.
*/
angular.module('cms.shared').directive('cmsFormFieldYoutubeId', [
    '_',
    'shared.pluginModulePath',
    'shared.LoadState',
    'shared.youTubeService',
    'shared.validationErrorService',
    'baseFormFieldFactory',
function (
    _,
    modulePath,
    LoadState,
    youTubeService,
    validationErrorService,
    baseFormFieldFactory) {

    var config = {
        templateUrl: modulePath + 'UIComponents/FormFieldYouTubeId.html',
        // Extend the base field scope with the optional selection callback.
        scope: _.extend(baseFormFieldFactory.defaultConfig.scope, {
            onVideoSelected: '&cmsOnVideoSelected'
        }),
        passThroughAttributes: [
            'required'
        ],
        link: link
    };

    return baseFormFieldFactory.create(config);

    /* LINK */

    function link(scope, el, attributes, controllers) {
        var vm = scope.vm,
            isRequired = _.has(attributes, 'required'),
            formController = controllers[0];

        init();
        // The base link must still run after our additions.
        return baseFormFieldFactory.defaultConfig.link(scope, el, attributes, controllers);

        /* INIT */

        function init() {
            vm.setEditing = toggleEditing.bind(null, true);
            vm.updateVideoId = updateVideoId;
            vm.cancelEditing = cancelEditing;
            vm.updateIdLoadState = new LoadState();

            // Enter edit mode automatically whenever the model is empty.
            scope.$watch('vm.model', function (newValue) {
                toggleEditing(!newValue);
            });
        }

        /* ACTIONS */

        // Validates the typed url/id, then looks the video up on YouTube.
        // Empty input clears the model; an unparsable input raises a
        // validation error; an unchanged id just leaves edit mode.
        function updateVideoId() {
            var inputId = vm.idOrUrlInput,
                videoId = parseVideoId(inputId);

            if (!inputId) {
                vm.model = null;
                triggerOnVideoSelected(null);
            } else if (inputId && !videoId) {
                addError('The url/id is invalid');
            }
            else if (!videoId || videoId == vm.model) {
                cancelEditing();
            } else {
                vm.updateIdLoadState.on();
                youTubeService
                    .getVideoInfo(videoId)
                    .then(onInfoLoaded)
                    .catch(onFail)
                    .finally(vm.updateIdLoadState.off);
            }

            function onFail(response) {
                addError('There was a problem accessing YouTube');
            }

            function onInfoLoaded(info) {
                if (info) {
                    // Only commit the model once YouTube confirms the video exists.
                    vm.model = vm.idOrUrlInput = videoId;
                    triggerOnVideoSelected(info);
                } else {
                    addError('Video not found');
                }
            }

            function triggerOnVideoSelected(info) {
                if (vm.onVideoSelected) vm.onVideoSelected({ model: info })
            }

            function addError(message) {
                validationErrorService.raise([{
                    properties: [vm.modelName],
                    message: message
                }]);
            }
        }

        // Revert the input to the committed model value and leave edit mode.
        function cancelEditing() {
            vm.idOrUrlInput = vm.model;
            vm.onChange();
            toggleEditing(false);
        }

        /* Helpers */

        function toggleEditing(isEditing) {
            vm.isEditing = isEditing;
        }

        // Accepts either a bare 11-character video id or any common
        // youtube.com / youtu.be url form; returns the id or undefined.
        function parseVideoId(urlOrId) {
            var urlRegex = /(?:youtube\.com\/(?:[^\/]+\/.+\/|(?:v|e(?:mbed)?)\/|.*[?&]v=)|youtu\.be\/)([^"&?\/ ]{11})/i,
                matches;

            if (!urlOrId) return;

            if (/^[^"&?\/ ]{11}$/.test(urlOrId)) {
                return urlOrId;
            }

            matches = urlRegex.exec(urlOrId);

            return matches && matches[1];
        }
    }
}]);
#!/bin/sh
# Build the service image from the Dockerfile in the current directory and
# print the command for running it.
docker build . -t conference-service-micronaut
echo
echo
echo "To run the docker container execute:"
echo " $ docker run -p 8080:8080 conference-service-micronaut"
|
<filename>js/main.js
import html2canvas from "html2canvas";
import tippy from "tippy.js";
import FileSaver from "file-saver";
// Once the DOM is ready: wire the download button to snapshot the image
// container as a PNG file, and attach a tooltip to the container.
document.addEventListener("DOMContentLoaded", () => {
  const imageContainer = document.querySelector(".image-container");
  const saveButton = document.getElementById("download-button");

  saveButton.addEventListener("click", async () => {
    // Render the container into a canvas, then hand it out as a PNG blob.
    const canvas = await html2canvas(imageContainer);
    canvas.toBlob((blob) => FileSaver.saveAs(blob, "佛系.png"));
  });

  tippy(".image-container", {
    placement: "left",
    arrow: true,
    size: "large",
    distance: 20,
  });
});
|
#!/bin/bash
# Docker build hook runner: execute pre-build scripts, install declared
# Python and Node dependencies, then execute post-build scripts.
set -eo pipefail
# nullglob: an empty hook directory expands to nothing instead of the literal pattern.
shopt -s nullglob
# Run every executable file (not directories) in pre-build.d, in glob order.
for x in ${PROJ_ROOT}/docker/pre-build.d/*; do
    if [ ! -d "${x}" -a -x "${x}" ]; then
        echo "----> Running ${x}"
        "${x}"
    fi
done
if [ -f ${PROJ_ROOT}/docker/requirements.txt ]; then
    echo "Installing python requirements from ${PROJ_ROOT}/docker/requirements.txt"
    pip install -r ${PROJ_ROOT}/docker/requirements.txt
fi
if [ -f ${PACKAGE_JSON} ]; then
    # Install the javascript dependencies in /node_modules so that our app
    # can find them easily without having them within our source code base.
    echo "Installing production javascript dependencies defined in ${PACKAGE_JSON} to /node_modules via npm."
    mkdir -p /node_modules
    ln -s ${PACKAGE_JSON} /
    (cd $(dirname ${PACKAGE_JSON}) && npm install --production --prefix /)
fi
# Mirror of the pre-build loop for post-build hooks.
for x in ${PROJ_ROOT}/docker/post-build.d/*; do
    if [ ! -d "${x}" -a -x "${x}" ]; then
        echo "----> Running ${x}"
        "${x}"
    fi
done
|
#!/bin/bash
# Demonstration of shell quoting and expansion rules using echo.
HI=Hello
echo HI # displays HI
echo $HI # displays Hello
echo \$HI # displays $HI
echo "$HI" # displays Hello (double quotes still expand variables)
echo '$HI' # displays $HI (single quotes suppress expansion)
echo "$HIAlex" # displays nothing (expands the unset variable HIAlex)
echo "${HI}Alex" # displays HelloAlex (braces delimit the variable name)
echo `pwd` # displays working directory
echo $(pwd) # displays working directory
|
/**
*
* MIT License
*
* Copyright (c) 2018, MEXC Program Developers.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*/
import expectThrow from 'zeppelin-solidity/test/helpers/expectThrow';
import assertRevert from 'zeppelin-solidity/test/helpers/assertRevert';
var MEXCToken = artifacts.require('./MEXCToken.sol');
// NOTE: all cases share one deployed token instance (MEXCToken.deployed()),
// so they are order-dependent: later balance/supply expectations account for
// mints performed by earlier cases.
contract('MEXCTokenTest', (accounts) => {
  // accounts
  let owner = accounts[0];
  let acc1 = accounts[1];
  let acc2 = accounts[2];
  let acc3 = accounts[3];
  let token;

  beforeEach(async () => {
    token = await MEXCToken.deployed();
  });

  it('should have MEXC symbol', async () => {
    let symbol = await token.symbol();
    assert.equal('MEXC', symbol, 'Symbol should be MEXC');
  });

  it('should have 18 decimals', async () => {
    let dec = await token.decimals();
    assert.equal(18, dec, 'Decimals should be 18');
  });

  it('should be able to mint 4000 for acc1', async () => {
    let res = await token.mint(acc1, web3.toWei(4000, 'ether'));
    let bal = await token.balanceOf(acc1);
    let supply = await token.totalSupply.call();
    let balance = bal.toString('10');
    // fix: failure message previously said "3500 ether" although 4000 is asserted
    assert.equal(web3.toWei(4000, 'ether').toString('10'), balance, 'Balance should be 4000 ether');
    let s = supply.toString('10');
    let expected = web3.toWei(4000, 'ether').toString('10');
    assert.equal(s, expected, 'Total supply should be 4000 ether');
  });

  it('should disable transfers to acc2', async () => {
    await expectThrow(token.transferFrom(acc1, acc2, web3.toWei(1, 'ether')));
    let bal = await token.balanceOf(acc2);
    assert.equal('0', bal.toString('10'), 'Balance should be 0');
  });

  it('should enable transfer', async () => {
    let r = await token.allowTransfers();
    let status = await token.transferDisabled();
    assert.equal(false, status, 'Transfer should be enabled');
  });

  it('should enable transfer to acc2', async () => {
    let res = await token.mint(acc1, web3.toWei(20, 'ether'));
    await token.transfer(acc2, web3.toWei(1, 'ether'), {from: acc1});
    let bal = await token.balanceOf(acc2);
    assert.equal(web3.toWei(1, 'ether'), bal.toString('10'), 'Balance should be 1 ether');
  });

  it('should blackList acc3', async () => {
    let res = await token.mint(acc3, web3.toWei(2, 'ether'));
    await token.blackListAddress(acc3);
    // acc3 transfer to acc2 must fail while blacklisted
    await expectThrow(token.transfer(acc2, web3.toWei(1, 'ether'), {from: acc3}));
    let bal = await token.balanceOf(acc3);
    assert.equal(web3.toWei(2, 'ether'), bal.toString('10'), 'Balance should still be 2 ether');
  });

  it('should be able to confiscate acc3 balance', async () => {
    let ownBal = await token.balanceOf(owner);
    let res = await token.mint(acc3, web3.toWei(2, 'ether'));
    let acc3Bal = await token.balanceOf(acc3);
    // 2 ether minted in the previous case + 2 minted here
    assert.equal(web3.toWei(4, 'ether'), acc3Bal.toString('10'), 'Balance should be 4 ether');
    // confiscate
    await token.confiscate(acc3);
    let acc3BalNow = await token.balanceOf(acc3);
    assert.equal(web3.toWei(0, 'ether'), acc3BalNow.toString('10'), 'Balance should be 0 ether');
  });
})
|
<filename>src/icon/IconPocket.tsx
import React from 'react';
/** Props for {@link IconPocket}; any extra SVG attribute is forwarded. */
export interface IconPocketProps extends React.SVGAttributes<SVGElement> {
  color?: string;
  size?: string | number;
  className?: string;
  style?: React.CSSProperties;
}

/**
 * Feather "pocket" icon rendered as an inline SVG.
 * `size` sets both width and height; `color` drives the stroke; callers may
 * override any attribute via the spread (it is applied last).
 */
export const IconPocket: React.SFC<IconPocketProps> = (
  props: IconPocketProps
): React.ReactElement => {
  const { color, size, style, ...forwarded } = props;
  const mergedStyle: React.CSSProperties = { verticalAlign: 'middle', ...style };
  return (
    <svg
      xmlns="http://www.w3.org/2000/svg"
      width={size}
      height={size}
      viewBox="0 0 24 24"
      fill="none"
      stroke={color}
      className="feather feather-pocket"
      strokeWidth="2"
      strokeLinecap="round"
      strokeLinejoin="round"
      style={mergedStyle}
      {...forwarded}
    >
      <path d="M4 3h16a2 2 0 0 1 2 2v6a10 10 0 0 1-10 10A10 10 0 0 1 2 11V5a2 2 0 0 1 2-2z" />
      <polyline points="8 10 12 14 16 10" />
    </svg>
  );
};

IconPocket.defaultProps = {
  color: 'currentColor',
  size: '1em',
};

export default IconPocket;
|
# Deploy the docker-notifier stack from a registry.
# Usage: $0 <registry> <twilio_sid> <twilio_token> <notify_service_sid> <slack_endpoint>
# fix: positional parameters and secrets are now quoted so empty values or
# values containing spaces do not silently mangle the docker commands.
set -e;
docker login "$1";
# Static web frontend on port 80.
docker run \
    -d \
    -p 80:80 \
    --restart=always \
    --name web \
    "$1/docker-notifier/static";
# Notifier backend; mounts the docker socket so it can watch container events.
docker run \
    -d \
    -p 8989:8989 \
    -p 31337:31337 \
    -v /var/run/docker.sock:/var/run/docker.sock \
    --env TWILIO_ACCOUNT_SID="$2" \
    --env TWILIO_AUTH_TOKEN="$3" \
    --env TWILIO_NOTIFICATION_SERVICE_SID="$4" \
    --env SLACK_ENDPOINT="$5" \
    --name docker-notifier \
    "$1/docker-notifier/backend";
|
package com.sandip.basicbankingapp.UI;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import android.widget.Toast;
import androidx.appcompat.app.AlertDialog;
import androidx.appcompat.app.AppCompatActivity;
import com.sandip.basicbankingapp.R;
/**
 * Shows a single customer's account details (received via Intent extras)
 * and drives the money-transfer entry dialog.
 */
public class UserData extends AppCompatActivity {
    TextView name, email, accountNo, balance, ifscCode, phoneNo;
    Button transferMoney;
    AlertDialog dialog;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_user_data);
        name = findViewById(R.id.name);
        email = findViewById(R.id.email_id);
        accountNo = findViewById(R.id.account_no);
        balance = findViewById(R.id.avail_balance);
        ifscCode = findViewById(R.id.ifsc_id);
        phoneNo = findViewById(R.id.phone_no);
        transferMoney = findViewById(R.id.transfer_money);
        // Getting the intent that launched this screen
        Intent intent = getIntent();
        Bundle extras = intent.getExtras();
        // Extracting the data; ACCOUNT_NO is an int extra, the rest are strings.
        // NOTE(review): BALANCE is assumed to be a plain integer string — it is
        // parsed with Integer.parseInt in enterAmount(); confirm the caller's format.
        if (extras != null){
            name.setText(extras.getString("NAME"));
            accountNo.setText(String.valueOf(extras.getInt("ACCOUNT_NO")));
            email.setText(extras.getString("EMAIL"));
            phoneNo.setText(extras.getString("PHONE_NO"));
            ifscCode.setText(extras.getString("IFSC_CODE"));
            balance.setText(extras.getString("BALANCE"));
        }
        else {
            Log.d("TAG", "Empty Intent");
        }
        transferMoney.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                enterAmount();
            }
        });
    }

    /**
     * Prompts for the transfer amount. The positive button gets an empty
     * default handler and is then rebound after show(): overriding it this
     * way keeps the dialog open on invalid input instead of auto-dismissing.
     */
    private void enterAmount() {
        final AlertDialog.Builder mBuilder = new AlertDialog.Builder(UserData.this);
        View mView = getLayoutInflater().inflate(R.layout.dialog_box, null);
        mBuilder.setTitle("Enter Amount").setView(mView).setCancelable(false);
        final EditText mAmount = (EditText) mView.findViewById(R.id.enter_money);
        mBuilder.setPositiveButton("SEND", new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialogInterface, int i) { }
        }).setNegativeButton("CANCEL", new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int which) {
                dialog.dismiss();
                transactionCancel();
            }
        });
        dialog = mBuilder.create();
        dialog.show();
        // Validate on SEND without dismissing the dialog (see method comment).
        dialog.getButton(AlertDialog.BUTTON_POSITIVE).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                // Checking whether amount entered is correct or not
                int currentBalance = Integer.parseInt(String.valueOf(balance.getText()));
                if (mAmount.getText().toString().isEmpty()) {
                    mAmount.setError("Amount can't be empty");
                } else if (Integer.parseInt(mAmount.getText().toString()) > currentBalance){
                    mAmount.setError("Your account don't have enough balance");
                } else {
                    // Hand off sender details + amount to the recipient picker.
                    Intent intent = new Intent(UserData.this, SendToUserList.class);
                    intent.putExtra("FROM_USER_ACCOUNT_NO", Integer.parseInt(accountNo.getText().toString())); // PRIMARY_KEY
                    intent.putExtra("FROM_USER_NAME", name.getText());
                    intent.putExtra("FROM_USER_ACCOUNT_BALANCE", balance.getText());
                    intent.putExtra("TRANSFER_AMOUNT", mAmount.getText().toString());
                    startActivity(intent);
                    finish();
                }
            }
        });
    }

    /**
     * Confirms cancellation; "No" reopens the amount dialog so the user can
     * resume the transfer.
     */
    private void transactionCancel() {
        AlertDialog.Builder builder_exitbutton = new AlertDialog.Builder(UserData.this);
        builder_exitbutton.setTitle("Do you want to cancel the transaction?").setCancelable(false)
                .setPositiveButton("yes", new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialogInterface, int i) {
                        Toast.makeText(UserData.this, "Transaction Cancelled!", Toast.LENGTH_LONG).show();
                    }
                }).setNegativeButton("No", new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        dialog.dismiss();
                        enterAmount();
                    }
                });
        AlertDialog alertexit = builder_exitbutton.create();
        alertexit.show();
    }
}
|
#!/bin/bash
# Install pinned kubectl and helm versions (from install/config.json) into
# /usr/local/bin and wire up shell completion.
# fixes: abort on any failure (previously a failed download still "installed"
# a broken file); curl -f fails on HTTP errors instead of saving the error
# page; 755 instead of 744 so non-owners can execute the binaries.
set -euo pipefail

kubectlVersion=$(jq -r '.kubectlVersion' "${HOME}/install/config.json")
helmVersion=$(jq -r '.helmVersion' "${HOME}/install/config.json")

tempDir=$(mktemp -d)
pushd "${tempDir}"

# kubectl: a single static binary
curl -fLO "https://storage.googleapis.com/kubernetes-release/release/v${kubectlVersion}/bin/linux/amd64/kubectl"
chmod 755 "${tempDir}/kubectl"
mv "${tempDir}/kubectl" /usr/local/bin/kubectl

# helm: shipped inside a linux-amd64/ tarball
curl -fLO "https://get.helm.sh/helm-v${helmVersion}-linux-amd64.tar.gz"
tar xvf helm*.tar.gz
chmod 755 "${tempDir}/linux-amd64/helm"
mv "${tempDir}/linux-amd64/helm" /usr/local/bin/helm

popd
rm -rf "${tempDir}"

# Shell completion for bash sessions.
echo '' >> "${HOME}/.bashrc"
echo 'source /etc/bash_completion' >> "${HOME}/.bashrc"
echo 'source ${HOME}/.kuberc' >> "${HOME}/.bashrc"
echo 'source <(kubectl completion bash)' >> "${HOME}/.bashrc"
echo 'source <(helm completion bash)' >> "${HOME}/.bashrc"
|
#!/bin/bash
# Lint (and auto-fix) the .js files changed relative to a target branch.
# Usage: $0 [target_branch]   (default: develop)
target_branch=${1:-develop}
echo "Linting changes against $target_branch branch"

# Diff against the merge base so only this branch's own changes are linted.
BASE_REV=$(git merge-base "$target_branch" "$(git rev-parse --abbrev-ref HEAD)")
# ACMR = Added/Copied/Modified/Renamed — deleted files are skipped.
UPDATED=$(git diff --diff-filter=ACMR --name-only "$BASE_REV" HEAD | grep -e '\.js$')

if [[ -n $UPDATED ]]
then
    # Intentionally unquoted: word-split the newline-separated file list.
    # eslint's exit status is propagated by the bare `exit`.
    node_modules/.bin/eslint --fix $UPDATED
    exit
else
    echo "Nothing to Lint"
    # fix: previously fell through to `exit 1`, failing the build even when
    # there was nothing to lint.
    exit 0
fi
|
#!/bin/sh
# Substitute ${VAR} placeholders in the VCL template with environment values,
# then launch varnishd and stream its log.
# fix: the original `for env in $(printenv)` word-split on whitespace, so any
# variable whose value contained a space produced garbage name/value pairs.
# Reading printenv line-by-line with IFS='=' keeps the full value (multi-line
# values remain unsupported, as before).
printenv | while IFS='=' read -r name value; do
    # '|' as the sed delimiter so values containing '/' are safe; values
    # containing '|' would still break the expression.
    sed -i "s|\${${name}}|${value}|g" /etc/varnish/user.vcl
done

varnishd -s "malloc,${VARNISH_MEMORY}" -a ":${VARNISH_PORT}" -f /etc/varnish/user.vcl
sleep 1
varnishlog
/**
* businessLogic.js
* @author <NAME> and <NAME>
* @description Business Logic of the Black Jack game. Contains code for the random card generator and game logic.
* @created Wed Nov 08 2017 17:54:59 GMT-0800 (PST)
* @copyright 2017 <NAME> and <NAME>
* @last-modified Wed Nov 08 2017 17:54:59 GMT-0800 (PST)
*/
/**
* Changelog v0.1:
* • Code tested with Simulator, working fine. Edge cases to be taken care of before publishing :)
* • Code clean up, documentation added for functions
* • Added ES6 syntax
*/
//# ES6 import
import { head, last, tail } from "./blackjack-utils.js";
//# ES6 import
//# Card constants
// A standard 52-card deck is the cross product of these faces and suites.
const cardFaces = ["2", "3", "4", "5", "6", "7", "8", "9", "10", "J", "Q", "K", "A"]; // card faces, low to high
const cardSuites = ["Spades", "Club", "Hearts", "Diamond"]; // card suites
//# Card constants
//# card
/**
 * A single playing card identified by its face and suite.
 */
export class Card {
  /**
   * @param {String} face  One of "2".."10", "J", "Q", "K", "A"
   * @param {String} suite One of "Spades", "Club", "Hearts", "Diamond"
   */
  constructor(face, suite) {
    this.face = face;
    this.suite = suite;
  }

  /**
   * Two cards are equal when both their faces and suites match.
   * @param {Card} card The card to compare against
   * @returns {boolean} true when the cards are identical
   */
  equals(card) {
    return card.face === this.face && card.suite === this.suite;
  }

  /**
   * Human-readable representation, e.g. "Ace of Spades" or "7 of Hearts".
   * @returns {string} the card's display name
   */
  toString() {
    const faceNames = { A: "Ace", K: "King", Q: "Queen", J: "Jack" };
    const displayFace = faceNames[this.face] || this.face;
    return `${displayFace} of ${this.suite}`;
  }
}
//# card
/**
 * Draws a uniformly random card from the full deck. Duplicate draws across
 * calls are possible; uniqueness is enforced by getRandomCard.
 * @returns {Card} a randomly chosen Card
 */
const randomCardGenerator = function() {
  const face = cardFaces[Math.floor(Math.random() * cardFaces.length)];
  const suite = cardSuites[Math.floor(Math.random() * cardSuites.length)];
  return new Card(face, suite);
};
//# Random card generator
/**
 * Draws a card that is not already in `cardStack`: keeps generating random
 * cards until an unseen one turns up, then returns it with a NEW stack that
 * includes it.
 * Fix: the original used `cardStack.splice(0)`, which removes every element
 * from the caller's array as a side effect; `concat` leaves the input intact
 * and still produces a fresh array.
 * @param {Card[]} cardStack cards already dealt to the player (not mutated)
 * @returns {[Card, Card[]]} the drawn card and the updated card stack
 */
export const getRandomCard = function(cardStack) {
  let randomCard = randomCardGenerator();
  if (!isCardInStack(randomCard, cardStack)) {
    let updatedCardStack = cardStack.concat([randomCard]);
    return [last(updatedCardStack), updatedCardStack];
  } else {
    return getRandomCard(cardStack);
  }
};
//# Random card generator
//# Utility method #1
/**
 * Checks whether `randomCard` is already present in the card stack.
 * Uses Array.prototype.some so the scan short-circuits on the first match
 * (the original filtered the entire stack just to test membership).
 * @param {Card} randomCard The card to look for
 * @param {Card[]} cardStack The cards already dealt to the player
 * @returns {boolean} true if the card is in the stack, else false
 */
const isCardInStack = function(randomCard, cardStack) {
  return cardStack.some(card => card.equals(randomCard));
};
//# Utility method #1
/**
 * Calculates the best achievable (closest-to-21) score for the dealt cards.
 * @param {Card[]} stack The cards dealt to the player
 * @returns {number} the score as an integer
 */
export const calculateMaxScore = function(stack) {
  const best = closestTo21(stack);
  return best | 0; // bitwise OR forces a 32-bit integer result
};
/**
 * Maps a card face to its blackjack value. Face cards (K/Q/J) are worth 10;
 * "A" returns the sentinel -1 because an ace's value (1 or 11) is decided
 * later by logicToCalculate_A.
 * @param {string} value The face of the card ("2".."10", "J", "Q", "K", "A")
 * @returns {number} the face's score, or -1 for an ace
 */
const getFaceValue = function(value) {
  switch (value) {
    case "K":
    case "Q":
    case "J":
      return 10;
    case "A":
      return -1; // sentinel: aces are scored separately
    default:
      // "2".."10"; explicit radix guards against legacy non-decimal parsing
      return parseInt(value, 10);
  }
};
/**
 * Reports whether a score has gone bust, i.e. exceeds 21.
 * @param {number} score The player's score
 * @returns {boolean} true when the score is greater than 21
 */
export const topple = (score) => score > 21;
/**
 * Folds the aces into the running score. All but one ace must count as 1
 * (two aces at 11 would always bust); the remaining ace counts as 11 only
 * when that does not push the total past 21, otherwise it counts as 1.
 * @param {number} countA The number of aces in the stack
 * @param {number} score The score of the non-ace cards
 * @returns {number} the total score including aces
 */
const logicToCalculate_A = function(countA, score) {
  if (countA <= 0) {
    // No aces: nothing to decide.
    return score;
  }
  const withLowAces = score + (countA - 1); // every ace but one is worth 1
  return withLowAces + 11 > 21 ? withLowAces + 1 : withLowAces + 11;
};
/**
 * Computes the player's maximum score, valuing aces as 1 or 11 so the total
 * lands as close to 21 as possible without busting when avoidable.
 * @param {Card[]} stack The player's card stack
 * @returns {number} the max score of the player
 */
const closestTo21 = function(stack) {
  let aceCount = 0;
  let baseScore = 0;
  for (const card of stack) {
    if (card.face === "A") {
      aceCount += 1;
    } else {
      baseScore += getFaceValue(card.face);
    }
  }
  return logicToCalculate_A(aceCount, baseScore);
};
/**
 * Compares two hands and reports whether the first player is ahead.
 * @param {Card[]} stack1 The cards of player 1
 * @param {Card[]} stack2 The cards of player 2
 * @returns {boolean} true if player 1's best score is strictly higher
 */
export const getWinner = function(stack1, stack2) {
  return getWinnerFromScore(calculateMaxScore(stack1), calculateMaxScore(stack2));
};
/**
 * Compares two scores; player 1 wins only on a strictly higher score
 * (ties count as a loss for player 1, as before).
 * @param {number} score1 The first player's score
 * @param {number} score2 The second player's score
 * @returns {boolean} true when score1 > score2
 */
const getWinnerFromScore = (score1, score2) => score1 > score2;
|
# NOTE: do not commit a locally modified copy of this file to the repository.
#
# Variables you may need to set:
#QT_ROOT=/home/l/Qt5.5.1/5.5/gcc_64 # Qt install root; defaults to ${RabbitImRoot}/ThirdLibrary/unix/qt
JOM=make                            # Qt make tool (JOM)
MAKE=make
RABBITIM_MAKE_JOB_PARA="-j2"        # number of parallel make jobs
RABBITIM_CLEAN=TRUE                 # clean before building
#RABBITIM_BUILD_STATIC="static"     # build static libraries; leave commented out for shared libraries
#RABBITIM_USE_REPOSITORIES="FALSE"  # download the pinned tarballs; omit to fetch the development repositories
# RABBITIM_BUILD_PREFIX=`pwd`/../${RABBITIM_BUILD_TARGERT} # install prefix override
# RABBITIM_BUILD_CROSS_PREFIX       # cross-compile toolchain prefix
# RABBITIM_BUILD_CROSS_SYSROOT      # sysroot of the cross-compile target

# Derive the install prefix from RabbitImRoot when it is set.
if [ -n "${RabbitImRoot}" ]; then
    RABBITIM_BUILD_PREFIX=${RabbitImRoot}/ThirdLibrary/unix
else
    RABBITIM_BUILD_PREFIX=`pwd`/../unix # install prefix
fi
if [ "$RABBITIM_BUILD_STATIC" = "static" ]; then
    RABBITIM_BUILD_PREFIX=${RABBITIM_BUILD_PREFIX}_static
fi
if [ ! -d ${RABBITIM_BUILD_PREFIX} ]; then
    mkdir -p ${RABBITIM_BUILD_PREFIX}
fi
if [ -z "$RABBITIM_USE_REPOSITORIES" ]; then
    RABBITIM_USE_REPOSITORIES="TRUE" # fetch the development repositories; otherwise the pinned tarballs
fi
if [ -z "$QT_ROOT" ]; then
    QT_ROOT=${RABBITIM_BUILD_PREFIX}/qt
fi
QT_BIN=${QT_ROOT}/bin   # Qt bin directory used for the build
QMAKE=${QT_BIN}/qmake   # qmake used for the unix build
# These defaults suit the automated build; point them at your local Qt install as needed.
echo "QT_BIN:$QT_BIN"

# pkg-config manual: http://linux.die.net/man/1/pkg-config
export PKG_CONFIG=pkg-config
# fix: the original compared against " true" (leading space), which can never
# match, so the self-built third-party-library branch was unreachable.
if [ "${RABBITIM_BUILD_THIRDLIBRARY}" = "true" ]; then
    # Use our own third-party builds instead of the system libraries.
    export PKG_CONFIG_PATH=${RABBITIM_BUILD_PREFIX}/lib/pkgconfig
    export PKG_CONFIG_LIBDIR=${PKG_CONFIG_PATH}
    export PKG_CONFIG_SYSROOT_DIR=${RABBITIM_BUILD_PREFIX}
else
    # Use the system libraries, with our prefix searched first.
    export PKG_CONFIG_PATH=${RABBITIM_BUILD_PREFIX}/lib/pkgconfig:${PKG_CONFIG_PATH}
fi
export PATH=${QT_BIN}:$PATH
|
var bitcoin = require('bitcoin');

// All config options are optional.
const client = new bitcoin.Client({
  host: 'localhost',
  port: 8332,
  user: 'btc',
  pass: '<PASSWORD>=',
  timeout: 30000
});

// Fetch chain state; the callback receives (error, parsed result, response headers).
client.getBlockchainInfo(function (err, info, headers) {
  if (err) {
    console.log("Error", err);
    return;
  }
  console.log("info", info);
  console.log("headers", headers);
});
<filename>internal/service/grpc-service/upload.go<gh_stars>1-10
// @Author: 2014BDuck
// @Date: 2021/8/3
package grpc_service
import (
"context"
"errors"
"github.com/2014bduck/entry-task/global"
"github.com/2014bduck/entry-task/internal/dao"
"github.com/2014bduck/entry-task/pkg/upload"
"github.com/2014bduck/entry-task/proto"
"os"
)
// UploadService implements the gRPC upload service. It carries the request
// context plus the database and Redis handles shared by its methods.
type UploadService struct {
	ctx   context.Context
	dao   *dao.Dao
	cache *dao.RedisCache
	proto.UnimplementedUploadServiceServer
}
// NewUploadService builds an UploadService bound to ctx, wired to the
// process-wide DB engine and cache client.
func NewUploadService(ctx context.Context) UploadService {
	return UploadService{
		ctx:   ctx,
		dao:   dao.New(global.DBEngine),
		cache: dao.NewCache(global.CacheClient),
	}
}
// UploadFile persists the uploaded bytes under an MD5-derived file name and
// returns the public URL plus the stored name.
func (svc UploadService) UploadFile(ctx context.Context, r *proto.UploadRequest) (*proto.UploadReply, error) {
	fileName := upload.GetFileName(r.FileName) // MD5'd
	uploadSavePath := upload.GetSavePath()
	dst := uploadSavePath + "/" + fileName
	// NOTE(review): the truthy branches below only make sense if CheckSavePath
	// reports "path missing" and CheckPermission reports "permission denied" —
	// confirm in pkg/upload. Also note CreateSavePath is passed dst (which
	// includes the file name), not the directory; verify that is intended.
	if upload.CheckSavePath(uploadSavePath) {
		if err := upload.CreateSavePath(dst, os.ModePerm); err != nil {
			return nil, errors.New("svc.UploadFile: failed to create save directory")
		}
	}
	if upload.CheckPermission(uploadSavePath) {
		return nil, errors.New("svc.UploadFile: insufficient file permissions")
	}
	if err := upload.SaveFileByte(&r.Content, dst); err != nil {
		return nil, err
	}
	fileUrl := global.AppSetting.UploadServerUrl + "/" + fileName
	return &proto.UploadReply{FileUrl: fileUrl, FileName: fileName}, nil
}
|
const fs = require('firebase')
require('firebase/auth')
/**
 * Signs the user in through a Firebase auth popup, initializing the default
 * Firebase app from `config` on first use.
 * @param {Object} token Forwarded straight to signInWithPopup.
 *   NOTE(review): signInWithPopup expects an auth *provider* instance
 *   (e.g. GoogleAuthProvider), not a token — confirm what callers pass here.
 * @param {Object} config Firebase app configuration for initializeApp.
 * @returns {Promise} resolves with the sign-in result from Firebase
 */
export function tokenAuthenticate (token, config) {
  if (fs.apps.length === 0) {
    fs.initializeApp(config)
  }
  return fs.auth().signInWithPopup(token)
}
|
#!/bin/sh
# Install dotfiles from ~/.dotfiles modules: each module directory contains a
# `dotfiles` manifest of "source:destination" pairs (destination optional),
# an optional build.sh, and the files themselves, which are symlinked into
# $HOME with any pre-existing targets moved into a timestamped backup dir.
set -e
# This setup.sh script requires:
# * git
# * sh
ROOT=$(cd "$(dirname "$0")"; git rev-parse --show-toplevel)
# To support dotfiles referencing other scripts and files
# without needing to copy them, we need a standard path
# they can reference. Let's standarize on "~/.dotfiles"
if [ "$ROOT" != "$HOME/.dotfiles" ]; then
    echo "ERROR: you must clone the repo as ~/.dotfiles/" >&2
    exit 1
fi
# Prepare backup directory (one per run, keyed by epoch seconds)
BACKUPS="$ROOT/backups/$(date +%s)/"
mkdir -p "$BACKUPS"
cd "$ROOT"
for module in configs/* configs-private/*; do
    # not a module - skip
    if [ ! -d "$module" ]; then
        continue;
    fi
    # incomplete module (no manifest) - skip
    if [ ! -f "$module/dotfiles" ]; then
        continue;
    fi
    # run build script if provided
    if [ -x "$module/build.sh" ]; then
        echo "Building dotfiles for $module"
        (cd "$module" && ./build.sh)
    fi
    echo "Installing dotfiles for $module"
    # install dotfiles - back up existing ones
    # Manifest line format: "source[:destination]", colon-separated.
    while read -r dotfile; do
        MODULE="$ROOT/$module"
        ORIG="$MODULE/$(echo "$dotfile" | cut -f1 -d:)"
        DEST="$HOME/$(echo "$dotfile" | cut -f2 -d:)"
        # if destination is unset, use default (same relative path under $HOME)
        if [ "$DEST" = "$HOME/" ]; then
            DEST="$HOME/$ORIG"
        fi
        # if dotfile already exists (file, symlink, or dir), back it up
        if [ -f "$DEST" ] || [ -L "$DEST" ] || [ -d "$DEST" ]; then
            mv "$DEST" "$BACKUPS/"
        fi
        # if destination directory doesn't exist, create it
        DESTDIR=$(dirname "$DEST")
        if [ ! -d "$DESTDIR/" ]; then
            mkdir -vp "$DESTDIR"
        fi
        # link the dotfile
        ln -vs "$ORIG" "$DEST"
    done < "$module/dotfiles"
done
|
<filename>src/modules/todo/todo.service.ts
import { Injectable } from '@nestjs/common';
import { InjectModel } from '@nestjs/mongoose';
import { Model } from 'mongoose';
import { Todo, TodoDocument } from 'src/schemas/todo.schema';
import { CreateTodoDto } from './dto/create-todo.dto';
@Injectable()
export class TodoService {
  constructor(@InjectModel(Todo.name) private todoModel: Model<TodoDocument>) {}

  /** Persists a new todo built from the DTO. */
  async create(dto: CreateTodoDto): Promise<Todo> {
    const createdTodo = await this.todoModel.create(dto);
    return createdTodo;
  }

  /** Returns every todo document. */
  async findAll(): Promise<Todo[]> {
    return this.todoModel.find().exec();
  }

  /** Looks up a single todo by id (resolves to null when absent). */
  async findOne(id: string): Promise<Todo> {
    return this.todoModel.findOne({ _id: id }).exec();
  }

  /**
   * Removes a todo by id and returns the removed document.
   * Fix: findByIdAndRemove expects the id itself — passing `{ _id: id }`
   * makes Mongoose try to cast that object to an ObjectId and fail.
   */
  async delete(id: string) {
    const deletedTodo = await this.todoModel.findByIdAndRemove(id).exec();
    return deletedTodo;
  }
}
|
<gh_stars>1-10
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.vertx.java.examples.old.stomp;
import org.vertx.java.addons.old.stomp.StompConnectHandler;
import org.vertx.java.addons.old.stomp.StompConnection;
import org.vertx.java.addons.old.stomp.StompMsgCallback;
import org.vertx.java.core.buffer.Buffer;
import java.util.Map;
/**
 * STOMP client demo: subscribes to a topic on localhost:8181, publishes five
 * fire-and-forget messages, then five more with delivery receipts.
 */
public class ClientExample {
    public static void main(String[] args) throws Exception {
        org.vertx.java.addons.old.stomp.StompClient.connect(8181, new StompConnectHandler() {
            public void onConnect(final StompConnection conn) {
                // Subscribe to a topic; the callback prints each message body.
                conn.subscribe("test-topic", new StompMsgCallback() {
                    public void onMessage(Map<String, String> headers, Buffer body) {
                        System.out.println("Received message: " + body.toString());
                    }
                });
                // Send some messages (without receipt)
                for (int i = 0; i < 5; i++) {
                    conn.send("test-topic", Buffer.create("message " + i));
                }
                // Now send some more with receipts; `count` is captured because the
                // receipt Runnable fires asynchronously after the loop advances.
                for (int i = 5; i < 10; i++) {
                    final int count = i;
                    conn.send("test-topic", Buffer.create("message " + i), new Runnable() {
                        public void run() {
                            System.out.println("Got receipt " + count);
                        }
                    });
                }
            }
        });
        // Keep the process alive until the user presses a key.
        System.out.println("Any key to exit");
        System.in.read();
    }
}
|
angular.module("niceComponent", [])
  /**
   * <nice-navbar> — renders the shared navigation bar template.
   */
  .directive("niceNavbar", function () {
    return {
      restrict: "E",
      templateUrl: "component/navbar.html",
      scope: {
        // active: "=",
      },
    };
  })
  /**
   * <nice-footer> — renders the shared footer template.
   */
  .directive("niceFooter", function () {
    return {
      restrict: "E",
      templateUrl: "component/footer.html",
      scope: {},
    };
  });
<gh_stars>0
'use strict';

const router = require('express').Router();
const { Payment } = require("../config/db");

// CRUD routes for Payment documents.
// fix: every handler previously called next(err) and then FELL THROUGH to
// res.*, so an error produced both an error response and a second send
// ("headers already sent"). Each next(err) now returns.

// GET /getAll — list every payment.
router.get("/getAll", (req, res, next) => {
  Payment.find((err, products) => {
    if (err) {
      return next(err);
    }
    res.send(products);
  });
});

// GET /get/:id — fetch one payment by id.
router.get("/get/:id", (req, res, next) => {
  Payment.findById(req.params.id, (err, result) => {
    if (err) {
      return next(err);
    }
    res.status(200).send(result);
  })
})

// POST /create — create a payment from the request body.
router.post("/create", ({body}, res, next) => {
  const item = new Payment(body);
  item.save()
    .then((result) => {
      res.status(201).send(result);
    })
    .catch((err) => next(err));
});

// DELETE /delete/:id — remove a payment.
// NOTE(review): a 204 response carries no body, so the success text is
// discarded by Express — confirm whether 200 was intended.
router.delete("/delete/:id", (req, res, next) => {
  Payment.findByIdAndDelete(req.params.id, (err) => {
    if (err) {
      return next(err);
    }
    res.status(204).send(`Successfully deleted`);
  });
});

// PATCH /update/:id — partial update.
router.patch("/update/:id", (req, res, next) => {
  Payment.findByIdAndUpdate(req.params.id,
    req.body,
    { new: true },
    (err) => {
      if (err) {
        return next(err);
      }
      res.status(202).send(`Successfully updated: ${req.params.id}`);
    })
});

// PATCH /replace/:id — same underlying update, different success message.
router.patch("/replace/:id", (req, res, next) => {
  Payment.findByIdAndUpdate(req.params.id, req.body, { new: true }, (err) => {
    if (err) {
      return next(err);
    }
    res.status(202).send(`Successfully replaced: ${req.params.id}`);
  });
});

module.exports = router;
package main
import (
"flag"
"log"
"os"
"awoo.nl/hatchcert"
)
// TODO:
//
// hatchcert
// Ensure all certificates listed in the configuration file are within the
// desired validity period.
//
// hatchcert account
// Perform account registration and key management.
//
// -refresh Forcefully unset saved registration and fetch/create it again
// -rekey Forcefully create new account key
//
// hatchcert issue [name]
// Forcefully issue certificates, ignoring current validity.
// main dispatches on the first positional argument:
//
//	reconcile (or none) - renew only certificates outside their validity window
//	issue               - force issuance of every configured certificate
//	account             - registration/key management only (no issuance)
//	status              - print certificate status and exit
func main() {
	path := flag.String("path", "/var/lib/acme", "Output directory")
	cfile := flag.String("conf", "/etc/hatchcert/config", "Config file")
	flag.Parse()
	conf := hatchcert.Conf(*cfile)
	if !conf.AcceptedTOS {
		log.Fatalln("You must accept the terms of service")
	}
	if conf.Email == "" {
		log.Fatalln("Email is required")
	}
	var err error
	var want []hatchcert.Cert // certificates that still need issuing
	hook := false             // run update hooks only for the reconcile path
	switch opt := flag.Arg(0); opt {
	case "reconcile", "":
		hook = true
		// ScanCerts errors are logged but not fatal: a partial scan can
		// still yield work to do.
		want, err = hatchcert.ScanCerts(*path, conf.Certs)
		if err != nil {
			log.Println("ScanCerts:", err)
		}
		if len(want) == 0 {
			// Nothing to do
			return
		}
	case "issue":
		want = conf.Certs
	case "account":
		// want stays empty: Setup below registers the account, then we exit.
	case "status":
		hatchcert.Active(*path, conf.Certs)
		return
	default:
		log.Fatalf("Unknown command: %v", opt)
	}
	// Account registration/setup happens for every issuing path and "account".
	account := hatchcert.Account(*path)
	if err := hatchcert.Setup(account, conf.ACME, conf.Email); err != nil {
		log.Fatalln(err)
	}
	if len(want) == 0 {
		return
	}
	if len(conf.Challenge.HTTP)+len(conf.Challenge.DNS) == 0 {
		log.Fatalln("Cannot issue certificates without challenge method")
	}
	must := func(err error) {
		if err != nil {
			log.Fatal(err)
		}
	}
	must(hatchcert.ChallengesHTTP(account.Client, conf.Challenge.HTTP))
	must(hatchcert.ChallengesDNS(account.Client, conf.Challenge.DNS))
	// Default action: create or refresh certs
	failed := false
	issued := false
	for _, req := range want {
		err := hatchcert.Issue(account, req)
		if err != nil {
			failed = true
			log.Println("Failed to issue:", err)
		} else {
			issued = true
		}
	}
	if issued && hook {
		// NOTE: the loop variable shadows the outer `hook` bool; harmless
		// here since the outer flag is not read again.
		for _, hook := range conf.UpdateHooks {
			if err := hatchcert.Hook(hook); err != nil {
				log.Println("Failed to run update hook:", err)
				failed = true
			}
		}
	}
	if failed {
		os.Exit(1)
	}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.