text stringlengths 1 1.05M |
|---|
/*
* __ ___ __ ____ __ __
* / |/ /__ _____/ /__ / _// /_/ /
* / /|_/ / _ `/ __/ '_/_/ / / __/_/
* /_/ /_/\_,_/_/ /_/\_\/___/ \__(_)
*
* MIT License
*
* Copyright (c) 2021 <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
#ifndef SHA256_H
#define SHA256_H
#include <string>

// SHA-256 message-digest class (SHA-2 family, FIPS 180-2 style layout).
// Typical usage: init() -> update() one or more times -> final().
class SHA256
{
protected:
    // Fixed-width aliases used by the implementation.
    typedef unsigned char uint8;
    typedef unsigned int uint32;
    typedef unsigned long long uint64;
    // Round constants K (defined in the implementation file).
    const static uint32 sha256_k[];
    // SHA-224/256 process the message in 512-bit (64-byte) blocks.
    static const unsigned int SHA224_256_BLOCK_SIZE = (512/8);
public:
    // Reset the running hash state to the SHA-256 initial values.
    void init();
    // Absorb `len` bytes of `message` into the running hash.
    void update(const unsigned char *message, unsigned int len);
    // Apply final padding and write DIGEST_SIZE bytes into `digest`.
    void final(unsigned char *digest);
    // Digest length in bytes (256 bits).
    static const unsigned int DIGEST_SIZE = ( 256 / 8);
protected:
    // Compress `block_nb` consecutive 64-byte blocks into the state.
    void transform(const unsigned char *message, unsigned int block_nb);
    unsigned int m_tot_len;                          // total bytes absorbed so far
    unsigned int m_len;                              // bytes buffered in m_block
    unsigned char m_block[2*SHA224_256_BLOCK_SIZE];  // carry-over / padding buffer
    uint32 m_h[8];                                   // working hash state h0..h7
};

// Convenience helper: hash `input` in one call (presumably returning the
// digest as a hex string — confirm against the implementation file).
std::string sha256(std::string input);

// Bitwise primitives from the SHA-2 specification.
#define SHA2_SHFR(x, n) (x >> n)
#define SHA2_ROTR(x, n) ((x >> n) | (x << ((sizeof(x) << 3) - n)))
#define SHA2_ROTL(x, n) ((x << n) | (x >> ((sizeof(x) << 3) - n)))
#define SHA2_CH(x, y, z) ((x & y) ^ (~x & z))
#define SHA2_MAJ(x, y, z) ((x & y) ^ (x & z) ^ (y & z))
#define SHA256_F1(x) (SHA2_ROTR(x, 2) ^ SHA2_ROTR(x, 13) ^ SHA2_ROTR(x, 22))
#define SHA256_F2(x) (SHA2_ROTR(x, 6) ^ SHA2_ROTR(x, 11) ^ SHA2_ROTR(x, 25))
#define SHA256_F3(x) (SHA2_ROTR(x, 7) ^ SHA2_ROTR(x, 18) ^ SHA2_SHFR(x, 3))
#define SHA256_F4(x) (SHA2_ROTR(x, 17) ^ SHA2_ROTR(x, 19) ^ SHA2_SHFR(x, 10))
// Store a 32-bit word into 4 bytes, big-endian.
#define SHA2_UNPACK32(x, str) \
{ \
    *((str) + 3) = (uint8) ((x) ); \
    *((str) + 2) = (uint8) ((x) >> 8); \
    *((str) + 1) = (uint8) ((x) >> 16); \
    *((str) + 0) = (uint8) ((x) >> 24); \
}
// Load 4 big-endian bytes into a 32-bit word.
#define SHA2_PACK32(str, x) \
{ \
    *(x) = ((uint32) *((str) + 3) ) \
        | ((uint32) *((str) + 2) << 8) \
        | ((uint32) *((str) + 1) << 16) \
        | ((uint32) *((str) + 0) << 24); \
}
#endif
|
#!/usr/bin/env -S bash -euET -o pipefail -O inherit_errexit
# ktools loss-calculation pipeline for event partition P2 (generated-style
# script): computes ground-up (gul) and insured (il) losses for summary
# sets S1/S2, feeding eltcalc/summarycalc/pltcalc consumers through FIFOs.
# FIX: quote the `cd` command substitution so paths with spaces work.
SCRIPT=$(readlink -f "$0") && cd "$(dirname "$SCRIPT")"
# --- Script Init ---
mkdir -p log
rm -R -f log/*
# --- Setup run dirs ---
find output -type f -not -name '*summary-info*' -not -name '*.json' -exec rm -R -f {} +
rm -R -f fifo/*
rm -R -f work/*
mkdir work/kat/
fmpy -a2 --create-financial-structure-files
mkdir work/gul_S1_summaryleccalc
mkdir work/gul_S1_summaryaalcalc
mkdir work/gul_S2_summaryleccalc
mkdir work/gul_S2_summaryaalcalc
mkdir work/il_S1_summaryleccalc
mkdir work/il_S1_summaryaalcalc
mkdir work/il_S2_summaryleccalc
mkdir work/il_S2_summaryaalcalc
mkfifo fifo/gul_P2
mkfifo fifo/gul_S1_summary_P2
mkfifo fifo/gul_S1_summary_P2.idx
mkfifo fifo/gul_S1_eltcalc_P2
mkfifo fifo/gul_S1_summarycalc_P2
mkfifo fifo/gul_S1_pltcalc_P2
mkfifo fifo/gul_S2_summary_P2
mkfifo fifo/gul_S2_summary_P2.idx
mkfifo fifo/gul_S2_eltcalc_P2
mkfifo fifo/gul_S2_summarycalc_P2
mkfifo fifo/gul_S2_pltcalc_P2
mkfifo fifo/il_P2
mkfifo fifo/il_S1_summary_P2
mkfifo fifo/il_S1_summary_P2.idx
mkfifo fifo/il_S1_eltcalc_P2
mkfifo fifo/il_S1_summarycalc_P2
mkfifo fifo/il_S1_pltcalc_P2
mkfifo fifo/il_S2_summary_P2
mkfifo fifo/il_S2_summary_P2.idx
mkfifo fifo/il_S2_eltcalc_P2
mkfifo fifo/il_S2_summarycalc_P2
mkfifo fifo/il_S2_pltcalc_P2
# --- Do insured loss computes ---
# Consumers read each summary FIFO; tee fans each summary stream out to the
# per-output FIFOs and the aal/lec work files.
eltcalc -s < fifo/il_S1_eltcalc_P2 > work/kat/il_S1_eltcalc_P2 & pid1=$!
summarycalctocsv -s < fifo/il_S1_summarycalc_P2 > work/kat/il_S1_summarycalc_P2 & pid2=$!
pltcalc -s < fifo/il_S1_pltcalc_P2 > work/kat/il_S1_pltcalc_P2 & pid3=$!
eltcalc -s < fifo/il_S2_eltcalc_P2 > work/kat/il_S2_eltcalc_P2 & pid4=$!
summarycalctocsv -s < fifo/il_S2_summarycalc_P2 > work/kat/il_S2_summarycalc_P2 & pid5=$!
pltcalc -s < fifo/il_S2_pltcalc_P2 > work/kat/il_S2_pltcalc_P2 & pid6=$!
tee < fifo/il_S1_summary_P2 fifo/il_S1_eltcalc_P2 fifo/il_S1_summarycalc_P2 fifo/il_S1_pltcalc_P2 work/il_S1_summaryaalcalc/P2.bin work/il_S1_summaryleccalc/P2.bin > /dev/null & pid7=$!
tee < fifo/il_S1_summary_P2.idx work/il_S1_summaryleccalc/P2.idx > /dev/null & pid8=$!
tee < fifo/il_S2_summary_P2 fifo/il_S2_eltcalc_P2 fifo/il_S2_summarycalc_P2 fifo/il_S2_pltcalc_P2 work/il_S2_summaryaalcalc/P2.bin work/il_S2_summaryleccalc/P2.bin > /dev/null & pid9=$!
tee < fifo/il_S2_summary_P2.idx work/il_S2_summaryleccalc/P2.idx > /dev/null & pid10=$!
summarycalc -m -f -1 fifo/il_S1_summary_P2 -2 fifo/il_S2_summary_P2 < fifo/il_P2 &
# --- Do ground up loss computes ---
eltcalc -s < fifo/gul_S1_eltcalc_P2 > work/kat/gul_S1_eltcalc_P2 & pid11=$!
summarycalctocsv -s < fifo/gul_S1_summarycalc_P2 > work/kat/gul_S1_summarycalc_P2 & pid12=$!
pltcalc -s < fifo/gul_S1_pltcalc_P2 > work/kat/gul_S1_pltcalc_P2 & pid13=$!
eltcalc -s < fifo/gul_S2_eltcalc_P2 > work/kat/gul_S2_eltcalc_P2 & pid14=$!
summarycalctocsv -s < fifo/gul_S2_summarycalc_P2 > work/kat/gul_S2_summarycalc_P2 & pid15=$!
pltcalc -s < fifo/gul_S2_pltcalc_P2 > work/kat/gul_S2_pltcalc_P2 & pid16=$!
tee < fifo/gul_S1_summary_P2 fifo/gul_S1_eltcalc_P2 fifo/gul_S1_summarycalc_P2 fifo/gul_S1_pltcalc_P2 work/gul_S1_summaryaalcalc/P2.bin work/gul_S1_summaryleccalc/P2.bin > /dev/null & pid17=$!
tee < fifo/gul_S1_summary_P2.idx work/gul_S1_summaryleccalc/P2.idx > /dev/null & pid18=$!
tee < fifo/gul_S2_summary_P2 fifo/gul_S2_eltcalc_P2 fifo/gul_S2_summarycalc_P2 fifo/gul_S2_pltcalc_P2 work/gul_S2_summaryaalcalc/P2.bin work/gul_S2_summaryleccalc/P2.bin > /dev/null & pid19=$!
tee < fifo/gul_S2_summary_P2.idx work/gul_S2_summaryleccalc/P2.idx > /dev/null & pid20=$!
summarycalc -m -i -1 fifo/gul_S1_summary_P2 -2 fifo/gul_S2_summary_P2 < fifo/gul_P2 &
# Producer: events for partition 2 of 2 through model/gulcalc, tee'd to the
# ground-up FIFO and piped through the financial module to the insured FIFO.
eve 2 2 | getmodel | gulcalc -S0 -L0 -r -a0 -i - | tee fifo/gul_P2 | fmpy -a2 > fifo/il_P2 &
wait $pid1 $pid2 $pid3 $pid4 $pid5 $pid6 $pid7 $pid8 $pid9 $pid10 $pid11 $pid12 $pid13 $pid14 $pid15 $pid16 $pid17 $pid18 $pid19 $pid20
# --- Do insured loss kats ---
kat -s work/kat/il_S1_eltcalc_P2 > output/il_S1_eltcalc.csv & kpid1=$!
kat work/kat/il_S1_pltcalc_P2 > output/il_S1_pltcalc.csv & kpid2=$!
kat work/kat/il_S1_summarycalc_P2 > output/il_S1_summarycalc.csv & kpid3=$!
kat -s work/kat/il_S2_eltcalc_P2 > output/il_S2_eltcalc.csv & kpid4=$!
kat work/kat/il_S2_pltcalc_P2 > output/il_S2_pltcalc.csv & kpid5=$!
kat work/kat/il_S2_summarycalc_P2 > output/il_S2_summarycalc.csv & kpid6=$!
# --- Do ground up loss kats ---
kat -s work/kat/gul_S1_eltcalc_P2 > output/gul_S1_eltcalc.csv & kpid7=$!
kat work/kat/gul_S1_pltcalc_P2 > output/gul_S1_pltcalc.csv & kpid8=$!
kat work/kat/gul_S1_summarycalc_P2 > output/gul_S1_summarycalc.csv & kpid9=$!
kat -s work/kat/gul_S2_eltcalc_P2 > output/gul_S2_eltcalc.csv & kpid10=$!
kat work/kat/gul_S2_pltcalc_P2 > output/gul_S2_pltcalc.csv & kpid11=$!
kat work/kat/gul_S2_summarycalc_P2 > output/gul_S2_summarycalc.csv & kpid12=$!
wait $kpid1 $kpid2 $kpid3 $kpid4 $kpid5 $kpid6 $kpid7 $kpid8 $kpid9 $kpid10 $kpid11 $kpid12
|
<reponame>VICEMedia/bitmovin-javascript
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var urljoin = require("url-join");
var http_1 = require("../utils/http");
exports.thirdPartyLicensing = function (configuration, licenseId, httpClient) {
var get = httpClient.get, post = httpClient.post, delete_ = httpClient.delete_;
return {
delete: function () {
var url = urljoin(configuration.apiBaseUrl, 'player/licenses', licenseId, 'third-party-licensing');
return delete_(configuration, url);
},
get: function () {
var url = urljoin(configuration.apiBaseUrl, 'player/licenses', licenseId, 'third-party-licensing');
return get(configuration, url);
},
add: function (thirdPartyLicensingPayload) {
var url = urljoin(configuration.apiBaseUrl, 'player/licenses', licenseId, 'third-party-licensing');
return post(configuration, url, thirdPartyLicensingPayload);
}
};
};
// Default factory: same API surface, pre-wired with the shared HTTP client.
exports.default = (function (configuration, licenseId) {
    return exports.thirdPartyLicensing(configuration, licenseId, http_1.default);
});
//# sourceMappingURL=thirdPartyLicensing.js.map |
#!/bin/bash
#set -x
# Remote-control a running omxplayer instance over its private session D-Bus.
# omxplayer writes its bus address and PID to these well-known files at start.
OMXPLAYER_DBUS_ADDR="/tmp/omxplayerdbus.${USER:-root}"
OMXPLAYER_DBUS_PID="/tmp/omxplayerdbus.${USER:-root}.pid"
export DBUS_SESSION_BUS_ADDRESS=`cat $OMXPLAYER_DBUS_ADDR`
export DBUS_SESSION_BUS_PID=`cat $OMXPLAYER_DBUS_PID`
# Bail out if no player bus is available (player not running).
[ -z "$DBUS_SESSION_BUS_ADDRESS" ] && { echo "Must have DBUS_SESSION_BUS_ADDRESS" >&2; exit 1; }
case $1 in
status)
    # Read Duration/Position/PlaybackStatus via MPRIS properties; each value
    # arrives as a literal reply whose second field is the number we want.
    duration=`dbus-send --print-reply=literal --session --reply-timeout=500 --dest=org.mpris.MediaPlayer2.omxplayer /org/mpris/MediaPlayer2 org.freedesktop.DBus.Properties.Get string:"org.mpris.MediaPlayer2.Player" string:"Duration"`
    [ $? -ne 0 ] && exit 1
    duration="$(awk '{print $2}' <<< "$duration")"
    position=`dbus-send --print-reply=literal --session --reply-timeout=500 --dest=org.mpris.MediaPlayer2.omxplayer /org/mpris/MediaPlayer2 org.freedesktop.DBus.Properties.Get string:"org.mpris.MediaPlayer2.Player" string:"Position"`
    [ $? -ne 0 ] && exit 1
    position="$(awk '{print $2}' <<< "$position")"
    playstatus=`dbus-send --print-reply=literal --session --reply-timeout=500 --dest=org.mpris.MediaPlayer2.omxplayer /org/mpris/MediaPlayer2 org.freedesktop.DBus.Properties.Get string:"org.mpris.MediaPlayer2.Player" string:"PlaybackStatus"`
    [ $? -ne 0 ] && exit 1
    # Trim surrounding whitespace before comparing to "Playing".
    playstatus="$(sed 's/^ *//;s/ *$//;' <<< "$playstatus")"
    paused="true"
    [ "$playstatus" == "Playing" ] && paused="false"
    echo "Duration: $duration"
    echo "Position: $position"
    echo "Paused: $paused"
    ;;
openuri)
    dbus-send --print-reply=literal --session --dest=org.mpris.MediaPlayer2.omxplayer /org/mpris/MediaPlayer2 org.mpris.MediaPlayer2.Player.OpenUri string:"$2" >/dev/null
    ;;
volume)
    # With an argument, sets Volume; without, the Set call's reply still
    # carries the current value (omxplayer-specific behavior — confirm).
    volume=`dbus-send --print-reply=double --session --reply-timeout=500 --dest=org.mpris.MediaPlayer2.omxplayer /org/mpris/MediaPlayer2 org.freedesktop.DBus.Properties.Set string:"org.mpris.MediaPlayer2.Player" string:"Volume" ${2:+double:}$2`
    [ $? -ne 0 ] && exit 1
    volume="$(awk '{print $2}' <<< "$volume")"
    echo "Volume: $volume"
    ;;
pause)
    # The Action method sends omxplayer keyboard-action codes (here 16).
    dbus-send --print-reply=literal --session --dest=org.mpris.MediaPlayer2.omxplayer /org/mpris/MediaPlayer2 org.mpris.MediaPlayer2.Player.Action int32:16 >/dev/null
    ;;
stop)
    dbus-send --print-reply=literal --session --dest=org.mpris.MediaPlayer2.omxplayer /org/mpris/MediaPlayer2 org.mpris.MediaPlayer2.Player.Action int32:15 >/dev/null
    ;;
seek)
    # Relative seek, offset in microseconds.
    dbus-send --print-reply=literal --session --dest=org.mpris.MediaPlayer2.omxplayer /org/mpris/MediaPlayer2 org.mpris.MediaPlayer2.Player.Seek int64:$2 >/dev/null
    ;;
setposition)
    # Absolute position in microseconds; the object path argument is ignored.
    dbus-send --print-reply=literal --session --dest=org.mpris.MediaPlayer2.omxplayer /org/mpris/MediaPlayer2 org.mpris.MediaPlayer2.Player.SetPosition objpath:/not/used int64:$2 >/dev/null
    ;;
setalpha)
    dbus-send --print-reply=literal --session --dest=org.mpris.MediaPlayer2.omxplayer /org/mpris/MediaPlayer2 org.mpris.MediaPlayer2.Player.SetAlpha objpath:/not/used int64:$2 >/dev/null
    ;;
setlayer)
    dbus-send --print-reply=literal --session --dest=org.mpris.MediaPlayer2.omxplayer /org/mpris/MediaPlayer2 org.mpris.MediaPlayer2.Player.SetLayer int64:$2 >/dev/null
    ;;
setvideopos)
    # Window coordinates passed as one "x1 y1 x2 y2" string.
    dbus-send --print-reply=literal --session --dest=org.mpris.MediaPlayer2.omxplayer /org/mpris/MediaPlayer2 org.mpris.MediaPlayer2.Player.VideoPos objpath:/not/used string:"$2 $3 $4 $5" >/dev/null
    ;;
setvideocroppos)
    dbus-send --print-reply=literal --session --dest=org.mpris.MediaPlayer2.omxplayer /org/mpris/MediaPlayer2 org.mpris.MediaPlayer2.Player.SetVideoCropPos objpath:/not/used string:"$2 $3 $4 $5" >/dev/null
    ;;
setaspectmode)
    dbus-send --print-reply=literal --session --dest=org.mpris.MediaPlayer2.omxplayer /org/mpris/MediaPlayer2 org.mpris.MediaPlayer2.Player.SetAspectMode objpath:/not/used string:"$2" >/dev/null
    ;;
hidevideo)
    dbus-send --print-reply=literal --session --dest=org.mpris.MediaPlayer2.omxplayer /org/mpris/MediaPlayer2 org.mpris.MediaPlayer2.Player.Action int32:28 >/dev/null
    ;;
unhidevideo)
    dbus-send --print-reply=literal --session --dest=org.mpris.MediaPlayer2.omxplayer /org/mpris/MediaPlayer2 org.mpris.MediaPlayer2.Player.Action int32:29 >/dev/null
    ;;
volumeup)
    dbus-send --print-reply=literal --session --dest=org.mpris.MediaPlayer2.omxplayer /org/mpris/MediaPlayer2 org.mpris.MediaPlayer2.Player.Action int32:18 >/dev/null
    ;;
volumedown)
    dbus-send --print-reply=literal --session --dest=org.mpris.MediaPlayer2.omxplayer /org/mpris/MediaPlayer2 org.mpris.MediaPlayer2.Player.Action int32:17 >/dev/null
    ;;
togglesubtitles)
    dbus-send --print-reply=literal --session --dest=org.mpris.MediaPlayer2.omxplayer /org/mpris/MediaPlayer2 org.mpris.MediaPlayer2.Player.Action int32:12 >/dev/null
    ;;
hidesubtitles)
    dbus-send --print-reply=literal --session --dest=org.mpris.MediaPlayer2.omxplayer /org/mpris/MediaPlayer2 org.mpris.MediaPlayer2.Player.Action int32:30 >/dev/null
    ;;
showsubtitles)
    dbus-send --print-reply=literal --session --dest=org.mpris.MediaPlayer2.omxplayer /org/mpris/MediaPlayer2 org.mpris.MediaPlayer2.Player.Action int32:31 >/dev/null
    ;;
getsource)
    source=$(dbus-send --print-reply=literal --session --reply-timeout=500 --dest=org.mpris.MediaPlayer2.omxplayer /org/mpris/MediaPlayer2 org.mpris.MediaPlayer2.Player.GetSource)
    [ $? -ne 0 ] && exit 1
    # Strip the leading indentation dbus-send adds to literal replies.
    echo "$source" | sed 's/^ *//'
    ;;
*)
    echo "usage: $0 status|openuri|pause|stop|seek|volumeup|volumedown|setposition [position in microseconds]|hidevideo|unhidevideo|togglesubtitles|hidesubtitles|showsubtitles|setvideopos [x1 y1 x2 y2]|setvideocroppos [x1 y1 x2 y2]|setaspectmode [letterbox,fill,stretch,default]|setalpha [alpha (0..255)]|setlayer [layer]|getsource" >&2
    exit 1
    ;;
esac
<reponame>ooooo-youwillsee/leetcode
//
// Created by ooooo on 2020/2/15.
//
#ifndef CPP_0094__SOLUTION2_H_
#define CPP_0094__SOLUTION2_H_
#include "TreeNode.h"
#include <stack>
#include <unordered_set>
/**
 * Iterative inorder traversal (left, node, right) of a binary tree.
 *
 * Improvements over the visited-set version: the locals no longer shadow
 * the std::stack/std::unordered_set template names, and the O(n) hash set
 * is gone — the classic two-phase stack walk needs only the stack itself.
 */
class Solution {
public:
    vector<int> inorderTraversal(TreeNode *root) {
        vector<int> ans;
        stack<TreeNode *> pending;
        TreeNode *node = root;
        while (node != nullptr || !pending.empty()) {
            // Descend to the leftmost unvisited node, remembering the path.
            while (node != nullptr) {
                pending.push(node);
                node = node->left;
            }
            // Visit the node, then continue with its right subtree.
            node = pending.top();
            pending.pop();
            ans.push_back(node->val);
            node = node->right;
        }
        return ans;
    }
};
#endif //CPP_0094__SOLUTION2_H_
|
<gh_stars>1-10
#ifndef gl_LShaderProgram_h_
#define gl_LShaderProgram_h_
#include "gl/glLOpenGL.h"
#include <stdbool.h>
/// Thin wrapper around an OpenGL shader program object name.
typedef struct
{
    // program id
    GLuint program_id;
} gl_LShaderProgram;
///
/// Create a new shader program.
///
/// \return Newly created gl_LShaderProgram returned as pointer to gl_LShaderProgram
///
extern gl_LShaderProgram* gl_LShaderProgram_new();
///
/// Free shader program.
///
/// \param shader_program Pointer to gl_LShaderProgram
///
extern void gl_LShaderProgram_free(gl_LShaderProgram* shader_program);
///
/// Free shader program
///
/// NOTE(review): the name suggests this releases only the GL program object
/// (program_id) without freeing the wrapper struct, unlike _free above —
/// confirm against the implementation file.
///
/// \param shader_program Pointer to gl_LShaderProgram
///
extern void gl_LShaderProgram_free_program(gl_LShaderProgram* shader_program);
///
/// Bind this shader program, thus set this shader program as the current shader program
///
/// \param shader_program Pointer to gl_LShaderProgram
/// \return True if bind successfully, otherwise return false
///
extern bool gl_LShaderProgram_bind(gl_LShaderProgram* shader_program);
///
/// Unbind this shader program, thus unset it as the current shader program.
///
/// \param shader_program Pointer to gl_LShaderProgram
///
extern void gl_LShaderProgram_unbind(gl_LShaderProgram* shader_program);
///
/// Print out log for input program id (or say program name).
///
/// \param program_id Program id to print log
///
extern void gl_LShaderProgram_print_program_log(GLuint program_id);
///
/// Print out log for input shader id (or say shader name).
///
/// \param shader_id Shader id to print log
///
extern void gl_LShaderProgram_print_shader_log(GLuint shader_id);
#endif
|
#!/bin/bash
# The last node in /alluxio/conf/workers is the master for under filesystem,
# this is guaranteed by generation process of /alluxio/conf/workers in script vagrant/create.
UFS_MASTER=$(tail -n1 /alluxio/conf/workers)
# Point Alluxio's root UFS mount at the HDFS namenode on that host.
# NOTE(review): the heredoc appends, so re-running adds duplicate lines —
# presumably this runs once per provisioning; confirm before re-running.
cat >> /alluxio/conf/alluxio-env.sh << EOF
ALLUXIO_MASTER_MOUNT_TABLE_ROOT_UFS="hdfs://${UFS_MASTER}:9000"
EOF
|
import java.sql.ResultSet;
import java.sql.SQLException;
public class TeamMapper implements RowMapper<Team> {
@Override
public Team mapRow(ResultSet resultSet, int rowNum) throws SQLException {
Team team = new Team();
team.setId(resultSet.getInt("id"));
team.setName(resultSet.getString("name"));
team.setCity(resultSet.getString("city"));
return team;
}
} |
<reponame>vnunez85/easy-email<gh_stars>0
import React from 'react';
import { Padding } from '@/components/EmailEditor/components/ConfigurationPanel/components/AttributesManager/components/Padding';
import { Stack } from '@/components/UI/Stack';
import { TextDecoration } from '@/components/EmailEditor/components/ConfigurationPanel/components/AttributesManager/components/TextDecoration';
import { FontWeight } from '@/components/EmailEditor/components/ConfigurationPanel/components/AttributesManager/components/FontWeight';
import { FontStyle } from '@/components/EmailEditor/components/ConfigurationPanel/components/AttributesManager/components/FontStyle';
import { TextTransform } from '@/components/EmailEditor/components/ConfigurationPanel/components/AttributesManager/components/TextTransform';
import { FontFamily } from '@/components/EmailEditor/components/ConfigurationPanel/components/AttributesManager/components/FontFamliy';
import { Height } from '@/components/EmailEditor/components/ConfigurationPanel/components/AttributesManager/components/Height';
import { ContainerBackgroundColor } from '@/components/EmailEditor/components/ConfigurationPanel/components/AttributesManager/components/ContainerBackgroundColor';
import { FontSize } from '@/components/EmailEditor/components/ConfigurationPanel/components/AttributesManager/components/FontSize';
import { Color } from '@/components/EmailEditor/components/ConfigurationPanel/components/AttributesManager/components/Color';
import { Align } from '@/components/EmailEditor/components/ConfigurationPanel/components/AttributesManager/components/Align';
import { LineHeight } from '@/components/EmailEditor/components/ConfigurationPanel/components/AttributesManager/components/LineHeight';
import { LetterSpacing } from '@/components/EmailEditor/components/ConfigurationPanel/components/AttributesManager/components/LetterSpacing';
import { RichTextField } from '@/components/core/Form';
import { useFocusIdx } from '@/hooks/useFocusIdx';
// Attribute-configuration panel for the currently focused text-like block:
// a rich-text editor for the content, followed by its style controls.
export function Panel() {
  // Index path of the block currently focused in the editor.
  const { focusIdx } = useFocusIdx();
  return (
    <Stack vertical>
      <RichTextField
        idx={focusIdx}
        name={`${focusIdx}.data.value.content`}
        label=''
        labelHidden
      />
      <Color />
      <ContainerBackgroundColor />
      <FontSize />
      <LineHeight />
      <Align />
      <FontStyle />
      <FontWeight />
      <LetterSpacing />
      <Height />
      <FontFamily />
      <TextDecoration />
      <TextTransform />
      <Padding />
    </Stack>
  );
}
|
-- Count how many rows in `people` have the zip code 90210.
SELECT COUNT(*)
FROM people
WHERE zipcode = '90210'
<filename>src/ch13/ex5/ex135.java<gh_stars>0
package ch13.ex5;
import java.io.*;
import static java.lang.System.*;
/**
 * Project: ch13.ex5
 * Date: 2/26/2018
 *
 * Random-access file reader: prompts for a file and a 1-based record
 * number, then echoes the next LETTERS_TO_READ records (BYTES_PER_REC
 * bytes each) from that offset, upper-cased.
 *
 * @author <NAME>
 */
public class ex135
{
    public void start()
    {
        final int BYTES_PER_REC = 3;    // fixed record width in bytes
        final int LETTERS_TO_READ = 5;  // number of records to echo
        final long INDEX_LIMIT;         // offset one past the last byte to read
        try (BufferedReader input = new BufferedReader(new InputStreamReader(in)))
        {
            out.print("Enter filename: ");
            try (RandomAccessFile file = new RandomAccessFile(input.readLine(), "r"))
            {
                out.print("Enter record number: ");
                // Convert the 1-based record number into a byte offset.
                file.seek((Long.parseLong(input.readLine()) - 1) * BYTES_PER_REC);
                INDEX_LIMIT = file.getFilePointer() + (LETTERS_TO_READ * BYTES_PER_REC);
                while (file.getFilePointer() < INDEX_LIMIT)
                {
                    // Bytes are echoed upper-cased; assumes single-byte
                    // characters — TODO confirm encoding of the input file.
                    out.print(Character.toUpperCase((char)file.readByte()));
                }
            }
            catch (EOFException e)
            {
                // File ended before INDEX_LIMIT — just terminate the line.
                out.println();
            }
        }
        catch (IOException | NumberFormatException e)
        {
            err.println("Error: " + e.getMessage());
            exit(1);
        }
    }
    public static void main(String[] args)
    {
        new ex135().start();
    }
}
# -*- coding: utf-8 -*-
from app.api import create_app

# Application instance picked up by ASGI servers as "main:app".
app = create_app()

if __name__ == "__main__":
    import uvicorn

    # Development entry point: listen on all interfaces with auto-reload.
    # NOTE(review): `debug=True` was removed from uvicorn.run in newer
    # uvicorn releases — confirm the pinned version still accepts it.
    uvicorn.run(app='main:app', host="0.0.0.0", port=8000, reload=True, debug=True)
|
<reponame>jayce-incognito/leapxpert-test-fe
export interface IReducer {}

// Initial (empty) slice state.
const initialState: IReducer = {};

/**
 * Placeholder reducer: no real action types are handled yet. An action
 * whose type is the empty string yields a shallow copy of the state;
 * every other action returns the state object unchanged (same reference).
 */
const reducer = (
  state = initialState,
  action: {
    type: string;
    payload: any;
  },
) => {
  if (action.type === '') {
    return { ...state };
  }
  return state;
};

export default reducer;
|
const path = require('path');
// Gatsby build hook: patch the webpack config only for the static-HTML
// build stage, where some browser-only dependencies need workarounds.
exports.onCreateWebpackConfig = ({
  stage,
  rules,
  loaders,
  plugins,
  actions,
}) => {
  if (stage === "build-html") {
    actions.setWebpackConfig({
      resolve: {
        alias: {
          // Force CommonJS entry points for these packages during SSR.
          'json-schema-faker': path.resolve(__dirname, 'node_modules/json-schema-faker/dist/main.cjs.js'),
          'decimal.js': path.resolve(__dirname, 'node_modules/decimal.js/decimal.js'),
        },
      },
      node: {
        // Needed for node_modules/@stoplight/prism-http/dist/getHttpOperations.js
        fs: "empty",
      },
      module: {
        rules: [
          {
            // Stub out canvas — it cannot be bundled for the HTML build.
            test: /canvas/,
            use: loaders.null(),
          },
        ],
      }
    })
  }
};
exports.onCreatePage = async ({ page, actions }) => {
const { createPage } = actions;
// page.matchPath is a special key that's used for matching pages
// only on the client.
const match = page.path.match(/^\/(elements|zoom-api)/);
if (match) {
page.matchPath = `${match[0]}/*`;
// Update the page.
createPage(page);
}
};
|
#!/bin/bash
# Thin wrapper: delegate to the shared macosx sha1 script, forwarding all args.
source "$(dirname "${BASH_SOURCE}")/../macosx/sha1.sh" "${@}"
|
<gh_stars>0
// Game state machine — values inferred from the draw() branches below:
// 1 = login form, 2 = intro text, 3 = level-select screen. TODO confirm.
var gameState=0;
let form;
var backgroundImg;
var database,player;
var wall33

// p5.js hook: load all images/GIFs before setup() runs.
function preload() {
  button = loadImage("button.png");
  backgroundImg = loadImage("background.png");
  wallpaper = loadImage("OIP.jpg");
  bj = loadImage("bj.jpg")
  level1 = loadImage("level1.png")
  level2 = loadImage("level2.png")
  level3 = loadImage("level3.png")
  level4 = loadImage("level4.png")
  level5 = loadImage("level5.png")
  level6 = loadImage("level6.jpg")
  texti=loadImage("text.gif")
}

// p5.js hook: create the canvas, connect Firebase, and build the static
// background/button sprites plus the login form.
function setup() {
  canves = createCanvas(windowWidth,windowHeight);
  database = firebase.database();
  game= new Game();
  game.getState();
  wall2=createSprite(windowWidth/2,windowHeight/2,windowWidth,windowHeight);
  wall2.addImage(backgroundImg);
  wall2.scale=0.56;
  wall=createSprite(windowWidth/2,windowHeight/2,windowWidth,windowHeight);
  wall.addImage(wallpaper);
  wall.scale=3;
  button1=createSprite(windowWidth/2,windowHeight/2+250,50,50);
  button1.addImage(button);
  button1.scale=0.15
  form=new Form();
}

// p5.js hook: per-frame render; branches on gameState to show the form,
// intro text, or level-select sprites, then delegates to the Game object.
function draw() {
  background(bj);
  //backgroundImg.scale=windowWidth,windowHeight;
  //form.show();
  if (gameState===1){
    form.show();
    game.update(1);
  }
  if(gameState===2){
    // NOTE(review): sprites are re-created every frame while in this
    // state — presumably harmless here, but worth confirming.
    wall33=createSprite(windowWidth/2,windowHeight/2,100,100);
    wall33.addImage(texti);
    // if (gameState===3){
    // wall33.visible=false;
    // }
  }
  if(gameState ===3){
    level=createSprite(windowWidth/2-500,windowHeight/2-200,50,50);
    level.addImage(level1);
    //level.scale=0.5
    l2=createSprite(windowWidth/2-350,windowHeight/2-200,50,50);
    l2.addImage(level2);
    l3=createSprite(windowWidth/2-200,windowHeight/2-200,50,50);
    l3.addImage(level3);
    l4=createSprite(windowWidth/2-50,windowHeight/2-200,50,50);
    l4.addImage(level4);
    l5=createSprite(windowWidth/2+100,windowHeight/2-200,50,50);
    l5.addImage(level5);
    l6=createSprite(windowWidth/2-450,windowHeight/2+50,50,50);
    l6.addImage(level6);
    l6.scale=0.25
  }
  game.start();
  game.play();
  drawSprites();
}

// function loadVideo(id)
// {
// var video = document.getElementById('video');
//var mp4 = document.getElementById('mp4');
// mp4.src = "vidz/" + id;
// video.load();
// video.play();
// }
|
package io.opensphere.controlpanels.component.map.boundingbox;
import static org.junit.Assert.assertEquals;
import java.util.List;
import java.util.Observer;
import org.easymock.EasyMock;
import org.easymock.EasyMockSupport;
import org.junit.Test;
import io.opensphere.core.math.LineSegment2d;
import io.opensphere.core.math.Vector2d;
import io.opensphere.core.util.collections.New;
/**
 * Tests the BoundingBoxModel class.
 */
public class BoundingBoxModelTest
{
    /**
     * Tests that setting a bounding box stores the segments and notifies
     * registered observers with the BOUNDING_BOX_PROP change key.
     */
    @Test
    public void testSetBoundingBox()
    {
        BoundingBoxModel model = new BoundingBoxModel();
        // A single segment is enough to exercise storage + notification.
        List<LineSegment2d> boundingBox = New.list(new LineSegment2d(new Vector2d(0, 0), new Vector2d(10, 10)));
        EasyMockSupport support = new EasyMockSupport();
        Observer observer = createObserver(support, model);
        support.replayAll();
        model.addObserver(observer);
        model.setBoundingBox(boundingBox);
        assertEquals(boundingBox.get(0), model.getBoundingBox().iterator().next());
        // Verifies the mocked observer received exactly the expected update.
        support.verifyAll();
    }
    /**
     * Creates an easy mocked observer.
     *
     * @param support Used to create the mock.
     * @param model The expected model.
     * @return The observer.
     */
    private Observer createObserver(EasyMockSupport support, BoundingBoxModel model)
    {
        Observer observer = support.createMock(Observer.class);
        observer.update(EasyMock.eq(model), EasyMock.cmpEq(BoundingBoxModel.BOUNDING_BOX_PROP));
        return observer;
    }
}
|
#!/bin/sh
# Count how often the sum of a sliding window of three measurements
# increases over the previous window (looks like Advent of Code 2021
# day 1 part 2 — input is one integer per line).
inputFile="input.txt"
# Seed the window with the first three lines.
previousMeasurement0=$(head -1 $inputFile)
previousMeasurement1=$(head -2 $inputFile | tail -1)
previousMeasurement2=$(head -3 $inputFile | tail -1)
previousSum=$(($previousMeasurement0+$previousMeasurement1+$previousMeasurement2))
increaseCounter=0
# Slide the window one measurement at a time over the rest of the file.
for line in $(tail -n +4 $inputFile)
do
    previousMeasurement0=$previousMeasurement1
    previousMeasurement1=$previousMeasurement2
    previousMeasurement2=$line
    sum=$(($previousMeasurement0+$previousMeasurement1+$previousMeasurement2))
    if [ $sum -gt $previousSum ]; then
        increaseCounter=$(($increaseCounter+1))
    fi
    previousSum=$sum
done
echo $increaseCounter
from typing import List
def merge_sources(file_sources: List[str], resource_sources: List[str]) -> List[str]:
    """Concatenate file-based and resource-based source lists.

    File sources come first; order within each input list is preserved.
    Neither input is mutated — a new list is returned.
    """
    combined: List[str] = [*file_sources, *resource_sources]
    return combined
# Build the :site-generator system via ASDF's build-op inside SBCL.
sbcl --eval "(asdf:operate :build-op :site-generator)"
|
/* eslint-disable functional/no-return-void */
import test, { ExecutionContext } from 'ava';
import * as E from 'fp-ts/Either';
import * as O from 'fp-ts/Option';
import { pipe } from 'fp-ts/function';
import * as PR from 'io-ts/PathReporter';
import dayjs from './dayjs';
import {
advanceStartDate,
DiscoverGranulesProps,
formatProviderPath,
} from './discovery';
import { PropsHandler } from './lambda';
// Builds an ava test body asserting that `input` decodes successfully to
// `expected` via the DiscoverGranulesProps codec.
const shouldDecode = (input: unknown, expected: unknown) => {
  const impl = (t: ExecutionContext) =>
    pipe(
      DiscoverGranulesProps.decode(input),
      E.match(
        // Decoding failure -> fail the test with the io-ts path report.
        (errors) => t.fail(PR.failure(errors).join('\n')),
        (actual) => t.deepEqual(actual, expected)
      )
    );
  // Give the generated test a descriptive, input-derived title.
  // eslint-disable-next-line functional/immutable-data
  impl.title = () => `should decode ${JSON.stringify(input)}`;
  return impl;
};
// Builds an ava test body asserting that decoding `input` fails, and that
// the failing property paths (e.g. [['config','startDate']]) match `paths`.
const shouldFailToDecode = (input: unknown, paths: readonly (readonly string[])[]) => {
  const impl = (t: ExecutionContext) =>
    pipe(
      DiscoverGranulesProps.decode(input),
      E.match(
        (errors) => {
          const messages = PR.failure(errors);
          // Match all occurrences of '}/NAME' (excluding '}' with lookbehind)
          const actualPaths = messages.map((message) =>
            (message.match(/(?<=})[/][^:]+/g) ?? []).join('')
          );
          // Normalize the expected paths to the same '/a/b' shape.
          const expectedPaths = paths.map((path) =>
            path.map((segment) => `/${segment}`).join('')
          );
          t.deepEqual(actualPaths, expectedPaths, messages.join('\n'));
        },
        // A successful decode means the negative test itself failed.
        (output) => t.fail(`Unexpected output: ${JSON.stringify(output)}`)
      )
    );
  // eslint-disable-next-line functional/immutable-data
  impl.title = () => `should fail to decode ${JSON.stringify(input)}`;
  return impl;
};
// Builds an ava test body: decode `input`, feed it to the handler `f`,
// and assert the handler's result deep-equals `expected`.
const shouldOutput = (
  f: PropsHandler<typeof DiscoverGranulesProps, DiscoverGranulesProps, unknown>,
  input: unknown,
  expected: unknown
) => {
  const impl = (t: ExecutionContext) =>
    pipe(
      DiscoverGranulesProps.decode(input),
      E.match(
        // The input must decode before the handler can be exercised.
        (errors) => t.fail(PR.failure(errors).join('\n')),
        (event) => t.deepEqual(f(event), expected)
      )
    );
  // eslint-disable-next-line functional/immutable-data
  impl.title = () => `should successfully compute ${f.name}(${JSON.stringify(input)})`;
  return impl;
};
//------------------------------------------------------------------------------
// Expected decoding failures
//------------------------------------------------------------------------------
test(
shouldFailToDecode(
{
config: {
providerPathFormat: 'planet/PSScene3Band-yyyyMM',
startDate: '2018-08',
},
},
[['config', 'providerPathFormat']]
)
);
test(
shouldFailToDecode(
{
config: {
providerPathFormat: '[planet/PSScene3Band-]YYYYMM',
startDate: 'hello',
},
},
[['config', 'startDate']]
)
);
test(
shouldFailToDecode(
{
config: {
providerPathFormat: 'planet/PSScene3Band-yyyyMM',
startDate: 'hello',
},
},
[
['config', 'providerPathFormat'],
['config', 'startDate'],
]
)
);
test(
shouldFailToDecode(
{
config: {
providerPathFormat: '[planet/PSScene3Band-]YYYYMM',
startDate: '202101',
endDate: 'never',
},
},
[['config', 'endDate']]
)
);
test(
shouldFailToDecode(
{
config: {
providerPathFormat: '[planet/PSScene3Band-]YYYYMM',
startDate: '202101',
step: 'none',
},
},
[['config', 'step']]
)
);
//------------------------------------------------------------------------------
// Expected decoding successes
//------------------------------------------------------------------------------
test(
shouldDecode(
{
config: {
providerPathFormat: '[planet/PSScene3Band-]YYYYMM',
startDate: '2018-08',
},
},
{
config: {
providerPathFormat: '[planet/PSScene3Band-]YYYYMM',
startDate: new Date('2018-08'),
endDate: O.none,
step: O.none,
},
}
)
);
test(
shouldDecode(
{
config: {
extraProperty: 'whatever',
providerPathFormat: '[planet/PSScene3Band-]YYYYMM',
startDate: '2018-08',
},
},
{
config: {
extraProperty: 'whatever',
providerPathFormat: '[planet/PSScene3Band-]YYYYMM',
startDate: new Date('2018-08'),
endDate: O.none,
step: O.none,
},
}
)
);
// --- Expected decode results for providerPathFormat configs -----------------
// Each case feeds a raw config into the decoder and asserts the normalized
// form: date strings become Date objects, absent/undefined/null optional
// fields become O.none, present ones become O.some(...).

// endDate explicitly undefined decodes to O.none.
test(
  shouldDecode(
    {
      config: {
        providerPathFormat: '[planet/PSScene3Band-]YYYYMM',
        startDate: '2019-08',
        endDate: undefined,
      },
    },
    {
      config: {
        providerPathFormat: '[planet/PSScene3Band-]YYYYMM',
        startDate: new Date('2019-08'),
        endDate: O.none,
        step: O.none,
      },
    }
  )
);
// endDate explicitly null also decodes to O.none.
test(
  shouldDecode(
    {
      config: {
        providerPathFormat: '[planet/PSScene3Band-]YYYYMM',
        startDate: '2019-08',
        endDate: null,
      },
    },
    {
      config: {
        providerPathFormat: '[planet/PSScene3Band-]YYYYMM',
        startDate: new Date('2019-08'),
        endDate: O.none,
        step: O.none,
      },
    }
  )
);
// A present endDate string decodes to O.some(Date); compact 'YYYYMM' is accepted.
test(
  shouldDecode(
    {
      config: {
        providerPathFormat: '[planet/PSScene3Band-]YYYYMM',
        startDate: '2018-08',
        endDate: '202001',
      },
    },
    {
      config: {
        providerPathFormat: '[planet/PSScene3Band-]YYYYMM',
        startDate: new Date('2018-08'),
        endDate: O.some(new Date('202001')),
        step: O.none,
      },
    }
  )
);
// step explicitly undefined decodes to O.none.
test(
  shouldDecode(
    {
      config: {
        providerPathFormat: '[planet/PSScene3Band-]YYYYMM',
        startDate: '2020-08',
        step: undefined,
      },
    },
    {
      config: {
        providerPathFormat: '[planet/PSScene3Band-]YYYYMM',
        startDate: new Date('2020-08'),
        endDate: O.none,
        step: O.none,
      },
    }
  )
);
// step explicitly null also decodes to O.none.
test(
  shouldDecode(
    {
      config: {
        providerPathFormat: '[planet/PSScene3Band-]YYYYMM',
        startDate: '2019-08',
        step: null,
      },
    },
    {
      config: {
        providerPathFormat: '[planet/PSScene3Band-]YYYYMM',
        startDate: new Date('2019-08'),
        endDate: O.none,
        step: O.none,
      },
    }
  )
);
// An ISO-8601 duration string decodes to O.some(dayjs duration).
test(
  shouldDecode(
    {
      config: {
        providerPathFormat: '[planet/PSScene3Band-]YYYYMM',
        startDate: '2018-08',
        step: 'P1M',
      },
    },
    {
      config: {
        providerPathFormat: '[planet/PSScene3Band-]YYYYMM',
        startDate: new Date('2018-08'),
        endDate: O.none,
        step: O.some(dayjs.duration('P1M')),
      },
    }
  )
);
// Both optional fields present at once.
test(
  shouldDecode(
    {
      config: {
        providerPathFormat: '[planet/PSScene3Band-]YYYYMM',
        startDate: '2018-08',
        endDate: '202001',
        step: 'P1M',
      },
    },
    {
      config: {
        providerPathFormat: '[planet/PSScene3Band-]YYYYMM',
        startDate: new Date('2018-08'),
        endDate: O.some(new Date('202001')),
        step: O.some(dayjs.duration('P1M')),
      },
    }
  )
);
//------------------------------------------------------------------------------
// Expected formatProviderPath outputs
//------------------------------------------------------------------------------
// startDate is substituted into the bracketed-literal date-format template.
test(
  shouldOutput(
    formatProviderPath,
    {
      config: {
        providerPathFormat: '[planet/PSScene3Band-]YYYYMM',
        startDate: '2018-08',
      },
    },
    'planet/PSScene3Band-201808'
  )
);
//------------------------------------------------------------------------------
// Expected updateStartDate outputs
//------------------------------------------------------------------------------
// No step configured -> nothing to advance, result is null.
test(
  shouldOutput(
    advanceStartDate,
    {
      config: {
        providerPathFormat: '[planet/PSScene3Band-]YYYYMM',
        startDate: '2018-08',
      },
    },
    null
  )
);
// step: null behaves like no step, even when an endDate is set.
test(
  shouldOutput(
    advanceStartDate,
    {
      config: {
        providerPathFormat: '[planet/PSScene3Band-]YYYYMM',
        startDate: '2018-08',
        endDate: '2021-09',
        step: null,
      },
    },
    null
  )
);
// With a step and no endDate, the start date advances by one step.
test(
  shouldOutput(
    advanceStartDate,
    {
      config: {
        providerPathFormat: '[planet/PSScene3Band-]YYYYMM',
        startDate: '2018-08',
        endDate: null,
        step: 'P1M',
      },
    },
    '2018-09-01T00:00:00.000Z'
  )
);
// With a step and an endDate still in the future, the start date advances.
test(
  shouldOutput(
    advanceStartDate,
    {
      config: {
        providerPathFormat: '[planet/PSScene3Band-]YYYYMM',
        startDate: '2018-08',
        endDate: '2020-01',
        step: 'P1M',
      },
    },
    '2018-09-01T00:00:00.000Z'
  )
);
// Advancing onto the endDate itself yields null.
test(
  shouldOutput(
    advanceStartDate,
    {
      config: {
        providerPathFormat: '[planet/PSScene3Band-]YYYYMM',
        startDate: '2018-08',
        endDate: '2018-09', // endDate is exclusive
        step: 'P1M',
      },
    },
    null
  )
);
// A yearly step advances by one year.
test(
  shouldOutput(
    advanceStartDate,
    {
      config: {
        providerPathFormat: '[planet/PSScene3Band-]YYYYMM',
        startDate: '2018-08',
        endDate: '2020-01',
        step: 'P1Y',
      },
    },
    '2019-08-01T00:00:00.000Z'
  )
);
|
#include <vector>
#include <string>
#include <sstream>

/**
 * Parses a comma-separated list of integers, e.g. "1,2,3" -> {1, 2, 3}.
 *
 * Empty tokens produced by leading, trailing, or doubled commas are skipped
 * instead of letting std::stoi throw std::invalid_argument on "".
 *
 * @param inputString  comma-separated decimal integers
 * @return vector of parsed values, in input order; empty for an empty input
 * @throws std::invalid_argument if a non-empty token is not a valid integer
 * @throws std::out_of_range     if a token does not fit in int
 */
std::vector<int> parseString(const std::string& inputString) {
    std::vector<int> result;
    std::stringstream ss(inputString);
    std::string item;
    while (std::getline(ss, item, ',')) {
        if (item.empty()) {
            continue; // tolerate "1,,2" and trailing commas
        }
        result.push_back(std::stoi(item));
    }
    return result;
}
#!/bin/bash
# Dump request data from the LRG database.
# Usage: <script> <output_file> [is_test]
#   is_test: non-zero selects the test database; 0 or empty selects production.
# Relies on connection settings exported by ~/.lrgpaths (LRGDBHOST, LRGDBPORT,
# LRGDBROUSER, LRGDBNAME, LRGDBNAMETEST, LRGROOTDIR).
. ~/.bashrc
. ~/.lrgpaths

output_file=$1
is_test=$2

host=${LRGDBHOST}
port=${LRGDBPORT}
user=${LRGDBROUSER}
dbname=${LRGDBNAMETEST}

# Default to the production database unless is_test is set and non-zero.
if [[ -z ${is_test} || ${is_test} == 0 ]] ; then
  dbname=${LRGDBNAME}
fi

perldir=${LRGROOTDIR}/lrg-code/scripts/

echo "Dumping request data from the LRG database ..."
# Quote expansions so paths/credentials containing spaces don't word-split.
perl "${perldir}/dump_request_info.pl" -host "${host}" -user "${user}" -port "${port}" -dbname "${dbname}" -output_file "${output_file}" -verbose
echo "done!"
|
echo "I am in secondScript.sh file and the message from source.sh is: $MESSAGE"
|
<filename>comun/comun-dominio/src/main/java/com/ceiba/dominio/excepcion/ExcepcionTopeNumeroReservasFecha.java
package com.ceiba.dominio.excepcion;

/**
 * Domain exception thrown when the maximum number of reservations
 * allowed for a given date has been reached.
 */
public class ExcepcionTopeNumeroReservasFecha extends RuntimeException {

    private static final long serialVersionUID = 1L;

    /**
     * @param mensaje human-readable description of the violated limit
     */
    public ExcepcionTopeNumeroReservasFecha(String mensaje) {
        super(mensaje);
    }
}
|
#!/usr/bin/env bash
# Download and unpack the annotation CSVs for a Kinetics dataset.
# Usage: <script> <dataset>
#   dataset: kinetics400 | kinetics600 | kinetics700 | kinetics700_2020
DATASET=$1
# Validate the argument once via case (the old if-chain tested kinetics700
# twice and mixed $1 with $DATASET).
case "$DATASET" in
  kinetics400|kinetics600|kinetics700|kinetics700_2020)
    echo "We are processing $DATASET"
    ;;
  *)
    echo "Bad Argument, we only support kinetics400, kinetics600 or kinetics700 or kinetics700_2020"
    exit 1  # was `exit 0`, which reported success on a bad argument
    ;;
esac

DATA_DIR="../../../data/${DATASET}/annotations"

if [[ ! -d "${DATA_DIR}" ]]; then
  echo "${DATA_DIR} does not exist. Creating";
  mkdir -p "${DATA_DIR}"
fi

# Fetch the official annotation archive and normalize the CSV names.
wget "https://storage.googleapis.com/deepmind-media/Datasets/${DATASET}.tar.gz"
tar -zxvf "${DATASET}.tar.gz" --strip-components 1 -C "${DATA_DIR}/"

mv "${DATA_DIR}/train.csv" "${DATA_DIR}/kinetics_train.csv"
mv "${DATA_DIR}/validate.csv" "${DATA_DIR}/kinetics_val.csv"
mv "${DATA_DIR}/test.csv" "${DATA_DIR}/kinetics_test.csv"

rm "${DATASET}.tar.gz"
rm "${DATA_DIR}"/*.json
|
-- Add referential-integrity constraints between the sicem tables.
use sicem;

-- Access control: each permission row belongs to a user.
alter table Permisos
add foreign key (IDusuario) references Usuario(ID);

-- Staff: employees belong to a department.
alter table Empleado
add foreign key (DepartamentoID) references Departamentos(ID);

-- Product catalog and its price/cost/stock-entry history tables.
alter table Producto
add foreign key (CategoriaID) references Categoria(ID);
alter table HistorialPrecioProducto
add foreign key (ProductoID) references Producto(ID);
alter table HistorialCostoProducto
add foreign key (ProductoID) references Producto(ID);
alter table HistorialEntradaProducto
add foreign key (ProductoID) references Producto(ID);

-- Purchasing: purchases reference a supplier; line items reference both
-- the purchase header and the product.
alter table Compra
add foreign key (ProveedorID) references Proveedor(ID);
alter table Detalle_Compra
add foreign key (CompraID) references Compra(Id);
alter table Detalle_Compra
add foreign key (ProductoID) references Producto(ID);

-- Sales: sales reference a client; line items reference both the sale
-- header and the product.
alter table Venta
add foreign key (ClienteID) references Cliente(ID);
alter table Detalle_Venta
add foreign key (VentaID) references Venta(ID);
alter table Detalle_Venta
add foreign key (ProductoID) references Producto(ID);
#!/bin/bash
# Derive build metadata (version number + git branch/commit info) and export
# it as environment variables for an AWS CodeBuild build.

# Reuse a previously minted version if a `version` file exists; otherwise
# create a timestamp-based one and persist it for later build stages.
if [ -f version ]; then
  export VERSION=$(cat version)
else
  TIMESTAMP=$(date +%s)
  # ${...//[$'\t\r\n']} strips any stray whitespace from the timestamp.
  export VERSION="1.0.${TIMESTAMP//[$'\t\r\n']}"
  echo -n "$VERSION" > version
fi

export PROJECTNAME=ratelimit

# Branch name: prefer the symbolic ref; when HEAD is detached (as in
# CodeBuild checkouts) fall back to parsing `git branch -a --contains`.
export REPLICON_GIT_BRANCH="$(git symbolic-ref HEAD --short 2>/dev/null)"
if [ "$REPLICON_GIT_BRANCH" = "" ] ; then
  REPLICON_GIT_BRANCH="$(git branch -a --contains HEAD | sed -n 2p | awk '{ printf $1 }')";
  # Drop the "remotes/origin/" prefix from the remote-tracking name.
  export REPLICON_GIT_BRANCH=${REPLICON_GIT_BRANCH#remotes/origin/};
fi

# Sanitized variants: slashes replaced with dots, and a regex-escaped form
# for use inside sed patterns.
export REPLICON_GIT_CLEAN_BRANCH="$(echo $REPLICON_GIT_BRANCH | tr '/' '.')"
export REPLICON_GIT_ESCAPED_BRANCH="$(echo $REPLICON_GIT_CLEAN_BRANCH | sed -e 's/[]\/$*.^[]/\\\\&/g')"

# Latest commit metadata.
export REPLICON_GIT_MESSAGE="$(git log -1 --pretty=%B)"
export REPLICON_GIT_AUTHOR="$(git log -1 --pretty=%an)"
export REPLICON_GIT_AUTHOR_EMAIL="$(git log -1 --pretty=%ae)"

echo "==> AWS CodeBuild Extra Environment Variables:"
echo "==> REPLICON_GIT_AUTHOR = $REPLICON_GIT_AUTHOR"
echo "==> REPLICON_GIT_AUTHOR_EMAIL = $REPLICON_GIT_AUTHOR_EMAIL"
echo "==> REPLICON_GIT_BRANCH = $REPLICON_GIT_BRANCH"
echo "==> REPLICON_GIT_CLEAN_BRANCH = $REPLICON_GIT_CLEAN_BRANCH"
echo "==> REPLICON_GIT_ESCAPED_BRANCH = $REPLICON_GIT_ESCAPED_BRANCH"
echo "==> REPLICON_GIT_MESSAGE = $REPLICON_GIT_MESSAGE"
echo "==> VERSION = $VERSION"
/**
 * Returns the character of `str` at `index`, or the empty string when the
 * index is out of range (negative or past the end).
 */
function getCharAtIndex(str: string, index: number): string {
  const withinBounds = index >= 0 && index < str.length;
  return withinBounds ? str.charAt(index) : "";
}
console.log(getCharAtIndex("Hello World", 5));
Output:
" "
<reponame>ArthemCo/SpeculativeDesign
/**
 * Strips markup from an HTML string, returning only its text content.
 * Returns undefined when no DOM is available (server-side rendering).
 */
export function stripHTML(html) {
  // Server-side rendering has no DOM to parse with; bail out early.
  const hasDOM = typeof window !== 'undefined' && !!window.document
  if (!hasDOM) {
    return
  }
  // Let the browser parse the markup, then read back only the text nodes.
  const scratch = document.createElement('div')
  scratch.innerHTML = html
  return scratch.textContent || scratch.innerText || ''
}
|
#!/bin/bash
# Image provisioning: install Java 8 + Tomcat 8, deploy the BIRT 4.5.0 report
# viewer war, then overlay runtime files and clean apt caches.
set -x
set -e
apt-get update
# Java 8
apt-get install -y --no-install-recommends ca-certificates-java openjdk-8-jre-headless
# Tomcat 8
apt-get install -y --no-install-recommends tomcat8
# Birt manual war: download the runtime zip and drop its war into Tomcat.
mkdir -p /tmp/birt
wget http://ftp.fau.de/eclipse/birt/downloads/drops/R-R1-4_5_0-201506092134/birt-runtime-4.5.0-20150609.zip -O /tmp/birt/birt.zip
cd /tmp/birt
unzip birt.zip
mv birt-runtime-4_5_0/birt.war /var/lib/tomcat8/webapps
cd /
rm -rf /tmp/birt
# Cleanup tomcat: remove the default ROOT webapp.
rm -rf /var/lib/tomcat8/webapps/ROOT
# Run tomcat in the background so it explodes the birt war once.
/build/files/entrypoint.sh &>/dev/null &
tomcat_pid=$!
set +x
# Poll until the war has been deployed (web.xml appears in the exploded dir).
echo -n "Waiting for birt deployment ."
while [[ ! -f /var/lib/tomcat8/webapps/birt/WEB-INF/web.xml ]]; do
    sleep 0.7
    echo -n "."
done
echo -ne "\nDone\n"
set -x
kill $tomcat_pid
# The exploded directory stays; the war itself is no longer needed.
rm /var/lib/tomcat8/webapps/birt.war
# Copy runtime files
cp -frv /build/files/* / || true
source /usr/local/build_scripts/cleanup_apt.sh
|
import type {NextApiHandler, NextApiRequest, NextApiResponse} from 'next';
import {DefaultResponse} from '../types/DefaultResponse';
import NextCors from 'nextjs-cors';

/**
 * Higher-order wrapper for API routes: answers CORS (including the OPTIONS
 * preflight, with status 200) before delegating to the wrapped handler.
 */
const corsPolicy = (handler: NextApiHandler) =>
  async (req: NextApiRequest, res: NextApiResponse<DefaultResponse>) => {
    const corsOptions = {
      methods: ['GET', 'OPTIONS', 'PUT', 'POST', 'DELETE'],
      origin: '*',
      optionsSuccessStatus: 200,
    };
    await NextCors(req, res, corsOptions);
    return handler(req, res);
  };

export {corsPolicy}
export {corsPolicy} |
// Minimal arithmetic helpers grouped on a single object.
const mathObject = {
  // Sum of two numbers.
  add(a, b) {
    return a + b;
  },
  // Difference of two numbers (a minus b).
  subtract(a, b) {
    return a - b;
  },
};
import * as express from 'express'
import * as bodyParser from 'body-parser'
import { dialogflow, Permission, BasicCard, Image, Carousel, List} from 'actions-on-google'
import { DataHandler } from './Persistence/DataHandler'
import { utils } from './utils'
// Dialogflow app (debug logging enabled) and the persistence facade shared
// by all intent handlers below.
const app = dialogflow({debug: true})
const dataHandler = new DataHandler()
//#region Information Display
// InformationIntent and SuggestionInformationIntent_2 had byte-identical
// bodies, so both now register this single shared handler.
// Looks up information for the requested series/movie title and offers a
// follow-up about the cast; apologizes when no title was recognized.
const informationHandler = (conv: any, params: any) => {
    const input = params.Serie ? params.Serie : params.Film;
    // check if title is available
    if(input !== '') {
        const information = dataHandler.getInformation(input);
        conv.ask('Hier sind Informationen zu deiner Suche:\n' +information);
        conv.ask('Möchtest du noch mehr Informationen zu den Schauspielern erfahren?')
    }
    else {
        conv.ask('Leider habe ich den gewünschten Titel nicht gefunden. Probiere es mit einem anderen.')
    }
};
app.intent('InformationIntent', informationHandler);
app.intent('SuggestionInformationIntent_2', informationHandler);
//#endregion
//#region Actor Information Display
// The two follow-up intents below were identical except for the name of the
// conversation context that carries the original title parameters, so they
// share one handler factory parameterized by that context name.
// Reads the title from the given context and lists the cast as plain text.
const makeActorTextHandler = (contextName: string) => (conv: any, params: any) => {
    let resultString:string = '';
    let parameters = conv.contexts.get(contextName).parameters
    const input = parameters.Serie ? parameters.Serie : parameters.Film;
    //check if title is available
    if(input !== ''){
        let actors = dataHandler.getActors(input);
        actors.forEach(s => resultString = resultString + '\n' + s);
        conv.ask('Es spielen folgende Schauspieler mit: \n' +resultString);
    }
    else {
        conv.ask('Leider habe den gewünschten Titel nicht gefunden. Probiere es mit einem anderen.')
    }
};
app.intent('ActorInformationSuggestionIntent', makeActorTextHandler('suggestioninformationintent_2-followup'));
app.intent('ActorInformation-FollowupIntent', makeActorTextHandler('informationintent-followup'));
// Direct variant: title comes from the intent parameters and the cast is
// shown as a rich List instead of plain text.
app.intent('ActorInformationIntent', (conv, params) => {
    const input = params.Serie ? params.Serie : params.Film;
    if(input !== '') {
        let actors = dataHandler.getActors(input);
        conv.ask('Es spielen folgende Schauspieler mit:');
        //create list from received titles
        let list : List = utils.createList(actors);
        conv.ask(list);
    }
    else {
        conv.ask('Leider habe ich den gewünschten Titel nicht gefunden. Probiere es mit einem anderen.')
    }
});
//#endregion
//#region Genre Filter
// Suggests titles matching the requested genre as a carousel.
app.intent('GenreIntent', (conv, params) => {
    let genre = params.Genre;
    let result = dataHandler.getMoviesInGenre(genre);
    //check if genre could be found
    if(result.length !== 0) {
        conv.ask('Hier sind ein paar Vorschläge:')
        //create carousel from received titles
        let carousel : Carousel = utils.createCarousel(result);
        conv.ask(carousel);
    }
    else {
        conv.ask('Leider habe ich keinen Titel im gewünschten Genre gefunden. Probiere es mit einem anderen.');
    }
})
//#endregion
//#region Inspiration
// Suggests titles from the user's watchlist as a carousel.
app.intent('InspirationWatchlistIntent', (conv) => {
    let result = dataHandler.getMoviesInWatchlist();
    //check if watchlist was not empty
    if(result.length != 0) {
        conv.ask('Folgende Titel befinden sich in deiner Watchlist:')
        //create carousel from received titles
        let carousel : Carousel = utils.createCarousel(result);
        conv.ask(carousel);
    }
    else {
        conv.ask('Leider befinden sich momentan noch keine Titel in deiner Watchlist.');
    }
})
// Suggests the currently best-rated titles as a carousel.
app.intent('InspirationCurrentlyLikedIntent', (conv) => {
    let result = dataHandler.getMostLikedMovies();
    //check if movies could be found
    if(result.length != 0) {
        conv.ask('Titel mit den besten Bewertungen:')
        //create carousel from received titles
        let carousel : Carousel = utils.createCarousel(result);
        conv.ask(carousel);
    }
    else {
        conv.ask('Es ist ein Fehler aufgetreten. Keine Titel gefunden.')
    }
})
// Same data as InspirationCurrentlyLikedIntent but with a different error text.
app.intent('ShowCurrentlyLikedIntent', (conv, params) => {
    let result = dataHandler.getMostLikedMovies();
    //check if movies could be found
    if(result.length != 0) {
        conv.ask('Titel mit den besten Bewertungen:')
        //create carousel from received titles
        let carousel : Carousel = utils.createCarousel(result);
        conv.ask(carousel);
    }
    else {
        // NOTE(review): this error text mentions the watchlist although the
        // intent shows most-liked titles — looks copy-pasted; confirm wording.
        conv.ask('Leider befinden sich momentan keine Titel in deiner Watchlist.')
    }
})
//#endregion
//#region Watchlist
// Adds a title to the watchlist, rejecting duplicates.
app.intent('AppendWatchlistIntent', (conv, params) => {
    const input = params.Serie ? params.Serie : params.Film;
    //check if title is available
    if(input !== '') {
        //check if title is not yet in watchlist (addToWatchlist returns false for duplicates)
        if(dataHandler.addToWatchlist(input)){
            conv.ask('Ich habe '+input+' zu deiner Watchlist hinzugefügt.');
        }
        else {
            conv.ask(input+' befindet sich bereits in deiner Watchlist.');
        }
    }
    else {
        conv.ask('Leider habe ich diesen Titel nicht gefunden. Probiere es mit einem anderen.')
    }
})
// Shows the watchlist contents as a carousel.
app.intent('ShowWatchlistIntent', (conv, params) => {
    let result = dataHandler.getMoviesInWatchlist();
    //check if watchlist was not empty
    if(result.length != 0) {
        conv.ask('Folgende Titel befinden sich in deiner Watchlist:')
        //create carousel from received titles
        let carousel : Carousel = utils.createCarousel(result);
        conv.ask(carousel);
    }
    else {
        conv.ask('Leider befinden sich momentan keine Titel in deiner Watchlist.')
    }
})
//#endregion
//#region Play Title
// PlayTrailerIntent and PlayMovieIntent were identical except for the spoken
// intro line, so both now delegate to one card-producing handler factory.
// `intro` builds the spoken confirmation for the resolved title.
const makePlaybackHandler = (intro: (title: string) => string) => (conv: any, params: any) => {
    let title:string = params.Serie ? params.Serie.toString() : params.Film.toString();
    //check if title is available
    if(title !== '') {
        conv.ask(intro(title))
        //create Card View
        conv.ask(new BasicCard({
            title: title,
            image: new Image({
                url: 'https://images.assetsdelivery.com/compings_v2/4zevar/4zevar1509/4zevar150900035.jpg',
                alt: 'play_image'
            })
        }))
    }
    else {
        conv.ask('Leider habe ich den Titel nicht gefunden. Versuche es mit einem anderen.')
    }
};
app.intent('PlayTrailerIntent', makePlaybackHandler(title => 'Hier ist der Trailer zu '+title+':'));
app.intent('PlayMovieIntent', makePlaybackHandler(title => 'Okay, ich starte '+title+':'));
// Plays a specific episode of a series, validating the requested season and
// episode numbers against the catalog before showing the playback card.
app.intent('PlaySeriesIntent', (conv, params) => {
    //saving parameters
    let season = params.Season;
    let episode = params.number;
    let title:string = params.Serie.toString();
    //checking if title exists
    if(title !== '') {
        //checking if season and episodes exist -> returns array [true/false, maxSeasonNumber, maxEpisodeNumber]
        let returnArray = dataHandler.checkIfExistent(title, season, episode);
        //if seasons and episodes exist
        if(returnArray[0]){
            conv.ask('Okay, hier ist '+params.Season+', Folge '+params.number+' von '+title+':')
            conv.ask(new BasicCard({
                title: title,
                image: new Image({
                    url: 'https://images.assetsdelivery.com/compings_v2/4zevar/4zevar1509/4zevar150900035.jpg',
                    alt: 'play_image'
                })
            }))
        }
        //if season or episode does not exist
        else{
            //get maxNumbers for Output
            let maxSeasons = returnArray[1].toString();
            let maxEpisodes = returnArray[2].toString();
            conv.ask('Leider existiert die angegebene Staffel oder Episode nicht. ' + title + ' hat ' + maxSeasons +' Staffeln mit je '+maxEpisodes+' Folgen.')
        }
    }
    else {
        conv.ask('Leider habe ich den Titel nicht gefunden. Versuche es mit einem anderen.')
    }
})
//#endregion
app.fallback((conv) => {
const intent = conv.intent
conv.close('Ok, thanks!');
})
const expressApp = express().use(bodyParser.json())
expressApp.post('/fulfillments', app)
expressApp.listen(3000) |
<gh_stars>100-1000
//#region IMPORTS
import type Pose from '../../armature/Pose';
import type { IKChain, IKLink } from '../rigs/IKChain';
import type { ISolver } from './support/ISolver';
import type { IKData } from '..';
import { Transform } from '../../maths';
import { vec3, quat } from 'gl-matrix';
import QuatUtil from '../../maths/QuatUtil';
//#endregion
/**
 * IK solver that aims the first bone of a chain with a swing rotation toward
 * the effector direction, then applies a twist rotation to align the bone's
 * pole direction. The target may be given either as a world position or as a
 * direction from the chain origin.
 */
class SwingTwistSolver implements ISolver{
    //#region TARGETTING DATA
    _isTarPosition : boolean = false;   // Is the Target a Position or a Direction?
    _originPoleDir : vec3 = [ 0, 0, 0 ]; // Pole gets updated based on effector direction, so keep originally set dir to compute the orthogonal poleDir
    effectorScale  : number = 1;
    effectorPos    : vec3 = [ 0, 0, 0 ]; // IK Target can be a Position or...
    effectorDir    : vec3 = [ 0, 0, 1 ]; // Direction. BUT if its position, need to compute dir from chain origin position.
    poleDir        : vec3 = [ 0, 1, 0 ]; // Direction that handles the twisitng rotation
    orthoDir       : vec3 = [ 1, 0, 0 ]; // Direction that handles the bending direction, like elbow/knees.
    originPos      : vec3 = [ 0, 0, 0 ]; // Starting World Position of the Chain
    //#endregion

    /**
     * Seeds the target from the chain's first link as posed: transforms the
     * link's bind-space effector/pole directions by the bone's world rotation.
     */
    initData( pose?: Pose, chain?: IKChain ): this{
        if( pose && chain ){
            // If init pose is the same used for binding, this should recreate the WORLD SPACE Pole Direction just fine
            const lnk: IKLink = chain.links[ 0 ];
            const rot: quat   = pose.bones[ lnk.idx ].world.rot;

            const eff  : vec3 = vec3.transformQuat( [0,0,0], lnk.effectorDir, rot );
            const pole : vec3 = vec3.transformQuat( [0,0,0], lnk.poleDir, rot );

            this.setTargetDir( eff, pole );
            //this.setTargetPos( chain.getTailPosition( pose ), pole );
        }
        return this;
    }

    //#region SETTING TARGET DATA
    // Target as a direction; optional pole and scale applied when provided.
    setTargetDir( e: vec3, pole ?: vec3, effectorScale ?: number ): this{
        this._isTarPosition     = false;
        this.effectorDir[ 0 ]   = e[ 0 ];
        this.effectorDir[ 1 ]   = e[ 1 ];
        this.effectorDir[ 2 ]   = e[ 2 ];
        if( pole ) this.setTargetPole( pole );

        if( effectorScale ) this.effectorScale = effectorScale;
        return this;
    }

    // Target as a world position; the direction is derived later in _update().
    setTargetPos( v: vec3, pole ?: vec3 ): this{
        this._isTarPosition     = true;
        this.effectorPos[ 0 ]   = v[ 0 ];
        this.effectorPos[ 1 ]   = v[ 1 ];
        this.effectorPos[ 2 ]   = v[ 2 ];
        if( pole ) this.setTargetPole( pole );
        return this;
    }

    // Stores the original pole direction; _update() re-orthogonalizes it.
    setTargetPole( v: vec3 ): this{
        this._originPoleDir[ 0 ] = v[ 0 ];
        this._originPoleDir[ 1 ] = v[ 1 ];
        this._originPoleDir[ 2 ] = v[ 2 ];
        return this;
    }
    //#endregion

    /** Solves the first link's world rotation and writes it back as local. */
    resolve( chain: IKChain, pose: Pose, debug?:any ): void{
        const [ rot, pt ] = this.getWorldRot( chain, pose, debug );

        QuatUtil.pmulInvert( rot, rot, pt.rot );        // To Local Space
        pose.setLocalRot( chain.links[ 0 ].idx, rot );  // Save to Pose
    }

    /** Extracts normalized effector/pole directions for the posed first bone. */
    ikDataFromPose( chain: IKChain, pose: Pose, out: IKData.Dir ): void{
        const dir: vec3 = [0,0,0]; //new Vec3();
        const lnk      = chain.first();
        const b        = pose.bones[ lnk.idx ];

        // Alt Effector
        vec3.transformQuat( dir, lnk.effectorDir, b.world.rot );
        vec3.normalize( out.effectorDir, dir );

        // Alt Pole
        vec3.transformQuat( dir, lnk.poleDir, b.world.rot );
        vec3.normalize( out.poleDir, dir );
    }

    /** Update Target Data */
    _update( origin: vec3 ): void{
        const v: vec3 = [0,0,0];
        const o: vec3 = [0,0,0];

        //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        // Compute the Effector Direction if only given effector position
        if( this._isTarPosition ){
            vec3.sub( v, this.effectorPos, origin );    // Forward Axis Z
            vec3.normalize( this.effectorDir, v );
        }

        //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        // Left axis X - Only needed to make pole orthogonal to effector
        vec3.cross( v, this._originPoleDir, this.effectorDir );
        vec3.normalize( this.orthoDir, v );

        // Up Axis Y
        vec3.cross( v, this.effectorDir, this.orthoDir );
        vec3.normalize( this.poleDir, v );

        //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        vec3.copy( this.originPos, origin );
    }

    /**
     * Computes the first bone's solved WORLD rotation plus its parent world
     * transform (needed by resolve() to convert back to local space).
     */
    getWorldRot( chain: IKChain, pose: Pose, debug?:any ) : [ quat, Transform ]{
        //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        const pt    = new Transform();
        const ct    = new Transform();
        let   lnk   = chain.first();

        // Get the Starting Transform
        if( lnk.pidx == -1 )    pt.copy( pose.offset );
        else                    pose.getWorldTransform( lnk.pidx, pt );

        ct.fromMul( pt, lnk.bind );     // Get Bone's BindPose position in relation to this pose
        this._update( ct.pos );         // Update Data to use new Origin.

        //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        const rot : quat = quat.copy( [0,0,0,1], ct.rot );
        const dir : vec3 = [0,0,0];
        const q   : quat = [0,0,0,1];

        //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        // Swing
        vec3.transformQuat( dir, lnk.effectorDir, ct.rot ); // Get WS Binding Effector Direction of the Bone
        quat.rotationTo( q, dir, this.effectorDir );        // Rotation TO IK Effector Direction
        quat.mul( rot, q, rot );                            // Apply to Bone WS Rot

        //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        // Twist — skipped when the computed pole is degenerate (near-zero).
        if( vec3.sqrLen( this.poleDir ) > 0.0001 ){
            vec3.transformQuat( dir, lnk.poleDir, rot );    // Get WS Binding Pole Direction of the Bone
            quat.rotationTo( q, dir, this.poleDir );        // Rotation to IK Pole Direction
            quat.mul( rot, q, rot );                        // Apply to Bone WS Rot + Swing
        }

        //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        // Kinda Hacky putting this here, but its the only time where there is access to chain's length for all extending solvers.
        // So if not using a TargetPosition, means we're using Direction then we have to compute the effectorPos.
        if( !this._isTarPosition ){
            this.effectorPos[ 0 ] = this.originPos[ 0 ] + this.effectorDir[ 0 ] * chain.length * this.effectorScale;
            this.effectorPos[ 1 ] = this.originPos[ 1 ] + this.effectorDir[ 1 ] * chain.length * this.effectorScale;
            this.effectorPos[ 2 ] = this.originPos[ 2 ] + this.effectorDir[ 2 ] * chain.length * this.effectorScale;
        }

        //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        return [ rot, pt ];
    }
}

export default SwingTwistSolver;
pm2 start ./bin/www --name "saltvis" --node-args="--max_old_space_size=8192" |
import 'rxjs/add/observable/from';
import 'rxjs/add/observable/combineLatest';
import 'rxjs/add/operator/combineLatest';
import 'rxjs/add/operator/withLatestFrom';
import 'rxjs/add/operator/switchMap';
import 'rxjs/add/operator/merge';
import 'rxjs/add/operator/map';
import 'rxjs/add/operator/startWith';
import 'rxjs/add/operator/pluck';
import 'rxjs/add/operator/distinctUntilChanged';
import 'rxjs/add/operator/filter';
import 'rxjs/add/operator/do';
|
#!/bin/bash
# Add the WebStorm launcher directory to PATH via ~/.bashrc.
# Fixes two issues with the old one-liner:
#  - $PATH/$HOME expanded at write time, baking a PATH snapshot into .bashrc;
#    escaping keeps the literal text so it expands when .bashrc is sourced.
#  - Re-running the script appended duplicate lines; grep guards against that.
line="PATH=\$PATH:\$HOME/WebStorm-202.7319.49/bin/"
grep -qxF "$line" ~/.bashrc || echo "$line" >> ~/.bashrc
|
#!/bin/bash
# Integration tests for Analytics Zoo Scala examples. Each numbered section
# downloads its data/models from $FTP_URI on first run, executes the example
# through spark-shell-with-zoo.sh, and reports the wall-clock time taken.
export SPARK_HOME=$SPARK_HOME
export MASTER=local[4]
export FTP_URI=$FTP_URI
export ANALYTICS_ZOO_ROOT=$ANALYTICS_ZOO_ROOT
export ANALYTICS_ZOO_HOME=$ANALYTICS_ZOO_ROOT/dist
export ANALYTICS_ZOO_JAR=`find ${ANALYTICS_ZOO_HOME}/lib -type f -name "analytics-zoo*jar-with-dependencies.jar"`

set -e

echo "#1 start example test for tfnet"
# Fetch COCO images and the frozen TensorFlow model unless already cached.
if [ -d analytics-zoo-data/data/object-detection-coco ]
then
    echo "analytics-zoo-data/data/object-detection-coco already exists"
else
    wget $FTP_URI/analytics-zoo-data/data/object-detection-coco.zip -P analytics-zoo-data/data/
    unzip -q analytics-zoo-data/data/object-detection-coco.zip -d analytics-zoo-data/data/
fi
if [ -d analytics-zoo-models/tfnet ]
then
    echo "analytics-zoo-model/tfnet already exists"
else
    wget $FTP_URI/analytics-zoo-models/tfnet/tfnet.zip -P analytics-zoo-models/tfnet/
    unzip -q analytics-zoo-models/tfnet/tfnet.zip -d analytics-zoo-models/tfnet/
fi

#timer
start=$(date "+%s")
bash ${ANALYTICS_ZOO_HOME}/bin/spark-shell-with-zoo.sh \
    --master $MASTER \
    --conf spark.executor.cores=1 \
    --total-executor-cores 4 \
    --class com.intel.analytics.zoo.examples.tfnet.Predict \
    --image analytics-zoo-data/data/object-detection-coco \
    --model analytics-zoo-models/tfnet/tfnet/frozen_inference_graph.pb \
    --partition 4
now=$(date "+%s")
time1=$((now-start))
echo "#1 Tfnet time used:$time1 seconds"
echo "#2 start example test for LocalEstimator"
# Fetch MNIST, CIFAR-10, and the saved transfer-learning model if missing.
if [ -d analytics-zoo-data/data/mnist ]
then
    echo "analytics-zoo-data/data/mnist already exists"
else
    wget $FTP_URI/analytics-zoo-data/data/mnist.zip -P analytics-zoo-data/data
    unzip -q analytics-zoo-data/data/mnist.zip -d analytics-zoo-data/data/
fi
if [ -d analytics-zoo-data/data/cifar10 ];then
    echo "analytics-zoo-data/data/cifar10 already exists"
else
    wget $FTP_URI/analytics-zoo-data/data/cifar10.zip -P analytics-zoo-data/data
    unzip -q analytics-zoo-data/data/cifar10.zip -d analytics-zoo-data/data/
fi
if [ -d analytics-zoo-models/localestimator/saved_model4 ];then
    echo "analytics-zoo-models/localestimator/saved_model4 already exists"
else
    wget $FTP_URI/analytics-zoo-models/localestimator/saved_model4.zip -P analytics-zoo-models/localestimator
    unzip -q analytics-zoo-models/localestimator/saved_model4.zip -d analytics-zoo-models/localestimator/
fi

echo "##2.1 LenetEstimator testing"
#timer
start=$(date "+%s")
${ANALYTICS_ZOO_HOME}/bin/spark-shell-with-zoo.sh \
    --master ${MASTER} \
    --driver-memory 20g \
    --executor-memory 20g \
    --class com.intel.analytics.zoo.examples.localEstimator.LenetLocalEstimator \
    -d analytics-zoo-data/data/mnist -b 128 -e 1 -t 4
now=$(date "+%s")
time2=$((now-start))
echo "#2.1 LocalEstimator:LenetEstimator time used:$time2 seconds"

echo "##2.2 ResnetEstimator testing"
#timer
start=$(date "+%s")
${ANALYTICS_ZOO_HOME}/bin/spark-shell-with-zoo.sh \
    --master ${MASTER} \
    --driver-memory 20g \
    --executor-memory 20g \
    --class com.intel.analytics.zoo.examples.localEstimator.ResnetLocalEstimator \
    -d analytics-zoo-data/data/cifar10 -b 128 -e 1 -t 4
now=$(date "+%s")
time3=$((now-start))
echo "#2.2 LocalEstimator:ResnetEstimator time used:$time3 seconds"

echo "##2.3 TransferLearning testing"
#timer
start=$(date "+%s")
${ANALYTICS_ZOO_HOME}/bin/spark-shell-with-zoo.sh \
    --master ${MASTER} \
    --driver-memory 20g \
    --executor-memory 20g \
    --class com.intel.analytics.zoo.examples.localEstimator.TransferLearning \
    -d analytics-zoo-data/data/cifar10 \
    -m analytics-zoo-models/localestimator/saved_model4 \
    -i "resnet50_input:0" -o "resnet50/activation_48/Relu:0" -b 132 -e 20 -t 10
now=$(date "+%s")
time4=$((now-start))
echo "#2.3 LocalEstimator:TransferLearning time used:$time4 seconds"
echo "#3 start example test for Streaming Test"
echo "#3.1 start example test for streaming Object Detection"
if [ -d analytics-zoo-data/data/object-detection-coco ];then
    echo "analytics-zoo-data/data/object-detection-coco already exists"
else
    wget $FTP_URI/analytics-zoo-data/data/object-detection-coco.zip -P analytics-zoo-data/data
    unzip -q analytics-zoo-data/data/object-detection-coco.zip -d analytics-zoo-data/data/
fi
if [ -f analytics-zoo-models/object-detection/analytics-zoo_ssd-vgg16-300x300_COCO_0.1.0.model ];then
    echo "analytics-zoo-models/object-detection/analytics-zoo_ssd-vgg16-300x300_COCO_0.1.0.model already exists"
else
    wget ${FTP_URI}/analytics-zoo-models/object-detection/analytics-zoo_ssd-vgg16-300x300_COCO_0.1.0.model -P analytics-zoo-models/object-detection/
fi
mkdir output
mkdir stream
#timer
start=$(date "+%s")
# Detector runs in the background consuming image-path files from ./stream;
# ImagePathWriter then publishes the COCO image paths into that directory.
${ANALYTICS_ZOO_HOME}/bin/spark-shell-with-zoo.sh \
    --master local[20] \
    --driver-memory 4g \
    --executor-memory 5g \
    --class com.intel.analytics.zoo.examples.streaming.objectdetection.StreamingObjectDetection \
    --streamingPath ./stream --model analytics-zoo-models/object-detection/analytics-zoo_ssd-vgg16-300x300_COCO_0.1.0.model \
    --output ./output > 1.log &
${ANALYTICS_ZOO_HOME}/bin/spark-shell-with-zoo.sh \
    --master local[2] \
    --driver-memory 2g \
    --executor-memory 5g \
    --class com.intel.analytics.zoo.examples.streaming.objectdetection.ImagePathWriter \
    --imageSourcePath analytics-zoo-data/data/object-detection-coco --streamingPath ./stream
# Duplicating 0.txt doubles the stream input, so completion is reached once
# the output count catches up with 2x the source-image count.
while true
do
    cp ./stream/0.txt ./stream/1.txt
    temp1=$(find analytics-zoo-data/data/object-detection-coco -type f|wc -l)
    temp2=$(find ./output -type f|wc -l)
    temp3=$(($temp1+$temp1))
    if [ $temp3 -le $temp2 ];then
        kill -9 $(ps -ef | grep StreamingObjectDetection | grep -v grep |awk '{print $2}')
        rm -r output
        rm -r stream
        rm 1.log
        echo "Finished streaming"
        break
    fi
done
now=$(date "+%s")
time5=$((now-start))
echo "#3.1 Streaming:Object Detection time used:$time5 seconds"

echo "#3.2 start example test for streaming Text Classification"
if [ -d analytics-zoo-data/data/streaming/text-model ]
then
    echo "analytics-zoo-data/data/streaming/text-model already exists"
else
    wget $FTP_URI/analytics-zoo-data/data/streaming/text-model.zip -P analytics-zoo-data/data/streaming/
    unzip -q analytics-zoo-data/data/streaming/text-model.zip -d analytics-zoo-data/data/streaming/
fi
#timer
start=$(date "+%s")
${ANALYTICS_ZOO_HOME}/bin/spark-shell-with-zoo.sh \
    --master ${MASTER} \
    --driver-memory 2g \
    --executor-memory 5g \
    --class com.intel.analytics.zoo.examples.streaming.textclassification.StreamingTextClassification \
    --model analytics-zoo-data/data/streaming/text-model/text_classifier.model \
    --indexPath analytics-zoo-data/data/streaming/text-model/word_index.txt \
    --inputFile analytics-zoo-data/data/streaming/text-model/textfile/ > 1.log &
# Keep feeding a sample sentence until the classifier logs a "top-5" result.
while :
do
    echo "I am strong and I am smart" >> analytics-zoo-data/data/streaming/text-model/textfile/s
    if [ -n "$(grep "top-5" 1.log)" ];then
        echo "----Find-----"
        kill -9 $(ps -ef | grep StreamingTextClassification | grep -v grep |awk '{print $2}')
        rm 1.log
        sleep 1s
        break
    fi
done
now=$(date "+%s")
time6=$((now-start))
echo "#3.2 Streaming:Text Classification time used:$time6 seconds"
echo "#4 start example test for chatbot"
# Fetch the short chatbot training corpus unless already cached.
if [ -d analytics-zoo-data/data/chatbot_short ]
then
    # Message fixed: it previously reported the unrelated
    # object-detection-coco dataset instead of chatbot_short.
    echo "analytics-zoo-data/data/chatbot_short already exists"
else
    wget $FTP_URI/analytics-zoo-data/data/chatbot_short.zip -P analytics-zoo-data/data
    unzip analytics-zoo-data/data/chatbot_short.zip -d analytics-zoo-data/data/
fi
#timer
start=$(date "+%s")
${ANALYTICS_ZOO_HOME}/bin/spark-shell-with-zoo.sh \
    --master ${MASTER} \
    --driver-memory 20g \
    --executor-memory 20g \
    --class com.intel.analytics.zoo.examples.chatbot.Train \
    -f analytics-zoo-data/data/chatbot_short/ -b 32 -e 2
now=$(date "+%s")
time7=$((now-start))
echo "#4 Chatbot time used:$time7 seconds"
echo "#5 start example test for resnet training"
# Trains ResNet-50 on an ImageNet copy stored on the test cluster's HDFS.
#timer
start=$(date "+%s")
${ANALYTICS_ZOO_HOME}/bin/spark-shell-with-zoo.sh \
    --master ${MASTER} \
    --executor-cores 4 --total-executor-cores 4 \
    --driver-memory 50g \
    --class com.intel.analytics.zoo.examples.resnet.TrainImageNet \
    -f hdfs://172.168.2.181:9000/imagenet-zl \
    --batchSize 32 --nEpochs 2 --learningRate 0.1 --warmupEpoch 1 \
    --maxLr 3.2 --cache /cache --depth 50 --classes 1000
now=$(date "+%s")
time8=$((now-start))
echo "#5 Resnet time used:$time8 seconds"
echo "#6 start example test for vnni"
# Fetch the ImageNet validation set, the OpenCV native libs, and both the
# OpenVINO checkpoint and the BigDL int8 model unless already cached.
if [ -d analytics-zoo-data/data/imagenet_val ]
then
    echo "analytics-zoo-data/data/imagenet_val already exists"
else
    wget $FTP_URI/analytics-zoo-data/data/imagenet_val.zip -P analytics-zoo-data/data/
    unzip -q analytics-zoo-data/data/imagenet_val.zip -d analytics-zoo-data/data/
fi
if [ -d analytics-zoo-data/data/opencvlib/lib ]
then
    echo "analytics-zoo-data/data/opencvlib/lib already exists"
else
    wget $FTP_URI/analytics-zoo-data/data/opencvlib/opencv_4.0.0_ubuntu_lib.tar -P analytics-zoo-data/data/opencvlib/
    tar -xvf analytics-zoo-data/data/opencvlib/opencv_4.0.0_ubuntu_lib.tar -C analytics-zoo-data/data/opencvlib/
fi
if [ -f analytics-zoo-models/openVINO_model/resnet_v1_50.ckpt ]
then
    # Message fixed: it previously named flink_model although the check (and
    # download target) is the openVINO_model directory.
    echo "analytics-zoo-models/openVINO_model/resnet_v1_50.ckpt already exists"
else
    wget ${FTP_URI}/analytics-zoo-models/flink_model/resnet_v1_50.ckpt -P analytics-zoo-models/openVINO_model/
fi
if [ -f analytics-zoo-models/bigdl_model/analytics-zoo_resnet-50-int8_imagenet_0.5.0.model ]
then
    echo "analytics-zoo-models/bigdl_model/analytics-zoo_resnet-50-int8_imagenet_0.5.0.model already exists"
else
    wget ${FTP_URI}/analytics-zoo-models/bigdl_model/analytics-zoo_resnet-50-int8_imagenet_0.5.0.model -P analytics-zoo-models/bigdl_model/
fi
echo "#6.1 start OpenVINO Int8 Resnet example"
#timer
start=$(date "+%s")
echo "Prepare model and data"
# Converts the TF checkpoint into OpenVINO IR (.xml/.bin) files.
java -cp ${ANALYTICS_ZOO_JAR}:${SPARK_HOME}/jars/* \
    com.intel.analytics.zoo.examples.vnni.openvino.PrepareOpenVINOResNet \
    -m analytics-zoo-models/openVINO_model \
    -v analytics-zoo-data/data/imagenet_val -l analytics-zoo-data/data/opencvlib/lib
echo "OpenVINO Perf"
# Throughput benchmark: first locally via plain java, then on Spark.
java -cp ${ANALYTICS_ZOO_JAR}:${SPARK_HOME}/jars/* \
    com.intel.analytics.zoo.examples.vnni.openvino.Perf \
    -m analytics-zoo-models/openVINO_model/resnet_v1_50_inference_graph.xml \
    -w analytics-zoo-models/openVINO_model/resnet_v1_50_inference_graph.bin
${ANALYTICS_ZOO_HOME}/bin/spark-shell-with-zoo.sh \
    --master ${MASTER} --driver-memory 4g \
    --class com.intel.analytics.zoo.examples.vnni.openvino.Perf \
    -m analytics-zoo-models/openVINO_model/resnet_v1_50_inference_graph.xml \
    -w analytics-zoo-models/openVINO_model/resnet_v1_50_inference_graph.bin --onSpark
echo "OpenVINO ImageNetEvaluation"
# Accuracy evaluation against an ImageNet sequence file on the cluster HDFS.
${ANALYTICS_ZOO_HOME}/bin/spark-shell-with-zoo.sh \
    --master ${MASTER} --driver-memory 100g \
    --class com.intel.analytics.zoo.examples.vnni.openvino.ImageNetEvaluation \
    -f hdfs://172.168.2.181:9000/imagenet-zl/val/imagenet-seq-0_0.seq \
    -m analytics-zoo-models/openVINO_model/resnet_v1_50_inference_graph.xml \
    -w analytics-zoo-models/openVINO_model/resnet_v1_50_inference_graph.bin
echo "OpenVINO Predict"
# Single-folder prediction smoke test.
${ANALYTICS_ZOO_HOME}/bin/spark-shell-with-zoo.sh \
    --master ${MASTER} --driver-memory 10g \
    --class com.intel.analytics.zoo.examples.vnni.openvino.Predict \
    -f zoo/src/test/resources/imagenet/n04370456/ \
    -m analytics-zoo-models/openVINO_model/resnet_v1_50_inference_graph.xml \
    -w analytics-zoo-models/openVINO_model/resnet_v1_50_inference_graph.bin
now=$(date "+%s")
time9=$((now-start))
echo "#6.1 OpenVINO Resnet time used:$time9 seconds"
echo "#6.2 start BigDL Resnet example"
#timer
start=$(date "+%s")
echo "BigDL Perf"
# Local throughput benchmark of the int8 BigDL model: batch 64, 20 iterations.
java -cp ${ANALYTICS_ZOO_JAR}:${SPARK_HOME}/jars/* \
com.intel.analytics.zoo.examples.vnni.bigdl.Perf \
-m analytics-zoo-models/bigdl_model/analytics-zoo_resnet-50-int8_imagenet_0.5.0.model \
-b 64 -i 20
echo "BigDL ImageNetEvaluation"
# Accuracy evaluation over the HDFS ImageNet sequence file.
${ANALYTICS_ZOO_HOME}/bin/spark-shell-with-zoo.sh \
--master ${MASTER} \
--class com.intel.analytics.zoo.examples.vnni.bigdl.ImageNetEvaluation \
-f hdfs://172.168.2.181:9000/imagenet-zl/val/imagenet-seq-0_0.seq \
-m analytics-zoo-models/bigdl_model/analytics-zoo_resnet-50-int8_imagenet_0.5.0.model
echo "BigDL Predict"
# Prediction smoke test on bundled sample images.
${ANALYTICS_ZOO_HOME}/bin/spark-shell-with-zoo.sh \
--master ${MASTER} \
--class com.intel.analytics.zoo.examples.vnni.bigdl.Predict \
-f zoo/src/test/resources/imagenet/n04370456/ \
-m analytics-zoo-models/bigdl_model/analytics-zoo_resnet-50-int8_imagenet_0.5.0.model
# Stop the BigDL timer and report elapsed seconds.
now=$(date "+%s")
time10=$((now-start))
echo "#6.2 BigDL Resnet time used:$time10 seconds"
# Final summary: one line per example with its elapsed time.
# Wording normalized so every line follows the "<name> time used:<seconds>" pattern
# (line #2.3 was missing the word "time"; #4 is capitalized to match line 1891).
echo "Scala Examples"
echo "#1 tfnet time used:$time1 seconds"
echo "#2.1 LocalEstimator:LenetEstimator time used:$time2 seconds"
echo "#2.2 LocalEstimator:ResnetEstimator time used:$time3 seconds"
echo "#2.3 LocalEstimator:TransferLearning time used:$time4 seconds"
echo "#3.1 Streaming:Object Detection time used:$time5 seconds"
echo "#3.2 Streaming:Text Classification time used:$time6 seconds"
echo "#4 Chatbot time used:$time7 seconds"
echo "#5 Resnet time used:$time8 seconds"
echo "#6.1 OpenVINO Resnet time used:$time9 seconds"
echo "#6.2 BigDL Resnet time used:$time10 seconds"
|
// JSS style factory for a full-viewport landing/hero section.
// Receives the Material-UI theme (palette, breakpoints, spacing) and returns
// the style rules. NOTE: rule order matters in JSS output, so rules must not
// be reordered.
export default ({ palette, breakpoints, spacing }) => ({
// Root wrapper: fills the theme's full width and anchors the absolutely
// positioned background image.
container: {
margin: 'auto',
width: spacing.fullWidth,
background: palette.secondary['500'],
position: 'relative',
},
// Content layer sitting above the background image; capped on large screens.
foreground: {
height: spacing.fullViewportHeight,
width: spacing.fullWidth,
zIndex: 0,
margin: 'auto',
[breakpoints.up('lg')]: {
minWidth: 1024,
maxWidth: 1280,
height: '90vh',
},
},
sloganContainer: {
[breakpoints.down('sm')]: {
textAlign: 'center',
},
},
// Dimmed cover image stacked behind the foreground (zIndex -1).
backgroundImg: {
position: 'absolute',
width: spacing.fullWidth,
height: spacing.fullViewportHeight,
top: 0,
left: 0,
margin: 'auto',
filter: 'opacity(0.7)',
zIndex: -1,
objectFit: 'cover',
[breakpoints.up('lg')]: {
height: '90vh',
},
},
// Main headline; scales up sharply on medium+ screens.
mainLine: {
cursor: 'default',
color: palette.white,
fontSize: '3em',
[breakpoints.up('md')]: {
fontSize: '7em',
},
},
sloganText: {
cursor: 'default',
fontSize: '2rem',
fontWeight: '200',
color: palette.white,
textAlign: 'center',
zIndex: 2,
[breakpoints.up('sm')]: {
fontSize: '2.5rem',
},
},
// Sign-up/contact form: white input/label text over the dark hero, with the
// underline (::before/::after) recolored to the primary palette. The nested
// '& div' / '& button' selectors make the field and button full-width on
// small screens.
formInput: {
flexGrow: 0,
margin: 'auto',
zIndex: 2,
'& input': {
color: palette.white,
},
'& label': {
color: palette.white,
},
'& div': {
[breakpoints.down('sm')]: {
width: '80vw',
},
},
'& div::before': {
backgroundColor: palette.primary['500'],
},
'& div::after': {
backgroundColor: palette.primary['500'],
color: palette.primary['500'],
},
'& button': {
[breakpoints.down('sm')]: {
width: '80vw',
},
[breakpoints.up('sm')]: {
marginLeft: '2em',
},
},
[breakpoints.up('sm')]: {
flexGrow: 1,
textAlign: 'center',
},
},
textField: {
[breakpoints.down('sm')]: {
marginTop: '5em',
paddingBottom: '1em',
},
[breakpoints.up('sm')]: {
marginTop: '7em',
width: '30vh',
},
},
// Submit button in its disabled state: washed-out primary color.
buttonSend: {
'&[disabled]': {
backgroundColor: 'rgba(224,106,78,.25)',
color: 'rgba(255,255,255,.25)',
},
},
sendIcon: {
marginLeft: '0.5em',
fontSize: '1.5em',
},
// Empty rule needed so `checked` can reference it via the $bar selector.
bar: {},
checked: {
color: palette.primary[500],
'& + $bar': {
backgroundColor: palette.primary[500],
},
},
privateOption: {
textAlign: 'end',
fontSize: '14px',
color: 'white',
},
});
|
<reponame>EelaiWind/AIPoem
package ai.exception;
public class BopomofoException extends Exception{
/**
* 處理從教育部國語辭典讀取注音時遇到例外情況
* 1. BopomofoException(String word) :
* 找不到某個詞的注音
* 2. BopomofoException (String word,int wordLength, int bopomofoLength)
* 注音跟詞的長度不符合,通常是有標點符號或是是有"ㄦ"出現時
*/
private static final long serialVersionUID = 1L;
public BopomofoException(String word){
System.err.println("error : Can't find word("+word+") and its bopomofo");
}
public BopomofoException (String word,int wordLength, int bopomofoLength){
System.err.println("error : Bopomofo's length("+bopomofoLength+") is diffferent from word's("+word+") length("+wordLength+")");
}
}
|
#!/bin/bash
# +-----------------+
# | npm postinstall |
# | @bugsounet |
# +-----------------+

# get the installer directory
# Resolves the directory containing this script, following any chain of
# symlinks so relative copies below work no matter how the script is invoked.
Installer_get_current_dir () {
SOURCE="${BASH_SOURCE[0]}"
while [ -h "$SOURCE" ]; do
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
SOURCE="$(readlink "$SOURCE")"
# A relative symlink target is resolved against the directory of the link itself.
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE"
done
echo "$( cd -P "$( dirname "$SOURCE" )" && pwd )"
}
Installer_dir="$(Installer_get_current_dir)"
# move to installer directory
cd "$Installer_dir"
# utils.sh provides the Installer_info / Installer_success / Installer_warning helpers.
source utils.sh
# module name
Installer_module="IronManGA"
# Read the module version from the package.json one level up.
Version="$(node -p -e "require('./../package.json').version")"
Installer_info "Welcome to $Installer_module v$Version"
echo
Installer_info "Installing all Needed Files..."
echo
# Copy assets into the sibling MagicMirror modules; paths are relative to this
# module's installer directory (../../ is the MagicMirror modules folder).
Installer_info "Copying directory 'IronManGA' to MMM-GoogleAssistant resources directory"
cp -Rf ../IronManGA ../../MMM-GoogleAssistant/resources && Installer_success "Done"
Installer_info "Copying directory 'IronManGABA' to MMM-GABackground directory"
cp -Rf ../IronManGABA ../../MMM-GABackground && Installer_success "Done"
Installer_info "Copy 'with-IronMan.js' to MMM-GoogleAssistant recipes directory"
cp -f with-IronMan.js ../../MMM-GoogleAssistant/recipes && Installer_success "Done"
echo
Installer_success "$Installer_module is now installed !"
echo
Installer_warning "Don't forget:"
Installer_warning "1) You have to merge your custom.css file with IronMan.css file"
Installer_warning "2) You have to define 'with-IronMan.js' recipe in GoogleAssistant configuration"
Installer_warning "See Readme file for more informations"
echo
|
package com.jinjunhang.contract.controller;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.NavUtils;
import android.support.v7.app.AppCompatActivity;
import android.util.TypedValue;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.TextView;

import com.jinjunhang.contract.R;
import com.jinjunhang.contract.model.OrderPurchaseInfo;
import com.jinjunhang.contract.model.OrderPurchaseItem;

import java.util.Locale;
/**
* Created by lzn on 16/3/24.
*/
/**
 * List fragment showing the payment (付款) records of an order.
 *
 * The payment data is supplied by the hosting activity through the
 * {@link #EXTRA_ORDER_FUKUANG} intent extra as a serialized
 * {@link OrderPurchaseInfo}.
 */
public class OrderFukuangFragment extends android.support.v4.app.ListFragment {
    /** Intent extra key under which the caller supplies the OrderPurchaseInfo. */
    public final static String EXTRA_ORDER_FUKUANG = "orderFukuang";

    // Payment records to display; populated from the hosting activity's intent.
    private OrderPurchaseInfo mFukuangInfo;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        Intent i = getActivity().getIntent();
        mFukuangInfo = (OrderPurchaseInfo) i.getSerializableExtra(EXTRA_ORDER_FUKUANG);
        FukuangAdpter adpter = new FukuangAdpter(mFukuangInfo);
        setListAdapter(adpter);
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        // When there are no payment records, repurpose the loading footer as a
        // static "no data found" message (spinner hidden, text enlarged).
        View footerView = LayoutInflater.from(getActivity()).inflate(R.layout.loading_view, null);
        if (mFukuangInfo.getItems().size() == 0) {
            footerView.findViewById(R.id.loading_progressbar).setVisibility(View.GONE);
            TextView textView = ((TextView) footerView.findViewById(R.id.loading_message));
            textView.setText("没有找到任何付款信息");
            textView.setTextSize(TypedValue.COMPLEX_UNIT_SP, 18);
            textView.offsetTopAndBottom(30);
            // Footer is not selectable (last argument false).
            getListView().addFooterView(footerView, null, false);
        }
    }

    /** Adapter rendering one row per payment item. */
    private class FukuangAdpter extends ArrayAdapter<OrderPurchaseItem> {
        public FukuangAdpter(OrderPurchaseInfo fukuangInfo) {
            super(getActivity(), 0, fukuangInfo.getItems());
        }

        @Override
        public View getView(int position, View convertView, ViewGroup parent) {
            // Reuse recycled row views when the list framework provides one.
            if (convertView == null) {
                convertView = getActivity().getLayoutInflater().inflate(R.layout.list_item_shougouinfo, null);
            }
            OrderPurchaseItem item = getItem(position);
            TextView contractTextView = (TextView) convertView.findViewById(R.id.order_shougouInfo_contractNo);
            contractTextView.setText(item.getContract());
            TextView dateTextView = (TextView) convertView.findViewById(R.id.order_shougouInfo_date);
            dateTextView.setText(item.getDate());
            TextView factoryTextView = (TextView) convertView.findViewById(R.id.order_shougouInfo_factory);
            factoryTextView.setText(item.getFactory());
            TextView amountTextView = (TextView) convertView.findViewById(R.id.order_shougouInfo_amount);
            // BUGFIX: pin the locale so the amount always renders with a dot
            // decimal separator regardless of the device locale (the default
            // locale could produce a comma and break the display format).
            amountTextView.setText("¥" + String.format(Locale.US, "%.2f", item.getAmount()));
            return convertView;
        }
    }
}
|
/**
* Apache License
* Version 2.0, January 2004
* http://www.apache.org/licenses/
*
* TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
*
* 1. Definitions.
*
* "License" shall mean the terms and conditions for use, reproduction,
* and distribution as defined by Sections 1 through 9 of this document.
*
* "Licensor" shall mean the copyright owner or entity authorized by
* the copyright owner that is granting the License.
*
* "Legal Entity" shall mean the union of the acting entity and all
* other entities that control, are controlled by, or are under common
* control with that entity. For the purposes of this definition,
* "control" means (i) the power, direct or indirect, to cause the
* direction or management of such entity, whether by contract or
* otherwise, or (ii) ownership of fifty percent (50%) or more of the
* outstanding shares, or (iii) beneficial ownership of such entity.
*
* "You" (or "Your") shall mean an individual or Legal Entity
* exercising permissions granted by this License.
*
* "Source" form shall mean the preferred form for making modifications,
* including but not limited to software source code, documentation
* source, and configuration files.
*
* "Object" form shall mean any form resulting from mechanical
* transformation or translation of a Source form, including but
* not limited to compiled object code, generated documentation,
* and conversions to other media types.
*
* "Work" shall mean the work of authorship, whether in Source or
* Object form, made available under the License, as indicated by a
* copyright notice that is included in or attached to the work
* (an example is provided in the Appendix below).
*
* "Derivative Works" shall mean any work, whether in Source or Object
* form, that is based on (or derived from) the Work and for which the
* editorial revisions, annotations, elaborations, or other modifications
* represent, as a whole, an original work of authorship. For the purposes
* of this License, Derivative Works shall not include works that remain
* separable from, or merely link (or bind by name) to the interfaces of,
* the Work and Derivative Works thereof.
*
* "Contribution" shall mean any work of authorship, including
* the original version of the Work and any modifications or additions
* to that Work or Derivative Works thereof, that is intentionally
* submitted to Licensor for inclusion in the Work by the copyright owner
* or by an individual or Legal Entity authorized to submit on behalf of
* the copyright owner. For the purposes of this definition, "submitted"
* means any form of electronic, verbal, or written communication sent
* to the Licensor or its representatives, including but not limited to
* communication on electronic mailing lists, source code control systems,
* and issue tracking systems that are managed by, or on behalf of, the
* Licensor for the purpose of discussing and improving the Work, but
* excluding communication that is conspicuously marked or otherwise
* designated in writing by the copyright owner as "Not a Contribution."
*
* "Contributor" shall mean Licensor and any individual or Legal Entity
* on behalf of whom a Contribution has been received by Licensor and
* subsequently incorporated within the Work.
*
* 2. Grant of Copyright License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* copyright license to reproduce, prepare Derivative Works of,
* publicly display, publicly perform, sublicense, and distribute the
* Work and such Derivative Works in Source or Object form.
*
* 3. Grant of Patent License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* (except as stated in this section) patent license to make, have made,
* use, offer to sell, sell, import, and otherwise transfer the Work,
* where such license applies only to those patent claims licensable
* by such Contributor that are necessarily infringed by their
* Contribution(s) alone or by combination of their Contribution(s)
* with the Work to which such Contribution(s) was submitted. If You
* institute patent litigation against any entity (including a
* cross-claim or counterclaim in a lawsuit) alleging that the Work
* or a Contribution incorporated within the Work constitutes direct
* or contributory patent infringement, then any patent licenses
* granted to You under this License for that Work shall terminate
* as of the date such litigation is filed.
*
* 4. Redistribution. You may reproduce and distribute copies of the
* Work or Derivative Works thereof in any medium, with or without
* modifications, and in Source or Object form, provided that You
* meet the following conditions:
*
* (a) You must give any other recipients of the Work or
* Derivative Works a copy of this License; and
*
* (b) You must cause any modified files to carry prominent notices
* stating that You changed the files; and
*
* (c) You must retain, in the Source form of any Derivative Works
* that You distribute, all copyright, patent, trademark, and
* attribution notices from the Source form of the Work,
* excluding those notices that do not pertain to any part of
* the Derivative Works; and
*
* (d) If the Work includes a "NOTICE" text file as part of its
* distribution, then any Derivative Works that You distribute must
* include a readable copy of the attribution notices contained
* within such NOTICE file, excluding those notices that do not
* pertain to any part of the Derivative Works, in at least one
* of the following places: within a NOTICE text file distributed
* as part of the Derivative Works; within the Source form or
* documentation, if provided along with the Derivative Works; or,
* within a display generated by the Derivative Works, if and
* wherever such third-party notices normally appear. The contents
* of the NOTICE file are for informational purposes only and
* do not modify the License. You may add Your own attribution
* notices within Derivative Works that You distribute, alongside
* or as an addendum to the NOTICE text from the Work, provided
* that such additional attribution notices cannot be construed
* as modifying the License.
*
* You may add Your own copyright statement to Your modifications and
* may provide additional or different license terms and conditions
* for use, reproduction, or distribution of Your modifications, or
* for any such Derivative Works as a whole, provided Your use,
* reproduction, and distribution of the Work otherwise complies with
* the conditions stated in this License.
*
* 5. Submission of Contributions. Unless You explicitly state otherwise,
* any Contribution intentionally submitted for inclusion in the Work
* by You to the Licensor shall be under the terms and conditions of
* this License, without any additional terms or conditions.
* Notwithstanding the above, nothing herein shall supersede or modify
* the terms of any separate license agreement you may have executed
* with Licensor regarding such Contributions.
*
* 6. Trademarks. This License does not grant permission to use the trade
* names, trademarks, service marks, or product names of the Licensor,
* except as required for reasonable and customary use in describing the
* origin of the Work and reproducing the content of the NOTICE file.
*
* 7. Disclaimer of Warranty. Unless required by applicable law or
* agreed to in writing, Licensor provides the Work (and each
* Contributor provides its Contributions) on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied, including, without limitation, any warranties or conditions
* of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
* PARTICULAR PURPOSE. You are solely responsible for determining the
* appropriateness of using or redistributing the Work and assume any
* risks associated with Your exercise of permissions under this License.
*
* 8. Limitation of Liability. In no event and under no legal theory,
* whether in tort (including negligence), contract, or otherwise,
* unless required by applicable law (such as deliberate and grossly
* negligent acts) or agreed to in writing, shall any Contributor be
* liable to You for damages, including any direct, indirect, special,
* incidental, or consequential damages of any character arising as a
* result of this License or out of the use or inability to use the
* Work (including but not limited to damages for loss of goodwill,
* work stoppage, computer failure or malfunction, or any and all
* other commercial damages or losses), even if such Contributor
* has been advised of the possibility of such damages.
*
* 9. Accepting Warranty or Additional Liability. While redistributing
* the Work or Derivative Works thereof, You may choose to offer,
* and charge a fee for, acceptance of support, warranty, indemnity,
* or other liability obligations and/or rights consistent with this
* License. However, in accepting such obligations, You may act only
* on Your own behalf and on Your sole responsibility, not on behalf
* of any other Contributor, and only if You agree to indemnify,
* defend, and hold each Contributor harmless for any liability
* incurred by, or claims asserted against, such Contributor by reason
* of your accepting any such warranty or additional liability.
*
* END OF TERMS AND CONDITIONS
*
* APPENDIX: How to apply the Apache License to your work.
*
* To apply the Apache License to your work, attach the following
* boilerplate notice, with the fields enclosed by brackets "{}"
* replaced with your own identifying information. (Don't include
* the brackets!) The text should be enclosed in the appropriate
* comment syntax for the file format. We also recommend that a
* file or class name and description of purpose be included on the
* same "printed page" as the copyright notice for easier
* identification within third-party archives.
*
* Copyright 2014 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jooby.internal.ssl;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.security.InvalidAlgorithmParameterException;
import java.security.InvalidKeyException;
import java.security.KeyException;
import java.security.KeyFactory;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.cert.Certificate;
import java.security.cert.CertificateException;
import java.security.cert.CertificateFactory;
import java.security.cert.X509Certificate;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.PKCS8EncodedKeySpec;
import java.util.ArrayList;
import java.util.List;
import javax.crypto.Cipher;
import javax.crypto.EncryptedPrivateKeyInfo;
import javax.crypto.NoSuchPaddingException;
import javax.crypto.SecretKey;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.PBEKeySpec;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLException;
import javax.net.ssl.SSLSessionContext;
import javax.net.ssl.TrustManagerFactory;
import javax.security.auth.x500.X500Principal;
/**
* A secure socket protocol implementation which acts as a factory for {@link SSLEngine} and
* {@link SslHandler}.
* Internally, it is implemented via JDK's {@link SSLContext} or OpenSSL's {@code SSL_CTX}.
*
* <h3>Making your server support SSL/TLS</h3>
* <pre>
* // In your {@link ChannelInitializer}:
* {@link ChannelPipeline} p = channel.pipeline();
* {@link SslContext} sslCtx = {@link SslContextBuilder#forServer(File, File) SslContextBuilder.forServer(...)}.build();
* p.addLast("ssl", {@link #newEngine(ByteBufAllocator) sslCtx.newEngine(channel.alloc())});
* ...
* </pre>
*
* <h3>Making your client support SSL/TLS</h3>
* <pre>
* // In your {@link ChannelInitializer}:
* {@link ChannelPipeline} p = channel.pipeline();
* {@link SslContext} sslCtx = {@link SslContextBuilder#forClient() SslContextBuilder.forClient()}.build();
* p.addLast("ssl", {@link #newEngine(ByteBufAllocator, String, int) sslCtx.newEngine(channel.alloc(), host, port)});
* ...
* </pre>
*
* Kindly Borrowed from <a href="http://netty.io">Netty</a>
*/
public abstract class SslContext {
// Process-wide X.509 certificate factory. "X.509" is an algorithm every JCA
// provider must support, so failure to obtain it indicates a broken JRE and
// is escalated to a fatal error at class-load time.
static final CertificateFactory X509_CERT_FACTORY;
static {
try {
X509_CERT_FACTORY = CertificateFactory.getInstance("X.509");
} catch (CertificateException e) {
throw new IllegalStateException("unable to instance X.509 CertificateFactory", e);
}
}
/**
 * Creates a JDK-backed server-side {@link SslContext}.
 *
 * @param trustCertChainFile trusted CA certificate chain file (PEM)
 * @param keyCertChainFile the server certificate chain file (PEM)
 * @param keyFile the server private key file (PKCS#8 PEM)
 * @param keyPassword password of {@code keyFile}; semantics of {@code null}
 *        are defined by {@code JdkSslServerContext} — presumably "unencrypted key";
 *        confirm against that class
 * @param sessionCacheSize size of the SSL session cache
 * @param sessionTimeout session timeout for cached SSL sessions
 * @throws SSLException if the context cannot be created
 */
public static SslContext newServerContextInternal(
final File trustCertChainFile,
final File keyCertChainFile, final File keyFile, final String keyPassword,
final long sessionCacheSize, final long sessionTimeout) throws SSLException {
return new JdkSslServerContext(trustCertChainFile, keyCertChainFile,
keyFile, keyPassword, sessionCacheSize, sessionTimeout);
}
/**
* Returns the size of the cache used for storing SSL session objects.
*/
public abstract long sessionCacheSize();
/** Returns the timeout for cached SSL session objects. */
public abstract long sessionTimeout();
/** Returns the underlying JDK {@link SSLContext}. */
public abstract SSLContext context();
/**
* Returns the {@link SSLSessionContext} object held by this context.
*/
public abstract SSLSessionContext sessionContext();
/**
* Generates a key specification for an (encrypted) private key.
*
* @param password characters, if {@code null} or empty an unencrypted key is assumed
* @param key bytes of the DER encoded private key
*
* @return a key specification
*
* @throws IOException if parsing {@code key} fails
* @throws NoSuchAlgorithmException if the algorithm used to encrypt {@code key} is unknown
* @throws NoSuchPaddingException if the padding scheme specified in the decryption algorithm is
* unknown
* @throws InvalidKeySpecException if the decryption key based on {@code password} cannot be
* generated
* @throws InvalidKeyException if the decryption key based on {@code password} cannot be used to
* decrypt
* {@code key}
* @throws InvalidAlgorithmParameterException if decryption algorithm parameters are somehow
* faulty
*/
protected static PKCS8EncodedKeySpec generateKeySpec(final char[] password, final byte[] key)
throws IOException, NoSuchAlgorithmException, NoSuchPaddingException, InvalidKeySpecException,
InvalidKeyException, InvalidAlgorithmParameterException {
// No password: the key bytes are a plain (unencrypted) PKCS#8 structure.
if (password == null || password.length == 0) {
return new PKCS8EncodedKeySpec(key);
}
// Otherwise decrypt the EncryptedPrivateKeyInfo with a PBE key derived
// from the password, using the algorithm recorded in the structure itself.
EncryptedPrivateKeyInfo encryptedPrivateKeyInfo = new EncryptedPrivateKeyInfo(key);
SecretKeyFactory keyFactory = SecretKeyFactory
.getInstance(encryptedPrivateKeyInfo.getAlgName());
PBEKeySpec pbeKeySpec = new PBEKeySpec(password);
SecretKey pbeKey = keyFactory.generateSecret(pbeKeySpec);
Cipher cipher = Cipher.getInstance(encryptedPrivateKeyInfo.getAlgName());
cipher.init(Cipher.DECRYPT_MODE, pbeKey, encryptedPrivateKeyInfo.getAlgParameters());
return encryptedPrivateKeyInfo.getKeySpec(cipher);
}
/**
* Generates a new {@link KeyStore}.
*
* @param certChainFile a X.509 certificate chain file in PEM format,
* @param keyFile a PKCS#8 private key file in PEM format,
* @param keyPasswordChars the password of the {@code keyFile}.
* {@code null} if it's not password-protected.
* @return generated {@link KeyStore}.
*/
static KeyStore buildKeyStore(final File certChainFile, final File keyFile,
final char[] keyPasswordChars)
throws KeyStoreException, NoSuchAlgorithmException,
NoSuchPaddingException, InvalidKeySpecException, InvalidAlgorithmParameterException,
CertificateException, KeyException, IOException {
ByteBuffer encodedKeyBuf = PemReader.readPrivateKey(keyFile);
byte[] encodedKey = encodedKeyBuf.array();
PKCS8EncodedKeySpec encodedKeySpec = generateKeySpec(keyPasswordChars, encodedKey);
PrivateKey key;
// A PKCS#8 blob does not say which algorithm the key is for, so try the
// common algorithms in order: RSA, then DSA, then EC.
try {
key = KeyFactory.getInstance("RSA").generatePrivate(encodedKeySpec);
} catch (InvalidKeySpecException ignore) {
try {
key = KeyFactory.getInstance("DSA").generatePrivate(encodedKeySpec);
} catch (InvalidKeySpecException ignore2) {
try {
key = KeyFactory.getInstance("EC").generatePrivate(encodedKeySpec);
} catch (InvalidKeySpecException e) {
throw new InvalidKeySpecException("Neither RSA, DSA nor EC worked", e);
}
}
}
// Parse every PEM certificate in the chain file into the key store entry.
CertificateFactory cf = CertificateFactory.getInstance("X.509");
List<ByteBuffer> certs = PemReader.readCertificates(certChainFile);
List<Certificate> certChain = new ArrayList<Certificate>(certs.size());
for (ByteBuffer buf : certs) {
certChain.add(cf.generateCertificate(new ByteArrayInputStream(buf.array())));
}
// In-memory JKS store holding the private key plus its certificate chain.
KeyStore ks = KeyStore.getInstance("JKS");
ks.load(null, null);
ks.setKeyEntry("key", key, keyPasswordChars,
certChain.toArray(new Certificate[certChain.size()]));
return ks;
}
/**
* Build a {@link TrustManagerFactory} from a certificate chain file.
*
* @param certChainFile The certificate file to build from.
* @param trustManagerFactory The existing {@link TrustManagerFactory} that will be used if not
* {@code null}.
* @return A {@link TrustManagerFactory} which contains the certificates in {@code certChainFile}
*/
protected static TrustManagerFactory buildTrustManagerFactory(final File certChainFile,
TrustManagerFactory trustManagerFactory)
throws NoSuchAlgorithmException, CertificateException, KeyStoreException, IOException {
KeyStore ks = KeyStore.getInstance("JKS");
ks.load(null, null);
CertificateFactory cf = CertificateFactory.getInstance("X.509");
List<ByteBuffer> certs = PemReader.readCertificates(certChainFile);
for (ByteBuffer buf : certs) {
X509Certificate cert = (X509Certificate) cf
.generateCertificate(new ByteArrayInputStream(buf.array()));
// Each certificate is stored under its RFC 2253 subject DN, so distinct
// subjects get distinct key-store aliases.
X500Principal principal = cert.getSubjectX500Principal();
ks.setCertificateEntry(principal.getName("RFC2253"), cert);
}
// Set up trust manager factory to use our key store.
if (trustManagerFactory == null) {
trustManagerFactory = TrustManagerFactory
.getInstance(TrustManagerFactory.getDefaultAlgorithm());
}
trustManagerFactory.init(ks);
return trustManagerFactory;
}
}
|
package org.jooby.micrometer;
import io.micrometer.prometheus.PrometheusMeterRegistry;
import static org.easymock.EasyMock.expect;
import org.jooby.MediaType;
import org.jooby.Request;
import org.jooby.Response;
import org.jooby.Result;
import org.jooby.test.MockUnit;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
public class PrometheusHandlerTest {
/**
 * handle() should resolve the PrometheusMeterRegistry from the request,
 * set the plain-text content type on the response, and send the scrape
 * output. NOTE: the EasyMock expectations below mirror the exact call
 * sequence of the handler; do not reorder them.
 */
@Test
public void handle() throws Exception {
new MockUnit(PrometheusMeterRegistry.class, Request.class, Response.class)
.expect(unit -> {
// The registry produces the scrape payload ...
PrometheusMeterRegistry registry = unit.get(PrometheusMeterRegistry.class);
expect(registry.scrape()).andReturn("scrape");
// ... and is obtained via the request's dependency lookup.
Request req = unit.get(Request.class);
expect(req.require(PrometheusMeterRegistry.class)).andReturn(registry);
// Response: content type is set, then the Result is captured for
// inspection in the verification lambda below.
Response rsp = unit.get(Response.class);
expect(rsp.type(MediaType.plain)).andReturn(rsp);
rsp.send(unit.capture(Result.class));
})
.run(unit -> {
new PrometheusHandler()
.handle(unit.get(Request.class), unit.get(Response.class));
}, unit->{
// The sent Result must carry exactly the scrape output.
assertEquals("scrape", unit.captured(Result.class).get(0).get());
});
}
}
|
<filename>source/universal/pages/LoadingPage/index.js
/* @flow */
import universal from 'react-universal-component'
// Code-splitting wrapper: LoadingPage is loaded lazily in its own chunk via
// react-universal-component, deferring its download until first render.
export default universal(() => import('./LoadingPage'))
|
#!/bin/sh
echo "
######################################
### Release test ##
######################################
"
# The test compares $MC_VERSION against upstream; without it there is nothing
# to check, so exit successfully (not a failure).
if [ "$MC_VERSION" = "" ]; then
echo "\$MC_VERSION is not set. Useless test... Exiting."
exit 0
fi
# Check for command existence
# See: https://www.shivering-isles.com/helpful-shell-snippets-for-docker-testing-and-bootstrapping/
command_exists() { command -v "$1" >/dev/null 2>&1 || { echo >&2 "I require $1 but it's not installed. Aborting."; exit 1; }; }
# Version comparison greater or equal
# See: https://www.shivering-isles.com/helpful-shell-snippets-for-docker-testing-and-bootstrapping/
version_ge() { test "$(printf '%s\n' "$@" | sort -V | head -n 1)" != "$1" || test "$1" = "$2"; }
# GitHub get latest release tag
# See: https://www.shivering-isles.com/helpful-shell-snippets-for-docker-testing-and-bootstrapping/
github_latest_release() { wget -qO- "https://api.github.com/repos/$1/releases/latest" | jq .tag_name | sed -e 's/"//g'; }
# Fail fast if the required tooling is missing.
command_exists wget
command_exists jq
# Scrape the latest Minecraft version from mcversions.net using the pup HTML
# parser (run via docker so it need not be installed locally).
docker pull sheogorath/pup-docker
MC_LATEST_VERSION=$(wget -qO- https://mcversions.net/ | docker run --rm -i sheogorath/pup-docker "ul .latest strong.version text{}")
# Pass when our pinned version is >= the latest published one; otherwise fail
# the release test so the base image gets updated.
if version_ge "$MC_VERSION" "${MC_LATEST_VERSION}"; then
echo "Minecraft version ($MC_VERSION) is up to date! Test successful."
else
echo >&2 "A newer base image is available! Please update. New version is ${MC_LATEST_VERSION}"
exit 1
fi
|
#!/bin/bash
#
# This script configures my Node.js development setup. Note that
# nvm is installed by the Homebrew install script.
#
# NOTE: bash (not plain sh) is required — this script uses arrays,
# which are not POSIX sh. (BUGFIX: shebang was #!/bin/sh.)
#
# Also, I would highly reccomend not installing your Node.js build
# tools, e.g., Grunt, gulp, WebPack, or whatever you use, globally.
# Instead, install these as local devDepdencies on a project-by-project
# basis. Most Node CLIs can be run locally by using the executable file in
# "./node_modules/.bin". For example:
#
#   ./node_modules/.bin/webpack --config webpack.local.config.js
#

# BUGFIX: nvm is a shell *function*, not a binary on PATH, so the original
# `test ! $(which nvm)` always "failed to find" nvm and then ran the nvm
# commands exactly when nvm was unavailable. Source nvm explicitly, then
# probe for it with `command -v`.
export NVM_DIR="${NVM_DIR:-$HOME/.nvm}"
# shellcheck disable=SC1091
[ -s "$NVM_DIR/nvm.sh" ] && . "$NVM_DIR/nvm.sh"

if command -v nvm >/dev/null 2>&1
then
  echo "Installing a stable version of Node..."

  # Install the latest stable version of node
  nvm install stable

  # Switch to the installed version
  nvm use node

  # Use the stable version of node by default
  nvm alias default node
else
  echo "nvm not found; skipping Node installation." >&2
fi

# All `npm install <pkg>` commands will pin to the version that was available at the time you run the command
# npm config set save-exact = true

# Globally install with npm
# To list globally installed npm packages and version: npm list -g --depth=0
#
# Some descriptions:
#
# diff-so-fancy — sexy git diffs
# git-recent — Type `git recent` to see your recent local git branches
# git-open — Type `git open` to open the GitHub page or website for a repository
packages=(
  diff-so-fancy
  git-recent
  git-open
  npm-check-updates
  webpack
  yarn
  nodemon
)

npm install -g "${packages[@]}"
|
/*
*
*/
package net.community.chest.jfree.jfreechart.plot.misc;
import java.text.NumberFormat;
import net.community.chest.dom.proxy.AbstractXmlProxyConverter;
import net.community.chest.dom.transform.XmlValueInstantiator;
import net.community.chest.jfree.jfreechart.plot.PlotReflectiveProxy;
import net.community.chest.text.NumberFormatReflectiveProxy;
import org.jfree.chart.plot.MeterPlot;
import org.w3c.dom.Element;
/**
* <P>Copyright 2008 as per GPLv2</P>
*
* @param <P> The reflected {@link MeterPlot}
* @author <NAME>.
* @since Feb 9, 2009 11:17:40 AM
*/
public class MeterPlotReflectiveProxy<P extends MeterPlot> extends PlotReflectiveProxy<P> {
    protected MeterPlotReflectiveProxy (Class<P> objClass, boolean registerAsDefault)
        throws IllegalArgumentException, IllegalStateException
    {
        super(objClass, registerAsDefault);
    }

    public MeterPlotReflectiveProxy (Class<P> objClass) throws IllegalArgumentException
    {
        this(objClass, false);
    }

    // XML section name used for the tick-label format of the meter plot.
    public static final String TICK_LABEL_FMT_ATTR="TickLabelFormat";

    /** @return true if the element represents a tick-label-format section. */
    public boolean isTickLabelFormatElement (Element elem, String tagName)
    {
        return AbstractXmlProxyConverter.isDefaultMatchingElement(elem, tagName, TICK_LABEL_FMT_ATTR);
    }

    /** @return a {@link NumberFormat} instantiator for the element, or null if elem is null. */
    public XmlValueInstantiator<? extends NumberFormat> getTickLabelFormatConverter (Element elem)
    {
        return (null == elem) ? null : NumberFormatReflectiveProxy.getNumberFormatConverter(elem);
    }

    /**
     * Applies the tick-label format described by the XML element to the plot.
     * @param src the plot to update
     * @param elem XML element describing the format (may be null)
     * @return the format that was applied, or null if no converter was resolved
     * @throws Exception if the XML cannot be converted to a format
     */
    public NumberFormat setTickLabelFormat (P src, Element elem) throws Exception
    {
        final XmlValueInstantiator<? extends NumberFormat> conv=getTickLabelFormatConverter(elem);
        // BUG FIX: getTickLabelFormatConverter returns null for a null element;
        // the original dereferenced conv unconditionally and threw an NPE.
        if (null == conv)
            return null;
        final NumberFormat fmt=conv.fromXml(elem);
        if (fmt != null)
            src.setTickLabelFormat(fmt);
        return fmt;
    }
    /*
     * @see net.community.chest.jfree.jfreechart.plot.PlotReflectiveProxy#fromXmlChild(org.jfree.chart.plot.Plot, org.w3c.dom.Element)
     */
    @Override
    public P fromXmlChild (P src, Element elem) throws Exception
    {
        final String tagName=(null == elem) ? null : elem.getTagName();
        if (isTickLabelFormatElement(elem, tagName))
        {
            setTickLabelFormat(src, elem);
            return src;
        }
        return super.fromXmlChild(src, elem);
    }

    /** Default proxy instance, registered as the converter for {@link MeterPlot}. */
    public static final MeterPlotReflectiveProxy<MeterPlot> METERPLOT=
        new MeterPlotReflectiveProxy<MeterPlot>(MeterPlot.class, true);
}
|
<reponame>Stravanni/ruler
package RulER.DataStructure
/**
 * One matching predicate over a single attribute: presumably compares the
 * attribute under `ruleType` against `threshold` (semantics live in
 * MatchingRule — confirm there).
 */
case class Rule(attribute: String, ruleType: String, threshold: Double) {

  /** Combine two rules conjunctively: both go into one OR-group. */
  def and(that: Rule): MatchingRule =
    MatchingRule().addOrRule(List(this, that))

  /** Combine two rules disjunctively: each becomes its own OR-group. */
  def or(that: Rule): MatchingRule =
    MatchingRule().addOrRule(List(this)).addOrRule(List(that))
}
|
package main
import "time"
// track last time a user used a command, and the
// last time a channel had a command used. Then
// just see how much time has passed since then
// to see if they are within the limit
var (
userLastuse map[string]time.Time = make(map[string]time.Time)
channelLastuse map[string]time.Time = make(map[string]time.Time)
perUserRateLimit = time.Second * 12
perChannelRateLimit = time.Second * 5
)
// UserCheckLimit reports whether the user is outside their cooldown window,
// i.e. enough time has passed since their last recorded command.
// (An unknown user has the zero time, so the check passes.)
func UserCheckLimit(username string) bool {
	elapsed := time.Since(userLastuse[username])
	return elapsed > perUserRateLimit
}
// UserRegisterLimit stamps the user's cooldown clock with the current time.
func UserRegisterLimit(username string) {
	now := time.Now()
	userLastuse[username] = now
}
// ChannelCheckLimit reports whether the channel is outside its cooldown
// window, i.e. enough time has passed since its last recorded command.
func ChannelCheckLimit(channel string) bool {
	elapsed := time.Since(channelLastuse[channel])
	return elapsed > perChannelRateLimit
}
// ChannelRegisterLimit stamps the channel's cooldown clock with the current time.
func ChannelRegisterLimit(channel string) {
	now := time.Now()
	channelLastuse[channel] = now
}
// RegisterUserChannelComboAllin1 stamps both cooldown clocks at once:
// the user's, then the channel's.
func RegisterUserChannelComboAllin1(username, channel string) {
	UserRegisterLimit(username)
	ChannelRegisterLimit(channel)
}
|
1. Get smart with AI - Learn AI, fast!
2. Unlock the power of Artificial Intelligence.
3. Learn to become an AI master.
4. Become an AI innovator.
5. Create better AI solutions with AI training.
6. Thinking outside the box with AI.
7. Taking AI to the next level.
8. Redefine AI boundaries.
9. Stay ahead of the curve with AI.
10. Speed up your AI development with this course.
def display_back_button(user_role: str) -> bool:
    """Return True if the back button should be shown for this role.

    Only administrators see the back button; every other role gets False.
    """
    # The if/else returning literal True/False collapses to the comparison.
    return user_role == "admin"
/*
* Copyright (c) 2010, <NAME>
* All rights reserved.
*
* Made available under the BSD license - see the LICENSE file
*/
package sim.traffic;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
public class Trace {
    /** Parsed trace events, in file order. */
    public ArrayList<TraceElement> m_trace;

    /**
     * Loads a traffic trace from the given CSV file.
     *
     * @param filename path to a trace of "clock,source,dest,burst" lines
     */
    public Trace(String filename) {
        m_trace = new ArrayList<TraceElement>();
        readTrace(filename);
    }

    /**
     * Reads "clock,source,dest,burst" lines into m_trace.
     * Lines whose first non-blank character is '#' are comments.
     * On an I/O failure the trace is left with whatever was read so far.
     */
    private void readTrace(String filename) {
        BufferedReader in = null;
        try {
            in = new BufferedReader(new FileReader(filename));
            String str;
            while ((str = in.readLine()) != null) {
                String trimmed = str.trim();
                // BUG FIX: skip blank lines first — charAt(0) on an empty
                // string threw StringIndexOutOfBoundsException.
                if (trimmed.isEmpty() || trimmed.charAt(0) == '#') {
                    continue;
                }
                String[] frags = str.split(",");
                long clock = Long.parseLong(frags[0]);
                int source = Integer.parseInt(frags[1]);
                int dest = Integer.parseInt(frags[2]);
                int burst = Integer.parseInt(frags[3]);
                m_trace.add(new TraceElement(clock, source, dest, burst));
            }
            System.out.println("Read "+m_trace.size()+" events from traffic trace "+filename);
        } catch (IOException e) {
            System.err.println("Error: reading traffic trace "+filename);
        } finally {
            // BUG FIX: the original only closed the reader on the success
            // path, leaking it when a parse exception escaped the loop.
            if (in != null) {
                try {
                    in.close();
                } catch (IOException e) {
                    // best-effort close; nothing useful to do here
                }
            }
        }
    }
}
|
<reponame>noamt/uv-bot
// Package uv provides utilities for measuring and reporting UV indices
package uv |
#!/bin/bash

# Build the sample-project-creator dotnet tool, then run it to generate a
# demo Android sample project.
#
# BUG FIX: the original left a trailing "\" after the build's project path,
# which spliced the whole `dotnet run ...` command line into `dotnet build`'s
# argument list, so only one (malformed) command ever ran. `dotnet build`
# also takes the project path positionally, not via `--project`.
dotnet build \
    ./HolisticWare.Xamarin.Tools.AndroidSampleProjectKreator.dotnet-tool/HolisticWare.Xamarin.Tools.AndroidSampleProjectKreator.dotnet-tool.csproj

dotnet run \
    --project ./HolisticWare.Xamarin.Tools.AndroidSampleProjectKreator.dotnet-tool/HolisticWare.Xamarin.Tools.AndroidSampleProjectKreator.dotnet-tool.csproj \
    --framework net5.0 \
    -- \
    create \
    --input-folder a \
    --output-folder b \
    --name Demo
package com.messanger.data.database;
import android.content.ContentValues;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteException;
import android.database.sqlite.SQLiteOpenHelper;
import android.util.Log;
import com.messanger.data.model.Messages;
import com.messanger.data.model.User;
import java.util.ArrayList;
/**
* Created by <NAME> on 11/24/17.
* <p>
* Class {@link LocalDatabaseManager} used to display all
* local chat and messages that are stores in Users
* and messages tables.
* Key points:-
* 1.) this class handle the database locally.
* 2.) all info stores in this table is stores locally.
*/
public class LocalDatabaseManager extends SQLiteOpenHelper {
    private static final String TAG = LocalDatabaseManager.class.getSimpleName();
    // All Static variables
    // Database Version
    private static final int DATABASE_VERSION = 1;
    // Database Name
    private static final String DATABASE_NAME = "messangerDB";
    // Contacts table name
    private static final String TABLE_USERS = "users";
    private static final String TABLE_MESSAGES = "messages";
    // Users Table Columns names
    private static final String NAME = "name";
    private static final String EMAIL = "email";
    private static final String TOKEN = "token";
    // Message Table Columns names
    private static final String TIME_STAMP = "timestamp";
    private static final String ID = "id";
    private static final String MESSAGE = "message";
    private static final String IMAGE_LINK = "image_link";
    private static final String TO_ID = "to_id";
    private static final String FROM_ID = "from_id";
    //USED AS PRIMARY KEY
    private static final String USER_ID = "user_id";
    private static final String MESSAGE_ID = "message_id";
    // Singleton instance, lazily created by getInstance().
    private static LocalDatabaseManager localDatabaseManager;
    /**
     * Instantiates a new LocalDB handler.
     */
    private LocalDatabaseManager() {
        /*TODO context cannot be null.*/
        // NOTE(review): passing null as the Context makes this helper
        // unusable — getWritableDatabase() will fail at runtime. A valid
        // application Context must be wired in here; confirm intended fix.
        super(null, DATABASE_NAME, null, DATABASE_VERSION);
    }
    /**
     * get the instance of current class to access local db.
     *
     * @return instance of local db.
     */
    public static LocalDatabaseManager getInstance() {
        /*TODO we don't required double locking here*/
        // Double-checked locking on the singleton; per the TODO above the
        // authors consider the synchronization unnecessary for this app.
        if (localDatabaseManager == null) {
            synchronized (LocalDatabaseManager.class) {
                if (localDatabaseManager == null) {
                    localDatabaseManager = new LocalDatabaseManager();
                }
            }
        }
        return localDatabaseManager;
    }
    // Creates the users and messages tables. messages.user_id references
    // users.token (note: not the users.user_id primary key).
    @Override
    public void onCreate(final SQLiteDatabase db) {
        //Creating Tables
        String createUsers = "CREATE TABLE "
                + TABLE_USERS + "("
                + USER_ID + " INTEGER PRIMARY KEY AUTOINCREMENT,"
                + NAME + " VARCHAR,"
                + EMAIL + " VARCHAR,"
                + TOKEN + " VARCHAR" + ");";
        String createMesseges = "CREATE TABLE "
                + TABLE_MESSAGES + "("
                + MESSAGE_ID + " INTEGER PRIMARY KEY AUTOINCREMENT,"
                + ID + " VARCHAR,"
                + MESSAGE + " VARCHAR,"
                + IMAGE_LINK + " VARCHAR,"
                + TO_ID + " VARCHAR,"
                + FROM_ID + " VARCHAR,"
                + TIME_STAMP + " VARCHAR,"
                + USER_ID + " VARCHAR,"
                + " FOREIGN KEY (" + USER_ID + ") REFERENCES "
                + TABLE_USERS + "(" + TOKEN + "));";
        db.execSQL(createUsers);
        db.execSQL(createMesseges);
    }
    // Destructive upgrade: drops both tables and recreates them, losing
    // all locally cached users and messages.
    @Override
    public void onUpgrade(final SQLiteDatabase db, final int oldVersion, final int newVersion) {
        //Upgrading database
        // Drop older table if existed
        db.execSQL("DROP TABLE IF EXISTS " + TABLE_USERS);
        db.execSQL("DROP TABLE IF EXISTS " + TABLE_MESSAGES);
        // Create tables again
        onCreate(db);
    }
    /**
     * Method used to check if user already exists in
     * local db then don't save it else save it.
     *
     * @param userId user id that need to be check.
     * @param db     instance of Sqlite Database.
     * @return true is user exist else false.
     */
    private boolean ifExists(final String userId, final SQLiteDatabase db) {
        // NOTE(review): the query is built by string concatenation; a userId
        // containing a quote breaks it. Prefer rawQuery selectionArgs.
        String checkQuery = "SELECT " + TOKEN + " FROM " + TABLE_USERS + " WHERE " + TOKEN + "= '" + userId + "'";
        Cursor cursor = db.rawQuery(checkQuery, null);
        boolean exists = cursor.getCount() > 0;
        cursor.close();
        return exists;
    }
    /**
     * Add new User here.
     * Inserts the user only when a row with the same token is not already
     * present; a null user id is silently ignored.
     *
     * @param user user model.
     */
    public void addUsers(final User user) {
        Log.e(TAG, "addUsers: -------->>" + user.getId());
        if (user.getId() != null) {
            SQLiteDatabase db = this.getWritableDatabase();
            if (ifExists(user.getId(), db)) {
                Log.e(TAG, "addUsers: 0======>>user already exist.");
                // Closing database connection
                db.close();
                return;
            }
            ContentValues values = new ContentValues();
            values.put(NAME, user.getName());
            values.put(EMAIL, user.getEmail());
            values.put(TOKEN, user.getId());
            // Inserting Row
            db.insert(TABLE_USERS, null, values);
            // Closing database connection
            db.close();
        }
    }
    /**
     * Method used to check if data is already in db or not.
     * if data exists in db then show it else hit api to get
     * data from server and insert into the db.
     *
     * @return true if data exists else false.
     */
    public boolean checkIsDataAlreadyInDBorNot() {
        SQLiteDatabase sqldb = this.getWritableDatabase();
        String query = "Select * from " + TABLE_USERS /*+ " where " + TOKEN + " = '" + userId + "'"*/;
        Cursor cursor = sqldb.rawQuery(query, null);
        if (cursor.getCount() <= 0) {
            cursor.close();
            return false;
        }
        cursor.close();
        return true;
    }
    /**
     * Method used to get data from DB and add into the userList.
     *
     * @return arraylist of type {@link User}.
     */
    public ArrayList<User> getUsers() {
        ArrayList<User> usersList = new ArrayList<>();
        SQLiteDatabase sqldb = this.getWritableDatabase();
        Cursor cursor;
        try {
            cursor = sqldb.query(TABLE_USERS, null, null, null,
                    null, null, null, null);
            if (cursor.moveToFirst()) {
                while (!cursor.isAfterLast()) {
                    // int id = cursor.getInt(cursor.getColumnIndex(USER_ID));
                    String name = cursor.getString(cursor.getColumnIndex(NAME));
                    String email = cursor.getString(cursor.getColumnIndex(EMAIL));
                    String token = cursor.getString(cursor.getColumnIndex(TOKEN));
                    // Log.e(TAG, "getUsers: ........" + token + "<------>" + name);
                    usersList.add(new User(name, email, token));
                    cursor.moveToNext();
                }
            }
            cursor.close();
        } catch (SQLiteException e) {
            e.printStackTrace();
        }
        return usersList;
    }
    /**
     * Method used to check if message unique key already exists in
     * local db then don't save it else save it.
     *
     * @param key user id that need to be check.
     * @param db  instance of Sqlite Database.
     * @return true is user exist else false.
     */
    private boolean ifMessageKeyExists(final String key, final SQLiteDatabase db) {
        // NOTE(review): same string-concatenated SQL pattern as ifExists();
        // a key containing a quote breaks the query.
        String checkQuery = "SELECT " + ID + " FROM " + TABLE_MESSAGES + " WHERE " + ID + "= '" + key + "'";
        Cursor cursor = db.rawQuery(checkQuery, null);
        boolean exists = cursor.getCount() > 0;
        cursor.close();
        return exists;
    }
    /**
     * Add new message here (deduplicated on the message's unique key).
     *
     * @param messages user model.
     * @param key      unique key
     */
    public void addMessages(final Messages messages, final String key) {
        SQLiteDatabase db = this.getWritableDatabase();
        if (ifMessageKeyExists(key, db)) {
            Log.e(TAG, "addMessages: ------>message Key already exists.");
            db.close();
            return;
        }
        ContentValues values = new ContentValues();
        values.put(ID, key);
        values.put(MESSAGE, messages.getMessage());
        values.put(TIME_STAMP, messages.getTimeStamp());
        values.put(IMAGE_LINK, messages.getImageUrl());
        values.put(TO_ID, messages.getToId());
        values.put(FROM_ID, messages.getFromId());
        // Inserting Row
        db.insert(TABLE_MESSAGES, null, values);
        // Closing database connection
        db.close();
    }
    /**
     * Method used to get all messages exchanged between the two ids,
     * in either direction (toId->fromId or fromId->toId).
     *
     * @param toId   whose we are going to send
     * @param fromId from whome we are getting the message.
     * @return list of {@link Messages}.
     */
    public ArrayList<Messages> getAllMessages(final String toId, final String fromId) {
        ArrayList<Messages> messagesList = new ArrayList<>();
        SQLiteDatabase sqldb = this.getWritableDatabase();
        Cursor cursor;
        try {
            String query = "SELECT * from " + TABLE_MESSAGES + " WHERE ("
                    + TO_ID + " = '" + toId + "' AND " + FROM_ID + " = '" + fromId
                    + "') OR (" + TO_ID + " = '" + fromId + "' AND " + FROM_ID + " = '" + toId + "')";
            cursor = sqldb.rawQuery(query, null);
            if (cursor.moveToFirst()) {
                while (!cursor.isAfterLast()) {
                    String fId = cursor.getString(cursor.getColumnIndex(FROM_ID));
                    String tId = cursor.getString(cursor.getColumnIndex(TO_ID));
                    String message = cursor.getString(cursor.getColumnIndex(MESSAGE));
                    String imageUrl = cursor.getString(cursor.getColumnIndex(IMAGE_LINK));
                    String timeStamp = cursor.getString(cursor.getColumnIndex(TIME_STAMP));
                    Log.e(TAG, "getMessage: ........" + fId + "<------>" + tId);
                    Messages messages = new Messages();
                    messages.setFromId(fId);
                    messages.setToId(tId);
                    messages.setMessage(message);
                    messages.setImageUrl(imageUrl);
                    // timestamp is stored as VARCHAR; parse back to long here
                    messages.setTimeStamp(Long.parseLong(timeStamp));
                    messagesList.add(messages);
                    cursor.moveToNext();
                }
            }
            cursor.close();
        } catch (SQLiteException e) {
            e.printStackTrace();
        }
        return messagesList;
    }
    /**
     * Method used to get recent messages from
     * the table according to user Id.
     * NOTE(review): messagesLis is built but never returned or stored —
     * this method currently only logs the result; looks like a missing
     * return value. Confirm before relying on it.
     *
     * @param userId user id to identify records from tables.
     */
    public void getRecentMessages(final String userId) {
        SQLiteDatabase sqlDb = this.getWritableDatabase();
        ArrayList<Messages> messagesLis = new ArrayList<>();
        String query = "SELECT " + MESSAGE + "," + IMAGE_LINK + "," + TIME_STAMP + "," + TOKEN + " FROM " + TABLE_MESSAGES
                + " LEFT JOIN " + TABLE_USERS + " ON "
                + TABLE_MESSAGES + "." + TO_ID + " = " + TABLE_USERS + "." + TOKEN
                + " AND "
                + TABLE_MESSAGES + "." + FROM_ID + " = " + TABLE_USERS + "." + TOKEN
                + " WHERE " + TO_ID + "= '" + userId + "' OR " + FROM_ID + " = '" + userId
                + "' GROUP BY " + TO_ID + "," + FROM_ID + " ORDER BY " + TIME_STAMP + " DESC";
        Log.e(TAG, "getRecentMessages: ------>>" + query);
        Cursor cursor = sqlDb.rawQuery(query, null);
        if (cursor.moveToFirst()) {
            while (!cursor.isAfterLast()) {
                // String fId = cursor.getString(cursor.getColumnIndex(FROM_ID));
                // String tId = cursor.getString(cursor.getColumnIndex(TO_ID));
                String token = cursor.getString(cursor.getColumnIndex(TOKEN));
                String message = cursor.getString(cursor.getColumnIndex(MESSAGE));
                String imageUrl = cursor.getString(cursor.getColumnIndex(IMAGE_LINK));
                String timeStamp = cursor.getString(cursor.getColumnIndex(TIME_STAMP));
                // Log.e(TAG, "getMessage: ........" + fId + "<------>" + tId);
                Messages messages = new Messages();
                // messages.setFromId(fId);
                // messages.setToId(tId);
                messages.setMessage(message);
                messages.setImageUrl(imageUrl);
                messages.setTimeStamp(Long.parseLong(timeStamp));
                messagesLis.add(messages);
                cursor.moveToNext();
            }
        }
        // boolean isContainChat = cursor.getCount() > 0;
        Log.e(TAG, "getRecentMessages: " + messagesLis.size());
        cursor.close();
    }
    /**
     * Method used to check if mesages exist in
     * the db for the particular user (in either direction).
     *
     * @param toId   message sent to.
     * @param fromId message from.
     * @return true if message exists else false.
     */
    public boolean checkIfMessagesAlreadyInDb(final String toId, final String fromId) {
        SQLiteDatabase sqldb = this.getWritableDatabase();
        String query = "Select * from " + TABLE_MESSAGES + " where ("
                + TO_ID + " = '" + toId + "' AND " + FROM_ID + " = '" + fromId
                + "') OR (" + TO_ID + " = '" + fromId + "' AND " + FROM_ID + " = '" + toId + "')";
        Cursor cursor = sqldb.rawQuery(query, null);
        if (cursor.getCount() <= 0) {
            cursor.close();
            return false;
        }
        cursor.close();
        return true;
    }
}
import numpy as np
def identify_events(G, T_test, Time):
    """For each row i of G, collect column indices whose value is at least
    both the per-row threshold T_test[i] and the global threshold Time.

    Returns a list of (row_index, ndarray_of_column_indices) pairs.
    """
    events = []
    for row in range(len(G)):
        mask = (G[row] >= T_test[row]) & (G[row] >= Time)
        events.append((row, np.flatnonzero(mask)))
    return events
<reponame>rickgroen/cov-weighting<filename>utils/train_utils.py
import numpy as np
import os
from config import EIGEN_PATH, CITYSCAPES_PATH
def check_if_all_images_are_present(dataset, data_dir_path):
    """Print how many of the images listed in a dataset's split files exist
    under ``data_dir_path``.

    Reads the train/val/test split files (paths come from EIGEN_PATH /
    CITYSCAPES_PATH templates in config), collects every whitespace-separated
    path they mention, and counts how many exist on disk.

    :param dataset: 'kitti' or 'cityscapes'.
    :param data_dir_path: root directory the listed paths are relative to.
    :raises ValueError: for any other dataset name.
    """
    set_names = ['train', 'val', 'test']
    if dataset == 'kitti':
        data_set_path = EIGEN_PATH
    elif dataset == 'cityscapes':
        data_set_path = CITYSCAPES_PATH
    else:
        raise ValueError("Code for running that dataset has not yet been implemented")
    all_file_names_paths = []
    for set_name in set_names:
        # The template contains a placeholder for the split name.
        full_path_to_set = data_set_path.format(set_name)
        with open(full_path_to_set, 'r') as f:
            data_set_names = f.read().splitlines()
        for names in data_set_names:
            all_file_names_paths.extend(names.split(' '))
    total_length = len(all_file_names_paths)
    # BUG FIX: this counter tallies images that ARE present, but was named
    # `number_not_present`, contradicting the "images present" message below.
    number_present = 0
    for file_path in all_file_names_paths:
        full_file_path = os.path.join(data_dir_path, file_path)
        if os.path.exists(full_file_path):
            number_present += 1
    print("-- Dir check for {}: --".format(dataset))
    print("{} out of {} images present".format(number_present, total_length))
    return
def get_present_images_from_list(lst_of_paths, data_dir):
    """Drop entries whose first (left-image) path does not exist on disk.

    Each entry is a whitespace-separated line whose first token is a path
    relative to ``data_dir``. The list is filtered in place and returned.
    """
    missing_positions = [
        idx for idx, entry in enumerate(lst_of_paths)
        if not os.path.exists(os.path.join(data_dir, entry.split()[0]))
    ]
    # Pop from the back so earlier indices stay valid.
    for idx in reversed(missing_positions):
        lst_of_paths.pop(idx)
    return lst_of_paths
def post_process_disparity(disp):
    """Fuse a left/flipped-right disparity pair into one float32 map.

    ``disp`` is (2, h, w): channel 0 is the left prediction, channel 1 the
    right one (flipped back here). Near the left border the right estimate
    dominates, near the right border the left one; in between the two are
    averaged via a linear ramp mask.
    """
    _, height, width = disp.shape
    left = disp[0, :, :]
    right = np.fliplr(disp[1, :, :])
    mean_disp = 0.5 * (left + right)
    xs, _ = np.meshgrid(np.linspace(0, 1, width), np.linspace(0, 1, height))
    # Ramp from 1 to 0 over the first ~10% of image width.
    left_mask = 1.0 - np.clip(20 * (xs - 0.05), 0, 1)
    right_mask = np.fliplr(left_mask)
    fused = right_mask * left + left_mask * right + (1.0 - left_mask - right_mask) * mean_disp
    return fused.astype(np.float32)
def print_epoch_update(epoch, time, losses):
    """Print one epoch's train/val losses and wall time, plus the loss
    weights ('alphas') when the loss records them.

    :param epoch: epoch index, used as a key into ``losses``.
    :param time: elapsed seconds for the epoch.
    :param losses: dict of per-epoch dicts with 'train', 'val', optional 'alphas'.
    """
    record = losses[epoch]
    print('Epoch: {}\t | train: {:.4f}\t | val: {:.4f}\t | time: {:.2f}'.format(
        epoch, record['train'], record['val'], time))
    if 'alphas' in record:
        print('Loss Weights: ' + ''.join('{:.4f} '.format(w) for w in record['alphas']))
    return
def pre_validation_update(val_loss):
    """Print the validation loss measured before training starts."""
    print('Pre-training val loss:\t{:.4f}'.format(val_loss))
    return
|
#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Abort on the first failing command.
set -e
# Print usage and exit with failure.
usage() {
    echo "Usage: prepare_rc -v version_number -k signing_key -a apache_ldap_username"
    echo " -v The #.#.#.RC# version number"
    echo " -k Your 8 digit GPG key id (the last 8 digits of your gpg fingerprint)"
    echo " -a Your apache LDAP username (that you use to log in to https://id.apache.org)"
    exit 1
}
# Exit with an error unless the given command is on PATH and executable.
checkCommand() {
    COMMAND=$1
    if ! [[ -x "$(command -v $COMMAND)" ]]; then
        echo "$COMMAND must be installed"
        exit 1
    fi
}
# Parse the -v/-k/-a flags; all three are mandatory.
FULL_VERSION=""
SIGNING_KEY=""
APACHE_USERNAME=""
while getopts ":v:k:a:" opt; do
  case ${opt} in
    v )
      FULL_VERSION=$OPTARG
      ;;
    k )
      SIGNING_KEY=$OPTARG
      ;;
    a )
      APACHE_USERNAME=$OPTARG
      ;;
    \? )
      usage
      ;;
  esac
done
if [[ ${FULL_VERSION} == "" ]] || [[ ${SIGNING_KEY} == "" ]] || [[ ${APACHE_USERNAME} == "" ]]; then
    usage
fi
# Signing key must be exactly 8 hex digits.
if [[ $SIGNING_KEY =~ ^[0-9A-Fa-f]{8}$ ]]; then
    true
else
    echo "Malformed signing key ${SIGNING_KEY}. Example valid key: ABCD1234"
    exit 1
fi
# Split "#.#.#.RC#" into the plain version (capture 1) and the RC suffix.
if [[ $FULL_VERSION =~ ^([0-9]+\.[0-9]+\.[0-9]+)\.(RC[0-9]+)$ ]]; then
    VERSION=${BASH_REMATCH[1]}
else
    echo "Malformed version number ${FULL_VERSION}. Example valid version: 1.9.0.RC1"
    exit 1
fi
# Major.minor portion, e.g. 1.9 for 1.9.0 — used to pick support branches.
VERSION_MM=${VERSION%.*}
checkCommand gpg
checkCommand cmake
checkCommand svn
checkCommand doxygen
echo ""
echo "============================================================"
echo "Checking gpg... (you will be prompted to enter passphrase)"
echo "============================================================"
# Export secret keys into the legacy secring.gpg (needed by gradle signing)
# unless one is already readable, then do a throwaway signing to verify the
# passphrase and cache it with the agent.
SECRING=~/.gnupg/secring.gpg
! [ -r $SECRING ] || SECRING=/dev/null
if gpg --export-secret-keys > ${SECRING} && echo "1234" | gpg -o /dev/null --local-user ${SIGNING_KEY} -as - ; then
    echo "You entered the correct passphrase; proceeding."
    echo "Please note, you will still need to enter it a few more times."
    echo "PLEASE NOTE, the very last prompt will be for your apache password (not gpg). Pay attention as the prompts look very similar."
else
    echo "Hmm, gpg seems unhappy. Check that you entered correct passphrase or refer to release wiki for troubleshooting."
    exit 1
fi
# The signing key must carry a uid with the releaser's apache address.
if ! gpg --list-keys ${SIGNING_KEY} | grep -q "${APACHE_USERNAME}@apache.org" ; then
    echo "Please specify a gpg key that is associated with your apache email address."
    echo "Expected: ${APACHE_USERNAME}@apache.org"
    echo "Found: $(gpg --list-keys ${SIGNING_KEY} | grep ^uid | sed -e 's/.*<//' -e 's/>.*//')"
    exit 1
fi
# set -x / set +x pairs below toggle command echoing for the noisy steps.
set -x
WORKSPACE=$PWD/release-${VERSION}-workspace
GEODE=$WORKSPACE/geode
GEODE_EXAMPLES=$WORKSPACE/geode-examples
GEODE_NATIVE=$WORKSPACE/geode-native
GEODE_BENCHMARKS=$WORKSPACE/geode-benchmarks
BREW_DIR=$WORKSPACE/homebrew-core
SVN_DIR=$WORKSPACE/dist/dev/geode
# macOS ships shasum; most Linux distros ship sha256sum instead.
if which shasum >/dev/null; then
    SHASUM="shasum -a 256"
else
    SHASUM=sha256sum
fi
set +x
# Until the builds are done, a failure is safe to retry from the top.
function failMsg1 {
    echo "ERROR: script did NOT complete successfully. Please try again."
}
trap failMsg1 ERR
echo ""
echo "============================================================"
echo "Cleaning workspace directory..."
echo "============================================================"
set -x
rm -rf $WORKSPACE
mkdir -p $WORKSPACE
cd $WORKSPACE
set +x
echo ""
echo "============================================================"
echo "Cloning repositories..."
echo "============================================================"
set -x
git clone --single-branch --branch support/${VERSION_MM} git@github.com:apache/geode.git
#if you attempt to reset to a prior SHA here, skip ${GEODE} in set_copyright.sh or it may backfire
#(cd geode; git reset --hard $desired_sha) #uncomment if latest commit is not the desired sha
git clone git@github.com:apache/geode.git geode-develop
git clone --single-branch --branch support/${VERSION_MM} git@github.com:apache/geode-examples.git
git clone --single-branch --branch support/${VERSION_MM} git@github.com:apache/geode-native.git
git clone --single-branch --branch develop git@github.com:apache/geode-native.git geode-native-develop
git clone --single-branch --branch support/${VERSION_MM} git@github.com:apache/geode-benchmarks.git
git clone --single-branch --branch master git@github.com:Homebrew/homebrew-core.git
svn checkout https://dist.apache.org/repos/dist --depth empty
svn update --set-depth immediates --parents dist/release/geode
svn update --set-depth infinity --parents dist/dev/geode
set +x
# Commit as the releaser's apache identity in every working copy.
for REPO in ${GEODE} ${WORKSPACE}/geode-develop ${GEODE_EXAMPLES} ${GEODE_NATIVE} ${GEODE_BENCHMARKS} ${BREW_DIR} ; do
    cd ${REPO}
    git config user.email "${APACHE_USERNAME}@apache.org"
done
cd ${GEODE}/../..
set -x
# ${0%/*} is the directory this script lives in; sibling helper scripts.
${0%/*}/set_copyright.sh ${GEODE} ${GEODE_EXAMPLES} ${GEODE_NATIVE} ${GEODE_BENCHMARKS}
set +x
echo ""
echo "============================================================"
echo "Keeping -build.0 suffix"
echo "============================================================"
cd ${GEODE}/../..
set -x
${0%/*}/set_versions.sh -v ${VERSION} -n -w ${WORKSPACE}
set +x
echo ""
echo "============================================================"
echo "Building geode..."
echo "============================================================"
set -x
cd ${GEODE}
git clean -fdx && ./gradlew build -x test publishToMavenLocal -Pversion=${VERSION} -Paskpass -Psigning.keyId=${SIGNING_KEY} -Psigning.secretKeyRingFile=${HOME}/.gnupg/secring.gpg
set +x
# For RC2 and later, undo the temporary staging-repo pointer that the
# previous RC's pipeline committed to geode-examples.
if [ "${FULL_VERSION##*.RC}" -gt 1 ] ; then
    echo ""
    echo "============================================================"
    echo "Removing previous RC's temporary commit from geode-examples..."
    echo "============================================================"
    set -x
    cd ${GEODE_EXAMPLES}
    git pull
    set +x
    sed -e 's#^geodeRepositoryUrl *=.*#geodeRepositoryUrl =#' \
        -e 's#^geodeReleaseUrl *=.*#geodeReleaseUrl =#' -i.bak gradle.properties
    rm gradle.properties.bak
    set -x
    git add gradle.properties
    if [ $(git diff --staged | wc -l) -gt 0 ] ; then
        git diff --staged --color | cat
        git commit -m 'Revert "temporarily point to staging repo for CI purposes"'
    fi
    set +x
fi
echo ""
echo "============================================================"
echo "Building geode-examples..."
echo "============================================================"
set -x
cd ${GEODE_EXAMPLES}
git clean -dxf && ./gradlew -Pversion=${VERSION} -PsignArchives -PgeodeReleaseUrl="file://${GEODE}/geode-assembly/build/geode-assembly/build/distributions/apache-geode-${VERSION}" -PgeodeRepositoryUrl="file://${HOME}/.m2/repository" -Psigning.keyId=${SIGNING_KEY} -Psigning.secretKeyRingFile=${HOME}/.gnupg/secring.gpg build
set +x
echo ""
echo "============================================================"
echo "Building geode-native..."
echo "============================================================"
set -x
cd ${GEODE_NATIVE}
mkdir build
which brew >/dev/null && OPENSSL_ROOT_DIR=$(brew --prefix openssl) || OPENSSL_ROOT_DIR=$(which openssl)
cd ${GEODE_NATIVE}/build
cmake .. -DPRODUCT_VERSION=${VERSION} -DOPENSSL_ROOT_DIR=$OPENSSL_ROOT_DIR -DGEODE_ROOT=${GEODE}/geode-assembly/build/install/apache-geode
cpack -G TGZ --config CPackSourceConfig.cmake
# Repackage cpack's .tar.gz so the top-level folder carries the version
# and the archive uses the .tgz extension expected downstream.
NCOUT=apache-geode-native-${VERSION}-src.tar.gz
NCTGZ=apache-geode-native-${VERSION}-src.tgz
mkdir repkg-temp
cd repkg-temp
tar xzf ../${NCOUT}
rm ../${NCOUT}*
mv apache-geode-native apache-geode-native-${VERSION}-src
tar czf ../${NCTGZ} *
cd ..
rm -Rf repkg-temp
gpg --armor -u ${SIGNING_KEY} -b ${NCTGZ}
${SHASUM} ${NCTGZ} > ${NCTGZ}.sha256
set +x
echo ""
echo "============================================================"
echo "Building geode-benchmarks..."
echo "============================================================"
set -x
cd ${GEODE_BENCHMARKS}
BMDIR=apache-geode-benchmarks-${VERSION}-src
BMTAR=${BMDIR}.tgz
git clean -dxf
mkdir ../${BMDIR}
cp -r .travis.yml * ../${BMDIR}
tar czf ${BMTAR} -C .. ${BMDIR}
rm -Rf ../${BMDIR}
gpg --armor -u ${SIGNING_KEY} -b ${BMTAR}
${SHASUM} ${BMTAR} > ${BMTAR}.sha256
set +x
# From here on, side effects accumulate (tags, svn adds, nexus upload),
# so a retry must skip the steps that already ran.
function failMsg2 {
    errln=$1
    echo "ERROR: script did NOT complete successfully"
    echo "Comment out any steps that already succeeded (approximately lines 120-$(( errln - 1 ))) and try again"
    echo "For this script only (prepare_rc.sh), it's also safe to just try again from the top"
}
trap 'failMsg2 $LINENO' ERR
echo ""
echo "============================================================"
echo "Tagging the release candidate in each repository. The tags will not be pushed yet..."
echo "============================================================"
for DIR in ${GEODE} ${GEODE_EXAMPLES} ${GEODE_NATIVE} ${GEODE_BENCHMARKS} ; do
    set -x
    cd ${DIR}
    git tag -s -u ${SIGNING_KEY} rel/v${FULL_VERSION} -m "Release candidate ${FULL_VERSION}"
    set +x
done
echo ""
echo "============================================================"
echo "Copying artifacts to svn directory for publication. The artifacts will not be committed..."
echo "============================================================"
set -x
cd ${SVN_DIR}
# Drop any earlier RC staging dirs for this version (ignore if none exist).
svn rm ${VERSION}.RC* &>/dev/null || true
cp ${GEODE}/KEYS .
mkdir ${FULL_VERSION}
cp ${GEODE}/geode-assembly/build/distributions/* ${FULL_VERSION}
cp ${GEODE_EXAMPLES}/build/distributions/* ${FULL_VERSION}
cp ${GEODE_NATIVE}/build/apache-geode-native-${VERSION}* ${FULL_VERSION}
cp ${GEODE_BENCHMARKS}/apache-geode-benchmarks-${VERSION}* ${FULL_VERSION}
set +x
# verify all files are signed. sometimes gradle "forgets" to make the .asc file
for f in ${FULL_VERSION}/*.tgz ; do
    if ! [ -r $f.sha256 ] ; then
        echo missing $f.sha256
        exit 1
    fi
    if ! [ -r $f.asc ] ; then
        set -x
        gpg --armor -u ${SIGNING_KEY} -b $f
        set +x
        if ! [ -r $f.asc ] ; then
            echo missing $f.asc
            exit 1
        fi
    fi
    # Sanity-check artifact size; a tiny file suggests a truncated build.
    size=$(ls -l $f | awk '{print $5}')
    if [ $size -lt 10000 ] ; then
        echo $f file size is only $size bytes, that seems suspicious.
        exit 1
    fi
done
set -x
svn add ${FULL_VERSION}
set +x
echo ""
echo "============================================================"
echo "Publishing artifacts to nexus staging manager..."
echo "PLEASE NOTE, the 2nd prompt will be for your apache (not gpg) password. Pay attention as the prompts look very similar."
echo "============================================================"
set -x
cd ${GEODE}
./gradlew publish -Pversion=${VERSION} -Paskpass -Psigning.keyId=${SIGNING_KEY} -Psigning.secretKeyRingFile=${HOME}/.gnupg/secring.gpg -PmavenUsername=${APACHE_USERNAME}
set +x
echo ""
echo "============================================================"
echo "Done preparing the release and staging to nexus! Next steps:"
echo "============================================================"
cd ${GEODE}/../..
echo "1. Go to https://repository.apache.org, login as ${APACHE_USERNAME}, and click on Staging Repositories"
echo "2. If there is a prior ${VERSION} RC, select it and click Drop."
echo '3. Make a note of the 4-digit ID of the current ("implicitly created") staging repo.'
echo '4. Select the current staging repo and click Close.'
echo '5. Wait ~10 seconds and then refresh the page to confirm that status has become "Closed"'
echo "6. Run ${0%/*}/commit_rc.sh -v ${FULL_VERSION} -m <4-DIGIT-ID-NOTED-ABOVE>"
|
import React from 'react';
import { AppRegistry, View, StyleSheet } from 'react-native';
import { AppLoading, Location, Permissions } from 'expo';
export default class LocationTracking extends React.Component {
state = {
location: null,
errorMessage: null,
};
componentWillMount() {
this._getLocationAsync();
}
_getLocationAsync = async () => {
let { status } = await Permissions.askAsync(Permissions.LOCATION);
if (status !== 'granted') {
this.setState({
errorMessage: 'Permission to access location was denied',
});
}
let location = await Location.getCurrentPositionAsync({});
this.setState({ location });
};
render() {
return (
<View style={styles.container}>
</View>
);
}
}
// Root view styling: fill the screen with a white background.
const containerStyle = {
  flex: 1,
  backgroundColor: '#fff',
};
const styles = StyleSheet.create({ container: containerStyle });
// Register the root component with React Native.
AppRegistry.registerComponent('LocationTracking', () => LocationTracking);
#!/bin/bash -e
# Copyright (c) Facebook, Inc. and its affiliates.
# Run all formatters/linters over the repository, pinned to exact versions
# so CI and local runs produce identical output.

# cd to detectron2 project root
cd "$(dirname "${BASH_SOURCE[0]}")/.."

{
  black --version | grep -E "21\." > /dev/null
} || {
  echo "Linter requires 'black==21.*' !"
  exit 1
}

ISORT_VERSION=$(isort --version-number)
if [[ "$ISORT_VERSION" != 4.3* ]]; then
  echo "Linter requires isort==4.3.21 !"
  exit 1
fi

set -v

echo "Running isort ..."
isort -y -sp . --atomic

echo "Running black ..."
black -l 100 .

echo "Running flake8 ..."
if [ -x "$(command -v flake8-3)" ]; then
  flake8-3 .
else
  python3 -m flake8 .
fi

# echo "Running mypy ..."
# Pytorch does not have enough type annotations
# mypy detectron2/solver detectron2/structures detectron2/config

echo "Running clang-format ..."
find . -regex ".*\.\(cpp\|c\|cc\|cu\|cxx\|h\|hh\|hpp\|hxx\|tcc\|mm\|m\)" -print0 | xargs -0 clang-format -i

# Run arc lint only when arc is installed. The previous
# `command -v arc > /dev/null && arc lint` form made the whole script exit
# with a non-zero status whenever arc was absent (it was the last command).
if command -v arc > /dev/null; then
  arc lint
fi
|
#!/bin/tcsh -f
# Build per-sensor, per-energy-band sky images from simulated XIS event files
# by driving xselect through a here-document for every cluster directory.
foreach d ( ../../evt/*_cl )
# Derive the filename prefix and the cluster name from the directory name.
set base=`ls -1 $d/ | head -1 | sed 's/xi.*//'`
set name=`echo $d | sed 's/_cl//' | sed 's/..\/..\/evt\///'`
echo "name=$name, base=${base}"
# XIS sensor ids (sensor 2 is intentionally skipped in the active list).
foreach i ( 0 1 3 )
#foreach i ( 0 1 2 3 )
#foreach e ( 1 )
# Energy-band index; ph_lo/ph_hi/ene below are parallel arrays indexed by $e.
foreach e ( 1 2 3 4 5 )
#set ph_lo = ( 109 )
set ph_lo = ( 109 547 1369 109 136 )
#set ph_lo = ( 136 273 547 136 136 )
#set ph_hi = ( 2739 )
set ph_hi = ( 547 1369 2739 2739 1370 )
#set ph_hi = ( 273 547 2739 2739 1369 )
#set ene = ( "0.4_10" )
# Energy band labels (keV ranges) used in the output image file names.
set ene = ( "0.4_2" "2_5" "5_10" "0.4_10.0" "0.5_2.0" )
#set ene = ( "0.5_1" "1_2" "2_10" "0.5_10.0" "0.5_5.0")
set evtfile = sim_xis${i}_gal.evt
#set evtfile = sim-x${i}-gal-${name}.evt.gz
#set imgfile = sim_xis${i}_gal_nocal.img
set imgfile = sim_xis${i}_gal_${ene[$e]}_nocal.img
#set imgfile = sim-x${i}-gal-${name}-${ene[$e]}-cal.img
rm -f ${imgfile}
# Feed xselect an interactive session: select the PHA band, filter on
# status, bin at full resolution in SKY coordinates and save the image.
xselect <<EOF
xis${i}
read events $evtfile
./
yes
filter pha_cutoff ${ph_lo[$e]} ${ph_hi[$e]}
filter column "status=0:65535"
set xybin 1
set image SKY
extract image
save image ${imgfile}
exit
no
EOF
end
end
end
#filter column "status=0:65535"
#filter region ../reg/a2104ps.reg
|
import pandas as pd

# Raw purchase records: one row per (customer, month) observation.
records = [
    (1, 1, 40000),
    (2, 1, 10000),
    (3, 1, 20000),
    (1, 2, 30000),
    (2, 2, 15000),
    (3, 2, 50000),
]
df = pd.DataFrame(records, columns=["customer_id", "month", "amount"])

# Average purchase amount per customer, largest first, top five only.
result = (
    df.groupby("customer_id")["amount"]
    .mean()
    .sort_values(ascending=False)
    .head(5)
)

# Print the result
print(result)
#!/bin/sh
# Prepare a parallel corpus: tokenize and truecase the train/dev/test splits
# for one language pair using the moses helper scripts.
#
# Usage: prepare.sh SCRIPTS_PATH ROOT_FOLDER SRC_LANG TGT_LANG
SCRIPTS_PATH=$1
ROOT_FOLDER=$2
SRC_LANG=$3
TGT_LANG=$4

# Guard the cd: if the language-pair folder is missing, every subsequent
# command would otherwise silently run in the wrong directory.
cd "${ROOT_FOLDER}/${SRC_LANG}-${TGT_LANG}" || exit 1

# tokenize training dev and test set
for kind in train dev test
do
    "$SCRIPTS_PATH/moses-tokenize.sh" \
        "$SRC_LANG" \
        "${kind}/${kind}.${SRC_LANG}" \
        "${kind}/${kind}.tok.${SRC_LANG}"
    "$SCRIPTS_PATH/moses-tokenize.sh" \
        "$TGT_LANG" \
        "${kind}/${kind}.${TGT_LANG}" \
        "${kind}/${kind}.tok.${TGT_LANG}"
done

# train truecaser on train set
"$SCRIPTS_PATH/moses-train-truecaser.sh" \
    "${TGT_LANG}-truecaser.model" \
    "train/train.tok.${TGT_LANG}"
"$SCRIPTS_PATH/moses-train-truecaser.sh" \
    "${SRC_LANG}-truecaser.model" \
    "train/train.tok.${SRC_LANG}"

# truecase train dev and test set
for kind in train dev test
do
    "$SCRIPTS_PATH/moses-truecase.sh" \
        "${TGT_LANG}-truecaser.model" \
        "${kind}/${kind}.tok.${TGT_LANG}" \
        "${kind}/${kind}.tok.true.${TGT_LANG}"
    "$SCRIPTS_PATH/moses-truecase.sh" \
        "${SRC_LANG}-truecaser.model" \
        "${kind}/${kind}.tok.${SRC_LANG}" \
        "${kind}/${kind}.tok.true.${SRC_LANG}"
done
|
<gh_stars>0
# coding=utf-8
from .. import BaseProvider
import itertools
class Provider(BaseProvider):
    """
    This provider is a collection of functions to generate personal profiles and identities.
    """

    def simple_profile(self, sex=None):
        """
        Generate a basic profile with personal information.

        ``sex`` may be "F" or "M"; any other value is replaced by a random pick.
        """
        valid_sexes = ["F", "M"]
        if sex not in valid_sexes:
            sex = self.random_element(valid_sexes)
        if sex == 'F':
            name = self.generator.name_female()
        elif sex == 'M':
            name = self.generator.name_male()
        profile = {
            "username": self.generator.user_name(),
            "name": name,
            "sex": sex,
            "address": self.generator.address(),
            "mail": self.generator.free_email(),
            #"password":<PASSWORD>()
            "birthdate": self.generator.date(),
        }
        return profile

    def profile(self, fields=None, sex=None):
        """
        Generate a complete profile.

        If "fields" is a non-empty list, only those keys are returned.
        """
        if fields is None:
            fields = []
        # Blood group is a random (type, rhesus) pair joined into e.g. "AB+".
        blood_pair = self.random_element(list(itertools.product(["A", "B", "AB", "0"], ["+", "-"])))
        full = {
            "job": self.generator.job(),
            "company": self.generator.company(),
            "ssn": self.generator.ssn(),
            "residence": self.generator.address(),
            "current_location": (self.generator.latitude(), self.generator.longitude()),
            "blood_group": "".join(blood_pair),
            "website": [self.generator.url() for _ in range(1, self.random_int(2, 5))],
        }
        # Merge in the basic personal fields.
        full.update(self.generator.simple_profile(sex))
        # Restrict to the requested fields, if any were given.
        if fields:
            full = {k: v for k, v in full.items() if k in fields}
        return full
|
<gh_stars>0
(function (win, doc, undefined) {
// Show a transient toast message inside the #toaster container.
// `type` selects the bootstrap alert style ("success" or "error");
// the toast auto-dismisses after 3s or when its close icon is clicked.
win.toaster = function(message, type) {
var toaster = $("#toaster");
toaster.append('<div class="toast-item"><div class="message">' + message + '</div>' +
'<i class="close fa fa-close"></i></div>');
var thisItem = toaster.children().last();
// Manual dismissal via the close icon.
$(thisItem.children(".close").eq(0)).bind("click", function () {
thisItem.slideUp(function() {
thisItem.remove();
});
});
if (type == "success") thisItem.addClass("alert alert-success");
else if (type == "error") thisItem.addClass("alert alert-danger");
thisItem.fadeIn();
// Auto-dismiss after 3 seconds.
setTimeout(function() {
thisItem.slideUp(function() {
thisItem.remove();
});
}, 3000);
};
// Cache of compiled template functions, keyed by the raw template string.
win.tmplCache = {};
// Micro-templating: compiles "<%= expr %>" / "<% stmt %>" templates into a
// Function whose parameters are the keys of `args`, then invokes it.
win.tmpl = function(strTmpl, args) {
var argNames = [];
var argValues = [];
for (var a in args) {
argNames.push(a);
argValues.push(args[a]);
}
// Reuse a cached compiled function when available; otherwise compile now.
var funcs = win.tmplCache[strTmpl] || function() {
var f = [ 'var __out__ = [];' ];
strTmpl.replace(/<%=([\d\D]*?)%>|<%([\d\D]*?)%>|([\d\D]+?)(?=<\%|$)/g, function($0, $1, $2, $3) {
if ($3) {
// Literal text: escape/unescape round-trip keeps quotes/newlines safe.
f.push('__out__.push(unescape("', escape($3), '"));');
} else if ($1) {
// <%= expr %>: emit the expression's value.
f.push('__out__.push(', $1, ');');
} else if ($2) {
// <% stmt %>: emit the raw statement.
f.push($2, ';');
}
});
f.push('return __out__.join("")');
return new Function(argNames.join(', '), f.join(''));
}();
win.tmplCache[strTmpl] = funcs;
return funcs.apply(args||{}, argValues);
};
// Show a modal confirm dialog (bootstrap). `handler` runs on confirm,
// `cancelHandler` on cancel; only one dialog may be open at a time.
win.showConfirm = function(text, title, handler, cancelHandler) {
var dgEl = $('.g_confirm'),
html = '';
// A dialog is already open — do nothing.
if (dgEl.length) {
return;
}
html = '<div class="g_confirm modal fade" style="z-index:9999;display:block">' +
'<div class="modal-dialog modal-sm">' +
'<div class="modal-content">' +
'<div class="modal-header">' +
'<button type="button" class="close" data-action="cancel"><span data-action="cancel">×</span></button>' +
'<h4 class="modal-title">' + (title || '<span class="fa fa-info" aria-hidden="true"></span>') + '</h4>' +
'</div>' +
'<div class="modal-body">' +
'<div class="row">' +
'<div class="col-md-12">' + text + '</div>' +
'</div>' +
'</div>' +
'<div class="modal-footer">' +
'<button type="button" class="btn btn-default" data-action="cancel">取消</button>' +
'<button type="button" class="btn btn-primary" data-action="confirm">确定</button>' +
'</div>' +
'</div>' +
'</div>' +
'</div>';
dgEl = $(html);
dgEl.appendTo(doc.body);
dgEl.modal({
keyboard: false,
backdrop: 'static'
});
// Delegate clicks via data-action so both buttons and the close icon work.
dgEl.on('click', function(e) {
var target = $(e.target),
action = target.attr('data-action');
if (action == 'cancel') {
dgEl.modal('hide');
if (typeof cancelHandler == 'function') {
cancelHandler();
}
} else if (action == 'confirm') {
dgEl.modal('hide');
if (typeof handler == 'function') {
handler();
}
}
});
// Remove the DOM node once the hide animation has finished.
dgEl.on('hidden.bs.modal', function() {
dgEl.remove();
});
};
// Format a unix timestamp (seconds) as "YYYY-MM-DD HH:MM:SS" in local time.
win.formatDateTime = function(d) {
    // Zero-pad a value to two digits.
    var pad = function(num) {
        return (num < 10 ? '0' : '') + num;
    };
    var date = new Date(d * 1000);
    var ymd = [date.getFullYear(), pad(date.getMonth() + 1), pad(date.getDate())].join('-');
    var hms = [pad(date.getHours()), pad(date.getMinutes()), pad(date.getSeconds())].join(':');
    return ymd + ' ' + hms;
};
// Chinese localisation strings for the DataTables plugin.
win.dataTableChinese = {
"sProcessing": "处理中...",
"sLengthMenu": "显示 _MENU_ 项结果",
"sZeroRecords": "没有匹配结果",
"sInfo": "显示第 _START_ 至 _END_ 项结果,共 _TOTAL_ 项",
"sInfoEmpty": "显示第 0 至 0 项结果,共 0 项",
"sInfoFiltered": "(由 _MAX_ 项结果过滤)",
"sInfoPostFix": "",
"sSearch": "搜索:",
"sUrl": "",
"sEmptyTable": "表中数据为空",
"sLoadingRecords": "载入中...",
"sInfoThousands": ",",
"oPaginate": {
"sFirst": "首页",
"sPrevious": "上页",
"sNext": "下页",
"sLast": "末页"
},
"oAria": {
"sSortAscending": ": 以升序排列此列",
"sSortDescending": ": 以降序排列此列"
}
}
})(window, document);
$(document).ready(function () {
// Logout button: call the auth API, toast the outcome, then redirect
// to the login page after a short delay.
$('#js-btn-logout').on('click', function () {
$.ajax({
type: 'GET',
url: $('#prefixUrl').val() + '/api/auth/logout',
success: function(ret) {
// code 0 is the API's success indicator.
if (ret.code == 0) {
toaster('登出成功' , 'success');
setTimeout(function () {
window.location.href = $('#prefixUrl').val() + '/login';
}, 1000);
} else {
toaster(ret.msg || '系统繁忙' , 'error');
}
},
error: function() {
toaster('系统繁忙', "error");
}
});
});
});
// Ambient module declaration so TypeScript accepts imports of .mdx files.
declare module "*.mdx" {
  // The compiled MDX document as a Preact component.
  let MDXComponent: (props) => preact.JSX.Element;
  export default MDXComponent;
  // Front-matter metadata exported alongside the component.
  export const metadata: {
    title: string;
    author: string;
    tags: string[];
    timestamp: Date;
  };
}
|
# Install node dependencies, refresh bower packages, then run the gulp build
# and manifest generation using the locally-installed tool binaries.
npm install
node ./node_modules/bower/bin/bower update
node ./node_modules/gulp/bin/gulp.js build
node ./node_modules/gulp/bin/gulp.js manifest
from typing import Tuple
def scale_point(p: Tuple[float, float], scale: float) -> Tuple[float, float]:
    """Return point ``p`` with both coordinates multiplied by ``scale``."""
    return (p[0] * scale, p[1] * scale)
#!/bin/bash
# Convert dicom-like images to nii files in 3D
# This is the first step for image pre-processing
# NOTE: the shebang was "#!bin/bash" (relative path) which made the script
# unrunnable as an executable; fixed to an absolute path.

# Feed path to the downloaded data here
DATAPATH=./MR # please put chaos dataset training fold here which contains ground truth
# Feed path to the output folder here
OUTPATH=./niis

# Create the output folder (idempotent; -p tolerates an existing directory).
mkdir -p "$OUTPATH/T2SPIR"

# Convert every subject's T2SPIR DICOM series and move the result into place.
for sid in $(ls "$DATAPATH")
do
    dcm2nii -o "$DATAPATH/$sid/T2SPIR" "$DATAPATH/$sid/T2SPIR/DICOM_anon";
    find "$DATAPATH/$sid/T2SPIR" -name "*.nii.gz" -exec mv {} "$OUTPATH/T2SPIR/image_$sid.nii.gz" \;
done;
|
package org.glamey.training.spring.loading;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
/**
 * Demo bean illustrating the Spring lifecycle callbacks: initialization via
 * {@link InitializingBean#afterPropertiesSet()} and teardown via
 * {@link DisposableBean#destroy()}.
 *
 * @author zhouyang.zhou. 2017.08.30.16.
 */
public class ScopeDemo implements InitializingBean, DisposableBean {

    /** Message injected by the container and echoed by {@link #process()}. */
    private String message;

    /** Business method: prints the configured message. */
    public void process() {
        System.out.println("bean process..." + message);
    }

    @Override
    public void afterPropertiesSet() throws Exception {
        System.out.println("bean init...");
    }

    @Override
    public void destroy() throws Exception {
        System.out.println("bean destroy...");
    }

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }
}
|
<reponame>tellery/blockchain-spark<filename>java/src/test/java/io/iftech/sparkudf/hive/DecodeContractEventHiveUDFTest.java
package io.iftech.sparkudf.hive;
import static org.junit.Assert.assertEquals;
import com.google.common.collect.ImmutableList;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import io.iftech.sparkudf.Mocks.ContractEvent;
import io.iftech.sparkudf.Mocks.EventField;
import io.iftech.sparkudf.converter.Converter;
import java.util.List;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.junit.Test;
// Unit test for the Hive UDF that decodes EVM contract event logs.
@SuppressWarnings("unchecked")
public class DecodeContractEventHiveUDFTest {
Gson gson = new GsonBuilder().create();
// Decodes an ERC-20 Transfer(address,address,uint256) log entry and checks
// the "from", "to" and "value" fields extracted from data + topics.
@Test
public void testDecodeContractEvent() throws HiveException {
// ABI definition of the event being decoded ("from"/"to" are indexed).
ContractEvent e = new ContractEvent("Transfer");
e.inputs = ImmutableList.of(
new EventField("from", "address", true),
new EventField("to", "address", true),
new EventField("value", "uint256"));
DecodeContractEventHiveImpl impl = new DecodeContractEventHiveImpl();
// Mock input object inspector
// Arguments: (data bytes, topics list, event ABI json, event name).
BinaryObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.javaByteArrayObjectInspector;
ListObjectInspector valueOI2 = ObjectInspectorFactory.getStandardListObjectInspector(
PrimitiveObjectInspectorFactory.javaStringObjectInspector);
ObjectInspector valueOI3 = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
ObjectInspector valueOI4 = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
ObjectInspector[] arguments = {valueOI1, valueOI2, valueOI3, valueOI4};
StandardStructObjectInspector resultOI = (StandardStructObjectInspector) impl.initialize(
arguments);
// Mock input data
// Non-indexed data word: the transferred amount (uint256).
DeferredObject valueDf1 = new DeferredJavaObject(
Converter.decodeHexStartsWith0x(
"0x000000000000000000000000000000000000000000003b23f6365b3fabec0000")
);
// Topics: [event signature hash, from address, to address].
DeferredObject valueDf2 = new DeferredJavaObject(
ImmutableList.of(
"0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef",
"0x000000000000000000000000b3f923eabaf178fc1bd8e13902fc5c61d3ddef5b",
"0x00000000000000000000000028c6c06298d514db089934071355e5743bf21d60"
)
);
DeferredObject valueDf3 = new DeferredJavaObject(gson.toJson(e));
DeferredObject valueDf4 = new DeferredJavaObject("Transfer");
DeferredObject[] args = {valueDf1, valueDf2, valueDf3, valueDf4};
List<Object> result = (List<Object>) impl.evaluate(args);
// Parse output data by output inspector and check it
StandardStructObjectInspector inputOI = (StandardStructObjectInspector) resultOI.getStructFieldRef(
"input").getFieldObjectInspector();
List<Object> inputData = (List<Object>) resultOI.getStructFieldData(result,
resultOI.getStructFieldRef("input"));
List<Object> resultData = inputOI.getStructFieldsDataAsList(inputData);
assertEquals("0xb3f923eabaf178fc1bd8e13902fc5c61d3ddef5b", resultData.get(0).toString());
assertEquals("0x28c6c06298d514db089934071355e5743bf21d60", resultData.get(1).toString());
assertEquals("279283000000000000000000", resultData.get(2).toString());
}
}
|
#!/bin/sh
# ensure libpcap is installed
package=libpcap

# "$1" is quoted so the test does not become a syntax error when the script
# is invoked without an OS argument.
if [ "$1" = "mac" ]; then
    brew install libpcap
elif [ "$1" = "linux" ]; then
    apt_cmd=$(which apt-get)     # Debian based distro
    pacman_cmd=$(which pacman)   # Arch based distro
    yum_cmd=$(which yum)         # Red Hat based distro
    zypper_cmd=$(which zypper)   # OpenSuse based distro
    portage_cmd=$(which emerge)  # Gentoo based distro

    # Use the first package manager found to install the libpcap package.
    if [ -n "$apt_cmd" ]; then
        apt update -y
        apt install -y libpcap-dev
    elif [ -n "$pacman_cmd" ]; then
        pacman -Sy libpcap --noconfirm
    elif [ -n "$yum_cmd" ]; then
        yum update -y
        yum -y install libpcap
    elif [ -n "$zypper_cmd" ]; then
        zypper --non-interactive install libpcap
    elif [ -n "$portage_cmd" ]; then
        emerge net-libs/libpcap
    else
        echo "Error installing $package. Package manager not supported in setup script, you are on your own! Ending build..."
        exit 1
    fi
else
    echo "OS not supported in setup script. Ending build..."
    exit 2
fi
exit 0
|
#! /bin/bash
# Clean up artifacts from a previous test run.
rm -f logfile.txt
rm -rf tmp/
# Drop the test database on the local mongo instance (port 45555).
mongo --quiet localhost:45555/db --eval 'db.dropDatabase()'
|
// User class definition
/** Immutable holder for a user's display name. */
class User {

    private final String name;

    /** Creates a user with the given display name. */
    public User(String name) {
        this.name = name;
    }

    /** @return the user's display name */
    public String getName() {
        return name;
    }
}
// QuizScorer class definition
/** Accumulates quiz points for a single user. */
class QuizScorer {

    /** Running total of points awarded so far. */
    private Integer score;
    /** The user whose quiz is being scored. */
    private final User user;

    /** Starts a new scorer for the given user with a zero score. */
    public QuizScorer(User user) {
        this.user = user;
        this.score = 0;
    }

    /** Adds the given number of points to the running total. */
    public void addScore(int points) {
        score = score + points;
    }

    /** @return the user being scored */
    public User getUser() {
        return user;
    }

    /** @return the current total score */
    public Integer getScore() {
        return score;
    }
}
// Example usage
public class Main {
public static void main(String[] args) {
User user = new User("John Doe");
QuizScorer quizScorer = new QuizScorer(user);
quizScorer.addScore(10);
quizScorer.addScore(5);
System.out.println("User: " + quizScorer.getUser().getName());
System.out.println("Score: " + quizScorer.getScore());
}
} |
<gh_stars>1-10
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended to check this file into your version control system.
ActiveRecord::Schema.define(:version => 20100824200944) do
# NOTE: this schema is auto-generated by Active Record; change the database
# through migrations rather than by editing this file directly.
# Backend administrator accounts with role-based access.
create_table "admin_users", :force => true do |t|
t.string "first_name", :default => "", :null => false
t.string "last_name", :default => "", :null => false
t.string "role", :null => false
t.string "email", :null => false
t.boolean "status", :default => false
t.string "token", :null => false
t.string "salt", :null => false
t.string "crypted_password", :null => false
t.string "preferences"
t.datetime "created_at"
t.datetime "updated_at"
end
# Simple key/value application configuration store.
create_table "configurations", :force => true do |t|
t.string "name"
t.string "value"
t.string "type"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "configurations", ["name"], :name => "index_configurations_on_name"
# Links aggregated from feeds, with bookmark counts and source tags.
create_table "links", :force => true do |t|
t.string "url"
t.string "title"
t.text "summary"
t.datetime "created_at"
t.datetime "updated_at"
t.integer "bookmarks", :default => 0
t.datetime "last_seen_in_feed", :default => '2010-05-10 10:34:35'
t.text "source_tags"
end
add_index "links", ["url"], :name => "index_links_on_url"
# Authlogic-style super-user accounts.
create_table "super_users", :force => true do |t|
t.string "login"
t.string "email"
t.string "crypted_password"
t.string "password_salt"
t.string "persistence_token"
t.integer "login_count"
t.datetime "last_request_at"
t.datetime "last_login_at"
t.datetime "current_login_at"
t.string "last_login_ip"
t.string "current_login_ip"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "super_users", ["email"], :name => "index_super_users_on_email"
add_index "super_users", ["login"], :name => "index_super_users_on_login"
# Join table: which tags are attached to which links.
create_table "tagged_links", :force => true do |t|
t.integer "tag_id"
t.integer "link_id"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "tagged_links", ["link_id"], :name => "index_tagged_links_on_link_id"
add_index "tagged_links", ["tag_id"], :name => "index_tagged_links_on_tag_id"
# Join table: a user's ordered tag subscriptions.
create_table "taggings", :force => true do |t|
t.integer "user_id"
t.integer "tag_id"
t.integer "position"
t.datetime "created_at"
t.datetime "updated_at"
t.datetime "last_seen_at"
end
add_index "taggings", ["tag_id"], :name => "index_taggings_on_tag_id"
add_index "taggings", ["user_id"], :name => "index_taggings_on_user_id"
create_table "tags", :force => true do |t|
t.string "name"
t.datetime "created_at"
t.datetime "updated_at"
t.integer "taggings_count"
end
add_index "tags", ["name"], :name => "index_tags_on_name"
# Regular user accounts (Authlogic fields plus app-specific timestamps).
create_table "users", :force => true do |t|
t.string "login"
t.string "email"
t.string "crypted_password"
t.string "password_salt"
t.string "persistence_token"
t.integer "login_count"
t.datetime "last_request_at"
t.datetime "last_login_at"
t.datetime "current_login_at"
t.string "last_login_ip"
t.string "current_login_ip"
t.datetime "created_at"
t.datetime "updated_at"
t.string "perishable_token", :default => "", :null => false
t.datetime "last_viewed_tags_index_at"
end
add_index "users", ["email"], :name => "index_users_on_email"
add_index "users", ["login"], :name => "index_users_on_login"
add_index "users", ["perishable_token"], :name => "index_users_on_perishable_token"
end
|
<reponame>mdcarter/nextlaunch.io
const { send } = require('micro')
const fetch = require('node-fetch')
const { redis } = require('../connectors')
// Map of orbit abbreviations to human-readable names.
const orbits = {
'ISS': 'International Space Station',
'LEO': 'Low Earth',
'PO': 'Polar',
'GTO': 'Geosynchronous Transfer'
}
// HTTP handler: returns upcoming launches, reshaped for the frontend.
// Responses are cached in redis for 60 seconds to throttle upstream calls.
module.exports = async (_, res) => {
const url = `${process.env.DATA_URL}/launches/upcoming`
const key = 'nextlaunch:upcoming'
// Try the cache first; fall back to fetching and repopulating it.
let flights = JSON.parse(await redis.get(key))
if (!flights) {
const results = await fetch(url)
flights = await results.json()
await redis.set(key, JSON.stringify(flights))
await redis.expire(key, 60)
}
send(res, 200, flights.map(flight => {
const payloads = flight.rocket.second_stage.payloads || []
const cores = flight.rocket.first_stage.cores || []
// Number of first-stage cores that are flight-proven.
const reused = cores.reduce((condition, core) => condition + (core.reused ? 1 : 0), 0)
const landings = cores.filter(core => core.landing_vehicle)
return {
id: flight.flight_number,
// Comma-joined payload ids; substr(2) strips the leading ", ".
name: payloads.reduce((name, payload) => `${name}, ${payload.payload_id}`, '').substr(2),
date: new Date(flight.launch_date_utc).getTime(),
video: flight.links.video_link || null,
patch: flight.links.mission_patch || null,
rocket: {
id: flight.rocket.rocket_id,
name: flight.rocket.rocket_name,
condition: reused === 0 ? 'new' : reused === cores.length ? 'reused' : 'partially_reused',
blocks: cores.reduce((blocks, core) => `${blocks}, ${core.block}`, '').substr(2),
landing: landings.length ? landings.map(core => core.landing_vehicle) : false,
cores: cores.length
},
payload: {
// De-duplicated orbits, expanded to readable names where known.
orbit: [...new Set(payloads.map(load => load.orbit))].reduce((orbit, load) => `${orbit}, ${orbits[load] || load}`, '').substr(2),
customers: payloads.reduce((customers, load) => `${customers}, ${load.customers.join(', ')}`, '').substr(2)
}
}
}))
}
|
<?php
// Contract for repository-style operations applied to a model.
interface RepositoryInterface {
    public function apply($model);
}

// Concrete repository that appends a marker element to an array model.
class SampleRepository implements RepositoryInterface {
    public function apply($model) {
        // Implement a meaningful operation using the $model
        // For example, let's assume $model is an array and we want to add a new element to it
        $model[] = "new element";
        return $model;
    }
}

// Usage of the implemented class
$data = [1, 2, 3];
$repo = new SampleRepository();
$result = $repo->apply($data);
print_r($result); // Output: Array ( [0] => 1 [1] => 2 [2] => 3 [3] => new element )
?>
# Evaluate the shuffled-training model #7 on the wikitext-103 validation set,
# using the shuffle-sentences augmentation and last-sixth evaluation function.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+512+512-shuffled/7-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+512+512-shuffled/7-1024+0+512-SS-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_sentences_first_two_thirds_sixth --eval_function last_sixth_eval
def findPair(lst, target):
    """Find the first pair of values in ``lst`` (by index order) that sums to ``target``.

    Prints the pair when found and returns it as a tuple ``(a, b)``;
    returns ``None`` when no such pair exists. The original version did
    ``return print(...)``, which always returned ``None``.
    """
    for i in range(len(lst)):
        for j in range(i + 1, len(lst)):
            if lst[i] + lst[j] == target:
                print(lst[i], ",", lst[j])
                # Return the pair itself so callers can use the result.
                return (lst[i], lst[j])
    return None
# Demo: find a pair summing to 10 (renamed to avoid shadowing builtin ``list``).
numbers = [2, 4, 6, 8]
target = 10
findPair(numbers, target)
package chengweiou.universe.leob.model;
import chengweiou.universe.blackhole.model.RestCode;
/**
 * Project-specific REST result codes for the leob module.
 */
public enum ProjectRestCode implements RestCode {
// Signals that the target entity already exists.
EXISTS,
;
}
|
<filename>src/templates/PostListTemplate.tsx<gh_stars>0
import React from "react";
import Layout from "../components/Layout";
import SEO from "../components/Seo";
import Profile from "../components/Profile";
import Categories from "../components/Categories";
import PostList from "../components/PostList";
import { IPostListContext, ITemplateProps } from "../interfaces";
type IPostListTemplateProps = ITemplateProps<IPostListContext>;

/**
 * Page template rendering the post list for a single category,
 * alongside the profile and category navigation.
 */
const PostListTemplate: React.FC<IPostListTemplateProps> = React.memo(
  ({ pageContext }) => {
    const { category, nodes } = pageContext;
    return (
      <Layout>
        <SEO title={category} lang="" />
        <Profile />
        <Categories />
        <PostList nodes={nodes} />
      </Layout>
    );
  }
);

export default PostListTemplate;
|
/* SPDX-License-Identifier: Apache-2.0 */
/* Copyright Contributors to the ODPi Egeria project. */
package org.odpi.openmetadata.userinterface.uichassis.springboot;
import org.odpi.openmetadata.accessservices.assetcatalog.AssetCatalog;
import org.odpi.openmetadata.accessservices.glossaryview.client.GlossaryViewClient;
import org.odpi.openmetadata.governanceservers.openlineage.client.OpenLineageClient;
import org.odpi.openmetadata.http.HttpHelper;
import org.odpi.openmetadata.userinterface.uichassis.springboot.auth.AuthService;
import org.odpi.openmetadata.userinterface.uichassis.springboot.auth.SessionAuthService;
import org.odpi.openmetadata.userinterface.uichassis.springboot.auth.TokenAuthService;
import org.odpi.openmetadata.userinterface.uichassis.springboot.service.ComponentService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.netflix.zuul.EnableZuulProxy;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.env.Environment;
import javax.annotation.PostConstruct;
/**
 * Spring Boot chassis for the Egeria UI: wires the Zuul proxy, the OMAS
 * clients (asset catalog, glossary view, open lineage) and the auth service.
 */
@EnableZuulProxy
@SpringBootApplication
@ComponentScan({"org.odpi.openmetadata.*"})
@Configuration
@EnableConfigurationProperties(ComponentService.class)
public class EgeriaUIPlatform {
private static final Logger LOG = LoggerFactory.getLogger(EgeriaUIPlatform.class);
@Autowired
private Environment env;
// When false, TLS certificate verification is disabled for outbound calls.
@Value("${strict.ssl}")
Boolean strictSSL;
public static void main(String[] args) {
SpringApplication.run(EgeriaUIPlatform.class, args);
}
// Runs at startup: relaxes SSL checking when strict.ssl=false.
@Bean
public InitializingBean getInitialize()
{
return () -> {
if (!strictSSL)
{
HttpHelper.noStrictSSL();
}
};
}
// Client for the Asset Catalog OMAS, configured from application properties.
@Bean
public AssetCatalog getAssetCatalog(@Value("${omas.server.url}") String serverUrl,
@Value("${omas.server.name}") String serverName) throws org.odpi.openmetadata.frameworks.connectors.ffdc.InvalidParameterException {
return new AssetCatalog(serverName, serverUrl);
}
// Client for the Glossary View OMAS.
@Bean
public GlossaryViewClient getGlossaryViewClient(@Value("${omas.server.url}") String serverUrl,
@Value("${omas.server.name}") String serverName) throws org.odpi.openmetadata.frameworks.connectors.ffdc.InvalidParameterException {
return new GlossaryViewClient(serverName, serverUrl);
}
// Client for the Open Lineage governance server.
@Bean
public OpenLineageClient getOpenLineage(@Value("${open.lineage.server.url}") String serverUrl,
@Value("${open.lineage.server.name}") String serverName) throws org.odpi.openmetadata.frameworks.connectors.ffdc.InvalidParameterException {
return new OpenLineageClient(serverName, serverUrl);
}
// Selects token- or session-based auth; token is the default mode.
@Bean
public AuthService getAuthService(@Value("${authentication.mode}") String authenticationMode) {
if(null == authenticationMode || authenticationMode.isEmpty() || "token".equals(authenticationMode)){
return new TokenAuthService();
}
return new SessionAuthService();
}
@PostConstruct
private void configureTrustStore() {
//making sure truststore was not set using JVM options
// and strict.ssl is true ( if false, truststore will ignored anyway )
if(strictSSL && System.getProperty("javax.net.ssl.trustStore")==null) {
//load the 'javax.net.ssl.trustStore' and
//'javax.net.ssl.trustStorePassword' from application.properties
System.setProperty("javax.net.ssl.trustStore", env.getProperty("server.ssl.trust-store"));
System.setProperty("javax.net.ssl.trustStorePassword", env.getProperty("server.ssl.trust-store-password"));
}
}
}
|
<filename>resources/assets/api/payment.js<gh_stars>0
// Initialise the PagSeguro session from the hidden session_id field.
PagSeguroDirectPayment.setSessionId(document.querySelector("[name='session_id']").value);
// Cart items and authenticated user, serialised by the backend.
var items = Payment.getItems();
var auth = Payment.getAuth();
auth = JSON.parse(auth);
items = JSON.parse(items);
// Create the order on the backend as soon as the page loads.
$.ajax({
method: 'POST',
url: '/pagseguro/generate_order',
headers: {
'X-CSRF-TOKEN': $('meta[name="_token"]').attr('content')
},
/*data: {
user_id: 1,
year: '2017',
month: '11',
day: '15',
hour: '17',
minute: '12',
second: '00',
package_id: 1
},*/
data: {
user_id: auth.id,
year: items.year,
month: items.month,
day: items.day,
hour: items.hour,
minute: items.minute,
second: items.second,
package_id: items.package.id
},
success: function (data) {
// Stash the generated ids in hidden fields for the payment form.
$('[name="order_id"]').val(data.order_id);
$('[name="item_id"]').val(data.items.id);
$('[name="user_id"]').val(auth.id);
},
error: function (errors) {
}
});
// Append the PagSeguro sender hash to the form (once) as a hidden input.
function setSenderHash() {
var form = document.querySelector('#payment-pagseguro');
var hash = PagSeguroDirectPayment.getSenderHash();
if (document.querySelector("input[name=sender_hash]") == null) {
var senderHash = document.createElement('input');
senderHash.setAttribute('name', "sender_hash");
senderHash.setAttribute('type', "hidden");
senderHash.setAttribute('value', hash);
form.appendChild(senderHash);
}
}
$(function(){
// Ask PagSeguro for a card token from the form's card fields, store it in a
// hidden input, then trigger the payment after a short delay.
function setCardToken() {
var parametros = {
cardNumber: document.getElementById('card_number').value,
brand: document.querySelector("input[name=card_brand]").value,
cvv: document.querySelector("input[name=card_cvv]").value,
expirationMonth: document.querySelector('input[name=card_month]').value,
expirationYear: document.querySelector('input[name=card_year]').value,
success: function (data) {
var form = document.querySelector('#payment-pagseguro');
// Strip the JSON quotes around the token string.
var token = JSON.stringify(data.card.token).replace(/"/g, '');
if (document.querySelector("input[name=card_token]") == null) {
var cardToken = document.createElement('input');
cardToken.setAttribute('name', "card_token");
cardToken.setAttribute('type', "hidden");
cardToken.setAttribute('value', token);
form.appendChild(cardToken);
} else {
document.querySelector("input[name=card_token]").value = token;
}
},
error: function (data) {
window.console.log(data);
// console.log('Ocorreu um erro na validação do cartão');
// console.log(JSON.stringify(data));
},
complete: function(data){
$('body').append(data);
// Give the token field time to be written before submitting.
setTimeout(function(){
executePayment();
}, 3000);
}
};
PagSeguroDirectPayment.createCardToken(parametros);
}
// Cache frequently-used form elements.
var sessionId = $('[name="session_id"]').val();
var form_payment_pagseguro = $('#payment-pagseguro');
var input_card_number = $('#card_number');
var input_card_cvv = $('#card_cvv');
var input_card_year = $('#card_year');
var input_card_month = $('#card_month');
var card_token = $('[name="card_token"]');
$(".select2").select2();
// Input masks for the card and personal data fields (Brazilian formats).
$('#card_month').inputmask("99");
$('.card_birth_date').inputmask('99/99/9999');
$('#card_personal_id').inputmask("999.999.999-99");
$('#card_year').inputmask("9999");
$('#cel').inputmask("(99) 99999-9999");
// ZIP field: mask it and auto-fill the address from the ViaCEP API on blur.
$('.zip')
.inputmask("99999-999")
.blur(function(event){
var value = $(this).val().replace('-', '');
$.ajax({
method: 'GET',
url: 'https://viacep.com.br/ws/'+value+'/json/',
data: {},
success: function (data) {
$('[name="address"]').val(data.logradouro);
$('[name="city"]').val(data.localidade);
$('[name="state"]').val(data.uf);
$('[name="district"]').val(data.bairro);
},
error: function (errors) {
}
});
});
$('#card_cvv').inputmask({
mask: 9,
repeat: 5,
greedy: false,
rightAlign: false
});
$('#card_number').inputmask({
mask: 9,
repeat: 20,
greedy: false,
rightAlign: false
});
// Detect the card brand from the card number when the field loses focus,
// store it in a hidden input and swap the credit-card icon accordingly.
$('#card_number').blur(function(event){
var cardNumber = $(this).val();
if(cardNumber != ''){
PagSeguroDirectPayment.getBrand({
cardBin: cardNumber.replace(/ /g, ''),
success: function (data) {
var brand = JSON.stringify(data.brand.name).replace(/"/g, '');
if (document.querySelector("input[name=card_brand]") == null) {
var cardBrand = document.createElement('input');
cardBrand.setAttribute('name', "card_brand");
cardBrand.setAttribute('type', "hidden");
cardBrand.setAttribute('value', brand);
form_payment_pagseguro.append(cardBrand);
//setInstallmentAmount();
} else {
document.querySelector("input[name=card_brand]").value = brand;
}
$(".fa-credit-card-alt").removeClass('fa-credit-card-alt').addClass('fa-cc-' + brand);
},
error: function (data, error, other) {
},
complete: function(){
}
});
}
});
/**
 * Submits the payment form to the server once a PagSeguro card token exists.
 * When the token is missing (card tokenization failed) an error dialog asks
 * the user to review the card data instead of posting the form.
 */
function executePayment(){
    if(card_token.val() == ''){
        // No card_token was produced by PagSeguroDirectPayment.createCardToken.
        swal({
            title: "Erro nos dados",
            text: 'Revise os dados do cartão e tente novamente.',
            type: "error",
            cancelButtonText: "Cancelar",
            html: true,
            showCancelButton: false,
            confirmButtonColor: "#00a65a",
            confirmButtonText: "Tente novamente",
            closeOnConfirm: true,
            showLoaderOnConfirm: false
        }, function(inputValue){
        });
    }else{
        $.ajax({
            method: 'POST',
            // NOTE(review): synchronous XHR is deprecated in browsers; kept to
            // preserve the original flow — consider switching to async.
            async: false,
            url: $('#payment-pagseguro').attr('action'),
            data: $('#payment-pagseguro').serialize(),
            headers: {
                'X-CSRF-TOKEN': $('meta[name="_token"]').attr('content')
            },
            beforeSend: function(){
                // Prevent double submission while the request is in flight.
                $('[type="submit"]').attr('disabled', 'disabled');
            },
            success: function(data){
                // BUG FIX: title was misspelled "efeituado".
                swal({
                    title: "Pedido efetuado",
                    type: "success",
                    text: 'Confirme para redirecionar à página do cliente',
                    html: true,
                    showCancelButton: false,
                    confirmButtonColor: "#00a65a",
                    confirmButtonText: "Confirmar",
                    closeOnConfirm: false,
                    showLoaderOnConfirm: false
                }, function(inputValue){
                    Payment.redirect();
                });
            },
            error: function(errors){
                // BUG FIX: the submit button was left disabled forever on
                // failure; re-enable it so the user can retry, and log the
                // error instead of swallowing it silently.
                $('[type="submit"]').removeAttr('disabled');
                window.console.log(errors);
            }
        });
    }
}
// Intercept the form submit: run client-side validation, confirm with the
// user, then tokenize the card and create the sender hash. The actual POST
// happens later via setCardToken's `complete` callback (executePayment).
$('#payment-pagseguro').on('submit', function (event) {
    event.preventDefault();
    document.querySelector('[name="method"]').setAttribute('value', 'creditCard');
    var isValid = $("#payment-pagseguro").valid();
    // BUG FIX: debug output was appended to the page body with
    // $('body').append(...); log it to the console instead.
    window.console.log('isValid ' + isValid);
    if(isValid){
        if(!validateCPF($('#card_personal_id').val())) {
            swal("Oops...", "CPF Inválido!", "error");
        }else{
            swal({
                title: "Deseja continuar?",
                type: "info",
                cancelButtonText: "Cancelar",
                showCancelButton: true,
                confirmButtonColor: "#00a65a",
                confirmButtonText: "Continuar",
                closeOnConfirm: false,
                showLoaderOnConfirm: true,
            },function(type){
                setCardToken();
                setSenderHash();
            });
        }
    }
});
// jQuery Validate configuration for the PagSeguro payment form.
// `rules` declares the constraint(s) each named input must satisfy;
// `messages` supplies the matching user-facing (Portuguese) error text.
$('#payment-pagseguro').validate({
rules: {
card_name: {
required: true
},
card_personal_id: {
required: true/*,
isCPF: true*/
},
card_birth_date:{
required: true
},
card_number: {
required: true
},
card_month: {
required: true,
minlength: 2,
maxlength: 2,
},
card_year: {
required: true,
minlength: 4
},
card_cvv: {
required: true,
minlength: 3
},
number: {
required: true
},
cel: {
required: true
},
address: {
required: true
},
city: {
required: true
},
district: {
required: true
},
state: {
required: true
},
zip: {
required: true
}
},
// Per-field error messages shown next to each invalid input.
messages: {
card_name: {
required: 'Campo obrigatório'
},
card_personal_id: {
required: 'Campo obrigatório'/*,
isCPF: 'CPF Inválido'*/
},
card_birth_date:{
required: 'Campo obrigatório'
},
card_number: {
required: 'Campo obrigatório'
},
card_month: {
required: 'Campo obrigatório',
minlength: 'Tamanho mínimo de 2 digitos'
},
card_year: {
required: 'Campo obrigatório',
minlength: 'Tamanho mínimo de 4 digitos'
},
card_cvv: {
required: 'Campo obrigatório',
minlength: 'Tamanho mínimo de 3 digitos'
},
number: {
required: 'Campo obrigatório'
},
cel: {
required: 'Campo obrigatório'
},
address: {
required: 'Campo obrigatório'
},
city: {
required: 'Campo obrigatório'
},
district: {
required: 'Campo obrigatório'
},
state: {
required: 'Campo obrigatório'
},
zip: {
required: 'Campo obrigatório'
}
}
});
/*PagSeguroDirectPayment.setSessionId(sessionId);
PagSeguroDirectPayment.createCardToken({
cardNumber: '4271671604887028',
cvv: '063',
expirationMonth: '09',
expirationYear: '2021',
success: function(response){
},
error: function(response){
},
complete: function(response){
}
});*/
}); |
public class ServerManager {
private Cluster cluster;
private Policy policy;
public ServerManager(Cluster cluster, Policy policy) {
this.cluster = cluster;
this.policy = policy;
}
} |
<reponame>mspnp/vnet-integrated-serverless-microservices
import { AxiosError, AxiosInstance, AxiosRequestConfig, AxiosResponse } from "axios";
import * as HttpStatus from "http-status-codes";
import { URL } from "url";
import { AppInsightsService, IDependencyTelemetry } from "./app-insights/app-insights-service";
import { IHeaders } from "../Models/IHeaders";
import { Timer } from "./app-insights/timer";
import { IResponse } from "../Models/IResponse";
import { DownstreamError } from "../Models/DownstreamError";
/**
 * HTTP service for calling external API services via axios.
 * Every call is timed and reported to Application Insights as a dependency,
 * and correlation headers from App Insights are merged into each request.
 */
export class HttpDataService {
constructor(
private readonly axiosClient: AxiosInstance,
private readonly appInsightsService: AppInsightsService
) { }
/**
 * Make a HTTP call with GET HTTP method.
 * @param url The endpoint to call.
 * @param queryParams Optional query parameters forwarded to axios.
 * @param headers Optional HTTP headers to send.
 * @returns The downstream body/status/headers wrapped in an IResponse.
 */
public async makeHttpGetCall<T>(
url: URL,
// eslint-disable-next-line @typescript-eslint/no-explicit-any
queryParams?: any, headers?: IHeaders): Promise<IResponse> {
const getCall = (innerUrl: string, requestConfig: AxiosRequestConfig): Promise<AxiosResponse> => this.axiosClient.get<T>(
innerUrl,
requestConfig
);
return this.axiosHttpCall(url, getCall, queryParams, headers);
}
/**
 * Make a HTTP call with PUT HTTP method.
 * @param url The endpoint to call.
 * @param headers Optional HTTP headers to send.
 * @param payload Optional request body.
 */
public async makeHttpPutCall<T>(
url: URL,
headers?: IHeaders, payload?: T): Promise<IResponse> {
const putCall = (innerUrl: string, requestConfig: AxiosRequestConfig): Promise<AxiosResponse> => this.axiosClient.put<T>(
innerUrl,
payload,
requestConfig
);
return this.axiosHttpCall(url, putCall, {}, headers);
}
/**
 * Make a HTTP call with POST HTTP method.
 * @param url The endpoint to call.
 * @param headers HTTP headers to send.
 * @param payload Request body.
 */
public async makeHttpPostCall(
url: URL,
headers: IHeaders, payload: unknown): Promise<IResponse> {
const postCall = (innerUrl: string, requestConfig: AxiosRequestConfig): Promise<AxiosResponse> => this.axiosClient.post(
innerUrl,
payload,
requestConfig
);
return this.axiosHttpCall(url, postCall, {}, headers);
}
/**
 * Make the http call to the external API service, recording duration and
 * outcome as an App Insights dependency. On failure, rethrows the error as
 * a DownstreamError carrying the downstream body/status/headers.
 * @param url The URL of the endpoint to call
 * @param axiosRequestCallFn The axios operation function
 * @param queryParams Any query Params to send
 * @param headers any HTTP Headers to send
 */
private async axiosHttpCall(
url: URL,
axiosRequestCallFn: (url: string, requestConfig: AxiosRequestConfig) => Promise<AxiosResponse>,
queryParams?: unknown,
headers?: IHeaders
): Promise<IResponse> {
// Merge App Insights correlation headers into the caller-supplied ones
// (App Insights values win on key collision).
const appInsightsHeaders = this.appInsightsService.getHeadersForRequest();
const headersWithCorrelationContext = { ...headers, ...appInsightsHeaders };
const requestConfig: AxiosRequestConfig = {
headers: headersWithCorrelationContext,
params: queryParams
};
const timer = new Timer();
try {
const response = await axiosRequestCallFn(url.toString(), requestConfig);
const apiResponse: IResponse = {
body: response.data,
status: response.status,
headers: response.headers
};
// App insights metrics: stop the timer before building the telemetry.
timer.stop();
// tslint:disable-next-line: no-unsafe-any
const dependency = this.createDependencyTelemetry(timer, response.status, true, url, response.config.method);
this.appInsightsService.trackDependency(
dependency);
return apiResponse;
} catch (error) {
const e: AxiosError = error as AxiosError;
// App insights metrics: record the failed dependency before rethrowing.
timer.stop();
// Prefer the HTTP status when a response exists; fall back to the
// transport-level error message (e.g. timeouts have no response).
const resultCode = (e.response && e.response.status) || e.message;
const dependency = this.createDependencyTelemetry(timer, resultCode, false, url, e.config?.method || "");
this.appInsightsService.trackDependency(dependency);
const errorMessage = e.response && e.response.data ? JSON.stringify(e.response.data) : e.message;
throw new DownstreamError(errorMessage, {
body: e.response && e.response.data || {},
status: e.response ? e.response.status : HttpStatus.INTERNAL_SERVER_ERROR,
headers: e.response?.headers
});
}
}
/**
 * Builds the dependency telemetry record for a completed (or failed) call.
 * Duration is clamped to at least 1ms so zero-length timings still register.
 */
// tslint:disable-next-line: completed-docs
private createDependencyTelemetry(
timer: Timer, resultCode: string | number, success: boolean, url: URL, method?: string): IDependencyTelemetry {
return {
data: `${method} ${url.toString()}`,
dependencyTypeName: "HTTP",
duration: Math.max(timer.duration, 1),
time: timer.endDate,
resultCode,
success,
name: url.pathname,
target: url.hostname
};
}
}
|
import React, { BaseSyntheticEvent, useContext } from 'react';
import { CONTEXT_CONFIG } from '../../../../contexts/constants';
import TweetsContext from '../../../../contexts/TweetsContext';
/**
 * Renders the list of the user's most recent searches and lets any of them
 * be re-run with a single click.
 * @returns The React component.
 */
export default function LastSearchs() {
  const { lastSearchs, loadTweets } = useContext(TweetsContext);

  /**
   * Re-runs the search whose label was clicked.
   * @param event Is the button click event.
   */
  const handleClick = (event: BaseSyntheticEvent) => {
    loadTweets(`${CONTEXT_CONFIG.TWITTER_URL}${event.target.innerHTML}`);
  };

  // Build the clickable list items up front; an empty array simply yields
  // the "no saved searches" branch below.
  const searchItems = lastSearchs.map((search, idx) => (
    // eslint-disable-next-line react/no-array-index-key
    <li key={idx}>
      <button type="button" onClick={handleClick}>
        {search.replace(CONTEXT_CONFIG.TWITTER_URL, '')}
      </button>
    </li>
  ));

  return (
    <div>
      <h2>Your last {CONTEXT_CONFIG.MAX_STORE} searchs</h2>
      {searchItems.length > 0 ? (
        <ul>{searchItems}</ul>
      ) : (
        <p>You do not have saved searches</p>
      )}
    </div>
  );
}
|
#!/bin/bash
# Find the PID(s) of the running 'demo' process and force-kill them.
# Prints "无服务" ("no service") when nothing matching is running.
pid=$(ps -ef | grep 'demo' | grep -v grep | awk '{print $2}')
if [ -n "$pid" ]; then
    # BUG FIX: previously `kill -9 $pid` ran even with an empty $pid,
    # producing a kill usage error. $pid is intentionally unquoted so
    # multiple PIDs word-split into separate arguments.
    kill -9 $pid || echo 无服务
else
    echo 无服务
fi
import librosa
import numpy as np
def process_audio_data(data):
    """Compute normalized MFCC features for every labelled set of waveforms.

    Args:
        data: Mapping of label -> iterable of 1-D waveform arrays.
            The waveforms are decimated by 3 and then treated as 16 kHz
            below, which implies a 48 kHz source rate — TODO confirm the
            original sample rate with the data producer.

    Returns:
        The same mapping, mutated in place so each label maps to a dict
        with the original ``'waveforms'`` and the computed ``'mfcc'`` list.
    """
    for label, waveforms in data.items():
        vectors = []
        for wave in waveforms:
            # Naive decimation by 3 (e.g. 48 kHz -> 16 kHz). NOTE(review):
            # no anti-aliasing filter is applied; scipy.signal.decimate
            # would be more accurate if aliasing matters.
            downsampled_wave = wave[::3]
            # Compute MFCCs; hop_length == n_fft gives non-overlapping
            # frames. `y` is passed by keyword: librosa >= 0.10 rejects
            # positional audio arguments.
            mfcc = librosa.feature.mfcc(
                y=downsampled_wave, sr=16000, n_mels=22, n_mfcc=30,
                fmax=8000, n_fft=1024, hop_length=1024)
            # Normalize each coefficient row to unit peak.
            mfcc = librosa.util.normalize(mfcc, axis=1)
            # Convert power-scaled MFCC to decibel units.
            mfcc = librosa.power_to_db(mfcc, top_db=80.0)
            vectors.append(mfcc)
        # BUG FIX: the original did `data[label]['mfcc'] = vectors`, which
        # raises TypeError because data[label] is the list of waveforms just
        # iterated. Store features alongside the raw waveforms instead.
        data[label] = {'waveforms': waveforms, 'mfcc': vectors}
    return data
<reponame>akokhanovskyi/kaa<filename>avrogen/src/main/java/org/kaaproject/kaa/avro/avrogen/compiler/Compiler.java
/*
* Copyright 2014-2016 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.avro.avrogen.compiler;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.util.*;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Field;
import org.apache.avro.Schema.Type;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.VelocityEngine;
import org.kaaproject.kaa.avro.avrogen.GenerationContext;
import org.kaaproject.kaa.avro.avrogen.KaaGeneratorException;
import org.kaaproject.kaa.avro.avrogen.StyleUtils;
import org.kaaproject.kaa.avro.avrogen.TypeConverter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.commons.lang.StringUtils.join;
public abstract class Compiler {
private static final String DIRECTION_PROP = "direction";
private static final Logger LOG = LoggerFactory.getLogger(Compiler.class);
private final String generatedSourceName;
private List<Schema> schemas = new ArrayList<>();
protected VelocityEngine engine;
protected PrintWriter headerWriter;
protected PrintWriter sourceWriter;
protected String namespacePrefix;
protected final Map<Schema, GenerationContext> schemaGenerationQueue;
// list of schemas that should be skipped during generation
protected Set<Schema> generatedSchemas = new HashSet<>();
private void initVelocityEngine() {
engine = new VelocityEngine();
engine.addProperty("resource.loader", "class, file");
engine.addProperty("class.resource.loader.class",
"org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader");
engine.addProperty("file.resource.loader.class",
"org.apache.velocity.runtime.resource.loader.FileResourceLoader");
engine.addProperty("file.resource.loader.path", "/, .");
engine.setProperty("runtime.references.strict", true);
engine.setProperty("runtime.log.logsystem.class", "org.apache.velocity.runtime.log.NullLogSystem");
}
private Compiler(String sourceName) throws KaaGeneratorException {
this.namespacePrefix = "kaa";
this.generatedSourceName = sourceName;
this.schemaGenerationQueue = new LinkedHashMap<>();
initVelocityEngine();
}
public Compiler(Schema schema, String sourceName, OutputStream hdrS, OutputStream srcS) throws KaaGeneratorException {
this(sourceName);
this.schemas.add(schema);
this.headerWriter = new PrintWriter(hdrS);
this.sourceWriter = new PrintWriter(srcS);
prepareTemplates(false);
}
public Compiler(List<Schema> schemas, String sourceName, OutputStream hdrS, OutputStream srcS) throws KaaGeneratorException {
this(sourceName);
this.schemas.addAll(schemas);
this.headerWriter = new PrintWriter(hdrS);
this.sourceWriter = new PrintWriter(srcS);
prepareTemplates(false);
}
public Compiler(List<Schema> schemas, String sourceName, OutputStream hdrS, OutputStream srcS, Set<Schema> generatedSchemas) throws KaaGeneratorException {
this(schemas, sourceName, hdrS, srcS);
this.generatedSchemas = new HashSet<>(generatedSchemas);
}
public Compiler(String schemaPath, String outputPath, String sourceName) throws KaaGeneratorException {
this(sourceName);
try {
this.schemas.add(new Schema.Parser().parse(new File(schemaPath)));
prepareTemplates(true);
File outputDir = new File(outputPath);
outputDir.mkdirs();
String headerPath = outputPath + File.separator + generatedSourceName + ".h";
String sourcePath = outputPath + File.separator + generatedSourceName + getSourceExtension();
Files.move(new File(headerTemplateGen()).toPath()
, new File(headerPath).toPath(), StandardCopyOption.REPLACE_EXISTING);
Files.move(new File(sourceTemplateGen()).toPath()
, new File(sourcePath).toPath(), StandardCopyOption.REPLACE_EXISTING);
this.headerWriter = new PrintWriter(new BufferedWriter(new FileWriter(headerPath, true)));
this.sourceWriter = new PrintWriter(new BufferedWriter(new FileWriter(sourcePath, true)));
} catch (Exception e) {
LOG.error("Failed to create ouput path: ", e);
throw new KaaGeneratorException("Failed to create output path: " + e.toString());
}
}
protected abstract String headerTemplateGen();
protected abstract String sourceTemplateGen();
protected abstract String headerTemplate();
protected abstract String sourceTemplate();
protected abstract String getSourceExtension();
private void prepareTemplates(boolean toFile) throws KaaGeneratorException {
try {
VelocityContext context = new VelocityContext();
context.put("headerName", generatedSourceName);
StringWriter hdrWriter = new StringWriter();
engine.getTemplate(headerTemplate()).merge(context, hdrWriter);
StringWriter srcWriter = new StringWriter();
engine.getTemplate(sourceTemplate()).merge(context, srcWriter);
if (toFile) {
writeToFile(hdrWriter, srcWriter);
} else {
writeToStream(hdrWriter, srcWriter);
}
} catch (Exception e) {
LOG.error("Failed to prepare source templates: ", e);
throw new KaaGeneratorException("Failed to prepare source templates: " + e.toString());
}
}
private void writeToStream(StringWriter hdrWriter, StringWriter srcWriter) {
headerWriter.write(hdrWriter.toString());
sourceWriter.write(srcWriter.toString());
}
private void writeToFile(StringWriter hdrWriter, StringWriter srcWriter) throws Exception {
FileOutputStream hdrOs = new FileOutputStream(headerTemplateGen());
hdrOs.write(hdrWriter.toString().getBytes());
hdrOs.close();
FileOutputStream srcOs = new FileOutputStream(sourceTemplateGen());
srcOs.write(srcWriter.toString().getBytes());
srcOs.close();
}
public Set<Schema> generate() throws KaaGeneratorException {
try {
LOG.debug("Processing schemas: [" + join(schemas, ", ") + "]");
for (Schema schema : schemas) {
if (schema.getType() == Type.UNION) {
for (Schema s : schema.getTypes()) {
addAllSchemasToQueue(s, null);
}
} else {
addAllSchemasToQueue(schema, null);
}
}
doGenerate();
LOG.debug("Sources were successfully generated");
return schemaGenerationQueue.keySet();
} catch (Exception e) {
LOG.error("Failed to generate C sources: ", e);
throw new KaaGeneratorException("Failed to generate sources: " + e.toString());
} finally {
headerWriter.close();
sourceWriter.close();
}
}
/**
* Recursively add all unique dependencies of a passed schema and the one to generation queue,
* that used to generate sources.
*/
private void addAllSchemasToQueue(Schema schema, GenerationContext context) {
GenerationContext existingContext = schemaGenerationQueue.get(schema);
if (existingContext != null) {
existingContext.updateDirection(context);
return;
}
switch (schema.getType()) {
case RECORD:
for (Field f : schema.getFields()) {
addAllSchemasToQueue(f.schema(), new GenerationContext(
schema.getName(), f.name(), schema.getProp(DIRECTION_PROP)));
}
schemaGenerationQueue.put(schema, null);
break;
case UNION:
for (Schema branchSchema : schema.getTypes()) {
addAllSchemasToQueue(branchSchema, context);
}
schemaGenerationQueue.put(schema, context);
break;
case ARRAY:
addAllSchemasToQueue(schema.getElementType(), context);
break;
case ENUM:
schemaGenerationQueue.put(schema, null);
break;
default:
break;
}
}
protected abstract void doGenerate();
protected void processRecord(Schema schema, String headerTemplate, String sourceTemplate) {
VelocityContext context = new VelocityContext();
context.put("schema", schema);
context.put("StyleUtils", StyleUtils.class);
context.put("TypeConverter", TypeConverter.class);
context.put("namespacePrefix", namespacePrefix);
StringWriter hdrWriter = new StringWriter();
engine.getTemplate(headerTemplate).merge(context, hdrWriter);
appendResult(hdrWriter.toString(), true);
StringWriter srcWriter = new StringWriter();
engine.getTemplate(sourceTemplate).merge(context, srcWriter);
appendResult(srcWriter.toString(), false);
}
protected void processEnum(Schema schema, String template) {
VelocityContext context = new VelocityContext();
List<String> symbols = schema.getEnumSymbols();
context.put("schema", schema);
context.put("symbols", symbols);
context.put("StyleUtils", StyleUtils.class);
context.put("namespacePrefix", namespacePrefix);
StringWriter writer = new StringWriter();
engine.getTemplate(template).merge(context, writer);
appendResult(writer.toString(), true);
}
protected void appendResult(String str, boolean toHeader) {
if (toHeader) {
headerWriter.write(str);
} else {
sourceWriter.write(str);
}
}
public void setNamespacePrefix(String namespacePrefix) {
this.namespacePrefix = namespacePrefix;
}
} |
#!/bin/bash
# Yet Another UserAgent Analyzer
# Copyright (C) 2013-2019 Niels Basjes
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Resolve the directory this script lives in, then the target resource dir.
SCRIPTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
TARGETDIR=$(cd "${SCRIPTDIR}/../../../resources/UserAgents" || exit 1; pwd)
INPUT=AmazonDevices.csv
OUTPUT="${TARGETDIR}/AmazonDevices.yaml"
# Skip regeneration when the output is newer than both this script and the
# input CSV (-ot: "older than").
if [ "Generate.sh" -ot "${OUTPUT}" ]; then
if [ "${INPUT}" -ot "${OUTPUT}" ]; then
echo "Up to date: ${OUTPUT}";
exit;
fi
fi
echo "Generating: ${OUTPUT}";
(
echo "# ============================================="
echo "# THIS FILE WAS GENERATED; DO NOT EDIT MANUALLY"
echo "# ============================================="
echo "#"
echo "# Yet Another UserAgent Analyzer"
echo "# Copyright (C) 2013-2019 Niels Basjes"
echo "#"
echo "# Licensed under the Apache License, Version 2.0 (the \"License\");"
echo "# you may not use this file except in compliance with the License."
echo "# You may obtain a copy of the License at"
echo "#"
echo "# https://www.apache.org/licenses/LICENSE-2.0"
echo "#"
echo "# Unless required by applicable law or agreed to in writing, software"
echo "# distributed under the License is distributed on an \"AS IS\" BASIS,"
echo "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied."
echo "# See the License for the specific language governing permissions and"
echo "# limitations under the License."
echo "#"
echo "config:"
# Each non-comment CSV line is pipe-separated:
#   tag | deviceName | deviceBrand | deviceClass
grep -F -v '#' "${INPUT}" | grep '[a-z]' | while read -r line
do
tag=$( echo "${line}" | sed 's@ *| *@|@g' | cut -d'|' -f1 )
deviceName=$( echo "${line}" | sed 's@ *| *@|@g' | cut -d'|' -f2 )
deviceBrand=$(echo "${line}" | sed 's@ *| *@|@g' | cut -d'|' -f3 )
deviceClass=$(echo "${line}" | sed 's@ *| *@|@g' | cut -d'|' -f4 )
# Number of words in the tag once dashes become spaces; used to compute
# the positional indices in the matchers below.
tagWords=$(echo "${tag}" | sed 's@-@ @g' | wc -w)
echo "
- matcher:
    require:
    - 'agent.product.name=\"${tag}\"'
    extract:
    - 'DeviceClass                         :   1001 :\"${deviceClass}\"'
    - 'DeviceName                          :   1002 :\"${deviceName}\"'
    - 'DeviceBrand                         :   1002 :\"${deviceBrand}\"'
    - 'OperatingSystemName                 :   1002 :\"FireOS\"' # All known Amazon devices run FireOS (a tweaked Android)

- matcher:
    require:
    - 'agent.(1)product.(1)comments.entry.(1)product.name[1]=\"Android\"'
    - 'agent.(1)product.(1)comments.entry.(1)product.name[1]=\"${tag}\"'
    extract:
    - 'DeviceClass                         :   1001 :\"${deviceClass}\"'
    - 'DeviceName                          :   1002 :\"${deviceName}\"'
    - 'DeviceBrand                         :   1002 :\"${deviceBrand}\"'
    - 'OperatingSystemName                 :   1002 :\"FireOS\"' # All known Amazon devices run FireOS (a tweaked Android)

- matcher:
    require:
    - 'IsNull[agent.product.name=\"Chrome\"]'
    extract:
    - 'DeviceClass                         :   1001 :\"${deviceClass}\"'
    - 'DeviceName                          :   1002 :\"${deviceName}\"'
    - 'DeviceBrand                         :   1002 :\"${deviceBrand}\"'
    - 'OperatingSystemVersionBuild         :     12 :agent.(1)product.(1)comments.entry[1-$((tagWords+1))]=\"${tag} Build\"@[$((tagWords+2))]'
    - 'OperatingSystemName                 :   1002 :\"FireOS\"' # All known Amazon devices run FireOS (a tweaked Android)

- matcher:
    require:
    - 'IsNull[agent.product.name=\"Chrome\"]'
    extract:
    - 'DeviceClass                         :   1001 :\"${deviceClass}\"'
    - 'DeviceName                          :   1002 :\"${deviceName}\"'
    - 'DeviceBrand                         :   1002 :\"${deviceBrand}\"'
    - 'OperatingSystemVersionBuild         :     11 :agent.(1)product.(1)comments.entry[1-$((tagWords))]=\"${tag}\"@[$((tagWords+1))]'
    - 'OperatingSystemName                 :   1002 :\"FireOS\"' # All known Amazon devices run FireOS (a tweaked Android)
"
done
) > "${OUTPUT}"
|
class Polygon:
    """A simple polygon described by the lengths of its sides."""

    def __init__(self, num_of_sides):
        """Create a polygon with ``num_of_sides`` sides, all initially 0."""
        self.number_of_sides = num_of_sides
        self.sides = [0] * num_of_sides

    def input_sides(self, sides):
        """Replace the stored side lengths with ``sides``."""
        self.sides = sides

    def perimeter(self):
        """Return the perimeter: the sum of all side lengths."""
        return sum(self.sides)
#!/bin/bash
# Build VIM from source into ./release and write an `enable` script that
# puts the freshly built binaries on the PATH.

# Local variables (quoted; $(...) instead of backticks)
origDir="$(pwd)"
enableFileName="${origDir}/enable"

# Fetch (or update) the VIM repository; abort if the clone fails.
if ! test -d vim ; then
    git clone https://github.com/vim/vim.git || exit 1
else
    pushd vim > /dev/null || exit 1
    make clean
    git pull
    popd > /dev/null || exit 1
fi

# Configure
# Run
#   $./configure --help
# to see all options
cd vim || exit 1
./configure --with-features=huge \
      --enable-multibyte \
      --enable-python3interp=yes \
      --enable-cscope \
      --enable-terminal \
      --enable-fontset \
      --prefix="${origDir}/release"

# Compile
make VIMRUNTIMEDIR="${origDir}/release/share/vim/vim81" -j16

# Install
make install

# Write the enable file (grouped redirection replaces repeated appends)
{
    echo "#!/bin/bash"
    echo " "
    echo "VIM_HOME=${origDir}/release"
    echo "export PATH=\$VIM_HOME/bin:\$PATH"
} > "${enableFileName}"
|
# Generated by Django 2.2.3 on 2019-08-29 22:17
from django.db import migrations, models
class Migration(migrations.Migration):
    """Second migration for `snakepy`: adds one model, renames another."""

    dependencies = [
        ('snakepy', '0001_initial'),
    ]

    operations = [
        # Create a second model with two optional free-text fields.
        migrations.CreateModel(
            name='YourSecondModelNameHere',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('your_3rd_field_name_here', models.TextField(blank=True, default='', null=True)),
                ('your_4th_field_name_here', models.TextField(blank=True, default='', null=True)),
            ],
        ),
        # Rename the model that 0001_initial created.
        migrations.RenameModel(
            old_name='YourModelNameHere',
            new_name='YourFirstModelNameHere',
        ),
    ]
|
#!/bin/bash
# Fetch and install golint into $GOPATH.
# NOTE(review): github.com/golang/lint has moved to golang.org/x/lint; the
# old path may stop redirecting — confirm and update the import path.
set -e
go get -u github.com/golang/lint/golint
# Quote $GOPATH so paths containing spaces do not word-split.
cd "$GOPATH/src/github.com/golang/lint"
go install .
|
/*
* Copyright 2016 NIIT Ltd, Wipro Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Contributors:
*
* 1. <NAME>
* 2. <NAME>
* 3. <NAME>
* 4. <NAME>
* 5. <NAME>
* 6. <NAME>
* 7. <NAME>
*/
// Login screen controller: authenticates through userManager, then reads the
// logged-in user back from the auth-token cookie and navigates to home.
angular.module('vbiApp')
.controller('loginController', ['$rootScope', '$location', 'userManager', '$cookies', '$scope', function($rootScope, $location, userManager, $cookies, $scope) {
$rootScope.loggedInUser = {};
// Model backing the login form inputs.
this.user = {
email: "",
password: ""
};
var self = this;
this.errorMessage = "";
// Attempt to log in with the current form values.
this.login = function() {
// Reset the registration message when a user logs in.
$rootScope.registerUserMessage = "";
userManager.login(this.user, function(err, data) {
if(!err) {
// The cookie keyed by the auth token holds the serialized user.
$rootScope.loggedInUser = JSON.parse($cookies.get($rootScope.authToken));
$location.url($location.url() + 'home');
} else {
self.errorMessage = err;
}
});
};
// Switch the view to the registration page.
this.newUser = function(){
$rootScope.showRegisterPage=true;
};
// Clear the error message whenever the password field changes.
$scope.$watch(function() {
return self.user.password;}, function(newValue) {
self.errorMessage = "";
})
}])
|
package com.qtimes.pavilion.status;
/**
* Author: JackHou
* Date: 2020/4/21.
*/
public enum FuncStatus {
    RESOLVED(0, "保留"),
    OPEN(1, "开启"),
    CLOSE(2, "关闭");

    // Numeric code identifying this status.
    int code;
    // Human-readable description of this status.
    String desc;

    FuncStatus(int mCode, String mDesc) {
        this.code = mCode;
        this.desc = mDesc;
    }

    public int getCode() {
        return code;
    }

    public void setCode(int mCode) {
        this.code = mCode;
    }

    public String getDesc() {
        return desc;
    }

    public void setDesc(String mDesc) {
        this.desc = mDesc;
    }

    @Override
    public String toString() {
        // Same output format as the original hand-built concatenation.
        return String.format("FuncStatus{code=%d, desc='%s'}", code, desc);
    }
}
|
#!/bin/bash
# pbs launching script:
#PBS -N cuda_dp_knapsack
#PBS -q gpu
#PBS -l walltime=2:00:0
#PBS -l nodes=1:ppn=8

module load cuda
cd $PBS_O_WORKDIR
# BUG FIX: `set CONV_RSH = ssh` is csh syntax; in bash it overwrote the
# positional parameters instead of defining a variable.
export CONV_RSH=ssh
echo -n "Start: "
date
INSTANCE=instances/gen.3.500.500.88
#INSTANCE=instances/gen.4.500.500.58
#INSTANCE=instances/gen.1.500.500.10

# Reference run: the first output field of knapsack_simple is the correct profit.
time output=`./knapsack_simple < ${INSTANCE}`
echo -n "simple: "
echo $output
correct_profit=`echo "$output" | awk '{print $1}'`

# CPU hash-table variants (timed only, no profit check in the original).
echo -n "httslf-r1: "
time ./knapsack_httslf -r1 < ${INSTANCE}
echo -n "httslf-r8: "
time ./knapsack_httslf -r8 < ${INSTANCE}
echo -n "oahttslf-r1: "
time ./knapsack_oahttslf -r1 < ${INSTANCE}
echo -n "oahttslf-r8: "
time ./knapsack_oahttslf -r8 < ${INSTANCE}
echo -n "oahttslf_nr-r1: "
time ./knapsack_nr_oahttslf -r1 < ${INSTANCE}
echo -n "oahttslf_nr-r8: "
time ./knapsack_nr_oahttslf -r8 < ${INSTANCE}
echo -n "oahttslf_randomstart-r1: "
time ./knapsack_randomstart_oahttslf -r1 < ${INSTANCE}
echo -n "oahttslf_randomstart-r8: "
time ./knapsack_randomstart_oahttslf -r8 < ${INSTANCE}

# Run a GPU variant, echo its output, and compare its profit (first output
# field) against the reference result; prints FAILED on mismatch.
# (Replaces five copy-pasted blocks from the original.)
run_and_check() {
    local label=$1 binary=$2
    echo -n "${label}: "
    time output=`./${binary} < ${INSTANCE} | grep -v '^>' | grep -v '^$'`
    profit=`echo "$output" | awk '{print $1}'`
    echo $output
    if [ $profit -ne $correct_profit ]; then
        echo FAILED
    fi
}

run_and_check "oahttslf_gpu" knapsack_gpu_oahttslf
run_and_check "oahttslf_gpu_nr" knapsack_gpu_nr_oahttslf
run_and_check "httslf_gpu_nr" knapsack_gpu_nr_httslf
run_and_check "oahttslf_randomstart_gpu_nr" knapsack_gpu_randomstart_nr_oahttslf
run_and_check "httslf_randomstart_gpu_nr" knapsack_gpu_randomstart_nr_httslf

times
echo -n "End: "
date
|
#!/bin/bash
# Download the doc3d dataset. Optional first argument: target directory
# (defaults to ~/Downloads); archives land under <target>/doc3d.
if [ "$#" -eq 1 ]; then
    downloadRoot="$1"
else
    downloadRoot="$HOME/Downloads"
fi
outputPath="$downloadRoot/doc3d"

# wget is required for every download below; bail out early if it is missing.
if ! [ -x "$(command -v wget)" ]; then
    echo "Error!: wget is not installed! Please install it and try again"
    exit 1
fi

echo -e "\n### ------------------------------------------------------- ###\n"
echo "### Downloading into $outputPath"
echo -e "\n### ------------------------------------------------------- ###\n"
# doc3d_download URL DEST_DIR LABEL
# Fetch a single archive with wget into DEST_DIR; --continue makes the
# download resumable. Credentials are hard-coded placeholders below.
doc3d_download() {
    local url=$1
    local path=$2
    local files=$3
    local uname=**** # put your username
    local pass=**** # put your password
    # BUG FIX: $files was unquoted inside the message, so it was subject to
    # word splitting and glob expansion; keep it inside the quoted string.
    echo -ne "### Downloading $files ###\t\n"
    wget --continue --user "$uname" --password "$pass" --directory-prefix="$path" "$url" 2>&1
    echo -ne "\b\b\b\b"
    echo " # done"
}
# All dataset archives live under this URL prefix.
doc3dBaseUrl="http://vision.cs.stonybrook.edu/~sagnik/doc3d"

# doc3d_download_series TYPE FIRST LAST
# Download archives TYPE_FIRST.zip .. TYPE_LAST.zip into $outputPath,
# replacing the original 150+ copy-pasted doc3d_download invocations.
doc3d_download_series() {
    local type=$1
    local first=$2
    local last=$3
    local i
    for i in $(seq "$first" "$last"); do
        doc3d_download "$doc3dBaseUrl/${type}_${i}.zip" "$outputPath/" "doc3d/${type}_${i}.zip"
    done
}

doc3d_download_series img 1 21
doc3d_download_series wc 1 21
doc3d_download_series bm 1 21
# BUG FIX: uv_9.zip was missing from the original download list even though
# the unzip step expects it; the full 1..21 range is fetched here.
doc3d_download_series uv 1 21
doc3d_download_series alb 1 8
doc3d_download_series recon 1 21
doc3d_download_series norm 1 21
doc3d_download_series dmap 1 21
echo -e "\n### ------------------------------------------------------- ###\n"
echo "### Unzipping downloaded files ###"
echo -e "\n### ------------------------------------------------------- ###\n"

# doc3d_unzip_series TYPE FIRST LAST
# Unzip archives TYPE_FIRST.zip .. TYPE_LAST.zip found in $outputPath and
# delete each archive after successful extraction. Replaces the original
# hundreds of copy-pasted echo/unzip/rm triples.
doc3d_unzip_series() {
    local type=$1
    local first=$2
    local last=$3
    local i zip
    for i in $(seq "$first" "$last"); do
        zip="$outputPath/${type}_${i}.zip"
        echo -e "$zip .."
        unzip -q "$zip" -d "$outputPath"
        # BUG FIX: the original 'rm $outputPath"doc3d/..."' built a path with
        # a doubled doc3d segment and a missing slash, so it never matched a
        # real file and the zips were never deleted; remove the real archive.
        rm "$zip"
    done
}

doc3d_unzip_series img 1 21
doc3d_unzip_series wc 1 21
doc3d_unzip_series bm 1 21
doc3d_unzip_series uv 1 21
doc3d_unzip_series dmap 1 21
doc3d_unzip_series recon 1 20
echo -e $outputPath"/recon_21.zip .."
unzip -q $outputPath"/recon_21.zip" -d $outputPath
rm $outputPath"doc3d/recon_21.zip"
echo -e $outputPath"/norm_1.zip .."
unzip -q $outputPath"/norm_1.zip" -d $outputPath
rm $outputPath"doc3d/norm_1.zip"
echo -e $outputPath"/norm_2.zip .."
unzip -q $outputPath"/norm_2.zip" -d $outputPath
rm $outputPath"doc3d/norm_2.zip"
echo -e $outputPath"/norm_3.zip .."
unzip -q $outputPath"/norm_3.zip" -d $outputPath
rm $outputPath"doc3d/norm_3.zip"
echo -e $outputPath"/norm_4.zip .."
unzip -q $outputPath"/norm_4.zip" -d $outputPath
rm $outputPath"doc3d/norm_4.zip"
echo -e $outputPath"/norm_5.zip .."
unzip -q $outputPath"/norm_5.zip" -d $outputPath
rm $outputPath"doc3d/norm_5.zip"
echo -e $outputPath"/norm_6.zip .."
unzip -q $outputPath"/norm_6.zip" -d $outputPath
rm $outputPath"doc3d/norm_6.zip"
echo -e $outputPath"/norm_7.zip .."
unzip -q $outputPath"/norm_7.zip" -d $outputPath
rm $outputPath"doc3d/norm_7.zip"
echo -e $outputPath"/norm_8.zip .."
unzip -q $outputPath"/norm_8.zip" -d $outputPath
rm $outputPath"doc3d/norm_8.zip"
echo -e $outputPath"/norm_9.zip .."
unzip -q $outputPath"/norm_9.zip" -d $outputPath
rm $outputPath"doc3d/norm_9.zip"
echo -e $outputPath"/norm_10.zip .."
unzip -q $outputPath"/norm_10.zip" -d $outputPath
rm $outputPath"doc3d/norm_10.zip"
echo -e $outputPath"/norm_11.zip .."
unzip -q $outputPath"/norm_11.zip" -d $outputPath
rm $outputPath"doc3d/norm_11.zip"
echo -e $outputPath"/norm_12.zip .."
unzip -q $outputPath"/norm_12.zip" -d $outputPath
rm $outputPath"doc3d/norm_12.zip"
echo -e $outputPath"/norm_13.zip .."
unzip -q $outputPath"/norm_13.zip" -d $outputPath
rm $outputPath"doc3d/norm_13.zip"
echo -e $outputPath"/norm_14.zip .."
unzip -q $outputPath"/norm_14.zip" -d $outputPath
rm $outputPath"doc3d/norm_14.zip"
echo -e $outputPath"/norm_15.zip .."
unzip -q $outputPath"/norm_15.zip" -d $outputPath
rm $outputPath"doc3d/norm_15.zip"
echo -e $outputPath"/norm_16.zip .."
unzip -q $outputPath"/norm_16.zip" -d $outputPath
rm $outputPath"doc3d/norm_16.zip"
echo -e $outputPath"/norm_17.zip .."
unzip -q $outputPath"/norm_17.zip" -d $outputPath
rm $outputPath"doc3d/norm_17.zip"
echo -e $outputPath"/norm_18.zip .."
unzip -q $outputPath"/norm_18.zip" -d $outputPath
rm $outputPath"doc3d/norm_18.zip"
echo -e $outputPath"/norm_19.zip .."
unzip -q $outputPath"/norm_19.zip" -d $outputPath
rm $outputPath"doc3d/norm_19.zip"
echo -e $outputPath"/norm_20.zip .."
unzip -q $outputPath"/norm_20.zip" -d $outputPath
rm $outputPath"doc3d/norm_20.zip"
echo -e $outputPath"/norm_21.zip .."
unzip -q $outputPath"/norm_21.zip" -d $outputPath
rm $outputPath"doc3d/norm_21.zip"
echo -e $outputPath"/alb_1.zip .."
unzip -q $outputPath"/alb_1.zip" -d $outputPath
rm $outputPath"doc3d/alb_1.zip"
echo -e $outputPath"/alb_2.zip .."
unzip -q $outputPath"/alb_2.zip" -d $outputPath
rm $outputPath"doc3d/alb_2.zip"
echo -e $outputPath"/alb_3.zip .."
unzip -q $outputPath"/alb_3.zip" -d $outputPath
rm $outputPath"doc3d/alb_3.zip"
echo -e $outputPath"/alb_4.zip .."
unzip -q $outputPath"/alb_4.zip" -d $outputPath
rm $outputPath"doc3d/alb_4.zip"
echo -e $outputPath"/alb_5.zip .."
unzip -q $outputPath"/alb_5.zip" -d $outputPath
rm $outputPath"doc3d/alb_5.zip"
echo -e $outputPath"/alb_6.zip .."
unzip -q $outputPath"/alb_6.zip" -d $outputPath
rm $outputPath"doc3d/alb_6.zip"
echo -e $outputPath"/alb_7.zip .."
unzip -q $outputPath"/alb_7.zip" -d $outputPath
rm $outputPath"doc3d/alb_7.zip"
echo -e $outputPath"/alb_8.zip .."
unzip -q $outputPath"/alb_8.zip" -d $outputPath
rm $outputPath"doc3d/alb_8.zip"
# Final status banner: framed "All done!" message.
printf '\n### ------------------------------------------------------- ###\n\n'
echo "### All done!"
printf '\n### ------------------------------------------------------- ###\n\n'
# NOTE(review): the lines below are not part of the script — they are
# web-page scrape residue accidentally appended to the file. Commented out
# so the shell does not try to execute them; original text preserved:
# |
# Subsets and Splits
# No community queries yet
# The top public SQL queries from the community will appear here once available.