text stringlengths 1 1.05M |
|---|
<reponame>EinarBaldvin/Ratatosk<filename>src/Common.hpp
#ifndef RATATOSK_COMMON_HPP
#define RATATOSK_COMMON_HPP
#include <iostream>
#include "CompactedDBG.hpp"
#include "PairID.hpp"
#include "TinyBloomFilter.hpp"
#define RATATOSK_VERSION "0.4"
// Options controlling the Ratatosk long-read correction pipeline.
// Extends Bifrost's compacted de Bruijn graph build options.
struct Correct_Opt : CDBG_Build_opt {
    vector<string> filenames_long_in; // Long reads to correct
    vector<string> filenames_helper_long_in; // Accurate long reads helping with coloring on the 2nd round
    vector<string> filenames_short_all; // Unmapped short reads
    vector<string> filenames_long_phase; // Phasing files long reads
    vector<string> filenames_short_phase; // Phasing files short reads
    string filename_long_out; // Output filename prefix for long reads
    bool pass1_only; // Run only the first correction pass
    bool pass2_only; // Run only the second correction pass
    int out_qual;  // Output quality-score mode
    int trim_qual; // Quality threshold used when trimming output
    size_t small_k; // Small k-mer size (the inherited k is the large one)
    size_t insert_sz; // Short-read insert size (presumably — confirm at call sites)
    size_t min_cov_vertices; // Minimum coverage to keep a vertex
    size_t max_cov_vertices; // Coverage cap for vertices
    size_t min_nb_km_unmapped; // Minimum number of k-mers for unmapped reads
    size_t nb_partitions; // Number of phasing partitions
    size_t min_bases_partition; // Minimum number of bases per partition
    size_t max_len_weak_region1; // Maximum weak-region length, pass 1
    size_t max_len_weak_region2; // Maximum weak-region length, pass 2
    size_t buffer_sz; // I/O buffer size in bytes
    size_t h_seed; // Hash seed
    double weak_region_len_factor; // Length tolerance around weak regions
    double large_k_factor; // Scale factor from small k to large k
    // FIX: initializers are now listed in member declaration order. C++
    // always initializes in declaration order regardless of the list, so
    // behavior is unchanged, but this silences -Wreorder and makes it
    // explicit that small_k is already set when min_nb_km_unmapped(small_k)
    // runs.
    Correct_Opt() : pass1_only(false), pass2_only(false),
                    out_qual(1), trim_qual(0), small_k(31), insert_sz(500),
                    min_cov_vertices(2), max_cov_vertices(128),
                    min_nb_km_unmapped(small_k),
                    nb_partitions(1000), min_bases_partition(100000),
                    max_len_weak_region1(1000), max_len_weak_region2(10000),
                    buffer_sz(1048576), h_seed(0),
                    weak_region_len_factor(1.25), large_k_factor(1.5) {
        k = 63; // large k-mer size for the inherited graph build
        clipTips = false;
        deleteIsolated = false;
        useMercyKmers = false;
    }
};
// Hash functor for 64-bit keys: xxHash (XXH64) with seed 0.
struct CustomHashUint64_t {
// Hashes the 8 bytes of v.
size_t operator()(const uint64_t v) const {
return XXH64(&v, sizeof(uint64_t), 0);
}
};
// Hash functor for strings: xxHash (XXH64) over the character data, seed 0.
struct CustomHashString {
size_t operator()(const string& s) const {
return XXH64(s.c_str(), s.length(), 0);
}
};
// Bookkeeping that ties read IDs to haplotype blocks/types during phasing.
// PairID is a compressed set of integer IDs.
// NOTE(review): field semantics inferred from names — confirm at call sites.
struct HapReads {
unordered_map<uint64_t, uint64_t, CustomHashUint64_t> read2hap; // read ID -> haplotype ID
unordered_map<string, uint64_t, CustomHashString> hapBlock2id; // haplotype block name -> numeric ID
unordered_map<string, uint64_t, CustomHashString> hapType2id; // haplotype type name -> numeric ID
vector<PairID> hap2phasedReads; // per-haplotype sets of phased read IDs
PairID hap2unphasedReads; // read IDs without phasing information
size_t block_id; // block ID counter
size_t type_id; // type ID counter
// Starts from a fully cleared state.
HapReads() {
clear();
}
// Resets all maps/sets and both ID counters.
void clear() {
read2hap.clear();
hapBlock2id.clear();
hapType2id.clear();
hap2phasedReads.clear();
hap2unphasedReads.clear();
block_id = 0;
type_id = 0;
}
};
// Read-ID sets carrying per-set scoring weights: one set counts with a
// boosted weight, the other with weight 1.
// NOTE(review): semantics inferred from names — confirm at call sites.
struct WeightsPairID {
PairID noWeight_pids; // reads counted with weight 1
PairID weighted_pids; // reads counted with the boosted weight
PairID all_pids; // union of the two sets above
double weight; // boost applied to weighted_pids (defaults to 2.0)
double sum_pids_weights; // accumulated total weight
// Starts from a cleared state (weight 2.0, empty sets).
WeightsPairID() {
clear();
};
// Empties all sets and restores the default weights.
void clear() {
noWeight_pids.clear();
weighted_pids.clear();
all_pids.clear();
weight = 2.0;
sum_pids_weights = 0.0;
}
};
// returns maximal entropy score for random sequences which is log2|A| where A is the alphabet
// returns something close to 0 for highly repetitive sequences
double getEntropy(const char* s, const size_t len);
// Heuristic caps on graph exploration, scaled by sequence entropy and path
// length (implemented in Common.cpp).
size_t getMaxPaths(const double seq_entropy, const size_t max_len_path, const size_t k);
size_t getMaxBranch(const double seq_entropy, const size_t max_len_path, const size_t k);
// True when a and b share at least min_shared_ids IDs; the TinyBloomFilter
// overloads presumably use a pre-built filter of a as a fast pre-check —
// confirm in Common.cpp.
bool hasEnoughSharedPairID(const PairID& a, const PairID& b, const size_t min_shared_ids);
bool hasEnoughSharedPairID(const TinyBloomFilter<uint32_t>& tbf_a, const PairID& a, const PairID& b, const size_t min_shared_ids);
// Number of IDs present in both a and b.
size_t getNumberSharedPairID(const PairID& a, const PairID& b);
size_t getNumberSharedPairID(const TinyBloomFilter<uint32_t>& tbf_a, const PairID& a, const PairID& b);
// The order of nucleotides in this array is important: the index is a 4-bit
// flag set with A = 0x1, C = 0x2, G = 0x4, T = 0x8, so each entry is the
// IUPAC code for that combination (e.g. ambiguity_c[0x5] = 'R' = A/G,
// ambiguity_c[0xF] = 'N'). Index 0 (no nucleotide) is '.'.
static const char ambiguity_c[16] = {'.', 'A', 'C', 'M', 'G', 'R', 'S', 'V', 'T', 'W', 'Y', 'H', 'K', 'D', 'B', 'N'};
// Uppercases len bytes of s in place by clearing ASCII bit 0x20.
// Intended for alphabetic characters: the mask also alters non-letter
// bytes (digits, '.', ...), so callers must pass letters only.
inline void toUpperCase(char* s, const size_t len) {
    for (size_t i = 0; i < len; ++i) s[i] = static_cast<char>(s[i] & 0xDF);
}
// Appends the content of every file in src, in order, to dest.
// dest is opened in binary append mode, so existing content is preserved.
inline void copyFile(const std::string& dest, const std::vector<std::string>& src){
    std::ofstream out_file(dest, std::ios::binary | std::ios::app);
    for (std::size_t i = 0; i < src.size(); ++i){
        std::ifstream in_file(src[i], std::ios::binary);
        out_file << in_file.rdbuf();
    }
}
// Counts the sequence records in a single FASTA/FASTQ file.
// Record iteration is delegated to Bifrost's FileParser.
inline size_t countRecordsFASTX(const string& filename){
const vector<string> v(1, filename);
FileParser fp(v);
size_t i = 0; // file index reported by FileParser (unused here)
size_t nb_rec = 0;
string s; // sequence buffer (content discarded, we only count)
while (fp.read(s, i)) ++nb_rec;
fp.close();
return nb_rec;
}
// Counts the sequence records across all given FASTA/FASTQ files.
inline size_t countRecordsFASTX(const vector<string>& filenames){
FileParser fp(filenames);
size_t i = 0; // file index reported by FileParser (unused here)
size_t nb_rec = 0;
string s; // sequence buffer (content discarded, we only count)
while (fp.read(s, i)) ++nb_rec;
fp.close();
return nb_rec;
}
// Maps a set of possible nucleotides (A/C/G/T flags) to the matching IUPAC
// ambiguity character ('.' when no flag is set, 'N' when all four are).
inline char getAmbiguity(const bool nuc_a, const bool nuc_c, const bool nuc_g, const bool nuc_t) {
    uint8_t idx = 0;
    if (nuc_a) idx |= 0x1;
    if (nuc_c) idx |= 0x2;
    if (nuc_g) idx |= 0x4;
    if (nuc_t) idx |= 0x8;
    return ambiguity_c[idx];
}
// Returns the 4-bit A/C/G/T flag index of an IUPAC ambiguity character
// (inverse of getAmbiguity). Lowercase letters are accepted: bit 0x20 is
// cleared before the lookup. Characters not in ambiguity_c map to 0 —
// including '.', since masking turns '.' into a byte outside the table,
// which still lands on the correct '.' slot.
//
// FIX(idiom): replaces the original branchless mask-and-add over all 16
// table entries with a plain lookup loop; results are identical (each
// character appears at most once in ambiguity_c) and far easier to read.
inline uint8_t getAmbiguityIndex(const char nuc_ambiguity){
    const char c = nuc_ambiguity & 0xDF; // uppercase the letter
    for (uint8_t i = 0; i < 16; ++i){
        if (c == ambiguity_c[i]) return i;
    }
    return 0; // unknown character: same result as the original code
}
// Maps a 4-bit A/C/G/T flag index back to its IUPAC ambiguity character.
// idx must be < 16 (no bounds check is performed).
inline char getAmbiguityIndexRev(const uint8_t idx){
return ambiguity_c[idx];
}
// Decomposes an IUPAC ambiguity character into its A/C/G/T flags
// (inverse of getAmbiguity, via getAmbiguityIndex).
inline void getAmbiguityRev(const char nuc_ambiguity, bool& nuc_a, bool& nuc_c, bool& nuc_g, bool& nuc_t) {
    const uint8_t idx = getAmbiguityIndex(nuc_ambiguity);
    nuc_a = ((idx >> 0) & 0x1) != 0;
    nuc_c = ((idx >> 1) & 0x1) != 0;
    nuc_g = ((idx >> 2) & 0x1) != 0;
    nuc_t = ((idx >> 3) & 0x1) != 0;
}
// Clamps every quality character of s into the standard Phred+33 range
// ['!' (33) .. 'I' (73)] in place.
inline void getStdQual(std::string& s) {
    const char qv_floor = static_cast<char>(33);
    const char qv_ceil = static_cast<char>(73);
    for (std::size_t i = 0; i < s.length(); ++i){
        if (s[i] < qv_floor) s[i] = qv_floor;
        else if (s[i] > qv_ceil) s[i] = qv_ceil;
    }
}
// Converts a score in [0, 1] to a Phred+33 quality character. The 40-value
// standard Phred scale is shrunk by qv_min so the returned character never
// drops below (33 + qv_min).
inline char getQual(const double score, const size_t qv_min = 0) {
    const int phred_base = 33;  // '!' — Phred+33 offset
    const int phred_scale = 40; // standard Phred range
    const double scaled = score * static_cast<double>(phred_scale - qv_min);
    return static_cast<char>(scaled + phred_base + qv_min);
}
// Inverse of getQual: converts a Phred+33 quality character back to a score
// in [0, 1] (exact inverse only for characters getQual can produce).
inline double getScore(const char c, const size_t qv_min = 0) {
    const int phred_base = 33;
    const int phred_scale = 40;
    const double offset = static_cast<double>(c - phred_base - qv_min);
    return offset / static_cast<double>(phred_scale - qv_min);
}
// A haplotype constraint set accepts hap_id when it is unconstrained
// (empty) or explicitly contains hap_id.
inline bool isValidHap(const PairID& hap_ids, const uint64_t hap_id) {
return (hap_ids.isEmpty() || hap_ids.contains(hap_id));
}
// Returns the (min, max) acceptable lengths around l, scaled down/up by
// len_factor. Both bounds are clamped to at least 1.
inline std::pair<size_t, size_t> getMinMaxLength(const size_t l, const double len_factor) {
    const double lower = std::max(l / len_factor, 1.0);
    const double upper = std::max(l * len_factor, 1.0);
    return {static_cast<size_t>(lower), static_cast<size_t>(upper)};
}
size_t approximate_log2(size_t v);
#endif |
GIT=https://gitlab.redox-os.org/redox-os/netutils.git
|
const React = require('react');
// Three-stage countdown component: ticks `second` down from 40, then
// `third` (from 60), then `fourth` (from 80); afterwards the interval is
// cleared and "Countdown ended" is rendered.
const CountdownTimer = () => {
// Initial remaining seconds for each stage.
const initialValues = {
second: 40,
third: 60,
fourth: 80,
};
const [values, setValues] = React.useState(initialValues);
React.useEffect(() => {
// A single shared 1s interval drives whichever stage is still positive.
const interval = setInterval(() => {
setValues(prevValues => {
if (prevValues.second > 0) {
return { ...prevValues, second: prevValues.second - 1 };
} else if (prevValues.third > 0) {
return { ...prevValues, third: prevValues.third - 1 };
} else if (prevValues.fourth > 0) {
return { ...prevValues, fourth: prevValues.fourth - 1 };
} else {
// All stages exhausted: stop ticking.
// NOTE(review): clearing the interval inside a state updater is a
// side effect; under React StrictMode updaters may run twice.
clearInterval(interval);
return prevValues;
}
});
}, 1000);
// Cleanup on unmount.
return () => clearInterval(interval);
}, []);
return (
<div>
{values.second > 0 ? (
<p>Countdown: {values.second}</p>
) : values.third > 0 ? (
<p>Countdown: {values.third}</p>
) : values.fourth > 0 ? (
<p>Countdown: {values.fourth}</p>
) : (
<p>Countdown ended</p>
)}
</div>
);
};
module.exports = CountdownTimer;
<reponame>gustavnikolaj/linter-js-standard-engine<filename>lib/register.js
const caches = require('./caches')
const commands = require('./commands')
const optInManager = require('./optInManager')
const reportError = require('./reportError')
// Register the package commands with the Atom workspace; the disposable is
// released in deactivate().
const commandDisposable = atom.commands.add('atom-workspace', commands)
// Lazy loader for the linting implementation: the first call swaps this
// stub for the real lint() so ./linting is only required when needed.
let lint = (...args) => {
const { lint: loaded } = require('./linting')
lint = loaded
return loaded(...args)
}
// Package settings surfaced in Atom's settings view.
exports.config = {
enabledProjects: {
title: 'Enable',
description: 'Control whether linting should be enabled manually, for each project, or is enabled for all projects.',
type: 'number',
default: optInManager.SOME,
enum: [
{value: optInManager.NONE, description: 'Reset existing permissions'},
{value: optInManager.SOME, description: 'Decide for each project'},
{value: optInManager.ALL, description: 'Enable for all projects'}
]
},
grammarScopes: {
title: 'Scopes',
description: 'Grammar scopes for which linting is enabled. Reload window for changes to take effect.',
type: 'array',
default: ['javascript', 'source.js', 'source.js.jsx'],
items: {
type: 'string'
}
}
}
// Restore per-project opt-in state saved by serialize().
exports.activate = state => {
optInManager.activate(state && state.optIn)
}
// Tear down caches, the command registration and the opt-in manager.
exports.deactivate = () => {
caches.clearAll()
commandDisposable.dispose()
optInManager.deactivate()
}
// Linter provider consumed by the linter package.
exports.provideLinter = () => ({
name: 'standard-engine',
grammarScopes: atom.config.get('linter-js-standard-engine.grammarScopes'),
scope: 'file',
lintsOnChange: true,
lint (textEditor) {
return lint(textEditor, reportError)
}
})
// Persist the opt-in decisions across Atom restarts.
exports.serialize = () => ({ optIn: optInManager.serialize() })
|
#include <iostream>
#include <string>
using namespace std;
// Function to print permutations of string s
// Prints every permutation of s[l..r] (one per line) by swapping each
// candidate character into position l and recursing on the suffix.
// s is taken by value, so the caller's string is never modified.
void permute(std::string s, int l, int r)
{
    // Base case: the whole prefix is fixed — emit it.
    if (l == r) {
        std::cout << s << std::endl;
        return;
    }
    for (int i = l; i <= r; ++i) {
        std::swap(s[l], s[i]);   // choose s[i] for position l
        permute(s, l + 1, r);    // permute the remainder
        std::swap(s[l], s[i]);   // backtrack
    }
}
int main()
{
    // Print every permutation of the sample word.
    const std::string word = "hello";
    const int last = static_cast<int>(word.size()) - 1;
    permute(word, 0, last);
    return 0;
}
<reponame>moltar/plugin-throttling.js
// Resolves immediately; scheduled as a placeholder job so a limiter slot is
// consumed without doing any work.
const noop = () => Promise.resolve();
// Entry point: funnel every request through the retry limiter.
// @ts-ignore
export function wrapRequest(state, request, options) {
return state.retryLimiter.schedule(doRequest, state, request, options);
}
// Runs one request after reserving slots on the applicable rate limiters
// (writes, notification-triggering writes, search, global), then surfaces
// GraphQL RATE_LIMITED errors as a thrown Error.
// @ts-ignore
async function doRequest(state, request, options) {
const isWrite = options.method !== "GET" && options.method !== "HEAD";
const isSearch =
options.method === "GET" && options.url.startsWith("/search/");
const isGraphQL = options.url.startsWith("/graphql");
// ~~ coerces undefined retryCount to 0; retries jump the queue.
const retryCount = ~~options.request.retryCount;
const jobOptions = retryCount > 0 ? { priority: 0, weight: 0 } : {};
if (state.clustering) {
// Remove a job from Redis if it has not completed or failed within 60s
// Examples: Node process terminated, client disconnected, etc.
// @ts-ignore
jobOptions.expiration = 1000 * 60;
}
// Guarantee at least 1000ms between writes
// GraphQL can also trigger writes
if (isWrite || isGraphQL) {
await state.write.key(state.id).schedule(jobOptions, noop);
}
// Guarantee at least 3000ms between requests that trigger notifications
if (isWrite && state.triggersNotification(options.url)) {
await state.notifications.key(state.id).schedule(jobOptions, noop);
}
// Guarantee at least 2000ms between search requests
if (isSearch) {
await state.search.key(state.id).schedule(jobOptions, noop);
}
const req = state.global.key(state.id).schedule(jobOptions, request, options);
if (isGraphQL) {
// GraphQL reports rate limiting inside a 200 response body, so inspect
// the errors array rather than relying on the HTTP status.
const res = await req;
if (
res.data.errors != null &&
// @ts-ignore
res.data.errors.some((error) => error.type === "RATE_LIMITED")
) {
const error = Object.assign(new Error("GraphQL Rate Limit Exceeded"), {
headers: res.headers,
data: res.data,
});
throw error;
}
}
return req;
}
|
<filename>src/shared/view/drafts/FundsMarketplace/containers/FundsMarketplace/FundsMarketplace.style.ts<gh_stars>0
import injectSheet, { Theme, WithStyles } from 'react-jss';
import { rule } from 'shared/helpers/style';
// Horizontal gap budget (in % of the row) shared by the cards of one row.
const percentForMargin = 3;
// Half the gap goes on each side of a card.
const cardMargin = (percentForMargin / 2).toFixed(1);
// Width/margin rule so exactly `itemAtRow` cards fill one row.
const fundCardStyles = (itemAtRow: number) => {
const width = 100 / itemAtRow - percentForMargin;
const margin = cardMargin;
return {
width: `${width}%`,
marginLeft: `${margin}%`,
marginRight: `${margin}%`,
};
};
// JSS rules for the FundsMarketplace container: a responsive card grid
// (1 column on phones, 2 between sm/md, 3 from md up).
const styles = ({ extra: theme }: Theme) => ({
root: rule({
}),
header: rule({
display: 'flex',
justifyContent: 'space-between',
marginBottom: '1.6875rem',
padding: `0 ${cardMargin}%`,
}),
title: rule({
display: 'flex',
justifyContent: 'space-between',
fontFamily: theme.typography.primaryFont,
fontSize: '1.125rem',
fontWeight: 'bold',
color: theme.palette.text.primary,
}),
content: rule({
display: 'flex',
flexDirection: 'column',
[theme.breakpoints.up('sm')]: rule({
flexDirection: 'row',
flexWrap: 'wrap',
}),
}),
search: rule({
marginRight: '1rem',
fontSize: '1.125rem',
color: theme.colors.black,
}),
fundCard: rule({
flexGrow: 1,
boxShadow: `0 0.0625rem 0.4375rem 0 rgba(184, 184, 184, 0.5)`,
marginBottom: '1.375rem',
[theme.breakpoints.up('sm')]: rule({
flexGrow: 0,
}),
[theme.breakpoints.between('sm', 'md')]: rule({
...fundCardStyles(2),
}),
[theme.breakpoints.up('md')]: rule({
...fundCardStyles(3),
}),
}),
});
export const provideStyles = injectSheet(styles);
export type StylesProps = WithStyles<typeof styles>;
|
<reponame>Ashindustry007/competitive-programming
// https://cses.fi/problemset/task/1654/
#include <bits/stdc++.h>
using namespace std;
// Number of bits per value (input values are < 2^21).
int N = 21;
int main() {
ios::sync_with_stdio(0);
cin.tie(0);
int n;
cin >> n;
// b[mask]: count of input values that are submasks of mask.
// c[mask]: count of input values that are supermasks of mask.
// Both start as point counts and are spread by the SOS DP below.
vector<int> a(n), b(1 << N), c(1 << N);
for (int i = 0; i < n; i++) cin >> a[i], b[a[i]]++, c[a[i]]++;
// Sum-over-subsets (and the mirrored sum-over-supersets) DP, one bit at
// a time: having bit i set pulls counts from the mask without bit i.
for (int i = 0; i < N; i++)
for (int j = 0; j < (1 << N); j++)
if (j & (1 << i)) b[j] += b[j ^ (1 << i)];
else c[j] += c[j ^ (1 << i)];
// For each x: #y with x|y = x (submasks), #y with x&y = x (supermasks),
// and #y with x&y != 0 = n minus the values fitting entirely in ~x.
for (int x : a)
cout << b[x] << " " << c[x] << " " << n - b[(1 << N) - x - 1] << "\n";
}
|
<reponame>Accessible-Concepts/scrumlr.io
import Parse from "parse";
import {NoteClientModel} from "types/note";
import {ActionType, ReduxAction} from "store/action";
// eslint-disable-next-line default-param-last
// Redux reducer managing the client-side list of board notes.
//
// FIX(review): EditNote, DragNote and UnstackNote previously mutated the
// incoming state (objects and, for EditNote, the array via splice on an
// alias) and returned the same reference, which violates Redux's
// immutability contract and can suppress re-renders. They now operate on
// copies and return a fresh array.
// eslint-disable-next-line default-param-last
export const noteReducer = (state: NoteClientModel[] = [], action: ReduxAction): NoteClientModel[] => {
  switch (action.type) {
    case ActionType.AddNote: {
      // Optimistic local note; `dirty` until the server confirms it.
      const localNote: NoteClientModel = {
        columnId: action.columnId,
        text: action.text,
        // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
        author: Parse.User.current()!.id,
        focus: false,
        dirty: true,
        positionInStack: -1,
      };
      return [...state, localNote];
    }
    case ActionType.CreatedNote: {
      // Replace the matching optimistic note (same text, no id yet), or
      // append when none exists.
      const newState = [...state];
      const foundExistingNoteIndex = newState.findIndex((note) => !note.id && note.text === action.note.text);
      if (foundExistingNoteIndex >= 0) {
        newState.splice(foundExistingNoteIndex, 1, action.note);
      } else {
        newState.push(action.note);
      }
      return newState;
    }
    case ActionType.DeleteNote: {
      return state.filter((note) => note.id !== action.noteId);
    }
    case ActionType.EditNote: {
      const noteIndex = state.findIndex((note) => note.id === action.note.id);
      // FIX: guard against an unknown id (splice(-1, ...) silently
      // replaced the last note) — consistent with UpdatedNote — and copy
      // the array instead of splicing the previous state.
      if (noteIndex >= 0) {
        const newState = [...state];
        newState.splice(noteIndex, 1, {
          ...state[noteIndex],
          ...action.note,
          dirty: true,
        });
        return newState;
      }
      return state;
    }
    case ActionType.UpdatedNote: {
      const noteIndex = state.findIndex((note) => note.id === action.note.id);
      if (noteIndex >= 0) {
        const newState = [...state];
        newState.splice(noteIndex, 1, action.note);
        return newState;
      }
      return state;
    }
    case ActionType.DragNote: {
      // Copy every note so stack/column updates never touch the old state.
      const newState = state.map((noteInList) => ({...noteInList}));
      const dragedOn = newState.find((note) => note.id === action.note.dragOnId);
      const note = newState.find((noteInList) => noteInList.id === action.note.id);
      const childNotes = newState.filter((noteInList) => noteInList.parentId === action.note.id);
      if (dragedOn && note) {
        // Reparent the drop target (and its previous children) onto the
        // dragged note's stack, preserving their relative order.
        const childNotesDragedOn = newState.filter((noteInList) => noteInList.parentId === action.note.dragOnId);
        dragedOn.parentId = action.note.id;
        dragedOn.positionInStack = childNotes.length + 1;
        childNotesDragedOn
          .sort((a, b) => a.positionInStack - b.positionInStack)
          .forEach((child, index) => {
            child.parentId = action.note.id;
            child.positionInStack = index + childNotes.length + 2;
          });
        note.positionInStack = 0;
      }
      if (action.note.columnId && note) {
        // Moving columns carries the whole stack along.
        note.columnId = action.note.columnId;
        childNotes.forEach((childNote) => {
          childNote.columnId = action.note.columnId!;
        });
      }
      return newState;
    }
    case ActionType.UnstackNote: {
      const newState = state.map((note) => ({...note}));
      const unstack = newState.find((note) => note.id === action.note.id);
      if (unstack) {
        unstack.parentId = undefined;
        unstack.positionInStack = -1;
      }
      // Evaluated after the note left the stack: a parent without children
      // is no longer a stack itself.
      const childNotes = newState.filter((note) => note.parentId === action.note.parentId);
      if (childNotes.length === 0) {
        const parent = newState.find((note) => note.id === action.note.parentId);
        if (parent) {
          parent.positionInStack = -1;
        }
      }
      return newState;
    }
    case ActionType.InitializeNotes: {
      return [...action.notes];
    }
    default: {
      return state;
    }
  }
};
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.tablet = void 0;
// Icon definition for a 16x16 "tablet" glyph: a viewBox plus the SVG child
// nodes as {name, attribs} objects (presumably consumed by an icon renderer
// that turns them into elements — generated code, do not edit by hand).
var tablet = {
"viewBox": "0 0 16 16",
"children": [{
"name": "path",
"attribs": {
"fill": "#000000",
"d": "M12.5 0h-10c-0.825 0-1.5 0.675-1.5 1.5v13c0 0.825 0.675 1.5 1.5 1.5h10c0.825 0 1.5-0.675 1.5-1.5v-13c0-0.825-0.675-1.5-1.5-1.5zM7.5 15.5c-0.276 0-0.5-0.224-0.5-0.5s0.224-0.5 0.5-0.5 0.5 0.224 0.5 0.5-0.224 0.5-0.5 0.5zM12 14h-9v-12h9v12z"
}
}]
};
exports.tablet = tablet;
<reponame>jsonshen/MyBatisX<filename>core/src/main/java/org/shenjia/mybatis/paging/PagingSelectStatementProvider.java
package org.shenjia.mybatis.paging;
import java.util.Map;
import org.mybatis.dynamic.sql.select.render.SelectStatementProvider;
/**
 * {@link SelectStatementProvider} used for paginated queries: carries an
 * already-rendered (paging-decorated) SQL select statement together with
 * its bind parameters.
 *
 * <p>FIX: fields are now {@code final} — the class is a pure value holder
 * and nothing reassigns them after construction.</p>
 */
public class PagingSelectStatementProvider implements
    SelectStatementProvider {

    /** Bind parameters for the rendered statement (held by reference, not copied). */
    private final Map<String, Object> parameters;

    /** Fully rendered SQL select statement. */
    private final String selectStatement;

    /**
     * @param parameters bind parameters keyed by placeholder name
     * @param selectStatement rendered SQL text
     */
    public PagingSelectStatementProvider(Map<String, Object> parameters,
        String selectStatement) {
        this.parameters = parameters;
        this.selectStatement = selectStatement;
    }

    @Override
    public Map<String, Object> getParameters() {
        return parameters;
    }

    @Override
    public String getSelectStatement() {
        return selectStatement;
    }
}
|
#!/usr/bin/env bash
echo -n "
██████╗ █████╗ ███████╗██╗ ██╗███████╗██╗ ██╗██╗ ██╗
██╔══██╗██╔══██╗██╔════╝██║ ██║██╔════╝██║ ██║╚██╗██╔╝
██████╔╝███████║███████╗███████║█████╗ ██║ ██║ ╚███╔╝
██╔══██╗██╔══██║╚════██║██╔══██║██╔══╝ ██║ ██║ ██╔██╗
██████╔╝██║ ██║███████║██║ ██║██║ ███████╗██║██╔╝ ██╗
╚═════╝ ╚═╝ ╚═╝╚══════╝╚═╝ ╚═╝╚═╝ ╚══════╝╚═╝╚═╝ ╚═╝
"
echo ""
echo "Welcome to bashflix installation script!"
echo ""
echo "The following software will be installed:"
echo "* pirate-get & we-get - torrent search"
echo "* peerflix & vlc - torrent streaming and playing"
echo "* subliminal - subtitles"
echo ""
# Re-run this script under sudo when not already root.
# NOTE(review): after the sudo re-run finishes, this non-root invocation
# falls through and continues executing below as well — confirm that the
# double run is intended (an `exec` or `exit` here would avoid it).
case $EUID in
0) : ;; # we are running script as root so we are okay
*) echo "Please enter you system password (used on brew, apt, npm, pip, etc)." && /usr/bin/sudo $0 "${@}" ;; # not root, become root for the rest of this session (and ask for the sudo password only once)
esac
# Installs each given command-line tool via $PACMAN unless the command is
# already available on $PATH.
# Returns 0 when at least one package was installed, 1 otherwise.
# NOTE: PACINSTALL is deliberately a (scratch) global, not `local`.
formula_install() {
PACINSTALL=false
for i in "${@}"; do
if ! command -v "${i}" > /dev/null 2>&1
then
echo -e "Installing ${i}"
$PACMAN "${i}" && PACINSTALL=true
fi
done
if [ "${PACINSTALL}" == true ]
then
return 0
else
return 1
fi
}
# Installs each given package via $PACMAN unless it already appears in the
# $PACSEARCH installed-package listing (for libraries that provide no
# command to probe with `command -v`).
# Returns 0 when at least one package was installed, 1 otherwise.
library_install() {
PACINSTALL=false
for i in "${@}"; do
if ! $PACSEARCH | grep "^${i}$" > /dev/null 2>&1
then
echo "Installing ${i}"
$PACMAN "${i}" && PACINSTALL=true
fi
done
if [[ "${PACINSTALL}" == true ]]
then
return 0
else
return 1
fi
}
# Detect the kernel and distro family used to pick package-manager commands.
# NOTE(review): unrecognized Linux distros fall through with OS unset;
# only a non-Darwin/non-Linux kernel aborts.
if [[ $(uname -s) == "Darwin" ]]
then
KERNEL="Darwin"
OS="macos"
elif [[ $(uname -s) == "Linux" ]]
then
KERNEL="Linux"
if [[ -f /etc/arch-release ]]
then
OS="arch"
elif [[ -f /etc/debian_version ]]
then
OS="debian"
elif [[ -f /etc/redhat-release ]]
then
OS="fedora"
fi
else
exit 1
fi
# Select the install ($PACMAN) and list-installed ($PACSEARCH) commands for
# the detected OS; on macOS, bootstrap Homebrew first when missing.
if [[ "$OS" == "macos" ]]; then
echo "Looking for Homebrew ..."
#if command -v brew > /dev/null 2>&1 # check if brew is installed
if ! which brew &>/dev/null; then
echo "Preparing to install Homebrew ..."
/usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install.sh)"
brew update
fi
PACMAN='brew install'
PACSEARCH='brew list'
else
# Map each distro marker file to its package-manager commands.
declare -A osInfo;
osInfo[/etc/redhat-release]='sudo dnf --assumeyes install'
osInfo[/etc/arch-release]='sudo pacman -S --noconfirm'
osInfo[/etc/gentoo-release]='sudo emerge'
osInfo[/etc/SuSE-release]='sudo zypper in'
osInfo[/etc/debian_version]='sudo apt-get --assume-yes install'
declare -A osSearch;
osSearch[/etc/redhat-release]='dnf list installed'
osSearch[/etc/arch-release]='pacman -Qq'
osSearch[/etc/gentoo-release]="cd /var/db/pkg/ && ls -d */*| sed 's/\/$//'"
osSearch[/etc/SuSE-release]='rpm -qa'
osSearch[/etc/debian_version]='dpkg -l' # previously `apt list --installed`. Can use `sudo apt-cache search`.
for f in "${!osInfo[@]}"
do
if [[ -f $f ]];then
PACMAN="${osInfo[$f]}"
fi
done
for s in "${!osSearch[@]}"
do
if [[ -f $s ]];then
PACSEARCH="${osSearch[$s]}"
fi
done
# FIX: the apt bootstrap below used to run unconditionally on every
# Linux distro; apt only exists on Debian-based systems.
if [[ "$OS" == "debian" ]]; then
sudo apt update -y
sudo apt install -y git curl software-properties-common build-essential libssl-dev
fi
fi
#define the formula that the majority of OSs use
#so that we only have to redefine formula minimally, as required
PYTHON3='python3'
PIP3='python3-pip'
NPM='npm'
NODE='nodejs'
case $OS in
macos)
PYTHON3='python' # Includes pip3 on macOS
NODE='node'
;;
arch)
PYTHON3='python'
PIP3='python-pip'
;;
debian) ;;
fedora) ;;
esac
# Install the interpreter/runtime prerequisites, then the actual tools.
formula_install "${PYTHON3}" "${NODE}" "${NPM}" "${PIP3}"
library_install "${PIP3}"
if [[ "$OS" == "macos" ]]; then
brew upgrade node
brew upgrade python3
# FIX: `brew cask install` was removed from Homebrew; casks are now
# installed with the --cask flag.
brew install --cask vlc
elif [[ "$OS" == "debian" ]]; then
# FIX: this apt-specific package list used to run on every non-macOS
# system (including arch/fedora, where apt does not exist).
sudo apt install -y npm vlc libxslt1-dev libxml2-dev
fi
pip3 install --upgrade pip
pip3 install --upgrade pirate-get
pip3 install --upgrade subliminal
sudo npm install -g npm@latest
sudo npm install -g peerflix
sudo pip install git+https://github.com/rachmadaniHaryono/we-get
#sudo npm install webtorrent-cli -g
#brew install webtorrent-cli
# Fetch (or refresh) the bashflix checkout and link it onto $PATH.
cd ~ || exit 1
sudo rm -rf bashflix
git clone https://github.com/0zz4r/bashflix.git
cd bashflix || exit 1
script_directory="$(pwd)"
chmod +x "${script_directory}/bashflix.sh"
sudo ln -fs "${script_directory}/bashflix.sh" /usr/local/bin/bashflix
# FIX: `sudo echo > file` performs the redirect as the invoking user
# anyway, and the history file lives in $HOME — plain truncation is right.
echo > "$HOME/.bashflix_history"
# FIX: -p so re-installs (or a missing ~/.local/share) do not abort here.
mkdir -p "$HOME/.local/share/bashflix"
mv ./plugins/* "$HOME/.local/share/bashflix"
cd - || exit 1
|
# Utility function for quickly open a browser url.
# Arguments:
#   1. url: The url to open.
#
function _do_browser_open() {
local url=$1

if [ "${DO_OS}" = "mac" ]; then
# For mac, the open command is supported by default.
open "${url}"
# FIX: probe with the POSIX builtin `command -v` instead of the external
# (and not universally available) `which`.
elif command -v xdg-open &>/dev/null; then
xdg-open "${url}"
elif command -v gnome-open &>/dev/null; then
gnome-open "${url}"
elif command -v cygstart &>/dev/null; then
cygstart "${url}"
else
echo "Cannot open ${url}"
fi
}
# Utility function for quickly open a browser url in async mode.
# This function will use curl to wait for the url to be available
# before actually opening it. Prints the PID of the background waiter.
# Arguments:
#   1. url: The url to open.
#
function _do_browser_open_async() {
local url=$1

# FIX: use a literal printf format; the url was previously expanded into
# the format string, so any '%' in it would corrupt the output.
printf '%s' "Openning ${url}"

# FIX: invoke curl directly instead of the fragile `until $(...)`
# command-substitution form, and quote the url. Polls once per second
# until a HEAD request succeeds, then opens the browser in background.
until curl --output /dev/null --silent --head --max-time 1 --fail "$url"; do
sleep 1
printf '.'
done && echo "open ${url}" && _do_browser_open "${url}" &

echo $!
}
|
<gh_stars>1-10
define(function () {
    // Panel that loads KML documents (from a file chooser or a pasted URL)
    // into a Web WorldWind globe and lists the loaded layers.
    var KMLPanel = function (wwd, KmlFile) {
        this.wwd = wwd;
        this.KmlFile = KmlFile;
        this.myKML = {}; // loaded layers, keyed by insertion index
        var self = this;
        this.index = 0;
        this.fileTypeKML = 1; // 0 = file upload, 1 = text-area input

        // Toggle between the file chooser and the text-area input.
        $("#fileTypeKML").change(function () {
            var val = $("#fileTypeKML").val();
            if (val == "0") {
                $("#csv-KML").show();
                $("#KMLTxtArea").hide();
                self.fileTypeKML = 0;
            } else if (val == "1") {
                $("#csv-KML").hide();
                $("#KMLTxtArea").show();
                self.fileTypeKML = 1;
            }
        });
        $("#loadKMLBtn").on("click", function () {
            self.addTIFF(self.wwd);
        })
    };

    // Loads the currently selected KML source into a new renderable layer.
    // (Name kept for compatibility with existing callers; it loads KML,
    // not TIFF.)
    KMLPanel.prototype.addTIFF = function (wwd) {
        var self = this;
        var KMLLayer = new WorldWind.RenderableLayer("KML");
        KMLLayer.index = this.index++;
        this.myKML[this.index] = KMLLayer;
        $("#loading").show();
        try {
            if (this.fileTypeKML === 0) {
                var resourcesUrl = $("#csv-KML").get(0).files[0];
            } else {
                var resourcesUrl = document.getElementById("KMLTxtArea").value;
            }
            var kmlFilePromise = new this.KmlFile(resourcesUrl);
        } catch (e) {
            $("#loading").hide();
            alert("Error occurred:" + e)
            // FIX: without this return, kmlFilePromise stays undefined and
            // the .then() call below throws a TypeError after the alert.
            return;
        }
        kmlFilePromise.then(function (kmlFile) {
            KMLLayer.addRenderable(kmlFile);
            wwd.addLayer(KMLLayer);
            wwd.redraw();
            $("#loading").hide();
            self.createInterface(wwd);
            // Best effort: fly to the document's LookAt position if the
            // KML exposes one; silently skip otherwise.
            try {
                var el = kmlFile.node.children[0];
                el = $(el).find("Folder").children();
                el = $(el).find("LookAt").children();
                var lng = el[0].textContent;
                var lat = el[1].textContent;
                wwd.goTo(new WorldWind.Position(lat, lng));
            } catch (e) {
                console.log("cannot find psoition of KML")
            }
        }).catch(function (e) {
            $("#loading").hide();
            alert("Error occurred:" + e)
        });
    };

    // Rebuilds the clickable layer list; clicking an entry removes the
    // corresponding layer from the globe and from this.myKML.
    KMLPanel.prototype.createInterface = function (wwd) {
        $("#KMLList").html("");
        var self = this;
        for (var key in self.myKML) {
            var name = self.myKML[key].displayName + " " + key;
            var myDiv = $("<div key=" + key + " class='listJson'>❌" + name + "</div>");
            $("#KMLList").append(myDiv);
            myDiv.on('click', function () {
                var myKey = $(this).attr("key");
                wwd.removeLayer(self.myKML[myKey]);
                $(this).remove();
                delete(self.myKML[myKey]);
                wwd.redraw();
            })
        }
    };

    return KMLPanel;
    }
);
|
/*ckwg +29
* Copyright 2016-2017 by Kitware, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither name of Kitware, Inc. nor the names of any contributors may be used
* to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
* \file
* \brief Interface and implementation of WALL timer classes
*/
#ifndef KWIVER_VITAL_WALL_TIMER_H
#define KWIVER_VITAL_WALL_TIMER_H
#include <vital/util/timer.h>
#include <string>
#include <chrono>
#include <iostream>
namespace kwiver {
namespace vital {
// ----------------------------------------------------------------
/**
* @brief Interval wall clock timer class.
*
* This class represents an interval timer that measures wall clock time.
*/
class wall_timer
: public timer
{
public:
wall_timer()
{ }
// Marks the timer inactive on destruction. m_active is inherited from the
// timer base class (presumably declared in vital/util/timer.h — it is not
// defined in this file).
~wall_timer()
{
m_active = false;
}
// Wall-clock timing is always available in this implementation.
virtual bool timer_available() { return true; }
/**
* @brief Start the timer.
*
* The clock time when this timer is started is saved.
*/
virtual void start()
{
m_active = true;
m_start = std::chrono::steady_clock::now();
}
/**
* @brief Stop the timer.
*
* The time this clock was stopped is saved. This value is used to
* determine the elapsed time.
*/
virtual void stop()
{
m_active = false;
m_end = std::chrono::steady_clock::now();
}
/**
* @brief Calculate elapsed time.
*
* The elapsed time of this timer is returned. This method works if
* the timer is stopped or still running.
*
* @return Seconds since the timer was started.
*/
virtual double elapsed() const
{
if (m_active)
{
// Take a snapshot of the interval.
std::chrono::duration< double > elapsed_seconds = std::chrono::steady_clock::now() - m_start;
return elapsed_seconds.count();
}
else
{
std::chrono::duration< double > elapsed_seconds = m_end - m_start;
return elapsed_seconds.count();
}
}
private:
// Interval endpoints. steady_clock is monotonic, so elapsed() is immune
// to system clock adjustments.
std::chrono::time_point< std::chrono::steady_clock > m_start;
std::chrono::time_point< std::chrono::steady_clock > m_end;
}; // end class wall_timer
template class scoped_timer< wall_timer >;
typedef scoped_timer< wall_timer > scoped_wall_timer;
} } // end namespace
#endif /* KWIVER_VITAL_WALL_TIMER_H */
|
import collections
def mode(numbers):
    """Return a list of the most frequently occurring value(s) in numbers.

    Ties are all included, in first-seen order. Raises ValueError on an
    empty input (max() over no counts).
    """
    tally = collections.Counter(numbers)
    highest = max(tally.values())
    result = []
    for value, count in tally.items():
        if count == highest:
            result.append(value)
    return result
numbers = [1, 2, 3, 2, 4, 2]
# FIX: store the result under a new name; reusing "mode" shadowed the
# function and would break any later call to mode().
modes = mode(numbers)
print(modes)
# Output: [2]
# Pushes the already-built API and frontend images, tagged with the current
# git commit, to Docker Hub.
# NOTE(review): `docker login` is interactive and none of the commands'
# failures abort the script — consider `set -e` or explicit checks.
echo "Pushing images to dockerhub"
COMMIT=$(git rev-parse --verify HEAD)
echo "Pushing image with commit #$COMMIT"
docker login
docker push kobbikobb/guessthename_api:$COMMIT
docker push kobbikobb/guessthename_frontend:$COMMIT
echo "Pushed all images to dockerhub"
|
<gh_stars>1-10
// Reads two complex numbers (real/imag pairs) from the four command-line
// arguments and displays them via MyComplex.display().
// NOTE(review): assumes MyComplex exposes public double fields `real` and
// `imag` plus a display() method, and that four numeric args are supplied —
// no validation, so bad input throws ArrayIndexOutOfBoundsException or
// NumberFormatException.
public class ProgEx9_7 {
public static void main(String args[]) {
MyComplex z1 = new MyComplex();
MyComplex z2 = new MyComplex();
z1.real = Double.parseDouble(args[0]);
z1.imag = Double.parseDouble(args[1]);
z2.real = Double.parseDouble(args[2]);
z2.imag = Double.parseDouble(args[3]);
z1.display();
z2.display();
}
}
|
package model
import (
"encoding/json"
"io"
"k8s.io/client-go/kubernetes"
)
// Cluster represents a K8s cluster.
// NOTE(review): numeric field units (seconds?) are inferred from names —
// confirm against the callers that populate this struct.
type Cluster struct {
ClusterID string // cluster identifier
MaxScaling int // scaling limit used during rotation
RotateMasters bool // whether master nodes should be rotated
RotateWorkers bool // whether worker nodes should be rotated
MaxDrainRetries int // drain retry budget per node
EvictGracePeriod int // grace period for pod eviction
WaitBetweenRotations int // pause between node rotations
WaitBetweenDrains int // pause between node drains
WaitBetweenPodEvictions int // pause between individual pod evictions
ClientSet *kubernetes.Clientset // live API client; not set by JSON decoding
}
// ClusterFromReader decodes a json-encoded cluster from the given io.Reader.
// An empty reader (io.EOF before any content) is not treated as an error
// and yields a zero-valued Cluster.
func ClusterFromReader(reader io.Reader) (*Cluster, error) {
	var cluster Cluster
	if err := json.NewDecoder(reader).Decode(&cluster); err != nil && err != io.EOF {
		return nil, err
	}
	return &cluster, nil
}
|
def add_floats(float_1, float_2):
    """Return float_1 + float_2.

    On any failure (e.g. adding a float and a str) the sentinel string
    "Error: Invalid input" is returned instead of raising.
    """
    try:
        return float_1 + float_2
    except Exception:
        return "Error: Invalid input"


# Test cases
print(add_floats(3.5, 2.5))  # Output: 6.0
print(add_floats(5.5, '2.5'))  # Output: Error: Invalid input
print(add_floats(4.0, 6.0))  # Output: 10.0
#!/bin/bash
# Runs the test scripts in $TEST_SCRIPT_DIR inside the project's Docker
# image, building the image first when it is not present locally.
scriptPos=${0%/*}
source "$scriptPos/image_conf.sh"
# FIX: quote all expansions — unquoted $TEST_SCRIPT_DIR breaks on paths
# containing spaces (and `[ -z $VAR ]` only works by accident when unset).
if [ -z "$TEST_SCRIPT_DIR" ]; then
echo "need a env variable TEST_SCRIPT_DIR that points to dir with tests to run - cancel"
exit 1
fi
if ! [ -d "$TEST_SCRIPT_DIR" ]; then
echo "TEST_SCRIPT_DIR doesn't point to a directory - cancel"
echo "TEST_SCRIPT_DIR=$TEST_SCRIPT_DIR"
exit 1
fi
# Resolve to an absolute path: $(cd ... && pwd) replaces the old
# pushd/popd backtick construct with the same output.
testScriptDir=$(cd "$TEST_SCRIPT_DIR" && pwd)
# Tag of the locally available image, if any.
aktImgName=$(docker images | grep -G "$imageBase *$imageTag *" | awk '{print $1":"$2}')
if [ "$aktImgName" == "$imageBase:$imageTag" ]
then
echo "run container from image: $aktImgName"
else
# NOTE(review): builds $imageName but runs $imageBase:$imageTag — confirm
# image_conf.sh keeps these consistent.
if docker build -t "$imageName" "$scriptPos/../image"
then
echo -en "\033[1;34mImage created: $imageName \033[0m\n"
else
echo -en "\033[1;31mError while create image: $imageName \033[0m\n"
exit 1
fi
fi
# docker run --rm --entrypoint /bin/bash -it --name "$contName" -v ${testScriptDir}:/opt/myproject "$imageBase:$imageTag"
docker run --rm -v "${testScriptDir}:/opt/myTests" "$imageBase:$imageTag"
|
<gh_stars>1-10
/*
* Copyright 2014-2017 Realm Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.realm.processor;
import com.google.testing.compile.JavaFileObjects;
import org.junit.Ignore;
import org.junit.Test;
import java.io.IOException;
import java.util.Arrays;
import javax.lang.model.element.Modifier;
import javax.tools.JavaFileObject;
import static com.google.testing.compile.JavaSourceSubjectFactory.javaSource;
import static com.google.testing.compile.JavaSourcesSubjectFactory.javaSources;
import static org.truth0.Truth.ASSERT;
/**
 * Tests that the Realm annotation processor accepts or rejects fields of type
 * {@code MutableRealmInteger} depending on their modifiers and annotations.
 */
public class RealmCounterProcessorTest {

    @Test
    public void compileMutableRealmInteger() throws IOException {
        assertCompilesCleanly(createCounterTestClass().builder().build());
    }

    @Test
    public void compileIgnoredMutableRealmInteger() throws IOException {
        assertCompilesCleanly(createCounterTestClass().annotation("Ignore").builder().build());
    }

    @Test
    public void compileIndexedMutableRealmInteger() throws IOException {
        assertCompilesCleanly(createCounterTestClass().annotation("Index").builder().build());
    }

    @Test
    public void compileRequiredMutableRealmInteger() throws IOException {
        assertCompilesCleanly(createCounterTestClass().annotation("Required").builder().build());
    }

    @Test
    public void compileStaticMutableRealmInteger() throws IOException {
        assertCompilesCleanly(
                createCounterTestClass()
                        .modifiers(Modifier.PRIVATE, Modifier.FINAL, Modifier.STATIC)
                        .builder().build());
    }

    @Test
    public void failOnPKMutableRealmInteger() throws IOException {
        assertFailsToCompile(
                createCounterTestClass().annotation("PrimaryKey").builder().build(),
                "cannot be used as primary key");
    }

    @Test
    public void failUnlessFinalMutableRealmInteger() throws IOException {
        assertFailsToCompile(
                createCounterTestClass().modifiers(Modifier.PRIVATE).builder().build(),
                "must be final");
    }

    /** Runs the Realm annotation processor over {@code source} and asserts a clean compile. */
    private void assertCompilesCleanly(RealmSyntheticTestClass source) {
        ASSERT.about(javaSources())
                .that(Arrays.asList(source))
                .processedWith(new RealmProcessor())
                .compilesWithoutError();
    }

    /** Runs the processor over {@code source} and asserts compilation fails mentioning {@code expectedError}. */
    private void assertFailsToCompile(RealmSyntheticTestClass source, String expectedError) {
        ASSERT.about(javaSources())
                .that(Arrays.asList(source))
                .processedWith(new RealmProcessor())
                .failsToCompile()
                .withErrorContaining(expectedError);
    }

    // Constructs a synthetic "Counter" test class that *should* compile correctly.
    // It returns the ref to the Counter field; tests modify the field in
    // perverse ways to verify failure modes.
    private RealmSyntheticTestClass.Field createCounterTestClass() {
        return new RealmSyntheticTestClass.Builder().name("Counter")
                .field().name("id").type("int").builder()
                .field()
                .name("columnMutableRealmInteger")
                .type("MutableRealmInteger")
                .modifiers(Modifier.PRIVATE, Modifier.FINAL)
                .initializer("MutableRealmInteger.valueOf(0)")
                .hasSetter(false);
    }
}
|
<filename>demos/src/app/components/checkbox/checkbox.ts
import {Component, OnInit, ViewChild} from '@angular/core';
import {MdcCheckbox, MdcCheckboxChange} from '@angular-mdc/web/checkbox';
import {ComponentViewer, ComponentApi} from '../../shared/component-viewer';
// Doc page: renders the MdcCheckbox API reference inside the shared
// <component-api> viewer.
@Component({template: '<component-api></component-api>'})
export class Api implements OnInit {
  @ViewChild(ComponentApi, {static: true}) _componentApi: ComponentApi;

  ngOnInit() {
    // Static API model (selectors, properties, methods, events) consumed
    // by the ComponentApi viewer; purely declarative data, no logic.
    this._componentApi.docApi = {
      sections: [
        {
          header: 'MdcCheckbox',
          selectors: [
            'mdc-checkbox',
          ],
          exportedAs: 'mdcCheckbox',
          categories: [
            {
              name: 'Properties',
              items: [
                {name: 'id: string', summary: `Unique Id of the checkbox (auto generated if not supplied).`},
                {name: 'name: string', summary: 'Name of the checkbox.'},
                {name: 'checked: boolean', summary: 'Whether the checkbox is checked.'},
                {name: 'value: string', summary: 'The value attribute of the native input element.'},
                {name: 'tabIndex: number ', summary: 'Set the underlying tab index of the checkbox. (Default: 0)'},
                {name: 'ariaLabel: string', summary: `Used to set the 'aria-label' attribute on the underlying input element.`},
                {name: 'ariaLabelledby: string', summary: `The 'aria-labelledby' attribute takes precedence as the element's text alternative.`},
                {name: 'indeterminate: boolean', summary: 'Represent a checkbox with three states (e.g. a nested list of checkable items).'},
                {name: 'disabled: boolean', summary: 'Disables the component.'},
                {name: 'disableRipple: boolean', summary: 'Whether ripple ink is disabled.'},
                {name: 'indeterminateToChecked: boolean', summary: 'Whether the checkbox should go to checked state or unchecked when toggled from indeterminate state.'},
                {name: 'touch: boolean', summary: 'Set the component touch target to 48 x 48 px.'},
              ]
            },
            {
              name: 'Methods',
              items: [
                {name: 'focus()', summary: `Set focus to the checkbox.`},
                {name: 'toggle(checked?: boolean)', summary: 'Toggles checkbox via user action. When it is indeterminate, toggle can go to checked or unchecked, depending on state.'},
              ]
            },
            {
              name: 'Events',
              items: [
                {name: 'change(source: MdcCheckBox, checked: boolean)', summary: `Event dispatched on checked change.`},
                {name: 'indeterminateChange(source: MdcCheckbox, indeterminate: boolean)', summary: 'Emit when checkbox goes in and out of indeterminate state, but not when set to checked.'},
              ]
            },
          ]
        },
      ]
    };
  }
}
// Doc page: renders the Checkbox overview/usage docs inside <component-viewer>.
@Component({template: '<component-viewer></component-viewer>'})
export class Checkbox implements OnInit {
  @ViewChild(ComponentViewer, {static: true}) _componentViewer: ComponentViewer;

  ngOnInit(): void {
    // Title, description, reference links and install snippets shown at the
    // top of the demo page.
    this._componentViewer.template = {
      title: 'Checkbox',
      description: 'Checkboxes allow the user to select one or more items from a set.',
      references: [{
        name: 'Material Design guidelines: Checkbox',
        url: 'https://material.io/design/components/selection-controls.html#checkboxes'
      }, {
        name: 'Material Components Web',
        url: 'https://github.com/material-components/material-components-web/blob/master/packages/mdc-checkbox/README.md'
      }],
      mdcUrls: [
        {name: 'Sass Mixins', url: 'https://github.com/material-components/material-components-web/blob/master/packages/mdc-checkbox/README.md#style-customization'},
      ],
      code: `import {MdcCheckboxModule} from '@angular-mdc/web/checkbox';`,
      sass: `@use '@material/checkbox/mdc-checkbox';
@use '@material/checkbox';`
    };
  }
}
// Demo page: live checkbox examples rendered by examples.html. The example
// objects below hold the code snippets displayed next to each demo.
@Component({templateUrl: './examples.html'})
export class Examples {
  // Logs the checked state whenever a demo checkbox changes.
  onChange(event: MdcCheckboxChange) {
    console.log(event.checked);
  }

  // Programmatically toggles the given checkbox instance.
  toggle(cb: MdcCheckbox): void {
    cb.toggle();
  }

  //
  // Examples
  //

  // Plain checkboxes in each supported state.
  exampleSimple = {
    html: `<mdc-checkbox></mdc-checkbox>
<mdc-checkbox checked></mdc-checkbox>
<mdc-checkbox disabled></mdc-checkbox>
<mdc-checkbox checked disabled></mdc-checkbox>
<mdc-checkbox indeterminate></mdc-checkbox>
<mdc-checkbox indeterminate indeterminateToChecked="false"></mdc-checkbox>`
  };

  // Checkboxes wired to labels via mdc-form-field.
  exampleLabel = {
    html: `<mdc-form-field>
<mdc-checkbox></mdc-checkbox>
<label>Label</label>
</mdc-form-field>
<mdc-form-field>
<mdc-checkbox disabled></mdc-checkbox>
<label>Disabled</label>
</mdc-form-field>
<mdc-form-field alignEnd>
<mdc-checkbox></mdc-checkbox>
<label>Label</label>
</mdc-form-field>`
  };

  // Checkbox whose state is reflected dynamically in the label text.
  exampleDynamic = {
    html: `<mdc-form-field #formField>
<mdc-checkbox #cb indeterminateToChecked (change)="onChange($event)"></mdc-checkbox>
<label>Checkbox value is {{cb.checked}}</label>
</mdc-form-field>`,
    ts: `import { MdcCheckbox, MdcCheckboxChange } from '@angular-mdc/web';

onChange(event: MdcCheckboxChange) {
console.log(event.checked);
}

toggle(cb: MdcCheckbox): void {
cb.toggle();
}`
  };

  // Theming via the project's custom Sass classes.
  exampleTheme = {
    html: `<mdc-form-field>
<mdc-checkbox class="demo-checkbox--custom-all"></mdc-checkbox>
<label>Custom Stroke/Fill/Ink</label>
</mdc-form-field>
<mdc-form-field>
<mdc-checkbox class="demo-checkbox--custom-stroke-and-fill"></mdc-checkbox>
<label>Custom Stroke and Fill</label>
</mdc-form-field>`,
    sass: `https://raw.githubusercontent.com/trimox/angular-mdc-web/master/demos/src/styles/_checkbox.scss`
  };

  // 48x48px touch target for accessibility.
  exampleAccessibility = {
    html: `<div class="mdc-touch-target-wrapper">
<mdc-form-field>
<mdc-checkbox touch></mdc-checkbox>
<label>My Accessible Checkbox</label>
</mdc-form-field>
</div>`
  };
}
|
"""
Create a program to classify if a given lyric is from a pop song or a rap song
Input:
- A dataset containing 100 lyrics from some popular songs
Output:
- A program that can accurately classify the genre of a given lyric
"""
import keras
from keras.layers import Dense, Embedding, LSTM, GlobalMaxPooling1D
from keras.models import Sequential
# Create the model
model = Sequential()
model.add(Embedding(input_dim=20000, output_dim=100))
model.add(LSTM(128, dropout=0.2, recurrent_dropout=0.2))
model.add(GlobalMaxPooling1D())
model.add(Dense(1, activation='sigmoid'))
# Compile and train the model
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
model.fit(X_train, y_train, batch_size=32, epochs=5)
# Evaluate the model
accuracy = model.evaluate(X_test, y_test, batch_size=32)
print(f'Model accuracy: {accuracy[1] * 100:.2f}%') |
#
# Integrates history-substring-search into Prezto.
#
# Authors:
#   Suraj N. Kurapati <sunaku@gmail.com>
#   Sorin Ionescu <sorin.ionescu@gmail.com>
#

# Load dependencies.
pmodload 'editor'

# Source module files (only if the plugin's widgets are not already defined).
if (( ! $+functions[history-substring-search-up] )); then
  source "${0:h}/external/zsh-history-substring-search.zsh" || return 1
fi

#
# Search
#

# Each zstyle lookup falls back to a hard-coded default when the user has not
# configured the corresponding ':prezto:module:history-substring-search' style.
zstyle -s ':prezto:module:history-substring-search:color' found \
  'HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_FOUND' \
  || HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_FOUND='bg=magenta,fg=white,bold'

zstyle -s ':prezto:module:history-substring-search:color' not-found \
  'HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_NOT_FOUND' \
  || HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_NOT_FOUND='bg=red,fg=white,bold'

zstyle -s ':prezto:module:history-substring-search' globbing-flags \
  'HISTORY_SUBSTRING_SEARCH_GLOBBING_FLAGS' \
  || HISTORY_SUBSTRING_SEARCH_GLOBBING_FLAGS='i'

# Case-sensitive search: strip the 'i' (case-insensitive) globbing flag.
if zstyle -t ':prezto:module:history-substring-search' case-sensitive; then
  HISTORY_SUBSTRING_SEARCH_GLOBBING_FLAGS="${HISTORY_SUBSTRING_SEARCH_GLOBBING_FLAGS//i}"
fi

# Disabling color unsets both highlight styles entirely.
if ! zstyle -t ':prezto:module:history-substring-search' color; then
  unset HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_{FOUND,NOT_FOUND}
fi

#
# Key Bindings
#

if [[ -n "$key_info" ]]; then
  # Emacs
  bindkey -M emacs "$key_info[Control]P" history-substring-search-up
  bindkey -M emacs "$key_info[Control]N" history-substring-search-down

  # Vi
  bindkey -M vicmd "k" history-substring-search-up
  bindkey -M vicmd "j" history-substring-search-down

  # Emacs and Vi
  for keymap in 'emacs' 'viins'; do
    bindkey -M "$keymap" "$key_info[Up]" history-substring-search-up
    bindkey -M "$keymap" "$key_info[Down]" history-substring-search-down
  done
  unset keymap
fi

# OPTION 2: for iTerm2 running on Apple MacBook laptops
# zmodload zsh/terminfo
# bindkey "$terminfo[cuu1]" history-substring-search-up
# bindkey "$terminfo[cud1]" history-substring-search-down
|
# Copyright 2016-present CERN – European Organization for Nuclear Research
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Union, Sequence
import pprint
from urllib.parse import urljoin
import requests
from qf_lib.common.utils.logging.qf_parent_logger import qf_logger
from datetime import datetime
from qf_lib.common.utils.miscellaneous.to_list_conversion import convert_to_list
from qf_lib.data_providers.bloomberg.exceptions import BloombergError
from qf_lib.data_providers.bloomberg.helpers import convert_to_bloomberg_date
class BloombergBeapHapiUniverseProvider:
    """
    Class to prepare and create universe for Bloomberg HAPI

    Parameters
    ----------
    host: str
        The host address e.g. 'https://api.bloomberg.com'
    session: requests.Session
        The session object
    account_url: str
        The URL of hapi account
    """

    def __init__(self, host: str, session: requests.Session, account_url: str):
        self.host = host
        self.session = session
        self.account_url = account_url
        self.logger = qf_logger.getChild(self.__class__.__name__)

    def get_universe_url(self, universe_id: str, tickers: Union[dict, str, Sequence[str]], fieldsOverrides: bool) -> str:
        """
        Method to create hapi universe and get universe address URL

        Parameters
        ----------
        universe_id: str
            ID of the hapi universe
        tickers: Union[dict, str, Sequence[str]]
            Ticker str, list of tickers str or dict (for a dict, its keys are
            used as the ticker identifiers)
        fieldsOverrides: bool
            If True, it uses fieldsOverrides (CHAIN_DATE / INCLUDE_EXPIRED_CONTRACTS
            are attached to the first identifier only)

        Returns
        -------
        universe_url
            URL address of created hapi universe

        Raises
        ------
        BloombergError
            If the universe does not exist yet and the POST creating it does
            not return HTTP 201 (created).
        """
        if not isinstance(tickers, dict):
            # The "was a single element" flag from convert_to_list is unused here.
            tickers, _ = convert_to_list(tickers, str)
        contains = [{'@type': 'Identifier', 'identifierType': 'TICKER', 'identifierValue': ticker} for ticker in tickers]
        if fieldsOverrides:
            # noinspection PyTypeChecker
            # Overrides are intentionally applied to the first identifier only;
            # assumes `contains` is non-empty when fieldsOverrides is requested.
            contains[0]['fieldOverrides'] = [
                {
                    '@type': 'FieldOverride',
                    'mnemonic': "CHAIN_DATE",
                    'override': convert_to_bloomberg_date(datetime.now())
                },
                {
                    '@type': 'FieldOverride',
                    'mnemonic': "INCLUDE_EXPIRED_CONTRACTS",
                    'override': "Y"
                }
            ]
        universe_payload = {
            '@type': 'Universe',
            'identifier': universe_id,
            'title': 'Universe Payload',
            'description': 'Universe Payload used in creating fields component',
            'contains': contains
        }
        self.logger.info('Universe component payload:\n:%s', pprint.pformat(universe_payload))
        universe_url = urljoin(self.account_url, 'universes/{}/'.format(universe_id))

        # Check if the universe already exists; if not, then POST to create it.
        response = self.session.get(universe_url)
        if response.status_code != 200:
            universe_url = urljoin(self.account_url, 'universes/')
            response = self.session.post(universe_url, json=universe_payload)

            # Check it went well and extract the URL of the created universe
            if response.status_code != requests.codes.created:
                self.logger.error('Unexpected response status code: %s', response.status_code)
                raise BloombergError('Unexpected response')

            universe_location = response.headers['Location']
            universe_url = urljoin(self.host, universe_location)
            self.logger.info('Universe successfully created at %s', universe_url)
        return universe_url
|
<filename>Casks/seafile-client.rb
cask "seafile-client" do
version "8.0.2"
sha256 "fbdb3f14e46b28c9a7865dbbc0cc559fba388a1e28021a85bc6ab4a0ebea59a5"
url "https://download.seadrive.org/seafile-client-#{version}.dmg",
verified: "seadrive.org/"
name "Seafile Client"
desc "File syncing client"
homepage "https://www.seafile.com/"
livecheck do
url "https://www.seafile.com/en/download/"
regex(%r{href=.*?/seafile[._-]client[._-]v?(\d+(?:\.\d+)+)\.dmg}i)
end
depends_on macos: ">= :high_sierra"
app "Seafile Client.app"
end
|
#!/usr/bin/env bash
# shellcheck disable=SC2128

# Integration test for the chain33 "token" executor RPC interface.
# Globals shared by the test functions below.
MAIN_HTTP=""     # RPC endpoint, set by main()
CASE_ERR=""      # set to "FAIL" by echo_rst on any failed case
tokenAddr="1Q8hGLfoGe63efeWa8fJ4Pnukhkngt6poK"
recvAddr="14KEKbYtKKQm4wMthSK9J4La4nAiidGozt"
superManager="0xc34b5d9d44ac7b754806f761d3d4d2c4fe5214f6b074c19f069c4f5c2a29c8cc"
tokenSymbol="ABCDE"
token_addr=""
execName="token"

#color
RED='\033[1;31m'
GRE='\033[1;32m'
NOC='\033[0m'

# Print a colored pass/fail line for case $1 and record failures in CASE_ERR.
# $2=0 means true, other false
function echo_rst() {
    if [ "$2" -eq 0 ]; then
        echo -e "${GRE}$1 ok${NOC}"
    else
        echo -e "${RED}$1 fail${NOC}"
        CASE_ERR="FAIL"
    fi
}
# Import private key $2 into the wallet at RPC endpoint $1 and verify the
# resulting account address equals $3.
function chain33_ImportPrivkey() {
    local pri=$2
    local acc=$3
    local req='"method":"Chain33.ImportPrivkey", "params":[{"privkey":"'"$pri"'", "label":"tokenAddr"}]'
    echo "#request: $req"
    resp=$(curl -ksd "{$req}" "$1")
    echo "#response: $resp"
    ok=$(jq '(.error|not) and (.result.label=="tokenAddr") and (.result.acc.addr == "'"$acc"'")' <<<"$resp")
    [ "$ok" == true ]
    echo_rst "$FUNCNAME" "$?"
}

# Transfer $3 (in base units) from address $1 to address $2 via MAIN_HTTP.
function Chain33_SendToAddress() {
    local from="$1"
    local to="$2"
    local amount=$3
    local req='"method":"Chain33.SendToAddress", "params":[{"from":"'"$from"'","to":"'"$to"'", "amount":'"$amount"', "note":"test\n"}]'
    #    echo "#request: $req"
    resp=$(curl -ksd "{$req}" "${MAIN_HTTP}")
    #    echo "#response: $resp"
    ok=$(jq '(.error|not) and (.result.hash|length==66)' <<<"$resp")
    [ "$ok" == true ]
    echo_rst "$FUNCNAME" "$?"
    hash=$(jq '(.result.hash)' <<<"$resp")
    echo "hash=$hash"
    #    query_tx "$hash"
}

# Unlock the wallet so subsequent signing calls succeed.
function chain33_unlock() {
    ok=$(curl -k -s --data-binary '{"jsonrpc":"2.0","id":2,"method":"Chain33.UnLock","params":[{"passwd":"1314fuzamei","timeout":0}]}' -H 'content-type:text/plain;' ${MAIN_HTTP} | jq -r ".result.isOK")
    [ "$ok" == true ]
    rst=$?
    echo_rst "$FUNCNAME" "$rst"
}

# Block until the chain height has advanced by $1 blocks (polls once per second).
function block_wait() {
    local req='"method":"Chain33.GetLastHeader","params":[]'
    cur_height=$(curl -ksd "{$req}" ${MAIN_HTTP} | jq ".result.height")
    expect=$((cur_height + ${1}))
    local count=0
    while true; do
        new_height=$(curl -ksd "{$req}" ${MAIN_HTTP} | jq ".result.height")
        if [ "${new_height}" -ge "${expect}" ]; then
            break
        fi
        count=$((count + 1))
        sleep 1
    done
    echo "wait new block $count s, cur height=$expect,old=$cur_height"
}

# Sign raw transaction $1 with address $2; stores the result in the global
# `signedTx`. Returns 1 when signing failed (result was null).
function signRawTx() {
    unsignedTx=$1
    addr=$2
    signedTx=$(curl -s --data-binary '{"jsonrpc":"2.0","id":2,"method":"Chain33.SignRawTx","params":[{"addr":"'"${addr}"'","txHex":"'"${unsignedTx}"'","expire":"120s"}]}' -H 'content-type:text/plain;' ${MAIN_HTTP} | jq -r ".result")
    if [ "$signedTx" == "null" ]; then
        return 1
    else
        return 0
    fi
}
# Broadcast the global `signedTx`; stores the tx hash in the global `txHash`.
# Returns 1 when the node rejected the transaction (result was null).
function sendSignedTx() {
    txHash=$(curl -s --data-binary '{"jsonrpc":"2.0","id":2,"method":"Chain33.SendTransaction","params":[{"token":"","data":"'"${signedTx}"'"}]}' -H 'content-type:text/plain;' ${MAIN_HTTP} | jq -r ".result")
    if [ "$txHash" == "null" ]; then
        return 1
    else
        return 0
    fi
}

# Query the execution result of a transaction and validate it against the
# supplied rule (arg1: jq validation expression, arg2: expected match result).
function queryTransaction() {
    validator=$1
    expectRes=$2
    echo "txhash=${txHash}"
    res=$(curl -s --data-binary '{"jsonrpc":"2.0","id":2,"method":"Chain33.QueryTransaction","params":[{"hash":"'"${txHash}"'"}]}' -H 'content-type:text/plain;' ${MAIN_HTTP} | jq -r "${validator}")
    if [ "${res}" != "${expectRes}" ]; then
        return 1
    else
        return 0
    fi
}

# Detect para-chain vs main-chain (port 8901 => para), import the manager key,
# fund the token executor address, then push the manage config update.
function init() {
    ispara=$(echo '"'"${MAIN_HTTP}"'"' | jq '.|contains("8901")')
    # NOTE(review): "ipara" looks like a typo for "ispara" in this log line — confirm.
    echo "ipara=$ispara"
    chain33_ImportPrivkey "${MAIN_HTTP}" "${superManager}" "${tokenAddr}"
    if [ "$ispara" == true ]; then
        execName="user.p.para.token"
        token_addr=$(curl -ksd '{"method":"Chain33.ConvertExectoAddr","params":[{"execname":"user.p.para.token"}]}' ${MAIN_HTTP} | jq -r ".result")
        Chain33_SendToAddress "$recvAddr" "$tokenAddr" 100000000000
        block_wait 1
        Chain33_SendToAddress "$tokenAddr" "$token_addr" 1000000000
        block_wait 1
    else
        token_addr=$(curl -ksd '{"method":"Chain33.ConvertExectoAddr","params":[{"execname":"token"}]}' ${MAIN_HTTP} | jq -r ".result")
        from="12qyocayNF7Lv6C9qW4avxs2E7U41fKSfv"
        Chain33_SendToAddress "$from" "$tokenAddr" 10000000000
        block_wait 1
        Chain33_SendToAddress "$tokenAddr" "$token_addr" 1000000000
        block_wait 1
    fi
    echo "token=$token_addr"
    updateConfig
}

# Add "BTY" to the token-blacklist via the manage executor
# (create -> sign -> send -> wait -> verify).
function updateConfig() {
    unsignedTx=$(curl -s --data-binary '{"jsonrpc":"2.0","id":2,"method":"Chain33.CreateTransaction","params":[{"execer": "manage","actionName":"Modify","payload":{ "key": "token-blacklist","value": "BTY","op": "add","addr": ""}}]}' -H 'content-type:text/plain;' ${MAIN_HTTP} | jq -r ".result")
    if [ "${unsignedTx}" == "" ]; then
        echo_rst "update config create tx" 1
        return
    fi
    signRawTx "${unsignedTx}" "${tokenAddr}"
    echo_rst "update config signRawTx" "$?"
    sendSignedTx
    echo_rst "update config sendSignedTx" "$?"
    block_wait 1
    queryTransaction ".error | not" "true"
    echo_rst "update config queryExecRes" "$?"
}
# Pre-create the test token (status 0) owned by tokenAddr.
function token_preCreate() {
    unsignedTx=$(curl -s --data-binary '{"jsonrpc":"2.0","id":2,"method":"token.CreateRawTokenPreCreateTx","params":[{"name": "yinhebib", "symbol": "'"${tokenSymbol}"'", "total": 100000000000, "price": 100, "category": 1,"owner":"'${tokenAddr}'"}]}' -H 'content-type:text/plain;' ${MAIN_HTTP} | jq -r ".result")
    if [ "${unsignedTx}" == "" ]; then
        echo_rst "token preCreate create tx" 1
        return
    fi
    signRawTx "${unsignedTx}" "${tokenAddr}"
    echo_rst "token preCreate signRawTx" "$?"
    sendSignedTx
    echo_rst "token preCreate sendSignedTx" "$?"
    block_wait 1
    queryTransaction ".error | not" "true"
    echo_rst "token preCreate queryExecRes" "$?"
}

# Query pre-created tokens (status 0); only checks the RPC did not error.
function token_getPreCreated() {
    res=$(curl -s --data-binary '{"jsonrpc":"2.0","id":2,"method":"Chain33.Query","params":[{"execer":"'"${execName}"'","funcName":"GetTokens","payload":{"queryAll":true,"status":0,"tokens":[],"symbolOnly":false}}]}' -H 'content-type:text/plain;' ${MAIN_HTTP} | jq -r ".error | not")
    if [ "${res}" != "true" ]; then
        echo_rst "token preCreate create tx" 1
        return
    fi
}

# Finalize token creation (status 0 -> 1) for the pre-created token.
function token_finish() {
    unsignedTx=$(curl -s --data-binary '{"jsonrpc":"2.0","id":2,"method":"token.CreateRawTokenFinishTx","params":[{"symbol": "'"${tokenSymbol}"'", "owner":"'${tokenAddr}'"}]}' -H 'content-type:text/plain;' ${MAIN_HTTP} | jq -r ".result")
    if [ "${unsignedTx}" == "" ]; then
        echo_rst "token finish create tx" 1
        return
    fi
    signRawTx "${unsignedTx}" "${tokenAddr}"
    echo_rst "token finish signRawTx" "$?"
    sendSignedTx
    echo_rst "token finish sendSignedTx" "$?"
    block_wait 1
    queryTransaction ".error | not" "true"
    echo_rst "token finish queryExecRes" "$?"
}

# Verify the finished token (status 1) shows up in the token list.
function token_getFinishCreated() {
    res=$(curl -s --data-binary '{"jsonrpc":"2.0","id":2,"method":"Chain33.Query","params":[{"execer":"'"${execName}"'","funcName":"GetTokens","payload":{"queryAll":true,"status":1,"tokens":[],"symbolOnly":false}}]}' -H 'content-type:text/plain;' ${MAIN_HTTP} | jq -r ".result.tokens" | grep "symbol")
    if [ "${res}" != "" ]; then
        echo_rst "token get finishCreated create tx" 0
    else
        echo_rst "token get finishCreated create tx" 1
    fi
}

# Verify recvAddr holds the expected token assets after the transfer case.
function token_assets() {
    res=$(curl -s --data-binary '{"jsonrpc":"2.0","id":2,"method":"Chain33.Query","params":[{"execer": "'"${execName}"'","funcName":"GetAccountTokenAssets","payload": {"address":"'"${recvAddr}"'", "execer": "token"}}]}' -H 'content-type:text/plain;' ${MAIN_HTTP})
    if [ "${res}" == "" ]; then
        echo_rst "token get balance tx" 1
        return
    fi
    tokenInfo=$(echo "${res}" | jq -r '.result.tokenAssets' | grep -A 6 -B 1 "${tokenSymbol}")
    addr=$(echo "${tokenInfo}" | grep "addr" | awk -F '"' '{print $4}')
    balance=$(echo "${tokenInfo}" | grep "balance" | awk -F '"' '{print $4}')
    if [ "${addr}" == "${recvAddr}" ] && [ "${balance}" -eq 1000000000 ]; then
        echo_rst "token get assets tx" 0
    else
        echo_rst "token get assets tx" 1
    fi
}
# Verify tokenAddr holds the full initial token balance.
function token_balance() {
    res=$(curl -s --data-binary '{"jsonrpc":"2.0","id":2,"method":"token.GetTokenBalance","params":[{"addresses": ["'${tokenAddr}'"],"tokenSymbol":"'"${tokenSymbol}"'","execer": "'"${execName}"'"}]}' -H 'content-type:text/plain;' ${MAIN_HTTP})
    if [ "${res}" == "" ]; then
        echo_rst "token get balance tx" 1
        return
    fi
    addr=$(echo "${res}" | jq -r ".result[0].addr")
    balance=$(echo "${res}" | jq -r ".result[0].balance")
    if [ "${addr}" == "${tokenAddr}" ] && [ "${balance}" -eq 100000000000 ]; then
        echo_rst "token get balance tx" 0
    else
        echo_rst "token get balance tx" 1
    fi
}

# Burn 10000 units of the test token (create -> sign -> send -> verify).
function token_burn() {
    unsignedTx=$(curl -s --data-binary '{"jsonrpc":"2.0","id":2,"method":"token.CreateRawTokenBurnTx","params":[{"symbol": "'"${tokenSymbol}"'","amount": 10000}]}' -H 'content-type:text/plain;' ${MAIN_HTTP} | jq -r ".result")
    if [ "${unsignedTx}" == "" ]; then
        echo_rst "token burn create tx" 1
        return
    fi
    signRawTx "${unsignedTx}" "${tokenAddr}"
    echo_rst "token burn signRawTx" "$?"
    sendSignedTx
    echo_rst "token burn sendSignedTx" "$?"
    block_wait 1
    queryTransaction ".error | not" "true"
    echo_rst "token burn queryExecRes" "$?"
}

# Mint 10000 units of the test token (create -> sign -> send -> verify).
function token_mint() {
    unsignedTx=$(curl -s --data-binary '{"jsonrpc":"2.0","id":2,"method":"token.CreateRawTokenMintTx","params":[{"symbol": "'"${tokenSymbol}"'","amount": 10000}]}' -H 'content-type:text/plain;' ${MAIN_HTTP} | jq -r ".result")
    if [ "${unsignedTx}" == "" ]; then
        echo_rst "token mint create tx" 1
        return
    fi
    signRawTx "${unsignedTx}" "${tokenAddr}"
    echo_rst "token mint signRawTx" "$?"
    sendSignedTx
    echo_rst "token mint sendSignedTx" "$?"
    block_wait 1
    queryTransaction ".error | not" "true"
    echo_rst "token mint queryExecRes" "$?"
}

# Transfer tokens from tokenAddr to recvAddr (create -> sign -> send -> verify).
function token_transfer() {
    unsignedTx=$(curl -s --data-binary '{"jsonrpc":"2.0","id":2,"method":"Chain33.CreateTransaction","params":[{"execer": "'"${execName}"'","actionName":"Transfer","payload": {"cointoken":"'"${tokenSymbol}"'", "amount": "1000000000", "note": "", "to": "'"${recvAddr}"'"}}]}' -H 'content-type:text/plain;' ${MAIN_HTTP} | jq -r ".result")
    if [ "${unsignedTx}" == "" ]; then
        echo_rst "token transfer create tx" 1
        return
    fi
    signRawTx "${unsignedTx}" "${tokenAddr}"
    echo_rst "token transfer signRawTx" "$?"
    sendSignedTx
    echo_rst "token transfer sendSignedTx" "$?"
    block_wait 1
    queryTransaction ".error | not" "true"
    echo_rst "token transfer queryExecRes" "$?"
}
# Move tokens from the wallet balance into the token executor's sub-account.
function token_sendExec() {
    unsignedTx=$(curl -s --data-binary '{"jsonrpc":"2.0","id":2,"method":"Chain33.CreateTransaction","params":[{"execer": "'"${execName}"'","actionName":"TransferToExec","payload": {"cointoken":"'"${tokenSymbol}"'", "amount": "10", "note": "", "to": "'"${token_addr}"'", "execName": "'"${execName}"'"}}]}' -H 'content-type:text/plain;' ${MAIN_HTTP} | jq -r ".result")
    if [ "${unsignedTx}" == "" ]; then
        echo_rst "token sendExec create tx" 1
        return
    fi
    signRawTx "${unsignedTx}" "${tokenAddr}"
    echo_rst "token sendExec signRawTx" "$?"
    sendSignedTx
    echo_rst "token sendExec sendSignedTx" "$?"
    block_wait 2
    queryTransaction ".error | not" "true"
    echo_rst "token sendExec queryExecRes" "$?"
}

# Withdraw tokens from the token executor's sub-account back to the wallet.
function token_withdraw() {
    unsignedTx=$(curl -s --data-binary '{"jsonrpc":"2.0","id":2,"method":"Chain33.CreateTransaction","params":[{"execer": "'"${execName}"'","actionName":"Withdraw","payload": {"cointoken":"'"${tokenSymbol}"'", "amount": "10", "note": "", "to": "'"${token_addr}"'", "execName": "'"${execName}"'"}}]}' -H 'content-type:text/plain;' ${MAIN_HTTP} | jq -r ".result")
    if [ "${unsignedTx}" == "" ]; then
        echo_rst "token withdraw create tx" 1
        return
    fi
    signRawTx "${unsignedTx}" "${tokenAddr}"
    echo_rst "token withdraw signRawTx" "$?"
    sendSignedTx
    echo_rst "token withdraw sendSignedTx" "$?"
    block_wait 1
    queryTransaction ".error | not" "true"
    echo_rst "token withdraw queryExecRes" "$?"
}

# Run every token test case in order; set -x traces each call for the CI log.
function run_test() {
    local ip=$1
    set -x
    token_preCreate
    token_getPreCreated
    token_finish
    token_getFinishCreated
    token_balance
    token_burn
    token_mint
    token_transfer
    token_sendExec
    token_assets
    token_withdraw
    set +x
}

# Entry point: $1 is the RPC endpoint URL; exits non-zero if any case failed.
function main() {
    local ip=$1
    MAIN_HTTP=$ip
    echo "=========== # token rpc test ============="
    echo "main_ip=$MAIN_HTTP"
    init
    run_test "$ip"
    if [ -n "$CASE_ERR" ]; then
        echo -e "${RED}=============Token Rpc Test Fail=============${NOC}"
        exit 1
    else
        echo -e "${GRE}=============Token Rpc Test Pass==============${NOC}"
    fi
}

main "$1"
|
#!/bin/bash
#
# Based mostly on the TED-LIUM and Switchboard recipe
#
# Copyright 2017 Johns Hopkins University (Author: Shinji Watanabe and Yenda Trmal)
# Apache 2.0
#
# Begin configuration section.
nj=96                 # number of parallel jobs for feature extraction/training
decode_nj=20          # number of parallel decoding jobs
stage=0               # first stage to run (earlier stages are skipped)
nnet_stage=-10
decode_stage=1
decode_only=false     # if true, jump straight to the decoding stages
num_data_reps=4       # number of reverberated copies of the worn-mic data
foreground_snrs="20:10:15:5:0"
background_snrs="20:10:15:5:0"
enhancement=beamformit # gss or beamformit
# End configuration section
. ./utils/parse_options.sh

. ./cmd.sh
. ./path.sh

if [ $decode_only == "true" ]; then
  stage=16
fi

set -e # exit on error

# chime5 main directory path
# please change the path accordingly
chime5_corpus=/export/corpora4/CHiME5
# chime6 data directories, which are generated from ${chime5_corpus},
# to synchronize audio files across arrays and modify the annotation (JSON) file accordingly
chime6_corpus=${PWD}/CHiME6
json_dir=${chime6_corpus}/transcriptions
audio_dir=${chime6_corpus}/audio

# NOTE(review): ${enhanced_dir} is referenced below but never assigned in this
# script — with gss it expands to just "_multiarray". Confirm whether it is
# expected to be inherited from the environment / parse_options.
if [[ ${enhancement} == *gss* ]]; then
  enhanced_dir=${enhanced_dir}_multiarray
  enhancement=${enhancement}_multiarray
fi

if [[ ${enhancement} == *beamformit* ]]; then
  enhanced_dir=${enhanced_dir}
  enhancement=${enhancement}
fi

test_sets="dev_${enhancement} eval_${enhancement}"
train_set=train_worn_simu_u400k

# This script also needs the phonetisaurus g2p, srilm, beamformit
./local/check_tools.sh || exit 1
###########################################################################
# We first generate the synchronized audio files across arrays and
# corresponding JSON files. Note that this requires sox v14.4.2,
# which is installed via miniconda in ./local/check_tools.sh
###########################################################################

if [ $stage -le 0 ]; then
  local/generate_chime6_data.sh \
    --cmd "$train_cmd" \
    ${chime5_corpus} \
    ${chime6_corpus}
fi

###########################################################################
# We prepare dict and lang in stages 1 to 3.
###########################################################################

if [ $stage -le 1 ]; then
  echo "$0:  prepare data..."
  # skip u03 and u04 as they are missing
  for mictype in worn u01 u02 u05 u06; do
    local/prepare_data.sh --mictype ${mictype} \
      ${audio_dir}/train ${json_dir}/train data/train_${mictype}
  done
  # dev set: only the worn (binaural) microphones are prepared here
  for dataset in dev; do
    for mictype in worn; do
      local/prepare_data.sh --mictype ${mictype} \
        ${audio_dir}/${dataset} ${json_dir}/${dataset} \
        data/${dataset}_${mictype}
    done
  done
fi

if [ $stage -le 2 ]; then
  echo "$0:  train lm ..."
  local/prepare_dict.sh
  utils/prepare_lang.sh \
    data/local/dict "<unk>" data/local/lang data/lang
  local/train_lms_srilm.sh \
    --train-text data/train_worn/text --dev-text data/dev_worn/text \
    --oov-symbol "<unk>" --words-file data/lang/words.txt \
    data/ data/srilm
fi

# best_3gram.gz is produced by train_lms_srilm.sh in stage 2
LM=data/srilm/best_3gram.gz
if [ $stage -le 3 ]; then
  # Compiles G for chime5 trigram LM
  echo "$0:  prepare lang..."
  utils/format_lm.sh \
    data/lang $LM data/local/dict/lexicon.txt data/lang
fi
#########################################################################################
# In stages 4 to 7, we augment and fix train data for our training purpose. point source
# noises are extracted from chime corpus. Here we use 400k utterances from array microphones,
# its augmentation and all the worn set utterances in train.
#########################################################################################

if [ $stage -le 4 ]; then
  # remove possibly bad sessions (P11_S03, P52_S19, P53_S24, P54_S24)
  # see http://spandh.dcs.shef.ac.uk/chime_challenge/data.html for more details
  utils/copy_data_dir.sh data/train_worn data/train_worn_org # back up
  grep -v -e "^P11_S03" -e "^P52_S19" -e "^P53_S24" -e "^P54_S24" data/train_worn_org/text > data/train_worn/text
  utils/fix_data_dir.sh data/train_worn
fi

if [ $stage -le 5 ]; then
  # Extract point-source noises from the corpus and build a noise list for
  # the reverberation tool.
  local/extract_noises.py $chime6_corpus/audio/train $chime6_corpus/transcriptions/train \
    local/distant_audio_list distant_noises
  local/make_noise_list.py distant_noises > distant_noise_list

  noise_list=distant_noise_list

  if [ ! -d RIRS_NOISES/ ]; then
    # Download the package that includes the real RIRs, simulated RIRs, isotropic noises and point-source noises
    wget --no-check-certificate http://www.openslr.org/resources/28/rirs_noises.zip
    unzip rirs_noises.zip
  fi

  # This is the config for the system using simulated RIRs and point-source noises
  rvb_opts+=(--rir-set-parameters "0.5, RIRS_NOISES/simulated_rirs/smallroom/rir_list")
  rvb_opts+=(--rir-set-parameters "0.5, RIRS_NOISES/simulated_rirs/mediumroom/rir_list")
  rvb_opts+=(--noise-set-parameters $noise_list)

  # Create $num_data_reps reverberated+noised copies of the worn-mic data.
  steps/data/reverberate_data_dir.py \
    "${rvb_opts[@]}" \
    --prefix "rev" \
    --foreground-snrs $foreground_snrs \
    --background-snrs $background_snrs \
    --speech-rvb-probability 1 \
    --pointsource-noise-addition-probability 1 \
    --isotropic-noise-addition-probability 1 \
    --num-replications $num_data_reps \
    --max-noises-per-minute 1 \
    --source-sampling-rate 16000 \
    data/train_worn data/train_worn_rvb
fi

if [ $stage -le 6 ]; then
  # combine mix array and worn mics
  # randomly extract first 400k utterances from all mics
  # if you want to include more training data, you can increase the number of array mic utterances
  utils/combine_data.sh data/train_uall data/train_u01 data/train_u02 data/train_u05 data/train_u06
  utils/subset_data_dir.sh data/train_uall 400000 data/train_u400k
  utils/combine_data.sh data/${train_set} data/train_worn data/train_worn_rvb data/train_u400k

  # only use left channel for worn mic recognition
  # you can use both left and right channels for training
  for dset in train dev; do
    utils/copy_data_dir.sh data/${dset}_worn data/${dset}_worn_stereo
    grep "\.L-" data/${dset}_worn_stereo/text > data/${dset}_worn/text
    utils/fix_data_dir.sh data/${dset}_worn
  done
fi

if [ $stage -le 7 ]; then
  # Split speakers up into 3-minute chunks.  This doesn't hurt adaptation, and
  # lets us use more jobs for decoding etc.
  for dset in ${train_set}; do
    utils/copy_data_dir.sh data/${dset} data/${dset}_nosplit
    utils/data/modify_speaker_info.sh --seconds-per-spk-max 180 data/${dset}_nosplit data/${dset}
  done
fi
##################################################################################
# Now make 13-dim MFCC features. We use 13-dim features for GMM-HMM systems.
##################################################################################
if [ $stage -le 8 ]; then
# Now make MFCC features.
# mfccdir should be some place with a largish disk where you
# want to store MFCC features.
echo "$0: make features..."
mfccdir=mfcc
for x in ${train_set}; do
steps/make_mfcc.sh --nj 20 --cmd "$train_cmd" \
data/$x exp/make_mfcc/$x $mfccdir
steps/compute_cmvn_stats.sh data/$x exp/make_mfcc/$x $mfccdir
utils/fix_data_dir.sh data/$x
done
fi
###################################################################################
# Stages 9 to 13 train monophone and triphone models. They will be used for
# generating lattices for training the chain model
###################################################################################
if [ $stage -le 9 ]; then
# make a subset for monophone training
utils/subset_data_dir.sh --shortest data/${train_set} 100000 data/${train_set}_100kshort
utils/subset_data_dir.sh data/${train_set}_100kshort 30000 data/${train_set}_30kshort
fi
if [ $stage -le 10 ]; then
# Starting basic training on MFCC features
steps/train_mono.sh --nj $nj --cmd "$train_cmd" \
data/${train_set}_30kshort data/lang exp/mono
fi
if [ $stage -le 11 ]; then
# Align with the monophone model, then train the first delta-feature triphone model.
steps/align_si.sh --nj $nj --cmd "$train_cmd" \
data/${train_set} data/lang exp/mono exp/mono_ali
steps/train_deltas.sh --cmd "$train_cmd" \
2500 30000 data/${train_set} data/lang exp/mono_ali exp/tri1
fi
if [ $stage -le 12 ]; then
# Re-align with tri1, then train an LDA+MLLT triphone model.
steps/align_si.sh --nj $nj --cmd "$train_cmd" \
data/${train_set} data/lang exp/tri1 exp/tri1_ali
steps/train_lda_mllt.sh --cmd "$train_cmd" \
4000 50000 data/${train_set} data/lang exp/tri1_ali exp/tri2
fi
if [ $stage -le 13 ]; then
# Re-align with tri2, then train a speaker-adapted (SAT) triphone model.
steps/align_si.sh --nj $nj --cmd "$train_cmd" \
data/${train_set} data/lang exp/tri2 exp/tri2_ali
steps/train_sat.sh --cmd "$train_cmd" \
5000 100000 data/${train_set} data/lang exp/tri2_ali exp/tri3
fi
#######################################################################
# Perform data cleanup for training data.
#######################################################################
if [ $stage -le 14 ]; then
# The following script cleans the data and produces cleaned data
steps/cleanup/clean_and_segment_data.sh --nj ${nj} --cmd "$train_cmd" \
--segmentation-opts "--min-segment-length 0.3 --min-new-segment-length 0.6" \
data/${train_set} data/lang exp/tri3 exp/tri3_cleaned data/${train_set}_cleaned
fi
##########################################################################
# CHAIN MODEL TRAINING
# skipping decoding here and performing it in step 16
##########################################################################
if [ $stage -le 15 ]; then
# chain TDNN
local/chain/run_tdnn.sh --nj ${nj} \
--stage $nnet_stage \
--train-set ${train_set}_cleaned \
--test-sets "$test_sets" \
--gmm tri3_cleaned --nnet3-affix _${train_set}_cleaned_rvb
fi
##########################################################################
# DECODING is done in the local/decode.sh script. This script performs
# enhancement, fixes test sets performs feature extraction and 2 stage decoding
##########################################################################
if [ $stage -le 16 ]; then
local/decode.sh --stage $decode_stage \
--enhancement $enhancement \
--train_set "$train_set"
fi
exit 0;
|
#!/bin/bash
# Original:
# https://stackoverflow.com/a/25498342/2877698
# I've made a couple of tweaks; nothing substantial
#
# Prints the RGB color of the pixel under the mouse cursor plus timing info.
# NOTE(review): %3N (millisecond precision) is a GNU date extension — not
# portable to BSD/macOS date; confirm target platforms.
start=$(date +%s%3N)
# Exports X and Y (cursor coordinates) into this shell via xdotool's --shell output.
eval $(xdotool getmouselocation --shell)
# Dump the root window, crop to the 1x1 pixel at (X,Y), and print its RGB
# triple; the division by 256 scales the reported channel values down
# (presumably 16-bit per channel in the txt: output — verify with convert).
xwd -root -screen -silent \
| convert xwd:- -crop "1x1+$X+$Y" txt:- \
| awk '/^[^#]/{\
a = gensub(/\(|\)/, "", "g", $2);\
split(a, b, ",");\
printf "RGB: (%d,%d,%d)\n", b[1] / 256, b[2] / 256, b[3] / 256;\
}'
end=$(date +%s%3N)
echo "Mouse: ($X,$Y)"
echo "Start: $start"
echo "End: $end"
echo "Difference: $(($end - $start))"
|
#!/usr/bin/env bats
# Location where the test-kitchen run drops the ohai plugins under test.
setup() {
PLUGINS_DIR=/tmp/kitchen/ohai/plugins
}
@test "ohai gets dovecot version" {
# Clear Busser/gem environment variables so ohai runs with the system ruby
# setup rather than Busser's (presumably avoids gem-path conflicts).
unset BUSSER_ROOT GEM_HOME GEM_PATH GEM_CACHE
# Flatten the JSON output onto one line and check that the dovecot plugin
# reported a "version" key.
ohai -d $PLUGINS_DIR | tr -d $'\n' | grep -q '"dovecot":\s*{\s*"version"'
}
@test "ohai prints nothing to stderr" {
unset BUSSER_ROOT GEM_HOME GEM_PATH GEM_CACHE
# Stderr only (stdout discarded) must be empty apart from the benign
# "plugin path does not exist" warning.
[ -z "`ohai -d $PLUGINS_DIR 2>&1 > /dev/null | grep -v 'The plugin path .* does not exist'`" ]
}
|
<filename>JavaScript Algorithms and Data Structures Certification (300 hours)/Object Oriented Programming/09. myHouse instanceof House.js
/*
https://www.freecodecamp.org/learn/javascript-algorithms-and-data-structures/object-oriented-programming/understand-own-properties
Add the own properties of canary to the array ownProps.
(1) ownProps should include the values "numLegs" and "name".
(2) You should solve this challenge without using the built in method
Object.keys().
(3) You should solve this challenge without hardcoding the ownProps array.
*/
function Bird(name) {
  this.name = name;
  this.numLegs = 2;
}

let canary = new Bird("Tweety");
let ownProps = [];
// Add your code below this line

// Walk every enumerable property visible on the instance and keep only the
// ones defined directly on it (inherited prototype members are filtered out).
// Object.keys() is deliberately avoided, per the challenge constraints.
const hasOwn = Object.prototype.hasOwnProperty;
for (const key in canary) {
  if (hasOwn.call(canary, key)) {
    ownProps.push(key);
  }
}

console.log(ownProps); // [ 'name', 'numLegs' ]
#!/bin/bash -l
# Installs the Intel oneAPI Base + HPC toolkits and generates Lmod modulefiles.
# Expects BUILD_DIR, INSTALL_DIR, TAR_DIR and MODULE_DIR to be set in the
# environment, and the oneAPI version as the first argument.
# Abort if any command returns an error
set -e
# Record what we're doing
set -x
# Set the package name
export PKG=oneapi
export PKG_VERSION=$1
# Load build environment
module purge
module load gcc
# Make full path names to locations
LIB_BUILD_DIR=${BUILD_DIR}/${PKG}/${PKG_VERSION}
LIB_INSTALL_DIR=${INSTALL_DIR}/${PKG}/${PKG_VERSION}
# Clean if they already exist
rm -rf ${LIB_BUILD_DIR}
rm -rf ${LIB_INSTALL_DIR}
# Make the build directory and cd into it
mkdir -p ${LIB_BUILD_DIR}
cd ${LIB_BUILD_DIR}
# Run the Script (Base ToolKit)
bash ${TAR_DIR}/${PKG}_base-${PKG_VERSION}.sh -a --action=install --install-dir=${LIB_INSTALL_DIR} --components=all --eula=accept --intel-sw-improvement-program-consent=decline --silent
# Run the Script (HPC Toolkit)
bash ${TAR_DIR}/${PKG}_hpc-${PKG_VERSION}.sh -a --action=install --install-dir=${LIB_INSTALL_DIR} --components=all --eula=accept --intel-sw-improvement-program-consent=decline --silent
# ------------------------------------------------------
# Advisor Module
# ------------------------------------------------------
# The heredoc below is unquoted, so ${...} expands NOW: the generated lua
# file contains absolute paths.
mkdir -p ${MODULE_DIR}/base/advisor
cat << EOF > ${MODULE_DIR}/base/advisor/${PKG_VERSION}.lua
help([[ Intel Advisor version ${PKG_VERSION} ]])
family("advisor")
-- Conflicting modules
-- Modulepath for packages built by this compiler
-- Environment Paths
prepend_path("PATH", "${LIB_INSTALL_DIR}/advisor/${PKG_VERSION}/bin64")
prepend_path("PYTHONPATH", "${LIB_INSTALL_DIR}/advisor/${PKG_VERSION}/pythonapi")
-- Environment Variables
setenv("ADVISOR_2021_DIR", "${LIB_INSTALL_DIR}/advisor/${PKG_VERSION}")
setenv("APM", "${LIB_INSTALL_DIR}/advisor/${PKG_VERSION}/perfmodels")
EOF
# Compiler Module
# Resolve the currently-loaded GCC's install prefix so the oneAPI module can
# put it back on PATH/LD_LIBRARY_PATH (icx/icpx rely on a GNU toolchain).
gnu_c_compiler=${CC}
gnu_bin_dir=$(dirname ${CC})
gnu_base_name=$(dirname ${gnu_bin_dir})
# ------------------------------------------------------
# Compiler Module
# ------------------------------------------------------
mkdir -p ${MODULE_DIR}/base/${PKG}
cat << EOF > ${MODULE_DIR}/base/${PKG}/${PKG_VERSION}.lua
help([[ ${PKG} version ${PKG_VERSION} ]])
family("compiler")
-- Conflicting modules
conflict("gcc")
conflict("llvm")
-- Modulepath for packages built by this compiler
prepend_path("MODULEPATH", "${MODULE_DIR}/compiler/${PKG}/${PKG_VERSION}")
-- Point at Latest GCC Compiler
prepend_path("PATH", "${gnu_base_name}/bin")
prepend_path("LD_LIBRARY_PATH", "${gnu_base_name}/lib")
prepend_path("LD_LIBRARY_PATH", "${gnu_base_name}/lib64")
-- Environment Paths
prepend_path("PATH", "${LIB_INSTALL_DIR}/compiler/${PKG_VERSION}/linux/bin/intel64")
prepend_path("PATH", "${LIB_INSTALL_DIR}/compiler/${PKG_VERSION}/linux/bin")
prepend_path("CPATH", "${LIB_INSTALL_DIR}/compiler/${PKG_VERSION}/linux/include")
prepend_path("LIBRARY_PATH", "${LIB_INSTALL_DIR}/compiler/${PKG_VERSION}/linux/lib")
prepend_path("LIBRARY_PATH", "${LIB_INSTALL_DIR}/compiler/${PKG_VERSION}/linux/lib/emu")
prepend_path("LIBRARY_PATH", "${LIB_INSTALL_DIR}/compiler/${PKG_VERSION}/linux/lib/x64")
prepend_path("LIBRARY_PATH", "${LIB_INSTALL_DIR}/compiler/${PKG_VERSION}/linux/compiler/lib/intel64_lin")
prepend_path("LD_LIBRARY_PATH", "${LIB_INSTALL_DIR}/compiler/${PKG_VERSION}/linux/lib")
prepend_path("LD_LIBRARY_PATH", "${LIB_INSTALL_DIR}/compiler/${PKG_VERSION}/linux/lib/emu")
prepend_path("LD_LIBRARY_PATH", "${LIB_INSTALL_DIR}/compiler/${PKG_VERSION}/linux/lib/x64")
prepend_path("LD_LIBRARY_PATH", "${LIB_INSTALL_DIR}/compiler/${PKG_VERSION}/linux/compiler/lib/intel64_lin")
prepend_path("OCL_ICD_FILENAMES", "${LIB_INSTALL_DIR}/compiler/${PKG_VERSION}/linux/lib/x64/libintelocl.so")
prepend_path("MANPATH", "${LIB_INSTALL_DIR}/compiler/${PKG_VERSION}/documentation/en/man/common")
-- Environment Variables
setenv("CPP", "${LIB_INSTALL_DIR}/compiler/${PKG_VERSION}/linux/bin/icpx -E")
setenv("CC", "${LIB_INSTALL_DIR}/compiler/${PKG_VERSION}/linux/bin/icx")
setenv("CXX", "${LIB_INSTALL_DIR}/compiler/${PKG_VERSION}/linux/bin/icpx")
setenv("FPP", "${LIB_INSTALL_DIR}/compiler/${PKG_VERSION}/linux/bin/fpp")
setenv("FC", "${LIB_INSTALL_DIR}/compiler/${PKG_VERSION}/linux/bin/ifx")
setenv("INTEL_TARGET_ARCH", "intel64")
EOF
# ------------------------------------------------------
# MPI Module
# ------------------------------------------------------
impi_install_dir=${LIB_INSTALL_DIR}/mpi/${PKG_VERSION} # Location of IMPI
impi_module_dir=${MODULE_DIR}/compiler/${PKG}/${PKG_VERSION}/impi # Module lua dir (full path)
impi_module_file=${impi_module_dir}/${PKG_VERSION}.lua # Module lua file (full path)
mkdir -p ${impi_module_dir}
cat << EOF > ${impi_module_file}
help([[ Intel MPI ${PKG} version ${PKG_VERSION} ]])
family("mpi")
-- Dependencies
prereq_any("${PKG}/${PKG_VERSION}")
-- Modulepath for packages built by this compiler
prepend_path("MODULEPATH", "${MODULE_DIR}/mpi/impi/${PKG_VERSION}/${PKG}/${PKG_VERSION}")
-- Environment Paths
prepend_path("PATH", "${impi_install_dir}/bin")
prepend_path("LIBRARY_PATH", "${impi_install_dir}/lib")
prepend_path("LIBRARY_PATH", "${impi_install_dir}/lib/release")
prepend_path("LD_LIBRARY_PATH", "${impi_install_dir}/lib")
prepend_path("LD_LIBRARY_PATH", "${impi_install_dir}/lib/release")
prepend_path("CLASSPATH", "${impi_install_dir}/lib/mpi.jar")
-- SetUp Intel Variables
setenv("I_MPI_ROOT", "${impi_install_dir}")
if os.getenv("CC") then
setenv("I_MPI_CC", os.getenv("CC"))
end
if os.getenv("CXX") then
setenv("I_MPI_CXX", os.getenv("CXX"))
end
if os.getenv("FC") then
setenv("I_MPI_FC", os.getenv("FC"))
end
-- Setup environment variables
setenv("MPI_ROOT", "${impi_install_dir}")
setenv("MPI_C_COMPILER", "${impi_install_dir}/bin/mpiicc")
setenv("MPI_CXX_COMPILER", "${impi_install_dir}/bin/mpiicpc")
setenv("MPI_Fortran_COMPILER", "${impi_install_dir}/bin/mpiifort")
-- Should be then re-set serial vars ???
setenv("CC", "${impi_install_dir}/bin/mpiicc")
setenv("CXX", "${impi_install_dir}/bin/mpiicpc")
setenv("FC", "${impi_install_dir}/bin/mpiifort")
EOF
# Append libfabric paths only when the install actually shipped it.
if [ -d "${impi_install_dir}/libfabric" ]; then
cat << EOF >> ${impi_module_file}
-- IB Fabric Variables for IMPI
prepend_path("PATH", "${impi_install_dir}/libfabric/bin")
prepend_path("LIBRARY_PATH", "${impi_install_dir}/libfabric/lib")
prepend_path("LD_LIBRARY_PATH", "${impi_install_dir}/libfabric/lib")
setenv("FI_PROVIDER", "mlx")
setenv("FI_PROVIDER_PATH", "${impi_install_dir}/libfabric/lib/prov")
EOF
fi
# If this host runs Slurm, point IMPI at Slurm's PMI library. The PMI path is
# derived by stripping two components off the sbatch path (bin/sbatch -> prefix).
if [ -x "$(command -v sbatch)" ]; then
slurm_command=$(command -v sbatch)
pmi_path=${slurm_command%/*/*}
if [ -f "${pmi_path}/lib/libpmi.so" ]; then
cat << EOF >> ${impi_module_file}
-- Slurm Environment for IMPI
setenv("I_MPI_PMI_LIBRARY", "${pmi_path}/lib/libpmi.so")
EOF
fi
fi
# ------------------------------------------------------
# MKL Module
# ------------------------------------------------------
mkdir -p ${MODULE_DIR}/base/mkl
cat << EOF > ${MODULE_DIR}/base/mkl/${PKG_VERSION}.lua
help([[ MKL version ${PKG_VERSION} ]])
family("blas")
-- Conflicting modules
-- Modulepath for packages built by this compiler
-- Environment Paths
prepend_path("PATH", "${LIB_INSTALL_DIR}/mkl/${PKG_VERSION}/bin/intel64")
prepend_path("CPATH", "${LIB_INSTALL_DIR}/mkl/${PKG_VERSION}/include")
prepend_path("LIBRARY_PATH", "${LIB_INSTALL_DIR}/mkl/${PKG_VERSION}/lib/intel64")
prepend_path("LD_LIBRARY_PATH", "${LIB_INSTALL_DIR}/mkl/${PKG_VERSION}/lib/intel64")
prepend_path("LIBRARY_PATH", "${LIB_INSTALL_DIR}/compiler/${PKG_VERSION}/linux/compiler/lib/intel64_lin")
prepend_path("LD_LIBRARY_PATH", "${LIB_INSTALL_DIR}/compiler/${PKG_VERSION}/linux/compiler/lib/intel64_lin")
EOF
# ------------------------------------------------------
# VTune Module
# ------------------------------------------------------
# VTune version is different that OneAPI Version (Seriously!!!)
# e.g. OneAPI 2021.3.0 ships VTune 2021.5.0 — map it explicitly.
if [ ${PKG_VERSION} == "2021.3.0" ]; then
VTUNE_VERSION=2021.5.0
else
VTUNE_VERSION=${PKG_VERSION}
fi
mkdir -p ${MODULE_DIR}/base/vtune
# Fix: the SDK paths below previously hardcoded "2021.5.0" instead of using
# ${VTUNE_VERSION}, which produced broken paths for every other release.
# Also removed the copy-paste lines that added sdk/include (a header
# directory) to LIBRARY_PATH/LD_LIBRARY_PATH; sdk/lib64 is the library dir.
cat << EOF > ${MODULE_DIR}/base/vtune/${VTUNE_VERSION}.lua
help([[ Intel VTune version ${VTUNE_VERSION} ]])
family("vtune")
-- Conflicting modules
-- Modulepath for packages built by this compiler
-- Environment Paths
prepend_path("PATH", "${LIB_INSTALL_DIR}/vtune/${VTUNE_VERSION}/bin64")
-- Environment Variables
setenv("VTUNE_PROFILER_2021_DIR", "${LIB_INSTALL_DIR}/vtune/${VTUNE_VERSION}")
setenv("INTEL_LIBITTNOTIFY64", "${LIB_INSTALL_DIR}/vtune/${VTUNE_VERSION}/lib64/runtime/libittnotify_collector.so")
prepend_path("PKG_CONFIG_PATH", "${LIB_INSTALL_DIR}/vtune/${VTUNE_VERSION}/include/pkgconfig/lib64")
-- VTune Instrumentation & Tracing API
prepend_path("CPATH", "${LIB_INSTALL_DIR}/vtune/${VTUNE_VERSION}/sdk/include")
prepend_path("LIBRARY_PATH", "${LIB_INSTALL_DIR}/vtune/${VTUNE_VERSION}/sdk/lib64")
prepend_path("LD_LIBRARY_PATH", "${LIB_INSTALL_DIR}/vtune/${VTUNE_VERSION}/sdk/lib64")
EOF
|
import Vue from 'vue'
import Vuex, {StoreOptions} from 'vuex'

Vue.use(Vuex)

// Read and JSON-decode a sessionStorage entry. Returns null when the key is
// missing OR when the stored text is not valid JSON, so a corrupted entry
// cannot crash store creation (JSON.parse throws SyntaxError on bad input —
// the previous code let that exception escape at module load).
function readSessionJSON(key) {
  try {
    return JSON.parse(sessionStorage.getItem(key))
  } catch (e) {
    return null
  }
}

export default new Vuex.Store(/** @type {StoreOptions} */ {
  state: {
    // Auth token and current user survive page reloads via sessionStorage.
    token: readSessionJSON("token") || null,
    user: readSessionJSON("user") || null,
  },
  getters: {
    token: (state, getters, rootState, rootGetters) => state.token,
    user: (state, getters, rootState, rootGetters) => state.user,
  },
  mutations: {
    // Store the token in state and mirror it to sessionStorage
    // (a falsy payload clears the persisted entry).
    setToken: (state, payload) => {
      state.token = payload
      if (payload) {
        sessionStorage.setItem("token", JSON.stringify(payload))
      } else {
        sessionStorage.removeItem("token")
      }
    },
    // Same persistence contract as setToken, for the user object.
    setUser: (state, payload) => {
      state.user = payload
      if (payload) {
        sessionStorage.setItem("user", JSON.stringify(payload))
      } else {
        sessionStorage.removeItem("user")
      }
    },
  },
  actions: {
    // Persist the auth token received after a successful sign-in.
    signin: (injectee, payload) => {
      const {commit} = injectee
      commit("setToken", payload)
    },
    setCurrentUser: (injectee, payload) => {
      const {commit} = injectee
      commit("setUser", payload)
    },
    // Clear both token and user (payload is unused).
    signout: (injectee, payload) => {
      const {commit} = injectee
      commit("setToken", null)
      commit("setUser", null)
    },
  },
  modules: {}
})
|
package com.pkhope.diary.adapter;

import java.util.List;

import android.content.Context;
import android.graphics.Color;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.TextView;

import com.pkhope.diary.R;
import com.pkhope.diary.model.Diary;

/**
 * List adapter that renders one {@link Diary} entry per row
 * (date, week day and content text views).
 */
public class DiaryAdapter extends ArrayAdapter<Diary> {

    /** Row layout resource inflated for each list item. */
    private int mResourceId;

    public DiaryAdapter(Context context, int textViewResourceId, List<Diary> objects) {
        super(context, textViewResourceId, objects);
        mResourceId = textViewResourceId;
    }

    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        Diary diary = getItem(position);
        // Fix: reuse the recycled row when the ListView provides one. The
        // previous code ignored convertView (typo'd "converView") and
        // inflated a fresh layout on every call, allocating during scroll.
        // Inflating with the parent (attachToRoot=false) also lets the row
        // pick up correct LayoutParams instead of the null-parent defaults.
        View view = (convertView != null)
                ? convertView
                : LayoutInflater.from(getContext()).inflate(mResourceId, parent, false);
        TextView date = (TextView) view.findViewById(R.id.tv_date);
        TextView week = (TextView) view.findViewById(R.id.tv_week);
        TextView diarycontent = (TextView) view.findViewById(R.id.tv_content);
        date.setText(diary.getDate());
        week.setText(diary.getWeek());
        diarycontent.setText(diary.getContent());
        view.setBackgroundColor(Color.WHITE);
        return view;
    }
}
|
package net.nokok.draft;
import java.lang.annotation.Annotation;
import java.lang.invoke.MethodHandles;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.Proxy;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.logging.Logger;
/**
 * Builds the list of {@code Binding}s declared by a {@code @Module}-annotated
 * interface. Each interface method describes one binding: the generic return
 * type is the bind-to type, the first parameter (if any) supplies the
 * bind-from type and its qualifier annotations, and default methods are
 * invoked through a dynamic proxy so their return value becomes an
 * instance binding.
 */
public class BindingBuilder {

    private static final Logger logger = Logger.getLogger(BindingBuilder.class.getName());

    /** The module interface being scanned; always an interface (checked in ctor). */
    private final Class<?> module;

    /**
     * @param module the module interface to scan, must not be null.
     * @throws IllegalArgumentException if {@code module} is not an interface.
     * @throws NullPointerException if {@code module} is null.
     */
    public BindingBuilder(Class<?> module) {
        if (!module.isInterface()) {
            throw new IllegalArgumentException("Module must be interface");
        }
        this.module = Objects.requireNonNull(module);
    }

    /**
     * Collects the bindings declared directly on {@code module}.
     * Returns an empty list when the class lacks the {@code @Module}
     * annotation; a non-public module only triggers a warning.
     */
    private List<Binding> getBindings(Class<?> module) {
        if (!module.isAnnotationPresent(Module.class)) {
            return Collections.emptyList();
        }
        if (!Modifier.isPublic(module.getModifiers())) {
            logger.warning(String.format("Cannot access module %s", module));
        }
        List<Binding> bindings = new ArrayList<>(module.getDeclaredMethods().length);
        for (Method method : module.getDeclaredMethods()) {
            Type[] methodGenericParameterTypes = method.getGenericParameterTypes();
            Type bindTo = method.getGenericReturnType();
            Type bindFrom;
            List<Annotation> qualifier;
            int parameterLength = methodGenericParameterTypes.length;
            if (parameterLength == 0) {
                // No parameter: the method binds its return type to itself,
                // qualified by the method's own annotations.
                bindFrom = bindTo;
                qualifier = Arrays.asList(method.getAnnotations());
            } else {
                // The first parameter carries the bind-from type and qualifiers.
                bindFrom = methodGenericParameterTypes[0];
                qualifier = Arrays.asList(method.getParameterAnnotations()[0]);
            }
            if (method.isDefault()) {
                try {
                    // Run the default method's body without any concrete
                    // implementation: a proxy whose handler calls the method
                    // via unreflectSpecial. The private Lookup constructor is
                    // opened reflectively (pre-JDK9 technique;
                    // NOTE(review): may fail under strong encapsulation on
                    // newer JDKs — confirm the supported Java versions).
                    ClassLoader classLoader = this.module.getClassLoader();
                    Object proxy = Proxy.newProxyInstance(classLoader, new Class[]{this.module}, (p, m, a) -> {
                        Constructor<MethodHandles.Lookup> ctor = MethodHandles.Lookup.class.getDeclaredConstructor(Class.class);
                        ctor.setAccessible(true);
                        return ctor.newInstance(this.module)
                                .in(this.module)
                                .unreflectSpecial(method, this.module)
                                .bindTo(p)
                                .invokeWithArguments(new Object[parameterLength]);
                    });
                    Object result = method.invoke(proxy, new Object[parameterLength]);
                    bindings.add(new InstanceBinding(qualifier, bindFrom, bindTo, result));
                } catch (Throwable e) {
                    //invokeWithArguments throws Throwable
                    throw new RuntimeException(e);
                }
            } else {
                bindings.add(new SimpleBinding(qualifier, bindFrom, bindTo));
            }
        }
        return bindings;
    }

    /**
     * Collects bindings from this module's {@code @Module} super-interfaces
     * (first matching interface at each level) followed by the module's own.
     * Duplicate keys are resolved by map insertion order, so the module's own
     * bindings (added last) override inherited ones.
     */
    public List<Binding> getBindings() {
        List<Binding> bindings = new ArrayList<>();
        Optional<Class<?>> superModuleOpt = Arrays.stream(this.module.getInterfaces()).filter(c -> c.isAnnotationPresent(Module.class)).findFirst();
        while (superModuleOpt.isPresent()) {
            Class<?> superModule = superModuleOpt.get();
            bindings.addAll(getBindings(superModule));
            superModuleOpt = Arrays.stream(superModule.getInterfaces()).filter(c -> c.isAnnotationPresent(Module.class)).findFirst();
        }
        bindings.addAll(getBindings(this.module));
        Map<Key, Binding> bindingMap = new HashMap<>();
        bindings.forEach(b -> bindingMap.put(b.getKey(), b));
        return new ArrayList<>(bindingMap.values());
    }
}
|
<gh_stars>1-10
/* eslint-disable no-undefined */
import { Button, Form } from '@freecodecamp/react-bootstrap';
import {
CardNumberElement,
CardExpiryElement,
useStripe,
useElements,
Elements
} from '@stripe/react-stripe-js';
import { loadStripe } from '@stripe/stripe-js';
import type {
StripeCardNumberElementChangeEvent,
StripeCardExpiryElementChangeEvent,
PaymentIntentResult
} from '@stripe/stripe-js';
import React, { useState } from 'react';
import envData from '../../../../config/env.json';
import { AddDonationData } from './paypal-button';
const { stripePublicKey }: { stripePublicKey: string | null } = envData;
// Callback handed to the server-side donation flow so it can complete
// 3-D Secure / SCA authentication with the returned client secret.
export type HandleAuthentication = (
  clientSecret: string,
  paymentMethod: string
) => Promise<PaymentIntentResult | { error: { type: string } }>;

interface FormPropTypes {
  // Reports progress/success/failure of the donation to the parent.
  onDonationStateChange: (donationState: AddDonationData) => void;
  // Sends the tokenized payment method to the backend.
  postStripeCardDonation: (
    paymentMethodId: string,
    handleAuthentication: HandleAuthentication
  ) => void;
  // i18n translation function.
  t: (label: string) => string;
  theme: string;
  processing: boolean;
}

// Validation snapshot for a single Stripe input element, mirroring the
// shape of Stripe's element change events.
interface Element {
  elementType: 'cardNumber' | 'cardExpiry';
  complete: boolean;
  error?: null | { type: 'validation_error'; code: string; message: string };
}

type PaymentInfoValidation = Element[];
/**
 * Card donation form: validates the Stripe card-number and expiry elements,
 * tokenizes the card on submit, and hands the payment method id to the
 * backend via `postStripeCardDonation`.
 */
const StripeCardForm = ({
  theme,
  t,
  onDonationStateChange,
  postStripeCardDonation,
  processing
}: FormPropTypes): JSX.Element => {
  const [isSubmissionValid, setSubmissionValidity] = useState(true);
  const [isTokenizing, setTokenizing] = useState(false);
  // One validation entry per Stripe element rendered below.
  const [paymentInfoValidation, setPaymentValidity] =
    useState<PaymentInfoValidation>([
      {
        elementType: 'cardNumber',
        complete: false,
        error: null
      },
      {
        elementType: 'cardExpiry',
        complete: false,
        error: null
      }
    ]);

  // Submission is allowed only when every element is complete and error-free.
  const isPaymentInfoValid = paymentInfoValidation.every(
    ({ complete, error }) => complete && !error
  );
  const isSubmitting = isTokenizing || processing;
  const stripe = useStripe();
  const elements = useElements();

  function handleInputChange(
    event:
      | StripeCardNumberElementChangeEvent
      | StripeCardExpiryElementChangeEvent
  ) {
    const { elementType, error, complete } = event;
    // Fix: use the functional updater. The previous code mapped over the
    // `paymentInfoValidation` captured by this closure, so two change events
    // landing in quick succession could overwrite each other's update.
    setPaymentValidity(prevValidation =>
      prevValidation.map(element => {
        if (element.elementType === elementType)
          return { elementType, error, complete };
        return element;
      })
    );
  }

  // Styling passed through to the Stripe-hosted inputs.
  const options = {
    style: {
      base: {
        fontSize: '18px',
        color: `${theme === 'night' ? '#fff' : '#0a0a23'}`,
        '::placeholder': {
          color: `#858591`
        }
      }
    }
  };

  const handleSubmit = async (e: React.FormEvent) => {
    e.preventDefault();
    if (!isPaymentInfoValid) return setSubmissionValidity(false);
    else setSubmissionValidity(true);
    if (!isSubmitting && stripe && elements) {
      const cardElement = elements.getElement(CardNumberElement);
      if (cardElement) {
        setTokenizing(true);
        // Tokenize the card; only the payment-method id leaves the browser.
        const { paymentMethod, error } = await stripe.createPaymentMethod({
          type: 'card',
          card: cardElement
        });
        if (error) {
          onDonationStateChange({
            redirecting: false,
            processing: false,
            success: false,
            error: t('donate.went-wrong')
          });
        } else if (paymentMethod)
          postStripeCardDonation(paymentMethod.id, handleAuthentication);
      }
    }
    return setTokenizing(false);
  };

  // Completes 3-D Secure when the backend requests it; returns a sentinel
  // error when Stripe.js has not loaded.
  const handleAuthentication = async (
    clientSecret: string,
    paymentMethod: string
  ) => {
    if (stripe) {
      return stripe.confirmCardPayment(clientSecret, {
        // eslint-disable-next-line camelcase, @typescript-eslint/naming-convention
        payment_method: paymentMethod
      });
    }
    return { error: { type: 'StripeNotLoaded' } };
  };

  return (
    <Form className='donation-form' onSubmit={handleSubmit}>
      <div
        className={`donation-elements${
          !isSubmissionValid ? ' failed-submition' : ''
        }`}
      >
        <CardNumberElement
          className='form-control donate-input-element'
          onChange={handleInputChange}
          options={options}
        />
        <CardExpiryElement
          className='form-control donate-input-element'
          onChange={handleInputChange}
          options={options}
        />
      </div>
      <div className={'form-status'}>
        {!isSubmissionValid && <p>{t('donate.valid-card')}</p>}
      </div>
      <Button
        block={true}
        bsStyle='primary'
        className='confirm-donation-btn'
        disabled={!stripe || !elements || isSubmitting}
        type='submit'
      >
        Donate
      </Button>
    </Form>
  );
};
// Wraps the card form in a Stripe <Elements> provider. Without a publishable
// key the form could never tokenize a card, so render nothing in that case.
const CardFormWrapper = (props: FormPropTypes): JSX.Element | null => {
  if (!stripePublicKey) {
    return null;
  } else {
    return (
      <Elements stripe={loadStripe(stripePublicKey)}>
        <StripeCardForm {...props} />
      </Elements>
    );
  }
};

export default CardFormWrapper;
|
<reponame>rahulsing/aws-glue-table-versions-cleanup-utility
package software.aws.glue.tableversions.utils;
import com.amazonaws.services.dynamodbv2.AmazonDynamoDB;
import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder;
/**
 * Manual smoke test for {@code DDBUtil}: writes one row to the cleanup
 * planner table and one row to the cleanup statistics table in us-east-1.
 *
 * NOTE(review): requires live AWS credentials and both DynamoDB tables to
 * exist — this is a runnable driver, not an automated unit test.
 */
public class TestDDBUtil {

    public static void main(String[] args) {
        String ddbTableName_1 = "glue_table_version_cleanup_planner";
        String ddbTableName_2 = "glue_table_version_cleanup_statistics";
        // Wall-clock millis double as unique execution / batch identifiers.
        long executionId = System.currentTimeMillis();
        DDBUtil ddbUtil = new DDBUtil();
        // Key schema of the two tables (hash + range attribute names).
        String hashKey_1 = "execution_batch_id";
        String rangeKey_1 = "database_name_table_name";
        String hashKey_2 = "execution_id";
        String rangeKey_2 = "execution_batch_id";
        // Sample payload values.
        String databaseName = "test_db";
        String tableName = "test_table";
        int numTableVersionsB4Cleanup = 20;
        int numVersionsRetained = 10;
        int numDeletedVersions = 10;
        long executionBatchId = System.currentTimeMillis();
        AmazonDynamoDB ddbClient = AmazonDynamoDBClientBuilder.standard().withRegion("us-east-1").build();
        String notificationTime = new java.util.Date().toString();
        ddbUtil.insertTableDetailsToDynamoDB(ddbClient, ddbTableName_1, hashKey_1, rangeKey_1, executionBatchId,
                databaseName, tableName, notificationTime);
        ddbUtil.insertCleanupStatusToDynamoDB(ddbClient, ddbTableName_2, hashKey_2, rangeKey_2, executionId,
                Long.toString(executionBatchId), databaseName, tableName, numTableVersionsB4Cleanup,
                numVersionsRetained, numDeletedVersions);
    }
}
|
class Book:
    """A single inventory record identifying one book by title, author and ISBN."""

    def __init__(self, title, author, isbn):
        self.title = title
        self.author = author
        self.isbn = isbn
def cadastrar(inventory):
    """Prompt for a book's title, author and ISBN and append it to ``inventory``.

    A blank field aborts the add with an error message; any unexpected
    failure is reported rather than raised, so the menu loop keeps running.
    """
    try:
        title = input("Enter the title of the book: ")
        author = input("Enter the author of the book: ")
        isbn = input("Enter the ISBN of the book: ")
        # All three fields are mandatory; reject empty strings.
        if not all((title, author, isbn)):
            raise ValueError("Title, author, and ISBN cannot be empty.")
        inventory.append(Book(title, author, isbn))
        print("Book added successfully to the inventory.")
    except ValueError as ve:
        print(f"Error: {ve}")
    except Exception as e:
        print(f"An unexpected error occurred: {e}")
def display_inventory(inventory):
    """Print every book in ``inventory`` (1-based), or a notice when empty."""
    if not inventory:
        print("The inventory is empty.")
        return
    print("Current Inventory:")
    position = 1
    for book in inventory:
        print(f"{position}. Title: {book.title}, Author: {book.author}, ISBN: {book.isbn}")
        position += 1
def main():
    """Run the interactive inventory menu until the user chooses to exit."""
    inventory = []
    menu_lines = (
        "\nMenu:",
        "1. Add a new book to the inventory",
        "2. Display current list of books",
        "3. Exit",
    )
    while True:
        for line in menu_lines:
            print(line)
        choice = input("Enter your choice: ")
        if choice == "3":
            print("Exiting the program.")
            break
        if choice == "1":
            cadastrar(inventory)
        elif choice == "2":
            display_inventory(inventory)
        else:
            print("Invalid choice. Please try again.")


if __name__ == "__main__":
    main()
<!DOCTYPE html>
<html>
<head>
<title>Form Example</title>
</head>
<body>
<!-- No action attribute: the browser POSTs the fields back to the serving URL. -->
<form method="POST">
Name: <input type="text" name="name"/><br>
Age: <input type="text" name="age"/><br>
Height (in cm): <input type="text" name="height"/><br>
<input type="submit"/>
</form>
</body>
</html>
// Query parameters for paging/filtering latitude-longitude records.
interface LatLngFilter {
  // Index of the first record to include.
  starting_index: number
  // Index of the last record to include.
  ending_index: number
  // When true, results are searched in ascending order.
  search_ascending: boolean
  // Optional search start time; typed `any` here — presumably a Date or
  // epoch timestamp. NOTE(review): the double underscore in
  // `search__time_start` looks like a typo, but renaming would break
  // consumers — confirm against callers before changing.
  search__time_start?: any
}

export default LatLngFilter
<reponame>parker565/SampleSpringRest-2
import { Component, Inject, forwardRef } from '@angular/core';
import { DataTable } from './table.component';
import { PAGINATION_TEMPLATE } from './pagination.template';
import { PAGINATION_STYLE } from "./pagination.style";
// Compiled (ES5) output of the DataTablePagination Angular component — see
// the sourceMappingURL at the bottom. Paging is expressed entirely by
// mutating the host DataTable's `offset` / `limit` / `page` properties.
export var DataTablePagination = (function () {
    function DataTablePagination(dataTable) {
        this.dataTable = dataTable;
    }
    // Step back one page, clamping so offset never drops below 0.
    DataTablePagination.prototype.pageBack = function () {
        this.dataTable.offset -= Math.min(this.dataTable.limit, this.dataTable.offset);
    };
    DataTablePagination.prototype.pageForward = function () {
        this.dataTable.offset += this.dataTable.limit;
    };
    DataTablePagination.prototype.pageFirst = function () {
        this.dataTable.offset = 0;
    };
    DataTablePagination.prototype.pageLast = function () {
        this.dataTable.offset = (this.maxPage - 1) * this.dataTable.limit;
    };
    // Total number of pages, rounded up from itemCount / limit.
    Object.defineProperty(DataTablePagination.prototype, "maxPage", {
        get: function () {
            return Math.ceil(this.dataTable.itemCount / this.dataTable.limit);
        },
        enumerable: true,
        configurable: true
    });
    // Proxy for the table's page size; coerces <input> string values to Number.
    Object.defineProperty(DataTablePagination.prototype, "limit", {
        get: function () {
            return this.dataTable.limit;
        },
        set: function (value) {
            this.dataTable.limit = Number(value); // TODO better way to handle that value of number <input> is string?
        },
        enumerable: true,
        configurable: true
    });
    // Proxy for the table's current page, with the same string-to-number coercion.
    Object.defineProperty(DataTablePagination.prototype, "page", {
        get: function () {
            return this.dataTable.page;
        },
        set: function (value) {
            this.dataTable.page = Number(value);
        },
        enumerable: true,
        configurable: true
    });
    DataTablePagination.decorators = [
        { type: Component, args: [{
                    selector: 'data-table-pagination',
                    template: PAGINATION_TEMPLATE,
                    styles: [PAGINATION_STYLE]
                },] },
    ];
    /** @nocollapse */
    DataTablePagination.ctorParameters = function () { return [
        { type: DataTable, decorators: [{ type: Inject, args: [forwardRef(function () { return DataTable; }),] },] },
    ]; };
    return DataTablePagination;
}());
//# sourceMappingURL=pagination.component.js.map
import { ISelectionController } from '../../../typings/interfaces'
import { GridImplementationFactory } from '../../../typings/interfaces/implementations/grid-implementation.factory'
import { BaseSelectionOperation } from './base-selection-operation.abstract'
export class EmitFocusedCell extends BaseSelectionOperation {
  constructor(private readonly controller: ISelectionController) {
    super(controller)
  }

  /**
   * Moves cell focus to the selection's start position and emits
   * `CellFocusChangedEvent` — but only for a plain (shift-less) additive
   * selection whose focused cell actually changes.
   *
   * @returns true when a focus-change event was emitted, false otherwise.
   */
  public run(): boolean {
    const state = this.selectionState
    if (!state) return false

    // Shift-extended or non-adding selections never move the focus.
    if (state.hasShiftKey || !state.isAdding) return false

    const candidate = GridImplementationFactory.gridFocusedCell(state.startCellPos)
    const current = state.focusedCell
    const unchanged =
      current?.rowKey === candidate.rowKey &&
      current?.columnKey === candidate.columnKey
    if (unchanged) return false

    state.focusedCell = candidate
    this.controller.gridEvents.CellFocusChangedEvent.emit(candidate)
    return true
  }
}
|
module Docurest
  module Field
    # Field converter that parses the raw field value into a ::DateTime.
    class DateTime < Docurest::Field::Base
      # Parse the field's string value (`value` comes from the Base class).
      # ::DateTime.parse raises ArgumentError on unparseable input.
      def convert
        ::DateTime.parse(value)
      end
    end
  end
end
|
#!/bin/bash
# Spins up three Archipel orchestrator containers (one per validator) on the
# "archipel" docker network, each driving a Polkadot node over websocket.
# NOTE(review): the session keys below are test fixtures — never reuse them
# outside this test setup.
ARCHIPEL_ORCHESTRATOR_VERSION="test"
# Launch Archipel orchestrator in docker container
# Arguments: $1=container name, $2=node websocket URL, $3=wallet mnemonic,
#            $4=polkadot node name, $5=volume prefix for the polkadot node.
function launch_orchestrator () {
echo "Starting $1..."
# Launching docker container of node
docker run -d --name "$1" \
--network archipel \
-v $1_service:/service \
-v /var/run/docker.sock:/var/run/docker.sock \
--env NODE_WS="$2" \
--env MNEMONIC="$3" \
--env POLKADOT_NAME="$4" \
--env POLKADOT_PREFIX="$5" \
--env POLKADOT_IMAGE="parity/polkadot:latest" \
--env POLKADOT_KEY_GRAN="april shift pupil quit mandate school cost oven gospel castle brain student" \
--env POLKADOT_KEY_BABE="region run sunset rule light gap cool element angle example laundry stadium" \
--env POLKADOT_KEY_IMON="screen sustain clog husband assist noble artist sea fringe afford coil hawk" \
--env POLKADOT_KEY_PARA="produce hover hurdle lobster december slight hat note quit bomb drama notice" \
--env POLKADOT_KEY_AUDI="oak tail stomach fluid trade aunt fire fringe mercy roast style garlic" \
--env POLKADOT_ADDITIONAL_OPTIONS="--db-cache 512 --rpc-methods=Unsafe" \
--env DEBUG="app,chain,docker,metrics,polkadot,service" \
--env ALIVE_TIME=60000 \
--env SERVICE="polkadot" \
--env SUSPEND_SERVICE="false" \
luguslabs/archipel-orchestrator:$ARCHIPEL_ORCHESTRATOR_VERSION
echo "Waiting 5 seconds to be sure that orchestrator is started..."
sleep 5
}
# Per-orchestrator service volumes (mounted at /service above).
docker volume create orchestrator1_service
docker volume create orchestrator2_service
docker volume create orchestrator3_service
launch_orchestrator "orchestrator1" "ws://172.28.42.2:9944" "mushroom ladder bomb tornado clown wife bean creek axis flat pave cloud" "validator1" "node1-"
launch_orchestrator "orchestrator2" "ws://172.28.42.3:9944" "fiscal toe illness tunnel pill spatial kind dash educate modify sustain suffer" "validator2" "node2-"
launch_orchestrator "orchestrator3" "ws://172.28.42.4:9944" "borrow initial guard hunt corn trust student opera now economy thumb argue" "validator3" "node3-"
echo "Orchestrators were created."
docker ps
|
/*
*
* Created on: 2021年1月1日
* Author: Lzy
*/
#include "test_devctrl.h"
// Constructor only forwards the parent to the Test_Object base; member
// initialization is deferred to initFunSlot().
Test_DevCtrl::Test_DevCtrl(QObject *parent) : Test_Object(parent)
{
}
// Singleton accessor (name kept as "bulid" for source compatibility with
// existing callers). A C++11 function-local static is initialized exactly
// once and thread-safely, fixing the data race of the original
// checked-null pattern. NOTE: only the first caller's 'parent' is used;
// later calls ignore the argument (same as the original behavior).
Test_DevCtrl *Test_DevCtrl::bulid(QObject *parent)
{
    static Test_DevCtrl* sington = new Test_DevCtrl(parent);
    return sington;
}
// Set up the RTU handles: mRtu is the currently selected backend (chosen
// later in initDev()); mSiRtu / mIpRtu are the two concrete backends.
void Test_DevCtrl::initFunSlot()
{
    mRtu = nullptr;                      // no backend selected yet
    mSiRtu = Ctrl_SiRtu::bulid(this);    // serial (SI) RTU singleton
    mIpRtu = Ctrl_IpRtu::bulid(this);    // network (IP) RTU singleton
}
// Forward the current-threshold setting to the RTU backend selected by
// initDev(); returns false when the device type is unknown.
bool Test_DevCtrl::setCurTh(int i)
{
    if (!initDev())
        return false;
    return mRtu->setCurTh(i);
}
// Forward the voltage-threshold setting to the RTU backend selected by
// initDev(); returns false when the device type is unknown.
bool Test_DevCtrl::setVolTh(int i)
{
    if (!initDev())
        return false;
    return mRtu->setVolTh(i);
}
// Trigger the energy-counter reset on the selected RTU backend;
// returns false when the device type is unknown.
bool Test_DevCtrl::eleClean()
{
    if (!initDev())
        return false;
    return mRtu->eleClean();
}
// Request a factory reset on the selected RTU backend;
// returns false when the device type is unknown.
bool Test_DevCtrl::factorySet()
{
    if (!initDev())
        return false;
    return mRtu->factorySet();
}
// Select the RTU backend matching the configured device type.
// Returns true when a backend was selected; on an unknown type mRtu is
// left untouched and false is returned.
bool Test_DevCtrl::initDev()
{
    if (mDt->devType == SI_PDU) {
        mRtu = mSiRtu;
        return true;
    }
    if (mDt->devType == IP_PDU) {
        mRtu = mIpRtu;
        return true;
    }
    return false;
}
|
/** @jsx jsx */
import { Button, Classes, Icon, IconName } from "@blueprintjs/core"
import { IconNames } from "@blueprintjs/icons"
import { css, jsx } from "@emotion/core"
import { Link } from "react-router-dom"
import { defaultTheme, ITheme } from "../../utilities"
import { IThemeProps } from "./DimensionAwareNavbar"
/**
* <MenuButton
* handleClick={this.handleClick}
* isOpen={this.state.isOpen}
* menuIcon={this.props.menuIcon}
* menuOpenIcon={this.props.menuOpenIcon}
* menuButtonAsLink={this.props.menuButtonAsLink}
* menuShowButton={this.props.menuShowButton}
* />
*/
/**
 * Props supplied by users of the navbar. `menuIcon` / `menuOpenIcon`
 * accept a Blueprint IconName, a JSX element, or an image URL/path
 * string (a string starting with "/" or "http" is rendered as <img>,
 * see ImgOrIcon below).
 */
export interface IMenuButtonExternalProps extends IThemeProps {
  homeUrl?: string
  menuIcon?: IconName | JSX.Element | string
  menuOpenIcon?: IconName | JSX.Element | string
  menuButtonAsLink?: boolean
  menuShowButton?: boolean
}

/** External props plus the open state and click handler owned by the parent. */
export interface IMenuButtonProps extends IMenuButtonExternalProps {
  handleClick: () => any
  isOpen: boolean
}
// Floating container for the menu toggle: pinned to the top-left corner,
// stacked above navbar content, with Blueprint's button chrome suppressed.
const cssButton = (theme: ITheme) => css`
  background-color: ${theme.navbarBackground} !important;
  box-shadow: none !important;
  background-image: none !important;
  top: 15px;
  left: 15px;
  position: absolute;
  z-index: 1020;
`

// Icon coloring with a hover accent, both taken from the active theme.
const cssIcon = (theme: ITheme) => css`
  color: ${theme.button} !important;
  &:hover {
    color: ${theme.buttonHover};
  }
`

// Cap image-based icons so custom logos match the 32px Blueprint icon size.
const cssButtonImg = css`
  max-height: 32px;
`
/**
 * Renders either an <img> (when `icon` is an image URL or absolute path)
 * or a Blueprint <Icon> for any other value (IconName or JSX element).
 */
const ImgOrIcon = (
  props: { icon: IconName | JSX.Element | string } & IThemeProps
) => {
  const { icon, theme } = props
  if (
    typeof icon === "string" &&
    (icon.startsWith("/") || icon.startsWith("http"))
  ) {
    // `icon` is already narrowed to string here, so no cast is needed.
    // Empty alt marks the graphic as decorative for screen readers.
    return <img css={cssButtonImg} src={icon} alt="" />
  } else {
    return <Icon css={cssIcon(theme)} iconSize={32} icon={icon as IconName} />
  }
}
/**
 * Picks the icon matching the menu state: `menuOpenIcon` while the menu
 * is open, `menuIcon` otherwise.
 */
const OpenCloseIcon = (
  props: {
    isOpen: boolean
    menuIcon?: IconName | JSX.Element | string
    menuOpenIcon?: IconName | JSX.Element | string
  } & IThemeProps
) => {
  const { isOpen, menuIcon, menuOpenIcon, theme } = props
  const currentIcon = isOpen ? menuOpenIcon : menuIcon
  return <ImgOrIcon icon={currentIcon} theme={theme} />
}
export const MenuButton = (props: IMenuButtonProps) => {
const {
handleClick,
homeUrl,
isOpen,
menuIcon = IconNames.MENU,
menuOpenIcon = IconNames.CROSS,
menuButtonAsLink = false,
menuShowButton = true,
theme = defaultTheme
} = props
if (menuShowButton && !menuButtonAsLink) {
return (
<Button css={cssButton(theme)} onClick={handleClick}>
<OpenCloseIcon
isOpen={isOpen}
menuIcon={menuIcon}
menuOpenIcon={menuOpenIcon}
theme={theme}
/>
</Button>
)
} else if (menuShowButton && menuButtonAsLink) {
return (
<Link css={cssButton(theme)} className={Classes.BUTTON} to={homeUrl}>
<OpenCloseIcon isOpen={false} menuIcon={menuIcon} theme={theme} />
</Link>
)
} else {
return <div />
}
}
|
#!/usr/bin/env bash
# Copyright 2014 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# shellcheck disable=SC2034 # Variables sourced in other scripts.
# Common utilities, variables and checks for all build scripts.
set -o errexit
set -o nounset
set -o pipefail

# Unset CDPATH, having it set messes up with script import paths
unset CDPATH

# Identity of the invoking user; used to chown files created inside containers.
USER_ID=$(id -u)
GROUP_ID=$(id -g)

DOCKER_OPTS=${DOCKER_OPTS:-""}
# DOCKER is an array so extra options in DOCKER_OPTS expand as separate words
# exactly once, here.
IFS=" " read -r -a DOCKER <<< "docker ${DOCKER_OPTS}"
DOCKER_HOST=${DOCKER_HOST:-""}

# This will canonicalize the path
KUBE_ROOT=$(cd "$(dirname "${BASH_SOURCE[0]}")"/.. && pwd -P)

source "${KUBE_ROOT}/hack/lib/init.sh"

# Constants
readonly KUBE_BUILD_IMAGE_REPO=kube-build
readonly KUBE_BUILD_IMAGE_CROSS_TAG="$(cat "${KUBE_ROOT}/build/build-image/cross/VERSION")"

readonly KUBE_DOCKER_REGISTRY="${KUBE_DOCKER_REGISTRY:-k8s.gcr.io}"
readonly KUBE_BASE_IMAGE_REGISTRY="${KUBE_BASE_IMAGE_REGISTRY:-k8s.gcr.io/build-image}"

# This version number is used to cause everyone to rebuild their data containers
# and build image. This is especially useful for automated build systems like
# Jenkins.
#
# Increment/change this number if you change the build image (anything under
# build/build-image) or change the set of volumes in the data container.
readonly KUBE_BUILD_IMAGE_VERSION_BASE="$(cat "${KUBE_ROOT}/build/build-image/VERSION")"
readonly KUBE_BUILD_IMAGE_VERSION="${KUBE_BUILD_IMAGE_VERSION_BASE}-${KUBE_BUILD_IMAGE_CROSS_TAG}"

# Here we map the output directories across both the local and remote _output
# directories:
#
# *_OUTPUT_ROOT    - the base of all output in that environment.
# *_OUTPUT_SUBPATH - location where golang stuff is built/cached.  Also
#                    persisted across docker runs with a volume mount.
# *_OUTPUT_BINPATH - location where final binaries are placed.  If the remote
#                    is really remote, this is the stuff that has to be copied
#                    back.
# OUT_DIR can come in from the Makefile, so honor it.
readonly LOCAL_OUTPUT_ROOT="${KUBE_ROOT}/${OUT_DIR:-_output}"
readonly LOCAL_OUTPUT_SUBPATH="${LOCAL_OUTPUT_ROOT}/dockerized"
readonly LOCAL_OUTPUT_BINPATH="${LOCAL_OUTPUT_SUBPATH}/bin"
readonly LOCAL_OUTPUT_GOPATH="${LOCAL_OUTPUT_SUBPATH}/go"
readonly LOCAL_OUTPUT_IMAGE_STAGING="${LOCAL_OUTPUT_ROOT}/images"

# This is a symlink to binaries for "this platform" (e.g. build tools).
readonly THIS_PLATFORM_BIN="${LOCAL_OUTPUT_ROOT}/bin"

# Paths as seen from inside the build container (KUBE_GO_PACKAGE is set by init.sh).
readonly REMOTE_ROOT="/go/src/${KUBE_GO_PACKAGE}"
readonly REMOTE_OUTPUT_ROOT="${REMOTE_ROOT}/_output"
readonly REMOTE_OUTPUT_SUBPATH="${REMOTE_OUTPUT_ROOT}/dockerized"
readonly REMOTE_OUTPUT_BINPATH="${REMOTE_OUTPUT_SUBPATH}/bin"
readonly REMOTE_OUTPUT_GOPATH="${REMOTE_OUTPUT_SUBPATH}/go"

# This is the port on the workstation host to expose RSYNC on.  Set this if you
# are doing something fancy with ssh tunneling.
readonly KUBE_RSYNC_PORT="${KUBE_RSYNC_PORT:-}"

# This is the port that rsync is running on *inside* the container. This may be
# mapped to KUBE_RSYNC_PORT via docker networking.
readonly KUBE_CONTAINER_RSYNC_PORT=8730

# These are the default versions (image tags) for their respective base images.
readonly __default_debian_iptables_version=buster-v1.6.7
readonly __default_go_runner_version=v2.3.1-go1.16.12-buster.0
readonly __default_setcap_version=buster-v2.0.4

# These are the base images for the Docker-wrapped binaries.
readonly KUBE_GORUNNER_IMAGE="${KUBE_GORUNNER_IMAGE:-$KUBE_BASE_IMAGE_REGISTRY/go-runner:$__default_go_runner_version}"
readonly KUBE_APISERVER_BASE_IMAGE="${KUBE_APISERVER_BASE_IMAGE:-$KUBE_GORUNNER_IMAGE}"
readonly KUBE_CONTROLLER_MANAGER_BASE_IMAGE="${KUBE_CONTROLLER_MANAGER_BASE_IMAGE:-$KUBE_GORUNNER_IMAGE}"
readonly KUBE_SCHEDULER_BASE_IMAGE="${KUBE_SCHEDULER_BASE_IMAGE:-$KUBE_GORUNNER_IMAGE}"
readonly KUBE_PROXY_BASE_IMAGE="${KUBE_PROXY_BASE_IMAGE:-$KUBE_BASE_IMAGE_REGISTRY/debian-iptables:$__default_debian_iptables_version}"

# This is the image used in a multi-stage build to apply capabilities to Docker-wrapped binaries.
readonly KUBE_BUILD_SETCAP_IMAGE="${KUBE_BUILD_SETCAP_IMAGE:-$KUBE_BASE_IMAGE_REGISTRY/setcap:$__default_setcap_version}"
# Get the set of master binaries that run in Docker (on Linux)
# Entry format is "<binary-name>,<base-image>".
# Binaries are placed in /usr/local/bin inside the image.
# `make` users can override any or all of the base images using the associated
# environment variables.
#
# $1 - server architecture
kube::build::get_docker_wrapped_binaries() {
  ### If you change any of these lists, please also update DOCKERIZED_BINARIES
  ### in build/BUILD. And kube::golang::server_image_targets
  local -a targets=()
  targets+=("kube-apiserver,${KUBE_APISERVER_BASE_IMAGE}")
  targets+=("kube-controller-manager,${KUBE_CONTROLLER_MANAGER_BASE_IMAGE}")
  targets+=("kube-scheduler,${KUBE_SCHEDULER_BASE_IMAGE}")
  targets+=("kube-proxy,${KUBE_PROXY_BASE_IMAGE}")

  echo "${targets[@]}"
}
# ---------------------------------------------------------------------------
# Basic setup functions
# Verify that the right utilities and such are installed for building Kube. Set
# up some dynamic constants.
# Args:
# $1 - boolean of whether to require functioning docker (default true)
#
# Vars set:
# KUBE_ROOT_HASH
# KUBE_BUILD_IMAGE_TAG_BASE
# KUBE_BUILD_IMAGE_TAG
# KUBE_BUILD_IMAGE
# KUBE_BUILD_CONTAINER_NAME_BASE
# KUBE_BUILD_CONTAINER_NAME
# KUBE_DATA_CONTAINER_NAME_BASE
# KUBE_DATA_CONTAINER_NAME
# KUBE_RSYNC_CONTAINER_NAME_BASE
# KUBE_RSYNC_CONTAINER_NAME
# DOCKER_MOUNT_ARGS
# LOCAL_OUTPUT_BUILD_CONTEXT
function kube::build::verify_prereqs() {
  # $1 - boolean of whether to require functioning docker (default true)
  local -r require_docker=${1:-true}
  kube::log::status "Verifying Prerequisites...."
  kube::build::ensure_tar || return 1
  kube::build::ensure_rsync || return 1
  if ${require_docker}; then
    kube::build::ensure_docker_in_path || return 1
    if kube::build::is_osx; then
      kube::build::docker_available_on_osx || return 1
    fi
    kube::util::ensure_docker_daemon_connectivity || return 1

    if (( KUBE_VERBOSE > 6 )); then
      kube::log::status "Docker Version:"
      "${DOCKER[@]}" version | kube::log::info_from_stdin
    fi
  fi

  # Derive a hash unique to this host + checkout + branch so containers and
  # images from different working trees on one machine never collide.
  KUBE_GIT_BRANCH=$(git symbolic-ref --short -q HEAD 2>/dev/null || true)
  KUBE_ROOT_HASH=$(kube::build::short_hash "${HOSTNAME:-}:${KUBE_ROOT}:${KUBE_GIT_BRANCH}")
  KUBE_BUILD_IMAGE_TAG_BASE="build-${KUBE_ROOT_HASH}"
  KUBE_BUILD_IMAGE_TAG="${KUBE_BUILD_IMAGE_TAG_BASE}-${KUBE_BUILD_IMAGE_VERSION}"
  KUBE_BUILD_IMAGE="${KUBE_BUILD_IMAGE_REPO}:${KUBE_BUILD_IMAGE_TAG}"
  KUBE_BUILD_CONTAINER_NAME_BASE="kube-build-${KUBE_ROOT_HASH}"
  KUBE_BUILD_CONTAINER_NAME="${KUBE_BUILD_CONTAINER_NAME_BASE}-${KUBE_BUILD_IMAGE_VERSION}"
  KUBE_RSYNC_CONTAINER_NAME_BASE="kube-rsync-${KUBE_ROOT_HASH}"
  KUBE_RSYNC_CONTAINER_NAME="${KUBE_RSYNC_CONTAINER_NAME_BASE}-${KUBE_BUILD_IMAGE_VERSION}"
  KUBE_DATA_CONTAINER_NAME_BASE="kube-build-data-${KUBE_ROOT_HASH}"
  KUBE_DATA_CONTAINER_NAME="${KUBE_DATA_CONTAINER_NAME_BASE}-${KUBE_BUILD_IMAGE_VERSION}"
  DOCKER_MOUNT_ARGS=(--volumes-from "${KUBE_DATA_CONTAINER_NAME}")
  LOCAL_OUTPUT_BUILD_CONTEXT="${LOCAL_OUTPUT_IMAGE_STAGING}/${KUBE_BUILD_IMAGE}"

  kube::version::get_version_vars
  kube::version::save_version_vars "${KUBE_ROOT}/.dockerized-kube-version-defs"

  # Without this, the user's umask can leak through.
  umask 0022
}
# ---------------------------------------------------------------------------
# Utility functions
# Succeeds when a docker daemon is reachable on macOS: either DOCKER_HOST is
# set explicitly, or the Docker for Mac socket exists. Logs guidance and
# fails otherwise.
function kube::build::docker_available_on_osx() {
  # A non-empty DOCKER_HOST means the user already pointed us at a daemon.
  if [[ -n "${DOCKER_HOST}" ]]; then
    return 0
  fi
  if [[ -S "/var/run/docker.sock" ]]; then
    kube::log::status "Using Docker for MacOS"
    return 0
  fi
  kube::log::status "No docker host is set."
  kube::log::status "It looks like you're running Mac OS X, but Docker for Mac cannot be found."
  kube::log::status "See: https://docs.docker.com/engine/installation/mac/ for installation instructions."
  return 1
}
# Exit status reflects whether the kernel reports Darwin (macOS).
function kube::build::is_osx() {
  case "$(uname)" in
    Darwin) return 0 ;;
    *)      return 1 ;;
  esac
}
# True when the 'sed' in PATH is GNU sed. BSD sed errors out on --version,
# so only GNU sed prints a banner containing "GNU". The command substitution
# is kept inside [[ ]] so a failing sed does not trip 'set -o errexit'.
function kube::build::is_gnu_sed() {
  [[ $(sed --version 2>&1) == *GNU* ]]
}
# Fail with a message unless 'rsync' is available in PATH.
function kube::build::ensure_rsync() {
  # 'command -v' is POSIX and avoids relying on the external 'which' binary.
  if ! command -v rsync >/dev/null 2>&1; then
    kube::log::error "Can't find 'rsync' in PATH, please fix and retry."
    return 1
  fi
}
# Fail with a message unless 'docker' is available in PATH.
function kube::build::ensure_docker_in_path() {
  # 'command -v' is POSIX and avoids relying on the external 'which' binary.
  if ! command -v docker >/dev/null 2>&1; then
    kube::log::error "Can't find 'docker' in PATH, please fix and retry."
    kube::log::error "See https://docs.docker.com/installation/#installation for installation instructions."
    return 1
  fi
}
# Ensure TAR points at a GNU tar. Honors a pre-set TAR; otherwise probes
# 'gtar' and 'gnutar' (the Homebrew/macOS names) before falling back to
# plain 'tar', and fails if none of them is the GNU implementation.
function kube::build::ensure_tar() {
  if [[ -n "${TAR:-}" ]]; then
    return
  fi

  # Find gnu tar if it is available, bomb out if not.
  TAR=tar
  if which gtar &>/dev/null; then
    TAR=gtar
  elif which gnutar &>/dev/null; then
    TAR=gnutar
  fi
  if ! "${TAR}" --version | grep -q GNU; then
    echo " !!! Cannot find GNU tar. Build on Linux or install GNU tar"
    echo " on Mac OS X (brew install gnu-tar)."
    return 1
  fi
}
# True when a 'docker' binary is available in PATH.
function kube::build::has_docker() {
  # 'command -v' is POSIX and avoids relying on the external 'which' binary.
  command -v docker &> /dev/null
}
# True when an 'ip' binary exists AND it is the iproute2 implementation
# (BusyBox and others also ship an 'ip' with different output).
function kube::build::has_ip() {
  command -v ip &> /dev/null && ip -Version | grep 'iproute2' &> /dev/null
}
# Detect if a specific image exists
#
# $1 - image repo name
# $2 - image tag
# Detect if a specific image exists.
# $1 - image repo name
# $2 - image tag
function kube::build::docker_image_exists() {
  if [[ -z "$1" || -z "$2" ]]; then
    kube::log::error "Internal error. Image not specified in docker_image_exists."
    exit 2
  fi

  # 'docker images -q' prints an ID only when the image is present.
  [[ -n "$("${DOCKER[@]}" images -q "${1}:${2}")" ]]
}
# Delete all images that match a tag prefix except for the "current" version
#
# $1: The image repo/name
# $2: The tag base. We consider any image that matches $2*
# $3: The current image not to delete if provided
function kube::build::docker_delete_old_images() {
  # $1: image repo/name; $2: tag prefix to match; $3: current tag to keep.
  # In Docker 1.12, we can replace this with
  #   docker images "$1" --format "{{.Tag}}"
  for tag in $("${DOCKER[@]}" images "${1}" | tail -n +2 | awk '{print $2}') ; do
    if [[ "${tag}" != "${2}"* ]] ; then
      V=3 kube::log::status "Keeping image ${1}:${tag}"
      continue
    fi

    # Delete every tag matching the prefix except the optional "current" one.
    if [[ -z "${3:-}" || "${tag}" != "${3}" ]] ; then
      V=2 kube::log::status "Deleting image ${1}:${tag}"
      "${DOCKER[@]}" rmi "${1}:${tag}" >/dev/null
    else
      V=3 kube::log::status "Keeping image ${1}:${tag}"
    fi
  done
}
# Stop and delete all containers that match a pattern
#
# $1: The base container prefix
# $2: The current container to keep, if provided
function kube::build::docker_delete_old_containers() {
  # $1: container name prefix to match; $2: current container to keep.
  # In Docker 1.12 we can replace this line with
  #   docker ps -a --format="{{.Names}}"
  for container in $("${DOCKER[@]}" ps -a | tail -n +2 | awk '{print $NF}') ; do
    if [[ "${container}" != "${1}"* ]] ; then
      V=3 kube::log::status "Keeping container ${container}"
      continue
    fi
    # Delete everything matching the prefix except the optional "current" one.
    if [[ -z "${2:-}" || "${container}" != "${2}" ]] ; then
      V=2 kube::log::status "Deleting container ${container}"
      kube::build::destroy_container "${container}"
    else
      V=3 kube::log::status "Keeping container ${container}"
    fi
  done
}
# Takes $1 and computes a short hash for it. Useful for unique tag generation
function kube::build::short_hash() {
  [[ $# -eq 1 ]] || {
    # Message fixed: was "No data based to short_hash."
    kube::log::error "Internal error. No data passed to short_hash."
    exit 2
  }
  local short_hash
  # macOS ships 'md5'; Linux ships 'md5sum'. Either way only the first 10
  # hex characters are used (md5sum's trailing " -" is sliced off too).
  if which md5 >/dev/null 2>&1; then
    short_hash=$(md5 -q -s "$1")
  else
    short_hash=$(echo -n "$1" | md5sum)
  fi
  echo "${short_hash:0:10}"
}
# Pedantically kill, wait-on and remove a container. The -f -v options
# to rm don't actually seem to get the job done, so force kill the
# container, wait to ensure it's stopped, then try the remove. This is
# a workaround for bug https://github.com/docker/docker/issues/3968.
# Pedantically kill, wait-on and remove the container named $1. Every step
# tolerates failure (|| true) so the function is safe on already-gone
# containers; see https://github.com/docker/docker/issues/3968 for why
# 'rm -f -v' alone is not enough.
function kube::build::destroy_container() {
  "${DOCKER[@]}" kill "$1" >/dev/null 2>&1 || true
  if [[ $("${DOCKER[@]}" version --format '{{.Server.Version}}') = 17.06.0* ]]; then
    # Workaround https://github.com/moby/moby/issues/33948.
    # TODO: remove when 17.06.0 is not relevant anymore
    DOCKER_API_VERSION=v1.29 "${DOCKER[@]}" wait "$1" >/dev/null 2>&1 || true
  else
    "${DOCKER[@]}" wait "$1" >/dev/null 2>&1 || true
  fi
  "${DOCKER[@]}" rm -f -v "$1" >/dev/null 2>&1 || true
}
# ---------------------------------------------------------------------------
# Building
# Remove this checkout's build containers and images, prune dangling images,
# and wipe the local _output directory.
function kube::build::clean() {
  if kube::build::has_docker ; then
    kube::build::docker_delete_old_containers "${KUBE_BUILD_CONTAINER_NAME_BASE}"
    kube::build::docker_delete_old_containers "${KUBE_RSYNC_CONTAINER_NAME_BASE}"
    kube::build::docker_delete_old_containers "${KUBE_DATA_CONTAINER_NAME_BASE}"
    kube::build::docker_delete_old_images "${KUBE_BUILD_IMAGE_REPO}" "${KUBE_BUILD_IMAGE_TAG_BASE}"

    V=2 kube::log::status "Cleaning all untagged docker images"
    "${DOCKER[@]}" rmi "$("${DOCKER[@]}" images -q --filter 'dangling=true')" 2> /dev/null || true
  fi

  if [[ -d "${LOCAL_OUTPUT_ROOT}" ]]; then
    kube::log::status "Removing _output directory"
    rm -rf "${LOCAL_OUTPUT_ROOT}"
  fi
}
# Set up the context directory for the kube-build image and build it.
# Set up the context directory for the kube-build image and build it.
function kube::build::build_image() {
  mkdir -p "${LOCAL_OUTPUT_BUILD_CONTEXT}"
  # Make sure the context directory owned by the right user for syncing sources to container.
  chown -R "${USER_ID}":"${GROUP_ID}" "${LOCAL_OUTPUT_BUILD_CONTEXT}"

  cp /etc/localtime "${LOCAL_OUTPUT_BUILD_CONTEXT}/"
  chmod u+w "${LOCAL_OUTPUT_BUILD_CONTEXT}/localtime"

  cp "${KUBE_ROOT}/build/build-image/Dockerfile" "${LOCAL_OUTPUT_BUILD_CONTEXT}/Dockerfile"
  cp "${KUBE_ROOT}/build/build-image/rsyncd.sh" "${LOCAL_OUTPUT_BUILD_CONTEXT}/"
  # Generate a random 32-char alphanumeric rsync password, readable only by
  # the current user (chmod go=).
  dd if=/dev/urandom bs=512 count=1 2>/dev/null | LC_ALL=C tr -dc 'A-Za-z0-9' | dd bs=32 count=1 2>/dev/null > "${LOCAL_OUTPUT_BUILD_CONTEXT}/rsyncd.password"
  chmod go= "${LOCAL_OUTPUT_BUILD_CONTEXT}/rsyncd.password"

  kube::build::docker_build "${KUBE_BUILD_IMAGE}" "${LOCAL_OUTPUT_BUILD_CONTEXT}" 'false' "--build-arg=KUBE_BUILD_IMAGE_CROSS_TAG=${KUBE_BUILD_IMAGE_CROSS_TAG} --build-arg=KUBE_BASE_IMAGE_REGISTRY=${KUBE_BASE_IMAGE_REGISTRY}"

  # Clean up old versions of everything
  kube::build::docker_delete_old_containers "${KUBE_BUILD_CONTAINER_NAME_BASE}" "${KUBE_BUILD_CONTAINER_NAME}"
  kube::build::docker_delete_old_containers "${KUBE_RSYNC_CONTAINER_NAME_BASE}" "${KUBE_RSYNC_CONTAINER_NAME}"
  kube::build::docker_delete_old_containers "${KUBE_DATA_CONTAINER_NAME_BASE}" "${KUBE_DATA_CONTAINER_NAME}"
  kube::build::docker_delete_old_images "${KUBE_BUILD_IMAGE_REPO}" "${KUBE_BUILD_IMAGE_TAG_BASE}" "${KUBE_BUILD_IMAGE_TAG}"

  kube::build::ensure_data_container
  kube::build::sync_to_container
}
# Build a docker image from a Dockerfile.
# $1 is the name of the image to build
# $2 is the location of the "context" directory, with the Dockerfile at the root.
# $3 is the value to set the --pull flag for docker build; true by default
# $4 is the set of --build-args for docker.
# Build a docker image from a Dockerfile.
# $1 is the name of the image to build
# $2 is the location of the "context" directory, with the Dockerfile at the root.
# $3 is the value to set the --pull flag for docker build; true by default
# $4 is the set of --build-args for docker (space-separated, split below).
function kube::build::docker_build() {
  local -r image=$1
  local -r context_dir=$2
  local -r pull="${3:-true}"
  local build_args
  IFS=" " read -r -a build_args <<< "$4"
  readonly build_args
  local -ra build_cmd=("${DOCKER[@]}" build -t "${image}" "--pull=${pull}" "${build_args[@]}" "${context_dir}")

  kube::log::status "Building Docker image ${image}"
  local docker_output
  # Capture output; on failure dump it with a copy-pasteable retry command.
  docker_output=$("${build_cmd[@]}" 2>&1) || {
    cat <<EOF >&2
+++ Docker build command failed for ${image}
${docker_output}
To retry manually, run:
${build_cmd[*]}
EOF
    return 1
  }
}
function kube::build::ensure_data_container() {
  # If the data container exists AND exited successfully, we can use it.
  # Otherwise nuke it and start over.
  local ret=0
  local code=0

  # ret != 0 -> inspect failed (container missing); code != 0 -> it exited badly.
  code=$(docker inspect \
      -f '{{.State.ExitCode}}' \
      "${KUBE_DATA_CONTAINER_NAME}" 2>/dev/null) || ret=$?
  if [[ "${ret}" == 0 && "${code}" != 0 ]]; then
    kube::build::destroy_container "${KUBE_DATA_CONTAINER_NAME}"
    ret=1
  fi
  if [[ "${ret}" != 0 ]]; then
    kube::log::status "Creating data container ${KUBE_DATA_CONTAINER_NAME}"
    # We have to ensure the directory exists, or else the docker run will
    # create it as root.
    mkdir -p "${LOCAL_OUTPUT_GOPATH}"
    # We want this to run as root to be able to chown, so non-root users can
    # later use the result as a data container. This run both creates the data
    # container and chowns the GOPATH.
    #
    # The data container creates volumes for all of the directories that store
    # intermediates for the Go build. This enables incremental builds across
    # Docker sessions. The *_cgo paths are re-compiled versions of the go std
    # libraries for true static building.
    local -ra docker_cmd=(
      "${DOCKER[@]}" run
      --volume "${REMOTE_ROOT}"   # white-out the whole output dir
      --volume /usr/local/go/pkg/linux_386_cgo
      --volume /usr/local/go/pkg/linux_amd64_cgo
      --volume /usr/local/go/pkg/linux_arm_cgo
      --volume /usr/local/go/pkg/linux_arm64_cgo
      --volume /usr/local/go/pkg/linux_ppc64le_cgo
      --volume /usr/local/go/pkg/darwin_amd64_cgo
      --volume /usr/local/go/pkg/darwin_386_cgo
      --volume /usr/local/go/pkg/windows_amd64_cgo
      --volume /usr/local/go/pkg/windows_386_cgo
      --name "${KUBE_DATA_CONTAINER_NAME}"
      --hostname "${HOSTNAME}"
      "${KUBE_BUILD_IMAGE}"
      chown -R "${USER_ID}":"${GROUP_ID}"
        "${REMOTE_ROOT}"
        /usr/local/go/pkg/
    )
    "${docker_cmd[@]}"
  fi
}
# Run a command in the kube-build image. This assumes that the image has
# already been built.
# Run a command in the kube-build image. This assumes that the image has
# already been built.
function kube::build::run_build_command() {
  kube::log::status "Running build command..."
  # Delegate to the extended runner with the canonical build container name;
  # everything after "--" is executed inside the container.
  kube::build::run_build_command_ex "${KUBE_BUILD_CONTAINER_NAME}" -- "$@"
}
# Run a command in the kube-build image. This assumes that the image has
# already been built.
#
# Arguments are in the form of
# <container name> <extra docker args> -- <command>
# Run a command in the kube-build image.
# Arguments are in the form of
#   <container name> <extra docker args> -- <command>
function kube::build::run_build_command_ex() {
  [[ $# != 0 ]] || { echo "Invalid input - please specify a container name." >&2; return 4; }
  local container_name="${1}"
  shift
  # Base docker options: run as the calling user, with the shared data volumes.
  local -a docker_run_opts=(
    "--name=${container_name}"
    "--user=$(id -u):$(id -g)"
    "--hostname=${HOSTNAME}"
    "${DOCKER_MOUNT_ARGS[@]}"
  )

  local detach=false

  [[ $# != 0 ]] || { echo "Invalid input - please specify docker arguments followed by --." >&2; return 4; }
  # Everything before "--" is an arg to docker
  until [ -z "${1-}" ] ; do
    if [[ "$1" == "--" ]]; then
      shift
      break
    fi
    docker_run_opts+=("$1")
    # Remember detach mode: it changes attach flags and post-run cleanup below.
    if [[ "$1" == "-d" || "$1" == "--detach" ]] ; then
      detach=true
    fi
    shift
  done

  # Everything after "--" is the command to run
  [[ $# != 0 ]] || { echo "Invalid input - please specify a command to run." >&2; return 4; }
  local -a cmd=()
  until [ -z "${1-}" ] ; do
    cmd+=("$1")
    shift
  done

  # Forward build-relevant environment into the container (empty if unset).
  docker_run_opts+=(
    --env "KUBE_FASTBUILD=${KUBE_FASTBUILD:-false}"
    --env "KUBE_BUILDER_OS=${OSTYPE:-notdetected}"
    --env "KUBE_VERBOSE=${KUBE_VERBOSE}"
    --env "KUBE_BUILD_WITH_COVERAGE=${KUBE_BUILD_WITH_COVERAGE:-}"
    --env "KUBE_BUILD_PLATFORMS=${KUBE_BUILD_PLATFORMS:-}"
    --env "GOFLAGS=${GOFLAGS:-}"
    --env "GOGCFLAGS=${GOGCFLAGS:-}"
    --env "SOURCE_DATE_EPOCH=${SOURCE_DATE_EPOCH:-}"
  )

  # use GOLDFLAGS only if it is set explicitly.
  if [[ -v GOLDFLAGS ]]; then
    docker_run_opts+=(
      --env "GOLDFLAGS=${GOLDFLAGS:-}"
    )
  fi

  if [[ -n "${DOCKER_CGROUP_PARENT:-}" ]]; then
    kube::log::status "Using ${DOCKER_CGROUP_PARENT} as container cgroup parent"
    docker_run_opts+=(--cgroup-parent "${DOCKER_CGROUP_PARENT}")
  fi

  # If we have stdin we can run interactive. This allows things like 'shell.sh'
  # to work. However, if we run this way and don't have stdin, then it ends up
  # running in a daemon-ish mode. So if we don't have a stdin, we explicitly
  # attach stderr/stdout but don't bother asking for a tty.
  if [[ -t 0 ]]; then
    docker_run_opts+=(--interactive --tty)
  elif [[ "${detach}" == false ]]; then
    docker_run_opts+=("--attach=stdout" "--attach=stderr")
  fi

  local -ra docker_cmd=(
    "${DOCKER[@]}" run "${docker_run_opts[@]}" "${KUBE_BUILD_IMAGE}")

  # Clean up container from any previous run
  kube::build::destroy_container "${container_name}"
  "${docker_cmd[@]}" "${cmd[@]}"
  # Detached containers are left running; foreground ones are torn down here.
  if [[ "${detach}" == false ]]; then
    kube::build::destroy_container "${container_name}"
  fi
}
# Poll the rsync daemon at host $1 / port $2 until it answers, making up to
# 20 attempts roughly 0.1s apart (~2s total). Returns 0 once reachable,
# 1 on timeout.
function kube::build::rsync_probe {
  local attempt
  for (( attempt = 0; attempt < 20; attempt++ )); do
    if rsync "rsync://k8s@${1}:${2}/" \
         --password-file="${LOCAL_OUTPUT_BUILD_CONTEXT}/rsyncd.password" \
         &> /dev/null ; then
      return 0
    fi
    sleep 0.1
  done

  return 1
}
# Start up the rsync container in the background. This should be explicitly
# stopped with kube::build::stop_rsyncd_container.
#
# This will set the global var KUBE_RSYNC_ADDR to the effective port that the
# rsync daemon can be reached out.
# Start up the rsync container in the background. This should be explicitly
# stopped with kube::build::stop_rsyncd_container.
# Sets the global KUBE_RSYNC_ADDR to the effective host:port of the daemon.
function kube::build::start_rsyncd_container() {
  # Prefer iproute2's "ip address" for listing local addresses; fall back to
  # ifconfig on systems without it.
  IPTOOL=ifconfig
  if kube::build::has_ip ; then
    IPTOOL="ip address"
  fi
  kube::build::stop_rsyncd_container
  V=3 kube::log::status "Starting rsyncd container"
  # Run detached, restricting the daemon to this host's own IPv4 addresses.
  kube::build::run_build_command_ex \
    "${KUBE_RSYNC_CONTAINER_NAME}" -p 127.0.0.1:"${KUBE_RSYNC_PORT}":"${KUBE_CONTAINER_RSYNC_PORT}" -d \
    -e ALLOW_HOST="$(${IPTOOL} | grep -Eo 'inet (addr:)?([0-9]*\.){3}[0-9]*' | grep -Eo '([0-9]*\.){3}[0-9]*' | grep -v '127.0.0.1')" \
    -- /rsyncd.sh >/dev/null

  local mapped_port
  if ! mapped_port=$("${DOCKER[@]}" port "${KUBE_RSYNC_CONTAINER_NAME}" ${KUBE_CONTAINER_RSYNC_PORT} 2> /dev/null | cut -d: -f 2) ; then
    kube::log::error "Could not get effective rsync port"
    return 1
  fi

  local container_ip
  container_ip=$("${DOCKER[@]}" inspect --format '{{ .NetworkSettings.IPAddress }}' "${KUBE_RSYNC_CONTAINER_NAME}")

  # Sometimes we can reach rsync through localhost and a NAT'd port. Other
  # times (when we are running in another docker container on the Jenkins
  # machines) we have to talk directly to the container IP. There is no one
  # strategy that works in all cases so we test to figure out which situation we
  # are in.
  if kube::build::rsync_probe 127.0.0.1 "${mapped_port}"; then
    KUBE_RSYNC_ADDR="127.0.0.1:${mapped_port}"
    return 0
  elif kube::build::rsync_probe "${container_ip}" ${KUBE_CONTAINER_RSYNC_PORT}; then
    KUBE_RSYNC_ADDR="${container_ip}:${KUBE_CONTAINER_RSYNC_PORT}"
    return 0
  fi

  kube::log::error "Could not connect to rsync container."
  return 1
}
# Tear down the rsync daemon container started by start_rsyncd_container.
function kube::build::stop_rsyncd_container() {
  V=3 kube::log::status "Stopping any currently running rsyncd container"
  # Clear the advertised address so callers cannot use a stale endpoint.
  unset KUBE_RSYNC_ADDR
  kube::build::destroy_container "${KUBE_RSYNC_CONTAINER_NAME}"
}
# Wrapper around rsync: always archive mode with the generated daemon
# password; adds itemized output at high verbosity and optional compression.
function kube::build::rsync {
  local -a rsync_opts=(
    --archive
    "--password-file=${LOCAL_OUTPUT_BUILD_CONTEXT}/rsyncd.password"
  )
  if (( KUBE_VERBOSE >= 6 )); then
    rsync_opts+=("-iv")
  fi
  # Default to 0 so 'set -o nounset' does not abort when the caller never
  # exported KUBE_RSYNC_COMPRESS (no default is assigned elsewhere in this file).
  if (( ${KUBE_RSYNC_COMPRESS:-0} > 0 )); then
    rsync_opts+=("--compress-level=${KUBE_RSYNC_COMPRESS}")
  fi
  V=3 kube::log::status "Running rsync"
  rsync "${rsync_opts[@]}" "$@"
}
# This will launch rsyncd in a container and then sync the source tree to the
# container over the local network.
# This will launch rsyncd in a container and then sync the source tree to the
# container over the local network.
function kube::build::sync_to_container() {
  kube::log::status "Syncing sources to container"

  kube::build::start_rsyncd_container

  # rsync filters are a bit confusing. Here we are syncing everything except
  # output only directories and things that are not necessary like the git
  # directory and generated files. The '- /' filter prevents rsync
  # from trying to set the uid/gid/perms on the root of the sync tree.
  # As an exception, we need to sync generated files in staging/, because
  # they will not be re-generated by 'make'. Note that the 'H' filtered files
  # are hidden from rsync so they will be deleted in the target container if
  # they exist. This will allow them to be re-created in the container if
  # necessary.
  kube::build::rsync \
    --delete \
    --filter='H /.git' \
    --filter='- /.make/' \
    --filter='- /_tmp/' \
    --filter='- /_output/' \
    --filter='- /' \
    --filter='H zz_generated.*' \
    --filter='H generated.proto' \
    "${KUBE_ROOT}/" "rsync://k8s@${KUBE_RSYNC_ADDR}/k8s/"

  kube::build::stop_rsyncd_container
}
# Copy all build results back out.
# Copy all build results back out.
function kube::build::copy_output() {
  kube::log::status "Syncing out of container"

  kube::build::start_rsyncd_container

  # The filter syntax for rsync is a little obscure. It filters on files and
  # directories. If you don't go in to a directory you won't find any files
  # there. Rules are evaluated in order. The last two rules are a little
  # magic. '+ */' says to go in to every directory and '- /**' says to ignore
  # any file or directory that isn't already specifically allowed.
  #
  # We are looking to copy out all of the built binaries along with various
  # generated files.
  kube::build::rsync \
    --prune-empty-dirs \
    --filter='- /_temp/' \
    --filter='+ /vendor/' \
    --filter='+ /staging/***/Godeps/**' \
    --filter='+ /_output/dockerized/bin/**' \
    --filter='+ zz_generated.*' \
    --filter='+ generated.proto' \
    --filter='+ *.pb.go' \
    --filter='+ types.go' \
    --filter='+ */' \
    --filter='- /**' \
    "rsync://k8s@${KUBE_RSYNC_ADDR}/k8s/" "${KUBE_ROOT}"

  kube::build::stop_rsyncd_container
}
|
<reponame>luciuschoi/capistrano-unicorn-nginx
module Capistrano
  module UnicornNginx
    # Gem version string, bumped on each release.
    VERSION = "3.3.3"
  end
end
|
// Objects in a program should be replaceable with instances of their subtypes
// w/o altering the correctness of the program
#include <iostream>
// Base rectangle for the Liskov-substitution demo. Setters are virtual so
// Square can override them with its own (contract-breaking) invariant.
class Rectangle
{
protected:
    int width, height;

public:
    Rectangle(const int width, const int height)
        : width{width}, height{height} { }

    // Polymorphic base: a virtual destructor makes deleting a derived
    // object (e.g. Square) through a Rectangle* well-defined.
    virtual ~Rectangle() = default;

    int get_width() const { return width; }
    virtual void set_width(const int width) { this->width = width; }

    int get_height() const { return height; }
    virtual void set_height(const int height) { this->height = height; }

    // Area is always width * height; subclasses may couple the two.
    int area() const { return width * height; }
};
// A square keeps width == height by forcing both dimensions in each setter,
// which is exactly the Rectangle-contract violation this file demonstrates.
class Square : public Rectangle
{
public:
    Square(int size): Rectangle(size,size) {}

    void set_width(const int width) override {
        height = width;
        this->width = width;
    }

    void set_height(const int height) override {
        width = height;
        this->height = height;
    }
};
// Factory returning Rectangle BY VALUE (declarations only; no definitions
// appear in this file).
// NOTE(review): returning Rectangle by value would slice a Square back to a
// plain Rectangle -- presumably intentional as part of the LSP discussion;
// confirm before reusing this interface.
struct RectangleFactory
{
    static Rectangle create_rectangle(int w, int h);
    static Rectangle create_square(int size);
};
// Demonstrates the LSP violation: for a true Rectangle the area after
// set_height(10) equals width * 10, but a Square silently changes its
// width as well, so "expected" and "got" diverge.
void process(Rectangle& r)
{
    const int initial_width = r.get_width();
    const int expected_area = initial_width * 10;

    r.set_height(10);

    std::cout << "expected area = " << expected_area
              << ", got " << r.area() << std::endl;
}
int main342342()
{
Rectangle r{ 5,5 };
process(r);
Square s{ 5 };
process(s);
getchar();
return 0;
} |
<gh_stars>100-1000
package io.cattle.platform.systemstack.lock;
import io.cattle.platform.lock.definition.AbstractBlockingLockDefintion;
/**
 * Blocking lock definition for the scheduled-upgrade process; the name
 * "SCHEDULED.UPGRADE" identifies the lock system-wide.
 */
public class ScheduledUpgradeLock extends AbstractBlockingLockDefintion {

    public ScheduledUpgradeLock() {
        super("SCHEDULED.UPGRADE");
    }
}
|
/**
 * Models a roll of paper from which fixed-length sheets are cut off the front.
 */
public class paperCast {

    private String paperRoll;

    public paperCast(String paperRoll) {
        this.paperRoll = paperRoll;
    }

    /**
     * Cuts a sheet of {@code sheetLength} characters from the front of the
     * roll and shortens the roll accordingly. When the remaining roll is
     * shorter than requested, the roll is untouched and the message
     * "Not enough paper left!" is returned instead.
     */
    public String produceSheet(int sheetLength) {
        if (paperRoll.length() >= sheetLength) {
            final String producedSheet = paperRoll.substring(0, sheetLength);
            paperRoll = paperRoll.substring(sheetLength);
            return producedSheet;
        }
        return "Not enough paper left!";
    }
}
package com.bendoerr.saltedmocha.libsodium;
import com.bendoerr.saltedmocha.CryptoException;
import com.bendoerr.saltedmocha.Util;
import org.junit.Test;
import java.security.SecureRandom;
import java.util.Random;
import static com.bendoerr.saltedmocha.libsodium.CryptoBoxEasy.*;
import static com.bendoerr.saltedmocha.nacl.CryptoBox.crypto_box_beforenm;
import static com.bendoerr.saltedmocha.nacl.CryptoBox.crypto_box_keypair;
import static java.lang.System.arraycopy;
import static org.bouncycastle.util.Arrays.*;
import static org.bouncycastle.util.encoders.Hex.toHexString;
import static org.junit.Assert.*;
//import static com.bendoerr.saltedmocha.nacl.CryptoBox.*;
public class CryptoBoxEasyTest {
@Test
public void test_box_easy_a() throws Exception {
    System.out.println("libsodium/test/default/box_easy.c");

    // Known-answer vectors from libsodium's box_easy.c test, written as
    // hex strings (Bouncy Castle is already on the classpath) instead of
    // 100+ lines of byte literals. Same bytes as before.
    byte[] alicesk = org.bouncycastle.util.encoders.Hex.decode(
            "77076d0a7318a57d3c16c17251b26645"
          + "df4c2f87ebc0992ab177fba51db92c2a");
    byte[] bobpk = org.bouncycastle.util.encoders.Hex.decode(
            "de9edb7d7b7dc1b4d35b61c2ece43537"
          + "3f8343c85b78674dadfc7e146f882b4f");
    byte[] nonce = org.bouncycastle.util.encoders.Hex.decode(
            "69696ee955b62b73cd62bda875fc73d6"
          + "8219e0036b7a0b37");
    byte[] m = org.bouncycastle.util.encoders.Hex.decode(
            "be075fc53c81f2d5cf141316ebeb0c7b"
          + "5228c52a4c62cbd44b66849b64244ffc"
          + "e5ecbaaf33bd751a1ac728d45e6c6129"
          + "6cdc3c01233561f41db66cce314adb31"
          + "0e3be8250c46f06dceea3a7fa1348057"
          + "e2f6556ad6b1318a024a838f21af1fde"
          + "048977eb48f59ffd4924ca1c60902e52"
          + "f0a089bc76897040e082f93776384864"
          + "5e0705");
    byte[] out = org.bouncycastle.util.encoders.Hex.decode(
            "f3ffc7703f9400e52a7dfb4b3d3305d9"
          + "8e993b9f48681273c29650ba32fc76ce"
          + "48332ea7164d96a4476fb8c531a1186a"
          + "c0dfc17c98dce87b4da7f011ec48c972"
          + "71d2c20f9b928fe2270d6fb863d51738"
          + "b48eeee314a7cc8ab932164548e526ae"
          + "90224368517acfeabd6bb3732bc0e9da"
          + "99832b61ca01b6de56244a9e88d5f9b3"
          + "7973f622a43d14a6599b1f654cb45a74"
          + "e355a5");

    System.out.println("\t m: " + toHexString(m));
    System.out.println("\t n: " + toHexString(nonce));
    System.out.println("\tpk: " + toHexString(bobpk));
    System.out.println("\tsk: " + toHexString(alicesk));

    // Seal and compare against the expected MAC-prefixed ciphertext.
    byte[] c = new byte[m.length + crypto_box_MACBYTES];
    crypto_box_easy(c, m, nonce, bobpk, alicesk);
    System.out.println("\t c: " + toHexString(c));
    assertArrayEquals(out, c);
}
@Test
public void test_box_easy_b() throws Exception {
    System.out.println("libsodium/test/default/box_easy.c");

    // Same key/nonce/message vectors as test_box_easy_a, hex-encoded for
    // brevity. The unused 147-byte expected-ciphertext array was removed:
    // this path only checks the oversized-message rejection.
    byte[] alicesk = org.bouncycastle.util.encoders.Hex.decode(
            "77076d0a7318a57d3c16c17251b26645"
          + "df4c2f87ebc0992ab177fba51db92c2a");
    byte[] bobpk = org.bouncycastle.util.encoders.Hex.decode(
            "de9edb7d7b7dc1b4d35b61c2ece43537"
          + "3f8343c85b78674dadfc7e146f882b4f");
    byte[] nonce = org.bouncycastle.util.encoders.Hex.decode(
            "69696ee955b62b73cd62bda875fc73d6"
          + "8219e0036b7a0b37");
    byte[] m = org.bouncycastle.util.encoders.Hex.decode(
            "be075fc53c81f2d5cf141316ebeb0c7b"
          + "5228c52a4c62cbd44b66849b64244ffc"
          + "e5ecbaaf33bd751a1ac728d45e6c6129"
          + "6cdc3c01233561f41db66cce314adb31"
          + "0e3be8250c46f06dceea3a7fa1348057"
          + "e2f6556ad6b1318a024a838f21af1fde"
          + "048977eb48f59ffd4924ca1c60902e52"
          + "f0a089bc76897040e082f93776384864"
          + "5e0705");

    // Skipped on Travis: allocating Util.MAX_ARRAY_SIZE is too heavy there.
    if (System.getProperty("tests.on.travis") == null) {
        byte[] c = new byte[m.length + crypto_box_MACBYTES];
        byte[] bigM = new byte[Util.MAX_ARRAY_SIZE];
        arraycopy(m, 0, bigM, 0, m.length);
        try {
            crypto_box_easy(c, bigM, nonce, bobpk, alicesk);
            fail("should have thrown exception");
        } catch (CryptoException exception) {
            assertEquals("m is too big", exception.getMessage());
        }
    } else {
        System.out.println("Running on Travis. Skipped.");
    }
}
@Test
public void test_box_easy2_a() throws Exception {
    System.out.println("libsodium/test/default/box_easy2.c");

    // Round-trip a random message of random length (< 10000 bytes).
    Random rng = new SecureRandom();
    int mlen = rng.nextInt(10000);

    byte[] m = new byte[mlen];
    byte[] m2 = new byte[mlen];
    byte[] c = new byte[mlen + crypto_box_MACBYTES];

    byte[] alicepk = new byte[crypto_box_PUBLICKEYBYTES];
    byte[] alicesk = new byte[crypto_box_SECRETKEYBYTES];
    byte[] bobpk = new byte[crypto_box_PUBLICKEYBYTES];
    byte[] bobsk = new byte[crypto_box_SECRETKEYBYTES];
    byte[] nonce = new byte[crypto_box_NONCEBYTES];

    crypto_box_keypair(alicepk, alicesk);
    crypto_box_keypair(bobpk, bobsk);
    rng.nextBytes(m);
    rng.nextBytes(nonce);

    // Basic open/close: decrypting the box must give back m.
    crypto_box_easy(c, m, nonce, bobpk, alicesk);
    crypto_box_open_easy(m2, c, nonce, alicepk, bobsk);
    assertArrayEquals(m, m2);
}
@Test
public void test_box_easy2_b() throws Exception {
    System.out.println("libsodium/test/default/box_easy2.c");

    Random r = new SecureRandom();
    int mlen = r.nextInt(100);

    byte[] m = new byte[mlen];
    byte[] m2 = new byte[mlen];
    byte[] c = new byte[mlen + crypto_box_MACBYTES];
    byte[] alicepk = new byte[crypto_box_PUBLICKEYBYTES];
    byte[] alicesk = new byte[crypto_box_SECRETKEYBYTES];
    byte[] bobpk = new byte[crypto_box_PUBLICKEYBYTES];
    byte[] bobsk = new byte[crypto_box_SECRETKEYBYTES];
    byte[] nonce = new byte[crypto_box_NONCEBYTES];

    crypto_box_keypair(alicepk, alicesk);
    crypto_box_keypair(bobpk, bobsk);
    r.nextBytes(m);
    r.nextBytes(nonce);

    // BUG FIX: c was never encrypted before the truncation loop, so the
    // loop was "opening" an all-zero buffer rather than a real ciphertext
    // (upstream box_easy2.c encrypts first).
    crypto_box_easy(c, m, nonce, bobpk, alicesk);

    // Can't open truncated message
    for (int i = 0; i < mlen + crypto_box_MACBYTES; i++) {
        byte[] small = new byte[i];
        arraycopy(c, 0, small, 0, i);
        try {
            crypto_box_open_easy(m2, small, nonce, alicepk, bobsk);
            fail("short open() should have failed");
        } catch (CryptoException exception) {
            Throwable cause = exception.getCause() != null ? exception.getCause() : exception;
            if (i < crypto_box_MACBYTES)
                assertEquals("mlen=" + mlen + " and i=" + i,
                        cause.getMessage(), "c is too small");
            else
                assertEquals("mlen=" + mlen + " and i=" + i,
                        cause.getMessage(), "failed to verify");
        }
    }
}
@Test
public void test_box_easy2_c() throws Exception {
    System.out.println("libsodium/test/default/box_easy2.c");

    Random rng = new SecureRandom();
    // FIX: avoid mlen == 0 — with an empty message the
    // "ciphertext differs from plaintext" assertFalse checks below would
    // compare two empty arrays and fail spuriously.
    int mlen = 1 + rng.nextInt(9999);

    byte[] m = new byte[mlen];
    byte[] c = new byte[mlen + crypto_box_MACBYTES];

    byte[] alicepk = new byte[crypto_box_PUBLICKEYBYTES];
    byte[] alicesk = new byte[crypto_box_SECRETKEYBYTES];
    byte[] bobpk = new byte[crypto_box_PUBLICKEYBYTES];
    byte[] bobsk = new byte[crypto_box_SECRETKEYBYTES];
    byte[] nonce = new byte[crypto_box_NONCEBYTES];

    crypto_box_keypair(alicepk, alicesk);
    crypto_box_keypair(bobpk, bobsk);
    rng.nextBytes(m);
    rng.nextBytes(nonce);

    // In-place encryption: seed c with the plaintext, then encrypt from a
    // copy of its own prefix (mirrors upstream's overlapping-buffer check).
    // The unused m2 local was removed.
    arraycopy(m, 0, c, 0, mlen);
    crypto_box_easy(c, copyOf(c, mlen), nonce, bobpk, alicesk);

    // The ciphertext must not contain the plaintext at either offset.
    assertFalse(areEqual(copyOf(m, mlen), copyOf(c, mlen)));
    assertFalse(areEqual(copyOf(m, mlen), copyOfRange(c, crypto_box_MACBYTES, c.length)));

    // In-place decryption must succeed (throws on failure).
    crypto_box_open_easy(c, copyOf(c, mlen + crypto_box_MACBYTES), nonce, alicepk, bobsk);
}
@Test
public void test_box_easy2_d() throws Exception {
    System.out.println("libsodium/test/default/box_easy2.c");

    Random rng = new SecureRandom();
    int mlen = rng.nextInt(10000);

    byte[] m = new byte[mlen];
    byte[] m2 = new byte[mlen];
    byte[] c = new byte[mlen + crypto_box_MACBYTES];
    byte[] alicepk = new byte[crypto_box_PUBLICKEYBYTES];
    byte[] alicesk = new byte[crypto_box_SECRETKEYBYTES];
    byte[] bobpk = new byte[crypto_box_PUBLICKEYBYTES];
    byte[] bobsk = new byte[crypto_box_SECRETKEYBYTES];
    byte[] nonce = new byte[crypto_box_NONCEBYTES];
    byte[] k1 = new byte[crypto_box_BEFORENMBYTES];
    byte[] k2 = new byte[crypto_box_BEFORENMBYTES];

    crypto_box_keypair(alicepk, alicesk);
    crypto_box_keypair(bobpk, bobsk);
    rng.nextBytes(m);
    rng.nextBytes(nonce);

    // Precompute the shared keys for the *_afternm variants.
    crypto_box_beforenm(k1, alicepk, bobsk);
    crypto_box_beforenm(k2, bobpk, alicesk);

    // Skipped on Travis: Util.MAX_ARRAY_SIZE allocation is too heavy there.
    if (System.getProperty("tests.on.travis") == null) {
        try {
            crypto_box_easy_afternm(c, copyOf(m, Util.MAX_ARRAY_SIZE), nonce, k1);
            fail("crypto_box_easy_afternm() with a short ciphertext should have failed");
        } catch (CryptoException ce) {
            assertEquals("m is too big", ce.getMessage());
        }
    }
    else {
        System.out.println("Running on Travis. Skipped.");
    }

    // Round trip through the precomputed-key API.
    crypto_box_easy_afternm(c, m, nonce, k1);
    crypto_box_open_easy_afternm(m2, c, nonce, k2);
    assertArrayEquals(m, m2);

    try {
        crypto_box_open_easy_afternm(m2, copyOf(c, crypto_box_MACBYTES - 1), nonce, k2);
        // BUG FIX: without this fail() the test passed silently whenever
        // the undersized ciphertext was (incorrectly) accepted.
        fail("crypto_box_open_easy_afternm() with a truncated ciphertext should have failed");
    } catch (CryptoException exception) {
        Throwable cause = exception.getCause() != null ? exception.getCause() : exception;
        assertEquals("c is too small", cause.getMessage());
    }
}
@Test
public void test_box_easy2_e() throws Exception {
    System.out.println("libsodium/test/default/box_easy2.c");

    Random r = new SecureRandom();
    int mlen = r.nextInt(10000);

    // Detached mode: ciphertext and MAC live in separate buffers, so c is
    // exactly mlen bytes (no crypto_box_MACBYTES prefix).
    byte[] m = new byte[mlen];
    byte[] m2 = new byte[mlen];
    byte[] c = new byte[mlen];
    byte[] alicepk = new byte[crypto_box_PUBLICKEYBYTES];
    byte[] alicesk = new byte[crypto_box_SECRETKEYBYTES];
    byte[] bobpk = new byte[crypto_box_PUBLICKEYBYTES];
    byte[] bobsk = new byte[crypto_box_SECRETKEYBYTES];
    byte[] nonce = new byte[crypto_box_NONCEBYTES];
    byte[] mac = new byte[crypto_box_MACBYTES];

    crypto_box_keypair(alicepk, alicesk);
    crypto_box_keypair(bobpk, bobsk);
    r.nextBytes(m);
    r.nextBytes(nonce);

    // NOTE(review): "detatched" matches the (misspelled) method exported by
    // CryptoBoxEasy — fix the spelling at the declaration, not here.
    // Key roles are also reversed vs. the other tests (Bob encrypts to
    // Alice) — presumably fine for box; confirm against CryptoBoxEasy.
    crypto_box_detatched(c, mac, m, nonce, alicepk, bobsk);
    crypto_box_open_detached(m2, c, mac, nonce, bobpk, alicesk);
    assertArrayEquals(m, m2);
}
@Test
public void test_box_easy2_f() throws Exception {
    System.out.println("libsodium/test/default/box_easy2.c");

    // Detached-MAC round trip using precomputed (beforenm) shared keys.
    Random rng = new SecureRandom();
    int mlen = rng.nextInt(10000);

    byte[] m = new byte[mlen];
    byte[] m2 = new byte[mlen];
    byte[] c = new byte[mlen];
    byte[] mac = new byte[crypto_box_MACBYTES];

    byte[] alicepk = new byte[crypto_box_PUBLICKEYBYTES];
    byte[] alicesk = new byte[crypto_box_SECRETKEYBYTES];
    byte[] bobpk = new byte[crypto_box_PUBLICKEYBYTES];
    byte[] bobsk = new byte[crypto_box_SECRETKEYBYTES];
    byte[] nonce = new byte[crypto_box_NONCEBYTES];
    byte[] sendKey = new byte[crypto_box_BEFORENMBYTES];
    byte[] recvKey = new byte[crypto_box_BEFORENMBYTES];

    crypto_box_keypair(alicepk, alicesk);
    crypto_box_keypair(bobpk, bobsk);
    rng.nextBytes(m);
    rng.nextBytes(nonce);

    // Precompute both directions' shared keys.
    crypto_box_beforenm(sendKey, alicepk, bobsk);
    crypto_box_beforenm(recvKey, bobpk, alicesk);

    crypto_box_detached_afternm(c, mac, m, nonce, sendKey);
    crypto_box_open_detached_afternm(m2, c, mac, nonce, recvKey);
    assertArrayEquals(m, m2);
}
}
|
// <reponame>cj7865794408/MoErDuo_Music_Lib<filename>app/src/main/java/com/cyll/musiclake/DemoActivity.java
//package com.cyll.musiclake;
//
//import android.app.Activity;
//import android.os.Bundle;
//
//import com.cyl.musiclake.api.LogoutCallBack;
//import com.cyl.musiclake.common.NavigationHelper;
//import com.cyl.musiclake.utils.ToastUtils;
//
//import org.jetbrains.annotations.Nullable;
//
//public class DemoActivity extends Activity {
// @Override
// protected void onCreate(@Nullable Bundle savedInstanceState) {
// super.onCreate(savedInstanceState);
//// String token = "c3c1c3e31dd14f499caacb2d2b3aefea";
//// String token = "c3c1c3e31dd14f499caacb2d2b3aef22";
//// String MEDStudentId = "bba5be2582ed4796bc00cc791521b9e5";
//// String url = "http://192.168.1.181:8050/ParentServer/";
//// String sid = "81ac4d49-f85c-4bbb-87a5-cb80185d009a";
// String token = "<KEY>";
// String MEDStudentId = "3bac2219cada4f94b966ffcec34489d1";
// String url = "http://test.api.p.ajia.cn:8080/ajiau-api/ParentServer/";
// String sid = "";
// String yid = "";
// NavigationHelper.INSTANCE.navigateToPlaying(this, null, token, MEDStudentId, url, sid, yid, new LogoutCallBack() {
// @Override
// public void logoutCallBack(int code) {
// ToastUtils.show("登陆失效");
// }
// });
//// finish();
// }
//
//}
|
package myproject.repository;
import java.util.List;
import javax.transaction.Transactional;
import org.springframework.data.jpa.repository.JpaRepository;
import myproject.model.User;
/**
 * Spring Data JPA repository for {@link User} entities, keyed by integer id.
 */
public interface UserRepository extends JpaRepository<User, Integer> {

    /** True if a user with the given username exists. */
    boolean existsByUsername(String username);

    /** The user with the given username, or null if none. */
    User findByUsername(String username);

    /** Deletes the user with the given username (needs a transaction). */
    @Transactional
    void deleteByUsername(String username);

    // NOTE: the redundant re-declaration of findAll() was removed —
    // JpaRepository already declares List<User> findAll().
}
|
// <reponame>XAIOS/Hexo-AlphaDust-Mine<gh_stars>0
// Whether the fullscreen menu overlay is currently open.
var _menuOn = false;
// Opens the fullscreen menu overlay: fades the background in, staggers the
// menu items, and flags the menu as open.
function _menuShow() {
    $('nav a').addClass('menu-active');
    $('.menu-bg').show();
    $('.menu-item').css({
        opacity: 0
    });
    TweenLite.to('.menu-container', 1, {
        padding: '0 40px'
    });
    TweenLite.to('.menu-bg', 1, {
        opacity: '0.92'
    });
    TweenMax.staggerTo('.menu-item', 0.5, {
        opacity: 1
    }, 0.3);
    _menuOn = true;
    // BUG FIX: the hover handler used to be (re)bound on every open,
    // stacking duplicate handlers that toggled the class several times per
    // hover. Unbind first so exactly one copy is ever attached.
    $('.menu-bg').off('mouseenter mouseleave').hover(function() {
        $('nav a').toggleClass('menu-close-hover');
    });
}
// Closes the menu overlay: fades the background out (hiding it once the
// tween finishes), restores the container padding, resets item opacity and
// flags the menu as closed.
function _menuHide() {
    $('nav a').removeClass('menu-active');
    var bgFadeOut = {
        opacity: '0',
        onComplete: function() {
            $('.menu-bg').hide();
        }
    };
    TweenLite.to('.menu-bg', 0.5, bgFadeOut);
    TweenLite.to('.menu-container', 0.5, { padding: '0 100px' });
    $('.menu-item').css({ opacity: 0 });
    _menuOn = false;
}
// Wires up the menu toggles: the nav button flips the menu open/closed,
// and clicking the background itself (not one of its children) closes it.
function initMenu() {
    $('nav a').click(function() {
        _menuOn ? _menuHide() : _menuShow();
    });
    $('.menu-bg').click(function(e) {
        if (_menuOn && e.target === this) {
            _menuHide();
        }
    });
}
// Fades archive posts in one after another, 0.15s apart.
function displayArchives() {
    $('.archive-post').css({ opacity: 0 });
    TweenMax.staggerTo('.archive-post', 0.4, { opacity: 1 }, 0.15);
}
// Bootstrap: bind the menu handlers and animate the archive list in.
initMenu();
displayArchives();
// Prevent background scrolling behind the open menu on touch devices.
$('.menu-bg').on('touchmove', function(e) {
    e.preventDefault()
})
|
// <reponame>cchampou/esbuild-example-app
// Build script: cleans dist/, copies static assets from public/, then
// bundles src/App.tsx with esbuild (watch + inline sourcemaps in dev).
const fs = require('fs');
const path = require('path');

const outDir = 'dist';
const publicDir = 'public';

// FIX: make sure the output directory exists before cleaning it —
// readdirSync throws on a fresh checkout where dist/ is absent.
fs.mkdirSync(outDir, { recursive: true });

// Remove previous build output.
for (const file of fs.readdirSync(outDir)) {
  fs.unlinkSync(path.join(outDir, file));
}

// Copy static assets next to the bundle.
for (const file of fs.readdirSync(publicDir)) {
  fs.copyFileSync(path.join(publicDir, file), path.join(outDir, file));
}

const isDev = 'development' === process.env.NODE_ENV;

require('esbuild')
  .build({
    entryPoints: ['src/App.tsx'],
    bundle: true,
    outdir: outDir,
    minify: !isDev,
    sourcemap: isDev ? 'inline' : false,
    legalComments: 'none',
    watch: isDev,
    splitting: true,
    logLevel: 'info',
    format: 'esm',
    loader: {
      '.png': 'dataurl',
      '.svg': 'dataurl',
    },
    define: {
      'process.env.NODE_ENV': `"${process.env.NODE_ENV}"`,
    },
  })
  .catch((err) => {
    console.log(err);
    return process.exit(1);
  });
|
// <reponame>ponkotuy/hproftree<gh_stars>0
package com.ponkotuy.hprof.construct
import com.ponkotuy.hprof.lines.Result.{Sample, StartTrace, Code}
/**
*
* @author ponkotuy
* Date: 15/02/11.
*/
/** One hprof trace: its number, the stack's code lines, and its sample record. */
case class Trace(num: Int, codes: Vector[Code], sample: Sample)

object Trace {
  /** Assembles a [[Trace]] from parsed line results, materializing the codes. */
  def fromResult(start: StartTrace, codes: Iterable[Code], sample: Sample) =
    Trace(start.num, codes.toVector, sample)
}
|
// <gh_stars>0
// @flow
// Static station-search fixtures: two Hamburg stations.
// `location` is [longitude, latitude]; `eva_ids` are presumably the
// stations' EVA numbers — confirm against the consuming search API.
export default [
  {
    type: 'station_search',
    id: 2514,
    title: 'Hamburg Hbf',
    eva_ids: ['8002549', '8098549'],
    distanceInKm: 0,
    location: [10.006909, 53.552736],
  },
  {
    type: 'station_search',
    id: 2513,
    title: 'Hamburg Dammtor',
    eva_ids: ['8002548'],
    distanceInKm: 0,
    location: [9.989566, 53.560751],
  },
];
|
#!/bin/bash
# Publish a release only when the latest commit message starts with
# "RELEASE"; otherwise log and exit successfully (grep also echoes the
# matching line, which serves as build-log evidence).
if git log -1 --pretty=%B | grep "^RELEASE.*";
then
    echo "Publishing a release on commit msg"
    ./gradlew distributeBuild
else
    echo "Not a release by commit msg"
fi
|
#pragma once
#include <typed-geometry/detail/utility.hh>
#include <typed-geometry/feature/assert.hh>
namespace tg
{
/// Array type with compile time memory footprint but runtime size.
/// Holds up to N elements of T inline (no heap allocation); the first
/// size() slots are constructed, the remainder stays raw storage.
template <class T, size_t N>
struct capped_array
{
    // Smallest integer type able to index N elements of T.
    using index_t = detail::compact_size_t_typed<T, N>;

    /// Value-initializes `size` elements; requires size <= N.
    constexpr capped_array(size_t size) : _size(index_t(size))
    {
        TG_CONTRACT(size <= N);
        // BUG FIX: construct elements one by one. The previous
        // `new (&_u._data[0]) T[size]()` used array placement-new, which may
        // prepend an implementation-defined cookie to the allocation and
        // thereby write outside the provided storage.
        for (size_t i = 0; i < size; ++i)
            new (&_u._data[i]) T();
    }

    ~capped_array()
    {
        // Destroy only the constructed prefix.
        for (size_t i = 0; i < _size; ++i)
            _u._data[i].~T();
    }

    const T& operator[](size_t pos) const
    {
        TG_CONTRACT(pos < _size);
        return _u._data[pos];
    }
    T& operator[](size_t pos)
    {
        TG_CONTRACT(pos < _size);
        return _u._data[pos];
    }

    constexpr T* begin() { return &_u._data[0]; }
    constexpr T const* begin() const { return &_u._data[0]; }
    constexpr T* end() { return &_u._data[0] + _size; }
    constexpr T const* end() const { return &_u._data[0] + _size; }

    constexpr size_t size() const { return _size; }

    constexpr T* data() { return &_u._data[0]; }
    constexpr T const* data() const { return &_u._data[0]; }

    /// Element-wise equality; arrays of different size never compare equal.
    /// FIX: the capacity parameter is now size_t — the previous `int M`
    /// could not deduce against this class's size_t non-type parameter.
    template <size_t M>
    constexpr bool operator==(capped_array<T, M> const& rhs) const
    {
        if (_size != rhs._size)
            return false;
        for (size_t i = 0; i < _size; ++i)
        {
            if ((*this)[i] != rhs[i])
                return false;
        }
        return true;
    }

    template <size_t M>
    constexpr bool operator!=(capped_array<T, M> const& rhs) const
    {
        if (_size != rhs._size)
            return true;
        for (size_t i = 0; i < _size; ++i)
        {
            if ((*this)[i] != rhs[i])
                return true;
        }
        return false;
    }

private:
    /// Uninitialized storage: the union suppresses automatic construction
    /// and destruction of the T elements.
    union DataHolder {
        DataHolder() {}
        ~DataHolder() {}
        T _data[N];
    };

    index_t _size = 0;
    DataHolder _u;
};
}
|
echo "Start program"

# Day of the week as a number (1 = Monday ... 7 = Sunday).
dow=$(date +%u)

# FIX: expand the variable explicitly. The bare `dow` only worked through
# [[ ]]'s arithmetic evaluation of -eq operands; `$dow` is explicit and
# survives a move to POSIX `test`.
if [[ $dow -eq 5 ]]
then
    echo "Thank Moses it's Friday"
fi
echo "End program"
|
// <reponame>opentaps/opentaps-1
/*
* Copyright (c) Open Source Strategies, Inc.
*
* Opentaps is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Opentaps is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Opentaps. If not, see <http://www.gnu.org/licenses/>.
*/
package org.opentaps.gwt.common.client.lookup.configuration;
import java.util.Arrays;
import java.util.List;
/**
* Defines the interface between the server and client for the InvoiceItemLookupService
* Technically not a java interface, but it defines all the constants needed on both sides
* which makes the code more robust.
*/
public abstract class InvoiceItemLookupConfiguration {

    // Constants-only holder; never instantiated.
    private InvoiceItemLookupConfiguration() { }

    // GWT service endpoint names.
    public static final String URL_FIND_INVOICE_ITEMS = "gwtFindInvoiceItems";
    public static final String URL_POST_INVOICE_ITEMS = "gwtPostInvoiceItems";
    public static final String URL_POST_INVOICE_ITEMS_BATCH = "gwtPostInvoiceItemsBatch";

    // Field names used both as lookup inputs and as response columns.
    public static final String INOUT_INVOICE_ID = "invoiceId";
    public static final String INOUT_ITEM_SEQUENCE = "invoiceItemSeqId";
    public static final String INOUT_ITEM_TYPE = "invoiceItemTypeId";
    public static final String INOUT_DESCRIPTION = "description";
    public static final String INOUT_GL_ACCOUNT = "overrideGlAccountId";
    public static final String INOUT_PRODUCT = "productId";
    public static final String INOUT_QUANTITY = "quantity";
    public static final String INOUT_AMOUNT = "amount";
    public static final String INOUT_TAXABLE = "taxableFlag";
    public static final String INOUT_TAX_AUTH = "taxAuthPartyId";

    // Response-only display fields.
    public static final String OUT_TAX_AUTH_DESCRIPTION = "taxAuthPartyIdDescription";
    public static final String OUT_PRODUCT_DESCRIPTION = "productIdDescription";
    public static final String OUT_GL_ACCOUNT_DESCRIPTION = "overrideGlAccountIdDescription";

    // All columns returned by the lookup, plus the accounting tag fields.
    public static final List<String> LIST_OUT_FIELDS = AccountingTagLookupConfiguration.addAccountingTagsToFieldList(Arrays.asList(
            INOUT_INVOICE_ID,
            INOUT_ITEM_SEQUENCE,
            INOUT_ITEM_TYPE,
            INOUT_DESCRIPTION,
            INOUT_GL_ACCOUNT,
            INOUT_PRODUCT,
            INOUT_QUANTITY,
            INOUT_AMOUNT,
            INOUT_TAXABLE,
            INOUT_TAX_AUTH,
            OUT_TAX_AUTH_DESCRIPTION,
            OUT_PRODUCT_DESCRIPTION,
            OUT_GL_ACCOUNT_DESCRIPTION
    ));

    /**
     * Defines which invoice types should allow product entry for its items.
     */
    public static final List<String> WITH_PRODUCT_INVOICE_TYPES = Arrays.asList(
            "SALES_INVOICE",
            "PURCHASE_INVOICE"
    );
}
|
// <reponame>oc-elixir-elm/oc-elixir-elm.github.io
//import Native.Json //
var _elm_lang$virtual_dom$Native_VirtualDom = function() {
// Keys used by organizeFacts to group "facts" (styles, events,
// attributes, namespaced attributes) by kind.
var STYLE_KEY = 'STYLE';
var EVENT_KEY = 'EVENT';
var ATTR_KEY = 'ATTR';
var ATTR_NS_KEY = 'ATTR_NS';
//////////// VIRTUAL DOM NODES ////////////
// Virtual text node: just a type tag and the raw string content.
function text(string)
{
    var vText = {
        type: 'text',
        text: string
    };
    return vText;
}
// Curried (Elm F2) constructor for a plain virtual DOM element;
// the real work happens in nodeHelp once both lists are supplied.
function node(tag)
{
    return F2(function(factList, kidList) {
        return nodeHelp(tag, factList, kidList);
    });
}
// Builds a virtual element node from an Elm list of facts and an Elm list
// of children, precomputing the total descendant count used elsewhere for
// tree traversal offsets.
function nodeHelp(tag, factList, kidList)
{
    var parts = organizeFacts(factList);

    var kids = [];
    var descendants = 0;
    // Walk the Elm cons list ('[]' marks the end).
    for (var list = kidList; list.ctor !== '[]'; list = list._1)
    {
        var kid = list._0;
        descendants += (kid.descendantsCount || 0);
        kids.push(kid);
    }
    descendants += kids.length;

    return {
        type: 'node',
        tag: tag,
        facts: parts.facts,
        children: kids,
        namespace: parts.namespace,
        descendantsCount: descendants
    };
}
// Like nodeHelp, but children are (key, node) pairs so the patcher can
// match children by key; note the node itself lives in the pair's `_1`.
function keyedNode(tag, factList, kidList)
{
    var parts = organizeFacts(factList);

    var kids = [];
    var descendants = 0;
    for (var list = kidList; list.ctor !== '[]'; list = list._1)
    {
        var pair = list._0;
        descendants += (pair._1.descendantsCount || 0);
        kids.push(pair);
    }
    descendants += kids.length;

    return {
        type: 'keyed-node',
        tag: tag,
        facts: parts.facts,
        children: kids,
        namespace: parts.namespace,
        descendantsCount: descendants
    };
}
// Custom (native) widget node: rendering and diffing are delegated to the
// supplied `impl` object, with `model` as its state.
function custom(factList, model, impl)
{
    var organized = organizeFacts(factList);
    return {
        type: 'custom',
        facts: organized.facts,
        model: model,
        impl: impl
    };
}
// Wraps a node with a message tagger; the wrapper counts itself as one
// extra descendant so traversal offsets stay correct.
function map(tagger, node)
{
    var wrapped = {
        type: 'tagger',
        tagger: tagger,
        node: node,
        descendantsCount: (node.descendantsCount || 0) + 1
    };
    return wrapped;
}
// Lazy node: `force` recomputes the node on demand; `func` and `args` are
// kept so an unchanged thunk can be recognized without forcing it.
// (The third parameter was renamed — it used to shadow the function name.)
function thunk(func, args, force)
{
    return {
        type: 'thunk',
        func: func,
        args: args,
        thunk: force,
        node: undefined
    };
}
// Lazy node over a 1-argument view function; forced via thunk().
function lazy(fn, a)
{
    return thunk(fn, [a], function() {
        return fn(a);
    });
}
// Lazy node over a curried 2-argument view function (applied via A2).
function lazy2(fn, a, b)
{
    return thunk(fn, [a,b], function() {
        return A2(fn, a, b);
    });
}
// Lazy node over a curried 3-argument view function (applied via A3).
function lazy3(fn, a, b, c)
{
    return thunk(fn, [a,b,c], function() {
        return A3(fn, a, b, c);
    });
}
// FACTS

// Splits a fact list into a { facts, namespace } record:
//  - ATTR / ATTR_NS / EVENT facts are nested under their kind key,
//    indexed by the real attribute/event name;
//  - STYLE facts are flattened into a single style object;
//  - a 'namespace' fact is pulled out separately;
//  - anything else is stored as a plain JS property.
function organizeFacts(factList)
{
    var namespace, facts = {};

    // Walk the Elm cons list ('[]' marks the end).
    while (factList.ctor !== '[]')
    {
        var entry = factList._0;
        var key = entry.key;

        if (key === ATTR_KEY || key === ATTR_NS_KEY || key === EVENT_KEY)
        {
            var subFacts = facts[key] || {};
            subFacts[entry.realKey] = entry.value;
            facts[key] = subFacts;
        }
        else if (key === STYLE_KEY)
        {
            var styles = facts[key] || {};
            // Styles arrive as an Elm list of (name, value) pairs.
            var styleList = entry.value;
            while (styleList.ctor !== '[]')
            {
                var style = styleList._0;
                styles[style._0] = style._1;
                styleList = styleList._1;
            }
            facts[key] = styles;
        }
        else if (key === 'namespace')
        {
            namespace = entry.value;
        }
        else
        {
            facts[key] = entry.value;
        }

        factList = factList._1;
    }

    return {
        facts: facts,
        namespace: namespace
    };
}
//////////// PROPERTIES AND ATTRIBUTES ////////////

// Inline-style fact; value is an Elm list of (name, value) pairs.
function style(value)
{
    return {
        key: STYLE_KEY,
        value: value
    };
}
// Plain JS property fact: stored directly under the property name.
function property(key, value)
{
    var fact = { key: key, value: value };
    return fact;
}
// HTML attribute fact (applied with setAttribute by the fact applier).
function attribute(key, value)
{
    return {
        key: ATTR_KEY,
        realKey: key,
        value: value
    };
}
// Namespaced attribute fact; keeps the namespace alongside the value.
function attributeNS(namespace, key, value)
{
    return {
        key: ATTR_NS_KEY,
        realKey: key,
        value: {
            value: value,
            namespace: namespace
        }
    };
}
// Event-handler fact: `options` carries propagation/default flags and
// `decoder` turns the DOM event into an Elm message.
function on(name, options, decoder)
{
    return {
        key: EVENT_KEY,
        realKey: name,
        value: {
            options: options,
            decoder: decoder
        }
    };
}
// True when two event facts are interchangeable (same options and equal
// decoders) so the patcher can keep the existing listener.
function equalEvents(a, b)
{
    // BUG FIX: the original test was `!a.options === b.options`, which
    // compares a boolean against the options object (almost always false,
    // skipping the check entirely), and it then read
    // stopPropagation/preventDefault off the fact instead of its options.
    if (a.options !== b.options)
    {
        if (a.options.stopPropagation !== b.options.stopPropagation
            || a.options.preventDefault !== b.options.preventDefault)
        {
            return false;
        }
    }
    return _elm_lang$core$Native_Json.equality(a.decoder, b.decoder);
}
//////////// RENDERER ////////////

// Creates a renderer rooted at `parent`: renders the initial tree
// immediately, then batches later updates through requestAnimationFrame,
// diffing the previously drawn tree against the most recently registered
// one.
function renderer(parent, tagger, initialVirtualNode)
{
    var eventNode = { tagger: tagger, parent: undefined };

    var domNode = render(initialVirtualNode, eventNode);
    parent.appendChild(domNode);

    // 'NO_REQUEST'      -> idle, no frame scheduled
    // 'PENDING_REQUEST' -> a draw is scheduled for the next frame
    // 'EXTRA_REQUEST'   -> one spare frame kept after a draw
    var state = 'NO_REQUEST';
    var currentVirtualNode = initialVirtualNode;
    var nextVirtualNode = initialVirtualNode;

    // Records the newest tree; schedules a frame only when idle so rapid
    // updates collapse into a single draw.
    function registerVirtualNode(vNode)
    {
        if (state === 'NO_REQUEST')
        {
            rAF(updateIfNeeded);
        }
        state = 'PENDING_REQUEST';
        nextVirtualNode = vNode;
    }

    function updateIfNeeded()
    {
        switch (state)
        {
            case 'NO_REQUEST':
                // An rAF callback must never fire while idle.
                throw new Error(
                    'Unexpected draw callback.\n' +
                    'Please report this to <https://github.com/elm-lang/core/issues>.'
                );

            case 'PENDING_REQUEST':
                // Queue one extra frame so an immediately-following update
                // is drawn without re-entering the idle path.
                rAF(updateIfNeeded);
                state = 'EXTRA_REQUEST';

                var patches = diff(currentVirtualNode, nextVirtualNode);
                domNode = applyPatches(domNode, currentVirtualNode, patches, eventNode);
                currentVirtualNode = nextVirtualNode;

                return;

            case 'EXTRA_REQUEST':
                // Spare frame went unused; go back to idle.
                state = 'NO_REQUEST';
                return;
        }
    }

    return { update: registerVirtualNode };
}
// requestAnimationFrame with a ~60fps setTimeout fallback for
// environments that lack it.
var rAF =
    typeof requestAnimationFrame !== 'undefined'
        ? requestAnimationFrame
        : function(cb) { setTimeout(cb, 1000 / 60); };
//////////// RENDER ////////////

// Builds a real DOM node from a virtual node. `eventNode` is the chain of
// taggers used to route DOM events back up to the application.
function render(vNode, eventNode)
{
    switch (vNode.type)
    {
        case 'thunk':
            // Force the lazy node once and cache the result on the thunk.
            if (!vNode.node)
            {
                vNode.node = vNode.thunk();
            }
            return render(vNode.node, eventNode);

        case 'tagger':
            // Collapse nested taggers into a single event-node entry
            // (a lone tagger, or an array of them).
            var subNode = vNode.node;
            var tagger = vNode.tagger;

            while (subNode.type === 'tagger')
            {
                typeof tagger !== 'object'
                    ? tagger = [tagger, subNode.tagger]
                    : tagger.push(subNode.tagger);

                subNode = subNode.node;
            }

            var subEventRoot = {
                tagger: tagger,
                parent: eventNode
            };

            var domNode = render(subNode, subEventRoot);
            // Remember the event root so patches can retarget it later.
            domNode.elm_event_node_ref = subEventRoot;
            return domNode;

        case 'text':
            return document.createTextNode(vNode.text);

        case 'node':
            var domNode = vNode.namespace
                ? document.createElementNS(vNode.namespace, vNode.tag)
                : document.createElement(vNode.tag);

            applyFacts(domNode, eventNode, vNode.facts);

            var children = vNode.children;

            for (var i = 0; i < children.length; i++)
            {
                domNode.appendChild(render(children[i], eventNode));
            }

            return domNode;

        case 'keyed-node':
            var domNode = vNode.namespace
                ? document.createElementNS(vNode.namespace, vNode.tag)
                : document.createElement(vNode.tag);

            applyFacts(domNode, eventNode, vNode.facts);

            var children = vNode.children;

            // Keyed children are (key, node) pairs; render the `_1` half.
            for (var i = 0; i < children.length; i++)
            {
                domNode.appendChild(render(children[i]._1, eventNode));
            }

            return domNode;

        case 'custom':
            var domNode = vNode.impl.render(vNode.model);
            applyFacts(domNode, eventNode, vNode.facts);
            return domNode;
    }
}
//////////// APPLY FACTS ////////////
// Apply a facts dictionary (styles, events, attributes, properties) to a
// DOM node. The reserved keys dispatch to their specialised helpers;
// everything else is written as a plain DOM property.
function applyFacts(domNode, eventNode, facts)
{
	for (var key in facts)
	{
		var value = facts[key];

		if (key === STYLE_KEY)
		{
			applyStyles(domNode, value);
		}
		else if (key === EVENT_KEY)
		{
			applyEvents(domNode, eventNode, value);
		}
		else if (key === ATTR_KEY)
		{
			applyAttrs(domNode, value);
		}
		else if (key === ATTR_NS_KEY)
		{
			applyAttrsNS(domNode, value);
		}
		else if (key === 'value')
		{
			// only touch `value` when it actually differs, so the cursor
			// position in focused inputs is not reset needlessly
			if (domNode[key] !== value)
			{
				domNode[key] = value;
			}
		}
		else
		{
			domNode[key] = value;
		}
	}
}
function applyStyles(domNode, styles)
{
var domNodeStyle = domNode.style;
for (var key in styles)
{
domNodeStyle[key] = styles[key];
}
}
// Attach, update, or remove DOM event listeners. Handlers are cached on
// the node (elm_handlers) so an updated decoder can be swapped into an
// existing handler without re-registering the listener.
function applyEvents(domNode, eventNode, events)
{
	var allHandlers = domNode.elm_handlers || {};

	for (var key in events)
	{
		var handler = allHandlers[key];
		var value = events[key];

		if (typeof value === 'undefined')
		{
			// event was removed from the virtual node
			domNode.removeEventListener(key, handler);
			allHandlers[key] = undefined;
		}
		else if (typeof handler === 'undefined')
		{
			// first time this event appears on the node
			var handler = makeEventHandler(eventNode, value);
			domNode.addEventListener(key, handler);
			allHandlers[key] = handler;
		}
		else
		{
			// listener already attached; just swap in the new decoder/options
			handler.info = value;
		}
	}

	domNode.elm_handlers = allHandlers;
}
// Create a DOM event listener that decodes the event with the handler's
// current decoder and, on success, threads the resulting message through
// every tagger on the path toward the program root.
function makeEventHandler(eventNode, info)
{
	function eventHandler(event)
	{
		// read from the function object so applyEvents can update it in place
		var info = eventHandler.info;

		var value = A2(_elm_lang$core$Native_Json.run, info.decoder, event);

		// decode failures are silently ignored; only 'Ok' produces a message
		if (value.ctor === 'Ok')
		{
			var options = info.options;
			if (options.stopPropagation)
			{
				event.stopPropagation();
			}
			if (options.preventDefault)
			{
				event.preventDefault();
			}

			var message = value._0;

			// walk the event-node chain, applying each tagger in turn
			var currentEventNode = eventNode;
			while (currentEventNode)
			{
				var tagger = currentEventNode.tagger;
				if (typeof tagger === 'function')
				{
					message = tagger(message);
				}
				else
				{
					// collapsed tagger run stored as an array; apply innermost-first
					for (var i = tagger.length; i--; )
					{
						message = tagger[i](message);
					}
				}
				currentEventNode = currentEventNode.parent;
			}
		}
	};

	eventHandler.info = info;

	return eventHandler;
}
function applyAttrs(domNode, attrs)
{
for (var key in attrs)
{
var value = attrs[key];
if (typeof value === 'undefined')
{
domNode.removeAttribute(key);
}
else
{
domNode.setAttribute(key, value);
}
}
}
function applyAttrsNS(domNode, nsAttrs)
{
for (var key in nsAttrs)
{
var pair = nsAttrs[key];
var namespace = pair.namespace;
var value = pair.value;
if (typeof value === 'undefined')
{
domNode.removeAttributeNS(namespace, key);
}
else
{
domNode.setAttributeNS(namespace, key, value);
}
}
}
//////////// DIFF ////////////
// Compute the list of patches that transforms virtual tree `a` into `b`.
function diff(a, b)
{
	var result = [];
	diffHelp(a, b, result, 0);
	return result;
}
function makePatch(type, index, data)
{
return {
index: index,
type: type,
data: data,
domNode: undefined,
eventNode: undefined
};
}
// Recursive diff of two virtual nodes. Appends patches (in traversal
// order, with monotonically increasing indexes) to `patches`. `index` is
// the traversal index of this node in the overall tree.
function diffHelp(a, b, patches, index)
{
	// reference equality means the subtree is untouched
	if (a === b)
	{
		return;
	}

	var aType = a.type;
	var bType = b.type;

	// Bail if you run into different types of nodes. Implies that the
	// structure has changed significantly and it's not worth a diff.
	if (aType !== bType)
	{
		patches.push(makePatch('p-redraw', index, b));
		return;
	}

	// Now we know that both nodes are the same type.
	switch (bType)
	{
		case 'thunk':
			// Same function + reference-equal args: reuse the old forced
			// node without evaluating the thunk again.
			var aArgs = a.args;
			var bArgs = b.args;
			var i = aArgs.length;
			var same = a.func === b.func && i === bArgs.length;
			while (same && i--)
			{
				same = aArgs[i] === bArgs[i];
			}
			if (same)
			{
				b.node = a.node;
				return;
			}
			// force the new thunk and diff the two forced subtrees
			b.node = b.thunk();
			var subPatches = [];
			diffHelp(a.node, b.node, subPatches, 0);
			if (subPatches.length > 0)
			{
				patches.push(makePatch('p-thunk', index, subPatches));
			}
			return;

		case 'tagger':
			// gather nested taggers
			var aTaggers = a.tagger;
			var bTaggers = b.tagger;
			var nesting = false;

			var aSubNode = a.node;
			while (aSubNode.type === 'tagger')
			{
				nesting = true;

				typeof aTaggers !== 'object'
					? aTaggers = [aTaggers, aSubNode.tagger]
					: aTaggers.push(aSubNode.tagger);

				aSubNode = aSubNode.node;
			}

			var bSubNode = b.node;
			while (bSubNode.type === 'tagger')
			{
				nesting = true;

				typeof bTaggers !== 'object'
					? bTaggers = [bTaggers, bSubNode.tagger]
					: bTaggers.push(bSubNode.tagger);

				bSubNode = bSubNode.node;
			}

			// Just bail if different numbers of taggers. This implies the
			// structure of the virtual DOM has changed.
			if (nesting && aTaggers.length !== bTaggers.length)
			{
				patches.push(makePatch('p-redraw', index, b));
				return;
			}

			// check if taggers are "the same"
			if (nesting ? !pairwiseRefEqual(aTaggers, bTaggers) : aTaggers !== bTaggers)
			{
				patches.push(makePatch('p-tagger', index, bTaggers));
			}

			// diff everything below the taggers
			diffHelp(aSubNode, bSubNode, patches, index + 1);
			return;

		case 'text':
			if (a.text !== b.text)
			{
				patches.push(makePatch('p-text', index, b.text));
				return;
			}

			return;

		case 'node':
			// Bail if obvious indicators have changed. Implies more serious
			// structural changes such that it's not worth it to diff.
			if (a.tag !== b.tag || a.namespace !== b.namespace)
			{
				patches.push(makePatch('p-redraw', index, b));
				return;
			}

			var factsDiff = diffFacts(a.facts, b.facts);

			if (typeof factsDiff !== 'undefined')
			{
				patches.push(makePatch('p-facts', index, factsDiff));
			}

			diffChildren(a, b, patches, index);
			return;

		case 'keyed-node':
			// Bail if obvious indicators have changed. Implies more serious
			// structural changes such that it's not worth it to diff.
			if (a.tag !== b.tag || a.namespace !== b.namespace)
			{
				patches.push(makePatch('p-redraw', index, b));
				return;
			}

			var factsDiff = diffFacts(a.facts, b.facts);

			if (typeof factsDiff !== 'undefined')
			{
				patches.push(makePatch('p-facts', index, factsDiff));
			}

			diffKeyedChildren(a, b, patches, index);
			return;

		case 'custom':
			// custom nodes delegate their own diffing to the implementation
			if (a.impl !== b.impl)
			{
				patches.push(makePatch('p-redraw', index, b));
				return;
			}

			var factsDiff = diffFacts(a.facts, b.facts);

			if (typeof factsDiff !== 'undefined')
			{
				patches.push(makePatch('p-facts', index, factsDiff));
			}

			var patch = b.impl.diff(a,b);
			if (patch)
			{
				patches.push(makePatch('p-custom', index, patch));
				return;
			}

			return;
	}
}
// assumes the incoming arrays are the same length
function pairwiseRefEqual(as, bs)
{
for (var i = 0; i < as.length; i++)
{
if (as[i] !== bs[i])
{
return false;
}
}
return true;
}
// TODO Instead of creating a new diff object, it's possible to just test if
// there *is* a diff. During the actual patch, do the diff again and make the
// modifications directly. This way, there's no new allocations. Worth it?
// Diff two fact dictionaries. Returns an object describing the changes,
// or undefined when nothing changed. `category` is set on recursive
// calls for the nested STYLE/EVENT/ATTR/ATTR_NS sub-dictionaries and
// controls how a removal is encoded for that category.
function diffFacts(a, b, category)
{
	var diff;

	// look for changes and removals
	for (var aKey in a)
	{
		// nested fact categories are diffed recursively
		if (aKey === STYLE_KEY || aKey === EVENT_KEY || aKey === ATTR_KEY || aKey === ATTR_NS_KEY)
		{
			var subDiff = diffFacts(a[aKey], b[aKey] || {}, aKey);
			if (subDiff)
			{
				diff = diff || {};
				diff[aKey] = subDiff;
			}
			continue;
		}

		// remove if not in the new facts
		if (!(aKey in b))
		{
			diff = diff || {};
			diff[aKey] =
				(typeof category === 'undefined')
					? (typeof a[aKey] === 'string' ? '' : null)
					:
				(category === STYLE_KEY)
					? ''
					:
				(category === EVENT_KEY || category === ATTR_KEY)
					? undefined
					:
				{ namespace: a[aKey].namespace, value: undefined };

			continue;
		}

		var aValue = a[aKey];
		var bValue = b[aKey];

		// reference equal, so don't worry about it
		// ('value' is special-cased: the live DOM property may have drifted)
		if (aValue === bValue && aKey !== 'value'
			|| category === EVENT_KEY && equalEvents(aValue, bValue))
		{
			continue;
		}

		diff = diff || {};
		diff[aKey] = bValue;
	}

	// add new stuff
	for (var bKey in b)
	{
		if (!(bKey in a))
		{
			diff = diff || {};
			diff[bKey] = b[bKey];
		}
	}

	return diff;
}
// Diff plain (non-keyed) child lists pairwise by position.
function diffChildren(aParent, bParent, patches, rootIndex)
{
	var aChildren = aParent.children;
	var bChildren = bParent.children;
	var aLen = aChildren.length;
	var bLen = bChildren.length;

	// Length differences are handled wholesale: either drop the surplus
	// tail of `a` or append the extra tail of `b`.
	if (aLen > bLen)
	{
		patches.push(makePatch('p-remove-last', rootIndex, aLen - bLen));
	}
	else if (aLen < bLen)
	{
		patches.push(makePatch('p-append', rootIndex, bChildren.slice(aLen)));
	}

	// Pairwise diff of the shared prefix, advancing the traversal index
	// past each child's descendants.
	var shared = Math.min(aLen, bLen);
	var index = rootIndex;
	for (var i = 0; i < shared; i++)
	{
		var aChild = aChildren[i];
		index++;
		diffHelp(aChild, bChildren[i], patches, index);
		index += aChild.descendantsCount || 0;
	}
}
//////////// KEYED DIFF ////////////
// Diff keyed child lists. Children are matched by key, using one-element
// lookahead to recognise swaps, single inserts, and single removals;
// anything more complex falls through to the remove/insert bookkeeping
// below. All changes are batched into a single 'p-reorder' patch.
function diffKeyedChildren(aParent, bParent, patches, rootIndex)
{
	var localPatches = [];

	var changes = {}; // Dict String Entry
	var inserts = []; // Array { index : Int, entry : Entry }
	// type Entry = { tag : String, vnode : VNode, index : Int, data : _ }

	var aChildren = aParent.children;
	var bChildren = bParent.children;
	var aLen = aChildren.length;
	var bLen = bChildren.length;
	var aIndex = 0;
	var bIndex = 0;

	var index = rootIndex;

	while (aIndex < aLen && bIndex < bLen)
	{
		var a = aChildren[aIndex];
		var b = bChildren[bIndex];

		// keyed children are (key, vnode) pairs
		var aKey = a._0;
		var bKey = b._0;
		var aNode = a._1;
		var bNode = b._1;

		// check if keys match
		if (aKey === bKey)
		{
			index++;
			diffHelp(aNode, bNode, localPatches, index);
			index += aNode.descendantsCount || 0;

			aIndex++;
			bIndex++;
			continue;
		}

		// look ahead 1 to detect insertions and removals.

		var aLookAhead = aIndex + 1 < aLen;
		var bLookAhead = bIndex + 1 < bLen;

		if (aLookAhead)
		{
			var aNext = aChildren[aIndex + 1];
			var aNextKey = aNext._0;
			var aNextNode = aNext._1;
			// old list's next child matches current new child -> removal
			var oldMatch = bKey === aNextKey;
		}

		if (bLookAhead)
		{
			var bNext = bChildren[bIndex + 1];
			var bNextKey = bNext._0;
			var bNextNode = bNext._1;
			// new list's next child matches current old child -> insertion
			var newMatch = aKey === bNextKey;
		}

		// swap a and b
		if (aLookAhead && bLookAhead && newMatch && oldMatch)
		{
			index++;
			diffHelp(aNode, bNextNode, localPatches, index);
			insertNode(changes, localPatches, aKey, bNode, bIndex, inserts);
			index += aNode.descendantsCount || 0;

			index++;
			removeNode(changes, localPatches, aKey, aNextNode, index);
			index += aNextNode.descendantsCount || 0;

			aIndex += 2;
			bIndex += 2;
			continue;
		}

		// insert b
		if (bLookAhead && newMatch)
		{
			index++;
			insertNode(changes, localPatches, bKey, bNode, bIndex, inserts);
			diffHelp(aNode, bNextNode, localPatches, index);
			index += aNode.descendantsCount || 0;

			aIndex += 1;
			bIndex += 2;
			continue;
		}

		// remove a
		if (aLookAhead && oldMatch)
		{
			index++;
			removeNode(changes, localPatches, aKey, aNode, index);
			index += aNode.descendantsCount || 0;

			index++;
			diffHelp(aNextNode, bNode, localPatches, index);
			index += aNextNode.descendantsCount || 0;

			aIndex += 2;
			bIndex += 1;
			continue;
		}

		// remove a, insert b
		if (aLookAhead && bLookAhead && aNextKey === bNextKey)
		{
			index++;
			removeNode(changes, localPatches, aKey, aNode, index);
			insertNode(changes, localPatches, bKey, bNode, bIndex, inserts);
			index += aNode.descendantsCount || 0;

			index++;
			diffHelp(aNextNode, bNextNode, localPatches, index);
			index += aNextNode.descendantsCount || 0;

			aIndex += 2;
			bIndex += 2;
			continue;
		}

		// no lookahead pattern matched; give up on pairwise matching
		break;
	}

	// eat up any remaining nodes with removeNode and insertNode
	while (aIndex < aLen)
	{
		index++;
		var a = aChildren[aIndex];
		var aNode = a._1;
		removeNode(changes, localPatches, a._0, aNode, index);
		index += aNode.descendantsCount || 0;
		aIndex++;
	}

	var endInserts;
	while (bIndex < bLen)
	{
		// trailing inserts go to the end of the parent, so they carry no index
		endInserts = endInserts || [];
		var b = bChildren[bIndex];
		insertNode(changes, localPatches, b._0, b._1, undefined, endInserts);
		bIndex++;
	}

	if (localPatches.length > 0 || inserts.length > 0 || typeof endInserts !== 'undefined')
	{
		patches.push(makePatch('p-reorder', rootIndex, {
			patches: localPatches,
			inserts: inserts,
			endInserts: endInserts
		}));
	}
}
//////////// CHANGES FROM KEYED DIFF ////////////

// Suffix appended to duplicate keys so entries in `changes` stay unique.
var POSTFIX = '_elmW6BL';
// Register the insertion of `vnode` under `key`. If the same key was
// removed earlier in this diff, the remove/insert pair is upgraded to a
// 'move' so the existing DOM node can be reused.
function insertNode(changes, localPatches, key, vnode, bIndex, inserts)
{
	var entry = changes[key];

	// never seen this key before
	if (typeof entry === 'undefined')
	{
		entry = {
			tag: 'insert',
			vnode: vnode,
			index: bIndex,
			data: undefined
		};

		inserts.push({ index: bIndex, entry: entry });
		changes[key] = entry;

		return;
	}

	// this key was removed earlier, a match!
	if (entry.tag === 'remove')
	{
		inserts.push({ index: bIndex, entry: entry });

		entry.tag = 'move';
		var subPatches = [];
		// diff the removed vnode against the new one at the old position
		diffHelp(entry.vnode, vnode, subPatches, entry.index);
		entry.index = bIndex;
		// entry.data is the 'p-remove' patch created by removeNode; attach
		// the sub-patches and entry so applyPatch can carry out the move
		entry.data.data = {
			patches: subPatches,
			entry: entry
		};

		return;
	}

	// this key has already been inserted or moved, a duplicate!
	insertNode(changes, localPatches, key + POSTFIX, vnode, bIndex, inserts);
}
// Register the removal of `vnode` under `key`. If the same key was
// inserted earlier in this diff, the insert/remove pair is upgraded to a
// 'move' so the DOM node can be detached and reinserted instead.
function removeNode(changes, localPatches, key, vnode, index)
{
	var entry = changes[key];

	// never seen this key before
	if (typeof entry === 'undefined')
	{
		var patch = makePatch('p-remove', index, undefined);
		localPatches.push(patch);

		changes[key] = {
			tag: 'remove',
			vnode: vnode,
			index: index,
			data: patch
		};

		return;
	}

	// this key was inserted earlier, a match!
	if (entry.tag === 'insert')
	{
		entry.tag = 'move';
		var subPatches = [];
		diffHelp(vnode, entry.vnode, subPatches, index);

		// the 'p-remove' patch carries the sub-patches plus the entry so
		// applyPatch can detach the node and hand it to the insert side
		var patch = makePatch('p-remove', index, {
			patches: subPatches,
			entry: entry
		});
		localPatches.push(patch);

		return;
	}

	// this key has already been removed or moved, a duplicate!
	removeNode(changes, localPatches, key + POSTFIX, vnode, index);
}
//////////// ADD DOM NODES ////////////
//
// Each DOM node has an "index" assigned in order of traversal. It is important
// to minimize our crawl over the actual DOM, so these indexes (along with the
// descendantsCount of virtual nodes) let us skip touching entire subtrees of
// the DOM if we know there are no patches there.
// Walk the real DOM and virtual tree together, attaching the actual DOM
// node (and its owning event node) to every patch before it is applied.
function addDomNodes(domNode, vNode, patches, eventNode)
{
	addDomNodesHelp(domNode, vNode, patches, 0, 0, vNode.descendantsCount, eventNode);
}
// assumes `patches` is non-empty and indexes increase monotonically.
// Resolve patches [i..] whose indexes fall in [low, high] against this
// subtree. Returns the index of the first unresolved patch. `low` is the
// traversal index of `domNode` itself; `high` is the index of its last
// descendant, letting whole subtrees be skipped.
function addDomNodesHelp(domNode, vNode, patches, i, low, high, eventNode)
{
	var patch = patches[i];
	var index = patch.index;

	// Attach every patch that targets the current node (index === low).
	while (index === low)
	{
		var patchType = patch.type;

		if (patchType === 'p-thunk')
		{
			// thunk sub-patches address the forced subtree; restart there
			addDomNodes(domNode, vNode.node, patch.data, eventNode);
		}
		else if (patchType === 'p-reorder')
		{
			patch.domNode = domNode;
			patch.eventNode = eventNode;

			var subPatches = patch.data.patches;
			if (subPatches.length > 0)
			{
				addDomNodesHelp(domNode, vNode, subPatches, 0, low, high, eventNode);
			}
		}
		else if (patchType === 'p-remove')
		{
			patch.domNode = domNode;
			patch.eventNode = eventNode;

			var data = patch.data;
			if (typeof data !== 'undefined')
			{
				// part of a keyed 'move': remember the DOM node so the
				// matching insert can reuse it
				data.entry.data = domNode;
				var subPatches = data.patches;
				if (subPatches.length > 0)
				{
					addDomNodesHelp(domNode, vNode, subPatches, 0, low, high, eventNode);
				}
			}
		}
		else
		{
			patch.domNode = domNode;
			patch.eventNode = eventNode;
		}

		i++;

		// stop when patches are exhausted or leave this subtree
		if (!(patch = patches[i]) || (index = patch.index) > high)
		{
			return i;
		}
	}

	// Otherwise descend into the child whose index range contains `index`.
	switch (vNode.type)
	{
		case 'tagger':
			// collapsed tagger runs share a single traversal index
			var subNode = vNode.node;

			while (subNode.type === "tagger")
			{
				subNode = subNode.node;
			}

			return addDomNodesHelp(domNode, subNode, patches, i, low + 1, high, domNode.elm_event_node_ref);

		case 'node':
			var vChildren = vNode.children;
			var childNodes = domNode.childNodes;
			for (var j = 0; j < vChildren.length; j++)
			{
				low++;
				var vChild = vChildren[j];
				var nextLow = low + (vChild.descendantsCount || 0);
				// only recurse into subtrees that contain the next patch
				if (low <= index && index <= nextLow)
				{
					i = addDomNodesHelp(childNodes[j], vChild, patches, i, low, nextLow, eventNode);
					if (!(patch = patches[i]) || (index = patch.index) > high)
					{
						return i;
					}
				}
				low = nextLow;
			}
			return i;

		case 'keyed-node':
			var vChildren = vNode.children;
			var childNodes = domNode.childNodes;
			for (var j = 0; j < vChildren.length; j++)
			{
				low++;
				// keyed children are (key, vnode) pairs
				var vChild = vChildren[j]._1;
				var nextLow = low + (vChild.descendantsCount || 0);
				if (low <= index && index <= nextLow)
				{
					i = addDomNodesHelp(childNodes[j], vChild, patches, i, low, nextLow, eventNode);
					if (!(patch = patches[i]) || (index = patch.index) > high)
					{
						return i;
					}
				}
				low = nextLow;
			}
			return i;

		case 'text':
		case 'thunk':
			throw new Error('should never traverse `text` or `thunk` nodes like this');
	}
}
//////////// APPLY PATCHES ////////////
// Patching entry point: annotate each patch with its target DOM node,
// then apply them all. Returns the (possibly replaced) root DOM node.
function applyPatches(rootDomNode, oldVirtualNode, patches, eventNode)
{
	if (patches.length > 0)
	{
		addDomNodes(rootDomNode, oldVirtualNode, patches, eventNode);
		rootDomNode = applyPatchesHelp(rootDomNode, patches);
	}
	return rootDomNode;
}
// Apply each patch at its recorded DOM node. When a patch replaces the
// root node, track the replacement so it can be returned to the caller.
function applyPatchesHelp(rootDomNode, patches)
{
	for (var i = 0; i < patches.length; i++)
	{
		var patch = patches[i];
		var target = patch.domNode;
		var replacement = applyPatch(target, patch);
		if (target === rootDomNode)
		{
			rootDomNode = replacement;
		}
	}
	return rootDomNode;
}
// Apply a single patch at its pre-resolved DOM node. Returns the node
// that now occupies that position (a new node for 'p-redraw'/'p-custom').
function applyPatch(domNode, patch)
{
	switch (patch.type)
	{
		case 'p-redraw':
			// replace the whole subtree with a fresh render
			return redraw(domNode, patch.data, patch.eventNode);

		case 'p-facts':
			applyFacts(domNode, patch.eventNode, patch.data);
			return domNode;

		case 'p-text':
			// domNode is a Text node; swap its character data in place
			domNode.replaceData(0, domNode.length, patch.data);
			return domNode;

		case 'p-thunk':
			// patch.data is the list of sub-patches for the forced thunk
			return applyPatchesHelp(domNode, patch.data);

		case 'p-tagger':
			domNode.elm_event_node_ref.tagger = patch.data;
			return domNode;

		case 'p-remove-last':
			// drop patch.data trailing children
			var i = patch.data;
			while (i--)
			{
				domNode.removeChild(domNode.lastChild);
			}
			return domNode;

		case 'p-append':
			// render and append the new trailing children
			var newNodes = patch.data;
			for (var i = 0; i < newNodes.length; i++)
			{
				domNode.appendChild(render(newNodes[i], patch.eventNode));
			}
			return domNode;

		case 'p-remove':
			var data = patch.data;
			// plain removal
			if (typeof data === 'undefined')
			{
				domNode.parentNode.removeChild(domNode);
				return domNode;
			}
			// part of a keyed 'move': detach the node, apply its pending
			// sub-patches, and stash it so the matching insert (inside a
			// 'p-reorder') can re-attach it
			var entry = data.entry;
			if (typeof entry.index !== 'undefined')
			{
				domNode.parentNode.removeChild(domNode);
			}
			entry.data = applyPatchesHelp(domNode, data.patches);
			return domNode;

		case 'p-reorder':
			var data = patch.data;

			// end inserts
			// pre-build nodes destined for the end of the parent; several
			// of them get batched into one document fragment
			var endInserts = data.endInserts;
			var end;
			if (typeof endInserts !== 'undefined')
			{
				if (endInserts.length === 1)
				{
					var insert = endInserts[0];
					var entry = insert.entry;
					var end = entry.tag === 'move'
						? entry.data
						: render(entry.vnode, patch.eventNode);
				}
				else
				{
					end = document.createDocumentFragment();
					for (var i = 0; i < endInserts.length; i++)
					{
						var insert = endInserts[i];
						var entry = insert.entry;
						var node = entry.tag === 'move'
							? entry.data
							: render(entry.vnode, patch.eventNode);
						end.appendChild(node);
					}
				}
			}

			// removals
			domNode = applyPatchesHelp(domNode, data.patches);

			// inserts
			// moves reuse the detached node; plain inserts render fresh
			var inserts = data.inserts;
			for (var i = 0; i < inserts.length; i++)
			{
				var insert = inserts[i];
				var entry = insert.entry;
				var node = entry.tag === 'move'
					? entry.data
					: render(entry.vnode, patch.eventNode);
				domNode.insertBefore(node, domNode.childNodes[insert.index]);
			}

			if (typeof end !== 'undefined')
			{
				domNode.appendChild(end);
			}

			return domNode;

		case 'p-custom':
			var impl = patch.data;
			return impl.applyPatch(domNode, impl.data);

		default:
			throw new Error('Ran into an unknown patch!');
	}
}
// Replace `domNode` with a fresh render of `vNode`, preserving the old
// event-node reference when the new subtree does not define its own.
function redraw(domNode, vNode, eventNode)
{
	var newNode = render(vNode, eventNode);
	var parentNode = domNode.parentNode;

	if (typeof newNode.elm_event_node_ref === 'undefined')
	{
		newNode.elm_event_node_ref = domNode.elm_event_node_ref;
	}

	if (parentNode && newNode !== domNode)
	{
		parentNode.replaceChild(newNode, domNode);
	}

	return newNode;
}
//////////// PROGRAMS ////////////
// Package the program's callbacks together with the DOM renderer.
function programWithFlags(details)
{
	var program = {
		init: details.init,
		update: details.update,
		subscriptions: details.subscriptions,
		view: details.view,
		renderer: renderer
	};
	return program;
}
return {
node: node,
text: text,
custom: custom,
map: F2(map),
on: F3(on),
style: style,
property: F2(property),
attribute: F2(attribute),
attributeNS: F3(attributeNS),
lazy: F2(lazy),
lazy2: F3(lazy2),
lazy3: F4(lazy3),
keyedNode: F3(keyedNode),
programWithFlags: programWithFlags
};
}(); |
<reponame>SynthSys/BioDare2-BACK
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package ed.biodare2.backend.features.search.lucene;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.*;
import org.junit.Rule;
import org.junit.rules.TemporaryFolder;
import static org.mockito.Mockito.*;
import static ed.biodare2.backend.features.search.lucene.Fields.ID;
/**
 * Unit tests for {@code LuceneWriter}, exercised against a real Lucene
 * index created in a temporary directory.
 *
 * @author tzielins
 */
public class LuceneWriterTest {

    // Fresh folder per test method; JUnit deletes it afterwards.
    @Rule
    public TemporaryFolder testFolder = new TemporaryFolder();

    Path indexDir;

    public LuceneWriterTest() {
    }

    @Before
    public void setUp() throws Exception {
        indexDir = testFolder.newFolder().toPath();
    }

    @After
    public void close() {
    }

    @Test
    public void testCreatesUsingPath() throws Exception {
        // Opening the writer should initialise index files on disk.
        try (LuceneWriter instance = new LuceneWriter(indexDir)) {
            assertNotNull(instance);
            assertTrue(Files.list(indexDir).count() > 0);
        }
    }

    @Test
    public void updateCallsCommit() throws IOException {
        try (LuceneWriter instance = new LuceneWriter(indexDir)) {
            Document doc = new Document();
            doc.add(new StoredField(ID, 123));
            Term id = new Term(ID,"123");

            long resp = instance.writeDocument(id, doc);
            assertTrue(resp >= 0);

            // If writeDocument committed internally, a second commit has no
            // pending changes and Lucene's IndexWriter.commit() returns -1.
            resp = instance.indexWriter.commit();
            assertEquals(-1, resp);
        }
    }
}
|
#!/usr/bin/env python
# -*- coding:UTF-8 -*-
import re

# Print every non-overlapping occurrence of `pattern` found in `text`.
text = 'abbaadfafagafagbbabadfff'
pattern = 'ab'

for match in re.findall(pattern, text):
    # print() works under both Python 2 and 3; the original used the
    # Python 2 print statement, which is a SyntaxError on Python 3.
    print('Found "%s"' % match)
|
# Echo each command as it runs, for easier CI-log debugging.
set -x

# Install system packages.

# Debian/Ubuntu: pick the OPAM PPA matching $COMPILER. For compilers
# newer than 4.02 the PPA only ships OCaml 4.02, so DO_SWITCH marks that
# an `opam switch` to the requested version is needed later.
packages_apt () {
    case $COMPILER in
        4.01) PPA=avsm/ocaml41+opam12;;
        4.02) PPA=avsm/ocaml42+opam12;;
        4.03) PPA=avsm/ocaml42+opam12; DO_SWITCH=yes;;
        4.04) PPA=avsm/ocaml42+opam12; DO_SWITCH=yes;;
        4.05) PPA=avsm/ocaml42+opam12; DO_SWITCH=yes;;
        4.06) PPA=avsm/ocaml42+opam12; DO_SWITCH=yes;;
        4.07) PPA=avsm/ocaml42+opam12; DO_SWITCH=yes;;
        *) echo Unsupported compiler $COMPILER; exit 1;;
    esac

    sudo add-apt-repository -y ppa:$PPA
    sudo apt-get update -qq

    # Only install the distro OCaml when not switching; otherwise opam
    # builds the requested compiler itself.
    if [ -z "$DO_SWITCH" ]
    then
        sudo apt-get install -qq ocaml-nox
    fi

    sudo apt-get install -qq opam

    if [ "$LIBEV" != no ]
    then
        sudo apt-get install -qq libev-dev
    fi
}
# macOS via Homebrew. "system" uses the ocaml formula; any other version
# is built by opam later (DO_SWITCH).
packages_homebrew () {
    brew update > /dev/null

    # See https://github.com/Homebrew/homebrew-core/issues/26358.
    brew upgrade python > /dev/null

    if [ "$COMPILER" = system ]
    then
        brew install ocaml
    else
        DO_SWITCH=yes
    fi

    brew install opam

    if [ "$LIBEV" != no ]
    then
        brew install libev
    fi
}
# This code is dead for now – there is some upstream problem in MacPorts, so we
# have disabled testing on it. If that is not fixed soon, this code should be
# removed from this script.
# macOS via MacPorts, installed from prebuilt binaries hosted on
# aantron.github.io (MacPorts itself, opam 1.2, and the OCaml compiler).
packages_macports () {
    eval `wget -q -O - https://aantron.github.io/binaries/macports/x86_64/macports/current/install.sh | bash`

    # NOTE(review): piping to `cat` presumably keeps `port` output
    # non-interactive in CI logs — confirm before changing.
    sudo port install pkgconfig | cat
    if [ "$LIBEV" != no ]
    then
        sudo port install libev | cat
    fi

    wget -q -O - https://aantron.github.io/binaries/macports/x86_64/opam/1.2/install.sh | bash
    wget -q -O - https://aantron.github.io/binaries/macports/x86_64/ocaml/$COMPILER/install.sh | bash
}
# Dispatch on the requested macOS package manager (default: Homebrew).
packages_osx () {
    case $PACKAGE_MANAGER in
        macports) packages_macports;;
        *) packages_homebrew;;
    esac
}
# Dispatch on the Travis build OS.
packages () {
    case $TRAVIS_OS_NAME in
        linux) packages_apt;;
        osx) packages_osx;;
        *) echo Unsupported system $TRAVIS_OS_NAME; exit 1;;
    esac
}
# Install the per-OS system packages selected above.
packages

# Initialize OPAM and switch to the right compiler, if necessary.
case $COMPILER in
    4.01) OCAML_VERSION=4.01.0;;
    4.02) OCAML_VERSION=4.02.3;;
    4.03) OCAML_VERSION=4.03.0;;
    4.04) OCAML_VERSION=4.04.2;;
    4.05) OCAML_VERSION=4.05.0;;
    4.06) OCAML_VERSION=4.06.1;;
    4.07) OCAML_VERSION=4.07.0;;
    system) OCAML_VERSION=`ocamlc -version`;;
    *) echo Unsupported compiler $COMPILER; exit 1;;
esac

if [ "$FLAMBDA" = yes ]
then
    SWITCH="$OCAML_VERSION+flambda"
else
    SWITCH="$OCAML_VERSION"
fi

if [ -n "$DO_SWITCH" ]
then
    opam init --compiler=$SWITCH -ya
else
    opam init -ya
fi

eval `opam config env`

# Warn (but do not fail) when the installed compiler differs from the
# requested one.
ACTUAL_COMPILER=`ocamlc -version`
if [ "$ACTUAL_COMPILER" != "$OCAML_VERSION" ]
then
    echo Expected OCaml $OCAML_VERSION, but $ACTUAL_COMPILER is installed
fi

# Install Lwt's development dependencies.
make dev-deps
if [ "$LIBEV" != no ]
then
    opam install -y conf-libev
fi

# Build and run the tests.
if [ "$LIBEV" != no ]
then
    LIBEV_FLAG=true
else
    LIBEV_FLAG=false
fi

ocaml src/util/configure.ml -use-libev $LIBEV_FLAG
make build
make test
make coverage

# Run the packaging tests.
make clean
make install-for-packaging-test
make packaging-test

# Some sanity checks. `!` inverts the exit status, so these fail the
# build when an unexpected package is (or is not) installed.
if [ "$LIBEV" == no ]
then
    ! opam list -i conf-libev
fi
opam list -i ppx_tools_versioned
! opam list -i batteries
|
/*
* Copyright (C) 2019 by J.J. (<EMAIL>)
* Licensed under the Apache License, Version 2.0 (the "License");
*/
package com.amolla.service;
import com.amolla.sdk.Tube;
import com.amolla.sdk.To;
import com.amolla.service.util.UtilService;
import com.amolla.service.info.InfoService;
import com.amolla.service.port.UartService;
import android.text.TextUtils;
import android.app.Service;
import android.app.AppGlobals;
import android.content.Context;
import android.content.Intent;
import android.content.ComponentName;
import android.content.ServiceConnection;
import android.os.ServiceManager;
import android.os.IBinder;
import android.util.Log;
import java.io.File;
import java.util.HashMap;
@SuppressWarnings("unchecked")
public class DynamicService extends Service {
    private static final String TAG = DynamicService.class.getSimpleName();
    private static final boolean DEBUG = true;

    /**
     * Instantiate the service implementation named by {@code name}.
     * {@code name} is either a bare service name or
     * "SERVICE_NAME" + {@code Tube.STR_TOKEN} + argument (e.g. a UART
     * device path passed to the constructor).
     *
     * @return the binder for the requested service, or null when the name
     *         is empty, the context is null, or nothing matches.
     */
    private static IBinder getService(final String name, Context context) {
        if (TextUtils.isEmpty(name) || context == null) return null;
        final String[] token = name.split(Tube.STR_TOKEN);
        // Match by name instead of Enum.valueOf(): valueOf() throws
        // IllegalArgumentException for names belonging to the *other* enum,
        // which made the RUNTIME_SERVICE branch below unreachable and could
        // crash onBind() for runtime services.
        for (Tube.BASIC_SERVICE service : Tube.BASIC_SERVICE.values()) {
            if (service.name().equals(token[0])) {
                switch (service) {
                    case UTIL: return new UtilService(context);
                    case INFO: return new InfoService(context);
                    default: break;
                }
            }
        }
        for (Tube.RUNTIME_SERVICE service : Tube.RUNTIME_SERVICE.values()) {
            if (service.name().equals(token[0])) {
                switch (service) {
                    case PORT_UART: return new UartService(context, token[1]);
                    default: break;
                }
            }
        }
        return null;
    }

    @Override
    public IBinder onBind(Intent intent) {
        // The requested service name arrives as intent extra P0.
        return getService(intent.getStringExtra(To.P0), this);
    }

    /**
     * Fallback registration path used when no Context is available: bind to
     * this component so the system creates it, then publish the returned
     * binder via ServiceManager.
     *
     * @return true when the bind request was accepted by the framework.
     */
    private static boolean bindService(final String name) {
        Intent intent = new Intent();
        intent.setClassName(DynamicService.class.getPackage().getName().trim(), DynamicService.class.getName().trim());
        intent.putExtra(To.P0, name);
        if (AppGlobals.getInitialApplication().bindService(intent,
            new ServiceConnection() {
                @Override
                public void onServiceConnected(ComponentName componentName, IBinder iBinder) {
                    Log.i(TAG, "Connected " + componentName);
                    try {
                        ServiceManager.addService(name, iBinder);
                        Log.d(TAG, "ServiceManager added " + name);
                    } catch (Exception e) { e.printStackTrace(); }
                }

                @Override
                public void onServiceDisconnected(ComponentName componentName) {
                    // fixed typo: was "Discnnected"
                    Log.i(TAG, "Disconnected " + componentName);
                }
            }, Context.BIND_AUTO_CREATE)) {
            return true;
        }
        Log.e(TAG, "Fail to bind " + name);
        return false;
    }

    /**
     * Register the named service directly with ServiceManager, falling back
     * to {@link #bindService(String)} when no context is available.
     */
    private static boolean startService(final String name, Context context) {
        if (context == null) return bindService(name);
        try {
            ServiceManager.addService(name, getService(name, context));
            Log.d(TAG, "ServiceManager added " + name);
            return true;
        } catch (Exception e) {
            if (DEBUG) { e.printStackTrace(); }
        }
        Log.e(TAG, "Fail to start " + name);
        return false;
    }

    /**
     * Start one service instance per device path in the comma-separated
     * {@code ports} list. When {@code name} already carries a token, only
     * that exact service is started and the first existing path wins.
     *
     * @return false when any attempted start failed; true otherwise
     *         (including when every path was skipped).
     */
    public static boolean startService(final String ports, String name, Context context) {
        boolean result = true;
        if (TextUtils.isEmpty(ports)) {
            if (DEBUG) Log.d(TAG, name + " service is skipped because the port is not set");
            return result;
        }
        final boolean hasToken = name.contains(Tube.STR_TOKEN);
        final String[] path = ports.split(",");
        for (int i = 0; i < path.length; i++) {
            // new File() never returns null, so only the existence check
            // is meaningful (the original also tested `file == null`).
            if (!new File(path[i]).exists()) {
                if (DEBUG) Log.d(TAG, name + " service is skipped because the path is invalid");
                if (hasToken) return result;
                continue;
            }
            if (!startService((hasToken ? name : name + Tube.STR_TOKEN + path[i]), context)) {
                result = false;
            }
            if (hasToken) break;
        }
        return result;
    }

    /**
     * Start every default service plus each runtime service whose kernel
     * interface path is present in {@code map}.
     *
     * @param map kernel interface map (e.g. "SERIAL_UART_PORT" -> paths);
     *            when null, only the default services are attempted.
     * @return true only when every service started successfully.
     */
    public static boolean startAll(HashMap<String, String> map, Context context) {
        boolean result = true;
        for (Tube.DEFAULT_SERVICE service : Tube.DEFAULT_SERVICE.values()) {
            if (!startService(service.name() + Tube.STR_SERVICE_SUFFIX, context)) {
                result = false;
            }
        }
        if (map == null) {
            Log.e(TAG, "The runtime services could not be started because the kernel interface map is null");
            return false;
        }
        for (Tube.RUNTIME_SERVICE service : Tube.RUNTIME_SERVICE.values()) {
            switch (service) {
                case PORT_UART:
                    if (!startService(map.get("SERIAL_UART_PORT"), service.name(), context)) {
                        result = false;
                    }
                    break;
                default: break;
            }
        }
        return result;
    }
}
|
def remove_duplicates(my_arr):
    """Return a new list with duplicates removed, keeping first-seen order.

    Uses a set for O(n) membership checks on hashable elements (the
    original scanned the output list, making it O(n^2)), and falls back
    to the linear scan for unhashable elements so inputs like lists of
    lists keep working.
    """
    unique_arr = []
    seen = set()
    for elem in my_arr:
        try:
            if elem in seen:
                continue
            seen.add(elem)
        except TypeError:
            # Unhashable element: fall back to the O(n) list lookup.
            if elem in unique_arr:
                continue
        unique_arr.append(elem)
    return unique_arr
# Demo usage. The original referenced `my_arr` without ever defining it,
# which raised a NameError at runtime; define a sample input first.
my_arr = [1, 2, 2, 3, 3, 3]
unique_arr = remove_duplicates(my_arr)
print(unique_arr)
/*
* mobile
* Convert data to the API version as expected by mobile app
*/
'use strict';
var fs = require('fs');
var ent = require('ent');
var YAML = require('yamljs');
var shell = require('shelljs');
var deHtml = function(s) {
if (s) {
s = s.replace(/<br>/g, '\n');
s = s.replace(/<a[- a-zA-Z\"\'=\/:._]*>/g, '');
s = s.replace(/<\/[a-zA-Z]*>/g, '');
s = ent.decode(s);
}
return s;
};
// Source data (YAML) and API output locations.
var src = '_data';
var trg = '_site/api';

// load files from src: speakers.yml, sessions.yml, schedule-data.yml
var speakers = YAML.load(src + '/speakers.yml');
var sessions = YAML.load(src + '/sessions.yml');
var schedule = YAML.load(src + '/schedule.yml');

// give each speaker a unique number
// create output arrays
var i, j, k;
var speakerNum = {};   // speaker string id -> numeric id (index + 1)
var speakersOut = [];  // speakers in mobile API shape
var aSpeaker;
for (i=0; i<speakers.length; i++) {
	// only currently-active speakers are published
	if (!speakers[i].current) continue;
	speakerNum[speakers[i].id] = i+1;
	aSpeaker = {
		'bio': deHtml(speakers[i].bio),
		'id': i+1,
		'name': speakers[i].name + ' ' + speakers[i].surname,
		'photo': 'http://www.devfest.at/img/speakers/' + speakers[i].thumbnailUrl
	}
	// title: "title, company" when both exist, otherwise whichever is set
	if (speakers[i].title && speakers[i].company) {
		aSpeaker.title = speakers[i].title + ', ' + speakers[i].company;
	}
	else if (speakers[i].title) {
		aSpeaker.title = speakers[i].title;
	}
	else {
		aSpeaker.title = speakers[i].company;
	}
	// map social links onto dedicated fields; unrecognised links become
	// the speaker's website
	if (speakers[i].social) {
		for (j=0; j<speakers[i].social.length; j++) {
			var aLink = speakers[i].social[j];
			if (aLink.name == "twitter") {
				// store the bare handle, not the full URL
				aSpeaker.twitter = aLink.link.replace(/https?:\/\/w?w?w?.?twitter.com\//, '');
			}
			else if (aLink.name == "google-plus") {
				aSpeaker.gplus = aLink.link;
			}
			else if (aLink.name == "xing") {
				aSpeaker.xing = aLink.link;
			}
			else if (aLink.name == "linkedin") {
				aSpeaker.linkedin = aLink.link;
			}
			else if (aLink.name == "github") {
				// store the bare handle, not the full URL
				aSpeaker.github = aLink.link.replace(/https?:\/\/w?w?w?.?github.com\//, '');
			}
			else if (aLink.link.indexOf("xing.com") !== -1) {
				aSpeaker.xing = aLink.link;
			}
			else {
				aSpeaker.website = aLink.link;
			}
		}
	}
	// NOTE(review): indexing by `i` while skipping non-current speakers
	// leaves holes, which JSON.stringify emits as null entries —
	// presumably intended by the consuming app; confirm before changing.
	speakersOut[i] = aSpeaker;
}
// create assoc of sessions by id
var sessionsById = {};
for (i=0; i<sessions.length; i++) {
	sessionsById[sessions[i].id] = sessions[i];
}

var x = 1; // running numeric id assigned to output sessions
var sessionsOut = [];
// create output sessions: walk the schedule so only scheduled sessions
// are exported, in schedule order
for (i=0; i<schedule.length; i++) {
	for (j=0; j<schedule[i].timeslots.length; j++) {
		for (k=0; k<schedule[i].timeslots[j].sessionIds.length; k++) {
			var sessionId = schedule[i].timeslots[j].sessionIds[k];
			if (sessionId in sessionsById) {
				var aSession = {
					'description': deHtml(sessionsById[sessionId].description),
					'id': x,
					'title': deHtml(sessionsById[sessionId].title)
				};
				x++;
				// get session image (if exists)
				if ('imageUrl' in sessionsById[sessionId]) {
					aSession.photo = 'http://www.devfest.at/img/' + sessionsById[sessionId].imageUrl;
				}
				// get the duration in minutes from the timeslot boundaries
				var date1 = schedule[i].date + ' ' + schedule[i].timeslots[j].startTime;
				var date2 = schedule[i].date + ' ' + schedule[i].timeslots[j].endTime;
				var duration = ((new Date(date2).getTime()) - (new Date(date1).getTime())) / (1000 * 60);
				aSession.startAt = date1;
				aSession.duration = duration;
				// map speaker string ids to the numeric ids assigned earlier;
				// unknown/non-current speakers are dropped
				var sessionSpeakers = [];
				if ('speakers' in sessionsById[sessionId]) {
					for (var t=0; t<sessionsById[sessionId].speakers.length; t++) {
						var speakerStrId = sessionsById[sessionId].speakers[t];
						if (speakerStrId in speakerNum) {
							sessionSpeakers.push(speakerNum[speakerStrId]);
						}
					}
				}
				if (sessionSpeakers.length) {
					aSession.speakersId = sessionSpeakers;
				}
				// get the room id
				if (schedule[i].timeslots[j].sessionIds.length == 1) {
					if (sessionsById[sessionId].service) {
						// we have a service session
						aSession.roomId = 0;
					}
					else {
						// we have a content session
						aSession.roomId = 1;
					}
				}
				else if (schedule[i].timeslots[j].sessionIds.length == 2) {
					// first session in room 1, second in room 2
					if (k == 0) {
						aSession.roomId = 1;
					}
					else {
						aSession.roomId = 2;
					}
				}
				else {
					// three or more parallel sessions: rooms by position
					aSession.roomId = k + 1;
				}
				// add session
				sessionsOut.push(aSession);
			}
		}
	}
}
// serialize sessions & speakers to file
try {
fs.accessSync(trg);
}
catch(e) {
// fs.mkdirSync(trg);
shell.mkdir('-p', trg);
}
try {
fs.accessSync(trg + '/v1');
}
catch(e) {
fs.mkdirSync(trg + '/v1');
}
fs.writeFileSync(trg + '/v1/speakers.json', JSON.stringify(speakersOut));
fs.writeFileSync(trg + '/v1/sessions.json', JSON.stringify(sessionsOut));
|
<reponame>mvaliev/gp2s-pncc<filename>frontend/src/services/GridService.js
/*
* Copyright 2018 Genentech Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author <NAME> on 26.10.2017.
*/
import { HTTP } from '@/utils/http-common'
import ProtocolType from '@/components/App/Grid/ProtocolType'
import Polarity from '@/components/App/Admin/SurfaceTreatmentProtocol/Polarity'
import ConcentrationType from '@/components/App/ConcentrationType'
import _ from 'lodash'
const URI = 'grid/'
/**
 * Converts the raw string enum fields of a grid entity (as received from the
 * backend) into their enum object counterparts, mutating the entity in place.
 *
 * @param entity grid entity, may be null/undefined (returned unchanged)
 * @returns the same entity instance
 */
function _decodeEnums (entity) {
  if (!entity) {
    return entity
  }
  if (entity.protocolType) {
    entity.protocolType = ProtocolType.enumValueOf(entity.protocolType)
  }
  const rawPolarity = _.get(entity, 'surfaceTreatmentProtocol.polarity')
  if (rawPolarity) {
    entity.surfaceTreatmentProtocol.polarity = Polarity.enumValueOf(rawPolarity)
  }
  const rawConcentrationType = _.get(entity, 'concentration.concentrationType')
  if (rawConcentrationType) {
    entity.concentration.concentrationType = ConcentrationType.enumValueOf(rawConcentrationType)
  }
  return entity
}
/**
 * Converts the enum object fields of a grid into their string names for
 * serialization, returning a copy and leaving the caller's grid untouched.
 *
 * @param grid grid entity with decoded enum fields (protocolType required)
 * @returns a copy of the grid with enum fields replaced by their names
 */
function _encodeEnums (grid) {
  let result = Object.assign({}, grid)
  result.protocolType = grid.protocolType.name
  if (result.surfaceTreatmentProtocol) {
    // Shallow-clone the nested object before mutating it: the top-level
    // Object.assign copy shares nested references, so writing through it
    // would corrupt the caller's grid (the concentration branch below
    // already clones for the same reason).
    result.surfaceTreatmentProtocol = Object.assign({}, result.surfaceTreatmentProtocol)
    result.surfaceTreatmentProtocol.polarity = grid.surfaceTreatmentProtocol.polarity.name
  }
  const concentrationType = _.get(grid, 'concentration.concentrationType')
  if (concentrationType) {
    result.concentration = Object.assign({}, result.concentration)
    result.concentration.concentrationType = concentrationType.name
  }
  return result
}
/**
 * Nulls out whichever factor field does not apply to the selected
 * concentration type (dilution keeps only dilutionFactor, concentration
 * keeps only concentrationFactor). Mutates `result` in place.
 */
function _cleanupDilutionOrConcentrationFactorFields (result) {
  const type = _.get(result, 'concentration.concentrationType')
  switch (type) {
    case ConcentrationType.Dilution:
      _.set(result, 'concentration.concentrationFactor', null)
      break
    case ConcentrationType.Concentration:
      _.set(result, 'concentration.dilutionFactor', null)
      break
  }
}
/**
 * Produces a shallow copy of the grid with fields that do not apply to its
 * protocol type / concentration settings reset, ready for submission.
 *
 * @param grid grid entity to clean up
 * @returns shallow copy with inapplicable fields nulled
 */
function _cleanupFields (grid) {
  const cleaned = Object.assign({}, grid)
  _cleanupCryoStorageFields(cleaned)
  _cleanupStainStorageFields(cleaned)
  if (!_.get(cleaned, 'concentration.isDilutedOrConcentrated')) {
    // Not diluted/concentrated: fall back to the default type and drop both factors.
    _.set(cleaned, 'concentration.concentrationType', ConcentrationType.Concentration)
    _.set(cleaned, 'concentration.concentrationFactor', null)
    _.set(cleaned, 'concentration.dilutionFactor', null)
    return cleaned
  }
  _cleanupDilutionOrConcentrationFactorFields(cleaned)
  return cleaned
}
/**
 * Clears cryo-storage related fields when the grid does not use the Cryo
 * protocol. Mutates `result` in place.
 */
function _cleanupCryoStorageFields (result) {
  if (result.protocolType === ProtocolType.Cryo) {
    return
  }
  result.cryoStorageDevice = null
  result.cylinderNumberLabel = null
  result.tubeNumberLabel = null
  result.boxNumberLabel = null
}
/**
 * Clears stain-storage related fields when the grid does not use the Stain
 * protocol; additionally drops cryo location labels not supported by the
 * selected cryo storage device. Mutates `result` in place.
 *
 * NOTE(review): the device-capability checks live in the stain cleanup but
 * touch cryo fields — looks intentional (they only matter when a cryo device
 * survived cleanup), but confirm against the form logic.
 */
function _cleanupStainStorageFields (result) {
  if (result.protocolType === ProtocolType.Stain) {
    return
  }
  result.storageBoxLabelNumber = null
  result.positionWithinBox = null
  const device = result.cryoStorageDevice
  if (!device) {
    return
  }
  if (!device.hasCylinders) {
    result.cylinderNumberLabel = null
  }
  if (!device.hasTubes) {
    result.tubeNumberLabel = null
  }
  if (!device.hasBoxes) {
    result.boxNumberLabel = null
  }
}
/**
 * REST client for grid resources. All methods return the promise produced by
 * the shared HTTP client; enum fields are decoded on responses that carry a
 * grid payload and encoded on requests that send one.
 */
export default class GridService {
  /** Fetches a single grid and decodes its enum fields in place. */
  getGridBy (slugOrId) {
    return HTTP.get(`${URI}${slugOrId}`).then(response => {
      _decodeEnums(response.data)
      return response
    })
  }

  /** Lists the projects a grid belongs to. */
  getGridProjectsBy (slugOrId) {
    return HTTP.get(`${URI}${slugOrId}/projects`)
  }

  /** Lists all grids. */
  listEntities () {
    return HTTP.get(URI)
  }

  /** Lists the grids of one project. */
  listEntitiesByProject (slugOrId) {
    return HTTP.get(`project/${slugOrId}/${URI}`).then(response => response)
  }

  /** Counts the grids of one project. */
  countEntitiesByProject (slugOrId) {
    return HTTP.get(`project/${slugOrId}/${URI}count`)
  }

  /** Finds grids of a project still available for use. */
  findAvailableGrids (projectSlugOrId) {
    return HTTP.get(`${URI}findAvailableGrids/${projectSlugOrId}`)
  }

  /** Same as findAvailableGrids, but always includes the grid with gridId. */
  findAvailableGridsOrGridWithId (projectSlugOrId, gridId) {
    return HTTP.get(`${URI}findAvailableGrids/${projectSlugOrId}/${gridId}`)
  }

  /** Creates a grid after cleaning inapplicable fields and encoding enums. */
  createGrid (projectSlugOrId, grid) {
    const payload = JSON.stringify(_encodeEnums(_cleanupFields(grid)))
    return HTTP.post(`${URI}${projectSlugOrId}`, payload)
  }

  /** Updates a grid after cleaning inapplicable fields and encoding enums. */
  saveGrid (projectSlugOrId, grid) {
    return HTTP.put(`${URI}${projectSlugOrId}`, _encodeEnums(_cleanupFields(grid)))
  }

  /** Fetches project-level default values for a new grid, enums decoded. */
  getDefaultValues (projectSlugOrId) {
    return HTTP.get(`default-value/grid/${projectSlugOrId}`).then(response => {
      _decodeEnums(response.data)
      return response
    })
  }
}
|
<reponame>sickhye/hazelcast-go-client
// Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License")
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package common holds protocol-level constants shared by the Hazelcast Go
// client: service names, message types, error codes, event types, and the
// binary layout of client protocol messages.
package common

// Names of the distributed-object services exposed by a Hazelcast cluster.
// These strings must match the service names used by the Java server side.
const (
	SERVICE_NAME_ATOMIC_LONG = "hz:impl:atomicLongService"
	SERVICE_NAME_ATOMIC_REFERENCE = "hz:impl:atomicReferenceService"
	SERVICE_NAME_COUNT_DOWN_LATCH = "hz:impl:countDownLatchService"
	SERVICE_NAME_ID_GENERATOR = "hz:impl:idGeneratorService"
	SERVICE_NAME_EXECUTOR = "hz:impl:executorService"
	SERVICE_NAME_LOCK = "hz:impl:lockService"
	SERVICE_NAME_LIST = "hz:impl:listService"
	SERVICE_NAME_MULTI_MAP = "hz:impl:multiMapService"
	SERVICE_NAME_MAP = "hz:impl:mapService"
	SERVICE_NAME_RELIABLE_TOPIC = "hz:impl:reliableTopicService"
	SERVICE_NAME_REPLICATED_MAP = "hz:impl:replicatedMapService"
	SERVICE_NAME_RINGBUFFER_SERVICE = "hz:impl:ringbufferService"
	SERVICE_NAME_SEMAPHORE = "hz:impl:semaphoreService"
	SERVICE_NAME_SET = "hz:impl:setService"
	SERVICE_NAME_QUEUE = "hz:impl:queueService"
	SERVICE_NAME_TOPIC = "hz:impl:topicService"
	SERVICE_NAME_ID_GENERATOR_ATOMIC_LONG_PREFIX = "hz:atomic:idGenerator:"
)

// MessageType identifies the type of a client protocol message.
type MessageType uint16

// Message types.
const (
	MESSAGE_TYPE_EXCEPTION MessageType = 109
)
// ErrorCode identifies the kind of error reported by the cluster in an
// exception message. The numeric values mirror the server-side protocol.
type ErrorCode int32

// Error codes.
const (
	ERROR_CODE_UNDEFINED ErrorCode = 0
	ERROR_CODE_ARRAY_INDEX_OUT_OF_BOUNDS ErrorCode = 1
	ERROR_CODE_ARRAY_STORE ErrorCode = 2
	ERROR_CODE_AUTHENTICATION ErrorCode = 3
	ERROR_CODE_CACHE ErrorCode = 4
	ERROR_CODE_CACHE_LOADER ErrorCode = 5
	ERROR_CODE_CACHE_NOT_EXISTS ErrorCode = 6
	ERROR_CODE_CACHE_WRITER ErrorCode = 7
	ERROR_CODE_CALLER_NOT_MEMBER ErrorCode = 8
	ERROR_CODE_CANCELLATION ErrorCode = 9
	ERROR_CODE_CLASS_CAST ErrorCode = 10
	ERROR_CODE_CLASS_NOT_FOUND ErrorCode = 11
	ERROR_CODE_CONCURRENT_MODIFICATION ErrorCode = 12
	ERROR_CODE_CONFIG_MISMATCH ErrorCode = 13
	ERROR_CODE_CONFIGURATION ErrorCode = 14
	ERROR_CODE_DISTRIBUTED_OBJECT_DESTROYED ErrorCode = 15
	ERROR_CODE_DUPLICATE_INSTANCE_NAME ErrorCode = 16
	ERROR_CODE_EOF ErrorCode = 17
	ERROR_CODE_ENTRY_PROCESSOR ErrorCode = 18
	ERROR_CODE_EXECUTION ErrorCode = 19
	ERROR_CODE_HAZELCAST ErrorCode = 20
	ERROR_CODE_HAZELCAST_INSTANCE_NOT_ACTIVE ErrorCode = 21
	ERROR_CODE_HAZELCAST_OVERLOAD ErrorCode = 22
	ERROR_CODE_HAZELCAST_SERIALIZATION ErrorCode = 23
	ERROR_CODE_IO ErrorCode = 24
	ERROR_CODE_ILLEGAL_ARGUMENT ErrorCode = 25
	ERROR_CODE_ILLEGAL_ACCESS_EXCEPTION ErrorCode = 26
	ERROR_CODE_ILLEGAL_ACCESS_ERROR ErrorCode = 27
	ERROR_CODE_ILLEGAL_MONITOR_STATE ErrorCode = 28
	ERROR_CODE_ILLEGAL_STATE ErrorCode = 29
	ERROR_CODE_ILLEGAL_THREAD_STATE ErrorCode = 30
	ERROR_CODE_INDEX_OUT_OF_BOUNDS ErrorCode = 31
	ERROR_CODE_INTERRUPTED ErrorCode = 32
	ERROR_CODE_INVALID_ADDRESS ErrorCode = 33
	ERROR_CODE_INVALID_CONFIGURATION ErrorCode = 34
	ERROR_CODE_MEMBER_LEFT ErrorCode = 35
	ERROR_CODE_NEGATIVE_ARRAY_SIZE ErrorCode = 36
	ERROR_CODE_NO_SUCH_ELEMENT ErrorCode = 37
	ERROR_CODE_NOT_SERIALIZABLE ErrorCode = 38
	ERROR_CODE_NIL_POINTER ErrorCode = 39
	ERROR_CODE_OPERATION_TIMEOUT ErrorCode = 40
	ERROR_CODE_PARTITION_MIGRATING ErrorCode = 41
	ERROR_CODE_QUERY ErrorCode = 42
	ERROR_CODE_QUERY_RESULT_SIZE_EXCEEDED ErrorCode = 43
	ERROR_CODE_QUORUM ErrorCode = 44
	ERROR_CODE_REACHED_MAX_SIZE ErrorCode = 45
	ERROR_CODE_REJECTED_EXECUTION ErrorCode = 46
	ERROR_CODE_REMOTE_MAP_REDUCE ErrorCode = 47
	ERROR_CODE_RESPONSE_ALREADY_SENT ErrorCode = 48
	ERROR_CODE_RETRYABLE_HAZELCAST ErrorCode = 49
	ERROR_CODE_RETRYABLE_IO ErrorCode = 50
	ERROR_CODE_RUNTIME ErrorCode = 51
	ERROR_CODE_SECURITY ErrorCode = 52
	ERROR_CODE_SOCKET ErrorCode = 53
	ERROR_CODE_STALE_SEQUENCE ErrorCode = 54
	ERROR_CODE_TARGET_DISCONNECTED ErrorCode = 55
	ERROR_CODE_TARGET_NOT_MEMBER ErrorCode = 56
	ERROR_CODE_TIMEOUT ErrorCode = 57
	ERROR_CODE_TOPIC_OVERLOAD ErrorCode = 58
	ERROR_CODE_TOPOLOGY_CHANGED ErrorCode = 59
	ERROR_CODE_TRANSACTION ErrorCode = 60
	ERROR_CODE_TRANSACTION_NOT_ACTIVE ErrorCode = 61
	ERROR_CODE_TRANSACTION_TIMED_OUT ErrorCode = 62
	ERROR_CODE_URI_SYNTAX ErrorCode = 63
	ERROR_CODE_UTF_DATA_FORMAT ErrorCode = 64
	ERROR_CODE_UNSUPPORTED_OPERATION ErrorCode = 65
	ERROR_CODE_WRONG_TARGET ErrorCode = 66
	ERROR_CODE_XA ErrorCode = 67
	ERROR_CODE_ACCESS_CONTROL ErrorCode = 68
	ERROR_CODE_LOGIN ErrorCode = 69
	ERROR_CODE_UNSUPPORTED_CALLBACK ErrorCode = 70
	ERROR_CODE_NO_DATA_MEMBER ErrorCode = 71
	ERROR_CODE_REPLICATED_MAP_CANT_BE_CREATED ErrorCode = 72
	ERROR_CODE_MAX_MESSAGE_SIZE_EXCEEDED ErrorCode = 73
	ERROR_CODE_WAN_REPLICATION_QUEUE_FULL ErrorCode = 74
	ERROR_CODE_ASSERTION_ERROR ErrorCode = 75
	ERROR_CODE_OUT_OF_MEMORY_ERROR ErrorCode = 76
	ERROR_CODE_STACK_OVERFLOW_ERROR ErrorCode = 77
	ERROR_CODE_NATIVE_OUT_OF_MEMORY_ERROR ErrorCode = 78
	ERROR_CODE_NOT_FOUND ErrorCode = 79
	ERROR_CODE_STALE_TASK_ID ErrorCode = 80
	ERROR_CODE_DUPLICATE_TASK ErrorCode = 81
	ERROR_CODE_STALE_TASK ErrorCode = 82
)
/*
Event response constants: type codes of event messages pushed by the cluster.
*/
const (
	EVENT_MEMBER = 200
	EVENT_MEMBERLIST = 201
	EVENT_MEMBERATTRIBUTECHANGE = 202
	EVENT_ENTRY = 203
	EVENT_ITEM = 204
	EVENT_TOPIC = 205
	EVENT_PARTITIONLOST = 206
	EVENT_DISTRIBUTEDOBJECT = 207
	EVENT_CACHEINVALIDATION = 208
	EVENT_MAPPARTITIONLOST = 209
	EVENT_CACHE = 210
	EVENT_CACHEBATCHINVALIDATION = 211
	// ENTERPRISE
	EVENT_QUERYCACHESINGLE = 212
	EVENT_QUERYCACHEBATCH = 213
	EVENT_CACHEPARTITIONLOST = 214
	EVENT_IMAPINVALIDATION = 215
	EVENT_IMAPBATCHINVALIDATION = 216
)

// Primitive field sizes (in bytes) and the byte-offset layout of the client
// protocol message header. Each *_FIELD_OFFSET builds on the previous one, so
// the header layout is: frame length, version, flags, type, correlation id,
// partition id, data offset.
const (
	BYTE_SIZE_IN_BYTES = 1
	BOOL_SIZE_IN_BYTES = 1
	UINT8_SIZE_IN_BYTES = 1
	SHORT_SIZE_IN_BYTES = 2
	CHAR_SIZE_IN_BYTES = 2
	INT_SIZE_IN_BYTES = 4
	INT32_SIZE_IN_BYTES = 4
	FLOAT_SIZE_IN_BYTES = 4
	INT64_SIZE_IN_BYTES = 8
	DOUBLE_SIZE_IN_BYTES = 8
	LONG_SIZE_IN_BYTES = 8
	VERSION = 0
	BEGIN_FLAG uint8 = 0x80
	END_FLAG uint8 = 0x40
	BEGIN_END_FLAG uint8 = BEGIN_FLAG | END_FLAG
	LISTENER_FLAG uint8 = 0x01
	PAYLOAD_OFFSET = 18
	SIZE_OFFSET = 0
	FRAME_LENGTH_FIELD_OFFSET = 0
	VERSION_FIELD_OFFSET = FRAME_LENGTH_FIELD_OFFSET + INT_SIZE_IN_BYTES
	FLAGS_FIELD_OFFSET = VERSION_FIELD_OFFSET + BYTE_SIZE_IN_BYTES
	TYPE_FIELD_OFFSET = FLAGS_FIELD_OFFSET + BYTE_SIZE_IN_BYTES
	CORRELATION_ID_FIELD_OFFSET = TYPE_FIELD_OFFSET + SHORT_SIZE_IN_BYTES
	PARTITION_ID_FIELD_OFFSET = CORRELATION_ID_FIELD_OFFSET + INT64_SIZE_IN_BYTES
	DATA_OFFSET_FIELD_OFFSET = PARTITION_ID_FIELD_OFFSET + INT_SIZE_IN_BYTES
	HEADER_SIZE = DATA_OFFSET_FIELD_OFFSET + SHORT_SIZE_IN_BYTES
	// NIL_ARRAY_LENGTH marks a nil (absent) array in the wire format.
	NIL_ARRAY_LENGTH = -1
)

// Entry event type bit flags (combinable in listener registrations).
const (
	ENTRYEVENT_ADDED int32 = 1
	ENTRYEVENT_REMOVED int32 = 2
	ENTRYEVENT_UPDATED int32 = 4
	ENTRYEVENT_EVICTED int32 = 8
	ENTRYEVENT_EVICT_ALL int32 = 16
	ENTRYEVENT_CLEAR_ALL int32 = 32
	ENTRYEVENT_MERGED int32 = 64
	ENTRYEVENT_EXPIRED int32 = 128
	ENTRYEVENT_INVALIDATION int32 = 256
)

// Collection item event types.
const (
	ITEM_ADDED int32 = 1
	ITEM_REMOVED int32 = 2
)

// Messages used when rejecting nil arguments in the public API.
const (
	NIL_KEY_IS_NOT_ALLOWED string = "nil key is not allowed"
	NIL_KEYS_ARE_NOT_ALLOWED string = "nil keys collection is not allowed"
	NIL_VALUE_IS_NOT_ALLOWED string = "nil value is not allowed"
	NIL_PREDICATE_IS_NOT_ALLOWED string = "predicate should not be nil"
	NIL_MAP_IS_NOT_ALLOWED string = "nil map is not allowed"
	NIL_ARG_IS_NOT_ALLOWED string = "nil arg is not allowed"
	NIL_SLICE_IS_NOT_ALLOWED string = "nil slice is not allowed"
	NIL_LISTENER_IS_NOT_ALLOWED string = "nil listener is not allowed"
	NIL_AGGREGATOR_IS_NOT_ALLOWED string = "aggregator should not be nil"
	NIL_PROJECTION_IS_NOT_ALLOWED string = "projection should not be nil"
)
|
<filename>node_modules/react-icons-kit/md/ic_donut_large.js
"use strict";

// Auto-generated icon definition for react-icons-kit: Material Design
// "donut large". The object mirrors the SVG element tree (viewBox plus
// nested child nodes). Do not edit by hand.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.ic_donut_large = void 0;
// NOTE(review): the generated tree nests each <rect>/<path> inside an
// identical copy of itself; the duplication comes from the generator and is
// harmless when rendered.
var ic_donut_large = {
  "viewBox": "0 0 24 24",
  "children": [{
    "name": "g",
    "attribs": {},
    "children": [{
      "name": "rect",
      "attribs": {
        "fill": "none",
        "height": "24",
        "width": "24"
      },
      "children": [{
        "name": "rect",
        "attribs": {
          "fill": "none",
          "height": "24",
          "width": "24"
        },
        "children": []
      }]
    }]
  }, {
    "name": "g",
    "attribs": {},
    "children": [{
      "name": "g",
      "attribs": {},
      "children": [{
        "name": "g",
        "attribs": {},
        "children": [{
          "name": "g",
          "attribs": {},
          "children": [{
            "name": "g",
            "attribs": {},
            "children": [{
              "name": "path",
              "attribs": {
                "d": "M11,5.08V2C6,2.5,2,6.81,2,12s4,9.5,9,10v-3.08c-3-0.48-6-3.4-6-6.92S8,5.56,11,5.08z M18.97,11H22c-0.47-5-4-8.53-9-9 v3.08C16,5.51,18.54,8,18.97,11z M13,18.92V22c5-0.47,8.53-4,9-9h-3.03C18.54,16,16,18.49,13,18.92z"
              },
              "children": [{
                "name": "path",
                "attribs": {
                  "d": "M11,5.08V2C6,2.5,2,6.81,2,12s4,9.5,9,10v-3.08c-3-0.48-6-3.4-6-6.92S8,5.56,11,5.08z M18.97,11H22c-0.47-5-4-8.53-9-9 v3.08C16,5.51,18.54,8,18.97,11z M13,18.92V22c5-0.47,8.53-4,9-9h-3.03C18.54,16,16,18.49,13,18.92z"
                },
                "children": []
              }]
            }]
          }]
        }]
      }]
    }]
  }]
};
exports.ic_donut_large = ic_donut_large;
package com.guigu.juc.countdownlatch;
import java.util.concurrent.CountDownLatch;
/**
* @Description : TODO CuntDownLatch是并发编程辅助工具之一
* @Author : yangguang
* @Date : 2019/11/20
*/
/**
 * Demonstrates {@link CountDownLatch}: the main thread blocks in
 * {@code await()} until every worker thread has called {@code countDown()}.
 */
public class CountDownLatchDemo {
    /**
     * When several threads work on the same task and the caller must wait for
     * all of them to finish, CountDownLatch is the right tool: the waiting
     * thread blocks until the latch count reaches zero.
     */
    private static int count = 6;
    // Simulates 6 athletes racing: the race only ends once all 6 reach the finish line.
    private static final CountDownLatch countDownLatch = new CountDownLatch(count);
    public static void main(String[] args) throws Exception
    {
        for(int i = 0 ; i < count; ++i)
        {
            new Thread(()->{
                System.out.println(Thread.currentThread().getName() + " one--ok--到达了终点");
                // Decrement the latch count. Note: countDown() does NOT block
                // the calling thread; only await() blocks.
                countDownLatch.countDown();
            }).start();
        }
        // Block the main thread until all workers have counted down to zero.
        countDownLatch.await();
        System.out.println("比赛结束--over");
    }
}
|
require 'minitest/autorun'
require 'yaml'
require_relative '../lib/pipeline_builder'
describe PipelineBuilder do
  describe '#build' do
    # Builds a pipeline for the given projects and parses the resulting YAML.
    def build_for(projects)
      builder = PipelineBuilder.new(projects)
      YAML.load(builder.build)
    end

    it "succeeds for an empty list of projects" do
      data = build_for([])
      assert_nil data['jobs']
    end

    it "adds an ondemand and a scheduled job for each project" do
      data = build_for([
        { 'name' => 'foo' },
        { 'name' => 'bar' }
      ])
      jobs = data['jobs'].map { |job| job['name'] }.sort
      # Use the _(value) expectation form: calling expectations directly on
      # the receiver (value.must_equal ...) is deprecated in Minitest.
      _(jobs).must_equal %w(
        zap-ondemand-bar
        zap-ondemand-foo
        zap-scheduled-bar
        zap-scheduled-foo
      )
    end

    it "uses the specified Slack channel" do
      builder = PipelineBuilder.new([
        {
          'name' => 'foo',
          'slack_channel' => 'bar'
        }
      ])
      # check the posting to the central channel
      yaml = builder.build
      # the Concourse template variable gets interpreted by YAML as a Hash, so check the unparsed version
      _(yaml).must_include 'channel: {{slack-channel}}'
      # check the posting to the specified channel
      data = YAML.load(yaml)
      job = data['jobs'].first
      step = job['plan'].last
      sub_step = step['on_success']['aggregate'].first
      _(sub_step['params']['channel']).must_equal '#bar'
    end

    it "uses the templatized Slack channel when none is specified" do
      builder = PipelineBuilder.new([
        { 'name' => 'foo' }
      ])
      yaml = builder.build
      # the Concourse template variable gets interpreted by YAML as a Hash, so check the unparsed version
      _(yaml).must_include 'channel: {{slack-channel}}'
    end
  end
end
|
package com.alipay.api.response;
import com.alipay.api.internal.mapping.ApiField;
import com.alipay.api.AlipayResponse;
/**
* ALIPAY API: zhima.credit.pe.zmgo.settle.refund response.
*
* @author auto create
* @since 1.0, 2021-08-10 15:57:37
*/
/**
 * Response payload for the zhima.credit.pe.zmgo.settle.refund API
 * (auto-generated; field semantics per the Alipay OpenAPI definition).
 */
public class ZhimaCreditPeZmgoSettleRefundResponse extends AlipayResponse {

	private static final long serialVersionUID = 1315259241176629949L;

	/** 
	 * Description of why the refund operation failed.
	 */
	@ApiField("fail_reason")
	private String failReason;

	/** 
	 * External request number (the caller's idempotency key for this refund).
	 */
	@ApiField("out_request_no")
	private String outRequestNo;

	/** 
	 * Refund amount requested in this call.
	 */
	@ApiField("refund_amount")
	private String refundAmount;

	/** 
	 * Internal unique serial number generated for the Zhima GO refund;
	 * external merchants can use it for reconciliation.
	 */
	@ApiField("refund_opt_no")
	private String refundOptNo;

	/** 
	 * For the meaning of this field, merchants should consult their Alipay
	 * integration technical contact (per the original API documentation).
	 */
	@ApiField("retry")
	private Boolean retry;

	/** 
	 * Withholding plan number associated with this refund.
	 */
	@ApiField("withhold_plan_no")
	private String withholdPlanNo;

	public void setFailReason(String failReason) {
		this.failReason = failReason;
	}
	public String getFailReason( ) {
		return this.failReason;
	}

	public void setOutRequestNo(String outRequestNo) {
		this.outRequestNo = outRequestNo;
	}
	public String getOutRequestNo( ) {
		return this.outRequestNo;
	}

	public void setRefundAmount(String refundAmount) {
		this.refundAmount = refundAmount;
	}
	public String getRefundAmount( ) {
		return this.refundAmount;
	}

	public void setRefundOptNo(String refundOptNo) {
		this.refundOptNo = refundOptNo;
	}
	public String getRefundOptNo( ) {
		return this.refundOptNo;
	}

	public void setRetry(Boolean retry) {
		this.retry = retry;
	}
	public Boolean getRetry( ) {
		return this.retry;
	}

	public void setWithholdPlanNo(String withholdPlanNo) {
		this.withholdPlanNo = withholdPlanNo;
	}
	public String getWithholdPlanNo( ) {
		return this.withholdPlanNo;
	}

}
|
<reponame>mehmetkillioglu/rclgo<gh_stars>0
package test
import (
"testing"
"unsafe"
. "github.com/smartystreets/goconvey/convey"
sensor_msgs "github.com/mehmetkillioglu/rclgo/internal/msgs/sensor_msgs/msg"
std_msgs "github.com/mehmetkillioglu/rclgo/internal/msgs/std_msgs/msg"
test_msgs "github.com/mehmetkillioglu/rclgo/internal/msgs/test_msgs/msg"
)
// TestSerDesROS2Messages round-trips a sample of std_msgs and sensor_msgs
// messages between their Go and C representations. Each case: a C fixture is
// deserialized into a fresh Go message and compared against the Go fixture,
// then serialized back into freshly allocated C memory and compared against
// the original C fixture.
func TestSerDesROS2Messages(t *testing.T) {
	// Report every failed So() instead of stopping at the first one.
	SetDefaultFailureMode(FailureContinues)

	Convey("std_msgs.ColorRGBA", t, func() {
		goObj := std_msgs.NewColorRGBA()
		std_msgs.ColorRGBATypeSupport.AsGoStruct(goObj, unsafe.Pointer(Fixture_C_std_msgs__ColorRGBA()))
		So(goObj, ShouldResemble, Fixture_Go_std_msgs__ColorRGBA())

		cobj := std_msgs.ColorRGBATypeSupport.PrepareMemory()
		defer std_msgs.ColorRGBATypeSupport.ReleaseMemory(cobj)
		std_msgs.ColorRGBATypeSupport.AsCStruct(cobj, goObj)
		So((*_Ctype_struct_std_msgs__msg__ColorRGBA)(cobj), ShouldResemble, Fixture_C_std_msgs__ColorRGBA())
	})
	Convey("std_msgs.String", t, func() {
		goObj := std_msgs.NewString()
		std_msgs.StringTypeSupport.AsGoStruct(goObj, unsafe.Pointer(Fixture_C_std_msgs__String()))
		So(goObj, ShouldResemble, Fixture_Go_std_msgs__String())

		cobj := std_msgs.StringTypeSupport.PrepareMemory()
		defer std_msgs.StringTypeSupport.ReleaseMemory(cobj)
		std_msgs.StringTypeSupport.AsCStruct(cobj, goObj)
		So((*_Ctype_struct_std_msgs__msg__String)(cobj), ShouldResemble, Fixture_C_std_msgs__String())
	})
	Convey("sensor_msgs.ChannelFloat32", t, func() {
		goObj := sensor_msgs.NewChannelFloat32()
		sensor_msgs.ChannelFloat32TypeSupport.AsGoStruct(goObj, unsafe.Pointer(Fixture_C_sensor_msgs__ChannelFloat32()))
		So(goObj, ShouldResemble, Fixture_Go_sensor_msgs__ChannelFloat32())

		cobj := sensor_msgs.ChannelFloat32TypeSupport.PrepareMemory()
		defer sensor_msgs.ChannelFloat32TypeSupport.ReleaseMemory(cobj)
		sensor_msgs.ChannelFloat32TypeSupport.AsCStruct(cobj, goObj)
		So((*_Ctype_struct_sensor_msgs__msg__ChannelFloat32)(cobj), ShouldResemble, Fixture_C_sensor_msgs__ChannelFloat32())
	})
	Convey("sensor_msgs.Illuminance", t, func() {
		goObj := sensor_msgs.NewIlluminance()
		sensor_msgs.IlluminanceTypeSupport.AsGoStruct(goObj, unsafe.Pointer(Fixture_C_sensor_msgs__Illuminance()))
		So(goObj, ShouldResemble, Fixture_Go_sensor_msgs__Illuminance())

		cobj := sensor_msgs.IlluminanceTypeSupport.PrepareMemory()
		defer sensor_msgs.IlluminanceTypeSupport.ReleaseMemory(cobj)
		sensor_msgs.IlluminanceTypeSupport.AsCStruct(cobj, goObj)
		So((*_Ctype_struct_sensor_msgs__msg__Illuminance)(cobj), ShouldResemble, Fixture_C_sensor_msgs__Illuminance())
	})
	Convey("std_msgs.Int64MultiArray", t, func() {
		goObj := std_msgs.NewInt64MultiArray()
		std_msgs.Int64MultiArrayTypeSupport.AsGoStruct(goObj, unsafe.Pointer(Fixture_C_std_msgs__Int64MultiArray()))
		So(goObj, ShouldResemble, Fixture_Go_std_msgs__Int64MultiArray())

		cobj := std_msgs.Int64MultiArrayTypeSupport.PrepareMemory()
		defer std_msgs.Int64MultiArrayTypeSupport.ReleaseMemory(cobj)
		std_msgs.Int64MultiArrayTypeSupport.AsCStruct(cobj, goObj)
		So((*_Ctype_struct_std_msgs__msg__Int64MultiArray)(cobj), ShouldResemble, Fixture_C_std_msgs__Int64MultiArray())
	})
}
/*
The ROS2 test_msgs package provides test messages covering every way a
message can be defined (arrays, bounded/unbounded sequences, nesting,
constants, defaults, wide strings, ...).
*/
// TestSerDesROS2Messages_test_msgs applies the same C->Go->C round-trip
// check as TestSerDesROS2Messages to each test_msgs message kind.
func TestSerDesROS2Messages_test_msgs(t *testing.T) {
	SetDefaultFailureMode(FailureContinues)

	Convey("test_msgs.Arrays", t, func() {
		goObj := test_msgs.NewArrays()
		test_msgs.ArraysTypeSupport.AsGoStruct(goObj, unsafe.Pointer(Fixture_C_test_msgs__Arrays()))
		So(goObj, ShouldResemble, Fixture_Go_test_msgs__Arrays())

		cobj := test_msgs.ArraysTypeSupport.PrepareMemory()
		defer test_msgs.ArraysTypeSupport.ReleaseMemory(cobj)
		test_msgs.ArraysTypeSupport.AsCStruct(cobj, goObj)
		So((*_Ctype_struct_test_msgs__msg__Arrays)(cobj), ShouldResemble, Fixture_C_test_msgs__Arrays())
	})
	Convey("test_msgs.BasicTypes", t, func() {
		goObj := test_msgs.NewBasicTypes()
		test_msgs.BasicTypesTypeSupport.AsGoStruct(goObj, unsafe.Pointer(Fixture_C_test_msgs__BasicTypes()))
		So(goObj, ShouldResemble, Fixture_Go_test_msgs__BasicTypes())

		cobj := test_msgs.BasicTypesTypeSupport.PrepareMemory()
		defer test_msgs.BasicTypesTypeSupport.ReleaseMemory(cobj)
		test_msgs.BasicTypesTypeSupport.AsCStruct(cobj, goObj)
		So((*_Ctype_struct_test_msgs__msg__BasicTypes)(cobj), ShouldResemble, Fixture_C_test_msgs__BasicTypes())
	})
	Convey("test_msgs.Builtins", t, func() {
		goObj := test_msgs.NewBuiltins()
		test_msgs.BuiltinsTypeSupport.AsGoStruct(goObj, unsafe.Pointer(Fixture_C_test_msgs__Builtins()))
		So(goObj, ShouldResemble, Fixture_Go_test_msgs__Builtins())

		cobj := test_msgs.BuiltinsTypeSupport.PrepareMemory()
		defer test_msgs.BuiltinsTypeSupport.ReleaseMemory(cobj)
		test_msgs.BuiltinsTypeSupport.AsCStruct(cobj, goObj)
		So((*_Ctype_struct_test_msgs__msg__Builtins)(cobj), ShouldResemble, Fixture_C_test_msgs__Builtins())
	})
	Convey("test_msgs.BoundedSequences", t, func() {
		goObj := test_msgs.NewBoundedSequences()
		test_msgs.BoundedSequencesTypeSupport.AsGoStruct(goObj, unsafe.Pointer(Fixture_C_test_msgs__BoundedSequences()))
		So(goObj, ShouldResemble, Fixture_Go_test_msgs__BoundedSequences())

		cobj := test_msgs.BoundedSequencesTypeSupport.PrepareMemory()
		defer test_msgs.BoundedSequencesTypeSupport.ReleaseMemory(cobj)
		test_msgs.BoundedSequencesTypeSupport.AsCStruct(cobj, goObj)
		So((*_Ctype_struct_test_msgs__msg__BoundedSequences)(cobj), ShouldResemble, Fixture_C_test_msgs__BoundedSequences())
	})
	Convey("test_msgs.Constants", t, func() {
		goObj := test_msgs.NewConstants()
		test_msgs.ConstantsTypeSupport.AsGoStruct(goObj, unsafe.Pointer(Fixture_C_test_msgs__Constants()))
		So(goObj, ShouldResemble, Fixture_Go_test_msgs__Constants())

		cobj := test_msgs.ConstantsTypeSupport.PrepareMemory()
		defer test_msgs.ConstantsTypeSupport.ReleaseMemory(cobj)
		test_msgs.ConstantsTypeSupport.AsCStruct(cobj, goObj)
		So((*_Ctype_struct_test_msgs__msg__Constants)(cobj), ShouldResemble, Fixture_C_test_msgs__Constants())
	})
	Convey("test_msgs.Defaults", t, func() {
		goObj := test_msgs.NewDefaults()
		test_msgs.DefaultsTypeSupport.AsGoStruct(goObj, unsafe.Pointer(Fixture_C_test_msgs__Defaults()))
		So(goObj, ShouldResemble, Fixture_Go_test_msgs__Defaults())

		cobj := test_msgs.DefaultsTypeSupport.PrepareMemory()
		defer test_msgs.DefaultsTypeSupport.ReleaseMemory(cobj)
		test_msgs.DefaultsTypeSupport.AsCStruct(cobj, goObj)
		So((*_Ctype_struct_test_msgs__msg__Defaults)(cobj), ShouldResemble, Fixture_C_test_msgs__Defaults())
	})
	Convey("test_msgs.Empty", t, func() {
		goObj := test_msgs.NewEmpty()
		test_msgs.EmptyTypeSupport.AsGoStruct(goObj, unsafe.Pointer(Fixture_C_test_msgs__Empty()))
		So(goObj, ShouldResemble, Fixture_Go_test_msgs__Empty())

		cobj := test_msgs.EmptyTypeSupport.PrepareMemory()
		defer test_msgs.EmptyTypeSupport.ReleaseMemory(cobj)
		test_msgs.EmptyTypeSupport.AsCStruct(cobj, goObj)
		So((*_Ctype_struct_test_msgs__msg__Empty)(cobj), ShouldResemble, Fixture_C_test_msgs__Empty())
	})
	Convey("test_msgs.MultiNested", t, func() {
		goObj := test_msgs.NewMultiNested()
		test_msgs.MultiNestedTypeSupport.AsGoStruct(goObj, unsafe.Pointer(Fixture_C_test_msgs__MultiNested()))
		So(goObj, ShouldResemble, Fixture_Go_test_msgs__MultiNested())

		cobj := test_msgs.MultiNestedTypeSupport.PrepareMemory()
		defer test_msgs.MultiNestedTypeSupport.ReleaseMemory(cobj)
		test_msgs.MultiNestedTypeSupport.AsCStruct(cobj, goObj)
		So((*_Ctype_struct_test_msgs__msg__MultiNested)(cobj), ShouldResemble, Fixture_C_test_msgs__MultiNested())
	})
	Convey("test_msgs.Nested", t, func() {
		goObj := test_msgs.NewNested()
		test_msgs.NestedTypeSupport.AsGoStruct(goObj, unsafe.Pointer(Fixture_C_test_msgs__Nested()))
		So(goObj, ShouldResemble, Fixture_Go_test_msgs__Nested())

		cobj := test_msgs.NestedTypeSupport.PrepareMemory()
		defer test_msgs.NestedTypeSupport.ReleaseMemory(cobj)
		test_msgs.NestedTypeSupport.AsCStruct(cobj, goObj)
		So((*_Ctype_struct_test_msgs__msg__Nested)(cobj), ShouldResemble, Fixture_C_test_msgs__Nested())
	})
	// Regression case: empty C sequences must map to nil/empty Go slices
	// without allocating backing memory.
	Convey("test_msgs.UnboundedSequences do not allocate memory for empty slices", t, func() {
		goObj := test_msgs.NewUnboundedSequences()
		test_msgs.UnboundedSequencesTypeSupport.AsGoStruct(goObj, unsafe.Pointer(Fixture_C_test_msgs__UnboundedSequences_no_allocate_memory_on_empty_slice()))
		So(goObj, ShouldResemble, Fixture_Go_test_msgs__UnboundedSequences_no_allocate_memory_on_empty_slice())

		cobj := test_msgs.UnboundedSequencesTypeSupport.PrepareMemory()
		defer test_msgs.UnboundedSequencesTypeSupport.ReleaseMemory(cobj)
		test_msgs.UnboundedSequencesTypeSupport.AsCStruct(cobj, goObj)
		So((*_Ctype_struct_test_msgs__msg__UnboundedSequences)(cobj), ShouldResemble, Fixture_C_test_msgs__UnboundedSequences_no_allocate_memory_on_empty_slice())
	})
	Convey("test_msgs.UnboundedSequences", t, func() {
		goObj := test_msgs.NewUnboundedSequences()
		test_msgs.UnboundedSequencesTypeSupport.AsGoStruct(goObj, unsafe.Pointer(Fixture_C_test_msgs__UnboundedSequences()))
		So(goObj, ShouldResemble, Fixture_Go_test_msgs__UnboundedSequences())

		cobj := test_msgs.UnboundedSequencesTypeSupport.PrepareMemory()
		defer test_msgs.UnboundedSequencesTypeSupport.ReleaseMemory(cobj)
		test_msgs.UnboundedSequencesTypeSupport.AsCStruct(cobj, goObj)
		So((*_Ctype_struct_test_msgs__msg__UnboundedSequences)(cobj), ShouldResemble, Fixture_C_test_msgs__UnboundedSequences())
	})
	Convey("test_msgs.WStrings", t, func() {
		goObj := test_msgs.NewWStrings()
		test_msgs.WStringsTypeSupport.AsGoStruct(goObj, unsafe.Pointer(Fixture_C_test_msgs__WStrings()))
		So(goObj, ShouldResemble, Fixture_Go_test_msgs__WStrings())

		cobj := test_msgs.WStringsTypeSupport.PrepareMemory()
		defer test_msgs.WStringsTypeSupport.ReleaseMemory(cobj)
		test_msgs.WStringsTypeSupport.AsCStruct(cobj, goObj)
		So((*_Ctype_struct_test_msgs__msg__WStrings)(cobj), ShouldResemble, Fixture_C_test_msgs__WStrings())
	})
}
// TestClone checks that Clone() produces a deep copy of each fixture message:
// the clone initially resembles the original, and (where a case mutates the
// clone) changing the clone must not affect the original. Cases without a
// mutation step only verify the resemblance of the fresh clone.
func TestClone(t *testing.T) {
	SetDefaultFailureMode(FailureContinues)
	Convey("Cloning works", t, func() {
		{
			goObj := Fixture_Go_std_msgs__ColorRGBA()
			clone := goObj.Clone()
			So(clone, ShouldResemble, goObj)
			clone.B = 200
			So(clone, ShouldNotResemble, goObj)
		}
		{
			goObj := Fixture_Go_std_msgs__String()
			clone := goObj.Clone()
			So(clone, ShouldResemble, goObj)
			clone.Data = "cloned string"
			So(clone, ShouldNotResemble, goObj)
		}
		{
			goObj := Fixture_Go_sensor_msgs__ChannelFloat32()
			clone := goObj.Clone()
			So(clone, ShouldResemble, goObj)
			// Mutating a slice element proves the backing array was copied.
			clone.Name = "cloned string"
			clone.Values[2] = 4.2
			So(clone, ShouldNotResemble, goObj)
		}
		{
			goObj := Fixture_Go_sensor_msgs__Illuminance()
			clone := goObj.Clone()
			So(clone, ShouldResemble, goObj)
			// Mutating a nested struct field proves nested messages were copied.
			clone.Header.Stamp.Nanosec = 1231234321
			So(clone, ShouldNotResemble, goObj)
		}
		{
			goObj := Fixture_Go_std_msgs__Int64MultiArray()
			clone := goObj.Clone()
			So(clone, ShouldResemble, goObj)
			clone.Data[1] = 99
			So(clone, ShouldNotResemble, goObj)
		}
		{
			goObj := Fixture_Go_test_msgs__Arrays()
			clone := goObj.Clone()
			So(clone, ShouldResemble, goObj)
			clone.BasicTypesValues[2].Uint64Value = 4312
			So(clone, ShouldNotResemble, goObj)
		}
		{
			goObj := Fixture_Go_test_msgs__BasicTypes()
			clone := goObj.Clone()
			So(clone, ShouldResemble, goObj)
		}
		{
			goObj := Fixture_Go_test_msgs__Builtins()
			clone := goObj.Clone()
			So(clone, ShouldResemble, goObj)
		}
		{
			goObj := Fixture_Go_test_msgs__BoundedSequences()
			clone := goObj.Clone()
			So(clone, ShouldResemble, goObj)
		}
		{
			goObj := Fixture_Go_test_msgs__Constants()
			clone := goObj.Clone()
			So(clone, ShouldResemble, goObj)
		}
		{
			goObj := Fixture_Go_test_msgs__Defaults()
			clone := goObj.Clone()
			So(clone, ShouldResemble, goObj)
		}
		{
			goObj := Fixture_Go_test_msgs__Empty()
			clone := goObj.Clone()
			So(clone, ShouldResemble, goObj)
		}
		{
			goObj := Fixture_Go_test_msgs__MultiNested()
			clone := goObj.Clone()
			So(clone, ShouldResemble, goObj)
		}
		{
			goObj := Fixture_Go_test_msgs__Nested()
			clone := goObj.Clone()
			So(clone, ShouldResemble, goObj)
		}
		{
			goObj := Fixture_Go_test_msgs__UnboundedSequences_no_allocate_memory_on_empty_slice()
			clone := goObj.Clone()
			So(clone, ShouldResemble, goObj)
		}
		{
			goObj := Fixture_Go_test_msgs__UnboundedSequences()
			clone := goObj.Clone()
			So(clone, ShouldResemble, goObj)
		}
		{
			goObj := Fixture_Go_test_msgs__WStrings()
			clone := goObj.Clone()
			So(clone, ShouldResemble, goObj)
		}
	})
}
// TestSetDefaults checks that SetDefaults() resets a populated fixture
// message back to the state of a freshly constructed message: the fixture
// differs from a new message before the call and resembles it afterwards.
func TestSetDefaults(t *testing.T) {
	SetDefaultFailureMode(FailureContinues)
	Convey("SetDefaults works", t, func() {
		{
			newMsg := std_msgs.NewColorRGBA()
			fxMsg := Fixture_Go_std_msgs__ColorRGBA()
			So(fxMsg, ShouldNotResemble, newMsg)
			fxMsg.SetDefaults()
			So(fxMsg, ShouldResemble, newMsg)
		}
		{
			newMsg := std_msgs.NewString()
			fxMsg := Fixture_Go_std_msgs__String()
			So(newMsg, ShouldNotResemble, fxMsg)
			fxMsg.SetDefaults()
			So(newMsg, ShouldResemble, fxMsg)
		}
		{
			newMsg := sensor_msgs.NewChannelFloat32()
			fxMsg := Fixture_Go_sensor_msgs__ChannelFloat32()
			So(newMsg, ShouldNotResemble, fxMsg)
			fxMsg.SetDefaults()
			So(newMsg, ShouldResemble, fxMsg)
		}
		{
			newMsg := sensor_msgs.NewIlluminance()
			fxMsg := Fixture_Go_sensor_msgs__Illuminance()
			So(newMsg, ShouldNotResemble, fxMsg)
			fxMsg.SetDefaults()
			So(newMsg, ShouldResemble, fxMsg)
		}
		{
			newMsg := std_msgs.NewInt64MultiArray()
			fxMsg := Fixture_Go_std_msgs__Int64MultiArray()
			So(newMsg, ShouldNotResemble, fxMsg)
			fxMsg.SetDefaults()
			So(newMsg, ShouldResemble, fxMsg)
		}
		{
			newMsg := test_msgs.NewArrays()
			fxMsg := Fixture_Go_test_msgs__Arrays()
			So(newMsg, ShouldNotResemble, fxMsg)
			fxMsg.SetDefaults()
			So(newMsg, ShouldResemble, fxMsg)
		}
		{
			newMsg := test_msgs.NewBasicTypes()
			fxMsg := Fixture_Go_test_msgs__BasicTypes()
			So(newMsg, ShouldNotResemble, fxMsg)
			fxMsg.SetDefaults()
			So(newMsg, ShouldResemble, fxMsg)
		}
		{
			newMsg := test_msgs.NewBuiltins()
			fxMsg := Fixture_Go_test_msgs__Builtins()
			So(newMsg, ShouldNotResemble, fxMsg)
			fxMsg.SetDefaults()
			So(newMsg, ShouldResemble, fxMsg)
		}
		{
			newMsg := test_msgs.NewBoundedSequences()
			fxMsg := Fixture_Go_test_msgs__BoundedSequences()
			So(newMsg, ShouldNotResemble, fxMsg)
			fxMsg.SetDefaults()
			So(newMsg, ShouldResemble, fxMsg)
		}
		{
			// NOTE(review): unlike the other cases, Constants (and Defaults
			// below) expect the fixture to ALREADY resemble a new message
			// before SetDefaults — presumably because those fixtures carry
			// only constant/default data. Confirm this is intentional.
			newMsg := test_msgs.NewConstants()
			fxMsg := Fixture_Go_test_msgs__Constants()
			So(newMsg, ShouldResemble, fxMsg)
			fxMsg.SetDefaults()
			So(newMsg, ShouldResemble, fxMsg)
		}
		{
			newMsg := test_msgs.NewDefaults()
			fxMsg := Fixture_Go_test_msgs__Defaults()
			So(newMsg, ShouldResemble, fxMsg)
			fxMsg.SetDefaults()
			So(newMsg, ShouldResemble, fxMsg)
		}
		{
			newMsg := test_msgs.NewMultiNested()
			fxMsg := Fixture_Go_test_msgs__MultiNested()
			So(newMsg, ShouldNotResemble, fxMsg)
			fxMsg.SetDefaults()
			So(newMsg, ShouldResemble, fxMsg)
		}
		{
			newMsg := test_msgs.NewNested()
			fxMsg := Fixture_Go_test_msgs__Nested()
			So(newMsg, ShouldNotResemble, fxMsg)
			fxMsg.SetDefaults()
			So(newMsg, ShouldResemble, fxMsg)
		}
		{
			newMsg := test_msgs.NewUnboundedSequences()
			fxMsg := Fixture_Go_test_msgs__UnboundedSequences()
			So(newMsg, ShouldNotResemble, fxMsg)
			fxMsg.SetDefaults()
			So(newMsg, ShouldResemble, fxMsg)
		}
		{
			newMsg := test_msgs.NewWStrings()
			fxMsg := Fixture_Go_test_msgs__WStrings()
			So(newMsg, ShouldNotResemble, fxMsg)
			fxMsg.SetDefaults()
			So(newMsg, ShouldResemble, fxMsg)
		}
	})
}
|
<gh_stars>0
//
// Copyright (c) 2010-2011 <NAME> and <NAME>
//
// This software is provided 'as-is', without any express or implied
// warranty. In no event will the authors be held liable for any damages
// arising from the use of this software.
// Permission is granted to anyone to use this software for any purpose,
// including commercial applications, and to alter it and redistribute it
// freely, subject to the following restrictions:
// 1. The origin of this software must not be misrepresented; you must not
// claim that you wrote the original software. If you use this software
// in a product, an acknowledgment in the product documentation would be
// appreciated but is not required.
// 2. Altered source versions must be plainly marked as such, and must not be
// misrepresented as being the original software.
// 3. This notice may not be removed or altered from any source distribution.
#include "Environment.h"
#include "../../Systems/Systems.h"
#include "../../Systems/LogSystem/FileLogSystem/FileLogSystem.h"
#include "../../Systems/LogSystem/MultiLogSystem/MultiLogSystem.h"
#include "../../Systems/LogSystem/RocketLogSystem/RocketLogSystem.h"
#include "../../Systems/LogSystem/ThreadsafeLogSystem/ThreadsafeLogSystem.h"
#include "../../Systems/TimeSystem/TimeSystem.h"
#include "../../Systems/EntitySystem/EntitySystem.h"
#include "../../Systems/AssetSystem/AssetSystem.h"
#include "../../RuntimeObjectSystem/ObjectFactorySystem/ObjectFactorySystem.h"
#include "../../RuntimeObjectSystem/RuntimeObjectSystem.h"
#include "../../Systems/GUISystem/GUISystem.h"
#include "../../RuntimeCompiler/FileChangeNotifier.h"
#include "CompilerLogger.h"
// Builds the global SystemTable and wires up every subsystem.
// Construction order matters: AssetSystem first (it establishes the asset
// directory other systems read), then the log chain, then the runtime
// compiled object system and the gameplay systems.
Environment::Environment( IGame* pGame )
{
	sys = new SystemTable();
	// Set the systems library to refer to this system table
	gSys = sys;
	sys->pGame = pGame;
	// init AssetSystem first as this establishes the Asset dir used by many systems
	sys->pAssetSystem = new AssetSystem("Assets");
	// Log chain: file log and Rocket (in-game) log are fanned out through a
	// MultiLogSystem, which is wrapped in a ThreadsafeLogSystem so any thread
	// may log through sys->pLogSystem.
	FileLogSystem *pFileLog = new FileLogSystem();
	pFileLog->SetLogPath("Log.txt");
	pFileLog->SetVerbosity(eLV_COMMENTS);
	pFileLog->Log(eLV_EVENTS, "Started file logger\n");
	RocketLogSystem *pRocketLog = new RocketLogSystem();
	pRocketLog->SetVerbosity(eLV_COMMENTS);
	pRocketLog->Log(eLV_EVENTS,"Started RocketLib logger\n");
	// Kept separately on the table so GUI code can reach the Rocket log directly.
	sys->pRocketLogSystem = pRocketLog;
	MultiLogSystem *pMultiLog = new MultiLogSystem();
	pMultiLog->AddLogSystem(pFileLog);
	pMultiLog->AddLogSystem(pRocketLog);
	ThreadsafeLogSystem *pThreadsafeLog = new ThreadsafeLogSystem();
	pThreadsafeLog->SetProtectedLogSystem(pMultiLog);
	sys->pLogSystem = pThreadsafeLog;
	sys->pLogSystem->Log(eLV_EVENTS, "All logs initialised\n");
	// Runtime-compiled C++ plumbing; the factory and file-change notifier are
	// owned by the RuntimeObjectSystem, we only cache the pointers here.
	pCompilerLogger = new CompilerLogger(this);
	sys->pRuntimeObjectSystem = new RuntimeObjectSystem();
	sys->pObjectFactorySystem = sys->pRuntimeObjectSystem->GetObjectFactorySystem();
	sys->pFileChangeNotifier = sys->pRuntimeObjectSystem->GetFileChangeNotifier();
	// Keep 5 previous constructors around so objects can be rolled back after
	// a bad runtime recompile.
	sys->pObjectFactorySystem->SetObjectConstructorHistorySize( 5 );
	sys->pTimeSystem = new TimeSystem();
	sys->pEntitySystem = new EntitySystem();
	sys->pGUISystem = new GUISystem();
}
// Second-phase initialisation, to be called after construction: starts the
// timing session, then brings up the runtime object system (which needs the
// compiler logger and the fully populated system table from the constructor).
void Environment::Init()
{
	sys->pTimeSystem->StartSession();
	sys->pRuntimeObjectSystem->Initialise( pCompilerLogger, sys );
}
Environment::~Environment()
{
	// Reverse order as a rule
	delete sys->pGUISystem;
	delete sys->pEntitySystem;
	delete sys->pTimeSystem;
	delete sys->pRuntimeObjectSystem;
	// NOTE(review): pLogSystem is the ThreadsafeLogSystem wrapper; this assumes
	// deleting it cascades to the MultiLogSystem and its File/Rocket children
	// built in the constructor -- confirm, otherwise that chain (including
	// sys->pRocketLogSystem, which is never deleted directly) leaks here.
	delete sys->pLogSystem;
	delete sys->pAssetSystem;
	delete pCompilerLogger;
	delete sys;
}
|
import argparse
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier
import numpy as np
def load_dataset(file_path):
    """Load the dataset at ``file_path`` and return ``(X, y)``.

    NOTE(review): this is an unimplemented stub -- it returns None, so
    ``main`` will fail at ``train_test_split`` until it is filled in.
    """
    # Implement dataset loading from the given file_path
    # Return the features (X) and labels (y)
    pass
def evaluate_feature_accuracy(clf, features, x_train, x_test, y_train, y_test):
    """Score the classifier on each feature individually.

    For every feature code in ``features["CODE"]``, fit ``clf`` on only that
    feature's column and record its test accuracy.

    BUG FIX: the original fit/scored on ALL columns in every iteration, so
    each "per-feature" accuracy was the same full-model score.
    NOTE(review): assumes features["CODE"] is ordered like the columns of
    x_train/x_test -- confirm against load_dataset.

    Returns a dict mapping feature code -> accuracy. Also appends one line
    per feature to "feature_accuracies.txt" in the working directory.
    """
    score_dict = {}
    x_train_arr = np.asarray(x_train)
    x_test_arr = np.asarray(x_test)
    for feat in range(len(features["CODE"])):
        # Keep 2-D shape with [feat] so sklearn-style estimators accept it.
        clf.fit(x_train_arr[:, [feat]].tolist(), np.asarray(y_train).tolist())
        score = clf.score(x_test_arr[:, [feat]].tolist(), np.asarray(y_test).tolist())
        score_dict[features["CODE"][feat]] = score
        print("Feature {} accuracy is {}, train dataset has {} elements, test dataset has {} elements".format(
            features["CODE"][feat], score, len(x_train), len(x_test)))
        with open("feature_accuracies.txt", "a") as f:
            f.write("Feature {} accuracy is {}, train dataset has {} elements, test dataset has {} elements\n".format(
                features["CODE"][feat], score, len(x_train), len(x_test)))
    return score_dict
def main():
    """CLI entry point: load a dataset, split it, and write per-feature
    decision-tree accuracies to the given output file."""
    parser = argparse.ArgumentParser()
    parser.add_argument("input_file", help="Path to the input dataset file")
    parser.add_argument("output_file", help="Path to the output file for feature accuracies")
    # Add any other relevant command-line arguments
    args = parser.parse_args()
    # Load the dataset from the input file
    X, y = load_dataset(args.input_file)
    # Split the dataset into training and testing sets
    x_train, x_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
    # BUG FIX: the original passed an undefined name `features` (NameError).
    # Default to one code per column of X; replace with real feature codes
    # once load_dataset provides them -- TODO confirm.
    features = {"CODE": list(range(np.asarray(X).shape[1]))}
    # Define the classifier and evaluate feature accuracy
    clf = DecisionTreeClassifier()
    feature_accuracies = evaluate_feature_accuracy(clf, features, x_train, x_test, y_train, y_test)
    # Write feature accuracies to the output file
    with open(args.output_file, "w") as f:
        for feature, accuracy in feature_accuracies.items():
            f.write("Feature {} accuracy: {}\n".format(feature, accuracy))
if __name__ == "__main__":
main() |
<reponame>snyjm-18/osss-ucx
/* For license: see LICENSE file at top-level */
#ifdef HAVE_CONFIG_H
# include "config.h"
#endif /* HAVE_CONFIG_H */
#include "shmem_mutex.h"
#include "shmemu.h"
#include "shmemc.h"
#ifdef ENABLE_PSHMEM
#pragma weak shmem_barrier_all = pshmem_barrier_all
#define shmem_barrier_all pshmem_barrier_all
#pragma weak shmem_barrier = pshmem_barrier
#define shmem_barrier pshmem_barrier
#endif /* ENABLE_PSHMEM */
/* Active-set barrier over PEs (start, stride 2^log2stride, size PEs).
 * Thin validated wrapper over the comms layer. */
void
shmem_barrier(int start, int log2stride, int size, long *pSync)
{
    SHMEMU_CHECK_INIT();
    /* pSync must be symmetric; 4 presumably identifies the argument
     * position in the emitted error -- TODO confirm macro semantics. */
    SHMEMU_CHECK_SYMMETRIC(pSync, 4);

    logger(LOG_BARRIERS,
           "%s(start=%d, log2stride=%d, size=%d, pSync=%p)",
           __func__,
           start, log2stride, size, pSync
           );

    /* Serialized when thread support is enabled. */
    SHMEMT_MUTEX_PROTECT(shmemc_barrier(start, log2stride, size, pSync));
}
/* Barrier across ALL PEs; validated wrapper over the comms layer. */
void
shmem_barrier_all(void)
{
    SHMEMU_CHECK_INIT();

    logger(LOG_BARRIERS,
           "%s()",
           __func__
           );

    SHMEMT_MUTEX_PROTECT(shmemc_barrier_all());
}
#ifdef ENABLE_PSHMEM
#pragma weak shmem_sync_all = pshmem_sync_all
#define shmem_sync_all pshmem_sync_all
#pragma weak shmem_sync = pshmem_sync
#define shmem_sync pshmem_sync
#endif /* ENABLE_PSHMEM */
/* Active-set synchronization (no memory-update guarantee, unlike barrier).
 * Same validation/logging shape as shmem_barrier. */
void
shmem_sync(int start, int log2stride, int size, long *pSync)
{
    SHMEMU_CHECK_INIT();
    /* pSync must be symmetric; 4 presumably the reported argument
     * position -- TODO confirm macro semantics. */
    SHMEMU_CHECK_SYMMETRIC(pSync, 4);

    logger(LOG_BARRIERS,
           "%s(start=%d, log2stride=%d, size=%d, pSync=%p)",
           __func__,
           start, log2stride, size, pSync
           );

    SHMEMT_MUTEX_PROTECT(shmemc_sync(start, log2stride, size, pSync));
}
/* Synchronize ALL PEs; validated wrapper over the comms layer. */
void
shmem_sync_all(void)
{
    SHMEMU_CHECK_INIT();

    logger(LOG_BARRIERS,
           "%s()",
           __func__
           );

    SHMEMT_MUTEX_PROTECT(shmemc_sync_all());
}
|
#!/bin/bash
# RunIPhoneUnitTest.sh
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
#
# Runs all unittests through the iPhone simulator. We don't handle running them
# on the device. To run on the device just choose "run".
set -o errexit
set -o nounset
# Uncomment the next line to trace execution.
#set -o verbose
# Controlling environment variables:
# GTM_DISABLE_ZOMBIES -
# Set to a non-zero value to turn on zombie checks. You will probably
# want to turn this off if you enable leaks.
GTM_DISABLE_ZOMBIES=${GTM_DISABLE_ZOMBIES:=1}
# GTM_ENABLE_LEAKS -
# Set to a non-zero value to turn on the leaks check. You will probably want
# to disable zombies, otherwise you will get a lot of false positives.
# GTM_DISABLE_TERMINATION
# Set to a non-zero value so that the app doesn't terminate when it's finished
# running tests. This is useful when using it with external tools such
# as Instruments.
# GTM_LEAKS_SYMBOLS_TO_IGNORE
# List of comma separated symbols that leaks should ignore. Mainly to control
# leaks in frameworks you don't have control over.
# Search this file for GTM_LEAKS_SYMBOLS_TO_IGNORE to see examples.
# Please feel free to add other symbols as you find them but make sure to
# reference Radars or other bug systems so we can track them.
# GTM_REMOVE_GCOV_DATA
# Before starting the test, remove any *.gcda files for the current run so
# you won't get errors when the source file has changed and the data can't
# be merged.
#
GTM_REMOVE_GCOV_DATA=${GTM_REMOVE_GCOV_DATA:=0}
# GTM_DISABLE_USERDIR_SETUP
# Controls whether or not CFFIXED_USER_HOME is erased and set up from scratch
# for you each time the script is run. In some cases you may have a wrapper
# script calling this one that takes care of that for us so you can set up
# a certain user configuration.
GTM_DISABLE_USERDIR_SETUP=${GTM_DISABLE_USERDIR_SETUP:=0}
# GTM_DISABLE_IPHONE_LAUNCH_DAEMONS
# Controls whether or not we launch up the iPhone Launch Daemons before
# we start testing. You need Launch Daemons to test anything that interacts
# with security. Note that it is OFF by default. Set
# GTM_DISABLE_IPHONE_LAUNCH_DAEMONS=0 before calling this script
# to turn it on.
GTM_DISABLE_IPHONE_LAUNCH_DAEMONS=${GTM_DISABLE_IPHONE_LAUNCH_DAEMONS:=1}
# GTM_USE_TEST_AFTER_BUILD
#   When set to 1, tests are run only when TEST_AFTER_BUILD is set to "YES".
#   This can be used to have tests run as an after build step when running
#   from the command line, but not when running from within XCode.
GTM_USE_TEST_AFTER_BUILD=${GTM_USE_TEST_AFTER_BUILD:=0}
ScriptDir=$(dirname "$(echo $0 | sed -e "s,^\([^/]\),$(pwd)/\1,")")
ScriptName=$(basename "$0")
ThisScript="${ScriptDir}/${ScriptName}"
# Emits an Xcode-parseable "note" line (script:line: note: ...) so the message
# shows up in the build log. $1 = line number, $2 = message text.
GTMXcodeNote() {
    echo ${ThisScript}:${1}: note: GTM ${2}
}
# Creates a file containing the plist for the securityd daemon and prints the
# filename to stdout.
# Creates a file containing the plist for the securityd daemon and prints the
# filename to stdout.
GTMCreateLaunchDaemonPlist() {
    local plist_file
    # $$ (PID) keeps the temp name unique per script invocation.
    plist_file="$TMPDIR/securityd.$$.plist"
    # Print the path first: this is the function's "return value" for callers
    # using command substitution; PlistBuddy output is discarded below.
    echo $plist_file

    # Create the plist file with PlistBuddy.
    /usr/libexec/PlistBuddy \
        -c "Add :Label string RunIPhoneLaunchDaemons" \
        -c "Add :ProgramArguments array" \
        -c "Add :ProgramArguments: string \"$IPHONE_SIMULATOR_ROOT/usr/libexec/securityd\"" \
        -c "Add :EnvironmentVariables dict" \
        -c "Add :EnvironmentVariables:DYLD_ROOT_PATH string \"$IPHONE_SIMULATOR_ROOT\"" \
        -c "Add :EnvironmentVariables:IPHONE_SIMULATOR_ROOT string \"$IPHONE_SIMULATOR_ROOT\"" \
        -c "Add :EnvironmentVariables:CFFIXED_USER_HOME string \"$CFFIXED_USER_HOME\"" \
        -c "Add :MachServices dict" \
        -c "Add :MachServices:com.apple.securityd bool YES" "$plist_file" > /dev/null
}
if [[ "$GTM_USE_TEST_AFTER_BUILD" == 1 && "$TEST_AFTER_BUILD" == "NO" ]]; then
GTMXcodeNote ${LINENO} "Skipping running of unittests since TEST_AFTER_BUILD=NO."
elif [ "$PLATFORM_NAME" == "iphonesimulator" ]; then
# We kill the iPhone simulator because otherwise we run into issues where
# the unittests fail because the simulator is currently running, and
# at this time the iPhone SDK won't allow two simulators running at the same
# time.
set +e
/usr/bin/killall "iPhone Simulator"
set -e
if [ $GTM_REMOVE_GCOV_DATA -ne 0 ]; then
if [ "${OBJECT_FILE_DIR}-${CURRENT_VARIANT}" != "-" ]; then
if [ -d "${OBJECT_FILE_DIR}-${CURRENT_VARIANT}" ]; then
GTMXcodeNote ${LINENO} "Removing any .gcda files"
(cd "${OBJECT_FILE_DIR}-${CURRENT_VARIANT}" && \
find . -type f -name "*.gcda" -print0 | xargs -0 rm -f )
fi
fi
fi
export DYLD_ROOT_PATH="$SDKROOT"
export DYLD_FRAMEWORK_PATH="$CONFIGURATION_BUILD_DIR"
export IPHONE_SIMULATOR_ROOT="$SDKROOT"
export CFFIXED_USER_HOME="$TEMP_FILES_DIR/iPhone Simulator User Dir"
# See http://developer.apple.com/technotes/tn2004/tn2124.html for an
# explanation of these environment variables.
# NOTE: any setup work is done before turning on the environment variables
# to avoid having the setup work also get checked by what the variables
# enabled.
if [ $GTM_DISABLE_USERDIR_SETUP -eq 0 ]; then
# Cleanup user home directory
if [ -d "$CFFIXED_USER_HOME" ]; then
rm -rf "$CFFIXED_USER_HOME"
fi
mkdir "$CFFIXED_USER_HOME"
mkdir "$CFFIXED_USER_HOME/Documents"
mkdir -p "$CFFIXED_USER_HOME/Library/Caches"
fi
if [ $GTM_DISABLE_IPHONE_LAUNCH_DAEMONS -eq 0 ]; then
# Remove any instance of RunIPhoneLaunchDaemons left running in the case the
# 'trap' below fails. We first must check for RunIPhoneLaunchDaemons'
# presence as 'launchctl remove' will kill this script if run from within an
# Xcode build.
launchctl list | grep RunIPhoneLaunchDaemons && launchctl remove RunIPhoneLaunchDaemons
# If we want to test anything that interacts with the keychain, we need
# securityd up and running.
LAUNCH_DAEMON_PLIST="$(GTMCreateLaunchDaemonPlist)"
launchctl load $LAUNCH_DAEMON_PLIST
rm $LAUNCH_DAEMON_PLIST
# No matter how we exit, we want to shut down our launchctl job.
trap "launchctl remove RunIPhoneLaunchDaemons" INT TERM EXIT
fi
if [ $GTM_DISABLE_ZOMBIES -eq 0 ]; then
GTMXcodeNote ${LINENO} "Enabling zombies"
export CFZombieLevel=3
export NSZombieEnabled=YES
fi
export MallocScribble=YES
export MallocPreScribble=YES
export MallocGuardEdges=YES
export MallocStackLogging=YES
export NSAutoreleaseFreedObjectCheckEnabled=YES
# Turn on the mostly undocumented OBJC_DEBUG stuff.
export OBJC_DEBUG_FRAGILE_SUPERCLASSES=YES
export OBJC_DEBUG_UNLOAD=YES
# Turned off due to the amount of false positives from NS classes.
# export OBJC_DEBUG_FINALIZERS=YES
export OBJC_DEBUG_NIL_SYNC=YES
export OBJC_PRINT_REPLACED_METHODS=YES
# 6251475 iPhone simulator leaks @ CFHTTPCookieStore shutdown if
# CFFIXED_USER_HOME empty
GTM_LEAKS_SYMBOLS_TO_IGNORE="CFHTTPCookieStore"
# Start our app.
"$TARGET_BUILD_DIR/$EXECUTABLE_PATH" -RegisterForSystemEvents
else
GTMXcodeNote ${LINENO} "Skipping running of unittests for device build."
fi
exit 0
|
import styled from 'styled-components';
// Full-width, 700px-tall flex container that centers its content;
// on very small screens (<= 320px) it shrinks to 300px with extra
// bottom margin.
export const Container = styled.div`
    width: 100%;
    height: 700px;
    display: flex;
    align-items: center;
    justify-content: center;

    @media (max-width: 320px) {
        justify-content: center;
        height: 300px;
        margin-bottom: 50px;
    }
`;
|
require 'rails_helper'
RSpec.describe Student, type: :model do
  # Associations and validations (shoulda-matchers one-liners).
  it { is_expected.to have_many(:tardies).through(:students_tardies) }
  it { is_expected.to validate_presence_of(:name) }
  it { is_expected.to validate_uniqueness_of(:name) }
  it { is_expected.to validate_length_of(:name).is_at_least(1) }

  it "should have a working factory" do
    # Modern expectation syntax: the `.should` monkey-patch is deprecated
    # and disabled by default in current RSpec.
    expect(Fabricate.build(:student)).to be_valid
  end
end
|
<filename>src/misc_utils.py<gh_stars>0
import os
import json
import tensorflow as tf
def create_folder(directory):
    """Create ``directory`` (and any missing parents) if it does not exist.

    Prints an error message instead of raising when creation fails.
    """
    try:
        # exist_ok avoids the check-then-create race of the original
        # `if not os.path.exists(...)` pattern and makes the call idempotent.
        os.makedirs(directory, exist_ok=True)
    except OSError:
        print('Error: Creating directory. ' + directory)
def save_model_to_json(model, model_path):
    """Serialize the model's architecture (``model.to_json()``) to a file.

    The JSON string itself is stored as a JSON value, matching what
    ``load_json_model`` expects to read back.
    """
    serialized = model.to_json()
    with open(model_path, "w") as handle:
        json.dump(serialized, handle)
def load_json_model(model_path):
    """Rebuild an uncompiled Keras model from a JSON file written by
    ``save_model_to_json``."""
    with open(model_path) as handle:
        architecture = json.load(handle)
    # Caller is responsible for compiling the returned model.
    return tf.keras.models.model_from_json(architecture)
|
<gh_stars>0
package top.luyuni.qaa.service.impl;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.web.util.HtmlUtils;
import top.luyuni.qaa.dao.CommentDAO;
import top.luyuni.qaa.model.Comment;
import top.luyuni.qaa.service.ICommentService;
import java.util.List;
@Service
public class CommentServiceImpl implements ICommentService {
@Autowired
CommentDAO commentDAO;
@Autowired
SensitiveServiceImpl sensitiveServiceImpl;
public List<Comment> getCommentsByEntity(int entityId, int entityType) {
return commentDAO.selectCommentByEntity(entityId, entityType);
}
public int addComment(Comment comment) {
comment.setContent(HtmlUtils.htmlEscape(comment.getContent()));
comment.setContent(sensitiveServiceImpl.filter(comment.getContent()));
return commentDAO.addComment(comment) > 0 ? comment.getId() : 0;
}
public int getCommentCount(int entityId, int entityType) {
return commentDAO.getCommentCount(entityId, entityType);
}
public int getUserCommentCount(int userId) {
return commentDAO.getUserCommentCount(userId);
}
public boolean deleteComment(int commentId) {
return commentDAO.updateStatus(commentId, 1) > 0;
}
public Comment getCommentById(int id) {
return commentDAO.getCommentById(id);
}
}
|
<reponame>potados99/tarvern-pin-archiver<filename>lib/responder/PinByReactionResponder.ts
import Responder from "./Responder";
import { MessageReaction } from "discord.js";
import PinService from "../service/PinService";
/**
 * Responder triggered by a Discord message reaction.
 * Delegates all pin/archive handling for the reaction to PinService.
 */
export default class PinByReactionResponder implements Responder {
  constructor(private readonly reaction: MessageReaction) {
  }

  public async handle() {
    // One PinService per reaction; it decides what to do with it.
    await new PinService(this.reaction).handleReaction();
  }
}
|
<reponame>gpiantoni/fima
"""Miscellaneous functions that might be useful across modules"""
from bidso import file_Core
from numpy import argmax, unravel_index, zeros, std, sqrt, median, moveaxis
from ast import literal_eval
from numpy import arange, array, clip, where
from plotly.colors import sequential, cyclical, diverging
from scipy.stats import ttest_1samp
from os import nice
def make_name(filename, event_type, ext='.html'):
    """Create name based on the data file name

    Parameters
    ----------
    filename : str
        filename of the dataset of interest
    event_type : str
        event type used to identify the trials (one of 'cues', 'open', 'close',
        'movements', 'extension', 'flexion')
    ext : str
        extension of the file

    Returns
    -------
    str
        file name specific to this filename and event type
    """
    f = file_Core(filename)
    if f.acquisition is None:
        acq = ''
    else:
        # BUG FIX: the original assigned the literal string '_{f.acquisition}'
        # (missing f-prefix), so acquisition-tagged files got that literal text
        # in their name instead of the acquisition label.
        acq = f'_{f.acquisition}'

    return f'{f.subject}_run-{f.run}{acq}_{event_type}{ext}'
def get_events(names):
    """Get events, but order by finger

    Parameters
    ----------
    names : list of str
        all the events in the dataset

    Returns
    -------
    list of str
        list of events (thumb, index etc)
    """
    from .parameters import FINGERS_CLOSED, FINGERS_OPEN, FINGERS_FLEXION, FINGERS_EXTENSION

    # The presence of the "thumb ..." marker tells us which event family the
    # dataset uses; append that family's finger-ordered list.
    groups = (
        ('thumb close', FINGERS_CLOSED),
        ('thumb open', FINGERS_OPEN),
        ('thumb flexion', FINGERS_FLEXION),
        ('thumb extension', FINGERS_EXTENSION),
    )
    ordered = []
    for marker, fingers in groups:
        if marker in names:
            ordered.extend(fingers)
    return ordered
def find_max_point(parameters, tf_cht):
    """Take the channel with the highest value and the interval containing that
    point

    Parameters
    ----------
    parameters : dict
        analysis parameters; reads ['spectrum']['select']['timeinterval'] and
        ['spectrum']['select']['peak'] ('positive' or 'negative')
    tf_cht : instance of ChanTime

    Returns
    -------
    str
        channel with largest activity
    tuple of float
        interval centered around largest activity
    """
    INTERVAL = parameters['spectrum']['select']['timeinterval']

    # gain flips the sign so argmax finds the most-negative point when
    # looking for a negative peak.
    if parameters['spectrum']['select']['peak'] == 'positive':
        gain = 1
    elif parameters['spectrum']['select']['peak'] == 'negative':
        gain = -1

    # argmax over the flattened (chan x time) array; unravel back to
    # (channel index, time index).
    ind = unravel_index(argmax(gain * tf_cht.data[0], axis=None), tf_cht.data[0].shape)
    max_chan = tf_cht.chan[0][ind[0]]
    # Window of width INTERVAL centered on the time of the peak.
    max_timeinterval = (
        tf_cht.time[0][ind[1]] - INTERVAL / 2,
        tf_cht.time[0][ind[1]] + INTERVAL / 2,
        )

    return max_chan, max_timeinterval
def get_color_for_val(value, colorscale, vmin=0, vmax=1):
    """Return RGB value for plotly

    Parameters
    ----------
    value : float
        value of interest (between vmin and vmax)
    colorscale : str
        name of the color scale
    vmin : float
        minimal value (lowest value on the color scale)
    vmax : float
        maximal value (highest value on the color scale)

    Returns
    -------
    str
        RGB color in plotly format
    """
    # Look the scale name up across plotly's three colorscale families.
    if colorscale in dir(sequential):
        color_values = getattr(sequential, colorscale)
    elif colorscale in dir(cyclical):
        color_values = getattr(cyclical, colorscale)
    elif colorscale in dir(diverging):
        color_values = getattr(diverging, colorscale)
    else:
        raise ValueError(f'Could not find colorscale "{colorscale}"')

    # Anchor positions of the scale's colors in [0, 1], and the colors
    # themselves parsed from 'rgb(r, g, b)' strings into [0, 1] floats
    # (color[3:] strips the 'rgb' prefix, leaving a literal tuple).
    scale = arange(len(color_values)) / (len(color_values) - 1)
    colors = array([literal_eval(color[3:]) for color in color_values]) / 255

    # Normalize value into [0, 1], then find the two anchor colors that
    # bracket it (v_min below-or-equal, v_max above-or-equal).
    value = clip(value, vmin, vmax)
    v = (value - vmin) / (vmax - vmin)
    v_max = where(v <= scale)[0][0]
    v_min = where(v >= scale)[0][-1]
    if v_min == v_max:
        vv = v_min
    else:
        # Fractional position between the two anchors.
        vv = (v - scale[v_min]) / (scale[v_max] - scale[v_min])

    # Linear interpolation between the bracketing colors, rounded to 0-255.
    val_color = colors[v_min, :] + vv * (colors[v_max, :] - colors[v_min, :])
    val_color_0255 = (255 * val_color + 0.5).astype(int)
    return f'rgb{str(tuple(val_color_0255))}'
def group_per_condition(data, names, operator='mean'):
    """Collapse the trial axis into one value per event condition.

    Parameters
    ----------
    data : data structure with .data[0] (last axis = trials) and ._copy()
    names : list of str
        per-trial event labels, matched by prefix against EVENTS
    operator : str
        'mean', 'median', 'std', 'sem' or 'tstat' (one-sample t vs 0)

    Returns
    -------
    (data-like, ndarray of EVENTS)
        copy of `data` with the trial axis replaced by an 'event' axis.

    NOTE(review): an unrecognized operator silently appends nothing for every
    event, producing an empty result -- consider raising instead.
    """
    from .parameters import EVENTS

    dat = []
    for ev in EVENTS:
        # Boolean mask of trials belonging to this event (prefix match).
        i = create_bool(names, ev)
        y = data.data[0][..., i]

        if operator == 'mean':
            dat.append(y.mean(axis=-1))
        elif operator == 'median':
            dat.append(median(y, axis=-1))
        elif operator == 'std':
            dat.append(std(y, axis=-1))
        elif operator == 'sem':
            # standard error of the mean across trials
            dat.append(std(y, axis=-1) / sqrt(y.shape[-1]))
        elif operator == 'tstat':
            res = ttest_1samp(y, 0, axis=-1)
            dat.append(res.statistic)

    out = data._copy()
    # Stack per-event results along a new last axis, replacing trials.
    out.data[0] = moveaxis(array(dat), 0, -1)
    out.axis.pop('trial_axis')
    out.axis['event'] = array((1, ), dtype='O')
    out.axis['event'][0] = array(EVENTS)
    return out, array(EVENTS)
def create_bool(events, to_select):
    """Boolean mask over `events`: True where the label starts with
    `to_select` (e.g. select all 'thumb ...' trials)."""
    return array([label.startswith(to_select) for label in events],
                 dtype='bool')
def be_nice():
    # Lower this process's scheduling priority by 10 (POSIX nice increment)
    # so long-running analyses don't starve interactive work. Note: each call
    # lowers the priority further; it cannot be raised back without privileges.
    nice(10)
|
require "active_record/mysql_row_level_security"
|
#!/bin/bash
# Package the web front-end sources into deploy/<env>/web.tar.gz.
# Abort on any failed command or unset variable.
set -eu

ENV="dev"
ARCHIVE_NAME="./deploy/${ENV}/web.tar.gz"
PACKER_TMP_DIR="./packer_tmp"
TOTAL_STEPS=4

echo "[Step 1/${TOTAL_STEPS}] creating ${PACKER_TMP_DIR} ..."
if [ -d "${PACKER_TMP_DIR}" ]; then
    echo "remove ${PACKER_TMP_DIR}"
    rm -rf "${PACKER_TMP_DIR}"
fi
mkdir "${PACKER_TMP_DIR}"

echo "[Step 2/${TOTAL_STEPS}] cp files to ${PACKER_TMP_DIR} ..."
# cp -rf pages lang containers components stores config static utils next.config.js .babelrc "${PACKER_TMP_DIR}"
cp -rf pages lang containers components stores config static utils .babelrc "${PACKER_TMP_DIR}"
cp package-docker.json "${PACKER_TMP_DIR}/package.json"
# cp package.json "${PACKER_TMP_DIR}/package.json"

echo "[Step 3/${TOTAL_STEPS}] creating ${ARCHIVE_NAME} ..."
# Ensure the destination directory exists; tar fails on a fresh checkout otherwise.
mkdir -p "./deploy/${ENV}"
cd "${PACKER_TMP_DIR}"
tar czvf "../${ARCHIVE_NAME}" * .babelrc
cd ../

echo "[Step 4/${TOTAL_STEPS}] cleanup ..."
rm -rf "${PACKER_TMP_DIR}"

echo "--------------------------"
echo "done !"
|
<gh_stars>1-10
import * as React from "react"
import PropTypes from "prop-types"
import { Navbar } from "./navbar/navbar"
import { Footer } from "./Footer"
/**
 * Page shell: navigation bar on top, routed page content in <main>,
 * footer below. `children` (required) is the page body.
 */
const Layout = ({ children }) => {
  return (
    <>
      <Navbar />
      <main>{children}</main>
      <Footer />
    </>
  )
}

Layout.propTypes = {
  children: PropTypes.node.isRequired,
}

export default Layout
|
/* SPDX-License-Identifier: BSD-3-Clause */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <libgen.h>
#include <limits.h>
#include <fcntl.h>
#include <err.h>
#include <cmark.h>
#define SECTION_MAX 8
struct hvn_man_args {
char *input, *output;
char *pagename;
int section;
int width;
};
struct hvn_man_render_option {
const char *name;
int value;
};
static const struct hvn_man_render_option renderoptions[] = {
{ "sourcepos", CMARK_OPT_SOURCEPOS },
{ "hardbreaks", CMARK_OPT_HARDBREAKS },
{ "nobreaks", CMARK_OPT_NOBREAKS },
{ "smart", CMARK_OPT_SMART },
{ "safe", CMARK_OPT_SAFE },
{ "unsafe", CMARK_OPT_UNSAFE },
{ "validate-utf8", CMARK_OPT_VALIDATE_UTF8 },
};
/* Feed the whole file descriptor `fd` into the cmark parser, one page-sized
 * chunk at a time. Read errors are reported but not fatal: parsing simply
 * stops with whatever was fed so far. */
static void
hvn_man_parse(int fd, cmark_parser *parser) {
	const int pagesize = getpagesize();
	char buffer[pagesize];
	ssize_t readval;

	while(readval = read(fd, buffer, sizeof(buffer)), readval > 0) {
		cmark_parser_feed(parser, buffer, readval);
	}

	if(readval < 0) {
		perror("read");
	}
}
/* Render the parsed document as man(7) roff to `output`, prefixed with a
 * .TH title line built from the page name and section. The render buffer
 * returned by cmark is owned by us and freed here. */
static void
hvn_man_render(FILE *output, cmark_node *document, int options, const struct hvn_man_args *args) {
	char *result = cmark_render_man(document, options, args->width);

	fprintf(output, ".TH %s %d\n", args->pagename, args->section);
	fputs(result, output);

	free(result);
}
/* Print usage to stderr and exit with failure; never returns. */
static void
hvn_man_usage(const char *progname) {
	fprintf(stderr, "usage: %s [-i <input>] [-o <output>] [-p <pagename>] [-s <section>] [-w <width>] [<render options>...]\n", progname);
	exit(EXIT_FAILURE);
}
/* Parse command-line options into an hvn_man_args, then derive missing
 * values: output name from input name, page name and section from the
 * output name. Falls back to pagename "?" and section 1.
 *
 * BUG FIXES vs original:
 *  - cases 's' and 'w' called strtoul(optarg, NULL, ...) but then
 *    dereferenced the UNINITIALIZED local `endptr`; strtoul now fills it.
 *  - section extraction dereferenced `extension + 1` even when no '.' was
 *    found (extension == NULL), which is undefined behavior. */
static const struct hvn_man_args
hvn_man_parse_args(int argc, char **argv) {
	struct hvn_man_args args = {
		.input = NULL, .output = NULL,
		.pagename = NULL,
		.section = 0,
		.width = 0,
	};
	int c;

	while(c = getopt(argc, argv, ":i:o:p:s:w:"), c != -1) {
		switch(c) {
		case 'i':
			free(args.input);
			args.input = strdup(optarg);
			break;
		case 'o':
			free(args.output);
			args.output = strdup(optarg);
			break;
		case 'p':
			free(args.pagename);
			args.pagename = strdup(optarg);
			break;
		case 's': {
			char *endptr;
			const unsigned long value = strtoul(optarg, &endptr, 10);

			/* *endptr != '\0' means trailing garbage after the number. */
			if(value == 0 || value > SECTION_MAX || *endptr != '\0') {
				warnx("Invalid section number '%s'", optarg);
				hvn_man_usage(*argv);
			}

			args.section = value;
		} break;
		case 'w': {
			char *endptr;
			const unsigned long value = strtoul(optarg, &endptr, 0);

			if(value == 0 || value > INT_MAX || *endptr != '\0') {
				warnx("Invalid wrap width '%s'", optarg);
				hvn_man_usage(*argv);
			}

			args.width = value;
		} break;
		case ':':
			warnx("-%c: Missing argument", optopt);
			hvn_man_usage(*argv);
		default:
			warnx("Unknown argument -%c", optopt);
			hvn_man_usage(*argv);
		}
	}

	/* Set output name from input's if available (basename without extension).
	 * NOTE(review): POSIX basename() may modify its argument -- acceptable
	 * here since args.input/args.output are our own strdup'd buffers. */
	if(args.input != NULL && args.output == NULL) {
		const char *filename = basename(args.input);
		const char *extension = strrchr(filename, '.');
		const size_t length = extension != NULL ?
			extension - filename : strlen(filename);

		args.output = strndup(filename, length);
	}

	/* Set default missing parameters if output is set */
	if(args.output != NULL) {
		const char *filename = basename(args.output);
		const char *extension = strchr(filename, '.');
		const size_t length = extension != NULL ?
			extension - filename : strlen(filename);

		/* Create pagename from output name */
		if(args.pagename == NULL) {
			args.pagename = strndup(filename, length);
		}

		/* Extract section number from extension if possible; only when an
		 * extension actually exists (extension may be NULL here). */
		if(args.section == 0 && extension != NULL) {
			const char *section = extension + 1;
			char *endptr;
			const unsigned long value = strtoul(section, &endptr, 10);

			if(value != 0 && value <= SECTION_MAX && *endptr == '\0') {
				args.section = value;
			}
		}
	}

	/* Last parameters, if we're in a command pipeline and nothing was specified */
	if(args.pagename == NULL) {
		args.pagename = strdup("?");
	}

	if(args.section == 0) {
		args.section = 1;
	}

	return args;
}
/* Entry point: parse options, fold any remaining positional arguments into
 * cmark render-option flags, parse the input (file or stdin) and render the
 * man page to the output (file or stdout). */
int
main(int argc, char **argv) {
	const struct hvn_man_args args = hvn_man_parse_args(argc, argv);
	char **argpos = argv + optind, ** const argend = argv + argc;
	int options = CMARK_OPT_DEFAULT;

	/* Parse render options: each positional arg is matched against the
	 * renderoptions table; unknown names are silently ignored. */
	while(argpos != argend) {
		const struct hvn_man_render_option *current = renderoptions,
			* const end = renderoptions + sizeof(renderoptions) / sizeof(*renderoptions);
		const char *name = *argpos;

		while(current != end && strcmp(current->name, name) != 0) {
			current++;
		}

		if(current != end) {
			options |= current->value;
		}

		argpos++;
	}

	/* Parse input file (falls back to stdin when -i was not given). */
	cmark_parser *parser = cmark_parser_new(options);
	if(args.input != NULL) {
		int fd = open(args.input, O_RDONLY);

		if(fd < 0) {
			err(EXIT_FAILURE, "open %s", args.input);
		}

		hvn_man_parse(fd, parser);

		close(fd);
	} else {
		hvn_man_parse(STDIN_FILENO, parser);
	}

	/* Print output file (falls back to stdout when no output name). */
	cmark_node *document = cmark_parser_finish(parser);
	cmark_parser_free(parser);

	if(args.output != NULL) {
		FILE *output = fopen(args.output, "w");

		if(output == NULL) {
			err(EXIT_FAILURE, "fopen %s", args.output);
		}

		hvn_man_render(output, document, options, &args);

		fclose(output);
	} else {
		hvn_man_render(stdout, document, options, &args);
	}

	cmark_node_free(document);
	/* args members were strdup'd in hvn_man_parse_args; free them here. */
	free(args.pagename);
	free(args.output);
	free(args.input);

	return EXIT_SUCCESS;
}
|
#!/usr/bin/env bash
# -*- coding: utf-8 -*-
#
# Copyright (C) 2019 UPR.
#
# iroko is free software; you can redistribute it and/or modify it under the
# terms of the MIT License; see LICENSE file for more details.
# Build the iroko docker images, tagging the base (dependencies) image with
# the Pipfile.lock content hash so it is rebuilt only when deps change.
script_path=$(dirname "$0")
pipfile_lock_path="$script_path/../Pipfile.lock"

# Quote the path: it may contain spaces (the unquoted test would word-split).
if [ ! -f "$pipfile_lock_path" ]; then
    echo "'Pipfile.lock' not found. Generate it by running 'pipenv lock'."
    exit 1
fi

# Extract Pipfile.lock hash to use as the docker image tag
# (redirect instead of the original useless `cat | python`).
deps_ver=$(python -c "import json,sys;print(json.load(sys.stdin)['_meta']['hash']['sha256'])" < "$pipfile_lock_path")

# Build dependencies image
docker build -f Dockerfile.base -t "iroko-base:$deps_ver" .

# Build application image
docker build --build-arg "DEPENDENCIES_VERSION=$deps_ver" . -t iroko
|
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.core.gui.components.form.flexible.impl.elements.table.tab;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.olat.core.gui.UserRequest;
import org.olat.core.gui.components.Component;
import org.olat.core.gui.components.ComponentEventListener;
import org.olat.core.gui.components.form.flexible.FormItem;
import org.olat.core.gui.components.form.flexible.FormItemCollection;
import org.olat.core.gui.components.form.flexible.elements.FormLink;
import org.olat.core.gui.components.form.flexible.impl.Form;
import org.olat.core.gui.components.form.flexible.impl.FormItemImpl;
import org.olat.core.gui.components.form.flexible.impl.elements.FormLinkImpl;
import org.olat.core.gui.components.form.flexible.impl.elements.table.FlexiTableElementImpl;
import org.olat.core.gui.components.form.flexible.impl.elements.table.filter.FlexiFilterButton;
import org.olat.core.gui.components.form.flexible.impl.elements.table.filter.FlexiFiltersElementImpl;
import org.olat.core.gui.components.link.Link;
import org.olat.core.gui.control.Controller;
import org.olat.core.gui.control.ControllerEventListener;
import org.olat.core.gui.control.Event;
import org.olat.core.gui.translator.Translator;
import org.olat.core.util.StringHelper;
/**
*
* Initial date: 12 juil. 2021<br>
* @author srosse, <EMAIL>, http://www.frentix.com
*
*/
public class FlexiFilterTabsElementImpl extends FormItemImpl implements FormItemCollection, ControllerEventListener, ComponentEventListener {
private final FlexiFilterTabsComponent component;
private List<FlexiFiltersTab> tabs;
private final List<FlexiFiltersTab> customTabs = new ArrayList<>();
private FlexiFiltersTab selectedTab;
private final FormLink removeFiltersButton;
private final FlexiTableElementImpl tableEl;
private final Map<String,FormItem> components;
	/**
	 * Builds the tab bar element for the given table and pre-creates the
	 * "remove active filters" link, keyed under the table component's
	 * dispatch id so form events route back to this element.
	 */
	public FlexiFilterTabsElementImpl(String name, FlexiTableElementImpl tableEl, Translator translator) {
		super(name);
		this.tableEl = tableEl;
		component = new FlexiFilterTabsComponent(this, translator);
		component.setDomReplacementWrapperRequired(false);

		String dispatchId = component.getDispatchID();
		// Link rendered next to the tabs to clear all non-implicit filters.
		removeFiltersButton = new FormLinkImpl(dispatchId.concat("_rmrFiltersButton"), "rmrFiltersButton", "remove.active.filters", Link.LINK);
		removeFiltersButton.setDomReplacementWrapperRequired(false);
		removeFiltersButton.setIconLeftCSS("o_icon o_icon_delete_item");
		removeFiltersButton.setElementCssClass("o_table_remove_filters");
		removeFiltersButton.setTranslator(translator);
		components = Map.of("rmrFiltersButton", removeFiltersButton);
	}
protected FlexiTableElementImpl getTableEl() {
return tableEl;
}
	/** True when the table has an enabled filters panel that is currently expanded. */
	protected boolean isFiltersExpanded() {
		FlexiFiltersElementImpl filtersEl = tableEl.getFiltersElement();
		return filtersEl != null && filtersEl.isEnabled() && filtersEl.isExpanded();
	}
protected boolean hasFilterChanges() {
FlexiFiltersElementImpl filtersEl = tableEl.getFiltersElement();
if(filtersEl == null) {
return false;
}
List<FlexiFilterButton> filterButtons = filtersEl.getFiltersButtons();
for(FlexiFilterButton filterButton:filterButtons) {
if(!filterButton.isImplicit() && filterButton.isChanged()) {
return true;
}
}
return false;
}
protected FormLink getRemoveFiltersButton() {
return removeFiltersButton;
}
public List<FlexiFiltersTab> getFilterTabs() {
return tabs;
}
public void setFilterTabs(List<? extends FlexiFiltersTab> tabs) {
this.tabs = new ArrayList<>(tabs);
}
public List<FlexiFiltersTab> getCustomFilterTabs() {
return customTabs;
}
public void addCustomFilterTab(FlexiFiltersTab customPreset) {
customTabs.add(customPreset);
component.setDirty(true);
}
public FlexiFiltersTab getSelectedTab() {
return selectedTab;
}
public void setSelectedTab(FlexiFiltersTab selectedTab) {
this.selectedTab = selectedTab;
component.setDirty(true);
}
public void removeSelectedTab(FlexiFiltersTab tab) {
customTabs.remove(tab);
component.setDirty(true);
}
public FlexiFiltersTab getFilterTabById(String id) {
if(id == null) return null;
for(FlexiFiltersTab customTab:customTabs) {
if(id.equals(customTab.getId())) {
return customTab;
}
}
if(tabs != null) {
for(FlexiFiltersTab tab:tabs) {
if(id.equals(tab.getId())) {
return tab;
}
}
}
return null;
}
public List<String> getImplicitFiltersOfSelectedTab() {
return selectedTab == null ? null : selectedTab.getImplicitFilters();
}
@Override
public void evalFormRequest(UserRequest ureq) {
Form form = getRootForm();
String dispatchuri = form.getRequestParameter("dispatchuri");
if(removeFiltersButton != null
&& removeFiltersButton.getFormDispatchId().equals(dispatchuri)) {
component.fireEvent(ureq, new RemoveFiltersEvent());
} else if(getFormDispatchId().equals(dispatchuri)) {
String selectTabId = form.getRequestParameter("tab");
if(selectTabId != null) {
FlexiFiltersTab tab = getTabById(selectTabId);
if(tab != null) {
setSelectedTab(tab);
component.fireEvent(ureq, new SelectFilterTabEvent(tab));
component.setDirty(true);
}
}
}
}
private FlexiFiltersTab getTabById(String id) {
if(!StringHelper.containsNonWhitespace(id)) {
return null;
}
for(FlexiFiltersTab tab:tabs) {
if(tab.getId().equals(id)) {
return tab;
}
}
for(FlexiFiltersTab tab:customTabs) {
if(tab.getId().equals(id)) {
return tab;
}
}
return null;
}
@Override
public void dispatchEvent(UserRequest ureq, Component source, Event event) {
//
}
@Override
public void dispatchEvent(UserRequest ureq, Controller source, Event event) {
//
}
@Override
public void reset() {
//
}
@Override
protected Component getFormItemComponent() {
return component;
}
@Override
public Iterable<FormItem> getFormItems() {
return new ArrayList<>(components.values());
}
@Override
public FormItem getFormComponent(String name) {
return components.get(name);
}
@Override
protected void rootFormAvailable() {
for(FormItem item:getFormItems()) {
rootFormAvailable(item);
}
}
private final void rootFormAvailable(FormItem item) {
if(item != null && item.getRootForm() != getRootForm()) {
item.setRootForm(getRootForm());
}
}
}
|
/**
 * Double-ended queue backed by a plain array.
 * Index 0 is the front; the last index is the back.
 */
class Deque {
  constructor() {
    // Backing store for the elements.
    this.collection = [];
  }

  /** Inserts an element at the front. */
  addFront(element) {
    this.collection.unshift(element);
  }

  /** Inserts an element at the back. */
  addBack(element) {
    this.collection.push(element);
  }

  /** Removes and returns the front element (undefined when empty). */
  removeFront() {
    return this.collection.shift();
  }

  /** Removes and returns the back element (undefined when empty). */
  removeBack() {
    return this.collection.pop();
  }

  /** Returns the front element without removing it. */
  getFront() {
    return this.collection[0];
  }

  /** Returns the back element without removing it. */
  getBack() {
    const items = this.collection;
    return items[items.length - 1];
  }

  /** True when the deque holds no elements. */
  isEmpty() {
    return this.collection.length === 0;
  }
}
# sudo apt-get update
# sudo apt-get install python-virtualenv

# Download the venv helper script and install it system-wide.
sudo wget https://raw.githubusercontent.com/zrthxn/venv/master/venv.sh -O /usr/local/bin/venv
sudo chmod +x /usr/local/bin/venv
# -f makes the symlink idempotent: without it, re-running this script
# fails with "File exists" once /usr/bin/venv has been created.
sudo ln -sf /usr/local/bin/venv /usr/bin
<gh_stars>10-100
package io.opensphere.featureactions.editor.ui;
import java.awt.Dimension;
import java.awt.Window;
import io.opensphere.core.Toolbox;
import io.opensphere.core.util.fx.JFXDialog;
import io.opensphere.core.util.swing.EventQueueUtilities;
import io.opensphere.featureactions.registry.FeatureActionsRegistry;
import io.opensphere.mantle.data.DataTypeInfo;
/**
* Displays the {@link SimpleFeatureActionEditor} so the user can edit feature
* actions for a given layer.
*/
/**
 * Displays the {@link SimpleFeatureActionEditor} so the user can edit feature
 * actions for a given layer.
 */
public class ActionEditorDisplayerImpl implements ActionEditorDisplayer
{
    /** Stores all feature actions. */
    private final FeatureActionsRegistry myActionRegistry;

    /** The system toolbox. */
    private final Toolbox myToolbox;

    /**
     * Constructs a new displayer.
     *
     * @param toolbox The system toolbox.
     * @param actionRegistry Stores all feature actions.
     */
    public ActionEditorDisplayerImpl(Toolbox toolbox, FeatureActionsRegistry actionRegistry)
    {
        myToolbox = toolbox;
        myActionRegistry = actionRegistry;
    }

    @Override
    public void displaySimpleEditor(Window owner, DataTypeInfo layer)
    {
        // Swing/dialog work must happen on the event dispatch thread.
        EventQueueUtilities.runOnEDT(() -> showEditorDialog(owner, layer));
    }

    /** Builds and shows the non-modal editor dialog for the given layer. */
    private void showEditorDialog(Window owner, DataTypeInfo layer)
    {
        String title = "Feature Actions for " + layer.getDisplayName();
        JFXDialog dialog = new JFXDialog(owner, title);
        dialog.setFxNode(new SimpleFeatureActionEditorUI(myToolbox, myActionRegistry, dialog, layer));
        dialog.setSize(new Dimension(910, 600));
        dialog.setResizable(true);
        dialog.setLocationRelativeTo(owner);
        dialog.setModal(false);
        dialog.setVisible(true);
    }
}
|
#!/bin/bash
# Fast fail the script on failures.
set -e

# Skip the coverage report on tagged builds.
# NOTE(review): the original message claimed "No TRAVIS_TAG found", but the
# condition (! -z, i.e. non-empty) actually fires when a tag IS present.
# The control flow is preserved here and the message corrected to match it.
if [[ -n "${TRAVIS_TAG}" ]]; then
  echo "Skipping coverage for tagged build: ${TRAVIS_TAG}"
  exit 0
fi

pub global activate dart_coveralls
dart_coveralls report --debug -C -E -T --retry=1 --token=$coverallsToken ./test/all_tests.playlist.dart
|
import BigNumber from "bignumber.js"
import { fmtAmount } from "./utils"
test('check fmtAmount edge cases', () => {
	// Default formatting truncates/rounds to 2 decimals.
	expect(fmtAmount(new BigNumber(1.126e18), "CELO")).toEqual("1.12")
	// An explicit digit count pads/overrides the default.
	expect(fmtAmount(new BigNumber(1.123e18), "CELO", 6)).toEqual("1.123000")
	expect(fmtAmount(new BigNumber(0.123e18), "CELO", 2)).toEqual("0.12")
	// Very small amounts are shown with 6 decimals by default.
	expect(fmtAmount(new BigNumber(0.0000123e18), "CELO")).toEqual("0.000012")
	// Thousands separators are inserted for large amounts.
	expect(fmtAmount(new BigNumber(1000.1e18), "CELO")).toEqual("1,000.10")
	expect(fmtAmount(new BigNumber(1100e18), "CELO")).toEqual("1,100.00")
})
<gh_stars>0
package com.cgovern.governor.api; |
# Packaging configuration for a cookiecutter-templated project.
# The '{{ ... }}' placeholders are filled in when the template is rendered.
from setuptools import find_packages, setup

setup(
    # Rendered distribution name from the cookiecutter prompt.
    name='{{ cookiecutter.repo_name }}',
    # "src layout": packages live under src/, not the repo root.
    packages=find_packages(where="src"),
    package_dir={"": "src"},
    version='0.1.0',
    description='{{ cookiecutter.description }}',
    author='Department of Social Protection',
    license='EUPL',
)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.