text stringlengths 1 1.05M |
|---|
#!/usr/bin/env bash
# This script creates training and validation splits, downloads a text corpus for
# language modeling, and prepares the training, validation and test data for the
# rimes dataset (i.e. text, images.scp, utt2spk and spk2utt). It calls process_data.py.
# Eg. local/prepare_data.sh
stage=2
download_dir=data/local
data_url="https://dl.fbaipublicfiles.com/fairseq/data/tutorial_names.tar.gz"
. ./cmd.sh
. ./path.sh
#. ./utils/parse_options.sh || exit 1;

if [ "${stage}" -le 1 ]; then
  # Download and unpack the tutorial corpus, then binarize it for fairseq.
  # All expansions are quoted to survive paths containing spaces (SC2086).
  mkdir -p "$download_dir"
  wget -P "$download_dir" "$data_url"
  tar -xzf "$download_dir/tutorial_names.tar.gz"
  fairseq-preprocess \
    --trainpref names/train --validpref names/valid --testpref names/test \
    --source-lang input --target-lang label \
    --destdir names-bin --dataset-impl raw
fi

if [ "${stage}" -le 2 ]; then
  # Train the classifier; mirror console output into a log file.
  mkdir -p log
  log_file=log/train.log
  fairseq-train names-bin \
    --task simple_classification \
    --arch pytorch_tutorial_rnn \
    --optimizer adam --lr 0.001 --lr-shrink 0.5 \
    --max-tokens 1000 | tee "$log_file"
fi

#python3 eval_classifier.py names-bin --path checkpoints/checkpoint_best.pt
#python3 train.py /Users/ashisharora/espresso/examples/classify_names/names-bin --task simple_classification --arch pytorch_tutorial_rnn --optimizer adam --lr 0.001 --lr-shrink 0.5 --max-tokens 1000
|
<filename>src/components/admin/Nav.js
import React, { Component } from "react";
import { Link } from "react-router-dom";
class Nav extends Component {
  /**
   * Renders the nav bar for the admin-side.
   *
   * Props:
   *   signout (function): click handler for the "Sign Out" button,
   *     supplied by the parent component.
   */
  render() {
    return (
      <nav className="nav">
        {/* Brand logo linking back to the admin dashboard. */}
        <Link to="/admin" className="nav-brand">
          <img
            src={require("../../assets/images/mia-logo.jpg")}
            className="nav-logo"
            alt="logo"
          />
        </Link>
        {/* Primary admin navigation links. */}
        <div className="nav-links">
          <Link className="nav-link" to="/admin">
            Dashboard
          </Link>
          <Link className="nav-link" to="/admin/sessions">
            Sessions
          </Link>
          <Link className="nav-link" to="/admin/quizzes">
            Quizzes
          </Link>
        </div>
        {/* Session controls (sign-out delegates to the parent via props). */}
        <div className="nav-right">
          <button onClick={this.props.signout}>Sign Out</button>
        </div>
      </nav>
    );
  }
}

export default Nav;
|
import geopandas as gpd
import holoviews as hv
import geoviews as gv
def plot_buffered_geometries(gdf, buffer_size, *args, **kwargs):
    """
    Plot buffered geometries from a GeoDataFrame on a map.

    Parameters:
    -----------
    gdf : geopandas.GeoDataFrame
        GeoDataFrame containing geometries to be buffered.
    buffer_size : numeric
        Size of the buffer in meters (measured in EPSG:31287).
    *args, **kwargs : additional arguments and keyword arguments
        Additional arguments and keyword arguments to be passed to the
        plotting (hvplot) call for the buffered layer.

    Returns:
    --------
    hv.Layout
        Plot of the buffered geometries overlaid on a map.

    Raises:
    -------
    ValueError
        If the input GeoDataFrame is empty or does not contain valid geometries.
    """
    # Reject inputs that have nothing drawable.
    if gdf.empty or gdf.geometry.isna().all():
        raise ValueError("Input GeoDataFrame is empty or does not contain valid geometries.")
    # Convert GeoDataFrame to EPSG:31287 CRS and buffer the geometries.
    # EPSG:31287 is a projected CRS, so buffer_size is applied in meters.
    buffered = gdf.to_crs('epsg:31287').buffer(buffer_size)
    # Convert the buffered geometries back to EPSG:4326 CRS for web-map display.
    buffered = gdf.copy().set_geometry(buffered).to_crs('epsg:4326')
    # Create a plot of the buffered geometries overlaid on a map.
    # NOTE(review): the .hvplot accessor requires hvplot to be imported and
    # registered (e.g. `import hvplot.pandas`), which this module does not do —
    # presumably a caller imports it; confirm. 'DESIGNATION' is assumed to be
    # a column of gdf — TODO confirm against callers.
    plot = (buffered.hvplot(geo=True, tiles='OSM', alpha=0.5, line_width=0, *args, **kwargs) *
            gdf.hvplot(geo=True, hover_cols=['DESIGNATION'])
            ).opts(active_tools=['wheel_zoom'])
    return plot
#!/bin/sh -e
# Abort on any command failure. This repeats the shebang's -e, which would be
# lost if the script were invoked as "sh script.sh" rather than executed.
set -o errexit
###
# Copyright (c) 2015-2019, Antoine "vv221/vv222" Le Gonidec
# Copyright (c) 2016-2019, Solène "Mopi" Huault
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# This software is provided by the copyright holders and contributors "as is"
# and any express or implied warranties, including, but not limited to, the
# implied warranties of merchantability and fitness for a particular purpose
# are disclaimed. In no event shall the copyright holder or contributors be
# liable for any direct, indirect, incidental, special, exemplary, or
# consequential damages (including, but not limited to, procurement of
# substitute goods or services; loss of use, data, or profits; or business
# interruption) however caused and on any theory of liability, whether in
# contract, strict liability, or tort (including negligence or otherwise)
# arising in any way out of the use of this software, even if advised of the
# possibility of such damage.
###
###
# Anima: Gate of Memories
# build native packages from the original installers
# send your bug reports to vv221@dotslashplay.it
###
script_version=20181007.3

# Set game-specific variables

GAME_ID='anima-gate-of-memories'
GAME_NAME='Anima: Gate of Memories'

# Identification of the supported GOG installer (verified by the library).
ARCHIVE_GOG='gog_anima_gate_of_memories_2.0.0.1.sh'
ARCHIVE_GOG_URL='https://www.gog.com/game/anima_gate_of_memories'
ARCHIVE_GOG_MD5='681d05255e1a162947af69d3e7537748'
ARCHIVE_GOG_SIZE='8900000'
ARCHIVE_GOG_VERSION='1.0-gog2.0.0.1'
ARCHIVE_GOG_TYPE='mojosetup_unzip'

# Mapping of installer contents to the output packages (docs, 32/64-bit
# binaries, Unity assets, remaining data).
ARCHIVE_DOC_DATA_PATH='data/noarch/docs'
ARCHIVE_DOC_DATA_FILES='*'
ARCHIVE_GAME_BIN32_PATH='data/noarch/game'
ARCHIVE_GAME_BIN32_FILES='GoMLinux.x86 GoMLinux_Data/*/x86'
ARCHIVE_GAME_BIN64_PATH='data/noarch/game'
ARCHIVE_GAME_BIN64_FILES='GoMLinux.x86_64 GoMLinux_Data/*/x86_64'
ARCHIVE_GAME_ASSETS_PATH='data/noarch/game'
ARCHIVE_GAME_ASSETS_FILES='GoMLinux_Data/*.assets GoMLinux_Data/*.assets.resS'
ARCHIVE_GAME_DATA_PATH='data/noarch/game'
ARCHIVE_GAME_DATA_FILES='GoMLinux_Data'

# Writable directories kept per-user at runtime.
DATA_DIRS='./logs'

APP_MAIN_TYPE='native'
APP_MAIN_EXE_BIN32='GoMLinux.x86'
APP_MAIN_EXE_BIN64='GoMLinux.x86_64'
# shellcheck disable=SC2016
# The $(date ...) below must NOT be expanded here; it is evaluated at game
# launch time by the generated launcher.
APP_MAIN_OPTIONS='-logFile ./logs/$(date +%F-%R).log'
APP_MAIN_ICON='GoMLinux_Data/Resources/UnityPlayer.png'

PACKAGES_LIST='PKG_BIN32 PKG_BIN64 PKG_ASSETS PKG_DATA'

PKG_ASSETS_ID="${GAME_ID}-assets"
PKG_ASSETS_DESCRIPTION='assets'

PKG_DATA_ID="${GAME_ID}-data"
PKG_DATA_DESCRIPTION='data'

PKG_BIN32_ARCH='32'
PKG_BIN32_DEPS="$PKG_ASSETS_ID $PKG_DATA_ID glibc libstdc++ glu xcursor libxrandr gtk2 libudev1"

PKG_BIN64_ARCH='64'
PKG_BIN64_DEPS="$PKG_BIN32_DEPS"

# Load common functions

target_version='2.10'

if [ -z "$PLAYIT_LIB2" ]; then
	: "${XDG_DATA_HOME:="$HOME/.local/share"}"
	# Search the usual installation locations for the play.it 2 library.
	for path in\
		"$PWD"\
		"$XDG_DATA_HOME/play.it"\
		'/usr/local/share/games/play.it'\
		'/usr/local/share/play.it'\
		'/usr/share/games/play.it'\
		'/usr/share/play.it'
	do
		if [ -e "$path/libplayit2.sh" ]; then
			PLAYIT_LIB2="$path/libplayit2.sh"
			break
		fi
	done
fi
if [ -z "$PLAYIT_LIB2" ]; then
	printf '\n\033[1;31mError:\033[0m\n'
	printf 'libplayit2.sh not found.\n'
	exit 1
fi
#shellcheck source=play.it-2/lib/libplayit2.sh
. "$PLAYIT_LIB2"

# Extract game data

extract_data_from "$SOURCE_ARCHIVE"
prepare_package_layout
rm --recursive "$PLAYIT_WORKDIR/gamedata"

# Write launchers
# write_launcher presumably acts on the package selected via $PKG — the
# library's usual convention; confirm against libplayit2.sh.

for PKG in 'PKG_BIN32' 'PKG_BIN64'; do
	write_launcher 'APP_MAIN'
done

# Build package

PKG='PKG_DATA'
icons_linking_postinst 'APP_MAIN'
write_metadata 'PKG_DATA'
write_metadata 'PKG_ASSETS' 'PKG_BIN32' 'PKG_BIN64'
build_pkg

# Clean up

rm --recursive "$PLAYIT_WORKDIR"

# Print instructions

print_instructions

exit 0
|
<gh_stars>1-10
package com.release.okhelper.callBack;
import java.io.IOException;
import okhttp3.Response;
/**
* @author Mr.release
* @create 2019/4/3
* @Describe
*/
public abstract class StringCallBack extends ICallback<String> {
@Override
public String responseChange(Response response) throws IOException {
return response.body().string() + "";
}
}
|
/** The Git Build Hook Maven Plugin's validation package for logic that performs validation common between goals. */
package com.rudikershaw.gitbuildhook.validation;
|
#!/usr/bin/env bash
# initNonPortableDistroRid
#
# Input:
# targetOs: (str)
# buildArch: (str)
# isPortable: (int)
# rootfsDir: (str)
#
# Return:
# None
#
# Notes:
#
# initNonPortableDistroRid will attempt to initialize a non portable rid. These
# rids are specific to distros need to build the product/package and consume
# them on the same platform.
#
# If -portablebuild=false is passed a non-portable rid will be created for any
# distro.
#
# It is important to note that the function does not return anything, but it
# exports __DistroRid, if there is a non-portable distro rid to be used.
#
#######################################
# Attempt to initialize a non-portable (distro-specific) rid.
# Globals:   exports __DistroRid and __PortableBuild on success
# Arguments: $1 targetOs, $2 buildArch, $3 isPortable (int), $4 rootfsDir
# Outputs:   nothing on stdout (see getprop fix below)
#######################################
initNonPortableDistroRid()
{
    # Make sure out parameter is cleared.
    __DistroRid=

    local targetOs="$1"
    local buildArch="$2"
    local isPortable="$3"
    local rootfsDir="$4"
    local nonPortableBuildID=""

    if [ "$targetOs" = "Linux" ]; then
        if [ -e "${rootfsDir}/etc/os-release" ]; then
            # Brings ID / VERSION_ID for the target distro into scope.
            source "${rootfsDir}/etc/os-release"

            # We have forced __PortableBuild=0. This is because -portablebuld
            # has been passed as false.
            if (( isPortable == 0 )); then
                if [ "${ID}" = "rhel" ]; then
                    # remove the last version digit
                    VERSION_ID="${VERSION_ID%.*}"
                fi

                if [ -z "${VERSION_ID+x}" ]; then
                    # Rolling release distros do not set VERSION_ID, so omit
                    # it here to be consistent with everything else.
                    nonPortableBuildID="${ID}-${buildArch}"
                else
                    nonPortableBuildID="${ID}.${VERSION_ID}-${buildArch}"
                fi
            fi
        elif [ -e "${rootfsDir}/android_platform" ]; then
            source "${rootfsDir}/android_platform"
            nonPortableBuildID="$RID"
        fi
    fi

    if [ "$targetOs" = "FreeBSD" ]; then
        if (( isPortable == 0 )); then
            # $rootfsDir can be empty. freebsd-version is shell script and it should always work.
            # Quote the path and use read -r so backslashes/spaces are preserved.
            __freebsd_major_version=$("$rootfsDir"/bin/freebsd-version | { read -r v; echo "${v%%.*}"; })
            nonPortableBuildID="freebsd.$__freebsd_major_version-${buildArch}"
        fi
    # BUG FIX: 'command -v getprop' used to print the resolved path to stdout,
    # polluting the caller's output; discard it.
    elif command -v getprop >/dev/null && getprop ro.product.system.model 2>&1 | grep -qi android; then
        __android_sdk_version=$(getprop ro.build.version.sdk)
        nonPortableBuildID="android.$__android_sdk_version-${buildArch}"
    elif [ "$targetOs" = "illumos" ]; then
        __uname_version=$(uname -v)
        case "$__uname_version" in
            omnios-*)
                # e.g. "omnios-r151036..." -> "r1" slice per original offsets.
                __omnios_major_version="${__uname_version:8:2}"
                nonPortableBuildID=omnios."$__omnios_major_version"-"$buildArch"
                ;;
            joyent_*)
                __smartos_major_version="${__uname_version:7:4}"
                nonPortableBuildID=smartos."$__smartos_major_version"-"$buildArch"
                ;;
            illumos_*)
                nonPortableBuildID=openindiana-"$buildArch"
                ;;
        esac
    elif [ "$targetOs" = "Solaris" ]; then
        __uname_version=$(uname -v)
        # Strip the minor version, e.g. "11.4" -> "11".
        __solaris_major_version="${__uname_version%.*}"
        nonPortableBuildID=solaris."$__solaris_major_version"-"$buildArch"
    fi

    if [ -n "${nonPortableBuildID}" ]; then
        __DistroRid="${nonPortableBuildID}"

        # We are using a non-portable build rid. Force __PortableBuild to false.
        __PortableBuild=0
        export __DistroRid __PortableBuild
    fi
}
# initDistroRidGlobal
#
# Input:
# os: (str)
# arch: (str)
# isPortable: (int)
# rootfsDir?: (nullable:string)
#
# Return:
# None
#
# Notes:
#
# It is important to note that the function does not return anything, but it
# exports the following variables on success:
#
# __DistroRid
# __PortableBuild
#
#######################################
# Resolve and export the distro rid for the current build.
# Globals:   exports __DistroRid, and __PortableBuild for portable builds
# Arguments: $1 os, $2 arch, $3 portable flag (int), $4 optional rootfsDir
#######################################
initDistroRidGlobal()
{
    # __DistroRid must be set at the end of the function.
    # The former __HostDistroRid is deprecated; only __DistroRid is supported.
    # It covers both portable and non-portable rids and is consumed by
    # build-packages.sh.
    local os="$1"
    local arch="$2"
    local portable="$3"
    local rootfsDir=""
    if [ "$#" -ge 4 ]; then
        rootfsDir="$4"
    fi

    if [ -n "${rootfsDir}" ]; then
        # We may have a cross build. Check for the existance of the rootfsDir
        if [ ! -e "${rootfsDir}" ]; then
            echo "Error rootfsDir has been passed, but the location is not valid."
            exit 1
        fi
    fi

    initNonPortableDistroRid "${os}" "${arch}" "${portable}" "${rootfsDir}"

    if [ "$arch" = "wasm" ]; then
        __DistroRid=browser-wasm
        export __DistroRid
    fi

    if [ -z "${__DistroRid}" ]; then
        # The non-portable build rid was not set. Set the portable rid.
        __PortableBuild=1
        export __PortableBuild
        local rid=""

        # Check for musl-based distros (e.g Alpine Linux, Void Linux).
        if "${rootfsDir}/usr/bin/ldd" --version 2>&1 | grep -q musl ||
           strings "${rootfsDir}/usr/bin/ldd" 2>&1 | grep -q musl; then
            rid="linux-musl-${arch}"
        fi

        if [ -z "${rid}" ]; then
            # Map the target OS name onto its portable rid prefix.
            case "$os" in
                Linux)          rid="linux-$arch" ;;
                OSX)            rid="osx-$arch" ;;
                MacCatalyst)    rid="maccatalyst-$arch" ;;
                tvOS)           rid="tvos-$arch" ;;
                tvOSSimulator)  rid="tvossimulator-$arch" ;;
                iOS)            rid="ios-$arch" ;;
                iOSSimulator)   rid="iossimulator-$arch" ;;
                Android)        rid="android-$arch" ;;
                Browser)        rid="browser-$arch" ;;
                FreeBSD)        rid="freebsd-$arch" ;;
                illumos)        rid="illumos-$arch" ;;
                Solaris)        rid="solaris-$arch" ;;
            esac
        fi

        __DistroRid="${rid}"
        export __DistroRid
    fi

    if [ -z "$__DistroRid" ]; then
        echo "DistroRid is not set. This is almost certainly an error"
        exit 1
    fi

    echo "__DistroRid: ${__DistroRid}"
}
|
use actix_web::{web, App, HttpServer, HttpResponse, Responder, post, middleware, dev::ServiceRequest, dev::ServiceResponse, Error};
use serde::{Deserialize, Serialize};
/// Request payload for `POST /process`.
#[derive(Deserialize)]
struct InputData {
    input: String,
}

/// Response payload: echoes the input string and its length
/// (as returned by `String::len`, i.e. bytes).
#[derive(Serialize)]
struct ProcessedData {
    input: String,
    length: usize,
}
#[post("/process")]
async fn process(input: web::Json<InputData>) -> impl Responder {
let input_length = input.input.len();
let processed_data = ProcessedData {
input: input.input.clone(),
length: input_length,
};
HttpResponse::Ok().json(processed_data)
}
#[actix_web::main]
async fn main() -> std::io::Result<()> {
let server = HttpServer::new(|| {
App::new()
.wrap(middleware::NormalizePath::default())
.service(healthz)
.service(process)
})
.bind("127.0.0.1:8080")?
.run();
server.await
} |
<filename>components/Navbar.js<gh_stars>0
import React from 'react';
/**
 * Store-front navigation bar: menu icon, logo, category list and auth links.
 * The commented-out `<% ... %>` fragments appear to be leftovers from an EJS
 * template (conditional rendering on currentUser) — kept as-is for reference.
 */
function Navbar() {
  return (
    <div>
      {/* Hamburger icon; behavior presumably wired up by external CSS/JS. */}
      <div id="menuicon">
        <div id="menuline"></div>
      </div>
      <div id="mainmenu">
        <nav>
          <div className="logo">
            <img src="logo.png" alt="" />
            <a href="/">Shop</a>
          </div>
          {/* Category entries — static text, no routing attached yet. */}
          <ul>
            <li>Fashion</li>
            <li>Electronics</li>
            <li>Food</li>
          </ul>
          <div className="icons-user">
            {/* <% if(!currentUser){ %> */}
            <a href="/login" className="btn-sec">
              Login
            </a>
            <a href="/register" className="btn-sec blue">
              Sign Up
            </a>
            {/* <% } else{ %> */}
          </div>
          <div id="close">
            <i className="fa fa-close"></i>
          </div>
        </nav>
      </div>
      <div className="padding"></div>
    </div>
  );
}

export default Navbar;
|
export type CategoryItemProps = {
category: Category
};
const capitalizeCategoryNames = (categoryItems: CategoryItemProps[]): CategoryItemProps[] => {
return categoryItems.map(item => ({
category: item.category.charAt(0).toUpperCase() + item.category.slice(1)
}));
};
// Test the function
const categoryItems: CategoryItemProps[] = [
{ category: "electronics" },
{ category: "clothing" },
{ category: "books" }
];
const capitalizedCategories = capitalizeCategoryNames(categoryItems);
console.log(capitalizedCategories); |
package org.egovframe.rte.psl.dataaccess.dao;
import java.util.List;
import org.egovframe.rte.psl.dataaccess.EgovAbstractDAO;
import org.egovframe.rte.psl.dataaccess.vo.JobHistVO;
import org.springframework.stereotype.Repository;
@Repository("jobHistDAO")
public class JobHistDAO extends EgovAbstractDAO {

    /**
     * Executes the mapped statement identified by queryId and returns a
     * single row (null-row semantics follow the underlying iBATIS
     * queryForObject — presumably null when no row matches; confirm).
     *
     * @param queryId id of the SQL map statement to execute
     * @param vo      parameter object supplying query values
     * @return the matching row, cast to JobHistVO
     */
    @SuppressWarnings("deprecation")
    public JobHistVO selectJobHist(String queryId, JobHistVO vo) {
        return (JobHistVO) getSqlMapClientTemplate().queryForObject(queryId, vo);
    }

    /**
     * Executes the mapped statement identified by queryId and returns all
     * matching rows.
     *
     * @param queryId id of the SQL map statement to execute
     * @param vo      parameter object supplying query values
     * @return list of matching JobHistVO rows
     */
    @SuppressWarnings({ "unchecked", "deprecation" })
    public List<JobHistVO> selectJobHistList(String queryId, JobHistVO vo) {
        return getSqlMapClientTemplate().queryForList(queryId, vo);
    }
}
|
/**
* Module to add labels to breaks and continues according to tagged AST nodes
* generated by desugarLoop.
*
*/
import { NodePath, Visitor } from 'babel-traverse';
import * as t from 'babel-types';
import { While, Break } from '@stopify/util';
// Visitor that rewrites `continue`/`break` statements into labeled `break`s,
// using label tags attached to loop nodes by the desugarLoop pass (see the
// module header).
const labelVisitor : Visitor = {
  ContinueStatement: function (path: NodePath<t.ContinueStatement>): void {
    const { label } = path.node;
    if (label) {
      // Labeled continue: replace with a break to the same label —
      // desugarLoop arranges the labels so this is equivalent.
      const breakStatement = t.breakStatement(label);
      path.replaceWith(breakStatement);
    } else {
      // Unlabeled continue: use the continue_label tagged onto the
      // innermost enclosing while loop by desugarLoop.
      const loopParent : NodePath<While<t.Node>> =
        path.findParent(p => p.isWhileStatement());
      const continueLabel = loopParent.node.continue_label;
      const breakStatement = t.breakStatement(continueLabel);
      path.replaceWith(breakStatement);
    }
  },
  BreakStatement: function (path: NodePath<Break<t.BreakStatement>>): void {
    const label = path.node.label;
    if (label === null) {
      // Unlabeled break: attach the break_label of the nearest loop or
      // switch statement; leave the node untouched when none is found.
      const labeledParent : NodePath<Break<t.Node>> =
        path.findParent(p => p.isLoop() || p.isSwitchStatement());
      if (labeledParent === null) {
        return;
      }
      path.node.label = <t.Identifier>labeledParent.node.break_label;
    }
  },
};

// Babel plugin entry point.
module.exports = function() {
  return { visitor: labelVisitor };
};
|
# Homebrew Cask definition (legacy :v1 DSL) for the Seafile desktop client.
cask :v1 => 'seafile-client' do
  version '3.1.5'
  sha256 '25eedd712d57bcf94fd19ba2783d5f0278c3e95403ee07f300e17dabbd6bb75a'

  # Upstream distributes the .dmg via Bitbucket downloads.
  url "https://bitbucket.org/haiwen/seafile/downloads/seafile-client-#{version}.dmg"
  homepage 'http://seafile.com/'
  license :oss

  app 'Seafile Client.app'
end
|
(function (global, factory) {
    // UMD wrapper: CommonJS, AMD, or a browser global.
    typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
    typeof define === 'function' && define.amd ? define(factory) :
    (global.CensoreFormatter = factory());
}(this, (function () {
    'use strict';

    var startedDictionary = require('./data/dictionary.json');

    var CensoreFormatter = {
        // Combined word list: the built-in dictionary plus user-added words.
        dictionary: function () {
            return this.x_Dictionary.concat(this.userDictionary)
        },
        x_Dictionary: startedDictionary.words,
        userDictionary: [],

        // Append an array of words to the user dictionary. Chainable.
        addWords: function (words) {
            if (typeof words !== 'object') {
                throw new Error('Argument in addWord method must be an array');
            }
            else {
                this.userDictionary = this.userDictionary.concat(words);
                return this;
            }
        },

        // Censor every dictionary word found in stringToFormat,
        // e.g. "word" -> "w**d".
        format: function (stringToFormat, flagDep) {
            let text = this.formattingText(stringToFormat, flagDep);
            return text;
        },

        formattingText: function (stringToFormat, flagDep) {
            let regular = this.regularForFormating(flagDep),
                returnedString = '';

            returnedString = stringToFormat.replace(regular, function (found) {
                // Keep the first and last characters, star out the middle.
                let stars = '';
                for (let j = 0; j < found.length - 2; j++) {
                    stars += '*';
                }
                return found.charAt(0) + stars + found.charAt(found.length - 1);
            });
            return returnedString;
        },

        // Build one alternation regex over the whole dictionary: (w1)|(w2)|...
        // caseDep === true (or falsy) => case-insensitive; any other truthy
        // value => case-sensitive, matching the original flag logic.
        regularForFormating: function (caseDep) {
            let regular = '',
                flags = 'gi';

            for (let i = 0; i < this.dictionary().length; i++) {
                let item = this.dictionary()[i];
                regular += '(' + item + ')';
                if (i !== this.dictionary().length - 1) {
                    regular += '|';
                }
            }

            if (caseDep) {
                if (caseDep == true) {
                    flags = 'gi';
                }
                else {
                    flags = 'g';
                }
            }

            // BUG FIX: the computed flags were previously discarded — the
            // RegExp was built with a hard-coded 'gi', so the case-sensitive
            // branch above could never take effect.
            regular = new RegExp(regular, flags);
            return regular;
        }
    };
    return CensoreFormatter
})));
<reponame>20sffactory/opencv_attempts<gh_stars>1-10
import imutils
import cv2
import numpy as np

# Open camera index 1 (presumably an external webcam; index 0 is usually the
# built-in one — confirm on the target machine).
cap = cv2.VideoCapture(1)
cap.set(3, 640)  # property id 3 = frame width
cap.set(4, 480)  # property id 4 = frame height

while True:
    _, frame = cap.read()
    frame_img = frame.copy()
    # Grayscale + edge-preserving smoothing before thresholding.
    img = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    img = cv2.bilateralFilter(img,11,17,17)
    # Binarize, invert so objects are white, then despeckle and edge-detect.
    _,thres = cv2.threshold(img,120,255,cv2.THRESH_BINARY)
    thres = cv2.bitwise_not(thres)
    thres = cv2.medianBlur(thres,7)
    edged = cv2.Canny(thres, 3, 180)
    # Outer contours only; contours with area > 1000 px are counted as coins
    # and drawn onto the display frame.
    cnts = cv2.findContours(edged,cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_SIMPLE)[0]
    cnt_len = 0
    for each in range(len(cnts)):
        if cv2.contourArea(cnts[each]) > 1000:
            frame_img = cv2.drawContours(frame_img, cnts, each, (255,0,0), 3)
            cnt_len += 1
    frame_txt = frame_img.copy()
    frame_txt = cv2.putText(frame_img,"Number of Coins: "+str(cnt_len),\
        (50,50),cv2.FONT_HERSHEY_SIMPLEX,0.8,(255,0,0))
    cv2.imshow('frame',imutils.resize(frame_txt,width=800))
    # ESC (key code 27) quits the loop.
    k = cv2.waitKey(5) & 0xFF
    if k == 27:
        break

cv2.destroyAllWindows()
public class MultiplicationTable {

    /**
     * Prints the multiplication table (1 through 10) for the number given as
     * the first command-line argument, one line per product in the form
     * "n X i = result".
     *
     * @param args args[0] must be a decimal integer
     */
    public static void main(String[] args) {
        final int base = Integer.parseInt(args[0]);
        for (int multiplier = 1; multiplier <= 10; multiplier++) {
            final int product = base * multiplier;
            System.out.println(base + " X " + multiplier + " = " + product);
        }
    }
}
#!/bin/bash
set -e
set +x
trap "cd $(pwd -P)" EXIT
cd "$(dirname "$0")"
REMOTE_BROWSER_UPSTREAM="browser_upstream"
BUILD_BRANCH="playwright-build"
# COLORS
RED=$'\e[1;31m'
GRN=$'\e[1;32m'
YEL=$'\e[1;33m'
END=$'\e[0m'
if [[ ($1 == '--help') || ($1 == '-h') ]]; then
echo "usage: export.sh [firefox|webkit] [custom_checkout_path]"
echo
echo "Exports patch from the current branch of the checkout to browser folder."
echo "The checkout has to be 'prepared', meaning that 'prepare_checkout.sh' should be"
echo "run against it first."
echo
echo "You can optionally specify custom_checkout_path if you have browser checkout somewhere else"
echo "and wish to export patches from it."
echo
exit 0
fi
if [[ $# == 0 ]]; then
echo "missing browser: 'firefox' or 'webkit'"
echo "try './export.sh --help' for more information"
exit 1
fi
# FRIENDLY_CHECKOUT_PATH is used only for logging.
FRIENDLY_CHECKOUT_PATH="";
BUILD_NUMBER_UPSTREAM_URL=""
CHECKOUT_PATH=""
EXPORT_PATH=""
EXTRA_FOLDER_PW_PATH=""
EXTRA_FOLDER_CHECKOUT_RELPATH=""
if [[ ("$1" == "firefox") || ("$1" == "firefox/") || ("$1" == "ff") ]]; then
if [[ -z "${FF_CHECKOUT_PATH}" ]]; then
FRIENDLY_CHECKOUT_PATH='$HOME/firefox';
CHECKOUT_PATH="$HOME/firefox"
else
echo "WARNING: using checkout path from FF_CHECKOUT_PATH env: ${FF_CHECKOUT_PATH}"
CHECKOUT_PATH="${FF_CHECKOUT_PATH}"
FRIENDLY_CHECKOUT_PATH="<FF_CHECKOUT_PATH>"
fi
EXTRA_FOLDER_PW_PATH="$PWD/firefox/juggler"
EXTRA_FOLDER_CHECKOUT_RELPATH="juggler"
EXPORT_PATH="$PWD/firefox"
BUILD_NUMBER_UPSTREAM_URL="https://raw.githubusercontent.com/microsoft/playwright/main/browser_patches/firefox/BUILD_NUMBER"
source "./firefox/UPSTREAM_CONFIG.sh"
elif [[ ("$1" == "firefox-beta") || ("$1" == "ff-beta") ]]; then
if [[ -z "${FF_CHECKOUT_PATH}" ]]; then
FRIENDLY_CHECKOUT_PATH='$HOME/firefox';
CHECKOUT_PATH="$HOME/firefox"
else
echo "WARNING: using checkout path from FF_CHECKOUT_PATH env: ${FF_CHECKOUT_PATH}"
CHECKOUT_PATH="${FF_CHECKOUT_PATH}"
FRIENDLY_CHECKOUT_PATH="<FF_CHECKOUT_PATH>"
fi
EXTRA_FOLDER_PW_PATH="$PWD/firefox-beta/juggler"
EXTRA_FOLDER_CHECKOUT_RELPATH="juggler"
EXPORT_PATH="$PWD/firefox-beta"
BUILD_NUMBER_UPSTREAM_URL="https://raw.githubusercontent.com/microsoft/playwright/main/browser_patches/firefox-beta/BUILD_NUMBER"
source "./firefox-beta/UPSTREAM_CONFIG.sh"
elif [[ ("$1" == "webkit") || ("$1" == "webkit/") || ("$1" == "wk") ]]; then
if [[ -z "${WK_CHECKOUT_PATH}" ]]; then
FRIENDLY_CHECKOUT_PATH='$HOME/webkit';
CHECKOUT_PATH="$HOME/webkit"
else
echo "WARNING: using checkout path from WK_CHECKOUT_PATH env: ${WK_CHECKOUT_PATH}"
CHECKOUT_PATH="${WK_CHECKOUT_PATH}"
FRIENDLY_CHECKOUT_PATH="<WK_CHECKOUT_PATH>"
fi
EXTRA_FOLDER_PW_PATH="$PWD/webkit/embedder/Playwright"
EXTRA_FOLDER_CHECKOUT_RELPATH="Tools/Playwright"
EXPORT_PATH="$PWD/webkit"
BUILD_NUMBER_UPSTREAM_URL="https://raw.githubusercontent.com/microsoft/playwright/main/browser_patches/webkit/BUILD_NUMBER"
source "./webkit/UPSTREAM_CONFIG.sh"
else
echo ERROR: unknown browser to export - "$1"
exit 1
fi
# we will use this just for beauty.
if [[ $# == 2 ]]; then
echo "WARNING: using custom checkout path $2"
CHECKOUT_PATH=$2
FRIENDLY_CHECKOUT_PATH="<custom_checkout ( $2 )>"
fi
# if there's no checkout folder - bail out.
if ! [[ -d $CHECKOUT_PATH ]]; then
echo "ERROR: $FRIENDLY_CHECKOUT_PATH is missing - nothing to export."
exit 1;
else
echo "-- checking $FRIENDLY_CHECKOUT_PATH exists - OK"
fi
# if folder exists but not a git repository - bail out.
if ! [[ -d $CHECKOUT_PATH/.git ]]; then
echo "ERROR: $FRIENDLY_CHECKOUT_PATH is not a git repository! Nothing to export."
exit 1
else
echo "-- checking $FRIENDLY_CHECKOUT_PATH is a git repo - OK"
fi
# Switch to git repository.
cd "$CHECKOUT_PATH"
# Setting up |$REMOTE_BROWSER_UPSTREAM| remote and fetch the $BASE_BRANCH
if git remote get-url $REMOTE_BROWSER_UPSTREAM >/dev/null; then
if ! [[ $(git config --get remote.$REMOTE_BROWSER_UPSTREAM.url || echo "") == "$REMOTE_URL" ]]; then
echo "ERROR: remote $REMOTE_BROWSER_UPSTREAM is not pointing to '$REMOTE_URL'! run 'prepare_checkout.sh' first"
exit 1
fi
else
echo "ERROR: checkout does not have $REMOTE_BROWSER_UPSTREAM; run 'prepare_checkout.sh' first"
exit 1
fi
# Check if git repo is dirty.
if [[ -n $(git status -s --untracked-files=no) ]]; then
echo "ERROR: $FRIENDLY_CHECKOUT_PATH has dirty GIT state - aborting export."
exit 1
else
echo "-- checking $FRIENDLY_CHECKOUT_PATH is clean - OK"
fi
PATCH_NAME=$(ls -1 "$EXPORT_PATH"/patches)
if [[ -z "$PATCH_NAME" ]]; then
PATCH_NAME="bootstrap.diff"
OLD_DIFF=""
else
OLD_DIFF=$(cat "$EXPORT_PATH"/patches/$PATCH_NAME)
fi
CURRENT_BRANCH=$(git rev-parse --abbrev-ref HEAD)
NEW_BASE_REVISION=$(git merge-base $REMOTE_BROWSER_UPSTREAM/"$BASE_BRANCH" "$CURRENT_BRANCH")
NEW_DIFF=$(git diff --diff-algorithm=myers --full-index "$NEW_BASE_REVISION" "$CURRENT_BRANCH" -- . ":!${EXTRA_FOLDER_CHECKOUT_RELPATH}")
# Increment BUILD_NUMBER
BUILD_NUMBER=$(curl ${BUILD_NUMBER_UPSTREAM_URL} | head -1)
BUILD_NUMBER=$((BUILD_NUMBER+1))
echo "REMOTE_URL=\"$REMOTE_URL\"
BASE_BRANCH=\"$BASE_BRANCH\"
BASE_REVISION=\"$NEW_BASE_REVISION\"" > "$EXPORT_PATH"/UPSTREAM_CONFIG.sh
echo "$NEW_DIFF" > "$EXPORT_PATH"/patches/$PATCH_NAME
echo $BUILD_NUMBER > "$EXPORT_PATH"/BUILD_NUMBER
echo "Changed: $(git config user.email) $(date)" >> "$EXPORT_PATH"/BUILD_NUMBER
echo "-- exporting standalone folder"
rm -rf "${EXTRA_FOLDER_PW_PATH}"
mkdir -p $(dirname "${EXTRA_FOLDER_PW_PATH}")
cp -r "${EXTRA_FOLDER_CHECKOUT_RELPATH}" "${EXTRA_FOLDER_PW_PATH}"
NEW_BASE_REVISION_TEXT="$NEW_BASE_REVISION (not changed)"
if [[ "$NEW_BASE_REVISION" != "$BASE_REVISION" ]]; then
NEW_BASE_REVISION_TEXT="$YEL$NEW_BASE_REVISION (changed)$END"
fi
echo "=============================================================="
echo " Repository: $FRIENDLY_CHECKOUT_PATH"
echo " Changes between branches: $REMOTE_BROWSER_UPSTREAM/$BASE_BRANCH..$CURRENT_BRANCH"
echo " BASE_REVISION: $NEW_BASE_REVISION_TEXT"
echo " BUILD_NUMBER: $YEL$BUILD_NUMBER (changed)$END"
echo "=============================================================="
echo
|
from django.db import models
from hill.models.abstract_base import AbstractBase
class Repository(AbstractBase):
    """A collection of Packages."""

    # Human-readable repository name; also the primary sort key (see Meta).
    name = models.CharField(max_length=500, help_text="The repository name.")
    # NOTE(review): the help_text says "home page" but the field is
    # download_url — presumably it should describe the download location;
    # confirm with the author before changing.
    download_url = models.URLField(help_text="Absolute url to the home page.")
    home_url = models.URLField()

    class Meta:
        # modified_date is presumably defined on AbstractBase — TODO confirm.
        ordering = ["name", "modified_date"]

    def __str__(self):
        return self.name
|
<reponame>gitter-badger/yggdrash<filename>yggdrash-core/src/test/java/io/yggdrash/core/net/PeerTest.java
package io.yggdrash.core.net;
import io.yggdrash.core.exception.NotValidateException;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
public class PeerTest {
@Test
public void createPeerTest() {
Peer peer = Peer.valueOf("ynode://75bff16c22e6b38c71fd2005657827acce3dfd4a1db1cc417303e42"
+ "9d7da9625525ba3f1b7794e104397467f8c5a11c8e86af4ffcc0aefcdf7024013cdc0508d"
+ "@yggdrash-node1:32918");
assertThat(peer.getHost()).isEqualTo("yggdrash-node1");
assertThat(peer.getPort()).isEqualTo(32918);
}
@Test
public void createPeerWithNodeIdTest() {
Peer peer = Peer.valueOf("75bff16c", "yggdrash-node1", 32918);
assertThat(peer.getYnodeUri()).isEqualTo("ynode://75bff16c@yggdrash-node1:32918");
}
@Test(expected = NotValidateException.class)
public void unkownSchemaTest() {
Peer.valueOf("http://75bff16c@yggdrash-node1:32918");
}
@Test
public void equalsTest() {
Peer peer1 = Peer.valueOf("ynode://75bff16c@127.0.0.1:32918");
Peer peer2 = Peer.valueOf("ynode://75bff16c@127.0.0.1:32919");
assert !peer1.equals(peer2);
}
} |
<reponame>kll5h/ShinetechOA<filename>src/main/resources/dbmigrate/hsql/customer/V0_0_0_1__customer_info.sql<gh_stars>1-10
-------------------------------------------------------------------------------
-- customer info
-------------------------------------------------------------------------------
CREATE TABLE CUSTOMER_INFO(
    ID BIGINT NOT NULL,         -- surrogate primary key
    NAME VARCHAR(200),          -- customer name
    CREATE_TIME DATETIME,       -- record creation timestamp
    STATUS VARCHAR(50),
    TYPE VARCHAR(50),
    ADDRESS VARCHAR(200),
    CONTACT VARCHAR(100),
    COMPANY VARCHAR(100),
    DESCRIPTION VARCHAR(65535), -- free-form notes
    USER_ID VARCHAR(64),        -- owning user id (FK semantics TODO: confirm)
    TENANT_ID VARCHAR(64),      -- presumably a multi-tenant discriminator
    CONSTRAINT PK_CUSTOMER_INFO PRIMARY KEY(ID)
);
|
import React, {Component} from 'react';
class InsertAnElementOnMouseOver extends Component{
state = {
insert: false
}
render() {
return (
<div>
<h2>鼠标移动到一个元素时,插入另一个元素</h2>
<div onMouseOver={() => this.onMouseOverHandler()}>鼠标移动到我</div>
{this.state.insert ? <div>被插入的元素</div> : null}
</div>
);
}
onMouseOverHandler() {
this.setState({insert: true});
}
}
export default InsertAnElementOnMouseOver;
|
package io.swagger.model.germ;
import java.util.Objects;
import com.fasterxml.jackson.annotation.JsonProperty;
import io.swagger.annotations.ApiModelProperty;
import io.swagger.model.germ.PedigreeNodeParents;
import io.swagger.model.germ.PedigreeNodeSiblings;
import java.util.ArrayList;
import java.util.List;
import org.springframework.validation.annotation.Validated;
import javax.validation.Valid;
/**
* PedigreeNode
*/
@Validated
@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2020-03-20T16:33:36.513Z[GMT]")
public class PedigreeNode {
@JsonProperty("crossingProjectDbId")
private String crossingProjectDbId = null;
@JsonProperty("crossingYear")
private Integer crossingYear = null;
@JsonProperty("familyCode")
private String familyCode = null;
@JsonProperty("germplasmDbId")
private String germplasmDbId = null;
@JsonProperty("germplasmName")
private String germplasmName = null;
@JsonProperty("parents")
@Valid
private List<PedigreeNodeParents> parents = null;
@JsonProperty("pedigree")
private String pedigree = null;
@JsonProperty("siblings")
@Valid
private List<PedigreeNodeSiblings> siblings = null;
public PedigreeNode crossingProjectDbId(String crossingProjectDbId) {
this.crossingProjectDbId = crossingProjectDbId;
return this;
}
/**
* The crossing project used to generate this germplasm
* @return crossingProjectDbId
**/
@ApiModelProperty(example = "625e745a", value = "The crossing project used to generate this germplasm")
public String getCrossingProjectDbId() {
return crossingProjectDbId;
}
public void setCrossingProjectDbId(String crossingProjectDbId) {
this.crossingProjectDbId = crossingProjectDbId;
}
public PedigreeNode crossingYear(Integer crossingYear) {
this.crossingYear = crossingYear;
return this;
}
/**
* The year the parents were originally crossed
* @return crossingYear
**/
@ApiModelProperty(example = "2005", value = "The year the parents were originally crossed")
public Integer getCrossingYear() {
return crossingYear;
}
public void setCrossingYear(Integer crossingYear) {
this.crossingYear = crossingYear;
}
public PedigreeNode familyCode(String familyCode) {
this.familyCode = familyCode;
return this;
}
/**
* The code representing the family
* @return familyCode
**/
@ApiModelProperty(example = "F0000203", value = "The code representing the family")
public String getFamilyCode() {
return familyCode;
}
public void setFamilyCode(String familyCode) {
this.familyCode = familyCode;
}
public PedigreeNode germplasmDbId(String germplasmDbId) {
this.germplasmDbId = germplasmDbId;
return this;
}
/**
* The ID which uniquely identifies a germplasm
* @return germplasmDbId
**/
@ApiModelProperty(example = "1098ebaf", required = true, value = "The ID which uniquely identifies a germplasm")
public String getGermplasmDbId() {
return germplasmDbId;
}
public void setGermplasmDbId(String germplasmDbId) {
this.germplasmDbId = germplasmDbId;
}
// Fluent setter: assigns germplasmName and returns this for chaining.
public PedigreeNode germplasmName(String germplasmName) {
this.germplasmName = germplasmName;
return this;
}
/**
* A human readable name for a germplasm
* @return germplasmName
**/
@ApiModelProperty(example = "A0021004", value = "A human readable name for a germplasm")
public String getGermplasmName() {
return germplasmName;
}
// JavaBean setter for germplasmName.
public void setGermplasmName(String germplasmName) {
this.germplasmName = germplasmName;
}
// Fluent setter: replaces the whole parents list and returns this for chaining.
public PedigreeNode parents(List<PedigreeNodeParents> parents) {
this.parents = parents;
return this;
}
// Appends one parent, lazily creating the backing list on first use.
public PedigreeNode addParentsItem(PedigreeNodeParents parentsItem) {
if (this.parents == null) {
this.parents = new ArrayList<PedigreeNodeParents>();
}
this.parents.add(parentsItem);
return this;
}
/**
* List of parent nodes in the pedigree tree.
* @return parents
**/
@ApiModelProperty(example = "[{\"germplasmDbId\":\"b66958de\",\"germplasmName\":\"A0000592\",\"parentType\":\"MALE\"},{\"germplasmDbId\":\"a55847ed\",\"germplasmName\":\"A0000592\",\"parentType\":\"FEMALE\"}]", value = "List of parent nodes in the pedigree tree.")
@Valid
public List<PedigreeNodeParents> getParents() {
return parents;
}
// JavaBean setter for parents.
public void setParents(List<PedigreeNodeParents> parents) {
this.parents = parents;
}
// Fluent setter: assigns the pedigree string and returns this for chaining.
public PedigreeNode pedigree(String pedigree) {
this.pedigree = pedigree;
return this;
}
/**
* The string representation of the pedigree.
* @return pedigree
**/
@ApiModelProperty(example = "A0000001/A0000002", value = "The string representation of the pedigree.")
public String getPedigree() {
return pedigree;
}
// JavaBean setter for pedigree.
public void setPedigree(String pedigree) {
this.pedigree = pedigree;
}
// Fluent setter: replaces the whole siblings list and returns this for chaining.
public PedigreeNode siblings(List<PedigreeNodeSiblings> siblings) {
this.siblings = siblings;
return this;
}
// Appends one sibling, lazily creating the backing list on first use.
public PedigreeNode addSiblingsItem(PedigreeNodeSiblings siblingsItem) {
if (this.siblings == null) {
this.siblings = new ArrayList<PedigreeNodeSiblings>();
}
this.siblings.add(siblingsItem);
return this;
}
/**
* List of sibling germplasm
* @return siblings
**/
@ApiModelProperty(example = "[{\"germplasmDbId\":\"334f53a3\",\"germplasmName\":\"A0021005\"},{\"germplasmDbId\":\"7bbbda8c\",\"germplasmName\":\"A0021006\"},{\"germplasmDbId\":\"ab1d9b26\",\"germplasmName\":\"A0021007\"}]", value = "List of sibling germplasm")
@Valid
public List<PedigreeNodeSiblings> getSiblings() {
return siblings;
}
// JavaBean setter for siblings.
public void setSiblings(List<PedigreeNodeSiblings> siblings) {
this.siblings = siblings;
}
// Generated value equality: two PedigreeNodes are equal iff all model fields
// are equal. Kept in sync with hashCode() below over the same field set.
@Override
public boolean equals(java.lang.Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
PedigreeNode pedigreeNode = (PedigreeNode) o;
return Objects.equals(this.crossingProjectDbId, pedigreeNode.crossingProjectDbId) &&
Objects.equals(this.crossingYear, pedigreeNode.crossingYear) &&
Objects.equals(this.familyCode, pedigreeNode.familyCode) &&
Objects.equals(this.germplasmDbId, pedigreeNode.germplasmDbId) &&
Objects.equals(this.germplasmName, pedigreeNode.germplasmName) &&
Objects.equals(this.parents, pedigreeNode.parents) &&
Objects.equals(this.pedigree, pedigreeNode.pedigree) &&
Objects.equals(this.siblings, pedigreeNode.siblings);
}
// Hash over exactly the fields compared in equals(), as the contract requires.
@Override
public int hashCode() {
return Objects.hash(crossingProjectDbId, crossingYear, familyCode, germplasmDbId, germplasmName, parents, pedigree, siblings);
}
// Multi-line debug representation; nested values are indented via
// toIndentedString() so the output reads as a tree.
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("class PedigreeNode {\n");
sb.append(" crossingProjectDbId: ").append(toIndentedString(crossingProjectDbId)).append("\n");
sb.append(" crossingYear: ").append(toIndentedString(crossingYear)).append("\n");
sb.append(" familyCode: ").append(toIndentedString(familyCode)).append("\n");
sb.append(" germplasmDbId: ").append(toIndentedString(germplasmDbId)).append("\n");
sb.append(" germplasmName: ").append(toIndentedString(germplasmName)).append("\n");
sb.append(" parents: ").append(toIndentedString(parents)).append("\n");
sb.append(" pedigree: ").append(toIndentedString(pedigree)).append("\n");
sb.append(" siblings: ").append(toIndentedString(siblings)).append("\n");
sb.append("}");
return sb.toString();
}
/**
* Convert the given object to string with each line indented by 4 spaces
* (except the first line).
*/
private String toIndentedString(java.lang.Object o) {
if (o == null) {
return "null";
}
// NOTE(review): the replacement literal's whitespace may have been collapsed
// in transit; the Javadoc says 4 spaces -- confirm against the generator.
return o.toString().replace("\n", "\n ");
}
}
|
#!/bin/sh
# Buildpack test suite: test_utils.sh supplies the `compile` driver and the
# assertEquals/assertCaptured helpers used below.
. ${BUILDPACK_TEST_RUNNER_HOME}/lib/test_utils.sh
# Happy-path compile: expects a CMake 3.8.1 install with every step logged
# and the resulting binary reporting its version.
testCompile()
{
compile
# compile must exit 0 (RETURN is set by the test runner)
assertEquals 0 "${RETURN}"
assertCaptured "-----> Installing CMake version: 3.8.1"
assertCaptured " Downloading binary distribution"
assertCaptured " Verifying download"
assertCaptured " Extracting download"
assertCaptured " Moving binary"
assertCaptured " Exporting PATH"
assertCaptured " Testing binary"
assertCaptured "cmake version 3.8.1"
}
|
from flask import Flask, render_template, request, jsonify, redirect
from flask import send_file
import pandas as pd
import json
import os.path
from os import path
import datetime
##additional imports
# Credential store: JSON object mapping username -> password, loaded once at
# startup. NOTE(review): passwords appear to be stored in plain text --
# confirm, and consider hashing.
with open('./data/users.json') as json_data:
    users = json.load(json_data)
app = Flask(__name__)
####functions
##route decorator
@app.route('/', methods=['GET', 'POST'])
def index():
    """Render the login form (GET) or validate credentials (POST).

    On a successful login the user is redirected to the downstream app;
    otherwise the form is re-rendered with an error message.
    """
    if request.method == 'GET':
        return render_template("index.html", status="")
    # POST: validate the submitted username/password pair.
    username = request.form['username']
    password = request.form['password']
    # Bug fix: users[username] raised KeyError (HTTP 500) for unknown
    # usernames; .get() treats them as a failed login instead. Also stopped
    # printing the credentials, which leaked passwords to the server log.
    if users.get(username) == password:
        url = 'https://guarded-garden-81041.herokuapp.com'
        return redirect(url)
    status = "That username and password combination is not correct"
    return render_template("index.html", status=status)
if __name__ == '__main__':
    # Bug fix: app.run()'s first positional argument is the host only;
    # "0.0.0.0:5000" is not a valid bind address. Pass host and port apart.
    app.run(host="0.0.0.0", port=5000, debug=True)
|
import React from "react";
import searchImg from "../images/search.png";
import wwwImg from "../images/www.png";
import techImg from "../images/tech.png";
import codingImg from "../images/ideas.png";
import radioImg from "../images/control-panel.png";
// Work-experience entries rendered by the Experience section below.
// Each entry: title, icon image, company, date range, and bullet points.
const exp = [{
  title: "Software Engineer, Applications",
  image: radioImg,
  company: "RefleXion Medical",
  date: "2019 - 2020",
  description: ["Write Full Stack JavaScript applications","Create UI for medical radiotherapy machines","Follow best testing and design practices", "Angular, TypeScript, RxJS, NestJS, SCSS, MongoDB"]
},{
  title: "Full Stack Web Development Assistant Instructor",
  image: wwwImg,
  company: "UCBerkeley Extension Boot Camp",
  date: "2018 - 2019",
  description: ["Mentor junior engineers","Perform code reviews and establish best practices","Communicate technical concepts"]
},{
  title: "Web Development Mentor",
  image: codingImg,
  date: "2017 - 2019",
  company: "",
  description: ["Explain web fundamentals and JavaScript language ","Teach API integration with web applications","Elucidate common data structures and algorithms"]
},{
  title: "Software Engineer",
  image: techImg,
  date: "2014 - 2016",
  company: "Viadeo",
  // Typo fix: "Seach" -> "Search" (user-visible text).
  description: ["Built Ember web applications from scratch for Search and Profile pages","Wrote geolocation-based matching application","Integrated Algolia search API for Ember Data ORM and Node endpoints","Rewrote Backbone search header for auto-suggestion"]
},{
  title: "QA Engineer",
  image: searchImg,
  company: "RTLabs",
  date: "2013",
  description: ["Coded WebDriver automated navigation tests in Ruby"]
}
];
export default function(props){
return (
<section className='experience'>
<h2 className='heading' style={{
color: 'snow',
backgroundColor: "black",
padding: "20px"
}}>Experience</h2>
{exp.map((e)=>{
return <div className="text-container" key={e.title}>
<div className="xp-flex" style={{
display: "flex",
alignItems: "center",
justifyContent: "space-between"
}}>
<img src={e.image} style={{
height: "10rem",
margin: "5%"
}} />
<div className="xp-text">
<h2 className="title">{e.title}</h2>
<div className="xp-sub" style={{
fontSize: "0.8em",
color: "black"
}}>
<div className="">{e.company}</div>
<div className="">{e.date}</div>
</div>
<hr/>
{e.description.map((des)=><p className="" key={des}>{des}</p>)}
</div>
</div>
</div>
})}
</section>
)
} |
#!/bin/bash
# Copyright 2019 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# script to run gofmt over our code (not vendor)
set -o errexit
set -o nounset
set -o pipefail

# Format all Go files outside any vendor/ tree. -prune skips vendor
# directories entirely, and -print0/-0 keeps file names with spaces or
# newlines intact (the old `find | grep -v | xargs` pipeline word-split them).
find . -type d -name vendor -prune -o -name '*.go' -print0 | xargs -0 gofmt -s -w
|
class CommAdminView:
    """Base admin view carrying site-wide chrome settings."""
    # Title shown in the admin header / browser tab; empty by default.
    site_title = ""
    # Footer text rendered at the bottom of admin pages; empty by default.
    site_footer = ""
class GlobalSetting(CommAdminView):
    """Mutators used by settings hooks to adjust global admin options."""

    def set_items_per_page(self, items_per_page):
        # NOTE(review): ListAdminView is not defined or imported in this view
        # of the file -- presumably provided by the surrounding framework;
        # confirm it is in scope at call time. This mutates a class attribute,
        # affecting every list view.
        ListAdminView.list_per_page = items_per_page

    def set_site_title(self, title):
        # Instance-level override of the class attribute on CommAdminView.
        self.site_title = title

    def set_site_footer(self, footer):
        # Instance-level override of the class attribute on CommAdminView.
        self.site_footer = footer
/**
*
*/
package com.huatuo.custom_widget;
import android.app.Activity;
import android.content.Context;
import android.graphics.drawable.ColorDrawable;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup.LayoutParams;
import android.view.WindowManager;
import android.widget.PopupWindow;
import android.widget.Toast;
import com.huatuo.R;
import com.huatuo.bean.ShareObj;
import com.huatuo.custom_widget.StickyNavLayout.MyOnScrollListener;
import com.huatuo.util.CommonUtil;
import com.huatuo.util.Toast_Util;
import com.umeng.socialize.ShareAction;
import com.umeng.socialize.UMShareListener;
import com.umeng.socialize.bean.SHARE_MEDIA;
/**
*
*/
/**
 * Bottom-sheet style share board implemented as a {@link PopupWindow}.
 * Offers sharing to WeChat, WeChat Moments ("circle"), Sina Weibo (wired in
 * the layout but not handled in onClick -- see NOTE below) and copy-link,
 * using the UMeng social SDK.
 */
public class CustomShareBoard extends PopupWindow implements OnClickListener {
private Activity mActivity;
// Payload to share: image, title, text and target URL.
private ShareObj mShareObj;
public CustomShareBoard(Activity activity, ShareObj shareObj) {
super(activity);
this.mActivity = activity;
mShareObj = shareObj;
initView(activity);
}
@SuppressWarnings("deprecation")
private void initView(Context context) {
View rootView = LayoutInflater.from(context).inflate(
R.layout.custom_board, null);
// Wire up click handlers for each share target.
rootView.findViewById(R.id.rl_wechat).setOnClickListener(this);
rootView.findViewById(R.id.rl_circle).setOnClickListener(this);
// NOTE(review): rl_sina gets a listener but onClick() has no case for it,
// so tapping Weibo silently does nothing -- confirm whether intentional.
rootView.findViewById(R.id.rl_sina).setOnClickListener(this);
rootView.findViewById(R.id.rl_ctrl_c).setOnClickListener(this);
rootView.findViewById(R.id.share_cancel).setOnClickListener(this);
setContentView(rootView);
setWidth(LayoutParams.MATCH_PARENT);
setHeight(LayoutParams.WRAP_CONTENT);
setFocusable(true);
// NOTE(review): 0x000000 has a zero alpha channel, i.e. a fully
// transparent background -- confirm 0xff000000 (opaque black) was not meant.
ColorDrawable dw = new ColorDrawable(0x000000);
setBackgroundDrawable(dw);
// setBackgroundDrawable(new BitmapDrawable());
setAnimationStyle(R.style.mypopwindow_anim_shareboard_style1);
// Dim the host activity while the board is showing.
backgroundAlpha(0.3f);
setOnDismissListener(new OnDismissListener() {
@Override
public void onDismiss() {
// Restore full opacity when the popup is dismissed.
backgroundAlpha(1f);
}
});
setTouchable(true);
}
/**
 * Sets the host activity's window dimming level.
 *
 * @param bgAlpha window alpha in the range 0.0 (dark) to 1.0 (normal)
 */
public void backgroundAlpha(float bgAlpha) {
WindowManager.LayoutParams lp = mActivity.getWindow().getAttributes();
lp.alpha = bgAlpha; // 0.0-1.0
mActivity.getWindow().setAttributes(lp);
}
// Dispatches taps on the board's rows; every handled branch dismisses the popup.
@Override
public void onClick(View v) {
int id = v.getId();
switch (id) {
case R.id.share_cancel:
dismiss();
break;
case R.id.rl_wechat:
// myOnShareListener.onWeiXin();
new ShareAction(mActivity).setPlatform(SHARE_MEDIA.WEIXIN)
.setCallback(umShareListener)
.withMedia(mShareObj.getImage())
.withTitle(mShareObj.getTitle())
.withText(mShareObj.getContent())
.withTargetUrl(mShareObj.getTargetUrl()).share();
dismiss();
break;
case R.id.rl_circle:
// myOnShareListener.onWeixin_circle();
new ShareAction(mActivity).setPlatform(SHARE_MEDIA.WEIXIN_CIRCLE)
.setCallback(umShareListener)
.withMedia(mShareObj.getImage())
.withTitle(mShareObj.getTitle())
.withText(mShareObj.getContent())
.withTargetUrl(mShareObj.getTargetUrl()).share();
dismiss();
break;
case R.id.rl_ctrl_c:
// Copy the target URL to the clipboard and confirm with a toast.
CommonUtil.copy(mActivity, mShareObj.getTargetUrl());
Toast_Util.showToast(mActivity, "链接已复制到粘贴板");
dismiss();
break;
}
}
// UMeng share callback: silent on success, toasts on failure/cancellation.
private UMShareListener umShareListener = new UMShareListener() {
@Override
public void onResult(SHARE_MEDIA platform) {
// Toast.makeText(mActivity, platform + " 分享成功啦", Toast.LENGTH_SHORT)
// .show();
}
@Override
public void onError(SHARE_MEDIA platform, Throwable t) {
Toast.makeText(mActivity, platform + " 分享失败啦", Toast.LENGTH_SHORT)
.show();
}
@Override
public void onCancel(SHARE_MEDIA platform) {
Toast.makeText(mActivity, platform + " 分享取消了", Toast.LENGTH_SHORT)
.show();
}
};
// Registers an external share callback.
// NOTE(review): the listener is stored but every call site below is commented
// out, so it is currently never invoked -- confirm whether still needed.
public void setOnShareListener(OnShareListener onShareListener) {
this.myOnShareListener = onShareListener;
}
private OnShareListener myOnShareListener;
// Callback interface for share-target selection.
public interface OnShareListener {
public void onWeiXin();
public void onWeixin_circle();
}
}
|
// Mock community/location entries (one per office) rendered by the community
// pages. NOTE(review): "<NAME>"/"<EMAIL>" are redacted placeholders from the
// data source -- confirm real values before shipping.
// Fixes user-visible typos: "logisitics" -> "logistics",
// "reserach" -> "research", "Getin" -> "Get in".
const communityData = [
  {
    id: 0,
    name: "<NAME>",
    collaborate: {
      title: "Collaborate with our logistics team to improve flow of goods.",
      photo: "https://images.pexels.com/photos/3184296/pexels-photo-3184296.jpeg?auto=compress&cs=tinysrgb&dpr=2&h=650&w=940",
    },
    service: {
      title: "Get in touch with our helpful customer service.",
      photo: "https://images.pexels.com/photos/7682340/pexels-photo-7682340.jpeg?auto=compress&cs=tinysrgb&dpr=2&h=650&w=940",
    },
    projects: {
      title: "We are excited to launch the new Le Raffia scholarship program.",
      photo: "https://images.pexels.com/photos/1462630/pexels-photo-1462630.jpeg?auto=compress&cs=tinysrgb&dpr=2&h=650&w=940",
    },
    contact: {
      location: "Pittsburgh",
      phone: "01223445667",
      email: "<EMAIL>",
      address: "333 Invalid street, Pittsburgh, PA 10000.",
      photo: "https://www.nationaldrugscreening.com/wp-content/uploads/2020/07/pittsburgh-996347_1920-1024x683.jpg",
    },
    image:
      "https://images.pexels.com/photos/3769118/pexels-photo-3769118.jpeg?auto=compress&cs=tinysrgb&dpr=1&w=500",
  },
  {
    id: 1,
    name: "<NAME>",
    collaborate: {
      title: "Collaborate with our research team to study about textiles and materials.",
      photo: "https://images.pexels.com/photos/7679604/pexels-photo-7679604.jpeg?auto=compress&cs=tinysrgb&dpr=2&h=650&w=940",
    },
    service: {
      title: "It is customer service week. Contact us for specialized services.",
      photo: "https://images.pexels.com/photos/7709222/pexels-photo-7709222.jpeg?auto=compress&cs=tinysrgb&dpr=2&h=650&w=940",
    },
    projects: {
      title: "Our logistics scholarship and training program is on. Apply until 03-21-2022.",
      photo: "https://images.pexels.com/photos/1438081/pexels-photo-1438081.jpeg?auto=compress&cs=tinysrgb&dpr=2&h=650&w=940",
    },
    contact: {
      location: "Philadelphia",
      phone: "1023456780",
      email: "<EMAIL>",
      address: "444 Invalid street, Philadelphia, PA 10000.",
      photo:"https://fullsuitcase.com/wp-content/uploads/2020/12/One-day-in-Philadelphia-Pennsylvania-USA.jpg",
    },
    image:
      "https://images.unsplash.com/photo-1606416132922-22ab37c1231e?ixlib=rb-1.2.1&ixid=MnwxMjA3fDB8MHxzZWFyY2h8MXx8YmxhY2slMjB3b21lbnxlbnwwfHwwfHw%3D&auto=format&fit=crop&w=500&q=60",
  },
  {
    id: 2,
    name: "<NAME>",
    collaborate: {
      title: "Are you interested in co-designing raffia bags with our design team? Get in here today!",
      photo: "https://images.pexels.com/photos/3153201/pexels-photo-3153201.jpeg?auto=compress&cs=tinysrgb&dpr=2&h=650&w=940",
    },
    service: {
      title: "We are now ranked 4.5 stars by the National Customer Service Society.",
      photo: "https://images.pexels.com/photos/8867482/pexels-photo-8867482.jpeg?auto=compress&cs=tinysrgb&dpr=2&h=650&w=940",
    },
    projects: {
      title: "Participate in our regreening project in Senegal and Mali. ",
      photo: "https://images.pexels.com/photos/52706/pexels-photo-52706.jpeg?auto=compress&cs=tinysrgb&dpr=1&w=500",
    },
    contact: {
      location: "Baltimore",
      phone: "1234567890",
      email: "<EMAIL>",
      address: "555 Invalid street, Baltimore, MD 20000.",
      photo: "https://dynamic-media-cdn.tripadvisor.com/media/photo-o/14/10/2d/da/baltimore.jpg?w=700&h=500&s=1"
    },
    image:
      "https://images.unsplash.com/photo-1543269865-cbf427effbad?ixlib=rb-1.2.1&ixid=MnwxMjA3fDB8MHxzZWFyY2h8MTV8fGN1c3RvbWVyc3xlbnwwfHwwfHw%3D&auto=format&fit=crop&w=500&q=60",
  },
  {
    id: 3,
    name: "data 4",
    collaborate: {
      title: "We encourage our community of customers to send their designs. Best designs win prizes.",
      photo: "https://images.pexels.com/photos/3184327/pexels-photo-3184327.jpeg?auto=compress&cs=tinysrgb&dpr=2&h=650&w=940",
    },
    service: {
      title: "We have a new ordering chatbot in our mobile application.",
      photo: "https://images.pexels.com/photos/7709301/pexels-photo-7709301.jpeg?auto=compress&cs=tinysrgb&dpr=2&h=650&w=940",
    },
    projects: {
      title: "It's Operation Clean The Waters season once again. Register today to participate!",
      photo: "https://images.pexels.com/photos/40784/drops-of-water-water-nature-liquid-40784.jpeg?auto=compress&cs=tinysrgb&dpr=2&h=650&w=940",
    },
    contact: {
      location: "San Diego",
      phone: "1234567890",
      email: "<EMAIL>",
      address: "666 Invalid street, San Diego, CA 90000.",
      photo: "https://www.usatipps.de/wp-content/uploads/2021/05/san-diego-kalifornien.jpg"
    },
    image:
      "https://images.unsplash.com/photo-1551836022-d5d88e9218df?ixlib=rb-1.2.1&ixid=MnwxMjA3fDB8MHxzZWFyY2h8MTh8fGN1c3RvbWVyc3xlbnwwfHwwfHw%3D&auto=format&fit=crop&w=500&q=60",
  },
  {
    id: 4,
    name: "data 5",
    collaborate: {
      title: "Sign up for research collaborations, and contribute to our materials research program.",
      photo: "https://images.pexels.com/photos/3153198/pexels-photo-3153198.jpeg?auto=compress&cs=tinysrgb&dpr=2&h=650&w=940",
    },
    service: {
      title: "Have you downloaded mobile catalog application? Get it done today!",
      photo: "https://images.pexels.com/photos/887751/pexels-photo-887751.jpeg?auto=compress&cs=tinysrgb&dpr=2&h=650&w=940",
    },
    projects: {
      title: "Meet and greet panel on 02-17-2022. Speak with our project teams on new topics.",
      photo: "https://images.pexels.com/photos/7490889/pexels-photo-7490889.jpeg?auto=compress&cs=tinysrgb&dpr=2&h=650&w=940",
    },
    contact: {
      location: "Austin",
      phone: "440110900090",
      email: "<EMAIL>",
      address: "777 Invalid street, Austin, TX 20009.",
      photo: "https://www.planetware.com/wpimages/2020/06/texas-austin-visitors-guide-to-exploring-downtown-where-is-austin.jpg"
    },
    image:
      "https://images.unsplash.com/photo-1483985988355-763728e1935b?ixlib=rb-1.2.1&ixid=MnwxMjA3fDB8MHxzZWFyY2h8Mzd8fGN1c3RvbWVyc3xlbnwwfHwwfHw%3D&auto=format&fit=crop&w=500&q=60",
  },
  {
    id: 5,
    name: "<NAME>",
    collaborate: {
      title: "Calls to submit raffia polish designs. Best designs win a 4-month design internship.",
      photo: "https://images.pexels.com/photos/7963834/pexels-photo-7963834.jpeg?auto=compress&cs=tinysrgb&dpr=2&h=650&w=940",
    },
    service: {
      title: "Why does customer experience matter in the fashion and apparel industry?",
      photo:"https://images.pexels.com/photos/33999/pexels-photo.jpg?auto=compress&cs=tinysrgb&dpr=2&h=650&w=940",
    },
    projects: {
      title: "We are building a new raffia polishing center in Kearney, Nebraska.",
      photo: "https://images.pexels.com/photos/257636/pexels-photo-257636.jpeg?auto=compress&cs=tinysrgb&dpr=1&w=500",
    },
    contact: {
      location: "Miami",
      phone: "099097765411",
      email: "<EMAIL>",
      address: "888 Invalid street, Miami, FL 30000",
      photo: "https://img.freepik.com/fotos-kostenlos/stadt-miami-florida-sommer-sonnenuntergang-panorama-mit-bunt-beleuchteten-geschaefts-und-wohngebaeuden-und-bruecke-auf-biscayne-bay_268835-1891.jpg"
    },
    image:
      "https://images.unsplash.com/photo-1633058713751-1b1b950014a1?ixlib=rb-1.2.1&ixid=MnwxMjA3fDB8MHxzZWFyY2h8MTh8fGJsYWNrJTIwd29tZW58ZW58MHx8MHx8&auto=format&fit=crop&w=500&q=60",
  },
];
export default communityData;
|
<gh_stars>0
#!/usr/bin/env python
# -*- coding:UTF-8 -*-
# Demonstrate os.path.splitext() on a variety of path shapes.
import os.path

for path in ['filename.txt',
             'filename',
             '/path/to/filename.txt',
             '/',
             '',
             'my-archive.tar.gz',
             'no-extension.',
             ]:
    # Bug fix: the original used the Python 2 print statement, which is a
    # SyntaxError under the `env python` (Python 3) shebang on modern systems.
    # print(a, b) produces the same space-separated output.
    print('%21s :' % path, os.path.splitext(path))
|
<reponame>ONSdigital/br-api-common
package uk.gov.ons.br.actions
import javax.inject.Inject
import play.api.mvc._
import uk.gov.ons.br.repository.{SearchRepository, SearchResult}
import scala.concurrent.{ExecutionContext, Future}
/*
* From the Play docs: "A controller in Play is nothing more than an object that generates Action values."
* Actions are therefore the preferred approach to re-using behaviour across Controllers.
*
* Here we define an action that encapsulates searching for a term across a repository.
* The search result is added to the request, and this custom request is passed to the controller block.
* The controller simply has to define a block of type: SearchRequest[AnyContent] => Result where the passed
* SearchRequest instance already contains the search result.
*/
trait SearchActionMaker[U] {
  // Request wrapper carrying the repository search result alongside the
  // original request, so controller blocks can read it directly.
  class SearchRequest[A](val searchResult: SearchResult[U], originalRequest: Request[A]) extends WrappedRequest[A](originalRequest)

  // Builds an action that performs the search for `term` before the
  // controller block runs.
  def forTerm(term: String): ActionBuilder[SearchRequest, AnyContent] with ActionTransformer[Request, SearchRequest]
}
// Default implementation: delegates the lookup to the injected
// SearchRepository and wraps the (asynchronous) result into a SearchRequest.
class DefaultSearchActionMaker[U] @Inject() (bodyParser: BodyParser[AnyContent], searchRepository: SearchRepository[U])
                                            (implicit ec: ExecutionContext) extends SearchActionMaker[U] {
  override def forTerm(term: String): ActionBuilder[SearchRequest, AnyContent] with ActionTransformer[Request, SearchRequest] =
    new ActionBuilder[SearchRequest, AnyContent] with ActionTransformer[Request, SearchRequest] {
      // Runs the repository search and attaches the result to the request.
      override protected def transform[A](request: Request[A]): Future[SearchRequest[A]] =
        searchRepository.searchFor(term).map(new SearchRequest(_, request))
      override protected def executionContext: ExecutionContext = ec
      override def parser: BodyParser[AnyContent] = bodyParser
    }
}
#!/bin/bash
# Copyright (c) 2016, Dalenys
#
# Permission to use, copy, modify, and/or distribute this software for any purpose
# with or without fee is hereby granted, provided that the above copyright notice
# and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
# THIS SOFTWARE.
#set -x
PROGNAME=$(basename "$0")
VERBOSE=0 # set to 1 by -v
AUTH=0 # set to 1 when -u/-p credentials are supplied
PORT_MONGO=27017 # default mongod port, overridable with -w
# output of the mongo_query function
CMD_OUTPUT=""
# Standard Nagios plugin exit codes.
STATE_OK=0
STATE_WARNING=1
STATE_CRITICAL=2
STATE_UNKNOWN=3
STATE_DEPENDENT=4
# Maps rs.status().myState numeric codes to human-readable member states.
# NOTE(review): index 4 is absent (no state 4 entry) -- confirm intentional.
MONGO_STATUS[0]="STARTUP"
MONGO_STATUS[1]="PRIMARY"
MONGO_STATUS[2]="SECONDARY"
MONGO_STATUS[3]="RECOVERING"
MONGO_STATUS[5]="STARTUP2"
MONGO_STATUS[6]="UNKNOWN"
MONGO_STATUS[7]="ARBITER"
MONGO_STATUS[8]="DOWN"
MONGO_STATUS[9]="ROLLBACK"
MONGO_STATUS[10]="REMOVED"
# display verbose is -v specifed
# Print a "[*] ..." debug line, but only when -v was given.
debug_msg() {
if [ "${VERBOSE}" -ne 1 ]
then
return 0
fi
echo "[*] $1"
}
# check if type is "replicaset"
# Guard: abort with UNKNOWN unless the check targets a replica set (-t).
is_rs() {
case "${TYPE}" in
replicaset)
debug_msg "argument '-t' is 'replicaset'"
;;
*)
debug_msg "argument '-t' is not 'replicaset'"
echo "NOK : argument '-t' has to be 'replicaset' for this check"
exit ${STATE_UNKNOWN}
;;
esac
}
# check if authentification is specifed
# Flag (AUTH=1) that -u/-p credentials were both supplied.
is_auth_given(){
if [[ -n "${USER_MONGO}" && -n "${PASSWORD_MONGO}" ]]
then
AUTH=1
debug_msg "Credentials provided"
else
debug_msg "Credentials not provided, auth skipped"
fi
}
# check if a different port is specifed
# Fall back to the default mongod port when -w was not given or given empty.
# Bug fix: the original body was a no-op (PORT_MONGO=${PORT_MONGO}) and its
# condition was inverted relative to its apparent intent.
is_port(){
if [ -z "${PORT_MONGO}" ]
then
PORT_MONGO=27017
fi
}
# Resident memory (physical RAM) should be more than 92% used.
# Specific to MMAPv1 storage engine.
# WARN when the mongod resident set uses less than 92% of physical RAM
# (suggests readahead is too high). Specific to the MMAPv1 storage engine.
# Reads serverStatus().mem.resident (MB) and compares against `free -m`.
check_mem_resident() {
is_auth_given
is_port
local LIMIT=92 # Could be set with an argument but 92 seems ok:
# Assuming that your RAM is smaller than your data size, MongoDB’s resident
# set size should be a little lower than your total size of RAM (for
# example, if you have 50 GB of RAM, MongoDB should be using at least
# 46GB). If it is much lower, then your readahead is probably too high.
# from "Mongodb the definitive guide", O'Reilly
mongo_query "db.serverStatus().mem.resident"
# strip the decimal part of the reported megabytes
RESIDENT=${CMD_OUTPUT%%.*}
PHY=$(free -m|grep Mem|awk '{print $2}')
# percentage of physical RAM held resident by mongod
RESIDENT_USED=$(echo "${RESIDENT}" "${PHY}"|awk '{print $1 *100 / $2}')
RESIDENT_USED="${RESIDENT_USED%.*}"
if [ "${RESIDENT_USED}" -lt "${LIMIT}" ]
then
echo "NOK : Resident memory used : ${RESIDENT_USED}%, readahead probably too high"
return ${STATE_WARNING}
else
echo "OK: Resident memory used : ${RESIDENT_USED}%"
return ${STATE_OK}
fi
}
# Track replication lag in a replicaset
# CRITICAL when any secondary reports a non-zero replication lag (in hours).
# Parses the human-readable output of rs.printSlaveReplicationInfo().
# NOTE(review): the awk field extraction assumes a fixed output layout of the
# mongo shell -- confirm against the shell version in use.
check_rs_lag() {
is_rs # mandatory, rs.status() is specific to replicaset
is_auth_given
is_port
local HOUR=0 # could also be set in arg
mongo_query "rs.printSlaveReplicationInfo()"
# 13th whitespace-separated field holds "(N" hours behind the primary
HOUR=$(echo -ne $CMD_OUTPUT|awk '{print $13}')
HOUR=${HOUR#(*}
if [ "${HOUR}" -ne 0 ]
then
echo "NOK : Lag replication is ${HOUR} hr(s)"
return ${STATE_CRITICAL}
else
echo "OK : Lag replication is ${HOUR} hr(s)"
return ${STATE_OK}
fi
}
# count how many member are configured in the replicaset
# For now, we assume 3 is the right value
# CRITICAL unless the replica set has exactly 3 configured members.
# Counts "_id" occurrences in the printjson'd rs.status().members array.
check_rs_count() {
is_rs # mandatory, rs.status() is specific to replicaset
is_auth_given
is_port
local NB_REQUIRED=3 # could be set in arg, but 3 is THE standard value for a replicaset
mongo_query "rs.status().members"
MY_STATE=${CMD_OUTPUT}
# one "_id" per member document
NB_MEMBER=$(echo "$MY_STATE"|grep -c "_id")
debug_msg "value of rs.count: ${NB_MEMBER}"
if [ "${NB_MEMBER}" -ne "${NB_REQUIRED}" ]
then
echo "NOK : total member should be 3, but is : ${NB_MEMBER}"
return ${STATE_CRITICAL}
else
echo "OK : number of instances should be 3, and is : ${NB_MEMBER}"
return ${STATE_OK}
fi
}
# return the state of the node
# Report the replication state of the local node.
# OK for PRIMARY (1), SECONDARY (2) or ARBITER (7); CRITICAL otherwise.
check_rs_status() {
is_rs # mandatory, rs.status() is specific to replicaset
is_auth_given
is_port
mongo_query "rs.status().myState"
MY_STATE=${CMD_OUTPUT}
# Bug fix: the debug line referenced the undefined ${OUTPUT} variable;
# also quote ${MY_STATE} so an empty value fails loudly instead of breaking [.
debug_msg "value of myState: ${MY_STATE}"
if [ "${MY_STATE}" -eq 2 ] || [ "${MY_STATE}" -eq 1 ] || [ "${MY_STATE}" -eq 7 ]
then
echo "OK - State is ${MONGO_STATUS[${MY_STATE}]}"
return ${STATE_OK}
else
echo "NOK - State is ${MONGO_STATUS[${MY_STATE}]}"
return ${STATE_CRITICAL}
fi
}
# execute a command in mongo shell, pass through an argument
# ${CMD_OUTPUT} is set in the function
# Execute a command in the mongo shell; the JSON result lands in ${CMD_OUTPUT}.
# The command line is built as an array so the --eval argument stays a single
# word (the original built a flat string whose quoting collapsed, relying on
# the eval expression containing no spaces). Credentials are appended to the
# array but never echoed to the debug log.
mongo_query() {
local mongo_cmd=$1
local -a base_cmd=(mongo --host "${HOST}" --port "${PORT_MONGO}" --quiet --authenticationDatabase admin --eval "printjson(${mongo_cmd})")
if [ ${AUTH} -eq 1 ]
then
debug_msg "Running command with auth: ${base_cmd[*]} (credentials hidden)"
base_cmd+=(--username "${USER_MONGO}" --password "${PASSWORD_MONGO}")
else
debug_msg "Running command: ${base_cmd[*]}"
fi
if ! CMD_OUTPUT=$("${base_cmd[@]}")
then
echo "Error running mongo command."
exit ${STATE_UNKNOWN}
fi
debug_msg "result : ${CMD_OUTPUT}"
}
# usage
# Print the plugin's usage/help text on stdout.
usage() {
cat <<EOF
Usage: $PROGNAME -t [standalone|replicaset] -h [hostname] -c [check_name]
Optional :
-u [username]
-p [password]
-w [port]
-v verbose

Any rs.xxx command has to be associated with -t replicaset

check_name :
mem.resident Check resident memory usage (amount of physical memory being used, only for MMAPv1 storage engine)
rs.status Status of the local node
rs.count Count how many member are in the replicaset
rs.lag Check replication lag
EOF
}
# entrypoint
# entrypoint: bail out early when the mongo shell is not installed.
# 'command -v' is the portable, builtin replacement for 'which'.
if ! command -v mongo > /dev/null
then
echo "mongo binary not found"
exit ${STATE_DEPENDENT}
fi
# Parse the command line; unknown flags print usage and exit UNKNOWN.
while getopts 't:h:u:p:c:vw:' OPTIONS
do
case ${OPTIONS} in
t)
TYPE=${OPTARG}
;;
h)
HOST=${OPTARG}
;;
u)
USER_MONGO=${OPTARG}
;;
p)
PASSWORD_MONGO=${OPTARG}
;;
c)
CHECK_NAME=${OPTARG}
;;
v)
VERBOSE=1
;;
w)
PORT_MONGO=${OPTARG}
;;
*)
echo "Invalid argument."
usage
# Bug fix: 'return' is only valid inside a function; at script top level
# it is an error. Exit with the Nagios UNKNOWN code instead.
exit ${STATE_UNKNOWN}
;;
esac
done
# The password is deliberately masked in the debug output (the original
# leaked it verbatim with -v).
debug_msg "args: type:${TYPE}, host:${HOST},check_name:${CHECK_NAME},user:${USER_MONGO},password:****,port:${PORT_MONGO}"
if [[ -z "${TYPE}" || -z "${HOST}" || -z "${CHECK_NAME}" ]]
then
echo "HOST, TYPE and CHECK are required"
usage
exit $STATE_UNKNOWN
fi
# Dispatch to the selected check; each check function's return value is the
# script's exit status.
case ${CHECK_NAME} in
"mem.resident")
check_mem_resident
;;
"rs.status")
check_rs_status
;;
"rs.count")
check_rs_count
;;
"rs.lag")
check_rs_lag
;;
*)
echo "Invalid check '${CHECK_NAME}'."
# Bug fix: a bare 'exit' propagated the previous command's status (0, i.e.
# OK); an unknown check name must report UNKNOWN.
exit ${STATE_UNKNOWN}
;;
esac
# EOF
|
#!/bin/bash
# Deploy lifecycle hook: clears the Laravel cache and takes the app out of
# maintenance mode. msg/error_exit/timer come from common_functions.sh
# (timer reads START_TIME, which is reset before each step).
# Quoted "$0"/"$CURRENT_DIR" and $(...) replace the fragile unquoted
# backtick forms.
. "$(dirname "$0")/common_functions.sh"
msg "Event: $LIFECYCLE_EVENT"

## Clear App Cache
# php artisan cache:clear
START_TIME=$(date +%s%N)
function clear_app_cache {
  php "$CURRENT_DIR/artisan" cache:clear --env="$ENV"
}
if clear_app_cache ; then
  msg " - Clear App Cache ($(timer)s)"
else
  error_exit " - Clear App Cache Failed ($(timer)s)"
fi

# Take application out of maintenance mode
START_TIME=$(date +%s%N)
if [ -f "$CURRENT_DIR/artisan" ]
then
  # file found."
  php "$CURRENT_DIR/artisan" up
  # sleep 10 # wait 10 seconds to let ELB pass health check?
  msg " - Artisan Up ($(timer)s)"
else
  error_exit " - Artisan Up Failed ($(timer)s)"
fi
|
#
# Copyright:: Copyright 2016, Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "license_scout/log"
require "license_scout/exceptions"
require "license_scout/dependency_manager"
require "license_scout/license"
module LicenseScout
  # Walks the configured directories, detects dependency managers and
  # aggregates the licenses of every dependency they declare.
  class Collector
    # Set of all dependencies found by #collect.
    attr_reader :dependencies

    # Populates #dependencies from every detected dependency manager.
    # Raises LicenseScout::Exceptions::Error when no manager is found, and
    # converts upstream fetch / missing-package errors into FailExit.
    def collect
      @dependencies = Set.new
      if dependency_managers.empty?
        raise LicenseScout::Exceptions::Error.new("Failed to find any files associated with known dependency managers in the following directories:\n#{LicenseScout::Config.directories.map { |dir| "\t• #{dir}" }.join("\n")}\n")
      end
      dependency_managers.each { |d| collect_licenses_from(d) }
      LicenseScout::Log.info("[collector] All licenses successfully collected")
    rescue Exceptions::UpstreamFetchError => e
      LicenseScout::Log.error("[collector] Encountered an error attempting to fetch package metadata from upstream source:")
      LicenseScout::Log.error("[collector] #{e}")
      raise Exceptions::FailExit.new(e)
    rescue Exceptions::PackageNotFound => e
      LicenseScout::Log.error("[collector] One of the project's transitive dependencies could not be found:")
      LicenseScout::Log.error("[collector] #{e}")
      raise Exceptions::FailExit.new(e)
    end

    private

    # Adds every dependency reported by one dependency manager, re-raising a
    # missing-source-directory error with a hint about the install command.
    def collect_licenses_from(dep_mgr)
      LicenseScout::Log.info("[collector] Collecting licenses for #{dep_mgr.type} dependencies found in #{dep_mgr.directory}/#{dep_mgr.signature}")
      dep_mgr.dependencies.each do |dep|
        @dependencies << dep
      end
    rescue LicenseScout::Exceptions::MissingSourceDirectory => e
      raise LicenseScout::Exceptions::Error.new("#{e.message}\n\n\tPlease try running `#{dep_mgr.install_command}` to download the dependency.\n")
    end

    # Memoized list of dependency manager instances whose signature files are
    # present in any configured directory.
    def dependency_managers
      @dependency_managers ||= LicenseScout::Config.all_directories.map do |dir|
        LicenseScout::DependencyManager.implementations.map do |implementation|
          dep_mgr = implementation.new(File.expand_path(dir))
          if dep_mgr.detected?
            LicenseScout::Log.info("[collector] Found #{dep_mgr.signature} in #{dir}")
            dep_mgr
          else
            nil
          end
        end
      end.flatten.compact
    end
  end
end
|
#!/bin/bash

# -------------------------------------------------------------------------- #
# Copyright 2002-2020, OpenNebula Project, OpenNebula Systems                #
#                                                                            #
# Licensed under the Apache License, Version 2.0 (the "License"); you may    #
# not use this file except in compliance with the License. You may obtain    #
# a copy of the License at                                                   #
#                                                                            #
# http://www.apache.org/licenses/LICENSE-2.0                                 #
#                                                                            #
# Unless required by applicable law or agreed to in writing, software        #
# distributed under the License is distributed on an "AS IS" BASIS,          #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.   #
# See the License for the specific language governing permissions and        #
# limitations under the License.                                             #
#--------------------------------------------------------------------------- #

##############################################################################
# WARNING! WARNING! WARNING! WARNING! WARNING! WARNING! WARNING! WARNING!
#
# This script needs to be modified to enable fencing of the host. By default it
# will fail, as the first line is 'exit 1'. You will need to remove it.
#
# In order to perform the fencing, you will probably need to install a fencing
# utility. They are typically found in: fence-agents-all (CentOS) and fence-
# agents (Ubuntu). They come with many utilities: fence_ilo, fence_ipmilan,
# fence_apc, etc...
#
# To call the fencing utility, you will need to pass some parameters, which are
# typically the iLO IP of the host, etc. We recommend you enter this information
# in the host's template, and pick it up using the xpath example below. AS AN
# EXAMPLE (only an example) the script below expects that you have defined a
# parameter called FENCE_IP in the Host's template, and it will rely on that to
# call the fencing mechanism. You should customize this to your needs. It is
# perfectly OK to discard the code below and use a different mechanism, like
# storing the information required to perform the fencing in a separate CMDB,
# etc. However, you will probably need to get the host's NAME, which should be
# done as shown below.
#
# WARNING! WARNING! WARNING! WARNING! WARNING! WARNING! WARNING! WARNING!
#############################################################################

# @param $1 the host information in base64
# @return 0 on success. Make sure this script does not return 0 if it fails.

# To enable fencing, remove the next line.
# (fix: the message was wrapped in doubled quotes — ""..."" — leaving it
# effectively unquoted and subject to word splitting/globbing)
echo "Fence host not configured, please edit ft/fence_host.sh" && exit 1

#-------------------------------------------------------------------------------
# Get host parameters with XPATH
#-------------------------------------------------------------------------------
if [ -z "$ONE_LOCATION" ]; then
    XPATH=/var/lib/one/remotes/datastore/xpath.rb
else
    XPATH=$ONE_LOCATION/var/remotes/datastore/xpath.rb
fi

if [ ! -x "$XPATH" ]; then
    echo "XPATH not found: $XPATH"
    exit 1
fi

# $1 is the base64-encoded host XML; xpath.rb decodes it via -b.
XPATH="${XPATH} -b $1"

unset i j XPATH_ELEMENTS

# $XPATH is intentionally unquoted so it word-splits into the command plus
# its -b argument; xpath.rb emits the requested values NUL-delimited.
while IFS= read -r -d '' element; do
    XPATH_ELEMENTS[i++]="$element"
done < <($XPATH /HOST/ID \
    /HOST/NAME \
    /HOST/TEMPLATE/FENCE_IP )

HOST_ID="${XPATH_ELEMENTS[j++]}"
NAME="${XPATH_ELEMENTS[j++]}"
FENCE_IP="${XPATH_ELEMENTS[j++]}"

if [ -z "$FENCE_IP" ]; then
    echo "Fence ip not found"
    exit 1
fi

#-------------------------------------------------------------------------------
# Fence
#-------------------------------------------------------------------------------
# Example:
# fence_ilo -a $FENCE_IP -l <username> -p <password>
// Simple restaurant menu: a restaurant name plus its list of dishes.
// Fix: the file ended with a stray #endif that had no matching #ifndef,
// which is a preprocessor error; a proper include guard is added.
#ifndef MENU_H
#define MENU_H

#include <iostream>
#include <string>
#include <vector>

using namespace std;

class Menu {
private:
    string restaurantName;    // name of the restaurant this menu belongs to
    vector<string> dishes;    // dish names, in insertion order

public:
    Menu(string);             // Constructor: sets the restaurant name
    void addDish(string);     // Add a dish to the menu
    void deleteDish(string);  // Delete a dish from the menu
    string getDish(int);      // Get a dish from the menu by index
};

#endif // MENU_H
def process_data(tables, sheet, row, counter, title, price, price_old, brand, link):
    """Validate one scraped product and record it in ``tables`` and ``sheet``.

    Args:
        tables: dict mapping product title -> [price, price_old, brand, link].
        sheet: worksheet-like object exposing ``write(cell, value)``.
        row: 1-based spreadsheet row to write this product into.
        counter: unused here; kept for interface compatibility with callers.
        title, price, price_old, brand, link: scraped product fields.

    Returns:
        ``row + 1`` when the product was written, otherwise the unchanged
        ``row``.  Fix: the original incremented a *local* ``row`` that was
        discarded on return, so callers could never advance the row.
    """
    try:
        if price < 0:
            raise ValueError("Invalid price for element {}".format(title))
        if not isinstance(brand, str):
            raise TypeError("Invalid brand type for element {}".format(title))
        if not link:
            raise ValueError("Empty link for element {}".format(title))
        tables[title] = [price, price_old, brand, link]
        # Columns: A=title, B=price, C=old price, D=brand, E=link.
        sheet.write('A' + str(row), title)
        sheet.write('B' + str(row), price)
        sheet.write('C' + str(row), price)
        sheet.write('C' + str(row), price_old)
        sheet.write('D' + str(row), brand)
        sheet.write('E' + str(row), link)
        return row + 1
    except (ValueError, TypeError) as err:
        # Validation failures are reported but do not abort the scrape.
        print(err)
    except Exception as e:
        print("Error processing element {}: {}".format(title, e))
    return row
#pragma once

#include "Config.h"

// Test entry points for the ARM module; implementations live elsewhere.
// NOTE(review): presumably each returns true on success — confirm in the
// implementation file.
namespace ARM {
    bool testEncoder();  // exercises the encoder path
    bool testDecoder();  // exercises the decoder path
}
<reponame>ShemiakinYevhen/java_prac<filename>sandbox/src/main/java/ua/stqa/pft/sandbox/Task_1_1/Pointer.java
package ua.stqa.pft.sandbox.Task_1_1;
public class Pointer {
double x;
double y;
public Pointer (double x, double y) {
this.x = x;
this.y = y;
}
public double distance (Pointer b) {
return Math.sqrt((Math.pow((b.x - this.x), 2) + Math.pow((b.y - this.y), 2)));
}
}
|
#!/usr/bin/env bash
set -e

# Upgrade kubelet/kubeadm/kubectl/cni on a worker node and re-sync kubeadm.
# The packages are version-pinned and re-held afterwards so that a regular
# `apt-get upgrade` cannot move them.
K8S_VERSION="1.16.13-00"
CNI_VERSION="0.8.6-00"

echo "upgrading packages to: kubelet=${K8S_VERSION} kubeadm=${K8S_VERSION} kubectl=${K8S_VERSION} kubernetes-cni=${CNI_VERSION}"
apt-mark unhold kubeadm kubelet kubectl kubernetes-cni
apt-get -q update -o Acquire::Retries=3 -o Acquire::http::No-Cache=True -o Acquire::http::Timeout=30 -o Acquire::https::No-Cache=True -o Acquire::https::Timeout=30 -o Acquire::ftp::Timeout=30
apt-get -q install -y "kubelet=${K8S_VERSION}" "kubeadm=${K8S_VERSION}" "kubectl=${K8S_VERSION}" "kubernetes-cni=${CNI_VERSION}"
apt-mark hold kubeadm kubelet kubectl kubernetes-cni

echo 'upgrading kubeadm in worker node'
kubeadm upgrade node
systemctl restart kubelet

# Wait for kubelet to come back up.  Fix: $(...) replaces the legacy
# backticks and the substitution is quoted, so an empty/unexpected status
# string cannot break the [ ] test.
while [ "$(systemctl is-active kubelet)" != 'active' ]; do
  echo 'waiting for kubelet'
  sleep 5
done
|
#!/bin/sh
# This script installs underworld on raijin.nci.org.au
#
# Usage:
#     sh ./nci_raijin.sh <branch>
#
# branch (optional):
#     branch name to checkout, i.e. 'master'(default), 'development', 'x.y.z'

# exit when any command fails
set -e

#DATE=`date +%d%b%Y` # could be used to date checkout eg,
#UW_DIR=`pwd`/underworld-$DATE
UW_DIR="$(pwd)/underworld"
git clone https://github.com/underworldcode/underworld2.git "$UW_DIR"
cd "$UW_DIR"

# Fix: default to 'master' as documented above; previously a missing
# argument expanded `git checkout $1` to a bare `git checkout`.
git checkout "${1:-master}"

# setup modules
module purge
RUN_MODS='pbs dot mpi4py/3.0.2-py36-ompi3'
# RUN_MODS is deliberately unquoted so it word-splits into module names.
module load hdf5/1.10.2p petsc/3.9.4 gcc/5.2.0 mesa/11.2.2 swig/3.0.12 scons/3.0.1 $RUN_MODS
echo "*** The module list is: ***"
module list -t

# setup python environment with preinstalled packages (h5py, lavavu, pint)
export PYTHONPATH=/apps/underworld/opt/h5py/2.9.0-py36-ompi3/lib/python3.6/site-packages/h5py-2.9.0-py3.6-linux-x86_64.egg/:/apps/underworld/opt/lavavu/1.4.1_rc/:/apps/underworld/opt/pint/0.9_py36/lib/python3.6/site-packages/:$PYTHONPATH
echo "*** New PYTHONPATH: $PYTHONPATH ***"

# build and install code
cd libUnderworld
CONFIG="./configure.py --python-dir=$(python3-config --prefix) --with-debugging=0"
echo "*** The config line is: ***"
echo "$CONFIG"
echo ""
$CONFIG
./compile.py -j4
# Fix: `source` is a bashism; use the POSIX dot command under #!/bin/sh.
cd .. ; . ./updatePyPath.sh

# some messages
echo "#####################################################################"
echo "Underworld2 built successfully at: "
echo "   $UW_DIR "
echo "Remember to set the required environment before running Underworld2. "
echo "   module load $RUN_MODS "
echo "   export PYTHONPATH=$PYTHONPATH "
echo "   export LD_PRELOAD=$OPENMPI_ROOT/lib/libmpi.so "
echo "#####################################################################"
|
#include <stdio.h>
#include <string.h>

/*
 * Finds every occurrence of `pat` in `str` and prints the index of each
 * match.  Exits 0 if at least one match was found, 1 otherwise — the
 * original always returned 0, contradicting its own closing comment.
 */
int main(void)
{
    // Variables
    char str[] = "Hello World!";
    char pat[] = "ll";

    /* Hoisted: strlen(str) was re-evaluated on every loop iteration. */
    size_t n = strlen(str);
    size_t len = strlen(pat);
    int found = 0;

    /* Only consider start positions that leave room for the whole pattern. */
    for (size_t i = 0; i + len <= n; i++) {
        /* memcmp replaces the hand-rolled inner comparison loop. */
        if (memcmp(str + i, pat, len) == 0) {
            printf("Pattern match found at index %zu\n", i);
            found = 1;
        }
    }

    /* Return 0 if pattern was found, else 1. */
    return found ? 0 : 1;
}
#!/bin/bash -x
#
# Generated - do not edit!
#
# MPLAB X / NetBeans packaging step: stages the built image into a temp
# directory and tars it into dist/<conf>/package/.  IMAGE_TYPE and
# OUTPUT_SUFFIX are expected in the environment, set by the build harness.
#
# Macros
TOP=`pwd`
CND_CONF=PIC32MZ_EC
CND_DISTDIR=dist
TMPDIR=build/${CND_CONF}/${IMAGE_TYPE}/tmp-packaging
TMPDIRNAME=tmp-packaging
OUTPUT_PATH=dist/${CND_CONF}/${IMAGE_TYPE}/Lab11_Cache.${IMAGE_TYPE}.${OUTPUT_SUFFIX}
OUTPUT_BASENAME=Lab11_Cache.${IMAGE_TYPE}.${OUTPUT_SUFFIX}
PACKAGE_TOP_DIR=lab11cache/

# Functions
# Exits the script with the last command's status if it failed.
function checkReturnCode
{
    rc=$?
    if [ $rc != 0 ]
    then
        exit $rc
    fi
}
# Creates a directory (and parents), optionally applying a chmod mode.
function makeDirectory
# $1 directory path
# $2 permission (optional)
{
    mkdir -p "$1"
    checkReturnCode
    if [ "$2" != "" ]
    then
        chmod $2 "$1"
        checkReturnCode
    fi
}
# Copies a file into the staging area, optionally applying a chmod mode.
function copyFileToTmpDir
# $1 from-file path
# $2 to-file path
# $3 permission
{
    cp "$1" "$2"
    checkReturnCode
    if [ "$3" != "" ]
    then
        chmod $3 "$2"
        checkReturnCode
    fi
}

# Setup
cd "${TOP}"
mkdir -p ${CND_DISTDIR}/${CND_CONF}/package
rm -rf ${TMPDIR}
mkdir -p ${TMPDIR}

# Copy files and create directories and links
cd "${TOP}"
makeDirectory ${TMPDIR}/lab11cache/bin
copyFileToTmpDir "${OUTPUT_PATH}" "${TMPDIR}/${PACKAGE_TOP_DIR}bin/${OUTPUT_BASENAME}" 0755

# Generate tar file
cd "${TOP}"
rm -f ${CND_DISTDIR}/${CND_CONF}/package/lab11cache.tar
cd ${TMPDIR}
# Relative ../../../../ climbs back out of build/<conf>/<type>/tmp-packaging.
tar -vcf ../../../../${CND_DISTDIR}/${CND_CONF}/package/lab11cache.tar *
checkReturnCode

# Cleanup
cd "${TOP}"
rm -rf ${TMPDIR}
|
package fr.clementgre.pdf4teachers.datasaving.simpleconfigs;

import fr.clementgre.pdf4teachers.Main;
import fr.clementgre.pdf4teachers.datasaving.Config;
import fr.clementgre.pdf4teachers.interfaces.windows.MainWindow;
import fr.clementgre.pdf4teachers.panel.sidebar.paint.gridviewfactory.VectorGridElement;
import fr.clementgre.pdf4teachers.panel.sidebar.paint.lists.VectorData;
import fr.clementgre.pdf4teachers.utils.PlatformUtils;
import fr.clementgre.pdf4teachers.utils.svg.DefaultFavoriteVectors;
import javafx.application.Platform;

import java.util.*;

/**
 * Persists the paint tab's favourite and most-recently-used vector elements
 * in the "vectorelements" simple-config file (two YAML lists: "favorites"
 * and "lasts").
 */
@SuppressWarnings("unchecked")
public class VectorElementsData extends SimpleConfig{

    public VectorElementsData(){
        super("vectorelements"); // base name of the backing config file
    }

    /**
     * Restores both vector lists from the loaded config.  Deferred to the
     * JavaFX application thread because it touches MainWindow UI components.
     */
    @Override
    protected void manageLoadedData(Config config){
        Platform.runLater(() -> {
            ArrayList<VectorData> favouriteVectorsData = new ArrayList<>();
            ArrayList<VectorData> lastVectorsData = new ArrayList<>();

            // Each list entry is a YAML map describing one vector element;
            // non-map entries are silently skipped.
            for(Object data : config.getList("favorites")){
                if(data instanceof HashMap map) favouriteVectorsData.add(VectorData.readYAMLDataAndGive(map));
            }
            for(Object data : config.getList("lasts")){
                if(data instanceof HashMap map) lastVectorsData.add(VectorData.readYAMLDataAndGive(map));
            }

            PlatformUtils.printActionTimeIfDebug(() -> {
                MainWindow.paintTab.favouriteVectors.loadVectorsList(favouriteVectorsData, false);
                MainWindow.paintTab.lastVectors.loadVectorsList(lastVectorsData, false);
            }, "Load favorites/last vectors");
        });
    }

    /**
     * Fallback when the config cannot be loaded: on the very first launch,
     * seed the favourites with the bundled default vectors.
     */
    @Override
    protected void unableToLoadConfig(){
        if(Main.firstLaunch) MainWindow.paintTab.favouriteVectors.loadVectorsList(DefaultFavoriteVectors.getDefaultFavoriteVectors(), false);
    }

    /**
     * Serialises the current favourite and recent vector lists back into the
     * config, skipping placeholder ("fake") grid entries.
     */
    @Override
    protected void addDataToConfig(Config config){
        ArrayList<Object> favorites = new ArrayList<>();
        for(VectorGridElement item : MainWindow.paintTab.favouriteVectors.getList().getAllItems()){
            if(!item.isFake()) favorites.add(item.getVectorData().getYAMLData());
        }

        ArrayList<Object> lasts = new ArrayList<>();
        for(VectorGridElement item : MainWindow.paintTab.lastVectors.getList().getAllItems()){
            if(!item.isFake()) lasts.add(item.getVectorData().getYAMLData());
        }

        config.set("favorites", favorites);
        config.set("lasts", lasts);
    }
}
|
package org.RestModels.Request;
public class UpdateAccountInfoRequest {
private String fullName;
private String phoneNumber;
private String gitHubUsername;
public String getFullName() {
return fullName;
}
public void setFullName(String fullName) {
this.fullName = fullName;
}
public String getPhoneNumber() {
return phoneNumber;
}
public void setPhoneNumber(String phoneNumber) {
this.phoneNumber = phoneNumber;
}
public String getGitHubUsername() {
return gitHubUsername;
}
public void setGitHubUsername(String gitHubUsername) {
this.gitHubUsername = gitHubUsername;
}
}
|
<reponame>wigging/cpp-programs
/*
Examples of integer, float, and double numbers. An integer is just a whole
number (no decimals). A float is a number with up to 7 decimal digits and a
double is up to 15 decimal digits of precision.
*/
#include <iostream>
int main()
{
    // Integer: whole numbers only.
    const int a = 9;
    // Float: roughly 7 significant decimal digits.
    const float b = 3.14;
    // Double: roughly 15 significant decimal digits.
    const double c = 3.14159;

    // One chained statement instead of three separate ones; output is
    // identical line-for-line.
    std::cout << "a is " << a << std::endl
              << "b is " << b << std::endl
              << "c is " << c << std::endl;

    return 0;
}
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Set Hadoop-specific environment variables here.

# The only required environment variable is JAVA_HOME. All others are
# optional. When running a distributed configuration it is best to
# set JAVA_HOME in this file, so that it is correctly defined on
# remote nodes.

# The java implementation to use.
# NOTE: {{ java_home_dir }} is a templating placeholder substituted at
# deploy time — presumably by Ansible/Jinja; confirm in the deployment role.
export JAVA_HOME={{ java_home_dir }}

# The jsvc implementation to use. Jsvc is required to run secure datanodes
# that bind to privileged ports to provide authentication of data transfer
# protocol. Jsvc is not required if SASL is configured for authentication of
# data transfer protocol using non-privileged ports.
#export JSVC_HOME=${JSVC_HOME}

export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-"/etc/hadoop"}

# Extra Java CLASSPATH elements. Automatically insert capacity-scheduler.
# NOTE(review): if no jars match, $f stays the literal glob pattern and a
# bogus path lands on the classpath — harmless for the JVM, but worth knowing.
for f in $HADOOP_HOME/contrib/capacity-scheduler/*.jar; do
  if [ "$HADOOP_CLASSPATH" ]; then
    export HADOOP_CLASSPATH=$HADOOP_CLASSPATH:$f
  else
    export HADOOP_CLASSPATH=$f
  fi
done

# The maximum amount of heap to use, in MB. Default is 1000.
#export HADOOP_HEAPSIZE=
#export HADOOP_NAMENODE_INIT_HEAPSIZE=""

# Extra Java runtime options. Empty by default.
export HADOOP_OPTS="$HADOOP_OPTS -Djava.net.preferIPv4Stack=true"

# Command specific options appended to HADOOP_OPTS when specified
export HADOOP_NAMENODE_OPTS="-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,RFAS} -Dhdfs.audit.logger=${HDFS_AUDIT_LOGGER:-INFO,NullAppender} $HADOOP_NAMENODE_OPTS"
export HADOOP_DATANODE_OPTS="-Dhadoop.security.logger=ERROR,RFAS $HADOOP_DATANODE_OPTS"
export HADOOP_SECONDARYNAMENODE_OPTS="-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,RFAS} -Dhdfs.audit.logger=${HDFS_AUDIT_LOGGER:-INFO,NullAppender} $HADOOP_SECONDARYNAMENODE_OPTS"

export HADOOP_NFS3_OPTS="$HADOOP_NFS3_OPTS"
export HADOOP_PORTMAP_OPTS="-Xmx512m $HADOOP_PORTMAP_OPTS"

# The following applies to multiple commands (fs, dfs, fsck, distcp etc)
export HADOOP_CLIENT_OPTS="-Xmx512m $HADOOP_CLIENT_OPTS"
#HADOOP_JAVA_PLATFORM_OPTS="-XX:-UsePerfData $HADOOP_JAVA_PLATFORM_OPTS"

# On secure datanodes, user to run the datanode as after dropping privileges.
# This **MUST** be uncommented to enable secure HDFS if using privileged ports
# to provide authentication of data transfer protocol. This **MUST NOT** be
# defined if SASL is configured for authentication of data transfer protocol
# using non-privileged ports.
export HADOOP_SECURE_DN_USER=${HADOOP_SECURE_DN_USER}

# Where log files are stored. $HADOOP_HOME/logs by default.
#export HADOOP_LOG_DIR=${HADOOP_LOG_DIR}/$USER

# Where log files are stored in the secure data environment.
export HADOOP_SECURE_DN_LOG_DIR=${HADOOP_LOG_DIR}/${HADOOP_HDFS_USER}

###
# HDFS Mover specific parameters
###
# Specify the JVM options to be used when starting the HDFS Mover.
# These options will be appended to the options specified as HADOOP_OPTS
# and therefore may override any similar flags set in HADOOP_OPTS
#
# export HADOOP_MOVER_OPTS=""

###
# Advanced Users Only!
###

# The directory where pid files are stored. /tmp by default.
# NOTE: this should be set to a directory that can only be written to by
#       the user that will run the hadoop daemons.  Otherwise there is the
#       potential for a symlink attack.
export HADOOP_PID_DIR=${HADOOP_PID_DIR}
export HADOOP_SECURE_DN_PID_DIR=${HADOOP_PID_DIR}

# A string representing this instance of hadoop. $USER by default.
export HADOOP_IDENT_STRING=$USER
|
<gh_stars>10-100
import numpy as np
import torch, torch.nn as nn, torch.nn.functional as F
import batchminer
import criteria
"""================================================================================================="""
ALLOWED_MINING_OPS = None      # this criterion performs no tuple mining
REQUIRES_BATCHMINER = False    # no batchminer needed
REQUIRES_OPTIM = True          # proxies are trainable -> needs its own optimizer

class Criterion(torch.nn.Module):
    # Proxy-based metric-learning loss ('oproxy'): one trainable proxy
    # vector per class; samples are pulled towards their class proxy and
    # pushed away from the proxies of all other classes.
    def __init__(self, opt):
        """
        Args:
            opt: Namespace containing all relevant parameters.
        """
        super(Criterion, self).__init__()

        self.pars = opt

        ####
        self.ALLOWED_MINING_OPS = ALLOWED_MINING_OPS
        self.REQUIRES_BATCHMINER = REQUIRES_BATCHMINER
        self.REQUIRES_OPTIM = REQUIRES_OPTIM

        ####
        self.num_proxies = opt.n_classes
        self.embed_dim = opt.embed_dim

        # One proxy per class, small random init (std scaled down by 8).
        self.proxies = torch.randn(self.num_proxies, self.embed_dim)/8.
        self.proxies = torch.nn.Parameter(self.proxies)
        # Learning rate for the proxy parameters (REQUIRES_OPTIM consumers
        # presumably pick this up — confirm in the training loop).
        self.lr = opt.lr * opt.loss_oproxy_lrmulti

        ###
        self.class_idxs = torch.arange(self.num_proxies)

        self.name = 'oproxy'

        pars = {'pos_alpha':opt.loss_oproxy_pos_alpha,
                'pos_delta':opt.loss_oproxy_pos_delta,
                'neg_alpha':opt.loss_oproxy_neg_alpha,
                'neg_delta':opt.loss_oproxy_neg_delta}
        # NOTE(review): this overwrites the `opt` namespace stored in
        # self.pars above with just the alpha/delta dict — presumably
        # intentional, but confirm nothing downstream expects `opt` here.
        self.pars = pars

        ###
        self.mode = opt.loss_oproxy_mode            # 'anchor' or 'nca'
        self.euclidean = opt.loss_oproxy_euclidean  # distances instead of cosine sims
        self.d_mode = 'euclidean' if self.euclidean else 'cosine'

        ###
        self.f_soft = torch.nn.Softplus()

    def prep(self, thing):
        # L2-normalize rows so embeddings/proxies are unit length.
        return 1.*torch.nn.functional.normalize(thing, dim=1)

    def forward(self, batch, labels, **kwargs):
        """
        Args:
            batch: torch.Tensor: Input of embeddings with size (BS x DIM)
            labels: nparray/list: For each element of the batch assigns a class [0,...,C-1], shape: (BS x 1)
        """
        ###
        bs = len(batch)
        batch = self.prep(batch)

        self.labels = labels.unsqueeze(1)

        ###
        self.u_labels = self.labels.view(-1)
        # same_labels[i, j]: samples i and j belong to the same class.
        self.same_labels = (self.labels.T == self.u_labels.view(-1,1)).to(batch.device).T
        # diff_labels[i, c]: class/proxy c is NOT sample i's class.
        self.diff_labels = (self.class_idxs.unsqueeze(1) != self.labels.T).to(torch.float).to(batch.device).T

        ###
        # Reduction axis for the masked log-sum-exp: over samples ('anchor')
        # or over proxies ('nca').
        if self.mode == "anchor":
            self.dim = 0
        elif self.mode == "nca":
            self.dim = 1

        ###
        loss = self.compute_proxyloss(batch)

        ###
        return loss

    ###
    def compute_proxyloss(self, batch):
        proxies = self.prep(self.proxies)
        # In euclidean mode smaller is better, so the alphas flip sign.
        pars = {k:-p if self.euclidean and 'alpha' in k else p for k,p in self.pars.items()}

        ###
        # Similarity of each sample to its own class proxy / to all proxies.
        pos_sims = self.smat(batch, proxies[self.u_labels], mode=self.d_mode)
        sims = self.smat(batch, proxies, mode=self.d_mode)

        ###
        # Margin-shifted, temperature-scaled similarities.
        w_pos_sims = -pars['pos_alpha']*(pos_sims-pars['pos_delta'])
        w_neg_sims = pars['neg_alpha']*(sims-pars['neg_delta'])

        # Masked log-sum-exp over the chosen axis, smoothed by Softplus.
        pos_s = self.masked_logsumexp(w_pos_sims,mask=self.same_labels,dim=self.dim,max=True if self.d_mode=='euclidean' else False)
        neg_s = self.masked_logsumexp(w_neg_sims,mask=self.diff_labels,dim=self.dim,max=False if self.d_mode=='euclidean' else True)

        pos_s = self.f_soft(pos_s)
        neg_s = self.f_soft(neg_s)
        pos_s, neg_s = pos_s.mean(), neg_s.mean()
        loss = pos_s + neg_s

        return loss

    ###
    def smat(self, A, B, mode='cosine'):
        # Pairwise similarity matrix: dot products ('cosine', assuming the
        # inputs were normalized by prep) or euclidean distances.
        if mode=='cosine':
            return A.mm(B.T)
        elif mode=='euclidean':
            As, Bs = A.shape, B.shape
            # sqrt(||a||^2 + ||b||^2 - 2 a.b), clamped for numerical safety.
            return (A.mm(A.T).diag().unsqueeze(-1)+B.mm(B.T).diag().unsqueeze(0)-2*A.mm(B.T)).clamp(min=1e-20).sqrt()

    ###
    def masked_logsumexp(self, sims, dim=0, mask=None, max=True):
        # Log-sum-exp over `dim` restricted to entries where mask is set;
        # `max` selects whether the stabilising reference value is the
        # masked max or the masked min.
        if mask is None:
            return torch.logsumexp(sims, dim=dim)
        else:
            if not max:
                ref_v = (sims*mask).min(dim=dim, keepdim=True)[0]
            else:
                ref_v = (sims*mask).max(dim=dim, keepdim=True)[0]

            # Drop slices whose masked entries are all zero (heuristic:
            # masked max + masked min == 0 — NOTE(review): a non-empty slice
            # summing to exactly zero would also be dropped; confirm).
            nz_entries = (sims*mask)
            nz_entries = nz_entries.max(dim=dim,keepdim=True)[0]+nz_entries.min(dim=dim,keepdim=True)[0]
            nz_entries = torch.where(nz_entries.view(-1))[0].view(-1)

            if not len(nz_entries):
                return torch.tensor(0).to(torch.float).to(sims.device)
            else:
                return torch.log((torch.sum(torch.exp(sims-ref_v.detach())*mask,dim=dim)).view(-1)[nz_entries])+ref_v.detach().view(-1)[nz_entries]
            # return torch.log((torch.sum(torch.exp(sims)*mask,dim=dim)).view(-1))[nz_entries]
|
#!/bin/bash
# Regenerate code and start the Next.js dev server.
# -v/-x echo every line and command for debugging; -e/-u/-o pipefail abort
# on errors, unset variables and failed pipeline stages.
set -veuxo pipefail

# source it to keep the rust server alive out here
# (running generate.bash in this shell rather than a child keeps whatever
# it launches attached to this session for the `next dev` run below)
source ./scripts/generate.bash

next dev
|
<gh_stars>0
/**
 * Interfaces for the jutebag backend.
 * Must be kept in sync with the remote (server-side) interfaces!
 */

/** Payload sent to the backend when saving the whole shopping bag. */
interface RemoteSaveRequest {
    /** Account the data belongs to. */
    email: string;
    items: Array<RemoteShoppingItem>;
    categories: Array<RemoteCategory>;
    /** Revision counter — presumably used for conflict detection; confirm server semantics. */
    revision: number;
}

/** One shopping-list entry as stored remotely. */
interface RemoteShoppingItem {
    id: number;
    name: string;
    /** Quantity of this item. */
    qty: number;
    category: string; // grouped by category,
    /** NOTE(review): presumably true when the item is already put away/in the cart — confirm. */
    stored: boolean;
}

/** A category that items can be grouped under. */
interface RemoteCategory {
    id: number;
    name: string;
}

export { RemoteSaveRequest, RemoteCategory, RemoteShoppingItem }
|
#include <cstdio>
using namespace std;
// Merges the two sorted runs A[start..m] and A[m+1..end] (inclusive bounds)
// back into A[start..end].
//
// Fixes over the original: the left cursor started at 0 instead of `start`,
// the right-run exhaustion test compared j against the run *length* n
// instead of `end`, and the merged buffer was copied back to A[0..n-1]
// instead of A[start..end] — together these merged the wrong elements and
// clobbered the front of the array.
void merge(int *A, int start, int end, int m)
{
    int n = end - start + 1;
    int i = start;   // cursor in the left run  A[start..m]
    int j = m + 1;   // cursor in the right run A[m+1..end]
    int B[n];        // scratch buffer for the merged result

    for (int k = 0; k < n; k++)
    {
        if (j > end)                // right run exhausted
        {
            B[k] = A[i];
            i++;
        }
        else if (i > m)             // left run exhausted
        {
            B[k] = A[j];
            j++;
        }
        else if (A[i] < A[j])
        {
            B[k] = A[i];
            i++;
        }
        else
        {
            B[k] = A[j];
            j++;
        }
    }

    // Copy the merged run back into place (offset by `start`, not 0).
    for (int k = 0; k < n; k++)
    {
        A[start + k] = B[k];
    }
}
// Recursively sorts A[start..end] (inclusive bounds) via merge sort.
//
// Fixes over the original: it computed the size with sizeof(A)/sizeof(A[0])
// on a *pointer* (which yields the pointer size, not the array length),
// ignored `start`/`end` when recursing (always 0..n-1), and printed a debug
// line with that bogus size on every call.
void mergesort(int *A, int start, int end)
{
    if (start < end)
    {
        // Overflow-safe midpoint of [start, end].
        int m = start + (end - start) / 2;
        mergesort(A, start, m);
        mergesort(A, m + 1, end);
        merge(A, start, end, m);
    }
}
int main()
{
    int num[5] = {3, 1, 2, 5, 4};
    // `end` is an inclusive index: the last element is at 4, not 5.
    // The original passed 5, one past the end of the array.
    mergesort(num, 0, 4);
    for (size_t i = 0; i < 5; i++)
    {
        printf("%d ", num[i]);
    }
    return 0;
}
# coding=utf-8
import pandas as pd
import os
import logging
from datetime import datetime
from osgeo import gdal, ogr, osr
# from fiona.crs import from_epsg,from_string,to_string
import subprocess
HOMEDIR = os.path.dirname(os.path.abspath(os.path.dirname(__file__)))
logger = logging.getLogger('root') # default logger object to write all messages into
pd.options.mode.chained_assignment = None # disable annoying pandas warnings about chained pizdarija
def create_logger(name='root'):
    """Create (or reconfigure) a logger writing to both log.txt and the console.

    Args:
        name: logger name registered with the logging module.

    Returns:
        The configured ``logging.Logger``: file handler at INFO (truncating
        the previous run's log), console handler at DEBUG.
    """
    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)

    # Fix: the original appended new handlers on every call, so calling
    # create_logger twice duplicated every log line (and re-opened log.txt).
    logger.handlers.clear()

    FORMAT = "%(levelname)-8s: %(message)-100s --- [%(asctime)s: %(module)s.%(funcName)s, line:%(lineno)s]"
    formatter = logging.Formatter(FORMAT, datefmt='%H:%M:%S')

    # HANDLER FOR FILE OUTPUT (WITH THE NAME OF THE CALLING TOPMOST SCRIPT
    logfile = os.path.join(HOMEDIR, "log.txt")
    fh = logging.FileHandler(logfile, mode='w')
    fh.setLevel(logging.INFO)
    fh.setFormatter(formatter)
    logger.addHandler(fh)

    # HANDLER FOR CONSOLE OUTPUT
    ch = logging.StreamHandler()
    ch.setLevel(logging.DEBUG)
    ch.setFormatter(formatter)
    logger.addHandler(ch)

    return logger
def roundTime(date=None, minutes=60):
    """Floor a datetime to the previous multiple of ``minutes`` within its hour.

    Seconds and microseconds are dropped; with the default ``minutes=60``
    the result is the start of the hour.

    Based on a rounding helper by Thorvald Autin (2012), adapted to accept
    only datetime objects (2014).

    Args:
        date: a ``datetime.datetime`` instance (strings are rejected).
        minutes: bucket size in minutes.

    Raises:
        TypeError: when ``date`` is not a ``datetime`` object.
    """
    if not isinstance(date, datetime):
        raise TypeError("Date mora bit podan v datetime formatu, ne pa v stringu! Posrkbi za pretvorbo prehodno!")
    floored_minute = (date.minute // minutes) * minutes
    return datetime(date.year, date.month, date.day, date.hour, floored_minute)
def print_full_df(x):
    """Print the entire DataFrame ``x``, bypassing pandas' display truncation.

    Widens the row/column display limits to the frame's full extent for the
    duration of the print, then restores the defaults.
    """
    limits = {'display.max_rows': len(x.index),
              'display.max_columns': len(x.columns)}
    for option, value in limits.items():
        pd.set_option(option, value)
    print(x)
    for option in limits:
        pd.reset_option(option)
def find_uniform_df_cols(df):
    """Return the names of columns whose values are all identical.

    Works on a deep copy so the input frame is never mutated.  A "geometry"
    column (GeoDataFrames) is removed first because its unhashable values
    break ``nunique``.
    """
    frame = df.copy(deep=True)
    if "geometry" in frame.columns:
        del frame["geometry"]
    varying = [col for col in frame.columns if frame[col].nunique() > 1]
    return frame.drop(varying, axis=1).columns.tolist()
# def check_epsg_equality(rasters=(),vectors=()):
# logger.info("Checking the coordinate system consistency of input data ...")
#
# epsgs = {}
#
# for raster in rasters:
# if raster is not None:
# epsgs[raster]=gdal_get_epsg(raster)
#
# for shp in vectors:
# if shp is not None:
# epsgs[shp] = ogr_get_epsg(shp)
#
# if not len(set(epsgs.values())) ==1 or None in epsgs.values():
# raise Exception("EPSG (e)quality check for the specified datasets failed. Below you can find the results:\n{}".format("\n".join(["{}: {}".format(k,v) for k,v in epsgs.items()]))
# )
# else:
# return epsgs.values()[0]
#
# def gdal_get_epsg(raster):
# #CESKA PARSING METODA FTW!
# a = str(subprocess.check_output("gdalinfo {}".format(raster),shell=True))
# a = a.split('AUTHORITY["EPSG","')[-1].split('"]]')[0]
# #<NAME>! (OBOJE GRDO ZA ZNORT...)
# # print(a)
# # a = a.split("\n")
# # a = [x for x in a if "AUTHORITY" in x][-1]
# # a = a.split('"')[-2]
# return int(a)
# #
# def ogr_get_epsg(shapefile):
#
# if isinstance(shapefile,str):
# prj_file = os.path.splitext(os.path.abspath(shapefile))[0]+".prj"
# if not os.path.isfile(prj_file):
# raise Exception("Given Shapefile {} has no prj file attached! Make sure have the georeferencing information attached to the shapefile!".format(shapefile))
#
# with open(prj_file,"r") as f:
# prj_txt = f.read()
# epsg = esri_to_epsg(prj_txt)
#
# else:
# # This means, the input is already inmemory (e.g. geodataframe). Extract it's proj4 information
# epsg = proj4_to_epsg(to_string(shapefile.crs))
#
# return epsg
#
# def esri_to_epsg(wkt_txt):
# srs = osr.SpatialReference()
# srs.ImportFromESRI([wkt_txt])
# srs.AutoIdentifyEPSG()
# epsg = srs.GetAuthorityCode(None)
#
# if not epsg: # try also with wkt web service recognition
# raise Exception("Couldn't find a epsg code for the prj string {}!".format(wkt_txt))
#
# return epsg
#
# def proj4_to_epsg(prj_txt):
# #Code that checks prj txt file and returns epsg code
#
# srs = osr.SpatialReference()
# srs.ImportFromProj4(prj_txt)
# proj4_txt = srs.ExportToProj4().strip() # Silly, but only for the sake of ordering projection
#
# #list of common proj4 files (to avoid using internet too much ---> slow!) Lahko dodas nove s pomocjo "gedit /usr/share/proj/epsg"
#
# #list of common
# proj4_dict = {
# "+proj=tmerc +lat_0=0 +lon_0=15 +k=0.9999 +x_0=500000 +y_0=-5000000 +ellps=bessel +towgs84=682,-203,480,0,0,0,0 +units=m +no_defs": 3912,
# "+proj=tmerc +lat_0=0 +lon_0=15 +k=0.9999 +x_0=500000 +y_0=-5000000 +ellps=bessel +units=m +no_defs": 3912,
# "+proj=tmerc +lat_0=0 +lon_0=15 +k=0.9999 +x_0=500000 +y_0=-5000000 +ellps=GRS80 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs": 3794,
# "+proj=tmerc +lat_0=0 +lon_0=15 +k=0.9999 +x_0=500000 +y_0=-5000000 +ellps=GRS80 +units=m +no_defs": 3794,
# "+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs": 3857,
# "+proj=longlat +datum=WGS84 +no_defs": 4326}
#
#
# # First check if projection is in the common projections list
# if proj4_txt in proj4_dict.keys():
# epsg = proj4_dict[proj4_txt]
#
# else:
# # try to find its
# wkt_txt = srs.ExportToWkt() # export to wkt format
# epsg = esri_to_epsg(wkt_txt)
#
# return epsg
#
# def ogr2ogr(file_in=None,file_out=None,epsg_in=None,epsg_out=None):
# if not file_out:
# ime,ext = os.path.splitext(os.path.abspath(file_in))
# file_out = ime + "_{}".format(epsg_out)+ext
#
# cmd = 'ogr2ogr -f "ESRI Shapefile" -s_srs EPSG:{} -t_srs EPSG:{} {} {}'.format(epsg_in,epsg_out,file_in,file_out)
# subprocess.check_output(cmd, shell=True)
#
# return file_out
#
# def gdalwarp(file_in=None,file_out=None,epsg_in=None,epsg_out=None):
# if not file_out:
# ime,ext = os.path.splitext(os.path.abspath(file_in))
# file_out = ime + "_{}".format(epsg_out)+ext
#
# cmd = "gdalwarp -s_srs EPSG:{} -t_srs EPSG:{} {} {}".format(epsg_in,epsg_out,file_in,file_out)
# subprocess.check_output(cmd, shell=True)
#
# return file_out
#
|
<reponame>gaunthan/design-patterns-by-golang
package adapter

// HDMIPort models an HDMI connection endpoint as a channel of ints
// (presumably one value per transmitted signal unit — confirm with callers).
type HDMIPort chan int

// DVIPort models a DVI connection endpoint as a channel of strings.
type DVIPort chan string
|
String sentence = "I am a student";
// StringBuilder.reverse() reverses in place in O(n); the original loop
// concatenated onto an immutable String, allocating a new copy per
// character (O(n^2) time and garbage).
String reversed = new StringBuilder(sentence).reverse().toString();
System.out.println(reversed);
# Fix: `alias` is not expanded in non-interactive bash, so the original
# `alias pandoc=...` silently fell through to whatever `pandoc` was on
# PATH when this file was run as a script.  A variable works everywhere.
PANDOC="${HOME}/.local/pandoc-2.9.1.1/bin/pandoc"

# HTML slide deck (reveal.js).
"$PANDOC" \
    Gaussian_training_session.md \
    -o Gaussian_training_session.html \
    --to revealjs \
    --toc \
    --standalone \
    --self-contained \
    -V theme=beige \
    --css themes/my.css

## revealjs themes:
# beige black blood league moon night serif simple sky solarized white

# PDF handout with the custom highlight theme.
"$PANDOC" \
    Gaussian_training_session.md \
    -o Gaussian_training_session.pdf \
    --toc \
    --standalone \
    --highlight-style themes/gaussian.theme
|
#!/usr/bin/env bash
# Rebuild the Symfony test database from scratch and load the fixtures.
# Fix: abort on the first failing console command instead of ploughing on
# against a half-built schema; --if-exists keeps the drop from failing on
# a pristine machine where the database does not exist yet.
set -e

php bin/console doctrine:database:drop --env=test --force --if-exists
php bin/console doctrine:database:create --env=test
php bin/console doctrine:schema:update --env=test --force
php bin/console doctrine:fixtures:load --env=test --no-interaction --fixtures=src/AppBundle/DataFixtures/ORM/
|
#!/usr/bin/env sh
# Copyright by Daniel Loos
#
# Research Group Systems Biology and Bioinformatics - Head: Assoc. Prof. Dr. Gianni Panagiotou
# https://www.leibniz-hki.de/en/systembiologie-und-bioinformatik.html
# Leibniz Institute for Natural Product Research and Infection Biology - Hans Knöll Institute (HKI)
# Adolf-Reichwein-Straße 23, 07745 Jena, Germany
#
# The project code is licensed under BSD 2-Clause.
# See the LICENSE file provided with the code for the full license.
#
# Creates THF data base
# IN: THF release fasta file and csv taxonomy annotation
# OUT: Parsed taxonomy and classifiers of the input db
#

in_fasta=${1:-/dev/stdin}
in_tax_csv=$2
# BUG FIX: the output directory is the *third* positional argument; it was
# previously read from $2 and therefore clashed with the taxonomy CSV path.
out_dir=${3:-out}

mkdir -p "$out_dir"

# create fasta
ln -s "$in_fasta" "$out_dir/refdb.fasta"
qiime tools import \
    --input-path "$out_dir/refdb.fasta" \
    --output-path "$out_dir/refdb.fasta.qza" \
    --type 'FeatureData[Sequence]'

# create THF taxonomy: column 8 is the sequence id; columns 1-7 become the
# k__/p__/c__/o__/f__/g__/s__/t__ rank string (header row skipped).
awk 'BEGIN {FS=","} NR>1 {print $8"\tk__"$1";p__"$2";c__"$3";o__"$4";f__"$5";g__"$6";s__"$6"_"$7";t__"$8}' \
    "$in_tax_csv" \
    > "$out_dir/refdb.tax"

qiime tools import \
    --type 'FeatureData[Taxonomy]' \
    --input-format HeaderlessTSVTaxonomyFormat \
    --input-path "$out_dir/refdb.tax" \
    --output-path "$out_dir/refdb.tax.qza"

qiime feature-classifier fit-classifier-naive-bayes \
    --i-reference-reads "$out_dir/refdb.fasta.qza" \
    --i-reference-taxonomy "$out_dir/refdb.tax.qza" \
    --o-classifier "$out_dir/refdb.ITS.classifier.qza"
<!DOCTYPE html>
<html lang="en">
<head>
<title>My Website</title>
</head>
<body>
<!-- Fixed: the heading's closing tag was "<h1>" instead of "</h1>", which
     left the element unclosed and swallowed the rest of the page into it. -->
<h1>My Website</h1>
<nav>
<ul>
<li><a href="index.html">Home</a></li>
<li><a href="about.html">About</a></li>
<li><a href="contact.html">Contact</a></li>
</ul>
</nav>
</body>
</html>
package cn.icepear.dandelion.common.core.constant;

/**
 * Shared application-wide constants.
 *
 * @author romwood
 * @date 2019-04-01
 */
public interface CommonConstants {
    /**
     * Deleted (record status flag)
     */
    String STATUS_DEL = "1";
    /**
     * Normal (record status flag)
     */
    String STATUS_NORMAL = "0";
    /**
     * Locked (record status flag)
     */
    String STATUS_LOCK = "9";
    /**
     * Menu (resource type marker)
     */
    String MENU = "0";
    /**
     * Character encoding
     */
    String UTF8 = "UTF-8";
    /**
     * JSON content type
     */
    String CONTENT_TYPE = "application/json; charset=utf-8";
    /**
     * Success marker
     */
    Integer SUCCESS = 0;
    /**
     * Failure marker
     */
    Integer FAIL = 1;
    /**
     * Scheduled-job status
     */
    public enum ScheduleStatus {
        /**
         * Normal (running)
         */
        NORMAL(0),
        /**
         * Paused
         */
        PAUSE(1);

        // Numeric code persisted for this status.
        private int value;

        ScheduleStatus(int value) {
            this.value = value;
        }

        public int getValue() {
            return value;
        }
    }
}
|
#!/bin/sh
# ------------ Change this line for the specific project --------------------
# Example --> Projecte=MS-PATHS
Projecte=FAST_SAT
# ------------ Change this line to the ENDING of the image to correct -------
#------------- It should look like the following ----------------------------
#------------- sub-1-PatId-ses-Id-MPRAGE.nii.gz ------------------------------
# Example --> Imatge=MPRAGE.nii.gz
# Imatge=t2_tse_fs_cor_384_BILAT_3mm
# ------------ Change this line to the extension of the image to correct ----
# Example --> Ext=nii.gz
Ext=nii.gz

# Local scratch directory where each patient is staged and processed.
RunDir=/mnt/DADES/Gerard/RunNas
if [ ! -d $RunDir ]
then
    mkdir $RunDir
fi

# Keep user site-packages out of the tools' Python environments.
export PYTHONNOUSERSITE=True

# path to the images
# NOTE(review): NiftisPostProc is intentionally left blank and must be filled
# in before running — otherwise the `ls` below scans nothing. Confirm.
NiftisPostProc=
curr_dir=$PWD

LlistaPat=$(ls $NiftisPostProc)
for Pat in $LlistaPat
do
    # Decompress any *.Ext image for this patient (glob expands inside [ -e ]).
    if [ -e $NiftisPostProc/$Pat/*."$Ext" ]
    then
        gunzip $(ls $NiftisPostProc/$Pat/*."$Ext")
    fi
    FastSatImage=$(ls $NiftisPostProc/$Pat/*.nii)
    # Process only patients that have an image and no Left-flipped label yet
    # (the label file is the last artifact produced, so it marks completion).
    if [ -f $FastSatImage ] && [ ! -f $NiftisPostProc/$Pat/label_"$Pat"_crop_Left_flipped.nii.gz ]
    then
        mkdir $RunDir/$Pat
        cp $FastSatImage $RunDir/$Pat
        LocalFastSat=$(ls $RunDir/$Pat/.)
        # Split the volume into Right (x 0-174) and Left (x 175-end) halves.
        sct_crop_image -i $RunDir/$Pat/$LocalFastSat -xmin 0 -xmax 174 -ymin 0 -ymax -1 -zmin 0 -zmax -1 -o $RunDir/$Pat/n4_"$Pat"_T2FastSat_crop_Right.nii
        sct_crop_image -i $RunDir/$Pat/$LocalFastSat -xmin 175 -xmax -1 -ymin 0 -ymax -1 -zmin 0 -zmax -1 -o $RunDir/$Pat/n4_"$Pat"_T2FastSat_crop_Left.nii
        # Mirror the left half so both sides share the same orientation.
        matlab -nosplash -nodisplay -nodesktop -r "flipOneEye('$RunDir/$Pat/n4_"$Pat"_T2FastSat_crop_Right.nii', '$RunDir/$Pat/n4_"$Pat"_T2FastSat_crop_Left.nii');exit"
        # Interactive labelling (opens a viewer) for each half.
        sct_label_utils -i $RunDir/$Pat/n4_"$Pat"_T2FastSat_crop_Right.nii -create-viewer 3 -o $RunDir/$Pat/label_"$Pat"_crop_Right.nii.gz
        sct_label_utils -i $RunDir/$Pat/n4_"$Pat"_T2FastSat_crop_Left_flipped.nii -create-viewer 3 -o $RunDir/$Pat/label_"$Pat"_crop_Left_flipped.nii.gz
        matlab -nosplash -nodisplay -nodesktop -r "cd('$curr_dir'); CreateBoxFromOpticNerveNoEye('$RunDir/$Pat', 'n4_"$Pat"_T2FastSat_crop_Right.nii', 'label_"$Pat"_crop_Right.nii.gz'); CreateBoxFromOpticNerveNoEye('$RunDir/$Pat', 'n4_"$Pat"_T2FastSat_crop_Left_flipped.nii', 'label_"$Pat"_crop_Left_flipped.nii.gz'); exit"
        # Copy results back to the NAS and drop the scratch copy.
        Moure=$(ls $RunDir/$Pat )
        for Fitxer in $Moure
        do
            cp -r $RunDir/$Pat/$Fitxer $NiftisPostProc/$Pat
        done
        rm -r $RunDir/$Pat
    fi
    echo $Pat is done
done
|
#!/bin/bash
#
# Copyright IBM Corp. All Rights Reserved.
#
# SPDX-License-Identifier: Apache-2.0
#
########## CI test ##########
# Runs PTE testcase FAB-7333-4i (4 invoke threads, golevelDB) after a
# network launch / ledger synch-up, then gathers per-peer TPS stats.
CWD=$PWD
PREFIX="result" # result log prefix
#### Launch network and synch-up ledger
# FIX: abort if the scripts directory is missing instead of running the
# test drivers (and the rm below) from the wrong directory (SC2164).
cd "$GOPATH/src/github.com/hyperledger/fabric-test/fabric-sdk-node/test/PTE/CITest/scripts" || exit 1
./test_driver.sh -n -m FAB-7333-4i -p -c samplejs -t FAB-7204-4q
#### remove PTE log from synch-up ledger run
rm -f ../Logs/FAB-7204-4q*.log
#### execute testcase FAB-7333-4i: 4 threads invokes, golevelDB
./test_driver.sh -t FAB-7333-4i
#### gather TPS from docker containers
./get_peerStats.sh -r FAB-7333-4i -p peer0.org1.example.com peer0.org2.example.com -c testorgschannel1 -n "$PREFIX" -o "$CWD" -v
require "bosh_release_diff/hash_flattener"
require "bosh_release_diff/deployment_manifest/job"
require "bosh_release_diff/deployment_manifest/property"
module BoshReleaseDiff::DeploymentManifest
  # Wraps a parsed deployment manifest hash together with the source it was
  # read from, exposing its jobs and flattened properties.
  class DeploymentManifest
    def initialize(hash, source)
      @hash = hash
      @source = source
    end

    # Manifest name annotated with its origin.
    def detailed_name
      "#{@hash["name"]} (#{@source})"
    end

    def contextual_name
      @source
    end

    # Jobs that reference the given job template.
    def jobs_using_job_template(job_template_name)
      jobs.find_all { |job| job.uses_job_template?(job_template_name) }
    end

    # Lazily built Job wrappers for each manifest job entry.
    def jobs
      @jobs ||= Array(@hash["jobs"]).map { |job_hash| Job.new(job_hash, properties) }
    end

    # Manifest-level properties, flattened to dotted keys and wrapped as
    # Property objects; memoized.
    def properties
      @properties ||= begin
        flattened = BoshReleaseDiff::HashFlattener.new.flatten(@hash["properties"] || {}) || {}
        flattened.map { |key, value| Property.new(key, value) }
      end
    end
  end
end
|
#! /bin/bash
# Prepare a Laravel project's directory layout: make sure the required
# folders exist, and create + chmod -R 777 the storage/cache folders.
#
# FIXES: all expansions are quoted (ShellCheck SC2086) and arrays expanded
# with "${arr[@]}" so paths containing spaces work; chmod options are given
# before the operand for POSIX compatibility.
# Check that the project path is set and exists.
if [ -z "$PROJECT_PATH" ]; then
    PROJECT_PATH=null
fi
if [ ! -d "$PROJECT_PATH" ]; then
    echo -------------------------------------------------------
    echo "環境變數:PROJECT_PATH未設定, 或路徑不存在, 請重新設定"
    echo "執行方式:env PROJECT_PATH shell.sh"
    echo -------------------------------------------------------
    exit
fi
# Folders that only need to exist.
exist_folders=(bootstrap)
for process_folder in "${exist_folders[@]}"; do
    real_path=$PROJECT_PATH/$process_folder
    echo "Dir Path: $real_path"
    if [ ! -d "$real_path" ]; then
        mkdir "$real_path"
        echo "1. Folder Check : Folder Not Exist, Create Folder Success!"
    else
        echo "1. Folder Check : Folder Exist!"
    fi
    echo ---------------------------------------
done
# Folders that must exist AND be opened up to mode 777.
# NOTE(review): storage/framework/cache appears twice in this list —
# harmless, but probably unintentional.
exist_permission_folders=(storage storage/app storage/app/public storage/framework storage/framework/cache storage/framework/cache storage/framework/sessions storage/framework/views storage/logs bootstrap/cache)
for process_folder in "${exist_permission_folders[@]}"; do
    real_path=$PROJECT_PATH/$process_folder
    echo "Dir Path: $real_path"
    if [ ! -d "$real_path" ]; then
        mkdir "$real_path"
        echo "1. Folder Check : Folder Not Exist, Create Folder Success!"
    else
        echo "1. Folder Check : Folder Exist!"
    fi
    # POSIX chmod requires options before operands.
    chmod -R 777 "$real_path"
    echo "2. Folder Permision Change : Permision Change Success!"
    echo ---------------------------------------
done
|
package com.playMidi.xml2.holders;
import com.playMidi.xml2.XmlMidiTimbreSet;
import java.io.OutputStream;
import java.util.List;
import xml.NameValuePairList;
import xml.unoptimized.NameValuePair;
import xml.XMLelement;
import xml.unoptimized.NameValuePair;
/**
 * One zone of a timbre set, parsed from XML attributes: a volume (velocity)
 * range [volumeMin, volumeMax] plus the instrument ranges active inside it.
 *
 * Created by ra on 05/06/2017.
 */
public class Zone implements XMLelement {
    // Inclusive MIDI volume range this zone responds to; full range by default.
    int volumeMin = 0;
    int volumeMax = 127;
    String name;
    // Single child element, set exactly once via addChild().
    TimbreRanges instruments = null;

    /**
     * Builds a zone from the XML attribute list, consuming the optional
     * name / volumeMax / volumeMin attributes; other attributes are ignored.
     */
    public Zone(NameValuePairList attributes) {
        for (int i = 0; i < attributes.size(); i++) {
            NameValuePair attr = attributes.get(i);
            switch (attr.getNameIndex()) {
                case XmlMidiTimbreSet.attribute_name:
                    this.name = attr.getValue();
                    break;
                case XmlMidiTimbreSet.attribute_volumeMax:
                    this.volumeMax = Integer.parseInt(attr.getValue());
                    break;
                case XmlMidiTimbreSet.attribute_volumeMin:
                    this.volumeMin = Integer.parseInt(attr.getValue());
                    break;
            }
        }
    }
    /**
     * @deprecated this zone emits no instrument modulators.
     * @return always 0
     */
    public int instrumentModulatorCount() {
        return 0;
    }
    /**
     * @deprecated no-op; nothing is written.
     * @param out unused
     */
    public void writeInstrumentModulator(OutputStream out) {
        return;
    }
    // Always 0 — kept for symmetry with other zone element types.
    int getInstrumentModulatorsCount() {
        return 0;
    }
    /**
     * @deprecated no-op; nothing is written.
     * @param out unused
     */
    public void writeModulators(OutputStream out) {
        return;
    }
    /**
     *
     * @deprecated this zone has no modulators.
     * @return always 0
     */
    public int modulatorCount() {
        return 0;
    }
    /** Accepts the single TimbreRanges child; a second child is an error. */
    @Override
    public void addChild(XMLelement f) {
        if (instruments == null) {
            instruments = (TimbreRanges) f;
        }
        else {
            throw new IndexOutOfBoundsException("child node already set");
        }
    }
    /** Validates the closed element: the instruments child is mandatory. */
    @Override
    public void closeElement() throws Exception {
        if (instruments == null) {
            throw new IndexOutOfBoundsException("null instruments");
        }
    }
}
|
<filename>src/main/scala/pl/japila/servicediscovery/ServiceDiscoveryActor.scala
package pl.japila.servicediscovery
import akka.actor.{Props, Actor, ActorLogging}
import akka.stream.actor.{ZeroRequestStrategy, RequestStrategy, ActorSubscriber}
import akka.stream.scaladsl.{ImplicitFlowMaterializer, Source}
/**
 * Actor wrapping a simple Akka Streams flow over its input list: every
 * element is upper-cased and, on the "run" message, the elements are folded
 * into a single string which is piped back to the sender.
 */
class ServiceDiscoveryActor(input: List[String]) extends ActorSubscriber with ActorLogging with ImplicitFlowMaterializer {
  import ServiceDiscoveryActor._
  // Never request stream elements automatically; demand is driven manually.
  override protected def requestStrategy: RequestStrategy = ZeroRequestStrategy
  // Source that upper-cases each input element.
  val flow = Source(input).map(_.toUpperCase)
  def receive = {
    case "run" =>
      import akka.pattern.pipe
      import context.dispatcher
      log.debug("run flow executed")
      // Concatenate all elements; the folded result (presumably a Future in
      // this akka-streams version — confirm) is piped back to the sender.
      flow.fold("")(_ + _) pipeTo sender()
    case m => println(s"$m received")
  }
}
object ServiceDiscoveryActor {
  // Props with a fixed sample input list.
  val props = Props(classOf[ServiceDiscoveryActor], List("a", "b", "c"))
}
|
#!/bin/sh
# Run the release tool inside its dedicated virtualenv, creating the venv
# on first use.  All script arguments are forwarded to release.py; with no
# arguments, --help is shown.
if test -d ./.release-venv; then
    true
else
    ./script/release/setup-venv.sh
fi
# BUG FIX: the old `args=$*; test -z $args` broke as soon as more than one
# argument (or one containing spaces) was passed — word splitting made
# `test` see extra operands.  Use the argument count and "$@" so each
# argument is forwarded exactly as given.
if [ "$#" -eq 0 ]; then
    set -- --help
fi
./.release-venv/bin/python ./script/release/release.py "$@"
package de.ids_mannheim.korap.handlers;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.Map;
import org.springframework.jdbc.core.RowMapper;
import de.ids_mannheim.korap.auditing.AuditRecord;
import de.ids_mannheim.korap.config.Attributes;
import de.ids_mannheim.korap.config.URIParam;
import de.ids_mannheim.korap.user.KorAPUser;
import de.ids_mannheim.korap.user.User;
/**
 * Spring JDBC {@link RowMapper} implementations for user and audit-record
 * result sets.
 *
 * @author hanl
 * @date 14/01/2014
 */
public class RowMapperFactory {

    /** Maps a user row to its Map representation via {@link UserMapper}. */
    public static class UserMapMapper implements RowMapper<Map<?,?>> {
        @Override
        public Map<?, ?> mapRow (ResultSet rs, int rowNum) throws SQLException {
            User user = new UserMapper().mapRow(rs, rowNum);
            return user.toMap();
        }
    }

    /**
     * Maps a user row to a {@link User}: type 0 is a KorAP user; any other
     * type currently falls back to a demo user carrying the row's id and
     * creation timestamp.
     */
    public static class UserMapper implements RowMapper<User> {
        @Override
        public User mapRow (ResultSet rs, int rowNum) throws SQLException {
            User user;
            switch (rs.getInt("type")) {
                case 0:
                    user = getKorAP(rs);
                    break;
                // case 1:
                // user = getShib(rs);
                // break;
                default:
                    // Unknown type: demo user populated from the row.
                    user = User.UserFactory.getDemoUser();
                    user.setId(rs.getInt("id"));
                    user.setAccountCreation(rs.getTimestamp(
                            Attributes.ACCOUNT_CREATION).getTime());
                    return user;
            }
            return user;
        }

        /** Builds a KorAP user from the row, including its URI parameter. */
        private KorAPUser getKorAP (ResultSet rs) throws SQLException {
            KorAPUser user = User.UserFactory.getUser(rs
                    .getString(Attributes.USERNAME));
            user.setPassword(rs.getString(Attributes.PASSWORD));
            user.setId(rs.getInt(Attributes.ID));
            user.setAccountLocked(rs.getBoolean(Attributes.ACCOUNTLOCK));
            user.setAccountCreation(rs.getLong(Attributes.ACCOUNT_CREATION));
            user.setAccountLink(rs.getString(Attributes.ACCOUNTLINK));
            // A stored expiration of 0 means "none" and is mapped to -1.
            long l = rs.getLong(Attributes.URI_EXPIRATION);
            URIParam param = new URIParam(
                    rs.getString(Attributes.URI_FRAGMENT), l == 0 ? -1
                            : new Timestamp(l).getTime());
            user.addField(param);
            return user;
        }

        // private ShibbolethUser getShib (ResultSet rs) throws SQLException {
        // ShibbolethUser user = User.UserFactory.getShibInstance(
        // rs.getString(Attributes.USERNAME),
        // rs.getString(Attributes.MAIL), rs.getString(Attributes.CN));
        // user.setId(rs.getInt(Attributes.ID));
        // return user;
        // }
    }

    /** Maps an audit table row (aud_* columns) to an {@link AuditRecord}. */
    public static class AuditMapper implements RowMapper<AuditRecord> {
        @Override
        public AuditRecord mapRow (ResultSet rs, int rowNum)
                throws SQLException {
            AuditRecord r = new AuditRecord(AuditRecord.CATEGORY.valueOf(rs
                    .getString("aud_category")));
            r.setUserid(rs.getString("aud_user"));
            r.setField_1(rs.getString("aud_field_1"));
            r.setTimestamp(rs.getTimestamp("aud_timestamp").getTime());
            r.setId(rs.getInt("aud_id"));
            r.setStatus(rs.getInt("aud_status"));
            r.setLoc(rs.getString("aud_location"));
            return r;
        }
    }
}
|
#define WHITE 0
#define YELLOW 1
#define RED 2
#include<stdint.h>
#include<vector>
#include"graph.hpp"
/** Make a Depth First Search in the Graph and
* count all of the cycles inside of such Graph
*
* Significance of the colours:
*
* WHITE: Not explored
* YELLOW: Visited, but there's still adjacents to visit
* RED: Visited, along with the adjacents
*/
static bool depth_first_search(Graph g,counter first);
//Visit a vertex
static void visit_vertex (Graph g, counter index, std::vector<counter>* colour);
// Runs a depth-first traversal from every still-unvisited vertex so that
// the whole graph (including disconnected components) is covered.
// NOTE(review): despite the file-header comment, nothing is counted here —
// the function always returns true and the `first` parameter is unused;
// confirm whether cycle counting was meant to be added.
static bool depth_first_search(Graph g,counter first) {
    std::vector<counter> colour;
    //Mark all vertices as : "NOT VISITED"
    // NOTE(review): this loop index is `int` while vertex ids use `counter`
    // elsewhere — confirm `counter` (graph.hpp) is compatible.
    for(int i = 0; i < g.vertices_num(); i++)
        colour.push_back(WHITE);
    //Loop through all vertices in the graph
    for(counter i = 0; i < g.vertices_num(); i++)
        //if the curr vertex hasn't been visited
        if(colour.at(i) == WHITE)
            //visiting current vertex
            //Obs: I'm sending a pointer to the
            //memory address of the 'colour' std::vector
            //so that all recursive calls to 'visit_vertex'
            //can update when a vertex is visited
            visit_vertex(g, i,&colour);
    return true;
}
// Recursively visits `index` and all vertices reachable from it, updating
// the shared colour vector: YELLOW while its adjacency list is still being
// explored, RED once the whole subtree is done.
static void visit_vertex (Graph g, counter index, std::vector<counter>* colour) {
    //Mark current vertex as visited
    colour->at(index) = YELLOW;
    //Start adj std::vector
    std::vector<counter> curr_vertex_adj = g.get_adjs(index);
    //Loop through adj std::vector of the curr_vertex
    for(int i = 0 ; i < curr_vertex_adj.size(); i++)
        //if adj vertex hasn't been visited
        // NOTE(review): an edge to a YELLOW vertex is a back edge (cycle);
        // it is currently ignored rather than counted — confirm intent.
        if(colour->at(curr_vertex_adj.at(i)) == WHITE)
            //recursive call to 'visit_vertex'
            visit_vertex(g,curr_vertex_adj.at(i),colour);
    //Mark vertex as completed
    colour->at(index) = RED;
}
<reponame>isislab-unisa/swiftlangabm
/**
* Copyright 2012 Universita' degli Studi di Salerno
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package it.isislab.swiftlang.abm.mason;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.net.JarURLConnection;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.jar.Attributes;
/**
 * Class loader for dynamically loading a jar at runtime: it can read the
 * jar's Main-Class, append the jar to the system class path, and invoke
 * entry points inside it.
 *
 * @author marvit
 *
 */
public class JarClassLoader extends URLClassLoader
{
    // URL of the jar this loader serves classes from.
    private URL url;

    public JarClassLoader(URL url) {
        super(new URL[] { url });
        this.url = url;
    }

    /**
     * Returns the jar manifest's Main-Class attribute, or null if the jar
     * has no manifest / no such attribute.
     */
    public String getMainClassName() throws IOException
    {
        URL u = new URL("jar", "", url + "!/");
        JarURLConnection uc = (JarURLConnection)u.openConnection();
        Attributes attr = uc.getMainAttributes();
        return attr != null
            ? attr.getValue(Attributes.Name.MAIN_CLASS)
            : null;
    }

    /**
     * Appends the jar to the system class loader's class path by invoking
     * the protected URLClassLoader.addURL reflectively.
     * NOTE(review): this only works while the system class loader is a
     * URLClassLoader (JDK 8 and earlier) — confirm the target JDK.
     */
    public void addToClassPath() throws NoSuchMethodException,
        IllegalAccessException, InvocationTargetException
    {
        URLClassLoader urlClassLoader = (URLClassLoader) ClassLoader.getSystemClassLoader();
        Class urlClass = URLClassLoader.class;
        Method method = urlClass.getDeclaredMethod("addURL", new Class[]{URL.class});
        method.setAccessible(true);
        method.invoke(urlClassLoader, new Object[]{url});
    }

    /**
     * Loads the named class and invokes its public static void
     * main(String[]) with the given arguments.
     */
    public void invokeClass(String name, Object[] args)
        throws ClassNotFoundException,
        NoSuchMethodException,
        InvocationTargetException
    {
        Class c = loadClass(name);
        Method m = c.getMethod("main", new Class[] { args.getClass() });
        m.setAccessible(true);
        int mods = m.getModifiers();
        if (m.getReturnType() != void.class || !Modifier.isStatic(mods) ||
            !Modifier.isPublic(mods)) {
            throw new NoSuchMethodException("main");
        }
        try {
            m.invoke(null, new Object[] { args });
        } catch (IllegalAccessException e) {
            // This should not happen, as we have disabled access checks
        }
    }

    /**
     * Instantiates className through its single-argument (long seed)
     * constructor — used for seeded simulation classes.
     */
    public Object getInstanceWithSeed(String className, long seed) throws ClassNotFoundException, NoSuchMethodException, SecurityException, InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException
    {
        Class simClass = loadClass(className);
        Constructor constr = simClass.getConstructor(new Class[]{ long.class});
        Object obj = constr.newInstance(new Object[]{ seed});
        return obj;
    }
}
#!/bin/bash
# Install option maps: the first argument is the install label; the
# remaining arguments are consumed but unused.  With no arguments after
# the label, blink.sh is run first.
# NOTE(review): the original fell through to the installs even after
# blink.sh — that behaviour is preserved; confirm whether an exit was
# intended there.
od=options
# FIX: quote "$0"/"$sd"/"$label" (SC2086) and drop the `./$sd` prefix,
# which produced a broken relative path whenever $0 was absolute.
sd=$(dirname "$0")
label=$1
shift 1
if [ $# -eq 0 ]
then
    "$sd"/blink.sh
fi
source "$sd"/install.sh maps/map.root "$label" $od/root.options $od/static.options $od/rx.options $od/1minsched.options $od/radiostats.options $od/network.options
source "$sd"/install.sh maps/map.nonroot "$label" $od/slave.options $od/static.options $od/rx.options $od/1minsched.options $od/radiostats.options $od/network.options $od/broadcast_1min.options
|
package top.mowang.cloud;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.netflix.hystrix.EnableHystrix;
import org.springframework.cloud.openfeign.EnableFeignClients;
import springfox.documentation.oas.annotations.EnableOpenApi;
/**
 * Entry point of the order service: a Spring Cloud demo application with
 * OpenFeign clients, Hystrix circuit breaking and OpenAPI documentation
 * enabled.
 *
 * @author : <NAME>
 * @website : https://mowangblog.top
 * @date : 2021/10/30 12:04
 **/
@SuppressWarnings("all")
@EnableOpenApi
@SpringBootApplication
@EnableFeignClients
@EnableHystrix
public class OrderMainFeignHystrixApplication {
    /** Boots the Spring application context. */
    public static void main(String[] args) {
        SpringApplication.run(OrderMainFeignHystrixApplication.class,args);
    }
}
|
import discord
from discord.ext import commands
class JobsBulletin(commands.Cog):
    """Cog that DMs a job bulletin back to the invoking user."""

    def __init__(self, bot: commands.Bot):
        self.bot = bot

    @commands.command(name='sendjob')
    async def send_job_bulletin(self, ctx, *, user_message: discord.Embed):
        # NOTE(review): annotating a consume-rest argument with discord.Embed
        # makes discord.py run an Embed *converter* on the raw message text,
        # which is unlikely to succeed for plain text — `str` was probably
        # intended here. Confirm against discord.py's converter docs.
        await ctx.author.send(embed=user_message)
def setup(bot: commands.Bot) -> None:
    """discord.py extension hook: registers the cog when the extension loads."""
    bot.add_cog(JobsBulletin(bot))
<reponame>joshwyant/cc<filename>common/public/iterator.h
#ifndef COMMON_PUBLIC_ITERATOR_H__
#define COMMON_PUBLIC_ITERATOR_H__
#include <stdbool.h>
#include <stddef.h>
#include "assert.h"
// Generic iterator / sink / indexer framework.  Typed variants for concrete
// element types are stamped out with the DECLARE / DEFINE macros below.
// Bit flags identifying the concrete collection behind an iterator.
typedef enum {
  COLLECTION_NONE,
  COLLECTION_ARRAY = 1 << 0,
  COLLECTION_VECTOR = 1 << 1,
  COLLECTION_LIST = 1 << 2,
  COLLECTION_FORWARD_LIST = 1 << 3,
  COLLECTION_STACK = 1 << 4,
  COLLECTION_QUEUE = 1 << 5,
  COLLECTION_PRIORITY_QUEUE = 1 << 6,
  COLLECTION_SET = 1 << 7,
  COLLECTION_MAP = 1 << 8,
  COLLECTION_CUSTOM = 1 << 9,
} CollectionType;
typedef struct KeyInfo KeyInfo;
typedef struct RelationalKeyInfo RelationalKeyInfo;
typedef struct KeyValuePair KeyValuePair;
// Declares a read cursor over a collection of T: current element,
// move_next, and eof, plus two implementation-defined slots and a version
// counter (presumably for invalidation checks — confirm with users).
#define DECLARE_ITERATOR_TYPE(name, T) \
  typedef struct name##Iterator name##Iterator; \
  struct name##Iterator { \
    CollectionType collection_type; \
    void *collection; \
    size_t elem_size; \
    T *(*current)(const name##Iterator *iter); \
    bool (*move_next)(name##Iterator * iter); \
    bool (*eof)(const name##Iterator *iter); \
    long long impl_data1; \
    long long impl_data2; \
    int version; \
  };
// Default (generic) iterator
DECLARE_ITERATOR_TYPE(, void)
// Declares a write target that accepts elements of T one at a time.
#define DECLARE_SINK_TYPE(name, T) \
  typedef struct name##Sink name##Sink; \
  struct name##Sink { \
    CollectionType collection_type; \
    void *collection; \
    size_t elem_size; \
    T *(*add)(name##Sink * sink, const T *elem); \
    void *state; \
  };
// Default (generic) sink
DECLARE_SINK_TYPE(, void)
// Declares random access (size/get/set) over a collection of T.
#define DECLARE_INDEXER_TYPE(name, T) \
  typedef struct name##Indexer name##Indexer; \
  struct name##Indexer { \
    CollectionType collection_type; \
    void *collection; \
    size_t elem_size; \
    size_t (*size)(const name##Indexer *indexer); \
    T *(*get)(const name##Indexer *indexer, size_t index); \
    void (*set)(name##Indexer * indexer, size_t index, const T *value); \
  };
// Default (generic) indexer
DECLARE_INDEXER_TYPE(, void)
// Hash/equality vtable describing a key type.
struct KeyInfo {
  size_t key_size;
  int (*hash_fn)(const void *key);
  bool (*eq_fn)(const void *key_a, const void *key_b);
};
// KeyInfo extended with a total ordering.
struct RelationalKeyInfo {
  KeyInfo *key_info;
  int (*compare_fn)(const void *a, const void *b);
};
struct KeyValuePair {
  void *key;
  void *value;
};
// Generic algorithms over the untyped iterator/sink/indexer interfaces.
void for_each(Iterator *iter, void (*action)(void *elem));
void for_each_ext(Iterator *iter,
                  void (*action)(void *elem, const Iterator *iter));
bool Iterator_copy(Sink *dest, Iterator *iter);
void Indexer_sort(const Indexer *indexer,
                  int (*compare_fn)(const void *a, const void *b));
bool Iterator_sort(Sink *dest, Iterator *iter,
                   int (*compare_fn)(const void *a, const void *b));
void Iterator_map(const Sink *dest, Iterator *iter,
                  void (*map_fn)(void *dest, const void *elem));
void Iterator_flat_map(const Sink *dest, Iterator *iter,
                       void (*map_fn)(Sink *dest, const void *elem));
// TODO: sum, product, etc.
void Iterator_reduce(const void *dest, Iterator *iter,
                     void (*reduce_fn)(void *dest, const void *elem));
void Iterator_filter(const Sink *dest, Iterator *iter,
                     bool (*filter_fn)(const void *elem));
// Comparators / hashes for C strings (Case variants are case-insensitive).
int CString_compare(const void *a, const void *b);
int CStringCase_compare(const void *a, const void *b);
int CString_hash(const void *key);
int CStringCase_hash(const void *key);
// Declares the typed iterator/indexer/sink, KeyInfo, and helper functions
// for element type T, with caller-named hash and equality functions.
#define DECLARE_CONTAINER_FN(name, T, hash_fn, eq_fn) \
  DECLARE_ITERATOR_TYPE(name, T) \
  DECLARE_INDEXER_TYPE(name, T) \
  DECLARE_SINK_TYPE(name, T) \
  extern KeyInfo name##KeyInfo; \
  int hash_fn(const void *k); \
  bool eq_fn(const void *_a, const void *_b); \
  void name##_for_each(name##Iterator *iter, void (*action)(T * elem)); \
  T name##_reduce(const T initial, name##Iterator *iter, \
                  T (*reduce_fn)(const T accum, const T elem)); \
  bool name##_eof(const name##Iterator *iter); \
  T name##_next(name##Iterator *iter);
#define DECLARE_CONTAINER(name, T) \
  DECLARE_CONTAINER_FN(name, T, name##_hash, name##_eq)
// Defines the typed wrappers declared above by delegating to the generic
// (void*) implementations.
#define DEFINE_CONTAINER_FN(name, T, hash_fn, eq_fn) \
  KeyInfo name##KeyInfo = {sizeof(T), hash_fn, eq_fn}; \
  void name##_for_each(name##Iterator *iter, void (*action)(T * elem)) { \
    for_each((Iterator *)iter, (void (*)(void *))action); \
  } \
  bool name##_eof(const name##Iterator *iter) { \
    return iter->eof((const name##Iterator *)iter); \
  } \
  T name##_next(name##Iterator *iter) { \
    ASSERT(!iter->eof(iter)); \
    return *(T *)iter->move_next(iter); \
  }
// Defines hash/eq from expressions over `key` / `a`,`b`, plus a typed
// reduce.  NOTE(review): the reduce trampoline goes through a file-scope
// function pointer, so nested/concurrent reduces of the same type are not
// safe — confirm that is acceptable.
#define DEFINE_CONTAINER(name, T, hash_expr, eq_expr) \
  int name##_hash(const void *k) { \
    T key = *(const T *)k; \
    return (hash_expr); \
  } \
  bool name##_eq(const void *_a, const void *_b) { \
    T a = *(const T *)_a; \
    T b = *(const T *)_b; \
    return (eq_expr); \
  } \
  T (*_##name##_current_reduce_fn)(const T, const T) = NULL; \
  void _##name##__reduce(const void *dest, const void *elem) { \
    *(T *)dest = \
        _##name##_current_reduce_fn(*(const T *)dest, *(const T *)elem); \
  } \
  T name##_reduce(const T initial, name##Iterator *iter, \
                  T (*reduce_fn)(const T accum, const T elem)) { \
    T (*last_fn)(const T, const T) = _##name##_current_reduce_fn; \
    _##name##_current_reduce_fn = reduce_fn; \
    Iterator_reduce(&initial, (Iterator *)iter, \
                    (void (*)(void *, const void *))_##name##__reduce); \
    _##name##_current_reduce_fn = last_fn; \
    return initial; \
  } \
  DEFINE_CONTAINER_FN(name, T, name##_hash, name##_eq)
// Relational (ordered) container variants: add a compare function and
// derive equality from it.
#define DECLARE_RELATIONAL_CONTAINER_FN(name, T, hash_fn, compare_fn) \
  DECLARE_CONTAINER_FN(name, T, hash_fn, name##_eq) \
  extern RelationalKeyInfo name##RelationalKeyInfo; \
  int compare_fn(const void *a, const void *b);
#define DECLARE_RELATIONAL_CONTAINER(name, T) \
  DECLARE_RELATIONAL_CONTAINER_FN(name, T, name##_hash, name##_compare)
#define DECLARE_CONTAINER_REDUCER(name, T, func_name) \
  T name##_##func_name(name##Iterator *iter);
// "Basic" = relational plus the sum/product/min/max reducers.
#define DECLARE_RELATIONAL_CONTAINER_BASIC(name, T) \
  DECLARE_RELATIONAL_CONTAINER(name, T) \
  DECLARE_CONTAINER_REDUCER(name, T, sum) \
  DECLARE_CONTAINER_REDUCER(name, T, product) \
  DECLARE_CONTAINER_REDUCER(name, T, min) \
  DECLARE_CONTAINER_REDUCER(name, T, max)
#define DEFINE_RELATIONAL_CONTAINER_FN(name, T, hash_fn, compare_fn) \
  bool name##_eq(const void *a, const void *b) { \
    return name##_compare(a, b) == 0; \
  } \
  DEFINE_CONTAINER_FN(name, T, hash_fn, name##_eq) \
  RelationalKeyInfo name##RelationalKeyInfo = {&name##KeyInfo, compare_fn};
#define DEFINE_RELATIONAL_CONTAINER(name, T, hash_expr, compare_expr) \
  int name##_compare(const void *_a, const void *_b) { \
    T a = *(const T *)_a; \
    T b = *(const T *)_b; \
    return (compare_expr); \
  } \
  DEFINE_CONTAINER(name, T, hash_expr, (name##_compare(_a, _b) == 0)) \
  RelationalKeyInfo name##RelationalKeyInfo = {&name##KeyInfo, name##_compare};
#define DEFINE_CONTAINER_REDUCER(name, T, func_name, initial, expr) \
  T _##name##__##func_name(const T a, const T b) { return (expr); } \
  T name##_##func_name(name##Iterator *iter) { \
    return name##_reduce((initial), iter, _##name##__##func_name); \
  }
// Default numeric implementation: multiplicative hash (Knuth's 2654435761
// constant), subtraction compare, and the four standard reducers.
#define DEFINE_RELATIONAL_CONTAINER_BASIC(name, T) \
  DEFINE_RELATIONAL_CONTAINER( \
      name, T, \
      (int)(unsigned int)((unsigned long long)((long double)key * \
                                               2654435761) % \
                          0x100000000ul), \
      a - b) \
  DEFINE_CONTAINER_REDUCER(name, T, sum, 0, a + b) \
  DEFINE_CONTAINER_REDUCER(name, T, product, 1, a *b) \
  DEFINE_CONTAINER_REDUCER(name, T, min, (T)0x7FFFFFFFFFFFFFFF, \
                           a <= b ? a : b) \
  DEFINE_CONTAINER_REDUCER(name, T, max, (T)0x8000000000000000, a >= b ? a : b)
// Typed containers for the built-in scalar and string types.
DECLARE_RELATIONAL_CONTAINER(Int, int);
DECLARE_RELATIONAL_CONTAINER(Short, short);
DECLARE_RELATIONAL_CONTAINER(Long, long);
DECLARE_RELATIONAL_CONTAINER(Char, char);
DECLARE_RELATIONAL_CONTAINER(Float, float);
DECLARE_RELATIONAL_CONTAINER(Double, double);
DECLARE_RELATIONAL_CONTAINER(LongDouble, long double);
DECLARE_RELATIONAL_CONTAINER(Ptr, void *);
DECLARE_RELATIONAL_CONTAINER(UnsignedInt, unsigned int);
DECLARE_RELATIONAL_CONTAINER(UnsignedLong, unsigned long);
DECLARE_RELATIONAL_CONTAINER(UnsignedShort, unsigned short);
DECLARE_RELATIONAL_CONTAINER(UnsignedChar, unsigned char);
DECLARE_RELATIONAL_CONTAINER(CString, char *);
// Needed for case-insensitive maps.
DECLARE_RELATIONAL_CONTAINER(CStringCase, char *);
#endif // COMMON_PUBLIC_ITERATOR_H__
|
<gh_stars>1-10
package fasthttpmock
import (
"bytes"
"github.com/valyala/fasthttp"
)
// Equal reports whether two fasthttp requests match on URI, method, host,
// and body (byte-wise comparison of each part).
func Equal(a *fasthttp.Request, b *fasthttp.Request) bool {
	if !bytes.Equal(a.RequestURI(), b.RequestURI()) {
		return false
	}
	if !bytes.Equal(a.Header.Method(), b.Header.Method()) {
		return false
	}
	if !bytes.Equal(a.Host(), b.Host()) {
		return false
	}
	return bytes.Equal(a.Body(), b.Body())
}
// Copy transfers the status code and body from one fasthttp response to
// another (headers are not copied).
func Copy(from, to *fasthttp.Response) {
	to.SetStatusCode(from.StatusCode())
	to.SetBody(from.Body())
}
|
def is_sorted(nums):
    """Return True if nums is in non-decreasing order.

    Empty and single-element sequences count as sorted.
    """
    return all(earlier <= later for earlier, later in zip(nums, nums[1:]))
# Demo: 12 > 5, so the list is not sorted and this prints False.
result = is_sorted([3, 9, 12, 5])
print(result)
#include <iostream>
using namespace std;
int main() {
    // Input order is a, c, b, d; the pairs compared are (a, b) and (c, d).
    string a, b, c, d;
    cin >> a >> c >> b >> d;
    const bool first_match = (a == b);
    const bool second_match = (c == d);
    if (first_match && second_match) {
        cout << "GOOD" << endl;
    } else if (!first_match && !second_match) {
        cout << "OTZ" << endl;
    } else {
        cout << "=~=" << endl;
    }
}
from django.conf.urls import url
from c3nav.mapdata.views import get_cache_package, map_history, tile
# URL patterns for map data: tile images (optionally scoped by access
# permissions), map-history layers, and downloadable cache packages.
urlpatterns = [
    url(r'^(?P<level>\d+)/(?P<zoom>-?\d+)/(?P<x>-?\d+)/(?P<y>-?\d+).png$', tile, name='mapdata.tile'),
    # Same tile view, with an explicit dash-separated access-permissions list.
    url(r'^(?P<level>\d+)/(?P<zoom>-?\d+)/(?P<x>-?\d+)/(?P<y>-?\d+)/(?P<access_permissions>\d(-\d)*).png$', tile,
        name='mapdata.tile'),
    url(r'^history/(?P<level>\d+)/(?P<mode>base|composite)\.(?P<filetype>png|data)$', map_history,
        name='mapdata.map_history'),
    url(r'^cache/package\.(?P<filetype>tar|tar\.gz|tar\.xz)$', get_cache_package, name='mapdata.cache_package'),
]
|
# -*- encoding : utf-8 -*-
require 'spec_helper'
# Specs for GistHistory: factory association wiring and the #headline
# helper, which returns the leading lines of the history's first gist file.
describe GistHistory do
  it 'is available' do
    gist_history = create(:gist_history)
    expect(gist_history.user).not_to be_nil
    expect(gist_history.gist).not_to be_nil
    expect(gist_history.gist_files).not_to be_nil
  end
  it 'returns headline' do
    gist_history = create(:gist_file).gist_history
    # Expected headline: the first lines of the default factory body,
    # with the trailing newline stripped.
    expected = <<BODY
class Sample
def do_something
puts "Hello!"
BODY
    expect(gist_history.headline).to eq(expected.sub(/\n$/, ""))
  end
  it 'returns headline for 2 lines' do
    # Bodies shorter than the headline limit are returned whole.
    body = <<BODY
class Sample
end
BODY
    gist_history = create(:gist_file, :body => body).gist_history
    expect(gist_history.headline).to eq(body.sub(/\n$/, ""))
  end
end
|
# CI smoke test for jgo: run the suite, install the package, check the
# installed module imports outside the source tree, and verify the jgo
# endpoint evaluates an expression.
# FIX: fail fast — previously a failure in any line but the last one was
# silently ignored because the script's status was just the final command's.
set -e
python setup.py test
pip install .
python -c 'import sys; sys.path.remove(""); import jgo'
which jgo
test "$(jgo org.scijava:parsington 1+3)" -eq 4
|
from .function import function
from copy import copy
def object_method_def(commands, data, node):
    """Compile an object method definition.

    Clones the node, qualifies its name with the innermost context object
    (``o<object>!<name>``), and emits it through the regular function
    compiler.
    """
    method_node = copy(node)
    method_node.name = "o%s!%s" % (data.context_objects[-1], node.name)
    return function(commands, data, method_node)
<filename>app/services/import.js
var Q = require('q');
var models = require('../model');
// Imports rows posted as {className, headers, lines: [{line, cells}]}.
// Rows carrying an _id are updated (falling back to create-with-id when no
// document has that id); rows without _id are created.  Per-line failures
// are collected into result.errors rather than aborting the import.
var importData = function(req, res) {
    try {
        var result = {
            'errors' : []
        };
        var importedCreated = 0;
        var importedUpdated = 0;
        var headers = req.body.headers;
        var schema = models.getMetadataForClass(req.body.className).schema;
        var model = models.getModelForClass(req.body.className);
        if (!model) {
            res.status(400).send('Invalid class for import: ' + req.body.className);
            return;
        }
        // One promise per line; each settles even on failure.
        var promises = req.body.lines.map(function(row){
            var id = getId(headers, row);
            if (id) {
                var updateFields = buildUpdateFields(headers, row, schema);
                return Q.nfcall(updateEntity, model, id, updateFields)
                .then(function (entity) {
                    if (entity) {
                        importedUpdated++;
                    }
                    else {
                        // No existing document with this id: create one,
                        // preserving the id from the import file.
                        var fields = buildUpdateFields(headers, row, schema);
                        //try to include id:
                        fields._id = id;
                        return Q.nfcall(createEntity, model, fields)
                        .then(function (entity) {
                            importedCreated++;
                        })
                        .fail(function (err) {
                            result.errors.push({
                                'line': row.line,
                                'error': err
                            });
                        });
                    }
                })
                .fail(function (err) {
                    result.errors.push({
                        'line': row.line,
                        'error': err
                    });
                });
            }
            else {
                var fields = buildUpdateFields(headers, row, schema);
                return Q.nfcall(createEntity, model, fields)
                .then(function (entity) {
                    importedCreated++;
                })
                .fail(function (err) {
                    result.errors.push({
                        'line': row.line,
                        'error': err
                    });
                });
            }
        });
        // NOTE(review): both the success and failure branches answer 200
        // with the counts reached so far — confirm this is the intended API.
        Q.all(promises)
        .then(function(obj) {
            result.importedCount = importedUpdated + importedCreated;
            result.insertCount = importedCreated;
            result.updatedCount = importedUpdated;
            res.status(200).send(result);
        })
        .fail(function(err) {
            console.log('import: failed ' + err);
            result.importedCount = importedUpdated + importedCreated;
            result.insertCount = importedCreated;
            result.updatedCount = importedUpdated;
            res.status(200).send(result);
        });
    }
    catch (e) {
        res.status(400).send('Invalid request. ' + e);
        console.error(e);
        return;
    }
};
// Node-callback wrapper around Mongoose-style findByIdAndUpdate; `entity`
// is null when no document has the given id.
function updateEntity(model, id, updateFields, callback){
    model.findByIdAndUpdate(id, updateFields, null, function(err, entity) {
        callback(err, entity);
    });
}
// Node-callback wrapper around model.create.
function createEntity(model, updateFields, callback){
    model.create(updateFields, function(err, entity) {
        callback(err, entity);
    });
}
// Returns the value of the row's "_id" column, or null when the column is
// absent or its cell is empty/null.
function getId(headers, row) {
    for (var i = 0; i < headers.length; i++) {
        if (headers[i] !== '_id') {
            continue;
        }
        var value = row.cells[i];
        return (value === '' || value === null) ? null : value;
    }
    return null;
}
// Builds the {field: value} object for one row, converting each cell per
// the schema; the _id column is handled separately and skipped here.
function buildUpdateFields(headers, row, schema) {
    var fields = {};
    headers.forEach(function (head, i) {
        if (head !== '_id') {
            fields[head] = convertValue(row.cells[i], head, schema);
        }
    });
    return fields;
}
// Converts one raw cell value for the given schema path.  Only GeoJSON
// point fields need conversion; everything else passes through unchanged.
function convertValue(value, key, schema) {
    if (propertyIsGeopoint(key, schema)) {
        var geoPoint = convertToGeoPoint(value);
        return geoPoint;
    }
    return value;
}
// A schema field is a geopoint when it exposes both "<key>.type" and
// "<key>.coordinates" paths (the GeoJSON sub-document shape).
function propertyIsGeopoint(key, schema) {
    var foundType = false;
    var foundCoordinates = false;
    Object.keys(schema.paths).forEach(function (path) {
        if (path === key + '.type') {
            foundType = true;
        }
        if (path === key + '.coordinates') {
            foundCoordinates = true;
        }
    });
    return foundType && foundCoordinates;
}
// Normalizes a cell value into a GeoJSON Point.  Accepts an object that is
// already a point, or a "lat, lng" string; anything else yields null.
function convertToGeoPoint(value) {
    if (!value) {
        return null;
    }
    if (value.type && value.coordinates) {
        return value;
    }
    if (typeof value !== 'string') {
        return null;
    }
    var match = /(-?\d+\.?\d*)\s*,\s*(-?\d+\.?\d*)/.exec(value);
    if (!match) {
        return null;
    }
    // Input reads "lat, lng"; GeoJSON stores [lng, lat].
    return {
        type: 'Point',
        coordinates: [ Number(match[2]), Number(match[1]) ]
    };
}
// Registers the import endpoint on the Express app.
function apply(app) {
    app.post('/api/import', function(req, res) {
        return importData(req, res);
    });
}
module.exports.apply = apply;
/**
 * Definition for a binary tree node.
 * function TreeNode(val) {
 *     this.val = val;
 *     this.left = this.right = null;
 * }
 */
/**
 * Rebuilds a binary search tree from its preorder traversal.
 * @param {number[]} preorder
 * @return {TreeNode}
 */
var bstFromPreorder = function(preorder) {
    // Builds the subtree for one preorder slice.
    function traverse(nums) {
        if (nums.length == 0) {
            return null;
        }
        // The first value of a preorder sequence is the subtree's root.
        const root = new TreeNode(nums[0]);
        let i = 1;
        // Advance to the first value greater than the root: values before
        // it form the left subtree, values from it onward the right.
        for (; i < nums.length; i++) {
            if (nums[i] < nums[0]) {
                continue;
            } else {
                break;
            }
        }
        root.left = traverse(nums.slice(1, i));
        root.right = traverse(nums.slice(i));
        return root;
    }
    return traverse(preorder);
};
/**
 * Approach:
 * Because the input is a preorder traversal, the first number must be the
 * root, and the first number after it that is larger must be the root of
 * the right subtree.  That splits the array into the left and right
 * subtrees, which are then built recursively.
 */
package client
import (
"html/template"
"io"
"io/ioutil"
"net/http"
"net/url"
"github.com/labstack/echo/v4"
"github.com/labstack/echo/v4/middleware"
"github.com/prometheus/client_golang/prometheus/promhttp"
quakenet "github.com/criticalstack/quake-kube/internal/quake/net"
)
// Config holds the settings needed to build the client HTTP router.
type Config struct {
	// ContentServerURL is the upstream server hosting Quake3 assets.
	ContentServerURL string
	// ServerAddr is the game server queried for /info and /status.
	ServerAddr string
	// Files serves index.html and the static client assets.
	Files http.FileSystem
}
// NewRouter builds the echo router for the web client: it renders
// index.html with live game-server info, exposes /metrics, /info and
// /status, serves static files, and reverse-proxies /assets to the content
// server with a rewritten Host header.
func NewRouter(cfg *Config) (*echo.Echo, error) {
	e := echo.New()
	e.Use(middleware.Logger())
	e.Use(middleware.Recover())
	e.Use(middleware.CORSWithConfig(middleware.CORSConfig{
		AllowOrigins: []string{"*"},
		AllowHeaders: []string{echo.HeaderOrigin, echo.HeaderContentType, echo.HeaderAccept},
	}))
	// Parse index.html once at startup and install it as the renderer.
	f, err := cfg.Files.Open("index.html")
	if err != nil {
		return nil, err
	}
	defer f.Close()
	data, err := ioutil.ReadAll(f)
	if err != nil {
		return nil, err
	}
	templates, err := template.New("index").Parse(string(data))
	if err != nil {
		return nil, err
	}
	e.Renderer = &TemplateRenderer{templates}
	// default route
	e.GET("/", func(c echo.Context) error {
		m, err := quakenet.GetInfo(cfg.ServerAddr)
		if err != nil {
			return err
		}
		// g_needpass == "1" means the game server requires a password.
		needsPass := false
		if v, ok := m["g_needpass"]; ok {
			if v == "1" {
				needsPass = true
			}
		}
		return c.Render(http.StatusOK, "index", map[string]interface{}{
			"ServerAddr": cfg.ServerAddr,
			"NeedsPass":  needsPass,
		})
	})
	e.GET("/metrics", echo.WrapHandler(promhttp.Handler()))
	e.GET("/info", func(c echo.Context) error {
		m, err := quakenet.GetInfo(cfg.ServerAddr)
		if err != nil {
			return err
		}
		return c.JSON(http.StatusOK, m)
	})
	e.GET("/status", func(c echo.Context) error {
		m, err := quakenet.GetStatus(cfg.ServerAddr)
		if err != nil {
			return err
		}
		return c.JSON(http.StatusOK, m)
	})
	// static files
	e.GET("/*", echo.WrapHandler(http.FileServer(cfg.Files)))
	// Quake3 assets requests must be proxied to the content server. The host
	// header is manipulated to ensure that services like CloudFlare will not
	// reject requests based upon incorrect host header.
	csurl, err := url.Parse(cfg.ContentServerURL)
	if err != nil {
		return nil, err
	}
	g := e.Group("/assets")
	g.Use(middleware.ProxyWithConfig(middleware.ProxyConfig{
		Balancer: middleware.NewRoundRobinBalancer([]*middleware.ProxyTarget{
			{URL: csurl},
		}),
		Transport: &HostHeaderTransport{RoundTripper: http.DefaultTransport, Host: csurl.Host},
	}))
	return e, nil
}
// HostHeaderTransport wraps an http.RoundTripper and rewrites the Host of
// every outgoing request. Used by the /assets proxy so CDN-fronted content
// servers (e.g. CloudFlare) see the expected host header.
type HostHeaderTransport struct {
	http.RoundTripper
	Host string
}

// RoundTrip sets the configured Host on the request, then delegates to the
// embedded RoundTripper.
func (t *HostHeaderTransport) RoundTrip(req *http.Request) (*http.Response, error) {
	req.Host = t.Host
	return t.RoundTripper.RoundTrip(req)
}
// TemplateRenderer adapts a parsed html/template set to echo's Renderer
// interface.
type TemplateRenderer struct {
	*template.Template
}

// Render executes the named template into w; the echo.Context is unused.
func (t *TemplateRenderer) Render(w io.Writer, name string, data interface{}, c echo.Context) error {
	return t.ExecuteTemplate(w, name, data)
}
|
package com.archimatetool.model;
import org.eclipse.osgi.util.NLS;
/**
 * Externalized-string (NLS) accessor for the model plug-in.
 * Field values are injected reflectively by {@link NLS#initializeMessages}
 * from the {@code com.archimatetool.model.messages} resource bundle.
 */
public class Messages extends NLS {

    private static final String BUNDLE_NAME = "com.archimatetool.model.messages"; //$NON-NLS-1$

    // Localized folder-type display names; the field names must match the
    // keys in the bundle's .properties file.
    public static String FolderType_0;
    public static String FolderType_1;
    public static String FolderType_2;
    public static String FolderType_3;
    public static String FolderType_4;
    public static String FolderType_5;
    public static String FolderType_6;
    public static String FolderType_7;
    public static String FolderType_8;
    public static String FolderType_9;

    static {
        // initialize resource bundle
        NLS.initializeMessages(BUNDLE_NAME, Messages.class);
    }

    // Not instantiable: all access is via the static fields.
    private Messages() {
    }
}
|
#!/bin/bash
# Results on the Cube challenge set: hold-out training on Cube+ followed by
# inference on the challenge split with the best checkpoint.
set -euo pipefail

# Note: trailing slash removed so derived paths below don't contain "//".
TRAINED_MODEL_PATH="./output_exp/experiment9"
GPU_ID="0"

# Train/validate with a hold-out split on the Cube+ data.
CUDA_VISIBLE_DEVICES="$GPU_ID" python3 hold_out.py conf_exp/experiment9.json Cube plus data/cube/plus.txt -gpu 0 --outputfolder "$TRAINED_MODEL_PATH"

# Run inference on the challenge split using the best saved model.
CUDA_VISIBLE_DEVICES="$GPU_ID" python3 inference.py conf_exp/experiment9.json Cube challenge data/cube/challenge.txt "$TRAINED_MODEL_PATH/Cube/plus/kmeans_finalaffine_noconf/0/model_best.pth.tar" -gpu 0 --outputfolder "$TRAINED_MODEL_PATH/cube_test/"
|
#!/usr/bin/env bash
# Copyright 2019 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Fail fast and trace each command while building SDK artifacts.
set -ex
# Add steps required to build artefacts for your Agones SDK.
#!/usr/bin/env bash
# Rebuilds the service's Docker image for integration testing.
set -euo pipefail

# Remove any stale container from a previous run; ignore "no such container".
docker rm signal-detection-association-control-service || true

# Build the image from the project root, then return to ./integration.
cd ../
gradle docker
cd ./integration
|
#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
set -eo pipefail

REQUIREMENTS_LOCAL="/app/docker/requirements-local.txt"

# When running under Cypress, point Superset at the test config and test DB.
if [ "$CYPRESS_CONFIG" == "true" ]; then
  export SUPERSET_CONFIG=tests.integration_tests.superset_test_config
  export SUPERSET_TESTENV=true
  export ENABLE_REACT_CRUD_VIEWS=true
  export SUPERSET__SQLALCHEMY_DATABASE_URI=postgresql+psycopg2://superset:superset@db:5432/superset
fi

# Install developer-provided requirement overrides when present.
if [ -f "${REQUIREMENTS_LOCAL}" ]; then
  echo "Installing local overrides at ${REQUIREMENTS_LOCAL}"
  pip install -r "${REQUIREMENTS_LOCAL}"
else
  echo "Skipping local overrides"
fi

# Dispatch on the requested role (unknown roles fall through, as before).
case "${1}" in
  worker)
    echo "Starting Celery worker..."
    celery --app=superset.tasks.celery_app:app worker -Ofair -l INFO
    ;;
  beat)
    echo "Starting Celery beat..."
    celery --app=superset.tasks.celery_app:app beat --pidfile /tmp/celerybeat.pid -l INFO -s "${SUPERSET_HOME}"/celerybeat-schedule
    ;;
  app)
    echo "Starting web app..."
    flask run -p 8088 --with-threads --reload --debugger --host=0.0.0.0
    ;;
  app-gunicorn)
    echo "Starting web app..."
    /app/docker/docker-entrypoint.sh
    ;;
esac
|
<reponame>Jenniferziyoudeyu/leetcode-java<gh_stars>0
//package q1_10;
//
//import public_class.ListNode;
//
//public class Q2AddTwoNumbers {
//
// public ListNode addTwoNumbers(ListNode l1, ListNode l2) {
// ListNode res = new ListNode(l1.val + l2.val);
// while(l1 != null && l2 != null) {
// l1 = l1.next;
// l2 = l2.next;
// }
// return res;
// }
//
//}
|
<filename>src/main/java/site/kason/tempera/source/FileTemplateSource.java
package site.kason.tempera.source;
import java.io.File;
import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.io.FileUtils;
import site.kason.tempera.engine.TemplateSource;
/**
*
* @author <NAME>
*/
/**
 * A {@link TemplateSource} backed by a file on disk.
 * The file content is read eagerly, exactly once, in the constructor.
 */
public class FileTemplateSource implements TemplateSource {

    File file;
    String name;
    private final String encoding;
    private final String content;

    /**
     * Loads the whole template file into memory.
     *
     * @param name     logical template name returned by {@link #getName()}
     * @param file     the file to load
     * @param encoding character encoding used to decode the file
     * @throws IOException if the file cannot be read
     */
    public FileTemplateSource(String name, File file, String encoding) throws IOException {
        this.name = name;
        this.file = file;
        this.encoding = encoding;
        this.content = FileUtils.readFileToString(file, encoding);
    }

    /** @return the cached file content */
    @Override
    public String getContent() {
        return content;
    }

    /** @return the canonical path, or the absolute path if canonicalization fails */
    @Override
    public String getPath() {
        String path;
        try {
            path = file.getCanonicalPath();
        } catch (IOException ex) {
            path = file.getAbsolutePath();
        }
        return path;
    }

    /** @return the logical template name */
    @Override
    public String getName() {
        return name;
    }

    /** @return the underlying file's last-modified timestamp */
    @Override
    public long lastModified() {
        return file.lastModified();
    }
}
|
#!/bin/bash -e
# Copyright 2017-2018 by SDRausty. All rights reserved. 🌎 🌍 🌏 🌐 🗺
# Hosting https://sdrausty.github.io/TermuxArch courtesy https://pages.github.com
# https://sdrausty.github.io/TermuxArch/CONTRIBUTORS Thank you for your help.
# https://sdrausty.github.io/TermuxArch/README has information about this project.
################################################################################
callsystem ()
{
	# Create the Arch install root and work from it, then detect the CPU.
	# Quote $HOME (may contain spaces) and abort if the cd fails so we never
	# run the install in the wrong directory.
	mkdir -p "$HOME/arch"
	cd "$HOME/arch" || exit 1
	detectsystem
}
copybin2path ()
{
	# Interactively offer to copy the generated launch script ($bin) into
	# $PREFIX/bin so it is on the user's PATH.
	printf " 🕚 \033[36;1m<\033[0m 🕛 "
	while true; do
		# -r: don't let backslashes in the reply be interpreted.
		read -r -p "Copy $bin to your \$PATH? [y|n] " answer
		if [[ $answer = [Yy]* ]];then
			# Quote paths: $HOME/$PREFIX may contain spaces.
			cp "$HOME/arch/$bin" "$PREFIX/bin"
			printf "\n 🕦 \033[36;1m<\033[0m 🕛 Copied \033[32;1m$bin\033[0m to \033[1;34m$PREFIX/bin\033[0m.\n\n"
			break
		elif [[ $answer = [Nn]* ]];then
			printf "\n"
			break
		elif [[ $answer = [Qq]* ]];then
			printf "\n"
			break
		else
			# Pass the user's reply as an argument, never as the printf
			# format string (it could contain % or \ sequences).
			printf "\n 🕚 \033[36;1m<\033[0m 🕛 You answered \033[33;1m%s\033[0m.\n" "$answer"
			printf "\n 🕚 \033[36;1m<\033[0m 🕛 Answer Yes or No (y|n).\n\n"
		fi
	done
}
detectsystem ()
{
	# Map the device ABI (Android system property) to the matching
	# architecture-specific setup routine.  The original called getprop up
	# to five times and used unquoted [ ] comparisons; hoist the value once
	# and dispatch with case.
	local abi
	abi=$(getprop ro.product.cpu.abi)
	spaceinfo
	printdetectedsystem
	case "$abi" in
		armeabi) armv5l ;;
		armeabi-v7a) detectsystem2 ;;
		arm64-v8a) aarch64 ;;
		x86) i686 ;;
		x86_64) x86_64 ;;
		*) printmismatch ;;
	esac
}
detectsystem2 ()
{
	# armv7 split: Chrome OS devices report a product name ending in
	# "_cheets"; everything else is treated as plain Android.
	case "$(getprop ro.product.device)" in
		*_cheets) armv7lChrome ;;
		*) armv7lAndroid ;;
	esac
}
detectsystem2p ()
{
	# Print which kind of armv7 host was detected (Chrome OS devices report
	# a product name ending in "_cheets").
	case "$(getprop ro.product.device)" in
		*_cheets) printf "Chromebook. " ;;
		*) printf "$(uname -o) operating system. " ;;
	esac
}
ftchstnd ()
{
	# Fetch the rootfs archive and its .md5 with whichever downloader the
	# caller selected in $dm.  URLs are quoted in case $mirror/$path/$file
	# ever contain shell-special characters.
	if [[ $dm = wget ]];then
		wget -q -N --show-progress "http://$mirror$path$file.md5"
		wget -q -c --show-progress "http://$mirror$path$file"
	else
		curl -q -L --fail --retry 4 -O "http://$mirror$path$file.md5" -O "http://$mirror$path$file"
	fi
}
makebin ()
{
	# Generate the start script, ask the configuration question, then run
	# the final system touch-up (which also generates the setup scripts).
	makestartbin
	printconfigq
	touchupsys
}
makesetupbin ()
{
	# Write root/bin/setupbin.sh: a proot launcher that binds the usual
	# Android/Termux paths into the Arch rootfs and runs finishsetup.sh.
	# NOTE: the heredoc is unquoted on purpose — $HOME, $TERM and $LANG
	# expand NOW, at generation time; <<- strips the leading tabs.
	cat > root/bin/setupbin.sh <<- EOM
	#!/data/data/com.termux/files/usr/bin/bash -e
	unset LD_PRELOAD
	exec proot --link2symlink -0 -r $HOME/arch/ -b /dev/ -b /sys/ -b /proc/ -b /storage/ -b $HOME -w $HOME /bin/env -i HOME=/root TERM="$TERM" PS1='[termux@arch \W]\$ ' LANG=$LANG PATH=/bin:/usr/bin:/sbin:/usr/sbin $HOME/arch/root/bin/finishsetup.sh
	EOM
	chmod 700 root/bin/setupbin.sh
}
makestartbin ()
{
	# Write the launch script ($bin): a proot launcher like setupbin.sh but
	# ending in a login shell inside the Arch rootfs.  The unquoted heredoc
	# expands $HOME/$TERM/$LANG at generation time; <<- strips leading tabs.
	cat > $bin <<- EOM
	#!/data/data/com.termux/files/usr/bin/bash -e
	unset LD_PRELOAD
	exec proot --link2symlink -0 -r $HOME/arch/ -b /dev/ -b /sys/ -b /proc/ -b /storage/ -b $HOME -w $HOME /bin/env -i HOME=/root TERM="$TERM" PS1='[termux@arch \W]\$ ' LANG=$LANG PATH=/bin:/usr/bin:/sbin:/usr/sbin /bin/bash --login
	EOM
	chmod 700 $bin
}
spaceinfo ()
{
	# Set $spaceMessage to a warning when the user partition has less than
	# ~1G free: df's 4th column on its second line is the available space,
	# which carries a G/T suffix only when it is at least a gigabyte.
	# (Replaces backticks and an awk|sed pair with a single awk.)
	mntspace=$(df /storage/emulated/0 | awk 'NR==2 {print $4}')
	if [[ $mntspace = *G ]] || [[ $mntspace = *T ]];then
		spaceMessage=""
	else
		spaceMessage="Warning! Start thinking about cleaning out some stuff. The user space on this device is just $mntspace. This is below the recommended amount of free space to install Arch Linux in Termux PRoot."
	fi
}
setlocalegen()
{
	# Enable the en_US.UTF-8 locale: uncomment it in an existing
	# etc/locale.gen (also mapping '@' to '-at-' on that line), or create a
	# minimal locale.gen containing just that locale.  Paths are relative
	# to the rootfs directory the caller has cd'd into.
	if [ -e etc/locale.gen ]; then
		sed -i '/\#en_US.UTF-8 UTF-8/{s/#//g;s/@/-at-/g;}' etc/locale.gen
	else
		cat > etc/locale.gen <<- EOM
		en_US.UTF-8 UTF-8
		EOM
	fi
}
touchupsys ()
{
	# Final interactive configuration of the new rootfs: install the helper
	# scripts, choose an editor, configure locale generation and the pacman
	# mirrorlist, then generate the finish/setup scripts.
	mkdir -p root/bin
	# Install convenience scripts and aliases into the new system.
	addbash_profile
	addbashrc
	adddfa
	addga
	addgcl
	addgcm
	addgp
	addgpl
	addmotd
	addprofile
	addresolvconf
	addt
	addyt
	addv
	setlocalegen
	printf "\n\033[32;1m"
	# Ask which editor to use for the configuration files below.
	while true; do
		read -p "Do you want to use \`nano\` or \`vi\` to edit your Arch Linux configuration files [n|v]? " nv
		if [[ $nv = [Nn]* ]];then
			ed=nano
			apt-get -qq install nano --yes
			break
		elif [[ $nv = [Vv]* ]];then
			ed=vi
			break
		else
			printf "\nYou answered \033[36;1m$nv\033[32;1m.\n"
			printf "\nAnswer nano or vi (n|v). \n\n"
		fi
		printf "\nYou answered \033[36;1m$nv\033[32;1m.\n"
	done
	printf "\n"
	# Run locale-gen as-is, or let the user edit /etc/locale.gen first.
	while true; do
		read -p "Would you like to run \`locale-gen\` to generate the en_US.UTF-8 locale or do you want to edit /etc/locale.gen specifying your preferred language before running \`locale-gen\`? Answer run or edit [r|e]. " ye
		if [[ $ye = [Rr]* ]];then
			break
		elif [[ $ye = [Ee]* ]];then
			$ed $HOME/arch/etc/locale.gen
			break
		else
			printf "\nYou answered \033[36;1m$ye\033[32;1m.\n"
			printf "\nAnswer run or edit (Rr|Ee). \n\n"
		fi
	done
	# Let the user reorder the pacman mirrorlist before setup runs.
	$ed $HOME/arch/etc/pacman.d/mirrorlist
	makefinishsetup
	makesetupbin
}
|
#!/bin/bash
# This script will setup the Automated pipeline, IAM Roles, and a ServiceCatalog Portfolio using the
# reference architecture as example products. This will create resources in your currently active CLI account
# across three regions using CloudFormation StackSets. You will be responsible for any costs resulting from the usage
# of this script.
# Current AWS account id (tr strips the quotes from the JSON output).
ACC=$(aws sts get-caller-identity --query 'Account' | tr -d '"')
# add child accounts as space delimited list.
# You will need to ensure StackSet IAM roles are correctly setup in each child account
childAcc=""
childAccComma=${childAcc// /,}
# NOTE(review): $allACC and $allregions are deliberately expanded UNQUOTED
# below so the shell word-splits them into separate --accounts/--regions
# arguments — do not "fix" the quoting without switching to arrays.
allACC="$ACC $childAcc"
export AWS_DEFAULT_REGION=us-east-1
allregions="us-east-1 us-east-2 us-west-1"
LinkedRole1=""
S3RootURL="https://s3.amazonaws.com/aws-service-catalog-reference-architectures"
date
echo "Using Account:$ACC Region:$AWS_DEFAULT_REGION Child Accounts:$childAcc All Regions:$allregions"
echo "Creating the StackSet IAM roles"
aws cloudformation create-stack --region $AWS_DEFAULT_REGION --stack-name IAM-StackSetAdministrator --template-url https://s3.amazonaws.com/cloudformation-stackset-sample-templates-us-east-1/AWSCloudFormationStackSetAdministrationRole.yml --capabilities CAPABILITY_IAM CAPABILITY_NAMED_IAM
aws cloudformation create-stack --region $AWS_DEFAULT_REGION --stack-name IAM-StackSetExecution --parameters "[{\"ParameterKey\":\"AdministratorAccountId\",\"ParameterValue\":\"$ACC\"}]" --template-url https://s3.amazonaws.com/cloudformation-stackset-sample-templates-us-east-1/AWSCloudFormationStackSetExecutionRole.yml --capabilities CAPABILITY_IAM CAPABILITY_NAMED_IAM
echo "waiting for stacks to complete..."
aws cloudformation wait stack-create-complete --stack-name IAM-StackSetAdministrator
aws cloudformation wait stack-create-complete --stack-name IAM-StackSetExecution
echo "creating the automation pipeline stack"
aws cloudformation create-stack --region $AWS_DEFAULT_REGION --stack-name SC-RA-IACPipeline --parameters "[{\"ParameterKey\":\"ChildAccountAccess\",\"ParameterValue\":\"$childAccComma\"}]" --template-url "$S3RootURL/codepipeline/sc-codepipeline-ra.json" --capabilities CAPABILITY_IAM CAPABILITY_NAMED_IAM CAPABILITY_AUTO_EXPAND
echo "creating the ServiceCatalog IAM roles StackSet"
aws cloudformation create-stack-set --stack-set-name SC-IAC-automated-IAMroles --template-url "$S3RootURL/iam/sc-demosetup-iam.json" --capabilities CAPABILITY_IAM CAPABILITY_NAMED_IAM CAPABILITY_AUTO_EXPAND
SSROLEOPID=$(aws cloudformation create-stack-instances --stack-set-name SC-IAC-automated-IAMroles --regions $AWS_DEFAULT_REGION --accounts $allACC --operation-preferences FailureToleranceCount=0,MaxConcurrentCount=1 | jq '.OperationId' | tr -d '"')
# Poll until the IAM-role stack-instance operation finishes (10s interval).
STATUS=""
until [ "$STATUS" = "SUCCEEDED" ]; do
	STATUS=$(aws cloudformation describe-stack-set-operation --stack-set-name SC-IAC-automated-IAMroles --operation-id $SSROLEOPID | jq '.StackSetOperation.Status' | tr -d '"')
	echo "waiting for IAMrole Stackset to complete. current status: $STATUS"
	sleep 10
done
echo "creating the ServiceCatalog Portfolio StackSet"
aws cloudformation create-stack-set --stack-set-name SC-IAC-automated-portfolio --parameters "[{\"ParameterKey\":\"CreateEndUsers\",\"ParameterValue\":\"No\"},{\"ParameterKey\":\"LinkedRole1\",\"ParameterValue\":\"$LinkedRole1\"},{\"ParameterKey\":\"LinkedRole2\",\"ParameterValue\":\"\"},{\"ParameterKey\":\"LaunchRoleName\",\"ParameterValue\":\"SCEC2LaunchRole\"},{\"ParameterKey\":\"RepoRootURL\",\"ParameterValue\":\"$S3RootURL/\"}]" --template-url "$S3RootURL/ec2/sc-portfolio-ec2demo.json" --capabilities CAPABILITY_IAM CAPABILITY_NAMED_IAM CAPABILITY_AUTO_EXPAND
aws cloudformation create-stack-instances --stack-set-name SC-IAC-automated-portfolio --regions $allregions --accounts $allACC --operation-preferences FailureToleranceCount=0,MaxConcurrentCount=3
date
echo "Complete. See CloudFormation Stacks and StackSets Console in each region for more details: $allregions"
import * as BinaryPack from "peerjs-js-binarypack";
import { Supports } from './supports';
import { UtilSupportsObj } from '..';
// Default RTCConfiguration: Google's public STUN server plus the PeerJS
// TURN server as a NAT-traversal fallback.
const DEFAULT_CONFIG = {
  iceServers: [
    { urls: "stun:stun.l.google.com:19302" },
    { urls: "turn:0.peerjs.com:3478", username: "peerjs", credential: "peerjsp" }
  ],
  sdpSemantics: "unified-plan"
};
// Singleton grab-bag of browser/WebRTC helpers used across the library.
export const util = new class {
  noop(): void { }

  readonly CLOUD_HOST = "0.peerjs.com";
  readonly CLOUD_PORT = 443;

  // Browsers that need chunking:
  readonly chunkedBrowsers = { Chrome: 1, chrome: 1 };
  readonly chunkedMTU = 16300; // The original 60000 bytes setting does not work when sending data from Firefox to Chrome, which is "cut off" after 16384 bytes and delivered individually.

  // Returns browser-agnostic default config
  readonly defaultConfig = DEFAULT_CONFIG;

  readonly browser = Supports.getBrowser();
  readonly browserVersion = Supports.getVersion();

  // Lists which features are supported.  Probed once at load time by
  // actually creating an RTCPeerConnection and a data channel, then
  // closing them again.
  readonly supports = (function () {
    const supported: UtilSupportsObj = {
      browser: Supports.isBrowserSupported(),
      webRTC: Supports.isWebRTCSupported(),
      audioVideo: false,
      data: false,
      binaryBlob: false,
      reliable: false,
    };

    if (!supported.webRTC) return supported;

    let pc: RTCPeerConnection;
    try {
      pc = new RTCPeerConnection(DEFAULT_CONFIG);
      supported.audioVideo = true;

      let dc: RTCDataChannel;
      try {
        dc = pc.createDataChannel("_PEERJSTEST", { ordered: true });
        supported.data = true;
        supported.reliable = !!dc.ordered;

        // Binary test
        try {
          dc.binaryType = "blob";
          supported.binaryBlob = !Supports.isIOS;
        } catch (e) {
        }
      } catch (e) {
      } finally {
        if (dc) {
          dc.close();
        }
      }
    } catch (e) {
    } finally {
      if (pc) {
        pc.close();
      }
    }

    return supported;
  })();

  // Ensure alphanumeric ids
  validateId(id: string): boolean {
    // Allow empty ids
    return !id || /^[A-Za-z0-9]+(?:[. _-][A-Za-z0-9]+)*$/.test(id);
  }

  pack = BinaryPack.pack;
  unpack = BinaryPack.unpack;

  // Binary stuff

  // Monotonic id used to correlate all chunks of one blob on the receiver.
  private _dataCount: number = 1;

  // Split a Blob into MTU-sized chunks, each tagged with the shared
  // __peerData id, its sequence number n, and the total chunk count so the
  // receiver can reassemble them in order.
  chunk(blob: Blob): { __peerData: number, n: number, total: number, data: Blob }[] {
    const chunks = [];
    const size = blob.size;
    // Fixed: use `this` rather than reaching back through the `util`
    // singleton (the method already relies on `this._dataCount` below).
    const total = Math.ceil(size / this.chunkedMTU);

    let index = 0;
    let start = 0;

    while (start < size) {
      const end = Math.min(size, start + this.chunkedMTU);
      const b = blob.slice(start, end);

      const chunk = {
        __peerData: this._dataCount,
        n: index,
        data: b,
        total,
      };

      chunks.push(chunk);

      start = end;
      index++;
    }

    this._dataCount++;

    return chunks;
  }

  // Read a Blob asynchronously; cb receives the ArrayBuffer (or null).
  blobToArrayBuffer(blob: Blob, cb: (arg: ArrayBuffer | null) => void): FileReader {
    const fr = new FileReader();

    fr.onload = function (evt) {
      if (evt.target) {
        cb(evt.target.result as ArrayBuffer);
      }
    };

    fr.readAsArrayBuffer(blob);

    return fr;
  }

  // Convert a binary string (one char per byte) to an ArrayBuffer.
  binaryStringToArrayBuffer(binary: string): ArrayBuffer | SharedArrayBuffer {
    const byteArray = new Uint8Array(binary.length);

    for (let i = 0; i < binary.length; i++) {
      byteArray[i] = binary.charCodeAt(i) & 0xff;
    }

    return byteArray.buffer;
  }

  // Short random id; `slice` replaces the deprecated `substr`.
  randomToken(): string {
    return Math.random()
      .toString(36)
      .slice(2);
  }

  isSecure(): boolean {
    return location.protocol === "https:";
  }
}
|
from lib.action import St2BaseAction
from lib.formatters import format_client_list_result
# Public action classes exported by this module.
__all__ = [
    'St2ExecutionsListAction'
]

# Execution attributes stripped from list output to keep results compact.
EXCLUDE_ATTRIBUTES = [
    'trigger',
    'trigger_type',
    'trigger_instance',
    'liveaction',
    'context'
]
def format_result(result):
    """Format a client list result, dropping the noisy excluded attributes."""
    return format_client_list_result(
        result=result,
        exclude_attributes=EXCLUDE_ATTRIBUTES,
    )
class St2ExecutionsListAction(St2BaseAction):
    """List recent StackStorm executions, optionally filtered by action/status."""

    def run(self, action=None, status=None, limit=5):
        """Return up to ``limit`` executions, filtered by ``action`` and/or
        ``status`` when provided.

        Bug fix: the original added ``limit`` to ``kwargs`` *before* testing
        ``if kwargs:``, so that test was always true and the ``get_all``
        branch was unreachable.  Choose the client method based on whether
        actual filters were supplied.
        """
        kwargs = {'limit': limit}

        if action:
            kwargs['action'] = action
        if status:
            kwargs['status'] = status

        if action or status:
            method = self.client.liveactions.query
        else:
            method = self.client.liveactions.get_all

        result = self._run_client_method(method=method,
                                         method_kwargs=kwargs,
                                         format_func=format_result)
        return result
|
fn on_key_button_press(state: &mut State, key: kb::Key) {
match state.pairs.last_mut() {
Some(pair) => match pair.1 {
Some(_) if key.scan_code != 0 => state.pairs.push((Some(key.clone()), None)),
None => if pair.0.as_ref().unwrap() != &key { pair.1 = Some(key.clone()) } else {},
_ => (),
},
None if key.scan_code != 0 => state.pairs.push((Some(key.clone()), None)),
}
} |
let set1 = Set([1, 2, 3])
let set2 = Set([2, 3, 4])

// Elements present in both sets.  Set iteration order is unspecified, so
// sort before printing to make the output deterministic (the original
// printed the Set directly, which could show [3, 2] as easily as [2, 3]).
let commonElements = set1.intersection(set2)
print(commonElements.sorted())
// Output: [2, 3]
import { Link } from 'react-router-dom';
import { List, Button, Divider } from '@mui/material';
import {
AddCircle,
AccountCircle,
Assignment,
DirectionsCar,
LocationSearching,
} from '@mui/icons-material';
import Auth from '../utils/auth';
/**
 * Slide-out navigation menu.
 *
 * Always renders a "Browse Events" link; when the user is logged in (per
 * Auth.loggedIn()) it also shows links to the profile, create, manage and
 * attending pages.  Every link closes the drawer via setOpen(false).
 *
 * @param {{ setOpen: (open: boolean) => void }} props
 */
export default function SideMenu({ setOpen }) {
  return (
    <List component='nav' sx={{ pl: 1 }}>
      {/* Visible to everyone */}
      <Link to={`/bash/`} onClick={() => setOpen(false)}>
        <Button sx={{ my: 2 }}>
          <LocationSearching sx={{ mr: 1 }} /> Browse Events
        </Button>
      </Link>
      <Divider />
      {/* Authenticated-only section */}
      {Auth.loggedIn() && (
        <>
          <Link
            to={`/bash/u/${Auth.getProfile().data.username}`}
            onClick={() => setOpen(false)}
          >
            <Button sx={{ my: 2 }}>
              <AccountCircle sx={{ mr: 1 }} /> View/Edit My Profile
            </Button>
          </Link>
          <Divider />
          <Link to={`/bash/create`} onClick={() => setOpen(false)}>
            <Button sx={{ my: 2 }}>
              <AddCircle sx={{ mr: 1 }} /> Create New Event
            </Button>
          </Link>
          <Divider />
          <Link to={`/bash/manage-events`} onClick={() => setOpen(false)}>
            <Button sx={{ my: 2 }}>
              <Assignment sx={{ mr: 1 }} /> Events I'm Managing
            </Button>
          </Link>
          <Divider />
          <Link to={`/bash/attending`} onClick={() => setOpen(false)}>
            <Button sx={{ my: 2 }}>
              <DirectionsCar sx={{ mr: 1 }} /> Events I'm Attending
            </Button>
          </Link>
        </>
      )}
    </List>
  );
}
|
#!/bin/bash
# Copyright (c) 2020 Sharvil Kekre skekre98
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

# Fail the script (and CI) if sourcing the helpers or the build step fails;
# the original exited 0 even when build_and_release failed.
set -euo pipefail

# Pull in the project-specific build helpers, then run the release.
. scripts/custom.sh
build_and_release
|
<reponame>petroniocandido/SistemaExemplo_Comercial
package br.edu.ifnmg.SistemaComercial.LogicaAplicacao;
import br.edu.ifnmg.SistemaComercial.LogicaAplicacao.Produto;
import br.edu.ifnmg.SistemaComercial.LogicaAplicacao.Transacao;
import java.math.BigDecimal;
import javax.annotation.Generated;
import javax.persistence.metamodel.SingularAttribute;
import javax.persistence.metamodel.StaticMetamodel;
/**
 * JPA static metamodel for {@code TransacaoItem}, generated by EclipseLink.
 * Do not edit by hand — regenerate alongside the entity class.
 */
@Generated(value="EclipseLink-2.5.2.v20140319-rNA", date="2021-08-25T14:44:09")
@StaticMetamodel(TransacaoItem.class)
public class TransacaoItem_ {

    // Type-safe attribute handles for criteria queries.
    public static volatile SingularAttribute<TransacaoItem, Transacao> transacao;
    public static volatile SingularAttribute<TransacaoItem, Produto> produto;
    public static volatile SingularAttribute<TransacaoItem, Integer> quantidade;
    public static volatile SingularAttribute<TransacaoItem, BigDecimal> valorUnitario;
}
<gh_stars>1-10
// Integration tests for POST /api/v1/cashier/:cashierId/transactions/:accountNumber/:transactionType.
// NOTE(review): `app`, `request` and `expect` are not imported in this file —
// presumably installed as globals by the test bootstrap; confirm in the setup file.
describe('Routes: Make Transaction', () => {
  // Fixture ids filled in by before(); the placeholder values are overwritten.
  const initDetails = {
    cashierId: 0,
    accountNumber0: 7777777777,
    accountNumber1: 7777777777,
  };
  // Seed the in-memory models with one client, one cashier, and two
  // accounts owned by the client: one active, one dormant.
  before((done) => {
    const models = app.get('models');
    const [users, accounts] = [models.users, models.accounts];
    // NOTE(review): `users.count` is read as a property while
    // `accounts.count()` is invoked — confirm both model APIs are intended.
    const testClient = {
      id: users.count,
      firstName: 'Marcos',
      lastName: 'Herrera',
      email: '<EMAIL>',
      password: '<PASSWORD>',
      type: 'client',
      isAdmin: false,
      isLoggedIn: false,
    };
    const testCashier = {
      id: users.count,
      firstName: 'Mary',
      lastName: 'Susaeta',
      email: '<EMAIL>',
      password: '<PASSWORD>',
      type: 'staff',
      isAdmin: false,
      isLoggedIn: false,
    };
    // Active savings account used by the debit and happy-path tests.
    const testAccount0 = {
      id: accounts.count(),
      accountNumber: 8963257411,
      createdOn: new Date(),
      owner: testClient.id,
      type: 'savings',
      status: 'active',
      balance: 20000.00,
    };
    accounts.add(testAccount0);
    // Dormant account used to verify inactive accounts reject transactions.
    const testAccount1 = {
      id: accounts.count(),
      accountNumber: 1478963255,
      createdOn: new Date(),
      owner: testClient.id,
      type: 'savings',
      status: 'dormant',
      balance: 20000.00,
    };
    accounts.add(testAccount1);
    users.add(testClient);
    users.add(testCashier);
    initDetails.cashierId = testCashier.id;
    initDetails.accountNumber0 = testAccount0.accountNumber;
    initDetails.accountNumber1 = testAccount1.accountNumber;
    done();
  });
  describe('Post /api/v1/transactions/:accountNumber/:transactionType', () => {
    // Rejection cases — each expects a 401 with a specific error message.
    describe('status 401', () => {
      it('Throws an error when the account number does not exist.', (done) => {
        request.post(`/api/v1/cashier/${initDetails.cashierId}/transactions/8000074110/debit`)
          .send({
            amount: 5000.00,
          })
          .expect(401)
          .end((err, res) => {
            const expectedResult = {
              status: 401,
              error: 'An account with the account number, 8000074110, does not exist.',
            };
            expect(res.body).to.eql(expectedResult);
            done(err);
          });
      });
      it('Throws an error when the transaction type is wrong.', (done) => {
        request.post(`/api/v1/cashier/${initDetails.cashierId}/transactions/${initDetails.accountNumber0}/chicken`)
          .send({
            amount: 5000.00,
          })
          .expect(401)
          .end((err, res) => {
            const expectedResult = {
              status: 401,
              error: 'chicken is not a valid transaction type.',
            };
            expect(res.body).to.eql(expectedResult);
            done(err);
          });
      });
      it('Throws an error when the funds are insufficient.', (done) => {
        request.post(`/api/v1/cashier/${initDetails.cashierId}/transactions/${initDetails.accountNumber0}/debit`)
          .send({
            amount: 50000,
          })
          .expect(401)
          .end((err, res) => {
            const expectedResult = {
              status: 401,
              error: 'Inadequate funds.',
            };
            expect(res.body).to.eql(expectedResult);
            done(err);
          });
      });
      it('Throws an error when the account is inactive.', (done) => {
        request.post(`/api/v1/cashier/${initDetails.cashierId}/transactions/${initDetails.accountNumber1}/debit`)
          .send({
            amount: 5000,
          })
          .expect(401)
          .end((err, res) => {
            const expectedResult = {
              status: 401,
              error: 'This account cannot perform any transactions at the moment since it is inactive.',
            };
            expect(res.body).to.eql(expectedResult);
            done(err);
          });
      });
    });
    // Happy path: a valid debit returns the transaction summary and the
    // updated balance (20000 - 5000 = '15000', returned as a string).
    describe('status 200', () => {
      it('Shows the major transaction information when the transaction details are correct.', (done) => {
        request.post(`/api/v1/cashier/${initDetails.cashierId}/transactions/${initDetails.accountNumber0}/debit`)
          .send({
            amount: 5000.00,
          })
          .expect(200)
          .end((err, res) => {
            const expectedResult = {
              status: 200,
              data: {
                transactionId: 0,
                accountNumber: initDetails.accountNumber0,
                amount: 5000.00,
                cashier: initDetails.cashierId,
                transactionType: 'debit',
                accountBalance: '15000',
              },
            };
            expect(res.body).to.eql(expectedResult);
            done(err);
          });
      });
    });
  });
});
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.