text stringlengths 1 1.05M |
|---|
<reponame>bgould/avian<filename>classpath/java/io/Writer.java
/* Copyright (c) 2008-2015, <NAME>
Permission to use, copy, modify, and/or distribute this software
for any purpose with or without fee is hereby granted, provided
that the above copyright notice and this permission notice appear
in all copies.
There is NO WARRANTY for this software. See license.txt for
details. */
package java.io;
public abstract class Writer implements Closeable, Flushable, Appendable {
public void write(int c) throws IOException {
char[] buffer = new char[] { (char) c };
write(buffer);
}
public void write(char[] buffer) throws IOException {
write(buffer, 0, buffer.length);
}
public void write(String s) throws IOException {
write(s.toCharArray());
}
public void write(String s, int offset, int length) throws IOException {
char[] b = new char[length];
s.getChars(offset, offset + length, b, 0);
write(b);
}
public abstract void write(char[] buffer, int offset, int length)
throws IOException;
public Appendable append(final char c) throws IOException {
write((int)c);
return this;
}
public Appendable append(final CharSequence sequence) throws IOException {
return append(sequence, 0, sequence.length());
}
public Appendable append(CharSequence sequence, int start, int end)
throws IOException {
final int length = end - start;
if (sequence instanceof String) {
write((String)sequence, start, length);
} else {
final char[] charArray = new char[length];
for (int i = start; i < end; i++) {
charArray[i] = sequence.charAt(i);
}
write(charArray, 0, length);
}
return this;
}
public abstract void flush() throws IOException;
public abstract void close() throws IOException;
}
|
#!/bin/bash
##===----------------------------------------------------------------------===##
##
## This source file is part of the SwiftNIO open source project
##
## Copyright (c) 2017-2018 Apple Inc. and the SwiftNIO project authors
## Licensed under Apache License v2.0
##
## See LICENSE.txt for license information
## See CONTRIBUTORS.txt for the list of SwiftNIO project authors
##
## SPDX-License-Identifier: Apache-2.0
##
##===----------------------------------------------------------------------===##
# Ensure the output directory exists before the test run writes its report.
mkdir -p .build # for the junit.xml file
# Run the shell-based integration suite; -i makes it emit the JUnit XML report.
./IntegrationTests/run-tests.sh --junit-xml .build/junit-sh-tests.xml -i
|
<filename>src/components/BackLink.tsx<gh_stars>1-10
import Link from 'next/link'
import React from 'react'
/** Props accepted by {@link BackLink}. */
export interface BackLinkProps {
  /** Destination the back link navigates to. */
  href: string
}

/**
 * GOV.UK-styled back link ("Späť") wrapped in a Next.js client-side Link.
 */
export const BackLink = (props: BackLinkProps) => {
  return (
    <Link href={props.href}>
      <a className="govuk-back-link" data-test="back">
        Späť
      </a>
    </Link>
  )
}
|
import { Collection, ObjectID } from 'mongodb'
import faker from 'faker'
/** Builds a fake participant document linked to the given barbecue id. */
export const mockParticipantParams = (barbecueId: string): any => {
  const params = {
    barbecueId: new ObjectID(barbecueId),
    participantId: new ObjectID(),
    name: faker.name.findName(),
    pay: faker.random.boolean(),
    value: faker.random.number()
  }
  return params
}
/** Inserts one fake participant and resolves with its inserted _id. */
export const makeParticipant = async (collection: Collection, barbecueId: string): Promise<string> => {
  const result = await collection.insertOne(mockParticipantParams(barbecueId))
  const [inserted] = result.ops
  return inserted._id
}
/** Inserts four fake participants for the given barbecue in one batch. */
export const makeParticipants = async (collection: Collection, barbecueId: string): Promise<void> => {
  const participants = Array.from({ length: 4 }, () => mockParticipantParams(barbecueId))
  await collection.insertMany(participants)
}
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.download2 = void 0;
var download2 = {
"viewBox": "0 0 16 16",
"children": [{
"name": "path",
"attribs": {
"fill": "#000000",
"d": "M14 8h-2.5l-3.5 3.5-3.5-3.5h-2.5l-2 4v1h16v-1l-2-4zM0 14h16v1h-16v-1zM9 5v-4h-2v4h-3.5l4.5 4.5 4.5-4.5h-3.5z"
}
}]
};
exports.download2 = download2; |
<filename>jot-orientdb/src/main/java/org/mnode/jot/orientdb/command/AbstractOrientDBVertexCommand.java<gh_stars>1-10
package org.mnode.jot.orientdb.command;
import com.orientechnologies.orient.core.db.document.ODatabaseDocument;
import com.orientechnologies.orient.core.record.OVertex;
import org.mnode.jot.schema.command.NodeType;
import org.mnode.jot.schema.command.PropertyMapper;
import java.util.List;
/**
 * Base command for OrientDB vertex operations on a single {@link NodeType}.
 *
 * @param <T> domain type mapped to/from {@link OVertex} instances by the mapper
 */
public class AbstractOrientDBVertexCommand<T> extends AbstractOrientDBCommand {

    /** Vertex class this command operates on. */
    private final NodeType nodeType;

    /** Maps between domain objects and vertex properties. */
    private final PropertyMapper<T, OVertex> mapper;

    public AbstractOrientDBVertexCommand(ODatabaseDocument session, NodeType nodeType, PropertyMapper<T, OVertex> mapper) {
        super(session);
        this.nodeType = nodeType;
        this.mapper = mapper;
    }

    public NodeType getNodeType() {
        return nodeType;
    }

    public PropertyMapper<T, OVertex> getMapper() {
        return mapper;
    }

    /** Creates a new, unsaved vertex of this command's node type. */
    protected OVertex createVertex() {
        return getSession().newVertex(nodeType.toString());
    }

    /**
     * Looks up the vertex with the given uid.
     *
     * @return the first matching vertex, or {@code null} if none exists
     */
    protected OVertex retrieveVertex(String uid) {
        List<OVertex> results = getVertices(String.format("SELECT FROM %s WHERE uid = ?", nodeType), uid);
        if (!results.isEmpty()) {
            return results.get(0);
        }
        return null;
    }

    /** Persists the given vertex and returns the saved instance. */
    protected OVertex updateVertex(OVertex vertex) {
        return getSession().save(vertex);
    }

    /**
     * Deletes the vertex identified by uid, if it exists.
     * FIX: {@link #retrieveVertex(String)} may return {@code null} for an
     * unknown uid; the previous code dereferenced it unconditionally and threw
     * a NullPointerException. A missing vertex is now a no-op.
     */
    protected void deleteVertex(String uid) {
        OVertex vertex = retrieveVertex(uid);
        if (vertex != null) {
            getSession().delete(vertex.getIdentity());
        }
    }
}
|
import React, { useEffect, useState } from 'react'
import { Link } from 'react-scroll'
import _ from 'lodash'
import Icon from '../assets/logo.svg'
import Styles from '../styles/main.module.scss'
/** Navigation entries rendered in both the desktop list and the drawer. */
const data = [
  { title: 'About me', path: 'about' },
  { title: 'Skills', path: 'skill' },
  { title: 'Experience', path: 'experience' },
]

/** Props for the Header component. */
interface Props {
  /** Toggles the mobile drawer open/closed. */
  handleDrawer: () => void
  /** Whether the drawer is currently open. */
  openDrawer: boolean
}

/** Button-styled link to the PDF resume, opened in a new tab. */
const ResumeLink: React.FC = () => (
  <a href="resume.pdf" target="_blank" className={Styles.button}>
    My Resume
  </a>
)

/** Scroll direction used to show/hide the header. */
type Direction = 'down' | 'up'
/**
 * Fixed site header that hides while scrolling down and reappears while
 * scrolling up. Renders the desktop nav list, the mobile drawer, and the
 * hamburger toggle.
 */
const Header: React.FC<Props> = ({ handleDrawer, openDrawer }) => {
  // Last committed scroll offset; compared against the live offset to
  // decide direction and whether the delta passed the threshold.
  const [offset, setOffset] = useState(0)
  const [scrollingDirection, setDirection] = useState<Direction>('up')

  // Throttled scroll handler.
  // FIX: the callback parameter was named `_`, shadowing the lodash import;
  // it was unused, so it is dropped entirely.
  const handleScroll = _.throttle(() => {
    const threshold = 100.0
    const { pageYOffset: currentOffset } = window
    const diff = currentOffset - offset
    const passThreshold = diff / threshold
    setDirection(diff > 0 ? 'down' : 'up')
    // Only commit the new offset once the scroll delta exceeds the threshold,
    // so small jitters don't reset the comparison point.
    if (passThreshold > 1 || passThreshold < -1) {
      setOffset(currentOffset)
    }
  }, 200)

  // Intentionally no dependency array: the listener is re-registered every
  // render so the handler always closes over the latest `offset`.
  useEffect(() => {
    window.addEventListener('scroll', handleScroll)
    return () => window.removeEventListener('scroll', handleScroll)
  })

  return (
    <header>
      <div
        className={`
          ${Styles.header}
          ${
            // FIX: strict equality instead of loose `==`
            scrollingDirection === 'down' ? Styles.headerHide : Styles.headerActive
          }
        `}
      >
        <div className={Styles.nav}>
          <Link
            smooth
            to="about"
            className={Styles.navIcon}
            // FIX: onClick must be a handler or undefined; `null` is not
            // assignable under strict TS typings.
            onClick={openDrawer ? handleDrawer : undefined}
            duration={500}
          >
            <Icon />
          </Link>
          <div className={Styles.navGrow} />
          <div className={`${Styles.navList}`}>
            <ul>
              {data.map((v, i) => (
                // FIX: key by the stable path, not the array index
                <li key={v.path}>
                  <Link smooth={true} duration={500} to={v.path}>
                    <span className={Styles.index}>0{i}.</span>
                    <span className={Styles.content}>{v.title}</span>
                  </Link>
                </li>
              ))}
              <li>
                <ResumeLink />
              </li>
            </ul>
          </div>
          <div className={`${Styles.drawer} ${openDrawer ? Styles.hide : ''}`}>
            <ul>
              {data.map((v, i) => (
                <li key={v.title}>
                  <Link
                    onClick={handleDrawer}
                    smooth={true}
                    duration={500}
                    to={v.path}
                  >
                    <span className={Styles.index}>0{i}.</span>
                    <span className={Styles.content}>{v.title}</span>
                  </Link>
                </li>
              ))}
              <li>
                <ResumeLink />
              </li>
            </ul>
          </div>
          <div
            className={`
              ${Styles.navHambuger}
              ${openDrawer ? Styles.navHambugerActive : ''}
            `}
            onClick={handleDrawer}
          >
            <div className={Styles.navHambugerTop} />
            <div className={Styles.navHambugerMiddle} />
            <div className={Styles.navHambugerBottom} />
          </div>
        </div>
      </div>
    </header>
  )
}
export default Header
|
def removePunctuation(inputString):
    """Return ``inputString`` with all punctuation characters removed.

    Alphanumerics and whitespace are preserved; only characters in the
    punctuation set below are dropped.
    """
    # Punctuation characters to strip. NOTE: ``\,`` inside the triple-quoted
    # string is not a recognized escape, so the set also contains a literal
    # backslash (preserved from the original behavior).
    punctuations = '''!()-[]{};:'"\,<>./?@#$%^&*_~'''
    # Idiomatic filter-and-join instead of repeated string concatenation
    # (which is O(n^2) in the worst case).
    return "".join(char for char in inputString if char not in punctuations)
package products.pizza;
import products.Product;
import java.util.List;
/**
 * Base class for all pizza products. Concrete pizzas supply their own
 * ingredient lists and vegan flag.
 */
public abstract class Pizza extends Product {

    /** Whether this pizza contains no animal products. */
    protected boolean vegan;

    /** Ingredient names for this pizza. */
    protected List<String> ingredients;

    @Override
    public String order() {
        return "Ordered " + this.name + " pizza.";
    }
}
|
<reponame>alexandremcp/Papyrus
package br.com.papyrus.model;
import java.util.List;
import javax.swing.table.AbstractTableModel;
/**
* Classe que cria e gerencia a AbstractTableModel para Autores
*
* @author <NAME>
*/
public class ModelLeitoresTableModel extends AbstractTableModel {

    // Backing row data; each element is expected to be a ModelLeitoresVO
    // (raw List preserved for source compatibility with existing callers).
    private List leitores;
    // Column headers, in display order; must stay in sync with getValueAt().
    private String[] colunas = new String[]{"CÓDIGO", "NOME", "NOME SOCIAL", "SEXO", "NASCIMENTO", "MATRICULA", "TURMA", "TURNO", "CADASTRO", "PAI", "MAE", "IDENTIDADE", "CPF", "TELEFONE", "EMAIL", "ENDERECO", "BAIRRO", "CIDADE", "ESTADO", "REFERENCIA", "OBSERVACOES"};

    @Override
    public String getColumnName(int i) { // Returns the header label for column i
        return colunas[i];
    }

    // Wraps an existing list of readers; the list is referenced, not copied.
    public ModelLeitoresTableModel(List leitores) {
        this.leitores = leitores;
    }

    @Override
    public int getRowCount() { // Row count mirrors the backing list (0 when null)
        if (leitores != null) {
            return leitores.size();
        }
        return 0;
    }

    @Override
    public int getColumnCount() { // One column per header entry
        return colunas.length;
    }

    // Maps (row, column) to the corresponding ModelLeitoresVO getter;
    // the case order must match the `colunas` header array above.
    @Override
    public Object getValueAt(int indiceLinha, int indiceColuna) {
        ModelLeitoresVO leitor = (ModelLeitoresVO) leitores.get(indiceLinha);
        if (leitor != null) {
            switch (indiceColuna) {
                case 0:
                    return leitor.getId();
                case 1:
                    return leitor.getNome();
                case 2:
                    return leitor.getNomeSocial();
                case 3:
                    return leitor.getSexo();
                case 4:
                    return leitor.getNascimento();
                case 5:
                    return leitor.getMatricula();
                case 6:
                    return leitor.getTurma();
                case 7:
                    return leitor.getTurno();
                case 8:
                    return leitor.getCadastro();
                case 9:
                    return leitor.getPai();
                case 10:
                    return leitor.getMae();
                case 11:
                    return leitor.getIdentidade();
                case 12:
                    return leitor.getCPF();
                case 13:
                    return leitor.getTelefone();
                case 14:
                    return leitor.getEmail();
                case 15:
                    return leitor.getEndereco();
                case 16:
                    return leitor.getBairro();
                case 17:
                    return leitor.getCidade();
                case 18:
                    return leitor.getEstado();
                case 19:
                    return leitor.getReferencia();
                case 20:
                    return leitor.getObservacoes();
            }
        }
        // Unknown column index (or null row entry)
        return null;
    }

    // Returns the backing VO for a row, or null for a negative index.
    public ModelLeitoresVO getLeitores(int linha) {
        if (linha >= 0) {
            return (ModelLeitoresVO) this.leitores.get(linha);
        }
        return null;
    }
}
|
<reponame>Vanluren/react-native-heroicons
import * as React from "react";
import Svg, { Path, SvgProps } from "react-native-svg";
/** Props for the icon: standard SVG props plus a square pixel size. */
interface Props extends SvgProps {
  /** Width and height of the rendered icon, in pixels (default 24). */
  size?: number;
}

/**
 * Heroicons "exclamation" triangle, rendered via react-native-svg.
 * Inherits `currentColor` unless a fill is passed through props.
 */
const Exclamation = ({ size = 24, ...props }: Props) => (
  <Svg
    viewBox="0 0 20 20"
    fill="currentColor"
    width={size}
    height={size}
    {...props}
  >
    <Path
      fillRule="evenodd"
      d="M8.257 3.099c.765-1.36 2.722-1.36 3.486 0l5.58 9.92c.75 1.334-.213 2.98-1.742 2.98H4.42c-1.53 0-2.493-1.646-1.743-2.98l5.58-9.92zM11 13a1 1 0 11-2 0 1 1 0 012 0zm-1-8a1 1 0 00-1 1v3a1 1 0 002 0V6a1 1 0 00-1-1z"
      clipRule="evenodd"
    />
  </Svg>
);
export default Exclamation;
|
#!/usr/bin/env bash
# **build_docs.sh** - Build the gh-pages docs for DevStack
#
# - Install shocco if not found on PATH
# - Clone MASTER_REPO branch MASTER_BRANCH
# - Re-creates ``docs`` directory from existing repo + new generated script docs
# Usage:
## build_docs.sh [[-b branch] [-p] repo] | .
## -b branch The DevStack branch to check out (default is master; ignored if
## repo is not specified)
## -p Push the resulting docs tree to the source repo; fatal error if
## repo is not specified
## repo The DevStack repository to clone (default is DevStack github repo)
## If a repo is not supplied use the current directory
## (assumed to be a DevStack checkout) as the source.
## . Use the current repo and branch (do not use with -p to
## prevent stray files in the workspace being added to the docs)
# Defaults
# --------
# Source repo/branch for DevStack
MASTER_REPO=${MASTER_REPO:-git://git.openstack.org/openstack-dev/devstack}
MASTER_BRANCH=${MASTER_BRANCH:-master}
# http://devstack.org is a GitHub gh-pages site in the https://github.com/cloudbuilders/devstack.git repo
GH_PAGES_REPO=git@github.com:cloudbuilders/devstack.git
# Keep track of the devstack directory
TOP_DIR=$(cd $(dirname "$0")/.. && pwd)
# Uses this shocco branch: https://github.com/dtroyer/shocco/tree/rst_support
SHOCCO=${SHOCCO:-shocco}
# Locate or bootstrap a local shocco install
if ! which shocco; then
    if [[ ! -x $TOP_DIR/shocco/shocco ]]; then
        if [[ -z "$INSTALL_SHOCCO" ]]; then
            echo "shocco not found in \$PATH, please set environment variable SHOCCO"
            exit 1
        fi
        echo "Installing local copy of shocco"
        if ! which pygmentize; then
            sudo pip install Pygments
        fi
        if ! which rst2html.py; then
            sudo pip install docutils
        fi
        git clone -b rst_support https://github.com/dtroyer/shocco shocco
        cd shocco
        ./configure
        make
        cd ..
    fi
    SHOCCO=$TOP_DIR/shocco/shocco
fi
# Process command-line args
while getopts b:p c; do
    case $c in
        b) MASTER_BRANCH=$OPTARG
           ;;
        p) PUSH_REPO=1
           ;;
    esac
done
shift `expr $OPTIND - 1`
# Sanity check the args
if [[ "$1" == "." ]]; then
    REPO=""
    if [[ -n $PUSH_REPO ]]; then
        echo "Push not allowed from an active workspace"
        unset PUSH_REPO
    fi
else
    if [[ -z "$1" ]]; then
        REPO=$MASTER_REPO
    else
        REPO=$1
    fi
fi
# Check out a specific DevStack branch
if [[ -n $REPO ]]; then
    # Make a workspace
    TMP_ROOT=$(mktemp -d devstack-docs-XXXX)
    echo "Building docs in $TMP_ROOT"
    cd $TMP_ROOT
    # Get the master branch
    git clone $REPO devstack
    cd devstack
    git checkout $MASTER_BRANCH
fi
# Processing
# ----------
# Assumption is we are now in the DevStack repo workspace to be processed
# Pull the latest docs branch from devstack.org repo
if ! [ -d docs ]; then
    git clone -b gh-pages $GH_PAGES_REPO docs
fi
# Build list of scripts to process
FILES=""
for f in $(find . -name .git -prune -o \( -type f -name \*.sh -not -path \*shocco/\* -print \)); do
    echo $f
    FILES+="$f "
    mkdir -p docs/`dirname $f`;
    $SHOCCO $f > docs/$f.html
done
for f in $(find functions lib samples -type f -name \*); do
    echo $f
    FILES+="$f "
    mkdir -p docs/`dirname $f`;
    $SHOCCO $f > docs/$f.html
done
echo "$FILES" >docs-files
# Switch to the gh_pages repo
cd docs
# Collect the new generated pages
find . -name \*.html -print0 | xargs -0 git add
# Push our changes back up to the docs branch
if ! git diff-index HEAD --quiet; then
    git commit -a -m "Update script docs"
    # FIX: the push flag set by getopts is PUSH_REPO; the previous check of
    # $PUSH was never true, so -p never actually pushed.
    if [[ -n $PUSH_REPO ]]; then
        git push
    fi
fi
# Clean up or report the temp workspace
# FIX: '[[ -n REPO ]]' tested the literal string "REPO" (always true);
# it must expand the variable to guard the rm -rf on the temp workspace.
if [[ -n $REPO && -n $PUSH_REPO ]]; then
    rm -rf $TMP_ROOT
else
    if [[ -z "$TMP_ROOT" ]]; then
        TMP_ROOT="$(pwd)"
    fi
    echo "Built docs in $TMP_ROOT"
fi
|
<table>
<tr>
<th>Number</th>
<th>Cube</th>
</tr>
<tr>
<td>1</td>
<td>1</td>
</tr>
<tr>
<td>2</td>
<td>8</td>
</tr>
<tr>
<td>3</td>
<td>27</td>
</tr>
<tr>
<td>4</td>
<td>64</td>
</tr>
<tr>
<td>5</td>
<td>125</td>
</tr>
<tr>
<td>6</td>
<td>216</td>
</tr>
<tr>
<td>7</td>
<td>343</td>
</tr>
<tr>
<td>8</td>
<td>512</td>
</tr>
<tr>
<td>9</td>
<td>729</td>
</tr>
<tr>
<td>10</td>
<td>1000</td>
</tr>
</table> |
#!/bin/bash
# Validate and package all 5GTANGO example projects.
# Any failing command aborts the script (set -e).
set -e

# directly call the validator (to make error parsing simple)
tng-validate --project NSID1V -t
tng-validate --project NSID2V -t
tng-validate --project NSID3V -t
tng-validate --project NSID1C -t
tng-validate --project NSID2C -t
tng-validate --project NSID3C -t
tng-validate --project NSINDP1C -t
tng-validate --project NSCPE_onap -t

# package all projects (validation already done above, so skip it here)
tng-pkg -p NSID1C --skip-validation
tng-pkg -p NSID1V --skip-validation
tng-pkg -p NSID1V_cirros_OSM --skip-validation
tng-pkg -p NSID1V_cirros_SONATA --skip-validation
tng-pkg -p NSID1V_cirros_SONATA_no_tags --skip-validation
tng-pkg -p NSID2C --skip-validation
tng-pkg -p NSID3C --skip-validation
tng-pkg -p NSID2V --skip-validation
tng-pkg -p NSID3V --skip-validation
tng-pkg -p NSIMPSP --skip-validation
tng-pkg -p NSIMPSP_no_tags --skip-validation
tng-pkg -p NSINDP1C --skip-validation
tng-pkg -p NSTD_VNF --skip-validation
tng-pkg -p NSTD_CNF --skip-validation
tng-pkg -p NSTD_hybrid --skip-validation
tng-pkg -p TSTGNRPRB --skip-validation
tng-pkg -p TSTIMPSP --skip-validation
# FIX: removed a second, duplicate "tng-pkg -p NSID1V_cirros_OSM" invocation
# that re-packaged the same project already handled above.
tng-pkg -p TSTPING --skip-validation
tng-pkg -p TSTIMHLS --skip-validation
tng-pkg -p TSTPING_2_parallel_probes --skip-validation
tng-pkg -p NSID1V_osm_charms --skip-validation
tng-pkg -p NSSQHA --skip-validation
tng-pkg -p NSID1V_cirros_SONATA_NS_testing_tag_matches_multiple_TD_testing_tag --skip-validation
tng-pkg -p NSID1V_cirros_SONATA_TD_testing_tag_matches_multiple_NS_testing_tag_1 --skip-validation
tng-pkg -p NSID1V_cirros_SONATA_TD_testing_tag_matches_multiple_NS_testing_tag_2 --skip-validation
tng-pkg -p TSTPING_2_instances_probes --skip-validation
tng-pkg -p TSTPING_dependency_2_probes --skip-validation
tng-pkg -p TSTPING_SONATA --skip-validation
tng-pkg -p TSTPING_NS_testing_tag_matches_multiple_TD_testing_tag_1 --skip-validation
tng-pkg -p TSTPING_NS_testing_tag_matches_multiple_TD_testing_tag_2 --skip-validation
tng-pkg -p TSTPING_TD_testing_tag_matches_multiple_NS_testing_tag --skip-validation
tng-pkg -p TSTPING_testing_tag_not_match --skip-validation
tng-pkg -p TSTIMPSP_parser_multiple_cases --skip-validation
tng-pkg -p NSID1V_AND_TSTPING_cirros_SONATA --skip-validation
tng-pkg -p TSTTELNET_osm_cloud_init --skip-validation
tng-pkg -p NSID1V_ubuntu_OSM_cloud_init --skip-validation
tng-pkg -p HEADLESS_BROWSER --skip-validation
tng-pkg -p NSCPE_onap --skip-validation
tng-pkg -p NSMQTT_OSM --skip-validation
tng-pkg -p TSTINDP --skip-validation
|
function MaxSubarray(arr: number[]) {
let maxSum = -Number.POSITIVE_INFINITY;
let currentMaxSum = 0;
for (let i = 0; i < arr.length; i++) {
currentMaxSum = currentMaxSum + arr[i];
if (maxSum < currentMaxSum) {
maxSum = currentMaxSum;
}
if (currentMaxSum < 0) {
currentMaxSum = 0;
}
}
return maxSum;
} |
#!/bin/bash
##########
#The MIT License (MIT)
#
# Copyright (c) 2015 Aiden Lab
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
##########
# MegaMap script.
#
#
# [topDir] - Should contain the results of all base experiments
#
# From the top-level directory, the following two directories are created:
#
# [topDir]/mega - Location of result of processing the mega map
#
# Juicer version 2.0
juicer_version="2.0"
# top level directory, can also be set in options
topDir=$(pwd)
# restriction enzyme, can also be set in options
site="none"
# genome ID, default to human, can also be set in options
genomeID="hg19"
# Juicer directory, contains scripts/, references/, and restriction_sites/
# can also be set in options via -D
juiceDir="/aidenlab"
# by default exclude fragment delimited maps
exclude=1
# Help text fragments printed by printHelpAndExit. Several embed literal
# "\n" sequences that are printed verbatim (echo is used without -e).
usageHelp="Usage: ${0##*/} -g genomeID [-d topDir] [-s site] [-S stage] [-D Juicer scripts directory] [-T threadsHic] [-y site_file] [-f] [-h]"
genomeHelp=" genomeID is either defined in the script, e.g. \"hg19\" or \"mm10\" or the path to the chrom.sizes file"
dirHelp=" [topDir] is the top level directory (default \"$topDir\") and must contain links to all merged files underneath it"
siteHelp=" [site] must be defined in the script, e.g. \"HindIII\" or \"MboI\" (default \"$site\"); alternatively, this can be the restriction site file"
siteFileHelp="* [restriction site file]: enter path for restriction site file (locations of\n restriction sites in genome; can be generated with the script\n misc/generate_site_positions.py)"
threadsHicHelp="* [threads for hic file creation]: number of threads when building hic file"
stageHelp="* [stage]: must be one of \"final\", \"postproc\", or \"early\".\n -Use \"final\" when the reads have been combined into merged but the\n final stats and hic files have not yet been created.\n -Use \"postproc\" when the hic files have been created and only\n postprocessing feature annotation remains to be completed.\n -Use \"early\" for an early exit, before the final creation of the stats and\n hic files"
scriptDirHelp="* [Juicer scripts directory]: set the Juicer directory,\n which should have scripts/ references/ and restriction_sites/ underneath it\n (default ${juiceDir})"
excludeHelp=" -f: include fragment-delimited maps from Hi-C mega map (will run slower)"
helpHelp=" -h: print this help and exit"
# Print the version banner and the full usage/help text, then terminate
# with the exit status given as $1. Output is identical to the previous
# echo-per-line form (the heredoc expands the same variables in order).
printHelpAndExit() {
    cat <<EOF
Juicer Version: ${juicer_version}
$usageHelp
$genomeHelp
$dirHelp
$siteHelp
$siteFileHelp
$stageHelp
$scriptDirHelp
$threadsHicHelp
$excludeHelp
$helpHelp
EOF
    exit "$1"
}
while getopts "d:g:hfs:S:D:y:T:" opt; do
case $opt in
g) genomeID=$OPTARG ;;
h) printHelpAndExit 0;;
d) topDir=$OPTARG ;;
s) site=$OPTARG ;;
f) exclude=0 ;;
y) site_file=$OPTARG ;;
S) stage=$OPTARG ;;
D) juiceDir=$OPTARG ;;
T) threadsHic=$OPTARG ;;
[?]) printHelpAndExit 1;;
esac
done
## If DNAse-type experiment, no fragment maps; or way to get around site file
if [[ "$site" == "none" ]]
then
exclude=1;
fi
if [ -z "$site_file" ]
then
site_file="${juiceDir}/restriction_sites/${genomeID}_${site}.txt"
fi
## Check that site file exists, needed for fragment number
if [[ ! -e "$site_file" ]] && [[ "$site" != "none" ]] && [[ ! "$site_file" =~ "none" ]]
then
echo "***! $site_file does not exist. It must be created before running this script."
echo "The site file is used for statistics even if fragment delimited maps are excluded"
exit 1
elif [[ "$site" != "none" ]] && [[ ! "$site_file" =~ "none" ]]
then
echo "Using $site_file as site file"
fi
resolutionsToBuildString="-r 2500000,1000000,500000,250000,100000,50000,25000,10000,5000,2000,1000,500,200,100,50,20,10"
if [ "$exclude" -eq 1 ]
then
buildFragmentMapString=""
else
buildFragmentMapString="-f $site_file"
fi
if [ -n "$stage" ]
then
case $stage in
final) final=1 ;;
early) early=1 ;;
postproc) postproc=1 ;;
*) echo "$usageHelp"
echo "$stageHelp"
exit 1
esac
fi
## Directories to be created and regex strings for listing files
megaDir=${topDir}"/mega"
outputDir=${megaDir}"/aligned"
tmpdir=${megaDir}"/HIC_tmp"
export TMPDIR=${tmpdir}
tempdirPre=${outputDir}"/HIC_tmp"
tempdirPre30=${outputDir}"/HIC30_tmp"
if [ -z "$threadsHic" ]
then
threadsHic=1
threadHicString=""
threadHic30String=""
threadNormString=""
else
threadHicString="--threads $threadsHic -i ${outputDir}/merged1_index.txt -t ${tempdirPre}"
threadHic30String="--threads $threadsHic -i ${outputDir}/merged30_index.txt -t ${tempdirPre30}"
threadNormString="--threads $threadsHic"
fi
## Check for existing merged files:
merged_count=$(find -L "${topDir}" | grep -c merged1.txt)
if [ "$merged_count" -lt "1" ]
then
echo "***! Failed to find at least one merged1 file under ${topDir}"
exit 1
fi
merged_names=$(find -L "${topDir}" | grep merged1.txt.gz | awk '{print "<(gunzip -c",$1")"}' | tr '\n' ' ')
if [ ${#merged_names} -eq 0 ]
then
merged_names=$(find -L "${topDir}" | grep merged1.txt | tr '\n' ' ')
fi
merged_names30=$(find -L "${topDir}" | grep merged30.txt.gz | awk '{print "<(gunzip -c",$1")"}' | tr '\n' ' ')
if [ ${#merged_names30} -eq 0 ]
then
merged_names30=$(find -L "${topDir}" | grep merged30.txt | tr '\n' ' ')
fi
inter_names=$(find -L "${topDir}" | grep inter.txt | tr '\n' ' ')
inter_30_names=$(find -L "${topDir}" | grep inter_30.txt | tr '\n' ' ')
inter_hist_names=$(find -L "${topDir}" | grep inter_hists.m | tr '\n' ' ')
inter_30_hist_names=$(find -L "${topDir}" | grep inter_30_hists.m | tr '\n' ' ')
## Create output directory, exit if already exists
if [[ -d "${outputDir}" ]] && [ -z $final ] && [ -z $postproc ]
then
echo "***! Move or remove directory \"${outputDir}\" before proceeding."
exit 1
else
mkdir -p "${outputDir}"
fi
## Create temporary directory
if [ ! -d "$tmpdir" ]; then
mkdir "$tmpdir"
chmod 777 "$tmpdir"
fi
## Arguments have been checked and directories created. Now begins
## the real work of the pipeline
# Not in final or postproc
if [ -z $final ] && [ -z $postproc ]
then
# Create top statistics file from all inter.txt files found under current dir
java -Xmx2g -jar "${juiceDir}"/scripts/common/merge-stats.jar "$outputDir"/inter "${inter_names}"
java -Xmx2g -jar "${juiceDir}"/scripts/common/merge-stats.jar "$outputDir"/inter_30 "${inter_30_names}"
java -Xmx2g -jar "${juiceDir}"/scripts/common/merge-stats.jar "$outputDir"/inter "${inter_hist_names}"
java -Xmx2g -jar "${juiceDir}"/scripts/common/merge-stats.jar "$outputDir"/inter_30 "${inter_30_hist_names}"
echo "(-: Finished creating top stats files."
sort --parallel=40 -T "${tmpdir}" -m -k2,2d -k6,6d "${merged_names}" > "${outputDir}"/merged1.txt
sort --parallel=40 -T "${tmpdir}" -m -k2,2d -k6,6d "${merged_names30}" > "${outputDir}"/merged30.txt
echo "(-: Finished sorting all files into a single merge."
mkdir "${tempdirPre}"
if [[ $threadsHic -gt 1 ]] && [[ ! -s "${outputDir}"/merged1_index.txt ]]
then
"${juiceDir}"/scripts/common/index_by_chr.awk "${outputDir}"/merged1.txt 500000 > "${outputDir}"/merged1_index.txt
fi
"${juiceDir}"/scripts/common/juicer_tools pre -n -s "$outputDir"/inter.txt -g "$outputDir"/inter_hists.m -q 1 "$resolutionsToBuildString" "$buildFragmentMapString" "$threadHicString" "$outputDir"/merged1.txt "$outputDir"/inter.hic "$genomeID"
"${juiceDir}"/scripts/common/juicer_tools addNorm "$threadNormString" "${outputDir}"/inter.hic
rm -Rf "${tempdirPre}"
mkdir "${tempdirPre30}"
if [[ $threadsHic -gt 1 ]] && [[ ! -s "${outputDir}"/merged30_index.txt ]]
then
"${juiceDir}"/scripts/common/index_by_chr.awk "${outputDir}"/merged30.txt 500000 > "${outputDir}"/merged30_index.txt
fi
"${juiceDir}"/scripts/common/juicer_tools pre -n -s "$outputDir"/inter_30.txt -g "$outputDir"/inter_30_hists.m -q 30 "$resolutionsToBuildString" "$buildFragmentMapString" "$threadHic30String" "$outputDir"/merged30.txt "$outputDir"/inter_30.hic "$genomeID"
"${juiceDir}"/scripts/common/juicer_tools addNorm "$threadNormString" "${outputDir}"/inter_30.hic
rm -Rf "${tempdirPre30}"
fi
if [ -z $early ]
then
# Create loop lists file for MQ > 30
"${juiceDir}"/scripts/common/juicer_hiccups.sh -j "${juiceDir}"/scripts/common/juicer_tools -i "$outputDir"/inter_30.hic -m "${juiceDir}"/references/motif -g "$genomeID"
"${juiceDir}"/scripts/common/juicer_arrowhead.sh -j "${juiceDir}"/scripts/common/juicer_tools -i "$outputDir"/inter_30.hic
fi
if [ -s "${outputDir}"/inter.hic ] && [ -s "${outputDir}"/inter_30.hic ]
then
rm -fr "${tmpdir}"
echo "(-: Successfully completed making mega map. Done. :-)"
else
echo "!*** Error: one or both hic files are empty. Check debug directory for hic logs"
fi |
<reponame>BenoitDuffez/OAT<filename>static/translate.js
/**
* Created by bicou on 02/12/2013.
*/
/**
* Called when the user clicks a string from the list
* @param name The string name
* @param lang The target language
*/
/*global $, window */
/**
 * Called when the user clicks a string from the list.
 * Highlights the selected list item, loads the source/translated text,
 * and renders the screenshot context for the string.
 * @param name The string name
 * @param lang The target language
 */
function setCurrentString(name, lang) {
    // Remember the selection globally so save/next/prev can use it
    window.currentStringName = name;
    window.currentStringLang = lang;
    // Clear the previous selection highlight
    $('#list_strings').find('ul li').each(function (i, e) {
        $(e).removeClass('current').find('a.button').each(function (i, e) {
            $(e).removeClass('active');
        });
    });
    var current = $('li#' + window.currentStringName);
    current.addClass('current');
    current.find('a.button').each(function (i, e) {
        $(e).addClass('active');
    });
    $('#main_container').animate({height: '98%'}, 150);
    $('#topForm').animate({opacity: 1}, 1000);
    $('#context').animate({opacity: 1}, 1000);
    // Load the source text and the current translation
    $.getJSON(oatPath + "/ajax.php?action=getString&name=" + name + "&lang=" + lang, null, function (data) {
        $('#sourcetext').val(data.source.text);
        $('#translatedtext').val(data.destination.text).focus();
    });
    var scr = $('#screenshots');
    $.getJSON(oatPath + "/ajax.php?action=getScreenshots&name=" + name, null, function (data) {
        // FIX: prevCid was an implicit global (missing var), and jQuery cannot
        // append a bare closing tag ('</div>'), so screenshots were never
        // actually nested inside their context <div>. Build each context
        // element explicitly and append the screenshots into it.
        var prevCid = -1;
        var contextDiv = null;
        scr.empty();
        if (data.length > 0) {
            scr.append('<p>To help with the translation, here\'s the string context:</p>');
            $.each(data, function (i, screen) {
                // Start a new context block whenever the context id changes
                if (prevCid != screen.context_id) {
                    contextDiv = $('<div class="context"><h3>' + screen.context_name + '</h3></div>');
                    scr.append(contextDiv);
                }
                prevCid = screen.context_id;
                contextDiv.append('<div class="screenshot"><img class="screenshot" src="' + oatPath + '/upload/files/' + screen.name + '" /></div>');
            });
        } else {
            var help = 'There is no associated context for this string. ';
            help += 'If you want, you can <a href="' + oatPath + '/contexts/' + name + '">choose a context</a> for this string.';
            scr.append('<p>' + help + '</p>');
        }
    });
}
/**
* Called when Ctrl+Enter or save string
*/
/**
 * Called on Ctrl+Enter or the save-string action.
 * Posts the current translation; on success marks the list item as
 * translated and advances to the next string.
 */
function saveString() {
    var txt = $('#translatedtext').val();
    $.ajax({
        type: "POST",
        url: oatPath + "/ajax.php?action=addString",
        data: JSON.stringify({ name: window.currentStringName, lang: window.currentStringLang, text: txt }),
        contentType: "application/json; charset=utf-8",
        dataType: "json",
        success: function (data) {
            if (data.status == 'KO') {
                alert("Couldn't save string: " + data.reason);
            } else {
                // FIX: removed a stray trailing .prev() call whose result was
                // discarded (a no-op that only obscured the chain).
                $('li#' + window.currentStringName).removeClass('unset').addClass('set');
                selectNextString();
            }
        },
        // FIX: jQuery.ajax has no 'failure' option; the error handler was
        // never invoked. The correct setting is 'error'.
        error: function (jqXHR, textStatus, errorThrown) {
            alert(errorThrown || textStatus);
        }
    });
}
/** Moves the selection to the string following the current one. */
function selectNextString() {
    var current = $('li#' + window.currentStringName);
    setCurrentString(current.next().attr('id'), window.currentStringLang);
}
/** Moves the selection to the string preceding the current one. */
function selectPrevString() {
    var current = $('li#' + window.currentStringName);
    setCurrentString(current.prev().attr('id'), window.currentStringLang);
}
/**
* Handle keyboard shortcuts on the translated text box
*/
/**
 * Handle keyboard shortcuts on the translated text box.
 * FIX: the handlers previously read the non-standard global `event`
 * (IE/Chrome only) for modifier keys; they now use the jQuery event
 * object `e` that the handler actually receives.
 */
$(document).ready(function () {
    $('#translatedtext').keydown(function (e) {
        // Alt+Right: copy from source language
        if (e.keyCode == 39 && e.altKey) {
            $('#translatedtext').val($('#sourcetext').val());
        }
        // Alt+Up: previous string
        if (e.keyCode == 38 && e.altKey) {
            selectPrevString();
        }
        // Alt+Down: next string
        if (e.keyCode == 40 && e.altKey) {
            selectNextString();
        }
        // Ctrl+Enter: validate translation & go to next string
        if ((e.keyCode == 10 || e.keyCode == 13) && e.ctrlKey) {
            saveString();
            e.preventDefault();
        }
    });
});
|
#!/bin/bash
set -e
set -o pipefail
# Note: script was adapted from Morrell Lab's sequence_handling pipeline (https://github.com/MorrellLAB/sequence_handling)
# Not using sequence_handling pipeline directly because it was developed specifically
# for MSI UMN high performance computing system (Portable Batch System)
# Print the usage text to stderr and exit non-zero. Exported so GNU
# parallel subshells can also call it.
function Usage() {
    echo -e "\
\n\
This script takes in a list of SAM files and outputs BAM files aligned to reference genome.
\n\
Usage: ./sam_processing_parallel.sh [DEP_DIR] [SAM_LIST] [REFERENCE] [PROJECT] [OUT_DIR] \n\
\n\
Where: \n\
1) [DEP_DIR] is the full filepath to the directory containing dependencies (i.e. /path/to/bin)
2) [SAM_LIST] is a list of full filepaths to SAM files (output from aligning a concatenated gzipped FASTA file)
3) [REFERENCE] is the full filepath to the reference.fa file
4) [PROJECT] is the name of our project. This will get used to name summary statistic files.
5) [OUT_DIR] is the full filepath to our output directory
\n\
Example directory tree: if OUT_DIR=/path/to/file, output files will be in
OUT_DIR/SAM_Processing (/path/to/file/SAM_Processing). The SAM_Processing directory automatically
gets created within this script.
Dependencies: \n\
1) samtools
2) GNU parallel
" >&2
    exit 1
}
export -f Usage
# If we have no arguments
if [[ "$#" == 0 ]]; then Usage; fi # Display the usage message and exit
# Dependencies
# 1) samtools
# 2) parallel
# Prepend the dependency directory so samtools/parallel resolve from it first
DEP_DIR="$1"
export PATH=${DEP_DIR}:${PATH}
# Additional user provided arguments (see Usage for descriptions)
SAM_LIST="$2"
REFERENCE="$3"
PROJECT="$4"
OUT_DIR="$5"
# Create the SAM_Processing output directory tree under the given root.
# mkdir -p makes this a no-op for directories that already exist.
function makeOutDir() {
    local out_dir="$1"
    local subdirs=(
        "Statistics/Raw_SAM_Stats"
        "Statistics/Sorted_BAM_Stats"
        "Statistics/Finished_BAM_Stats"
        "Intermediates/Sorted"
        "Intermediates/Fixed_Header"
        "Intermediates/Raw_BAM"
    )
    local sub
    for sub in "${subdirs[@]}"; do
        mkdir -p "${out_dir}/SAM_Processing/${sub}"
    done
}
export -f makeOutDir
# Process one SAM file into a deduplicated, indexed BAM plus per-step stats.
# Args: 1) SAM file  2) reference FASTA  3) output root  4) project name.
# Runs once per input file under GNU parallel (each call is its own shell).
function samProcessing() {
    local sam_file="$1"
    local reference="$2"
    local out_dir="$3"
    local project="$4"
    # Sample name, taken from full name of SAM file
    sampleName=$(basename "${sam_file}" .sam)
    # Remove unnecessary information from @PG line
    # Could use sed's in-place option, but that fails on some systems
    # This method bypasses that
    sed 's/-R.*$//' "${sam_file}" > "${out_dir}"/SAM_Processing/Intermediates/Fixed_Header/"${sampleName}"_fixed_header.sam
    # Convert the fixed-header SAM to a raw (unsorted) BAM
    samtools view -bhT "${reference}" "${out_dir}"/SAM_Processing/Intermediates/Fixed_Header/"${sampleName}"_fixed_header.sam > "${out_dir}/SAM_Processing/Intermediates/Raw_BAM/${sampleName}_raw.bam"
    # Create alignment statistics for the raw BAM file
    samtools flagstat "${out_dir}/SAM_Processing/Intermediates/Raw_BAM/${sampleName}_raw.bam" > "${out_dir}/SAM_Processing/Statistics/Raw_SAM_Stats/${sampleName}_raw.txt"
    # Sort the raw BAM file
    samtools sort "${out_dir}/SAM_Processing/Intermediates/Raw_BAM/${sampleName}_raw.bam" > "${out_dir}/SAM_Processing/Intermediates/Sorted/${sampleName}_sorted.bam"
    # Create alignment statistics for the sorted BAM file
    #samtools stats "${out_dir}/SAM_Processing/Intermediates/Sorted/${sampleName}_sorted.bam" > "${out_dir}/SAM_Processing/Statistics/Sorted_BAM_Stats/${sampleName}_sorted.txt"
    samtools flagstat "${out_dir}/SAM_Processing/Intermediates/Sorted/${sampleName}_sorted.bam" > "${out_dir}/SAM_Processing/Statistics/Sorted_BAM_Stats/${sampleName}_sorted.txt"
    # Deduplicate the sorted BAM file
    # NOTE(review): `samtools rmdup` is deprecated in newer samtools releases
    # in favour of `samtools markdup` — confirm the installed version.
    samtools rmdup "${out_dir}/SAM_Processing/Intermediates/Sorted/${sampleName}_sorted.bam" "${out_dir}/SAM_Processing/${sampleName}.bam"
    # Create alignment statistics using SAMTools
    samtools flagstat "${out_dir}/SAM_Processing/${sampleName}.bam" > "${out_dir}/SAM_Processing/Statistics/Finished_BAM_Stats/${sampleName}_finished.txt"
    # Add the data from flagstat to the summary file.
    # These field extractions assume the classic flagstat text layout
    # (percentages in parentheses) — TODO confirm against the samtools version.
    local num_reads=$(head -n 1 "${out_dir}/SAM_Processing/Statistics/Finished_BAM_Stats/${sampleName}_finished.txt" | cut -f 1 -d " ")
    local percent_mapped=$(grep "%" "${out_dir}/SAM_Processing/Statistics/Finished_BAM_Stats/${sampleName}_finished.txt" | head -n 1 | cut -f 2 -d "(" | cut -f 1 -d " ")
    local percent_paired=$(grep "%" "${out_dir}/SAM_Processing/Statistics/Finished_BAM_Stats/${sampleName}_finished.txt" | head -n 2 | tail -n 1 | cut -f 2 -d "(" | cut -f 1 -d " ")
    local percent_singleton=$(grep "%" "${out_dir}/SAM_Processing/Statistics/Finished_BAM_Stats/${sampleName}_finished.txt" | tail -n 1 | cut -f 2 -d "(" | cut -f 1 -d " ")
    local num_split_chr=$(tail -n 2 "${out_dir}/SAM_Processing/Statistics/Finished_BAM_Stats/${sampleName}_finished.txt" | head -n 1 | cut -f 1 -d " ")
    local percent_split_chr=$(echo "${num_split_chr}/${num_reads}" | bc -l)
    # Appends from parallel workers interleave; the summary is re-sorted in main().
    echo -e "${sampleName}\t${num_reads}\t${percent_mapped}\t${percent_paired}\t${percent_singleton}\t${percent_split_chr}" >> "${out_dir}/SAM_Processing/Statistics/${project}_mapping_summary_unfinished.txt"
    # Create an index for our BAM file
    samtools index "${out_dir}/SAM_Processing/${sampleName}.bam"
    # Rename the index file
    mv "${out_dir}/SAM_Processing/${sampleName}.bam.bai" "${out_dir}/SAM_Processing/${sampleName}.bai"
}
# Export the function
export -f samProcessing
# Driver function that runs program
# Driver function that runs program.
# Args: 1) SAM list file  2) output root  3) reference FASTA  4) project name.
# Fans samProcessing out over all SAM files with GNU parallel, then builds a
# sorted mapping summary and a list of finished BAMs.
function main() {
    local sam_list="$1" # What is our list of samples?
    local out_dir="$2" # Where are we storing our results?
    local ref_seq="$3" # What is our reference sequence?
    local project="$4" # What do we call our results?
    makeOutDir "${out_dir}" # Make our outdirectories
    # Create the header for the mapping stats summary file
    echo -e "Sample name\tTotal reads\tPercent mapped\tPercent paired\tPercent singletons\tFraction with mate mapped to different chr" > "${out_dir}/SAM_Processing/Statistics/${project}_mapping_summary_unfinished.txt"
    # Process our SAM file using SAMTools; :::: reads one SAM path per line
    parallel samProcessing {} "${ref_seq}" "${out_dir}" "${project}" :::: "${sam_list}"
    # Sort the mapping stats summary file (parallel appends arrive unordered)
    echo -e "Sample name\tTotal reads\tPercent mapped\tPercent paired\tPercent singletons\tFraction with mate mapped to different chr" > "${out_dir}/SAM_Processing/Statistics/${project}_mapping_summary.txt"
    tail -n +2 "${out_dir}/SAM_Processing/Statistics/${project}_mapping_summary_unfinished.txt" | sort >> "${out_dir}/SAM_Processing/Statistics/${project}_mapping_summary.txt"
    rm "${out_dir}/SAM_Processing/Statistics/${project}_mapping_summary_unfinished.txt"
    # Create a list of finished files
    find "${out_dir}/SAM_Processing" -name "*.bam" | sort > "${out_dir}"/SAM_Processing/"${project}"_BAM_list.txt
    # Remove intermediate files
    rm -rf "${out_dir}/SAM_Processing/Intermediates"
}
# Export the function
export -f main
# Run the program.
# Note the argument order main() expects: SAM_LIST, OUT_DIR, REFERENCE, PROJECT.
main "${SAM_LIST}" "${OUT_DIR}" "${REFERENCE}" "${PROJECT}"
|
package edu.uw.tacoma.piggy.model.dao;
import java.sql.Date;
import java.util.Calendar;
import java.util.List;
import junit.framework.Assert;
import junit.framework.TestCase;
import edu.uw.tacoma.piggy.model.entity.ProjectEntity;
/**
* The test case for the category DAO
* @author <NAME>
*/
public class ProjectDAOTest
    extends TestCase
{
    /**
     * Exercises the full ProjectDAO life cycle: list, insert, update, delete.
     * Assumes the backing store contains exactly 2 projects before the test
     * runs -- TODO confirm the fixture guarantees this.
     */
    public void testProject()
    {
        List<ProjectEntity> list = ProjectDAO.listProject();
        // assertEquals/assertTrue are inherited from TestCase, so the
        // deprecated junit.framework.Assert class is not referenced directly.
        assertEquals("The test list Project method failed ", 2, list.size());

        // Insert a project with initial (empty) field values.
        ProjectEntity entity = new ProjectEntity();
        entity.setProjectID(10);
        entity.setProjectName("");
        entity.setProjectAbbr("");
        entity.setDescription("");
        entity.setStartDate(new Date(Calendar.getInstance().getTime().getTime()));
        entity.setCategoryID(0);
        entity.setDateCreated(new Date(Calendar.getInstance().getTime().getTime()));
        assertTrue("The test insert method failed ", ProjectDAO.insert(entity));

        // Update with *different* field values so a broken update is actually
        // observable (the original re-used identical values, making the
        // update indistinguishable from a no-op).
        entity.setProjectName("Updated project");
        entity.setProjectAbbr("UPD");
        entity.setDescription("Updated description");
        entity.setStartDate(new Date(Calendar.getInstance().getTime().getTime()));
        entity.setCategoryID(0);
        entity.setDateCreated(new Date(Calendar.getInstance().getTime().getTime()));
        assertTrue("The test update method failed ", ProjectDAO.update(entity));

        // Finally remove the row created above so the test leaves no residue.
        assertTrue("The test delete method failed ", ProjectDAO.delete(entity));
    }
}
|
<gh_stars>0
import pickle
import pandas as pd
import tensorflow as tf
from tensorflow import keras
from multiprocessing import freeze_support
from data_gen_predictor import data_iterator_train, data_iterator_test
from src.embed_utils import get_predictor_model
from src.misc_utils import create_folder, save_model_to_json, load_json_model
from src.CONSTS import EMBEDDING_SIZE_PRED, BATCH_SIZE_PRED
class CustomSchedule(tf.keras.optimizers.schedules.LearningRateSchedule):
    """Transformer-style learning-rate schedule with linear warmup.

    The rate grows linearly for ``warmup_steps`` steps, then decays as
    1/sqrt(step), both scaled by 1/sqrt(d_model).
    """

    def __init__(self, d_model=EMBEDDING_SIZE_PRED, warmup_steps=200):
        super(CustomSchedule, self).__init__()
        # Keep the plain Python value for get_config(); use a float tensor
        # for the math below.
        self._d_model_config = d_model
        self.d_model = tf.cast(d_model, tf.float32)
        self.warmup_steps = warmup_steps

    def __call__(self, step):
        # Keras may pass `step` as an integer tensor; rsqrt requires a float,
        # so cast explicitly (as done in the official TF Transformer tutorial).
        step = tf.cast(step, tf.float32)
        arg1 = tf.math.rsqrt(step)
        arg2 = step * (self.warmup_steps ** -1.5)
        return tf.math.rsqrt(self.d_model) * tf.math.minimum(arg1, arg2)

    def get_config(self):
        # Return the raw Python value: the original returned the tf.Tensor
        # stored in self.d_model, which is not serializable when saving.
        config = {
            'd_model': self._d_model_config,
            'warmup_steps': self.warmup_steps,
        }
        return config
def get_optimizer():
    """Build an Adam optimizer driven by the warmup learning-rate schedule."""
    return tf.keras.optimizers.Adam(learning_rate=CustomSchedule())
if __name__ == "__main__":
    # Required on Windows when multiprocessing workers are spawned.
    freeze_support()
    ckpt_path = 'checkpoints/predictor/'
    create_folder(ckpt_path)
    # Keep only the best-by-training-loss weights, checked once per epoch.
    callbacks = [tf.keras.callbacks.ModelCheckpoint(ckpt_path,
                                                    save_freq='epoch',
                                                    save_weights_only=True,
                                                    monitor='loss',
                                                    mode='min',
                                                    save_best_only=True)]
    # Steps per epoch = number of training rows // batch size.
    steps_per_epoch = pd.read_csv('predictor_data/train_data/df_train.csv').shape[0] // BATCH_SIZE_PRED
    with open('predictor_data/test_data/Xy_val.pkl', 'rb') as handle:
        Xy_val = pickle.load(handle)
    # train
    smi_inputs, y_pred = get_predictor_model()
    # NOTE(review): opt_op is unused; model.compile() below builds its own
    # optimizer via get_optimizer().
    opt_op = get_optimizer()
    model = keras.Model(smi_inputs, y_pred)
    model.compile(optimizer=get_optimizer(),
                  loss='mse')
    model.summary()
    model.fit(data_iterator_train(),
              epochs=60,
              validation_data=Xy_val,
              callbacks=callbacks,
              steps_per_epoch=steps_per_epoch)
    res = model.evaluate(data_iterator_test('predictor_data/test_data/df_test.csv'),
                         return_dict=True)
    # Persist weights and architecture separately.
    model.save_weights("./predictor_weights/predictor")
    create_folder("predictor_model")
    save_model_to_json(model, "predictor_model/predictor_model.json")
    # Round-trip check: reload architecture + weights and re-evaluate.
    model_new = load_json_model("predictor_model/predictor_model.json")
    model_new.compile(optimizer=get_optimizer(),
                      loss='mse')
    model_new.load_weights("./predictor_weights/predictor")
    res = model_new.evaluate(data_iterator_test('predictor_data/test_data/df_test.csv'),
                             return_dict=True)
|
<reponame>Inego/smallrye-mutiny
package io.smallrye.mutiny.tuples;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.function.Function;
/**
 * An immutable tuple of five heterogeneous items, extending {@link Tuple4}
 * with a fifth element.
 */
public class Tuple5<T1, T2, T3, T4, T5> extends Tuple4<T1, T2, T3, T4> implements Tuple {

    final T5 item5;

    Tuple5(T1 first, T2 second, T3 third, T4 fourth, T5 fifth) {
        super(first, second, third, fourth);
        this.item5 = fifth;
    }

    /** Creates a tuple from the five given items. */
    public static <T1, T2, T3, T4, T5> Tuple5<T1, T2, T3, T4, T5> of(T1 a, T2 b, T3 c, T4 d, T5 e) {
        return new Tuple5<>(a, b, c, d, e);
    }

    /** Returns the fifth item. */
    public T5 getItem5() {
        return item5;
    }

    @Override
    public <T> Tuple5<T, T2, T3, T4, T5> mapItem1(Function<T1, T> mapper) {
        return new Tuple5<>(mapper.apply(item1), item2, item3, item4, item5);
    }

    @Override
    public <T> Tuple5<T1, T, T3, T4, T5> mapItem2(Function<T2, T> mapper) {
        return new Tuple5<>(item1, mapper.apply(item2), item3, item4, item5);
    }

    @Override
    public <T> Tuple5<T1, T2, T, T4, T5> mapItem3(Function<T3, T> mapper) {
        return new Tuple5<>(item1, item2, mapper.apply(item3), item4, item5);
    }

    @Override
    public <T> Tuple5<T1, T2, T3, T, T5> mapItem4(Function<T4, T> mapper) {
        return new Tuple5<>(item1, item2, item3, mapper.apply(item4), item5);
    }

    /** Returns a copy with the fifth item transformed by {@code mapper}. */
    public <T> Tuple5<T1, T2, T3, T4, T> mapItem5(Function<T5, T> mapper) {
        return new Tuple5<>(item1, item2, item3, item4, mapper.apply(item5));
    }

    @Override
    public Object nth(int index) {
        assertIndexInBounds(index);
        // The last position holds item5; anything before it is in the parent.
        return index == size() - 1 ? item5 : super.nth(index);
    }

    @Override
    public List<Object> asList() {
        return Arrays.asList(item1, item2, item3, item4, item5);
    }

    @Override
    public int size() {
        return 5;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("Tuple{");
        sb.append("item1=").append(item1);
        sb.append(",item2=").append(item2);
        sb.append(",item3=").append(item3);
        sb.append(",item4=").append(item4);
        sb.append(",item5=").append(item5);
        return sb.append('}').toString();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        // Exact-class comparison plus the first four items via the parent.
        if (o == null || getClass() != o.getClass() || !super.equals(o)) {
            return false;
        }
        Tuple5<?, ?, ?, ?, ?> other = (Tuple5<?, ?, ?, ?, ?>) o;
        return Objects.equals(item5, other.item5);
    }

    @Override
    public int hashCode() {
        return Objects.hash(super.hashCode(), item5);
    }
}
|
#!/bin/sh
# Wait for the desktop session to finish starting before launching the app
# (this script is presumably run at login -- TODO confirm).
sleep 15
# Launch the Poll Electron app with its bundled electron binary.
/home/studio/WeAreNature/FutureAction/Poll/node_modules/electron-prebuilt/dist/electron /home/studio/WeAreNature/FutureAction/Poll/
|
#!/bin/sh
# CocoaPods "Embed Frameworks" build phase script: copies vendored frameworks
# into the app bundle, strips architectures not being built, and re-signs.
# NOTE(review): the shebang says /bin/sh but the script uses bash features
# (`function`, arrays, `[[ ]]`, `set -o pipefail`) -- it relies on /bin/sh
# being bash on macOS; confirm before running elsewhere.
set -e
set -u
set -o pipefail

function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}

trap 'on_error $LINENO' ERR

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")

# Copies and strips a vendored framework
install_framework()
{
  # Resolve the framework source: prefer the built-products dir, then the
  # basename inside it, then the literal argument.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"

  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}

# Copies and strips a vendored dSYM
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    # Copy the dSYM into a the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

    local basename
    basename="$(basename -s .framework.dSYM "$source")"
    binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"

    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
      strip_invalid_archs "$binary"
    fi

    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
    fi
  fi
}

# Copies the bcsymbolmap files of a vendored framework
install_bcsymbolmap() {
    local bcsymbolmap_path="$1"
    local destination="${BUILT_PRODUCTS_DIR}"
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}

# Signs a framework with the provided identity
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"

    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      # Sign in the background; the trailing `wait` at the bottom joins them.
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}

# Strip invalid architectures
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=1
}

# Embed the pod frameworks for the active configuration.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/FaceSDKForQNJ/FaceSDKForQNJ.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/FaceSDKForQNJ/FaceSDKForQNJ.framework"
fi
# Join any backgrounded codesign jobs before the build phase ends.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
<reponame>PinoEire/archi
/**
* This program and the accompanying materials
* are made available under the terms of the License
* which accompanies this distribution in the file LICENSE.txt
*/
package com.archimatetool.example;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.osgi.util.NLS;
import org.eclipse.swt.SWT;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.FileDialog;
import com.archimatetool.editor.model.IModelExporter;
import com.archimatetool.model.FolderType;
import com.archimatetool.model.IArchimateConcept;
import com.archimatetool.model.IArchimateModel;
import com.archimatetool.model.IFolder;
/**
* Example Exporter of Archimate model
*
* @author <NAME>
*/
public class MyExporter implements IModelExporter {

    String MY_EXTENSION = ".mex"; //$NON-NLS-1$
    String MY_EXTENSION_WILDCARD = "*.mex"; //$NON-NLS-1$

    private OutputStreamWriter writer;

    public MyExporter() {
    }

    /**
     * Exports all concept folders of the model to a CSV-like .mex file chosen
     * by the user. The writer is now closed in a finally block so the file
     * handle is released even if writing one of the folders throws.
     */
    @Override
    public void export(IArchimateModel model) throws IOException {
        File file = askSaveFile();
        if(file == null) {
            return;
        }

        // NOTE(review): uses the platform default charset -- confirm whether
        // the .mex format should instead be written as UTF-8 explicitly.
        writer = new OutputStreamWriter(new FileOutputStream(file));

        try {
            // Write every top-level folder in a fixed order
            writeFolder(model.getFolder(FolderType.STRATEGY));
            writeFolder(model.getFolder(FolderType.BUSINESS));
            writeFolder(model.getFolder(FolderType.APPLICATION));
            writeFolder(model.getFolder(FolderType.TECHNOLOGY));
            writeFolder(model.getFolder(FolderType.MOTIVATION));
            writeFolder(model.getFolder(FolderType.IMPLEMENTATION_MIGRATION));
            writeFolder(model.getFolder(FolderType.OTHER));
            writeFolder(model.getFolder(FolderType.RELATIONS));
        }
        finally {
            // BUG FIX: previously a failure while writing leaked the stream
            writer.close();
        }
    }

    /**
     * Writes one line per Archimate concept found in the folder (recursively):
     * class name, concept name and documentation, each quoted.
     */
    private void writeFolder(IFolder folder) throws IOException {
        List<EObject> list = new ArrayList<EObject>();

        getElements(folder, list);

        for(EObject eObject : list) {
            if(eObject instanceof IArchimateConcept) {
                IArchimateConcept concept = (IArchimateConcept)eObject;
                String string = normalise(concept.eClass().getName()) +
                        "," + normalise(concept.getName()) //$NON-NLS-1$
                        + "," + normalise(concept.getDocumentation()); //$NON-NLS-1$
                writer.write(string + "\n"); //$NON-NLS-1$
            }
        }
    }

    /**
     * Collects all elements of the folder and its sub-folders into list.
     */
    private void getElements(IFolder folder, List<EObject> list) {
        for(EObject object : folder.getElements()) {
            list.add(object);
        }

        for(IFolder f : folder.getFolders()) {
            getElements(f, list);
        }
    }

    /**
     * Flattens line breaks and wraps the value in double quotes; null becomes "".
     */
    private String normalise(String s) {
        if(s == null) {
            return ""; //$NON-NLS-1$
        }

        s = s.replace("\r\n", " "); //$NON-NLS-1$ //$NON-NLS-2$

        s = "\"" + s + "\""; //$NON-NLS-1$ //$NON-NLS-2$

        return s;
    }

    /**
     * Ask user for file name to save to
     */
    private File askSaveFile() {
        FileDialog dialog = new FileDialog(Display.getCurrent().getActiveShell(), SWT.SAVE);
        dialog.setText(Messages.MyExporter_0);
        dialog.setFilterExtensions(new String[] { MY_EXTENSION_WILDCARD, "*.*" } ); //$NON-NLS-1$
        String path = dialog.open();
        if(path == null) {
            return null;
        }

        // Only Windows adds the extension by default
        if(dialog.getFilterIndex() == 0 && !path.endsWith(MY_EXTENSION)) {
            path += MY_EXTENSION;
        }

        File file = new File(path);

        // Make sure the file does not already exist
        if(file.exists()) {
            boolean result = MessageDialog.openQuestion(Display.getCurrent().getActiveShell(),
                    Messages.MyExporter_0,
                    NLS.bind(Messages.MyExporter_1, file));
            if(!result) {
                return null;
            }
        }

        return file;
    }
}
|
<reponame>Sasha7b9Work/S8-53M2
///////////////////////////////////////////////////////////////////////////////
// Name: src/common/webrequest_curl.h
// Purpose: wxWebRequest implementation using libcurl
// Author: <NAME>
// Created: 2018-10-25
// Copyright: (c) 2018 wxWidgets development team
// Licence: wxWindows licence
///////////////////////////////////////////////////////////////////////////////
// For compilers that support precompilation, includes "wx.h".
#include "wx/wxprec.h"
#include "wx/webrequest.h"
#if wxUSE_WEBREQUEST && wxUSE_WEBREQUEST_CURL
#include "wx/private/webrequest_curl.h"
#ifndef WX_PRECOMP
#include "wx/log.h"
#include "wx/translation.h"
#include "wx/utils.h"
#endif
#include "wx/uri.h"
#include "wx/private/socket.h"
#include "wx/evtloop.h"
#ifdef __WINDOWS__
#include "wx/hashset.h"
#include "wx/msw/wrapwin.h"
#else
#include "wx/evtloopsrc.h"
#include "wx/evtloop.h"
#endif
// Define symbols that might be missing from older libcurl headers
#ifndef CURL_AT_LEAST_VERSION
#define CURL_VERSION_BITS(x,y,z) ((x)<<16|(y)<<8|z)
#define CURL_AT_LEAST_VERSION(x,y,z) \
(LIBCURL_VERSION_NUM >= CURL_VERSION_BITS(x, y, z))
#endif
// The new name was introduced in curl 7.21.6.
#ifndef CURLOPT_ACCEPT_ENCODING
#define CURLOPT_ACCEPT_ENCODING CURLOPT_ENCODING
#endif
//
// wxWebResponseCURL
//
// libcurl write callback: forwards a received body chunk to the response.
static size_t wxCURLWriteData(void* buffer, size_t size, size_t nmemb, void* userdata)
{
    wxCHECK_MSG( userdata, 0, "invalid curl write callback data" );

    wxWebResponseCURL* const response = static_cast<wxWebResponseCURL*>(userdata);
    return response->CURLOnWrite(buffer, size * nmemb);
}
// libcurl header callback: forwards one received header line to the response.
static size_t wxCURLHeader(char *buffer, size_t size, size_t nitems, void *userdata)
{
    wxCHECK_MSG( userdata, 0, "invalid curl header callback data" );

    wxWebResponseCURL* const response = static_cast<wxWebResponseCURL*>(userdata);
    return response->CURLOnHeader(buffer, size * nitems);
}
static int wxCURLXferInfo(void* clientp, curl_off_t dltotal,
curl_off_t WXUNUSED(dlnow),
curl_off_t WXUNUSED(ultotal),
curl_off_t WXUNUSED(ulnow))
{
wxCHECK_MSG( clientp, 0, "invalid curl progress callback data" );
wxWebResponseCURL* response = reinterpret_cast<wxWebResponseCURL*>(clientp);
return response->CURLOnProgress(dltotal);
}
static int wxCURLProgress(void* clientp, double dltotal, double dlnow,
double ultotal, double ulnow)
{
return wxCURLXferInfo(clientp, static_cast<curl_off_t>(dltotal),
static_cast<curl_off_t>(dlnow),
static_cast<curl_off_t>(ultotal),
static_cast<curl_off_t>(ulnow));
}
// Wires this response object into the request's curl easy handle:
// write/header/progress callbacks all route back to *this.
wxWebResponseCURL::wxWebResponseCURL(wxWebRequestCURL& request) :
    wxWebResponseImpl(request)
{
    m_knownDownloadSize = 0;

    // Deliver body data and header lines to this object.
    curl_easy_setopt(GetHandle(), CURLOPT_WRITEDATA, static_cast<void*>(this));
    curl_easy_setopt(GetHandle(), CURLOPT_HEADERDATA, static_cast<void*>(this));

    // Set the progress callback. The XFERINFO variant exists since curl 7.32;
    // check both compile-time and run-time versions before using it.
#if CURL_AT_LEAST_VERSION(7, 32, 0)
    if ( wxWebSessionCURL::CurlRuntimeAtLeastVersion(7, 32, 0) )
    {
        curl_easy_setopt(GetHandle(), CURLOPT_XFERINFOFUNCTION,
                         wxCURLXferInfo);
        curl_easy_setopt(GetHandle(), CURLOPT_XFERINFODATA,
                         static_cast<void*>(this));
    }
    else
#endif
    {
        // Fall back to the deprecated double-based progress callback.
        curl_easy_setopt(GetHandle(), CURLOPT_PROGRESSFUNCTION,
                         wxCURLProgress);
        curl_easy_setopt(GetHandle(), CURLOPT_PROGRESSDATA,
                         static_cast<void*>(this));
    }

    // Have curl call the progress callback.
    curl_easy_setopt(GetHandle(), CURLOPT_NOPROGRESS, 0L);

    Init();
}
// Copy a received body chunk into our storage buffer and account for it.
size_t wxWebResponseCURL::CURLOnWrite(void* buffer, size_t size)
{
    void* const dst = GetDataBuffer(size);
    memcpy(dst, buffer, size);
    ReportDataReceived(size);

    // Returning the full size tells libcurl the chunk was consumed.
    return size;
}
// Parses one raw header line from libcurl: the status line resets the header
// map (each redirect/continue response starts over), other lines are split
// into name/value and stored with an upper-cased name.
size_t wxWebResponseCURL::CURLOnHeader(const char * buffer, size_t size)
{
    // HTTP headers are supposed to only contain ASCII data, so any encoding
    // should work here, but use Latin-1 for compatibility with some servers
    // that send it directly and to at least avoid losing data entirely when
    // the current encoding is UTF-8 but the input doesn't decode correctly.
    wxString hdr = wxString::From8BitData(buffer, size);
    hdr.Trim();

    if ( hdr.StartsWith("HTTP/") )
    {
        // First line of the headers contains status text after
        // version and status
        m_statusText = hdr.AfterFirst(' ').AfterFirst(' ');
        m_headers.clear();
    }
    else if ( !hdr.empty() )
    {
        wxString hdrValue;
        // Upper-case the name for case-insensitive lookup in GetHeader().
        wxString hdrName = hdr.BeforeFirst(':', &hdrValue).Strip(wxString::trailing);
        hdrName.MakeUpper();
        m_headers[hdrName] = hdrValue.Strip(wxString::leading);
    }

    return size;
}
// Called by the curl progress callback with the expected download total;
// pre-allocates the in-memory buffer once the size becomes known.
int wxWebResponseCURL::CURLOnProgress(curl_off_t total)
{
    if ( m_knownDownloadSize == total )
        return 0;

    if ( m_request.GetStorage() == wxWebRequest::Storage_Memory )
    {
        PreAllocBuffer(static_cast<size_t>(total));
    }
    m_knownDownloadSize = total;

    return 0;
}
// Reported Content-Length of the response body, or a curl-provided estimate.
wxFileOffset wxWebResponseCURL::GetContentLength() const
{
// Prefer the 64-bit *_T variant available since curl 7.55.
#if CURL_AT_LEAST_VERSION(7, 55, 0)
    curl_off_t len = 0;
    curl_easy_getinfo(GetHandle(), CURLINFO_CONTENT_LENGTH_DOWNLOAD_T, &len);
    return len;
#else
    // Older curl only exposes the length as a double.
    double len = 0;
    curl_easy_getinfo(GetHandle(), CURLINFO_CONTENT_LENGTH_DOWNLOAD, &len);
    return (wxFileOffset)len;
#endif
}
// Returns the effective URL, i.e. the final URL after any redirects.
wxString wxWebResponseCURL::GetURL() const
{
    char* urlp = NULL;
    curl_easy_getinfo(GetHandle(), CURLINFO_EFFECTIVE_URL, &urlp);

    // While URLs should contain ASCII characters only as per
    // https://tools.ietf.org/html/rfc3986#section-2 we still want to avoid
    // losing data if they somehow contain something else but are not in UTF-8
    // by interpreting it as Latin-1.
    return wxString::From8BitData(urlp);
}
// Case-insensitive header lookup; returns an empty string when absent.
wxString wxWebResponseCURL::GetHeader(const wxString& name) const
{
    // Headers were stored upper-cased in CURLOnHeader(), so normalize the key.
    const wxWebRequestHeaderMap::const_iterator it = m_headers.find(name.Upper());
    return it == m_headers.end() ? wxString() : it->second;
}
// HTTP response code of the last transfer (0 if none was received).
int wxWebResponseCURL::GetStatus() const
{
    long code = 0;
    curl_easy_getinfo(GetHandle(), CURLINFO_RESPONSE_CODE, &code);
    return static_cast<int>(code);
}
//
// wxWebRequestCURL
//
// libcurl read callback: asks the request object to fill the upload buffer.
static size_t wxCURLRead(char *buffer, size_t size, size_t nitems, void *userdata)
{
    wxCHECK_MSG( userdata, 0, "invalid curl read callback data" );

    wxWebRequestCURL* const request = static_cast<wxWebRequestCURL*>(userdata);
    return request->CURLOnRead(buffer, size * nitems);
}
// Creates the curl easy handle for this request and configures the options
// common to all transfers (callbacks, redirects, auth, compression).
wxWebRequestCURL::wxWebRequestCURL(wxWebSession & session,
                                   wxWebSessionCURL& sessionImpl,
                                   wxEvtHandler* handler,
                                   const wxString & url,
                                   int id):
    wxWebRequestImpl(session, sessionImpl, handler, id),
    m_sessionImpl(sessionImpl)
{
    m_headerList = NULL;

    m_handle = curl_easy_init();
    if ( !m_handle )
    {
        // No handle: record the failure for GetError() and bail out early.
        wxStrlcpy(m_errorBuffer, "libcurl initialization failed", CURL_ERROR_SIZE);
        return;
    }

    // Set error buffer to get more detailed CURL status
    m_errorBuffer[0] = '\0';
    curl_easy_setopt(m_handle, CURLOPT_ERRORBUFFER, m_errorBuffer);

    // Set URL to handle: note that we must use wxURI to escape characters not
    // allowed in the URLs correctly (URL API is only available in libcurl
    // since the relatively recent v7.62.0, so we don't want to rely on it).
    curl_easy_setopt(m_handle, CURLOPT_URL, wxURI(url).BuildURI().utf8_str().data())
;
    // Set callback functions
    curl_easy_setopt(m_handle, CURLOPT_WRITEFUNCTION, wxCURLWriteData);
    curl_easy_setopt(m_handle, CURLOPT_HEADERFUNCTION, wxCURLHeader);
    curl_easy_setopt(m_handle, CURLOPT_READFUNCTION, wxCURLRead);
    curl_easy_setopt(m_handle, CURLOPT_READDATA, static_cast<void*>(this));

    // Enable gzip, etc decompression
    curl_easy_setopt(m_handle, CURLOPT_ACCEPT_ENCODING, "");

    // Enable redirection handling
    curl_easy_setopt(m_handle, CURLOPT_FOLLOWLOCATION, 1L);

    // Limit redirect to HTTP
    curl_easy_setopt(m_handle, CURLOPT_REDIR_PROTOCOLS,
                     CURLPROTO_HTTP | CURLPROTO_HTTPS);

    // Enable all supported authentication methods
    curl_easy_setopt(m_handle, CURLOPT_HTTPAUTH, CURLAUTH_ANY);
    curl_easy_setopt(m_handle, CURLOPT_PROXYAUTH, CURLAUTH_ANY);
}
// Frees the custom-header list, then notifies the owning session so it can
// stop tracking this request.
wxWebRequestCURL::~wxWebRequestCURL()
{
    DestroyHeaderList();

    m_sessionImpl.RequestHasTerminated(this);
}
// Finishes per-request configuration (method, body size, headers, TLS
// verification) and submits the transfer to the session.
void wxWebRequestCURL::Start()
{
    m_response.reset(new wxWebResponseCURL(*this));

    if ( m_dataSize )
    {
        // A body was supplied: default to POST when no method was set.
        if ( m_method.empty() || m_method.CmpNoCase("POST") == 0 )
        {
            curl_easy_setopt(m_handle, CURLOPT_POSTFIELDSIZE_LARGE,
                             static_cast<curl_off_t>(m_dataSize));
            curl_easy_setopt(m_handle, CURLOPT_POST, 1L);
        }
        else if ( m_method.CmpNoCase("PUT") == 0 )
        {
            curl_easy_setopt(m_handle, CURLOPT_UPLOAD, 1L);
            curl_easy_setopt(m_handle, CURLOPT_INFILESIZE_LARGE,
                             static_cast<curl_off_t>(m_dataSize));
        }
        else
        {
            // Any other method with a body is unsupported here.
            wxFAIL_MSG(wxString::Format(
                "Supplied data is ignored when using method %s", m_method
            ));
        }
    }

    if ( m_method.CmpNoCase("HEAD") == 0 )
    {
        curl_easy_setopt(m_handle, CURLOPT_NOBODY, 1L);
    }
    else if ( !m_method.empty() )
    {
        curl_easy_setopt(m_handle, CURLOPT_CUSTOMREQUEST,
                         static_cast<const char*>(m_method.mb_str()));
    }

    for ( wxWebRequestHeaderMap::const_iterator it = m_headers.begin();
          it != m_headers.end(); ++it )
    {
        // TODO: We need to implement RFC 2047 encoding here instead of blindly
        //       sending UTF-8 which is against the standard.
        wxString hdrStr = wxString::Format("%s: %s", it->first, it->second);
        m_headerList = curl_slist_append(m_headerList, hdrStr.utf8_str());
    }
    curl_easy_setopt(m_handle, CURLOPT_HTTPHEADER, m_headerList);

    if ( IsPeerVerifyDisabled() )
        curl_easy_setopt(m_handle, CURLOPT_SSL_VERIFYPEER, 0);

    StartRequest();
}
// (Re)submits this request to the session; used for the initial start and
// for retries after an authentication challenge.
bool wxWebRequestCURL::StartRequest()
{
    m_bytesSent = 0;

    const bool started = m_sessionImpl.StartRequest(*this);
    if ( !started )
        SetState(wxWebRequest::State_Failed);

    return started;
}
// Cancellation is delegated to the session, which manages running transfers.
void wxWebRequestCURL::DoCancel()
{
    m_sessionImpl.CancelRequest(this);
}
// Maps the final HTTP status to a request state: 0 means no response was
// received at all (transport failure), 401/407 raise an auth challenge,
// anything else is resolved from the status code.
void wxWebRequestCURL::HandleCompletion()
{
    int status = m_response ? m_response->GetStatus() : 0;

    if ( status == 0 )
    {
        SetState(wxWebRequest::State_Failed, GetError());
    }
    else if ( status == 401 || status == 407 )
    {
        // 401 = server authentication required, 407 = proxy authentication.
        m_authChallenge.reset(new wxWebAuthChallengeCURL(
            (status == 407) ? wxWebAuthChallenge::Source_Proxy : wxWebAuthChallenge::Source_Server, *this));
        SetState(wxWebRequest::State_Unauthorized, m_response->GetStatusText());
    }
    else
    {
        SetFinalStateFromStatus();
    }
}
// Human-readable description of the last libcurl error, if any.
wxString wxWebRequestCURL::GetError() const
{
    // We don't know what encoding is used for libcurl errors, so do whatever
    // is needed in order to interpret this data at least somehow.
    return wxString(m_errorBuffer, wxConvWhateverWorks);
}
// libcurl read callback: fill "buffer" with up to "size" bytes of request
// body data from our stream and return the number of bytes provided.
// Returns 0 (end of data) when there is no stream at all.
size_t wxWebRequestCURL::CURLOnRead(char* buffer, size_t size)
{
    if ( !m_dataStream )
        return 0;

    m_dataStream->Read(buffer, size);

    const size_t lastRead = m_dataStream->LastRead();
    m_bytesSent += lastRead;
    return lastRead;
}
// Free the curl_slist of custom headers, if one was built for this request.
void wxWebRequestCURL::DestroyHeaderList()
{
    if ( m_headerList )
    {
        curl_slist_free_all(m_headerList);
        m_headerList = NULL;
    }
}
// Number of request body bytes handed to libcurl so far.
wxFileOffset wxWebRequestCURL::GetBytesSent() const
{
    return m_bytesSent;
}

// Total size of the request body, recorded when the request was configured.
wxFileOffset wxWebRequestCURL::GetBytesExpectedToSend() const
{
    return m_dataSize;
}
//
// wxWebAuthChallengeCURL
//

wxWebAuthChallengeCURL::wxWebAuthChallengeCURL(wxWebAuthChallenge::Source source,
                                               wxWebRequestCURL& request) :
    wxWebAuthChallengeImpl(source),
    m_request(request)
{
}

// Apply the given credentials to the underlying curl handle and restart
// the request.
void wxWebAuthChallengeCURL::SetCredentials(const wxWebCredentials& cred)
{
    // Build the "user:password" string expected by libcurl, using
    // wxSecretString so the password is wiped from memory afterwards.
    const wxSecretString authStr =
        wxString::Format
        (
            "%s:%s",
            cred.GetUser(),
            static_cast<const wxString&>(wxSecretString(cred.GetPassword()))
        );
    // Proxy challenges use CURLOPT_PROXYUSERPWD, server ones CURLOPT_USERPWD.
    curl_easy_setopt(m_request.GetHandle(),
                     (GetSource() == wxWebAuthChallenge::Source_Proxy) ? CURLOPT_PROXYUSERPWD : CURLOPT_USERPWD,
                     authStr.utf8_str().data());
    m_request.StartRequest();
}
//
// SocketPoller - a helper class for wxWebSessionCURL
//

// Event sent to the owning handler whenever a polled socket becomes ready.
// Payload: the socket; int: a SocketPoller::Result bitmask.
wxDECLARE_EVENT(wxEVT_SOCKET_POLLER_RESULT, wxThreadEvent);

class SocketPollerImpl;

// Monitors sockets for read/write readiness on behalf of wxWebSessionCURL
// and reports activity via wxEVT_SOCKET_POLLER_RESULT events. The actual
// work is delegated to a platform-specific SocketPollerImpl.
class SocketPoller
{
public:
    // Bitmask of conditions to poll a socket for.
    enum PollAction
    {
        INVALID_ACTION = 0x00,
        POLL_FOR_READ = 0x01,
        POLL_FOR_WRITE = 0x02
    };

    // Bitmask describing the detected socket state.
    enum Result
    {
        INVALID_RESULT = 0x00,
        READY_FOR_READ = 0x01,
        READY_FOR_WRITE = 0x02,
        HAS_ERROR = 0x04
    };

    SocketPoller(wxEvtHandler*);
    ~SocketPoller();
    bool StartPolling(wxSOCKET_T, int);
    void StopPolling(wxSOCKET_T);
    void ResumePolling(wxSOCKET_T);

private:
    SocketPollerImpl* m_impl;
};
wxDEFINE_EVENT(wxEVT_SOCKET_POLLER_RESULT, wxThreadEvent);

// Interface implemented by the platform-specific socket pollers below.
class SocketPollerImpl
{
public:
    virtual ~SocketPollerImpl(){}
    virtual bool StartPolling(wxSOCKET_T, int) = 0;
    virtual void StopPolling(wxSOCKET_T) = 0;
    virtual void ResumePolling(wxSOCKET_T) = 0;

    // Factory creating the implementation appropriate for this platform.
    static SocketPollerImpl* Create(wxEvtHandler*);
};
SocketPoller::SocketPoller(wxEvtHandler* hndlr)
{
    m_impl = SocketPollerImpl::Create(hndlr);
}

SocketPoller::~SocketPoller()
{
    delete m_impl;
}

// The three methods below simply forward to the platform implementation.
bool SocketPoller::StartPolling(wxSOCKET_T sock, int pollAction)
{
    return m_impl->StartPolling(sock, pollAction);
}

void SocketPoller::StopPolling(wxSOCKET_T sock)
{
    m_impl->StopPolling(sock);
}

void SocketPoller::ResumePolling(wxSOCKET_T sock)
{
    m_impl->ResumePolling(sock);
}
#ifdef __WINDOWS__
// SocketPollerImpl based on the WinSock 1 WSAAsyncSelect() API: socket
// activity is delivered as window messages to a hidden message-only window.
class WinSock1SocketPoller: public SocketPollerImpl
{
public:
    WinSock1SocketPoller(wxEvtHandler*);
    virtual ~WinSock1SocketPoller();
    virtual bool StartPolling(wxSOCKET_T, int) wxOVERRIDE;
    virtual void StopPolling(wxSOCKET_T) wxOVERRIDE;
    virtual void ResumePolling(wxSOCKET_T) wxOVERRIDE;

private:
    // Window procedure receiving the socket notification messages.
    static LRESULT CALLBACK MsgProc(HWND hwnd, WXUINT uMsg, WXWPARAM wParam,
                                    WXLPARAM lParam);

    // Private message id used for the socket notifications.
    static const WXUINT SOCKET_MESSAGE;

    WX_DECLARE_HASH_SET(wxSOCKET_T, wxIntegerHash, wxIntegerEqual, SocketSet);

    // Sockets currently being monitored.
    SocketSet m_polledSockets;

    // Hidden message-only window receiving the notifications.
    WXHWND m_hwnd;
};

const WXUINT WinSock1SocketPoller::SOCKET_MESSAGE = WM_USER + 1;
// Creates the hidden message window used to receive WSAAsyncSelect()
// notifications and stores the target event handler in its user data.
WinSock1SocketPoller::WinSock1SocketPoller(wxEvtHandler* hndlr)
{
    // Initialize winsock in case it's not already done.
    WORD wVersionRequested = MAKEWORD(1,1);
    WSADATA wsaData;
    WSAStartup(wVersionRequested, &wsaData);

    // Create a dummy message only window.
    m_hwnd = CreateWindowEx(
        0,              //DWORD dwExStyle,
        TEXT("STATIC"), //LPCSTR lpClassName,
        NULL,           //LPCSTR lpWindowName,
        0,              //DWORD dwStyle,
        0,              //int X,
        0,              //int Y,
        0,              //int nWidth,
        0,              //int nHeight,
        HWND_MESSAGE,   //HWND hWndParent,
        NULL,           //HMENU hMenu,
        NULL,           //HINSTANCE hInstance,
        NULL            //LPVOID lpParam
    );

    if ( m_hwnd == NULL )
    {
        wxLogError("Unable to create message window for WinSock1SocketPoller");
        return;
    }

    // Set the event handler to be the message window's user data. Also set the
    // message window to use our MsgProc to process messages it receives.
    SetWindowLongPtr(m_hwnd, GWLP_USERDATA, reinterpret_cast<LONG_PTR>(hndlr));
    SetWindowLongPtr(m_hwnd, GWLP_WNDPROC,
                     reinterpret_cast<LONG_PTR>(WinSock1SocketPoller::MsgProc));
}

// Stops monitoring all remaining sockets, destroys the message window and
// shuts winsock down again.
WinSock1SocketPoller::~WinSock1SocketPoller()
{
    // Stop monitoring any leftover sockets.
    for ( SocketSet::iterator it = m_polledSockets.begin() ;
          it != m_polledSockets.end() ; ++it )
    {
        WSAAsyncSelect(*it, m_hwnd, 0, 0);
    }

    // Close the message window.
    if ( m_hwnd )
    {
        CloseWindow(m_hwnd);
    }

    // Cleanup winsock.
    WSACleanup();
}
// Begin monitoring the given socket for the conditions in pollAction.
// Any previous registration for the same socket is replaced.
bool WinSock1SocketPoller::StartPolling(wxSOCKET_T sock, int pollAction)
{
    StopPolling(sock);

    // Convert pollAction to a flag that can be used by winsock.
    int winActions = 0;
    if ( pollAction & SocketPoller::POLL_FOR_READ )
    {
        winActions |= FD_READ;
    }
    if ( pollAction & SocketPoller::POLL_FOR_WRITE )
    {
        winActions |= FD_WRITE;
    }

    // Have winsock send a message to our window whenever activity is
    // detected on the socket.
    WSAAsyncSelect(sock, m_hwnd, SOCKET_MESSAGE, winActions);

    m_polledSockets.insert(sock);
    return true;
}

// Stop monitoring the given socket, if it is currently registered.
void WinSock1SocketPoller::StopPolling(wxSOCKET_T sock)
{
    SocketSet::iterator it = m_polledSockets.find(sock);
    if ( it != m_polledSockets.end() )
    {
        // Stop sending messages when there is activity on the socket.
        WSAAsyncSelect(sock, m_hwnd, 0, 0);
        m_polledSockets.erase(it);
    }
}

// Nothing to do: WSAAsyncSelect() notifications re-arm automatically.
void WinSock1SocketPoller::ResumePolling(wxSOCKET_T WXUNUSED(sock))
{
}
// Window procedure of the hidden message window: translate winsock's
// notification message into a wxEVT_SOCKET_POLLER_RESULT event delivered to
// the handler stored in the window's user data.
LRESULT CALLBACK WinSock1SocketPoller::MsgProc(WXHWND hwnd, WXUINT uMsg,
                                               WXWPARAM wParam, WXLPARAM lParam)
{
    // We only handle 1 message - the message we told winsock to send when
    // it notices activity on sockets we are monitoring.
    if ( uMsg == SOCKET_MESSAGE )
    {
        // Extract the result and any errors from lParam.
        int winResult = LOWORD(lParam);
        int error = HIWORD(lParam);

        // Convert the result/errors to a SocketPoller::Result flag.
        int pollResult = 0;
        if ( winResult & FD_READ )
        {
            pollResult |= SocketPoller::READY_FOR_READ;
        }
        if ( winResult & FD_WRITE )
        {
            pollResult |= SocketPoller::READY_FOR_WRITE;
        }
        if ( error != 0 )
        {
            pollResult |= SocketPoller::HAS_ERROR;
        }

        // If there is a significant result, send an event.
        if ( pollResult != 0 )
        {
            // The event handler is stored in the window's user data and the
            // socket with activity is given by wParam.
            LONG_PTR userData = GetWindowLongPtr(hwnd, GWLP_USERDATA);
            wxEvtHandler* hndlr = reinterpret_cast<wxEvtHandler*>(userData);
            wxSOCKET_T sock = wParam;

            wxThreadEvent* event =
                new wxThreadEvent(wxEVT_SOCKET_POLLER_RESULT);
            event->SetPayload<wxSOCKET_T>(sock);
            event->SetInt(pollResult);

            // Deliver synchronously on the main thread, queue otherwise.
            if ( wxThread::IsMain() )
            {
                hndlr->ProcessEvent(*event);
                delete event;
            }
            else
            {
                wxQueueEvent(hndlr, event);
            }
        }

        return 0;
    }
    else
    {
        return DefWindowProc(hwnd, uMsg, wParam, lParam);
    }
}
// On Windows use the WSAAsyncSelect()-based poller.
SocketPollerImpl* SocketPollerImpl::Create(wxEvtHandler* hndlr)
{
    return new WinSock1SocketPoller(hndlr);
}
#else
// SocketPollerSourceHandler - a source handler used by the SocketPoller class.
// Translates event loop source callbacks for one socket into
// wxEVT_SOCKET_POLLER_RESULT events sent to the owning handler.
class SocketPollerSourceHandler: public wxEventLoopSourceHandler
{
public:
    SocketPollerSourceHandler(wxSOCKET_T, wxEvtHandler*);

    void OnReadWaiting() wxOVERRIDE;
    void OnWriteWaiting() wxOVERRIDE;
    void OnExceptionWaiting() wxOVERRIDE;
    ~SocketPollerSourceHandler(){}
private:
    void SendEvent(int);
    wxSOCKET_T m_socket;
    wxEvtHandler* m_handler;
};

SocketPollerSourceHandler::SocketPollerSourceHandler(wxSOCKET_T sock,
                                                     wxEvtHandler* hndlr)
{
    m_socket = sock;
    m_handler = hndlr;
}

void SocketPollerSourceHandler::OnReadWaiting()
{
    SendEvent(SocketPoller::READY_FOR_READ);
}

void SocketPollerSourceHandler::OnWriteWaiting()
{
    SendEvent(SocketPoller::READY_FOR_WRITE);
}

void SocketPollerSourceHandler::OnExceptionWaiting()
{
    SendEvent(SocketPoller::HAS_ERROR);
}

// Synchronously deliver a poller result event for our socket.
void SocketPollerSourceHandler::SendEvent(int result)
{
    wxThreadEvent event(wxEVT_SOCKET_POLLER_RESULT);
    event.SetPayload<wxSOCKET_T>(m_socket);
    event.SetInt(result);
    m_handler->ProcessEvent(event);
}
// SourceSocketPoller - a SocketPollerImpl based on event loop sources.
class SourceSocketPoller: public SocketPollerImpl
{
public:
    SourceSocketPoller(wxEvtHandler*);
    ~SourceSocketPoller();
    bool StartPolling(wxSOCKET_T, int) wxOVERRIDE;
    void StopPolling(wxSOCKET_T) wxOVERRIDE;
    void ResumePolling(wxSOCKET_T) wxOVERRIDE;

private:
    WX_DECLARE_HASH_MAP(wxSOCKET_T, wxEventLoopSource*, wxIntegerHash,\
                        wxIntegerEqual, SocketDataMap);

    // Deletes an event loop source together with its handler.
    void CleanUpSocketSource(wxEventLoopSource*);

    // Maps each polled socket to its event loop source.
    SocketDataMap m_socketData;
    wxEvtHandler* m_handler;
};

SourceSocketPoller::SourceSocketPoller(wxEvtHandler* hndlr)
{
    m_handler = hndlr;
}

SourceSocketPoller::~SourceSocketPoller()
{
    // Clean up any leftover socket data.
    for ( SocketDataMap::iterator it = m_socketData.begin() ;
          it != m_socketData.end() ; ++it )
    {
        CleanUpSocketSource(it->second);
    }
}
// Translate SocketPoller::PollAction flags into the corresponding
// wxEventLoopSource flags. Exceptional conditions are always monitored.
static int SocketPoller2EventSource(int pollAction)
{
    int flags = wxEVENT_SOURCE_EXCEPTION;

    if ( pollAction & SocketPoller::POLL_FOR_READ )
        flags |= wxEVENT_SOURCE_INPUT;

    if ( pollAction & SocketPoller::POLL_FOR_WRITE )
        flags |= wxEVENT_SOURCE_OUTPUT;

    return flags;
}
// Begin monitoring a socket with an event loop source. Returns false if no
// source could be created for it.
bool SourceSocketPoller::StartPolling(wxSOCKET_T sock, int pollAction)
{
    SocketDataMap::iterator it = m_socketData.find(sock);
    wxEventLoopSourceHandler* srcHandler = NULL;

    if ( it != m_socketData.end() )
    {
        // If this socket is already being polled, reuse the old handler. Also
        // delete the old source object to stop the old polling operations.
        wxEventLoopSource* oldSrc = it->second;
        srcHandler = oldSrc->GetHandler();

        delete oldSrc;
    }
    else
    {
        // Otherwise create a new source handler.
        srcHandler =
            new SocketPollerSourceHandler(sock, m_handler);
    }

    // Get a new source object for these polling checks.
    bool socketIsPolled = true;
    int eventSourceFlag = SocketPoller2EventSource(pollAction);
    wxEventLoopSource* newSrc =
        wxEventLoopBase::AddSourceForFD(sock, srcHandler, eventSourceFlag);

    if ( newSrc == NULL )
    {
        // We were not able to add a source for this socket.
        wxLogDebug(wxString::Format(
            "Unable to create event loop source for %d",
            static_cast<int>(sock)));

        delete srcHandler;
        socketIsPolled = false;

        if ( it != m_socketData.end() )
        {
            m_socketData.erase(it);
        }
    }
    else
    {
        m_socketData[sock] = newSrc;
    }

    return socketIsPolled;
}
// Stop monitoring a socket and release its source, if it is registered.
void SourceSocketPoller::StopPolling(wxSOCKET_T sock)
{
    SocketDataMap::iterator it = m_socketData.find(sock);

    if ( it != m_socketData.end() )
    {
        CleanUpSocketSource(it->second);
        m_socketData.erase(it);
    }
}

// Nothing to do: event loop sources keep reporting readiness on their own.
void SourceSocketPoller::ResumePolling(wxSOCKET_T WXUNUSED(sock))
{
}

// Delete a source and its handler; the handler is owned by us, not by the
// source, so it has to be freed explicitly.
void SourceSocketPoller::CleanUpSocketSource(wxEventLoopSource* source)
{
    wxEventLoopSourceHandler* srcHandler = source->GetHandler();
    delete source;
    delete srcHandler;
}

// On non-Windows platforms use the event-loop-source based poller.
SocketPollerImpl* SocketPollerImpl::Create(wxEvtHandler* hndlr)
{
    return new SourceSocketPoller(hndlr);
}
#endif
//
// wxWebSessionCURL
//

// Number of live sessions; drives global libcurl init/cleanup.
int wxWebSessionCURL::ms_activeSessions = 0;

// Runtime libcurl version, cached at first init for version checks.
unsigned int wxWebSessionCURL::ms_runtimeVersion = 0;

wxWebSessionCURL::wxWebSessionCURL() :
    m_handle(NULL)
{
    // Initialize CURL globally if no sessions are active
    if ( ms_activeSessions == 0 )
    {
        if ( curl_global_init(CURL_GLOBAL_ALL) )
        {
            wxLogError(_("libcurl could not be initialized"));
        }
        else
        {
            curl_version_info_data* data = curl_version_info(CURLVERSION_NOW);
            ms_runtimeVersion = data->version_num;
        }
    }

    ms_activeSessions++;

    // The poller reports socket activity back to us as events.
    m_socketPoller = new SocketPoller(this);
    m_timeoutTimer.SetOwner(this);
    Bind(wxEVT_TIMER, &wxWebSessionCURL::TimeoutNotification, this);
    Bind(wxEVT_SOCKET_POLLER_RESULT,
         &wxWebSessionCURL::ProcessSocketPollerResult, this);
}
wxWebSessionCURL::~wxWebSessionCURL()
{
    delete m_socketPoller;

    // The multi handle is only created on demand, so it may still be null.
    if ( m_handle )
        curl_multi_cleanup(m_handle);

    // Global CURL cleanup if this is the last session
    --ms_activeSessions;
    if ( ms_activeSessions == 0 )
        curl_global_cleanup();
}
// Create a new request object, lazily initializing the curl multi handle
// and its callbacks on first use. Returns a null pointer on failure.
wxWebRequestImplPtr
wxWebSessionCURL::CreateRequest(wxWebSession& session,
                                wxEvtHandler* handler,
                                const wxString& url,
                                int id)
{
    // Allocate our handle on demand.
    if ( !m_handle )
    {
        m_handle = curl_multi_init();
        if ( !m_handle )
        {
            wxLogDebug("curl_multi_init() failed");
            return wxWebRequestImplPtr();
        }
        else
        {
            // Route curl's socket and timer management through this object.
            curl_multi_setopt(m_handle, CURLMOPT_SOCKETDATA, this);
            curl_multi_setopt(m_handle, CURLMOPT_SOCKETFUNCTION, SocketCallback);
            curl_multi_setopt(m_handle, CURLMOPT_TIMERDATA, this);
            curl_multi_setopt(m_handle, CURLMOPT_TIMERFUNCTION, TimerCallback);
        }
    }

    return wxWebRequestImplPtr(new wxWebRequestCURL(session, *this, handler, url, id));
}
// Attach the request's easy handle to our multi handle and kick the
// transfer off by reporting an initial timeout to libcurl. Returns false
// if the handle could not be added.
bool wxWebSessionCURL::StartRequest(wxWebRequestCURL & request)
{
    // Add request easy handle to multi handle
    CURL* const curl = request.GetHandle();
    if ( curl_multi_add_handle(m_handle, curl) != CURLM_OK )
        return false;

    request.SetState(wxWebRequest::State_Active);
    m_activeTransfers[curl] = &request;

    // Report a timeout to curl to initiate this transfer.
    int runningHandles;
    curl_multi_socket_action(m_handle, CURL_SOCKET_TIMEOUT, 0,
                             &runningHandles);

    return true;
}
// Cancel an in-progress request and mark it as cancelled.
void wxWebSessionCURL::CancelRequest(wxWebRequestCURL* request)
{
    // If this transfer is currently active, stop it.
    CURL* curl = request->GetHandle();
    StopActiveTransfer(curl);

    request->SetState(wxWebRequest::State_Cancelled);
}

// Called when a request object goes away: stop any active transfer and
// release its easy handle.
void wxWebSessionCURL::RequestHasTerminated(wxWebRequestCURL* request)
{
    // If this transfer is currently active, stop it.
    CURL* curl = request->GetHandle();
    StopActiveTransfer(curl);

    curl_easy_cleanup(curl);
}
// Describe the libcurl build we are running against, including its SSL
// backend if one is available.
wxVersionInfo wxWebSessionCURL::GetLibraryVersionInfo()
{
    const curl_version_info_data* vi = curl_version_info(CURLVERSION_NOW);
    wxString desc = wxString::Format("libcurl/%s", vi->version);
    if (vi->ssl_version[0])
        desc += " " + wxString(vi->ssl_version);
    // version_num packs major/minor/patch into one byte each.
    return wxVersionInfo("libcurl",
                         vi->version_num >> 16 & 0xff,
                         vi->version_num >> 8 & 0xff,
                         vi->version_num & 0xff,
                         desc);
}
// Check whether the libcurl we are linked against at runtime is at least
// the given version (uses the value cached in the constructor).
bool wxWebSessionCURL::CurlRuntimeAtLeastVersion(unsigned int major,
                                                 unsigned int minor,
                                                 unsigned int patch)
{
    return (ms_runtimeVersion >= CURL_VERSION_BITS(major, minor, patch));
}
// curl interacts with the wxWebSessionCURL class through 2 callback functions
// 1) TimerCallback is called whenever curl wants us to start or stop a timer.
// 2) SocketCallback is called when curl wants us to start monitoring a socket
//    for activity.
//
// curl accomplishes the network transfers by calling the
// curl_multi_socket_action function to move pieces of the transfer to or from
// the system's network services. Consequently we call this function when a
// timeout occurs or when activity is detected on a socket.

// Static trampoline: userp is the wxWebSessionCURL set via CURLMOPT_TIMERDATA.
int wxWebSessionCURL::TimerCallback(CURLM* WXUNUSED(multi), long timeoutms,
                                    void *userp)
{
    wxWebSessionCURL* session = reinterpret_cast<wxWebSessionCURL*>(userp);
    session->ProcessTimerCallback(timeoutms);
    return 0;
}

// Static trampoline: userp is the wxWebSessionCURL set via CURLMOPT_SOCKETDATA.
int wxWebSessionCURL::SocketCallback(CURL* curl, curl_socket_t sock, int what,
                                     void* userp, void* WXUNUSED(sp))
{
    wxWebSessionCURL* session = reinterpret_cast<wxWebSessionCURL*>(userp);
    session->ProcessSocketCallback(curl, sock, what);
    return CURLM_OK;
};
void wxWebSessionCURL::ProcessTimerCallback(long timeoutms)
{
    // When this callback is called, curl wants us to start or stop a timer.
    // If timeoutms = -1, we should stop the timer. If timeoutms > 0, we should
    // start a oneshot timer and when that timer expires, we should call
    // curl_multi_socket_action(m_handle, CURL_SOCKET_TIMEOUT,...
    //
    // In the special case that timeoutms = 0, we should signal a timeout as
    // soon as possible (as above by calling curl_multi_socket_action). But
    // according to the curl documentation, we can't do that from this callback
    // or we might cause an infinite loop. So use CallAfter to report the
    // timeout at a slightly later time.

    if ( timeoutms > 0)
    {
        m_timeoutTimer.StartOnce(timeoutms);
    }
    else if ( timeoutms < 0 )
    {
        m_timeoutTimer.Stop();
    }
    else // timeoutms == 0
    {
        CallAfter(&wxWebSessionCURL::ProcessTimeoutNotification);
    }
}

// wxEVT_TIMER handler for the one-shot timeout timer.
void wxWebSessionCURL::TimeoutNotification(wxTimerEvent& WXUNUSED(event))
{
    ProcessTimeoutNotification();
}

// Report a timeout to curl and collect any transfers it has finished.
void wxWebSessionCURL::ProcessTimeoutNotification()
{
    int runningHandles;
    curl_multi_socket_action(m_handle, CURL_SOCKET_TIMEOUT, 0, &runningHandles);

    CheckForCompletedTransfers();
}
// Map a CURL_POLL_* request onto the equivalent SocketPoller::PollAction
// flags; anything unrecognized maps to INVALID_ACTION.
static int CurlPoll2SocketPoller(int what)
{
    switch ( what )
    {
        case CURL_POLL_IN:
            return SocketPoller::POLL_FOR_READ;

        case CURL_POLL_OUT:
            return SocketPoller::POLL_FOR_WRITE;

        case CURL_POLL_INOUT:
            return SocketPoller::POLL_FOR_READ | SocketPoller::POLL_FOR_WRITE;

        default:
            return SocketPoller::INVALID_ACTION;
    }
}
void wxWebSessionCURL::ProcessSocketCallback(CURL* curl, curl_socket_t s,
                                             int what)
{
    // Have the socket poller start or stop monitoring a socket depending on
    // the value of what.

    switch ( what )
    {
        case CURL_POLL_IN:
            wxFALLTHROUGH;
        case CURL_POLL_OUT:
            wxFALLTHROUGH;
        case CURL_POLL_INOUT:
            {
                // Remember which socket belongs to this transfer so we can
                // close it if the transfer has to be stopped.
                m_activeSockets[curl] = s;

                int pollAction = CurlPoll2SocketPoller(what);
                bool socketIsMonitored = m_socketPoller->StartPolling(s,
                                                                      pollAction);

                if ( !socketIsMonitored )
                {
                    TransferSet::iterator it = m_activeTransfers.find(curl);

                    if ( it != m_activeTransfers.end() )
                    {
                        FailRequest(curl,
                            "wxWebSession failed to monitor a socket for this "
                            "transfer");
                    }
                }
            }
            break;

        case CURL_POLL_REMOVE:
            m_socketPoller->StopPolling(s);
            RemoveActiveSocket(curl);
            break;

        default:
            wxLogDebug("Unknown socket action in ProcessSocketCallback");
            break;
    }
}
// Convert SocketPoller::Result flags to the CURL_CSELECT_* bitmask expected
// by curl_multi_socket_action().
static int SocketPollerResult2CurlSelect(int socketEventFlag)
{
    int curlSelect = 0;

    if ( socketEventFlag & SocketPoller::READY_FOR_READ )
    {
        curlSelect |= CURL_CSELECT_IN;
    }

    if ( socketEventFlag & SocketPoller::READY_FOR_WRITE )
    {
        curlSelect |= CURL_CSELECT_OUT;
    }

    if ( socketEventFlag & SocketPoller::HAS_ERROR )
    {
        curlSelect |= CURL_CSELECT_ERR;
    }

    return curlSelect;
}
// Handler for wxEVT_SOCKET_POLLER_RESULT: feed the detected socket activity
// back into libcurl and collect any transfers that completed as a result.
void wxWebSessionCURL::ProcessSocketPollerResult(wxThreadEvent& event)
{
    // Convert the socket poller result to an action flag needed by curl.
    // Then call curl_multi_socket_action.
    curl_socket_t sock = event.GetPayload<curl_socket_t>();
    int action = SocketPollerResult2CurlSelect(event.GetInt());

    int runningHandles;
    curl_multi_socket_action(m_handle, sock, action, &runningHandles);

    CheckForCompletedTransfers();
    m_socketPoller->ResumePolling(sock);
}
// Drain curl's message queue and finish any transfers reported as done:
// detach their easy handles, notify the requests and clean up bookkeeping.
void wxWebSessionCURL::CheckForCompletedTransfers()
{
    // Process CURL message queue
    int msgQueueCount;
    while ( CURLMsg* msg = curl_multi_info_read(m_handle, &msgQueueCount) )
    {
        if ( msg->msg == CURLMSG_DONE )
        {
            CURL* curl = msg->easy_handle;
            TransferSet::iterator it = m_activeTransfers.find(curl);

            if ( it != m_activeTransfers.end() )
            {
                wxWebRequestCURL* request = it->second;
                curl_multi_remove_handle(m_handle, curl);
                request->HandleCompletion();
                m_activeTransfers.erase(it);
                RemoveActiveSocket(curl);
            }
        }
    }
}
// Abort the transfer associated with curl (if any) and put its request
// into the failed state with the given error message.
void wxWebSessionCURL::FailRequest(CURL* curl,const wxString& msg)
{
    TransferSet::iterator it = m_activeTransfers.find(curl);
    if ( it != m_activeTransfers.end() )
    {
        wxWebRequestCURL* request = it->second;
        StopActiveTransfer(curl);

        request->SetState(wxWebRequest::State_Failed, msg);
    }
}
// Detach the given easy handle from the multi handle, close its socket if
// it had one and remove it from all bookkeeping maps.
void wxWebSessionCURL::StopActiveTransfer(CURL* curl)
{
    TransferSet::iterator it = m_activeTransfers.find(curl);

    if ( it != m_activeTransfers.end() )
    {
        // Record the current active socket now since it should be removed from
        // the m_activeSockets map when we call curl_multi_remove_handle.
        curl_socket_t activeSocket = CURL_SOCKET_BAD;
        CurlSocketMap::iterator it2 = m_activeSockets.find(curl);

        if ( it2 != m_activeSockets.end() )
        {
            activeSocket = it2->second;
        }

        // Remove the CURL easy handle from the CURLM multi handle.
        curl_multi_remove_handle(m_handle, curl);

        // If the transfer was active, close its socket.
        if ( activeSocket != CURL_SOCKET_BAD )
        {
            wxCloseSocket(activeSocket);
        }

        // Clean up the maps.
        RemoveActiveSocket(curl);
        m_activeTransfers.erase(it);
    }
}

// Drop the socket bookkeeping entry for curl, if present.
void wxWebSessionCURL::RemoveActiveSocket(CURL* curl)
{
    CurlSocketMap::iterator it = m_activeSockets.find(curl);

    if ( it != m_activeSockets.end() )
    {
        m_activeSockets.erase(it);
    }
}
#endif // wxUSE_WEBREQUEST_CURL
|
<gh_stars>0
/*
* Copyright 2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.spring.ge.conventions.gradle;
import java.net.InetAddress;
import java.util.List;
import java.util.function.Consumer;
import java.util.function.Function;
import com.gradle.enterprise.gradleplugin.internal.extension.BuildScanExtensionWithHiddenFeatures;
import com.gradle.scan.plugin.BuildScanDataObfuscation;
import com.gradle.scan.plugin.BuildScanExtension;
import io.spring.ge.conventions.core.ConfigurableBuildScan;
/**
* A {@link ConfigurableBuildScan} that configures a {@link BuildScanExtension} for a
* Gradle build.
*
* @author <NAME>
*/
class GradleConfigurableBuildScan implements ConfigurableBuildScan {

	private final BuildScanExtension buildScan;

	GradleConfigurableBuildScan(BuildScanExtension buildScan) {
		this.buildScan = buildScan;
	}

	@Override
	public void captureInputFiles(boolean capture) {
		this.buildScan.setCaptureTaskInputFiles(capture);
	}

	@Override
	public String server() {
		return this.buildScan.getServer();
	}

	@Override
	public void server(String server) {
		this.buildScan.setServer(server);
	}

	@Override
	public void uploadInBackground(boolean enabled) {
		try {
			this.buildScan.setUploadInBackground(enabled);
		}
		catch (NoSuchMethodError ex) {
			// GE Plugin version < 3.3. Continue
		}
	}

	@Override
	public void obfuscation(Consumer<ObfuscationConfigurer> configurer) {
		configurer.accept(new GradleObfuscationConfigurer(this.buildScan.getObfuscation()));
	}

	@Override
	public void publishAlways() {
		this.buildScan.publishAlways();
	}

	@Override
	public void publishIfAuthenticated() {
		// publishIfAuthenticated is only available on the plugin's hidden
		// extension interface.
		((BuildScanExtensionWithHiddenFeatures) this.buildScan).publishIfAuthenticated();
	}

	@Override
	public void link(String name, String url) {
		this.buildScan.link(name, url);
	}

	@Override
	public void tag(String tag) {
		this.buildScan.tag(tag);
	}

	@Override
	public void value(String name, String value) {
		this.buildScan.value(name, value);
	}

	@Override
	public void background(Consumer<ConfigurableBuildScan> buildScan) {
		this.buildScan.background((extension) -> buildScan.accept(new GradleConfigurableBuildScan(extension)));
	}

	/**
	 * {@link ObfuscationConfigurer} backed by the build scan extension's
	 * {@link BuildScanDataObfuscation}. (Renamed from the previously
	 * misspelled {@code GradleObfusactionConfigurer}.)
	 */
	private static final class GradleObfuscationConfigurer implements ObfuscationConfigurer {

		private final BuildScanDataObfuscation obfuscation;

		private GradleObfuscationConfigurer(BuildScanDataObfuscation obfuscation) {
			this.obfuscation = obfuscation;
		}

		@Override
		public void ipAddresses(Function<? super List<InetAddress>, ? extends List<String>> obfuscator) {
			this.obfuscation.ipAddresses(obfuscator);
		}

	}

}
|
#!/usr/bin/env bash
# Run the shared test harness (defined in test_functions.inc) against a set
# of version tags. Presumably these are TimescaleDB/PostgreSQL image tags --
# see test_functions.inc to confirm.

# Abort on the first failing command.
set -e

SCRIPT_DIR=$(dirname $0)
source ${SCRIPT_DIR}/test_functions.inc

run_tests -v7 \
    2.1.0-pg13 2.1.1-pg13 2.2.0-pg13
|
public class Permutation {
public static boolean isPermutation(String s1, String s2) {
if (s1.length() != s2.length()) {
return false;
}
int[] letters = new int[128];
for (int i = 0; i < s1.length(); i++) {
char c1 = s1.charAt(i);
letters[c1]++;
char c2 = s2.charAt(i);
if (--letters[c2] < 0) {
return false;
}
}
return true;
}
public static void main(String[] args) {
String s1 = "hello";
String s2 = "loleh";
System.out.println(isPermutation(s1, s2));
}
} |
package com.itmuch.cloud;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import com.netflix.loadbalancer.IRule;
import com.netflix.loadbalancer.RandomRule;
// Ribbon configuration used in tests: replaces the default load-balancing
// rule with RandomRule so servers are picked at random.
// NOTE(review): @ExcludeFromComponentScan presumably marks this class to be
// filtered out of the main application's component scan -- confirm against
// the annotation's declaration and the scan filter that uses it.
@Configuration
@ExcludeFromComponentScan
public class TestConfiguration {

    // @Autowired
    // IClientConfig config;

    @Bean
    public IRule ribbonRule() {
        // Pick the backing instance at random for each request.
        return new RandomRule();
    }
}
|
#!/bin/bash
# Home-service-robot launcher: brings up the simulated world, localization,
# RViz and the pick_objects node, each in its own xterm. The sleeps give
# each component time to start before launching the next.

catkin_ws_dir=/home/workspace/RoboND-HomeServiceRobot-Project/catkin_ws

# deploy turtlebot in the project environment
xterm -e roslaunch turtlebot_gazebo turtlebot_world.launch world_file:=${catkin_ws_dir}/src/World/udacity.world &
sleep 5

# run amcl to perform global localization
xterm -e roslaunch turtlebot_gazebo amcl_demo.launch map_file:=${catkin_ws_dir}/src/World/udacity.yaml &
sleep 3

# observe the navigation in Rviz
xterm -e roslaunch turtlebot_rviz_launchers view_navigation.launch &
sleep 3

# launch pick_objects node to define multiple goals
# (-hold keeps the terminal open after the node exits)
xterm -hold -e rosrun pick_objects pick_objects
|
# Termux build recipe for the apt package manager front-end.
TERMUX_PKG_HOMEPAGE=https://packages.debian.org/apt
TERMUX_PKG_DESCRIPTION="Front-end for the dpkg package manager"
TERMUX_PKG_LICENSE="GPL-2.0"
TERMUX_PKG_VERSION=1.4.9
TERMUX_PKG_REVISION=15
TERMUX_PKG_SRCURL=http://ftp.debian.org/debian/pool/main/a/apt/apt_${TERMUX_PKG_VERSION}.tar.xz
TERMUX_PKG_SHA256=d4d65e7c84da86f3e6dcc933bba46a08db429c9d933b667c864f5c0e880bac0d
# apt-key requires utilities from coreutils, findutils, gpgv, grep, sed.
TERMUX_PKG_DEPENDS="coreutils, dpkg, findutils, gpgv, grep, libc++, libcurl, liblzma, sed, termux-licenses, zlib"
TERMUX_PKG_CONFLICTS="apt-transport-https"
TERMUX_PKG_REPLACES="apt-transport-https"
TERMUX_PKG_RECOMMENDS="game-repo, science-repo"
TERMUX_PKG_SUGGESTS="unstable-repo"
TERMUX_PKG_ESSENTIAL=true
# Configuration files preserved across package upgrades.
TERMUX_PKG_CONFFILES="
etc/apt/sources.list
etc/apt/trusted.gpg
"
TERMUX_PKG_EXTRA_CONFIGURE_ARGS="
-DPERL_EXECUTABLE=$(which perl)
-DCMAKE_INSTALL_FULL_LOCALSTATEDIR=$TERMUX_PREFIX
-DCOMMON_ARCH=$TERMUX_ARCH
-DDPKG_DATADIR=$TERMUX_PREFIX/share/dpkg
-DUSE_NLS=OFF
-DWITH_DOC=OFF
"
# Installed files that are not needed on Termux and are removed afterwards.
TERMUX_PKG_RM_AFTER_INSTALL="
bin/apt-cdrom
bin/apt-extracttemplates
bin/apt-sortpkgs
etc/apt/apt.conf.d
lib/apt/apt-helper
lib/apt/methods/bzip2
lib/apt/methods/cdrom
lib/apt/methods/mirror
lib/apt/methods/rred
lib/apt/planners/
lib/apt/solvers/
lib/dpkg/
lib/libapt-inst.so
"

termux_step_pre_configure() {
	# Certain packages are not safe to build on device because their
	# build.sh script deletes specific files in $TERMUX_PREFIX.
	if $TERMUX_ON_DEVICE_BUILD; then
		termux_error_exit "Package '$TERMUX_PKG_NAME' is not safe for on-device builds."
	fi
}

termux_step_post_make_install() {
	# Install the default sources list and the repository signing keys.
	printf "# The main termux repository:\ndeb https://dl.bintray.com/termux/termux-packages-24 stable main\n" > $TERMUX_PREFIX/etc/apt/sources.list
	cp $TERMUX_PKG_BUILDER_DIR/trusted.gpg $TERMUX_PREFIX/etc/apt/

	# Development headers are not shipped in this package.
	rm $TERMUX_PREFIX/include/apt-pkg -r

	# apt-transport-tor
	ln -sfr $TERMUX_PREFIX/lib/apt/methods/http $TERMUX_PREFIX/lib/apt/methods/tor
	ln -sfr $TERMUX_PREFIX/lib/apt/methods/http $TERMUX_PREFIX/lib/apt/methods/tor+http
	ln -sfr $TERMUX_PREFIX/lib/apt/methods/https $TERMUX_PREFIX/lib/apt/methods/tor+https
}
|
import { Component, Vue } from 'vue-property-decorator'
@Component
export default class DataMixin extends Vue {
  /**
   * Resolve a possibly nested field (dot-separated path) on columnData.
   *
   * Returns '' when the path is missing so templates render a blank cell.
   * Only null/undefined are treated as missing: the previous `||`-based
   * lookup (and the trailing `res ? res : ''`) silently turned legitimate
   * falsy values such as 0 and false into '' as well.
   */
  getData(field: string, columnData: any) {
    if (!columnData) {
      return ''
    }
    const path = field.split('.')
    const value = path.reduce((acc: any, key: string, index) => {
      const next = acc?.[key]
      // Keep traversing with an empty object on intermediate misses so the
      // next step doesn't throw; a miss on the final segment becomes null.
      return next ?? (index < path.length - 1 ? {} : null)
    }, columnData)
    return value ?? ''
  }
}
|
// Binary operator precedence table used by the Pratt parser in
// generateAST(); ')' is 0 so a closing paren terminates any expression.
const precedence = {
    ')': 0,
    '+': 1,
    '-': 1,
    '*': 2,
    '/': 2,
    '%': 3,
    '^': 4
}
// Tokenize an arithmetic expression into numbers, operators and parens.
// Unary -, + and ! are folded into their numeric operand in a post-pass so
// the parser only ever sees binary operators.
function lex(text) {
    // These were previously implicit globals; declaring them locally makes
    // the lexer re-entrant and keeps it working under strict mode / modules.
    let start = 0;
    let current = 0;
    const tokens = [];

    function isOperator(c) {
        return '-+*/%^'.includes(c);
    }

    function isUnary(c) {
        return '-!+'.includes(c);
    }

    function advance() {
        return text.charAt(current++);
    }

    function eof() {
        return current >= text.length;
    }

    function peekNext() {
        if (current + 1 >= text.length) return null;
        return text.charAt(current + 1);
    }

    function peek() {
        if (eof()) return null;
        return text.charAt(current);
    }

    function isDigit(c) {
        return c >= '0' && c <= '9';
    }

    // Scan a number literal (integer part plus optional fraction) starting
    // at `start` and push its numeric value.
    function getNum() {
        while (!eof() && isDigit(peek())) {
            advance();
        }
        if (peek() === '.' && isDigit(peekNext())) {
            advance();
            while (isDigit(peek())) advance();
        }
        tokens.push(parseFloat(text.substring(start, current)));
    }

    while (!eof()) {
        start = current;
        let c = advance();
        if (isDigit(c)) {
            getNum();
        } else if (isOperator(c) || c === '(' || c === ')') {
            // Note: any other character is silently skipped.
            tokens.push(c);
        }
    }

    // Fold unary operators: an operator at the start of input or right
    // after another operator is unary and is applied to the next token.
    for (let i = 0; i < tokens.length - 1; i++) {
        let tk = tokens[i];
        if (isUnary(tk)) {
            if (i == 0 || precedence[tokens[i - 1]]) {
                tokens[i + 1] = evaluateUnary(tk, tokens[i + 1]);
                tokens.splice(i, 1);
            }
        }
    }

    return tokens;
}
// Apply a unary operator to an already-evaluated operand.
// Unknown operators yield NaN.
function evaluateUnary(op, operand) {
    if (op === '-') return -operand;
    if (op === '+') return +operand;
    if (op === '!') return !operand;
    return NaN;
}
// Pratt (top-down operator precedence) parser: consume the token list
// produced by lex() and build a binary expression tree of Node objects.
// Note: `Node` is defined elsewhere; the extra constructor arguments
// (45, 10, 10) are presumably rendering parameters -- confirm against the
// Node declaration.
function generateAST(tokens) {
    function next() {
        return tokens.shift();
    }

    function peek() {
        return tokens[0];
    }

    function Tree(left, node, right) {
        return new Node(left, node, right, 45, 10, 10);
    }

    // "Null denotation": a token at prefix position is either a
    // parenthesized subexpression or a literal leaf.
    function NUD(node) {
        if (node === '(') {
            const e = expr(0);
            next();
            return e;
        }
        return Tree(null, node, null);
    }

    // "Left denotation": combine the left subtree with a binary operator
    // and its right-hand side, parsed at the operator's precedence.
    function LED(left, op) {
        return Tree(left, op, expr(precedence[op]));
    }

    // Parse an expression whose operators bind tighter than rp.
    function expr(rp = 0) {
        let left = NUD(next());
        while (precedence[peek()] > rp)
            left = LED(left, next());
        return left;
    }

    return expr();
}
// Recursively evaluate an expression tree produced by generateAST().
// Leaves (left == null) hold numeric values; inner nodes hold an operator.
// NOTE(review): this declaration shadows the global eval() and would be a
// SyntaxError in strict mode -- consider renaming, but callers may rely on
// the name.
function eval(node) {
    if (node.left == null)
        return node.value;

    switch (node.value) {
        case '/':
            return eval(node.left) / eval(node.right);
        case '*':
            return eval(node.left) * eval(node.right);
        case '-':
            return eval(node.left) - eval(node.right);
        case '+':
            return eval(node.left) + eval(node.right);
        case '%':
            return eval(node.left) % eval(node.right);
        case '^':
            return Math.pow(eval(node.left), eval(node.right));
        default:
            // Unknown operator: fall back to the node's own value.
            return node.value;
    }
}
//Node JS stuff for getting user input from the terminal
// const readline = require('readline');
// const rl = readline.createInterface({
// input: process.stdin,
// output: process.stdout
// });
// rl.question('Enter your expression : \n', (expression) => {
// let ast = generateAST(lex(expression));
// console.log(eval(ast));
// rl.close();
// })
|
import torch.nn as nn
import torch.nn.functional as F
from . import register_nas_estimator
from ..space import BaseSpace
from .base import BaseEstimator
from .one_shot import OneShotEstimator
import torch
from autogl.module.train import NodeClassificationFullTrainer, Acc
@register_nas_estimator("scratch")
class TrainEstimator(BaseEstimator):
    """
    An estimator which trains each candidate architecture from scratch.

    Unlike one-shot estimation, every call to ``infer`` builds a fresh
    trainer, fully trains the wrapped model, and only then evaluates it.
    """

    def __init__(self, loss_f="nll_loss", evaluation=[Acc()]):
        # NOTE(review): mutable default argument -- all instances created
        # without an explicit ``evaluation`` share the same list.
        super().__init__(loss_f, evaluation)
        self.evaluation = evaluation
        # Reused for the final evaluation pass after training finishes.
        self.estimator = OneShotEstimator(self.loss_f, self.evaluation)

    def infer(self, model: BaseSpace, dataset, mask="train"):
        # self.trainer.model=model
        # self.trainer.device=model.device
        boxmodel = model.wrap()
        # Train the candidate from scratch with fixed hyperparameters.
        self.trainer = NodeClassificationFullTrainer(
            model=boxmodel,
            optimizer=torch.optim.Adam,
            lr=0.005,
            max_epoch=300,
            early_stopping_round=30,
            weight_decay=5e-4,
            device="auto",
            init=False,
            feval=self.evaluation,
            loss=self.loss_f,
            lr_scheduler_type=None,
        )
        try:
            self.trainer.train(dataset)
            with torch.no_grad():
                return self.estimator.infer(boxmodel.model, dataset, mask)
        except RuntimeError as e:
            if "cuda" in str(e) or "CUDA" in str(e):
                # CUDA failure (e.g. out of memory): report the worst
                # possible score instead of aborting the architecture search.
                INF = 100
                fin = [-INF if eva.is_higher_better else INF for eva in self.evaluation]
                return fin, 0
            else:
                raise e
|
class BankAccount:
    """A bank account with a unique, auto-assigned account number."""

    # Class-wide counter used to hand out unique account numbers.
    account_number_counter = 1000

    def __init__(self, account_holder_name):
        self.account_holder_name = account_holder_name
        self.account_number = BankAccount.account_number_counter + 1
        BankAccount.account_number_counter += 1
        self.balance = 0

    def deposit(self, amount):
        """Add a positive amount to the balance; return True on success."""
        if amount <= 0:
            return False
        self.balance += amount
        return True

    def withdraw(self, amount):
        """Remove amount if it is positive and covered by the balance."""
        if 0 < amount <= self.balance:
            self.balance -= amount
            return True
        return False

    def check_balance(self):
        """Return the current balance."""
        return self.balance
class Bank:
    """Registry of BankAccount objects keyed by account number."""

    def __init__(self):
        self.accounts = {}

    def create_account(self, account_holder_name):
        """Open a new account and return its account number."""
        account = BankAccount(account_holder_name)
        self.accounts[account.account_number] = account
        return account.account_number

    def get_account(self, account_number):
        """Return the account for the given number, or None if unknown."""
        return self.accounts.get(account_number)

    def close_account(self, account_number):
        """Remove the account; return True if it existed."""
        return self.accounts.pop(account_number, None) is not None
# Sample program to demonstrate the usage of Bank and BankAccount classes
bank = Bank()

# Create new accounts
account1_number = bank.create_account("Alice")
account2_number = bank.create_account("Bob")

# Deposit and withdraw funds (look each account up once, then reuse it)
alice = bank.get_account(account1_number)
bob = bank.get_account(account2_number)
alice.deposit(1000)
bob.deposit(500)
alice.withdraw(200)

# Display account details and balances
print("Account 1 details:")
print("Account Holder:", alice.account_holder_name)
print("Account Balance:", alice.check_balance())
print("\nAccount 2 details:")
print("Account Holder:", bob.account_holder_name)
print("Account Balance:", bob.check_balance())

# Close an account
bank.close_account(account2_number)
#!/bin/bash
set -ev; # stop on error

# Create sample files used by the form-data file-upload tests and register
# their paths in the test collection.
echo "Creating test files and adding paths to collection for testing form data file uploads"
pushd /home/travis/build/postmanlabs/postman-code-generators/ &>/dev/null;
echo "Sample file 1" >> test1.txt;
echo "Sample file 2" >> test2.txt;
echo "Sample file 3" >> test3.txt;
node ./npm/addPathToFormdataFile.js
popd &>/dev/null;

echo "Installing dependencies required for tests in codegens/java-okhttp"
pushd ./codegens/java-okhttp &>/dev/null;
sudo add-apt-repository ppa:openjdk-r/ppa -y
sudo rm -rf /var/lib/apt/lists/*
sudo apt-get update
sudo apt-get install -y openjdk-8-jdk
# Maven Central's legacy central.maven.org host has been shut down; artifacts
# must be fetched from the canonical HTTPS endpoint repo1.maven.org instead.
sudo wget https://repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.9.1/okhttp-3.9.1.jar
sudo wget https://repo1.maven.org/maven2/com/squareup/okio/okio/1.14.0/okio-1.14.0.jar
popd &>/dev/null;

echo "Installing dependencies required for tests in codegens/java-unirest"
pushd ./codegens/java-unirest &>/dev/null;
sudo wget https://repo1.maven.org/maven2/com/mashape/unirest/unirest-java/1.4.9/unirest-java-1.4.9.jar
sudo wget https://repo1.maven.org/maven2/org/apache/httpcomponents/httpclient/4.5.2/httpclient-4.5.2.jar
sudo wget https://repo1.maven.org/maven2/commons-codec/commons-codec/1.9/commons-codec-1.9.jar
sudo wget https://repo1.maven.org/maven2/commons-logging/commons-logging/1.2/commons-logging-1.2.jar
sudo wget https://repo1.maven.org/maven2/org/apache/httpcomponents/httpcore/4.4.4/httpcore-4.4.4.jar
sudo wget https://repo1.maven.org/maven2/org/apache/httpcomponents/httpasyncclient/4.1.1/httpasyncclient-4.1.1.jar
sudo wget https://repo1.maven.org/maven2/org/apache/httpcomponents/httpcore-nio/4.4.4/httpcore-nio-4.4.4.jar
sudo wget https://repo1.maven.org/maven2/org/json/json/20160212/json-20160212.jar
sudo wget https://repo1.maven.org/maven2/org/apache/httpcomponents/httpmime/4.3.6/httpmime-4.3.6.jar
popd &>/dev/null;

echo "Installing dependencies required for tests in codegens/csharp-restsharp"
pushd ./codegens/csharp-restsharp &>/dev/null;
wget -q https://packages.microsoft.com/config/ubuntu/16.04/packages-microsoft-prod.deb -O packages-microsoft-prod.deb
sudo dpkg -i packages-microsoft-prod.deb
sudo apt-get install apt-transport-https
sudo apt-get update
sudo apt-get install dotnet-sdk-2.2
# Create a throwaway console project so RestSharp is restored into the cache.
dotnet new console -o testProject
pushd ./testProject &>/dev/null;
dotnet add package RestSharp
popd &>/dev/null;
popd &>/dev/null;

echo "Installing dependencies required for tests in codegens/php-httprequest2"
pear install HTTP_Request2-2.3.0

echo "Installing dependencies required for tests in codegens/swift"
pushd ./codegens/swift &>/dev/null;
sudo apt-get update
sudo apt-get install clang-3.6 libicu-dev libpython2.7 -y
sudo apt-get install libcurl3 libpython2.7-dev -y
sudo wget https://swift.org/builds/swift-5.0.1-release/ubuntu1604/swift-5.0.1-RELEASE/swift-5.0.1-RELEASE-ubuntu16.04.tar.gz
sudo tar xzf swift-5.0.1-RELEASE-ubuntu16.04.tar.gz
sudo chmod 777 swift-5.0.1-RELEASE-ubuntu16.04/usr/lib/swift/CoreFoundation/module.map
popd &>/dev/null;

echo "Installing dependencies required for tests in codegens/csharp-restsharp"
sudo apt-get install -y mono-complete

echo "Installing dependencies required for tests in codegens/shell-httpie"
sudo apt-get install httpie
const fs = require('fs-extra');
const sharp = require('sharp');
const { contents, idiom } = require('./consts');
/**
 * This function will generate an imageset for iOS
 * @param {Object} options
 * @param {String} options.svg - The content of the SVG that will be turned into a PNG. The SVG content at this point should have had all the token references inside of it resolved.
 * @param {String} options.name - The name of the image token
 * @param {String} options.iosPath - The build path for iOS. This will be defined in the configuration
 */
function generateImageset({ svg, name, iosPath }) {
  const outputPath = `${iosPath}StyleDictionary.xcassets/${name}.imageset`;
  fs.ensureDirSync(outputPath);

  const filename = `img.png`;
  const imageset = {
    ...contents,
    images: [{
      idiom,
      filename
    }]
  }

  // Here we are using the sharp library for image processing that will take
  // the SVG content and render it as a PNG
  // https://sharp.pixelplumbing.com/api-constructor
  // Fix: the output path previously interpolated the literal `$(unknown)`
  // instead of the declared `filename`, writing to a bogus file name.
  sharp(Buffer.from(svg, `utf-8`), { density: 300 })
    .toFile(`${outputPath}/${filename}`, (err) => {
      if (!err) {
        console.log(`✔︎ ${outputPath}/${filename}`);
      } else {
        console.log(err);
      }
    });

  // Lastly, write the Contents.json file with the updated content
  fs.writeFileSync(`${outputPath}/Contents.json`, JSON.stringify(imageset, null, 2));
}

module.exports = generateImageset;
<filename>sqlpal/src/org/sqlpal/crud/DataHandler.java
package org.sqlpal.crud;
import org.sqlpal.exception.ConnectionException;
import org.sqlpal.manager.ConfigManager;
import org.sqlpal.manager.ConnectionManager;
import org.sqlpal.util.DBUtils;
import org.sqlpal.util.EmptyUtils;
import org.sqlpal.util.StatementUtils;
import com.sun.istack.internal.NotNull;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;
/**
* CRUD处理类
*/
/**
 * CRUD handler: wraps common JDBC boilerplate (connection acquisition,
 * statement creation, batching, cleanup) behind {@code ExecuteCallback}
 * lifecycle hooks.
 */
public class DataHandler {
    // Auto-commit state saved by executeBatch()'s onInitConnection and
    // restored in its onClose(); null means "never changed".
    // NOTE(review): this is per-instance mutable state shared across calls —
    // presumably each DataHandler is used from one thread at a time; confirm
    // before using concurrently.
    private Boolean isAutoCommit = null;

    /** Execute a callback that does not need a model instance. */
    <T> T execute(@NotNull ExecuteCallback<T> callback) throws SQLException {
        return execute(callback, null);
    }

    /**
     * Core execution template: obtain a connection (auto-requesting one when
     * none is bound), let the callback build and run a PreparedStatement,
     * then always delegate cleanup to the callback.
     *
     * @param callback hooks invoked at each phase of the statement lifecycle
     * @param model    optional model handed to the callback; may be null
     * @return the callback's onExecute result, or null when the callback
     *         declined (onGetValues returned false, or no statement created)
     * @throws SQLException if any JDBC operation fails
     */
    <T> T execute(@NotNull ExecuteCallback<T> callback, DataSupport model) throws SQLException {
        if (!callback.onGetValues(model)) return null;
        boolean isRequestConnection = false;
        Connection conn = null;
        PreparedStatement stmt = null;
        try {
            // Fetch the connection currently bound via ConnectionManager.
            conn = ConnectionManager.getConnection();
            // Automatically request a connection when none is bound yet.
            if (conn == null) {
                isRequestConnection = true;
                ConnectionManager.requestConnection();
                conn = ConnectionManager.getConnection();
                if (conn == null) throw new ConnectionException("获取连接失败");
            }
            callback.onInitConnection(conn);
            // Create the statement; a null statement means "nothing to do".
            stmt = callback.onCreateStatement(conn, model);
            if (stmt == null) return null;
            callback.onAddValues(stmt);
            // Execute the statement and hand back its result.
            return callback.onExecute(stmt);
        } finally {
            // Cleanup is delegated so the callback can release the
            // auto-requested connection when isRequestConnection is true.
            callback.onClose(conn, stmt, isRequestConnection);
        }
    }

    /**
     * Execute the callback once per model as a single JDBC batch, flushing
     * every maxBatchCount additions. Auto-commit is disabled for the batch
     * and restored in onClose.
     *
     * @param callback per-model hooks; its onExecuteBatch flushes the batch
     * @param models   models to process; empty lists are a no-op
     */
    void executeBatch(@NotNull final ExecuteCallback callback, @NotNull final List<? extends DataSupport> models) throws SQLException {
        if (EmptyUtils.isEmpty(models)) return;
        execute(new DefaultExecuteCallback<Void>() {
            @Override
            public void onInitConnection(Connection connection) throws SQLException {
                // Remember the previous auto-commit mode so onClose can restore it.
                isAutoCommit = connection.getAutoCommit();
                connection.setAutoCommit(false);
            }
            @Override
            public PreparedStatement onCreateStatement(Connection connection, DataSupport modelIgnore) throws SQLException {
                PreparedStatement stmt = null;
                int batchCount = 0;
                final int maxBatchCount = ConfigManager.getConfig().getMaxBatchCount();
                for (DataSupport model : models) {
                    if (!callback.onGetValues(model)) continue;
                    // Lazily create the statement from the first accepted model.
                    if (stmt == null) {
                        stmt = callback.onCreateStatement(connection, model);
                        if (stmt == null) return null;
                    }
                    callback.onAddValues(stmt);
                    stmt.addBatch();
                    // Flush a full batch to keep the pending batch bounded.
                    if (++batchCount % maxBatchCount == 0) {
                        callback.onExecuteBatch(stmt);
                    }
                }
                return stmt;
            }
            @Override
            public void onAddValues(PreparedStatement statement) throws SQLException {
                // Intentionally empty: values were already added per-model
                // inside onCreateStatement above.
            }
            @Override
            public Void onExecute(PreparedStatement statement) throws SQLException {
                // Execute whatever remains in the final, partially filled batch.
                callback.onExecuteBatch(statement);
                return null;
            }
            @Override
            public void onClose(Connection connection, Statement statement, boolean isRequestConnection) throws SQLException {
                // Restore the connection's original auto-commit mode.
                if (isAutoCommit != null && connection != null) {
                    connection.setAutoCommit(isAutoCommit);
                }
                callback.onClose(connection, statement, isRequestConnection);
            }
        });
    }

    /**
     * Run a raw update statement. {@code conditions[0]} is the SQL text;
     * any remaining entries are bound as positional parameters.
     *
     * @return the JDBC update count
     */
    int executeUpdate(@NotNull String[] conditions) throws SQLException {
        if (EmptyUtils.isEmpty(conditions)) throw new RuntimeException("SQL语句不能为空");
        PreparedStatement stmt = null;
        try {
            Connection conn = ConnectionManager.getConnection();
            if (conn == null) {
                throw new RuntimeException("请先执行Sql.begin()以获取连接");
            }
            stmt = conn.prepareStatement(conditions[0]);
            StatementUtils utils = new StatementUtils(stmt);
            if (conditions.length > 1) {
                // Bind conditions[1..] as the statement's parameters.
                utils.addValues(conditions, 1);
            }
            return stmt.executeUpdate();
        } finally {
            DBUtils.close(stmt);
        }
    }

    /**
     * Run a raw query. The statement is returned OPEN so the caller can read
     * its results; the caller is responsible for closing it.
     * NOTE(review): the ResultSet returned by executeQuery() is discarded —
     * presumably callers re-obtain it from the returned Statement; confirm.
     */
    Statement executeQuery(@NotNull String[] conditions) throws SQLException {
        if (EmptyUtils.isEmpty(conditions)) throw new RuntimeException("SQL语句不能为空");
        Connection conn = ConnectionManager.getConnection();
        if (conn == null) {
            throw new RuntimeException("请先执行Sql.begin()以获取连接");
        }
        PreparedStatement stmt = conn.prepareStatement(conditions[0]);
        StatementUtils utils = new StatementUtils(stmt);
        if (conditions.length > 1) {
            utils.addValues(conditions, 1);
        }
        stmt.executeQuery();
        return stmt;
    }
}
|
#!/bin/sh -eu
# Install dotfiles: run pre-install hooks, install each tracked file, sync
# git submodules and LFS content, then install custom (source, target) pairs.

# The path containing the scripts.
export SCRIPTS_PATH=$(dirname $(readlink -f $0))

# Shared configuration and helper functions.
# NOTE(review): presumably config.sh defines DOTFILES, CUSTOM_FILES and
# BASE_PATH used below — confirm.
. $SCRIPTS_PATH/config.sh
. $SCRIPTS_PATH/functions.sh

pre_install

for file in $DOTFILES
do
    install_file $file
done

# Initialize and update all submodules.
cd $BASE_PATH
git submodule update --init > /dev/null 2>&1
git submodule foreach 'git checkout master && git pull' > /dev/null 2>&1

# Download lfs tracked files; '|| true' tolerates hosts without git-lfs.
git lfs pull || true

# CUSTOM_FILES alternates source and target names; consume them pairwise
# (i toggles 0 -> source name, 1 -> target name).
i=0
for file in $CUSTOM_FILES
do
    if [ $i -eq 0 ]; then
        filename=$file
    else
        target_file=$file
        install_custom_file $filename $target_file
    fi
    i=$(( ($i+1)%2 ))
done

post_install
|
class Server:
    """Plain record describing a named server host and its region."""

    def __init__(self, hostname, name, region):
        self.hostname = hostname
        self.name = name
        self.region = region

    def get_info(self):
        """Return a one-line, human-readable description of this server."""
        return (
            f"Server {self.name} with hostname {self.hostname} "
            f"is located in region {self.region}"
        )


# Example usage
server1 = Server("host1.example.com", "Server1", "us-east-1")
print(server1.get_info())  # Output: Server Server1 with hostname host1.example.com is located in region us-east-1
#!/bin/bash
# Install Knative Serving from source with ko, then add the Contour ingress.

# Apply only the CRDs first (selected by label) and wait until the API server
# reports them Established, so the full apply below does not race the CRDs.
ko apply --selector knative.dev/crd-install=true -Rf config/core/
kubectl wait --for=condition=Established --all crd

# Now apply the complete core configuration.
ko apply -Rf config/core/

# Install the bundled Contour ingress manifests.
kubectl apply -f ./third_party/contour-latest/

#kubectl apply -f ~/direktiv/scripts/config-deployment.yaml
#kubectl patch configmap/config-network \
# --namespace knative-serving \
# --type merge \
# --patch '{"data":{"ingress.class":"contour.ingress.networking.knative.dev"}}'
|
<reponame>cforfang/Vulkan-Samples
/* Copyright (c) 2019, Arm Limited and Contributors
*
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 the "License";
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.khronos.vulkan_samples;
import android.support.annotation.NonNull;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentStatePagerAdapter;
import android.util.ArraySet;
import android.view.ViewGroup;
import android.widget.AdapterView;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
/**
 * Pager adapter that shows one tab per sample category and supports
 * tag-based filtering of the samples shown inside each tab.
 */
public class ViewPagerAdapter extends FragmentStatePagerAdapter {
    // Fragment currently displayed by the pager.
    private TabFragment currentFragment;
    // Sorted category names; one tab per entry.
    private List<String> categories;
    // Tags currently selected by the filter.
    private List<String> filterTags;
    // Category name -> samples belonging to it.
    private HashMap<String, List<Sample>> sampleMap;
    // Flattened list of samples matching the active filter.
    private List<Sample> viewableSamples;
    // Click handler forwarded to every tab's list.
    private AdapterView.OnItemClickListener clickListener;

    ViewPagerAdapter(FragmentManager manager, @NonNull HashMap<String,
            List<Sample>> categorizedSampleMap, AdapterView.OnItemClickListener clickListener) {
        super(manager);
        // Sort category names so tab order is deterministic.
        SortedSet<String> keys = new TreeSet<>(categorizedSampleMap.keySet());
        this.categories = new ArrayList<>();
        this.categories.addAll(keys);
        this.sampleMap = categorizedSampleMap;
        this.viewableSamples = new ArrayList<>();
        this.clickListener = clickListener;
        // Start with every tag enabled, i.e. all samples visible.
        applyFilter(getTags());
    }

    public void setDialog(FilterDialog dialog) {
        dialog.setup(this, getTags());
    }

    /**
     * @brief Modifies the filter so that the viewableSamples updates
     */
    public void applyFilter(List<String> filterTags) {
        viewableSamples.clear();
        // A sample is viewable when it carries at least one selected tag;
        // the contains() check keeps each sample listed only once.
        for(String filter : filterTags) {
            for(List<Sample> sampleList : sampleMap.values()) {
                for(Sample sample : sampleList) {
                    if(sample.getTags().contains(filter) && !viewableSamples.contains(sample)) {
                        viewableSamples.add(sample);
                    }
                }
            }
        }
        this.filterTags = filterTags;
    }

    /**
     * @brief Returns a list of unique tags pulled from CMake used to filter the view
     */
    private List<String> getTags() {
        Set<String> tagSet = new ArraySet<>();
        for(List<Sample> sampleList : sampleMap.values()) {
            for(Sample sample : sampleList) {
                tagSet.addAll(sample.getTags());
            }
        }
        return new ArrayList<>(tagSet);
    }

    public List<String> getFilter() {
        return filterTags;
    }

    @Override
    public Fragment getItem(int position) {
        TabFragment fragment = (TabFragment) TabFragment.getInstance(categories.get(position));
        // Restrict this tab to samples of its category that also pass the
        // active filter (matched by sample id).
        List<Sample> fragmentSamples = new ArrayList<>();
        for(Sample sample : Objects.requireNonNull(sampleMap.get(categories.get(position)))) {
            for(Sample viewableSample : this.viewableSamples) {
                if(sample.getId().equals(viewableSample.getId())) {
                    fragmentSamples.add(sample);
                }
            }
        }
        fragment.prepare(fragmentSamples, clickListener);
        return fragment;
    }

    // Always report POSITION_NONE so fragments are rebuilt whenever the data
    // set changes (e.g. after applyFilter()).
    // NOTE(review): this overrides PagerAdapter.getItemPosition but is
    // missing @Override — consider adding it.
    public int getItemPosition(@NonNull Object object) {
        return POSITION_NONE;
    }

    @Override
    public void setPrimaryItem(@NonNull ViewGroup container, int position, @NonNull Object object) {
        // Track the fragment the user is currently looking at.
        if (getCurrentFragment() != object) {
            currentFragment = (TabFragment) object;
        }
        super.setPrimaryItem(container, position, object);
    }

    @Override
    public int getCount() {
        return categories.size();
    }

    @Override
    public CharSequence getPageTitle(int position) {
        return categories.get(position);
    }

    TabFragment getCurrentFragment() {
        return currentFragment;
    }
}
#!/bin/bash
# Kiosk startup: keep the display awake, run a minimal window manager, map a
# gamepad profile, then launch Chromium full-screen on an embedded video.

# Disable DPMS and screen blanking so the display never sleeps.
xset -dpms s off s noblank
# Lightweight window manager without title bars (kiosk look).
matchbox-window-manager -use_titlebar no &
# Load the named joystick mapping profile in the background.
qjoypad "8bitdo_sf30pro" &
/usr/bin/chromium-browser --use-gl=egl --kiosk --start-fullscreen --noerrdialogs --disable-translate --no-first-run --fast --fast-start --disable-infobars --disable-extensions --disable-component-update --app=https://www.youtube.com/embed/eyU3bRy2x44?autoplay=1
|
<filename>javaSources/Miscellaneous/NeerajClass.java
/**
 * Demonstrates the static-counter behaviour of the {@code Static} class by
 * constructing two instances and printing the counter after each one.
 */
public class NeerajClass {
    public static void main(String[] args) {
        Static myObject = new Static();
        System.out.println("Let me check the value of counter => " + myObject.counter);
        System.out.println("Let me check the value of counter (METHOD) => " + myObject.returnCounter());

        Static myObject2 = new Static(100);
        System.out.println("Let me check the value of counter => " + myObject2.counter);
        System.out.println("Let me check the value of counter (METHOD) => " + myObject2.returnCounter());

        // System.out.println("Let me check the value of counter => " + Static.counter1);
        // System.out.println("Let me check the value of counter => " + Static.returnCounter1());
    }
}
// Fix: removed a second, empty 'class NeerajClass {}' declaration that
// duplicated the public class name in the same file — a compile error.
|
// Evaluate the binary '*' operator: only Real × Real is defined; every other
// operand combination is reported back to the caller as an Err string.
match (lhs, rhs) {
    (Value::Real(lhs), Value::Real(rhs)) => {
        // Multiply the unwrapped numeric payloads and re-wrap the product.
        let result = lhs * rhs;
        Ok(Value::Real(result))
    },
    _ => Err(format!("Unable to use '*' operator for {:?} and {:?} values.", lhs, rhs)),
}
//= require jquery
//= require ./mtimer_home
|
#!/bin/sh
# Load the default FPGA bitstream and device-tree overlay at startup, if
# enabled via /etc/default/fpgamanager (FPGA_INIT=true).
BASE=fpgamanager

# Pull in optional configuration overrides.
if [ -f /etc/default/$BASE ]; then
	. /etc/default/$BASE
fi

if [ "$FPGA_INIT" = true ]; then
	# Fix: 'source' is a bashism and is not defined by POSIX sh; use the
	# portable '.' built-in under a #!/bin/sh shebang.
	. /lib/firmware/fpga-default.env
	#Script to loads default bitstream and dtbo on startup
	echo "Loading bitstream"
	fpgautil -b ${BIN} -o ${DTBO}
fi
|
#!/bin/bash
# Copyright 2018 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# End-to-end test harness: spins up a kind cluster, builds and loads the
# external-secrets images, installs the chart against localstack, and runs
# the e2e test pod. The cluster is torn down on exit.

DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"

# With DEBUG set, trace commands and print tool versions.
KIND_LOGGING=""
if ! [ -z "$DEBUG" ]; then
	set -x
	KIND_LOGGING="--verbosity=4"
	kind version
	kubectl version --client
	helm version --client
fi

set -o errexit
set -o nounset
set -o pipefail

# Terminal colors for status messages.
RED='\e[35m'
NC='\e[0m'
BGREEN='\e[32m'

K8S_VERSION=${K8S_VERSION:-v1.16.15}
KIND_CLUSTER_NAME="external-secrets-dev"
REGISTRY=external-secrets

# Use a private kubeconfig so the dev cluster never touches ~/.kube/config.
export KUBECONFIG="$(pwd)/e2e/.kubeconfig"

kind --version || $(echo -e "${RED}Please install kind before running e2e tests${NC}";exit 1)

echo -e "${BGREEN}[dev-env] creating Kubernetes cluster with kind${NC}"
kind create cluster \
  ${KIND_LOGGING} \
  --name ${KIND_CLUSTER_NAME} \
  --config "${DIR}/kind.yaml" \
  --image "kindest/node:${K8S_VERSION}"

echo -e "${BGREEN}building external-secrets images${NC}"
docker build -t external-secrets:test -f "$DIR/../Dockerfile" "$DIR/../"
docker build -t external-secrets-e2e:test -f "$DIR/Dockerfile" "$DIR/../"

# Load the locally built images into the kind nodes (no registry needed).
kind load docker-image --name="${KIND_CLUSTER_NAME}" external-secrets-e2e:test
kind load docker-image --name="${KIND_CLUSTER_NAME}" external-secrets:test

# Best-effort teardown of every resource created below; runs on any exit.
function cleanup {
  set +e
  kubectl delete pod e2e 2>/dev/null
  kubectl delete crd/externalsecrets.kubernetes-client.io 2>/dev/null
  kubectl delete -f "${DIR}/localstack.deployment.yaml" 2>/dev/null
  kind delete cluster \
    ${KIND_LOGGING} \
    --name ${KIND_CLUSTER_NAME}
}
trap cleanup EXIT

# Localstack provides fake AWS SSM/SecretsManager/STS endpoints for the tests.
kubectl apply -f ${DIR}/localstack.deployment.yaml

CHART_DIR="$(dirname "$DIR")/charts/kubernetes-external-secrets"

helm install e2e ${CHART_DIR} \
  --set image.repository=external-secrets \
  --set image.tag=test \
  --set env.LOG_LEVEL=debug \
  --set env.LOCALSTACK=true \
  --set env.LOCALSTACK_SSM_URL=http://ssm \
  --set env.LOCALSTACK_SM_URL=http://secretsmanager \
  --set env.AWS_ACCESS_KEY_ID=foobar \
  --set env.AWS_SECRET_ACCESS_KEY=foobar \
  --set env.AWS_REGION=us-east-1 \
  --set env.POLLER_INTERVAL_MILLISECONDS=1000 \
  --set env.LOCALSTACK_STS_URL=http://sts

echo -e "${BGREEN}Granting permissions to external-secrets e2e service account...${NC}"
# '|| true' keeps reruns idempotent when the resources already exist.
kubectl create serviceaccount external-secrets-e2e || true
kubectl create clusterrolebinding permissive-binding \
  --clusterrole=cluster-admin \
  --user=admin \
  --user=kubelet \
  --serviceaccount=default:external-secrets-e2e || true

# Wait for the service-account token secret to be minted.
until kubectl get secret | grep -q ^external-secrets-e2e-token; do \
  echo -e "waiting for api token"; \
  sleep 3; \
done

echo -e "${BGREEN}Starting external-secrets e2e tests...${NC}"
kubectl rollout status deploy/localstack
kubectl rollout status deploy/e2e-kubernetes-external-secrets

# Run the e2e pod attached so its exit code becomes this script's result.
kubectl run \
  --attach \
  --restart=Never \
  --env="LOCALSTACK=true" \
  --env="LOCALSTACK_SSM_URL=http://ssm" \
  --env="LOCALSTACK_SM_URL=http://secretsmanager" \
  --env="AWS_ACCESS_KEY_ID=foobar" \
  --env="AWS_SECRET_ACCESS_KEY=foobar" \
  --env="AWS_REGION=us-east-1" \
  --env="LOCALSTACK_STS_URL=http://sts" \
  --overrides='{ "apiVersion": "v1", "spec":{"serviceAccountName": "external-secrets-e2e"}}' \
  e2e --image=external-secrets-e2e:test
|
/* Copyright 2009-2015 <NAME>
*
* This file is part of the MOEA Framework.
*
* The MOEA Framework is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* The MOEA Framework is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with the MOEA Framework. If not, see <http://www.gnu.org/licenses/>.
*/
package org.moeaframework.core;
import java.util.Iterator;
import org.moeaframework.core.comparator.DominanceComparator;
import org.moeaframework.core.comparator.ParetoDominanceComparator;
/**
* A population that maintains the property of pair-wise non-dominance between
* all solutions. When the {@code add} method is invoked with a new solution,
* all solutions currently in the population that are dominated by the new
* solution are removed. If the new solution is dominated by any member of the
* population, the new solution is not added.
*/
public class NondominatedPopulation extends Population {

	/**
	 * The dominance comparator used by this non-dominated population.
	 */
	private final DominanceComparator comparator;

	/**
	 * Constructs an empty non-dominated population using the Pareto dominance
	 * relation.
	 */
	public NondominatedPopulation() {
		this(new ParetoDominanceComparator());
	}

	/**
	 * Constructs an empty non-dominated population using the specified
	 * dominance relation.
	 *
	 * @param comparator the dominance relation used by this non-dominated
	 *        population
	 */
	public NondominatedPopulation(DominanceComparator comparator) {
		super();
		this.comparator = comparator;
	}

	/**
	 * Constructs a non-dominated population using the Pareto dominance relation
	 * and initialized with the specified solutions.
	 *
	 * @param iterable the solutions used to initialize this non-dominated
	 *        population
	 */
	public NondominatedPopulation(Iterable<? extends Solution> iterable) {
		this();
		addAll(iterable);
	}

	/**
	 * Constructs a non-dominated population using the specified dominance
	 * comparator and initialized with the specified solutions.
	 *
	 * @param comparator the dominance relation used by this non-dominated
	 *        population
	 * @param iterable the solutions used to initialize this non-dominated
	 *        population
	 */
	public NondominatedPopulation(DominanceComparator comparator,
			Iterable<? extends Solution> iterable) {
		this(comparator);
		addAll(iterable);
	}

	/**
	 * If {@code newSolution} is dominates any solution or is non-dominated with
	 * all solutions in this population, the dominated solutions are removed and
	 * {@code newSolution} is added to this population. Otherwise,
	 * {@code newSolution} is dominated and is not added to this population.
	 */
	@Override
	public boolean add(Solution newSolution) {
		Iterator<Solution> iterator = iterator();

		while (iterator.hasNext()) {
			Solution oldSolution = iterator.next();
			int flag = comparator.compare(newSolution, oldSolution);

			if (flag < 0) {
				// newSolution dominates oldSolution: drop the old member.
				iterator.remove();
			} else if (flag > 0) {
				// newSolution is dominated by an existing member: reject it.
				return false;
			} else if (distance(newSolution, oldSolution) < Settings.EPS) {
				// Mutually non-dominated but (near-)identical in objective
				// space: reject to keep the population free of duplicates.
				return false;
			}
		}

		return super.add(newSolution);
	}

	/**
	 * Adds the specified solution to the population, bypassing the
	 * non-domination check. This method should only be used when a
	 * non-domination check has been performed elsewhere, such as in a subclass.
	 * <p>
	 * <b>This method should only be used internally, and should never be made
	 * public by any subclasses.</b>
	 *
	 * @param newSolution the solution to be added
	 * @return true if the population was modified as a result of this operation
	 */
	protected boolean forceAddWithoutCheck(Solution newSolution) {
		return super.add(newSolution);
	}

	/**
	 * Returns the Euclidean distance between two solutions in objective space.
	 *
	 * @param s1 the first solution
	 * @param s2 the second solution
	 * @return the distance between the two solutions in objective space
	 */
	protected double distance(Solution s1, Solution s2) {
		// Sum of squared per-objective differences, then square root.
		double distance = 0.0;

		for (int i = 0; i < s1.getNumberOfObjectives(); i++) {
			distance += Math.pow(s1.getObjective(i) - s2.getObjective(i), 2.0);
		}

		return Math.sqrt(distance);
	}

	/**
	 * Returns the dominance comparator used by this non-dominated population.
	 *
	 * @return the dominance comparator used by this non-dominated population
	 */
	public DominanceComparator getComparator() {
		return comparator;
	}

}
|
#!/bin/sh
set -eux
# tested with GPAC version 1.1.0-DEV-rev1153-g4ad9b4f20-master
# Generates two DASH "splice" test vectors (25 fps and 30 fps) by encoding a
# main content item in two parts around an inserted ad, then packaging both
# with GPAC into a live-profile MPD with CMAF (cmf2) segments.
export BATCH="2021-07-29"
export GPAC="gpac"
# -threads=-1"
# -graph
export MPD=stream.mpd

# --- 25 fps vector -------------------------------------------------------
export AD_CONTENT=/home/rbouqueau/works/qualcomm/CTA-Wave/Test-Content-Generation/content_files/2021-07-29/splice_ad_bbb_AD-A1_1280x720@25_5.76.mp4:noedit
export MAIN_CONTENT=/home/rbouqueau/works/qualcomm/CTA-Wave/Test-Content-Generation/content_files/2021-07-29/splice_main_croatia_A1_1280x720@25_10.mp4:noedit
export TID=$BATCH/splice/25
export AD_BASEURL=https://dash.akamaized.net/WAVE/vectors/$TID/
# NOTE(review): 'rm -rf $TID' removes ./$TID while the packager writes to
# output/$TID — confirm whether output/$TID was meant here.
rm -rf $TID && \
export CMD="$GPAC \
	--xps_inband=no \
	-i $MAIN_CONTENT \
	@ reframer:raw=av:#ClampDur=5.76:xs=0,5.76::props=#PStart=0:#m=main1,#PStart=11.52:#m=main2 \
	@ enc:gfloc:c=aac:b=128k:FID=GEN1A \
	@1 enc:gfloc:c=avc:b=2000k:fintra=1.920:profile=high:color_primaries=1:color_trc=1:colorspace=1:x264-params=level=42:no-open-gop=1:scenecut=0 @ bsrw:novsi:FID=GEN1V \
	-i $AD_CONTENT \
	@ reframer:raw=av:#ClampDur=5.76:xs=0:#PStart=5.76:#m=ad:#BUrl=$AD_BASEURL \
	@ enc:gfloc:c=aac:b=128k:FID=GEN2A \
	@1 enc:gfloc:c=avc:b=2000k:fintra=1.920:profile=high:color_primaries=1:color_trc=1:colorspace=1:x264-params=level=42:no-open-gop=1:scenecut=0 @ bsrw:novsi:FID=GEN2V \
	-o output/$TID/$MPD:profile=live:tpl:stl:cdur=1.920:segdur=1.920:stl:cmaf=cmf2:SID=GEN1A,GEN1V,GEN2A,GEN2V --template=\$m\$_\$Type\$_\$Number\$"
echo $CMD
$CMD && code output/$TID/$MPD && ls -l output/$TID

# --- 30 fps vector (adds a 48 kHz audio resample stage) ------------------
export AD_CONTENT=/home/rbouqueau/works/qualcomm/CTA-Wave/Test-Content-Generation/content_files/2021-07-29/splice_ad_bbb_AD-A1_1280x720@30_6.4.mp4:noedit
export MAIN_CONTENT=/home/rbouqueau/works/qualcomm/CTA-Wave/Test-Content-Generation/content_files/2021-07-29/splice_main_tos_B1_1920x1080@30_10.mp4:noedit
export TID=$BATCH/splice/30
export AD_BASEURL=https://dash.akamaized.net/WAVE/vectors/$TID/
rm -rf $TID && \
export CMD="$GPAC \
	--xps_inband=no \
	-i $MAIN_CONTENT \
	@ resample:osr=48k \
	@ @1#video reframer:raw=av:#ClampDur=6.4:xs=0,6.4::props=#PStart=0:#m=main1,#PStart=12.8:#m=main2 \
	@ enc:gfloc:c=aac:b=128k:FID=GEN1A \
	@1 enc:gfloc:c=avc:b=2000k:fintra=32/15:profile=high:color_primaries=1:color_trc=1:colorspace=1:x264-params=level=42:no-open-gop=1:scenecut=0 @ bsrw:novsi:FID=GEN1V \
	-i $AD_CONTENT \
	@ reframer:raw=av:#ClampDur=6.4:xs=0:#PStart=6.4:#m=ad:#BUrl=$AD_BASEURL \
	@ enc:gfloc:c=aac:b=128k:FID=GEN2A \
	@1 enc:gfloc:c=avc:b=2000k:fintra=32/15:profile=high:color_primaries=1:color_trc=1:colorspace=1:x264-params=level=42:no-open-gop=1:scenecut=0 @ bsrw:novsi:FID=GEN2V \
	-o output/$TID/$MPD:profile=live:tpl:stl:cdur=32/15:segdur=32/15:stl:cmaf=cmf2:SID=GEN1A,GEN1V,GEN2A,GEN2V --template=\$m\$_\$Type\$_\$Number\$"
echo $CMD
$CMD && code output/$TID/$MPD && ls -l output/$TID
|
#!/usr/bin/env bash
# Prepare a Zowe CLI SSH profile for the z/OS build host and clear the
# per-build working directory there. Fails fast on any error.
set -e

# Validate the environment variables required to reach the z/OS host.
if [ -z "$ZOS_TARGET_DIR" ]
then
    echo "\$ZOS_TARGET_DIR is not set. It needs to be set to the z/OS UNIX target directory where the z/OS native code build will be done"
    exit 1
fi

if [ -z "$ZOS_USERID" ]
then
    echo "\$ZOS_USERID is not set. It needs to be set to the user ID that is used for builds"
    exit 1
fi

if [ -z "$ZOS_PASSWORD" ]
then
    echo "\$ZOS_PASSWORD is not set. It needs to be set to the password of the '$ZOS_USERID' user ID"
    exit 1
fi

echo "Preparing Zowe CLI profiles"
# Create (or overwrite) the SSH profile and make it the default so later
# 'zowe zos-uss' commands need no explicit profile argument.
zowe profiles create ssh-profile $ZOWE_CLI_PROFILE_NAME --host $ZOS_HOST --port $ZOS_SSH_PORT --user $ZOS_USERID --password "$ZOS_PASSWORD" --overwrite > /dev/null
zowe profiles set-default ssh $ZOWE_CLI_PROFILE_NAME

# Clean up the per-build directory (named after the CircleCI build number).
echo "Removing $ZOS_TARGET_DIR/b$CIRCLE_BUILD_NUM"
zowe zos-uss issue ssh "rm -rf $ZOS_TARGET_DIR/b$CIRCLE_BUILD_NUM"
|
#!/bin/sh
#
# Copyright 2019 PingCAP, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# See the License for the specific language governing permissions and
# limitations under the License.

set -eux

# Check that error summary are written at the bottom of import.

# NOTE(review): this drops 'tidb_lightning_checkpoint_error_summary' while the
# rest of the test uses 'error_summary' — presumably clearing a stale
# checkpoint database; confirm the name is intentional.
run_sql 'DROP DATABASE IF EXISTS tidb_lightning_checkpoint_error_summary;'

# The easiest way to induce error is to prepopulate the target table with conflicting content.
run_sql 'CREATE DATABASE IF NOT EXISTS error_summary;'
run_sql 'DROP TABLE IF EXISTS error_summary.a;'
run_sql 'DROP TABLE IF EXISTS error_summary.c;'
run_sql 'CREATE TABLE error_summary.a (id INT NOT NULL PRIMARY KEY, k INT NOT NULL);'
run_sql 'CREATE TABLE error_summary.c (id INT NOT NULL PRIMARY KEY, k INT NOT NULL);'

# First run only initializes the checkpoint (failpoint forces an early exit).
export GO_FAILPOINTS="github.com/pingcap/br/pkg/lightning/restore/InitializeCheckpointExit=return(true)"
run_lightning --enable-checkpoint=1 --log-file "$TEST_DIR/lightning-error-summary.log"

# Insert conflicting rows so tables a and c fail their checksums on import.
run_sql 'INSERT INTO error_summary.a VALUES (2, 4), (6, 8);'
run_sql 'INSERT INTO error_summary.c VALUES (3, 9), (27, 81);'

# Second run is EXPECTED to fail; capture its exit code without aborting.
set +e
export GO_FAILPOINTS=""
run_lightning --enable-checkpoint=1 --log-file "$TEST_DIR/lightning-error-summary.log"
ERRORCODE=$?
set -e

[ "$ERRORCODE" -ne 0 ]

# Verify that table `b` is indeed imported
run_sql 'SELECT sum(id), sum(k) FROM error_summary.b'
check_contains 'sum(id): 28'
check_contains 'sum(k): 32'

# Verify the log contains the expected messages at the last few lines
tail -20 "$TEST_DIR/lightning-error-summary.log" > "$TEST_DIR/lightning-error-summary.tail"
grep -Fq '["tables failed to be imported"] [count=2]' "$TEST_DIR/lightning-error-summary.tail"
grep -Fq '[-] [table=`error_summary`.`a`] [status=checksum] [error="checksum mismatched' "$TEST_DIR/lightning-error-summary.tail"
grep -Fq '[-] [table=`error_summary`.`c`] [status=checksum] [error="checksum mismatched' "$TEST_DIR/lightning-error-summary.tail"
! grep -Fq '[-] [table=`error_summary`.`b`] [status=checksum] [error="checksum mismatched' "$TEST_DIR/lightning-error-summary.tail"

# Now check the error log when the checkpoint is not cleaned.
set +e
run_lightning --enable-checkpoint=1 --log-file "$TEST_DIR/lightning-error-summary.log"
ERRORCODE=$?
set -e

[ "$ERRORCODE" -ne 0 ]

tail -20 "$TEST_DIR/lightning-error-summary.log" > "$TEST_DIR/lightning-error-summary.tail"
grep -Fq '["TiDB Lightning has failed last time. To prevent data loss, this run will stop now. Please resolve errors first"] [count=2]' "$TEST_DIR/lightning-error-summary.tail"
grep -Fq '[-] [table=`error_summary`.`a`] [status=18] [failedStep=checksum] [recommendedAction="./tidb-lightning-ctl --checkpoint-error-destroy='"'"'`error_summary`.`a`'"'"' --config=..."]' "$TEST_DIR/lightning-error-summary.tail"
grep -Fq '[-] [table=`error_summary`.`c`] [status=18] [failedStep=checksum] [recommendedAction="./tidb-lightning-ctl --checkpoint-error-destroy='"'"'`error_summary`.`c`'"'"' --config=..."]' "$TEST_DIR/lightning-error-summary.tail"
! grep -Fq '[-] [table=`error_summary`.`b`] [status=18] [failedStep=checksum]' "$TEST_DIR/lightning-error-summary.tail"
grep -Fq '["You may also run `./tidb-lightning-ctl --checkpoint-error-destroy=all --config=...` to start from scratch"]' "$TEST_DIR/lightning-error-summary.tail"
grep -Fq '["For details of this failure, read the log file from the PREVIOUS run"]' "$TEST_DIR/lightning-error-summary.tail"
|
class Particle():
    """Plain container for a particle's physical state."""

    def __init__(self, mass, velocity, temperature):
        # Store the three state attributes as given.
        self.mass = mass
        self.velocity = velocity
        self.temperature = temperature
#!/bin/bash
# stop tomcat service
# Requires sudo privileges; systemd unit name is assumed to be tomcat.service.
sudo systemctl stop tomcat.service
|
# -*- encoding: utf-8 -*-
$:.push File.expand_path("../lib", __FILE__)
require "remarkably/version"
# Gem packaging metadata for the Remarkably builder library.
Gem::Specification.new do |s|
  # Core gem identity.
  s.name = "Remarkably"
  s.version = Remarkably::VERSION
  s.platform = Gem::Platform::RUBY
  s.authors = ["<NAME>"]
  s.email = ["<EMAIL>"]
  s.homepage = 'https://github.com/clivecrous/Remarkably'
  s.summary = %q{A very tiny Markaby-like XML,HTML and CSS builder}
  s.description = %q{Remarkably is a very tiny Markaby-like XML,HTML and CSS builder}
  s.license = "MIT"

  # Development-only dependencies; not installed with the gem itself.
  s.add_development_dependency "bundler", ">= 1.0.0"
  s.add_development_dependency "rake", ">= 0.8.7"
  s.add_development_dependency "rspec", ">= 1.3.0"

  # Package everything tracked by git; executables are taken from bin/.
  s.files = `git ls-files`.split("\n")
  s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
  s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
  s.require_paths = ["lib"]
end
|
<filename>Documentation/_debug_test_impl_8hpp.js<gh_stars>0
// Doxygen navigation data for _debug_test_impl_8hpp (auto-generated).
// Each entry is [display name, anchor URL, children] — do not edit by hand.
var _debug_test_impl_8hpp =
[
    [ "Debug1dBFloat16Test", "_debug_test_impl_8hpp.xhtml#a7313bdcaf4c3cfbd900d583e888e10bf", null ],
    [ "Debug1dFloat32Test", "_debug_test_impl_8hpp.xhtml#a02e739d71284a1bb5cf25aa4e76ea034", null ],
    [ "Debug1dInt16Test", "_debug_test_impl_8hpp.xhtml#acf99889f3486cc377e44dc8c3c430bce", null ],
    [ "Debug1dUint8Test", "_debug_test_impl_8hpp.xhtml#a38d2a08139107d420b511ffebeef4262", null ],
    [ "Debug2dBFloat16Test", "_debug_test_impl_8hpp.xhtml#a1710c99292328a7e623f9326723bac52", null ],
    [ "Debug2dFloat32Test", "_debug_test_impl_8hpp.xhtml#a340b57036ad22a70438e5e6b615eebfb", null ],
    [ "Debug2dInt16Test", "_debug_test_impl_8hpp.xhtml#a70e2a4a133ff823cde320f7aac9eff79", null ],
    [ "Debug2dUint8Test", "_debug_test_impl_8hpp.xhtml#ad248b29d04a555ddfa407f2121f66333", null ],
    [ "Debug3dBFloat16Test", "_debug_test_impl_8hpp.xhtml#a9fc669585b91c13133f87ab8e3608c20", null ],
    [ "Debug3dFloat32Test", "_debug_test_impl_8hpp.xhtml#ab27db9bc57b9d9285ef91e09a6ae1a2e", null ],
    [ "Debug3dInt16Test", "_debug_test_impl_8hpp.xhtml#abdb0d918051840178c879d39e488909e", null ],
    [ "Debug3dUint8Test", "_debug_test_impl_8hpp.xhtml#a353659f233efc7b8eb5a52b48c555c25", null ],
    [ "Debug4dBFloat16Test", "_debug_test_impl_8hpp.xhtml#a9770886bff5c3815fc70738e3a673b08", null ],
    [ "Debug4dFloat32Test", "_debug_test_impl_8hpp.xhtml#a3a17c31e0243a055d062f9ef00e00295", null ],
    [ "Debug4dInt16Test", "_debug_test_impl_8hpp.xhtml#a6cce1d9e431fd143e71e8ba16267c774", null ],
    [ "Debug4dUint8Test", "_debug_test_impl_8hpp.xhtml#aff1f64410b029a9b9925bb4c69219896", null ]
];
import { TerrainType } from '../../terrain';
import { TerrainText } from './TerrainText';
/**
 * Terrain-rendered "GAME OVER" heading shown when a run ends.
 * Delegates all rendering to {@link TerrainText} using brick terrain.
 */
export class GameOverHeading extends TerrainText {
  constructor() {
    const headingText = 'GAME\nOVER';
    super(headingText, TerrainType.Brick, { lineSpacing: 6 });
  }
}
|
package com.freud.zkadmin.framework.util;
import java.sql.Timestamp;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
public class DateUtil {
public static final String PATTERN_STANDARD = "yyyy-MM-dd HH:mm:ss";
public static final String PATTERN_DATE = "yyyy-MM-dd";
public static Timestamp string2Timestamp(final String strDateTime,
String pattern) {
if (strDateTime == null || strDateTime.equals("")) {
throw new java.lang.IllegalArgumentException(
"Date Time Null Illegal");
}
if (pattern == null || pattern.equals("")) {
pattern = PATTERN_STANDARD;
}
final SimpleDateFormat sdf = new SimpleDateFormat(pattern);
Date date = null;
try {
date = sdf.parse(strDateTime);
} catch (final ParseException e) {
throw new RuntimeException(e);
}
return new Timestamp(date.getTime());
}
public static boolean compareDate(final Date firstDate,
final Date secondDate) {
if (firstDate == null || secondDate == null) {
throw new java.lang.RuntimeException();
}
final String strFirstDate = date2String(firstDate, PATTERN_DATE);
final String strSecondDate = date2String(secondDate, PATTERN_DATE);
if (strFirstDate.equals(strSecondDate)) {
return true;
}
return false;
}
public static Timestamp currentTimestamp() {
return new Timestamp(new Date().getTime());
}
public static String date2String(final Date date, String pattern) {
if (date == null) {
throw new java.lang.IllegalArgumentException(
"timestamp null illegal");
}
if (pattern == null || pattern.equals("")) {
pattern = PATTERN_STANDARD;
}
final SimpleDateFormat sdf = new SimpleDateFormat(pattern);
return sdf.format(date);
}
public static Date getFirstDayOfMonth(final Calendar c) {
final int year = c.get(Calendar.YEAR);
final int month = c.get(Calendar.MONTH);
final int day = 1;
c.set(year, month, day, 0, 0, 0);
return c.getTime();
}
public static Date getLastDayOfMonth(final Calendar c) {
int year = c.get(Calendar.YEAR);
int month = c.get(Calendar.MONTH) + 1;
final int day = 1;
if (month > 11) {
month = 0;
year = year + 1;
}
c.set(year, month, day - 1, 0, 0, 0);
return c.getTime();
}
public static Date string2Date(final String strDate, String pattern) {
if (strDate == null || strDate.equals("")) {
throw new RuntimeException("str date null");
}
if (pattern == null || pattern.equals("")) {
pattern = DateUtil.PATTERN_DATE;
}
final SimpleDateFormat sdf = new SimpleDateFormat(pattern);
Date date = null;
try {
date = sdf.parse(strDate);
} catch (final ParseException e) {
throw new RuntimeException(e);
}
return date;
}
public static String timestamp2String(final Timestamp timestamp,
String pattern) {
if (timestamp == null) {
throw new java.lang.IllegalArgumentException(
"timestamp null illegal");
}
if (pattern == null || pattern.equals("")) {
pattern = PATTERN_STANDARD;
}
return new SimpleDateFormat(pattern).format(new Date(timestamp
.getTime()));
}
} |
#!/bin/bash
# Poll www.example.com every 10 seconds and report whether it responds.
# NOTE(review): matching the literal "200 OK" assumes an HTTP/1.x status line;
# HTTP/2 responses print only "200" — confirm against the target server.
while true;
do
if curl -s --head --request GET www.example.com | grep "200 OK" > /dev/null;
then
echo "Website is Online"
else
echo "Website is Offline"
fi
# Wait between probes to avoid hammering the site.
sleep 10
done
<reponame>ariffebriyanto/REKAVA<filename>src/store/rootReducer.js
import { combineReducers } from 'redux'
import { Task } from './ducks/task'
// Root reducer: combines all feature "duck" reducers into the store shape.
// State is keyed by the reducer name (state.Task).
const rootReducer = combineReducers({
  Task,
})
export default rootReducer
|
import random
import torchvision.transforms.functional as F
class RandomRotation(object):
    """Rotate an image by an angle drawn uniformly from [-degrees, degrees].

    Seeding the global ``random`` module in the constructor makes the
    sequence of rotations reproducible across runs.
    """

    def __init__(self, degrees, seed=1):
        # Symmetric rotation range centred on zero.
        self.degrees = (-degrees, degrees)
        random.seed(seed)

    @staticmethod
    def get_params(degrees):
        # Draw a single uniform angle from the configured (low, high) range.
        return random.uniform(degrees[0], degrees[1])

    def __call__(self, img):
        # Positional args mirror torchvision's rotate signature:
        # (img, angle, resample/interpolation, expand, center, fill) —
        # TODO confirm against the installed torchvision version.
        return F.rotate(img, self.get_params(self.degrees), False, False, None, None)
|
/// <summary>
/// Validates that <see cref="Password"/> meets the complexity policy:
/// at least 8 characters with an upper-case letter, a lower-case letter,
/// a digit and one special character from "!@#$%^&amp;*".
/// </summary>
public class PasswordValidator
{
    public string Password { get; set; }

    /// <returns>true when the password satisfies every rule; false otherwise.</returns>
    public bool IsValidPassword()
    {
        // Guard: an unset password previously caused a NullReferenceException
        // on Password.Length; treat it as invalid instead.
        if (string.IsNullOrEmpty(Password))
            return false;
        if (Password.Length < 8)
            return false;
        if (!Password.Any(char.IsUpper))
            return false;
        if (!Password.Any(char.IsLower))
            return false;
        if (!Password.Any(char.IsDigit))
            return false;
        // Require at least one special character from the allowed set.
        if (!Password.Any(c => "!@#$%^&*".Contains(c)))
            return false;
        return true;
    }
}
import { Component, OnInit } from '@angular/core';
import { HttpClient } from '@angular/common/http';
@Component({
  selector: 'app-users',
  templateUrl: './users.component.html',
  styleUrls: ['./users.component.scss']
})
export class UsersComponent implements OnInit {
  // Raw user list fetched from the backend; rendered by the template's *ngFor.
  users: any;

  constructor(private http: HttpClient) { }

  ngOnInit() {
    // Fetch the user list once on init.
    // NOTE(review): the subscription is never torn down — harmless for a
    // one-shot HTTP GET, but confirm if the component is recreated often.
    // NOTE(review): the endpoint URL is hard-coded and plain http — consider
    // moving it to environment config.
    this.http.get('http://url.com/users')
      .subscribe(data => {
        this.users = data;
      });
  }
}
<!-- users.component.html -->
<div *ngFor="let user of users">
<h3>{{user.name}}</h3>
</div> |
#!/bin/bash
# Copy proxy settings from the environment so later edits stay local.
_HTTP_PROXY_HOST=${HTTP_PROXY_HOST}
_HTTP_PROXY_PORT=${HTTP_PROXY_PORT}

# Substitute the proxy placeholders in the view config in place.
# NOTE(review): sed uses '/' as its delimiter — a host value containing '/'
# would break the expression; confirm inputs are plain host strings.
function overwrite_proxy {
    sed -i "s/XX_PROXY_HOST_XX/${_HTTP_PROXY_HOST}/g" /view/view.cfg
    sed -i "s/XX_PROXY_PORT_XX/${_HTTP_PROXY_PORT}/g" /view/view.cfg
}

# Hook executed before the main service starts.
function pre_startup_tasks {
    overwrite_proxy
}

# Hook executed after the main service starts; ':' is a deliberate no-op.
function post_startup_tasks {
    :
}
<reponame>jluisacosta/jluisacosta.github.io.old<filename>src/scripts/app.js
// Tracks whether the pinned-menu visual state is currently active.
window.menuPinned = false;

// Cache jQuery lookups used throughout this script.
var $window = $(window),
$cover = $('.Cover'),
$img = $cover.find('>img'),
$video = $cover.find('>video');
/**
 * Append a stylesheet <link> element for `url` to the document head,
 * letting the CSS load without blocking the initial render.
 */
function loadCSS(url) {
    var link = document.createElement('link');
    link.rel = 'stylesheet';
    link.href = url;
    document.head.appendChild(link);
}
// Size the cover image/video for the given device class.
// Order matters: width must be set before the left offset is computed,
// because the offset reads the element's new width.
function setCover(device) {
    if(device==='laptop'){
        // Full-bleed: let CSS handle responsiveness.
        $video.addClass('responsive-video');
        $video.width('100%');
        $video.css('left', 0);
        $img.addClass('responsive-img');
        $img.width('100%');
        $img.css('left', 0);
    } else if(device==='tablet'){
        // Fixed 1920x1080 aspect: scale from height and centre horizontally.
        $video.removeClass('responsive-video');
        $video.width(Math.round(($video.height()*1920)/1080));
        $video.css('left', -(($video.width()-$cover.width())/2));
        $img.removeClass('responsive-img');
        $img.width(Math.round(($img.height()*1920)/1080));
        $img.css('left', -(($img.width()-$cover.width())/2));
    }
}
// (Re)initialise all skill donut charts at the given pixel size.
function setPieChart(pieSide){
    $('.graph-donut').easyPieChart({
        easing: 'easeOutCirc',
        barColor: '#ffd600',
        trackColor: '#00838f',
        scaleColor: '#e1e1e3',
        scaleLength: 0,
        lineCap: 'square',
        lineWidth: 5,
        size: pieSide,
        animate: 2500,
        // Keep the centred percentage label in sync during the animation.
        onStep: function(from, to, percent) {
            $(this.el).find('.percent').text(Math.round(percent));
        }
    });
}
// Pick layout parameters from the current viewport width.
// Checks narrowest first; note the mobile branch intentionally skips setCover.
function checkResponsiveDevice() {
    if(window.matchMedia('(max-width: 600px)').matches){ //Mobile
        setPieChart(100);
    }else if(window.matchMedia('(max-width: 992px)').matches){ //Tablet
        setCover('tablet');
        setPieChart(130);
    }else{ //Laptop
        setCover('laptop');
        setPieChart(180);
    }
}
// One-time page setup: initialises Materialize widgets and plugins, then
// wires all scroll/resize/click handlers. Bound to the window load event.
function jQueryMain() {
    var
    $menu = $('.Menu')
    , $menuLogo = $menu.find('.Menu-logo')
    , $upButton = $('.UpButton')
    , $aboutMeSection = $('.About')
    , $parallaxContainer = $('.parallax-container')
    , $skillsContent = $('.Skills-content')
    , $servicesCarousel = $('.carousel')
    , $portfolioGrid = $('.grid')
    , $modalTrigger = $('.modal-trigger');
    /* MATERIALIZE AND PLUGINS INITIALISATION */
    //Animations
    // scrollFire options: each entry animates a section when it scrolls
    // within `offset` pixels of the viewport.
    var options=[
        { selector:'#about-me', offset:$aboutMeSection.height()/3, callback: (el) => {
            $(el).addClass('fadeIn');
        } },
        { selector:'#my-skills', offset:$skillsContent.height()/2, callback: (el) => {
            // Animate each donut to its hard-coded skill percentage.
            var $charts = $('.graph-donut'),
            percents = [90,85,60,80,50,80,60,70];
            for (var i = 0; i<$charts.length;i++){
                $($charts[i]).data('easyPieChart').update(percents[i]);
            }
        } },
        { selector:'#resume', offset:$('#resume').height()/2, callback: (el) => {
            var $resume = $(el),
            $hs = $resume.find('h2, h3');
            $hs.each((i,helement) => {
                $(helement).removeClass('bounceOut');
                $(helement).addClass('bounceIn');
            });
        } },
        { selector:'.Resume-row', offset:300, callback: (el) => {
            var $work = $('.Resume-work'),
            $education = $('.Resume-education');
            $work.removeClass('bounceOutDown');
            $work.addClass('bounceInLeft');
            $education.removeClass('bounceOutDown');
            $education.addClass('bounceInRight');
        } },
        { selector:'.Services-content', offset:$('#services').height()/2, callback: (el) => {
            $(el).addClass('fadeIn');
        } },
        { selector: '.Portfolio-content', offset:350, callback: (el) => {
            var $el = $('.Portfolio .animated');
            $el.each((i,element) => {
                $(element).removeClass('bounceOutDown');
                $(element).addClass('bounceInUp');
            });
        }},
        { selector:'.Contact-content', offset:$('#contact').height()/2, callback: (el) => {
            $(el).addClass('fadeIn');
        } },
    ];
    // Menu
    $menu.pushpin({ top: $menu.offset().top });
    $('.scrollspy').scrollSpy({ scrollOffset: $menu.height() });
    $('.button-collapse').sideNav();
    // Skills
    $parallaxContainer.height($skillsContent.height());
    $('.parallax').parallax();
    // Services
    $servicesCarousel.carousel({ full_width: true });
    // Portfolio
    $portfolioGrid.isotope({
        itemSelector: '.grid-item',
        percentPosition: true,
        masonry: {
            columnWidth: '.grid-sizer'
        }
    });
    $modalTrigger.leanModal();
    Materialize.scrollFire(options);
    /* EVENT HANDLERS */
    // Menu
    // Toggle the pinned-menu chrome once scrolling passes the About section;
    // window.menuPinned avoids re-applying classes on every scroll event.
    $window.scroll(() => {
        if($window.scrollTop() >= $aboutMeSection.offset().top - $menu.height()-1) {
            if(!window.menuPinned) {
                $upButton.addClass('UpButton--show');
                $menuLogo.addClass('Menu-logo--show');
                window.menuPinned = true;
            }
        } else if(window.menuPinned) {
            $upButton.removeClass('UpButton--show');
            $menuLogo.removeClass('Menu-logo--show');
            window.menuPinned = false;
        }
    });
    // Skills
    // Recompute responsive layout on every resize.
    $window.resize(() => {
        checkResponsiveDevice();
        $parallaxContainer.height($skillsContent.height());
        $menu.pushpin({ top: $menu.offset().top });
    });
    // Resume
    // Swap the Materialize icon name between 'add' and 'remove' on toggle.
    $('.Resume .collapsible-header').click((event) => {
        var $element = $(event.target);
        $element.html(($element.html()=='add'?'remove':'add'));
    });
    // Services
    $('.Services .collapsible-header').click((event) => {
        var $element = $(event.target).parents('li');
        $servicesCarousel.carousel('set', $element.data('slide'));
    });
    // Portfolio
    $('.Portfolio-all').click(() => {
        $portfolioGrid.isotope({ filter: '*' });
    });
    $('.Portfolio-filter').click(() => {
        $portfolioGrid.isotope({ filter: '.filter' });
    });
    // Populate the modal from the clicked thumbnail's data before it opens.
    $modalTrigger.click((event) => {
        var $element = $(event.target);
        $('.Portfolio-modalHeader').html($element.parents('.Portfolio-thumbnailInfo').data('info'));
        $('.Portfolio-modalImage').attr('src',$element.parents('.Portfolio-thumbnail').find('> img').attr('src'));
    });
}
$(() => {
    /* Font asynchronous loading */
    loadCSS('https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.6.3/css/font-awesome.min.css');
});
// Initial layout pass runs immediately; the heavy widget setup waits for
// the full window load so element sizes are final.
checkResponsiveDevice();
$window.load(jQueryMain);
|
#!/bin/sh
# Launch continual-learning few-shot training on the GPU given as $1.
export GPU_ID=$1
echo $GPU_ID
# Run from the repository root (this script lives one level down).
cd ..
export DATASET_DIR="datasets/"
# Restrict CUDA to the selected device.
export CUDA_VISIBLE_DEVICES=$GPU_ID
# Activate the relevant virtual environment:
python train_continual_learning_few_shot_system.py --name_of_args_json_file experiment_config/slimagenet_variant_default_5_way_1_vgg-fine-tune-scratch_shot__True_3_3_LSLR_conditioned_0.json --gpu_to_use $GPU_ID
<reponame>Bekhzod96/nestjs-newapp
import { IsString, IsInt, IsNumber } from 'class-validator';
/** Payload for creating a cat; fields are validated by class-validator. */
export class CreateCatDto {
  @IsString()
  name: string;
  @IsNumber()
  age: number;
  @IsString()
  breed: string;
}

// NOTE(review): age is typed string here but number in CreateCatDto, the
// class name is not PascalCase, and no validator decorators are applied —
// confirm whether update payloads really arrive as raw strings before
// changing the public shape (renaming would break importers).
export class updateCatDto {
  age?: string;
  name?: string;
}

/** Request-body shape consumed by the cat middleware; auth is required. */
export class getCatBodyMiddleware {
  name?: string;
  age?: string;
  auth: string;
}
|
package cyclops.stream;
import cyclops.reactive.ReactiveSeq;
/**
 * Runs the shared {@code AbstractReactiveSeqTest} suite against the
 * pull-based {@link ReactiveSeq} factory methods.
 */
public class PullReactiveSeqTest extends AbstractReactiveSeqTest {

    /** Builds the sequence under test from the given values. */
    @Override
    public ReactiveSeq<Integer> of(Integer... values) {
        return ReactiveSeq.of(values);
    }

    /** Builds an empty sequence under test. */
    @Override
    public ReactiveSeq<Integer> empty() {
        return ReactiveSeq.empty();
    }
}
|
<reponame>rbrishabh/chaos-mesh
// Copyright 2021 Chaos Mesh Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// See the License for the specific language governing permissions and
// limitations under the License.
package finalizers
import (
"context"
"github.com/go-logr/logr"
apierrors "k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/client-go/util/retry"
ctrl "sigs.k8s.io/controller-runtime"
"sigs.k8s.io/controller-runtime/pkg/client"
"github.com/chaos-mesh/chaos-mesh/api/v1alpha1"
"github.com/chaos-mesh/chaos-mesh/controllers/utils/recorder"
)
const (
	// AnnotationCleanFinalizer key
	AnnotationCleanFinalizer = `chaos-mesh.chaos-mesh.org/cleanFinalizer`
	// AnnotationCleanFinalizerForced value
	AnnotationCleanFinalizerForced = `forced`

	// RecordFinalizer is the finalizer managed by this reconciler; it keeps
	// chaos objects alive until their records are resumed.
	RecordFinalizer = "chaos-mesh/records"
)
// Reconciler for common chaos
type Reconciler struct {
	// Object is used to mark the target type of this Reconciler
	Object v1alpha1.InnerObject

	// Client is used to operate on the Kubernetes cluster
	client.Client
	// Reader provides read access to the cluster (presumably uncached direct
	// reads — confirm against the manager wiring).
	client.Reader

	// Recorder emits chaos lifecycle events (finalizer init/removal, failures).
	Recorder recorder.ChaosRecorder
	// Log is the structured logger for this controller.
	Log logr.Logger
}
// Reconcile the common chaos: keeps the record finalizer in sync with the
// object's lifecycle. Finalizers are removed once every record has returned
// to NotInjected (or removal is forced via the clean-finalizer annotation),
// and the record finalizer is added to live objects that lack it.
func (r *Reconciler) Reconcile(req ctrl.Request) (ctrl.Result, error) {
	obj := r.Object.DeepCopyObject().(v1alpha1.InnerObject)

	if err := r.Client.Get(context.TODO(), req.NamespacedName, obj); err != nil {
		if apierrors.IsNotFound(err) {
			r.Log.Info("chaos not found")
		} else {
			// TODO: handle this error
			r.Log.Error(err, "unable to get chaos")
		}
		return ctrl.Result{}, nil
	}

	finalizers := obj.GetObjectMeta().Finalizers
	records := obj.GetStatus().Experiment.Records
	shouldUpdate := false
	if obj.IsDeleted() {
		// Only drop finalizers once every record is back to NotInjected,
		// unless removal is explicitly forced by annotation.
		resumed := true
		for _, record := range records {
			if record.Phase != v1alpha1.NotInjected {
				resumed = false
			}
		}

		if obj.GetObjectMeta().Annotations[AnnotationCleanFinalizer] == AnnotationCleanFinalizerForced || (resumed && len(finalizers) != 0) {
			r.Recorder.Event(obj, recorder.FinalizerRemoved{})
			finalizers = []string{}
			shouldUpdate = true
		}
	} else {
		if !ContainsFinalizer(obj.(metav1.Object), RecordFinalizer) {
			r.Recorder.Event(obj, recorder.FinalizerInited{})
			shouldUpdate = true
			finalizers = append(obj.GetObjectMeta().Finalizers, RecordFinalizer)
		}
	}

	if shouldUpdate {
		// Re-fetch and write back under conflict retry so concurrent updates
		// to the object do not lose the finalizer change.
		updateError := retry.RetryOnConflict(retry.DefaultBackoff, func() error {
			obj := r.Object.DeepCopyObject().(v1alpha1.InnerObject)

			if err := r.Client.Get(context.TODO(), req.NamespacedName, obj); err != nil {
				r.Log.Error(err, "unable to get chaos")
				return err
			}

			obj.GetObjectMeta().Finalizers = finalizers
			return r.Client.Update(context.TODO(), obj)
		})
		if updateError != nil {
			// TODO: handle this error
			r.Log.Error(updateError, "fail to update")
			// Fixed: Err was previously the literal string
			// "updateError.Error()" instead of the actual error message.
			r.Recorder.Event(obj, recorder.Failed{
				Activity: "update finalizer",
				Err:      updateError.Error(),
			})
			return ctrl.Result{}, nil
		}

		r.Recorder.Event(obj, recorder.Updated{
			Field: "finalizer",
		})
	}

	return ctrl.Result{}, nil
}
// ContainsFinalizer reports whether the object currently carries the given
// finalizer string.
func ContainsFinalizer(o metav1.Object, finalizer string) bool {
	for _, candidate := range o.GetFinalizers() {
		if candidate == finalizer {
			return true
		}
	}
	return false
}
|
/*
Siesta 4.2.2
Copyright(c) 2009-2016 Bryntum AB
http://bryntum.com/contact
http://bryntum.com/products/siesta/license
*/
// Joose Role mixing Ext JS introspection helpers into the DOM query finder:
// component lookup from DOM nodes and detection of auto-generated ids that
// should be ignored when building stable selectors.
Role('Ariadne.ExtJSDomQueryFinder.Role.ExtJSHelper', {

    methods : {

        // Walk up from `el` to the nearest ancestor whose id maps to a
        // registered Ext component; null when Ext is absent or none found.
        getComponentOfDomElement : function (el) {
            var doc = el.ownerDocument
            var body = doc.body

            var Ext = (doc.defaultView || doc.parentWindow).Ext
            if (!Ext) return null

            while (el && el != body) {
                if (el.id && Ext.getCmp(el.id)) return Ext.getCmp(el.id)

                el = el.parentElement
            }

            return null
        },

        // Resolve the Ext namespace of the window owning `el`.
        getExtByDomElement : function (el) {
            var doc = el.ownerDocument

            return (doc.defaultView || doc.parentWindow).Ext
        },

        // Regexp matching the configured Ext CSS class prefix (default 'x').
        getExtCssClassPrefixRegexp : function (el) {
            var Ext = this.getExtByDomElement(el)
            if (!Ext) return null

            var baseCSSPrefix = Ext.baseCSSPrefix || 'x'

            return new RegExp('^' + baseCSSPrefix)
        },

        // Ext JS 4+: Form fields sometimes get their 'name' generated based on a parent id
        // property is considered to be auto-generated if it contains an id string and id is in turn auto-generated
        valueIsAutoGeneratedByComponent : function (comp, value) {
            // Not relevant for Ext < 4
            if (!comp.up) return false;

            value = String(value)

            var componentsToCheck = comp.autoGenId ? [ comp ] : []

            var parentWithAutoId = comp.up('[autoGenId=true]');
            if (parentWithAutoId) componentsToCheck.push(parentWithAutoId)

            var childrenWithAutoId = parentWithAutoId && parentWithAutoId.query && parentWithAutoId.query('[autoGenId=true]')
            if (childrenWithAutoId) componentsToCheck.push.apply(componentsToCheck, childrenWithAutoId)

            return componentsToCheck.some(function (comp) {
                return value.indexOf(comp.id) >= 0
            })
        },

        componentHasAutoGeneratedId : function (component, Ext) {
            var id = component.id

            // Ext3 ?
            if (/^ext-gen\d+|^ext-comp\d+/.test(id)) {
                return true;
            }

            if (Ext && this.idIsGeneratedByExtDotIdCall(id, Ext)) return true

            if (component.isWidget) {
                return id.replace(/\d+$/, '') == component.identifiablePrefix
            }

            // even if `autoGenId` can be set to false, the id of the component can be formed from the id
            // if its parent, like "window-1019-header_hd" (id of window header), where "window-1019" is autogenerated id
            // of parent component
            return component.autoGenId || this.valueIsAutoGeneratedByComponent(component, id);
        },

        // detect ids generated with `Ext.id()`
        idIsGeneratedByExtDotIdCall : function (id, Ext, el) {
            Ext = Ext || this.getExtByDomElement(el)
            if (!Ext) return false

            var match = /^(.+?)\d+$/.exec(id)

            return Boolean(match && match[ 1 ] == Ext.idPrefix)
        },

        domElementHasAutoGeneratedId : function (el) {
            var id = el.id

            if (/^ext-/.test(id) || this.idIsGeneratedByExtDotIdCall(id, null, el)) return true

            // id of node in the dom can be formed from the id of the component this node belongs to
            // for example dom node `container-1019-innertCt` belonging to container-1019
            // such ids are considered auto-generated and should be ignored
            var comp = this.getComponentOfDomElement(el)

            if (comp) {
                if ((id !== comp.id && id.indexOf(comp.id) > -1) || this.componentHasAutoGeneratedId(comp)) {
                    return true
                }
            }

            return false
        }
    }
});
|
// Copyright 2011 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.plastic;
/**
 * Defines per-instance context values for a transformed PlasticClass.
 */
public interface InstanceContext
{
    /**
     * Returns the type of the instance created with this context. This is most often of interest
     * to implementations of {@link ComputedValue}.
     *
     * @return the concrete class of the transformed instance
     */
    Class<?> getInstanceType();

    /**
     * Gets an instance context value which is identified by type.
     *
     * @param valueType type under which the value was registered
     * @return the context value for {@code valueType}
     * @throws IllegalArgumentException
     *             if no such value is available in the context.
     */
    <T> T get(Class<T> valueType);
}
|
import Vue from "vue";
import VueRouter from "vue-router";
import Home from "../views/Home.vue";
import store from "../store/index"
import NETWORK_CONSTANTS from "./NETWORK_CONSTANTS";
import auth from "../api/auth"
// Route path/name constants shared with navigation code elsewhere.
const {
  HOME,
  WELCOME,
  MY_DATA,
  MY_DATA_DETAILS,
  WIZARD,
  SIGN_UP,
  LOGIN,
  VERIFY_EMAIL,
  CONTACT_US,
  MY_PROFILE,
  MOCK_DV,
  MORE_INFORMATION,
  TERMS_AND_CONDITIONS,
  READ_ONLY_TERMS_AND_CONDITIONS,
  FORGOT_YOUR_PASSWORD,
  PASSWORD_RESET_CONFIRM
} = NETWORK_CONSTANTS;

Vue.use(VueRouter);
// Route table. All components except Home are lazy-loaded; meta flags are
// enforced by the beforeEach guard: requiresAuth needs a logged-in user,
// requiresDataset additionally needs dataset info in the Vuex store.
const routes = [
  {
    path: HOME.PATH,
    name: HOME.NAME,
    component: Home
  },
  {
    path: `${WIZARD.PATH}/`,
    name: WIZARD.NAME,
    // dynamic segments start with a colon
    component: () => import("../views/Wizard.vue"),
    meta: {
      requiresAuth: true,
      requiresDataset: true
    }
  },
  {
    path: MY_DATA.PATH,
    name: MY_DATA.NAME,
    component: () => import("../views/MyData.vue"),
    meta: {
      requiresAuth: true,
    }
  },
  {
    path: `${MY_DATA_DETAILS.PATH}`,
    name: "MyDataDetails",
    component: () => import("../views/MyDataDetails.vue"),
    meta: {
      requiresAuth: true,
      requiresDataset: true
    }
  },
  {
    path: SIGN_UP.PATH,
    name: SIGN_UP.NAME,
    component: () => import("../views/SignUp.vue")
  },
  {
    path: VERIFY_EMAIL.PATH,
    name: VERIFY_EMAIL.NAME,
    component: () => import("../views/VerifyEmail.vue")
  },
  {
    path: `${SIGN_UP.PATH}/confirmation`,
    name: "SignUpConfirmation",
    component: () => import("../views/SignUpConfirmation.vue")
  },
  {
    path: LOGIN.PATH,
    name: LOGIN.NAME,
    component: () => import("../views/LogIn.vue")
  },
  {
    path: FORGOT_YOUR_PASSWORD.PATH,
    name: FORGOT_YOUR_PASSWORD.NAME,
    component: () => import("../views/ForgotYourPassword.vue")
  },
  {
    name: PASSWORD_RESET_CONFIRM.NAME,
    path: PASSWORD_RESET_CONFIRM.PATH,
    component: () => import("../views/PasswordResetConfirm.vue")
  },
  {
    path: WELCOME.PATH,
    name: WELCOME.NAME,
    component: () => import("../views/Welcome.vue"),
    meta: {
      requiresAuth: true
    }
  },
  {
    path: TERMS_AND_CONDITIONS.PATH,
    name: TERMS_AND_CONDITIONS.NAME,
    component: () => import("../views/TermsAndConditions.vue"),
    meta: {
      requiresAuth: true,
    }
  },
  {
    path: READ_ONLY_TERMS_AND_CONDITIONS.PATH,
    name: READ_ONLY_TERMS_AND_CONDITIONS.NAME,
    component: () => import("../views/ReadOnlyTermsAndConditions.vue")
  },
  {
    path: CONTACT_US.PATH,
    name: CONTACT_US.NAME,
    component: () => import("../views/ContactUs.vue")
  },
  {
    path: MY_PROFILE.PATH,
    name: MY_PROFILE.NAME,
    component: () => import("../views/MyProfile.vue"),
    meta: {
      requiresAuth: true
    }
  },
  {
    path: MOCK_DV.PATH,
    name: MOCK_DV.NAME,
    component: () => import("../views/MockDV.vue")
  },
  {
    path: MORE_INFORMATION.PATH,
    name: MORE_INFORMATION.NAME,
    component: () => import("../views/MoreInformation.vue")
  },
  // Catch-all: anything unmatched renders the 404 page.
  {
    path: "*",
    name: "NotFoundPage",
    component: () => import("../views/NotFoundPage.vue")
  }
];
// History-mode router; smooth-scrolls to in-page anchors on navigation.
const router = new VueRouter({
  mode: "history",
  // base: process.env.BASE_URL,
  routes,
  scrollBehavior(to, from, savedPosition) {
    // NOTE(review): savedPosition is ignored for non-hash navigation, so
    // back/forward never restores scroll position — confirm this is intended.
    if (to.hash) {
      return {
        selector: to.hash,
        behavior: 'smooth',
      }
    }
  }
});
// Global auth guard. Re-validates the session with the backend on every
// navigation (localStorage alone may be stale), then routes accordingly.
// NOTE(review): this issues one account-details request per navigation —
// confirm the extra latency is acceptable.
router.beforeEach((to, from, next) => {
  // check if user is logged in, because localStorage may be stale
  auth.getAccountDetails().then((response) => {
    store.commit('auth/SET_USER', response.data)
    if (to.name === NETWORK_CONSTANTS.LOGIN.NAME && store.state.auth.user !== null) {
      next({name: NETWORK_CONSTANTS.MY_DATA.NAME})
      // If user is logged in and tries to go directly to a page that requires
      // Vuex state which hasn't been populated, redirect to My Data page
    } else if (to.matched.some(record => record.meta.requiresDataset
        && store.state.dataset.datasetInfo == null)) {
      next({name: NETWORK_CONSTANTS.MY_DATA.NAME})
    } else {
      // If everything is fine, go to the next page
      next()
    }
  }).catch((data) => {
    // Session check failed: clear local auth state, then either bounce to
    // login (remembering the intended path) or continue to a public page.
    store.commit('auth/LOGOUT')
    store.dispatch('dataset/clearDatasetStorage', null, {root: true})
    if (to.matched.some(record => record.meta.requiresAuth) && store.state.auth.user == null) {
      sessionStorage.setItem('redirectPath', to.path);
      next({name: NETWORK_CONSTANTS.LOGIN.NAME})
    } else {
      next()
    }
  })
})
// Always land at the top of the page after a route change.
router.afterEach(() => {
  window.scrollTo(0, 0);
});

export default router;
|
/*
* This file was generated by the CommonAPI Generators.
* Used org.genivi.commonapi.someip 3.1.12.v201801251434.
* Used org.franca.core 0.9.1.201412191134.
*
* This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0.
* If a copy of the MPL was not distributed with this file, You can obtain one at
* http://mozilla.org/MPL/2.0/.
*/
#ifndef V0_COMMONAPI_EXAMPLES_E01_HELLO_WORLD_SOMEIP_PROXY_HPP_
#define V0_COMMONAPI_EXAMPLES_E01_HELLO_WORLD_SOMEIP_PROXY_HPP_

#include <v0/commonapi/examples/E01HelloWorldProxyBase.hpp>
#include <v0/commonapi/examples/E01HelloWorldSomeIPDeployment.hpp>

// Temporarily flag internal compilation so the CommonAPI SomeIP headers
// accept being included from generated binding code.
#if !defined (COMMONAPI_INTERNAL_COMPILATION)
#define COMMONAPI_INTERNAL_COMPILATION
#endif

#include <CommonAPI/SomeIP/Factory.hpp>
#include <CommonAPI/SomeIP/Proxy.hpp>
#include <CommonAPI/SomeIP/Types.hpp>

#undef COMMONAPI_INTERNAL_COMPILATION

#include <string>

# if defined(_MSC_VER)
# if _MSC_VER >= 1300
/*
 * Diamond inheritance is used for the CommonAPI::Proxy base class.
 * The Microsoft compiler put warning (C4250) using a desired c++ feature: "Delegating to a sister class"
 * A powerful technique that arises from using virtual inheritance is to delegate a method from a class in another class
 * by using a common abstract base class. This is also called cross delegation.
 */
# pragma warning( disable : 4250 )
# endif
# endif

namespace v0 {
namespace commonapi {
namespace examples {

// SOME/IP transport binding of the E01HelloWorld proxy (generated code —
// regenerate rather than edit by hand).
class E01HelloWorldSomeIPProxy
    : virtual public E01HelloWorldProxyBase,
      virtual public CommonAPI::SomeIP::Proxy {
public:
    E01HelloWorldSomeIPProxy(
        const CommonAPI::SomeIP::Address &_address,
        const std::shared_ptr<CommonAPI::SomeIP::ProxyConnection> &_connection);

    virtual ~E01HelloWorldSomeIPProxy() { }

    virtual void sayHello(const std::string &_name, CommonAPI::CallStatus &_internalCallStatus, std::string &_message, const CommonAPI::CallInfo *_info);
    virtual std::future<CommonAPI::CallStatus> sayHelloAsync(const std::string &_name, SayHelloAsyncCallback _callback, const CommonAPI::CallInfo *_info);

    virtual void getOwnVersion(uint16_t &_major, uint16_t &_minor) const;

private:

};

} // namespace examples
} // namespace commonapi
} // namespace v0

#endif // V0_COMMONAPI_EXAMPLES_E01_HELLO_WORLD_SOMEIP_PROXY_HPP_
|
/**
 * Classify an integer's parity.
 * Uses strict equality (the original used loose ==) and a single expression.
 * @param {number} num - integer to classify
 * @returns {"even"|"odd"}
 */
function isEvenOdd(num) {
  return num % 2 === 0 ? "even" : "odd";
}

console.log(isEvenOdd(17));
#!/bin/bash

set -e

# This script builds a binary dpkg for Debian based distros. It does not
# currently run in CI, and is instead run manually and the resulting dpkg is
# uploaded to GitHub via the web UI.
#
# Note that this requires 'cargo deb', which can be installed with
# 'cargo install cargo-deb'.
#
# This should be run from the root of the ripgrep repo.

# Fail fast with a clear message if the prerequisite tool is absent.
if ! command -V cargo-deb > /dev/null 2>&1; then
    echo "cargo-deb command missing" >&2
    exit 1
fi

# 'cargo deb' does not seem to provide a way to specify an asset that is
# created at build time, such as ripgrep's man page. To work around this,
# we force a debug build, copy out the man page (and shell completions)
# produced from that build, put it into a predictable location and then build
# the deb, which knows where to look.

DEPLOY_DIR=deployment/deb
mkdir -p "$DEPLOY_DIR"
cargo build

# Find and copy man page.
# The find|ls -t|head pipeline picks the most recently generated artifact.
manpage="$(find ./target/debug -name rg.1 -print0 | xargs -0 ls -t | head -n1)"
cp "$manpage" "$DEPLOY_DIR/"

# Do the same for shell completions.
compbash="$(find ./target/debug -name rg.bash -print0 | xargs -0 ls -t | head -n1)"
cp "$compbash" "$DEPLOY_DIR/"
compfish="$(find ./target/debug -name rg.fish -print0 | xargs -0 ls -t | head -n1)"
cp "$compfish" "$DEPLOY_DIR/"
compzsh="complete/_rg"
cp "$compzsh" "$DEPLOY_DIR/"

cargo deb
#!/usr/bin/env bash

# Print usage help for this demo control script.
function usage () {
    echo "Illegal number of parameters"
    echo ""
    echo "Usage: "
    echo "-----"
    echo "To start the demo: ./elk.sh start"
    echo "To stop the demo: ./elk.sh stop"
    echo "To destroy the demo environment: ./elk.sh destroy"
    echo ""
}

# Get user option
if [ "$#" -ne 1 ]; then
    usage
    # Fixed: a bad invocation previously fell through and exited 0.
    exit 1
else
    # Set project name for docker compose
    project="ebi"
    # Ignore case sensitivity
    shopt -s nocasematch
    case ${1} in
        start)
            docker-compose -p ${project} up -d
            # Chain both service starts so the success check covers them both.
            # Fixed: '[ $? == 0 ]' only reflected the last 'docker exec'.
            if docker exec -it ${project}_nginx_1 bash -c "service nginx start" &&
               docker exec -it ${project}_nginx_1 bash -c "service filebeat start"; then
                echo "EBI ELK demo environment started and ready"
            else
                echo "There was some problmen starting EBI ELK demo environment "
            fi
            ;;
        stop)
            if docker-compose -p ${project} stop; then
                echo "Successfully stopped"
            else
                echo "There was a problem stopping the environment"
            fi
            ;;
        destroy)
            docker-compose -p ${project} down
            ;;
        *)
            usage
            exit 1
            ;;
    esac
fi
cmake_minimum_required(VERSION 3.10)
project(PhotinoProject)

set(CMAKE_CXX_STANDARD 11)

# Resolve GTK3, WebKit2GTK and libnotify via pkg-config; all are required.
find_package(PkgConfig REQUIRED)
pkg_check_modules(GTK3 REQUIRED gtk+-3.0)
pkg_check_modules(WEBKIT2GTK REQUIRED webkit2gtk-4.0)
pkg_check_modules(LIBNOTIFY REQUIRED libnotify)

include_directories(${GTK3_INCLUDE_DIRS} ${WEBKIT2GTK_INCLUDE_DIRS} ${LIBNOTIFY_INCLUDE_DIRS})
link_directories(${GTK3_LIBRARY_DIRS} ${WEBKIT2GTK_LIBRARY_DIRS} ${LIBNOTIFY_LIBRARY_DIRS})

# The native shared library consumed by the managed Photino wrapper.
add_library(Photino.Native SHARED Exports.cpp Photino.Linux.cpp)
target_link_libraries(Photino.Native ${GTK3_LIBRARIES} ${WEBKIT2GTK_LIBRARIES} ${LIBNOTIFY_LIBRARIES})

# Copy the freshly built library next to the test app's build output.
# NOTE(review): the relative path assumes a fixed build-tree layout — confirm.
add_custom_command(TARGET Photino.Native POST_BUILD
    COMMAND ${CMAKE_COMMAND} -E copy $<TARGET_FILE:Photino.Native> ../Photino.Test/bin/Debug/net5.0/
)
// Default dice-pool size, difficulty and hunger dice; each is overwritten
// from the form inputs every time rollSim() runs.
var dice = 6
var dc = 3
var hunger = 3
/**
 * Build the inclusive integer range [0, n].
 * Returns an empty array when n is negative.
 */
function getRange(n) {
    var values = [];
    for (var value = 0; value <= n; value++) {
        values.push(value);
    }
    return values;
}
/**
 * Draw n faces uniformly at random, with replacement, from the `die` array.
 * @returns an array of n sampled faces
 */
function getRoll(die, n) {
    var drawn = [];
    while (drawn.length < n) {
        drawn.push(die[Math.floor(Math.random() * die.length)]);
    }
    return drawn
}
/**
 * Total successes for a roll: each (paired) critical is worth two successes,
 * each normal success counts once.
 */
function getTotalSuccesses(crit, normal) {
    return normal + crit * 2
}
/**
 * Run one batch of 100 simulated rolls with the parameters currently in the
 * form, then append a result row (wins / crits / messy crits / bestial
 * failures, each out of 100) to the #results table.
 */
function rollSim() {
    // Form values are strings; the >= comparisons below rely on JS numeric
    // coercion — TODO confirm the inputs are always numeric.
    dice = document.getElementById('dice').value;
    dc = document.getElementById('dc').value;
    hunger = document.getElementById('hunger').value;
    runs = 100;
    wins = 0;
    warr = []
    crit = 0;
    messy = 0;
    bestial = 0;
    // Face values: -1 bestial-failure face, 0 miss, 1 success, 2 critical.
    die = [-1,0,0,0,0,1,1,1,1,2];
    // Fixed off-by-one: '<= runs' performed 101 rolls while every reported
    // ratio used 'runs' (100) as the denominator.
    for (var i = 0; i < runs; i++) {
        roll = getRoll(die, dice);
        var c = 0;
        var r = 0;
        for (var d = 0; d < roll.length; d++) {
            if (roll[d] == 2) {
                c++;
            } else if (roll[d] == 1) {
                r++;
            }
        }
        // Criticals only count in pairs; an unpaired critical downgrades to
        // a normal success.
        if (c % 2 != 0) {
            c--;
            r++;
        }
        // Successes
        total = getTotalSuccesses(c, r);
        if (total >= dc) {
            wins++;
        }
        // Critical wins (fixed bitwise '&' -> logical '&&').
        if (total >= dc && c > 0) {
            crit++;
        }
        // The next two checks involve hunger dice. We assume that
        // the first X dice are the hunger dice.
        var hdice = roll.slice(0, hunger);
        // Messy crits
        if (hdice.includes(2) && c > 0 && total >= dc) {
            messy++;
        }
        // Bestial failures
        if (hdice.includes(-1) && total < dc) {
            bestial++;
        }
    }
    // Append one result row to the table.
    var table = document.getElementById("results");
    var newRow = table.insertRow();
    newRow.insertCell().innerText = wins + " / " + runs;
    newRow.insertCell().innerText = crit + " / " + runs;
    newRow.insertCell().innerText = messy + " / " + runs;
    newRow.insertCell().innerText = bestial + " / " + runs;
}
// Clear any previous results from the table, then run 21 simulation batches.
function doRolls() {
    // reset the table
    // NOTE(review): deletion starts at row index 1, presumably to keep a
    // header row inside the tbody -- confirm against the HTML.
    table = document.getElementById('results');
    n = table.tBodies[0].childElementCount;
    for (var i=1; i<n ; i++) {
        table.deleteRow(1)
    }
    // Do the rolls
    // NOTE(review): `i <= 20` runs 21 batches; confirm whether exactly 20
    // was intended.
    for (var i=0; i <= 20; i++) {
        rollSim();
    }
}
// Wire the roll button up once the DOM is ready.
document.addEventListener("DOMContentLoaded", function (event) {
    document.getElementById("rollBtn").onclick = doRolls;
});
|
#!/bin/bash
set -e
# Bootstrap (if needed) and start a Ceph metadata server (MDS) daemon.
# Relies on helpers defined elsewhere: get_config, check_config,
# get_admin_key, check_admin_key, log, plus the CLI_OPTS/DAEMON_OPTS and
# *_KEYRING / CEPHFS_* environment variables.
function start_mds {
    get_config
    check_config
    # Check to see if we are a new MDS
    if [ ! -e "$MDS_KEYRING" ]; then
       # Prefer the admin keyring, fall back to the bootstrap-mds keyring.
       if [ -e "$ADMIN_KEYRING" ]; then
           keyring_opt=(--name client.admin --keyring "$ADMIN_KEYRING")
       elif [ -e "$MDS_BOOTSTRAP_KEYRING" ]; then
           keyring_opt=(--name client.bootstrap-mds --keyring "$MDS_BOOTSTRAP_KEYRING")
       else
           log "ERROR- Failed to bootstrap MDS: could not find admin or bootstrap-mds keyring. You can extract it from your current monitor by running 'ceph auth get client.bootstrap-mds -o $MDS_BOOTSTRAP_KEYRING"
           exit 1
       fi
       # Bail out early if the cluster is unreachable with this keyring.
       timeout 10 ceph "${CLI_OPTS[@]}" "${keyring_opt[@]}" health || exit 1
       # Generate the MDS key
       ceph "${CLI_OPTS[@]}" "${keyring_opt[@]}" auth get-or-create mds."$MDS_NAME" osd 'allow rwx' mds 'allow' mon 'allow profile mds' -o "$MDS_KEYRING"
       chown --verbose ceph. "$MDS_KEYRING"
       chmod 600 "$MDS_KEYRING"
    fi
    # NOTE (leseb): having the admin keyring is really a security issue
    # If we need to bootstrap a MDS we should probably create the following on the monitors
    # I understand that it is handy to do this here
    # but having the admin key inside every container is a concern
    # Create the Ceph filesystem, if necessary
    if [ "$CEPHFS_CREATE" -eq 1 ]; then
        get_admin_key
        check_admin_key
        # Only create the filesystem if one with this name does not exist yet.
        if [[ "$(ceph "${CLI_OPTS[@]}" fs ls | grep -c name:."${CEPHFS_NAME}",)" -eq 0 ]]; then
           # Make sure the specified data pool exists
           if ! ceph "${CLI_OPTS[@]}" osd pool stats "${CEPHFS_DATA_POOL}" > /dev/null 2>&1; then
              ceph "${CLI_OPTS[@]}" osd pool create "${CEPHFS_DATA_POOL}" "${CEPHFS_DATA_POOL_PG}"
           fi
           # Make sure the specified metadata pool exists
           if ! ceph "${CLI_OPTS[@]}" osd pool stats "${CEPHFS_METADATA_POOL}" > /dev/null 2>&1; then
              ceph "${CLI_OPTS[@]}" osd pool create "${CEPHFS_METADATA_POOL}" "${CEPHFS_METADATA_POOL_PG}"
           fi
           ceph "${CLI_OPTS[@]}" fs new "${CEPHFS_NAME}" "${CEPHFS_METADATA_POOL}" "${CEPHFS_DATA_POOL}"
        fi
    fi
    log "SUCCESS"
    # NOTE: prefixing this with exec causes it to die (commit suicide)
    /usr/bin/ceph-mds "${DAEMON_OPTS[@]}" -i "${MDS_NAME}"
}
|
<gh_stars>10-100
-- Schema migration: add a nullable integer column `idx` to code_quality_report.
ALTER TABLE code_quality_report ADD COLUMN idx int;
|
<reponame>lucaserafini-dev/PersonalPlayroom
// Barrel file: re-export every material module in this directory so
// consumers can import them from a single path.
export * from "./cell";
export * from "./custom";
export * from "./fire";
export * from "./fur";
export * from "./gradient";
export * from "./grid";
export * from "./lava";
export * from "./mix";
export * from "./normal";
export * from "./shadowOnly";
export * from "./simple";
export * from "./sky";
export * from "./terrain";
export * from "./triPlanar";
export * from "./water";
|
import { Context, Status } from "../deps.ts";
import { Response } from "../helper/repsonse.ts";
import { validateToken } from "../security/jwt.ts";
import { parseToken } from "../helper/token.ts";
/**
 * Middleware that rejects any request without a valid JWT.
 *
 * Extracts the token from the request context; if it is missing or fails
 * validation, responds 401 with an "Invalid token" body and stops the
 * chain. Otherwise the next middleware runs.
 *
 * `next` is now typed as a zero-arg async function instead of `any`,
 * which removes the need for the previous deno-lint suppression.
 */
export const jwtMiddleware = async (
  context: Context,
  next: () => Promise<unknown>,
) => {
  const token = await parseToken(context);
  if (!token || !(await validateToken(token)).isValid) {
    return Response(
      context,
      Status.Unauthorized,
      { status: Status.Unauthorized, message: "Invalid token" },
    );
  }
  await next();
};
|
def median(nums):
    """Return the median of a list of numbers.

    The input is no longer mutated: the original implementation called
    ``nums.sort()``, silently reordering the caller's list.

    Args:
        nums: a non-empty sequence of numbers.

    Returns:
        The middle value for odd-length input, or the mean of the two
        middle values (a float) for even-length input.

    Raises:
        ValueError: if ``nums`` is empty (previously an opaque IndexError).
    """
    if not nums:
        raise ValueError("median() arg is an empty sequence")
    ordered = sorted(nums)
    length = len(ordered)
    mid = length // 2
    if length % 2 == 0:
        return (ordered[mid - 1] + ordered[mid]) / 2
    return ordered[mid]
//speed analysis: constant O(n)
/**
 https://github.com/zeke/euclidean-distance
 LICENSE: wtfpl - http://www.wtfpl.net/
*/
// Sum of squared component differences between vectors a and b.
// Iterates over a's length; a shorter b produces NaN components.
const distanceSquared = (a, b) => {
  let total = 0;
  for (let i = 0; i < a.length; i++) {
    const delta = a[i] - b[i];
    total += delta * delta;
  }
  return total;
};

// Euclidean (L2) distance between two vectors.
const euclideanDistance = (a, b) => Math.sqrt(distanceSquared(a, b));

export {
  distanceSquared,
  euclideanDistance
}
<gh_stars>10-100
package io.opensphere.search.googleplaces;
import io.opensphere.core.PluginLoaderData;
import io.opensphere.core.Toolbox;
import io.opensphere.core.api.adapter.PluginAdapter;
import io.opensphere.core.options.OptionsProvider;
import io.opensphere.search.SearchOptionsProvider;
/** The plugin that provides a Google Places search. */
public class GooglePlacesPlugin extends PluginAdapter
{
    /** The toolbox. */
    private volatile Toolbox myToolbox;

    /** The class the does the actual searching. */
    private volatile GooglePlacesSearch myGeoQuerySearcher;

    /**
     * {@inheritDoc}
     *
     * Creates the searcher, hangs its options page under the application's
     * search options topic (when that topic exists), and registers the
     * searcher with the search registry.
     *
     * @see io.opensphere.core.api.adapter.PluginAdapter#initialize(io.opensphere.core.PluginLoaderData,
     *      io.opensphere.core.Toolbox)
     */
    @Override
    public void initialize(PluginLoaderData plugindata, Toolbox toolbox)
    {
        myToolbox = toolbox;
        myGeoQuerySearcher = new GooglePlacesSearch(toolbox);
        OptionsProvider optionsProvider = new GooglePlacesOptionsProvider(toolbox.getPreferencesRegistry(), myGeoQuerySearcher);
        OptionsProvider parentSearchProvider = myToolbox.getUIRegistry().getOptionsRegistry()
                .getRootProviderByTopic(SearchOptionsProvider.PROVIDER_NAME);
        // Only attach our options page if the parent search topic is registered.
        if (parentSearchProvider != null)
        {
            parentSearchProvider.addSubTopic(optionsProvider);
        }
        toolbox.getSearchRegistry().addSearchProvider(myGeoQuerySearcher);
    }

    /**
     * {@inheritDoc}
     *
     * Unregisters the search provider; null-checks guard against a close
     * before initialize completed.
     *
     * @see io.opensphere.core.api.adapter.PluginAdapter#close()
     */
    @Override
    public void close()
    {
        if (myToolbox != null && myToolbox.getSearchRegistry() != null && myGeoQuerySearcher != null)
        {
            myToolbox.getSearchRegistry().removeSearchProvider(myGeoQuerySearcher);
        }
    }
}
|
<gh_stars>1-10
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by <NAME>, <EMAIL>, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Gmp(AutotoolsPackage):
    """GMP is a free library for arbitrary precision arithmetic, operating
    on signed integers, rational numbers, and floating-point numbers."""

    homepage = "https://gmplib.org"
    url = "https://ftp.gnu.org/gnu/gmp/gmp-6.1.2.tar.bz2"

    # Known releases with their MD5 checksums.
    version('6.1.2', '8ddbb26dc3bd4e2302984debba1406a5')
    version('6.1.1', '4c175f86e11eb32d8bf9872ca3a8e11d')
    version('6.1.0', '86ee6e54ebfc4a90b643a65e402c4048')
    version('6.0.0a', 'b7ff2d88cae7f8085bd5006096eed470')
    version('6.0.0', '6ef5869ae735db9995619135bd856b84')
    version('5.1.3', 'a082867cbca5e898371a97bb27b31fea')
    # Old version needed for a binary package in ghc-bootstrap
    version('4.3.2', 'dd60683d7057917e34630b4a787932e8')

    # Autotools toolchain is needed because we regenerate configure (see below).
    depends_on('autoconf', type='build')
    depends_on('automake', type='build')
    depends_on('libtool', type='build')
    depends_on('m4', type='build')

    # gmp's configure script seems to be broken; it sometimes misdetects
    # shared library support. Regenerating it fixes the issue.
    force_autoreconf = True

    def configure_args(self):
        # Always build the C++ bindings.
        args = ['--enable-cxx']
        # This flag is necessary for the Intel build to pass `make check`
        if self.spec.compiler.name == 'intel':
            args.append('CXXFLAGS=-no-ftz')
        return args
|
#pragma once
#ifndef INCLUDED_PACKET_READER_HPP
#define INCLUDED_PACKET_READER_HPP
#include "pa_driver/packets.hpp"
#include <functional>
#include <vector>
// Incrementally reassembles length-prefixed packets from a byte source and
// hands each complete packet to a callback. State (partial buffer, byte
// index) is kept across tick() calls, so reads may deliver data in
// arbitrarily small pieces.
class packet_reader
{
public:
  // read_fn:   pulls up to `size` bytes into `buffer`, returns bytes read.
  // on_packet: called with (id, payload, size) for each complete packet;
  //            its boolean result is propagated by try_get_packet().
  // NOTE(review): parameters are const rvalue references, which cannot be
  // moved from -- the functions are copied; consider plain by-value. Left
  // unchanged to preserve the exact signature.
  packet_reader(const std::function<size_t(uint8_t *, size_t)> &&read_fn, const std::function<bool(PacketID, const uint8_t *, size_t)> &&on_packet)
    : m_read_fn(read_fn),
      m_on_packet(on_packet)
  {
  }

  // Attempt one read step; safe to call repeatedly from a polling loop.
  void tick()
  {
    try_get_packet();
  }

  // Block until `count` packets have been accepted by the callback.
  // NOTE(review): this spins on try_get_packet(); presumably m_read_fn
  // blocks or yields -- confirm before using with a non-blocking source.
  void wait_for_packets(size_t count)
  {
    while (count > 0)
    {
      if (try_get_packet())
      {
        count--;
      }
    }
  }

private:
  std::function<size_t(uint8_t *, size_t)> m_read_fn;
  std::function<bool(PacketID, const uint8_t *, size_t)> m_on_packet;
  std::vector<uint8_t> m_buffer;          // accumulates the in-flight packet
  const packet_header *m_header = nullptr; // view of m_buffer's header bytes
  size_t m_idx = 0;                        // bytes of the current packet received so far

  // Returns true when complete packet has been received and callback signals
  // that it was the expected one
  bool try_get_packet()
  {
    // Read header
    if (m_idx < sizeof(packet_header))
    {
      resize_buffer_and_update_header_pointer(sizeof(packet_header));
      size_t bytes_required = sizeof(packet_header) - m_idx;
      size_t bytes_received = m_read_fn(&m_buffer[m_idx], bytes_required);
      m_idx += bytes_received;
      if (bytes_received < bytes_required)
      {
        // Full header not yet obtained...
        return false;
      }
    }

    // Read remainder of packet
    size_t packet_size = m_header->size();
    resize_buffer_and_update_header_pointer(packet_size);
    size_t bytes_remaining = packet_size - m_idx;
    size_t bytes_received = m_read_fn(&m_buffer[m_idx], bytes_remaining);
    m_idx += bytes_received;
    if (bytes_received < bytes_remaining)
    {
      // Packet not yet obtained
      return false;
    }

    // Fire callback and reset
    m_idx = 0;
    return m_on_packet(m_header->id, m_buffer.data(), packet_size);
  }

  // Grow (never shrink) the buffer and refresh the header view.
  void resize_buffer_and_update_header_pointer(size_t size)
  {
    if (m_buffer.size() < size)
    {
      m_buffer.resize(size);
      // Resize may have moved memory. Need updated pointer.
      m_header = reinterpret_cast<const packet_header *>(m_buffer.data());
    }
  }
};
#endif // INCLUDED_PACKET_READER_HPP |
// Copyright 2019 Google LLC. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package convert
import (
"reflect"
"strings"
"testing"
_ "github.com/golang/glog"
"github.com/golang/protobuf/proto" //nolint:staticcheck
"github.com/google/trillian/merkle/smt"
"github.com/google/trillian/storage/storagepb"
"github.com/google/trillian/storage/tree"
)
// TestUnmarshal exercises SubtreeProto validation: depth bounds, base64 leaf
// keys, suffix parsing, and leaf/depth consistency. An empty wantErr means
// the proto must unmarshal cleanly.
func TestUnmarshal(t *testing.T) {
	type mappy map[string][]byte
	for _, tc := range []struct {
		sp      *storagepb.SubtreeProto
		wantErr string
	}{
		{sp: &storagepb.SubtreeProto{}, wantErr: "wrong depth"},
		{sp: &storagepb.SubtreeProto{Depth: -1}, wantErr: "wrong depth"},
		{sp: &storagepb.SubtreeProto{Depth: 8, Leaves: mappy{"huh?": nil}}, wantErr: "base64"},
		{sp: &storagepb.SubtreeProto{Depth: 8, Leaves: mappy{"": nil}}, wantErr: "ParseSuffix: empty bytes"},
		{sp: &storagepb.SubtreeProto{Depth: 12, Leaves: mappy{"DA==": nil}}, wantErr: "ParseSuffix: unexpected length"},
		{sp: &storagepb.SubtreeProto{Depth: 12, Leaves: mappy{"DAAA": nil}}},
		{sp: &storagepb.SubtreeProto{Depth: 13, Leaves: mappy{"DAAA": nil}}, wantErr: "wrong suffix bits"},
		{sp: &storagepb.SubtreeProto{Depth: 12, Leaves: mappy{"DAAA": nil, "DAAB": nil}}, wantErr: "Prepare"},
		{sp: &storagepb.SubtreeProto{Depth: 16, Leaves: mappy{"EAAA": nil, "EAAB": nil}}},
	} {
		t.Run("", func(t *testing.T) {
			got := ""
			if _, err := Unmarshal(tc.sp); err != nil {
				got = err.Error()
			}
			// Either no error was expected and none occurred, or the error
			// text must contain the expected fragment.
			if want := tc.wantErr; len(want) == 0 && len(got) != 0 {
				t.Errorf("Unmarshal: %s, want no error", got)
			} else if !strings.Contains(got, want) {
				t.Errorf("Unmarshal: %s, want error containing %q", got, want)
			}
		})
	}
}
// TestMarshalErrors verifies that Marshal rejects invalid tiles: heights out
// of range, unaligned tile roots, and leaf IDs of the wrong length or not
// under the tile root.
func TestMarshalErrors(t *testing.T) {
	for _, tc := range []struct {
		tile    smt.Tile
		height  uint
		wantErr string
	}{
		{tile: smt.Tile{}, height: 0, wantErr: "height out of"},
		{tile: smt.Tile{}, height: 256, wantErr: "height out of"},
		{tile: smt.Tile{ID: tree.NewNodeID2("\xFF", 5)}, height: 8, wantErr: "root unaligned"},
		{
			tile: smt.Tile{
				ID:     tree.NewNodeID2("\xFF", 8),
				Leaves: []smt.Node{{ID: tree.NewNodeID2("\xFF0000", 24)}},
			},
			height:  8,
			wantErr: "wrong ID bits",
		},
		{
			tile: smt.Tile{
				ID:     tree.NewNodeID2("\xFF", 8),
				Leaves: []smt.Node{{ID: tree.NewNodeID2("\xF000", 16)}},
			},
			height:  8,
			wantErr: "unrelated leaf ID",
		},
	} {
		t.Run("", func(t *testing.T) {
			_, err := Marshal(tc.tile, tc.height)
			if err == nil {
				t.Fatal("Marshal did not return error")
			}
			if got, want := err.Error(), tc.wantErr; !strings.Contains(got, want) {
				t.Errorf("Marshal: %s, want error containing %q", got, want)
			}
		})
	}
}
// TestMarshalUnmarshal round-trips tiles of various depths through
// Marshal/Unmarshal and requires the result to be deeply equal to the input.
func TestMarshalUnmarshal(t *testing.T) {
	nodes := []smt.Node{
		{ID: tree.NewNodeID2("0", 8), Hash: []byte("a")},
		{ID: tree.NewNodeID2("1", 8), Hash: []byte("b")},
	}
	// Leaves three bytes deep, used for the multi-level tile cases below.
	deepNodes := []smt.Node{
		{ID: tree.NewNodeID2("\x0F\x00\x00", 24), Hash: []byte("a")},
		{ID: tree.NewNodeID2("\x0F\xFF\x00", 24), Hash: []byte("b")},
		{ID: tree.NewNodeID2("\x0F\xFF\x01", 24), Hash: []byte("c")},
		{ID: tree.NewNodeID2("\x0F\xFF\x03", 24), Hash: []byte("d")},
		{ID: tree.NewNodeID2("\x0F\xFF\x09", 24), Hash: []byte("e")},
		{ID: tree.NewNodeID2("\x0F\xFF\xFF", 24), Hash: []byte("f")},
	}
	for _, tc := range []struct {
		tile   smt.Tile
		height uint
	}{
		{tile: smt.Tile{}, height: 8},
		{tile: smt.Tile{Leaves: nodes[:1]}, height: 8},
		{tile: smt.Tile{Leaves: nodes}, height: 8},
		{tile: smt.Tile{ID: tree.NewNodeID2("\x0F", 0), Leaves: deepNodes}, height: 24},
		{tile: smt.Tile{ID: tree.NewNodeID2("\x0F", 8), Leaves: deepNodes}, height: 16},
		{tile: smt.Tile{ID: tree.NewNodeID2("\x0F\xFF", 16), Leaves: deepNodes[1:]}, height: 8},
	} {
		t.Run("", func(t *testing.T) {
			sp, err := Marshal(tc.tile, tc.height)
			if err != nil {
				t.Fatalf("Marshal: %v", err)
			}
			clone := proto.Clone(sp).(*storagepb.SubtreeProto) // Break memory dependency.
			tile, err := Unmarshal(clone)
			if err != nil {
				t.Fatalf("Unmarshal: %v", err)
			}
			if got, want := tile, tc.tile; !reflect.DeepEqual(got, want) {
				t.Errorf("Tile mismatch: got %v, want %v", got, want)
			}
		})
	}
}
|
<reponame>Guoxm521/weili
// layui helper module "layui_defind": pagination, modal add/edit/delete
// dialogs, file preview, and small AJAX utilities shared by the admin pages.
layui.define(["jquery", "layer", "form"], function (exports) {
    // Note: a module may itself depend on other modules, e.g. layui.define('layer', callback);
    var $ = layui.jquery;
    var layer = layui.layer;
    var form = layui.form;
    var obj = {
        hello: function (str) {
            alert("Hello " + (str || "mymod"));
        },
        // Pagination: render the pager element and reload the page on jump,
        // preserving any extra search query string.
        page: function (count,limit,curr,search) {
            layui.use("laypage", function () {
                const laypage = layui.laypage;
                laypage.render({
                    elem: "page",
                    limit:limit,
                    count: count,
                    curr:curr,
                    jump: function (obj, first) {
                        // `first` is true for the initial render; only navigate on real jumps.
                        if(!first){
                            if(search) {
                                console.log(1)
                                location.href="?page="+obj.curr+"&limit="+obj.limit+search;
                            }else {
                                console.log(2);
                                location.href="?page="+obj.curr+"&limit="+obj.limit;
                            }
                        }
                    },
                });
            });
        },
        /*
         * "Add" button handler. Expects title/area/content/scroll.
         * The edit action can also be routed through this method,
         * since add and edit share the same page.
         * */
        add: function (title, area, content, scroll) {
            if (scroll !== true) {
                scroll = 'no'
            }
            ;
            layer.open({
                type: 2,
                title: title,
                closeBtn: 1,
                shadeClose: true,
                area: area,//["500px", "215px"]
                offset: ["100px"],
                anim: 2,
                content: [content, scroll], //iframe url; 'no' hides the scrollbar
            });
        },
        /*
         * "Delete" button handler: confirm, then delete by the given criteria.
         * Inputs: data, url. The server responds with a JSON object.
         * */
        delete: function (data, url) {
            layer.confirm(
                "您确定删除吗?",
                {
                    btn: ["确定", "取消"], //buttons
                },
                function () {
                    $.ajax({
                        type: 'post',
                        url: url,
                        data: data,
                        success: function (res) {
                            /*
                             * code == 1: deletion succeeded
                             * code == 2: the category is still referenced and cannot be deleted
                             * */
                            if (res.code == 1) {
                                location.reload();
                                parent.layer.closeAll();
                                layer.msg(res.msg, {icon: 1, time: 2000});
                            } else if (res.code == 2) {
                                layer.msg(res.msg, {icon: 2, time: 1000})
                            }
                        }
                    })
                },
                function () {
                    parent.layer.closeAll();
                }
            );
        },
        // "Edit" button handler: opens the small add page in a popup layer.
        edit: function () {
            layer.open({
                type: 2,
                title: "类别修改",
                closeBtn: 1, //show the close button
                shadeClose: true,
                area: ["500px", "700px"],
                offset: ["100px"],
                anim: 2,
                content: ["./add.html", "no"], //iframe url; 'no' hides the scrollbar
            });
        },
        // Register the click handler for "view content" buttons.
        get_content: function (url,area) {
            if (!area) {
                area = ["600px", "500px"];
            }
            ;
            $(".get_content").on("click", function () {
                const id = $(this).attr('data-id');
                $.ajax({
                    type: 'post',
                    url: url,
                    data: {
                        id: id
                    },
                    dataType: 'json',
                    success: function (res) {
                        layer.open({
                            type: 1,
                            title:res[0].sortname,
                            closeBtn: 1,
                            skin: "layui-layer-rim", //with border
                            area: area, //width and height
                            content:res[0].content
                            // "<img src='./../src/ia_200000017.jpg'>",
                        });
                    },
                    error: function (error) {
                        console.log(error);
                    }
                });
            });
        },
        // Register the click handler for "publish" toggle buttons.
        publish: function (url) {
            $("span[class^=check]").on("click", function () {
                const publish = $(this).attr('value');
                const id = $(this).attr('data-id');
                // Toggle the yes/no indicator in place before posting.
                if ($(this).attr("class") == "check_yes") {
                    $(this)
                        .removeClass("check_yes")
                        .addClass("check_no")
                        .html("<i class='layui-icon layui-icon-close'></i>否");
                } else {
                    $(this)
                        .removeClass("check_no")
                        .addClass("check_yes")
                        .html("<i class='layui-icon layui-icon-ok'></i>是");
                }
                $.ajax({
                    type:'post',
                    url:url,
                    data:{
                        publish:publish,
                        id:id
                    },
                    success:function () {
                        location.reload();
                    },
                    error:function (error) {
                        console.log(error);
                    }
                })
            });
        },
        // Parse the page's URL query string into a key/value object.
        geturl: function () {
            const param = {};
            const query = window.location.search.substr(1);
            const url = query.split('&');
            for (var i = 0, l = url.length; i < l; i++) {
                const a = url[i].split('=');
                param[a[0]] = a[1];
            }
            return param;
        },
        /*
         * Preview the selected file(s) after a file input changes.
         * typeArr: allowed file extensions; isImg: whether the upload is an image.
         * */
        showimg: function (typeArr, isImg) {
            if (!typeArr) {
                typeArr = ['jpg', 'jpeg', 'png'];
            }
            ;
            if (isImg == null) {
                isImg = true
            }
            $("input[type='file']").on('change', function () {
                const files = $(this)[0].files;
                $("#img_box").empty();
                for (var i = 0; i < files.length; i++) {
                    const name = files[i].name;
                    const type = name.substr(name.lastIndexOf(".") + 1);
                    // validate the file extension
                    if (typeArr.indexOf(type) == -1) {
                        layer.msg("请传入正确的文件格式");
                        return false;
                    }
                    ;
                    // if the upload is an image, render a data-URL preview
                    if (isImg) {
                        const reader = new FileReader();
                        reader.readAsDataURL(files[i]);
                        reader.onload = function () {
                            const src = this.result;
                            $("#img_box").append("<img id='upload_img_show' class='upload_img_show'>")
                            $("#upload_img_show").attr('src', this.result);
                            $("#upload_img_show").removeAttr('id');
                        };
                    } else {
                        $('#img_box').html("<img src=''/>");
                        $('#img_box').children('img').attr('src', '/static/admin/images/ia.jpg')
                    }
                }
            })
        },
        /*
         * Populate the page's <select> options from the server.
         * */
        getoption: function (url,type) {
            $.ajax({
                type: 'post',
                url: url,
                dataType: 'json',
                success: function (res) {
                    let str = '';
                    for (let i = 0; i < res.length; i++) {
                        str += "<option data-id=" + res[i].id + " value=" + res[i].sortname + ">" + res[i].sortname + "</option>";
                    }
                    $("#sortname").append(str);
                    // Re-render the layui form only on the add page.
                    if(type == 'add') {
                        form.render();
                    }
                }
            })
        },
        /*
         * Submit form data via AJAX, including multi-file uploads
         * (used e.g. by the "about us" page).
         * */
        addfiles: function (upurl, formData, jumpurl) {
            $.ajax({
                type: 'post',
                url: upurl,
                data: formData,
                dataType: 'json',
                cache: false,
                contentType: false,
                processData: false,
                success: function (res) {
                    layer.msg(1)
                    layer.msg(res.msg)
                    if (res.code == 1) {
                        location.href = jumpurl;
                    }
                },
                error: function (res) {
                    layer.msg(res.msg)
                }
            })
        }
    };
    // export the module interface
    exports("layui_defind", obj);
});
|
# Convert the legacy ISIS "nmail" database into JSON fixtures
# (institution.json / units.json) via mx/mxcp, then validate them.

# set environment variables
export PK_INCREASE_NUMBER=''
export FI_USER_ID='2'
export TABS='/usr/local/bireme/tabs'

# fetch the master and cross-reference files from the source host
rsync -rav transfer@quartzo2:/home/intranet/bases/nmail/nmail.mst .
rsync -rav transfer@quartzo2:/home/intranet/bases/nmail/nmail.xrf .
#rsync -rav transfer@quartzo2:/home/intranet/bases/nmail/nmail.fst .

# drop repeated/empty occurrences while copying to nmail_clean
mxcp nmail create=nmail_clean clean repeat=% period=. tell=100

# normalize fields 603 and 604 to uppercase without accents
mx nmail_clean "gizmo=$TABS/gansna,603,604" "proc=(if p(v603) then '<1603 0>',mpu,v603,mpl'</1603>' fi)" "proc=(if p(v604) then '<1604 0>'mpu,v604,mpl'</1604>' fi)" "proc='d603d604'" create=nmail_norm -all now

# retag 1603, 1604 --> 603, 604
echo "1603 603" > retags.tab
echo "1604 604" >> retags.tab
retag nmail_norm retags.tab

# build auxiliary control tables (with inverted files) from CSV
mx seq=nmail_cat_list.csv create=nmail_cat_list -all now
mx nmail_cat_list fst="1 0 if p(v2) then v1/ fi" fullinv=nmail_cat_list -all now
mx seq=nmail_user_list.csv create=nmail_user_list -all now
mx nmail_user_list fst="1 0 if p(v2) then v1/ fi" fullinv=nmail_user_list -all now

# convert the database to JSON fixtures
mx nmail_norm pft=@fixture_nmail.pft gizmo=gizmo_fixture gizmo=g_countries,620 lw=8000 now > institution_iso.json
mx nmail_norm pft=@unit_seq.pft gizmo=gizmo_fixture gizmo=g_countries,620 lw=8000 now > unit_list.seq
mx seq=unit_list.seq create=unit_list -all now
mx unit_list pft=@fixture_units.pft lw=8000 now > units_iso.json

# remove initial blank lines of notes field
sed -e 's/"\\r\\n\\r\\n/"/' institution_iso.json > institution_iso_fix1.json

# convert to utf8
iconv -f iso-8859-1 -t utf-8 -c institution_iso_fix1.json > institution_utf8.json
iconv -f iso-8859-1 -t utf-8 -c units_iso.json > units_utf8.json

# fix last 2 lines of file to valid JSON
head -n -2 institution_utf8.json > institution.json
echo -e "\n}\n]" >> institution.json
head -n -2 units_utf8.json > units.json
echo -e "\n}\n]" >> units.json

# validate json file
jsonlint -v institution.json
jsonlint -v units.json

# clean up intermediates
#rm *_iso.json
rm *_clean.*
|
'use strict';
const {getMD5String, newHTMLData, buildHTMLDataStream} = require('./utils');
const adarender = require('../../proto/adarender_pb');
/**
* HTMLStream class
*/
/**
 * HTMLStream class
 *
 * Reassembles an adarender.HTMLData protobuf that arrives split across
 * multiple adarender.HTMLStream frames, verifying a per-chunk MD5 and a
 * whole-message MD5 before deserializing. Also provides helpers to send
 * data or an error back over a gRPC call. One instance handles one
 * transfer; call reset() before reuse.
 */
class HTMLStream {
  /**
   * constructor
   */
  constructor() {
    this.reset();
  }

  /**
   * reset - clear the reassembly buffer, result object and error state.
   */
  reset() {
    this.buf = new Uint8Array();
    this.htmlobj = undefined;
    this.err = '';
  }

  /**
   * onData - consume one incoming frame.
   * @param {object} htmlstream - HTMLStream frame
   * @return {string} err - '' on success, otherwise a description
   */
  onData(htmlstream) {
    try {
      const err = htmlstream.getError();
      if (err) {
        this.err = err;
        return 'client sent a error';
      }
      // A frame may carry the whole message directly.
      if (htmlstream.hasHtmldata()) {
        this.htmlobj = htmlstream.getHtmldata();
        return '';
      }
      const curlen = htmlstream.getCurlength();
      const curstart = htmlstream.getCurstart();
      if (curlen <= 0) {
        return 'invalid curlength ' + curlen;
      }
      if (curstart < 0) {
        return 'invalid curstart ' + curstart;
      }
      // Chunks must arrive contiguously and in order.
      if (this.buf.length != curstart) {
        return (
          'invalid buf.length or curstart ' + this.buf.length + ' ' + curstart
        );
      }
      const curbuf = htmlstream.getData_asU8();
      if (!curbuf) {
        return 'invalid data';
      }
      if (curbuf.length != curlen) {
        return (
          'invalid curlength or data.length ' + curlen + ' ' + curbuf.length
        );
      }
      const hashdata = htmlstream.getHashdata();
      const md5str = getMD5String(curbuf);
      if (md5str != hashdata) {
        return 'invalid md5str or hashdata ' + md5str + ' ' + hashdata;
      }
      // BUG FIX: Uint8Array has a fixed length. The old code called
      // this.buf.set(curbuf, curstart) on the never-resized (zero-length)
      // buffer, which throws a RangeError for any non-empty chunk.
      // Allocate a larger buffer and append the chunk instead.
      const grown = new Uint8Array(curstart + curbuf.length);
      grown.set(this.buf, 0);
      grown.set(curbuf, curstart);
      this.buf = grown;
      const totalhashdata = htmlstream.getTotalhashdata();
      const totallength = htmlstream.getTotallength();
      if (totallength < this.buf.length) {
        return (
          'invalid buf.length or totallength ' +
          this.buf.length +
          ' ' +
          totallength
        );
      }
      if (totallength == this.buf.length) {
        // Last chunk: verify the whole-message hash, then deserialize.
        if (!totalhashdata) {
          return 'no totalhashdata';
        }
        const totalmd5str = getMD5String(this.buf);
        if (totalmd5str != totalhashdata) {
          return (
            'invalid totalmd5str or totalhashdata ' +
            totalmd5str +
            ' ' +
            totalhashdata
          );
        }
        this.htmlobj = adarender.HTMLData.deserializeBinary(this.buf);
      } else if (totalhashdata) {
        // The total hash may only appear on the final chunk.
        return 'invalid totalhashdata';
      }
    } catch (err) {
      return 'htmlstream.onData error ' + err;
    }
    return '';
  }

  /**
   * onEnd - the stream ended; valid only if we saw an error or a result.
   * @return {string} err - err
   */
  onEnd() {
    if (this.err || this.htmlobj) {
      return '';
    }
    return 'non error or htmlobj';
  }

  /**
   * sendErr - send an error frame and close the call.
   * @param {object} call - call
   * @param {string} errstr - error string
   */
  async sendErr(call, errstr) {
    try {
      const htmlstream = new adarender.HTMLStream();
      htmlstream.setError(errstr);
      await call.write(htmlstream);
      call.end();
    } catch (err) {
      console.log('HTMLStream.sendErr ' + err);
    }
  }

  /**
   * sendHTMLData - chunk an HTMLData message onto the call, then close it.
   * @param {object} call - call
   * @param {object} obj - {strData}
   */
  async sendHTMLData(call, obj) {
    try {
      const htmldata = newHTMLData(obj);
      await buildHTMLDataStream(htmldata, async (htmstream) => {
        await call.write(htmstream);
      });
      call.end();
    } catch (err) {
      console.log('HTMLStream.sendHTMLData ' + err);
    }
  }
}
exports.HTMLStream = HTMLStream;
|
// binary prints n's base-2 digits, most significant first.
// For n == 0 nothing is printed, matching the recursive original.
func binary(n int) {
	if n == 0 {
		return
	}
	digits := []int{}
	for n > 0 {
		digits = append(digits, n%2)
		n /= 2
	}
	for i := len(digits) - 1; i >= 0; i-- {
		fmt.Printf("%d", digits[i])
	}
}
// main prints the binary representation of 15.
func main() {
	x := 15
	binary(x)
	// 1111
}
package malte0811.controlengineering.network.keypunch;
import malte0811.controlengineering.blockentity.tape.KeypunchState;
import net.minecraft.network.FriendlyByteBuf;
/**
 * Keypunch sub-packet carrying the complete machine state: the number of
 * characters still available and every byte typed so far. Sent only from
 * server to client (see {@link #allowSendingToServer()}).
 */
public class FullSync extends KeypunchSubPacket {
    /** Characters still available on the keypunch. */
    private final int numAvailable;
    /** All bytes typed so far, in order. */
    private final byte[] typed;

    public FullSync(int numAvailable, byte[] typed) {
        this.numAvailable = numAvailable;
        this.typed = typed;
    }

    /** Deserializing constructor; mirrors {@link #write(FriendlyByteBuf)}. */
    public FullSync(FriendlyByteBuf buffer) {
        this(buffer.readVarInt(), buffer.readByteArray());
    }

    @Override
    protected void write(FriendlyByteBuf out) {
        out.writeVarInt(numAvailable);
        out.writeByteArray(typed);
    }

    /** Replaces the state's availability and typed data wholesale. */
    @Override
    public boolean process(KeypunchState state) {
        state.setAvailable(numAvailable);
        state.getData().clear();
        state.getData().addElements(0, typed);
        return true;
    }

    /** Full syncs are authoritative server state; clients may not send them. */
    @Override
    public boolean allowSendingToServer() {
        return false;
    }
}
|
import React from "react";
import styles from "./InputNumber.module.scss";
/** Props accepted by the numeric text input. */
interface InputNumberProps {
  /** Current value shown in the input (controlled usage). */
  value?: string | number | undefined;
  /** Focus the input when it mounts. */
  autoFocus?: boolean;
  /** Change handler forwarded to the underlying input element. */
  onChange?: (event: React.ChangeEvent<HTMLInputElement>) => void;
  /** Disable user interaction. */
  disabled?: boolean;
}

/**
 * Thin styled wrapper around a native input.
 * NOTE(review): rendered with type="text" rather than type="number";
 * presumably numeric validation happens elsewhere -- confirm.
 */
export const InputNumber = (props: InputNumberProps): JSX.Element => {
  return (
    <input
      className={styles.InputNumber}
      type="text"
      value={props.value}
      onChange={props.onChange}
      disabled={props.disabled}
      autoFocus={props.autoFocus}
    />
  );
};
// Log the given value with a 'test' prefix.
// BUG FIX: `test` was referenced inside the function but never declared
// anywhere, so every call threw a ReferenceError. The call site already
// passes an argument (fn(undefined)), so `test` is now the parameter.
var fn = function (test) {
    console.log('test' + test);
};
fn(undefined);
#!/bin/sh
# Build the Docker image, start the container, wait until every API inside
# answers HTTP 200, run the E2E suite, then tear the container down.

# Build Docker image
./build-docker.sh

# Run it
docker rm --force ngunits-001
docker run --name ngunits-001 -d -p 9081:9081 -p 9082:9082 -p 9083:9083 -p 9084:9084 -p 9085:9085 -p 3000:80 ngunits
exit_status=$?
if [ ! $exit_status -eq 0 ]; then
    echo "Failed to run the docker container"
    exit $exit_status
fi

# Wait for docker image and all APIs inside are up and running
sleep 2s
echo "Waiting for APIs to be started..."

# Poll a health-check URL until it returns HTTP 200, giving up after 60
# attempts (~2 minutes). $1 = URL, $2 = human-readable service name.
# BUG FIX: the original repeated this loop five times and used [[ ]], a
# bashism that fails under the #!/bin/sh shebang on POSIX shells (dash);
# [ ] is portable.
wait_for_api() {
    counter=0
    response=$(curl --write-out '%{http_code}' --silent --output /dev/null "$1")
    while [ "$response" != "200" ]
    do
        counter=$((counter+1))
        if [ "$counter" -gt 60 ]; then
            echo "API check timeout"
            exit 1
        fi
        sleep 2s
        response=$(curl --write-out '%{http_code}' --silent --output /dev/null "$1")
    done
    echo "$2 is started!"
}

wait_for_api "localhost:9083/sciper-api/hc" "sciper-api"
wait_for_api "localhost:9082/cadi-api/hc" "cadi-api"
wait_for_api "localhost:9084/archibus-api/hc" "archibus-api"
wait_for_api "localhost:9081/units-api/hc" "units-api"
wait_for_api "localhost:9085/userinfo" "tequlia mock"
echo "All APIs are started!"

# Wake up APIs...
curl http://localhost:9081/units-api/v1/units/13030
curl http://localhost:9082/cadi-api/v1/countries?query=CH
curl http://localhost:9083/sciper-api/v1/people?query=delo
curl http://localhost:9084/archibus-api/v1/rooms?query=INN

# Run webdriver/selenium
npm run webdriver:update
exit_status=$?
if [ ! $exit_status -eq 0 ]; then
    echo "Failed to update web driver"
    exit $exit_status
fi
npm run webdriver:start &
sleep 10s

# Start E2E tests
echo "Starting E2E tests..."
npm run e2e:docker
exit_status=$?
if [ ! $exit_status -eq 0 ]; then
    echo "E2E tests have failed"
fi

# Shutdown docker image
docker rm --force ngunits-001
exit $exit_status
|
def longest_increasing_subsequence(arr):
    """Return the length of the longest strictly increasing subsequence.

    Classic O(n^2) dynamic programme: ``lis[i]`` holds the length of the
    best increasing subsequence that ends at index ``i``.

    Args:
        arr: a sequence of mutually comparable items.

    Returns:
        The subsequence length, or 0 for an empty input (the original
        raised ``ValueError`` from ``max([])`` on empty input).
    """
    if not arr:
        return 0
    lis = [1] * len(arr)
    for i in range(1, len(arr)):
        for j in range(i):
            # Extend the best subsequence ending at j if arr[i] can follow it.
            if arr[i] > arr[j]:
                lis[i] = max(lis[i], lis[j] + 1)
    return max(lis)
# Demo: the longest increasing subsequence of this input is [1, 2, 3, 4],
# so the script prints 4.
arr = [1, 6, 2, 8, 3, 4]
result = longest_increasing_subsequence(arr)
print(result)
<reponame>mufeili/GNNLens2
import { connect } from 'react-redux';
import { Dispatch } from 'redux';
import { StoreState } from '../types';
import GraphViewSettingsModal from '../components/DataRuns/GraphView/GraphViewSettingsModal';
import { changeGraphViewSettingsModal_visible,changeGraphViewState } from '../actions';
// Expose the settings-modal visibility flag and graph-view state to the component.
const mapStateToProps = (state: StoreState) => ({
    GraphViewSettingsModal_visible: state.GraphViewSettingsModal_visible,
    GraphViewState: state.GraphViewState
})

// Bind the action creators that toggle the modal and patch the view state.
const mapDispatchToProps = (dispatch: Dispatch) => ({
    changeGraphViewSettingsModal_visible: (visible:boolean) => dispatch(changeGraphViewSettingsModal_visible(visible)),
    changeGraphViewState: (state_dict:any) => dispatch(changeGraphViewState(state_dict))
})

export default connect(mapStateToProps, mapDispatchToProps)(GraphViewSettingsModal);
|
package com.zhcs.controller;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.shiro.authz.annotation.RequiresPermissions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import com.zhcs.context.PlatformContext;
import com.zhcs.entity.PartyEntity;
import com.zhcs.service.PartyService;
import com.zhcs.utils.BeanUtil;
import com.zhcs.utils.PageUtils;
import com.zhcs.utils.R;
import com.zhcs.utils.StringUtil;
//*****************************************************************************
/**
* <p>Title:PartyController</p>
* <p>Description: 甲方</p>
* <p>Copyright: Copyright (c) 2017</p>
* <p>Company: 深圳市智慧城市管家信息科技有限公司 </p>
* @author 刘晓东 - Alter
* @version v1.0 2017年2月23日
*/
//*****************************************************************************
@Controller
@RequestMapping("party")
public class PartyController extends AbstractController {
    @Autowired
    private PartyService partyService;

    /** Serve the party management page. */
    @RequestMapping("/party.html")
    public String list(){
        return "party/party.html";
    }

    /**
     * Paged list of parties.
     */
    @ResponseBody
    @RequestMapping("/list")
    @RequiresPermissions("party:list")
    public R list(Integer page, Integer limit){
        Map<String, Object> map = new HashMap<String, Object>();
        map.put("offset", (page - 1) * limit);
        map.put("limit", limit);
        // query the page of records plus the total count
        List<PartyEntity> partyList = partyService.queryList(map);
        int total = partyService.queryTotal(map);
        PageUtils pageUtil = new PageUtils(partyList, total, limit, page);
        return R.ok().put("page", pageUtil);
    }

    /**
     * Party tree for tree-view widgets.
     */
    @ResponseBody
    @RequestMapping("/getPartyTree")
    // NOTE(review): "party:getPartyTreet" looks like a typo for
    // "party:getPartyTree"; left unchanged because it must match the
    // configured permission string -- confirm before fixing.
    @RequiresPermissions("party:getPartyTreet")
    public R getUserTree(){
        // query the tree data
        List<Map<String, Object>> userList = partyService.getPartyTree();
        return R.ok().put("userList", userList);
    }

    /**
     * Single party details.
     */
    @ResponseBody
    @RequestMapping("/info/{id}")
    @RequiresPermissions("party:info")
    public R info(@PathVariable("id") Long id){
        PartyEntity party = partyService.queryObject(id);
        return R.ok().put("party", party);
    }

    /**
     * Create a party.
     */
    @ResponseBody
    @RequestMapping("/save")
    @RequiresPermissions("party:save")
    public R save(@RequestBody PartyEntity party){
        BeanUtil.fillCCUUD(party, getUserId(), getUserId());
        partyService.save(party);
        return R.ok();
    }

    /**
     * Update a party.
     */
    @ResponseBody
    @RequestMapping("/update")
    @RequiresPermissions("party:update")
    public R update(@RequestBody PartyEntity party){
        BeanUtil.fillCCUUD(party, getUserId());
        partyService.update(party);
        return R.ok();
    }

    /**
     * Delete a party: the configured admin performs a hard delete, any
     * other user only flips the record's status to "0" (soft delete).
     */
    @ResponseBody
    @RequestMapping("/delete/{id}")
    @RequiresPermissions("party:delete")
    public R delete(@PathVariable("id") Long id){
        if(PlatformContext.getGoalbalContext("adminId", String.class).equals(StringUtil.valueOf(getUserId()))){
            partyService.delete(id);
        } else {
            PartyEntity party = new PartyEntity();
            party.setId(id);
            party.setStatus("0");
            BeanUtil.fillCCUUD(party, getUserId());
            partyService.update(party);
        }
        return R.ok();
    }
}
|
import React from 'react';
import { shallowWithTheme, mountWithTheme } from '../util-test';
import Textarea from './Textarea';
describe('<Textarea />', () => {
let wrapper, classes;
beforeAll(() => {
wrapper = shallowWithTheme(
<Textarea
className="test-class"
value=""
onChange={() => {}}
label="label"
id="test"
/>
);
classes = wrapper.prop('classes');
});
test('rendering correctly', () => {
expect(wrapper.find('textarea')).toBeTruthy();
});
test('rendering with correct classes', () => {
expect(wrapper.dive().hasClass(classes.root)).toBe(true);
expect(wrapper.dive().hasClass('test-class')).toBe(true);
expect(
wrapper
.dive()
.childAt(0)
.hasClass(classes.textarea)
).toBe(true);
expect(
wrapper
.dive()
.childAt(1)
.hasClass(classes.label)
).toBe(true);
});
test('prop: fullWidth', () => {
wrapper.setProps({ fullWidth: true });
expect(wrapper.dive().hasClass(classes.fullWidth)).toBe(true);
});
test('prop: disabled', () => {
const disabledWrapper = shallowWithTheme(
<Textarea
value=""
onChange={() => {}}
label="label"
id="test"
disabled
/>
);
expect(
disabledWrapper
.dive()
.find('textarea')
.prop('disabled')
).toBe(true);
});
test('spreading custom props', () => {
wrapper.setProps({ customProp: 'test' });
expect(wrapper.prop('customProp')).toBe('test');
});
test('appling custom classes to textarea & label', () => {
wrapper.setProps({
customClasses: {
textarea: 'test-textarea-class',
label: 'test-label-class',
},
});
expect(wrapper.dive().find('textarea').hasClass('test-textarea-class')).toBe(true);
expect(wrapper.dive().find('label').hasClass('test-label-class')).toBe(true);
});
describe('event behavior', () => {
let wrapper, classes;
const mockOnChange = jest.fn();
beforeAll(() => {
wrapper = mountWithTheme(
<Textarea
value=""
onChange={mockOnChange}
label="label"
id="test"
/>
);
classes = wrapper.prop('classes');
});
test('handling value change', () => {
wrapper.find('textarea').simulate('change');
expect(mockOnChange).toBeCalled();
});
test('handling focus & blur', () => {
wrapper.find('textarea').simulate('focus');
expect(wrapper.childAt(0).hasClass(classes.textareaColored)).toBe(true);
expect(wrapper.find('textarea').hasClass(classes.textareaColored)).toBe(true);
expect(wrapper.find('label').hasClass(classes.labelFocus)).toBe(true);
wrapper.find('textarea').simulate('blur');
expect(wrapper.childAt(0).hasClass(classes.textareaColored)).toBe(false);
expect(wrapper.find('textarea').hasClass(classes.textareaColored)).toBe(false);
expect(wrapper.find('label').hasClass(classes.labelFocus)).toBe(false);
});
});
}); |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.