text stringlengths 1 1.05M |
|---|
#!/bin/bash
# Run a goss validation (expected-failure fixture) inside a throwaway
# php:7.3-apache container, and always stop the container on exit.
set -euo pipefail
IMAGE="php:7.3-apache"
# Fix: quote the $(pwd) expansions so paths containing spaces survive.
ID=$(docker run \
  --rm -d \
  -v "$(pwd)/${GOSS_EXE}":/bin/goss \
  -v "$(pwd)":/app \
  -v "$(pwd)/httpd":/var/www/html \
  "${IMAGE}")
function clean {
  printf "\n"
  echo "Stop container..."
  docker stop "${ID}"
}
# Fix: the old form `trap "clean ${ID}" EXIT` expanded the id into an
# argument that clean() never reads; clean uses the global ID directly.
trap clean EXIT
sleep 1 # Wait for httpd
docker exec "${ID}" /bin/sh -c 'goss -g /app/goss_fail.yaml validate'
object ReplaceSpaces {

  /** Returns `str` with every whitespace character replaced by a hyphen. */
  def replaceSpaces(str: String): String =
    str.replaceAll("\\s", "-")

  /** Demo entry point: prints "Hello-World". */
  def main(args: Array[String]): Unit =
    println(replaceSpaces("Hello World"))
}
#!/bin/bash
# Configure persistence on the six redis-cluster docker-machine nodes:
# AOF on for nodes 1-3, AOF off for nodes 4-6, RDB snapshots off everywhere.
# Same commands and order as before, expressed as loops instead of 12
# copy-pasted lines.
for i in 1 2 3; do
  ./redis-cli -h $(docker-machine ip redis-cluster-$i) -p 6379 config set appendonly "yes"
done
for i in 4 5 6; do
  ./redis-cli -h $(docker-machine ip redis-cluster-$i) -p 6379 config set appendonly "no"
done
for i in 1 2 3 4 5 6; do
  ./redis-cli -h $(docker-machine ip redis-cluster-$i) -p 6379 config set save ""
done
#!/bin/bash
# Prompt for an English phrase via wofi, translate it with the
# LibreTranslate web API into the locale language, and put the result on
# the Wayland clipboard.

# Two-letter target language taken from $LANG (e.g. "de" from "de_DE.UTF-8").
TARGET="$(echo "$LANG" | grep -o ^..)"
# One line of input; spaces become '+' for the request and are restored below.
SEARCH="$(wofi -d -L 1 | sed 's/ /+/g')"
# NOTE(review): the response is parsed with sed/rev instead of a JSON parser,
# which assumes the body is exactly {"translatedText":"..."} — confirm, or
# use a real JSON tool if one is available.
SUGGESTION=$(curl "https://libretranslate.com/translate" -H "Content-Type: application/json" -d "{\"q\": \"${SEARCH}\", \"source\": \"en\", \"target\": \"${TARGET}\"}" | sed 's/^{"translatedText":"//g' | rev | sed 's/^}"//g'| rev | sed 's/+/ /g')
wl-copy "$SUGGESTION"
exit
|
#!/usr/bin/env bash
# Fine-tune sshleifer/student_cnn_12_6 for summarization with a frozen
# encoder and embeddings, writing checkpoints to distilbart-cnn-12-6.
# Extra CLI arguments are forwarded to finetune.py via "$@".
export PYTHONPATH="../":"${PYTHONPATH}"
# Batch size (train and eval) and gradient accumulation steps.
export BS=32
export GAS=1
# NOTE(review): assumes $CNN_DIR is exported by the caller and points at the
# dataset directory — confirm.
python finetune.py \
--learning_rate=3e-5 \
--fp16 \
--gpus 1 \
--do_train \
--do_predict \
--val_check_interval 0.25 \
--n_val 500 \
--num_train_epochs 2 \
--freeze_encoder --freeze_embeds --data_dir $CNN_DIR \
--max_target_length 142 --val_max_target_length=142 \
--train_batch_size=$BS --eval_batch_size=$BS --gradient_accumulation_steps=$GAS \
--model_name_or_path sshleifer/student_cnn_12_6 \
--tokenizer_name facebook/bart-large \
--warmup_steps 500 \
--output_dir distilbart-cnn-12-6 \
"$@"
|
import sqlite3

# One-time schema setup for the orders database. Uses IF NOT EXISTS so
# re-running this script does not crash with "table Orders already exists",
# and try/finally so the connection is closed even if the DDL fails.
conn = sqlite3.connect('orders.db')
try:
    # create the table to store orders
    conn.execute(
        "CREATE TABLE IF NOT EXISTS Orders ("
        "order_id integer PRIMARY KEY, "
        "customer_name text NOT NULL, "
        "order_date text NOT NULL, "
        "order_items text NOT NULL)"
    )
    # commit changes
    conn.commit()
finally:
    # close connection
    conn.close()
# write query to retrieve orders
def get_customer_orders(customer_name):
    """Return all rows from Orders for the given customer.

    Args:
        customer_name: exact customer name to match.

    Returns:
        A list of (order_id, customer_name, order_date, order_items) tuples;
        empty if the customer has no orders.
    """
    conn = sqlite3.connect('orders.db')
    try:
        c = conn.cursor()
        # Bug fix: the parameters argument must be a sequence. The original
        # passed (customer_name) — just a parenthesised string — which makes
        # sqlite3 raise "Incorrect number of bindings" for any name longer
        # than one character. (customer_name,) is the required 1-tuple.
        c.execute("SELECT * FROM Orders WHERE customer_name=?", (customer_name,))
        return c.fetchall()
    finally:
        # Close even if the query raises.
        conn.close()
<gh_stars>1-10
import {getDcDenom, IbcDenom} from "./denom.helper";
describe('getDcDenom', () => {
describe('getDcDenom', () => {
it('getDcDenom Test', async () => {
const msg = {
msg:{
'packet':{
"source_port" : "transfer",
"source_channel" : "channel-10",
"destination_port" : "transfer",
"destination_channel" : "channel-36",
"data" : {
"denom" : "transfer/channel-9/transfer/channel-54/uiris",
"amount" : 2,
"sender" : "<KEY>",
"receiver" : "<KEY>"
}
}}
}
const data = await getDcDenom(msg)
console.log(data, '--result--')
});
it('IbcDenom Test', async () => {
const data = await IbcDenom("transfer/channel-54","uiris")
console.log(data, '--result--')
const data1 = await IbcDenom("","uiris")
console.log(data1, '--result--')
});
});
}) |
#!/usr/bin/env bash
# SPDX-License-Identifier: BSD-3-Clause
# Exercise ECDSA sign/verify with the openssl tpm2 provider: create a P-256
# key via the provider, export its public half, then for each hash the TPM
# reports as available, sign with the TPM key and verify with plain openssl.
set -eufx
echo -n "abcde12345abcde12345" > testdata
# generate private key as PEM
openssl genpkey -provider tpm2 -algorithm EC -pkeyopt group:P-256 -out testkey.priv
# read PEM and export public key as PEM
openssl pkey -provider tpm2 -provider base -in testkey.priv -pubout -out testkey.pub
# check various digests
for HASH in sha1 sha256 sha384 sha512; do
# skip unsupported algorithms
tpm2_getcap algorithms | grep $HASH || continue
# sign using ECDSA and a defined hash
openssl pkeyutl -provider tpm2 -provider base -sign -inkey testkey.priv -rawin -in testdata \
-digest $HASH -out testdata.sig
# verify the signature
openssl pkeyutl -verify -pubin -inkey testkey.pub -rawin -in testdata \
-digest $HASH -sigfile testdata.sig
done
# clean up all generated artifacts
rm testdata testdata.sig testkey.priv testkey.pub
|
#!/bin/bash
# Create (or reuse) a local virtualenv and start the Flask app in
# development mode (auto-reload and debugger enabled).
# NOTE(review): there is no "pip install" step here — assumes Flask and the
# app's dependencies are already present in the venv; confirm.
python3 -m venv venv
source venv/bin/activate
export FLASK_APP=app.py
export FLASK_ENV=development
python3 -m flask run
# Function to compute the factorial of a positive integer n
def factorial(n):
    """Return n! for a non-negative integer n.

    Args:
        n: non-negative integer.

    Returns:
        n factorial (1 for n == 0).

    Raises:
        ValueError: if n is negative — the original recursion never
            terminated for negative input (RecursionError at best).
    """
    if n < 0:
        raise ValueError("factorial() is not defined for negative values")
    # base case
    if n == 0:
        return 1
    # recursive case
    return n * factorial(n - 1)


if __name__ == '__main__':
    num = 6
    print(factorial(num))
<filename>users/admin.py
from django.contrib import admin
from .models import Users
from django.contrib.auth.admin import UserAdmin
from department.models import *
# Register your models here.
class UsersAdmin(UserAdmin):
    """Admin configuration for the custom Users model.

    Extends the stock UserAdmin fieldsets with the role flags declared on
    Users (is_student / is_teacher / is_admin).
    """
    model = Users
    fieldsets = UserAdmin.fieldsets + (
        ('Additional Info', {
            'fields': (
                ('is_student',),
                ('is_teacher',),
                # Consistency fix: ('is_admin') was just a parenthesised
                # string, unlike its sibling 1-tuples; the trailing comma
                # makes all three rows the same shape (rendering is the same
                # either way — one field per row).
                ('is_admin',),
            )
        }),
    )


admin.site.register(Users, UsersAdmin)
|
def sort_numbers(nums):
    """Sort a list of numbers in ascending order, in place.

    Same contract as the original hand-rolled O(n^2) exchange sort — nums is
    mutated and also returned — but delegates to the built-in O(n log n)
    list.sort().

    Args:
        nums: list of mutually comparable values.

    Returns:
        The same list object, sorted ascending.
    """
    nums.sort()
    return nums


print(sort_numbers([5, 7, 1, 6, 10, 4, 2]))
<reponame>lahosken/pants
// Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
package org.pantsbuild.testproject.workdirs.onedir;
import org.junit.Test;
import java.io.File;
import static org.junit.Assert.assertTrue;
/**
 * Ensure cwd works correctly.
 *
 * This test depends on the contents of org/pantsbuild/testproject/workdirs/twodir.
 * */
public class WorkdirTest {
  @Test
  public void testPlaceholderExists() {
    // placeholder.txt is resolved via a relative path, so this assertion
    // fails whenever the test runner's working directory is not the one
    // containing the file.
    assertTrue("Could not find placeholder.txt, working directory must be wrong!",
        new File("placeholder.txt").exists());
  }
}
|
<gh_stars>0
package com.spmovy;
import org.junit.Test;
import org.mockito.Mockito;
import javax.servlet.http.HttpServletResponse;
import static org.junit.Assert.*;
public class UtilsTest extends Mockito {
  @Test
  public void getDatabaseUtils() throws Exception {
    // A mocked servlet response is sufficient: the test only checks that
    // the factory returns a non-null object for a response instance.
    HttpServletResponse response = mock(HttpServletResponse.class);
    assertNotNull(Utils.getDatabaseUtils(response));
  }
}
class Action:
    """String constants naming the supported write actions."""
    INSERT = "INSERT"
    UPSERT = "UPSERT"


class NA:
    """Sentinel type marking a value that is not available."""
    pass


class RootItem:
    """Top-level item carrying the action being performed."""

    def __init__(self, action):
        self.action = action


class DataSubItem:
    """A sub-item tied to its root item plus the previously saved value."""

    def __init__(self, root, saved):
        self.root = root
        self.saved = saved


def inserting(data: DataSubItem) -> bool:
    """Return True when processing *data* amounts to an insert.

    True for an explicit INSERT, or for an UPSERT when there is no
    previously saved value (saved is the NA sentinel).
    """
    if data.root.action == Action.INSERT:
        return True
    return data.root.action == Action.UPSERT and data.saved is NA
<gh_stars>100-1000
// https://www.codechef.com/OCT17/problems/CHEFGP/
#include <iostream>
using namespace std;
// One test case of https://www.codechef.com/OCT17/problems/CHEFGP/ :
// read a string s and limits x, y, then print a rearrangement whose runs
// of 'a' (resp. the other letter) are broken up by the opposite letter,
// inserting '*' separators where the counts make that impossible.
void f() {
    string s;
    int x, y;
    cin >> s >> x >> y;
    // a = number of 'a' characters; b = everything else (i.e. 'b').
    int a = 0;
    int b = 0;
    for (int j = 0; j < s.size(); j++) {
        if (s[j] == 'a') a++;
        else b++;
    }
    // Minimum block counts: ceil(a/x) blocks of 'a', ceil(b/y) blocks of 'b'.
    int ca = (a + x - 1) / x;
    int cb = (b + y - 1) / y;
    if (ca == cb) {
        // Equal block counts: alternate full blocks of x 'a's and y 'b's,
        // then flush whatever remains of each letter.
        while (a >= x && b >= y) {
            for (int k = 0; k < x; k++) cout << 'a';
            a -= x;
            for (int k = 0; k < y; k++) cout << 'b';
            b -= y;
        }
        for (int k = 0; k < a; k++) cout << 'a';
        for (int k = 0; k < b; k++) cout << 'b';
        cout << endl;
        return;
    }
    if (ca > cb && b >= ca) {
        // More 'a' blocks than 'b' blocks, but enough 'b's to separate them:
        // spread the b's evenly — kb per gap, the first mb gaps get one extra.
        int kb = b / ca;
        int mb = b % ca;
        while (a >= x) {
            for (int k = 0; k < x; k++) cout << 'a';
            a -= x;
            int l = kb + (mb ? 1 : 0);
            for (int k = 0; k < l; k++) cout << 'b';
            b -= l;
            if (mb) mb--;
        }
        for (int k = 0; k < a; k++) cout << 'a';
        for (int k = 0; k < b; k++) cout << 'b';
        cout << endl;
        return;
    }
    if (ca > cb) {
        // Too few 'b's to separate all 'a' blocks: use each 'b' as a
        // separator, then separate the remaining 'a' blocks with '*'.
        while (b > 0) {
            for (int k = 0; k < x; k++) cout << 'a';
            a -= x;
            cout << 'b';
            b--;
        }
        while (a > 0) {
            int l = min(a, x);
            for (int k = 0; k < l; k++) cout << 'a';
            a -= l;
            if (a) cout << '*';
        }
        cout << endl;
        return;
    }
    if (ca < cb && a >= cb) {
        // Mirror of the second case: more 'b' blocks, enough 'a's to
        // separate them evenly.
        int ka = a / cb;
        int ma = a % cb;
        while (b >= y) {
            for (int k = 0; k < y; k++) cout << 'b';
            b -= y;
            int l = ka + (ma ? 1 : 0);
            for (int k = 0; k < l; k++) cout << 'a';
            a -= l;
            if (ma) ma--;
        }
        for (int k = 0; k < b; k++) cout << 'b';
        for (int k = 0; k < a; k++) cout << 'a';
        cout << endl;
        return;
    }
    // Mirror of the third case: too few 'a's to separate all 'b' blocks.
    while (a > 0) {
        for (int k = 0; k < y; k++) cout << 'b';
        b -= y;
        cout << 'a';
        a--;
    }
    while (b > 0) {
        int l = min(b, y);
        for (int k = 0; k < l; k++) cout << 'b';
        b -= l;
        if (b) cout << '*';
    }
    cout << endl;
    return;
}
// Read the number of test cases, then run f() once per case.
int main() {
    int t;
    cin >> t;
    while (t-- > 0) {
        f();
    }
}
|
<reponame>theLambda/DBH-project1
# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from cement.utils.misc import minimal_logger
from botocore.exceptions import EndpointConnectionError
from ebcli.lib import aws
from ebcli.core import io
from ebcli.objects.exceptions import ServiceError
LOG = minimal_logger(__name__)
# Regions where CodeCommit is known to be available.
SUPPORTED_REGIONS = [
    "ap-east-1",  # Asia Pacific (Hong Kong)
    "ap-northeast-1",  # Asia Pacific (Tokyo)
    "ap-northeast-2",  # Asia Pacific (Seoul)
    "ap-south-1",  # Asia Pacific (Mumbai)
    "ap-southeast-1",  # Asia Pacific (Singapore)
    "ap-southeast-2",  # Asia Pacific (Sydney)
    "ca-central-1",  # Canada (Central)
    "cn-north-1",  # China (Beijing)
    # Bug fix: this entry was missing its trailing comma, so implicit string
    # concatenation fused it with the next literal into
    # "cn-northwest-1eu-central-1", silently dropping BOTH regions from the
    # supported list.
    "cn-northwest-1",  # China (Ningxia)
    "eu-central-1",  # Europe (Frankfurt)
    "eu-north-1",  # Europe (Stockholm)
    "eu-west-1",  # Europe (Ireland)
    "eu-west-2",  # Europe (London)
    "eu-west-3",  # Europe (Paris)
    "me-south-1",  # Middle East (Bahrain)
    "sa-east-1",  # South America (Sao Paulo)
    "us-east-1",  # US East (N. Virginia)
    "us-east-2",  # US East (Ohio)
    "us-gov-east-1",  # AWS GovCloud (US-East)
    "us-gov-west-1",  # AWS GovCloud (US-West)
    "us-west-1",  # US West (N. California)
    "us-west-2",  # US West (Oregon)
]

# Regions where CodeCommit is known to be unavailable.
UNSUPPORTED_REGIONS = [
    "af-south-1",  # Africa (Cape Town)
    "ap-northeast-3",  # Asia Pacific (Osaka-Local)
    "eu-south-1",  # Europe (Milan)
]
def _make_api_call(operation_name, **operation_options):
    """Invoke a CodeCommit operation through the shared aws client wrapper.

    On an AccessDeniedException the list of IAM permissions the CLI needs is
    echoed to the user first; every ServiceError is then re-raised so callers
    can handle it.
    """
    try:
        result = aws.make_api_call('codecommit', operation_name, **operation_options)
    except ServiceError as ex:
        if ex.code == 'AccessDeniedException':
            io.echo(
                "EB CLI does not have the right permissions to access CodeCommit."
                " List of IAM policies needed by EB CLI, please configure and try again.\n"
                " codecommit:CreateRepository\n"
                " codecommit:CreateBranch\n"
                " codecommit:GetRepository\n"
                " codecommit:ListRepositories\n"
                " codecommit:ListBranches\n"
                "To learn more, see Docs: "
                "http://docs.aws.amazon.com/codecommit/latest/userguide/access-permissions.html"
            )
        raise ex
    return result
def create_repository(repo_name, repo_description=None):
    """Create a CodeCommit repository, optionally with a description."""
    params = {'repositoryName': repo_name}
    if repo_description is not None:
        params['repositoryDescription'] = repo_description
    return _make_api_call('create_repository', **params)
def create_branch(repo_name, branch_name, commit_id):
    """Create a branch in the repository pointing at the given commit."""
    _make_api_call(
        'create_branch',
        repositoryName=repo_name,
        branchName=branch_name,
        commitId=commit_id,
    )
def get_repository(repo_name):
    """Fetch metadata for a single CodeCommit repository."""
    return _make_api_call('get_repository', repositoryName=repo_name)
def get_branch(repo_name, branch_name):
    """Fetch metadata for one branch of a repository."""
    return _make_api_call(
        'get_branch', repositoryName=repo_name, branchName=branch_name)
def list_repositories(next_token=None, sort_by='lastModifiedDate', order='descending'):
    """List repositories, most recently modified first by default."""
    params = {}
    # Only forward options the caller actually provided.
    if next_token is not None:
        params['nextToken'] = next_token
    if sort_by is not None:
        params['sortBy'] = sort_by
    if order is not None:
        params['order'] = order
    return _make_api_call('list_repositories', **params)
def list_branches(repo_name, next_token=None):
    """List the branches of a repository, one page at a time."""
    params = {'repositoryName': repo_name}
    if next_token is not None:
        params['nextToken'] = next_token
    return _make_api_call('list_branches', **params)
def region_supported():
    """Best-effort check that CodeCommit is available in the current region.

    Returns False for unset or known-unsupported regions, True for
    known-supported ones, and otherwise probes the endpoint directly.
    """
    region = aws.get_region_name()
    if region is None or region in UNSUPPORTED_REGIONS:
        return False
    if region in SUPPORTED_REGIONS:
        return True
    # If region support is unknown attempt to make a request and check for
    # connection error. If there is a connection error it is most likely that
    # the region is not supported. This is a fall back for regions that have
    # not been added to our region support lists.
    try:
        list_repositories()
    except EndpointConnectionError as e:
        LOG.debug(
            'Could not connect to CodeCommit in region {}: {}'.format(
                region, e))
        return False
    except Exception as e:
        # Any other failure (auth, throttling, ...) still means the endpoint
        # answered, so fall through and report the region as supported.
        LOG.debug('Request failed while checking region support: {}'.format(e))
    return True
|
<filename>src/visualizers/open-data-table-enum-visualizer.js
/*
@license
Copyright (c) 2020 <NAME>. All rights reserved.
*/
import { html, css, LitElement } from 'lit-element';
import { OpenDataTableVisualizerController } from './open-data-table-visualizer-controller.js';
// Table-cell visualizer that renders an enum-style numeric cell value as a
// human-readable label taken from the column's params.lookups array.
export class OpenDataTableEnumVisualizer extends OpenDataTableVisualizerController(LitElement) {
// Host styles: the cell clips overflow; container/label alignment is driven
// by their "align" attribute (left by default, see render()).
static get styles() {
return [css`
:host {
display: block;
overflow: hidden;
}
:host([hidden]) {
display: none !important;
}
:host([disabled]) {
pointer-events: none;
}
.container {
display: flex;
justify-content: flex-start;
height: 100%;
text-overflow: ellipsis;
white-space: nowrap;
}
.label {
display: flex;
align-items: center;
justify-content: flex-start;
width: 100%;
padding: 0 4px;
text-align: left;
box-sizing: border-box;
}
.container[align="center"], .label[align="center"] {
justify-content: center;
}
.container[align="right"], .label[align="right"] {
justify-content: flex-end;
}
`];
}
static get properties() {
return {
// Human-readable text shown for the current numeric value.
displayValue: {
type: String
}
}
}
constructor() {
super();
this.displayValue = '';
}
// Recompute displayValue whenever the cell value or the column params
// change: the numeric value is used as an index into params.lookups when
// it is within range; otherwise the previous displayValue is kept.
updated(changedProperties) {
super.updated(changedProperties);
changedProperties.forEach((oldValue, propName) => {
if ((propName === 'value') || (propName === 'params')) {
if ((this.value !== undefined) && (this.value !== null) && (this.params) && (this.params.lookups)) {
const v = Number(this.value);
if ((v >= 0) && (v < this.params.lookups.length)) {
this.displayValue = this.params.lookups[v];
}
}
}
});
}
render() {
// The 'valueAlign' param ('left' by default) drives the alignment of both
// the container and the label; getStyles comes from the controller mixin.
const valueAlign = this.getParamValue('valueAlign', 'left');
const style = this.getStyles(this.value);
return html`
<div class="container" align="${valueAlign}">
<div class="label" align="${valueAlign}" style="${style}">${this.displayValue}</div>
</div>
`;
}
}
// Register the element so it can be used as <open-data-table-enum-visualizer>.
window.customElements.define('open-data-table-enum-visualizer', OpenDataTableEnumVisualizer);
|
#!/bin/sh
# jenkins build helper script for osmo-bts-lc15
# shellcheck source=contrib/jenkins_common.sh
. $(dirname "$0")/jenkins_common.sh
# Build dependencies and point the toolchain at the install prefix.
# NOTE(review): $inst and $deps appear to be set by the sourced
# jenkins_common.sh — confirm.
osmo-build-dep.sh libosmocore "" --disable-doxygen
export PKG_CONFIG_PATH="$inst/lib/pkgconfig:$PKG_CONFIG_PATH"
export LD_LIBRARY_PATH="$inst/lib"
osmo-build-dep.sh libosmo-abis
# Fetch the Litecell 1.5 layer-1 headers the BTS is compiled against.
cd "$deps"
osmo-layer1-headers.sh lc15 "$FIRMWARE_VERSION"
configure_flags="--enable-sanitize --with-litecell15=$deps/layer1-headers/inc/ --enable-litecell15"
build_bts "osmo-bts-lc15" "$configure_flags"
osmo-clean-workspace.sh
|
#!/bin/bash
# Start a local "netlify dev" server from the repository root to preview
# the docs build.
if ! netlify --version; then
echo "You must install the netlify cli to test our docs build"
echo "Try running:"
echo "brew install npm"
echo "npm install netlify-cli -g"
# Bug fix: the script previously fell through and ran "netlify dev" anyway
# after printing the install instructions; abort instead.
exit 1
fi
here="$(dirname "${0}")"
cd "${here}/.."
netlify dev
|
#!/bin/bash
# Bug fix: the first line read "#/bin/bash" (missing '!'), so the file had
# no valid shebang and the kernel could not pick the interpreter when the
# script was executed directly.
# Copyright (c) 2019 - The Procedural Generation for Gazebo authors
# For information on the respective copyright owner see the NOTICE file
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Directory of this script and of the model templates shipped next to it
# ($0 quoted so paths with spaces work).
BASEDIR=$(dirname "$0")
TEMPLATES_FOLDER=${BASEDIR}/templates/models
# Print usage plus the list of available template names (filenames under
# the templates folder with their extension stripped).
function usage(){
echo "usage: ./gen_model_from_template.sh TEMPLATE_NAME"
echo "Input template not found in ${TEMPLATES_FOLDER}"
echo "Options are:"
for FILENAME in $(ls ${TEMPLATES_FOLDER}); do
echo " - $(echo "$FILENAME" | cut -f 1 -d '.')"
done
}
TEMPLATE_FILE=$1
# No matching .sdf.jinja template -> print usage (which lists valid names).
if [ ! -f ${TEMPLATES_FOLDER}/${TEMPLATE_FILE}.sdf.jinja ]; then
usage
else
# Model name is the template argument minus any extension.
MODEL_NAME=$(echo "$TEMPLATE_FILE" | cut -f 1 -d '.')
echo "Processing model template=${TEMPLATES_FOLDER}/${TEMPLATE_FILE}.sdf.jinja"
echo "Model name=${MODEL_NAME}"
# Generated models go under ~/.pcg/models, falling back to /tmp when the
# home directory is not writable.
if [[ -w ${HOME} ]]; then
MODELS_FOLDER=${HOME}/.pcg/models
else
MODELS_FOLDER=/tmp/.pcg/models
fi
if [ ! -d ${MODELS_FOLDER}/${MODEL_NAME} ]; then
mkdir -p ${MODELS_FOLDER}/${MODEL_NAME}
fi
# Render model.config from the template shipped with pcg (the '$(PCG)'
# placeholder is passed literally and resolved by the tool itself).
pcg-process-jinja-template \
-i '$(PCG)/model.config.jinja' \
-o ${MODELS_FOLDER}/${MODEL_NAME}/model.config \
-p model_name=${MODEL_NAME} \
-p version=1.0 \
-p sdf_version=1.6 \
-p author_name=$(whoami) \
-p author_email=$(whoami)@email.com \
-p sdf_filename=model.sdf
# Render the SDF itself from the requested template.
pcg-process-jinja-template \
-i ${TEMPLATES_FOLDER}/${TEMPLATE_FILE}.sdf.jinja \
-o ${MODELS_FOLDER}/${MODEL_NAME}/model.sdf
echo "${MODEL_NAME} generated in ${MODELS_FOLDER}/${MODEL_NAME}"
fi
|
<filename>src/guards/base.guard.ts
import {
ExecutionContext,
mixin
} from '@nestjs/common';
import { InjectModel } from '@nestjs/mongoose';
import { AuthGuard, IAuthGuard, Type } from '@nestjs/passport';
import { RolesEnum } from 'enums/roles.enum';
import { Request } from 'express';
import memoize from 'lodash.memoize';
import { Model } from 'mongoose';
import { IpAddress, IpAddressDocument } from 'schemas/ip-address.schema';
import { UserDocument } from 'schemas/user.schema';
import { normalizeIpAddress } from 'utils/normalize-ip';
// Builds a role-specific JWT guard class: authentication via the passport
// 'jwt' strategy, then authorization by role plus — for users that are not
// system admins — an IP whitelist check against the stored ip-address docs.
function createBaseGuard(role: RolesEnum) {
class MixinBaseGuard extends AuthGuard('jwt') {
constructor(@InjectModel(IpAddress.name) private readonly ipAddressModel: Model<IpAddressDocument>) {
super()
}
// Passes only when the JWT is valid (super.canActivate), the user holds
// `role`, and verifyIpIsWhitelisted allows the request.
async canActivate(context: ExecutionContext): Promise<boolean> {
await super.canActivate(context);
const request = context.switchToHttp().getRequest<Request>();
const user = request.user as UserDocument;
return user.roles.includes(role) && this.verifyIpIsWhitelisted(request, user)
}
// True when the request's normalized IP matches one of the user's stored
// addresses; SYSTEM_ADMIN users bypass the whitelist entirely.
async verifyIpIsWhitelisted(request: Request, user: UserDocument): Promise<boolean> {
const ipAddresses = await this.ipAddressModel.find({ user: user._id });
const isWhiteListed = !!ipAddresses.find(item => item.address === normalizeIpAddress(request));
const isNotSystemAdmin = !user.roles.includes(RolesEnum.SYSTEM_ADMIN);
return isNotSystemAdmin ? isWhiteListed : true;
}
}
return mixin(MixinBaseGuard);
}
// Memoized so each role maps to a single generated guard class.
export const BaseGuard: (
role: RolesEnum,
) => Type<IAuthGuard> = memoize(createBaseGuard);
|
<reponame>joojis/cron-jobs
const { SDK } = require("codechain-sdk");
const assert = require("assert");
// Sweeps nearly the whole balance of the node's single local account to the
// address given as argv[2], then polls until the transaction is mined.
async function main() {
const toAddress = process.argv[2];
console.log(toAddress);
// Fail fast on a malformed recipient address.
try {
SDK.Core.classes.PlatformAddress.fromString(toAddress);
} catch (err) {
console.error(`Invalid to address "${toAddress}"`);
throw err;
}
let sdk = new SDK({
server: "http://localhost:8080"
});
// Re-create the SDK bound to the network id the node reports.
const networkId = await sdk.rpc.chain.getNetworkId();
console.log(`Network Id ${networkId}`);
sdk = new SDK({
server: "http://localhost:8080",
networkId
});
// Exactly one local account is expected on this node.
const accounts = await sdk.rpc.account.getList();
console.log("Accounts: %j", accounts);
assert.strictEqual(accounts.length, 1);
const balance = await sdk.rpc.chain.getBalance(accounts[0]);
console.log(`${accounts[0]}: ${balance}`);
// Skip small balances (the fee alone is 100; 1000 leaves a margin).
if (balance.lt(1000)) {
return;
}
// Send everything except 100, which covers the transaction fee below.
const payTransaction = sdk.core.createPayTransaction({
recipient: toAddress,
quantity: balance.minus(100)
});
const sendResult = await sdk.rpc.account.sendTransaction({
tx: payTransaction,
account: accounts[0],
fee: 100
});
console.log(`Sent ${JSON.stringify(sendResult)}`);
// Poll once per second, for up to 100 seconds, until the tx is mined.
for (let i = 0; i < 100; i++) {
const contains = await sdk.rpc.chain.containsTransaction(sendResult.hash);
if (contains) {
console.log(`Success ${sendResult.hash}`);
return;
}
await new Promise(resolve => {
setTimeout(() => {
resolve();
}, 1000);
});
}
console.error(`Transaction is not mined ${sendResult.hash}`);
}
main().catch(console.error);
|
#!/bin/bash
#
# Copyright 2016 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Tests the examples provided in Bazel
#
# Load the test setup defined in the parent directory
CURRENT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "${CURRENT_DIR}/../integration_test_setup.sh" \
|| { echo "integration_test_setup.sh not found!" >&2; exit 1; }
# Positional args: java_toolchain label, a java_tools zip path (or the
# literal "released" to skip the override), and optionally a --javabase
# value; toolchain/javabase are folded into the test .bazelrc.
JAVA_TOOLCHAIN="$1"; shift
add_to_bazelrc "build --java_toolchain=${JAVA_TOOLCHAIN}"
JAVA_TOOLS_ZIP="$1"; shift
if [[ "${JAVA_TOOLS_ZIP}" != "released" ]]; then
JAVA_TOOLS_ZIP_FILE_URL="file://$(rlocation io_bazel/$JAVA_TOOLS_ZIP)"
echo "JAVA_TOOLS_ZIP_FILE_URL=$JAVA_TOOLS_ZIP_FILE_URL"
fi
# Default to empty so later expansions of the URL never fail.
JAVA_TOOLS_ZIP_FILE_URL=${JAVA_TOOLS_ZIP_FILE_URL:-}
if [[ $# -gt 0 ]]; then
JAVABASE_VALUE="$1"; shift
add_to_bazelrc "build --javabase=${JAVABASE_VALUE}"
fi
# set_up: runs before each test; seeds the workspace's WORKSPACE file with
# the java_tools archives (an optional local override plus per-OS remote
# ones) and two pinned Zulu JDK archives used by the toolchain tests.
function set_up() {
cat >>WORKSPACE <<EOF
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
# java_tools versions only used to test Bazel with various JDK toolchains.
EOF
# Only register local_java_tools when a zip URL was derived above
# (i.e. JAVA_TOOLS_ZIP was not "released").
if [[ ! -z "${JAVA_TOOLS_ZIP_FILE_URL}" ]]; then
cat >>WORKSPACE <<EOF
http_archive(
name = "local_java_tools",
urls = ["${JAVA_TOOLS_ZIP_FILE_URL}"]
)
EOF
fi
cat >>WORKSPACE <<EOF
http_archive(
name = "remote_java_tools_javac9_test_linux",
urls = [
"https://mirror.bazel.build/bazel_java_tools/releases/javac9/v1.0/java_tools_javac9_linux-v1.0.zip",
],
)
http_archive(
name = "remote_java_tools_javac9_test_windows",
urls = [
"https://mirror.bazel.build/bazel_java_tools/releases/javac9/v1.0/java_tools_javac9_windows-v1.0.zip",
],
)
http_archive(
name = "remote_java_tools_javac9_test_darwin",
urls = [
"https://mirror.bazel.build/bazel_java_tools/releases/javac9/v1.0/java_tools_javac9_darwin-v1.0.zip",
],
)
http_archive(
name = "openjdk9_linux_archive",
build_file_content = "java_runtime(name = 'runtime', srcs = glob(['**']), visibility = ['//visibility:public'])",
sha256 = "45f2dfbee93b91b1468cf81d843fc6d9a47fef1f831c0b7ceff4f1eb6e6851c8",
strip_prefix = "zulu9.0.7.1-jdk9.0.7-linux_x64",
urls = [
"https://mirror.bazel.build/openjdk/azul-zulu-9.0.7.1-jdk9.0.7/zulu9.0.7.1-jdk9.0.7-linux_x64.tar.gz",
],
)
http_archive(
name = "openjdk10_linux_archive",
build_file_content = "java_runtime(name = 'runtime', srcs = glob(['**']), visibility = ['//visibility:public'])",
sha256 = "b3c2d762091a615b0c1424ebbd05d75cc114da3bf4f25a0dec5c51ea7e84146f",
strip_prefix = "zulu10.2+3-jdk10.0.1-linux_x64",
urls = [
"https://mirror.bazel.build/openjdk/azul-zulu10.2+3-jdk10.0.1/zulu10.2+3-jdk10.0.1-linux_x64.tar.gz",
],
)
EOF
}
# write_hello_library_files: lays out a minimal two-target Java workspace —
# a java_binary //java/main:main depending on java_library //java/hello_library.
function write_hello_library_files() {
mkdir -p java/main
cat >java/main/BUILD <<EOF
java_binary(name = 'main',
deps = ['//java/hello_library'],
srcs = ['Main.java'],
main_class = 'main.Main')
EOF
cat >java/main/Main.java <<EOF
package main;
import hello_library.HelloLibrary;
public class Main {
public static void main(String[] args) {
HelloLibrary.funcHelloLibrary();
System.out.println("Hello, World!");
}
}
EOF
mkdir -p java/hello_library
cat >java/hello_library/BUILD <<EOF
package(default_visibility=['//visibility:public'])
java_library(name = 'hello_library',
srcs = ['HelloLibrary.java']);
EOF
cat >java/hello_library/HelloLibrary.java <<EOF
package hello_library;
public class HelloLibrary {
public static void funcHelloLibrary() {
System.out.print("Hello, Library!;");
}
}
EOF
}
# write_files_for_java_provider_in_attr: generates a three-level "sandwich"
# build (Main -> top -> middle) where `top` has rule type $1 (java_library or
# java_import) and reaches `middle` through attribute $2 (exports or
# runtime_deps); `middle` is built by the custom Starlark rule produced by
# write_java_custom_rule.
function write_files_for_java_provider_in_attr() {
mkdir -p java/com/google/sandwich
touch java/com/google/sandwich/{BUILD,{A,B,Main}.java,java_custom_library.bzl}
rule_type="$1" # java_library / java_import
attribute_name="$2" # exports / runtime_deps
srcs_attribute_row="srcs = ['A.java']"
if [ "$rule_type" = "java_import" ]; then
srcs_attribute_row="jars = []"
fi
cat > java/com/google/sandwich/BUILD <<EOF
load(':java_custom_library.bzl', 'java_custom_library')
java_binary(
name = "Main",
EOF
# The binary either runs Main purely from runtime_deps or compiles it.
if [ "$attribute_name" = "runtime_deps" ]; then
cat >> java/com/google/sandwich/BUILD <<EOF
main_class = "com.google.sandwich.Main",
runtime_deps = [":top"]
)
EOF
else
cat >> java/com/google/sandwich/BUILD <<EOF
srcs = ["Main.java"],
deps = [":top"]
)
EOF
fi
# Emit the `top` target of the requested rule type, wiring `middle`
# through the requested attribute.
echo "$rule_type(" >> java/com/google/sandwich/BUILD
cat >> java/com/google/sandwich/BUILD <<EOF
name = "top",
EOF
echo " $srcs_attribute_row," >> java/com/google/sandwich/BUILD
echo " $attribute_name = [':middle']" >> java/com/google/sandwich/BUILD
cat >> java/com/google/sandwich/BUILD <<EOF
)
java_custom_library(
name = "middle",
EOF
if [ "$attribute_name" = "runtime_deps" ]; then
cat >> java/com/google/sandwich/BUILD <<EOF
srcs = ["B.java", "Main.java"],
)
EOF
else
cat >> java/com/google/sandwich/BUILD <<EOF
srcs = ["B.java"],
)
EOF
fi
cat > java/com/google/sandwich/B.java <<EOF
package com.google.sandwich;
class B {
public void printB() {
System.out.println("Message from B");
}
}
EOF
# A.java only exists for java_library (the java_import variant has no srcs).
if [ "$rule_type" = "java_library" ]; then
cat > java/com/google/sandwich/A.java <<EOF
package com.google.sandwich;
class A {
public void printA() {
System.out.println("Message from A");
}
}
EOF
fi
cat > java/com/google/sandwich/Main.java <<EOF
package com.google.sandwich;
class Main {
public static void main(String[] args) {
EOF
# Main only references A in the java_library + exports combination.
if [[ "$rule_type" = "java_library" && "$attribute_name" = "exports" ]]; then
cat >> java/com/google/sandwich/Main.java <<EOF
A myObjectA = new A();
myObjectA.printA();
EOF
fi
cat >> java/com/google/sandwich/Main.java <<EOF
B myObjectB = new B();
myObjectB.printB();
}
}
EOF
}
# write_java_custom_rule: writes java_custom_library.bzl, a Starlark rule
# that compiles via java_common.compile. The heredoc delimiter is unquoted,
# so ${JAVA_TOOLCHAIN} is expanded into the generated file.
function write_java_custom_rule() {
cat > java/com/google/sandwich/java_custom_library.bzl << EOF
def _impl(ctx):
deps = [dep[java_common.provider] for dep in ctx.attr.deps]
exports = [export[java_common.provider] for export in ctx.attr.exports]
output_jar = ctx.actions.declare_file("lib" + ctx.label.name + ".jar")
compilation_provider = java_common.compile(
ctx,
source_files = ctx.files.srcs,
output = output_jar,
deps = deps,
exports = exports,
resources = ctx.files.resources,
strict_deps = "ERROR",
java_toolchain = ctx.attr._java_toolchain[java_common.JavaToolchainInfo],
host_javabase = ctx.attr._host_javabase[java_common.JavaRuntimeInfo],
)
return struct(
files = depset([output_jar]),
providers = [compilation_provider]
)
java_custom_library = rule(
implementation = _impl,
attrs = {
"srcs": attr.label_list(allow_files=True),
"deps": attr.label_list(),
"exports": attr.label_list(),
"resources": attr.label_list(allow_files=True),
"_java_toolchain": attr.label(default = Label("${JAVA_TOOLCHAIN}")),
"_host_javabase": attr.label(default = Label("@bazel_tools//tools/jdk:current_host_java_runtime"))
},
fragments = ["java"]
)
EOF
}
# Smoke test: the hello-world java_binary builds with the toolchain under test.
function test_build_hello_world() {
write_hello_library_files
bazel build //java/main:main &> $TEST_log || fail "build failed"
}
# With list-based strategy selection, Java compilation should report the
# persistent worker strategy without any explicit --spawn_strategy flag.
function test_worker_strategy_is_default() {
write_hello_library_files
bazel build //java/main:main \
--incompatible_list_based_execution_strategy_selection &> $TEST_log || fail "build failed"
# By default, Java rules use worker strategy
expect_log " processes: .*worker"
}
# An explicit --spawn_strategy=local must win over the Java rules' worker
# default: no worker processes should appear in the build summary.
function test_strategy_overrides_worker_default() {
write_hello_library_files
bazel build //java/main:main \
--incompatible_list_based_execution_strategy_selection \
--spawn_strategy=local &> $TEST_log || fail "build failed"
# Java rules defaulting to worker do not override the strategy specified on
# the cli
expect_not_log " processes: .*worker"
}
# With --spawn_strategy=worker,local the first entry (worker) is used.
function test_strategy_picks_first_preferred_worker() {
write_hello_library_files
bazel build //java/main:main \
--incompatible_list_based_execution_strategy_selection \
--spawn_strategy=worker,local &> $TEST_log || fail "build failed"
expect_log " processes: .*worker"
}
# With --spawn_strategy=local,worker the first entry (local) is used and no
# worker processes should be reported.
function test_strategy_picks_first_preferred_local() {
write_hello_library_files
bazel build //java/main:main \
--incompatible_list_based_execution_strategy_selection \
--spawn_strategy=local,worker &> $TEST_log || fail "build failed"
expect_not_log " processes: .*worker"
expect_log " processes: .*local"
}
# This test builds a simple java deploy jar using remote singlejar and ijar
# targets which compile them from source.
function test_build_hello_world_with_remote_embedded_tool_targets() {
write_hello_library_files
bazel build //java/main:main_deploy.jar --define EXECUTOR=remote \
&> $TEST_log || fail "build failed"
}
# This test verifies that jars named by deploy_env are excluded from the final
# deploy jar.
function test_build_with_deploy_env() {
write_hello_library_files
# Overwrite java/main to add deploy_env customizations and remove the
# compile-time hello_library dependency.
cat >java/main/BUILD <<EOF
java_binary(name = 'env', runtime_deps = ['//java/hello_library'])
java_binary(name = 'main',
runtime_deps = ['//java/hello_library'],
srcs = ['Main.java'],
main_class = 'main.Main',
deploy_env = ['env'])
EOF
cat >java/main/Main.java <<EOF
package main;
public class Main {
public static void main(String[] args) {
System.out.println("Hello, World!");
}
}
EOF
bazel build //java/main:main_deploy.jar &> $TEST_log || fail "build failed"
# Classes reachable through the 'env' deploy_env binary must not be bundled.
zipinfo -1 ${PRODUCT_NAME}-bin/java/main/main_deploy.jar &> $TEST_log \
|| fail "Failed to zipinfo ${PRODUCT_NAME}-bin/java/main/main_deploy.jar"
expect_not_log "hello_library/HelloLibrary.class"
}
# Compile A.java against B provided only as a source jar on -sourcepath
# (passed through javacopts), with -implicit:none set.
function test_build_with_sourcepath() {
mkdir -p g
cat >g/A.java <<'EOF'
package g;
public class A {
public A() {
new B();
}
}
EOF
cat >g/B.java <<'EOF'
package g;
public class B {
public B() {
}
}
EOF
cat >g/BUILD <<'EOF'
genrule(
name = "stub",
srcs = ["B.java"],
outs = ["B.jar"],
cmd = "zip $@ $(SRCS)",
)
java_library(
name = "test",
srcs = ["A.java"],
javacopts = ["-sourcepath $(GENDIR)/$(location :stub)", "-implicit:none"],
deps = [":stub"]
)
EOF
bazel build //g:test >$TEST_log || fail "Failed to build //g:test"
}
# java_common.compile with an explicit sourcepath attribute: A compiles
# against B.jar taken from the sourcepath, and B's class must not end up in
# the output jar.
function test_java_common_compile_sourcepath() {
# TODO(bazel-team): Enable this for Java 7 when VanillaJavaBuilder supports --sourcepath.
JAVA_VERSION="1.$(bazel query --output=build '@bazel_tools//tools/jdk:remote_toolchain' | grep source_version | cut -d '"' -f 2)"
if [ "${JAVA_VERSION}" = "1.7" ]; then
return 0
fi
mkdir -p g
cat >g/A.java <<'EOF'
package g;
public class A {
public A() {
new B();
}
}
EOF
cat >g/B.java <<'EOF'
package g;
public class B {
public B() {
}
}
EOF
cat >g/BUILD <<'EOF'
load(':java_custom_library.bzl', 'java_custom_library')
genrule(
name = "stub",
srcs = ["B.java"],
outs = ["B.jar"],
cmd = "zip $@ $(SRCS)",
)
java_custom_library(
name = "test",
srcs = ["A.java"],
sourcepath = [":stub"]
)
EOF
# Unquoted EOF: ${JAVA_TOOLCHAIN} is expanded into the generated rule.
cat >g/java_custom_library.bzl << EOF
def _impl(ctx):
output_jar = ctx.actions.declare_file("lib" + ctx.label.name + ".jar")
compilation_provider = java_common.compile(
ctx,
source_files = ctx.files.srcs,
output = output_jar,
deps = [],
sourcepath = ctx.files.sourcepath,
strict_deps = "ERROR",
java_toolchain = ctx.attr._java_toolchain[java_common.JavaToolchainInfo],
host_javabase = ctx.attr._host_javabase[java_common.JavaRuntimeInfo],
)
return struct(
files = depset([output_jar]),
providers = [compilation_provider]
)
java_custom_library = rule(
implementation = _impl,
attrs = {
"srcs": attr.label_list(allow_files=True),
"sourcepath": attr.label_list(),
"_java_toolchain": attr.label(default = Label("${JAVA_TOOLCHAIN}")),
"_host_javabase": attr.label(default = Label("@bazel_tools//tools/jdk:current_host_java_runtime"))
},
fragments = ["java"]
)
EOF
bazel build //g:test &> $TEST_log || fail "Failed to build //g:test"
# Only the explicitly compiled class may appear in the jar.
zipinfo -1 bazel-bin/g/libtest.jar >> $TEST_log || fail "Failed to zipinfo -1 bazel-bin/g/libtest.jar"
expect_log "g/A.class"
expect_not_log "g/B.class"
}
# Companion to test_java_common_compile_sourcepath: with javac_opts
# ["-implicit:class"], classes compiled implicitly from the sourcepath jar
# (B.class) ARE kept in the output jar alongside A.class.
function test_java_common_compile_sourcepath_with_implicit_class() {
# TODO(bazel-team): Enable this for Java 7 when VanillaJavaBuilder supports --sourcepath.
JAVA_VERSION="1.$(bazel query --output=build '@bazel_tools//tools/jdk:remote_toolchain' | grep source_version | cut -d '"' -f 2)"
if [ "${JAVA_VERSION}" = "1.7" ]; then
return 0
fi

mkdir -p g
cat >g/A.java <<'EOF'
package g;
public class A {
public A() {
new B();
}
}
EOF
cat >g/B.java <<'EOF'
package g;
public class B {
public B() {
}
}
EOF
cat >g/BUILD <<'EOF'
load(':java_custom_library.bzl', 'java_custom_library')
genrule(
name = "stub",
srcs = ["B.java"],
outs = ["B.jar"],
cmd = "zip $@ $(SRCS)",
)

java_custom_library(
name = "test",
srcs = ["A.java"],
sourcepath = [":stub"]
)
EOF
# Unquoted heredoc: ${JAVA_TOOLCHAIN} is substituted by the shell.
cat >g/java_custom_library.bzl << EOF
def _impl(ctx):
output_jar = ctx.actions.declare_file("lib" + ctx.label.name + ".jar")

compilation_provider = java_common.compile(
ctx,
source_files = ctx.files.srcs,
output = output_jar,
javac_opts = ["-implicit:class"],
deps = [],
sourcepath = ctx.files.sourcepath,
strict_deps = "ERROR",
java_toolchain = ctx.attr._java_toolchain[java_common.JavaToolchainInfo],
host_javabase = ctx.attr._host_javabase[java_common.JavaRuntimeInfo],
)
return struct(
files = depset([output_jar]),
providers = [compilation_provider]
)

java_custom_library = rule(
implementation = _impl,
attrs = {
"srcs": attr.label_list(allow_files=True),
"sourcepath": attr.label_list(),
"_java_toolchain": attr.label(default = Label("${JAVA_TOOLCHAIN}")),
"_host_javabase": attr.label(default = Label("@bazel_tools//tools/jdk:current_host_java_runtime"))
},
fragments = ["java"]
)
EOF
bazel build //g:test &> $TEST_log || fail "Failed to build //g:test"
zipinfo -1 bazel-bin/g/libtest.jar >> $TEST_log || fail "Failed to zipinfo -1 bazel-bin/g/libtest.jar"
expect_log "g/A.class"
expect_log "g/B.class"
}
# Runfiles is disabled by default on Windows, but we can test it on Unix by
# adding flag --enable_runfiles=0. The binary must still locate its runtime
# classpath and run end-to-end without a runfiles tree.
function test_build_and_run_hello_world_without_runfiles() {
write_hello_library_files
bazel run --enable_runfiles=0 //java/main:main &> $TEST_log || fail "build failed"
expect_log "Hello, Library!;Hello, World!"
}
# Verifies that an Error Prone check (ArrayEquals) fails the build by default,
# i.e. without any extra flags or javacopts.
function test_errorprone_error_fails_build_by_default() {
# Skipped on Java 7: VanillaJavaBuilder does not run Error Prone.
JAVA_VERSION="1.$(bazel query --output=build '@bazel_tools//tools/jdk:remote_toolchain' | grep source_version | cut -d '"' -f 2)"
if [ "${JAVA_VERSION}" = "1.7" ]; then
return 0
fi

write_hello_library_files
# Trigger an error-prone error by comparing two arrays via #equals().
cat >java/hello_library/HelloLibrary.java <<EOF
package hello_library;
public class HelloLibrary {
public static boolean funcHelloLibrary() {
int[] arr1 = {1, 2, 3};
int[] arr2 = {1, 2, 3};
return arr1.equals(arr2);
}
}
EOF
bazel build //java/main:main &> $TEST_log && fail "build should have failed" || true
expect_log "error: \[ArrayEquals\] Reference equality used to compare arrays"
}
# Verifies that the per-target javacopt -XepDisableAllChecks turns Error Prone
# off, so the ArrayEquals violation from the previous test now builds cleanly.
function test_extrachecks_off_disables_errorprone() {
# Skipped on Java 7: VanillaJavaBuilder does not run Error Prone.
JAVA_VERSION="1.$(bazel query --output=build '@bazel_tools//tools/jdk:remote_toolchain' | grep source_version | cut -d '"' -f 2)"
if [ "${JAVA_VERSION}" = "1.7" ]; then
return 0
fi

write_hello_library_files
# Trigger an error-prone error by comparing two arrays via #equals().
cat >java/hello_library/HelloLibrary.java <<EOF
package hello_library;
public class HelloLibrary {
public static boolean funcHelloLibrary() {
int[] arr1 = {1, 2, 3};
int[] arr2 = {1, 2, 3};
return arr1.equals(arr2);
}
}
EOF
# Disable error-prone for this target, though.
# (The trailing ',);' in the heredoc below is valid Starlark — a trailing
# comma plus a statement-separating semicolon — and is preserved as-is.)
cat >java/hello_library/BUILD <<EOF
package(default_visibility=['//visibility:public'])
java_library(name = 'hello_library',
srcs = ['HelloLibrary.java'],
javacopts = ['-XepDisableAllChecks'],);
EOF
bazel build //java/main:main &> $TEST_log || fail "build failed"
expect_not_log "error: \[ArrayEquals\] Reference equality used to compare arrays"
}
# Verifies that java_test's main_class attribute lets a custom runner wrap
# BazelTestRunner: both the runner's banner and the test's output must appear.
function test_java_test_main_class() {
setup_javatest_support
mkdir -p java/testrunners || fail "mkdir failed"

cat > java/testrunners/TestRunner.java <<EOF
package testrunners;

import com.google.testing.junit.runner.BazelTestRunner;
public class TestRunner {
public static void main(String[] argv) {
System.out.println("Custom test runner was run");
BazelTestRunner.main(argv);
}
}
EOF

cat > java/testrunners/Tests.java <<EOF
package testrunners;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.junit.Test;

@RunWith(JUnit4.class)
public class Tests {

@Test
public void testTest() {
System.out.println("testTest was run");
}
}
EOF

cat > java/testrunners/BUILD <<EOF
java_library(name = "test_runner",
srcs = ['TestRunner.java'],
deps = ['@bazel_tools//tools/jdk:TestRunner'],
)

java_test(name = "Tests",
srcs = ['Tests.java'],
deps = ['//third_party:junit4'],
main_class = "testrunners.TestRunner",
runtime_deps = [':test_runner']
)
EOF
# Fail loudly if bazel itself fails; previously the exit status was ignored
# and an infrastructure failure only surfaced as a cryptic expect_log miss.
bazel test --test_output=streamed //java/testrunners:Tests &> "$TEST_log" \
|| fail "Tests failed"
expect_log "Custom test runner was run"
expect_log "testTest was run"
}
# Basic "Java sandwich": a native java_binary depends (via a native
# java_library) on a custom Starlark java rule, which in turn depends on
# another native java_library — native/Starlark layers must interoperate.
function test_basic_java_sandwich() {
mkdir -p java/com/google/sandwich
touch java/com/google/sandwich/{BUILD,{A,B,C,Main}.java,java_custom_library.bzl}

# Dependency chain: Main -> top (native) -> middle (custom) -> bottom (native).
cat > java/com/google/sandwich/BUILD << EOF
load(':java_custom_library.bzl', 'java_custom_library')

java_binary(
name = "Main",
srcs = ["Main.java"],
deps = [":top"]
)

java_library(
name = "top",
srcs = ["A.java"],
deps = [":middle"]
)

java_custom_library(
name = "middle",
srcs = ["B.java"],
deps = [":bottom"]
)

java_library(
name = "bottom",
srcs = ["C.java"]
)
EOF

cat > java/com/google/sandwich/C.java << EOF
package com.google.sandwich;
class C {
public void printC() {
System.out.println("Message from C");
}
}
EOF

cat > java/com/google/sandwich/B.java << EOF
package com.google.sandwich;
class B {
C myObject;
public void printB() {
System.out.println("Message from B");
myObject = new C();
myObject.printC();
}
}
EOF

cat > java/com/google/sandwich/A.java << EOF
package com.google.sandwich;
class A {
B myObject;
public void printA() {
System.out.println("Message from A");
myObject = new B();
myObject.printB();
}
}
EOF

cat > java/com/google/sandwich/Main.java << EOF
package com.google.sandwich;
class Main {
public static void main(String[] args) {
A myObject = new A();
myObject.printA();
}
}
EOF

write_java_custom_rule

bazel run java/com/google/sandwich:Main > $TEST_log || fail "Java sandwich build failed"
expect_log "Message from A"
expect_log "Message from B"
expect_log "Message from C"
}
# Sandwich variant: the custom rule is reached through java_library's
# `exports` attribute (fixture written by write_files_for_java_provider_in_attr).
function test_java_library_exports_java_sandwich() {
write_files_for_java_provider_in_attr "java_library" "exports"
write_java_custom_rule

bazel run java/com/google/sandwich:Main > $TEST_log || fail "Java sandwich build failed"
expect_log "Message from A"
expect_log "Message from B"
}
# Sandwich variant: custom rule consumed via java_library's `runtime_deps`.
function test_java_library_runtime_deps_java_sandwich() {
write_files_for_java_provider_in_attr "java_library" "runtime_deps"
write_java_custom_rule

bazel run java/com/google/sandwich:Main > $TEST_log || fail "Java sandwich build failed"
expect_log "Message from B"
}
# Sandwich variant: custom rule consumed via java_import's `exports`.
function test_java_import_exports_java_sandwich() {
write_files_for_java_provider_in_attr "java_import" "exports"
write_java_custom_rule

bazel run java/com/google/sandwich:Main > $TEST_log || fail "Java sandwich build failed"
expect_log "Message from B"
}
# Sandwich variant: custom rule consumed via java_import's `runtime_deps`.
function test_java_import_runtime_deps_java_sandwich() {
write_files_for_java_provider_in_attr "java_import" "runtime_deps"
write_java_custom_rule

bazel run java/com/google/sandwich:Main > $TEST_log || fail "Java sandwich build failed"
expect_log "Message from B"
}
# Sandwich variant: java_binary depends DIRECTLY (deps) on the custom rule,
# which itself depends on a native java_library.
function test_java_binary_deps_java_sandwich() {
mkdir -p java/com/google/sandwich
touch java/com/google/sandwich/{BUILD,{A,B,Main}.java,java_custom_library.bzl}

cat > java/com/google/sandwich/BUILD << EOF
load(':java_custom_library.bzl', 'java_custom_library')

java_binary(
name = "Main",
srcs = ["Main.java"],
deps = [":custom"]
)

java_custom_library(
name = "custom",
srcs = ["A.java"],
deps = [":bottom"]
)

java_library(
name = "bottom",
srcs = ["B.java"]
)
EOF

cat > java/com/google/sandwich/B.java << EOF
package com.google.sandwich;
class B {
public void print() {
System.out.println("Message from B");
}
}
EOF

cat > java/com/google/sandwich/A.java << EOF
package com.google.sandwich;
class A {
B myObject;
public void print() {
System.out.println("Message from A");
myObject = new B();
myObject.print();
}
}
EOF

cat > java/com/google/sandwich/Main.java << EOF
package com.google.sandwich;
class Main {
public static void main(String[] args) {
A myObject = new A();
myObject.print();
}
}
EOF

write_java_custom_rule

bazel run java/com/google/sandwich:Main > "$TEST_log" || fail "Java sandwich build failed"
expect_log "Message from A"
expect_log "Message from B"
}
# Sandwich variant: java_binary has no srcs and reaches the custom rule via
# runtime_deps only; main_class points into the custom rule's output.
function test_java_binary_runtime_deps_java_sandwich() {
mkdir -p java/com/google/sandwich
touch java/com/google/sandwich/{BUILD,{A,Main}.java,java_custom_library.bzl}

cat > java/com/google/sandwich/BUILD << EOF
load(':java_custom_library.bzl', 'java_custom_library')

java_binary(
name = "Main",
main_class = "com.google.sandwich.Main",
runtime_deps = [":custom"]
)

java_custom_library(
name = "custom",
srcs = ["Main.java"],
deps = [":bottom"]
)

java_library(
name = "bottom",
srcs = ["A.java"]
)
EOF

cat > java/com/google/sandwich/A.java << EOF
package com.google.sandwich;
class A {
public void print() {
System.out.println("Message from A");
}
}
EOF

cat > java/com/google/sandwich/Main.java << EOF
package com.google.sandwich;
class Main {
public static void main(String[] args) {
System.out.println("Message from Main");
A myObject = new A();
myObject.print();
}
}
EOF

write_java_custom_rule

bazel run java/com/google/sandwich:Main > "$TEST_log" || fail "Java sandwich build failed"
expect_log "Message from Main"
expect_log "Message from A"
}
# Sandwich variant: a java_test depends on the custom Starlark rule.
function test_java_test_java_sandwich() {
setup_javatest_support
mkdir -p java/com/google/sandwich
# NOTE(review): this touches a top-level 'BUILD' (outside the braces) rather
# than java/com/google/sandwich/BUILD — looks unintentional but is harmless,
# since the package BUILD file is created by the cat below. Confirm intent.
touch BUILD java/com/google/sandwich/{{A,B,MainTest}.java,java_custom_library.bzl}

cat > java/com/google/sandwich/BUILD << EOF
load(':java_custom_library.bzl', 'java_custom_library')

java_test(
name = "MainTest",
size = "small",
srcs = ["MainTest.java"],
deps = [
":custom",
"//third_party:junit4",
],
)

java_custom_library(
name = "custom",
srcs = ["A.java"],
deps = [":bottom"]
)

java_library(
name = "bottom",
srcs = ["B.java"]
)
EOF

cat > java/com/google/sandwich/A.java << EOF
package com.google.sandwich;
class A {
B myObj = new B();
public boolean returnsTrue() {
System.out.println("Message from A");
return myObj.returnsTrue();
}
}
EOF

cat > java/com/google/sandwich/B.java << EOF
package com.google.sandwich;
class B {
public boolean returnsTrue() {
System.out.println("Message from B");
return true;
}
}
EOF

cat > java/com/google/sandwich/MainTest.java << EOF
package com.google.sandwich;

import static org.junit.Assert.assertTrue;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

@RunWith(JUnit4.class)
public class MainTest {
@Test
public void testReturnsTrue() {
A myObj = new A();
assertTrue(myObj.returnsTrue());
System.out.println("Test message");
}
}
EOF
write_java_custom_rule

bazel test java/com/google/sandwich:MainTest --test_output=streamed > "$TEST_log" || fail "Java sandwich for java_test failed"
expect_log "Message from A"
expect_log "Message from B"
expect_log "Test message"
}
# Verifies --explicit_java_test_deps: with the flag, java_test no longer gets
# an implicit JUnit dependency, so compilation fails until the dep is spelled
# out explicitly.
function test_explicit_java_test_deps_flag() {
setup_javatest_support
mkdir -p java/testrunners || fail "mkdir failed"

cat > java/testrunners/Tests.java <<EOF
package testrunners;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.junit.Test;

@RunWith(JUnit4.class)
public class Tests {

@Test
public void testTest() {
System.out.println("testTest was run");
}
}
EOF

# With explicit_java_test_deps, we fail without explicitly specifying the JUnit deps.
cat > java/testrunners/BUILD <<EOF
java_test(name = "Tests",
srcs = ['Tests.java'],
)
EOF
bazel test --test_output=streamed --explicit_java_test_deps //java/testrunners:Tests \
&> "$TEST_log" && fail "Expected Failure" || true
expect_log "cannot find symbol"

# We start passing again with explicit_java_test_deps once we explicitly specify the deps.
cat > java/testrunners/BUILD <<EOF
java_test(name = "Tests",
srcs = ['Tests.java'],
deps = ['//third_party:junit4'],
)
EOF
bazel test --test_output=streamed --explicit_java_test_deps //java/testrunners:Tests \
&> "$TEST_log" || fail "Expected success"
expect_log "testTest was run"
}
# Verifies the custom Starlark java rule packs a plain file given in
# `resources` into its output jar.
function test_java_sandwich_resources_file() {
mkdir -p java/com/google/sandwich
touch java/com/google/sandwich/{BUILD,A.java,java_custom_library.bzl,my_precious_resource.txt}

cat > java/com/google/sandwich/BUILD << EOF
load(':java_custom_library.bzl', 'java_custom_library')

java_custom_library(
name = "custom",
srcs = ["A.java"],
resources = ["my_precious_resource.txt"]
)
EOF

cat > java/com/google/sandwich/A.java << EOF
package com.google.sandwich;
class A { }
EOF

write_java_custom_rule

bazel build java/com/google/sandwich:custom > "$TEST_log" || fail "Java sandwich build failed"
# The unzip listing overwrites $TEST_log on purpose — only the jar contents
# are asserted below.
unzip -l bazel-bin/java/com/google/sandwich/libcustom.jar > "$TEST_log"
expect_log "my_precious_resource.txt"
}
# Same as test_java_sandwich_resources_file, but the resources are supplied
# indirectly through a filegroup target.
function test_java_sandwich_resources_filegroup() {
mkdir -p java/com/google/sandwich
touch java/com/google/sandwich/{BUILD,A.java,java_custom_library.bzl,my_precious_resource.txt,my_other_precious_resource.txt}

cat > java/com/google/sandwich/BUILD << EOF
load(':java_custom_library.bzl', 'java_custom_library')

filegroup(
name = "resources_group",
srcs = ["my_precious_resource.txt", "my_other_precious_resource.txt"]
)

java_custom_library(
name = "custom",
srcs = ["A.java"],
resources = [":resources_group"]
)
EOF

cat > java/com/google/sandwich/A.java << EOF
package com.google.sandwich;
class A { }
EOF

write_java_custom_rule

bazel build java/com/google/sandwich:custom > "$TEST_log" || fail "Java sandwich build failed"
# Overwrites $TEST_log with the jar listing; both resources must be inside.
unzip -l bazel-bin/java/com/google/sandwich/libcustom.jar > "$TEST_log"
expect_log "my_precious_resource.txt"
expect_log "my_other_precious_resource.txt"
}
# Verifies transitive `exports` chains between custom Starlark java rules:
# Main only deps on :custom, yet sees B and C through custom -> lib-b -> lib-c
# exports (unlike deps, exports make transitive targets directly visible).
function test_basic_java_sandwich_with_exports() {
mkdir -p java/com/google/sandwich
touch java/com/google/sandwich/{BUILD,{A,B,C,Main}.java,java_custom_library.bzl}

cat > java/com/google/sandwich/BUILD << EOF
load(':java_custom_library.bzl', 'java_custom_library')

java_binary(
name = "Main",
srcs = ["Main.java"],
deps = [":custom"]
)

java_custom_library(
name = "custom",
srcs = ["A.java"],
exports = [":lib-b"]
)

java_custom_library(
name = "lib-b",
srcs = ["B.java"],
exports = [":lib-c"]
)

java_custom_library(
name = "lib-c",
srcs = ["C.java"],
)
EOF

cat > java/com/google/sandwich/B.java << EOF
package com.google.sandwich;
class B {
public static void print() {
System.out.println("Message from B");
}
}
EOF

cat > java/com/google/sandwich/C.java << EOF
package com.google.sandwich;
class C {
public static void print() {
System.out.println("Message from C");
}
}
EOF

cat > java/com/google/sandwich/A.java << EOF
package com.google.sandwich;
class A {
public static void print() {
System.out.println("Message from A");
}
}
EOF

cat > java/com/google/sandwich/Main.java << EOF
package com.google.sandwich;
class Main {
public static void main(String[] args) {
A.print();
B.print();
C.print();
}
}
EOF

write_java_custom_rule

bazel run java/com/google/sandwich:Main > "$TEST_log" || fail "Java sandwich build failed"
expect_log "Message from A"
expect_log "Message from B"
expect_log "Message from C"
}
# Same exports-chain test, but the middle/bottom of the chain are native
# java_library targets exporting through a custom Starlark rule on top.
function test_basic_java_sandwich_with_exports_and_java_library() {
mkdir -p java/com/google/sandwich
touch java/com/google/sandwich/{BUILD,{A,B,C,Main}.java,java_custom_library.bzl}

cat > java/com/google/sandwich/BUILD << EOF
load(':java_custom_library.bzl', 'java_custom_library')

java_binary(
name = "Main",
srcs = ["Main.java"],
deps = [":custom"]
)

java_custom_library(
name = "custom",
srcs = ["A.java"],
exports = [":lib-b"]
)

java_library(
name = "lib-b",
srcs = ["B.java"],
exports = [":lib-c"]
)

java_library(
name = "lib-c",
srcs = ["C.java"],
)
EOF

cat > java/com/google/sandwich/B.java << EOF
package com.google.sandwich;
class B {
public static void print() {
System.out.println("Message from B");
}
}
EOF

cat > java/com/google/sandwich/C.java << EOF
package com.google.sandwich;
class C {
public static void print() {
System.out.println("Message from C");
}
}
EOF

cat > java/com/google/sandwich/A.java << EOF
package com.google.sandwich;
class A {
public static void print() {
System.out.println("Message from A");
}
}
EOF

cat > java/com/google/sandwich/Main.java << EOF
package com.google.sandwich;
class Main {
public static void main(String[] args) {
A.print();
B.print();
C.print();
}
}
EOF

write_java_custom_rule

bazel run java/com/google/sandwich:Main > "$TEST_log" || fail "Java sandwich build failed"
expect_log "Message from A"
expect_log "Message from B"
expect_log "Message from C"
}
# Verifies that the custom rule still builds when its strict_deps setting is
# rewritten from "ERROR" to "DEFAULT" and then to "WARN".
function test_java_sandwich_default_strict_deps() {
mkdir -p java/com/google/sandwich
touch java/com/google/sandwich/{BUILD,A.java,java_custom_library.bzl}
write_java_custom_rule

cat > java/com/google/sandwich/BUILD << EOF
load(':java_custom_library.bzl', 'java_custom_library')

java_custom_library(
name = "custom",
srcs = ["A.java"]
)
EOF

# NOTE(review): 'sed -i --' relies on GNU sed semantics ('--' ends options);
# BSD sed would treat '--' as the -i backup suffix. Presumably only run on
# Linux CI — confirm before running elsewhere.
sed -i -- 's/ERROR/DEFAULT/g' 'java/com/google/sandwich/java_custom_library.bzl'
bazel build java/com/google/sandwich:custom > $TEST_log || fail "Java sandwich build failed"

sed -i -- 's/DEFAULT/WARN/g' 'java/com/google/sandwich/java_custom_library.bzl'
bazel build java/com/google/sandwich:custom > $TEST_log || fail "Java sandwich build failed"
}
# Negative test: plain `deps` (unlike `exports`) do NOT make transitive
# targets visible — Main referencing B and C through indirect java_library
# deps must fail strict-deps checking.
function test_basic_java_sandwich_with_transitive_deps_and_java_library_should_fail() {
mkdir -p java/com/google/sandwich
touch java/com/google/sandwich/{BUILD,{A,B,C,Main}.java,java_custom_library.bzl}

cat > java/com/google/sandwich/BUILD << EOF
load(':java_custom_library.bzl', 'java_custom_library')

java_binary(
name = "Main",
srcs = ["Main.java"],
deps = [":custom"]
)

java_custom_library(
name = "custom",
srcs = ["A.java"],
deps = [":lib-b"]
)

java_library(
name = "lib-b",
srcs = ["B.java"],
deps = [":lib-c"]
)

java_library(
name = "lib-c",
srcs = ["C.java"],
)
EOF

cat > java/com/google/sandwich/B.java << EOF
package com.google.sandwich;
class B {
public static void print() {
System.out.println("Message from B");
}
}
EOF

cat > java/com/google/sandwich/C.java << EOF
package com.google.sandwich;
class C {
public static void print() {
System.out.println("Message from C");
}
}
EOF

cat > java/com/google/sandwich/A.java << EOF
package com.google.sandwich;
class A {
public static void print() {
System.out.println("Message from A");
}
}
EOF

cat > java/com/google/sandwich/Main.java << EOF
package com.google.sandwich;
class Main {
public static void main(String[] args) {
A.print();
B.print();
C.print();
}
}
EOF

write_java_custom_rule

# Fixed typo in the failure message ("shold" -> "should").
bazel run java/com/google/sandwich:Main &> "$TEST_log" && fail "Java sandwich build should have failed" || true
expect_log "Using type com.google.sandwich.B from an indirect dependency"
expect_log "Using type com.google.sandwich.C from an indirect dependency"
}
# Negative test, all-custom-rule variant: transitive `deps` between custom
# Starlark rules are likewise invisible to Main, so the build must fail
# strict-deps checking for both B and C.
function test_basic_java_sandwich_with_deps_should_fail() {
mkdir -p java/com/google/sandwich
touch java/com/google/sandwich/{BUILD,{A,B,C,Main}.java,java_custom_library.bzl}

cat > java/com/google/sandwich/BUILD << EOF
load(':java_custom_library.bzl', 'java_custom_library')

java_binary(
name = "Main",
srcs = ["Main.java"],
deps = [":custom"]
)

java_custom_library(
name = "custom",
srcs = ["A.java"],
deps = [":lib-b"]
)

java_custom_library(
name = "lib-b",
srcs = ["B.java"],
deps = [":lib-c"]
)

java_custom_library(
name = "lib-c",
srcs = ["C.java"],
)
EOF

cat > java/com/google/sandwich/B.java << EOF
package com.google.sandwich;
class B {
public static void print() {
System.out.println("Message from B");
}
}
EOF

cat > java/com/google/sandwich/C.java << EOF
package com.google.sandwich;
class C {
public static void print() {
System.out.println("Message from C");
}
}
EOF

cat > java/com/google/sandwich/A.java << EOF
package com.google.sandwich;
class A {
public static void print() {
System.out.println("Message from A");
}
}
EOF

cat > java/com/google/sandwich/Main.java << EOF
package com.google.sandwich;
class Main {
public static void main(String[] args) {
A.print();
B.print();
C.print();
}
}
EOF

write_java_custom_rule

# Fixed typo in the failure message ("shold" -> "should").
bazel run java/com/google/sandwich:Main &> "$TEST_log" && fail "Java sandwich build should have failed" || true
expect_log "Using type com.google.sandwich.B from an indirect dependency"
expect_log "Using type com.google.sandwich.C from an indirect dependency"
}
# Verifies java_common.merge: a provider built from a compiled jar merged with
# a JavaInfo wrapping an imported jar exposes BOTH jars via .outputs.jars.
# The merged rule's `jar` attribute points at libb.jar produced by target :b.
function test_java_merge_outputs() {
mkdir -p java/com/google/sandwich

cat > java/com/google/sandwich/BUILD << EOF
load(':java_custom_library.bzl', 'java_custom_library')

java_custom_library(
name = "custom",
srcs = ["A.java"],
jar = "libb.jar"
)

java_library(
name = "b",
srcs = ["B.java"]
)
EOF

cat > java/com/google/sandwich/B.java << EOF
package com.google.sandwich;
class B {
public void printB() {
System.out.println("Message from B");
}
}
EOF

cat > java/com/google/sandwich/A.java << EOF
package com.google.sandwich;
class A {
public void printA() {
System.out.println("Message from A");
}
}
EOF

# Unquoted heredoc: ${JAVA_TOOLCHAIN} expands when this file is written.
# The print() calls surface the merged provider's jars in the build log,
# which the expect_log assertions below match against.
cat > java/com/google/sandwich/java_custom_library.bzl << EOF
def _impl(ctx):
compiled_jar = ctx.actions.declare_file("lib" + ctx.label.name + ".jar")
imported_jar = ctx.files.jar[0];

compilation_provider = java_common.compile(
ctx,
source_files = ctx.files.srcs,
output = compiled_jar,
java_toolchain = ctx.attr._java_toolchain[java_common.JavaToolchainInfo],
host_javabase = ctx.attr._host_javabase[java_common.JavaRuntimeInfo],
)

imported_provider = JavaInfo(output_jar = imported_jar, use_ijar=False);

final_provider = java_common.merge([compilation_provider, imported_provider])

print(final_provider.outputs.jars[0].class_jar)
print(final_provider.outputs.jars[1].class_jar)

return struct(
files = depset([compiled_jar, imported_jar]),
providers = [final_provider]
)

java_custom_library = rule(
implementation = _impl,
attrs = {
"srcs": attr.label_list(allow_files=True),
"jar": attr.label(allow_files=True),
"_java_toolchain": attr.label(default = Label("${JAVA_TOOLCHAIN}")),
"_host_javabase": attr.label(default = Label("@bazel_tools//tools/jdk:current_host_java_runtime"))
},
fragments = ["java"]
)
EOF

bazel build java/com/google/sandwich:custom &> "$TEST_log" || fail "Java sandwich build failed"
expect_log "<generated file java/com/google/sandwich/libcustom.jar>"
expect_log "<generated file java/com/google/sandwich/libb.jar>"
}
# Verifies java_common.create_provider with default use_ijar=True: the given
# compile-time jar is run through ijar, producing liba-ijar.jar whose entries
# carry the characteristic zeroed "00:00" timestamps.
function test_java_common_create_provider_with_ijar() {
mkdir -p java/com/google/foo
touch java/com/google/foo/{BUILD,A.java,my_rule.bzl}

cat > java/com/google/foo/A.java << EOF
package com.google.foo;
class A {}
EOF
cat > java/com/google/foo/BUILD << EOF
load(":my_rule.bzl", "my_rule")
java_library(name = "a", srcs = ["A.java"])
my_rule(name = "banana", compile_time_jars = ["liba.jar"])
EOF
# Unquoted heredoc: ${JAVA_TOOLCHAIN} expands when this file is written.
cat > java/com/google/foo/my_rule.bzl << EOF
def _impl(ctx):
provider = java_common.create_provider(
ctx.actions,
compile_time_jars = ctx.files.compile_time_jars,
java_toolchain = ctx.attr._java_toolchain[java_common.JavaToolchainInfo]
)
print(provider.compile_jars)
print(provider.full_compile_jars)
return DefaultInfo(files = provider.compile_jars)

my_rule = rule(
implementation = _impl,
attrs = {
"compile_time_jars": attr.label_list(allow_files=True),
"_java_toolchain": attr.label(default = Label("${JAVA_TOOLCHAIN}")),
}
)
EOF

bazel build java/com/google/foo:banana >& "$TEST_log" || fail "Unexpected fail"
expect_log "liba-ijar.jar"
unzip -l bazel-bin/java/com/google/foo/liba-ijar.jar >> "$TEST_log"
expect_log "00:00 com/google/foo/A.class"
}
# Verifies java_common.create_provider with use_ijar=False: no ijar step runs
# (so no actions/toolchain are needed) and compile_jars holds liba.jar as-is.
function test_java_common_create_provider_without_ijar() {
mkdir -p java/com/google/foo
touch java/com/google/foo/{BUILD,A.java,my_rule.bzl}

cat > java/com/google/foo/A.java << EOF
package com.google.foo;
class A {}
EOF
cat > java/com/google/foo/BUILD << EOF
load(":my_rule.bzl", "my_rule")
java_library(name = "a", srcs = ["A.java"])
my_rule(name = "banana", compile_time_jars = ["liba.jar"])
EOF
cat > java/com/google/foo/my_rule.bzl << EOF
def _impl(ctx):
provider = java_common.create_provider(
use_ijar = False,
compile_time_jars = ctx.files.compile_time_jars,
)
print(provider.compile_jars)
return DefaultInfo(files = provider.compile_jars)

my_rule = rule(
implementation = _impl,
attrs = {
"compile_time_jars": attr.label_list(allow_files=True),
}
)
EOF

bazel build java/com/google/foo:banana >& "$TEST_log" || fail "Unexpected failure"
expect_log "liba.jar"
}
# Error-path test: create_provider with use_ijar left True but no ctx.actions
# passed must fail analysis with a message pointing at the missing argument.
function test_java_common_create_provider_with_ijar_unset_actions() {
mkdir -p java/com/google/foo
touch java/com/google/foo/{BUILD,A.java,my_rule.bzl}

cat > java/com/google/foo/A.java << EOF
package com.google.foo;
class A {}
EOF
cat > java/com/google/foo/BUILD << EOF
load(":my_rule.bzl", "my_rule")
java_library(name = "a", srcs = ["A.java"])
my_rule(name = "banana", compile_time_jars = ["liba.jar"])
EOF
cat > java/com/google/foo/my_rule.bzl << EOF
def _impl(ctx):
provider = java_common.create_provider(
compile_time_jars = ctx.files.compile_time_jars,
java_toolchain = ctx.attr._java_toolchain[java_common.JavaToolchainInfo]
)
return DefaultInfo(files = provider.compile_jars)

my_rule = rule(
implementation = _impl,
attrs = {
"compile_time_jars": attr.label_list(allow_files=True),
"_java_toolchain": attr.label(default = Label("@bazel_tools//tools/jdk:remote_toolchain")),
}
)
EOF

bazel build java/com/google/foo:banana >& "$TEST_log" && fail "Unexpected success"
expect_log "The value of use_ijar is True. Make sure the ctx.actions argument is valid."
}
# Error-path test: the JavaInfo constructor with use_ijar=True but no
# `actions` argument must fail analysis with the same diagnostic.
function test_java_info_constructor_with_ijar_unset_actions() {
mkdir -p java/com/google/foo
touch java/com/google/foo/{BUILD,my_rule.bzl}

cat > java/com/google/foo/BUILD << EOF
load(":my_rule.bzl", "my_rule")
my_rule(
name = 'my_skylark_rule',
output_jar = 'my_skylark_rule_lib.jar',
source_jars = ['my_skylark_rule_src.jar']
)
EOF
cat > java/com/google/foo/my_rule.bzl << EOF
result = provider()
def _impl(ctx):
javaInfo = JavaInfo(
output_jar = ctx.file.output_jar,
source_jars = ctx.files.source_jars,
use_ijar = True,
java_toolchain = ctx.attr._java_toolchain[java_common.JavaToolchainInfo]
)
return [result(property = javaInfo)]

my_rule = rule(
implementation = _impl,
attrs = {
'output_jar' : attr.label(allow_single_file=True),
'source_jars' : attr.label_list(allow_files=['.jar']),
"_java_toolchain": attr.label(default = Label("@bazel_tools//tools/jdk:remote_toolchain"))
}
)
EOF

bazel build java/com/google/foo:my_skylark_rule >& "$TEST_log" && fail "Unexpected success"
expect_log "The value of use_ijar is True. Make sure the ctx.actions argument is valid."
}
# Error-path test: create_provider with use_ijar=True and ctx.actions given
# but NO java_toolchain must fail with a toolchain-specific diagnostic.
function test_java_common_create_provider_with_ijar_unset_java_toolchain() {
mkdir -p java/com/google/foo
touch java/com/google/foo/{BUILD,A.java,my_rule.bzl}

cat > java/com/google/foo/A.java << EOF
package com.google.foo;
class A {}
EOF
cat > java/com/google/foo/BUILD << EOF
load(":my_rule.bzl", "my_rule")
java_library(name = "a", srcs = ["A.java"])
my_rule(name = "banana", compile_time_jars = ["liba.jar"])
EOF
cat > java/com/google/foo/my_rule.bzl << EOF
def _impl(ctx):
provider = java_common.create_provider(
ctx.actions,
compile_time_jars = ctx.files.compile_time_jars,
)
return DefaultInfo(files = provider.compile_jars)

my_rule = rule(
implementation = _impl,
attrs = {
"compile_time_jars": attr.label_list(allow_files=True),
"_java_toolchain": attr.label(default = Label("@bazel_tools//tools/jdk:remote_toolchain")),
}
)
EOF

bazel build java/com/google/foo:banana >& "$TEST_log" && fail "Unexpected success"
expect_log "The value of use_ijar is True. Make sure the java_toolchain argument is valid."
}
# Error-path test: JavaInfo constructor with use_ijar=True and actions given
# but NO java_toolchain must fail with the toolchain diagnostic.
function test_java_info_constructor_with_ijar_unset_java_toolchain() {
mkdir -p java/com/google/foo
touch java/com/google/foo/{BUILD,my_rule.bzl}

cat > java/com/google/foo/BUILD << EOF
load(":my_rule.bzl", "my_rule")
my_rule(
name = 'my_skylark_rule',
output_jar = 'my_skylark_rule_lib.jar',
source_jars = ['my_skylark_rule_src.jar']
)
EOF
cat > java/com/google/foo/my_rule.bzl << EOF
result = provider()
def _impl(ctx):
javaInfo = JavaInfo(
output_jar = ctx.file.output_jar,
source_jars = ctx.files.source_jars,
use_ijar = True,
actions = ctx.actions
)
return [result(property = javaInfo)]

my_rule = rule(
implementation = _impl,
attrs = {
'output_jar' : attr.label(allow_single_file=True),
'source_jars' : attr.label_list(allow_files=['.jar'])
}
)
EOF

bazel build java/com/google/foo:my_skylark_rule >& "$TEST_log" && fail "Unexpected success"
expect_log "The value of use_ijar is True. Make sure the java_toolchain argument is valid."
}
# Fixture helper (not a test): writes a java_test with one passing case and
# one that sleeps "forever", for the timeout tests below.
function write_java_timeout_test() {
setup_javatest_support
mkdir -p javatests/com/google/timeout
touch javatests/com/google/timeout/{BUILD,TimeoutTests.java}

cat > javatests/com/google/timeout/TimeoutTests.java << EOF
package com.google.timeout;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.junit.Test;

@RunWith(JUnit4.class)
public class TimeoutTests {

@Test
public void testPasses() throws InterruptedException { }

@Test
public void testTimesOut() throws InterruptedException {
// sleep more than 1 min
Thread.sleep(Long.MAX_VALUE);
}
}
EOF

cat > javatests/com/google/timeout/BUILD <<EOF
java_test(
name = "TimeoutTests",
srcs = ["TimeoutTests.java"],
deps = ['//third_party:junit4'],
timeout = "short", # 1 min
)
EOF
}
# Test is flaky: b/123476045, https://github.com/bazelbuild/bazel/issues/7288
# (DISABLED_ prefix keeps the test runner from picking it up.)
# Checks that a timed-out java_test reports both interrupted cases in the XML
# log when --noexperimental_split_xml_generation is in effect.
function DISABLED_test_java_test_timeout() {
write_java_timeout_test
bazel test javatests/com/google/timeout:TimeoutTests --test_timeout=5 \
--noexperimental_split_xml_generation >& "$TEST_log" \
&& fail "Unexpected success"
xml_log=bazel-testlogs/javatests/com/google/timeout/TimeoutTests/test.xml
[[ -s $xml_log ]] || fail "$xml_log was not present after test"
cat "$xml_log" > "$TEST_log"
expect_log "failures='2'"
expect_log "<failure message='Test cancelled' type='java.lang.Exception'>java.lang.Exception: Test cancelled"
expect_log "<failure message='Test interrupted' type='java.lang.Exception'>java.lang.Exception: Test interrupted"
}
# Same timeout check as DISABLED_test_java_test_timeout, but with
# --experimental_split_xml_generation (XML written by a separate process).
function test_java_test_timeout_split_xml() {
write_java_timeout_test
bazel test javatests/com/google/timeout:TimeoutTests --test_timeout=5 \
--experimental_split_xml_generation >& "$TEST_log" \
&& fail "Unexpected success"
xml_log=bazel-testlogs/javatests/com/google/timeout/TimeoutTests/test.xml
[[ -s $xml_log ]] || fail "$xml_log was not present after test"
cat "$xml_log" > "$TEST_log"
expect_log "failures='2'"
expect_log "<failure message='Test cancelled' type='java.lang.Exception'>java.lang.Exception: Test cancelled"
expect_log "<failure message='Test interrupted' type='java.lang.Exception'>java.lang.Exception: Test interrupted"
}
# Verifies that a java_binary invoked from inside another test's runfiles tree
# recomputes JAVA_RUNFILES to the enclosing (subsuming) tree rather than
# building its own.
function test_wrapper_resolves_runfiles_to_subsuming_tree() {
setup_clean_workspace
set_up
mkdir -p java/com/google/runfiles/
cat <<'EOF' > java/com/google/runfiles/EchoRunfiles.java
package com.google.runfiles;

public class EchoRunfiles {
public static void main(String[] argv) {
System.out.println(System.getenv("JAVA_RUNFILES"));
}
}
EOF
cat <<'EOF' > java/com/google/runfiles/BUILD
java_binary(
name = 'EchoRunfiles',
srcs = ['EchoRunfiles.java'],
visibility = ['//visibility:public'],
)
EOF
# The workspace name is initialized in testenv.sh; use that var rather than
# hardcoding it here. The extra sed pass is so we can selectively expand
# that one var while keeping the rest of the heredoc literal.
cat | sed "s/{{WORKSPACE_NAME}}/$WORKSPACE_NAME/" > check_runfiles.sh << 'EOF'
#!/bin/sh -eu
unset JAVA_RUNFILES # Force the wrapper script to recompute it.
subrunfiles=`$TEST_SRCDIR/{{WORKSPACE_NAME}}/java/com/google/runfiles/EchoRunfiles`
if [ $subrunfiles != $TEST_SRCDIR ]; then
echo $subrunfiles
echo "DOES NOT MATCH"
echo $TEST_SRCDIR
exit 1
fi
EOF
chmod u+x check_runfiles.sh
cat <<'EOF' > BUILD
sh_test(
name = 'check_runfiles',
srcs = ['check_runfiles.sh'],
data = ['//java/com/google/runfiles:EchoRunfiles'],
)
EOF

# Create a runfiles tree for EchoRunfiles. Check the exit status: previously
# a failure here was silently ignored and only surfaced (confusingly) in the
# next step, if at all.
bazel build //java/com/google/runfiles:EchoRunfiles || fail "build failed"
# We're testing a formerly non-hermetic interaction, so disable the sandbox.
bazel test --spawn_strategy=standalone --test_output=errors :check_runfiles \
|| fail "test failed"
}
# End-to-end check of the Starlark JavaInfo constructor: a custom rule runs
# ijar and pack_sources on its output jar and wraps the results in JavaInfo;
# the build must succeed.
function test_java_info_constructor_e2e() {
mkdir -p java/com/google/foo
touch java/com/google/foo/{BUILD,my_rule.bzl}
cat > java/com/google/foo/BUILD << EOF
load(":my_rule.bzl", "my_rule")
my_rule(
  name = 'my_skylark_rule',
  output_jar = 'my_skylark_rule_lib.jar',
  source_jars = ['my_skylark_rule_src.jar'],
)
EOF
cat > java/com/google/foo/my_rule.bzl << EOF
result = provider()
def _impl(ctx):
  compile_jar = java_common.run_ijar(
    ctx.actions,
    jar = ctx.file.output_jar,
    target_label = ctx.label,
    java_toolchain = ctx.attr._java_toolchain[java_common.JavaToolchainInfo],
  )
  source_jar = java_common.pack_sources(
    ctx.actions,
    output_jar = ctx.file.output_jar,
    source_jars = ctx.files.source_jars,
    java_toolchain = ctx.attr._java_toolchain[java_common.JavaToolchainInfo],
    host_javabase = ctx.attr._host_javabase[java_common.JavaRuntimeInfo],
  )
  javaInfo = JavaInfo(
    output_jar = ctx.file.output_jar,
    compile_jar = compile_jar,
    source_jar = source_jar,
  )
  return [result(property = javaInfo)]
my_rule = rule(
  implementation = _impl,
  attrs = {
    'output_jar' : attr.label(allow_single_file=True),
    'source_jars' : attr.label_list(allow_files=['.jar']),
    "_java_toolchain": attr.label(default = Label("@bazel_tools//tools/jdk:remote_toolchain")),
    "_host_javabase": attr.label(default = Label("@bazel_tools//tools/jdk:current_host_java_runtime"))
  }
)
EOF
bazel build java/com/google/foo:my_skylark_rule >& "$TEST_log" || fail "Expected success"
}
run_suite "Java integration tests"
|
<gh_stars>1-10
import os
from datetime import datetime, timedelta
from airflow import DAG
from airflow.contrib.operators.kubernetes_pod_operator import KubernetesPodOperator
from airflow_utils import (
DATA_IMAGE,
clone_and_setup_extraction_cmd,
gitlab_defaults,
slack_failed_task,
)
from kube_secrets import (
GCP_SERVICE_CREDS,
SNOWFLAKE_ACCOUNT,
SNOWFLAKE_LOAD_DATABASE,
SNOWFLAKE_LOAD_PASSWORD,
SNOWFLAKE_LOAD_ROLE,
SNOWFLAKE_LOAD_USER,
SNOWFLAKE_LOAD_WAREHOUSE,
)
from kubernetes_helpers import get_affinity, get_toleration
env = os.environ.copy()
# Environment injected into the extraction pod; the repo is checked out at /analytics.
pod_env_vars = {"CI_PROJECT_DIR": "/analytics"}
# Airflow task defaults shared by every task in this DAG.
default_args = {
"catchup": True,
"depends_on_past": False,
"on_failure_callback": slack_failed_task,
"owner": "airflow",
"retries": 1,
"retry_delay": timedelta(minutes=1),
"sla": timedelta(hours=12),
"sla_miss_callback": slack_failed_task,
"start_date": datetime(2019, 1, 1),
"dagrun_timeout": timedelta(hours=6),
}
# Hourly extraction of Prometheus metrics into Snowflake.
dag = DAG("prometheus_extract", default_args=default_args, schedule_interval="@hourly")
# don't add a newline at the end of this because it gets added to in the K8sPodOperator arguments
prometheus_extract_command = (
    f"{clone_and_setup_extraction_cmd} && python prometheus/src/execute.py"
)
prometheus_operator = KubernetesPodOperator(
**gitlab_defaults,
image=DATA_IMAGE,
task_id="prometheus-extract",
name="prometheus-extract",
# Snowflake credentials are mounted as Kubernetes secrets, not env vars.
secrets=[
SNOWFLAKE_ACCOUNT,
SNOWFLAKE_LOAD_DATABASE,
SNOWFLAKE_LOAD_ROLE,
SNOWFLAKE_LOAD_USER,
SNOWFLAKE_LOAD_WAREHOUSE,
SNOWFLAKE_LOAD_PASSWORD,
],
env_vars=pod_env_vars,
affinity=get_affinity(False),
tolerations=get_toleration(False),
# Appends the execution window ({{ ts }} .. next_execution_date) and a GCP
# identity token to the extract command; templated by Airflow at runtime.
arguments=[
    prometheus_extract_command
    + " {{ ts }} {{ next_execution_date.isoformat() }} $(gcloud auth print-identity-token)"
],
dag=dag,
)
|
import cv2
class VideoProcessor:
    """Thin wrapper around a ``cv2.VideoCapture``-like object."""

    def __init__(self, video):
        # video: a cv2.VideoCapture (or compatible) instance, or None.
        self.video = video

    def is_opened(self):
        """
        Check if the video is ready.

        Returns:
            bool: True if a video is set and reports itself opened,
            False otherwise (previously returned None implicitly when
            no video was set).
        """
        if self.video:
            return self.video.isOpened()
        return False

    def get_frame(self, gray=False):
        """
        Return the current frame from the video.

        Args:
            gray: when True, convert the frame to grayscale.

        Returns:
            The frame, or None when no video source is set.

        Raises:
            ValueError: if the frame cannot be read from the video.
        """
        if self.video:
            code, frame = self.video.read()
            if not code:
                raise ValueError("Impossible to retrieve current frame.")
            if gray:
                # VideoCapture.read() yields frames in BGR channel order, so
                # BGR2GRAY is the correct conversion; the previous
                # COLOR_RGB2GRAY weighted the red/blue channels incorrectly.
                frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
            return frame
        # No video set: keep the original behavior of returning None.
        return None
<reponame>timherrm/geizhalscrawler<gh_stars>0
import unittest
from random import uniform
from time import sleep
from tests.exception_decorator import except_httperror
from geizhalscrawler import geizhals
class TestStringMethods(unittest.TestCase):
    """Checks that geizhals._url2id extracts the numeric article id from
    product URLs on each regional Geizhals storefront (live network access;
    each test sleeps a random 1-10s to avoid being banned)."""

    @except_httperror
    def test_URL_AT(self):
        # NOTE: `id` shadows the builtin; kept as-is.
        id = geizhals._url2id("https://geizhals.at/bose-quietcomfort-35-ii-schwarz-a1696985.html")
        # avoid banning from website
        sleep(uniform(1, 10))
        self.assertEqual(id, "1696985")

    @except_httperror
    def test_URL_EU(self):
        id = geizhals._url2id("https://geizhals.eu/bose-quietcomfort-35-ii-schwarz-a1696985.html")
        # avoid banning from website
        sleep(uniform(1, 10))
        self.assertEqual(id, "1696985")

    @except_httperror
    def test_URL_DE(self):
        id = geizhals._url2id("https://geizhals.de/bose-quietcomfort-35-ii-schwarz-a1696985.html")
        # avoid banning from website
        sleep(uniform(1, 10))
        self.assertEqual(id, "1696985")

    @except_httperror
    def test_URL_UK(self):
        # UK storefront uses the skinflint.co.uk domain.
        id = geizhals._url2id("https://skinflint.co.uk/bose-quietcomfort-35-ii-black-a1696985.html")
        # avoid banning from website
        sleep(uniform(1, 10))
        self.assertEqual(id, "1696985")

    @except_httperror
    def test_URL_PL(self):
        # Polish storefront uses the cenowarka.pl domain.
        id = geizhals._url2id("https://cenowarka.pl/bose-quietcomfort-35-ii-czarny-a1696985.html")
        # avoid banning from website
        sleep(uniform(1, 10))
        self.assertEqual(id, "1696985")
# Allow running this module directly: python -m tests.<module> / python file.py
if __name__ == '__main__':
    unittest.main()
|
public class IssueContainsKeywordsPredicate {
private List<String> keywords;
public IssueContainsKeywordsPredicate(List<String> keywords) {
this.keywords = keywords;
}
public boolean test(Issue issue) {
for (String keyword : keywords) {
if (!issue.getDescription().contains(keyword)) {
return false;
}
}
return true;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
IssueContainsKeywordsPredicate that = (IssueContainsKeywordsPredicate) o;
return Objects.equals(keywords, that.keywords);
}
@Override
public int hashCode() {
return Objects.hash(keywords);
}
} |
import { Entity, PrimaryGeneratedColumn, Column, OneToMany } from 'typeorm';
import { PhotoEntity } from './photo.entity';
@Entity()
export class AlbumEntity {
@PrimaryGeneratedColumn()
id: number;
@Column()
name: string;
@OneToMany(() => PhotoEntity, (photo) => photo.album)
photos: PhotoEntity[];
addPhoto(photo: PhotoEntity): void {
if (!this.photos) {
this.photos = [];
}
this.photos.push(photo);
photo.album = this;
}
getPhotos(): PhotoEntity[] {
return this.photos;
}
} |
/*
* Copyright 2014-2020 The Ideal Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style
* license that can be found in the LICENSE file or at
* https://developers.google.com/open-source/licenses/bsd
*/
package ideal.development.actions;
import ideal.library.elements.*;
import ideal.library.reflections.*;
import javax.annotation.Nullable;
import ideal.runtime.elements.*;
import ideal.runtime.logs.*;
import ideal.runtime.reflections.*;
import ideal.development.elements.*;
import ideal.development.notifications.*;
import ideal.development.types.*;
import ideal.development.values.*;
/**
 * Action implementing the runtime type tests {@code is} / {@code is_not}:
 * evaluates an expression and checks its value against a target type.
 */
public class is_action extends base_action {
    /** Operand whose runtime value is tested. */
    public final action expression;
    /** Type the operand's value is checked against. */
    public final type the_type;
    /** When true, this node implements {@code is_not} instead of {@code is}. */
    public final boolean negated;

    public is_action(action expression, type the_type, boolean negated, position source) {
        super(source);
        this.expression = expression;
        this.the_type = the_type;
        this.negated = negated;
    }

    /** The static result of an is/is_not test is always an immutable boolean. */
    @Override
    public abstract_value result() {
        return common_library.get_instance().immutable_boolean_type();
    }

    /** Evaluates the operand, performs the type test, and applies negation. */
    @Override
    public entity_wrapper execute(execution_context the_context) {
        entity_wrapper expression_result = expression.execute(the_context);
        assert expression_result instanceof value_wrapper;
        boolean result = action_utilities.is_of(expression_result, the_type);
        if (negated) {
            result = !result;
        }
        return common_library.get_instance().to_boolean_value(result);
    }

    @Override
    public string to_string() {
        return new base_string(expression + (negated ? " is_not " : " is ") + the_type);
    }
}
|
#coding:utf-8
import os
import sys
import time
import pipes
import subprocess
import threading
import pexpect
import signal
class ReturnContainer():
    """Mutable holder used to pass a result out of a background thread."""
    def __init__(self):
        # Filled in later (e.g. with a container exit status); None until then.
        self.value = None
class Command(object):
    """Run one test command in a background thread and verify its outcome.

    The command must exit with ``retcode`` within ``timeout`` seconds;
    otherwise ``fail_cmd`` (e.g. ``docker kill``) is run for cleanup and the
    failure is reported with the captured stdout/stderr.
    """

    def __init__(self, cmd, fail_cmd, post_cmd=None, post_delay=0):
        # cmd: argv list under test; fail_cmd: cleanup command on timeout;
        # post_cmd: optional command issued post_delay seconds after start.
        self.cmd = cmd
        self.fail_cmd = fail_cmd
        self.post_cmd = post_cmd
        self.post_delay = post_delay
        self.proc = None

    def run(self, timeout=None, retcode=0):
        print "Testing '{0}'...".format(" ".join(pipes.quote(s) for s in self.cmd)),
        sys.stdout.flush()
        err = None
        pipe_kwargs = {"stdout": subprocess.PIPE, "stderr": subprocess.PIPE, "stdin": subprocess.PIPE}

        def target():
            # Runs in a worker thread so the main thread can enforce `timeout`.
            self.proc = subprocess.Popen(self.cmd, **pipe_kwargs)
            self.stdout, self.stderr = self.proc.communicate()

        thread = threading.Thread(target=target)
        thread.daemon = True
        thread.start()
        if self.post_cmd is not None:
            time.sleep(self.post_delay)
            subprocess.check_call(self.post_cmd, **pipe_kwargs)
        # The post delay has already consumed part of the timeout budget.
        thread.join(timeout - self.post_delay if timeout is not None else timeout)
        # Checks
        if thread.is_alive():
            # Still running past the deadline: kill via fail_cmd, then report.
            subprocess.check_call(self.fail_cmd, **pipe_kwargs)
            err = Exception("Test failed with timeout!")
        elif self.proc.returncode != retcode:
            err = Exception("Test failed with unexpected returncode (expected {0}, got {1})".format(retcode, self.proc.returncode))
        if err is not None:
            print "FAIL"
            print "--- STDOUT ---"
            print getattr(self, "stdout", "no stdout")
            print "--- STDERR ---"
            print getattr(self, "stderr", "no stderr")
            print "--- ... ---"
            raise err
        else:
            print "OK"
def attach_and_type_exit_0(name):
    """Attach to container `name` and terminate its shell cleanly via `exit 0`."""
    print "Attaching to {0} to exit 0".format(name)
    p = pexpect.spawn("docker attach {0}".format(name))
    p.sendline('')
    p.sendline('exit 0')
    p.close()


def attach_and_issue_ctrl_c(name):
    """Attach to container `name`, wait for a prompt, then send SIGINT (CTRL+C)."""
    print "Attaching to {0} to CTRL+C".format(name)
    p = pexpect.spawn("docker attach {0}".format(name))
    p.expect_exact('#')
    p.sendintr()
    p.close()
def test_tty_handling(img, name, base_cmd, fail_cmd, container_command, exit_function, expect_exit_code):
    """Spawn tini in an interactive TTY container, drive it with
    `exit_function`, and assert the container exits with `expect_exit_code`."""
    print "Testing TTY handling (using container command '{0}' and exit function '{1}')".format(container_command, exit_function.__name__)
    rc = ReturnContainer()
    shell_ready_event = threading.Event()

    def spawn():
        cmd = base_cmd + ["--tty", "--interactive", img, "/tini/dist/tini"]
        # MINIMAL builds were compiled without argument parsing; skip "--".
        if os.environ.get("MINIMAL") is None:
            cmd.append("--")
        cmd.append(container_command)
        p = pexpect.spawn(" ".join(cmd))
        # Wait for the shell prompt before signalling readiness.
        p.expect_exact("#")
        shell_ready_event.set()
        rc.value = p.wait()

    thread = threading.Thread(target=spawn)
    thread.daemon = True
    thread.start()
    if not shell_ready_event.wait(2):
        raise Exception("Timeout waiting for shell to spawn")
    exit_function(name)
    thread.join(timeout=2)
    if thread.is_alive():
        # Container never exited: kill it, then report the timeout.
        subprocess.check_call(fail_cmd)
        raise Exception("Timeout waiting for container to exit!")
    if rc.value != expect_exit_code:
        raise Exception("Return code is: {0} (expected {1})".format(rc.value, expect_exit_code))
def main():
    """Run the tini integration test matrix against the image in argv[1]."""
    img = sys.argv[1]
    name = "{0}-test".format(img)
    # Set when tini was built without argument parsing support.
    args_disabled = os.environ.get("MINIMAL")
    root = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
    base_cmd = [
        "docker",
        "run",
        "--rm",
        "--volume={0}:/tini".format(root),
        "--name={0}".format(name),
    ]
    fail_cmd = ["docker", "kill", "-s", "KILL", name]

    # Funtional tests
    for entrypoint in ["/tini/dist/tini", "/tini/dist/tini-static"]:
        functional_base_cmd = base_cmd + [
            "--entrypoint={0}".format(entrypoint),
            "-e", "TINI_VERBOSITY=3",
            img,
        ]
        # Reaping test
        Command(functional_base_cmd + ["/tini/test/reaping/stage_1.py"], fail_cmd).run(timeout=10)
        # Signals test: expected return codes are 128 + signal number.
        for sig, retcode in [("TERM", 143), ("USR1", 138), ("USR2", 140)]:
            Command(
                functional_base_cmd + ["/tini/test/signals/test.py"],
                fail_cmd,
                ["docker", "kill", "-s", sig, name],
                2
            ).run(timeout=10, retcode=retcode)
        # Exit code test (MINIMAL builds reject all flags with 127)
        Command(functional_base_cmd + ["-z"], fail_cmd).run(retcode=127 if args_disabled else 1)
        Command(functional_base_cmd + ["-h"], fail_cmd).run(retcode=127 if args_disabled else 0)
        Command(functional_base_cmd + ["zzzz"], fail_cmd).run(retcode=127)
        Command(functional_base_cmd + ["-w"], fail_cmd).run(retcode=127 if args_disabled else 0)

    # Valgrind test (we only run this on the dynamic version, because otherwise Valgrind may bring up plenty of errors that are
    # actually from libc)
    Command(base_cmd + [img, "valgrind", "--leak-check=full", "--error-exitcode=1", "/tini/dist/tini", "ls"], fail_cmd).run()

    # Test tty handling
    test_tty_handling(img, name, base_cmd, fail_cmd, "dash", attach_and_type_exit_0, 0)
    test_tty_handling(img, name, base_cmd, fail_cmd, "dash -c 'while true; do echo \#; sleep 0.1; done'", attach_and_issue_ctrl_c, 128 + signal.SIGINT)

    # Installation tests (sh -c is used for globbing and &&)
    for image, pkg_manager, extension in [
        ["ubuntu:precise", "dpkg", "deb"],
        ["ubuntu:trusty", "dpkg", "deb"],
        ["centos:6", "rpm", "rpm"],
        ["centos:7", "rpm", "rpm"],
    ]:
        Command(base_cmd + [image, "sh", "-c", "{0} -i /tini/dist/*.{1} && /usr/bin/tini true".format(pkg_manager, extension)], fail_cmd).run()


if __name__ == "__main__":
    main()
|
<reponame>AllenElguira16/repairservicelocator<filename>Assets/js/Components/MyShop/Content.tsx
import * as React from "react";
import DeleteConfirmation from "./DeleteConfirmation";
/**
 * Table body listing the user's shops; each row offers Edit (opens the
 * shop modal pre-filled) and Delete (opens the confirmation modal) actions.
 */
class Content extends React.Component<any, any>{
    state: any = {
        activeId: null
    }

    /** Forward the selected shop id to the parent so it can populate the edit form. */
    fillForm(id: any){
        this.props.fillForm(id);
    }

    /** Tell the parent which shop is pending deletion. */
    setActiveId(shopId: any){
        this.props.setActiveId(shopId);
    }

    render(){
        const rows = this.props.shops.map((shop: any, index: any) => (
            <tr key={index}>
                <th>{shop.Name}</th>
                <th>{shop.StreetNumber} {shop.Barangay}</th>
                <th>{shop.ContactNumber}</th>
                <th>{parseInt(shop.Status, 10) === 0 ? 'Pending' : 'Good'}</th>
                <th className="d-flex">
                    <a
                        href="#"
                        data-toggle="modal"
                        data-target="#myshopModal"
                        className="btn btn-primary"
                        onClick={this.fillForm.bind(this, shop.Id)}
                    >Edit</a>
                    <a
                        href="#"
                        data-toggle="modal"
                        data-target="#DeleteConfirmation"
                        className="btn btn-danger"
                        onClick={this.setActiveId.bind(this, shop.Id)}
                    >Delete</a>
                </th>
            </tr>
        ));
        return(
            <>
            <tbody>
                {rows}
            </tbody>
            </>
        );
    }
}
export default Content; |
-- Intended: return the top 10% highest-paid employees.
-- NOTE(review): LIMIT does not accept an expression or subquery in
-- MySQL/PostgreSQL/SQLite, so `LIMIT 0.1 * (SELECT COUNT(...))` is a syntax
-- error there. Verify the target dialect; otherwise compute the row count in
-- the application, or rank with a window function such as PERCENT_RANK().
SELECT *
FROM Employees
ORDER BY Salary DESC
LIMIT 0.1 * (SELECT COUNT(EmployeeID)
             FROM Employees);
<reponame>youaxa/ara-poc-open
package com.decathlon.ara.repository.custom.impl;
import com.decathlon.ara.domain.Functionality;
import com.decathlon.ara.domain.QFunctionality;
import com.decathlon.ara.domain.enumeration.FunctionalityType;
import com.decathlon.ara.repository.custom.FunctionalityRepositoryCustom;
import com.querydsl.jpa.impl.JPAQueryFactory;
import java.util.Map;
import java.util.stream.Collectors;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@Component
@RequiredArgsConstructor(onConstructor = @__(@Autowired))
public class FunctionalityRepositoryImpl implements FunctionalityRepositoryCustom {

    @NonNull
    private final JPAQueryFactory jpaQueryFactory;

    /**
     * Maps each FUNCTIONALITY-type row of the given project to its team id
     * (folders are excluded by the type filter).
     *
     * @param projectId the project to query
     * @return functionality id mapped to team id
     */
    @Override
    public Map<Long, Long> getFunctionalityTeamIds(long projectId) {
        return jpaQueryFactory.select(QFunctionality.functionality.id, QFunctionality.functionality.teamId)
                .distinct()
                .from(QFunctionality.functionality)
                .where(QFunctionality.functionality.type.eq(FunctionalityType.FUNCTIONALITY))
                .where(QFunctionality.functionality.projectId.eq(Long.valueOf(projectId)))
                .fetch()
                .stream()
                .collect(Collectors.toMap(
                        tuple -> tuple.get(QFunctionality.functionality.id),
                        tuple -> tuple.get(QFunctionality.functionality.teamId)));
    }

    /**
     * Returns true when at least one functionality of the project lists the
     * given country code in its separator-delimited countryCodes column.
     *
     * @param projectId the project to query
     * @param countryCode the exact code token to look for
     */
    @Override
    public boolean existsByProjectIdAndCountryCode(long projectId, String countryCode) {
        final String separator = Functionality.COUNTRY_CODES_SEPARATOR;
        // Surround both the stored list and the searched code with separators
        // so e.g. "fr" cannot match inside a longer code.
        return jpaQueryFactory.select(QFunctionality.functionality.id)
                .from(QFunctionality.functionality)
                .where(QFunctionality.functionality.projectId.eq(Long.valueOf(projectId)))
                .where(QFunctionality.functionality.countryCodes.prepend(separator).concat(separator)
                        .like("%" + separator + countryCode + separator + "%"))
                .fetchFirst() != null;
    }
}
|
<gh_stars>1-10
package org.firstinspires.ftc.teamcode.teleop;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.util.RobotLog;
import org.firstinspires.ftc.teamcode.botfunctionality.RecorderBot;
@TeleOp (name="Record Wobble Deliveries", group="Recording")
public class RecordWobbleDeliverOp extends RecorderBot {
    /** True while any named recording (A, B or C) is in progress. */
    boolean isRecording = false;
    /** At most one of these is true at a time; the guards below enforce it. */
    boolean recordingA,recordingB,recordingC = false;

    /**
     * Drives the robot from gamepad input while letting the operator toggle
     * one of three named recordings: A (gamepad a), B (gamepad b), C (gamepad x).
     * Starting a recording resets the map; stopping it saves to file.
     */
    @Override
    public void runOpMode() throws InterruptedException {
        initRecorderBot();
        waitForStart();
        while(opModeIsActive()){
            executeControllerDriveLogic();
            // Toggle recording A (blocked while B or C is active).
            if(gamepad1.a && debounced && !(recordingB || recordingC)){
                isRecording = !isRecording;
                recordingA = isRecording;
                if(isRecording) {initMap(); RobotLog.vv("RECORDING A START","\n\nBEGINS NOW:\n\n");}
                else saveFile();
                debounce.run();
            }
            // Toggle recording B (blocked while A or C is active).
            if(gamepad1.b && debounced && !(recordingA || recordingC)){
                isRecording = !isRecording;
                recordingB = isRecording;
                if(isRecording) {initMap(); RobotLog.vv("RECORDING B START","\n\nBEGINS NOW:\n\n");}
                else saveFile();
                debounce.run();
            }
            // Toggle recording C (blocked while A or B is active).
            if(gamepad1.x && debounced && !(recordingA || recordingB)){
                isRecording = !isRecording;
                recordingC = isRecording;
                if(isRecording) {initMap(); RobotLog.vv("RECORDING C START","\n\nBEGINS NOW:\n\n");}
                else saveFile();
                debounce.run();
            }
            telemetry.addData("instructions","Press a to start/stop recording A, press b to start stop recording B, press x to start/stop recording C");
            // BUG FIX: the original called .format(...) on the string literal.
            // String.format is static, so the literal receiver was discarded
            // and recordingA's value became the format string — telemetry
            // showed just "true"/"false" instead of the full status line.
            telemetry.addData("ABC", String.format("CURRENTLY RECORDING A: %s, B: %s, C: %s", String.valueOf(recordingA), String.valueOf(recordingB), String.valueOf(recordingC)));
            telemetry.update();
            if(isRecording) executeRecorderLogic();
        }
    }
}
|
<reponame>JLLeitschuh/Symfony-2-Eclipse-Plugin
/*******************************************************************************
* This file is part of the Symfony eclipse plugin.
*
* (c) <NAME> <<EMAIL>>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
******************************************************************************/
package com.dubture.symfony.ui.editor.hyperlink;
import org.eclipse.jface.text.BadLocationException;
import org.eclipse.jface.text.IDocument;
import org.eclipse.jface.text.IRegion;
import org.eclipse.jface.text.Region;
import org.eclipse.php.internal.ui.editor.hyperlink.PHPHyperlinkDetector;
/**
*
* Abstract Baseclass for string hyperlink detectors.
*
*
* @author <NAME> <<EMAIL>>
*
*/
@SuppressWarnings("restriction")
public abstract class StringHyperlinkDetector extends PHPHyperlinkDetector {

    /**
     * Returns the region of the string literal (or space-delimited word)
     * surrounding {@code offset}, or {@code null} when no delimited word can
     * be found.
     *
     * <p>Scans backwards from the offset for a delimiter ({@code '}, {@code "}
     * or space), then forwards for the <em>matching</em> delimiter; the region
     * covers the text strictly between them.</p>
     *
     * @param document the document to scan
     * @param offset   position inside the candidate word
     * @return the delimited region, or null if none was found
     */
    public static IRegion findWord(IDocument document, int offset) {
        int start = -1;
        int end = -1;
        try {
            int pos = offset;
            char c;
            // '?' marks "no delimiter found yet".
            char separator = '?';
            int length = document.getLength();
            // search backwards until a string delimiter
            // to find the start position
            while (pos >= 0) {
                c = document.getChar(pos);
                if (c == '\'' || c == '"' || c == ' ') {
                    separator = c;
                    break;
                }
                --pos;
            }
            start = pos;
            pos++;
            // search forward until the matching delimiter
            // to find the end position
            while (pos < length) {
                c = document.getChar(pos);
                if (c == separator) {
                    end = pos;
                    break;
                }
                ++pos;
            }
            // BUG FIX: the guard used to be `end != 0`, which accepted
            // end == -1 (no closing delimiter found) and produced a
            // negative-length Region. Require a real end position instead.
            if (separator != '?' && start >= 0 && end != -1) {
                start++;
                int rlength = end - start;
                return new Region(start, rlength);
            }
        } catch (BadLocationException x) {
            // Offset outside the document: fall through and return null.
        }
        return null;
    }
}
|
<reponame>Mihailus2000/lab-01-parser
#include <gtest/gtest.h>
#include "Json.hpp"
// End-to-end parse of a representative document: scalar fields, a numeric
// array and a nested object must all be reachable through operator[] and
// std::any_cast with the expected types.
TEST(JsonObject, ExampleTest){
std::string json = R"({
"lastname" : "Ivanov",
"firstname" : "Ivan",
"age" : 25,
"islegal" : false,
"marks" : [
4,5,5,5,2,3
],
"address" : {
"city" : "Moscow",
"street" : "Vozdvijenka"
}
} )";
Json object = Json::parse(json);
EXPECT_EQ(std::any_cast<std::string>(object["lastname"]), "Ivanov");
EXPECT_EQ(std::any_cast<bool>(object["islegal"]), false);
EXPECT_EQ(std::any_cast<int>(object["age"]), 25);
// Arrays come back as nested Json values indexed by position.
auto marks = std::any_cast<Json>(object["marks"]);
EXPECT_EQ(std::any_cast<int>(marks[0]), 4);
EXPECT_EQ(std::any_cast<int>(marks[1]), 5);
// Nested objects come back as nested Json values indexed by key.
auto address = std::any_cast<Json>(object["address"]);
EXPECT_EQ(std::any_cast<std::string>(address["city"]), "Moscow");
EXPECT_EQ(std::any_cast<std::string>(address["street"]), "Vozdvijenka");
}
// An empty literal must be classified as an object, not an array.
TEST(JsonObject, EmptyObject)
{
Json json{"{}"};
EXPECT_EQ(json.is_object(), true);
EXPECT_EQ(json.is_array(), false);
}
// Whitespace (spaces, tabs, newlines) inside the braces must be ignored.
TEST(JsonObject, EmptyObjectWithSpaces)
{
Json json{"{ \n \t \t \n }"};
EXPECT_EQ(json.is_object(), true);
EXPECT_EQ(json.is_array(), false);
}
// One key/value pair of string type.
TEST(JsonObject, SimpleObject)
{
Json json{R"({ "key" : "value" })"};
EXPECT_EQ(json.is_object(), true);
EXPECT_EQ(json.is_array(), false);
EXPECT_EQ(std::any_cast<std::string>(json["key"]), "value");
}
// A nested object must itself report is_object(); fractional numbers are
// stored as float.
TEST(JsonObject, NestedObject)
{
Json json{R"({ "key" : { "number": 9.5 } })"};
EXPECT_EQ(json.is_object(), true);
EXPECT_EQ(json.is_array(), false);
Json nested = std::any_cast<Json >(json["key"]);
EXPECT_EQ(nested.is_object(), true);
EXPECT_EQ(nested.is_array(), false);
EXPECT_EQ(std::any_cast<float>(nested["number"]), 9.5);
}
// An array value nested inside an object must report is_array().
TEST(JsonObject, NestedWithArray)
{
Json json{R"({ "key" : [1,2,3] })"};
EXPECT_EQ(json.is_object(), true);
EXPECT_EQ(json.is_array(), false);
Json nested = std::any_cast<Json>(json["key"]);
EXPECT_EQ(nested.is_object(), false);
EXPECT_EQ(nested.is_array(), true);
EXPECT_EQ(std::any_cast<int>(nested[0]), 1);
}
// Mixed siblings: an array value followed by a boolean value.
TEST(JsonObject, NestedWithArrayWithValues)
{
Json json{R"({ "key" : [1,2,3], "key2": true })"};
EXPECT_EQ(json.is_object(), true);
EXPECT_EQ(json.is_array(), false);
Json nested = std::any_cast<Json >(json["key"]);
EXPECT_EQ(nested.is_object(), false);
EXPECT_EQ(nested.is_array(), true);
EXPECT_EQ(std::any_cast<int>(nested[0]), 1);
EXPECT_EQ(std::any_cast<int>(nested[1]), 2);
EXPECT_EQ(std::any_cast<int>(nested[2]), 3);
EXPECT_EQ(std::any_cast<bool>(json["key2"]), true);
}
// Malformed-input tests: each must raise json_exception.
// An unknown bare keyword ("undefined") is not valid JSON.
TEST(JsonObject, WrongKeywordStart)
{
EXPECT_THROW(
Json{R"({ "some_key": "some_value", "new_key": undefined })"},
json_exception
);
}
// Unterminated nested object (missing closing brace).
// NOTE(review): test name has a typo — "Onject" for "Object".
TEST(JsonObject, WrongOnjectEnd){
EXPECT_THROW(
Json{R"({ "key": { })"},
json_exception
);
}
// A dangling value with no key after the comma.
TEST(JsonObject, WrongObject){
EXPECT_THROW(
Json{R"({ "some_key": "some_value", "value again" })"},
json_exception
);
}
// A keyword that starts like "false" but is not one ("fallen").
TEST(JsonObject, WrongKeywordEnd) {
EXPECT_THROW(
Json{R"({ "some_key": "some_value", "new_key": fallen })"},
json_exception
);
}
//TEST(JsonObject, CharAfterEndException){
// EXPECT_THROW(
// Json{R"({},)"},
// json_exception
// );
//}
// Object keys must be strings; a numeric key is rejected.
TEST(JsonObject, ExpectedKeyException){
EXPECT_THROW(
Json{R"({ 123: 123 })"},
json_exception
);
}
//TEST(JsonObject, ExpectedSomething){
// EXPECT_THROW(
//Json{R"({ "123": })"},
// json_exception
// );
//}
// Duplicate keys in the same object are rejected.
TEST(JsonObject, KeyRepeat){
EXPECT_THROW(
Json{R"({ "123":1, "123": 2 })"},
json_exception
);
}
// An escaped quote inside a key must be unescaped on lookup.
TEST(JsonObject, BackslashInKey){
Json json{R"({ "1\"2" : true })"};
EXPECT_EQ(json.is_object(), true);
EXPECT_EQ(json.is_array(), false);
EXPECT_EQ(std::any_cast<bool>(json["1\"2"]), true);
}
// An invalid escape sequence in a key must raise json_exception.
TEST(JsonObject, BackslashInKeyBad){
EXPECT_THROW(
Json{R"({ "\1"":1 })"},
json_exception
);
}
import re
def validate_email(emails):
    """Return the subset of *emails* that look like valid addresses.

    An address is accepted when it has the shape local@domain.tld, where the
    local part may contain letters, digits and ``._%+-``, the domain may
    contain letters, digits, dots and hyphens, and the TLD is at least two
    letters.
    """
    # The previous pattern allowed only [a-zA-Z0-9]+ in the local part and a
    # single dot in the domain, so common addresses from the demo below, such
    # as "john.doe@example.com" and "user@domain.co.uk", were wrongly rejected.
    pattern = re.compile(r'^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$')
    return [email for email in emails if pattern.match(email)]
# Test the function with the given example: only well-formed addresses
# should survive the filter.
emails = ["john.doe@example.com", "jane.doe@company.org", "invalid.email@", "12345@example.net", "user@domain.co.uk"]
valid_email_list = validate_email(emails)
print(valid_email_list)
<filename>16 manipulateBits (go)/main.go
package main
import (
"github.com/dborzov/bitmanipulation"
"fmt"
)
// Demonstrates the bitmanipulation package: wraps the value 44 in a BitInt
// and prints its string (bit-level) representation.
func main() {
	i := bitmanipulation.BitInt(44)
	fmt.Printf("Behold 4: %s \n", i.String())
}
import tensorflow as tf
# define features
# NOTE(review): this is the TF1-era tf.feature_column API; confirm the
# installed TensorFlow version supports it.
age = tf.feature_column.numeric_column("age")
location = tf.feature_column.categorical_column_with_vocabulary_list(
    "location", ["usa", "uk", "india", "australia"]
)
gender = tf.feature_column.categorical_column_with_vocabulary_list(
    "gender", ["male", "female"]
)
# define input layer
# NOTE(review): tf.feature_column.input_layer also requires a `features`
# dict of tensors as its first argument; as written this call cannot build
# the layer — supply the feature data here.
feature_columns = [age, location, gender]
input_layer = tf.feature_column.input_layer(feature_columns=feature_columns)
# define model
model = tf.keras.models.Sequential()
model.add(tf.keras.layers.Dense(units=128, activation="relu", input_shape=[3]))
model.add(tf.keras.layers.Dense(units=128, activation="relu"))
model.add(tf.keras.layers.Dense(units=1))
# compile model
model.compile(optimizer="adam", loss="mean_squared_error")
# train model
# NOTE(review): `price` is never defined in this script, so this line raises
# NameError as written — load the label series before fitting.
model.fit(x=input_layer, y=price, epochs=100)
<reponame>IT2901-Tiles/Tiles
import {configure, shallow} from "enzyme"
import Adapter from '@wojtekmaj/enzyme-adapter-react-17';
import MainPage from '../Pages/MainPage';
import {cleanup} from '@testing-library/react';
import renderer from 'react-test-renderer';
import {BrowserRouter as Router} from "react-router-dom";
// Unmount rendered components between tests so state does not leak.
afterEach(() => {
cleanup();
});
// Hook enzyme up to the React 17 adapter.
configure({
adapter: new Adapter()
});
describe('Main page', () => {
// Shallow render shared by the structural assertions below; the snapshot
// test does its own full render inside a Router.
const wrapper = shallow(<MainPage/>)
it('renders correctly', () => {
const tree = renderer.create(<Router><MainPage/></Router>).toJSON();
expect(tree).toMatchSnapshot();
});
it('renders a LinkButton', () => {
expect(wrapper.find('LinkButton')).toHaveLength(1);
});
it('renders a p element', () => {
const paragraph = wrapper.find('p');
expect(paragraph.text()).toEqual('Welcome to the Tiles Internet of Things Invention! Pick one card from three different categories and create some smart creations.Click on the button below to start!');
expect(paragraph).toHaveLength(1);
});
it('renders two img elements', () => {
expect(wrapper.find('img')).toHaveLength(2);
});
it('renders a CharNormalMainPage', () => {
expect(wrapper.find('CharNormalMainPage')).toHaveLength(1);
});
});
def count_words_length(string, length=5):
    """Count the space-separated words in *string* whose character count
    equals *length* (default 5, matching the original behavior).

    Note: punctuation attached to a word counts toward its length
    (e.g. "dog." has length 4), and splitting is on single spaces,
    as before.
    """
    return sum(1 for word in string.split(" ") if len(word) == length)
# Demo: "quick" and "brown" are the only 5-character words ("dog." is 4).
string = "The quick brown fox jumped over the lazy dog."
print(count_words_length(string))
#!/bin/bash
##############################################################################
# (c) OPNFV, Yin Kanglin and others.
# 14_ykl@tongji.edu.cn
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
# create flavor
# parameter: $1-name $2-id $3-ram $4-disk $5-vcpus
set -e
source /root/openrc
# Quote all positional parameters so a flavor name containing spaces or
# shell metacharacters is passed through intact (previously unquoted,
# which word-split such values into separate arguments).
openstack flavor create "$1" --id "$2" --ram "$3" --disk "$4" --vcpus "$5"
|
<filename>a001/os_walk.py<gh_stars>0
import os
# Print the current working directory, then recursively list every
# subdirectory and file name under ./sample via os.walk.
cwd = os.getcwd()
print(cwd)
for current_dir, dir_names, file_names in os.walk('./sample'):
    print('---')
    print(current_dir)
    print(dir_names)
    print(file_names)
# Project metadata constants for the dobbage GUI.
module Slackware::Gui
  DOBBAGE_VERSION = "1.5"
  DOBBAGE_URL = "https://github.com/vbatts/dobbage"
  # NOTE(review): placeholder left by an anonymization pass — restore the
  # real author string before release.
  DOBBAGE_AUTHOR = "<NAME>, <EMAIL>"
end
|
package types
import (
"fmt"
"github.com/lterrac/system-autoscaler/pkg/apis/systemautoscaler/v1beta1"
)
// NodeScales is used to group podscales by node.
type NodeScales struct {
	// Node is the name of the node these podscales are scheduled on.
	Node string
	// PodScales holds the scaling records for pods on this node.
	PodScales []*v1beta1.PodScale
}
// Contains reports whether a podscale for the pod identified by name and
// namespace is tracked on this node.
func (n *NodeScales) Contains(name, namespace string) bool {
	for _, ps := range n.PodScales {
		if ps.Spec.Pod == name && ps.Spec.Namespace == namespace {
			return true
		}
	}
	return false
}
// Remove deletes and returns the podscale matching the given pod name and
// namespace; it returns an error when no such podscale is tracked here.
func (n *NodeScales) Remove(name, namespace string) (*v1beta1.PodScale, error) {
	for i, ps := range n.PodScales {
		if ps.Spec.Pod != name || ps.Spec.Namespace != namespace {
			continue
		}
		// Splice the match out of the slice, then hand it back.
		n.PodScales = append(n.PodScales[:i], n.PodScales[i+1:]...)
		return ps, nil
	}
	return nil, fmt.Errorf("error: missing %#v-%#v in node %#v", namespace, name, n.Node)
}
|
#!/bin/bash
FN="MAQCsubsetILM_1.32.0.tar.gz"
# Mirrors tried in order until one download passes the md5 check.
URLS=(
"https://bioconductor.org/packages/3.14/data/experiment/src/contrib/MAQCsubsetILM_1.32.0.tar.gz"
"https://bioarchive.galaxyproject.org/MAQCsubsetILM_1.32.0.tar.gz"
"https://depot.galaxyproject.org/software/bioconductor-maqcsubsetilm/bioconductor-maqcsubsetilm_1.32.0_src_all.tar.gz"
)
MD5="b96fad384d7d77cdb262d96da40a62b5"
# Use a staging area in the conda dir rather than temp dirs, both to avoid
# permission issues as well as to have things downloaded in a predictable
# manner.
STAGING=$PREFIX/share/$PKG_NAME-$PKG_VERSION-$PKG_BUILDNUM
mkdir -p $STAGING
TARBALL=$STAGING/$FN
SUCCESS=0
for URL in ${URLS[@]}; do
curl $URL > $TARBALL
[[ $? == 0 ]] || continue
# Platform-specific md5sum checks.
if [[ $(uname -s) == "Linux" ]]; then
if md5sum -c <<<"$MD5  $TARBALL"; then
SUCCESS=1
break
fi
# NOTE(review): `else if` opens a nested `if` (hence the doubled `fi`
# below); `elif` would be the conventional spelling.
else if [[ $(uname -s) == "Darwin" ]]; then
if [[ $(md5 $TARBALL | cut -f4 -d " ") == "$MD5" ]]; then
SUCCESS=1
break
fi
fi
fi
done
if [[ $SUCCESS != 1 ]]; then
echo "ERROR: post-link.sh was unable to download any of the following URLs with the md5sum $MD5:"
printf '%s\n' "${URLS[@]}"
exit 1
fi
# Install and clean up
R CMD INSTALL --library=$PREFIX/lib/R/library $TARBALL
rm $TARBALL
rmdir $STAGING
|
// Fill out your copyright notice in the Description page of Project Settings.
#include "EliasTest.h"
// Registers EliasTest as the primary game module with Unreal's module
// manager; FDefaultGameModuleImpl supplies default startup/shutdown behavior.
IMPLEMENT_PRIMARY_GAME_MODULE( FDefaultGameModuleImpl, EliasTest, "EliasTest" );
|
<filename>dist/stratification/index.d.ts
/**
* Created by sam on 26.12.2016.
*/
export * from './IStratification';
export * from './StratificationGroup';
export * from './StratificationCategoricalVector';
export * from './loader';
export * from './Stratification';
export * from './vector/ANameVector';
export * from './vector/StratificationVector';
export * from './vector/VectorNameVector';
|
var path = require("path");
var webpack = require("webpack");
// NOTE(review): `module.loaders` and the bare "file?name=..." loader query
// string are webpack 1 conventions — confirm the installed webpack major
// version before upgrading this config.
module.exports = {
  // Root folder of source code
  context: path.join(__dirname, "src"),
  // Entry point(s)
  entry: {
    // HTML
    html: "./index.html",
    // JS (babel-polyfill first so ES2015+ runtime features exist everywhere)
    javascript: ["babel-polyfill", "./index.js"]
  },
  // Output
  output: {
    // Filename
    filename: "bundle.js"
  },
  // Module
  module: {
    // Loaders
    loaders: [
      {
        // Target .js and .jsx files
        test: /.jsx?$/,
        // Use the "babel-loader" library
        loader: "babel-loader",
        // Exclude the npm libraries
        exclude: /node_modules/,
        query: {
          // Compile Order:
          // 1 - "babel-preset-react"
          // 2 - "babel-preset-es2015"
          presets: ["es2015", "react"]
        }
      },
      {
        // Target .htm and .html files
        test: /.html?$/,
        // Use the "file-loader" to copy the file to the output folder
        loader: "file?name=[name].[ext]"
      }
    ]
  }
}
#!/usr/bin/env bash
# Vagrant provisioner for Agile_Data_Code_2 (part 1): swap space, system
# packages, OpenJDK 8, Miniconda, and the project repo with its Python deps.
sudo chown -R vagrant /home/vagrant
sudo chgrp -R vagrant /home/vagrant
# Setup a swap partition. dd both allocates and zero-fills the 8 GB file,
# so the previous extra fallocate pass was redundant and has been dropped.
sudo dd if=/dev/zero of=/swapfile bs=1M count=8192
sudo chmod 600 /swapfile
sudo mkswap /swapfile
sudo swapon /swapfile
#
# Update & install dependencies (non-interactive; keep existing conffiles)
#
sudo apt-get update && sudo DEBIAN_FRONTEND=noninteractive apt-get -y -o DPkg::options::="--force-confdef" -o DPkg::options::="--force-confold" upgrade
sudo apt-get install -y zip unzip curl bzip2 python-dev build-essential git libssl1.0.0 libssl-dev \
software-properties-common debconf-utils apt-transport-https
#
# Uncomment below to install Oracle Java8 (No longer available from ppa)
#
# sudo add-apt-repository -y ppa:webupd8team/java
# sudo apt-get update
# echo "oracle-java8-installer shared/accepted-oracle-license-v1-1 select true" | sudo debconf-set-selections
# sudo apt-get install -y oracle-java8-installer oracle-java8-set-default
# cd /var/lib/dpkg/info
# sudo sed -i 's|JAVA_VERSION=8u151|JAVA_VERSION=8u162|' oracle-java8-installer.*
# sudo sed -i 's|PARTNER_URL=http://download.oracle.com/otn-pub/java/jdk/8u151-b12/e758a0de34e24606bca991d704f6dcbf/|PARTNER_URL=http://download.oracle.com/otn-pub/java/jdk/8u162-b12/0da788060d494f5095bf8624735fa2f1/|' oracle-java8-installer.*
# sudo sed -i 's|SHA256SUM_TGZ="c78200ce409367b296ec39be4427f020e2c585470c4eed01021feada576f027f"|SHA256SUM_TGZ="68ec82d47fd9c2b8eb84225b6db398a72008285fafc98631b1ff8d2229680257"|' oracle-java8-installer.*
# sudo sed -i 's|J_DIR=jdk1.8.0_151|J_DIR=jdk1.8.0_162|' oracle-java8-installer.*
# echo "oracle-java8-installer shared/accepted-oracle-license-v1-1 select true" | sudo debconf-set-selections
# sudo apt-get install -y oracle-java8-installer oracle-java8-set-default
# -y keeps add-apt-repository from waiting on its interactive confirmation
# prompt, which would hang this unattended provision.
sudo add-apt-repository -y ppa:openjdk-r/ppa
sudo apt-get update
sudo apt-get install -y openjdk-8-jdk
export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64
echo "export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64" | sudo tee -a /home/vagrant/.bash_profile
#
# Install Miniconda
#
curl -Lko /tmp/Miniconda3-latest-Linux-x86_64.sh https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh
chmod +x /tmp/Miniconda3-latest-Linux-x86_64.sh
/tmp/Miniconda3-latest-Linux-x86_64.sh -b -p /home/vagrant/anaconda
export PATH=/home/vagrant/anaconda/bin:$PATH
echo 'export PATH=/home/vagrant/anaconda/bin:$PATH' | sudo tee -a /home/vagrant/.bash_profile
sudo chown -R vagrant /home/vagrant/anaconda
sudo chgrp -R vagrant /home/vagrant/anaconda
#
# Install Clone repo, install Python dependencies
#
cd /home/vagrant
git clone https://github.com/rjurney/Agile_Data_Code_2
cd /home/vagrant/Agile_Data_Code_2
export PROJECT_HOME=/home/vagrant/Agile_Data_Code_2
echo "export PROJECT_HOME=/home/vagrant/Agile_Data_Code_2" | sudo tee -a /home/vagrant/.bash_profile
conda install -y python=3.6.8
conda install -y iso8601 numpy scipy scikit-learn matplotlib ipython jupyter
pip install bs4 Flask beautifulsoup4 frozendict geopy kafka-python py4j pymongo pyelasticsearch requests selenium tabulate tldextract wikipedia findspark imongo-kernel
sudo chown -R vagrant /home/vagrant/Agile_Data_Code_2
sudo chgrp -R vagrant /home/vagrant/Agile_Data_Code_2
cd /home/vagrant
# Install commons-httpclient
curl -Lko /home/vagrant/Agile_Data_Code_2/lib/commons-httpclient-3.1.jar http://central.maven.org/maven2/commons-httpclient/commons-httpclient/3.1/commons-httpclient-3.1.jar
#
# Install Hadoop
#
curl -Lko /tmp/hadoop-3.0.1.tar.gz https://archive.apache.org/dist/hadoop/common/hadoop-3.0.1/hadoop-3.0.1.tar.gz
mkdir -p /home/vagrant/hadoop
cd /home/vagrant/
tar -xvf /tmp/hadoop-3.0.1.tar.gz -C hadoop --strip-components=1
echo "" >> /home/vagrant/.bash_profile
export HADOOP_HOME=/home/vagrant/hadoop
echo 'export HADOOP_HOME=/home/vagrant/hadoop' | sudo tee -a /home/vagrant/.bash_profile
export PATH=$PATH:$HADOOP_HOME/bin
echo 'export PATH=$PATH:$HADOOP_HOME/bin' | sudo tee -a /home/vagrant/.bash_profile
export HADOOP_CLASSPATH=$(hadoop classpath)
echo 'export HADOOP_CLASSPATH=$(hadoop classpath)' | sudo tee -a /home/vagrant/.bash_profile
export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
echo 'export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop' | sudo tee -a /home/vagrant/.bash_profile
# Give to vagrant
echo "Giving hadoop to user vagrant ..." | tee -a $LOG_FILE
sudo chown -R vagrant /home/vagrant/hadoop
sudo chgrp -R vagrant /home/vagrant/hadoop
#
# Install Spark
#
echo "" | tee -a $LOG_FILE
echo "Downloading and installing Spark 2.2.1 ..." | tee -a $LOG_FILE
# Fetch the "Hadoop free" build: the saved filename and the
# SPARK_DIST_CLASSPATH wiring below (which supplies the locally installed
# Hadoop 3.0.1 jars at runtime) both assume it. The old URL mistakenly
# pointed at the bundled-Hadoop-2.7 build.
curl -Lko /tmp/spark-2.2.1-bin-without-hadoop.tgz https://archive.apache.org/dist/spark/spark-2.2.1/spark-2.2.1-bin-without-hadoop.tgz
mkdir -p /home/vagrant/spark
cd /home/vagrant
tar -xvf /tmp/spark-2.2.1-bin-without-hadoop.tgz -C spark --strip-components=1
echo "" >> /home/vagrant/.bash_profile
echo "# Spark environment setup" | sudo tee -a /home/vagrant/.bash_profile
export SPARK_HOME=/home/vagrant/spark
echo 'export SPARK_HOME=/home/vagrant/spark' | sudo tee -a /home/vagrant/.bash_profile
export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop/
echo 'export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop/' | sudo tee -a /home/vagrant/.bash_profile
export SPARK_DIST_CLASSPATH=`$HADOOP_HOME/bin/hadoop classpath`
echo 'export SPARK_DIST_CLASSPATH=`$HADOOP_HOME/bin/hadoop classpath`' | sudo tee -a /home/vagrant/.bash_profile
export PATH=$PATH:$SPARK_HOME/bin
echo 'export PATH=$PATH:$SPARK_HOME/bin' | sudo tee -a /home/vagrant/.bash_profile
# Have to set spark.io.compression.codec in Spark local mode
cp /home/vagrant/spark/conf/spark-defaults.conf.template /home/vagrant/spark/conf/spark-defaults.conf
echo 'spark.io.compression.codec org.apache.spark.io.SnappyCompressionCodec' | sudo tee -a /home/vagrant/spark/conf/spark-defaults.conf
# Give Spark 8GB of RAM, use Python3
echo "spark.driver.memory 8g" | sudo tee -a $SPARK_HOME/conf/spark-defaults.conf
echo "spark.executor.cores 2" | sudo tee -a $SPARK_HOME/conf/spark-defaults.conf
echo "PYSPARK_PYTHON=python3" | sudo tee -a $SPARK_HOME/conf/spark-env.sh
echo "PYSPARK_DRIVER_PYTHON=python3" | sudo tee -a $SPARK_HOME/conf/spark-env.sh
# Setup log4j config to reduce logging output
cp $SPARK_HOME/conf/log4j.properties.template $SPARK_HOME/conf/log4j.properties
sed -i 's/INFO/ERROR/g' $SPARK_HOME/conf/log4j.properties
# Give to vagrant
sudo chown -R vagrant /home/vagrant/spark
sudo chgrp -R vagrant /home/vagrant/spark
#
# Install MongoDB and dependencies
#
sudo apt-get install -y mongodb
# MongoDB's default dbpath must exist and be writable by the service user.
sudo mkdir -p /data/db
sudo chown -R mongodb /data/db
sudo chgrp -R mongodb /data/db
# run MongoDB as daemon
sudo systemctl start mongodb
# Get the MongoDB Java Driver
echo "curl -sLko /home/vagrant/Agile_Data_Code_2/lib/mongo-java-driver-3.6.1.jar https://oss.sonatype.org/content/repositories/releases/org/mongodb/mongo-java-driver/3.6.1/mongo-java-driver-3.6.1.jar"
curl -sLko /home/vagrant/Agile_Data_Code_2/lib/mongo-java-driver-3.6.1.jar https://oss.sonatype.org/content/repositories/releases/org/mongodb/mongo-java-driver/3.6.1/mongo-java-driver-3.6.1.jar
# Install the mongo-hadoop project in the mongo-hadoop directory in the root of our project.
curl -Lko /tmp/mongo-hadoop-r2.0.2.tar.gz https://github.com/mongodb/mongo-hadoop/archive/r2.0.2.tar.gz
mkdir /home/vagrant/mongo-hadoop
cd /home/vagrant
tar -xvzf /tmp/mongo-hadoop-r2.0.2.tar.gz -C mongo-hadoop --strip-components=1
rm -rf /tmp/mongo-hadoop-r2.0.2.tar.gz
# Now build the mongo-hadoop-spark jars
cd /home/vagrant/mongo-hadoop
./gradlew jar
# Copy the built connector jars into the project's lib/ for spark.jars use.
cp /home/vagrant/mongo-hadoop/spark/build/libs/mongo-hadoop-spark-*.jar /home/vagrant/Agile_Data_Code_2/lib/
cp /home/vagrant/mongo-hadoop/build/libs/mongo-hadoop-*.jar /home/vagrant/Agile_Data_Code_2/lib/
cd /home/vagrant
# Now build the pymongo_spark package
cd /home/vagrant/mongo-hadoop/spark/src/main/python
python setup.py install
cp /home/vagrant/mongo-hadoop/spark/src/main/python/pymongo_spark.py /home/vagrant/Agile_Data_Code_2/lib/
export PYTHONPATH=$PYTHONPATH:$PROJECT_HOME/lib
echo "" | sudo tee -a /home/vagrant/.bash_profile
echo 'export PYTHONPATH=$PYTHONPATH:$PROJECT_HOME/lib' | sudo tee -a /home/vagrant/.bash_profile
cd /home/vagrant
# All needed artifacts were copied out above, so the source tree can go.
rm -rf /home/vagrant/mongo-hadoop
#
# Install ElasticSearch in the elasticsearch directory in the root of our project, and the Elasticsearch for Hadoop package
#
echo "curl -sLko /tmp/elasticsearch-5.6.0.tar.gz https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-5.6.0.tar.gz"
curl -sLko /tmp/elasticsearch-5.6.0.tar.gz https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-5.6.0.tar.gz
mkdir /home/vagrant/elasticsearch
cd /home/vagrant
tar -xvzf /tmp/elasticsearch-5.6.0.tar.gz -C elasticsearch --strip-components=1
sudo chown -R vagrant /home/vagrant/elasticsearch
sudo chgrp -R vagrant /home/vagrant/elasticsearch
# Pre-create a writable logs directory for the vagrant-owned server process.
sudo mkdir -p /home/vagrant/elasticsearch/logs
sudo chown -R vagrant /home/vagrant/elasticsearch/logs
sudo chgrp -R vagrant /home/vagrant/elasticsearch/logs
# Run elasticsearch
sudo -u vagrant /home/vagrant/elasticsearch/bin/elasticsearch -d # re-run if you shutdown your computer
# Run a query to test - it will error but should return json
echo "Testing Elasticsearch with a query ..." | tee -a $LOG_FILE
curl 'localhost:9200/agile_data_science/on_time_performance/_search?q=Origin:ATL&pretty'
# Install Elasticsearch for Hadoop
# NOTE(review): the server installed above is 5.6.0 while this connector is
# 6.1.2 — confirm the version skew is intentional.
echo "curl -sLko /tmp/elasticsearch-hadoop-6.1.2.zip http://download.elastic.co/hadoop/elasticsearch-hadoop-6.1.2.zip"
curl -sLko /tmp/elasticsearch-hadoop-6.1.2.zip http://download.elastic.co/hadoop/elasticsearch-hadoop-6.1.2.zip
unzip /tmp/elasticsearch-hadoop-6.1.2.zip
mv /home/vagrant/elasticsearch-hadoop-6.1.2 /home/vagrant/elasticsearch-hadoop
cp /home/vagrant/elasticsearch-hadoop/dist/elasticsearch-hadoop-6.1.2.jar /home/vagrant/Agile_Data_Code_2/lib/
cp /home/vagrant/elasticsearch-hadoop/dist/elasticsearch-spark-20_2.11-6.1.2.jar /home/vagrant/Agile_Data_Code_2/lib/
# Disable speculation — presumably to avoid duplicate writes to external
# stores from speculative task attempts; confirm before changing.
echo "spark.speculation false" | sudo tee -a /home/vagrant/spark/conf/spark-defaults.conf
rm -f /tmp/elasticsearch-hadoop-6.1.2.zip
rm -rf /home/vagrant/elasticsearch-hadoop/conf/spark-defaults.conf
#
# Spark jar setup
#
# Install and add snappy-java and lzo-java to our classpath below via spark.jars
echo "" | tee -a $LOG_FILE
echo "Installing snappy-java and lzo-java and adding them to our classpath ..." | tee -a $LOG_FILE
cd /home/vagrant/Agile_Data_Code_2
curl -sLko lib/snappy-java-1.1.7.1.jar http://central.maven.org/maven2/org/xerial/snappy/snappy-java/1.1.7.1/snappy-java-1.1.7.1.jar
curl -sLko lib/lzo-hadoop-1.0.5.jar http://central.maven.org/maven2/org/anarres/lzo/lzo-hadoop/1.0.5/lzo-hadoop-1.0.5.jar
cd /home/vagrant
# Set the spark.jars path — every connector/codec jar collected in the
# project's lib/ directory, comma-separated on one line.
echo "spark.jars /home/vagrant/Agile_Data_Code_2/lib/mongo-hadoop-spark-2.0.2.jar,/home/vagrant/Agile_Data_Code_2/lib/mongo-java-driver-3.6.1.jar,/home/vagrant/Agile_Data_Code_2/lib/mongo-hadoop-2.0.2.jar,/home/vagrant/Agile_Data_Code_2/lib/elasticsearch-spark-20_2.11-6.1.2.jar,/home/vagrant/Agile_Data_Code_2/lib/snappy-java-1.1.7.1.jar,/home/vagrant/Agile_Data_Code_2/lib/lzo-hadoop-1.0.5.jar,/home/vagrant/Agile_Data_Code_2/lib/commons-httpclient-3.1.jar" | sudo tee -a /home/vagrant/spark/conf/spark-defaults.conf
#
# Kafka install and setup
#
echo "" | tee -a $LOG_FILE
echo "" | tee -a $LOG_FILE
echo "Downloading and installing Kafka version 2.1.1 for Scala 2.11 ..." | tee -a $LOG_FILE
curl -Lko /tmp/kafka_2.11-2.1.1.tgz https://www-us.apache.org/dist/kafka/2.1.1/kafka_2.11-2.1.1.tgz
mkdir -p /home/vagrant/kafka
cd /home/vagrant/
tar -xvzf /tmp/kafka_2.11-2.1.1.tgz -C kafka --strip-components=1 && rm -f /tmp/kafka_2.11-2.1.1.tgz
# Give to vagrant
echo "Giving Kafka to user vagrant ..." | tee -a $LOG_FILE
sudo chown -R vagrant /home/vagrant/kafka
sudo chgrp -R vagrant /home/vagrant/kafka
# Set the log dir to kafka/logs. (This identical sed previously ran twice,
# once before and once after the chown; a single pass is sufficient.)
echo "Configuring logging for kafka to go into kafka/logs directory ..." | tee -a $LOG_FILE
sed -i '/log.dirs=\/tmp\/kafka-logs/c\log.dirs=logs' /home/vagrant/kafka/config/server.properties
# Run zookeeper (which kafka depends on), then Kafka
echo "Running Zookeeper as a daemon ..." | tee -a $LOG_FILE
sudo -H -u vagrant /home/vagrant/kafka/bin/zookeeper-server-start.sh -daemon /home/vagrant/kafka/config/zookeeper.properties
echo "Running Kafka Server as a daemon ..." | tee -a $LOG_FILE
sudo -H -u vagrant /home/vagrant/kafka/bin/kafka-server-start.sh -daemon /home/vagrant/kafka/config/server.properties
#
# Install and setup Airflow
#
# SLUGIFY_USES_TEXT_UNIDECODE sidesteps the GPL unidecode dependency that
# apache-airflow's setup otherwise refuses to install with.
echo "export SLUGIFY_USES_TEXT_UNIDECODE=yes"
export SLUGIFY_USES_TEXT_UNIDECODE=yes
pip install apache-airflow[hive]
mkdir /home/vagrant/airflow
mkdir /home/vagrant/airflow/dags
mkdir /home/vagrant/airflow/logs
mkdir /home/vagrant/airflow/plugins
sudo chown -R vagrant /home/vagrant/airflow
sudo chgrp -R vagrant /home/vagrant/airflow
airflow initdb
airflow webserver -D &
airflow scheduler -D &
# Install Apache Zeppelin
echo "curl -sLko /tmp/zeppelin-0.7.3-bin-all.tgz https://archive.apache.org/dist/zeppelin/zeppelin-0.7.3/zeppelin-0.7.3-bin-all.tgz"
curl -sLko /tmp/zeppelin-0.7.3-bin-all.tgz https://archive.apache.org/dist/zeppelin/zeppelin-0.7.3/zeppelin-0.7.3-bin-all.tgz
mkdir zeppelin
tar -xvzf /tmp/zeppelin-0.7.3-bin-all.tgz -C zeppelin --strip-components=1
# Configure Zeppelin
cp zeppelin/conf/zeppelin-env.sh.template zeppelin/conf/zeppelin-env.sh
# NOTE(review): Spark was installed to /home/vagrant/spark, not under
# $PROJECT_HOME — confirm this SPARK_HOME value is intentional.
echo "export SPARK_HOME=$PROJECT_HOME/spark" >> zeppelin/conf/zeppelin-env.sh
echo "export SPARK_MASTER=local" >> zeppelin/conf/zeppelin-env.sh
echo "export SPARK_CLASSPATH=" >> zeppelin/conf/zeppelin-env.sh
# Jupyter server setup
jupyter notebook --generate-config
mkdir /root/.jupyter/
cp /home/vagrant/Agile_Data_Code_2/jupyter_notebook_config.py /root/.jupyter/
mkdir /root/certs
# Self-signed certificate (1 year) for the notebook server.
sudo openssl req -x509 -nodes -days 365 -newkey rsa:1024 -subj "/C=US" -keyout /root/certs/mycert.pem -out /root/certs/mycert.pem
cd /home/vagrant/Agile_Data_Code_2
# NOTE(review): an empty --NotebookApp.token= disables authentication;
# acceptable only because this is a throwaway local Vagrant box.
jupyter notebook --ip=0.0.0.0 --NotebookApp.token= --allow-root --no-browser &
cd
# Re-take ownership of the airflow tree: initdb/webserver/scheduler above
# ran as root and may have created root-owned files since the first chown.
sudo chown -R vagrant /home/vagrant/airflow
sudo chgrp -R vagrant /home/vagrant/airflow
echo "sudo chown -R vagrant /home/vagrant/airflow" | sudo tee -a /home/vagrant/.bash_profile
echo "sudo chgrp -R vagrant /home/vagrant/airflow" | sudo tee -a /home/vagrant/.bash_profile
# Install Ant to build Cassandra
sudo apt-get install -y ant
# Install Cassandra - must build from source as the latest 3.11.1 build is broken...
git clone https://github.com/apache/cassandra
cd cassandra
git checkout cassandra-3.11
ant
bin/cassandra
export PATH=$PATH:/home/vagrant/cassandra/bin
echo 'export PATH=$PATH:/home/vagrant/cassandra/bin' | sudo tee -a /home/vagrant/.bash_profile
cd ..
# Install and setup JanusGraph
cd /home/vagrant
curl -Lko /tmp/janusgraph-0.2.0-hadoop2.zip \
https://github.com/JanusGraph/janusgraph/releases/download/v0.2.0/janusgraph-0.2.0-hadoop2.zip
unzip -d . /tmp/janusgraph-0.2.0-hadoop2.zip
mv janusgraph-0.2.0-hadoop2 janusgraph
rm /tmp/janusgraph-0.2.0-hadoop2.zip
# Download data
cd /home/vagrant/Agile_Data_Code_2
./download.sh
# Install phantomjs
/home/vagrant/Agile_Data_Code_2/install/phantomjs.sh
# make sure we own /home/vagrant/.bash_profile after all the 'sudo tee'
sudo chgrp vagrant /home/vagrant/.bash_profile
sudo chown vagrant /home/vagrant/.bash_profile
#
# Cleanup
#
sudo apt-get clean
sudo rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
echo "DONE!"
|
'use strict';
/* global IACHandler */

(function(exports) {
  /**
   * DialerComms allows the user to stop the ringtone from playing by
   * interacting with the hardware: pressing the sleep or volume-down
   * button silences it.
   * @class DialerComms
   * @requires IACHandler
   */
  function DialerComms() {
    ['sleep', 'volumedown'].forEach(function(eventName) {
      window.addEventListener(eventName, this.stopRingtone);
    }, this);
  }

  DialerComms.prototype = {
    /**
     * Notifies the dialer app that the system requests the ringtone stop
     * playing, by posting 'stop_ringtone' on the 'dialercomms' IAC port.
     * Silently does nothing when the port is unavailable.
     * @memberof DialerComms.prototype
     */
    stopRingtone: function() {
      var port = IACHandler.getPort('dialercomms');
      if (port) {
        port.postMessage('stop_ringtone');
      }
    }
  };

  exports.DialerComms = DialerComms;
})(window);
|
#!/usr/bin/env bash
#
# Copyright (c) Microsoft Corporation. All rights reserved.
#
# Resolve the repository root (five directory levels above this script)
# and hand off to the dev server CLI via node.
if [[ "$OSTYPE" == "darwin"* ]]; then
	# macOS has no `readlink -f`; emulate realpath for absolute/relative args.
	realpath() { [[ $1 = /* ]] && echo "$1" || echo "$PWD/${1#./}"; }
	# Every nested expansion is quoted so paths containing spaces survive.
	VSCODE_PATH=$(dirname "$(dirname "$(dirname "$(dirname "$(dirname "$(realpath "$0")")")")")")
else
	VSCODE_PATH=$(dirname "$(dirname "$(dirname "$(dirname "$(dirname "$(readlink -f "$0")")")")")")
fi

PROD_NAME="Code Server - Dev"
VERSION=""
COMMIT=""
EXEC_NAME="$(basename "$(test -L "$0" && readlink "$0" || echo "$0")")"
CLI_SCRIPT="$VSCODE_PATH/out/server-cli.js"
node "$CLI_SCRIPT" "$PROD_NAME" "$VERSION" "$COMMIT" "$EXEC_NAME" "$@"
__version__ = '0.1'
__author__ = 'Tester'
#home page FB
Email_Phone_Field_To_Login = '//input[@id =\'email\']'
Password_Field_To_Login = '//input[@id =\'pass\']'
Login_Button = '//input[@value =\'Log In\']'
Forgot_account = '//a[contains(text(), \'Forgot account?\')]'
English_Link = '//a[@title=\'English (US)\']'
First_Name_Field = '//input[@name =\'firstname\']'
Last_Name_Field = '//input[@name =\'lastname\']'
Email_Field = '//input[@name =\'reg_email__\']'
Re_Enter_Email_Field = '//input[@name =\'reg_email_confirmation__\']'
New_Password = '//input[@name =\'reg_passwd__\']'
Select_Month = '//select[@name=\'birthday_month\']'
Select_Day = '//select[@name=\'birthday_day\']'
Select_Year = '//select[@name=\'birthday_year\']'
Q_About_BD_Info = '//a[@id =\'birthday-help\']'
Female = '//input[@value=\'1\']'
Male = '//input[@value=\'2\']'
Sign_Up_Button = '//button[@name =\'websubmit\']'
Incorrect_Email_Error = '//div[@id = \'reg_error_inner\']'
#Forgot pass FB
Email_Or_Phone_To_Recall_Pass = '//input[@id =\'identify_email\']'
Search_Button = '//input[@name =\'did_submit\']'
#Fight booking page
Flight_Xpath = '//a[contains(text(), \'Flights\')]'
Oneway_Xpath = '//input[@value=\'oneway\']'
Passenger_X = '//select[@name=\'passCount\']'
From_X = '//select[@name=\'fromPort\']'
To_X = '//select[@name=\'toPort\']'
#flight authorization page
Class_X = '//input[@type=\'radio\'][@value=\'Business\']'
Continue_X = '//input[@name=\'findFlights\']'
Continue2_X = '//input[@name=\'reserveFlights\']'
Submit2_X = '//input[@name=\'buyFlights\']'
#Flight confirmation page
Confirmation_X = '//*[contains(text(),\'itinerary\')]'
BookedNo_X = '//*[contains(text(),\'# 2017\')]'
|
import {Response, NextFunction, Request} from "express";
import {v4 as uuid} from "uuid";
import logger from "../shared/Logger";

/**
 * Express middleware that tags each incoming request with a fresh UUID,
 * logs its arrival (id, then method and URL), echoes the id back in the
 * X-Request-Id response header, and passes control to the next handler.
 */
const requestMiddleware = async (req: Request, res: Response, next: NextFunction) => {
    const requestId = uuid();
    req.requestId = requestId;
    logger.info(`Request Received - ${requestId}`);
    logger.info(`${req.method} ${req.originalUrl}`);
    res.setHeader("X-Request-Id", requestId);
    next();
};

export default requestMiddleware;
|
import numpy as np

# 3x3 example matrix; its main diagonal holds 1, 5 and 9.
matrix = np.array([[1, 2, 3],
                   [4, 5, 6],
                   [7, 8, 9]])

# Calculate the sum of the diagonal entries (the trace).
sum_diag = matrix.trace()

# Print the output
print(sum_diag)
<gh_stars>0
# Select the prebuilt stb native library matching the running interpreter.
# `lib` receives the library filename and `path` the bundled dlls directory;
# both stay unset on platforms without a prebuilt binary (macOS, JRuby,
# unrecognized systems).
dlls_dir = "#{__dir__}/../dlls"

case RUBY_PLATFORM
when /64/
  puts "You have a 64-bit Architecture ruby"
  case RUBY_PLATFORM
  when /mswin/, /mingw/
    puts "With Windows"
    lib, path = 'stbDLL_x64.dll', dlls_dir
  when /linux/, /cygwin/
    puts "With Linux"
    lib, path = 'libstb_x64.so', dlls_dir
  when /darwin/
    puts "With macOS"
  else
    puts "I have no idea what os are you using, so it's possible that stbimage wont't work"
  end
when /arm/
  puts "You have a arm architecture"
  lib, path = 'libstb_arm.so', dlls_dir
when /java/
  puts "You have jruby!"
else
  puts "You have a 32-bit Architecture ruby"
  case RUBY_PLATFORM
  when /mswin/, /mingw/
    puts "With Windows"
    lib, path = 'stbDLL_x32.dll', dlls_dir
  when /linux/, /cygwin/
    puts "With Linux"
    lib, path = 'libstb_x32.so', dlls_dir
  when /darwin/
    puts "With macOS"
  else
    puts "I have no idea what os are you using, so it's possible that stbimage wont't work"
  end
end
# if RUBY_PLATFORM == "x64-mswin64_140" || RUBY_PLATFORM == "x64-mingw32"
# lib, path = 'stbDLL_x64.dll', "#{__dir__}/../dlls"
# elsif RUBY_PLATFORM == "x86-mingw32"
# lib, path = 'stbDLL_x86.dll', "#{__dir__}/../dlls"
# elsif RUBY_PLATFORM =~ /x86_linux/
# lib, path = 'libstd_x86.so', "#{__dir__}/../dlls"
# else
|
#!/bin/bash
# Force-resize every PNG under the test-images/ and training-images/
# dataset trees to a fixed-size JPEG and delete the original PNG.
# (The two directory passes previously duplicated the same loop; the
# shared logic now lives in one function.)
height=28
width=28

# Convert all $1/*/*.png to exactly ${width}x${height} JPEGs, then remove
# the source PNGs. Skips silently when the directory holds no PNGs.
convert_pngs() {
	if [ `ls "$1"/*/*.png 2> /dev/null | wc -l ` -gt 0 ]; then
		for file in "$1"/*/*.png; do
			# The \! flag forces the exact geometry, ignoring aspect ratio.
			convert "$file" -resize "${width}x${height}"\! "${file%.*}.jpg"
			file "$file" #uncomment for testing
			rm "$file"
		done
	fi
}

convert_pngs test-images
convert_pngs training-images
|
import numpy as np
import pandas as pd
from keras.models import Sequential
from keras.layers import Dense, Activation

# Create dataframe from data
df = pd.DataFrame({
    'Temperature': [36.6, 36.6, 37.2, 36.6],
    'Time': ['6am', '6am', '6am', '6pm'],
    'Location': ['Paris', 'Paris', 'London', 'Paris'],
    'Activity': ['sleeping', 'sleeping', 'working', 'working']
})

# Set feature vectors and labels
X = df.drop('Temperature', axis=1)
y = df.loc[:, 'Temperature']

# One-hot encode the categorical string columns: Keras cannot train on raw
# strings, and the previous hard-coded input_shape=(3,) would not match the
# encoded feature count either.
X = pd.get_dummies(X).astype('float32')

# Define model; input width follows the encoded feature matrix.
model = Sequential([
    Dense(4, input_shape=(X.shape[1],), activation='relu'),
    Dense(8, activation='relu'),
    Dense(1)
])

# Compile and fit neural network
model.compile(optimizer='adam', loss='mse')
model.fit(X, y, epochs=50)
#!/usr/bin/env bash
# Cross-compile every Go command found under cmd/ for each target in
# $platforms, writing binaries to bin/<GOOS>-<GOARCH>/.

gobuild (){
	# Build one package (passed as $1) for every configured platform.
	package=$1
	if [[ -z "$package" ]]; then
		echo "usage: $0 <package-name>"
		exit 1
	fi
	package_name=$package

	platforms=("linux/amd64" "windows/amd64") # "linux/arm64")

	for platform in "${platforms[@]}"
	do
		# Split "os/arch" into its two components.
		platform_split=(${platform//\// })
		GOOS=${platform_split[0]}
		GOARCH=${platform_split[1]}
		folder=bin/$GOOS'-'$GOARCH
		mkdir -p "$folder"
		# ./cmd/<name>/main.go -> <name>
		output_name=$(echo "${package_name}" | awk -F'/' '{print $3}')
		# Pick the cross C toolchain for cgo; CGO_ENABLED/CC are passed via
		# `env` below (earlier unexported assignments here were dead code).
		if [ "$GOARCH" = "arm64" ]; then # TODO not work yet
			ext_flag='CC=aarch64-linux-gnu-gcc CXX=aarch64-linux-gnu-g++'
		elif [ "$GOOS" = "windows" ]; then
			ext_flag='CC=x86_64-w64-mingw32-gcc CXX=x86_64-w64-mingw32-g++ CGO_LDFLAGS=-static'
			output_name+='.exe'
		else
			ext_flag='CC=gcc'
		fi
		# $ext_flag is intentionally unquoted: it carries several VAR=value
		# words for env.
		env GOOS=$GOOS GOARCH=$GOARCH CGO_ENABLED=1 $ext_flag go build -o "$folder/$output_name" "$package"
		if [ $? -ne 0 ]; then
			echo 'An error has occurred! Aborting the script execution...'
			exit 1
		fi
	done
}

for i in $(find cmd -name "*go"); do
	gobuild "./$i"
done
def update_op_field(data):
    """Set the first paragraph's ``op`` field from its last subquestion.

    The question text of the final qas entry whose level is
    'subquestion' is upper-cased with spaces replaced by underscores and
    stored on the paragraph. ``data`` is mutated in place and returned.
    """
    first_paragraph = data['data'][0]['paragraphs'][0]
    subquestion_texts = [
        entry['question']
        for entry in first_paragraph['qas']
        if entry['level'] == 'subquestion'
    ]
    first_paragraph['op'] = subquestion_texts[-1].replace(' ', '_').upper()
    return data
<gh_stars>0
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from scipy import misc
import sys
import os
import argparse
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()
import numpy as np
#import facenet
import detect_face
import random
from time import sleep
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'common'))
import face_image
from skimage import transform as trans
import cv2
import imageio
def to_rgb(img):
    """Expand a 2-D grayscale image to a 3-channel uint8 RGB array.

    Each output channel is a copy of the input (values are cast to uint8).
    """
    rgb = np.empty(img.shape + (3,), dtype=np.uint8)
    for channel in range(3):
        rgb[:, :, channel] = img
    return rgb
def IOU(Reframe, GTframe):
    """Return the intersection-over-union of two (x1, y1, x2, y2) boxes.

    Returns 0 when the boxes do not overlap, otherwise
    intersection_area / union_area as a float.
    """
    ax, ay = Reframe[0], Reframe[1]
    a_w = Reframe[2] - ax
    a_h = Reframe[3] - ay

    bx, by = GTframe[0], GTframe[1]
    b_w = GTframe[2] - bx
    b_h = GTframe[3] - by

    # Overlap extent along each axis: total span minus the bounding span.
    overlap_w = a_w + b_w - (max(ax + a_w, bx + b_w) - min(ax, bx))
    overlap_h = a_h + b_h - (max(ay + a_h, by + b_h) - min(ay, by))

    if overlap_w <= 0 or overlap_h <= 0:
        return 0
    inter = overlap_w * overlap_h
    return inter * 1. / (a_w * a_h + b_w * b_h - inter)
def main(args):
    """Align faces with MTCNN and write fixed-size crops plus an index file.

    For every image in the dataset named ``args.name`` under
    ``args.input_dir``, four strategies are tried in order until one
    produces an aligned crop:
      1. similarity-warp on annotated landmarks, then re-detect;
      2. detect in the full image and pick the box best matching the
         annotated bbox by IOU (> 0.3);
      3. forced detection on the annotated bbox;
      4. a plain (bbox-guided or border-trimmed) crop, resized.
    Crops go under ``args.output_dir`` mirroring the last two input path
    components; one "1\\t<path>\\t<class>" line per image is appended to an
    ``lst`` index file. ``nrof`` counts how often each strategy fired
    (slot 4 = crop with no bbox).

    Fix vs. previous revision: ``xrange`` (Python 2-only) is now ``range``,
    matching the Python 3 / tf.compat.v1 stack this script runs on.
    """
    output_dir = os.path.expanduser(args.output_dir)
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    src_path, _ = os.path.split(os.path.realpath(__file__))
    dataset = face_image.get_dataset(args.name, args.input_dir)
    print('dataset size', args.name, len(dataset))

    print('Creating networks and loading parameters')
    with tf.Graph().as_default():
        sess = tf.Session()
        with sess.as_default():
            pnet, rnet, onet = detect_face.create_mtcnn(sess, None)

    minsize = 100  # minimum size of face
    threshold = [0.6, 0.7, 0.7]  # three steps's threshold
    factor = 0.709  # scale factor

    # Canonical 5-point destination template (112x96 base); shifted right
    # by 8px for square 112x112 output.
    image_size = [112, 112]
    src = np.array([
        [30.2946, 51.6963],
        [65.5318, 51.5014],
        [48.0252, 71.7366],
        [33.5493, 92.3655],
        [62.7299, 92.2041]], dtype=np.float32)
    if image_size[1] == 112:
        src[:, 0] += 8.0

    if not os.path.exists(args.output_dir):
        os.makedirs(args.output_dir)
    output_filename = os.path.join(args.output_dir, 'lst')

    with open(output_filename, "w") as text_file:
        nrof_images_total = 0
        nrof = np.zeros((5,), dtype=np.int32)
        for fimage in dataset:
            if nrof_images_total % 100 == 0:
                print("Processing %d, (%s)" % (nrof_images_total, nrof))
            nrof_images_total += 1
            image_path = fimage.image_path
            if not os.path.exists(image_path):
                print('image not found (%s)'%image_path)
                continue
            filename = os.path.splitext(os.path.split(image_path)[1])[0]
            try:
                img = imageio.imread(image_path)
            except (IOError, ValueError, IndexError) as e:
                errorMessage = '{}: {}'.format(image_path, e)
                print(errorMessage)
            else:
                if img.ndim < 2:
                    print('Unable to align "%s", img dim error' % image_path)
                    continue
                if img.ndim == 2:
                    img = to_rgb(img)
                img = img[:, :, 0:3]

                # Mirror the last two input path components under output_dir.
                _paths = fimage.image_path.split('/')
                a, b, c = _paths[-3], _paths[-2], _paths[-1]
                target_dir = os.path.join(args.output_dir, a, b)
                if not os.path.exists(target_dir):
                    os.makedirs(target_dir)
                target_file = os.path.join(target_dir, c)
                warped = None

                # Strategy 1: pre-warp on annotated landmarks, re-detect.
                if fimage.landmark is not None:
                    dst = fimage.landmark.astype(np.float32)
                    tform = trans.SimilarityTransform()
                    tform.estimate(dst, src[0:3, :]*1.5+image_size[0]*0.25)
                    M = tform.params[0:2, :]
                    warped0 = cv2.warpAffine(img, M, (image_size[1]*2, image_size[0]*2), borderValue = 0.0)
                    _minsize = image_size[0]
                    bounding_boxes, points = detect_face.detect_face(warped0, _minsize, pnet, rnet, onet, threshold, factor)
                    if bounding_boxes.shape[0] > 0:
                        bindex = 0
                        det = bounding_boxes[bindex, 0:4]
                        # points arrive as (2,5); transpose to 5 (x, y) rows.
                        dst = points[:, bindex].reshape((2, 5)).T
                        tform = trans.SimilarityTransform()
                        tform.estimate(dst, src)
                        M = tform.params[0:2, :]
                        warped = cv2.warpAffine(warped0, M, (image_size[1], image_size[0]), borderValue = 0.0)
                        nrof[0] += 1

                # Strategy 2: detect in full image, match bbox by IOU > 0.3.
                if warped is None and fimage.bbox is not None:
                    _minsize = img.shape[0]//4
                    bounding_boxes, points = detect_face.detect_face(img, _minsize, pnet, rnet, onet, threshold, factor)
                    if bounding_boxes.shape[0] > 0:
                        det = bounding_boxes[:, 0:4]
                        bindex = -1
                        index2 = [0.0, 0]  # (best IOU, its index)
                        for i in range(det.shape[0]):  # was xrange: Python 2-only
                            _det = det[i]
                            iou = IOU(fimage.bbox, _det)
                            if iou > index2[0]:
                                index2[0] = iou
                                index2[1] = i
                        if index2[0] > 0.3:
                            bindex = index2[1]
                        if bindex >= 0:
                            dst = points[:, bindex].reshape((2, 5)).T
                            tform = trans.SimilarityTransform()
                            tform.estimate(dst, src)
                            M = tform.params[0:2, :]
                            warped = cv2.warpAffine(img, M, (image_size[1], image_size[0]), borderValue = 0.0)
                            nrof[1] += 1

                # Strategy 3: forced detection on the annotated bbox.
                if warped is None and fimage.bbox is not None:
                    bb = fimage.bbox
                    bounding_boxes, points = detect_face.detect_face_force(img, bb, pnet, rnet, onet)
                    assert bounding_boxes.shape[0] == 1
                    _box = bounding_boxes[0]
                    if _box[4] >= 0.3:
                        dst = points[:, 0].reshape((2, 5)).T
                        tform = trans.SimilarityTransform()
                        tform.estimate(dst, src)
                        M = tform.params[0:2, :]
                        warped = cv2.warpAffine(img, M, (image_size[1], image_size[0]), borderValue = 0.0)
                        nrof[2] += 1

                # Strategy 4: plain crop (bbox-guided if available, else a
                # 6% border trim), then resize to the target geometry.
                if warped is None:
                    roi = np.zeros((4,), dtype=np.int32)
                    roi[0] = int(img.shape[1]*0.06)
                    roi[1] = int(img.shape[0]*0.06)
                    roi[2] = img.shape[1]-roi[0]
                    roi[3] = img.shape[0]-roi[1]
                    if fimage.bbox is not None:
                        bb = fimage.bbox
                        h = bb[3]-bb[1]
                        w = bb[2]-bb[0]
                        x = bb[0]
                        y = bb[1]
                        # Widen/narrow to the output aspect ratio, clamped to
                        # the image bounds.
                        _w = int((float(h)/image_size[0])*image_size[1])
                        x += (w-_w)//2
                        x = max(0, x)
                        xw = x+_w
                        xw = min(xw, img.shape[1])
                        roi = np.array((x, y, xw, y+h), dtype=np.int32)
                        nrof[3] += 1
                    else:
                        nrof[4] += 1
                    warped = img[roi[1]:roi[3], roi[0]:roi[2], :]
                    warped = cv2.resize(warped, (image_size[1], image_size[0]))

                bgr = warped[..., ::-1]  # RGB -> BGR for cv2.imwrite
                cv2.imwrite(target_file, bgr)
                oline = '%d\t%s\t%d\n' % (1,target_file, int(fimage.classname))
                text_file.write(oline)
def parse_arguments(argv):
    """Parse the alignment script's command-line options from ``argv``."""
    parser = argparse.ArgumentParser()
    for flag, help_text in (
        ('--input-dir', 'Directory with unaligned images.'),
        ('--name', 'dataset name, can be facescrub, megaface, webface, celeb.'),
        ('--output-dir', 'Directory with aligned face thumbnails.'),
    ):
        parser.add_argument(flag, type=str, help=help_text)
    return parser.parse_args(argv)
# Script entry point: parse CLI args (all after the program name) and align.
if __name__ == '__main__':
    main(parse_arguments(sys.argv[1:]))
|
<filename>collect_app/src/main/java/org/odk/collect/android/widgets/BarcodeWidget.java
/*
* Copyright (C) 2009 University of Washington
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.odk.collect.android.widgets;
import static org.odk.collect.android.utilities.Appearances.FRONT;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.Context;
import android.util.TypedValue;
import android.view.View;
import com.google.zxing.integration.android.IntentIntegrator;
import org.javarosa.core.model.data.IAnswerData;
import org.javarosa.core.model.data.StringData;
import org.javarosa.form.api.FormEntryPrompt;
import org.odk.collect.android.R;
import org.odk.collect.android.activities.ScannerWithFlashlightActivity;
import org.odk.collect.android.databinding.BarcodeWidgetAnswerBinding;
import org.odk.collect.android.formentry.questions.QuestionDetails;
import org.odk.collect.android.listeners.PermissionListener;
import org.odk.collect.android.utilities.Appearances;
import org.odk.collect.android.utilities.CameraUtils;
import org.odk.collect.androidshared.ui.ToastUtils;
import org.odk.collect.android.widgets.interfaces.WidgetDataReceiver;
import org.odk.collect.android.widgets.utilities.WaitingForDataRegistry;
/**
* Widget that allows user to scan barcodes and add them to the form.
*/
@SuppressLint("ViewConstructor")
public class BarcodeWidget extends QuestionWidget implements WidgetDataReceiver {
    BarcodeWidgetAnswerBinding binding;

    private final WaitingForDataRegistry waitingForDataRegistry;
    private final CameraUtils cameraUtils;

    public BarcodeWidget(Context context, QuestionDetails questionDetails, WaitingForDataRegistry waitingForDataRegistry,
                         CameraUtils cameraUtils) {
        super(context, questionDetails);
        this.waitingForDataRegistry = waitingForDataRegistry;
        this.cameraUtils = cameraUtils;
    }

    @Override
    protected View onCreateAnswerView(Context context, FormEntryPrompt prompt, int answerFontSize) {
        binding = BarcodeWidgetAnswerBinding.inflate(((Activity) context).getLayoutInflater());

        // Read-only prompts get no scan button; otherwise wire it up.
        if (!prompt.isReadOnly()) {
            binding.barcodeButton.setTextSize(TypedValue.COMPLEX_UNIT_DIP, answerFontSize);
            binding.barcodeButton.setOnClickListener(v -> onButtonClick());
        } else {
            binding.barcodeButton.setVisibility(GONE);
        }
        binding.barcodeAnswerText.setTextSize(TypedValue.COMPLEX_UNIT_DIP, answerFontSize);

        String existingAnswer = prompt.getAnswerText();
        if (existingAnswer != null && !existingAnswer.isEmpty()) {
            binding.barcodeButton.setText(getContext().getString(R.string.replace_barcode));
            binding.barcodeAnswerText.setText(existingAnswer);
        }
        return binding.getRoot();
    }

    @Override
    public void clearAnswer() {
        binding.barcodeAnswerText.setText(null);
        binding.barcodeButton.setText(getContext().getString(R.string.get_barcode));
        widgetValueChanged();
    }

    @Override
    public IAnswerData getAnswer() {
        String currentText = binding.barcodeAnswerText.getText().toString();
        if (currentText.isEmpty()) {
            return null;
        }
        return new StringData(currentText);
    }

    @Override
    public void setData(Object answer) {
        binding.barcodeAnswerText.setText(stripInvalidCharacters((String) answer));
        binding.barcodeButton.setText(getContext().getString(R.string.replace_barcode));
        widgetValueChanged();
    }

    // Remove control characters, invisible characters and unused code points.
    private String stripInvalidCharacters(String data) {
        if (data == null) {
            return null;
        }
        return data.replaceAll("\\p{C}", "");
    }

    @Override
    public void setOnLongClickListener(OnLongClickListener l) {
        binding.barcodeButton.setOnLongClickListener(l);
        binding.barcodeAnswerText.setOnLongClickListener(l);
    }

    @Override
    public void cancelLongPress() {
        super.cancelLongPress();
        binding.barcodeAnswerText.cancelLongPress();
        binding.barcodeButton.cancelLongPress();
    }

    private void onButtonClick() {
        getPermissionsProvider().requestCameraPermission((Activity) getContext(), new PermissionListener() {
            @Override
            public void granted() {
                waitingForDataRegistry.waitForData(getFormEntryPrompt().getIndex());
                IntentIntegrator scanIntent = new IntentIntegrator((Activity) getContext())
                        .setCaptureActivity(ScannerWithFlashlightActivity.class);
                setCameraIdIfNeeded(getFormEntryPrompt(), scanIntent);
                scanIntent.initiateScan();
            }

            @Override
            public void denied() {
            }
        });
    }

    private void setCameraIdIfNeeded(FormEntryPrompt prompt, IntentIntegrator intent) {
        if (!Appearances.isFrontCameraAppearance(prompt)) {
            return;
        }
        if (cameraUtils.isFrontCameraAvailable()) {
            intent.addExtra(FRONT, true);
        } else {
            ToastUtils.showLongToast(getContext(), R.string.error_front_camera_unavailable);
        }
    }
}
|
package com.reiser.daily.day04;
import com.reiser.homework.linkedlist.ListNode;
/**
* @author: reiserx
* Date:2020/9/11
* Des:
*/
public class MergeTwoLists {
    public static void main(String[] args) {
        MergeTwoLists solution = new MergeTwoLists();
    }
    /**
     * Merges two sorted singly linked lists into one sorted list (LeetCode 21).
     * Nodes are relinked in place; no new nodes are allocated besides the dummy head.
     *
     * @param l1 head of the first sorted list (may be null)
     * @param l2 head of the second sorted list (may be null)
     * @return head of the merged sorted list
     */
    public ListNode mergeTwoLists(ListNode l1, ListNode l2) {
        ListNode dummy = new ListNode(-1);
        ListNode tail = dummy;
        ListNode a = l1;
        ListNode b = l2;
        while (a != null && b != null) {
            // Take the smaller head; ties go to the first list to keep the merge stable.
            if (a.val <= b.val) {
                tail.next = a;
                a = a.next;
            } else {
                tail.next = b;
                b = b.next;
            }
            tail = tail.next;
        }
        // At most one list still has nodes; append the remainder wholesale.
        tail.next = (a != null) ? a : b;
        return dummy.next;
    }
}
|
/*
* Copyright (C) 2012 Sony Mobile Communications AB
*
* This file is part of ApkAnalyser.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package analyser.gui;
import gui.SelectableFile;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
import java.util.Properties;
import java.util.StringTokenizer;
import analyser.Analyser;
/**
 * Static access to the application's persisted settings (window geometry,
 * tool paths, classpaths, user-defined injections), backed by a
 * {@link java.util.Properties} file on disk.
 *
 * Bug fix: {@link #setProperties(Properties, String)} previously read values
 * from {@code m_settings} instead of the supplied {@code props}, so copied
 * entries were wrong (or null, raising NullPointerException).
 */
public class Settings {
    protected static final File SETTINGS = new File(Analyser.PROP_NAME);
    protected static Properties m_settings;
    public static final String DEFAULT_PATH = "paths.default";
    public static final String EJAVA_PATH = "paths.ejava";
    public static final String ADB_PATH = "paths.adb";
    public static final String MAINFRAME_X = "mainframe.x";
    public static final String MAINFRAME_Y = "mainframe.y";
    public static final String MAINFRAME_W = "mainframe.width";
    public static final String MAINFRAME_H = "mainframe.height";
    public static final String MAINFRAME_TEXTWINDOW_W = "mainframe.textwindow.width";
    public static final String MAINFRAME_TEXTWINDOW_H = "mainframe.textwindow.height";
    public static final String MAINFRAME_GRAPHWINDOW_W = "mainframe.graphwindow.width";
    public static final String MAINFRAME_GRAPHWINDOW_H = "mainframe.graphwindow.height";
    public static final String MAINFRAME_TREES_DIV = "mainframe.trees.div";
    public static final String INJECTIONS_USER_DEFINED = "injections.user";
    public static final String CLASSPATH = "cp";
    public static final String MIDLETS = "midlets";
    public static final String ANDROID_SDK = "androidsdk";
    public static final String ANDROID_SDK_SELECTED = "use_androidsdk";
    public static final String CONFIRMED_BREAKING = "confirmed_breaking";
    public static final String MAINFRAME_CONTENT_DIV = "mainframe.content.div";
    protected Settings() {
    }
    public static void setSettings(Properties p) {
        m_settings = p;
    }
    /** Stores the classpath entries, preserving per-file selection state. */
    public static void setClasspath(Object[] files)
    {
        Settings.setClasspath(makeCpPropertyString(files));
    }
    /** Stores the midlet paths, preserving per-file selection state. */
    public static void setMidletsPath(Object[] files)
    {
        Settings.setMidletsPath(makeCpPropertyString(files));
    }
    public static void setUseAndroidSDK(boolean b) {
        m_settings.setProperty(ANDROID_SDK_SELECTED, b ? "true" : "false");
    }
    public static boolean getUseAndroidSDK() {
        if (m_settings.getProperty(ANDROID_SDK_SELECTED) == null) {
            return false;
        }
        return m_settings.getProperty(ANDROID_SDK_SELECTED).equals("true");
    }
    public static void setAndroidSDK(String string) {
        m_settings.setProperty(ANDROID_SDK, string);
    }
    public static String getAndroidSDK() {
        return m_settings.getProperty(ANDROID_SDK);
    }
    /** Returns the default browse path, falling back to the user's home directory. */
    public static String getDefaultPath() {
        String f = m_settings.getProperty(DEFAULT_PATH);
        if (f == null) {
            f = System.getProperty("user.home");
        }
        return f;
    }
    public static void setDefaultPath(String string) {
        m_settings.setProperty(DEFAULT_PATH, string);
    }
    /** Returns the configured ejava path, or the Windows SDK default location. */
    public static String getEjavaPath() {
        String f = m_settings.getProperty(EJAVA_PATH);
        if (f == null) {
            f = "C:\\SonyEricsson\\JavaME_SDK_CLDC\\OnDeviceDebug\\bin\\ejava.exe";
        }
        return f;
    }
    /** Returns the configured adb path, or the Windows SDK default location. */
    public static String getAdbPath() {
        String f = m_settings.getProperty(ADB_PATH);
        if (f == null) {
            f = "C:\\android-sdk-windows\\platform-tools\\adb.exe";
        }
        return f;
    }
    public static void setAdbPath(String string) {
        m_settings.setProperty(ADB_PATH, string);
    }
    public static void setEjavaPath(String string) {
        m_settings.setProperty(EJAVA_PATH, string);
    }
    // Markers prefixed to each path entry to persist its selected/unselected state.
    private static final String FSELECT = "[*]";
    private static final String FUNSELECT = "[ ]";
    /** Returns only the selected classpath entries, semicolon-separated. */
    public static String getClasspath() {
        return removeUnselectedFromClasspath(m_settings.getProperty(CLASSPATH));
    }
    public static void setClasspath(String cp) {
        m_settings.setProperty(CLASSPATH, cp);
    }
    public static File[] getSelectableClasspath() {
        return getSelectableFiles(Settings.breakString(m_settings.getProperty(CLASSPATH), ";"));
    }
    public static String getMidletsPath() {
        return removeUnselectedFromClasspath(m_settings.getProperty(MIDLETS));
    }
    public static File[] getSelectableMidletsPath() {
        return getSelectableFiles(Settings.breakString(m_settings.getProperty(MIDLETS), ";"));
    }
    public static void setMidletsPath(String midletsPath) {
        m_settings.setProperty(MIDLETS, midletsPath);
    }
    /**
     * Strips unselected ("[ ]"-prefixed) entries from a stored path string and
     * removes the selection markers from the remaining entries.
     */
    public static String removeUnselectedFromClasspath(String paths) {
        String[] pathArr = breakString(paths, ";");
        StringBuffer selPaths = new StringBuffer();
        for (int i = 0; i < pathArr.length; i++) {
            String path = pathArr[i];
            if (path.startsWith(FSELECT)) {
                selPaths.append(path.substring(FSELECT.length()));
                selPaths.append(';');
            } else if (path.startsWith(FUNSELECT)) {
                // Unselected entry: dropped from the result.
            } else {
                // Unmarked legacy entry: treated as selected.
                selPaths.append(path);
                selPaths.append(';');
            }
        }
        return selPaths.toString();
    }
    public static int getMainFrameX() {
        return getPropertyInt(MAINFRAME_X);
    }
    public static int getMainFrameY() {
        return getPropertyInt(MAINFRAME_Y);
    }
    public static int getMainFrameWidth() {
        return getPropertyInt(MAINFRAME_W);
    }
    public static int getMainFrameHeight() {
        return getPropertyInt(MAINFRAME_H);
    }
    public static int getMainFrameTreesDiv() {
        return getPropertyInt(MAINFRAME_TREES_DIV);
    }
    public static int getMainFrameContentDiv() {
        return getPropertyInt(MAINFRAME_CONTENT_DIV);
    }
    public static boolean getConfirmedBreaking() {
        return getPropertyBoolean(CONFIRMED_BREAKING);
    }
    public static void setConfirmedBreaking(boolean confirmed) {
        m_settings.setProperty(CONFIRMED_BREAKING, Boolean.toString(confirmed));
    }
    public static void setMainFrameX(int i) {
        m_settings.setProperty(MAINFRAME_X, Integer.toString(i));
    }
    public static void setMainFrameY(int i) {
        m_settings.setProperty(MAINFRAME_Y, Integer.toString(i));
    }
    public static void setMainFrameWidth(int i) {
        m_settings.setProperty(MAINFRAME_W, Integer.toString(i));
    }
    public static void setMainFrameHeight(int i) {
        m_settings.setProperty(MAINFRAME_H, Integer.toString(i));
    }
    public static void setMainFrameTreesDiv(int i) {
        m_settings.setProperty(MAINFRAME_TREES_DIV, Integer.toString(i));
    }
    public static void setMainFrameContentDiv(int i) {
        m_settings.setProperty(MAINFRAME_CONTENT_DIV, Integer.toString(i));
    }
    public static int getTextWindowWidth() {
        return getPropertyInt(MAINFRAME_TEXTWINDOW_W);
    }
    public static void setTextWindowWidth(int w) {
        m_settings.setProperty(MAINFRAME_TEXTWINDOW_W, Integer.toString(w));
    }
    public static int getTextWindowHeight() {
        return getPropertyInt(MAINFRAME_TEXTWINDOW_H);
    }
    public static void setTextWindowHeight(int h) {
        m_settings.setProperty(MAINFRAME_TEXTWINDOW_H, Integer.toString(h));
    }
    public static int getGraphWindowWidth() {
        return getPropertyInt(MAINFRAME_GRAPHWINDOW_W);
    }
    public static void setGraphWindowWidth(int w) {
        m_settings.setProperty(MAINFRAME_GRAPHWINDOW_W, Integer.toString(w));
    }
    public static int getGraphWindowHeight() {
        return getPropertyInt(MAINFRAME_GRAPHWINDOW_H);
    }
    public static void setGraphWindowHeight(int h) {
        m_settings.setProperty(MAINFRAME_GRAPHWINDOW_H, Integer.toString(h));
    }
    public static String getVersion() {
        return "5.2";
    }
    public static String getApplicationName() {
        return "ApkAnalyser";
    }
    /** Appends an injection to the comma-separated user-defined injection list. */
    public static void addUserDefinedInjection(String inj) {
        String s = m_settings.getProperty(INJECTIONS_USER_DEFINED);
        if (s != null && s.trim().length() > 0) {
            s += "," + inj;
        } else {
            s = inj;
        }
        m_settings.setProperty(INJECTIONS_USER_DEFINED, s);
    }
    public static String[] getUserDefinedInjections() {
        return breakString(m_settings.getProperty(INJECTIONS_USER_DEFINED), ",");
    }
    // HELPERS
    /** Reconstructs SelectableFiles (with selection state) from marker-prefixed paths. */
    private static File[] getSelectableFiles(String[] paths)
    {
        File[] res = new File[paths.length];
        for (int i = 0; i < res.length; i++)
        {
            if (paths[i].startsWith(Settings.FSELECT)) {
                SelectableFile sf = new SelectableFile(paths[i].substring(Settings.FSELECT.length()));
                res[i] = sf;
                sf.setSelected(true);
            } else if (paths[i].startsWith(Settings.FUNSELECT)) {
                SelectableFile sf = new SelectableFile(paths[i].substring(Settings.FUNSELECT.length()));
                res[i] = sf;
                sf.setSelected(false);
            } else {
                SelectableFile sf = new SelectableFile(paths[i]);
                res[i] = sf;
                sf.setSelected(true);
            }
        }
        return res;
    }
    /** Serializes files into a semicolon-separated string with selection markers. */
    private static String makeCpPropertyString(Object[] files) {
        StringBuffer s = new StringBuffer();
        for (int i = 0; i < files.length; i++)
        {
            if (files[i] instanceof SelectableFile) {
                if (((SelectableFile) files[i]).isSelected()) {
                    s.append(Settings.FSELECT + ((File) files[i]).getAbsolutePath());
                } else {
                    s.append(Settings.FUNSELECT + ((File) files[i]).getAbsolutePath());
                }
            } else {
                s.append(Settings.FSELECT + ((File) files[i]).getAbsolutePath());
            }
            if (i < files.length - 1) {
                s.append(';');
            }
        }
        return s.toString();
    }
    public static String getProperty(String key) {
        return m_settings.getProperty(key);
    }
    /** Returns the property parsed as int, or 0 when missing/unparsable. */
    public static int getPropertyInt(String key) {
        String iStr = m_settings.getProperty(key);
        int i = 0;
        try {
            i = Integer.parseInt(iStr);
        } catch (Exception e) {
        }
        return i;
    }
    /** Returns the property parsed as long, or 0 when missing/unparsable. */
    public static long getPropertyLong(String key) {
        String lStr = m_settings.getProperty(key);
        long l = 0;
        try {
            l = Long.parseLong(lStr);
        } catch (Exception e) {
        }
        return l;
    }
    /** Returns the property parsed as double, or 0 when missing/unparsable. */
    public static double getPropertyDouble(String key) {
        String dStr = m_settings.getProperty(key);
        double d = 0;
        try {
            d = Double.parseDouble(dStr);
        } catch (Exception e) {
        }
        return d;
    }
    /** Returns the property parsed as boolean, or false when missing/unparsable. */
    public static boolean getPropertyBoolean(String key) {
        String bStr = m_settings.getProperty(key);
        boolean b = false;
        try {
            b = Boolean.parseBoolean(bStr);
        } catch (Exception e) {
        }
        return b;
    }
    /** Returns all property keys starting with the given prefix. */
    public static String[] getPropertyKeys(String keyPrefix) {
        ArrayList<String> res = new ArrayList<String>();
        Enumeration<Object> e = m_settings.keys();
        while (e.hasMoreElements()) {
            String key = (String) e.nextElement();
            if (key.startsWith(keyPrefix)) {
                res.add(key);
            }
        }
        return res.toArray(new String[res.size()]);
    }
    /**
     * Returns the unique keys one dotted level below the prefix, e.g. for
     * prefix "a" and keys "a.b.c", "a.b.d", "a.e" the result is {"a.b", "a.e"}.
     */
    public static String[] getPropertyKeysUnique(String keyPrefix) {
        ArrayList<String> res = new ArrayList<String>();
        Enumeration<Object> e = m_settings.keys();
        while (e.hasMoreElements()) {
            String key = (String) e.nextElement();
            if (key.startsWith(keyPrefix) && key.length() > keyPrefix.length()) {
                String postKey = key.substring(keyPrefix.length() + 1); // +1 = '.'
                String uKey;
                int pIndex = postKey.indexOf('.');
                if (pIndex < 0) {
                    uKey = keyPrefix + "." + postKey;
                } else {
                    uKey = keyPrefix + "." + postKey.substring(0, pIndex);
                }
                if (!res.contains(uKey)) {
                    res.add(uKey);
                }
            }
        }
        return res.toArray(new String[res.size()]);
    }
    /** Extracts all properties under the prefix, with the prefix stripped from keys. */
    public static Properties getProperties(String keyPrefix) {
        String[] keys = getPropertyKeys(keyPrefix);
        Properties props = new Properties();
        int l = keyPrefix.length();
        for (int i = 0; i < keys.length; i++) {
            props.setProperty(keys[i].substring(l + 1), m_settings.getProperty(keys[i]));
        }
        return props;
    }
    /** Copies all entries from props into the settings under the given prefix. */
    public static void setProperties(Properties props, String keyPrefix) {
        Enumeration<Object> e = props.keys();
        while (e.hasMoreElements()) {
            String key = (String) e.nextElement();
            // FIX: read the value from the supplied props, not from m_settings
            // (the old code copied the wrong values and could NPE on missing keys).
            m_settings.setProperty(keyPrefix + "." + key, props.getProperty(key));
        }
    }
    /** Loads settings from disk, or installs defaults when no file exists. */
    public static void load() throws IOException {
        if (!SETTINGS.exists()) {
            setDefault();
        } else {
            FileInputStream fis = null;
            try {
                fis = new FileInputStream(SETTINGS);
                m_settings = new Properties();
                m_settings.load(fis);
            } finally {
                if (fis != null) {
                    fis.close();
                }
            }
        }
    }
    /** Persists the settings to disk, creating the file and parent dirs if needed. */
    public static void store() throws IOException {
        if (!SETTINGS.exists()) {
            if (SETTINGS.getParentFile() != null) {
                SETTINGS.getParentFile().mkdirs();
            }
            SETTINGS.createNewFile();
        }
        FileOutputStream fos = null;
        try {
            fos = new FileOutputStream(SETTINGS);
            m_settings.store(fos, getApplicationName() + " " + getVersion());
        } finally {
            if (fos != null) {
                fos.close();
            }
        }
    }
    protected static void setDefault() {
        m_settings = new Properties();
        setDefaultPath(System.getProperty("user.home"));
    }
    public static Properties getSettings() {
        return m_settings;
    }
    /** Splits s on the separator, returning an empty array for null input. */
    public static String[] breakString(String s, String separator) {
        if (s == null) {
            return new String[0];
        } else {
            StringTokenizer st = new StringTokenizer(s, separator);
            List<String> res = new ArrayList<String>();
            while (st.hasMoreTokens()) {
                res.add(st.nextToken());
            }
            return res.toArray(new String[res.size()]);
        }
    }
}
|
/// <summary>CRUD abstraction over the Tag store, consumed by TagsController.</summary>
public interface ITagRepository
{
    IEnumerable<Tag> GetAllTags();
    /// <returns>The tag with the given id, or null when not found.</returns>
    Tag GetTagById(int id);
    void CreateTag(Tag tag);
    void UpdateTag(Tag tag);
    void DeleteTag(int id);
}
/// <summary>REST endpoints for Tag resources, delegating persistence to ITagRepository.</summary>
public class TagsController : ControllerBase
{
    private readonly ILogger<TagsController> _logger;
    private readonly ITagRepository _repository;

    public TagsController(ILogger<TagsController> logger, ITagRepository repository)
    {
        _logger = logger;
        _repository = repository;
    }

    /// <summary>GET all tags; always 200.</summary>
    [HttpGet]
    public IActionResult GetAllTags() => Ok(_repository.GetAllTags());

    /// <summary>GET one tag by id; 404 when it does not exist.</summary>
    [HttpGet("{id}")]
    public IActionResult GetTagById(int id)
    {
        var found = _repository.GetTagById(id);
        if (found == null)
        {
            return NotFound();
        }
        return Ok(found);
    }

    /// <summary>POST a new tag; 201 with a Location pointing at GetTagById.</summary>
    [HttpPost]
    public IActionResult CreateTag([FromBody] Tag tag)
    {
        _repository.CreateTag(tag);
        return CreatedAtAction(nameof(GetTagById), new { id = tag.Id }, tag);
    }

    /// <summary>PUT an updated tag; 400 on route/body id mismatch, else 204.</summary>
    [HttpPut("{id}")]
    public IActionResult UpdateTag(int id, [FromBody] Tag tag)
    {
        if (id != tag.Id)
        {
            return BadRequest();
        }
        _repository.UpdateTag(tag);
        return NoContent();
    }

    /// <summary>DELETE a tag by id; 204 regardless of prior existence.</summary>
    [HttpDelete("{id}")]
    public IActionResult DeleteTag(int id)
    {
        _repository.DeleteTag(id);
        return NoContent();
    }
}
#!/bin/bash
# This script can be used to offload a job to the Condor batch
# system. It assumes that all nodes that may recieve a job share the
# relevant part of the file system with the node where Mosek Server
# runs. This means that:
# - The absolute path of the working directory and problem file must be
#   the same on the recieving Condir node and the sending Condor node.
# - The script directory (containing this file, the script being run,
#   extra modules etc.) must be the same on the recieving node.
# Sending SIGTERM to this process should cause it to propagate to the
# child process (condor_run), which should propagate it to the process
# that runs the actual job (remote or local). This should happen
# automatically (I hope).

# Record this shell's PID so the job can be signalled externally.
echo "$BASHPID" > "$1/PID"
condor_run "$(dirname "$0")/solve.py" "$1" "$2" "-noPID"
# FIX: the original line was `rm -rf $BASHPID > "$1/PID"`, which deleted a
# file named after the PID (almost never exists) and *truncated* the PID
# file via the redirection instead of removing it. Remove the PID file.
rm -f "$1/PID"
|
#!/bin/bash
# FIX: the original first line was `!/bin/bash` (missing '#'), so it was not a
# shebang and would be executed as a command when the script ran.
# make sure you are in path "ytk-learn"
# cd ../../..
sh demo/multiclass_linear/local_optimizer.sh
/// Requests a page of message history older than `beforeTimestamp`.
/// On success the completion receives the parsed messages and `true`;
/// on any failure (self deallocated, no data, malformed JSON) it receives
/// an empty array and `false`.
func requestHistory(beforeTimestamp: Int64,
                    completion: @escaping ([MessageImpl], Bool) -> ()) {
    webimActions.requestHistory(beforeMessageTimestamp: beforeTimestamp) { [weak self] data in
        // Bail out with (empty, false) when the response cannot be decoded
        // into a top-level JSON dictionary.
        guard let self = self, let data = data,
              let json = try? JSONSerialization.jsonObject(with: data, options: []),
              let historyBeforeResponseDictionary = json as? [String: Any] else {
            completion([MessageImpl](), false)
            return
        }
        let historyBeforeResponse = HistoryBeforeResponse(jsonDictionary: historyBeforeResponseDictionary)
        let messageImplList = historyBeforeResponse.messages.map { MessageImpl(from: $0) }
        completion(messageImplList, true)
    }
}
/**
 * Demonstration of a design violation: a model and view that reference each
 * other directly, so neither can be reused or tested in isolation.
 * The classes are intentionally left coupled — this is example code.
 */
public class MutualDependency {
    // Violation: BadModel and BadView are mutually dependent
    private static class BadModel {
        private int i;
        // Direct back-reference to the view — the source of the cycle.
        private BadView view;
        public int getI() {
            return i;
        }
        public void setI(int i) {
            this.i = i;
            // Model notifies the concrete view directly instead of via an observer interface.
            if(view != null) view.modelChanged();
        }
        public void setView(BadView view) {
            this.view = view;
        }
    }
    private static class BadView {
        private BadModel model;
        public BadView(BadModel model) {
            this.model = model;
        }
        public void modelChanged() {
            System.out.println("Model Changed: " + model.getI());
        }
    }
}
#!/bin/sh
# CocoaPods-generated embed-frameworks script: copies, strips and re-signs
# the Pods frameworks into the app bundle's Frameworks folder.
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Location of the Swift runtime dylibs for the active platform (pre-Xcode 7 embedding).
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Copies a framework ($1) into the app's Frameworks folder, strips
# architectures not in VALID_ARCHS, re-signs it, and (Xcode < 7) embeds the
# Swift runtime dylibs it links against.
install_framework()
{
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi
  local destination="${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
  if [ -L "${source}" ]; then
      echo "Symlinked..."
      source="$(readlink "${source}")"
  fi
  # use filter instead of exclude so missing patterns dont' throw errors
  echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
  local basename
  basename="$(basename -s .framework "$1")"
  # Locate the framework's main binary; fall back to a bare dylib layout.
  binary="${destination}/${basename}.framework/${basename}"
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi
  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi
  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"
  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Signs a framework with the provided identity
# No-op unless an identity is set and code signing is both required and allowed.
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identitiy
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements \"$1\""
    /usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements "$1"
  fi
}
# Strip invalid architectures
# Removes, in place, every architecture slice of $1 not listed in VALID_ARCHS.
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current file
  archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
  stripped=""
  for arch in $archs; do
    if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary" || exit 1
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
}
# Debug and Release install the identical framework list, so the two
# previously-duplicated branches are merged into one condition.
if [[ "$CONFIGURATION" == "Debug" || "$CONFIGURATION" == "Release" ]]; then
  install_framework "Pods-EasyCheckoutTests/TAPageControl.framework"
fi
|
package org.firstinspires.ftc.teamcode.subsystem.drive.drivecontroller.PID.consts;
/**
 * PID tuning constants for gyro-based heading control.
 * All setters return {@code this} so constants can be configured fluently.
 */
public class GyroPIDConstants implements PIDConstants {
    private double KP = 0.35;
    private double KI = 0.05;
    private double KD = 0.05;
    private double KF = 0.1;
    private double TOLERANCE = 5; //Degrees
    private double SETTLING_TIME = 1; //Seconds
    // Heading target is expressed in degrees within one full turn either way.
    private double TARGET_MIN_RANGE = -360;
    private double TARGET_MAX_RANGE = 360;
    // Motor power output clamp.
    private double MIN_OUTPUT = -1.0;
    private double MAX_OUTPUT = 1.0;
    private double TARGET;
    // Note: only NO_OSCILLATION is explicitly initialized; the others rely on
    // Java's default false for booleans.
    private boolean INVERTED, ABSOLUTE_SETPOINT, NO_OSCILLATION = false;
    @Override
    public double getKP() {
        return KP;
    }
    public GyroPIDConstants setKP(double KP) {
        this.KP = KP;
        return this;
    }
    @Override
    public double getKD() {
        return KD;
    }
    public GyroPIDConstants setKD(double KD) {
        this.KD = KD;
        return this;
    }
    @Override
    public double getKI() {
        return KI;
    }
    public GyroPIDConstants setKI(double KI) {
        this.KI = KI;
        return this;
    }
    @Override
    public double getKF() {
        return KF;
    }
    public GyroPIDConstants setKF(double KF) {
        this.KF = KF;
        return this;
    }
    @Override
    public double getTOLERANCE() {
        return TOLERANCE;
    }
    public GyroPIDConstants setTOLERANCE(double TOLERANCE) {
        this.TOLERANCE = TOLERANCE;
        return this;
    }
    @Override
    public double getSETTLING_TIME() {
        return SETTLING_TIME;
    }
    public GyroPIDConstants setSETTLING_TIME(double SETTLING_TIME) {
        this.SETTLING_TIME = SETTLING_TIME;
        return this;
    }
    @Override
    public double getMIN_OUTPUT() {
        return MIN_OUTPUT;
    }
    public GyroPIDConstants setMIN_OUTPUT(double MIN_OUTPUT) {
        this.MIN_OUTPUT = MIN_OUTPUT;
        return this;
    }
    @Override
    public double getMAX_OUTPUT() {
        return MAX_OUTPUT;
    }
    public GyroPIDConstants setMAX_OUTPUT(double MAX_OUTPUT) {
        this.MAX_OUTPUT = MAX_OUTPUT;
        return this;
    }
    public double getTARGET_MIN_RANGE() {
        return TARGET_MIN_RANGE;
    }
    public GyroPIDConstants setTARGET_MIN_RANGE(double TARGET_MIN_RANGE) {
        this.TARGET_MIN_RANGE = TARGET_MIN_RANGE;
        return this;
    }
    public double getTARGET_MAX_RANGE() {
        return TARGET_MAX_RANGE;
    }
    public GyroPIDConstants setTARGET_MAX_RANGE(double TARGET_MAX_RANGE) {
        this.TARGET_MAX_RANGE = TARGET_MAX_RANGE;
        return this;
    }
    @Override
    public double getTARGET() {
        return TARGET;
    }
    public GyroPIDConstants setTARGET(double TARGET) {
        this.TARGET = TARGET;
        return this;
    }
    @Override
    public boolean isINVERTED() {
        return INVERTED;
    }
    public GyroPIDConstants setINVERTED(boolean INVERTED) {
        this.INVERTED = INVERTED;
        return this;
    }
    @Override
    public boolean isABSOLUTE_SETPOINT() {
        return ABSOLUTE_SETPOINT;
    }
    public GyroPIDConstants setABSOLUTE_SETPOINT(boolean ABSOLUTE_SETPOINT) {
        this.ABSOLUTE_SETPOINT = ABSOLUTE_SETPOINT;
        return this;
    }
    @Override
    public boolean isNO_OSCILLATION() {
        return NO_OSCILLATION;
    }
    public GyroPIDConstants setNO_OSCILLATION(boolean NO_OSCILLATION) {
        this.NO_OSCILLATION = NO_OSCILLATION;
        return this;
    }
}
|
<reponame>ch1huizong/learning
#!/usr/bin/env python
# encoding: utf-8
#
# Copyright (c) 2008 <NAME> All rights reserved.
#
"""
"""
#end_pymotw_header
import os
# NOTE: this is Python 2 demo code (PyMOTW); print is a statement and these
# helpers lived in `urllib` — in Python 3 they are in `urllib.request`.
from urllib import pathname2url, url2pathname
# POSIX-style conversions: path separators map directly onto URL slashes.
print '== Default =='
path = '/a/b/c'
print 'Original:', path
print 'URL     :', pathname2url(path)
print 'Path    :', url2pathname('/d/e/f')
print
# Re-import the same names from nturl2path to show the Windows rules instead.
from nturl2path import pathname2url, url2pathname
print '== Windows, without drive letter =='
path = r'\a\b\c'
print 'Original:', path
print 'URL     :', pathname2url(path)
print 'Path    :', url2pathname('/d/e/f')
print
print '== Windows, with drive letter =='
path = r'C:\a\b\c'
print 'Original:', path
print 'URL     :', pathname2url(path)
print 'Path    :', url2pathname('/d/e/f')
|
import os
import numpy as np
def read_single_field_binary(filename, grid_coordinates):
    """Placeholder reader for a single-field binary snapshot.

    Returns the field array plus cell-center (xp, yp, zp) and staggered
    (xu, yv, zw) grid coordinates.  Replace with a real binary reader;
    `filename` and `grid_coordinates` are currently ignored.
    """
    n_cells = 10
    n_faces = n_cells + 1
    # Placeholder field: random values standing in for the real data.
    data = np.random.rand(n_cells, n_cells, n_cells)
    # Cell-center coordinates on the unit interval.
    xp = np.linspace(0.0, 1.0, n_cells)
    yp = np.linspace(0.0, 1.0, n_cells)
    zp = np.linspace(0.0, 1.0, n_cells)
    # Staggered (velocity-face) coordinates on the unit interval.
    xu = np.linspace(0.0, 1.0, n_faces)
    yv = np.linspace(0.0, 1.0, n_faces)
    zw = np.linspace(0.0, 1.0, n_faces)
    return data, xp, yp, zp, xu, yv, zw
def test_ldc():
    # Test function to validate the correctness of read_single_field_binary()
    # NOTE(review): data_ref is random placeholder data, so the assert_allclose
    # comparisons below can only pass with the real reference dataset — verify
    # against the actual fixtures before relying on this test.
    data_ref = np.random.rand(10, 10)  # Placeholder reference dataset
    # Branch selection is driven by the current working directory name
    # ("data_y" / "data_z"), presumably one per decomposition direction — TODO confirm.
    if "data_y" in os.getcwd():
        data, xp, yp, zp, xu, yv, zw = read_single_field_binary("vex_fld_0001500.bin", np.array([1, 1, 1]))
        # Mid-plane index along the last axis.
        islice = int(np.size(data[0, 0, :]) / 2)
        np.testing.assert_allclose(data[islice, 0, :], data_ref[:, 1], rtol=1e-7, atol=0)
    if "data_z" in os.getcwd():
        data, xp, yp, zp, xu, yv, zw = read_single_field_binary("vex_fld_0001500.bin", np.array([1, 1, 1]))
        # Mid-plane index along the middle axis.
        islice = int(np.size(data[0, :, 0]) / 2)
        np.testing.assert_allclose(data[islice, :, 0], data_ref[:, 1], rtol=1e-7, atol=0)
if __name__ == "__main__":
    test_ldc()
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
// THIS IS A GENERATED FILE. DO NOT MODIFY MANUALLY. @see scripts/compile-icons.js
import * as React from 'react';
// Accessibility props injected by SVGR: an optional <title> element and its id.
interface SVGRProps {
  title?: string;
  titleId?: string;
}
// Generated 16x16 "editor strikethrough" icon component (see the generation
// note at the top of this file — regenerate rather than editing the path data).
const EuiIconEditorStrike = ({
  title,
  titleId,
  ...props
}: React.SVGProps<SVGSVGElement> & SVGRProps) => (
  <svg
    xmlns="http://www.w3.org/2000/svg"
    width={16}
    height={16}
    viewBox="0 0 16 16"
    aria-labelledby={titleId}
    {...props}
  >
    {title ? <title id={titleId}>{title}</title> : null}
    <path d="M10.023 10h1.274c.006.08.01.164.01.25a2.557 2.557 0 01-.883 1.949c-.284.25-.627.446-1.03.588A4.087 4.087 0 018.028 13a4.616 4.616 0 01-3.382-1.426c-.193-.259-.193-.5 0-.724.193-.223.438-.266.735-.13.343.363.748.655 1.213.876.466.22.949.33 1.449.33.637 0 1.132-.144 1.485-.433.353-.29.53-.67.53-1.14a1.72 1.72 0 00-.034-.353zM5.586 7a2.49 2.49 0 01-.294-.507 2.316 2.316 0 01-.177-.934c0-.363.076-.701.228-1.015.152-.314.363-.586.633-.816.27-.23.588-.41.955-.537A3.683 3.683 0 018.145 3c.578 0 1.112.11 1.603.33.49.221.907.508 1.25.861.16.282.16.512 0 .692-.16.18-.38.214-.662.102a3.438 3.438 0 00-.978-.669 2.914 2.914 0 00-1.213-.242c-.54 0-.973.125-1.302.375-.328.25-.492.595-.492 1.036 0 .236.046.434.14.596.092.162.217.304.374.426.157.123.329.23.515.324.119.06.24.116.362.169H5.586zM2.5 8h11a.5.5 0 110 1h-11a.5.5 0 010-1z" />
  </svg>
);
export const icon = EuiIconEditorStrike;
<gh_stars>0
package com.breakersoft.plow.test.thrift.dao;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.util.List;
import java.util.UUID;
import javax.annotation.Resource;
import org.junit.Test;
import org.springframework.test.annotation.Rollback;
import com.breakersoft.plow.ExitStatus;
import com.breakersoft.plow.Signal;
import com.breakersoft.plow.Task;
import com.breakersoft.plow.dispatcher.dao.DispatchDao;
import com.breakersoft.plow.dispatcher.dao.DispatchTaskDao;
import com.breakersoft.plow.dispatcher.dao.ProcDao;
import com.breakersoft.plow.dispatcher.domain.DispatchJob;
import com.breakersoft.plow.dispatcher.domain.DispatchNode;
import com.breakersoft.plow.dispatcher.domain.DispatchProc;
import com.breakersoft.plow.dispatcher.domain.DispatchTask;
import com.breakersoft.plow.event.JobLaunchEvent;
import com.breakersoft.plow.service.JobService;
import com.breakersoft.plow.test.AbstractTest;
import com.breakersoft.plow.thrift.JobSpecT;
import com.breakersoft.plow.thrift.TaskFilterT;
import com.breakersoft.plow.thrift.TaskState;
import com.breakersoft.plow.thrift.TaskStatsT;
import com.breakersoft.plow.thrift.TaskT;
import com.breakersoft.plow.thrift.dao.ThriftTaskDao;
public class ThriftTaskDaoTests extends AbstractTest {
@Resource
JobService jobService;
@Resource
ThriftTaskDao thriftTaskDao;
@Resource
DispatchTaskDao dispatchTaskDao;
@Resource
DispatchDao dispatchDao;
@Resource
ProcDao procDao;
@Test
public void testGetTask() {
JobSpecT spec = getTestJobSpec();
jobService.launch(spec);
@SuppressWarnings("deprecation")
UUID id = simpleJdbcTemplate.queryForObject(
"SELECT pk_task FROM task LIMIT 1", UUID.class);
TaskT task = thriftTaskDao.getTask(id);
assertEquals(id, UUID.fromString(task.id));
}
@Test
public void testGetTasks() {
JobSpecT spec = getTestJobSpec();
JobLaunchEvent event = jobService.launch(spec);
TaskFilterT filter = new TaskFilterT();
filter.jobId = event.getJob().getJobId().toString();
List<TaskT> task = thriftTaskDao.getTasks(filter);
assertTrue(task.size() > 0);
}
@Test
public void testGetTasksByJob() {
JobSpecT spec = getTestJobSpec();
JobLaunchEvent event = jobService.launch(spec);
DispatchNode node = dispatchDao.getDispatchNode(
nodeService.createNode(getTestNodePing()).getName());
DispatchJob job = new DispatchJob(event.getJob());
List<DispatchTask> tasks = dispatchTaskDao.getDispatchableTasks(job, node);
assertTrue(dispatchTaskDao.reserve(tasks.get(0)));
DispatchProc proc = procDao.create(node, tasks.get(0));
dispatchTaskDao.start(tasks.get(0), proc);
TaskFilterT filter = new TaskFilterT();
filter.jobId = event.getJob().getJobId().toString();
List<TaskT> task = thriftTaskDao.getTasks(filter);
assertTrue(task.size() > 0);
}
@Test
public void testGetRunningTasks() {
JobSpecT spec = getTestJobSpec();
JobLaunchEvent event = jobService.launch(spec);
DispatchNode node = dispatchDao.getDispatchNode(
nodeService.createNode(getTestNodePing()).getName());
DispatchJob job = new DispatchJob(event.getJob());
List<DispatchTask> tasks = dispatchTaskDao.getDispatchableTasks(job, node);
assertTrue(dispatchTaskDao.reserve(tasks.get(0)));
DispatchProc proc = procDao.create(node, tasks.get(0));
dispatchTaskDao.start(tasks.get(0), proc);
TaskFilterT filter = new TaskFilterT();
filter.jobId = event.getJob().getJobId().toString();
filter.addToStates(TaskState.RUNNING);
List<TaskT> task = thriftTaskDao.getTasks(filter);
assertTrue(task.size() > 0);
dispatchTaskDao.stop(tasks.get(0), TaskState.DEAD, 1, 1);
task = thriftTaskDao.getTasks(filter);
assertTrue(task.size() == 0);
}
@Test
public void testGetTaskStats() throws InterruptedException {
JobSpecT spec = getTestJobSpec();
JobLaunchEvent event = jobService.launch(spec);
DispatchNode node = dispatchDao.getDispatchNode(
nodeService.createNode(getTestNodePing()).getName());
DispatchJob job = new DispatchJob(event.getJob());
List<DispatchTask> tasks = dispatchTaskDao.getDispatchableTasks(job, node);
Task t = tasks.get(0);
assertTrue(dispatchTaskDao.reserve(tasks.get(0)));
DispatchProc proc = procDao.create(node, tasks.get(0));
dispatchTaskDao.start(tasks.get(0), proc);
List<TaskStatsT> stats = thriftTaskDao.getTaskStats(tasks.get(0).getTaskId());
assertEquals(1, stats.size());
Thread.sleep(1000);
dispatchTaskDao.stop(t, TaskState.SUCCEEDED, ExitStatus.SUCCESS, Signal.NORMAL);
stats = thriftTaskDao.getTaskStats(tasks.get(0).getTaskId());
assertEquals(1, stats.size());
}
@Test
public void testUpdatedTasks() throws InterruptedException {
    // Launch a job and flip one of its tasks WAITING -> EATEN so the task
    // row receives a fresh update timestamp.
    JobSpecT spec = getTestJobSpec();
    JobLaunchEvent event = jobService.launch(spec);
    @SuppressWarnings("deprecation")
    UUID id = simpleJdbcTemplate.queryForObject(
            "SELECT pk_task FROM task LIMIT 1", UUID.class);
    Task t = jobService.getTask(id);
    jobService.setTaskState(t, TaskState.WAITING, TaskState.EATEN);
    // Filtering with a lastUpdateTime one second in the past must pick up
    // the just-updated task...
    TaskFilterT filter = new TaskFilterT();
    filter.jobId = event.getJob().getJobId().toString();
    filter.lastUpdateTime = System.currentTimeMillis() - 1000;
    List<TaskT> tasks = thriftTaskDao.getTasks(filter);
    assertEquals(1, tasks.size());
    // ...while a lastUpdateTime of "now" must not.
    filter.lastUpdateTime = System.currentTimeMillis();
    tasks = thriftTaskDao.getTasks(filter);
    assertEquals(0, tasks.size());
}
@Test
public void getLogPath() {
    // Launch a job and resolve the log path of the task with the highest
    // task order.
    JobSpecT spec = getTestJobSpec();
    jobService.launch(spec);
    @SuppressWarnings("deprecation")
    UUID id = simpleJdbcTemplate.queryForObject(
            "SELECT pk_task FROM task ORDER BY int_task_order DESC LIMIT 1", UUID.class);
    // NOTE(review): the literal path assumes the test job spec's log root is
    // /tmp/plow/unittests and the job/task naming used by getTestJobSpec() -
    // confirm if the spec fixture changes.
    String logPath = "/tmp/plow/unittests/test/0010-test_ls.-1.log";
    String result = thriftTaskDao.getLogPath(id);
    assertEquals(logPath, result);
}
}
|
<filename>Sample app/Reduxion-iOS sample app/Reduxion_iOS.h
//
// Reduxion_iOS.h
// Reduxion-iOS
//
// Created by <NAME> on 8/25/18.
// Copyright © 2018 <NAME>. All rights reserved.
//
#import <UIKit/UIKit.h>
//! Project version number for Reduxion_iOS.
FOUNDATION_EXPORT double Reduxion_iOSVersionNumber;
//! Project version string for Reduxion_iOS.
FOUNDATION_EXPORT const unsigned char Reduxion_iOSVersionString[];
// In this header, you should import all the public headers of your framework using statements like #import <Reduxion_iOS/PublicHeader.h>
|
<reponame>syrflover/iterator-helper<gh_stars>1-10
import { assertEquals } from 'https://deno.land/std/testing/asserts.ts';
import { iterator } from '../mod.ts';
// Flattening string splits: every element of the source iterator is split on
// spaces and the pieces are yielded in order (an empty string maps to ['']).
Deno.test(`flatMap() [\`it's Sunny in\`, '', 'California'] split(' ')`, async () => {
    const source = iterator([`it's Sunny in`, '', 'California']);
    const expected = [`it's`, 'Sunny', 'in', '', 'California'];
    const collected: string[] = [];
    for await (const piece of source.flatMap((s) => s.split(' '))) {
        collected.push(piece);
    }
    assertEquals(collected, expected);
});
// Flattening nested arrays: each inner array is mapped to its doubled values
// and the results are yielded as one flat sequence.
Deno.test('flatMap() [[1,2,3],[4,5,6]] * 2', async () => {
    const source = iterator([
        [1, 2, 3],
        [4, 5, 6],
    ]);
    const expected = [2, 4, 6, 8, 10, 12];
    const collected: (number | number[])[] = [];
    for await (const doubled of source.flatMap((row) => row.map((n) => n * 2))) {
        collected.push(doubled);
    }
    assertEquals(collected, expected);
});
|
from pytest import fixture
from selenium.webdriver.common.keys import Keys
from mysign_app.models import Company, User
from mysign_app.tests.frontend.helpers import authenticate_selenium
@fixture(autouse=True)
def user_setup(selenium, live_server):
    """Seed two companies and two users, authenticate as an admin, and open
    the user-management page before every test in this module."""
    Company.objects.create(name="Mindhash", email="<EMAIL>")
    Company.objects.create(name="Test", email="<EMAIL>")
    User.objects.create(first_name="John", last_name="Doe", email="<EMAIL>", is_admin=False)
    User.objects.create(first_name="Jan", last_name="Janssen", email="<EMAIL>", is_admin=True)
    # Log in via the shared helper (creates/uses an admin session).
    authenticate_selenium(selenium, live_server, is_admin=True, first_name="admin")
    selenium.maximize_window()
    selenium.get(live_server.url + "/admin/users/")
def test_card_selected(selenium):
    """Clicking a row toggles its parent's 'selected' class; selecting one
    row deselects the other."""
    rows = selenium.find_elements_by_xpath("//td[@class='name sorting_1']")
    first_cell, second_cell = rows[0], rows[1]
    first_row = first_cell.find_element_by_xpath('..')
    second_row = second_cell.find_element_by_xpath('..')

    def is_selected(row):
        return "selected" in row.get_attribute("class")

    # Initially nothing is selected.
    assert not is_selected(first_row)
    assert not is_selected(second_row)
    # Selecting the first row marks only that row.
    first_cell.click()
    assert is_selected(first_row)
    assert not is_selected(second_row)
    # Selecting the second row moves the selection.
    second_cell.click()
    assert not is_selected(first_row)
    assert is_selected(second_row)
def test_card_form_data(selenium):
    """The edit form mirrors the selected row and clears when deselected."""
    # checks if form data is empty when no card is selected
    # first name
    assert "" == selenium.find_element_by_id('id_first_name').get_attribute('value')
    # last name
    assert "" == selenium.find_element_by_id('id_last_name').get_attribute('value')
    # email
    assert "" == selenium.find_element_by_id('id_email').get_attribute('value')
    # company
    assert "" == selenium.find_element_by_id('id_company').get_attribute('value')
    # checks if form data is filled correctly
    assert len(selenium.find_elements_by_xpath("//td[@class='name sorting_1']")) == 3
    # NOTE(review): the row labels below are anonymized placeholders; given
    # the assertions, the first click presumably targets John Doe's row and
    # the second Jan Janssen's - confirm against the original repository.
    selenium.find_element_by_xpath("//td[@class='name sorting_1' and text()='<NAME>']").click()
    assert 'John' == selenium.find_element_by_id('id_first_name').get_attribute('value')
    assert 'Doe' == selenium.find_element_by_id('id_last_name').get_attribute('value')
    assert '<EMAIL>' == selenium.find_element_by_id('id_email').get_attribute('value')
    assert '' == selenium.find_element_by_id('id_company').get_attribute('value')
    assert not selenium.find_element_by_id('id_is_admin').is_selected()
    selenium.find_element_by_xpath("//td[@class='name sorting_1' and text()='<NAME>']").click()
    assert 'Jan' == selenium.find_element_by_id('id_first_name').get_attribute('value')
    assert 'Janssen' == selenium.find_element_by_id('id_last_name').get_attribute('value')
    assert '<EMAIL>' == selenium.find_element_by_id('id_email').get_attribute('value')
    assert '' == selenium.find_element_by_id('id_company').get_attribute('value')
    assert selenium.find_element_by_id('id_is_admin').is_selected()
    # checks if form data is empty when card is deselected
    selenium.find_element_by_xpath("//td[@class='name sorting_1' and text()='<NAME>']").click()
    # first name
    assert "" == selenium.find_element_by_id('id_first_name').get_attribute('value')
    # last name
    assert "" == selenium.find_element_by_id('id_last_name').get_attribute('value')
    # email
    assert "" == selenium.find_element_by_id('id_email').get_attribute('value')
    # company
    assert "" == selenium.find_element_by_id('id_company').get_attribute('value')
def test_disabled_if_none_selected(selenium):
    """Form fields and the save button stay disabled until a row is picked,
    and become enabled once one is."""
    field_ids = ('id_company', 'id_first_name', 'id_last_name',
                 'id_is_admin', 'submitButton')
    # Nothing selected: every control is disabled.
    for element_id in field_ids:
        assert not selenium.find_element_by_id(element_id).is_enabled()
    # Select a row, then every control is enabled.
    selenium.find_element_by_xpath("//td[@class='name sorting_1' and text()='<NAME>']").click()
    for element_id in field_ids:
        assert selenium.find_element_by_id(element_id).is_enabled()
def test_save_button(selenium):
    """Editing a user and saving persists the change and refreshes the table."""
    card = selenium.find_element_by_xpath("//td[@class='name sorting_1' and text()='<NAME>']")
    card_parent = card.find_element_by_xpath('..')
    # check if card has no company data
    card.click()
    assert '' == card_parent.find_element_by_xpath("//td[@class='company.name']").text
    # select a new company (the unsaved-changes banner must still be hidden)
    assert not selenium.find_element_by_id('collapseDiv').is_displayed()
    selenium.find_element_by_xpath("// select[ @ id = 'id_company'] / option[text() = 'Mindhash']").click()
    selenium.find_element_by_id('id_first_name').clear()
    selenium.find_element_by_id('id_first_name').send_keys("Las")
    selenium.find_element_by_id('id_last_name').clear()
    selenium.find_element_by_id('id_last_name').send_keys("Ligt")
    selenium.find_element_by_id('id_email').clear()
    selenium.find_element_by_id('id_email').send_keys("<EMAIL>")
    # check if save reminder shows up
    assert selenium.find_element_by_id('collapseDiv').is_displayed()
    # save
    selenium.find_element_by_id('submitButton').click()
    # reload cards (the saved row now carries the 'active' class)
    card = selenium.find_element_by_xpath("//td[@class='name active sorting_1' and text()='<NAME>']")
    card_parent = card.find_element_by_xpath('..')
    card.click()
    # check if card now does have company data
    assert 'Mindhash' == card_parent.find_element_by_xpath("//td[@class='company.name active']").text
    assert '<NAME>' == card.text
    assert '<EMAIL>' == selenium.find_element_by_id('id_email').get_attribute('value')
def test_invalid_save(selenium):
    """Saving a user that is both admin and company-linked shows a form error."""
    card = selenium.find_element_by_xpath("//td[@class='name sorting_1' and text()='<NAME>']")
    card.click()
    # Make form invalid (by setting admin and company)
    assert selenium.find_element_by_id('id_is_admin').is_selected()
    selenium.find_element_by_xpath("// select[ @ id = 'id_company'] / option[text() = 'Mindhash']").click()
    selenium.find_element_by_id('submitButton').click()
    # Check that there is an error alert in the form
    error_card = selenium.find_element_by_xpath("//div[@class='alert alert-block alert-danger']")
    assert error_card
    assert error_card.text == 'Company and is_admin cannot set both'
def test_remove_button(selenium):
    """Deleting a user asks for confirmation; cancel keeps the row, accept
    removes it."""
    cards = selenium.find_elements_by_xpath("//td[@class='name sorting_1']")
    # check that the total amount of cards is 3 (the fixture data yields
    # three user rows)
    assert len(cards) == 3
    # select the card and press delete
    cards[0].click()
    selenium.find_element_by_id("deleteButton").click()
    # check chrome popup, click "Cancel" - nothing is removed
    selenium.switch_to.alert.dismiss()
    cards = selenium.find_elements_by_xpath("//td[@class='name sorting_1']")
    assert len(cards) == 3
    selenium.find_element_by_id("deleteButton").click()
    # check chrome popup, click "Accept" - the row is removed
    selenium.switch_to.alert.accept()
    cards = selenium.find_elements_by_xpath("//td[@class='name sorting_1']")
    assert len(cards) == 2
def test_search(selenium):
    """The table search filters rows by name and by linked company name."""
    cards = selenium.find_elements_by_xpath("//td[@class='name sorting_1']")
    assert len(cards) == 3
    search = selenium.find_element_by_xpath("//input[@class='form-control w-100']")
    # searching by first name narrows the table to a single row
    search.send_keys("John")
    cards = selenium.find_elements_by_xpath("//td[@class='name sorting_1']")
    assert len(cards) == 1
    search.clear()
    search.send_keys("Jan")
    cards = selenium.find_elements_by_xpath("//td[@class='name sorting_1']")
    assert len(cards) == 1
    search.clear()
    # a term matching nothing yields an empty table
    search.send_keys("MySign")
    cards = selenium.find_elements_by_xpath("//td[@class='name sorting_1']")
    assert len(cards) == 0
    search.clear()
    search.send_keys(Keys.ENTER)
    # link company
    card = selenium.find_elements_by_xpath("//td[@class='name sorting_1']")[1]
    card.click()
    selenium.find_element_by_xpath("// select[ @ id = 'id_company'] / option[text() = 'Mindhash']").click()
    # a user may not be admin and company-linked at the same time
    if selenium.find_element_by_id('id_is_admin').is_selected():
        selenium.find_element_by_id('id_is_admin').click()
    selenium.find_element_by_id('submitButton').click()
    # search for company
    search = selenium.find_element_by_xpath("//input[@class='form-control w-100']")
    # NOTE(review): "M<PASSWORD>" looks like a redacted literal (presumably
    # "Mindhash") - confirm against the original repository.
    search.send_keys("M<PASSWORD>")
    cards_not_active = selenium.find_elements_by_xpath("//td[@class='name sorting_1']")
    cards_active = selenium.find_elements_by_xpath("//td[@class='name active sorting_1']")
    assert len(cards_not_active) + len(cards_active) == 1
|
//go:build go1.7
// +build go1.7
package ini
import (
"reflect"
"testing"
)
// TestSkipper exercises the skipper state machine: Skip() arms skipping,
// Continue() disarms it, and a newline as the previous token stops the
// skipping again (see the "do not skip" case).
func TestSkipper(t *testing.T) {
	// Tokens used to drive the state machine: a literal id token and a
	// newline token.
	idTok, _, _ := newLitToken([]rune("id"))
	nlTok := newToken(TokenNL, []rune("\n"), NoneType)
	cases := []struct {
		name string
		// Fn mutates a freshly constructed skipper before the assertions.
		Fn func(s *skipper)
		// param is passed to ShouldSkip.
		param Token
		// expected is the return value of ShouldSkip(param).
		expected bool
		// expectedShouldSkip is the internal flag right after Fn ran.
		expectedShouldSkip bool
		expectedPrevTok    Token
	}{
		{
			name: "empty case",
			Fn: func(s *skipper) {
			},
			param:           emptyToken,
			expectedPrevTok: emptyToken,
		},
		{
			name: "skip case",
			Fn: func(s *skipper) {
				s.Skip()
			},
			param:              idTok,
			expectedShouldSkip: true,
			expected:           true,
			expectedPrevTok:    emptyToken,
		},
		{
			name: "continue case",
			Fn: func(s *skipper) {
				s.Continue()
			},
			param:           emptyToken,
			expectedPrevTok: emptyToken,
		},
		{
			name: "skip then continue case",
			Fn: func(s *skipper) {
				s.Skip()
				s.Continue()
			},
			param:           emptyToken,
			expectedPrevTok: emptyToken,
		},
		{
			// A preceding newline makes ShouldSkip return false even though
			// the skipper was armed.
			name: "do not skip case",
			Fn: func(s *skipper) {
				s.Skip()
				s.prevTok = nlTok
			},
			param:              idTok,
			expectedShouldSkip: true,
			expectedPrevTok:    nlTok,
		},
	}
	for _, c := range cases {
		t.Run(c.name, func(t *testing.T) {
			s := newSkipper()
			c.Fn(&s)
			// Internal flag and tracked previous token are checked before
			// ShouldSkip, since ShouldSkip may update the skipper's state.
			if e, a := c.expectedShouldSkip, s.shouldSkip; e != a {
				t.Errorf("%s: expected %t, but received %t", c.name, e, a)
			}
			if e, a := c.expectedPrevTok, s.prevTok; !reflect.DeepEqual(e, a) {
				t.Errorf("%s: expected %v, but received %v", c.name, e, a)
			}
			if e, a := c.expected, s.ShouldSkip(c.param); e != a {
				t.Errorf("%s: expected %t, but received %t", c.name, e, a)
			}
		})
	}
}
|
<gh_stars>1-10
""":mod:`crawler.serializers` --- Serializer for crawler data
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
import datetime
def payload_serializer(*, type: str, id: int = None, link: str, count: int,
                       title: str) -> dict:
    """Build a crawler payload dict stamped with the current UTC time.

    The returned mapping always contains the keys ``type``, ``link``,
    ``count``, ``title``, ``date`` (ISO-8601, timezone-aware UTC) and
    ``id`` (which may be ``None`` when not supplied).
    """
    now_utc = datetime.datetime.now(tz=datetime.timezone.utc)
    return {
        'type': type,
        'link': link,
        'count': count,
        'title': title,
        'date': now_utc.isoformat(),
        'id': id,
    }
|
<filename>src/add-ons/kernel/bus_managers/virtio/VirtioQueue.cpp
/*
* Copyright 2013, 2018, <NAME>, <EMAIL>.
* Distributed under the terms of the MIT License.
*/
#include "VirtioPrivate.h"
/*!	Round \a size up to the next multiple of the (power-of-two) page size. */
static inline uint32
round_to_pagesize(uint32 size)
{
	const uint32 mask = B_PAGE_SIZE - 1;
	return (size + mask) & ~mask;
}
/*!	Allocate a physically contiguous kernel area.
	\param virt        out: virtual base address (may be NULL)
	\param phy         out: physical base address (may be NULL)
	\param size        requested size in bytes; rounded up to whole pages
	\param protection  protection flags forwarded to create_area()
	\param name        area name, also used in log output
	\return the new area's id, or B_ERROR on failure
	NOTE(review): failures return the generic B_ERROR instead of the
	underlying error code (areaid/rv) - confirm callers don't need it.
*/
area_id
alloc_mem(void **virt, phys_addr_t *phy, size_t size, uint32 protection,
	const char *name)
{
	physical_entry pe;
	void * virtadr;
	area_id areaid;
	status_t rv;

	TRACE("allocating %ld bytes for %s\n", size, name);

	// Areas are whole pages; round the request up.
	size = round_to_pagesize(size);
	// B_CONTIGUOUS yields one physically contiguous run, so a single
	// physical_entry below suffices to describe the mapping.
	areaid = create_area(name, &virtadr, B_ANY_KERNEL_ADDRESS, size,
		B_CONTIGUOUS, protection);
	if (areaid < B_OK) {
		ERROR("couldn't allocate area %s\n", name);
		return B_ERROR;
	}
	rv = get_memory_map(virtadr, size, &pe, 1);
	if (rv < B_OK) {
		delete_area(areaid);
		ERROR("couldn't get mapping for %s\n", name);
		return B_ERROR;
	}
	if (virt)
		*virt = virtadr;
	if (phy)
		*phy = pe.address;
	TRACE("area = %" B_PRId32 ", size = %ld, virt = %p, phy = %#" B_PRIxPHYSADDR "\n",
		areaid, size, virtadr, pe.address);
	return areaid;
}
// Bookkeeping for one in-flight transfer: the caller's cookie, the number of
// ring descriptors the transfer occupies and, optionally, a physically
// contiguous indirect descriptor table.
class TransferDescriptor {
public:
	TransferDescriptor(VirtioQueue* queue,
		uint16 indirectMaxSize);
	~TransferDescriptor();

	// B_OK, or the allocation error of the indirect table.
	status_t InitCheck() { return fStatus; }
	// Number of ring descriptors currently claimed by this transfer.
	uint16 Size() { return fDescriptorCount; }
	void SetTo(uint16 size, void *cookie);
	void* Cookie() { return fCookie; }
	void Unset();
	// Indirect descriptor table, or NULL when indirect is disabled.
	struct vring_desc* Indirect() { return fIndirect; }
	phys_addr_t PhysAddr() { return fPhysAddr; }

private:
	status_t fStatus;
	VirtioQueue* fQueue;
	void* fCookie;				// caller cookie, handed back on dequeue
	struct vring_desc* fIndirect;		// indirect table (NULL if unused)
	size_t fAreaSize;
	area_id fArea;				// area backing fIndirect, or -1
	phys_addr_t fPhysAddr;			// physical address of fIndirect
	uint16 fDescriptorCount;		// ring descriptors in use
};
/*!	When \a indirectMaxSize > 0, allocates a physically contiguous table of
	that many vring_desc entries and pre-links it into a free chain
	terminated by UINT16_MAX; otherwise no indirect table is created.
	Check InitCheck() after construction.
*/
TransferDescriptor::TransferDescriptor(VirtioQueue* queue, uint16 indirectMaxSize)
	: fQueue(queue),
	fCookie(NULL),
	fIndirect(NULL),
	fAreaSize(0),
	fArea(-1),
	fPhysAddr(0),
	fDescriptorCount(0)
{
	fStatus = B_OK;
	struct vring_desc* virtAddr;
	phys_addr_t physAddr;

	if (indirectMaxSize > 0) {
		fAreaSize = indirectMaxSize * sizeof(struct vring_desc);
		fArea = alloc_mem((void **)&virtAddr, &physAddr, fAreaSize, 0,
			"virtqueue");
		if (fArea < B_OK) {
			fStatus = fArea;
			return;
		}
		memset(virtAddr, 0, fAreaSize);
		fIndirect = virtAddr;
		fPhysAddr = physAddr;
		// Pre-link every entry to its successor; the last entry marks the
		// end of the chain with UINT16_MAX.
		for (uint16 i = 0; i < indirectMaxSize - 1; i++)
			fIndirect[i].next = i + 1;
		fIndirect[indirectMaxSize - 1].next = UINT16_MAX;
	}
}
TransferDescriptor::~TransferDescriptor()
{
	// fArea is -1 when no indirect table was allocated.
	if (fArea > B_OK)
		delete_area(fArea);
}


/*!	Associate this descriptor with a pending transfer occupying \a size ring
	descriptors; \a cookie is handed back to the caller on dequeue. */
void
TransferDescriptor::SetTo(uint16 size, void *cookie)
{
	fCookie = cookie;
	fDescriptorCount = size;
}


/*!	Mark the descriptor as unused again. */
void
TransferDescriptor::Unset()
{
	fCookie = NULL;
	fDescriptorCount = 0;
}
// #pragma mark -
/*!	Allocates the vring for queue \a queueNumber, pre-links all ring
	descriptors into a free chain and registers the ring with the device.
	On failure fStatus is set; callers must check it before using the queue.
*/
VirtioQueue::VirtioQueue(VirtioDevice* device, uint16 queueNumber,
	uint16 ringSize)
	:
	fDevice(device),
	fQueueNumber(queueNumber),
	fRingSize(ringSize),
	fRingFree(ringSize),
	fRingHeadIndex(0),
	fRingUsedIndex(0),
	fStatus(B_OK),
	fIndirectMaxSize(0),
	fCallback(NULL),
	fCookie(NULL)
{
	fDescriptors = new(std::nothrow) TransferDescriptor*[fRingSize];
	if (fDescriptors == NULL) {
		fStatus = B_NO_MEMORY;
		return;
	}

	uint8* virtAddr;
	phys_addr_t physAddr;
	fAreaSize = vring_size(fRingSize, device->Alignment());
	fArea = alloc_mem((void **)&virtAddr, &physAddr, fAreaSize, 0,
		"virtqueue");
	if (fArea < B_OK) {
		fStatus = fArea;
		return;
	}
	memset(virtAddr, 0, fAreaSize);
	vring_init(&fRing, fRingSize, virtAddr, device->Alignment());

	// Chain all ring descriptors into a free list ending in UINT16_MAX;
	// fRingHeadIndex (0) is the head of that list.
	for (uint16 i = 0; i < fRingSize - 1; i++)
		fRing.desc[i].next = i + 1;
	fRing.desc[fRingSize - 1].next = UINT16_MAX;

	// NOTE(review): 128 is a fixed cap for indirect descriptor tables -
	// confirm it is sufficient for all drivers on this bus.
	if ((fDevice->Features() & VIRTIO_FEATURE_RING_INDIRECT_DESC) != 0)
		fIndirectMaxSize = 128;

	for (uint16 i = 0; i < fRingSize; i++) {
		fDescriptors[i] = new TransferDescriptor(this, fIndirectMaxSize);
		if (fDescriptors[i] == NULL || fDescriptors[i]->InitCheck() != B_OK) {
			fStatus = B_NO_MEMORY;
			return;
		}
	}

	DisableInterrupt();
	device->SetupQueue(fQueueNumber, physAddr);
}
VirtioQueue::~VirtioQueue()
{
	delete_area(fArea);
	for (uint16 i = 0; i < fRingSize; i++) {
		delete fDescriptors[i];
	}
	delete[] fDescriptors;
}


/*!	Install the per-queue callback invoked from Interrupt(). */
status_t
VirtioQueue::SetupInterrupt(virtio_callback_func handler, void *cookie)
{
	fCallback = handler;
	fCookie = cookie;

	return B_OK;
}


/*!	Ask the host not to interrupt for this queue. Only possible when the
	event-index feature was not negotiated; per the flag's semantics this
	is advisory, the host may still interrupt. */
void
VirtioQueue::DisableInterrupt()
{
	if ((fDevice->Features() & VIRTIO_FEATURE_RING_EVENT_IDX) == 0)
		fRing.avail->flags |= VRING_AVAIL_F_NO_INTERRUPT;
}


/*!	Re-allow interrupts for this queue. */
void
VirtioQueue::EnableInterrupt()
{
	if ((fDevice->Features() & VIRTIO_FEATURE_RING_EVENT_IDX) == 0)
		fRing.avail->flags &= ~VRING_AVAIL_F_NO_INTERRUPT;
}


/*!	Notify the device that new buffers were made available. */
void
VirtioQueue::NotifyHost()
{
	fDevice->NotifyQueue(fQueueNumber);
}


/*!	Queue interrupt handler: runs the registered callback with this queue's
	interrupts masked. */
status_t
VirtioQueue::Interrupt()
{
	CALLED();

	DisableInterrupt();

	if (fCallback != NULL)
		fCallback(Device()->DriverCookie(), fCookie);

	EnableInterrupt();
	return B_OK;
}
/*!	Pop the next completed transfer off the used ring.
	\param _size  out (may be NULL): number of ring descriptors the transfer
	              occupied
	\return the cookie passed to QueueRequest*(), or NULL when no completed
	        transfer is pending
*/
void*
VirtioQueue::Dequeue(uint16 *_size)
{
	TRACE("Dequeue() fRingUsedIndex: %u\n", fRingUsedIndex);
	// No new entries in the used ring means nothing has completed.
	if (fRingUsedIndex == fRing.used->idx)
		return NULL;

	// The used index increases monotonically; mask it into the ring.
	uint16 usedIndex = fRingUsedIndex++ & (fRingSize - 1);
	TRACE("Dequeue() usedIndex: %u\n", usedIndex);
	struct vring_used_elem *element = &fRing.used->ring[usedIndex];
	uint16 descriptorIndex = element->id;
	// uint32 length = element->len;

	void* cookie = fDescriptors[descriptorIndex]->Cookie();
	// Size() is the descriptor count recorded by SetTo(): the full chain
	// length for direct requests, 1 for indirect requests.
	uint16 size = fDescriptors[descriptorIndex]->Size();
	if (_size != NULL)
		*_size = size;
	if (size == 0)
		panic("VirtioQueue::Dequeue() size is zero\n");
	fDescriptors[descriptorIndex]->Unset();
	fRingFree += size;
	size--;

	// Walk to the end of the (direct) chain, sanity-checking that exactly
	// `size` descriptors are linked; indirect requests use a single slot.
	uint16 index = descriptorIndex;
	if ((fRing.desc[index].flags & VRING_DESC_F_INDIRECT) == 0) {
		while ((fRing.desc[index].flags & VRING_DESC_F_NEXT) != 0) {
			index = fRing.desc[index].next;
			size--;
		}
	}
	if (size > 0)
		panic("VirtioQueue::Dequeue() descriptors left %d\n", size);

	// Return the chain to the free list by linking its tail to the current
	// head and making the chain's first descriptor the new head.
	fRing.desc[index].next = fRingHeadIndex;
	fRingHeadIndex = descriptorIndex;
	TRACE("Dequeue() fRingHeadIndex: %u\n", fRingHeadIndex);

	return cookie;
}
/*!	Queue a transfer described by \a vector: the first \a readVectorCount
	entries are device-readable, the following \a writtenVectorCount entries
	device-writable. \a cookie is returned by Dequeue() on completion.
	\return B_BAD_VALUE for an empty vector, B_BUSY when the ring is full,
	        B_OK on success
*/
status_t
VirtioQueue::QueueRequest(const physical_entry* vector, size_t readVectorCount,
	size_t writtenVectorCount, void *cookie)
{
	CALLED();

	size_t count = readVectorCount + writtenVectorCount;
	if (count < 1)
		return B_BAD_VALUE;

	// Prefer indirect descriptors when negotiated: they consume only one
	// ring slot per request.
	if ((fDevice->Features() & VIRTIO_FEATURE_RING_INDIRECT_DESC) != 0) {
		return QueueRequestIndirect(vector, readVectorCount,
			writtenVectorCount, cookie);
	}

	if (count > fRingFree)
		return B_BUSY;

	// The free-list head receives the transfer's bookkeeping.
	uint16 insertIndex = fRingHeadIndex;
	fDescriptors[insertIndex]->SetTo(count, cookie);

	// enqueue
	uint16 index = QueueVector(insertIndex, fRing.desc, vector,
		readVectorCount, writtenVectorCount);
	fRingHeadIndex = index;
	fRingFree -= count;

	UpdateAvailable(insertIndex);
	NotifyHost();

	return B_OK;
}
/*!	Queue a transfer through this slot's indirect descriptor table, so the
	whole request occupies only a single ring descriptor.
	\return B_BUSY when the ring is full or the vector exceeds the indirect
	        table size, B_OK on success
*/
status_t
VirtioQueue::QueueRequestIndirect(const physical_entry* vector,
	size_t readVectorCount, size_t writtenVectorCount,
	void *cookie)
{
	CALLED();

	size_t count = readVectorCount + writtenVectorCount;
	if (count > fRingFree || count > fIndirectMaxSize)
		return B_BUSY;

	// Only one ring slot is consumed; the data vectors live in the slot's
	// indirect table.
	uint16 insertIndex = fRingHeadIndex;
	fDescriptors[insertIndex]->SetTo(1, cookie);

	// enqueue into the indirect table (chain starts at table index 0)
	uint16 index = QueueVector(0, fDescriptors[insertIndex]->Indirect(),
		vector, readVectorCount, writtenVectorCount);
	fRing.desc[insertIndex].addr = fDescriptors[insertIndex]->PhysAddr();
	fRing.desc[insertIndex].len = index * sizeof(struct vring_desc);
	fRing.desc[insertIndex].flags = VRING_DESC_F_INDIRECT;
	fRingHeadIndex = fRing.desc[insertIndex].next;
	fRingFree--;

	UpdateAvailable(insertIndex);
	NotifyHost();

	return B_OK;
}
/*!	Publish the descriptor chain starting at \a index in the available ring.
	NOTE(review): no explicit memory barrier precedes the idx update;
	presumably ordering is guaranteed elsewhere on the supported
	architectures - confirm.
*/
void
VirtioQueue::UpdateAvailable(uint16 index)
{
	CALLED();

	uint16 available = fRing.avail->idx & (fRingSize - 1);
	fRing.avail->ring[available] = index;
	fRing.avail->idx++;
}


/*!	Fill the descriptor chain starting at \a insertIndex in \a desc from
	\a vector. Device-readable entries come first, then device-writable ones
	(flagged VRING_DESC_F_WRITE); all but the last entry are linked with
	VRING_DESC_F_NEXT. The chain follows the pre-linked `next` pointers.
	\return the descriptor index following the last one used
*/
uint16
VirtioQueue::QueueVector(uint16 insertIndex, struct vring_desc *desc,
	const physical_entry* vector, size_t readVectorCount,
	size_t writtenVectorCount)
{
	CALLED();

	uint16 index = insertIndex;
	size_t total = readVectorCount + writtenVectorCount;
	for (size_t i = 0; i < total; i++, index = desc[index].next) {
		desc[index].addr = vector[i].address;
		desc[index].len = vector[i].size;
		desc[index].flags = 0;
		if (i < total - 1)
			desc[index].flags |= VRING_DESC_F_NEXT;
		if (i >= readVectorCount)
			desc[index].flags |= VRING_DESC_F_WRITE;
	}
	return index;
}
|
const { adapt } = require('../adapters/express-router-adapter')
const CalculateRouterComposer = require('../composers/calculate-call-router-composer')
module.exports = router => {
router.post('/calculate-call', adapt(CalculateRouterComposer.compose()))
}
|
/*
Copyright (c) 2005-2021 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
//{{NO_DEPENDENCIES}}
// Microsoft Visual C++ generated include file.
// Used by SeismicSimulation.rc
//
#define IDC_MYICON 2
#define IDD_SEISMICSIMULATION_DIALOG 102
#define IDS_APP_TITLE 103
#define IDD_ABOUTBOX 103
#define IDM_ABOUT 104
#define IDM_EXIT 105
#define IDI_SEISMICSIMULATION 107
#define IDI_SMALL 108
#define IDC_SEISMICSIMULATION 109
#define IDR_MAINFRAME 128
#define ID_FILE_PARALLEL 32771
#define ID_FILE_SERIAL 32772
#define IDM_PARALLEL 32773
#define ID_FILE_ENABLEGUI 32774
#define ID_FILE_DISABLEGUI 32775
#define IDC_STATIC -1
// Next default values for new objects
//
#ifdef APSTUDIO_INVOKED
#ifndef APSTUDIO_READONLY_SYMBOLS
#define _APS_NO_MFC 1
#define _APS_NEXT_RESOURCE_VALUE 129
#define _APS_NEXT_COMMAND_VALUE 32782
#define _APS_NEXT_CONTROL_VALUE 1000
#define _APS_NEXT_SYMED_VALUE 110
#endif
#endif
|
<reponame>lerages/anarchy-source
package org.rs2server.rs2.model.container;
import org.rs2server.rs2.model.container.Equipment.EquipmentType;
import org.rs2server.rs2.model.player.Player;
/**
 * Holds the state of a duel between two players: the negotiated rule set,
 * the duel status and the two participants, plus the interface/config
 * constants used by the dueling screens.
 */
public class Dueling {

	// Current duel status (starts at 0).
	// NOTE(review): the meaning of the individual status values is not
	// visible in this class - confirm with callers of setDuelStatus().
	public int duelStatus = 0;
	// NOTE(review): never assigned in this class; presumably the number of
	// free inventory slots required before the duel can start - confirm.
	public int duelSpaceReq;

	/**
	 * This is where we keep all our rule constants (indices into
	 * {@link #rules}).
	 */
	public static final int NO_FORFEIT = 0;
	public static final int NO_MOVEMENT = 1;
	public static final int NO_RANGE = 2;
	public static final int NO_MELEE = 3;
	public static final int NO_MAGIC = 4;
	public static final int NO_DRINKS = 5;
	public final static int NO_FOOD = 6;
	public final static int NO_PRAYER = 7;
	public static final int OBSTACLES = 8;
	public static final int FUN_WEAPONS = 9;
	public static final int NO_SPECIAL_ATTACKS = 10;
	// Rules 11-21 disable individual equipment slots; see DUEL_SLOT_IDS and
	// canUseItem() below.
	public static final int NO_HATS = 11;
	public static final int NO_CAPES = 12;
	public static final int NO_AMULETS = 13;
	public static final int NO_SWORDS = 14;
	public static final int NO_BODIES = 15;
	public static final int NO_SHIELDS = 16;
	public static final int NO_LEGS = 17;
	public static final int NO_GLOVES = 18;
	public static final int NO_BOOTS = 19;
	public static final int NO_RINGS = 20;
	public static final int NO_ARROWS = 21;

	public int totalDuelConfigs = 0;

	/**
	 * Defines if a rule is set to on or off (indexed by the rule constants
	 * above).
	 */
	public boolean[] rules = new boolean[22];

	// Interface child component ids shown during the duel / before it starts.
	public final int[] DURING_THE_DUEL_CHILD_IDS = {40, 41, 42, 43, 45, 46, 47, 48, 50, 51, 52};
	public final int[] BEFORE_THE_DUEL_STARTS_CHILD_IDS = {49, 34, 35, 37, 38};

	// Human-readable descriptions for rules 0-10.
	// NOTE(review): index 9 (FUN_WEAPONS) repeats the drinks text from
	// index 5 - this looks like a copy-paste slip; confirm the intended
	// wording before changing the client-visible string.
	public final String[] RULES = {"You cannot forfeit the duel.", "You cannot move.", "You cannot use ranged attacks.",
			"You cannot use melee attacks.", "You cannot use magic attacks.", "You cannot use drinks.", "You cannot use food.",
			"You cannot use prayer.", "There will be obstacles in the arena.", "You cannot use drinks.",
			"You cannot use special attacks."};
	// Messages shown on the "before the duel starts" panel.
	public final String[] BEFORE_THE_DUEL_STARTS = {"Some user items will be taken off.", "Boosted stats will be restored.",
			"Existing prayers will be stopped.", "", ""};

	// Client config bit values, one per rule, used to build the rule bitmask.
	public final int[] DUELING_CONFIG_IDS = { 1, 2, 16, 32, 64, 128,
			256, 512, 1024, 4096, 8192, 16384, 32768, 65536, 131072, 262144,
			524288, 2097152, 8388608, 16777216, 67108864, 134217728, 268435456 };

	// Rule id for each interface button (parallel to DUELING_BUTTON_IDS).
	public final int[] RULE_IDS = { 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5,
			6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
			20, 21 };

	// Equipment slots checked by the equipment rules 11-21, in rule order
	// (index = rule - 11; see canUseItem()).
	public final int[] DUEL_SLOT_IDS = { Equipment.SLOT_HELM,
			Equipment.SLOT_CAPE, Equipment.SLOT_AMULET, Equipment.SLOT_WEAPON,
			Equipment.SLOT_CHEST, Equipment.SLOT_SHIELD,
			Equipment.SLOT_BOTTOMS, Equipment.SLOT_GLOVES,
			Equipment.SLOT_BOOTS, Equipment.SLOT_RING, Equipment.SLOT_ARROWS,
	};

	// Interface button ids (parallel to RULE_IDS).
	public final int[] DUELING_BUTTON_IDS = { 124, 76, 132, 80, 126,
			71, 127, 72, 125, 73, 129, 77, 130, 78, 131, 79, 154, 81,
			156, 75, 159, 74, 113, 114, 115, 117, 118, 119, 120, 123, 122,
			121, 116 };

	// The two participants of this duel.
	private Player player;
	private Player opponent;
	// Whether this side's player died during the duel.
	private boolean died;

	public Dueling(Player player, Player opponent) {
		this.setPlayer(player);
		this.setOpponent(opponent);
		this.died = false;
	}

	public void setDuelStatus(int status) {
		this.duelStatus = status;
	}

	public int getDuelStatus() {
		return this.duelStatus;
	}

	public Player getOpponent() {
		return opponent;
	}

	public void setOpponent(Player opponent) {
		this.opponent = opponent;
	}

	public Player getPlayer() {
		return player;
	}

	public void setPlayer(Player player) {
		this.player = player;
	}

	/**
	 * Checks if we can wear a specific item during this duel.
	 * @param player The player attempting to wear an item.
	 * @param type The equipment type.
	 * @return <code>true</code> if, <code>false</code> if not.
	 */
	public boolean canUseItem(Player player, EquipmentType type) {
		/*
		 * We loop through all equipment rules (11 and up).
		 */
		for(int rule = 11; rule < player.getDueling().rules.length; rule++) {
			/*
			 * Make sure the rule applies.
			 */
			if(player.getDueling().rules[rule]) {
				/*
				 * If that is so, we get the equipment slot for this rule.
				 */
				int slot = player.getDueling().DUEL_SLOT_IDS[rule - 11];
				/*
				 * Check if the item we're about to wear is heading for this slot.
				 */
				if(slot == type.getSlot()) {
					/*
					 * If so, we get the EquipmentType description...
					 */
					String desc = type.getDescription().toLowerCase();
					/*
					 * ...and pluralize it for the chat message.
					 */
					if(!desc.endsWith("s")) {
						desc += "s";
					}
					/*
					 * Notify the player, and return false.
					 */
					player.getActionSender().sendMessage("Wearing " + desc + " has been disabled during this duel.");
					return false;
				}
			}
		}
		return true;
	}

	/**
	 * Defines if a specific rule is toggled.
	 * @param rule The rule toggled.
	 * @return <code>true</code> if, <code>false</code> if not.
	 */
	public boolean isRuleToggled(int rule) {
		return rules[rule];
	}

	public boolean isDead() {
		return died;
	}

	public void setDied(boolean died) {
		this.died = died;
	}
}
|
<reponame>tsmvision/spring-security-examples
package com.example.corespringsecurity.repository;
import com.example.corespringsecurity.domain.entity.AccessIp;
import org.springframework.data.jpa.repository.JpaRepository;
/**
 * Spring Data JPA repository for {@link AccessIp} entities; inherits the
 * standard CRUD and paging operations from {@link JpaRepository}.
 */
public interface AccessIpRepository extends JpaRepository<AccessIp, Long> {
}
|
#!/bin/bash
# Script to deploy VPC resources for an IBM Cloud solution tutorial
#
# (C) 2019 IBM
#
# Written by Henrik Loeser, hloeser@de.ibm.com
# usage: $0 region ssh-key-name prefix-string [ naming-prefix [ resource-output-file [ user-data-file [ image-name ] ] ] ]
# usage: $0 us-south-1 pfq testx default resources.sh cloud-config.yaml ibm-centos-7-6-minimal-amd64-2
# Exit on errors
set -e
set -o pipefail
# include common functions
. $(dirname "$0")/../scripts/common.sh
if [ -z "$2" ]; then
echo "usage: [REUSE_VPC=vpcname] $0 region ssh-key-name prefix-string [ naming-prefix [ resource-output-file [ user-data-file [ image-name ]]]]"
exit
fi
export zone=$1
export KEYNAME=$2
if [ -z "$3" ]; then
export prefix=""
else
export prefix=$3
fi
if [ -z "$4" ]; then
export resourceGroup=$(currentResourceGroup)
else
export resourceGroup=$4
fi
if [ ! -z "$5" ]; then
resource_file=$5
fi
if [ ! -z "$6" ]; then
user_data_frontend=$6
else
user_data_frontend='#!/bin/bash
apt-get update
apt-get install -y nginx
echo "I am the frontend server" > /var/www/html/index.html
service nginx start
'
fi
if [ ! -z "$7" ]; then
user_data_backend=$6
else
user_data_backend='#!/bin/bash
apt-get update
apt-get install -y nginx
echo "I am the backend server" > /var/www/html/index.html
service nginx start
'
fi
if [ -z "$8" ]; then
image=$(ubuntu1804)
else
image=$8
fi
export basename="vpc-pubpriv"
export ImageId=$(ibmcloud is images --json | jq -r '.[] | select (.name=="'$image'") | .id')
export SSHKey=$(SSHKeynames2UUIDs $KEYNAME)
export BASENAME="${prefix}${basename}"
# check if to reuse existing VPC
if [ -z "$REUSE_VPC" ]; then
echo "Creating VPC"
VPC_OUT=$(ibmcloud is vpc-create ${BASENAME} --resource-group-name ${resourceGroup} --json)
if [ $? -ne 0 ]; then
echo "Error while creating VPC:"
echo "========================="
echo "$VPC_OUT"
exit
fi
VPCID=$(echo "$VPC_OUT" | jq -r '.id')
vpcResourceAvailable vpcs $BASENAME
VPCNAME=$BASENAME
else
echo "Reusing VPC $REUSE_VPC"
VPC_OUT=$(ibmcloud is vpcs --json)
VPCID=$(echo "${VPC_OUT}" | jq -r '.[] | select (.name=="'${REUSE_VPC}'") | .id')
echo "$VPCID"
VPCNAME=$REUSE_VPC
fi
# ---------------------------------------------------------------------------
# Create a bastion host, then the frontend/backend subnets, security groups,
# VSIs and a floating IP for the frontend of the sample VPC topology.
# ---------------------------------------------------------------------------
# set up few variables consumed by bastion-create.sh
BASTION_SSHKEY=$SSHKey
BASTION_IMAGE=$ImageId
BASTION_ZONE=$zone
# include file to create the bastion resources (sourced into this shell so it
# can read the BASTION_* variables above and export BASTION_IP_ADDRESS etc.)
. $(dirname "$0")/../scripts/bastion-create.sh
# Create Public Gateways if not available
vpcCreatePublicGateways $VPCNAME
# Identify the right public gateway to allow software installation on backend VSI
PUBGWID=$( vpcPublicGatewayIDbyZone $VPCNAME $zone )
echo "PUBGWID: ${PUBGWID}"
# Backend subnet is attached to the public gateway so the backend VSI can
# reach the internet for package installation.
# NOTE(review): inside each "if ! VAR=$(cmd)" below, $? is the status of the
# negated condition (0 when the command failed), so "exit $code" exits 0 on
# failure -- confirm and capture the real command status if that matters.
if ! SUB_BACK=$(ibmcloud is subnet-create ${BASENAME}-backend-subnet $VPCID $zone --ipv4-address-count 256 --public-gateway-id $PUBGWID --json)
then
    code=$?
    echo ">>> ibmcloud is subnet-create ${BASENAME}-backend-subnet $VPCID $zone --ipv4-address-count 256 --public-gateway-id $PUBGWID --json"
    echo "${SUB_BACK}"
    exit $code
fi
SUB_BACK_ID=$(echo "$SUB_BACK" | jq -r '.id')
if ! SUB_FRONT=$(ibmcloud is subnet-create ${BASENAME}-frontend-subnet $VPCID $zone --ipv4-address-count 256 --json)
then
    code=$?
    echo ">>> ibmcloud is subnet-create ${BASENAME}-frontend-subnet $VPCID $zone --ipv4-address-count 256 --json"
    echo "${SUB_FRONT}"
    exit $code
fi
SUB_FRONT_ID=$(echo "$SUB_FRONT" | jq -r '.id')
# wait until both subnets are usable before attaching anything to them
vpcResourceAvailable subnets ${BASENAME}-backend-subnet
vpcResourceAvailable subnets ${BASENAME}-frontend-subnet
if ! SGBACK_JSON=$(ibmcloud is security-group-create ${BASENAME}-backend-sg $VPCID --json)
then
    code=$?
    echo ">>> ibmcloud is security-group-create ${BASENAME}-backend-sg $VPCID --json"
    echo "${SGBACK_JSON}"
    exit $code
fi
SGBACK=$(echo "${SGBACK_JSON}" | jq -r '.id')
if ! SGFRONT_JSON=$(ibmcloud is security-group-create ${BASENAME}-frontend-sg $VPCID --json)
then
    code=$?
    echo ">>> ibmcloud is security-group-create ${BASENAME}-frontend-sg $VPCID --json"
    echo "${SGFRONT_JSON}"
    exit $code
fi
SGFRONT=$(echo "${SGFRONT_JSON}" | jq -r '.id')
# Example has the frontend responding to port 80 from anywhere. The backend responds from 80 but only from the frontend
#ibmcloud is security-group-rule-add GROUP_ID DIRECTION PROTOCOL
echo "Creating rules"
echo "backend"
# backend accepts HTTP only from members of the frontend security group
ibmcloud is security-group-rule-add $SGBACK inbound tcp --remote $SGFRONT --port-min 80 --port-max 80 > /dev/null
echo "frontend"
# inbound: HTTP from anywhere
ibmcloud is security-group-rule-add $SGFRONT inbound tcp --remote "0.0.0.0/0" --port-min 80 --port-max 80 > /dev/null
# outbound: HTTP to the backend security group only
ibmcloud is security-group-rule-add $SGFRONT outbound tcp --remote $SGBACK --port-min 80 --port-max 80 > /dev/null
# Frontend and backend server
echo "Creating VSIs"
# SGMAINT (maintenance security group) comes from bastion-create.sh above.
instance_create="ibmcloud is instance-create ${BASENAME}-backend-vsi $VPCID $zone $(instance_profile) $SUB_BACK_ID --image-id $ImageId --key-ids $SSHKey --security-group-ids $SGBACK,$SGMAINT --json"
if ! BACK_VSI=$($instance_create --user-data "$user_data_backend")
then
    code=$?
    echo ">>> $instance_create --user-data $user_data_backend"
    echo "${BACK_VSI}"
    exit $code
fi
instance_create="ibmcloud is instance-create ${BASENAME}-frontend-vsi $VPCID $zone $(instance_profile) $SUB_FRONT_ID --image-id $ImageId --key-ids $SSHKey --security-group-ids $SGFRONT,$SGMAINT --json"
if ! FRONT_VSI=$($instance_create --user-data "$user_data_frontend")
then
    code=$?
    echo ">>> $instance_create --user-data $user_data_frontend"
    echo "${FRONT_VSI}"
    exit $code
fi
vpcResourceRunning instances ${BASENAME}-frontend-vsi
vpcResourceRunning instances ${BASENAME}-bastion-vsi
# network interface is not initially returned, so re-fetch both instances
instanceId=$(echo "$BACK_VSI" | jq -r '.id')
BACK_VSI=$(ibmcloud is instance $instanceId --json)
instanceId=$(echo "$FRONT_VSI" | jq -r '.id')
FRONT_VSI=$(ibmcloud is instance $instanceId --json)
export FRONT_VSI_NIC_ID=$(echo "$FRONT_VSI" | jq -r '.primary_network_interface.id')
export FRONT_NIC_IP=$(echo "$FRONT_VSI" | jq -r '.primary_network_interface.primary_ipv4_address')
export BACK_VSI_NIC_ID=$(echo "$BACK_VSI" | jq -r '.primary_network_interface.id')
export BACK_NIC_IP=$(echo "$BACK_VSI" | jq -r '.primary_network_interface.primary_ipv4_address')
# Floating IP for frontend
if ! FRONT_IP_JSON=$(ibmcloud is floating-ip-reserve ${BASENAME}-frontend-ip --nic-id $FRONT_VSI_NIC_ID --json)
then
    code=$?
    echo ">>> ibmcloud is floating-ip-reserve ${BASENAME}-frontend-ip --nic-id $FRONT_VSI_NIC_ID --json"
    echo "${FRONT_IP_JSON}"
    exit $code
fi
FRONT_IP_ADDRESS=$(echo "${FRONT_IP_JSON}" | jq -r '.address')
vpcResourceAvailable floating-ips ${BASENAME}-frontend-ip
# Summary for the operator
echo "Your frontend IP address: $FRONT_IP_ADDRESS"
echo "Your bastion IP address: $BASTION_IP_ADDRESS"
echo "Your frontend internal IP address: $FRONT_NIC_IP"
echo "Your backend internal IP address: $BACK_NIC_IP"
echo ""
echo "It may take few minutes for the new routing to become active."
echo "To connect to the frontend: ssh -J root@$BASTION_IP_ADDRESS root@$FRONT_NIC_IP"
echo "To connect to the backend: ssh -J root@$BASTION_IP_ADDRESS root@$BACK_NIC_IP"
echo ""
echo "Install software: ssh -J root@$BASTION_IP_ADDRESS root@$BACK_NIC_IP 'bash -s' < install-software.sh"
echo ""
echo "Turn maintenance off and on by removing the security group from the fronend or backend subnet, frontend example:"
echo $(dirname "$0")/vpc-maintenance.sh frontend off $prefix $REUSE_VPC
echo $(dirname "$0")/vpc-maintenance.sh frontend on $prefix $REUSE_VPC
# Optionally persist the created resource identifiers for later scripts.
if [ ! -z "$resource_file" ]; then
    cat > $resource_file <<EOF
FRONT_IP_ADDRESS=$FRONT_IP_ADDRESS
BASTION_IP_ADDRESS=$BASTION_IP_ADDRESS
FRONT_NIC_IP=$FRONT_NIC_IP
BACK_NIC_IP=$BACK_NIC_IP
FRONT_VSI_NIC_ID=$FRONT_VSI_NIC_ID
BACK_VSI_NIC_ID=$BACK_VSI_NIC_ID
EOF
fi
# ssh -J root@$BASTION_IP_ADDRESS root@$BACK_NIC_I 'bash -s' < install-software.sh
|
#!/usr/bin/env bash
# Install ROS 2 Foxy (desktop variant) on an Ubuntu host.
sudo apt update && sudo apt install curl gnupg2 lsb-release -y
# Fetch the ROS signing key into the system keyring.
sudo curl -sSL https://raw.githubusercontent.com/ros/rosdistro/master/ros.key -o /usr/share/keyrings/ros-archive-keyring.gpg
# Register the ROS 2 apt repository for this architecture and Ubuntu release,
# pinned to the key downloaded above.
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/ros-archive-keyring.gpg] http://packages.ros.org/ros2/ubuntu $(lsb_release -cs) main" | sudo tee /etc/apt/sources.list.d/ros2.list > /dev/null
sudo apt update
sudo apt install ros-foxy-desktop -y
# Setup (optional fish-shell environment sourcing, left disabled)
# printf "bass source /opt/ros/foxy/setup.bash\n\n" >> ~/.config/fish/config.fish
|
import { Injectable } from '@angular/core';
import {environment} from '../../environments/environment';
import {LoginModel} from '../models/loginModel';
import {HttpClient} from '@angular/common/http';
import {TokenModel} from '../models/tokenModel';
import {Observable} from 'rxjs';
import {SingleResponseModel} from '../models/singleResponseModel';
import {RegisterModel} from '../models/registerModel';
import {LocalstorageService} from './localstorage.service';
@Injectable({
  providedIn: 'root'
})
export class AuthService {
  /** Base URL of the authentication endpoints. */
  apiUrl: string = `${environment.apiUrl}/auth`;

  constructor(private httpClient: HttpClient, private storageService: LocalstorageService) {}

  /** POSTs the credentials and emits the issued token wrapped in a single-response envelope. */
  login(loginModel: LoginModel): Observable<SingleResponseModel<TokenModel>> {
    return this.httpClient.post<SingleResponseModel<TokenModel>>(`${this.apiUrl}/login`, loginModel);
  }

  /** POSTs the registration details and emits the issued token on success. */
  register(registerModel: RegisterModel): Observable<SingleResponseModel<TokenModel>> {
    return this.httpClient.post<SingleResponseModel<TokenModel>>(`${this.apiUrl}/register`, registerModel);
  }

  /** Delegates to local storage to report whether the user is authenticated. */
  isAuthenticated(): boolean {
    return this.storageService.isAuthenticated();
  }
}
|
import type { quat2 } from 'gl-matrix'
class DualQuatUtil {
    /** Read 8 consecutive floats starting at idx from a flat buffer into a dual quaternion. */
    static fromBuf( out: quat2, ary : Array<number> | Float32Array, idx: number ) : quat2 {
        out[ 0 ] = ary[ idx ];
        out[ 1 ] = ary[ idx + 1 ];
        out[ 2 ] = ary[ idx + 2 ];
        out[ 3 ] = ary[ idx + 3 ];
        out[ 4 ] = ary[ idx + 4 ];
        out[ 5 ] = ary[ idx + 5 ];
        out[ 6 ] = ary[ idx + 6 ];
        out[ 7 ] = ary[ idx + 7 ];
        return out;
    }

    /**
     * Write a dual quaternion into a flat buffer starting at idx.
     * Returns the class for chaining. FIX: in a static method `this` is the
     * class itself, so the return type must be `typeof DualQuatUtil`, not the
     * instance type `DualQuatUtil` (which does not type-check).
     */
    static toBuf( m: quat2, ary : Array<number> | Float32Array, idx: number ) : typeof DualQuatUtil {
        ary[ idx ]     = m[ 0 ];
        ary[ idx + 1 ] = m[ 1 ];
        ary[ idx + 2 ] = m[ 2 ];
        ary[ idx + 3 ] = m[ 3 ];
        ary[ idx + 4 ] = m[ 4 ];
        ary[ idx + 5 ] = m[ 5 ];
        ary[ idx + 6 ] = m[ 6 ];
        ary[ idx + 7 ] = m[ 7 ];
        return this;
    }
}
export default DualQuatUtil; |
#!/bin/bash
# CI helper: on macOS agents, install an official python.org Python and set up
# a virtualenv; no-op elsewhere.
set -e
set -x
# On osx we need to bring our own Python.
# See: https://github.com/travis-ci/travis-ci/issues/2312
# NOTE(review): "$(Agent.OS)" looks like an Azure Pipelines macro that is
# substituted before the script runs; in plain bash it would be executed as a
# command named "Agent.OS" -- confirm this only runs under Azure Pipelines.
if [[ "$(Agent.OS)" == "Darwin" ]]; then
    # We use the official python.org installers to make sure our wheels are
    # going to be as widely compatible as possible
    PYTHON_PKG_36="https://www.python.org/ftp/python/3.6.6/python-3.6.6-macosx10.9.pkg"
    PYTHON_PKG_37="https://www.python.org/ftp/python/3.7.0/python-3.7.0-macosx10.9.pkg"
    GET_PIP="https://bootstrap.pypa.io/get-pip.py"
    # update brew (retried once: the first attempt occasionally fails on CI)
    brew update || brew update
    # Update openssl if necessary
    brew outdated openssl || brew upgrade openssl
    # Install packages needed to build lib-secp256k1
    for pkg in automake libtool pkg-config libffi; do
        brew list $pkg > /dev/null || brew install $pkg
        brew outdated --quiet $pkg || brew upgrade $pkg
    done
    # Cache the python.org installer package between CI runs.
    mkdir -p ~/.cache/python-dl
    builtin pushd ~/.cache/python-dl
    ls -l
    # Indirect expansion: pick PYTHON_PKG_36 / PYTHON_PKG_37 by version string.
    py_pkg=PYTHON_PKG_${TRAVIS_PYTHON_VERSION//./}
    py_pkg=${!py_pkg}
    installer_pkg=$(basename ${py_pkg})
    # The package might have been cached from a previous run
    if [[ ! -f ${installer_pkg} ]]; then
        curl -LO ${py_pkg}
    fi
    sudo installer -pkg ${installer_pkg} -target /
    builtin popd
    # NOTE(review): this case reads ESV_PYTHON_VERSION while everything else
    # uses TRAVIS_PYTHON_VERSION -- verify both variables are set consistently,
    # otherwise ${python}/${virtualenv} below stay empty.
    case "${ESV_PYTHON_VERSION}" in
    3.6|3.7)
        python=/Library/Frameworks/Python.framework/Versions/${TRAVIS_PYTHON_VERSION}/bin/python3
        virtualenv=venv
        ;;
    esac
    # https://bugs.python.org/issue28150
    if [[ "${NEED_SSL_FIX}" == "true" ]]; then
        "/Applications/Python ${TRAVIS_PYTHON_VERSION}/Install Certificates.command"
    fi
    mkdir ~/virtualenv
    ${python} -m ${virtualenv} ~/virtualenv/python${TRAVIS_PYTHON_VERSION}
    source ~/virtualenv/python${TRAVIS_PYTHON_VERSION}/bin/activate
fi
# restore default shell behavior for whatever sources this script next
set +x +e
export { LayoutService } from './layout.service';
export { AnalyticsService } from './analytics.service';
export { PlayerService } from './player.service';
export { StateService } from './state.service';
export { SeoService } from './seo.service';
|
def unique_names(names):
    """Return the unique items of *names* in first-seen order.

    Replaces the original O(n^2) ``item not in list`` scan with
    ``dict.fromkeys``, which preserves insertion order and deduplicates
    in O(n). Items are assumed hashable (names are typically strings).
    """
    return list(dict.fromkeys(names))
<reponame>Commutyble/thingmagic-client<filename>c/src/api/tmr_utils.h
#ifndef _TMR_UTILS_H
#define _TMR_UTILS_H
/**
 * @file tmr_utils.h
 * @brief Mercury API - generic utilities
 * @author <NAME>
 * @date 12/1/2009
 */
/*
 * Copyright (c) 2009 ThingMagic, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

/* WinCE toolchains lack <stdint.h>; use the bundled substitute there. */
#ifdef WINCE
#include <stdint_win32.h>
#else
#include <stdint.h>
#endif
#include <stddef.h>

/* Running total of bytes appended via the SETxx macros (shared across files). */
extern uint16_t totalMsgIdx;

#ifdef __cplusplus
extern "C" {
#endif

/* Macros for working with values embedded in uint8_t arrays (msg).
 * All multi-byte accessors are big-endian: most significant byte first. */

/* Absolute-value get */
#define GETU8AT(msg, i) ( \
  ((msg)[(i)]) )
#define GETU16AT(msg, i) ( \
  ((uint16_t)((msg)[(i) ]) << 8) | \
  ((msg)[(i)+1] << 0) )
#define GETS16AT(msg, i) ( \
  ((int16_t)((msg)[(i) ]) << 8) | \
  ((int16_t)((msg)[(i)+1]) << 0) )
#define GETU24AT(msg, i) ( \
  ((uint32_t)((msg)[(i) ]) << 16) | \
  ((uint32_t)((msg)[(i)+1]) << 8) | \
  ((msg)[(i)+2] << 0) )
#define GETU32AT(msg, i) ( \
  ((uint32_t)((msg)[(i) ]) << 24) | \
  ((uint32_t)((msg)[(i)+1]) << 16) | \
  ((uint32_t)((msg)[(i)+2]) << 8) | \
  ((msg)[(i)+3] << 0) )
#define GETU40AT(msg, i) ( \
  ((uint64_t)((msg)[(i) ]) << 32) | \
  ((uint64_t)((msg)[(i)+1]) << 24) | \
  ((uint64_t)((msg)[(i)+2]) << 16) | \
  ((uint64_t)((msg)[(i)+3]) << 8) | \
  ((msg)[(i)+4] << 0) )

/* Get and update index to next position.
 * The comma operator sequences the index bump before the read-back. */
#define GETU8(msg, i) ((msg)[(i)++])
#define GETU16(msg, i) (i+=2, GETU16AT((msg), i-2))
#define GETU24(msg, i) (i+=3, GETU24AT((msg), i-3))
#define GETU32(msg, i) (i+=4, GETU32AT((msg), i-4))

/* Set and update index to next position.
 * Each macro also advances the global totalMsgIdx byte counter. */
#define SETU8(msg, i, u8val) do { \
  totalMsgIdx += 1; \
  (msg)[(i)++] = (u8val) & 0xff; \
} while (0)
#define SETU16(msg, i, u16val) do { \
  uint16_t _tmp = (u16val); \
  totalMsgIdx += 2; \
  (msg)[(i)++] =(uint8_t) (_tmp >> 8) & 0xff; \
  (msg)[(i)++] =(uint8_t)(_tmp >> 0) & 0xff; \
} while (0)
#define SETS16(msg, i, s16val) do { \
  int16_t _tmp = (s16val); \
  totalMsgIdx += 2; \
  (msg)[(i)++] =(int8_t) (_tmp >> 8) & 0xff; \
  (msg)[(i)++] =(int8_t)(_tmp >> 0) & 0xff; \
} while (0)
#define SETU24(msg, i, u32val) do { \
  uint32_t _tmp = (u32val); \
  totalMsgIdx += 3; \
  (msg)[(i)++] = (uint8_t)(_tmp >> 16) & 0xff; \
  (msg)[(i)++] = (uint8_t)(_tmp >> 8) & 0xff; \
  (msg)[(i)++] = (uint8_t)(_tmp >> 0) & 0xff; \
} while (0)
#define SETU32(msg, i, u32val) do { \
  uint32_t _tmp = (u32val); \
  totalMsgIdx += 4; \
  (msg)[(i)++] = (uint8_t)(_tmp >> 24) & 0xff; \
  (msg)[(i)++] = (uint8_t)(_tmp >> 16) & 0xff; \
  (msg)[(i)++] = (uint8_t)(_tmp >> 8) & 0xff; \
  (msg)[(i)++] = (uint8_t)(_tmp >> 0) & 0xff; \
} while (0)
#define SETU40(msg, i, u64val) do { \
  uint64_t _tmp = (u64val); \
  totalMsgIdx += 5; \
  (msg)[(i)++] = (uint8_t)(_tmp >> 32) & 0xff; \
  (msg)[(i)++] = (uint8_t)(_tmp >> 24) & 0xff; \
  (msg)[(i)++] = (uint8_t)(_tmp >> 16) & 0xff; \
  (msg)[(i)++] = (uint8_t)(_tmp >> 8) & 0xff; \
  (msg)[(i)++] = (uint8_t)(_tmp >> 0) & 0xff; \
} while (0)
#define SETS32(msg, i, s32val) do { \
  int32_t _tmp = (s32val); \
  totalMsgIdx += 4; \
  (msg)[(i)++] = (int8_t)(_tmp >> 24) & 0xff; \
  (msg)[(i)++] = (int8_t)(_tmp >> 16) & 0xff; \
  (msg)[(i)++] = (int8_t)(_tmp >> 8) & 0xff; \
  (msg)[(i)++] = (int8_t)(_tmp >> 0) & 0xff; \
} while (0)

/* Append a value to our list structures, which have both
 * a allocated-space value (max) and a length-of-underlying-list
 * value (len). Len can exceed max, which indicates to the caller
 * that there was not enough space in the passed-in list structure
 * to store the entire list.
 */
#define LISTAPPEND(l, value) do { \
  (l)->len++; \
  if ((l)->len <= (l)->max) \
    (l)->list[(l)->len - 1] = (value); \
} while (0)

/* Macros for working with large bitmasks made up of arrays of uint32_t.
 * Bit `number` lives in word number/32 at bit position number%32. */
#define BITGET(array, number) (((array)[(number)/32] >> ((number)&31)) & 1)
#define BITSET(array, number) ((array)[(number)/32] |= ((uint32_t)1 << ((number)&31)))
#define BITCLR(array, number) ((array)[(number)/32] &= ~((uint32_t)1 << ((number)&31)))

/* Element count of a fixed-size array (not valid on pointers). */
#define numberof(x) (sizeof((x))/sizeof((x)[0]))

/* Without the host C library, route a few string/memory routines to the
 * minimal implementations provided by the API. */
#ifndef TMR_USE_HOST_C_LIBRARY
void *tm_memcpy(void *dest, const void *src, size_t n);
char *tm_strcpy(char *dest, const char *src);
char *tm_strchr(const char *s, int c);
#undef memcpy
#undef strcpy
#undef strchr
#define memcpy tm_memcpy
#define strcpy tm_strcpy
#define strchr tm_strchr
#endif

/* Portable case-insensitive compare (strcasecmp is not in ISO C). */
int tm_strcasecmp(const char *s1, const char *s2);
#define strcasecmp tm_strcasecmp

/* Time and conversion helpers; semantics defined by the per-platform
 * implementations. NOTE(review): TMR_String is declared in another header
 * that is assumed to be included before this one. */
void tm_gettime_consistent(uint32_t *high, uint32_t *low);
uint32_t tm_time_subtract(uint32_t end, uint32_t start);
int tm_u8s_per_bits(int bitCount);
void TMR_stringCopy(TMR_String *dest, const char *src, int len);
uint64_t TMR_makeBitMask(int offset, int length);
uint32_t TMR_byteArrayToInt(uint8_t data[], int offset);
uint16_t TMR_byteArrayToShort(uint8_t data[], int offset);
uint64_t TMR_byteArrayToLong(uint8_t data[], int offset);
void TMR_bytesToWords(uint16_t count, const uint8_t data[], uint16_t data16[]);
void TMR_wordsToBytes(uint16_t count, const uint16_t data[], uint8_t buf[]);

#ifdef __cplusplus
}
#endif

#endif /* _TMR_UTILS_H */
|
package org.rs2server.rs2.content;
import org.rs2server.rs2.model.Animation;
import org.rs2server.rs2.model.GameObject;
import org.rs2server.rs2.model.GroundItem;
import org.rs2server.rs2.model.Item;
import org.rs2server.rs2.model.World;
import org.rs2server.rs2.model.player.Player;
import org.rs2server.rs2.tickable.Tickable;
import java.util.HashMap;
import java.util.Map;
public class MageArenaGodPrayer {

    /** Maps a god-statue object id to the cape item id it grants. */
    static Map<Integer, Integer> contains = new HashMap<>();

    static {
        contains.put(2873, 2412);
        contains.put(2874, 2414);
        contains.put(2875, 2413);
    }

    /**
     * Handles a player praying at a Mage Arena god statue.
     *
     * Looks up the cape mapped to the clicked object; refuses if the player
     * already owns (or has dropped) a god cape; otherwise plays the prayer
     * animation and spawns the cape on the ground four ticks later.
     *
     * Fixes over the original: the unused GroundItem local and the leftover
     * debug System.out.println were removed, and the map is consulted once
     * instead of three times.
     *
     * @param player the praying player
     * @param obj    the statue object that was interacted with
     * @return true if the prayer was handled, false if the object is not a
     *         statue or the player may not receive a cape
     */
    public static boolean godPrayer(Player player, GameObject obj) {
        final Integer capeId = contains.get(obj.getId());
        if (capeId == null) {
            return false;
        }
        if (containsGodCape(player) || player.hasAttribute("droppedGodCape")) {
            player.getActionSender().sendMessage("You already own a God Cape.");
            return false;
        }
        player.setAttribute("busy", true);
        player.playAnimation(Animation.create(645));
        World.getWorld().submit(new Tickable(4) {
            @Override
            public void execute() {
                this.stop();
                player.removeAttribute("busy");
                World.getWorld().createGroundItem(
                        new GroundItem(player.getName(), false, new Item(capeId), player.getLocation()), player);
            }
        });
        return true;
    }

    /** True when the player has the given item in bank, inventory or equipment. */
    private static boolean ownsItem(Player player, int itemId) {
        return player.getBank().contains(itemId)
                || player.getInventory().contains(itemId)
                || player.getEquipment().contains(itemId);
    }

    /** True when the player owns any of the three god capes anywhere. */
    public static boolean containsGodCape(Player player) {
        return ownsItem(player, 2412) || ownsItem(player, 2413) || ownsItem(player, 2414);
    }
}
|
<reponame>lananh265/social-network<filename>node_modules/react-icons-kit/icomoon/newspaper.js
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.newspaper = void 0;

// IcoMoon "newspaper" glyph, shaped for react-icons-kit's Icon component.
const newspaper = {
  viewBox: "0 0 16 16",
  children: [
    {
      name: "path",
      attribs: {
        fill: "#000000",
        d: "M14 4v-2h-14v11c0 0.552 0.448 1 1 1h13.5c0.828 0 1.5-0.672 1.5-1.5v-8.5h-2zM13 13h-12v-10h12v10zM2 5h10v1h-10zM8 7h4v1h-4zM8 9h4v1h-4zM8 11h3v1h-3zM2 7h5v5h-5z"
      }
    }
  ]
};

exports.newspaper = newspaper;
// Test1SampleQueue.cpp : This file contains the 'main' function. Program execution begins and ends there.
//
#include <string>
#include <iostream>
#include "Queue.h"
#include "Customer.h"
using namespace std;
// Intended to create a Customer, assign it the next queue number and enqueue
// it on serviceQueue (presumably incrementing queueNumber) -- stub, not yet
// implemented.
void registerCustomer(Queue& serviceQueue, int& queueNumber)
{
    //to be implemented
}
// Intended to serve (dequeue) the customer at the front of serviceQueue --
// stub, not yet implemented.
void nextCustomer(Queue& serviceQueue)
{
    //to be implemented
}
// Intended to print how many customers are currently waiting in serviceQueue
// -- stub, not yet implemented.
void displayCount(Queue& serviceQueue)
{
    //to be implemented
}
// Drives one register -> serve -> count cycle through the sample queue.
int main()
{
    Queue serviceQueue;
    int queueNumber = 0;  // last queue number handed out

    registerCustomer(serviceQueue, queueNumber);
    nextCustomer(serviceQueue);
    displayCount(serviceQueue);
}
|
<gh_stars>1-10
'use strict';

// Shared ESLint configuration: extends the @strapi-community preset plus the
// prettier preset, with one local rule override.
module.exports = {
  // Editor/schema-validation hint only; ESLint itself ignores this key.
  $schema: 'https://json.schemastore.org/eslintrc',
  rules: {
    // Allow identifiers with leading/trailing underscores (e.g. Mongo's _id).
    'no-underscore-dangle': 0,
  },
  extends: ['@strapi-community', 'prettier'],
};
|
<gh_stars>1-10
import { Helper } from "./helper";
/**
 * Client for the Proxmox VE per-VM firewall API
 * (/nodes/{node}/qemu/{qemu}/firewall/...).
 *
 * Improvements over the original: the helper field is typed as Helper
 * instead of `any` (restoring type checking on every call), and the
 * repeated URL concatenation is factored into a private path builder.
 * Every method and URL is otherwise unchanged.
 */
export class Firewall {
  /** HTTP helper performing the actual GET/POST/PUT/DELETE requests. */
  helper: Helper;

  constructor(config) {
    this.helper = new Helper(config);
  }

  /** Base firewall path for a guest: /nodes/{node}/qemu/{qemu}/firewall */
  private fwPath(node, qemu): string {
    return '/nodes/' + node + '/qemu/' + qemu + '/firewall';
  }

  /** GET the firewall index for a guest. */
  async list(node, qemu) {
    return await this.helper.httpGet(this.fwPath(node, qemu), {});
  }

  /** GET all firewall rules of a guest. */
  async listRules(node, qemu) {
    return await this.helper.httpGet(this.fwPath(node, qemu) + '/rules', {});
  }

  /** POST a new firewall rule. */
  async createRule(node, qemu, data) {
    return await this.helper.httpPost(this.fwPath(node, qemu) + '/rules', data);
  }

  /** GET a single rule by position. */
  async getRule(node, qemu, pos) {
    return await this.helper.httpGet(this.fwPath(node, qemu) + '/rules/' + pos, {});
  }

  /** PUT an update to the rule at the given position. */
  async updateRule(node, qemu, pos, data) {
    return await this.helper.httpPut(this.fwPath(node, qemu) + '/rules/' + pos, data);
  }

  /** DELETE the rule at the given position. */
  async deleteRule(node, qemu, pos) {
    return await this.helper.httpDel(this.fwPath(node, qemu) + '/rules/' + pos, {});
  }

  /** GET all IP/CIDR aliases of a guest. */
  async listAlias(node, qemu) {
    return await this.helper.httpGet(this.fwPath(node, qemu) + '/aliases', {});
  }

  /** POST a new alias. */
  async createAlias(node, qemu, data) {
    return await this.helper.httpPost(this.fwPath(node, qemu) + '/aliases', data);
  }

  /** GET a single alias by name. */
  async getAlias(node, qemu, name) {
    return await this.helper.httpGet(this.fwPath(node, qemu) + '/aliases/' + name, {});
  }

  /** PUT an update to the named alias. */
  async updateAlias(node, qemu, name, data) {
    return await this.helper.httpPut(this.fwPath(node, qemu) + '/aliases/' + name, data);
  }

  /** DELETE the named alias. */
  async deleteAlias(node, qemu, name) {
    return await this.helper.httpDel(this.fwPath(node, qemu) + '/aliases/' + name, {});
  }

  /** GET all IP sets of a guest. */
  async listIpset(node, qemu) {
    return await this.helper.httpGet(this.fwPath(node, qemu) + '/ipset', {});
  }

  /** POST a new IP set. */
  async createIpset(node, qemu, data) {
    return await this.helper.httpPost(this.fwPath(node, qemu) + '/ipset', data);
  }

  /** GET the entries of the named IP set. */
  async getIpsetContent(node, qemu, name) {
    return await this.helper.httpGet(this.fwPath(node, qemu) + '/ipset/' + name, {});
  }

  /** POST a new IP/CIDR into the named IP set. */
  async addIpToIpset(node, qemu, name, data) {
    return await this.helper.httpPost(this.fwPath(node, qemu) + '/ipset/' + name, data);
  }

  /** DELETE the named IP set. */
  async deleteIpset(node, qemu, name) {
    return await this.helper.httpDel(this.fwPath(node, qemu) + '/ipset/' + name, {});
  }

  /** GET a single IP/CIDR entry of the named IP set. */
  async getIpfromIpset(node, qemu, name, cidr) {
    return await this.helper.httpGet(this.fwPath(node, qemu) + '/ipset/' + name + '/' + cidr, {});
  }

  /** PUT an update to an IP/CIDR entry of the named IP set. */
  async updateIpfromIpset(node, qemu, name, cidr, data) {
    return await this.helper.httpPut(this.fwPath(node, qemu) + '/ipset/' + name + '/' + cidr, data);
  }

  /** DELETE an IP/CIDR entry from the named IP set. */
  async deleteIpfromIpset(node, qemu, name, cidr) {
    return await this.helper.httpDel(this.fwPath(node, qemu) + '/ipset/' + name + '/' + cidr, {});
  }

  /** GET the guest's firewall options. */
  async getOptions(node, qemu) {
    return await this.helper.httpGet(this.fwPath(node, qemu) + '/options', {});
  }

  /** PUT new firewall options for the guest. */
  async setOptions(node, qemu, data) {
    return await this.helper.httpPut(this.fwPath(node, qemu) + '/options', data);
  }

  /** GET the guest's firewall log. */
  async getLog(node, qemu) {
    return await this.helper.httpGet(this.fwPath(node, qemu) + '/log', {});
  }

  /** GET lists of refs (aliases/ipsets) usable in rules. */
  async getRefs(node, qemu) {
    return await this.helper.httpGet(this.fwPath(node, qemu) + '/refs', {});
  }
}
|
# Solution
# The solution involves creating a new class called AlphaCoefficient that inherits from the FourierCoefficient class.
from ._coefficient import FourierCoefficient
class AlphaCoefficient(FourierCoefficient):
    """Marker subclass of :class:`FourierCoefficient` for alpha coefficients.

    Adds no behavior of its own; it exists so alpha coefficients can be
    distinguished by type from other ``FourierCoefficient`` subclasses.
    """
    pass
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.