text
stringlengths 1
1.05M
|
|---|
# Entrypoint when running inside docker only!
# BUG FIX: abort on the first failing command so a broken install or build
# does not silently fall through to `yarn start`.
set -e

cd /app
yarn            # install root dependencies
yarn bootstrap  # link/install workspace packages
yarn build      # compile all packages
cd demo
yarn start      # launch the demo app
|
#!/usr/bin/env bash
# Print system / CUDA / cuDNN version information (e.g. for bug reports).
echo "System information:"
cat /etc/issue
echo "cuda_information:"
cat /usr/local/cuda/version.txt
# Read the cuDNN version macros directly from the header
# (replaces a useless `cat file | grep` with a plain grep on the file).
grep CUDNN_MAJOR -A 2 /usr/local/cuda/include/cudnn.h
|
import {
Controller,
HttpCode,
Post,
UseGuards,
Body,
Put,
Param,
ParseIntPipe,
UnauthorizedException,
Delete,
Get,
HttpStatus,
Query,
} from '@nestjs/common';
import { EventService } from './event.service';
import { AuthGuard } from '@nestjs/passport';
import { Usr } from '../user/user.decorator';
import { User } from '../user/user.entity';
import {
CreateEventRequest,
CreateEventResponse,
UserType,
UpdateEventRequest,
GetEventResponse,
GetCityLeaderboardResponse,
GetEventsResponse,
} from '../contract';
import { ApiUseTags, ApiBearerAuth } from '@nestjs/swagger';
@ApiUseTags('event')
@Controller('event')
export class EventController {
  constructor(private readonly eventService: EventService) {}

  /**
   * Returns the 5 events closest to the given coordinates.
   * NOTE(review): ParseIntPipe rejects/truncates fractional coordinates,
   * which real lat/long values usually are — confirm whether a float pipe
   * is needed; left unchanged to preserve current client behavior.
   */
  @Get('geo/:lat/:long')
  @HttpCode(HttpStatus.OK)
  async getClosestEvents(
    @Param('lat', ParseIntPipe) lat: number,
    @Param('long', ParseIntPipe) long: number,
  ): Promise<GetEventsResponse> {
    return await this.eventService.getClosestNEvents(lat, long, 5);
  }

  /** Creates an event owned by the authenticated user. */
  @ApiBearerAuth()
  @Post()
  @HttpCode(HttpStatus.CREATED)
  @UseGuards(AuthGuard())
  async createEvent(
    @Body() createRequest: CreateEventRequest,
    @Usr() user: User,
  ): Promise<CreateEventResponse> {
    return new CreateEventResponse(
      await this.eventService.createEvent(
        createRequest,
        user.id,
        // organizer-created events get different handling in the service
        user.type === UserType.Organizator,
      ),
    );
  }

  /** Enrolls the authenticated user into event `id`. */
  @ApiBearerAuth()
  @Post(':id/enroll')
  @HttpCode(HttpStatus.OK)
  @UseGuards(AuthGuard())
  async enrollToEvent(
    @Param('id', ParseIntPipe) id: number,
    @Usr() user: User,
  ): Promise<void> {
    await this.eventService.enrollToEvent(id, user.id);
  }

  /** Approves participation of `userId` in event `id` (caller must be authorized by the service). */
  @ApiBearerAuth()
  @Post(':id/approve/:userId')
  @HttpCode(HttpStatus.OK)
  @UseGuards(AuthGuard())
  async approveEventParticipation(
    @Param('id', ParseIntPipe) id: number,
    @Param('userId', ParseIntPipe) userId: number,
    @Usr() user: User,
  ): Promise<void> {
    await this.eventService.approveEventParticipation(id, user.id, userId);
  }

  /**
   * Updates event `id`. The path id must match the id in the payload;
   * a mismatch is rejected (kept as 401 for backward compatibility,
   * though 400 would arguably fit better).
   */
  @ApiBearerAuth()
  @Put(':id')
  @HttpCode(HttpStatus.OK)
  @UseGuards(AuthGuard())
  async updateEvent(
    @Param('id', ParseIntPipe) id: number,
    @Body() updateRequest: UpdateEventRequest,
    @Usr() user: User,
  ): Promise<void> {
    if (id !== updateRequest.event.id) {
      throw new UnauthorizedException();
    }
    await this.eventService.updateEvent(updateRequest, user.id);
  }

  /** Cancels event `id` on behalf of the authenticated user. */
  @ApiBearerAuth()
  @Delete(':id')
  @HttpCode(HttpStatus.OK)
  @UseGuards(AuthGuard())
  async cancelEvent(
    @Param('id', ParseIntPipe) id: number,
    @Usr() user: User,
  ): Promise<void> {
    await this.eventService.cancelEvent(id, user.id);
  }

  /** Paginated event listing; defaults: limit=10, page=0. */
  @Get()
  @HttpCode(HttpStatus.OK)
  async getEventsHomePage(
    @Query('limit') limit: string,
    @Query('page') page: string,
  ): Promise<GetEventsResponse> {
    // BUG FIX: non-numeric query values previously produced NaN, which was
    // forwarded straight into the pagination query; fall back to defaults.
    const parsedLimit = Number.parseInt(limit, 10);
    const parsedPage = Number.parseInt(page, 10);
    return await this.eventService.paginate({
      limit: Number.isNaN(parsedLimit) ? 10 : parsedLimit,
      page: Number.isNaN(parsedPage) ? 0 : parsedPage,
    });
  }

  /** Returns event `id` together with its creator and up to 3 participants. */
  @Get(':id')
  @HttpCode(HttpStatus.OK)
  async getEventWithParticipants(
    @Param('id', ParseIntPipe) id: number,
  ): Promise<GetEventResponse> {
    return await this.eventService.getEventWithNParticipantsAndCreator(id, 3);
  }

  /** Top-20 cities leaderboard. */
  @Get('leaderboard/city')
  @HttpCode(HttpStatus.OK)
  async getCitywiseLeaderBoard(): Promise<GetCityLeaderboardResponse> {
    return await this.eventService.getTopNCities(20);
  }
}
|
require "features_helper"

# Browser-level coverage for the Mina transactions page. HTTP interactions
# are replayed from VCR cassettes (:vcr), so the row counts and transaction
# hashes asserted below come from fixture data, not a live chain.
feature "mina transactions", :vcr do
  let(:chain) { create :mina_chain }

  before do
    visit mina_chain_transactions_path(chain)
  end

  it "displays transaction search" do
    expect(page).to have_text "Transaction Search"
    expect(page).to have_text "Filter Transactions"
    expect(page).to have_text "Search Results"
    expect(page).to have_link "Summary", href: mina_chain_path(chain)

    # The filter form exposes account / type / date / memo controls.
    within ".transactions-search-form" do
      expect(page).to have_text "Account"
      expect(page).to have_text "Payment"
      expect(page).to have_text "Fee Transfer"
      expect(page).to have_text "Start Date"
      expect(page).to have_text "Tx Memo"
    end

    # Unfiltered view shows a full page of 25 rows (per the cassette).
    within ".transactions-search-results" do
      expect(page.all("tbody > tr").size).to eq 25
    end
  end

  context "search" do
    before do
      fill_in "Tx Memo", with: "E4YgzwkKzKGy6M6Ro79DAPfCAtnehgfEQ4dwz6XPQTRq5wenxdPk3"
      click_on "Search"
    end

    it "displays search results" do
      within ".transactions-search-results" do
        expect(page.all("tbody > tr").size).to eq 2
        expect(page).to have_text "Payment"
        expect(page).to have_text "E4YgzwkKzKGy6M6Ro79DAPfCAtnehgfEQ4dwz6XPQTRq5wenxdPk3"
        expect(page).to have_link "details"
      end
    end

    it "allows to reset the filter" do
      expect { click_on "Reset Filters" }.
        to change { page.find("#search_memo").value }.
        from("E4YgzwkKzKGy6M6Ro79DAPfCAtnehgfEQ4dwz6XPQTRq5wenxdPk3").to("")
    end

    it "redirects to the transaction page" do
      within ".transactions-search-results" do
        within page.find("tbody > tr:first-child") do
          click_on "details"
          expect(page.current_path).to match "/mina/chains/#{chain.slug}/transactions/CkpZYwZYxj8uwsEmT4MBxrHuuAtPfwV31k9bj1P12wgyRFBaoWhuC"
        end
      end
    end
  end

  context "empty search" do
    before do
      fill_in "Tx Memo", with: "something that does not exist"
      click_on "Search"
    end

    it "displays empty search results" do
      within ".transactions-search-results" do
        expect(page).to have_text "No transactions found"
      end
    end
  end

  context "bad search" do
    before do
      # start_time goes through the URL to exercise server-side validation.
      visit mina_chain_transactions_path(chain, start_time: "invalid")
    end

    it "displays an error" do
      expect(page).to have_text "Search request failed: start time is invalid"
    end
  end
end
|
// This file is part of SWGANH which is released under the MIT license.
// See file LICENSE or go to http://swganh.com/LICENSE
#pragma once

#include "swganh_core/object/tangible/tangible.h"

namespace swganh {
namespace object {

class WeaponFactory;
class WeaponMessageBuilder;

// Broad weapon delivery category.
enum WeaponType {
    MELEE = 0,
    RANGED
};

// Weapon skill group; values start at 1 (UNARMED).
enum WeaponGroup {
    UNARMED = 1,
    ONEHANDED,
    TWOHANDED,
    POLEARM,
    RIFLE,
    PISTOL,
    CARBINE,
    FLAME_THROWER,
    HEAVY_ACID_BEAM,
    HEAVY_ACID_RIFLE,
    HEAVY_PARTICLE_BEAM,
    HEAVY_ROCKET_LAUNCHER,
    THROWN_WEAPON,
    SABER
};

// Tangible game object representing a weapon (object type tag "WEAO").
class Weapon : public swganh::object::Tangible
{
public:
    typedef WeaponFactory FactoryType;
    typedef WeaponMessageBuilder MessageBuilderType;

    Weapon();

    // Read-only accessor; marked const (was non-const) so it can be called
    // through const references — a backward-compatible qualification.
    WeaponType GetWeaponType() const { return weapon_type_; }

    // WEAO
    virtual uint32_t GetType() const;
    const static uint32_t type = 0x6AAC0CFE;

private:
    WeaponType weapon_type_;
};

}} // namespace swganh::object
|
use std::fmt;
/// Control messages understood by `InstanceActor::receive_message`.
///
/// BUG FIX: made `pub` — the enum appears in the signature of a public
/// method, and a private type in a public interface is rejected by rustc
/// (E0446 / `private_interfaces`).
pub enum InstanceMessage {
    StartInstance,
    StopInstance,
    GetStatus,
}
/// Actor owning at most one running filesystem instance.
pub struct InstanceActor {
    // `None` until a `StartInstance` message succeeds.
    // NOTE(review): `Filesystem<Minfs>` and `FvmInstance` are not imported in
    // this chunk — presumably defined elsewhere in the crate; confirm they
    // are in scope here.
    pub instance: Option<(Filesystem<Minfs>, FvmInstance)>,
}
impl InstanceActor {
    /// Creates an actor with no running instance.
    pub fn new() -> Self {
        InstanceActor { instance: None }
    }

    /// Handles one control message, returning a human-readable status
    /// string on success or an error string when the request is invalid
    /// for the current state.
    pub fn receive_message(&mut self, message: InstanceMessage) -> Result<String, String> {
        match message {
            InstanceMessage::StartInstance => {
                if self.instance.is_none() {
                    // Create a new instance and store it.
                    self.instance = Some((Filesystem::new(), FvmInstance::new()));
                    Ok("Instance started".to_string())
                } else {
                    Err("Instance already exists".to_string())
                }
            }
            InstanceMessage::StopInstance => {
                // `is_some()` replaces the non-idiomatic `if let Some(_) = ...`.
                if self.instance.is_some() {
                    // Dropping the tuple stops and removes the instance.
                    self.instance = None;
                    Ok("Instance stopped".to_string())
                } else {
                    Err("No instance to stop".to_string())
                }
            }
            InstanceMessage::GetStatus => match &self.instance {
                Some(_) => Ok("Instance running".to_string()),
                None => Ok("Instance not found".to_string()),
            },
        }
    }
}
|
<gh_stars>0
import React from 'react'
import classNames from 'classnames'
import { createFromIconfontCN } from '@ant-design/icons'
import styles from './index.module.less'
// IconType extends React.HTMLAttributes, so the component also accepts all
// standard HTML attributes (className, onClick, ...) from callers.
export interface IconType extends React.HTMLAttributes<any> {
  // Required icon name; omitting it is a compile-time error, e.g.:
  // TS2741: Property 'type' is missing in type '{}' but required in type 'IconType'.
  type: string;
  // Icon size, defaults to 'normal'.
  size?: 'small' | 'normal' | 'large' | null; // optional property
  // Whether the icon renders in its disabled style.
  disabled?: boolean;
}
// createFromIconfontCN returns a component bound to the iconfont script.
const FontIcon = createFromIconfontCN({
  // Please give new icons a proper camelCase name.
  // Previous script: //at.alicdn.com/t/font_3168130_s635q11ab28.js
  scriptUrl: '//at.alicdn.com/t/font_3168130_5pj66yoarc8.js'
})
const Icon: React.FC<IconType> = ({
  className,
  size = 'normal',
  disabled,
  ...restProps
}) => {
  // classNames composes the size/disabled state classes dynamically.
  return (
    <FontIcon
      className={classNames(
        {
          [styles.large]: size === 'large',
          [styles.normal]: size === 'normal',
          [styles.small]: size === 'small',
          [styles.disabled]: disabled
        },
        className
      )}
      {...restProps}
    />
  )
}
// Memoized: icon props are typically stable, so React.memo skips re-renders.
export default React.memo(Icon)
|
'use strict';

// 24px variant of the 'text--bold' icon: an SVG descriptor object
// (elem/attrs/content mirror the SVG DOM) consumed by the icon pipeline.
var _24 = {
  elem: 'svg',
  attrs: {
    xmlns: 'http://www.w3.org/2000/svg',
    viewBox: '0 0 32 32',
    width: 24,
    height: 24,
  },
  content: [
    {
      elem: 'path',
      attrs: {
        d:
          'M18.25 25H9V7h8.5a5.25 5.25 0 0 1 4 8.65A5.25 5.25 0 0 1 18.25 25zM12 22h6.23a2.25 2.25 0 1 0 0-4.5H12zm0-7.5h5.5a2.25 2.25 0 1 0 0-4.5H12z',
      },
    },
  ],
  name: 'text--bold',
  size: 24,
};
module.exports = _24;
|
#!/usr/bin/env bash

set -x

# shellcheck disable=SC1091
source lib/logging.sh
# shellcheck disable=SC1091
source lib/common.sh

# Kill and remove the running ironic containers
for name in ipa-downloader ironic ironic-inspector dnsmasq httpd mariadb vbmc sushy-tools httpd-infra; do
    # BUG FIX: quote "$name" (shellcheck SC2086) so the expansion cannot
    # word-split; the grep -w "$name$" matching is unchanged.
    sudo "${CONTAINER_RUNTIME}" ps | grep -w "$name$" && sudo "${CONTAINER_RUNTIME}" kill "$name"
    sudo "${CONTAINER_RUNTIME}" ps --all | grep -w "$name$" && sudo "${CONTAINER_RUNTIME}" rm "$name" -f
done

# Remove existing pod
if [[ "${CONTAINER_RUNTIME}" == "podman" ]]; then
    if sudo "${CONTAINER_RUNTIME}" pod exists ironic-pod ; then
        sudo "${CONTAINER_RUNTIME}" pod rm ironic-pod -f
    fi
    if sudo "${CONTAINER_RUNTIME}" pod exists infra-pod ; then
        sudo "${CONTAINER_RUNTIME}" pod rm infra-pod -f
    fi
fi

# Kill the locally running operators
if [ "${BMO_RUN_LOCAL}" = true ]; then
    kill "$(pgrep "run-bmo-loop.sh")" 2> /dev/null || true
    kill "$(pgrep "operator-sdk")" 2> /dev/null || true
fi
if [ "${CAPBM_RUN_LOCAL}" = true ]; then
    CAPBM_PARENT_PID="$(pgrep -f "go run ./cmd/manager/main.go")"
    if [[ "${CAPBM_PARENT_PID}" != "" ]]; then
        CAPBM_GO_PID="$(pgrep -P "${CAPBM_PARENT_PID}" )"
        kill "${CAPBM_GO_PID}" 2> /dev/null || true
    fi
fi

ANSIBLE_FORCE_COLOR=true ansible-playbook \
    -e "working_dir=$WORKING_DIR" \
    -e "num_nodes=$NUM_NODES" \
    -e "extradisks=$VM_EXTRADISKS" \
    -e "virthost=$HOSTNAME" \
    -e "manage_baremetal=$MANAGE_BR_BRIDGE" \
    -i vm-setup/inventory.ini \
    -b -vvv vm-setup/teardown-playbook.yml

# There was a bug in this file, it may need to be recreated.
if [[ $OS == "centos" || $OS == "rhel" ]]; then
    sudo rm -rf /etc/NetworkManager/conf.d/dnsmasq.conf
    if [ "$MANAGE_PRO_BRIDGE" == "y" ]; then
        sudo ifdown provisioning || true
        sudo rm -f /etc/sysconfig/network-scripts/ifcfg-provisioning || true
    fi
    # Leaving this around causes issues when the host is rebooted
    if [ "$MANAGE_BR_BRIDGE" == "y" ]; then
        sudo ifdown baremetal || true
        sudo rm -f /etc/sysconfig/network-scripts/ifcfg-baremetal || true
    fi
fi
|
#!/bin/sh
# Set default values: arch (x86_64|arm64), security flag, git ref.
USE_ARCH=${1:-x86_64}
USE_SECURITY=${2:--}
USE_SHA1=${3:-master}

# x86_64 or arm64
[ "$USE_ARCH" = "arm64" ] && USE_ARM64="-arm64"
[ "$USE_SECURITY" = "-security-" ] && SECURITY_SERVICE_NEEDED="true"

TAF_COMMON_IMAGE=nexus3.edgexfoundry.org:10003/edgex-taf-common${USE_ARM64}:latest
COMPOSE_IMAGE=nexus3.edgexfoundry.org:10003/edgex-devops/edgex-compose${USE_ARM64}:latest

# Pull edgex images.
# NOTE(review): "perfermance" is misspelled but must match the helper
# script's actual filename — rename both together if ever fixed.
# BUG FIX: quote the arguments so empty/odd values don't word-split.
sh get-compose-file-perfermance.sh "${USE_ARCH}" "${USE_SECURITY}" "${USE_SHA1}"

# Pull images (WORK_DIR and image names quoted against word-splitting)
docker run --rm -v "${WORK_DIR}:${WORK_DIR}:rw,z" -w "${WORK_DIR}" -v /var/run/docker.sock:/var/run/docker.sock \
    --env WORK_DIR="${WORK_DIR}" --security-opt label:disable \
    "${COMPOSE_IMAGE}" -f "${WORK_DIR}/TAF/utils/scripts/docker/docker-compose.yml" pull
sleep 5

# Run scripts to collect performance metrics and generate reports
docker run --rm --network host --privileged -v "${WORK_DIR}:${WORK_DIR}:z" -w "${WORK_DIR}" -e ARCH="${USE_ARCH}" \
    -v /var/run/docker.sock:/var/run/docker.sock -e SECURITY_SERVICE_NEEDED="${SECURITY_SERVICE_NEEDED}" \
    -e COMPOSE_IMAGE="${COMPOSE_IMAGE}" "${TAF_COMMON_IMAGE}" \
    --exclude Skipped -u performanceTest/performance-metrics-collection --profile performance-metrics
|
const Event = require('../../base/Event');
module.exports = class extends Event {
async run(guild) {
if(!guild) return;
await this.client.guildsData.findOneAndDelete({ id: guild.id });
this.client.webhook.guild({
color: this.client.colors.error,
title: 'Guild Left',
description: `Name: ${guild.name}\nGuild ID: ${guild.id}\nMember Count: ${guild.memberCount}\nTotal Guilds: ${this.client.guilds.cache.size}`
});
}
};
|
<reponame>desbo/http4s
/*
* Copyright 2013 http4s.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.http4s
package headers
import org.scalacheck.Prop._
import org.http4s.syntax.all._
import org.http4s.laws.discipline.ArbitraryInstances._
// Property- and example-based tests for the Accept-Charset header:
// header round-trip laws plus q-value matching precedence rules.
class AcceptCharsetSuite extends MHeaderLaws {
  checkAll("Accept-Charset", headerLaws(`Accept-Charset`))

  test("AcceptCharset is satisfied by a charset if the q value is > 0") {
    forAll { (h: `Accept-Charset`, cs: Charset) =>
      h.qValue(cs) > QValue.Zero ==> h.satisfiedBy(cs)
    }
  }

  test("AcceptCharset is not satisfied by a charset if the q value is 0") {
    forAll { (h: `Accept-Charset`, cs: Charset) =>
      !(h.map(_.withQValue(QValue.Zero)).satisfiedBy(cs))
    }
  }

  // An explicit atom for a charset takes precedence over the `*` wildcard.
  test("AcceptCharset matches atom before splatted") {
    val acceptCharset =
      `Accept-Charset`(CharsetRange.*, CharsetRange.Atom(Charset.`UTF-8`, qValue"0.5"))
    assertEquals(acceptCharset.qValue(Charset.`UTF-8`), qValue"0.5")
  }

  // With no matching atom, the wildcard's (default One) q-value applies.
  test("AcceptCharset matches splatted if atom not present") {
    val acceptCharset =
      `Accept-Charset`(CharsetRange.*, CharsetRange.Atom(Charset.`ISO-8859-1`, qValue"0.5"))
    assertEquals(acceptCharset.qValue(Charset.`UTF-8`), QValue.One)
  }

  test("AcceptCharset rejects charset matching atom with q=0") {
    val acceptCharset =
      `Accept-Charset`(CharsetRange.*, CharsetRange.Atom(Charset.`UTF-8`, QValue.Zero))
    assertEquals(acceptCharset.qValue(Charset.`UTF-8`), QValue.Zero)
  }

  test("AcceptCharset rejects charset matching splat with q=0") {
    val acceptCharset = `Accept-Charset`(
      CharsetRange.*.withQValue(QValue.Zero),
      CharsetRange.Atom(Charset.`ISO-8859-1`, qValue"0.5"))
    assertEquals(acceptCharset.qValue(Charset.`UTF-8`), QValue.Zero)
  }

  test("AcceptCharset rejects unmatched charset") {
    val acceptCharset = `Accept-Charset`(CharsetRange.Atom(Charset.`ISO-8859-1`, qValue"0.5"))
    assertEquals(acceptCharset.qValue(Charset.`UTF-8`), QValue.Zero)
  }
}
|
#include "status.h"

/* Explicit includes for the standard facilities used in this file
   (previously relied on transitive includes via status.h). */
#include <dirent.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
void fprint_dir_status(FILE *file, struct client_status *s) {
for(int i = 0; i < s->current_dir->file_count; i ++) {
fprintf(file, "[%d]%30.30s\n", i + 1, s->current_dir->files[i]);
}
}
/* Build a heap-allocated string with one "[n]<name>" row per file in the
 * current directory; the caller owns (and must free) the result.
 * Returns NULL on allocation failure. */
char *sprint_dir_status(struct client_status *s) {
    char *str = malloc(1);
    if (str == NULL) {
        return NULL;
    }
    /* BUG FIX: the buffer was used with strlen/strncat while still
     * uninitialized, which is undefined behavior. */
    str[0] = '\0';
    for (int i = 0; i < s->current_dir->file_count; i++) {
        /* get size of resulting string (includes the terminating NUL) */
        size_t file_str_len = buffer_size("[%d]%30.30s\n", i + 1, s->current_dir->files[i]);
        char *file_str = malloc(file_str_len);
        if (file_str == NULL) {
            free(str);
            return NULL;
        }
        snprintf(file_str, file_str_len, "[%d]%30.30s\n", i + 1, s->current_dir->files[i]);
        /* BUG FIX: grow by exactly what is needed instead of a fixed
         * MAX_BUFFER increment with a bogus strncat bound. */
        char *grown = realloc(str, strlen(str) + strlen(file_str) + 1);
        if (grown == NULL) {
            free(file_str);
            free(str);
            return NULL;
        }
        str = grown;
        strcat(str, file_str);
        free(file_str);
    }
    return str;
}
/* Scan `dir` and return a newly allocated directory listing containing the
 * names of its regular files. Returns NULL on allocation failure or when the
 * directory cannot be opened. */
struct directory *get_dir_contents(const char *dir) {
    struct directory *current_dir = malloc(sizeof(struct directory));
    /* BUG FIX: the NULL check previously ran after the struct had already
     * been dereferenced. */
    if (current_dir == NULL) {
        return NULL;
    }
    current_dir->files = malloc(sizeof(char *));
    /* BUG FIX: file_count was left uninitialized when the directory
     * contained no regular files. */
    current_dir->file_count = 0;
    if (current_dir->files == NULL) {
        free(current_dir);
        return NULL;
    }
    DIR *fd = opendir(dir);
    /* BUG FIX: readdir(NULL) crashed when the path could not be opened. */
    if (fd == NULL) {
        free(current_dir->files);
        free(current_dir);
        return NULL;
    }
    struct dirent *dir_contents;
    int i = 0;
    while ((dir_contents = readdir(fd)) != NULL) {
        if (dir_contents->d_type != DT_REG) {
            continue;
        }
        current_dir->files = realloc(current_dir->files, sizeof(char *) * (i + 1));
        /* BUG FIX: d_name points into readdir's internal buffer, which is
         * invalidated by subsequent readdir/closedir calls — keep a copy. */
        current_dir->files[i] = strdup(dir_contents->d_name);
        if (current_dir->files[i] == NULL) {
            printf("null at %d\n fetching files", i);
        }
        current_dir->file_count = i + 1;
        i++;
    }
    closedir(fd); /* BUG FIX: the directory stream was leaked */
    return current_dir;
}
|
/**
* Created by warlock on 18/03/16.
*/
|
// Creates a Leaflet marker with a pulsing "blink" icon at `point`.
// `property` (all optional): color, diveColor (glow color), iconSize
// [w, h], level (glow intensity), speedTime (blink period, s), title.
// `classNameVal` is appended to the div icon's class list.
L.blinkMarker = function (point, property, classNameVal) {
    // Build the marker DOM off-screen; tempDivEle exists only so the final
    // markup can be read back as a string via innerHTML.
    var tempDivEle = document.createElement("div");
    var divEle = document.createElement("div");
    var spanEl = document.createElement("span");
    var aEl = document.createElement("a");
    tempDivEle.append(divEle);
    divEle.append(spanEl);
    spanEl.append(aEl);
    // Base style hooks.
    spanEl.classList.add("pulse-icon");
    aEl.classList.add("dive-icon");
    // Style element added to the document head (kept as-is).
    var style = document.createElement("style");
    style.type = "text/css";
    document.head.appendChild(style);
    if (property) {
        // Main color.
        if (property.color) {
            spanEl.style.backgroundColor = property.color;
            if (!property.diveColor) {
                aEl.style.boxShadow = "0 0 6px 2px " + property.color;
            }
        }
        // Marker size.
        if (property.iconSize) {
            spanEl.style.width = property.iconSize[0] + "px";
            spanEl.style.height = property.iconSize[1] + "px";
        }
        // Glow ("dive") color.
        if (property.diveColor) {
            // Glow intensity.
            if (property.level) {
                aEl.style.boxShadow = "0 0 " + (property.level * 3) + "px " + property.level + "px " + property.diveColor;
            } else {
                aEl.style.boxShadow = "0 0 6px 2px " + property.diveColor;
            }
        }
        // Glow intensity; color preference: diveColor, then color, then red.
        if (property.level) {
            if (property.diveColor) {
                aEl.style.boxShadow = "0 0 " + (property.level * 3) + "px " + property.level + "px " + property.diveColor;
            } else if (property.color) {
                aEl.style.boxShadow = "0 0 " + (property.level * 3) + "px " + property.level + "px " + property.color;
            } else {
                aEl.style.boxShadow = "0 0 " + (property.level * 3) + "px " + property.level + "px red";
            }
        }
        // Blink speed.
        if (property.speedTime) {
            aEl.style.setProperty("animation", "pulsate " + property.speedTime + "s infinite")
        }
    }
    var myIcon = L.divIcon({ className: 'my-div-icon ' + classNameVal, html: tempDivEle.innerHTML });
    // BUG FIX: property.title was read without checking property, so calling
    // blinkMarker without a property object threw a TypeError.
    var marker = L.marker(point, { icon: myIcon, title: property && property.title });
    return marker;
}
|
'use strict';
var React = require('react');
var authorAPI = require('../../api/api');
var AuthorList = require('./authorList');
var Authors = React.createClass({
getInitialState: function() {
return {authors: []};
},
componentDidMount: function() {
if (this.isMounted()) {
this.setState({authors: authorAPI.getAllAuthors()});
}
},
render: function() {
return (
<div>
<h1>Authors</h1>
<AuthorList authors={this.state.authors}/>
</div>
);
}
});
module.exports = Authors;
|
#!/bin/sh
# Copyright (c) 2017, NVIDIA CORPORATION. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#  * Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
#  * Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in the
#    documentation and/or other materials provided with the distribution.
#  * Neither the name of NVIDIA CORPORATION nor the names of its
#    contributors may be used to endorse or promote products derived
#    from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

# Stop on errors
set -e

# build the executable
scons -u -Q

executable=./conv6d-tiled-2dkpr.bin

# Argument handling:
#   1 arg  -> $1 is the data-set suffix, trace level defaults to 0
#   2 args -> $2 is the trace level; data-set suffix stays "A"
#             (NOTE(review): $1 is ignored in this branch — confirm that
#             is intentional rather than `infile=$1` being meant here)
#   else   -> all defaults
if [ $# -eq 1 ]; then
    infile=$1
    tlevel=0  # BUG FIX: tlevel was left unset here, producing "--trace_level="
elif [ $# -eq 2 ]; then
    infile=A
    tlevel=$2
else
    infile=A
    tlevel=0
fi

# Run the program with some interesting (and legal) default settings
WHOOP_CHECK_REFERENCE=1 ${executable} \
    --vec_inputs_file=../../data-sets/inputs_2D${infile}.in.txt \
    --vec_weights_file=../../data-sets/weights_2D${infile}.in.txt \
    --vec_dimensions_file=../../data-sets/dimensions_2D${infile}.in.txt \
    --ref_outputs_file=../../data-sets/outputs_2D${infile}.ref.txt \
    --vec_outputs_file=./outputs_2D${infile}.out.txt \
    --trace_level=${tlevel}
|
<gh_stars>0
/*
 * Round.sql
 * Chapter 4, Oracle10g PL/SQL Programming
 * by <NAME>, <NAME>, <NAME>
 *
 * This script demonstrates the ROUND function
 */
SET SERVEROUTPUT ON

DECLARE
-- NUMBER(10,4) stores the sample value 12345.6789 exactly.
v_round NUMBER (10,4) := 12345.6789;
BEGIN
-- No precision argument: rounds to the nearest integer -> 12346
DBMS_OUTPUT.PUT_LINE('Default: '||ROUND(v_round));
-- Positive precision: two decimal places -> 12345.68
DBMS_OUTPUT.PUT_LINE('+2: '||ROUND(v_round, 2));
-- Negative precision: nearest hundred -> 12300
DBMS_OUTPUT.PUT_LINE('-2: '||ROUND(v_round, -2));
END;
/
|
<gh_stars>0
package com.company;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintStream;
import java.util.*;
import static com.company.PieceTypeSerializer.pieceTypeSerializer;
public class Main {

    /**
     * Wires up the broker/subscriber pipeline, then runs the game controller
     * until it stops. Moves are logged as JSON to ./chess_log.txt.
     */
    public static void main(String[] args) {
        Gson gson = new GsonBuilder().
                registerTypeAdapter(PieceType.class, pieceTypeSerializer()).
                create();
        Broker b = new Broker();
        File file = new File("./chess_log.txt");
        try {
            PrintStream out = new PrintStream(file);
            SubscriberSink ts = new TextOutputSubscriberSink(gson, out);
            Subscriber s = new Subscriber(ts);
            b.addSubscriber(s);
        } catch (FileNotFoundException fnf) {
            // BUG FIX: this failure was silently swallowed; at least report
            // that the game will run without file logging.
            System.err.println("Could not open chess_log.txt; logging disabled: " + fnf.getMessage());
        }
        try {
            b.start(); // start broker thread
            Game g = new Game(gson, b);
            GameController gc = new GameController(gson, b, g);
            gc.run(); // run game controller, blocks and listens to sockets
            b.noMoreMessages(); // exception in game controller, process remaining msgs
            b.join();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Scripted sequence exercising both castling moves. The move list starts
     * from e7, i.e. the side on ranks 7/8 moves first here.
     * (The previous comments had the piece colors swapped — e8/f8/g8 pieces
     * belong to the ranks-7/8 side, e1/d1/c1 pieces to the ranks-1/2 side.)
     */
    public static void testCastling(Game g) {
        // king's pawn (ranks-7/8 side)
        g.move("e7", "e5");
        // queen's pawn (ranks-1/2 side)
        g.move("d2", "d4");
        // kingside bishop out
        g.move("f8", "c5");
        // queenside knight out
        g.move("b1", "c3");
        // kingside knight out
        g.move("g8", "f6");
        // queenside bishop out
        g.move("c1", "e3");
        // ranks-7/8 side castles kingside
        g.move("e8", "g8");
        // queen out
        g.move("d1", "d2");
        // corner pawn
        g.move("h7", "h6");
        // ranks-1/2 side castles queenside
        g.move("e1", "c1");
    }

    /** Scripted quick-checkmate sequence ending with Qxf2. */
    public static void testFourMoveMate(Game g) {
        g.move("e7", "e5");
        g.move("e2", "e4");
        g.move("d8", "f6");
        g.move("b1", "c3");
        g.move("f8", "c5");
        g.move("h2", "h3");
        g.move("f6", "f2");
    }

    /** Reads "from,to" move pairs from stdin forever and applies them. */
    public static void interactive(Game g) {
        Scanner scanner = new Scanner(System.in);
        while (true) {
            System.out.print("Move: ");
            String moveStr = scanner.next();
            String[] mvs = moveStr.split(",");
            g.move(mvs[0], mvs[1]);
        }
    }
}
|
import numpy as np
import pandas as pd
import tensorflow as tf
import keras
from keras.models import Sequential
from keras.layers import Dense

# Load and preprocess the data: every column except 'price' is a feature.
data = pd.read_csv('data.csv')
x = data.drop(['price'], axis=1).values
y = data['price'].values

# Build a small fully connected regression network (expects 9 features).
model = Sequential()
model.add(Dense(64, activation='relu', input_dim=9))
model.add(Dense(64, activation='relu'))
model.add(Dense(1))

# Compile for regression: MSE loss, MAE as the reported metric.
model.compile(loss='mean_squared_error',
              optimizer='adam',
              metrics=['mean_absolute_error'])

# Train the model.
model.fit(x, y, epochs=10)

# Evaluate the model. NOTE(review): this evaluates on the TRAINING data —
# no held-out split is made; add train_test_split for honest test metrics.
score = model.evaluate(x, y, verbose=0)
# BUG FIX: labels previously said "Test loss"/"Test accuracy", but this is
# the training set and the second metric is mean absolute error.
print('Train loss (MSE):', score[0])
print('Train MAE:', score[1])
|
'use strict';
var provider = (function() {
const backend = require('../grove-node-server-utils/backend');
//const fs = require('fs')
const four0four = require('../grove-node-server-utils/404')();
//const options = require('../grove-node-server-utils/options')()
var ca = '';
// FIXME: better handled inside options?
// if (options.mlCertificate) {
// console.log('Loading ML Certificate ' + options.mlCertificate)
// ca = fs.readFileSync(options.mlCertificate)
// }
// Note: config should not reveal any implementation details
var provide = function(config) {
const authProvider = config.authProvider;
if (!authProvider) {
throw new Error(
'defaultCrudRoute configuration must include an authProvider'
);
}
const router = require('express').Router();
var idConverter = config.idConverter || {
toId: function(uri) {
return encodeURIComponent(uri);
},
toUri: function(id) {
return decodeURIComponent(id);
}
};
config.views = config.views || {};
config.views.metadata = config.views.metadata || {
call: function(req, res, config, id) {
const uri = idConverter.toUri(id);
// Double-pass backend call required to get a (fairly) reliable content-type as well as metadata
docsBackendCall(req, res, config, 'GET', uri, {}, function(
backendResponse
) {
const contentType = backendResponse.headers['content-type'].split(
';'
)[0];
const format =
backendResponse.headers['vnd.marklogic.document-format'];
const size = backendResponse.headers['content-length'];
docsBackendCall(
req,
res,
config,
'GET',
uri,
{
category: 'metadata',
format: 'json'
},
function(backendResponse, metadata) {
res.status(backendResponse.statusCode);
for (var header in backendResponse.headers) {
// copy all others except auth challenge headers
if (
header !== 'www-authenticate' &&
header !== 'content-length'
) {
res.header(header, backendResponse.headers[header]);
}
}
// TODO: document this download flag
if ('' + req.query.download === 'true') {
res.header(
'content-disposition',
'attachment; filename=' + uri.split('/').pop()
);
}
// pass through REST-api metadata (props, collections, perms, etc)
let data = {};
// TODO: improve this error handling
try {
data = JSON.parse(metadata);
} catch (e) {
console.log(e);
}
// append some more, useful for showing binary files
data.contentType = contentType;
data.fileName = uri.split('/').pop();
data.format = format;
data.size = size;
data.uri = uri;
res.write(JSON.stringify(data));
res.end();
}
);
});
}
};
config.actions = config.actions || {};
var contentType = config.contentType || 'application/json';
// by default all CRUD calls are shielded by authentication
var authed = config.authed !== undefined ? config.authed : true;
if (authed) {
router.use(authProvider.isAuthenticated);
}
// GET Crud paths take an extra suffix as 'view' parameter
// We don't allow browsing, so only GET *with* id
router.get('/:id/:view?', function(req, res) {
const id = req.params.id;
const viewName = req.params.view || '_default';
const view = config.views[viewName];
// HEAD is captured by router.get if not caught before
if (req.method !== 'GET') {
four0four.methodNotAllowed(req, res, ['GET']);
return;
}
if (!id || id === '') {
four0four.missingRequired(req, res, ['id']);
return;
}
if (!view && viewName !== '_default') {
four0four.notImplemented(req, res, '/' + id + '/' + viewName);
return;
}
// reply with 406 if client doesn't Accept mimes matching expected Content-Type
if (!req.accepts(view ? view.contentType : contentType)) {
four0four.notAcceptable(req, res, [contentType]);
return;
}
if (view && view.call) {
view.call(req, res, config, id, viewName);
} else {
const uri = idConverter.toUri(id);
var params = {
uri: uri,
transform: view ? view.transform : null,
category: view ? view.category : null,
format: (view ? view.format : config.format) || 'json'
};
docsBackendCall(req, res, config, req.method, uri, params, function(
backendResponse,
data
) {
res.status(backendResponse.statusCode);
for (var header in backendResponse.headers) {
// copy all others except auth challenge headers
if (header !== 'www-authenticate') {
res.header(header, backendResponse.headers[header]);
}
}
if ('' + req.query.download === 'true') {
res.header(
'content-disposition',
'attachment; filename=' + uri.split('/').pop()
);
}
res.write(data);
res.end();
});
}
});
// https://restfulapi.net/rest-put-vs-post/
// Create -> POST without id, PUT with id
// Update -> PUT with id
// Delete -> DELETE with id
router.post(/^[/]?$/, function(req, res) {
// reply with 405 if a non-allowed method is used
if (['POST'].indexOf(req.method) < 0) {
four0four.methodNotAllowed(req, res, ['POST']);
return;
}
// reply with 415 if body doesn't match expected Content-Type
if (!req.is(contentType)) {
four0four.unsupportedMediaType(req, res, [contentType]);
return;
}
var params = {};
params.collection = config.collections;
// ML Rest api will generate a uri using prefix and extension
params.directory = config.directory || '/';
params.extension = config.extension || 'json';
// temporal applies to all methods, if specified (null is ignored)
params['temporal-collection'] = config.temporalCollection;
docsBackendCall(req, res, config, req.method, null, params, function(
backendResponse,
data
) {
var location;
res.status(backendResponse.statusCode);
for (var header in backendResponse.headers) {
// rewrite location
if (header === 'location') {
location = backendResponse.headers[header];
res.header(
header,
idConverter.toId(backendResponse.headers[header].substring(18))
);
// copy all others except auth challenge headers
} else if (header !== 'www-authenticate') {
res.header(header, backendResponse.headers[header]);
}
}
if ('' + req.query.download === 'true') {
res.header(
'content-disposition',
'attachment; filename=' + location.split('/').pop()
);
}
res.write(data);
res.end();
});
});
// Handle document CRUD plus configured custom actions on /:id[/:action].
router.all(['/:id', '/:id/:action?'], function(req, res) {
  const id = req.params.id;
  const actionName = req.params.action || '_default';
  const action = config.actions[actionName];
  // 501 for an explicitly named action with no configuration
  if (!action && actionName !== '_default') {
    four0four.notImplemented(req, res, '/' + id + '/' + actionName);
    return;
  }
  var method = req.method;
  // reply with 405 if a non-allowed method is used
  let methods = action ? allowedActionMethods(action) : ['DELETE', 'PUT'];
  if (methods.indexOf(method) < 0) {
    // BUGFIX: advertise the methods actually allowed for this action instead
    // of the hard-coded default list (wrong whenever a custom action is hit).
    four0four.methodNotAllowed(req, res, methods);
    return;
  }
  // reply 400 if id is missing
  if (!id) {
    four0four.missingRequired(req, res, ['id']);
    return;
  }
  // reply with 415 if body doesn't match expected Content-Type
  let cType = action ? action.contentType : contentType;
  if (expectBody(method) && !req.is(cType)) {
    four0four.unsupportedMediaType(req, res, [cType]);
    return;
  }
  // assume whatever comes after (the first) / is id
  const uri = idConverter.toUri(id);
  var data = [];
  req.on('data', function(chunk) {
    data.push(chunk);
  });
  req.on('end', function() {
    var body = Buffer.concat(data).toString();
    var params = {};
    // a configured action may rewrite method/body/params before the backend call
    if (action && action[method]) {
      var tmp = action[method](body, params);
      method = tmp.method;
      body = tmp.body;
      params = tmp.params;
    } else if (expectBody(method)) {
      params.collection = config.collections;
    }
    // temporal applies to all methods, if specified (null is ignored)
    params['temporal-collection'] = action
      ? action.temporalCollection
      : config.temporalCollection;
    docsBackendCall(
      req,
      res,
      config,
      method,
      uri,
      params,
      function(backendResponse, data) {
        res.status(backendResponse.statusCode);
        for (var header in backendResponse.headers) {
          // rewrite location
          if (header === 'location') {
            res.header(
              header,
              idConverter.toId(
                backendResponse.headers[header].substring(18)
              )
            );
            // copy all others except auth challenge headers
          } else if (header !== 'www-authenticate') {
            res.header(header, backendResponse.headers[header]);
          }
        }
        if ('' + req.query.download === 'true') {
          res.header(
            'content-disposition',
            'attachment; filename=' + uri.split('/').pop()
          );
        }
        res.write(data);
        res.end();
      },
      body
    );
  });
});
// Fallback routes: any request not consumed above gets a 405 listing the
// allowed verbs. NOTE(review): '/:id/:view?' overlaps the patterns of the
// handler registered above, so this likely only fires for methods it
// declined — confirm the intended ordering.
router.all('/:id/:view?', function(req, res) {
  four0four.methodNotAllowed(req, res, ['GET']);
});
router.all('/:id', function(req, res) {
  four0four.methodNotAllowed(req, res, ['PUT', 'DELETE']);
});
router.all('/', function(req, res) {
  four0four.methodNotAllowed(req, res, ['POST']);
});
return router;
};
// Proxy one request to the MarkLogic /v1/documents endpoint.
// Resolves auth for the session, then hands the call to `backend`, which
// invokes `callback(backendResponse, data)` with the upstream reply.
// A POST with a known uri is rewritten to PUT (update of an existing doc).
function docsBackendCall(
  req,
  res,
  config,
  method,
  uri,
  params,
  callback,
  body
) {
  var path = '/v1/documents';
  params.uri = uri;
  var backendOptions = {
    // POST + explicit uri means "write this document" => PUT upstream
    method: uri && method === 'POST' ? 'PUT' : method,
    path: path,
    params: params,
    headers: req.headers,
    ca: ca
  };
  if (body) {
    backendOptions.body = body;
  }
  config.authProvider.getAuth(req.session, backendOptions).then(
    function(authorization) {
      if (authorization) {
        backendOptions.headers.authorization = authorization;
      }
      // caching is disabled by default unless config.neverCache === false
      var neverCache =
        config.neverCache !== undefined ? config.neverCache : true;
      if (neverCache || req.method !== 'GET') {
        noCache(res);
      }
      // call backend, and pipe clientResponse straight into res
      backend.call(req, backendOptions, callback);
    },
    function() {
      // TODO: might return an error too?
      four0four.unauthorized(req, res);
    }
  );
}
// Return the subset of HTTP verbs for which this action defines a handler,
// always in the order POST, PUT, DELETE.
function allowedActionMethods(action) {
  return ['POST', 'PUT', 'DELETE'].filter(function(verb) {
    return !!action[verb];
  });
}
// Only POST and PUT requests are expected to carry a request body.
function expectBody(method) {
  return method === 'POST' || method === 'PUT';
}
// Mark the response as uncacheable for HTTP/1.1, HTTP/1.0, and date-based caches.
function noCache(response) {
  var headers = {
    'Cache-Control': 'no-cache, must-revalidate', // HTTP 1.1 - must-revalidate
    Pragma: 'no-cache', // HTTP 1.0
    Expires: 'Sat, 26 Jul 1997 05:00:00 GMT' // Date in the past
  };
  Object.keys(headers).forEach(function(name) {
    response.append(name, headers[name]);
  });
}
return provide;
})();
// Expose the provider factory to consumers of this module.
module.exports = provider;
|
def common_elements(str1, str2):
    """Return the characters of ``str1`` that also occur in ``str2``.

    Order follows ``str1`` and duplicates in ``str1`` are preserved.
    """
    return [letter for letter in str1 if letter in str2]
# NOTE(review): ``str1`` and ``str2`` are not defined in this snippet; they
# must be assigned earlier or this line raises a NameError — confirm.
common_letters = common_elements(str1, str2)
print(common_letters) # Output: []
|
# set -x
# Experiment configuration for the CNN / MNIST-KMNIST multi-task tuning runs.
app="CNN"
dataset="MNIST-KMNIST"
ntask=2
bmin=1
bmax=27
eta=3
Nloop=3
# parse results for each run
for expid in N0 N1 N2
do
# Each expid is parsed with the identical hyperparameter configuration above.
python parse_results_history.py -app ${app} -dataset ${dataset} -ntask ${ntask} -bmin ${bmin} -bmax ${bmax} -eta ${eta} -Nloop ${Nloop} -expid ${expid}
done
# plot for all runs
python plot_history.py -app CNN -explist N0 N1 N2 -deleted_tuners None -dataset ${dataset} -ntask ${ntask} -bmin ${bmin} -bmax ${bmax} -eta ${eta} -Nloop ${Nloop}
|
def sort_alphabetically(list):
    """Sort the given list in place with bubble sort and return it.

    NOTE: the parameter name shadows the built-in ``list``; it is kept
    unchanged for backward compatibility with keyword callers.
    """
    length = len(list)
    # After each outer pass, the largest remaining element has bubbled to
    # position ``end``; shrink the unsorted region from the right.
    for end in range(length - 1, 0, -1):
        for idx in range(end):
            if list[idx] > list[idx + 1]:
                list[idx], list[idx + 1] = list[idx + 1], list[idx]
    return list
# Demonstration: sorts the sample words alphabetically (mutates the literal in place).
sorted_list = sort_alphabetically(['dog', 'cat', 'mouse', 'egg'])
print(sorted_list)
|
#!/bin/bash
# Render <name>.md to <name>.html using the project template, stylesheet and
# Lua code filter. Usage: ./build.sh <name-without-extension>
# BUGFIX: quote "$1" so filenames containing spaces are not word-split.
pandoc --template template.html --css style.css --lua-filter=code-filter.lua "$1.md" > "$1.html"
|
# npm run build
# pm2 startOrRestart process.json --update-env
|
class IntList:
    """A minimal container that collects integers and reports their minimum."""

    def __init__(self):
        # Backing storage, in insertion order.
        self.values = []

    def add_value(self, value):
        """Store ``value`` at the end of the collection."""
        self.values.append(value)

    def find_min(self):
        """Return the smallest stored value.

        Raises:
            ValueError: if no values have been added yet.
        """
        return min(self.values)
|
<filename>common/bundestagio/src/models/NamedPoll/NamedPoll/Votes.ts<gh_stars>10-100
import { Schema } from "mongoose";
import NamedPollVotesVotes, { INamedPollVotesVotes } from "./Votes/Votes";
import NamedPollVotesParty, { INamedPollVotesParty } from "./Votes/Party";
import NamedPollVotesDeputy, { INamedPollVotesDeputy } from "./Votes/Deputy";
// Aggregated vote data attached to a named poll.
export interface INamedPollVotes {
  // Totals across all voters.
  all: INamedPollVotesVotes;
  // Per-party breakdown.
  parties: INamedPollVotesParty[];
  // Per-deputy breakdown.
  deputies: INamedPollVotesDeputy[];
  // NOTE(review): semantics defined by upstream consumers — presumably flips
  // how yes/no are interpreted for this poll; confirm against callers.
  inverseVoteDirection: boolean;
}
// Embedded subdocument schema: _id is disabled because this is always nested
// inside a NamedPoll document.
const NamedPollVotes = new Schema(
  {
    all: NamedPollVotesVotes,
    parties: [NamedPollVotesParty],
    deputies: [NamedPollVotesDeputy],
    inverseVoteDirection: { type: Boolean },
  },
  { _id: false }
);
export default NamedPollVotes;
|
# Run the network test detached in the background (output goes to nohup.out).
nohup python backend/networkTest.py &
# Start the web app in the foreground; this keeps the shell occupied.
python web/app.py
|
#!/bin/sh
# Copyright (c) 2011-2012 Jean-Marc Valin
#
# This file is extracted from RFC6716. Please see that RFC for additional
# information.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# - Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# - Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# - Neither the name of Internet Society, IETF or IETF Trust, nor the
# names of specific contributors, may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
# OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# -f: do not fail or complain when the logs from a previous run don't exist yet
rm -f logs_mono.txt
rm -f logs_stereo.txt
# Require exactly three arguments: tool directory, vector directory, sample rate.
if [ "$#" -ne "3" ]; then
    echo "usage: run_vectors.sh <exec path> <vector path> <rate>"
    exit 1
fi
CMD_PATH=$1
VECTOR_PATH=$2
RATE=$3
# Pre-set OPUS_DEMO / OPUS_COMPARE in the environment to override tool paths.
: ${OPUS_DEMO:=$CMD_PATH/opus_demo}
: ${OPUS_COMPARE:=$CMD_PATH/opus_compare}
if [ -d $VECTOR_PATH ]; then
    echo Test vectors found in $VECTOR_PATH
else
    echo No test vectors found
    #Don't make the test fail here because the test vectors
    #will be distributed separately
    exit 0
fi
if [ ! -x $OPUS_COMPARE ]; then
    echo ERROR: Compare program not found: $OPUS_COMPARE
    exit 1
fi
if [ -x $OPUS_DEMO ]; then
    echo Decoding with $OPUS_DEMO
else
    echo ERROR: Decoder not found: $OPUS_DEMO
    exit 1
fi
echo "=============="
echo Testing mono
echo "=============="
echo
for file in 01 02 03 04 05 06 07 08 09 10 11 12
do
    if [ -e $VECTOR_PATH/testvector$file.bit ]; then
        echo Testing testvector$file
    else
        echo Bitstream file not found: testvector$file.bit
        # NOTE(review): execution still falls through to the decode below,
        # which then fails and aborts the whole run — confirm this is intended.
    fi
    # Decode in mono (channel count 1) and append tool output to the log.
    if $OPUS_DEMO -d $RATE 1 $VECTOR_PATH/testvector$file.bit tmp.out >> logs_mono.txt 2>&1; then
        echo successfully decoded
    else
        echo ERROR: decoding failed
        exit 1
    fi
    # Compare decoder output against the reference; nonzero status = mismatch.
    $OPUS_COMPARE -r $RATE $VECTOR_PATH/testvector$file.dec tmp.out >> logs_mono.txt 2>&1
    float_ret=$?
    if [ "$float_ret" -eq "0" ]; then
        echo output matches reference
    else
        echo ERROR: output does not match reference
        exit 1
    fi
    echo
done
echo "=============="
echo Testing stereo
echo "=============="
echo
for file in 01 02 03 04 05 06 07 08 09 10 11 12
do
    if [ -e $VECTOR_PATH/testvector$file.bit ]; then
        echo Testing testvector$file
    else
        echo Bitstream file not found: testvector$file
    fi
    # Decode in stereo (channel count 2) and append tool output to the log.
    if $OPUS_DEMO -d $RATE 2 $VECTOR_PATH/testvector$file.bit tmp.out >> logs_stereo.txt 2>&1; then
        echo successfully decoded
    else
        echo ERROR: decoding failed
        exit 1
    fi
    # -s selects stereo comparison mode.
    $OPUS_COMPARE -s -r $RATE $VECTOR_PATH/testvector$file.dec tmp.out >> logs_stereo.txt 2>&1
    float_ret=$?
    if [ "$float_ret" -eq "0" ]; then
        echo output matches reference
    else
        echo ERROR: output does not match reference
        exit 1
    fi
    echo
done
echo All tests have passed successfully
# Average the 'quality' figures the compare tool wrote into each log.
grep quality logs_mono.txt | awk '{sum+=$4}END{print "Average mono quality is", sum/NR, "%"}'
grep quality logs_stereo.txt | awk '{sum+=$4}END{print "Average stereo quality is", sum/NR, "%"}'
|
<reponame>feueraustreter/YAPION<filename>src/main/java/yapion/parser/YAPIONParserMapObject.java
// SPDX-License-Identifier: Apache-2.0
// YAPION
// Copyright (C) 2019,2020 yoyosource
package yapion.parser;
import yapion.annotations.deserialize.YAPIONLoadExclude;
import yapion.annotations.serialize.YAPIONSaveExclude;
import yapion.hierarchy.types.YAPIONVariable;
/**
 * Parser-internal carrier that holds a single parsed {@link YAPIONVariable}
 * while map objects are being assembled.
 */
@YAPIONSaveExclude(context = "*")
@YAPIONLoadExclude(context = "*")
public class YAPIONParserMapObject {
    // Immutable after construction; exposed directly to the parser.
    public final YAPIONVariable variable;
    // Package-private: instances are only created by the parser.
    YAPIONParserMapObject(YAPIONVariable variable) {
        this.variable = variable;
    }
}
|
<reponame>gonjavi/railsSportsTrack
require 'rails_helper'
# Routing, strong-parameter, and CRUD specs for the measurements API controller.
RSpec.describe Api::V1::MeasurementsController, type: :controller do
  # Routing expectations for the REST endpoints.
  it { should route(:get, '/api/v1/measurements').to(action: :index) }
  it { should route(:post, '/api/v1/measurements').to(action: :create) }
  it { should route(:delete, '/api/v1/measurements/1').to(action: :destroy, id: 1) }
  # Shared fixtures used by the request specs below.
  let(:sport) { Sport.create({ name: 'Wrestling' }) }
  let(:valid_attributes) { { time: '00:40', date: '22/05/2020', sport_id: sport.id } }
  let(:invalid_attributes) { { time: '', date: '', sport_id: nil } }
  describe 'GET #index' do
    before { get :index }
    it { should respond_with(200) }
  end
  it { should use_before_action(:set_measurement) }
  describe 'POST #create' do
    # Verifies only the whitelisted attributes pass strong parameters.
    it do
      params = {
        measurement: {
          id: 1,
          params: {
            time: '00:30',
            date: '22/05/2020',
            sport_id: 1
          }
        }
      }
      should permit(:time, :date, :sport_id)
        .for(:create, params: params)
        .on(:measurement)
    end
    it 'creates a new Measurement' do
      expect do
        post :create, params: { measurement: valid_attributes }
      end.to change(Measurement, :count).by(1)
      expect(response).to have_http_status(200)
    end
    it 'it fails to create a new Measurement' do
      expect do
        post :create, params: { measurement: invalid_attributes }
      end.to change(Measurement, :count).by(0)
      expect(response).to have_http_status(422)
    end
  end
  describe 'DELETE #destroy' do
    it 'it deletes a measurement' do
      @measurement = FactoryBot.create(:measurement)
      expect do
        delete 'destroy', params: { id: @measurement.id }
      end.to change(Measurement, :count).by(-1)
      expect(response).to have_http_status(204)
    end
    # Destroying a missing record is expected to surface RecordNotFound.
    it 'it fails deletes a measurement' do
      expect do
        delete 'destroy', params: { id: 2 }
      end.to raise_error(ActiveRecord::RecordNotFound)
    end
  end
end
|
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.modules.bigbluebutton.ui;
import java.util.List;
import org.olat.core.commons.persistence.SortKey;
import org.olat.core.gui.components.form.flexible.elements.FlexiTableFilter;
import org.olat.core.gui.components.form.flexible.impl.elements.table.DefaultFlexiTableDataModel;
import org.olat.core.gui.components.form.flexible.impl.elements.table.FilterableFlexiTableModel;
import org.olat.core.gui.components.form.flexible.impl.elements.table.FlexiSortableColumnDef;
import org.olat.core.gui.components.form.flexible.impl.elements.table.FlexiTableColumnModel;
import org.olat.core.gui.components.form.flexible.impl.elements.table.SortableFlexiTableDataModel;
/**
*
* Initial date: 7 avr. 2020<br>
* @author srosse, <EMAIL>, http://www.frentix.com
*
*/
/**
 * Table model for the BigBlueButton servers admin list. The filter toggles
 * between statistics aggregated over all OpenOlat instances and statistics
 * for this instance only.
 */
public class BigBlueButtonAdminServersTableModel extends DefaultFlexiTableDataModel<BigBlueButtonServerRow>
implements SortableFlexiTableDataModel<BigBlueButtonServerRow>, FilterableFlexiTableModel {
    private static final ServersCols[] COLS = ServersCols.values();
    // true = aggregated over all instances; false = this instance only (set by filter()).
    private boolean allInstances = true;
    public BigBlueButtonAdminServersTableModel(FlexiTableColumnModel columnsModel) {
        super(columnsModel);
    }
    @Override
    public void sort(SortKey sortKey) {
        // Intentionally a no-op: sorting is not implemented for this table.
    }
    @Override
    public void filter(String searchString, List<FlexiTableFilter> filters) {
        // A first filter of "this" switches to per-instance statistics.
        if(filters != null && !filters.isEmpty() && "this".equals(filters.get(0).getFilter())) {
            allInstances = false;
        } else {
            allInstances = true;
        }
    }
    @Override
    public Object getValueAt(int row, int col) {
        BigBlueButtonServerRow server = getObject(row);
        return getValueAt(server, col);
    }
    @Override
    public Object getValueAt(BigBlueButtonServerRow row, int col) {
        // Each statistics column picks the aggregated or per-instance figure
        // depending on the current filter state.
        switch(COLS[col]) {
            case url: return row.getUrl();
            case status: return row.isEnabled();
            case capacityFactor: return row.getCapacityFactor();
            case numberMeetings:return allInstances
                ? row.getAllInstancesServerInfos().getNumberOfMeetings() : row.getServerInfos().getNumberOfMeetings();
            case moderatorCount: return allInstances
                ? row.getAllInstancesServerInfos().getModeratorCount() : row.getServerInfos().getModeratorCount();
            case participantCount: return allInstances
                ? row.getAllInstancesServerInfos().getParticipantCount() : row.getServerInfos().getParticipantCount();
            case listenerCount: return allInstances
                ? row.getAllInstancesServerInfos().getListenerCount() : row.getServerInfos().getListenerCount();
            case voiceParticipantCount: return allInstances
                ? row.getAllInstancesServerInfos().getVoiceParticipantCount() : row.getServerInfos().getVoiceParticipantCount();
            case videoCount: return allInstances
                ? row.getAllInstancesServerInfos().getVideoCount() : row.getServerInfos().getVideoCount();
            case maxUsers: return allInstances
                ? row.getAllInstancesServerInfos().getMaxUsers() : row.getServerInfos().getMaxUsers();
            case recordingMeetings: return allInstances
                ? row.getAllInstancesServerInfos().getRecordingMeetings() : row.getServerInfos().getRecordingMeetings();
            case breakoutRecordingMeetings: return allInstances
                ? row.getAllInstancesServerInfos().getBreakoutRecordingMeetings() : row.getServerInfos().getBreakoutRecordingMeetings();
            case load: return allInstances
                ? row.getAllInstancesServerInfos().getLoad() : row.getServerInfos().getLoad();
            default: return "ERROR";
        }
    }
    // Column definitions; sortKey() returns the enum name, headers are i18n keys.
    public enum ServersCols implements FlexiSortableColumnDef {
        url("table.header.server.url"),
        status("table.header.server.status"),
        capacityFactor("table.header.capacity.factor"),
        numberMeetings("table.header.number.meetings"),
        moderatorCount("table.header.moderator.count"),
        participantCount("table.header.participant.count"),
        listenerCount("table.header.listener.count"),
        voiceParticipantCount("table.header.voice.participant.count"),
        videoCount("table.header.video.count"),
        maxUsers("table.header.max.users"),
        recordingMeetings("table.header.recording.meetings"),
        breakoutRecordingMeetings("table.header.breakout.recording.meetings"),
        load("table.header.load");
        private final String i18nHeaderKey;
        private ServersCols(String i18nHeaderKey) {
            this.i18nHeaderKey = i18nHeaderKey;
        }
        @Override
        public boolean sortable() {
            return true;
        }
        @Override
        public String sortKey() {
            return name();
        }
        @Override
        public String i18nHeaderKey() {
            return i18nHeaderKey;
        }
    }
}
|
<gh_stars>0
import * as utility from './utility'
/**
 * Close the current "unreleased" milestone under the release name, then
 * recreate a fresh, open "unreleased" milestone for future work.
 * (Return type tightened from Promise<any> to Promise<void>: nothing is returned.)
 */
export async function createMilestoneRelease(owner: string, repo: string, unreleased: string, release: string): Promise<void> {
  await updateUnreleased(owner, repo, unreleased, release)
  await createUnreleased(owner, repo, unreleased)
}
/**
 * Rename the "unreleased" milestone to the release title and close it; if it
 * does not exist yet, create it directly in the closed state.
 * (Return type tightened from Promise<any> to Promise<void>.)
 */
async function updateUnreleased(owner: string, repo: string, unreleased: string, release: string): Promise<void> {
  const milestone = await utility.getMilestone(owner, repo, unreleased)
  if (milestone != null) {
    await updateMilestone(owner, repo, unreleased, release, 'closed')
  } else {
    await createMilestone(owner, repo, release, 'closed')
  }
}
/**
 * Ensure an open milestone with the given title exists (idempotent: skips
 * creation if one is already present).
 * (Return type tightened from Promise<any> to Promise<void>.)
 */
async function createUnreleased(owner: string, repo: string, title: string): Promise<void> {
  const milestone = await utility.getMilestone(owner, repo, title)
  if (milestone == null) {
    await createMilestone(owner, repo, title, 'open')
  }
}
/**
 * Create a milestone via the GitHub REST API with the given title and state.
 * (Return type tightened from Promise<any> to Promise<void>.)
 */
async function createMilestone(owner: string, repo: string, title: string, state: string): Promise<void> {
  const octokit = utility.getOctokit()
  await octokit.request(`POST /repos/${owner}/${repo}/milestones`, {
    title: title,
    state: state
  })
}
/**
 * Look up a milestone by name and PATCH its title and state.
 * BUGFIX: guard against a missing milestone — previously `milestone.number`
 * threw an unhelpful TypeError when the lookup returned nothing.
 * (Return type tightened from Promise<any> to Promise<void>.)
 */
async function updateMilestone(owner: string, repo: string, milestoneName: string, title: string, state: string): Promise<void> {
  const octokit = utility.getOctokit()
  const milestone = await utility.getMilestone(owner, repo, milestoneName)
  if (milestone == null) {
    throw new Error(`milestone not found: ${milestoneName}`)
  }
  await octokit.request(`PATCH /repos/${owner}/${repo}/milestones/${milestone.number}`, {
    title: title,
    state: state
  })
}
|
/**
* Angular library starter
* Build an Angular library compatible with AoT compilation & Tree shaking like an official package
* Copyright <NAME>
* MIT license
* https://github.com/robisim74/agm-direction
*/
/**
* Entry point for all public APIs of the package.
*/
export * from './src/agm-direction';
|
#Convert date column to datetime
# NOTE(review): assumes ``df`` is an existing pandas DataFrame with a 'date'
# column and that pandas is imported as ``pd`` — confirm in surrounding code.
df['date'] = pd.to_datetime(df['date'])
#Set the index to the date column
df.set_index('date', inplace=True)
#Format the dates in the columns
# After strftime the index holds plain strings (no longer a DatetimeIndex).
df.index = df.index.strftime('%m-%d-%Y')
|
<reponame>sarastrasner/code-followers
'use strict';
//requiring API server and user model
const API = require('./API');
const server = require('./src/server');
const users = require('./users/user-model');
//third party dependencies
const prompts = require('prompts');
require('dotenv').config();
const superagent = require('superagent');
// Database
const mongoose = require('mongoose');
// NOTE(review): this destructured `response` is never used and is shadowed by
// locals below — candidate for removal.
const { response } = require('express');
const options = {
  useNewUrlParser: true,
  useCreateIndex: true,
  useUnifiedTopology: true,
};
mongoose.set('useFindAndModify', false); // This removes the notice of deprecation of findByIdAndUpdate.
//global variable
let counter = 0;
//Connect to the Mongo DB
// NOTE(review): mongoose.connect() is asynchronous — a rejected promise is
// not caught by this synchronous try/catch; consider chaining .catch().
try {
  mongoose.connect(process.env.MONGODB_URI, options);
} catch (error) {
  console.error('Could not start up server: ', error);
}
//render initial prompt
// Entry prompt: a toggle choosing between sign-in (true) and sign-up (false).
(async () => {
  const response = await prompts({
    type: 'toggle',
    name: 'value',
    message:
      'Welcome to Code Followers, a text-based game of risk and reward. Before you can play, please sign in or sign up.',
    initial: true,
    active: 'sign in',
    inactive: 'sign up',
  });
  if (response.value === false) {
    signup();
  } else if (response.value === true) {
    signin();
  }
})();
// Prompt for credentials, authenticate against the deployed API, and start the
// game on success. On failure the finally-block prints a retry hint.
function signin() {
  const signinQuestions = [
    {
      type: 'text',
      name: 'username',
      message: 'What is your username?',
    },
    {
      type: 'password',
      name: 'password',
      message: 'What is your password?',
    },
  ];
  let token;
  (async () => {
    try {
      const response = await prompts(signinQuestions);
      const results = await superagent
        .post(`https://code-followers.herokuapp.com/signin`)
        .auth(response.username, response.password);
      token = results.body.user.token;
      console.log(`${response.username}, you have successfully logged in!`);
      let userId = results.body.user._id;
      renderGame(userId);
    } catch (e) {
      // BUGFIX: the original `catch { e => console.error(...) }` created an
      // arrow function and discarded it, so errors were silently swallowed.
      console.error('this is a sign-in error!', e.message);
    } finally {
      if (!token) {
        console.log('incorrect login. Press CTRL + C to retry');
      }
    }
  })();
}
// Prompt for new credentials, register via the deployed API, and start the
// game with the freshly created user id.
function signup() {
  const signupQuestions = [
    {
      type: 'text',
      name: 'username',
      message: 'What is your username?',
    },
    {
      type: 'password',
      name: 'password',
      message: 'What is your password?',
    },
  ];
  // NOTE(review): mixes await with .then/.catch on the same call — works, but
  // consider a single try/await style for consistency with signin().
  (async () => { // await superagent.post(`http://localhost:${process.env.PORT}/signup`)
    const response = await prompts(signupQuestions);
    await superagent
      .post(`https://code-followers.herokuapp.com/signup`)
      .send(response)
      .then(results => {
        console.log(`Welcome, ${results.body.user.username}!`);
        let userId = results.body.user._id;
        renderGame(userId);
      })
      .catch(e => console.error('This is a sign-up error!', e.message));
  })();
}
// Build the prompt choices from a tree node's children. Each choice keeps a
// reference to the child node itself so the game loop can continue walking.
// FIX: the bare `throw new Error()` now carries a diagnostic message.
function getTitles(currentNode) {
  if (!currentNode) throw new Error('getTitles requires a non-null node');
  let arrayOfTitles = [];
  if (currentNode.left)
    arrayOfTitles.push({
      title: currentNode.left.name,
      value: currentNode.left,
      type: currentNode.left.type,
    });
  if (currentNode.right)
    arrayOfTitles.push({
      title: currentNode.right.name,
      value: currentNode.right,
      type: currentNode.right.type,
    });
  return arrayOfTitles;
}
// Persist the player's score for this user via the deployed API.
// Failures are logged but not retried.
async function tallyScore(counter, userId) {
  // await superagent.put(`http://localhost:${process.env.PORT}/update-score/${userId}`)
  await superagent
    .put(`https://code-followers.herokuapp.com/update-score/${userId}`)
    .send({ counter })
    .catch(e => console.error(e.message, 'Your score is unavailable.'));
}
// After a game ends: save the score and either restart the game loop or exit.
function playAgain(userId) {
  (async () => {
    const response = await prompts({
      type: 'toggle',
      name: 'value',
      message: 'Do you want to play again',
      initial: true,
      active: 'yes',
      inactive: 'no',
    });
    if (response.value === false) {
      tallyScore(counter, userId);
      console.log('Thanks for playing! Exit by typing control+c.');
    } else if (response.value === true) {
      tallyScore(counter, userId); // This could be a high-score counter.
      renderGame(userId);
    }
  })();
}
// Main game loop: walk the decision tree from API.root, prompting at each
// node, adjusting the module-level `counter` on win/lose nodes, until a leaf
// (no children) is reached. Then offer a replay.
function renderGame(userId) {
  let node = API.root;
  let response = {};
  response.value = {};
  response.value.description =
    'You’ve just lost your job to the effects of a global pandemic, which has closed borders, shops, gyms, restaurants, and schools for the foreseeable future. The country has come together to protect the vulnerable and support the unemployed, so you’ve got time to pursue a career pivot. What’ll it be?';
  (async () => {
    while (true) {
      console.log(`-----------------------------------`);
      response = await prompts({
        type: 'select',
        name: 'value',
        message: response.value.description,
        choices: getTitles(node),
      });
      if (response.value.status === 'win') {
        console.log(
          `You've chosen wisely. You've won a point, and your current score is ${++counter}.`
        );
      } else if (response.value.status === 'lose') {
        console.log(
          `You've chosen poorly. You've lost a point, and your current score is ${--counter}.`
        );
      }
      // Leaf node: print the ending, report win/lose by final score, stop.
      if (!response.value.left && !response.value.right) {
        console.log(response.value.description);
        if (counter >= 2)
          console.log(`You've won(!) with a final score of ${counter}.`);
        else console.log(`You've lost(!) with a final score of ${counter}.`);
        break;
      }
      node = response.value;
    }
    playAgain(userId);
  })();
}
// Exported for testing; the server starts immediately on module load.
module.exports = { getTitles, renderGame, signin, signup };
server.start(process.env.PORT || 3000);
|
<reponame>RKrahl/bagit-rk
from sys import exit, version
from setuptools import setup
import bagit
# Abort on unsupported interpreters. NOTE(review): this compares the full
# sys.version string lexically — crude, but adequate for the 2.x-era check.
if version < '2.6.0':
    print("python 2.6 or higher is required")
    exit(1)
# Long description used by setup() below.
description = \
"""
This package can be used to create BagIt style packages of
digital content for safe transmission and digital preservation.
See: http://en.wikipedia.org/wiki/BagIt for more details.
"""
# for older pythons ...
# Probe for stdlib modules that very old interpreters lacked; when absent,
# depend on the PyPI backport of the same name.
requirements = []
try:
    import multiprocessing  # noqa: F401
except ImportError:  # was a bare `except:`, which hid unrelated errors
    requirements.append("multiprocessing")
try:
    import hashlib  # noqa: F401
except ImportError:
    requirements.append("hashlib")
# NOTE(review): this assignment appears unused — use_scm_version=True below
# makes setuptools_scm derive the version, ignoring bagit.VERSION; confirm.
version = bagit.VERSION
setup(
    name = 'bagit',
    use_scm_version=True,
    url = 'https://libraryofcongress.github.io/bagit-python/',
    author = '<NAME>',
    author_email = '<EMAIL>',
    py_modules = ['bagit',],
    scripts = ['bagit.py'],
    description = description,
    platforms = ['POSIX'],
    test_suite = 'test',
    setup_requires=['setuptools_scm'],
    tests_require=['mock'],
    install_requires = requirements,
    classifiers = [
        'License :: Public Domain',
        'Intended Audience :: Developers',
        'Topic :: Communications :: File Sharing',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Topic :: System :: Filesystems',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.1',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
    ],
)
|
-- First ten female Californians, alphabetically by name.
-- (TOP is T-SQL / SQL Server syntax; other dialects use LIMIT.)
SELECT TOP 10 Name
FROM Person
WHERE State = 'California'
AND Gender = 'Female'
ORDER BY Name ASC;
|
<filename>src/isa/avx/fmaf.c
/*
* Copyright (C) 2008-2020 Advanced Micro Devices, Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holder nor the names of its contributors
* may be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
* OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
*/
#include "libm_amd.h"
#include "libm/amd_funcs_internal.h"
#include "libm_util_amd.h"
/*
 * Single-precision fused multiply-add: a*b + c with one rounding.
 * The product is formed exactly in double precision; a correction step
 * handles the rare case where rounding double -> float could double-round.
 */
float FN_PROTOTYPE_BAS64(fmaf)(float a, float b, float c)
{
    double dc, dp, dr;
    long long ir;
    UT32 aa, bb, cc, result;
    int ae,be,ce;
    union { double d; long long l; } dru, iru;
    int spcl, spcl2;
    aa.f32 = a;
    bb.f32 = b;
    cc.f32 = c;
    /* Unbiased exponents; 128 marks an Inf or NaN operand. */
    ae = ((aa.u32 & 0x7f800000) >> 23) - 127;
    be = ((bb.u32 & 0x7f800000) >> 23) - 127;
    ce = ((cc.u32 & 0x7f800000) >> 23) - 127;
    spcl = ( (ae == 128) || (be == 128) || (ce == 128));
    if (spcl)
    {
        /* Only c is special: c+c propagates (and quiets) c's NaN/Inf. */
        spcl2 = ((ce == 128) && (ae != 128) && (be != 128));
        /* 0 * Inf with c a NaN: propagate c's NaN rather than the product's. */
        if(((((aa.u32 & 0x7fffffff) == 0) && ((bb.u32 & 0x7fffffff) == 0x7f800000)) ||
        (((bb.u32 & 0x7fffffff) == 0) && ((aa.u32 & 0x7fffffff) == 0x7f800000))) &&
        ((cc.u32 & 0x7fffffff) > 0x7f800000))
            result.f32 = c+c;
        else
            result.f32 = spcl2?(c+c):(a * b + c);
        return result.f32;
        //return _handle_error("fma", __amd_CodeFma, result.u64, DOMAIN, AMD_F_INVALID, EDOM, aa, 0.0, 1);
    }
    /* Any zero operand: the plain expression is already exact. */
    if(((aa.u32 & 0x7fffffff) == 0) ||
    ((bb.u32 & 0x7fffffff) == 0) ||
    ((cc.u32 & 0x7fffffff) == 0) )
    {
        return a * b + c;
    }
    dp = (double)a * (double)b; /* exact: 24-bit x 24-bit fits in a 53-bit significand */
    dc = c;
    dr = dp + dc;
    dru.d = dr;
    ir = dru.l;
    /* Double-rounding correction. NOTE(review): the mask appears to detect a
       sum that lies exactly on a float rounding boundary; the nudge below
       biases the final rounding toward the true (discarded) remainder —
       confirm against the upstream AMD libm derivation. */
    if ((ir & 0x000000001fffffffL) == 0x0000000010000000L) {
        int ec = ((cc.u32 >> 23) & 0xff) - 127;
        int ep = ((aa.u32 >> 23) & 0xff) + ((bb.u32 >> 23) & 0xff) - 2*127;
        long long jr, ir52 = (ir & 0x7ff0000000000000L) - 0x0340000000000000L;
        jr = ir52 | (long long)(dr - dc > dp) << 63;
        ir = (ec - ep > 53 - 48) & (dr - dc != dp) ? jr : 0L;
        jr = ir52 | (long long)(dr - dp > dc) << 63;
        ir = (ep - ec > 53 - 24) & (dr - dp != dc) ? jr : ir;
        iru.l = ir;
        dr += iru.d;
    }
    return (float)dr;
}
|
def selection_sort(nums):
    """Sort ``nums`` in place using selection sort and return it."""
    for i in range(len(nums)):
        # Locate the smallest element within the unsorted tail nums[i:].
        smallest = i
        for j in range(i + 1, len(nums)):
            if nums[j] < nums[smallest]:
                smallest = j
        nums[i], nums[smallest] = nums[smallest], nums[i]
    return nums
# Demonstration: prints [1, 2, 3, 5, 6].
print(selection_sort([3, 2, 5, 1, 6]))
|
#!/usr/bin/env sh
# generated from catkin/cmake/template/setup.sh.in
# Sets various environment variables and sources additional environment hooks.
# It tries its best to undo changes from a previously sourced setup file beforehand.
# Supported command line options:
# --extend: skips the undoing of changes from a previously sourced setup file
# (in plain sh shell which doesn't support arguments for sourced scripts you
# can set the environment variable `CATKIN_SETUP_UTIL_ARGS=--extend` instead)
# since this file is sourced either use the provided _CATKIN_SETUP_DIR
# or fall back to the destination set at configure time
# NOTE(review): generated catkin file — regenerate rather than hand-edit.
: ${_CATKIN_SETUP_DIR:=/home/graham/Source/robot1-ros/devel}
_SETUP_UTIL="$_CATKIN_SETUP_DIR/_setup_util.py"
unset _CATKIN_SETUP_DIR
if [ ! -f "$_SETUP_UTIL" ]; then
  echo "Missing Python script: $_SETUP_UTIL"
  return 22
fi
# detect if running on Darwin platform
_UNAME=`uname -s`
_IS_DARWIN=0
if [ "$_UNAME" = "Darwin" ]; then
  _IS_DARWIN=1
fi
unset _UNAME
# make sure to export all environment variables
export CMAKE_PREFIX_PATH
if [ $_IS_DARWIN -eq 0 ]; then
  export LD_LIBRARY_PATH
else
  export DYLD_LIBRARY_PATH
fi
unset _IS_DARWIN
export PATH
export PKG_CONFIG_PATH
export PYTHONPATH
# remember type of shell if not already set
if [ -z "$CATKIN_SHELL" ]; then
  CATKIN_SHELL=sh
fi
# invoke Python script to generate necessary exports of environment variables
# use TMPDIR if it exists, otherwise fall back to /tmp
if [ -d "${TMPDIR:-}" ]; then
  _TMPDIR="${TMPDIR}"
else
  _TMPDIR=/tmp
fi
_SETUP_TMP=`mktemp "${_TMPDIR}/setup.sh.XXXXXXXXXX"`
unset _TMPDIR
if [ $? -ne 0 -o ! -f "$_SETUP_TMP" ]; then
  echo "Could not create temporary file: $_SETUP_TMP"
  return 1
fi
CATKIN_SHELL=$CATKIN_SHELL "$_SETUP_UTIL" $@ ${CATKIN_SETUP_UTIL_ARGS:-} >> "$_SETUP_TMP"
_RC=$?
if [ $_RC -ne 0 ]; then
  if [ $_RC -eq 2 ]; then
    echo "Could not write the output of '$_SETUP_UTIL' to temporary file '$_SETUP_TMP': may be the disk if full?"
  else
    echo "Failed to run '\"$_SETUP_UTIL\" $@': return code $_RC"
  fi
  unset _RC
  unset _SETUP_UTIL
  rm -f "$_SETUP_TMP"
  unset _SETUP_TMP
  return 1
fi
unset _RC
unset _SETUP_UTIL
# source the generated exports, then clean up the temporary file
. "$_SETUP_TMP"
rm -f "$_SETUP_TMP"
unset _SETUP_TMP
# source all environment hooks
_i=0
while [ $_i -lt $_CATKIN_ENVIRONMENT_HOOKS_COUNT ]; do
  eval _envfile=\$_CATKIN_ENVIRONMENT_HOOKS_$_i
  unset _CATKIN_ENVIRONMENT_HOOKS_$_i
  eval _envfile_workspace=\$_CATKIN_ENVIRONMENT_HOOKS_${_i}_WORKSPACE
  unset _CATKIN_ENVIRONMENT_HOOKS_${_i}_WORKSPACE
  # set workspace for environment hook
  CATKIN_ENV_HOOK_WORKSPACE=$_envfile_workspace
  . "$_envfile"
  unset CATKIN_ENV_HOOK_WORKSPACE
  _i=$((_i + 1))
done
unset _i
unset _CATKIN_ENVIRONMENT_HOOKS_COUNT
|
#! /bin/sh
# App bootstrap: configure env, install dependencies, prepare storage, migrate.
mv envExemple .env
# BUGFIX: install dependencies before running any artisan command —
# `php artisan` requires vendor/autoload.php, which composer creates.
composer install
php artisan key:generate
# SECURITY(review): 777 grants world write access; prefer chown to the web
# server user plus 775 where possible.
chmod 777 storage/logs/
chmod 777 storage/framework/sessions/
chmod 777 storage/framework/views/
php artisan storage:link
php artisan migrate:refresh --seed
|
// Jasmine/AngularJS specs for the lookup service.
describe("Lookup Service", function () {
  describe("creation", function () {
    var lookupService;
    beforeEach(function () {
      module('LookupModule');
    });
    beforeEach(inject(function (_lookupService_) {
      lookupService = _lookupService_;
    }));
    it("instantiates", function () {
      expect(lookupService).toBeDefined();
    });
    it("get a hanzi", function () {
      var result;
      result = lookupService.get('x');
      expect(result).toBeDefined();
      // BUGFIX: toBe() is reference equality, so toBe([]) can never pass for
      // a freshly returned array; toEqual() performs the intended deep compare.
      expect(result).toEqual([]);
    });
  });
});
|
<reponame>calypso-science/Toto
"""Read txt,csv file
This imports a text file. The function uses the read_csv function from pandas <https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.read_csv.html>_.
This class returns a Panda Dataframe with some extra attributes such as Latitude,Longitude,Units.
Parameters
~~~~~~~~~~
filename : (files,) str or list_like
A list of filename to process.
sep : str, default {_default_sep}
Delimiter to use. If sep is None, the C engine cannot automatically detect
the separator, but the Python parsing engine can, meaning the latter will
be used and automatically detect the separator by Python's builtin sniffer
tool, ``csv.Sniffer``. In addition, separators longer than 1 character and
different from ``'\s+'`` will be interpreted as regular expressions and
will also force the use of the Python parsing engine. Note that regex
delimiters are prone to ignoring quoted data. Regex example: ``'\r\t'``.
skiprows : list-like, int or callable, optional
Line numbers to skip (0-indexed) or number of lines to skip (int)
at the start of the file.
If callable, the callable function will be evaluated against the row
indices, returning True if the row should be skipped and False otherwise.
An example of a valid callable argument would be ``lambda x: x in [0, 2]``.
skipfooter : int, default 0
Number of lines at bottom of file to skip (Unsupported with engine='c').
miss_val : scalar, str, list-like, or dict, optional
Additional strings to recognize as NA/NaN. If dict passed, specific
per-column NA values.
colNamesLine : int, default 1
Line number where the header are defined
unitNamesLine : int, default 1
Line number where the units are defined
single_column : bool, default False
The time is represented in a single column
customUnit : str, default '%d-%m-%Y %H:%M:%S'
    String representing the time format
unit : str default 's', can be 'auto','custom','matlab' or 's' and 'D'
unit of the single column time. Only matter if `single_column` is True
time_col_name: dict, default {'Year':'year','Month':'month','Day':'day','Hour':'hour','Min':'Minute','Sec':'Second'}
    Dictionary for renaming each column, so pandas can interpret the time. Only matters if `single_column` is False
colNames : List, default []
List of column names to use.
unitNames : List, default []
List of unit to use.
Notes
-----
When opening the TOTOVIEW gui this function will be called with :py:class:`totoview.inputs.txtGUI`
Examples
~~~~~~~~
>>> from toto.inputs.txt import TXTfile
>>> tx=TXTfile([filename],colNamesLine=1,miss_val='NaN',\
sep=',',skiprows=1,unit='custom',time_col_name='time',unitNamesLine=0,\
single_column=True,customUnit='%d/%m/%Y %H:%M')
>>> tx.reads()
>>> tx.read_time()
>>> df=tx._toDataFrame()
"""
import glob,os,sys
import pandas as pd
import datetime as dt
import numpy as np
_NUMERIC_KINDS = set('buifc')
def matlab2datetime(matlab_datenum):
    """Convert a MATLAB datenum (fractional days) to a datetime.datetime."""
    whole_days = dt.datetime.fromordinal(int(matlab_datenum))
    frac = dt.timedelta(days=matlab_datenum % 1)
    # MATLAB's epoch is offset by 366 days from Python's proleptic ordinal.
    return whole_days + frac - dt.timedelta(days=366)
class TXTfile():
    """Reader for delimited text/CSV files.

    Each input file is parsed with :func:`pandas.read_csv` into a DataFrame
    (collected in ``self.data``); units are taken from a dedicated header
    line and attached to each Series as a ``units`` attribute.
    """

    @staticmethod
    def defaultExtensions():
        # File extensions this reader is registered for.
        return ['.csv','.txt']

    def __init__(self,filename,sep='\t',\
                 colNames=None,\
                 unitNames=None,\
                 miss_val='NaN',\
                 colNamesLine=1,\
                 skiprows=2,\
                 unitNamesLine=2,\
                 skipfooter=0,\
                 single_column=False,\
                 unit='s',\
                 customUnit='%d-%m-%Y %H:%M:%S',\
                 time_col_name=None,\
                 ):
        """Store the list of files to read and all parsing options.

        The mutable defaults ([] / {...}) of the original signature are
        replaced by None sentinels to avoid shared-state bugs; passing the
        old values explicitly behaves identically.
        """
        # 'filename' is expected to be a list of paths; read() runs per file.
        self.filename = filename
        # Cheap attribute namespace (same trick as the original; equivalent
        # to types.SimpleNamespace()).
        self.fileparam=lambda:None
        self.fileparam.sep = sep
        self.fileparam.colNames = [] if colNames is None else colNames
        self.fileparam.unitNames = [] if unitNames is None else unitNames
        self.fileparam.miss_val = miss_val
        self.fileparam.ext=self.defaultExtensions()
        self.fileparam.colNamesLine = colNamesLine
        self.fileparam.skiprows = skiprows
        self.fileparam.unitNamesLine = unitNamesLine
        self.fileparam.skipfooter = skipfooter
        self.fileparam.single_column=single_column
        self.fileparam.unit=unit
        self.fileparam.customUnit=customUnit
        self.fileparam.time_col_name = (
            {'Year':'year','Month':'month','Day':'day','Hour':'hour','Min':'Minute','Sec':'Second'}
            if time_col_name is None else time_col_name)
        self.encoding = None
        self.data=[]

    def reads(self):
        """Read every file given to the constructor."""
        for file in self.filename:
            self.read(file)

    def read(self,filename):
        """Parse one file and append the resulting DataFrame to self.data."""
        # --- Subfunctions
        def readline(iLine):
            # Return line iLine (0-based) without loading the whole file.
            with open(filename,'r',encoding=self.encoding) as f:
                for i, line in enumerate(f):
                    if i==iLine:
                        return line.strip()
                    elif i>iLine:
                        break
        def split(s):
            if s is None:
                return []
            if self.fileparam.sep=='\s+':
                return s.strip().split()
            else:
                return s.strip().split(self.fileparam.sep)
        # --- Safety: blank/space separators mean "any whitespace"
        if self.fileparam.sep=='' or self.fileparam.sep==' ':
            self.fileparam.sep='\s+'
        # Column header (colNamesLine is 1-based).
        line=readline(max(0,self.fileparam.colNamesLine-1))
        self.fileparam.colNames=split(str(line).strip())
        # Unit header; brackets around units (e.g. "[m]") are stripped.
        if self.fileparam.unitNamesLine:
            line=readline(max(0,self.fileparam.unitNamesLine-1))
            unit={}
            unitNames=split(str(line).strip())
            for i,col in enumerate(self.fileparam.colNames):
                # Guard against a unit row shorter than the header row
                # (original raised IndexError here).
                if i < len(unitNames):
                    unit[col]=unitNames[i].replace('[','').replace(']','')
            self.fileparam.unitNames=unit
        try:
            with open(filename,'r',encoding=self.encoding) as f:
                df=pd.read_csv(f,sep=self.fileparam.sep,skiprows=self.fileparam.skiprows,\
                    header=None,names=self.fileparam.colNames,skipfooter=self.fileparam.skipfooter,na_values=self.fileparam.miss_val)
        except pd.errors.ParserError as e:
            # The original raised WrongFormatError, an undefined name that
            # would itself crash with NameError; raise ValueError instead.
            raise ValueError('CSV File {}: '.format(filename)+e.args[0])
        self.data.append(df)

    def read_time(self):
        """Turn the time column(s) of each DataFrame into a DatetimeIndex."""
        for i,df in enumerate(self.data):
            if self.fileparam.single_column is True:
                # Time lives in one column; interpretation depends on `unit`.
                if self.fileparam.unit == 'auto':
                    time=pd.to_datetime(df[self.fileparam.time_col_name])
                elif self.fileparam.unit == 'matlab':
                    time=[matlab2datetime(tval) for tval in df['time']]
                elif self.fileparam.unit == 's' or self.fileparam.unit == 'D':
                    time=pd.to_datetime(df[self.fileparam.time_col_name],unit=self.fileparam.unit)
                elif self.fileparam.unit == 'custom':
                    time=pd.to_datetime(df[self.fileparam.time_col_name],format=self.fileparam.customUnit)
                del df[self.fileparam.time_col_name]
            else:
                # Time split over several columns; rename them so
                # pandas.to_datetime can assemble a timestamp, then drop them.
                # list() instead of a dict_keys view so df[...] indexing works
                # on all pandas versions.
                old_name=list(self.fileparam.time_col_name.keys())
                time=pd.to_datetime(df[old_name].rename(columns=self.fileparam.time_col_name))
                for oldkey in old_name:
                    del df[oldkey]
            self.data[i]=df
            self.data[i]['time']=time
            self.data[i].set_index('time',inplace=True,drop=False)
            self.add_unit(i)
            keys=self.data[i].keys()
            for key in keys:
                self.data[i][key].long_name=key

    def add_unit(self,i):
        """Attach a ``units`` attribute to every column of DataFrame i.

        Units come from the parsed unit header when available; otherwise a
        column named like "speed[m/s]" is renamed to "speed" and the
        bracketed part becomes its unit.
        """
        keys=self.data[i].keys()
        for key in keys:
            if key in self.fileparam.unitNames:
                units=self.fileparam.unitNames[key]
                self.data[i][key].units=units
            elif '[' in key and ']' in key:
                a,b=key.split('[')
                self.data[i].rename(columns={key: a},inplace=True)
                self.data[i][a].units=b.split(']')[0]

    def _toDataFrame(self):
        # Return the list of parsed DataFrames.
        return self.data
|
<gh_stars>0
package com.globalcollect.gateway.sdk.java.gc.payment.definitions;
/**
 * Customer-account details for payment product 840.
 * Plain data holder: every field defaults to {@code null} and is exposed
 * through a matching getter/setter pair; no validation is performed here.
 */
public class PaymentProduct840CustomerAccount {

	private String accountId = null;
	private String billingAgreementId = null;
	private String companyName = null;
	private String countryCode = null;
	private String customerAccountStatus = null;
	private String customerAddressStatus = null;
	private String firstName = null;
	private String payerId = null;
	private String surname = null;

	/** @return the account identifier, or {@code null} if unset */
	public String getAccountId() {
		return accountId;
	}

	public void setAccountId(String accountId) {
		this.accountId = accountId;
	}

	/** @return the billing agreement identifier, or {@code null} if unset */
	public String getBillingAgreementId() {
		return billingAgreementId;
	}

	public void setBillingAgreementId(String billingAgreementId) {
		this.billingAgreementId = billingAgreementId;
	}

	/** @return the company name, or {@code null} if unset */
	public String getCompanyName() {
		return companyName;
	}

	public void setCompanyName(String companyName) {
		this.companyName = companyName;
	}

	/** @return the country code, or {@code null} if unset */
	public String getCountryCode() {
		return countryCode;
	}

	public void setCountryCode(String countryCode) {
		this.countryCode = countryCode;
	}

	/** @return the customer account status, or {@code null} if unset */
	public String getCustomerAccountStatus() {
		return customerAccountStatus;
	}

	public void setCustomerAccountStatus(String customerAccountStatus) {
		this.customerAccountStatus = customerAccountStatus;
	}

	/** @return the customer address status, or {@code null} if unset */
	public String getCustomerAddressStatus() {
		return customerAddressStatus;
	}

	public void setCustomerAddressStatus(String customerAddressStatus) {
		this.customerAddressStatus = customerAddressStatus;
	}

	/** @return the customer's first name, or {@code null} if unset */
	public String getFirstName() {
		return firstName;
	}

	public void setFirstName(String firstName) {
		this.firstName = firstName;
	}

	/** @return the payer identifier, or {@code null} if unset */
	public String getPayerId() {
		return payerId;
	}

	public void setPayerId(String payerId) {
		this.payerId = payerId;
	}

	/** @return the customer's surname, or {@code null} if unset */
	public String getSurname() {
		return surname;
	}

	public void setSurname(String surname) {
		this.surname = surname;
	}
}
|
<gh_stars>0
var forma = document.getElementById("forma"),
    txtFecha = forma["fecha"],
    salidaBoleta = document.getElementById("salidaBoleta"),
    salidaNombre = document.getElementById("salidaNombre"),
    salidaSecuencia = document.getElementById("salidaSecuencia"),
    salidaMateria = document.getElementById("salidaMateria"),
    salidaFecha = document.getElementById("salidaFecha");
forma.addEventListener("submit", recuperar, false);
// Copy the submitted form values into the output elements.
function recuperar(evt){
    // Without preventDefault() the submit reloads the page and the output
    // disappears immediately.
    evt.preventDefault();
    var boleta = forma["boleta"].value,
        nombre = forma["nombre"].value,
        secuencia = forma["secuencia"].value,
        materia = forma["materia"].value,
        fecha = new Date(txtFecha.value);
    // <input type="date"> values parse as UTC midnight, so read with the UTC
    // getters instead of the old getDate()+1 hack (which broke at month
    // boundaries). padStart replaces the hard-coded "/0"+month padding,
    // which produced "012" for months >= 10.
    var dia = String(fecha.getUTCDate()).padStart(2, "0");
    var mes = String(fecha.getUTCMonth() + 1).padStart(2, "0");
    salidaFecha.textContent = dia + "/" + mes + "/" + fecha.getUTCFullYear();
    salidaBoleta.textContent = boleta;
    salidaNombre.textContent = nombre;
    salidaSecuencia.textContent = secuencia;
    salidaMateria.textContent = materia;
}
|
#!/bin/bash
# Build a Linux AppImage for RuneLite: fetch a JRE, wrap the jar with packr,
# then package the result with appimagetool. Every download is pinned by
# sha256 so the build fails loudly on a tampered or truncated artifact.
set -e

JDK_VER="11.0.8"
JDK_BUILD="10"
PACKR_VERSION="runelite-1.0"
APPIMAGE_VERSION="12"

umask 022

if ! [ -f OpenJDK11U-jre_x64_linux_hotspot_${JDK_VER}_${JDK_BUILD}.tar.gz ] ; then
    curl -Lo OpenJDK11U-jre_x64_linux_hotspot_${JDK_VER}_${JDK_BUILD}.tar.gz \
        https://github.com/AdoptOpenJDK/openjdk11-binaries/releases/download/jdk-${JDK_VER}%2B${JDK_BUILD}/OpenJDK11U-jre_x64_linux_hotspot_${JDK_VER}_${JDK_BUILD}.tar.gz
fi

echo "98615b1b369509965a612232622d39b5cefe117d6189179cbad4dcef2ee2f4e1 OpenJDK11U-jre_x64_linux_hotspot_${JDK_VER}_${JDK_BUILD}.tar.gz" | sha256sum -c

# packr requires a "jdk" and pulls the jre from it - so we have to place it inside
# the jdk folder at jre/
if ! [ -d linux-jdk ] ; then
    tar zxf OpenJDK11U-jre_x64_linux_hotspot_${JDK_VER}_${JDK_BUILD}.tar.gz
    mkdir linux-jdk
    mv jdk-$JDK_VER+$JDK_BUILD-jre linux-jdk/jre
fi

if ! [ -f packr_${PACKR_VERSION}.jar ] ; then
    curl -Lo packr_${PACKR_VERSION}.jar \
        https://github.com/runelite/packr/releases/download/${PACKR_VERSION}/packr.jar
fi

echo "18b7cbaab4c3f9ea556f621ca42fbd0dc745a4d11e2a08f496e2c3196580cd53 packr_${PACKR_VERSION}.jar" | sha256sum -c

# Note: Host umask may have checked out this directory with g/o permissions blank
chmod -R u=rwX,go=rX appimage
# ...ditto for the build process
chmod 644 target/RuneLite.jar

# Bundle the jar + JRE into an AppDir; vmargs are given without leading
# dashes (packr convention — it prepends them itself).
java -jar packr_${PACKR_VERSION}.jar \
    --platform \
    linux64 \
    --jdk \
    linux-jdk \
    --executable \
    RuneLite \
    --classpath \
    target/RuneLite.jar \
    --mainclass \
    net.runelite.launcher.Launcher \
    --vmargs \
    Drunelite.launcher.nojvm=true \
    Xmx512m \
    Xss2m \
    XX:CompileThreshold=1500 \
    Djna.nosys=true \
    --output \
    native-linux-x86_64/RuneLite.AppDir/ \
    --resources \
    target/filtered-resources/runelite.desktop \
    appimage/runelite.png

pushd native-linux-x86_64/RuneLite.AppDir
mkdir -p jre/lib/amd64/server/
ln -s ../../server/libjvm.so jre/lib/amd64/server/ # packr looks for libjvm at this hardcoded path

# Symlink AppRun -> RuneLite
ln -s RuneLite AppRun

# Ensure RuneLite is executable to all users
chmod 755 RuneLite
popd

if ! [ -f appimagetool-x86_64.AppImage ] ; then
    curl -Lo appimagetool-x86_64.AppImage \
        https://github.com/AppImage/AppImageKit/releases/download/$APPIMAGE_VERSION/appimagetool-x86_64.AppImage
    chmod +x appimagetool-x86_64.AppImage
fi

echo "d918b4df547b388ef253f3c9e7f6529ca81a885395c31f619d9aaf7030499a13 appimagetool-x86_64.AppImage" | sha256sum -c

# Final step: turn the AppDir into a single-file AppImage.
./appimagetool-x86_64.AppImage \
    native-linux-x86_64/RuneLite.AppDir/ \
    native-linux-x86_64/RuneLite.AppImage
|
def binary_search(arr, target):
    """Return the index of target in the sorted sequence arr, or -1 if absent."""
    lo, hi = 0, len(arr) - 1
    while lo <= hi:
        mid = (lo + hi) // 2
        probe = arr[mid]
        if probe == target:
            return mid
        if probe > target:
            hi = mid - 1          # target can only be in the left half
        else:
            lo = mid + 1          # target can only be in the right half
    return -1
|
<reponame>hyperpape/needle<filename>needle-types/src/main/java/com/justinblank/strings/ByteClassUtil.java<gh_stars>10-100
package com.justinblank.strings;
/** Helpers for byte-class tables used by the string matcher. */
public class ByteClassUtil {

    /**
     * Assigns {@code state} to every slot of {@code bytes} in the
     * inclusive range [startingIndex, endingIndex].
     */
    public static void fillBytes(byte[] bytes, byte state, int startingIndex, int endingIndex) {
        for (int idx = startingIndex; idx <= endingIndex; idx++) {
            bytes[idx] = state;
        }
    }

    /**
     * Returns the largest value stored in {@code bytes}, or
     * {@code Integer.MIN_VALUE} for an empty array.
     */
    public static int maxByteClass(byte[] bytes) {
        int best = Integer.MIN_VALUE;
        for (byte b : bytes) {
            best = Math.max(best, b);
        }
        return best;
    }
}
|
<reponame>littlerobin/react_pet_shop<filename>src/containers/FaqPage/FaqPage.js
import React from 'react';
import { StaticPage, TopbarContainer } from '../../containers';
import {
LayoutSingleColumn,
LayoutWrapperTopbar,
LayoutWrapperMain,
LayoutWrapperFooter,
Footer,
} from '../../components';
import css from './FaqPage.css';
const FaqPage = () => {
// prettier-ignore
return (
<StaticPage
title="FAQ | Trust My Pet Sitter"
schema={{
'@context': 'http://schema.org',
'@type': 'FaqPage',
description: 'Frequently Asked Questions',
name: 'FaqPage',
}}
>
<LayoutSingleColumn>
<LayoutWrapperTopbar>
<TopbarContainer />
</LayoutWrapperTopbar>
<LayoutWrapperMain className={css.staticPageWrapper}>
<div className={css.sectionContent}>
<a href="/pet-owner-faq">
<div className={css.faqBg} id="faqBg">
<div className={css.spanContainer} id="spanContainer">
<span>Pet <span className={css.spanMain}>Owners</span> Corner</span>
</div>
</div>
</a>
<a href="/pet-sitter-faq">
<div className={css.faqBg2} id="faqBg2">
<div className={css.spanContainer} id="spanContainer2">
<span>Pet <span className={css.spanMain}>Sitter</span> Corner</span>
</div>
</div>
</a>
<a href="/pet-services-faq">
<div className={css.faqBg3} id="faqBg3">
<div className={css.spanContainer} id="spanContainer3">
<span>Pet <span className={css.spanMain}>Services</span> Corner</span>
</div>
</div>
</a>
</div>
</LayoutWrapperMain>
<LayoutWrapperFooter>
<Footer />
</LayoutWrapperFooter>
</LayoutSingleColumn>
</StaticPage>
);
};
export default FaqPage;
|
#!/bin/bash
# SLURM batch job: synthesize the N-Body LRBD ring kernel with the Intel
# FPGA OpenCL offline compiler (aoc). Hardware synthesis runs for many
# hours, hence the 36h wall-clock limit and large memory request.
#SBATCH -J "NBody Reduced LRBD solver first synthesis"
#SBATCH -p fpgasyn
#SBATCH --mem=120000MB
#SBATCH -t 36:00:00
# Load the board support package and the matching FPGA toolchain.
module reset
module load nalla_pcie/19.4.0_max
module load intelFPGA_pro/20.4.0
# Fixed seed (-seed=42) keeps the place-and-route result reproducible.
aoc -v -board=p520_max_sg280l device/ring_lrbd_lf_no_sync_local.cl -global-ring -duplicate-ring -ffp-reassoc -ffp-contract=fast -seed=42 -o bin/ring_lrbd_4x4PE_lf_no_sync_local_s42.aocx -DPE_LOCAL_CNT=4 -DPE_LOCAL_CNT_ALLOC=4 -DPE_REMOTE_CNT=4 -DPE_REMOTE_CNT_ALLOC=4
|
#!/usr/bin/env bash
# Integration tests for catalyst's CheckpointCallback: each "pipeline" below
# runs a training config and asserts on the checkpoints and log lines produced.
# Cause the script to exit if a single command fails
set -eo pipefail -v

################################  global variables  ################################
# Clean up leftovers from any previous run before starting.
rm -rf ./tests/logs ./tests/output.txt
EXPDIR=./tests/_tests_dl_callbacks
LOGDIR=./tests/logs/_tests_dl_callbacks
CHECKPOINTS=${LOGDIR}/checkpoints
LOGFILE=${CHECKPOINTS}/_metrics.json
EXP_OUTPUT=./tests/output.txt
function check_file_existence {
    # $1 - path that must exist as a regular file; abort the whole run otherwise
    [[ -f "$1" ]] || { echo "There is no '$1'!"; exit 1; }
}
function check_num_files {
    # $1 - argument for ls (directory or glob pattern), $2 - expected entry count
    NFILES=$( ls $1 | wc -l )  # $1 deliberately unquoted so glob patterns expand
    if (( NFILES != $2 )); then
        echo "Different number of files in '$1' - expected $2 but actual number is $NFILES!"
        exit 1
    fi
}
function check_checkpoints {
    # $1 - checkpoint file prefix (may contain a glob), $2 - expected count.
    # Every checkpoint is stored twice: <name>.pth and <name>_full.pth.
    local prefix=$1 expected=$2
    check_num_files "${prefix}.pth" "$expected"
    check_num_files "${prefix}_full.pth" "$expected"
}
function check_line_counts {
    # $1 - file, $2 - grep pattern, $3 - expected number of matching lines.
    # '|| true' keeps 'set -o pipefail' from aborting when grep finds nothing.
    ACTUAL_COUNT=$( grep -c "$2" $1 || true )
    if (( ACTUAL_COUNT != $3 )); then
        echo "Different number of lines in file '$1' - expected $3 (should match '$2') but actual number is $ACTUAL_COUNT!"
        exit 1
    fi
}
# Pipelines 00-05: vary number of saved checkpoints (save_n_best) and number
# of training stages; each run asserts on "=> Loading" log lines and on the
# exact set of checkpoint files left behind.
################################  pipeline 00  ################################
# checking default parameters of checkpoint and one stage
LOG_MSG='pipeline 00'
echo ${LOG_MSG}

PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
  python catalyst/dl/scripts/run.py \
  --expdir=${EXPDIR} \
  --config=${EXPDIR}/config0.yml \
  --logdir=${LOGDIR} > ${EXP_OUTPUT}

cat ${EXP_OUTPUT}
check_line_counts ${EXP_OUTPUT} "=> Loading" 0

check_file_existence ${LOGFILE}
cat ${LOGFILE}
echo ${LOG_MSG}

check_checkpoints "${CHECKPOINTS}/best" 1
check_checkpoints "${CHECKPOINTS}/last" 1
check_checkpoints "${CHECKPOINTS}/stage1\.[[:digit:]]" 1
check_num_files ${CHECKPOINTS} 7   # 3x2 checkpoints + metrics.json

rm -rf ${LOGDIR} ${EXP_OUTPUT}


################################  pipeline 01  ################################
# checking with one checkpoint and one stage
LOG_MSG='pipeline 01'
echo ${LOG_MSG}

PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
  python catalyst/dl/scripts/run.py \
  --expdir=${EXPDIR} \
  --config=${EXPDIR}/config1.yml \
  --logdir=${LOGDIR} > ${EXP_OUTPUT}

cat ${EXP_OUTPUT}
check_line_counts ${EXP_OUTPUT} "=> Loading" 0

check_file_existence ${LOGFILE}
cat $LOGFILE;
echo ${LOG_MSG};

check_checkpoints "${CHECKPOINTS}/best" 1
check_checkpoints "${CHECKPOINTS}/last" 1
check_checkpoints "${CHECKPOINTS}/stage1\.[[:digit:]]" 1
check_num_files ${CHECKPOINTS} 7   # 3x2 checkpoints + metrics.json

rm -rf ${LOGDIR} ${EXP_OUTPUT}


################################  pipeline 02  ################################
# checking with one checkpoint and three stages
LOG_MSG='pipeline 02'
echo ${LOG_MSG}

PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
  python catalyst/dl/scripts/run.py \
  --expdir=${EXPDIR} \
  --config=${EXPDIR}/config2.yml \
  --logdir=${LOGDIR} > ${EXP_OUTPUT}

cat ${EXP_OUTPUT}
# two stage transitions -> two checkpoint loads, both from best.pth
check_line_counts ${EXP_OUTPUT} "=> Loading" 2
check_line_counts ${EXP_OUTPUT} "=> Loading .*best\.pth" 2

check_file_existence ${LOGFILE}
cat ${LOGFILE}
echo ${LOG_MSG}

check_checkpoints "${CHECKPOINTS}/best" 1
check_checkpoints "${CHECKPOINTS}/last" 1
check_checkpoints "${CHECKPOINTS}/stage1\.[[:digit:]]" 1
check_checkpoints "${CHECKPOINTS}/stage2\.[[:digit:]]" 1
check_checkpoints "${CHECKPOINTS}/stage3\.[[:digit:]]" 1
check_num_files ${CHECKPOINTS} 11  # 5x2 checkpoints + metrics.json

rm -rf ${LOGDIR} ${EXP_OUTPUT}


################################  pipeline 03  ################################
# checking with three checkpoints and one stage
LOG_MSG='pipeline 03'
echo ${LOG_MSG}

PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
  python catalyst/dl/scripts/run.py \
  --expdir=${EXPDIR} \
  --config=${EXPDIR}/config3.yml \
  --logdir=${LOGDIR} > ${EXP_OUTPUT}

cat ${EXP_OUTPUT}
check_line_counts ${EXP_OUTPUT} "=> Loading" 0

check_file_existence ${LOGFILE}
cat $LOGFILE;
echo ${LOG_MSG};

check_checkpoints "${CHECKPOINTS}/best" 1
check_checkpoints "${CHECKPOINTS}/last" 1
check_checkpoints "${CHECKPOINTS}/stage1\.[[:digit:]]" 3
check_num_files ${CHECKPOINTS} 11  # 5x2 checkpoints + metrics.json

rm -rf ${LOGDIR} ${EXP_OUTPUT}


################################  pipeline 04  ################################
# checking with three checkpoint and three stages
LOG_MSG='pipeline 04'
echo ${LOG_MSG}

PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
  python catalyst/dl/scripts/run.py \
  --expdir=${EXPDIR} \
  --config=${EXPDIR}/config4.yml \
  --logdir=${LOGDIR} > ${EXP_OUTPUT}

cat ${EXP_OUTPUT}
check_line_counts ${EXP_OUTPUT} "=> Loading" 2
check_line_counts ${EXP_OUTPUT} "=> Loading .*best\.pth" 2

check_file_existence ${LOGFILE}
cat ${LOGFILE}
echo ${LOG_MSG}

check_checkpoints "${CHECKPOINTS}/best" 1
check_checkpoints "${CHECKPOINTS}/last" 1
check_checkpoints "${CHECKPOINTS}/stage1\.[[:digit:]]" 3
check_checkpoints "${CHECKPOINTS}/stage2\.[[:digit:]]" 3
check_checkpoints "${CHECKPOINTS}/stage3\.[[:digit:]]" 3
check_num_files ${CHECKPOINTS} 23  # 11x2 checkpoints + metrics.json

rm -rf ${LOGDIR} ${EXP_OUTPUT}


################################  pipeline 05  ################################
# checking with zero checkpoints and one stage
LOG_MSG='pipeline 05'
echo ${LOG_MSG}

PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
  python catalyst/dl/scripts/run.py \
  --expdir=${EXPDIR} \
  --config=${EXPDIR}/config5.yml \
  --logdir=${LOGDIR} > ${EXP_OUTPUT}

cat ${EXP_OUTPUT}
check_line_counts ${EXP_OUTPUT} "=> Loading" 0

check_file_existence ${LOGFILE}
cat ${LOGFILE}
echo ${LOG_MSG}

check_checkpoints "${CHECKPOINTS}/best" 1
check_checkpoints "${CHECKPOINTS}/last" 1
check_num_files ${CHECKPOINTS} 5   # 2x2 checkpoints + metrics.json

rm -rf ${LOGDIR} ${EXP_OUTPUT}
################################  pipeline 06  ################################
# checking with zero checkpoints and one stage
# spoiler - no loading at the end of a stage, only load last state at stage start
# this means that will be default behaviour - loaded best state
LOG_MSG='pipeline 06'
echo ${LOG_MSG}

PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
  python catalyst/dl/scripts/run.py \
  --expdir=${EXPDIR} \
  --config=${EXPDIR}/config6.yml \
  --logdir=${LOGDIR} > ${EXP_OUTPUT}

cat ${EXP_OUTPUT}
check_line_counts ${EXP_OUTPUT} "=> Loading" 2
check_line_counts ${EXP_OUTPUT} "=> Loading .*best\.pth" 2

check_file_existence ${LOGFILE}
cat ${LOGFILE}
echo ${LOG_MSG}

check_checkpoints "${CHECKPOINTS}/best" 1
check_checkpoints "${CHECKPOINTS}/last" 1
check_num_files ${CHECKPOINTS} 5   # 2x2 checkpoints + metrics.json

rm -rf ${LOGDIR} ${EXP_OUTPUT}


################################  pipeline 07  ################################
# checking with one checkpoint and one stage
LOG_MSG='pipeline 07'
echo ${LOG_MSG}

PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
  python catalyst/dl/scripts/run.py \
  --expdir=${EXPDIR} \
  --config=${EXPDIR}/config7.yml \
  --logdir=${LOGDIR} > ${EXP_OUTPUT}

cat ${EXP_OUTPUT}
check_line_counts ${EXP_OUTPUT} "=> Loading" 0

check_file_existence ${LOGFILE}
cat ${LOGFILE}
echo ${LOG_MSG}

check_checkpoints "${CHECKPOINTS}/best" 1
check_checkpoints "${CHECKPOINTS}/last" 1
check_checkpoints "${CHECKPOINTS}/stage1\.[[:digit:]]" 1
check_num_files ${CHECKPOINTS} 7   # 3x2 checkpoints + metrics.json

rm -rf ${LOGDIR} ${EXP_OUTPUT}


################################  pipeline 08  ################################
# checking with three checkpoints and three stages
LOG_MSG='pipeline 08'
echo ${LOG_MSG}

PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
  python catalyst/dl/scripts/run.py \
  --expdir=${EXPDIR} \
  --config=${EXPDIR}/config8.yml \
  --logdir=${LOGDIR} > ${EXP_OUTPUT}

cat ${EXP_OUTPUT}
# mixed loads: plain model states (best.pth) and full states (best_full.pth)
check_line_counts ${EXP_OUTPUT} "=> Loading" 5
check_line_counts ${EXP_OUTPUT} "=> Loading .*best\.pth" 2
check_line_counts ${EXP_OUTPUT} "=> Loading .*best_full\.pth" 3

check_file_existence ${LOGFILE}
cat ${LOGFILE}
echo ${LOG_MSG}

check_checkpoints "${CHECKPOINTS}/best" 1
check_checkpoints "${CHECKPOINTS}/last" 1
check_checkpoints "${CHECKPOINTS}/stage1\.[[:digit:]]" 3
check_checkpoints "${CHECKPOINTS}/stage2\.[[:digit:]]" 3
check_checkpoints "${CHECKPOINTS}/stage3\.[[:digit:]]" 3
check_num_files ${CHECKPOINTS} 23  # 11x2 checkpoints + metrics.json

rm -rf ${LOGDIR} ${EXP_OUTPUT}


################################  pipeline 09  ################################
# checking with one checkpoint and two stages
# with different ''load_on_stage_end'' options
LOG_MSG='pipeline 09'
echo ${LOG_MSG}

PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
  python catalyst/dl/scripts/run.py \
  --expdir=${EXPDIR} \
  --config=${EXPDIR}/config9.yml \
  --logdir=${LOGDIR} > ${EXP_OUTPUT}

cat ${EXP_OUTPUT}
check_line_counts ${EXP_OUTPUT} "=> Loading" 2
check_line_counts ${EXP_OUTPUT} "=> Loading .*best\.pth" 2

check_file_existence ${LOGFILE}
cat ${LOGFILE}
echo ${LOG_MSG}

check_checkpoints "${CHECKPOINTS}/best" 1
check_checkpoints "${CHECKPOINTS}/last" 1
check_checkpoints "${CHECKPOINTS}/stage1\.[[:digit:]]" 1
check_checkpoints "${CHECKPOINTS}/stage2\.[[:digit:]]" 1
check_num_files ${CHECKPOINTS} 9   # 4x2 checkpoints + metrics.json

rm -rf ${LOGDIR} ${EXP_OUTPUT}


################################  pipeline 10  ################################
# checking with three checkpoints and two stages
# with different ''load_on_stage_end'' options
LOG_MSG='pipeline 10'
echo ${LOG_MSG}

PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
  python catalyst/dl/scripts/run.py \
  --expdir=${EXPDIR} \
  --config=${EXPDIR}/config10.yml \
  --logdir=${LOGDIR} > ${EXP_OUTPUT}

cat ${EXP_OUTPUT}
check_line_counts ${EXP_OUTPUT} "=> Loading" 2
check_line_counts ${EXP_OUTPUT} "=> Loading .*best\.pth" 2

check_file_existence ${LOGFILE}
cat ${LOGFILE}
echo ${LOG_MSG}

check_checkpoints "${CHECKPOINTS}/best" 1
check_checkpoints "${CHECKPOINTS}/last" 1
check_checkpoints "${CHECKPOINTS}/stage1\.[[:digit:]]" 3
check_checkpoints "${CHECKPOINTS}/stage2\.[[:digit:]]" 3
check_num_files ${CHECKPOINTS} 17  # 8x2 checkpoints + metrics.json

rm -rf ${LOGDIR} ${EXP_OUTPUT}


################################  pipeline 11  ################################
# checking with three stages and default not specified callback
# (CheckpointCallback is one of default callbacks)
LOG_MSG='pipeline 11'
echo ${LOG_MSG}

PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
  python catalyst/dl/scripts/run.py \
  --expdir=${EXPDIR} \
  --config=${EXPDIR}/config11.yml \
  --logdir=${LOGDIR} > ${EXP_OUTPUT}

cat ${EXP_OUTPUT}
check_line_counts ${EXP_OUTPUT} "=> Loading" 1
check_line_counts ${EXP_OUTPUT} "=> Loading .*best\.pth" 1

check_file_existence ${LOGFILE}
cat ${LOGFILE}
echo ${LOG_MSG}

check_checkpoints "${CHECKPOINTS}/best" 1
check_checkpoints "${CHECKPOINTS}/last" 1
check_checkpoints "${CHECKPOINTS}/stage1\.[[:digit:]]" 1
check_checkpoints "${CHECKPOINTS}/stage2\.[[:digit:]]" 1
check_num_files ${CHECKPOINTS} 9   # 4x2 checkpoints + metrics.json

rm -rf ${LOGDIR} ${EXP_OUTPUT}
################################  pipeline 12  ################################
# testing resume option
# Note: these pipelines redefine LOGDIR/CHECKPOINTS/LOGFILE because the first
# run produces checkpoints that the second ("resumed") run loads from.
LOG_MSG='pipeline 12'
echo ${LOG_MSG}

LOGDIR=./tests/logs/_tests_dl_callbacks/for_resume
CHECKPOINTS=${LOGDIR}/checkpoints
LOGFILE=${CHECKPOINTS}/_metrics.json

PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
  python catalyst/dl/scripts/run.py \
  --expdir=${EXPDIR} \
  --config=${EXPDIR}/config0.yml \
  --logdir=${LOGDIR} > ${EXP_OUTPUT}

cat ${EXP_OUTPUT}
check_line_counts ${EXP_OUTPUT} "=> Loading" 0

check_file_existence $LOGFILE
cat ${LOGFILE}
echo ${LOG_MSG}

check_checkpoints "${CHECKPOINTS}/best" 1
check_checkpoints "${CHECKPOINTS}/last" 1
check_checkpoints "${CHECKPOINTS}/stage1\.[[:digit:]]" 1
check_num_files $CHECKPOINTS 7     # 3x2 checkpoints + metrics.json

LOGDIR=./tests/logs/_tests_dl_callbacks/resumed
CHECKPOINTS=${LOGDIR}/checkpoints
LOGFILE=${CHECKPOINTS}/_metrics.json

PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
  python catalyst/dl/scripts/run.py \
  --expdir=${EXPDIR} \
  --config=${EXPDIR}/config12.yml \
  --logdir=${LOGDIR} > ${EXP_OUTPUT}

cat ${EXP_OUTPUT}
check_line_counts ${EXP_OUTPUT} "=> Loading" 1

check_file_existence ${LOGFILE}
cat ${LOGFILE}
echo ${LOG_MSG}

check_checkpoints "${CHECKPOINTS}/best" 1
check_checkpoints "${CHECKPOINTS}/last" 1
check_checkpoints "${CHECKPOINTS}/stage1\.[[:digit:]]" 1
check_num_files ${CHECKPOINTS} 7   # 3x2 checkpoints + metrics.json

rm -rf ./tests/logs/_tests_dl_callbacks ${EXP_OUTPUT}


################################  pipeline 13  ################################
# testing resume and resume_dir option
LOG_MSG='pipeline 13'
echo ${LOG_MSG}

LOGDIR=./tests/logs/_tests_dl_callbacks/for_resume
CHECKPOINTS=${LOGDIR}/checkpoints
LOGFILE=${CHECKPOINTS}/_metrics.json

PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
  python catalyst/dl/scripts/run.py \
  --expdir=${EXPDIR} \
  --config=${EXPDIR}/config0.yml \
  --logdir=${LOGDIR} > ${EXP_OUTPUT}

cat ${EXP_OUTPUT}
check_line_counts ${EXP_OUTPUT} "=> Loading" 0

check_file_existence ${LOGFILE}
cat ${LOGFILE}
echo ${LOG_MSG}

check_checkpoints "${CHECKPOINTS}/best" 1
check_checkpoints "${CHECKPOINTS}/last" 1
check_checkpoints "${CHECKPOINTS}/stage1\.[[:digit:]]" 1
check_num_files ${CHECKPOINTS} 7   # 3x2 checkpoints + metrics.json

LOGDIR=./tests/logs/_tests_dl_callbacks/resumed
CHECKPOINTS=${LOGDIR}/checkpoints
LOGFILE=${CHECKPOINTS}/_metrics.json

PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
  python catalyst/dl/scripts/run.py \
  --expdir=${EXPDIR} \
  --config=${EXPDIR}/config13.yml \
  --logdir=${LOGDIR} > ${EXP_OUTPUT}

cat ${EXP_OUTPUT}
check_line_counts ${EXP_OUTPUT} "=> Loading" 1

check_file_existence ${LOGFILE}
cat ${LOGFILE}
echo ${LOG_MSG}

check_checkpoints "${CHECKPOINTS}/best" 1
check_checkpoints "${CHECKPOINTS}/last" 1
check_checkpoints "${CHECKPOINTS}/stage1\.[[:digit:]]" 1
check_num_files ${CHECKPOINTS} 7   # 3x2 checkpoints + metrics.json

rm -rf ./tests/logs/_tests_dl_callbacks ${EXP_OUTPUT}


################################  pipeline 14  ################################
# testing on_stage_start option
LOG_MSG='pipeline 14'
echo ${LOG_MSG}

# restore the default locations changed by pipelines 12/13
LOGDIR=./tests/logs/_tests_dl_callbacks
CHECKPOINTS=${LOGDIR}/checkpoints
LOGFILE=${CHECKPOINTS}/_metrics.json

PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
  python catalyst/dl/scripts/run.py \
  --expdir=${EXPDIR} \
  --config=${EXPDIR}/config14.yml \
  --logdir=${LOGDIR} > ${EXP_OUTPUT}

cat ${EXP_OUTPUT}
check_line_counts ${EXP_OUTPUT} "=> Loading" 2
check_line_counts ${EXP_OUTPUT} "=> Loading .*last\.pth" 1
check_line_counts ${EXP_OUTPUT} "=> Loading .*best\.pth" 1

check_file_existence ${LOGFILE}
cat ${LOGFILE}
echo ${LOG_MSG}

check_checkpoints "${CHECKPOINTS}/best" 1
check_checkpoints "${CHECKPOINTS}/last" 1
check_checkpoints "${CHECKPOINTS}/stage1\.[[:digit:]]" 1
check_checkpoints "${CHECKPOINTS}/stage2\.[[:digit:]]" 2
check_checkpoints "${CHECKPOINTS}/stage3\.[[:digit:]]" 3
check_num_files ${CHECKPOINTS} 17  # 8x2 checkpoints + metrics.json

rm -rf ${LOGDIR} ${EXP_OUTPUT}


################################  pipeline 15  ################################
# testing on_stage_start option with different loading states
LOG_MSG='pipeline 15'
echo ${LOG_MSG}

PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
  python catalyst/dl/scripts/run.py \
  --expdir=${EXPDIR} \
  --config=${EXPDIR}/config15.yml \
  --logdir=${LOGDIR} > ${EXP_OUTPUT}

cat ${EXP_OUTPUT}
check_line_counts ${EXP_OUTPUT} "=> Loading" 4
check_line_counts ${EXP_OUTPUT} "=> Loading .*best_full\.pth" 2
check_line_counts ${EXP_OUTPUT} "=> Loading .*last_full\.pth" 2

check_file_existence ${LOGFILE}
cat ${LOGFILE}
echo ${LOG_MSG}

check_checkpoints "${CHECKPOINTS}/best" 1
check_checkpoints "${CHECKPOINTS}/last" 1
check_checkpoints "${CHECKPOINTS}/stage1\.[[:digit:]]" 1
check_checkpoints "${CHECKPOINTS}/stage2\.[[:digit:]]" 2
check_checkpoints "${CHECKPOINTS}/stage3\.[[:digit:]]" 3
check_num_files ${CHECKPOINTS} 17  # 8x2 checkpoints + metrics.json

rm -rf ${LOGDIR} ${EXP_OUTPUT}


################################  pipeline 16  ################################
# testing on_stage_start option with different loading states and
# missing model state (should load best)
LOG_MSG='pipeline 16'
echo ${LOG_MSG}

PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
  python catalyst/dl/scripts/run.py \
  --expdir=${EXPDIR} \
  --config=${EXPDIR}/config16.yml \
  --logdir=${LOGDIR} > ${EXP_OUTPUT}

cat ${EXP_OUTPUT}
check_line_counts ${EXP_OUTPUT} "=> Loading" 4
check_line_counts ${EXP_OUTPUT} "=> Loading .*best_full\.pth" 2
check_line_counts ${EXP_OUTPUT} "=> Loading .*last_full\.pth" 2

check_file_existence ${LOGFILE}
cat ${LOGFILE}
echo ${LOG_MSG}

check_checkpoints "${CHECKPOINTS}/best" 1
check_checkpoints "${CHECKPOINTS}/last" 1
check_checkpoints "${CHECKPOINTS}/stage1\.[[:digit:]]" 1
check_checkpoints "${CHECKPOINTS}/stage2\.[[:digit:]]" 2
check_checkpoints "${CHECKPOINTS}/stage3\.[[:digit:]]" 3
check_num_files ${CHECKPOINTS} 17  # 8x2 checkpoints + metrics.json

rm -rf ${LOGDIR} ${EXP_OUTPUT}
################################  pipeline 17  ################################
# same checks via the Python API (SupervisedRunner) instead of a yml config.
# The inline program below is a runtime string passed to `python3 -c` and is
# kept byte-for-byte unchanged.
LOG_MSG='pipeline 17'
echo ${LOG_MSG}

PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
  USE_DDP=0 \
  USE_APEX=0 \
  python3 -c "
import torch
from torch.utils.data import DataLoader, TensorDataset
from catalyst.dl import SupervisedRunner, State, Callback, CallbackOrder, CheckpointCallback

# experiment_setup
logdir = '${LOGDIR}'
num_epochs = 5

# data
num_samples, num_features = int(1e4), int(1e1)
X = torch.rand(num_samples, num_features)
y = torch.randint(0, 5, size=[num_samples])
dataset = TensorDataset(X, y)
loader = DataLoader(dataset, batch_size=32, num_workers=1)
loaders = {'train': loader, 'valid': loader}

# model, criterion, optimizer, scheduler
model = torch.nn.Linear(num_features, 5)
criterion = torch.nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(model.parameters())
runner = SupervisedRunner()

# first stage
runner.train(
    model=model,
    criterion=criterion,
    optimizer=optimizer,
    loaders=loaders,
    logdir=logdir,
    num_epochs=num_epochs,
    verbose=False,
    callbacks=[
        CheckpointCallback(
            save_n_best=2,
            load_on_stage_end='best'
        ),
    ]
)
" > ${EXP_OUTPUT}

cat ${EXP_OUTPUT}
check_line_counts ${EXP_OUTPUT} "=> Loading" 1
check_line_counts ${EXP_OUTPUT} "=> Loading .*best\.pth" 1

check_file_existence ${LOGFILE}
cat ${LOGFILE}
echo ${LOG_MSG}

check_checkpoints "${CHECKPOINTS}/best" 1
check_checkpoints "${CHECKPOINTS}/last" 1
check_checkpoints "${CHECKPOINTS}/train\.[[:digit:]]" 2
check_num_files ${CHECKPOINTS} 9   # 4x2 checkpoints + metrics.json

rm -rf ${LOGDIR}
################################ pipeline 18 ################################
# two-stage run through the Python API: per-part (model/criterion/optimizer)
# loading via dict-valued load_on_stage_end and load_on_stage_start
LOG_MSG='pipeline 18'
echo ${LOG_MSG}
PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
USE_DDP=0 \
USE_APEX=0 \
python3 -c "
import torch
from torch.utils.data import DataLoader, TensorDataset
from catalyst.dl import SupervisedRunner, State, Callback, CallbackOrder, CheckpointCallback
# experiment_setup
logdir = '${LOGDIR}'
num_epochs = 5
# data
num_samples, num_features = int(1e4), int(1e1)
X = torch.rand(num_samples, num_features)
y = torch.randint(0, 5, size=[num_samples])
dataset = TensorDataset(X, y)
loader = DataLoader(dataset, batch_size=32, num_workers=1)
loaders = {'train': loader, 'valid': loader}
# model, criterion, optimizer, scheduler
model = torch.nn.Linear(num_features, 5)
criterion = torch.nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(model.parameters())
runner = SupervisedRunner()
# first stage
runner.train(
    model=model,
    criterion=criterion,
    optimizer=optimizer,
    loaders=loaders,
    logdir=logdir,
    num_epochs=num_epochs,
    verbose=False,
    callbacks=[
        CheckpointCallback(
            save_n_best=2,
            load_on_stage_end={
                'model': 'best',
                'criterion': 'best',
                'optimizer': 'last',
            }
        ),
    ]
)
# second stage
runner.train(
    model=model,
    criterion=criterion,
    optimizer=optimizer,
    loaders=loaders,
    logdir=logdir,
    num_epochs=num_epochs,
    verbose=False,
    callbacks=[
        CheckpointCallback(
            save_n_best=3,
            load_on_stage_start={
                'model': 'last',
                'criterion': 'last',
                'optimizer': 'best',
            }
        ),
    ]
)
" > ${EXP_OUTPUT}
cat ${EXP_OUTPUT}
# Expect three loads overall: two from best_full, one from last_full.
check_line_counts ${EXP_OUTPUT} "=> Loading" 3
check_line_counts ${EXP_OUTPUT} "=> Loading .*best_full\.pth" 2
check_line_counts ${EXP_OUTPUT} "=> Loading .*last_full\.pth" 1
check_file_existence ${LOGFILE}
cat ${LOGFILE}
echo ${LOG_MSG}
check_checkpoints "${CHECKPOINTS}/best" 1
check_checkpoints "${CHECKPOINTS}/last" 1
check_checkpoints "${CHECKPOINTS}/train\.[[:digit:]]" 3
check_num_files ${CHECKPOINTS} 11 # 5x2 checkpoints + metrics.json
rm -rf ${LOGDIR} ${EXP_OUTPUT}
################################ pipeline 19 ################################
# negative test: resume from a non-existing checkpoint file must raise
# FileNotFoundError, which the snippet catches and reports
LOG_MSG='pipeline 19'
echo ${LOG_MSG}
# NOTE(review): unlike the other pipelines the output here is not redirected to
# ${EXP_OUTPUT} and no check_* assertions follow - presumably intentional since
# only the exception path matters; confirm.
PYTHONPATH=./examples:./catalyst:${PYTHONPATH} python3 -c "
import torch
from torch.utils.data import DataLoader, TensorDataset
from catalyst.dl import SupervisedRunner, State, Callback, CallbackOrder, CheckpointCallback
# experiment_setup
logdir = '${LOGDIR}'
num_epochs = 5
# data
num_samples, num_features = int(1e4), int(1e1)
X = torch.rand(num_samples, num_features)
y = torch.randint(0, 5, size=[num_samples])
dataset = TensorDataset(X, y)
loader = DataLoader(dataset, batch_size=32, num_workers=1)
loaders = {'train': loader, 'valid': loader}
# model, criterion, optimizer, scheduler
model = torch.nn.Linear(num_features, 5)
criterion = torch.nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(model.parameters())
runner = SupervisedRunner()
# first stage
try:
    runner.train(
        model=model,
        criterion=criterion,
        optimizer=optimizer,
        loaders=loaders,
        logdir=logdir,
        num_epochs=num_epochs,
        verbose=False,
        callbacks=[
            CheckpointCallback(
                save_n_best=2,
                load_on_stage_end={
                    'model': 'best',
                    'criterion': 'best',
                    'optimizer': 'last',
                },
                resume='not_existing_file.pth'
            ),
        ]
    )
    # TODO: switch to pytest and handle there FileNotFoundError
except FileNotFoundError as e:
    print('Successfully handled FileNotFoundError!')
"
rm -rf ${LOGDIR}
################################ pipeline 20 ################################
# test with an empty dict
# Fix: the log label previously said 'pipeline 19', mislabelling this section.
LOG_MSG='pipeline 20'
echo ${LOG_MSG}
PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
USE_DDP=0 \
USE_APEX=0 \
python3 -c "
import torch
from torch.utils.data import DataLoader, TensorDataset
from catalyst.dl import SupervisedRunner, State, Callback, CallbackOrder, CheckpointCallback
# experiment_setup
logdir = '${LOGDIR}'
num_epochs = 5
# data
num_samples, num_features = int(1e4), int(1e1)
X = torch.rand(num_samples, num_features)
y = torch.randint(0, 5, size=[num_samples])
dataset = TensorDataset(X, y)
loader = DataLoader(dataset, batch_size=32, num_workers=1)
loaders = {'train': loader, 'valid': loader}
# model, criterion, optimizer, scheduler
model = torch.nn.Linear(num_features, 5)
criterion = torch.nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(model.parameters())
runner = SupervisedRunner()
# first stage
runner.train(
    model=model,
    criterion=criterion,
    optimizer=optimizer,
    loaders=loaders,
    logdir=logdir,
    num_epochs=num_epochs,
    verbose=False,
    callbacks=[
        CheckpointCallback(save_n_best=2),
    ]
)
# second stage
runner.train(
    model=model,
    criterion=criterion,
    optimizer=optimizer,
    loaders=loaders,
    logdir=logdir,
    num_epochs=num_epochs,
    verbose=False,
    callbacks=[
        CheckpointCallback(
            save_n_best=3,
            load_on_stage_start={}
        ),
    ]
)
" > ${EXP_OUTPUT}
cat ${EXP_OUTPUT}
# An empty load_on_stage_start dict must trigger no checkpoint loading at all.
check_line_counts ${EXP_OUTPUT} "=> Loading" 0
check_file_existence ${LOGFILE}
cat ${LOGFILE}
echo ${LOG_MSG}
check_checkpoints "${CHECKPOINTS}/best" 1
check_checkpoints "${CHECKPOINTS}/last" 1
check_checkpoints "${CHECKPOINTS}/train\.[[:digit:]]" 3
check_num_files ${CHECKPOINTS} 11 # 5x2 checkpoints + metrics.json
rm -rf ${LOGDIR} ${EXP_OUTPUT}
################################ pipeline 21 ################################
# testing on_stage_start option with missing on_stage_start and with
# dict with random key (test for ignoring random keys)
LOG_MSG='pipeline 21'
echo ${LOG_MSG}
PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
python catalyst/dl/scripts/run.py \
--expdir=${EXPDIR} \
--config=${EXPDIR}/config17.yml \
--logdir=${LOGDIR} > ${EXP_OUTPUT}
cat ${EXP_OUTPUT}
# Unknown keys must be ignored: only one load (of best.pth) is expected.
check_line_counts ${EXP_OUTPUT} "=> Loading" 1
check_line_counts ${EXP_OUTPUT} "=> Loading .*best\.pth" 1
check_file_existence ${LOGFILE}
cat ${LOGFILE}
echo ${LOG_MSG}
check_checkpoints "${CHECKPOINTS}/best" 1
check_checkpoints "${CHECKPOINTS}/last" 1
check_checkpoints "${CHECKPOINTS}/stage1\.[[:digit:]]" 1
check_checkpoints "${CHECKPOINTS}/stage2\.[[:digit:]]" 2
check_num_files ${CHECKPOINTS} 11 # 5x2 checkpoints + metrics.json
rm -rf ${LOGDIR} ${EXP_OUTPUT}
################################ pipeline 22 ################################
# testing loading from checkpoint specified by path
LOG_MSG='pipeline 22'
echo ${LOG_MSG}
# First run only produces checkpoints under "for_resume" for the second run to load.
LOGDIR=./tests/logs/_tests_dl_callbacks/for_resume
CHECKPOINTS=${LOGDIR}/checkpoints
LOGFILE=${CHECKPOINTS}/_metrics.json
PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
python catalyst/dl/scripts/run.py \
--expdir=${EXPDIR} \
--config=${EXPDIR}/config0.yml \
--logdir=${LOGDIR} > ${EXP_OUTPUT}
cat ${EXP_OUTPUT}
check_line_counts ${EXP_OUTPUT} "=> Loading" 0
check_file_existence $LOGFILE
cat ${LOGFILE}
echo ${LOG_MSG}
check_checkpoints "${CHECKPOINTS}/best" 1
check_checkpoints "${CHECKPOINTS}/last" 1
check_checkpoints "${CHECKPOINTS}/stage1\.[[:digit:]]" 1
check_num_files $CHECKPOINTS 7 # 3x2 checkpoints + metrics.json
# Second run loads the first run's checkpoints by explicit path (config18).
LOGDIR=./tests/logs/_tests_dl_callbacks/other
CHECKPOINTS=${LOGDIR}/checkpoints
LOGFILE=${CHECKPOINTS}/_metrics.json
PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
python catalyst/dl/scripts/run.py \
--expdir=${EXPDIR} \
--config=${EXPDIR}/config18.yml \
--logdir=${LOGDIR} > ${EXP_OUTPUT}
cat ${EXP_OUTPUT}
check_line_counts ${EXP_OUTPUT} "=> Loading" 2
# Fix: escape the dots so '.pth' is matched literally, consistent with the
# patterns used by every other pipeline check.
check_line_counts ${EXP_OUTPUT} "=> .*for_resume/checkpoints/best\.pth" 1
check_line_counts ${EXP_OUTPUT} "=> .*for_resume/checkpoints/last_full\.pth" 1
check_file_existence ${LOGFILE}
cat ${LOGFILE}
echo ${LOG_MSG}
check_checkpoints "${CHECKPOINTS}/best" 1
check_checkpoints "${CHECKPOINTS}/last" 1
check_checkpoints "${CHECKPOINTS}/stage1\.[[:digit:]]" 1
check_num_files ${CHECKPOINTS} 7 # 3x2 checkpoints + metrics.json
rm -rf ./tests/logs/_tests_dl_callbacks ${EXP_OUTPUT}
################################ pipeline 23 ################################
# testing loading from checkpoint specified by path in combination with other
LOG_MSG='pipeline 23'
echo ${LOG_MSG}
# First run only produces checkpoints under "for_resume" for the second run to load.
LOGDIR=./tests/logs/_tests_dl_callbacks/for_resume
CHECKPOINTS=${LOGDIR}/checkpoints
LOGFILE=${CHECKPOINTS}/_metrics.json
PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
python catalyst/dl/scripts/run.py \
--expdir=${EXPDIR} \
--config=${EXPDIR}/config0.yml \
--logdir=${LOGDIR} > ${EXP_OUTPUT}
cat ${EXP_OUTPUT}
check_line_counts ${EXP_OUTPUT} "=> Loading" 0
check_file_existence $LOGFILE
cat ${LOGFILE}
echo ${LOG_MSG}
check_checkpoints "${CHECKPOINTS}/best" 1
check_checkpoints "${CHECKPOINTS}/last" 1
check_checkpoints "${CHECKPOINTS}/stage1\.[[:digit:]]" 1
check_num_files $CHECKPOINTS 7 # 3x2 checkpoints + metrics.json
# Second run mixes path-based loading with regular best/last loading (config19).
LOGDIR=./tests/logs/_tests_dl_callbacks/other
CHECKPOINTS=${LOGDIR}/checkpoints
LOGFILE=${CHECKPOINTS}/_metrics.json
PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
python catalyst/dl/scripts/run.py \
--expdir=${EXPDIR} \
--config=${EXPDIR}/config19.yml \
--logdir=${LOGDIR} > ${EXP_OUTPUT}
cat ${EXP_OUTPUT}
check_line_counts ${EXP_OUTPUT} "=> Loading" 4
# Fix: escape the dots so '.pth' is matched literally, consistent with the
# patterns used by every other pipeline check.
check_line_counts ${EXP_OUTPUT} "=> .*other/checkpoints/best_full\.pth" 1
check_line_counts ${EXP_OUTPUT} "=> .*other/checkpoints/last_full\.pth" 1
check_line_counts ${EXP_OUTPUT} "=> .*for_resume/checkpoints/best_full\.pth" 1
check_line_counts ${EXP_OUTPUT} "=> .*for_resume/checkpoints/last_full\.pth" 1
check_file_existence ${LOGFILE}
cat ${LOGFILE}
echo ${LOG_MSG}
check_checkpoints "${CHECKPOINTS}/best" 1
check_checkpoints "${CHECKPOINTS}/last" 1
check_checkpoints "${CHECKPOINTS}/stage1\.[[:digit:]]" 1
check_checkpoints "${CHECKPOINTS}/stage2\.[[:digit:]]" 1
check_num_files ${CHECKPOINTS} 9 # 4x2 checkpoints + metrics.json
rm -rf ./tests/logs/_tests_dl_callbacks ${EXP_OUTPUT}
|
An optimal database schema would contain the following tables:
Users:
- Id
- Name
- Email
Reviews:
- Id
- User_id
- Product_id
- Rating
- Body
Products:
- Id
- Name
- Image_url
Product_categories:
- Product_id
- Category_id
Categories:
- Id
- Name
|
<filename>google/monitoring/dashboard/v1/google-cloud-monitoring-dashboard-v1-ruby/proto_docs/google/monitoring/dashboard/v1/xychart.rb
# frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
module Google
  module Cloud
    module Monitoring
      module Dashboard
        module V1
          # A chart that displays data on a 2D (X and Y axes) plane.
          # @!attribute [rw] data_sets
          #   @return [::Array<::Google::Cloud::Monitoring::Dashboard::V1::XyChart::DataSet>]
          #     Required. The data displayed in this chart.
          # @!attribute [rw] timeshift_duration
          #   @return [::Google::Protobuf::Duration]
          #     The duration used to display a comparison chart. A comparison chart
          #     simultaneously shows values from two similar-length time periods
          #     (e.g., week-over-week metrics).
          #     The duration must be positive, and it can only be applied to charts with
          #     data sets of LINE plot type.
          # @!attribute [rw] thresholds
          #   @return [::Array<::Google::Cloud::Monitoring::Dashboard::V1::Threshold>]
          #     Threshold lines drawn horizontally across the chart.
          # @!attribute [rw] x_axis
          #   @return [::Google::Cloud::Monitoring::Dashboard::V1::XyChart::Axis]
          #     The properties applied to the X axis.
          # @!attribute [rw] y_axis
          #   @return [::Google::Cloud::Monitoring::Dashboard::V1::XyChart::Axis]
          #     The properties applied to the Y axis.
          # @!attribute [rw] chart_options
          #   @return [::Google::Cloud::Monitoring::Dashboard::V1::ChartOptions]
          #     Display options for the chart.
          class XyChart
            include ::Google::Protobuf::MessageExts
            extend ::Google::Protobuf::MessageExts::ClassMethods

            # Groups a time series query definition with charting options.
            # @!attribute [rw] time_series_query
            #   @return [::Google::Cloud::Monitoring::Dashboard::V1::TimeSeriesQuery]
            #     Required. Fields for querying time series data from the
            #     Stackdriver metrics API.
            # @!attribute [rw] plot_type
            #   @return [::Google::Cloud::Monitoring::Dashboard::V1::XyChart::DataSet::PlotType]
            #     How this data should be plotted on the chart.
            # @!attribute [rw] legend_template
            #   @return [::String]
            #     A template string for naming `TimeSeries` in the resulting data set.
            #     This should be a string with interpolations of the form `${label_name}`,
            #     which will resolve to the label's value.
            # @!attribute [rw] min_alignment_period
            #   @return [::Google::Protobuf::Duration]
            #     Optional. The lower bound on data point frequency for this data set, implemented by
            #     specifying the minimum alignment period to use in a time series query
            #     For example, if the data is published once every 10 minutes, the
            #     `min_alignment_period` should be at least 10 minutes. It would not
            #     make sense to fetch and align data at one minute intervals.
            class DataSet
              include ::Google::Protobuf::MessageExts
              extend ::Google::Protobuf::MessageExts::ClassMethods

              # The types of plotting strategies for data sets.
              module PlotType
                # Plot type is unspecified. The view will default to `LINE`.
                PLOT_TYPE_UNSPECIFIED = 0

                # The data is plotted as a set of lines (one line per series).
                LINE = 1

                # The data is plotted as a set of filled areas (one area per series),
                # with the areas stacked vertically (the base of each area is the top of
                # its predecessor, and the base of the first area is the X axis). Since
                # the areas do not overlap, each is filled with a different opaque color.
                STACKED_AREA = 2

                # The data is plotted as a set of rectangular boxes (one box per series),
                # with the boxes stacked vertically (the base of each box is the top of
                # its predecessor, and the base of the first box is the X axis). Since
                # the boxes do not overlap, each is filled with a different opaque color.
                STACKED_BAR = 3

                # The data is plotted as a heatmap. The series being plotted must have a
                # `DISTRIBUTION` value type. The value of each bucket in the distribution
                # is displayed as a color. This type is not currently available in the
                # Stackdriver Monitoring application.
                HEATMAP = 4
              end
            end

            # A chart axis.
            # @!attribute [rw] label
            #   @return [::String]
            #     The label of the axis.
            # @!attribute [rw] scale
            #   @return [::Google::Cloud::Monitoring::Dashboard::V1::XyChart::Axis::Scale]
            #     The axis scale. By default, a linear scale is used.
            class Axis
              include ::Google::Protobuf::MessageExts
              extend ::Google::Protobuf::MessageExts::ClassMethods

              # Types of scales used in axes.
              module Scale
                # Scale is unspecified. The view will default to `LINEAR`.
                SCALE_UNSPECIFIED = 0

                # Linear scale.
                LINEAR = 1

                # Logarithmic scale (base 10).
                LOG10 = 2
              end
            end
          end

          # Options to control visual rendering of a chart.
          # @!attribute [rw] mode
          #   @return [::Google::Cloud::Monitoring::Dashboard::V1::ChartOptions::Mode]
          #     The chart mode.
          class ChartOptions
            include ::Google::Protobuf::MessageExts
            extend ::Google::Protobuf::MessageExts::ClassMethods

            # Chart mode options.
            module Mode
              # Mode is unspecified. The view will default to `COLOR`.
              MODE_UNSPECIFIED = 0

              # The chart distinguishes data series using different color. Line
              # colors may get reused when there are many lines in the chart.
              COLOR = 1

              # The chart uses the Stackdriver x-ray mode, in which each
              # data set is plotted using the same semi-transparent color.
              X_RAY = 2

              # The chart displays statistics such as average, median, 95th percentile,
              # and more.
              STATS = 3
            end
          end
        end
      end
    end
  end
end
|
<filename>open-sphere-plugins/mapbox/src/main/java/io/opensphere/mapbox/model/package-info.java
/**
 * Mapbox model classes (data/domain objects for the Mapbox plugin).
 */
package io.opensphere.mapbox.model;
|
def remove_nodes(head, val):
    '''
    Removes all nodes that have a given value in a linked list

    Args:
        head (Node): Head Node of a linked list; may be None for an empty list
        val (int): The value of the nodes to be removed

    Returns:
        Node: The head Node of the linked list with the specified nodes
            removed, or None if the list is empty or every node was removed
    '''
    # Advance past any leading nodes that match. This also fixes two bugs in
    # the original: it crashed on an empty list (head is None), and its loop
    # condition ('and current.next != None') stopped one node early, so a
    # list whose every node matched kept its final node.
    while head is not None and head.val == val:
        head = head.next
    if head is None:
        return None
    prev = head
    current = head.next
    while current is not None:
        if current.val == val:
            # Unlink the matching node.
            prev.next = current.next
        else:
            prev = current
        current = current.next
    return head
|
# Evaluate a fine-tuned language model on the WikiText-103 raw validation split,
# with the shuffle_within_trigrams_first_half_full augmentation and the
# last_element_eval evaluation function (batch size 1, incomplete batch dropped).
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+0+512-pad/13-model --tokenizer_name model-configs/1024-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+0+512-pad/13-512+0+512-ST-1 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_within_trigrams_first_half_full --eval_function last_element_eval
|
package br.indie.fiscal4j.cte300.classes.nota;
import br.indie.fiscal4j.DFBase;
import br.indie.fiscal4j.validadores.StringValidador;
import org.simpleframework.xml.Element;
import org.simpleframework.xml.Namespace;
import org.simpleframework.xml.Root;
/**
 * CT-e "emiOcc" element: issuer data of an associated collection order
 * (ordem de coleta) in the road transport modal.
 *
 * @author Caio
 */
@Root(name = "emiOcc")
@Namespace(reference = "http://www.portalfiscal.inf.br/cte")
public class CTeNotaInfoCTeNormalInfoModalRodoviarioOrdemColetaAssociadasEmi extends DFBase {

    private static final long serialVersionUID = -6636308547258454315L;

    /** Issuer CNPJ. */
    @Element(name = "CNPJ")
    private String cnpj;

    /** Carrier's internal code (optional). */
    @Element(name = "cInt", required = false)
    private String codigoInterno;

    /** State registration (Inscrição Estadual). */
    @Element(name = "IE")
    private String inscricaoEstadual;

    /** State (UF) abbreviation. */
    @Element(name = "UF")
    private String siglaUF;

    /** Phone number (optional). */
    @Element(name = "fone", required = false)
    private String telefone;

    public CTeNotaInfoCTeNormalInfoModalRodoviarioOrdemColetaAssociadasEmi() {
        // All fields start unset; values are validated by the setters.
        this.cnpj = null;
        this.codigoInterno = null;
        this.inscricaoEstadual = null;
        this.siglaUF = null;
        this.telefone = null;
    }

    public String getCnpj() {
        return this.cnpj;
    }

    /**
     * CNPJ number.<br>
     * Include non-significant (leading) zeros.
     */
    public void setCnpj(final String cnpj) {
        StringValidador.cnpj(cnpj);
        this.cnpj = cnpj;
    }

    public String getCodigoInterno() {
        return this.codigoInterno;
    }

    /**
     * Carrier's internal code.<br>
     * For internal use by carriers.
     */
    public void setCodigoInterno(final String codigoInterno) {
        StringValidador.tamanho10(codigoInterno, "Código interno de uso da transportadora");
        this.codigoInterno = codigoInterno;
    }

    public String getInscricaoEstadual() {
        return this.inscricaoEstadual;
    }

    /**
     * State registration (Inscrição Estadual).
     */
    public void setInscricaoEstadual(final String inscricaoEstadual) {
        StringValidador.inscricaoEstadual(inscricaoEstadual);
        this.inscricaoEstadual = inscricaoEstadual;
    }

    public String getSiglaUF() {
        return this.siglaUF;
    }

    /**
     * State (UF) abbreviation.<br>
     * Use "EX" for international operations.
     */
    public void setSiglaUF(final String siglaUF) {
        StringValidador.exatamente2(siglaUF, "Sigla da UF");
        this.siglaUF = siglaUF;
    }

    public String getTelefone() {
        return this.telefone;
    }

    /**
     * Phone number.
     */
    public void setTelefone(final String telefone) {
        StringValidador.telefone(telefone);
        this.telefone = telefone;
    }
}
|
from typing import List, Tuple
def count_zero_rows_cols(grid: List[List[int]]) -> Tuple[int, int]:
    """Count the rows and columns of ``grid`` that contain no zero.

    Args:
        grid: Rectangular matrix of ints (list of equal-length rows).

    Returns:
        ``(zero_rows, zero_cols)``: the number of rows without any 0 and the
        number of columns without any 0. An empty grid yields ``(0, 0)``.
    """
    # Guard: the original indexed grid[0] unconditionally and crashed on [].
    if not grid or not grid[0]:
        return len(grid), 0
    rows_with_zero = set()
    cols_with_zero = set()
    for i, row in enumerate(grid):
        for j, value in enumerate(row):
            if value == 0:
                rows_with_zero.add(i)
                cols_with_zero.add(j)
    return len(grid) - len(rows_with_zero), len(grid[0]) - len(cols_with_zero)
|
#
# Copyright (c) 2018 ISP RAS (http://www.ispras.ru)
# Ivannikov Institute for System Programming of the Russian Academy of Sciences
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import time
import pika
import queue
import logging
import traceback
import threading
from klever.scheduler.server import Server
from klever.scheduler.utils.bridge import BridgeError
from klever.scheduler.utils import sort_priority, time_units_converter, memory_units_converter
class SchedulerException(RuntimeError):
    """Signals that a particular task or job failed, as opposed to a failure of the scheduler itself."""
class ListeningThread(threading.Thread):
    """Background thread consuming scheduler messages from a RabbitMQ queue.

    Accepted messages are forwarded verbatim to the scheduler main loop
    through a thread-safe local queue; messages intended for other
    schedulers are requeued.
    """

    # Queue connection settings: expects keys 'host', 'username', 'password',
    # 'name'; may be overridden per instance via the constructor.
    conf = None

    def __init__(self, local_queue, accept_jobs, accept_tag, cnf=None):
        # local_queue: queue.Queue used to hand messages over to the main loop.
        # accept_jobs: if True, 'job ...' messages are accepted by this scheduler.
        # accept_tag: tag that 'task ...' messages must carry to be accepted.
        # cnf: optional connection configuration dict overriding the class default.
        super(ListeningThread, self).__init__()
        self._is_interrupted = False
        self.accept_jobs = accept_jobs
        self.accept_tag = accept_tag
        if cnf:
            self.conf = cnf
        self._queue = local_queue

    def stop(self):
        # Request a graceful shutdown; checked once per consume timeout tick.
        self._is_interrupted = True

    def run(self):
        # Blocking connection with a 1s inactivity timeout so the interruption
        # flag is polled regularly even when the queue is idle.
        connection = pika.BlockingConnection(
            pika.ConnectionParameters(
                host=self.conf["host"],
                credentials=pika.credentials.PlainCredentials(self.conf["username"], self.conf["password"]))
        )
        channel = connection.channel()
        channel.queue_declare(queue=self.conf["name"], durable=True)
        for method, properties, body in channel.consume(self.conf["name"], inactivity_timeout=1):
            if self._is_interrupted:
                break
            if not body or not method:
                # Inactivity timeout tick - nothing was delivered.
                continue

            # Just forward to main loop all data. This can be done faster but it will require additional locks and sync
            data = body.decode('utf-8').split(' ')
            if len(data) == 4:
                if (data[0] == 'job' and self.accept_jobs) or (data[0] == 'task' and data[-1] == self.accept_tag):
                    channel.basic_ack(method.delivery_tag)
                    self._queue.put(body)
                else:
                    # Not addressed to this scheduler - put the message back.
                    channel.basic_nack(method.delivery_tag, requeue=True)
            else:
                # Just ignore the message
                channel.basic_ack(method.delivery_tag)
                continue
class Scheduler:
"""Class provide general scheduler API."""
    def __init__(self, conf, logger, work_dir, runner_class):
        """
        Get configuration and prepare working directory.

        :param conf: Dictionary with relevant configuration.
        :param logger: Logger object.
        :param work_dir: Path to the working directory.
        :param runner_class: Runner class to work with hardware or cloud.
        """
        # todo: remove useless data
        self.conf = conf
        self.logger = logger
        self.work_dir = work_dir
        self.runner = None  # Runner instance, created in init_scheduler()
        self.server = None  # Bridge server connection, created in init_scheduler()
        self._runner_class = runner_class
        self._tasks = {}  # task id -> description dict (includes 'status')
        self._jobs = {}  # job id -> description dict (includes 'status')
        self._nodes = None
        self._tools = None
        self._iteration_period = 0.5  # seconds to sleep between main loop iterations
        self._server_queue = None  # filled by the listening thread with raw messages
        self._channel = None
        self._listening_thread = None
        self._loop_thread = None
        self.production = self.conf["scheduler"].setdefault("production", False)
        # Pika is very verbose at default level; keep only warnings and above.
        logging.getLogger("pika").setLevel(logging.WARNING)
        self.init_scheduler()
    def init_scheduler(self):
        """
        Initialize scheduler completely. This method should be called both at constructing stage and scheduler
        reinitialization. Thus, all object attribute should be cleaned up and set as it is a newly created object.
        """
        self._tasks = {}
        self._jobs = {}
        self._nodes = None
        self._tools = None
        self._server_queue = queue.Queue()
        self.server = Server(self.logger, self.conf["Klever Bridge"], os.path.join(self.work_dir, "requests"))
        _old_tasks_status = None
        _old_jobs_status = None

        # Check configuration completeness
        self.logger.debug("Check whether configuration contains all necessary data")

        # Initialize interaction
        self.server.register(self._runner_class.scheduler_type())
        self.runner = self._runner_class(self.conf, self.logger, self.work_dir, self.server)
        self.runner.init()

        # Create listening thread
        # NOTE(review): this only stops/joins a previous thread that is already
        # dead; a previous thread that is still alive keeps consuming. Confirm
        # whether the condition was meant to be 'is_alive()' (without 'not').
        if self._listening_thread and not self._listening_thread.is_alive():
            self._listening_thread.stop()
            self._listening_thread.join()
        self._listening_thread = ListeningThread(self._server_queue, self._runner_class.accept_jobs,
                                                 self._runner_class.accept_tag,
                                                 self.conf["Klever jobs and tasks queue"])
        self._listening_thread.start()

        # Before we proceed lets check all existing jobs
        self._check_jobs_status()
        self.logger.info("Scheduler base initialization has been successful")
def launch(self):
"""
Start scheduler loop. This is an infinite loop that exchange data with Bridge to fetch new jobs and tasks and
upload result of solution previously received tasks and jobs. After data exchange it prepares for solution
new jobs and tasks, updates statuses of running jobs and tasks and schedule for solution pending ones.
This is just an algorythm, and all particular logic and resource management should be implemented in classes
that inherits this one.
"""
def nth_iteration(n):
return True if iteration_number % n == 0 else False
self.logger.info("Start scheduler loop")
iteration_number = 0
while True:
try:
if iteration_number == 10000:
iteration_number = 0
else:
iteration_number += 1
if not self._listening_thread.is_alive():
raise ValueError("Listening thread is not alive, terminating")
while True:
msg = self._server_queue.get_nowait()
kind, identifier, status, _ = msg.decode('utf-8').split(' ')
if kind == 'job':
self.logger.debug("New status of job {!r} is {!r}".format(identifier, status))
sch_status = self._jobs.get(identifier, dict()).get('status', None)
status = self._job_status(status)
if status == 'PENDING':
if identifier in self._jobs and sch_status not in ('PROCESSING', 'PENDING'):
self.logger.warning('Job {!r} is still tracking and has status {!r}'.
format(identifier, sch_status))
del self._jobs[identifier]
self.add_new_pending_job(identifier)
elif status == 'PROCESSING':
if sch_status in ('PENDING', 'PROCESSING'):
self._jobs[identifier]['status'] = 'PROCESSING'
elif identifier not in self._jobs:
self.server.submit_job_error(identifier, 'Job {!r} is not traching by the scheduler'.
format(identifier))
else:
self.logger.warning('Job {!r} alrady has status {!r}'.format(identifier, sch_status))
elif status in ('FAILED', 'CORRUPTED', 'CANCELLED'):
if identifier in self._jobs and self.runner.is_solving(self._jobs[identifier]):
self.logger.warning('Job {!r} is running but got status '.format(identifier))
self.runner.cancel_job(identifier, self._jobs[identifier],
self.relevant_tasks(identifier))
if identifier in self._jobs:
del self._jobs[identifier]
elif status == 'CORRUPTED':
# CORRUPTED
if identifier in self._jobs and self.runner.is_solving(self._jobs[identifier]):
self.logger.info('Job {!r} was corrupted'.format(identifier))
self.runner.cancel_job(identifier, self._jobs[identifier],
self.relevant_tasks(identifier))
if identifier in self._jobs:
del self._jobs[identifier]
elif status == 'CANCELLING':
# CANCELLING
if identifier in self._jobs and self.runner.is_solving(self._jobs[identifier]):
self.runner.cancel_job(identifier, self._jobs[identifier],
self.relevant_tasks(identifier))
self.server.submit_job_status(identifier, self._job_status('CANCELLED'))
for task_id, status in self.server.get_job_tasks(identifier):
if status in ('PENDING', 'PROCESSING'):
self.server.submit_task_status(task_id, 'CANCELLED')
if identifier in self._jobs:
del self._jobs[identifier]
else:
raise NotImplementedError('Unknown job status {!r}'.format(status))
else:
sch_status = self._tasks.get(identifier, dict()).get('status', None)
if status == 'PENDING':
if identifier in self._tasks and sch_status not in ('PROCESSING', 'PENDING'):
self.logger.warning('The task {!r} is still tracking and has status {!r}'.
format(identifier, sch_status))
del self._jobs[identifier]
self.add_new_pending_task(identifier)
elif status == 'PROCESSING':
# PROCESSING
if identifier not in self._tasks:
self.logger.warning("There is running task {!r}".format(identifier))
self.server.submit_task_error(identifier, 'Unknown task')
elif identifier in self._tasks and not self.runner.is_solving(self._tasks[identifier]) \
and sch_status != 'PROCESSING':
self.logger.warning("Task {!r} already has status {!r} and is not PROCESSING".
format(identifier, sch_status))
elif status in ('FINISHED', 'ERROR', 'CANCELLED'):
# CANCELLED
if identifier in self._tasks and self.runner.is_solving(self._tasks[identifier]):
self.runner.cancel_task(identifier, self._tasks[identifier])
if identifier in self._tasks:
del self._tasks[identifier]
else:
raise NotImplementedError('Unknown task status {!r}'.format(status))
except queue.Empty:
pass
try:
for job_id, desc in list(self._jobs.items()):
if self.runner.is_solving(desc) and desc["status"] == "PENDING":
desc["status"] = "PROCESSING"
elif desc['status'] == 'PROCESSING' and \
self.runner.process_job_result(
job_id, desc, [tid for tid in self._tasks if desc["status"] in ["PENDING", "PROCESSING"]
and self._tasks[tid]["description"]["job id"] == job_id]):
if desc['status'] == 'FINISHED' and not desc.get('error'):
self.server.submit_job_status(job_id, self._job_status('SOLVED'))
elif desc.get('error'):
# Sometimes job can be rescheduled, lets check this doing the following
if not desc.get('rescheduled'):
server_status = self._job_status(self.server.get_job_status(job_id))
if server_status == 'PENDING':
desc['rescheduled'] = True
desc['status'] = 'PENDING'
continue
self.server.submit_job_error(job_id, desc['error'])
else:
raise NotImplementedError("Cannot determine status of the job {!r}".format(job_id))
if job_id in self._jobs:
del self._jobs[job_id]
elif desc['status'] == 'PROCESSING':
# Request progress if it is available
if nth_iteration(10) and self.relevant_tasks(job_id):
progress = self.server.get_job_progress(job_id)
if progress:
self.runner.add_job_progress(job_id, self._jobs[job_id], progress)
for task_id, desc in list(self._tasks.items()):
if self.runner.is_solving(desc) and desc["status"] == "PENDING":
desc["status"] = "PROCESSING"
elif desc["status"] == "PROCESSING" and self.runner.process_task_result(task_id, desc):
if desc['status'] == 'FINISHED' and not desc.get('error'):
self.server.submit_task_status(task_id, 'FINISHED')
elif desc["status"] == 'PENDING':
# This case is for rescheduling
continue
elif desc.get('error'):
self.server.submit_task_error(task_id, desc['error'])
else:
raise NotImplementedError("Cannot determine status of the task {!r}: {!r}".
format(task_id, desc["status"]))
if task_id in self._tasks:
del self._tasks[task_id]
# Submit tools
try:
self.runner.update_tools()
except Exception as err:
self.logger.warning('Cannot submit verification tools information: {}'.format(err))
# Get actual information about connected nodes
submit = True
try:
self.runner.update_nodes()
except Exception as err:
self.logger.error("Cannot obtain information about connected nodes: {}".format(err))
submit = False
self.logger.warning("Do not run tasks until actual information about the nodes will be obtained")
if submit:
# Update resource limitations before scheduling
messages = dict()
for i, desc in ((i, self._tasks[i]) for i in self._tasks
if self._tasks[i]["status"] == "PENDING"):
messages[i] = self.runner.prepare_task(i, desc)
if not messages[i]:
self.server.submit_task_error(i, desc['error'])
del self._tasks[i]
# Schedule new tasks
pending_tasks = [desc for task_id, desc in self._tasks.items() if desc["status"] == "PENDING"]
pending_jobs = [desc for job_id, desc in self._jobs.items() if desc["status"] == "PENDING"
and not self.runner.is_solving(desc)]
pending_jobs = sorted(pending_jobs, key=lambda i: sort_priority(i['configuration']['priority']))
pending_tasks = sorted(pending_tasks, key=lambda i: sort_priority(i['description']['priority']))
tasks_to_start, jobs_to_start = self.runner.schedule(pending_tasks, pending_jobs)
if len(tasks_to_start) > 0 or len(jobs_to_start) > 0:
self.logger.info("Going to start {} new tasks and {} jobs".
format(len(tasks_to_start), len(jobs_to_start)))
self.logger.info("There are {} pending and {} solving jobs".format(
len(pending_jobs),
len({j for j in self._jobs if self._jobs[j]['status'] == 'PROCESSING'})))
self.logger.info("There are {} pending and {} solving tasks".format(
len(pending_tasks),
len({t for t in self._tasks if self._tasks[t]['status'] == 'PROCESSING'})))
for job_id in jobs_to_start:
started = self.runner.solve_job(job_id, self._jobs[job_id])
if started and self._jobs[job_id]['status'] not in ('PENDING', 'PROCESSING'):
raise RuntimeError('Expect that status of started job {!r} is solving but it has status'
' {!r}'.format(self._jobs[job_id]['status'], job_id))
elif not started and self._jobs[job_id]['status'] == 'ERROR':
self.server.submit_job_error(job_id, self._jobs[job_id]['error'])
if job_id in self._jobs:
del self._jobs[job_id]
for task_id in tasks_to_start:
# This check is very helpful for debugging
msg = messages.get(task_id)
if msg and isinstance(msg, str):
self.logger.info(msg)
started = self.runner.solve_task(task_id, self._tasks[task_id])
if started and self._tasks[task_id]['status'] != 'PROCESSING':
raise RuntimeError('Expect that status of started task is PROCESSING but it is {!r} '
'for {!r}'.format(self._tasks[task_id]['status'], task_id))
elif started and self._tasks[task_id]['status'] == 'PROCESSING':
if not self._tasks[task_id].get("rescheduled"):
self.server.submit_task_status(task_id, 'PROCESSING')
elif not started and self._tasks[task_id]['status'] == 'PROCESSING':
raise RuntimeError('In case of error task cannot be \'PROCESSING\' but it is for '
'{!r}'.format(task_id))
elif not started and self._tasks[task_id]['status'] == 'ERROR':
self.server.submit_task_error(task_id, self._tasks[task_id]['error'])
if task_id in self._tasks:
del self._tasks[task_id]
# Flushing tasks
if len(tasks_to_start) > 0 or \
len([True for i in self._tasks if self._tasks[i]["status"] == "PROCESSING"]) > 0:
self.runner.flush()
# Periodically check for jobs and task that have an unexpected status. This should help notice bugs
# related to interaction with Bridge through RabbitMQ
if nth_iteration(100):
self._check_jobs_status()
time.sleep(self._iteration_period)
except KeyboardInterrupt:
self.logger.error("Scheduler execution is interrupted, cancel all running threads")
self.terminate()
self.server.stop()
self._listening_thread.stop()
self._listening_thread.join()
exit(137)
except Exception:
exception_info = 'An error occured:\n{}'.format(traceback.format_exc().rstrip())
self.logger.error(exception_info)
self.terminate()
self._listening_thread.stop()
self._listening_thread.join()
self.server.stop()
if self.production:
self.logger.info("Reinitialize scheduler and try to proceed execution in 30 seconds...")
time.sleep(30)
self.init_scheduler()
else:
exit(1)
@staticmethod
def __add_missing_restrictions(collection):
"""
If resource limits are incomplete the method adds to given json all necessary fields filled with zeroes.
:param collection: 'resource limits' dictionary from a task description or job configuration.
"""
if len(collection.keys()) == 0:
raise SchedulerException("Resource limitations are missing: upload correct tasks.json file and properly "
"set job resource limitiations")
for tag in ['memory size', 'number of CPU cores', 'disk memory size']:
if tag not in collection or collection[tag] is None:
collection[tag] = 0
if 'CPU model' not in collection:
collection['CPU model'] = None
# Make unit translation
try:
for tag in (m for m in ("memory size", "disk memory size")
if m in collection and collection[m] is not None):
collection[tag] = memory_units_converter(collection[tag])[0]
for tag in (t for t in ("wall time", "CPU time") if t in collection and collection[t] is not None):
collection[tag] = time_units_converter(collection[tag])[0]
except Exception:
raise SchedulerException('Cannot interprete {} resource limitations: {!r}'.format(tag, collection[tag]))
    def terminate(self):
        """Abort solution of all running tasks and any other actions before termination."""
        # Jobs still in flight; they need explicit cancellation and an error report.
        running_jobs = [job_id for job_id in self._jobs if self._jobs[job_id]["status"] in ["PENDING", "PROCESSING"]]

        # First, stop jobs together with their relevant tasks.
        for job_id, item in [(job_id, self._jobs[job_id]) for job_id in running_jobs]:
            relevant_tasks = self.relevant_tasks(job_id)
            self.runner.cancel_job(job_id, item, relevant_tasks)

        # Note here that some schedulers can solve tasks of jobs which run elsewhere,
        # so cancel every active task regardless of its job.
        for task_id, item in [(task_id, self._tasks[task_id]) for task_id in self._tasks
                              if self._tasks[task_id]["status"] in ["PENDING", "PROCESSING"]]:
            self.runner.cancel_task(task_id, item)

        # Terminate tasks on the server side as well.
        self.cancel_all_tasks()

        # Submit errors on all jobs that were still running.
        for job_id in running_jobs:
            self.server.submit_job_error(job_id, 'Scheduler has been terminated or reset')

        # Do final uninitializations.
        self.runner.terminate()
def add_new_pending_job(self, identifier):
"""
Add new pending job and prepare its description.
:param identifier: Job identifier string.
"""
if identifier not in self._jobs:
job_conf = self.server.pull_job_conf(identifier)
if not job_conf:
self.server.submit_job_error(identifier, 'Failed to doenload configuration')
return
job_conf['configuration']['identifier'] = identifier
job_conf['configuration']['task resource limits'] = job_conf['tasks']
# TODO: Get Verifier Cloud login and password
self.logger.info("Prepare new job {} before launching".format(identifier))
if identifier in self._jobs and self._jobs[identifier]["status"] == "PROCESSING":
raise RuntimeError(
"This should not be possible to get PEDING status for a PROCESSING jib {!r}".format(identifier))
# Check and set necessary restrictions for further scheduling
for collection in [job_conf['configuration']["resource limits"],
job_conf['configuration']['task resource limits']]:
try:
self.__add_missing_restrictions(collection)
except SchedulerException as err:
self._jobs[identifier] = {
"id": identifier,
"status": "ERROR",
"error": str(err)
}
break
self._jobs[identifier] = {
"id": identifier,
"status": "PENDING",
"configuration": job_conf['configuration']
}
prepared = self.runner.prepare_job(identifier, self._jobs[identifier])
if not prepared:
self.server.submit_job_error(identifier, self._jobs[identifier]['error'])
del self._jobs[identifier]
else:
self.logger.warning('Attempt to schedule job {} second time but it already has status {}'.
format(identifier, self._jobs[identifier]['status']))
def add_new_pending_task(self, identifier):
"""
Add new pending task and prepare its description.
:param identifier: Task identifier string.
"""
if identifier not in self._tasks:
task_conf = self.server.pull_task_conf(identifier)
if not task_conf:
self.server.submit_task_error(identifier, 'Failed to download configuration')
return
self.logger.info("Add new PENDING task {}".format(identifier))
self._tasks[identifier] = {
"id": identifier,
"status": "PENDING",
"description": task_conf['description'],
"priority": task_conf['description']["priority"]
}
self.logger.debug("Prepare new task {!r} before launching".format(identifier))
# Add missing restrictions
try:
self.__add_missing_restrictions(
self._tasks[identifier]["description"]["resource limits"])
except SchedulerException as err:
self._jobs[identifier] = {
"id": identifier,
"status": "ERROR",
"error": str(err)
}
else:
prepared = self.runner.prepare_task(identifier, self._tasks[identifier])
if not prepared:
self.server.submit_task_error(identifier, self._tasks[identifier]['error'])
del self._tasks[identifier]
else:
self.logger.warning('Attempt to schedule job {} second time but it already has status {}'.
format(identifier, self._tasks[identifier]['status']))
def relevant_tasks(self, job_id):
"""
Collect and return the list of task descriptions for a particular job.
:param job_id: Relevant job identifier.
:return: List of dictionaries.
"""
return [self._tasks[tid] for tid in self._tasks
if self._tasks[tid]["status"] in ["PENDING", "PROCESSING"]
and self._tasks[tid]["description"]["job id"] == job_id]
def cancel_all_tasks(self):
"""Cancel and delete all jobs and tasks before terminating or restarting scheduler."""
# Check all tasks and cancel them
tasks = self.server.get_all_tasks()
for identifier, status in tasks:
# TODO: Remove this when Bridge will not raise an error 'Job is not solving'
if status in ('PENDING', 'PROCESSING'):
self.server.submit_task_error(identifier, 'Scheduler terminated or reset')
try:
self.server.delete_task(identifier)
except BridgeError as err:
self.logger.warning('Brdige reports an error on attempt to delete task {}: {!r}'.
format(identifier, err))
    def _check_jobs_status(self):
        """This function checks compliance of server and scheduler statuses."""
        if self._runner_class.accept_jobs:
            # TODO: at the moment we do not have several schedulers that can serve jobs, but otherwise this
            # should be fixed
            result = self.server.get_all_jobs()
            if result:
                for identifier, status in result:
                    # Bridge reports one-character numeric codes; translate to a status name first.
                    status = self._job_status(status)
                    if identifier not in self._jobs and status == 'PENDING':
                        # Server knows a pending job we do not track yet: adopt it.
                        self.add_new_pending_job(identifier)
                    elif identifier not in self._jobs and status == 'PROCESSING':
                        # Server thinks the job runs, but we lost it (e.g. after a restart).
                        self.server.submit_job_error(identifier, 'Scheduler terminated or reset and does not '
                                                                 'track the job {}'.format(identifier))
                    elif identifier not in self._jobs and status == 'CANCELLING':
                        self.server.cancel_job(identifier)
def _job_status(self, status):
job_map = {
'0': 'NOT SOLVED',
'1': 'PENDING',
'2': 'PROCESSING',
'3': 'SOLVED',
'4': 'FAILED',
'5': 'CORRUPTED',
'6': 'CANCELLING',
'7': 'CANCELLED',
'8': 'TERMINATED',
'9': 'REFINED'
}
if len(status) == 1:
# This is digital status and we can return the word
return job_map[status]
else:
# Else
return tuple(job_map.keys())[tuple(job_map.values()).index(status)]
|
# -*- coding: utf-8 -*-
import ast
from abc import ABC
from collections import deque
import numpy as np
from pytrol.control.Communicating import Communicating
from pytrol.model.action.Action import Action
from pytrol.model.action.Actions import Actions
from pytrol.model.action.MovingToAction import MovingToAction
from pytrol.model.knowledge.EnvironmentKnowledge import EnvironmentKnowledge
from pytrol.util.net.Connection import Connection
from pytrol.model.action.GoingToAction import GoingToAction
class Agent(ABC, Communicating):
    r"""
    `Agent` is an abstract class defining a template for any agent strategy.

    This template defines, in fact, the basic procedure that any agent must
    follow. This basic procedure, qualified as *main procedure of agent*,
    represents the life cycle of agents and consists of:

    - `Agent.prepare`: any preprocessing, if necessary, the
      agent needs to carry out to prepare the impending main procedure,
    - `Agent.perceive`: the agent perceives the position of the other
      ones, if required by its strategy; in the current version of PyTrol
      only the position of the agent itself is perceived, although other
      types of perception are left to the discretion of the user,
    - `Agent.communicate`: the agent communicates with other
      ones, if required by its strategy;
    - `Agent.analyse`: the agent checks and processes messages he has
      received,
    - `Agent.decide`: the agent decides; this method makes up the core
      of the strategy, given that any strategy is a decision-making
      procedure in the context of MAP,
    - `Agent.act`: the agent acts according to the decision made in the
      previous method.

    Each agent, namely each object instantiating the `Agent` class, is a
    *communicating* and therefore a thread; concretely, the `Agent` class
    extends the `Communicating` class. Any new strategy to add in PyTrol shall
    be implemented from the above methods, then added to the
    `pytrol.control.agent` package, and finally referenced in
    `pytrol.model.AgentTypes`

    Args:
        id_ (int):
        original_id (str):
        env_knl (EnvironmentKnowledge):
        connection (Connection):
        agts_addrs (list):
        variant (str):
        depth (float):
        situated (bool):
        interaction (bool):
    """

    def __init__(self, id_: int,
                 original_id: str,
                 env_knl: EnvironmentKnowledge,
                 connection: Connection,
                 agts_addrs: list,
                 variant: str = '',
                 depth: float = 3.0,
                 situated: bool = True,
                 interaction: bool = False):
        Communicating.__init__(self, connection)
        self.id = id_
        self.original_id = original_id
        # Addresses used by `Agent.send` to reach other agents, indexed by id.
        # (The original assigned this attribute twice; once is enough.)
        self.agts_addrs = agts_addrs
        self.env_knl = env_knl

        # Current position
        self.pos = tuple(env_knl.agts_pos[self.id])
        self.goal_pos = (-1, -1, -1)
        self.depth = depth

        # If an agent is not situated, then its position is not taken into
        # account while incrementing idleness
        self.situated = situated
        # If the agent is not positioned, then it is not situated
        if self.pos == (-1, -1, -1):
            self.situated = False

        # Plan
        self.PLAN = deque()  # TODO: Sending_message actions must be put on
        # the top of deque PLAN and move actions (types 0 and 1) on the bottom.

        # Decision is made or not
        self.d = True
        self.variant = self.get_strat_params(variant)
        self.interaction = interaction
        self.interacted = False

    def set_agts_addrs(self, agts_addrs):
        r"""
        Replace the address table used to reach the other agents.

        Args:
            agts_addrs:
        """
        self.agts_addrs = agts_addrs

    @staticmethod
    def get_strat_params(variant: str):
        """Returns the parameters setting the strategy's variant. By default
        returns the variable `variant` passed as argument

        Args:
            variant (str):
        """
        return variant

    def send(self, message, agt_id: int):
        r"""
        Send `message` to the agent identified by `agt_id`.

        Args:
            message:
            agt_id (int):
        """
        super(Agent, self).send(message, self.agts_addrs[agt_id])

    # 3. Preparation of the upcoming main procedure
    def prepare(self):
        # Reset per-cycle flags: no action performed, no interaction yet.
        self.ac = False
        self.interacted = False

    # 4. Main procedure of agent
    # 4.1 Communicating
    # First communications if needed
    def communicate(self):
        if self.interaction and not self.interacted:
            self.interact()
            self.interacted = True

    # 4.2 Perceiving
    def perceive(self, agts_pos: np.ndarray):
        # Perceives the other agents around it
        r"""
        Args:
            agts_pos (np.ndarray):
        """
        self.env_knl.agts_pos = agts_pos

    # 4.3 Analysing
    # Analyses perceptions and received message to know its new state
    def analyse(self):
        self.receive()
        # Drain the inbox, processing messages in arrival order.
        while len(self.messages) > 0:
            m = self.messages.popleft()
            self.process_message(m)

    # 4.3.1 Message processing
    def process_message(self, m):
        r"""
        Handle a single received message; currently only shared-idleness
        broadcasts are understood (and only when interaction is enabled).

        Args:
            m:
        """
        if self.interaction:
            if str(m).startswith("shared_idlenesses"):
                # Transmitted idleness: keep the element-wise minimum of our
                # shared idleness vector and the one received.
                self.env_knl.shared_idls = \
                    np.minimum(self.env_knl.shared_idls, ast.literal_eval(
                        str(m).split(':')[3]))
                # `ast.literal_eval(str(m).split(':')[3])` yields the list
                # following the third `:` in the received message

    # 4.4 Deciding
    def decide(self):
        # If it is the time to decide:
        if self.d:
            # Applying politic defined by the strategy:
            self.strategy_decide()
            self.d = False

    def strategy_decide(self):
        pass

    # 4.5 Acting
    def act(self) -> Action:
        a = Action("none_action", -1)
        if len(self.PLAN) != 0:
            # Pop actions until one actually consumes this cycle (move/wait).
            while not self.ac:
                a = self.PLAN.popleft()
                if a.type == Actions.Going_to:
                    self.act_gt(a)
                elif a.type == Actions.Moving_to:
                    self.act_mt(a)
                    self.ac = True
                elif a.type == Actions.Waiting:
                    self.act_w(a)
                    self.ac = True
                '''
                elif a.type == Actions.Stopping_move:
                    self.act_st_mv(a)
                '''
        return a

    # 4.5.1: Act going to
    def act_gt(self, a: GoingToAction):
        r"""
        Expand a Going_to action into a sequence of Moving_to actions.

        Args:
            a (GoingToAction):
        """
        # Retrieval of the path from self.pos to goal_vertex
        path = self.env_knl.ntw.path(self.pos,
                                     a.goal_position)
        self.goal_pos = a.goal_position
        # Making up of the Moving_to actions' plan to go to goal_vertex
        for i in range(len(path) - 1):
            self.PLAN.append(MovingToAction(path[i], path[i + 1]))

    # 4.5.2: Act moving to
    def act_mt(self, a: MovingToAction):
        """
        Execute one Moving_to step and re-enable decision on goal arrival.

        Args:
            a (MovingToAction):
        """
        self.pos = a.to
        if self.goal_pos == a.to:
            # misc.vertices_equals(self.goal_pos, a.to)
            self.d = True

    # 4.5.3: Act waiting
    def act_w(self, a: Action):
        r"""
        Args:
            a (Action):
        """
        pass

    # # 4.5.4: Act stopping move
    def act_st_mv(self, a: Action):
        r"""
        Remove every pending Going_to/Moving_to action (type < 2) from the plan.

        The previous implementation deleted by index while iterating over
        ``range(len(self.PLAN))``, which skips elements after a deletion and
        can raise IndexError; rebuilding the deque is safe and equivalent to
        the stated intent.

        Args:
            a (Action):
        """
        self.PLAN = deque(act for act in self.PLAN if act.type >= 2)

    # 5 Knowledge Processing
    def update_knowledge(self):
        self.env_knl.tick()
        # If the agent is on a vertex, situated and not crossing an edge
        if self.situated:
            self.env_knl.reset_idl(self.pos)

    # 6 Post-processings
    def post_process(self):
        self.interacted = False

    def stop(self):
        pass

    def interact(self):
        # Broadcast our shared idleness vector to every positioned agent.
        for i, p in enumerate(self.env_knl.agts_pos):
            if p[0] != -1:
                m = "shared_idlenesses:{}:{}:{}". \
                    format(self.env_knl.t, self.id,
                           self.env_knl.shared_idls.tolist())
                self.send(m, i)
|
#!/bin/bash
# Generates a tests.xml test-definition file for a TCT core-API module from a
# CSV produced by scripts/retriever.sh.
# Usage: tctestsgen.sh <module_name> <output_dir> [desktop] [retriever_args]

if [[ -z $1 ]]; then
  echo "Usage note: tctestsgen.sh <module_name>"
  exit 1
fi

MODULE_NAME=$1
# CamelCase form of the module name (e.g. "foo-bar" -> "FooBar").
MODULE_NAME_C=$(echo $MODULE_NAME | sed -e 's/-\([a-z]\)/\U\1/' -e 's/^\([a-z]\)/\U\1/')
SCRIPT_DIR="$(cd "$(dirname $0)" && pwd)"

# Location of the test binary on target; overridden for desktop builds.
TC_DIR="/opt/usr/bin/tct-$1-core"
if [[ $3 == "desktop" ]] ; then
  TC_DIR="build/src/$1"
fi

# Remove stale output and temp files from a previous run.
FILE="$2/tests.xml"
if [ -a $FILE ]; then
  rm $FILE
fi

TFILE="/tmp/tcs.csv"
if [ -a $TFILE ]; then
  rm $TFILE
fi

# Converts the sorted CSV (name,set,purpose) into the tests.xml format,
# grouping consecutive rows with the same set into one <set> element.
function gen {
  awk -F',' -v MODULE_NAME=$MODULE_NAME -v MODULE_NAME_C=$MODULE_NAME_C -v TC_DIR=$TC_DIR '
    BEGIN {
      set = ""
      print "<?xml version=\"1.0\" encoding=\"UTF-8\"?>";
      print " <?xml-stylesheet type=\"text/xsl\" href=\"./testcase.xsl\"?>";
      print "<test_definition>";
      print "  <suite name=\"tct-"MODULE_NAME"-core-tests\" category=\"Core APIs\">";
    }
    {
      if (set != "" && set != $2) {
        print "    </set>"
      }
      if (set != $2) {
        set = $2;
        print "    <set name=\"" set "\">";
      }
      tcname = $1;
      tcpurpose = $3
      print "      <testcase component=\"CoreAPI/" MODULE_NAME_C "/" set "\" execution_type=\"auto\" id=\"" tcname "\" purpose=\"" tcpurpose "\">";
      print "        <description>";
      print "          <test_script_entry test_script_expected_result=\"0\">" TC_DIR "/tct-" MODULE_NAME "-core " tcname "</test_script_entry>";
      print "        </description>";
      print "      </testcase>";
    }
    END {
      if (set != "") {
        print "    </set>"
      }
      print "  </suite>"
      print "</test_definition>"
    }' $TFILE > $FILE
}

# Extract the test case list, abort (showing the raw output) on failure,
# then sort by set name so gen() can group rows into sets.
(cd $SCRIPT_DIR/..; scripts/retriever.sh -f src/$MODULE_NAME $4 > ${TFILE}_pre)
if [ $? -ne 0 ]; then cat ${TFILE}_pre; exit 1; fi
cat ${TFILE}_pre | sort -t',' -k2,2 -s > $TFILE

gen
|
import random


def alligator_six_making(player1: str, player2: str) -> str:
    """Simulate a dice race: players alternate rolling a six-sided die and the
    first one whose running total reaches 50 points wins.

    Rolls happen in argument order (player1 first), exactly one die per turn.
    Returns the winning player's name.
    """
    totals = {player1: 0, player2: 0}
    # The game always ends via the return below, so loop unconditionally.
    while True:
        for name in (player1, player2):
            totals[name] += random.randint(1, 6)
            if totals[name] >= 50:
                return name
|
<html>
<head>
<title>Form Validation</title>
<script>
// Blocks form submission when the input field is left empty.
function validateForm() {
  const value = document.forms["myForm"]["inputfield"].value;
  if (value === "") {
    alert("Input must be filled out");
    return false;
  }
}
</script>
</head>
<body>
<!-- The onsubmit handler duplicates the HTML5 'required' check for older browsers. -->
<form name="myForm" action="" onsubmit="return validateForm()" method="post">
  <input type="text" name="inputfield" required>
  <input type="submit">
</form>
</body>
</html>
|
import { Composer, Scenes } from 'telegraf';
import { User } from '@models/user';

// Per-scene state collected across the wizard steps.
interface StartWizardSession extends Scenes.WizardSessionData {
  email: string;
  password: string;
}

type StartWizardContext = Scenes.WizardContext<StartWizardSession>;

export const startSceneId = 'start-wizard';

// Three-step sign-in wizard: check for an existing user, then collect email
// and password and create the account.
const startWizard = new Scenes.WizardScene<StartWizardContext>(
  startSceneId,
  // Step 1: greet returning users and leave; otherwise ask for the email.
  async (ctx) => {
    // NOTE(review): assumes ctx.message is set on scene entry — confirm the
    // scene is only entered from a message update, otherwise this throws.
    const user = await User.retrieve(`${ctx.message.chat.id}`);
    if (user) {
      await ctx.reply(`Welcome back ${user.userInfo.name}`);
      // eslint-disable-next-line no-return-await
      return await ctx.scene.leave();
    }
    ctx.scene.session.email = '';
    await ctx.reply('What\'s your email address?');
    return ctx.wizard.next();
  },
  // Step 2: store the email reply and prompt for the password.
  new Composer<StartWizardContext>()
    .on('text', async (ctx) => {
      ctx.scene.session.email = ctx.message.text;
      await ctx.reply('Enter your password');
      return ctx.wizard.next();
    }),
  // Step 3: create the user with the collected credentials and finish.
  new Composer<StartWizardContext>()
    .on('text', async (ctx) => {
      const { email } = ctx.scene.session;
      const password = ctx.message.text;
      const user = await User.create(`${ctx.message.chat.id}`, email, password);
      await ctx.reply(user ? `Welcome ${user.userInfo.name}` : 'Invalid credentials!');
      // eslint-disable-next-line no-return-await
      return await ctx.scene.leave();
    }),
);

export const startStage = new Scenes.Stage<StartWizardContext>([startWizard]);
|
#!/usr/bin/env bash

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This is a wrapper for running lldb on mesos-agent before it is
# installed that first sets up some flags via environment variables.

# Use colors for errors.
. /home/lilelr/open-source/mesos-1.3.2/support/colors.sh

LIBTOOL=/home/lilelr/open-source/mesos-1.3.2/cmake-build-debug/libtool

# libtool is generated by configure; bail out with a hint if it is missing.
test ! -e ${LIBTOOL} && \
  echo "${RED}Failed to find ${LIBTOOL}, have you run configure?${NORMAL}" && \
  exit 1

# Confirm libtool has "lldb" support.
${LIBTOOL} --mode=execute lldb -batch >/dev/null 2>&1

test $? != 0 && \
  echo "${RED}Generated libtool doesn't appear to support lldb${NORMAL}" && \
  exit 1

# Load the agent's flag environment variables, then run it under lldb,
# forwarding all command-line arguments.
. /home/lilelr/open-source/mesos-1.3.2/cmake-build-debug/bin/mesos-agent-flags.sh

exec ${LIBTOOL} --mode=execute lldb -- \
  /home/lilelr/open-source/mesos-1.3.2/cmake-build-debug/src/mesos-agent "${@}"
|
package com.solvd.booking.company;

// NOTE: the import of BookingCompany was removed — it lives in this same
// package, so the import was redundant.

/**
 * Contract for entities that can be listed with or removed from a booking company.
 */
public interface Listable {

    /** Registers this entity with the given booking company. */
    void list(BookingCompany company);

    /** Removes this entity from the given booking company. */
    void unlist(BookingCompany company);
}
|
// C Program to create and traverse a linked list
#include <stdio.h>
#include <stdlib.h>
// Structure of a node: one element of a singly linked list.
struct Node {
    int data;          /* payload stored in this node */
    struct Node* next; /* next node in the list, or NULL at the tail */
};
// Function to insert a node at the beginning of the Linked List.
// Exits the program if the allocation fails (the original dereferenced the
// result of malloc without checking it).
void push(struct Node** head_ref, int new_data)
{
    struct Node* new_node = (struct Node*) malloc(sizeof(struct Node));
    if (new_node == NULL) {
        fprintf(stderr, "push: out of memory\n");
        exit(EXIT_FAILURE);
    }
    new_node->data = new_data;
    new_node->next = (*head_ref); /* link in front of the current head */
    (*head_ref) = new_node;       /* the new node becomes the head */
}
// Function to print nodes in a given linked list.
void printList(struct Node* node)
{
    /* Walk from the given node to the end of the chain, printing each value. */
    for (struct Node* cur = node; cur != NULL; cur = cur->next) {
        printf(" %d ", cur->data);
    }
}
// Driver program to test above functions.
int main()
{
    struct Node* head = NULL;
    const int values[] = { 10, 20, 30, 40 };
    const int count = (int)(sizeof(values) / sizeof(values[0]));

    /* push() prepends, so the printed list is in reverse push order. */
    for (int i = 0; i < count; i++) {
        push(&head, values[i]);
    }

    printf("Linked List: ");
    printList(head);
    return 0;
}
|
package com.lzh.replugindemo;

import android.app.Activity;
import android.app.ProgressDialog;
import android.net.Uri;

import com.alibaba.fastjson.JSON;
import com.lzh.compiler.parceler.Parceler;
import com.lzh.compiler.parceler.annotation.FastJsonConverter;
import com.lzh.nonview.router.Router;
import com.lzh.nonview.router.RouterConfiguration;
import com.lzh.nonview.router.anno.RouteConfig;
import com.lzh.nonview.router.host.RouterHostService;
import com.lzh.replugindemo.verify.RePluginVerification;
import com.lzh.router.RouterRuleCreator;
import com.lzh.router.replugin.core.IPluginCallback;
import com.lzh.router.replugin.host.HostRouterConfiguration;
import com.lzh.router.replugin.update.IUpdateCombine;
import com.lzh.router.replugin.update.UpdateRePluginCallbacks;
import com.qihoo360.replugin.RePluginApplication;
import com.qihoo360.replugin.RePluginCallbacks;
import com.qihoo360.replugin.RePluginConfig;

import org.lzh.framework.updatepluginlib.UpdateConfig;
import org.lzh.framework.updatepluginlib.base.UpdateChecker;
import org.lzh.framework.updatepluginlib.base.UpdateParser;
import org.lzh.framework.updatepluginlib.base.UpdateStrategy;
import org.lzh.framework.updatepluginlib.model.CheckEntity;
import org.lzh.framework.updatepluginlib.model.Update;

// Specifies the baseUrl for generated routes. This baseUrl is combined with
// the path given by each RouteRule annotation to form a complete route address.
// The generated route table is registered below via the RouterRuleCreator class.
@RouteConfig(baseUrl = "host://")
public class HostApplication extends RePluginApplication{

    @Override
    public void onCreate() {
        super.onCreate();
        // Install the security verifier before starting the remote router.
        RouterHostService.setVerify(new RePluginVerification());
        HostRouterConfiguration.init("com.lzh.replugindemo", this);
        HostRouterConfiguration.get().setCallback(new PluginCallback());
        // Register the routing rules.
        RouterConfiguration.get().addRouteCreator(new RouterRuleCreator());
        Parceler.setDefaultConverter(FastJsonConverter.class);
        Router.DEBUG = true;
    }

    @Override
    protected RePluginConfig createConfig() {
        RePluginConfig config = super.createConfig();
        // Fall back to the host's classes when a plugin class is not found.
        config.setUseHostClassIfNotFound(true);
        return config;
    }

    @Override
    protected RePluginCallbacks createCallbacks() {
        return new UpdateRePluginCallbacks(this,
                // Configure UpdateConfig, used for remote plugin updates.
                UpdateConfig.createConfig()
                        .setUpdateChecker(new PluginChecker())
                        .setUpdateParser(new JsonParser())
                        .setUpdateStrategy(new PluginStrategy()),
                // Configure how the remote plugin update api url is assembled.
                new HostUpdateCombine());
    }

    /**
     * JSON parser for the plugin api response: extracts the update
     * information from the api payload so the plugin can be downloaded.
     */
    private static class JsonParser implements UpdateParser {
        @Override
        public Update parse(String httpResponse) throws Exception {
            return JSON.parseObject(httpResponse, Update.class);
        }
    }

    /**
     * Inspects the update entity produced by {@link JsonParser} and decides
     * whether the plugin should be downloaded and installed.
     */
    private static class PluginChecker implements UpdateChecker {
        @Override
        public boolean check(Update update) throws Exception {
            // Always treat the plugin as updatable in this demo.
            return true;
        }
    }

    /**
     * Plugin update notification strategy: show a dialog when an update is
     * available and a progress dialog while downloading, then auto-install.
     */
    private static class PluginStrategy implements UpdateStrategy {
        @Override
        public boolean isShowUpdateDialog(Update update) {
            return true;
        }

        @Override
        public boolean isAutoInstall() {
            return true;
        }

        @Override
        public boolean isShowDownloadDialog() {
            return true;
        }
    }

    /**
     * Builds the real plugin api address from the plugin alias.
     */
    private static class HostUpdateCombine implements IUpdateCombine {
        @Override
        public CheckEntity combine(String alias) {
            return new CheckEntity().setUrl("https://raw.githubusercontent.com/JumeiRdGroup/Router/master/demos/RePluginDemo/mocked/api/" + alias + ".json");
        }
    }

    /**
     * Routing callbacks: shows a progress dialog while a plugin is loading.
     */
    private static class PluginCallback implements IPluginCallback {
        // Progress dialog shown while a plugin loads; null when none is visible.
        ProgressDialog dialog;

        @Override
        public void onInvalidUri(Uri uri) {
            // Called when the uri is invalid.
        }

        @Override
        public void notFound(Uri uri, String alias) {
        }

        @Override
        public void onResume(Uri uri) {
        }

        @Override
        public void onStartLoading(Uri uri, String alias) {
            // Only one loading dialog at a time.
            if (dialog != null) {
                return;
            }
            Activity top = ActivityStackHelper.top();
            dialog = new ProgressDialog(top);
            dialog.setTitle("加载插件" + alias + "中...");
            dialog.show();
        }

        @Override
        public void onLoadedCompleted(Uri uri, String alias) {
            if (dialog == null) {
                return;
            }
            dialog.dismiss();
            dialog = null;
        }
    }
}
|
<filename>panopto_client/tests/test_remote_recorder.py
# Copyright 2021 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
from unittest import TestCase
from panopto_client.remote_recorder import (
RemoteRecorderManagement, PanoptoAPIException)
from panopto_client.tests import instance_args
from datetime import datetime
import mock
@mock.patch.object(RemoteRecorderManagement, '_instance',
                   return_value=mock.sentinel.instance)
@mock.patch.object(RemoteRecorderManagement, '_request')
class RemoteRecorderManagementTest(TestCase):
    """Tests for RemoteRecorderManagement with the SOAP layer mocked out.

    Each test only verifies which SOAP types are instantiated and which
    request name/arguments are dispatched — no network traffic occurs.
    """

    def test_init(self, mock_request, mock_instance):
        # The client binds to the remote-recorder port with default settings.
        client = RemoteRecorderManagement()
        self.assertEqual(
            client._port, 'BasicHttpBinding_IRemoteRecorderManagement')
        self.assertEqual(client._actas, None)
        self.assertEqual(client._data, client._live)

    def test_getRemoteRecordersById(self, mock_request, mock_instance):
        client = RemoteRecorderManagement()
        result = client.getRemoteRecordersById('test-recorder-id')
        # Recorder ids are marshalled into an ArrayOfguid SOAP type.
        self.assertEqual(instance_args(mock_instance.call_args_list), [
            'ns0:AuthenticationInfo', 'ns4:ArrayOfguid'])
        mock_request.assert_called_with('GetRemoteRecordersById', {
            'auth': mock.sentinel.instance,
            'remoteRecorderIds': mock.sentinel.instance})

    def test_getRemoteRecordersByExternalId(self, mock_request, mock_instance):
        client = RemoteRecorderManagement()
        result = client.getRemoteRecordersByExternalId('test-external-id')
        # External ids are strings, hence ArrayOfstring.
        self.assertEqual(instance_args(mock_instance.call_args_list), [
            'ns0:AuthenticationInfo', 'ns4:ArrayOfstring'])
        mock_request.assert_called_with('GetRemoteRecordersByExternalId', {
            'auth': mock.sentinel.instance,
            'externalIds': mock.sentinel.instance})

    def test_scheduleRecording(self, mock_request, mock_instance):
        # The client appends to RecorderSettings, so give the sentinel a list.
        mock.sentinel.instance.RecorderSettings = []
        client = RemoteRecorderManagement()
        result = client.scheduleRecording(
            'test-name', folder_id='test-folder-id', is_broadcast=False,
            start_time=datetime(2013, 3, 15, 9, 0, 0),
            end_time=datetime(2013, 3, 15, 10, 0, 0),
            recorder_id='test-recorder-id')
        self.assertEqual(instance_args(mock_instance.call_args_list), [
            'ns0:ArrayOfRecorderSettings', 'ns0:AuthenticationInfo',
            'ns0:RecorderSettings'])
        mock_request.assert_called_with('ScheduleRecording', {
            'auth': mock.sentinel.instance, 'name': 'test-name',
            'folderId': 'test-folder-id', 'isBroadcast': False,
            'start': datetime(2013, 3, 15, 9, 0),
            'end': datetime(2013, 3, 15, 10, 0),
            'recorderSettings': mock.sentinel.instance})

    def test_listRecorders(self, mock_request, mock_instance):
        client = RemoteRecorderManagement()
        try:
            result = client.listRecorders()
        except TypeError:
            # The mocked response cannot be iterated; the request dispatch
            # below is what this test actually verifies.
            pass
        self.assertEqual(instance_args(mock_instance.call_args_list), [
            'ns0:AuthenticationInfo', 'ns0:Pagination'])
        mock_request.assert_called_with('ListRecorders', {
            'auth': mock.sentinel.instance,
            'pagination': mock.sentinel.instance, 'sortBy': 'Name'})

    def test_updateRemoteRecorderExternalId(self, mock_request, mock_instance):
        client = RemoteRecorderManagement()
        result = client.updateRemoteRecorderExternalId(
            'test-recorder-id', 'test-external-id')
        self.assertEqual(instance_args(mock_instance.call_args_list), [
            'ns0:AuthenticationInfo'])
        mock_request.assert_called_with('UpdateRemoteRecorderExternalId', {
            'auth': mock.sentinel.instance, 'externalId': 'test-external-id',
            'remoteRecorderId': 'test-recorder-id'})

    def test_updateRecordingTime(self, mock_request, mock_instance):
        client = RemoteRecorderManagement()
        result = client.updateRecordingTime(
            'test-session-id', start=datetime(2013, 3, 15, 9, 0, 0),
            end=datetime(2013, 3, 15, 10, 0, 0))
        self.assertEqual(instance_args(mock_instance.call_args_list), [
            'ns0:AuthenticationInfo'])
        mock_request.assert_called_with('UpdateRecordingTime', {
            'auth': mock.sentinel.instance, 'sessionId': 'test-session-id',
            'start': datetime(2013, 3, 15, 9, 0),
            'end': datetime(2013, 3, 15, 10, 0)})
|
<gh_stars>1-10
/*
* Copyright © 2018 Mercateo AG (http://www.mercateo.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: mvnd.proto
package com.mercateo.oss.mvnd;
public final class MVNDProto {
private MVNDProto() {}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistryLite registry) {
}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
registerAllExtensions(
(com.google.protobuf.ExtensionRegistryLite) registry);
}
public interface InvokeRequestOrBuilder extends
// @@protoc_insertion_point(interface_extends:mvnd.InvokeRequest)
com.google.protobuf.MessageOrBuilder {
/**
* <code>optional string workDir = 1;</code>
*/
java.lang.String getWorkDir();
/**
* <code>optional string workDir = 1;</code>
*/
com.google.protobuf.ByteString
getWorkDirBytes();
/**
* <code>repeated string args = 2;</code>
*/
java.util.List<java.lang.String>
getArgsList();
/**
* <code>repeated string args = 2;</code>
*/
int getArgsCount();
/**
* <code>repeated string args = 2;</code>
*/
java.lang.String getArgs(int index);
/**
* <code>repeated string args = 2;</code>
*/
com.google.protobuf.ByteString
getArgsBytes(int index);
}
/**
* Protobuf type {@code mvnd.InvokeRequest}
*/
public static final class InvokeRequest extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:mvnd.InvokeRequest)
InvokeRequestOrBuilder {
// Use InvokeRequest.newBuilder() to construct.
private InvokeRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private InvokeRequest() {
workDir_ = "";
args_ = com.google.protobuf.LazyStringArrayList.EMPTY;
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
}
private InvokeRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
int mutable_bitField0_ = 0;
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!input.skipField(tag)) {
done = true;
}
break;
}
case 10: {
java.lang.String s = input.readStringRequireUtf8();
workDir_ = s;
break;
}
case 18: {
java.lang.String s = input.readStringRequireUtf8();
if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
args_ = new com.google.protobuf.LazyStringArrayList();
mutable_bitField0_ |= 0x00000002;
}
args_.add(s);
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
args_ = args_.getUnmodifiableView();
}
makeExtensionsImmutable();
}
}
    // Reflection plumbing: descriptor and field-accessor table generated from
    // the mvnd.InvokeRequest message in the .proto file.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.mercateo.oss.mvnd.MVNDProto.internal_static_mvnd_InvokeRequest_descriptor;
    }
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.mercateo.oss.mvnd.MVNDProto.internal_static_mvnd_InvokeRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.mercateo.oss.mvnd.MVNDProto.InvokeRequest.class, com.mercateo.oss.mvnd.MVNDProto.InvokeRequest.Builder.class);
    }
    private int bitField0_;
    public static final int WORKDIR_FIELD_NUMBER = 1;
    // Holds either a String or a ByteString; lazily converted and cached in
    // whichever form was requested last (standard generated-string pattern).
    private volatile java.lang.Object workDir_;
    /**
     * <code>optional string workDir = 1;</code>
     */
    public java.lang.String getWorkDir() {
      java.lang.Object ref = workDir_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        // Cached as ByteString: decode once and memoize the String form.
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        workDir_ = s;
        return s;
      }
    }
    /**
     * <code>optional string workDir = 1;</code>
     */
    public com.google.protobuf.ByteString
        getWorkDirBytes() {
      java.lang.Object ref = workDir_;
      if (ref instanceof java.lang.String) {
        // Cached as String: encode once and memoize the ByteString form.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        workDir_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    public static final int ARGS_FIELD_NUMBER = 2;
    // Immutable view after parsing (see the parsing constructor's finally).
    private com.google.protobuf.LazyStringList args_;
    /**
     * <code>repeated string args = 2;</code>
     */
    public com.google.protobuf.ProtocolStringList
        getArgsList() {
      return args_;
    }
    /**
     * <code>repeated string args = 2;</code>
     */
    public int getArgsCount() {
      return args_.size();
    }
    /**
     * <code>repeated string args = 2;</code>
     */
    public java.lang.String getArgs(int index) {
      return args_.get(index);
    }
    /**
     * <code>repeated string args = 2;</code>
     */
    public com.google.protobuf.ByteString
        getArgsBytes(int index) {
      return args_.getByteString(index);
    }
    // -1 = not computed, 0 = not initialized, 1 = initialized. No required
    // fields exist, so the check trivially memoizes true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;
      memoizedIsInitialized = 1;
      return true;
    }
    // Serializes only non-default fields, per proto3 semantics: workDir is
    // written when non-empty, each args element is written with tag 2.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (!getWorkDirBytes().isEmpty()) {
        com.google.protobuf.GeneratedMessageV3.writeString(output, 1, workDir_);
      }
      for (int i = 0; i < args_.size(); i++) {
        com.google.protobuf.GeneratedMessageV3.writeString(output, 2, args_.getRaw(i));
      }
    }
    // Computes (and memoizes in memoizedSize, inherited from the superclass)
    // the serialized byte length; mirrors writeTo exactly.
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;
      size = 0;
      if (!getWorkDirBytes().isEmpty()) {
        size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, workDir_);
      }
      {
        int dataSize = 0;
        for (int i = 0; i < args_.size(); i++) {
          dataSize += computeStringSizeNoTag(args_.getRaw(i));
        }
        size += dataSize;
        // One byte of tag overhead per args element (field 2 fits in 1 byte).
        size += 1 * getArgsList().size();
      }
      memoizedSize = size;
      return size;
    }
    private static final long serialVersionUID = 0L;
    // Value equality over both fields (workDir, args).
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof com.mercateo.oss.mvnd.MVNDProto.InvokeRequest)) {
        return super.equals(obj);
      }
      com.mercateo.oss.mvnd.MVNDProto.InvokeRequest other = (com.mercateo.oss.mvnd.MVNDProto.InvokeRequest) obj;
      boolean result = true;
      result = result && getWorkDir()
          .equals(other.getWorkDir());
      result = result && getArgsList()
          .equals(other.getArgsList());
      return result;
    }
    // Hash over descriptor and set fields; memoized in memoizedHashCode
    // (inherited field). args contributes only when non-empty, matching the
    // conditional structure of the generated equals/serialization.
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (37 * hash) + WORKDIR_FIELD_NUMBER;
      hash = (53 * hash) + getWorkDir().hashCode();
      if (getArgsCount() > 0) {
        hash = (37 * hash) + ARGS_FIELD_NUMBER;
        hash = (53 * hash) + getArgsList().hashCode();
      }
      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }
    // Standard generated parse entry points: one overload per input kind
    // (ByteString, byte[], InputStream, delimited stream, CodedInputStream),
    // each with and without an extension registry. All delegate to PARSER.
    public static com.mercateo.oss.mvnd.MVNDProto.InvokeRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static com.mercateo.oss.mvnd.MVNDProto.InvokeRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static com.mercateo.oss.mvnd.MVNDProto.InvokeRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static com.mercateo.oss.mvnd.MVNDProto.InvokeRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static com.mercateo.oss.mvnd.MVNDProto.InvokeRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static com.mercateo.oss.mvnd.MVNDProto.InvokeRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message body.
    public static com.mercateo.oss.mvnd.MVNDProto.InvokeRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static com.mercateo.oss.mvnd.MVNDProto.InvokeRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static com.mercateo.oss.mvnd.MVNDProto.InvokeRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static com.mercateo.oss.mvnd.MVNDProto.InvokeRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    // Builder factories: toBuilder() on the default instance avoids a merge
    // of all-default fields.
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(com.mercateo.oss.mvnd.MVNDProto.InvokeRequest prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code mvnd.InvokeRequest}
     *
     * <p>Mutable builder for {@code InvokeRequest}. Not thread-safe. Bit
     * 0x00000002 of {@code bitField0_} tracks whether {@code args_} currently
     * points at a private mutable list (vs. a shared/immutable one).
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:mvnd.InvokeRequest)
        com.mercateo.oss.mvnd.MVNDProto.InvokeRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return com.mercateo.oss.mvnd.MVNDProto.internal_static_mvnd_InvokeRequest_descriptor;
      }
      protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return com.mercateo.oss.mvnd.MVNDProto.internal_static_mvnd_InvokeRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                com.mercateo.oss.mvnd.MVNDProto.InvokeRequest.class, com.mercateo.oss.mvnd.MVNDProto.InvokeRequest.Builder.class);
      }
      // Construct using com.mercateo.oss.mvnd.MVNDProto.InvokeRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Hook for eager field-builder setup; no message/group fields here, so
      // the alwaysUseFieldBuilders branch is intentionally empty.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
        }
      }
      // Resets all fields to proto3 defaults and clears the args mutability bit.
      public Builder clear() {
        super.clear();
        workDir_ = "";
        args_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return com.mercateo.oss.mvnd.MVNDProto.internal_static_mvnd_InvokeRequest_descriptor;
      }
      public com.mercateo.oss.mvnd.MVNDProto.InvokeRequest getDefaultInstanceForType() {
        return com.mercateo.oss.mvnd.MVNDProto.InvokeRequest.getDefaultInstance();
      }
      public com.mercateo.oss.mvnd.MVNDProto.InvokeRequest build() {
        com.mercateo.oss.mvnd.MVNDProto.InvokeRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      // Transfers the builder state into a new message. args_ is frozen via
      // getUnmodifiableView and then shared with the message; the builder's
      // mutability bit is cleared so a later mutation re-copies the list.
      public com.mercateo.oss.mvnd.MVNDProto.InvokeRequest buildPartial() {
        com.mercateo.oss.mvnd.MVNDProto.InvokeRequest result = new com.mercateo.oss.mvnd.MVNDProto.InvokeRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        result.workDir_ = workDir_;
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          args_ = args_.getUnmodifiableView();
          bitField0_ = (bitField0_ & ~0x00000002);
        }
        result.args_ = args_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      // Reflection-based mutators below simply delegate to the superclass and
      // narrow the return type to Builder for chaining.
      public Builder clone() {
        return (Builder) super.clone();
      }
      public Builder setField(
          com.google.protobuf.Descriptors.FieldDescriptor field,
          Object value) {
        return (Builder) super.setField(field, value);
      }
      public Builder clearField(
          com.google.protobuf.Descriptors.FieldDescriptor field) {
        return (Builder) super.clearField(field);
      }
      public Builder clearOneof(
          com.google.protobuf.Descriptors.OneofDescriptor oneof) {
        return (Builder) super.clearOneof(oneof);
      }
      public Builder setRepeatedField(
          com.google.protobuf.Descriptors.FieldDescriptor field,
          int index, Object value) {
        return (Builder) super.setRepeatedField(field, index, value);
      }
      public Builder addRepeatedField(
          com.google.protobuf.Descriptors.FieldDescriptor field,
          Object value) {
        return (Builder) super.addRepeatedField(field, value);
      }
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof com.mercateo.oss.mvnd.MVNDProto.InvokeRequest) {
          return mergeFrom((com.mercateo.oss.mvnd.MVNDProto.InvokeRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      // Merge semantics: non-empty workDir overwrites; args elements append.
      // When this builder's args is still empty, the other message's frozen
      // list is adopted directly (mutability bit cleared) to avoid a copy.
      public Builder mergeFrom(com.mercateo.oss.mvnd.MVNDProto.InvokeRequest other) {
        if (other == com.mercateo.oss.mvnd.MVNDProto.InvokeRequest.getDefaultInstance()) return this;
        if (!other.getWorkDir().isEmpty()) {
          workDir_ = other.workDir_;
          onChanged();
        }
        if (!other.args_.isEmpty()) {
          if (args_.isEmpty()) {
            args_ = other.args_;
            bitField0_ = (bitField0_ & ~0x00000002);
          } else {
            ensureArgsIsMutable();
            args_.addAll(other.args_);
          }
          onChanged();
        }
        // NOTE(review): generated-code artifact — this second onChanged() is
        // redundant with the per-field notifications above but harmless.
        onChanged();
        return this;
      }
      public final boolean isInitialized() {
        return true;
      }
      // Stream merge: parse a full message, then merge it in. On parse error
      // the partially-parsed message (if any) is still merged before rethrow.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.mercateo.oss.mvnd.MVNDProto.InvokeRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (com.mercateo.oss.mvnd.MVNDProto.InvokeRequest) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;
      private java.lang.Object workDir_ = "";
      /**
       * <code>optional string workDir = 1;</code>
       */
      public java.lang.String getWorkDir() {
        java.lang.Object ref = workDir_;
        if (!(ref instanceof java.lang.String)) {
          com.google.protobuf.ByteString bs =
              (com.google.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          workDir_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string workDir = 1;</code>
       */
      public com.google.protobuf.ByteString
          getWorkDirBytes() {
        java.lang.Object ref = workDir_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          workDir_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string workDir = 1;</code>
       */
      public Builder setWorkDir(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  
        workDir_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional string workDir = 1;</code>
       */
      public Builder clearWorkDir() {
        
        workDir_ = getDefaultInstance().getWorkDir();
        onChanged();
        return this;
      }
      /**
       * <code>optional string workDir = 1;</code>
       */
      public Builder setWorkDirBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
        
        workDir_ = value;
        onChanged();
        return this;
      }
      private com.google.protobuf.LazyStringList args_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      // Copy-on-write guard: replaces args_ with a private mutable copy the
      // first time a mutation is requested after clear/build/adopt.
      private void ensureArgsIsMutable() {
        if (!((bitField0_ & 0x00000002) == 0x00000002)) {
          args_ = new com.google.protobuf.LazyStringArrayList(args_);
          bitField0_ |= 0x00000002;
         }
      }
      /**
       * <code>repeated string args = 2;</code>
       */
      public com.google.protobuf.ProtocolStringList
          getArgsList() {
        return args_.getUnmodifiableView();
      }
      /**
       * <code>repeated string args = 2;</code>
       */
      public int getArgsCount() {
        return args_.size();
      }
      /**
       * <code>repeated string args = 2;</code>
       */
      public java.lang.String getArgs(int index) {
        return args_.get(index);
      }
      /**
       * <code>repeated string args = 2;</code>
       */
      public com.google.protobuf.ByteString
          getArgsBytes(int index) {
        return args_.getByteString(index);
      }
      /**
       * <code>repeated string args = 2;</code>
       */
      public Builder setArgs(
          int index, java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureArgsIsMutable();
        args_.set(index, value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string args = 2;</code>
       */
      public Builder addArgs(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureArgsIsMutable();
        args_.add(value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string args = 2;</code>
       */
      public Builder addAllArgs(
          java.lang.Iterable<java.lang.String> values) {
        ensureArgsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(
            values, args_);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string args = 2;</code>
       */
      public Builder clearArgs() {
        args_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string args = 2;</code>
       */
      public Builder addArgsBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
        ensureArgsIsMutable();
        args_.add(value);
        onChanged();
        return this;
      }
      // NOTE(review): unknown fields are intentionally discarded — both
      // methods ignore their argument and return this unchanged.
      public final Builder setUnknownFields(
          final com.google.protobuf.UnknownFieldSet unknownFields) {
        return this;
      }
      public final Builder mergeUnknownFields(
          final com.google.protobuf.UnknownFieldSet unknownFields) {
        return this;
      }
      // @@protoc_insertion_point(builder_scope:mvnd.InvokeRequest)
    }
    // @@protoc_insertion_point(class_scope:mvnd.InvokeRequest)
    // Singleton all-defaults instance; also the identity checked by
    // toBuilder()/mergeFrom() fast paths.
    private static final com.mercateo.oss.mvnd.MVNDProto.InvokeRequest DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new com.mercateo.oss.mvnd.MVNDProto.InvokeRequest();
    }
    public static com.mercateo.oss.mvnd.MVNDProto.InvokeRequest getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }
    // Parser singleton backing all parseFrom overloads; delegates to the
    // wire-format parsing constructor above.
    private static final com.google.protobuf.Parser<InvokeRequest>
        PARSER = new com.google.protobuf.AbstractParser<InvokeRequest>() {
      public InvokeRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
          return new InvokeRequest(input, extensionRegistry);
      }
    };
    public static com.google.protobuf.Parser<InvokeRequest> parser() {
      return PARSER;
    }
    @java.lang.Override
    public com.google.protobuf.Parser<InvokeRequest> getParserForType() {
      return PARSER;
    }
    public com.mercateo.oss.mvnd.MVNDProto.InvokeRequest getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }
}
  // Read-only view shared by InvokeResponse and its Builder: one streamed
  // response record carrying a type tag, an output line, and an exit code.
  public interface InvokeResponseOrBuilder extends
      // @@protoc_insertion_point(interface_extends:mvnd.InvokeResponse)
      com.google.protobuf.MessageOrBuilder {
    /**
     * <code>optional .mvnd.InvokeResponse.ResponseType type = 1;</code>
     *
     * Raw wire value of the enum; may be a number with no ResponseType constant.
     */
    int getTypeValue();
    /**
     * <code>optional .mvnd.InvokeResponse.ResponseType type = 1;</code>
     */
    com.mercateo.oss.mvnd.MVNDProto.InvokeResponse.ResponseType getType();
    /**
     * <code>optional string line = 2;</code>
     */
    java.lang.String getLine();
    /**
     * <code>optional string line = 2;</code>
     */
    com.google.protobuf.ByteString
        getLineBytes();
    /**
     * <code>optional int32 errorCode = 3;</code>
     */
    int getErrorCode();
  }
  /**
   * Protobuf type {@code mvnd.InvokeResponse}
   */
  public static final class InvokeResponse extends
      com.google.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:mvnd.InvokeResponse)
      InvokeResponseOrBuilder {
    // Use InvokeResponse.newBuilder() to construct.
    private InvokeResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    // No-arg constructor: proto3 defaults (type=OUT(0), empty line, code 0).
    private InvokeResponse() {
      type_ = 0;
      line_ = "";
      errorCode_ = 0;
    }
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
    getUnknownFields() {
      // NOTE(review): unknown fields are not retained; always the empty set.
      return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
    }
    // Wire-format parsing constructor used by PARSER: consumes tag/value
    // pairs until end of input, skipping unrecognized tags.
    private InvokeResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      this();
      int mutable_bitField0_ = 0;
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!input.skipField(tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // Field 1 (type), varint; raw value is kept even when it does
              // not match a known ResponseType constant.
              int rawValue = input.readEnum();
              type_ = rawValue;
              break;
            }
            case 18: {
              // Field 2 (line), length-delimited; must be valid UTF-8.
              java.lang.String s = input.readStringRequireUtf8();
              line_ = s;
              break;
            }
            case 24: {
              // Field 3 (errorCode), varint int32.
              errorCode_ = input.readInt32();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e).setUnfinishedMessage(this);
      } finally {
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.mercateo.oss.mvnd.MVNDProto.internal_static_mvnd_InvokeResponse_descriptor;
    }
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.mercateo.oss.mvnd.MVNDProto.internal_static_mvnd_InvokeResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.mercateo.oss.mvnd.MVNDProto.InvokeResponse.class, com.mercateo.oss.mvnd.MVNDProto.InvokeResponse.Builder.class);
    }
    /**
     * Protobuf enum {@code mvnd.InvokeResponse.ResponseType}
     *
     * <p>Discriminates what a streamed InvokeResponse carries: a stdout line
     * (OUT), a stderr line (ERR), or the terminal exit record (EXIT).
     * UNRECOGNIZED represents a wire value with no matching constant.
     */
    public enum ResponseType
        implements com.google.protobuf.ProtocolMessageEnum {
      /**
       * <code>OUT = 0;</code>
       */
      OUT(0),
      /**
       * <code>ERR = 1;</code>
       */
      ERR(1),
      /**
       * <code>EXIT = 2;</code>
       */
      EXIT(2),
      UNRECOGNIZED(-1),
      ;
      /**
       * <code>OUT = 0;</code>
       */
      public static final int OUT_VALUE = 0;
      /**
       * <code>ERR = 1;</code>
       */
      public static final int ERR_VALUE = 1;
      /**
       * <code>EXIT = 2;</code>
       */
      public static final int EXIT_VALUE = 2;
      // Wire value of this constant; UNRECOGNIZED has no wire value by design.
      public final int getNumber() {
        if (this == UNRECOGNIZED) {
          throw new java.lang.IllegalArgumentException(
              "Can't get the number of an unknown enum value.");
        }
        return value;
      }
      /**
       * @deprecated Use {@link #forNumber(int)} instead.
       */
      @java.lang.Deprecated
      public static ResponseType valueOf(int value) {
        return forNumber(value);
      }
      // Maps a wire value to its constant; returns null (not UNRECOGNIZED)
      // for unknown values — callers supply the UNRECOGNIZED fallback.
      public static ResponseType forNumber(int value) {
        switch (value) {
          case 0: return OUT;
          case 1: return ERR;
          case 2: return EXIT;
          default: return null;
        }
      }
      public static com.google.protobuf.Internal.EnumLiteMap<ResponseType>
          internalGetValueMap() {
        return internalValueMap;
      }
      private static final com.google.protobuf.Internal.EnumLiteMap<
          ResponseType> internalValueMap =
            new com.google.protobuf.Internal.EnumLiteMap<ResponseType>() {
              public ResponseType findValueByNumber(int number) {
                return ResponseType.forNumber(number);
              }
            };
      public final com.google.protobuf.Descriptors.EnumValueDescriptor
          getValueDescriptor() {
        return getDescriptor().getValues().get(ordinal());
      }
      public final com.google.protobuf.Descriptors.EnumDescriptor
          getDescriptorForType() {
        return getDescriptor();
      }
      public static final com.google.protobuf.Descriptors.EnumDescriptor
          getDescriptor() {
        return com.mercateo.oss.mvnd.MVNDProto.InvokeResponse.getDescriptor().getEnumTypes().get(0);
      }
      private static final ResponseType[] VALUES = values();
      // Reflection-based lookup by descriptor (index -1 means UNRECOGNIZED).
      public static ResponseType valueOf(
          com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
        if (desc.getType() != getDescriptor()) {
          throw new java.lang.IllegalArgumentException(
            "EnumValueDescriptor is not for this type.");
        }
        if (desc.getIndex() == -1) {
          return UNRECOGNIZED;
        }
        return VALUES[desc.getIndex()];
      }
      private final int value;
      private ResponseType(int value) {
        this.value = value;
      }
      // @@protoc_insertion_point(enum_scope:mvnd.InvokeResponse.ResponseType)
    }
    public static final int TYPE_FIELD_NUMBER = 1;
    // Stored as the raw wire int so unrecognized values survive round-trips.
    private int type_;
    /**
     * <code>optional .mvnd.InvokeResponse.ResponseType type = 1;</code>
     */
    public int getTypeValue() {
      return type_;
    }
    /**
     * <code>optional .mvnd.InvokeResponse.ResponseType type = 1;</code>
     */
    public com.mercateo.oss.mvnd.MVNDProto.InvokeResponse.ResponseType getType() {
      com.mercateo.oss.mvnd.MVNDProto.InvokeResponse.ResponseType result = com.mercateo.oss.mvnd.MVNDProto.InvokeResponse.ResponseType.valueOf(type_);
      return result == null ? com.mercateo.oss.mvnd.MVNDProto.InvokeResponse.ResponseType.UNRECOGNIZED : result;
    }
    public static final int LINE_FIELD_NUMBER = 2;
    // Holds either a String or a ByteString; converted lazily and cached
    // (standard generated-string pattern, see getLine/getLineBytes).
    private volatile java.lang.Object line_;
    /**
     * <code>optional string line = 2;</code>
     */
    public java.lang.String getLine() {
      java.lang.Object ref = line_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        line_ = s;
        return s;
      }
    }
    /**
     * <code>optional string line = 2;</code>
     */
    public com.google.protobuf.ByteString
        getLineBytes() {
      java.lang.Object ref = line_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        line_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    public static final int ERRORCODE_FIELD_NUMBER = 3;
    private int errorCode_;
    /**
     * <code>optional int32 errorCode = 3;</code>
     */
    public int getErrorCode() {
      return errorCode_;
    }
    // -1 = not computed, 0/1 = memoized result; no required fields exist.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;
      memoizedIsInitialized = 1;
      return true;
    }
    // Serializes only non-default fields (proto3): type when != OUT(0),
    // line when non-empty, errorCode when non-zero.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (type_ != com.mercateo.oss.mvnd.MVNDProto.InvokeResponse.ResponseType.OUT.getNumber()) {
        output.writeEnum(1, type_);
      }
      if (!getLineBytes().isEmpty()) {
        com.google.protobuf.GeneratedMessageV3.writeString(output, 2, line_);
      }
      if (errorCode_ != 0) {
        output.writeInt32(3, errorCode_);
      }
    }
    // Computes (and memoizes) the serialized length; mirrors writeTo exactly.
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;
      size = 0;
      if (type_ != com.mercateo.oss.mvnd.MVNDProto.InvokeResponse.ResponseType.OUT.getNumber()) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(1, type_);
      }
      if (!getLineBytes().isEmpty()) {
        size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, line_);
      }
      if (errorCode_ != 0) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(3, errorCode_);
      }
      memoizedSize = size;
      return size;
    }
    private static final long serialVersionUID = 0L;
    // Value equality over all three fields (type, line, errorCode).
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof com.mercateo.oss.mvnd.MVNDProto.InvokeResponse)) {
        return super.equals(obj);
      }
      com.mercateo.oss.mvnd.MVNDProto.InvokeResponse other = (com.mercateo.oss.mvnd.MVNDProto.InvokeResponse) obj;
      boolean result = true;
      result = result && type_ == other.type_;
      result = result && getLine()
          .equals(other.getLine());
      result = result && (getErrorCode()
          == other.getErrorCode());
      return result;
    }
    // Hash over descriptor and all fields; memoized in memoizedHashCode.
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (37 * hash) + TYPE_FIELD_NUMBER;
      hash = (53 * hash) + type_;
      hash = (37 * hash) + LINE_FIELD_NUMBER;
      hash = (53 * hash) + getLine().hashCode();
      hash = (37 * hash) + ERRORCODE_FIELD_NUMBER;
      hash = (53 * hash) + getErrorCode();
      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }
    // Standard generated parse entry points: one overload per input kind,
    // with and without an extension registry. All delegate to PARSER.
    public static com.mercateo.oss.mvnd.MVNDProto.InvokeResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static com.mercateo.oss.mvnd.MVNDProto.InvokeResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static com.mercateo.oss.mvnd.MVNDProto.InvokeResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static com.mercateo.oss.mvnd.MVNDProto.InvokeResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static com.mercateo.oss.mvnd.MVNDProto.InvokeResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static com.mercateo.oss.mvnd.MVNDProto.InvokeResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message body.
    public static com.mercateo.oss.mvnd.MVNDProto.InvokeResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static com.mercateo.oss.mvnd.MVNDProto.InvokeResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static com.mercateo.oss.mvnd.MVNDProto.InvokeResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static com.mercateo.oss.mvnd.MVNDProto.InvokeResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    // Builder factories: toBuilder() on the default instance avoids merging
    // all-default fields.
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(com.mercateo.oss.mvnd.MVNDProto.InvokeResponse prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code mvnd.InvokeResponse}
     *
     * <p>Mutable builder for {@code InvokeResponse}. Not thread-safe.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:mvnd.InvokeResponse)
        com.mercateo.oss.mvnd.MVNDProto.InvokeResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return com.mercateo.oss.mvnd.MVNDProto.internal_static_mvnd_InvokeResponse_descriptor;
      }
      protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return com.mercateo.oss.mvnd.MVNDProto.internal_static_mvnd_InvokeResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                com.mercateo.oss.mvnd.MVNDProto.InvokeResponse.class, com.mercateo.oss.mvnd.MVNDProto.InvokeResponse.Builder.class);
      }
      // Construct using com.mercateo.oss.mvnd.MVNDProto.InvokeResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Hook for eager field-builder setup; no message fields here, so the
      // alwaysUseFieldBuilders branch is intentionally empty.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
        }
      }
      // Resets all fields to proto3 defaults.
      public Builder clear() {
        super.clear();
        type_ = 0;
        line_ = "";
        errorCode_ = 0;
        return this;
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return com.mercateo.oss.mvnd.MVNDProto.internal_static_mvnd_InvokeResponse_descriptor;
      }
      public com.mercateo.oss.mvnd.MVNDProto.InvokeResponse getDefaultInstanceForType() {
        return com.mercateo.oss.mvnd.MVNDProto.InvokeResponse.getDefaultInstance();
      }
      public com.mercateo.oss.mvnd.MVNDProto.InvokeResponse build() {
        com.mercateo.oss.mvnd.MVNDProto.InvokeResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      // Copies the three scalar fields into a fresh message instance.
      public com.mercateo.oss.mvnd.MVNDProto.InvokeResponse buildPartial() {
        com.mercateo.oss.mvnd.MVNDProto.InvokeResponse result = new com.mercateo.oss.mvnd.MVNDProto.InvokeResponse(this);
        result.type_ = type_;
        result.line_ = line_;
        result.errorCode_ = errorCode_;
        onBuilt();
        return result;
      }
      // Reflection-based mutators: delegate to the superclass, narrowing the
      // return type to Builder for chaining.
      public Builder clone() {
        return (Builder) super.clone();
      }
      public Builder setField(
          com.google.protobuf.Descriptors.FieldDescriptor field,
          Object value) {
        return (Builder) super.setField(field, value);
      }
      public Builder clearField(
          com.google.protobuf.Descriptors.FieldDescriptor field) {
        return (Builder) super.clearField(field);
      }
      public Builder clearOneof(
          com.google.protobuf.Descriptors.OneofDescriptor oneof) {
        return (Builder) super.clearOneof(oneof);
      }
      public Builder setRepeatedField(
          com.google.protobuf.Descriptors.FieldDescriptor field,
          int index, Object value) {
        return (Builder) super.setRepeatedField(field, index, value);
      }
      public Builder addRepeatedField(
          com.google.protobuf.Descriptors.FieldDescriptor field,
          Object value) {
        return (Builder) super.addRepeatedField(field, value);
      }
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof com.mercateo.oss.mvnd.MVNDProto.InvokeResponse) {
          return mergeFrom((com.mercateo.oss.mvnd.MVNDProto.InvokeResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      // Merge semantics: each field overwrites only when the source value is
      // non-default (proto3 merge rules).
      public Builder mergeFrom(com.mercateo.oss.mvnd.MVNDProto.InvokeResponse other) {
        if (other == com.mercateo.oss.mvnd.MVNDProto.InvokeResponse.getDefaultInstance()) return this;
        if (other.type_ != 0) {
          setTypeValue(other.getTypeValue());
        }
        if (!other.getLine().isEmpty()) {
          line_ = other.line_;
          onChanged();
        }
        if (other.getErrorCode() != 0) {
          setErrorCode(other.getErrorCode());
        }
        // NOTE(review): generated-code artifact — redundant with the
        // per-field onChanged() calls above, but harmless.
        onChanged();
        return this;
      }
      public final boolean isInitialized() {
        return true;
      }
      // Stream merge: parse a full message then merge it; on parse error the
      // partially-parsed message (if any) is merged before rethrowing.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.mercateo.oss.mvnd.MVNDProto.InvokeResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (com.mercateo.oss.mvnd.MVNDProto.InvokeResponse) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
// ----- Field: type = 1 (enum mvnd.InvokeResponse.ResponseType), stored as raw wire value -----
private int type_ = 0;
/**
 * Raw (wire) value of the type enum.
 * <code>optional .mvnd.InvokeResponse.ResponseType type = 1;</code>
 */
public int getTypeValue() {
  return type_;
}
/**
 * Sets the raw wire value without validity checking.
 * <code>optional .mvnd.InvokeResponse.ResponseType type = 1;</code>
 */
public Builder setTypeValue(int value) {
  type_ = value;
  onChanged();
  return this;
}
/**
 * Maps the stored wire value back to the enum; values not known to this
 * generated code yield UNRECOGNIZED instead of null.
 * <code>optional .mvnd.InvokeResponse.ResponseType type = 1;</code>
 */
public com.mercateo.oss.mvnd.MVNDProto.InvokeResponse.ResponseType getType() {
  com.mercateo.oss.mvnd.MVNDProto.InvokeResponse.ResponseType result = com.mercateo.oss.mvnd.MVNDProto.InvokeResponse.ResponseType.valueOf(type_);
  return result == null ? com.mercateo.oss.mvnd.MVNDProto.InvokeResponse.ResponseType.UNRECOGNIZED : result;
}
/**
 * Sets the field from the enum constant; null is rejected.
 * <code>optional .mvnd.InvokeResponse.ResponseType type = 1;</code>
 */
public Builder setType(com.mercateo.oss.mvnd.MVNDProto.InvokeResponse.ResponseType value) {
  if (value == null) {
    throw new NullPointerException();
  }
  type_ = value.getNumber();
  onChanged();
  return this;
}
/**
 * Resets type to its proto3 default (the zero-numbered enum constant).
 * <code>optional .mvnd.InvokeResponse.ResponseType type = 1;</code>
 */
public Builder clearType() {
  type_ = 0;
  onChanged();
  return this;
}
// ----- Field: line = 2 (string) -----
// Stored as Object: either a String or a ByteString; conversion between the
// two representations is done lazily and cached back into the field.
private java.lang.Object line_ = "";
/**
 * Returns the line as a String, converting (and caching) from UTF-8 bytes if needed.
 * <code>optional string line = 2;</code>
 */
public java.lang.String getLine() {
  java.lang.Object ref = line_;
  if (!(ref instanceof java.lang.String)) {
    com.google.protobuf.ByteString bs =
        (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    line_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
 * Returns the line as UTF-8 bytes, converting (and caching) from String if needed.
 * <code>optional string line = 2;</code>
 */
public com.google.protobuf.ByteString
    getLineBytes() {
  java.lang.Object ref = line_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    line_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 * Sets the line; null is rejected.
 * <code>optional string line = 2;</code>
 */
public Builder setLine(
    java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  line_ = value;
  onChanged();
  return this;
}
/**
 * Resets the line to the default instance's value (empty string).
 * <code>optional string line = 2;</code>
 */
public Builder clearLine() {
  line_ = getDefaultInstance().getLine();
  onChanged();
  return this;
}
/**
 * Sets the line from raw bytes; validates they are well-formed UTF-8 first.
 * <code>optional string line = 2;</code>
 */
public Builder setLineBytes(
    com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  line_ = value;
  onChanged();
  return this;
}
// ----- Field: errorCode = 3 (int32) -----
private int errorCode_ ;
/**
 * <code>optional int32 errorCode = 3;</code>
 */
public int getErrorCode() {
  return errorCode_;
}
/**
 * <code>optional int32 errorCode = 3;</code>
 */
public Builder setErrorCode(int value) {
  errorCode_ = value;
  onChanged();
  return this;
}
/**
 * Resets errorCode to its proto3 default (0).
 * <code>optional int32 errorCode = 3;</code>
 */
public Builder clearErrorCode() {
  errorCode_ = 0;
  onChanged();
  return this;
}
// Both overrides ignore their argument, i.e. unknown fields are dropped
// rather than preserved — behavior emitted by this protoc configuration.
public final Builder setUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return this;
}
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return this;
}
// @@protoc_insertion_point(builder_scope:mvnd.InvokeResponse)
}
// @@protoc_insertion_point(class_scope:mvnd.InvokeResponse)
// Singleton default instance returned by getDefaultInstance()/getDefaultInstanceForType().
private static final com.mercateo.oss.mvnd.MVNDProto.InvokeResponse DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.mercateo.oss.mvnd.MVNDProto.InvokeResponse();
}
public static com.mercateo.oss.mvnd.MVNDProto.InvokeResponse getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Stateless parser that delegates to the message's stream constructor.
private static final com.google.protobuf.Parser<InvokeResponse>
    PARSER = new com.google.protobuf.AbstractParser<InvokeResponse>() {
  public InvokeResponse parsePartialFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return new InvokeResponse(input, extensionRegistry);
  }
};
public static com.google.protobuf.Parser<InvokeResponse> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<InvokeResponse> getParserForType() {
  return PARSER;
}
public com.mercateo.oss.mvnd.MVNDProto.InvokeResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// Descriptor handles for the two message types (InvokeRequest, InvokeResponse)
// declared in mvnd.proto; bound in the static initializer below.
private static final com.google.protobuf.Descriptors.Descriptor
  internal_static_mvnd_InvokeRequest_descriptor;
private static final
  com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internal_static_mvnd_InvokeRequest_fieldAccessorTable;
private static final com.google.protobuf.Descriptors.Descriptor
  internal_static_mvnd_InvokeResponse_descriptor;
private static final
  com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internal_static_mvnd_InvokeResponse_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
    getDescriptor() {
  return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor
  descriptor;
static {
  // Serialized form of mvnd.proto as emitted by protoc — do not edit by hand.
  java.lang.String[] descriptorData = {
    "\n\nmvnd.proto\022\004mvnd\".\n\rInvokeRequest\022\017\n\007w" +
    "orkDir\030\001 \001(\t\022\014\n\004args\030\002 \003(\t\"\216\001\n\016InvokeRes" +
    "ponse\022/\n\004type\030\001 \001(\0162!.mvnd.InvokeRespons" +
    "e.ResponseType\022\014\n\004line\030\002 \001(\t\022\021\n\terrorCod" +
    "e\030\003 \001(\005\"*\n\014ResponseType\022\007\n\003OUT\020\000\022\007\n\003ERR\020" +
    "\001\022\010\n\004EXIT\020\0022F\n\013MVNDService\0227\n\006invoke\022\023.m" +
    "vnd.InvokeRequest\032\024.mvnd.InvokeResponse\"" +
    "\0000\001B$\n\025com.mercateo.oss.mvndB\tMVNDProtoP" +
    "\000b\006proto3"
  };
  com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          return null;
        }
      };
  com.google.protobuf.Descriptors.FileDescriptor
    .internalBuildGeneratedFileFrom(descriptorData,
      new com.google.protobuf.Descriptors.FileDescriptor[] {
      }, assigner);
  // Bind each message's descriptor and field-accessor table by message index.
  internal_static_mvnd_InvokeRequest_descriptor =
    getDescriptor().getMessageTypes().get(0);
  internal_static_mvnd_InvokeRequest_fieldAccessorTable = new
    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
      internal_static_mvnd_InvokeRequest_descriptor,
      new java.lang.String[] { "WorkDir", "Args", });
  internal_static_mvnd_InvokeResponse_descriptor =
    getDescriptor().getMessageTypes().get(1);
  internal_static_mvnd_InvokeResponse_fieldAccessorTable = new
    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
      internal_static_mvnd_InvokeResponse_descriptor,
      new java.lang.String[] { "Type", "Line", "ErrorCode", });
}
// @@protoc_insertion_point(outer_class_scope)
}
|
<reponame>livingston/react-chart-spikes<filename>src/d3Charts/index.js<gh_stars>0
import React, { Component } from 'react';
import Scatter from './scatter.js';
import Stack from './stack.js';
const D3Charts = () => (<section className="d3 page">
<Scatter />
{/* <Stack /> */}
</section>);
export default D3Charts;
|
package all
import (
"strings"
"testing"
"github.com/Shopify/kubeaudit"
"github.com/Shopify/kubeaudit/auditors/apparmor"
"github.com/Shopify/kubeaudit/auditors/asat"
"github.com/Shopify/kubeaudit/auditors/capabilities"
"github.com/Shopify/kubeaudit/auditors/hostns"
"github.com/Shopify/kubeaudit/auditors/image"
"github.com/Shopify/kubeaudit/auditors/limits"
"github.com/Shopify/kubeaudit/auditors/netpols"
"github.com/Shopify/kubeaudit/auditors/nonroot"
"github.com/Shopify/kubeaudit/auditors/privesc"
"github.com/Shopify/kubeaudit/auditors/privileged"
"github.com/Shopify/kubeaudit/auditors/rootfs"
"github.com/Shopify/kubeaudit/auditors/seccomp"
"github.com/Shopify/kubeaudit/config"
"github.com/Shopify/kubeaudit/internal/test"
"github.com/stretchr/testify/require"
)
const fixtureDir = "../../internal/test/fixtures/all_resources"
// TestAuditAll audits every fixture with the full default auditor set and
// expects exactly the errors listed below, in both manifest and local mode.
func TestAuditAll(t *testing.T) {
	// Errors expected from the default auditors against the all_resources fixtures.
	allErrors := []string{
		apparmor.AppArmorAnnotationMissing,
		asat.AutomountServiceAccountTokenTrueAndDefaultSA,
		capabilities.CapabilityOrSecurityContextMissing,
		hostns.NamespaceHostNetworkTrue,
		hostns.NamespaceHostIPCTrue,
		hostns.NamespaceHostPIDTrue,
		image.ImageTagMissing,
		limits.LimitsNotSet,
		netpols.MissingDefaultDenyIngressAndEgressNetworkPolicy,
		nonroot.RunAsNonRootPSCNilCSCNil,
		privesc.AllowPrivilegeEscalationNil,
		privileged.PrivilegedNil,
		rootfs.ReadOnlyRootFilesystemNil,
		seccomp.SeccompAnnotationMissing,
	}
	allAuditors, err := Auditors(config.KubeauditConfig{})
	require.NoError(t, err)
	for _, file := range test.GetAllFileNames(t, fixtureDir) {
		// This line is needed because of how scopes work with parallel tests (see https://gist.github.com/posener/92a55c4cd441fc5e5e85f27bca008721)
		file := file
		t.Run(file, func(t *testing.T) {
			t.Parallel()
			test.AuditMultiple(t, fixtureDir, file, allAuditors, allErrors, "", test.MANIFEST_MODE)
			// Local mode namespaces by the fixture's base name (file name before the first dot).
			test.AuditMultiple(t, fixtureDir, file, allAuditors, allErrors, strings.Split(file, ".")[0], test.LOCAL_MODE)
		})
	}
}
// TestFixAll runs autofix with every auditor over each fixture and verifies
// that no error-severity finding remains in the post-fix report.
func TestFixAll(t *testing.T) {
	allAuditors, err := Auditors(config.KubeauditConfig{})
	require.NoError(t, err)
	files := test.GetAllFileNames(t, fixtureDir)
	for _, file := range files {
		t.Run(file, func(t *testing.T) {
			_, report := test.FixSetupMultiple(t, fixtureDir, file, allAuditors)
			for _, result := range report.Results() {
				for _, auditResult := range result.GetAuditResults() {
					require.NotEqual(t, kubeaudit.Error, auditResult.Severity)
				}
			}
		})
	}
}
// TestAllWithConfig enables only the apparmor and seccomp auditors via config
// and verifies that only their errors are reported (manifest mode).
func TestAllWithConfig(t *testing.T) {
	enabledAuditors := []string{
		apparmor.Name, seccomp.Name,
	}
	expectedErrors := []string{
		apparmor.AppArmorAnnotationMissing,
		seccomp.SeccompAnnotationMissing,
	}
	conf := config.KubeauditConfig{
		EnabledAuditors: enabledAuditorsToMap(enabledAuditors),
	}
	auditors, err := Auditors(conf)
	require.NoError(t, err)
	for _, file := range test.GetAllFileNames(t, fixtureDir) {
		t.Run(file, func(t *testing.T) {
			test.AuditMultiple(t, fixtureDir, file, auditors, expectedErrors, "", test.MANIFEST_MODE)
		})
	}
}
// enabledAuditorsToMap expands a list of enabled auditor names into the full
// auditor→enabled map: every known auditor defaults to false, then the
// requested names are switched on.
func enabledAuditorsToMap(enabledAuditors []string) map[string]bool {
	auditorEnabled := make(map[string]bool, len(AuditorNames))
	for _, name := range AuditorNames {
		auditorEnabled[name] = false
	}
	for _, name := range enabledAuditors {
		auditorEnabled[name] = true
	}
	return auditorEnabled
}
|
#!/bin/bash
# Strict mode, fail on any error
set -euo pipefail

# Export Kafka-over-Event-Hubs connection settings for downstream scripts.
# Variable expansions are quoted so resource/namespace names containing
# special characters do not undergo word splitting (with set -u an unset
# RESOURCE_GROUP/EVENTHUB_NAMESPACE still fails fast).
EVENTHUB_CS=$(az eventhubs namespace authorization-rule keys list -g "$RESOURCE_GROUP" --namespace-name "$EVENTHUB_NAMESPACE" --name RootManageSharedAccessKey --query "primaryConnectionString" -o tsv)
# NOTE(review): eh_resource is not used below — presumably consumed by a
# script that sources this file; confirm before removing.
eh_resource=$(az resource show -g "$RESOURCE_GROUP" --resource-type Microsoft.EventHub/namespaces -n "$EVENTHUB_NAMESPACE" --query id -o tsv)
export KAFKA_BROKERS="$EVENTHUB_NAMESPACE.servicebus.windows.net:9093"
export KAFKA_SECURITY_PROTOCOL=SASL_SSL
export KAFKA_SASL_MECHANISM=PLAIN
# For running outside of Databricks: org.apache.kafka.common.security.plain.PlainLoginModule
# For running within Databricks: kafkashaded.org.apache.kafka.common.security.plain.PlainLoginModule
loginModule="org.apache.kafka.common.security.plain.PlainLoginModule"
loginModuleDatabricks="kafkashaded.$loginModule"
export KAFKA_SASL_JAAS_CONFIG="$loginModule required username=\"\$ConnectionString\" password=\"$EVENTHUB_CS\";"
export KAFKA_SASL_JAAS_CONFIG_DATABRICKS="$loginModuleDatabricks required username=\"\$ConnectionString\" password=\"$EVENTHUB_CS\";"
|
<reponame>wolfchinaliu/gameCenter
package org.jeewx.api.wxbase.wxserviceip;
import java.util.ArrayList;
import java.util.List;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import org.jeewx.api.core.exception.WexinReqException;
import org.jeewx.api.core.req.WeiXinReqService;
import org.jeewx.api.core.req.model.ServiceIP;
import org.jeewx.api.core.util.WeiXinConstant;
/**
 * WeChat API helper: fetches the list of WeChat callback-server IPs.
 *
 * @author lizr
 */
public class JwServiceIpAPI {
	// Name of the JSON array in the WeChat response that carries the IPs.
	public static String RETURN_INFO_NAME = "ip_list";

	/**
	 * Fetches the WeChat server IP list.
	 *
	 * @param accessToke access token for the request (parameter name kept as in the original API)
	 * @return the IP strings, or null when the response carries an error code
	 * @throws WexinReqException if the underlying request fails
	 */
	public static List<String> getServiceIpList(String accessToke) throws WexinReqException{
		ServiceIP param = new ServiceIP();
		param.setAccess_token(accessToke);
		JSONObject result = WeiXinReqService.getInstance().doWeinxinReqJson(param);
		// An error code in the response means the call failed; null is returned then.
		Object error = result.get(WeiXinConstant.RETURN_ERROR_INFO_CODE);
		List<String> lstServiceIp = null;
		if(error == null){
			JSONArray infoArray = result.getJSONArray(RETURN_INFO_NAME);
			lstServiceIp = new ArrayList<String>(infoArray.size());
			for(int i=0;i<infoArray.size();i++){
				lstServiceIp.add(infoArray.getString(i));
			}
		}
		return lstServiceIp;
	}

	// Ad-hoc manual test; the token placeholder must be replaced with a real token.
	public static void main(String[] args){
		try {
			List<String> s = JwServiceIpAPI.getServiceIpList("<KEY>CJHE7v7uF_l1hI6qi6QBsA");
			System.out.println(s);
		} catch (WexinReqException e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
		}
	}
}
|
<gh_stars>10-100
package com.readytalk.swt.helpers;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Shell;
/**
* Helper methods to answer common ancestry questions related to SWT components.<br/>
* <br/>
* Methods in this class should be <code>static</code>.
*/
public class AncestryHelper {
	/**
	 * Helper method to determine the shell an SWT <code>Control</code> element belongs to.
	 *
	 * @param control The <code>Control</code> element you want to determine what Shell it belongs to.
	 * @return The <code>Shell</code> that the <code>Control</code> belongs to, the control itself
	 *         if it already is a <code>Shell</code>, or <code>null</code> if <code>control</code>
	 *         is <code>null</code> or has no <code>Shell</code> ancestor.
	 */
	public static Shell getShellFromControl(Control control) {
		if (control == null) {
			return null;
		} else if (control instanceof Shell) {
			return (Shell) control;
		}
		Control currentControl = control;
		// Walk up the parent chain until a Shell is found. The null check fixes a
		// NullPointerException in the previous version: a control without a Shell
		// ancestor eventually returns null from getParent(), and the old loop then
		// dereferenced it.
		while (currentControl != null && !(currentControl instanceof Shell)) {
			currentControl = currentControl.getParent();
		}
		return (Shell) currentControl;
	}
}
|
/* Furthest point sampling
* Original author: <NAME>
* Modified by <NAME>
* All Rights Reserved. 2017.
*/
#include "tensorflow/core/framework/common_shape_fns.h"
#include "tensorflow/core/framework/op.h"
#include "tensorflow/core/framework/op_kernel.h"
#include "tensorflow/core/framework/shape_inference.h"
using namespace tensorflow;
// Registers the "Open3DBallQuery" TensorFlow op: given points `xyz` and query
// `center`s with attrs `radius`/`nsample`, produces int32 neighbor indices.
// Shape inference only enforces rank 3 on input 0 and derives the output as
// (dim0(xyz), dim1(xyz), nsample).
// NOTE(review): the inline comment labels dims1 as batch_size * nsample * 3 —
// confirm the intended input layout against the kernel implementation.
REGISTER_OP("Open3DBallQuery")
    .Input("xyz: float32")
    .Input("center: float32")
    .Attr("radius: float")
    .Attr("nsample: int")
    .Output("out: int32")
    .SetShapeFn([](::tensorflow::shape_inference::InferenceContext* c) {
      ::tensorflow::shape_inference::ShapeHandle
          dims1; // batch_size * nsample * 3
      TF_RETURN_IF_ERROR(c->WithRank(c->input(0), 3, &dims1));
      int nsample;
      TF_RETURN_IF_ERROR(c->GetAttr("nsample", &nsample));
      // Output: first two dims of xyz, then nsample gathered indices.
      ::tensorflow::shape_inference::ShapeHandle output =
          c->MakeShape({c->Dim(dims1, 0), c->Dim(dims1, 1), nsample});
      c->set_output(0, output);
      return Status::OK();
    })
    .Doc(R"doc( TODO )doc");
|
# Start the static file server in the background, serving the current directory.
node http-file-server.js . &
# Give the server a moment to bind its port before issuing requests.
sleep 1
echo '- - - - - - - - - - - - - - - - - - - -'
# Request a file inside the served directory.
node http-request-client.js /files/a.txt
echo '- - - - - - - - - - - - - - - - - - - -'
# Request a path outside the served root — presumably exercises the server's
# path handling; confirm intent against http-request-client.js.
node http-request-client.js ../index.html
# Stop the background server (job %1).
kill %1
|
<gh_stars>1-10
package com.wixpress.dst.greyhound.core.producer
import java.util.concurrent.TimeUnit
import java.util.concurrent.TimeUnit.MILLISECONDS
import _root_.zio.blocking.Blocking
import com.wixpress.dst.greyhound.core.PartitionInfo
import com.wixpress.dst.greyhound.core.metrics.{GreyhoundMetric, GreyhoundMetrics}
import com.wixpress.dst.greyhound.core.producer.ProducerMetric._
import zio.clock.{currentTime, Clock}
import zio.{Chunk, IO, RIO, ULayer, ZIO}
import GreyhoundMetrics._
import scala.concurrent.duration.FiniteDuration
/**
 * Decorator around a ProducerR that reports Greyhound metrics for every
 * async produce and partitions-for lookup, tagging each metric with the
 * wrapped producer's attributes plus `extraAttributes`.
 */
case class ReportingProducer[-R](internal: ProducerR[R], extraAttributes: Map[String, String])
    extends ProducerR[GreyhoundMetrics with Clock with R] {

  // Delegates to the shared reporting wrapper so the produce is timed and reported.
  override def produceAsync(
    record: ProducerRecord[Chunk[Byte], Chunk[Byte]]
  ): ZIO[Blocking with Clock with GreyhoundMetrics with R, ProducerError, IO[ProducerError, RecordMetadata]] =
    ReportingProducer.reporting[R](internal.produceAsync)(record, attributes)

  // extraAttributes win over the wrapped producer's attributes on key collision.
  override def attributes: Map[String, String] = internal.attributes ++ extraAttributes

  override def partitionsFor(topic: String): RIO[R with Blocking with Clock with GreyhoundMetrics, Seq[PartitionInfo]] =
    internal
      .partitionsFor(topic)
      .reporting(ProducerGotPartitionsInfo(topic, attributes, _))
}
object ReportingProducer {
  type Dependencies = ULayer[GreyhoundMetrics with zio.ZEnv]

  def apply[R](internal: ProducerR[R], attributes: (String, String)*): ReportingProducer[R] =
    new ReportingProducer(internal, attributes.toMap)

  // Wraps a produceAsync call with metric reporting:
  //  - ProducingRecord before the produce is issued,
  //  - RecordProduced (with wall-clock duration since `started`) on success,
  //  - ProduceFailed on failure.
  // The callbacks are memoized with the captured `env` provided, presumably
  // because they run inside the returned inner IO outside this effect's
  // environment — confirm against ProducerR's contract.
  def reporting[R](
    produceAsync: ProducerRecord[Chunk[Byte], Chunk[Byte]] => ZIO[
      Blocking with Clock with R,
      ProducerError,
      IO[ProducerError, RecordMetadata]
    ]
  )(
    record: ProducerRecord[Chunk[Byte], Chunk[Byte]],
    attributes: Map[String, String] = Map.empty
  ): ZIO[Blocking with Clock with GreyhoundMetrics with R, ProducerError, IO[ProducerError, RecordMetadata]] = {
    for {
      started <- currentTime(TimeUnit.MILLISECONDS)
      env <- ZIO.environment[Clock with GreyhoundMetrics with Blocking]
      _ <- GreyhoundMetrics.report(ProducingRecord(record, attributes))
      onError <- ZIO.memoize((error: ProducerError) => GreyhoundMetrics.report(ProduceFailed(error, record.topic, attributes)).provide(env))
      onSuccess <-
        ZIO.memoize((metadata: RecordMetadata) =>
          currentTime(TimeUnit.MILLISECONDS)
            .flatMap(ended =>
              GreyhoundMetrics.report(RecordProduced(record, metadata, attributes, FiniteDuration(ended - started, MILLISECONDS)))
            )
            .provide(env)
        )
      promise <- produceAsync(record).map(_.tapBoth(onError, onSuccess))
    } yield promise
  }
}
// Metric events emitted by ReportingProducer.
sealed trait ProducerMetric extends GreyhoundMetric

object ProducerMetric {
  // Emitted just before a record is handed to the underlying producer.
  case class ProducingRecord(record: ProducerRecord[Chunk[Byte], Chunk[Byte]], attributes: Map[String, String]) extends ProducerMetric

  // Emitted on successful produce, including the end-to-end duration.
  case class RecordProduced(
    record: ProducerRecord[Chunk[Byte], Chunk[Byte]],
    metadata: RecordMetadata,
    attributes: Map[String, String],
    duration: FiniteDuration
  ) extends ProducerMetric

  // Emitted when the produce fails.
  case class ProduceFailed(error: ProducerError, topic: String, attributes: Map[String, String]) extends ProducerMetric

  // Emitted for partitionsFor lookups, carrying the (possibly failed) result.
  case class ProducerGotPartitionsInfo(topic: String, attributes: Map[String, String], result: MetricResult[Throwable, Seq[PartitionInfo]])
      extends ProducerMetric
}
|
<reponame>oueya1479/OpenOLAT
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.modules.ceditor.ui.component;
import org.olat.core.gui.components.Component;
import org.olat.core.gui.components.DefaultComponentRenderer;
import org.olat.core.gui.components.form.flexible.impl.NameValuePair;
import org.olat.core.gui.components.velocity.VelocityContainer;
import org.olat.core.gui.render.StringOutput;
import org.olat.core.gui.render.URLBuilder;
import org.olat.core.gui.translator.Translator;
/**
 * Base renderer for content-editor components. Every edit control shares the
 * same markup shape — an anchor whose onclick fires an XHR event carrying a
 * command id and the component's fragment name — so the per-control renderers
 * delegate to a single private helper instead of repeating the markup.
 *
 * Initial date: 6 déc. 2019<br>
 * @author srosse, <EMAIL>, http://www.frentix.com
 *
 */
public abstract class AbstractContentEditorComponentRenderer extends DefaultComponentRenderer {

	/**
	 * Renders one command anchor.
	 *
	 * @param idPrefix element-id prefix, completed with the component's dispatch id
	 * @param command value sent as the velocity command id
	 * @param cssClass selenium/test marker class on the anchor
	 * @param title translated tooltip text
	 * @param iconCss CSS classes of the icon element
	 * @param label translated text label after the icon, or {@code null} for icon-only links
	 */
	private void renderCommandLink(StringOutput sb, Component cmp, URLBuilder ubu, String idPrefix,
			String command, String cssClass, String title, String iconCss, String label) {
		sb.append("<a id='").append(idPrefix).append(cmp.getDispatchID()).append("' ")
			.append("href='javascript:;' onclick=\"");// add elements directly in container
		ubu.buildXHREvent(sb, "", false, true,
				new NameValuePair(VelocityContainer.COMMAND_ID, command),
				new NameValuePair("fragment", cmp.getComponentName())); // EditorFragment cmpFragment.getCmpId()
		sb.append(" return false;\" class='").append(cssClass).append("' title='").append(title)
			.append("'><i class='").append(iconCss).append("'> </i>");
		if(label != null) {
			sb.append(" <span>").append(label).append("</span>");
		}
		sb.append("</a>");
	}

	/** Icon-only "add element above" control, wrapped in its positioning div. */
	protected void renderAddAbove(StringOutput sb, Component cmp, URLBuilder ubu, Translator translator) {
		sb.append("<div class='o_page_add_above'>");
		renderCommandLink(sb, cmp, ubu, "o_ccaab_", "add_element_above", "o_sel_add_element_above",
				translator.translate("add.element"), "o_icon o_icon_add", null);
		sb.append("</div>");
	}

	/** Icon-only "add element below" control, wrapped in its positioning div. */
	protected void renderAddBelow(StringOutput sb, Component cmp, URLBuilder ubu, Translator translator) {
		sb.append("<div class='o_page_add_below'>");
		renderCommandLink(sb, cmp, ubu, "o_ccabe_", "add_element_below", "o_sel_add_element_below",
				translator.translate("add.element"), "o_icon o_icon_add", null);
		sb.append("</div>");
	}

	/** Move up/down controls; renders an empty placeholder when the fragment is not moveable. */
	protected void renderPageUpDown(StringOutput sb, ContentEditorFragment cmp, URLBuilder ubu, Translator translator) {
		if(cmp.isMoveable()) {
			sb.append("<div class='o_page_tools o_page_tools_dd'>");
			renderCommandLink(sb, cmp, ubu, "o_ccup_", "move_up", "o_sel_move_up_element",
					translator.translate("move.up"), "o_icon o_icon-sm o_icon_move_up", null);
			renderCommandLink(sb, cmp, ubu, "o_ccdown_", "move_down", "o_sel_move_down_element",
					translator.translate("move.down"), "o_icon o_icon-sm o_icon_move_down", null);
			sb.append("</div>");
		} else {
			sb.append("<div class='o_page_tools'> </div>");
		}
	}

	/** "Save and close" control with a text label. */
	protected void renderClose(StringOutput sb, ContentEditorFragment cmp, URLBuilder ubu, Translator translator) {
		renderCommandLink(sb, cmp, ubu, "o_ccclose_", "save_element", "o_sel_save_element",
				translator.translate("save.and.close"), "o_icon o_icon-sm o_icon_close",
				translator.translate("save.and.close"));
	}

	/** "Delete" control with a text label; skipped when the fragment is not deleteable. */
	protected void renderDelete(StringOutput sb, ContentEditorFragment cmp, URLBuilder ubu, Translator translator) {
		if(cmp.isDeleteable()) {
			renderCommandLink(sb, cmp, ubu, "o_ccdelete_", "delete_element", "o_sel_delete_element",
					translator.translate("delete"), "o_icon o_icon-sm o_icon_delete_item",
					translator.translate("delete"));
		}
	}

	/** "Duplicate" control with a text label; skipped when the fragment is not cloneable. */
	protected void renderDuplicate(StringOutput sb, ContentEditorFragment cmp, URLBuilder ubu, Translator translator) {
		if(cmp.isCloneable()) {
			renderCommandLink(sb, cmp, ubu, "o_ccclone_", "clone_element", "o_sel_clone_element",
					translator.translate("duplicate"), "o_icon o_icon-sm o_icon_copy",
					translator.translate("duplicate"));
		}
	}
}
|
<reponame>Mdamman/APP_MakeTheChange
import { IonicModule } from "@ionic/angular";
import { RouterModule } from "@angular/router";
import { NgModule } from "@angular/core";
import { CommonModule } from "@angular/common";
import { ContactCardPage } from "./contact-card.page";
import { ComponentsModule } from "../components/components.module";
import { AngularFireModule } from "@angular/fire";
import { environment } from "../../environments/environment";
import { DonateService } from "../donate/donate.service";
// Lazy-loaded Angular module for the contact-card page: registers the page
// component, its empty-path child route, shared UI components, and Firebase.
// NOTE(review): AngularFireModule.initializeApp is called here — if another
// module also initializes Firebase, confirm double initialization is intended.
@NgModule({
  imports: [
    IonicModule,
    CommonModule,
    ComponentsModule,
    RouterModule.forChild([{ path: "", component: ContactCardPage }]),
    AngularFireModule.initializeApp(environment.firebase),
  ],
  declarations: [ContactCardPage],
  // DonateService is scoped to this lazy module rather than provided in root.
  providers: [DonateService],
})
export class ContactCardPageModule {}
|
<reponame>camplight/hylo-evo<filename>src/routes/Search/Search.connector.test.js
import { mergeProps } from './Search.connector'
// Unit tests for Search.connector's mergeProps: the merged fetchSearchResults /
// fetchMoreSearchResults must delegate to the debounced dispatch prop with the
// correct arguments, and respect hasMore.
describe('mergeProps', () => {
  it('populates fetchSearchResults and fetchMoreSearchResults', () => {
    const stateProps = {
      searchResults: [1, 2, 3],
      filter: 'all',
      searchForInput: 're',
      hasMore: true
    }
    const fetchSearchResultsDebounced = jest.fn()
    const dispatchProps = {
      fetchSearchResultsDebounced
    }
    const merged = mergeProps(stateProps, dispatchProps, {})
    expect(fetchSearchResultsDebounced).not.toHaveBeenCalled()
    merged.fetchSearchResults()
    // Initial fetch: no offset is sent.
    expect(fetchSearchResultsDebounced).toHaveBeenCalledWith(
      {
        search: stateProps.searchForInput,
        filter: stateProps.filter
      })
    fetchSearchResultsDebounced.mockClear()
    merged.fetchMoreSearchResults()
    // Paging fetch: offset equals the number of results already loaded.
    expect(fetchSearchResultsDebounced).toHaveBeenCalledWith(
      {
        search: stateProps.searchForInput,
        filter: stateProps.filter,
        offset: stateProps.searchResults.length
      })
    fetchSearchResultsDebounced.mockClear()
    // When hasMore is false, fetching more must be a no-op.
    const merged2 = mergeProps({ ...stateProps, hasMore: false }, dispatchProps, {})
    merged2.fetchMoreSearchResults()
    expect(fetchSearchResultsDebounced).not.toHaveBeenCalled()
  })
})
|
package com.krrrr38.mackerel4s
package api
import org.scalatest._
import org.scalatest.concurrent.ScalaFutures._
// Exercises RoleAPI.listRoles against the mock API server; the expected
// payload lives in src/test/resources/api/GET/v0/services/service_name/roles.json.
class RoleAPISpec extends MockApiServerFun with Matchers {

  // Minimal client wired to the mock server with blank credentials.
  object MockHostAPI
      extends RoleAPI
      with MackerelClientBase {
    override val setting: ClientSetting = mockSetting
    override val apiKey: String = ""
    override val userAgent: String = ""
  }

  describe("get role list through api") {
    it("return roles") {
      val futureResponse =
        MockHostAPI.listRoles("service_name")
          .run
      whenReady(futureResponse, patience) { res =>
        // see src/test/resources/api/GET/v0/services/service_name/roles.json
        res.roles.size shouldBe 2
        val role = res.roles(0)
        role.name shouldBe "app"
        role.memo shouldBe "memo"
      }
    }
  }
}
|
import { BasicColumn } from '/@/components/Table';
import { FormSchema } from '/@/components/Table';
import { h } from 'vue';
import { Tag } from 'ant-design-vue';
// Table columns for the warehouse list. Titles are user-facing Chinese labels.
export const columns: BasicColumn[] = [
  {
    // Warehouse code.
    title: '编码',
    dataIndex: 'fnumber',
    width: 160,
    align: 'left',
  },
  {
    // Warehouse name.
    title: '名称',
    dataIndex: 'fname',
    width: 160,
    align: 'left',
  },
  {
    // Sort order; server-side sortable.
    title: '排序',
    dataIndex: 'sort',
    width: 50,
    sorter: true,
  },
  {
    // "Scattered warehouse" flag, rendered as a colored tag.
    title: '是否零散库',
    dataIndex: 'is_scattered',
    width: 80,
    customRender: ({ record }) => {
      const status = record.is_scattered;
      // ~~ coerces the stored value to an integer; 1 means "yes".
      const enable = ~~status === 1;
      const color = enable ? 'green' : 'red';
      const text = enable ? '是' : '否';
      return h(Tag, { color: color }, () => text);
    },
  },
  {
    // Creation timestamp.
    title: '创建时间',
    dataIndex: 'create_time',
    width: 180,
  },
];
// Search form above the table: filter by warehouse name only.
export const searchFormSchema: FormSchema[] = [
  {
    field: 'fname',
    label: '仓库名称',
    component: 'Input',
    colProps: { span: 8 },
  },
];
// Create/edit form. `id` is hidden; its default of 0 signals "create new".
export const formSchema: FormSchema[] = [
  {
    field: 'id',
    label: 'ID',
    component: 'Input',
    defaultValue: 0,
    show: false,
  },
  {
    // Warehouse name.
    field: 'fname',
    label: '仓库名称',
    component: 'Input',
    required: true,
  },
  {
    // Warehouse code.
    field: 'fnumber',
    label: '仓库编码',
    component: 'Input',
    required: true,
  },
  {
    // Sort order; higher/lower placement depends on backend ordering.
    field: 'sort',
    label: '排序',
    component: 'InputNumber',
    defaultValue: 100,
    required: true,
  },
  {
    // Radio pair; note the "no" value is -1 (not 0), matching the backend encoding.
    field: 'is_scattered',
    label: '是否零散库',
    component: 'RadioButtonGroup',
    defaultValue: -1,
    componentProps: {
      options: [
        { label: '是', value: 1 },
        { label: '否', value: -1 },
      ],
    },
    required: true,
  },
];
|
const
  parseArgs = require('minimist'),
  path = require('path'),
  { writeFileSync } = require('fs-extra')

// Parse CLI flags; only --help/-h is recognized.
const argv = parseArgs(process.argv.slice(2), {
  alias: {
    h: 'help'
  },
  boolean: ['h']
})

if (argv.help) {
  console.log(`
  Description
  Tauri dev.
  Usage
  $ tauri dev
  Options
  --help, -h Displays this message
  `)
  process.exit(0)
}

// Build a debug-mode tauri config and run it through the generator/entry
// pipeline before starting the dev runner.
const appPaths = require('../helpers/app-paths'),
  Runner = require('../runner'),
  tauri = new Runner(appPaths),
  tauriConfig = require('../helpers/tauri-config')({
    ctx: {
      debug: true
    }
  })

// BUG FIX: the previous version read `cfgDir = injector.configDir()`, but
// `injector` was never defined, so the script crashed with a ReferenceError
// before doing anything; the destructured `bundle`/`cfg` values were also
// unused. The whole statement is removed.
require('../generator').generate(tauriConfig.tauri)
require('../entry').generate(appPaths.tauriDir, tauriConfig, true)

tauri.run(tauriConfig)
|
/**
 * Returns the first longest space-separated word in `str`.
 * Ties keep the earlier word; an empty string yields ''.
 */
function longestWord(str) {
  return str
    .split(' ')
    .reduce((best, candidate) => (candidate.length > best.length ? candidate : best), '');
}
|
#!/bin/bash
set -euo pipefail

# Download the dog DNS client release, unpack it, and install the binary
# into ~/bin, then clean up the download artifacts.
echo -e "\e[31m###########################\e[0m"
echo -e "\e[31mInstalling dog\e[0m"
echo -e "\e[31m###########################\e[0m"
curl -L -o dog.zip https://github.com/ogham/dog/releases/download/v0.1.0/dog-v0.1.0-x86_64-unknown-linux-gnu.zip
mkdir dog
unzip dog.zip -d dog
# Fix: ensure the destination exists — mv previously failed (or created a
# file literally named "bin") on machines without a ~/bin directory.
mkdir -p "${HOME}/bin"
mv dog/bin/dog "${HOME}/bin"
rm dog.zip
rm -rf dog
|
package prospector.routiduct.gui;
/**
* File Created by Prospector.
*/
/**
 * Contract for GUI screens whose on-screen placement can be adjusted via
 * per-axis offset factors.
 */
public interface IDynamicAdjustmentGUI {
	// Interface methods are implicitly public; the redundant modifiers were removed.

	/** @return the horizontal offset factor for positioning this GUI */
	int getOffsetFactorX();

	/** @return the vertical offset factor for positioning this GUI */
	int getOffsetFactorY();
}
|
#!/bin/sh
# Run the image named after the current directory, mounting the project at
# /home/<dirname>. The mount spec is quoted so paths containing spaces are
# passed to docker intact; the image name (${PWD##*/}) is assumed to hold no
# whitespace since docker image names cannot contain it.
docker run -v "$PWD:/home/${PWD##*/}" -it "${PWD##*/}"
|
#include "device.hpp"
#include "quantumvk/utils/bitops.hpp"
#include "quantumvk/utils/hash.hpp"
namespace Vulkan
{
// Allocates a Shader wrapper for the given SPIR-V code (num_words 32-bit
// words) out of the device's handle pool.
ShaderHandle Device::CreateShader(size_t num_words, const uint32_t* code)
{
	return ShaderHandle(handle_pool.shaders.allocate(this, code, num_words));
}
// Creates a graphics program and registers it in active_programs, reusing a
// slot previously freed by UpdateInvalidProgramsNoLock when one is available.
ProgramHandle Device::CreateGraphicsProgram(const GraphicsProgramShaders& shaders)
{
	ProgramHandle program = ProgramHandle(handle_pool.programs.allocate(this, shaders));
#ifdef QM_VULKAN_MT
	// Guards active_programs/invalid_programs when multithreading is compiled in.
	std::lock_guard holder_{ lock.program_lock };
#endif
	if (!invalid_programs.empty())
	{
		// Reuse the most recently invalidated slot.
		active_programs[invalid_programs.back()] = program;
		invalid_programs.pop_back();
	}
	else
	{
		active_programs.push_back(program);
	}
	return program;
}
// Compute-shader counterpart of CreateGraphicsProgram; same slot-reuse
// registration logic (kept in sync manually — the two differ only in the
// shaders parameter type).
ProgramHandle Device::CreateComputeProgram(const ComputeProgramShaders& shaders)
{
	ProgramHandle program = ProgramHandle(handle_pool.programs.allocate(this, shaders));
#ifdef QM_VULKAN_MT
	// Guards active_programs/invalid_programs when multithreading is compiled in.
	std::lock_guard holder_{ lock.program_lock };
#endif
	if (!invalid_programs.empty())
	{
		// Reuse the most recently invalidated slot.
		active_programs[invalid_programs.back()] = program;
		invalid_programs.pop_back();
	}
	else
	{
		active_programs.push_back(program);
	}
	return program;
}
// Releases programs whose only remaining reference is the device's own entry
// in active_programs, recording their slot indices for reuse by Create*Program.
// NOTE(review): despite the "NoLock" suffix, this function does acquire
// lock.program_lock under QM_VULKAN_MT — confirm the intended naming/contract.
void Device::UpdateInvalidProgramsNoLock()
{
#ifdef QM_VULKAN_MT
	std::lock_guard holder_{ lock.program_lock };
#endif
	// Always called inside device
	for (uint32_t i = 0; i < active_programs.size(); i++)
	{
		ProgramHandle& program = active_programs[i];
		if(program)
			// If this is the only reference left
			if (program->GetRefCount() == 1)
			{
				program.Reset();
				invalid_programs.push_back(i);
			}
	}
}
}
|
#! /usr/bin/env bash
# Copyright (c) 2018 Herbert Shen <ishbguy@hotmail.com> All Rights Reserved.
# Released under the terms of the MIT License.

# source guard: makes this file safe to source more than once.
[[ $MKDIRS_SOURCED -eq 1 ]] && return
declare -r MKDIRS_SOURCED=1
# Absolute path of this script and its directory, for use by sourcing callers.
declare -r MKDIRS_ABS_SRC="$(realpath "${BASH_SOURCE[0]}")"
declare -r MKDIRS_ABS_DIR="$(dirname "$MKDIRS_ABS_SRC")"
# mkdirs CACHE DIR...
# Creates each DIR (parents included) beneath CACHE. Returns 1 when CACHE is
# empty or missing.
mkdirs() {
    local cache="$1"; shift
    [[ -n $cache ]] || return 1
    # 'local dir' fixes the previous version leaking 'dir' into the caller's
    # scope; the quoted path fixes word-splitting on names with spaces.
    local dir
    for dir in "$@"; do
        [[ -d "$cache/$dir" ]] || mkdir -p "$cache/$dir"
    done
}
# Invoke mkdirs with the CLI arguments when the file is executed directly
# (FUNCNAME is empty or "main") rather than sourced.
[[ ${FUNCNAME[0]} == "main" || ${FUNCNAME[0]} == '' ]] \
    && mkdirs "$@"
# vim:set ft=sh ts=4 sw=4:
|
#!/bin/bash
# Generates variables.tf from .env settings (heredoc template below).
FILE=variables.tf
source .env
aws_default_region="${AWS_DEFAULT_REGION:-us-east-1}"
# Quoted expansion: the previous unquoted test only detected the empty case by
# accident (a bare `[ -z ]` happens to be true) and broke on values containing
# whitespace.
if [ -z "${TF_PROJECT_NAME}" ]; then
    echo "'TF_PROJECT_NAME' is empty, exiting with failure."
    exit 1
fi
echo "$TF_PROJECT_NAME"
tf_spine="${TF_SPINE:-rk}"
export VARIABLES_TF=$(cat <<EOF
# Variables.tf declares has the default variables that are shared by all environments
# \$var.region, \$var.domain, \$var.tf_s3_bucket
# Read credentials from environment variables
#$ export AWS_ACCESS_KEY_ID="anaccesskey"
#$ export AWS_SECRET_ACCESS_KEY="asecretkey"
#$ export AWS_DEFAULT_REGION="us-west-2"
#$ terraform plan
provider "aws" {
profile = "\${var.aws_profile}"
region = "\${var.region}"
}
provider "aws" {
profile = "\${var.aws_profile}"
region = "us-east-1"
alias = "us-east-1"
}
provider "aws" {
profile = "\${var.aws_profile}"
region = "us-west-1"
alias = "us-west-1"
}
data "terraform_remote_state" "master_state" {
backend = "s3"
config {
bucket = "\${var.tf_s3_bucket}"
region = "\${var.region}"
key = "\${var.master_state_file}"
}
}
variable "aws_profile" {
description = "Which AWS profile is should be used? Defaults to \"default\""
default = "default"
}
variable "region" { default = "${aws_default_region}" }
# This should be changed to reflect the service / stack defined by this repo
# for example replace "ref" with "cms", "slackbot", etc
variable "stack" { default = "ref" }
variable "tf_s3_bucket" {
description = "S3 bucket Terraform can use for state"
default = "${tf_spine}-devops-state-${aws_default_region}"
}
variable "master_state_file" { default = "${TF_PROJECT_NAME}/state/base/base.tfstate" }
variable "prod_state_file" { default = "${TF_PROJECT_NAME}/state/production/production.tfstate" }
variable "staging_state_file" { default = "${TF_PROJECT_NAME}/state/staging/staging.tfstate" }
variable "dev_state_file" { default = "${TF_PROJECT_NAME}/state/dev/dev.tfstate" }
EOF
)
if [ ! -s $FILE ]; then
echo "$VARIABLES_TF"
echo "$VARIABLES_TF" > $FILE
fi
|
<reponame>imwalrus/finalProject
package co.finalproject.farm.app.user.controller;

/**
 * Placeholder controller for user-related endpoints.
 *
 * NOTE(review): no handler methods or framework annotations yet —
 * presumably web mappings will be added here; confirm intended framework.
 */
public class UserController {
}
|
import React from 'react';
import { render } from 'react-dom';
import Gameview from './gameview';

// Application entrypoint: mount the root game view into the page's
// #content container.
const mountPoint = document.querySelector('#content');
render(<Gameview />, mountPoint);
|
<gh_stars>1-10
import Component from '@ember/component';

// Top-level <main> wrapper element; forwards pointer entry to the parent
// via the optional onMouseEnter closure action.
export default Component.extend({
    tagName: 'main',
    classNames: ['gh-main'],
    ariaRole: 'main',

    mouseEnter() {
        // Read the action once and invoke it detached, exactly as the
        // original did (no implicit `this` rebinding).
        const handler = this.onMouseEnter;
        handler && handler();
    }
});
|
#!/bin/bash
# setup an up to date environment for development
# Abort on the first failing step so a broken init does not silently
# continue into the submodule updates.
set -euo pipefail

# do regular init
./scripts/init.sh

# use most recent commits from submodules
git submodule foreach "git checkout master && git pull"
|
import { hasPermission } from './task-13';
test('hasPermission([]) to equal true', () => {
expect(hasPermission([])).toBe(true);
});
test("hasPermission([], 'P1') to equal false", () => {
expect(hasPermission([], 'P1')).toBe(false);
});
test("hasPermission(['P2', 'P3'], 'P1') to equal false", () => {
expect(hasPermission(['P2', 'P3'], 'P1')).toBe(false);
});
test("hasPermission(['P2', 'P3'], 'P3') to equal true", () => {
expect(hasPermission(['P2', 'P3'], 'P3')).toBe(true);
});
|
using Microsoft.Extensions.Configuration;
/// <summary>
/// Snapshot of basic server identity values, populated from application
/// configuration at construction time.
/// </summary>
public class ServerInfo
{
    public string ServerName { get; set; }
    public string IPAddress { get; set; }
    public string OperatingSystem { get; set; }
    public string Domain { get; set; }

    private IConfiguration _configuration;

    /// <summary>
    /// Reads the well-known keys "ServerName", "IPAddress",
    /// "OperatingSystem" and "Domain" from <paramref name="configuration"/>.
    /// </summary>
    public ServerInfo(IConfiguration configuration)
    {
        _configuration = configuration;
        ServerName = configuration["ServerName"];
        IPAddress = configuration["IPAddress"];
        OperatingSystem = configuration["OperatingSystem"];
        Domain = configuration["Domain"];
    }
}
|
<reponame>yyzclyang/algae-ui
import React from 'react';
import { CodeDemo, Api } from '../CommonDispalyComponents';
import './switch.example.scss';
import CodeDemo1 from './switch.codeDemo1';
const code1 = require('!!raw-loader!./switch.codeDemo1.tsx');
import CodeDemo2 from './switch.codeDemo2';
const code2 = require('!!raw-loader!./switch.codeDemo2.tsx');
import CodeDemo3 from './switch.codeDemo3';
const code3 = require('!!raw-loader!./switch.codeDemo3.tsx');
import CodeDemo4 from './switch.codeDemo4';
const code4 = require('!!raw-loader!./switch.codeDemo4.tsx');
// Documentation/demo page for the Switch component: title and intro,
// a "when to use" note, four live code demos (basic, disabled,
// text/icon, loading) whose source is loaded via raw-loader, and the
// prop table. Each Api row is [prop name, description node, type, default].
// NOTE(review): the table lists a prop named 'check' while the default
// prop is 'defaultChecked' — confirm the real prop name isn't 'checked'.
const SwitchExample: React.FunctionComponent = () => {
  return (
    <div className="switch-example-page">
      <section>
        <h1>Switch 开关</h1>
        <p>开关选择器</p>
      </section>
      <section>
        <h2>何时使用</h2>
        <p>需要表示开关状态/两种状态之间的切换时。</p>
      </section>
      <section>
        <h2>代码演示</h2>
        <div className="code-demonstration">
          <div className="code-demo-column">
            <CodeDemo
              title="基础使用"
              content={<p>最简单的用法</p>}
              code={code1.default}
            >
              <CodeDemo1 />
            </CodeDemo>
            <CodeDemo
              title="文字和图标"
              content={<p>用文字和图标来表示当前的状态</p>}
              code={code3.default}
            >
              <CodeDemo3 />
            </CodeDemo>
          </div>
          <div className="code-demo-column">
            <CodeDemo
              title="不可用状态"
              content={<p>Switch 不可用状态</p>}
              code={code2.default}
            >
              <CodeDemo2 />
            </CodeDemo>
            <CodeDemo
              title="加载中"
              content={<p>表示相关操作正在进行中</p>}
              code={code4.default}
            >
              <CodeDemo4 />
            </CodeDemo>
          </div>
        </div>
      </section>
      <section>
        <h2>API</h2>
        <Api
          data={[
            [
              'className',
              <>
                <code>Switch</code>的类名
              </>,
              'string',
              '-'
            ],
            [
              'check',
              <>
                表示当前<code>Switch</code>的状态
              </>,
              'boolean',
              '-'
            ],
            [
              'defaultChecked',
              <>
                表示当前<code>Switch</code>的默认值。
              </>,
              'boolean',
              '-'
            ],
            [
              'disabled',
              <>
                是否禁用<code>Switch</code>
              </>,
              'boolean',
              'false'
            ],
            [
              'loading',
              <>
                表示加载状态中的<code>Switch</code>
              </>,
              'boolean',
              'false'
            ],
            [
              'onClick',
              <>
                <code>Switch</code>开关点击时的回调
              </>,
              'React.MouseEventHandler',
              '-'
            ],
            [
              'onChange',
              <>
                <code>Switch</code>状态改变时的回调,参数为即将变化到的状态值
              </>,
              '(arg1?: boolean) => void',
              '-'
            ],
            [
              'checkedEl',
              <>
                <code>Switch</code>选中状态时的内容
              </>,
              'string | React.ReactElement',
              '-'
            ],
            [
              'uncheckedEl',
              <>
                <code>Switch</code>未选中状态时的内容
              </>,
              'string | React.ReactElement',
              '-'
            ]
          ]}
        />
      </section>
    </div>
  );
};
export default SwitchExample;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.