text stringlengths 1 1.05M |
|---|
<filename>src/reducers/addresses_new.js
import * as actionType from '../actions/constants';
/*
 * Reducers for the address and pick-up-point slices of the store.
 * Every reducer follows the same pattern: on its matching action type it
 * stores the action's payload verbatim; on anything else it returns the
 * previous state unchanged.
 */

// Query params used to fetch addresses.
export function addressParams(state = '', action) {
  return action.type === actionType.ADDRESS_PARAMS ? action.params : state;
}

// True when fetching the address list failed.
export function addressesHasErrored(state = false, action) {
  return action.type === actionType.ADDRESSES_HAS_ERRORED ? action.hasErrored : state;
}

// True while the address list is being fetched.
export function addressesIsLoading(state = false, action) {
  return action.type === actionType.ADDRESSES_IS_LOADING ? action.isLoading : state;
}

// The fetched address list itself.
export function addresses(state = [], action) {
  return action.type === actionType.ADDRESSES ? action.addresses : state;
}

// True while an existing address is being updated.
export function addressesIsUpdating(state = false, action) {
  return action.type === actionType.ADDRESSES_IS_UPDATING ? action.isUpdating : state;
}

// True while a new address is being added.
export function addressesIsAdding(state = false, action) {
  return action.type === actionType.ADDRESSES_IS_ADDING ? action.isAdding : state;
}

// Flag set when the last add-address request succeeded.
export function addressesAddSuccess(state = false, action) {
  return action.type === actionType.ADDRESSES_ADD_SUCCESS ? action.addSuccess : state;
}

// Error payload from a failed add-address request.
export function addressesAddHasErrored(state = false, action) {
  return action.type === actionType.ADDRESSES_ADD_HAS_ERRORED ? action.addError : state;
}

// True when fetching pick-up points failed.
export function pickUpPointsHasErrored(state = false, action) {
  return action.type === actionType.PICK_UP_POINTS_HAS_ERRORED ? action.hasErrored : state;
}

// True while pick-up points are being fetched.
export function pickUpPointsIsLoading(state = false, action) {
  return action.type === actionType.PICK_UP_POINTS_IS_LOADING ? action.isLoading : state;
}

// The fetched pick-up-point list.
export function pickUpPoints(state = [], action) {
  return action.type === actionType.PICK_UP_POINTS ? action.points : state;
}
|
'use strict';

// Environment endpoints for the dev site.
// NOTE(review): CONFIG is not exported or referenced in this view — confirm
// how consumers pick it up (global script include vs. module system).
const CONFIG = {
  api_host: 'habdev.site:8000', // REST API host:port
  cookie_host: 'habdev.site',   // domain used when setting cookies
};
|
import React, { useState, useEffect } from 'react'
import Error from './components/Error'
import Media from './components/Media'
import UsernamesHeader from './components/Usernames header'
import { combineLists } from './services/ops'
import { Container, Row, Col, Form, Button, Table } from 'react-bootstrap'
import './custom.scss'
const App = () => {
const [user1, setuser1] = useState('')
const [user2, setuser2] = useState('')
const [user1compared, setuser1compared] = useState('') // separate state from the input box for the comparison table's header
const [user2compared, setuser2compared] = useState('')
const [combined, setcombined] = useState([])
const [loading, setloading] = useState(false)
const [checked, setchecked] = useState(true) // "anime" checkbox is default
const [mediaType, setmediaType] = useState('ANIME')
const [message, setMessage] = useState(null)
useEffect(() => { // clear message after 5 seconds
console.log('clearing message')
const timer = setTimeout(() => {
setMessage(null)
}, 5000)
return () => clearTimeout(timer)
}, [message])
const handleUser1Change = (event) => {
setuser1(event.target.value)
}
const handleUser2Change = (event) => {
setuser2(event.target.value)
}
const handleCheckedChange = (event) => {
setchecked(!checked)
setmediaType(event.target.value)
}
const verifyUserName = (nameInput) => {
if (!nameInput) {
setMessage('Please enter two usernames.')
return false
}
if (nameInput.length > 100) {
setMessage('Username must be under 100 characters.')
return false
}
console.log('verified ', nameInput)
return true
}
const compareUsers = async (event) => {
event.preventDefault()
setloading(true)
if (!verifyUserName(user1) || !verifyUserName(user2)) { return }
console.log(`Comparing ${user1} and ${user2}`)
let data
try {
data = await combineLists(user1, user2, mediaType)
if (data.includes('is not a valid username')) {
setloading(false)
setMessage(data)
return
}
} catch (err) {
setloading(false)
setMessage(err)
return
}
setuser1compared(user1)
setuser2compared(user2)
setcombined(data)
setloading(false)
}
return (
<Container>
<Row>
<Col lg={4}>
<h1>Compare media lists of two AniList users</h1>
<div className='description'>Scores are converted to a 10-point scale.</div>
<Form onSubmit={compareUsers}>
<Form.Group as={Row} controlId='enter-user1' className='pt-3'>
<Form.Label column xs={2} md={3} className='text-nowrap'>
user 1:
</Form.Label>
<Col xs={5}>
<Form.Control onChange={handleUser1Change} />
</Col>
</Form.Group>
<Form.Group as={Row} controlId='enter-user2' className='pt-3'>
<Form.Label column xs={2} md={3} className='text-nowrap'>
user 2:
</Form.Label>
<Col xs={5}>
<Form.Control onChange={handleUser2Change} />
</Col>
</Form.Group>
<Form.Group>
<Form.Check type='radio' label='anime' value='ANIME' checked={checked} onChange={handleCheckedChange} />
<Form.Check type='radio' label='manga' value='MANGA' checked={!checked} onChange={handleCheckedChange} />
</Form.Group>
<Button variant='primary' disabled={loading} type='submit' id='submit' aria-label='submit'>{loading ? 'Comparing' : 'Compare'}</Button>
</Form>
<Error message={message} />
</Col>
<Col md={6}>
<UsernamesHeader user1={user1compared} user2={user2compared} />
<h3 className='agree'>Agree</h3>
<Table aria-label='agree-media' bordered hover className='w-auto'>
<tbody>
{combined.filter((media) => media.scoreDifference <= 1)
.map((media) => <Media key={media.mediaId} media={media} user1={user1compared} user2={user2compared} mediaType={mediaType} />)}
</tbody>
</Table>
<h3 className='disagree'>Disagree</h3>
<Table aria-label='disagree-media' bordered hover className='w-auto'>
<tbody>
{combined.filter((media) => media.scoreDifference > 1)
.map((media) => <Media key={media.mediaId} media={media} user1={user1compared} user2={user2compared} mediaType={mediaType} />)}
</tbody>
</Table>
</Col>
</Row>
<Row className='pb-2'>
<Col className='justify-content-center pt-5'>This website was made by yutsi using <a href='https://react-bootstrap.github.io/' className='link-dark'>React-Bootstrap</a> and the <a href='https://bootswatch.com/cyborg/' className='link-dark'>Cyborg</a> theme with the <a href='https://anilist.gitbook.io/anilist-apiv2-docs/'>AniList API</a>. Check it out on <a href='https://github.com/yutsi/aniComp'>Github</a>.
</Col>
</Row>
</Container>
)
}
export default App
|
import ChannelWatchers from './watchers/channel';
import SettingsWatchers from './watchers/settings';
import SourceWatchers from './watchers/source';
// Root saga registry: every watcher saga the store must start at boot.
const sagas = [
  ...SourceWatchers,
  ...ChannelWatchers,
  ...SettingsWatchers,
];

/**
 * Starts every registered watcher saga.
 * @param {Function} runSaga - typically sagaMiddleware.run
 */
export default runSaga => {
  // Bug fix: the original `sagas.map(runSaga)` used map for side effects and
  // also forwarded map's (index, array) extras as arguments to each saga.
  // Invoke runSaga with the saga alone and discard the returned tasks.
  sagas.forEach((saga) => runSaga(saga));
};
import mx from '../../multiplex';
import * as mocks from './_mocks';
import {
qmodule,
qtest
} from '../../qunit';
// Test suite for the multiplex `min` operator across plain arrays and every
// collection type exposed by the library.
qmodule('linq-min');

// Selector used by the "with selector" assertions: `min` should compare the
// doubled values, so the minimum of an array starting at 1 maps to 2.
function double(t) {
    return t * 2;
}

qtest('basic "min" test', function (assert) {
    assert.equal(mx(mocks.array).min(), 1, 'min element of an array of numbers');
    assert.equal(mx(mocks.array).min(double), 2, 'min element of an array of numbers with selector');
    assert.equal(mx('test').min(), 'e', 'min element of an array of strings');
    assert.equal(mx([true, false, true]).min(), false, 'min element of an array of boolean');
    assert.equal(mx([new Date(2017, 1, 1), new Date(2018, 1, 1), new Date(2016, 1, 1)]).min().getFullYear(), 2016, 'min element of an array of dates');
    // Comparison hook: per the assertion below, objects carrying a __cmp__
    // method are compared with it.
    function cmp(b) {
        return this.val - b.val;
    }
    // Plain JS valueOf coercion should also drive the comparison.
    function valueOf() {
        return this.val;
    }
    var data1 = [
        { val: 2, __cmp__: cmp },
        { val: 3, __cmp__: cmp },
        { val: 1, __cmp__: cmp }
    ];
    var data2 = [
        { val: 2, valueOf: valueOf },
        { val: 3, valueOf: valueOf },
        { val: 1, valueOf: valueOf }
    ];
    assert.equal(mx(data1).min().val, 1, 'min element of an array of objects with __cmp__ method');
    assert.equal(mx(data2).min().val, 1, 'min element of an array of objects with valueOf method');
});

// Same expectations exercised over every mock collection type; Map-like
// collections need a selector to pick the comparable component.
qtest('collections "min" method tests', function (assert) {
    assert.equal(mocks.enumerable.min(), 1, 'min element in an enumerable');
    assert.equal(mocks.enumerable.min(double), 2, 'min element in an enumerable with predicate');
    assert.equal(mocks.collection.min(), 1, 'min element in a Collection');
    assert.equal(mocks.collection.min(double), 2, 'min element in a Collection with predicate');
    assert.equal(mocks.list.min(), 1, 'min element in a List');
    assert.equal(mocks.list.min(double), 2, 'min element in a List with predicate');
    assert.equal(mocks.readOnlyCollection.min(), 1, 'min element in a ReadOnlyCollection');
    assert.equal(mocks.readOnlyCollection.min(double), 2, 'min element in a ReadOnlyCollection with predicate');
    assert.equal(mocks.linkedList.min(), 1, 'min element in a LinkedList');
    assert.equal(mocks.linkedList.min(double), 2, 'min element in a LinkedList with predicate');
    assert.equal(mocks.hashSet.min(), 1, 'min element in a HashSet');
    assert.equal(mocks.hashSet.min(double), 2, 'min element in a HashSet with predicate');
    assert.equal(mocks.stack.min(), 1, 'min element in a Stack');
    assert.equal(mocks.stack.min(double), 2, 'min element in a Stack with predicate');
    assert.equal(mocks.queue.min(), 1, 'min element in a Queue');
    assert.equal(mocks.queue.min(double), 2, 'min element in a Queue with predicate');
    assert.equal(mocks.set.min(), 1, 'min element in a Set');
    assert.equal(mocks.set.min(double), 2, 'min element in a Set with predicate');
    assert.equal(mocks.map.min(function (t) {
        return t[0];
    }), 1, 'min element in a Map');
    assert.equal(mocks.dictionary.min(function (t) {
        return t.key;
    }), 1, 'min element in a Dictionary');
    assert.equal(mocks.lookup.min(function (t) {
        return t.key;
    }), 1, 'min element in a Lookup');
    assert.equal(mocks.sortedList.min(function (t) {
        return t.key;
    }), 1, 'min element in a SortedList');
});

// Error paths: empty source and a non-callable selector must both throw.
qtest('"min" method validations', function (assert) {
    assert.throws(function () {
        mx([]).min();
    }, 'empty collection');
    assert.throws(function () {
        mx([1]).min(1);
    }, 'non function selector');
});
<reponame>rafaeljusto/toglacier
package config_test
import (
"errors"
"testing"
"github.com/rafaeljusto/toglacier/internal/config"
)
// TestError_Error exercises config.Error.Error() for every error code, with
// and without an associated filename and wrapped low-level error.
func TestError_Error(t *testing.T) {
	// One scenario per code/field combination; expected is the exact message.
	scenarios := []struct {
		description string
		err         *config.Error
		expected    string
	}{
		{
			description: "it should show the message with filename and low level error",
			err: &config.Error{
				Filename: "example.txt",
				Code:     config.ErrorCodeReadingFile,
				Err:      errors.New("low level error"),
			},
			expected: "config: filename “example.txt”, error reading the configuration file. details: low level error",
		},
		{
			description: "it should show the message only with the filename",
			err: &config.Error{
				Filename: "example.txt",
				Code:     config.ErrorCodeReadingFile,
			},
			expected: "config: filename “example.txt”, error reading the configuration file",
		},
		{
			description: "it should show the message only with the low level error",
			err: &config.Error{
				Code: config.ErrorCodeReadingFile,
				Err:  errors.New("low level error"),
			},
			expected: "config: error reading the configuration file. details: low level error",
		},
		{
			description: "it should show the correct error message for reading configuration file problem",
			err:         &config.Error{Code: config.ErrorCodeReadingFile},
			expected:    "config: error reading the configuration file",
		},
		{
			description: "it should show the correct error message for parsing YAML problem",
			err:         &config.Error{Code: config.ErrorCodeParsingYAML},
			expected:    "config: error parsing yaml",
		},
		{
			// Bug fix: this description was a copy-paste of the YAML scenario
			// above even though it covers ErrorCodeReadingEnvVars; duplicate
			// descriptions also collide as t.Run subtest names.
			description: "it should show the correct error message for reading environment variables problem",
			err:         &config.Error{Code: config.ErrorCodeReadingEnvVars},
			expected:    "config: error reading environment variables",
		},
		{
			description: "it should show the correct error message for initializing cipher problem",
			err:         &config.Error{Code: config.ErrorCodeInitCipher},
			expected:    "config: error initializing cipher",
		},
		{
			description: "it should show the correct error message for decoding base64 problem",
			err:         &config.Error{Code: config.ErrorCodeDecodeBase64},
			expected:    "config: error decoding base64",
		},
		{
			description: "it should show the correct error message for password size problem",
			err:         &config.Error{Code: config.ErrorCodePasswordSize},
			expected:    "config: invalid password size",
		},
		{
			description: "it should show the correct error message for filling iv problem",
			err:         &config.Error{Code: config.ErrorCodeFillingIV},
			expected:    "config: error filling iv",
		},
		{
			description: "it should show the correct error message for invalid cloud type",
			err:         &config.Error{Code: config.ErrorCodeCloudType},
			expected:    "config: invalid cloud type",
		},
		{
			description: "it should show the correct error message for invalid database type",
			err:         &config.Error{Code: config.ErrorCodeDatabaseType},
			expected:    "config: invalid database type",
		},
		{
			description: "it should show the correct error message for invalid log level",
			err:         &config.Error{Code: config.ErrorCodeLogLevel},
			expected:    "config: invalid log level",
		},
		{
			description: "it should show the correct error message for invalid email format",
			err:         &config.Error{Code: config.ErrorCodeEmailFormat},
			expected:    "config: invalid email format",
		},
		{
			description: "it should show the correct error message for invalid percentage format",
			err:         &config.Error{Code: config.ErrorCodePercentageFormat},
			expected:    "config: invalid percentage format",
		},
		{
			description: "it should show the correct error message for invalid percentage range",
			err:         &config.Error{Code: config.ErrorCodePercentageRange},
			expected:    "config: invalid percentage range",
		},
		{
			description: "it should show the correct error message for invalid pattern",
			err:         &config.Error{Code: config.ErrorCodePattern},
			expected:    "config: invalid pattern",
		},
		{
			description: "it should show the correct error message for invalid scheduler format",
			err:         &config.Error{Code: config.ErrorCodeSchedulerFormat},
			expected:    "config: wrong number of space-separated values in scheduler",
		},
		{
			description: "it should show the correct error message for invalid scheduler value",
			err:         &config.Error{Code: config.ErrorCodeSchedulerValue},
			expected:    "config: invalid value in scheduler",
		},
		{
			description: "it should detect when the code doesn't exist",
			err:         &config.Error{Code: config.ErrorCode("i-dont-exist")},
			expected:    "config: unknown error code",
		},
	}
	for _, scenario := range scenarios {
		t.Run(scenario.description, func(t *testing.T) {
			if msg := scenario.err.Error(); msg != scenario.expected {
				t.Errorf("errors don't match. expected “%s” and got “%s”", scenario.expected, msg)
			}
		})
	}
}
// TestErrorEqual exercises config.ErrorEqual over every combination of
// matching/mismatching fields, nil arguments, and non-*config.Error types.
func TestErrorEqual(t *testing.T) {
	scenarios := []struct {
		description string
		err1        error
		err2        error
		expected    bool
	}{
		{
			description: "it should detect equal Error instances",
			err1: &config.Error{
				Filename: "example.txt",
				Code:     config.ErrorCodeReadingFile,
				Err:      errors.New("low level error"),
			},
			err2: &config.Error{
				Filename: "example.txt",
				Code:     config.ErrorCodeReadingFile,
				Err:      errors.New("low level error"),
			},
			expected: true,
		},
		{
			description: "it should detect when the filename is different",
			err1: &config.Error{
				Filename: "example1.txt",
				Code:     config.ErrorCodeReadingFile,
				Err:      errors.New("low level error"),
			},
			err2: &config.Error{
				Filename: "example2.txt",
				Code:     config.ErrorCodeReadingFile,
				Err:      errors.New("low level error"),
			},
			expected: false,
		},
		{
			description: "it should detect when the code is different",
			err1: &config.Error{
				Filename: "example.txt",
				Code:     config.ErrorCodeReadingFile,
				Err:      errors.New("low level error"),
			},
			err2: &config.Error{
				Filename: "example.txt",
				Code:     config.ErrorCodeParsingYAML,
				Err:      errors.New("low level error"),
			},
			expected: false,
		},
		{
			description: "it should detect when the low level error is different",
			err1: &config.Error{
				Filename: "example.txt",
				Code:     config.ErrorCodeReadingFile,
				Err:      errors.New("low level error 1"),
			},
			err2: &config.Error{
				Filename: "example.txt",
				Code:     config.ErrorCodeReadingFile,
				Err:      errors.New("low level error 2"),
			},
			expected: false,
		},
		{
			// err1 and err2 are both nil here: nil == nil must compare equal.
			description: "it should detect when both errors are undefined",
			expected:    true,
		},
		{
			description: "it should detect when only one error is undefined",
			err1: &config.Error{
				Filename: "example.txt",
				Code:     config.ErrorCodeReadingFile,
				Err:      errors.New("low level error"),
			},
			expected: false,
		},
		{
			description: "it should detect when both causes of the error are undefined",
			err1: &config.Error{
				Filename: "example.txt",
				Code:     config.ErrorCodeReadingFile,
			},
			err2: &config.Error{
				Filename: "example.txt",
				Code:     config.ErrorCodeReadingFile,
			},
			expected: true,
		},
		{
			description: "it should detect when only one causes of the error is undefined",
			err1: &config.Error{
				Filename: "example.txt",
				Code:     config.ErrorCodeReadingFile,
				Err:      errors.New("low level error"),
			},
			err2: &config.Error{
				Filename: "example.txt",
				Code:     config.ErrorCodeReadingFile,
			},
			expected: false,
		},
		{
			// A plain errors.New value is not a *config.Error, so equality
			// must fail regardless of matching message text.
			description: "it should detect when one the error isn't Error type",
			err1: &config.Error{
				Filename: "example.txt",
				Code:     config.ErrorCodeReadingFile,
				Err:      errors.New("low level error"),
			},
			err2:     errors.New("low level error"),
			expected: false,
		},
	}
	for _, scenario := range scenarios {
		t.Run(scenario.description, func(t *testing.T) {
			if equal := config.ErrorEqual(scenario.err1, scenario.err2); equal != scenario.expected {
				t.Errorf("results don't match. expected “%t” and got “%t”", scenario.expected, equal)
			}
		})
	}
}
|
#!/bin/bash
# Print characters 2 and 7 of every line read from the file given as $1,
# or from stdin when no argument is supplied.
#
# Fixes: `IFS= read -r` stops the shell from trimming whitespace and eating
# backslashes; printf (unlike echo) is safe for lines starting with '-'.
while IFS= read -r line
do
    printf '%s\n' "$line" | cut -c2,7
done < "${1:-/dev/stdin}"
|
export * from './angularfire2';
export * from './firebase.app.module'; |
def longest_common_subsequence(A, B):
    """Return one longest common subsequence of A and B as a list.

    Classic O(n*m) dynamic programming: dp[i][j] holds the LCS length of
    A[:i] and B[:j]; the table is then walked backwards to reconstruct the
    subsequence itself (ties broken by moving up first).

    Args:
        A, B: indexable sequences (strings, lists, ...) whose elements
            support equality comparison.

    Returns:
        list: LCS elements in order; empty list when nothing is common.
        (Bug fix: the original appended a trailing '' sentinel to the
        result, contradicting its own documented example output.)
    """
    n = len(A)
    m = len(B)
    dp = [[0] * (m + 1) for _ in range(n + 1)]
    for i in range(1, n + 1):
        for j in range(1, m + 1):
            if A[i - 1] == B[j - 1]:
                dp[i][j] = 1 + dp[i - 1][j - 1]
            else:
                dp[i][j] = max(dp[i - 1][j], dp[i][j - 1])
    # Reconstruct by retracing the table from the bottom-right corner.
    index = dp[n][m]
    lcs = [None] * index
    i = n
    j = m
    while i > 0 and j > 0:
        if A[i - 1] == B[j - 1]:
            index -= 1
            lcs[index] = A[i - 1]
            i -= 1
            j -= 1
        elif dp[i - 1][j] > dp[i][j - 1]:
            i -= 1
        else:
            j -= 1
    return lcs


if __name__ == '__main__':
    # Demo guarded by __main__: the original called the function at module
    # scope with undefined names A and B, raising NameError on import.
    print(longest_common_subsequence('1572839', '987513'))  # e.g. ['5', '8', '9']-style output
<gh_stars>0
const gulp = require('gulp');
const sass = require('gulp-sass');
const cleanCss = require('gulp-clean-css');
const rename = require('gulp-rename');
const autoPrefixer = require('gulp-autoprefixer');
const sourcemap = require('gulp-sourcemaps');
const plumber = require('gulp-plumber');
const notify = require('gulp-notify');
const imagemin = require('gulp-imagemin');
const fileinclude = require('gulp-file-include');
const uglify = require('gulp-uglify');
// Central map of source (dev) and build (dist) locations shared by all tasks.
const path = {
    sass: './src/sass/**/*.{sass,scss}', // every Sass/SCSS source
    css: {
        dev: './src/css',
        dist: './dist/css'
    },
    fonts: {
        dev: './src/fonts/**/*',
        dist: './dist/fonts'
    },
    img: {
        dev: './src/img/**/*',
        dist: './dist/img',
    },
    js: {
        common: './src/js/common.js', // single JS entry point
        dev: './src/js',
        dist: './dist/js'
    },
    html: {
        dev: './src/*.html',
        dist: './dist/'
    },
    base: './src/'
};
// Sass task: compile, autoprefix, minify (rename to *.min), and write the
// result with an inline sourcemap to both the dev and dist css folders.
function styles() {
    return gulp.src(path.sass)
        .pipe(plumber({
            // Surface compile errors as notifications instead of killing
            // the running gulp process.
            errorHandler: notify.onError(function (err) {
                return {
                    title: 'Sass task',
                    message: err.message
                };
            })
        }))
        .pipe(sourcemap.init())
        .pipe(sass())
        .pipe(autoPrefixer([
            'last 15 versions', '> 5%', 'ie 8'
        ]))
        .pipe(cleanCss({
            level: 2
        }))
        .pipe(rename({
            suffix: '.min'
        }))
        .pipe(sourcemap.write())
        .pipe(gulp.dest(path.css.dev))
        .pipe(gulp.dest(path.css.dist));
}
gulp.task('styles', styles);
// JavaScript task: expand @@include directives in the common.js entry point,
// uglify it with an inline sourcemap, rename to *.min, and write to both the
// dev and dist js folders.
function scripts() {
    return gulp.src(path.js.common)
        .pipe(plumber({
            // Surface errors as notifications instead of killing gulp.
            errorHandler: notify.onError(function (err) {
                return {
                    title: 'JavaScript task',
                    message: err.message
                };
            })
        }))
        .pipe(fileinclude({
            prefix: '@@',
            basepath: '@file'
        }))
        .pipe(sourcemap.init())
        .pipe(uglify())
        .pipe(rename({
            suffix: '.min'
        }))
        .pipe(sourcemap.write())
        .pipe(gulp.dest(path.js.dev))
        .pipe(gulp.dest(path.js.dist));
}
// Bug fix: the original read gulp.task(('scripts', scripts)) — the comma
// operator reduced it to gulp.task(scripts), registering the task only under
// the function's inferred name. Pass name and callback as two arguments.
gulp.task('scripts', scripts);
// Fonts task: copy every font file from src/fonts to dist/fonts unchanged.
function fonts() {
    return gulp.src(path.fonts.dev)
        .pipe(plumber({
            errorHandler: notify.onError(function (err) {
                return {
                    title: 'Fonts task',
                    message: err.message
                };
            })
        }))
        .pipe(gulp.dest(path.fonts.dist));
}
gulp.task('fonts', fonts);
// Images task: optimize every file under src/img and write it to dist/img.
function images() {
    return gulp.src(path.img.dev)
        .pipe(imagemin())
        .pipe(gulp.dest(path.img.dist));
}
gulp.task('images', images);
// HTML task: copy top-level page files from src to dist unchanged.
function site() {
    return gulp.src(path.html.dev)
        .pipe(gulp.dest(path.html.dist));
}
gulp.task('html', site);
// Default task: run every build step; the steps themselves run in parallel.
gulp.task('default', gulp.series(gulp.parallel(styles,scripts,fonts,images,site)));
/**
 * Logs every prime number from 2 up to and including n.
 * @param {number} n - inclusive upper bound
 */
function showPrimes(n) {
  let candidate = 2;
  while (candidate <= n) {
    if (isPrime(candidate)) {
      console.log(candidate);
    }
    candidate += 1;
  }
}
/**
 * Tests whether n is a prime number.
 *
 * Trial division only up to sqrt(n): any composite n must have a divisor
 * no larger than its square root, so the original loop to n-1 did O(n)
 * work where O(sqrt(n)) suffices. Even numbers are dispatched up front so
 * the loop can step by 2.
 *
 * @param {number} n - integer to test
 * @returns {boolean} true when n is prime
 */
function isPrime(n) {
  if (n < 2) {
    return false;
  }
  if (n % 2 === 0) {
    return n === 2; // 2 is the only even prime
  }
  for (let i = 3; i * i <= n; i += 2) {
    if (n % i === 0) {
      return false;
    }
  }
  return true;
}
// Increments its argument by one.
// Fix: `const` instead of `var` — the binding is never reassigned.
const inc = v => v + 1
export = inc
|
<filename>arms/src/main/java/com/jess/arms/http/imageloader/BaseImageLoaderStrategy.java
package com.jess.arms.http.imageloader;
import android.content.Context;
/**
 * Image loading strategy: abstracts the concrete image library behind a
 * common interface so implementations can be swapped per configuration.
 * Created by xcz
 * on 2019/3/4.
 */
public interface BaseImageLoaderStrategy<T extends ImageConfig> {
    /**
     * Loads an image as described by the supplied configuration.
     *
     * @param ctx    caller's context
     * @param config per-request image configuration (target view, URL, ...)
     */
    void loadImage(Context ctx, T config);

    /**
     * Stops an in-flight load and releases any associated resources.
     *
     * @param ctx    caller's context
     * @param config identifies the request(s) to cancel
     */
    void clear(Context ctx, T config);
}
|
/*
* Copyright 2017-present, Converse.AI
* All rights reserved.
*
* This source code is licensed under the license found in the
* LICENSE file in the root directory of this source tree.
*
*/
const Payloads = require('../').Payloads;
const Channel = Payloads.Channel;
const expect = require('chai').expect;
// Synchronous-channel payload tests. Each scenario is exercised two ways:
// "FULL" builds objects field-by-field via setters, "FAST" passes everything
// through the constructor shorthand; both must produce the same shape.
describe('Channels – Sync', function () {
    it('Inbound Response – FULL', function() {
        const Sync = Channel.Sync;
        var payload = new Sync.InboundResponse();
        var output = new Sync.InboundOutput();
        var message = new Sync.Message.Text('ABC');
        expect(message).has.property('text').to.equal('ABC');
        output.setMessage(message);
        output.setData({a: 'b', c: 'd'});
        expect(output).has.property('channelMessage').to.equal(message);
        expect(output).has.property('channelSetting');
        expect(output.channelSetting).has.property('data').has.property('a').to.equal('b');
        // Sync channel outputs must carry sync === true.
        expect(output.channelSetting).has.property('sync').to.equal(true);
        payload.addOutput(output);
        expect(payload).has.property('channelOutput').to.be.an('array').that.does.include(output);
    })
    it('Outbound Response – FULL', function() {
        const Sync = Channel.Sync;
        var payload = new Sync.OutboundResponse();
        var httpResponse = new Payloads.HTTPResponse();
        httpResponse.setHTTPStatus(200);
        httpResponse.setBody('ABC');
        httpResponse.setHeaders({a: 'b'});
        expect(httpResponse).has.property('httpStatus').to.equal(200);
        expect(httpResponse).has.property('body').to.equal('ABC');
        expect(httpResponse).has.property('headers').has.property('a').to.equal('b');
        payload.setHTTPResponse(httpResponse);
        expect(payload).has.property('httpResponse').to.equal(httpResponse);
    })
    it('Inbound Response – FAST', function() {
        const Sync = Channel.Sync;
        var payload = new Sync.InboundResponse();
        // Constructor shorthand: message + settings in one call.
        var output = new Sync.InboundOutput(new Sync.Message.Text('ABC'), {
            data: {a: 'b', c: 'd'}
        });
        expect(output).has.property('channelMessage').has.property('text').to.equal('ABC');
        expect(output).has.property('channelSetting').has.property('data').has.property('a').to.equal('b');
        expect(output.channelSetting).has.property('sync').to.equal(true);
        payload.addOutput(output);
        expect(payload).has.property('channelOutput').to.be.an('array').that.does.include(output);
    })
    it('Outbound Response – FAST', function() {
        const Sync = Channel.Sync;
        // Constructor shorthand builds the wrapped HTTPResponse directly.
        var payload = new Sync.OutboundResponse({httpStatus: 200, body: 'ABC', 'headers': {a: 'b'}});
        payload.setChannelSetting({});
        expect(payload).has.property('channelSetting');
        expect(payload).has.property('httpResponse');
        expect(payload.httpResponse).has.property('httpStatus').to.equal(200);
        expect(payload.httpResponse).has.property('body').to.equal('ABC');
        expect(payload.httpResponse).has.property('headers').has.property('a').to.equal('b');
    })
});
// Asynchronous-channel payload tests; mirrors the Sync suite but expects
// channelSetting.sync === false on inbound outputs.
describe('Channels – Async', function () {
    it('Inbound Response – FULL', function() {
        const Async = Channel.Async;
        var payload = new Async.InboundResponse();
        var httpResponse = new Payloads.HTTPResponse();
        httpResponse.setHTTPStatus(200);
        httpResponse.setBody('ABC');
        httpResponse.setHeaders({a: 'b'});
        expect(httpResponse).has.property('httpStatus').to.equal(200);
        expect(httpResponse).has.property('body').to.equal('ABC');
        expect(httpResponse).has.property('headers').has.property('a').to.equal('b');
        var output = new Async.InboundOutput();
        var message = new Async.Message.Text('ABC');
        expect(message).has.property('text').to.equal('ABC');
        output.setMessage(message);
        output.setData({a: 'b', c: 'd'});
        expect(output).has.property('channelMessage').to.equal(message);
        expect(output).has.property('channelSetting');
        expect(output.channelSetting).has.property('data').has.property('a').to.equal('b');
        // Async channel outputs must carry sync === false.
        expect(output.channelSetting).has.property('sync').to.equal(false);
        payload.addOutput(output);
        expect(payload).has.property('channelOutput').to.be.an('array').that.does.include(output);
    })
    it('Outbound Response – FULL', function() {
        const Async = Channel.Async;
        // NOTE(review): no assertions here — this only verifies that
        // construction does not throw. Consider asserting the default shape.
        var payload = new Async.OutboundResponse();
    })
    it('Inbound Response – FAST', function() {
        const Async = Channel.Async;
        var payload = new Async.InboundResponse({httpStatus: 200, body: 'ABC', headers: {a: 'b'}});
        expect(payload).has.property('httpResponse');
        expect(payload.httpResponse).has.property('httpStatus').to.equal(200);
        expect(payload.httpResponse).has.property('body').to.equal('ABC');
        expect(payload.httpResponse).has.property('headers').has.property('a').to.equal('b');
        var output = new Async.InboundOutput(new Async.Message.Text('ABC'), {
            data: {a: 'b', b: 'c'}
        });
        expect(output).has.property('channelMessage').has.property('text').to.equal('ABC');
        expect(output).has.property('channelSetting');
        expect(output.channelSetting).has.property('data').has.property('a').to.equal('b');
        expect(output.channelSetting).has.property('sync').to.equal(false);
        payload.addOutput(output);
        expect(payload).has.property('channelOutput').to.be.an('array').that.does.include(output);
    })
    it('Outbound Response – FAST', function() {
        const Async = Channel.Async;
        // NOTE(review): no assertions here either — construction-only check.
        var payload = new Async.OutboundResponse();
    })
});
|
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import logging
import os
from typing import Dict
from flask import Flask, render_template, request, Response
import sqlalchemy
from connect_connector import connect_with_connector
from connect_tcp import connect_tcp_socket
from connect_unix import connect_unix_socket
app = Flask(__name__)
logger = logging.getLogger()
def init_connection_pool() -> sqlalchemy.engine.base.Engine:
    """Create a SQLAlchemy engine using whichever connection method the
    environment configures.

    Environment variables are checked in order:
      INSTANCE_HOST (e.g. 127.0.0.1)                          -> TCP socket
      INSTANCE_UNIX_SOCKET (e.g. /cloudsql/project:region:instance)
                                                              -> Unix socket
      INSTANCE_CONNECTION_NAME (e.g. project:region:instance) -> connector

    Raises:
        ValueError: when none of the variables is defined.
    """
    strategies = (
        ("INSTANCE_HOST", connect_tcp_socket),
        ("INSTANCE_UNIX_SOCKET", connect_unix_socket),
        ("INSTANCE_CONNECTION_NAME", connect_with_connector),
    )
    for env_var, connect in strategies:
        if os.environ.get(env_var):
            return connect()
    raise ValueError(
        "Missing database connection type. Please define one of INSTANCE_HOST, INSTANCE_UNIX_SOCKET, or INSTANCE_CONNECTION_NAME"
    )
# create 'votes' table in database if it does not already exist
def migrate_db(db: sqlalchemy.engine.base.Engine) -> None:
    """Ensure the 'votes' table exists; idempotent, safe on every startup."""
    with db.connect() as conn:
        conn.execute(
            "CREATE TABLE IF NOT EXISTS votes "
            "( vote_id SERIAL NOT NULL, time_cast timestamp NOT NULL, "
            "candidate VARCHAR(6) NOT NULL, PRIMARY KEY (vote_id) );"
        )
# This global variable is declared with a value of `None`, instead of calling
# `init_db()` immediately, to simplify testing. In general, it
# is safe to initialize your database connection pool when your script starts
# -- there is no need to wait for the first request.
db = None

# init_db lazily instantiates a database connection pool. Users of Cloud Run or
# App Engine may wish to skip this lazy instantiation and connect as soon
# as the function is loaded. This is primarily to help testing.
# NOTE(review): @app.before_first_request was removed in Flask 2.3 — confirm
# the pinned Flask version still provides it.
@app.before_first_request
def init_db() -> None:
    """Initialise the module-level connection pool and run migrations.

    The return annotation is `None`: the function assigns the global `db`
    and returns nothing (the original `-> Engine` annotation was wrong).
    """
    global db
    db = init_connection_pool()
    migrate_db(db)
@app.route("/", methods=["GET"])
def render_index() -> str:
context = get_index_context(db)
return render_template("index.html", **context)
@app.route("/votes", methods=["POST"])
def cast_vote() -> Response:
team = request.form['team']
return save_vote(db, team)
# get_index_context gets data required for rendering HTML application
def get_index_context(db: sqlalchemy.engine.base.Engine) -> Dict:
    """Fetch the five most recent votes plus total counts per candidate.

    Returns a dict with keys 'recent_votes', 'tab_count', 'space_count'
    matching what index.html expects.
    """
    votes = []
    with db.connect() as conn:
        # Execute the query and fetch all results
        recent_votes = conn.execute(
            "SELECT candidate, time_cast FROM votes ORDER BY time_cast DESC LIMIT 5"
        ).fetchall()
        # Convert the results into a list of dicts representing votes
        for row in recent_votes:
            votes.append({"candidate": row[0], "time_cast": row[1]})
        # Parameterised count statement, reused for both candidates.
        stmt = sqlalchemy.text(
            "SELECT COUNT(vote_id) FROM votes WHERE candidate=:candidate"
        )
        # Count number of votes for tabs
        tab_result = conn.execute(stmt, candidate="TABS").fetchone()
        tab_count = tab_result[0]
        # Count number of votes for spaces
        space_result = conn.execute(stmt, candidate="SPACES").fetchone()
        space_count = space_result[0]
    return {
        "space_count": space_count,
        "recent_votes": votes,
        "tab_count": tab_count,
    }
# save_vote saves a vote to the database that was retrieved from form data
def save_vote(db: sqlalchemy.engine.base.Engine, team: str) -> Response:
    """Persist a single vote for `team` and report the outcome.

    Args:
        db: SQLAlchemy connection pool to execute against.
        team: candidate name; must be exactly "TABS" or "SPACES".

    Returns:
        Response: 200 on success, 400 for an invalid team, 500 when the
        insert fails.
    """
    time_cast = datetime.datetime.now(tz=datetime.timezone.utc)
    # Verify that the team is one of the allowed options
    # (idiom: membership test instead of the original chained != checks).
    if team not in ("TABS", "SPACES"):
        logger.warning(f"Received invalid 'team' property: '{team}'")
        return Response(
            response="Invalid team specified. Should be one of 'TABS' or 'SPACES'",
            status=400,
        )
    # [START cloud_sql_postgres_sqlalchemy_connection]
    # Preparing a statement before hand can help protect against injections.
    stmt = sqlalchemy.text(
        "INSERT INTO votes (time_cast, candidate) VALUES (:time_cast, :candidate)"
    )
    try:
        # Using a with statement ensures that the connection is always released
        # back into the pool at the end of statement (even if an error occurs)
        with db.connect() as conn:
            conn.execute(stmt, time_cast=time_cast, candidate=team)
    except Exception as e:
        # If something goes wrong, handle the error in this section. This might
        # involve retrying or adjusting parameters depending on the situation.
        # [START_EXCLUDE]
        logger.exception(e)
        return Response(
            status=500,
            response="Unable to successfully cast vote! Please check the "
            "application logs for more details.",
        )
    # [END_EXCLUDE]
    # [END cloud_sql_postgres_sqlalchemy_connection]
    return Response(
        status=200,
        response=f"Vote successfully cast for '{team}' at time {time_cast}!",
    )
if __name__ == "__main__":
app.run(host="127.0.0.1", port=8080, debug=True)
|
#!/bin/bash
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Build and upload DocFX YAML docs for google-cloud-logging.
set -eo pipefail

# Default to the Kokoro-provided service account when CREDENTIALS is unset.
if [[ -z "${CREDENTIALS}" ]]; then
  CREDENTIALS=${KOKORO_KEYSTORE_DIR}/73713_docuploader_service_account
fi

if [[ -z "${STAGING_BUCKET_V2}" ]]; then
  echo "Need to set STAGING_BUCKET_V2 environment variable"
  exit 1
fi

# work from the git root directory
pushd $(dirname "$0")/../../

# install docuploader package
python3 -m pip install gcp-docuploader

# compile all packages
mvn clean install -B -q -DskipTests=true

export NAME=google-cloud-logging
# VERSION = third colon-separated field of the matching line in versions.txt.
export VERSION=$(grep ${NAME}: versions.txt | cut -d: -f3)

# cloud RAD generation
mvn clean javadoc:aggregate -B -q -P docFX
# include CHANGELOG
cp CHANGELOG.md target/docfx-yml/history.md

pushd target/docfx-yml

# create metadata
python3 -m docuploader create-metadata \
 --name ${NAME} \
 --version ${VERSION} \
 --xrefs devsite://java/gax \
 --xrefs devsite://java/google-cloud-core \
 --xrefs devsite://java/api-common \
 --xrefs devsite://java/proto-google-common-protos \
 --xrefs devsite://java/google-api-client \
 --xrefs devsite://java/google-http-client \
 --xrefs devsite://java/protobuf \
 --language java

# upload yml to production bucket
python3 -m docuploader upload . \
 --credentials ${CREDENTIALS} \
 --staging-bucket ${STAGING_BUCKET_V2} \
 --destination-prefix docfx
|
<reponame>vaskoz/jruby
package org.jruby.java.proxies;
import java.lang.reflect.Array;
import java.util.Arrays;
import org.jruby.Ruby;
import org.jruby.RubyArray;
import org.jruby.RubyBoolean;
import org.jruby.RubyClass;
import org.jruby.RubyFixnum;
import org.jruby.RubyModule;
import org.jruby.RubyNumeric;
import org.jruby.RubyObject;
import org.jruby.RubyRange;
import org.jruby.RubyString;
import org.jruby.anno.JRubyMethod;
import org.jruby.internal.runtime.methods.DynamicMethod;
import org.jruby.java.util.ArrayUtils;
import org.jruby.javasupport.Java;
import org.jruby.javasupport.JavaArray;
import org.jruby.javasupport.JavaClass;
import org.jruby.javasupport.JavaUtil;
import org.jruby.runtime.Block;
import org.jruby.runtime.ObjectAllocator;
import org.jruby.runtime.ThreadContext;
import org.jruby.runtime.Visibility;
import org.jruby.runtime.builtin.IRubyObject;
/**
 * Ruby-side proxy object wrapping a native Java array, exposing Ruby Array-like
 * behavior ({@code []}, {@code []=}, {@code each}, ranges, equality, etc.).
 * Elements are converted between Java and Ruby via a per-component-type
 * {@link JavaUtil.JavaConverter}.
 */
public class ArrayJavaProxy extends JavaProxy {
    // Converter for this array's component type; used by all element reads/writes.
    private final JavaUtil.JavaConverter converter;
    public ArrayJavaProxy(Ruby runtime, RubyClass klazz, Object array) {
        this(runtime, klazz, array, JavaUtil.getJavaConverter(array.getClass().getComponentType()));
    }
    public ArrayJavaProxy(Ruby runtime, RubyClass klazz, Object array, JavaUtil.JavaConverter converter) {
        super(runtime, klazz, array);
        this.converter = converter;
    }
    // Registers the ArrayJavaProxy class in the Ruby runtime, replacing "new"
    // on its singleton with ArrayNewMethod and mixing in Enumerable (each is
    // defined below, which Enumerable builds on).
    public static RubyClass createArrayJavaProxy(ThreadContext context) {
        Ruby runtime = context.runtime;
        RubyClass arrayJavaProxy = runtime.defineClass("ArrayJavaProxy",
                runtime.getJavaSupport().getJavaProxyClass(),
                ObjectAllocator.NOT_ALLOCATABLE_ALLOCATOR);
        RubyClass singleton = arrayJavaProxy.getSingletonClass();
        singleton.addMethod("new", new ArrayNewMethod(singleton, Visibility.PUBLIC));
        arrayJavaProxy.defineAnnotatedMethods(ArrayJavaProxy.class);
        arrayJavaProxy.includeModule(runtime.getEnumerable());
        return arrayJavaProxy;
    }
    // Allocates a new (possibly multi-dimensional) Java array of elementType
    // and wraps it in a proxy. Raises ArgumentError when reflection rejects
    // the dimension count.
    static ArrayJavaProxy newArray(final Ruby runtime, final Class<?> elementType, final int... dimensions) {
        final Object array;
        try {
            array = Array.newInstance(elementType, dimensions);
        }
        catch (IllegalArgumentException e) {
            throw runtime.newArgumentError("can not create " + dimensions.length + " dimensional array");
        }
        return new ArrayJavaProxy(runtime, Java.getProxyClassForObject(runtime, array), array);
    }
    // Lazily creates and caches the JavaArray wrapper in this object's data struct.
    public JavaArray getJavaArray() {
        JavaArray javaArray = (JavaArray) dataGetStruct();
        if (javaArray == null) {
            javaArray = new JavaArray(getRuntime(), getObject());
            dataWrapStruct(javaArray);
        }
        return javaArray;
    }
    @JRubyMethod(name = {"length", "size"})
    public RubyFixnum length(ThreadContext context) {
        return context.runtime.newFixnum( Array.getLength( getObject() ) );
    }
    @JRubyMethod(name = "empty?")
    public RubyBoolean empty_p(ThreadContext context) {
        return context.runtime.newBoolean( Array.getLength( getObject() ) == 0 );
    }
    // array[index] / array[range]
    @JRubyMethod(name = "[]")
    public IRubyObject op_aref(ThreadContext context, IRubyObject arg) {
        if ( arg instanceof RubyRange ) return arrayRange(context, (RubyRange) arg);
        final int i = convertArrayIndex(arg);
        return ArrayUtils.arefDirect(context.runtime, getObject(), converter, i);
    }
    // Raw (unconverted) element access for Java callers.
    public Object get(final int index) {
        return Array.get(getObject(), index);
    }
    public void set(final int index, final Object value) {
        Array.set(getObject(), index, value);
    }
    // array[first, length] — multi-argument form delegates to getRange.
    @JRubyMethod(name = "[]", required = 1, rest = true)
    public final IRubyObject op_aref(ThreadContext context, IRubyObject[] args) {
        if ( args.length == 1 ) return op_aref(context, args[0]);
        return getRange(context, args);
    }
    @JRubyMethod(name = "[]=")
    public final IRubyObject op_aset(ThreadContext context, IRubyObject index, IRubyObject value) {
        final int i = convertArrayIndex(index);
        return ArrayUtils.asetDirect(context.runtime, getObject(), converter, i, value);
    }
    // Ruby 2.3+ dig: walks nested structures one index at a time.
    @JRubyMethod(name = "dig", required = 1, rest = true)
    public final IRubyObject dig(ThreadContext context, IRubyObject[] args) {
        return dig(context, args, 0);
    }
    final IRubyObject dig(ThreadContext context, IRubyObject[] args, int idx) {
        final IRubyObject val = at(context, args[idx++]);
        return idx == args.length ? val : RubyObject.dig(context, val, args, idx);
    }
    // Accepts either a wrapped java.lang.Integer or any Ruby numeric as an index.
    private static int convertArrayIndex(final IRubyObject index) {
        if ( index instanceof JavaProxy ) {
            return (Integer) index.toJava(Integer.class);
        }
        return RubyNumeric.num2int(index);
    }
    @JRubyMethod
    public IRubyObject at(ThreadContext context, IRubyObject index) {
        return at(context, convertArrayIndex(index));
    }
    // Ruby-style at: negative indices count from the end; out-of-bounds -> nil.
    private final IRubyObject at(ThreadContext context, int i) {
        final Ruby runtime = context.runtime;
        final Object array = getObject();
        final int length = Array.getLength(array);
        if ( i < 0 ) i = i + length;
        if ( i >= 0 && i < length ) {
            return ArrayUtils.arefDirect(runtime, array, converter, i);
        }
        return context.nil;
    }
    // Concatenation; when other is an assignment-compatible Java array the
    // direct array+array path is used, otherwise ArrayUtils handles conversion.
    @JRubyMethod(name = "+")
    public IRubyObject op_plus(ThreadContext context, IRubyObject other) {
        final Object array = getObject();
        if ( other instanceof ArrayJavaProxy ) {
            final Object otherArray = ((ArrayJavaProxy) other).getObject();
            final Class<?> componentType = array.getClass().getComponentType();
            if ( componentType.isAssignableFrom( otherArray.getClass().getComponentType() ) ) {
                return ArrayUtils.concatArraysDirect(context, array, otherArray);
            }
        }
        return ArrayUtils.concatArraysDirect(context, array, other);
    }
    // Yields each converted element; basis for the mixed-in Enumerable methods.
    @JRubyMethod
    public IRubyObject each(ThreadContext context, Block block) {
        final Ruby runtime = context.runtime;
        final Object array = getObject();
        final int length = Array.getLength(array);
        for ( int i = 0; i < length; i++ ) {
            IRubyObject element = ArrayUtils.arefDirect(runtime, array, converter, i);
            block.yield(context, element);
        }
        return this;
    }
    @JRubyMethod(name = {"to_a", "to_ary"})
    public RubyArray to_a(ThreadContext context) {
        final Object array = getObject();
        return JavaUtil.convertJavaArrayToRubyWithNesting(context, array);
    }
    @JRubyMethod(name = {"component_type"})
    public IRubyObject component_type(ThreadContext context) {
        Class<?> componentType = getObject().getClass().getComponentType();
        final JavaClass javaClass = JavaClass.get(context.runtime, componentType);
        return Java.getProxyClass(context.runtime, javaClass);
    }
    @JRubyMethod
    public RubyString inspect(ThreadContext context) {
        return RubyString.newString(context.runtime, arrayToString());
    }
    @Override
    public String toString() {
        return arrayToString().toString();
    }
    // Builds e.g. "int[1, 2, 3]@1a2b3c". Primitive component types must be
    // switched on explicitly because Arrays.toString has no Object[] overload
    // that accepts primitive arrays; the first letter of the type name
    // disambiguates all primitives except byte/boolean (both 'b').
    private StringBuilder arrayToString() {
        final StringBuilder buffer = new StringBuilder(24);
        Class<?> componentClass = getObject().getClass().getComponentType();
        buffer.append(componentClass.getName());
        if (componentClass.isPrimitive()) {
            switch (componentClass.getName().charAt(0)) {
                case 'b':
                    if (componentClass == byte.class) buffer.append(Arrays.toString((byte[])getObject()));
                    else /* if (componentClass == boolean.class) */ buffer.append(Arrays.toString((boolean[])getObject()));
                    break;
                case 's':
                    /* if (componentClass == short.class) */ buffer.append(Arrays.toString((short[])getObject()));
                    break;
                case 'c':
                    /* if (componentClass == char.class) */ buffer.append(Arrays.toString((char[])getObject()));
                    break;
                case 'i':
                    /* if (componentClass == int.class) */ buffer.append(Arrays.toString((int[])getObject()));
                    break;
                case 'l':
                    /* if (componentClass == long.class) */ buffer.append(Arrays.toString((long[])getObject()));
                    break;
                case 'f':
                    /* if (componentClass == float.class) */ buffer.append(Arrays.toString((float[])getObject()));
                    break;
                case 'd':
                    /* if (componentClass == double.class) */ buffer.append(Arrays.toString((double[])getObject()));
                    break;
            }
        } else {
            buffer.append(Arrays.toString((Object[]) getObject()));
        }
        return buffer.append('@').append(Integer.toHexString(inspectHashCode()));
    }
    // == supports comparison against plain Ruby Arrays (element-wise, with each
    // Ruby element converted to this array's component type); everything else
    // falls through to eql?.
    @Override
    @JRubyMethod(name = "==")
    public RubyBoolean op_equal(ThreadContext context, IRubyObject other) {
        if ( other instanceof RubyArray ) {
            // we respond_to? to_ary thus shall handle [1].to_java == [1]
            return context.runtime.newBoolean( equalsRubyArray((RubyArray) other) );
        }
        return eql_p(context, other);
    }
    private boolean equalsRubyArray(final RubyArray rubyArray) {
        final Object thisArray = this.getObject();
        final int len = rubyArray.size();
        if ( len != Array.getLength(thisArray) ) return false;
        final Class<?> componentType = thisArray.getClass().getComponentType();
        for ( int i = 0; i < len; i++ ) {
            final Object ruby = rubyArray.eltInternal(i).toJava(componentType);
            final Object elem = Array.get(thisArray, i);
            // NOTE(review): returns immediately on the first null Ruby element,
            // without comparing the remaining elements — confirm intended.
            if ( ruby == null ) return elem == null;
            if ( ! ruby.equals(elem) ) return false;
        }
        return true;
    }
    @JRubyMethod(name = "eql?")
    public RubyBoolean eql_p(ThreadContext context, IRubyObject obj) {
        boolean equals = false;
        if ( obj instanceof ArrayJavaProxy ) {
            final ArrayJavaProxy that = (ArrayJavaProxy) obj;
            equals = arraysEquals(this.getObject(), that.getObject());
        }
        else if ( obj.getClass().isArray() ) {
            equals = arraysEquals(getObject(), obj);
        }
        return context.runtime.newBoolean(equals);
    }
    @Override
    public boolean equals(Object obj) {
        if ( obj instanceof ArrayJavaProxy ) {
            final ArrayJavaProxy that = (ArrayJavaProxy) obj;
            final Object thisArray = this.getObject();
            final Object thatArray = that.getObject();
            return arraysEquals(thisArray, thatArray);
        }
        return false;
    }
    // Component types must match exactly; primitives dispatch to the matching
    // Arrays.equals overload (same first-letter trick as arrayToString).
    private static boolean arraysEquals(final Object thisArray, final Object thatArray) {
        final Class<?> componentType = thisArray.getClass().getComponentType();
        if ( ! componentType.equals(thatArray.getClass().getComponentType()) ) {
            return false;
        }
        if ( componentType.isPrimitive() ) {
            switch ( componentType.getName().charAt(0) ) {
                case 'b':
                    if (componentType == byte.class) return Arrays.equals((byte[]) thisArray, (byte[]) thatArray);
                    else /* if (componentType == boolean.class) */ return Arrays.equals((boolean[]) thisArray, (boolean[]) thatArray);
                case 's':
                    /* if (componentType == short.class) */ return Arrays.equals((short[]) thisArray, (short[]) thatArray);
                case 'c':
                    /* if (componentType == char.class) */ return Arrays.equals((char[]) thisArray, (char[]) thatArray);
                case 'i':
                    /* if (componentType == int.class) */ return Arrays.equals((int[]) thisArray, (int[]) thatArray);
                case 'l':
                    /* if (componentType == long.class) */ return Arrays.equals((long[]) thisArray, (long[]) thatArray);
                case 'f':
                    /* if (componentType == float.class) */ return Arrays.equals((float[]) thisArray, (float[]) thatArray);
                case 'd':
                    /* if (componentType == double.class) */ return Arrays.equals((double[]) thisArray, (double[]) thatArray);
            }
        }
        return Arrays.equals((Object[]) thisArray, (Object[]) thatArray);
    }
    @Override
    @JRubyMethod
    public RubyFixnum hash() {
        return getRuntime().newFixnum( hashCode() );
    }
    // Content-based hash consistent with equals; scaled by 11 so a proxy does
    // not collide with the bare Arrays.hashCode of its own payload.
    @Override
    public int hashCode() {
        final Object array = getObject();
        final Class<?> componentType = array.getClass().getComponentType();
        if ( componentType.isPrimitive() ) {
            switch ( componentType.getName().charAt(0) ) {
                case 'b':
                    if (componentType == byte.class) return 11 * Arrays.hashCode((byte[]) array);
                    else /* if (componentType == boolean.class) */ return 11 * Arrays.hashCode((boolean[]) array);
                case 's':
                    /* if (componentType == short.class) */ return 11 * Arrays.hashCode((short[]) array);
                case 'c':
                    /* if (componentType == char.class) */ return 11 * Arrays.hashCode((char[]) array);
                case 'i':
                    /* if (componentType == int.class) */ return 11 * Arrays.hashCode((int[]) array);
                case 'l':
                    /* if (componentType == long.class) */ return 11 * Arrays.hashCode((long[]) array);
                case 'f':
                    /* if (componentType == float.class) */ return 11 * Arrays.hashCode((float[]) array);
                case 'd':
                    /* if (componentType == double.class) */ return 11 * Arrays.hashCode((double[]) array);
            }
        }
        return 11 * Arrays.hashCode((Object[]) array);
    }
    // Dispatches array[range] / array[first, length] argument shapes.
    public IRubyObject getRange(ThreadContext context, IRubyObject[] args) {
        if (args.length == 1) {
            return getRange(context, args[0]);
        }
        if (args.length == 2) {
            return getRange(context, args[0], args[1]);
        }
        throw context.runtime.newArgumentError(args.length, 1);
    }
    public IRubyObject getRange(ThreadContext context, IRubyObject arg0) {
        if ( arg0 instanceof RubyRange ) {
            return arrayRange(context, (RubyRange) arg0);
        }
        throw context.runtime.newTypeError(arg0, context.runtime.getRange());
    }
    // Resolves a Fixnum range to a subarray copy; negative bounds count from
    // the end, inclusive ranges add one to the length, empty/negative spans
    // yield an empty array of the same component type.
    private IRubyObject arrayRange(final ThreadContext context, final RubyRange range) {
        final Object array = getObject();
        final int arrayLength = Array.getLength( array );
        final IRubyObject rFirst = range.first(context);
        final IRubyObject rLast = range.last(context);
        if ( rFirst instanceof RubyFixnum && rLast instanceof RubyFixnum ) {
            int first = (int) ((RubyFixnum) rFirst).getLongValue();
            int last = (int) ((RubyFixnum) rLast).getLongValue();
            first = first >= 0 ? first : arrayLength + first;
            last = last >= 0 ? last : arrayLength + last;
            int newLength = last - first;
            if ( range.exclude_end_p().isFalse() ) newLength += 1;
            if ( newLength <= 0 ) {
                return ArrayUtils.emptyJavaArrayDirect(context, array.getClass().getComponentType());
            }
            return ArrayUtils.javaArraySubarrayDirect(context, array, first, newLength);
        }
        throw context.runtime.newTypeError("only Fixnum ranges supported");
    }
    public IRubyObject getRange(ThreadContext context, IRubyObject first, IRubyObject length) {
        return arrayRange(context, first, length);
    }
    // array[first, length] form; length is validated against the array size
    // before first is normalized.
    private IRubyObject arrayRange(final ThreadContext context,
        final IRubyObject rFirst, final IRubyObject rLength) {
        final Object array = getObject();
        final int arrayLength = Array.getLength( array );
        if ( rFirst instanceof RubyFixnum && rLength instanceof RubyFixnum ) {
            int first = (int) ((RubyFixnum) rFirst).getLongValue();
            int length = (int) ((RubyFixnum) rLength).getLongValue();
            if ( length > arrayLength ) {
                throw context.runtime.newIndexError("length specifed is longer than array");
            }
            if ( length <= 0 ) {
                return ArrayUtils.emptyJavaArrayDirect(context, array.getClass().getComponentType());
            }
            first = first >= 0 ? first : arrayLength + first;
            return ArrayUtils.javaArraySubarrayDirect(context, array, first, length);
        }
        throw context.runtime.newTypeError("only Fixnum ranges supported");
    }
    // Replacement for ArrayJavaProxy.new: expects an already-built JavaArray,
    // allocates a proxy via the original "new" ("new_proxy") and wraps the
    // JavaArray into it instead of allocating storage itself.
    private static final class ArrayNewMethod extends org.jruby.internal.runtime.methods.JavaMethod.JavaMethodOne {
        private final DynamicMethod newMethod;
        ArrayNewMethod(RubyModule implClass, Visibility visibility) {
            this(implClass, visibility, implClass.searchMethod("new"));
        }
        public ArrayNewMethod(RubyModule implClass, Visibility visibility, DynamicMethod oldNew) {
            super(implClass, visibility);
            this.newMethod = oldNew;
        }
        @Override
        public final IRubyObject call(ThreadContext context, IRubyObject self, RubyModule clazz, String name, IRubyObject arg0) {
            final Ruby runtime = context.runtime;
            if ( ! ( arg0 instanceof JavaArray ) ) {
                throw runtime.newTypeError(arg0, runtime.getJavaSupport().getJavaArrayClass());
            }
            IRubyObject proxy = newMethod.call(context, self, clazz, "new_proxy");
            proxy.dataWrapStruct(arg0);
            return proxy;
        }
    }
}
|
/*
* Copyright (C) 2012-2014 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package info.archinnov.achilles.type;
import static org.fest.assertions.api.Assertions.assertThat;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
 * Unit tests for {@code CounterBuilder}: increment/decrement construction and
 * Jackson round-tripping of counter values.
 */
public class CounterBuilderTest {
    /** Expected-exception rule retained from the original fixture (unused by current tests). */
    @Rule
    public ExpectedException exception = ExpectedException.none();
    @Test
    public void should_incr() throws Exception {
        // A bare incr() starts the counter at +1.
        final Counter built = CounterBuilder.incr();
        assertThat(built.get()).isEqualTo(1L);
    }
    @Test
    public void should_incr_n() throws Exception {
        // incr(n) seeds the counter with the given positive delta.
        final Counter built = CounterBuilder.incr(10L);
        assertThat(built.get()).isEqualTo(10L);
    }
    @Test
    public void should_decr() throws Exception {
        // A bare decr() starts the counter at -1.
        final Counter built = CounterBuilder.decr();
        assertThat(built.get()).isEqualTo(-1L);
    }
    @Test
    public void should_decr_n() throws Exception {
        // decr(n) seeds the counter with the given negative delta.
        final Counter built = CounterBuilder.decr(10L);
        assertThat(built.get()).isEqualTo(-10L);
    }
    @Test
    public void should_be_able_to_serialize_and_deserialize_counter_impl() throws Exception {
        final ObjectMapper objectMapper = new ObjectMapper();
        // Counters serialize as their value wrapped in a JSON string.
        final Counter eleven = CounterBuilder.incr(11L);
        final String json = objectMapper.writeValueAsString(eleven);
        assertThat(json).isEqualTo("\"11\"");
        // And the string form deserializes back to an equal counter.
        final Counter roundTripped = objectMapper.readValue(json, Counter.class);
        assertThat(roundTripped.get()).isEqualTo(11L);
        // Zero is not special-cased.
        assertThat(objectMapper.writeValueAsString(CounterBuilder.incr(0))).isEqualTo("\"0\"");
    }
    @Test
    public void should_be_able_to_serialize_and_deserialize_null_counter() throws Exception {
        final ObjectMapper objectMapper = new ObjectMapper();
        // A counter holding no value serializes as an empty JSON string...
        final Counter valueless = new NullBackedCounter();
        String json = objectMapper.writeValueAsString(valueless);
        assertThat(json).isEqualTo("\"\"");
        // ...while a null reference serializes as JSON null and round-trips to null.
        json = objectMapper.writeValueAsString(null);
        assertThat(json).isEqualTo("null");
        final Counter roundTripped = objectMapper.readValue("null", Counter.class);
        assertThat(roundTripped).isNull();
    }
    /** Minimal Counter whose value stays null; only get() is exercised by the tests. */
    private static class NullBackedCounter implements Counter {
        private Long current;
        @Override
        public Long get() {
            return current;
        }
        @Override
        public void incr() {
            current++;
        }
        @Override
        public void incr(long increment) {
            current += increment;
        }
        @Override
        public void decr() {
            current--;
        }
        @Override
        public void decr(long decrement) {
            current -= decrement;
        }
    }
}
|
#!/usr/bin/bash
# Stage every change in the working tree, commit it, and push the current
# branch to origin/main (setting the upstream on first push).
#
# Abort on the first failing command so that, e.g., a failed commit
# (nothing staged, hooks rejecting) does not still trigger a push.
set -e

git add .
git commit -m "Final"
git push --set-upstream origin main
<reponame>smagill/opensphere-desktop<filename>open-sphere-base/mantle/src/main/java/io/opensphere/mantle/data/geom/style/VisualizationStyleRegistry.java
package io.opensphere.mantle.data.geom.style;
import java.awt.Color;
import java.util.Set;
import io.opensphere.mantle.data.VisualizationSupport;
/**
 * The Interface VisualizationStyleRegistry. Central registry mapping
 * {@link VisualizationSupport} types (globally, or per data type) to
 * {@link VisualizationStyle} instances, with change notification.
 */
public interface VisualizationStyleRegistry
{
    /**
     * Adds the visualization style registry change listener.
     *
     * Note: Listeners are held as weak references, submitter should hold a hard
     * reference or the listener may be garbage collected.
     *
     * @param listener the listener
     */
    void addVisualizationStyleRegistryChangeListener(VisualizationStyleRegistryChangeListener listener);
    /**
     * Gets the default {@link VisualizationStyle} for a particular
     * {@link VisualizationSupport}, will search for the first applicable style
     * based on the MGS type, or its super types and interfaces until it finds
     * an applicable type, or eventually fails.
     *
     * Search is performed as follows:
     *
     * Submitted Direct Class A -&gt; Direct interfaces of class A that extend
     * {@link VisualizationSupport} -&gt; Direct super class of class A ( Class
     * A' ) -&gt; Direct interfaces of class A' that extend
     * {@link VisualizationSupport} -&gt; etc.
     *
     *
     * @param mgsClass the Class
     * @return the default style or null if no default style is found.
     */
    VisualizationStyle getDefaultStyle(Class<? extends VisualizationSupport> mgsClass);
    /**
     * Gets the default {@link VisualizationStyle} instance for a concrete
     * {@link VisualizationStyle} class that is in the registry.
     *
     * @param styleClass the {@link VisualizationStyle} class to search for.
     * @return the default {@link VisualizationStyle} instance for style class
     */
    VisualizationStyle getDefaultStyleInstanceForStyleClass(Class<? extends VisualizationStyle> styleClass);
    /**
     * Gets {@link Set} of all default style instances.
     *
     * @return the default styles set.
     */
    Set<VisualizationStyle> getDefaultStyles();
    /**
     * Gets the feature color for data type if there is an active style that
     * matches the input parameters that has a color.
     *
     * @param dtiKey the DataTypeInfo key.
     * @param vsSupportType the VisualizationSupport type class or null for the
     *            first active type.
     * @return the color or null if no active style with a color is found that
     *         matches the parameters.
     */
    Color getFeatureColorForActiveStyle(String dtiKey, Class<? extends VisualizationSupport> vsSupportType);
    /**
     * Gets {@link Set} of all feature classes overridden for a particular data
     * type ( not defaults ).
     *
     * @param dtiKey the data type key
     * @return the unmodifiable {@link Set} of feature classes.
     */
    Set<Class<? extends VisualizationSupport>> getFeatureTypes(String dtiKey);
    /**
     * Gets the {@link VisualizationStyle} for a specific
     * {@link VisualizationSupport} class and data type.
     *
     * @param mgsCLass the {@link VisualizationSupport} class
     * @param dtiKey the Data Type key.
     * @param returnDefaultIfNoSpecificStyle the return default if no specific
     *            style
     * @return the style or null if none found. {@link VisualizationSupport}
     *         will search for the first applicable style based on the MGS type,
     *         or its super types and interfaces until it finds an applicable
     *         type, or eventually fails. Will not return the default type if a
     *         specific type is not found.
     *
     *         Uses the same search methodology as getDefaultStyle.
     */
    VisualizationStyle getStyle(Class<? extends VisualizationSupport> mgsCLass, String dtiKey,
            boolean returnDefaultIfNoSpecificStyle);
    /**
     * Gets {@link Set} of all style instances for a particular data type ( not
     * defaults ).
     *
     * @param dtiKey the data type key
     * @return the unmodifiable {@link Set} of styles
     */
    Set<VisualizationStyle> getStyles(String dtiKey);
    /**
     * Gets the all of the VisualizationStyle classes that are sub-classes of
     * the submitted style class.
     *
     * @param styleClass the style class to use to retrieve sub-classes
     * @return the unmodifiable {@link Set} of styles that are sub-types of the
     *         submitted style, may be empty but will never be null.
     */
    Set<Class<? extends VisualizationStyle>> getStylesForStyleType(Class<? extends VisualizationStyle> styleClass);
    /**
     * Install a VisualizationStyle.
     *
     * Provided the style is not already installed, it will be added and a
     * default instance will be created and managed by the registry. Will fire a
     * visualizationStyleInstalled to registry listeners.
     *
     * @param styleClass the style class to install
     * @param source the source installing the style
     * @return true, if successful, false if not installed
     */
    boolean installStyle(Class<? extends VisualizationStyle> styleClass, Object source);
    /**
     * Removes the visualization style registry change listener.
     *
     * @param listener the listener
     */
    void removeVisualizationStyleRegistryChangeListener(VisualizationStyleRegistryChangeListener listener);
    /**
     * Reset the style for a specific {@link VisualizationSupport} class for a
     * specific data type back to the default ( or next capturing type ).
     *
     * @param mgsClass the {@link VisualizationSupport} class
     * @param dtiKey the dti key
     * @param source the source
     */
    void resetStyle(Class<? extends VisualizationSupport> mgsClass, String dtiKey, Object source);
    /**
     * Sets the default style for a particular class of
     * {@link VisualizationSupport}, any VisualizationSupport classes that
     * inherits from the specified {@link VisualizationSupport} will use the
     * installed style unless it or one of its more direct descendants have an
     * installed style.
     *
     * Will fire a defaultStyleChanged if successful.
     *
     * @param mgsClass the {@link VisualizationSupport} class for which this
     *            style is to apply.
     * @param styleClass the style class ( will be installed if not already
     *            installed ).
     * @param source the source setting the default style.
     */
    void setDefaultStyle(Class<? extends VisualizationSupport> mgsClass, Class<? extends VisualizationStyle> styleClass,
            Object source);
    /**
     * Sets the style for a particular {@link VisualizationSupport} class type
     * for a particular DataType that is to override the default style.
     *
     * fires a {@link VisualizationStyleDatatypeChangeEvent} if the style is
     * changed.
     *
     * @param mgsClass the {@link VisualizationSupport} class for which the
     *            style applies.
     * @param dtiKey the Data Type key.
     * @param style the VisualizationStyle for the type
     * @param source the source setting the style
     * @return the previous style for this mgs class and dtiKey ( which could be
     *         the default style ).
     */
    VisualizationStyle setStyle(Class<? extends VisualizationSupport> mgsClass, String dtiKey, VisualizationStyle style,
            Object source);
    /**
     * The listener interface for receiving events and notifications from the
     * visualization style registry.
     */
    interface VisualizationStyleRegistryChangeListener
    {
        /**
         * Default style changed.
         *
         * @param mgsClass the mgs class
         * @param styleClass the style class
         * @param source the source
         */
        void defaultStyleChanged(Class<? extends VisualizationSupport> mgsClass, Class<? extends VisualizationStyle> styleClass,
                Object source);
        /**
         * Visualization style datatype changed.
         *
         * @param evt the evt
         */
        void visualizationStyleDatatypeChanged(VisualizationStyleDatatypeChangeEvent evt);
        /**
         * Visualization style installed.
         *
         * @param styleClass the style class
         * @param source the source
         */
        void visualizationStyleInstalled(Class<? extends VisualizationStyle> styleClass, Object source);
    }
}
|
from math import exp, gamma, log, log1p
def process_functions(functions, input_values):
    """Apply a mapping of named functions to shared input values.

    For every plain Python function (including lambdas) in ``functions``,
    call it with ``input_values[0]`` when it declares exactly one positional
    parameter, otherwise unpack all of ``input_values`` into it. Entries that
    are not Python functions (e.g. builtins such as ``math.exp``, or
    non-callables) are skipped, matching the original behavior.

    Args:
        functions: Mapping of name -> callable.
        input_values: Sequence of arguments shared by all functions.

    Returns:
        dict mapping each processed name to its function's result.
    """
    import types

    results = {}
    for func_name, func in functions.items():
        # The original tested isinstance(func, type(lambda: 0)); use the named
        # stdlib alias for the same type. Builtins are NOT FunctionType and are
        # skipped — NOTE(review): confirm that skipping builtins is intended.
        if not isinstance(func, types.FunctionType):
            continue
        if func.__code__.co_argcount == 1:
            results[func_name] = func(input_values[0])
        else:
            # The original had identical argcount==2 and fallback branches;
            # they are collapsed into one.
            results[func_name] = func(*input_values)
    return results
#!/bin/bash
# Run the ANTLR TestRig (grun) on an input file, starting from a given grammar
# rule of the Decaf grammar.
#
# usage: ./grun.sh RULE INPUT_FILE

# needed so the aliases below work in a non-interactive shell
shopt -s expand_aliases

RULE=$1
INPUT=$2
ROOT=$(pwd)

# BUG FIX: the original wrote `[ "-z ${RULE}" ]`, which tests a non-empty
# literal string and is therefore always true, and joined the checks with
# `&&`. Both arguments are required, so error out when EITHER is missing.
if [ -z "${RULE}" ] || [ -z "${INPUT}" ]; then
    echo "usage: ./grun.sh RULE INPUT_FILE"
    exit 1
fi

# Resolve INPUT relative to the invocation directory and verify it exists.
INPUT=${ROOT}/${INPUT}
if [ ! -f "$INPUT" ]; then
    echo "input file $INPUT doesn't exist or is not a file."
    exit 1
fi

export ANTLR4=$(pwd)/lib/antlr-4.7.2-complete.jar
export CLASSPATH=.:${ANTLR4}
alias antlr4="java -jar ${ANTLR4}"
alias grun='java org.antlr.v4.gui.TestRig'

# Generate and compile the parser in a scratch directory, then run TestRig.
mkdir -p /tmp/grun
cp src/decaf/Decaf.g4 /tmp/grun
pushd /tmp/grun > /dev/null
antlr4 Decaf.g4
javac *.java
grun Decaf ${RULE} -trace -gui -tokens -tree ${INPUT}
popd
|
<reponame>thanhlmm/lightweight-charts
import { BarPrice } from '../../model/bar';
import { ChartModel } from '../../model/chart-model';
import { Coordinate } from '../../model/coordinate';
import { Series } from '../../model/series';
import { BaseValueType } from '../../model/series-options';
import { TimePointIndex } from '../../model/time-data';
import { PaneRendererAreaBaseline, PaneRendererAreaBaselineData } from '../../renderers/area-baseline-renderer';
import { CompositeRenderer } from '../../renderers/composite-renderer';
import { IPaneRenderer } from '../../renderers/ipane-renderer';
import { LineItem } from '../../renderers/line-renderer';
import { LinePaneViewBase } from './line-pane-view-base';
// Pane view for 'AreaBaseline' series: converts series data into LineItems
// (via LinePaneViewBase) and feeds the baseline-area renderer with the
// series' current style options on every render.
export class SeriesAreaBaselinePaneView extends LinePaneViewBase<'AreaBaseline', LineItem> {
	private readonly _renderer: CompositeRenderer = new CompositeRenderer();
	private readonly _areaRenderer: PaneRendererAreaBaseline = new PaneRendererAreaBaseline();
	public constructor(series: Series<'AreaBaseline'>, model: ChartModel) {
		super(series, model);
		// The composite wraps the single area renderer; kept composite,
		// presumably for parity with other pane views — TODO confirm.
		this._renderer.setRenderers([this._areaRenderer]);
	}
	// Returns the renderer configured for the current frame, or null when the
	// series is hidden.
	public renderer(height: number, width: number): IPaneRenderer | null {
		if (!this._series.visible()) {
			return null;
		}
		const areaBaselineStyleProperties = this._series.options();
		// Recompute items/visible range if invalidated (LinePaneViewBase).
		this._makeValid();
		const data: PaneRendererAreaBaselineData = {
			lineType: areaBaselineStyleProperties.lineType,
			items: this._items,
			topLineColor: areaBaselineStyleProperties.topLineColor,
			bottomLineColor: areaBaselineStyleProperties.bottomLineColor,
			lineStyle: areaBaselineStyleProperties.lineStyle,
			lineWidth: areaBaselineStyleProperties.lineWidth,
			topFillColor1: areaBaselineStyleProperties.topFillColor1,
			topFillColor2: areaBaselineStyleProperties.topFillColor2,
			bottomFillColor1: areaBaselineStyleProperties.bottomFillColor1,
			bottomFillColor2: areaBaselineStyleProperties.bottomFillColor2,
			bottom: height as Coordinate,
			// Pixel y-coordinate of the baseline price (the top/bottom split).
			baseLine: this._series.priceScale().priceToCoordinate(areaBaselineStyleProperties.baseValue.price, 0),
			visibleRange: this._itemsVisibleRange,
			barWidth: this._model.timeScale().barSpacing(),
		};
		this._areaRenderer.setData(data);
		return this._renderer;
	}
	// Maps a base value's price to a pane coordinate. NOTE(review): not called
	// within this class — presumably a base-class hook; confirm before removing.
	protected _getBaselineCoordinate(baseValue: BaseValueType): Coordinate {
		return this._series.priceScale().priceToCoordinate(baseValue.price, 0);
	}
	// LinePaneViewBase hook: one raw LineItem per (time, price) pair.
	protected _createRawItem(time: TimePointIndex, price: BarPrice): LineItem {
		return this._createRawItemBase(time, price);
	}
}
|
#! /bin/bash
# Upload EveBox build artifacts (zip, rpm, deb) from dist/ to Bintray.
# Intended to run in Travis CI; exits quietly (status 0) unless this is a
# master-branch build of the canonical repo with an API key available.

set -e

# Only deploy if building from master on my repo.
if [ "${TRAVIS_REPO_SLUG}" != "jasonish/evebox" ]; then
    echo "Not deploying packages for builds from repo ${TRAVIS_REPO_SLUG}."
    exit 0
fi

if [ "${TRAVIS_BRANCH}" != "master" ]; then
    echo "Not deploying packages for branch ${TRAVIS_BRANCH}."
    exit 0
fi

if [ "${BINTRAY_API_KEY}" = "" ]; then
    echo "BINTRAY_API_KEY is empty. Not deploying."
    exit 0
fi

# Deploy zip's to Bintray. All zips are uploaded (no break here).
for zip in dist/evebox-*.zip; do
    echo "Uploading ${zip}."
    curl -T ${zip} -u jasonish:${BINTRAY_API_KEY} \
	 "https://api.bintray.com/content/jasonish/evebox-zip-dev/evebox/dev/$(basename ${zip})?publish=1&override=1"
    echo
done

# Deploy RPM to Bintray. The `break` means only the FIRST matching rpm is
# uploaded.
for rpm in dist/evebox*.rpm; do
    echo "Uploading ${rpm}."
    curl -T ${rpm} -u jasonish:${BINTRAY_API_KEY} \
	 "https://api.bintray.com/content/jasonish/evebox-rpm-dev/evebox/dev/$(basename ${rpm})?publish=1&override=1"
    echo
    break
done

# Deploy Debian package to Bintray. As above, only the first .deb is uploaded;
# the URL's matrix parameters pin distribution/component/architecture.
for deb in dist/evebox*.deb; do
    echo "Uploading ${deb}."
    curl -T ${deb} -u jasonish:${BINTRAY_API_KEY} \
	 "https://api.bintray.com/content/jasonish/deb-evebox-latest/evebox/latest/$(basename ${deb});deb_distribution=jessie;deb_component=main;deb_architecture=amd64?publish=1&override=1"
    echo
    break
done
|
'use strict';

// CommonJS build of the "identity" multihash: hashing is a no-op, the digest
// is simply the input bytes, registered under multicodec code 0x00.
Object.defineProperty(exports, '__esModule', { value: true });

const bytesLib = require('../bytes.js');
const digestLib = require('./digest.js');

// Multicodec code and canonical name for the identity hash.
const code = 0;
const name = 'identity';

// Coerce the input to a byte array and wrap it, unhashed, in a Digest.
const digest = (input) => digestLib.create(code, bytesLib.coerce(input));

// Bundle the hasher description for consumers that want a single object.
const identity = { code, name, digest };

exports.code = code;
exports.digest = digest;
exports.identity = identity;
exports.name = name;
|
#!/bin/sh
# Build the cwltool Docker images and, when HEAD sits exactly on a release
# tag, also tag both images with that version.
set -e

docker build --file=cwltool.Dockerfile --tag=commonworkflowlanguage/cwltool-module --target module .
docker build --file=cwltool.Dockerfile --tag=commonworkflowlanguage/cwltool .

version=$(git describe --tags)

# `git describe` appends "-<n>-g<sha>" when HEAD is past the tag, so a version
# containing '-' is not a release. BUG FIX: the original ran the grep as a
# standalone pipeline and then tested $? — under `set -e` a grep "failure"
# (exit 1) aborted the whole script before the test, and `>&` is a bashism
# invalid under #!/bin/sh. Testing the pipeline in the `if` avoids both.
if echo "$version" | grep -vq -- '-'; then
    docker tag commonworkflowlanguage/cwltool-module "commonworkflowlanguage/cwltool-module:$version"
    docker tag commonworkflowlanguage/cwltool "commonworkflowlanguage/cwltool:$version"
fi
|
<filename>acceptance/suites/tests/full/20_client_commands/create-hook/create-hook_with_missing_required_config_param.rb<gh_stars>100-1000
# -*- encoding: utf-8 -*-
# Acceptance test: `razor create-hook` must fail with a clear error when a
# required hook-type configuration parameter ("value") is not supplied.

# this is required because of the use of eval interacting badly with require_relative
require 'razor/acceptance/utils'
require 'yaml'

# Skip hosts with these roles; run only on plain agents.
confine :except, :roles => %w{master dashboard database frictionless}

test_name 'QA-1820 - C59742 - create-hook with missing required configuration parameter'
step 'https://testrail.ops.puppetlabs.net/index.php?/cases/view/59742'

# Location where razor-server discovers hook types on the agent.
hook_dir = '/opt/puppetlabs/server/apps/razor-server/share/razor-server/hooks'

hook_type = 'hook_type_1'
hook_name = 'hook_name_2'
hook_path = "#{hook_dir}/#{hook_type}.hook"

# Hook-type configuration: "value" is required (no default), while "foo" and
# "bar" have defaults and therefore need not be supplied.
configuration_file =<<-EOF
---
value:
  description: "The current value of the hook"
  required: true
foo:
  description: "The current value of the hook"
  default: defaultFoo
bar:
  description: "The current value of the hook"
  default: defaultBar
EOF

agents.each do |agent|
  # Restore the hooks directory afterwards so the fabricated type leaks nowhere.
  with_backup_of(agent, hook_dir) do
    step "Create hook type"
    on(agent, "mkdir -p #{hook_path}")
    create_remote_file(agent,"#{hook_path}/configuration.yaml", configuration_file)
    on(agent, "chmod +r #{hook_path}/configuration.yaml")

    step 'create hook with missing hook configuration attr'
    # Exit code 1 is expected; the error text must call out the missing key.
    on(agent, "razor create-hook --name #{hook_name}" \
              " --hook-type #{hook_type}", :acceptable_exit_codes => [1]) do |result| \
      assert_match %r(error: configuration key 'value' is required by this hook type, but was not supplied), result.stdout
    end
  end
end
|
# Sample usage of the RedisHandler class
# Minimal configuration for a Sentinel-backed Redis connection.
conf = {
    'host': 'localhost',
    'index_name': 0,
    'master_name': 'my_master',
    # NOTE(review): this key appeared as the anonymization placeholder
    # '<PASSWORD>' in the original source; 'password' is the conventional
    # key name — confirm against RedisHandler's expected configuration keys.
    'password': 'my_password'
}

# Create an instance of RedisHandler
redis_handler = RedisHandler(conf)

# Initialize the Redis connection
redis_handler.init_redis()
<filename>extras/cmake-example/mvlc-connect-usb.cc
#include <iostream>
#include <mesytec-mvlc/mesytec-mvlc.h>
using namespace mesytec::mvlc;
// Minimal example: connect to an MVLC controller over USB and report status.
// Exit code 0 on success, 1 if the connection could not be established.
int main(int argc, char *argv[])
{
    std::cout << "Using mesytec-mvlc library version " << library_version() << std::endl;

    auto controller = make_mvlc_usb();

    const auto ec = controller.connect();
    if (ec)
    {
        std::cout << "Could not connect to mvlc: " << ec.message() << std::endl;
        return 1;
    }

    std::cout << "Connected to MVLC, " << controller.connectionInfo() << std::endl;
    return 0;
}
|
/** Payload accepted by every log method — currently a plain string. */
declare type Message = string;

/** Minimal leveled-logging contract shared by all loggers in this module. */
export interface Logger {
    trace(message: Message): void;
    debug(message: Message): void;
    info(message: Message): void;
    warn(message: Message): void;
    error(message: Message): void;
}

/**
 * Logger that delegates to a namespaced backing logger.
 * NOTE(review): the type of the private `logger` field is erased in this
 * declaration file — confirm the implementation before relying on it.
 */
declare class DebugLogger implements Logger {
    private readonly logger;
    constructor(namespace: string);
    trace(message: Message): void;
    debug(message: Message): void;
    info(message: Message): void;
    warn(message: Message): void;
    error(message: Message): void;
}

/**
 * Test double that records messages into per-level arrays instead of
 * emitting them, so tests can assert on what was logged.
 */
export declare class FakeLogger implements Logger {
    readonly traceLogs: Message[];
    readonly debugLogs: Message[];
    readonly infoLogs: Message[];
    readonly warnLogs: Message[];
    readonly errorLogs: Message[];
    trace(message: Message): void;
    debug(message: Message): void;
    info(message: Message): void;
    warn(message: Message): void;
    error(message: Message): void;
}

/** Shared module-level logger instances. */
export declare const log: DebugLogger;
export declare const containerLog: DebugLogger;
export {};
|
// Protocol definition
// Abstraction over view controllers that can expose an interaction
// controller (e.g. to drive interactive transitions).
protocol AbstractViewController {
    var interactionController: BaseInteractionController? { get }
}

// BaseInteractionController protocol
protocol BaseInteractionController {
    // Define required methods or properties for interaction controller
    // ...
}

// Conforming class
class FATestViewController: AbstractViewController {
    var verticalSwipController: BaseInteractionController? // Assuming this property is defined in the class

    // Satisfies AbstractViewController by forwarding to the vertical-swipe
    // controller; nil until verticalSwipController is assigned.
    var interactionController: BaseInteractionController? {
        return self.verticalSwipController
    }
}
# Remove stale trace files from any previous fastsort run so the simulator
# starts with a clean slate.
rm -rf ../trace/flow_fastsort_0.txt
rm -rf ../trace/cpu_fastsort_0.txt
rm -rf ../trace/L2_fastsort_0.txt
rm -rf ../trace/router_fastsort_0.txt
# Launch the out-of-order simulator: redirect the report and dump output,
# wire up the network read/write files and the per-component trace files,
# and stop after 10M instructions (no barrier limit).
~/huarzail/CMP_POPNET_ACK/CMP/PMS_trace/cmp0/sim-outorder \
  -redir:sim ./result/test_fastsort.txt \
  -redir:dump ./result/test_fastsort.txt.out \
  -read_file ../../../pt_net_0.txt \
  -write_file ../../../pt_net_in.txt \
  -write_file_backup ../../../pt_net_in_backup.txt \
  -flow_trace ../trace/flow_fastsort_0.txt \
  -router_trace ../trace/router_fastsort_0.txt \
  -cpu:trace ../trace/cpu_fastsort_0.txt \
  -L2:trace ../trace/L2_fastsort_0.txt \
  -config ./config_mesh_ooo_xy_org \
  -max:barrier 0 -max:inst 10000000 hello.BNC
|
<?php
// Function to read product information from file and generate report
// Function to read product information from file and generate report
/**
 * Reads a product-information file, validates its line-oriented format and
 * echoes either the formatted report or an error message.
 *
 * @param string $filename Path to the product information file.
 * @return void Output is written directly with echo.
 */
function generateProductReport($filename)
{
    if (!file_exists($filename)) {
        echo "Error: Product information file not found.";
        return;
    }

    $productInfo = file_get_contents($filename);
    $pattern = '/Product Name: (.+)\nPrice: (.+)\nCategory: (.+)\nIn Stock: (.+)/';

    if (!preg_match($pattern, $productInfo, $matches)) {
        echo "Error: Invalid format in product information file.";
        return;
    }

    // Re-emit the captured fields in the canonical report layout.
    echo "Product Name: " . $matches[1] . "\nPrice: " . $matches[2] . "\nCategory: " . $matches[3] . "\nIn Stock: " . $matches[4];
}

// Usage
generateProductReport('inc.product.info');
?> |
<reponame>crrobinson14/sits
exports.action = {
name: 'processImage',
description: 'Process and store an image using all specified variants. May overwrite existing files.',
blockedConnectionTypes: ['websocket'],
middleware: ['requireAPIKey'],
inputs: {
url: {
required: true,
},
variantIds: {
required: true,
validator: (p, conn, tmpl) => (Array.isArray(p) && p.length > 0) || 'must be non-empty array',
},
},
run: function(api, data, next) {
Promise.all(data.params.variantIds.map(variantId => api.store.get(data.params.url, variantId)))
.then(() => next())
.catch(e => next(e));
}
};
|
# Sanity check that && chains wait for the previous command to finish.
sleep 10 && echo "hello"
# First copy after a short delay, then re-run copy_result.sh once per hour,
# strictly sequentially (each sleep starts only after the copy completes).
sleep 5 && sh copy_result.sh # the second line must wait until the first line finished, so do this every 1 hour
sleep 1h && sh copy_result.sh
sleep 1h && sh copy_result.sh
sleep 1h && sh copy_result.sh
sleep 1h && sh copy_result.sh
|
<filename>adapter/swapi-client/src/main/java/com/report/adapter/swapi/client/converter/UrlIteratorToLongIdSetConverter.java<gh_stars>1-10
package com.report.adapter.swapi.client.converter;
import com.fasterxml.jackson.databind.JsonNode;
import com.report.adapter.swapi.client.vo.LongIdSet;
import com.report.adapter.swapi.client.vo.UrlIterator;
import lombok.RequiredArgsConstructor;
import org.modelmapper.AbstractConverter;
import org.modelmapper.ModelMapper;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
/**
 * ModelMapper converter that turns an iterator of SWAPI resource URLs into
 * the set of numeric ids embedded in those URLs. Each URL is mapped to a
 * Long via the injected ModelMapper; null input yields null output.
 */
@RequiredArgsConstructor
public class UrlIteratorToLongIdSetConverter extends AbstractConverter<UrlIterator, LongIdSet> {

    private final ModelMapper modelMapper;

    @Override
    protected LongIdSet convert(UrlIterator urlIterator) {
        if (urlIterator == null) {
            return null;
        }
        Set<Long> ids = new HashSet<>();
        // Walk the raw JSON nodes, extracting and mapping each URL to its id.
        for (Iterator<JsonNode> nodes = urlIterator.getRaw(); nodes.hasNext(); ) {
            String url = nodes.next().asText();
            ids.add(modelMapper.map(url, Long.class));
        }
        return new LongIdSet(ids);
    }
}
|
'use strict';

// Entry point for the EPA polling worker: connects to MongoDB, builds the
// storage layer and the epa job, then runs the job on a fixed interval.
let config = require('./config.json');
let cfg = require('../web/config.json');
let mongoose = require('mongoose');

// Any non-production environment talks to a local MongoDB instance.
let isDEV = process.env.NODE_ENV !== 'production';
if (isDEV) {
    cfg.db.mongodb.server="localhost";
}

//mongo db connect
// NOTE(review): no error handler is attached to the connection promise/
// events, so a failed connect only surfaces through later queries — confirm
// this is intentional.
mongoose.connect('mongodb://' + cfg.db.mongodb.server + '/' + cfg.db.mongodb.db,
    {
        user:cfg.db.mongodb.user,
        pass:cfg.db.mongodb.pass
    });

let Storage = require('../web/lib/storage')(mongoose);
let epa = require('./lib/epa')(config, Storage);
// Share the web app's DB settings with this worker's config object.
config.db = cfg.db;

epaRun();

// Run one epa pass, then reschedule itself config.epa.reload seconds later.
function epaRun() {
    epa();
    setTimeout(epaRun, config.epa.reload * 1000);
}
<filename>exchange.signalR.client.web.frontend/src/app/shared/models/notification-factory.model.ts<gh_stars>1-10
import { Notification } from "@interfaces/notification.interface";
export class NotificationFactory {
notification: Notification
constructor() {
//default value
this.notification = {
content: "",
dismissed: false,
id: "",
style: ""
}
}
create(content: string, dismissed: boolean, id: string, style: 'error' | 'success'): Notification {
return this.notification = {
content: content,
dismissed: dismissed,
id: id,
style: style
}
}
} |
#!/bin/sh
# Builds and runs tests for a particular target passed as an argument to this
# script.
set -ex

TARGET=$1

# If we're going to run tests inside of a qemu image, then we don't need any of
# the scripts below. Instead, download the image, prepare a filesystem which has
# the current state of this repository, and then run the image.
#
# It's assume that all images, when run with two disks, will run the `run.sh`
# script from the second which we place inside.
if [ "$QEMU" != "" ]; then
  tmpdir=/tmp/qemu-img-creation
  mkdir -p $tmpdir

  # Fetch the OS image, decompressing as needed; cache it under a flattened
  # file name (slashes replaced by __) so repeated runs skip the download.
  if [ -z "${QEMU#*.gz}" ]; then
    # image is .gz : download and uncompress it
    qemufile=$(echo ${QEMU%.gz} | sed 's/\//__/g')
    if [ ! -f $tmpdir/$qemufile ]; then
      curl https://s3-us-west-1.amazonaws.com/rust-lang-ci2/libc/$QEMU | \
        gunzip -d > $tmpdir/$qemufile
    fi
  elif [ -z "${QEMU#*.xz}" ]; then
    # image is .xz : download and uncompress it
    qemufile=$(echo ${QEMU%.xz} | sed 's/\//__/g')
    if [ ! -f $tmpdir/$qemufile ]; then
      curl https://s3-us-west-1.amazonaws.com/rust-lang-ci2/libc/$QEMU | \
        unxz > $tmpdir/$qemufile
    fi
  else
    # plain qcow2 image: just download it
    qemufile=$(echo ${QEMU} | sed 's/\//__/g')
    if [ ! -f $tmpdir/$qemufile ]; then
      curl https://s3-us-west-1.amazonaws.com/rust-lang-ci2/libc/$QEMU \
        > $tmpdir/$qemufile
    fi
  fi

  # Create a mount a fresh new filesystem image that we'll later pass to QEMU.
  # This will have a `run.sh` script will which use the artifacts inside to run
  # on the host.
  rm -f $tmpdir/libc-test.img
  mkdir $tmpdir/mount

  # Do the standard rigamarole of cross-compiling an executable and then the
  # script to run just executes the binary.
  cargo build \
    --manifest-path libc-test/Cargo.toml \
    --target $TARGET \
    --test main
  rm $CARGO_TARGET_DIR/$TARGET/debug/main-*.d
  cp $CARGO_TARGET_DIR/$TARGET/debug/main-* $tmpdir/mount/libc-test
  # Inside the guest, run.sh receives the mount point as $1.
  echo 'exec $1/libc-test' > $tmpdir/mount/run.sh

  du -sh $tmpdir/mount
  genext2fs \
    --root $tmpdir/mount \
    --size-in-blocks 100000 \
    $tmpdir/libc-test.img

  # Pass -snapshot to prevent tampering with the disk images, this helps when
  # running this script in development. The two drives are then passed next,
  # first is the OS and second is the one we just made. Next the network is
  # configured to work (I'm not entirely sure how), and then finally we turn off
  # graphics and redirect the serial console output to out.log.
  qemu-system-x86_64 \
    -m 1024 \
    -snapshot \
    -drive if=virtio,file=$tmpdir/$qemufile \
    -drive if=virtio,file=$tmpdir/libc-test.img \
    -net nic,model=virtio \
    -net user \
    -nographic \
    -vga none 2>&1 | tee $CARGO_TARGET_DIR/out.log
  # The build passes only if the guest printed a PASSED summary line.
  exec grep "^PASSED .* tests" $CARGO_TARGET_DIR/out.log
fi

# FIXME: x86_64-unknown-linux-gnux32 fail to compile wihout --release
# See https://github.com/rust-lang/rust/issues/45417
opt=
if [ "$TARGET" = "x86_64-unknown-linux-gnux32" ]; then
  opt="--release"
fi

# Run the suite twice: once with default features disabled, once enabled.
cargo test $opt --no-default-features --manifest-path libc-test/Cargo.toml --target $TARGET
exec cargo test $opt --manifest-path libc-test/Cargo.toml --target $TARGET
|
package control;
import modelo.datos.VO.LogroVO;
import modelo.datos.VO.UsuarioVO;
import modelo.datos.WebFacade;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.PrintWriter;
import java.sql.SQLException;
import java.util.ArrayList;
/**
 * Servlet that loads every achievement ("logro") through the web facade and
 * forwards to the verLogros.jsp view for rendering.
 */
public class logrosServlet extends HttpServlet {

    /** POST is handled identically to GET. */
    protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        doGet(request, response);
    }

    protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        response.setContentType("text/html;charset=UTF-8");
        // Note: do NOT call response.getWriter() here — obtaining the writer
        // before forwarding can commit the response and break the forward.
        WebFacade fachada = new WebFacade();
        try {
            ArrayList<LogroVO> listaLogros = fachada.getLogros();
            request.setAttribute("logros", listaLogros);
            // ServletContext.getRequestDispatcher requires a context-relative
            // path beginning with '/'; the bare "verLogros.jsp" form returns
            // null and would NPE on forward().
            RequestDispatcher rd = getServletContext().getRequestDispatcher("/verLogros.jsp");
            rd.forward(request, response);
        } catch (SQLException e) {
            e.printStackTrace();
            // Surface the failure to the client instead of silently
            // returning an empty 200 response.
            response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
        }
    }
}
|
/**
 * Builds barrel-file export statements, one `export * from './lib/<name>';`
 * line per module name, joined with newlines.
 */
function generateExportStatements(moduleNames: string[]): string {
    const statements: string[] = [];
    for (const moduleName of moduleNames) {
        statements.push(`export * from './lib/${moduleName}';`);
    }
    return statements.join('\n');
}
// Test the function
const modules = ['user.module', 'auth.service', 'logger'];
const exportStatements = generateExportStatements(modules);
console.log(exportStatements); |
<reponame>arc-repos/arc-functions-python
# # -*- coding: utf-8 -*-
from uuid import UUID

import pytest

import arc.events


@pytest.mark.filterwarnings("ignore:the imp module is deprecated")
def test_publish(arc_reflection, sns_client):
    """Publishing an arc event returns a dict whose MessageId is a valid v4 UUID."""
    topic_name = "ping"
    # Create the SNS topic and register its ARN where arc's service
    # discovery (the reflection fixture) will look it up.
    sns_client.create_topic(Name=topic_name)
    topics = sns_client.list_topics()
    arc_reflection(params={f"events/{topic_name}": topics["Topics"][0]["TopicArn"]})
    val = arc.events.publish(name=topic_name, payload={"python": True})
    assert isinstance(val, dict)
    assert "MessageId" in val
    # UUID() raises ValueError if MessageId is not a well-formed UUID.
    parsed = UUID(val["MessageId"], version=4)
    assert isinstance(parsed, UUID)
|
<reponame>lgoldstein/communitychest<filename>development/src/main/java/net/community/chest/net/proto/text/ssh/message/auth/UserAuthFailure.java
/*
*
*/
package net.community.chest.net.proto.text.ssh.message.auth;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.StreamCorruptedException;
import java.util.Collection;
import net.community.chest.net.proto.text.ssh.SSHMsgCode;
import net.community.chest.net.proto.text.ssh.SSHProtocol;
import net.community.chest.net.proto.text.ssh.io.SSHInputDataDecoder;
import net.community.chest.net.proto.text.ssh.io.SSHOutputDataEncoder;
import net.community.chest.net.proto.text.ssh.message.AbstractSSHMsgEncoder;
/**
* <P>Copyright as per GPLv2</P>
*
* @author <NAME>.
* @since Jul 2, 2009 9:38:50 AM
*/
/**
 * Encoder/decoder for the SSH_MSG_USERAUTH_FAILURE message: a name-list of
 * authentication methods that may continue, followed by a partial-success
 * flag. Wire order is fixed: names list first, then the boolean.
 */
public class UserAuthFailure extends AbstractSSHMsgEncoder<UserAuthFailure> {
    /**
     *
     */
    private static final long serialVersionUID = -933831896636947794L;

    public UserAuthFailure ()
    {
        super(SSHMsgCode.SSH_MSG_USERAUTH_FAILURE);
    }

    // Authentication method names that may continue the exchange.
    private Collection<String> _allowedAuthsList;
    public Collection<String> getAllowedAuthsList ()
    {
        return _allowedAuthsList;
    }

    public void setAllowedAuthsList (Collection<String> allowedAuthsList)
    {
        _allowedAuthsList = allowedAuthsList;
    }

    // Boxed Boolean (not primitive) so "not set" is distinguishable from
    // false; write/encode refuse to serialize while it is unset.
    private Boolean _partialSuccess;
    public Boolean getPartialSuccess ()
    {
        return _partialSuccess;
    }

    public void setPartialSuccess (Boolean partialSuccess)
    {
        _partialSuccess = partialSuccess;
    }

    /*
     * @see net.community.chest.io.encode.ElementEncoder#read(java.io.InputStream)
     */
    @Override
    public UserAuthFailure read (final InputStream in) throws IOException
    {
        // Wire order: names list, then partial-success boolean.
        setAllowedAuthsList(SSHProtocol.readNamesList(in));
        setPartialSuccess(Boolean.valueOf(SSHProtocol.readBoolean(in)));
        return this;
    }

    /*
     * @see net.community.chest.net.proto.text.ssh.SSHDataObjectEncoder#decode(net.community.chest.net.proto.text.ssh.io.SSHInputDataDecoder)
     */
    @Override
    public UserAuthFailure decode (SSHInputDataDecoder in) throws IOException
    {
        if (null == in)
            throw new IOException("decode(" + getMsgCode() + ") no " + SSHInputDataDecoder.class.getSimpleName() + " instance");

        // Same field order as read(), but via the decoder abstraction.
        setAllowedAuthsList(in.readNamesList());
        setPartialSuccess(Boolean.valueOf(in.readBoolean()));
        return this;
    }

    /*
     * @see net.community.chest.io.encode.ElementEncoder#write(java.io.OutputStream)
     */
    @Override
    public void write (final OutputStream out) throws IOException
    {
        // Refuse to emit a half-initialized message.
        final Boolean ps=getPartialSuccess();
        if (null == ps)
            throw new StreamCorruptedException("write(" + getMsgCode() + ") partial success value not set");

        SSHProtocol.writeNamesList(out, getAllowedAuthsList());
        SSHProtocol.writeBoolean(out, ps.booleanValue());
    }

    /*
     * @see net.community.chest.net.proto.text.ssh.SSHDataObjectEncoder#encode(net.community.chest.net.proto.text.ssh.io.SSHOutputDataEncoder)
     */
    @Override
    public void encode (SSHOutputDataEncoder out) throws IOException
    {
        if (null == out)
            throw new IOException("encode(" + getMsgCode() + ") no " + SSHOutputDataEncoder.class.getSimpleName() + " instance");

        // Same unset-flag guard as write().
        final Boolean ps=getPartialSuccess();
        if (null == ps)
            throw new StreamCorruptedException("encode(" + getMsgCode() + ") partial success value not set");

        out.writeNamesList(getAllowedAuthsList());
        out.writeBoolean(ps.booleanValue());
    }
}
|
import pygame
class PlayerAirplane:
    """Player-controlled airplane with two animation frames and a 2D position."""

    def __init__(self):
        # Animation frames; remain None until load_images() is called.
        self.image_one = None
        self.image_two = None
        # Current screen position; starts at the origin.
        self.x = 0
        self.y = 0

    def load_images(self, image_path1, image_path2):
        """
        Load images for the airplane from the given file paths.

        Args:
            image_path1 (str): File path for the first image of the airplane.
            image_path2 (str): File path for the second image of the airplane.
        """
        self.image_one = pygame.image.load(image_path1)
        self.image_two = pygame.image.load(image_path2)

    def get_position(self):
        """
        Retrieve the current position of the airplane.

        Returns:
            tuple: A tuple representing the current position of the airplane (x, y).
        """
        # Previously an unimplemented stub returning None; now backed by the
        # x/y attributes initialized in __init__.
        return (self.x, self.y)
<reponame>Sherlock92/greentop
/**
* Copyright 2017 <NAME>. Distributed under the MIT license.
*/
#include "greentop/sport/ListEventTypesRequest.h"
namespace greentop {
namespace sport {

ListEventTypesRequest::ListEventTypesRequest() {
}

ListEventTypesRequest::ListEventTypesRequest(const MarketFilter& filter,
    const std::string& locale) :
    filter(filter),
    locale(locale) {
}

// Populate this request from its JSON wire representation; absent members
// leave the corresponding fields untouched.
void ListEventTypesRequest::fromJson(const Json::Value& json) {
    if (json.isMember("filter")) {
        filter.fromJson(json["filter"]);
    }
    if (json.isMember("locale")) {
        locale = json["locale"].asString();
    }
}

// Serialise to JSON, omitting members that are unset/empty.
Json::Value ListEventTypesRequest::toJson() const {
    Json::Value json(Json::objectValue);
    if (filter.isValid()) {
        json["filter"] = filter.toJson();
    }
    if (!locale.empty()) {
        json["locale"] = locale;
    }
    return json;
}

// A request is usable once its market filter is valid.
bool ListEventTypesRequest::isValid() const {
    return filter.isValid();
}

const MarketFilter& ListEventTypesRequest::getFilter() const {
    return filter;
}

void ListEventTypesRequest::setFilter(const MarketFilter& newFilter) {
    this->filter = newFilter;
}

const std::string& ListEventTypesRequest::getLocale() const {
    return locale;
}

void ListEventTypesRequest::setLocale(const std::string& newLocale) {
    this->locale = newLocale;
}

}
}
|
/**
* Copyright (c) 2015, <NAME>, Quintelligence d.o.o. and contributors
* All rights reserved.
*
* This source code is licensed under the FreeBSD license found in the
* LICENSE file in the root directory of this source tree.
*/
#include "fs_nodejs.h"
///////////////////////////////
// NodeJs-Directory
// Wraps a file path, normalising it to canonical absolute form on construction.
TNodeJsFPath::TNodeJsFPath(const TStr& FPath): CanonicalFPath(GetCanonicalPath(FPath)) { }

// Paths are equal iff their canonical forms match exactly.
bool TNodeJsFPath::Equals(const TNodeJsFPath& JsFPath) const {
    return CanonicalFPath == JsFPath.GetFPath();
}

// True when this path lies under JsFPath (prefix test on canonical forms).
bool TNodeJsFPath::IsSubdir(const TNodeJsFPath& JsFPath) const {
    return CanonicalFPath.StartsWith(JsFPath.GetFPath());
}

// Convert each raw path string into a canonicalised TNodeJsFPath.
void TNodeJsFPath::GetFPathV(const TStrV& FPathV, TVec<TNodeJsFPath>& JsFPathV) {
    for (TStrV::TIter It = FPathV.BegI(); It != FPathV.EndI(); ++It) {
        JsFPathV.Add(TNodeJsFPath(*It));
    }
}

// Canonicalise FPath: make it absolute, then resolve '.' and '..' components
// using a stack so the result contains no redundant segments.
TStr TNodeJsFPath::GetCanonicalPath(const TStr& FPath) {
    // Get absolute path
    TStr AbsFPath = TStr::GetNrAbsFPath(FPath);
    // Remove any redundancies
    TStrV CanonV; AbsFPath.SplitOnAllCh('/', CanonV);
    TSStack<TStr> CanonS; TStr CurrStr;
    for (int ElN = 0; ElN < CanonV.Len(); ++ElN) {
        CurrStr = CanonV.GetVal(ElN);
        if (CurrStr == "..") {
            // '..' pops the previous component; underflow means the path
            // tried to escape the root.
            EAssertR(!CanonS.Empty(), "Stack empty");
            CanonS.Pop();
        } else if (CurrStr != ".") {
            CanonS.Push(CurrStr+"/");
        }
    }
    // Assemble the canonical path (from left to right
    EAssertR(!CanonS.Empty(), "Stack empty");
    // We start with drive letter (Windows) or slash (Unix)
    TChA CanonFPath = AbsFPath.LeftOf('/'); CanonFPath += '/';
    // Get the rest of the path
    for (int CanonN = CanonS.Len() - 1; CanonN >= 0; CanonN--) {
        CanonFPath += CanonS[CanonN];
    }
    // Done
    return CanonFPath;
}
///////////////////////////////
// NodeJs-Filesystem
// Register all filesystem functions on the module's exports object.
void TNodeJsFs::Init(v8::Local<v8::Object> exports) {
    // Add all prototype methods, getters and setters here.
    NODE_SET_METHOD(exports, "openRead", _openRead);
    NODE_SET_METHOD(exports, "openWrite", _openWrite);
    NODE_SET_METHOD(exports, "openAppend", _openAppend);
    NODE_SET_METHOD(exports, "exists", _exists);
    NODE_SET_METHOD(exports, "copy", _copy);
    NODE_SET_METHOD(exports, "move", _move);
    NODE_SET_METHOD(exports, "del", _del);
    NODE_SET_METHOD(exports, "rename", _rename);
    NODE_SET_METHOD(exports, "fileInfo", _fileInfo);
    NODE_SET_METHOD(exports, "mkdir", _mkdir);
    NODE_SET_METHOD(exports, "rmdir", _rmdir);
    NODE_SET_METHOD(exports, "listFile", _listFile);
    NODE_SET_METHOD(exports, "readLines", _readLines);
}

// fs.openRead(path) -> new FIn input stream over an existing file.
void TNodeJsFs::openRead(const v8::FunctionCallbackInfo<v8::Value>& Args) {
    v8::Isolate* Isolate = v8::Isolate::GetCurrent();
    v8::HandleScope HandleScope(Isolate);
    EAssertR(Args.Length() == 1 && Args[0]->IsString(), "Expected file path.");
    TStr FNm(*Nan::Utf8String (TNodeJsUtil::ToLocal(Nan::To<v8::String>(Args[0]))));
    // file exist check is done by TFIn
    Args.GetReturnValue().Set(
        TNodeJsUtil::NewInstance<TNodeJsFIn>(new TNodeJsFIn(FNm)));
}

// fs.openWrite(path) -> new FOut stream, truncating any existing file.
void TNodeJsFs::openWrite(const v8::FunctionCallbackInfo<v8::Value>& Args) { // Call withb AppendP = false
    v8::Isolate* Isolate = v8::Isolate::GetCurrent();
    v8::HandleScope HandleScope(Isolate);
    EAssertR(Args.Length() == 1 && Args[0]->IsString(), "Expected file path.");
    TStr FNm(*Nan::Utf8String (TNodeJsUtil::ToLocal(Nan::To<v8::String>(Args[0]))));
    Args.GetReturnValue().Set(
        TNodeJsUtil::NewInstance<TNodeJsFOut>(new TNodeJsFOut(FNm, false)));
}

// fs.openAppend(path) -> new FOut stream positioned at end of file.
void TNodeJsFs::openAppend(const v8::FunctionCallbackInfo<v8::Value>& Args) { // Call with AppendP = true
    v8::Isolate* Isolate = v8::Isolate::GetCurrent();
    v8::HandleScope HandleScope(Isolate);
    EAssertR(Args.Length() == 1 && Args[0]->IsString(), "Expected file path.");
    TStr FNm(*Nan::Utf8String (TNodeJsUtil::ToLocal(Nan::To<v8::String>(Args[0]))));
    Args.GetReturnValue().Set(
        TNodeJsUtil::NewInstance<TNodeJsFOut>(new TNodeJsFOut(FNm, true)));
}

// fs.exists(path) -> boolean.
void TNodeJsFs::exists(const v8::FunctionCallbackInfo<v8::Value>& Args) {
    v8::Isolate* Isolate = v8::Isolate::GetCurrent();
    v8::HandleScope HandleScope(Isolate);
    EAssertR(Args.Length() == 1 && Args[0]->IsString(), "Expected file path.");
    TStr FNm(*Nan::Utf8String (TNodeJsUtil::ToLocal(Nan::To<v8::String>(Args[0]))));
    Args.GetReturnValue().Set(Nan::New(TFile::Exists(FNm)));
}
// fs.copy(src, dst): copy an existing file; asserts the source exists.
void TNodeJsFs::copy(const v8::FunctionCallbackInfo<v8::Value>& Args) {
    v8::Isolate* Isolate = v8::Isolate::GetCurrent();
    v8::HandleScope HandleScope(Isolate);
    EAssertR(Args.Length() == 2 && Args[0]->IsString() && Args[1]->IsString(),
        "Expected 2 arguments: source and destination file paths.");
    TStr SrcFNm(*Nan::Utf8String (TNodeJsUtil::ToLocal(Nan::To<v8::String>(Args[0]))));
    // NOTE(review): sibling functions pass .CStr() to EAssertR here —
    // confirm the TStr overload is intended.
    EAssertR(TFile::Exists(SrcFNm), "File '" + SrcFNm + "' does not exist");
    TStr DstFNm(*Nan::Utf8String (TNodeJsUtil::ToLocal(Nan::To<v8::String>(Args[1]))));
    TFile::Copy(SrcFNm, DstFNm);
    Args.GetReturnValue().Set(Nan::Undefined());
}

// fs.move(src, dst): implemented as copy followed by delete of the source
// (rather than TFile::Rename).
void TNodeJsFs::move(const v8::FunctionCallbackInfo<v8::Value>& Args) {
    v8::Isolate* Isolate = v8::Isolate::GetCurrent();
    v8::HandleScope HandleScope(Isolate);
    EAssertR(Args.Length() == 2 && Args[0]->IsString() && Args[1]->IsString(),
        "Expected 2 arguments: source and destination file paths.");
    TStr SrcFNm(*Nan::Utf8String (TNodeJsUtil::ToLocal(Nan::To<v8::String>(Args[0]))));
    EAssertR(TFile::Exists(SrcFNm), TStr("File '" + SrcFNm + "' does not exist").CStr());
    TStr DstFNm(*Nan::Utf8String (TNodeJsUtil::ToLocal(Nan::To<v8::String>(Args[1]))));
    TFile::Copy(SrcFNm, DstFNm);
    TFile::Del(SrcFNm, false); // ThrowExceptP = false
    Args.GetReturnValue().Set(Nan::Undefined());
}

// fs.del(path) -> boolean delete success; asserts the file exists first.
void TNodeJsFs::del(const v8::FunctionCallbackInfo<v8::Value>& Args) {
    v8::Isolate* Isolate = v8::Isolate::GetCurrent();
    v8::HandleScope HandleScope(Isolate);
    EAssertR(Args.Length() == 1 && Args[0]->IsString(),
        "Expected a file path as the only argument.");
    TStr FNm(*Nan::Utf8String (TNodeJsUtil::ToLocal(Nan::To<v8::String>(Args[0]))));
    EAssertR(TFile::Exists(FNm), TStr("File '" + FNm + "' does not exist").CStr());
    Args.GetReturnValue().Set(Nan::New(TFile::Del(FNm, false))); // ThrowExceptP = false
}

// fs.rename(src, dst): true rename via TFile::Rename (unlike move above).
void TNodeJsFs::rename(const v8::FunctionCallbackInfo<v8::Value>& Args) {
    v8::Isolate* Isolate = v8::Isolate::GetCurrent();
    v8::HandleScope HandleScope(Isolate);
    EAssertR(Args.Length() == 2 && Args[0]->IsString() && Args[1]->IsString(),
        "Expected 2 arguments: source and destination file paths.");
    TStr SrcFNm(*Nan::Utf8String (TNodeJsUtil::ToLocal(Nan::To<v8::String>(Args[0]))));
    EAssertR(TFile::Exists(SrcFNm), TStr("File '" + SrcFNm + "' does not exist").CStr());
    TStr DstFNm(*Nan::Utf8String (TNodeJsUtil::ToLocal(Nan::To<v8::String>(Args[1]))));
    TFile::Rename(SrcFNm, DstFNm);
    Args.GetReturnValue().Set(Nan::Undefined());
}
// fs.fileInfo(path) -> { createTime, lastAccessTime, lastWriteTime, size };
// timestamps are formatted as web-log date/time strings, size as a double.
void TNodeJsFs::fileInfo(const v8::FunctionCallbackInfo<v8::Value>& Args) {
    v8::Isolate* Isolate = v8::Isolate::GetCurrent();
    v8::HandleScope HandleScope(Isolate);
    EAssertR(Args.Length() == 1 && Args[0]->IsString(),
        "Expected a file path as the only argument.");
    TStr FNm(*Nan::Utf8String (TNodeJsUtil::ToLocal(Nan::To<v8::String>(Args[0]))));
    EAssertR(TFile::Exists(FNm), TStr("File '" + FNm + "' does not exist").CStr());
    const uint64 CreateTm = TFile::GetCreateTm(FNm);
    const uint64 LastAccessTm = TFile::GetLastAccessTm(FNm);
    const uint64 LastWriteTm = TFile::GetLastWriteTm(FNm);
    const uint64 Size = TFile::GetSize(FNm);
    v8::Local<v8::Object> Obj = v8::Object::New(Isolate);
    Nan::Set(Obj, TNodeJsUtil::ToLocal(Nan::New("createTime")),
        TNodeJsUtil::ToLocal(Nan::New(TTm::GetTmFromMSecs(CreateTm).GetWebLogDateTimeStr().CStr())));
    Nan::Set(Obj, TNodeJsUtil::ToLocal(Nan::New("lastAccessTime")),
        TNodeJsUtil::ToLocal(Nan::New(TTm::GetTmFromMSecs(LastAccessTm).GetWebLogDateTimeStr().CStr())));
    Nan::Set(Obj, TNodeJsUtil::ToLocal(Nan::New("lastWriteTime")),
        TNodeJsUtil::ToLocal(Nan::New(TTm::GetTmFromMSecs(LastWriteTm).GetWebLogDateTimeStr().CStr())));
    // size is returned as a double since JS numbers cannot hold full uint64.
    Nan::Set(Obj, TNodeJsUtil::ToLocal(Nan::New("size")),
        Nan::New(static_cast<double>(Size)));
    Args.GetReturnValue().Set(Obj);
}

// fs.mkdir(path) -> boolean success of directory creation.
void TNodeJsFs::mkdir(const v8::FunctionCallbackInfo<v8::Value>& Args) {
    v8::Isolate* Isolate = v8::Isolate::GetCurrent();
    v8::HandleScope HandleScope(Isolate);
    EAssertR(Args.Length() == 1 && Args[0]->IsString(),
        "Expected directory name as the only argument.");
    TStr FPath(*Nan::Utf8String (TNodeJsUtil::ToLocal(Nan::To<v8::String>(Args[0]))));
    const bool GenDirP = TDir::GenDir(FPath);
    Args.GetReturnValue().Set(Nan::New(GenDirP));
}

// fs.rmdir(path) -> boolean success of directory removal.
void TNodeJsFs::rmdir(const v8::FunctionCallbackInfo<v8::Value>& Args) {
    v8::Isolate* Isolate = v8::Isolate::GetCurrent();
    v8::HandleScope HandleScope(Isolate);
    EAssertR(Args.Length() == 1 && Args[0]->IsString(),
        "Expected directory name as the only argument.");
    TStr FPath(*Nan::Utf8String (TNodeJsUtil::ToLocal(Nan::To<v8::String>(Args[0]))));
    const bool DelDirP = TDir::DelDir(FPath);
    Args.GetReturnValue().Set(Nan::New(DelDirP));
}

// fs.listFile(dirPath[, extension[, recurse]]) -> sorted array of file names.
void TNodeJsFs::listFile(const v8::FunctionCallbackInfo<v8::Value>& Args) {
    v8::Isolate* Isolate = v8::Isolate::GetCurrent();
    v8::HandleScope HandleScope(Isolate);
    EAssertR(Args.Length() >= 1 && Args[0]->IsString(),
        "Expected directory path as the first argument.");
    // Read parameters
    TStr FPath(*Nan::Utf8String (TNodeJsUtil::ToLocal(Nan::To<v8::String>(Args[0]))));
    TStrV FExtV;
    if (Args.Length() >= 2 && Args[1]->IsString()) {
        // Optional extension filter.
        FExtV.Add(TStr(*Nan::Utf8String (TNodeJsUtil::ToLocal(Nan::To<v8::String>(Args[1])))));
    }
    // Optional recurse-into-subdirectories flag (defaults to false).
    const bool RecurseP = Args.Length() >= 3 && Args[2]->IsBoolean() && Nan::To<bool>(Args[2]).FromJust();
    // Get file list
    TStrV FNmV;
    TFFile::GetFNmV(FPath, FExtV, RecurseP, FNmV);
    FNmV.Sort();
    v8::Local<v8::Array> FNmArr = v8::Array::New(Isolate, FNmV.Len());
    for(int FldN = 0; FldN < FNmV.Len(); ++FldN) {
        Nan::Set(FNmArr, v8::Integer::New(Isolate, FldN), TNodeJsUtil::ToLocal(Nan::New(FNmV.GetVal(FldN).CStr())));
    }
    Args.GetReturnValue().Set(FNmArr);
}
// fs.readLines(source, onLine, onEnd): stream lines from a file path, an
// open FIn stream, or a Node Buffer. onLine(line) is called per line and may
// return false to stop early; onEnd() is always called afterwards.
void TNodeJsFs::readLines(const v8::FunctionCallbackInfo<v8::Value>& Args) {
    v8::Isolate* Isolate = v8::Isolate::GetCurrent();
    v8::HandleScope HandleScope(Isolate);
    EAssertR(Args.Length() == 3, "TNodeJsFs::readLines: Invalid number of arguments!");
    EAssertR(!TNodeJsUtil::IsArgNull(Args, 0), "TNodeJsFs::readLines: Buffer is null or undefined!");
    PSIn SIn;
    if (TNodeJsUtil::IsArgStr(Args, 0)) {
        // Read from file
        const TStr FNm = TNodeJsUtil::GetArgStr(Args, 0);
        SIn = TFIn::New(FNm);
    } else if (TNodeJsUtil::IsArgWrapObj(Args, 0, TNodeJsFIn::GetClassId())) {
        // Read from input stream
        TNodeJsFIn* JsFIn = TNodeJsUtil::GetArgUnwrapObj<TNodeJsFIn>(Args, 0);
        SIn = JsFIn->SIn;
    } else {
        // Read from Node.js Buffer
        EAssertR(TNodeJsUtil::IsArgBuffer(Args, 0), "TNodeJsFs::readLines: argument not a buffer");
        v8::Local<v8::Object> BuffObj = TNodeJsUtil::ToLocal(Nan::To<v8::Object>(Args[0]));
        char* Buff = node::Buffer::Data(BuffObj);
        size_t BuffLen = node::Buffer::Length(BuffObj);
        SIn = new TThinMIn(Buff, (int)BuffLen);
    }
    v8::Local<v8::Function> LineCallback = TNodeJsUtil::GetArgFun(Args, 1);
    v8::Local<v8::Function> EndCallback = TNodeJsUtil::GetArgFun(Args, 2);
    TStr LineStr;
    while (SIn->GetNextLn(LineStr)) {
        bool ContinueLoop = true;
        v8::Local<v8::String> LineV8Str = TNodeJsUtil::ToLocal(Nan::New(LineStr.CStr()));
        // A falsy return from the line callback stops iteration early.
        ContinueLoop = TNodeJsUtil::ExecuteBool(LineCallback, LineV8Str);
        if (!ContinueLoop) { break; }
    }
    // End callback runs regardless of whether iteration was cut short.
    TNodeJsUtil::ExecuteVoid(EndCallback);
    Args.GetReturnValue().Set(Nan::Undefined());
}
///////////////////////////////
// NodeJs-FIn
// Persistent handle to the C++-side constructor for FIn instances.
v8::Persistent<v8::Function> TNodeJsFIn::Constructor;

// Register the FIn class: two function templates share one prototype —
// `tpl` for JS-side `new FIn(...)` (_NewJs) and `child` for C++-side
// construction (_NewCpp).
void TNodeJsFIn::Init(v8::Local<v8::Object> exports) {
    v8::Isolate* Isolate = v8::Isolate::GetCurrent();
    v8::HandleScope HandleScope(Isolate);
    v8::Local<v8::Context> context = Nan::GetCurrentContext();

    // template for creating function from javascript using "new", uses _NewJs callback
    v8::Local<v8::FunctionTemplate> tpl = v8::FunctionTemplate::New(Isolate, TNodeJsUtil::_NewJs<TNodeJsFIn>);
    // child will have the same properties and methods, but a different callback: _NewCpp
    v8::Local<v8::FunctionTemplate> child = v8::FunctionTemplate::New(Isolate, TNodeJsUtil::_NewCpp<TNodeJsFIn>);
    child->Inherit(tpl);

    child->SetClassName(TNodeJsUtil::ToLocal(Nan::New(GetClassId().CStr())));
    // ObjectWrap uses the first internal field to store the wrapped pointer
    child->InstanceTemplate()->SetInternalFieldCount(1);

    tpl->SetClassName(TNodeJsUtil::ToLocal(Nan::New(GetClassId().CStr())));
    // ObjectWrap uses the first internal field to store the wrapped pointer
    tpl->InstanceTemplate()->SetInternalFieldCount(1);

    // Add all prototype methods, getters and setters here
    NODE_SET_PROTOTYPE_METHOD(tpl, "peekCh", _peekCh);
    NODE_SET_PROTOTYPE_METHOD(tpl, "getCh", _getCh);
    NODE_SET_PROTOTYPE_METHOD(tpl, "readLine", _readLine);
    NODE_SET_PROTOTYPE_METHOD(tpl, "readString", _readString);
    NODE_SET_PROTOTYPE_METHOD(tpl, "readAll", _readAll);
    NODE_SET_PROTOTYPE_METHOD(tpl, "close", _close);
    NODE_SET_PROTOTYPE_METHOD(tpl, "isClosed", _isClosed);

    // Add properties
    tpl->InstanceTemplate()->SetAccessor(TNodeJsUtil::ToLocal(Nan::New("eof")), _eof);
    tpl->InstanceTemplate()->SetAccessor(TNodeJsUtil::ToLocal(Nan::New("length")), _length);

    // This has to be last, otherwise the properties won't show up on the object in JavaScript
    // Constructor is used when creating the object from C++
    Constructor.Reset(Isolate, TNodeJsUtil::ToLocal(child->GetFunction(context)));
    // we need to export the class for calling using "new FIn(...)"
    Nan::Set(exports, TNodeJsUtil::ToLocal(Nan::New(GetClassId().CStr())),
        TNodeJsUtil::ToLocal(tpl->GetFunction(context)));
}

// Factory used by the JS constructor path: new FIn(path).
TNodeJsFIn* TNodeJsFIn::NewFromArgs(const v8::FunctionCallbackInfo<v8::Value>& Args) {
    // parse arguments
    EAssertR(Args.Length() == 1 && Args[0]->IsString(), "Expected a file path.");
    return new TNodeJsFIn(*Nan::Utf8String (TNodeJsUtil::ToLocal(Nan::To<v8::String>(Args[0]))));
}
void TNodeJsFIn::peekCh(const v8::FunctionCallbackInfo<v8::Value>& Args) {
v8::Isolate* Isolate = v8::Isolate::GetCurrent();
v8::HandleScope HandleScope(Isolate);
TNodeJsFIn* JsFIn = ObjectWrap::Unwrap<TNodeJsFIn>(Args.This());
Args.GetReturnValue().Set(TNodeJsUtil::ToLocal(Nan::New(TStr(JsFIn->SIn->PeekCh()).CStr())));
}
void TNodeJsFIn::getCh(const v8::FunctionCallbackInfo<v8::Value>& Args) {
v8::Isolate* Isolate = v8::Isolate::GetCurrent();
v8::HandleScope HandleScope(Isolate);
TNodeJsFIn* JsFIn = ObjectWrap::Unwrap<TNodeJsFIn>(Args.This());
Args.GetReturnValue().Set(TNodeJsUtil::ToLocal(Nan::New(TStr(JsFIn->SIn->GetCh()).CStr())));
}
void TNodeJsFIn::readLine(const v8::FunctionCallbackInfo<v8::Value>& Args) {
v8::Isolate* Isolate = v8::Isolate::GetCurrent();
v8::HandleScope HandleScope(Isolate);
TNodeJsFIn* JsFIn = ObjectWrap::Unwrap<TNodeJsFIn>(Args.This());
TChA LnChA; JsFIn->SIn->GetNextLnBf(LnChA);
Args.GetReturnValue().Set(TNodeJsUtil::ToLocal(Nan::New(LnChA.CStr())));
}
void TNodeJsFIn::readString(const v8::FunctionCallbackInfo<v8::Value>& Args) {
v8::Isolate* Isolate = v8::Isolate::GetCurrent();
v8::HandleScope HandleScope(Isolate);
TNodeJsFIn* JsFIn = ObjectWrap::Unwrap<TNodeJsFIn>(Args.This());
EAssertR(!JsFIn->SIn.Empty(), "Input stream is closed!");
TStr Str = TStr(*JsFIn->SIn);
Args.GetReturnValue().Set(
TNodeJsUtil::ToLocal(Nan::New(Str.CStr())));
}
void TNodeJsFIn::readAll(const v8::FunctionCallbackInfo<v8::Value>& Args) {
v8::Isolate* Isolate = v8::Isolate::GetCurrent();
v8::HandleScope HandleScope(Isolate);
TNodeJsFIn* JsFIn = ObjectWrap::Unwrap<TNodeJsFIn>(Args.This());
TStr Res = TStr::LoadTxt(JsFIn->SIn);
Args.GetReturnValue().Set(TNodeJsUtil::ToLocal(Nan::New(Res.CStr())));
}
// close(): releases the underlying input stream and returns undefined.
// Idempotent: calling close() on an already-closed stream is a no-op.
void TNodeJsFIn::close(const v8::FunctionCallbackInfo<v8::Value>& Args) {
    v8::Isolate* Isolate = v8::Isolate::GetCurrent();
    v8::HandleScope HandleScope(Isolate);
    TNodeJsFIn* JsFIn = ObjectWrap::Unwrap<TNodeJsFIn>(Args.This());
    // Clearing the smart pointer releases the stream; isClosed() and
    // readString() treat an empty SIn as "closed".
    if (!JsFIn->SIn.Empty()) {
        JsFIn->SIn.Clr();
    }
    Args.GetReturnValue().Set(Nan::Undefined());
}
// isClosed(): returns true iff the underlying stream pointer has been cleared
// (i.e. close() was called or the stream was never opened).
void TNodeJsFIn::isClosed(const v8::FunctionCallbackInfo<v8::Value>& Args) {
    v8::Isolate* Isolate = v8::Isolate::GetCurrent();
    v8::HandleScope HandleScope(Isolate);
    TNodeJsFIn* JsFIn = ObjectWrap::Unwrap<TNodeJsFIn>(Args.This());
    Args.GetReturnValue().Set(Nan::New(JsFIn->SIn.Empty()));
}
// Property getter for fin.eof: true when the stream has reached end-of-file.
// NOTE(review): dereferences SIn without a closed-stream check, unlike
// readString(); accessing .eof after close() would fail — confirm intended.
void TNodeJsFIn::eof(v8::Local<v8::Name> Name, const v8::PropertyCallbackInfo<v8::Value>& Info) {
    v8::Isolate* Isolate = v8::Isolate::GetCurrent();
    v8::HandleScope HandleScope(Isolate);
    TNodeJsFIn* JsFIn = ObjectWrap::Unwrap<TNodeJsFIn>(Info.Holder());
    Info.GetReturnValue().Set(Nan::New(JsFIn->SIn->Eof()));
}
// Property getter for fin.length: the stream's length as reported by
// TSIn::Len(), returned as a JS integer.
// NOTE(review): like eof(), this dereferences SIn without a closed-stream
// check — confirm behavior after close() is acceptable.
void TNodeJsFIn::length(v8::Local<v8::Name> Name, const v8::PropertyCallbackInfo<v8::Value>& Info) {
    v8::Isolate* Isolate = v8::Isolate::GetCurrent();
    v8::HandleScope HandleScope(Isolate);
    TNodeJsFIn* JsFIn = ObjectWrap::Unwrap<TNodeJsFIn>(Info.Holder());
    Info.GetReturnValue().Set(v8::Integer::New(Isolate, JsFIn->SIn->Len()));
}
///////////////////////////////
// NodeJs-FOut
// Persistent handle to the JS constructor function for FOut objects;
// populated in Init() and used when instantiating FOut from C++ code.
v8::Persistent<v8::Function> TNodeJsFOut::Constructor;
// Init(): registers the FOut class with the V8 runtime — builds the function
// templates, attaches the prototype methods, stores the C++-side constructor,
// and exports the JS-side constructor on `exports`.
void TNodeJsFOut::Init(v8::Local<v8::Object> exports) {
    v8::Isolate* Isolate = v8::Isolate::GetCurrent();
    v8::HandleScope HandleScope(Isolate);
    v8::Local<v8::Context> context = Nan::GetCurrentContext();
    // template for creating function from javascript using "new", uses _NewJs callback
    v8::Local<v8::FunctionTemplate> tpl = v8::FunctionTemplate::New(Isolate, TNodeJsUtil::_NewJs<TNodeJsFOut>);
    // child will have the same properties and methods, but a different callback: _NewCpp
    v8::Local<v8::FunctionTemplate> child = v8::FunctionTemplate::New(Isolate, TNodeJsUtil::_NewCpp<TNodeJsFOut>);
    child->Inherit(tpl);
    child->SetClassName(TNodeJsUtil::ToLocal(Nan::New(GetClassId().CStr())));
    // ObjectWrap uses the first internal field to store the wrapped pointer
    child->InstanceTemplate()->SetInternalFieldCount(1);
    tpl->SetClassName(TNodeJsUtil::ToLocal(Nan::New(GetClassId().CStr())));
    // ObjectWrap uses the first internal field to store the wrapped pointer
    tpl->InstanceTemplate()->SetInternalFieldCount(1);
    // Add all prototype methods, getters and setters here.
    NODE_SET_PROTOTYPE_METHOD(tpl, "write", _write);
    NODE_SET_PROTOTYPE_METHOD(tpl, "writeBinary", _writeBinary);
    NODE_SET_PROTOTYPE_METHOD(tpl, "writeLine", _writeLine);
    NODE_SET_PROTOTYPE_METHOD(tpl, "flush", _flush);
    NODE_SET_PROTOTYPE_METHOD(tpl, "close", _close);
    // This has to be last, otherwise the properties won't show up on the object in JavaScript
    // Constructor is used when creating the object from C++
    Constructor.Reset(Isolate, TNodeJsUtil::ToLocal(child->GetFunction(context)));
    // we need to export the class for calling using "new FOut(...)"
    Nan::Set(exports, TNodeJsUtil::ToLocal(Nan::New(GetClassId().CStr())),
        TNodeJsUtil::ToLocal(tpl->GetFunction(context)));
}
// Factory used by the JS "new FOut(path[, append])" constructor path.
// Arg 0 (required): output file path, string.
// Arg 1 (optional): boolean append flag; anything else means overwrite.
TNodeJsFOut* TNodeJsFOut::NewFromArgs(const v8::FunctionCallbackInfo<v8::Value>& Args) {
    EAssertR(Args.Length() >= 1 && Args[0]->IsString(), "Expected file path.");
    Nan::Utf8String FNmUtf(TNodeJsUtil::ToLocal(Nan::To<v8::String>(Args[0])));
    // append only when a second, boolean, true argument was passed
    bool AppendP = false;
    if (Args.Length() >= 2 && Args[1]->IsBoolean()) {
        AppendP = Nan::To<bool>(Args[1]).FromJust();
    }
    return new TNodeJsFOut(TStr(*FNmUtf), AppendP);
}
// write(arg): writes its single argument to the stream as TEXT and returns
// the fout object for chaining. Accepted types: string, int32, number, JSON
// object; anything else fails.
void TNodeJsFOut::write(const v8::FunctionCallbackInfo<v8::Value>& Args) {
    v8::Isolate* Isolate = v8::Isolate::GetCurrent();
    v8::HandleScope HandleScope(Isolate);
    EAssertR(Args.Length() == 1, "Invalid number of arguments to fout.write()");
    TNodeJsFOut* JsFOut = ObjectWrap::Unwrap<TNodeJsFOut>(Args.This());
    EAssertR(!JsFOut->SOut.Empty(), "Output stream already closed!");
    if (Args[0]->IsString()) {
        JsFOut->SOut->PutStr(*Nan::Utf8String (TNodeJsUtil::ToLocal(Nan::To<v8::String>(Args[0]))));
    } else if (Args[0]->IsInt32()) {
        // IsInt32 is tested BEFORE IsNumber so integers are rendered via
        // TInt::GetStr (no decimal point) rather than as floats.
        JsFOut->SOut->PutStr(TInt::GetStr(Nan::To<int32_t>(Args[0]).FromJust()));
    } else if (Args[0]->IsNumber()) {
        JsFOut->SOut->PutStr(TFlt::GetStr(Nan::To<double>(Args[0]).FromJust()));
    } else if (TNodeJsUtil::IsArgJson(Args, 0)) {
        // JSON objects are serialized to their string representation
        JsFOut->SOut->PutStr(TJsonVal::GetStrFromVal(TNodeJsUtil::GetArgJson(Args, 0)));
    } else {
        EFailR("Invalid type passed to fout.write() function.");
    }
    // return the holder so calls can be chained: fout.write(a).write(b)
    Args.GetReturnValue().Set(Args.Holder());
}
// writeBinary(arg): writes its single argument to the stream in BINARY form
// (glib Save serialization) and returns the fout object for chaining.
// Accepted types: string, number, JSON object; anything else fails.
void TNodeJsFOut::writeBinary(const v8::FunctionCallbackInfo<v8::Value>& Args) {
    v8::Isolate* Isolate = v8::Isolate::GetCurrent();
    v8::HandleScope HandleScope(Isolate);
    // fixed diagnostics: previously these messages blamed fout.write()
    EAssertR(Args.Length() == 1, "Invalid number of arguments to fout.writeBinary()");
    TNodeJsFOut* JsFOut = ObjectWrap::Unwrap<TNodeJsFOut>(Args.This());
    EAssertR(!JsFOut->SOut.Empty(), "Output stream already closed!");
    if (Args[0]->IsString()) {
        TStr Str = TNodeJsUtil::GetArgStr(Args, 0);
        Str.Save(*JsFOut->SOut);
    } else if (Args[0]->IsNumber()) {
        JsFOut->SOut->Save(Nan::To<double>(Args[0]).FromJust());
    } else if (TNodeJsUtil::IsArgJson(Args, 0)) {
        PJsonVal JsonVal = TNodeJsUtil::GetArgJson(Args, 0);
        JsonVal->Save(*JsFOut->SOut);
    } else {
        EFailR("Invalid type passed to fout.writeBinary() function.");
    }
    // return the holder so calls can be chained
    Args.GetReturnValue().Set(Args.Holder());
}
// writeLine(arg): writes the argument exactly as write() would, then appends
// a newline. Returns the fout object for chaining.
void TNodeJsFOut::writeLine(const v8::FunctionCallbackInfo<v8::Value>& Args) {
    v8::Isolate* Isolate = v8::Isolate::GetCurrent();
    v8::HandleScope HandleScope(Isolate);
    // delegate the value formatting (and its type checks) to write()
    write(Args);
    TNodeJsFOut* JsFOut = ObjectWrap::Unwrap<TNodeJsFOut>(Args.This());
    EAssertR(!JsFOut->SOut.Empty(), "Output stream already closed!");
    JsFOut->SOut->PutLn();
    Args.GetReturnValue().Set(Args.Holder());
}
// flush(): flushes buffered output to the underlying file; the stream stays
// open. Returns the fout object for chaining. Fails if already closed.
void TNodeJsFOut::flush(const v8::FunctionCallbackInfo<v8::Value>& Args) {
    v8::Isolate* Isolate = v8::Isolate::GetCurrent();
    v8::HandleScope HandleScope(Isolate);
    TNodeJsFOut* JsFOut = ObjectWrap::Unwrap<TNodeJsFOut>(Args.This());
    EAssertR(!JsFOut->SOut.Empty(), "Output stream already closed!");
    JsFOut->SOut->Flush();
    Args.GetReturnValue().Set(Args.Holder());
}
// close(): flushes pending output and releases the stream. Unlike the FIn
// counterpart this is NOT idempotent — a second close() trips the assert.
void TNodeJsFOut::close(const v8::FunctionCallbackInfo<v8::Value>& Args) {
    v8::Isolate* Isolate = v8::Isolate::GetCurrent();
    v8::HandleScope HandleScope(Isolate);
    TNodeJsFOut* JsFOut = ObjectWrap::Unwrap<TNodeJsFOut>(Args.This());
    EAssertR(!JsFOut->SOut.Empty(), "Output stream already closed!");
    // flush before clearing so buffered data reaches the file
    JsFOut->SOut->Flush();
    JsFOut->SOut.Clr();
    Args.GetReturnValue().Set(Args.Holder());
}
|
#!/bin/bash
# Author: yeho <lj2007331 AT gmail.com>
# BLOG: https://linuxeye.com
#
# Notes: OneinStack for CentOS/RedHat 6+ Debian 8+ and Ubuntu 14+
#
# Project home page:
# https://oneinstack.com
# https://github.com/oneinstack/oneinstack
# Install the Oracle JDK 1.7 tarball under /usr/java and wire JAVA_HOME,
# CLASSPATH and PATH into /etc/profile.
# Relies on caller-provided globals: oneinstack_dir, jdk17_ver, SYS_BIT_j,
# PM, CSUCCESS/CFAILURE/CEND — TODO confirm they are set by the sourcing script.
Install_JDK17() {
  pushd ${oneinstack_dir}/src > /dev/null
  # Build the tarball name from the version string, e.g. jdk17_ver=1.7.0_80
  # -> jdk-7u80-linux-${SYS_BIT_j}.tar.gz (field 2 of the '.'-split is the
  # major, the last '_'-separated field is the update number).
  JDK_FILE="jdk-`echo ${jdk17_ver} | awk -F. '{print $2}'`u`echo ${jdk17_ver} | awk -F_ '{print $NF}'`-linux-${SYS_BIT_j}.tar.gz"
  JAVA_dir=/usr/java
  JDK_NAME="jdk${jdk17_ver}"
  JDK_PATH=${JAVA_dir}/${JDK_NAME}
  # On RPM systems, remove any previously packaged JDK to avoid conflicts
  [ "${PM}" == 'yum' ] && [ -n "`rpm -qa | grep jdk`" ] && rpm -e `rpm -qa | grep jdk`
  [ ! -e ${JAVA_dir} ] && mkdir -p ${JAVA_dir}
  tar xzf ${JDK_FILE} -C ${JAVA_dir}
  if [ -d "${JDK_PATH}" ]; then
    # NOTE(review): cp -i prompts if the target exists — confirm this script
    # is never run where /etc/ssl/certs/java already holds a cacerts copy.
    /bin/cp -i ${JDK_PATH}/jre/lib/security/cacerts /etc/ssl/certs/java
    # Insert or update JAVA_HOME: if absent, add it (before an existing
    # "export PATH=" line when one exists); otherwise rewrite it in place.
    [ -z "`grep ^'export JAVA_HOME=' /etc/profile`" ] && { [ -z "`grep ^'export PATH=' /etc/profile`" ] && echo "export JAVA_HOME=${JDK_PATH}" >> /etc/profile || sed -i "s@^export PATH=@export JAVA_HOME=${JDK_PATH}\nexport PATH=@" /etc/profile; } || sed -i "s@^export JAVA_HOME=.*@export JAVA_HOME=${JDK_PATH}@" /etc/profile
    # Add CLASSPATH right after JAVA_HOME if it is not defined yet
    [ -z "`grep ^'export CLASSPATH=' /etc/profile`" ] && sed -i "s@export JAVA_HOME=\(.*\)@export JAVA_HOME=\1\nexport CLASSPATH=\$JAVA_HOME/lib/tools.jar:\$JAVA_HOME/lib/dt.jar:\$JAVA_HOME/lib@" /etc/profile
    # Ensure $JAVA_HOME/bin is on PATH, whether or not an export PATH= exists
    [ -n "`grep ^'export PATH=' /etc/profile`" -a -z "`grep '$JAVA_HOME/bin' /etc/profile`" ] && sed -i "s@^export PATH=\(.*\)@export PATH=\$JAVA_HOME/bin:\1@" /etc/profile
    [ -z "`grep ^'export PATH=' /etc/profile | grep '$JAVA_HOME/bin'`" ] && echo 'export PATH=$JAVA_HOME/bin:$PATH' >> /etc/profile
    . /etc/profile
    echo "${CSUCCESS}$JDK_NAME installed successfully! ${CEND}"
  else
    # Extraction failed: clean up and abort the whole installer process
    rm -rf $JAVA_dir
    echo "${CFAILURE}JDK install failed, Please contact the author! ${CEND}"
    kill -9 $$
  fi
  popd
}
|
package org.openapitools.client.api;
import org.openapitools.client.ApiClient;
import java.io.File;
import org.openapitools.client.model.ModelApiResponse;
import org.openapitools.client.model.Pet;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.client.RestClientException;
import org.springframework.web.client.HttpClientErrorException;
import org.springframework.web.util.UriComponentsBuilder;
import org.springframework.core.ParameterizedTypeReference;
import org.springframework.core.io.FileSystemResource;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
/**
 * Spring RestTemplate client for the Petstore {@code /pet} endpoints.
 *
 * <p>Generated-client style code (OpenAPI-generator layout — each operation has
 * a convenience method returning the body and a {@code ...WithHttpInfo} variant
 * returning the full {@link ResponseEntity}). All HTTP mechanics are delegated
 * to {@link ApiClient#invokeAPI}; missing required parameters are reported as
 * client-side {@link HttpClientErrorException}s with status 400 before any
 * request is made.
 */
@Component("org.openapitools.client.api.PetApi")
public class PetApi {
    // transport/serialization helper shared by all operations
    private ApiClient apiClient;
    public PetApi() {
        this(new ApiClient());
    }
    @Autowired
    public PetApi(ApiClient apiClient) {
        this.apiClient = apiClient;
    }
    public ApiClient getApiClient() {
        return apiClient;
    }
    public void setApiClient(ApiClient apiClient) {
        this.apiClient = apiClient;
    }
    /**
     * Add a new pet to the store
     *
     * <p><b>200</b> - successful operation
     * <p><b>405</b> - Invalid input
     * @param body Pet object that needs to be added to the store (required)
     * @throws RestClientException if an error occurs while attempting to invoke the API
     */
    public void addPet(Pet body) throws RestClientException {
        addPetWithHttpInfo(body);
    }
    /**
     * Add a new pet to the store
     *
     * <p><b>200</b> - successful operation
     * <p><b>405</b> - Invalid input
     * @param body Pet object that needs to be added to the store (required)
     * @return ResponseEntity&lt;Void&gt;
     * @throws RestClientException if an error occurs while attempting to invoke the API
     */
    public ResponseEntity<Void> addPetWithHttpInfo(Pet body) throws RestClientException {
        Object postBody = body;
        // verify the required parameter 'body' is set
        if (body == null) {
            throw new HttpClientErrorException(HttpStatus.BAD_REQUEST, "Missing the required parameter 'body' when calling addPet");
        }
        String path = apiClient.expandPath("/pet", Collections.<String, Object>emptyMap());
        final MultiValueMap<String, String> queryParams = new LinkedMultiValueMap<String, String>();
        final HttpHeaders headerParams = new HttpHeaders();
        final MultiValueMap<String, Object> formParams = new LinkedMultiValueMap<String, Object>();
        final String[] accepts = { };
        final List<MediaType> accept = apiClient.selectHeaderAccept(accepts);
        final String[] contentTypes = {
            "application/json", "application/xml"
        };
        final MediaType contentType = apiClient.selectHeaderContentType(contentTypes);
        String[] authNames = new String[] { "petstore_auth" };
        ParameterizedTypeReference<Void> returnType = new ParameterizedTypeReference<Void>() {};
        return apiClient.invokeAPI(path, HttpMethod.POST, queryParams, postBody, headerParams, formParams, accept, contentType, authNames, returnType);
    }
    /**
     * Deletes a pet
     *
     * <p><b>200</b> - successful operation
     * <p><b>400</b> - Invalid pet value
     * @param petId Pet id to delete (required)
     * @param apiKey (optional)
     * @throws RestClientException if an error occurs while attempting to invoke the API
     */
    public void deletePet(Long petId, String apiKey) throws RestClientException {
        deletePetWithHttpInfo(petId, apiKey);
    }
    /**
     * Deletes a pet
     *
     * <p><b>200</b> - successful operation
     * <p><b>400</b> - Invalid pet value
     * @param petId Pet id to delete (required)
     * @param apiKey (optional)
     * @return ResponseEntity&lt;Void&gt;
     * @throws RestClientException if an error occurs while attempting to invoke the API
     */
    public ResponseEntity<Void> deletePetWithHttpInfo(Long petId, String apiKey) throws RestClientException {
        Object postBody = null;
        // verify the required parameter 'petId' is set
        if (petId == null) {
            throw new HttpClientErrorException(HttpStatus.BAD_REQUEST, "Missing the required parameter 'petId' when calling deletePet");
        }
        // create path and map variables
        final Map<String, Object> uriVariables = new HashMap<String, Object>();
        uriVariables.put("petId", petId);
        String path = apiClient.expandPath("/pet/{petId}", uriVariables);
        final MultiValueMap<String, String> queryParams = new LinkedMultiValueMap<String, String>();
        final HttpHeaders headerParams = new HttpHeaders();
        final MultiValueMap<String, Object> formParams = new LinkedMultiValueMap<String, Object>();
        // the optional api_key travels as a request header
        if (apiKey != null)
        headerParams.add("api_key", apiClient.parameterToString(apiKey));
        final String[] accepts = { };
        final List<MediaType> accept = apiClient.selectHeaderAccept(accepts);
        final String[] contentTypes = { };
        final MediaType contentType = apiClient.selectHeaderContentType(contentTypes);
        String[] authNames = new String[] { "petstore_auth" };
        ParameterizedTypeReference<Void> returnType = new ParameterizedTypeReference<Void>() {};
        return apiClient.invokeAPI(path, HttpMethod.DELETE, queryParams, postBody, headerParams, formParams, accept, contentType, authNames, returnType);
    }
    /**
     * Finds Pets by status
     * Multiple status values can be provided with comma separated strings
     * <p><b>200</b> - successful operation
     * <p><b>400</b> - Invalid status value
     * @param status Status values that need to be considered for filter (required)
     * @return List&lt;Pet&gt;
     * @throws RestClientException if an error occurs while attempting to invoke the API
     */
    public List<Pet> findPetsByStatus(List<String> status) throws RestClientException {
        return findPetsByStatusWithHttpInfo(status).getBody();
    }
    /**
     * Finds Pets by status
     * Multiple status values can be provided with comma separated strings
     * <p><b>200</b> - successful operation
     * <p><b>400</b> - Invalid status value
     * @param status Status values that need to be considered for filter (required)
     * @return ResponseEntity&lt;List&lt;Pet&gt;&gt;
     * @throws RestClientException if an error occurs while attempting to invoke the API
     */
    public ResponseEntity<List<Pet>> findPetsByStatusWithHttpInfo(List<String> status) throws RestClientException {
        Object postBody = null;
        // verify the required parameter 'status' is set
        if (status == null) {
            throw new HttpClientErrorException(HttpStatus.BAD_REQUEST, "Missing the required parameter 'status' when calling findPetsByStatus");
        }
        String path = apiClient.expandPath("/pet/findByStatus", Collections.<String, Object>emptyMap());
        final MultiValueMap<String, String> queryParams = new LinkedMultiValueMap<String, String>();
        final HttpHeaders headerParams = new HttpHeaders();
        final MultiValueMap<String, Object> formParams = new LinkedMultiValueMap<String, Object>();
        // the status list is serialized comma-separated ("csv" collection format)
        queryParams.putAll(apiClient.parameterToMultiValueMap(ApiClient.CollectionFormat.valueOf("csv".toUpperCase(Locale.ROOT)), "status", status));
        final String[] accepts = {
            "application/xml", "application/json"
        };
        final List<MediaType> accept = apiClient.selectHeaderAccept(accepts);
        final String[] contentTypes = { };
        final MediaType contentType = apiClient.selectHeaderContentType(contentTypes);
        String[] authNames = new String[] { "petstore_auth" };
        ParameterizedTypeReference<List<Pet>> returnType = new ParameterizedTypeReference<List<Pet>>() {};
        return apiClient.invokeAPI(path, HttpMethod.GET, queryParams, postBody, headerParams, formParams, accept, contentType, authNames, returnType);
    }
    /**
     * Finds Pets by tags
     * Multiple tags can be provided with comma separated strings. Use tag1, tag2, tag3 for testing.
     * <p><b>200</b> - successful operation
     * <p><b>400</b> - Invalid tag value
     * @param tags Tags to filter by (required)
     * @return List&lt;Pet&gt;
     * @throws RestClientException if an error occurs while attempting to invoke the API
     */
    @Deprecated
    public List<Pet> findPetsByTags(List<String> tags) throws RestClientException {
        return findPetsByTagsWithHttpInfo(tags).getBody();
    }
    /**
     * Finds Pets by tags
     * Multiple tags can be provided with comma separated strings. Use tag1, tag2, tag3 for testing.
     * <p><b>200</b> - successful operation
     * <p><b>400</b> - Invalid tag value
     * @param tags Tags to filter by (required)
     * @return ResponseEntity&lt;List&lt;Pet&gt;&gt;
     * @throws RestClientException if an error occurs while attempting to invoke the API
     */
    @Deprecated
    public ResponseEntity<List<Pet>> findPetsByTagsWithHttpInfo(List<String> tags) throws RestClientException {
        Object postBody = null;
        // verify the required parameter 'tags' is set
        if (tags == null) {
            throw new HttpClientErrorException(HttpStatus.BAD_REQUEST, "Missing the required parameter 'tags' when calling findPetsByTags");
        }
        String path = apiClient.expandPath("/pet/findByTags", Collections.<String, Object>emptyMap());
        final MultiValueMap<String, String> queryParams = new LinkedMultiValueMap<String, String>();
        final HttpHeaders headerParams = new HttpHeaders();
        final MultiValueMap<String, Object> formParams = new LinkedMultiValueMap<String, Object>();
        // tags are serialized comma-separated ("csv" collection format)
        queryParams.putAll(apiClient.parameterToMultiValueMap(ApiClient.CollectionFormat.valueOf("csv".toUpperCase(Locale.ROOT)), "tags", tags));
        final String[] accepts = {
            "application/xml", "application/json"
        };
        final List<MediaType> accept = apiClient.selectHeaderAccept(accepts);
        final String[] contentTypes = { };
        final MediaType contentType = apiClient.selectHeaderContentType(contentTypes);
        String[] authNames = new String[] { "petstore_auth" };
        ParameterizedTypeReference<List<Pet>> returnType = new ParameterizedTypeReference<List<Pet>>() {};
        return apiClient.invokeAPI(path, HttpMethod.GET, queryParams, postBody, headerParams, formParams, accept, contentType, authNames, returnType);
    }
    /**
     * Find pet by ID
     * Returns a single pet
     * <p><b>200</b> - successful operation
     * <p><b>400</b> - Invalid ID supplied
     * <p><b>404</b> - Pet not found
     * @param petId ID of pet to return (required)
     * @return Pet
     * @throws RestClientException if an error occurs while attempting to invoke the API
     */
    public Pet getPetById(Long petId) throws RestClientException {
        return getPetByIdWithHttpInfo(petId).getBody();
    }
    /**
     * Find pet by ID
     * Returns a single pet
     * <p><b>200</b> - successful operation
     * <p><b>400</b> - Invalid ID supplied
     * <p><b>404</b> - Pet not found
     * @param petId ID of pet to return (required)
     * @return ResponseEntity&lt;Pet&gt;
     * @throws RestClientException if an error occurs while attempting to invoke the API
     */
    public ResponseEntity<Pet> getPetByIdWithHttpInfo(Long petId) throws RestClientException {
        Object postBody = null;
        // verify the required parameter 'petId' is set
        if (petId == null) {
            throw new HttpClientErrorException(HttpStatus.BAD_REQUEST, "Missing the required parameter 'petId' when calling getPetById");
        }
        // create path and map variables
        final Map<String, Object> uriVariables = new HashMap<String, Object>();
        uriVariables.put("petId", petId);
        String path = apiClient.expandPath("/pet/{petId}", uriVariables);
        final MultiValueMap<String, String> queryParams = new LinkedMultiValueMap<String, String>();
        final HttpHeaders headerParams = new HttpHeaders();
        final MultiValueMap<String, Object> formParams = new LinkedMultiValueMap<String, Object>();
        final String[] accepts = {
            "application/xml", "application/json"
        };
        final List<MediaType> accept = apiClient.selectHeaderAccept(accepts);
        final String[] contentTypes = { };
        final MediaType contentType = apiClient.selectHeaderContentType(contentTypes);
        // the only operation in this class authenticated with api_key rather
        // than petstore_auth
        String[] authNames = new String[] { "api_key" };
        ParameterizedTypeReference<Pet> returnType = new ParameterizedTypeReference<Pet>() {};
        return apiClient.invokeAPI(path, HttpMethod.GET, queryParams, postBody, headerParams, formParams, accept, contentType, authNames, returnType);
    }
    /**
     * Update an existing pet
     *
     * <p><b>200</b> - successful operation
     * <p><b>400</b> - Invalid ID supplied
     * <p><b>404</b> - Pet not found
     * <p><b>405</b> - Validation exception
     * @param body Pet object that needs to be added to the store (required)
     * @throws RestClientException if an error occurs while attempting to invoke the API
     */
    public void updatePet(Pet body) throws RestClientException {
        updatePetWithHttpInfo(body);
    }
    /**
     * Update an existing pet
     *
     * <p><b>200</b> - successful operation
     * <p><b>400</b> - Invalid ID supplied
     * <p><b>404</b> - Pet not found
     * <p><b>405</b> - Validation exception
     * @param body Pet object that needs to be added to the store (required)
     * @return ResponseEntity&lt;Void&gt;
     * @throws RestClientException if an error occurs while attempting to invoke the API
     */
    public ResponseEntity<Void> updatePetWithHttpInfo(Pet body) throws RestClientException {
        Object postBody = body;
        // verify the required parameter 'body' is set
        if (body == null) {
            throw new HttpClientErrorException(HttpStatus.BAD_REQUEST, "Missing the required parameter 'body' when calling updatePet");
        }
        String path = apiClient.expandPath("/pet", Collections.<String, Object>emptyMap());
        final MultiValueMap<String, String> queryParams = new LinkedMultiValueMap<String, String>();
        final HttpHeaders headerParams = new HttpHeaders();
        final MultiValueMap<String, Object> formParams = new LinkedMultiValueMap<String, Object>();
        final String[] accepts = { };
        final List<MediaType> accept = apiClient.selectHeaderAccept(accepts);
        final String[] contentTypes = {
            "application/json", "application/xml"
        };
        final MediaType contentType = apiClient.selectHeaderContentType(contentTypes);
        String[] authNames = new String[] { "petstore_auth" };
        ParameterizedTypeReference<Void> returnType = new ParameterizedTypeReference<Void>() {};
        return apiClient.invokeAPI(path, HttpMethod.PUT, queryParams, postBody, headerParams, formParams, accept, contentType, authNames, returnType);
    }
    /**
     * Updates a pet in the store with form data
     *
     * <p><b>405</b> - Invalid input
     * @param petId ID of pet that needs to be updated (required)
     * @param name Updated name of the pet (optional)
     * @param status Updated status of the pet (optional)
     * @throws RestClientException if an error occurs while attempting to invoke the API
     */
    public void updatePetWithForm(Long petId, String name, String status) throws RestClientException {
        updatePetWithFormWithHttpInfo(petId, name, status);
    }
    /**
     * Updates a pet in the store with form data
     *
     * <p><b>405</b> - Invalid input
     * @param petId ID of pet that needs to be updated (required)
     * @param name Updated name of the pet (optional)
     * @param status Updated status of the pet (optional)
     * @return ResponseEntity&lt;Void&gt;
     * @throws RestClientException if an error occurs while attempting to invoke the API
     */
    public ResponseEntity<Void> updatePetWithFormWithHttpInfo(Long petId, String name, String status) throws RestClientException {
        Object postBody = null;
        // verify the required parameter 'petId' is set
        if (petId == null) {
            throw new HttpClientErrorException(HttpStatus.BAD_REQUEST, "Missing the required parameter 'petId' when calling updatePetWithForm");
        }
        // create path and map variables
        final Map<String, Object> uriVariables = new HashMap<String, Object>();
        uriVariables.put("petId", petId);
        String path = apiClient.expandPath("/pet/{petId}", uriVariables);
        final MultiValueMap<String, String> queryParams = new LinkedMultiValueMap<String, String>();
        final HttpHeaders headerParams = new HttpHeaders();
        final MultiValueMap<String, Object> formParams = new LinkedMultiValueMap<String, Object>();
        // optional fields travel as urlencoded form parameters
        if (name != null)
        formParams.add("name", name);
        if (status != null)
        formParams.add("status", status);
        final String[] accepts = { };
        final List<MediaType> accept = apiClient.selectHeaderAccept(accepts);
        final String[] contentTypes = {
            "application/x-www-form-urlencoded"
        };
        final MediaType contentType = apiClient.selectHeaderContentType(contentTypes);
        String[] authNames = new String[] { "petstore_auth" };
        ParameterizedTypeReference<Void> returnType = new ParameterizedTypeReference<Void>() {};
        return apiClient.invokeAPI(path, HttpMethod.POST, queryParams, postBody, headerParams, formParams, accept, contentType, authNames, returnType);
    }
    /**
     * uploads an image
     *
     * <p><b>200</b> - successful operation
     * @param petId ID of pet to update (required)
     * @param additionalMetadata Additional data to pass to server (optional)
     * @param file file to upload (optional)
     * @return ModelApiResponse
     * @throws RestClientException if an error occurs while attempting to invoke the API
     */
    public ModelApiResponse uploadFile(Long petId, String additionalMetadata, File file) throws RestClientException {
        return uploadFileWithHttpInfo(petId, additionalMetadata, file).getBody();
    }
    /**
     * uploads an image
     *
     * <p><b>200</b> - successful operation
     * @param petId ID of pet to update (required)
     * @param additionalMetadata Additional data to pass to server (optional)
     * @param file file to upload (optional)
     * @return ResponseEntity&lt;ModelApiResponse&gt;
     * @throws RestClientException if an error occurs while attempting to invoke the API
     */
    public ResponseEntity<ModelApiResponse> uploadFileWithHttpInfo(Long petId, String additionalMetadata, File file) throws RestClientException {
        Object postBody = null;
        // verify the required parameter 'petId' is set
        if (petId == null) {
            throw new HttpClientErrorException(HttpStatus.BAD_REQUEST, "Missing the required parameter 'petId' when calling uploadFile");
        }
        // create path and map variables
        final Map<String, Object> uriVariables = new HashMap<String, Object>();
        uriVariables.put("petId", petId);
        String path = apiClient.expandPath("/pet/{petId}/uploadImage", uriVariables);
        final MultiValueMap<String, String> queryParams = new LinkedMultiValueMap<String, String>();
        final HttpHeaders headerParams = new HttpHeaders();
        final MultiValueMap<String, Object> formParams = new LinkedMultiValueMap<String, Object>();
        if (additionalMetadata != null)
        formParams.add("additionalMetadata", additionalMetadata);
        // files are sent as multipart parts wrapped in FileSystemResource
        if (file != null)
        formParams.add("file", new FileSystemResource(file));
        final String[] accepts = {
            "application/json"
        };
        final List<MediaType> accept = apiClient.selectHeaderAccept(accepts);
        final String[] contentTypes = {
            "multipart/form-data"
        };
        final MediaType contentType = apiClient.selectHeaderContentType(contentTypes);
        String[] authNames = new String[] { "petstore_auth" };
        ParameterizedTypeReference<ModelApiResponse> returnType = new ParameterizedTypeReference<ModelApiResponse>() {};
        return apiClient.invokeAPI(path, HttpMethod.POST, queryParams, postBody, headerParams, formParams, accept, contentType, authNames, returnType);
    }
    /**
     * uploads an image (required)
     *
     * <p><b>200</b> - successful operation
     * @param petId ID of pet to update (required)
     * @param requiredFile file to upload (required)
     * @param additionalMetadata Additional data to pass to server (optional)
     * @return ModelApiResponse
     * @throws RestClientException if an error occurs while attempting to invoke the API
     */
    public ModelApiResponse uploadFileWithRequiredFile(Long petId, File requiredFile, String additionalMetadata) throws RestClientException {
        return uploadFileWithRequiredFileWithHttpInfo(petId, requiredFile, additionalMetadata).getBody();
    }
    /**
     * uploads an image (required)
     *
     * <p><b>200</b> - successful operation
     * @param petId ID of pet to update (required)
     * @param requiredFile file to upload (required)
     * @param additionalMetadata Additional data to pass to server (optional)
     * @return ResponseEntity&lt;ModelApiResponse&gt;
     * @throws RestClientException if an error occurs while attempting to invoke the API
     */
    public ResponseEntity<ModelApiResponse> uploadFileWithRequiredFileWithHttpInfo(Long petId, File requiredFile, String additionalMetadata) throws RestClientException {
        Object postBody = null;
        // verify the required parameter 'petId' is set
        if (petId == null) {
            throw new HttpClientErrorException(HttpStatus.BAD_REQUEST, "Missing the required parameter 'petId' when calling uploadFileWithRequiredFile");
        }
        // verify the required parameter 'requiredFile' is set
        if (requiredFile == null) {
            throw new HttpClientErrorException(HttpStatus.BAD_REQUEST, "Missing the required parameter 'requiredFile' when calling uploadFileWithRequiredFile");
        }
        // create path and map variables
        final Map<String, Object> uriVariables = new HashMap<String, Object>();
        uriVariables.put("petId", petId);
        String path = apiClient.expandPath("/fake/{petId}/uploadImageWithRequiredFile", uriVariables);
        final MultiValueMap<String, String> queryParams = new LinkedMultiValueMap<String, String>();
        final HttpHeaders headerParams = new HttpHeaders();
        final MultiValueMap<String, Object> formParams = new LinkedMultiValueMap<String, Object>();
        if (additionalMetadata != null)
        formParams.add("additionalMetadata", additionalMetadata);
        if (requiredFile != null)
        formParams.add("requiredFile", new FileSystemResource(requiredFile));
        final String[] accepts = {
            "application/json"
        };
        final List<MediaType> accept = apiClient.selectHeaderAccept(accepts);
        final String[] contentTypes = {
            "multipart/form-data"
        };
        final MediaType contentType = apiClient.selectHeaderContentType(contentTypes);
        String[] authNames = new String[] { "petstore_auth" };
        ParameterizedTypeReference<ModelApiResponse> returnType = new ParameterizedTypeReference<ModelApiResponse>() {};
        return apiClient.invokeAPI(path, HttpMethod.POST, queryParams, postBody, headerParams, formParams, accept, contentType, authNames, returnType);
    }
}
|
#!/bin/bash
# Start a detached MongoDB container (Raspberry Pi image), exposing the
# default port and persisting the database under /home/pi/mongodb.
docker run -d \
  --name mongodb-server \
  -p 27017:27017 \
  -v /home/pi/mongodb:/data \
  sumglobal/rpi-mongodb
|
<reponame>shin-kinoshita/dbflute-core<gh_stars>0
/*
* Copyright 2014-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.dbflute.cbean.chelper;
import org.dbflute.cbean.ckey.ConditionKey;
import org.dbflute.cbean.ordering.ManualOrderOption;
/**
* @author jflute
*/
public class HpMobConnectedBean {
// ===================================================================================
// Attribute
// =========
    // the ManualOrderOption this bean forwards connected conditions back to
    protected final ManualOrderOption _parentOption;
// ===================================================================================
// Constructor
// ===========
    /**
     * @param parentOption The manual-order option that owns this connected bean
     *                     and receives its when_* calls. (presumably NotNull — not checked here)
     */
    public HpMobConnectedBean(ManualOrderOption parentOption) {
        _parentOption = parentOption;
    }
// ===================================================================================
// And Connection
// ==============
    /** Connects an equal condition with AND. @param orderValue the compared value @return this-like bean for chaining */
    public HpMobConnectedBean and_Equal(Object orderValue) {
        return doAnd(ConditionKey.CK_EQUAL, orderValue);
    }
    /** Connects a not-equal condition with AND. @param orderValue the compared value @return bean for chaining */
    public HpMobConnectedBean and_NotEqual(Object orderValue) {
        return doAnd(ConditionKey.CK_NOT_EQUAL_STANDARD, orderValue);
    }
    /** Connects a greater-than condition with AND. @param orderValue the compared value @return bean for chaining */
    public HpMobConnectedBean and_GreaterThan(Object orderValue) {
        return doAnd(ConditionKey.CK_GREATER_THAN, orderValue);
    }
    /** Connects a less-than condition with AND. @param orderValue the compared value @return bean for chaining */
    public HpMobConnectedBean and_LessThan(Object orderValue) {
        return doAnd(ConditionKey.CK_LESS_THAN, orderValue);
    }
    /** Connects a greater-or-equal condition with AND. @param orderValue the compared value @return bean for chaining */
    public HpMobConnectedBean and_GreaterEqual(Object orderValue) {
        return doAnd(ConditionKey.CK_GREATER_EQUAL, orderValue);
    }
    /** Connects a less-or-equal condition with AND. @param orderValue the compared value @return bean for chaining */
    public HpMobConnectedBean and_LessEqual(Object orderValue) {
        return doAnd(ConditionKey.CK_LESS_EQUAL, orderValue);
    }
    /** Connects an is-null condition with AND (no compared value). @return bean for chaining */
    public HpMobConnectedBean and_IsNull() {
        return doAnd(ConditionKey.CK_IS_NULL, null);
    }
    /** Connects an is-not-null condition with AND (no compared value). @return bean for chaining */
    public HpMobConnectedBean and_IsNotNull() {
        return doAnd(ConditionKey.CK_IS_NOT_NULL, null);
    }
    /**
     * Applies a condition under AND connection mode, always restoring the
     * connection mode afterwards (even if delegation throws).
     * @param conditionKey The key selecting which when_* call to delegate to.
     * @param orderValue The compared value; null for the is-(not-)null keys.
     * @return The bean returned by delegate(), for further chaining.
     */
    public HpMobConnectedBean doAnd(ConditionKey conditionKey, Object orderValue) {
        toBeConnectionModeAsAnd();
        try {
            return delegate(conditionKey, orderValue);
        } finally {
            // reset so later non-connected conditions are unaffected
            clearConnectionMode();
        }
    }
// ===================================================================================
// Or Connection
// =============
public HpMobConnectedBean or_Equal(Object orderValue) {
return doOr(ConditionKey.CK_EQUAL, orderValue);
}
public HpMobConnectedBean or_NotEqual(Object orderValue) {
return doOr(ConditionKey.CK_NOT_EQUAL_STANDARD, orderValue);
}
public HpMobConnectedBean or_GreaterThan(Object orderValue) {
return doOr(ConditionKey.CK_GREATER_THAN, orderValue);
}
public HpMobConnectedBean or_LessThan(Object orderValue) {
return doOr(ConditionKey.CK_LESS_THAN, orderValue);
}
public HpMobConnectedBean or_GreaterEqual(Object orderValue) {
return doOr(ConditionKey.CK_GREATER_EQUAL, orderValue);
}
public HpMobConnectedBean or_LessEqual(Object orderValue) {
return doOr(ConditionKey.CK_LESS_EQUAL, orderValue);
}
public HpMobConnectedBean or_IsNull() {
return doOr(ConditionKey.CK_IS_NULL, null);
}
public HpMobConnectedBean or_IsNotNull() {
return doOr(ConditionKey.CK_IS_NOT_NULL, null);
}
public HpMobConnectedBean doOr(ConditionKey conditionKey, Object orderValue) {
toBeConnectionModeAsOr();
try {
return delegate(conditionKey, orderValue);
} finally {
clearConnectionMode();
}
}
protected HpMobConnectedBean delegate(ConditionKey conditionKey, Object orderValue) {
if (ConditionKey.CK_EQUAL.equals(conditionKey)) {
_parentOption.when_Equal(orderValue);
} else if (ConditionKey.CK_NOT_EQUAL_STANDARD.equals(conditionKey)) {
_parentOption.when_NotEqual(orderValue);
} else if (ConditionKey.CK_GREATER_THAN.equals(conditionKey)) {
_parentOption.when_GreaterThan(orderValue);
} else if (ConditionKey.CK_LESS_THAN.equals(conditionKey)) {
_parentOption.when_LessThan(orderValue);
} else if (ConditionKey.CK_GREATER_EQUAL.equals(conditionKey)) {
_parentOption.when_GreaterEqual(orderValue);
} else if (ConditionKey.CK_LESS_EQUAL.equals(conditionKey)) {
_parentOption.when_LessEqual(orderValue);
} else if (ConditionKey.CK_IS_NULL.equals(conditionKey)) {
_parentOption.when_IsNull();
} else if (ConditionKey.CK_IS_NOT_NULL.equals(conditionKey)) {
_parentOption.when_IsNotNull();
} else {
String msg = "Unknown conditionKey: " + conditionKey;
throw new IllegalStateException(msg);
}
return this;
}
// ===================================================================================
// Connection Mode
// ===============
protected void toBeConnectionModeAsAnd() {
_parentOption.toBeConnectionModeAsAnd();
}
protected void toBeConnectionModeAsOr() {
_parentOption.toBeConnectionModeAsOr();
}
protected void clearConnectionMode() {
_parentOption.clearConnectionMode();
}
// ===================================================================================
// Then Value
// ==========
/**
* Add 'then' value to the last case-when element. (Basically for SwitchOrder) <br>
* You should also set 'then' values to other elements and set 'else' value.
* @param thenValue The value for 'then', String, Integer, Date, DreamCruiseTicket... (NotNull)
*/
public void then(Object thenValue) {
if (thenValue == null) {
String msg = "The argument 'thenValue' should not be null.";
throw new IllegalArgumentException(msg);
}
_parentOption.xregisterThenValueToLastElement(thenValue);
}
}
|
#!/bin/bash
# Build the speedportmqtt Docker image from the Dockerfile in this directory.
# Abort immediately if the build fails so callers see a non-zero exit status.
set -e
docker build -t pharndt/speedportmqtt .
package com.vc.easy
/**
 * LeetCode 733 "Flood Fill", solved iteratively with an explicit stack
 * instead of recursion (avoids deep call stacks on large images).
 * Mutates and returns the given image.
 */
object L733 {
  def floodFill(image: Array[Array[Int]], sr: Int, sc: Int, newColor: Int): Array[Array[Int]] = {
    import scala.collection.mutable

    // The four 4-connected neighbour offsets (down, right, up, left).
    val offsets = Array((1, 0), (0, 1), (-1, 0), (0, -1))
    val rows = image.length
    val cols = image(0).length
    // Colour of the seed cell: only cells of this colour get repainted.
    val startColor = image(sr)(sc)

    val pending = new mutable.Stack[(Int, Int)]()
    pending.push((sr, sc))
    while (pending.nonEmpty) {
      val (row, col) = pending.pop()
      // Skip cells already painted; this also terminates cleanly when
      // startColor == newColor (the seed is skipped immediately).
      if (image(row)(col) != newColor) {
        image(row)(col) = newColor
        for ((dr, dc) <- offsets) {
          val nr = row + dr
          val nc = col + dc
          if (nr >= 0 && nr < rows && nc >= 0 && nc < cols && image(nr)(nc) == startColor) {
            pending.push((nr, nc))
          }
        }
      }
    }
    image
  }
}
|
<filename>src/totpDisable/__tests__/actions.test.ts
import fetchMock from 'fetch-mock';
import thunk from 'redux-thunk';
import configureMockStore from 'redux-mock-store';
import config from '@authenticator/config';
import { disable } from '@authenticator/totpDisable/actions';
import {
REQUEST,
REQUEST_ERROR,
REQUEST_SUCCESS,
} from '@authenticator/totpDisable/constants';
describe('TOTPDisable Actions: Disable Test', (): void => {
let storeMock: any;
beforeEach((): void => {
const middlewares = [ thunk ];
const mockStore = configureMockStore(middlewares);
storeMock = mockStore({});
});
afterEach((): void => {
fetchMock.restore();
});
test('dispatches error on request', async (): Promise<void> => {
const url = `${config.api.baseURL}/api/v1/totp/configure`;
fetchMock.mock(url, {
status: 500,
body: {
error: {
message: 'Whoops something bad happened',
code: 'internal',
},
},
});
await storeMock.dispatch(disable({ code: '123456' }));
expect(storeMock.getActions()).toEqual([
{ type: REQUEST },
{ type: REQUEST_ERROR, error: {
code: 'internal',
message: 'Whoops something bad happened',
}},
]);
});
test('dispatches success on request', async (): Promise<void> => {
const mockToken = '<KEY>' +
'<KEY>' +
'<KEY>' +
'zVhMzQ1NmJkY2RhYmU5YjMxZGIxOWQyNDI0M2JmNWQyNDE4YmNhOGVjOTRmYjA' +
'<KEY>' +
'<KEY> +
<KEY> +
<KEY>' +
'<KEY>' +
'05H2CpbdStFatLLVnA';
const url = `${config.api.baseURL}/api/v1/totp/configure`;
fetchMock.mock(url, {
status: 200,
body: {
token: mockToken,
clientID: 'client-id',
},
});
await storeMock.dispatch(disable({ code: '123456' }));
expect(storeMock.getActions()).toEqual([
{ type: REQUEST },
{ type: REQUEST_SUCCESS },
]);
});
test('dispatches error on setting token', async (): Promise<void> => {
const url = `${config.api.baseURL}/api/v1/totp/configure`;
fetchMock.mock(url, {
status: 200,
body: {
token: 'invalid-token',
clientID: 'client-id',
},
});
await storeMock.dispatch(disable({ code: '123456' }));
expect(storeMock.getActions()).toEqual([
{ type: REQUEST },
{ type: REQUEST_ERROR, error: {
code: 'invalid_token',
message: 'Token is not correctly formatted',
}},
]);
});
});
|
#!/bin/bash
# OUTPUTDIR is directory containing this run.sh script
OUTPUTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"

# Train a 6-layer, 9-head transformer; checkpoints/logs land next to this script.
# Quote "$OUTPUTDIR": an unquoted expansion breaks when the path contains spaces.
python train.py \
    --num_hidden_layers 6 \
    --num_attention_heads 9 \
    --optimizer_cosine_lr True \
    --optimizer_warmup_ratio 0.05 \
    --batch_size 100 \
    --num_epochs 300 \
    --hidden_size 400 \
    --attention_isotropic_gaussian True \
    --num_keep_checkpoints 30 \
    --output_dir "$OUTPUTDIR"
|
<reponame>jorgedemetrio/social-midia-manager<gh_stars>0
/**
*
*/
package com.br.alldreams.socialmidia.manager.service.impl;
import static java.util.Objects.isNull;
import java.util.Calendar;
import java.util.HashMap;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.br.alldreams.socialmidia.conf.beans.DriversConfBeans;
import com.br.alldreams.socialmidia.conf.beans.SegurancaConfBeans;
import com.br.alldreams.socialmidia.manager.domain.Configuracao;
import com.br.alldreams.socialmidia.manager.repository.ComentarioRepository;
import com.br.alldreams.socialmidia.manager.repository.ConfiguracaoRepository;
import com.br.alldreams.socialmidia.manager.repository.HistoricoRepository;
import com.br.alldreams.socialmidia.manager.repository.TagRepository;
import com.br.alldreams.socialmidia.manager.repository.UsuarioRepository;
import com.br.alldreams.socialmidia.manager.service.JobService;
import com.br.alldreams.socialmidia.manager.utils.CriptografiaUtils;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
/**
* @author <NAME>
* @since 19 de abr de 2019 19:56:11
* @version 1.0
*/
@Slf4j
@Data
@Service
public class JobServiceImpl implements JobService {

    /** Worker threads currently running, keyed by configuration description. */
    private final static HashMap<String, Thread> configuracoes = new HashMap<String, Thread>();

    @Autowired
    private ComentarioRepository comentarioRepository;

    @Autowired
    private HistoricoRepository historicoRepository;

    @Autowired
    private TagRepository tagRepository;

    @Autowired
    private SegurancaConfBeans conf;

    @Autowired
    private CriptografiaUtils criptografia;

    @Autowired
    private UsuarioRepository usuarioRepository;

    @Autowired
    private DriversConfBeans drivesConfig;

    @Autowired
    private ConfiguracaoRepository configuracaoRepository;

    /** Main-loop switch: setting this to FALSE stops startService's polling loop. */
    public Boolean executar = Boolean.TRUE;

    /**
     * Stops and unregisters the worker thread registered under the given name.
     * A missing entry is now a no-op instead of a NullPointerException.
     * @param nome The configuration description used as registry key.
     */
    @SuppressWarnings("deprecation")
    public void removeEvento(String nome) {
        Thread thread = configuracoes.get(nome);
        if (thread != null) {
            // Thread.stop is deprecated and unsafe; kept because the worker
            // thread offers no cooperative cancellation hook.
            thread.stop();
        }
        configuracoes.remove(nome);
    }

    /**
     * Polling loop: every 10 minutes, looks up the configurations active at
     * the current "HHmm" time and starts one worker thread per configuration
     * that is not already running.
     */
    @Override
    public void startService() {
        while (executar) {
            Calendar cal = Calendar.getInstance();
            // Current wall-clock time as zero-padded "HHmm" (e.g. "0905").
            String hr = String.format("%02d%02d", cal.get(Calendar.HOUR_OF_DAY), cal.get(Calendar.MINUTE));
            List<Configuracao> list = configuracaoRepository.getConfiguracoesAtivas(hr);
            for (Configuracao configuracao : list) {
                // Start a worker only if none is registered for this configuration yet.
                if (isNull(configuracoes.get(configuracao.getDescricao()))) {
                    Thread t = new Thread(new ControleRedeSocialThread(configuracao, this));
                    try {
                        configuracoes.put(configuracao.getDescricao(), t);
                        t.start();
                    } catch (Exception ex) {
                        log.error(ex.getMessage(), ex);
                    }
                }
            }
            try {
                // Wait 10 minutes before scanning the active configurations again.
                // (The previous comment claimed 1 hour, but the constant is 10 min.)
                Thread.sleep(1000 * 60 * 10);
            } catch (InterruptedException e) {
                log.error(e.getMessage(), e);
            }
        }
    }
}
|
def add_list_items(my_list):
    """Return the sum of all numeric items in my_list.

    Args:
        my_list: An iterable of numbers (may be empty).

    Returns:
        The total; 0 for an empty iterable.
    """
    # The built-in sum() handles the empty-list case (returns 0) and is
    # clearer than a manual accumulation loop.
    return sum(my_list)


my_list = [1, 2, 3, 4]
result = add_list_items(my_list)
print(result)
<filename>backend/migrations/1489367487181-populate_district_number_for_users.js
const mongoose = require('mongoose')
const dotenv = require('dotenv')
const { User } = require('../app/models')
// Load environment variables (MONGODB_URI) from the .env file.
dotenv.load()

// Use the es6-promise polyfill as Mongoose's promise library.
mongoose.Promise = require('es6-promise')

// Connect only if no connection is already open (readyState 0 = disconnected).
if (!mongoose.connection.readyState) {
  mongoose.connect(process.env.MONGODB_URI)
}
/**
 * Make any changes you need to make to the database here
 *
 * Renames every user's `congressionalDistrict` field to `districtNumber`
 * (collection-level rename, bypassing Mongoose middleware), then back-fills
 * a derived `district` string of the form "STATE-NUMBER" on each user.
 */
export async function up () {
  await User.collection.updateMany({}, { $rename: { congressionalDistrict: 'districtNumber' } })
  const users = await User.find({})
  // NOTE(review): users are saved one at a time; Promise.all would parallelise
  // this — confirm sequential order is not relied upon before changing it.
  for (let user of users) {
    user.district = `${user.state}-${user.districtNumber}`
    await user.save()
  }
}
/**
 * Make any changes that UNDO the up function side effects here (if possible)
 *
 * Drops the derived `district` field and renames `districtNumber` back to
 * `congressionalDistrict` in a single collection-level update.
 */
export async function down () {
  await User.collection.updateMany({}, { $unset: { district: '' }, $rename: { districtNumber: 'congressionalDistrict' } })
}
|
package artifality.item;
import artifality.item.base.ArtifactItem;
import artifality.util.EffectsUtils;
import dev.emi.trinkets.api.SlotReference;
import net.minecraft.entity.LivingEntity;
import net.minecraft.entity.effect.StatusEffects;
import net.minecraft.item.ItemStack;
/**
 * Trinket cape artifact that applies the invisibility status effect while
 * the wearer is sneaking.
 */
public class InvisibilityCapeItem extends ArtifactItem {

    public InvisibilityCapeItem(ArtifactSettings settings) {
        super(settings);
    }

    /**
     * Trinket tick hook: while the wearing entity sneaks, keep the
     * invisibility effect refreshed via the shared effects helper.
     */
    @Override
    public void tick(ItemStack stack, SlotReference slot, LivingEntity entity) {
        if (entity.isSneaking()) {
            EffectsUtils.ticking(entity, StatusEffects.INVISIBILITY);
        }
    }
}
|
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This script builds and pushes docker images when run from a release of Spark
# with Kubernetes support.
# error MESSAGE...
# Print the given message on stderr and abort the script with exit status 1.
function error {
  echo "$@" >&2
  exit 1
}
# Default SPARK_HOME to the parent of this script's directory when unset.
if [ -z "${SPARK_HOME}" ]; then
  SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
fi
# Pull in the Spark environment (provides SPARK_SCALA_VERSION used below).
. "${SPARK_HOME}/bin/load-spark-env.sh"
# image_ref NAME [ADD_REPO]
# Compose the full image reference for NAME: prefix "$REPO/" unless ADD_REPO
# is 0 or REPO is empty, and append ":$TAG" when TAG is set.
function image_ref {
  local image="$1"
  local add_repo="${2:-1}"
  # Quote both operands of the test: an unquoted empty value would otherwise
  # collapse to a syntax error in [ ].
  if [ "$add_repo" = 1 ] && [ -n "$REPO" ]; then
    image="$REPO/$image"
  fi
  if [ -n "$TAG" ]; then
    image="$image:$TAG"
  fi
  echo "$image"
}
# Build the Spark Docker image. Chooses the dockerfile location and build
# arguments depending on whether we run from a source checkout (no RELEASE
# file) or from a binary distribution archive.
function build {
  local BUILD_ARGS
  local IMG_PATH
  if [ ! -f "$SPARK_HOME/RELEASE" ]; then
    # Set image build arguments accordingly if this is a source repo and not a distribution archive.
    IMG_PATH=resource-managers/kubernetes/docker/src/main/dockerfiles
    BUILD_ARGS=(
      --build-arg
      img_path=$IMG_PATH
      --build-arg
      spark_jars=assembly/target/scala-$SPARK_SCALA_VERSION/jars
    )
  else
    # Not passed as an argument to docker, but used to validate the Spark directory.
    IMG_PATH="kubernetes/dockerfiles"
    BUILD_ARGS=()
  fi
  # Bail out early when the expected dockerfile directory is absent.
  if [ ! -d "$IMG_PATH" ]; then
    error "Cannot find docker image. This script must be run from a runnable distribution of Apache Spark."
  fi
  docker build "${BUILD_ARGS[@]}" \
    -t $(image_ref spark) \
    -f "$IMG_PATH/spark/Dockerfile" .
}
# Push the previously built Spark image (named via image_ref) to its registry.
function push {
  local target
  target="$(image_ref spark)"
  docker push "$target"
}
# Print CLI usage to stdout. The heredoc below is user-facing output; its
# wording is kept stable.
function usage {
  cat <<EOF
Usage: $0 [options] [command]
Builds or pushes the built-in Spark Docker image.
Commands:
build Build image. Requires a repository address to be provided if the image will be
pushed to a different registry.
push Push a pre-built image to a registry. Requires a repository address to be provided.
Options:
-r repo Repository address.
-t tag Tag to apply to the built image, or to identify the image to be pushed.
-m Use minikube's Docker daemon.
Using minikube when building images will do so directly into minikube's Docker daemon.
There is no need to push the images into minikube in that case, they'll be automatically
available when running applications inside the minikube cluster.
Check the following documentation for more information on using the minikube Docker daemon:
https://kubernetes.io/docs/getting-started-guides/minikube/#reusing-the-docker-daemon
Examples:
- Build image in minikube with tag "testing"
$0 -m -t testing build
- Build and push image with tag "v2.3.0" to docker.io/myrepo
$0 -r docker.io/myrepo -t v2.3.0 build
$0 -r docker.io/myrepo -t v2.3.0 push
EOF
}
# NOTE(review): "$@" collapses to a single string inside [[ ]], so these
# patterns only match when --help/-h is the *last* argument — confirm whether
# help flags in other positions should also be honored.
if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
  usage
  exit 0
fi

REPO=
TAG=
# -r/-t set the repository and tag; -m redirects docker at minikube's daemon.
while getopts mr:t: option
do
 case "${option}"
 in
 r) REPO=${OPTARG};;
 t) TAG=${OPTARG};;
 m)
   if ! which minikube 1>/dev/null; then
     error "Cannot find minikube."
   fi
   # Point the docker CLI at minikube's Docker daemon for this script's run.
   eval $(minikube docker-env --shell bash)
   ;;
 esac
done

# The last positional argument selects the command to run.
case "${@: -1}" in
  build)
    build
    ;;
  push)
    # Pushing requires an explicit repository address.
    if [ -z "$REPO" ]; then
      usage
      exit 1
    fi
    push
    ;;
  *)
    usage
    exit 1
    ;;
esac
|
# Sample input data representing the $cata array
cata = [
    {'id': 1, 'name': 'Electronics'},
    {'id': 2, 'name': 'Clothing'},
    {'id': 3, 'name': 'Books'},
    {'id': 4, 'name': 'Home Decor'}
]

# Map each category name to its id for O(1) forward lookups.
category_data = {entry['name']: entry['id'] for entry in cata}


def find_category_id(category_name):
    """Return the id for category_name, or 'Category not found' if absent."""
    return category_data.get(category_name, "Category not found")


def find_category_name(category_id):
    """Return the name whose id equals category_id, or 'Category ID not found'."""
    return next(
        (name for name, cid in category_data.items() if cid == category_id),
        "Category ID not found",
    )


# Sample usage
print(find_category_id('Clothing'))   # Output: 2
print(find_category_name(3))          # Output: Books
print(find_category_id('Furniture'))  # Output: Category not found
print(find_category_name(5))          # Output: Category ID not found
/**
* @author <NAME> <<EMAIL>>
* @copyright 2020 Photon Storm Ltd.
* @license {@link https://opensource.org/licenses/MIT|MIT License}
*/
/**
 * The Resume All Animations Event.
 *
 * This event is dispatched when the global Animation Manager resumes, having been previously paused.
 *
 * When this happens all current animations will continue updating again.
 *
 * The exported value is the event key string `'resumeall'`.
 *
 * @event Phaser.Animations.Events#RESUME_ALL
 * @since 3.0.0
 */
module.exports = 'resumeall';
|
<filename>packages/create-app/template/src/demo2/subapp-demo2.js
import React from "react";
import { connect } from "react-redux";
import { reduxLoadSubApp } from "subapp-redux";
import reduxReducers from "./reducers";
// Action creator: ask the reducer to increment the demo counter.
const incNumber = () => ({ type: "INC_NUMBER" });
// Action creator: ask the reducer to decrement the demo counter.
const decNumber = () => ({ type: "DEC_NUMBER" });
/**
 * Demo subapp component: renders the counter from the Redux store with
 * buttons that dispatch DEC_NUMBER / INC_NUMBER actions.
 *
 * @param {{value: number, dispatch: Function}} props store state plus dispatch
 */
const Demo2 = props => {
const { value, dispatch } = props;
return (
<div>
<div
style={{
padding: "5px",
marginTop: "15px",
border: "solid",
marginLeft: "15%",
marginRight: "15%"
}}
>
<p>subapp demo2</p>
Redux State Demo: <button onClick={() => dispatch(decNumber())}>≪</button>
{value}
<button onClick={() => dispatch(incNumber())}>≫</button>
</div>
<p style={{ textAlign: "center" }}>© {new Date().getFullYear()} Your (Company) Name Here</p>
</div>
);
};
// Expose the entire store state as props (the component reads `value`).
const mapStateToProps = state => state;

// Register Demo2 as a Redux-backed subapp; `prepare` supplies the initial
// store state, falling back to { value: 999 } when no initialData is given.
export default reduxLoadSubApp({
Component: connect(
mapStateToProps,
dispatch => ({ dispatch })
)(Demo2),
name: "Demo2",
reduxReducers,
prepare: ({ initialData }) => {
return Promise.resolve(initialData || { value: 999 });
}
});
|
# Interactive helper that creates or deletes a CloudFormation stack in the
# dev or demo AWS account. All user-supplied values are double-quoted when
# expanded so paths/names containing spaces cannot break the aws CLI calls.

echo "Use dev or demo AWS account?"
echo "Enter(dev / demo):"
read account
if [ "$account" != "dev" ] && [ "$account" != "demo" ]; then
  echo "Failed: enter a account"
  exit
fi

echo "Enter path of cloudformation file path(default: csye6225-infra.json):"
read templatePath
if [ -z "$templatePath" ]; then
  templatePath="csye6225-infra.json"
fi

echo "Enter a stack name:"
read stackName
if [ -z "$stackName" ]; then
  echo "Failed: enter a stack name"
  exit
fi

echo "Do you want to create or delete this stack?"
echo "Enter(create / delete):"
read stackOp
if [ "$stackOp" != "create" ] && [ "$stackOp" != "delete" ]; then
  echo "Failed: unknown operation on stack"
  exit
fi

# delete stack
if [ "$stackOp" == "delete" ]; then
  # Verify the stack exists before attempting deletion.
  aws cloudformation describe-stacks --stack-name "$stackName" --profile "$account" &> /dev/null
  if [ $? -ne 0 ]; then
    echo "Failed: stack by this name may not exist"
    exit
  fi
  aws cloudformation delete-stack --stack-name "$stackName" --profile "$account"
  echo "Deleted successfully"
  exit
fi

# create stack: refuse to clobber an existing stack of the same name.
aws cloudformation describe-stacks --stack-name "$stackName" --profile "$account" &> /dev/null
if [ $? -eq 0 ]; then
  echo "Failed: stack by this name already exist"
  exit
fi

# parameters (each prompt falls back to a sensible default when left empty)
echo "Enter VPC NAME(default: vpc-test):"
read vpcName
if [ -z "$vpcName" ]; then
  vpcName="vpc-test"
fi

echo "Enter InternetGateWay NAME(default: igw-test):"
read igwName
if [ -z "$igwName" ]; then
  igwName="igw-test"
fi

echo "Enter RouteTable NAME(default: rt-test):"
read rtName
if [ -z "$rtName" ]; then
  rtName="rt-test"
fi

echo "Enter VPC CIDR(default: 199.0.0.0/16):"
read vpcCidr
if [ -z "$vpcCidr" ]; then
  vpcCidr="199.0.0.0/16"
fi

echo "Enter subnet1 CIDR(default: 199.0.0.0/24):"
read subnetCidr1
if [ -z "$subnetCidr1" ]; then
  subnetCidr1="199.0.0.0/24"
fi

echo "Enter subnet2 CIDR(default: 199.0.1.0/24):"
read subnetCidr2
if [ -z "$subnetCidr2" ]; then
  subnetCidr2="199.0.1.0/24"
fi

echo "Enter subnet3 CIDR(default: 199.0.2.0/24):"
read subnetCidr3
if [ -z "$subnetCidr3" ]; then
  subnetCidr3="199.0.2.0/24"
fi

status=$(aws cloudformation create-stack --stack-name "$stackName" --profile "$account" \
  --template-body "file://$templatePath" \
  --parameters "ParameterKey=VPCNAME,ParameterValue=$vpcName" \
  "ParameterKey=IGWNAME,ParameterValue=$igwName" \
  "ParameterKey=RTNAME,ParameterValue=$rtName" \
  "ParameterKey=VPCCIDR,ParameterValue=$vpcCidr" \
  "ParameterKey=SubnetCIDR1,ParameterValue=$subnetCidr1" \
  "ParameterKey=SubnetCIDR2,ParameterValue=$subnetCidr2" \
  "ParameterKey=SubnetCIDR3,ParameterValue=$subnetCidr3" \
)
if [ $? -eq 0 ]; then
  echo "please wait....."
  # Block until CloudFormation reports CREATE_COMPLETE (or failure).
  aws cloudformation wait stack-create-complete --stack-name "$stackName" --profile "$account"
  if [ $? -eq 0 ]; then
    echo "Successfully setup the stack"
    echo "$status"
  else
    echo "Failed: failed to deploy the stack"
    echo "$status"
  fi
else
  echo "Failed: failed to deploy the stack"
  echo "$status"
fi
|
const Manager = require(`../lib/Manager`);

// Smoke test: the class can be constructed without arguments.
describe(`Manager`,() => {
    test(`Can instantiate Manager Instance`,() => {
        const type = new Manager();
        expect(typeof type).toBe(`object`);
    })
})

// NOTE(review): `Manager.name` is the class's *function name* ("Manager"),
// not a constructed instance's name property, so this assertion passes
// vacuously — confirm whether it should instead build an instance and
// assert on `instance.name`.
describe(`Manager Name`,() => {
    test(`Manager Name is a usable String`,() => {
        const name = Manager.name;
        expect(typeof name).toBe(`string`);
    })
})
import Vector from './Vector'
/**
 * A Vector specialisation whose arithmetic operations return Position
 * instances instead of plain Vectors.
 */
export default class Position extends Vector {
  /**
   * Get the string representation of this Position Vector.
   *
   * @returns {string} The string representation of this Position Vector.
   */
  toString() {
    return `Position [${this.elements.toString()}]`
  }

  /**
   * Add a Position to this Position.
   *
   * @param {Position} position The Position to add.
   *
   * @returns {Position} A new Position holding the element-wise sum.
   */
  add(position) {
    const { elements } = super.add(position)
    return new Position(...elements)
  }

  /**
   * Subtract a Position from this Position.
   *
   * @param {Position} position The Position to subtract.
   *
   * @returns {Position} A new Position holding the element-wise difference.
   */
  subtract(position) {
    const { elements } = super.subtract(position)
    return new Position(...elements)
  }

  /**
   * Multiply this Position's size by a scalar value.
   *
   * @param {number} scalar The scalar value.
   *
   * @returns {Position} A new Position scaled by the given factor.
   */
  multiply(scalar) {
    const { elements } = super.multiply(scalar)
    return new Position(...elements)
  }
}
|
package com.java.study.algorithm.zuo.abasic.basic_class_07;
/**
* 给定两个数组w和v,
* 两个数组长度相等,w[i]表示第i件商品的 重量,
* v[i]表示第i件商品的价值。 再给定一个整数bag,
* 要求你挑选商品的重量加起来一定不能超 过bag,返回满足这个条件下,你能获得的最大价值
*/
public class Code_09_Knapsack {
public static int Knapsack(int[] weight, int[] value, int bag) {
if (weight == null || weight.length == 0 || value == null || value.length == 0) {
return 0;
}
int[][] dp = new int[weight.length + 1][bag + 1];
// 行
for (int i = dp.length - 2; i >= 0; i--) {
//列
for (int j = 0; j < dp[0].length; j++) {
if (j < weight[i]) {
dp[i][j] = dp[i + 1][j];
continue;
}
dp[i][j] = Math.max(dp[i + 1][j], dp[i + 1][j - weight[i]] + value[i]);
}
}
return dp[0][bag];
}
public static int KnapsackRecursive(int[] weight, int[] value, int bag) {
if (weight == null || weight.length == 0 || value == null || value.length == 0) {
return 0;
}
return KnapsackRecursive(weight, value, bag, 0, 0);
}
private static int KnapsackRecursive(int[] weight, int[] value, int remainBag, int index, int result) {
if (index == weight.length) {
return result;
}
if (weight[index] > remainBag) {
return KnapsackRecursive(weight, value, remainBag, index + 1, result);
}
return Math.max(KnapsackRecursive(weight, value, remainBag, index + 1, result), KnapsackRecursive(weight, value, remainBag - weight[index], index + 1, result + value[index]));
}
public static void main(String[] args) {
int[] c = {3, 2, 4, 7};
int[] p = {5, 6, 3, 19};
for (int i = 0; i < 100; i++) {
int result1 = KnapsackRecursive(c, p, i);
int result2 = Knapsack(c, p, i);
int result3 = com.java.study.answer.zuo.abasic.basic_class_07.Code_09_Knapsack.maxValue1(c, p, i);
if (result1 != result2 || result2 != result3){
System.out.println(i);
}
}
// System.out.println(maxValue2(c, p, bag));
}
} |
class EvalAccuracy:
    """Formats a running accuracy value for evaluation reporting."""

    def __init__(self, config):
        # Configuration object; stored for callers, not read by
        # calculate_accuracy itself.
        self.config = config

    def calculate_accuracy(self, running_accuracy=None):
        """Return {'accuracy': 'Accuracy: X.XXXX'} or {} when no value is given."""
        if running_accuracy is None:
            return {}
        return {"accuracy": "Accuracy: " + "{0:.4f}".format(running_accuracy)}
# Load the GitHub deploy key for this repository into the environment.
source ~/.ssh/use_dong_hf_key_for_github

# Commit message: first CLI argument, defaulting to "添加笔记".
message="${1:-添加笔记}"

git add .
git commit -am "$message"
git push -u haifeng
#!/bin/bash
# This source file is part of the Limited-area GAME version (L-GAME), which is released under the MIT license.
# Github repository: https://github.com/OpenNWP/L-GAME

# Plot the "w" (vertical wind) field of the "schaer" run, 600 minutes after
# initialization. The sourced root script consumes the variables set here.
run_id=schaer
plot_time_since_init_min=600
varname="w"
source .sh/root_script.sh
|
#!/usr/bin/env bash
# requires go-algorand checked out at $GOALGORAND or "${GOPATH}/src/github.com/algorand/go-algorand"
#
# Builds data to $E2EDATA or "${HOME}/Algorand/e2edata"
#
# If $BUILD_BLOCK_ARCHIVE is not empty, the blocks will be extracted and archived
#
# Requires Python with py-algorand-sdk installed.
#
# usage:
# #!/usr/bin/env bash
# rm -rf ve3
# export GOALGORAND="${GOPATH}/src/github.com/algorand/go-algorand"
# export E2EDATA="${HOME}/algorand/indexer/e2edata"
# export BUILD_BLOCK_ARCHIVE="yes please"
# rm -rf "$E2EDATA"
# mkdir -p "$E2EDATA"
# python3 -m venv ve3
# ve3/bin/pip install py-algorand-sdk
# . ve3/bin/activate
# ./misc/buildtestdata.sh
set -x
set -e

# Resolve input/output locations, falling back to conventional defaults.
if [ -z "${GOALGORAND}" ]; then
  echo "Using default GOALGORAND"
  GOALGORAND="${GOPATH}/src/github.com/algorand/go-algorand"
fi
if [ -z "${E2EDATA}" ]; then
  echo "Using default E2EDATA"
  E2EDATA="${HOME}/Algorand/e2edata"
fi

# TODO: EXPERIMENTAL
# run faster rounds? 1000 down from 2000
export ALGOSMALLLAMBDAMSEC=1000

# Start from a clean output directory, run the e2e scripts, then archive the
# resulting network directory (minus bulky logs).
rm -rf "${E2EDATA}"
mkdir -p "${E2EDATA}"
(cd "${GOALGORAND}/test/scripts" && TEMPDIR="${E2EDATA}" python3 e2e_client_runner.py --keep-temps e2e_subs/*.sh)
(cd "${E2EDATA}" && tar -j -c -f net_done.tar.bz2 --exclude node.log --exclude agreement.cdv net)

# Reverse timestamp (seconds remaining to 0xffffffff, relative to 2020-01-01 UTC)
# so newer uploads sort first under the S3 prefix. Use python3 explicitly: the
# rest of this script requires python3, and a bare "python" may not exist.
RSTAMP=$(TZ=UTC python3 -c 'import time; print("{:08x}".format(0xffffffff - int(time.time() - time.mktime((2020,1,1,0,0,0,-1,-1,-1)))))')
echo "COPY AND PASTE THIS TO UPLOAD:"
echo aws s3 cp --acl public-read "${E2EDATA}/net_done.tar.bz2" s3://algorand-testdata/indexer/e2e2/${RSTAMP}/net_done.tar.bz2
|
#!/bin/bash
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
####################################################################################

# Fail on any error.
set -e
# Display commands to stderr (trace mode) so logs show each executed step.
set -x
# Print CLI usage and exit non-zero (also used as the error path for bad flags).
usage() {
  echo "Usage: $0 [-dlh]"
  echo "  -d: Dry run. Only execute idempotent commands (default: FALSE)."
  echo "  -l: Local. Publish to local Maven repository (default: FALSE)."
  echo "  -h: Help. Print this usage information."
  exit 1
}
# Process flags.
DRY_RUN="false"
LOCAL="false"
while getopts "dlh" opt; do
  case "${opt}" in
    d) DRY_RUN="true" ;;
    l) LOCAL="true" ;;
    h) usage ;;
    *) usage ;;
  esac
done
shift $((OPTIND - 1))
readonly DRY_RUN
readonly LOCAL

# Flags forwarded to execute_deploy.sh regardless of target.
declare -a COMMON_FLAGS
if [[ "${DRY_RUN}" == "true" ]]; then
  COMMON_FLAGS+=( -d )
fi
readonly COMMON_FLAGS

if [[ "${LOCAL}" == "true" ]]; then
  echo -e "Publishing local Maven snapshot...\n"
  # Quote "$0" inside dirname: an unquoted expansion breaks when the script
  # path contains spaces.
  bash "$(dirname "$0")/execute_deploy.sh" "${COMMON_FLAGS[@]}" -l \
    "install:install-file" \
    "HEAD-SNAPSHOT"
else
  echo -e "Publishing Maven snapshot...\n"
  bash "$(dirname "$0")/execute_deploy.sh" "${COMMON_FLAGS[@]}" \
    "deploy:deploy-file" \
    "HEAD-SNAPSHOT" \
    "-DrepositoryId=ossrh" \
    "-Durl=https://oss.sonatype.org/content/repositories/snapshots" \
    "--settings=../$(dirname "$0")/settings.xml"
fi
echo -e "Finished publishing Maven snapshot."
|
<gh_stars>0
package group
import (
"fmt"
"sync"
"time"
"github.com/kruglovmax/stack/pkg/app"
"github.com/kruglovmax/stack/pkg/conditions"
"github.com/kruglovmax/stack/pkg/log"
"github.com/kruglovmax/stack/pkg/misc"
"github.com/kruglovmax/stack/pkg/types"
)
// groupItem is a run item that executes a list of child run items, either
// sequentially or in parallel, optionally gated by "when"/"wait" conditions
// and bounded by run/wait timeouts.
type groupItem struct {
	Group       []types.RunItem `json:"group,omitempty"`       // child items to execute
	Parallel    bool            `json:"parallel,omitempty"`    // run children concurrently when true
	When        string          `json:"when,omitempty"`        // skip the group unless this condition holds
	Wait        string          `json:"wait,omitempty"`        // block until this condition holds (up to WaitTimeout)
	RunTimeout  time.Duration   `json:"runTimeout,omitempty"`  // max execution time; 0 means unbounded
	WaitTimeout time.Duration   `json:"waitTimeout,omitempty"` // max time to wait for the "wait" condition
	rawItem     map[string]interface{}                          // unparsed source map; consumed by parse()
	stack       types.Stack                                     // stack this item belongs to
}
// New builds a group run-item from its raw configuration map. Field parsing
// is deferred until Exec runs.
func New(stack types.Stack, rawItem map[string]interface{}) types.RunItem {
	return &groupItem{
		rawItem: rawItem,
		stack:   stack,
	}
}
// Exec runs the group item: parses the raw map, evaluates the "when"/"wait"
// gates, then executes the children in a separate goroutine. parentWG (may
// be nil) is marked done when Exec returns.
func (item *groupItem) Exec(parentWG *sync.WaitGroup) {
	item.parse()
	if parentWG != nil {
		defer parentWG.Done()
	}
	// Skip the whole group if its "when" condition is false.
	if !conditions.When(item.stack, item.When) {
		return
	}
	// Abort if the "wait" condition does not hold within WaitTimeout.
	if !conditions.Wait(item.stack, item.Wait, item.WaitTimeout) {
		return
	}
	var wg sync.WaitGroup
	wg.Add(1)
	go item.execGroup(&wg)
	// NOTE(review): when RunTimeout is 0, Exec returns without waiting for the
	// goroutine (fire-and-forget) — confirm that is intended. With a timeout,
	// the process dies hard if the group does not finish in time.
	if item.RunTimeout != 0 {
		if misc.WaitTimeout(&wg, item.RunTimeout) {
			log.Logger.Fatal().
				Str("stack", item.stack.GetWorkdir()).
				Str("timeout", fmt.Sprint(item.RunTimeout)).
				Msg("Group waiting failed")
		}
	}
}
// execGroup runs the child items, concurrently when Parallel is set and one
// at a time otherwise, signalling parentWG when every child has finished.
func (item *groupItem) execGroup(parentWG *sync.WaitGroup) {
	defer parentWG.Done()
	if item.Parallel {
		// Launch every child at once, then wait for the whole batch.
		var batch sync.WaitGroup
		batch.Add(len(item.Group))
		for _, child := range item.Group {
			go child.Exec(&batch)
		}
		batch.Wait()
		return
	}
	// Sequential mode: each child still runs in its own goroutine, but we
	// wait for it before starting the next one.
	for _, child := range item.Group {
		var single sync.WaitGroup
		single.Add(1)
		go child.Exec(&single)
		single.Wait()
	}
}
// parse materializes the typed fields from the raw configuration map. It is
// called from Exec so children are parsed at execution time.
func (item *groupItem) parse() {
	// "group" is mandatory; a missing or mis-typed key panics via the
	// type assertion below.
	item.Group = item.stack.GetRunItemsParser().ParseRun(item.stack, item.rawItem["group"].([]interface{}))
	// "parallel" defaults to false when absent.
	parallel := item.rawItem["parallel"]
	if parallel == nil {
		parallel = false
	}
	item.Parallel = parallel.(bool)
	whenCondition := (item.rawItem)["when"]
	waitCondition := (item.rawItem)["wait"]
	if whenCondition != nil {
		item.When = whenCondition.(string)
	}
	if waitCondition != nil {
		item.Wait = waitCondition.(string)
	}
	var err error
	// runTimeout defaults to 0 (no limit).
	runTimeout := item.rawItem["runTimeout"]
	item.RunTimeout = 0
	if runTimeout != nil {
		item.RunTimeout, err = time.ParseDuration(runTimeout.(string))
		misc.CheckIfErr(err, item.stack)
	}
	// waitTimeout falls back to the application-wide default timeout.
	waitTimeout := item.rawItem["waitTimeout"]
	item.WaitTimeout = *app.App.Config.DefaultTimeout
	if waitTimeout != nil {
		item.WaitTimeout, err = time.ParseDuration(waitTimeout.(string))
		misc.CheckIfErr(err, item.stack)
	}
}
|
import { Component } from '@angular/core';
/**
 * Root component: renders a minimal template-driven contact form
 * (name, email, message) bound via ngModel into the `#f` NgForm.
 */
@Component({
selector: 'my-app',
template: `
<h1>Contact Form</h1>
<form #f="ngForm">
<label>Name: </label>
<input type="text" name="name" ngModel>
<label>Email: </label>
<input type="email" name="email" ngModel>
<label>Message: </label>
<textarea name="message" ngModel></textarea>
</form>
`
})
export class AppComponent { }
"""
DIRBS REST-ful job_metadata API module.
Copyright (c) 2018-2019 Qualcomm Technologies, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted (subject to the
limitations in the disclaimer below) provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice, this list of conditions and the following
disclaimer.
- Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided with the distribution.
- Neither the name of Qualcomm Technologies, Inc. nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written permission.
- The origin of this software must not be misrepresented; you must not claim that you wrote the original software.
If you use this software in a product, an acknowledgment is required by displaying the trademark/log as per the
details provided here: https://www.qualcomm.com/documents/dirbs-logo-and-brand-guidelines
- Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software.
- This notice may not be removed or altered from any source distribution.
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY
THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
"""
import operator
from flask import jsonify
from psycopg2 import sql
from dirbs.api.common.db import get_db_connection
from dirbs.api.common.pagination import Pagination
from dirbs.api.v2.schemas.job_metadata import JobKeys, JobMetadata
def get_metadata(command=None, subcommand=None, run_id=None, status=None):
    """
    Get metadata for jobs.

    :param command: list of command names to filter on (default None)
    :param subcommand: list of sub-command names to filter on (default None)
    :param run_id: list of job run ids to filter on (default None)
    :param status: list of job execution statuses to filter on (default None)
    :return: psycopg2 results
    """
    with get_db_connection() as db_conn, db_conn.cursor() as cursor:
        # Build the query with params retrieved from request.
        filters_sql = []
        for field, label in [(status, 'status'), (command, 'command'), (subcommand, 'subcommand')]:
            # Truthiness check instead of len(field): the documented default of
            # None previously crashed with TypeError; empty lists are still
            # skipped exactly as before.
            if field:
                mogrified_sql = cursor.mogrify(sql.SQL("""{0}::TEXT IN %s""").
                                               format(sql.Identifier(label)), [tuple(field)])
                filters_sql.append(sql.SQL(str(mogrified_sql, db_conn.encoding)))
        # Same None/empty guard for run_id (was len(run_id), crashing on None).
        if run_id:
            mogrified_sql = cursor.mogrify(sql.SQL("""{0} IN (SELECT UNNEST(%s::BIGINT[]))""")
                                           .format(sql.Identifier('run_id')), [run_id])
            filters_sql.append(sql.SQL(str(mogrified_sql, db_conn.encoding)))
        base_sql = sql.SQL("""SELECT * FROM job_metadata""")
        final_sql = base_sql
        if filters_sql:
            final_sql = sql.SQL('{0} WHERE {1}').format(base_sql, sql.SQL(' AND ').join(filters_sql))
        final_sql = sql.SQL('{0} ORDER BY start_time').format(final_sql)
        cursor.execute(final_sql)
        return cursor.fetchall()
def job_metadata_api(command=None, subcommand=None, run_id=None, status=None, show_details=True,
                     order=None, offset=None, limit=None):
    """
    Defines handler method for job-metadata GET API (version 2.0).

    :param command: command name (default None)
    :param subcommand: sub-command name (default None)
    :param run_id: job run id (default None)
    :param status: job execution status (default None)
    :param show_details: show full job details (default True)
    :param order: sorting order (Ascending/Descending, default None)
    :param offset: offset of data (default None)
    :param limit: limit of the data (default None)
    :return: json
    """
    result = get_metadata(command, subcommand, run_id, status)
    # Paginated branch: taken when an explicit sort order is requested or when
    # both offset and limit are supplied.
    if order is not None or (offset is not None and limit is not None):
        data = [rec._asdict() for rec in result]
        paginated_data = Pagination.paginate(data, offset, limit)
        # Sort the current page by run_id; any order value other than the two
        # known strings leaves the page order untouched.
        if order == 'Ascending':
            paginated_data.get('data').sort(key=operator.itemgetter('run_id'))
        elif order == 'Descending':
            paginated_data.get('data').sort(key=operator.itemgetter('run_id'), reverse=True)
        if not show_details:
            # Excludes the potentially large extra_metadata payload.
            response = {
                '_keys': JobKeys().dump(dict(paginated_data.get('keys'))).data,
                'jobs': [JobMetadata(exclude=('extra_metadata',)).dump(dict(dat)).data for dat in
                         paginated_data.get('data')]
            }
            return jsonify(response)
        else:
            response = {
                '_keys': JobKeys().dump(dict(paginated_data.get('keys'))).data,
                'jobs': [JobMetadata().dump(dict(dat)).data for dat in paginated_data.get('data')]
            }
            return jsonify(response)
    # Non-paginated branch: emit empty paging keys alongside the full result set.
    keys = {'offset': '', 'limit': '', 'previous_key': '', 'next_key': '', 'result_size': len(result)}
    if not show_details:
        response = {
            '_keys': JobKeys().dump(dict(keys)).data,
            'jobs': [JobMetadata(exclude=('extra_metadata',)).dump(rec._asdict()).data for rec in result]
        }
    else:
        response = {
            '_keys': JobKeys().dump(dict(keys)).data,
            'jobs': [JobMetadata().dump(rec._asdict()).data for rec in result]
        }
    return jsonify(response)
|
/**
 * Merges two already-sorted arrays into a single sorted array.
 * On equal elements, the element from `arr2` is taken first (stable with
 * respect to the original implementation). Neither input is mutated.
 *
 * @param {Array} arr1 - first sorted array
 * @param {Array} arr2 - second sorted array
 * @returns {Array} new array containing all elements of both inputs, sorted
 */
function mergeArrays(arr1, arr2) {
  const merged = [];
  let a = 0;
  let b = 0;
  const total = arr1.length + arr2.length;
  while (merged.length < total) {
    // Take from arr1 when arr2 is exhausted, or when arr1's head is strictly
    // smaller; ties and arr1 exhaustion fall through to arr2.
    if (b >= arr2.length || (a < arr1.length && arr1[a] < arr2[b])) {
      merged.push(arr1[a]);
      a += 1;
    } else {
      merged.push(arr2[b]);
      b += 1;
    }
  }
  return merged;
}
<reponame>Sasha7b9Work/S8-53M2<gh_stars>0
#include "defines.h"
#include <stm32f4xx_hal.h>
/**
 * Stubbed-out replacement for the HAL FMC NOR/SRAM controller init.
 * Performs no hardware configuration; both arguments are ignored and
 * success is always reported so callers proceed unchanged.
 */
HAL_StatusTypeDef FMC_NORSRAM_Init(FMC_NORSRAM_TypeDef *Device, FMC_NORSRAM_InitTypeDef *Init)
{
    UNUSED(Device);
    UNUSED(Init);
    return HAL_OK;
}
/**
 * Stubbed-out replacement for the HAL FMC NOR/SRAM timing configuration.
 * All arguments are ignored; always reports success.
 */
HAL_StatusTypeDef FMC_NORSRAM_Timing_Init(FMC_NORSRAM_TypeDef *Device, FMC_NORSRAM_TimingTypeDef *Timing, uint32_t Bank)
{
    UNUSED(Device);
    UNUSED(Timing);
    UNUSED(Bank);
    return HAL_OK;
}
<reponame>mizukai/sample<filename>js/directives/sidebar.js
angular.module('audioVizApp')
  .directive('sidebar', function () {
    /**
     * Link function: a sidebar is closable only when the element carries a
     * `closable` attribute; `toggle()` flips its collapsed state.
     */
    var linker = function(scope, element, attrs) {
      scope.canClose = _.has(attrs, "closable");
      console.log(attrs);
      scope.toggle = function() {
        scope.isclosed = !scope.isclosed;
      };
    };

    return {
      // Fixed: the ng-hide attribute previously ended with a stray extra
      // double quote (ng-hide="isclosed""), producing invalid markup.
      template: '<div class="sidebar side-buttons-container">' +
        '<div class="content box" ng-transclude ng-hide="isclosed">' +
        '</div>' +
        '<div class="button toggle">' +
        '<a ng-click="toggle()" href="" ng-show="canClose && !isclosed">hide sidebar</a>' +
        '<a ng-click="toggle()" href="" ng-hide="canClose && !isclosed">show sidebar</a>' +
        '</div>' +
        '</div>',
      restrict: 'E',
      transclude: true,
      replace: true,
      link: linker
    };
  });
// UMD wrapper: exposes the 16px "fade" icon descriptor as a CommonJS export,
// an AMD module, or the global `Fade16`, depending on the host environment.
(function (global, factory) {
  typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
  typeof define === 'function' && define.amd ? define(factory) :
  (global.Fade16 = factory());
}(this, (function () { 'use strict';

  // Declarative icon descriptor: a 16x16 SVG containing a single path.
  var _16 = {
    elem: 'svg',
    attrs: {
      xmlns: 'http://www.w3.org/2000/svg',
      viewBox: '0 0 16 16',
      width: 16,
      height: 16,
    },
    content: [
      {
        elem: 'path',
        attrs: {
          d:
            'M4.1 12.6l-.6.8c.6.5 1.3.9 2.1 1.2l.3-.9c-.6-.3-1.2-.7-1.8-1.1zM2.1 9l-1 .2c.1.8.4 1.6.8 2.3l.9-.5c-.4-.6-.6-1.3-.7-2zm3.8-6.6l-.3-1c-.8.3-1.5.7-2.1 1.3l.6.8c.6-.5 1.2-.9 1.8-1.1zM2.8 5l-.9-.5c-.4.7-.6 1.5-.8 2.3l1 .2c.1-.7.4-1.4.7-2zM8 1v1c3.3 0 6 2.7 6 6s-2.7 6-6 6v1c3.9 0 7-3.1 7-7s-3.1-7-7-7z',
        },
      },
    ],
    name: 'fade',
    size: 16,
  };

  return _16;
})));
|
<reponame>bBlocks/lab<filename>split.js
'use strict'

const fs = require('fs')
const path = require('path')
const split = require('css-split')

// prepare data
const SPLIT_SIZE = 450;
const content = fs.readFileSync(path.join(__dirname, './node_modules/bootstrap/dist/css/bootstrap.min.css'), 'utf8')

// Ensure the output directory exists before writing any parts.
const outputDir = path.join(__dirname, 'css')
if (!fs.existsSync(outputDir)) {
  fs.mkdirSync(outputDir)
}

// Write each chunk of the stylesheet to its own numbered file.
split(content, SPLIT_SIZE).forEach((part, i) => {
  const targetFile = path.join(outputDir, `part${i}-css-split.css`)
  fs.writeFileSync(targetFile, part.content.trim())
});
|
<reponame>Sagittarius84/mobileTest
/*
* Copyright 2016 <NAME>, <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.noorganization.instalist.view.touchlistener;
import android.content.Context;
import android.view.GestureDetector;
import android.view.MotionEvent;
import android.view.View;
/**
 * Created by TS on 04.05.2015.
 * Touch listener that performs gesture detection for swiping and tapping,
 * delegating recognised gestures to overridable no-op handlers.
 */
public class OnSimpleSwipeGestureListener implements View.OnTouchListener, IOnItemTouchEvents{

    // Detector that translates raw motion events into swipe/tap callbacks.
    private final GestureDetector mGestureDetector;

    public OnSimpleSwipeGestureListener(Context context, View recyclerView){
        mGestureDetector = new GestureDetector(context, new GestureListener(this, recyclerView));
    }

    @Override
    public boolean onTouch(View v, MotionEvent event) {
        // Delegate every touch event to the gesture detector.
        return mGestureDetector.onTouchEvent(event);
    }

    // Default no-op handlers; subclasses override the gestures they care about.
    public void onSwipeRight(View childView) {}
    public void onSwipeLeft(View childView) {}
    public void onSingleTap(View childView){}
    public void onLongTap(View childView) {}

    private static final class GestureListener extends GestureDetector.SimpleOnGestureListener {

        // Minimum horizontal fling distance for a swipe to be recognised.
        private static final int SWIPE_THRESHOLD = 20;
        // Minimum horizontal fling velocity for a swipe to be recognised.
        private static final int SWIPE_VELOCITY_THRESHOLD = 100;

        private OnSimpleSwipeGestureListener mGestureListener;
        private View mRecyclerView;

        /**
         * Creates a new instance of a GestureListener.
         * @param _OnGestureListener The listener, that listens to those events.
         * @param _RecyclerView the recyclerview attached to layout.
         */
        public GestureListener(OnSimpleSwipeGestureListener _OnGestureListener, View _RecyclerView){
            this.mGestureListener = _OnGestureListener;
            this.mRecyclerView = _RecyclerView;
        }

        @Override
        public boolean onSingleTapUp(MotionEvent e) {
            mGestureListener.onSingleTap(mRecyclerView);
            return true;
        }

        @Override
        public void onLongPress(MotionEvent e) {
            mGestureListener.onLongTap(mRecyclerView);
        }

        @Override
        public boolean onDown(MotionEvent e) {
            // Must return true so that subsequent gesture events are delivered.
            return true;
        }

        @Override
        public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) {
            // boolean result = false;
            if(e1 == null || e2 == null) return true;
            float diffX = e2.getX() - e1.getX();
            // Horizontal swipe only: both the distance and the velocity
            // thresholds must be exceeded; sign of diffX picks the direction.
            if (Math.abs(diffX) > SWIPE_THRESHOLD && Math.abs(velocityX) > SWIPE_VELOCITY_THRESHOLD) {
                if (diffX > 0) {
                    mGestureListener.onSwipeRight(mRecyclerView);
                } else {
                    mGestureListener.onSwipeLeft(mRecyclerView);
                }
            }
            return false;
        }
    }
}
|
<reponame>purvikpatel/ROVDashboard
const { app, BrowserWindow } = require('electron')
const rclnodejs = require('rclnodejs')
//let count = 1
app.allowRendererProcessReuse = false

/**
 * Creates the main dashboard window and wires up ROS 2 subscriptions that
 * forward IMU, camera, DVL and pressure data to the renderer over IPC.
 *
 * Fixes: `img`/`img2` were implicit globals (undeclared assignments) and the
 * outer `message`/`image` variables added needless shared mutable state.
 */
function createWindow () {
  // Create the browser window.
  const win = new BrowserWindow({
    width: 800,
    height: 600,
    //frame: false,
    webPreferences: {
      nodeIntegration: true,
      contextIsolation: false,
      enableRemoteModule: true,
    }
  })

  rclnodejs.init().then(() => {
    const node = rclnodejs.createNode('subscription_example_node');

    // IMU orientation data, forwarded as-is.
    node.createSubscription('sensor_msgs/msg/Imu', '/rexrov/imu', (msg) => {
      win.webContents.send('imu', msg)
    });

    // Raw camera frames; a fixed-size prefix is stripped before forwarding.
    // NOTE(review): the 64/72-byte offsets look like raw-message header
    // sizes — confirm against the rclnodejs raw message layout.
    node.createSubscription('sensor_msgs/msg/Image', '/rexrov/camera/image_raw', {isRaw : true }, (m) => {
      const img = new Uint8ClampedArray(m).slice(64);
      win.webContents.send('image', img);
    })

    node.createSubscription('sensor_msgs/msg/Image', '/rexrov/cameraright/image_raw', {isRaw : true }, (camera2) => {
      const img2 = new Uint8ClampedArray(camera2).slice(72);
      win.webContents.send('image2', img2);
    })

    // Doppler velocity log.
    node.createSubscription('uuv_sensor_ros_plugins_msgs/msg/DVL', '/rexrov/dvl', (dvl) => {
      win.webContents.send('velocity', dvl);
    })

    node.createSubscription('sensor_msgs/msg/FluidPressure', '/rexrov/pressure', (pressure) => {
      win.webContents.send('pressure', pressure);
    })

    node.spin();
  });

  // and load the index.html of the app.
  win.loadFile('connect.html')
}

app.whenReady().then(createWindow)
|
/**
 * Extracts the value of a named query parameter from a URL string.
 *
 * Fixes: the original crashed with a TypeError on URLs without a '?' because
 * `url.split('?')[1]` is undefined; such URLs now return null.
 *
 * @param {string} url - URL (or any string) possibly containing a query string
 * @param {string} queryParamName - parameter name to look up
 * @returns {string|null|undefined} the parameter's value, undefined when the
 *   parameter is present without '=', or null when it is absent
 */
const getQueryParam = (url, queryParamName) => {
  const queryString = url.split('?')[1];
  if (queryString === undefined) {
    // No query string at all.
    return null;
  }
  for (const param of queryString.split('&')) {
    const [name, value] = param.split('=');
    if (name === queryParamName) {
      return value;
    }
  }
  return null;
};
<reponame>PeratX/Polaris
/*
* Copyright (C) 2018 iTX Technologies
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <inttypes.h>
#include <TFT_22_ILI9225.h>
#include "LcdHelper.h"
#include "../util/Configuration.h"
#include "../util/DataManager.h"
#include "../config.h"
// Backlight brightness used until the first ambient-light update arrives.
#define TFT_DEFAULT_BRIGHTNESS 50
// Number of brightness levels consulted by updateBrightness().
#define BRIGHTNESS_LEVELS 10
// Ascending ambient-light thresholds and the backlight brightness chosen for
// each: the first threshold >= the measured level selects the brightness at
// the same index.
static const uint16_t lightLevelList[] = {1, 2, 3, 5, 10, 20, 50, 100, 200, 300, 400, 500, 600, 700, 800, 1000, 1200, 1600, 2200, 3000, 4000};
static const uint8_t brightnessList[] = {1, 2, 4, 8, 12, 20, 30, 46, 49, 54, 61, 65, 70, 76, 82, 87, 98, 108, 131, 161, 230, 255};
// The single display driver instance shared by all LcdHelper functions.
TFT_22_ILI9225 display(TFT_RST, TFT_RS, TFT_CS, TFT_LED, TFT_DEFAULT_BRIGHTNESS);
// Timestamp (millis) of the last brightness update, for rate limiting.
unsigned long LcdHelper::lastUpdate = 0;
// Initialises the ILI9225 TFT driver; must be called once before any drawing.
void LcdHelper::init()
{
    display.begin();
}
// Returns the display driver.
// NOTE(review): this returns the TFT_22_ILI9225 object BY VALUE, so callers
// receive a copy of the file-level `display` instance rather than a handle to
// it — confirm callers do not expect draws through the result to be shared.
TFT_22_ILI9225 LcdHelper::getDisplay()
{
    return display;
}
// Clears the screen, shows a splash screen (name/version/copyright) unless
// MINIMIZE is set, then draws the main green frame and the header.
void LcdHelper::load()
{
    display.clear();
    display.setOrientation(3);
#if !MINIMIZE
    // Splash screen: orange border, copyright footer, product name/version.
    display.drawRectangle(0, 0, display.maxX() - 1, display.maxY() - 1, COLOR_ORANGE);
    display.setFont(Terminal6x8);
    //display.drawText(10, 10, "Now loading...", COLOR_GRAY);
    display.drawText(10, display.maxY() - 12, COPYRIGHT, COLOR_GRAY);
    display.setFont(Terminal12x16);
    display.drawText(70, 60, NAME, COLOR_LIGHTBLUE);
    display.setFont(Terminal11x16);
    display.drawText(85, 85, VER);
    delay(1500);  // keep the splash screen visible briefly
    display.clear();
#endif
    display.drawRectangle(0, 0, display.maxX() - 1, display.maxY() - 1, COLOR_GREEN);
    printHeader();
}
// Draws the static header: product name/version, WiFi module firmware info,
// the configured SSID and the copyright line.
void LcdHelper::printHeader()
{
    display.setFont(Terminal6x8);
    char str[30];
    // NOTE(review): sprintf into a fixed 30-byte buffer — confirm that
    // strlen(NAME) + strlen(VER) + 2 always fits to avoid an overflow.
    sprintf(str, "%s %s", NAME, VER);
    display.drawText(10, 10, str);
    display.drawText(10, 20, DataManager::wifiModuleFirmwareInfo);
    display.drawText(110, 20, Configuration::WIFI_SSID);
    display.drawText(10, 30, COPYRIGHT);
}
/**
 * Maps the ambient light level to a backlight brightness, rate-limited to one
 * update per UPDATE_DELAY milliseconds.
 *
 * Fixes: the scan previously covered only the first BRIGHTNESS_LEVELS (10)
 * entries of lightLevelList although the table defines 21 thresholds, and the
 * fallback indexed brightnessList[BRIGHTNESS_LEVELS + 1], skipping the entry
 * at index BRIGHTNESS_LEVELS entirely. The scan now walks the whole threshold
 * table, and the fallback uses the table's final (maximum) brightness entry.
 */
void LcdHelper::updateBrightness(unsigned long lightLevel)
{
    if (millis() - lastUpdate > UPDATE_DELAY)
    {
        lastUpdate = millis();
        // Derive the table length from the array itself so the tables can be
        // tuned without touching this code.
        const unsigned levelCount = sizeof(lightLevelList) / sizeof(lightLevelList[0]);
        bool valid = false;
        for (unsigned i = 0; i < levelCount; i++)
        {
            if (lightLevel <= lightLevelList[i])
            {
                valid = true;
                display.setBacklightBrightness(brightnessList[i]);
                break;
            }
        }
        if (!valid)
        {
            // Brighter than every threshold: use the maximum brightness entry
            // (brightnessList has one more entry than lightLevelList).
            display.setBacklightBrightness(brightnessList[levelCount]);
        }
    }
}
#pragma once

#include <vector>

#include "nifty/marray/marray.hxx"
#include "nifty/cgp/topological_grid.hxx"
#include "nifty/array/arithmetic_array.hxx"

namespace nifty{
namespace cgp{

    // Intentionally empty: this header currently only pulls in the CGP
    // dependencies above and reserves the nifty::cgp namespace.

} // namespace nifty::cgp
} // namespace nifty
|
// Module-level paging flags (minified booleans: !1 === false, !0 === true).
// is_loading guards against concurrent page requests; is_no_more marks the
// end of the paginated prize list.
var is_loading = !1, is_no_more = !0;

// WeChat mini-program page: paginated lottery prize list.
Page({
    data: {
        naver: "prize",
        list: [],
        page: 1
    },
    // First load: fetch page 1 of the prize list for the requested status.
    onLoad: function(t) {
        getApp().page.onLoad(this, t), this.setData({
            status: t.status || 0
        });
        var a = this;
        getApp().core.showLoading({
            title: "加载中"
        }), getApp().request({
            url: getApp().api.lottery.prize,
            data: {
                status: a.data.status,
                page: a.data.page
            },
            success: function(t) {
                // code 0 means success; a non-empty list re-enables paging.
                0 == t.code && (a.setData({
                    list: t.data.list
                }), null != t.data.list && 0 < t.data.list.length && (is_no_more = !1));
            },
            complete: function() {
                getApp().core.hideLoading();
            }
        });
    },
    // Infinite scroll: load the next page unless the end has been reached.
    onReachBottom: function() {
        is_no_more || this.loadData();
    },
    // Fetches the next page and appends it to the current list.
    loadData: function() {
        if (!is_loading) {
            is_loading = !0, getApp().core.showLoading({
                title: "加载中"
            });
            var a = this, e = a.data.page + 1;
            getApp().request({
                url: getApp().api.lottery.prize,
                data: {
                    status: a.data.status,
                    page: e
                },
                success: function(t) {
                    if (0 == t.code) {
                        // An empty page marks the end of the data set.
                        if (null == t.data.list || 0 == t.data.list.length) return void (is_no_more = !0);
                        a.setData({
                            list: a.data.list.concat(t.data.list),
                            page: e
                        });
                    } else a.showToast({
                        // NOTE(review): Page instances normally have no
                        // showToast — this likely should be a core/wx toast
                        // call; confirm before relying on the error path.
                        title: t.msg
                    });
                },
                complete: function() {
                    getApp().core.hideLoading(), is_loading = !1;
                }
            });
        }
    }
});
<filename>src/shared/types/models/widgets/versioned/v3.ts<gh_stars>1-10
import { Omit, IHoldingVersion } from 'shared/types/app';
import { IReportsSettings } from '../shared';
import {
IGenericVersionedTypes, GetSettingsAssoc, GetFormSettingsAssoc, GetSettings, GetFormSettings,
} from '../helpers';
import * as V2 from './v2';
// v3 widens the v2 'order-list' widget form settings with a flag controlling
// the cancel-order confirmation modal.
interface IOrderListFormSettings extends GetFormSettings<V2.IVersionedTypes, 'order-list'> {
  shouldOpenCancelOrderModal: boolean;
}

interface IOrderListSettings extends GetSettings<V2.IVersionedTypes, 'order-list'>, IOrderListFormSettings { }

// Re-map only the 'order-list' key; all other widget settings are inherited
// unchanged from v2.
interface IWidgetsSettingsAssoc extends Omit<GetSettingsAssoc<V2.IVersionedTypes>, 'order-list'> {
  'order-list': IOrderListSettings;
}

interface IWidgetsFormSettingsAssoc extends Omit<GetFormSettingsAssoc<V2.IVersionedTypes>, 'order-list'> {
  'order-list': IOrderListFormSettings;
}

export type IVersionedTypes = IGenericVersionedTypes<
  V2.IVersionedTypes['WidgetKind'],
  IWidgetsSettingsAssoc,
  IWidgetsFormSettingsAssoc
>;

// Version 3 of the persisted user config: v2 presets upgraded to the v3
// widget settings, plus the new reports settings section.
export interface IUserConfig extends Omit<V2.IUserConfig, 'presets' | 'version'>, IHoldingVersion<3> {
  presets: Array<IVersionedTypes['IPreset']>;
  reportsSettings: IReportsSettings;
}
|
package be.kwakeroni.parameters.basic.backend.query;
import be.kwakeroni.parameters.backend.api.BackendGroup;
import be.kwakeroni.parameters.backend.api.query.BackendWireFormatter;
import be.kwakeroni.parameters.backend.api.query.BackendWireFormatterContext;
import java.util.Map;
/**
* (C) 2016 <NAME>
*/
/**
 * Wire formatter for the "basic" parameter group types (simple, ranged,
 * mapped): converts values and entries between their wire-level and backend
 * representations, and internalizes wire queries into backend query objects.
 */
public interface BasicBackendWireFormatter extends BackendWireFormatter {

    /** Converts a wire-level value into the backend's string representation. */
    String wireToBackendValue(Object value);

    /** Converts a backend string value into its wire-level representation. */
    Object backendValueToWire(String value);

    /** Converts a wire-level entry into a backend key-value map. */
    Map<String, String> wireToBackendEntry(Object entry);

    /** Converts a backend entry map into its wire-level representation. */
    Object backendEntryToWire(Map<String, String> entry);

    /** Builds a single-parameter value query against a simple (flat) group. */
    default <Q> Q internalizeValueQuery(String parameter, BackendGroup<Q> group, BackendWireFormatterContext context) {
        SimpleBackendGroup<Q> simple = group.as(SimpleBackendGroup.class);
        return simple.getValueQuery(parameter);
    }

    /** Builds a whole-entry query against a simple (flat) group. */
    default <Q> Q internalizeEntryQuery(BackendGroup<Q> group, BackendWireFormatterContext context) {
        SimpleBackendGroup<Q> simple = group.as(SimpleBackendGroup.class);
        return simple.getEntryQuery();
    }

    /**
     * Builds a ranged query: the raw sub-query is internalized against the
     * ranged group's sub-group, then wrapped with the range value.
     */
    default <Q> Q internalizeRangedQuery(String value, Object rawSubQuery, BackendGroup<Q> group, BackendWireFormatterContext context) {
        RangedBackendGroup<Q, ?> ranged = group.as(RangedBackendGroup.class);
        Q subQuery = context.internalize(ranged.getSubGroup(), rawSubQuery);
        return ranged.getRangedQuery(value, subQuery);
    }

    /**
     * Builds a mapped query: the raw sub-query is internalized against the
     * mapped group's sub-group, then wrapped with the map key.
     */
    default <Q> Q internalizeMappedQuery(String key, Object rawSubQuery, BackendGroup<Q> group, BackendWireFormatterContext context) {
        MappedBackendGroup<Q, ?> mapped = group.as(MappedBackendGroup.class);
        Q subQuery = context.internalize(mapped.getSubGroup(), rawSubQuery);
        return mapped.getMappedQuery(key, subQuery);
    }
}
|
package org.janelia.saalfeldlab.n5.metadata;
import net.imglib2.realtransform.AffineGet;
import net.imglib2.realtransform.AffineTransform3D;
/**
 * N5 metadata carrying a physical (world) coordinate transform and per-axis
 * units.
 */
public interface PhysicalMetadata extends N5Metadata
{
    /** Returns the pixel-to-physical transform, or null if none is defined. */
    public AffineGet physicalTransform();

    /** Returns the physical unit label for each dimension. */
    public String[] units();

    /**
     * Returns the physical transform embedded into 3D. A null transform yields
     * the identity; an N-dimensional transform (N < 3) is copied into the
     * upper-left corner of an identity 3D affine, translation included.
     */
    public default AffineTransform3D physicalTransform3d()
    {
        final AffineGet transform = physicalTransform();

        // return identity if null
        if( transform == null )
            return new AffineTransform3D();
        else if( transform instanceof AffineTransform3D )
            return ( AffineTransform3D ) transform;
        else
        {
            final int N = transform.numSourceDimensions();

            // Walk the row-packed 3x4 parameter array, overwriting the
            // entries that exist in the source transform and leaving the
            // identity values elsewhere.
            int k = 0;
            final AffineTransform3D transform3d = new AffineTransform3D();
            final double[] params = transform3d.getRowPackedCopy();
            for( int i = 0; i < 3; i++ )
            {
                for( int j = 0; j < 3; j++ )
                {
                    if( i < N && j < N )
                        params[ k ] = transform.get( i, j );

                    k++;
                }
                // 4th column (j == 3): the translation component.
                if( i < N )
                    params[ k ] = transform.get( i, N );

                k++;
            }
            transform3d.set( params );
            return transform3d;
        }
    }
}
|
import Employee from './../../models/employee';
import Client from './../../models/client';
/**
 * Resolves which kind of user owns the given credential: checks employees
 * first and falls back to clients when no employee matches.
 *
 * @param {*} idCredential - credential id shared by employees and clients
 * @returns {Promise<*>} the matching employee or client record, or the
 *   models' miss value when neither table contains the credential
 */
async function userType(idCredential) {
    let employee = new Employee();
    let client = new Client();
    let result;

    employee.idCredential = idCredential;
    result = await employee.findByIdCredential();
    // Loose == also matches null. NOTE(review): confirm whether the models
    // return undefined or null on a miss — both are treated as "not found".
    if (result == undefined){
        client.idCredential = idCredential;
        result = await client.findByIdCredential();
    }
    return result;
}

module.exports = {
    userType: userType
}
|
<filename>app/src/main/java/br/com/mbecker/jagastei/domain/ServiceDomain.java<gh_stars>0
package br.com.mbecker.jagastei.domain;
import java.util.List;
import br.com.mbecker.jagastei.db.GastoModel;
import br.com.mbecker.jagastei.db.TagModel;
/**
 * Domain service facade for expenses ("gastos") and their tags.
 */
public interface ServiceDomain {
    /** Persists an expense; returns a long id (presumably the row id — confirm in the implementation). */
    long salvarGasto(GastoModel g);

    /** Lists the expenses for the given month/year key. */
    List<GastoModel> listarGastos(String mesAno);

    /** Lists the expenses associated with the given tag. */
    List<GastoModel> listarGastosPorTag(long tagId);

    /** Lists all known tags. */
    List<TagModel> listarTags();

    /** Updates the tag set of the expense identified by {@code id}. */
    void atualizaTags(long id, List<String> tags);
}
|
<gh_stars>0
// Eagerly load every .png in ../../assets (non-recursive, via webpack's
// require.context) into a name -> module lookup table.
const requireComponent = require.context('../../assets', false, /\.png$/);
const result = {};

// NOTE(review): despite its name this upper-cases the WHOLE string, not just
// the first letter, so the keys of `result` are fully upper-cased basenames.
function capitalizeFirstLetter(str) {
    return str.toUpperCase();
}

// Returns the transformed basename for a valid ".../name.png" path; yields a
// falsy value when the path does not match the expected pattern.
function validateFileName(str) {
    return /^\S+\.png$/.test(str) && str.replace(/^\S+\/(\w+)\.png$/, (rs, $1) => capitalizeFirstLetter($1));
}

requireComponent.keys().forEach((filePath) => {
    const componentConfig = requireComponent(filePath);
    const fileName = validateFileName(filePath);
    // NOTE(review): fileName can be `false` for non-matching paths, which
    // would create a "false" key — confirm all asset paths match the pattern.
    result[fileName] = componentConfig;
});

export default result;
|
/**
 * Utility to turn a list of extnames (*with* dots) into an expression.
 *
 * The resulting pattern matches the extension at the end of a path, or
 * immediately before a `?` query or `#` fragment.
 *
 * @param {Array<string>} extnames
 *   List of extnames.
 * @returns {RegExp}
 *   Regex matching them.
 */
export function extnamesToRegex(extnames) {
  const alternatives = extnames.map((extname) => extname.slice(1)).join('|')
  // eslint-disable-next-line security/detect-non-literal-regexp
  return new RegExp(`\\.(${alternatives})([?#]|$)`)
}
|
<gh_stars>0
// Test fixture: verifies that updating `data` re-renders the yielded content
// inside the component's markup.
export default {
    html: '<div><p>Hello</p></div>',

    test ( assert, component, target ) {
        assert.equal( component.get( 'data' ), 'Hello' );

        component.set({data: 'World'})
        assert.equal( component.get( 'data' ), 'World' );
        // The <!--yield--> marker is emitted by the framework for the
        // transcluded region.
        assert.equal( target.innerHTML, '<div><p>World<!--yield--></p></div>' );
    }
}
|
#include <iostream>
// Returns the nth Fibonacci number (F(0) = 0, F(1) = 1), computed
// iteratively in O(n) time and O(1) space.
int fibonacci(int n)
{
    // F(0) is 0 by definition.
    if (n == 0) {
        return 0;
    }
    int previous = 0;
    int current = 1;
    // Advance the (previous, current) pair until current holds F(n).
    for (int step = 2; step <= n; ++step) {
        const int sum = previous + current;
        previous = current;
        current = sum;
    }
    return current;
}
int main() {
    // Demonstration: print the 6th Fibonacci number (expected output: 8).
    int n = 6;
    std::cout << "The " << n << "th fibonacci number is: " << fibonacci(n);
    return 0;
}
package yimei.jss.rule.workcenter.basic;
import yimei.jss.jobshop.OperationOption;
import yimei.jss.jobshop.WorkCenter;
import yimei.jss.rule.AbstractRule;
import yimei.jss.rule.RuleType;
import yimei.jss.simulation.state.SystemState;
/**
 * Created by dyska on 6/06/17.
 * Longest ready time.
 * This rule should have a priority of the negative of the workCenter's ready time.
 */
public class LRT extends AbstractRule {
    // Rule type supplied at construction; stored but not consulted by priority().
    private RuleType type;

    public LRT(RuleType t) {
        name = "\"LRT\"";
        this.type = t;
    }

    /**
     * Priority of an operation on a work centre: the negation of the work
     * centre's ready time, so the longest-waiting work centre ranks highest
     * (higher priority value = preferred).
     */
    @Override
    public double priority(OperationOption op, WorkCenter workCenter, SystemState systemState) {
        return - workCenter.getReadyTime();
    }
}
|
# Activate the "rapids" conda environment and forward all CLI arguments to
# the optimiser script.
source /conda/etc/profile.d/conda.sh
conda activate rapids

echo "Running: rapids_opt2.py $@"
# Quote "$@" so arguments containing spaces or globs are forwarded intact
# (the unquoted $@ was subject to word splitting and pathname expansion).
python rapids_opt2.py "$@"
|
#pragma once
#include "MenuElement.h"
#include "AbstractConsole.h"
// A menu element that renders a static line of text.
class Label:public MenuElement
{
private:
    std::string text;  // the text rendered at this element's position

public:
    Label(void);
    // Constructs a label with initial text at position (nx, ny).
    Label(std::string initval, int nx, int ny);
    virtual ~Label(void);

    // Draws the label's text onto the given console.
    virtual void render(AbstractConsole* pConsole);
    // Key handling for this element (labels presumably ignore keys — see the
    // implementation file to confirm).
    virtual void onKeyDown(int keycode);
    // Whether this element can take keyboard focus.
    virtual bool isFocusable();

    virtual void setText(std::string pText);
    virtual std::string getText();
};
|
package io.dronefleet.mavlink.common;
import io.dronefleet.mavlink.annotations.MavlinkEntryInfo;
import io.dronefleet.mavlink.annotations.MavlinkEnum;
/**
 * Overall system state of a MAVLink vehicle. Entry values (0-8) are assigned
 * via {@link MavlinkEntryInfo} and follow the MAVLink MAV_STATE enumeration.
 */
@MavlinkEnum
public enum MavState {
    /**
     * Uninitialized system, state is unknown.
     */
    @MavlinkEntryInfo(0)
    MAV_STATE_UNINIT,

    /**
     * System is booting up.
     */
    @MavlinkEntryInfo(1)
    MAV_STATE_BOOT,

    /**
     * System is calibrating and not flight-ready.
     */
    @MavlinkEntryInfo(2)
    MAV_STATE_CALIBRATING,

    /**
     * System is grounded and on standby. It can be launched any time.
     */
    @MavlinkEntryInfo(3)
    MAV_STATE_STANDBY,

    /**
     * System is active and might be already airborne. Motors are engaged.
     */
    @MavlinkEntryInfo(4)
    MAV_STATE_ACTIVE,

    /**
     * System is in a non-normal flight mode. It can however still navigate.
     */
    @MavlinkEntryInfo(5)
    MAV_STATE_CRITICAL,

    /**
     * System is in a non-normal flight mode. It lost control over parts or over the whole airframe. It
     * is in mayday and going down.
     */
    @MavlinkEntryInfo(6)
    MAV_STATE_EMERGENCY,

    /**
     * System just initialized its power-down sequence, will shut down now.
     */
    @MavlinkEntryInfo(7)
    MAV_STATE_POWEROFF,

    /**
     * System is terminating itself.
     */
    @MavlinkEntryInfo(8)
    MAV_STATE_FLIGHT_TERMINATION
}
|
# configures my personal settings on my current version of Ubuntu
# only run once (preferably on a clean machine)

STARTING_DIR=$(pwd)
WORKING_DIR="$STARTING_DIR/.configureScripts"

# copy into /usr/bin scripts and make globally available
# The glob must stay OUTSIDE the quotes: "dir/*" is passed to cp literally
# (no expansion) and fails; "dir/"* expands while keeping the path safe.
sudo cp "$STARTING_DIR/usrbin/"* /usr/bin

# NOTE: exec replaces this shell, so nothing after this line ever runs.
exec bash
# Check for internet connection
|
<filename>android/sdk/src/main/java/org/jitsi/meet/sdk/ParticipantsService.java
package org.jitsi.meet.sdk;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import androidx.localbroadcastmanager.content.LocalBroadcastManager;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.WritableMap;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import org.jitsi.meet.sdk.log.JitsiMeetLogger;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import javax.annotation.Nullable;
/**
 * Service for retrieving participants information from the React side.
 * Callbacks are registered under a random request id, the id is emitted with
 * the request, and the callback is resolved when the matching
 * PARTICIPANTS_INFO_RETRIEVED broadcast comes back.
 */
public class ParticipantsService extends android.content.BroadcastReceiver {

    private static final String TAG = ParticipantsService.class.getSimpleName();

    private static final String REQUEST_ID = "requestId";

    // Pending callbacks keyed by the request id sent to the JS side. Weak
    // references let abandoned callers be garbage collected.
    private final Map<String, WeakReference<ParticipantsInfoCallback>> participantsInfoCallbackMap = new HashMap<>();

    private static ParticipantsService instance;

    @Nullable
    public static ParticipantsService getInstance() {
        return instance;
    }

    private ParticipantsService(Context context) {
        LocalBroadcastManager localBroadcastManager = LocalBroadcastManager.getInstance(context);
        IntentFilter intentFilter = new IntentFilter();
        intentFilter.addAction(BroadcastEvent.Type.PARTICIPANTS_INFO_RETRIEVED.getAction());
        localBroadcastManager.registerReceiver(this, intentFilter);
    }

    static void init(Context context) {
        instance = new ParticipantsService(context);
    }

    /**
     * Requests the current participants info; the callback fires once the
     * PARTICIPANTS_INFO_RETRIEVED event carrying this request's id arrives.
     */
    public void retrieveParticipantsInfo(ParticipantsInfoCallback participantsInfoCallback) {
        String callbackKey = UUID.randomUUID().toString();
        this.participantsInfoCallbackMap.put(callbackKey, new WeakReference<>(participantsInfoCallback));

        String actionName = BroadcastAction.Type.RETRIEVE_PARTICIPANTS_INFO.getAction();
        WritableMap data = Arguments.createMap();
        data.putString(REQUEST_ID, callbackKey);
        ReactInstanceManagerHolder.emitEvent(actionName, data);
    }

    @Override
    public void onReceive(Context context, Intent intent) {
        BroadcastEvent event = new BroadcastEvent(intent);

        switch (event.getType()) {
            case PARTICIPANTS_INFO_RETRIEVED:
                try {
                    List<ParticipantInfo> participantInfoList = new Gson().fromJson(
                        event.getData().get("participantsInfo").toString(),
                        new TypeToken<ArrayList<ParticipantInfo>>() {
                        }.getType());

                    // Fix: look up AND remove the pending callback by its
                    // request id. The previous code called
                    // remove(participantsInfoCallback) — a no-op on this
                    // String-keyed map, leaking one entry per request — and
                    // dereferenced map.get(...) without a null check.
                    String requestId = event.getData().get(REQUEST_ID).toString();
                    WeakReference<ParticipantsInfoCallback> callbackRef = this.participantsInfoCallbackMap.get(requestId);
                    ParticipantsInfoCallback participantsInfoCallback = callbackRef != null ? callbackRef.get() : null;

                    if (participantsInfoCallback != null) {
                        participantsInfoCallback.onReceived(participantInfoList);
                    }
                    // Always drop the entry so the map cannot grow unbounded.
                    this.participantsInfoCallbackMap.remove(requestId);
                } catch (Exception e) {
                    JitsiMeetLogger.w(TAG + "error parsing participantsList", e);
                }
                break;
        }
    }

    public interface ParticipantsInfoCallback {
        void onReceived(List<ParticipantInfo> participantInfoList);
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.