text stringlengths 1 1.05M |
|---|
// Inject a one-shot page-context script that re-triggers the gift-quantity
// select's change handler, then removes its own <script> tag from the DOM.
var rnd = Math.random();
var s = document.createElement('script');
// Unique marker so the injected code can locate and remove this exact tag.
s.setAttribute('data-rnd', rnd);
// Use textContent, not innerText: innerText is layout-dependent and
// unreliable for elements that are not rendered (like a <script> node).
s.textContent = "$('.act-bar > .inner > .form-group > .input-select-a.gift-num-select > .input-text > .value').trigger('change'); document.querySelector('script[data-rnd=\"" + rnd + "\"]').remove(); ";
// document.head can be null very early in the page load; fall back to the
// root element. The removal selector above therefore must not assume the
// script landed under <head> (the original 'head > script[...]' selector
// failed to find the tag on the documentElement fallback path).
(document.head || document.documentElement).appendChild(s);
|
// Lazy-loaded merchant list view; the magic comment keeps it in its own
// webpack chunk named "merchant".
const merchantList = () => import(/* webpackChunkName: "merchant" */ '@/views/merchant/merchantList/index.vue');

// Route table for the merchant section. The detail route deliberately
// reuses the list component.
export default [
    {
        path: '/merchantList',
        name: 'merchantList',
        component: merchantList, // merchant list
    },
    {
        path: '/merchantList/merchantDetail',
        name: 'merchantDetail',
        component: merchantList, // merchant detail
    },
];
/// Difficulty levels a generated math problem can have.
enum ProblemDifficulty {
    case easy
    case medium
    case hard
}
/// Math operator symbols rendered inside question text.
enum MathSymbols: String {
    case times = "×"

    /// Returns the symbol with `padding` prepended and appended.
    func padded(with padding: String) -> String {
        return "\(padding)\(rawValue)\(padding)"
    }
}
// Define the Problem struct

/// A single generated math problem: the correct answer, the prompt shown
/// to the user, and the scaling strategy attached to it.
/// NOTE: field order matters — it fixes the memberwise initializer.
struct Problem {
// The correct numeric answer for this problem.
let expectedAnswer: Int
// User-facing question string, e.g. "What is 2 × 3?".
let questionText: String
// Scaling strategy; ScaleParameterizer currently declares no requirements.
let scaleParameterizer: ScaleParameterizer
}
// Define the ScaleParameterizer protocol

/// Marker protocol for problem-scaling strategies. It declares no
/// requirements yet; conforming types only tag a Problem's scaling.
protocol ScaleParameterizer {
// Define the protocol requirements
}
// Define the generateTermValues function

/// Produces the operand values for a problem at the given difficulty.
/// NOTE(review): `difficulty` is currently ignored — this is a stub that
/// always returns [2, 3]; confirm the intended ranges per difficulty.
func generateTermValues(difficulty: ProblemDifficulty) -> [Int] {
// Implement the logic to generate term values based on the difficulty level
// ...
return [2, 3] // Placeholder values
}
// Implement the generateProblem function

/// Default no-op scaling strategy, used when the caller does not supply one.
/// (ScaleParameterizer declares no requirements, so an empty type conforms.)
struct DefaultScaleParameterizer: ScaleParameterizer {}

/// Builds a multiplication Problem for the given difficulty.
///
/// - Parameters:
///   - difficulty: forwarded to generateTermValues to pick the operands.
///   - scaleParameterizer: scaling strategy stored on the Problem; defaults
///     to DefaultScaleParameterizer. This fixes the original body, which
///     referenced an undefined `scaleParameterizer` variable, while staying
///     call-compatible with `generateProblem(difficulty:)`.
/// - Returns: a Problem whose expectedAnswer is the product of the terms.
func generateProblem(
    difficulty: ProblemDifficulty,
    scaleParameterizer: ScaleParameterizer = DefaultScaleParameterizer()
) -> Problem {
    let values = generateTermValues(difficulty: difficulty)
    return Problem(
        expectedAnswer: values.reduce(1, *),
        questionText: "What is \(values[0]) \(MathSymbols.times.padded(with: " ")) \(values[1])?",
        scaleParameterizer: scaleParameterizer
    )
}
package com.jensen.draculadaybyday.notification;
import android.app.job.JobInfo;
import android.app.job.JobScheduler;
import android.content.ComponentName;
import android.content.Context;
import android.util.Log;
/**
 * Singleton wrapper around {@link JobScheduler} used to schedule the
 * {@link NotificationService} job.
 */
public class Schedule {

    private static final String TAG = "Schedule";
    // Single, stable job id: re-scheduling replaces the previous job.
    private static final int JOB_ID = 1;

    private static Schedule mSchedule;
    private static Context mContext;

    private final JobScheduler mJobScheduler;

    private Schedule(Context context) {
        mJobScheduler = (JobScheduler) context.getSystemService(Context.JOB_SCHEDULER_SERVICE);
    }

    /**
     * Returns the process-wide instance, creating it on first use.
     * Synchronized so concurrent first calls cannot create two instances.
     *
     * @param context any context; only its application context is retained
     */
    public static synchronized Schedule getInstance(Context context) {
        if (mSchedule == null) {
            mSchedule = new Schedule(context);
        }
        // Keep only the application context in the static field: statically
        // holding an Activity (or other short-lived) context leaks it.
        mContext = context.getApplicationContext();
        return mSchedule;
    }

    /**
     * Schedules the notification job to run no later than
     * {@code timeToFutureJob} milliseconds from now. The job persists
     * across reboots and needs no network, charging, or idle state.
     *
     * @param timeToFutureJob     deadline in milliseconds for the job
     * @param futureNotifications currently unused; kept for API compatibility
     */
    public void makeNotification(long timeToFutureJob, int futureNotifications) {
        if (mJobScheduler == null) {
            return;
        }
        try {
            ComponentName notificationName = new ComponentName(mContext, NotificationService.class);
            JobInfo jInfo = new JobInfo.Builder(JOB_ID, notificationName)
                    .setRequiredNetworkType(JobInfo.NETWORK_TYPE_NONE)
                    .setRequiresCharging(false)
                    .setRequiresDeviceIdle(false)
                    .setOverrideDeadline(timeToFutureJob)
                    .setPersisted(true)
                    .build();
            mJobScheduler.schedule(jInfo);
        } catch (Exception e) {
            // e.getMessage() may be null, which Log.d rejects on some
            // platform versions — log the throwable itself instead.
            Log.d(TAG, "Failed to schedule notification job", e);
        }
    }
}
|
// Copyright (C) 2019-2021 Intel Corporation
//
// SPDX-License-Identifier: MIT
// Replace the real server-proxy with an in-memory mock so these tests run
// without a CVAT backend.
jest.mock('../../src/server-proxy', () => {
const mock = require('../mocks/server-proxy.mock');
return mock;
});
// Initialize the API entry point on window, where client code expects it.
window.cvat = require('../../src/api');
const { Project } = require('../../src/project');
describe('Feature: get projects', () => {
    test('get all projects', async () => {
        const result = await window.cvat.projects.get();
        expect(Array.isArray(result)).toBeTruthy();
        expect(result).toHaveLength(2);
        for (const el of result) {
            expect(el).toBeInstanceOf(Project);
        }
    });
    test('get project by id', async () => {
        const result = await window.cvat.projects.get({
            id: 2,
        });
        expect(Array.isArray(result)).toBeTruthy();
        expect(result).toHaveLength(1);
        expect(result[0]).toBeInstanceOf(Project);
        expect(result[0].id).toBe(2);
        // eslint-disable-next-line no-underscore-dangle
        expect(result[0]._internalData.task_ids).toHaveLength(1);
    });
    test('get a project by an unknown id', async () => {
        const result = await window.cvat.projects.get({
            id: 1,
        });
        expect(Array.isArray(result)).toBeTruthy();
        expect(result).toHaveLength(0);
    });
    test('get a project by an invalid id', async () => {
        // Await the rejection assertion: without await, a failed expectation
        // is an unhandled promise and the test can pass spuriously.
        await expect(
            window.cvat.projects.get({
                id: '1',
            }),
        ).rejects.toThrow(window.cvat.exceptions.ArgumentError);
    });
    test('get projects by filters', async () => {
        const result = await window.cvat.projects.get({
            status: 'completed',
        });
        expect(Array.isArray(result)).toBeTruthy();
        expect(result).toHaveLength(1);
        expect(result[0]).toBeInstanceOf(Project);
        expect(result[0].id).toBe(2);
        expect(result[0].status).toBe('completed');
    });
    test('get projects by invalid filters', async () => {
        // Same unhandled-rejection fix as above.
        await expect(
            window.cvat.projects.get({
                unknown: '5',
            }),
        ).rejects.toThrow(window.cvat.exceptions.ArgumentError);
    });
});
describe('Feature: save a project', () => {
    test('save some changed fields in a project', async () => {
        // Fixed: this suite exercises projects, but the original called
        // window.cvat.tasks.get — a copy-paste slip that tested tasks.
        let result = await window.cvat.projects.get({
            id: 2,
        });
        result[0].bugTracker = 'newBugTracker';
        result[0].name = 'New Project Name';
        // save() is async; await it so the re-fetch below sees the update.
        await result[0].save();
        result = await window.cvat.projects.get({
            id: 2,
        });
        expect(result[0].bugTracker).toBe('newBugTracker');
        expect(result[0].name).toBe('New Project Name');
    });
    test('save some new labels in a project', async () => {
        let result = await window.cvat.projects.get({
            id: 6,
        });
        const labelsLength = result[0].labels.length;
        const newLabel = new window.cvat.classes.Label({
            name: 'My boss\'s car',
            attributes: [
                {
                    default_value: 'false',
                    input_type: 'checkbox',
                    mutable: true,
                    name: 'parked',
                    values: ['false'],
                },
            ],
        });
        result[0].labels = [...result[0].labels, newLabel];
        // Await the async save before re-fetching.
        await result[0].save();
        result = await window.cvat.projects.get({
            id: 6,
        });
        expect(result[0].labels).toHaveLength(labelsLength + 1);
        const appendedLabel = result[0].labels.filter((el) => el.name === 'My boss\'s car');
        expect(appendedLabel).toHaveLength(1);
        expect(appendedLabel[0].attributes).toHaveLength(1);
        expect(appendedLabel[0].attributes[0].name).toBe('parked');
        expect(appendedLabel[0].attributes[0].defaultValue).toBe('false');
        expect(appendedLabel[0].attributes[0].mutable).toBe(true);
        expect(appendedLabel[0].attributes[0].inputType).toBe('checkbox');
    });
    test('save new project without an id', async () => {
        const project = new window.cvat.classes.Project({
            name: 'New Empty Project',
            labels: [
                {
                    name: 'car',
                    attributes: [
                        {
                            default_value: 'false',
                            input_type: 'checkbox',
                            mutable: true,
                            name: 'parked',
                            values: ['false'],
                        },
                    ],
                },
            ],
            bug_tracker: 'bug tracker value',
        });
        const result = await project.save();
        expect(typeof result.id).toBe('number');
    });
});
describe('Feature: delete a project', () => {
    test('delete a project', async () => {
        // Fetch, delete, then re-fetch to confirm the project is gone.
        let projects = await window.cvat.projects.get({ id: 6 });
        await projects[0].delete();
        projects = await window.cvat.projects.get({ id: 6 });
        expect(Array.isArray(projects)).toBeTruthy();
        expect(projects).toHaveLength(0);
    });
});
describe('Feature: delete a label', () => {
    test('delete a label', async () => {
        let result = await window.cvat.projects.get({
            id: 2,
        });
        const labelsLength = result[0].labels.length;
        // Drop the 'bicycle' label and persist the change.
        const remainingLabels = result[0].labels.filter((el) => el.name !== 'bicycle');
        result[0].labels = remainingLabels;
        // save() is async; await it so the re-fetch observes the deletion.
        await result[0].save();
        result = await window.cvat.projects.get({
            id: 2,
        });
        expect(result[0].labels).toHaveLength(labelsLength - 1);
    });
});
|
# The Book of Ruby - http://www.sapphiresteel.com

# String-keyed hash of room/location descriptions.
h1 = {
'room1'=>'The Treasure Room',
'room2'=>'The Throne Room',
'loc1'=>'A Forest Glade',
'loc2'=>'A Mountain Stream'
}
# Integer-keyed hashes used below to demonstrate merging.
h2 = {1=>'one', 2=>'two', 3=> 'three'}
h3 = {6=>'six', 5=>'five', 4=> 'four'}
# a complicated hash! — mixes scalar, array, nested-array and nested-hash values
multihash = {
'name' => 'Multi-Hash',
'array' => ['one','two','three','four'],
'nested array' =>
["I",
["wandered","lonely","as",
["a","cloud"]
]
],
'nested hash' => {'a'=>'hi','b'=>'goodbye'}
}
# merge returns a NEW hash; on key collisions the argument's values win.
hnew = (h2.merge(h3))
hnew2 = ((h2.merge(h3)).merge(multihash))
#:arg: aHash => Hash
# Returns aHash's [key, value] pairs as an array, ordered by the string
# form of each pair — this lets hashes with mixed key types sort together.
def sorted_hash( aHash )
  aHash.sort_by { |pair| pair.to_s }
end
# Print the sorted pair arrays for each merged hash.
p( sorted_hash(hnew) )
p( sorted_hash(hnew2) )
# Keys sort by their string form: "30" < "40" < "50".
p( sorted_hash({50=>1, 40=>2, 30=>3}) )
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.gui.demo.guidemo;
import static org.olat.core.gui.components.util.SelectionValues.entry;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;
import org.olat.core.gui.components.form.flexible.elements.AutoCompletionMultiSelection.AutoCompletionSource;
import org.olat.core.gui.components.util.SelectionValues;
/**
*
* Initial date: 6 Jan 2022<br>
* @author uhensler, <EMAIL>, http://www.frentix.com
*
*/
public class NameSource implements AutoCompletionSource {
private static final List<String> ALL = List.of("<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>",
"<NAME>", "<NAME>", "<NAME>", "<NAME>");
@Override
public SelectionValues getSelectionValues(Collection<String> keys) {
    // Keep only the requested names, preserving ALL's order; each selected
    // name maps to itself as both key and display value.
    SelectionValues values = new SelectionValues();
    for (String name : ALL) {
        if (keys.contains(name)) {
            values.add(entry(name, name));
        }
    }
    return values;
}
@Override
public SearchResult getSearchResult(String searchText) {
    // Case-insensitive substring match: the candidate is lowercased, so the
    // search text must be lowercased too (the original compared a lowercased
    // value against the raw search text and never matched upper-case input).
    String needle = searchText.toLowerCase();
    List<String> filtered = ALL.stream()
            .filter(value -> value.toLowerCase().contains(needle))
            .sorted()
            .collect(Collectors.toList());
    int countTotal = filtered.size();
    // Cap the visible result at 15 entries. subList's upper bound is
    // exclusive, so the bound is 15, not 14 (original off-by-one returned
    // only 14 items when more than 15 matched).
    List<String> result = filtered.size() > 15 ? filtered.subList(0, 15) : filtered;
    int countCurrent = result.size();
    SelectionValues selectionValues = new SelectionValues();
    result.forEach(key -> selectionValues.add(entry(key, key)));
    return new SearchResult(countTotal, countCurrent, selectionValues);
}
}
|
package tracer
import (
cryptorand "crypto/rand"
"encoding/hex"
"math"
"math/big"
"math/rand"
"sync"
"time"
"github.com/google/uuid"
"go.undefinedlabs.com/scopeagent/instrumentation"
)
var (
	// random is the process-wide PRNG, lazily seeded by ensureRandom.
	random *rand.Rand
	// mu guards the lazy initialization and every use of random.
	mu sync.Mutex
)
// getRandomId returns a random 64-bit identifier from the shared,
// lazily-initialized PRNG. Safe for concurrent use.
func getRandomId() uint64 {
	mu.Lock()
	ensureRandom()
	id := random.Uint64()
	mu.Unlock()
	return id
}
// ensureRandom lazily creates and seeds the package PRNG on first use.
// Callers must hold mu.
func ensureRandom() {
	if random == nil {
		random = rand.New(&safeSource{
			source: rand.NewSource(getSeed()),
		})
	}
}
// getSeed returns a seed for the package PRNG. It prefers crypto/rand; if
// that fails, it falls back to the wall clock plus scheduling jitter.
//
//go:noinline
func getSeed() int64 {
	var seed int64
	n, err := cryptorand.Int(cryptorand.Reader, big.NewInt(math.MaxInt64))
	if err == nil {
		seed = n.Int64()
	} else {
		instrumentation.Logger().Printf("cryptorand error generating seed: %v. \n falling back to time.Now()", err)
		// Add scheduling jitter to the clock-based seed by round-tripping a
		// timer value through a goroutine and channel before reading time.
		jitterStart := time.Now()
		cb := make(chan time.Time) // unbuffered (the original's explicit 0 capacity is equivalent but non-idiomatic)
		go func() { cb <- <-time.After(time.Nanosecond) }()
		now := <-cb
		jitter := time.Since(jitterStart)
		// Seed based on the clock + some jitter
		seed = now.Add(jitter).UnixNano()
	}
	instrumentation.Logger().Printf("seed: %d", seed)
	return seed
}
// safeSource holds a thread-safe implementation of rand.Source64.
type safeSource struct {
	source rand.Source
	sync.Mutex
}

// Int63 returns the next value from the wrapped source under the lock.
func (rs *safeSource) Int63() int64 {
	rs.Lock()
	n := rs.source.Int63()
	rs.Unlock()
	return n
}

// Uint64 widens Int63's result.
// NOTE(review): this yields only 63 bits of entropy (the top bit is always
// zero); a full rand.Source64 would combine two Int63 draws — confirm
// whether 64-bit coverage matters for trace/span ids.
func (rs *safeSource) Uint64() uint64 { return uint64(rs.Int63()) }

// Seed re-seeds the wrapped source under the lock.
func (rs *safeSource) Seed(seed int64) {
	rs.Lock()
	rs.source.Seed(seed)
	rs.Unlock()
}
// UUIDToString renders id as 32 lowercase hex characters without dashes
// (unlike uuid.UUID.String). Parameter renamed from `uuid` to avoid
// shadowing the imported package name.
func UUIDToString(id uuid.UUID) string {
	return hex.EncodeToString(id[:])
}
// StringToUUID parses a 32-character dashless hex string (as produced by
// UUIDToString) back into a uuid.UUID.
func StringToUUID(val string) (uuid.UUID, error) {
	data, err := hex.DecodeString(val)
	if err != nil {
		return uuid.UUID{}, err
	}
	return uuid.FromBytes(data)
}
|
import { NextApiHandler, NextApiRequest, NextApiResponse } from 'next';
import NextAuth, { Session } from 'next-auth';
import getConfig from 'next/config';
import GoogleProvider from 'next-auth/providers/google';
import FacebookProvider from 'next-auth/providers/facebook';
import CredentialsProvider from 'next-auth/providers/credentials';
import { PrismaAdapter } from '@next-auth/prisma-adapter';
import { User, Account } from 'next-auth/core/types';
import prisma from 'lib-server/prisma';
import { apiHandler } from 'lib-server/nc';
import { Routes } from 'lib-client/constants';
import { ClientUser } from 'types/models/User';
import { loginUser } from 'lib-server/services/auth';
import ApiError from 'lib-server/error';
const { serverRuntimeConfig } = getConfig();
// next-connect style handler; NextAuth is mounted as middleware below.
const handler = apiHandler();
handler.use(
(req: NextApiRequest, res: NextApiResponse): NextApiHandler =>
NextAuth(req, res, {
// OAuth providers plus email/password credentials.
providers: [
FacebookProvider({
clientId: serverRuntimeConfig.FACEBOOK_CLIENT_ID,
clientSecret: serverRuntimeConfig.FACEBOOK_CLIENT_SECRET,
}),
GoogleProvider({
clientId: serverRuntimeConfig.GOOGLE_CLIENT_ID,
clientSecret: serverRuntimeConfig.GOOGLE_CLIENT_SECRET,
}),
CredentialsProvider({
name: 'Credentials',
credentials: {
email: {
label: 'Email',
type: 'email',
},
password: {
label: 'Password',
type: 'password',
},
},
// redirect to same page and parse query params, unable to return api res
async authorize(credentials) {
if (!credentials) throw new ApiError('undefined credentials', 400);
const { user, error } = await loginUser(credentials);
// Throwing here surfaces the error via NextAuth's error query param.
if (error) throw error;
return user;
},
}),
],
session: {
strategy: 'jwt',
maxAge: 60 * 60, // 1h
},
callbacks: {
// both jwt and session are used to attach user to session
async jwt({ token, user, account, isNewUser }) {
// isNewUser = true only on user creation, can be used
// to update db and session
if (isNewUser && user && account) {
const data = await updateUser(user, account);
user = { ...user, ...data };
}
// Persist the user onto the token so session() can read it.
user && (token.user = user);
return token;
},
async session({ session, token }) {
let _session: Session | undefined = undefined;
const user = token.user as ClientUser;
// put just user's immutable props in session (id and email)
// for session user use useUser React Query state
if (user) {
_session = { ...session, user: { id: user.id, email: user.email } };
}
return _session as Session;
},
},
secret: serverRuntimeConfig.SECRET,
pages: { signIn: Routes.SITE.LOGIN },
adapter: PrismaAdapter(prisma),
debug: false,
})
);
/**
 * Persists provider-specific defaults for a just-created OAuth user:
 * a "<provider>_user_<username>" username, plus a placeholder email when
 * the provider (e.g. Facebook) did not supply one.
 * Returns the written fields so the caller can merge them into the JWT user.
 */
async function updateUser(user: User, account: Account) {
  const username = `${account.provider}_user_${user.username}`;
  const data = { provider: account.provider, username } as any;
  if (!user.email) {
    // Synthesize a unique placeholder so the unique-email constraint holds.
    data.email = `${data.username}@non-existing-facebook-email.com`;
  }
  await prisma.user.update({ where: { id: user.id }, data });
  return data;
}
export default handler;
|
#!/bin/sh
#These commands set up the Grid Environment for your job:
#PBS -N LandscapeEvaluation
#PBS -l nodes=1:ppn=2,walltime=5:00:00

# Move to the working directory; abort if it is missing so the commands
# below never run against the wrong directory (ShellCheck SC2164).
cd /home/awolniakowski/current/gripper-landscape || exit 1

# Raise the stack limit for the simulation.
ulimit -s 80000

# Stage the gripper definition into the scene's output directory.
# GRIPPER and SCENE are expected in the environment (e.g. qsub -v ...);
# quote all expansions to survive spaces/globs (ShellCheck SC2086).
cp "in/${GRIPPER}.grp.xml" "out/${SCENE}/${GRIPPER}.grp.xml"

EXE=/home/awolniakowski/robwork/trunk/RobWorkSim/src/sandbox/grippers/GraspPlugin/bin/evaluate-gripper
DWC="scenes/${SCENE}/Scene.dwc.xml"
TD="scenes/${SCENE}/task_hints.td.xml"
GRP="out/${SCENE}/${GRIPPER}.grp.xml"
OUT="out/${SCENE}/${GRIPPER}.tasks.xml"

# Evaluate the gripper on 1000 sampled grasp tasks.
"$EXE" --dwc "$DWC" --td "$TD" --gripper "$GRP" -o "$OUT" -n 1000
|
<filename>web/cashtab/src/hooks/__mocks__/mockReturnGetSlpBalancesAndUtxosNoZeroBalance.js
import BigNumber from 'bignumber.js';
export default {
tokens: [
{
info: {
height: 660869,
tx_hash:
'16b624b60de4a1d8a06baa129e3a88a4becd499e1d5d0d40b9f2ff4d28e3f660',
tx_pos: 1,
value: 546,
txid:
'16b624b60de4a1d8a06baa129e3a88a4becd499e1d5d0d40b9f2ff4d28e3f660',
vout: 1,
utxoType: 'token',
transactionType: 'send',
tokenId:
'<KEY>',
tokenTicker: 'ST',
tokenName: 'ST',
tokenDocumentUrl: 'developer.bitcoin.com',
tokenDocumentHash: '',
decimals: 0,
tokenType: 1,
tokenQty: '1',
isValid: true,
address:
'bitcoincash:qqvcsnz9x9nu7vq35vmrkjc7hkfxhhs9nuv44zm0ed',
},
tokenId:
'<KEY>',
balance: new BigNumber('1'),
hasBaton: false,
},
{
info: {
height: 660869,
tx_hash:
'c7da9ae6a0ce9d4f2f3345f9f0e5da5371228c8aee72b6eeac1b42871b216e6b',
tx_pos: 1,
value: 546,
txid:
'c7da9ae6a0ce9d4f2f3345f9f0e5da5371228c8aee72b6eeac1b42871b216e6b',
vout: 1,
utxoType: 'token',
transactionType: 'send',
tokenId:
'1f6a65e7a4bde92c0a012de2bcf4007034504a765377cdf08a3ee01d1eaa6901',
tokenTicker: '🍔',
tokenName: 'Burger',
tokenDocumentUrl:
'https://c4.wallpaperflare.com/wallpaper/58/564/863/giant-hamburger-wallpaper-preview.jpg',
tokenDocumentHash: '',
decimals: 0,
tokenType: 1,
tokenQty: '2',
isValid: true,
address:
'bitcoincash:qqvcsnz9x9nu7vq35vmrkjc7hkfxhhs9nuv44zm0ed',
},
tokenId:
'1f6a65e7a4bde92c0a012de2bcf4007034504a765377cdf08a3ee01d1eaa6901',
balance: new BigNumber('2'),
hasBaton: false,
},
{
info: {
height: 660869,
tx_hash:
'dac23f10dd65caa51359c1643ffc93b94d14c05b739590ade85557d338a21040',
tx_pos: 1,
value: 546,
txid:
'dac23f10dd65caa51359c1643ffc93b94d14c05b739590ade85557d338a21040',
vout: 1,
utxoType: 'token',
transactionType: 'send',
tokenId:
'7f8889682d57369ed0e32336f8b7e0ffec625a35cca183f4e81fde4e71a538a1',
tokenTicker: 'HONK',
tokenName: 'H<PASSWORD>',
tokenDocumentUrl: 'THE REAL HONK SLP TOKEN',
tokenDocumentHash: '',
decimals: 0,
tokenType: 1,
tokenQty: '1',
isValid: true,
address:
'bitcoincash:qqvcsnz9x9nu7vq35vmrkjc7hkfxhhs9nuv44zm0ed',
},
tokenId:
'7f8889682d57369ed0e32336f8b7e0ffec625a35cca183f4e81fde4e71a538a1',
balance: new BigNumber('3'),
hasBaton: false,
},
{
info: {
height: 660869,
tx_hash:
'efa6f67078810875513a116b389886610d81ecf6daf97d55dd96d3fdd201dfac',
tx_pos: 1,
value: 546,
txid:
'efa6f67078810875513a116b389886610d81ecf6daf97d55dd96d3fdd201dfac',
vout: 1,
utxoType: 'token',
transactionType: 'send',
tokenId:
'dd84ca78db4d617221b58eabc6667af8fe2f7eadbfcc213d35be9f1b419beb8d',
tokenTicker: 'TAP',
tokenName: 'Th<PASSWORD> Prayers',
tokenDocumentUrl: '',
tokenDocumentHash: '',
decimals: 0,
tokenType: 1,
tokenQty: '2',
isValid: true,
address:
'bitcoincash:qqvcsnz9x9nu7vq35vmrkjc7hkfxhhs9nuv44zm0ed',
},
tokenId:
'dd84ca78db4d617221b58eabc6667af8fe2f7eadbfcc213d35be9f1b419beb8d',
balance: new BigNumber('2'),
hasBaton: false,
},
{
balance: new BigNumber('310'),
hasBaton: true,
info: {
address:
'bitcoincash:qqvcsnz9x9nu7vq35vmrkjc7hkfxhhs9nuv44zm0ed',
decimals: 9,
height: 660978,
isValid: true,
mintBatonVout: 2,
tokenDocumentHash: '',
tokenDocumentUrl: 'https://cashtabapp.com/',
tokenId:
'bef614aac85c0c866f4d39e4d12a96851267d38d1bca5bdd6488bbd42e28b6b1',
tokenName: 'Cash Tab Points',
tokenTicker: 'CTP',
tokenType: 1,
transactionType: 'mint',
tx_hash:
'b622b770f74f056e07e5d2ea4d7f8da1c4d865e21e11c31a263602a38d4a2474',
tx_pos: 2,
txid:
'b622b770f74f056e07e5d2ea4d7f8da1c4d865e21e11c31a263602a38d4a2474',
utxoType: 'minting-baton',
value: 546,
vout: 2,
},
tokenId:
'bef614aac85c0c866f4d39e4d12a96851267d38d1bca5bdd6488bbd42e28b6b1',
},
{
balance: new BigNumber('523512344.7961451'),
hasBaton: true,
info: {
address:
'bitcoincash:qqvcsnz9x9nu7vq35vmrkjc7hkfxhhs9nuv44zm0ed',
decimals: 7,
height: 660982,
isValid: true,
mintBatonVout: 2,
tokenDocumentHash:
'����\\�IS\u001e9�����k+���\u0018���\u001b]�߷2��',
tokenDocumentUrl:
'https://www.ImpossiblyLongWebsiteDidYouThinkWebDevWouldBeFun.org',
tokenId:
'<KEY>',
tokenName:
'Test Token With Exceptionally Long Name For CSS And Style Revisions',
tokenTicker: 'WDT',
tokenType: 1,
transactionType: 'mint',
tx_hash:
'67605f3d18135b52d95a4877a427d100c14f2610c63ee84eaf4856f883a0b70e',
tx_pos: 2,
txid:
'67605f3d18135b52d95a4877a427d100c14f2610c63ee84eaf4856f883a0b70e',
utxoType: 'minting-baton',
value: 546,
vout: 2,
},
tokenId:
'<KEY>',
},
{
info: {
height: 667750,
tx_hash:
'e9a94cc174839e3659d2fe4d33490528d18ad91404b65eb8cc35d8fa2d3f5096',
tx_pos: 2,
value: 546,
txid:
'e9a94cc174839e3659d2fe4d33490528d18ad91404b65eb8cc35d8fa2d3f5096',
vout: 2,
utxoType: 'token',
transactionType: 'send',
tokenId:
'bd1acc4c986de57af8d6d2a64aecad8c30ee80f37ae9d066d758923732ddc9ba',
tokenTicker: 'TBS',
tokenName: 'TestBits',
tokenDocumentUrl: 'https://thecryptoguy.com/',
tokenDocumentHash: '',
decimals: 9,
tokenType: 1,
tokenQty: '5.854300861',
isValid: true,
address:
'bitcoincash:qqvcsnz9x9nu7vq35vmrkjc7hkfxhhs9nuv44zm0ed',
},
tokenId:
'bd1acc4c986de57af8d6d2a64aecad8c30ee80f37ae9d066d758923732ddc9ba',
balance: new BigNumber('9897999885.211011069'),
hasBaton: false,
},
{
info: {
height: 669057,
tx_hash:
'dd560d87bd632e40c6548021006653a150197ede13fadb5eadfa29abe4400d0e',
tx_pos: 1,
value: 546,
txid:
'dd560d87bd632e40c6548021006653a150197ede13fadb5eadfa29abe4400d0e',
vout: 1,
utxoType: 'token',
transactionType: 'send',
tokenId:
'<KEY>',
tokenTicker: 'NAKAMOTO',
tokenName: 'NAKAMOTO',
tokenDocumentUrl: '',
tokenDocumentHash: '',
decimals: 8,
tokenType: 1,
tokenQty: '1',
isValid: true,
address:
'bitcoincash:qphpmfj0qn7znklqhrfn5dq7qh36l3vxavu346vqcl',
},
tokenId:
'<KEY>',
balance: new BigNumber('1'),
hasBaton: false,
},
],
nonSlpUtxos: [
{
height: 669639,
tx_hash:
'0da6d49cf95d4603958e53360ad1e90bfccef41bfb327d6b2e8a77e242fa2d58',
tx_pos: 0,
value: 1000,
txid:
'0da6d49cf95d4603958e53360ad1e90bfccef41bfb327d6b2e8a77e242fa2d58',
vout: 0,
isValid: false,
address: 'bitcoincash:qphpmfj0qn7znklqhrfn5dq7qh36l3vxavu346vqcl',
wif: 'L58jqHoi5ynSdsskPVBJuGuVqTP8ZML1MwHQsBJY32Pv7cqDSCeH',
},
{
height: 669639,
tx_hash:
'39521b38cd1b6126a57a68b8adfd836020cd53b195f3b4675c58095c5c300ef8',
tx_pos: 0,
value: 700000,
txid:
'39521b38cd1b6126a57a68b8adfd836020cd53b195f3b4675c58095c5c300ef8',
vout: 0,
isValid: false,
address: 'bitcoincash:qphpmfj0qn7znklqhrfn5dq7qh36l3vxavu346vqcl',
wif: 'L58jqHoi5ynSdsskPVBJuGuVqTP8ZML1MwHQsBJY32Pv7cqDSCeH',
},
{
height: 669639,
tx_hash:
'93949923f02cb5271bd6d0b5a5b937ce5ae22df5bf6117161078f175f0c29d56',
tx_pos: 0,
value: 700000,
txid:
'93949923f02cb5271bd6d0b5a5b937ce5ae22df5bf6117161078f175f0c29d56',
vout: 0,
isValid: false,
address: 'bitcoincash:qphpmfj0qn7znklqhrfn5dq7qh36l3vxavu346vqcl',
wif: 'L58jqHoi5ynSdsskPVBJuGuVqTP8ZML1MwHQsBJY32Pv7cqDSCeH',
},
{
height: 669639,
tx_hash:
'ddace66ea968e16e55ebf218814401acc38e0a39150529fa3d1108af04e81373',
tx_pos: 0,
value: 300000,
txid:
'ddace66ea968e16e55ebf218814401acc38e0a39150529fa3d1108af04e81373',
vout: 0,
isValid: false,
address: 'bitcoincash:qphpmfj0qn7znklqhrfn5dq7qh36l3vxavu346vqcl',
wif: 'L58jqHoi5ynSdsskPVBJuGuVqTP8ZML1MwHQsBJY32Pv7cqDSCeH',
},
{
height: 669639,
tx_hash:
'f1147285ac384159b5dfae513bda47a0459f876d046b48f13c8a7ec4f0d20d96',
tx_pos: 0,
value: 700000,
txid:
'f1147285ac384159b5dfae513bda47a0459f876d046b48f13c8a7ec4f0d20d96',
vout: 0,
isValid: false,
address: 'bitcoincash:qphpmfj0qn7znklqhrfn5dq7qh36l3vxavu346vqcl',
wif: 'L58jqHoi5ynSdsskPVBJuGuVqTP8ZML1MwHQsBJY32Pv7cqDSCeH',
},
{
height: 669639,
tx_hash:
'f4ca891d090f2682c7086b27a4d3bc2ed1543fb96123b6649e8f26b644a45b51',
tx_pos: 0,
value: 30000,
txid:
'f4ca891d090f2682c7086b27a4d3bc2ed1543fb96123b6649e8f26b644a45b51',
vout: 0,
isValid: false,
address: 'bitcoincash:qphpmfj0qn7znklqhrfn5dq7qh36l3vxavu346vqcl',
wif: 'L58jqHoi5ynSdsskPVBJuGuVqTP8ZML1MwHQsBJY32Pv7cqDSCeH',
},
],
slpUtxos: [
{
height: 660869,
tx_hash:
'16b624b60de4a1d8a06baa129e3a88a4becd499e1d5d0d40b9f2ff4d28e3f660',
tx_pos: 1,
value: 546,
txid:
'16b624b60de4a1d8a06baa129e3a88a4becd499e1d5d0d40b9f2ff4d28e3f660',
vout: 1,
utxoType: 'token',
transactionType: 'send',
tokenId:
'<KEY>',
tokenTicker: 'ST',
tokenName: 'ST',
tokenDocumentUrl: 'developer.bitcoin.com',
tokenDocumentHash: '',
decimals: 0,
tokenType: 1,
tokenQty: '1',
isValid: true,
address: 'bitcoincash:qqvcsnz9x9nu7vq35vmrkjc7hkfxhhs9nuv44zm0ed',
},
{
height: 660869,
tx_hash:
'c7da9ae6a0ce9d4f2f3345f9f0e5da5371228c8aee72b6eeac1b42871b216e6b',
tx_pos: 1,
value: 546,
txid:
'c7da9ae6a0ce9d4f2f3345f9f0e5da5371228c8aee72b6eeac1b42871b216e6b',
vout: 1,
utxoType: 'token',
transactionType: 'send',
tokenId:
'1f6a65e7a4bde92c0a012de2bcf4007034504a765377cdf08a3ee01d1eaa6901',
tokenTicker: '🍔',
tokenName: 'Burger',
tokenDocumentUrl:
'https://c4.wallpaperflare.com/wallpaper/58/564/863/giant-hamburger-wallpaper-preview.jpg',
tokenDocumentHash: '',
decimals: 0,
tokenType: 1,
tokenQty: '2',
isValid: true,
address: 'bitcoincash:qqvcsnz9x9nu7vq35vmrkjc7hkfxhhs9nuv44zm0ed',
},
{
height: 660869,
tx_hash:
'dac23f10dd65caa51359c1643ffc93b94d14c05b739590ade85557d338a21040',
tx_pos: 1,
value: 546,
txid:
'dac23f10dd65caa51359c1643ffc93b94d14c05b739590ade85557d338a21040',
vout: 1,
utxoType: 'token',
transactionType: 'send',
tokenId:
'7f8889682d57369ed0e32336f8b7e0ffec625a35cca183f4e81fde4e71a538a1',
tokenTicker: 'HONK',
tokenName: 'HONK HONK',
tokenDocumentUrl: 'THE REAL HONK SLP TOKEN',
tokenDocumentHash: '',
decimals: 0,
tokenType: 1,
tokenQty: '1',
isValid: true,
address: 'bitcoincash:qqvcsnz9x9nu7vq35vmrkjc7hkfxhhs9nuv44zm0ed',
},
{
height: 660869,
tx_hash:
'efa6f67078810875513a116b389886610d81ecf6daf97d55dd96d3fdd201dfac',
tx_pos: 1,
value: 546,
txid:
'efa6f67078810875513a116b389886610d81ecf6daf97d55dd96d3fdd201dfac',
vout: 1,
utxoType: 'token',
transactionType: 'send',
tokenId:
'dd84ca78db4d617221b58eabc6667af8fe2f7eadbfcc213d35be9f1b419beb8d',
tokenTicker: 'TAP',
tokenName: 'Th<PASSWORD>',
tokenDocumentUrl: '',
tokenDocumentHash: '',
decimals: 0,
tokenType: 1,
tokenQty: '2',
isValid: true,
address: 'bitcoincash:qqvcsnz9x9nu7vq35vmrkjc7hkfxhhs9nuv44zm0ed',
},
{
height: 660978,
tx_hash:
'b622b770f74f056e07e5d2ea4d7f8da1c4d865e21e11c31a263602a38d4a2474',
tx_pos: 2,
value: 546,
txid:
'b622b770f74f056e07e5d2ea4d7f8da1c4d865e21e11c31a263602a38d4a2474',
vout: 2,
utxoType: 'minting-baton',
transactionType: 'mint',
tokenId:
'bef614aac85c0c866f4d39e4d12a96851267d38d1bca5bdd6488bbd42e28b6b1',
tokenType: 1,
tokenTicker: 'CTP',
tokenName: 'Cash <PASSWORD>',
tokenDocumentUrl: 'https://cashtabapp.com/',
tokenDocumentHash: '',
decimals: 9,
mintBatonVout: 2,
isValid: true,
address: 'bitcoincash:qqvcsnz9x9nu7vq35vmrkjc7hkfxhhs9nuv44zm0ed',
},
{
height: 660982,
tx_hash:
'67605f3d18135b52d95a4877a427d100c14f2610c63ee84eaf4856f883a0b70e',
tx_pos: 2,
value: 546,
txid:
'67605f3d18135b52d95a4877a427d100c14f2610c63ee84eaf4856f883a0b70e',
vout: 2,
utxoType: 'minting-baton',
transactionType: 'mint',
tokenId:
'<KEY>',
tokenType: 1,
tokenTicker: 'WDT',
tokenName:
'Test Token With Exceptionally Long Name For CSS And Style Revisions',
tokenDocumentUrl:
'https://www.ImpossiblyLongWebsiteDidYouThinkWebDevWouldBeFun.org',
tokenDocumentHash:
'����\\�IS\u001e9�����k+���\u0018���\u001b]�߷2��',
decimals: 7,
mintBatonVout: 2,
isValid: true,
address: 'bitcoincash:qqvcsnz9x9nu7vq35vmrkjc7hkfxhhs9nuv44zm0ed',
},
{
height: 662799,
tx_hash:
'b399a5ae69e4ac4c96b27c680a541e6b8142006bdc2484a959821858fc0b4ca3',
tx_pos: 2,
value: 546,
txid:
'b399a5ae69e4ac4c96b27c680a541e6b8142006bdc2484a959821858fc0b4ca3',
vout: 2,
utxoType: 'token',
transactionType: 'send',
tokenId:
'bef614aac85c0c866f4d39e4d12a96851267d38d1bca5bdd6488bbd42e28b6b1',
tokenTicker: 'CTP',
tokenName: 'Cash <PASSWORD>',
tokenDocumentUrl: 'https://cashtabapp.com/',
tokenDocumentHash: '',
decimals: 9,
tokenType: 1,
tokenQty: '310',
isValid: true,
address: 'bitcoincash:qqvcsnz9x9nu7vq35vmrkjc7hkfxhhs9nuv44zm0ed',
},
{
height: 667561,
tx_hash:
'4ca7f70996c699b0f988597a2dd2700f1f24f305318ddc8fe137d96d5fa96bf5',
tx_pos: 1,
value: 546,
txid:
'4ca7f70996c699b0f988597a2dd2700f1f24f305318ddc8fe137d96d5fa96bf5',
vout: 1,
utxoType: 'token',
transactionType: 'send',
tokenId:
'7f8889682d57369ed0e32336f8b7e0ffec625a35cca183f4e81fde4e71a538a1',
tokenTicker: 'HONK',
tokenName: 'HONK HONK',
tokenDocumentUrl: 'THE REAL HONK SLP TOKEN',
tokenDocumentHash: '',
decimals: 0,
tokenType: 1,
tokenQty: '1',
isValid: true,
address: 'bitcoincash:qqvcsnz9x9nu7vq35vmrkjc7hkfxhhs9nuv44zm0ed',
},
{
height: 667750,
tx_hash:
'de912bc7a6a1b14abe04960cd9a0ef88a4f59eb04db765f7bc2c0d2c2f997054',
tx_pos: 1,
value: 546,
txid:
'de912bc7a6a1b14abe04960cd9a0ef88a4f59eb04db765f7bc2c0d2c2f997054',
vout: 1,
utxoType: 'token',
transactionType: 'send',
tokenId:
'<KEY>',
tokenTicker: 'WDT',
tokenName:
'Test Token With Exceptionally Long Name For CSS And Style Revisions',
tokenDocumentUrl:
'https://www.ImpossiblyLongWebsiteDidYouThinkWebDevWouldBeFun.org',
tokenDocumentHash:
'����\\�IS\u001e9�����k+���\u0018���\u001b]�߷2��',
decimals: 7,
tokenType: 1,
tokenQty: '100.0000001',
isValid: true,
address: 'bitcoincash:qqvcsnz9x9nu7vq35vmrkjc7hkfxhhs9nuv44zm0ed',
},
{
height: 667750,
tx_hash:
'e25cebd4cccbbdd91b36f672d51f5ce978e0817839be9854c3550704aec4359d',
tx_pos: 1,
value: 546,
txid:
'e25cebd4cccbbdd91b36f672d51f5ce978e0817839be9854c3550704aec4359d',
vout: 1,
utxoType: 'token',
transactionType: 'send',
tokenId:
'7f8889682d57369ed0e32336f8b7e0ffec625a35cca183f4e81fde4e71a538a1',
tokenTicker: 'HONK',
tokenName: 'HONK HONK',
tokenDocumentUrl: 'THE REAL HONK SLP TOKEN',
tokenDocumentHash: '',
decimals: 0,
tokenType: 1,
tokenQty: '1',
isValid: true,
address: 'bitcoincash:qqvcsnz9x9nu7vq35vmrkjc7hkfxhhs9nuv44zm0ed',
},
{
height: 667750,
tx_hash:
'e9a94cc174839e3659d2fe4d33490528d18ad91404b65eb8cc35d8fa2d3f5096',
tx_pos: 2,
value: 546,
txid:
'e9a94cc174839e3659d2fe4d33490528d18ad91404b65eb8cc35d8fa2d3f5096',
vout: 2,
utxoType: 'token',
transactionType: 'send',
tokenId:
'bd1acc4c986de57af8d6d2a64aecad8c30ee80f37ae9d066d758923732ddc9ba',
tokenTicker: 'TBS',
tokenName: 'TestBits',
tokenDocumentUrl: 'https://thecryptoguy.com/',
tokenDocumentHash: '',
decimals: 9,
tokenType: 1,
tokenQty: '5.854300861',
isValid: true,
address: 'bitcoincash:qqvcsnz9x9nu7vq35vmrkjc7hkfxhhs9nuv44zm0ed',
},
{
height: 667751,
tx_hash:
'05e87142de1bb8c2a43d22a2e4b97855eb84c3c76f4ea956a654efda8d0557ca',
tx_pos: 1,
value: 546,
txid:
'05e87142de1bb8c2a43d22a2e4b97855eb84c3c76f4ea956a654efda8d0557ca',
vout: 1,
utxoType: 'token',
transactionType: 'send',
tokenId:
'bd1acc4c986de57af8d6d2a64aecad8c30ee80f37ae9d066d758923732ddc9ba',
tokenTicker: 'TBS',
tokenName: 'TestBits',
tokenDocumentUrl: 'https://thecryptoguy.com/',
tokenDocumentHash: '',
decimals: 9,
tokenType: 1,
tokenQty: '<PASSWORD>',
isValid: true,
address: 'bitcoincash:qqvcsnz9x9nu7vq35vmrkjc7hkfxhhs9nuv44zm0ed',
},
{
height: 666969,
tx_hash:
'99583b593a3bec993328b076f4988fd77b8423d788183bf2968ed43cec11c454',
tx_pos: 2,
value: 546,
txid:
'99583b593a3bec993328b076f4988fd77b8423d788183bf2968ed43cec11c454',
vout: 2,
utxoType: 'token',
transactionType: 'send',
tokenId:
'bd1acc4c986de57af8d6d2a64aecad8c30ee80f37ae9d066d758923732ddc9ba',
tokenTicker: 'TBS',
tokenName: 'TestBits',
tokenDocumentUrl: 'https://thecryptoguy.com/',
tokenDocumentHash: '',
decimals: 9,
tokenType: 1,
tokenQty: '9897999873.21001<PASSWORD>',
isValid: true,
address: 'bitcoincash:qphpmfj0qn7znklqhrfn5dq7qh36l3vxavu346vqcl',
},
{
height: 667560,
tx_hash:
'34002080b2e1b42ff9f33d36dbbd0d8f1aaddc5a00b916054a40c45feebaf548',
tx_pos: 2,
value: 546,
txid:
'34002080b2e1b42ff9f33d36dbbd0d8f1aaddc5a00b916054a40c45feebaf548',
vout: 2,
utxoType: 'token',
transactionType: 'send',
tokenId:
'bd1acc4c986de57af8d6d2a64aecad8c30ee80f37ae9d066d758923732ddc9ba',
tokenTicker: 'TBS',
tokenName: 'TestBits',
tokenDocumentUrl: 'https://thecryptoguy.com/',
tokenDocumentHash: '',
decimals: 9,
tokenType: 1,
tokenQty: '3.999999998',
isValid: true,
address: 'bitcoincash:qphpmfj0qn7znklqhrfn5dq7qh36l3vxavu346vqcl',
},
{
height: 667560,
tx_hash:
'8edccfafe4b002da8f1aa71daae31846c51848968e7d92dcba5f0ff97beb734d',
tx_pos: 2,
value: 546,
txid:
'8edccfafe4b002da8f1aa71daae31846c51848968e7d92dcba5f0ff97beb734d',
vout: 2,
utxoType: 'token',
transactionType: 'send',
tokenId:
'<KEY>',
tokenTicker: 'WDT',
tokenName:
'Test Token With Exceptionally Long Name For CSS And Style Revisions',
tokenDocumentUrl:
'https://www.ImpossiblyLongWebsiteDidYouThinkWebDevWouldBeFun.org',
tokenDocumentHash:
'����\\�IS\u001e9�����k+���\u0018���\u001b]�߷2��',
decimals: 7,
tokenType: 1,
tokenQty: '523512244.796145',
isValid: true,
address: 'bitcoincash:qphpmfj0qn7znklqhrfn5dq7qh36l3vxavu346vqcl',
},
{
height: 667909,
tx_hash:
'd3183b663a8d67b2b558654896b95102bbe68d164de219da96273ae52de93813',
tx_pos: 1,
value: 546,
txid:
'd3183b663a8d67b2b558654896b95102bbe68d164de219da96273ae52de93813',
vout: 1,
utxoType: 'token',
transactionType: 'send',
tokenId:
'bd1acc4c986de57af8d6d2a64aecad8c30ee80f37ae9d066d758923732ddc9ba',
tokenTicker: 'TBS',
tokenName: 'TestBits',
tokenDocumentUrl: 'https://thecryptoguy.com/',
tokenDocumentHash: '',
decimals: 9,
tokenType: 1,
tokenQty: '1',
isValid: true,
address: 'bitcoincash:qphpmfj0qn7znklqhrfn5dq7qh36l3vxavu346vqcl',
},
{
height: 669057,
tx_hash:
'dd560d87bd632e40c6548021006653a150197ede13fadb5eadfa29abe4400d0e',
tx_pos: 1,
value: 546,
txid:
'dd560d87bd632e40c6548021006653a150197ede13fadb5eadfa29abe4400d0e',
vout: 1,
utxoType: 'token',
transactionType: 'send',
tokenId:
'<KEY>',
tokenTicker: 'NAKAMOTO',
tokenName: 'NAKAMOTO',
tokenDocumentUrl: '',
tokenDocumentHash: '',
decimals: 8,
tokenType: 1,
tokenQty: '1',
isValid: true,
address: 'bitcoincash:qphpmfj0qn7znklqhrfn5dq7qh36l3vxavu346vqcl',
},
],
};
|
<reponame>myamout/RequestNetworkDemo<gh_stars>1-10
// Lifecycle states of a request (created by payer/payee, then accepted or canceled).
export enum State { Created, Accepted, Canceled }
// Lifecycle states of the escrow that holds funds for a request.
export enum EscrowState { Created, Refunded, Released }
// Fired with the tx hash as soon as a transaction is submitted.
export type CallbackTransactionHash = (transactionHash: string) => void;
// Fired once the transaction receipt is available.
export type CallbackTransactionReceipt = (receipt: any) => void;
// Fired on each confirmation of a mined transaction.
export type CallbackTransactionConfirmation = (confirmationNumber: number, receipt: any) => void;
// Fired when a transaction fails or is rejected.
export type CallbackTransactionError = (error: Error) => void;
// Node-style callback delivering a fetched request object.
export type CallbackGetRequest = (err: Error, request: any) => void;
// Node-style callback delivering the IPFS hash of an added file.
export type CallbackIpfsAddFile = (err: Error, hash: string) => void;
// Node-style callback delivering the content of an IPFS file.
export type CallbackIpfsGetFile = (err: Error, data: string) => void;
// Generic node-style (error, data) callback.
export type CallbackErrorData = (err: Error | undefined, data: string | undefined) => void;
// Shape of a compiled-contract artifact: ABI plus per-network deployment info.
export interface InterfaceArtifact {
  abi: any;
  networks: {[networkName: string]: {
    address: string;
    blockNumber: string;
  }};
}
|
<reponame>Wlisfes/lisfes-service
import { Module, Global } from '@nestjs/common'
import { JwtModule } from '@nestjs/jwt'
import { JwtAuthService } from './jwt.service'

// Global module: configures the JWT signer once from the environment and
// exposes JwtAuthService application-wide without re-importing this module.
@Global()
@Module({
	imports: [
		// registerAsync defers reading process.env until module initialization.
		JwtModule.registerAsync({
			useFactory: () => ({
				secret: process.env.JWT_SECRET,
				signOptions: {
					// Tokens default to a 24h lifetime when JWT_EXPIRESIN is unset.
					expiresIn: process.env.JWT_EXPIRESIN || '24h'
				}
			})
		})
	],
	providers: [JwtAuthService],
	exports: [JwtAuthService]
})
export class JwtAuthModule {}
|
<reponame>albinsony/foam2
// FOAM model for a small iOS demo app. The /* ... */ bodies of swiftFactory
// and swiftCode are NOT comments: FOAM extracts them as Swift source, so no
// annotations may be added inside them.
foam.CLASS({
  name: 'TestApp',
  swiftImports: [
    'UIKit',
  ],
  requires: [
    'Test',
    'foam.swift.dao.ArrayDAO',
    'foam.swift.ui.DAOTableViewSource',
    'foam.swift.ui.DAOViewController',
    'foam.swift.ui.DetailView',
    'foam.swift.ui.ScrollingViewController',
  ],
  exports: [
    'stack',
  ],
  properties: [
    // Root navigation controller; exported so sub-views can push onto it.
    {
      swiftType: 'UINavigationController',
      name: 'stack',
      swiftFactory: function() {/*
return UINavigationController(rootViewController: daoController.vc)
      */},
    },
    // In-memory DAO of Test objects backing the table view.
    {
      class: 'foam.dao.DAOProperty',
      of: 'Test',
      name: 'dao',
      swiftFactory: function() {/*
return ArrayDAO_create([
  "of": Test.classInfo(),
])
      */},
    },
    // Table-view controller wired to `dao`: custom nib-based row cells plus a
    // detail view pushed when a row is selected.
    {
      class: 'FObjectProperty',
      of: 'foam.swift.ui.DAOViewController',
      required: true,
      name: 'daoController',
      swiftFactory: function() {/*
let x = __subContext__
let dvc = DAOViewController_create([
  "dao$": dao$,
])

let uiLabelViewConfig = [
  "viewFactory": { (x: Context) -> FObject? in
    return x.create(FOAMUILabel.self)
  }
]

dvc.dataSource.rowViewFactory = { () -> UITableViewCell in
  let nib = UINib(nibName: "TestRowView", bundle: Bundle.main)
  let customView = nib.instantiate(withOwner: dvc.vc, options: nil)[0] as! TestDetailView
  customView.dv_Test = x.create(DetailView.self, args: [
    "of": Test.classInfo(),
    "config": [
      "firstName": uiLabelViewConfig,
      "lastName": uiLabelViewConfig,
      "exprProp": uiLabelViewConfig,
    ]
  ])!
  let cell = DAOTableViewSource.SimpleRowView(
    view: customView, style: .default, reuseIdentifier: dvc.dataSource.reusableCellIdentifier)
  return cell
}
dvc.dataSource.rowViewPrepare = { (cell, fobj) -> Void in
  let cell = cell as! DAOTableViewSource.SimpleRowView
  let view = cell.view as! TestDetailView
  view.dv_Test?.data = fobj
}
dvc.tableViewDelegate?.updateVcFactory = { (o: FObject) -> UIViewController in
  let dv = self.DetailView_create([
    "data": o,
    "config": [
      "exprProp": [
        "viewFactory": { (x: Context) -> FObject? in
          return x.create(FOAMUILabel.self)
        }
      ]
    ]
  ])
  let svc = self.ScrollingViewController_create([
    "view": dv,
  ])
  let nib = UINib(nibName: "CustomView", bundle: Bundle.main)
  let customView = nib.instantiate(withOwner: svc.vc, options: nil)[0] as! TestDetailView
  customView.dv_Test = dv
  return svc.vc
}
return dvc
      */},
    },
  ],
  methods: [
    // On init, seed the DAO in the background: adds 50 "Dude N" rows, one
    // every 0.1s, exercising live table updates.
    {
      name: 'init',
      swiftCode: function() {/*
DispatchQueue.global(qos: .background).async {
  var i = 1
  Async.aWhile(
    { () -> Bool in
      return i <= 50
    },
    afunc: Async.aSeq([
      Async.aWait(delay: 0.1),
      { ret, _, _ in
        _ = try! self.dao!.put(self.Test_create([
          "firstName": "Dude \(i)",
        ]))
        i += 1
        ret(nil)
      }
    ])
  )({_ in }, {_ in }, nil)
}
      */},
    },
  ],
});
|
<filename>trikNetwork/src/gamepadConnection.h
/* Copyright 2015 CyberTech Labs Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. */
#pragma once
#include "connection.h"
namespace trikNetwork {
/// Connection object for android gamepad.
class GamepadConnection : public Connection
{
	Q_OBJECT

public:
	/// Creates a connection; signal emission starts once data arrives.
	GamepadConnection();

signals:
	/// Emitted when user pulls finger off a pad.
	/// @param pad - id of a pad on which the event occurs.
	void padUp(int pad);

	/// Emitted when user tilts his Android device with "wheel" turned on.
	/// @param percent - percent of a tilt angle.
	void wheel(int percent);

	/// Emitted when user pushes or moves his finger on a pad.
	/// @param pad - id of a pad on which the event occurs.
	/// @param x - x coordinate of an event.
	/// @param y - y coordinate of an event.
	void pad(int pad, int x, int y);

	/// Emitted when user pushes a button on gamepad.
	/// @param button - button number.
	/// @param pressed - 1 if button was pressed, 0 if it was released.
	void button(int button, int pressed);

private:
	/// Parses a raw datagram from the gamepad app and emits the matching signal.
	void processData(const QByteArray &data) override;
};
}
|
/**
* This program and the accompanying materials
* are made available under the terms of the License
* which accompanies this distribution in the file LICENSE.txt
*/
package com.archimatetool.hammer.validation.issues;
import org.eclipse.core.runtime.IAdaptable;
import org.eclipse.swt.graphics.Image;
import com.archimatetool.help.hints.IHelpHintProvider;
/**
 * A single validation issue: a short name, a one-line description and a
 * longer explanation, attached to the model object it was raised on.
 */
public interface IIssue extends IHelpHintProvider, IAdaptable {

    /** Sets the short display name of the issue. */
    void setName(String name); // param renamed from copy-pasted "description"

    String getName();

    /** Sets the one-line description shown in issue lists. */
    void setDescription(String description);

    String getDescription();

    /** Sets the longer explanation of why this issue was raised. */
    void setExplanation(String explanation);

    String getExplanation();

    /** Returns the model object this issue refers to. */
    Object getObject();

    void setObject(Object obj);

    /** Returns the icon representing this issue type. */
    Image getImage();
}
|
package malware.feature;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import java.util.Random;
import malware.parse.AsmParser;
import malware.parse.Function;
import malware.parse.AsmParser.AsmMode;
import malware.parse.AsmParser.Instruction;
import ml.lsh.MinHash;
public class MinHashFeatureExtractor
implements
FeatureExtractor<long[], List<AsmParser.Instruction>> {
private AsmMode asmMode;
private MinHash minHash;
private int ngramLen;
public MinHashFeatureExtractor(AsmMode asmMode, int numHashFunctions, Random rng,
int hashBitLen) {
this(asmMode, numHashFunctions, rng, hashBitLen, 1);
}
public MinHashFeatureExtractor(AsmMode asmMode, int numHashFunctions, Random rng, int hashBitLen,
int ngramLen) {
this.asmMode = asmMode;
minHash = new MinHash(numHashFunctions, rng, hashBitLen);
this.ngramLen = ngramLen;
}
@Override
public long[] extractFeatures(List<AsmParser.Instruction> instructionList) {
List<String> tokens = new LinkedList<String>();
if (ngramLen == 1) {
for (Instruction i : instructionList) {
if (asmMode == AsmMode.OPCODE) {
tokens.add(i.getOpcode());
} else {
tokens.add(i.getAsm());
}
}
} else {
Queue<String> q = new LinkedList<String>();
for (Instruction i : instructionList) {
if (asmMode == AsmMode.OPCODE) {
q.add(i.getOpcode());
} else {
q.add(i.getAsm());
}
if (q.size() > ngramLen) {
q.poll();
}
if (q.size() == ngramLen) {
StringBuilder buff = new StringBuilder();
for (String item : q) {
buff.append(item);
}
tokens.add(buff.toString());
}
}
// if (q.size() < ngramLen) {
// StringBuilder buff = new StringBuilder();
// for (String item : q) {
// buff.append(item);
// }
// tokens.add(buff.toString());
// }
}
return minHash.signature(tokens);
}
}
|
package org.glamey.training.codes.leetcode;

import java.util.Arrays;
/**
* 算出数组中,两两相减最小的数字
*
* @author zhouyang.zhou. 2017.08.18.22.
*/
/**
 * Computes the minimum absolute difference between any two elements of an
 * array ("两两相减最小的数字").
 */
public class MinDiffer {

    /**
     * Returns the minimum absolute difference between any two elements of
     * {@code nums}, or -1 when the input is null or has fewer than two
     * elements.
     *
     * <p>Fixes three bugs in the original: {@code diff} returned the
     * <em>negated</em> absolute difference, the running minimum was seeded
     * with 0 (so any positive difference could never win), and only
     * adjacent elements in input order were compared, missing the true
     * closest pair.
     */
    public static int minDiff(int[] nums) {
        if (nums == null) {
            return -1;
        }
        int len = nums.length;
        if (len < 2) {
            return -1;
        }
        // Sort a copy (leave the caller's array untouched): once sorted,
        // the closest pair is always adjacent, so one linear scan suffices.
        int[] sorted = nums.clone();
        Arrays.sort(sorted);
        int minDiff = Integer.MAX_VALUE;
        for (int i = 1; i < sorted.length; i++) {
            int curDiff = diff(sorted[i - 1], sorted[i]);
            if (curDiff < minDiff) {
                minDiff = curDiff;
            }
        }
        return minDiff;
    }

    /** Absolute difference of a and b (the original returned it negated). */
    private static int diff(int a, int b) {
        return a >= b ? a - b : b - a;
    }

    public static void main(String[] args) {
        int[] nums = {10, 4, 2, 20, 100, -2};
        int minDiff = minDiff(nums);
        System.out.printf("mindDiff=%d\n", minDiff);
    }
}
|
# shellcheck shell=ksh
# Copyright 2022 Rawiri Blundell
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
# Provenance: https://github.com/rawiriblundell/sh_libpath
# SPDX-License-Identifier: Apache-2.0
# Fail fast when openssl is unavailable — every function below shells out to it.
# NOTE(review): this file appears to be a sourced library, so this 'exit 1'
# terminates the *sourcing* shell as well — confirm that is intended.
if ! command -v openssl >/dev/null 2>&1; then
  printf -- 'view_csr: %s\n' "This library requires 'openssl', which was not found in PATH" >&2
  exit 1
fi
# Verify and pretty-print a PEM certificate signing request.
# Usage: view_csr <csr_file>; returns openssl's exit status.
view_csr () {
  _view_csr_in="${1}"
  if (( "${#_view_csr_in}" == 0 )); then
    printf -- 'view_csr: %s\n' "No input file provided" >&2
    return 1
  fi
  openssl req -text -noout -verify -in "${_view_csr_in}"
  # Capture openssl's status before 'unset': previously 'unset' ran last,
  # so the function always returned 0 and hid verification failures.
  _view_csr_rc=$?
  unset -v _view_csr_in
  return "${_view_csr_rc}"
}
# Print a sha256 digest of a CSR's modulus (handy for matching CSR/key/cert).
# Usage: view_csr_modulus <csr_file>; returns the pipeline's exit status.
view_csr_modulus() {
  _view_csr_modulus_in="${1}"
  if (( "${#_view_csr_modulus_in}" == 0 )); then
    printf -- 'view_csr_modulus: %s\n' "No input file provided" >&2
    return 1
  fi
  openssl req -noout -modulus -in "${_view_csr_modulus_in}" | shasum -a 256
  # Capture the pipeline's status before 'unset' so failures propagate
  # instead of being masked by unset's (always zero) status.
  _view_csr_modulus_rc=$?
  unset -v _view_csr_modulus_in
  return "${_view_csr_modulus_rc}"
}
|
import {Pipe, PipeTransform} from '@angular/core';
import * as _ from 'lodash';
@Pipe({
  name: 'capacityToString'
})
export class CapacityToStringPipe implements PipeTransform {
  /**
   * Renders a capacity map as a comma-separated "key=value" list,
   * e.g. {cpu: '4', memory: '8Gi'} -> "cpu=4, memory=8Gi".
   */
  transform(capacity: { [key in string]: string }): string {
    // lodash's _.keys(null) returned []; keep that tolerance explicitly.
    if (capacity == null) {
      return '';
    }
    // Object.entries replaces the lodash _.keys + per-key lookup round trip.
    return Object.entries(capacity)
      .map(([key, value]) => `${key}=${value}`)
      .join(', ');
  }
}
|
<reponame>ddallaire/Adaptone-app
import Component from '@ember/component';
import {inject as service} from '@ember/service';
import Configuration from 'adaptone-front/models/configuration';
import steps from 'adaptone-front/models/steps';
import SequenceIds from 'adaptone-front/constants/sequence-ids';
import Channels from 'adaptone-front/constants/channels';
import {copy} from '@ember/object/internals';
// Number of fixed input channels on the mixer.
const INPUT_COUNT = 4;
// Builds the half-open integer range [start, end) as an array.
const generateRange = (start, end) => Array.from({length: (end - start)}, (_, k) => k + start);

// Setup screen: collects monitor/speaker counts, allocates channel ids,
// persists the resulting Configuration and sends it to the backend before
// moving on to probe positioning.
export default Component.extend({
  fileSystem: service('file-system'),
  router: service('router'),
  session: service('session'),
  connection: service('connection'),

  // Configuration model being assembled by this screen.
  configuration: null,
  // Next unassigned channel id; grows as ids are handed out.
  // NOTE(review): mutated with `+=` rather than this.set() below — confirm
  // nothing observes this property.
  currentChannelId: 1,

  init() {
    this._super(...arguments);
    this.set('configuration', new Configuration());
  },

  actions: {
    saveConfig() {
      this.configuration.step = steps['probe-positioning'];
      // Unary + coerces the form's string inputs to numbers.
      this.set('configuration.monitorsNumber', +this.configuration.monitorsNumber);
      this.set('configuration.speakersNumber', +this.configuration.speakersNumber);

      // Allocate contiguous id ranges: inputs first, then one aux per monitor.
      const inputChannelIds = generateRange(this.currentChannelId, this.currentChannelId + INPUT_COUNT);
      this.currentChannelId += inputChannelIds.length;

      const auxiliaryChannelIds = generateRange(this.currentChannelId, this.currentChannelId + this.configuration.monitorsNumber);
      this.currentChannelId += auxiliaryChannelIds.length;

      // Deep-copy the aux template and stamp each copy with its ids/name.
      const auxiliaries = auxiliaryChannelIds.map((auxId, index) => {
        const auxiliary = copy(Channels.auxiliaryTemplate, true);
        auxiliary.data.channelId = auxId;
        auxiliary.data.auxiliaryChannelId = index;
        auxiliary.data.channelName = `Aux ${index + 1}`;
        auxiliary.data.inputs.forEach(i => i.data.auxiliaryChannelId = auxId);

        return auxiliary;
      });

      const configChannels = {
        inputs: Channels.inputs,
        master: Channels.master,
        auxiliaries
      };

      this.set('configuration', {
        ...this.configuration,
        inputCount: INPUT_COUNT,
        inputChannelIds,
        auxiliaryChannelIds,
        channels: configChannels
      });

      // Persist locally, cache in the session, push to the device, then
      // advance the wizard.
      this.get('fileSystem').writeNewConfiguration(this.configuration);
      this.get('session').set('configuration', this.configuration);

      this.get('connection').sendMessage({
        seqId: SequenceIds.INITIAL_PARAMS,
        data: this.configuration
      });

      this.get('router').transitionTo('probe-positioning');
    }
  }
});
|
<reponame>GeekerHuang/GKHWebImage<gh_stars>1-10
//
// UIView+GKHWebCacheOperation.h
// GKHWebImage
//
// Created by huangshuai on 16/8/21.
// Copyright © 2016年 GKH. All rights reserved.
//
#import <UIKit/UIKit.h>
/// Category declaration for associating web-image cache/load operations with
/// a UIView; methods are added in the matching implementation file.
@interface UIView (GKHWebCacheOperation)

@end
|
#!/bin/sh
# Load the shared status-bar helpers. '.' is the POSIX form of bash's
# 'source', which plain /bin/sh implementations (e.g. dash) do not provide.
. ~/.dwm/dwm-statusbar.sh
# Launch the status bar in the background so startup is not blocked.
~/.dwm/dwm-bar/dwm_bar.sh &
|
/**
 * Logs the remainder of dividing `a` by `b` to the console.
 * @param {number} a - dividend
 * @param {number} b - divisor
 */
function remainder(a, b) {
  console.log(`The remainder of ${a} divided by ${b} is ${a % b}`);
}

remainder(20, 10);
// Output: The remainder of 20 divided by 10 is 0
<filename>gatsby-browser.js<gh_stars>0
/**
* Implement Gatsby's Browser APIs in this file.
*
* See: https://www.gatsbyjs.com/docs/browser-apis/
*/
// You can delete this file if you're not using it
import React from 'react';
import GlobalStyle from './src/globalStyles';
// Gatsby Browser API hook: wraps every page element with the global
// styled-components stylesheet so global styles apply on all routes.
export const wrapPageElement = ({element}) => (
  <>
    <GlobalStyle />
    {element}
  </>
)
<filename>src/main/java/br/inatel/CorretoraDB.java
package br.inatel;
import java.sql.SQLException;
/** JDBC access layer for the "corretora" table. */
public class CorretoraDB extends Database {

    /**
     * Inserts the given corretora (nome, uf) into the database.
     *
     * @param novaCorretora entity to persist
     * @return true when the INSERT succeeded, false otherwise
     */
    public boolean adicionarCorretoraAoBanco(Corretora novaCorretora) {
        connect();
        String sql = "INSERT INTO corretora(nome, uf) VALUES (?, ?)";
        try {
            pst = connection.prepareStatement(sql);
            pst.setString(1, novaCorretora.getNome());
            pst.setString(2, novaCorretora.getUF());
            pst.execute();
            check = true;
        } catch (SQLException e) {
            System.out.println("Erro de operacao: " + e.getMessage());
            check = false;
        } finally {
            try {
                // Close the statement before its connection, with null guards:
                // the original closed the connection first, so a failure there
                // leaked the statement (and pst may be unset if prepare threw).
                if (pst != null) {
                    pst.close();
                }
                if (connection != null) {
                    connection.close();
                }
            } catch (SQLException e) {
                System.out.println("Erro ao finalizar conexão: " + e.getMessage());
            }
        }
        return check;
    }

    /**
     * Looks up the most recently inserted id for a corretora by exact name.
     *
     * @param nomeCorretora name to search for
     * @return the highest matching id_corretora, or 0 when none is found
     */
    public Integer buscaIDDeCorretora(String nomeCorretora) {
        connect();
        String sql = "SELECT id_corretora FROM corretora WHERE nome=? ORDER BY id_corretora DESC LIMIT 1";
        Integer idDaCorretora = 0;
        try {
            // Preparando o statement
            pst = connection.prepareStatement(sql);
            pst.setString(1, nomeCorretora);
            // Executando o statement
            result = pst.executeQuery();
            while (result.next()) {
                idDaCorretora = result.getInt("id_corretora");
                check = true;
            }
        } catch (SQLException e) {
            System.out.println("Erro de operação: " + e.getMessage());
        } finally {
            try {
                // Release in reverse acquisition order with null guards so a
                // failed prepare/execute cannot NPE here and leak resources.
                if (result != null) {
                    result.close();
                }
                if (pst != null) {
                    pst.close();
                }
                if (connection != null) {
                    connection.close();
                }
            } catch (SQLException e) {
                System.out.println("Erro ao fechar a conexão: " + e.getMessage());
            }
        }
        return idDaCorretora;
    }
}
|
#!/bin/bash
# Samples jstack output from every running JVM, collapses the stacks and
# renders per-PID flame graphs into the PCP "jstack" web app directory.

# environment
PATH=/bin:/usr/bin:$PATH
HOSTNAME=$(uname -n)
TS=`date +%Y%m%d-%T`

#DIR
WEBDIR=/usr/share/pcp/webapps/jstack
WDIR=/var/log/pcp/vector/JSTACK
BDIR=/var/lib/pcp/pmdas/vector/BINFlameGraph
THDIR=/apps/tomcat/logs/cores

#FILE
THSVG=$WEBDIR/threadump-history.svg
S3THSVG=threadump-history
DEMOSVG=$WEBDIR/threadump-Demo.svg
S3DEMOSVG=threadump-Demo

# Ensure the scratch and web directories exist; clear stale web output.
#
if [ ! -d "$WDIR" ];
then
        /bin/mkdir -p $WDIR
fi
#
if [ ! -d "$WEBDIR" ];
then
        /bin/mkdir -p $WEBDIR
fi

if [ -d "$WEBDIR" ];
then
        /bin/rm $WEBDIR/*
fi

# find all java process and run jstack 10 times at interval of 2 seconds
# NOTE(review): `ps -e|grep java` matches any process whose name contains
# "java" (and can race with the grep itself) — consider `pgrep java`.
j=0
for i in `ps -e|grep java|awk '{print $1}'`
do
  while [ $j -le 10 ]
  do
    # Run jstack as the JVM's owner so attachment is permitted.
    JAVA_USER=$(ps ho user -p $i)
    sudo -u $JAVA_USER /usr/bin/jstack $i >> $WDIR/jstack.out.$i
    sleep 2
    j=$(( $j + 1 ))
  done
  j=0
  # Collapse the sampled stacks and render this PID's flame graph.
  $BDIR/stackcollapse-jstack.pl < $WDIR/jstack.out.$i > $WDIR/jstack.out.$i-folded
  $BDIR/flamegraph.pl < $WDIR/jstack.out.$i-folded > $WEBDIR/jstack-$i-PID.svg
  /bin/rm $WDIR/jstack.out.$i
  /bin/rm $WDIR/jstack.out.$i-folded
done

# Build a historical flame graph from saved thread dumps when present,
# otherwise fall back to the bundled demo dumps.
if ls $THDIR/threaddump*.txt &> /dev/null; then
   /bin/cat $THDIR/threaddump*.txt > $WDIR/all-threaddump.txt
   $BDIR/stackcollapse-jstack.pl < $WDIR/all-threaddump.txt >$WDIR/out.folded
   $BDIR/flamegraph.pl < $WDIR/out.folded > $THSVG
else
   # For Demo purpose only
   /bin/cat /var/lib/pcp/pmdas/vector/JSTACK/threaddump*.txt > $WDIR/all-threaddump.txt
   $BDIR/stackcollapse-jstack.pl < $WDIR/all-threaddump.txt >$WDIR/out.folded
   $BDIR/flamegraph.pl < $WDIR/out.folded > $DEMOSVG
fi
/bin/rm $WDIR/out.folded
/bin/rm $WDIR/all-threaddump.txt
# Updated: Wed Apr 10 21:04:12 2013 by webmaster@askapache
# @ http://uploads.askapache.com/2013/04/gnu-mirror-index-creator.txt
# Copyright (C) 2013 Free Software Foundation, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Generates an Apache-autoindex-style index.html for the jstack web app
# directory, listing every SVG/file with its mtime and human-readable size.
function create_gnu_index ()
{
    # call it right or die
    #[[ $# != 3 ]] && echo "bad args. do: $FUNCNAME '/DOCUMENT_ROOT/' '/' 'gnu.askapache.com'" && exit 2

    # D is the doc_root containing the site
    local L= D="/usr/share/pcp/webapps/jstack/" SUBDIR="jstack" DOMAIN="http://$HOSTNAME/" F=

    # The index.html file to create
    F="${D}index.html"

    # if dir doesnt exist, create it
    [[ -d $D ]] || mkdir -p $D;

    # cd into dir or die
    cd $D || exit 2;

    # touch index.html and check if writable or die
    touch $F && test -w $F || exit 2;

    # remove empty directories, they dont need to be there and slow things down if they are
    find . -maxdepth 1 -type d -empty -exec rm -rf {} \;

    # start of total output for saving as index.html
    # (the whole subshell below is redirected into $F at the end)
    (

    # print the html header
    echo '<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">';
    echo "<html><head><title>jstack samples: http://${DOMAIN}${SUBDIR}</title></head>";
    echo "<body><h1>${SUBDIR} Samples and Threaddump </h1><pre>      Name                                        Last modified      Size";

    # start of content output
    (
    # change IFS locally within subshell so the for loop saves line correctly to L var
    IFS=$'\n';

    # pretty sweet, will mimick the normal apache output
    for L in $(find -L . -mount -depth -maxdepth 1 -type f ! -name 'index.html' -printf "      <a href=${DOMAIN}${SUBDIR}/"%f">%-44f@_@%Td-%Tb-%TY %Tk:%TM  @%f@\n"|sort|sed 's,\([\ ]\+\)@_@,</a>\1,g');
    do
    # file
    F=$(sed -e 's,^.*@\([^@]\+\)@.*$,\1,g'<<<"$L");

    # file with file size
    F=$(du -bh $F | cut -f1);

    # output with correct format
    sed -e 's,\ @.*$, '"$F"',g'<<<"$L";
    done;
    )

    # now output a list of all directories in this dir (maxdepth 1) other than '.' outputting in a sorted manner exactly like apache
    find -L . -mount -depth -maxdepth 1 -type d ! -name '.' -printf "      <a href=\"%f\">%-43f@_@%Td-%Tb-%TY %Tk:%TM  -\n"|sort -d|sed 's,\([\ ]\+\)@_@,/</a>\1,g'

    # print the footer html
    echo "</pre><address>PCP pmwebd at ${DOMAIN}</address></body></html>";

    # finally save the output of the subshell to index.html
    ) > $F;
}

# start the run ( use function so everything is local and contained )
# $1 is absolute document_root with trailing '/'
# $2 is subdir like '/subdir/' if thats the web root, '/' if no subdir
# $3 is the domain 'subdomain.domain.tld'
#create_gnu_index "${HOME}/sites/gnu.askapache.com/htdocs/" "/" "gnu.askapache.com"
create_gnu_index

# takes about 1-5 seconds to complete
exit
|
#!/usr/bin/env bash
# Pipeline wrapper: reads a config file and invokes pumpos.sh to configure
# the boot target for an ITG cabinet.

# Root path of this script
readonly ROOT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
readonly ROOT_PATH_PROJECT="$ROOT_PATH/../.."
readonly PUMPOS_SCRIPT="$ROOT_PATH_PROJECT/pumpos.sh"

# Require exactly the config-file argument.
if [ $# -lt 1 ]; then
    echo "Pipeline to configure games for an ITG cabinet to a pumpos target"
    echo "Usage: pipeline-itg-conf <cfg file>"
    exit 1
fi

cfg_file="$1"

if [ ! -e "$cfg_file" ]; then
    echo "ERROR: Could not find cfg file $cfg_file"
    exit 1
fi

# The config file is sourced: it is expected to define PUMPOS_ROOT.
# NOTE(review): PUMPOS_ROOT is not validated after sourcing — confirm.
source "$cfg_file"

# Abort on any subsequent command failure.
set -e

"$PUMPOS_SCRIPT" conf-boot "$PUMPOS_ROOT" "sgl"
<reponame>emresr/prisma-graphql-backends
function group(parent, args, context) {
return context.prisma.competator
.findUnique({ where: { id: parent.id } })
.group();
}
function tournament(parent, args, context) {
return context.prisma.competator
.findUnique({ where: { id: parent.id } })
.tournament();
}
function user(parent, args, context) {
return context.prisma.competator
.findUnique({ where: { id: parent.id } })
.user();
}
function matchs(parent, args, context) {
return context.prisma.competator
.findUnique({ where: { id: parent.id } })
.matchs();
}
module.exports = {
group,
user,
matchs,
tournament,
};
|
<reponame>pukuba/EZ-Stress-Test
const fetch = require('node-fetch')
const assert = require('assert')
const request = require('supertest')
const app = require('../server')
// Integration tests for POST /artillery: the endpoint launches an artillery
// load test against the supplied address and responds with the summarised
// report. NOTE(review): these hit a live host (https://pukuba.ga:2004), hence
// the very large timeouts.
describe(`/artillery Test`, () => {
    // 5 new clients/s for 3s (15 scenarios) — expects all 225 requests to
    // complete with HTTP 200.
    it(`Test Success Case - 1`, async () => {
        const data = {
            address: 'https://pukuba.ga:2004',
            duration: 3,
            arrivalRate: 5,
            clientCount: 15
        }
        const res = await request(app)
            .post("/artillery")
            .set("Content-Type", "application/json")
            .send(JSON.stringify(data))
            .expect(200)
        assert.strictEqual(res.body[`Scenarios launched`], 15)
        assert.strictEqual(res.body[`Scenarios completed`], 15)
        assert.strictEqual(res.body[`Requests completed`], 225)
        assert.strictEqual(res.body[`Scenario counts`][0], `15 (100%)`)
        assert.strictEqual(res.body.Codes[`200`], 225)
    }).timeout(505050)
    // Minimal run: a single scenario issuing a single request.
    it(`Test Success Case - 2`, async () => {
        const data = {
            address: 'https://pukuba.ga:2004',
            duration: 1,
            arrivalRate: 1,
            clientCount: 1
        }
        const res = await request(app)
            .post("/artillery")
            .set("Content-Type", "application/json")
            .send(JSON.stringify(data))
            .expect(200)
        assert.strictEqual(res.body[`Scenarios launched`], 1)
        assert.strictEqual(res.body[`Scenarios completed`], 1)
        assert.strictEqual(res.body[`Requests completed`], 1)
        assert.strictEqual(res.body[`Scenario counts`][0], `1 (100%)`)
        assert.strictEqual(res.body.Codes[`200`], 1)
    }).timeout(505050)
    // Oversized job: the server rejects it with 401 and error "auth".
    it(`Test Failed Case - 1`, async () => {
        const data = {
            address: "https://pukuba.ga:2004",
            duration: 100,
            arrivalRate: 33,
            clientCount: 55
        }
        const res = await request(app)
            .post("/artillery")
            .set("Content-Type", "application/json")
            .send(JSON.stringify(data))
            .expect(401)
        assert.strictEqual(res.body.error, "auth")
    }).timeout(505050)
    // Invalid target address: rejected with 412 and error "url".
    it(`Test Failed Case - 2`, async () => {
        const data = {
            address: 'localhost',
            duration: 1,
            arrivalRate: 1,
            clientCount: 1
        }
        const res = await request(app)
            .post("/artillery")
            .set("Content-Type", "application/json")
            .send(JSON.stringify(data))
            .expect(412)
        assert.strictEqual(res.body.error, "url")
    }).timeout(505050)
})
import { Component, OnInit } from '@angular/core';
import { Jogador } from 'src/entidades/jogador';
import { DBService } from '../servicos/db.service';
@Component({
  selector: 'app-lista-dejogadores',
  templateUrl: './lista-dejogadores.component.html',
  styleUrls: ['./lista-dejogadores.component.css'],
  providers: [DBService]
})
// Lists Jogador records with inline edit and remove, backed by DBService.
export class ListaDejogadoresComponent implements OnInit {
  // Players currently displayed.
  jogadores: Jogador[];
  // True while the 'Jogador' collection fetch is in flight.
  carregando: boolean;

  constructor(private database: DBService) {
    this.carregarJogadores();
  }

  ngOnInit() {
  }

  // Reloads the full 'Jogador' collection and clears the loading flag.
  private carregarJogadores() {
    this.carregando = true;

    this.database.listar<Jogador>('Jogador')
      .then(JogadorDB => {
        this.jogadores = JogadorDB;
        this.carregando = false;
      });
  }

  // Deletes the record by uid, then refreshes the list.
  remover(uid: string) {
    this.database.remover('Jogador', uid)
      .then(() => {
        alert('jogador removido com sucesso');
        this.carregarJogadores();
      });
  }

  // NOTE(review): the parameter below shadows the imported Jogador class —
  // consider renaming it to `jogador`.
  editar(Jogador) {
    Jogador.editando = true;
  }

  cancelEdit(Jogador) {
    Jogador.editando = false;
  }

  // Persists the edited fields for the row, then refreshes the list.
  confirmEdit(Jogador) {
    this.database.atualizar('Jogador', Jogador.uid, { nome: Jogador.nome, posicao: Jogador.posicao, time: Jogador.time, idade: Jogador.idade})
      .then(() => {
        alert('jogador atualizado com sucesso');
        this.carregarJogadores();
      });
  }
}
<filename>modules/caas/auth/src/main/java/io/cattle/platform/iaas/api/auth/projects/ProjectResourceManager.java
package io.cattle.platform.iaas.api.auth.projects;
import io.cattle.platform.api.auth.Identity;
import io.cattle.platform.api.auth.Policy;
import io.cattle.platform.api.resource.DefaultResourceManager;
import io.cattle.platform.api.resource.DefaultResourceManagerSupport;
import io.cattle.platform.core.constants.AccountConstants;
import io.cattle.platform.core.constants.ProjectConstants;
import io.cattle.platform.core.dao.AccountDao;
import io.cattle.platform.core.model.Account;
import io.cattle.platform.core.model.ProjectMember;
import io.cattle.platform.engine.process.impl.ProcessCancelException;
import io.cattle.platform.iaas.api.auth.dao.AuthDao;
import io.cattle.platform.object.ObjectManager;
import io.cattle.platform.object.process.ObjectProcessManager;
import io.cattle.platform.object.process.StandardProcess;
import io.cattle.platform.process.common.util.ProcessUtils;
import io.cattle.platform.util.type.CollectionUtils;
import io.github.ibuildthecloud.gdapi.context.ApiContext;
import io.github.ibuildthecloud.gdapi.exception.ClientVisibleException;
import io.github.ibuildthecloud.gdapi.factory.SchemaFactory;
import io.github.ibuildthecloud.gdapi.model.ListOptions;
import io.github.ibuildthecloud.gdapi.request.ApiRequest;
import io.github.ibuildthecloud.gdapi.util.RequestUtils;
import io.github.ibuildthecloud.gdapi.util.ResponseCodes;
import org.apache.commons.lang3.StringUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * API resource manager for "project" accounts: lists, creates, deletes and
 * updates projects while enforcing project membership/ownership through
 * {@link AuthDao}.
 */
public class ProjectResourceManager extends DefaultResourceManager {

    AuthDao authDao;
    ProjectMemberResourceManager projectMemberResourceManager;
    AccountDao accountDao;

    public ProjectResourceManager(DefaultResourceManagerSupport support, AuthDao authDao, ProjectMemberResourceManager projectMemberResourceManager,
            AccountDao accountDao) {
        super(support);
        this.authDao = authDao;
        this.projectMemberResourceManager = projectMemberResourceManager;
        this.accountDao = accountDao;
    }

    /**
     * Lists projects visible to the caller. An explicit id/uuid condition
     * short-circuits to a single access-checked lookup; otherwise all
     * accessible projects are returned ("all=true" widens the scope for
     * admins), optionally filtered by name.
     */
    @Override
    public Object listSupport(SchemaFactory schemaFactory, String type, Map<Object, Object> criteria, ListOptions options) {
        ApiRequest request = ApiContext.getContext().getApiRequest();
        Policy policy = (Policy) ApiContext.getContext().getPolicy();
        String id = RequestUtils.getConditionValue("id", criteria);
        String uuid = RequestUtils.getConditionValue("uuid", criteria);
        String name = RequestUtils.getConditionValue("name", criteria);

        if (!StringUtils.isBlank(id)) {
            Account project = giveProjectAccess(objectResourceManagerSupport.getObjectManager().loadResource(Account.class, id), policy);
            return Collections.singletonList(project);
        }

        if (!StringUtils.isBlank(uuid)) {
            return giveProjectAccess(authDao.getAccountByUuid(uuid), policy);
        }

        // "all=true" only takes effect for callers authorized for all accounts.
        boolean isAdmin;
        Object getAll = request.getRequestParams().get("all");
        if (getAll != null) {
            String all = ((String[]) getAll)[0];
            isAdmin = all.equalsIgnoreCase("true") && policy.isOption(Policy.AUTHORIZED_FOR_ALL_ACCOUNTS);
        } else {
            isAdmin = false;
        }
        List<Account> projects = authDao.getAccessibleProjects(policy.getIdentities(),
                isAdmin, policy.getAccountId());
        List<Account> projectsFiltered = new ArrayList<>();
        for (Account project : projects) {
            if (StringUtils.isNotBlank(name) && !name.equalsIgnoreCase(project.getName())) {
                continue;
            }
            projectsFiltered.add(giveProjectAccess(project, policy));
        }
        return projectsFiltered;
    }

    /**
     * Authorization gate: returns the project with access granted and owner
     * capability attached, or null when the caller may not see it (wrong
     * kind, no membership, or inactive project and caller is not an owner).
     */
    private Account giveProjectAccess(Account project, Policy policy) {
        if (project == null || !ProjectConstants.TYPE.equalsIgnoreCase(project.getKind())) {
            return null;
        }
        if (!authDao.hasAccessToProject(project.getId(), policy.getAccountId(),
                policy.isOption(Policy.AUTHORIZED_FOR_ALL_ACCOUNTS), policy.getIdentities())) {
            return null;
        }
        boolean isOwner = authDao.isProjectOwner(project.getId(), policy.getAccountId(), policy.isOption(Policy.AUTHORIZED_FOR_ALL_ACCOUNTS), policy
                .getIdentities());
        if (!accountDao.isActiveAccount(project) && !isOwner) {
            return null;
        }
        if (isOwner) {
            ApiContext.getContext().addCapability(project, ProjectConstants.OWNER);
        } else {
            ApiContext.getContext().setCapabilities(project, new ArrayList<>());
        }
        policy.grantObjectAccess(project);
        return project;
    }

    /** Only handles creation of project-typed resources; defers otherwise. */
    @Override
    public Object create(String type, ApiRequest request) {
        if (!ProjectConstants.TYPE.equals(type)) {
            return null;
        }
        return createProject(type, request);
    }

    /**
     * Creates a project account and registers its initial members. Projects
     * themselves may not create projects (403).
     */
    @SuppressWarnings("unchecked")
    private Account createProject(String type, ApiRequest apiRequest) {
        Policy policy = (Policy) ApiContext.getContext().getPolicy();
        Map<String, Object> project = CollectionUtils.toMap(apiRequest.getRequestObject());
        if (authDao.getAccountById(policy.getAccountId()).getKind().equalsIgnoreCase(ProjectConstants.TYPE)) {
            throw new ClientVisibleException(ResponseCodes.FORBIDDEN);
        }
        Object object = super.create(type, apiRequest);
        if (object instanceof Account) {
            Account newProject = (Account) object;
            newProject.setKind(AccountConstants.PROJECT_KIND);
            objectResourceManagerSupport.getObjectManager().persist(newProject);
            List<Map<String, String>> members = (ArrayList<Map<String, String>>) project.get("members");
            projectMemberResourceManager.setMembers(newProject, members);
            policy.grantObjectAccess(newProject);
            return objectResourceManagerSupport.getObjectManager().reload(newProject);
        } else {
            throw new ClientVisibleException(ResponseCodes.INTERNAL_SERVER_ERROR);
        }
    }

    /** Creates a default project for the identity and makes it the owner. */
    public Account createProjectForUser(Identity identity) {
        Account project = authDao.createProject(identity.getLogin() + ProjectConstants.PROJECT_DEFAULT_NAME, null);
        authDao.createProjectMember(project, new Member(identity, ProjectConstants.OWNER));
        return project;
    }

    /**
     * Deletes a project (owner-only). Falls back to deactivate-then-remove
     * when the remove process cannot run directly, then deletes the
     * remaining active memberships.
     */
    @Override
    public Object deleteObjectSupport(String type, String id, final Object obj, ApiRequest apiRequest) {
        if (!(obj instanceof Account) || !(((Account) obj).getKind().equalsIgnoreCase(ProjectConstants.TYPE))) {
            return super.deleteObjectSupport(type, id, obj, apiRequest);
        }

        ObjectProcessManager objectProcessManager = objectResourceManagerSupport.getObjectProcessManager();
        ObjectManager objectManager = objectResourceManagerSupport.getObjectManager();

        Policy policy = (Policy) ApiContext.getContext().getPolicy();
        if (authDao.getAccountById(Long.valueOf(id)) == null) {
            throw new ClientVisibleException(ResponseCodes.NOT_FOUND);
        }
        if (!authDao.isProjectOwner(Long.valueOf(id), policy.getAccountId(),
                policy.isOption(Policy.AUTHORIZED_FOR_ALL_ACCOUNTS), policy.getIdentities())) {
            throw new ClientVisibleException(ResponseCodes.FORBIDDEN);
        }
        try {
            objectProcessManager.executeStandardProcess(StandardProcess.REMOVE, obj, null);
        } catch (ProcessCancelException e) {
            // REMOVE was cancelled: deactivate first, chaining into remove.
            objectProcessManager.executeStandardProcess(StandardProcess.DEACTIVATE, obj,
                    ProcessUtils.chainInData(new HashMap<>(), AccountConstants.ACCOUNT_DEACTIVATE, AccountConstants.ACCOUNT_REMOVE));
        }
        Account deletedProject = (Account) objectManager.reload(obj);
        for (ProjectMember member : authDao.getActiveProjectMembers(deletedProject.getId())) {
            objectManager.delete(member);
        }
        policy.grantObjectAccess(deletedProject);
        return Arrays.asList(deletedProject);
    }

    /** Updates a project; restricted to project owners (403 otherwise). */
    @Override
    public Object updateObjectSupport(String type, String id, Object obj, ApiRequest apiRequest) {
        Policy policy = (Policy) ApiContext.getContext().getPolicy();
        Account project = (Account) obj;
        if (authDao.isProjectOwner(project.getId(), policy.getAccountId(),
                policy.isOption(Policy.AUTHORIZED_FOR_ALL_ACCOUNTS), policy.getIdentities())) {
            return super.updateObjectSupport(type, id, obj, apiRequest);
        } else {
            throw new ClientVisibleException(ResponseCodes.FORBIDDEN, "Forbidden", "You must be a project owner to update the name or description.", null);
        }
    }
}
|
// Populate the shared delete-confirmation modal with the id/name/type/field
// of whichever row triggered it, just before the modal is shown.
window.addEventListener('DOMContentLoaded', () => {
  $('#deleteModal').on('show.bs.modal', (e) => {
    // e.relatedTarget is the element (e.g. delete button) that opened the modal;
    // read its data-* attributes describing the record to delete.
    const trigger = $(e.relatedTarget);
    const itemId = trigger.data('delete-id');
    const itemName = trigger.data('name');
    const itemType = trigger.data('type');
    const itemField = trigger.data('field');

    $('#textid').val(itemId);
    $('#textName').val(itemName);
    $('#deleteModalLabel').text("确定要删除该" + itemType + "?");
    $('#IDLabel').text("即将删除的" + itemType + "编号为:");
    $('#NameLabel').text("即将删除的" + itemType + itemField + "为:");
    $('#delete_input').val(itemId);
  });
});
<filename>chess_engine/src/ChessEngine.cpp
#include "ChessEngine.h"
// Sets up the move generator and hands a raw pointer to it to the negamax AI,
// which uses it for move generation during search. The unique_ptrs own both.
ceg::ChessEngine::ChessEngine()
{
    move_generator = std::make_unique<ceg::MoveGenerator>();
    ai = std::make_unique<ceg::NegamaxAI>(move_generator.get());
}

// Returns a board populated from the engine's standard start-position FEN string.
ceg::BitBoard ceg::ChessEngine::get_initial_board() const
{
    return get_board_by_FEN_str(initial_board_str);
}

// Returns the opposing color (WHITE <-> BLACK).
ceg::PieceColor ceg::ChessEngine::get_next_player(PieceColor color)
{
    return (color == PieceColor::WHITE) ? PieceColor::BLACK : PieceColor::WHITE;
}
// Returns every legal move whose origin square is (piece_x, piece_y).
// Generates all moves for the side owning that square, then filters by origin.
// Returns an empty vector if the square is empty.
std::vector<ceg::Move> ceg::ChessEngine::get_all_possible_moves_for_piece(const ceg::BitBoard& board, int piece_x, int piece_y) const
{
    const int index = to_linear_idx(piece_x, piece_y);
    if (!is_bit_set(board.occupied, index))
        return std::vector<ceg::Move>();

    // Determine which side owns the piece on the selected square.
    bool black = false;
    if (is_bit_set(board.black_pieces.occupied, index))
        black = true;

    auto moves = move_generator->get_all_possible_moves(board, black);
    std::vector<ceg::Move> result;
    for (const auto& move : moves)
    {
        if (move.from == index)
            result.push_back(move);
    }
    return result;
}
// Asks the AI for a move, searching between min_depth and max_depth plies
// within the given time budget (milliseconds).
ceg::Move ceg::ChessEngine::get_ai_move(const ceg::BitBoard& board, bool current_player_black, int min_depth, int max_depth, long long max_time_in_ms)
{
    return ai->get_move(board, current_player_black, min_depth, max_depth, max_time_in_ms);
}

// Convenience overload: fixed search depth, -1 passed as the time limit.
ceg::Move ceg::ChessEngine::get_ai_move(const ceg::BitBoard& board, PieceColor color, int depth)
{
    return get_ai_move(board, to_bool(color), depth, depth, -1);
}

// Convenience overload: uses the engine's configured depth and time settings
// (see set_min_depth / set_max_depth / set_max_time_in_ms).
ceg::Move ceg::ChessEngine::get_ai_move(const ceg::BitBoard& board, PieceColor color)
{
    return get_ai_move(board, to_bool(color), min_depth, max_depth, max_time_in_milli_seconds);
}

// Applies a move to the board in place (no automatic pawn promotion).
void ceg::ChessEngine::make_move(ceg::BitBoard& board, ceg::Move move) const
{
    move_generator->make_move(board, convert_to_internal(move));
}

// Applies a move and lets the move generator auto-promote a pawn that
// reaches the last rank. NOTE(review): the promotion piece is chosen inside
// MoveGenerator — confirm which piece it promotes to.
void ceg::ChessEngine::make_move_with_auto_promo(ceg::BitBoard& board, ceg::Move move) const
{
    move_generator->make_move_with_auto_promotion(board, convert_to_internal(move));
}
// Returns true if `move` is legal on `board`: its origin square must be
// occupied and (from, to) must appear among the generated moves for the side
// owning the origin square.
bool ceg::ChessEngine::is_move_valid(const ceg::BitBoard& board, const ceg::Move& move) const
{
    if (!is_bit_set(board.occupied, move.from_x, move.from_y))
        return false;

    const int linear_from = to_linear_idx(move.from_x, move.from_y);
    const int linear_to = to_linear_idx(move.to_x, move.to_y);
    bool black = is_bit_set(board.black_pieces.occupied, move.from_x, move.from_y);

    auto possible_moves = move_generator->get_all_possible_moves(board, black);
    for (const auto& gen_move : possible_moves)
    {
        if ((gen_move.from == linear_from) && (gen_move.to == linear_to))
            return true;
    }
    return false;
}

// Returns true if any pawn of either color sits on rank 0 or rank 7,
// i.e. a promotion is pending.
bool ceg::ChessEngine::has_pawn_reached_end_of_board(ceg::BitBoard& board) const
{
    uint64_t pawn_mask = board.black_pieces.pawns | board.white_pieces.pawns;
    for (int i = 0; i < 8; i++)
    {
        if (is_bit_set(pawn_mask, i, 0))
            return true;
        if (is_bit_set(pawn_mask, i, 7))
            return true;
    }
    return false;
}
// The game is over when the side to move has no legal moves
// (checkmate or stalemate; use is_check_mate / is_stale_mate to distinguish).
bool ceg::ChessEngine::is_game_over(const ceg::BitBoard& board, PieceColor color) const
{
    auto moves = move_generator->get_all_possible_moves(board, to_bool(color));
    return moves.size() == 0;
}

// Returns true if square (x, y) holds a piece of either color.
bool ceg::ChessEngine::is_field_occupied(const ceg::BitBoard& board, int x, int y) const
{
    return is_bit_set(board.occupied, x, y);
}

// Returns the color of the piece on (x, y). The square must be occupied
// (asserted in debug builds; WHITE is returned for an empty square otherwise).
ceg::PieceColor ceg::ChessEngine::get_piece_color(const ceg::BitBoard& board, int x, int y) const
{
    assert(is_field_occupied(board, x, y));
    if (is_bit_set(board.black_pieces.occupied, x, y))
        return PieceColor::BLACK;
    else
        return PieceColor::WHITE;
}
// Places `piece` of `color` on square (x, y), replacing whatever was there,
// then refreshes the aggregate occupancy bitboards.
void ceg::ChessEngine::set_piece(ceg::BitBoard& board, Piece piece, PieceColor color, int x, int y) const
{
    const int linear_idx = to_linear_idx(x, y);
    board.clear_bits_at_position(linear_idx); // remove any existing piece first
    bool black = to_bool(color);
    // Select the piece-set of the owning side.
    Pieces* pieces = black ? &(board.black_pieces) : &(board.white_pieces);

    switch (piece)
    {
    case ceg::Piece::PAWN:
        set_bit(pieces->pawns, linear_idx);
        break;
    case ceg::Piece::BISHOP:
        set_bit(pieces->bishops, linear_idx);
        break;
    case ceg::Piece::KNIGHT:
        set_bit(pieces->knights, linear_idx);
        break;
    case ceg::Piece::ROOK:
        set_bit(pieces->rooks, linear_idx);
        break;
    case ceg::Piece::QUEEN:
        set_bit(pieces->queens, linear_idx);
        break;
    case ceg::Piece::KING:
        set_bit(pieces->king, linear_idx);
        break;
    default:
        break;
    }
    board.update_occupied(); // keep cached occupancy masks in sync
}
// Checkmate: the side to move has no legal moves while in check
// (check_counter >= 1 means at least one piece attacks the king).
bool ceg::ChessEngine::is_check_mate(ceg::BitBoard& board, PieceColor color) const
{
    auto possible_moves = move_generator->get_all_possible_moves(board, to_bool(color));
    auto check_info = move_generator->get_check_info(board, to_bool(color));

    if (possible_moves.size() == 0 && check_info.check_counter >= 1)
        return true;
    return false;
}

// Stalemate: the side to move has no legal moves and is not in check.
bool ceg::ChessEngine::is_stale_mate(ceg::BitBoard& board, PieceColor color) const
{
    auto possible_moves = move_generator->get_all_possible_moves(board, to_bool(color));
    auto check_info = move_generator->get_check_info(board, to_bool(color));

    if (possible_moves.size() == 0 && check_info.check_counter == 0)
        return true;
    return false;
}
// Counts leaf nodes of the move tree to the given depth (perft), starting
// from a FEN position. Used for validating move generation.
uint64_t ceg::ChessEngine::perft(const std::string& FEN_str, int depth) const
{
    return perft(FEN_str, depth, nullptr, nullptr);
}

// Perft variant that also collects the FEN string of every visited position.
std::set<std::string> ceg::ChessEngine::perft_get_set(const std::string& FEN_str, int depth) const
{
    std::set<std::string> result_set;
    perft(FEN_str, depth, &result_set, nullptr);
    return result_set;
}

// Perft variant that counts how often each position (by FEN) is visited.
std::map<std::string, int> ceg::ChessEngine::perft_get_map(const std::string& FEN_str, int depth) const
{
    std::map<std::string, int> result_map;
    perft(FEN_str, depth, nullptr, &result_map);
    return result_map;
}

// Minimum search depth used by get_ai_move(board, color).
void ceg::ChessEngine::set_min_depth(int min_depth)
{
    this->min_depth = min_depth;
}

// Maximum search depth used by get_ai_move(board, color).
void ceg::ChessEngine::set_max_depth(int max_depth)
{
    this->max_depth = max_depth;
}

// Search time budget (milliseconds) used by get_ai_move(board, color).
void ceg::ChessEngine::set_max_time_in_ms(long long ms)
{
    this->max_time_in_milli_seconds = ms;
}
// Parses a FEN string into a BitBoard. Only the piece-placement, castling
// and en-passant fields (indices 0, 2, 3) are consumed here; the
// side-to-move field is ignored (the perft overload reads it instead).
// Returns an empty board on malformed input (asserts in debug builds).
ceg::BitBoard ceg::ChessEngine::get_board_by_FEN_str(const std::string& FEN_str) const
{
    auto splitted = string_split(FEN_str, " ");
    if (splitted.size() < 4)
    {
        assert(!"Invalid FEN input string");
        return ceg::BitBoard();
    }
    return ceg::BitBoard(splitted[0], splitted[2], splitted[3]);
}

// Maps a piece color to the internal "is black" flag.
bool ceg::ChessEngine::to_bool(ceg::PieceColor color) const
{
    // Return the comparison directly instead of the redundant "? true : false".
    return color == PieceColor::BLACK;
}

// Converts a coordinate-pair move into the linear-index representation
// used by the move generator.
ceg::InternalMove ceg::ChessEngine::convert_to_internal(const ceg::Move& move) const
{
    return ceg::InternalMove{ to_linear_idx(move.from_x, move.from_y), to_linear_idx(move.to_x, move.to_y) };
}
// Perft driver: parses the FEN (fields: placement, side-to-move, castling,
// en passant), then recurses. out_set / out_map are optional collectors and
// may be null.
uint64_t ceg::ChessEngine::perft(const std::string& FEN_str, int depth, std::set<std::string>* out_set, std::map<std::string, int>* out_map) const
{
    auto splitted = string_split(FEN_str, " ");
    if (splitted.size() < 4)
    {
        assert(!"Invalid FEN input string");
        return 0;
    }
    bool current_player_black = (splitted[1].at(0) == 'b'); // 'b' = black to move
    ceg::BitBoard board(splitted[0], splitted[2], splitted[3]);

    return perft(board, current_player_black, depth, out_set, out_map);
}

// Recursive perft: sums leaf counts over all legal moves, alternating sides.
// Every visited position can optionally be recorded in out_set (unique FENs)
// and/or out_map (FEN -> visit count).
uint64_t ceg::ChessEngine::perft(const ceg::BitBoard& board, bool current_player_black, int depth, std::set<std::string>* out_set, std::map<std::string, int>* out_map) const
{
    if (depth == 0)
        return 1ULL;

    uint64_t nodes = 0;
    auto moves = move_generator->get_all_possible_moves(board, current_player_black);
    for (const auto& move : moves)
    {
        // Work on a copy so the caller's board is untouched.
        ceg::BitBoard copy_board = board;
        move_generator->make_move(copy_board, move);
        if (out_set)
            out_set->insert(copy_board.to_FEN_string());
        if (out_map)
        {
            std::string fen_str = copy_board.to_FEN_string();
            if (out_map->find(fen_str) == out_map->end())
                out_map->insert({ fen_str, 1 });
            else
                out_map->at(fen_str) += 1;
        }
        nodes += perft(copy_board, !current_player_black, depth - 1, out_set, out_map);
    }
    return nodes;
}
|
<filename>mall-product/src/main/java/com/touch/air/mall/product/service/SpuImagesService.java
package com.touch.air.mall.product.service;
import com.baomidou.mybatisplus.extension.service.IService;
import com.touch.air.common.utils.PageUtils;
import com.touch.air.mall.product.entity.SpuImagesEntity;
import java.util.List;
import java.util.Map;
/**
 * Service for SPU (standard product unit) images.
 *
 * @author bin.wang
 * @email <EMAIL>
 * @date 2020-12-04 13:18:33
 */
public interface SpuImagesService extends IService<SpuImagesEntity> {

    // Paged query over SPU images; paging/filter options come in via the params map.
    PageUtils queryPage(Map<String, Object> params);

    // Persists the given image URLs for the SPU identified by id.
    void saveImages(Long id, List<String> images);
}
|
#ifndef _FILES_H_
#define _FILES_H_

#include <stdio.h>
#include "dataTypes.h"

/* Binary data files backing each record type. */
#define CLIENTE "data/Clientes.dat"
#define VENDEDOR "data/Vendedores.dat"
#define FORNECEDOR "data/Fornecedores.dat"
#define PRODUTO "data/Produtos.dat"
#define NOTA_FISCAL "data/NotasFiscais.dat"
#define ITEM_NOTA_FISCAL "data/ItensNotaFiscal.dat"
#define NOTA_COMPRA "data/NotasCompra.dat"
#define ITEM_NOTA_COMPRA "data/ItensNotaCompra.dat"
#define HISTORICO_PRECOS "data/HistoricoPrecos.dat"

/* Generic fixed-size record file access. */
FILE *abrirArquivo(char location[]);  /* open the file at `location` */
int fecharArquivo(FILE *file);        /* close the file */
int gravarRegistroFinalArquivo(void *data, FILE *file, unsigned long size);            /* append one record */
int gravarRegistroEmArquivo(void *data, FILE *file, int posicao,unsigned long size);   /* overwrite record at position */
void lerRegistroEmArquivo(void *data, FILE *file, int position, unsigned int size);    /* read record at position */
unsigned long getNewUniqueId(FILE *file, unsigned long sizeOfRegister);                /* next free id for a new record */

/* Lookups return a record position; *ByName variants return an array of
 * positions and report its length through `tamanho`. */
int findClienteById(FILE *file, unsigned long id);
int findClienteByCPF(FILE *file, char *cpf);
int *findClientesByName(FILE *file, char *name, int *tamanho);

int findVendedorById(FILE *file, unsigned long id);
int findVendedorByCPF(FILE *file, char *cpf);
int *findVendedoresByName(FILE *file, char *name, int *tamanho);
int isEmailCadastradoVendedor(FILE *file, char *email);

int findFornecedorById(FILE *file, unsigned long id);
int findFornecedorByCNPJ(FILE *file, char *cnpj);
int findFornecedorByName(FILE *file, char *name);
int isNomeFornecedorCadastrado(FILE *file, char * nome);

int findProdutoById(FILE *file, unsigned long id);
int findNotaFiscalById(FILE *file, unsigned long id);
int findNotaCompraById(FILE *file, unsigned long id);
int findItemNotaCompraById(FILE *file, unsigned long id);

#endif
// Minimal user identity shared by the auth responses below.
export interface IUser {
  id: string;
  email: string;
}

// Response payload for a sign-up attempt.
export interface SignUpUserResponse {
  user: IUser;
  isSuccess: boolean;
}

// Response payload returned after registering a user.
export interface RegisterUserResponse {
  id: string;
  email: string;
}
|
<filename>project/Dependencies.scala<gh_stars>0
import sbt._
// Central declaration of library versions and dependency groups used by build.sbt.
object Dependencies {
  // Pinned versions.
  val slf4jVersion = "1.7.20"
  val logbackVersion = "1.2.3"
  val scalaTestVersion = "3.0.5"
  val betterFilesVersion = "3.8.0"
  val sparkVersion = "2.4.4"
  val sparkNlpVersion = "2.2.2"
  val sparkFastTestVersion = "0.20.0-s_2.11"

  // SLF4J API with Logback as the runtime backend.
  val logging = Seq( "org.slf4j" % "slf4j-api" % slf4jVersion,
    "ch.qos.logback" % "logback-classic" % logbackVersion )

  val betterFiles = Seq( "com.github.pathikrit" %% "better-files" % betterFilesVersion )

  val scalaTest = Seq( "org.scalatest" %% "scalatest" % scalaTestVersion % Test )

  // Spark is Provided: supplied by the cluster at runtime, not bundled.
  val spark = Seq( "org.apache.spark" %% "spark-core" % sparkVersion % Provided,
    "org.apache.spark" %% "spark-sql" % sparkVersion % Provided )

  val sparkNlp = Seq( "com.johnsnowlabs.nlp" %% "spark-nlp" % sparkNlpVersion )

  val sparkFastTests = Seq( "MrPowers" % "spark-fast-tests" % sparkFastTestVersion % Test )
}
<filename>console/src/boost_1_78_0/libs/system/test/result_error_access.cpp
// Copyright 2017, 2021 <NAME>.
// Distributed under the Boost Software License, Version 1.0.
// https://www.boost.org/LICENSE_1_0.txt
#include <boost/system/result.hpp>
#include <boost/core/lightweight_test.hpp>
#include <boost/core/lightweight_test_trait.hpp>
#include <string>
using namespace boost::system;
// Test error type for result<T, E> with a non-error_code E:
// explicitly constructible from int, copyable but not copy-assignable.
struct X
{
    int v_;

    explicit X( int v = 0 ): v_( v ) {}

    X( X const& ) = default;
    X& operator=( X const& ) = delete;
};
int main()
{
    // result<int>, default-constructed: value state; error() on a value-state
    // result returns a default error_code instead of throwing.
    {
        result<int> r;

        BOOST_TEST( r.has_value() );
        BOOST_TEST( !r.has_error() );

        BOOST_TEST_EQ( r.error(), error_code() );
    }

    // Same checks through a const object.
    {
        result<int> const r;

        BOOST_TEST( r.has_value() );
        BOOST_TEST( !r.has_error() );

        BOOST_TEST_EQ( r.error(), error_code() );
    }

    // Same checks on rvalues.
    {
        BOOST_TEST( result<int>().has_value() );
        BOOST_TEST( !result<int>().has_error() );

        BOOST_TEST_EQ( result<int>().error(), error_code() );
    }

    // result<int> constructed from a value.
    {
        result<int> r( 1 );

        BOOST_TEST( r.has_value() );
        BOOST_TEST( !r.has_error() );

        BOOST_TEST_EQ( r.error(), error_code() );
    }

    {
        result<int> const r( 1 );

        BOOST_TEST( r.has_value() );
        BOOST_TEST( !r.has_error() );

        BOOST_TEST_EQ( r.error(), error_code() );
    }

    {
        BOOST_TEST( result<int>( 1 ).has_value() );
        BOOST_TEST( !result<int>( 1 ).has_error() );

        BOOST_TEST_EQ( result<int>( 1 ).error(), error_code() );
    }

    // result<int> constructed from an error_code: error state; error()
    // returns the stored code.
    {
        auto ec = make_error_code( errc::invalid_argument );

        result<int> r( ec );

        BOOST_TEST( !r.has_value() );
        BOOST_TEST( r.has_error() );

        BOOST_TEST_EQ( r.error(), ec );
    }

    {
        auto ec = make_error_code( errc::invalid_argument );

        result<int> const r( ec );

        BOOST_TEST( !r.has_value() );
        BOOST_TEST( r.has_error() );

        BOOST_TEST_EQ( r.error(), ec );
    }

    {
        auto ec = make_error_code( errc::invalid_argument );

        BOOST_TEST( !result<int>( ec ).has_value() );
        BOOST_TEST( result<int>( ec ).has_error() );

        BOOST_TEST_EQ( result<int>( ec ).error(), ec );
    }

    // result<std::string, X>: an int is only convertible to the error type X,
    // so result( 1 ) is an error state holding X(1).
    {
        result<std::string, X> r( 1 );

        BOOST_TEST( !r.has_value() );
        BOOST_TEST( r.has_error() );

        BOOST_TEST_EQ( r.error().v_, 1 );
    }

    {
        result<std::string, X> const r( 1 );

        BOOST_TEST( !r.has_value() );
        BOOST_TEST( r.has_error() );

        BOOST_TEST_EQ( r.error().v_, 1 );
    }

    {
        BOOST_TEST(( !result<std::string, X>( 1 ).has_value() ));
        BOOST_TEST(( result<std::string, X>( 1 ).has_error() ));

        BOOST_TEST_EQ( (result<std::string, X>( 1 ).error().v_), 1 );
    }

    // A string selects the value type; error() falls back to a
    // default-constructed X (v_ == 0).
    {
        result<std::string, X> r( "s" );

        BOOST_TEST( r.has_value() );
        BOOST_TEST( !r.has_error() );

        BOOST_TEST_EQ( r.error().v_, 0 );
    }

    {
        result<std::string, X> const r( "s" );

        BOOST_TEST( r.has_value() );
        BOOST_TEST( !r.has_error() );

        BOOST_TEST_EQ( r.error().v_, 0 );
    }

    {
        BOOST_TEST(( result<std::string, X>( "s" ).has_value() ));
        BOOST_TEST(( !result<std::string, X>( "s" ).has_error() ));

        BOOST_TEST_EQ( (result<std::string, X>( "s" ).error().v_), 0 );
    }

    // result<void>: default is a (void) value state.
    {
        result<void> r;

        BOOST_TEST( r.has_value() );
        BOOST_TEST( !r.has_error() );

        BOOST_TEST_EQ( r.error(), error_code() );
    }

    {
        result<void> const r;

        BOOST_TEST( r.has_value() );
        BOOST_TEST( !r.has_error() );

        BOOST_TEST_EQ( r.error(), error_code() );
    }

    {
        BOOST_TEST( result<void>().has_value() );
        BOOST_TEST( !result<void>().has_error() );

        BOOST_TEST_EQ( result<void>().error(), error_code() );
    }

    // result<void> constructed from an error_code: error state.
    {
        auto ec = make_error_code( errc::invalid_argument );

        result<void> r( ec );

        BOOST_TEST( !r.has_value() );
        BOOST_TEST( r.has_error() );

        BOOST_TEST_EQ( r.error(), ec );
    }

    {
        auto ec = make_error_code( errc::invalid_argument );

        result<void> const r( ec );

        BOOST_TEST( !r.has_value() );
        BOOST_TEST( r.has_error() );

        BOOST_TEST_EQ( r.error(), ec );
    }

    {
        auto ec = make_error_code( errc::invalid_argument );

        BOOST_TEST( !result<void>( ec ).has_value() );
        BOOST_TEST( result<void>( ec ).has_error() );

        BOOST_TEST_EQ( result<void>( ec ).error(), ec );
    }

    return boost::report_errors();
}
|
<filename>src/sockets/NextSocketRouter.ts
import WebSocket from 'ws';
import fs from 'fs'
import path from 'path'
import { NextContextBase } from "../NextContext";
import { checkPathsByNormalization } from "../utils";
import { NextSocketMessageBase } from "./NextSocketMessageBase";
import { NextSocketContext } from "./NextSocketContext";
import { NextSocketRoute } from "./NextSocketRoute";
/**
 * Routes incoming websocket messages to registered NextSocketRoute handlers
 * by matching the message path against each route's path.
 */
export class NextSocketRouter {
    // Registered routes; handleMessage uses the first match in this order.
    public routes: NextSocketRoute[] = [];

    // Adds a route to the routing table.
    public registerRoute(route: NextSocketRoute) {
        this.routes.push(route);
    }

    /**
     * Dispatches a message to the first route whose (normalized) path matches.
     * Unmatched paths get an error reply on the socket.
     * NOTE(review): handler rejections are logged via console.error and then
     * swallowed — confirm this is intended.
     */
    public async handleMessage(ctx: NextContextBase, message: NextSocketMessageBase, socket: WebSocket) {
        var route = this.routes.find(r => checkPathsByNormalization(r.path, message.path));
        var sctx = new NextSocketContext(message, socket);
        if (route) {
            await route.action.call(route, ctx, sctx).catch(console.error);
        }
        else {
            sctx.send({
                type: 'error',
                message: 'Route not found',
                path: message.path
            });
        }
    }

    /**
     * Recursively loads route modules (.js/.ts files exporting a default
     * NextSocketRoute) from the given directories and registers them.
     * Directory reads are synchronous; the method only returns a Promise
     * because of its signature.
     */
    public async registerRouters(dirs: string[]) {
        for (var dir of dirs) {
            var files = fs.readdirSync(dir, { withFileTypes: true });
            for (var file of files) {
                if (file.isFile() && (file.name.endsWith('.js') || file.name.endsWith('.ts'))) {
                    var filePath = path.join(dir, file.name);
                    var route = require(filePath);
                    if (route.default) {
                        this.registerRoute(route.default);
                    }
                    console.log('Register route: ' + path.relative(process.cwd(), filePath));
                }
                else if(file.isDirectory()){
                    // Descend into subdirectories.
                    this.registerRouters([path.join(dir, file.name)]);
                }
            }
        }
    }
}
|
// Backend base address and API path prefix used to build request URLs.
export default {
    address:"http://localhost:5000",
    url:"/api"
}
"""
Build a program to return the top N most frequent words in a string
"""
from collections import Counter
def top_n_words(string, n):
    """Return the n most frequent whitespace-separated words in *string*.

    Matching is case-sensitive; ties keep first-seen order (Counter semantics).
    """
    tallies = Counter(string.split())
    return [word for word, _count in tallies.most_common(n)]
if __name__ == '__main__':
    # Demo run. Note: "The" and "the" are counted separately because
    # matching is case-sensitive.
    string = "The quick brown fox jumps over the lazy dog"
    n = 3
    top_words = top_n_words(string, n)
    print(top_words)
<filename>src/components/horse/HorseDetails/index.js<gh_stars>1-10
import React from 'react'
import PropTypes from 'prop-types'
import classNames from 'utils/classnames'
import capitalize from 'utils/capitalize'
// Renders a two-column list of horse attributes. Items flagged isHidden are
// skipped; missing values render as '-'.
const HorseDetails = props => {
  const { data } = props

  // Each row gets the base 'row' class plus any item-specific class.
  const constructClassName = className => classNames('row', className)

  return (
    <div className='horse-details-list col-xs-12'>
      {
        data.map((item, index) => (
          !item.isHidden && (
            <div className={constructClassName(item.className)} key={index}>
              <div className='horse-details-list__title col-xs-6'>
                {item.title}
              </div>
              <div className='col-xs-6'>
                {item.value ? capitalize(item.value) : '-'}
              </div>
            </div>
          )
        ))
      }
    </div>
  )
}
HorseDetails.propTypes = {
  // NOTE(review): the render also reads isHidden and className, which are
  // not declared in this shape — consider adding them.
  data: PropTypes.arrayOf(
    PropTypes.shape({
      title: PropTypes.string,
      value: PropTypes.string,
      isLink: PropTypes.bool,
      href: PropTypes.string
    })
  )
}

export default HorseDetails
|
# Collects copies of the host's network configuration for later inspection.
BACKUP_FOLDER='collect_info/backup_files'

# Create the full destination tree: cp fails when the target directory of a
# multi-source copy does not exist (previously only ${BACKUP_FOLDER} was made).
mkdir -p "${BACKUP_FOLDER}/etc/sysconfig/network-scripts"

# network files (-a preserves mode, ownership and timestamps)
cp -a /etc/sysconfig/network-scripts/ifcfg-* "${BACKUP_FOLDER}/etc/sysconfig/network-scripts"
|
package facade.amazonaws.credentials
import facade.amazonaws.AWSCredentials
import scala.scalajs.js
import scala.scalajs.js.annotation.JSImport
/**
 * Scala.js facade for `AWS.EnvironmentCredentials` from the AWS SDK.
 * `envPrefix` presumably selects the environment-variable prefix the SDK
 * reads the credentials from (e.g. "AWS") — see the AWS SDK docs to confirm.
 */
@js.native
@JSImport("aws-sdk/lib/node_loader", "EnvironmentCredentials", "AWS.EnvironmentCredentials")
class EnvironmentCredentials(envPrefix: String) extends AWSCredentials {}
|
<reponame>cocdeshijie/discord-antmap<gh_stars>0
import discord
import ast
from discord.ext import commands
from discord.ext.commands import MissingRequiredArgument
from fuzzywuzzy import process
from config import Config
config = Config()

bot = commands.Bot(command_prefix='.')
bot.remove_command('help')  # drop the built-in help command

# Load the lookup databases used by the fuzzy matchers below.
# NOTE(review): the files are named *.json but parsed with ast.literal_eval,
# so they are expected to hold Python literals (plain JSON objects without
# true/false/null also parse) — confirm the file format.
file = open("./genus.json", "r")
contents = file.read()
genus_db = ast.literal_eval(contents)
file.close()

file = open("./species.json", "r")
contents = file.read()
species_db = ast.literal_eval(contents)
file.close()
@bot.command(name="map")
async def antmap(ctx, *, name):
    """Shows distribution map of <species> from antmaps.org."""
    if len(name.split(" ")) == 1:
        # Single word: treat it as a genus and show the genus diversity map.
        name = fuzzy_genus(name)
        embed = discord.Embed(title=display_name(name), description="https://antmaps.org/?mode=diversity&genus=" + name_to_map_url(name), color=0x3498db)
        embed.set_image(url=str(antmap_image_genus(name)))
        await ctx.send(embed=embed)
    else:
        # Two or more words: treat as "<genus> <species>".
        name = fuzzy_search(name)
        embed = discord.Embed(title=display_name(name), description="https://antmaps.org/?mode=species&species=" + name_to_map_url(name), color=0x3498db)
        embed.set_image(url=str(antmap_image(name)))
        await ctx.send(embed=embed)
@bot.event
async def on_command_error(ctx, error):
    # Friendly reply when a command is invoked without its required argument;
    # all other errors are left to discord.py's default handling.
    if isinstance(error, MissingRequiredArgument):
        await ctx.send("This command cannot be left blank.")
# fuzzy search
def fuzzy_search(name):
    """Resolve a "<genus> <species>" query against the lookup databases.

    The genus is matched first, then the species within that genus.
    Assumes *name* contains at least two words — antmap() only calls this
    when the split length is not 1.
    """
    name = name.split()
    name_list = []
    name_list.append(process.extractOne(name[0], genus_db)[0])
    sp = species_db[name_list[0]]
    name_list.append(process.extractOne(name[1], sp)[0])
    return " ".join(name_list)
def fuzzy_genus(name):
    """Return the best-matching genus for the first word of *name*."""
    first_word = name.split()[0]
    best_match, _score = process.extractOne(first_word, genus_db)
    return best_match
# change "camponotus frAgilis" to "Camponotus fragilis"
def display_name(name):
    """Normalize capitalization: lowercase everything, title-case the genus.

    The previous version special-cased ``len(name) == 1`` — a single
    *character*, not a single word — which duplicated what the general path
    already produced. Single- and multi-word inputs take the same path now.
    """
    words = [word.lower() for word in name.split()]
    if not words:
        # Degenerate (empty/whitespace-only) input: previously this raised
        # IndexError for "" — fall back to plain title-casing instead.
        return name.lower().title()
    words[0] = words[0].title()
    return " ".join(words)
# change "camponotus frAgilis" to "Camponotus.fragilis"
def name_to_map_url(name):
    """Format a name for antmaps.org URLs: dot-separated, genus title-cased."""
    words = [word.lower() for word in name.split()]
    words[0] = words[0].title()
    # Join with "." directly instead of joining with " " and replacing —
    # split() already removed all whitespace, so the result is identical.
    return ".".join(words)
#antmap_url
def antmap_image(name):
    # Distribution-map image URL for a species.
    return "https://antmap.coc.tools/images/" + name_to_map_url(name) + ".png"

# antmap genus
def antmap_image_genus(name):
    # Map image URL for a genus. NOTE(review): currently identical to
    # antmap_image(); kept separate presumably in case the URLs diverge.
    return "https://antmap.coc.tools/images/" + name_to_map_url(name) + ".png"
# Start the bot (blocking) with the token from config.
bot.run(config.TOKEN)
|
package patron.events.respositories;
import com.appscharles.libs.databaser.exceptions.DatabaserException;
import com.appscharles.libs.databaser.managers.SFManager;
import com.appscharles.libs.databaser.operators.DBOperator;
import org.hibernate.Session;
import org.hibernate.query.Query;
import patron.events.enums.EventType;
import patron.events.models.Event;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaDelete;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Root;
import java.util.List;
/**
* The type Event repository.
*/
/**
 * The type Event repository: criteria-query access to {@link Event} rows.
 */
public class EventRepository {

    /**
     * Gets by type.
     *
     * @param eventType the event type
     * @return the by type
     * @throws DatabaserException the databaser exception
     */
    public static List<Event> getByType(EventType eventType) throws DatabaserException {
        // try-with-resources closes the session even when the query throws;
        // the previous version leaked the session on any exception because
        // close() was only reached on success.
        try (Session session = SFManager.getDefaultSessionFactory().openSession()) {
            CriteriaBuilder builder = session.getCriteriaBuilder();
            CriteriaQuery<Event> criteriaQuery = builder.createQuery(Event.class);
            Root<Event> root = criteriaQuery.from(Event.class);
            criteriaQuery.select(root);
            criteriaQuery.where(builder.equal(root.get("type"), eventType));
            Query<Event> q = session.createQuery(criteriaQuery);
            return q.getResultList();
        } catch (Exception e) {
            throw new DatabaserException(e);
        }
    }

    /**
     * Remove all.
     *
     * @throws DatabaserException the databaser exception
     */
    public static void removeAll() throws DatabaserException {
        // Bulk delete inside a committed transaction managed by DBOperator.
        DBOperator.commit(session -> {
            CriteriaBuilder builder = session.getCriteriaBuilder();
            CriteriaDelete<Event> criteriaQuery = builder.createCriteriaDelete(Event.class);
            criteriaQuery.from(Event.class);
            session.createQuery(criteriaQuery).executeUpdate();
        });
    }

    /**
     * Count long.
     *
     * @return the long
     * @throws DatabaserException the databaser exception
     */
    public static Long count() throws DatabaserException {
        // Same resource-safety fix as getByType().
        try (Session session = SFManager.getDefaultSessionFactory().openSession()) {
            CriteriaBuilder builder = session.getCriteriaBuilder();
            CriteriaQuery<Long> criteriaQuery = builder.createQuery(Long.class);
            criteriaQuery.select(builder.count(criteriaQuery.from(Event.class)));
            return session.createQuery(criteriaQuery).getSingleResult();
        } catch (Exception e) {
            throw new DatabaserException(e);
        }
    }
}
|
<filename>src/main/java/org/olat/course/reminder/ui/CourseReminderSendTableModel.java
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.course.reminder.ui;
import java.util.List;
import java.util.Locale;
import org.olat.core.commons.persistence.SortKey;
import org.olat.core.gui.components.form.flexible.impl.elements.table.DefaultFlexiTableDataModel;
import org.olat.core.gui.components.form.flexible.impl.elements.table.FlexiSortableColumnDef;
import org.olat.core.gui.components.form.flexible.impl.elements.table.FlexiTableColumnModel;
import org.olat.core.gui.components.form.flexible.impl.elements.table.SortableFlexiTableDataModel;
import org.olat.core.gui.components.form.flexible.impl.elements.table.SortableFlexiTableModelDelegate;
import org.olat.course.assessment.ui.tool.AssessmentToolConstants;
/**
*
* Initial date: 3 Jun 2021<br>
* @author uhensler, <EMAIL>, http://www.frentix.com
*
*/
public class CourseReminderSendTableModel extends DefaultFlexiTableDataModel<CourseReminderSendRow>
implements SortableFlexiTableDataModel<CourseReminderSendRow> {
public static final int USER_PROPS_OFFSET = 500;
public static final String USAGE_IDENTIFIER = CourseReminderSendTableModel.class.getCanonicalName();
private static final SendCols[] COLS = SendCols.values();
private final Locale locale;
public CourseReminderSendTableModel(FlexiTableColumnModel columnModel, Locale locale) {
super(columnModel);
this.locale = locale;
}
@Override
public void sort(SortKey orderBy) {
if (orderBy != null) {
List<CourseReminderSendRow> views = new SortableFlexiTableModelDelegate<>(orderBy, this, locale).sort();
super.setObjects(views);
}
}
@Override
public Object getValueAt(int row, int col) {
CourseReminderSendRow identityRow = getObject(row);
return getValueAt(identityRow, col);
}
@Override
public Object getValueAt(CourseReminderSendRow row, int col) {
if(col >= 0 && col < SendCols.values().length) {
switch(COLS[col]) {
case sendDate: return row.getSendDate();
default: return "ERROR";
}
}
int propPos = col - AssessmentToolConstants.USER_PROPS_OFFSET;
return row.getIdentityProp(propPos);
}
public enum SendCols implements FlexiSortableColumnDef {
sendDate("table.header.send");
private final String i18nKey;
private SendCols(String i18nKey) {
this.i18nKey = i18nKey;
}
@Override
public String i18nHeaderKey() {
return i18nKey;
}
@Override
public boolean sortable() {
return true;
}
@Override
public String sortKey() {
return name();
}
}
} |
/*
* Copyright (c) 2015. Seagate Technology PLC. All rights reserved.
*/
package com.seagate.alto.provider.lyve.response;
import com.google.gson.Gson;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
import com.seagate.alto.provider.lyve.LyveCloudProvider;
import com.seagate.alto.provider.Provider;
import java.util.Date;
/**
 * Media metadata deserialized from Lyve Cloud JSON responses and exposed
 * through the {@link Provider.MediaMetadata} interface.
 */
public class MediaMetadata implements Provider.MediaMetadata {

    // ".tag" discriminates the metadata variant in the JSON payload.
    @SerializedName(".tag")
    @Expose
    public String Tag;

    @SerializedName("dimensions")
    @Expose
    public Dimensions dimensions;

    @SerializedName("location")
    @Expose
    public Location location;

    // Capture time as the raw string from the server; parsed in timeTaken().
    @SerializedName("time_taken")
    @Expose
    public String timeTaken;

    // Duration; presumably only present for videos — TODO confirm the unit.
    @SerializedName("duration")
    @Expose
    public Integer duration;

    @Override
    public Provider.Size dimensions() {
        return new Provider.Size(dimensions.width, dimensions.height);
    }

    @Override
    public double latitude() {
        return location.latitude;
    }

    @Override
    public double longitude() {
        return location.longitude;
    }

    @Override
    public Date timeTaken() {
        return LyveCloudProvider.dateFromString(timeTaken);
    }

    @Override
    public String toString() {
        // Debug representation: the object serialized back to JSON.
        return new Gson().toJson(this);
    }
}
|
<reponame>pirao/compare_FRA_irregularities
import pandas as pd
import seaborn as sns
import numpy as np
from numpy import load
import scipy as sp
import matplotlib.pyplot as plt
from scipy import signal
from scipy.signal import welch
class FRA_irregularities():
def __init__(self,L_min=1.524,L_max=304.8,N=3000,k=0.25,signal=None,dt=None,signal_type='vert'):
self.L_min=L_min # The default values of L_min and L_max are the the maximum permissible range of wavelengths for FRA standards
self.L_max=L_max
self.irreg_type = None
self.N_harmonics = N
self.k = k
self.omega_max = None
self.omega_min = None
self.d_omega = None
self.omega = None
self.wave=None
self.s_vert=None
self.s_lat=None
self.vert_irreg = None
self.lat_irreg = None
self.class_list = None
self.signal = signal
self.dt = dt
self.signal_type = signal_type
def PSD(self,type_irreg=6):
self.omega_max = 2*np.pi/self.L_min # Maximum angular frequency (spatial wavenumber) in rad/m
self.omega_min = 2*np.pi/self.L_max # Minimum angular frequency (spatial wavenumber) in rad/m
self.d_omega = (self.omega_max-self.omega_min)/self.N_harmonics # Frequency increment (rad/m)
n = np.arange(1,self.N_harmonics+1,1) # index vector
self.omega = self.omega_min + (n-0.5)*self.d_omega # discrete angular frequency (rad/m)
# Creating wavelength domain vector
self.wave = 2*np.pi/self.omega
if type_irreg == 6:
Av = 0.0339*10**-4 # m^2 * (rad/m)
Aa = 0.0339*10**-4 # m^2 * (rad/m)
omega_c = 0.8245 # rad/m
omega_s = 0.438 # rad/m
elif type_irreg == 5:
Av = 0.2095*10**-4
Aa = 0.0762*10**-4
omega_c = 0.8245
omega_s = 0.8209
elif type_irreg == 4:
Av = 0.5376*10**-4
Aa = 0.3027*10**-4
omega_c = 0.8245
omega_s = 1.1312
elif type_irreg == 3:
Av = 0.6816*10**-4
Aa = 0.4128*10**-4
omega_c = 0.8245
omega_s = 0.852
elif type_irreg == 2:
Av = 1.0181*10**-4
Aa = 1.2107*10**-4
omega_c = 0.8245
omega_s = 0.9308
elif type_irreg == 1:
Av = 1.2107*10**-4
Aa = 3.3634*10**-4
omega_c = 0.8245
omega_s = 0.6046
else:
print('Provide a FRA classe between 6 and 1')
return None
self.s_vert = 2*np.pi*(self.k*Av*omega_c**2)/((self.omega**2)*(self.omega**2+omega_c**2)) # m^2/(1/m)
self.s_lat = 2*np.pi*(self.k*Aa*omega_c**2)/((self.omega**2)*(self.omega**2+omega_c**2)) # m^2/(1/m)
return self.wave,self.omega,self.s_vert,self.s_lat
def _create_PSD(self,class_list=[6,5,4]):
self.class_list=class_list
self.vert_irreg = []
self.lat_irreg = []
for item in self.class_list:
_,_,vert, lat = FRA_irregularities.PSD(self,type_irreg=item)
self.vert_irreg.append(vert)
self.lat_irreg.append(lat)
print('Classes {} were created'.format(class_list))
return self.wave,self.omega,self.vert_irreg,self.lat_irreg
# def plot_PSD(self,x_axis='wavelength',v_line=True,scale='log',plot_signal=False):
# if self.vert_irreg==None:
# FRA_irregularities._create_PSD(self,class_list=[6,5,4,3]) # Create PSDs
# fig, ax = plt.subplots(2,1,figsize=(30,20))
# ax[0].set_ylim(10**(-14),10**0)
# ax[1].set_ylim(10**(-14),10**0);
# ax[0].set_title('PSD standard curves for vertical irregularities')
# ax[0].set_ylabel('PSD $(m^2/(1/m)$')
# ax[0].set_xticks
# ax[1].set_title('PSD standard curves for lateral irregularities')
# ax[1].set_ylabel('PSD $(m^2/(1/m)$')
# if x_axis == 'spatial_angular_frequency':
# for idx,item in enumerate(self.class_list):
# ax[0].plot(self.omega,self.vert_irreg[idx],label='FRA class {}'.format(self.class_list[idx]))
# ax[1].plot(self.omega,self.lat_irreg[idx],label='FRA class {}'.format(self.class_list[idx]))
# if v_line == True:
# ax[0].axvline(self.omega.min(), label='Lower spatial frequency: {} rad/m'.format(np.round(self.omega_min,3)),color='m',linestyle='--')
# ax[0].axvline(self.omega.max(), label='Upper spatial frequency: {} rad/m'.format(np.round(self.omega_max,3)),color='m',linestyle='--')
# ax[1].axvline(self.omega.min(), label='Lower spatial frequency: {} rad/m'.format(np.round(self.omega_min,3)),color='m',linestyle='--')
# ax[1].axvline(self.omega.max(), label='Upper spatial frequency: {} rad/m'.format(np.round(self.omega_max,3)),color='m',linestyle='--')
# ax[0].set_xlabel('Spatial angular frequency (rad/m)')
# ax[0].legend()
# ax[1].set_xlabel('Spatial angular frequency (rad/m)')
# ax[1].legend()
# if x_axis == 'wavelength':
# for idx,item in enumerate(self.class_list):
# ax[0].plot(self.wave,self.vert_irreg[idx],label='FRA class {}'.format(self.class_list[idx]))
# ax[1].plot(self.wave,self.lat_irreg[idx],label='FRA class {}'.format(self.class_list[idx]))
# if v_line == True:
# ax[0].axvline(self.wave.min(), label='Lower FRA wavelength: {} m'.format(self.L_min),color='m',linestyle='--')
# ax[0].axvline(self.wave.max(), label='Upper FRA wavelength: {} m'.format(self.L_max),color='m',linestyle='--')
# ax[1].axvline(self.wave.min(), label='Lower FRA wavelength: {} m'.format(self.L_min),color='m',linestyle='--')
# ax[1].axvline(self.wave.max(), label='Upper FRA wavelength: {} m'.format(self.L_max),color='m',linestyle='--')
# ax[0].set_xlabel('Wavelength (m)')
# ax[1].set_xlabel('Wavelength (m)')
# if scale=='log':
# ax[0].set_xscale('log')
# ax[0].set_yscale('log')
# ax[1].set_xscale('log')
# ax[1].set_yscale('log')
# ax[0].legend()
# ax[1].legend()
# if plot_signal==True:
# f,welch_coef = FRA_irregularities.Welch_PSD_signal(self)
# if self.signal_type =='vert':
# ax[0].plot(1/f,welch_coef,label = 'Signal PSD')
# elif self.signal_type=='lat':
# ax[1].plot(1/f,welch_coef,label = 'Signal PSD')
# else:
# print('Only vertical and lateral irregularities can be plotted')
# ax[0].legend()
# ax[1].legend()
def Welch_PSD_signal(self, window_size_frac=0.2, overlap_frac=0.5):
    """Estimate the power spectral density of ``self.signal`` with Welch's method.

    Parameters
    ----------
    window_size_frac : float, optional
        Segment length as a fraction of the total signal length.
    overlap_frac : float, optional
        Overlap between consecutive segments as a fraction of the segment length.

    Returns
    -------
    f_signal, welch_coef_signal : ndarray
        One-sided frequency axis (sampling rate taken as ``1/self.dt``) and the
        corresponding PSD estimate (density scaling, Hann window).
    """
    segment_size = np.int32(window_size_frac * len(self.signal))
    # Zero-pad each segment up to the next power of two for the FFT.
    fft_size = 2 ** (int(np.log2(segment_size)) + 1)
    # BUG FIX: scipy expects an integer noverlap; the fraction product is a
    # float and newer scipy versions reject non-integer values.
    overlap_size = int(overlap_frac * segment_size)
    f_signal, welch_coef_signal = welch(self.signal,
                                        1 / self.dt,
                                        nperseg=segment_size,
                                        noverlap=overlap_size,
                                        nfft=fft_size,
                                        return_onesided=True,
                                        scaling='density',
                                        detrend='constant',
                                        window='hann',
                                        average='mean')
    return f_signal, welch_coef_signal
#!/usr/bin/env bash
# Install and configure NVIDIA PRIME power management on Arch Linux.
set -euo pipefail; shopt -s nullglob
# Re-exec through sudo when not already root; "$0" is quoted so a script path
# containing spaces survives the re-exec.
[ "$(id -u)" -eq 0 ] || exec sudo -p "$(printf "This command needs to run as root.\nPassword: ")" "$0" "$@"
cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1
pacman -Syu --needed nvidia-prime
# Already running as root at this point, so the previous "sudo" prefix here
# was redundant.
systemctl enable nvidia-persistenced
echo -n "Configuring NVIDIA power management... "
cp nvidia-pm.conf /etc/modprobe.d/
cp 80-nvidia-pm.rules /etc/udev/rules.d/
echo "done!"
#!/bin/bash
# Build (and optionally push) the Docker image for one dispatch package.
#   $1 - package name (must match images/<package>/Dockerfile)
#   $2 - build number used for the dev tag
# Env: DOCKER_REGISTRY (default vmware), CI/IMAGE_TAG, PUSH_IMAGES.
: ${DOCKER_REGISTRY:="vmware"}

PACKAGE=${1}
BUILD=${2}

TAG=dev-${BUILD}
# CI builds use the pipeline-provided tag instead of the dev-<build> tag.
if [ -n "$CI" ]; then
    TAG=$IMAGE_TAG
fi

image=${DOCKER_REGISTRY}/dispatch-${PACKAGE}:${TAG}
echo "$image"
# Quote every expansion so package names/registries with unusual characters
# can't be word-split.
docker build -t "$image" -f "images/${PACKAGE}/Dockerfile" .
if [ -n "$PUSH_IMAGES" ]; then
    docker push "$image"
fi
<reponame>DPechetti/node_base_project
const ListBatatinhaOperation = require('../../../src/app/operations/ListBatatinhaOperation');
const generateBatatinhaRequest = require('../../mocks/batatinha/generateBatatinhaRequest');

// Suite renamed: it previously said "GetBatatinhaOperation" although the unit
// under test is ListBatatinhaOperation.
describe('ListBatatinhaOperation', () => {
  test('Should call batatinha service and return found batatinha', async () => {
    const batatinha = generateBatatinhaRequest();
    const batatinhaServiceResponse = { batatinhaResponseField: 'batatinhaResponseField' };

    // Stub service resolving a two-item list; the operation is expected to
    // pass the result through unchanged.
    const listBatatinhaService = {
      execute: () => Promise.resolve([batatinhaServiceResponse, batatinhaServiceResponse])
    };

    const listBatatinhaOperation = ListBatatinhaOperation({ listBatatinhaService });

    const foundBatatinha = await listBatatinhaOperation.execute(batatinha);

    expect(foundBatatinha).toStrictEqual([batatinhaServiceResponse, batatinhaServiceResponse]);
  });
});
|
<reponame>zhaort2009/easypoi-test<gh_stars>0
package cn.afterturn.easypoi.test.excel.export;
import cn.afterturn.easypoi.excel.ExcelExportUtil;
import cn.afterturn.easypoi.excel.annotation.Excel;
import cn.afterturn.easypoi.excel.entity.ExportParams;
import cn.afterturn.easypoi.excel.entity.enmus.ExcelType;
import cn.afterturn.easypoi.test.en.EnumDataEntity;
import cn.afterturn.easypoi.test.en.Sex;
import cn.afterturn.easypoi.test.en.StatusEnum;
import cn.afterturn.easypoi.test.entity.MsgClient;
import cn.afterturn.easypoi.test.entity.MsgClientGroup;
import org.apache.poi.ss.usermodel.Workbook;
import org.junit.Test;
import java.io.File;
import java.io.FileOutputStream;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
/**
* @author by jueyue on 18-4-2.
*/
public class ExcelExportEnumTest {

    /**
     * Exports 100 rows with enum-typed fields to an XSSF workbook and writes
     * it to disk, printing the elapsed export time in milliseconds.
     */
    @Test
    public void test() throws Exception {
        // Build 100 rows exercising every enum field of EnumDataEntity.
        List<EnumDataEntity> list = new ArrayList<EnumDataEntity>();
        for (int i = 0; i < 100; i++) {
            EnumDataEntity client = new EnumDataEntity();
            client.setName("小明" + i);
            client.setSex(Sex.MAN);
            client.setStatus(StatusEnum.Init);
            client.setBaseStatus(StatusEnum.Ready);
            list.add(client);
        }
        Date start = new Date();
        ExportParams params = new ExportParams("枚举测试", "测试", ExcelType.XSSF);
        Workbook workbook = ExcelExportUtil.exportExcel(params, EnumDataEntity.class, list);
        System.out.println(new Date().getTime() - start.getTime());
        // FIX: try-with-resources — the stream previously leaked when
        // workbook.write() threw before fos.close() was reached.
        try (FileOutputStream fos = new FileOutputStream("D:/excel/EnumDataEntity.xlsx")) {
            workbook.write(fos);
        }
    }
}
|
def hourglass_sum(arr):
    """Return the maximum "hourglass" sum in a 2-D grid.

    An hourglass is the 7-cell pattern anchored at (row, col)::

        a b c
          d
        e f g

    Generalized: the grid may be any rectangular size with at least 3 rows
    and 3 columns (the original version hard-coded 6x6 grids via range(4)).

    :param arr: rectangular list of lists of numbers, >= 3x3
    :return: the largest hourglass sum found
    """
    rows = len(arr)
    cols = len(arr[0])
    max_sum = float("-inf")
    for row in range(rows - 2):
        for col in range(cols - 2):
            top = arr[row][col] + arr[row][col + 1] + arr[row][col + 2]
            middle = arr[row + 1][col + 1]
            bottom = arr[row + 2][col] + arr[row + 2][col + 1] + arr[row + 2][col + 2]
            max_sum = max(max_sum, top + middle + bottom)
    return max_sum
#!/bin/bash
#
# Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Provision a Pravega cluster on AWS with terraform and record the resulting
# public DNS names in $config_path/public_dns.txt.
aws_access_key=${1:-null}
aws_secret_key=${2:-null}
aws_region=${3:-null}
aws_key_name=${4:-aws-key-pair}
cred_path=${5:-null}
config_path=${6:-null}
pravega_org=${7:-pravega/pravega}
pravega_branch=${8:-master}
cd aws/
TF_LOG=INFO terraform init
TF_LOG=INFO terraform apply -auto-approve -var aws_access_key="$aws_access_key" \
                                          -var aws_secret_key="$aws_secret_key" \
                                          -var aws_region="$aws_region" \
                                          -var aws_key_name="$aws_key_name" \
                                          -var cred_path="$cred_path" \
                                          -var config_path="$config_path" \
                                          -var pravega_org="$pravega_org" \
                                          -var pravega_branch="$pravega_branch"
# BUG FIX: the old script touched ./public_dns.txt (inside aws/) but appended
# to $config_path/public_dns.txt, so stale entries from previous runs
# survived.  Truncate the real output file instead.
: > "$config_path/public_dns.txt"
master_public_dns=$(terraform output master_public_dns)
echo "$master_public_dns" >> "$config_path/public_dns.txt"
slave_public_dns=$(terraform output slave_public_dns)
echo "$slave_public_dns" >> "$config_path/public_dns.txt"
// Markdown-style line prefixes mapped to the Draft.js block type the current
// block should be converted into when the prefix is typed.
export const DRAFT_BLOCKS = {
  '#': 'header-one',
  '##': 'header-two',
  '###': 'header-three',
  '####': 'header-four',
  '#####': 'header-five',
  '######': 'header-six',
  '*': 'unordered-list-item',
  '+': 'unordered-list-item',
  '-': 'unordered-list-item',
  // NOTE(review): 'n.' presumably stands for any "1." style numbered prefix
  // and is matched specially by the consumer — confirm against the caller.
  'n.': 'ordered-list-item',
  '>': 'blockquote',
};
// Inline markdown delimiters mapped to Draft.js inline style names.
export const DRAFT_INLINE = {
  '~~': 'STRIKETHROUGH',
  '**': 'BOLD',
  '*': 'ITALIC',
  '`': 'CODE',
  _: 'ITALIC',
};
|
#!/bin/bash
# Provision a Vagrant box: system packages, pyenv, and an auto-starting
# Jupyter notebook served from the shared /vagrant folder.

echo "Install system requirements"
apt-get --quiet update
apt-get install -y --no-install-recommends \
    curl \
    git

echo "Install pyenv"
curl -# -L https://github.com/pyenv/pyenv-installer/raw/master/bin/pyenv-installer | bash

echo "Setup home directory"
# -sfn: reprovisioning no longer fails when the symlink already exists.
ln -sfn /vagrant/ /home/vagrant/data
# "user:group" is the portable chown form; the "user.group" form is deprecated.
chown vagrant:vagrant /home/vagrant/data

echo "Setup Jupyter auto start"
cat >/etc/systemd/system/jupyter.service <<EOL
[Unit]
Description=Jupyter Workplace
[Service]
Type=simple
PIDFile=/run/jupyter.pid
ExecStart=/home/vagrant/.pyenv/versions/anaconda3-5.1.0/bin/jupyter notebook --port=8888 --no-browser --ip=0.0.0.0 --NotebookApp.token= --notebook-dir=/home/vagrant/data
User=vagrant
Group=vagrant
WorkingDirectory=/home/vagrant/data
Restart=always
RestartSec=10
[Install]
WantedBy=multi-user.target
EOL
# FIX: reload units *before* enabling so systemd sees the freshly written
# unit file (the original enabled first, then reloaded).
systemctl daemon-reload
systemctl enable jupyter.service
systemctl restart jupyter.service
|
package service
import (
"shippo-server/internal/model"
"shippo-server/utils"
)
// PermissionPolicyService exposes permission-policy queries and mutations,
// delegating all persistence work to the embedded Service's DAO layer.
type PermissionPolicyService struct {
	*Service
}

// NewPermissionPolicyService wraps s in a PermissionPolicyService.
func NewPermissionPolicyService(s *Service) *PermissionPolicyService {
	return &PermissionPolicyService{s}
}
// FindByID returns the policy with the given policy ID.
func (t *PermissionPolicyService) FindByID(id uint) (p model.PermissionPolicy, err error) {
	p, err = t.dao.PermissionPolicy.FindByID(id)
	return
}

// FindByPolicyName returns the policy with the given policy name.
func (t *PermissionPolicyService) FindByPolicyName(name string) (p model.PermissionPolicy, err error) {
	p, err = t.dao.PermissionPolicy.FindByPolicyName(name)
	return
}

// FindPermissionAccessByID returns the access rules owned by the policy with
// the given ID.
func (t *PermissionPolicyService) FindPermissionAccessByID(id uint) (
	p []model.PermissionAccess, err error) {
	p, err = t.dao.PermissionPolicy.FindPermissionAccessByID(id)
	return
}

// FindPermissionAccessByPolicyName returns the access rules owned by the
// policy with the given name.
func (t *PermissionPolicyService) FindPermissionAccessByPolicyName(name string) (
	p []model.PermissionAccess, err error) {
	p, err = t.dao.PermissionPolicy.FindPermissionAccessByPolicyName(name)
	return
}

// FindPermissionAccessByType returns the access rules of the policy with the
// given ID, filtered by access type.
func (t *PermissionPolicyService) FindPermissionAccessByType(id uint, accessType string) (
	p []model.PermissionAccess, err error) {
	p, err = t.dao.PermissionPolicy.FindPermissionAccessByType(id, accessType)
	return
}

// FindPermissionAccessByPolicyNameAndType returns the access rules of the
// policy with the given name, filtered by access type.
func (t *PermissionPolicyService) FindPermissionAccessByPolicyNameAndType(name string, accessType string) (
	p []model.PermissionAccess, err error) {
	p, err = t.dao.PermissionPolicy.FindPermissionAccessByPolicyNameAndType(name, accessType)
	return
}
// PermissionPolicyCreate stores a new policy; only PolicyName and Remark of p
// are persisted.
func (t *PermissionPolicyService) PermissionPolicyCreate(p model.PermissionPolicy) (err error) {
	_, err = t.dao.PermissionPolicy.PermissionPolicyCreate(p.PolicyName, p.Remark)
	return
}

// PermissionPolicyDel deletes the policy identified by p.ID.
func (t *PermissionPolicyService) PermissionPolicyDel(p model.PermissionPolicy) (err error) {
	err = t.dao.PermissionPolicy.PermissionPolicyDel(p.ID)
	return
}

// PermissionPolicyUpdate persists changes to an existing policy.
func (t *PermissionPolicyService) PermissionPolicyUpdate(p model.PermissionPolicy) (err error) {
	err = t.dao.PermissionPolicy.PermissionPolicyUpdate(p)
	return
}

// PermissionPolicyFindAllExtStatus lists all policies together with their
// status relative to the given ID (semantics defined by the DAO layer).
func (t *PermissionPolicyService) PermissionPolicyFindAllExtStatus(id uint) (
	p []model.PermissionPolicyStatus, err error) {
	p, err = t.dao.PermissionPolicy.PermissionPolicyFindAllExtStatus(id)
	return
}

// PermissionPolicyFindAll lists every policy with its usage counts.
func (t *PermissionPolicyService) PermissionPolicyFindAll() (p []model.PermissionPolicyCount, err error) {
	p, err = t.dao.PermissionPolicy.PermissionPolicyFindAll()
	return
}

// PermissionPolicyFind returns a single policy (with counts) by p.ID.
func (t *PermissionPolicyService) PermissionPolicyFind(p model.PermissionPolicy) (
	list model.PermissionPolicyCount, err error) {
	list, err = t.dao.PermissionPolicy.PermissionPolicyFind(p.ID)
	return
}
// PermissionAssociationUpdate reconciles the access rules linked to policyId
// so they exactly match the access slice: links requested but not yet stored
// are created, and stored links no longer requested are removed.
func (t *PermissionPolicyService) PermissionAssociationUpdate(policyId uint, access []uint) (err error) {
	current, err := t.dao.PermissionPolicy.PermissionAssociationFindPolicyIdList(policyId)
	if err != nil {
		return
	}
	// Create associations that are requested but missing from storage.
	for _, wanted := range access {
		if utils.In(wanted, current) {
			continue
		}
		if _, err = t.dao.PermissionPolicy.PermissionAssociationCreate(policyId, wanted); err != nil {
			return
		}
	}
	// Remove stored associations that are no longer requested.
	for _, existing := range current {
		if utils.In(existing, access) {
			continue
		}
		if err = t.dao.PermissionPolicy.PermissionAssociationDel(policyId, existing); err != nil {
			return
		}
	}
	return
}
|
#!/bin/bash
# Please make sure your VNC resolution is 1920x1080 for figure utilities unit tests to pass
#
# Written by Nanbo Sun,Yang Qing and CBIG under MIT license: https://github.com/ThomasYeoLab/CBIG/blob/master/LICENSE.md

##########################
# Set parameters and paths
##########################
# output folder
out_dir=$1
# unit test version, 'light' or 'intermediate' or 'comprehensive'
version=$2

# BUG FIX: validate the output directory *before* creating it and deriving
# the log paths — previously "mkdir -p" ran on an empty argument first.
if [ -z "${out_dir}" ]; then
    echo "Error: please specify the output directory."
    exit 1
fi
mkdir -p $out_dir

dt=`date +%Y%m%d`
# log file
LF=${out_dir}/${dt}_CBIG_UnitTests_Version_${version}.log
# verbose log file
LF_verbose=${out_dir}/${dt}_CBIG_UnitTests_Version_${version}_verbose.log

# Truncate each log and confirm it is writeable.
> $LF_verbose
if [ ! -w $LF_verbose ]; then
    echo "Error: $LF_verbose is not writeable. Please change the output directory."
    exit 1
fi
> $LF
if [ ! -w $LF ]; then
    echo "Error: $LF is not writeable. Please change the output directory."
    exit 1
fi
###########################################
# Print start info to log file and terminal
###########################################
# "tee -a" mirrors every status line to both the terminal and $LF.
echo "**************************************************************************" | tee -a $LF
echo | tee -a $LF
echo "Running CBIG unit tests!" | tee -a $LF
echo "Unit Test Version: ${version}" | tee -a $LF
echo -n "Start Date: " | tee -a $LF
date | tee -a $LF
#####################
# Run CBIG unit tests
#####################
# extract_errors <matlab_log> <dest_log> <mode>
#   Copies the failure details that follow MATLAB's "Failure Summary" header
#   (details start 4 lines below it) from <matlab_log> into <dest_log>.
#   mode "append" adds to <dest_log>; any other value overwrites it.
#   No-op when the log contains no "Failure Summary" (i.e. no failures).
#   This replaces five hand-duplicated copies of the same extraction code.
extract_errors() {
    local src="$1"
    local dest="$2"
    local mode="$3"
    local line_num
    line_num=`grep -n "Failure Summary" "$src" | cut -d ":" -f 1`
    if [ -n "${line_num}" ]; then
        local line_num_error=$(($line_num+4))
        local total_lines=`wc -l "$src" | cut -d " " -f 1`
        if [ "$mode" = "append" ]; then
            sed -n "$line_num_error,$total_lines p" < "$src" >> "$dest"
        else
            sed -n "$line_num_error,$total_lines p" < "$src" > "$dest"
        fi
    fi
}

> ${out_dir}/error_testcase.log
# run light unit test
if [ "$version" = "light" ]; then
    matlab -nodesktop -nosplash -r "runtests('$CBIG_CODE_DIR/unit_tests', 'Recursively', true); exit"\
    > $LF_verbose
    extract_errors $LF_verbose ${out_dir}/error_testcase.log overwrite
# run comprehensive unit test
elif [ "$version" = "comprehensive" ]; then
    matlab -nodesktop -nosplash -r "runtests('$CBIG_CODE_DIR/unit_tests', 'Recursively', true); exit" > $LF_verbose
    extract_errors $LF_verbose ${out_dir}/error_testcase.log overwrite
    matlab -nodesktop -nosplash -r "runtests('$CBIG_CODE_DIR/stable_projects', 'Recursively', true); exit" \
    > ${out_dir}/tmp.log
    cat ${out_dir}/tmp.log >> $LF_verbose
    extract_errors ${out_dir}/tmp.log ${out_dir}/error_testcase.log append
# run intermediate unit test
elif [ "$version" = "intermediate" ]; then
    matlab -nodesktop -nosplash -r "runtests('$CBIG_CODE_DIR/unit_tests', 'Recursively', true); exit" > $LF_verbose
    extract_errors $LF_verbose ${out_dir}/error_testcase.log overwrite
    # run the selected stable project unit tests
    SP_tests=`cat $CBIG_CODE_DIR/unit_tests/CBIG_intermediate_unit_test_list`
    for SP_test in ${SP_tests}
    do
        matlab -nodesktop -nosplash -r "addpath(genpath('$CBIG_CODE_DIR/stable_projects')); runtests('$SP_test'); exit"\
        > ${out_dir}/tmp.log
        cat ${out_dir}/tmp.log >> $LF_verbose
        extract_errors ${out_dir}/tmp.log ${out_dir}/error_testcase.log append
    done
# error message for unrecognized unit test set
else
    echo "Error: version should be [light], [intermediate] or [comprehensive]! Please specify."
    exit 1
fi
#########################################
# Print env info to log file and terminal
#########################################
echo -n "End Date: " | tee -a $LF
date | tee -a $LF
echo | tee -a $LF
echo "**************************************************************************" | tee -a $LF
echo | tee -a $LF
# print OS related info
echo "Test Environment Settings" | tee -a $LF
echo "OS:" | tee -a $LF
lsb_release -a | tee -a $LF
# print default software packages used
echo | tee -a $LF
echo -e "MATLAB:\t $CBIG_MATLAB_DIR" | tee -a $LF
echo -e "FREESURFER:\t $FREESURFER_HOME" | tee -a $LF
echo -e "FSL:\t $CBIG_FSLDIR" | tee -a $LF
echo -e "ANTS:\t $CBIG_ANTS_DIR" | tee -a $LF
echo -e "AFNI:\t $CBIG_AFNI_DIR" | tee -a $LF
# print last commit
echo | tee -a $LF
echo -n "Last commit: " | tee -a $LF
cd $CBIG_CODE_DIR
git log -1 --oneline | tee -a $LF
echo | tee -a $LF
echo "**************************************************************************" | tee -a $LF
echo | tee -a $LF
#############################################
# Print test summary to log file and terminal
#############################################
# MATLAB writes one "Done <name>" line per completed suite; names starting
# with "test_" are utilities/external_packages tests, the rest are stable
# projects.
# calculate number of included functions
ut_ex_num=`cat $LF_verbose | grep '^Done' | sed 's/Done\ //g' | grep '^test_' | wc -l`
# calculate number of included stable projects
sp_num=`cat $LF_verbose | grep '^Done' | sed 's/Done\ //g' | grep -v '^test_' | wc -l`
# calculate number of failed functions
ut_ex_err_num=`cat ${out_dir}/error_testcase.log | grep -e 'test' | sed 's/^\ //g' | grep '^test_' \
| sort | cut -d "/" -f 1 | uniq | wc -l`
# calculate number of failed stable projects
sp_err_num=`cat ${out_dir}/error_testcase.log | grep -e 'test' | sed 's/^\ //g' | grep -v '^test_' \
| sort | cut -d "/" -f 1 | uniq | wc -l`
# print summary report to log file and terminal
echo "Test Summary:" | tee -a $LF
echo -e "[$sp_num] stable projects tested\t\t\t\t[$sp_err_num] FAILED the unit tests" | tee -a $LF
echo -e "[$ut_ex_num] utilities&external_packages functions tested\t[$ut_ex_err_num] FAILED the unit tests" \
| tee -a $LF
echo | tee -a $LF
##########################################
# Print cleaned error messages to log file
##########################################
line_num=`grep -n "Failure Summary" $LF_verbose | cut -d ":" -f 1`
# unit test passed
if [ -z "${line_num}" ]; then
    echo "Unit Tests [PASSED]!" | tee -a $LF
    echo "You can check $LF for more details."
# unit test failed
else
    echo "Unit Tests [FAILED]!" | tee -a $LF
    echo | tee -a $LF
    echo "**************************************************************************" | tee -a $LF
    echo "Please check $LF for error details."
    echo | tee -a $LF
    echo "Error details are as follows:" >> $LF
    echo | tee -a $LF
    # Pair up the "====…" delimiter line numbers two at a time and copy each
    # delimited failure section from the verbose log into the summary log.
    bounds=`grep -n '^=\+\{80\}' $LF_verbose | cut -d':' -f1`
    echo $bounds | xargs -n2 sh -c "sed -n \"\$0,\$1 p\" $LF_verbose >> $LF; echo -e \"\n\" >> $LF"
fi
###########################################
# list functions included in this unit test
###########################################
echo | tee -a $LF
echo "# **************************************************************************" >> $LF
echo "# " >> $LF
echo "# The stable projects included are listed here:" >> $LF
echo "# " >> $LF
cat $LF_verbose | grep '^Done' | sed 's/Done\ //g' | grep -v '^test_' | sed 's/_unit_test//g' | sort | \
awk '{print "# "$0}' >> $LF
echo "# " >> $LF
echo "# The utilities or exernal_packages functions included are listed here:" >> $LF
echo "# " >> $LF
cat $LF_verbose | grep '^Done' | sed 's/Done\ //g' | grep '^test_' | sed 's/test_//g' | sort | \
awk '{print "# "$0}' >> $LF
###########################
# Remove intermediate files
###########################
rm -f ${out_dir}/error_testcase.log
rm -f ${out_dir}/tmp.log
|
import {getRepository} from "typeorm";
import * as express from "express";
import {Request, Response} from "express";
import {Player} from "../entity/Player";
import {Position} from "../entity/Position";
import {validate} from "class-validator";
import {auth} from '../middleware/auth';
import {admin} from '../middleware/admin';
import {async} from '../middleware/async';
const router = express.Router();
router.get("/", auth, async(async function(req: Request, res: Response) {
const players = await getRepository(Player).find({ relations: ["position"] });
res.status(200).send(players);
}));
router.get("/:id", auth, async(async function(req: Request, res: Response) {
const player = await getRepository(Player).findOne({ where: {id: req.params.id} });
if(!player) return res.status(404).send('There is no player with the given id.');
res.status(200).send(player);
}));
router.post("/", [auth, admin], async(async function(req: Request, res: Response) {
const position = await getRepository(Position).findOne({ id: req.body.positionId });
if(!position) return res.status(404).send('Position is not available!');
// Avoid Duplication
const oldPlayer = await getRepository(Player).findOne({ where:{
name: req.body.name,
position: {id: req.body.positionId}
}});
if(oldPlayer) return res.status(400).send('Duplication Error.');
let player = new Player();
player.name = req.body.name;
player.position = position;
const errors = await validate(player);
if (errors.length > 0) return res.status(400).send(`Bad Input! ${errors}`);
await getRepository(Player).save(player);
res.status(200).send(player);
}));
router.put("/:id", [auth, admin], async(async function(req: Request, res: Response) {
let player = await getRepository(Player).findOne({ where: {id: req.params.id} });
if(!player) return res.status(404).send('There is no player with the given id.');
const position = await getRepository(Position).findOne({ name: req.body.position });
if(!position) return res.status(404).send('Position is not available!');
player.name = req.body.name;
player.position = position;
await getRepository(Player).save(player);
res.status(200).send(player);
}));
router.delete("/", [auth, admin], async(async function(req: Request, res: Response) {
const players = await getRepository(Player).find();
await getRepository(Player).remove(players);
res.status(200).send(players);
}));
router.delete("/:id", async function(req: Request, res: Response) {
const player = await getRepository(Player).findOne({ where: {id: req.params.id} });
if(!player) return res.status(404).send('There is no player with the given id.');
await getRepository(Player).remove(player);
res.status(200).send(player);
});
export { router as playerRouter } |
def permutations(str):
    """Return every permutation of the sequence, generated in the original
    left-to-right order (duplicates are kept for repeated elements)."""
    if len(str) <= 1:
        return [str]
    # Pick each element in turn as the head, then permute the remainder.
    return [
        str[i:i + 1] + tail
        for i in range(len(str))
        for tail in permutations(str[:i] + str[i + 1:])
    ]


str = "abc"
perms = permutations(str)
print(perms)  # ['abc', 'acb', 'bac', 'bca', 'cab', 'cba']
#!/bin/sh
# NOTE(review): this appears to be the CocoaPods-generated "Embed Pods
# Frameworks" build phase script — prefer regenerating it via `pod install`
# over hand-editing.
set -e

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# Copies the framework at $1 into the app bundle's Frameworks folder,
# resolving symlinks, stripping unneeded architectures and re-signing.
install_framework()
{
  # Locate the framework: by full path under BUILT_PRODUCTS_DIR, by basename
  # there, or as a direct path.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # use filter instead of exclude so missing patterns dont' throw errors
  echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  # The framework binary lives either inside a .framework bundle or directly
  # in the destination folder.
  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}

# Signs a framework with the provided identity
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identitiy
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements \"$1\""
    /usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements "$1"
  fi
}

# Strip invalid architectures
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current file
  archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
  stripped=""
  for arch in $archs; do
    if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary" || exit 1
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
}

# Embed the pod framework for both build configurations.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "$BUILT_PRODUCTS_DIR/MSTabBarController/MSTabBarController.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "$BUILT_PRODUCTS_DIR/MSTabBarController/MSTabBarController.framework"
fi
|
<filename>src/Schemas/UserSchema/index.js<gh_stars>0
// Barrel file: re-export UserSchema so consumers can import it from the
// folder root instead of the implementation file.
export { UserSchema } from './UserSchema';
|
<reponame>Hannah-Abi/python-pro-21
import unittest
from unittest.mock import patch
from tmc import points, reflect
from tmc.utils import load, load_module, reload_module, get_stdout, check_source
from functools import reduce
import os
import os.path
import textwrap
import inspect, re
import types
from random import choice, randint, shuffle
exercise = 'src.prime_numbers'
def source_rows(function: callable):
    """Count the effective source lines of *function*: statements are split on
    newlines or semicolons; blank lines and pure-comment lines are ignored."""
    raw = inspect.getsource(function)
    statements = (chunk.strip() for chunk in re.split('\\n|;', raw))
    return sum(1 for stmt in statements if stmt and not stmt.startswith("#"))
# `points` comes from the course's tmc grading framework and tags this suite
# with the exercise id — presumably consumed by the grader; do not rename.
@points('12.prime_numbers')
class PrimeNumbersTest(unittest.TestCase):

    @classmethod
    def setUpClass(cls):
        # Any call to input() inside the student's module raises immediately,
        # so top-level code that prompts the user fails fast during import.
        with patch('builtins.input', side_effect=[AssertionError("Input was not expected")]):
            cls.module = load_module(exercise, 'fi')

    def test_0a_main_ok(self):
        # check_source verifies executable statements live under the
        # __main__ guard; `line` names the offending line when ok is False.
        ok, line = check_source(self.module)
        message = """Code testing the functions must be located after the
if __name__ == "__main__":
block. The following line must be moved:
"""
        self.assertTrue(ok, message+line)

    def test_1_function_exists(self):
        # Importing the symbol is the existence check itself.
        try:
            from src.prime_numbers import prime_numbers
        except Exception as e:
            self.fail(f'Your program should have a function called prime_numbers.')

    def test_2_return_type(self):
        try:
            from src.prime_numbers import prime_numbers
            val = prime_numbers()
        except Exception as e:
            self.fail(f"Function threw an error when called like this\n" +
                'prime_numbers()\n' +
                f'{e}')
        # Human-readable type name for the failure message.
        taip = str(type(val)).replace("<class '","").replace("'>","")
        self.assertTrue(type(val) is types.GeneratorType, f"Function prime_numbers should return a generator," +
            f" now it returns {val} which is of type {taip}\n when it is called as\n" +
            'prime_numbers()')

    def test_3_test_with_values1(self):
        from src.prime_numbers import prime_numbers
        # Each case draws the first `test_case` primes from a fresh generator
        # and compares against the reference prefix.
        test_cases = (1,3,4,7,9,12)
        al = [2,3,5,7,11,13,17,19,23,29,31,37,41,43]
        for test_case in test_cases:
            corr = al[:test_case]
            gen = prime_numbers()
            val = [next(gen) for i in range(test_case)]
            self.assertEqual(val, corr, f'Generator should return values\n{corr}\n' +
                f'when it is initialized as:\ngen = prime_numbers()\n' +
                f'and the function next(gen) is called {test_case} times\n' +
                f'Now it returns\n' +
                f'{val}')

if __name__ == '__main__':
    unittest.main()
|
<reponame>benoitc/pypy
from pypy.rpython.lltypesystem import lltype, rffi
from pypy.rlib.rsdl import RMix, RSDL
from pypy.rpython.tool import rffi_platform as platform
def malloc_buffer_chunk(has_own_allocated_buffer, length_bytes, volume):
    """Allocate a raw RMix buffer of ``length_bytes`` and wrap it in a chunk.

    BUG FIX: the freshly allocated buffer pointer was previously dropped —
    ``malloc_chunk`` takes (has_own_allocated_buffer, buffer_pointer,
    length_bytes, volume) but was called with only three arguments, which
    would raise a TypeError and leak the raw allocation.
    """
    buffer_pointer = lltype.malloc(RMix.Buffer, length_bytes, flavor='raw')
    return malloc_chunk(has_own_allocated_buffer, buffer_pointer, length_bytes, volume)
def malloc_chunk(has_own_allocated_buffer, buffer_pointer, length_bytes, volume):
    """
    Creates a new Mix_Chunk.

    has_own_allocated_buffer: if 1 struct has its own allocated buffer,
                              if 0 abuf should not be freed
    buffer_pointer: pointer to audio data
    length_bytes: length of audio data in bytes
    volume: Per-sample volume, 0-128 (normally
            MIX_MAX_VOLUME after loading)
    """
    # Raw (unmanaged) allocation: the caller owns the chunk and must free it.
    p = lltype.malloc(RMix.Chunk, flavor='raw')
    rffi.setintfield(p, 'c_allocated', has_own_allocated_buffer)
    # NOTE(review): setintfield is used to store `buffer_pointer` into c_abuf,
    # which is presumably a pointer field — confirm this shouldn't be a plain
    # struct-field assignment instead.
    rffi.setintfield(p, 'c_abuf', buffer_pointer)
    rffi.setintfield(p, 'c_alen', length_bytes)
    rffi.setintfield(p, 'c_volume', volume)
    return p
#!/usr/bin/env bash
# Copyright (c) 2020 The Samcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Linter to check that commit messages have a new line before the body
# or no body at all
export LC_ALL=C
EXIT_CODE=0
# "-?" (or any unknown option) prints usage and exits.
while getopts "?" opt; do
  case $opt in
    ?)
      echo "Usage: $0 [N]"
      echo " COMMIT_RANGE='<commit range>' $0"
      echo " $0 -?"
      echo "Checks unmerged commits, the previous N commits, or a commit range."
      echo "COMMIT_RANGE='47ba2c3...ee50c9e' $0"
      exit ${EXIT_CODE}
      ;;
  esac
done
# Default the range: the last N commits when $1 is given, otherwise
# everything since the merge-base with master.
if [ -z "${COMMIT_RANGE}" ]; then
  if [ -n "$1" ]; then
    COMMIT_RANGE="HEAD~$1...HEAD"
  else
    # This assumes that the target branch of the pull request will be master.
    MERGE_BASE=$(git merge-base HEAD master)
    COMMIT_RANGE="$MERGE_BASE..HEAD"
  fi
fi
# For each commit in range, line 2 of the raw message must be empty
# (a blank line separating subject from body).  Process substitution keeps
# the EXIT_CODE updates in this shell rather than a subshell.
while IFS= read -r commit_hash || [[ -n "$commit_hash" ]]; do
  n_line=0
  while IFS= read -r line || [[ -n "$line" ]]; do
    n_line=$((n_line+1))
    length=${#line}
    if [ $n_line -eq 2 ] && [ $length -ne 0 ]; then
      echo "The subject line of commit hash ${commit_hash} is followed by a non-empty line. Subject lines should always be followed by a blank line."
      EXIT_CODE=1
    fi
  done < <(git log --format=%B -n 1 "$commit_hash")
done < <(git log "${COMMIT_RANGE}" --format=%H)
exit ${EXIT_CODE}
|
package simulator.model;
import java.util.List;
import simulator.model.Body;
/**
 * GravityLaws implementation that applies no force at all: bodies keep
 * whatever velocity they already have.
 */
public class NoGravity implements GravityLaws {

    @Override
    public void apply(List<Body> bodies) {
        // Intentionally empty: "no gravity" means no acceleration is applied.
        // (Replaced the leftover auto-generated TODO stub comment.)
    }

    ////////////////////////////////////////////
    /////////////// Practica 5 /////////////////
    ////////////////////////////////////////////

    // FIX: @Override was missing, so a signature typo would silently have
    // created an overload instead of overriding Object.toString().
    @Override
    public String toString() {
        return "No Gravity";
    }
}
|
"""
You're given a number n. Can you write a method sumOfAllPrimes that finds all prime numbers smaller than or equal to n, and returns a sum of them?
For example, we're given the number 15. All prime numbers smaller than 15 are:
2, 3, 5, 7, 11, 13
They sum up to 41, so sumOfAllPrimes(15) would return 41.
"""
def isPrime(n):
    """Return True when n is a prime number.

    BUG FIX: the previous version only trial-divided by 2, 3, 5 and 7, so any
    composite whose smallest prime factor is >= 11 (e.g. 121 = 11*11,
    143 = 11*13) was misreported as prime.  This version divides by 2 and 3,
    then by every 6k +/- 1 candidate up to sqrt(n), which is exact for all n.
    """
    if n < 2:
        return False
    if n in (2, 3):
        return True
    if n % 2 == 0 or n % 3 == 0:
        return False
    # All remaining primes have the form 6k +/- 1.
    divisor = 5
    while divisor * divisor <= n:
        if n % divisor == 0 or n % (divisor + 2) == 0:
            return False
        divisor += 6
    return True
def sumOfAllPrimes(num):
    """Return the sum of all primes <= num.

    Uses a self-contained Sieve of Eratosthenes (O(n log log n)) instead of
    per-number trial division, which also removes the dependency on isPrime.
    Keeps the original contract of returning False for num <= 1 (note that
    False == 0 numerically, so arithmetic callers are unaffected).
    """
    if num <= 1:
        return False
    sieve = [True] * (num + 1)
    sieve[0] = sieve[1] = False
    for candidate in range(2, int(num ** 0.5) + 1):
        if sieve[candidate]:
            # Start at candidate^2: smaller multiples were already crossed off.
            for multiple in range(candidate * candidate, num + 1, candidate):
                sieve[multiple] = False
    return sum(value for value, is_prime in enumerate(sieve) if is_prime)


if __name__ == '__main__':
    print(sumOfAllPrimes(55))
<filename>pkg/controller/add_searchheadcluster.go
package controller
import (
"github.com/splunk/splunk-operator/pkg/controller/searchheadcluster"
)
// init registers the searchheadcluster controller factory so the operator's
// manager bootstrap picks it up alongside the other controllers.
func init() {
	// AddToManagerFuncs is a list of functions to create controllers and add them to a manager.
	AddToManagerFuncs = append(AddToManagerFuncs, searchheadcluster.Add)
}
|
<filename>client-common/src/test/java/org/apache/livy/client/common/TestHttpMessages.java
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.livy.client.common;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Test;
import static org.junit.Assert.*;
import org.apache.livy.JobHandle.State;
public class TestHttpMessages {
/**
 * Tests that all defined messages can be serialized and deserialized using Jackson.
 */
@Test
public void testMessageSerialization() throws Exception {
    ObjectMapper mapper = new ObjectMapper();

    // Iterate over every concrete message type nested inside HttpMessages.
    for (Class<?> msg : HttpMessages.class.getClasses()) {
        if (msg.isInterface()) {
            continue;
        }

        String name = msg.getSimpleName();

        // Instantiate via the first public constructor, feeding a dummy
        // value for each declared parameter type.
        Constructor c = msg.getConstructors()[0];
        Object[] params = new Object[c.getParameterTypes().length];
        Type[] genericTypes = c.getGenericParameterTypes();
        for (int i = 0; i < params.length; i++) {
            params[i] = dummyValue(c.getParameterTypes()[i], genericTypes[i]);
        }

        // Round-trip through Jackson and compare every public field.
        Object o1 = c.newInstance(params);
        byte[] serialized = mapper.writeValueAsBytes(o1);
        Object o2 = mapper.readValue(serialized, msg);

        assertNotNull("could not deserialize " + name, o2);
        for (Field f : msg.getFields()) {
            checkEquals(name, f, o1, o2);
        }
    }
}
@Test(expected=IllegalArgumentException.class)
public void testJobStatusResultBadState() {
new HttpMessages.JobStatus(0L, State.QUEUED, new byte[1], null);
}
@Test(expected=IllegalArgumentException.class)
public void testJobStatusErrorBadState() {
new HttpMessages.JobStatus(0L, State.QUEUED, null, "An Error");
}
private Object dummyValue(Class<?> klass, Type type) {
switch (klass.getSimpleName()) {
case "int":
case "Integer":
return 42;
case "long": return 84L;
case "byte[]": return new byte[] { (byte) 0x42, (byte) 0x84 };
case "String": return "test";
case "State": return State.SUCCEEDED;
case "Map":
Map<String, String> map = new HashMap<>();
map.put("dummy1", "dummy2");
return map;
case "List":
Class<?> genericType = getGenericArgType(type);
return Arrays.asList(dummyValue(genericType, null), dummyValue(genericType, null));
default: throw new IllegalArgumentException("FIX ME: " + klass.getSimpleName());
}
}
private Class<?> getGenericArgType(Type type) {
assertNotNull("FIX ME: null type argument.", type);
ParameterizedType ptype = (ParameterizedType) type;
assertEquals("FIX ME: no support for multiple type arguments.",
1, ptype.getActualTypeArguments().length);
Type argType = ptype.getActualTypeArguments()[0];
assertTrue("FIX ME: type argument is not a class.", argType instanceof Class);
return (Class<?>) argType;
}
private void checkEquals(String name, Field f, Object o1, Object o2) throws Exception {
Object v1 = f.get(o1);
Object v2 = f.get(o2);
boolean match;
if (!f.getType().isArray()) {
match = v1.equals(v2);
} else if (v1 instanceof byte[]) {
match = Arrays.equals((byte[]) v1, (byte[]) v2);
} else {
throw new IllegalArgumentException("FIX ME: " + f.getType().getSimpleName());
}
assertTrue(
String.format("Field %s of %s does not match after deserialization.", f.getName(), name),
match);
}
}
|
/**
 * Lomuto partition step for the visualized quicksort: partitions
 * array[left..right] around array[right], animating the bars while it
 * compares and swaps. Returns the pivot's final index.
 */
const partition = async (array, left, right) => {
  const pivotValue = array[right];
  let storeIndex = left - 1;
  // Highlight the pivot bar in red for the duration of the pass.
  const pivotBar = document.getElementById(`bar-${right}`);
  pivotBar.style.background = "red";
  for (let scan = left; scan < right; scan++) {
    if (array[scan] < pivotValue) {
      storeIndex += 1;
      const leftBar = document.getElementById(`bar-${storeIndex}`);
      const rightBar = document.getElementById(`bar-${scan}`);
      // Yellow marks the pair about to be swapped.
      leftBar.style.background = "yellow";
      rightBar.style.background = "yellow";
      await delayAlgo();
      // Swap the values and mirror the swap in the rendered bar heights.
      [array[storeIndex], array[scan]] = [array[scan], array[storeIndex]];
      [leftBar.style.height, rightBar.style.height] =
        [rightBar.style.height, leftBar.style.height];
      // Orange marks elements that have been processed.
      leftBar.style.background = "orange";
      rightBar.style.background = "orange";
    }
  }
  // Move the pivot into its final slot (storeIndex + 1).
  storeIndex += 1;
  const finalBar = document.getElementById(`bar-${storeIndex}`);
  finalBar.style.background = "yellow";
  await delayAlgo();
  [array[storeIndex], array[right]] = [array[right], array[storeIndex]];
  [finalBar.style.height, pivotBar.style.height] =
    [pivotBar.style.height, finalBar.style.height];
  finalBar.style.background = "orange";
  pivotBar.style.background = "orange";
  return storeIndex;
};
/** Recursive quicksort driver over array[left..right]. */
const callAlgo2 = async (array, left, right) => {
  // Base case: ranges of fewer than two elements are already sorted.
  if (left >= right) {
    return;
  }
  const pivotIndex = await partition(array, left, right);
  await callAlgo2(array, left, pivotIndex - 1);
  await callAlgo2(array, pivotIndex + 1, right);
};
/** Entry point: quicksorts the module-level `array` end to end. */
const quickSort = async () => {
  const lastIndex = array.length - 1;
  await callAlgo2(array, 0, lastIndex);
};
|
// Aggregates the token validators exposed by this package into one
// module surface.
const CognitoValidator = require('./services/cognito')
const jwtValidator = require('./jwtValidator')
const FirebaseValidator = require('./services/firebase')
// Earlier single-validator export variants, kept here for reference:
// module.default = jwtValidator
// module.exports = jwtValidator
// Named exports so consumers pick the validator matching their provider.
module.exports = {
  jwtValidator,
  CognitoValidator,
  FirebaseValidator
}
#!/bin/bash
# Installs the Google Cloud SDK via Google's install script, then runs
# the interactive first-time configuration.
#
# Fail fast: without pipefail a failed download still piped a partial
# (or empty) script into bash and the script continued to `gcloud init`.
set -euo pipefail

curl https://sdk.cloud.google.com | bash
gcloud init
<gh_stars>1-10
// NOTE(review): this file is compiled output (tslib helpers and the
// source-map pointer below); prefer editing the TypeScript source.
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var tslib_1 = require("tslib");
var util_1 = require("@antv/util");
var base_1 = require("./base");
// Line grid component: extends the base class from ./base.
var Line = /** @class */ (function (_super) {
    tslib_1.__extends(Line, _super);
    function Line() {
        return _super !== null && _super.apply(this, arguments) || this;
    }
    // Merge the base defaults with this component's `type: 'line'` tag.
    Line.prototype.getDefaultCfg = function () {
        var cfg = _super.prototype.getDefaultCfg.call(this);
        return tslib_1.__assign(tslib_1.__assign({}, cfg), { type: 'line' });
    };
    // Build a path command list through the supplied points: "M" (move-to)
    // for the first point, "L" (line-to) for every subsequent point.
    Line.prototype.getGridPath = function (points) {
        var path = [];
        util_1.each(points, function (point, index) {
            if (index === 0) {
                path.push(['M', point.x, point.y]);
            }
            else {
                path.push(['L', point.x, point.y]);
            }
        });
        return path;
    };
    return Line;
}(base_1.default));
exports.default = Line;
//# sourceMappingURL=line.js.map
#!/usr/bin/env bash
#Author: Jiri Brejcha, jirka@jiribrejcha.net
#Sends current WLAN Pi IP address and other useful details to you in a Telegram message. Requires internet connection.

#Collect all data
# Link speed/duplex via ethtool; both evaluate to "disconnected" when no
# link is detected.
ETH0SPEED=$(ethtool eth0 2>/dev/null | grep -q "Link detected: yes" && ethtool eth0 2>/dev/null | grep "Speed" | sed 's/....$//' | cut -d ' ' -f2 || echo "disconnected")
ETH0DUPLEX=$(ethtool eth0 2>/dev/null | grep -q "Link detected: yes" && ethtool eth0 2>/dev/null | grep "Duplex" | cut -d ' ' -f 2 || echo "disconnected")
HOSTNAME=$(hostname)
# Human-readable uptime with the leading "up " stripped.
UPTIME=$(uptime -p | cut -c4-)
MODE=$(cat /etc/wlanpi-state)
# Primary (non-secondary) IPv4 address on eth0, if any.
ETH0IP=$(ip a | grep "eth0" | grep "inet" | grep -v "secondary" | head -n1 | cut -d '/' -f1 | cut -d ' ' -f6)
# Interface that carries the default route, and its IPv4 address.
UPLINK=$(ip route show | grep "default via" | cut -d " " -f5)
UPLINKIP=$(ip a | grep "$UPLINK" | grep "inet" | grep -v "secondary" | head -n1 | cut -d '/' -f1 | cut -d ' ' -f6)
# LLDP neighbour details if captured; newlines become %0A (the URL-encoded
# line break Telegram expects).
NEIGHBOUR=$(grep -q "Name:" /tmp/lldpneigh.txt 2>/dev/null && cat /tmp/lldpneigh.txt | sed 's/^Name:/Connected to:/g' | sed 's/^Desc:/Port description:/g' | sed 's/^IP:/Neighbour IP:/g' | sed -z 's/\n/%0A/g')
if [ -z "$NEIGHBOUR" ]; then
# Fall back to CDP neighbour data when no LLDP neighbour was seen.
NEIGHBOUR=$(grep -q "Name:" /tmp/cdpneigh.txt 2>/dev/null && cat /tmp/cdpneigh.txt | sed 's/^Name:/Connected to:/g' | sed 's/^Port:/Port description:/g' | sed 's/^IP:/Neighbour IP:/g' |sed 's/^SW:/Software version:/g' | sed -z 's/\n/%0A/g')
fi

#Get public IP data
# Query ifconfig.co with a 3-second timeout; fields parsed with jq below.
DATAINJSON=$(timeout 3 curl -s 'ifconfig.co/json')
PUBLICIP=$(echo "$DATAINJSON" | jq -r '.ip')
PUBLICIPCOUNTRY=$(echo "$DATAINJSON" | jq -r '.country')
PUBLICIPASNORG=$(echo "$DATAINJSON" | jq -r '.asn_org')
PUBLICIPHOSTNAME=$(echo "$DATAINJSON" | jq -r '.hostname')
PUBLICIPASN=$(echo "$DATAINJSON" | jq -r '.asn')
# Prefer the eth0 address for the links in the message; fall back to the
# uplink interface's address when eth0 has none.
if [ -z "$ETH0IP" ]; then
CURRENTIP="$UPLINKIP"
else
CURRENTIP="$ETH0IP"
fi
#Compose the message
# Telegram HTML-mode message; %0A is a URL-encoded newline and the
# leading %f0%9f%9f%a2 is the URL-encoded green-circle emoji.
TEXT=""
TEXT+="%f0%9f%9f%a2 <b>$HOSTNAME is now online</b> %0A"
if [ "$ETH0IP" ]; then
    TEXT+="Eth0 IP address: <code>$ETH0IP</code> %0A"
fi
if [[ "$ETH0SPEED" == "disconnected" ]]; then
    TEXT+="Eth0 is down %0A"
else
    TEXT+="Eth0 speed: $ETH0SPEED %0A"
    TEXT+="Eth0 duplex: $ETH0DUPLEX %0A"
fi
TEXT+="WLAN Pi mode: $MODE %0A"
TEXT+="Uptime: $UPTIME %0A"
if [ ! -z "$NEIGHBOUR" ]; then
    TEXT+="%0A"
    TEXT+="$NEIGHBOUR"
fi
TEXT+="%0A"
TEXT+="Uplink to internet: $UPLINK %0A"
if [[ "$UPLINK" != "eth0" ]]; then
    TEXT+="Local $UPLINK IP address: $UPLINKIP %0A"
fi
TEXT+="Public IP: <code>$PUBLICIP</code>, <code>$PUBLICIPHOSTNAME</code> %0A"
if [ ! -z "$CURRENTIP" ]; then
    TEXT+="%0A"
    TEXT+="Web interface: http://$CURRENTIP %0A"
    #TEXT+="Web console: https://$CURRENTIP:9090 %0A"
    TEXT+="SSH: <code>ssh://wlanpi@$CURRENTIP</code> %0A"
    #TEXT+="Copy file to TFTP server: copy flash:filename tftp://$CURRENTIP %0A"
fi
# Quote the expansion: the original unquoted `echo $TEXT` underwent word
# splitting, collapsing any run of whitespace inside the composed message.
echo "$TEXT"
#!/tools/bin/bash -ev
set +h
# Linux From Scratch - 7.7
# Chapter 6: Installing Basic System Software
# Section 2: Preparing Virtual Kernel File Systems
# Part 2: Mount Virtual Filesystems
#####################################################################
# Pull in $LFS (target root) from the shared build profile.
source ../lfs_profile
# Bind-mount the host's /dev so device nodes exist inside the chroot.
mount -v --bind /dev $LFS/dev
# Pseudo-terminal, process, sysfs and tmpfs mounts expected by the
# target system's userspace.
mount -vt devpts devpts $LFS/dev/pts -o gid=5,mode=620
mount -vt proc proc $LFS/proc
mount -vt sysfs sysfs $LFS/sys
mount -vt tmpfs tmpfs $LFS/run
# If /dev/shm is a symlink, create the directory it points at instead.
if [ -h $LFS/dev/shm ]; then
mkdir -pv $LFS/$(readlink $LFS/dev/shm)
fi
#####################################################################
#
# Any time LFS setup is stopped and later resumed by rebooting, etc.,
# run 6.02.2 and 6.04 again. Alternatively run mountchroot.sh until
# Chapter 7, after which you should use mountchroot2.sh.
#
# Proceed to chroot by running 6.04_chroot.sh
# (Section 3 is just notes on Package Management)
#
#####################################################################
<?php
$arr = [4, 3, 5, 1, 7, 10];

/**
 * Return the largest element of a non-empty array.
 *
 * @param array $arr non-empty array of comparable values
 * @return mixed the maximum element
 */
function maxVal($arr){
    $max = $arr[0];
    // Hoist count() out of the loop: the original re-evaluated it on
    // every iteration of the condition.
    $len = count($arr);
    for ($i = 1; $i < $len; $i++){
        if ($arr[$i] > $max){
            $max = $arr[$i];
        }
    }
    return $max;
}

echo maxVal($arr);
?>
# Parse raw sentences with EasyCCG and score the output against a gold
# dependency bank. Usage: <script> EVAL_FOLDER MODEL_FOLDER MODEL_NAME
EVAL_FOLDER=$1
MODEL=$2
NAME=$3
# Validate arguments and environment before doing any work.
if [ "$#" -ne 3 ]; then
  echo "Wrong number of parameters. Expected: EVAL_FOLDER MODEL_FOLDER MODEL_NAME "
  exit 1;
fi
if [ -z "$CANDC" ]; then
  echo "Need to set CANDC variable to point at the C&C parser folder"
  exit 1
fi
# 1) Parse the raw gold sentences into CCG derivations (.auto).
java -jar easyccg.jar -m $MODEL -f $EVAL_FOLDER/gold.raw > $EVAL_FOLDER/$NAME.auto -l 200 -i tokenized
# 2) Convert derivations to grammatical relations with the C&C tools.
$CANDC/src/scripts/ccg/./get_grs_from_auto $EVAL_FOLDER/$NAME.auto experiments/genia/markedup_sd-1.00 $EVAL_FOLDER/$NAME.grs
# 3) Map the GRs to Stanford-dependency format.
python $EVAL_FOLDER/grs2sd-1.00 --ccgbank $EVAL_FOLDER/$NAME.grs> $EVAL_FOLDER/$NAME.depbank
# 4) Drop lines flagged "delme", rewriting the file in place.
cat $EVAL_FOLDER/$NAME.depbank | grep -v "delme" > $EVAL_FOLDER/$NAME.depbank2
mv $EVAL_FOLDER/$NAME.depbank2 $EVAL_FOLDER/$NAME.depbank
# 5) Score the produced dependencies against the gold standard.
python2.7 $EVAL_FOLDER/eval_test.py $EVAL_FOLDER/gold.raw $EVAL_FOLDER/gold.deps $EVAL_FOLDER/$NAME.depbank > $EVAL_FOLDER/$NAME.eval
|
<gh_stars>0
/*************************************************************
** Program name: randomOrderUniqueElementGenerator.hpp
** Author: <NAME>
** Date: 6/1/2017
** Description: Function prototype for
randomOrderUniqueElementGenerator (final
project CS 162).
**************************************************************/
#ifndef randomOrderUniqueElementGenerator_hpp
#define randomOrderUniqueElementGenerator_hpp
#include <vector>
using std::vector;
vector<int>& randomOrderUniqueElementGenerator
(vector<int>& container, int max, int min,
int numberOfElements);
#endif /* randomOrderUniqueElementGenerator_hpp */ |
#include <stdio.h>

// Function prototypes
void displayMenu();

/*
 * Menu loop: shows the menu, reads one-character selections, and exits
 * when the user chooses quit.
 */
int main()
{
    char selection;

    // Display the main menu
    displayMenu();

    // Get the user selection. The leading space in " %c" skips any
    // pending whitespace (including the '\n' left in stdin by a prior
    // entry); with plain "%c" the loop consumed that newline and
    // redisplayed the menu after every real selection.
    printf("Enter your selection: ");
    scanf(" %c", &selection);

    // Loop until the user quits. Accept both 'q' and 'Q' to match the
    // menu text ("Q - Quit"); the original only honored lowercase 'q'.
    while (selection != 'q' && selection != 'Q')
    {
        switch (selection)
        {
        default:
            // Unrecognized choice: show the menu again.
            displayMenu();
            break;
        }

        // Get the next selection.
        printf("Enter your selection: ");
        scanf(" %c", &selection);
    }

    return 0;
}

// Displays the main menu
void displayMenu()
{
    printf("\nMenu\n");
    printf("====\n");
    printf("A - Option 1\n");
    printf("B - Option 2\n");
    printf("Q - Quit\n\n");
}
#!/bin/bash
# CMake build driver.
#   --clean   remove the build tree before configuring
#
# Abort on the first failing command: previously a failed `cd` or
# `cmake` configure still let `make` run in the wrong state.
set -eu

# ${1:-} keeps the no-argument case valid under `set -u`.
if [ "${1:-}" = "--clean" ]; then
    echo "Cleaning build directory..."
    rm -rf build
fi

echo "Starting build process..."
mkdir -p dist build
cd build
cmake -DCMAKE_INSTALL_PREFIX="../dist" ..
make
import java.time.LocalDateTime;
import java.util.Map;
public class GameManagementSystem {
public void removeInactiveGames(Map<String, Game> activeGames, int gameExpiryTimeInHours) {
LocalDateTime currentTime = LocalDateTime.now();
// Create a list to store the IDs of inactive games
List<String> inactiveGameIds = new ArrayList<>();
// Iterate through the activeGames collection
for (Game game : activeGames.values()) {
if (game.getLastMoveTime().isBefore(currentTime.minusHours(gameExpiryTimeInHours))) {
// Add the ID of the inactive game to the list
inactiveGameIds.add(game.getId());
}
}
// Remove the inactive games from the activeGames collection
for (String gameId : inactiveGameIds) {
activeGames.remove(gameId);
}
}
} |
package com.packagename.myapp.spring.menu.item.component;
import java.util.Objects;
import java.util.Optional;

import com.vaadin.flow.component.icon.Icon;
/**
 * Icon-based togglable action component: wraps an {@link Icon} and
 * switches between two CSS class names as the toggle state changes.
 */
public class TogglableActionIcon extends TogglableActionComponent<Icon>
{
    private static final long serialVersionUID = 9058607056242901365L;

    // CSS class applied while the toggle is enabled.
    private final String toggleEnableClassName;
    // CSS class applied while the toggle is disabled.
    private final String toggleDisableClassName;

    /**
     * @param toggleEnableClassName  CSS class for the enabled state; must not be null
     * @param toggleDisableClassName CSS class for the disabled state; must not be null
     * @param component              the icon this component wraps
     * @param toggleAction           action to run on each toggle
     * @throws NullPointerException if either class name is null
     */
    public TogglableActionIcon(String toggleEnableClassName, String toggleDisableClassName,
        Icon component, Runnable toggleAction)
    {
        super(component, toggleAction);
        // The original used Optional.ofNullable(x).get() (plus stray
        // ";;"), which throws an unhelpful NoSuchElementException on
        // null; requireNonNull fails fast with a descriptive message.
        this.toggleEnableClassName = Objects.requireNonNull(toggleEnableClassName, "toggleEnableClassName");
        this.toggleDisableClassName = Objects.requireNonNull(toggleDisableClassName, "toggleDisableClassName");
    }

    @Override
    public String getToggleEnableClassName()
    {
        return toggleEnableClassName;
    }

    @Override
    public String getToggleDisableClassName()
    {
        return toggleDisableClassName;
    }
}
|
import random

# Print ten pseudo-random integers drawn uniformly from [0, 50].
for _unused in range(10):
    value = random.randint(0, 50)
    print(value)
<filename>models/xmlTypes.go
package models
import (
"encoding/xml"
//"reflect"
//"fmt"
//"strconv"
)
type Terminal struct {
Name string `xml:"name,attr"`
Class string `xml:"class,attr"`
Value string `xml:",chardata"`
} //todo move to XML processing, make private
//-------------------------------------------------------------
type DataType string
const (
ST_SEQ DataType = "seq"
ST_RULE = "rule"
ST_TERM = "term"
ST_CLASS = "class"
)
func (se *StringElement) Type() DataType {
switch DataType(se.XMLName.Local) {
case ST_SEQ:
return ST_SEQ
case ST_RULE:
return ST_RULE
case ST_TERM:
return ST_TERM
case ST_CLASS:
return ST_CLASS
}
return DataType("NO TYPE")
}
type StringElement struct {
XMLName xml.Name
Choises bool `xml:"choices,attr"`
Iterative bool `xml:"iterative,attr"`
Optional bool `xml:"optional,attr"`
Value string `xml:",chardata"`
Words []StringElement `xml:",any"`
}
type Rule struct {
XMLName xml.Name `xml:"RULE"`
Name string `xml:"name,attr"`
TopWord StringElement `xml:"seq"` //TODO как в XMLRoot Rules
//TermNames []string `xml:"term"`
}
type XMLRoot struct {
XMLName xml.Name `xml:"ROOT"`
Terms []Terminal `xml:"TERMINALS>TERM"`
Rules []Rule `xml:"RULES>RULE"`
}
|
#!/bin/sh
#
# Jailhouse, a Linux-based partitioning hypervisor
#
# Copyright (c) Siemens AG, 2018
#
# Authors:
#  Jan Kiszka <jan.kiszka@siemens.com>
#
# SPDX-License-Identifier: MIT
#

# Print usage and exit; reached on a missing or unknown architecture.
usage()
{
	echo "Usage: $0 ARCHITECTURE [QEMU_OPTIONS]"
	echo -e "\nSet QEMU_PATH environment variable to use a locally " \
	     "built QEMU version"
	exit 1
}

# Normalize QEMU_PATH so it can be prefixed directly to the binary name.
if [ -n "${QEMU_PATH}" ]; then
	QEMU_PATH="${QEMU_PATH}/"
fi

# Pick the QEMU binary, machine options and kernel command line for the
# requested target architecture.
case "$1" in
x86|x86_64|amd64)
	DISTRO_ARCH=amd64
	QEMU=qemu-system-x86_64
	QEMU_EXTRA_ARGS=" \
		-cpu host,-kvm-pv-eoi,-kvm-pv-ipi,-kvm-asyncpf,-kvm-steal-time,-kvmclock \
		-smp 4 \
		-enable-kvm -machine q35,kernel_irqchip=split \
		-serial vc \
		-device ide-hd,drive=disk \
		-device intel-iommu,intremap=on,x-buggy-eim=on \
		-device intel-hda,addr=1b.0 -device hda-duplex \
		-device e1000e,addr=2.0,netdev=net"
	# memmap reserves the region Jailhouse will take over later.
	KERNEL_CMDLINE=" \
		root=/dev/sda intel_iommu=off memmap=82M\$0x3a000000 \
		vga=0x305"
	;;
arm64|aarch64)
	DISTRO_ARCH=arm64
	QEMU=qemu-system-aarch64
	QEMU_EXTRA_ARGS=" \
		-cpu cortex-a57 \
		-smp 16 \
		-machine virt,gic-version=3,virtualization=on \
		-device virtio-serial-device \
		-device virtconsole,chardev=con -chardev vc,id=con \
		-device virtio-blk-device,drive=disk \
		-device virtio-net-device,netdev=net"
	KERNEL_CMDLINE=" \
		root=/dev/vda mem=768M"
	;;
""|--help)
	usage
	;;
*)
	echo "Unsupported architecture: $1"
	exit 1
	;;
esac

# Locate the demo image built by the Isar/bitbake tree for this arch.
IMAGE_PREFIX="$(dirname $0)/build/tmp/deploy/images/qemu-${DISTRO_ARCH}/demo-image-jailhouse-demo-qemu-${DISTRO_ARCH}"
IMAGE_FILE=$(ls ${IMAGE_PREFIX}.ext4.img)

# Drop the architecture argument; everything else is passed to QEMU.
shift 1

${QEMU_PATH}${QEMU} \
	-drive file=${IMAGE_FILE},discard=unmap,if=none,id=disk,format=raw \
	-m 1G -serial mon:stdio -netdev user,id=net \
	-kernel ${IMAGE_PREFIX}-vmlinuz -append "${KERNEL_CMDLINE}" \
	-initrd ${IMAGE_PREFIX}-initrd.img ${QEMU_EXTRA_ARGS} "$@"
|
# Print a 10x10 multiplication table: each product followed by a tab,
# one row per line.
for row in range(1, 11):
    line = ''.join(str(row * col) + '\t' for col in range(1, 11))
    print(line)
# xbps-src cross-build profile for 32-bit PowerPC, tuned for the G4.
XBPS_TARGET_CFLAGS="-mtune=G4"
# C++ and Fortran reuse the C flags.
XBPS_TARGET_CXXFLAGS="$XBPS_TARGET_CFLAGS"
XBPS_TARGET_FFLAGS="$XBPS_TARGET_CFLAGS"
XBPS_TRIPLET="powerpc-linux-gnu"
XBPS_RUST_TARGET="powerpc-unknown-linux-gnu"
|
#!/bin/zsh
# Grid Engine job: builds the TopGuNN word index.
#$ -cwd
#$ -N TopGuNN_create_word_index
#$ -l h=nlpgrid10
#$ -l h_vmem=50G
# NOTE: the shebang must be the first line of the file — the original
# placed `source` above it, so the shebang (and potentially the #$
# directives) were not honored. Activate the virtualenv first, then run.
source ~/annoy_cpu/bin/activate
python3 -u code/create_word_index.py \
    -outDir 'betatest/out/' \
    > betatest/out/create_word_index.stdout 2>&1
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package brooklyn.entity.proxying;
import java.util.Map;
import brooklyn.entity.basic.EntityLocal;
/**
 * Instances of this class supply logic which can be used to initialize entities.
 * These can be added to an {@link EntitySpec} programmatically, or declared as part
 * of YAML recipes in a <code>brooklyn.initializers</code> section.
 * In the case of the latter, implementing classes should define a no-arg constructor
 * or a {@link Map} constructor so that YAML parameters can be supplied.
 **/
public interface EntityInitializer {

    /** Applies initialization logic to a just-built entity.
     * Invoked immediately after the "init" call on the AbstractEntity constructed.
     *
     * @param entity guaranteed to be the actual implementation instance,
     *   thus guaranteed to be castable to EntityInternal which is often desired,
     *   or to the type at hand (it is not even a proxy)
     */
    // NOTE(review): implementations should be idempotent where possible;
    // whether apply() may be re-invoked on rebind is not specified here —
    // confirm against the framework's entity lifecycle documentation.
    public void apply(EntityLocal entity);
}
|
#!/bin/bash
# Builds a minimal Gentoo-based root image with Erlang and Riak
# monitoring helpers, staged under ${DEST}.
#
# Fix: the original `set -e eu -x` enabled only errexit and then
# assigned "eu" and "-x" as positional parameters; `set -eu -x` is the
# evident intent (errexit, nounset, xtrace).
set -eu -x

DEST="/tmp/portage-root"

source /lib/gentoo/functions.sh
source /etc/portage/make.conf

# Runtime library path of the active gcc, mirrored into the image below.
GCC_LDPATH="$(gcc-config -L)"

# Build Erlang
echo dev-lang/erlang::rbkmoney ~amd64 >> /etc/portage/package.accept_keywords/erlang
emerge -t =dev-lang/erlang-22.3.4.21::rbkmoney
quickpkg --include-config=y sys-libs/glibc sys-libs/timezone-data \
    sys-apps/debianutils sys-libs/zlib net-misc/curl

# Build image
mkdir -p "${DEST}"/{etc,run,var,lib64,usr/lib64}/
ln -s /run "${DEST}/var/run"
ln -s /lib64 "${DEST}/lib"
ln -s /usr/lib64 "${DEST}/usr/lib64"
echo 'Europe/Moscow' > "${DEST}"/etc/timezone
export USE=unconfined
# All emerges below install into the image root, not the host.
export ROOT="${DEST}"
emerge --getbinpkgonly sys-libs/glibc sys-libs/timezone-data
emerge -t sys-libs/zlib net-libs/libmnl dev-libs/elfutils \
    sys-apps/busybox app-shells/bash net-misc/curl
equery s \*

# Link logger to busybox to avoid installing util-linux
ln -s -f /bin/busybox "${DEST}/usr/bin/logger"
mkdir -p "$(dirname "${DEST}${GCC_LDPATH}")"
cp -r "${GCC_LDPATH}" "${DEST}${GCC_LDPATH}"
cp /etc/ld.so.conf.d/05gcc-x86_64-pc-linux-gnu.conf \
    "${DEST}/etc/ld.so.conf.d/05gcc-x86_64-pc-linux-gnu.conf"
ldconfig -r "${DEST}"

# Install Nagios Scripts for monitoring Riak
emerge net-analyzer/riak_nagios
rm -rf "${DEST}/var/cache/edb"/*
|
Advanced Bash-Scripting Guide:
Prev Next
Chapter 21. Subshells
Running a shell script launches a new process, a subshell.
Definition: A subshell is a child process launched by a shell (or shell script).
A subshell is a separate instance of the command processor -- the shell that gives you the prompt at the console or in an xterm window. Just as your commands are interpreted at the command-line prompt, similarly does a script batch-process a list of commands. Each shell script running is, in effect, a subprocess (child process) of the parent shell.
A shell script can itself launch subprocesses. These subshells let the script do parallel processing, in effect executing multiple subtasks simultaneously.
#!/bin/bash
# subshell-test.sh
(
# Inside parentheses, and therefore a subshell . . .
while [ 1 ] # Endless loop.
do
echo "Subshell running . . ."
done
)
# Script will run forever,
#+ or at least until terminated by a Ctl-C.
exit $? # End of script (but will never get here).
Now, run the script:
sh subshell-test.sh
And, while the script is running, from a different xterm:
ps -ef | grep subshell-test.sh
UID PID PPID C STIME TTY TIME CMD
500 2698 2502 0 14:26 pts/4 00:00:00 sh subshell-test.sh
500 2699 2698 21 14:26 pts/4 00:00:24 sh subshell-test.sh
^^^^
Analysis:
PID 2698, the script, launched PID 2699, the subshell.
Note: The "UID ..." line would be filtered out by the "grep" command,
but is shown here for illustrative purposes.
In general, an external command in a script forks off a subprocess, [1] whereas a Bash builtin does not. For this reason, builtins execute more quickly and use fewer system resources than their external command equivalents.
Command List within Parentheses
( command1; command2; command3; ... )
A command list embedded between parentheses runs as a subshell.
Variables in a subshell are not visible outside the block of code in the subshell. They are not accessible to the parent process, to the shell that launched the subshell. These are, in effect, variables local to the child process.
Example 21-1. Variable scope in a subshell
#!/bin/bash
# subshell.sh
echo
echo "We are outside the subshell."
echo "Subshell level OUTSIDE subshell = $BASH_SUBSHELL"
# Bash, version 3, adds the new $BASH_SUBSHELL variable.
echo; echo
outer_variable=Outer
global_variable=
# Define global variable for "storage" of
#+ value of subshell variable.
(
echo "We are inside the subshell."
echo "Subshell level INSIDE subshell = $BASH_SUBSHELL"
inner_variable=Inner
echo "From inside subshell, \"inner_variable\" = $inner_variable"
echo "From inside subshell, \"outer\" = $outer_variable"
global_variable="$inner_variable" # Will this allow "exporting"
#+ a subshell variable?
)
echo; echo
echo "We are outside the subshell."
echo "Subshell level OUTSIDE subshell = $BASH_SUBSHELL"
echo
if [ -z "$inner_variable" ]
then
echo "inner_variable undefined in main body of shell"
else
echo "inner_variable defined in main body of shell"
fi
echo "From main body of shell, \"inner_variable\" = $inner_variable"
# $inner_variable will show as blank (uninitialized)
#+ because variables defined in a subshell are "local variables".
# Is there a remedy for this?
echo "global_variable = "$global_variable"" # Why doesn't this work?
echo
# =======================================================================
# Additionally ...
echo "-----------------"; echo
var=41 # Global variable.
( let "var+=1"; echo "\$var INSIDE subshell = $var" ) # 42
echo "\$var OUTSIDE subshell = $var" # 41
# Variable operations inside a subshell, even to a GLOBAL variable
#+ do not affect the value of the variable outside the subshell!
exit 0
# Question:
# --------
# Once having exited a subshell,
#+ is there any way to reenter that very same subshell
#+ to modify or access the subshell variables?
See also $BASHPID and Example 34-2.
Definition: The scope of a variable is the context in which it has meaning, in which it has a value that can be referenced. For example, the scope of a local variable lies only within the function, block of code, or subshell within which it is defined, while the scope of a global variable is the entire script in which it appears.
Note
While the $BASH_SUBSHELL internal variable indicates the nesting level of a subshell, the $SHLVL variable shows no change within a subshell.
echo " \$BASH_SUBSHELL outside subshell = $BASH_SUBSHELL" # 0
( echo " \$BASH_SUBSHELL inside subshell = $BASH_SUBSHELL" ) # 1
( ( echo " \$BASH_SUBSHELL inside nested subshell = $BASH_SUBSHELL" ) ) # 2
# ^ ^ *** nested *** ^ ^
echo
echo " \$SHLVL outside subshell = $SHLVL" # 3
( echo " \$SHLVL inside subshell = $SHLVL" ) # 3 (No change!)
Directory changes made in a subshell do not carry over to the parent shell.
Example 21-2. List User Profiles
#!/bin/bash
# allprofs.sh: Print all user profiles.
# This script written by Heiner Steven, and modified by the document author.
FILE=.bashrc # File containing user profile,
#+ was ".profile" in original script.
for home in `awk -F: '{print $6}' /etc/passwd`
do
[ -d "$home" ] || continue # If no home directory, go to next.
[ -r "$home" ] || continue # If not readable, go to next.
(cd $home; [ -e $FILE ] && less $FILE)
done
# When script terminates, there is no need to 'cd' back to original directory,
#+ because 'cd $home' takes place in a subshell.
exit 0
A subshell may be used to set up a "dedicated environment" for a command group.
COMMAND1
COMMAND2
COMMAND3
(
IFS=:
PATH=/bin
unset TERMINFO
set -C
shift 5
COMMAND4
COMMAND5
exit 3 # Only exits the subshell!
)
# The parent shell has not been affected, and the environment is preserved.
COMMAND6
COMMAND7
As seen here, the exit command only terminates the subshell in which it is running, not the parent shell or script.
One application of such a "dedicated environment" is testing whether a variable is defined.
if (set -u; : $variable) 2> /dev/null
then
echo "Variable is set."
fi # Variable has been set in current script,
#+ or is an internal Bash variable,
#+ or is present in environment (has been exported).
# Could also be written [[ ${variable-x} != x || ${variable-y} != y ]]
# or [[ ${variable-x} != x$variable ]]
# or [[ ${variable+x} = x ]]
# or [[ ${variable-x} != x ]]
Another application is checking for a lock file:
if (set -C; : > lock_file) 2> /dev/null
then
: # lock_file didn't exist: no user running the script
else
echo "Another user is already running that script."
exit 65
fi
# Code snippet by Stéphane Chazelas,
#+ with modifications by Paulo Marcel Coelho Aragao.
Processes may execute in parallel within different subshells. This permits breaking a complex task into subcomponents processed concurrently.
Example 21-3. Running parallel processes in subshells
(cat list1 list2 list3 | sort | uniq > list123) &
(cat list4 list5 list6 | sort | uniq > list456) &
# Merges and sorts both sets of lists simultaneously.
# Running in background ensures parallel execution.
#
# Same effect as
# cat list1 list2 list3 | sort | uniq > list123 &
# cat list4 list5 list6 | sort | uniq > list456 &
wait # Don't execute the next command until subshells finish.
diff list123 list456
Redirecting I/O to a subshell uses the "|" pipe operator, as in ls -al | (command).
Note
A code block between curly brackets does not launch a subshell.
{ command1; command2; command3; . . . commandN; }
var1=23
echo "$var1" # 23
{ var1=76; }
echo "$var1" # 76
Notes
[1]
An external command invoked with an exec does not (usually) fork off a subprocess / subshell.
Prev Home Next
Applications Up Restricted Shells |
/**
* Gets the primary key column for the provided model.
* @param Model
* @returns {*}
*/
exports.getPrimaryKeyColumn = function getPrimaryKeyColumn (Model) {
var pk = Model.getMeta('primarykey')
if (pk) {
return pk
}
var name = this.getTableName(Model)
var tableSchema = this.getTableSchema(Model)
var primaryKeyColumn = this.metadata.schema.primary_keys[name]
var column = primaryKeyColumn && tableSchema && tableSchema[primaryKeyColumn]
return column && column.COLUMN_NAME
}
|
# Pull the spacemesh state snapshot from GCS into the toolbox home.
toolbox /google-cloud-sdk/bin/gsutil -m cp -r gs://spacemesh/sm/* .
cd /var/lib/toolbox/*/
# Reset local Docker state: stopped containers, orphan volumes, networks.
docker rm $(docker ps -a -q) ; docker volume prune -f ; docker network prune -f
# Launch the metrics stack via the docker/compose container image.
docker run -v /var/run/docker.sock:/var/run/docker.sock -v $PWD:$PWD -w $PWD docker/compose -f docker-compose-metrics.yml up -d
# Poll until Kibana's status endpoint answers before importing objects.
until $(curl --output /dev/null --silent --fail localhost:5601/api/status); do
    sleep 1
done
# Import the prepared Kibana saved objects (filebeat dashboards file).
curl -X POST "localhost:5601/api/saved_objects/_import" -H "kbn-xsrf: true" --form file=@config/kibana/filebeat.ndjson
|
def findPerfectNumbers(limit=20):
    """Return the perfect numbers in the range [2, limit], ascending.

    A perfect number equals the sum of its proper divisors, e.g.
    6 = 1 + 2 + 3. The default ``limit`` of 20 preserves the original
    hard-coded behaviour; pass a larger limit to search further.

    Args:
        limit: inclusive upper bound of the search range.

    Returns:
        list[int]: the perfect numbers found.
    """
    perfect_numbers = []
    for num in range(2, limit + 1):
        # Sum the proper divisors of num (all divisors strictly below num).
        divisor_sum = sum(factor for factor in range(1, num) if num % factor == 0)
        if divisor_sum == num:
            perfect_numbers.append(num)
    return perfect_numbers
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.baeldung.hexagon.service;
import com.baeldung.hexagon.model.Student;
import com.baeldung.hexagon.repository.StudentRepository;
import java.util.List;
/**
 * Default {@link StudentService} implementation that delegates all
 * queries to the injected {@link StudentRepository}.
 */
public class StudentServiceImpl implements StudentService {

    // Set once at construction; the service holds no other state.
    private final StudentRepository repository;

    public StudentServiceImpl(StudentRepository repository) {
        this.repository = repository;
    }

    /** Returns every student known to the underlying repository. */
    @Override
    public List<Student> getAllStudents() {
        return repository.getAllStudents();
    }
}
|
<filename>models/module.py
from backend.database import db


class Module(db.Model):
    """ORM mapping for the "Module" table."""
    __tablename__ = "Module"

    Id = db.Column(db.Integer, primary_key=True)
    Name = db.Column(db.String)
    Description = db.Column(db.String)
    # Presumably the command used to build the module — confirm against
    # the code that executes it.
    BuildCommand = db.Column(db.String)
    # Presumably the working directory/path for the build — confirm.
    BuildLocation = db.Column(db.String)
    # Foreign key into the Language table ("Language.Id").
    LanguageId = db.Column(db.Integer, db.ForeignKey('Language.Id'))
|
-- Fetch the three most recent orders (newest first by date).
SELECT *
FROM orders
ORDER BY date DESC
LIMIT 3;
<filename>code/iaas/auth-logic/src/main/java/io/cattle/platform/iaas/api/auth/integration/internal/rancher/RancherIdentitySearchProvider.java
package io.cattle.platform.iaas.api.auth.integration.internal.rancher;
import io.cattle.platform.api.auth.Identity;
import io.cattle.platform.core.constants.ProjectConstants;
import io.cattle.platform.core.model.Account;
import io.cattle.platform.iaas.api.auth.SecurityConstants;
import io.cattle.platform.iaas.api.auth.dao.AuthDao;
import io.cattle.platform.iaas.api.auth.integration.interfaces.IdentitySearchProvider;
import io.cattle.platform.object.ObjectManager;
import io.github.ibuildthecloud.gdapi.context.ApiContext;
import io.github.ibuildthecloud.gdapi.exception.ClientVisibleException;
import io.github.ibuildthecloud.gdapi.util.ResponseCodes;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.inject.Inject;
/**
 * Identity search provider backed by Rancher's own account database.
 * Active only while external security is disabled (see isConfigured()),
 * in which case local {@link Account} rows act as the identity source.
 */
public class RancherIdentitySearchProvider implements IdentitySearchProvider {

    @Inject
    AuthDao authDao;

    @Inject
    private ObjectManager objectManager;

    /**
     * Searches local accounts by name within the given scope; returns an
     * empty list for scopes this provider does not handle.
     */
    @Override
    public List<Identity> searchIdentities(String name, String scope, boolean exactMatch) {
        if (!isConfigured()){
            notConfigured();
        }
        List<Identity> identities = new ArrayList<>();
        if (!scopes().contains(scope)){
            return identities;
        }
        List<Account> accounts = new ArrayList<>();
        if (exactMatch){
            // getByName may return null; the null is filtered out below.
            accounts.add(authDao.getByName(name));
        } else {
            accounts.addAll(authDao.searchAccounts(name));
        }
        for(Account account: accounts){
            if (account != null) {
                // Expose the API-formatted id rather than the raw DB id.
                String accountId = (String) ApiContext.getContext().getIdFormatter().formatId(objectManager.getType(Account.class), account.getId());
                identities.add(new Identity(ProjectConstants.RANCHER_ID, accountId, account.getName(), null, null, null));
            }
        }
        return identities;
    }

    /** Maps an account to its single Rancher identity (raw id form). */
    @Override
    public Set<Identity> getIdentities(Account account) {
        Set<Identity> identities = new HashSet<>();
        identities.add(new Identity(ProjectConstants.RANCHER_ID, String.valueOf(account.getId()), account.getName(),
                null, null, null));
        return identities;
    }

    /** Convenience overload: searches across every supported scope. */
    public List<Identity> searchIdentities(String name, boolean exactMatch) {
        if (!isConfigured()){
            notConfigured();
        }
        List<Identity> identities = new ArrayList<>();
        for (String scope : scopes()) {
            identities.addAll(searchIdentities(name, scope, exactMatch));
        }
        return identities;
    }

    // Uniform 503 response used whenever this provider is inactive.
    private void notConfigured() {
        throw new ClientVisibleException(ResponseCodes.SERVICE_UNAVAILABLE,
                "RancherIdentityNotConfigured", "Rancher is not configured as an Identity provider.", null);
    }

    /**
     * Resolves a single identity by id (formatted or raw numeric form).
     * Returns null for unknown accounts and for project-kind accounts.
     */
    @Override
    public Identity getIdentity(String id, String scope) {
        if (!isConfigured()){
            notConfigured();
        }
        // parseId returns null when `id` is not a formatted id, in which
        // case the raw value is used directly.
        String accountId = ApiContext.getContext().getIdFormatter().parseId(id);
        Account account = authDao.getAccountById(Long.valueOf(accountId == null ? id : accountId));
        if (account == null || account.getKind().equalsIgnoreCase(ProjectConstants.TYPE)) {
            return null;
        }
        accountId = (String) ApiContext.getContext().getIdFormatter().formatId(objectManager.getType(Account.class), account.getId());
        return new Identity(ProjectConstants.RANCHER_ID, accountId, account.getName(), null, null, null);
    }

    @Override
    public Set<String> scopes() {
        return ProjectConstants.SCOPES;
    }

    /** This provider is active only while external auth security is off. */
    @Override
    public boolean isConfigured() {
        return !SecurityConstants.SECURITY.get();
    }

    @Override
    public String getName() {
        return ProjectConstants.RANCHER_SEARCH_PROVIDER;
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.