text stringlengths 1 1.05M |
|---|
package cn.airpassport.lib.common;
import org.apache.commons.lang3.StringUtils;
/**
* @class LibCommon
*/
/**
 * Shared validation and error-code helpers.
 */
public class LibCommon
{
    /** Valid user ids are strictly greater than this reserved bound. */
    private static final long MIN_USER_ID_EXCLUSIVE = 10000000L;

    /** Required length of a MID string (36 = canonical UUID text length). */
    private static final int MID_LENGTH = 36;

    /**
     * @param userId the user id to check
     * @return true when the id lies above the reserved range
     */
    public static boolean isValidUserId( long userId )
    {
        return userId > MIN_USER_ID_EXCLUSIVE;
    }

    /**
     * @param sMid the MID string to check; may be null
     * @return true when the MID is non-blank and exactly 36 characters long
     */
    public static boolean isValidMid( String sMid )
    {
        if ( isBlank( sMid ) )
        {
            return false;
        }
        return MID_LENGTH == sMid.length();
    }

    /**
     * Builds an error code of the form "class.method.tail" from an object whose
     * class was declared inside the reporting method (typically "new Object(){}").
     *
     * @param oObjectMethod an instance of a class declared inside the reporting method
     * @param sErrorTail    optional trailing code segment; may be blank
     * @return the composed error code; falls back to the class name and/or
     *         sErrorTail when the enclosing method cannot be determined
     */
    public static String calcErrorCode( Object oObjectMethod, String sErrorTail )
    {
        try
        {
            String sClassName = oObjectMethod.getClass().getName();
            // Strip the "$1"-style anonymous-class suffix to get the outer class name.
            int nPosDollarChar = sClassName.indexOf( "$" );
            if ( nPosDollarChar > 0 )
            {
                sClassName = sClassName.substring( 0, nPosDollarChar );
            }
            // BUGFIX: getEnclosingMethod() returns null when the class was not
            // declared inside a method; the previous code relied on the resulting
            // NullPointerException and returned only sErrorTail, losing the class name.
            java.lang.reflect.Method oEnclosingMethod = oObjectMethod.getClass().getEnclosingMethod();
            if ( oEnclosingMethod == null )
            {
                return isBlank( sErrorTail ) ? sClassName
                                             : String.format( "%s.%s", sClassName, sErrorTail );
            }
            String sMethodName = oEnclosingMethod.getName();
            if ( ! isBlank( sErrorTail ) )
            {
                return String.format( "%s.%s.%s", sClassName, sMethodName, sErrorTail );
            }
            return String.format( "%s.%s", sClassName, sMethodName );
        }
        catch ( Exception e )
        {
            // Error-code computation must never break the error reporting itself.
            return sErrorTail;
        }
    }

    /**
     * Stdlib replacement for StringUtils.isBlank with identical semantics
     * (null, empty, or whitespace-only per Character.isWhitespace), removing
     * this class's commons-lang3 dependency.
     */
    private static boolean isBlank( String s )
    {
        if ( s == null )
        {
            return true;
        }
        for ( int i = 0; i < s.length(); i++ )
        {
            if ( ! Character.isWhitespace( s.charAt( i ) ) )
            {
                return false;
            }
        }
        return true;
    }
}
<filename>open-sphere-plugins/stk-terrain/src/main/java/io/opensphere/stkterrain/model/mesh/package-info.java<gh_stars>10-100
/**
* Quantized mesh model classes.
*/
package io.opensphere.stkterrain.model.mesh;
|
package org.osgeo.proj4j;
import java.io.File;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.List;
import junit.framework.TestCase;
import junit.textui.TestRunner;
import org.osgeo.proj4j.io.MetaCRSTestCase;
import org.osgeo.proj4j.io.MetaCRSTestFileReader;
/**
* Runs MetaCRS test files.
*
* @author mbdavis
*
*/
public class MetaCRSTest extends TestCase {

    public static void main(String[] args) {
        TestRunner.run(MetaCRSTest.class);
    }

    static CRSFactory csFactory = new CRSFactory();

    public MetaCRSTest(String name) {
        super(name);
    }

    /** Disabled (x-prefixed) example run against the generic MetaCRS data set. */
    public void xtestMetaCRSExample() throws IOException {
        runTestFile("../../../TestData.csv");
    }

    public void testPROJ4_SPCS() throws IOException {
        runTestFile("../../../PROJ4_SPCS_EPSG_nad83.csv");
    }

    /** Reads every test case from the given CSV resource and executes it. */
    private void runTestFile(String resourceName) throws IOException {
        File file = getFile(resourceName);
        MetaCRSTestFileReader reader = new MetaCRSTestFileReader(file);
        List<MetaCRSTestCase> tests = reader.readTests();
        for (MetaCRSTestCase test : tests) {
            runTest(test);
        }
    }

    /**
     * Resolves a classpath resource to a File.
     *
     * @throws IllegalStateException when the resource URI is malformed. The previous
     *         code printed the stack trace and returned null, deferring the failure
     *         to a confusing NullPointerException at the call site.
     */
    File getFile(String name) {
        try {
            return new File(this.getClass().getResource(name).toURI());
        } catch (URISyntaxException e) {
            throw new IllegalStateException("Cannot resolve test resource: " + name, e);
        }
    }

    /** Executes one test case; Proj4j failures are reported but do not stop the run. */
    void runTest(MetaCRSTestCase crsTest) {
        try {
            crsTest.execute(csFactory);
            crsTest.print(System.out);
        } catch (Proj4jException ex) {
            System.out.println(ex);
        }
    }
}
|
<reponame>J1Mtonic/venus-protocol-interface
// Barrel file for the API layer: re-exports the shared query client and the
// faucet mutation (its default export plus all of its named helpers).
export { default as queryClient } from './queryClient';
export { default as requestFaucetFunds } from './mutations/requestFaucetFunds';
export * from './mutations/requestFaucetFunds';
|
def test_patch(self):
    """Apply a series of single-field PATCH updates to release schedule 1
    and verify each one succeeds and targets the same record."""
    url = reverse('releaseschedule-detail', args=[1])  # release schedule under test
    changes = [
        ('date', '2018-01-01'),
        ('release', 'test-release-0.2'),
        ('sla', 'bug_fixes'),
    ]
    for field, value in changes:
        # PATCH one field at a time and confirm the update is accepted.
        response = self.client.patch(url, {field: value}, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        # The updated object must still be release schedule 1.
        self.assertEqual(response.data['id'], 1)
<filename>src/services/CourseService.js
import axios from 'axios'
// Axios instance shared by all course API calls. The base URL comes from the
// build-time environment; credentials are never sent.
const apiClient = axios.create({
  baseURL: process.env.VUE_APP_COURSE_API_HOST,
  withCredentials: false,
  headers: {
    Accept: 'application/json',
    'Content-Type': 'application/json'
  }
})

export default {
  /** Fetches all courses. */
  getCourses() {
    return apiClient.get('/api/v1/courses')
  },
  /** Fetches the course with the given id. */
  getCourse(id) {
    return apiClient.get('/api/v1/courses/' + id)
  },
  /** Replaces the course with the given id. */
  editCourse(id, data) {
    // BUGFIX: the previous `{ useCredentials: false }` option was a typo for
    // axios' `withCredentials` and was silently ignored; the shared client
    // already disables credentials, so the bogus option is dropped.
    return apiClient.put('/api/v1/courses/' + id, data)
  },
  /** Creates a new course. */
  addCourse(data) {
    return apiClient.post('/api/v1/courses', data)
  }
}
import { createSelector } from 'reselect';
import { worldChampForSeason } from './season.selectors';
// Input selectors: plain accessors over slices of the normalized store.
const selectedYear = state => state.queryOpts.activeSeason; // season the user is viewing
const seasons = state => state.seasons;
const standings = state => state.standings;
const races = state => state.races;
const raceResults = state => state.raceResults;
const drivers = state => state.drivers;
/**
 * Builds the list of races for the selected season, each annotated with its
 * winning driver and whether that driver is the season's world champion.
 * Returns [] while any required slice of state is still missing.
 */
const raceResultsForSeason = createSelector(
  [selectedYear, seasons, standings, races, raceResults, drivers],
  (selectedYear, seasons, standings, races, raceResults, drivers) => {
    let season = seasons[selectedYear];
    if(typeof season === 'undefined') {
      return [];
    }
    if(!Array.isArray(season.Races)) {
      return [];
    }
    let worldChamp = worldChampForSeason(season.season, standings, drivers);
    if(typeof worldChamp === 'undefined') {
      return [];
    }
    return season.Races.map( raceId => {
      let race = races[raceId];
      if(typeof race === 'undefined') {
        return null
      }
      let raceResultId = race.Results[0]; //retrieving first index as we are interested only in the first place result
      let result = raceResults[raceResultId];
      if(typeof result === 'undefined') {
        return null
      }
      let driver = drivers[result.Driver];
      if(typeof driver === 'undefined') {
        return null
      }
      // BUGFIX: the previous code set isWorldChampion directly on the driver
      // object from the store, mutating state inside a selector. Annotate a
      // copy instead so the store entry stays untouched.
      const winner = Object.assign({}, driver, {
        isWorldChampion: driver.driverId === worldChamp.driverId
      });
      return {
        date: race.date,
        raceName: race.raceName,
        round: race.round,
        season: race.season,
        time: race.time,
        url: race.url,
        winner: winner
      }
    }).filter( item => item !== null);
  }
);
const worldChampionSelector = createSelector(
[selectedYear, standings, drivers],
(selectedYear, standings, drivers) => {
let worldChamp = worldChampForSeason(selectedYear, standings, drivers);
if(typeof worldChamp === 'undefined') {
return {}
}
return worldChamp;
}
)
export {
raceResultsForSeason,
worldChampionSelector
} |
<filename>packages/eslint-plugin-zillow/lib/eslint-plugin-zillow.js
'use strict';

// Rules and processors are discovered from the bundled sub-plugins;
// the shareable configs are plain JSON files checked in under ./configs.
const { getPluginProcessors, getPluginRules } = require('./plugins');
const jestConfig = require('./configs/jest.json');
const mochaConfig = require('./configs/mocha.json');
const recommendedConfig = require('./configs/recommended.json');

// Standard ESLint plugin shape: { configs, processors, rules }.
module.exports = {
    configs: {
        jest: jestConfig,
        mocha: mochaConfig,
        recommended: recommendedConfig,
    },
    // TODO: environments
    processors: getPluginProcessors(),
    rules: getPluginRules(),
};
|
<gh_stars>1-10
import msgpack
from . import packet
class MsgPackPacket(packet.Packet):
    """Packet subclass that serializes with msgpack instead of the default
    encoding of the base class."""

    # msgpack payloads are already binary; no separate binary-event handling needed.
    uses_binary_events = False

    def encode(self):
        """Encode the packet for transmission."""
        return msgpack.dumps(self._to_dict())

    def decode(self, encoded_packet):
        """Decode a transmitted packet."""
        decoded = msgpack.loads(encoded_packet)
        self.packet_type = decoded['type']
        self.data = decoded['data']
        self.id = decoded.get('id')  # 'id' is optional, unlike the other fields
        self.namespace = decoded['nsp']
|
<filename>src/main/java/com/alipay/api/response/AlipayCommerceIotAdvertiserAdModifyResponse.java
package com.alipay.api.response;
import com.alipay.api.AlipayResponse;
/**
* ALIPAY API: alipay.commerce.iot.advertiser.ad.modify response.
*
* @author auto create
* @since 1.0, 2021-12-08 21:39:53
*/
public class AlipayCommerceIotAdvertiserAdModifyResponse extends AlipayResponse {

    // Generated marker class: all response fields live on AlipayResponse.
    private static final long serialVersionUID = 3412178429454753925L;

}
|
#!/bin/bash
# Requires gitio-cli to be preinstalled
# Requires node as well
#
# Generates a Ruby "def self.patch" block for a void-linux package: downloads
# the package's patch listing, emits one curl + SHA256-check line per patch,
# then one "patch" invocation per patch using the template's strip level.
pkgname="$1"
# Mirror only the .patch files for this package; abort when the listing fails.
if ! wget -q -e robots=off -r -A.patch -nd -l 1 "https://github.com/void-linux/void-packages/tree/master/srcpkgs/$pkgname/patches/"; then
exit 1
fi
# Collect the downloaded patch file names, then remove the local copies again.
patches+=($(ls ./ | sed '/^.*\.patch/!d' | tr "\n" " "))
rm ./*.patch &>/dev/null
printf '\n'
echo " def self.patch"
# First nodebrew-managed node version; used to locate the gitio-cli script.
version_node=$(nodebrew ls | cut -d$'\n' -f1)
for i in ${patches[@]}
do
# Shorten the raw patch URL via gitio-cli and park it on the clipboard
# (wl-copy), then emit the download line plus a SHA256 verification line
# computed from the live file.
node /usr/local/share/nodebrew/node/$version_node/bin/gitio-cli/index.js "https://raw.githubusercontent.com/void-linux/void-packages/master/srcpkgs/$pkgname/patches/$i" | tr "\n" " " | cut -d':' -f2- | sed 's/ Long URL: .*//g' | wl-copy
echo " system \"curl --ssl --progress-bar -o $i -L$(wl-paste)\""
printf " abort 'Checksum mismatch. :/ Try again.'.lightred unless Digest::SHA256.hexdigest( File.read('$i') ) == '%b'\n" $(curl -Ls $(wl-paste) | sha256sum | cut -d" " -f1)
done
# The package template may override the default patch strip level (-Np0).
source ./template
if [ -z $patch_args ]; then
patch_type="-Np0"
else
patch_type="$patch_args"
fi
for i in ${patches[@]}
do
echo " system \"patch ${patch_type} ./$i\""
done
echo " end"
|
/*
* Copyright 2016 <NAME> (<EMAIL>)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.difference.historybook.index.lucene;
import java.time.Instant;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.util.BytesRef;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Charsets;
import com.google.common.hash.Hashing;
import com.google.common.net.InetAddresses;
import com.google.common.net.InternetDomainName;
import io.mola.galimatias.GalimatiasParseException;
import io.mola.galimatias.URL;
/**
* Provides a semantic interface to a Lucene document
*/
public class IndexDocumentAdapter {
    private static final Logger LOG = LoggerFactory.getLogger(IndexDocumentAdapter.class);

    public static final String FIELD_SEARCH = "body";
    public static final String FIELD_COLLECTION = "collection";
    public static final String FIELD_URL = "url";
    public static final String FIELD_URL_GROUP = "url-group";
    private static final String FIELD_DOMAIN = "domain";
    public static final String FIELD_TIMESTAMP = "timestamp";
    private static final String FIELD_TIMESTAMP_TEXT = "timestampText";
    private static final String FIELD_TITLE = "title";
    private static final String FIELD_KEY = "key";

    private final Document doc;

    /**
     * Constructor for IndexDocumentAdapter that creates a backing Lucene doc that is initially empty
     */
    public IndexDocumentAdapter() {
        this.doc = new Document();
    }

    /**
     * Constructor for IndexDocumentAdapter that uses a provided Lucene doc
     * @param doc A Lucene doc to use as the backing store for this instance
     */
    public IndexDocumentAdapter(Document doc) {
        this.doc = doc;
    }

    /**
     * @param collection the case-sensitive name of a collection to use in namespacing the index
     * @return this for request chaining
     */
    public IndexDocumentAdapter setCollection(String collection) {
        doc.add(new StringField(FIELD_COLLECTION, collection, Field.Store.YES));
        return this;
    }

    /**
     * @return the case-sensitive name of the collection this backing document is in
     */
    public String getCollection() {
        return doc.get(FIELD_COLLECTION);
    }

    /**
     * @param url the complete URL for the page being indexed in this document
     * @return this for method chaining
     */
    public IndexDocumentAdapter setUrl(String url) {
        doc.add(new SortedDocValuesField(FIELD_URL_GROUP, new BytesRef(url)));
        doc.add(new StringField(FIELD_URL, url, Field.Store.YES));
        setDomainField(url);
        return this;
    }

    /**
     * @return the URL for the page indexed in this document
     */
    public String getUrl() {
        return doc.get(FIELD_URL);
    }

    /**
     * Extracts a grouping domain from the URL and stores it in FIELD_DOMAIN.
     * Parse failures are logged and leave the field unset.
     */
    private void setDomainField(String url) {
        try {
            URL u = URL.parse(url);
            String hostString = u.host().toHumanString();
            String domain;
            if (!"localhost".equalsIgnoreCase(hostString) && !InetAddresses.isInetAddress(hostString)) {
                try {
                    domain = InternetDomainName.from(hostString).topPrivateDomain().toString();
                } catch (IllegalArgumentException | IllegalStateException e) {
                    // BUGFIX: InternetDomainName.from throws IllegalArgumentException for
                    // syntactically invalid hosts and topPrivateDomain() throws
                    // IllegalStateException when the host has no registrable domain
                    // (e.g. intranet names like "wiki.local"). These runtime exceptions
                    // previously escaped the GalimatiasParseException catch below and
                    // aborted indexing. Fall back to the raw host so the page is still grouped.
                    domain = hostString;
                }
            } else {
                domain = hostString;
            }
            doc.add(new StringField(FIELD_DOMAIN, domain, Field.Store.YES));
        } catch (GalimatiasParseException e1) {
            LOG.error("Unable to parse url {}", url);
        }
    }

    /**
     * @return the top level domain extracted from the page url. Useful for grouping/filtering results.
     */
    public String getDomain() {
        return doc.get(FIELD_DOMAIN);
    }

    /**
     * @param timestamp the timestamp for when the page was fetched from the source
     * @return this for method chaining
     */
    public IndexDocumentAdapter setTimestamp(Instant timestamp) {
        doc.add(new NumericDocValuesField(FIELD_TIMESTAMP, timestamp.getEpochSecond()));
        doc.add(new StoredField(FIELD_TIMESTAMP_TEXT, timestamp.toString()));
        return this;
    }

    /**
     * @return a textual representation of the timestamp for when the page was fetched in ISO-8601 format
     */
    public String getTimestampText() {
        return doc.get(FIELD_TIMESTAMP_TEXT);
    }

    /**
     * @param title The title of the page
     * @return this for method chaining
     */
    public IndexDocumentAdapter setTitle(String title) {
        doc.add(new TextField(FIELD_TITLE, title, Field.Store.YES));
        return this;
    }

    /**
     * @return The title of the page
     */
    public String getTitle() {
        return doc.get(FIELD_TITLE);
    }

    /**
     * @param content The textual content of the page
     * @return this for method chaining
     */
    public IndexDocumentAdapter setContent(String content) {
        doc.add(new TextField(FIELD_SEARCH, content, Field.Store.YES));
        // The SHA-1 of the content doubles as a stable dedup key for the page.
        String hash = Hashing.sha1().newHasher().putString(content, Charsets.UTF_8).hash().toString();
        doc.add(new StringField(FIELD_KEY, hash, Field.Store.YES));
        return this;
    }

    /**
     * @return A unique key for the page content (a hash of the content in fact)
     */
    public String getKey() {
        return doc.get(FIELD_KEY);
    }

    /**
     * @return the underlying Lucene document
     */
    public Document getAsDocument() {
        return doc;
    }
}
|
<filename>src/Component/Header/Account.js
import React, { Component } from 'react';
import { LinkContainer } from 'react-router-bootstrap';
// reactbootstrap component
import Nav from 'react-bootstrap/Nav'
// scss for array
import style from './Account.module.scss';
export default class Account extends Component {
render() {
return (
<>
<Nav className="flex-column">
<LinkContainer to="/settings">
<Nav.Link className={style.accountLink} >Profile</Nav.Link>
</LinkContainer>
<LinkContainer to="/settings">
<Nav.Link className={style.accountLink} >Settings</Nav.Link>
</LinkContainer>
<LinkContainer to="/login">
<Nav.Link className={style.accountLink}>Logout</Nav.Link>
</LinkContainer>
</Nav>
</>
)
}
}
|
<filename>packages/playwright-test/src/mount.ts
/**
* Copyright (c) Microsoft Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import type { Fixtures, Locator, Page, BrowserContextOptions, PlaywrightTestArgs, PlaywrightTestOptions, PlaywrightWorkerArgs } from './types';
// Callbacks passed into mount() are replaced by string tokens before being
// shipped to the page; this array maps token ordinal -> original function.
let boundCallbacksForMount: Function[] = [];

export const fixtures: Fixtures<PlaywrightTestArgs & PlaywrightTestOptions & { mount: (component: any, options: any) => Promise<Locator> }, PlaywrightWorkerArgs & { _ctPage: { page: Page | undefined, hash: string } }> = {
  // Worker-scoped cache: the component-testing page plus a hash of the
  // context options it was created with (see contextHash below).
  _ctPage: [{ page: undefined, hash: '' }, { scope: 'worker' }],

  context: async ({ page }, use) => {
    await use(page.context());
  },

  page: async ({ _ctPage, browser, viewport, playwright }, use) => {
    const defaultContextOptions = (playwright.chromium as any)._defaultContextOptions as BrowserContextOptions;
    const hash = contextHash(defaultContextOptions);
    if (!_ctPage.page || _ctPage.hash !== hash) {
      // No cached page, or the context options changed since it was created:
      // close the stale page (if any) and build a fresh one.
      if (_ctPage.page)
        await _ctPage.page.close();
      const page = await (browser as any)._wrapApiCall(async () => {
        const page = await browser.newPage();
        // Neutralize service-worker registration before any page script runs.
        await page.addInitScript('navigator.serviceWorker.register = () => {}');
        // Bridge from the page back to Node: dispatches by callback ordinal
        // into boundCallbacksForMount.
        await page.exposeFunction('__pw_dispatch', (ordinal: number, args: any[]) => {
          boundCallbacksForMount[ordinal](...args);
        });
        await page.goto(process.env.PLAYWRIGHT_VITE_COMPONENTS_BASE_URL!);
        return page;
      }, true);
      _ctPage.page = page;
      _ctPage.hash = hash;
      await use(page);
    } else {
      // Reuse the cached page: reset page and context state, then re-navigate
      // to the components host so each test starts from a clean slate.
      const page = _ctPage.page;
      await (page as any)._wrapApiCall(async () => {
        await (page as any)._resetForReuse();
        await (page.context() as any)._resetForReuse();
        await page.goto('about:blank');
        await page.setViewportSize(viewport || { width: 1280, height: 800 });
        await page.goto(process.env.PLAYWRIGHT_VITE_COMPONENTS_BASE_URL!);
      }, true);
      await use(page);
    }
  },

  mount: async ({ page }, use) => {
    await use(async (component, options) => {
      const selector = await (page as any)._wrapApiCall(async () => {
        return await innerMount(page, component, options);
      }, true);
      return page.locator(selector);
    });
    // Drop the callbacks registered during this test.
    boundCallbacksForMount = [];
  },
};
/**
 * Mounts a component inside the page and returns a selector for the mounted
 * root. Accepts either a component descriptor object or a string type name
 * (wrapped into a { kind: 'object' } descriptor). Function props are replaced
 * with '__pw_func_N' tokens before serialization and restored in-page via the
 * exposed __pw_dispatch bridge.
 */
async function innerMount(page: Page, jsxOrType: any, options: any): Promise<string> {
  let component;
  if (typeof jsxOrType === 'string')
    component = { kind: 'object', type: jsxOrType, options };
  else
    component = jsxOrType;
  wrapFunctions(component, page, boundCallbacksForMount);
  // WebKit does not wait for deferred scripts.
  await page.waitForFunction(() => !!(window as any).playwrightMount);
  const selector = await page.evaluate(async ({ component }) => {
    // Inverse of wrapFunctions, executed inside the page: turns '__pw_func_N'
    // tokens back into functions that call the exposed dispatcher.
    const unwrapFunctions = (object: any) => {
      for (const [key, value] of Object.entries(object)) {
        if (typeof value === 'string' && (value as string).startsWith('__pw_func_')) {
          const ordinal = +value.substring('__pw_func_'.length);
          object[key] = (...args: any[]) => {
            (window as any)['__pw_dispatch'](ordinal, args);
          };
        } else if (typeof value === 'object' && value) {
          unwrapFunctions(value);
        }
      }
    };
    unwrapFunctions(component);
    return await (window as any).playwrightMount(component);
  }, { component });
  return selector;
}
/**
 * Recursively replaces every function-valued property of `object` with a
 * '__pw_func_N' token, appending the original function to `callbacks` so the
 * page can dispatch back to it by ordinal.
 */
function wrapFunctions(object: any, page: Page, callbacks: Function[]) {
  Object.entries(object).forEach(([key, value]) => {
    if (typeof value === 'function') {
      // Token ordinal == index of the callback being registered.
      object[key] = '__pw_func_' + callbacks.length;
      callbacks.push(value as Function);
    } else if (typeof value === 'object' && value) {
      wrapFunctions(value, page, callbacks);
    }
  });
}
function contextHash(context: BrowserContextOptions): string {
const hash = {
acceptDownloads: context.acceptDownloads,
bypassCSP: context.bypassCSP,
colorScheme: context.colorScheme,
extraHTTPHeaders: context.extraHTTPHeaders,
forcedColors: context.forcedColors,
geolocation: context.geolocation,
hasTouch: context.hasTouch,
httpCredentials: context.httpCredentials,
ignoreHTTPSErrors: context.ignoreHTTPSErrors,
isMobile: context.isMobile,
javaScriptEnabled: context.javaScriptEnabled,
locale: context.locale,
offline: context.offline,
permissions: context.permissions,
proxy: context.proxy,
storageState: context.storageState,
timezoneId: context.timezoneId,
userAgent: context.userAgent,
deviceScaleFactor: context.deviceScaleFactor,
};
return JSON.stringify(hash);
}
|
/// Strips leading/trailing symbol characters, then leading/trailing
/// punctuation, from `username` and looks the cleaned name up via UserService.
func processAndFetchUser(username: String, completion: @escaping (User?) -> Void) {
    let cleanedUsername = username
        .trimmingCharacters(in: .symbols)
        .trimmingCharacters(in: .punctuationCharacters)
    UserService.shared.fetchUser(withUserName: cleanedUsername) { user in
        completion(user)
    }
}
#!/bin/bash -e
set -o pipefail
# Enable shell tracing when DEBUG=true (case-insensitive).
[ "${DEBUG,,}" == "true" ] && set -x
# Resolve this script's real location so sibling files can be sourced
# regardless of the caller's working directory.
my_file="$(readlink -e "$0")"
# BUGFIX: quote expansions so paths containing whitespace do not word-split.
my_dir="$(dirname "$my_file")"
source "$my_dir/definitions"
"${my_dir}/../common/deploy_platform.sh" helm
|
#!/bin/bash
# Averages the per-node authenticator counts ("nbCons" lines, field 7)
# found in the sim-10.0.<a>.<b>.log files of NB_NODES nodes.
NB_NODES=$1 #nb nodes
#WIT=$2 #nb monitors
AVG_AUTH=0
for i in $(seq 0 $((NB_NODES-1)))
do
	#echo "i = " $i
	# Map the node index onto the 10.0.a.b address scheme (254 hosts per subnet).
	a=$((i/254))
	b=$((1+i%254))
	#echo "a = " $a
	#echo "b = " $b
	# BUGFIX: pipe directly instead of going through tmpAuth.txt, which was
	# clobbered on every iteration and left behind after the run.
	nbAuth=$(grep "nbCons" "sim-10.0.$a.$b.log" | tail -1 | awk '{print $7}')
	AVG_AUTH=$((AVG_AUTH + nbAuth))
done
# Integer average across all nodes.
AUTH=$((AVG_AUTH/NB_NODES))
echo "Avg authenticators = " $AUTH
|
#!/bin/bash
# History of minikube start invocations tried while configuring basic-auth;
# kept commented out for reference. The live entry point is the start.sh
# call at the bottom.
#minikube start --vm-driver=xhyve --v=7 --alsologtostderr --cpus=4 --memory=12288 --disk-size=40g --insecure-registry localhost:5000 --extra-config=apiserver.Authentication.PasswordFile.BasicAuthFile=/var/lib/localkube/config/static-users.csv
#minikube start --vm-driver=hyperkit --v=7 --alsologtostderr --insecure-registry localhost:5000 --extra-config=apiserver.Authentication.PasswordFile.BasicAuthFile=/var/lib/localkube/config/static-users.csv
#minikube start --vm-driver=hyperkit --v=7 --alsologtostderr --cpus=4 --memory=12288 --disk-size=40g --insecure-registry localhost:5000
#minikube start --vm-driver=hyperkit --v=7 --alsologtostderr --cpus=4 --memory=12288 --disk-size=40g --insecure-registry localhost:5000 --extra-config=apiserver.Authentication.PasswordFile.BasicAuthFile=/var/lib/localkube/config/static-users.csv
#minikube start --vm-driver=hyperkit --v=7 --alsologtostderr --cpus=4 --memory=12288 --disk-size=40g --insecure-registry localhost:5000 --extra-config=apiserver.Authentication.PasswordFile.BasicAuthFile=/host-home/.minikube/config/static-users.csv
#minikube start --vm-driver=hyperkit --v=7 --alsologtostderr --cpus=4 --memory=12288 --disk-size=40g --insecure-registry localhost:5000 --extra-config=apiserver.basic-auth-file="/var/lib/localkube/config/static-users.csv"
#minikube start --vm-driver=hyperkit --v=7 --alsologtostderr --cpus=4 --memory=12288 --disk-size=40g --insecure-registry localhost:5000 --extra-config=apiserver.Authentication.PasswordFile.basic-auth-file=/var/lib/localkube/config/static-users.csv --extra-config=apiserver.Authentication.PasswordFile.BasicAuthFile=/var/lib/localkube/config/static-users.csv
#minikube start --vm-driver=hyperkit --v=7 --alsologtostderr --cpus=4 --memory=12288 --disk-size=40g --insecure-registry localhost:5000
#This works with minikube v0.25.2
#minikube start --vm-driver=hyperkit --v=7 --alsologtostderr --cpus=4 --memory=12288 --disk-size=40g --insecure-registry localhost:5000 --extra-config=apiserver.Authentication.PasswordFile.BasicAuthFile=/var/lib/localkube/config/static-users.csv
#minikube start --vm-driver=hyperkit --v=7 --alsologtostderr --cpus=4 --memory=12288 --disk-size=40g --insecure-registry localhost:5000 --extra-config=apiserver.basic-auth-file=/var/lib/localkube/config/static-users.csv
#minikube start --vm-driver=hyperkit --v=7 --alsologtostderr --cpus=4 --memory=12288 --disk-size=40g --insecure-registry localhost:5000 --extra-config=apiserver.BasicAuthFile=/var/lib/localkube/config/static-users.csv
./start.sh cockpit
#nohup minikube mount ~:/host-home &
|
class AlertMessage extends React.Component {
componentDidMount() {
alert("Welcome to my page!");
}
render() {
return null;
}
} |
<reponame>glameyzhou/training<filename>distribute/src/main/java/org/glamey/training/designmodel/observable/package-info.java
/**
* https://zh.wikipedia.org/wiki/%E8%A7%82%E5%AF%9F%E8%80%85%E6%A8%A1%E5%BC%8F
*
* 观察者模式是软件设计模式的一种。
*
* 在此种模式中,一个目标对象管理所有相依于它的观察者对象,并且在它本身的状态改变时主动发出通知。
* 这通常透过呼叫各观察者所提供的方法来实现。此种模式通常被用来实时事件处理系统。
*
* @author zhouyang.zhou. 2017.08.29.16.
*/
package org.glamey.training.designmodel.observable; |
<gh_stars>1-10
import React from 'react'
import { MemoryRouter } from 'react-router-dom'
import { render, within, fireEvent } from '@testing-library/react'
import '@testing-library/jest-dom/extend-expect'
import Main from '../pages/Main'
import UserInput from '../components/UserInput'
import { RecButton } from '../styles/styles'
test('check ingredients', async () => {
const rendered = await render(<UserInput
options={['onion', 'ham']}
placeholder="Enter Ingredient..."
/>)
const { findByPlaceholderText } = rendered
// Find autocomplete element
const autocomplete = await findByPlaceholderText('Enter Ingredient...')
const { getByText: getByBodyText } = within(document.body)
fireEvent.mouseDown(autocomplete);
const option = getByBodyText('onion')
expect(option).toBeInTheDocument()
fireEvent.change(autocomplete, { target: { value: 'a' }})
expect(within(document.body).queryByText('onion')).not.toBeInTheDocument()
expect(within(document.body).queryByText('ham')).toBeInTheDocument()
})
test('check preferences: diet', async () => {
const rendered = await render(<UserInput
options={['No preference', 'Vegan']}
placeholder="search..."
/>)
const { findByPlaceholderText } = rendered
// Find autocomplete element
const autocomplete = await findByPlaceholderText('search...')
const { getByText: getByBodyText } = within(document.body)
fireEvent.mouseDown(autocomplete);
const option = getByBodyText('Vegan')
expect(option).toBeInTheDocument()
fireEvent.change(autocomplete, { target: { value: 'v' }})
expect(within(document.body).queryByText('No preference')).not.toBeInTheDocument()
expect(within(document.body).queryByText('Vegan')).toBeInTheDocument()
})
test('check preferences: cuisine', async () => {
const rendered = await render(<UserInput
options={['American', 'French']}
placeholder="search..."
/>)
const { findByPlaceholderText } = rendered
// Find autocomplete element
const autocomplete = await findByPlaceholderText('search...')
const { getByText: getByBodyText } = within(document.body)
fireEvent.mouseDown(autocomplete);
const option = getByBodyText('American')
expect(option).toBeInTheDocument()
fireEvent.change(autocomplete, { target: { value: 'a' }})
expect(within(document.body).queryByText('French')).not.toBeInTheDocument()
expect(within(document.body).queryByText('American')).toBeInTheDocument()
})
test('check preferences: sort by', async () => {
const rendered = await render(<UserInput
options={['Date', 'Rate', 'Calories']}
placeholder="search..."
/>)
const { findByPlaceholderText } = rendered
// Find autocomplete element
const autocomplete = await findByPlaceholderText('search...')
const { getByText: getByBodyText } = within(document.body)
fireEvent.mouseDown(autocomplete);
const option = getByBodyText('Date')
expect(option).toBeInTheDocument()
fireEvent.change(autocomplete, { target: { value: 'd' }})
expect(within(document.body).queryByText('Rate')).not.toBeInTheDocument()
expect(within(document.body).queryByText('Calories')).not.toBeInTheDocument()
expect(within(document.body).queryByText('Date')).toBeInTheDocument()
})
test('calls correct function on click (get recommendations)', async () => {
const onClick = jest.fn()
const { getByText } = render(<RecButton onClick={onClick}>get recommendations</RecButton>)
fireEvent.click(getByText('get recommendations'))
expect(onClick).toHaveBeenCalled()
})
test('calls correct function on click (feeling lucky)', async () => {
const onClick = jest.fn()
const { getByText } = render(<RecButton onClick={onClick} lucky>I am Feeling Lucky</RecButton>)
fireEvent.click(getByText('I am Feeling Lucky'))
expect(onClick).toHaveBeenCalled()
})
test('check page elements', async () => {
const { getByText } = render(<Main />, { wrapper: MemoryRouter })
expect(getByText('ingredient list')).toBeInTheDocument()
expect(getByText('preferences')).toBeInTheDocument()
expect(getByText('diet')).toBeInTheDocument()
expect(getByText('cuisine')).toBeInTheDocument()
expect(getByText('sort by')).toBeInTheDocument()
expect(getByText('get recommendations')).toBeInTheDocument()
expect(getByText('I am Feeling Lucky')).toBeInTheDocument()
}) |
<gh_stars>0
/*
* Copyright 2017 LinkedIn Corp. Licensed under the BSD 2-Clause License (the "License"). See License in the project root for license information.
*/
package com.linkedin.kafka.cruisecontrol.monitor.sampling.aggregator;
import com.linkedin.kafka.cruisecontrol.monitor.sampling.Snapshot;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.kafka.common.Cluster;
import org.apache.kafka.common.TopicPartition;
/**
* This class contains the result of metric samples aggregation from
* {@link MetricSampleAggregator#recentSnapshots(Cluster, long)}
*/
public class MetricSampleAggregationResult {
  // Aggregated snapshots, keyed by partition.
  private final Map<TopicPartition, Snapshot[]> _snapshots;
  // Imputations applied per partition; such partitions still count as valid
  // unless a FORCED_* imputation was used (see recordPartitionWithSampleFlaw).
  private final Map<TopicPartition, List<SampleFlaw>> _sampleFlaws;
  private final boolean _includeAllTopics;
  // Generation of the aggregation this result was computed from.
  private final long _generation;
  // Partitions whose samples were forced in (FORCED_INSUFFICIENT / FORCED_UNKNOWN).
  private final Set<TopicPartition> _invalidPartitions;

  MetricSampleAggregationResult(long generation,
                                boolean includeAllTopics) {
    _snapshots = new HashMap<>();
    _sampleFlaws = new HashMap<>();
    _includeAllTopics = includeAllTopics;
    _generation = generation;
    _invalidPartitions = new HashSet<>();
  }

  /**
   * @return the aggregated snapshots keyed by partition. NOTE(review): this is
   * the internal mutable map (merge() relies on that); callers must not modify it.
   */
  public Map<TopicPartition, Snapshot[]> snapshots() {
    return _snapshots;
  }

  /** @return all recorded sample flaws, keyed by partition (internal mutable map). */
  public Map<TopicPartition, List<SampleFlaw>> sampleFlaws() {
    return _sampleFlaws;
  }

  /** @return the sample flaws recorded for the given partition, or an empty list when none. */
  public List<SampleFlaw> sampleFlaw(TopicPartition tp) {
    return _sampleFlaws.getOrDefault(tp, Collections.emptyList());
  }

  /** @return the aggregation generation this result belongs to. */
  public long generation() {
    return _generation;
  }

  void addPartitionSnapshots(TopicPartition tp, Snapshot[] snapshots) {
    _snapshots.put(tp, snapshots);
  }

  // Records an imputation for a partition. FORCED_INSUFFICIENT and FORCED_UNKNOWN
  // additionally mark the partition as invalid.
  void recordPartitionWithSampleFlaw(TopicPartition tp, long snapshotWindow, Imputation action) {
    List<SampleFlaw> sampleFlaws = _sampleFlaws.computeIfAbsent(tp, k -> new ArrayList<>());
    sampleFlaws.add(new SampleFlaw(snapshotWindow, action));
    if (action == Imputation.FORCED_INSUFFICIENT || action == Imputation.FORCED_UNKNOWN) {
      _invalidPartitions.add(tp);
    }
  }

  // Clears snapshots and invalid partitions; recorded sample flaws are not cleared here.
  void discardAllSnapshots() {
    _snapshots.clear();
    _invalidPartitions.clear();
  }

  boolean includeAllTopics() {
    return _includeAllTopics;
  }

  /** @return an unmodifiable view of the partitions whose samples were forced in. */
  public Set<TopicPartition> invalidPartitions() {
    return Collections.unmodifiableSet(_invalidPartitions);
  }

  // Merges the other result into this one (mutates this instance) and returns this.
  MetricSampleAggregationResult merge(MetricSampleAggregationResult other) {
    this._snapshots.putAll(other.snapshots());
    _invalidPartitions.addAll(other._invalidPartitions);
    for (Map.Entry<TopicPartition, List<SampleFlaw>> entry : other.sampleFlaws().entrySet()) {
      this._sampleFlaws.compute(entry.getKey(), (k, v) -> {
        if (v == null) {
          return entry.getValue();
        } else {
          v.addAll(entry.getValue());
          return v;
        }
      });
    }
    return this;
  }

  /**
   * There are a few imputations we will do when there is not sufficient samples in a snapshot window for a
   * partition. The imputations are used in the following preference order.
   * <ul>
   * <li>AVG_AVAILABLE: The average of available samples even though there are more than half of the required samples.</li>
   * <li>AVG_ADJACENT: The average value of the current snapshot and the two adjacent snapshot windows</li>
   * <li>PREV_PERIOD: The samples from the previous period is used.</li>
   * <li>FORCED_INSUFFICIENT: The sample is forced to be included with insufficient data.</li>
   * <li>FORCED_UNKNOWN: The sample is forced to be included and the original value was unknown.</li>
   * <li>NO_VALID_IMPUTATION: there is no valid imputation</li>
   * </ul>
   */
  public enum Imputation {
    AVG_AVAILABLE, PREV_PERIOD, AVG_ADJACENT, FORCED_INSUFFICIENT, FORCED_UNKNOWN, NO_VALID_IMPUTATION
  }

  /**
   * The sample flaw for a partition that is still treated as valid.
   */
  public static class SampleFlaw {
    private final long _snapshotWindow;
    private final Imputation _imputation;

    SampleFlaw(long snapshotWindow, Imputation imputation) {
      _snapshotWindow = snapshotWindow;
      _imputation = imputation;
    }

    /** @return the snapshot window the flaw occurred in. */
    public long snapshotWindow() {
      return _snapshotWindow;
    }

    /** @return the imputation that was applied. */
    public Imputation imputation() {
      return _imputation;
    }

    @Override
    public String toString() {
      return String.format("[%d, %s]", _snapshotWindow, _imputation);
    }
  }
}
|
// Entry point: exposes arrowCreate on `window` for script-tag consumers and
// re-exports the programmatic API (default factory plus enums).
import { assignArrowCreate } from './helpers/window'
import arrowCreate from './arrowCreate'

assignArrowCreate(window);

export default arrowCreate;
export { DIRECTION } from './consts';
export { HEAD } from './head';
|
#!/bin/bash
set -e

# <-- Create Development Database -->
# \i <filename> --to run (include) a script file of SQL commands.
# \c <database> --to connect to a different database
# Copy init.sql file to a docker volume in /var/lib/data/init.sql to gain access to init.sql inside the container.
#
# Expected environment (supplied by the postgres docker entrypoint / compose):
#   POSTGRES_USER, POSTGRES_DB                       -- superuser and default DB
#   POSTGRES_DEV_USER / POSTGRES_DEV_PASSWORD        -- owner-level dev account
#   POSTGRES_DEV_DB                                  -- dev database to create
#   POSTGRES_{READ,CREATE,UPDATE,DELETE}_DEV_USER and matching *_PASSWORD vars
#                                                    -- per-operation restricted roles
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL
CREATE USER $POSTGRES_DEV_USER WITH PASSWORD '$POSTGRES_DEV_PASSWORD';
CREATE DATABASE $POSTGRES_DEV_DB;
GRANT CONNECT ON DATABASE $POSTGRES_DEV_DB TO $POSTGRES_DEV_USER;
GRANT ALL PRIVILEGES ON DATABASE $POSTGRES_DEV_DB TO $POSTGRES_DEV_USER;
\c $POSTGRES_DEV_DB;
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON TABLES TO $POSTGRES_DEV_USER;
ALTER DEFAULT PRIVILEGES GRANT ALL ON TABLES TO $POSTGRES_DEV_USER;
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT USAGE, SELECT ON SEQUENCES TO $POSTGRES_DEV_USER;
\i /var/lib/data/init.sql;
ALTER DATABASE $POSTGRES_DEV_DB SET TIMEZONE TO 'Africa/Casablanca';
CREATE USER $POSTGRES_READ_DEV_USER WITH PASSWORD '$POSTGRES_READ_DEV_USER_PASSWORD';
CREATE USER $POSTGRES_CREATE_DEV_USER WITH PASSWORD '$POSTGRES_CREATE_DEV_USER_PASSWORD';
CREATE USER $POSTGRES_UPDATE_DEV_USER WITH PASSWORD '$POSTGRES_UPDATE_DEV_USER_PASSWORD';
CREATE USER $POSTGRES_DELETE_DEV_USER WITH PASSWORD '$POSTGRES_DELETE_DEV_USER_PASSWORD';
GRANT CONNECT ON DATABASE $POSTGRES_DEV_DB TO $POSTGRES_READ_DEV_USER;
GRANT CONNECT ON DATABASE $POSTGRES_DEV_DB TO $POSTGRES_CREATE_DEV_USER;
GRANT CONNECT ON DATABASE $POSTGRES_DEV_DB TO $POSTGRES_UPDATE_DEV_USER;
GRANT CONNECT ON DATABASE $POSTGRES_DEV_DB TO $POSTGRES_DELETE_DEV_USER;
GRANT USAGE ON SCHEMA public TO $POSTGRES_READ_DEV_USER;
GRANT USAGE ON SCHEMA public TO $POSTGRES_CREATE_DEV_USER;
GRANT USAGE ON SCHEMA public TO $POSTGRES_UPDATE_DEV_USER;
GRANT USAGE ON SCHEMA public TO $POSTGRES_DELETE_DEV_USER;
GRANT SELECT ON ALL TABLES IN SCHEMA public TO $POSTGRES_READ_DEV_USER;
GRANT INSERT ON ALL TABLES IN SCHEMA public TO $POSTGRES_CREATE_DEV_USER;
GRANT SELECT, UPDATE ON ALL TABLES IN SCHEMA public TO $POSTGRES_UPDATE_DEV_USER;
GRANT SELECT, DELETE ON ALL TABLES IN SCHEMA public TO $POSTGRES_DELETE_DEV_USER;
GRANT USAGE, SELECT ON ALL SEQUENCES IN SCHEMA public TO $POSTGRES_READ_DEV_USER;
GRANT USAGE, SELECT ON ALL SEQUENCES IN SCHEMA public TO $POSTGRES_CREATE_DEV_USER;
GRANT USAGE, SELECT, UPDATE ON ALL SEQUENCES IN SCHEMA public TO $POSTGRES_UPDATE_DEV_USER;
GRANT USAGE, SELECT ON ALL SEQUENCES IN SCHEMA public TO $POSTGRES_DELETE_DEV_USER;
EOSQL
|
// Print every number from 1 through 100 that is divisible by both 4 and 7 --
// equivalently, by their least common multiple, 28.
for (let num = 1; num <= 100; num += 1) {
    if (num % 28 === 0) {
        console.log(num);
    }
}
#!/bin/bash
# stop.sh -- interactively stop the RouterServer process.

# Print a message and abort the script.
function quit()
{
    echo "exit program"
    exit 1
}

# Confirm before killing; anything except n/N proceeds.
read -p " are you sure kill router?[y/n]" response
case $response in
    [nN])
        echo "exit stop.sh"
        quit
        ;;
    *)
        ;;
esac

######check Router Process######
SERVER_NAME="RouterServer"
# All PIDs whose command line mentions the server name, excluding the grep itself.
PIDS=`ps -ef | grep $SERVER_NAME | grep -v "grep" | awk '{print $2}'`
echo "PIDS=$PIDS"
# Quote "$PIDS": unquoted, multiple PIDs made `[ -z ... ]` a syntax error.
if [ -z "$PIDS" ]; then
    echo "no this process"
else
    echo "find process is $PIDS"
fi

logDir="/logs/im/router"
logFile="$logDir/heika-router.log"

#####kill RouterServer####
echo -e "Stopping the $SERVER_NAME ...\c"
for PID in $PIDS ; do
    # BUG FIX: the original ran `kill $PID $logFile 2>&1`, which passed the log
    # file path to kill as if it were a PID. Redirect output to the log instead.
    kill "$PID" >> "$logFile" 2>&1
done
echo "execute completed,has run the command of kill [pid]."
|
#!/bin/sh
# Bootstrap a local filesender checkout on the directory-tree-upload branch,
# apply a prepared git config, and start the stack via docker-compose.
# Assumes: SSH access to github.com, a local git.config file, and a
# docker-compose.yml in the current directory.
git clone git@github.com:glbrimhall/filesender.git
cp git.config filesender/.git/config
cd filesender
git checkout directory-tree-upload
cd -
docker-compose up -d
|
<gh_stars>100-1000
// WeChat mini-program page: goods category browser. Loads the top-level type
// list on load and lazily fetches the per-type "type tree" (brands /
// second-level categories) when a tab is tapped.
// NOTE(review): `request` and `ApiList` are not defined in this file --
// presumably imported/injected elsewhere; confirm before reuse.
Page({
  data: {
    typeTree: {}, // data cache, keyed by type id
    currType: 0 ,
    // currently selected type id
    "types": [
      {
        "shopAddr":"飞马牌服饰",
        "name":"<NAME>",
        "typeId":"0",
      },
      {
        "shopAddr":"飞马牌服饰",
        "name":"银泰",
        "typeId":"2",
      },
      {
        "name":"银泰西湖店",
        "typeId":"3",
      },
    ],
    // NOTE(review): duplicate key -- `typeTree` is declared twice in this
    // object literal, so this array silently overrides the `{}` cache above.
    // Confirm which declaration is intended (this one looks like placeholder
    // data for the UI).
    "typeTree": [
      {
        'pic':"../../images/im.jpg",
        "shopAddr":"飞马牌服饰",
        "name":"PUMA Kids",
        "typeId":"1",
      },
      {
        'pic':"../../images/im.jpg",
        "shopAddr":"飞马牌服饰",
        "name":"<NAME>",
        "typeId":"1",
      },
      {
        'pic':"../../images/im.jpg",
        "shopAddr":"飞马牌服饰",
        "name":"<NAME>",
        "typeId":"1",
      },
    ],
  },
  // Page lifecycle: fetch the top-level type list, then load the tree for the
  // initially selected type.
  onLoad (){
    var me = this;
    request({
      url: ApiList.goodsType,
      success: function (res) {
        me.setData({
          types: res.data.data
        });
      }
    });
    this.getTypeTree(this.data.currType);
    this.setData({
      currType: 0,
      // parseInt(options.currentTab)
    });
  },
  // Tab tap handler: switch the selected type and load its tree.
  tapType(e){
    const currType = e.currentTarget.dataset.typeId;
    this.setData({
      currType: currType
    });
    this.getTypeTree(currType);
  },
  // Load brand / second-level category data for a type, caching per type id
  // so each tree is only fetched once.
  getTypeTree (currType) {
    const me = this, _data = me.data;
    if(!_data.typeTree[currType]){
      request({
        url: ApiList.goodsTypeTree,
        data: {typeId: +currType},
        success: function (res) {
          _data.typeTree[currType] = res.data.data;
          me.setData({
            typeTree: _data.typeTree
          });
        }
      });
    }
  },
  // Share card shown when the user forwards this page.
  onShareAppMessage: function () {
    return {
      title: '微信小程序联盟',
      desc: '最具人气的小程序开发联盟!',
      path: '/page/user?id=123'
    }
  },
})
//============================================================================
// Copyright 2009-2020 ECMWF.
// This software is licensed under the terms of the Apache Licence version 2.0
// which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
// In applying this licence, ECMWF does not waive the privileges and immunities
// granted to it by virtue of its status as an intergovernmental organisation
// nor does it submit to any jurisdiction.
//
//============================================================================
#ifndef VIEWER_SRC_OUTPUTDIRPROVIDER_HPP_
#define VIEWER_SRC_OUTPUTDIRPROVIDER_HPP_
#include <QObject>
#include "VDir.hpp"
#include "VInfo.hpp"
#include "InfoProvider.hpp"
#include "VTask.hpp"
#include "VTaskObserver.hpp"
class OutputDirClient;
// One directory-fetch attempt in OutputDirProvider's task queue: the listing
// of a single path, fetched either from the local filesystem or via the
// remote output client.
class OutputDirProviderTask
{
public:
    enum FetchMode {LocalFetch,RemoteFetch};
    // NOTE(review): "UnkownStatus" is a typo for "UnknownStatus", but it is
    // part of this header's public interface, so it is left unchanged.
    enum Status {UnkownStatus,FinishedStatus,FailedStatus};
    // RunIfPrevFailed: only run this task if the previous queued task failed.
    enum Condition {NoCondition,RunIfPrevFailed};

    OutputDirProviderTask(const std::string& path,FetchMode fetchMode,Condition cond=NoCondition) :
        path_(path), fetchMode_(fetchMode), condition_(cond), status_(UnkownStatus) {}

    std::string path_;      // directory path to list
    VDir_ptr dir_;          // result of a successful fetch
    QString error_;         // message from a failed fetch
    FetchMode fetchMode_;
    Condition condition_;
    Status status_;
};
// Provides directory listings for a node's output files. Builds a queue of
// OutputDirProviderTask items and works through them, fetching locally or via
// an OutputDirClient; results are delivered through the InfoProvider owner.
class OutputDirProvider : public QObject, public InfoProvider
{
    Q_OBJECT

public:
    explicit OutputDirProvider(InfoPresenter* owner);

    // InfoProvider interface.
    void visit(VInfoNode*) override;
    void clear() override;

private Q_SLOTS:
    // Callbacks from the asynchronous OutputDirClient.
    void slotOutputClientError(QString);
    void slotOutputClientProgress(QString,int);
    void slotOutputClientFinished();

private:
    // Queue management: advance through queue_ honouring each task's
    // Condition, recording per-task success/failure.
    bool hasNext() const;
    void fetchNext();
    void fetchIgnored();
    void fetchFinished(VDir_ptr,QString msg=QString());
    void fetchFailed(QString msg=QString());
    void failed(QString);
    void completed();
    bool fetchDirViaOutputClient(VNode *n,const std::string& fileName);
    VDir_ptr fetchLocalDir(const std::string& path,std::string& errorStr);
    OutputDirClient* makeOutputClient(const std::string& host,const std::string& port);

    OutputDirClient *outClient_;            // lazily created remote client
    QList<OutputDirProviderTask> queue_;    // pending fetch tasks
    int currentTask_;                       // index into queue_
};
#endif /* VIEWER_SRC_OUTPUTDIRPROVIDER_HPP_ */
|
#
# The following script uses the user and password information
# in file /opt-users/users.txt to create a database per user,
# set the password for that database and then initialize the postgis
# extensions.
#
# The user file should contain pairs of lines that are the user and password.
# e.g.
# user1
# passwd1
# user2
# passwd2
#
set -e
set -u

# Read username/password pairs, two lines at a time.
while IFS= read -r dbuser; do
    echo "dbuser is $dbuser"
    read -r dbpasswd
    echo "dbpasswd is $dbpasswd"
    echo " Creating user and database '$dbuser'"
    psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" <<-EOSQL1
CREATE USER $dbuser PASSWORD '$dbpasswd';
CREATE DATABASE $dbuser;
GRANT ALL PRIVILEGES ON DATABASE $dbuser TO $dbuser;
EOSQL1
    # Enable postgis in the new database and grant the topology schema.
    # BUG FIX: the original used "\dn." and "\dt." -- the trailing dots made
    # psql treat them as unknown meta-commands; the list commands are \dn / \dt.
    psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" $dbuser <<-EOSQL2
CREATE EXTENSION postgis;
CREATE EXTENSION postgis_topology;
\dn
\dt
GRANT ALL PRIVILEGES ON SCHEMA topology TO $dbuser;
EOSQL2
done < /opt-users/users.txt
|
#!/bin/bash
# Remove node_modules from the entire git history, then force-push the
# rewritten history. DESTRUCTIVE: rewrites all commits on HEAD and requires
# every collaborator to re-clone or hard-reset afterwards.
git filter-branch --tree-filter 'rm -rf node_modules' --prune-empty HEAD
# Drop the refs/original/* backups created by filter-branch.
git for-each-ref --format="%(refname)" refs/original/ | xargs -n 1 git update-ref -d
# Prevent node_modules from being committed again.
echo node_modules/ >> .gitignore
git add .gitignore
git commit -m 'Removing node_modules from git history'
git gc
git push origin master --force
|
// react-intl message catalogue for the ImageUploader component.
// Ids double as object keys; default messages are Traditional Chinese.
import { defineMessages } from 'react-intl';

const messages = defineMessages({
  'ImageUploader.fileSizeExceeds': {
    id: 'ImageUploader.fileSizeExceeds',
    defaultMessage: '錯誤!圖片檔案過大。',
  },
  'ImageUploader.imageUploadSuccess': {
    id: 'ImageUploader.imageUploadSuccess',
    defaultMessage: '成功!你已經成功上傳圖片。',
  },
  'ImageUploader.errorWhenUploadImage': {
    id: 'ImageUploader.errorWhenUploadImage',
    defaultMessage: '錯誤!上傳圖片時發生錯誤。',
  },
  'ImageUploader.descriptionL1': {
    id: 'ImageUploader.descriptionL1',
    defaultMessage: '將您想上傳的圖片拖拉於此',
  },
  'ImageUploader.descriptionL2': {
    id: 'ImageUploader.descriptionL2',
    defaultMessage: '推薦大小為 1600 x 854 pixels',
  },
  'ImageUploader.upload': {
    id: 'ImageUploader.upload',
    defaultMessage: '上傳',
  },
});

export default messages;
|
#!/bin/sh
# CI deploy step: publish the built book (_book/) to the gh-pages branch.
# Runs only on master builds that have a GITHUB_PAT token available.
set -e

# Skip silently when no token (e.g. pull-request builds) or non-master branch.
[ -z "${GITHUB_PAT}" ] && exit 0
[ "${TRAVIS_BRANCH}" != "master" ] && exit 0

git config --global user.email "kbenoit@lse.ac.uk"
git config --global user.name "kbenoit"

# Clone only the gh-pages branch, overlay the fresh build, commit and push.
git clone -b gh-pages https://${GITHUB_PAT}@github.com/${TRAVIS_REPO_SLUG}.git book-output
cd book-output
cp -r ../_book/* ./
git add --all *
git commit -m "Update the book" || true
git push origin gh-pages
|
sap.ui.define([], function() {
    "use strict";

    // Latest ranged beacon list, refreshed by the ranging delegate callback.
    var beacons = [];

    // Beacon major ids that mark corridor positions.
    var CORRIDOR_MAJORS = [201, 202];

    // Room beacon major ids, in the reporting order used by the original
    // implementation (rooms 805, 822, 801, 807, WC, front desk, 824, 803).
    var ROOM_MAJORS = [101, 102, 103, 104, 105, 106, 107, 108];

    return {
        // Start ranging the fixed beacon region; received beacons are stored
        // in the module-level `beacons` array for getCurrentBeacon().
        startBeaconRegion: function() {
            var uuid = 'E2C56DB5-DFFB-48D2-B060-D0F5A71096E0';
            var identifier = 'BrightBeacon';
            var beaconRegion = new cordova.plugins.locationManager.BeaconRegion(identifier, uuid);

            cordova.plugins.locationManager.requestWhenInUseAuthorization();
            // cordova.plugins.locationManager.requestAlwaysAuthorization()

            var delegate = new cordova.plugins.locationManager.Delegate();
            delegate.didRangeBeaconsInRegion = function (pluginResult) {
                beacons = pluginResult.beacons;
                console.log(JSON.stringify(pluginResult));
            };
            cordova.plugins.locationManager.setDelegate(delegate);
            cordova.plugins.locationManager.startRangingBeaconsInRegion(beaconRegion)
                .fail()
                .done();
        },

        // Returns {majorId, minorId} for the user's most likely position, or
        // undefined when no usable beacon data is available.
        getCurrentBeacon: function() {
            if (!beacons || beacons.length === 0) {
                return;
            }

            // Sort nearest-first by `accuracy` (estimated distance in metres);
            // accuracy === -1 means "unknown" and is pushed to the end.
            // BUG FIX: the original comparator returned booleans, which
            // Array.prototype.sort does not accept -- the sort order was
            // effectively arbitrary. Comparators must return numbers.
            beacons.sort(function(a, b) {
                if (a.accuracy === -1) {
                    return 1;
                }
                if (b.accuracy === -1) {
                    return -1;
                }
                return a.accuracy - b.accuracy;
            });

            // Corridor: a corridor beacon being nearest wins outright.
            var nearest = beacons[0];
            if (CORRIDOR_MAJORS.indexOf(nearest.major) !== -1) {
                return {"majorId": nearest.major, "minorId": nearest.minor};
            }

            // Not in the corridor: compute a weighted (average-distance)
            // position per room. Only positive accuracies are counted.
            var sums = {};
            var counts = {};
            for (var i = beacons.length - 1; i >= 0; i--) {
                var beacon = beacons[i];
                if (beacon.accuracy > 0 && ROOM_MAJORS.indexOf(beacon.major) !== -1) {
                    sums[beacon.major] = (sums[beacon.major] || 0) + beacon.accuracy;
                    counts[beacon.major] = (counts[beacon.major] || 0) + 1;
                }
            }

            // Build the candidate room list in the fixed ROOM_MAJORS order.
            var wRooms = [];
            for (var j = 0; j < ROOM_MAJORS.length; j++) {
                var major = ROOM_MAJORS[j];
                if (counts[major]) {
                    var avg = sums[major] / counts[major];
                    if (avg >= 0) {
                        wRooms.push({"major": String(major), "minor": "1", "rssi": avg});
                    }
                }
            }

            // Pick the room with the smallest average distance. (Same boolean
            // comparator bug fixed here; `rssi` actually holds averaged
            // accuracy/distance, so ascending = closest first.)
            wRooms.sort(function(a, b) {
                return a.rssi - b.rssi;
            });

            if (wRooms.length > 0) {
                return {"majorId": wRooms[0].major, "minorId": wRooms[0].minor};
            }
        }
    };
});
#!/usr/bin/env bash
# extract-unichem.sh: download UniChem and extract TSV of (chembl,chebi) pairs
# Copyright 2019 Stephen A. Ramsey <stephen.ramsey@oregonstate.edu>
# Output: a TSV whose first line is "# <release date>" followed by
# CHEMBL<->CHEBI, CHEMBL<->DRUGBANK and CHEBI<->DRUGBANK identifier pairs.
# Requires: curl (via ${curl_get} from master-config.shinc), zcat, awk, join.

set -o nounset -o pipefail -o errexit
if [[ "${1:-}" == "--help" || "${1:-}" == "-h" ]]; then
    echo Usage: "$0 <output_tsv_file>"
    exit 2
fi

echo "================= starting extract-unichem.sh ================="
date

# Shared build configuration (defines BUILD_DIR, curl_get, ...).
config_dir=`dirname "$0"`
source ${config_dir}/master-config.shinc

output_tsv_file=${1:-"${BUILD_DIR}/unichem/unichem-mappings.tsv"}
unichem_dir=${BUILD_DIR}/unichem
unichem_output_dir=`dirname ${output_tsv_file}`
# Pinned UniChem release number.
unichem_ver=335
unichem_ftp_site=ftp://ftp.ebi.ac.uk/pub/databases/chembl/UniChem/data

rm -r -f ${unichem_dir}
mkdir -p ${unichem_dir}
mkdir -p ${unichem_output_dir}

# Cross-reference, source and release tables from the UniChem oracle dumps.
${curl_get} ${unichem_ftp_site}/oracleDumps/UDRI${unichem_ver}/UC_XREF.txt.gz > ${unichem_dir}/UC_XREF.txt.gz
${curl_get} ${unichem_ftp_site}/oracleDumps/UDRI${unichem_ver}/UC_SOURCE.txt.gz > ${unichem_dir}/UC_SOURCE.txt.gz
${curl_get} ${unichem_ftp_site}/oracleDumps/UDRI${unichem_ver}/UC_RELEASE.txt.gz > ${unichem_dir}/UC_RELEASE.txt.gz

# Numeric source ids for the databases of interest.
chembl_src_id=`zcat ${unichem_dir}/UC_SOURCE.txt.gz | awk '{if ($2 == "chembl") {printf "%s", $1}}'`
chebi_src_id=`zcat ${unichem_dir}/UC_SOURCE.txt.gz | awk '{if ($2 == "chebi") {printf "%s", $1}}'`
drugbank_src_id=`zcat ${unichem_dir}/UC_SOURCE.txt.gz | awk '{if ($2 == "drugbank") {printf "%s", $1}}'`

update_date=`zcat ${unichem_dir}/UC_RELEASE.txt.gz | tail -1 | cut -f3`
echo "# ${update_date}" > ${output_tsv_file}

# Per-source (unichem_id, CURIE) files, sorted on unichem_id for `join`.
zcat ${unichem_dir}/UC_XREF.txt.gz | awk '{if ($2 == '${chebi_src_id}') {print $1 "\tCHEBI:" $3}}' | sort -k1 > ${unichem_dir}/chebi.txt
zcat ${unichem_dir}/UC_XREF.txt.gz | awk '{if ($2 == '${chembl_src_id}') {print $1 "\tCHEMBL.COMPOUND:" $3}}' | sort -k1 > ${unichem_dir}/chembl.txt
zcat ${unichem_dir}/UC_XREF.txt.gz | awk '{if ($2 == '${drugbank_src_id}') {print $1 "\tDRUGBANK:" $3}}' | sort -k1 > ${unichem_dir}/drugbank.txt

# Pairwise joins on the shared unichem_id; keep only the two CURIE columns.
join ${unichem_dir}/chembl.txt ${unichem_dir}/chebi.txt | sed 's/ /\t/g' | cut -f2-3 >> ${output_tsv_file}
join ${unichem_dir}/chembl.txt ${unichem_dir}/drugbank.txt | sed 's/ /\t/g' | cut -f2-3 >> ${output_tsv_file}
join ${unichem_dir}/chebi.txt ${unichem_dir}/drugbank.txt | sed 's/ /\t/g' | cut -f2-3 >> ${output_tsv_file}

date
echo "================= finished extract-unichem.sh ================="
|
# FROGBOT bootstrap: install system deps, build a python3.9 virtualenv,
# install requirements and launch the bot.
echo '[FROGBOT] Updating system'
sudo apt-get update
echo '[FROGBOT] Installing ffmpeg'
# BUG FIX: added -y (and apt -> apt-get for script use) so unattended runs do
# not hang on the interactive confirmation prompt.
sudo apt-get install -y ffmpeg
echo '[FROGBOT] Installing the python 3.9 environment'
sudo apt-get install -y python3.9-venv
echo '[FROGBOT] Starting virtual environment'
python3.9 -m venv env
source env/bin/activate
pip3 install --upgrade pip
echo '[FROGBOT] Installing python packages'
python3 -m pip install -r requirements.txt
echo '[FROGBOT] Finished installations'
echo '[FROGBOT] Starting bot'
python3 -B main.py
<filename>models/mandelbulb.h
#pragma once
#include <render/render.h>
#include <lib/object.h>
/* Forward declaration; full definition lives in the configuration module. */
struct Config;

/* Builds a renderable mandelbulb fractal object using the given renderer and
 * configuration. Caller owns the returned object. */
struct Object* CreateMandelbulb(struct Render*, struct Config*);
|
<filename>LYCompLayout/Classes/Models/LYLayoutModel.h<gh_stars>0
//
// LYLayoutModel.h
// LYCOMPLAYOUT
//
// CREATED BY BLODELY ON 2017-02-15.
// COPYRIGHT (C) 2017 BLODELY. ALL RIGHTS RESERVED.
//
#import <Foundation/Foundation.h>
/**
 Layout configuration model that can be persisted and round-tripped through
 plist or XML files.
 */
@interface LYLayoutModel : NSObject

/**
 UID, Not null
 */
@property (nonatomic, strong) NSString *UID;

/**
 Persist Object
 */
- (void)persist;

/**
 Export configuration as 'plist' format.
 */
- (void)exportToPlist;

/**
 Import configuration data from 'plist' file at specific filepath

 @param filepath plist file path
 */
- (void)readPlistAtFilepath:(NSString *)filepath;

/**
 Export configuration as 'XML' format.
 */
- (void)exportToXML;

/**
 Import configuration data from 'XML' file at specific filepath

 @param filepath XML file path
 */
- (void)readXMLAtFilepath:(NSString *)filepath;

@end
|
#!/usr/bin/env bash
# Deploy the ropsten -> pangoro message-bridge contracts (fee market, BEEFY
# light client, outbound/inbound lanes) and register the lanes with the
# target chain's execution layer.
# Requires: INFURA_KEY, ETH_FROM, MODE in the environment; seth/dapp tooling;
# common.sh deployment helpers next to this script.
set -e

unset TARGET_CHAIN
unset NETWORK_NAME
unset ETH_RPC_URL

export NETWORK_NAME=ropsten
export TARGET_CHAIN=pangoro
export ETH_RPC_URL=https://ropsten.infura.io/$INFURA_KEY

echo "ETH_FROM: ${ETH_FROM}"

# import the deployment helpers
. $(dirname $0)/common.sh

# bsctest to pangoro bridge config
# Chain/lane positions as understood by the message protocol.
this_chain_pos=1
this_out_lane_pos=0
this_in_lane_pos=1
bridged_chain_pos=0
bridged_in_lane_pos=1
bridged_out_lane_pos=0

# fee market config
COLLATERAL_PERORDER=$(seth --to-wei 0.01 ether)
SLASH_TIME=86400
RELAY_TIME=86400
# 300 : 0.01
PRICE_RATIO=100
SimpleFeeMarket=$(deploy SimpleFeeMarket $COLLATERAL_PERORDER $SLASH_TIME $RELAY_TIME $PRICE_RATIO)

# darwinia beefy light client config
# Pangoro
NETWORK=0x50616e676f726f00000000000000000000000000000000000000000000000000
BEEFY_SLASH_VALUT=$ETH_FROM
BEEFY_VALIDATOR_SET_ID=0
BEEFY_VALIDATOR_SET_LEN=4
BEEFY_VALIDATOR_SET_ROOT=0xde562c60e8a03c61ef0ab761968c14b50b02846dd35ab9faa9dea09d00247600

DarwiniaLightClient=$(deploy DarwiniaLightClient \
    $NETWORK \
    $BEEFY_SLASH_VALUT \
    $BEEFY_VALIDATOR_SET_ID \
    $BEEFY_VALIDATOR_SET_LEN \
    $BEEFY_VALIDATOR_SET_ROOT)

# Message lanes, wired to the light client and fee market above.
OutboundLane=$(deploy OutboundLane \
    $DarwiniaLightClient \
    $SimpleFeeMarket \
    $this_chain_pos \
    $this_out_lane_pos \
    $bridged_chain_pos \
    $bridged_in_lane_pos 1 0 0)

InboundLane=$(deploy InboundLane \
    $DarwiniaLightClient \
    $this_chain_pos \
    $this_in_lane_pos \
    $bridged_chain_pos \
    $bridged_out_lane_pos 0 0)

# Point the fee market at the new outbound lane.
seth send -F $ETH_FROM $SimpleFeeMarket "setOutbound(address,uint)" $OutboundLane 1 --chain bsctest

# Register both lanes with the target chain's execution layer contract.
ExecutionLayer=$(jq -r ".[\"$NETWORK_NAME\"].ExecutionLayer" "$PWD/bin/addr/$MODE/$TARGET_CHAIN.json")
(set -x; seth send -F $ETH_FROM $ExecutionLayer "registry(uint32,uint32,address,uint32,address)" $bridged_chain_pos $this_out_lane_pos $OutboundLane $this_in_lane_pos $InboundLane --rpc-url https://pangoro-rpc.darwinia.network)
|
/**
 * Event names emitted by the player wrapper. Values are the kebab-case
 * strings dispatched on the event bus; members cover the core playback
 * lifecycle, ad lifecycle, and embed/UI notifications.
 */
export enum EventType {
  Destroy = 'destroy',
  Ready = 'ready',
  Unstarted = 'unstarted',
  Ended = 'ended',
  Played = 'played',
  Paused = 'paused',
  Buffering = 'buffering',
  Cued = 'cued',
  VolumeChange = 'volume-change',
  Seeked = 'seeked',
  QualityChange = 'quality-change',
  RateChange = 'rate-change',
  ApiChange = 'api-change',
  SizeChange = 'size-change',
  FullscreenChange = 'fullscreen-change',
  Error = 'error',
  AdUnstarted = 'ad-unstarted',
  AdEnded = 'ad-ended',
  AdPlayed = 'ad-played',
  AdPaused = 'ad-paused',
  AdBuffering = 'ad-buffering',
  AdCued = 'ad-cued',
  /** The share panel on embedded YouTube videos */
  SharePanelOpened = 'share-panel-opened',
  LoadProgress = 'load-progress',
  VideoProgress = 'video-progress',
  ReloadRequired = 'reload-required',
  CueRangeEnter = 'cue-range-enter',
  CueRangeExit = 'cue-range-exit',
  ShareClicked = 'share-clicked',
  ConnectionIssue = 'connection-issue',
  VideoDataChange = 'video-data-change',
  PlaylistUpdate = 'playlist-update',
  PlayVideo = 'play-video',
  // NOTE(review): "Coundown" is a typo for "Countdown", but both the member
  // name and its string value are public API -- renaming would break
  // consumers, so it is left as-is.
  AutonavCoundownStarted = 'autonav-coundown-started'
};
// This module was autogenerated. Please don't edit.
// Re-exports the default export of @material-ui/core/TableSortLabel under an
// "_Unsafe" alias (presumably for FFI bindings -- confirm against generator).
exports._UnsafeTableSortLabel = require("@material-ui/core/TableSortLabel").default;
package com.qinghua.demo.activity;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.view.View;
import android.widget.Toast;
import com.alibaba.android.arouter.facade.annotation.Route;
import com.qinghua.demo.R;
import com.qinghua.demo.widget.CircleMenuLayout;
@Route(path = "/second/circle_menu")
/**
 * Demo activity showing a circular menu (CircleMenuLayout) styled after a
 * banking app home screen. Each sector shows an icon + label; taps are
 * acknowledged with a Toast. Routed via ARouter at /second/circle_menu.
 */
public class CircleMenuActivity extends AppCompatActivity {

    private CircleMenuLayout mCircleMenuLayout;

    // Sector labels; order must match mItemImgs below.
    private String[] mItemTexts = new String[] { "安全中心 ", "特色服务", "投资理财",
            "转账汇款", "我的账户", "信用卡" };
    // Sector icons, one per label.
    private int[] mItemImgs = new int[] { R.mipmap.home_mbank_1_normal,
            R.mipmap.home_mbank_2_normal, R.mipmap.home_mbank_3_normal,
            R.mipmap.home_mbank_4_normal, R.mipmap.home_mbank_5_normal,
            R.mipmap.home_mbank_6_normal };

    @Override
    protected void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);
        // Swap the layout file yourself to compare the different looks.
        setContentView(R.layout.activity_circle_menu_ccb);
        mCircleMenuLayout = (CircleMenuLayout) findViewById(R.id.id_menulayout);
        mCircleMenuLayout.setMenuItemIconsAndTexts(mItemImgs, mItemTexts);
        mCircleMenuLayout.setOnMenuItemClickListener(new CircleMenuLayout.OnMenuItemClickListener()
        {
            // Sector tapped: show its label.
            @Override
            public void itemClick(View view, int pos)
            {
                Toast.makeText(CircleMenuActivity.this, mItemTexts[pos],
                        Toast.LENGTH_SHORT).show();
            }

            // Center hub tapped.
            @Override
            public void itemCenterClick(View view)
            {
                Toast.makeText(CircleMenuActivity.this,
                        "you can do something just like ccb ",
                        Toast.LENGTH_SHORT).show();
            }
        });
    }
}
|
#!/usr/bin/env bash
# Container entrypoint: run pktvisord with PKTVISORD_ARGS from the
# environment, defaulting to monitoring interface eth0. `exec` replaces the
# shell so pktvisord becomes PID 1 and receives signals directly.
[[ "$PKTVISORD_ARGS" == "" ]] && PKTVISORD_ARGS="eth0"
exec pktvisord $PKTVISORD_ARGS
|
def extract_warranty_info(license_text: str) -> dict:
    """Parse warranty-style sections out of a license text.

    A section header is any all-uppercase line containing ``':'``; all lines
    up to the next header are collected (space-joined) as that section's text.

    Bug fixed: the original used ``line.strip(':')`` for the key, which only
    trims colons at the *ends* of the line -- a header like ``"NO WARRANTY:
    some text"`` became the dictionary key verbatim (colon and all) and the
    inline text after the colon was discarded. The header is now split at the
    first colon; anything after it seeds the section body.

    Args:
        license_text: Full license text, newline-separated.

    Returns:
        Mapping of section header (without the colon) to section body text.
    """
    warranty_info: dict = {}
    warranty_type = None
    warranty_text = ''
    for line in license_text.split('\n'):
        if line.isupper() and ':' in line:
            # Flush the previous section before starting a new one.
            if warranty_type:
                warranty_info[warranty_type] = warranty_text.strip()
            header, _, rest = line.partition(':')
            warranty_type = header.strip()
            # Text on the header line (after the colon) starts the body;
            # the trailing space keeps the join format of subsequent lines.
            warranty_text = rest.strip() + ' '
        else:
            warranty_text += line.strip() + ' '
    # Flush the final section.
    if warranty_type:
        warranty_info[warranty_type] = warranty_text.strip()
    return warranty_info
<filename>controllers/helpers/helper_functions.go
// Package helpers contains the supporting functions for the operator at large that are shared
package helpers
import (
"math"
"strings"
)
//============================================================================================ HELPER FUNCTIONS
// ReturnLonger returns whichever of the two strings is longer, keeping the
// current value when both have equal length.
func ReturnLonger(currentData string, newData string) string {
	if len(currentData) >= len(newData) {
		return currentData
	}
	return newData
}
// StrPad returns the input string padded on the left, right or both sides using padType to the specified padding length padLength.
//
// padType must be one of "RIGHT", "LEFT" or "BOTH".
// NOTE(review): for any other padType value the switch falls through and the
// empty string is returned (not the input) -- confirm that is intended.
//
// Example:
//   input := "Codes";
//   StrPad(input, 10, " ", "RIGHT") // produces "Codes     "
//   StrPad(input, 10, "-=", "LEFT") // produces "=-=-=Codes"
//   StrPad(input, 10, "_", "BOTH")  // produces "__Codes___"
//   StrPad(input, 6, "___", "RIGHT") // produces "Codes_"
//   StrPad(input, 3, "*", "RIGHT")  // produces "Codes"
func StrPad(input string, padLength int, padString string, padType string) string {
	var output string

	inputLength := len(input)
	padStringLength := len(padString)

	// Already long enough: return unchanged.
	if inputLength >= padLength {
		return input
	}

	// Number of padString repetitions guaranteed to cover the deficit;
	// the result is trimmed to the exact length below.
	repeat := math.Ceil(float64(1) + (float64(padLength-padStringLength))/float64(padStringLength))

	switch padType {
	case "RIGHT":
		output = input + strings.Repeat(padString, int(repeat))
		output = output[:padLength]
	case "LEFT":
		output = strings.Repeat(padString, int(repeat)) + input
		output = output[len(output)-padLength:]
	case "BOTH":
		// Split the padding between both sides; the right side gets the
		// extra character when the total padding is odd.
		length := (float64(padLength - inputLength)) / float64(2)
		repeat = math.Ceil(length / float64(padStringLength))
		output = strings.Repeat(padString, int(repeat))[:int(math.Floor(float64(length)))] + input + strings.Repeat(padString, int(repeat))[:int(math.Ceil(float64(length)))]
	}

	return output
}
// DifferenceInStringSlices returns a []string of the unique items between two []string
func DifferenceInStringSlices(slice1 []string, slice2 []string) []string {
var diff []string
// Loop two times, first to find slice1 strings not in slice2,
// second loop to find slice2 strings not in slice1
for i := 0; i < 2; i++ {
for _, s1 := range slice1 {
found := false
for _, s2 := range slice2 {
if s1 == s2 {
found = true
break
}
}
// String not found. We add it to return slice
if !found {
diff = append(diff, s1)
}
}
// Swap the slices, only if it was the first loop
if i == 0 {
slice1, slice2 = slice2, slice1
}
}
return diff
}
// RemoveStringFromSlice takes a slice and finds a string then returns a new slice without it
func RemoveStringFromSlice(r string, s []string) []string {
for i, v := range s {
if v == r {
return append(s[:i], s[i+1:]...)
}
}
return s
}
|
#include "Collider.h"
#include <iostream>
// Collider holds no state; construction and destruction are no-ops.
Collider::Collider()
{
}

Collider::~Collider()
{
}
// Two spheres intersect exactly when the distance between their centers does
// not exceed the sum of their radii (touching counts as a collision).
bool Collider::checkSphereCollision(const glm::vec3& center1, float radius1,
        const glm::vec3& center2, float radius2)
{
    const glm::vec3 delta = center1 - center2;
    const float combinedRadius = radius1 + radius2;
    return glm::length(delta) <= combinedRadius;
}
// Tests whether an (infinite) ray intersects a sphere using the quadratic
// discriminant of the line-sphere equation. Returns true on any intersection
// (tangent included). NOTE(review): intersections *behind* the ray origin
// also return true -- confirm whether callers expect a half-line test.
bool Collider::checkRaySphereCollision(const glm::vec3& rayOrg, const glm::vec3& rayDir,
        const glm::vec3& sphereCenter, float radius)
{
    // https://en.wikipedia.org/wiki/Line%E2%80%93sphere_intersection
    glm::vec3 oc = rayOrg - sphereCenter;           // origin relative to sphere center
    glm::vec3 u = glm::normalize(rayDir);           // unit ray direction
    float b = glm::dot(oc, u);                      // half the linear coefficient
    float c = glm::dot(oc, oc) - (radius * radius); // constant term
    float h = (b * b) - c;                          // quarter discriminant

    // Negative discriminant: the line misses the sphere entirely.
    if (h < 0) {
        return false;
    }

    // TODO: Return collision data
    // if (h > 0)
    // {
    //     h = glm::sqrt(h);
    //     float d1 = -b + h;
    //     float d2 = -b - h;
    //     float rayLength = glm::length(rayDir);
    //     glm::vec3 p1 = rayOrg + (u * d1);
    //     glm::vec3 p2 = rayOrg + (u * d2);
    //     float dot1 = glm::dot(u, p1);
    //     float dot2 = glm::dot(u, p2);
    //     std::cout << "d1:" << d1 << " d2:" << d2 << " rayLength:" << rayLength << "\n";
    //     std::cout << "dot1:" << dot1 << " dot2:" << dot2 << std::endl;
    // }
    return true;
}
/**
* This program and the accompanying materials
* are made available under the terms of the License
* which accompanies this distribution in the file LICENSE.txt
*/
package com.archimatetool.editor.diagram.actions;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.eclipse.gef.EditPart;
import org.eclipse.gef.commands.Command;
import org.eclipse.gef.commands.CompoundCommand;
import org.eclipse.gef.ui.actions.SelectionAction;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.swt.custom.BusyIndicator;
import org.eclipse.swt.widgets.Display;
import org.eclipse.ui.IWorkbenchPart;
import com.archimatetool.editor.diagram.commands.DiagramCommandFactory;
import com.archimatetool.editor.model.commands.DeleteArchimateElementCommand;
import com.archimatetool.editor.model.commands.DeleteArchimateRelationshipCommand;
import com.archimatetool.editor.model.commands.NonNotifyingCompoundCommand;
import com.archimatetool.editor.utils.PlatformUtils;
import com.archimatetool.model.IArchimateConcept;
import com.archimatetool.model.IArchimateElement;
import com.archimatetool.model.IArchimateRelationship;
import com.archimatetool.model.IDiagramModelArchimateComponent;
import com.archimatetool.model.IDiagramModelArchimateConnection;
import com.archimatetool.model.IDiagramModelArchimateObject;
import com.archimatetool.model.IDiagramModelObject;
import com.archimatetool.model.util.ArchimateModelUtils;
/**
 * Delete from Model Action
 *
 * Deletes the ArchiMate concepts behind the selected diagram parts from the
 * underlying model -- including their relationships and every diagram
 * reference -- after warning the user when a concept is used elsewhere.
 *
 * @author <NAME>
 */
public class DeleteFromModelAction extends SelectionAction {

    public static final String ID = "DeleteFromModelAction"; //$NON-NLS-1$
    public static final String TEXT = Messages.DeleteFromModelAction_0;

    public DeleteFromModelAction(IWorkbenchPart part) {
        super(part);
        setText(TEXT);
        setId(ID);
    }

    /**
     * Enabled when at least one selected edit part wraps an ArchiMate
     * diagram component (object or connection).
     */
    @Override
    protected boolean calculateEnabled() {
        List<?> list = getSelectedObjects();

        if(list.isEmpty()) {
            return false;
        }

        for(Object object : list) {
            if(object instanceof EditPart) {
                Object model = ((EditPart)object).getModel();
                if(model instanceof IDiagramModelArchimateComponent) {
                    return true;
                }
            }
        }

        return false;
    }

    @Override
    public void run() {
        List<?> selection = getSelectedObjects();

        // Use a Set so a concept reached via several selected parts (or via
        // overlapping relationship traversal) is only deleted once.
        Set<IArchimateConcept> archimateConcepts = new HashSet<IArchimateConcept>();

        // Gather referenced model concepts
        for(Object object : selection) {
            if(object instanceof EditPart) {
                Object model = ((EditPart)object).getModel();

                if(model instanceof IDiagramModelArchimateObject) {
                    IArchimateElement element = ((IDiagramModelArchimateObject)model).getArchimateElement();
                    archimateConcepts.add(element);

                    // Element's relationships
                    for(IArchimateRelationship relation : ArchimateModelUtils.getAllRelationshipsForConcept(element)) {
                        archimateConcepts.add(relation);

                        // Relation's relationships (relationships attached to relationships)
                        for(IArchimateRelationship r : ArchimateModelUtils.getAllRelationshipsForConcept(relation)) {
                            archimateConcepts.add(r);
                        }
                    }
                }
                else if(model instanceof IDiagramModelArchimateConnection) {
                    IArchimateRelationship relation = ((IDiagramModelArchimateConnection)model).getArchimateRelationship();
                    archimateConcepts.add(relation);

                    // Relation's relationships
                    for(IArchimateRelationship r : ArchimateModelUtils.getAllRelationshipsForConcept(relation)) {
                        archimateConcepts.add(r);
                    }
                }
            }
        }

        // Check whether any of these concepts are referenced in other diagrams
        // and, if so, ask the user to confirm before removing them everywhere.
        if(hasMoreThanOneReference(archimateConcepts)) {
            if(!MessageDialog.openQuestion(Display.getDefault().getActiveShell(),
                    Messages.DeleteFromModelAction_0,
                    Messages.DeleteFromModelAction_1 +
                    "\n\n" + //$NON-NLS-1$
                    Messages.DeleteFromModelAction_2)) {
                return;
            }
        }

        // TODO: Bug on Mac 10.12 and newer - Open dialog does not close straight away
        // See https://bugs.eclipse.org/bugs/show_bug.cgi?id=527306
        if(PlatformUtils.isMac()) {
            while(Display.getCurrent().readAndDispatch());
        }

        // Create commands: delete each concept from the model plus every
        // diagram object/connection that references it, as one undoable unit.
        CompoundCommand compoundCommand = new NonNotifyingCompoundCommand(TEXT);

        for(IArchimateConcept archimateConcept : archimateConcepts) {
            if(archimateConcept instanceof IArchimateElement) {
                // Element
                Command cmd = new DeleteArchimateElementCommand((IArchimateElement)archimateConcept);
                compoundCommand.add(cmd);

                // Diagram Model Objects
                for(IDiagramModelObject dmo : ((IArchimateElement)archimateConcept).getReferencingDiagramObjects()) {
                    cmd = DiagramCommandFactory.createDeleteDiagramObjectCommand(dmo);
                    compoundCommand.add(cmd);
                }
            }
            else if(archimateConcept instanceof IArchimateRelationship) {
                // Relationship
                Command cmd = new DeleteArchimateRelationshipCommand((IArchimateRelationship)archimateConcept);
                compoundCommand.add(cmd);

                // Diagram Model Connections
                for(IDiagramModelArchimateConnection dmc : ((IArchimateRelationship)archimateConcept).getReferencingDiagramConnections()) {
                    cmd = DiagramCommandFactory.createDeleteDiagramConnectionCommand(dmc);
                    compoundCommand.add(cmd);
                }
            }
        }

        // Execute behind a busy cursor; deletion can touch many diagrams.
        BusyIndicator.showWhile(null, new Runnable() {
            @Override
            public void run() {
                execute(compoundCommand);
            }
        });
    }

    /**
     * @return true if any concept is referenced by more than one diagram
     * component, i.e. deleting it affects diagrams beyond the current one.
     */
    private boolean hasMoreThanOneReference(Set<IArchimateConcept> archimateConcepts) {
        for(IArchimateConcept archimateConcept : archimateConcepts) {
            if(archimateConcept.getReferencingDiagramComponents().size() > 1) {
                return true;
            }
        }

        return false;
    }
}
|
<gh_stars>0
import * as classnames from "classnames"
import * as React from "react"
// Text input used both for creating new todos and for editing existing ones.
// Props (untyped `any` in the original -- NOTE(review): consider a proper
// props interface): text?, placeholder?, editing?, newTodo?, onSave(text).
export default class TodoTextInput extends React.PureComponent<any, any> {
  // Local draft of the input value, seeded from the `text` prop.
  public state = {
    text: this.props.text || ""
  }

  // Commit on Enter (key code 13); in "new todo" mode the field is cleared
  // after saving so another todo can be typed immediately.
  public handleSubmit = (e: any) => {
    const text = e.target.value.trim()
    if (e.which === 13) {
      this.props.onSave(text)
      if (this.props.newTodo) {
        this.setState({ text: "" })
      }
    }
  }

  // Keep local draft in sync with the input (controlled component).
  public handleChange = (e: any) => {
    this.setState({ text: e.target.value })
  }

  // When editing an existing todo, losing focus also commits the value.
  public handleBlur = (e: any) => {
    if (!this.props.newTodo) {
      this.props.onSave(e.target.value)
    }
  }

  public render() {
    return (
      <input
        className={classnames({
          "edit": this.props.editing,
          "new-todo": this.props.newTodo
        })}
        type="text"
        placeholder={this.props.placeholder}
        autoFocus={true}
        value={this.state.text}
        onBlur={this.handleBlur}
        onChange={this.handleChange}
        onKeyDown={this.handleSubmit}
      />
    )
  }
}
|
<reponame>psema4/Atomic-OS
/* commands.js
*
* Atomic OS WASH command
*
* List available commands from /bin to stdout
*
* @author <NAME> <<EMAIL> (http://www.psema4.com)
* @version 2.0.0
*/
window.system = window.system || {};
system.bin = system.bin || {};

/* Dummy constructor
 *
 * Access programmatically via system.bin.commands.!!methodName!!
 * @constructor
 */
system.bin.commands = {
    /* @method help
     * @returns {string} Returns a simple string synopsis for this command
     *
     * Simple synopsis on this command, used by the <a href="help.html">help command</a>
     */
    help: function() {
        return "List available commands from /bin to stdout\n\n Usage: commands";
    },

    /* @method exec
     * @param {Array} args A list of arguments the command was called with
     * Executes command with args. The calling HxProcess is available as **this**
     * and its first 3 file descriptors are stdin, stdout, and stderr respectively.
     */
    exec: function(args) {
        // 'this' is the calling process; fall back to an empty descriptor table.
        var descriptors = this.fd || [];
        var stdin  = descriptors.length > 0 ? descriptors[0] : false;
        var stdout = descriptors.length > 1 ? descriptors[1] : false;
        var stderr = descriptors.length > 2 ? descriptors[2] : false;

        try {
            // Delegate to the shell: listing /bin is the command list.
            wash("ls /bin");
        } catch(e) {
            if (stderr) {
                stderr.write('command exception: ' + e);
            } else {
                console.log('command exception:');
                console.dir(e);
            }
        }
    }
};
|
<reponame>addcolouragency/craft_storefront<gh_stars>0
/** Route handler for `GET /shipping-options` (documented by the OAS block below). */
declare const _default: (req: any, res: any) => Promise<void>;
/**
 * @oas [get] /shipping-options
 * operationId: GetShippingOptions
 * summary: Retrieve Shipping Options
 * description: "Retrieves a list of Shipping Options."
 * parameters:
 *   - (query) is_return {boolean} Whether return Shipping Options should be included. By default all Shipping Options are returned.
 *   - (query) product_ids {string} A comma separated list of Product ids to filter Shipping Options by.
 *   - (query) region_id {string} the Region to retrieve Shipping Options from.
 * tags:
 *   - Shipping Option
 * responses:
 *   200:
 *     description: OK
 *     content:
 *       application/json:
 *         schema:
 *           properties:
 *             shipping_options:
 *               type: array
 *               items:
 *                 $ref: "#/components/schemas/shipping_option"
 */
export default _default;

/** Accepted query-string parameters for the shipping-options listing above. */
export declare class StoreGetShippingOptionsParams {
    /** Comma-separated list of Product ids to filter Shipping Options by. */
    product_ids?: string;
    /** Id of the Region to retrieve Shipping Options from. */
    region_id?: string;
    /** Whether return Shipping Options should be included (query-string boolean). */
    is_return?: string;
}
|
from collections import namedtuple
# Cell states for a slice: solid ground, water, or open air.
GRUND = 5
WATER = 15
AIR = 25

# (width, length) coordinate pair addressing one cell of a slice.
Coords = namedtuple("Coords", "w l")


class CubeSlice:
    """One horizontal level of a cube, derived from a 2-D height map."""

    def __init__(self, data, level):
        self.level = level
        self.width = len(data)
        self.length = len(data[0])
        self.shape = (self.width, self.length)
        # A cell is ground while the terrain height exceeds this level,
        # otherwise it starts out as water.
        self._slice = []
        for row in data:
            self._slice.append([GRUND if level < h else WATER for h in row])

    def is_valid_coords(self, coords: Coords):
        """True when *coords* addresses a cell inside this slice."""
        w, l = coords
        return 0 <= w < self.width and 0 <= l < self.length

    def items(self):
        """Yield (Coords, value) pairs in row-major order."""
        for w in range(self.width):
            for l in range(self.length):
                yield Coords(w, l), self._slice[w][l]

    def count(self, value):
        """Number of cells currently holding *value*."""
        return sum(1
                   for w in range(self.width)
                   for l in range(self.length)
                   if self._slice[w][l] == value)

    def __setitem__(self, key: Coords, value):
        w, l = key
        self._slice[w][l] = value

    def __getitem__(self, key: Coords):
        w, l = key
        return self._slice[w][l]

    def __repr__(self):
        return f'<CubeSlice level {self.level}>'
class Cube:
    """A 3-D volume built from a 2-D height map: one CubeSlice per level."""

    def __init__(self, data):
        self.height = self.get_height(data)
        self.width = self.get_width(data)
        self.length = self.get_length(data)
        self.slices = [CubeSlice(data, level) for level in range(self.height)]
        # Kept only for backward compatibility with the legacy __next__
        # protocol below; __iter__ no longer touches it.
        self._iter_index = 0

    @staticmethod
    def get_height(data):
        """Tallest value in the height map, i.e. the number of levels."""
        return max(max(row) for row in data)

    @staticmethod
    def get_width(data):
        """Number of rows in the height map."""
        return len(data)

    @staticmethod
    def get_length(data):
        """Common row length; raises ValueError if rows differ."""
        lengths = {len(row) for row in data}
        if len(lengths) > 1:
            raise ValueError("Data has inconsistent length")
        return list(lengths)[0]

    def count(self, value):
        """Total number of cells holding *value* across every slice."""
        return sum(cube_slice.count(value) for cube_slice in self.slices)

    def __iter__(self):
        # Bug fix: the class previously returned `self` here, so every loop
        # shared one _iter_index — nested or interleaved iteration over the
        # same Cube skipped slices. A fresh list iterator is independent.
        return iter(self.slices)

    def __next__(self):
        # Legacy path for callers that used next(cube) on the Cube directly.
        current_index = self._iter_index
        if current_index >= len(self.slices):
            self._iter_index = 0
            raise StopIteration
        self._iter_index += 1
        return self.slices[current_index]
|
package io.opensphere.core.preferences;

/**
 * A listener for preference changes. Implementations are notified whenever a
 * preference value they are registered for is modified.
 */
@FunctionalInterface
public interface PreferenceChangeListener
{
    /**
     * Invoked when a preference change has occurred.
     *
     * @param evt the {@link PreferenceChangeEvent} describing the change
     */
    void preferenceChange(PreferenceChangeEvent evt);
}
|
# Google Cloud Platform Application
from google.appengine.api import appinfo

# NOTE(review): appinfo.AppInfoExternal models the contents of app.yaml; it is
# a configuration object, not a web application — TODO confirm this snippet
# was ever runnable as written.
app = appinfo.AppInfoExternal(
    application='myapp',
    version='1',
    runtime='python27',
    api_version='1',
    threadsafe=True,
)

# Create a handler for the endpoint
def hello_world():
    print("Hello, world!")

# Map the handler to the endpoint
# NOTE(review): AppInfoExternal.handlers normally holds appinfo.URLMap entries,
# not (path, callable) tuples — verify against the intended deployment flow.
app.handlers.extend([
    ('/', hello_world)
])

# Deploy the application
# NOTE(review): no deploy() method is visible on AppInfoExternal here;
# deployment is usually performed with external tooling — confirm.
app.deploy()
#!/bin/sh
# Report the 1-minute load average and full `uptime` text for the host.
# Output format (consumed by monitoring):
#   Load Average=<load>
#   status_text=Uptime:<uptime output>

os=`uname`
load=0

# Use a private temp file instead of ./loadtemp, and always clean it up.
# (The original also left loadtemp behind and ran `cat` on a missing file
# when the OS was unrecognised.)
tmpfile=`mktemp /tmp/loadtemp.XXXXXX` || exit 1
trap 'rm -f "$tmpfile"' EXIT

# Every supported OS used the same extraction, so one case arm suffices:
# take the third-from-last field of the first uptime line (the 1-minute
# load average), truncated to 4 characters.
case "$os" in
    linux|Linux|SunOS|HP-UX|AIX|SCO_SV|UnixWare)
        uptime > "$tmpfile"
        load=`sed -n 1p "$tmpfile" | awk '{print substr($(NF-2),1,4)}'`
        ;;
esac

status_text=`cat "$tmpfile"`
echo "Load Average=$load"
echo "status_text=Uptime:$status_text"
exit 0
|
<reponame>nutexadmin/Nutex-OpenApi
package io.bhex.api.client.domain.option.request;

import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;

/**
 * Request parameters for querying option positions.
 *
 * <p>Lombok generates the builder, constructors, getters/setters, equals,
 * hashCode and toString for this value class.
 */
@Builder
@NoArgsConstructor
@AllArgsConstructor
@Data
public class OptionPositionRequest {
    /** Option symbol to filter positions by. */
    private String symbol;
    /** Receive window in milliseconds — presumably the permitted clock-skew
     * tolerance for the request; confirm against the exchange API docs. */
    private Long recvWindow;
    /** Request timestamp — presumably epoch milliseconds; confirm against
     * the exchange API docs. */
    private Long timestamp;
}
|
/// Returns the running (prefix) sums of `input`.
/// An empty array yields an empty array.
func cumulativeSum(_ input: [Int]) -> [Int] {
    var runningTotal = 0
    return input.map { value in
        runningTotal += value
        return runningTotal
    }
}

// Test cases
let input1 = [1, 2, 3, 4]
let input2 = [3, 1, 5, 2, 7]
print(cumulativeSum(input1)) // Output: [1, 3, 6, 10]
print(cumulativeSum(input2)) // Output: [3, 4, 9, 11, 18]
class TagNotFoundError(Exception):
    """Raised when a tag has no matching line class in ``cidlineclasses``."""
def validate_cid_tag(cid, tag):
    """Validate the CID tag against cidlineclasses.

    This is a generator: it yields the ``cidlineclasses`` attribute named
    *tag*, pausing at the yield, and converts a missing attribute into
    :class:`TagNotFoundError`.

    :param cid: the CID being validated (not inspected here)
    :param tag: attribute name expected to exist on ``cidlineclasses``
    :raises TagNotFoundError: if ``cidlineclasses`` has no such attribute
    """
    try:
        yield getattr(cidlineclasses, tag)  # execution pauses here
    except AttributeError:
        # Bug fix: the previous `exc.TagNotFoundError` referenced an
        # undefined name `exc`, so the handler itself raised NameError and
        # masked the intended error. Use the module-local exception class.
        raise TagNotFoundError(f"Tag '{tag}' not found in cidlineclasses")
#!/bin/bash
# Copyright 2016 - 2021 Crunchy Data Solutions, Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Images built with the standard CCP tag.
standard_images="restore pgdump pgrestore postgres-gis pgbadger pgpool \
    watch backup postgres pgbouncer pgadmin4 upgrade pgbench \
    pgbasebackup-restore postgres-ha postgres-gis-ha"

for image in $standard_images
do
    docker rmi -f $CCP_IMAGE_PREFIX/radondb-$image:$CCP_IMAGE_TAG
    docker rmi -f radondb-$image
# docker rmi -f registry.radondb.openshift.com/jeff-project/radondb-$image:$CCP_IMAGE_TAG
done

# GIS images additionally carry a PostGIS-specific tag.
for image in postgres-gis postgres-gis-ha
do
    docker rmi -f $CCP_IMAGE_PREFIX/radondb-$image:$CCP_POSTGIS_IMAGE_TAG
    docker rmi -f radondb-$image
done

exit
|
import sbt._
object AnalyzerBuild extends Build {
  // Root project depends on the dispatch-github library fetched directly
  // from GitHub as a source dependency (not a published artifact).
  lazy val root = Project(id = "analyzer", base = file(".")) dependsOn dispatchGitHubProject

  // Pinned to the 0.1-SNAPSHOT ref of erikvdv1/dispatch-github.
  lazy val dispatchGitHubProject = GitHubDependency("erikvdv1", "dispatch-github", "0.1-SNAPSHOT").toRootProject
}
/** Reference to a project hosted on GitHub, addressable as an sbt source dependency. */
case class GitHubDependency(owner: String, repository: String, ref: String = "master") {
  private val host = "github.com"

  // "<host>/<owner>/<repository>.git#<ref>" — shared tail of both URI schemes.
  private def location = List(host, owner, repository).mkString("/") + ".git#" + ref

  def toHttpsUri = uri("https://" + location)

  def toGitUri = uri("git://" + location)

  def toRootProject = RootProject(toGitUri)
}
|
<gh_stars>0
/**
*
* FrameContainer Redux Container
*
*/
import React, { Component, PropTypes } from 'react';
import { connect } from 'react-redux';
import { createStructuredSelector } from 'reselect';
import { selectName } from './selectors';
import { sampleAction } from './actions';
class FrameContainer extends Component { // eslint-disable-line react/prefer-stateless-function
  constructor(props) {
    super(props);
    // Pre-bind so the handler keeps `this` when passed around as a callback.
    this.handleEvent = this.handleEvent.bind(this);
    this.state = {
      exampleValue: '',
    };
  }

  // Dispatches the sample action for the current `name` prop and swallows
  // the browser event entirely (no default action, no bubbling).
  handleEvent(e) {
    e.preventDefault();
    e.stopPropagation();
    const {dispatch, name} = this.props; // eslint-disable-line
    dispatch(sampleAction(name));
  }

  // Renders a padded wrapper around whatever children are passed in.
  // NOTE(review): exampleValue/exampleProp are destructured but unused —
  // they look like generator-template leftovers; confirm before removing.
  render() {
    const {exampleValue} = this.state; // eslint-disable-line
    const {exampleProp} = this.props; // eslint-disable-line
    return (
      <div style={{ padding: '2em' }}>
        {this.props.children}
      </div>
    ); // eslint-disable-line
  }
}
// Runtime prop contracts.
// NOTE(review): `PropTypes` is imported from 'react' at the top of this file;
// that export was removed in React >= 15.5 and should come from the
// 'prop-types' package — confirm the React version in use.
FrameContainer.propTypes = {
  dispatch: PropTypes.func,
  name: PropTypes.string,
  exampleProp: PropTypes.string,
  children: PropTypes.node,
};

FrameContainer.defaultProps = {
  exampleProp: '',
  children: null,
};

// Redux wiring: expose `name` from the store via the selector, and pass the
// raw dispatch function through unchanged.
const mapStateToProps = createStructuredSelector({
  name: selectName(),
});

function mapDispatchToProps(dispatch) {
  return {
    dispatch,
  };
}

export default connect(mapStateToProps, mapDispatchToProps)(FrameContainer);
|
#!/usr/bin/env bash
# Run all the tests.
# Deliberately no `set -e`: every suite should run even if an earlier one
# fails; the failures are combined into the final exit status instead.

nosetests
nose_status=$?

pushd diffscuss-mode/tests > /dev/null
./runtests.sh
emacs_status=$?
popd > /dev/null

# Sum the suite exit codes so this script exits 0 only when every suite
# passed (assumes no suite returns a negative code; they return 1 on
# failure).
TO_EXIT=$((nose_status + emacs_status))
exit $TO_EXIT
|
# Creates a dock spacer tile for macOS users.
function llspace() {
    # Append an empty "spacer" tile to the Dock's persistent-apps array.
    defaults write com.apple.dock persistent-apps -array-add '{"tile-type"="spacer-tile";}'
    # Restart the Dock so the new spacer appears immediately.
    killall Dock
    # llcolor/$GREEN are defined elsewhere in this shell environment.
    llcolor $GREEN "Made a dock space for ya"
}
|
for num in range(2,101):
for i in range(2,num):
if (num % i ) == 0:
break
else:
print(num, end = ' ') |
<gh_stars>0
import { createTheme } from '@mui/material/styles';

// Application-wide Material UI theme: light mode with the default palette.
const theme = createTheme({
  palette: {
    mode: 'light',
  },
});

export default theme;
|
<gh_stars>1-10
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.http import HttpResponse
from django.utils.translation import gettext_lazy as _
from django.views.generic import RedirectView
from moj_irat.views import HealthcheckView, PingJsonView
from mtp_common.metrics.views import metrics_view
from mtp_auth.patches import patch_oauth2_provider_token_view
from .views import schema_view
# Apply the OAuth2 token-view patch before any URLs are registered.
patch_oauth2_provider_token_view()

urlpatterns = [
    # Domain app APIs, all mounted at the root path (each app namespaces
    # its own routes).
    url(r'^', include('prison.urls')),
    url(r'^', include('mtp_auth.urls')),
    url(r'^', include('transaction.urls')),
    url(r'^', include('account.urls')),
    url(r'^', include('payment.urls')),
    url(r'^', include('credit.urls')),
    url(r'^', include('security.urls')),
    url(r'^', include('service.urls')),
    url(r'^', include('disbursement.urls')),
    url(r'^', include('core.urls')),
    url(r'^', include('notification.urls')),
    url(r'^', include('performance.urls')),
    # Authentication and Django admin (i18n URLs share the admin/ prefix).
    url(r'^oauth2/', include(('oauth2_provider.urls', 'oauth2_provider'), namespace='oauth2_provider')),
    url(r'^admin/', admin.site.urls),
    url(r'^admin/', include('django.conf.urls.i18n')),
    # Monitoring / operations endpoints.
    url(r'^ping.json$', PingJsonView.as_view(
        build_date_key='APP_BUILD_DATE',
        commit_id_key='APP_GIT_COMMIT',
        version_number_key='APP_BUILD_TAG',
    ), name='ping_json'),
    url(r'^healthcheck.json$', HealthcheckView.as_view(), name='healthcheck_json'),
    url(r'^metrics.txt$', metrics_view, name='prometheus_metrics'),
    # Static/utility responses.
    url(r'^favicon.ico$', RedirectView.as_view(url=settings.STATIC_URL + 'images/favicon.ico', permanent=True)),
    url(r'^robots.txt$', lambda request: HttpResponse('User-agent: *\nDisallow: /', content_type='text/plain')),
    url(r'^\.well-known/security\.txt$', RedirectView.as_view(
        url='https://raw.githubusercontent.com/ministryofjustice/security-guidance'
            '/main/contact/vulnerability-disclosure-security.txt',
        permanent=True,
    )),
    # Plain-text error pages (used by the load balancer / proxy layer).
    url(r'^404.html$', lambda request: HttpResponse(
        _('Page not found'),
        content_type='text/plain', status=404,
    )),
    url(r'^500.html$', lambda request: HttpResponse(
        _('Sorry, something went wrong'),
        content_type='text/plain', status=500,
    )),
    # Root returns an empty 204 (no content) as a liveness convenience.
    url(r'^$', lambda request: HttpResponse(content_type='text/plain', status=204)),
]

# API schema browsers are only exposed outside production environments.
if settings.ENVIRONMENT in ('test', 'local'):
    urlpatterns.extend([
        url(r'^swagger(?P<format>\.json|\.yaml)$', schema_view.without_ui(cache_timeout=0), name='schema-json'),
        url(r'^swagger/$', schema_view.with_ui('swagger', cache_timeout=0), name='schema-swagger-ui'),
        url(r'^redoc/$', schema_view.with_ui('redoc', cache_timeout=0), name='schema-redoc'),
    ])
|
from collections import Counter


def most_frequent_word(text):
    """Return the most frequent whitespace-separated word in *text*.

    Ties are broken by first appearance, matching the original stable
    sort-by-count behaviour (Counter.most_common preserves insertion
    order among equal counts).

    :param text: any string; words are compared case-sensitively
    :raises ValueError: if *text* contains no words
    """
    words = text.split()
    if not words:
        raise ValueError("text contains no words")
    return Counter(words).most_common(1)[0][0]


# given text
txt = "This is a sample text."

# print the most frequent word (every word occurs once here, so the
# first word wins the tie)
print(most_frequent_word(txt))  # Output: This
#!/usr/bin/env bash
# This is as 1a but increasing epochs from 4 to 10 and adding the option
# --proportional-shrink=150.0. Note: without adding the --proportional-shrink
# option, changing the num-epochs to 10 gave a couple of percent degradation.
# The --proportional-shrink option did also help with 4 epochs (but with a lower
# optimum, at 80); but those WER numbers were about a percent absolute worse
# than the numbers reported in this experiment.
# local/chain/compare_wer.sh --online exp/chain/tdnn1a_sp exp/chain/tdnn1b_sp
# System tdnn1a_sp tdnn1b_sp
#WER dev_clean_2 (tgsmall) 18.11 15.24
# [online:] 18.12 15.15
#WER dev_clean_2 (tglarge) 13.20 11.20
# [online:] 13.18 11.13
# Final train prob -0.0602 -0.0642
# Final valid prob -0.1038 -0.1014
# Final train prob (xent) -1.4997 -1.3679
# Final valid prob (xent) -1.7786 -1.6482
# steps/info/chain_dir_info.pl exp/chain/tdnn1b_sp
# exp/chain/tdnn1b_sp: num-iters=17 nj=2..5 num-params=7.0M dim=40+100->2309 combine=-0.085->-0.067 xent:train/valid[10,16,final]=(-1.98,-1.54,-1.37/-2.12,-1.76,-1.65) logprob:train/valid[10,16,final]=(-0.104,-0.076,-0.064/-0.129,-0.105,-0.101)
# Set -e here so that we catch if any executable fails immediately
set -euo pipefail

# First the options that are passed through to run_ivector_common.sh
# (some of which are also used in this script directly).
stage=0
decode_nj=10
train_set=train_clean_5
test_sets=dev_clean_2
gmm=tri3b
nnet3_affix=

# The rest are configs specific to this script.  Most of the parameters
# are just hardcoded at this level, in the commands below.
affix=1b   # affix for the TDNN directory name
tree_affix=
train_stage=-10
get_egs_stage=-10
decode_iter=

# training options
# training chunk-options
chunk_width=140,100,160
# we don't need extra left/right context for TDNN systems.
chunk_left_context=0
chunk_right_context=0
common_egs_dir=
xent_regularize=0.1

# training options
srand=0
remove_egs=true
reporting_email=

#decode options
test_online_decoding=true  # if true, it will run the last decoding stage.

# End configuration section.
echo "$0 $@"  # Print the command line for logging

. ./cmd.sh
. ./path.sh
. ./utils/parse_options.sh

# Chain training requires CUDA; bail out early with guidance otherwise.
if ! cuda-compiled; then
  cat <<EOF && exit 1
This script is intended to be used with GPUs but you have not compiled Kaldi with CUDA
If you want to use GPUs (and have them), go to src/, and configure and make on a machine
where "nvcc" is installed.
EOF
fi

# The iVector-extraction and feature-dumping parts are the same as the standard
# nnet3 setup, and you can skip them by setting "--stage 11" if you have already
# run those things.
local/nnet3/run_ivector_common.sh --stage $stage \
                                  --train-set $train_set \
                                  --gmm $gmm \
                                  --nnet3-affix "$nnet3_affix" || exit 1;

# Problem: We have removed the "train_" prefix of our training set in
# the alignment directory names! Bad!
gmm_dir=exp/$gmm
ali_dir=exp/${gmm}_ali_${train_set}_sp
tree_dir=exp/chain${nnet3_affix}/tree_sp${tree_affix:+_$tree_affix}
lang=data/lang_chain
lat_dir=exp/chain${nnet3_affix}/${gmm}_${train_set}_sp_lats
dir=exp/chain${nnet3_affix}/tdnn${affix}_sp
train_data_dir=data/${train_set}_sp_hires
lores_train_data_dir=data/${train_set}_sp
train_ivector_dir=exp/nnet3${nnet3_affix}/ivectors_${train_set}_sp_hires

# Sanity-check that all prerequisite files from earlier stages exist.
for f in $gmm_dir/final.mdl $train_data_dir/feats.scp $train_ivector_dir/ivector_online.scp \
    $lores_train_data_dir/feats.scp $ali_dir/ali.1.gz; do
  [ ! -f $f ] && echo "$0: expected file $f to exist" && exit 1
done

# Stage 10: build the chain-topology lang directory.
if [ $stage -le 10 ]; then
  echo "$0: creating lang directory $lang with chain-type topology"
  # Create a version of the lang/ directory that has one state per phone in the
  # topo file. [note, it really has two states.. the first one is only repeated
  # once, the second one has zero or more repeats.]
  if [ -d $lang ]; then
    if [ $lang/L.fst -nt data/lang/L.fst ]; then
      echo "$0: $lang already exists, not overwriting it; continuing"
    else
      echo "$0: $lang already exists and seems to be older than data/lang..."
      echo " ... not sure what to do.  Exiting."
      exit 1;
    fi
  else
    cp -r data/lang $lang
    silphonelist=$(cat $lang/phones/silence.csl) || exit 1;
    nonsilphonelist=$(cat $lang/phones/nonsilence.csl) || exit 1;
    # Use our special topology... note that later on may have to tune this
    # topology.
    steps/nnet3/chain/gen_topo.py $nonsilphonelist $silphonelist >$lang/topo
  fi
fi

# Stage 11: generate lattice alignments for chain training.
if [ $stage -le 11 ]; then
  # Get the alignments as lattices (gives the chain training more freedom).
  # use the same num-jobs as the alignments
  steps/align_fmllr_lats.sh --nj 75 --cmd "$train_cmd" ${lores_train_data_dir} \
    data/lang $gmm_dir $lat_dir
  rm $lat_dir/fsts.*.gz # save space
fi

# Stage 12: build the decision tree with the chain topology.
if [ $stage -le 12 ]; then
  # Build a tree using our new topology.  We know we have alignments for the
  # speed-perturbed data (local/nnet3/run_ivector_common.sh made them), so use
  # those.  The num-leaves is always somewhat less than the num-leaves from
  # the GMM baseline.
  if [ -f $tree_dir/final.mdl ]; then
    echo "$0: $tree_dir/final.mdl already exists, refusing to overwrite it."
    exit 1;
  fi
  steps/nnet3/chain/build_tree.sh \
    --frame-subsampling-factor 3 \
    --context-opts "--context-width=2 --central-position=1" \
    --cmd "$train_cmd" 3500 ${lores_train_data_dir} \
    $lang $ali_dir $tree_dir
fi

# Stage 13: generate the network configuration from an xconfig description.
if [ $stage -le 13 ]; then
  mkdir -p $dir
  echo "$0: creating neural net configs using the xconfig parser";

  num_targets=$(tree-info $tree_dir/tree |grep num-pdfs|awk '{print $2}')
  learning_rate_factor=$(echo "print (0.5/$xent_regularize)" | python)

  mkdir -p $dir/configs
  cat <<EOF > $dir/configs/network.xconfig
  input dim=100 name=ivector
  input dim=40 name=input
  # please note that it is important to have input layer with the name=input
  # as the layer immediately preceding the fixed-affine-layer to enable
  # the use of short notation for the descriptor
  fixed-affine-layer name=lda input=Append(-2,-1,0,1,2,ReplaceIndex(ivector, t, 0)) affine-transform-file=$dir/configs/lda.mat
  # the first splicing is moved before the lda layer, so no splicing here
  relu-renorm-layer name=tdnn1 dim=512
  relu-renorm-layer name=tdnn2 dim=512 input=Append(-1,0,1)
  relu-renorm-layer name=tdnn3 dim=512 input=Append(-1,0,1)
  relu-renorm-layer name=tdnn4 dim=512 input=Append(-3,0,3)
  relu-renorm-layer name=tdnn5 dim=512 input=Append(-3,0,3)
  relu-renorm-layer name=tdnn6 dim=512 input=Append(-6,-3,0)
  ## adding the layers for chain branch
  relu-renorm-layer name=prefinal-chain dim=512 target-rms=0.5
  output-layer name=output include-log-softmax=false dim=$num_targets max-change=1.5
  # adding the layers for xent branch
  # This block prints the configs for a separate output that will be
  # trained with a cross-entropy objective in the 'chain' models... this
  # has the effect of regularizing the hidden parts of the model.  we use
  # 0.5 / args.xent_regularize as the learning rate factor- the factor of
  # 0.5 / args.xent_regularize is suitable as it means the xent
  # final-layer learns at a rate independent of the regularization
  # constant; and the 0.5 was tuned so as to make the relative progress
  # similar in the xent and regular final layers.
  relu-renorm-layer name=prefinal-xent input=tdnn6 dim=512 target-rms=0.5
  output-layer name=output-xent dim=$num_targets learning-rate-factor=$learning_rate_factor max-change=1.5
EOF
  steps/nnet3/xconfig_to_configs.py --xconfig-file $dir/configs/network.xconfig --config-dir $dir/configs/
fi

# Stage 14: the chain model training itself (this is the stage with the
# num-epochs=10 and proportional-shrink=150.0 changes described up top).
if [ $stage -le 14 ]; then
  if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d $dir/egs/storage ]; then
    utils/create_split_dir.pl \
     /export/b0{3,4,5,6}/$USER/kaldi-data/egs/mini_librispeech-$(date +'%m_%d_%H_%M')/s5/$dir/egs/storage $dir/egs/storage
  fi

  steps/nnet3/chain/train.py --stage=$train_stage \
    --cmd="$decode_cmd" \
    --feat.online-ivector-dir=$train_ivector_dir \
    --feat.cmvn-opts="--norm-means=false --norm-vars=false" \
    --chain.xent-regularize $xent_regularize \
    --chain.leaky-hmm-coefficient=0.1 \
    --chain.l2-regularize=0.00005 \
    --chain.apply-deriv-weights=false \
    --chain.lm-opts="--num-extra-lm-states=2000" \
    --trainer.srand=$srand \
    --trainer.max-param-change=2.0 \
    --trainer.num-epochs=10 \
    --trainer.frames-per-iter=3000000 \
    --trainer.optimization.num-jobs-initial=2 \
    --trainer.optimization.num-jobs-final=5 \
    --trainer.optimization.initial-effective-lrate=0.001 \
    --trainer.optimization.final-effective-lrate=0.0001 \
    --trainer.optimization.shrink-value=1.0 \
    --trainer.optimization.proportional-shrink=150.0 \
    --trainer.num-chunk-per-minibatch=256,128,64 \
    --trainer.optimization.momentum=0.0 \
    --egs.chunk-width=$chunk_width \
    --egs.chunk-left-context=$chunk_left_context \
    --egs.chunk-right-context=$chunk_right_context \
    --egs.chunk-left-context-initial=0 \
    --egs.chunk-right-context-final=0 \
    --egs.dir="$common_egs_dir" \
    --egs.opts="--frames-overlap-per-eg 0" \
    --cleanup.remove-egs=$remove_egs \
    --use-gpu=true \
    --reporting.email="$reporting_email" \
    --feat-dir=$train_data_dir \
    --tree-dir=$tree_dir \
    --lat-dir=$lat_dir \
    --dir=$dir  || exit 1;
fi

# Stage 15: compile the decoding graph with the small trigram LM.
if [ $stage -le 15 ]; then
  # Note: it's not important to give mkgraph.sh the lang directory with the
  # matched topology (since it gets the topology file from the model).
  utils/mkgraph.sh \
    --self-loop-scale 1.0 data/lang_test_tgsmall \
    $tree_dir $tree_dir/graph_tgsmall || exit 1;
fi

# Stage 16: offline decoding of the test sets plus large-LM rescoring.
if [ $stage -le 16 ]; then
  frames_per_chunk=$(echo $chunk_width | cut -d, -f1)
  rm $dir/.error 2>/dev/null || true
  for data in $test_sets; do
    (
      nspk=$(wc -l <data/${data}_hires/spk2utt)
      steps/nnet3/decode.sh \
          --acwt 1.0 --post-decode-acwt 10.0 \
          --extra-left-context $chunk_left_context \
          --extra-right-context $chunk_right_context \
          --extra-left-context-initial 0 \
          --extra-right-context-final 0 \
          --frames-per-chunk $frames_per_chunk \
          --nj $nspk --cmd "$decode_cmd"  --num-threads 4 \
          --online-ivector-dir exp/nnet3${nnet3_affix}/ivectors_${data}_hires \
          $tree_dir/graph_tgsmall data/${data}_hires ${dir}/decode_tgsmall_${data} || exit 1
      steps/lmrescore_const_arpa.sh --cmd "$decode_cmd" \
        data/lang_test_{tgsmall,tglarge} \
       data/${data}_hires ${dir}/decode_{tgsmall,tglarge}_${data} || exit 1
    ) || touch $dir/.error &
  done
  wait
  [ -f $dir/.error ] && echo "$0: there was a problem while decoding" && exit 1
fi

# Not testing the 'looped' decoding separately, because for
# TDNN systems it would give exactly the same results as the
# normal decoding.

# Stage 17 (optional): prepare and test the online-decoding setup.
if $test_online_decoding && [ $stage -le 17 ]; then
  # note: if the features change (e.g. you add pitch features), you will have to
  # change the options of the following command line.
  steps/online/nnet3/prepare_online_decoding.sh \
       --mfcc-config conf/mfcc_hires.conf \
       $lang exp/nnet3${nnet3_affix}/extractor ${dir} ${dir}_online

  rm $dir/.error 2>/dev/null || true
  for data in $test_sets; do
    (
      nspk=$(wc -l <data/${data}_hires/spk2utt)
      # note: we just give it "data/${data}" as it only uses the wav.scp, the
      # feature type does not matter.
      steps/online/nnet3/decode.sh \
        --acwt 1.0 --post-decode-acwt 10.0 \
        --nj $nspk --cmd "$decode_cmd" \
        $tree_dir/graph_tgsmall data/${data} ${dir}_online/decode_tgsmall_${data} || exit 1
      steps/lmrescore_const_arpa.sh --cmd "$decode_cmd" \
        data/lang_test_{tgsmall,tglarge} \
       data/${data}_hires ${dir}_online/decode_{tgsmall,tglarge}_${data} || exit 1
    ) || touch $dir/.error &
  done
  wait
  [ -f $dir/.error ] && echo "$0: there was a problem while decoding" && exit 1
fi

exit 0;
|
/*
TITLE Draw the Olympic Circles Chapter12Exercise8.cpp
Bjarne Stroustrup "Programming: Principles and Practice Using C++"
COMMENT
	Objective: Draw the Olympic circles. // Should be semi-circles to overlap properly
Input: -
Output: Graph on screen.
Author: <NAME>
Date: 17. 08. 2015
*/
#include <iostream>
#include "Simple_window.h"
int main()
{
try
{
// create a centered window
const int windowWidth = 800;
const int windowHeight = 600;
Point centerScreen(x_max() / 2.- windowWidth / 2., y_max() / 2. - windowHeight / 2.);
Simple_window sw(centerScreen, windowWidth, windowHeight, "Chapter 12 Exercise 8");
// create Olympic circles
const int frameSize = 20;
const int circleRadius = 100;
const int circleWidth = 10;
// black in the top middle
Graph_lib::Circle cBlack(Point(sw.x_max() / 2.,
sw.y_max() / 2. - circleRadius),
circleRadius);
cBlack.set_style(Line_style(Line_style::solid, circleWidth));
cBlack.set_color(Color::black);
// blue in the top left
Graph_lib::Circle cBlue(Point(sw.x_max() / 2. - 2 * circleRadius - frameSize,
sw.y_max() / 2. - circleRadius),
circleRadius);
cBlue.set_style(Line_style(Line_style::solid, circleWidth));
cBlue.set_color(Color::blue);
// red in the top right
Graph_lib::Circle cRed(Point(sw.x_max() / 2. + 2 * circleRadius + frameSize,
sw.y_max() / 2. - circleRadius),
circleRadius);
cRed.set_style(Line_style(Line_style::solid, circleWidth));
cRed.set_color(Color::red);
// yellow in the bottom left
Graph_lib::Circle cYellor(Point(sw.x_max() / 2. - circleRadius - frameSize / 2.,
sw.y_max() / 2.),
circleRadius);
cYellor.set_style(Line_style(Line_style::solid, circleWidth));
cYellor.set_color(Color::yellow);
// greem in the bottom right
Graph_lib::Circle cGreen(Point(sw.x_max() / 2. + circleRadius + frameSize / 2.,
sw.y_max() / 2.),
circleRadius);
cGreen.set_style(Line_style(Line_style::solid, circleWidth));
cGreen.set_color(Color::green);
// right overlapping
sw.attach(cBlack);
sw.attach(cBlue);
sw.attach(cRed);
sw.attach(cYellor);
sw.attach(cGreen);
sw.wait_for_button();
}
catch(std::exception& e)
{
std::cerr << e.what() << std::endl;
}
catch(...)
{
std::cerr <<"Default exception!"<< std::endl;
}
}
|
/*
* Copyright 2014 akquinet engineering GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package de.akquinet.engineering.vaadinator.example.address.ui.std.presenter;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.Date;
import java.util.HashMap;
import org.junit.Before;
import org.junit.Test;
import de.akquinet.engineering.vaadinator.example.address.model.Address;
import de.akquinet.engineering.vaadinator.example.address.model.Anreden;
import de.akquinet.engineering.vaadinator.example.address.service.AddressService;
import de.akquinet.engineering.vaadinator.example.address.ui.presenter.Presenter;
import de.akquinet.engineering.vaadinator.example.address.ui.presenter.SubviewCapablePresenter;
import de.akquinet.engineering.vaadinator.example.address.ui.std.view.AddressAddView;
import de.akquinet.engineering.vaadinator.example.address.ui.std.view.AddressChangeView;
import de.akquinet.engineering.vaadinator.example.address.ui.std.view.AddressListView;
public class AddressListPresenterImplTest {
private AddressListView view;
private AddressChangeView cview;
private AddressAddView aview;
private AddressService service;
private PresenterFactory presenterFactory;
private AddressChangePresenter cpres;
private AddressAddPresenter apres;
private AddressListPresenterImpl pres;
private AddressSelectPresenter spres;
@Before
public void setUp() {
view = mock(AddressListView.class);
// when(view.getAddressSelection()).thenReturn(mock(Address.class));
cview = mock(AddressChangeView.class);
when(cview.getAnrede()).thenReturn(Anreden.FRAU);
when(cview.getVorname()).thenReturn("vorname");
when(cview.getNachname()).thenReturn("nachname");
when(cview.getEmail()).thenReturn("email");
when(cview.getGeburtsdatum()).thenReturn(new Date(0));
aview = mock(AddressAddView.class);
when(aview.getAnrede()).thenReturn(Anreden.FRAU);
when(aview.getVorname()).thenReturn("vorname");
when(aview.getNachname()).thenReturn("nachname");
when(aview.getEmail()).thenReturn("email");
when(aview.getGeburtsdatum()).thenReturn(new Date(0));
service = mock(AddressService.class);
presenterFactory = mock(PresenterFactory.class);
cpres = mock(AddressChangePresenter.class);
when(cpres.getView()).thenReturn(cview);
apres = mock(AddressAddPresenter.class);
spres = mock(AddressSelectPresenter.class);
when(apres.getView()).thenReturn(aview);
when(presenterFactory.createAddressAddPresenter((Presenter) any())).thenReturn(apres);
when(presenterFactory.createAddressChangePresenter((Presenter) any())).thenReturn(cpres);
when(presenterFactory.createAddressSelectPresenter()).thenReturn(spres);
pres = new AddressListPresenterImpl(new HashMap<String, Object>(), view, presenterFactory, service, null);
}
@Test
public void testStartPresenting() {
pres.startPresenting();
verify(view).setObserver(pres);
verify(view).initializeUi();
verify(spres).startPresenting();
}
@Test
public void testReturnToThisPresener() {
pres.returnToThisPresener(mock(Presenter.class));
verify(view).closeSubView();
verify(spres).returnToThisPresener(any(Presenter.class));
}
@Test
public void testReturnToThisPresenerSubview() {
SubviewCapablePresenter subviewCapablePresenter = mock(SubviewCapablePresenter.class);
pres = new AddressListPresenterImpl(new HashMap<String, Object>(), view, presenterFactory, service,
subviewCapablePresenter);
pres.returnToThisPresener(mock(Presenter.class));
verify(view, never()).closeSubView();
verify(subviewCapablePresenter).setDetailView(null);
verify(spres).returnToThisPresener(any(Presenter.class));
}
@Test
public void testOnAddressSelected() {
pres.onAddressSelected(new Address(Anreden.FROLLEIN, "Sabine", "Test", "<EMAIL>"));
verify(presenterFactory).createAddressChangePresenter(pres);
verify(view).openSubView(cview);
verify(cpres).startPresenting();
}
@Test
public void testOnAddressSelectedSubview() {
SubviewCapablePresenter subviewCapablePresenter = mock(SubviewCapablePresenter.class);
pres = new AddressListPresenterImpl(new HashMap<String, Object>(), view, presenterFactory, service,
subviewCapablePresenter);
pres.onAddressSelected(new Address(Anreden.FROLLEIN, "Sabine", "Test", "<EMAIL>"));
verify(presenterFactory).createAddressChangePresenter(pres);
verify(view, never()).openSubView(cview);
verify(subviewCapablePresenter).setDetailView(cview);
verify(cpres).startPresenting();
}
@Test
public void testOnAddressSelectedNull() {
    // A null selection must be a no-op: no presenter created, no view opened.
    pres.onAddressSelected(null);
    verify(presenterFactory, never()).createAddressChangePresenter((Presenter) any());
    verify(view, never()).openSubView(cview);
    verify(cpres, never()).startPresenting();
}
@Test
public void testOnAddressSelectedNullSubview() {
    // Null selection is also a no-op in the subview-capable configuration.
    SubviewCapablePresenter subviewCapablePresenter = mock(SubviewCapablePresenter.class);
    pres = new AddressListPresenterImpl(new HashMap<String, Object>(), view, presenterFactory, service,
            subviewCapablePresenter);
    pres.onAddressSelected(null);
    verify(presenterFactory, never()).createAddressChangePresenter((Presenter) any());
    verify(view, never()).openSubView(cview);
    verify(subviewCapablePresenter, never()).setDetailView(cview);
    verify(cpres, never()).startPresenting();
}
@Test
public void testOnAddAddress() {
    // Adding spawns an add presenter and opens its view as a sub view.
    pres.onAddAddress();
    verify(presenterFactory).createAddressAddPresenter(pres);
    verify(view).openSubView(aview);
    verify(apres).startPresenting();
}
@Test
public void testOnAddAddressSubview() {
    // With a subview-capable parent, the add view becomes the parent's
    // detail view rather than a sub view of the list view.
    SubviewCapablePresenter subviewCapablePresenter = mock(SubviewCapablePresenter.class);
    pres = new AddressListPresenterImpl(new HashMap<String, Object>(), view, presenterFactory, service,
            subviewCapablePresenter);
    pres.onAddAddress();
    verify(presenterFactory).createAddressAddPresenter(pres);
    verify(view, never()).openSubView(aview);
    verify(subviewCapablePresenter).setDetailView(aview);
    verify(apres).startPresenting();
}
}
|
def kClosestValues(arr, x, k):
    """Return up to k of the values in arr that are closest to x.

    Only values tied for the single smallest absolute difference are
    candidates.  The result preserves the original ordering rule: ascending
    when the candidates lie above x, descending otherwise.

    Args:
        arr: iterable of numbers to search (may be empty).
        x: the target value.
        k: maximum number of values to return.

    Returns:
        A list of at most k numbers; empty when arr is empty.
    """
    # Fix: the original indexed closestPrimes[0] unconditionally and raised
    # IndexError for an empty input.
    if not arr:
        return []
    closest = []
    best_diff = float("inf")
    for num in arr:
        diff = abs(num - x)
        if diff < best_diff:
            # Strictly better: restart the candidate list.
            closest = [num]
            best_diff = diff
        elif diff == best_diff:
            # Tie with the current best: keep both.
            closest.append(num)
    # Ordering rule kept from the original implementation: sort ascending
    # when the first candidate found is above x, descending otherwise.
    if closest[0] > x:
        closest.sort()
    else:
        closest.sort(reverse=True)
    return closest[:k]
package org.multibit.hd.core.testing.error_reporting;
import com.google.common.base.Preconditions;
import com.google.common.util.concurrent.*;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import org.bitcoinj.crypto.TrustStoreLoader;
import org.bitcoinj.params.MainNetParams;
import org.multibit.commons.concurrent.SafeExecutors;
import org.multibit.hd.core.config.Configurations;
import org.multibit.hd.core.dto.PaymentSessionSummary;
import org.multibit.hd.core.managers.HttpsManager;
import org.multibit.hd.core.managers.InstallationManager;
import org.multibit.hd.core.services.PaymentProtocolService;
import org.multibit.hd.core.services.PaymentProtocolServiceTest;
import org.multibit.hd.core.testing.payments.FixtureCallable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nullable;
import javax.net.ServerSocketFactory;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLServerSocket;
import javax.net.ssl.TrustManagerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URL;
import java.security.KeyStore;
import java.util.concurrent.TimeUnit;
import static org.fest.assertions.Fail.fail;
/**
* <p>A HTTPS server listening on localhost:8443 and responding with Payment Protocol requests created from fixtures</p>
*
* <p>To debug SSL include <code>-Djavax.net.debug=ssl:handshake</code> as a VM command line parameter</p>
*
*/
public class ErrorReportingHttpsServer {
private static final Logger log = LoggerFactory.getLogger(ErrorReportingHttpsServer.class);
private SSLServerSocket serverSocket;
private static ListeningExecutorService executorService = SafeExecutors.newSingleThreadExecutor("bip70-server");
/**
* Start an https Payment Protocol server which can respond to a MultiBit HD instance
* Start MBHD with the commandline parameter "project dir"/fixtures/payments/localhost-signed-1milli.bitcoinpaymentrequest
* @param args No args are needed
*/
public static void main(String[] args) {
InstallationManager.unrestricted = true;
Configurations.currentConfiguration = Configurations.newDefaultConfiguration();
ErrorReportingHttpsServer server = new ErrorReportingHttpsServer();
log.debug("Result of server.start() was {}", server.start());
// Add some responses - we consume one here (a PaymentRequest fixture) and then add three PaymentACKCallable responses
server.addFixture("/fixtures/error_reporting/localhost-signed-1milli.bitcoinpaymentrequest");
// Probe it once to see if it is up
PaymentProtocolService paymentProtocolService = new PaymentProtocolService(MainNetParams.get());
paymentProtocolService.start();
final URI uri = URI.create(PaymentProtocolServiceTest.PAYMENT_REQUEST_BIP72_SINGLE);
// Wait until the HTTPS server is up before setting the trust store loader
TrustStoreLoader trustStoreLoader = new TrustStoreLoader.DefaultTrustStoreLoader();
final PaymentSessionSummary paymentSessionSummary = paymentProtocolService.probeForPaymentSession(uri, false, trustStoreLoader);
log.debug(paymentSessionSummary.toString());
// Runs forever
while (true) {
Uninterruptibles.sleepUninterruptibly(20, TimeUnit.SECONDS);
log.debug("Still running...");
}
}
/**
* @return True if the server started OK
*/
public boolean start() {
InputStream is = null;
try {
log.debug("Initialise the trust store containing the trusted certificates (including localhost:8443)");
URL trustStoreUrl = ErrorReportingHttpsServer.class.getResource("/mbhd-cacerts-with-localhost");
System.setProperty("javax.net.ssl.trustStore", trustStoreUrl.getFile());
System.setProperty("javax.net.ssl.trustStorePassword", HttpsManager.PASSPHRASE);
SSLContext sslContext = SSLContext.getInstance("TLS");
log.debug("Initialise the key store containing the private server keys (CN=localhost is required)");
KeyStore ks = KeyStore.getInstance("JKS");
is = ErrorReportingHttpsServer.class.getResourceAsStream("/localhost.jks");
ks.load(is, HttpsManager.PASSPHRASE.toCharArray());
log.debug("Initialise the key manager factory");
KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
kmf.init(ks, HttpsManager.PASSPHRASE.toCharArray());
log.debug("Initialise the trust manager factory");
TrustManagerFactory tmf = TrustManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
tmf.init(ks);
// Setup the HTTPS context and parameters
sslContext.init(kmf.getKeyManagers(), tmf.getTrustManagers(), null);
// Create a ServerSocketFactory from the SSLContext
ServerSocketFactory ssf = sslContext.getServerSocketFactory();
// Create unauthenticated server socket on localhost:8443
serverSocket = (SSLServerSocket) ssf.createServerSocket(8443);
serverSocket.setNeedClientAuth(false);
serverSocket.setWantClientAuth(false);
String[] suites = serverSocket.getSupportedCipherSuites();
serverSocket.setEnabledCipherSuites(suites);
return true;
} catch (Exception e) {
log.error("Failed to create HTTPS server", e);
} finally {
if (is != null) {
try {
is.close();
} catch (IOException ioe) {
log.error("Failed to close key store", ioe);
}
}
}
// Must have failed to be here
return false;
}
/**
* @param fixture The classpath reference of the fixture to add (served as byte[])
*/
public void addFixture(final String fixture) {
Preconditions.checkState(!executorService.isTerminated(), "Executor service must not be terminated");
log.debug("Adding fixture: '{}'", fixture);
ListenableFuture<Boolean> listenableFuture = executorService.submit(new FixtureCallable(serverSocket, "application/bitcoin-paymentrequest", fixture));
Futures.addCallback(listenableFuture, new FutureCallback<Boolean>() {
@Override
public void onSuccess(@Nullable Boolean result) {
log.info("Fixture '{}' served successfully", fixture);
}
@Override
public void onFailure(Throwable t) {
fail("Unexpected failure for fixture: " + fixture, t);
}
});
}
/**
* Remove all entries from the fixture queue and reset the executor service
*/
@SuppressFBWarnings({"ST_WRITE_TO_STATIC_FROM_INSTANCE_METHOD"})
public void reset() {
executorService.shutdownNow();
try {
executorService.awaitTermination(2, TimeUnit.SECONDS);
} catch (InterruptedException e) {
log.warn("Failed to terminate executor service cleanly");
}
executorService = SafeExecutors.newSingleThreadExecutor("error-reporting-server");
}
public void stop() {
reset();
try {
serverSocket.close();
} catch (IOException e) {
log.warn("Failed to close server socket", e);
}
}
}
|
const express = require('express');
const { contactList } = require('./data.js');

const app = express();

// Fix: parse JSON request bodies. Without this middleware req.body is
// undefined in the POST and PUT handlers below.
app.use(express.json());

// List every contact.
app.get('/contacts', (req, res) => {
  res.json(contactList);
});

// Create a contact from the request body and echo it back.
app.post('/contacts', (req, res) => {
  const contact = req.body;
  contactList.push(contact);
  res.json(contact);
});

// Merge the request body into an existing contact, 404 when absent.
app.put('/contacts/:id', (req, res) => {
  const { id } = req.params;
  const contactIndex = contactList.findIndex((c) => c.id == id);
  if (contactIndex === -1) {
    return res.status(404).json({ message: 'Contact not found!' });
  }
  contactList[contactIndex] = { ...contactList[contactIndex], ...req.body };
  res.json(contactList[contactIndex]);
});

// Remove a contact by id.
app.delete('/contacts/:id', (req, res) => {
  const { id } = req.params;
  // Fix: contactList is a destructured import binding and cannot be
  // reassigned (the original `contactList = contactList.filter(...)`
  // threw a TypeError); mutate the shared array in place instead.
  const index = contactList.findIndex((c) => c.id == id);
  if (index !== -1) {
    contactList.splice(index, 1);
  }
  res.json({ message: 'Contact deleted.' });
});

app.listen(3000, () => {
  console.log('Server running on port 3000');
});
class PublicationManager:
    """In-memory registry of publications keyed by title."""

    def __init__(self):
        # Maps title -> {'author': ..., 'abstract': ... or None}.
        self.publications = {}

    def add_publication(self, title, author, abstract=None):
        """Register (or overwrite) a publication under its title."""
        self.publications[title] = {'author': author, 'abstract': abstract}

    def get_description(self, title):
        """Return a readable description of a publication.

        Falls back to a fixed not-found message for unknown titles, and
        notes the absence of an abstract when none was supplied.
        """
        record = self.publications.get(title)
        if record is None:
            return 'Publication not found.'
        description = 'Publication: ' + title + ' - Author: ' + record['author']
        abstract = record['abstract']
        suffix = (' - Abstract: ' + abstract) if abstract else ' - No abstract available.'
        return description + suffix
/*
* Copyright 2018-2021 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and limitations
* under the License.
*/
package no.priv.bang.ukelonn.api.resources;
import javax.inject.Inject;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import org.apache.shiro.SecurityUtils;
import org.apache.shiro.subject.Subject;
import org.osgi.service.log.LogService;
import no.priv.bang.ukelonn.api.beans.LoginResult;
/**
 * REST resource handling logout requests.
 *
 * <p>POST /logout terminates the current Shiro session and returns an
 * empty {@link LoginResult} as JSON.</p>
 */
@Path("/logout")
@Produces(MediaType.APPLICATION_JSON)
public class Logout {

    @Inject
    LogService logservice;  // injected but not referenced in this class

    /**
     * Logs out the subject bound to the current request.
     *
     * @return an empty login result
     */
    @POST
    public LoginResult doLogout() {
        Subject subject = SecurityUtils.getSubject();
        subject.logout();
        return LoginResult.with().build();
    }
}
|
<gh_stars>1-10
package datahelper
import (
"database/sql"
"github.com/jmoiron/sqlx"
"rickonono3/r-blog/helper/typehelper"
"rickonono3/r-blog/mytype"
)
// GetResultId extracts the auto-increment id from an insert result and
// converts it to int.
// NOTE(review): the error from LastInsertId is discarded, so a failed
// lookup yields id 0 — confirm callers treat 0 as "no id".
func GetResultId(res *sql.Result) (id int) {
	id64, _ := (*res).LastInsertId()
	id = typehelper.Int64ToInt(id64)
	return
}
// IsExists reports whether the given entity is present in both the layer
// table and its type-specific table (dir/article/file).
// The zero entity (Type 0, Id 0) is treated as the always-present root.
func IsExists(tx *sqlx.Tx, entity mytype.Entity) bool {
	if entity.Type == 0 && entity.Id == 0 {
		return true
	}
	var (
		err          error
		count        = 0
		layerExists  = false
		entityExists = false
	)
	// layerExists: the (type, id) pair must appear in the layer table.
	if err = tx.QueryRowx(
		"select count(*) from layer where type=? and id=?",
		entity.Type,
		entity.Id,
	).Scan(&count); err != nil {
		return false
	}
	if count == 1 {
		layerExists = true
	}
	// entityExists: the id must also appear in the table for its type.
	count = 0
	query := ""
	switch entity.Type {
	case 0:
		query = "select count(*) from dir where id=?"
	case 1:
		query = "select count(*) from article where id=?"
	case 2:
		query = "select count(*) from file where id=?"
	default:
		// Fix: unknown types previously fell through with an empty query
		// string, relying on the database driver to error out; reject
		// them explicitly instead.
		return false
	}
	if err = tx.QueryRowx(
		query,
		entity.Id,
	).Scan(&count); err != nil {
		return false
	}
	if count == 1 {
		entityExists = true
	}
	return layerExists && entityExists
}
|
<reponame>johnreysd/ClientBase<gh_stars>0
package dev.zihasz.clientbase.feature.module.modules.client;
import dev.zihasz.clientbase.feature.module.Category;
import dev.zihasz.clientbase.feature.module.Module;
import dev.zihasz.clientbase.setting.Setting;
import org.lwjgl.input.Keyboard;
import java.awt.*;
public class ClickGUIModule extends Module {

    /** Registers the module under the CLIENT category, bound to right shift. */
    public ClickGUIModule() {
        super("ClickGUI", "The \"control panel\" for your client", Category.CLIENT, Keyboard.KEY_RSHIFT);
    }

    // Colour settings for the GUI (RGBA values).
    Setting<Color> foregroundColor = new Setting<>("ForegroundColor", "The foreground color for the GUI.", new Color(152, 255, 152, 250));
    Setting<Color> backgroundColor = new Setting<>("BackgroundColor", "The background color for the GUI.", new Color(100, 100, 100, 125));
    Setting<Color> fontColor = new Setting<>("FontColor", "The font color for the GUI.", new Color(255, 255, 255, 250));

    /** Opens the GUI screen when no other screen is currently showing. */
    @Override
    public void onEnable() {
        if (mc.currentScreen == null)
            mc.displayGuiScreen(null); /* TODO: Replace "null" with the actual ClickGUI class. */
    }

    /**
     * NOTE(review): the null-screen guard mirrors onEnable; confirm the
     * intended condition once the real ClickGUI screen class replaces the
     * placeholder nulls below.
     */
    @Override
    public void onDisable() {
        if (mc.currentScreen == null) /* TODO: Replace "null" with the actual ClickGUI class. */
            mc.displayGuiScreen(null);
    }
}
|
<reponame>gitter-badger/intellij-swagger
package org.zalando.intellij.swagger.rename.json;
import com.intellij.openapi.vfs.VirtualFile;
import org.zalando.intellij.swagger.SwaggerLightCodeInsightFixtureTestCase;
/** Rename-refactoring tests for $ref values in JSON Swagger files. */
public class RenameFileRefTest extends SwaggerLightCodeInsightFixtureTestCase {

    // Test-data directory, relative to the fixture's data path.
    private static final String FILES_PATH = "rename/swagger/file/json/";

    /** Renaming a referenced file must rewrite the $ref and move the file. */
    public void testRenameJsonFileReference() {
        myFixture.copyFileToProject(FILES_PATH + "empty.json", "definitions/pet.json");
        final VirtualFile swaggerFile =
                myFixture.copyFileToProject(FILES_PATH + "rename_file_reference.json", "swagger.json");
        myFixture.configureFromExistingVirtualFile(swaggerFile);

        myFixture.renameElementAtCaret("newName.json");

        myFixture.checkResultByFile(
                "swagger.json", FILES_PATH + "rename_file_reference_after.json", true);
        // The old file must be gone and the renamed one present.
        assertNotNull(myFixture.findFileInTempDir("definitions/newName.json"));
        assertNull(myFixture.findFileInTempDir("definitions/pet.json"));
    }

    /** Renaming a definition referenced from another file updates that file. */
    public void testRenameRefInReferencedFile() {
        final VirtualFile definitionsFile =
                myFixture.copyFileToProject(
                        FILES_PATH + "ref_in_referenced_file_with_caret.json", "definitions.json");
        final VirtualFile swaggerFile =
                myFixture.copyFileToProject(
                        FILES_PATH + "rename_ref_in_referenced_file.json", "swagger.json");
        myFixture.configureFromExistingVirtualFile(swaggerFile);
        myFixture.configureFromExistingVirtualFile(definitionsFile);

        myFixture.renameElementAtCaret("newName");

        myFixture.checkResultByFile(
                "definitions.json", FILES_PATH + "ref_in_referenced_file_with_caret_after.json", true);
    }
}
|
#!/bin/bash
# Usage:
#   Assumes DT_TENANT and DT_API_TOKEN environment variables are set
#   ./createDynatraceKeptnCustomMetrics.sh
#
# Registers one Dynatrace custom timeseries metric per Keptn event type
# via the Dynatrace Timeseries API (PUT /api/v1/timeseries/<key>).

# Fail fast when the required Dynatrace credentials are missing.
if [[ -z "$DT_TENANT" || -z "$DT_API_TOKEN" ]]; then
  echo "DT_TENANT & DT_API_TOKEN MUST BE SET!!"
  exit 1
fi

####################################################################################################################
## createCustomMetric(METRICKEY, METRICNAME)
####################################################################################################################
# Example: createCustomMetric("custom:keptn.events.configuration.change", "Keptn Configuration Change Events")
# Creates (or updates) a Count metric with project/stage/service dimensions.
# The curl response body is written to curloutput.txt and echoed.
function createCustomMetric() {
  METRICKEY=$1
  METRICNAME=$2

  # JSON payload; METRICNAME is interpolated directly, so names must not
  # contain double quotes.
  PAYLOAD='{
    "displayName" : "'$METRICNAME'",
    "unit" : "Count",
    "dimensions": [
      "project",
      "stage",
      "service"
    ],
    "types": [
      "Event"
    ]
  }'

  echo "Creating Custom Metric $METRICNAME($METRICKEY)"
  echo "PUT https://$DT_TENANT/api/v1/timeseries/$METRICKEY"
  echo "$PAYLOAD"

  curl -X PUT \
    "https://$DT_TENANT/api/v1/timeseries/$METRICKEY" \
    -H 'accept: application/json; charset=utf-8' \
    -H "Authorization: Api-Token $DT_API_TOKEN" \
    -H 'Content-Type: application/json; charset=utf-8' \
    -d "$PAYLOAD" \
    -o curloutput.txt

  cat curloutput.txt
}

# now lets create metrics for each event
createCustomMetric "custom:keptn.events.configuration.change" "Keptn Configuration Change Events"
createCustomMetric "custom:keptn.events.deployment.finished" "Keptn Deployment Finished Events"
createCustomMetric "custom:keptn.events.tests.finished" "Keptn Tests Finished Events"
createCustomMetric "custom:keptn.events.start.evaluation" "Keptn Start Evaluation Events"
createCustomMetric "custom:keptn.events.evaluation.done" "Keptn Evaluation Done Events"
createCustomMetric "custom:keptn.events.problem.open" "Keptn Problem Open Events"
|
package com.wpisen.trace.agent.trace;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.List;
/**
 * Records a tree of call-stack nodes for one traced request/session.
 *
 * <p>Nodes are added below a moving "hot" node (the deepest open call) and
 * given dotted-path ids such as "0.1.2". The {@link #equals(Object)} override
 * is deliberately abused as a call protocol — presumably so instrumented
 * bytecode can drive the session through a method every Object has; confirm
 * against the agent's injection code.</p>
 */
public class StackSession {

    // Maximum number of stored nodes per session; additions beyond this are
    // counted but not stored. NOTE(review): constant name has a typo (SZIE);
    // renaming it could break other classes in this package.
    static final int MAX_SZIE = 3000;

    private StackNode rootNode;  // root of the recorded call tree (id "0")
    private StackNode hotNode;   // deepest currently-open node; insertion point
    private int size = 0;        // count of attempted additions (may exceed MAX_SZIE)

    @SuppressWarnings("unused")
    private int errorSize = 0;   // count of throwables reported via equals(Throwable)

    /** Starts a session rooted at the given node; the root receives id "0". */
    public StackSession(StackNode rootNode) {
        size = 1;
        rootNode.id = "0";
        this.rootNode = rootNode;
        hotNode = rootNode;
    }

    /** Marks the whole session as finished. */
    public void close() {
        rootNode.done = true;
    }

    /**
     * Adds a node as a child of the current hot node and makes it the new
     * hot node.
     *
     * @param node the node to attach
     * @return the attached node, or null once MAX_SZIE nodes were recorded
     */
    public StackNode addNode(StackNode node) {
        if (size >= MAX_SZIE) {
            // Still counted so size reflects how many additions were attempted.
            size++;
            return null;
        }
        hotNode.childs.add(node);
        node.stackSession = this;
        node.parent = hotNode;
        // Dotted-path id: parent id plus 1-based child index.
        node.id = hotNode.id + "." + hotNode.childs.size();
        size++;
        hotNode = node;
        return node;
    }

    /** Closes a node and moves the hot pointer back to its parent. */
    public void doneNode(StackNode node) {
        node.done = true;
        node.error = null;
        hotNode = node.parent;
    }

    public StackNode getHotStack() {
        return hotNode;
    }

    protected StackNode setHotStack(StackNode hot) {
        return hotNode = hot;
    }

    /** Prints the whole tree, one node per line, depth-first. */
    public void printStack(PrintStream out) {
        print(rootNode, out);
    }

    // Recursively print a node and its children (pre-order).
    private void print(StackNode node, PrintStream out) {
        out.println(node.toString());
        for (StackNode n : node.childs) {
            print(n, out);
        }
    }

    /** Returns every recorded node in pre-order, starting with the root. */
    public List<StackNode> getAllNodes() {
        List<StackNode> result = new ArrayList<>(size);
        result.add(rootNode);
        putNodes(rootNode, result);
        return result;
    }

    // Depth-first collection of all descendants into list.
    private void putNodes(StackNode parent, List<StackNode> list) {
        for (StackNode node : parent.childs) {
            list.add(node);
            putNodes(node, list);
        }
    }

    /**
     * PROTOCOL HACK — not a real equality check, and it intentionally breaks
     * the equals contract (no matching hashCode, not symmetric). The argument
     * type selects an operation:
     *
     * <ul>
     *   <li>{@code Throwable}: record an error (currently only counted).</li>
     *   <li>{@code Object[4]}: add a node; [1]=Long classId, [2]=String
     *       className, [3]=String methodName; the new node (never null) is
     *       written back into [0].</li>
     *   <li>{@code StackNode}: close that node.</li>
     * </ul>
     */
    @Override
    public boolean equals(Object obj) {
        if (obj instanceof Throwable) {
            // TODO: cannot be implemented yet — attaching the throwable to the
            // open node chain is left commented out below.
            /*
             * StackNode node = nodes[hotIndex]; while (!node.done && node !=
             * nodes[0]) { node.error = (Throwable) obj; node =
             * nodes[node.parentIndex]; }
             */
            errorSize++;
            // throwables[errorSize++]= (Throwable) obj;
            return false;
        }
        /*
         * Adds a stack node via a 4-element argument array.
         * Slot [0] receives the new node object;
         * slots [1]=long classId, [2]=String className, [3]=String methodName.
         */
        else if (obj instanceof Object[]) {
            Object params[] = (Object[]) obj;
            params[0] = addNode(new StackNode((Long) params[1], (String) params[2], (String) params[3]));
            // Never hand back null — callers expect a non-null sentinel.
            params[0] = params[0] == null ? new Object() : params[0];
            return false;
        }
        /*
         * Close the given node.
         */
        else if (obj instanceof StackNode) {
            doneNode((StackNode) obj);
            return false;
        }
        return super.equals(obj);
    }
}
|
package com.ride.myride.fragment;
import android.view.View;
import android.widget.Button;
import com.ride.myride.R;
/**
 * Fragment offering login and register buttons; click handling is delegated
 * to {@code AbstractFragment} via {@code onClick}.
 */
public class LoginRegisterFragment extends AbstractFragment {

    public LoginRegisterFragment() {}

    @Override
    public void onStart() {
        super.onStart();
        // Wire both buttons to this fragment's click handler.
        View view = getView();
        Button login = view.findViewById(R.id.login);
        Button register = view.findViewById(R.id.register);
        login.setOnClickListener(this);
        register.setOnClickListener(this);
    }

    /** Supplies the layout resource inflated by the base fragment. */
    @Override
    protected int getFragmentLayout() {
        return R.layout.login_register_fragment;
    }
}
|
def longestSortedSeq(nums):
    """Return the length of the longest non-decreasing contiguous run.

    Args:
        nums: a list of comparable values.

    Returns:
        The length of the longest run where each element is >= its
        predecessor; 0 for an empty list, 1 for a single element.
    """
    if len(nums) < 2:
        return len(nums)
    max_len = 1
    i = 0
    while i < len(nums):
        # Extend j to one past the end of the run starting at i.
        j = i + 1
        while j < len(nums) and nums[j - 1] <= nums[j]:
            j += 1
        max_len = max(max_len, j - i)
        # Bug fix: restart at j, the element that broke the run. The
        # original `i = j - 1` re-examined the same failing pair on every
        # iteration, looping forever on any input with a descent.
        i = j
    return max_len
'use strict';
var shajs = require('sha.js');
var _require = require('iota.lib.js/lib/utils/asciiToTrytes'),
toTrytes = _require.toTrytes;
var Kerl = require('iota.lib.js/lib/crypto/kerl/kerl');
var Converter = require('iota.lib.js/lib/crypto/converter/converter');
// Derives a tryte-encoded seed by absorbing key and password into a Kerl
// sponge. Both inputs are first padded with '9' up to a multiple of 243
// characters so they convert to whole absorb blocks.
// NOTE(review): absorb/squeeze are called with an offset argument only;
// verify the library applies a sensible default length.
function getSeed(key, password) {
  if (key.length % 243 !== 0) {
    key = '' + key + '9'.repeat(243 - key.length % 243);
  }
  if (password.length % 243 !== 0) {
    password = '' + password + '9'.repeat(243 - password.length % 243);
  }
  var hash = [];
  var kerl = new Kerl();
  kerl.initialize();
  // Mix both credentials into the sponge state, then squeeze one hash.
  kerl.absorb(Converter.trits(key), 0);
  kerl.absorb(Converter.trits(password), 0);
  kerl.squeeze(hash, 0);
  return Converter.trytes(hash);
}
// Derives a deterministic set of keys from the credentials: SHA-512 the
// "username:password" pair, convert the hex digest to trytes, and slice
// fixed-width segments out of the result.
// Segment layout (tryte offsets): service 0-80, ledger 81-161,
// password 162-193 (passwordExt extends the same region to 242),
// extra 243-252, checksum 253-end.
function getKeys(username, password) {
  var raw = shajs('sha512').update(username + ':' + password).digest('hex');
  var base = toTrytes(raw);
  // Idiom fix: String.prototype.substr is deprecated; slice(start, end)
  // replaces substr(start, length) with identical results here.
  return {
    service: base.slice(0, 81),
    ledger: base.slice(81, 162),
    password: base.slice(162, 194),
    passwordExt: base.slice(162, 243),
    extra: base.slice(243, 253),
    checksum: base.slice(253)
  };
}
// Public API of this module.
module.exports = {
  getKeys: getKeys,
  getSeed: getSeed
};
package com.frewing.dump.core.mail;
import android.app.Activity;
import android.os.Bundle;
import android.view.View;
import android.widget.Toast;
import com.frewing.dump.core.R;
/**
 * Demo activity that sends a fixed test mail via 163.com SMTP when its
 * single button is clicked. Results are reported through {@link IMessage}.
 */
public class MailActivity extends Activity implements View.OnClickListener, IMessage {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_mail);
        // One button triggers the test send.
        findViewById(R.id.btn_mail).setOnClickListener(this);
    }

    /**
     * Builds and dispatches the test mail on a background task.
     *
     * <p>SECURITY(review): the SMTP account password is hard-coded in the
     * literal below and committed to source control — move it to secure
     * configuration and rotate the credential.</p>
     */
    @Override
    public void onClick(View view) {
        String[] recipients = { "<EMAIL>" };
        SendEmailAsyncTask email = new SendEmailAsyncTask();
        email.activity = this;
        email.m = new Mail("<EMAIL>", "560131fjg");
        email.m.set_host("smtp.163.com");
        email.m.set_port("465");  // implicit-SSL SMTP port
        email.m.set_from("<EMAIL>");
        email.m.setBody("test");
        email.m.set_to(recipients);
        email.m.set_subject("test2");
        email.execute();
    }

    /** Callback used by the send task to surface status messages. */
    @Override
    public void displayMessage(String s) {
        Toast.makeText(this, s, Toast.LENGTH_SHORT).show();
    }
}
|
import { Request, Response, Router } from "express";
import { addPatientProfile } from "../../../models/user/addPatientProfile";
import { verifyUser } from "../../../models/middlewares/verifyUser";
import { validate } from "./validate";
// Router exposing POST / to create a patient profile for the
// authenticated user (verifyUser must populate req.user or reject).
export const addProfileRouter = Router();

addProfileRouter.post(
  "/",
  verifyUser,
  async (req: Request, res: Response): Promise<Response> => {
    // Validate the payload first and return every problem at once.
    const errors: string[] = validate(req.body);
    if (errors.length) return res.status(400).send({ errors });
    try {
      await addPatientProfile({ ...req.body, user: req.user });
      return res.status(200).send({ success: true });
    } catch (error) {
      // Persistence failure: reuse the same { errors: [...] } shape.
      return res.status(500).send({ errors: [error.message] });
    }
  }
);
|
"use strict";

// Auto-generated icon definition ("add to photos"). Mirrors an SVG
// document: a viewBox plus path children, where the first path is the
// invisible 24x24 bounding box and the second is the glyph outline.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.ic_add_to_photos = void 0;
var ic_add_to_photos = {
  "viewBox": "0 0 24 24",
  "children": [{
    "name": "path",
    "attribs": {
      "d": "M0 0h24v24H0z",
      "fill": "none"
    },
    "children": []
  }, {
    "name": "path",
    "attribs": {
      "d": "M4 6H2v14c0 1.1.9 2 2 2h14v-2H4V6zm16-4H8c-1.1 0-2 .9-2 2v12c0 1.1.9 2 2 2h12c1.1 0 2-.9 2-2V4c0-1.1-.9-2-2-2zm-1 9h-4v4h-2v-4H9V9h4V5h2v4h4v2z"
    },
    "children": []
  }]
};
exports.ic_add_to_photos = ic_add_to_photos;
<filename>packages/ui/src/components/Region.tsx
import React, { PropsWithChildren } from "react";
import cx from "classnames";
interface RegionProps extends React.HTMLProps<HTMLDivElement> {
  /** When true, activeClassName is appended to className. */
  active?: boolean;
  /** Extra class applied only while active. */
  activeClassName?: React.ComponentPropsWithoutRef<"div">["className"];
}

/**
 * Thin wrapper around a div that toggles an extra class while active.
 * All other div props (and children) are forwarded unchanged.
 */
export const Region: React.FC<PropsWithChildren<RegionProps>> = ({
  children,
  className,
  activeClassName,
  active,
  ...rest
}) => (
  <div
    className={cx(className, active ? activeClassName : undefined)}
    {...rest}
  >
    {children}
  </div>
);
|
import numpy as np
from sklearn import svm

# Build the feature matrix (two grade columns) and the target vector.
# NOTE(review): `data` must already be defined when this runs — presumably a
# list of dicts with 'red_grade', 'white_grade' and 'quality' keys; confirm
# against the surrounding notebook/script.
X = np.array([np.array([x['red_grade'], x['white_grade']]) for x in data])
y = np.array([x['quality'] for x in data])

# Train a linear support-vector classifier on the full data set.
clf = svm.SVC(kernel='linear')
clf.fit(X, y)
# Linker emulation parameters for little-endian TI C6x ELF output.
SCRIPT_NAME=elf
TEMPLATE_NAME=elf32
OUTPUT_FORMAT="elf32-tic6x-le"
# This address is an arbitrary value expected to be suitable for
# semihosting simulator use, but not on hardware where it is expected
# to be overridden.
TEXT_START_ADDR=0x8000
MAXPAGESIZE="CONSTANT (MAXPAGESIZE)"
ARCH=tic6x
# DSBT base symbol used by the C6x EABI data model.
EXECUTABLE_SYMBOLS="EXTERN (__c6xabi_DSBT_BASE);"
SDATA_START_SYMBOLS="PROVIDE_HIDDEN (__c6xabi_DSBT_BASE = .);"
# ".bss" is near (small) BSS, ".far" is far (normal) BSS, ".const" is
# far read-only data, ".rodata" is near read-only data. ".neardata"
# is near (small) data, ".fardata" is (along with .data) far data.
RODATA_NAME="const"
SDATA_NAME="neardata"
SBSS_NAME="bss"
BSS_NAME="far"
OTHER_SDATA_SECTIONS=".rodata ${RELOCATING-0} : { *(.rodata${RELOCATING+ .rodata.*}) }"
OTHER_READONLY_RELOC_SECTIONS="
  .rel.rodata   ${RELOCATING-0} : { *(.rel.rodata${RELOCATING+ .rel.rodata.*}) }
  .rela.rodata  ${RELOCATING-0} : { *(.rela.rodata${RELOCATING+ .rela.rodata.*}) }"
OTHER_READWRITE_SECTIONS=".fardata ${RELOCATING-0} : { *(.fardata${RELOCATING+ .fardata.*}) }"
OTHER_READWRITE_RELOC_SECTIONS="
  .rel.fardata    ${RELOCATING-0} : { *(.rel.fardata${RELOCATING+ .rel.fardata.*}) }
  .rela.fardata   ${RELOCATING-0} : { *(.rela.fardata${RELOCATING+ .rela.fardata.*}) }"
# Reserve a 0x2000000-byte heap and a 0x100000-byte stack after BSS,
# exporting their boundary symbols for the runtime.
OTHER_BSS_SECTIONS="
  .heap :
  {
    . = ALIGN(4);
    _HEAP_START = .;
    . += 0x2000000;
    _HEAP_MAX = .;
  }
  .stack :
  {
    .  += 0x100000;
    _STACK_START = .;
  }"
ATTRS_SECTIONS='.c6xabi.attributes 0 : { KEEP (*(.c6xabi.attributes)) KEEP (*(.gnu.attributes)) }'
|
<gh_stars>1-10
/**
 * Compute the body mass index: weight (kg) divided by height (m) squared.
 *
 * Fixes the original stub, which ignored every parameter, leaked an
 * implicit global named `bmi` (shadowing the function), and always
 * returned 0.
 *
 * @param {number} age - kept for interface compatibility; BMI does not use it.
 * @param {number} weight - body weight in kilograms.
 * @param {number} height - body height in metres (non-zero).
 * @returns {number} the body mass index.
 */
function bmi(age, weight, height) {
  const value = weight / (height * height);
  return value;
}

module.exports = bmi;
|
package com.telpoo.frame.utils;
import java.io.File;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.pm.PackageManager.NameNotFoundException;
import android.net.Uri;
import android.os.Environment;
/**
 * Static helpers for querying, installing and uninstalling packages.
 */
public class AppSupport {

    /** Utility class; not instantiable. */
    private AppSupport() {
    }

    /**
     * Returns true when the given package is installed on the device.
     *
     * @param packagename the package to look up
     * @param context     any context able to reach the PackageManager
     */
    public static boolean isPackageInstalled(String packagename, Context context) {
        PackageManager pm = context.getPackageManager();
        try {
            pm.getPackageInfo(packagename, PackageManager.GET_ACTIVITIES);
            return true;
        } catch (NameNotFoundException e) {
            // Lookup failure means the package is absent.
            return false;
        }
    }

    /**
     * Launches the system uninstall dialog for the given package.
     */
    public static void unInstall(String packageName, Context mContext) {
        Uri packageURI = Uri.parse("package:" + packageName);
        Intent intent = new Intent(Intent.ACTION_DELETE, packageURI);
        mContext.startActivity(intent);
    }

    /**
     * Launches the system installer for the APK at the given file path.
     *
     * <p>NOTE(review): Uri.fromFile() triggers FileUriExposedException on
     * Android N+; a FileProvider content URI is required there — confirm
     * the supported API levels.</p>
     */
    public static void install(String path, Context content) {
        Intent promptInstall = new Intent(Intent.ACTION_VIEW)
                .setDataAndType(Uri.fromFile(new File(path)), "application/vnd.android.package-archive");
        // Required when starting an activity from a non-activity context.
        promptInstall.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        content.startActivity(promptInstall);
    }
}
|
<gh_stars>0
import About from './About/About';
import CaseStudy from './CaseStudy/CaseStudy';
import Changelog from './Changelog/Changelog';
import Contact from './Contact/Contact';
import Experience from './Experience/Experience';
import Footer from './Footer/Footer';
import GetInTouch from './GetInTouch/GetInTouch';
import Projects from './Projects/Projects';
import ProjectsPerTech from './ProjectsPerTech/ProjectsPerTech';
import SideMenu from './SideMenu/SideMenu';
import Technical from './Technical/Technical';
export {
About,
CaseStudy,
Changelog,
Contact,
Experience,
Footer,
GetInTouch,
Projects,
ProjectsPerTech,
SideMenu,
Technical,
};
|
// Migration: create the "exercises" table, one row per logged exercise,
// owned by a user via a cascading foreign key.
exports.up = function(knex) {
  return knex.schema.createTable("exercises", exercises => {
    // exercise id (auto-incrementing primary key)
    exercises.increments();
    // exercise type
    exercises.string("type", 128).notNullable();
    // exercise reps
    exercises.integer("reps").notNullable();
    // exercise created at (defaults to insertion time)
    exercises.timestamp("created_at").defaultTo(knex.fn.now());
    // connects exercise to user (foreign key); rows follow the user on
    // delete/update via CASCADE
    exercises
      .integer("user_id")
      .notNullable()
      .unsigned()
      .references("id")
      .inTable("users")
      .onDelete("CASCADE")
      .onUpdate("CASCADE");
  });
};

// Rollback: drop the table if it exists.
exports.down = function(knex) {
  return knex.schema.dropTableIfExists("exercises");
};
|
/*
* Copyright (c) Open Source Strategies, Inc.
*
* Opentaps is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Opentaps is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Opentaps. If not, see <http://www.gnu.org/licenses/>.
*/
package org.opentaps.tests.crmsfa.orders;
import java.math.BigDecimal;
import java.util.List;
import java.util.Map;
import org.ofbiz.accounting.invoice.InvoiceWorker;
import org.ofbiz.base.util.Debug;
import org.ofbiz.base.util.GeneralException;
import org.ofbiz.base.util.UtilMisc;
import org.ofbiz.base.util.UtilValidate;
import org.ofbiz.entity.GenericEntityException;
import org.ofbiz.entity.GenericValue;
import org.ofbiz.entity.util.EntityUtil;
import org.ofbiz.order.order.OrderReadHelper;
import org.ofbiz.service.GenericServiceException;
import org.opentaps.common.order.SalesOrderFactory;
import org.opentaps.tests.warehouse.InventoryAsserts;
public class DropShipTests extends OrderTestCase {
// Fix: the log module tag referenced OrderTests (copy-paste leftover);
// it must name this class.
private static final String MODULE = DropShipTests.class.getName();

// Fixture entities loaded in setUp() and released in tearDown().
private GenericValue demoCustomer;
private GenericValue demoCSR;
private GenericValue demowarehouse1;
private GenericValue demopurch1;
private GenericValue dropShip1;
private GenericValue dropShip2;
private GenericValue postalAddress;
private GenericValue paymentMethod;
private GenericValue productStore;

// Warehouse and organization the drop-ship scenario runs against.
private static final String facilityId = "WebStoreWarehouse";
private static final String organizationPartyId = "Company";
@Override
public void setUp() throws Exception {
    super.setUp();
    // Load every fixture entity the tests rely on, failing fast when the
    // demo seed data is missing.
    demoCustomer = delegator.findByPrimaryKey("Party", UtilMisc.toMap("partyId", "DemoCustomer"));
    assertNotNull("DemoCustomer not null", demoCustomer);
    dropShip1 = delegator.findByPrimaryKey("Product", UtilMisc.toMap("productId", "dropShip1"));
    assertNotNull("Product dropShip1 not null", dropShip1);
    dropShip2 = delegator.findByPrimaryKey("Product", UtilMisc.toMap("productId", "dropShip2"));
    assertNotNull("Product dropShip2 not null", dropShip2);
    demoCSR = delegator.findByPrimaryKey("UserLogin", UtilMisc.toMap("userLoginId", "DemoCSR"));
    assertNotNull("DemoCSR not null", demoCSR);
    demowarehouse1 = delegator.findByPrimaryKey("UserLogin", UtilMisc.toMap("userLoginId", "demowarehouse1"));
    assertNotNull("demowarehouse1 not null", demowarehouse1);
    demopurch1 = delegator.findByPrimaryKey("UserLogin", UtilMisc.toMap("userLoginId", "demopurch1"));
    assertNotNull("demopurch1 not null", demopurch1);
    postalAddress = delegator.findByPrimaryKey("PostalAddress", UtilMisc.toMap("contactMechId", "9015"));
    assertNotNull("PostalAddress 9015 not null", postalAddress);
    paymentMethod = delegator.findByPrimaryKey("CreditCard", UtilMisc.toMap("paymentMethodId", "9015"));
    assertNotNull("CreditCard 9015 not null", paymentMethod);
    productStore = delegator.findByPrimaryKey("ProductStore", UtilMisc.toMap("productStoreId", "9000"));
    assertNotNull("ProductStore 9000 not null", productStore);
}
@Override
public void tearDown() throws Exception {
    super.tearDown();
    // Drop fixture references so they can be collected between tests.
    demoCustomer = null;
    dropShip1 = null;
    dropShip2 = null;
    demoCSR = null;
    demowarehouse1 = null;
    demopurch1 = null;
    postalAddress = null;
    paymentMethod = null;
    productStore = null;
}
/**
 * Verify drop shipping is handled correctly end-to-end:
 * a sales order with two drop-ship items produces linked purchase orders
 * (one per supplier), sales and purchase invoices plus customer payments
 * are created, and warehouse inventory is untouched (no inventory changes
 * hands for drop-shipped goods).
 * @exception GeneralException if an error occurs
 */
public void testDropShipOrdering() throws GeneralException {
InventoryAsserts invAss = new InventoryAsserts(this, facilityId, organizationPartyId, User);
// snapshot inventory before ordering so we can assert a zero net change at the end
Map<String, Object> initialDropShip1Inventory = invAss.getInventory(dropShip1.getString("productId"));
Map<String, Object> initialDropShip2Inventory = invAss.getInventory(dropShip2.getString("productId"));
// Create a sales order for 1 dropShip1 and 2 dropShip2 for demoCustomer using credit card paymentMethodId 9015
SalesOrderFactory sof = null;
try {
sof = new SalesOrderFactory(delegator, dispatcher, demoCSR, organizationPartyId, demoCustomer.getString("partyId"), productStore.getString("productStoreId"));
} catch (
GenericEntityException e) {
assertTrue("GenericEntityException:" + e.toString(), false);
}
String currencyUomId = productStore.getString("defaultCurrencyUomId");
// get the drop ship supplier for each of the products
String dropShip1SupplierPartyId = null;
Map<String, Object> getSuppliersForProductResult = runAndAssertServiceSuccess("getSuppliersForProduct", UtilMisc.<String, Object>toMap("productId", dropShip1.getString("productId"), "quantity", new BigDecimal("1.0"), "canDropShip", "Y", "currencyUomId", currencyUomId));
List<GenericValue> supplierProducts = (List<GenericValue>) getSuppliersForProductResult.get("supplierProducts");
if (UtilValidate.isNotEmpty(supplierProducts)) {
dropShip1SupplierPartyId = EntityUtil.getFirst(supplierProducts).getString("partyId");
}
assertNotNull("No supplier found for product dropShip1", dropShip1SupplierPartyId);
String dropShip2SupplierPartyId = null;
getSuppliersForProductResult = runAndAssertServiceSuccess("getSuppliersForProduct", UtilMisc.<String, Object>toMap("productId", dropShip2.getString("productId"), "quantity", new BigDecimal("2.0"), "canDropShip", "Y", "currencyUomId", currencyUomId));
supplierProducts = (List<GenericValue>) getSuppliersForProductResult.get("supplierProducts");
if (UtilValidate.isNotEmpty(supplierProducts)) {
dropShip2SupplierPartyId = EntityUtil.getFirst(supplierProducts).getString("partyId");
}
assertNotNull("No supplier found for product dropShip2", dropShip2SupplierPartyId);
sof.addPaymentMethod("CREDIT_CARD", paymentMethod.getString("paymentMethodId"));
// set the shipping method and the drop shipping supplier to the two ship groups
sof.addShippingGroup("UPS", "NEXT_DAY", postalAddress.getString("contactMechId"), dropShip1SupplierPartyId);
sof.addShippingGroup("UPS", "NEXT_DAY", postalAddress.getString("contactMechId"), dropShip2SupplierPartyId);
// one order item per drop-ship product, sequence ids 00001 and 00002
try {
sof.addProduct(dropShip1, new BigDecimal("1.0"), "00001");
sof.addProduct(dropShip2, new BigDecimal("2.0"), "00002");
} catch (GenericServiceException e) {
fail("GenericServiceException:" + e.toString());
}
// Create and approve the order
String orderId = null;
try {
orderId = sof.storeOrder();
sof.approveOrder();
sof.processPayments();
} catch (GenericServiceException e) {
fail("GenericServiceException:" + e.toString());
}
Debug.logInfo("testDropShipOrdering created sales order ID " + orderId, MODULE);
// Verify that dropShip1 and dropShip2 are each linked to a different purchase order item
GenericValue orderItem1Assoc = EntityUtil.getFirst(delegator.findByAnd("OrderItemAssoc", UtilMisc.toMap("orderId", orderId, "orderItemSeqId", "00001", "orderItemAssocTypeId", "DROP_SHIPMENT")));
GenericValue orderItem2Assoc = EntityUtil.getFirst(delegator.findByAnd("OrderItemAssoc", UtilMisc.toMap("orderId", orderId, "orderItemSeqId", "00002", "orderItemAssocTypeId", "DROP_SHIPMENT")));
assertNotNull("dropShip1 orderItem (orderItemSeqId 00001) for order ID " + orderId + " is not linked to a purchase order item", orderItem1Assoc);
assertNotNull("dropShip2 orderItem (orderItemSeqId 00002) for order ID " + orderId + " is not linked to a purchase order item", orderItem2Assoc);
String dropShip1PurchaseOrderId = orderItem1Assoc.getString("toOrderId");
String dropShip2PurchaseOrderId = orderItem2Assoc.getString("toOrderId");
// only expect distinct purchase orders when the two products use different suppliers
if (!dropShip1SupplierPartyId.equalsIgnoreCase(dropShip2SupplierPartyId)) {
assertNotSame("dropShip1 orderItem (orderItemSeqId 00001) for order ID " + orderId + " is linked to the same purchase order (" + dropShip1PurchaseOrderId + ") as dropShip2 orderItem (00002)", dropShip1PurchaseOrderId, dropShip2PurchaseOrderId);
}
// Approve and call service quickDropShipOrder on the purchase orders linked to dropShip1
// Note that shipGroupSeqId is the shipGroupSeqId of the purchase order, not the sales order, so it should be 00001 in both cases
runAndAssertServiceSuccess("changeOrderItemStatus", UtilMisc.toMap("orderId", dropShip1PurchaseOrderId, "statusId", "ITEM_APPROVED", "userLogin", demoCSR));
runAndAssertServiceSuccess("quickDropShipOrder", UtilMisc.toMap("orderId", dropShip1PurchaseOrderId, "shipGroupSeqId", "00001", "userLogin", demowarehouse1));
// Sleep to get the service captureOrderPayments in PaymentGatewayServices and
// the service processCaptureSplitPayment fired by captureOrderPayments finished
// NOTE(review): this is a fixed 3-minute wait for asynchronous payment capture;
// if the async services finish sooner, a polling wait would shorten the test run.
try {
Thread.sleep(1000 * 60 * 3);
} catch (InterruptedException e) {
fail("InterruptedException: " + e.toString());
}
// Approve and call service quickDropShipOrder on the purchase orders linked to dropShip2
runAndAssertServiceSuccess("changeOrderItemStatus", UtilMisc.toMap("orderId", dropShip2PurchaseOrderId, "statusId", "ITEM_APPROVED", "userLogin", demoCSR));
runAndAssertServiceSuccess("quickDropShipOrder", UtilMisc.toMap("orderId", dropShip2PurchaseOrderId, "shipGroupSeqId", "00001", "userLogin", demowarehouse1));
// Verify that the sales order and purchase orders are now completed
GenericValue salesOrder = delegator.findByPrimaryKey("OrderHeader", UtilMisc.toMap("orderId", orderId));
assertNotNull("Can't find sales order " + orderId, salesOrder);
assertEquals("Sales order " + orderId + " status is not ORDER_COMPLETED", "ORDER_COMPLETED", salesOrder.getString("statusId"));
GenericValue dropShip1PurchaseOrder = delegator.findByPrimaryKey("OrderHeader", UtilMisc.toMap("orderId", dropShip1PurchaseOrderId));
assertNotNull("Can't find purchase order " + dropShip1PurchaseOrderId, dropShip1PurchaseOrder);
assertEquals("Purchase order " + dropShip1PurchaseOrderId + " status is not ORDER_COMPLETED", "ORDER_COMPLETED", dropShip1PurchaseOrder.getString("statusId"));
GenericValue dropShip2PurchaseOrder = delegator.findByPrimaryKey("OrderHeader", UtilMisc.toMap("orderId", dropShip2PurchaseOrderId));
assertNotNull("Can't find purchase order " + dropShip2PurchaseOrderId, dropShip2PurchaseOrder);
assertEquals("Purchase order " + dropShip2PurchaseOrderId + " status is not ORDER_COMPLETED", "ORDER_COMPLETED", dropShip2PurchaseOrder.getString("statusId"));
// Verify that no new InventoryItem (since the beginning of this unit test) have been created for dropShip1 and dropShip2
invAss.assertInventoryChange(dropShip1.getString("productId"), BigDecimal.ZERO, initialDropShip1Inventory);
invAss.assertInventoryChange(dropShip2.getString("productId"), BigDecimal.ZERO, initialDropShip2Inventory);
// Verify that a sales invoice and received customer payment for dropShip1 is created, and this sales invoice is paid
// Verify that a sales invoice and received customer payment for dropShip2 is created, and this sales invoice is paid
List<GenericValue> dropShip1OrderItemBillingList = delegator.findByAnd("OrderItemBilling", UtilMisc.toMap("orderId", orderId, "orderItemSeqId", "00001"));
assertEquals("There is only one invoice corresponding to orderId " + orderId, 1, dropShip1OrderItemBillingList.size());
GenericValue dropShip1OrderItemBilling = EntityUtil.getFirst(dropShip1OrderItemBillingList);
List<GenericValue> salesInvoice1List = dropShip1OrderItemBilling.getRelated("Invoice");
assertEquals("There is only one invoice corresponding to orderId " + orderId, 1, salesInvoice1List.size());
GenericValue salesInvoice1 = EntityUtil.getFirst(salesInvoice1List);
assertEquals("Invoice should be a SALES_INVOICE", "SALES_INVOICE", salesInvoice1.getString("invoiceTypeId"));
assertEquals("Invoice partyIdFrom should be Company", "Company", salesInvoice1.getString("partyIdFrom"));
assertEquals("Invoice partyId should be DemoCustomer", "DemoCustomer", salesInvoice1.getString("partyId"));
assertEquals("Invoice statusId should be INVOICE_PAID", "INVOICE_PAID", salesInvoice1.getString("statusId"));
List<GenericValue> paymentApplication1List = delegator.findByAnd("PaymentApplication", UtilMisc.toMap("invoiceId", salesInvoice1.getString("invoiceId")));
assertEquals("There is only one payment corresponding to invoiceId " + salesInvoice1.getString("invoiceId"), 1, paymentApplication1List.size());
GenericValue paymentApplication1 = EntityUtil.getFirst(paymentApplication1List);
List<GenericValue> payment1List = paymentApplication1.getRelated("Payment");
assertEquals("There is only one payment corresponding to invoiceId " + salesInvoice1.getString("invoiceId"), 1, payment1List.size());
GenericValue payment1 = EntityUtil.getFirst(payment1List);
assertEquals("Payment statusId should be PMNT_RECEIVED", "PMNT_RECEIVED", payment1.getString("statusId"));
assertEquals("Payment partyIdFrom should be DemoCustomer", "DemoCustomer", payment1.getString("partyIdFrom"));
assertEquals("Payment partyIdTo should be Company", "Company", payment1.getString("partyIdTo"));
// same invoice/payment checks for the second drop-ship item (orderItemSeqId 00002)
List<GenericValue> dropShip2OrderItemBillingList = delegator.findByAnd("OrderItemBilling", UtilMisc.toMap("orderId", orderId, "orderItemSeqId", "00002"));
assertEquals("There is only one invoice corresponding to orderId " + orderId, 1, dropShip2OrderItemBillingList.size());
GenericValue dropShip2OrderItemBilling = EntityUtil.getFirst(dropShip2OrderItemBillingList);
List<GenericValue> salesInvoice2List = dropShip2OrderItemBilling.getRelated("Invoice");
assertEquals("There is only one invoice corresponding to orderId " + orderId, 1, salesInvoice2List.size());
GenericValue salesInvoice2 = EntityUtil.getFirst(salesInvoice2List);
assertEquals("Invoice should be a SALES_INVOICE", "SALES_INVOICE", salesInvoice2.getString("invoiceTypeId"));
assertEquals("Invoice partyIdFrom should be Company", "Company", salesInvoice2.getString("partyIdFrom"));
assertEquals("Invoice partyId should be DemoCustomer", "DemoCustomer", salesInvoice2.getString("partyId"));
assertEquals("Invoice statusId should be INVOICE_PAID", "INVOICE_PAID", salesInvoice2.getString("statusId"));
List<GenericValue> paymentApplication2List = delegator.findByAnd("PaymentApplication", UtilMisc.toMap("invoiceId", salesInvoice2.getString("invoiceId")));
assertEquals("There is only one payment corresponding to invoiceId " + salesInvoice2.getString("invoiceId"), 1, paymentApplication2List.size());
GenericValue paymentApplication2 = EntityUtil.getFirst(paymentApplication2List);
List<GenericValue> payment2List = paymentApplication2.getRelated("Payment");
assertEquals("There is only one payment corresponding to invoiceId " + salesInvoice2.getString("invoiceId"), 1, payment2List.size());
GenericValue payment2 = EntityUtil.getFirst(payment2List);
assertEquals("Payment statusId should be PMNT_RECEIVED", "PMNT_RECEIVED", payment2.getString("statusId"));
assertEquals("Payment partyIdFrom should be DemoCustomer", "DemoCustomer", payment2.getString("partyIdFrom"));
assertEquals("Payment partyIdTo should be Company", "Company", payment2.getString("partyIdTo"));
// the two customer payments together must equal the order grand total
assertEquals("Payment amount should be grand total of the sales order", sof.getGrandTotal(), payment1.getDouble("amount") + payment2.getDouble("amount"));
// Verify that a purchase invoice for dropShip1 from DemoSupplier is created in the "In Process" state
List<GenericValue> dropShip1PurchaseOrderItemBillingList = delegator.findByAnd("OrderItemBilling", UtilMisc.toMap("orderId", dropShip1PurchaseOrderId, "orderItemSeqId", "00001"));
assertEquals("There is only one invoice corresponding to orderId " + dropShip1PurchaseOrderId, 1, dropShip1PurchaseOrderItemBillingList.size());
GenericValue dropShip1PurchaseOrderItemBilling = EntityUtil.getFirst(dropShip1PurchaseOrderItemBillingList);
List<GenericValue> dropShipInvoice1List = dropShip1PurchaseOrderItemBilling.getRelated("Invoice");
assertEquals("There is only one invoice corresponding to orderId " + dropShip1PurchaseOrderId, 1, dropShipInvoice1List.size());
GenericValue dropShipInvoice1 = EntityUtil.getFirst(dropShipInvoice1List);
assertEquals("Invoice partyIdFrom should be DemoSupplier", "DemoSupplier", dropShipInvoice1.getString("partyIdFrom"));
assertEquals("Invoice partyId should be Company", "Company", dropShipInvoice1.getString("partyId"));
assertEquals("Invoice statusId should be INVOICE_IN_PROCESS", "INVOICE_IN_PROCESS", dropShipInvoice1.getString("statusId"));
// Verify that a purchase invoice for dropShip2 from BigSupplier is created in the "In Process" state
List<GenericValue> dropShip2PurchaseOrderItemBillingList = delegator.findByAnd("OrderItemBilling", UtilMisc.toMap("orderId", dropShip2PurchaseOrderId, "orderItemSeqId", "00001"));
assertEquals("There is only one invoice corresponding to orderId " + dropShip2PurchaseOrderId, 1, dropShip2PurchaseOrderItemBillingList.size());
GenericValue dropShip2PurchaseOrderItemBilling = EntityUtil.getFirst(dropShip2PurchaseOrderItemBillingList);
List<GenericValue> dropShipInvoice2List = dropShip2PurchaseOrderItemBilling.getRelated("Invoice");
assertEquals("There is only one invoice corresponding to orderId " + dropShip2PurchaseOrderId, 1, dropShipInvoice2List.size());
GenericValue dropShipInvoice2 = EntityUtil.getFirst(dropShipInvoice2List);
assertEquals("Invoice partyIdFrom should be BigSupplier", "BigSupplier", dropShipInvoice2.getString("partyIdFrom"));
assertEquals("Invoice partyId should be Company", "Company", dropShipInvoice2.getString("partyId"));
assertEquals("Invoice statusId should be INVOICE_IN_PROCESS", "INVOICE_IN_PROCESS", dropShipInvoice2.getString("statusId"));
// Verify that the total of salesInvoice1 + salesInvoice2 == grand total of the sales order (use InvoiceWorker.getInvoiceTotalBd for invoice totals)
double invoiceTotal = InvoiceWorker.getInvoiceTotal(delegator, salesInvoice1.getString("invoiceId")).doubleValue() + InvoiceWorker.getInvoiceTotal(delegator, salesInvoice2.getString("invoiceId")).doubleValue();
assertEquals("salesInvoice1 + salesInvoice2 == grand total of the sales order", sof.getGrandTotal(), invoiceTotal);
// Verify that the total of dropShipInvoice1 + dropShipInvoice2 == (grand total of dropShip1PurchaseOrderId) + (grand total of dropShip2PurchaseOrderId)
double purchaseInvoiceTotal = InvoiceWorker.getInvoiceTotal(delegator, dropShipInvoice1.getString("invoiceId")).doubleValue() + InvoiceWorker.getInvoiceTotal(delegator, dropShipInvoice2.getString("invoiceId")).doubleValue();
OrderReadHelper orh1 = new OrderReadHelper(dropShip1PurchaseOrder);
OrderReadHelper orh2 = new OrderReadHelper(dropShip2PurchaseOrder);
double purchaseOrderTotal = orh1.getOrderGrandTotal().doubleValue() + orh2.getOrderGrandTotal().doubleValue();
assertEquals("total of dropShipInvoice1 + dropShipInvoice2 == (grand total of dropShip1PurchaseOrderId) + (grand total of dropShip2PurchaseOrderId)", purchaseOrderTotal, purchaseInvoiceTotal);
}
}
|
/*---------------------------------------------------------------------------*\
| Subject: Mz XmlDocument
| NameSpace: System.Xml.MzXmlDocument
| Author: meizz
| Created: 2006-01-23
| Version: 2006-04-26
|-------------------------------------------------------------
| MSN: <EMAIL> QQ: 112889082 http://www.meizz.com
| Email: <EMAIL> CSDN ID:meizz Copyright (c) meizz
\*---------------------------------------------------------------------------*/
/*
 * Cross-browser XML document factory.
 * W3C browsers (document.implementation.createDocument available): return a
 * native XML document, patched so that readyState reaches 4 after a load.
 * Otherwise (legacy IE): probe a list of MSXML ActiveX ProgID prefixes and
 * return the first DOM document that can be instantiated.
 * Throws if no XML parser is available at all.
 */
function MzXmlDocument()
{
    if (document.implementation && document.implementation.createDocument)
    {
        var xmlDoc = document.implementation.createDocument("", "", null);
        // emulate the MSXML readyState contract on the W3C document
        xmlDoc.addEventListener("load", function(e) { this.readyState = 4; }, false);
        xmlDoc.readyState = 4;
        return xmlDoc;
    }

    var progIdPrefixes = ["MSXML2", "Microsoft", "MSXML", "MSXML3"];
    for (var i = 0; i < progIdPrefixes.length; i++)
    {
        // ProgID lookup is case-insensitive; try each known MSXML prefix
        try { return new ActiveXObject(progIdPrefixes[i] + '.DomDocument'); } catch (ignored) {}
    }
    throw new Error("Could not find an installed XML parser!");
}
// Register MzXmlDocument under the System namespace (framework helper;
// Extends is presumably defined elsewhere in this library - not visible here).
MzXmlDocument.Extends(System, "MzXmlDocument");
// IE7 ships a native XMLHTTP implementation, so the Mozilla prototype
// patches below must be skipped for IE >= 7.
var IE7 = false; //repair for IE7 2006-04-26
if(/MSIE (\d+(\.\d+)?)/.test(navigator.userAgent))
{
IE7 = parseFloat(RegExp.$1)>=7;
}
// Monkey-patch Mozilla's XMLDocument/Node/Attr/Text prototypes so that
// Gecko-based browsers expose the MSXML-style API (createNode, loadXML,
// selectNodes, parseError, .xml, .text, transformNode, ...).
// Skipped for IE (which has the API natively) and for browsers without
// XMLDocument.
if(System.supportsXmlHttp() && "undefined"!=typeof XMLDocument && !IE7)
{
(function()
{
// Insert an intermediate prototype so the patches can shadow (and still
// reach) the native implementations via __proto__.
var _xmlDocPrototype=XMLDocument.prototype;
_xmlDocPrototype.__proto__={__proto__:_xmlDocPrototype.__proto__};
var _p=_xmlDocPrototype.__proto__;
// MSXML createNode(type, name, namespace): 1=element, 2=attribute, 3=text.
_p.createNode=function(aType,aName,aNamespace)
{
switch(aType)
{
case 1:
if(aNamespace&&aNamespace!="")
return this.createElementNS(aNamespace,aName);
else return this.createElement(aName);
case 2:
if(aNamespace&&aNamespace!="")
return this.createAttributeNS(aNamespace,aName);
else return this.createAttribute(aName);
case 3:
default:return this.createTextNode("");
}
};
// Wrap load() so readyState is reset to 0 while loading (MSXML behavior).
_p.__realLoad=_xmlDocPrototype.load;
_p.load=function(sUri)
{
this.readyState=0;
this.__realLoad(sUri);
};
// MSXML loadXML(string): parse with DOMParser and graft the parsed
// children onto this document, replacing its current content.
_p.loadXML=function(s)
{
var doc2=(new DOMParser).parseFromString(s,"text/xml");
while(this.hasChildNodes())
this.removeChild(this.lastChild);
var cs=doc2.childNodes;
var l=cs.length;
for(var i=0;i<l;i++)
this.appendChild(this.importNode(cs[i],true));
};
// MSXML setProperty: only "SelectionNamespaces" is emulated; parses
// xmlns:prefix="uri" pairs for use by selectNodes/selectSingleNode.
_p.setProperty=function(sName,sValue)
{
if(sName=="SelectionNamespaces")
{
this._selectionNamespaces={};
var parts=sValue.split(/\s+/);
var re= /^xmlns\:([^=]+)\=((\"([^\"]*)\")|(\'([^\']*)\'))$/;
for(var i=0;i<parts.length;i++){
re.test(parts[i]);
this._selectionNamespaces[RegExp.$1]=RegExp.$4||RegExp.$6;
}
}
};
// onreadystatechange is mapped onto the DOM "load" event.
_p.__defineSetter__("onreadystatechange",function(f){
if(this._onreadystatechange)
this.removeEventListener("load",this._onreadystatechange,false);
this._onreadystatechange=f;
if(f)
this.addEventListener("load",f,false);return f;
});
_p.__defineGetter__("onreadystatechange",function(){
return this._onreadystatechange;
});
// Mozilla signals parse failures with a <parsererror> document element.
MzXmlDocument._mozHasParseError=function(oDoc){
return!oDoc.documentElement||oDoc.documentElement.localName=="parsererror"&&oDoc.documentElement.getAttribute("xmlns")=="http://www.mozilla.org/newlayout/xml/parsererror.xml";
};
// MSXML parseError emulation, scraped from Mozilla's parsererror document.
_p.__defineGetter__("parseError",function(){
var hasError=MzXmlDocument._mozHasParseError(this);
var res={errorCode:0,filepos:0,line:0,linepos:0,reason:"",srcText:"",url:""};
if(hasError){
res.errorCode= -1;
try{
res.srcText=this.getElementsByTagName("sourcetext")[0].firstChild.data;
res.srcText=res.srcText.replace(/\n\-\^$/,"");
}
catch(ex){
res.srcText="";
}
try{
var s=this.documentElement.firstChild.data;
var re= /XML Parsing Error\:(.+)\nLocation\:(.+)\nLine Number(\d+)\,Column(\d+)/;
var a=re.exec(s);res.reason=a[1];res.url=a[2];res.line=a[3];res.linepos=a[4];
}
catch(ex){
res.reason="Unknown";
}
}
return res;
});
// ---- Node prototype extensions -------------------------------------
var _nodePrototype=Node.prototype;
_nodePrototype.__proto__={__proto__:_nodePrototype.__proto__};
_p=_nodePrototype.__proto__;
// .xml: serialized markup of this node (MSXML property).
_p.__defineGetter__("xml",function(){
return(new XMLSerializer).serializeToString(this);
});
// .baseName: node name without its namespace prefix.
_p.__defineGetter__("baseName",function(){
var lParts=this.nodeName.split(":");
return lParts[lParts.length-1];
});
// .text: concatenated text of all descendants (recursive via child .text).
_p.__defineGetter__("text",function(){
var cs=this.childNodes;
var l=cs.length;
var sb=new Array(l);
for(var i=0;i<l;i++)
sb[i]=cs[i].text;
return sb.join("");
});
// selectNodes(xpath): evaluate as an ordered snapshot (type 5), honoring
// any namespaces registered via setProperty("SelectionNamespaces", ...).
_p.selectNodes=function(sExpr){
var doc=this.nodeType==9?this:this.ownerDocument;
var nsRes=doc.createNSResolver(this.nodeType==9?this.documentElement:this);
var nsRes2;
if(doc._selectionNamespaces){
nsRes2=function(s){
if(doc._selectionNamespaces[s])
return doc._selectionNamespaces[s];
return nsRes.lookupNamespaceURI(s);
};
}
else nsRes2=nsRes;
var xpRes=doc.evaluate(sExpr,this,nsRes2,5,null);
var res=[];
var item;
while((item=xpRes.iterateNext()))
res.push(item);
return res;
};
// selectSingleNode(xpath): first matching node (XPath result type 9).
_p.selectSingleNode=function(sExpr){
var doc=this.nodeType==9?this:this.ownerDocument;
var nsRes=doc.createNSResolver(this.nodeType==9?this.documentElement:this);
var nsRes2;
if(doc._selectionNamespaces){
nsRes2=function(s){
if(doc._selectionNamespaces[s])
return doc._selectionNamespaces[s];
return nsRes.lookupNamespaceURI(s);
};
}
else nsRes2=nsRes;
var xpRes=doc.evaluate(sExpr,this,nsRes2,9,null);
return xpRes.singleNodeValue;
};
// transformNode(xslt): apply a stylesheet and return the result as markup.
_p.transformNode=function(oXsltNode){
var doc=this.nodeType==9?this:this.ownerDocument;
var processor=new XSLTProcessor();
processor.importStylesheet(oXsltNode);
var df=processor.transformToFragment(this,doc);
return df.xml;
};
// transformNodeToObject(xslt, outDoc): apply a stylesheet, replacing the
// content of the given output document with the transformation result.
_p.transformNodeToObject=function(oXsltNode,oOutputDocument){
var doc=this.nodeType==9?this:this.ownerDocument;
var outDoc=oOutputDocument.nodeType==9?oOutputDocument:oOutputDocument.ownerDocument;
var processor=new XSLTProcessor();processor.importStylesheet(oXsltNode);
var df=processor.transformToFragment(this,doc);
while(oOutputDocument.hasChildNodes())
oOutputDocument.removeChild(oOutputDocument.lastChild);
var cs=df.childNodes;
var l=cs.length;
for(var i=0;i<l;i++)
oOutputDocument.appendChild(outDoc.importNode(cs[i],true));
};
// ---- Attr prototype: .xml as name="value" with quotes escaped --------
var _attrPrototype=Attr.prototype;
_attrPrototype.__proto__={__proto__:_attrPrototype.__proto__};
_p=_attrPrototype.__proto__;
_p.__defineGetter__("xml",function(){
var nv=(new XMLSerializer).serializeToString(this);
// FIX: the replacement string had been corrupted by HTML-entity decoding
// (leaving an unterminated string literal); embedded double quotes in the
// attribute value must be escaped as the &quot; entity.
return this.nodeName+"=\""+nv.replace(/\"/g,"&quot;")+"\"";
});
// ---- Text prototype: .text is simply the node value ------------------
var _textPrototype=Text.prototype;
_textPrototype.__proto__={__proto__:_textPrototype.__proto__};
_p=_textPrototype.__proto__;
_p.__defineGetter__("text",function(){
return this.nodeValue;
});
})();
}
def find_primes(lower, upper):
    """Return a list of all prime numbers in the inclusive range [lower, upper].

    Args:
        lower: lower bound of the range (values below 2 are ignored).
        upper: upper bound of the range, inclusive.

    Returns:
        Primes in ascending order; empty list if the range contains none.
    """
    primes = []
    # Primes start at 2. The previous version iterated from `lower` directly
    # and used all(num % i for i in range(2, num)); for num < 2 that range is
    # empty and all() is vacuously True, so 0, 1 and negative numbers were
    # wrongly reported as prime.
    for num in range(max(lower, 2), upper + 1):
        # Trial division only needs to test divisors up to sqrt(num).
        if all(num % divisor != 0 for divisor in range(2, int(num ** 0.5) + 1)):
            primes.append(num)
    return primes
# Launcher for the blockchain loader q process.
# Resolve the directory this script lives in so all paths are location-independent.
export currDir=$(cd `dirname $0` && pwd)
# Log directory for this loader instance.
export logDir=$currDir/log/loader/blockchain
# Path to the qinfra installation (relative to this script).
export QINFRA=$currDir/../../../qinfra
# Code dependency path and init script passed to the q process.
export dependPath=$currDir/../
export initScript=$currDir/../initBlockchain.q
mkdir -p $logDir
# Start the SSL-enabled q process in the background on port 3657, wiring it to
# the RDB/HDB/HDB-writer processes on this host (ports 36041/36051/36052);
# stdout/stderr are appended to the log directory.
# NOTE(review): $(hostname -I) can return multiple addresses - assumes the
# first/only interface address is the intended one; verify on multi-homed hosts.
$QINFRA/l32/q_ssl.sh -depend $dependPath -init $initScript -p 3657 -rdb "$(hostname -I):36041" -hdb "$(hostname -I):36051" -hdbwriter "$(hostname -I):36052" 1>>$logDir/stdout.log 2>>$logDir/stderr.log &
/* ========================= eCAL LICENSE =================================
*
* Copyright (C) 2016 - 2019 Continental Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* ========================= eCAL LICENSE =================================
*/
#include <ecal/ecal.h>
#include <ecal/msg/string/subscriber.h>
#include <iostream>
#include <sstream>
#include <mutex>
#include <chrono>
#include <thread>
// globals
// Start of the current measurement interval. Initialized to the clock epoch,
// so the very first received message sees an interval >= 1 s and triggers an
// immediate statistics printout.
std::chrono::steady_clock::time_point start_time(std::chrono::nanoseconds(0));
// messages received during the current interval (reset by PrintStatistic)
long long g_msgs (0);
// bytes received during the current interval (reset by PrintStatistic)
long long g_bytes(0);
// print performance results
void PrintStatistic(const std::string& topic_name_, const std::chrono::duration<double>& diff_time_, const size_t size_, long long& bytes_, long long& msgs_, char* buf_)
{
std::stringstream out;
out << "Topic Name: " << topic_name_ << std::endl;
out << "Message size (kByte): " << int(size_ / 1024 ) << std::endl;
out << "kByte/s: " << int(bytes_ / 1024 / diff_time_.count()) << std::endl;
out << "MByte/s: " << int(bytes_ / 1024 / 1024 / diff_time_.count()) << std::endl;
out << "Messages/s: " << int(msgs_ / diff_time_.count()) << std::endl;
if(buf_)
{
buf_[30] = 0;
out << "Message: " << buf_ << std::endl;
}
std::cout << out.str() << std::endl;
msgs_ = 0;
bytes_ = 0;
}
// subscriber callback function
void OnReceive(const char* topic_name_, const struct eCAL::SReceiveCallbackData* data_)
{
size_t size = data_->size;
const void* data = data_->buf;
g_msgs++;
g_bytes += size;
// check time and print results every second
std::chrono::duration<double> diff_time = std::chrono::steady_clock::now() - start_time;
if (diff_time >= std::chrono::seconds(1))
{
PrintStatistic(topic_name_, diff_time, size, g_bytes, g_msgs, (char*)data);
start_time = std::chrono::steady_clock::now();
}
}
// main entry
int main(int argc, char **argv)
{
// initialize eCAL API
eCAL::Initialize(argc, argv, "performance_rec_cb");
// create subscriber for topic "Performance"
eCAL::CSubscriber sub("Performance");
// dump instance state if creation failed
if(!sub.IsCreated())
{
std::cout << "Could not create subscriber !" << std::endl;
return(0);
}
// add callback
sub.AddReceiveCallback(std::bind(OnReceive, std::placeholders::_1, std::placeholders::_2));
// idle main thread
while(eCAL::Ok())
{
// sleep 100 ms
std::this_thread::sleep_for(std::chrono::milliseconds(100));
}
// destroy subscriber
sub.Destroy();
// finalize eCAL API
eCAL::Finalize();
return(0);
}
|
import numpy as np
import pandas as pd
from keras.models import Sequential
from keras.layers import Dense
# Train a small fully-connected regression network on the house-price dataset.
# Load the example dataset
# assumes houseprices.csv has a numeric 'price' column and only numeric
# feature columns -- TODO confirm against the data file
df = pd.read_csv('houseprices.csv')
# Split the dataset into inputs and outputs
X = df.drop(columns=['price'])
y = df['price']
# Create the model: two hidden ReLU layers, linear single-unit output
# (standard for scalar regression)
model = Sequential()
model.add(Dense(64, input_dim=X.shape[1], activation='relu'))
model.add(Dense(32, activation='relu'))
model.add(Dense(1))
# Compile the model: mean squared error loss, tracking mean absolute error
model.compile(loss='mse', optimizer='adam', metrics=['mae'])
# Train the model
model.fit(X, y, batch_size=256, epochs=150, verbose=2)
<filename>src/main/java/net/kardexo/kardexotools/tasks/TickableDeathListener.java
package net.kardexo.kardexotools.tasks;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import net.minecraft.Util;
import net.minecraft.network.chat.TextComponent;
import net.minecraft.server.MinecraftServer;
import net.minecraft.server.level.ServerPlayer;
import net.minecraft.util.Mth;
/**
 * Periodic server task that tells each player the block coordinates of their
 * death, exactly once per death. Intended to be run repeatedly (every tick or
 * similar) by the scheduler that owns it.
 */
public class TickableDeathListener implements Runnable
{
	private final MinecraftServer server;
	/**
	 * Names of players whose current death has already been announced.
	 * A HashSet replaces the former ArrayList: contains/add/remove were O(n)
	 * per player per run, and the contains-then-add pair is collapsed into a
	 * single Set.add call.
	 */
	private final Set<String> cache = new HashSet<String>();

	public TickableDeathListener(MinecraftServer dedicatedserver)
	{
		this.server = dedicatedserver;
	}

	@Override
	public void run()
	{
		for(ServerPlayer player : this.server.getPlayerList().getPlayers())
		{
			String name = player.getGameProfile().getName();
			if(player.getHealth() == 0)
			{
				// Set.add returns false if the name was already present, so the
				// message is sent only once per death.
				if(this.cache.add(name))
				{
					int x = Mth.floor(player.getX());
					int y = Mth.floor(player.getY());
					int z = Mth.floor(player.getZ());
					player.sendMessage(new TextComponent("You died at " + x + " " + y + " " + z), Util.NIL_UUID);
				}
			}
			else
			{
				// player is alive again; allow the next death to be announced
				this.cache.remove(name);
			}
		}
	}
}
|
#!/bin/bash
# Build the color-space example programs (cmy, cmyk, gray, rgb).
# Relies on the environment variables $gccBaseSwitch and $gccExtraSwitch being
# exported by the caller -- TODO confirm they are set by the invoking script.
pwd
g++ $gccBaseSwitch $gccExtraSwitch -I../../../src cmy.cpp -o cmy
g++ $gccBaseSwitch $gccExtraSwitch -I../../../src cmyk.cpp -o cmyk
g++ $gccBaseSwitch $gccExtraSwitch -I../../../src gray.cpp -o gray
g++ $gccBaseSwitch $gccExtraSwitch -I../../../src rgb.cpp -o rgb
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.