text
stringlengths
1
1.05M
package com.globalcollect.gateway.sdk.java.gc.merchant.payments; import com.globalcollect.gateway.sdk.java.CallContext; import com.globalcollect.gateway.sdk.java.GcApiResource; import com.globalcollect.gateway.sdk.java.GcResponseException; import com.globalcollect.gateway.sdk.java.gc.errors.ErrorResponse; import com.globalcollect.gateway.sdk.java.gc.payment.ApprovePaymentRequest; import com.globalcollect.gateway.sdk.java.gc.payment.CancelApprovalPaymentResponse; import com.globalcollect.gateway.sdk.java.gc.payment.CancelPaymentResponse; import com.globalcollect.gateway.sdk.java.gc.payment.CreatePaymentRequest; import com.globalcollect.gateway.sdk.java.gc.payment.CreatePaymentResponse; import com.globalcollect.gateway.sdk.java.gc.payment.PaymentApprovalResponse; import com.globalcollect.gateway.sdk.java.gc.payment.PaymentErrorResponse; import com.globalcollect.gateway.sdk.java.gc.payment.PaymentResponse; import com.globalcollect.gateway.sdk.java.gc.payment.TokenizePaymentRequest; import com.globalcollect.gateway.sdk.java.gc.refund.RefundErrorResponse; import com.globalcollect.gateway.sdk.java.gc.refund.RefundRequest; import com.globalcollect.gateway.sdk.java.gc.refund.RefundResponse; import com.globalcollect.gateway.sdk.java.gc.token.CreateTokenResponse; import java.util.Map; import java.util.TreeMap; public class PaymentsClientImpl extends GcApiResource implements PaymentsClient { public PaymentsClientImpl(GcApiResource parent, Map<String, String> pathContext) { super(parent, pathContext); } @Override public RefundResponse refund(String paymentId, RefundRequest body) { return refund(paymentId, body, null); } @Override public RefundResponse refund(String paymentId, RefundRequest body, CallContext context) { Map<String, String> pathContext = new TreeMap<String, String>(); pathContext.put("paymentId", paymentId); String uri = instantiateUri("/{merchantId}/payments/{paymentId}/refund", pathContext); try { return communicator.post( uri, getClientHeaders(), null, body, 
RefundResponse.class, context); } catch (GcResponseException e) { final Class<?> errorType; switch (e.getStatusCode()) { case 400 : errorType = RefundErrorResponse.class; break; case 404 : errorType = RefundErrorResponse.class; break; default: errorType = ErrorResponse.class; break; } final Object errorObject = communicator.getMarshaller().unmarshal(e.getBody(), e.getStatusCode(), uri, errorType); throw createException(e.getStatusCode(), e.getBody(), errorObject, context); } } @Override public PaymentResponse processchallenged(String paymentId) { return processchallenged(paymentId, null); } @Override public PaymentResponse processchallenged(String paymentId, CallContext context) { Map<String, String> pathContext = new TreeMap<String, String>(); pathContext.put("paymentId", paymentId); String uri = instantiateUri("/{merchantId}/payments/{paymentId}/processchallenged", pathContext); try { return communicator.post( uri, getClientHeaders(), null, null, PaymentResponse.class, context); } catch (GcResponseException e) { final Class<?> errorType; switch (e.getStatusCode()) { case 404 : errorType = ErrorResponse.class; break; case 405 : errorType = ErrorResponse.class; break; default: errorType = ErrorResponse.class; break; } final Object errorObject = communicator.getMarshaller().unmarshal(e.getBody(), e.getStatusCode(), uri, errorType); throw createException(e.getStatusCode(), e.getBody(), errorObject, context); } } @Override public PaymentResponse get(String paymentId) { return get(paymentId, null); } @Override public PaymentResponse get(String paymentId, CallContext context) { Map<String, String> pathContext = new TreeMap<String, String>(); pathContext.put("paymentId", paymentId); String uri = instantiateUri("/{merchantId}/payments/{paymentId}", pathContext); try { return communicator.get( uri, getClientHeaders(), null, PaymentResponse.class, context); } catch (GcResponseException e) { final Class<?> errorType; switch (e.getStatusCode()) { default: errorType = 
ErrorResponse.class; break; } final Object errorObject = communicator.getMarshaller().unmarshal(e.getBody(), e.getStatusCode(), uri, errorType); throw createException(e.getStatusCode(), e.getBody(), errorObject, context); } } @Override public CreatePaymentResponse create(CreatePaymentRequest body) { return create(body, null); } @Override public CreatePaymentResponse create(CreatePaymentRequest body, CallContext context) { String uri = instantiateUri("/{merchantId}/payments", null); try { return communicator.post( uri, getClientHeaders(), null, body, CreatePaymentResponse.class, context); } catch (GcResponseException e) { final Class<?> errorType; switch (e.getStatusCode()) { case 400 : errorType = PaymentErrorResponse.class; break; case 402 : errorType = PaymentErrorResponse.class; break; case 403 : errorType = PaymentErrorResponse.class; break; case 502 : errorType = PaymentErrorResponse.class; break; case 503 : errorType = PaymentErrorResponse.class; break; default: errorType = ErrorResponse.class; break; } final Object errorObject = communicator.getMarshaller().unmarshal(e.getBody(), e.getStatusCode(), uri, errorType); throw createException(e.getStatusCode(), e.getBody(), errorObject, context); } } @Override public CreateTokenResponse tokenize(String paymentId, TokenizePaymentRequest body) { return tokenize(paymentId, body, null); } @Override public CreateTokenResponse tokenize(String paymentId, TokenizePaymentRequest body, CallContext context) { Map<String, String> pathContext = new TreeMap<String, String>(); pathContext.put("paymentId", paymentId); String uri = instantiateUri("/{merchantId}/payments/{paymentId}/tokenize", pathContext); try { return communicator.post( uri, getClientHeaders(), null, body, CreateTokenResponse.class, context); } catch (GcResponseException e) { final Class<?> errorType; switch (e.getStatusCode()) { case 404 : errorType = ErrorResponse.class; break; default: errorType = ErrorResponse.class; break; } final Object errorObject = 
communicator.getMarshaller().unmarshal(e.getBody(), e.getStatusCode(), uri, errorType); throw createException(e.getStatusCode(), e.getBody(), errorObject, context); } } @Override public CancelPaymentResponse cancel(String paymentId) { return cancel(paymentId, null); } @Override public CancelPaymentResponse cancel(String paymentId, CallContext context) { Map<String, String> pathContext = new TreeMap<String, String>(); pathContext.put("paymentId", paymentId); String uri = instantiateUri("/{merchantId}/payments/{paymentId}/cancel", pathContext); try { return communicator.post( uri, getClientHeaders(), null, null, CancelPaymentResponse.class, context); } catch (GcResponseException e) { final Class<?> errorType; switch (e.getStatusCode()) { case 402 : errorType = ErrorResponse.class; break; default: errorType = ErrorResponse.class; break; } final Object errorObject = communicator.getMarshaller().unmarshal(e.getBody(), e.getStatusCode(), uri, errorType); throw createException(e.getStatusCode(), e.getBody(), errorObject, context); } } @Override public PaymentApprovalResponse approve(String paymentId, ApprovePaymentRequest body) { return approve(paymentId, body, null); } @Override public PaymentApprovalResponse approve(String paymentId, ApprovePaymentRequest body, CallContext context) { Map<String, String> pathContext = new TreeMap<String, String>(); pathContext.put("paymentId", paymentId); String uri = instantiateUri("/{merchantId}/payments/{paymentId}/approve", pathContext); try { return communicator.post( uri, getClientHeaders(), null, body, PaymentApprovalResponse.class, context); } catch (GcResponseException e) { final Class<?> errorType; switch (e.getStatusCode()) { case 402 : errorType = ErrorResponse.class; break; case 404 : errorType = ErrorResponse.class; break; default: errorType = ErrorResponse.class; break; } final Object errorObject = communicator.getMarshaller().unmarshal(e.getBody(), e.getStatusCode(), uri, errorType); throw 
createException(e.getStatusCode(), e.getBody(), errorObject, context); } } @Override public CancelApprovalPaymentResponse cancelapproval(String paymentId) { return cancelapproval(paymentId, null); } @Override public CancelApprovalPaymentResponse cancelapproval(String paymentId, CallContext context) { Map<String, String> pathContext = new TreeMap<String, String>(); pathContext.put("paymentId", paymentId); String uri = instantiateUri("/{merchantId}/payments/{paymentId}/cancelapproval", pathContext); try { return communicator.post( uri, getClientHeaders(), null, null, CancelApprovalPaymentResponse.class, context); } catch (GcResponseException e) { final Class<?> errorType; switch (e.getStatusCode()) { case 404 : errorType = ErrorResponse.class; break; default: errorType = ErrorResponse.class; break; } final Object errorObject = communicator.getMarshaller().unmarshal(e.getBody(), e.getStatusCode(), uri, errorType); throw createException(e.getStatusCode(), e.getBody(), errorObject, context); } } }
#!/bin/sh # Copyright (c) 2014-2016 The Bitcoin Core developers # Copyright (c) 2019 The Yiya Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. INPUT=$(cat /dev/stdin) VALID=false REVSIG=false IFS=' ' if [ "$YIYA_VERIFY_COMMITS_ALLOW_SHA1" = 1 ]; then GPG_RES="$(echo "$INPUT" | gpg --trust-model always "$@" 2>/dev/null)" else # Note how we've disabled SHA1 with the --weak-digest option, disabling # signatures - including selfsigs - that use SHA1. While you might think that # collision attacks shouldn't be an issue as they'd be an attack on yourself, # in fact because what's being signed is a commit object that's # semi-deterministically generated by untrusted input (the pull-req) in theory # an attacker could construct a pull-req that results in a commit object that # they've created a collision for. Not the most likely attack, but preventing # it is pretty easy so we do so as a "belt-and-suspenders" measure. GPG_RES="" for LINE in "$(gpg --version)"; do case "$LINE" in "gpg (GnuPG) 1.4.1"*|"gpg (GnuPG) 2.0."*) echo "Please upgrade to at least gpg 2.1.10 to check for weak signatures" > /dev/stderr GPG_RES="$(echo "$INPUT" | gpg --trust-model always "$@" 2>/dev/null)" ;; # We assume if you're running 2.1+, you're probably running 2.1.10+ # gpg will fail otherwise # We assume if you're running 1.X, it is either 1.4.1X or 1.4.20+ # gpg will fail otherwise esac done [ "$GPG_RES" = "" ] && GPG_RES="$(echo "$INPUT" | gpg --trust-model always --weak-digest sha1 "$@" 2>/dev/null)" fi for LINE in $(echo "$GPG_RES"); do case "$LINE" in "[GNUPG:] VALIDSIG "*) while read KEY; do [ "${LINE#?GNUPG:? 
VALIDSIG * * * * * * * * * }" = "$KEY" ] && VALID=true done < ./contrib/verify-commits/trusted-keys ;; "[GNUPG:] REVKEYSIG "*) [ "$YIYA_VERIFY_COMMITS_ALLOW_REVSIG" != 1 ] && exit 1 REVSIG=true GOODREVSIG="[GNUPG:] GOODSIG ${LINE#* * *}" ;; "[GNUPG:] EXPKEYSIG "*) [ "$YIYA_VERIFY_COMMITS_ALLOW_REVSIG" != 1 ] && exit 1 REVSIG=true GOODREVSIG="[GNUPG:] GOODSIG ${LINE#* * *}" ;; esac done if ! $VALID; then exit 1 fi if $VALID && $REVSIG; then echo "$INPUT" | gpg --trust-model always "$@" 2>/dev/null | grep "\[GNUPG:\] \(NEWSIG\|SIG_ID\|VALIDSIG\)" echo "$GOODREVSIG" else echo "$INPUT" | gpg --trust-model always "$@" 2>/dev/null fi
<reponame>jhersh/CocoaPods require File.expand_path('../../spec_helper', __FILE__) module Pod describe HooksManager do before do @hooks_manager = Pod::HooksManager end describe 'register' do it 'allows to register a block for a notification with a given name' do @hooks_manager.register(:post_install) {} @hooks_manager.registrations[:post_install].count.should == 1 @hooks_manager.registrations[:post_install].first.class.should == Proc end it 'raises if no name is given' do should.raise ArgumentError do @hooks_manager.register(nil) {} end end it 'raises if no block is given' do should.raise ArgumentError do @hooks_manager.register(:post_install) end end end describe 'run' do it 'invokes the hooks' do @hooks_manager.register(:post_install) do |_options| true.should.be.true end @hooks_manager.run(:post_install, Object.new) end it 'handles the case that no listeners have registered' do should.not.raise do @hooks_manager.run(:post_install, Object.new) end end it 'handles the case that no listeners have registered for a name' do @hooks_manager.register(:post_install) do |_options| true.should.be.true end should.not.raise do @hooks_manager.run(:pre_install, Object.new) end end it 'raises if no name is given' do should.raise ArgumentError do @hooks_manager.run(nil, Object.new) {} end end it 'raises if no context object is given' do should.raise ArgumentError do @hooks_manager.run(:post_install, nil) end end end end end
// Public barrel for the tilemap package: re-exports the static tilemap
// component, its rendering component, and the tilemap processing system.
export * from './components/staticTilemap';
export * from './components/tilemapRendering';
export * from './systems/tilemapProcessor';
const sentence = 'This is a sample sentence for counting the word frequency.';

/**
 * Counts how often each whitespace-separated word occurs in a sentence.
 *
 * Fix: the original built the map and only logged it; it now also RETURNS
 * the map so callers can use the result. Logging is preserved.
 *
 * @param {string} sentence - text to analyze (split on single spaces)
 * @returns {Object<string, number>} word -> occurrence count
 */
const wordFrequency = (sentence) => {
  const countMap = {};
  sentence.split(' ').forEach((word) => {
    // (count || 0) + 1 covers both the first occurrence and repeats.
    countMap[word] = (countMap[word] || 0) + 1;
  });
  console.log(countMap);
  return countMap;
};

wordFrequency(sentence);
#!/bin/sh
# Build bst.cpp once per thread count, producing bstexe_01 ... bstexe_24.
# Replaces 24 copy-pasted gcc invocations with a loop; flags are unchanged.
# NOTE(review): the original uses gcc (not g++) on a .cpp file — kept as-is,
# but confirm linking succeeds without -lstdc++ in your environment.
i=1
while [ "$i" -le 24 ]; do
    suffix=$(printf '%02d' "$i")
    gcc -DMYTHREADS="$i" -o "bstexe_$suffix" -O2 bst.cpp -fopenmp -lm
    i=$((i + 1))
done
// Wires the #contact-form up to SmtpJS (Email.send) for client-side mail
// delivery.
// SECURITY(review): SMTP credentials embedded in browser JavaScript are
// readable by every visitor. Move sending behind a server-side endpoint or
// use SmtpJS secure tokens instead of a raw username/password.
let form = document.querySelector("#contact-form");

form.addEventListener("submit", function(event) {
    // Suppress the browser's default submission / page reload.
    event.preventDefault();

    // Snapshot the current field values.
    let name = form.elements["name"].value;
    let email = form.elements["email"].value;
    let message = form.elements["message"].value;

    // Send the message through the configured Gmail SMTP account.
    Email.send({
        Host : "smtp.gmail.com",
        Username : "YOUR_EMAIL_ADDRESS_HERE@gmail.com",
        Password : "YOUR_EMAIL_PASSWORD_HERE",
        To : 'YOUR_EMAIL_ADDRESS_HERE@gmail.com',
        From : email,
        Subject : "New Contact Message",
        Body : "Name: " + name + " Email: " + email + " Message: " + message
    }).then(function(message){
        alert("sent!");
    });
});
// Base class for shapes.
// NOTE(review): `side` stores the shape *kind* (e.g. "square"), not a side
// length — a name like `kind` would be clearer if this code grows.
class Shape {
    side: string;

    constructor(side: string) {
        this.side = side;
    }

    // Default area; subclasses override with their own formula.
    getArea(): number {
        return 0; // This method will be overridden by subclasses
    }
}

// A square defined by its side length; area = sideLength * sideLength.
class Square extends Shape {
    sideLength: number;

    constructor(sideLength: number) {
        super("square");
        this.sideLength = sideLength;
    }

    getArea(): number {
        return this.sideLength * this.sideLength;
    }
}

// Example usage
const square = new Square(5);
console.log(square.getArea()); // Output: 25
import React from 'react'; import // { CIQ } from // Enable to access CIQ namespace './chartiq_config/presets/base'; import './chartiq_config/presets/examples'; // Include plugins // import timespanevent from './chartiq_config/presets/timespanevents'; // import crypto from './chartiq_config/presets/cryptoiq'; // import scriptiq from './chartiq_config/presets/scriptiq'; // import tfc from './chartiq_config/presets/tfc'; import { AdvancedChart, getDefaultConfig } from './chartiq'; // CIQ.debug = true; // Enables debugging const pluginsToLoadLazy = { // timespanevent, // crypto, // scriptiq, // tfc }; const config = getDefaultConfig(); // Update chart configuration by modifying default configuration config.chartConfig.preferences.currentPriceLine = true; config.addOns.tooltip = null; // Optional callback function to access chart engine and uiContext const chartInitialized = ({ chartEngine, uiContext }) => { // chartEngine provides access to chart engine CIQ.ChartEngine // uiContext provides access to UI component interaction CIQ.UI.Context }; export default (props) => ( <AdvancedChart config={config} chartInitialized={chartInitialized} pluginsToLoadLazy={pluginsToLoadLazy} /> );
#!/bin/bash
# Container entrypoint: seed default settings on first run, then launch the app.

# Fix: abort if the app directory is missing instead of silently running
# python3 from the wrong working directory.
cd /app/src || exit 1

# First run only: no user config yet, so copy the bundled sample settings.
if [ ! -f "/app/src/config/settings.json" ]; then
    cp ../sample_settings.json ./config/
fi

python3 main.py
<filename>src/templates/page.js import React from "react"; import PropTypes from "prop-types"; import { graphql } from "gatsby"; import Layout from "../components/layout/layout"; import Section from "../components/organisms/sections/section" import Helmet from 'react-helmet' export const PageTemplate = ({ title, content, pageid }) => { return ( <main id={pageid} className="main"> <Helmet> <link rel="stylesheet" href="https://use.fontawesome.com/releases/v5.7.2/css/all.css" integrity="<KEY>" crossorigin="anonymous" /> </Helmet> {content.map(( section, index ) => ( <Section key={index} section={section} > </Section> ))} </main> ); }; const Page = ({ data }) => { const { markdownRemark: post } = data; return ( <Layout> <PageTemplate pageid={post.frontmatter.pageid} title={post.frontmatter.title} content={post.frontmatter.content} /> </Layout> ); }; Page.propTypes = { data: PropTypes.object.isRequired }; export default Page; export const PageQuery = graphql` query Page($id: String!) { markdownRemark(id: { eq: $id }) { id fields { slug } html frontmatter { title pageid content { sectiontitle sectionid backgroundcolor textcolor backgroundimage{ childImageSharp { fluid(maxWidth: 1920) { ...GatsbyImageSharpFluid } } } pagetitle sectionvalue{ type markdown leftmarkdown rightmarkdown columnnumber formname columnmarkdown{ markdown } } } } } } `;
<reponame>lukaselmer/2018-six-hackathon declare module 'figo';
# Demonstrates str.split with a multi-character separator (", ").
# NOTE(review): `string` and `list` shadow Python built-ins; prefer names
# like `text` and `parts` if this snippet grows into real code.
string = "Hello, World, I am here"
list = string.split(", ")
print(list)

# Output ['Hello', 'World', 'I am here']
<filename>shared/addItem.js const ul = document.getElementById('todo-list') const todoInput = document.getElementById('new-todo-title'); export const addItem = (e, todoTitle) => { ul.insertAdjacentHTML("beforeend", renderTodoItemTemplate(todoTitle)); todoInput.value = ''; } function renderTodoItemTemplate(title) { return ` <li> <div class="view"> <input class="toggle" type="checkbox"> <label class="label">${title}</label> <button class="destroy"></button> </div> <input class="edit" value=""> </li>`; }
#!/bin/sh python manage.py flush --no-input python manage.py makemigrations python manage.py migrate exec "$@"
import appMiddleware from './appMiddleware' var req, res beforeEach(() => { req = { url: '/login' } res = { status: () => res, send: jest.fn(output => { res.body = output }) } appMiddleware.getIndexFile = () => '<html><div id="root"></div></html>' }) it('renders the app', () => { appMiddleware(req, res) expect(res.body).toMatchSnapshot() })
<gh_stars>0 import type { ISanivaliDef } from '../types'; export declare type TrimLeftParam = boolean | undefined; export declare type TrimLeftRuleItem = 'trimLeft' | ['trimLeft', TrimLeftParam?]; export declare const trimLeftDef: ISanivaliDef; //# sourceMappingURL=trimLeft.d.ts.map
// Copyright 2020 <NAME> and <NAME> #pragma once #include <string> #include "../observable.hpp" #include "../universe.hpp" class VolumeProfile : public Observable { public: VolumeProfile(std::string id) : Observable(id) { name = "volume_profile"; } void process(); };
import type { ISanivaliDef } from '_src/types'; import { isEmpty } from '_src/util'; export type EmptyToNullParam = boolean | 'undefined' | undefined; export type EmptyToNullRuleItem = | 'emptyToNull' | ['emptyToNull', EmptyToNullParam?]; export const emptyToNullDef: ISanivaliDef = { sanitizer: (enable?: EmptyToNullParam) => { if (enable === false) return null; const nil = enable === 'undefined' ? undefined : null; return (v: unknown) => (isEmpty(v) ? nil : v); }, runOnNil: true, };
<gh_stars>0 import React from "react"; import { Product } from "../../../redux"; import { ProductItem } from "."; export const ProductsTable: React.FC<ProductsTableProps> = ({ filteredProducts }) => ( <div> {filteredProducts.length > 0 && ( <div className="grid grid-product grid-count-6"> <div className="grid-col" /> <div className="grid-col"> <h5>Name</h5> </div> <div className="grid-col"> <h5>Brand</h5> </div> <div className="grid-col"> <h5>Price</h5> </div> <div className="grid-col"> <h5>Date Added</h5> </div> <div className="grid-col"> <h5>Qty</h5> </div> </div> )} {filteredProducts.length === 0 ? new Array(10) .fill({}) .map((product, index) => <ProductItem key={`product-skeleton ${index}`} product={product} />) : filteredProducts.map((product) => <ProductItem key={product.id} product={product} />)} </div> ); type ProductsTableProps = { filteredProducts: Product[]; };
def sort_words(words):
    """Alphabetically sort ``words`` in place and return the same list object.

    Note: the input list is mutated, matching the original behavior.
    """
    words[:] = sorted(words)
    return words


print(sort_words(["cat", "apple", "dog", "zebra"]))
/** * Copyright 2018 Google Inc. All rights reserved. * Modifications copyright (c) Microsoft Corporation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import { test as it, expect } from './pageTest'; it('should throw for non-string selector', async ({ page }) => { const error = await page.$(null).catch(e => e); expect(error.message).toContain('selector: expected string, got object'); }); it('should query existing element with css selector @smoke', async ({ page, server }) => { await page.setContent('<section>test</section>'); const element = await page.$('css=section'); expect(element).toBeTruthy(); }); it('should query existing element with text selector', async ({ page, server }) => { await page.setContent('<section>test</section>'); const element = await page.$('text="test"'); expect(element).toBeTruthy(); }); it('should query existing element with xpath selector', async ({ page, server }) => { await page.setContent('<section>test</section>'); const element = await page.$('xpath=/html/body/section'); expect(element).toBeTruthy(); }); it('should return null for non-existing element', async ({ page, server }) => { const element = await page.$('non-existing-element'); expect(element).toBe(null); }); it('should auto-detect xpath selector', async ({ page, server }) => { await page.setContent('<section>test</section>'); const element = await page.$('//html/body/section'); expect(element).toBeTruthy(); }); it('should auto-detect xpath selector with starting parenthesis', async 
({ page, server }) => { await page.setContent('<section>test</section>'); const element = await page.$('(//section)[1]'); expect(element).toBeTruthy(); }); it('should auto-detect xpath selector starting with ..', async ({ page, server }) => { await page.setContent('<div><section>test</section><span></span></div>'); const span = await page.$('"test" >> ../span'); expect(await span.evaluate(e => e.nodeName)).toBe('SPAN'); const div = await page.$('"test" >> ..'); expect(await div.evaluate(e => e.nodeName)).toBe('DIV'); }); it('should auto-detect text selector', async ({ page, server }) => { await page.setContent('<section>test</section>'); const element = await page.$('"test"'); expect(element).toBeTruthy(); }); it('should auto-detect css selector', async ({ page, server }) => { await page.setContent('<section>test</section>'); const element = await page.$('section'); expect(element).toBeTruthy(); }); it('should support >> syntax', async ({ page, server }) => { await page.setContent('<section><div>test</div></section>'); const element = await page.$('css=section >> css=div'); expect(element).toBeTruthy(); }); it('should query existing elements', async ({ page, server }) => { await page.setContent('<div>A</div><br/><div>B</div>'); const elements = await page.$$('div'); expect(elements.length).toBe(2); const promises = elements.map(element => page.evaluate(e => e.textContent, element)); expect(await Promise.all(promises)).toEqual(['A', 'B']); }); it('should return empty array if nothing is found', async ({ page, server }) => { await page.goto(server.EMPTY_PAGE); const elements = await page.$$('div'); expect(elements.length).toBe(0); }); it('xpath should query existing element', async ({ page, server }) => { await page.setContent('<section>test</section>'); const elements = await page.$$('xpath=/html/body/section'); expect(elements[0]).toBeTruthy(); expect(elements.length).toBe(1); }); it('xpath should return empty array for non-existing element', async ({ page, server 
}) => { const element = await page.$$('//html/body/non-existing-element'); expect(element).toEqual([]); }); it('xpath should return multiple elements', async ({ page, server }) => { await page.setContent('<div></div><div></div>'); const elements = await page.$$('xpath=/html/body/div'); expect(elements.length).toBe(2); }); it('$$ should work with bogus Array.from', async ({ page, server }) => { await page.setContent('<div>hello</div><div></div>'); const div1 = await page.evaluateHandle(() => { Array.from = () => []; return document.querySelector('div'); }); const elements = await page.$$('div'); expect(elements.length).toBe(2); // Check that element handle is functional and belongs to the main world. expect(await elements[0].evaluate((div, div1) => div === div1, div1)).toBe(true); });
#include <iostream>
#include <fstream>
#include <tuple>   // std::tuple/std::get were used without this include
#include <vector>

// Buffers RGB triples and flushes them either to stdout or to "output.txt".
//
// Bug fixed: the original held `std::ostream& m_outputStream` initialized
// with `useStandardOutput ? std::cout : std::ofstream("output.txt")` — that
// binds the reference to a *temporary* ofstream which is destroyed when the
// constructor finishes, leaving a dangling reference. The file stream is now
// an owned member.
class ColorOutputManager {
private:
    bool m_useStandardOutput;                              // true => write to std::cout
    std::vector<std::tuple<int, int, int>> m_colorBuffer;  // pending (r, g, b) triples
    std::ofstream m_fileStream;                            // owned file sink (only opened when needed)

    // Returns the active sink for this manager.
    std::ostream& outputStream() {
        return m_useStandardOutput ? std::cout
                                   : static_cast<std::ostream&>(m_fileStream);
    }

public:
    ColorOutputManager(bool useStandardOutput)
        : m_useStandardOutput(useStandardOutput) {
        if (!m_useStandardOutput) {
            m_fileStream.open("output.txt");
        }
    }

    // Queues one color; nothing is written until flush().
    void writeToBuffer(const int r, const int g, const int b) {
        m_colorBuffer.emplace_back(r, g, b);
    }

    // Writes every buffered color as "r g b\n" to the active sink, then
    // clears the buffer.
    void flush() {
        std::ostream& out = outputStream();
        for (const auto& color : m_colorBuffer) {
            out << std::get<0>(color) << " "
                << std::get<1>(color) << " "
                << std::get<2>(color) << "\n";
        }
        m_colorBuffer.clear();
    }
};
<gh_stars>1-10 import fs from "fs"; import path from "path"; import { IAdapter } from "./adapter"; import { getAdapter } from "./adapters"; import { IWriteOptions } from "./options"; export class WriteConfig<T> { private obj: T; private options: IWriteOptions; private adapter: IAdapter<T>; constructor(obj: T, options: IWriteOptions) { this.obj = obj; this.options = options; this.adapter = getAdapter<T>(this.options.type); } /** * Reloads the configuration from the adapter. */ public read() { this.obj = this.adapter.parse( fs.readFileSync(this.options.path, { encoding: this.options.encoding || "utf-8", }) ); return this; } public modify(fn: (obj: T) => T) { this.obj = fn(this.obj); return this; } public save(opts: { path?: string; indent?: number }) { let options: Record<string, unknown> = {}; if (typeof opts.path === "string") options.path = opts.path; options = Object.assign( { encoding: "utf-8", indent: opts.indent ?? 4, }, this.options, options ); if (options.path && options.encoding) { if (!fs.existsSync(path.dirname(options.path as string))) fs.mkdirSync(path.dirname(options.path as string), { recursive: true, }); const data = this.toString(options); fs.writeFileSync(options.path as string, data, { encoding: options.encoding as BufferEncoding, }); } return this; } public toString(options?) { return this.adapter.stringify(this.obj, options); } public toObject() { return this.obj; } /** * Validates this config object with Zod. * @param modify Whether to modify the config or just to validate it. * @param onError An optional onError function that can be used to log errors. Defaults to a function that calls console.error. */ public validate( modify: boolean, onError: (err) => void = (err) => { console.error(`Could not load config: ${err}`); process.exit(1); } ) { let res = {}; try { res = this.options.schema.parse(this.obj); if (modify) this.modify(() => res as T); } catch (err) { onError(err); } return this; } }
/// Associates `physicsBody` with `node` through the given `physicsComponent`.
///
/// - If the body is not yet attached to any node, it is attached to `node`.
/// - If it already belongs to a *different* node and
///   `allowBodyFromDifferentNode` is `false`, an error is logged and the
///   component is removed from its entity.
/// - If the flag is `true`, only a warning is logged when the body's current
///   node is not in `node`'s parent hierarchy.
func associatePhysicsBody(_ physicsComponent: PhysicsComponent, with physicsBody: SKPhysicsBody, to node: SKNode, allowBodyFromDifferentNode: Bool) {
    // A body with no current node can be attached directly.
    guard let currentBodyNode = physicsBody.node else {
        physicsComponent.attach(physicsBody, to: node)
        return
    }

    if !allowBodyFromDifferentNode {
        // Strict mode: a body owned by another node is treated as an error.
        if currentBodyNode != node {
            OctopusKit.logForErrors("\(physicsBody) already associated with \(currentBodyNode) — Detaching from entity. If this is intentional, set the `allowBodyFromDifferentNode` flag.")
            physicsComponent.removeFromEntity()
        }
    } else {
        // Lenient mode: warn only when the body's node is outside `node`'s hierarchy.
        if !currentBodyNode.inParentHierarchy(node) {
            OctopusKit.logForWarnings("\(physicsBody) already associated with \(currentBodyNode) which is not in the hierarchy of \(node) — This may not be the desired behavior.")
        }
    }
}
package de.fnordbedarf.debugger;

/**
 * Created by lora on 20.05.17.
 *
 * Intentionally empty class. NOTE(review): presumably a Null Object
 * placeholder used by the debugger — no behavior is visible here; confirm
 * its role with the call sites before extending it.
 */
public class NullObject {
}
BUILD_DIR=src/main/assets/build npm install $BUILD_DIR GRUNT=node_modules/grunt/bin/grunt $GRUNT --gruntfile=$BUILD_DIR/Gruntfile.js --theme=neutral $GRUNT --gruntfile=$BUILD_DIR/Gruntfile.js --theme=slu
import requests
from bs4 import BeautifulSoup

# Fetch the raw HTML of the Wikipedia article.
html_content = requests.get('https://en.wikipedia.org/wiki/Python_(programming_language)').text

# Parse the HTML with the lxml backend.
soup = BeautifulSoup(html_content, "lxml")

# Walk every <h2> and report the ones marked as TOC sections.
for h2_tag in soup.find_all("h2"):
    # Fix: h2_tag.attrs['class'] raises KeyError for <h2> tags without a
    # class attribute; .get(...) with a default list is safe. (bs4 returns
    # `class` as a list of class names.)
    if "tocsection" in h2_tag.get("class", []):
        # Extract and print the section title text.
        article_title = h2_tag.text.strip()
        print("Article Title:", article_title)
        print("-" * 50)
<gh_stars>100-1000 package com.boot.service.impl; import com.boot.constant.ThemeConstant; import com.boot.feign.system.SettingFeign; import com.boot.pojo.Setting; import com.boot.pojo.User; import com.boot.pojo.UserAuthority; import com.boot.pojo.UserDetail; import com.boot.service.RegisterService; import com.boot.service.UserDetailService; import com.boot.service.UserService; import com.boot.utils.SnowId; import io.seata.spring.annotation.GlobalTransactional; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Lazy; import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import java.sql.Date; @Service public class RegisterServiceImpl implements RegisterService { @Autowired private UserService userService; @Autowired @Lazy //延迟加载bean,解决循环注入问题 Requested bean is currently in creation: Is there an unresolvable circular reference? 
private SettingFeign settingFeign; @Autowired private UserDetailService userDetailService; // 分布式事务 @GlobalTransactional(name = "seata_register", rollbackFor = Exception.class) @Override public void register(User user) { // 注册代码 Date date = new Date(new java.util.Date().getTime()); BCryptPasswordEncoder bCryptPasswordEncoder = new BCryptPasswordEncoder(); // 进行BCryptPasswordEncoder加密 String encode_password = <PASSWORD>PasswordEncoder.encode(user.getPassword()); long userid = SnowId.nextId(); //雪花算法生成分布式id user.setId(userid); user.setPassword(<PASSWORD>_password); user.setDate(date); user.setValid(1); userService.addUser(user); UserAuthority userAuthority = new UserAuthority(); userAuthority.setId(SnowId.nextId()); userAuthority.setUser_id(userid); userAuthority.setAuthority_id(2); userService.addUserAuthority(userAuthority); // int i=10/0; //模拟异常,测试分布式事务 // 设置userDetail UserDetail userDetail = new UserDetail(); userDetail.setId(SnowId.nextId()); userDetail.setName(user.getUsername()); userDetailService.addUserDetail(userDetail); // 添加用户默认设置 Setting setting = new Setting(); setting.setId(SnowId.nextId()); setting.setName(user.getUsername()); setting.setTheme(ThemeConstant.CALM_THEME); setting.setFoot("----2021----"); setting.setLogo("/user/img/bloglogo.jpg"); settingFeign.addSettingByUser(setting); } }
#!/usr/bin/env bash

CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
. "$CURDIR/../shell_config.sh"

$CLICKHOUSE_CLIENT -q "DROP TABLE IF EXISTS test.foo;"

# Missing element type for Array: the CREATE must fail and the table must not
# be created. The original used "2&>/dev/null", which the shell parses as an
# extra argument "2" plus "&>/dev/null"; only stderr should be discarded.
$CLICKHOUSE_CLIENT -q "CREATE TABLE test.foo (a Array) Engine=Memory;" 2>/dev/null

$CLICKHOUSE_CLIENT -q "SELECT 'Still alive';"
/**
 * Minimal employee record with simple accessors.
 */
class Employee {
  /**
   * @param {string} theName - employee's display name
   * @param {*} id - unique identifier
   * @param {string} email - contact email address
   */
  constructor(theName, id, email) {
    this.theName = theName;
    this.id = id;
    this.email = email;
  }

  // The original getters had empty bodies and returned undefined.
  getName() {
    return this.theName;
  }

  getId() {
    return this.id;
  }

  getEmail() {
    return this.email;
  }

  getRole() {
    // Original returned `this.name`, a field that is never set (always
    // undefined); `theName` is the only name field the constructor stores.
    return this.theName;
  }
}

module.exports = Employee;
/**
 * @author pschroen / https://ufo.ai/
 */

import { Color, GLSL3, Matrix3, Matrix4, RawShaderMaterial, Uniform } from 'three';

import vertexShader from '../glsl/ReflectorMaterial.vert.glsl';
import fragmentShader from '../glsl/ReflectorMaterial.frag.glsl';

/**
 * Raw-shader material for planar reflections. Optional texture map,
 * fog support and dithering are wired in through defines/uniforms.
 */
export class ReflectorMaterial extends RawShaderMaterial {
    constructor({ color = new Color(0x7F7F7F), map = null, fog = null, dithering = false } = {}) {
        const defines = {};
        const uniforms = {
            tMap: new Uniform(null),
            tReflect: new Uniform(null),
            uMapTransform: new Uniform(new Matrix3()),
            uMatrix: new Uniform(new Matrix4()),
            // Accept either a Color instance or any Color-constructible value.
            uColor: new Uniform(color instanceof Color ? color : new Color(color))
        };

        if (map) {
            map.updateMatrix();
            uniforms.tMap = new Uniform(map);
            uniforms.uMapTransform = new Uniform(map.matrix);
        }

        if (fog) {
            defines.USE_FOG = '';
            uniforms.uFogColor = new Uniform(fog.color);
            uniforms.uFogNear = new Uniform(fog.near);
            uniforms.uFogFar = new Uniform(fog.far);
        }

        if (dithering) {
            defines.DITHERING = '';
        }

        super({
            glslVersion: GLSL3,
            defines,
            uniforms,
            vertexShader,
            fragmentShader
        });
    }
}
import numpy as np
from scipy.interpolate import griddata


def interpolate_2d(X, Y, Z, factor):
    """Linearly interpolate gridded data onto a grid densified by `factor`.

    Parameters
    ----------
    X, Y : 2-D arrays of grid coordinates (meshgrid-style: X varies along
        axis 1, Y along axis 0 — assumption based on the reshape usage;
        confirm against callers).
    Z : 2-D array of values sampled at (X, Y).
    factor : int, multiplier for the number of points along each axis.

    Returns
    -------
    (x_inter, y_inter, interpolated_Z) where x_inter has shape (1, nx*factor),
    y_inter has shape (ny*factor, 1) and interpolated_Z has shape
    (ny*factor, nx*factor).

    Fixes vs. original: the original called an undefined ``scale()`` helper
    (NameError at runtime) and reshaped the result back to ``X.shape``, which
    is wrong for any ``factor != 1``.
    """
    # Flatten the input arrays into scattered-point form for griddata.
    X_f, Y_f, Z_f = X.flatten(), Y.flatten(), Z.flatten()

    # Densified coordinate axes spanning the original data range. The row /
    # column shapes broadcast to a 2-D evaluation grid inside griddata.
    x_inter = np.linspace(X_f.min(), X_f.max(), X.shape[1] * factor).reshape(1, -1)
    y_inter = np.linspace(Y_f.min(), Y_f.max(), X.shape[0] * factor).reshape(-1, 1)

    # 2-D linear interpolation on the broadcast grid.
    interpolated_Z = griddata((X_f, Y_f), Z_f, (x_inter, y_inter), method='linear')

    return x_inter, y_inter, interpolated_Z
#!/bin/bash

set -e

# Determine platform name. Currently supported:
#
# x86_64 => x64_linux
# aarch64 => aarch64_linux
#
platform_name() {
  arch=$(uname -m)
  case $arch in
    x86_64)
      echo "x64_linux"
      ;;
    aarch64)
      echo "aarch64_linux"
      ;;
    *)
      echo "Unsupported platform '$arch'" 1>&2
      exit 1
      ;;
  esac
}

# Release coordinates for this OpenJDK 8 build; these feed every artifact name.
UPDATE=312
BUILD=b07
NAME="openjdk-8u${UPDATE}-${BUILD}"
JRE_NAME="${NAME}-jre"
TARBALL_BASE_NAME="OpenJDK8U"
# Empty for GA builds; set (e.g. "-ea") for early-access builds.
EA_SUFFIX=""
PLATFORM="$(platform_name)"
TARBALL_VERSION="8u${UPDATE}${BUILD}${EA_SUFFIX}"
PLATFORM_VERSION="${PLATFORM}_${TARBALL_VERSION}"
TARBALL_NAME="${TARBALL_BASE_NAME}-jdk_${PLATFORM_VERSION}"
TARBALL_NAME_JRE="${TARBALL_BASE_NAME}-jre_${PLATFORM_VERSION}"
SOURCE_NAME="${TARBALL_BASE_NAME}-sources_${TARBALL_VERSION}"

# Configure, build and package JDK + JRE tarballs for release and slowdebug.
# Must run from the top of an OpenJDK 8 source tree.
build() {
  set -x

  # On some systems the per user process limit is set too low
  # by default (e.g. 1024). This may make the build fail on
  # systems with many cores (e.g. 64). Raise the limit to 1/2
  # of the maximum amount of threads allowed by the kernel.
  if [ -e /proc/sys/kernel/threads-max ]; then
    ulimit -u $(( $(cat /proc/sys/kernel/threads-max) / 2))
  fi

  rm -rf build

  # Add patch to be able to build on EL 6
  wget https://bugs.openjdk.java.net/secure/attachment/81610/JDK-8219879.export.patch
  patch -p1 < JDK-8219879.export.patch

  bash common/autoconf/autogen.sh

  # Create a source tarball archive corresponding to the
  # binary build
  tar -c -z -f ../${SOURCE_NAME}.tar.gz --transform "s|^|${NAME}-sources/|" --exclude-vcs --exclude='**.patch*' --exclude='overall-build.log' .

  # Milestone is "fcs" for GA releases, "ea" when an EA suffix is configured.
  MILESTONE="fcs"
  if [ "${EA_SUFFIX}_" != "_" ]; then
    MILESTONE="ea"
  fi

  # Build both configurations; slowdebug skips the bootcycle.
  for debug in release slowdebug; do
    bash configure \
        --with-boot-jdk="/usr/lib/jvm/java" \
        --with-debug-level="$debug" \
        --with-conf-name="$debug" \
        --enable-unlimited-crypto \
        --with-milestone="$MILESTONE" \
        --with-native-debug-symbols=external \
        --with-update-version=$UPDATE \
        --with-build-number=$BUILD
    target="bootcycle-images"
    if [ "${debug}_" == "slowdebug_" ]; then
      target="images"
    fi
    make LOG_LEVEL=debug CONF=$debug $target

    # Package it up
    pushd build/$debug/images
    # slowdebug artifacts get a "-slowdebug" suffix; NAME/TARBALL_NAME are
    # intentionally mutated here (slowdebug is the last loop iteration).
    if [ "${debug}_" == "slowdebug_" ]; then
      NAME="$NAME-$debug"
      TARBALL_NAME="$TARBALL_NAME-$debug"
    fi
    # JDK package: tarball without debuginfo plus a separate debuginfo tarball.
    mv j2sdk-image $NAME
    cp src.zip $NAME
    tar -c -f ${TARBALL_NAME}.tar --exclude='**.debuginfo' $NAME
    gzip ${TARBALL_NAME}.tar
    tar -c -f ${TARBALL_NAME}-debuginfo.tar $(find ${NAME}/ -name \*.debuginfo)
    gzip ${TARBALL_NAME}-debuginfo.tar
    # Restore the image directory to its original name for later steps.
    rm $NAME/src.zip
    mv $NAME j2sdk-image
    # JRE package (release only)
    if [ "${debug}_" == "release_" ]; then
      mv j2re-image $JRE_NAME
      tar -c -f ${TARBALL_NAME_JRE}.tar --exclude='**.debuginfo' $JRE_NAME
      gzip ${TARBALL_NAME_JRE}.tar
      tar -c -f ${TARBALL_NAME_JRE}-debuginfo.tar $(find ${JRE_NAME}/ -name \*.debuginfo)
      gzip ${TARBALL_NAME_JRE}-debuginfo.tar
      mv $JRE_NAME j2re-image
    fi
    popd
  done
  mv ../${SOURCE_NAME}.tar.gz build/
  set +x
}

build 2>&1 | tee overall-build.log

# Bundle every produced tarball plus the build log into one artifact archive.
ALL_ARTEFACTS="$NAME-$(platform_name)-all-artefacts.tar"
tar -c -f $ALL_ARTEFACTS $(find build -name \*.tar.gz) overall-build.log
gzip $ALL_ARTEFACTS
ls -lh *.tar.gz
def oddNumbers(n):
    """Yield the odd integers in [0, n], in increasing order."""
    # A stepped range replaces the original per-element parity test.
    yield from range(1, n + 1, 2)


n = 10
for num in oddNumbers(n):
    print(num)
#!/bin/bash
# Run the dieharder random-number test suite with a fixed configuration:
#   -d 16         : run the single test with number 16
#   -g 18         : use generator number 18 as the RNG under test
#   -S 4152765730 : seed the generator with this fixed value (reproducible run)
# NOTE(review): flag meanings above follow dieharder CLI conventions — confirm
# against the installed dieharder man page / `dieharder -l` output.
dieharder -d 16 -g 18 -S 4152765730
#include "arch.h"
#include "ecp_NUMS256E.h"

namespace NUMS256E {

/* NUMS 256-bit Curve - Edwards */
// ROM constants for the NUMS256E curve. The same curve parameters are encoded
// three times, once per limb configuration selected by CHUNK (16/32/64-bit
// words); the limb counts and radices come from the B256_* basefield headers.
// CURVE_B_I is the small-integer form of the curve constant b; CURVE_Order is
// the group order, and (CURVE_Gx, CURVE_Gy) is the generator point.
// Do not edit the digit arrays by hand — they are generated values.

#if CHUNK==16

using namespace B256_13;

const int CURVE_A= 1;
const int CURVE_B_I= -15342;
const BIG CURVE_B= {0x355,0x1FFE,0x1FFF,0x1FFF,0x1FFF,0x1FFF,0x1FFF,0x1FFF,0x1FFF,0x1FFF,0x1FFF,0x1FFF,0x1FFF,0x1FFF,0x1FFF,0x1FFF,0x1FFF,0x1FFF,0x1FFF,0x1FF};
const BIG CURVE_Order= {0xAF5,0x16EA,0x43B,0xF63,0x11A4,0x1CD,0x1D65,0x14A5,0x155A,0x20C,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x80};
const BIG CURVE_Gx= {0x13DA,0x1768,0x40B,0x1D81,0xA0D,0x1AC3,0xC20,0x1DC,0x198A,0x1061,0x6F5,0x1241,0x15F6,0xF1E,0x1734,0x46F,0xAEA,0x7DB,0x1D45,0x114};
const BIG CURVE_Gy= {0x9E6,0xC54,0x19DE,0xC2D,0x12FA,0x1769,0x215,0x1B02,0x1F61,0x38A,0x4,0xC97,0x1D9A,0xB32,0x1F3A,0x4B8,0x19D9,0x14FE,0x154F,0x89};

#endif

#if CHUNK==32

using namespace B256_29;

const int CURVE_A= 1;
const int CURVE_B_I= -15342;
const BIG CURVE_B= {0x1FFFC355,0x1FFFFFFF,0x1FFFFFFF,0x1FFFFFFF,0x1FFFFFFF,0x1FFFFFFF,0x1FFFFFFF,0x1FFFFFFF,0xFFFFFF};
const BIG CURVE_Order= {0xEDD4AF5,0x123D8C87,0x1650E6C6,0xAB54A5E,0x419,0x0,0x0,0x0,0x400000};
const BIG CURVE_Gx= {0xEED13DA,0x6F60481,0x20D61A8,0x13141DC6,0x9BD60C3,0x1EAFB490,0xDF73478,0x1F6D5D44,0x8A7514};
const BIG CURVE_Gy= {0x198A89E6,0x1D30B73B,0x15BB4CB,0x1EC3B021,0x18010715,0x12ECD325,0x171F3A59,0x13FB3B24,0x44D53E};

#endif

#if CHUNK==64

using namespace B256_56;

const int CURVE_A= 1;
const int CURVE_B_I= -15342;
const BIG CURVE_B= {0xFFFFFFFFFFC355L,0xFFFFFFFFFFFFFFL,0xFFFFFFFFFFFFFFL,0xFFFFFFFFFFFFFFL,0xFFFFFFFFL};
const BIG CURVE_Order= {0x47B190EEDD4AF5L,0x5AA52F59439B1AL,0x4195L,0x0L,0x40000000L};
const BIG CURVE_Gx= {0xDEC0902EED13DAL,0x8A0EE3083586A0L,0x5F69209BD60C39L,0x6AEA237DCD1E3DL,0x8A7514FBL};
const BIG CURVE_Gy= {0xA616E7798A89E6L,0x61D810856ED32FL,0xD9A64B8010715FL,0xD9D925C7CE9665L,0x44D53E9FL};

#endif

}
class SubnetConfiguration:
    """Type-checked value object describing a subnet.

    Validates the constructor arguments and stores them verbatim:
    `ip_configurations` must be a list; `name` and
    `network_security_group_id`, when truthy, must be strings.
    """

    def __init__(self, ip_configurations, name, network_security_group_id):
        # Same checks (and messages) as before, grouped ahead of assignment.
        if not isinstance(ip_configurations, list):
            raise TypeError("Expected argument 'ip_configurations' to be a list")
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        if network_security_group_id and not isinstance(network_security_group_id, str):
            raise TypeError("Expected argument 'network_security_group_id' to be a str")

        self.ip_configurations = ip_configurations
        self.name = name
        self.network_security_group_id = network_security_group_id
#!/usr/bin/env bash
#
# Copyright (c) 2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Check include guards.

export LC_ALL=C
# Expected guard shape: (BITCOIN_|VIRCLE_)<UPPERCASED_PATH>_H
HEADER_ID_PREFIX="(BITCOIN_|VIRCLE_)"
HEADER_ID_SUFFIX="_H"
# Vendored / third-party subtrees follow their own conventions and are skipped.
REGEXP_EXCLUDE_FILES_WITH_PREFIX="src/(crypto/ctaes/|leveldb/|secp256k1/|tinyformat.h|univalue/|key/wordlists/|xxhash/|unilib/|lz4/|usbdevice/ledger/)"

EXIT_CODE=0
for HEADER_FILE in $(git ls-files -- "*.h" | grep -vE "^${REGEXP_EXCLUDE_FILES_WITH_PREFIX}")
do
    # Derive the expected guard id from the path: drop the leading directory,
    # strip ".h", turn "/" into "_" and upper-case everything.
    HEADER_ID_BASE=$(cut -f2- -d/ <<< "${HEADER_FILE}" | sed "s/\.h$//g" | tr / _ | tr "[:lower:]" "[:upper:]")
    HEADER_ID="${HEADER_ID_PREFIX}${HEADER_ID_BASE}${HEADER_ID_SUFFIX}"
    # A well-guarded header matches exactly twice: once for #ifndef, once for #define.
    if [[ $(grep -cE "^#(ifndef|define) ${HEADER_ID}" "${HEADER_FILE}") != 2 ]]; then
        echo "${HEADER_FILE} seems to be missing the expected include guard:"
        echo " #ifndef ${HEADER_ID}"
        echo " #define ${HEADER_ID}"
        echo " ..."
        echo " #endif // ${HEADER_ID}"
        echo
        EXIT_CODE=1
    fi
done
exit ${EXIT_CODE}
from flask import Flask
from flask_login import current_user, login_required
from app.lib.base.provider import Provider
from app.lib.base.decorators import admin_required

app = Flask(__name__)


# Mock user carrying a single role, used to exercise the access-control path
# without a real authentication backend.
class User:
    def __init__(self, role):
        self.role = role


# Shadow flask_login's current_user with a fixed admin user for demonstration.
current_user = User("admin")


# Custom admin_required decorator (shadows the imported one for this demo).
def admin_required(func):
    """Allow the wrapped view only when current_user has the admin role."""
    import functools

    # functools.wraps preserves the view's __name__; without it Flask
    # registers every decorated view under the endpoint name "wrapper",
    # which collides as soon as a second view uses this decorator.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        if current_user.role == "admin":
            return func(*args, **kwargs)
        return "Access Denied: Admin role required"

    return wrapper


# Route for /admin_dashboard with access restrictions.
@app.route('/admin_dashboard')
@login_required
@admin_required
def admin_dashboard():
    return "Welcome to the admin dashboard"
#!/bin/bash

set -e

# Initializes the berglas secret storage.

REL=$(dirname "$0")
# Quote expansions so paths/ids containing spaces don't word-split.
source "${REL}/config.sh"

gcloud services enable --project "${PROJECT_ID}" \
    cloudkms.googleapis.com \
    storage-api.googleapis.com \
    storage-component.googleapis.com

berglas bootstrap --project "${PROJECT_ID}" --bucket "${BUCKET_ID}"
def all_perms(string):
    """Return every distinct permutation of `string`.

    Recursive insertion algorithm: permute the tail, then insert the head
    character at every possible position, de-duplicating while preserving
    first-seen order.
    """
    if not string:
        return ['']

    head, tail = string[0], string[1:]
    # dict keys keep insertion order, matching the original's
    # append-if-not-seen de-duplication.
    seen = {}
    for perm in all_perms(tail):
        for pos in range(len(string)):
            seen.setdefault(perm[:pos] + head + perm[pos:], None)
    return list(seen)


string = 'abcd'
permutations = all_perms(string)
print(permutations)
package org.museautomation.ui.valuesource.actions;

import org.museautomation.core.values.*;
import org.museautomation.ui.extend.actions.*;

/**
 * Undoable action that changes the type of a {@link ValueSourceConfiguration},
 * remembering the previous type so the change can be reverted.
 *
 * @author <NAME> (see LICENSE.txt for license details)
 */
public class SourceTypeChangeAction extends UndoableAction
    {
    private final ValueSourceConfiguration _source;
    private final String _new_type;
    private String _old_type;

    public SourceTypeChangeAction(ValueSourceConfiguration source, String type)
        {
        _source = source;
        _new_type = type;
        }

    @Override
    protected boolean executeImplementation()
        {
        // Capture the current type first so undo can restore it.
        _old_type = _source.getType();
        _source.setType(_new_type);
        return true;
        }

    @Override
    protected boolean undoImplementation()
        {
        _source.setType(_old_type);
        return true;
        }
    }
package io.syndesis.qe.logic.common.wizard;

/**
 * One phase (step) of a multi-step wizard flow.
 */
public interface WizardPhase {

    /**
     * Advances the wizard from this phase to the next one.
     */
    void goToNextWizardPhase();
}
/*Given an array of integers, find if the array contains any duplicates.
Your function should return true if any value appears at least twice in the
array, and it should return false if every element is distinct.
Example 1: Input: [1,2,3,1]       Output: true
Example 2: Input: [1,2,3,4]       Output: false
Example 3: Input: [1,1,1,3,3,4,3,2,4,2] Output: true*/
#include <stdio.h>
#include <stdlib.h>

#define MAX_VALUE 100 /* counting array size; values must lie in [0, MAX_VALUE) */

int main(void)
{
    int n, flag = 0, i, temp;
    /* counts[v] tracks how many times value v has been seen */
    int *counts = (int *)calloc(MAX_VALUE, sizeof(int));

    if (counts == NULL)
        return 1;

    printf("Enter the size of the array\t");
    if (scanf("%d", &n) != 1 || n < 0) {
        free(counts);
        return 1;
    }

    for (i = 0; i < n; i++) {
        if (scanf("%d", &temp) != 1)
            break;
        /* Original indexed the 100-slot array blindly, overflowing for
         * values >= 100 (and negatives); skip untrackable values instead. */
        if (temp < 0 || temp >= MAX_VALUE)
            continue;
        /* Bug fix: the original reset flag to 0 whenever a first-time value
         * followed a duplicate (e.g. [1,1,2] printed "false"). Once a
         * duplicate is found, the flag must stay set. */
        if (counts[temp] != 0)
            flag = 1;
        counts[temp]++;
    }

    printf(flag ? "true" : "false");
    free(counts);
    return 0;
}
<reponame>JasonLiu798/javautil package com.atjl.fmt.api; import freemarker.cache.StringTemplateLoader; import freemarker.template.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.io.StringWriter; import java.util.Map; public class FmtUtil { private FmtUtil() { throw new UnsupportedOperationException(); } private static final Logger logger = LoggerFactory.getLogger(FmtUtil.class); private static final String DFT_TEMPLATE_NAME = "dftTemplate"; // private static Configuration conf; // private static Map<String, String> templateMap; // public static void init(Map<String, String> templates) { // FmtContext.constructConfig(); // templateMap = templates; // } public static String render(String templateContent, Map<String, Object> parameters) { try { Configuration cfg = new Configuration(); cfg.setObjectWrapper(new DefaultObjectWrapper()); cfg.setTemplateExceptionHandler(TemplateExceptionHandler.IGNORE_HANDLER); StringTemplateLoader stringLoader = new StringTemplateLoader(); stringLoader.putTemplate(DFT_TEMPLATE_NAME, templateContent); cfg.setTemplateLoader(stringLoader); Template template = cfg.getTemplate(DFT_TEMPLATE_NAME, "utf-8"); StringWriter writer = new StringWriter(); template.process(parameters, writer); return writer.toString(); } catch (IOException | TemplateException e) { if (logger.isErrorEnabled()) { logger.error("render {}", e); } } return null; } /** * 渲染 * * @param templateContent * @param parameters * @return */ // public static String render(String templateContent, Map<String, String> parameters) { // render(); /* try { Configuration cfg = new Configuration(); cfg.setObjectWrapper(new DefaultObjectWrapper()); cfg.setTemplateExceptionHandler(TemplateExceptionHandler.IGNORE_HANDLER); StringTemplateLoader stringLoader = new StringTemplateLoader(); stringLoader.putTemplate(DFT_TEMPLATE_NAME, templateContent); cfg.setTemplateLoader(stringLoader); Template template = cfg.getTemplate(DFT_TEMPLATE_NAME, 
"utf-8"); StringWriter writer = new StringWriter(); template.process(parameters, writer); return writer.toString(); } catch (IOException | TemplateException e) { if (logger.isErrorEnabled()) { logger.error("render {}", e); } } return null;*/ // } }
import { formatVersion } from '@/util/widgets'

/**
 * Parses a user-agent string (and optional platform string) into a detailed
 * client descriptor: rendering engine, browser, operating system, plus
 * WeChat / mini-program related flags. Regex branch ORDER matters below:
 * WebKit must be checked before Opera/Gecko/Trident because many UAs
 * impersonate each other.
 *
 * @param {string} [userAgentInfo] UA string; defaults to window.navigator.userAgent
 * @param {string} [platform] platform string; defaults to navigator.platform
 * @returns {Object} descriptor (engine, browser, system, is* flags, mp* fields)
 * @throws {Error} when no UA is given and the window global is unavailable
 */
function client (userAgentInfo = '', platform = '') {
  let engine = { // rendering engine
    trident: 0,
    gecko: 0,
    webkit: 0,
    khtml: 0,
    presto: 0,
    ver: null // specific version number
  }
  let browser = { // browser
    ie: 0,
    firefox: 0,
    safari: 0,
    konq: 0,
    opera: 0,
    chrome: 0,
    ver: null // specific version number
  }
  let system = { // operating system
    win: false,
    mac: false,
    x11: false
  }
  let ua = userAgentInfo
  if (!ua) {
    if (window && window.navigator && window.navigator.userAgent) { // default value
      ua = window.navigator.userAgent
    } else {
      throw new Error('not window global variable, please pass userAgentInfo param')
    }
  }
  if (/AppleWebKit\/([^(\s]+)/.test(ua)) { // match WebKit-engined browsers (Chrome, Safari, newer Opera)
    engine.ver = RegExp['$1']
    // NOTE(review): parseFloat takes one argument; the second argument "2" is ignored.
    engine.webkit = parseFloat(engine.ver, 2)
    if (/OPR\/(\S+)/.test(ua)) { // is this a WebKit-based Opera?
      browser.ver = RegExp['$1']
      browser.opera = parseFloat(browser.ver, 2)
    } else if (/Chrome\/(\S+)/.test(ua)) { // is this Chrome?
      browser.ver = RegExp['$1']
      browser.chrome = parseFloat(browser.ver, 2)
    } else if (/Version\/(\S+)/.test(ua)) { // is this a newer (3+) Safari?
      browser.ver = RegExp['$1']
      browser.safari = parseFloat(browser.ver, 2)
    } else { // approximate the version of an older Safari from the WebKit build
      let SafariVersion = 1
      if (engine.webkit < 100) {
        SafariVersion = 1
      } else if (engine.webkit < 312) {
        SafariVersion = 1.2
      } else if (engine.webkit < 412) {
        SafariVersion = 1.3
      } else {
        SafariVersion = 2
      }
      browser.safari = browser.ver = SafariVersion
    }
  } /* istanbul ignore next */ else if (window && window.opera) { // only matches old Presto-engined Opera 5+ (12.15-)
    engine.ver = browser.ver = window.opera.version()
    engine.presto = browser.opera = parseFloat(engine.ver, 2)
  } else if (/Opera[/\s](\S+)/.test(ua)) { // Opera 5- without window.opera, or a disguised Opera
    engine.ver = browser.ver = RegExp['$1']
    engine.presto = browser.opera = parseFloat(engine.ver, 2)
  } else if (/KHTML\/(\S+)/.test(ua) || /Konqueror\/([^;]+)/.test(ua)) {
    engine.ver = browser.ver = RegExp['$1']
    engine.khtml = browser.konq = parseFloat(engine.ver, 2)
  } else if (/rv:([^)]+)\) Gecko\/\d{8}/.test(ua)) { // is this Gecko-based?
    engine.ver = RegExp['$1']
    engine.gecko = parseFloat(engine.ver, 2)
    if (/Firefox\/(\S+)/.test(ua)) { // is this Firefox?
      browser.ver = RegExp['$1']
      browser.firefox = parseFloat(browser.ver, 2)
    }
  } else if (/Trident\/([\d.]+)/.test(ua)) { // is this a Trident-engined browser (IE8+)?
    engine.ver = RegExp['$1']
    engine.trident = parseFloat(engine.ver, 2)
    if (/rv:([\d.]+)/.test(ua) || /MSIE ([^;]+)/.test(ua)) { // match IE8-11+
      browser.ver = RegExp['$1']
      browser.ie = parseFloat(browser.ver, 2)
    }
  } else if (/MSIE ([^;]+)/.test(ua)) { // match IE6 / IE7
    browser.ver = RegExp['$1']
    browser.ie = parseFloat(browser.ver, 2)
    engine.ver = browser.ie - 4.0 // emulate the Trident value of IE6 / IE7
    engine.trident = parseFloat(engine.ver, 2)
  }
  let p = platform || (window && window.navigator && navigator.platform)
  // detect the operating system
  system.win = p.indexOf('Win') === 0 || Boolean(/win/ig.test(ua))
  system.mac = p.indexOf('Mac') === 0 || Boolean(/mac/ig.test(ua))
  // NOTE(review): `linux` is not declared in the `system` literal above; it is
  // added dynamically here (x11 stays false unless set elsewhere).
  system.linux = (p.indexOf('X11') === 0) || (p.indexOf('Linux') === 0) || Boolean(/linux|x11/ig.test(ua))
  if (system.win) {
    // Refine `win` from a boolean into a Windows version string when possible.
    if (/Win(?:dows)?\s([^;:]{2})\s([^;]+)/g.test(ua)) {
      if (RegExp['$1'] === 'NT') {
        system.win = ({
          '5.0': '2000',
          '5.1': 'xp',
          '6.0': 'vista',
          '6.1': '7',
          '6.2': '8',
          '6.3': '8.1',
          '10': '10'
        })[RegExp['$2']] || 'NT'
      } else if (RegExp['$1'] === '9x') {
        system.win = 'me'
      } else {
        system.win = RegExp['$1']
      }
    }
  }
  // special-case flags
  const isWeixin = /MicroMessenger/ig.test(ua)
  const isAndroid = /android|adr/ig.test(ua)
  const isIOS = /iphone|ipad|ipod|ios/ig.test(ua)
  // all iphone , android , winphone and nikia symbian
  const isPC = !(isIOS || isAndroid || /(?:Windows Phone)/.test(ua) || /(?:SymbianOS)/.test(ua))
  const isPhone = !isPC
  const isMiniProgram = /miniProgram/ig.test(ua)
  const isWechatDevtools = /wechatdevtools/ig.test(ua)
  // compatibility with the WeChat mini-program field names
  // platform
  const mpPlatform = isWechatDevtools ? 'devtools' : (isIOS ? 'ios' : (isAndroid ? 'android' : ''))
  // WeChat client version
  const versionObj = /MicroMessenger\/([^(\s]+)/ig.exec(ua)
  const mpVersion = versionObj ? formatVersion(versionObj[1]) : ''
  // The mini-program `system` field looks like "Android 5.0" or "iOS 13.5.1".
  // The iOS version comes from a UA like
  //   Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like Mac OS X)
  // and the Android version from one like
  //   Mozilla/5.0 (Linux; Android 7.1.1; vivo Y75 Build/N6F26Q; wv)
  let mpSystem = ''
  let mpSystemRst = isIOS ? /iPhone\s+OS\s+([^\s;]+)/ig.exec(ua) : /Android\s+([^\s;]+)/ig.exec(ua)
  if (mpSystemRst && typeof mpSystemRst[1] === 'string') {
    let versionTemp = formatVersion(mpSystemRst[1])
    mpSystem = `${isIOS ? 'iOS' : 'Android'} ${versionTemp}`
  }
  // Brand/model detection would bloat the library too much; support it separately.
  // brand model
  return {
    userAgent: ua, // the raw user-agent string
    engine, // rendering (layout) engine details
    browser, // browser brand and version details
    system, // operating system and version details
    isWeixin, // running inside WeChat?
    isAndroid, // Android?
    isIOS, // iOS?
    isPC, // desktop?
    isPhone,
    isMiniProgram, // WeChat mini-program
    mpPlatform,
    mpVersion,
    mpSystem
  }
};

export default client
// Barrel file: re-export the module's default under a named binding so
// consumers can import it from the package root.
export { default as QueryParamsOffsetAndLimit } from './QueryParamsOffsetAndLimit';
def functionA(x):
    """Return the sum 1 + 2 + ... + x (the x-th triangular number).

    The original loop accumulated x-i for i in range(x), i.e. x + (x-1) +
    ... + 1, in O(x) time; Gauss's closed form gives the same value in O(1).

    For x <= 0 the original loop body never ran and returned 0; that
    behavior is preserved explicitly.
    """
    if x <= 0:
        return 0
    return x * (x + 1) // 2
#!/bin/bash

set -e

# NOTE: Travis build matrix won't let us run something after ALL matrix jobs
# succeed, so we have to run everything serially below.

grunt

# If this is a pull request then we won't have access to secure variables and
# can't do integration tests with SauceLabs. In this case just do normal local
# tests. Variables are quoted so the tests don't break when they are unset.
if [ "$TRAVIS_PULL_REQUEST" == "true" ]
then
  grunt test
else
  grunt test:ci

  # Send coverage data to coveralls.io (master branch only).
  if [ "$TRAVIS_BRANCH" == "master" ]
  then
    grunt coverage
    # Never fail the build on a coveralls upload hiccup.
    cat ./coverage/lcov.info | ./node_modules/coveralls/bin/coveralls.js || true
  fi
fi
import itertools


def calculate_atomic_fraction(battery_entries, ion):
    """Average atomic fraction of `ion` across all unordered pairs of entries.

    For every pair of battery entries, averages the ion's atomic fraction of
    the two charge compositions and of the two discharge compositions, then
    averages those pair values over all pairs.

    Parameters
    ----------
    battery_entries : sequence of objects exposing
        ``entry_charge.composition.get_atomic_fraction(ion)`` and
        ``entry_discharge.composition.get_atomic_fraction(ion)``.
    ion : the species passed through to ``get_atomic_fraction``.

    Returns
    -------
    (avg_charge_fraction, avg_discharge_fraction); ``(0.0, 0.0)`` when fewer
    than two entries are supplied (no pairs exist — the original raised
    ZeroDivisionError in that case).
    """
    total_charge_fraction = 0.0
    total_discharge_fraction = 0.0
    pair_count = 0

    for entry_a, entry_b in itertools.combinations(battery_entries, 2):
        charge_a = entry_a.entry_charge.composition.get_atomic_fraction(ion)
        discharge_a = entry_a.entry_discharge.composition.get_atomic_fraction(ion)
        charge_b = entry_b.entry_charge.composition.get_atomic_fraction(ion)
        discharge_b = entry_b.entry_discharge.composition.get_atomic_fraction(ion)

        total_charge_fraction += (charge_a + charge_b) / 2
        total_discharge_fraction += (discharge_a + discharge_b) / 2
        pair_count += 1

    if pair_count == 0:
        # No pairs to average over; avoid dividing by zero.
        return (0.0, 0.0)

    return (total_charge_fraction / pair_count,
            total_discharge_fraction / pair_count)
// Generated by script, don't edit it please.
import createSvgIcon from '../../createSvgIcon';
import SkyatlasSvg from '@rsuite/icon-font/lib/legacy/Skyatlas';

// Wraps the raw SVG glyph in an icon component, attaching the accessibility
// label and icon-catalog metadata.
const Skyatlas = createSvgIcon({
  as: SkyatlasSvg,
  ariaLabel: 'skyatlas',
  category: 'legacy',
  displayName: 'Skyatlas'
});

export default Skyatlas;
# Rebuild the project from scratch.
make clean
make
# Run the client against 10.40.49.156:4050; remaining args are presumably
# a test name ("test"), an iteration/request count (5000) and a per-user
# mode flag — TODO confirm against the limitless client's usage text.
./limitless client 10.40.49.156 4050 test 5000 --user
# Recorded run results kept verbatim below (meaning unverified from here):
#1001 995 986
#5001 4984 4938
import { customElement } from '@aurelia/runtime';

/**
 * Demo routed component whose `canEnter` hook always returns false,
 * so navigation into this component is always cancelled.
 */
@customElement({ name: 'cancel', template: `<template>THE BIG BOARD! <input></template>` })
export class Cancel {
  // Router lifecycle hook: a false return vetoes entering this route.
  public async canEnter() {
    return false;
  }
}
module ObsFactory
  # Local representation of a job in the remote openQA. Uses a OpenqaApi (with a
  # hardcoded base url) to read the information and the Rails cache to store it
  class OpenqaJob
    include ActiveModel::Model
    extend ActiveModel::Naming
    include ActiveModel::Serializers::JSON

    attr_accessor :id, :name, :state, :result, :clone_id, :iso, :modules

    # Base URL of the openQA instance every lookup goes to.
    def self.openqa_base_url
      # build.opensuse.org can reach only the host directly, so we need
      # to use http - and accept a https redirect if used on work stations
      "http://openqa.opensuse.org"
    end

    # Shared API client for all class-level lookups.
    @@api = ObsFactory::OpenqaApi.new(openqa_base_url)

    # Reads jobs from the openQA instance or the cache with an interface similar
    # to ActiveRecord::Base#find_all_by
    #
    # If searching by iso or getting the full list, caching comes into play. In
    # any other case, a GET query to openQA is always performed.
    #
    # param [Hash] args filters to use in the query. Valid values:
    #              :build, :distri, :iso, :maxage, :state and :version
    # param [Hash] opt Options:
    #   :cache == 'refresh' forces a refresh of the cache
    #   :exclude_modules skips the loading of the modules information (which
    #     needs an extra GET request per job). The #modules atribute will be
    #     empty for all the jobs (except those read from the cache) and the
    #     results will not be cached
    def self.find_all_by(args = {}, opt = {})
      refresh = (opt.symbolize_keys[:cache].to_s == 'refresh')
      exclude_mod = !!opt.symbolize_keys[:exclude_modules]
      filter = args.symbolize_keys.slice(:iso, :state, :build, :maxage, :distri, :version)
      # We are only interested in current results
      get_params = {scope: 'current'}
      # If searching for the whole list of jobs, it caches the jobs
      # per ISO name.
      if filter.empty?
        Rails.cache.delete('openqa_isos') if refresh
        jobs = []
        isos = Rails.cache.read('openqa_isos')
        # If isos are in the cache, everything is read from cache
        if isos
          # nil covers jobs that had no ISO asset when they were cached.
          (isos + [nil]).each do |iso|
            jobs += Rails.cache.read("openqa_jobs_for_iso_#{iso}") || []
          end
        else
          # Get the bare list of jobs
          jobs = @@api.get('jobs', get_params)['jobs']
          # If exclude_mod is given, that's all. But if not...
          unless exclude_mod
            # First, enrich the result with the modules information and cache
            # the jobs per ISO
            jobs.group_by {|j| (j['assets']['iso'].first rescue nil)}.each do |iso, iso_jobs|
              iso_jobs.each do |job|
                job['modules'] = modules_for(job['id'])
              end
              Rails.cache.write("openqa_jobs_for_iso_#{iso}", iso_jobs, expires_in: 2.minutes)
            end
            # And then, cache the list of ISOs
            # NOTE(review): sort runs before compact, so a nil ISO in the list
            # would make sort raise on Ruby versions where nil is not
            # comparable — confirm nil never reaches this map.
            isos = jobs.map {|j| (j['assets']['iso'].first rescue nil)}.sort.compact.uniq
            Rails.cache.write('openqa_isos', isos, expires_in: 2.minutes)
          end
        end
      # If searching only by ISO, cache that one
      elsif filter.keys == [:iso]
        cache_entry = "openqa_jobs_for_iso_#{filter[:iso]}"
        Rails.cache.delete(cache_entry) if refresh
        jobs = Rails.cache.read(cache_entry)
        if jobs.nil?
          get_params[:iso] = filter[:iso]
          jobs = @@api.get('jobs', get_params)['jobs']
          unless exclude_mod
            jobs.each do |job|
              job['modules'] = modules_for(job['id'])
            end
            Rails.cache.write(cache_entry, jobs, expires_in: 2.minutes)
          end
        end
      # In any other case, don't cache
      else
        get_params.merge!(filter)
        jobs = @@api.get('jobs', get_params)['jobs']
        unless exclude_mod
          jobs.each do |job|
            job['modules'] = modules_for(job['id'])
          end
        end
      end
      jobs.map { |j| OpenqaJob.new(j.slice(*attributes)) }
    end

    # Name of the modules which failed during openQA execution
    #
    # @return [Array] array of module names
    def failing_modules
      modules.reject {|m| %w(ok na).include? m['result']}.map {|m| m['name'] }
    end

    # Result of the job, or its state if no result is available yet
    #
    # @return [String] state if result is 'none', value of result otherwise
    def result_or_state
      if result == 'none'
        state
      else
        result
      end
    end

    # Attribute names exposed by this model (and used to slice API payloads).
    def self.attributes
      %w(id name state result clone_id iso modules)
    end

    # Required by ActiveModel::Serializers
    def attributes
      Hash[self.class.attributes.map { |a| [a, nil] }]
    end

    protected

    # Reads the list of failed modules for a given job_id from openQA
    # by means of a GET call
    #
    # @param [#to_s] job_id the job id
    # @return [Array] array of hashes with two keys each: 'name' and 'result'
    def self.modules_for(job_id)
      # Surprisingly, we don't have an API call for getting the job
      # results in openQA
      result = @@api.get("tests/#{job_id}/file/results.json", {}, base_url: openqa_base_url)
      # NOTE(review): the modifier rescue silently maps *any* error (network,
      # JSON shape, nil result) to an empty module list.
      result['testmodules'].map {|m| m.slice('name', 'result') } rescue []
    end
  end
end
#!/usr/bin/env bats

load helpers

function setup() {
    if ! crictl runp -h | grep -q "cancel-timeout"; then
        skip "must have a crictl with the -T option to test CRI-O's timeout handling"
    fi
    setup_test
    # Wrap conmon so every non-version invocation stalls for 3s, forcing the
    # crictl requests below (issued with the same 3s cancel timeout) to time out.
    create_conmon 3s
    CANCEL_TIMEOUT="3s"
}

function teardown() {
    cleanup_test
}

# Installs a conmon wrapper that sleeps for $1 before delegating to the real
# conmon (except for --version probes), then points CRI-O at the wrapper.
function create_conmon() {
    local timeout=$1
    cat > "$TESTDIR"/tmp_conmon << EOF
#!/bin/bash
if [[ "\$1" != "--version" ]]; then
    sleep $timeout
fi
$CONMON_BINARY \$@
EOF
    chmod +x "$TESTDIR/tmp_conmon"
    export CONTAINER_CONMON="$TESTDIR/tmp_conmon"
}

# Allow cri-o to catch up. The sleep here should be less than
# resourcestore.sleepTimeBeforeCleanup but enough for cri-o to
# finish processing cancelled crictl create/runp.
function wait_create() {
    sleep 30s
}

# Allow cri-o to catch up and clean the state of a pod/container.
# The sleep here should be > 2 * resourcestore.sleepTimeBeforeCleanup.
function wait_clean() {
    sleep 150s
}

@test "should not clean up pod after timeout" {
    # need infra container so runp can timeout in conmon
    CONTAINER_DROP_INFRA_CTR=false start_crio
    run crictl runp -T "$CANCEL_TIMEOUT" "$TESTDATA"/sandbox_config.json
    echo "$output"
    [[ "$output" == *"context deadline exceeded"* ]]
    [ "$status" -ne 0 ]

    wait_create
    # cri-o should not report any pods
    pods=$(crictl pods -q)
    [[ -z "$pods" ]]

    # ...but the low-level runtime still holds the created infra container.
    created_ctr_id=$(runtime list -q)
    [ -n "$created_ctr_id" ]

    # An identical re-request must reuse the already-created pod.
    output=$(crictl runp "$TESTDATA"/sandbox_config.json)
    [[ "$output" == "$created_ctr_id" ]]
}

@test "should not clean up container after timeout" {
    start_crio
    pod_id=$(crictl runp "$TESTDATA"/sandbox_config.json)
    run crictl create -T "$CANCEL_TIMEOUT" "$pod_id" "$TESTDATA"/container_config.json "$TESTDATA"/sandbox_config.json
    echo "$output"
    [[ "$output" == *"context deadline exceeded"* ]]
    [ "$status" -ne 0 ]

    wait_create
    # cri-o should not report any containers
    ctrs=$(crictl ps -aq)
    [[ -z "$ctrs" ]]

    # cri-o should have created a container
    created_ctr_id=$(runtime list -q | grep -v "$pod_id")
    [ -n "$created_ctr_id" ]

    # An identical re-request must reuse the already-created container.
    output=$(crictl create "$pod_id" "$TESTDATA"/container_config.json "$TESTDATA"/sandbox_config.json)
    [[ "$output" == "$created_ctr_id" ]]
}

@test "should clean up pod after timeout if request changes" {
    # need infra container so runp can timeout in conmon
    CONTAINER_DROP_INFRA_CTR=false start_crio
    run crictl runp -T "$CANCEL_TIMEOUT" "$TESTDATA"/sandbox_config.json
    echo "$output"
    [[ "$output" == *"context deadline exceeded"* ]]
    [ "$status" -ne 0 ]

    wait_create
    created_ctr_id=$(runtime list -q)
    [ -n "$created_ctr_id" ]

    # we should create a new pod and not reuse the old one
    # (changing metadata.attempt makes it a different request)
    output=$(crictl runp <(jq '.metadata.attempt = 2' "$TESTDATA"/sandbox_config.json))
    [[ "$output" != "$created_ctr_id" ]]

    wait_clean

    # the old, timed out container should have been removed
    ! runtime list -q | grep "$created_ctr_id"
}

@test "should clean up container after timeout if request changes" {
    start_crio
    pod_id=$(crictl runp "$TESTDATA"/sandbox_config.json)
    run crictl create -T "$CANCEL_TIMEOUT" "$pod_id" "$TESTDATA"/container_config.json "$TESTDATA"/sandbox_config.json
    echo "$output"
    [[ "$output" == *"context deadline exceeded"* ]]
    [ "$status" -ne 0 ]

    wait_create
    # cri-o should have created a container
    created_ctr_id=$(runtime list -q | grep -v "$pod_id")
    [ -n "$created_ctr_id" ]

    # should create a new container and not reuse the old one
    output=$(crictl create "$pod_id" <(jq '.metadata.attempt = 2' "$TESTDATA"/container_config.json) "$TESTDATA"/sandbox_config.json)
    [[ "$output" != "$created_ctr_id" ]]

    wait_clean

    # the old, timed out container should have been removed
    ! runtime list -q | grep "$created_ctr_id"
}

@test "should clean up pod after timeout if not re-requested" {
    # need infra container so runp can timeout in conmon
    CONTAINER_DROP_INFRA_CTR=false start_crio
    run crictl runp -T "$CANCEL_TIMEOUT" "$TESTDATA"/sandbox_config.json
    echo "$output"
    [[ "$output" == *"context deadline exceeded"* ]]
    [ "$status" -ne 0 ]

    wait_clean
    # cri-o should not report any pods
    pods=$(crictl pods -q)
    [[ -z "$pods" ]]

    # pod should have been cleaned up
    [[ -z $(runtime list -q) ]]

    # we should recreate the pod and not reuse the old one
    crictl runp "$TESTDATA"/sandbox_config.json
}

@test "should clean up container after timeout if not re-requested" {
    start_crio
    pod_id=$(crictl runp "$TESTDATA"/sandbox_config.json)
    run crictl create -T "$CANCEL_TIMEOUT" "$pod_id" "$TESTDATA"/container_config.json "$TESTDATA"/sandbox_config.json
    echo "$output"
    [[ "$output" == *"context deadline exceeded"* ]]
    [ "$status" -ne 0 ]

    wait_clean
    # cri-o should not report any containers
    ctrs=$(crictl ps -aq)
    [[ -z "$ctrs" ]]

    # container should have been cleaned up
    ! runtime list -q | grep -v "$pod_id"

    # we should recreate the container and not reuse the old one
    crictl create "$pod_id" "$TESTDATA"/container_config.json "$TESTDATA"/sandbox_config.json
}

# this test case is paranoid, but mostly checks that we can't
# operate on a pod that's not created, and that we don't mark
# a timed out pod as created before it's re-requested
@test "should not be able to operate on a timed out pod" {
    # need infra container so runp can timeout in conmon
    CONTAINER_DROP_INFRA_CTR=false start_crio
    run crictl runp -T "$CANCEL_TIMEOUT" "$TESTDATA"/sandbox_config.json
    echo "$output"
    [[ "$output" == *"context deadline exceeded"* ]]
    [ "$status" -ne 0 ]

    wait_create
    # container should not have been cleaned up
    created_ctr_id=$(runtime list -q)
    [ -n "$created_ctr_id" ]

    # every operation on the half-created pod must fail
    ! crictl create "$created_ctr_id" "$TESTDATA"/container_config.json "$TESTDATA"/sandbox_config.json
    ! crictl stopp "$created_ctr_id"
    ! crictl inspectp "$created_ctr_id"
}

@test "should not be able to operate on a timed out container" {
    start_crio
    pod_id=$(crictl runp "$TESTDATA"/sandbox_config.json)
    run crictl create -T "$CANCEL_TIMEOUT" "$pod_id" "$TESTDATA"/container_config.json "$TESTDATA"/sandbox_config.json
    echo "$output"
    [[ "$output" == *"context deadline exceeded"* ]]
    [ "$status" -ne 0 ]

    wait_create
    # cri-o should have created a container
    created_ctr_id=$(runtime list -q | grep -v "$pod_id")
    [ -n "$created_ctr_id" ]

    # every operation on the half-created container must fail
    ! crictl start "$created_ctr_id"
    ! crictl exec "$created_ctr_id" ls
    ! crictl exec --sync "$created_ctr_id" ls
    ! crictl inspect "$created_ctr_id"
}
#include <string> #include <unordered_map> class Texture { // Texture class definition }; class TextureManager { private: std::unordered_map<std::string, Texture> textures; public: // Retrieves a texture by its name Texture& getTexture(const std::string& name) { return textures[name]; } // Loads a texture into memory and associates it with a given name void loadTexture(const std::string& name, const std::string& filename) { // Load the texture from the file and store it in the textures map Texture newTexture = loadTextureFromFile(filename); textures[name] = newTexture; } // Unloads a texture from memory based on its name void unloadTexture(const std::string& name) { // Check if the texture exists and remove it from the textures map if (textures.find(name) != textures.end()) { textures.erase(name); } } private: // Helper method to load a texture from a file Texture loadTextureFromFile(const std::string& filename) { // Implementation to load the texture from the file // This could involve using a graphics library or other platform-specific code // For the purpose of this example, we'll assume a simple implementation Texture newTexture; // Load the texture from the file and initialize it return newTexture; } };
<reponame>jakson92/foxbit-trade-bot import FoxbitConstants from '../../../config/foxbitConstants'; import FoxbitTools from '../../tools/foxbitTools'; class SellOrder { constructor(price, amount) { this.side = '2'; this.price = FoxbitTools.convertToNumberToSatoshis(price); this.amount = FoxbitTools.convertToNumberToSatoshis(amount); this.symbol = FoxbitConstants.DEFAULT_SYMBOL; } isSameOrderSide(side) { if (side === 'sell') { return true; } return false; } isMyPriceInTop(price) { const priceInSatoshis = FoxbitTools.convertToNumberToSatoshis(price); if (this.price < priceInSatoshis) { return true; } return false; } gePriceByOrderbook(value) { return value - 0.01; } } export default SellOrder;
/** * Returns true if the provided element is a component of the provided type. * * @param classType {ReactElement class} - the class of a React Element * @param reactElement {ReactElement} - any React Element (not a real DOM node) * * @example * // Checks if the component is an Autocomplete * isComponentType(Autocomplete, this.props.children[0]); */ export default function isComponentOfType(classType, reactElement) { return reactElement && reactElement.type === classType; }
#include <iostream> #include <cstring> class Person { private: int code; char* name; public: // Destructor to release memory for name ~Person() { if (name != nullptr) { delete[] name; name = nullptr; } } // Copy constructor for deep copying Person(const Person& person) { std::cout << std::endl << "Calling COPY-CSTR"; code = person.code; if (person.name != nullptr) { name = new char[strlen(person.name) + 1]; strcpy(name, person.name); } else { name = nullptr; } } // Setter for code void setCode(int newCode) { code = newCode; } // Setter for name void setName(const char* newName) { if (name != nullptr) { delete[] name; } name = new char[strlen(newName) + 1]; strcpy(name, newName); } }; int main() { Person person1; person1.setCode(1001); person1.setName("Alice"); Person person2 = person1; // Copy constructor should be called return 0; }
#!/bin/bash
# ========== Experiment Seq. Idx. 989 / 49.1.5 / N. 44/3/4 - _S=49.1.5 D1_N=44 a=1 b=-1 c=-1 d=1 e=1 f=-1 D3_N=3 g=-1 h=1 i=1 D4_N=4 j=4 ==========
# Generated experiment runner: extracts deep features for the test split, then
# evaluates a pre-trained SVM layer on them. Exit codes 160-165 signal status
# to an external scheduler (160 ok, 163 already done, 164 deps missing, 165 locked).
set -u
# Prints header
echo -e '\n\n========== Experiment Seq. Idx. 989 / 49.1.5 / N. 44/3/4 - _S=49.1.5 D1_N=44 a=1 b=-1 c=-1 d=1 e=1 f=-1 D3_N=3 g=-1 h=1 i=1 D4_N=4 j=4 ==========\n\n'
if [[ "Yes" == "No" ]]; then
    echo 'FATAL: This treatment did not include an SVM layer.'>&2
    echo '       Something very wrong happened!'>&2
    exit 161
fi
# Prepares all environment variables
JBHI_DIR="$HOME/jbhi-special-issue"
DATASET_DIR="$JBHI_DIR/data/edra-clinical.299.tfr"
MODEL_DIR="$JBHI_DIR/models/deep.44"
SVM_DIR="$JBHI_DIR/svm-models"
SVM_PREFIX="$SVM_DIR/deep.44.layer.3.svm"
SVM_PATH="$SVM_PREFIX.pkl"
FEATURES_DIR="$JBHI_DIR/features"
TEST_FEATURES_PREFIX="$FEATURES_DIR/deep.44.layer.3.test.4.index.2555.test"
TEST_FEATURES_PATH="$TEST_FEATURES_PREFIX.feats.pkl"
RESULTS_DIR="$JBHI_DIR/results"
RESULTS_PREFIX="$RESULTS_DIR/deep.44.layer.3.test.4.index.2555.svm"
RESULTS_PATH="$RESULTS_PREFIX.results.txt"
# ...variables expected by jbhi-checks.include.sh and jbhi-footer.include.sh
SOURCES_GIT_DIR="$JBHI_DIR/jbhi-special-issue"
LIST_OF_INPUTS="$DATASET_DIR/finish.txt:$MODEL_DIR/finish.txt:$SVM_PREFIX.finish.txt"
START_PATH="$RESULTS_PREFIX.start.txt"
FINISH_PATH="$RESULTS_PREFIX.finish.txt"
LOCK_PATH="$RESULTS_PREFIX.running.lock"
LAST_OUTPUT="$RESULTS_PATH"
# ...creates mid-way checkpoint after the expensive test features extraction
SEMIFINISH_PATH="$TEST_FEATURES_PREFIX.finish.txt"
# EXPERIMENT_STATUS=1
# STARTED_BEFORE=No
mkdir -p "$FEATURES_DIR"
mkdir -p "$RESULTS_DIR"
# # Assumes that the following environment variables where initialized
# SOURCES_GIT_DIR="$JBHI_DIR/jbhi-special-issue"
# LIST_OF_INPUTS="$DATASET_DIR/finish.txt:$MODELS_DIR/finish.txt:"
# START_PATH="$OUTPUT_DIR/start.txt"
# FINISH_PATH="$OUTPUT_DIR/finish.txt"
# LOCK_PATH="$OUTPUT_DIR/running.lock"
#
# NOTE(review): the line below overwrites the LAST_OUTPUT set above with an
# unexpanded generator placeholder ([[[:D1_MAX_NUMBER_OF_STEPS:]]]), so the
# "-e $LAST_OUTPUT" success check near the end can never pass — presumably
# template-generation leftover; confirm against the script generator.
LAST_OUTPUT="$MODEL_DIR/[[[:D1_MAX_NUMBER_OF_STEPS:]]].meta"
EXPERIMENT_STATUS=1
STARTED_BEFORE=No
# Checks if code is stable, otherwise alerts scheduler
pushd "$SOURCES_GIT_DIR" >/dev/null
GIT_STATUS=$(git status --porcelain)
GIT_COMMIT=$(git log | head -n 1)
popd >/dev/null
if [ "$GIT_STATUS" != "" ]; then
    echo 'FATAL: there are uncommitted changes in your git sources file' >&2
    echo '       for reproducibility, experiments only run on committed changes' >&2
    echo >&2
    echo '       Git status returned:'>&2
    echo "$GIT_STATUS" >&2
    exit 162
fi
# The experiment is already finished - exits with special code so scheduler won't retry
if [[ "$FINISH_PATH" != "-" ]]; then
    if [[ -e "$FINISH_PATH" ]]; then
        echo 'INFO: this experiment has already finished' >&2
        exit 163
    fi
fi
# The experiment is not ready to run due to dependencies - alerts scheduler
if [[ "$LIST_OF_INPUTS" != "" ]]; then
    IFS=':' tokens_of_input=( $LIST_OF_INPUTS )
    input_missing=No
    for input_to_check in ${tokens_of_input[*]}; do
        if [[ ! -e "$input_to_check" ]]; then
            echo "ERROR: input $input_to_check missing for this experiment" >&2
            input_missing=Yes
        fi
    done
    if [[ "$input_missing" != No ]]; then
        exit 164
    fi
fi
# Sets trap to return error code if script is interrupted before successful finish
LOCK_SUCCESS=No
FINISH_STATUS=161
function finish_trap {
    # Release the lock only if this process took it; on failure also remove the
    # finish marker so the scheduler will retry.
    if [[ "$LOCK_SUCCESS" == "Yes" ]]; then
        rmdir "$LOCK_PATH" &> /dev/null
    fi
    if [[ "$FINISH_STATUS" == "165" ]]; then
        echo 'WARNING: experiment discontinued because other process holds its lock' >&2
    else
        if [[ "$FINISH_STATUS" == "160" ]]; then
            echo 'INFO: experiment finished successfully' >&2
        else
            [[ "$FINISH_PATH" != "-" ]] && rm -f "$FINISH_PATH"
            echo 'ERROR: an error occurred while executing the experiment' >&2
        fi
    fi
    exit "$FINISH_STATUS"
}
trap finish_trap EXIT
# While running, locks experiment so other parallel threads won't attempt to run it too
# (mkdir is atomic, so it doubles as a mutex)
if mkdir "$LOCK_PATH" --mode=u=rwx,g=rx,o=rx &>/dev/null; then
    LOCK_SUCCESS=Yes
else
    echo 'WARNING: this experiment is already being executed elsewhere' >&2
    FINISH_STATUS="165"
    exit
fi
# If the experiment was started before, do any cleanup necessary
if [[ "$START_PATH" != "-" ]]; then
    if [[ -e "$START_PATH" ]]; then
        echo 'WARNING: this experiment is being restarted' >&2
        STARTED_BEFORE=Yes
    fi
    #...marks start
    date -u >> "$START_PATH"
    echo GIT "$GIT_COMMIT" >> "$START_PATH"
fi
#...gets closest checkpoint file
# The awk program picks the checkpoint number whose distance to t=20000 is smallest.
MODEL_CHECKPOINT=$(ls "$MODEL_DIR/"model.ckpt-*.index | \
    sed 's/.*ckpt-\([0-9]*\)\..*/\1/' | \
    sort -n | \
    awk -v c=1 -v t=20000 \
        'NR==1{d=$c-t;d=d<0?-d:d;v=$c;next}{m=$c-t;m=m<0?-m:m}m<d{d=m;v=$c}END{print v}')
MODEL_PATH="$MODEL_DIR/model.ckpt-$MODEL_CHECKPOINT"
echo "$MODEL_PATH" >> "$START_PATH"
if [[ ! -f "$SEMIFINISH_PATH" ]]; then
    #...performs preliminary feature extraction
    echo Extracting SVM test features with "$MODEL_PATH"
    python \
        "$SOURCES_GIT_DIR/predict_image_classifier.py" \
        --model_name="inception_v4" \
        --checkpoint_path="$MODEL_PATH" \
        --dataset_name=skin_lesions \
        --task_name=label \
        --dataset_split_name=test \
        --preprocessing_name=dermatologic \
        --aggressive_augmentation="True" \
        --add_rotations="True" \
        --minimum_area_to_crop="0.20" \
        --normalize_per_image="1" \
        --batch_size=1 \
        --id_field_name=id \
        --pool_features=avg \
        --extract_features \
        --output_format=pickle \
        --add_scores_to_features=none \
        --eval_replicas="50" \
        --output_file="$TEST_FEATURES_PATH" \
        --dataset_dir="$DATASET_DIR"
    # Tip: leave last the arguments that make the command fail if they're absent,
    # so if there's a typo or forgotten \ the entire thing fails
    EXPERIMENT_STATUS="$?"
    if [[ "$EXPERIMENT_STATUS" != "0" || ! -e "$TEST_FEATURES_PATH" ]]; then
        exit
    fi
    date -u >> "$SEMIFINISH_PATH"
    echo GIT "$GIT_COMMIT" >> "$SEMIFINISH_PATH"
else
    echo Reloading features from "$TEST_FEATURES_PATH"
fi
#...performs prediction with SVM model
python \
    "$SOURCES_GIT_DIR/predict_svm_layer.py" \
    --output_file "$RESULTS_PATH" \
    --input_test "$TEST_FEATURES_PATH" \
    --input_model "$SVM_PATH"
# Tip: leave last the arguments that make the command fail if they're absent,
# so if there's a typo or forgotten \ the entire thing fails
EXPERIMENT_STATUS="$?"
#
#...starts training
if [[ "$EXPERIMENT_STATUS" == "0" ]]; then
    if [[ "$LAST_OUTPUT" == "" || -e "$LAST_OUTPUT" ]]; then
        if [[ "$FINISH_PATH" != "-" ]]; then
            date -u >> "$FINISH_PATH"
            echo GIT "$GIT_COMMIT" >> "$FINISH_PATH"
        fi
        FINISH_STATUS="160"
    fi
fi
class LabelledCollection: def __init__(self, data, classes_): self.data = data self.classes_ = classes_ def split_stratified(self, train_prop): # Implementation not required for this problem pass class Dataset: def __init__(self, training: LabelledCollection, test: LabelledCollection, vocabulary: dict = None, name=''): assert set(training.classes_) == set(test.classes_), 'incompatible labels in training and test collections' self.training = training self.test = test self.vocabulary = vocabulary self.name = name @classmethod def SplitStratified(cls, collection: LabelledCollection, train_size=0.6): return cls(*collection.split_stratified(train_prop=train_size)) # Example usage # Create labeled collections train_data = [[1, 2, 3], [4, 5, 6], [7, 8, 9]] train_classes = ['A', 'B', 'A'] test_data = [[10, 11, 12], [13, 14, 15]] test_classes = ['B', 'A'] train_collection = LabelledCollection(train_data, train_classes) test_collection = LabelledCollection(test_data, test_classes) # Initialize a Dataset instance dataset = Dataset(train_collection, test_collection, vocabulary={'A': 0, 'B': 1}, name='Example Dataset') # Split a collection and create a new Dataset instance new_dataset = Dataset.SplitStratified(train_collection, train_size=0.7)
#!/bin/bash
#
# This is catalina template generator
#

#
# Display Help message
#
function HELP() {
    echo "Usage: $0 -b /path/to/catalina_base"
    echo "Usage: $0 -c /path/to/catalina_home"
    echo "Usage: $0 -n app_name"
}

# Parse -b CATALINA_BASE, -c CATALINA_HOME, -n APP_NAME; -h shows usage.
while getopts b:c:n:h FLAG; do
    case $FLAG in
        b) CATALINA_BASE=$OPTARG ;;
        c) CATALINA_HOME=$OPTARG ;;
        n) APP_NAME=$OPTARG ;;
        h)
            # FIX: previously fell through and then complained about missing
            # options; asking for help should print usage and stop.
            HELP
            exit 0
            ;;
        \?) # unrecognized option - show help and fail (FIX: used to continue)
            echo -e \\n"Option -$OPTARG not allowed."
            HELP
            exit 1
            ;;
    esac
done

if [ "${CATALINA_HOME}" == "" ]; then
    echo "-c CATALINA_HOME is not defined."
    HELP
    exit 1
fi
if [ "${CATALINA_BASE}" == "" ]; then
    echo "-b CATALINA_BASE is not defined."
    HELP
    exit 1
fi
if [ "${APP_NAME}" == "" ]; then
    echo "-n APP_NAME is not defined."
    HELP
    exit 1
fi

echo "Name: ${APP_NAME}"
echo "CATALINA_HOME: ${CATALINA_HOME}"
echo "CATALINA_BASE: ${CATALINA_BASE}"

APP_BASE=${CATALINA_BASE}/${APP_NAME}
APP_CATALINA_BASE=${APP_BASE}/catalina_base

#
# Check the target base directory exists
#
if [ -d "${APP_CATALINA_BASE}" ]; then
    rm -rf "${APP_CATALINA_BASE}"
    echo "Cleaned ${APP_CATALINA_BASE}"
fi

mkdir -p "${APP_CATALINA_BASE}"
if [ ! -d "${APP_CATALINA_BASE}" ]; then
    echo "Error: APP_CATALINA_BASE does not exists."
    exit 1
fi

echo "Create catalina base sub directories."
# FIX: all expansions below are quoted so paths containing spaces work.
mkdir -p "${APP_CATALINA_BASE}"
mkdir -p "${APP_CATALINA_BASE}/conf"
mkdir -p "${APP_CATALINA_BASE}/logs"
mkdir -p "${APP_CATALINA_BASE}/webapps"
mkdir -p "${APP_CATALINA_BASE}/temp"
mkdir -p "${APP_CATALINA_BASE}/work"

echo "Copy server config files."
cp "${CATALINA_HOME}/conf/server.xml" "${APP_CATALINA_BASE}/conf"
cp "${CATALINA_HOME}/conf/web.xml" "${APP_CATALINA_BASE}/conf"

echo "Create start.sh."
cp ./templates/start.sh "${APP_BASE}"
echo "Create halt.sh."
cp ./templates/halt.sh "${APP_BASE}"
echo "Create restart.sh."
cp ./templates/restart.sh "${APP_BASE}"
echo "Create logtc."
cp ./templates/logtc "${APP_BASE}"

echo "Create app.env file."
cat <<EOT > ${APP_BASE}/app.env
### app name
export APP_NAME=${APP_NAME}
### tomcat install dir
export CATALINA_HOME=${CATALINA_HOME}
### web app instance base dir
export CATALINA_BASE=${APP_CATALINA_BASE}
EOT
#!/bin/bash # script path SOURCE="${BASH_SOURCE[0]}" SCRIPTPATH=`dirname $SOURCE` # load check_functions. . $SCRIPTPATH/../../check_functions/check_functions.bash # check tcp port: check_tcp 80 #http check_tcp 443 #https
at=`cat $1/$IDS_FOLDER/$ACCESS_TOKEN_FILENAME` for file in `ls $SOS_FOLDER` do id=`cat $1/$IDS_FOLDER/$file.id | perl -pe "s/\"/\n/g" | head -4 | tail -1` response=$(curl --digest -XGET \ -H "Content-Type: application/json;charset=UTF-8" \ -H "Authorization: $at" \ --write-out %{http_code} -s\ -o /dev/null \ http://$API_PUB_NODES:$API_PUB_SEC_PORT/$id/streams/$SAMPLE_STREAM) if [ $response != 200 ]; then echo "KO... Error retrieving data from SO based on $file -> response: "$response else echo "OK... Retrieved data from SO based on $file, ID: "$id fi done
package stacker const ( GitVersionAnnotation = "com.cisco.stacker.git_version" StackerContentsAnnotation = "com.cisco.stacker.stacker_yaml" )
public class VowelAsterisk { public static void main(String args[]) { String str = "Hello World"; char[] arr = str.toCharArray(); for(int i = 0; i < arr.length; i++) { if (arr[i] == 'a' || arr[i] == 'e' || arr[i] == 'i' || arr[i] == 'o' || arr[i] == 'u') { arr[i] = '*'; } } System.out.println(String.valueOf(arr)); } }
/* * Copyright (c) 2008-2014 <NAME> <<EMAIL>> * * Permission to use, copy, modify, and/or distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ #ifdef HAVE_CONFIG_H #include "config.h" #endif #include <string.h> #include "fw_query.h" static struct fw_query fwq[FW_QUERY_CACHE_SIZE]; static int fwq_ix; void fw_query_init() { memset(fwq, 0, sizeof(struct fw_query) * FW_QUERY_CACHE_SIZE); fwq_ix = 0; } void fw_query_put(struct fw_query *fw_query) { memcpy(&(fwq[fwq_ix]), fw_query, sizeof(struct fw_query)); ++fwq_ix; if (fwq_ix >= FW_QUERY_CACHE_SIZE) fwq_ix = 0; } void fw_query_get(unsigned short query_id, struct fw_query **fw_query) { int i; *fw_query = NULL; for (i = 0; i < FW_QUERY_CACHE_SIZE; i++) { if (fwq[i].id == query_id) { *fw_query = &(fwq[i]); return; } } }
package models;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Set;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.bson.types.ObjectId;
import org.jongo.MongoCollection;
import org.jongo.MongoCursor;
import org.jongo.marshall.jackson.oid.Id;

/**
 * A comment on a Post. Comments are stored denormalized: the newest few are
 * embedded in the "post" document ($slice:-4) while the full history lives in
 * paginated "comment" documents (50 comments per page, keyed by post_id+page).
 */
@lombok.Getter
public class Comment extends Model {
    @Id
    private ObjectId id;
    @JsonProperty("user_id")
    @Postproduct
    private ObjectId userId;
    private String text;
    private List<Attachment> attachments;
    private Date created;
    // like_count/likes are raw storage; postproduct() converts them into the
    // likeCount/liked view fields before serving to a client.
    @JsonProperty("like_count")
    private int likeCount;
    private Set<ObjectId> likes;
    @JsonIgnore
    private boolean liked;
    // Soft-delete marker; null means "not deleted".
    private Boolean deleted;

    public Comment() {
    }

    public Comment(ObjectId userId, String text, List<Attachment> attachments) {
        this.id = new ObjectId();
        this.userId = userId;
        this.text = text;
        this.attachments = attachments;
        this.created = new Date();
    }

    /**
     * Persists this comment under the given post: bumps the post's comment
     * count, embeds the comment in the post (keeping the last 4), appends it
     * to the right comment page, invalidates the post cache, and queues
     * notification activities for interested users.
     */
    public void save(ObjectId postId) {
        MongoCollection postCol = jongo.getCollection("post");
        MongoCollection commentCol = jongo.getCollection("comment");
        Post post = postCol
                .findAndModify("{_id:#}", postId)
                .with("{$inc:{comment_count:1},$addToSet:{commentators:#}" +
                        ",$push:{comments:{$each:[#],$slice:-4}}}", userId, this)
                .projection("{user_id:1,comment_count:1,commentators:1,likes:1}")
                .as(Post.class);
        if (post == null)
            return;
        // 50 comments per page; findAndModify returned the pre-update count.
        int page = post.getCommentCount() / 50;
        commentCol.update("{post_id:#,page:#}", postId, page).upsert()
                .with("{$push:{comments:#},$setOnInsert:{user_id:#,created:#}}", this, post.getUserId(), this.created);
        del("post:" + postId);
        if (!userId.equals(post.getUserId())) {
            new Activity(userId, ActivityType.commentYourPost, postId, post.getUserId()).queue();
        }
        Set<ObjectId> likes = post.getLikes();
        if (userId.equals(post.getUserId()) && likes != null) {
            // Post owner commented: notify everyone who liked the post (minus the owner).
            likes.remove(userId);
            new Activity(userId, ActivityType.ownerCommentPostYouLike, postId, likes).queue();
        } else {
            // Someone else commented: notify the other commentators.
            Set<ObjectId> commentators = post.getCommentators();
            if (commentators != null) {
                commentators.remove(userId);
                commentators.remove(post.getUserId());
                new Activity(userId, ActivityType.commentPostYouComment, postId, commentators)
                        .queue();
            }
        }
    }

    /**
     * Returns one page of comments for a post, newest-last, containing at most
     * {@code limit} comments created strictly before {@code until}. The
     * returned Page carries a "previous" cursor when more may exist.
     */
    public static Page get(ObjectId postId, ObjectId userId, Date until, int limit) {
        MongoCollection commentCol = jongo.getCollection("comment");
        String previous = null;
        // Two page-documents (up to 100 comments) are enough to fill one page.
        MongoCursor<Comments> cursor = commentCol
                .find("{post_id:#,created:{$lt:#}}", postId, until)
                .sort("{created:-1}")
                .limit(2)
                .as(Comments.class);
        List<Comments> commentses = new ArrayList<Comments>(2);
        while (cursor.hasNext())
            commentses.add(cursor.next());
        try {
            cursor.close();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        List<Comment> comments = new ArrayList<Comment>(100);
        // Iterate pages oldest-first so the flattened list is in ascending time order.
        for (int i = commentses.size() - 1; i >= 0; i--) {
            for (Comment comment : commentses.get(i).getComments()) {
                if (comment.deleted == null && comment.created.before(until))
                    comments.add(comment);
            }
        }
        // Keep only the newest `limit` entries.
        if (comments.size() > limit)
            comments.subList(0, comments.size() - limit).clear();
        postproduct(comments, userId);
        if (comments.size() == limit) {
            previous = String.format("until=%d&limit=%d", comments.get(0).getCreated().getTime(), limit);
        }
        return new Page(comments, previous);
    }

    /**
     * Converts raw storage fields into the client view: derives likeCount and
     * liked (for the given viewer) from the likes set, then drops the set so
     * it is not serialized.
     */
    public static void postproduct(List<Comment> comments, ObjectId userId) {
        for (Comment comment : comments) {
            if (comment.likes == null) {
                comment.likeCount = 0;
                comment.liked = false;
            } else {
                comment.likeCount = comment.likes.size();
                comment.liked = comment.likes.contains(userId);
                comment.likes = null;
            }
        }
    }

    /**
     * Records a like on a comment in both the comment-page document and the
     * post-embedded copy, invalidates the post cache, and notifies the
     * comment's author (unless they liked their own comment).
     */
    public static void like(ObjectId commentId, ObjectId userId) {
        MongoCollection commentCol = jongo.getCollection("comment");
        MongoCollection postCol = jongo.getCollection("post");
        Comments comments = commentCol.findAndModify("{'comments._id':#}", commentId)
                .with("{$addToSet:{'comments.$.likes':#},$inc:{'comments.$.like_count':1}}", userId)
                .projection("{comments:{$elemMatch:{_id:#}}}", commentId)
                .as(Comments.class);
        if (comments == null)
            return;
        Post post = postCol.findAndModify("{'comments._id':#}", commentId)
                .with("{$addToSet:{'comments.$.likes':#},$inc:{'comments.$.like_count':1}}", userId)
                .projection("{_id:1}")
                .as(Post.class);
        if (post != null) {
            del("post:" + post.getId());
            ObjectId commentator = comments.getComments().get(0).getUserId();
            if (!userId.equals(commentator)) {
                new Activity(userId, ActivityType.likeYourComment, post.getId(), commentator)
                        .queue();
            }
        }
    }

    /**
     * Removes a like from a comment in both storage locations and invalidates
     * the post cache.
     */
    public static void unlike(ObjectId commentId, ObjectId userId) {
        MongoCollection postCol = jongo.getCollection("post");
        MongoCollection commentCol = jongo.getCollection("comment");
        commentCol.update("{'comments._id':#}", commentId)
                .with("{$pull:{'comments.$.likes':#}}", userId);
        Post post = postCol.findAndModify("{'comments._id':#}", commentId)
                .with("{$pull:{'comments.$.likes':#},$inc:{'comments.$.like_count':-1}}", userId)
                .projection("{_id:1}")
                .as(Post.class);
        if (post != null)
            del("post:" + post.getId());
    }

    /**
     * Soft-deletes a comment. Allowed for the comment's author or for the
     * post's owner; decrements the post's count and removes the embedded copy.
     */
    public static void delete(ObjectId commentId, ObjectId userId) {
        MongoCollection postCol = jongo.getCollection("post");
        MongoCollection commentCol = jongo.getCollection("comment");
        // First try: the caller is the comment's author.
        Comments comments = commentCol
                .findAndModify("{comments:{$elemMatch:{_id:#,user_id:#}}}", commentId, userId)
                .with("{$set:{'comments.$.deleted':#}}", true)
                .projection("{post_id:1}")
                .as(Comments.class);
        if (comments == null) {
            // a post owner can delete a another's comments
            comments = commentCol
                    .findAndModify("{user_id:#,comments:{$elemMatch:{_id:#}}}", userId, commentId)
                    .with("{$set:{'comments.$.deleted':#}}", true)
                    .projection("{post_id:1}")
                    .as(Comments.class);
            if (comments == null)
                return;
        }
        postCol.update(comments.getPostId())
                .with("{$inc:{comment_count:-1},$pull:{comments:{_id:#}}}", commentId);
        del("post:" + comments.getPostId());
    }
}
// License: Apache 2.0. See LICENSE file in root directory.
// Copyright(c) 2017 Intel Corporation. All Rights Reserved

#pragma once
#ifndef CAMERA_CORE_H  // NOLINT(build/header_guard)
#define CAMERA_CORE_H

#include <cstdlib>
#include <cctype>
#include <sstream>
#include <iostream>
#include <vector>
#include <map>
#include <string>
#include <sensor_msgs/PointCloud2.h>
#include <sensor_msgs/Imu.h>
#include <sensor_msgs/point_cloud2_iterator.h>
#include <sensor_msgs/image_encodings.h>
#include <image_transport/image_transport.h>
#include <camera_info_manager/camera_info_manager.h>
#include <std_msgs/String.h>
#include <ros/ros.h>
#include <std_msgs/Float32MultiArray.h>
#include <cv_bridge/cv_bridge.h>
#include <tf/transform_listener.h>
#include <librealsense/rs.h>
#include <realsense_ros_camera/constants.h>

// Shared state for the camera unit test node.
// NOTE(review): these are variable *definitions* in a header; this is only safe
// while the header is included from a single translation unit (the test) —
// confirm before including it elsewhere (ODR violations otherwise).

// Number of streams tracked by the arrays below (depth, color, fisheye, ...).
const int STREAM_COUNT = 5;

// utest commandline args
int g_color_height_exp = 0;
int g_color_width_exp = 0;
int g_depth_height_exp = 0;
int g_depth_width_exp = 0;
int g_fisheye_height_exp = 0;
int g_fisheye_width_exp = 0;
uint32_t g_depth_step_exp;  // Expected depth step.
uint32_t g_color_step_exp;  // Expected color step.

// Which streams the test enables on the camera.
bool g_enable_color = true;
bool g_enable_depth = true;
bool g_enable_fisheye = false;
bool g_enable_imu = false;

std::string g_depth_encoding_exp;  // Expected depth encoding.
std::string g_color_encoding_exp;  // Expected color encoding.

// Raw key=value arguments parsed from the test command line.
std::map<std::string, std::string> g_config_args;

const float R200_MAX_Z = 10.0f;
double g_max_z = R200_MAX_Z * 1000.0f;  // Converting meter to mm.

// Per-stream "message received" flags, set by the subscriber callbacks.
bool g_depth_recv = false;
bool g_color_recv = false;
bool g_fisheye_recv = false;
bool g_accel_recv = false;
bool g_gyro_recv = false;

// Running average pixel values used to verify non-empty images.
float g_depth_avg = 0.0f;
float g_color_avg = 0.0f;
float g_fisheye_avg = 0.0f;
float g_pc_depth_avg = 0.0f;

// Actual values received on each stream, compared against the *_exp values.
int g_height_recv[STREAM_COUNT] = {0};
int g_width_recv[STREAM_COUNT] = {0};
uint32_t g_step_recv[STREAM_COUNT] = {0};  // Received stream step.
std::string g_encoding_recv[STREAM_COUNT];  // Expected stream encoding.

// CameraInfo fields received per stream.
int g_caminfo_height_recv[STREAM_COUNT] = {0};
int g_caminfo_width_recv[STREAM_COUNT] = {0};
double g_color_caminfo_D_recv[5] = {0.0};
double g_depth_caminfo_D_recv[5] = {0.0};
double g_fisheye_caminfo_D_recv[5] = {0.0};
double g_caminfo_rotation_recv[STREAM_COUNT][9] = {{0.0}};
double g_caminfo_projection_recv[STREAM_COUNT][12] = {{0.0}};
std::string g_dmodel_recv[STREAM_COUNT];
std::string g_camera_type;

#endif  // CAMERA_CORE_H  // NOLINT(build/header_guard)
<filename>src/domain/networking/packets/Ping.ts // // Ping.ts // // Created by <NAME> on 6 Sep 2021. // Copyright 2021 Vircadia contributors. // // Distributed under the Apache License, Version 2.0. // See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html // import UDT from "../udt/UDT"; import assert from "../../shared/assert"; type PingDetails = { pingType: number, timestamp: bigint, connectionID: bigint }; const Ping = new class { // C++ N/A /*@devdoc * Information returned by {@link PacketScribe|reading} a {@link PacketType(1)|Ping} packet. * @typedef {object} PacketScribe.PingDetails * @property {PingType} pingType - The type of ping. * @property {bigint} timestamp - The time at which the ping packet was created, in usec. * @property {bigint} connectionID - The ID of the connection that the ping was sent on. */ /*@devdoc * Reads a {@link PacketType(1)|Ping} packet. * @function PacketScribe.Ping&period;read * @param {DataView} data - The {@link Packets|Ping} message data to read. * @returns {PacketScribe.PingDetails} The ping details. */ read(data: DataView): PingDetails { /* eslint-disable-line class-methods-use-this */ // C++ void NodeList::processPingPacket(ReceivedMessage* message, Node* sendingNode) /* eslint-disable @typescript-eslint/no-magic-numbers */ let dataPosition = 0; const pingType = data.getUint8(dataPosition); dataPosition += 1; const timestamp = data.getBigUint64(dataPosition, UDT.LITTLE_ENDIAN); dataPosition += 8; const connectionID = data.getBigUint64(dataPosition, UDT.LITTLE_ENDIAN); dataPosition += 8; /* eslint-enable @typescript-eslint/no-magic-numbers */ assert(dataPosition === data.byteLength, "ERROR: Length mismatch reading Ping packet!"); return { pingType, timestamp, connectionID }; } }(); export default Ping; export type { PingDetails };
package io.opensphere.overlay.worldmap;

import java.awt.Color;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ScheduledExecutorService;

import io.opensphere.core.geometry.Geometry;
import io.opensphere.core.geometry.PolylineGeometry;
import io.opensphere.core.geometry.renderproperties.DefaultPolylineRenderProperties;
import io.opensphere.core.geometry.renderproperties.PolylineRenderProperties;
import io.opensphere.core.geometry.renderproperties.StippleModelConfig;
import io.opensphere.core.hud.framework.Component;
import io.opensphere.core.hud.framework.Renderable;
import io.opensphere.core.model.Altitude;
import io.opensphere.core.model.GeographicPosition;
import io.opensphere.core.model.LatLonAlt;
import io.opensphere.core.model.ScreenBoundingBox;
import io.opensphere.core.model.ScreenPosition;
import io.opensphere.core.util.concurrent.ProcrastinatingExecutor;
import io.opensphere.core.util.model.GeographicUtilities;
import io.opensphere.core.viewer.ViewChangeSupport;
import io.opensphere.core.viewer.ViewChangeSupport.ViewChangeListener;
import io.opensphere.core.viewer.Viewer;
import io.opensphere.core.viewer.impl.Viewer3D;

/** Footprint for the world map. */
public class WorldMapFootPrint extends Renderable
{
    /** Listen to events from the main viewer. */
    private final ViewChangeListener myMainViewListener = new ViewChangeListener()
    {
        @Override
        public void viewChanged(Viewer viewer, ViewChangeSupport.ViewChangeType type)
        {
            // Redraw off the event thread; the procrastinating executor
            // coalesces rapid view changes into a single redraw.
            myViewChangeExecutor.execute(new Runnable()
            {
                @Override
                public void run()
                {
                    drawFootPrint();
                }
            });
        }
    };

    /** Executor to handle view changes. */
    private final ProcrastinatingExecutor myViewChangeExecutor;

    /**
     * Construct me.
     *
     * @param parent parent component.
     * @param executor Executor shared by HUD components.
     */
    public WorldMapFootPrint(Component parent, ScheduledExecutorService executor)
    {
        super(parent);
        myViewChangeExecutor = new ProcrastinatingExecutor(executor);
    }

    @Override
    public void handleCleanupListeners()
    {
        super.handleCleanupListeners();
        getTransformer().getToolbox().getMapManager().getViewChangeSupport().removeViewChangeListener(myMainViewListener);
    }

    @Override
    public void init()
    {
        super.init();
        // Register as a listener for view change events
        getTransformer().getToolbox().getMapManager().getViewChangeSupport().addViewChangeListener(myMainViewListener);
        drawFootPrint();
    }

    /**
     * This method assures the point passed in is within the range of -180 to
     * 180. If the longitude is outside of the range it will be converted. So
     * for example a value of -185 becomes 175.
     *
     * @param position The GeographicPosition to check.
     * @return position The updated position.
     */
    private GeographicPosition checkLongitudes(GeographicPosition position)
    {
        GeographicPosition geoPosition = null;
        if (Math.abs(position.getLatLonAlt().getLonD()) > 180d)
        {
            if (position.getLatLonAlt().getLonD() < -180d)
            {
                geoPosition = new GeographicPosition(LatLonAlt.createFromDegreesMeters(position.getLatLonAlt().getLatD(),
                        position.getLatLonAlt().getLonD() + 360d, position.getLatLonAlt().getAltM(),
                        Altitude.ReferenceLevel.TERRAIN));
            }
            else if (position.getLatLonAlt().getLonD() > 180d)
            {
                geoPosition = new GeographicPosition(LatLonAlt.createFromDegreesMeters(position.getLatLonAlt().getLatD(),
                        position.getLatLonAlt().getLonD() - 360d, position.getLatLonAlt().getAltM(),
                        Altitude.ReferenceLevel.TERRAIN));
            }
        }
        else
        {
            geoPosition = position;
        }
        return geoPosition;
    }

    /**
     * Draw a footprint of the visual part of the map currently seen.
     */
    synchronized void drawFootPrint()
    {
        List<List<ScreenPosition>> footPrint = getFootPrint();
        // Snapshot the old geometries so updateGeometries can diff old vs new.
        Set<Geometry> startGeoms = new HashSet<>(getGeometries());
        getGeometries().clear();
        for (List<ScreenPosition> positions : footPrint)
        {
            if (!positions.isEmpty())
            {
                PolylineGeometry.Builder<ScreenPosition> polyBuilder = new PolylineGeometry.Builder<>();
                PolylineRenderProperties props = new DefaultPolylineRenderProperties(getBaseZOrder() + 4, true, false);
                props.setStipple(StippleModelConfig.DOTTED_3);
                props.setColor(Color.RED);
                props.setWidth(2f);
                polyBuilder.setVertices(positions);
                PolylineGeometry line = new PolylineGeometry(polyBuilder, props, null);
                getGeometries().add(line);
            }
        }
        Set<Geometry> endGeoms = getGeometries();
        updateGeometries(endGeoms, startGeoms);
    }

    /**
     * Calculate the screen positions that compose the footprint.
     *
     * @return List of screen positions describing footprint.
     */
    private List<List<ScreenPosition>> getFootPrint()
    {
        List<List<ScreenPosition>> screenPositions = new ArrayList<>();

        // Get the list of geographic boundary points
        List<GeographicPosition> boundaryPoints = getTransformer().getToolbox().getMapManager().getVisibleBoundaries();

        // Now do a check for crossing the longitude boundary
        // This may result in two collections of points.
        // This only needs to be performed for 3-d
        List<List<GeographicPosition>> footPrints = new ArrayList<>();
        Viewer view = getTransformer().getToolbox().getMapManager().getStandardViewer();
        if (view instanceof Viewer3D)
        {
            footPrints = longitudeBoundaryCheck(boundaryPoints);
        }
        else
        {
            footPrints.add(boundaryPoints);
        }

        // For these points we need to scale to our mini map and
        // convert to screen coordinates.
        ScreenBoundingBox bbox = getDrawBounds();
        for (List<GeographicPosition> footprint : footPrints)
        {
            List<ScreenPosition> screenPos = GeographicUtilities.toScreenPositions(footprint, bbox);
            screenPositions.add(screenPos);
        }

        return screenPositions;
    }

    /**
     * Given a list of positions that constitute a polygon, go through and check
     * if the longitude boundary is crossed. If so, create two separate
     * collections of points to return.
     *
     * @param origPositions The original polygon points.
     * @return A list of lists of points. If there are no border crossings only
     *         one list is returned and if there is a border crossing there will
     *         be two lists returned.
     */
    private List<List<GeographicPosition>> longitudeBoundaryCheck(List<GeographicPosition> origPositions)
    {
        List<List<GeographicPosition>> newPositions = new ArrayList<>();
        GeographicPosition p0 = null;
        if (origPositions != null && !origPositions.isEmpty())
        {
            p0 = origPositions.get(0);
        }
        else
        {
            return newPositions;
        }
        List<GeographicPosition> positions = new ArrayList<>();
        List<GeographicPosition> crossedPositions = new ArrayList<>();
        boolean crossLongitudeBorder = false;
        GeographicPosition p1 = null;
        // Walk consecutive pairs (p0, p1); points accumulate into `positions`
        // until a crossing is detected, then into `crossedPositions` until the
        // polygon crosses back.
        for (int i = 0; i < origPositions.size(); ++i)
        {
            p1 = origPositions.get(i);
            // Need to assure longitude is within -180 to 180 range for p0
            p0 = checkLongitudes(p0);
            // And again for p1
            p1 = checkLongitudes(p1);
            // Now check to see if we cross the longitude boundary
            if (p0.getLatLonAlt().positionsCrossLongitudeBoundary(p1.getLatLonAlt()))
            {
                if (!crossLongitudeBorder) // p0 on the inside and p1 on the
                                           // outside
                {
                    // Add border points to collections
                    if (p0.getLatLonAlt().getLonD() > 0)
                    {
                        // Add border intersection points back in
                        // TODO find correct Latitude intersection point for
                        // these
                        positions.add(new GeographicPosition(LatLonAlt.createFromDegreesMeters(p1.getLatLonAlt().getLatD(), 180d,
                                p1.getLatLonAlt().getAltM(), p1.getLatLonAlt().getAltitudeReference())));
                        crossedPositions.add(new GeographicPosition(LatLonAlt.createFromDegreesMeters(p1.getLatLonAlt().getLatD(),
                                -180d, p1.getLatLonAlt().getAltM(), p1.getLatLonAlt().getAltitudeReference())));
                    }
                    else
                    {
                        positions.add(new GeographicPosition(LatLonAlt.createFromDegreesMeters(p1.getLatLonAlt().getLatD(), -180d,
                                p1.getLatLonAlt().getAltM(), p1.getLatLonAlt().getAltitudeReference())));
                        crossedPositions.add(new GeographicPosition(LatLonAlt.createFromDegreesMeters(p1.getLatLonAlt().getLatD(),
                                180d, p1.getLatLonAlt().getAltM(), p1.getLatLonAlt().getAltitudeReference())));
                    }
                }
                else // p1 on the inside and p0 on the outside (coming back in)
                {
                    // Add border points to collections
                    if (p1.getLatLonAlt().getLonD() < 0)
                    {
                        // Add border intersection points back in
                        // TODO find correct Latitude intersection point for
                        // these
                        positions.add(new GeographicPosition(LatLonAlt.createFromDegreesMeters(p1.getLatLonAlt().getLatD(), -180d,
                                p1.getLatLonAlt().getAltM(), p1.getLatLonAlt().getAltitudeReference())));
                        crossedPositions.add(new GeographicPosition(LatLonAlt.createFromDegreesMeters(p1.getLatLonAlt().getLatD(),
                                180d, p1.getLatLonAlt().getAltM(), p1.getLatLonAlt().getAltitudeReference())));
                    }
                    else
                    {
                        positions.add(new GeographicPosition(LatLonAlt.createFromDegreesMeters(p1.getLatLonAlt().getLatD(), 180d,
                                p1.getLatLonAlt().getAltM(), p1.getLatLonAlt().getAltitudeReference())));
                        crossedPositions.add(new GeographicPosition(LatLonAlt.createFromDegreesMeters(p1.getLatLonAlt().getLatD(),
                                -180d, p1.getLatLonAlt().getAltM(), p1.getLatLonAlt().getAltitudeReference())));
                    }
                }
                // First time through set the boolean that we have crossed.
                // Assuming a polygon, if we cross over the border we should
                // come back in. Second time through set back to false;
                crossLongitudeBorder = !crossLongitudeBorder;
            }
            // If we have crossed the border, then put these points in
            // crossedPositions
            if (crossLongitudeBorder)
            {
                crossedPositions.add(p1);
            }
            else
            {
                positions.add(p1);
            }
            p0 = p1;
        }
        newPositions.add(positions);
        if (!crossedPositions.isEmpty())
        {
            newPositions.add(crossedPositions);
        }
        return newPositions;
    }
}
class MYStack:
    """A minimal LIFO stack backed by a Python list.

    The top of the stack is the end of the underlying list, so push/pop
    are amortized O(1).
    """

    def __init__(self):
        # Internal storage; last element is the top of the stack.
        self._data = []

    def push(self, item):
        """Place ``item`` on top of the stack."""
        self._data.append(item)

    def pop(self):
        """Remove and return the top item.

        Raises:
            IndexError: if the stack is empty.
        """
        return self._data.pop()

    def size(self):
        """Return the number of items currently on the stack."""
        return len(self._data)

    def is_empty(self):
        """Return True when the stack holds no items."""
        return len(self._data) == 0
#!/usr/bin/env bash
# Run step "$1" for the common suite, then for every suite named in "$2"
# (space-separated).  Each step is suites/<suite>/$1.sh; a suite may veto a
# step by providing a failing suites/<suite>/is_suitable.sh.
# Exits with the status of the last failing step, or 0 if all succeeded.
set -e

# Resolve this script's directory once instead of re-running the
# dirname/cd/pwd subshell for every path construction below.
script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

code=0

# Common step runs first, if present.  "|| code=$?" records the failure
# without letting "set -e" abort the remaining suites.
common_step_script="${script_dir}/suites/common/$1.sh"
if [ -f "${common_step_script}" ]; then
    /usr/bin/env bash "${common_step_script}" || code=$?
fi

# Split the suite list on spaces into an array.
IFS=' ' read -r -a suites <<< "$2"
for suite in "${suites[@]}"; do
    suite_step_script="${script_dir}/suites/${suite}/$1.sh"
    if [ -f "${suite_step_script}" ]; then
        # Optional gate: a non-zero is_suitable.sh skips this suite's step.
        suite_step_is_suitable_script="${script_dir}/suites/${suite}/is_suitable.sh"
        if [ -f "${suite_step_is_suitable_script}" ]; then
            /usr/bin/env bash "${suite_step_is_suitable_script}" || continue
        fi
        /usr/bin/env bash "${suite_step_script}" || code=$?
    fi
done

exit ${code}
# Over the 10,000 most recent QuestComments, count how many are their
# author's very first comment ("first") or second comment ("second"),
# and collect the distinct author ids seen along the way.
first = 0
second = 0
uids = set()

for comment in QuestComment.all_objects.order_by('-id')[:10000]:
    # All of this author's comments, oldest first.
    author_comments = comment.author.comments.all().order_by('id')
    uids.add(comment.author_id)
    try:
        if int(comment.id) == int(author_comments[0].id):
            first += 1
            continue
    except IndexError:
        # Author has no comments at all; nothing to compare against.
        pass
    try:
        if int(comment.id) == int(author_comments[1].id):
            second += 1
    except IndexError:
        # Author has fewer than two comments.
        pass
<reponame>PHSI-supplements/textbook-code /* * enum_examples.c (c) 2018-20 <NAME> */ #include <stdio.h> #include "enum_examples.h" int main() { enum days day = WEDNESDAY; printf("%d\n", day); char suitString[9] = ""; suits s = HEARTS; printf("%s\n", print_suit(s, suitString)); } char *print_suit(suits suit, char *suit_string) { switch (suit) { case CLUBS: suit_string = "CLUBS"; break; case DIAMONDS: suit_string = "DIAMONDS"; break; case HEARTS: suit_string = "HEARTS"; break; case SPADES: suit_string = "SPADES"; break; default: suit_string = "UNKNOWN"; } return suit_string; }
# Launcher for GiBUU/MINERvA analysis jobs: exports the file lists each
# analysis mode reads, builds anaGenerator, then backgrounds doAna.sh runs.
# NOTE(review): the commented-out values are alternative file lists kept for
# reference; only the GFS0PI/GFSPIZERO Carbon lists are currently active.
export GFS0PIa9nuH= #"list/GiBUUMINERvA_LE_Neutrino_more_ensembles_H_Hydrogen_1000files.list"
export GFS0PIa9nuC="list/GiBUUMINERvA_LE_Neutrino_current_Carbon_250files.list" #'list/GiBUUMINERvA_LE_Neutrino_current_Carbon_10files.list' #'list/Filelist_GiBUUMINERvALE_nu_T0_Carbon.list' #
export GFS0PIa9nuNOFSIC= #"list/GiBUUMINERvA_LE_Neutrino_NoFSI_Carbon_250files.list"

# GFSPIZERO modes reuse the GFS0PI lists.
export GFSPIZEROa7nuH=${GFS0PIa9nuH}
export GFSPIZEROa7nuC=${GFS0PIa9nuC}
export GFSPIZEROa7nuNOFSIC=${GFS0PIa9nuNOFSIC}

# Remaining modes currently have no input lists assigned (empty/unset).
export EXCL3a10nuH=
export EXCL3a10nubarH=
export EXCL3a10nuC=
export EXCL3a10nubarC=
export LOWRECOILa1nuC=""
export LOWRECOILa1nubarC=""
export LOWRECOILa1nuH=""
export LOWRECOILa1nubarH=""
export NUBAR1PIa2nubarC=""
export NUBAR1PIa2nubarH=""
export MMECCQEa4nuC=""
export GFSa3nuC=""
export NUGASa8P50=

# nfile limits how many files each job processes (empty = no limit);
# tag prefixes the output/option names.  Old tag choices kept as comments.
nfile=
tag=MINERvALE_GiBUU_newE${nfile}_ #MINERvAGiBUU_LE_ #MINERvAGiBUU_PIZEROrep_ #DUNEGiBUU_${nfile}_ #MINERvAGiBUU_protectedBkg_${nfile}_ #MINERvAGiBUU_PiCut2Fixed_${nfile}_

####################################

# Build the analysis executable; abort the script if the build fails.
mkexe.sh anaGenerator -I${NUGENTKI}/include -lstyle -I${NUGENTKI}/style -L${NUGENTKI}/style || exit
#exit

#GFS 7
#opt=${tag}GFSPIZEROa7nuH; nohup ./doAna.sh $GFSPIZEROa7nuH ${opt} 7 ${nfile} > see${opt}.log &
opt=${tag}GFSPIZEROa7nuC; nohup ./doAna.sh $GFSPIZEROa7nuC ${opt} 7 ${nfile} > see${opt}.log &
#opt=${tag}GFSPIZEROa7nuNOFSIC; nohup ./doAna.sh $GFSPIZEROa7nuNOFSIC ${opt} 7 ${nfile} > see${opt}.log &
#exit

#GFS0PI 9
#opt=${tag}GFS0PIa9nuH; nohup ./doAna.sh $GFS0PIa9nuH ${opt} 9 ${nfile} > see${opt}.log &
opt=${tag}GFS0PIa9nuC; nohup ./doAna.sh $GFS0PIa9nuC ${opt} 9 ${nfile} > see${opt}.log &
#opt=${tag}GFS0PIa9nuNOFSIC; nohup ./doAna.sh $GFS0PIa9nuNOFSIC ${opt} 9 ${nfile} > see${opt}.log &

# NOTE(review): this unconditional exit means every section below never runs;
# the trailing blocks are retained as manually-enabled alternatives.
exit

#EXCL3 10
opt=${tag}EXCL3a10nuH; nohup ./doAna.sh $EXCL3a10nuH ${opt} 10 ${nfile} > see${opt}.log &
opt=${tag}EXCL3a10nubarH; nohup ./doAna.sh $EXCL3a10nubarH ${opt} 10 ${nfile} > see${opt}.log &
opt=${tag}EXCL3a10nuC; nohup ./doAna.sh $EXCL3a10nuC ${opt} 10 ${nfile} > see${opt}.log &
opt=${tag}EXCL3a10nubarC; nohup ./doAna.sh $EXCL3a10nubarC ${opt} 10 ${nfile} > see${opt}.log &
exit

#NUGAS 8
opt=${tag}NUGASa8P50; nohup ./doAna.sh $NUGASa8P50 ${opt} 8 ${nfile} > see${opt}.log &
exit

#MMECCQE 4 only carbon
opt=${tag}MMECCQEa4nuC; nohup ./doAna.sh $MMECCQEa4nuC ${opt} 4 ${nfile} > see${opt}.log &
exit

#GFS 3
opt=${tag}GFSa3nuC; nohup ./doAna.sh $GFSa3nuC ${opt} 3 > see${opt}.log &
exit

#LOWRECOIL 1
opt=${tag}LOWRECOILa1nuC; nohup ./doAna.sh $LOWRECOILa1nuC ${opt} 1 > see${opt}.log &
opt=${tag}LOWRECOILa1nubarC; nohup ./doAna.sh $LOWRECOILa1nubarC ${opt} 1 > see${opt}.log &
opt=${tag}LOWRECOILa1nuH; nohup ./doAna.sh $LOWRECOILa1nuH ${opt} 1 > see${opt}.log &
opt=${tag}LOWRECOILa1nubarH; nohup ./doAna.sh $LOWRECOILa1nubarH ${opt} 1 > see${opt}.log &

#NUBAR1PI 2
opt=${tag}NUBAR1PIa2nubarC; nohup ./doAna.sh $NUBAR1PIa2nubarC ${opt} 2 > see${opt}.log &
opt=${tag}NUBAR1PIa2nubarH; nohup ./doAna.sh $NUBAR1PIa2nubarH ${opt} 2 > see${opt}.log &
exit
#!/bin/bash
# Render each HTML file named by a hash (one hash per line in "$1") to a
# sequentially numbered 8K PNG via cutycapt.  Empty HTML files are skipped
# and do not consume an output number.
#
# FIX: all parameter and command expansions are now quoted so hashes
# containing spaces or glob characters cannot break word splitting.
i=0
while IFS='' read -r line ; do
    printf -v num '%07d' "$i"
    echo "Converting hash: $line --> ${num}.png"
    if [ -s "${line}.html" ] ; then
        # Only successful candidates advance the output counter.
        i=$((i+1))
        cutycapt --min-width=7680 --min-height=4320 \
            --url="file://$(pwd)/${line}.html" --out="${num}.png"
    else
        echo "empty file, skip."
    fi ;
done < "$1"
// <NAME>, Geometric Tools, Redmond WA 98052
// Copyright (c) 1998-2016
// Distributed under the Boost Software License, Version 1.0.
// http://www.boost.org/LICENSE_1_0.txt
// http://www.geometrictools.com/License/Boost/LICENSE_1_0.txt
// File Version: 3.0.0 (2016/06/19)

#include <GTEnginePCH.h>
#include <Graphics/GteTransform.h>
#include <algorithm>
using namespace gte;

// Shared constant identity transform.
Transform const Transform::IDENTITY;

// Default-construct the identity transform.  Translation/scale/rotation
// channels are stored separately; mHMatrix caches the composed homogeneous
// matrix and mInvHMatrix caches its inverse (recomputed lazily, see
// mInverseNeedsUpdate).
Transform::Transform()
    :
    mTranslate({ 0.0f, 0.0f, 0.0f, 1.0f }),
    mScale({ 1.0f, 1.0f, 1.0f, 1.0f }),
    mIsIdentity(true),
    mIsRSMatrix(true),
    mIsUniformScale(true),
    mInverseNeedsUpdate(false)
{
    mHMatrix.MakeIdentity();
    mInvHMatrix.MakeIdentity();
    mMatrix.MakeIdentity();
}

// Reset every channel to the identity and refresh the cached matrix.
void Transform::MakeIdentity()
{
    mMatrix.MakeIdentity();
    mTranslate = { 0.0f, 0.0f, 0.0f, 1.0f };
    mScale = { 1.0f, 1.0f, 1.0f, 1.0f };
    mIsIdentity = true;
    mIsRSMatrix = true;
    mIsUniformScale = true;
    UpdateHMatrix();
}

// Set uniform scale 1; only valid when the matrix channel is rotation-scale.
void Transform::MakeUnitScale()
{
    LogAssert(mIsRSMatrix, "Transform is not rotation-scale.");
    mScale = { 1.0f, 1.0f, 1.0f, 1.0f };
    mIsUniformScale = true;
    UpdateHMatrix();
}

// Install a rotation stored as a 4x4 matrix; marks the matrix channel as
// rotation-scale.
void Transform::SetRotation(Matrix4x4<float> const& rotate)
{
    mMatrix = rotate;
    mIsIdentity = false;
    mIsRSMatrix = true;
    UpdateHMatrix();
}

// Install a general (not necessarily rotation-scale) matrix channel.
void Transform::SetMatrix(Matrix4x4<float> const& matrix)
{
    mMatrix = matrix;
    mIsIdentity = false;
    mIsRSMatrix = false;
    mIsUniformScale = false;
    UpdateHMatrix();
}

// Set the translation channel from three components.
void Transform::SetTranslation(float x0, float x1, float x2)
{
    mTranslate = { x0, x1, x2, 1.0f };
    mIsIdentity = false;
    UpdateHMatrix();
}

// Convenience overload; forwards the xyz components.
void Transform::SetTranslation(Vector3<float> const& translate)
{
    SetTranslation(translate[0], translate[1], translate[2]);
}

// Convenience overload; the w component of the input is ignored.
void Transform::SetTranslation(Vector4<float> const& translate)
{
    SetTranslation(translate[0], translate[1], translate[2]);
}

// Set a per-axis (nonuniform) scale; requires a rotation-scale matrix
// channel and nonzero scales (so the inverse exists).
void Transform::SetScale(float s0, float s1, float s2)
{
    LogAssert(mIsRSMatrix, "Transform is not rotation-scale.");
    LogAssert(s0 != 0.0f && s1 != 0.0f && s2 != 0.0f, "Scales must be nonzero.");
    mScale = { s0, s1, s2, 1.0f };
    mIsIdentity = false;
    mIsUniformScale = false;
    UpdateHMatrix();
}

// Convenience overload; forwards the xyz components.
void Transform::SetScale(Vector3<float> const& scale)
{
    SetScale(scale[0], scale[1], scale[2]);
}

// Convenience overload; the w component of the input is ignored.
void Transform::SetScale(Vector4<float> const& scale)
{
    SetScale(scale[0], scale[1], scale[2]);
}

// Set the same nonzero scale on all three axes.
void Transform::SetUniformScale(float scale)
{
    LogAssert(mIsRSMatrix, "Transform is not rotation-scale.");
    LogAssert(scale != 0.0f, "Scale must be nonzero.");
    mScale = { scale, scale, scale, 1.0f };
    mIsIdentity = false;
    mIsUniformScale = true;
    UpdateHMatrix();
}

// Install a rotation from a 3x3 matrix, embedded in the upper-left block of
// the 4x4 matrix channel.
void Transform::SetRotation(Matrix3x3<float> const& rotate)
{
    mMatrix.MakeIdentity();
    for (int r = 0; r < 3; ++r)
    {
        for (int c = 0; c < 3; ++c)
        {
            mMatrix(r, c) = rotate(r, c);
        }
    }
    mIsIdentity = false;
    mIsRSMatrix = true;
    UpdateHMatrix();
}

// Extract the rotation as a 3x3 matrix (upper-left block of the channel).
void Transform::GetRotation(Matrix3x3<float>& rotate) const
{
    LogAssert(mIsRSMatrix, "Transform is not rotation-scale.");
    for (int r = 0; r < 3; ++r)
    {
        for (int c = 0; c < 3; ++c)
        {
            rotate(r, c) = mMatrix(r, c);
        }
    }
}

// Install a rotation from a quaternion (converted via Rotation<4,float>).
void Transform::SetRotation(Quaternion<float> const& q)
{
    mMatrix = Rotation<4, float>(q);
    mIsIdentity = false;
    mIsRSMatrix = true;
    UpdateHMatrix();
}

// Extract the rotation as a quaternion.
void Transform::GetRotation(Quaternion<float>& q) const
{
    LogAssert(mIsRSMatrix, "Transform is not rotation-scale.");
    q = Rotation<4, float>(mMatrix);
}

// Install a rotation from an axis-angle representation.
void Transform::SetRotation(AxisAngle<4, float> const& axisAngle)
{
    mMatrix = Rotation<4, float>(axisAngle);
    mIsIdentity = false;
    mIsRSMatrix = true;
    UpdateHMatrix();
}

// Extract the rotation as an axis-angle representation.
void Transform::GetRotation(AxisAngle<4, float>& axisAngle) const
{
    LogAssert(mIsRSMatrix, "Transform is not rotation-scale.");
    axisAngle = Rotation<4, float>(mMatrix);
}

// Install a rotation from Euler angles.
void Transform::SetRotation(EulerAngles<float> const& eulerAngles)
{
    mMatrix = Rotation<4, float>(eulerAngles);
    mIsIdentity = false;
    mIsRSMatrix = true;
    UpdateHMatrix();
}

// Extract the rotation as Euler angles, using the axis order already stored
// in the caller-provided eulerAngles object.
void Transform::GetRotation(EulerAngles<float>& eulerAngles) const
{
    LogAssert(mIsRSMatrix, "Transform is not rotation-scale.");
    eulerAngles = Rotation<4, float>(mMatrix)(eulerAngles.axis[0],
        eulerAngles.axis[1], eulerAngles.axis[2]);
}

// Return an upper bound on the maximum scale applied by this transform
// (exact for rotation-scale channels; a matrix-norm bound otherwise).
float Transform::GetNorm() const
{
    float r0, r1, r2;

    if (mIsRSMatrix)
    {
        // A RS matrix (GTE_USE_MAT_VEC) or an SR matrix (GTE_USE_VEC_MAT).
        r0 = fabs(mScale[0]);
        r1 = fabs(mScale[1]);
        r2 = fabs(mScale[2]);
    }
    else
    {
        // The spectral norm (the maximum absolute value of the eigenvalues)
        // is smaller or equal to this norm.  Therefore, this function returns
        // an approximation to the maximum scale.

#if defined(GTE_USE_MAT_VEC)
        // Use the max-row-sum matrix norm.
        r0 = fabs(mMatrix(0, 0)) + fabs(mMatrix(0, 1)) + fabs(mMatrix(0, 2));
        r1 = fabs(mMatrix(1, 0)) + fabs(mMatrix(1, 1)) + fabs(mMatrix(1, 2));
        r2 = fabs(mMatrix(2, 0)) + fabs(mMatrix(2, 1)) + fabs(mMatrix(2, 2));
#else
        // Use the max-col-sum matrix norm.
        r0 = fabs(mMatrix(0, 0)) + fabs(mMatrix(1, 0)) + fabs(mMatrix(2, 0));
        r1 = fabs(mMatrix(0, 1)) + fabs(mMatrix(1, 1)) + fabs(mMatrix(2, 1));
        r2 = fabs(mMatrix(0, 2)) + fabs(mMatrix(1, 2)) + fabs(mMatrix(2, 2));
#endif
    }

    return std::max(std::max(r0, r1), r2);
}

// Lazily (re)compute and return the cached inverse homogeneous matrix.
// Cheap closed forms are used for rotation-scale channels; otherwise the
// upper 3x3 block is inverted numerically by Invert3x3.
Matrix4x4<float> const& Transform::GetHInverse() const
{
    if (mInverseNeedsUpdate)
    {
        if (mIsIdentity)
        {
            mInvHMatrix.MakeIdentity();
        }
        else
        {
            if (mIsRSMatrix)
            {
                if (mIsUniformScale)
                {
                    // inverse(R*s) = (1/s)*R^T: transpose the rotation block
                    // and divide by the single scale.
                    float invScale = 1.0f / mScale[0];
#if defined(GTE_USE_MAT_VEC)
                    mInvHMatrix(0, 0) = invScale * mMatrix(0, 0);
                    mInvHMatrix(0, 1) = invScale * mMatrix(1, 0);
                    mInvHMatrix(0, 2) = invScale * mMatrix(2, 0);
                    mInvHMatrix(1, 0) = invScale * mMatrix(0, 1);
                    mInvHMatrix(1, 1) = invScale * mMatrix(1, 1);
                    mInvHMatrix(1, 2) = invScale * mMatrix(2, 1);
                    mInvHMatrix(2, 0) = invScale * mMatrix(0, 2);
                    mInvHMatrix(2, 1) = invScale * mMatrix(1, 2);
                    mInvHMatrix(2, 2) = invScale * mMatrix(2, 2);
#else
                    mInvHMatrix(0, 0) = mMatrix(0, 0) * invScale;
                    mInvHMatrix(0, 1) = mMatrix(1, 0) * invScale;
                    mInvHMatrix(0, 2) = mMatrix(2, 0) * invScale;
                    mInvHMatrix(1, 0) = mMatrix(0, 1) * invScale;
                    mInvHMatrix(1, 1) = mMatrix(1, 1) * invScale;
                    mInvHMatrix(1, 2) = mMatrix(2, 1) * invScale;
                    mInvHMatrix(2, 0) = mMatrix(0, 2) * invScale;
                    mInvHMatrix(2, 1) = mMatrix(1, 2) * invScale;
                    mInvHMatrix(2, 2) = mMatrix(2, 2) * invScale;
#endif
                }
                else
                {
                    // Replace 3 reciprocals by 6 multiplies and 1 reciprocal.
                    float s01 = mScale[0] * mScale[1];
                    float s02 = mScale[0] * mScale[2];
                    float s12 = mScale[1] * mScale[2];
                    float invs012 = 1.0f / (s01 * mScale[2]);
                    float invS0 = s12 * invs012;
                    float invS1 = s02 * invs012;
                    float invS2 = s01 * invs012;
#if defined(GTE_USE_MAT_VEC)
                    mInvHMatrix(0, 0) = invS0 * mMatrix(0, 0);
                    mInvHMatrix(0, 1) = invS0 * mMatrix(1, 0);
                    mInvHMatrix(0, 2) = invS0 * mMatrix(2, 0);
                    mInvHMatrix(1, 0) = invS1 * mMatrix(0, 1);
                    mInvHMatrix(1, 1) = invS1 * mMatrix(1, 1);
                    mInvHMatrix(1, 2) = invS1 * mMatrix(2, 1);
                    mInvHMatrix(2, 0) = invS2 * mMatrix(0, 2);
                    mInvHMatrix(2, 1) = invS2 * mMatrix(1, 2);
                    mInvHMatrix(2, 2) = invS2 * mMatrix(2, 2);
#else
                    mInvHMatrix(0, 0) = mMatrix(0, 0) * invS0;
                    mInvHMatrix(0, 1) = mMatrix(1, 0) * invS1;
                    mInvHMatrix(0, 2) = mMatrix(2, 0) * invS2;
                    mInvHMatrix(1, 0) = mMatrix(0, 1) * invS0;
                    mInvHMatrix(1, 1) = mMatrix(1, 1) * invS1;
                    mInvHMatrix(1, 2) = mMatrix(2, 1) * invS2;
                    mInvHMatrix(2, 0) = mMatrix(0, 2) * invS0;
                    mInvHMatrix(2, 1) = mMatrix(1, 2) * invS1;
                    mInvHMatrix(2, 2) = mMatrix(2, 2) * invS2;
#endif
                }
            }
            else
            {
                // General matrix channel: invert the upper 3x3 block.
                Invert3x3(mHMatrix, mInvHMatrix);
            }

            // Inverse translation is -(M^{-1} * t).
#if defined(GTE_USE_MAT_VEC)
            mInvHMatrix(0, 3) = -(
                mInvHMatrix(0, 0) * mTranslate[0] +
                mInvHMatrix(0, 1) * mTranslate[1] +
                mInvHMatrix(0, 2) * mTranslate[2]
                );
            mInvHMatrix(1, 3) = -(
                mInvHMatrix(1, 0) * mTranslate[0] +
                mInvHMatrix(1, 1) * mTranslate[1] +
                mInvHMatrix(1, 2) * mTranslate[2]
                );
            mInvHMatrix(2, 3) = -(
                mInvHMatrix(2, 0) * mTranslate[0] +
                mInvHMatrix(2, 1) * mTranslate[1] +
                mInvHMatrix(2, 2) * mTranslate[2]
                );

            // The last row of mHMatrix is always (0,0,0,1) for an affine
            // transformation, so it is set once in the constructor.  It is
            // not necessary to reset it here.
#else
            mInvHMatrix(3, 0) = -(
                mInvHMatrix(0, 0) * mTranslate[0] +
                mInvHMatrix(1, 0) * mTranslate[1] +
                mInvHMatrix(2, 0) * mTranslate[2]
                );
            mInvHMatrix(3, 1) = -(
                mInvHMatrix(0, 1) * mTranslate[0] +
                mInvHMatrix(1, 1) * mTranslate[1] +
                mInvHMatrix(2, 1) * mTranslate[2]
                );
            mInvHMatrix(3, 2) = -(
                mInvHMatrix(0, 2) * mTranslate[0] +
                mInvHMatrix(1, 2) * mTranslate[1] +
                mInvHMatrix(2, 2) * mTranslate[2]
                );

            // The last column of mHMatrix is always (0,0,0,1) for an affine
            // transformation, so it is set once in the constructor.  It is
            // not necessary to reset it here.
#endif
        }

        mInverseNeedsUpdate = false;
    }

    return mInvHMatrix;
}

// Return a new Transform that is the inverse of this one, using the cheap
// rotation/uniform-scale closed form when available.
Transform Transform::Inverse() const
{
    Transform inverse;
    if (mIsIdentity)
    {
        inverse.MakeIdentity();
    }
    else
    {
        if (mIsRSMatrix && mIsUniformScale)
        {
            inverse.SetRotation(Transpose(GetRotation()));
            inverse.SetUniformScale(1.0f / GetUniformScale());
        }
        else
        {
            Matrix4x4<float> invUpper;
            Invert3x3(GetMatrix(), invUpper);
            inverse.SetMatrix(invUpper);
        }
        Vector4<float> trn = -GetTranslationW0();
        inverse.SetTranslation(inverse.GetMatrix() * trn);
    }
    // NOTE(review): this dirties *this* object's cached inverse even though
    // the method is const (mInverseNeedsUpdate appears to be mutable) --
    // confirm this is intentional rather than meant for 'inverse'.
    mInverseNeedsUpdate = true;
    return inverse;
}

// Recompose the cached homogeneous matrix mHMatrix from the channels and
// mark the cached inverse as stale.
void Transform::UpdateHMatrix()
{
    if (mIsIdentity)
    {
        mHMatrix.MakeIdentity();
    }
    else
    {
        if (mIsRSMatrix)
        {
            // Fold the per-axis scales into the rotation block (row-wise or
            // column-wise depending on the matrix/vector convention).
#if defined(GTE_USE_MAT_VEC)
            mHMatrix(0, 0) = mMatrix(0, 0) * mScale[0];
            mHMatrix(0, 1) = mMatrix(0, 1) * mScale[1];
            mHMatrix(0, 2) = mMatrix(0, 2) * mScale[2];
            mHMatrix(1, 0) = mMatrix(1, 0) * mScale[0];
            mHMatrix(1, 1) = mMatrix(1, 1) * mScale[1];
            mHMatrix(1, 2) = mMatrix(1, 2) * mScale[2];
            mHMatrix(2, 0) = mMatrix(2, 0) * mScale[0];
            mHMatrix(2, 1) = mMatrix(2, 1) * mScale[1];
            mHMatrix(2, 2) = mMatrix(2, 2) * mScale[2];
#else
            mHMatrix(0, 0) = mScale[0] * mMatrix(0, 0);
            mHMatrix(0, 1) = mScale[0] * mMatrix(0, 1);
            mHMatrix(0, 2) = mScale[0] * mMatrix(0, 2);
            mHMatrix(1, 0) = mScale[1] * mMatrix(1, 0);
            mHMatrix(1, 1) = mScale[1] * mMatrix(1, 1);
            mHMatrix(1, 2) = mScale[1] * mMatrix(1, 2);
            mHMatrix(2, 0) = mScale[2] * mMatrix(2, 0);
            mHMatrix(2, 1) = mScale[2] * mMatrix(2, 1);
            mHMatrix(2, 2) = mScale[2] * mMatrix(2, 2);
#endif
        }
        else
        {
            // General matrix channel: copy the upper 3x3 block verbatim.
            mHMatrix(0, 0) = mMatrix(0, 0);
            mHMatrix(0, 1) = mMatrix(0, 1);
            mHMatrix(0, 2) = mMatrix(0, 2);
            mHMatrix(1, 0) = mMatrix(1, 0);
            mHMatrix(1, 1) = mMatrix(1, 1);
            mHMatrix(1, 2) = mMatrix(1, 2);
            mHMatrix(2, 0) = mMatrix(2, 0);
            mHMatrix(2, 1) = mMatrix(2, 1);
            mHMatrix(2, 2) = mMatrix(2, 2);
        }

#if defined(GTE_USE_MAT_VEC)
        mHMatrix(0, 3) = mTranslate[0];
        mHMatrix(1, 3) = mTranslate[1];
        mHMatrix(2, 3) = mTranslate[2];

        // The last row of mHMatrix is always (0,0,0,1) for an affine
        // transformation, so it is set once in the constructor.  It is not
        // necessary to reset it here.
#else
        mHMatrix(3, 0) = mTranslate[0];
        mHMatrix(3, 1) = mTranslate[1];
        mHMatrix(3, 2) = mTranslate[2];

        // The last column of mHMatrix is always (0,0,0,1) for an affine
        // transformation, so it is set once in the constructor.  It is not
        // necessary to reset it here.
#endif
    }

    mInverseNeedsUpdate = true;
}

// Invert the upper 3x3 block of 'mat' into 'invMat' via the adjoint
// formula; the remaining entries are set to the identity pattern.
// NOTE(review): no singularity check -- a zero determinant produces a
// division by zero; callers are presumably expected to pass invertible
// matrices (the scale setters assert nonzero scales).
void Transform::Invert3x3(Matrix4x4<float> const& mat,
    Matrix4x4<float>& invMat)
{
    // Compute the adjoint of M (3x3).
    invMat(0, 0) = mat(1, 1) * mat(2, 2) - mat(1, 2) * mat(2, 1);
    invMat(0, 1) = mat(0, 2) * mat(2, 1) - mat(0, 1) * mat(2, 2);
    invMat(0, 2) = mat(0, 1) * mat(1, 2) - mat(0, 2) * mat(1, 1);
    invMat(0, 3) = 0.0f;
    invMat(1, 0) = mat(1, 2) * mat(2, 0) - mat(1, 0) * mat(2, 2);
    invMat(1, 1) = mat(0, 0) * mat(2, 2) - mat(0, 2) * mat(2, 0);
    invMat(1, 2) = mat(0, 2) * mat(1, 0) - mat(0, 0) * mat(1, 2);
    invMat(1, 3) = 0.0f;
    invMat(2, 0) = mat(1, 0) * mat(2, 1) - mat(1, 1) * mat(2, 0);
    invMat(2, 1) = mat(0, 1) * mat(2, 0) - mat(0, 0) * mat(2, 1);
    invMat(2, 2) = mat(0, 0) * mat(1, 1) - mat(0, 1) * mat(1, 0);
    invMat(2, 3) = 0.0f;
    invMat(3, 0) = 0.0f;
    invMat(3, 1) = 0.0f;
    invMat(3, 2) = 0.0f;
    invMat(3, 3) = 1.0f;

    // Compute the reciprocal of the determinant of M.
    float invDet = 1.0f / (
        mat(0, 0) * invMat(0, 0) +
        mat(0, 1) * invMat(1, 0) +
        mat(0, 2) * invMat(2, 0)
        );

    // inverse(M) = adjoint(M)/determinant(M).
    invMat(0, 0) *= invDet;
    invMat(0, 1) *= invDet;
    invMat(0, 2) *= invDet;
    invMat(1, 0) *= invDet;
    invMat(1, 1) *= invDet;
    invMat(1, 2) *= invDet;
    invMat(2, 0) *= invDet;
    invMat(2, 1) *= invDet;
    invMat(2, 2) *= invDet;
}

namespace gte
{

// Apply the transform to a column vector (matrix-on-the-left convention).
Vector4<float> operator*(Transform const& M, Vector4<float> const& V)
{
    return M.GetHMatrix() * V;
}

// Apply the transform to a row vector (vector-on-the-left convention).
Vector4<float> operator*(Vector4<float> const& V, Transform const& M)
{
    return V * M.GetHMatrix();
}

// Compose two transforms.  When both are rotation-scale and the convention-
// appropriate operand has uniform scale, the channels are composed directly;
// otherwise the composition falls back to full matrix multiplication.
Transform operator*(Transform const& A, Transform const& B)
{
    if (A.IsIdentity())
    {
        return B;
    }

    if (B.IsIdentity())
    {
        return A;
    }

    Transform product;

    if (A.IsRSMatrix() && B.IsRSMatrix())
    {
#if defined(GTE_USE_MAT_VEC)
        if (A.IsUniformScale())
        {
            product.SetRotation(A.GetRotation() * B.GetRotation());

            product.SetTranslation(A.GetUniformScale()*(
                A.GetRotation() * B.GetTranslationW0()) +
                A.GetTranslationW1());

            if (B.IsUniformScale())
            {
                product.SetUniformScale(
                    A.GetUniformScale() * B.GetUniformScale());
            }
            else
            {
                product.SetScale(A.GetUniformScale() * B.GetScale());
            }

            return product;
        }
#else
        if (B.IsUniformScale())
        {
            product.SetRotation(A.GetRotation() * B.GetRotation());

            product.SetTranslation(B.GetUniformScale()*(
                A.GetTranslationW0() * B.GetRotation()) +
                B.GetTranslationW1());

            if (A.IsUniformScale())
            {
                product.SetUniformScale(
                    A.GetUniformScale() * B.GetUniformScale());
            }
            else
            {
                product.SetScale(A.GetScale() * B.GetUniformScale());
            }

            return product;
        }
#endif
    }

    // In all remaining cases, the matrix cannot be written as R*S*X+T.
    Matrix4x4<float> matMA;
    if (A.IsRSMatrix())
    {
#if defined(GTE_USE_MAT_VEC)
        matMA = MultiplyMD(A.GetRotation(), A.GetScaleW1());
#else
        matMA = MultiplyDM(A.GetScaleW1(), A.GetRotation());
#endif
    }
    else
    {
        matMA = A.GetMatrix();
    }

    Matrix4x4<float> matMB;
    if (B.IsRSMatrix())
    {
#if defined(GTE_USE_MAT_VEC)
        matMB = MultiplyMD(B.GetRotation(), B.GetScaleW1());
#else
        matMB = MultiplyDM(B.GetScaleW1(), B.GetRotation());
#endif
    }
    else
    {
        matMB = B.GetMatrix();
    }

    product.SetMatrix(matMA * matMB);
#if defined(GTE_USE_MAT_VEC)
    product.SetTranslation(matMA * B.GetTranslationW0() +
        A.GetTranslationW1());
#else
    product.SetTranslation(A.GetTranslationW0() * matMB +
        B.GetTranslationW1());
#endif
    return product;
}

}
#!/usr/bin/env bash
# Run the Django housekeeping steps for the web_graphs service, each in a
# one-off docker-compose container: apply database migrations, then gather
# static assets (--noinput suppresses the interactive overwrite prompt).
docker-compose run web_graphs python3 manage.py migrate
docker-compose run web_graphs python3 manage.py collectstatic --noinput
// <reponame>saraElskely/gam3na
/**
 * Highlight the calendar tab for the given id and load that calendar's
 * events via AJAX, rendering each event into the .events container.
 *
 * @param {string|number} id_name - suffix of the tab element id
 *   ("tab" + id_name) and of the /event/calendar/ endpoint path.
 */
function activeTab(id_name) {
    // Reset every tab to the inactive color, then highlight the chosen one.
    var x = document.getElementsByClassName("tab");
    // BUG FIX: "i" was previously undeclared, leaking an implicit global
    // (and throwing a ReferenceError in strict mode).
    for (var i = 0; i < x.length; i++) {
        x[i].style.backgroundColor = "#df4864";
    }
    document.getElementById('tab' + id_name).style.backgroundColor = "#1c9287";
    $.ajax({
        url: "/event/calendar/" + id_name,
        success: function (result) {
            // Map zero-padded month numbers to short display names.
            var months = {
                '01': 'Jan', '02': 'Feb', '03': 'Mar', '04': 'Apr', '05': 'May', '06': 'Jun', '07': 'Jul', '08': 'Aug', '09': 'Sept', '10': 'Oct', '11': 'Nov', '12': 'Dec',
            };
            if (result) {
                $('.events').empty();
                result.forEach(function (event) {
                    console.log(event.event_name);
                    // event_date looks like "YYYY-MM-DD ..." — split out the
                    // month and the day-of-month for display.
                    var arr = event.event_date.split('-');
                    var day = arr[2].split(' ');
                    // NOTE(review): event fields are interpolated into HTML
                    // without escaping — potential XSS if event names or
                    // addresses are user-supplied; consider escaping them
                    // before insertion.
                    $('.events').append('<div class="event-block1">\
                    <div class="event-date1 eCol">\
                    <div class="eDate">'+day[0] +'</div>\
                    <div class="eMonth">'+ months[arr[1]]+'</div>\
                    </div>\
                    <div class="event-details1 eCol">\
                    <div class="event-name1"><a href="/event/'+event.id+'/checkevent">'+event.event_name+'</a></div><div>\
                    <span class="glyphicon glyphicon-map-marker" ></span>'+event.event_address+'</div>\
                    </div>\
                    </div>');
                });
            }
        }
    });
}
<reponame>kevinoid/openapi-transformers /** * @copyright Copyright 2021 <NAME> <<EMAIL>> * @license MIT */ import assert from 'assert'; import deepFreeze from 'deep-freeze'; import { isDeepStrictEqual } from 'util'; import intersectSchema, { EmptyIntersectionError, IntersectNotSupportedError } from '../../lib/intersect-schema.js'; function deepStrictEqualAnyOf(actual, expected) { if (!expected.some((e) => isDeepStrictEqual(actual, e))) { throw new assert.AssertionError({ actual, // expected is used for diff. Only include one. expected: expected[0], operator: 'deepStrictEqual', stackStartFn: deepStrictEqualAnyOf, }); } } // eslint-disable-next-line no-shadow function testIntersectSchema(intersectSchema) { it('throws TypeError if schema is undefined', () => { assert.throws( () => intersectSchema(undefined, {}), TypeError, ); }); it('unconstrained and unconstrained is unconstrained', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({}), deepFreeze({}), ), {}, ); }); it('false and anything is false', () => { assert.deepStrictEqual( intersectSchema( false, deepFreeze({ type: 'string' }), ), false, ); }); it('true and true is true', () => { assert.deepStrictEqual( intersectSchema( true, true, ), true, ); }); it('true and anything is anything', () => { assert.deepStrictEqual( intersectSchema( true, deepFreeze({ type: 'number' }), ), { type: 'number' }, ); }); it('unspecified and unconstrained is unspecified', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ unspecified: 1 }), deepFreeze({}), ), { unspecified: 1 }, ); }); it('unspecified === unspecified is unspecified', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ unspecified: 1 }), deepFreeze({ unspecified: 1 }), ), { unspecified: 1 }, ); }); it('isDeepStrictEqual(unspecified, unspecified) is unspecified', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ unspecified: [1] }), deepFreeze({ unspecified: [1] }), ), { unspecified: [1] }, ); }); it('unspecified !== 
unspecified throws IntersectNotSupportedError', () => { assert.throws( () => intersectSchema( deepFreeze({ unspecified: 1 }), deepFreeze({ unspecified: '1' }), ), IntersectNotSupportedError, ); }); // These tests should pass for any schema validation keyword. // Test once, since they share the same code. describe('for any keyword', () => { it('keyword and unconstrained is keyword', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ type: 'boolean' }), deepFreeze({}), ), { type: 'boolean' }, ); }); it('undefined keyword is unconstrained', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ type: undefined }), deepFreeze({ type: 'boolean' }), ), { type: 'boolean' }, ); }); it('keyword1 === keyword2 is keyword1/2', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ type: 'boolean' }), deepFreeze({ type: 'boolean' }), ), { type: 'boolean' }, ); }); it('isDeepStrictEqual(keyword1, keyword2) is keyword1/2', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ type: ['boolean'] }), deepFreeze({ type: ['boolean'] }), ), { type: ['boolean'] }, ); }); }); describe('for properties', () => { it('combines non-overlapping properties', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ properties: { b: { type: 'boolean' } } }), deepFreeze({ properties: { n: { type: 'number' } } }), ), { properties: { b: { type: 'boolean' }, n: { type: 'number' }, }, }, ); }); it('intersects same-name properties', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ properties: { n: { maximum: 10 } } }), deepFreeze({ properties: { n: { minimum: 5 } } }), ), { properties: { n: { minimum: 5, maximum: 10, }, }, }, ); }); it('handles schema without properties', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({}), deepFreeze({ properties: { n: { type: 'number' } } }), ), { properties: { n: { type: 'number' }, }, }, ); }); it('handles schema with empty properties', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ 
properties: {} }), deepFreeze({ properties: { n: { type: 'number' } } }), ), { properties: { n: { type: 'number' }, }, }, ); }); it('combines non-overlapping patternProperties', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ patternProperties: { b: { type: 'boolean' } } }), deepFreeze({ patternProperties: { n: { type: 'number' } } }), ), { patternProperties: { b: { type: 'boolean' }, n: { type: 'number' }, }, }, ); }); it('intersects identical patternProperties', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ patternProperties: { n: { maximum: 10 } } }), deepFreeze({ patternProperties: { n: { minimum: 5 } } }), ), { patternProperties: { n: { minimum: 5, maximum: 10, }, }, }, ); }); it('handles schema without patternProperties', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({}), deepFreeze({ patternProperties: { n: { type: 'number' } } }), ), { patternProperties: { n: { type: 'number' }, }, }, ); }); it('handles schema with empty patternProperties', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ patternProperties: {} }), deepFreeze({ patternProperties: { n: { type: 'number' } } }), ), { patternProperties: { n: { type: 'number' }, }, }, ); }); it('intersects properties matching patternProperties', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ properties: { n: { maximum: 10 } } }), deepFreeze({ patternProperties: { n: { minimum: 5 } } }), ), { properties: { n: { minimum: 5, maximum: 10, }, }, patternProperties: { n: { minimum: 5 }, }, }, ); }); // Nope: Result must have `n: false` (or no n), since any non-false value // would validate {n:10} not allowed by schema2. 
it('intersects properties matching patternProperties: false', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ properties: { n: { maximum: 10 } } }), deepFreeze({ patternProperties: { n: false } }), ), { properties: { n: false, }, patternProperties: { n: false, }, }, ); }); it('intersects properties not matching patternProperties', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ properties: { n: { maximum: 10 } } }), deepFreeze({ patternProperties: { m: { minimum: 5 } } }), ), { properties: { n: { maximum: 10 }, }, patternProperties: { m: { minimum: 5 }, }, }, ); }); // patternProperties from schema2 does not apply to n, since it is included // in properties. it('intersects properties skipping patternProperties', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ properties: { n: { maximum: 10 } } }), deepFreeze({ properties: { n: { multipleOf: 2 } }, patternProperties: { n: { minimum: 5 } }, }), ), { properties: { n: { maximum: 10, multipleOf: 2, }, }, patternProperties: { n: { minimum: 5 }, }, }, ); }); it('intersects properties matching multiple patternProperties', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ properties: { n1: { maximum: 10 } } }), deepFreeze({ patternProperties: { n: { minimum: 5 }, 1: { multipleOf: 2 }, }, }), ), { properties: { n1: { minimum: 5, maximum: 10, multipleOf: 2, }, }, patternProperties: { n: { minimum: 5 }, 1: { multipleOf: 2 }, }, }, ); }); it('intersects additionalProperties', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ additionalProperties: { maximum: 10 } }), deepFreeze({ additionalProperties: { minimum: 5 } }), ), { additionalProperties: { minimum: 5, maximum: 10, }, }, ); }); it('intersects additionalProperties: true', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ additionalProperties: true }), deepFreeze({ additionalProperties: true }), ), { additionalProperties: true, }, ); }); it('intersects additionalProperties: false', () => { 
assert.deepStrictEqual( intersectSchema( deepFreeze({ additionalProperties: false }), deepFreeze({ additionalProperties: false }), ), { additionalProperties: false, }, ); }); it('intersects additionalProperties: true/false', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ additionalProperties: true }), deepFreeze({ additionalProperties: false }), ), { additionalProperties: false, }, ); }); it('intersects properties matching additionalProperties', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ properties: { n: { maximum: 10 } } }), deepFreeze({ additionalProperties: { minimum: 5 } }), ), { properties: { n: { minimum: 5, maximum: 10, }, }, additionalProperties: { minimum: 5 }, }, ); }); // Nope: Result must have `n: false` (or no n), since any non-false value // would validate {n:10} not allowed by schema2. it('intersects properties matching additionalProperties: false', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ properties: { n: { maximum: 10 } } }), deepFreeze({ additionalProperties: false }), ), { properties: { n: false, }, additionalProperties: false, }, ); }); // additionalProperties from schema2 does not apply to n, since it is // included in properties. 
it('intersects properties skipping additionalProperties', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ properties: { n: { maximum: 10 } } }), deepFreeze({ properties: { n: { multipleOf: 2 } }, additionalProperties: { minimum: 5 }, }), ), { properties: { n: { maximum: 10, multipleOf: 2, }, }, additionalProperties: { minimum: 5 }, }, ); }); it('intersects properties no additionalProperties due to pattern', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ properties: { n1: { maximum: 10 } } }), deepFreeze({ patternProperties: { n: { multipleOf: 2 } }, additionalProperties: { minimum: 5 }, }), ), { properties: { n1: { maximum: 10, multipleOf: 2, }, }, patternProperties: { n: { multipleOf: 2 } }, additionalProperties: { minimum: 5 }, }, ); }); it('intersects patternProperties with additionalProperties', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ patternProperties: { n: { maximum: 10 } } }), deepFreeze({ additionalProperties: { minimum: 5 } }), ), { patternProperties: { n: { maximum: 10, minimum: 5, }, }, additionalProperties: { minimum: 5 }, }, ); }); }); describe('for allOf', () => { it('concatenates', () => { deepStrictEqualAnyOf( intersectSchema( deepFreeze({ allOf: [{ maximum: 10 }] }), deepFreeze({ allOf: [{ minimum: 5 }] }), ), [ { allOf: [ { maximum: 10 }, { minimum: 5 }, ], }, { allOf: [ { minimum: 5 }, { maximum: 10 }, ], }, ], ); }); it('removes duplicates', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ allOf: [{ maximum: 10 }] }), deepFreeze({ allOf: [{ maximum: 10 }] }), ), { allOf: [ { maximum: 10 }, ], }, ); }); }); describe('for anyOf', () => { it('one is subset of other', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ anyOf: [{ maximum: 10 }] }), deepFreeze({ anyOf: [{ minimum: 12 }, { maximum: 10 }] }), ), { anyOf: [{ maximum: 10 }] }, ); }); it('intersects each if one is single element', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ anyOf: [{ multipleOf: 1 
}] }), deepFreeze({ anyOf: [{ minimum: 12 }, { maximum: 10 }] }), ), { anyOf: [ { minimum: 12, multipleOf: 1 }, { maximum: 10, multipleOf: 1 }, ], }, ); }); it('throws IntersectNotSupportedError if neither subset nor single', () => { assert.throws( () => intersectSchema( deepFreeze({ anyOf: [{ minimum: 12 }, { maximum: 10 }] }), deepFreeze({ anyOf: [{ multipleOf: 1 }, { const: 3.14 }] }), ), IntersectNotSupportedError, ); }); }); describe('for const', () => { // Note: equal const is handled by generic keyword handling test above it('throws EmptyIntersectionError if not equal', () => { assert.throws( () => intersectSchema( deepFreeze({ const: 1 }), deepFreeze({ const: '1' }), ), EmptyIntersectionError, ); }); }); describe('for dependentRequired', () => { it('returns empty if both empty', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ dependentRequired: {} }), deepFreeze({ dependentRequired: {} }), ), { dependentRequired: {} }, ); }); it('returns non-empty if other empty', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ dependentRequired: { a: ['b'], }, }), deepFreeze({ dependentRequired: {} }), ), { dependentRequired: { a: ['b'], }, }, ); }); it('returns non-overlapping properties', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ dependentRequired: { a: ['b'], }, }), deepFreeze({ dependentRequired: { c: ['d'], }, }), ), { dependentRequired: { a: ['b'], c: ['d'], }, }, ); }); it('returns union of overlapping properties', () => { deepStrictEqualAnyOf( intersectSchema( deepFreeze({ dependentRequired: { a: ['b'], }, }), deepFreeze({ dependentRequired: { a: ['d'], }, }), ), [ { dependentRequired: { a: ['b', 'd'], }, }, { dependentRequired: { a: ['d', 'b'], }, }, ], ); }); it('returns unique union of overlapping properties', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ dependentRequired: { a: ['b'], }, }), deepFreeze({ dependentRequired: { a: ['b', 'd'], }, }), ), { dependentRequired: { a: ['b', 'd'], }, }, ); 
}); }); describe('for deprecated', () => { it('not deprecated if one is not deprecated', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ deprecated: true }), deepFreeze({}), ), {}, ); }); it('not deprecated if one is false', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ deprecated: true }), deepFreeze({ deprecated: false }), ), {}, ); }); it('returns true if both true', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ deprecated: true }), deepFreeze({ deprecated: true }), ), { deprecated: true }, ); }); }); describe('for description', () => { it('is combination of both descriptions', () => { deepStrictEqualAnyOf( intersectSchema( deepFreeze({ description: 'description1' }), deepFreeze({ description: 'description2' }), ), [ { description: 'Intersection of description1 and description2' }, { description: 'Intersection of description2 and description1' }, ], ); }); it('is surrounds spaced description with with parens', () => { deepStrictEqualAnyOf( intersectSchema( deepFreeze({ description: 'My description1' }), deepFreeze({ description: 'My_description2' }), ), [ { description: 'Intersection of (My description1) and My_description2', }, { description: 'Intersection of My_description2 and (My description1)', }, ], ); }); }); describe('for enum', () => { it('intersects permuted values', () => { deepStrictEqualAnyOf( intersectSchema( deepFreeze({ enum: [1, '1', 2] }), deepFreeze({ enum: [2, 1, '1'] }), ), [ { enum: [1, '1', 2] }, { enum: [2, 1, '1'] }, ], ); }); it('common subset of first and second', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ enum: [1, 2, 3] }), deepFreeze({ enum: [0, 1, 2] }), ), { enum: [1, 2] }, ); }); it('treats null like any other value', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ enum: [1, 2, 3, null] }), deepFreeze({ enum: [0, 1, 2, null] }), ), { enum: [1, 2, null] }, ); }); it('throws EmptyIntersectionError if none in common', () => { assert.throws( () => 
intersectSchema( deepFreeze({ enum: [2, 3] }), deepFreeze({ enum: [0, 1] }), ), EmptyIntersectionError, ); }); // TODO: Support this it('throws IntersectNotSupportedError if one has an object', () => { assert.throws( () => intersectSchema( deepFreeze({ enum: [2, {}] }), deepFreeze({ enum: [0, 1] }), ), IntersectNotSupportedError, ); }); }); describe('for examples', () => { it('concatenates', () => { deepStrictEqualAnyOf( intersectSchema( deepFreeze({ examples: [10] }), deepFreeze({ examples: [5] }), ), [ { examples: [10, 5] }, { examples: [5, 10] }, ], ); }); it('removes duplicates', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ examples: [10] }), deepFreeze({ examples: [10] }), ), { examples: [10], }, ); }); }); // eslint-disable-next-line no-shadow function boolExclusiveMax(intersectSchema) { it('returns smaller with exclusive true', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ maximum: 3 }), deepFreeze({ exclusiveMaximum: true, maximum: 2 }), ), { exclusiveMaximum: true, maximum: 2 }, ); }); it('returns smaller without exclusive true', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ exclusiveMaximum: true, maximum: 3 }), deepFreeze({ maximum: 2 }), ), { maximum: 2 }, ); }); it('returns more negative with exclusive true', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ exclusiveMaximum: true, maximum: -3 }), deepFreeze({ maximum: -2 }), ), { exclusiveMaximum: true, maximum: -3 }, ); }); } describe('for exclusiveMaximum', () => { it('returns smaller', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ exclusiveMaximum: 3 }), deepFreeze({ exclusiveMaximum: 2 }), ), { exclusiveMaximum: 2 }, ); }); it('returns more negative', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ exclusiveMaximum: -3 }), deepFreeze({ exclusiveMaximum: -2 }), ), { exclusiveMaximum: -3 }, ); }); it('returns smaller exclusiveMaximum', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ maximum: 3 
}), deepFreeze({ exclusiveMaximum: 2 }), ), { exclusiveMaximum: 2 }, ); }); it('returns smaller maximum', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ exclusiveMaximum: 3 }), deepFreeze({ maximum: 2 }), ), { maximum: 2 }, ); }); boolExclusiveMax(intersectSchema); // exclusiveMaximum:false should behave the same as undefined // Run same tests with false in place of undefined function intersectSchemaFalse(schema1, schema2) { if (schema1.exclusiveMaximum === undefined) { schema1 = { ...schema1, exclusiveMaximum: false }; } if (schema2.exclusiveMaximum === undefined) { schema2 = { ...schema2, exclusiveMaximum: false }; } return intersectSchema(schema1, schema2); } boolExclusiveMax(intersectSchemaFalse); }); // eslint-disable-next-line no-shadow function boolExclusiveMin(intersectSchema) { it('returns larger with exclusive true', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ minimum: 2 }), deepFreeze({ exclusiveMinimum: true, minimum: 3 }), ), { exclusiveMinimum: true, minimum: 3 }, ); }); it('returns larger without exclusive true', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ exclusiveMinimum: true, minimum: 2 }), deepFreeze({ minimum: 3 }), ), { minimum: 3 }, ); }); it('returns less negative with exclusive true', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ exclusiveMinimum: true, minimum: -2 }), deepFreeze({ minimum: -3 }), ), { exclusiveMinimum: true, minimum: -2 }, ); }); } describe('for exclusiveMinimum', () => { it('returns larger', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ exclusiveMinimum: 3 }), deepFreeze({ exclusiveMinimum: 2 }), ), { exclusiveMinimum: 3 }, ); }); it('returns less negative', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ exclusiveMinimum: -3 }), deepFreeze({ exclusiveMinimum: -2 }), ), { exclusiveMinimum: -2 }, ); }); it('returns larger exclusiveMinimum', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ minimum: 2 }), 
deepFreeze({ exclusiveMinimum: 3 }), ), { exclusiveMinimum: 3 }, ); }); it('returns larger minimum', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ exclusiveMinimum: 2 }), deepFreeze({ minimum: 3 }), ), { minimum: 3 }, ); }); boolExclusiveMin(intersectSchema); // exclusiveMinimum:false should behave the same as undefined // Run same tests with false in place of undefined function intersectSchemaFalse(schema1, schema2) { if (schema1.exclusiveMinimum === undefined) { schema1 = { ...schema1, exclusiveMinimum: false }; } if (schema2.exclusiveMinimum === undefined) { schema2 = { ...schema2, exclusiveMinimum: false }; } return intersectSchema(schema1, schema2); } boolExclusiveMin(intersectSchemaFalse); }); describe('for maxItems', () => { it('returns smaller', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ maxItems: 3 }), deepFreeze({ maxItems: 2 }), ), { maxItems: 2 }, ); }); }); describe('for maxLength', () => { it('returns smaller', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ maxLength: 3 }), deepFreeze({ maxLength: 2 }), ), { maxLength: 2 }, ); }); }); describe('for maxProperties', () => { it('returns smaller', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ maxProperties: 3 }), deepFreeze({ maxProperties: 2 }), ), { maxProperties: 2 }, ); }); }); describe('for maximum', () => { it('returns smaller', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ maximum: 3 }), deepFreeze({ maximum: 2 }), ), { maximum: 2 }, ); }); it('returns more negative', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ maximum: -3 }), deepFreeze({ maximum: -2 }), ), { maximum: -3 }, ); }); }); describe('for minItems', () => { it('returns larger', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ minItems: 3 }), deepFreeze({ minItems: 2 }), ), { minItems: 3 }, ); }); }); describe('for minLength', () => { it('returns larger', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ minLength: 
3 }), deepFreeze({ minLength: 2 }), ), { minLength: 3 }, ); }); }); describe('for minProperties', () => { it('returns larger', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ minProperties: 3 }), deepFreeze({ minProperties: 2 }), ), { minProperties: 3 }, ); }); }); describe('for minimum', () => { it('returns larger', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ minimum: 3 }), deepFreeze({ minimum: 2 }), ), { minimum: 3 }, ); }); it('returns less negative', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ minimum: -3 }), deepFreeze({ minimum: -2 }), ), { minimum: -2 }, ); }); }); describe('for multipleOf', () => { it('returns one if multiple of other (integer)', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ multipleOf: 4 }), deepFreeze({ multipleOf: 2 }), ), { multipleOf: 4 }, ); }); it('returns one if multiple of other (float)', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ multipleOf: 1 }), deepFreeze({ multipleOf: 0.5 }), ), { multipleOf: 1 }, ); }); it('otherwise returns multiple of first and second', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ multipleOf: 2 }), deepFreeze({ multipleOf: 3 }), ), { multipleOf: 6 }, ); }); }); describe('for required', () => { it('returns empty if both empty', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ required: [] }), deepFreeze({ required: [] }), ), { required: [] }, ); }); it('returns non-empty if other empty', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ required: ['a'] }), deepFreeze({ required: [] }), ), { required: ['a'] }, ); }); it('returns union with no intersection', () => { deepStrictEqualAnyOf( intersectSchema( deepFreeze({ required: ['a'] }), deepFreeze({ required: ['b'] }), ), [ { required: ['a', 'b'] }, { required: ['b', 'a'] }, ], ); }); it('returns either if permuted', () => { deepStrictEqualAnyOf( intersectSchema( deepFreeze({ required: ['a', 'b'] }), deepFreeze({ required: ['b', 'a'] 
}), ), [ { required: ['a', 'b'] }, { required: ['b', 'a'] }, ], ); }); }); describe('for oneOf', () => { it('one is permuted other', () => { deepStrictEqualAnyOf( intersectSchema( deepFreeze({ oneOf: [{ maximum: 10 }, { minimum: 12 }] }), deepFreeze({ oneOf: [{ minimum: 12 }, { maximum: 10 }] }), ), [ { oneOf: [{ maximum: 10 }, { minimum: 12 }] }, { oneOf: [{ minimum: 12 }, { maximum: 10 }] }, ], ); }); it('intersects each if one is single element', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ oneOf: [{ multipleOf: 1 }] }), deepFreeze({ oneOf: [{ minimum: 12 }, { maximum: 10 }] }), ), { oneOf: [ { minimum: 12, multipleOf: 1 }, { maximum: 10, multipleOf: 1 }, ], }, ); }); it('throws IntersectNotSupportedError if not permuted nor single', () => { assert.throws( () => intersectSchema( deepFreeze({ oneOf: [{ minimum: 12 }, { maximum: 10 }] }), deepFreeze({ oneOf: [{ multipleOf: 1 }, { const: 3.14 }] }), ), IntersectNotSupportedError, ); }); }); describe('for propertyNames', () => { it('intersects', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ propertyNames: { maxLength: 10 } }), deepFreeze({ propertyNames: { minLength: 5 } }), ), { propertyNames: { minLength: 5, maxLength: 10, }, }, ); }); }); describe('for pattern', () => { it('returns anchored lookahead assertion for each pattern', () => { deepStrictEqualAnyOf( intersectSchema( deepFreeze({ pattern: 'hi' }), deepFreeze({ pattern: 'ho' }), ), [ { pattern: '^(?=.*hi)(?=.*ho)' }, { pattern: '^(?=.*ho)(?=.*hi)' }, ], ); }); it('skips .* for anchored patterns', () => { deepStrictEqualAnyOf( intersectSchema( deepFreeze({ pattern: '^hi' }), deepFreeze({ pattern: '^ho' }), ), [ { pattern: '^(?=^hi)(?=^ho)' }, { pattern: '^(?=^ho)(?=^hi)' }, ], ); }); }); describe('for readOnly', () => { it('returns true if either true', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ readOnly: true }), deepFreeze({ readOnly: false }), ), { readOnly: true }, ); }); }); describe('for title', () 
=> { it('is combination of both titles', () => { deepStrictEqualAnyOf( intersectSchema( deepFreeze({ title: 'title1' }), deepFreeze({ title: 'title2' }), ), [ { title: 'Intersection of title1 and title2' }, { title: 'Intersection of title2 and title1' }, ], ); }); it('is surrounds spaced title with with parens', () => { deepStrictEqualAnyOf( intersectSchema( deepFreeze({ title: 'My title1' }), deepFreeze({ title: 'My_title2' }), ), [ { title: 'Intersection of (My title1) and My_title2' }, { title: 'Intersection of My_title2 and (My title1)' }, ], ); }); }); describe('for type', () => { it('type1 !== type2 throws EmptyIntersectionError', () => { assert.throws( () => intersectSchema( deepFreeze({ type: 'boolean' }), deepFreeze({ type: 'number' }), ), EmptyIntersectionError, ); }); it('type1 intersect type2 empty EmptyIntersectionError', () => { assert.throws( () => intersectSchema( deepFreeze({ type: ['boolean'] }), deepFreeze({ type: ['number'] }), ), EmptyIntersectionError, ); }); it('integer intersect number is integer', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ type: 'integer' }), deepFreeze({ type: 'number' }), ), { type: 'integer' }, ); }); it('permuted array types intersect', () => { deepStrictEqualAnyOf( intersectSchema( deepFreeze({ type: ['boolean', 'number'] }), deepFreeze({ type: ['number', 'boolean'] }), ), [ { type: ['boolean', 'number'] }, { type: ['number', 'boolean'] }, ], ); }); it('type1 subset type2 intersect', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ type: ['boolean', 'string'] }), deepFreeze({ type: ['boolean', 'number', 'string'] }), ), { type: ['boolean', 'string'] }, ); }); it('type2 subset type1 intersect to single element', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ type: ['boolean', 'number', 'string'] }), deepFreeze({ type: ['null', 'string'] }), ), { type: 'string' }, ); }); }); describe('for uniqueItems', () => { it('returns true if either true', () => { assert.deepStrictEqual( 
intersectSchema( deepFreeze({ uniqueItems: true }), deepFreeze({ uniqueItems: false }), ), { uniqueItems: true }, ); }); }); describe('for writeOnly', () => { it('returns true if either true', () => { assert.deepStrictEqual( intersectSchema( deepFreeze({ writeOnly: true }), deepFreeze({ writeOnly: false }), ), { writeOnly: true }, ); }); }); } describe('intersectSchema', () => testIntersectSchema(intersectSchema)); // intersectSchema should be symmetric. Test with args swapped. function intersectSchemaSwapped(schema1, schema2) { return intersectSchema(schema2, schema1); } describe( 'intersectSchema (swapped)', () => testIntersectSchema(intersectSchemaSwapped), );
"""Train and evaluate a decision tree classifier on a dataset saved as .npy files."""
import numpy as np
# BUGFIX: train_test_split was used below but never imported (NameError).
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier

# Load the dataset from the working directory.
X = np.load("X.npy")
y = np.load("y.npy")

# Hold out 20% of the samples for evaluation.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)

# Define the model.
model = DecisionTreeClassifier()

# Train the model.
model.fit(X_train, y_train)

# Report mean accuracy on the held-out split.
test_acc = model.score(X_test, y_test)
print('Test accuracy:', test_acc)
<filename>utils/streams.js function readStringFromStream(stream, callback){ let data = ""; stream.on("data", (messagePart)=>{ data += messagePart; }); stream.on("end", ()=>{ callback(null, data); }); stream.on("error", (err)=>{ callback(err); }); } function readMessageBufferFromHTTPStream(reqORres, callback) { const contentType = reqORres.headers['content-type']; if (contentType === 'application/octet-stream') { const contentLength = Number.parseInt(reqORres.headers['content-length'], 10); if (Number.isNaN(contentLength)) { return callback(new Error("Wrong content length header received!")); } streamToBuffer(reqORres, contentLength, (err, bodyAsBuffer) => { if (err) { return OpenDSUSafeCallback(callback)(createOpenDSUErrorWrapper(`Failed to convert stream to buffer`, err)); } callback(undefined, bodyAsBuffer); }); } else { callback(new Error("Wrong message format received!")); } function streamToBuffer(stream, bufferSize, callback) { const buffer = $$.Buffer.alloc(bufferSize); let currentOffset = 0; stream.on('data', function (chunk) { const chunkSize = chunk.length; const nextOffset = chunkSize + currentOffset; if (currentOffset > bufferSize - 1) { stream.close(); return callback(new Error('Stream is bigger than reported size')); } write2Buffer(buffer, chunk, currentOffset); currentOffset = nextOffset; }); stream.on('end', function () { callback(undefined, buffer); }); stream.on('error', callback); } function write2Buffer(buffer, dataToAppend, offset) { const dataSize = dataToAppend.length; for (let i = 0; i < dataSize; i++) { buffer[offset++] = dataToAppend[i]; } } } module.exports = { readStringFromStream, readMessageBufferFromHTTPStream }
/*
 * Copyright 2017-2020 original authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.micronaut.data.mongodb.annotation;

import com.mongodb.CursorType;

import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Defines a custom MongoDB find query options.
 *
 * <p>Negative numeric defaults mean "not set" — the option is only applied
 * when a non-negative value is supplied.</p>
 *
 * @author <NAME>
 * @since 3.3.0
 */
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.ANNOTATION_TYPE, ElementType.METHOD, ElementType.TYPE})
@Documented
@Inherited
public @interface MongoFindOptions {

    /**
     * The batchSize.
     *
     * @return The batchSize value
     */
    int batchSize() default -1;

    /**
     * The skip.
     *
     * @return The skip value
     */
    int skip() default -1;

    /**
     * The limit.
     *
     * @return The limit value
     */
    int limit() default -1;

    /**
     * The maxTimeMS.
     *
     * @return The maxTimeMS value
     */
    long maxTimeMS() default -1;

    /**
     * The maxAwaitTimeMS.
     *
     * @return The maxAwaitTimeMS value
     */
    long maxAwaitTimeMS() default -1;

    /**
     * The cursorType.
     *
     * @return The cursorType value
     */
    CursorType cursorType() default CursorType.NonTailable;

    /**
     * The noCursorTimeout.
     *
     * @return The noCursorTimeout value
     */
    boolean noCursorTimeout() default false;

    /**
     * The partial.
     *
     * @return The partial value
     */
    boolean partial() default false;

    /**
     * The comment.
     *
     * @return The comment value
     */
    String comment() default "";

    /**
     * The hint.
     *
     * @return The hint value
     */
    String hint() default "";

    /**
     * The max.
     *
     * @return The max value
     */
    String max() default "";

    /**
     * The min.
     *
     * @return The min value
     */
    String min() default "";

    /**
     * The returnKey.
     *
     * @return The returnKey value
     */
    boolean returnKey() default false;

    /**
     * The showRecordId.
     *
     * @return The showRecordId value
     */
    boolean showRecordId() default false;

    /**
     * The allowDiskUse.
     *
     * @return The allowDiskUse value
     */
    boolean allowDiskUse() default false;
}
/**
 * Linear search: returns the index of the first occurrence of item in arr,
 * or -1 if the item is not present.
 *
 * @param {Array} arr   Array to scan.
 * @param {*}     item  Value to find.
 * @returns {number}    Index of the first match, or -1.
 */
function linearSearch(arr, item) {
    for (let i = 0; i < arr.length; i++) {
        // Strict equality avoids type-coercion surprises ('10' == 10).
        if (arr[i] === item) {
            // BUGFIX: return on the first match. The original kept scanning,
            // overwriting `position` and returning the LAST occurrence while
            // always visiting every element.
            return i;
        }
    }
    return -1;
}

let arr = [9, 7, 5, 6, 10];
let item = 10;

let result = linearSearch(arr, item);
if (result === -1) {
    console.log('Item not found in array!');
} else {
    console.log('Item found at index ' + result);
}
// Google Analytics tracking singleton. setup() runs once at page load; the
// other methods forward events and e-commerce data to GA via the global ga().
var Tracking = {
    setup: function(){
        // Standard Google Analytics asynchronous loader snippet (analytics.js).
        (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
        (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
        m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
        })(window,document,'script','//www.google-analytics.com/analytics.js','ga');

        ga('create', 'UA-46259877-1', 'summon.co.nf');
        ga('require', 'ecommerce', 'ecommerce.js'); //eCommerce
        ga('set', 'dimension1', Tracking.getCustTier()); // Custom Dimension
        ga('send', 'pageview');
    },
    // Sends a single GA event hit. `evt` is expected to expose getEvtObj()
    // returning the hit fields — assumes an Event wrapper type; TODO confirm.
    fireEvent: function(evt){
        if(evt){
            var evtObj = {
                'hitType': 'event'
            };
            // Deep-merge (jQuery) the base hit type with the caller-supplied fields.
            var gaEvtObj = $.extend(true, {}, evtObj, evt.getEvtObj());
            ga('send', gaEvtObj);
            console.info("event sent:");
            console.dir(gaEvtObj);
        }
    },
    // Pushes a transaction and its line items into the GA e-commerce plugin,
    // then flushes everything with 'ecommerce:send'.
    sendTxnAndItemsData: function(txn, items){
        if(txn || items){
            if(txn){
                ga('ecommerce:addTransaction', txn.getTxnObj());
                console.debug("Added Txn:");
                console.dir(txn.getTxnObj());
            }
            if(items && items != null){
                for(var i=0; i<items.length; i++){
                    ga('ecommerce:addItem', items[i].getItemObj());
                    console.debug("Added Item:");
                    console.dir(items[i].getItemObj());
                }
            }
            ga('ecommerce:send');
            console.info("eCommerce data sent");
        }
    },
    // Customer tier used for GA custom dimension 1 (hard-coded placeholder).
    getCustTier: function(){
        //Testing
        return "Premier";
    }
}

// Init Singleton
Tracking.setup();
#!/usr/bin/env bash
#
# @Desc: Give a docker container a static IP on the same subnet as the host
#        so it can communicate with the host network directly.
# @FileName: docker_setip.sh <https://github.com/s23xiaojia/docker-cli-tools.git>
# @Version: v1.0.0
# @Date: 2018/01/04
# @Author: Jia Junwei <login_532_gajun@sina.com>

cmd=$0
arg=$@
arg_num=$#

# Print command usage.
help_info() {
    echo """
    Usage: $(basename $0) <OPTION> ...

    OPTION as follows:
        --container     container name or container id
        --br            physical bridge name prepare container connect
        --ip            container ip address eg ip/prefix
        --gw            container default gateway

    Example:
        $(basename $0) --container test --br mybr0 --ip 192.168.1.1/24 --gw 192.168.1.254
    """
}

# Parse the option values into globals (container_name, bridge_name, ipaddr, gateway).
get_params() {
    for ((i=1;i<=$arg_num/2;i++));do
        case $1 in
        --container)
            container_name=$2
            ;;
        --br)
            bridge_name=$2
            ;;
        --ip)
            ipaddr=$2
            ;;
        --gw)
            gateway=$2
            ;;
        *)
            help_info
            exit 1
        esac
        shift 2
    done
}

# Health check: the container must be running and the physical bridge must exist.
health_check(){
    if [[ `docker container ls -f name=$container_name -q | wc -l` -ne 1 ]];then
        echo "Error: Container $container_name is not running"
        return 1
    fi
    brctl show $bridge_name 2>&1 | grep -q No && echo "Error: can't get info No such device $bridge_name" && return 1 || return 0
}

# Attach one half of a veth pair to the container and the other half to the
# physical bridge, then configure the new NIC inside the container.
addif_to_bridge() {
    container_id=$(docker container ls -f name=$container_name -q | cut -c 1-5)   ## container ID (first 5 chars)
    pid=$(docker inspect -f '{{.State.Pid}}' $container_name)                     ## container pid

    # Expose the container's network namespace to `ip netns`.
    [ ! -d /var/run/netns ] && mkdir -p /var/run/netns
    find -L /var/run/netns -type l -delete
    ln -sf /proc/$pid/ns/net /var/run/netns/$container_name

    # Create the veth pair and attach it to the container and the bridge.
    ip link add veth${container_id}.0 type veth peer name veth${container_id}.1 && true || exit 1
    #for i in {1..4};do
    #    if [ $i -eq 4 ];then echo "Exception: line[82] [NIC pair generation failed.]";exit 1;fi
    #    ip link add veth${container_id}.0 type veth peer name veth${container_id}.1
    #    if [ $? -ne 0 ];then
    #        container_id=$(docker container ls -f name=$container_name -q | cut -c 1-$[5+i])
    #    else
    #        break
    #    fi
    #done
    brctl addif $bridge_name veth${container_id}.0 && true || exit 1
    ip link set veth${container_id}.0 up && true || exit 1
    ip link set veth${container_id}.1 netns $container_name && true || exit 1

    # Configure the container-side NIC: bring it up, assign the static IP and
    # install the default route.
    #ip netns exec $container_name ip link set dev veth${container_id}.1 name eth0
    #ip netns exec $container_name ip link set eth0 up
    ip netns exec $container_name ip link set dev veth${container_id}.1 up && true || exit 1
    ip netns exec $container_name ip addr add $ipaddr dev veth${container_id}.1 && true || exit 1
    ip netns exec $container_name ip route add default via $gateway dev veth${container_id}.1 proto static metric 200

    # Print the resulting NIC configuration.
    echo "container_name: $container_name"
    echo "bridge_name: $bridge_name"
    echo "ipaddr: $ipaddr"
    echo "gateway: $gateway"
}

# Allow forwarding of traffic leaving via the bridge and enable IPv4 forwarding.
set_ip_route() {
    iptables -t filter -L FORWARD -nv --line | grep -wq "$bridge_name" || iptables -t filter -I FORWARD 3 -o $bridge_name -s 0.0.0.0/0 -d 0.0.0.0/0 -j ACCEPT
    sysctl net.ipv4.ip_forward | grep -wq 0 && sysctl -wq net.ipv4.ip_forward=1
}

# main
if [[ $arg_num -ne 8 ]];then
    help_info
    exit 1
fi
get_params $arg
health_check && addif_to_bridge
set_ip_route
# Launch training on WMT14 En-De: big Transformer params with highway FFW blocks,
# annealed LR schedule, distillation, argmax decoding with a single decoder
# repeat, resuming from a checkpoint and fine-tuning the target-length predictor.
# Replace <model-name> with the checkpoint to load.
python run.py --dataset wmt14-ende --vocab_size 60000 --gpu 2 --ffw_block highway --params big --lr_schedule anneal --fast --valid_repeat_dec 1 --use_argmax --next_dec_input both --use_distillation --load_from <model-name> --resume --finetune_trg_len --trg_len_option predict
/*
 * Copyright (C) 2015 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package com.google.cloud.dataflow.sdk.options;

import java.util.ServiceLoader;

/**
 * {@link PipelineOptions} creators have the ability to automatically have their
 * {@link PipelineOptions} registered with this SDK by creating a {@link ServiceLoader} entry
 * and a concrete implementation of this interface.
 * <p>
 * Note that automatic registration of any
 * {@link com.google.cloud.dataflow.sdk.options.PipelineOptions} requires users
 * conform to the limitations discussed on {@link PipelineOptionsFactory#register(Class)}.
 * <p>
 * It is optional but recommended to use one of the many build time tools such as
 * {@link com.google.auto.service.AutoService} to generate the necessary META-INF
 * files automatically.
 */
public interface PipelineOptionsRegistrar {

  /**
   * Returns the {@link PipelineOptions} classes that this registrar contributes
   * for automatic registration via the {@link ServiceLoader} mechanism.
   *
   * @return the {@link PipelineOptions} classes to register
   */
  Iterable<Class<? extends PipelineOptions>> getPipelineOptions();
}
package kr.co.gardener.admin.dao.user;

import java.util.List;

import kr.co.gardener.admin.model.other.Notice;
import kr.co.gardener.admin.model.user.User;
import kr.co.gardener.util.ComboItem;
import kr.co.gardener.util.Pager;

/**
 * Data-access interface for {@link User} records, including paging,
 * batch operations and combo-box support for the admin console.
 */
public interface UserDao {

	/** Returns all users. */
	List<User> list();

	/** Inserts a new user. */
	void add(User item);

	/** Updates an existing user. */
	void update(User item);

	/** Deletes the user with the given id. */
	void delete(String userId);

	/** Returns the image source (path or URL) stored for the given user. */
	String imgSrc(User user);

	/** Returns the number of existing users with the given id (non-zero means duplicate). */
	int duplication(String id);

	/** Returns one page of users as described by the pager. */
	List<User> list_pager(Pager pager);

	/**
	 * Returns the total row count for the pager's filter.
	 * NOTE(review): declared as float — presumably to simplify page-count
	 * arithmetic in callers; confirm against the mapper.
	 */
	float total(Pager pager);

	/** Returns users formatted as combo-box items. */
	List<ComboItem> combo();

	/** Batch-inserts the given users. */
	void insert_list(List<User> list);

	/** Batch-deletes the given users. */
	void delete_list(List<User> list);

	/** Batch-updates the given users. */
	void update_list(List<User> list);

	/** Returns the single user matching the given example's key fields. */
	User item(User user);

	/** Looks up a user validating the password — TODO confirm exact semantics. */
	User item_checkPass(User user);

	/** Raises the user's level; presumably returns the affected row count — confirm. */
	int levelUp(User user);

	/** Returns the number of users matching the given criteria. */
	int count(User user);

	/** Returns the pre-update snapshot for the given users — TODO confirm semantics. */
	List<User> getUpdatePreList(List<User> list);
}
# Build a small singly linked list holding the values 1..5.
class Node:
    """A single node of a singly linked list.

    Attributes:
        data: the payload stored in this node.
        next: the following node, or None at the tail.
    """

    def __init__(self, data):
        self.data = data
        self.next = None


# Start the list with its head node, then append the remaining values,
# keeping `current` pointed at the tail as we go.
head = Node(1)
current = head
for value in [2, 3, 4, 5]:
    current.next = Node(value)
    current = current.next
package com.fasterweb.model;

/**
 * Form-backing bean carrying the credentials submitted from the login page.
 */
public class LoginForm {

    /** Numeric account identifier entered by the user; null until bound. */
    private Integer accountId;

    /** Password as submitted; held only transiently for authentication. */
    private String password;

    /** @return the account identifier, or null when not yet populated */
    public Integer getAccountId() {
        return accountId;
    }

    /** @param accountId the account identifier to bind */
    public void setAccountId(Integer accountId) {
        this.accountId = accountId;
    }

    /** @return the submitted password, or null when not yet populated */
    public String getPassword() {
        return password;
    }

    /** @param password the password to bind */
    public void setPassword(String password) {
        this.password = password;
    }
}
#!/usr/bin/env python
#
# Copyright 2017 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""
A gateway between the internal event bus and the Kafka publisher proxy
to publish select topics and messages posted to the Voltha-internal event
bus toward the external world.
"""
from __future__ import absolute_import
import structlog
from google.protobuf.json_format import MessageToDict
from google.protobuf.message import Message
from simplejson import dumps

from pyvoltha.common.event_bus import EventBusClient
import six

log = structlog.get_logger()


class EventBusPublisher(object):

    def __init__(self, kafka_proxy, config):
        """Create the publisher.

        :param kafka_proxy: outbound Kafka client used by forward().
        :param config: dict-like config; its optional 'topic_mappings'
            section maps event-bus topics to {'kafka_topic': ...} entries.
        """
        self.kafka_proxy = kafka_proxy
        self.config = config
        self.topic_mappings = config.get('topic_mappings', {})
        self.event_bus = EventBusClient()
        self.subscriptions = None

    def start(self):
        """Subscribe to all configured event-bus topics; returns self for chaining."""
        log.debug('starting')
        self.subscriptions = list()
        self._setup_subscriptions(self.topic_mappings)
        log.info('started')
        return self

    def stop(self):
        """Unsubscribe all registered subscriptions; logs (never raises) on failure."""
        try:
            log.debug('stopping-event-bus')
            if self.subscriptions:
                for subscription in self.subscriptions:
                    self.event_bus.unsubscribe(subscription)
            log.info('stopped-event-bus')
        except Exception as e:
            log.exception('failed-stopping-event-bus', e=e)
        return

    def _setup_subscriptions(self, mappings):
        # For each configured mapping, subscribe on the event bus and forward
        # messages to the mapped Kafka topic. Entries without a 'kafka_topic'
        # key are logged and skipped.
        for event_bus_topic, mapping in six.iteritems(mappings):

            kafka_topic = mapping.get('kafka_topic', None)

            if kafka_topic is None:
                log.error('no-kafka-topic-in-config',
                          event_bus_topic=event_bus_topic,
                          mapping=mapping)
                continue

            self.subscriptions.append(self.event_bus.subscribe(
                event_bus_topic,
                # to avoid Python late-binding to the last registered
                # kafka_topic, we force instant binding with the default arg
                lambda _, m, k=kafka_topic: self.forward(k, m)))

            log.info('event-to-kafka', kafka_topic=kafka_topic,
                     event_bus_topic=event_bus_topic)

    def forward(self, kafka_topic, msg):
        """Serialize protobuf messages to JSON and publish to the Kafka topic."""
        try:
            # convert to JSON string if msg is a protobuf msg
            if isinstance(msg, Message):
                msg = dumps(MessageToDict(msg, True, True))
            log.debug('forward-event-bus-publisher')
            self.kafka_proxy.send_message(kafka_topic, msg)
        except Exception as e:
            log.exception('failed-forward-event-bus-publisher', e=e)
package org.jooby.json;

import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.isA;

import java.util.function.BiConsumer;
import java.util.function.Consumer;

import org.jooby.Env;
import org.jooby.test.MockUnit;
import org.jooby.test.MockUnit.Block;
import org.jooby.Parser;
import org.jooby.Renderer;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.inject.Binder;
import com.google.inject.binder.AnnotatedBindingBuilder;
import com.google.inject.binder.LinkedBindingBuilder;
import com.google.inject.multibindings.Multibinder;
import com.typesafe.config.Config;

/**
 * Unit tests for {@code Gzon.configure(...)}. PowerMock is required because
 * the expectations intercept the {@code new GsonBuilder()} constructor call
 * and the static {@code Multibinder.newSetBinder(...)} factory.
 */
@RunWith(PowerMockRunner.class)
@PrepareForTest({Gzon.class, Gson.class, GsonBuilder.class, Multibinder.class })
public class GzonTest {

  /**
   * Shared expectation block: Gzon.configure must
   * (1) build a Gson via an intercepted GsonBuilder and bind the instance,
   * (2) add a GsonParser to the Parser multibinder, and
   * (3) add a GsonRenderer to the Renderer multibinder.
   */
  @SuppressWarnings("unchecked")
  private Block body = unit -> {
    // intercept `new GsonBuilder()` inside Gzon and expose it to the test
    GsonBuilder gsonb = unit.mockConstructor(GsonBuilder.class);
    unit.registerMock(GsonBuilder.class, gsonb);

    Gson gson = unit.get(Gson.class);

    // binder.bind(Gson.class).toInstance(gson)
    AnnotatedBindingBuilder<Gson> abbGson = unit.mock(AnnotatedBindingBuilder.class);
    abbGson.toInstance(gson);

    Binder binder = unit.get(Binder.class);
    expect(binder.bind(Gson.class)).andReturn(abbGson);

    expect(gsonb.create()).andReturn(gson);

    // Parser multibinder receives a GsonParser
    LinkedBindingBuilder<Parser> lbbparser = unit.mock(LinkedBindingBuilder.class);
    lbbparser.toInstance(isA(GsonParser.class));

    Multibinder<Parser> mbparser = unit.mock(Multibinder.class);
    expect(mbparser.addBinding()).andReturn(lbbparser);

    // Renderer multibinder receives a GsonRenderer
    LinkedBindingBuilder<Renderer> lbbrenderer = unit.mock(LinkedBindingBuilder.class);
    lbbrenderer.toInstance(isA(GsonRenderer.class));

    Multibinder<Renderer> mbrenderer = unit.mock(Multibinder.class);
    expect(mbrenderer.addBinding()).andReturn(lbbrenderer);

    // static factory interception for both multibinders
    unit.mockStatic(Multibinder.class);
    expect(Multibinder.newSetBinder(binder, Parser.class)).andReturn(mbparser);
    expect(Multibinder.newSetBinder(binder, Renderer.class)).andReturn(mbrenderer);
  };

  /** configure() with no customization performs the default bindings. */
  @Test
  public void defaults() throws Exception {
    new MockUnit(Env.class, Config.class, Binder.class, Gson.class)
        .expect(body)
        .run(unit -> {
          new Gzon()
              .configure(unit.get(Env.class), unit.get(Config.class), unit.get(Binder.class));
        });
  }

  /** doWith(Consumer) must hand the intercepted GsonBuilder to the callback. */
  @SuppressWarnings("unchecked")
  @Test
  public void withCallback() throws Exception {
    new MockUnit(Env.class, Config.class, Binder.class, Gson.class, Consumer.class)
        .expect(body)
        .expect(unit -> {
          unit.get(Consumer.class).accept(unit.get(GsonBuilder.class));
        })
        .run(unit -> {
          new Gzon()
              .doWith(unit.get(Consumer.class))
              .configure(unit.get(Env.class), unit.get(Config.class), unit.get(Binder.class));
        });
  }

  /** doWith(BiConsumer) additionally passes the application Config. */
  @SuppressWarnings("unchecked")
  @Test
  public void with2ArgsCallback() throws Exception {
    new MockUnit(Env.class, Config.class, Binder.class, Gson.class, BiConsumer.class)
        .expect(body)
        .expect(unit -> {
          unit.get(BiConsumer.class).accept(unit.get(GsonBuilder.class), unit.get(Config.class));
        })
        .run(unit -> {
          new Gzon()
              .doWith(unit.get(BiConsumer.class))
              .configure(unit.get(Env.class), unit.get(Config.class), unit.get(Binder.class));
        });
  }
}
#!/usr/bin/env bash
# Sample identity setup for Athenz: registers an example service in ZMS,
# waits for ZTS to sync, authorizes ZTS as a launch provider, and fetches
# an identity certificate for the service.
# Requires in the environment: BASE_DIR, ZMS_HOST/ZMS_PORT, ZTS_HOST/ZTS_PORT,
# ATHENZ_CA_PATH, DOMAIN_ADMIN_CERT_KEY_PATH, DOMAIN_ADMIN_CERT_PATH.

# to setup-scripts directory
cd "$(dirname "$0")"

# import functions (provides colored_cat)
. ./common/color-print.sh

cat <<'EOF' | colored_cat c
#################################################
### sample identity setup
#################################################
EOF

### ----------------------------------------------------------------

echo ''
echo '# Steps' | colored_cat r

echo '1. create key pair to register as a service in ZMS' | colored_cat g
mkdir -p "${BASE_DIR}"/docker/sample/example-service
openssl genrsa -out "${BASE_DIR}"/docker/sample/example-service/athenz.example-service.key.pem 4096 2> /dev/null
openssl rsa -pubout -in "${BASE_DIR}"/docker/sample/example-service/athenz.example-service.key.pem -out "${BASE_DIR}"/docker/sample/example-service/athenz.example-service.pub.key

echo '2. register the example service to Athenz' | colored_cat g
# encode public key in ybase64, reference: https://github.com/yahoo/athenz/blob/545d9487a866cad10ba864b435bdb7ece390d4bf/libs/java/auth_core/src/main/java/com/yahoo/athenz/auth/util/Crypto.java#L334-L343
ENCODED_EXAMPLE_PUBLIC_KEY="$(base64 -w 0 "${BASE_DIR}"/docker/sample/example-service/athenz.example-service.pub.key | tr '\+\=\/' '\.\-\_')"
DATA='{"name": "athenz.example-service","publicKeys": [{"id": "0","key": "'"${ENCODED_EXAMPLE_PUBLIC_KEY}"'"}]}'
ZMS_URL="https://${ZMS_HOST}:${ZMS_PORT}"
ZTS_URL="https://${ZTS_HOST}:${ZTS_PORT}"

# FIX(review): this was `alias admin_curl=...`. Aliases are NOT expanded in
# non-interactive bash scripts (would need `shopt -s expand_aliases`), so the
# `admin_curl` calls in the sync loop below failed with "command not found".
# A shell function behaves as intended.
admin_curl() {
    curl --cacert "${ATHENZ_CA_PATH}" --key "${DOMAIN_ADMIN_CERT_KEY_PATH}" --cert "${DOMAIN_ADMIN_CERT_PATH}" --silent --show-error -D header.http -o response.json "$@"
}

curl --silent --fail --show-error --request PUT \
    --cacert "${ATHENZ_CA_PATH}" \
    --key "${DOMAIN_ADMIN_CERT_KEY_PATH}" \
    --cert "${DOMAIN_ADMIN_CERT_PATH}" \
    --url "${ZMS_URL}/zms/v1/domain/athenz/service/example-service" \
    --header 'content-type: application/json' \
    --data "${DATA}"

echo '3. Confirm the service in ZMS' | colored_cat g
curl --silent --fail --show-error --request GET \
    --cacert "${ATHENZ_CA_PATH}" \
    --key "${DOMAIN_ADMIN_CERT_KEY_PATH}" \
    --cert "${DOMAIN_ADMIN_CERT_PATH}" \
    --url "${ZMS_URL}/zms/v1/domain/athenz/service/example-service"; echo '';

echo '4. Confirm the service in ZTS' | colored_cat g
echo 'Wait for ZTS to sync...' | colored_cat p
# Poll ZTS until the public key registered in ZMS shows up under key id "0".
PUB_KEY_IN_ZTS=''
until [ "${ENCODED_EXAMPLE_PUBLIC_KEY}" == "${PUB_KEY_IN_ZTS}" ]
do
    admin_curl -X GET "${ZTS_URL}/zts/v1/domain/athenz/service/example-service"
    jq '.' response.json | colored_cat w
    PUB_KEY_IN_ZTS="$(jq -r '.publicKeys[]? | select(.id == "0") | .key' response.json)"
    echo 'waiting 5s...'
    sleep 5s
done
echo 'ZMS and ZTS sync-ed.' | colored_cat p

echo '5. Setting up ZTS as provider'
zms-cli -z "${ZMS_URL}/zms/v1" --key "${DOMAIN_ADMIN_CERT_KEY_PATH}" --cert "${DOMAIN_ADMIN_CERT_PATH}" -c "${ATHENZ_CA_PATH}" \
    -d sys.auth add-regular-role providers sys.auth.zts
zms-cli -z "${ZMS_URL}/zms/v1" --key "${DOMAIN_ADMIN_CERT_KEY_PATH}" --cert "${DOMAIN_ADMIN_CERT_PATH}" -c "${ATHENZ_CA_PATH}" \
    -d sys.auth add-policy providers grant launch to providers on 'instance'
zms-cli -z "${ZMS_URL}/zms/v1" --key "${DOMAIN_ADMIN_CERT_KEY_PATH}" --cert "${DOMAIN_ADMIN_CERT_PATH}" -c "${ATHENZ_CA_PATH}" \
    -d sys.auth add-regular-role provider.sys.auth.zts sys.auth.zts
zms-cli -z "${ZMS_URL}/zms/v1" --key "${DOMAIN_ADMIN_CERT_KEY_PATH}" --cert "${DOMAIN_ADMIN_CERT_PATH}" -c "${ATHENZ_CA_PATH}" \
    -d sys.auth add-policy provider.sys.auth.zts grant launch to provider.sys.auth.zts on 'dns.zts.athenz.cloud'
zms-cli -z "${ZMS_URL}/zms/v1" --key "${DOMAIN_ADMIN_CERT_KEY_PATH}" --cert "${DOMAIN_ADMIN_CERT_PATH}" -c "${ATHENZ_CA_PATH}" \
    -d sys.auth set-service-endpoint zts class://com.yahoo.athenz.instance.provider.impl.InstanceZTSProvider

# FIX(review): this step was also numbered '5.'; renumbered 6/7/8 below.
echo '6. Allowing ZTS provider to launch example-service'
zms-cli -z "${ZMS_URL}/zms/v1" --key "${DOMAIN_ADMIN_CERT_KEY_PATH}" --cert "${DOMAIN_ADMIN_CERT_PATH}" -c "${ATHENZ_CA_PATH}" \
    -d athenz set-domain-template zts_instance_launch_provider service=example-service

echo '7. Get identity certificate for example-service from ZTS using ZTS as a provider' | colored_cat g
# Retry until provider authorization has propagated and the cert is written.
until test -e "${BASE_DIR}/docker/sample/example-service/athenz.example-service.cert.pem" ; do
    zts-svccert -domain athenz -service example-service \
        -private-key "${BASE_DIR}"/docker/sample/example-service/athenz.example-service.key.pem -key-version 0 -zts "${ZTS_URL}"/zts/v1 \
        -dns-domain zts.athenz.cloud -cert-file "${BASE_DIR}"/docker/sample/example-service/athenz.example-service.cert.pem \
        -cacert "${ATHENZ_CA_PATH}" -provider sys.auth.zts -instance instance123
    echo "waiting for 30s for ZTS to get provider authorization" | colored_cat y
    sleep 30
done

echo '8. verify cert CN'
openssl x509 -in "${BASE_DIR}"/docker/sample/example-service/athenz.example-service.cert.pem -noout -subject
echo ''
const BaseComponent = require('../BaseComponent'); module.exports = class HelpersComponent extends BaseComponent { constructor(opts = {}) { super({ ...opts }); } build(parent) { const self = this; return { /** * Parse composed format * * @param {object} dummy dummy instance * @param {string} string string to parse */ parseFormat: (dummy, string) => { if (typeof string !== 'string' || !string.length) return ''; const start = string.search('{{'); const end = string.search('}}'); if (start === -1 && end === -1) return string; let method = string.substring(start + 2, end); let params; const regExp = /\(([^)]+)\)/; const matches = regExp.exec(method); if (matches) { method = method.replace(regExp, ''); const parameters = matches[1]; try { params = JSON.parse(parameters); } catch (err) { params = parameters; } } const parts = method.split('.'); if (typeof dummy[parts[0]] === 'undefined') { throw new Error(`Invalid module: ${parts[0]}`); } if (typeof dummy[parts[0]][parts[1]] === 'undefined') { throw new Error(`Invalid method: ${parts[0]}.${parts[1]}`); } const fn = dummy[parts[0]][parts[1]]; let result; if (typeof params === 'string' && !params.length) { result = fn.call(this); } else { result = fn.call(this, params); } const res = string.replace(`{{${method}}}`, result || '').trim(); return parent.helpers.parseFormat(dummy, res); }, }; } };
#!/bin/sh
# Copy the bot's log file from the remote host into the current directory.
# Requires POKEDEXBOTHOST and POKEDEXBOTPATH to be set in the environment.
# FIX(review): quote the expansion — unquoted, a host/path containing spaces
# or glob characters would be word-split/globbed before reaching scp.
scp "${POKEDEXBOTHOST}:${POKEDEXBOTPATH}/log.txt" .
#!/bin/bash

# Copyright 2013 Johns Hopkins University (author: Daniel Povey)
# Apache 2.0

# This script copies and modifies a data directory while combining
# segments whose duration is lower than a specified minimum segment
# length.
#
# Note: this does not work for the wav.scp, since there is no natural way to
# concatenate segments; you have to operate on directories that already have
# features extracted.

# begin configuration section
cleanup=true
# end configuration section

. subtools/kaldi/utils/parse_options.sh

if [ $# != 3 ]; then
  echo "Usage: "
  echo " $0 [options] <srcdir> <min-segment-length-in-seconds> <dir>"
  echo "e.g.:"
  echo " $0 data/train 1.55 data/train_comb"
  # options documentation here.
  exit 1;
fi

export LC_ALL=C

srcdir=$1
min_seg_len=$2
dir=$3

if [ "$dir" == "$srcdir" ]; then
  echo "$0: this script requires <srcdir> and <dir> to be different."
  exit 1
fi

for f in $srcdir/utt2spk $srcdir/feats.scp; do
  [ ! -s $f ] && echo "$0: expected file $f to exist and be nonempty" && exit 1
done

if ! awk '{if (NF != 2) exit(1);}' <$srcdir/feats.scp; then
  echo "$0: could not combine short segments because $srcdir/feats.scp has "
  echo " entries with too many fields"
  # FIX(review): the script previously printed this error but carried on,
  # producing a broken output directory; fail fast instead.
  exit 1
fi

if ! mkdir -p $dir; then
  echo "$0: could not create directory $dir"
  exit 1;
fi

if ! subtools/kaldi/utils/validate_data_dir.sh $srcdir; then
  echo "$0: failed to validate input directory $srcdir. If needed, run subtools/kaldi/utils/fix_data_dir.sh $srcdir"
  exit 1
fi

# sanity-check that <min-segment-length-in-seconds> is a number in (0, 100).
if ! python -c "x=float('$min_seg_len'); assert(x>0.0 and x<100.0);" 2>/dev/null; then
  echo "$0: bad <min-segment-length-in-seconds>: got '$min_seg_len'"
  exit 1
fi

set -e
set -o pipefail

# make sure $srcdir/utt2dur exists.
subtools/kaldi/utils/data/get_utt2dur.sh $srcdir

subtools/kaldi/utils/data/internal/choose_utts_to_combine.py --min-duration=$min_seg_len \
  $srcdir/spk2utt $srcdir/utt2dur $dir/utt2utts $dir/utt2spk $dir/utt2dur

subtools/kaldi/utils/utt2spk_to_spk2utt.pl < $dir/utt2spk > $dir/spk2utt

# create the feats.scp.
# if a line of utt2utts is like 'utt2-comb2 utt2 utt3', then
# the subtools/kaldi/utils/apply_map.pl will create a line that looks like
# 'utt2-comb2 foo.ark:4315 foo.ark:431423'
# and the awk command creates suitable command lines like:
# 'utt2-comb2 concat-feats foo.ark:4315 foo.ark:431423 - |'
subtools/kaldi/utils/apply_map.pl -f 2- $srcdir/feats.scp <$dir/utt2utts | \
  awk '{if (NF<=2){print;} else { $1 = $1 " concat-feats --print-args=false"; $NF = $NF " - |"; print; }}' > $dir/feats.scp

# create $dir/text by concatenating the source 'text' entries for the original
# utts.
subtools/kaldi/utils/apply_map.pl -f 2- $srcdir/text <$dir/utt2utts > $dir/text

if [ -f $srcdir/utt2uniq ]; then
  # the utt2uniq file is such that if 2 utts were derived from the same original
  # utt (e.g. by speed perturbing) they map to the same 'uniq' value. This is
  # so that we can properly hold out validation data for neural net training and
  # know that we're not training on perturbed verions of that utterance. We
  # need to obtain the utt2uniq file so that if any 2 'new' utts contain any of
  # the same 'old' utts, their 'uniq' values are the same [but otherwise as far
  # as possible, the 'uniq' values are different.]
  #
  # we'll do this by arranging the old 'uniq' values into groups as necessary to
  # capture this property.

  # The following command creates 'uniq_sets', each line of which contains
  # a set of original 'uniq' values, and effectively we assert that they must
  # be grouped together to the same 'uniq' value.
  # the first awk command prints a group of the original utterance-ids that
  # are combined together into a single new utterance, and the apply_map
  # command converts those into a list of original 'uniq' values.
  awk '{$1 = ""; print;}' < $dir/utt2utts | \
    subtools/kaldi/utils/apply_map.pl $srcdir/utt2uniq > $dir/uniq_sets

  # The next command creates $dir/uniq2merged_uniq, which is a map from the
  # original 'uniq' values to the 'merged' uniq values.
  # for example, if $dir/uniq_sets were to contain
  # a b
  # b c
  # d
  # then we'd obtain a uniq2merged_uniq file that looks like:
  # a a
  # b a
  # c a
  # d d
  # ... because a and b appear together, and b and c appear together,
  # they have to be merged into the same set, and we name that set 'a'
  # (in general, we take the lowest string in lexicographical order).
  # FIX(review): the inline script used the Python-2-only `print a, b`
  # statement; with `print_function` imported it now runs under both
  # Python 2 and Python 3 with identical (space-separated) output.
  cat $dir/uniq_sets | LC_ALL=C python -c '
from __future__ import print_function
import sys; from collections import defaultdict
uniq2orig_uniq = dict()
equal_pairs = set()  # set of 2-tuples (a,b) which should have equal orig_uniq
while True:
    line = sys.stdin.readline()
    if line == "":
        break
    split_line = line.split()  # list of uniq strings that should map in same set
    # initialize uniq2orig_uniq to the identity mapping
    for uniq in split_line:
        uniq2orig_uniq[uniq] = uniq
    for a in split_line[1:]:
        equal_pairs.add((split_line[0], a))

# propagate the lexicographically-smallest representative through the
# equality pairs until a fixed point is reached (transitive closure).
changed = True
while changed:
    changed = False
    for a, b in equal_pairs:
        min_orig_uniq = min(uniq2orig_uniq[a], uniq2orig_uniq[b])
        for x in [a, b]:
            if uniq2orig_uniq[x] != min_orig_uniq:
                uniq2orig_uniq[x] = min_orig_uniq
                changed = True

for uniq in sorted(uniq2orig_uniq.keys()):
    print(uniq, uniq2orig_uniq[uniq])
' > $dir/uniq_to_orig_uniq

  rm $dir/uniq_sets

  # In the following command, suppose we have a line like:
  # utt1-comb2 utt1 utt2
  # .. the first awk command retains only the first original utt, to give
  # utt1-comb2 utt1
  # [we can pick one arbitrarily since we know any of them would map to the same
  # orig_uniq value.]
  # the first apply_map.pl command maps the 'utt1' to the 'uniq' value it mapped to
  # in $srcdir, and the second apply_map.pl command maps it to the grouped 'uniq'
  # value obtained by the inline python script above.
  awk '{print $1, $2}' < $dir/utt2utts | subtools/kaldi/utils/apply_map.pl -f 2 $srcdir/utt2uniq | \
    subtools/kaldi/utils/apply_map.pl -f 2 $dir/uniq_to_orig_uniq > $dir/utt2uniq

  rm $dir/uniq_to_orig_uniq
fi

# note: the user will have to recompute the cmvn, as the speakers may have changed.
rm $dir/cmvn.scp 2>/dev/null || true

subtools/kaldi/utils/validate_data_dir.sh --no-wav $dir

if $cleanup; then
  rm $dir/utt2utts
fi
package amz

// DescribeSecurityGroupsResponse models the XML response body of the EC2
// DescribeSecurityGroups API call, as unmarshalled by encoding/xml.
type DescribeSecurityGroupsResponse struct {
	// RequestId is taken from the <requestId> element of the response.
	RequestId string `xml:"requestId"`
	// SecurityGroupInfo collects one SecurityGroup per <item> element
	// nested under <securityGroupInfo>.
	SecurityGroupInfo []SecurityGroup `xml:"securityGroupInfo>item"`
}