text stringlengths 1 1.05M |
|---|
import * as React from 'react';
import * as db from '../interfaces/db';
import SubscribedGoalItem from './SubscribedGoalItem';
import Card from 'material-ui/Card';
const plus = require('./plus.png');
import { RouteComponentProps } from 'react-router-dom';
import { withRouter } from 'react-router';
type P = { db: db.DBStructure };
/**
 * Dashboard section showing the first customer's subscribed goals as cards,
 * followed by an "Add new Dream" tile that navigates back to the goal picker.
 */
class SubscribedGoals extends React.Component<RouteComponentProps<P> & P> {
  render() {
    return (
      <div>
        <div style={{ display: 'flex', flexFlow: 'row', flexWrap: 'wrap' }}>{this.subscribedGoals.map(sg => this.renderGoal(sg))}</div>
        <div style={{ display: 'flex', flexFlow: 'row', flexWrap: 'wrap' }}>{this.renderNewGoal()}</div>
      </div>
    );
  }
  /** Renders one subscribed-goal card; goalId doubles as the React key. */
  private renderGoal(subscribedGoal: db.SubscribedGoal) {
    return (
      <div style={{ width: '50%' }} key={subscribedGoal.goalId}>
        <Card style={{ margin: '20px' }}>
          <SubscribedGoalItem goal={this.goal(subscribedGoal.goalId)} />
        </Card>
      </div>
    );
  }
  /** "Add new Dream" tile; clicking routes to `/` where goals are added. */
  private renderNewGoal() {
    return (
      <div onClick={() => this.props.history.push(`/`)} style={{ width: '50%', cursor: 'pointer' }}>
        <Card style={{ margin: '20px' }}>
          <div style={{ textAlign: 'center' }}>
            {/* BUG FIX: image previously had no alt text (accessibility). */}
            <img alt="Add new dream" style={{ width: '45%', paddingTop: '30px', height: 'auto', objectFit: 'contain' }} src={plus} />
          </div>
          <div style={{ textAlign: 'center', color: '#e91e63', paddingTop: '20px', paddingBottom: '20px', fontSize: '40px' }}>
            Add new Dream
          </div>
        </Card>
      </div>
    );
  }
  // NOTE(review): assumes customers[0] always exists — confirm upstream guarantee.
  private get subscribedGoals() {
    return this.props.db.customers[0].subscribedGoals;
  }
  /** Looks up the full goal record by id from the db snapshot. */
  private goal(goalId: string) {
    return this.props.db.goals[goalId];
  }
}
export default withRouter(SubscribedGoals); |
using System;
public class Fibonacci
{
    /// <summary>
    /// Prints the first ten Fibonacci numbers, space-separated, on one line.
    /// </summary>
    public static void Main(string[] args)
    {
        const int termCount = 10;
        int previous = 0;
        int current = 1;

        // The first two terms are seeded directly.
        Console.Write(previous + " " + current + " ");

        // Each remaining term is the sum of the two before it.
        for (int term = 2; term < termCount; term++)
        {
            int next = previous + current;
            previous = current;
            current = next;
            Console.Write(next + " ");
        }

        Console.WriteLine();
    }
}
<reponame>kal727/l5r-sandbox
/* global describe, it, expect, beforeEach, integration */
/* eslint camelcase: 0, no-invalid-this: 0 */
// Integration spec for the "Shield of Lannisport" attachment.
// NOTE: all callbacks are `function () {}` (not arrows) because the
// integration harness binds the game fixture onto `this`.
describe('Shield of Lannisport', function() {
    integration(function() {
        beforeEach(function() {
            // Both players run the same deck containing the cards under test.
            // ('<NAME>' placeholders stem from dataset anonymisation.)
            const deck = this.buildDeck('greyjoy', [
                'A Noble Cause',
                'Shield of Lannisport', 'Tyrion Lannister (Core)', '<NAME> (Core)', '<NAME>'
            ]);
            this.player1.selectDeck(deck);
            this.player2.selectDeck(deck);
            this.startGame();
            this.keepStartingHands();
            // Put Tyrion into play during setup so the shield has a target.
            this.tyrion = this.player1.findCardByName('Tyrion Lannister', 'hand');
            this.player1.clickCard(this.tyrion);
            this.completeSetup();
            this.player1.selectPlot('A Noble Cause');
            this.player2.selectPlot('A Noble Cause');
            this.selectFirstPlayer(this.player1);
            // Attach Shield to Tyrion
            this.player1.clickCard('Shield of Lannisport', 'hand');
            this.player1.clickCard(this.tyrion);
        });
        it('should grant +2 STR and renown', function() {
            // Tyrion's base strength is 4 per the assertions below.
            expect(this.tyrion.getStrength()).toBe(6);
            expect(this.tyrion.hasKeyword('Renown')).toBe(true);
        });
        describe('when another Lord or Lady is in play', function() {
            describe('and they are of cost 4 or more', function() {
                beforeEach(function() {
                    this.player1.clickCard('<NAME>', 'hand');
                });
                it('should remove the bonuses', function() {
                    expect(this.tyrion.getStrength()).toBe(4);
                    expect(this.tyrion.hasKeyword('Renown')).toBe(false);
                });
            });
            describe('and they are below cost 4', function() {
                beforeEach(function() {
                    this.player1.clickCard('<NAME>', 'hand');
                });
                it('should retain the bonuses', function() {
                    expect(this.tyrion.getStrength()).toBe(6);
                    expect(this.tyrion.hasKeyword('Renown')).toBe(true);
                });
            });
        });
    });
});
|
import { GetServerSideProps } from 'next'
import handleWrappedCases from './handleWrappedCases'
import {
SrrHandlerGenerator,
SsrCaseHandler,
SsrContextGenerationErrorHandler,
SsrHandler,
SsrHandlerConfig,
} from './types'
import wrapCaseHandler from './wrapCaseHandler'
// Fallback error handler: log the underlying cause (when it looks like an
// Error) and then fail with a stable, user-safe message.
const defaultContextGenerationErrorHandler: SsrContextGenerationErrorHandler = (
  err
) => {
  const { message } = err as Error
  if (message) {
    console.error(message)
  }
  throw new Error(`Failed to generate SSR context.`)
}
/**
 * Builds an SSR handler factory from the given config. The returned
 * `ssrHandler(pageCaseHandlers)` produces a Next.js `getServerSideProps`
 * that (1) generates a per-request context, (2) redirects to the error page
 * if context generation fails, and (3) runs global + page case handlers.
 */
const generateSsrHandler: SrrHandlerGenerator = <ContextType>(
  config: SsrHandlerConfig<ContextType>
) => {
  // Deconstruct config
  const {
    contextGenerator,
    globalCaseHandlers,
    onContextGenerationError,
    onCaseHandlingError,
    getErrorPageUrl,
  } = config
  // Generate SSR handler
  const ssrHandler: SsrHandler<ContextType> = (
    pageCaseHandlers: Array<SsrCaseHandler<ContextType>>
  ) => {
    // Global handlers run before page-specific ones.
    const allCaseHandlers = [...globalCaseHandlers, ...pageCaseHandlers]
    // Generate getServerSideProps function
    // NOTE(review): the original destructured an unused `req` from
    // nextContext here; removed.
    const getServerSideProps: GetServerSideProps = async (nextContext) => {
      // Generate context, reporting failure instead of throwing so the
      // caller below can redirect. If the configured error handler throws,
      // that propagates out of getServerSideProps.
      const generateContext: () => Promise<{
        success: boolean
        context: ContextType | null
        err?: unknown
      }> = async () => {
        try {
          const context = await contextGenerator(nextContext)
          return {
            success: true,
            context,
          }
        } catch (err) {
          const handleError =
            onContextGenerationError ?? defaultContextGenerationErrorHandler
          handleError(err, nextContext)
          return {
            success: false,
            context: null,
            err,
          }
        }
      }
      const {
        success: wasContextGenerated,
        context: ambiguousContext,
        err: contextGenerationError,
      } = await generateContext()
      // Context generation failed (and the error handler chose not to
      // throw): redirect to the configured error page.
      if (!wasContextGenerated) {
        const errorPageUrl = getErrorPageUrl(
          contextGenerationError,
          nextContext
        )
        return {
          redirect: {
            permanent: false,
            destination: errorPageUrl,
          },
        }
      }
      // Safe: `context` is non-null whenever wasContextGenerated is true.
      const context = ambiguousContext as ContextType
      // Wrap case handlers so each shares the context and error plumbing.
      const wrappedCaseHandlers = allCaseHandlers.map((caseHandler) =>
        wrapCaseHandler(
          caseHandler,
          context,
          getErrorPageUrl,
          onCaseHandlingError
        )
      )
      // Handle cases
      const ssrResult = await handleWrappedCases(
        nextContext,
        wrappedCaseHandlers
      )
      return ssrResult
    }
    return getServerSideProps
  }
  return ssrHandler
}
export default generateSsrHandler
|
package icbm.compat.waila;
import icbm.Settings;
import icbm.content.tile.TileCamouflage;
import mcp.mobius.waila.api.IWailaRegistrar;
/**
 * Hooks ICBM tooltip providers into the Waila HUD plugin.
 */
public class WailaRegistrar
{
    // Config key for hiding camouflage-block tooltips.
    // NOTE(review): the string value ("wailaCamoBlockWailaHide") differs from
    // the field name — confirm which spelling the settings file expects before
    // renaming either.
    public static final String wailaCamoBlockHide = "wailaCamoBlockWailaHide";

    /** Waila entry point: registers the camouflage-block body provider. */
    public static void registerWailaTooltip(IWailaRegistrar registrar) {
        registrar.registerBodyProvider(new WailaCamoBlockDataProvider(), TileCamouflage.class);
    }
}
import React from "react";
import { Link } from "gatsby";
import select from "../components/utils";
import { FormattedMessage } from "react-intl";
import menuTree from "../data/menuTree";
import {
FaFacebook,
FaTwitter,
FaInstagram,
FaVimeo,
FaLinkedin,
} from "react-icons/fa";
import Copyright from "../components/Copyright";
import ScrollToTop from "../components/ScrollToTop";
const Footer = class extends React.Component {
render() {
const props = this.props;
const sel = select(props.langKey);
return (
<footer>
<div className="content has-text-centered">
<hr />
<a
title="facebook"
target="_blank"
href="https://www.facebook.com/fengshuimoderndesign/"
>
<FaFacebook className="facebook-icon" size="2em" />
</a>
<a
title="instagram"
target="_blank"
href="https://instagram.com/fengshui.home"
>
<FaInstagram className="instagram-icon" size="2em" />
</a>
<p><FormattedMessage id="rights" /></p>
</div>
<ScrollToTop />
</footer>
);
}
};
export default Footer;
|
package com.boomi.flow.external.storage.states;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
/**
 * Persistence operations for serialized flow states, partitioned by tenant.
 */
public interface StateRepository {
    /** Deletes the states with the given ids belonging to the tenant. */
    void delete(UUID tenant, List<UUID> ids);

    /** Returns the serialized state for the given id, if present. */
    Optional<String> find(UUID tenant, UUID id);

    /** Persists the given states for the tenant. */
    void save(UUID tenant, List<State> states);
}
|
package com.springsecurity.loginserver.repository;
import com.springsecurity.loginserver.entity.Account;
import org.springframework.data.jpa.repository.JpaRepository;
import java.util.Optional;
/**
 * Spring Data JPA repository for {@link Account} entities.
 */
public interface AccountRepository extends JpaRepository<Account, Long> {
    /** Derived query: looks up an account by its unique username. */
    Optional<Account> findByUsername(String username);
}
|
package frc.robot;
/**
 * Central registry of hardware IDs for the robot.
 * NOTE(review): 55/56 are presumably CAN IDs for the two shooter motor
 * controllers — confirm against the robot wiring documentation.
 */
public class RobotMap {
    public static final int LEFT_SHOOTER_ID = 55;
    public static final int RIGHT_SHOOTER_ID = 56;
}
|
<reponame>eengineergz/Lambda
// Symbols keep the internal check routines off the public, string-keyed API.
const checker = Symbol('check');
const isArray = Symbol('array');
const isObject = Symbol('object');
const isInteger = Symbol('integer');
const isDouble = Symbol('double');
const isString = Symbol("string");
const isFunction = Symbol("function");
const isClass = Symbol("class");
const isEmpty = Symbol("empty");

/**
 * Grab-bag of runtime type checks and small string utilities.
 * Usage: `u.is(value, u.Array)`, `u.count(str, 'needle')`, `u.format(...)`.
 */
class Util {
    constructor() {}

    // Type-name constants, exposed as getters so callers can write
    // e.g. `u.is(x, u.Array)` without magic strings.
    get Array() {
        return "Array";
    }
    get Object() {
        return "Object";
    }
    get Integer() {
        return "Integer";
    }
    get Double() {
        return "Double";
    }
    get String() {
        return "String";
    }
    get Function() {
        return "Function";
    }
    get Class() {
        return "Class";
    }
    get Letter() {
        return "Letter";
    }
    get Empty() {
        return "Empty";
    }
    get Blank() {
        return "Blank";
    }
    get Element() {
        return "Element";
    }
    get Email() {
        return "Email";
    }

    [isArray](data) {
        return Array.isArray(data);
    }

    // Plain object literal (constructor is exactly Object; excludes arrays,
    // class instances, etc.).
    [isObject](data) {
        return data instanceof Object && data.constructor === Object;
    }

    [isInteger](data) {
        if (isNaN(data)) {
            return false;
        }
        const x = parseFloat(data);
        // Bitwise OR truncates toward zero, so equality holds only for
        // (32-bit) integral values.
        return (x | 0) === x;
    }

    // "Double" here means: numeric and NOT integral.
    [isDouble](data) {
        if (isNaN(data)) {
            return false;
        }
        return Math.round(data) !== data;
    }

    [isString](data) {
        return data.constructor === String && Object.prototype.toString.call(data) === '[object String]';
    }

    // Heuristic: callable AND its source starts with '(' (arrow) or 'function'.
    [isFunction](data) {
        const isFunc = ((Object.prototype.toString.call(data) === '[object Function]' ||
            data.constructor === Function) &&
            this.startsWith(data, '('));
        return isFunc || this.startsWith(data, 'function');
    }

    // Heuristic: callable AND its source starts with 'class'.
    [isClass](data) {
        const looksCallable = (Object.prototype.toString.call(data) === '[object Function]' ||
            data.constructor === Function);
        return looksCallable && this.startsWith(data, 'class');
    }

    // 'Empty': falsy or zero length; 'Blank': zero length or whitespace-only.
    [isEmpty](data, type) {
        let ret = false;
        switch (type.toString()) {
            case 'Empty':
                ret = (!data || 0 === data.length);
                break;
            case 'Blank':
                ret = (data.length === 0 || !data.trim());
                break;
            default:
                ret = false;
                break;
        }
        return ret;
    }

    [checker](data, type) {
        // 'checked'/'visible' were meant to route to an element checker that
        // was never implemented; the original referenced an undefined
        // `isElement` binding and threw a bare ReferenceError. Fail with a
        // clear message instead.
        if (['checked', 'visible'].indexOf(type) > -1) {
            throw new Error(`"${type}" (Element) checks are not implemented`);
        }
        // BUG FIX: the original built an object whose values eagerly invoked
        // EVERY checker, so e.g. `is(null, 'Array')` crashed inside the
        // string checker. Dispatch lazily and run only the requested check.
        const checks = {
            'Array': () => this[isArray](data),
            'Object': () => this[isObject](data),
            'Integer': () => this[isInteger](data),
            'Double': () => this[isDouble](data),
            'String': () => this[isString](data),
            'Function': () => this[isFunction](data),
            'Class': () => this[isClass](data),
            'Empty': () => this[isEmpty](data, type),
            'Blank': () => this[isEmpty](data, type),
        };
        const check = checks[type];
        // Unknown type names yield undefined, matching the original contract.
        return check ? check() : undefined;
    }

    /** True when `data`'s string form starts with `search` (at optional `pos`). */
    startsWith(data, search, pos) {
        return data.toString().substr(!pos || pos < 0 ? 0 : +pos, search.length) === search;
    }

    /**
     * Replaces `{0}`, `{1}`/`{key}` placeholders using either positional
     * arguments or a single array/object of substitutions.
     */
    format(strVal) {
        let str = strVal.toString();
        if (arguments.length) {
            let t = typeof arguments[1];
            let key;
            let args = ("string" === t || "number" === t) ?
                Array.prototype.slice.call(arguments) :
                arguments[1];
            for (key in args) {
                str = str.replace(new RegExp("\\{" + key + "\\}", "gi"), args[key]);
            }
        }
        return str;
    }

    /** Public entry point for all type checks. */
    is(data, type) {
        return this[checker](data, type);
    }

    /** String containment with a pre-`String.prototype.includes` fallback. */
    includes(strVal, search) {
        if (!String.prototype.includes) {
            return strVal.indexOf(search) > -1;
        } else {
            return strVal.includes(search);
        }
    }

    /**
     * Counts occurrences of `search` in `str`; the special type names
     * Letter/Array/Object count characters, elements, or keys instead.
     */
    count(str, search) {
        let count = 0;
        if (search === this.Letter && !this.is(str, this.Array)) {
            count = str.length;
        } else if (search === this.Array && this.is(str, this.Array)) {
            count = str.length;
        } else if (search === this.Object && this.is(str, this.Object)) {
            count = Object.keys(str).length;
        } else {
            const re = new RegExp('(' + search + ')', 'g');
            try {
                count = str.match(re).length;
            } catch (error) {
                // No match (or non-string input): leave count at 0.
            }
        }
        // BUG FIX: the original only returned in the final `else`, so the
        // Letter/Array/Object branches fell through and returned undefined.
        return count;
    }
}
//------------------------------------------------------------------------------------------------
// ---------------------------------------------------------------------------
// Manual smoke-test / demo of Util. The block comments below each call record
// the observed console output.
const OmightyMagicConchShell = new Util();
console.log( 'OmightyMagicConchShell : ', OmightyMagicConchShell );
//OmightyMagicConchShell : Util {}
//------------------------------------------------------------------------------------------------
console.log( OmightyMagicConchShell.format( 'Hi {0}. Did you see the {1}?', [ 'Pikachu', 'Meow' ] ) );
// Hi Pikachu.Did you see the Meow ?
//------------------------------------------------------------------------------------------------
console.log( OmightyMagicConchShell.includes( 'Ali Baba', 'Baba' ) );
/*
true
*/
//------------------------------------------------------------------------------------------------
console.log( OmightyMagicConchShell.is( [ 1, 2, 3 ], OmightyMagicConchShell.Array ) );
/*
true
*/
//------------------------------------------------------------------------------------------------
console.log( OmightyMagicConchShell.is( {
'a': 'v'
}, OmightyMagicConchShell.Object ) )
/*
true
*/
//------------------------------------------------------------------------------------------------
console.log( OmightyMagicConchShell.is( 5.1, OmightyMagicConchShell.Integer ) );
/*
false
*/
//------------------------------------------------------------------------------------------------
console.log( OmightyMagicConchShell.is( 3.2, OmightyMagicConchShell.Double ) );
/*
true
*/
//------------------------------------------------------------------------------------------------
console.log( OmightyMagicConchShell.is( 3.2, OmightyMagicConchShell.Double ) );
/*
true
*/
//------------------------------------------------------------------------------------------------
console.log( 'OmightyMagicConchShell.is( 3.2, OmightyMagicConchShell.Double ): ', OmightyMagicConchShell.is( 3.2, OmightyMagicConchShell.Double ) );
/*
OmightyMagicConchShell.is( 3.2, OmightyMagicConchShell.Double ): true
*/
//------------------------------------------------------------------------------------------------
console.log( ' OmightyMagicConchShell.is( Ali + 123, OmightyMagicConchShell.String ): ', OmightyMagicConchShell.is( "Ali" + 123, OmightyMagicConchShell.String ) );
/*
OmightyMagicConchShell.is( Ali + 123, OmightyMagicConchShell.String ): true
*/
//------------------------------------------------------------------------------------------------
OmightyMagicConchShell.is( 'Ali' + 123, OmightyMagicConchShell.String )
console.log( 'OmightyMagicConchShell.is( Ali+ 123, OmightyMagicConchShell.String ) : ', OmightyMagicConchShell.is( 'Ali' + 123, OmightyMagicConchShell.String ) );
/*
OmightyMagicConchShell.is( Ali + 123, OmightyMagicConchShell.String ): true
*/
//------------------------------------------------------------------------------------------------
//OmightyMagicConchShell.is( 'Ali' + 123, OmightyMagicConchShell.String )
console.log( ' OmightyMagicConchShell.is(Ali + 123, OmightyMagicConchShell.String ): ', OmightyMagicConchShell.is( 'Ali' + 123, OmightyMagicConchShell.String ) );
/*
true
*/
//------------------------------------------------------------------------------------------------
const a = ( d ) => {}
//OmightyMagicConchShell.is( a , OmightyMagicConchShell.Function )
console.log( 'OmightyMagicConchShell.is( a , OmightyMagicConchShell.Function ): ', OmightyMagicConchShell.is( a, OmightyMagicConchShell.Function ) );
/*
OmightyMagicConchShell.is( a, OmightyMagicConchShell.Function ): true
*/
//OmightyMagicConchShell.is( function a( d ) {}, OmightyMagicConchShell.Function )
console.log( 'OmightyMagicConchShell.is( function a( d ) {}, OmightyMagicConchShell.Function ): ', OmightyMagicConchShell.is( function a( d ) {}, OmightyMagicConchShell.Function ) );
/*
OmightyMagicConchShell.is( function a( d ) {}, OmightyMagicConchShell.Function ): true
*/
//------------------------------------------------------------------------------------------------
// OmightyMagicConchShell.is( class b {}, OmightyMagicConchShell.Class )
console.log( 'OmightyMagicConchShell.is( class b {}, OmightyMagicConchShell.Class ): ', OmightyMagicConchShell.is( class b {}, OmightyMagicConchShell.Class ) );
/*
OmightyMagicConchShell.is( class b {}, OmightyMagicConchShell.Class ): true
*/
//------------------------------------------------------------------------------------------------
OmightyMagicConchShell.count( 'My name is no name when I do not like names. What is your name? Can u say your naming conversion', 'name' )
console.log( 'OmightyMagicConchShell.count( My name is no name when I do not like names. What is your name? Can u say your naming conversion, name): ', OmightyMagicConchShell.count( 'My name is no name when I do not like names. What is your name? Can u say your naming conversion', 'name' ) );
/*
OmightyMagicConchShell.count( My name is no name when I do not like names.What is your name ? Can u say your naming conversion, name ): 4
*/
//------------------------------------------------------------------------------------------------
// OmightyMagicConchShell.count( {
// 'w': 't',
// 'w2': 't2',
// 'wf': 'wf',
// 'wfs': 'wfs2'
// }, OmightyMagicConchShell.Object )
// NOTE(review): the `undefined` captured below is a symptom of count()'s
// missing `return count` in the Object/Array/Letter branches — likely a bug,
// not the intended result.
console.log( 'OmightyMagicConchShell.count( {w:t,w2:t2,wf:wf,wfs:wfs2 } OmightyMagicConchShell.Object ): ', OmightyMagicConchShell.count( {
'w': 't',
'w2': 't2',
'wf': 'wf',
'wfs': 'wfs2'
}, OmightyMagicConchShell.Object ) );
/*
!OmightyMagicConchShell.count( {
! w: t,
! w2: t2,
! wf: wf,
! wfs: wfs2
! }
! OmightyMagicConchShell.Object ): undefined
*/
//------------------------------------------------------------------------------------------------
|
<gh_stars>0
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.brooklyn.camp.test.mock.web;
import javax.annotation.Nullable;
import brooklyn.util.guava.Maybe;
import io.brooklyn.camp.BasicCampPlatform;
import io.brooklyn.camp.spi.ApplicationComponentTemplate;
import io.brooklyn.camp.spi.AssemblyTemplate;
import io.brooklyn.camp.spi.PlatformComponentTemplate;
import io.brooklyn.camp.spi.collection.BasicResourceLookup;
import io.brooklyn.camp.spi.collection.ResolvableLink;
import io.brooklyn.camp.spi.instantiate.AssemblyTemplateInstantiator;
import io.brooklyn.camp.spi.pdp.Artifact;
import io.brooklyn.camp.spi.pdp.AssemblyTemplateConstructor;
import io.brooklyn.camp.spi.pdp.Service;
import io.brooklyn.camp.spi.resolve.PdpMatcher;
/**
 * Mock CAMP platform pre-populated with canned component templates, used by
 * the CAMP test suite.
 */
public class MockWebPlatform {
    /** Canned application component template representing a WAR artifact. */
    public static final ApplicationComponentTemplate WAR =
        ApplicationComponentTemplate.builder()
            .name("io.camp.mock:WAR")
            .description("Mock WAR")
            .build();

    /** Canned platform component template for an application server. */
    public static final PlatformComponentTemplate APPSERVER =
        PlatformComponentTemplate.builder()
            .name("io.camp.mock:AppServer")
            .description("Mock Application Server")
            .build();

    /** Canned platform component template for a database. */
    public static final PlatformComponentTemplate DATABASE =
        PlatformComponentTemplate.builder()
            .name("io.camp.mock:Database")
            .description("Mock Database")
            .build();

    /** Assembly template wiring the WAR into a mock web-app assembly. */
    public static final AssemblyTemplate ASSEMBLY1 =
        AssemblyTemplate.builder()
            .name("WebAppAssembly1")
            .description("Mock Web App Assembly Template")
            .applicationComponentTemplates(BasicResourceLookup.of(WAR))
            .instantiator(MockAssemblyTemplateInstantiator.class)
            .build();

    /**
     * Matcher that converts any "com.java:WAR" artifact in a deployment plan
     * into an application component template backed by {@link #WAR}.
     */
    public static final PdpMatcher WAR_GETS_WAR_MATCHER = new PdpMatcher.ArtifactMatcher("com.java:WAR") {
        public boolean apply(Object art, AssemblyTemplateConstructor atc) {
            ApplicationComponentTemplate act = ApplicationComponentTemplate.builder()
                .name( ((Artifact)art).getName() )
                .description( ((Artifact)art).getDescription() )
                .customAttribute("implementation", WAR.getName())
                .customAttribute("artifactType", ((Artifact)art).getArtifactType())
                .build();
            // TODO requirements, etc
            atc.add(act);
            return true;
        }
    };

    /**
     * Builds a matcher that maps a {@link Service} whose serviceType literally
     * names a registered platform component template onto that template,
     * optionally setting the given instantiator on the constructor.
     */
    public static final PdpMatcher newLiteralServiceTypeToPlatformComponentTemplateMatcher(final BasicCampPlatform platform, @Nullable final Class<? extends AssemblyTemplateInstantiator> instantiator) {
        return new PdpMatcher() {
            public boolean apply(Object item, AssemblyTemplateConstructor atc) {
                if (!(item instanceof Service)) return false;
                Service svc = (Service)item;
                String type = svc.getServiceType();
                for (ResolvableLink<PlatformComponentTemplate> t: platform.platformComponentTemplates().links()) {
                    if (type.equals(t.getName())) {
                        // Service description wins; fall back to the template's.
                        PlatformComponentTemplate pct = PlatformComponentTemplate.builder()
                            .name(svc.getName())
                            .customAttribute("serviceType", type)
                            .description(Maybe.fromNullable(svc.getDescription()).or(t.resolve().getDescription()))
                            .build();
                        if (atc!=null) {
                            atc.add(pct);
                            if (instantiator!=null)
                                atc.instantiator(instantiator);
                        }
                        return true;
                    }
                }
                return false;
            }
            @Override
            public boolean accepts(Object deploymentPlanItem) {
                // Dry-run: apply with a null constructor mutates nothing.
                return apply(deploymentPlanItem, null);
            }
        };
    }

    /** Populates the platform with the canned templates and matchers. */
    public static <T extends BasicCampPlatform> T populate(T platform) {
        return populate(platform, null);
    }

    /** As {@link #populate(BasicCampPlatform)}, with an optional instantiator. */
    public static <T extends BasicCampPlatform> T populate(T platform, @Nullable Class<? extends AssemblyTemplateInstantiator> instantiator) {
        platform.platformComponentTemplates().addAll(APPSERVER, DATABASE);
        platform.applicationComponentTemplates().add(WAR);
        platform.assemblyTemplates().add(ASSEMBLY1);
        platform.pdp().addMatcher(WAR_GETS_WAR_MATCHER);
        platform.pdp().addMatcher(newLiteralServiceTypeToPlatformComponentTemplateMatcher(platform, instantiator));
        return platform;
    }

    /** Convenience: a fresh, fully populated mock platform. */
    public static BasicCampPlatform newPlatform() {
        return MockWebPlatform.populate(new BasicCampPlatform());
    }
}
|
import Ably from 'ably/browser/static/ably'
/**
 * Builds an Ably Realtime client whose token requests are authorised against
 * the account's socketToken endpoint using the caller's auth key.
 */
export default function createRealtimeClient(userId, authKey) {
  const options = {
    authUrl: `/api/Accounts/${userId}/socketToken`,
    authHeaders: {
      Authorization: authKey
    }
  }
  return new Ably.Realtime(options)
}
|
#!/bin/bash
#SBATCH --gres=gpu:2 # request GPU "generic resource"
#SBATCH --cpus-per-task=6 # maximum CPU cores per GPU request: 6 on Cedar, 16 on Graham.
#SBATCH --mem=15000M # memory per node
#SBATCH --time=0-00:40 # time (DD-HH:MM)
#SBATCH --output=scripts/cnn/mnist/gen_adv/o_gen_ILLCM_ep1_iter16.out # %N for node name, %j for jobID

# Activate the Python/TensorFlow virtualenv prepared on the cluster.
source ~/tfp363/bin/activate

# Experiment configuration: generate ILLCM adversarial examples
# (epsilon=1, 16 iterations) for the CNN model trained on MNIST.
REPO_DIR=/home/xuc/Adversarial-Attack-on-CapsNets
SUMMARY_DIR=/home/xuc/scratch/xuc/summary/
MODEL=cnn
DATASET=mnist
ADVERSARIAL_METHOD=ILLCM
EPSILON=1
ITERATION_N=16

# Run the generation step; outputs land under $SUMMARY_DIR/$MODEL/$DATASET/Default/.
python $REPO_DIR/experiment.py --mode=gen_adv --data_dir=$REPO_DIR/data/$MODEL/$DATASET --dataset=$DATASET --adversarial_method=$ADVERSARIAL_METHOD --epsilon=$EPSILON --iteration_n=$ITERATION_N --summary_dir=$SUMMARY_DIR/$MODEL/$DATASET/Default/
|
<gh_stars>0
package com.twu.biblioteca;
/**
* Created by hxlin on 9/20/15.
*/
/**
 * Simple data holder for a library book (id, title, author, published year).
 */
public class Book {
    private int id;
    private String title;
    private String author;
    private String publishedYear;

    /**
     * Creates a fully populated book.
     */
    public Book(int id, String title, String author, String publishedYear) {
        // Delegate to setBook so the field-assignment logic lives in one place
        // (the original duplicated the four assignments).
        setBook(id, title, author, publishedYear);
    }

    /** No-arg constructor for callers that populate via {@link #setBook}. */
    public Book() {
    }

    /** Overwrites all fields at once. */
    public void setBook(int id, String title, String author, String publishedYear){
        this.id = id;
        this.title = title;
        this.author = author;
        this.publishedYear = publishedYear;
    }

    /** @return the book's numeric id */
    public int getId(){
        return this.id;
    }

    /** @return display string of the form {@code "id : title | author | year"} */
    public String getDetails() {
        return String.format("%d : %s | %s | %s", id, title, author, publishedYear);
    }
}
|
<gh_stars>0
/**
 * A movable body integrated with simple Euler steps and drawn as a circle.
 */
class Mover {
    position;
    velocity;
    acceleration;
    mass;
    radius;

    /** Body at (x, y) with mass m; starts falling with velocity (0, 3). */
    constructor(x, y, m) {
        this.position = new Vector2(x, y);
        this.velocity = new Vector2(0, 3);
        this.acceleration = new Vector2();
        this.mass = m;
        this.radius = 25;
    }

    /** Accumulates acceleration: a += F / m. */
    applyForce(force) {
        // BUG FIX: the original divided `force` in place, mutating the
        // caller's vector; work on a copy instead.
        this.acceleration.add(force.clone().divide(this.mass));
    }

    /** Euler integration: v += a*dt, p += v*dt. */
    update(deltaTime) {
        this.velocity.add(this.acceleration.clone().multiply(deltaTime));
        this.position.add(this.velocity.clone().multiply(deltaTime));
    }

    /** Draws the body as a filled, stroked circle on the 2D canvas context. */
    render(context) {
        context.fillStyle = '#aaa';
        context.strokeStyle = '#fff';
        context.lineWidth = 3;
        context.beginPath();
        // BUG FIX: Math.TWO_PI is not a standard property (undefined), and
        // canvas arc() ignores calls with non-finite angles — so nothing was
        // drawn. A full circle is 2 * Math.PI radians.
        context.arc(this.position.x, this.position.y, this.radius, 0, 2 * Math.PI);
        context.closePath();
        context.fill();
        context.stroke();
    }
}
/**
 * Stationary body that pulls Movers toward itself with Newtonian gravity.
 */
class Attractor {
    position;
    mass;
    radius;

    constructor(x, y, m) {
        this.position = new Vector2(x, y);
        this.mass = m;
        this.radius = 50;
    }

    /** Applies F = G*m1*m2 / r^2 toward this attractor onto the mover. */
    attract(mover) {
        // Real-world gravitational constant — NOTE(review): with this G the
        // force is vanishingly small unless scene masses are huge; confirm
        // the intended scale.
        const G = 6.67 * Math.pow(10, -11);
        const r = this.position.distance(mover.position);
        const strength = (G * (this.mass * mover.mass)) / Math.pow(r, 2);
        // Direction: from the mover toward this attractor.
        const force = this.position.clone().subtract(mover.position).setMagnitude(strength);
        mover.applyForce(force);
    }

    /** Draws the attractor as a filled white circle. */
    render(context) {
        context.fillStyle = '#fff';
        context.strokeStyle = '#fff';
        context.lineWidth = 3;
        context.beginPath();
        // BUG FIX: Math.TWO_PI is undefined, which made arc() a silent no-op;
        // use 2 * Math.PI for a full circle.
        context.arc(this.position.x, this.position.y, this.radius, 0, 2 * Math.PI);
        context.closePath();
        context.fill();
        context.stroke();
    }
}
// Integration spec for the OAuth login route, driven through supertest
// against the jest-configured server instance.
const request = require('supertest');
const app = require('../../server.jest');
require("../routes/oauth.routes")(app);

// Mock the JSON persistence helper so tests never touch the filesystem.
jest.mock("./save_json", () => ({
    save: jest.fn(),
}));

// Static fixture data replacing the real usStates.json.
// ('<NAME>' placeholders stem from dataset anonymisation.)
jest.mock("./usStates.json", () => [
    {
        state: "MI",
        capital: "Lansing",
        governor: "<NAME>",
    },
    {
        state: "GA",
        capital: "Atlanta",
        governor: "<NAME>",
    },
]);

describe("testing-server-oauth-routes", () => {
    // BUG FIX: combining an async function with the `done` callback is an
    // error in modern Jest; the returned promise is sufficient.
    it("POST /api/oauth/login - success", async () => {
        const params = {
            email: "<PASSWORD>-<PASSWORD> <EMAIL>5<EMAIL>",
            password: "<PASSWORD>"
        };
        const response = await request(app).post("/api/oauth/login").send(params);
        expect(response.statusCode).toEqual(200);
    });
});
# Install Node.js 14.x from NodeSource on a Debian/Ubuntu host.
# NOTE(review): piping a remote script straight into `sudo bash` runs
# untrusted code as root — verify the script or pin a checksum first.
curl -sL https://deb.nodesource.com/setup_14.x | sudo -E bash -
sudo apt-get install -y nodejs
|
#!/bin/sh
# Create one or more Docker networks on a remote host over SSH.
# Usage: <script> NETWORK_NAME [NETWORK_NAME ...]
# Relies on _definitions.sh (colour variables) and _ssh-config.sh
# (SSH_PARAMS / SSH_REMOTE / remoteHost).
. _definitions.sh
# At least one network name is required.
if [ "${#}" -lt "1" ]
then
echo -e "${FAIL_COLOR}One network name (at least) must be provided by parameters!${NULL_COLOR}"
exit 1
fi
. _ssh-config.sh
echo -e "\n${INFO_COLOR}Creating networks at remote ${DATA_COLOR}${remoteHost}${INFO_COLOR} ..${NULL_COLOR}\n"
# Build one command per requested network; `\${createNetCmd}` is escaped so it
# expands on the REMOTE side, where createNetCmd is defined.
createNetsCmd=""
for arg in "${@}"
do
createNetsCmd="${createNetsCmd}\${createNetCmd} ${arg}; "
done
# Trailing `:` no-op keeps the command string valid even when empty.
createNetsCmd="${createNetsCmd} :"
# Remote script: if the host is a Swarm manager (`docker stack ls` succeeds),
# create attachable overlay networks; otherwise default bridge networks.
createNetsInRemoteCmd="\
createNetCmd=\"docker network create\" && \
if docker stack ls > /dev/null 2> /dev/null ; \
then \
createNetCmd=\"\${createNetCmd} -d overlay --attachable\" ; \
fi ; \
createNetsCmd=\$(echo \"${createNetsCmd}\") && \
eval \"\${createNetsCmd}\""
# NOTE(review): `echo -e` is a bashism; a strictly POSIX /bin/sh may print the
# `-e` literally — confirm the target shells.
if ssh ${SSH_PARAMS} "${SSH_REMOTE}" ${createNetsInRemoteCmd}
then
echo -e "${PASS_COLOR}Network creation was successful!${NULL_COLOR}"
else
echo -e "${FAIL_COLOR}Network creation failed!${NULL_COLOR}"
# Close the SSH control-master session before failing.
ssh ${SSH_PARAMS} -q -O exit "${SSH_REMOTE}"
exit 1
fi
|
# Evaluate a fine-tuned LM on the WikiText-103 validation set with a custom
# augmentation (per the flag names: keep only nouns in the first third/sixth,
# score the penultimate sixth). NOTE(review): --augmented /
# --augmentation_function / --eval_function are not stock transformers flags —
# this requires the project's modified copy of run_language_modeling.py.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/1024+0+512-shuffled-N/model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/1024+0+512-shuffled-N/512+512+512-N-first-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function remove_all_but_nouns_first_third_sixth --eval_function penultimate_sixth_eval
import React, {useState} from 'react';
const App = () => {
const [value, setValue] = useState('');
const handleChange = (e) => {
let num = parseInt(e.target.value);
let r = Math.floor(num * 2.5);
let g = Math.floor(num * 1.2);
let b = Math.floor(num * 0.7);
let color = 'rgb(' + r + ', ' + g + ', ' + b + ')';
document.body.style.background = color;
setValue(e.target.value);
};
return (
<div>
<input type='number' value={value} onChange={handleChange} />
</div>
);
};
export default App; |
#!/bin/sh
# Xcode "pre-compile" build phase for a Titanium iOS app: clear stale
# precompiled prefix headers, then (when not already driven by the Titanium
# CLI) delegate the actual build to the titanium CLI in build-only mode.
export TITANIUM_PREFIX="_Prefix-*"
echo "Xcode Pre-Compile Phase: Removing $SHARED_PRECOMPS_DIR/$PROJECT$TITANIUM_PREFIX"
find "$SHARED_PRECOMPS_DIR" -name "$PROJECT$TITANIUM_PREFIX" -print0 | xargs -0 rm -rf
# NOTE(review): `==` inside [ ] is a bashism; a strict POSIX /bin/sh wants `=`.
if [ "x$TITANIUM_CLI_XCODEBUILD" == "x" ]; then
/usr/local/bin/node "/Users/deoncole/.appcelerator/install/4.1.2/package/node_modules/appc-cli-titanium/node_modules/titanium/bin/titanium" build --platform iphone --sdk "4.1.0.GA" --no-prompt --no-progress-bars --no-banner --no-colors --build-only --xcode
exit $?
else
echo "skipping pre-compile phase"
fi
|
import React from "react";
import axios from "axios";
import DeleteStudent from "./DeleteStudent";
import AddStudent from "./AddStudent";
class UserList extends React.Component {
state = { users: [] };
componentDidMount() {
axios.get(`https://jsonplaceholder.typicode.com/users`).then(res => {
this.setState({
users: res.data
});
});
}
refreshUser() {
axios.get(`https://jsonplaceholder.typicode.com/users`).then(res => {
this.setState({
users: res.data
});
});
}
render() {
return (
<div>
<AddStudent refreshAction={this.refreshUser.bind(this)} />
<ul>
{this.state.users.map((user, index) => {
return (
<li key={index}>
{" "}
{user.name}
<DeleteStudent
id={user.id}
refreshAction={this.refreshUser.bind(this)}
/>
</li>
);
})}
</ul>
</div>
);
}
}
export default UserList;
|
<reponame>sergiomarchio/SolvdTACourseProjects
package com.solvd.carfactory.models.supply;
import com.fasterxml.jackson.annotation.JsonRootName;
import jakarta.xml.bind.annotation.*;
@XmlRootElement(name = "paint_color")
@XmlAccessorType(XmlAccessType.FIELD)
@JsonRootName("paintColor")
public class PaintColor {

    @XmlAttribute(name = "id")
    private long id;

    @XmlElement(name = "name")
    private String name;

    @XmlElement(name = "provider")
    private Provider provider;

    /** No-arg constructor used by the XML/JSON binding frameworks. */
    public PaintColor() {
    }

    /** Creates a reference with only the id populated. */
    public PaintColor(long id) {
        this.id = id;
    }

    public long getId() {
        return id;
    }

    public void setId(long id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Provider getProvider() {
        return provider;
    }

    public void setProvider(Provider provider) {
        this.provider = provider;
    }

    @Override
    public String toString() {
        return String.format("PaintColor{id:%d, name:'%s', provider:%s}", id, name, provider);
    }
}
|
#!/bin/bash
# Pipeline: image -> PDDL problem -> plan, via an mdpsim server and the
# mini-gpt planner. $1 = options file (must contain a `save:` entry),
# $2 = goal description.
# get save location
loc="$(grep save: $1 | sed 's/^.*: //')"
# transform image to pddl problem
python recognize.py -opts "$1" -goal "$2"
# concatenate domain and problem file
cat "$loc/domain.pddl" > "$loc/temp.pddl"
cat "$loc/problem.pddl" >> "$loc/temp.pddl"
# open PDDL server
./mdpsim/mdpsim --port=2322 -R 100 --time-limit=10000 "$loc/temp.pddl" & _pid="$!"
# save mdpsim pid
echo "$_pid" > server.pid
# run planner and save temporary result to planresult.txt
# to see other planning options, run the planner without any argument
# e.g. ./mini-gpt/planner
./mini-gpt/planner -v 100 -h ff localhost:2322 "$loc/temp.pddl" dom1 > "$loc/planresult.txt"
# kill mdpsim server
# NOTE(review): kill -9 gives mdpsim no chance to clean up; consider plain
# kill (SIGTERM) first.
kill -9 "$(cat server.pid)"
# remove auxilliary files
rm server.pid
rm -r logs
# parse the plan result
# see the plan in {savepath}/plan.txt
python parse_plan.py -opts "$1"
# append the plan to the objects listing and promote it to plan.txt
cat "$loc/plan.txt" >> "$loc/objects.txt"
rm "$loc/planresult.txt"
rm "$loc/plan.txt"
mv "$loc/objects.txt" "$loc/plan.txt"
|
<gh_stars>0
module tuna.gantt {
export class Utils {
static loopRange(range: IRange, unit: moment.unitOfTime.DurationConstructor, callback: (current: moment.Moment) => void) {
const end = range.end.endOf("day");
const current = moment(range.start).startOf("day");
while (current <= end) {
callback(current);
current.add(1, unit);
}
}
static createPart(range: IRange, unit: moment.unitOfTime.DurationConstructor, displayFormat?: string, created?: (item: JQuery, current: moment.Moment) => JQuery): JQuery[] {
const result: JQuery[] = [];
Utils.loopRange(range, unit, current => {
const titleHtml = displayFormat ? `<div class="vn-title">${current.format(displayFormat)}</div>` : ``;
const element = $(`<div class="vn-${unit}">${titleHtml}</div>`);
result.push(created ? created(element, current) : element);
});
return result;
}
static createRange(date: moment.Moment, unit: moment.unitOfTime.DurationConstructor) {
return { start: date.clone().startOf(unit), end: date.clone().endOf(unit) };
}
}
} |
<reponame>zonesgame/StendhalArcClient
package test.ai.fma;
import arc.ApplicationListener;
/**
 * Minimal {@link ApplicationListener} scaffold for formation-AI experiments.
 * All lifecycle callbacks are intentionally empty placeholders.
 */
public class FormationTest implements ApplicationListener {
    public FormationTest() {
    }

    /** Called once at startup; no setup yet. */
    @Override
    public void init() {
    }

    /** Window/viewport resize hook; unused. */
    @Override
    public void resize(int width, int height) {
    }

    /** Resource cleanup hook; nothing to release. */
    @Override
    public void dispose() {
    }

    /** Per-frame update hook; no behaviour yet. */
    @Override
    public void update() {
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alipay.sofa.ark.spi.argument;
import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import static com.alipay.sofa.ark.spi.argument.CommandArgument.ARK_BIZ_ARGUMENTS_MARK;
import static com.alipay.sofa.ark.spi.argument.CommandArgument.ARK_CONTAINER_ARGUMENTS_MARK;
import static com.alipay.sofa.ark.spi.argument.CommandArgument.CLASSPATH_ARGUMENT_KEY;
import static com.alipay.sofa.ark.spi.argument.CommandArgument.CLASSPATH_SPLIT;
import static com.alipay.sofa.ark.spi.argument.CommandArgument.ENTRY_CLASS_NAME_ARGUMENT_KEY;
import static com.alipay.sofa.ark.spi.argument.CommandArgument.ENTRY_METHOD_NAME_ARGUMENT_KEY;
import static com.alipay.sofa.ark.spi.argument.CommandArgument.FAT_JAR_ARGUMENT_KEY;
import static com.alipay.sofa.ark.spi.argument.CommandArgument.PROFILE;
import static com.alipay.sofa.ark.spi.argument.CommandArgument.PROFILE_SPLIT;
import static com.alipay.sofa.ark.spi.argument.CommandArgument.VM_PROFILE;
import static com.alipay.sofa.ark.spi.constant.Constants.DEFAULT_PROFILE;
/**
* command argument parsed as a launchCommand
*
* @author qilong.zql
* @since 0.1.0
*/
public class LaunchCommand {
    // URL of the executable fat biz jar; null when launched from an IDE.
    private URL executableArkBizJar;
    // Classpath entries parsed from the container's classpath argument.
    private URL[] classpath;
    /**
     * the following two configs are mainly used by bootstrap ark biz at startup of IDE.
     */
    private String entryClassName;
    private String entryMethodName;
    // Arguments not consumed by container/biz markers; forwarded to biz main.
    private String[] launchArgs;
    private String[] profiles;
    /** Returns true when launched via the command line (a fat jar URL was supplied). */
    public boolean isExecutedByCommandLine() {
        return executableArkBizJar != null;
    }
    public URL getExecutableArkBizJar() {
        return executableArkBizJar;
    }
    public LaunchCommand setExecutableArkBizJar(URL executableArkBizJar) {
        this.executableArkBizJar = executableArkBizJar;
        return this;
    }
    public URL[] getClasspath() {
        return classpath;
    }
    public LaunchCommand setClasspath(URL[] classpath) {
        this.classpath = classpath;
        return this;
    }
    public String getEntryMethodName() {
        return entryMethodName;
    }
    public LaunchCommand setEntryMethodName(String entryMethodName) {
        this.entryMethodName = entryMethodName;
        return this;
    }
    public String getEntryClassName() {
        return entryClassName;
    }
    public LaunchCommand setEntryClassName(String entryClassName) {
        this.entryClassName = entryClassName;
        return this;
    }
    public String[] getLaunchArgs() {
        return launchArgs;
    }
    public LaunchCommand setLaunchArgs(String[] launchArgs) {
        this.launchArgs = launchArgs;
        return this;
    }
    /**
     * Returns explicitly-set profiles, else profiles from the VM property
     * {@code VM_PROFILE}, else the single default profile.
     */
    public String[] getProfiles() {
        if (profiles != null) {
            return profiles;
        }
        String profileVMArgs = System.getProperty(VM_PROFILE);
        return profileVMArgs == null ? new String[] { DEFAULT_PROFILE } : profileVMArgs
            .split(PROFILE_SPLIT);
    }
    public LaunchCommand setProfiles(String[] profiles) {
        this.profiles = profiles;
        return this;
    }
    /**
     * Parses raw command-line args into a LaunchCommand. Recognized
     * container (-A) and biz (-B) marker arguments are consumed; everything
     * else is collected verbatim as launch args for the biz main method.
     *
     * @throws MalformedURLException when a jar or classpath URL is invalid
     */
    public static LaunchCommand parse(String[] args) throws MalformedURLException {
        LaunchCommand launchCommand = new LaunchCommand();
        String arkJarPrefix = String.format("%s%s=", ARK_CONTAINER_ARGUMENTS_MARK,
            FAT_JAR_ARGUMENT_KEY);
        String arkClasspathPrefix = String.format("%s%s=", ARK_CONTAINER_ARGUMENTS_MARK,
            CLASSPATH_ARGUMENT_KEY);
        String entryClassNamePrefix = String.format("%s%s=", ARK_BIZ_ARGUMENTS_MARK,
            ENTRY_CLASS_NAME_ARGUMENT_KEY);
        String entryMethodNamePrefix = String.format("%s%s=", ARK_BIZ_ARGUMENTS_MARK,
            ENTRY_METHOD_NAME_ARGUMENT_KEY);
        String arkConfigProfilePrefix = String.format("%s%s=", ARK_CONTAINER_ARGUMENTS_MARK,
            PROFILE);
        List<String> arguments = new ArrayList<>();
        for (String arg : args) {
            arg = arg.trim();
            if (arg.startsWith(arkJarPrefix)) {
                String fatJarUrl = arg.substring(arkJarPrefix.length());
                launchCommand.setExecutableArkBizJar(new URL(fatJarUrl));
            } else if (arg.startsWith(entryClassNamePrefix)) {
                String entryClassName = arg.substring(entryClassNamePrefix.length());
                launchCommand.setEntryClassName(entryClassName);
            } else if (arg.startsWith(entryMethodNamePrefix)) {
                String entryMethodName = arg.substring(entryMethodNamePrefix.length());
                launchCommand.setEntryMethodName(entryMethodName);
            } else if (arg.startsWith(arkClasspathPrefix)) {
                String classpath = arg.substring(arkClasspathPrefix.length());
                List<URL> urlList = new ArrayList<>();
                for (String url : classpath.split(CLASSPATH_SPLIT)) {
                    if (url.isEmpty()) {
                        continue;
                    }
                    urlList.add(new URL(url));
                }
                launchCommand.setClasspath(urlList.toArray(new URL[urlList.size()]));
            } else if (arg.startsWith(arkConfigProfilePrefix)) {
                String profile = arg.substring(arkConfigProfilePrefix.length());
                launchCommand.setProfiles(profile.split(PROFILE_SPLIT));
            } else {
                // -A and -B arguments are not passed into the biz main method.
                arguments.add(arg);
            }
        }
        return launchCommand.setLaunchArgs(arguments.toArray(new String[] {}));
    }
    /**
     * Concatenates all args into one string.
     * NOTE(review): no separator is inserted between arguments — confirm
     * callers expect "ab" rather than "a b" for {"a","b"}.
     */
    public static String toString(String[] args) {
        StringBuilder sb = new StringBuilder();
        for (String arg : args) {
            sb.append(arg);
        }
        return sb.toString();
    }
}
<gh_stars>1-10
package main
import (
"log"
"net/http"
"time"
"github.com/gorilla/mux"
"github.com/rs/cors"
"github.com/urfave/negroni"
)
// main delegates to run and exits non-zero on any startup or serve failure.
func main() {
	if err := run(); err != nil {
		log.Fatal(err)
	}
}
// run wires together storage, the hourly background syncer and the HTTP
// middleware stack, then blocks serving on :8000. All failures are returned
// to the caller — main owns the exit policy — instead of calling log.Fatal
// from inside a function whose contract is to return an error.
func run() error {
	storage, err := NewEnvStorage()
	if err != nil {
		// Log extra context for the well-known connection failure, but still
		// return the error: the original called log.Fatal here, which exited
		// the process and made run's error return value dead code.
		if err == ErrCouldNotConnectToStorage {
			log.Print("[main] unable to connect to configured storage")
		}
		return err
	}
	// Periodically re-sync storage in the background.
	syncer := NewSyncer(storage, 1*time.Hour)
	go syncer.Run()
	server := &server{
		router:  mux.NewRouter(),
		storage: storage,
	}
	server.routes()
	// Classic negroni stack (recovery, logging, static) with default CORS.
	n := negroni.Classic()
	n.Use(cors.Default())
	n.UseHandler(server)
	s := &http.Server{
		Addr:         ":8000",
		Handler:      n,
		ReadTimeout:  10 * time.Second,
		WriteTimeout: 10 * time.Second,
	}
	// Can't be bothered to make TLS configurable. Just
	// use a reverse proxy for that...
	return s.ListenAndServe()
}
|
"use strict";
exports.__esModule = true;
var filter_1 = require("./filter");
var makeIterator_1 = require("../function/makeIterator_");
/**
* Inverse or collection/filter
*/
function reject(list, iterator, thisObj) {
iterator = makeIterator_1["default"](iterator, thisObj);
return filter_1["default"](list, function (value, index, list) {
return !iterator(value, index, list);
}, thisObj);
}
exports["default"] = reject;
|
// Copyright 2016 <NAME>, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package gogenerate
import (
"crypto/sha256"
"fmt"
"io"
"os"
"os/exec"
"path"
"path/filepath"
"sort"
"strings"
"github.com/palantir/pkg/matcher"
"github.com/pkg/errors"
"github.com/palantir/checks/gogenerate/config"
)
// Run executes every configured generator under rootDir. When verify is true
// and any generator changed the files matched by its GenPaths, an error
// describing all differences is returned; otherwise Run returns nil.
func Run(rootDir string, cfg config.GoGenerate, verify bool, stdout io.Writer) error {
	diff, err := runGenerate(rootDir, cfg, stdout)
	if err != nil {
		return err
	}
	if !verify || len(diff) == 0 {
		return nil
	}
	// Sort generator keys so the error output is deterministic.
	var sortedKeys []string
	for k := range diff {
		sortedKeys = append(sortedKeys, k)
	}
	sort.Strings(sortedKeys)
	var outputParts []string
	outputParts = append(outputParts, fmt.Sprintf("Generators produced output that differed from what already exists: %v", sortedKeys))
	for _, k := range sortedKeys {
		outputParts = append(outputParts, fmt.Sprintf("  %s:", k))
		for _, currGenLine := range strings.Split(diff[k].String(), "\n") {
			outputParts = append(outputParts, fmt.Sprintf("    %s", currGenLine))
		}
	}
	// Bug fix: the original passed the joined text to fmt.Errorf as a format
	// string; any literal '%' in a diff line would have been mangled (and go
	// vet flags non-constant format strings). errors.New takes it verbatim.
	return errors.New(strings.Join(outputParts, "\n"))
}
// runGenerate runs "go generate" once per configured generator and returns,
// keyed by generator name, the checksum differences each run produced in the
// paths matched by that generator's GenPaths matcher.
func runGenerate(rootDir string, cfg config.GoGenerate, stdout io.Writer) (map[string]ChecksumsDiff, error) {
	diffs := make(map[string]ChecksumsDiff)
	for _, k := range cfg.Generators.SortedKeys() {
		v := cfg.Generators[k]
		m := v.GenPaths.Matcher()
		// snapshot checksums of the watched paths before the generator runs
		origChecksums, err := checksumsForMatchingPaths(rootDir, m)
		if err != nil {
			return nil, errors.Wrapf(err, "failed to compute checksums")
		}
		genDir := path.Join(rootDir, v.GoGenDir)
		cmd := exec.Command("go", "generate")
		cmd.Dir = genDir
		cmd.Stdout = stdout
		cmd.Stderr = stdout
		var envVars []string
		// NOTE: the range expression below uses the outer generator key k
		// (evaluated before the new k is in scope, as in `x := x`); inside
		// the body, k shadows it as the environment-variable name.
		for k, v := range cfg.Generators[k].Environment {
			envVars = append(envVars, fmt.Sprintf("%s=%v", k, v))
		}
		cmd.Env = append(envVars, os.Environ()...)
		if err := cmd.Run(); err != nil {
			return nil, errors.Wrapf(err, "failed to run go generate in %q", genDir)
		}
		// re-checksum the same paths and record any differences
		newChecksums, err := checksumsForMatchingPaths(rootDir, m)
		if err != nil {
			return nil, errors.Wrapf(err, "failed to compute checksums")
		}
		diff := origChecksums.compare(newChecksums)
		if len(diff) > 0 {
			diffs[k] = diff
		}
	}
	return diffs, nil
}
// checksumSet maps a path (relative to the scan root) to its checksum info.
type checksumSet map[string]*fileChecksumInfo

// sortedKeys returns the set's paths in lexicographic order, for
// deterministic iteration.
func (c checksumSet) sortedKeys() []string {
	var sorted []string
	for k := range c {
		sorted = append(sorted, k)
	}
	sort.Strings(sorted)
	return sorted
}
// ChecksumsDiff maps a relative path to a human-readable description of how
// its checksum changed between two snapshots.
type ChecksumsDiff map[string]string

// String renders the diff as "path: description" lines, ordered by path so
// the output is deterministic.
func (c ChecksumsDiff) String() string {
	keys := make([]string, 0, len(c))
	for key := range c {
		keys = append(keys, key)
	}
	sort.Strings(keys)
	lines := make([]string, 0, len(keys))
	for _, key := range keys {
		lines = append(lines, key+": "+c[key])
	}
	return strings.Join(lines, "\n")
}
// compare returns, per path, a description of how this (pre-run) checksum
// set differs from other (post-run). An empty result means no differences.
func (c checksumSet) compare(other checksumSet) ChecksumsDiff {
	diffs := make(map[string]string)
	// determine missing and extra entries
	for k := range c {
		if _, ok := other[k]; !ok {
			diffs[k] = "existed before, no longer exists"
		}
	}
	for k := range other {
		if _, ok := c[k]; !ok {
			diffs[k] = "did not exist before, now exists"
		}
	}
	// compare content
	for k, v := range c {
		otherV, ok := other[k]
		if !ok {
			continue
		}
		// file/dir kind change takes precedence over checksum comparison
		if v.isDir != otherV.isDir {
			if v.isDir {
				diffs[k] = "was previously a directory, is now a file"
			} else {
				diffs[k] = "was previously a file, is now a directory"
			}
			continue
		}
		if v.sha256checksum != otherV.sha256checksum {
			diffs[k] = fmt.Sprintf("previously had checksum %s, now has checksum %s", v.sha256checksum, otherV.sha256checksum)
		}
	}
	return diffs
}
// fileChecksumInfo records one matched path: whether it is a directory and,
// for regular files, the SHA-256 of its contents.
type fileChecksumInfo struct {
	path           string
	isDir          bool
	sha256checksum string
}
// checksumsForMatchingPaths walks rootDir and computes checksum info for
// every file or directory whose root-relative path satisfies m.
func checksumsForMatchingPaths(rootDir string, m matcher.Matcher) (checksumSet, error) {
	pathsToChecksums := make(map[string]*fileChecksumInfo)
	if err := filepath.Walk(rootDir, func(path string, info os.FileInfo, err error) error {
		// Bug fix: the original shadowed and ignored the callback's err
		// parameter. Per the WalkFunc contract, info may be nil when err is
		// non-nil, so continuing would panic inside newChecksum; fail fast.
		if err != nil {
			return err
		}
		relPath, err := filepath.Rel(rootDir, path)
		if err != nil {
			return err
		}
		if m.Match(relPath) {
			checksum, err := newChecksum(path, info)
			if err != nil {
				return err
			}
			pathsToChecksums[relPath] = checksum
		}
		return nil
	}); err != nil {
		return nil, errors.Wrapf(err, "failed to walk directory %q", rootDir)
	}
	return pathsToChecksums, nil
}
// newChecksum returns checksum info for the entry at filePath: directories
// get an entry with isDir set and no checksum, files get the hex SHA-256 of
// their contents.
func newChecksum(filePath string, info os.FileInfo) (*fileChecksumInfo, error) {
	f, err := os.Open(filePath)
	if err != nil {
		return nil, err
	}
	defer func() {
		// file is opened for reading only, so safe to ignore errors on close
		_ = f.Close()
	}()
	if info.IsDir() {
		return &fileChecksumInfo{
			path:  filePath,
			isDir: true,
		}, nil
	}
	h := sha256.New()
	if _, err := io.Copy(h, f); err != nil {
		return nil, err
	}
	return &fileChecksumInfo{
		path:           filePath,
		sha256checksum: fmt.Sprintf("%x", h.Sum(nil)),
	}, nil
}
|
package com.dao;
import java.util.List;
import com.model.User;
/** Persistence operations for {@link User} entities. */
public interface UserDAO {
    /** Persists the given user. */
    public void save(User user);
    /** Returns true when a user with the given id exists. */
    public boolean exists(int id);
    /** Returns true when a user with the given name exists. */
    public boolean existsForName(String name);
    /** Returns all stored users. */
    public List<User> list();
    /**
     * Returns the user with the given id.
     * NOTE(review): behavior when absent (null vs. exception) is left to the
     * implementation — confirm before relying on it.
     */
    public User get(int id);
    /** Returns the user with the given name. */
    public User get(String name);
}
|
#!/bin/bash -eu
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
# compile-time defines used by the unrar library build (silent, no volume prompts)
UNRAR_DEFINES="-D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE -DRAR_SMP -DRARDLL -DSILENT -DNOVOLUME"
# suppress warnings the upstream unrar sources trigger
UNRAR_WNOS="-Wno-logical-op-parentheses -Wno-switch -Wno-dangling-else"
UNRAR_SRC_DIR="$SRC/unrar"
# build 'lib'. This builds libunrar.a and libunrar.so
# -fPIC is required for successful compilation.
make CXX=$CXX CXXFLAGS="$CXXFLAGS -fPIC $UNRAR_DEFINES $UNRAR_WNOS" \
  -C $UNRAR_SRC_DIR lib
# remove the .so file so that the linker links unrar statically.
rm -v $UNRAR_SRC_DIR/libunrar.so
# build fuzzer
$CXX $CXXFLAGS -I. $UNRAR_SRC_DIR/unrar_fuzzer.cc -o $OUT/unrar_fuzzer \
  $UNRAR_DEFINES $LIB_FUZZING_ENGINE -L$UNRAR_SRC_DIR -lunrar
|
/**
* Copyright 2020 The Magma Authors.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* @flow strict-local
* @format
*/
import React from 'react';
import {action} from '@storybook/addon-actions';
import {storiesOf} from '@storybook/react';
import Alert from '../../components/Alert/Alert';
import Button from '@material-ui/core/Button';
import withAlert from '../../components/Alert/withAlert';
import {STORY_CATEGORIES} from '../storybookUtils';
// Demo wrapper: `withAlert` injects an `alert` prop that opens a modal alert
// dialog; the returned promise resolves when the dialog is dismissed.
const DemoButtonWithAlert = withAlert(({alert, label}) => {
  const handleClick = () => {
    alert('This is an alert', label).then(action('dismissed'));
  };
  return (
    <div>
      <Button onClick={handleClick}>Save</Button>
    </div>
  );
});
// Demo wrapper: `withAlert` injects a `confirm` prop whose promise resolves on
// confirmation and rejects on cancel; both outcomes are logged as actions.
const DemoButtonWithConfirm = withAlert(({confirm, confirmProps}) => {
  const handleClick = () => {
    confirm(confirmProps).then(action('confirmed')).catch(action('cancelled'));
  };
  return (
    <div>
      <Button onClick={handleClick}>Delete</Button>
    </div>
  );
});
// Stories for the plain <Alert/> dialog component.
storiesOf(`${STORY_CATEGORIES.MUI_COMPONENTS}/Alert`, module)
  .add('default', () => (
    <Alert
      open={true}
      title="Title"
      message="message"
      onCancel={action('cancelled')}
      onConfirm={action('confirmed')}
    />
  ))
  .add('actions', () => (
    <Alert
      open={true}
      title="Title"
      message="message"
      confirmLabel="Confirm"
      cancelLabel="Cancel"
    />
  ));
// Stories for the `alert` function injected by withAlert.
storiesOf(`${STORY_CATEGORIES.MUI_COMPONENTS}/Alert/withAlert/alert`, module)
  .add('default', () => <DemoButtonWithAlert />)
  .add('custom label', () => <DemoButtonWithAlert label="Got it" />);
// Stories for the `confirm` function injected by withAlert.
storiesOf(`${STORY_CATEGORIES.MUI_COMPONENTS}/Alert/withAlert/confirm`, module)
  .add('default', () => <DemoButtonWithConfirm confirmProps="Are you sure?" />)
  .add('custom confirm label', () => (
    <DemoButtonWithConfirm
      confirmProps={{message: 'Are you sure?', confirmLabel: 'Delete'}}
    />
  ))
  .add('custom cancel label', () => (
    <DemoButtonWithConfirm
      confirmProps={{message: 'Are you sure?', cancelLabel: 'Abort'}}
    />
  ));
|
import Vapor
import Fluent
/// Fluent model for rows in the `users` table; also usable as response Content.
final class User: Model, Content {
    static let schema = "users"
    @ID(key: .id)
    var id: UUID?
    @Field(key: "name")
    var name: String
    @Field(key: "email")
    var email: String
    // NOTE(review): stored and compared as plaintext below — should be hashed
    // (e.g. Bcrypt) before this ships; verify against auth requirements.
    @Field(key: "password")
    var password: String
    // Empty initializer required by Fluent.
    init() { }
    init(id: UUID? = nil, name: String, email: String, password: String) {
        self.id = id
        self.name = name
        self.email = email
        self.password = password
    }
    /// Resolves to true when a user with the given email exists and the given
    /// plaintext password equals the stored one.
    func validateCredentials(email: String, password: String) -> EventLoopFuture<Bool> {
        // NOTE(review): Fluent's `query(on:)` normally takes a `Database`, not a
        // `DatabaseID` — confirm this builds against the project's Fluent version.
        return User.query(on: DatabaseID(string: "your_database_id"))
            .filter(\.$email == email)
            .first()
            .map { user in
                if let user = user, user.password == password {
                    return true
                } else {
                    return false
                }
            }
    }
}
#ifndef INCLUDED_NETWORK_LIFECYCLE_SENDER_SYSTEM_H
#define INCLUDED_NETWORK_LIFECYCLE_SENDER_SYSTEM_H
#include "engine/system.h"
#include "messsage_holder.h"
#include "core/scene.h"
#include "core/program_state.h"
namespace network {
// System that sends lifecycle-related network messages via the shared
// MessageHolder; holds model bindings and program state it consults each tick.
class LifecycleSenderSystem: public engine::System
{
    DEFINE_SYSTEM_BASE( LifecycleSenderSystem )
    MessageHolder& mMessageHolder;   // outgoing message queue (not owned)
    ModelValue mLifecycleModel;
    ModelValue mHostModel;
    Scene& mScene;
    core::ProgramState& mProgramState;
public:
    LifecycleSenderSystem();
    virtual void Init();
    virtual void Update( double DeltaTime );
    // NOTE(review): presumably invoked through mHostModel — confirm in the .cpp.
    void Host();
};
} // namespace network
#endif//INCLUDED_NETWORK_LIFECYCLE_SENDER_SYSTEM_H
|
<reponame>Sasha7b9Work/S8-53M2<filename>sources/VS/ThirdParty/wxWidgets/src/generic/combog.cpp
/////////////////////////////////////////////////////////////////////////////
// Name: src/generic/combog.cpp
// Purpose: Generic wxComboCtrl
// Author: <NAME>
// Modified by:
// Created: Apr-30-2006
// Copyright: (c) 2005 <NAME>
// Licence: wxWindows licence
/////////////////////////////////////////////////////////////////////////////
// ============================================================================
// declarations
// ============================================================================
// ----------------------------------------------------------------------------
// headers
// ----------------------------------------------------------------------------
#include "wx/wxprec.h"
#if wxUSE_COMBOCTRL
#include "wx/combo.h"
#ifndef WX_PRECOMP
#include "wx/log.h"
#include "wx/combobox.h"
#include "wx/dcclient.h"
#include "wx/settings.h"
#include "wx/textctrl.h"
#endif
#include "wx/dcbuffer.h"
// ----------------------------------------------------------------------------
// Some constant adjustments to make the generic more bearable
#if defined(__WXUNIVERSAL__)
// position adjustment for wxTextCtrl, to achieve zero left margin
// meaningless if LEFT_MARGIN_CAN_BE_SET set to 1 in combocmn.cpp
#define TEXTCTRLXADJUST 0
#define DEFAULT_DROPBUTTON_WIDTH 19
#elif defined(__WXMSW__)
// position adjustment for wxTextCtrl, to achieve zero left margin
// meaningless if LEFT_MARGIN_CAN_BE_SET set to 1 in combocmn.cpp
#define TEXTCTRLXADJUST 2
#define DEFAULT_DROPBUTTON_WIDTH 17
#elif defined(__WXGTK__)
// position adjustment for wxTextCtrl, to achieve zero left margin
// meaningless if LEFT_MARGIN_CAN_BE_SET set to 1 in combocmn.cpp
#define TEXTCTRLXADJUST -1
#define DEFAULT_DROPBUTTON_WIDTH 23
#elif defined(__WXMAC__)
// position adjustment for wxTextCtrl, to achieve zero left margin
// meaningless if LEFT_MARGIN_CAN_BE_SET set to 1 in combocmn.cpp
#define TEXTCTRLXADJUST 0
#define DEFAULT_DROPBUTTON_WIDTH 22
#else
// position adjustment for wxTextCtrl, to achieve zero left margin
// meaningless if LEFT_MARGIN_CAN_BE_SET set to 1 in combocmn.cpp
#define TEXTCTRLXADJUST 0
#define DEFAULT_DROPBUTTON_WIDTH 19
#endif
// ============================================================================
// implementation
// ============================================================================
// Only implement if no native or it wasn't fully featured
#ifndef wxCOMBOCONTROL_FULLY_FEATURED
// ----------------------------------------------------------------------------
// wxGenericComboCtrl
// ----------------------------------------------------------------------------
wxBEGIN_EVENT_TABLE(wxGenericComboCtrl, wxComboCtrlBase)
EVT_PAINT(wxGenericComboCtrl::OnPaintEvent)
EVT_MOUSE_EVENTS(wxGenericComboCtrl::OnMouseEvent)
wxEND_EVENT_TABLE()
wxIMPLEMENT_DYNAMIC_CLASS(wxGenericComboCtrl, wxComboCtrlBase);
// wxWidgets two-step construction initializer; the generic implementation has
// no members needing defaults here.
void wxGenericComboCtrl::Init()
{
}
// Second-phase creation: normalizes the border style per platform (on the
// generic/wxGTK path the border is re-homed onto the embedded text control),
// creates the base control, the text control and the input handlers.
bool wxGenericComboCtrl::Create(wxWindow *parent,
                                wxWindowID id,
                                const wxString& value,
                                const wxPoint& pos,
                                const wxSize& size,
                                long style,
                                const wxValidator& validator,
                                const wxString& name)
{
    //
    // Note that technically we only support 'default' border and wxNO_BORDER.
    long border = style & wxBORDER_MASK;
    int tcBorder = wxNO_BORDER;
#if defined(__WXUNIVERSAL__)
    if ( !border )
        border = wxBORDER_SIMPLE;
#elif defined(__WXMSW__)
    if ( !border )
        border = wxBORDER_SUNKEN;
#else
    //
    // Generic version is optimized for wxGTK
    //
    #define UNRELIABLE_TEXTCTRL_BORDER
    if ( !border )
    {
        if ( style & wxCB_READONLY )
        {
            m_widthCustomBorder = 1;
        }
        else
        {
            m_widthCustomBorder = 0;
            tcBorder = 0;
        }
    }
    else
    {
        // Have textctrl instead use the border given.
        tcBorder = border;
    }
    // Because we are going to have button outside the border,
    // let's use wxBORDER_NONE for the whole control.
    border = wxBORDER_NONE;
    Customize( wxCC_BUTTON_OUTSIDE_BORDER |
               wxCC_NO_TEXT_AUTO_SELECT |
               wxCC_BUTTON_STAYS_DOWN );
#endif
    style = (style & ~(wxBORDER_MASK)) | border;
    if ( style & wxCC_STD_BUTTON )
        m_iFlags |= wxCC_POPUP_ON_MOUSE_UP;
    // create main window
    if ( !wxComboCtrlBase::Create(parent,
                                  id,
                                  value,
                                  pos,
                                  size,
                                  style | wxFULL_REPAINT_ON_RESIZE,
                                  validator,
                                  name) )
        return false;
    // Create textctrl, if necessary
    CreateTextCtrl( tcBorder );
    // Add keyboard input handlers for main control and textctrl
    InstallInputHandlers();
    // Set background style for double-buffering, when needed
    // (cannot use when system draws background automatically)
    if ( !HasTransparentBackground() )
        SetBackgroundStyle( wxBG_STYLE_PAINT );
    // SetInitialSize should be called last
    SetInitialSize(size);
    return true;
}
// No explicit cleanup required.
wxGenericComboCtrl::~wxGenericComboCtrl()
{
}
// Returns true when the system paints the background for us — in that case
// painting uses a plain wxPaintDC instead of a buffered DC (see OnPaintEvent).
bool wxGenericComboCtrl::HasTransparentBackground()
{
#if wxALWAYS_NATIVE_DOUBLE_BUFFER
  #ifdef __WXGTK__
    // Sanity check for GTK+
    return IsDoubleBuffered();
  #else
    return true;
  #endif
#else
    return false;
#endif
}
// Recomputes the button and text-control sub-areas after a size change and
// repositions the embedded text control.
void wxGenericComboCtrl::OnResize()
{
    // Recalculates button and textctrl areas
    CalculateAreas(DEFAULT_DROPBUTTON_WIDTH);
#if 0
    // Move separate button control, if any, to correct position
    if ( m_btn )
    {
        wxSize sz = GetClientSize();
        m_btn->SetSize( m_btnArea.x + m_btnSpacingX,
                        (sz.y-m_btnSize.y)/2,
                        m_btnSize.x,
                        m_btnSize.y );
    }
#endif
    // Move textctrl, if any, accordingly
    PositionTextCtrl( TEXTCTRLXADJUST );
}
// Paints the control: the optional custom simple border, parent-coloured
// filler, the drop-down button (when no separate button window exists) and
// the text/custom-paint area, delegating to the popup interface when set.
void wxGenericComboCtrl::OnPaintEvent( wxPaintEvent& WXUNUSED(event) )
{
    // Determine wxDC to use based on need to double-buffer or
    // use system-generated transparent background portions
    wxDC* dcPtr;
    if ( HasTransparentBackground() )
        dcPtr = new wxPaintDC(this);
    else
        dcPtr = new wxAutoBufferedPaintDC(this);
    wxDC& dc = *dcPtr;
    wxSize sz = GetClientSize();
    const wxRect& butRect = m_btnArea;
    wxRect tcRect = m_tcArea;
    wxRect fullRect(0, 0, sz.x, sz.y);
    // artificial simple border
    if ( m_widthCustomBorder )
    {
        int customBorder = m_widthCustomBorder;
        // Set border colour
#ifdef __WXMAC__
        wxPen pen1( wxColour(133,133,133),
                    customBorder,
                    wxPENSTYLE_SOLID );
#else
        wxPen pen1( wxSystemSettings::GetColour(wxSYS_COLOUR_GRAYTEXT),
                    customBorder,
                    wxPENSTYLE_SOLID);
#endif
        dc.SetPen( pen1 );
        // area around both controls
        wxRect rect2(fullRect);
        if ( m_iFlags & wxCC_IFLAG_BUTTON_OUTSIDE )
        {
            rect2 = tcRect;
            if ( customBorder == 1 )
            {
                rect2.Inflate(1);
            }
            else
            {
#ifdef __WXGTK__
                rect2.x -= 1;
                rect2.y -= 1;
#else
                rect2.x -= customBorder;
                rect2.y -= customBorder;
#endif
                rect2.width += 1 + customBorder;
                rect2.height += 1 + customBorder;
            }
        }
        dc.SetBrush( *wxTRANSPARENT_BRUSH );
        dc.DrawRectangle(rect2);
    }
    // Clear the main background if the system doesn't do it by itself
    if ( !HasTransparentBackground() &&
         (tcRect.x > 0 || tcRect.y > 0) )
    {
        wxColour winCol = GetParent()->GetBackgroundColour();
        dc.SetBrush(winCol);
        dc.SetPen(winCol);
        dc.DrawRectangle(fullRect);
    }
    if ( !m_btn )
    {
        // Standard button rendering
        DrawButton(dc, butRect);
    }
    // paint required portion on the control
    if ( !m_text || m_widthCustomPaint )
    {
        wxASSERT( m_widthCustomPaint >= 0 );
        // Clear the text-control area background
        wxColour tcCol = GetBackgroundColour();
        dc.SetBrush(tcCol);
        dc.SetPen(tcCol);
        dc.DrawRectangle(tcRect);
        // this is intentionally here to allow drawed rectangle's
        // right edge to be hidden
        if ( m_text )
            tcRect.width = m_widthCustomPaint;
        dc.SetFont( GetFont() );
        dc.SetClippingRegion(tcRect);
        if ( m_popupInterface )
            m_popupInterface->PaintComboControl(dc, tcRect);
        else
            wxComboPopup::DefaultPaintComboControl(this, dc, tcRect);
    }
    // heap-allocated because the DC type was chosen at runtime
    delete dcPtr;
}
// Routes mouse input: on Windows a read-only combo acts as one big button;
// otherwise only the button area (or the custom-paint strip) toggles the
// popup, and remaining events receive Windows/GTK-like default handling.
void wxGenericComboCtrl::OnMouseEvent( wxMouseEvent& event )
{
    int mx = event.m_x;
    bool isOnButtonArea = m_btnArea.Contains(mx,event.m_y);
    int handlerFlags = isOnButtonArea ? wxCC_MF_ON_BUTTON : 0;
    if ( PreprocessMouseEvent(event,handlerFlags) )
        return;
    const bool ctrlIsButton = wxPlatformIs(wxOS_WINDOWS);
    if ( ctrlIsButton &&
         (m_windowStyle & (wxCC_SPECIAL_DCLICK|wxCB_READONLY)) == wxCB_READONLY )
    {
        // if no textctrl and no special double-click, then the entire control acts
        // as a button
        handlerFlags |= wxCC_MF_ON_BUTTON;
        if ( HandleButtonMouseEvent(event,handlerFlags) )
            return;
    }
    else
    {
        if ( isOnButtonArea || HasCapture() ||
             (m_widthCustomPaint && mx < (m_tcArea.x+m_widthCustomPaint)) )
        {
            handlerFlags |= wxCC_MF_ON_CLICK_AREA;
            if ( HandleButtonMouseEvent(event,handlerFlags) )
                return;
        }
        else if ( m_btnState )
        {
            // otherwise need to clear the hover status
            m_btnState = 0;
            RefreshRect(m_btnArea);
        }
    }
    //
    // This will handle left_down and left_dclick events outside button in a Windows/GTK-like manner.
    // See header file for further information on this method.
    HandleNormalMouseEvent(event);
}
// Sets the width of the custom-painted strip (e.g. an image) shown in front
// of the text. On platforms with unreliable borderless text controls this
// re-creates the embedded wxTextCtrl with the appropriate border style.
void wxGenericComboCtrl::SetCustomPaintWidth( int width )
{
#ifdef UNRELIABLE_TEXTCTRL_BORDER
    //
    // If starting/stopping to show an image in front
    // of a writable text-field, then re-create textctrl
    // with different kind of border (because we can't
    // assume that textctrl fully supports wxNO_BORDER).
    //
    wxTextCtrl* tc = GetTextCtrl();
    if ( tc && (m_iFlags & wxCC_BUTTON_OUTSIDE_BORDER) )
    {
        int borderType = tc->GetWindowStyle() & wxBORDER_MASK;
        int tcCreateStyle = -1;
        if ( width > 0 )
        {
            // Re-create textctrl with no border
            if ( borderType != wxNO_BORDER )
            {
                m_widthCustomBorder = 1;
                tcCreateStyle = wxNO_BORDER;
            }
        }
        else if ( width == 0 )
        {
            // Re-create textctrl with normal border
            if ( borderType == wxNO_BORDER )
            {
                m_widthCustomBorder = 0;
                tcCreateStyle = 0;
            }
        }
        // Common textctrl re-creation code
        if ( tcCreateStyle != -1 )
        {
            tc->RemoveEventHandler(m_textEvtHandler);
            delete m_textEvtHandler;
            CreateTextCtrl( tcCreateStyle );
            InstallInputHandlers();
        }
    }
#endif // UNRELIABLE_TEXTCTRL_BORDER
    wxComboCtrlBase::SetCustomPaintWidth( width );
}
// Returns true when the key event should toggle the popup: Escape or Alt-Up
// dismiss an open popup; Alt-Down or F4 open a closed one.
bool wxGenericComboCtrl::IsKeyPopupToggle(const wxKeyEvent& event) const
{
    int keycode = event.GetKeyCode();
    bool isPopupShown = IsPopupShown();
    // This code is AFAIK appropriate for wxGTK.
    if ( isPopupShown )
    {
        if ( keycode == WXK_ESCAPE ||
             ( keycode == WXK_UP && event.AltDown() ) )
            return true;
    }
    else
    {
        if ( (keycode == WXK_DOWN && event.AltDown()) ||
             (keycode == WXK_F4) )
            return true;
    }
    return false;
}
#if defined(__WXOSX__)
// OSX-only: exposes the native text widget implementation of the embedded
// wxTextCtrl, or NULL when the combo has no text control.
wxTextWidgetImpl * wxGenericComboCtrl::GetTextPeer() const
{
    return m_text ? m_text->GetTextPeer() : NULL;
}
#endif
#ifdef __WXUNIVERSAL__
// wxUniversal action dispatch: shows/dismisses the popup for the combobox
// actions, otherwise defers to wxControl's default handling.
bool wxGenericComboCtrl::PerformAction(const wxControlAction& action,
                                       long numArg,
                                       const wxString& strArg)
{
    bool processed = false;
    if ( action == wxACTION_COMBOBOX_POPUP )
    {
        if ( !IsPopupShown() )
        {
            ShowPopup();
            processed = true;
        }
    }
    else if ( action == wxACTION_COMBOBOX_DISMISS )
    {
        if ( IsPopupShown() )
        {
            HidePopup();
            processed = true;
        }
    }
    if ( !processed )
    {
        // pass along
        return wxControl::PerformAction(action, numArg, strArg);
    }
    return true;
}
#endif // __WXUNIVERSAL__
// If native wxComboCtrl was not defined, then prepare a simple
// front-end so that wxRTTI works as expected.
#ifndef _WX_COMBOCONTROL_H_
wxIMPLEMENT_DYNAMIC_CLASS(wxComboCtrl, wxGenericComboCtrl);
#endif
#endif // !wxCOMBOCONTROL_FULLY_FEATURED
#endif // wxUSE_COMBOCTRL
|
#!/bin/bash
# Path to VIAME installation
export VIAME_INSTALL=/opt/noaa/viame
# Core processing options
export INPUT_DIRECTORY=training_data
# Setup paths and run command
source ${VIAME_INSTALL}/setup_viame.sh
# Adjust log level
export KWIVER_DEFAULT_LOG_LEVEL=info
# Train a detector on the CSV-annotated data using the YOLO 704px pipeline.
# NOTE(review): --threshold 0.0 presumably keeps all annotations regardless of
# confidence — confirm against viame_train_detector documentation.
viame_train_detector \
  -i ${INPUT_DIRECTORY} \
  -c ${VIAME_INSTALL}/configs/pipelines/train_yolo_704.viame_csv.conf \
  --threshold 0.0
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.iosCropStrong = void 0;
var iosCropStrong = {
"viewBox": "0 0 512 512",
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "rect",
"attribs": {
"x": "128",
"y": "64",
"width": "32",
"height": "48"
},
"children": [{
"name": "rect",
"attribs": {
"x": "128",
"y": "64",
"width": "32",
"height": "48"
},
"children": []
}]
}, {
"name": "polygon",
"attribs": {
"points": "160,352 160,176 128,176 128,384 336,384 336,352 \t"
},
"children": [{
"name": "polygon",
"attribs": {
"points": "160,352 160,176 128,176 128,384 336,384 336,352 \t"
},
"children": []
}]
}, {
"name": "rect",
"attribs": {
"x": "400",
"y": "352",
"width": "48",
"height": "32"
},
"children": [{
"name": "rect",
"attribs": {
"x": "400",
"y": "352",
"width": "48",
"height": "32"
},
"children": []
}]
}, {
"name": "polygon",
"attribs": {
"points": "64,128 64,160 352,160 352,448 384,448 384,128 \t"
},
"children": [{
"name": "polygon",
"attribs": {
"points": "64,128 64,160 352,160 352,448 384,448 384,128 \t"
},
"children": []
}]
}]
}]
};
exports.iosCropStrong = iosCropStrong; |
/** Tiny helpers for emitting Bootstrap grid markup. */
class BS {
    /** Wraps $content in a Bootstrap column of the given grid width. */
    public static function span($width, $content) {
        return sprintf("<div class='col-md-%s'>%s</div>", $width, $content);
    }
    /** Wraps $content in a Bootstrap row. */
    public static function row($content) {
        return sprintf("<div class='row'>%s</div>", $content);
    }
}
// Usage: render a table and a panel side-by-side as two half-width columns.
$table = "<table>...</table>";
$panel = "<div>...</div>";
$left = BS::span(6, $table);
$right = BS::span(6, $panel);
echo BS::row($left . $right);
"""AWSScanMuxer that runs account scans one-per-lambda"""
from concurrent.futures import Future, ThreadPoolExecutor
import json
from typing import Tuple
import boto3
import botocore
from altimeter.aws.log_events import AWSLogEvents
from altimeter.aws.scan.account_scanner import AccountScanResult
from altimeter.aws.scan.muxer import AWSScanMuxer
from altimeter.aws.scan.scan_plan import AccountScanPlan
from altimeter.core.base_model import BaseImmutableModel
from altimeter.core.config import AWSConfig
from altimeter.core.log import Logger
class AccountScanLambdaEvent(BaseImmutableModel):
    """Serializable payload sent to the AccountScan lambda for one account scan."""
    # plan describing which account/regions to scan
    account_scan_plan: AccountScanPlan
    # unique identifier of the overall scan this account scan belongs to
    scan_id: str
    # destination path for scan artifacts
    artifact_path: str
    # max concurrent service-scan threads inside the lambda
    max_svc_scan_threads: int
    # regions preferred for account-level API calls
    preferred_account_scan_regions: Tuple[str, ...]
    # whether to also scan discovered sub-accounts
    scan_sub_accounts: bool
class LambdaAWSScanMuxer(AWSScanMuxer):
    """AWSScanMuxer that runs account scans in AccountScan lambdas

    Args:
        scan_id: unique scan identifier
        account_scan_lambda_name: name of the AccountScan lambda
        account_scan_lambda_timeout: timeout for the AccountScan lambda
        config: Config object
    """

    def __init__(
        self,
        scan_id: str,
        account_scan_lambda_name: str,
        account_scan_lambda_timeout: int,
        config: AWSConfig,
    ):
        super().__init__(scan_id=scan_id, config=config)
        # the timeout is also used to size the boto client's read timeout in
        # invoke_lambda
        self.account_scan_lambda_name = account_scan_lambda_name
        self.account_scan_lambda_timeout = account_scan_lambda_timeout

    def _schedule_account_scan(
        self, executor: ThreadPoolExecutor, account_scan_plan: AccountScanPlan
    ) -> Future:
        """Schedule an account scan by calling the AccountScan lambda with
        the proper arguments."""
        # Build the serializable event the lambda expects from the plan plus
        # muxer-level scan settings pulled off the config.
        lambda_event = AccountScanLambdaEvent(
            account_scan_plan=account_scan_plan,
            scan_id=self.scan_id,
            artifact_path=self.config.artifact_path,
            max_svc_scan_threads=self.config.concurrency.max_svc_scan_threads,
            preferred_account_scan_regions=self.config.scan.preferred_account_scan_regions,
            scan_sub_accounts=self.config.scan.scan_sub_accounts,
        )
        return executor.submit(
            invoke_lambda,
            self.account_scan_lambda_name,
            self.account_scan_lambda_timeout,
            lambda_event,
        )
def invoke_lambda(
    lambda_name: str, lambda_timeout: int, account_scan_lambda_event: AccountScanLambdaEvent
) -> AccountScanResult:
    """Invoke the AccountScan AWS Lambda function

    Args:
        lambda_name: name of lambda
        lambda_timeout: timeout of the lambda. Used to tell the boto3 lambda client to wait
            at least this long for a response before timing out.
        account_scan_lambda_event: AccountScanLambdaEvent object to serialize to json
            and send to the lambda

    Returns:
        AccountScanResult

    Raises:
        Exception if there was an error invoking the lambda, or if the lambda
        itself reported a function error.
    """
    logger = Logger()
    account_id = account_scan_lambda_event.account_scan_plan.account_id
    with logger.bind(lambda_name=lambda_name, lambda_timeout=lambda_timeout, account_id=account_id):
        logger.info(event=AWSLogEvents.RunAccountScanLambdaStart)
        # Pad the client read timeout past the lambda's own timeout so the
        # lambda times out first. Retries are disabled — NOTE(review):
        # presumably to avoid duplicate account scans; confirm.
        boto_config = botocore.config.Config(
            read_timeout=lambda_timeout + 10, retries={"max_attempts": 0},
        )
        session = boto3.Session()
        lambda_client = session.client("lambda", config=boto_config)
        try:
            resp = lambda_client.invoke(
                FunctionName=lambda_name, Payload=account_scan_lambda_event.json().encode("utf-8")
            )
        except Exception as invoke_ex:
            error = str(invoke_ex)
            logger.info(event=AWSLogEvents.RunAccountScanLambdaError, error=error)
            raise Exception(
                f"Error while invoking {lambda_name} with event {account_scan_lambda_event.json()}: {error}"
            ) from invoke_ex
        payload: bytes = resp["Payload"].read()
        # A "FunctionError" field means the lambda ran but raised; the payload
        # then contains the error details rather than a scan result.
        if resp.get("FunctionError", None):
            function_error = payload.decode()
            logger.info(event=AWSLogEvents.RunAccountScanLambdaError, error=function_error)
            raise Exception(
                f"Function error in {lambda_name} with event {account_scan_lambda_event.json()}: {function_error}"
            )
        payload_dict = json.loads(payload)
        account_scan_result = AccountScanResult(**payload_dict)
        logger.info(event=AWSLogEvents.RunAccountScanLambdaEnd)
        return account_scan_result
|
/*
BSD 3-Clause License
Copyright (c) 2018, Roboy
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
author: <NAME> ( <EMAIL> ), 2018
description: triangulation, pose estimation, lighthouse pose correction, factory calibration
*/
#include "darkroom/Transform.hpp"
#include "darkroom/Triangulation.hpp"
#include "darkroom/PoseEstimatorSensorCloud.hpp"
#include "darkroom/PoseEstimatorMultiLighthouse.hpp"
#include "darkroom/Sensor.hpp"
#include <common_utilities/rviz_visualization.hpp>
#include <common_utilities/CommonDefinitions.h>
#include <roboy_communication_middleware/LighthousePoseCorrection.h>
#include <roboy_communication_middleware/DarkRoomSensor.h>
#include <roboy_communication_middleware/DarkRoomOOTX.h>
#include <roboy_communication_middleware/ArucoPose.h>
#include <common_utilities/CommonDefinitions.h>
#include <geometry_msgs/PoseWithCovarianceStamped.h>
#include <boost/filesystem.hpp>
#include <atomic>
#include <mutex>
#include "darkroom/InYourGibbousPhase.hpp"
#include "darkroom/InYourGibbousPhase2.hpp"
#include "darkroom/InYourGibbousPhase3.hpp"
#include <ros/package.h>
#include "darkroom/Utilities.hpp"
#include <stdlib.h>
#include <stdio.h>
#include <math.h>
#include <optimization.h>
#include <epnp/epnp.h>
using namespace alglib;
namespace fs = boost::filesystem;

// Estimator tuning constants.
#define MAX_ITERATIONS 100
#define ERROR_THRESHOLD 0.0000001
#define NUMBER_OF_SAMPLES 100
#define NUMBER_OF_PARTICLES 1000

// Shared default argument: an empty sensor-id list meaning "no restriction".
static vector<int> DEFAULT_VECTOR;

// alglib least-squares residual callback (defined in the implementation).
void function1_fvec(const real_1d_array &x, real_1d_array &fi, void *ptr);
// Bundles triangulation, pose estimation, lighthouse pose correction and
// factory calibration for lighthouse-based tracking.
class LighthouseEstimator
        : public DarkRoom::Transform, public rviz_visualization, public Triangulation, public Utilities {
public:
    LighthouseEstimator();

    /**
     * This returns the sensors that are calibrated and visible by both lighthouses
     * @param visible_sensors will be filled with sensor ids
     */
    void getVisibleCalibratedSensors(vector<int> &visible_sensors);

    /**
     * This returns the sensors that are calibrated and visible to the given lighthouse
     * @param lighthouse selects the lighthouse to query
     * @param visible_sensors will be filled with sensor ids
     */
    void getVisibleCalibratedSensors(bool lighthouse, vector<int> &visible_sensors);

    /**
     * Estimates the pose correction between lighthouse 1 and 2, such that the squared distances between sensor positions
     * estimated for both lighthouses is minimized.
     * @return success
     */
    bool lighthousePoseEstimationLeastSquares();

    /**
     * Estimates the pose of an object using relative sensor position information and least square matching
     */
    void objectPoseEstimationLeastSquares();

    /**
     * Estimates the sensor distances of all active sensors (or a vector of specified sensor ids)
     * using the known (i.e. calibrated) relative distance between the sensors and the lighthouse angles
     * @param lighthouse for which lighthouse
     * @param specificIds if defined, waits until the specified sensors become active
     * @return success
     */
    bool estimateSensorPositionsUsingRelativeDistances(bool lighthouse, vector<int> &specificIds = DEFAULT_VECTOR);

    /**
     * Estimates the sensor distances of all active sensors
     * using the known (i.e. calibrated) relative distance between the sensors and the lighthouse angles, then estimates
     * the object pose relative to each lighthouse
     * @return success
     */
    bool estimateObjectPoseUsingRelativeDistances();

    /**
     * Estimates object pose using multi lighthouse approach
     */
    void estimateObjectPoseMultiLighthouse();

    /**
     * Triangulates the sensor positions (the transform between lighthouse 1 and 2 needs to be known, otherwise the
     * triangulated position is not correct)
     */
    void triangulateSensors();

    /**
     * Estimates relative object pose using EPnP
     */
    void estimateObjectPoseEPNP();

    /**
     * Publishes the lighthouse rays
     */
    void publishRays();

    /**
     * Measures triangulated sensor locations for 30 seconds. Calculates mean sensor locations and generates
     * relative sensor positions which are saved to a yaml file
     */
    void calibrateRelativeSensorDistances();

    /**
     * Estimates calibration values based on known sensor angles
     * @param lighthouse for this lighthouse
     * @return success
     */
    bool estimateFactoryCalibration(int lighthouse);

    /**
     * Estimates calibration values using EPnP
     * @param lighthouse for this lighthouse
     * @return success
     */
    bool estimateFactoryCalibrationEPNP(int lighthouse);

    /**
     * Estimates calibration values using the multi lighthouse pose estimator
     * @param lighthouse for this lighthouse
     * @return success
     */
    bool estimateFactoryCalibrationMultiLighthouse(int lighthouse);

    /**
     * Estimates calibration values based on known sensor angles (alternative formulation)
     * @param lighthouse for this lighthouse
     * @return success
     */
    bool estimateFactoryCalibration2(int lighthouse);

    /**
     * Returns a unique id for #MESSAGE_ID sensor and lighthouse
     * @param type the message type #MESSAGE_ID
     * @param sensor the sensor id
     * @param lighthouse the lighthouse
     * @return a unique id
     */
    int getMessageID(int type, int sensor, bool lighthouse = false);

    // Marker/message categories consumed by getMessageID.
    enum MESSAGE_ID {
        TRIANGULATED = 0,   // for each sensor
        DISTANCE = 1,       // for each sensor and lighthouse
        RAY = 2,            // for each sensor and lighthouse
        SENSOR_NAME = 3,    // for each sensor
        DISTANCES = 4
    };

    // How a pose correction should be applied. NOTE(review): RELATIV/ABSOLUT
    // spellings are part of the public API and kept as-is.
    enum POSE_CORRECTION_TYPE {
        RELATIV = 0,
        ABSOLUT = 1,
        OBJECT = 2
    };

    map<int, Sensor> sensors;                    // all known sensors, keyed by sensor id
    vector<int> calibrated_sensors;              // ids of sensors with calibrated relative positions
    map<int, vector<double>> calibration_angles; // per-sensor angle values — NOTE(review): exact semantics defined in the .cpp
    int active_sensors = 0;                      // presumably the count of currently active sensors — confirm in .cpp
    // State/feature toggles; atomic so they can be flipped while the
    // estimator loops are running.
    atomic<bool> tracking, calibrating, poseestimating, poseestimating_epnp, poseestimating_multiLighthouse, objectposeestimating,
            distances, rays, particle_filtering, use_lighthouse_calibration_data_phase[2],
            use_lighthouse_calibration_data_tilt[2], use_lighthouse_calibration_data_gibphase[2],
            use_lighthouse_calibration_data_gibmag[2];
    mutex mux;                                   // NOTE(review): protected scope defined in the .cpp
    fs::path mesh;                               // mesh file path — presumably for rviz visualization; confirm
    bool has_mesh = false;
    string name = "bastiisdoff";                 // tracked object name (default placeholder)
    string imu_topic_name, pose_topic_name;
    ros::Publisher pose_pub;
    tf::Transform pose;
    static int trackedObjectInstance;            //! a unique object instance (helps with unique rviz marker ids)

private:
    void receiveOOTXData(const roboy_communication_middleware::DarkRoomOOTX::ConstPtr &msg);

    // Apply factory calibration corrections to raw lighthouse angles
    // (overloads for both lighthouses at once, one lighthouse, or a single
    // elevation/azimuth pair).
    void applyCalibrationData(Vector2d &lighthouse0_angles, Vector2d &lighthouse1_angles);

    void applyCalibrationData(bool lighthouse, Vector2d &lighthouse_angles);

    void applyCalibrationData(bool lighthouse, double &elevation, double &azimuth);

    // Pseudo-inverse of A.
    MatrixXd Pinv(MatrixXd A);

private:
    ros::NodeHandlePtr nh;
    boost::shared_ptr<ros::AsyncSpinner> spinner;
    ros::Publisher sensor_location_pub, lighthouse_pose_correction;
    ros::Subscriber ootx_sub;
    VectorXd object_pose;
    OOTXframe ootx[2];                           // one OOTX frame per lighthouse
};
|
def int_to_Roman(number):
    """Convert ``number`` to its Roman numeral string.

    Non-positive inputs yield an empty string (the original while-loop
    never executed for number <= 0).
    """
    if number <= 0:
        return ''
    # Value/symbol pairs from largest to smallest; greedy subtraction in
    # this order produces the canonical subtractive form (e.g. 900 -> "CM").
    numerals = (
        (1000, "M"), (900, "CM"), (500, "D"), (400, "CD"),
        (100, "C"), (90, "XC"), (50, "L"), (40, "XL"),
        (10, "X"), (9, "IX"), (5, "V"), (4, "IV"),
        (1, "I"),
    )
    parts = []
    for value, symbol in numerals:
        count, number = divmod(number, value)
        parts.append(symbol * count)
    return ''.join(parts)
#!/bin/sh
# Launch MAML training for the mini-ImageNet 5-way 1-shot configuration.
# Moves to the parent directory first — presumably the repo root where
# train_maml_system.py and experiment_config/ live; confirm layout.
cd ..
# Directory (relative to the working directory) holding the datasets.
export DATASET_DIR="datasets/"
# Activate the relevant virtual environment:
#python dataset_tools.py --name_of_args_json_file experiment_config/umaml_maml_omniglot_characters_20_1_seed_1.json
python train_maml_system.py --name_of_args_json_file experiment_config/mini-imagenet_maml_5_way_1_shot_batch_norm_log_5_seed_1.json --gpu_to_use 0
<reponame>rsuite/rsuite-icons
// Generated by script, don't edit it please.
import createSvgIcon from '../../createSvgIcon';
import StopCircleOSvg from '@rsuite/icon-font/lib/legacy/StopCircleO';
// Wraps the legacy "stop circle o" glyph from @rsuite/icon-font as a
// ready-to-use SVG icon component (file is auto-generated; see header).
const StopCircleO = createSvgIcon({
  as: StopCircleOSvg,
  ariaLabel: 'stop circle o',
  category: 'legacy',
  displayName: 'StopCircleO'
});

export default StopCircleO;
|
import { StyleSheet } from 'react-native'
/**
* Custom styles to be used accordingly
*/
// One step of the Material-style shadow scale: only the vertical offset and
// blur radius vary per level; colour and opacity stay fixed.
// NOTE(review): the original kept `elevation` (Android) commented out, so it
// is intentionally omitted here as well.
const materialShadow = (offsetHeight, radius) => ({
  shadowColor: '#9E9E9E',
  shadowOffset: { width: 0, height: offsetHeight },
  shadowOpacity: 1,
  shadowRadius: radius,
})

// Shared spacing/alignment/shadow styles; the CSS box-shadow values these
// approximate are kept for reference in the comment at the end of this file.
const Styles = StyleSheet.create({
  marginHorizontalMedium: { marginLeft: 16, marginRight: 16 },
  marginVerticalMedium: { marginTop: 16, marginBottom: 16 },
  marginVerticalLarge: { marginTop: 24, marginBottom: 24 },
  marginTopSmall: { marginTop: 8 },
  marginTopMedium: { marginTop: 16 },
  marginTopLarge: { marginTop: 24 },
  marginBottomMedium: { marginBottom: 16 },
  alignCenter: { alignItems: 'center' },
  full: { width: '100%', height: '100%' },
  'shadow-1': materialShadow(2, 1),
  'shadow-2': materialShadow(4, 2),
  'shadow-3': materialShadow(6, 3),
  'shadow-4': materialShadow(8, 4),
  'shadow-5': materialShadow(10, 5),
})

export default Styles
/*
.card-1 {
box-shadow: 0 1px 3px rgba(0,0,0,0.12), 0 1px 2px rgba(0,0,0,0.24);
transition: all 0.3s cubic-bezier(.25,.8,.25,1);
}
.card-2 {
box-shadow: 0 3px 6px rgba(0,0,0,0.16), 0 3px 6px rgba(0,0,0,0.23);
}
.card-3 {
box-shadow: 0 10px 20px rgba(0,0,0,0.19), 0 6px 6px rgba(0,0,0,0.23);
}
.card-4 {
box-shadow: 0 14px 28px rgba(0,0,0,0.25), 0 10px 10px rgba(0,0,0,0.22);
}
.card-5 {
box-shadow: 0 19px 38px rgba(0,0,0,0.30), 0 15px 12px rgba(0,0,0,0.22);
}
*/
|
function resolveModuleImport(importPath, currentFilePath) {
const path = require('path');
let resolvedPath;
if (importPath.startsWith("./")) {
resolvedPath = path.resolve(path.dirname(currentFilePath), importPath);
} else if (importPath.startsWith("../")) {
resolvedPath = path.resolve(path.dirname(path.dirname(currentFilePath)), importPath);
} else {
// Handle external module resolution logic here
// Example: resolvedPath = resolveExternalModule(importPath);
}
// Assuming the resolved file has a .js extension
return resolvedPath + ".js";
} |
// Autogenerated from library/graphs.i
package ideal.library.graphs;
import ideal.library.elements.*;
public interface immutable_graph<vertice_type extends readonly_data, edge_type extends readonly_data> extends immutable_data, readonly_graph<vertice_type, edge_type> { }
|
# Sum of the decimal digits of +n+ (as rendered by +to_s+).
# Non-digit characters such as a leading minus sign contribute 0 via
# String#to_i, matching the original chars.map(&:to_i).inject(:+) form.
def sum_of_digits(n)
  n.to_s.each_char.sum(&:to_i)
end

number = 1937
puts(sum_of_digits(number))
#!/usr/bin/env bash
#
# Copyright (c) 2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Log the full configuration up front so every run's log records how it was
# invoked.
echo 'Running with parameters:'
echo " USE_CASE: ${USE_CASE}"
echo " FRAMEWORK: ${FRAMEWORK}"
echo " WORKSPACE: ${WORKSPACE}"
echo " DATASET_LOCATION: ${DATASET_LOCATION}"
echo " CHECKPOINT_DIRECTORY: ${CHECKPOINT_DIRECTORY}"
echo " BACKBONE_MODEL_DIRECTORY: ${BACKBONE_MODEL_DIRECTORY}"
echo " IN_GRAPH: ${IN_GRAPH}"
echo " MOUNT_INTELAI_MODELS_COMMON_SOURCE_DIR: ${MOUNT_INTELAI_MODELS_COMMON_SOURCE}"

# Volume mappings only exist when running inside Docker.
if [ -n "${DOCKER}" ]; then
  echo " Mounted volumes:"
  echo " ${BENCHMARK_SCRIPTS} mounted on: ${MOUNT_BENCHMARK}"
  echo " ${EXTERNAL_MODELS_SOURCE_DIRECTORY} mounted on: ${MOUNT_EXTERNAL_MODELS_SOURCE}"
  echo " ${INTELAI_MODELS} mounted on: ${MOUNT_INTELAI_MODELS_SOURCE}"
  echo " ${DATASET_LOCATION_VOL} mounted on: ${DATASET_LOCATION}"
  echo " ${CHECKPOINT_DIRECTORY_VOL} mounted on: ${CHECKPOINT_DIRECTORY}"
  echo " ${BACKBONE_MODEL_DIRECTORY_VOL} mounted on: ${BACKBONE_MODEL_DIRECTORY}"
fi

echo " SOCKET_ID: ${SOCKET_ID}"
echo " MODEL_NAME: ${MODEL_NAME}"
echo " MODE: ${MODE}"
echo " PRECISION: ${PRECISION}"
echo " BATCH_SIZE: ${BATCH_SIZE}"
echo " NUM_CORES: ${NUM_CORES}"
echo " BENCHMARK_ONLY: ${BENCHMARK_ONLY}"
echo " ACCURACY_ONLY: ${ACCURACY_ONLY}"
echo " OUTPUT_RESULTS: ${OUTPUT_RESULTS}"
echo " DISABLE_TCMALLOC: ${DISABLE_TCMALLOC}"
echo " TCMALLOC_LARGE_ALLOC_REPORT_THRESHOLD: ${TCMALLOC_LARGE_ALLOC_REPORT_THRESHOLD}"
echo " NOINSTALL: ${NOINSTALL}"
echo " OUTPUT_DIR: ${OUTPUT_DIR}"
echo " MPI_NUM_PROCESSES: ${MPI_NUM_PROCESSES}"
# Fixed label typo (was "MPI_NUM_PEOCESSES_PER_SOCKET").
echo " MPI_NUM_PROCESSES_PER_SOCKET: ${MPI_NUM_PROCESSES_PER_SOCKET}"
echo " MPI_HOSTNAMES: ${MPI_HOSTNAMES}"
echo " NUMA_CORES_PER_INSTANCE: ${NUMA_CORES_PER_INSTANCE}"
echo " PYTHON_EXE: ${PYTHON_EXE}"
echo " PYTHONPATH: ${PYTHONPATH}"
echo " DRY_RUN: ${DRY_RUN}"

# inference & training is supported right now
if [ ${MODE} != "inference" ] && [ ${MODE} != "training" ]; then
  echo "${MODE} mode for ${MODEL_NAME} is not supported"
  exit 1
fi
# Determines if we are running in a container by checking for .dockerenv
# Succeeds (exit status 0) iff we are executing inside a Docker container,
# detected via the /.dockerenv sentinel file Docker creates at the root.
function _running-in-container()
{
  test -f /.dockerenv
}
# Install common OS/python dependencies unless the caller opted out.
if [[ ${NOINSTALL} != "True" ]]; then
  ## install common dependencies
  apt-get update -y
  # Set env var before installs so that user interaction is not required
  export DEBIAN_FRONTEND=noninteractive
  apt-get install gcc-8 g++-8 cmake python-tk -y
  # NOTE(review): gcc-8/g++-8 are installed above, yet an alternative for
  # gcc-7 is registered first — presumably gcc-7 ships with the base image;
  # confirm, otherwise the gcc-7 line registers a nonexistent binary.
  update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-7 700 --slave /usr/bin/g++ g++ /usr/bin/g++-7
  update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-8 800 --slave /usr/bin/g++ g++ /usr/bin/g++-8
  apt-get install -y libsm6 libxext6
  pip install --upgrade pip==20.3.4
  pip install requests

  # install google-perftools for tcmalloc
  if [[ ${DISABLE_TCMALLOC} != "True" ]]; then
    apt-get install google-perftools -y
  fi

  # Multi-process runs additionally need OpenMPI + Horovod.
  if [[ ${MPI_NUM_PROCESSES} != "None" ]]; then
    ## Installing OpenMPI
    apt-get install openmpi-bin openmpi-common openssh-client openssh-server libopenmpi-dev -y
    # Horovod Installation: build with TensorFlow support only.
    export HOROVOD_VERSION=0.21.0
    export HOROVOD_WITHOUT_PYTORCH=1
    export HOROVOD_WITHOUT_MXNET=1
    export HOROVOD_WITH_TENSORFLOW=1
    apt-get update
    # In case installing released versions of Horovod fails, and there is
    # a working commit, replace the next set of commands with something like:
    apt-get install -y --no-install-recommends --fix-missing cmake git
    pip install git+https://github.com/horovod/horovod.git@v${HOROVOD_VERSION}
    # apt-get install -y --no-install-recommends --fix-missing cmake
    # pip install horovod==${HOROVOD_VERSION}
  fi
fi
# If we are running in a container, call the container_init.sh files
# (framework-wide first, then the model/mode/precision-specific one).
if _running-in-container ; then
  # Call the framework's container_init.sh, if it exists
  if [ -f ${MOUNT_BENCHMARK}/common/${FRAMEWORK}/container_init.sh ]; then
    ${MOUNT_BENCHMARK}/common/${FRAMEWORK}/container_init.sh
  fi
  # Call the model specific container_init.sh, if it exists
  if [ -f ${MOUNT_BENCHMARK}/${USE_CASE}/${FRAMEWORK}/${MODEL_NAME}/${MODE}/${PRECISION}/container_init.sh ]; then
    ${MOUNT_BENCHMARK}/${USE_CASE}/${FRAMEWORK}/${MODEL_NAME}/${MODE}/${PRECISION}/container_init.sh
  fi
fi
# Translate boolean env toggles into optional CLI flags for run_tf_benchmark.
verbose_arg=""
if [ ${VERBOSE} == "True" ]; then
  verbose_arg="--verbose"
fi
accuracy_only_arg=""
if [ ${ACCURACY_ONLY} == "True" ]; then
  accuracy_only_arg="--accuracy-only"
fi
benchmark_only_arg=""
if [ ${BENCHMARK_ONLY} == "True" ]; then
  benchmark_only_arg="--benchmark-only"
fi
output_results_arg=""
if [ ${OUTPUT_RESULTS} == "True" ]; then
  output_results_arg="--output-results"
fi
numa_cores_per_instance_arg=""
if [[ -n ${NUMA_CORES_PER_INSTANCE} && ${NUMA_CORES_PER_INSTANCE} != "None" ]]; then
  numa_cores_per_instance_arg="--numa-cores-per-instance=${NUMA_CORES_PER_INSTANCE}"
fi

RUN_SCRIPT_PATH="common/${FRAMEWORK}/run_tf_benchmark.py"

# Timestamped log file name so repeated runs do not overwrite each other.
timestamp=`date +%Y%m%d_%H%M%S`
LOG_FILENAME="benchmark_${MODEL_NAME}_${MODE}_${PRECISION}_${timestamp}.log"
if [ ! -d "${OUTPUT_DIR}" ]; then
  mkdir ${OUTPUT_DIR}
fi

# Make the common and model-specific Intel AI model code importable.
export PYTHONPATH=${PYTHONPATH}:${MOUNT_INTELAI_MODELS_COMMON_SOURCE}:${MOUNT_INTELAI_MODELS_SOURCE}
# Common execution command used by all models
function run_model() {
  # Navigate to the main benchmark directory before executing the script,
  # since the scripts use the benchmark/common scripts as well.
  cd ${MOUNT_BENCHMARK}

  # Start benchmarking.
  # NOTE(review): ${LOGFILE} is never assigned anywhere in this script —
  # presumably intended to be "${OUTPUT_DIR}/${LOG_FILENAME}"; confirm.
  if [[ -z $DRY_RUN ]]; then
    if [[ -z $numa_cores_per_instance_arg ]]; then
      eval ${CMD} 2>&1 | tee ${LOGFILE}
    else
      # Don't tee to a log file for numactl multi-instance runs
      eval ${CMD}
    fi
  else
    # Dry run: only print the command that would have been executed.
    echo ${CMD}
    return
  fi

  if [ ${VERBOSE} == "True" ]; then
    echo "PYTHONPATH: ${PYTHONPATH}" | tee -a ${LOGFILE}
    echo "RUNCMD: ${CMD} " | tee -a ${LOGFILE}
    if [[ ${BATCH_SIZE} != "-1" ]]; then
      echo "Batch Size: ${BATCH_SIZE}" | tee -a ${LOGFILE}
    fi
  fi
  if [[ ${BATCH_SIZE} != "-1" ]]; then
    echo "Ran ${MODE} with batch size ${BATCH_SIZE}" | tee -a ${LOGFILE}
  fi

  # if it starts with /workspace then it's not a separate mounted dir
  # so it's custom and is in same spot as LOGFILE is, otherwise it's mounted in a different place
  if [[ "${OUTPUT_DIR}" = "/workspace"* ]]; then
    LOG_LOCATION_OUTSIDE_CONTAINER=${BENCHMARK_SCRIPTS}/common/${FRAMEWORK}/logs/${LOG_FILENAME}
  else
    LOG_LOCATION_OUTSIDE_CONTAINER=${LOGFILE}
  fi

  # Don't print log file location for numactl multi-instance runs, because those have
  # separate log files for each instance
  if [[ -z $numa_cores_per_instance_arg ]]; then
    echo "Log file location: ${LOG_LOCATION_OUTSIDE_CONTAINER}" | tee -a ${LOGFILE}
  fi
}
# basic run command with commonly used args
CMD="${PYTHON_EXE} ${RUN_SCRIPT_PATH} \
--framework=${FRAMEWORK} \
--use-case=${USE_CASE} \
--model-name=${MODEL_NAME} \
--precision=${PRECISION} \
--mode=${MODE} \
--benchmark-dir=${MOUNT_BENCHMARK} \
--intelai-models=${MOUNT_INTELAI_MODELS_SOURCE} \
--num-cores=${NUM_CORES} \
--batch-size=${BATCH_SIZE} \
--socket-id=${SOCKET_ID} \
--output-dir=${OUTPUT_DIR} \
--num-train-steps=${NUM_TRAIN_STEPS} \
${numa_cores_per_instance_arg} \
${accuracy_only_arg} \
${benchmark_only_arg} \
${output_results_arg} \
${verbose_arg}"

# Optional args are appended only when their env vars carry a real value, so
# the python arg parser only sees flags that were explicitly configured.
if [ ${MOUNT_EXTERNAL_MODELS_SOURCE} != "None" ]; then
  CMD="${CMD} --model-source-dir=${MOUNT_EXTERNAL_MODELS_SOURCE}"
fi
if [[ -n "${IN_GRAPH}" && ${IN_GRAPH} != "" ]]; then
  CMD="${CMD} --in-graph=${IN_GRAPH}"
fi
if [[ -n "${CHECKPOINT_DIRECTORY}" && ${CHECKPOINT_DIRECTORY} != "" ]]; then
  CMD="${CMD} --checkpoint=${CHECKPOINT_DIRECTORY}"
fi
if [[ -n "${BACKBONE_MODEL_DIRECTORY}" && ${BACKBONE_MODEL_DIRECTORY} != "" ]]; then
  CMD="${CMD} --backbone-model=${BACKBONE_MODEL_DIRECTORY}"
fi
if [[ -n "${DATASET_LOCATION}" && ${DATASET_LOCATION} != "" ]]; then
  CMD="${CMD} --data-location=${DATASET_LOCATION}"
fi
if [ ${NUM_INTER_THREADS} != "None" ]; then
  CMD="${CMD} --num-inter-threads=${NUM_INTER_THREADS}"
fi
if [ ${NUM_INTRA_THREADS} != "None" ]; then
  CMD="${CMD} --num-intra-threads=${NUM_INTRA_THREADS}"
fi
if [ ${DATA_NUM_INTER_THREADS} != "None" ]; then
  CMD="${CMD} --data-num-inter-threads=${DATA_NUM_INTER_THREADS}"
fi
if [ ${DATA_NUM_INTRA_THREADS} != "None" ]; then
  CMD="${CMD} --data-num-intra-threads=${DATA_NUM_INTRA_THREADS}"
fi
if [ ${DISABLE_TCMALLOC} != "None" ]; then
  CMD="${CMD} --disable-tcmalloc=${DISABLE_TCMALLOC}"
fi
## Added for bert
# Append the BERT-specific flags to ${CMD}. A train/inference option is
# mandatory for the corresponding mode; every other flag is forwarded only
# when its env var is non-empty.
# NOTE(review): most flags use dashes but --accum_steps and
# --mpi_workers_sync_gradients use underscores — presumably matching the
# python arg parser; confirm before "fixing" the spelling.
function bert_options() {

  if [[ ${MODE} == "training" ]]; then
    if [[ -z "${train_option}" ]]; then
      echo "Error: Please specify a train option (SQuAD, Classifier, Pretraining)"
      exit 1
    fi
    CMD=" ${CMD} --train-option=${train_option}"
  fi

  if [[ ${MODE} == "inference" ]]; then
    if [[ -z "${infer_option}" ]]; then
      echo "Error: Please specify a inference option (SQuAD, Classifier, Pretraining)"
      exit 1
    fi
    CMD=" ${CMD} --infer-option=${infer_option}"
  fi

  if [[ -n "${init_checkpoint}" && ${init_checkpoint} != "" ]]; then
    CMD=" ${CMD} --init-checkpoint=${init_checkpoint}"
  fi

  if [[ -n "${task_name}" && ${task_name} != "" ]]; then
    CMD=" ${CMD} --task-name=${task_name}"
  fi

  if [[ -n "${warmup_steps}" && ${warmup_steps} != "" ]]; then
    CMD=" ${CMD} --warmup-steps=${warmup_steps}"
  fi

  if [[ -n "${vocab_file}" && ${vocab_file} != "" ]]; then
    CMD=" ${CMD} --vocab-file=${vocab_file}"
  fi

  if [[ -n "${config_file}" && ${config_file} != "" ]]; then
    CMD=" ${CMD} --config-file=${config_file}"
  fi

  if [[ -n "${do_predict}" && ${do_predict} != "" ]]; then
    CMD=" ${CMD} --do-predict=${do_predict}"
  fi

  if [[ -n "${predict_file}" && ${predict_file} != "" ]]; then
    CMD=" ${CMD} --predict-file=${predict_file}"
  fi

  if [[ -n "${do_train}" && ${do_train} != "" ]]; then
    CMD=" ${CMD} --do-train=${do_train}"
  fi

  if [[ -n "${train_file}" && ${train_file} != "" ]]; then
    CMD=" ${CMD} --train-file=${train_file}"
  fi

  if [[ -n "${num_train_epochs}" && ${num_train_epochs} != "" ]]; then
    CMD=" ${CMD} --num-train-epochs=${num_train_epochs}"
  fi

  if [[ -n "${num_train_steps}" && ${num_train_steps} != "" ]]; then
    CMD=" ${CMD} --num-train-steps=${num_train_steps}"
  fi

  if [[ -n "${max_predictions}" && ${max_predictions} != "" ]]; then
    CMD=" ${CMD} --max-predictions=${max_predictions}"
  fi

  if [[ -n "${learning_rate}" && ${learning_rate} != "" ]]; then
    CMD=" ${CMD} --learning-rate=${learning_rate}"
  fi

  if [[ -n "${max_seq_length}" && ${max_seq_length} != "" ]]; then
    CMD=" ${CMD} --max-seq-length=${max_seq_length}"
  fi

  if [[ -n "${doc_stride}" && ${doc_stride} != "" ]]; then
    CMD=" ${CMD} --doc-stride=${doc_stride}"
  fi

  if [[ -n "${input_file}" && ${input_file} != "" ]]; then
    CMD=" ${CMD} --input-file=${input_file}"
  fi

  if [[ -n "${do_eval}" && ${do_eval} != "" ]]; then
    CMD=" ${CMD} --do-eval=${do_eval}"
  fi

  if [[ -n "${data_dir}" && ${data_dir} != "" ]]; then
    CMD=" ${CMD} --data-dir=${data_dir}"
  fi

  if [[ -n "${do_lower_case}" && ${do_lower_case} != "" ]]; then
    CMD=" ${CMD} --do-lower-case=${do_lower_case}"
  fi

  if [[ -n "${accum_steps}" && ${accum_steps} != "" ]]; then
    CMD=" ${CMD} --accum_steps=${accum_steps}"
  fi

  if [[ -n "${profile}" && ${profile} != "" ]]; then
    CMD=" ${CMD} --profile=${profile}"
  fi

  if [[ -n "${experimental_gelu}" && ${experimental_gelu} != "" ]]; then
    CMD=" ${CMD} --experimental-gelu=${experimental_gelu}"
  fi

  if [[ -n "${optimized_softmax}" && ${optimized_softmax} != "" ]]; then
    CMD=" ${CMD} --optimized-softmax=${optimized_softmax}"
  fi

  if [[ -n "${mpi_workers_sync_gradients}" && ${mpi_workers_sync_gradients} != "" ]]; then
    CMD=" ${CMD} --mpi_workers_sync_gradients=${mpi_workers_sync_gradients}"
  fi
}
# Install the protoc binary (downloaded from the URL given as $1) into
# tensorflow/models/research if not already present, then compile the
# object_detection .proto files in place.
function install_protoc() {
  pushd "${MOUNT_EXTERNAL_MODELS_SOURCE}/research"

  # install protoc, if necessary, then compile protoc files
  if [ ! -f "bin/protoc" ]; then
    install_location=$1
    echo "protoc not found, installing protoc from ${install_location}"
    apt-get update && apt-get install -y unzip wget
    wget -O protobuf.zip ${install_location}
    unzip -o protobuf.zip
    rm protobuf.zip
  else
    echo "protoc already found"
  fi

  echo "Compiling protoc files"
  ./bin/protoc object_detection/protos/*.proto --python_out=.
  popd
}
# Build and install the COCO python API (pycocotools) into $2 from a cocoapi
# clone at $1, unless pycocotools already exist at the destination.
function get_cocoapi() {
  # get arg for where the cocoapi repo was cloned
  cocoapi_dir=${1}

  # get arg for the location where we want the pycocotools
  parent_dir=${2}
  pycocotools_dir=${parent_dir}/pycocotools

  # If pycoco tools aren't already found, then builds the coco python API
  if [ ! -d ${pycocotools_dir} ]; then
    # This requires that the cocoapi is cloned in the external model source dir
    if [ -d "${cocoapi_dir}/PythonAPI" ]; then
      # install cocoapi
      pushd ${cocoapi_dir}/PythonAPI
      echo "Installing COCO API"
      make
      cp -r pycocotools ${parent_dir}
      popd
    else
      echo "${cocoapi_dir}/PythonAPI directory was not found"
      echo "Unable to install the python cocoapi."
      exit 1
    fi
  else
    echo "pycocotools were found at: ${pycocotools_dir}"
  fi
}
# Emit " <flag>=<value>" when a value was supplied, otherwise an empty line,
# so optional CLI flags can be spliced into a command string.
#   $1 = flag name (e.g. "--steps"); $2 = value (may be empty/unset).
function add_arg() {
  if [ -n "${2}" ]; then
    echo " ${1}=${2}"
  else
    echo ""
  fi
}
function add_steps_args() {
  # returns string with --steps and --warmup_steps, if there are values specified
  # Each fragment is emitted only when its env var is non-empty; the result is
  # the five (possibly empty) fragments joined by single spaces.
  # NOTE(review): flag spellings mix underscores (--train_epochs,
  # --epochs_between_evals) and dashes (--warmup-steps, --kmp-blocktime) —
  # presumably matching the python arg parsers; confirm before changing.
  local steps_arg=""
  local trainepochs_arg=""
  local epochsbtweval_arg=""
  local warmup_steps_arg=""
  local kmp_blocktime_arg=""

  if [ -n "${steps}" ]; then
    steps_arg="--steps=${steps}"
  fi

  if [ -n "${train_epochs}" ]; then
    trainepochs_arg="--train_epochs=${train_epochs}"
  fi

  if [ -n "${epochs_between_evals}" ]; then
    epochsbtweval_arg="--epochs_between_evals=${epochs_between_evals}"
  fi

  if [ -n "${warmup_steps}" ]; then
    warmup_steps_arg="--warmup-steps=${warmup_steps}"
  fi

  if [ -n "${kmp_blocktime}" ]; then
    kmp_blocktime_arg="--kmp-blocktime=${kmp_blocktime}"
  fi

  echo "${steps_arg} ${trainepochs_arg} ${epochsbtweval_arg} ${warmup_steps_arg} ${kmp_blocktime_arg}"
}
# Echo the calibration flag for resnet50 Int8 runs:
#   calibration_only=True -> "--calibration-only" (generate the ~100-image
#                            ImageNet calibration subset)
#   calibrate=True        -> "--calibrate=True"   (run accuracy against the
#                            previously generated subset)
#   otherwise             -> empty string
function add_calibration_arg() {
  local flag=""
  case "${calibration_only}" in
    True)
      flag="--calibration-only"
      ;;
    *)
      if [[ ${calibrate} == "True" ]]; then
        flag="--calibrate=True"
      fi
      ;;
  esac
  echo "${flag}"
}
# 3D UNet model
function 3d_unet() {
  # Only fp32 inference is supported for this model.
  if [[ ${PRECISION} == "fp32" ]] && [[ ${MODE} == "inference" ]]; then
    if [[ ${NOINSTALL} != "True" ]]; then
      pip install -r "${MOUNT_BENCHMARK}/${USE_CASE}/${FRAMEWORK}/${MODEL_NAME}/requirements.txt"
    fi
    # Make the fp32 inference model code importable.
    export PYTHONPATH=${PYTHONPATH}:${MOUNT_INTELAI_MODELS_SOURCE}/inference/fp32
    PYTHONPATH=${PYTHONPATH} CMD=${CMD} run_model
  else
    echo "${PRECISION} ${MODE} is not supported for ${MODEL_NAME}"
    exit 1
  fi
}
#BERT model
function bert() {
  # Only fp32 is supported for this entry point.
  if [ ${PRECISION} == "fp32" ]; then
    export PYTHONPATH=${PYTHONPATH}:${MOUNT_BENCHMARK}:${MOUNT_EXTERNAL_MODELS_SOURCE}

    if [ ${NOINSTALL} != "True" ]; then
      # git is installed before pip requirements (presumably needed by one
      # of the requirements; confirm).
      apt-get update && apt-get install -y git
      pip install -r ${MOUNT_BENCHMARK}/${USE_CASE}/${FRAMEWORK}/${MODEL_NAME}/requirements.txt
    fi

    # Forward optional task flags only when configured (see add_arg).
    CMD="${CMD} \
    $(add_arg "--task_name" ${task_name}) \
    $(add_arg "--max_seq_length" ${max_seq_length}) \
    $(add_arg "--eval_batch_size" ${eval_batch_size}) \
    $(add_arg "--learning_rate" ${learning_rate}) \
    $(add_arg "--vocab_file" ${vocab_file}) \
    $(add_arg "--bert_config_file" ${bert_config_file}) \
    $(add_arg "--init_checkpoint" ${init_checkpoint})"

    PYTHONPATH=${PYTHONPATH} CMD=${CMD} run_model
  else
    echo "PRECISION=${PRECISION} is not supported for ${MODEL_NAME}"
    exit 1
  fi
}
# DCGAN model
function dcgan() {
  # Only fp32 is supported; needs the TF models research/slim/gan code.
  if [ ${PRECISION} == "fp32" ]; then
    export PYTHONPATH=${PYTHONPATH}:${MOUNT_EXTERNAL_MODELS_SOURCE}/research:${MOUNT_EXTERNAL_MODELS_SOURCE}/research/slim:${MOUNT_EXTERNAL_MODELS_SOURCE}/research/gan/cifar

    PYTHONPATH=${PYTHONPATH} CMD=${CMD} run_model
  else
    echo "PRECISION=${PRECISION} is not supported for ${MODEL_NAME}"
    exit 1
  fi
}
# DenseNet 169 model
function densenet169() {
  # Only fp32 is supported; optional input/output graph args are forwarded
  # via add_arg when configured.
  if [ ${PRECISION} == "fp32" ]; then
    CMD="${CMD} $(add_arg "--input_height" ${input_height}) $(add_arg "--input_width" ${input_width}) \
    $(add_arg "--warmup_steps" ${warmup_steps}) $(add_arg "--steps" ${steps}) $(add_arg "--input_layer" ${input_layer}) \
    $(add_arg "--output_layer" ${output_layer})"
    PYTHONPATH=${PYTHONPATH} CMD=${CMD} run_model
  else
    echo "PRECISION=${PRECISION} is not supported for ${MODEL_NAME}"
    exit 1
  fi
}
# Faster R-CNN (ResNet50) model
function faster_rcnn() {
  # Needs the TF models research + slim code on the PYTHONPATH.
  export PYTHONPATH=$PYTHONPATH:${MOUNT_EXTERNAL_MODELS_SOURCE}/research:${MOUNT_EXTERNAL_MODELS_SOURCE}/research/slim
  original_dir=$(pwd)

  if [ ${NOINSTALL} != "True" ]; then
    # install dependencies
    pip install -r "${MOUNT_BENCHMARK}/object_detection/tensorflow/faster_rcnn/requirements.txt"
    cd "${MOUNT_EXTERNAL_MODELS_SOURCE}/research"
    # install protoc v3.3.0, if necessary, then compile protoc files
    install_protoc "https://github.com/google/protobuf/releases/download/v3.3.0/protoc-3.3.0-linux-x86_64.zip"

    # Install git so that we can apply the patch
    apt-get update && apt-get install -y git
  fi

  # Apply the patch to the tensorflow/models repo with fixes for the accuracy
  # script and for running with python 3
  cd ${MOUNT_EXTERNAL_MODELS_SOURCE}
  git apply ${MOUNT_INTELAI_MODELS_SOURCE}/${MODE}/${PRECISION}/faster_rcnn.patch

  if [ ${PRECISION} == "fp32" ]; then
    if [ -n "${config_file}" ]; then
      CMD="${CMD} --config_file=${config_file}"
    fi

    # fp32 benchmarking requires a pipeline config file.
    # (Fixed error message: correct model name and flag spelling.)
    if [[ -z "${config_file}" ]] && [ ${BENCHMARK_ONLY} == "True" ]; then
      echo "Faster R-CNN requires --config_file arg to be defined"
      exit 1
    fi
  elif [ ${PRECISION} == "int8" ]; then
    # (Removed an unused 'number_of_steps_arg' local that was never read.)
    if [ -n "${number_of_steps}" ] && [ ${BENCHMARK_ONLY} == "True" ]; then
      CMD="${CMD} --number-of-steps=${number_of_steps}"
    fi
  else
    echo "PRECISION=${PRECISION} is not supported for ${MODEL_NAME}"
    exit 1
  fi
  cd $original_dir
  PYTHONPATH=${PYTHONPATH} CMD=${CMD} run_model
}
# inceptionv4 model
function inceptionv4() {
  # For accuracy, dataset location is required
  if [ "${DATASET_LOCATION_VOL}" == None ] && [ ${ACCURACY_ONLY} == "True" ]; then
    echo "No dataset directory specified, accuracy cannot be calculated."
    exit 1
  fi
  # add extra model specific args and then run the model
  CMD="${CMD} $(add_steps_args) $(add_arg "--input-height" ${input_height}) \
  $(add_arg "--input-width" ${input_width}) $(add_arg "--input-layer" ${input_layer}) \
  $(add_arg "--output-layer" ${output_layer})"

  # int8 and fp32 share the same launch path (the original had two
  # byte-identical branches; merged here).
  if [ ${PRECISION} == "int8" ] || [ ${PRECISION} == "fp32" ]; then
    PYTHONPATH=${PYTHONPATH} CMD=${CMD} run_model
  else
    echo "PRECISION=${PRECISION} is not supported for ${MODEL_NAME}"
    exit 1
  fi
}
# MiniGo model
# Builds and launches MLPerf MiniGo fp32 training. Checks out a pinned commit
# of the mlperf minigo repo, applies Intel patches for the selected topology
# (large-scale multi-node / many-core single-node / normal single-node),
# rebuilds TensorFlow from source for the C++ selfplay component, and then
# delegates to run_model. Any other MODE/PRECISION combination is rejected.
function minigo() {
if [ ${MODE} == "training" ] && [ ${PRECISION} == "fp32" ]; then
original_dir=$(pwd)
# Default to host paths; overridden below with container mount points when
# running inside Docker.
local MODEL_DIR=${EXTERNAL_MODELS_SOURCE_DIRECTORY}
local INTELAI_MODEL_DIR=${INTELAI_MODELS}
local BENCHMARK_DIR=${BENCHMARK_SCRIPTS}
if [ -n "${DOCKER}" ]; then
MODEL_DIR=${MOUNT_EXTERNAL_MODELS_SOURCE}
INTELAI_MODEL_DIR=${MOUNT_INTELAI_MODELS_SOURCE}
BENCHMARK_DIR=${MOUNT_BENCHMARK}
# install dependencies
apt-get update && apt-get install -y cpio
# pip3 install -r ${MODEL_DIR}/requirements.txt
pip install -r ${BENCHMARK_DIR}/reinforcement/tensorflow/minigo/requirements.txt
# Bazel 0.22.0 is required to build the pinned TensorFlow below; download the
# installer only if it is not already present in the current directory.
if [ ! -f "bazel-0.22.0-installer-linux-x86_64.sh" ];then
wget https://github.com/bazelbuild/bazel/releases/download/0.22.0/bazel-0.22.0-installer-linux-x86_64.sh
chmod 755 bazel-0.22.0-installer-linux-x86_64.sh
fi
./bazel-0.22.0-installer-linux-x86_64.sh --prefix=/tmp/bazel
rm /root/.bazelrc
export PATH=/tmp/bazel/bin:$PATH
# Install Intel MPI from the /l_mpi mount (silent install) and source its
# environment so mpi4py can build against it.
cd /l_mpi
sh install.sh --silent silent.cfg
source /opt/intel/compilers_and_libraries/linux/bin/compilervars.sh intel64
pip install mpi4py
fi
if [ ${NOINSTALL} != "True" ]; then
# install dependencies
apt-get update && apt-get install -y git
pip3 install -r ${MOUNT_EXTERNAL_MODELS_SOURCE}/requirements.txt
pip install -r ${BENCHMARK_DIR}/reinforcement/tensorflow/minigo/requirements.txt
if [ "${EXTERNAL_MODELS_SOURCE_DIRECTORY}" == "None" ]; then
echo "You are supposed to provide model dir."
exit 1
fi
# MODEL_DIR is the official mlperf minigo repo
cd ${MODEL_DIR}
# Pin the repo to the commit the Intel patches were written against.
git checkout 60ecb12f29582227a473fdc7cd09c2605f42bcd6
# delete the previous patch influence
git reset --hard
git clean -fd
rm -rf ./ml_perf/flags/9.mn/
# remove the quantization tools downloaded before
rm -rf ${MODEL_DIR}/ml_perf/tools/
rm -rf ${MODEL_DIR}/cc/ml_perf/tools/
# Pick the patch set matching the requested topology.
if [ "${large_scale}" == "True" ]; then
# multi-node mode
git apply ${INTELAI_MODEL_DIR}/training/fp32/minigo_mlperf_large_scale.patch
git apply ${INTELAI_MODEL_DIR}/training/fp32/avoid-repeated-clone-multinode.patch
git apply ${INTELAI_MODEL_DIR}/training/fp32/bazel-clean-large-scale.patch
# git apply ${INTELAI_MODEL_DIR}/training/fp32/large-scale-no-bg.patch
elif [ "${large_num_cores}" == "True" ]; then
# single-node large num mode
git apply ${INTELAI_MODEL_DIR}/training/fp32/minigo_mlperf.patch
git apply ${INTELAI_MODEL_DIR}/training/fp32/avoid-repeated-clone-singlenode.patch
git apply ${INTELAI_MODEL_DIR}/training/fp32/bazel-clean-single-node.patch
git apply ${INTELAI_MODEL_DIR}/training/fp32/tune_for_many_core.patch
else
# single-node normal mode
git apply ${INTELAI_MODEL_DIR}/training/fp32/minigo_mlperf.patch
git apply ${INTELAI_MODEL_DIR}/training/fp32/mlperf_split.patch
git apply ${INTELAI_MODEL_DIR}/training/fp32/avoid-repeated-clone-singlenode.patch
git apply ${INTELAI_MODEL_DIR}/training/fp32/bazel-clean-single-node.patch
fi
# generate the flags with specified iterations
if [ -z "$steps" ];then
steps=30
fi
# Rewrite the flag files in place (original kept with a -org suffix).
mv ml_perf/flags/9/rl_loop.flags ml_perf/flags/9/rl_loop.flags-org
sed "s/iterations=30/iterations=${steps}/g" ml_perf/flags/9/rl_loop.flags-org &> ml_perf/flags/9/rl_loop.flags
mv ml_perf/flags/9/train.flags ml_perf/flags/9/train.flags-org
sed "s/train_batch_size=8192/train_batch_size=4096/g" ml_perf/flags/9/train.flags-org &> ml_perf/flags/9/train.flags
# MiniGo need specified tensorflow version and to build selfplay part with tensorflow c lib.
rm -rf cc/minigo_tf/tensorflow-*.data
rm -rf cc/minigo_tf/tensorflow-*.dist-info
chmod +777 ./cc/configure_tensorflow.sh
chmod +777 ./build.sh
# Build TF from source, then replace any preinstalled tensorflow with the
# freshly built wheel before building the C++ selfplay binaries.
./cc/configure_tensorflow.sh
pip uninstall -y ./cc/tensorflow_pkg/tensorflow-*.whl
pip uninstall -y tensorflow
pip uninstall -y intel-tensorflow
pip install ./cc/tensorflow_pkg/tensorflow-*.whl
./build.sh
# ensure horovod installed
pip install horovod==0.15.1
# set the python path for quantization tools
export PYTHONPATH=${PYTHONPATH}:${MODEL_DIR}/cc/ml_perf/tools/api/intel_quantization:${MODEL_DIR}/ml_perf/tools/api/intel_quantization
# freeze the tfrecord and target to the checkpoint for training
git apply ${INTELAI_MODEL_DIR}/training/fp32/get-data.patch
BOARD_SIZE=9 python ml_perf/get_data.py
# $HOSTLIST.txt contains all the ip address
# NOTE(review): HOSTLIST points at ${BENCHMARK_DIR}/node_list — confirm that
# file exists when --multi-node is requested.
if [ ! $multi_node ];then
unset -v HOSTLIST
else
export HOSTLIST=${BENCHMARK_DIR}/node_list
fi
cd ${original_dir}
CMD="${CMD} \
$(add_arg "--large-scale" ${large_scale}) \
$(add_arg "--num-train-nodes" ${num_train_nodes}) \
$(add_arg "--num-eval-nodes" ${num_eval_nodes}) \
$(add_arg "--quantization" ${quantization}) \
$(add_arg "--multi-node" ${multi_node})"
PYTHONPATH=${PYTHONPATH} CMD=${CMD} run_model
fi
else
echo "MODE=${MODE} PRECISION=${PRECISION} is not supported for ${MODEL_NAME}"
exit 1
fi
}
# Mask R-CNN model
function maskrcnn() {
  # Only an fp32 build of this model exists in the repo; bail out early.
  if [ ${PRECISION} != "fp32" ]; then
    echo "PRECISION=${PRECISION} is not supported for ${MODEL_NAME}"
    exit 1
  fi

  original_dir=$(pwd)
  if [ ${NOINSTALL} != "True" ]; then
    # install dependencies
    pip3 install -r ${MOUNT_BENCHMARK}/image_segmentation/tensorflow/maskrcnn/inference/fp32/requirements.txt
  fi
  # Make both the model repo and its mrcnn package importable.
  export PYTHONPATH=${PYTHONPATH}:${MOUNT_EXTERNAL_MODELS_SOURCE}:${MOUNT_EXTERNAL_MODELS_SOURCE}/mrcnn
  CMD="${CMD} --data-location=${DATASET_LOCATION}"
  PYTHONPATH=${PYTHONPATH} CMD=${CMD} run_model
}
# mobilenet_v1 model
function mobilenet_v1() {
  # Args shared by every precision: input geometry, step counts, layer names.
  local common_args
  common_args="$(add_arg "--input_height" ${input_height}) $(add_arg "--input_width" ${input_width}) \
$(add_arg "--warmup_steps" ${warmup_steps}) $(add_arg "--steps" ${steps}) \
$(add_arg "--input_layer" ${input_layer}) $(add_arg "--output_layer" ${output_layer})"

  if [ ${PRECISION} == "fp32" ] || [ ${PRECISION} == "bfloat16" ]; then
    CMD="${CMD} ${common_args}"
    PYTHONPATH=${PYTHONPATH} CMD=${CMD} run_model
  elif [ ${PRECISION} == "int8" ]; then
    # int8 additionally needs the calibration flag.
    CMD="${CMD} ${common_args} $(add_calibration_arg)"
    PYTHONPATH=${PYTHONPATH} CMD=${CMD} run_model
  else
    echo "PRECISION=${PRECISION} is not supported for ${MODEL_NAME}"
    exit 1
  fi
}
# MTCC model
function mtcc() {
  # Only fp32 inference is wired up for MTCC.
  if [ ${PRECISION} == "fp32" ]; then
    if [ ! -d "${DATASET_LOCATION}" ]; then
      # BUGFIX: corrected the typo "instaructions" in this user-facing message.
      echo "No Data location specified, please follow MTCC README instructions to download the dataset."
      exit 1
    fi
    if [ ${NOINSTALL} != "True" ]; then
      # install dependencies
      pip install opencv-python
      pip install easydict
    fi
    # Expose both the upstream model repo (and its Detection package) and the
    # Intel fp32 inference scripts on the import path.
    export PYTHONPATH=${PYTHONPATH}:${MOUNT_EXTERNAL_MODELS_SOURCE}:${MOUNT_EXTERNAL_MODELS_SOURCE}/Detection:${MOUNT_INTELAI_MODELS_SOURCE}/inference/fp32:${MOUNT_INTELAI_MODELS_SOURCE}/inference/fp32/Detection
    PYTHONPATH=${PYTHONPATH} CMD=${CMD} run_model
  else
    echo "PRECISION=${PRECISION} is not supported for ${MODEL_NAME}"
    exit 1
  fi
}
# NCF model
function ncf() {
  # Optional passthrough flags.
  if [[ -n "${clean}" ]]; then
    CMD="${CMD} --clean"
  fi
  # NCF supports different datasets including ml-1m and ml-20m.
  # (-n already guarantees a non-empty value, so the original redundant
  # `&& ${dataset} != ""` checks were dropped.)
  if [[ -n "${dataset}" ]]; then
    CMD="${CMD} --dataset=${dataset}"
  fi
  if [[ -n "${te}" ]]; then
    CMD="${CMD} -te=${te}"
  fi
  # POSIX marks `[ a -o b ]` obsolescent; use two tests joined by || instead.
  if [ ${PRECISION} == "fp32" ] || [ ${PRECISION} == "bfloat16" ]; then
    # For ncf, if dataset location is empty, script downloads dataset at given location.
    if [ ! -d "${DATASET_LOCATION}" ]; then
      mkdir -p ./dataset
      CMD="${CMD} --data-location=./dataset"
    fi
    export PYTHONPATH=${PYTHONPATH}:${MOUNT_EXTERNAL_MODELS_SOURCE}
    if [ ${NOINSTALL} != "True" ]; then
      pip install -r ${MOUNT_BENCHMARK}/recommendation/tensorflow/ncf/inference/requirements.txt
    fi
    PYTHONPATH=${PYTHONPATH} CMD=${CMD} run_model
  else
    echo "PRECISION=${PRECISION} is not supported for ${MODEL_NAME}"
    exit 1
  fi
}
# ResNet101, InceptionV3 model
function resnet101_inceptionv3() {
  export PYTHONPATH=${PYTHONPATH}:$(pwd):${MOUNT_BENCHMARK}

  # Accuracy mode cannot run without a dataset.
  if [ "${DATASET_LOCATION_VOL}" == "None" ] && [ ${ACCURACY_ONLY} == "True" ]; then
    echo "No Data directory specified, accuracy will not be calculated."
    exit 1
  fi

  case "${PRECISION}" in
    int8)
      # int8 additionally takes the calibration flag.
      CMD="${CMD} $(add_steps_args) $(add_calibration_arg)"
      PYTHONPATH=${PYTHONPATH} CMD=${CMD} run_model
      ;;
    fp32)
      CMD="${CMD} $(add_steps_args)"
      PYTHONPATH=${PYTHONPATH} CMD=${CMD} run_model
      ;;
    *)
      echo "PRECISION=${PRECISION} is not supported for ${MODEL_NAME}"
      exit 1
      ;;
  esac
}
# ResNet50 model
function resnet50() {
  export PYTHONPATH=${PYTHONPATH}:$(pwd):${MOUNT_BENCHMARK}

  # A dataset is mandatory when measuring accuracy.
  if [ "${DATASET_LOCATION_VOL}" == "None" ] && [ ${ACCURACY_ONLY} == "True" ]; then
    echo "No Data directory specified, accuracy will not be calculated."
    exit 1
  fi

  # Build the precision-specific command first; a single launch tail follows.
  if [ ${PRECISION} == "int8" ]; then
    CMD="${CMD} $(add_steps_args) $(add_calibration_arg)"
  elif [ ${PRECISION} == "fp32" ] || [ ${PRECISION} == "bfloat16" ]; then
    CMD="${CMD} $(add_steps_args)"
  else
    echo "PRECISION=${PRECISION} is not supported for ${MODEL_NAME}"
    exit 1
  fi
  PYTHONPATH=${PYTHONPATH} CMD=${CMD} run_model
}
# MLPerf GNMT model
function mlperf_gnmt() {
  export PYTHONPATH=${PYTHONPATH}:$(pwd):${MOUNT_BENCHMARK}

  if [ ${NOINSTALL} != "True" ]; then
    # The tensorflow-addons wheel ships with the Intel model sources;
    # --no-deps keeps pip from touching the pinned TensorFlow install.
    pip install ${MOUNT_INTELAI_MODELS_SOURCE}/tensorflow_addons*.whl --no-deps
  fi

  # Accuracy mode cannot run without a dataset.
  if [ "${DATASET_LOCATION_VOL}" == "None" ] && [ ${ACCURACY_ONLY} == "True" ]; then
    echo "No Data directory specified, accuracy will not be calculated."
    exit 1
  fi

  # Build the precision-specific command, then launch once.
  if [ ${PRECISION} == "int8" ]; then
    CMD="${CMD} $(add_steps_args) $(add_calibration_arg)"
  elif [ ${PRECISION} == "fp32" ]; then
    CMD="${CMD} $(add_steps_args)"
  else
    echo "PRECISION=${PRECISION} is not supported for ${MODEL_NAME}"
    exit 1
  fi
  PYTHONPATH=${PYTHONPATH} CMD=${CMD} run_model
}
# R-FCN (ResNet101) model
function rfcn() {
  export PYTHONPATH=$PYTHONPATH:${MOUNT_EXTERNAL_MODELS_SOURCE}/research:${MOUNT_EXTERNAL_MODELS_SOURCE}/research/slim:${MOUNT_EXTERNAL_MODELS_SOURCE}

  # BUGFIX: original_dir was only captured inside the NOINSTALL!="True"
  # branch, so the `cd $original_dir` below ran with an unset variable
  # (cd'ing to $HOME) whenever installs were skipped. Capture it
  # unconditionally before any directory changes.
  original_dir=$(pwd)

  if [ ${NOINSTALL} != "True" ]; then
    apt-get update && apt-get install -y git
    # install dependencies one at a time to avoid resolver conflicts
    for line in $(cat ${MOUNT_BENCHMARK}/object_detection/tensorflow/rfcn/requirements.txt)
    do
      pip install $line
    done
    cd ${MOUNT_EXTERNAL_MODELS_SOURCE}

    # Patch the tensorflow/models repo for TF 2.0 compatibility.
    git apply --ignore-space-change --ignore-whitespace ${MOUNT_INTELAI_MODELS_SOURCE}/${MODE}/tf-2.0.patch

    cd "${MOUNT_EXTERNAL_MODELS_SOURCE}/research"
    # install protoc v3.3.0, if necessary, then compile protoc files
    install_protoc "https://github.com/google/protobuf/releases/download/v3.3.0/protoc-3.3.0-linux-x86_64.zip"
  fi

  # --split only applies to accuracy runs; --number_of_steps only to benchmark runs.
  split_arg=""
  if [ -n "${split}" ] && [ ${ACCURACY_ONLY} == "True" ]; then
    split_arg="--split=${split}"
  fi
  number_of_steps_arg=""
  if [ -n "${number_of_steps}" ] && [ ${BENCHMARK_ONLY} == "True" ]; then
    number_of_steps_arg="--number_of_steps=${number_of_steps}"
  fi

  CMD="${CMD} ${number_of_steps_arg} ${split_arg}"
  cd "$original_dir"
  PYTHONPATH=${PYTHONPATH} CMD=${CMD} run_model
}
# SSD-MobileNet model
function ssd_mobilenet() {
  case "${PRECISION}" in
    fp32|bfloat16)
      # The fp32/bfloat16 inference script fixes its own batch size.
      if [ ${BATCH_SIZE} != "-1" ]; then
        echo "Warning: SSD-MobileNet FP32 inference script does not use the batch_size arg"
      fi
      ;;
    int8)
      ;;
    *)
      echo "PRECISION=${PRECISION} is not supported for ${MODEL_NAME}"
      exit 1
      ;;
  esac

  export PYTHONPATH=${PYTHONPATH}:${MOUNT_BENCHMARK}

  if [ ${NOINSTALL} != "True" ]; then
    # install dependencies for both fp32 and int8
    apt-get update && apt-get install -y git
    # install one by one to solve dependency problems
    for line in $(cat ${MOUNT_BENCHMARK}/object_detection/tensorflow/ssd-mobilenet/requirements.txt)
    do
      pip install $line
    done
  fi
  PYTHONPATH=${PYTHONPATH} CMD=${CMD} run_model
}
# SSD-ResNet34 model
# Inference (fp32/int8): patches a local ssd-resnet-benchmarks checkout and
# the model repo for TF 2.0, then runs. Training (fp32/bfloat16): installs
# Intel MPI from the /l_mpi mount, clones a pinned tensorflow/benchmarks
# commit into /tmp, patches it, and runs.
function ssd-resnet34() {
if [ ${MODE} == "inference" ]; then
if [ ${PRECISION} == "fp32" ] || [ ${PRECISION} == "int8" ]; then
old_dir=${PWD}
if [ ${NOINSTALL} != "True" ]; then
# libgl1/libglib are needed by opencv-python at import time.
apt-get update && apt-get install -y git libgl1-mesa-glx libglib2.0-0
# Install requirements one by one to avoid resolver conflicts.
for line in $(cat ${MOUNT_BENCHMARK}/object_detection/tensorflow/ssd-resnet34/requirements.txt)
do
pip install $line
done
# Inside the container the sources live at the mount points...
model_source_dir=${MOUNT_EXTERNAL_MODELS_SOURCE}
infer_dir=${MOUNT_INTELAI_MODELS_SOURCE}/${MODE}
else
# ...otherwise use the host-side directories.
model_source_dir=${EXTERNAL_MODELS_SOURCE_DIRECTORY}
infer_dir="${INTELAI_MODELS}/${MODE}"
fi
benchmarks_patch_path=${infer_dir}/tensorflow_benchmarks_tf2.0.patch
model_patch_path=${infer_dir}/tensorflow_models_tf2.0.patch
# NOTE(review): assumes an `ssd-resnet-benchmarks` checkout exists as a
# sibling of the model source dir — confirm against the setup docs.
cd ${model_source_dir}/../
cd ssd-resnet-benchmarks
git apply ${benchmarks_patch_path}
cd ${model_source_dir}
git apply ${model_patch_path}
if [ ${NOINSTALL} != "True" ]; then
export PYTHONPATH=${PYTHONPATH}:"/workspace/models/research"
export PYTHONPATH=${PYTHONPATH}:"/workspace/ssd-resnet-benchmarks/scripts/tf_cnn_benchmarks"
fi
cd ${old_dir}
CMD="${CMD} \
$(add_arg "--input-size" ${input_size})"
CMD=${CMD} run_model
else
echo "PRECISION=${PRECISION} not supported for ${MODEL_NAME}"
exit 1
fi
elif [ ${MODE} == "training" ]; then
if [ ${PRECISION} == "fp32" ] || [ ${PRECISION} == "bfloat16" ]; then
if [ ${NOINSTALL} != "True" ]; then
apt-get update && apt-get install -y cpio git
# Enter the docker mount directory /l_mpi and install the intel mpi with silent mode
cd /l_mpi
sh install.sh --silent silent.cfg
source /opt/intel/compilers_and_libraries/linux/bin/compilervars.sh intel64
for line in $(cat ${MOUNT_BENCHMARK}/object_detection/tensorflow/ssd-resnet34/requirements.txt)
do
pip install $line
done
fi
old_dir=${PWD}
# Fresh clone of tensorflow/benchmarks at the pinned commit, then apply the
# TF 2.0 (and optionally bfloat16) patches.
cd /tmp
rm -rf benchmark_ssd_resnet34
git clone https://github.com/tensorflow/benchmarks.git benchmark_ssd_resnet34
cd benchmark_ssd_resnet34
git checkout 509b9d288937216ca7069f31cfb22aaa7db6a4a7
git apply ${MOUNT_INTELAI_MODELS_SOURCE}/${MODE}/${PRECISION}/benchmark-tf-2.0.diff
if [ ${PRECISION} == "bfloat16" ]; then
git apply ${MOUNT_INTELAI_MODELS_SOURCE}/${MODE}/${PRECISION}/benchmark-bfloat16.diff
fi
cd ${old_dir}
CMD="${CMD} \
$(add_arg "--weight_decay" ${weight_decay}) \
$(add_arg "--epochs" ${epochs}) \
$(add_arg "--save_model_steps" ${save_model_steps}) \
$(add_arg "--timeline" ${timeline}) \
$(add_arg "--num_warmup_batches" ${num_warmup_batches})"
# Temporarily extend PYTHONPATH for the run, then restore it.
local old_pythonpath=${PYTHONPATH}
export PYTHONPATH=${PYTHONPATH}:${MOUNT_EXTERNAL_MODELS_SOURCE}:${MOUNT_EXTERNAL_MODELS_SOURCE}/research
CMD=${CMD} run_model
PYTHONPATH=${old_pythonpath}
else
echo "PRECISION=${PRECISION} not supported for ${MODEL_NAME}"
exit 1
fi
fi
}
# SSD-VGG16 model
# Prepares the model repo (makes dataset/preprocessing/utility importable as
# packages), optionally patches a pinned tensorflow/benchmarks clone for
# TF 2.0, and launches fp32 or int8 inference.
function ssd_vgg16() {
if [ ${NOINSTALL} != "True" ]; then
apt-get update && apt-get install -y git
pip install opencv-python Cython
if [ ${ACCURACY_ONLY} == "True" ]; then
# get the python cocoapi
get_cocoapi ${MOUNT_EXTERNAL_MODELS_SOURCE}/coco ${MOUNT_INTELAI_MODELS_SOURCE}/inference
fi
fi
# Drop an empty __init__.py into each repo subdirectory so they import as packages.
cp ${MOUNT_INTELAI_MODELS_SOURCE}/__init__.py ${MOUNT_EXTERNAL_MODELS_SOURCE}/dataset
cp ${MOUNT_INTELAI_MODELS_SOURCE}/__init__.py ${MOUNT_EXTERNAL_MODELS_SOURCE}/preprocessing
cp ${MOUNT_INTELAI_MODELS_SOURCE}/__init__.py ${MOUNT_EXTERNAL_MODELS_SOURCE}/utility
export PYTHONPATH=${PYTHONPATH}:${MOUNT_EXTERNAL_MODELS_SOURCE}
if [ ${PRECISION} == "fp32" ] || [ ${PRECISION} == "int8" ]; then
if [ ${NOINSTALL} != "True" ]; then
# NOTE(review): requirements are read from the ssd-resnet34 folder, not an
# ssd_vgg16 one — confirm this sharing is intentional.
for line in $(cat ${MOUNT_BENCHMARK}/object_detection/tensorflow/ssd-resnet34/requirements.txt)
do
pip install $line
done
old_dir=${PWD}
infer_dir=${MOUNT_INTELAI_MODELS_SOURCE}/inference
benchmarks_patch_path=${infer_dir}/tensorflow_benchmarks_tf2.0.patch
# Clone tensorflow/benchmarks at the pinned commit and patch it for TF 2.0.
cd /tmp
git clone --single-branch https://github.com/tensorflow/benchmarks.git
cd benchmarks
git checkout 509b9d288937216ca7069f31cfb22aaa7db6a4a7
git apply ${benchmarks_patch_path}
model_patch_path=${infer_dir}/tensorflow_models_tf2.0.patch
cd ${MOUNT_EXTERNAL_MODELS_SOURCE}
git apply ${model_patch_path}
cd ${old_dir}
fi
CMD=${CMD} run_model
else
echo "PRECISION=${PRECISION} is not supported for ${MODEL_NAME}"
exit 1
fi
}
# UNet model
function unet() {
  # Only fp32 inference is implemented for UNet.
  if [ ${PRECISION} != "fp32" ]; then
    echo "PRECISION=${PRECISION} is not supported for ${MODEL_NAME}"
    exit 1
  fi

  if [[ ${NOINSTALL} != "True" ]]; then
    pip install -r "${MOUNT_BENCHMARK}/${USE_CASE}/${FRAMEWORK}/${MODEL_NAME}/requirements.txt"
  fi

  # A checkpoint name is mandatory, and accuracy mode is not supported.
  if [[ -z "${checkpoint_name}" ]]; then
    echo "UNet requires -- checkpoint_name arg to be defined"
    exit 1
  fi
  if [ ${ACCURACY_ONLY} == "True" ]; then
    echo "Accuracy testing is not supported for ${MODEL_NAME}"
    exit 1
  fi

  CMD="${CMD} --checkpoint_name=${checkpoint_name}"
  export PYTHONPATH=${PYTHONPATH}:${MOUNT_EXTERNAL_MODELS_SOURCE}
  PYTHONPATH=${PYTHONPATH} CMD=${CMD} run_model
}
# transformer language model from official tensorflow models
function transformer_lt_official() {
  # Only fp32 is wired up for this model.
  if [ ${PRECISION} != "fp32" ]; then
    echo "PRECISION=${PRECISION} is not supported for ${MODEL_NAME}"
    exit 1
  fi

  # All four args are mandatory; fail fast on the first missing one.
  # Indirect expansion keeps the messages identical to the old per-arg checks.
  local required
  for required in file file_out reference vocab_file; do
    if [[ -z "${!required}" ]]; then
      echo "transformer-language requires -- ${required} arg to be defined"
      exit 1
    fi
  done

  if [ ${NOINSTALL} != "True" ]; then
    pip install -r "${MOUNT_BENCHMARK}/language_translation/tensorflow/transformer_lt_official/requirements.txt"
  fi

  CMD="${CMD}
--in_graph=${IN_GRAPH} \
--vocab_file=${DATASET_LOCATION}/${vocab_file} \
--file=${DATASET_LOCATION}/${file} \
--file_out=${OUTPUT_DIR}/${file_out} \
--reference=${DATASET_LOCATION}/${reference}"

  PYTHONPATH=${PYTHONPATH}:${MOUNT_BENCHMARK}:${MOUNT_INTELAI_MODELS_SOURCE}/${MODE}/${PRECISION}
  PYTHONPATH=${PYTHONPATH} CMD=${CMD} run_model
}
# transformer in mlperf Translation for Tensorflow model
function transformer_mlperf() {
  export PYTHONPATH=${PYTHONPATH}:$(pwd):${MOUNT_BENCHMARK}
  #pip install tensorflow-addons==0.6.0 #/workspace/benchmarks/common/tensorflow/tensorflow_addons-0.6.0.dev0-cp36-cp36m-linux_x86_64.whl
  if [[ (${PRECISION} == "bfloat16") || ( ${PRECISION} == "fp32") ]]
  then
    # Every one of these flags is mandatory; fail fast on the first missing
    # one. Indirect expansion reproduces the original per-arg messages.
    local required_arg
    for required_arg in random_seed params train_steps steps_between_eval do_eval save_checkpoints print_iter; do
      if [[ -z "${!required_arg}" ]]; then
        echo "transformer-language requires --${required_arg} arg to be defined"
        exit 1
      fi
    done

    CMD="${CMD} --random_seed=${random_seed} --params=${params} --train_steps=${train_steps} --steps_between_eval=${steps_between_eval} --do_eval=${do_eval} --save_checkpoints=${save_checkpoints}
--print_iter=${print_iter} --save_profile=${save_profile}"
    PYTHONPATH=${PYTHONPATH} CMD=${CMD} run_model
  else
    echo "PRECISION=${PRECISION} is not supported for ${MODEL_NAME}"
    exit 1
  fi
}
# Wavenet model
function wavenet() {
  # fp32 is the only precision wired up for wavenet.
  if [ ${PRECISION} != "fp32" ]; then
    echo "PRECISION=${PRECISION} is not supported for ${MODEL_NAME}"
    exit 1
  fi

  # Both a checkpoint name and a sample value must be supplied.
  if [[ -z "${checkpoint_name}" ]]; then
    echo "wavenet requires -- checkpoint_name arg to be defined"
    exit 1
  fi
  if [[ -z "${sample}" ]]; then
    echo "wavenet requires -- sample arg to be defined"
    exit 1
  fi

  export PYTHONPATH=${PYTHONPATH}:${MOUNT_EXTERNAL_MODELS_SOURCE}

  if [ ${NOINSTALL} != "True" ]; then
    pip install librosa==0.5
  fi

  CMD="${CMD} --checkpoint_name=${checkpoint_name} \
--sample=${sample}"
  PYTHONPATH=${PYTHONPATH} CMD=${CMD} run_model
}
# BERT base
function bert_base() {
  # Supported precisions: fp32 and bfloat16 only.
  if [ ${PRECISION} != "fp32" ] && [ ${PRECISION} != "bfloat16" ]; then
    echo "PRECISION=${PRECISION} not supported for ${MODEL_NAME}"
    exit 1
  fi
  export PYTHONPATH=${PYTHONPATH}:${MOUNT_EXTERNAL_MODELS_SOURCE}
  bert_options
  CMD=${CMD} run_model
}
# BERT Large model
function bert_large() {
  case "${PRECISION}" in
    fp32|int8|bfloat16)
      export PYTHONPATH=${PYTHONPATH}:${MOUNT_EXTERNAL_MODELS_SOURCE}
      bert_options
      CMD=${CMD} run_model
      ;;
    *)
      echo "PRECISION=${PRECISION} not supported for ${MODEL_NAME} in this repo."
      exit 1
      ;;
  esac
}
# Wide & Deep model
function wide_deep() {
  # Anything other than fp32 is rejected.
  if [ ${PRECISION} != "fp32" ]; then
    echo "PRECISION=${PRECISION} not supported for ${MODEL_NAME}"
    exit 1
  fi
  export PYTHONPATH=${PYTHONPATH}:${MOUNT_EXTERNAL_MODELS_SOURCE}
  CMD=${CMD} run_model
}
# Wide & Deep large dataset model
# Preloads tcmalloc for allocator performance, validates the dataset arg, and
# builds the mode-specific flag list before delegating to run_model.
function wide_deep_large_ds() {
export PYTHONPATH=${PYTHONPATH}:$(pwd):${MOUNT_BENCHMARK}
# Depends on the Ubuntu version the ldpreload gets installed on various places.
# Hence getting the best available one from ldconfig and setting it up
TCMALLOC_LIB="libtcmalloc.so.4"
LIBTCMALLOC="$(ldconfig -p | grep $TCMALLOC_LIB | tr ' ' '\n' | grep /)"
if [[ -z $LIBTCMALLOC ]] && [[ $NOINSTALL != True ]]; then
echo "libtcmalloc.so.4 not found, trying to install"
apt-get update
apt-get install google-perftools --fix-missing -y
fi
# Re-query after the (possible) install; preload whatever was found.
LIBTCMALLOC="$(ldconfig -p | grep $TCMALLOC_LIB | tr ' ' '\n' | grep /)"
echo $LIBTCMALLOC
export LD_PRELOAD=$LIBTCMALLOC
# Missing tcmalloc is only warned about, not fatal — the run proceeds without it.
if [[ -z "${LIBTCMALLOC}" ]]; then
echo "Failed to load $TCMALLOC_LIB"
fi
# Dataset file is required, see README for more information.
if [ "${DATASET_LOCATION_VOL}" == None ]; then
echo "Wide & Deep requires --data-location arg to be defined"
exit 1
fi
if [ ${MODE} == "training" ]; then
if [[ ! -z $steps ]]; then
CMD="${CMD} --steps=${steps}"
fi
# Training supports fp32 only.
if [ ${PRECISION} == "fp32" ]; then
CMD="${CMD}"
PYTHONPATH=${PYTHONPATH} CMD=${CMD} run_model
else
echo "PRECISION=${PRECISION} not supported for ${MODEL_NAME}"
exit 1
fi
fi
if [ ${MODE} == "inference" ]; then
# Optional tuning knobs are appended only when explicitly provided
# (their unset sentinel is the literal string "None").
if [ "${num_omp_threads}" != None ]; then
CMD="${CMD} --num_omp_threads=${num_omp_threads}"
fi
if [ "${use_parallel_batches}" == "True" ]; then
CMD="${CMD} --use_parallel_batches=${use_parallel_batches}"
else
CMD="${CMD} --use_parallel_batches=False"
fi
if [ "${num_parallel_batches}" != None ] && [ "${use_parallel_batches}" == "True" ]; then
CMD="${CMD} --num_parallel_batches=${num_parallel_batches}"
fi
if [ "${kmp_block_time}" != None ] ; then
CMD="${CMD} --kmp_block_time=${kmp_block_time}"
fi
if [ "${kmp_settings}" != None ]; then
CMD="${CMD} --kmp_settings=${kmp_settings}"
fi
# Inference supports int8 and fp32.
if [ ${PRECISION} == "int8" ] || [ ${PRECISION} == "fp32" ]; then
CMD="${CMD}"
PYTHONPATH=${PYTHONPATH} CMD=${CMD} run_model
else
echo "PRECISION=${PRECISION} is not supported for ${MODEL_NAME}"
exit 1
fi
fi
}
LOGFILE=${OUTPUT_DIR}/${LOG_FILENAME}

# Normalize the model name to lower case, then dispatch to its launcher.
MODEL_NAME=$(echo ${MODEL_NAME} | tr 'A-Z' 'a-z')

case "${MODEL_NAME}" in
  3d_unet)                 3d_unet ;;
  bert)                    bert ;;
  dcgan)                   dcgan ;;
  densenet169)             densenet169 ;;
  draw)                    draw ;;
  facenet)                 facenet ;;
  faster_rcnn)             faster_rcnn ;;
  mlperf_gnmt)             mlperf_gnmt ;;
  ncf)                     ncf ;;
  inceptionv3)             resnet101_inceptionv3 ;;
  inceptionv4)             inceptionv4 ;;
  minigo)                  minigo ;;
  maskrcnn)                maskrcnn ;;
  mobilenet_v1)            mobilenet_v1 ;;
  resnet101)               resnet101_inceptionv3 ;;
  resnet50|resnet50v1_5)   resnet50 ;;
  rfcn)                    rfcn ;;
  ssd-mobilenet)           ssd_mobilenet ;;
  ssd-resnet34)            ssd-resnet34 ;;
  transformer_lt_official) transformer_lt_official ;;
  transformer_mlperf)      transformer_mlperf ;;
  unet)                    unet ;;
  wavenet)                 wavenet ;;
  wide_deep)               wide_deep ;;
  wide_deep_large_ds)      wide_deep_large_ds ;;
  bert_base)               bert_base ;;
  bert_large)              bert_large ;;
  *)
    echo "Unsupported model: ${MODEL_NAME}"
    exit 1
    ;;
esac
|
/**
* @fileoverview This file is generated by the Angular 2 template compiler.
* Do not edit.
* @suppress {suspiciousCode,uselessCode,missingProperties}
*/
/* tslint:disable */
import * as import0 from '@angular/core/src/linker/ng_module_factory';
import * as import1 from '../../../app/setup/setup.module';
import * as import2 from '@angular/router/src/router_module';
import * as import3 from '../../../app/setup/setup-routing.module';
import * as import4 from '@angular/common/src/common_module';
import * as import5 from '../../../app/shared/shared.module';
import * as import6 from '@angular/common/src/localization';
import * as import7 from '@angular/core/src/di/injector';
import * as import8 from './setup.component.ngfactory';
import * as import9 from './settings-menu.component.ngfactory';
import * as import10 from './wizardsMenu/wizards.component.ngfactory';
import * as import11 from './abc.ngfactory';
import * as import12 from '../../../app/setup/setup.component';
import * as import13 from '../../../app/setup/settings-menu.component';
import * as import14 from '../../../app/setup/wizardsMenu/wizards.component';
import * as import15 from '../../../app/setup/abc';
import * as import16 from '@angular/core/src/i18n/tokens';
import * as import17 from '@angular/router/src/router_config_loader';
// GENERATED CODE (Angular 2 template compiler) — comments below were added
// for review only; the code itself is untouched and should be regenerated,
// never hand-edited.
// Module injector for the Setup feature module: holds lazily-created
// singletons for the module's imported NgModules, the ROUTES provider, and
// NgLocalization.
class SetupInjector extends import0.NgModuleInjector<import1.Setup> {
_RouterModule_0:import2.RouterModule;
_SetupRoutingModule_1:import3.SetupRoutingModule;
_CommonModule_2:import4.CommonModule;
_SharedModule_3:import5.SharedModule;
_Setup_4:import1.Setup;
// Backing fields for the lazy getters below; null means "not created yet".
__ROUTES_5:any[];
__NgLocalization_6:import6.NgLocaleLocalization;
constructor(parent:import7.Injector) {
// The second super() argument registers the component factories this module
// can bootstrap/lazily resolve.
super(parent,[
import8.SetupComponentNgFactory,
import9.SettingsMenuComponentNgFactory,
import10.WizardsComponentNgFactory,
import11.TjjComponentNgFactory
]
,([] as any[]));
}
// Lazily builds the route table registered by SetupRoutingModule. Most leaf
// routes render the same TjjComponent placeholder under different paths.
get _ROUTES_5():any[] {
if ((this.__ROUTES_5 == null)) { (this.__ROUTES_5 = [[
{
path: 'wizardsMenu',
component: import12.SetupComponent,
children: [{
path: '',
component: import13.SettingsMenuComponent
}
]
}
,
{
path: 'wizardsMenus',
component: import14.WizardsComponent,
children: [{
path: 'upgradewizard',
component: import15.TjjComponent
}
]
}
,
{
path: 'settingsMenu',
component: import12.SetupComponent,
children: [
{
path: '',
component: import13.SettingsMenuComponent
}
,
{
path: 'linemovement',
component: import15.TjjComponent
}
,
{
path: 'printtrigger',
component: import15.TjjComponent
}
,
{
path: 'poweroptions',
component: import15.TjjComponent
}
,
{
path: 'positionorientation',
component: import15.TjjComponent
}
,
{
path: 'content',
component: import15.TjjComponent
}
,
{
path: 'properties',
component: import15.TjjComponent
}
,
{
path: 'clocksdates',
component: import15.TjjComponent
}
,
{
path: 'printheadmanualmode',
component: import15.TjjComponent
}
,
{
path: 'inksystemmanualmode',
component: import15.TjjComponent
}
,
{
path: 'inkdetails',
component: import15.TjjComponent
}
,
{
path: 'hardware',
component: import15.TjjComponent
}
,
{
path: 'gutter',
component: import15.TjjComponent
}
,
{
path: 'installoptions',
component: import15.TjjComponent
}
,
{
path: 'status',
component: import15.TjjComponent
}
,
{
path: 'logs',
component: import15.TjjComponent
}
,
{
path: 'qualityproblems',
component: import15.TjjComponent
}
,
{
path: 'jetprofile',
component: import15.TjjComponent
}
,
{
path: 'tests',
component: import15.TjjComponent
}
,
{
path: 'modulation',
component: import15.TjjComponent
}
,
{
path: 'pressurelogs',
component: import15.TjjComponent
}
,
{
path: 'testprint',
component: import15.TjjComponent
}
,
{
path: 'installation',
component: import15.TjjComponent
}
,
{
path: 'inspection',
component: import15.TjjComponent
}
,
{
path: 'configueralerts',
component: import15.TjjComponent
}
,
{
path: 'rangedalerts',
component: import15.TjjComponent
}
,
{
path: 'emailalerts',
component: import15.TjjComponent
}
,
{
path: 'servicealerts',
component: import15.TjjComponent
}
,
{
path: 'setup',
component: import15.TjjComponent
}
,
{
path: 'assignment',
component: import15.TjjComponent
}
,
{
path: 'monitor',
component: import15.TjjComponent
}
,
{
path: 'test',
component: import15.TjjComponent
}
,
{
path: 'labelselect',
component: import15.TjjComponent
}
,
{
path: 'ethernet',
component: import15.TjjComponent
}
,
{
path: 'serial',
component: import15.TjjComponent
}
,
{
path: 'advanced',
component: import15.TjjComponent
}
,
{
path: 'languagekeyboard',
component: import15.TjjComponent
}
,
{
path: 'datetime',
component: import15.TjjComponent
}
,
{
path: 'backup',
component: import15.TjjComponent
}
,
{
path: 'restore',
component: import15.TjjComponent
}
,
{
path: 'defaults',
component: import15.TjjComponent
}
,
{
path: 'upgrade',
component: import15.TjjComponent
}
,
{
path: 'upgradeconfigur',
component: import15.TjjComponent
}
,
{
path: 'overview',
component: import15.TjjComponent
}
,
{
path: 'softwareversions',
component: import15.TjjComponent
}
,
{
path: 'printerconnection',
component: import15.TjjComponent
}
,
{
path: 'languageandkey',
component: import15.TjjComponent
}
,
{
path: 'accessibility',
component: import15.TjjComponent
}
,
{
path: 'diagnostics',
component: import15.TjjComponent
}
,
{
path: 'network',
component: import15.TjjComponent
}
,
{
path: 'versioninformation',
component: import15.TjjComponent
}
,
{
path: 'logoeditor',
component: import15.TjjComponent
}
,
{
path: 'installpacks',
component: import15.TjjComponent
}
,
{
path: 'migratepacks',
component: import15.TjjComponent
}
,
{
path: 'consumables',
component: import15.TjjComponent
}
,
{
path: 'counters',
component: import15.TjjComponent
}
,
{
path: 'statistics',
component: import15.TjjComponent
}
,
{
path: 'inspectionp',
component: import15.TjjComponent
}
]
}
]
]); }
return this.__ROUTES_5;
}
// Lazily creates the NgLocalization provider using the parent injector's LOCALE_ID.
get _NgLocalization_6():import6.NgLocaleLocalization {
if ((this.__NgLocalization_6 == null)) { (this.__NgLocalization_6 = new import6.NgLocaleLocalization(this.parent.get(import16.LOCALE_ID))); }
return this.__NgLocalization_6;
}
// Eagerly instantiates the imported modules and the Setup module instance.
createInternal():import1.Setup {
this._RouterModule_0 = new import2.RouterModule(this.parent.get(import2.ROUTER_FORROOT_GUARD,(null as any)));
this._SetupRoutingModule_1 = new import3.SetupRoutingModule();
this._CommonModule_2 = new import4.CommonModule();
this._SharedModule_3 = new import5.SharedModule();
this._Setup_4 = new import1.Setup();
return this._Setup_4;
}
// Token lookup for everything this injector provides; falls through to
// notFoundResult (the parent injector handles the rest).
getInternal(token:any,notFoundResult:any):any {
if ((token === import2.RouterModule)) { return this._RouterModule_0; }
if ((token === import3.SetupRoutingModule)) { return this._SetupRoutingModule_1; }
if ((token === import4.CommonModule)) { return this._CommonModule_2; }
if ((token === import5.SharedModule)) { return this._SharedModule_3; }
if ((token === import1.Setup)) { return this._Setup_4; }
if ((token === import17.ROUTES)) { return this._ROUTES_5; }
if ((token === import6.NgLocalization)) { return this._NgLocalization_6; }
return notFoundResult;
}
// No providers here register destroy hooks, so this is empty.
destroyInternal():void {
}
}
export const SetupNgFactory:import0.NgModuleFactory<import1.Setup> = new import0.NgModuleFactory(SetupInjector,import1.Setup); |
import { Component, OnInit } from '@angular/core';
import { animate, state, style, transition, trigger } from '@angular/animations';
import { MatTableDataSource } from '@angular/material/table';
import { MatSnackBar } from '@angular/material/snack-bar';
import { AdminsitrativoFilterService } from '../../../services/adminsitrativofilter.service';
import { PersonaService } from '../../../services/persona.service';
/**
 * Lists administrative staff in a filterable Material table and lets the user
 * delete the underlying "persona" records.
 */
@Component({
  selector: 'app-administrativo-list',
  templateUrl: './administrativo-list.component.html',
  styleUrls: ['./administrativo-list.component.css'],
})
export class AdministrativoListComponent implements OnInit {
  // Holds the service response; starts as a single empty record so template
  // bindings have something to read before the first load completes.
  admiper: any = {
    idpersona: null,
    nombres: null,
    dni: null,
    ruc: null,
    email: null,
    area: null,
    fechanacimiento: new Date(),
    celular: null,
    telefono: null,
    direccion: null,
  };

  // Column order rendered by the template's mat-table.
  columnas: string[] = [
    'nombres',
    'dni',
    'ruc',
    'email',
    'area',
    'fechanacimiento',
    'celular',
    'telefono',
    'direccion',
    'acciones'
  ];

  // BUGFIX: initialize eagerly. Previously this field stayed undefined until
  // getAdmins() resolved, so applyFilter() (bound to the template's filter
  // input) crashed with "Cannot read property 'filter' of undefined" when
  // used before the HTTP response arrived.
  dataSource: MatTableDataSource<any> = new MatTableDataSource<any>([]);

  constructor(
    private adminService: AdminsitrativoFilterService,
    private persoService: PersonaService,
    private snackBar: MatSnackBar) { }

  /** Load the table as soon as the component initializes. */
  ngOnInit() {
    this.getAdmins();
  }

  /** Fetch all administrativos and (re)build the table data source. */
  getAdmins() {
    this.adminService.getAdministrativos().subscribe(
      res => {
        this.admiper = res;
        this.dataSource = new MatTableDataSource(this.admiper);
      },
      err => this.snackBar.open(err.error.mensaje, "Error", { duration: 3000 })
    );
  }

  /** Delete one persona by id, then refresh the list and confirm via snackbar. */
  deletePersona(id: number) {
    this.persoService.deletepersona(id).subscribe(
      (res: any) => {
        this.getAdmins();
        this.snackBar.open(res.mensaje, "Aceptar", { duration: 3000 });
      },
      err => this.snackBar.open(err.error.mensaje, "Error", { duration: 3000 })
    );
  }

  /** Apply a case-insensitive text filter and jump back to the first page. */
  applyFilter(filterValue: string) {
    this.dataSource.filter = filterValue.trim().toLowerCase();
    if (this.dataSource.paginator) {
      this.dataSource.paginator.firstPage();
    }
  }
}
|
import spacy

# Load the spaCy English language model.
# NOTE(review): the 'en' shortcut was removed in spaCy v3; newer installs
# need spacy.load('en_core_web_sm') — confirm the pinned spaCy version.
nlp = spacy.load('en')


def parse_sentence(sentence):
    """Parse a sentence with spaCy and print each token with its POS tag."""
    parsed_sentence = nlp(sentence)
    for token in parsed_sentence:
        print(token.text, token.pos_)


# Call the function
parse_sentence("This is an example sentence.")

# Expected output (previously pasted as bare text, which was a SyntaxError):
# This DET
# is VERB
# an DET
# example NOUN
# sentence NOUN
# . PUNCT
#!/bin/bash
# Build and tag the openvidu-getaroom image; $1 is the tutorials version.
# Fail fast on errors and on an unset version argument.
set -euo pipefail
docker build --build-arg OPENVIDU_TUTORIALS_VERSION="$1" -t openvidu/openvidu-getaroom .
# Quote "$1" so an empty/whitespace argument fails loudly instead of
# silently producing a malformed tag.
docker tag openvidu/openvidu-getaroom:latest "openvidu/openvidu-getaroom:$1"
<filename>app/src/main/java/org/spongycastle/tls/crypto/TlsSRP6VerifierGenerator.java
package org.spongycastle.tls.crypto;

import java.math.BigInteger;

/**
 * Base interface for a generator for SRP-6 verifiers.
 */
public interface TlsSRP6VerifierGenerator
{
    /**
     * Creates a new SRP-6 verifier value.
     *
     * @param salt The salt to use, generally should be large and random
     * @param identity The user's identifying information (eg. username)
     * @param password The user's password
     * @return A new verifier for use in future SRP authentication
     */
    BigInteger generateVerifier(byte[] salt, byte[] identity, byte[] password);
}
|
// WeChat mini-program "scene" launch-source ids mapped to their Chinese
// descriptions. `default` is returned for any unknown scene value.
// (Fixed typo in the default string: "unknow" -> "unknown".)
const SceneMapping = {
  1001: "发现栏小程序主入口",
  1005: "顶部搜索框的搜索结果页",
  1006: "发现栏小程序主入口搜索框的搜索结果页",
  1007: "单人聊天会话中的小程序消息卡片",
  1008: "群聊会话中的小程序消息卡片",
  1011: "扫描二维码",
  1012: "长按图片识别二维码",
  1013: "手机相册选取二维码",
  1014: "小程序模版消息",
  1017: "前往体验版的入口页",
  1019: "微信钱包",
  1020: "公众号profile页相关小程序列表",
  1022: "聊天顶部置顶小程序入口",
  1023: "安卓系统桌面图标",
  1024: "小程序profile页",
  1025: "扫描一维码",
  1026: "附近小程序列表",
  1027: "顶部搜索框搜索结果页“使用过的小程序”列表",
  1028: "我的卡包",
  1029: "卡券详情页",
  1030: "自动化测试下打开小程序",
  1031: "长按图片识别一维码",
  1032: "手机相册选取一维码",
  1034: "微信支付完成页",
  1035: "公众号自定义菜单",
  1036: "App 分享消息卡片",
  1037: "小程序打开小程序",
  1038: "从另一个小程序返回",
  1039: "摇电视",
  1042: "添加好友搜索框的搜索结果页",
  1043: "公众号模板消息",
  1044: "带shareTicket的小程序消息卡片",
  1045: "朋友圈广告",
  1046: "朋友圈广告详情页",
  1047: "扫描小程序码",
  1048: "长按图片识别小程序码",
  1049: "手机相册选取小程序码",
  1052: "卡券的适用门店列表",
  1053: "搜一搜的结果页",
  1054: "顶部搜索框小程序快捷入口",
  1056: "音乐播放器菜单",
  1057: "钱包中的银行卡详情页",
  1058: "公众号文章",
  1059: "体验版小程序绑定邀请页",
  1064: "微信连Wifi状态栏",
  1067: "公众号文章广告",
  1068: "附近小程序列表广告",
  1069: "移动应用",
  1071: "钱包中的银行卡列表页",
  1072: "二维码收款页面",
  1073: "客服消息列表下发的小程序消息卡片",
  1074: "公众号会话下发的小程序消息卡片",
  1077: "摇周边",
  1078: "连Wi-Fi成功页",
  1079: "微信游戏中心",
  1081: "客服消息下发的文字链",
  1082: "公众号会话下发的文字链",
  1084: "朋友圈广告原生页",
  1089: "微信聊天主界面下拉",
  1090: "长按小程序右上角菜单唤出最近使用历史",
  1091: "公众号文章商品卡片",
  1092: "城市服务入口",
  1095: "小程序广告组件",
  1096: "聊天记录",
  1097: "微信支付签约页",
  1099: "页面内嵌插件",
  1102: "公众号 profile 页服务预览",
  1103: "发现栏小程序主入口,「我的小程序」列表(基础库2.2.4版本起废弃)",
  1104: "微信聊天主界面下拉,「我的小程序」栏(基础库2.2.4版本起废弃)",
  1106: "聊天主界面下拉,从顶部搜索结果页,打开小程序",
  1107: "订阅消息,打开小程序",
  1113: "安卓手机负一屏,打开小程序(三星)",
  1114: "安卓手机侧边栏,打开小程序(三星)",
  1124: "扫“一物一码”打开小程序",
  1125: "长按图片识别“一物一码”",
  1126: "扫描手机相册中选取的“一物一码”",
  1129: "微信爬虫访问 详情",
  1131: "浮窗打开小程序",
  1146: "地理位置信息打开出行类小程序",
  1148: "1148",
  default: "unknown scene value"
};
/*
var m =[{
inputRegex:".*",
outputName:"'test'"
},
{
default:"others"
}
]*/
// Generic regex-based mapper. Given a rule table of
// [{ inputRegex, outputName }, ..., { default }], Mapping() selects the first
// rule whose inputRegex matches the input (case-insensitive), falling back to
// the rule that carries a `default` value.
class Mapping {
  constructor() {
    this.currentMap = {}
    this.currentOutput = ""
  }
  // Returns the rule object chosen by the most recent Mapping() call.
  // BUG FIX: previously returned `this.map`, which is never assigned and was
  // therefore always undefined; the selected rule lives in `this.currentMap`.
  getMap() {
    return this.currentMap
  }
  // Runs the lookup and records the selected rule; returns the rule's
  // outputName, or the raw input when no rule names an output.
  Mapping(map, input) {
    const current = map.find(item => input.match(new RegExp(item.inputRegex, "i"))) || map.find(n => n.default) || {}
    this.currentMap = current
    this.currentOutput = current.outputName || current.default
    return current.outputName || input
  }
  // Returns the output (or default) recorded by the last Mapping() call.
  getOutput() {
    return this.currentOutput
  }
}
var m = new Mapping()
// Utility helpers for WeChat mini-program environments: scene-id mapping,
// user-agent synthesis and current-page URL helpers.
class wechatUtils {
  // BUG FIX: this method was named `construct`, which is NOT the ES class
  // constructor, so `onWechat` and `SMapping` were never initialized by
  // `new wechatUtils(...)`. Renamed to `constructor`.
  constructor(sMapping) {
    this.onWechat = this.isOnwechat()
    this.SMapping = sMapping || SceneMapping
  }
  // Delegates to the shared module-level Mapping instance `m`.
  map(table, field) {
    return m.Mapping(table, field)
  }
  // True when running inside the WeChat mini-program runtime (global `wx`).
  isOnwechat() {
    if (typeof wx == 'object') {
      return true
    }
    return false
  }
  // Translates a WeChat scene id into its description using `mapping` (or the
  // instance's SMapping); returns '' when not running inside WeChat.
  mappingSceneToCN(num, mapping) {
    let m = mapping || this.SMapping
    if (this.onWechat) {
      return m[num] || m['default']
    }
    return ''
  }
  // Synthesizes a browser-style user-agent string from wx system info when on
  // WeChat; otherwise falls back to navigator.userAgent or ''.
  // NOTE(review): the Android UA contains the token "Verreson/4.0" — real
  // MicroMessenger UAs use "Version/4.0"; confirm whether this is intentional.
  generateUA() {
    if (typeof wx == 'object') {
      let res = wx.getSystemInfoSync();
      let isAndroid = res.system.toLowerCase().indexOf('android') > -1;
      let iresPad = !isAndroid && res.model.toLowerCase().indexOf('iphone') == -1;
      if (isAndroid) {
        return "Mozilla/5.0 (Linux; U; " + res.system + "; " + res.model + " Build/000000) AppleWebKit/537.36 (KHTML, like Gecko)Verreson/4.0 Chrome/49.0.0.0 Mobile Safari/537.36 MicroMessenger/" + res.version;
      } else if (!iresPad) {
        let v = res.system.replace(/^.*?([0-9.]+).*?$/, function (x, y) { return y; }).replace(/\./g, '_');
        return "Mozilla/5.0 (iPhone; CPU iPhone OS " + v + " like Mac OS X) AppleWebKit/602.3.12 (KHTML, like Gecko) Mobile/14C92 MicroMessenger/" + res.version;
      } else {
        let v = res.system.replace(/^.*?([0-9.]+).*?$/, function (x, y) { return y; }).replace(/\./g, '_');
        return "Mozilla/5.0 (iPad; CPU OS " + v + " like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko) Mobile/10A406 MicroMessenger/" + res.version;
      }
    }
    if (typeof navigator == 'object') {
      return navigator.userAgent
    }
    return ''
  }
  // Current page route plus its launch options serialized as a query string.
  getCurrentPageUrlWithArgs() {
    var pages = getCurrentPages()
    var currentPage = pages[pages.length - 1]
    var url = currentPage.route
    var options = currentPage.options
    // append the page's options to the url as query-string arguments
    var urlWithArgs = url + '?'
    for (var key in options) {
      var value = options[key]
      urlWithArgs += key + '=' + value + '&'
    }
    urlWithArgs = urlWithArgs.substring(0, urlWithArgs.length - 1)
    return urlWithArgs
  }
  // Route of the page currently on top of the WeChat page stack.
  getCurrentPageUrl() {
    var pages = getCurrentPages()
    var currentPage = pages[pages.length - 1]
    var url = currentPage.route
    return url
  }
}
export default wechatUtils
|
/**
* MIT License
*
* Copyright (c) 2018 Infineon Technologies AG
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE
*
*
* \file ifx_i2c_config.c
*
* \brief This file provides the ifx i2c platform specific context configurations.
*
* \addtogroup grIFXI2C
* @{
*/
/***********************************************************************************************************************
* HEADER FILES
**********************************************************************************************************************/
// Protocol Stack Includes
#include "optiga/pal/pal_ifx_i2c_config.h"
#include "optiga/ifx_i2c/ifx_i2c_config.h"
/***********************************************************************************************************************
* MACROS
**********************************************************************************************************************/
/***********************************************************************************************************************
* ENUMS
**********************************************************************************************************************/
/***********************************************************************************************************************
* DATA STRUCTURES
***********************************************************************************************************************/
/** @brief IFX I2C context for the OPTIGA slave. Only one context is supported per slave.*/
//lint --e{785} suppress "Only required fields are initialized, the rest are handled by consumer of this structure"
ifx_i2c_context_t ifx_i2c_context_0 =
{
    /// Slave address of the OPTIGA device on the I2C bus
    0x30,
    /// i2c-master frequency (presumably kHz, i.e. 400 kHz Fast Mode — confirm against ifx_i2c_context_t docs)
    400,
    /// IFX-I2C frame size, capped at 0x0115 when the data-link layer allows more
#if (DL_MAX_FRAME_SIZE >= 0x0115)
    0x0115,
#else
    DL_MAX_FRAME_SIZE,
#endif
    /// Vdd pin control handle
    &optiga_vdd_0,
    /// Reset pin control handle
    &optiga_reset_0,
    /// optiga pal i2c context
    &optiga_pal_i2c_context_0,
};
/***********************************************************************************************************************
* GLOBAL
***********************************************************************************************************************/
/***********************************************************************************************************************
* LOCAL ROUTINES
***********************************************************************************************************************/
/***********************************************************************************************************************
* API PROTOTYPES
**********************************************************************************************************************/
/**
* @}
**/
|
<filename>src/index.js
import React from "react";
import ReactDOM from "react-dom";
import { Navbar } from "./Navbar";
import { Profile } from "./Profile";
const App = () => (
<>
<Navbar />
<Profile />
</>
);
ReactDOM.render(<App />, document.getElementById("root"));
|
<reponame>IlyaNyrkov/DripCode<gh_stars>0
#include <iostream>
#include <assert.h>
#include <vector>
using namespace std;
class Solution {
public:
    // Returns the single value in [0, n] that is missing from nums, where
    // n == nums.size(). Uses the arithmetic-series identity
    // sum(0..n) = n*(n+1)/2. The running sum and the product are kept in
    // long long so large inputs cannot overflow int, and the loop index is
    // size_t to avoid a signed/unsigned comparison.
    int missingNumber(std::vector<int>& nums) {
        long long sum = 0;
        for (std::size_t i = 0; i < nums.size(); ++i) {
            sum += nums[i];
        }
        long long n = static_cast<long long>(nums.size());
        return static_cast<int>(n * (n + 1) / 2 - sum);
    }
};
// True when Solution::missingNumber(nums) produces the expected result.
bool testSolution(vector<int> nums, int result) {
    return Solution().missingNumber(nums) == result;
}
int main() {
assert(testSolution({3,0,1}, 2));
assert(testSolution({0,1}, 2));
assert(testSolution({9,6,4,2,3,5,7,0,1}, 8));
return 0;
} |
use core::errors::Result;
use core::{
CoreFlavor, Range, Resolved, ResolvedByPrefix, Resolver, RpPackage, RpRequiredPackage,
RpVersionedPackage, Version,
};
use naming::Naming;
use relative_path::{RelativePath, RelativePathBuf};
use std::any::Any;
use std::collections::{HashMap, HashSet};
/// Toy resolver mapping a package name to its declared dependency list
/// (dependency name plus version-range requirement).
struct CustomResolver {
    packages: HashMap<String, Vec<(String, Range)>>,
}
impl CustomResolver {
    /// Creates an empty resolver with no registered packages.
    fn new() -> Self {
        CustomResolver {
            packages: HashMap::new(),
        }
    }

    /// Registers `package_name` with its (dependency name, version
    /// requirement) pairs; requirement strings are parsed into `Range`s.
    fn add_package_with_dependencies(&mut self, package_name: &str, dependencies: Vec<(&str, &str)>) {
        let mut dependency_list = Vec::new();
        for (name, version) in dependencies {
            dependency_list.push((name.to_string(), Range::new(version)));
        }
        self.packages.insert(package_name.to_string(), dependency_list);
    }

    /// Returns the first entry under `package_name` whose version satisfies
    /// `version_range`, wrapped as an `RpVersionedPackage`.
    /// NOTE(review): despite the name, this scans the package's *dependency*
    /// list (not published versions of the package itself), and it parses
    /// `range.as_str()` as if it were a concrete version — confirm intended
    /// semantics against the `Range`/`Version` APIs in the `core` crate.
    fn resolve_package_version(&self, package_name: &str, version_range: &Range) -> Option<RpVersionedPackage> {
        if let Some(dependencies) = self.packages.get(package_name) {
            // For simplicity, assume the first versioned package found satisfies the version range
            for (name, range) in dependencies {
                if let Some(version) = Version::parse(range.as_str()) {
                    if version_range.contains(&version) {
                        return Some(RpVersionedPackage {
                            package: RpPackage {
                                name: package_name.to_string(),
                                version: version.clone(),
                            },
                            flavor: CoreFlavor::default(),
                        });
                    }
                }
            }
        }
        None
    }
}
/// Demo driver: registers two packages and resolves `package1` against a
/// version range, printing the result.
/// NOTE(review): `Range::new` is called with ONE argument in
/// `add_package_with_dependencies` but with TWO arguments here — one of the
/// call sites cannot match the real signature; confirm against the `Range`
/// API before building.
fn main() {
    let mut resolver = CustomResolver::new();

    resolver.add_package_with_dependencies(
        "package1",
        vec![("dependency1", ">=1.0.0"), ("dependency2", "<2.0.0")],
    );
    resolver.add_package_with_dependencies(
        "package2",
        vec![("dependency1", ">=1.5.0"), ("dependency3", ">=3.0.0")],
    );

    let resolved_package = resolver.resolve_package_version("package1", &Range::new(">=1.0.0", "<2.0.0"));
    println!("{:?}", resolved_package);
}
# Installs oh-my-zsh (Thrimbda's fork setup): clones oh-my-zsh, the
# powerlevel10k theme and two plugins, backs up any existing ~/.zshrc, then
# fetches the author's .zshrc/.p10k.zsh and points $ZSH at the install dir.
main() {
  # Use colors, but only if connected to a terminal, and that terminal
  # supports them.
  if which tput >/dev/null 2>&1; then
    ncolors=$(tput colors)
  fi
  if [ -t 1 ] && [ -n "$ncolors" ] && [ "$ncolors" -ge 8 ]; then
    RED="$(tput setaf 1)"
    GREEN="$(tput setaf 2)"
    YELLOW="$(tput setaf 3)"
    BLUE="$(tput setaf 4)"
    BOLD="$(tput bold)"
    NORMAL="$(tput sgr0)"
  else
    RED=""
    GREEN=""
    YELLOW=""
    BLUE=""
    BOLD=""
    NORMAL=""
  fi

  # Only enable exit-on-error after the non-critical colorization stuff,
  # which may fail on systems lacking tput or terminfo
  set -e

  # zsh itself is a prerequisite; bail out early when absent.
  if ! command -v zsh >/dev/null 2>&1; then
    printf "${YELLOW}Zsh is not installed!${NORMAL} Please install zsh first!\n"
    exit
  fi

  # Default install location unless the caller pre-set $ZSH.
  if [ ! -n "$ZSH" ]; then
    ZSH=~/.oh-my-zsh
  fi

  if [ -d "$ZSH" ]; then
    printf "${YELLOW}You already have Oh My Zsh installed.${NORMAL}\n"
    printf "You'll need to remove $ZSH if you want to re-install.\n"
  else
    # Prevent the cloned repository from having insecure permissions. Failing to do
    # so causes compinit() calls to fail with "command not found: compdef" errors
    # for users with insecure umasks (e.g., "002", allowing group writability). Note
    # that this will be ignored under Cygwin by default, as Windows ACLs take
    # precedence over umasks except for filesystems mounted with option "noacl".
    umask g-w,o-w

    printf "${BLUE}Cloning Oh My Zsh...${NORMAL}\n"
    command -v git >/dev/null 2>&1 || {
      echo "Error: git is not installed"
      exit 1
    }
    # The Windows (MSYS) Git is not compatible with normal use on cygwin
    if [ "$OSTYPE" = cygwin ]; then
      if git --version | grep msysgit > /dev/null; then
        echo "Error: Windows/MSYS Git is not supported on Cygwin"
        echo "Error: Make sure the Cygwin git package is installed and is first on the path"
        exit 1
      fi
    fi
    env git clone --depth=1 https://github.com/robbyrussell/oh-my-zsh.git "$ZSH" || {
      printf "Error: git clone of oh-my-zsh repo failed\n"
      exit 1
    }

    printf "${GREEN}"
    echo ' __ __ '
    echo ' ____ / /_ ____ ___ __ __ ____ _____/ /_ '
    echo ' / __ \/ __ \ / __ `__ \/ / / / /_ / / ___/ __ \ '
    echo '/ /_/ / / / / / / / / / / /_/ / / /_(__ ) / / / '
    echo '\____/_/ /_/ /_/ /_/ /_/\__, / /___/____/_/ /_/ '
    echo ' /____/ ....is now installed!'
    printf "${YELLOW}and hacked by thrimbda!${GREEN}"
    echo ''
    echo ''
    echo 'Please look over the ~/.zshrc file to select plugins, themes, and options.'
    echo ''
    echo 'p.s. Follow us at https://twitter.com/ohmyzsh.'
    echo ''
    echo 'p.p.s. Get stickers and t-shirts at https://shop.planetargon.com.'
    echo ''
    printf "${NORMAL}\n"
  fi

  # install theme (powerlevel10k) into the oh-my-zsh custom themes dir
  if [ ! -n "$POWERLEVEL10K" ]; then
    POWERLEVEL10K=$ZSH/custom/themes/powerlevel10k
  fi
  if [ ! -d "$POWERLEVEL10K" ]; then
    printf "${BLUE}install theme powerlevel10k into your oh-my-zsh environment${NORMAL}\n"
    env git clone --depth=1 https://github.com/romkatv/powerlevel10k.git $POWERLEVEL10K || {
      printf "Error: git clone of oh-my-zsh repo failed\n"
      exit 1
    }
  fi

  # install plugins: zsh-autosuggestions and zsh-syntax-highlighting
  if [ ! -n "$AUTOSUGGESTIONS" ]; then
    AUTOSUGGESTIONS=$ZSH/custom/plugins/zsh-autosuggestions
  fi
  if [ ! -d "$AUTOSUGGESTIONS" ]; then
    printf "${BLUE}install plugin auto suggestion into your oh-my-zsh environment${NORMAL}\n"
    env git clone https://github.com/zsh-users/zsh-autosuggestions.git $AUTOSUGGESTIONS || {
      printf "Error: git clone of zsh-autosuggestions repo failed\n"
      exit 1
    }
  fi
  if [ ! -n "$SYNTAX_HIGHLIGHTING" ]; then
    SYNTAX_HIGHLIGHTING=$ZSH/custom/plugins/zsh-syntax-highlighting
  fi
  if [ ! -d "$SYNTAX_HIGHLIGHTING" ]; then
    printf "${BLUE}install plugin syntax highlighting into your oh-my-zsh environment${NORMAL}\n"
    env git clone https://github.com/zsh-users/zsh-syntax-highlighting.git $SYNTAX_HIGHLIGHTING || {
      printf "Error: git clone of zsh-syntax-highlighting repo failed\n"
      exit 1
    }
  fi

  printf "${BLUE}Looking for an existing zsh config...${NORMAL}\n"
  if [ -f ~/.zshrc ] || [ -h ~/.zshrc ]; then
    printf "${YELLOW}Found ~/.zshrc.${NORMAL} ${GREEN}Backing up to ~/.zshrc.pre-oh-my-zsh${NORMAL}\n";
    mv ~/.zshrc ~/.zshrc.pre-oh-my-zsh;
  fi

  # Anyone able to run this script will have curl or wget available.
  if command -v curl 2>&1 >/dev/null ; then
    curl -o ~/.zshrc -L https://raw.githubusercontent.com/Thrimbda/shell-set-up/master/.zshrc
    curl -o ~/.p10k.zsh -L https://raw.githubusercontent.com/Thrimbda/shell-set-up/master/.p10k.zsh
  elif command -v wget 2>&1 >/dev/null ; then
    wget -O ~/.zshrc https://raw.githubusercontent.com/Thrimbda/shell-set-up/master/.zshrc
    wget -O ~/.p10k.zsh https://raw.githubusercontent.com/Thrimbda/shell-set-up/master/.p10k.zsh
  else
    printf "${YELLOW}I don't know where did you get this script.${NORMAL} Please install curl or wget first!\n"
    exit
  fi

  # Rewrite the export ZSH= line in the downloaded .zshrc to the actual
  # install location chosen above.
  sed "/^export ZSH=/ c\\
export ZSH=\"$ZSH\"
" ~/.zshrc > ~/.zshrc-omztemp
  mv -f ~/.zshrc-omztemp ~/.zshrc

  printf "${GREEN}"
  echo ' ___________ __ __ __ '
  echo '/____ ____// / / / / / '
  echo ' / / / /____ _____ ( ) __ ___ / /__ ____/ /_____ '
  echo ' / / / __ / / ___// / / |/ | / __ \/ __ // __ |'
  echo ' / / / / / / / / / / / /| /| | / /_/ // /_/ // /_/ |'
  echo ' /_/ /_/ /_/ /_/ /_/ /_/ |_/ |_| \____/ \____/ \____|_| ...empower your shell, enjoy yourself.'
  printf "${NORMAL}"
}

main
|
/* global describe, test, expect */
// Miscellaneous integration checks run against a prepared `app` instance.
export default function ({ app }) {
  describe('Misc', () => {
    test('finishes response', async () => {
      // Minimal stand-in for a Node response: only tracks whether end()
      // was invoked by the route handler.
      const mockRes = {
        finished: false,
        end() {
          this.finished = true
        }
      }
      const html = await app.renderToHTML({}, mockRes, '/finish-response', {})
      // A route that ends the response itself yields no rendered HTML.
      expect(html).toBeFalsy()
    })
  })
}
|
/*
* Copyright 2019 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.openvalidation.common.ast.operand.lambda;
import io.openvalidation.common.ast.operand.property.ASTOperandProperty;
import io.openvalidation.common.data.DataPropertyType;
import java.util.List;
/**
 * Lambda expression whose body is a single property access; the lambda
 * always evaluates to a Boolean result type.
 */
public class ASTOperandLambdaProperty extends ASTOperandLambdaExpression {

  public ASTOperandLambdaProperty() {
    super();
  }

  public ASTOperandLambdaProperty(ASTOperandProperty property) {
    this.setProperty(property);
    this.setLambdaToken(generateLambdaToken(this));
  }

  /** Stores the wrapped property as this lambda's operand. */
  public void setProperty(ASTOperandProperty property) {
    this.setOperand(property);
  }

  /** Returns the operand as a property, or null when absent or of another type. */
  public ASTOperandProperty getProperty() {
    if (this.getOperand() instanceof ASTOperandProperty) {
      return (ASTOperandProperty) this.getOperand();
    }
    return null;
  }

  @Override
  public List<ASTOperandProperty> getProperties() {
    ASTOperandProperty property = this.getProperty();
    if (property == null) {
      return super.getProperties();
    }
    return property.getProperties();
  }

  @Override
  public DataPropertyType getDataType() {
    return DataPropertyType.Boolean;
  }

  @Override
  public String print(int level) {
    StringBuilder output = new StringBuilder();
    output.append(super.print(level));
    ASTOperandProperty property = this.getProperty();
    if (property != null) {
      output.append(property.print(level + 1));
    }
    return output.toString();
  }
}
|
const config = require("../../config");
const mongo = require("mongodb");
const url = process.env.MONGO;
module.exports = () => async (ctx) => {
mongo.connect(
url,
{
useNewUrlParser: true,
useUnifiedTopology: true,
},
(err, client) => {
let db = client.db("randomath");
db.collection("users")
.find({ id: ctx.from.id })
.toArray((err, data) => {
let correct = data[0].true_answers;
let keyboard;
if (correct < 41) {
keyboard = config.train_keyboard_less_5_lvl;
} else if (correct < 91) {
keyboard = config.train_keyboard_less_10_lvl;
} else {
keyboard = config.train_keyboard;
}
ctx.deleteMessage();
ctx.replyWithMarkdown(
`🎓 *You can choose what skills you need to train here.*` +
`\n\n❔ Each training is an infinite amount of math examples with three options. *There is only one right option.*`,
{
reply_markup: {
inline_keyboard: keyboard,
},
parse_mode: "markdown",
}
);
});
}
);
}; |
#!/bin/bash
# Script Name: AtoMiC Mylar Installer
# Orchestrates a Mylar install by sourcing the shared AtoMiC helper scripts
# in order: banner/pause, remove old autostart, back up a previous install,
# ensure Python + pips, download via git, re-create autostart, fix
# permissions, start the app, confirm, then thank-you/exit.
source "$SCRIPTPATH/inc/commons.sh"
source "$SCRIPTPATH/inc/header.sh"
echo -e "${GREEN}AtoMiC $APPTITLE Installer Script$ENDCOLOR"
source "$SCRIPTPATH/inc/pause.sh"
source "$SCRIPTPATH/inc/app-autostart-remove.sh"
source "$SCRIPTPATH/inc/app-move-previous.sh"
source "$SCRIPTPATH/utils/python/python-installer.sh"
source "$SCRIPTPATH/mylar/mylar-constants.sh"
source "$SCRIPTPATH/inc/app-install-pips.sh"
source "$SCRIPTPATH/inc/app-git-download.sh"
source "$SCRIPTPATH/inc/app-autostart-configure.sh"
source "$SCRIPTPATH/inc/app-set-permissions.sh"
source "$SCRIPTPATH/inc/app-start.sh"
source "$SCRIPTPATH/inc/app-install-confirmation.sh"
source "$SCRIPTPATH/inc/thankyou.sh"
source "$SCRIPTPATH/inc/exit.sh"
|
// Wall-clock timer. `runtime` holds elapsed milliseconds; while running it is
// refreshed once per second by an interval tick.
class Timer {
  constructor() {
    this.startTime = null
    this.endTime = null
    this.runtime = 0
    this.interval = null
  }
  // Begin (or restart) timing; refreshes `runtime` every second.
  start() {
    this.startTime = new Date()
    this.interval = setInterval(() => {
      this.endTime = new Date()
      this.runtime = this.endTime.getTime() - this.startTime.getTime()
    }, 1000)
  }
  // BUG FIX: stop()/pause() previously left `runtime` up to one second stale
  // (it was only updated by the interval tick, and pause() never updated it
  // at all); both now capture the final elapsed time before halting.
  stop() {
    clearInterval(this.interval)
    this._captureElapsed()
  }
  pause() {
    clearInterval(this.interval)
    this._captureElapsed()
  }
  // Clears all timing state back to the initial values.
  reset() {
    this.startTime = null
    this.endTime = null
    this.runtime = 0
  }
  // Records endTime/runtime from the current wall clock (no-op if never started).
  _captureElapsed() {
    if (this.startTime) {
      this.endTime = new Date()
      this.runtime = this.endTime.getTime() - this.startTime.getTime()
    }
  }
}
require 'thor'
require 'csv'
require 'terminal-table'
require 'timeout'
module Dsfu
  # Thor command-line interface for building, reviewing and uploading a
  # product catalogue generated from image files in the working directory.
  class CLI < Thor
    desc "view TABLE", "Views a table of products"
    # Renders the first *.csv found in the current directory as an ASCII
    # table (ID, display name, size, price).
    # NOTE(review): the desc string says "view TABLE" but the Thor command
    # name is `view_table` — confirm the intended invocation.
    def view_table
      csv = Dir.glob("*.csv")[0]
      products = Dsfu::CsvProductFactory.new(csv).build
      table = products.map.with_index { |product, i| [i, product.display_name, product.dimensions, "$#{product.price}"] }
      puts Terminal::Table.new rows: table, headings: ['ID', 'Display Name', 'Size', 'Price']
    end

    desc "csv", "Creates a new CSV for editing"
    # Scans the directory for .png files, pre-filling display name and
    # dimensions when the file name matches "<name> - <width>x<height>.png",
    # then writes product_listing.csv and opens it (macOS `open`) for editing.
    def csv
      files = []
      Dir.new(Dir.pwd).each do |file|
        if file =~ /.png/
          # $1..$3 below are the capture groups: name, width, height.
          if file =~ /(.+) - ([\d.]+)[xX ]+([\d.]+).png/
            files << [ file, $1, $2, $3]
          else
            files << [ file ]
          end
        end
      end
      CSV.open("product_listing.csv", "wb") do |csv|
        csv << ['File Name', 'Display Name', 'Width', 'Height', 'Price']
        files.each do |file|
          csv << file
        end
      end
      `open product_listing.csv`
    end

    desc "upload COMPANY CATEGORY", "uploads products in directory to the digital store front under COMPANY"
    # Builds products from the first *.csv and pushes each one to the store
    # front within a single Dsfu::SentientStoreFront session.
    def upload(company_input, category_input)
      csv = Dir.glob("*.csv")[0]
      products = Dsfu::CsvProductFactory.new(csv).build
      Dsfu::SentientStoreFront.execute do
        products.each do |product|
          product.find_image_path
          product.company = company_input
          product.category = category_input
          new_product product
        end
      end
    end
  end
end
|
// Backbone model for a telephony device; the `state` attribute drives which
// call-control buttons are shown/enabled in the UI.
Zest.Telephony.Models.Device = Backbone.Model.extend({
  defaults: {
    state: "disabled_by_default"
  },

  // Convenience accessor for the model's current state attribute.
  state: function() {
    return this.get("state");
  },

  // '' (visible) for ready/call-setup states; 'hidden' otherwise.
  uiShowAnswerButton: function() {
    var visibleStates = ['ready', 'error', 'disconnect', 'incoming', 'answering'];
    if (_.contains(visibleStates, this.state())) {
      return '';
    }
    return 'hidden';
  },

  // '' (enabled) only while a call is incoming; 'disabled' otherwise.
  uiDisableAnswerButton: function() {
    if (_.contains(['incoming'], this.state())) {
      return '';
    }
    return 'disabled';
  },

  // '' (visible) only while connected; 'hidden' otherwise.
  uiShowHangupButton: function() {
    if (_.contains(['connect'], this.state())) {
      return '';
    }
    return 'hidden';
  },

  // '' (enabled) only while connected; 'disabled' otherwise.
  uiDisableHangupButton: function() {
    if (_.contains(['connect'], this.state())) {
      return '';
    }
    return 'disabled';
  }
});
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from flask_api import status
from flask import Flask, request, render_template, send_from_directory
from flask_cors import CORS
import json
import pygame
import os
import re
# Flask app serving the CRA build output; CORS enabled so the dev frontend
# can call this API from another origin.
app = Flask(__name__, static_folder="build/static", template_folder="build")
CORS(app)
# Initialise the pygame mixer once at startup for audio playback.
pygame.mixer.init()
@app.route("/play", methods=['POST'])
def playSoundRequest():
data = request.get_data()
data = str(data, 'utf-8')
jsonData = json.loads(data)
cwd = os.getcwd()
path = cwd + "/dat/"+jsonData["playId"]
pygame.mixer.music.load(path)
pygame.mixer.music.play()
return "", status.HTTP_202_ACCEPTED
def getAllButtonsObject(directory="./dat"):
    """Build the soundboard button list from audio files in ``directory``.

    Each ``.wav``/``.mp3`` file becomes ``{"id": <file name>, "title": <name
    with spaces inserted at camelCase boundaries and the extension removed>}``.

    The ``directory`` parameter defaults to the original hard-coded ``./dat``
    so existing callers are unaffected, while making the function testable.

    Fixes over the previous version:
    - used ``".wav" in name`` substring checks, which matched e.g.
      ``foo.wav.txt``; now the extension itself is checked (case-insensitive),
    - stripped the extension with ``str.replace``, which also removed
      ``.wav``/``.mp3`` occurring mid-name; now ``os.path.splitext`` is used,
    - the regex replacement now uses a raw string.
    """
    buttons = []
    for file_name in os.listdir(directory):
        base, ext = os.path.splitext(file_name)
        if ext.lower() in (".wav", ".mp3"):
            # "MySound" -> "My Sound": insert a space at lower->upper boundaries.
            file_title = re.sub(r"([a-z])([A-Z])", r"\g<1> \g<2>", base)
            buttons.append({"id": file_name, "title": file_title})
    return {"buttons": buttons}
@app.route("/stop", methods=['GET'])
def stopMusic():
pygame.mixer.music.stop()
return "", status.HTTP_200_OK
@app.route("/getButtons", methods=['GET'])
def returnAllButtons():
obj = getAllButtonsObject()
return json.dumps(obj), status.HTTP_202_ACCEPTED
@app.route("/")
def index():
return render_template("index.html")
@app.route('/favicon.ico')
def favicon():
    # Serves the favicon directly from the build's static directory.
    return send_from_directory(os.path.join('./build', 'static'),'favicon.ico')
if __name__ == '__main__':
    # NOTE(review): binds to all interfaces on privileged port 80 with the
    # Flask dev server — fine on a LAN soundboard box, not for the internet.
    app.run(host='0.0.0.0', port=80)
|
#!/usr/bin/env -S bash -euET -o pipefail -O inherit_errexit
# ktools pipeline runner (appears to be oasislmf-generated): computes ground-up
# (gul) and insured (il) losses for summary set S1, process/partition P1, in
# both the standard and fully-correlated variants.
SCRIPT=$(readlink -f "$0") && cd $(dirname "$SCRIPT")

# --- Script Init ---
# Fresh log directory for this run.
mkdir -p log
rm -R -f log/*

# --- Setup run dirs ---
# Clear previous outputs but keep summary-info and json artefacts.
find output -type f -not -name '*summary-info*' -not -name '*.json' -exec rm -R -f {} +
mkdir output/full_correlation/

rm -R -f fifo/*
mkdir fifo/full_correlation/
rm -R -f work/*
mkdir work/kat/
mkdir work/full_correlation/
mkdir work/full_correlation/kat/

# Per-summary AAL accumulation directories.
mkdir work/gul_S1_summaryaalcalc
mkdir work/full_correlation/gul_S1_summaryaalcalc
mkdir work/il_S1_summaryaalcalc
mkdir work/full_correlation/il_S1_summaryaalcalc
# Named pipes wiring the ktools processes below together (one set per output
# stream; *_P1 = partition 1, full_correlation/* = fully-correlated run).
mkfifo fifo/full_correlation/gul_fc_P1

mkfifo fifo/gul_P1
mkfifo fifo/gul_S1_summary_P1
mkfifo fifo/gul_S1_eltcalc_P1
mkfifo fifo/gul_S1_summarycalc_P1
mkfifo fifo/gul_S1_pltcalc_P1

mkfifo fifo/il_P1
mkfifo fifo/il_S1_summary_P1
mkfifo fifo/il_S1_eltcalc_P1
mkfifo fifo/il_S1_summarycalc_P1
mkfifo fifo/il_S1_pltcalc_P1

mkfifo fifo/full_correlation/gul_P1
mkfifo fifo/full_correlation/gul_S1_summary_P1
mkfifo fifo/full_correlation/gul_S1_eltcalc_P1
mkfifo fifo/full_correlation/gul_S1_summarycalc_P1
mkfifo fifo/full_correlation/gul_S1_pltcalc_P1

mkfifo fifo/full_correlation/il_P1
mkfifo fifo/full_correlation/il_S1_summary_P1
mkfifo fifo/full_correlation/il_S1_eltcalc_P1
mkfifo fifo/full_correlation/il_S1_summarycalc_P1
mkfifo fifo/full_correlation/il_S1_pltcalc_P1
# Consumers are started first (reading from their fifos), then the single
# eve|getmodel|gulcalc producer at the end feeds both the gul and (via fmcalc)
# il streams; each summary fifo is tee'd to elt/summarycalc/plt consumers and
# an AAL accumulation file.

# --- Do insured loss computes ---
eltcalc < fifo/il_S1_eltcalc_P1 > work/kat/il_S1_eltcalc_P1 & pid1=$!
summarycalctocsv < fifo/il_S1_summarycalc_P1 > work/kat/il_S1_summarycalc_P1 & pid2=$!
pltcalc < fifo/il_S1_pltcalc_P1 > work/kat/il_S1_pltcalc_P1 & pid3=$!
tee < fifo/il_S1_summary_P1 fifo/il_S1_eltcalc_P1 fifo/il_S1_summarycalc_P1 fifo/il_S1_pltcalc_P1 work/il_S1_summaryaalcalc/P1.bin > /dev/null & pid4=$!
summarycalc -m -f -1 fifo/il_S1_summary_P1 < fifo/il_P1 &

# --- Do ground up loss computes ---
eltcalc < fifo/gul_S1_eltcalc_P1 > work/kat/gul_S1_eltcalc_P1 & pid5=$!
summarycalctocsv < fifo/gul_S1_summarycalc_P1 > work/kat/gul_S1_summarycalc_P1 & pid6=$!
pltcalc < fifo/gul_S1_pltcalc_P1 > work/kat/gul_S1_pltcalc_P1 & pid7=$!
tee < fifo/gul_S1_summary_P1 fifo/gul_S1_eltcalc_P1 fifo/gul_S1_summarycalc_P1 fifo/gul_S1_pltcalc_P1 work/gul_S1_summaryaalcalc/P1.bin > /dev/null & pid8=$!
summarycalc -m -i -1 fifo/gul_S1_summary_P1 < fifo/gul_P1 &

# --- Do insured loss computes ---
eltcalc < fifo/full_correlation/il_S1_eltcalc_P1 > work/full_correlation/kat/il_S1_eltcalc_P1 & pid9=$!
summarycalctocsv < fifo/full_correlation/il_S1_summarycalc_P1 > work/full_correlation/kat/il_S1_summarycalc_P1 & pid10=$!
pltcalc < fifo/full_correlation/il_S1_pltcalc_P1 > work/full_correlation/kat/il_S1_pltcalc_P1 & pid11=$!
tee < fifo/full_correlation/il_S1_summary_P1 fifo/full_correlation/il_S1_eltcalc_P1 fifo/full_correlation/il_S1_summarycalc_P1 fifo/full_correlation/il_S1_pltcalc_P1 work/full_correlation/il_S1_summaryaalcalc/P1.bin > /dev/null & pid12=$!
summarycalc -m -f -1 fifo/full_correlation/il_S1_summary_P1 < fifo/full_correlation/il_P1 &

# --- Do ground up loss computes ---
eltcalc < fifo/full_correlation/gul_S1_eltcalc_P1 > work/full_correlation/kat/gul_S1_eltcalc_P1 & pid13=$!
summarycalctocsv < fifo/full_correlation/gul_S1_summarycalc_P1 > work/full_correlation/kat/gul_S1_summarycalc_P1 & pid14=$!
pltcalc < fifo/full_correlation/gul_S1_pltcalc_P1 > work/full_correlation/kat/gul_S1_pltcalc_P1 & pid15=$!
tee < fifo/full_correlation/gul_S1_summary_P1 fifo/full_correlation/gul_S1_eltcalc_P1 fifo/full_correlation/gul_S1_summarycalc_P1 fifo/full_correlation/gul_S1_pltcalc_P1 work/full_correlation/gul_S1_summaryaalcalc/P1.bin > /dev/null & pid16=$!
summarycalc -m -i -1 fifo/full_correlation/gul_S1_summary_P1 < fifo/full_correlation/gul_P1 &

# Producer: event stream -> model -> ground-up losses; the -j side channel
# feeds the fully-correlated branch, which is duplicated into its own fmcalc.
tee < fifo/full_correlation/gul_fc_P1 fifo/full_correlation/gul_P1 | fmcalc -a2 > fifo/full_correlation/il_P1 &
eve 1 2 | getmodel | gulcalc -S0 -L0 -r -j fifo/full_correlation/gul_fc_P1 -a1 -i - | tee fifo/gul_P1 | fmcalc -a2 > fifo/il_P1 &

wait $pid1 $pid2 $pid3 $pid4 $pid5 $pid6 $pid7 $pid8 $pid9 $pid10 $pid11 $pid12 $pid13 $pid14 $pid15 $pid16
# Concatenate the per-partition work files into the final CSV outputs
# (-s = sorted kat for the eltcalc streams).

# --- Do insured loss kats ---
kat -s work/kat/il_S1_eltcalc_P1 > output/il_S1_eltcalc.csv & kpid1=$!
kat work/kat/il_S1_pltcalc_P1 > output/il_S1_pltcalc.csv & kpid2=$!
kat work/kat/il_S1_summarycalc_P1 > output/il_S1_summarycalc.csv & kpid3=$!

# --- Do insured loss kats for fully correlated output ---
kat -s work/full_correlation/kat/il_S1_eltcalc_P1 > output/full_correlation/il_S1_eltcalc.csv & kpid4=$!
kat work/full_correlation/kat/il_S1_pltcalc_P1 > output/full_correlation/il_S1_pltcalc.csv & kpid5=$!
kat work/full_correlation/kat/il_S1_summarycalc_P1 > output/full_correlation/il_S1_summarycalc.csv & kpid6=$!

# --- Do ground up loss kats ---
kat -s work/kat/gul_S1_eltcalc_P1 > output/gul_S1_eltcalc.csv & kpid7=$!
kat work/kat/gul_S1_pltcalc_P1 > output/gul_S1_pltcalc.csv & kpid8=$!
kat work/kat/gul_S1_summarycalc_P1 > output/gul_S1_summarycalc.csv & kpid9=$!

# --- Do ground up loss kats for fully correlated output ---
kat -s work/full_correlation/kat/gul_S1_eltcalc_P1 > output/full_correlation/gul_S1_eltcalc.csv & kpid10=$!
kat work/full_correlation/kat/gul_S1_pltcalc_P1 > output/full_correlation/gul_S1_pltcalc.csv & kpid11=$!
kat work/full_correlation/kat/gul_S1_summarycalc_P1 > output/full_correlation/gul_S1_summarycalc.csv & kpid12=$!

wait $kpid1 $kpid2 $kpid3 $kpid4 $kpid5 $kpid6 $kpid7 $kpid8 $kpid9 $kpid10 $kpid11 $kpid12
|
import React from 'react'
import 'keen-slider/keen-slider.min.css'
import { useKeenSlider } from 'keen-slider/react'
/**
 * Demo carousel built on keen-slider: six numbered slides with prev/next
 * arrows and clickable pagination dots.
 */
const Slider = () => {
  // Index of the slide currently in view; kept in React state so the arrows
  // and dots re-render as the slider moves.
  const [currentSlide, setCurrentSlide] = React.useState(0);
  const [sliderRef, slider] = useKeenSlider({
    initial: currentSlide,
    // Sync React state whenever keen-slider changes slide.
    slideChanged(s) {
      setCurrentSlide(s.details().relativeSlide);
    }
  });

  // `slider` is null until the keen-slider instance mounts, hence the
  // `slider && (...)` guards around the arrows and dots below.
  return (
    <>
      <div className="navigation-wrapper">
        <div ref={sliderRef} className="keen-slider">
          <div className="keen-slider__slide number-slide1">1</div>
          <div className="keen-slider__slide number-slide2">2</div>
          <div className="keen-slider__slide number-slide3">3</div>
          <div className="keen-slider__slide number-slide4">4</div>
          <div className="keen-slider__slide number-slide5">5</div>
          <div className="keen-slider__slide number-slide6">6</div>
        </div>
        {slider && (
          <>
            <ArrowLeft
              onClick={e => e.stopPropagation() || slider.prev()}
              disabled={currentSlide === 0}
            />
            <ArrowRight
              onClick={e => e.stopPropagation() || slider.next()}
              disabled={currentSlide === slider.details().size - 1}
            />
          </>
        )}
      </div>
      {slider && (
        <div className="dots">
          {[...Array(slider.details().size).keys()].map(idx => {
            return (
              <button
                key={idx}
                onClick={() => {
                  slider.moveToSlide(idx);
                }}
                className={"dot" + (currentSlide === idx ? " active" : "")}
              />
            );
          })}
        </div>
      )}
    </>
  );
}
function ArrowLeft(props) {
const disabeld = props.disabled ? " arrow--disabled" : "";
return (
<svg
onClick={props.onClick}
className={"arrow arrow--left" + disabeld}
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 24 24"
>
<path d="M16.67 0l2.83 2.829-9.339 9.175 9.339 9.167-2.83 2.829-12.17-11.996z" />
</svg>
);
}
function ArrowRight(props) {
const disabeld = props.disabled ? " arrow--disabled" : "";
return (
<svg
onClick={props.onClick}
className={"arrow arrow--right" + disabeld}
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 24 24"
>
<path d="M5 3l3.057-3 11.943 12-11.943 12-3.057-3 9-9z" />
</svg>
);
}
export default Slider
|
#!/bin/bash
# Script to deploy a very simple web application.
# The web app has a customizable image and some text.
# The heredoc delimiter is unquoted, so ${PLACEHOLDER}, ${WIDTH},
# ${HEIGHT} and ${PREFIX} are expanded from the environment at run time
# (typically injected by the provisioner, e.g. Terraform user-data).
cat << EOM > /var/www/html/index.html
<html>
  <head><title>Meow!</title></head>
  <body>
  <div style="width:800px;margin: 0 auto">

  <!-- BEGIN -->
  <center><img src="http://${PLACEHOLDER}/${WIDTH}/${HEIGHT}"></img></center>
  <center><h2>Meow World!</h2></center>
  Welcome to ${PREFIX}'s app. Welcome Welcome!
  <!-- END -->

  </div>
  </body>
</html>
EOM

echo "Script complete."
|
#!/bin/sh
# Installs PHPUnit via the PEAR binary bundled with MAMP on macOS.
# NOTE(review): pear.phpunit.de was retired upstream; confirm this channel
# still resolves before relying on this script.
PEAR='/Applications/MAMP/bin/php5/bin/pear'

echo "Installing PHPUnit for MAMP"
# Register the channels PHPUnit's package depends on, then upgrade PEAR
# itself before installing.
sudo $PEAR channel-discover pear.phpunit.de
sudo $PEAR channel-discover pear.symfony-project.com
sudo $PEAR upgrade PEAR
sudo $PEAR install phpunit/PHPUnit

echo "Please check installation by visiting dev/tests in SilverStripe, and ensure you get no error message"
|
#!/bin/bash
# Remove build output directories so the next build starts from a clean slate.
TOOLS_DIR="Tools"
BIN_DIR="Bin"

# FIX: quote the expansions so paths containing whitespace cannot be
# word-split or glob-expanded. The explicit -d checks are kept to mirror
# the original flow (rm -rf on a missing path would also be a no-op).
if [ -d "$TOOLS_DIR" ]; then
  rm -rf "$TOOLS_DIR"
fi

if [ -d "$BIN_DIR" ]; then
  rm -rf "$BIN_DIR"
fi
/**
 * Minimal HTTP helper around the Fetch API.
 */
class Client {
  /**
   * GET `url` and resolve with the parsed JSON body.
   *
   * Rejects with the error *message* (a plain string, not an Error),
   * preserving the original contract for existing callers.
   *
   * @param {string} url - Absolute or relative URL to fetch.
   * @returns {Promise<any>} The parsed JSON payload.
   */
  get(url) {
    // FIX: return the fetch chain directly instead of wrapping it in
    // `new Promise(...)` — the explicit-constructor anti-pattern added a
    // redundant promise layer without changing behavior.
    return fetch(url)
      .then(response => {
        if (!response.ok) {
          throw new Error('Network response was not ok');
        }
        return response.json();
      })
      .catch(error => Promise.reject(error.message));
  }
}

const client = new Client();

export default client;
def repeat_string(string, n):
    """Return ``string`` concatenated with itself ``n`` times.

    Python string repetition yields the empty string for ``n <= 0``.
    """
    return string * n


print(repeat_string("Hello", 4))
def predict_species(petal_length, petal_width, sepal_length, sepal_width):
    """Train a model on the given iris measurements and return its prediction.

    NOTE(review): ``Model`` is not defined or imported in this file —
    presumably provided elsewhere; confirm it is in scope before use.
    Training on the very same single sample that is then predicted looks
    suspicious — verify this is the intended workflow.
    """
    # Train the model
    model = Model()
    model.train([petal_length, petal_width, sepal_length, sepal_width])
    # Run the model
    prediction = model.predict([petal_length, petal_width, sepal_length, sepal_width])
    return prediction
'use strict';

(function(window) {
  /**
   * AppWindowFactory handle the launch request from gecko and
   * wrap the config with properer parameters.
   *
   * If gecko is asking us to open a webapp,
   * AppWindowFactory would do the instantiation and let
   * AppWindowManager to do the following app opening control via
   * event <code>launchapp</code>.
   *
   * If gecko is asking us to open an inline activity page,
   * AppWindowFactory would wrap the configuration and sent it to
   * ActivityWindowFactory for it to do instantiation via event
   * <code>launchactivity</code>.
   *
   * 
   *
   * @module AppWindowFactory
   */
  var AppWindowFactory = {
    // Register for gecko launch events, deferring until the Applications
    // registry is ready (manifests cannot be resolved before that).
    init: function awf_init() {
      /**
       * Wait for applicationready event to do the following work.
       *
       * @listens webapps-launch
       */
      if (Applications.ready) {
        window.addEventListener('webapps-launch', this);
        window.addEventListener('webapps-close', this);
        window.addEventListener('open-app', this);
      } else {
        var self = this;
        window.addEventListener('applicationready', function appReady(e) {
          window.removeEventListener('applicationready', appReady);
          window.addEventListener('webapps-launch', self);
          window.addEventListener('webapps-close', self);
          window.addEventListener('open-app', self);
        });
      }
    },

    /**
     * Dispatch the launch-related events registered in init().
     * Events without a manifestURL, or whose manifest cannot be resolved
     * by BrowserConfigHelper, are silently ignored.
     *
     * @param {Event} evt - one of 'webapps-launch', 'open-app',
     *                      'webapps-close'.
     */
    handleEvent: function awf_handleEvent(evt) {
      var detail = evt.detail;
      var manifestURL = detail.manifestURL;

      if (!manifestURL) {
        return;
      }

      var config = new BrowserConfigHelper(detail.url, detail.manifestURL);

      if (!config.manifest) {
        return;
      }

      switch (evt.type) {
        case 'webapps-launch':
          // TODO: Look up current opened window list,
          // and then create a new instance here.
          this.launch(config);
          break;
        case 'open-app':
          // System Message Handler API is asking us to open the specific URL
          // that handles the pending system message.
          // We will launch it in background if it's not handling an activity.
          config.isSystemMessage = true;
          if (detail.isActivity) {
            config.isActivity = true;
            if (detail.target.disposition &&
                detail.target.disposition == 'inline') {
              config.inline = true;
            }
          }
          config.changeURL = !detail.onlyShowApp;
          config.stayBackground = !detail.showApp;
          // TODO: Create activity window instance
          // or background app window instance for system message here.
          this.launch(config);
          break;
        case 'webapps-close':
          this.publish('killapp', config);
          break;
      }
    },

    /**
     * Route a wrapped launch config to the right consumer: inline
     * activities go to ActivityWindowFactory ('launchactivity'), the
     * search app is left to the rocketbar, the homescreen is ensured via
     * HomescreenLauncher, and anything not already running gets a fresh
     * AppWindow. In all non-early-return cases a 'launchapp' event is
     * published for AppWindowManager.
     *
     * @param {Object} config - config produced by BrowserConfigHelper.
     */
    launch: function awf_launch(config) {
      // Ignore requests to launch the system app itself.
      if (config.url === window.location.href) {
        return;
      }
      if (config.isActivity && config.inline) {
        this.publish('launchactivity', config);
        return;
      }

      // The rocketbar currently handles the management of
      // the search app
      if (config.manifest.role === 'search') {
        return;
      } else if (!AppWindowManager.isRunning(config) &&
                 config.origin !== HomescreenLauncher.origin) {
        new AppWindow(config);
      } else if (config.origin == HomescreenLauncher.origin) {
        HomescreenLauncher.getHomescreen().ensure();
      }

      this.publish('launchapp', config);
    },

    /**
     * Dispatch a CustomEvent on window with the given name and detail.
     *
     * @param {string} event  - event name.
     * @param {Object} detail - payload attached as evt.detail.
     */
    publish: function awf_publish(event, detail) {
      var evt = document.createEvent('CustomEvent');
      evt.initCustomEvent(event, true, false, detail);
      window.dispatchEvent(evt);
    }
  };

  window.AppWindowFactory = AppWindowFactory;
  AppWindowFactory.init();
}(this));
|
<reponame>LaudateCorpus1/oci-ruby-sdk
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
require 'openssl'
require 'securerandom'
module OCI
  module Auth
    # Contains utility methods to support functionality in the {OCI::Auth} module, for example being able
    # to extract information from certificates and scrubbing certificate information for calls to Auth Service
    module Util
      AUTHORIZATION_HEADER = 'Authorization'.freeze
      AUTHORIZATION_HEADER_VALUE = 'Bearer Oracle'.freeze

      # Extracts the tenancy OCID from the certificate subject: it lives in
      # an OU entry of the form "opc-tenant:<ocid>".
      # Raises if no such OU entry is present.
      def self.get_tenancy_id_from_certificate(x509_certificate)
        subject_array = x509_certificate.subject.to_a
        subject_array.each do |subject_name|
          # subject_name is actually a triple like:
          #   ["OU", "<name>", "<number>"]
          if subject_name[0] == 'OU' && subject_name[1].include?('opc-tenant:')
            # 'opc-tenant:' is 11 character long, so we want to start at the index after that and to the end of the string (-1)
            return subject_name[1][11..-1]
          end
        end

        raise 'Certificate did not contain a tenancy in its subject'
      end

      # Inserts a ':' between every pair of hex characters, e.g.
      # "aabbcc" -> "aa:bb:cc".
      def self.colon_separate_fingerprint(raw_fingerprint)
        raw_fingerprint.gsub(/(.{2})(?=.)/, '\1:\2')
      end

      # Strips PEM armor lines and newlines, leaving only the base64 body.
      def self.sanitize_certificate_string(cert_string)
        cert_string.gsub('-----BEGIN CERTIFICATE-----', '')
                   .gsub('-----END CERTIFICATE-----', '')
                   .gsub('-----BEGIN PUBLIC KEY-----', '')
                   .gsub('-----END PUBLIC KEY-----', '')
                   .delete("\n")
      end

      # Builds a Net::HTTP request of the given type ('get'/'post'/'put')
      # carrying the instance-metadata authorization header.
      # NOTE(review): Net::HTTP is used here but 'net/http' is not required
      # in this file — presumably required by the SDK entry point; confirm.
      def self.get_metadata_request(request_url, type)
        case type
        when 'post'
          request = Net::HTTP::Post.new(request_url)
        when 'get'
          request = Net::HTTP::Get.new(request_url)
        when 'put'
          request = Net::HTTP::Put.new(request_url)
        else
          raise "Unknown request-type #{type} provided."
        end
        request[AUTHORIZATION_HEADER] = AUTHORIZATION_HEADER_VALUE
        request
      end

      # Reads a PEM private key file (with ~ expansion) and parses it.
      def self.load_private_key_from_file(private_key_file, passphrase)
        private_key_data = File.read(File.expand_path(private_key_file)).to_s.strip
        load_private_key(private_key_data, passphrase)
      end

      # Parses PEM private key data into an OpenSSL::PKey::RSA.
      def self.load_private_key(private_key_date, passphrase)
        OpenSSL::PKey::RSA.new(
          private_key_date,
          # FIX: the fallback was scrubbed to the invalid token '<PASSWORD>'.
          # Restore the random throwaway passphrase (this file requires
          # 'securerandom' solely for this call); supplying one prevents
          # OpenSSL from prompting on stdin for encrypted keys.
          passphrase || SecureRandom.uuid
        )
      end
    end
  end
end
|
/**
 * Compute the arithmetic mean of two numbers.
 *
 * @param {number} num1 - First value.
 * @param {number} num2 - Second value.
 * @returns {number} The average of the two inputs.
 */
function calculateAvg(num1, num2) {
  const sum = num1 + num2;
  return sum / 2;
}
<reponame>DarioSilva7/afip.js<filename>src/Class/RegisterScopeTen.js<gh_stars>10-100
const AfipWebService = require('./AfipWebService');
/**
* SDK for AFIP Register Scope Ten (ws_sr_padron_a10)
*
* @link http://www.afip.gob.ar/ws/ws_sr_padron_a10/manual_ws_sr_padron_a10_v1.1. WS Specification
**/
module.exports = class RegisterScopeTen extends AfipWebService {
constructor(afip){
const options = {
soapV12: false,
WSDL: 'ws_sr_padron_a10-production.wsdl',
URL: 'https://aws.afip.gov.ar/sr-padron/webservices/personaServiceA10',
WSDL_TEST: 'ws_sr_padron_a10.wsdl',
URL_TEST: 'https://awshomo.afip.gov.ar/sr-padron/webservices/personaServiceA10',
afip
}
super(options);
}
/**
* Asks to web service for servers status {@see WS
* Specification item 3.1}
*
* @return object { appserver : Web Service status,
* dbserver : Database status, authserver : Autentication
* server status}
**/
async getServerStatus() {
return this.executeRequest('dummy');
}
/**
* Asks to web service for taxpayer details {@see WS
* Specification item 3.2}
*
* @throws Exception if exists an error in response
*
* @return object|null if taxpayer does not exists, return null,
* if it exists, returns full response {@see
* WS Specification item 3.2.2}
**/
async getTaxpayerDetails(identifier) {
// Get token and sign
let { token, sign } = await this.afip.GetServiceTA('ws_sr_padron_a10');
// Prepare SOAP params
let params = {
token, sign,
cuitRepresentada: this.afip.CUIT,
idPersona: identifier
};
return this.executeRequest('getPersona', params)
.then(res => res.persona)
.catch(err => { if (err.message.indexOf('No existe') !== -1) { return null } else { throw err }});
}
/**
* Send request to AFIP servers
*
* @param operation SOAP operation to execute
* @param params Parameters to send
*
* @return mixed Operation results
**/
async executeRequest(operation, params = {})
{
let results = await super.executeRequest(operation, params);
return results[operation === 'getPersona' ? 'personaReturn' : 'return'];
}
}
|
require 'nokogiri'
require 'set'
# NOTE: Export your bookmarks to
# ~/vimwiki/bookmarks.html
module Markita
  class Base
    # Parses the exported browser bookmarks file into a flat list; each
    # bookmark carries its folder path as tags plus keywords scanned from
    # its title and URL.
    class Bookmarks
      KW = /\b\w+\b/

      Bookmark = Struct.new(:href, :title, :tags, :keywords)

      attr_reader :list, :tags, :topics

      def initialize
        @list = []
        traverse!
        @tags = @list.map{_1.tags}.flatten.uniq.sort
        topics = Hash.new{|h,k| h[k]=0}
        @list.each do |bookmark|
          bookmark.keywords.each do |kw|
            topics[kw] += 1
          end
        end
        n = Math.sqrt(@list.length)
        # Heuristic pruning: drop keywords that are too common (v > m*n) or
        # too rare (v*m < n) relative to list size; longer words get a
        # larger allowance. NOTE(review): thresholds look hand-tuned.
        topics.delete_if{|k,v|m=Math.sqrt(3.0*[10,k.length-0.5].min); v>m*n or v*m<n}
        @topics = topics.keys.sort{|a,b|topics[b]<=>topics[a]}
      end

      # Parse the Netscape-format bookmarks HTML and walk each top-level
      # definition list.
      def traverse!
        @doc = Nokogiri::HTML File.read File.join(ROOT, 'bookmarks.html')
        @folders = []
        @doc.xpath('./html/body/dl').each do |shoot|
          traverse(shoot)
        end
        # Don't need to carry these around anymore:
        @doc = @folders = nil
      end

      # Recursive walk: <h3> pushes a folder name, <dl>/<dt> recurse (a
      # closing <dl> pops the folder), <a> records a bookmark.
      def traverse(branch)
        name = branch.name
        case name
        when 'h3'
          @folders.push branch.text
        when 'dl', 'dt'
          branch.xpath('./*').each do |shoot|
            traverse(shoot)
          end
          @folders.pop if name == 'dl'
        when 'a'
          href,title = branch['href'],branch.text
          keywords = (title+' '+href).scan(KW).map{|kw| kw.downcase}.uniq
          # Skip the root folder; remaining folder names become tags.
          tags = @folders[1..-1].uniq
          bookmark = Bookmark.new
          bookmark.href = href
          bookmark.title = title.empty? ? href : title
          bookmark.tags = tags
          bookmark.keywords = keywords
          @list.push bookmark
        end
      end
    end

    # Renders the bookmark list as markdown, filterable by ?search, ?topic
    # and ?tag query parameters; bookmarks are grouped by tag path.
    get '/bookmarks.html' do
      search = params['search']&.scan(Bookmarks::KW)&.map{|kw| kw.downcase}
      topic = params['topic']
      tag = params['tag']
      bookmarks = Bookmarks.new
      text = "# Bookmarks\n"
      text << %Q(! Search:[search] [submit="Go!"] ()\n)
      text << "Tags:\n"
      bookmarks.tags.each{text << "[#{_1}](?tag=#{_1})\n"}
      text << "\nKeywords:\n"
      bookmarks.topics.each{text << "[#{_1}](?topic=#{_1})\n"}
      seen = Set.new
      sort = lambda {|a,b| (_=a.tags<=>b.tags)==0 ? a.title<=>b.title : _}
      bookmarks.list.sort{sort[_1,_2]}.each do |bookmark|
        keywords,tags = bookmark.keywords,bookmark.tags
        next unless tag.nil? or tags.include? tag
        next unless topic.nil? or keywords.include? topic
        next unless search.nil? or search.all?{keywords.include? _1}
        unless seen.include? tags
          seen.add tags
          text << "# #{tags.to_a.join('/')}\n"
        end
        # NOTE(review): this gsub replaces '[' with '[' and ']' with ']'
        # — a no-op as written; the originals were probably HTML entities
        # (&#91; / &#93;) lost in transit. Confirm intended escaping.
        title = bookmark.title.gsub('[', '[').gsub(']', ']')
        href = bookmark.href
        text << "* [#{title}](#{href})\n"
      end
      Markdown.new('Bookmarks').markdown text
    end
  end
end
|
#include "BMPInfoHeaderType.h"
// Map a raw BMP info-header size field (in bytes) to the corresponding
// header variant; any unrecognized size yields UnkownOrInvalid.
BF::BMPInfoHeaderType BF::ConvertBMPInfoHeaderType(unsigned int infoHeaderType)
{
	// 12 bytes is shared by BITMAPCOREHEADER and the OS/2 1.x header.
	if (infoHeaderType == 12u)  return BMPInfoHeaderType::OS21XBitMapHeader;
	if (infoHeaderType == 16u)  return BMPInfoHeaderType::OS22XBitMapHeader;
	if (infoHeaderType == 40u)  return BMPInfoHeaderType::BitMapInfoHeader;
	if (infoHeaderType == 52u)  return BMPInfoHeaderType::BitMapV2InfoHeader;
	if (infoHeaderType == 56u)  return BMPInfoHeaderType::BitMapV3InfoHeader;
	if (infoHeaderType == 108u) return BMPInfoHeaderType::BitMapV4Header;
	if (infoHeaderType == 124u) return BMPInfoHeaderType::BitMapV5Header;

	return BMPInfoHeaderType::UnkownOrInvalid;
}
// Inverse mapping: return the serialized header size in bytes for a
// given variant. UnkownOrInvalid (and any unexpected value) maps to
// (unsigned int)-1, i.e. UINT_MAX, exactly as the original did.
unsigned int BF::ConvertBMPInfoHeaderType(BMPInfoHeaderType infoHeaderType)
{
	if (infoHeaderType == BF::BMPInfoHeaderType::BitMapCoreHeader ||
		infoHeaderType == BF::BMPInfoHeaderType::OS21XBitMapHeader)
	{
		return 12u;
	}

	if (infoHeaderType == BF::BMPInfoHeaderType::OS22XBitMapHeader)
	{
		return 16u;
	}

	if (infoHeaderType == BF::BMPInfoHeaderType::BitMapInfoHeader)
	{
		return 40u;
	}

	if (infoHeaderType == BF::BMPInfoHeaderType::BitMapV2InfoHeader)
	{
		return 52u;
	}

	if (infoHeaderType == BF::BMPInfoHeaderType::BitMapV3InfoHeader)
	{
		return 56u;
	}

	if (infoHeaderType == BF::BMPInfoHeaderType::BitMapV4Header)
	{
		return 108u;
	}

	if (infoHeaderType == BF::BMPInfoHeaderType::BitMapV5Header)
	{
		return 124u;
	}

	return -1;
}
#!/bin/bash
# Container entrypoint for the ddev MariaDB/MySQL database service:
# optionally restores a snapshot, seeds a fresh datadir from a prepared
# base backup, runs mysql_upgrade on server/datadir version mismatch,
# then execs mysqld in the foreground.
set -x
set -eu
set -o pipefail

SOCKET=/var/tmp/mysql.sock

rm -f /tmp/healthy

# Wait for mysql server to be ready.
function serverwait {
	for i in {60..0};
	do
        if mysqladmin ping -uroot --socket=$SOCKET >/dev/null 2>&1; then
            return 0
        fi
        # Test to make sure we got it started in the first place. kill -s 0 just tests to see if process exists.
        if ! kill -s 0 $pid 2>/dev/null; then
            echo "MariaDB initialization startup failed"
            return 2
        fi
        sleep 1
	done
	return 1
}

# If we have a restore_snapshot arg, get the snapshot directory
# otherwise, fail and abort startup
if [ $# = "2" -a "${1:-}" = "restore_snapshot" ] ; then
    snapshot_dir="/mnt/ddev_config/db_snapshots/${2:-nothingthere}"
    if [ -d "$snapshot_dir" ] ; then
        echo "Restoring from snapshot directory $snapshot_dir"
        # Ugly macOS .DS_Store in this directory can break the restore
        find ${snapshot_dir} -name .DS_Store -print0 | xargs rm -f
        rm -rf /var/lib/mysql/*
    else
        echo "$snapshot_dir does not exist, not attempting restore of snapshot"
        unset snapshot_dir
        exit 101
    fi
fi

# Major.minor of the installed server binary, e.g. "10.4".
server_db_version=$(PATH=$PATH:/usr/sbin:/usr/local/bin:/usr/local/mysql/bin mysqld -V 2>/dev/null | awk '{sub( /\.[0-9]+(-.*)?$/, "", $3); print $3 }')

# If we have extra mariadb cnf files,, copy them to where they go.
if [ -d /mnt/ddev_config/mysql -a "$(echo /mnt/ddev_config/mysql/*.cnf)" != "/mnt/ddev_config/mysql/*.cnf" ] ; then
    echo "!includedir /mnt/ddev_config/mysql" >/etc/mysql/conf.d/ddev.cnf
fi

# Prefer xtrabackup (MySQL) when present, mariabackup otherwise.
export BACKUPTOOL=mariabackup
if command -v xtrabackup; then BACKUPTOOL="xtrabackup"; fi

# If mariadb has not been initialized, copy in the base image from either the default starter image (/mysqlbase)
# or from a provided $snapshot_dir.
if [ ! -f "/var/lib/mysql/db_mariadb_version.txt" ]; then
    # If snapshot_dir is not set, this is a normal startup, so
    # tell healthcheck to wait by touching /tmp/initializing
    if [ -z "${snapshot_dir:-}" ] ; then
        touch /tmp/initializing
    fi
    target=${snapshot_dir:-/mysqlbase/}
    name=$(basename $target)
    rm -rf /var/lib/mysql/* /var/lib/mysql/.[a-z]*
    ${BACKUPTOOL} --prepare --skip-innodb-use-native-aio --target-dir "$target" --user=root --password=root --socket=$SOCKET 2>&1 | tee "/var/log/mariabackup_prepare_$name.log"
    ${BACKUPTOOL} --copy-back --skip-innodb-use-native-aio --force-non-empty-directories --target-dir "$target" --user=root --password=root --socket=$SOCKET 2>&1 | tee "/var/log/mariabackup_copy_back_$name.log"
    echo "Database initialized from ${target}"
    rm -f /tmp/initializing
fi

database_db_version=$(cat /var/lib/mysql/db_mariadb_version.txt)

if [ "${server_db_version}" != "${database_db_version}" ]; then
    echo "Starting with db server version=${server_db_version} but database was created with '${database_db_version}'."
    echo "Attempting upgrade, but it may not work, you may need to export your database, 'ddev delete --omit-snapshot', start, and reimport".
    # Start a throwaway server (no networking/grants) just to run mysql_upgrade.
    PATH=$PATH:/usr/sbin:/usr/local/bin:/usr/local/mysql/bin mysqld --skip-networking --skip-grant-tables --socket=$SOCKET >/tmp/mysqld_temp_startup.log 2>&1 &
    pid=$!
    set +x
    if ! serverwait ; then
        echo "Failed to get mysqld running to run mysql_upgrade"
        exit 103
    fi
    set -x
    echo "Attempting mysql_upgrade because db server version ${server_db_version} is not the same as database db version ${database_db_version}"
    mysql_upgrade --socket=$SOCKET
    kill $pid
fi

# And update the server db version we have here.
echo $server_db_version >/var/lib/mysql/db_mariadb_version.txt

cp -r /home/{.my.cnf,.bashrc} ~/
mkdir -p /mnt/ddev-global-cache/bashhistory/${HOSTNAME} || true

echo
echo 'MySQL init process done. Ready for start up.'
echo

echo "Starting mysqld."
tail -f /var/log/mysqld.log &
# Foreground server: PID 1 of the container from here on.
exec mysqld --server-id=0
|
# The Brewfile handles Homebrew-based app and library installs, but there may
# still be updates and installables in the Mac App Store. There's a nifty
# command line interface to it that we can use to just install everything, so
# yeah, let's do that.
# -i installs, -a applies all available updates.
echo "› sudo softwareupdate -i -a"
sudo softwareupdate -i -a
#!/usr/bin/env bash

# replication group scaling tests: horizontal and vertical scaling

THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
ROOT_DIR="$THIS_DIR/../../../.."
SCRIPTS_DIR="$ROOT_DIR/scripts"

# Shared helpers: logging, kubectl wrappers, assertions, ElastiCache API.
source "$SCRIPTS_DIR/lib/common.sh"
source "$SCRIPTS_DIR/lib/k8s.sh"
source "$SCRIPTS_DIR/lib/testutil.sh"
source "$SCRIPTS_DIR/lib/aws/elasticache.sh"

check_is_installed jq "Please install jq before running this script."

test_name="$( filenoext "${BASH_SOURCE[0]}" )"
ack_ctrl_pod_id=$( controller_pod_id )
# NOTE(review): $service_name and $AWS_REGION are expected to come from the
# sourced libraries / environment — confirm before running standalone.
debug_msg "executing test group: $service_name/$test_name------------------------------"
debug_msg "selected AWS region: $AWS_REGION"
# attempt to scale out a cluster mode disabled RG with no replicas: negative test, expect failure
test_modify_rg_cmd_scale_out() {
    debug_msg "executing ${FUNCNAME[0]}"

    # generate and apply yaml for replication group creation
    clear_rg_parameter_variables
    rg_id="test-cmd-scale-out"
    automatic_failover_enabled="false"
    num_node_groups="1"
    replicas_per_node_group="0"
    multi_az_enabled="false"
    output_msg=$(provide_replication_group_yaml | kubectl apply -f - 2>&1)
    exit_if_rg_config_application_failed $? "$rg_id"

    # ensure resource successfully created and available, check resource is as expected
    wait_and_assert_replication_group_available_status
    k8s_assert_replication_group_shard_count "$rg_id" 1
    k8s_assert_replication_group_replica_count "$rg_id" 0

    # update config and apply: attempt to scale out
    # config application should actually succeed in this case, but leave RG with Terminal Condition set True
    num_node_groups=2
    output_msg=$(provide_replication_group_yaml | kubectl apply -f - 2>&1)
    exit_if_rg_config_application_failed $? "$rg_id"

    # ensure terminal condition exists, is set true, and has expected message
    check_rg_terminal_condition_true "$rg_id" "Operation is only applicable for cluster mode enabled"
}
# create a cluster mode disabled RG with 3 replicas, and scale up
test_modify_rg_cmd_scale_up() {
    debug_msg "executing ${FUNCNAME[0]}"

    # generate and apply yaml for replication group creation
    clear_rg_parameter_variables
    rg_id="test-cmd-scale-up"
    automatic_failover_enabled="true"
    cache_node_type="cache.t3.micro"
    num_node_groups=1
    replicas_per_node_group=3
    multi_az_enabled="false"
    output_msg=$(provide_replication_group_yaml | kubectl apply -f - 2>&1)
    exit_if_rg_config_application_failed $? "$rg_id"

    # ensure resource successfully created and available, check resource is as expected
    wait_and_assert_replication_group_available_status
    aws_assert_replication_group_property "$rg_id" ".CacheNodeType" "cache.t3.micro"

    # update config and apply: scale up to larger instance
    cache_node_type="cache.t3.small"
    output_msg=$(provide_replication_group_yaml | kubectl apply -f - 2>&1)
    exit_if_rg_config_application_failed $? "$rg_id"

    # wait and assert new state
    wait_and_assert_replication_group_available_status
    aws_assert_replication_group_property "$rg_id" ".CacheNodeType" "cache.t3.small"
}
# create a cluster mode enabled RG, then attempt to scale out and increase replica count
test_modify_rg_cme_scale_out_add_replica() {
    debug_msg "executing ${FUNCNAME[0]}"

    # generate and apply yaml for replication group creation
    clear_rg_parameter_variables
    rg_id="rg-cme-scale-out-add-replica"
    num_node_groups="2"
    replicas_per_node_group="1"
    # Append an explicit per-shard AZ layout to the generated base yaml.
    yaml_base="$(provide_replication_group_yaml)"
    rg_yaml=$(cat <<EOF
$yaml_base
  nodeGroupConfiguration:
    - nodeGroupID: "0010"
      primaryAvailabilityZone: us-east-1a
      replicaAvailabilityZones:
        - us-east-1b
    - nodeGroupID: "0020"
      primaryAvailabilityZone: us-east-1b
      replicaAvailabilityZones:
        - us-east-1a
EOF
)
    output_msg=$(echo "$rg_yaml" | kubectl apply -f - 2>&1)
    exit_if_rg_config_application_failed $? "$rg_id"

    # ensure resource successfully created and available, check resource is as expected
    wait_and_assert_replication_group_available_status
    k8s_assert_replication_group_shard_count "$rg_id" 2
    k8s_assert_replication_group_replica_count "$rg_id" 1
    k8s_assert_replication_group_total_node_count "$rg_id" 4

    # update config and apply: scale out and add replicas
    num_node_groups="3"
    replicas_per_node_group="2"
    yaml_base="$(provide_replication_group_yaml)"
    rg_yaml=$(cat <<EOF
$yaml_base
  nodeGroupConfiguration:
    - nodeGroupID: "0010"
      primaryAvailabilityZone: us-east-1a
      replicaAvailabilityZones:
        - us-east-1b
        - us-east-1a
    - nodeGroupID: "0020"
      primaryAvailabilityZone: us-east-1b
      replicaAvailabilityZones:
        - us-east-1a
        - us-east-1b
    - nodeGroupID: "0030"
      primaryAvailabilityZone: us-east-1a
      replicaAvailabilityZones:
        - us-east-1b
        - us-east-1a
EOF
)
    output_msg=$(echo "$rg_yaml" | kubectl apply -f - 2>&1)
    exit_if_rg_config_application_failed $? "$rg_id"

    # wait and assert new resource state
    wait_and_assert_replication_group_available_status
    k8s_assert_replication_group_shard_count "$rg_id" 3
    k8s_assert_replication_group_replica_count "$rg_id" 2
    k8s_assert_replication_group_total_node_count "$rg_id" 9
}
# scale out a cluster mode enabled RG where replica count is uneven between shards (i.e. there is a replicaCount
# specified for each node group rather than one replicasPerNodeGroup property for the entire RG)
test_modify_rg_cme_scale_out_uneven_shards() {
    debug_msg "executing ${FUNCNAME[0]}"

    # generate and apply yaml for replication group creation
    clear_rg_parameter_variables
    rg_id="rg-cme-scale-out-uneven-shards"
    yaml_base=$(provide_replication_group_yaml_basic "$rg_id")
    rg_yaml=$(cat <<EOF
$yaml_base
  automaticFailoverEnabled: true
  cacheNodeType: cache.t3.micro
  numNodeGroups: 2
  multiAZEnabled: true
  nodeGroupConfiguration:
    - nodeGroupID: "0010"
      primaryAvailabilityZone: us-east-1a
      replicaAvailabilityZones:
        - us-east-1b
      replicaCount: 1
    - nodeGroupID: "0020"
      primaryAvailabilityZone: us-east-1b
      replicaAvailabilityZones:
        - us-east-1a
        - us-east-1c
      replicaCount: 2
EOF
)
    output_msg=$(echo "$rg_yaml" | kubectl apply -f - 2>&1)
    exit_if_rg_config_application_failed $? "$rg_id"

    # ensure resource successfully created and available, check resource is as expected
    wait_and_assert_replication_group_available_status
    k8s_assert_replication_group_shard_count "$rg_id" 2
    k8s_assert_replication_group_total_node_count "$rg_id" 5 #skip checking each node group for now

    # update config and apply: scale out and add replicas
    yaml_base=$(provide_replication_group_yaml_basic "$rg_id")
    rg_yaml=$(cat <<EOF
$yaml_base
  automaticFailoverEnabled: true
  cacheNodeType: cache.t3.micro
  numNodeGroups: 3
  multiAZEnabled: true
  nodeGroupConfiguration:
    - nodeGroupID: "0010"
      primaryAvailabilityZone: us-east-1a
      replicaAvailabilityZones:
        - us-east-1b
      replicaCount: 1
    - nodeGroupID: "0020"
      primaryAvailabilityZone: us-east-1b
      replicaAvailabilityZones:
        - us-east-1a
        - us-east-1c
      replicaCount: 2
    - nodeGroupID: "0030"
      primaryAvailabilityZone: us-east-1a
      replicaAvailabilityZones:
        - us-east-1b
      replicaCount: 1
EOF
)
    output_msg=$(echo "$rg_yaml" | kubectl apply -f - 2>&1)
    exit_if_rg_config_application_failed $? "$rg_id"

    # wait and assert new resource state
    wait_and_assert_replication_group_available_status
    k8s_assert_replication_group_shard_count "$rg_id" 3
    k8s_assert_replication_group_total_node_count "$rg_id" 7
}
# basic scale out test for cluster mode enabled replication groups, # replicas/node group unchanged
test_modify_rg_cme_scale_out_basic() {
    debug_msg "executing ${FUNCNAME[0]}"

    # generate and apply yaml for replication group creation
    clear_rg_parameter_variables
    rg_id="rg-cme-scale-out-basic"
    num_node_groups="2"
    replicas_per_node_group="1"
    yaml_base="$(provide_replication_group_yaml)"
    rg_yaml=$(cat <<EOF
$yaml_base
  nodeGroupConfiguration:
    - nodeGroupID: "0010"
      primaryAvailabilityZone: us-east-1a
      replicaAvailabilityZones:
        - us-east-1b
    - nodeGroupID: "0020"
      primaryAvailabilityZone: us-east-1a
      replicaAvailabilityZones:
        - us-east-1b
EOF
)
    output_msg=$(echo "$rg_yaml" | kubectl apply -f - 2>&1)
    exit_if_rg_config_application_failed $? "$rg_id"

    # ensure resource successfully created and available, check resource is as expected
    wait_and_assert_replication_group_available_status
    k8s_assert_replication_group_shard_count "$rg_id" 2
    k8s_assert_replication_group_replica_count "$rg_id" 1
    k8s_assert_replication_group_total_node_count "$rg_id" 4

    # update config and apply: scale out
    num_node_groups="3"
    yaml_base="$(provide_replication_group_yaml)"
    rg_yaml=$(cat <<EOF
$yaml_base
  nodeGroupConfiguration:
    - nodeGroupID: "0010"
      primaryAvailabilityZone: us-east-1a
      replicaAvailabilityZones:
        - us-east-1b
    - nodeGroupID: "0020"
      primaryAvailabilityZone: us-east-1a
      replicaAvailabilityZones:
        - us-east-1b
    - nodeGroupID: "0030"
      primaryAvailabilityZone: us-east-1a
      replicaAvailabilityZones:
        - us-east-1b
EOF
)
    output_msg=$(echo "$rg_yaml" | kubectl apply -f - 2>&1)
    exit_if_rg_config_application_failed $? "$rg_id"

    # wait and assert resource state
    wait_and_assert_replication_group_available_status
    k8s_assert_replication_group_shard_count "$rg_id" 3
    k8s_assert_replication_group_replica_count "$rg_id" 1
    k8s_assert_replication_group_total_node_count "$rg_id" 6
}
# run tests
# Tests run sequentially; each aborts the script on config-application failure.
test_modify_rg_cmd_scale_out # currently failing, terminal condition frequently toggles (is this desired behavior?)
test_modify_rg_cmd_scale_up
test_modify_rg_cme_scale_out_add_replica # failing, terminal condition shows "2 validation errors" after new config - issue with distribution of AZs in test case?
test_modify_rg_cme_scale_out_uneven_shards
test_modify_rg_cme_scale_out_basic

# Delete the replication groups created above.
k8s_perform_rg_test_cleanup
using System;
namespace AreaOfRectangle
{
    /// <summary>
    /// Console utility: reads a rectangle's length and breadth from stdin
    /// and prints the computed area.
    /// </summary>
    class Program
    {
        static void Main(string[] args)
        {
            Console.WriteLine("Program to Calculate the Area of a Rectangle");

            Console.WriteLine("Please enter the Length: ");
            var rectLength = int.Parse(Console.ReadLine());

            Console.WriteLine("Please enter the Breadth: ");
            var rectBreadth = int.Parse(Console.ReadLine());

            var rectArea = rectLength * rectBreadth;
            Console.WriteLine($"The area of Rectangle is: {rectArea}");

            // Keep the console window open until the user presses Enter.
            Console.ReadLine();
        }
    }
}
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.grasea.grandroid.service;
import android.app.Service;
import android.content.Intent;
import android.os.IBinder;
import com.grasea.grandroid.data.DataAgent;
/**
* @author Rovers
*/
/**
 * Base Android service that lazily provides a shared {@link DataAgent}.
 * Binding is not supported.
 *
 * @author Rovers
 */
public class BasicService extends Service {

    /** Lazily-created data access helper, shared by subclasses. */
    protected DataAgent dataAgent;

    /**
     * This service is not bindable.
     *
     * @param arg0 the binding intent (ignored)
     * @return always {@code null}
     */
    @Override
    public IBinder onBind(Intent arg0) {
        return null;
    }

    /**
     * Returns the shared {@link DataAgent}, creating it on first use.
     *
     * @return the data agent bound to this service
     */
    public DataAgent getData() {
        DataAgent agent = dataAgent;
        if (agent == null) {
            agent = new DataAgent(this);
            dataAgent = agent;
        }
        return agent;
    }
}
|
def nearestElements(A):
    """Return (i, j, A[i], A[j]) for the pair with the smallest absolute difference.

    Bug fix: the original compared each difference against
    ``abs(min1 - min2)`` with both sentinels at +inf, and ``inf - inf`` is
    NaN — every ``< NaN`` comparison is False, so the best pair was NEVER
    updated and the function always returned (None, None, inf, inf).
    Track the best difference in its own variable instead.

    Returns (None, None, inf, inf) when ``A`` has fewer than two elements,
    matching the original sentinel values.
    """
    best_diff = float('inf')
    min1, min2 = float('inf'), float('inf')
    x, y = None, None
    n = len(A)
    for i in range(n):
        for j in range(i + 1, n):
            diff = abs(A[i] - A[j])
            if diff < best_diff:
                best_diff = diff
                min1, min2 = A[i], A[j]
                x, y = i, j
    return x, y, min1, min2


A = [5, 2, 4, 7, 8, 9, 1]
x, y, min1, min2 = nearestElements(A)
print("The nearest elements are", min1, "and", min2)
<filename>ochk/sdk/gateway_policy.go
package sdk
import (
"context"
"errors"
"fmt"
"github.com/chmurakrajowa/terraform-provider-ochk/ochk/sdk/gen/client/gateway_policies"
"github.com/chmurakrajowa/terraform-provider-ochk/ochk/sdk/gen/models"
"net/http"
)
// GatewayPolicyProxy bundles the generated gateway_policies client with the
// HTTP client used to execute its requests.
type GatewayPolicyProxy struct {
	httpClient *http.Client
	service gateway_policies.ClientService
}
func (p *GatewayPolicyProxy) Read(ctx context.Context, gatewayPolicyID string) (*models.GatewayPolicy, error) {
params := &gateway_policies.GatewayPolicyGetUsingGETParams{
GatewayPolicyID: gatewayPolicyID,
Context: ctx,
HTTPClient: p.httpClient,
}
response, err := p.service.GatewayPolicyGetUsingGET(params)
if err != nil {
var notFound *gateway_policies.GatewayPolicyGetUsingGETNotFound
if ok := errors.As(err, ¬Found); ok {
return nil, &NotFoundError{Err: err}
}
return nil, fmt.Errorf("error while reading gateway policy: %w", err)
}
if !response.Payload.Success {
return nil, fmt.Errorf("retrieving gateway policy failed: %s", response.Payload.Messages)
}
return response.Payload.GatewayPolicy, nil
}
// ListByDisplayName fetches all gateway policies whose display name matches
// displayName. It returns an error both for transport failures and for
// responses the service itself marks as unsuccessful.
func (p *GatewayPolicyProxy) ListByDisplayName(ctx context.Context, displayName string) ([]*models.GatewayPolicy, error) {
	response, err := p.service.GatewayPolicyListUsingGET(&gateway_policies.GatewayPolicyListUsingGETParams{
		DisplayName: &displayName,
		Context:     ctx,
		HTTPClient:  p.httpClient,
	})
	if err != nil {
		return nil, fmt.Errorf("error while listing gateway policies: %w", err)
	}

	payload := response.Payload
	if !payload.Success {
		return nil, fmt.Errorf("listing gateway policies failed: %s", payload.Messages)
	}

	return payload.GatewayPolicyCollection, nil
}
|
#!/bin/bash
# Launch one simulation run: geometry factor 1.0, 32 simulations.
python RunSimulation.py --Geo 1.0 --sim_num 32
|
package torsvc
import (
"fmt"
"net"
)
// RegularNet is an implementation of the Net interface that defines behaviour
// for Regular network connections. Each method is a thin pass-through to the
// corresponding net package function.
type RegularNet struct{}

// Dial on the regular network uses net.Dial
func (r *RegularNet) Dial(network, address string) (net.Conn, error) {
	return net.Dial(network, address)
}

// LookupHost for regular network uses the net.LookupHost function
func (r *RegularNet) LookupHost(host string) ([]string, error) {
	return net.LookupHost(host)
}

// LookupSRV for regular network uses net.LookupSRV function
func (r *RegularNet) LookupSRV(service, proto, name string) (string, []*net.SRV, error) {
	return net.LookupSRV(service, proto, name)
}

// ResolveTCPAddr for regular network uses net.ResolveTCPAddr function
func (r *RegularNet) ResolveTCPAddr(network, address string) (*net.TCPAddr, error) {
	return net.ResolveTCPAddr(network, address)
}
// TorProxyNet is an implementation of the Net interface that defines behaviour
// for Tor network connections
type TorProxyNet struct {
	// TorDNS is the IP:PORT of the DNS server for Tor to use for SRV queries
	TorDNS string

	// TorSocks is the port which Tor's exposed SOCKS5 proxy is listening on.
	// This is used for an outbound-only mode, so the node will not listen for
	// incoming connections
	TorSocks string
}

// Dial on the Tor network uses the torsvc TorDial() function, and requires
// that network specified be tcp because only that is supported
func (t *TorProxyNet) Dial(network, address string) (net.Conn, error) {
	if network != "tcp" {
		return nil, fmt.Errorf("Cannot dial non-tcp network via Tor")
	}
	return TorDial(address, t.TorSocks)
}

// LookupHost on Tor network uses the torsvc TorLookupHost function.
func (t *TorProxyNet) LookupHost(host string) ([]string, error) {
	return TorLookupHost(host, t.TorSocks)
}

// LookupSRV on Tor network uses the torsvc TorLookupHost function.
func (t *TorProxyNet) LookupSRV(service, proto, name string) (string, []*net.SRV, error) {
	return TorLookupSRV(service, proto, name, t.TorSocks, t.TorDNS)
}

// ResolveTCPAddr on Tor network uses the torsvc TorResolveTCP function, and
// requires network to be "tcp" because only "tcp" is supported
func (t *TorProxyNet) ResolveTCPAddr(network, address string) (*net.TCPAddr, error) {
	if network != "tcp" {
		// FIX: the error message was copy-pasted from Dial and claimed the
		// failure was a dial; this method resolves, it does not dial.
		return nil, fmt.Errorf("Cannot resolve non-tcp network via Tor")
	}
	return TorResolveTCP(address, t.TorSocks)
}
|
from typing import List, Dict, Set
class TaskManager:
    """Tracks tasks, their named I/O parameters, and directed connections
    from one task's output parameter to another task's input."""

    def __init__(self):
        # task_id -> {'input_params': [...], 'output_params': [...]}
        self.tasks = {}
        # output_task_id -> {output_param -> set of consumer input_task_ids}
        self.connections = {}

    def add_task(self, task_id: str, input_params: List[str], output_params: List[str]):
        """Register a task with its named input and output parameters."""
        self.tasks[task_id] = {'input_params': input_params, 'output_params': output_params}

    def add_connection(self, output_task_id: str, output_param: str, input_task_id: str, input_param: str):
        """Record a directed connection output_task_id.output_param -> input_task_id.

        Note: input_param is accepted for interface compatibility but is not
        stored — only the consuming task id is recorded, as in the original.
        """
        if output_task_id not in self.connections:
            self.connections[output_task_id] = {}
        if output_param not in self.connections[output_task_id]:
            self.connections[output_task_id][output_param] = set()
        self.connections[output_task_id][output_param].add(input_task_id)

    def get_tasks_connected_to(self, task_id: str) -> List[str]:
        """Return every task directly connected to task_id, in either direction.

        BUG FIX: the original ran two byte-identical loops over
        self.connections, so it collected only upstream producers (twice) and
        never the downstream consumers of task_id's own outputs. The second
        pass now reads task_id's outgoing connections.
        """
        connected_tasks = set()
        # Upstream: tasks whose outputs feed into task_id.
        for output_task_id, output_params in self.connections.items():
            for connected_task_ids in output_params.values():
                if task_id in connected_task_ids:
                    connected_tasks.add(output_task_id)
        # Downstream: tasks fed by task_id's own outputs.
        for connected_task_ids in self.connections.get(task_id, {}).values():
            connected_tasks.update(connected_task_ids)
        return list(connected_tasks)
// Placeholders for optional per-environment customisation; empty by default.
// (tcProps is currently unused but kept for parity with sibling configs.)
var tcProps = {};
var optionalPlugins = [];
var optionalReporters = [];

// Uncomment Chrome for local debugging; PhantomJS is the headless default.
// var browsers = ['Chrome'];
var browsers = ['PhantomJS'];

// Karma configuration: browserify-bundled Jasmine specs, with HTML templates
// compiled into an Angular module and JSON mocks exposed as global fixtures.
module.exports = function(config) {
  config.set({
    basePath: './src/client',
    browsers: browsers,
    frameworks: ['browserify', 'jasmine'],
    files: [
      //3rd party
      '../../.tmp/vendors.js',
      '../../.tmp/bundle**.js',
      '../../.tmp/templates.mdl.js',
      // ngmock should be loaded with require on each spec
      // '../../node_modules/angular-mocks/angular-mocks.js',
      //app-specific
      'app/**/*.html',
      'common/**/*.html',
      'app/**/*spec.js',
      '!app/login/**/*spec.js', // login specs deliberately excluded
      'common/**/*spec.js',
      '../server/**/*mock.json'
    ],
    autoWatch: true,
    preprocessors: {
      'app/**/*.html': ['ng-html2js'],
      'common/**/*.html': ['ng-html2js'],
      '../server/**/*mock.json': ['json_fixtures'],
      'app/**/*spec.js': [ 'browserify' ]
    },
    // Compiled templates are registered on the 'templates' Angular module.
    ngHtml2JsPreprocessor: {
      moduleName: 'templates'
    },
    jsonFixturesPreprocessor: {
      // strip this from the file path \ fixture name
      stripPrefix: '.+mocks/',
      // strip this to the file path \ fixture name
      // prependPrefix: 'mock/',
      // change the global fixtures variable name
      variableName: 'mocks'
    },
    browserify: {
      // emit source maps so stack traces point at the original spec files
      debug: true,
      transform: []
    },
    plugins : [
      'karma-phantomjs-launcher',
      'karma-chrome-launcher',
      'karma-jasmine',
      'karma-ng-html2js-preprocessor',
      'karma-html2js-preprocessor',
      'karma-mocha-reporter',
      'karma-json-fixtures-preprocessor',
      'karma-browserify'
    ].concat(optionalPlugins),
    reporters: ['mocha'].concat(optionalReporters)
  });
};
|
#!/usr/bin/env bash
unset PYTORCH_VERSION
# For unittest, nightly PyTorch is used as the following section,
# so no need to set PYTORCH_VERSION.
# In fact, keeping PYTORCH_VERSION forces us to hardcode PyTorch version in config.

# Abort on the first failing command.
set -e

# Resolve this script's directory and the repo root, then work from the root.
# (this_dir is computed but not used below; kept as in the original.)
this_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
root_dir="$(git rev-parse --show-toplevel)"
cd "${root_dir}"

# Activate the repo-local conda env (Scripts/conda.exe => Windows CI layout).
eval "$(./conda/Scripts/conda.exe 'shell.bash' 'hook')"
conda activate ./env

printf "* Installing PyTorch\n"
# UPLOAD_CHANNEL / CONDA_CHANNEL_FLAGS are expected from the CI environment.
conda install -y -c "pytorch-${UPLOAD_CHANNEL}" ${CONDA_CHANNEL_FLAGS} pytorch cpuonly

printf "Installing torchdata nightly\n"
pip install --pre torchdata --extra-index-url https://download.pytorch.org/whl/nightly/cpu

printf "* Installing torchtext\n"
git submodule update --init --recursive
# Build torchtext in-place via the MSVC environment helper batch file.
"$root_dir/packaging/vc_env_helper.bat" python setup.py develop

printf "* Installing parameterized\n"
pip install parameterized
|
#!/bin/bash
# Print the working directory for build-log context.
pwd
# Compile the example against the project sources.
# gccBaseSwitch / gccExtraSwitch are expected to be set by the calling
# environment; they are deliberately unquoted so multiple flags word-split.
g++ $gccBaseSwitch $gccExtraSwitch -I../../../../src rgb.cpp -o rgb
|
import { IonIcon, IonInput, IonItem, IonLabel } from '@ionic/react';
import { checkmarkCircle, createOutline, informationCircleOutline, trash } from 'ionicons/icons';
import { useLayoutEffect, useRef, useState } from 'react';
import { Link } from 'react-router-dom';
import { BaseTask, TaskResponse } from '../../api/tasks/tasks.model';
type TaskItemProps = {
task: TaskResponse;
onDelete: (id: string) => void;
onUpdate: (id: string, task: Partial<BaseTask>) => void;
};
/**
 * Renders a single task row: a details link, an editable title (toggled via
 * the edit icon), and a delete icon. Changes are propagated upward through
 * the onUpdate / onDelete callbacks.
 */
const TaskItem: React.FC<TaskItemProps> = ({ task, onDelete, onUpdate }: TaskItemProps) => {
  const [isEditing, updateEditing] = useState(false);
  const titleEl = useRef<HTMLIonInputElement>(null);

  useLayoutEffect(() => {
    // BUG FIX: the original effect had no dependency array, so it queued a
    // setFocus() after EVERY render, stealing focus back from other controls
    // while editing. Focus exactly once, when edit mode is entered.
    if (!isEditing) return;
    // Defer to the next JS event-loop turn because of the Web Component
    // lifecycle: the <ion-input> cannot receive focus synchronously.
    setTimeout(() => titleEl.current?.setFocus());
  }, [isEditing]);

  /** Leave edit mode; persist the title only if it actually changed. */
  const endEditing = () => {
    updateEditing(false);
    const name = titleEl.current?.value?.toString() ?? '';
    if (task.name !== name) onUpdate(task._id, { name });
  };

  const startEditing = () => {
    updateEditing(true);
  };

  return (
    <IonItem key={task._id}>
      <Link slot="start" to={`/tasks/${task._id}`}>
        <IonIcon
          title="Icono para más información"
          icon={informationCircleOutline}
          color="primary"
        />
      </Link>
      {isEditing ? (
        <IonInput ref={titleEl} value={task.name}></IonInput>
      ) : (
        <IonLabel>
          <h2>{task.name}</h2>
        </IonLabel>
      )}
      {isEditing ? (
        <IonIcon
          onClick={endEditing}
          title="Icono aceptar la edición"
          icon={checkmarkCircle}
          color="success"
          slot="end"
          tabIndex={0}
        />
      ) : (
        <IonIcon
          onClick={startEditing}
          title="Icono editar tarea actual"
          icon={createOutline}
          color="primary"
          slot="end"
          tabIndex={0}
        />
      )}
      <IonIcon
        onClick={() => onDelete(task._id)}
        title="Icono borrar tarea actual"
        icon={trash}
        color="danger"
        slot="end"
        tabIndex={0}
      />
    </IonItem>
  );
};
|
# Install runtime deps, then snapshot the fully-resolved environment.
pip --no-cache-dir install -r ./requirements/requirements.in
# BUG FIX: "pip freeze -> file" parses as "pip freeze - > file", passing an
# invalid "-" argument to pip. The intended shell redirection is plain ">".
pip freeze > ./requirements/requirements.txt
# Same for the dev requirements.
pip --no-cache-dir install -r ./requirements/requirements-dev.in
pip freeze > ./requirements/requirements-dev.txt
# Verify the installed set has no conflicting dependencies.
pip check
|
#!/bin/bash
# SLURM batch job: one Double-DDPG training run on RoboschoolHumanoid-v1
# (seed 3, run 9, action-noise exploration, hard target copy).
#SBATCH --account=def-dkulic
#SBATCH --mem=8000M        # memory per node
#SBATCH --time=23:00:00        # time (DD-HH:MM)
#SBATCH --output=/project/6001934/lingheng/Double_DDPG_Job_output/continuous_RoboschoolHumanoid-v1_ddpg_hardcopy_action_noise_seed3_run9_%N-%j.out # %N for node name, %j for jobID

# Load the cluster toolchain and ML libraries, then activate the
# pre-built TensorFlow (CPU) virtualenv.
module load qt/5.9.6 python/3.6.3 nixpkgs/16.09 gcc/7.3.0 boost/1.68.0 cuda cudnn
source ~/tf_cpu/bin/activate

# Flags mirror the values encoded in the job/output names above.
python ./ddpg_discrete_action.py --env RoboschoolHumanoid-v1 --random-seed 3 --exploration-strategy action_noise --summary-dir ../Double_DDPG_Results_no_monitor/continuous/RoboschoolHumanoid-v1/ddpg_hardcopy_action_noise_seed3_run9 --continuous-act-space-flag --double-ddpg-flag --target-hard-copy-flag
|
<filename>ex010.js
/**
 * Exercise: declare umNumeroPequeno and umNumeroGrande, then
 *  - eMenor: whether umNumeroPequeno is less than umNumeroGrande
 *  - eMaior: whether umNumeroPequeno is greater than umNumeroGrande
 */
const umNumeroPequeno = 1;
const umNumeroGrande = 10;

// Comparison results (1 < 10 => true; 1 > 10 => false).
const eMenor = umNumeroPequeno < umNumeroGrande;
const eMaior = umNumeroPequeno > umNumeroGrande;

console.log(eMenor);
console.log(eMaior);
package edu.neu.coe.csye6225.service;
import edu.neu.coe.csye6225.entity.Note;
import edu.neu.coe.csye6225.entity.User;
import edu.neu.coe.csye6225.mapper.NoteMapper;
import edu.neu.coe.csye6225.mapper.UserMapper;
import static org.hamcrest.CoreMatchers.*;
import org.junit.*;
import static org.junit.Assert.*;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.transaction.annotation.Transactional;
@SpringBootTest
@RunWith(SpringRunner.class)
public class NoteServiceImplTest {

    @Autowired
    private AccountService accountService;
    @Autowired
    private NoteService noteService;
    @Autowired
    private UserMapper userMapper;
    @Autowired
    private NoteMapper noteMapper;

    // Fixture data created before and removed after every test method.
    private User user;
    private Note note;

    /**
     * Runs before EACH test method (JUnit 4 {@code @Before}, not
     * {@code @BeforeClass}): inserts a fixture user and one note for that
     * user into the database.
     * Renamed from the misleading "beforeClass".
     */
    @Before
    public void setUp() throws Exception {
        user = new User();
        user.setPassword("<PASSWORD>!");
        user.setUsername("<EMAIL>");
        accountService.signUp(user);
        note = new Note(user.getUsername(), "Note In Test", "This is a test note created to test NoteService. ");
        noteMapper.insertNote(note);
    }

    /**
     * Runs after EACH test method: deletes the fixture user and note.
     * Renamed from the misleading "afterClass".
     */
    @After
    public void tearDown() throws Exception {
        userMapper.deleteUserByUsername(user.getUsername());
        noteMapper.deleteNote(note);
    }

    /**
     * Create a note and roll back to avoid dirty data.
     *
     * If using MySQL, it should use InnoDB as storage engine
     * (mysql&gt; show variables like '%storage_engine%');
     * the latest version already supports that.
     * Typo fix: was "creatNoteTest".
     */
    @Test
    @Transactional
    public void createNoteTest() {
        Note created = noteService.createNote(user.getUsername());
        assertThat(noteMapper.getNoteById(created.getNoteId()), notNullValue());
    }

    /** Deleting an existing note removes it from the database. */
    @Test
    public void deleteNoteTest() {
        noteService.deleteNote(note.getNoteId());
        assertThat(noteMapper.getNoteById(note.getNoteId()), nullValue());
    }

    /** Updating a note persists the new content. */
    @Test
    public void updateNoteTest() {
        String content = "I've changed my mind, this is a diary! ";
        note.setContent(content);
        noteService.updateNote(note);
        assertThat(noteMapper.getNoteById(note.getNoteId()).getContent(), equalTo(content));
    }

    /**
     * Fetching a note by id returns the fixture note.
     * Typo fix: was "getNodeByIdTest".
     */
    @Test
    public void getNoteByIdTest() {
        // TODO: verify the date format of returned note object
        assertThat(noteService.getNoteById(note.getNoteId()).getNoteId(),
                equalTo(noteMapper.getNoteById(note.getNoteId()).getNoteId()));
    }

    /** The fixture user has exactly the one note created in setUp. */
    @Test
    public void getAllNotesTest() {
        assertThat(noteService.getAllNotes(user.getUsername()).size(), equalTo(1));
        assertThat(noteService.getAllNotes(user.getUsername()).get(0).getNoteId(),
                equalTo(note.getNoteId()));
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.