text stringlengths 1 1.05M |
|---|
#!/bin/bash
set -e # DO NOT REMOVE -- used to fail test if intermediate command fails

# Resolve the directory containing this script so it can be run from anywhere.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
. "$DIR"/../common/util/bc-messages.sh

suite_started SOTA

# Register the cleanup trap BEFORE running any command that can fail under
# set -e, so suite_finished is emitted even when vagrant-up.sh aborts.
cleanup() {
    suite_finished SOTA
}
trap cleanup EXIT

"$DIR"/vagrant-up.sh

# Need a Yocto vagrant image in order to test SOTA on a node.
test_with_command "SOTA_TEST_PASS" \
    vagrant ssh -c \"sudo /test/sota/SOTA_TEST_PASS.sh\"

# NOTE: no explicit suite_finished here -- the EXIT trap emits it exactly
# once; the original emitted it twice (direct call plus trap on exit).
<gh_stars>1-10
// Given an array consisting of n integers, find the contiguous subarray of given length k that has the maximum average value. And you need to output the maximum average value.
//
//
// Example 1:
//
// Input: [1,12,-5,-6,50,3], k = 4
// Output: 12.75
// Explanation: Maximum average is (12-5-6+50)/4 = 51/4 = 12.75
//
//
//
// Note:
//
// 1 <= k <= n <= 30,000.
// Elements of the given array will be in the range [-10,000, 10,000].
/**
 * Find the maximum average of any contiguous subarray of length k
 * using a fixed-size sliding window (O(n) time, O(1) space).
 * @param {number[]} nums
 * @param {number} k
 * @return {number} the maximum average value
 */
var findMaxAverage = function(nums, k) {
    // Sum of the first window of size k.
    let windowSum = 0;
    for (let i = 0; i < k; i++) {
        windowSum += nums[i];
    }
    // Slide the window one element at a time, tracking the best sum seen.
    let best = windowSum;
    for (let right = k; right < nums.length; right++) {
        windowSum += nums[right] - nums[right - k];
        if (windowSum > best) {
            best = windowSum;
        }
    }
    return best / k;
};
|
<gh_stars>1-10
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef UI_AURA_REMOTE_ROOT_WINDOW_HOST_WIN_H_
#define UI_AURA_REMOTE_ROOT_WINDOW_HOST_WIN_H_
#include <vector>
#include "base/compiler_specific.h"
#include "ui/aura/root_window_host.h"
#include "ui/base/win/window_impl.h"
namespace ui {
class ViewProp;
}
namespace aura {
// RootWindowHost implementaton that receives events from a different process.
class AURA_EXPORT RemoteRootWindowHostWin : public RootWindowHost {
 public:
  // Returns the singleton instance; presumably created earlier via Create()
  // -- TODO confirm against the .cc, the behavior when no instance exists is
  // not visible here.
  static RemoteRootWindowHostWin* Instance();
  // Creates the singleton host covering |bounds|.
  static RemoteRootWindowHostWin* Create(const gfx::Rect& bounds);

  // Input-event entry points invoked with data received from the remote
  // process (see class comment above).
  void OnMouseMoved(int32 x, int32 y, int32 extra);
  void OnMouseClick(int32 x, int32 y, int32 extra);
  void OnKeyDown(uint32 vkey,
                 uint32 repeat_count,
                 uint32 scan_code,
                 uint32 flags);
  void OnKeyUp(uint32 vkey,
               uint32 repeat_count,
               uint32 scan_code,
               uint32 flags);
  void OnChar(uint32 key_code,
              uint32 repeat_count,
              uint32 scan_code,
              uint32 flags);

 private:
  // Construction restricted to Create(); destruction managed internally.
  RemoteRootWindowHostWin(const gfx::Rect& bounds);
  virtual ~RemoteRootWindowHostWin();

  // RootWindowHost overrides:
  virtual void SetDelegate(RootWindowHostDelegate* delegate) OVERRIDE;
  virtual RootWindow* GetRootWindow() OVERRIDE;
  virtual gfx::AcceleratedWidget GetAcceleratedWidget() OVERRIDE;
  virtual void Show() OVERRIDE;
  virtual void Hide() OVERRIDE;
  virtual void ToggleFullScreen() OVERRIDE;
  virtual gfx::Rect GetBounds() const OVERRIDE;
  virtual void SetBounds(const gfx::Rect& bounds) OVERRIDE;
  virtual gfx::Point GetLocationOnNativeScreen() const OVERRIDE;
  virtual void SetCapture() OVERRIDE;
  virtual void ReleaseCapture() OVERRIDE;
  virtual void SetCursor(gfx::NativeCursor cursor) OVERRIDE;
  virtual bool QueryMouseLocation(gfx::Point* location_return) OVERRIDE;
  virtual bool ConfineCursorToRootWindow() OVERRIDE;
  virtual void UnConfineCursor() OVERRIDE;
  virtual void MoveCursorTo(const gfx::Point& location) OVERRIDE;
  virtual void SetFocusWhenShown(bool focus_when_shown) OVERRIDE;
  virtual bool CopyAreaToSkCanvas(const gfx::Rect& source_bounds,
                                  const gfx::Point& dest_offset,
                                  SkCanvas* canvas) OVERRIDE;
  virtual bool GrabSnapshot(
      const gfx::Rect& snapshot_bounds,
      std::vector<unsigned char>* png_representation) OVERRIDE;
  virtual void PostNativeEvent(const base::NativeEvent& native_event) OVERRIDE;
  virtual void OnDeviceScaleFactorChanged(float device_scale_factor) OVERRIDE;
  virtual void PrepareForShutdown() OVERRIDE;

  // Delegate receiving the translated events; not owned.
  RootWindowHostDelegate* delegate_;
  scoped_ptr<ui::ViewProp> prop_;

  DISALLOW_COPY_AND_ASSIGN(RemoteRootWindowHostWin);
};
} // namespace aura
#endif // UI_AURA_REMOTE_ROOT_WINDOW_HOST_WIN_H_
|
<gh_stars>0
import math
def theater_point_calculation(current_score, object_score, items):
    """Theater-event point calculator (iDOLM@STER Million Live).

    Original comments (Korean, translated) mark this as "still incomplete".

    NOTE(review): the source had its indentation stripped; the loop nesting
    below is a reconstruction -- confirm against the original file.
    NOTE(review): getting_point is initialized equal to needed_point, so the
    ``while needed_point > getting_point`` body never executes as written;
    the globals read in the final ``answer`` are then undefined unless set
    elsewhere (likely NameError). Flagging rather than changing behavior.
    """
    current_score = int(current_score)
    object_score = int(object_score)
    items = int(items)
    # Points still needed, crediting ~537 pts per 180 items already held.
    needed_point = object_score - (current_score + (math.fabs(items / 180) * 537))
    #MM_ticket_maximum_play = math.fabs(needed_point / 59.5)
    #needed_point = object_score - current_score
    # (Translated, approximate) Are earned points and replay counts the same?
    # (Translated, approximate) Here 59.5 is the points for one MM-ticket play.
    # needed_point = (getting_item_point / 180 * 537) + getting_item_point
    # (Translated, approximate) Order: MM ticket/live, 6M ticket/live,
    # 4M ticket/live, 2M+ ticket/live, 2M ticket/live.
    # (Translated, approximate) Each play must be rounded up, hence math.ceil.
    getting_point = needed_point  #((getting_item / 180) * 537) + getting_item
    while needed_point > getting_point:
        global MM_ticket
        global MM_live
        global M6_ticket
        global M6_live
        global M4_ticket
        global M4_live
        global M2plus_ticket
        global M2plus_live
        global M2_ticket
        global M2_live
        # Brute-force counters, one per ticket/live play type.
        getting_item = 0
        MM_ticket = 0
        MM_live = 0
        M6_ticket = 0
        M6_live = 0
        M4_ticket = 0
        M4_live = 0
        M2plus_ticket = 0
        M2plus_live = 0
        M2_ticket = 0
        M2_live = 0
        for m2live in range(20):
            if getting_item == needed_point:
                answer = [MM_ticket, MM_live, M6_ticket, M6_live, M4_ticket, M4_live, M2plus_ticket, M2plus_live, M2_ticket, M2_live]
                print(MM_ticket, MM_live, M6_ticket, M6_live, M4_ticket, M4_live, M2plus_ticket, M2plus_live, M2_ticket, M2_live)
                return answer
                # NOTE(review): unreachable -- follows a return.
                break
            M2_live = M2_live + 1
            for m2ticket in range(20):
                if getting_item == needed_point:
                    break
                M2_ticket = M2_ticket + 1
                for m2plive in range(20):
                    if getting_item == needed_point:
                        break
                    M2plus_live = M2plus_live + 1
                    for m2pticket in range(20):
                        if getting_item == needed_point:
                            break
                        M2plus_ticket = M2plus_ticket + 1
                        for m4live in range(20):
                            if getting_item == needed_point:
                                break
                            M4_live = M4_live + 1
                            for m4ticket in range(20):
                                if getting_item == needed_point:
                                    break
                                M4_ticket = M4_ticket + 1
                                for m6live in range(20):
                                    if getting_item == needed_point:
                                        break
                                    M6_live = M6_live + 1
                                    for m6ticket in range(20):
                                        if getting_item == needed_point:
                                            break
                                        M6_ticket = M6_ticket + 1
                                        for mmlive in range(20):
                                            if getting_item == needed_point:
                                                break
                                            MM_live = MM_live + 1
                                            for mmticket in range(20):
                                                if getting_item == needed_point:
                                                    break
                                                MM_ticket = MM_ticket + 1
                                                # Accumulate points per play type (ceil per play).
                                                # NOTE(review): the /180*537 bonus binds only to the
                                                # last term due to precedence -- TODO confirm intent.
                                                getting_item = (math.ceil(59.5 * MM_ticket) + math.ceil(85.0 * MM_live) + math.ceil(
                                                    45.0 * M6_ticket) + math.ceil(64.0 * M6_live) + math.ceil(34.3 * M4_ticket) + math.ceil(
                                                    49.0 * M4_live) + math.ceil(43.4 * M2plus_ticket) + math.ceil(62.0 * M2plus_live) + math.ceil(
                                                    25.0 * M2_ticket) + math.ceil(35.0 * M2_live) / 180 * 537 + getting_item)
        if needed_point == getting_point:
            print("์ฑ๊ณต!")  # (Translated) "Success!"
            break
    '''
for MM_ticket in range(maximum_play):
MM_ticket_playcount = maximum_play
MM_ticket_point = MM_ticket_playcount * 59.5
if MM_ticket_point == needed_point:
print(MM_ticket_playcount)
break
elif MM_ticket_point <= needed_point:
MM_ticket_playcount = MM_ticket_playcount - 1
elif MM_ticket_playcount < 0:
    '''
    answer = [MM_ticket, MM_live, M6_ticket, M6_live, M4_ticket, M4_live, M2plus_ticket, M2plus_live, M2_ticket,
              M2_live]
    return answer
def tour_point_calculation(current_score, object_score, items, progressivity):
    """Tour-event point calculator -- unimplemented stub.

    Original comment (Korean, translated): "What on earth is the formula?"
    """
    print("ํฌ์ด")
if __name__ == '__main__':
    # NOTE(review): main() is not defined anywhere in this file -- confirm it
    # exists elsewhere, otherwise this entry point raises NameError.
    main()
# (Translated from Korean, approximate) This is a point/sheet calculator for
# THE iDOLM@STER Million Live Theater Days tour and theater events.
# Still incomplete.
# The block below is commented-out Discord-bot dispatch code, kept verbatim:
#''' if message.content.startswith('-ํฌ์ด') or message.content.startswith('-์์ดํฐ'):
#     try:
#         get_message = message.content.split()
#         # Tour event sheet calculator
#         if get_message[0] == "-ํฌ์ด":
#             MLTH.tour_point_calculation(get_message[1], get_message[2], get_message[3], get_message[4])
#             await message.reply("์ด๊ฒ ๋ญ์์?")
#         # Theater event sheet calculator
#         elif get_message[0] == "-์์ดํฐ":
#             MLTH.theater_point_calculation(get_message[1], get_message[2], get_message[3])
#             await message.reply("ํ์ด์ด์ด์ต")
#     # (Translated) On error, the error log is shown. Still incomplete.
#     except Exception as e:
#         await message.reply(traceback.format_exc())
# '''
|
var ss = require('../')
, should = require('should')
, assert = require('assert');
// One push and one pull socket over the same local port.
var push = ss.socket('push')
  , pull = ss.socket('pull');

// basic 1-1 push/pull
var msgs = []
  , n = 0;

push.bind(4000);
pull.connect(4000);

// Continuously push an increasing counter every 2ms.
var id = setInterval(function(){
  push.send(String(n++));
}, 2);

pull.on('message', function(msg){
  msgs.push(msg.toString());
  switch (msgs.length) {
    case 10:
      // After 10 messages, close the pull socket and reconnect shortly
      // after -- exercising delivery across a consumer reconnect.
      pull.close();
      pull.once('close', function(){
        setTimeout(function(){
          pull.connect(4000);
        }, 50);
      });
      break;
    case 300:
      // After 300 messages, assert strict ordering / no loss, then shut
      // everything down so the process can exit.
      for (var i = 0; i < 299; ++i) {
        msgs[i].should.equal(i.toString());
      }
      clearInterval(id);
      push.close();
      pull.close();
      break;
  }
});
# List every installed Windows service with its name and current status,
# rendered as a table.
Get-Service | Select-Object -Property Name, Status | Format-Table
<filename>design-editor/src/vsc/vsc-editor.js
'use babel';
/**
* This is a copy of similar file brackets-editor.js
* TODO: Rewrite this file to enable works Design Editor in VSCode.
*/
import brackets from 'brackets';
import fs from 'fs';
import $ from 'jquery';
import {packageManager} from 'content-manager';
import tauComponents from 'tau-component-packages';
import formComponents from 'closet-default-component-packages';
import closetComponents from 'closet-component-packages';
import {VSCPreferenceManager} from './vsc-preference-manager';
import {PreferenceManager} from '../preference-manager';
import bracketsDefaultConfig from '../package-config';
import {BracketsStatusBar} from './brackets-status-bar';
import {ConfigurationDesignAreaElement} from '../panel/configuration-design-area-element';
import {stageManager} from '../system/stage-manager';
import {appManager} from '../app-manager';
import {DesignEditorElement} from '../pane/design-editor-element';
import {panelManager} from '../system/panel-manager';
import {EVENTS, eventEmitter} from '../events-emitter';
import {ModelManager} from '../pane/model-manager';
// Lazily-populated module-level singletons for the editor UI pieces
// (created in the VSCEditor constructor below).
let element,
	statusBarElement,
	configurationDesignAreaElement;
// Singleton VSCEditor instance (created on first getInstance()).
let _instance = null;
let modelManager = null;
/**
 * @class VSCEditor
 * Responsible for launch Design-Editor in Web-view.
 */
class VSCEditor {
	/**
	 * Constructor
	 * Installs the brackets mockup, registers the preference manager,
	 * creates the UI elements and wires the InsertComponent event.
	 */
	constructor() {
		// Adding brackets mockup on the top of Design Editor
		window.brackets = brackets;
		// This will register BracketsPreferenceManager inside PreferenceManager
		VSCPreferenceManager.initialize();
		element = new DesignEditorElement();
		statusBarElement = new BracketsStatusBar();
		configurationDesignAreaElement = new ConfigurationDesignAreaElement();
		modelManager = ModelManager.getInstance();
		//appManager = AppManager.getInstance();
		// NOTE(review): the callback parameter `element` shadows the
		// module-level `element` above -- confirm that is intentional.
		eventEmitter.on(EVENTS.InsertComponent, (event, componentPackageInfo, element) => {
			const activeClosetEditor = appManager.getActiveDesignEditor();
			if (activeClosetEditor) {
				activeClosetEditor.insertComponent(event, componentPackageInfo, element);
			}
		});
	}

	/**
	 * Return instance (lazily created singleton)
	 * @returns {VSCEditor}
	 */
	static getInstance() {
		if (_instance === null) {
			_instance = new VSCEditor();
		}
		return _instance;
	}

	/**
	 * Define default preferences from the bundled package config.
	 * Keys are split at the first dot into (group, name).
	 * @private
	 */
	_defineDefaultPreferences() {
		Object.keys(bracketsDefaultConfig).forEach((key) => {
			const dotIndex = key.indexOf('.');
			// dotIndex + 1 -> even in case we will not have any dot inside of the config
			// this will return key starting from 0 index
			PreferenceManager.setDefault(
				key.substring(0, Math.max(dotIndex, 0)),
				key.substring(dotIndex + 1),
				bracketsDefaultConfig[key].default,
				{
					title: bracketsDefaultConfig[key].title,
					description: bracketsDefaultConfig[key].description
				}
			);
		});
	}

	/**
	 * Init: consumes the component packages, mounts the editor UI into the
	 * DOM, loads the target file and installs the window-level file helpers
	 * used by the host (saveToFile, writeFile, existsDir, makeDir,
	 * loadFromFile).
	 * @returns {VSCEditor}
	 */
	initialize() {
		const self = this;
		// self._defineDefaultPreferences();
		closetComponents.consumeCloset(packageManager, () => {
			formComponents.consumeCloset(packageManager, () => {
				tauComponents.consumeCloset(packageManager, () => {
					panelManager.initialize(document.body);
					stageManager.initialize()._onActiveEditorUpdated(1, element);
					document.querySelector('.closet-container' +
						' .closet-panel-container-middle .closet-panel-container-center').appendChild(element);
					element.appendChild(statusBarElement);
					statusBarElement.addItem(configurationDesignAreaElement);
					document.querySelector('.closet-container').classList.add('full');
					// Load the file the webview was opened for.
					fs.readFile(window.globalData.fileUrl, 'utf8', (err, data) => {
						if (err) throw err;
						$('.closet-container').addClass('full design-view-active');
						self.update(data, window.globalData);
						element.show();
						// inform about activate design editor
						eventEmitter.emit(EVENTS.ActiveEditorUpdated, 1, element);
					});
					// Persist the current model back to disk; `quiet` skips the
					// confirmation prompt for dirty models.
					window.saveToFile = (callback, quiet) => {
						const
							modelID = window.globalData.fileUrl,
							html = modelManager.getHTML(modelID, true),
							save = function () {
								fs.writeFile(modelID, html, () => {
									modelManager.markModelClean(modelID);
									callback();
								});
							};
						// empty string means that no change was done
						if (html) {
							//@TODO this needs rework!
							if (!quiet && modelManager.isModelDirty(modelID)) {
								if (window.confirm('Document was changed, do you want to save?')) {
									save();
								} else {
									callback();
								}
							} else {
								save();
							}
						} else {
							callback();
						}
					};
					window.writeFile = (path, fileData, additionalFileOptions, callback) => {
						fs.writeFile(path, fileData, () => {
							callback();
						}, additionalFileOptions);
					};
					window.existsDir = (path, callback) => {
						fs.existsDir(path, callback);
					};
					window.makeDir = (path, callback) => {
						fs.makeDir(path, callback);
					};
					// Reload the editor content from the file on disk.
					window.loadFromFile = () => {
						fs.readFile(window.globalData.fileUrl, 'utf8', (err, data) => {
							if (err) {
								throw err;
							}
							self.update(data, window.globalData);
							element.show();
							// inform about activate design editor
							eventEmitter.emit(EVENTS.ActiveEditorUpdated, 1, element);
						});
					};
				});
			});
		});
		return self;
	}

	/**
	 * Update model for design editor after changing file.
	 * @param {string} data File content
	 * @param {Object} state Global state object (reads fileUrl, basePath and
	 *     networks) -- the original doc listed basePath/uri parameters that
	 *     do not match this signature.
	 */
	update(data, state) {
		const model = modelManager.update(state.fileUrl, true);
		// eslint-disable-next-line no-console
		console.log('update with state', state);
		element.update(model, data, state.basePath, state, state.networks);
	}
}
// Create and initialize the singleton as a module side effect.
const VSCEditorInstance = VSCEditor.getInstance().initialize();
export {VSCEditor, VSCEditorInstance};
|
def update_gitlab_system_user_photo_to_v1(apps, schema_editor):
    """Data migration: set the GitLab system user's photo to the v1 logo.

    Looks up the inactive system user whose username starts with "gitlab-"
    on the database this migration runs against, and saves logo-v1.png as
    its photo (stored under the name "logo.png"). If the user does not
    exist the migration is a silent no-op.

    Args:
        apps: Django migration app registry (historical models).
        schema_editor: provides the database connection alias.
    """
    User = apps.get_model('your_app_name', 'User')  # Replace 'your_app_name' with the actual app name
    try:
        db_alias = schema_editor.connection.alias
        user = User.objects.using(db_alias).get(username__startswith="gitlab-",
                                                is_active=False,
                                                is_system=True)
        # Use a context manager so the file handle is always closed;
        # the original left the handle open (resource leak).
        with open("taiga/hooks/gitlab/migrations/logo-v1.png", "rb") as f:
            user.photo.save("logo.png", File(f))
        user.save()
    except User.DoesNotExist:
        pass
# Build the distribution artifacts, then install the axon binaries system-wide.
source "bs/bundle.sh"

# Copy the built binaries into /usr/bin (requires sudo).
sudo cp dist/axon /usr/bin/axon
sudo cp dist/axon-export /usr/bin/axon-export
sudo cp dist/axon-import /usr/bin/axon-import

echo '๐ง axon installed.'
# Sanity check: show the installed binaries.
ls /usr/bin/axon*
|
<reponame>ssayanm/gatsby-strapi-stripe-frontend
import React from "react";
import Layout from "../components/Layout";
import TitleBar from "../components/TitleBar";
import Seo from "../components/Seo";
import MiniContact from "../components/MiniContact";
import { Fade } from "react-reveal";
import { graphql } from "gatsby";
import ReactMarkdown from "react-markdown";
import { GatsbyImage } from "gatsby-plugin-image";
// About page: renders the Strapi "About Us" entry (fetched by the page
// query below) -- title bar, image with caption, and markdown description.
const About = ({ data }) => {
  const { title, description, image, caption } = data.strapiInnerpages;
  return (
    <Layout>
      <Seo
        title="About Pleasant"
        description="Connecticut Certified Relationship Coach <NAME>"
      />
      <TitleBar title={title} />
      <section className="about-page">
        <div className="about-center section-center">
          <Fade left>
            <div>
              <GatsbyImage
                image={image.childImageSharp.gatsbyImageData}
                alt={title}
                className="about-img"
              />
              <br />
              <em>{caption}</em>
            </div>
          </Fade>
          <Fade right>
            <div className="about-text">
              <ReactMarkdown children={description} />
            </div>
          </Fade>
        </div>
      </section>
      <MiniContact />
    </Layout>
  );
};
export const query = graphql`
{
strapiInnerpages(title: { eq: "About Us" }) {
title
description
slug
caption
image {
childImageSharp {
gatsbyImageData(width: 1920, quality: 100, formats: [AUTO])
}
}
}
}
`;
export default About;
|
<reponame>Trice254/alx-higher_level_programming<gh_stars>0
#!/usr/bin/python3
"""
Module Devide Matrix
"""
def matrix_divided(matrix, div):
    """Divide every element of a matrix by a number.

    Args:
        matrix (list[list[int/float]]): matrix to divide.
        div (int/float): divisor.

    Raises:
        TypeError: if div is not an int or float.
        TypeError: if matrix is not a non-empty list of lists of numbers.
        TypeError: if rows of the matrix differ in size.
        ZeroDivisionError: if div is 0.

    Returns:
        list[list[float]]: new matrix, each element divided by div and
        rounded to 2 decimal places.
    """
    if type(div) not in [int, float]:
        raise TypeError("div must be a number")
    if div == 0:
        raise ZeroDivisionError('division by zero')
    # Check emptiness before indexing matrix[0]: the original raised an
    # accidental IndexError on an empty matrix instead of the TypeError below.
    if (type(matrix) is not list or len(matrix) == 0
            or not all(type(row) is list for row in matrix)
            or not all(isinstance(n, (int, float)) for row in matrix for n in row)
            or len(matrix[0]) == 0):
        raise TypeError(
            "matrix must be a matrix "
            "(list of lists) of integers/floats")
    row_len = len(matrix[0])
    if not all(len(row) == row_len for row in matrix):
        raise TypeError("Each row of the matrix must have the same size")
    return [[round(n / div, 2) for n in row] for row in matrix]
|
<reponame>LinuxSuRen/satellity
import '../node_modules/noty/src/noty.scss';
import '../node_modules/noty/src/themes/nest.scss';
import '../node_modules/normalize.css/normalize.css';
import '../node_modules/@fortawesome/fontawesome-free/css/all.css';
import './assets/css/h5bp.css';
import './index.scss';
import React from 'react';
import ReactDOM from 'react-dom';
import { BrowserRouter as Router, Route, Switch } from 'react-router-dom';
import showdown from 'showdown';
import Locale from './locale/index.js';
import MainLayout from './layouts/main.js';
import AdminRoute from './admin/admin.js';
import NoMatch from './sink.js';
import Oauth from './users/oauth.js';
import { library } from '@fortawesome/fontawesome-svg-core';
import { faBookmark, faComment, faComments, faEdit, faEye, faTrashAlt, faHeart } from '@fortawesome/free-regular-svg-icons';
import { faChalkboard, faEllipsisV, faHome, faPlus, faUsersCog } from '@fortawesome/free-solid-svg-icons';
// Register only the FontAwesome icons actually used, keeping the bundle small.
library.add(
  faBookmark, faComment, faComments,
  faEdit, faEye, faTrashAlt,
  faHeart,
  faChalkboard, faEllipsisV, faHome,
  faPlus, faUsersCog
);

// Global showdown (Markdown) rendering options.
showdown.setOption('customizedHeaderId', true);
showdown.setOption('simplifiedAutoLink', true);
showdown.setOption('strikethrough', true);
showdown.setOption('simpleLineBreaks', true);

// Expose the locale helper globally, keyed off the browser language.
window.i18n = new Locale(navigator.language);

// Mount the client-side router: OAuth callback, admin area, explicit 404,
// with MainLayout as the catch-all.
ReactDOM.render((
  <Router>
    <div>
      <Switch>
        <Route path='/oauth/callback' component={Oauth} />
        <Route path='/admin' component={AdminRoute} />
        <Route path='/404' component={NoMatch} />
        <MainLayout />
      </Switch>
    </div>
  </Router>
), document.querySelector('#layout-container'));
|
////////////////////////////////////////////////////////////
// NOTE(review): none of these helpers (launchNet, swapChain, stopSession,
// statsEthers, showDefault) nor the globals (EXCHAINS, sestime, refresh)
// are defined in this fragment -- presumably provided by an enclosing
// scope; verify before use.
launchNet();
swapChain(EXCHAINS.classic.ncid);
stopSession(sestime);
statsEthers(refresh);
showDefault();
////////////////////////////////////////////////////////////
<gh_stars>0
import {
DeployVerifierInstance,
RelayVerifierInstance,
TestTokenInstance,
SmartWalletFactoryInstance,
SmartWalletInstance,
TestVerifiersInstance
} from '../types/truffle-contracts'
import { expectRevert, expectEvent } from '@openzeppelin/test-helpers'
import { ethers } from 'ethers'
import { toBuffer, bufferToHex, privateToAddress, BN } from 'ethereumjs-util'
import { toChecksumAddress } from 'web3-utils'
import { RelayRequest } from '../src/common/EIP712/RelayRequest'
import { getTestingEnvironment, createSmartWalletFactory, createSmartWallet, bytes32 } from './TestUtils'
import { constants } from '../src/common/Constants'
import { Address } from '../src/relayclient/types/Aliases'
import { getDomainSeparatorHash } from '../src/common/EIP712/TypedRequestData'
const DeployVerifier = artifacts.require('DeployVerifier')
const RelayVerifier = artifacts.require('RelayVerifier')
const TestToken = artifacts.require('TestToken')
const SmartWallet = artifacts.require('SmartWallet')
const TestRecipient = artifacts.require('TestRecipient')
const TestVerifiers = artifacts.require('TestVerifiers')
const gasPrice = '10'
const gasLimit = '1000000'
const senderNonce = '0'
const tokensPaid = 1
let relayRequestData: RelayRequest
// Tests for DeployVerifier: each case rebuilds a fresh token/factory/verifier
// in beforeEach, then calls verifyRelayedCall through TestVerifiers (which
// stands in for the relay hub).
// NOTE(review): expectRevert.unspecified ignores its second argument, so the
// revert reason strings below are not actually asserted -- confirm this is
// intentional (commonly done for RSK node compatibility).
contract('DeployVerifier', function ([relayHub, dest, other1, relayWorker, senderAddress, other2, verifierOwner, other3]) {
  let deployVerifier: DeployVerifierInstance
  let token: TestTokenInstance
  let template: SmartWalletInstance
  let factory: SmartWalletFactoryInstance
  let testVerifiers: TestVerifiersInstance
  let expectedAddress: Address

  // Fixed owner key so the counterfactual wallet address is deterministic.
  const ownerPrivateKey = toBuffer(bytes32(1))
  let ownerAddress: string

  const recoverer = constants.ZERO_ADDRESS
  const index = '0'

  beforeEach(async function () {
    ownerAddress = toChecksumAddress(bufferToHex(privateToAddress(ownerPrivateKey)), (await getTestingEnvironment()).chainId).toLowerCase()
    token = await TestToken.new()
    template = await SmartWallet.new()
    factory = await createSmartWalletFactory(template)
    deployVerifier = await DeployVerifier.new(factory.address, { from: verifierOwner })
    testVerifiers = await TestVerifiers.new(deployVerifier.address)

    // We simulate the testVerifiers contract is a relayHub to make sure
    // the onlyRelayHub condition is correct
    relayRequestData = {
      request: {
        relayHub: relayHub,
        to: constants.ZERO_ADDRESS,
        data: '0x',
        from: ownerAddress,
        nonce: senderNonce,
        value: '0',
        gas: gasLimit,
        tokenContract: token.address,
        tokenAmount: tokensPaid.toString(),
        tokenGas: '50000'
      },
      relayData: {
        gasPrice,
        relayWorker,
        callForwarder: constants.ZERO_ADDRESS,
        callVerifier: deployVerifier.address,
        domainSeparator: '0x'
      }
    }
    // we mint tokens to the sender (at the not-yet-deployed wallet address)
    expectedAddress = await factory.getSmartWalletAddress(ownerAddress, recoverer, index)
    await token.mint(tokensPaid + 4, expectedAddress)
  })

  it('Should not fail on checks of preRelayCall', async function () {
    await deployVerifier.acceptToken(token.address, { from: verifierOwner })
    const { logs } = await testVerifiers.verifyRelayedCall(relayRequestData, '0x00', { from: relayHub })
    expectEvent.inLogs(logs, 'Accepted', {
      tokenAmount: new BN(tokensPaid),
      from: ownerAddress
    })
  })

  it('SHOULD fail on address already created on preRelayCall', async function () {
    await deployVerifier.acceptToken(token.address, { from: verifierOwner })

    // Sign the deploy request exactly the way the factory expects
    // (EIP-191-style 0x1910 prefix over owner/recoverer/index).
    const toSign: string = web3.utils.soliditySha3(
      { t: 'bytes2', v: '0x1910' },
      { t: 'address', v: ownerAddress },
      { t: 'address', v: recoverer },
      { t: 'uint256', v: index }
    ) ?? ''
    const toSignAsBinaryArray = ethers.utils.arrayify(toSign)
    const signingKey = new ethers.utils.SigningKey(ownerPrivateKey)
    const signature = signingKey.signDigest(toSignAsBinaryArray)
    const signatureCollapsed = ethers.utils.joinSignature(signature)

    // Deploy the wallet first, so the verifier must reject a second deploy.
    const { logs } = await factory.createUserSmartWallet(ownerAddress, recoverer,
      index, signatureCollapsed)

    relayRequestData.request.from = ownerAddress
    relayRequestData.request.to = constants.ZERO_ADDRESS
    relayRequestData.request.data = '0x'

    const salt = web3.utils.soliditySha3(
      { t: 'address', v: ownerAddress },
      { t: 'address', v: recoverer },
      { t: 'uint256', v: index }
    ) ?? ''
    const expectedSalt = web3.utils.toBN(salt).toString()

    // Check the emitted event
    expectEvent.inLogs(logs, 'Deployed', {
      addr: expectedAddress,
      salt: expectedSalt
    })

    await expectRevert.unspecified(
      testVerifiers.verifyRelayedCall(relayRequestData, '0x00', { from: relayHub }),
      'Address already created!')
  })

  it('SHOULD fail on Balance Too Low of preRelayCall', async function () {
    await deployVerifier.acceptToken(token.address, { from: verifierOwner })

    // We change the initParams so the smart wallet address will be different
    // So there wont be any balance
    relayRequestData.request.data = '0x01'

    await expectRevert.unspecified(
      testVerifiers.verifyRelayedCall(relayRequestData, '0x00', { from: relayHub }),
      'balance too low'
    )
  })

  it('SHOULD fail on Token contract not allowed of preRelayCall', async function () {
    // acceptToken was never called here, so the token is not whitelisted.
    await expectRevert.unspecified(
      testVerifiers.verifyRelayedCall(relayRequestData, '0x00', { from: relayHub }),
      'Token contract not allowed'
    )
  })

  it('SHOULD fail when factory is incorrect on preRelayCall', async function () {
    // Verifier constructed with a non-factory address must reject deploys.
    deployVerifier = await DeployVerifier.new(other1, { from: verifierOwner })

    // We simulate the testVerifiers contract is a relayHub to make sure
    // the onlyRelayHub condition is correct
    testVerifiers = await TestVerifiers.new(deployVerifier.address)

    await expectRevert.unspecified(
      testVerifiers.verifyRelayedCall(relayRequestData, '0x00', { from: relayHub }),
      'Invalid factory'
    )
  })
})
// Tests for RelayVerifier. Unlike the DeployVerifier suite this uses a
// single `before` (not beforeEach) and the cases mutate the shared
// relayRequestData -- NOTE(review): the tests are therefore order-dependent;
// confirm that is intentional.
contract('RelayVerifier', function ([_, dest, relayManager, relayWorker, other, other2, verifierOwner, relayHub]) {
  let template: SmartWalletInstance
  let sw: SmartWalletInstance
  let relayVerifier: RelayVerifierInstance
  let token: TestTokenInstance
  let relayRequestData: RelayRequest
  let factory: SmartWalletFactoryInstance
  let testVerifiers: TestVerifiersInstance

  // Fixed sender key so the deployed wallet address is deterministic.
  const senderPrivateKey = toBuffer(bytes32(1))
  let senderAddress: string

  before(async function () {
    const env = await getTestingEnvironment()
    const chainId = env.chainId

    senderAddress = toChecksumAddress(bufferToHex(privateToAddress(senderPrivateKey)), chainId).toLowerCase()

    token = await TestToken.new()
    template = await SmartWallet.new()
    factory = await createSmartWalletFactory(template)
    relayVerifier = await RelayVerifier.new(factory.address, { from: verifierOwner })
    testVerifiers = await TestVerifiers.new(relayVerifier.address)

    // Deploy an actual smart wallet for the sender to forward through.
    sw = await createSmartWallet(relayHub, senderAddress, factory, senderPrivateKey, chainId)
    const smartWallet = sw.address
    const recipientContract = await TestRecipient.new()

    // We simulate the testVerifiers contract is a relayHub to make sure
    // the onlyRelayHub condition is correct
    relayRequestData = {
      request: {
        relayHub: relayHub,
        to: recipientContract.address,
        data: '0x00',
        from: senderAddress,
        nonce: senderNonce,
        value: '0',
        gas: gasLimit,
        tokenContract: token.address,
        tokenAmount: tokensPaid.toString(),
        tokenGas: '50000'
      },
      relayData: {
        gasPrice,
        relayWorker,
        callForwarder: smartWallet,
        callVerifier: relayVerifier.address,
        domainSeparator: getDomainSeparatorHash(smartWallet, chainId)
      }
    }
    // we mint tokens to the sender,
    await token.mint(tokensPaid + 4, smartWallet)
  })

  it('Should not fail on checks of preRelayCall', async function () {
    await relayVerifier.acceptToken(token.address, { from: verifierOwner })
    // run method
    const { logs } = await testVerifiers.verifyRelayedCall(relayRequestData, '0x00', { from: relayHub })
    // All checks should pass
    expectEvent.inLogs(logs, 'Accepted', {
      tokenAmount: new BN(tokensPaid),
      from: senderAddress
    })
  })

  it('SHOULD fail on Balance Too Low of preRelayCall', async function () {
    await relayVerifier.acceptToken(token.address, { from: verifierOwner })
    // Point the forwarder at a plain account with no token balance.
    relayRequestData.relayData.callForwarder = other
    // run method
    await expectRevert.unspecified(
      testVerifiers.verifyRelayedCall(relayRequestData, '0x00', { from: relayHub }),
      'balance too low'
    )
  })

  it('SHOULD fail on Token contract not allowed of preRelayCall', async function () {
    // NOTE(review): relies on state left by the previous test (forwarder
    // still pointing at `other`); acceptToken persists from earlier cases.
    await expectRevert.unspecified(
      testVerifiers.verifyRelayedCall(relayRequestData, '0x00', { from: relayHub }),
      'Token contract not allowed'
    )
  })

  it('SHOULD fail on SW different to template of preRelayCall', async function () {
    await relayVerifier.acceptToken(token.address, { from: verifierOwner })
    // Forwarder needs to be a contract with balance
    // But a different than the template needed
    relayRequestData.relayData.callForwarder = token.address
    await token.mint(tokensPaid + 4, token.address)
    // run method
    await expectRevert.unspecified(
      testVerifiers.verifyRelayedCall(relayRequestData, '0x00', { from: relayHub }),
      'SW different to template'
    )
  })
})
|
#!/bin/bash
set -e
LANG=en_US.utf8

# Fallback release tag used when the latest tag cannot be fetched from GitHub.
default_compatibility_version=v1.0.0-rc4 # update this every release
compatibility_version=

# Print a green informational log line.
LOG_INFO()
{
    local content=${1}
    echo -e "\033[32m[INFO] ${content}\033[0m"
}

# Print a red error log line.
LOG_ERROR()
{
    local content=${1}
    echo -e "\033[31m[ERROR] ${content}\033[0m"
}

# Print usage (optionally prefixed by $1) and exit successfully.
help()
{
    echo "$1"
    cat << EOF
Usage: Download WeCross demo
-t <tag name> [Optional] download demo from a given tag
-h call for help
e.g
bash $0
EOF
    exit 0
}

# Parse CLI options: -t <tag> pins a specific release tag, -h prints help.
parse_command()
{
    while getopts "t:h" option;do
        # shellcheck disable=SC2220
        case ${option} in
        t)
            compatibility_version=$OPTARG
            ;;
        h) help;;
        esac
    done
}
# Ensure ./demo/ is available: resolve the release tag (CLI flag, then GitHub
# "latest", then the hard-coded default) and fetch the demo package for it.
download_demo()
{
    local github_url=https://github.com/WeBankFinTech/WeCross/releases/download/
    local cdn_url=https://osp-1257653870.cos.ap-guangzhou.myqcloud.com/WeCross/Demo/
    #local compatibility_version=
    local release_pkg=demo.tar.gz
    local release_pkg_checksum_file=demo.tar.gz.md5

    # Nothing to do when a demo directory already exists.
    if [ -d ./demo/ ];then
        LOG_INFO "./demo/ exists"
        exit 0
    fi

    LOG_INFO "Checking latest release"
    if [ -z "${compatibility_version}" ];then
        # Scrape the latest tag name from the GitHub API response.
        compatibility_version=$(curl -s https://api.github.com/repos/WeBankFinTech/WeCross/releases/latest | grep "tag_name"|awk -F '\"' '{print $4}')
    fi

    if [ -z "${compatibility_version}" ];then
        # could not get version from github
        compatibility_version=${default_compatibility_version}
    fi

    LOG_INFO "Latest release: ${compatibility_version}"
    download_release_pkg ${github_url} ${cdn_url} ${compatibility_version} ${release_pkg} ${release_pkg_checksum_file}
}
# Download and md5-verify a release package, preferring the CDN and falling
# back to GitHub releases. Exits non-zero on checksum or download failure.
#   $1 github_url                  GitHub releases base URL
#   $2 cdn_url                     CDN mirror base URL
#   $3 compatibility_version       release tag to fetch
#   $4 release_pkg                 package file name
#   $5 release_pkg_checksum_file   md5 checksum file name
download_release_pkg()
{
    # Quote all expansions so tags/paths with unusual characters are safe
    # (the original left every variable unquoted).
    local github_url="${1}"
    local cdn_url="${2}"
    local compatibility_version="${3}"
    local release_pkg="${4}"
    local release_pkg_checksum_file="${5}"

    # Download the checksum first so an already-present package can be verified.
    LOG_INFO "Try to Download checksum from ${cdn_url}/${compatibility_version}/${release_pkg_checksum_file}"
    if ! curl --fail -LO "${cdn_url}/${compatibility_version}/${release_pkg_checksum_file}"; then
        LOG_INFO "Download checksum from ${github_url}/${compatibility_version}/${release_pkg_checksum_file}"
        curl -LO "${github_url}/${compatibility_version}/${release_pkg_checksum_file}"
    fi

    # grep -q replaces the fragile [ -z "$(grep ...)" ] subshell test.
    if [ ! -e "${release_pkg_checksum_file}" ] || ! grep -q "${release_pkg}" "${release_pkg_checksum_file}"; then
        LOG_ERROR "Download checksum file error"
        exit 1
    fi

    # Skip the download when a verified copy already exists.
    if [ -f "${release_pkg}" ] && md5sum -c "${release_pkg_checksum_file}"; then
        LOG_INFO "Latest release ${release_pkg} exists."
    else
        LOG_INFO "Try to download from: ${cdn_url}/${compatibility_version}/${release_pkg}"
        if ! curl --fail -LO "${cdn_url}/${compatibility_version}/${release_pkg}"; then
            # If CDN failed, download from github release (resume partial file).
            LOG_INFO "Download from: ${github_url}/${compatibility_version}/${release_pkg}"
            curl -C - -LO "${github_url}/${compatibility_version}/${release_pkg}"
        fi
        if ! md5sum -c "${release_pkg_checksum_file}"; then
            LOG_ERROR "Download package error"
            rm -f "${release_pkg}"
            exit 1
        fi
    fi

    tar -zxf "${release_pkg}"
}
main()
{
    download_demo
}

# Print follow-up instructions once the demo package has been extracted.
print_result()
{
    LOG_INFO "Download completed. WeCross Demo is in: ./demo/"
    LOG_INFO "Please: \"cd ./demo/ \" and \"bash build.sh\" to build the demo."
}

# Entry point: parse options, download the demo, then print next steps.
parse_command $@
main
print_result
package org.multibit.hd.ui.views.wizards.send_bitcoin;
import com.google.common.base.Optional;
import org.multibit.hd.ui.views.wizards.AbstractHardwareWalletWizard;
import org.multibit.hd.ui.views.wizards.AbstractWizardPanelView;
import java.util.Map;
/**
* <p>Wizard to provide the following to UI for "Send Bitcoin":</p>
* <ol>
* <li>Enter amount (or empty entirely)</li>
* <li>Confirm details</li>
* <li>Report progress</li>
* </ol>
*
* @since 0.0.1
* ย
*/
public class SendBitcoinWizard extends AbstractHardwareWalletWizard<SendBitcoinWizardModel> {

  /**
   * @param model the wizard model shared across all panels of this wizard
   */
  public SendBitcoinWizard(SendBitcoinWizardModel model) {
    // Second/third args come from the AbstractHardwareWalletWizard contract
    // -- presumably "not exiting" and no starting-panel override; TODO confirm
    // against the superclass.
    super(model, false, Optional.absent());
  }

  @Override
  protected void populateWizardViewMap(Map<String, AbstractWizardPanelView> wizardViewMap) {

    // Each wizard state enum name is mapped to the panel view shown for it.
    wizardViewMap.put(
      SendBitcoinState.SEND_DISPLAY_PAYMENT_REQUEST.name(),
      new SendBitcoinDisplayPaymentRequestPanelView(this, SendBitcoinState.SEND_DISPLAY_PAYMENT_REQUEST.name()));

    wizardViewMap.put(
      SendBitcoinState.SEND_ENTER_AMOUNT.name(),
      new SendBitcoinEnterAmountPanelView(this, SendBitcoinState.SEND_ENTER_AMOUNT.name()));

    wizardViewMap.put(
      SendBitcoinState.SEND_CONFIRM_AMOUNT.name(),
      new SendBitcoinConfirmPanelView(this, SendBitcoinState.SEND_CONFIRM_AMOUNT.name()));

    // Hardware-wallet confirmation and PIN entry states.
    wizardViewMap.put(
      SendBitcoinState.SEND_CONFIRM_HARDWARE.name(),
      new SendBitcoinConfirmHardwarePanelView(this, SendBitcoinState.SEND_CONFIRM_HARDWARE.name()));

    wizardViewMap.put(
      SendBitcoinState.SEND_ENTER_PIN_FROM_CONFIRM_HARDWARE.name(),
      new SendBitcoinEnterPinPanelView(this, SendBitcoinState.SEND_ENTER_PIN_FROM_CONFIRM_HARDWARE.name()));

    wizardViewMap.put(
      SendBitcoinState.SEND_REPORT.name(),
      new SendBitcoinReportPanelView(this, SendBitcoinState.SEND_REPORT.name()));

    // BIP70 payment-protocol memo states.
    wizardViewMap.put(
      SendBitcoinState.SEND_BIP70_PAYMENT_MEMO.name(),
      new SendBitcoinEnterPaymentMemoPanelView(this, SendBitcoinState.SEND_BIP70_PAYMENT_MEMO.name()));

    wizardViewMap.put(
      SendBitcoinState.SEND_BIP70_PAYMENT_ACK_MEMO.name(),
      new SendBitcoinShowPaymentACKMemoPanelView(this, SendBitcoinState.SEND_BIP70_PAYMENT_ACK_MEMO.name()));

  }

}
|
<reponame>fermi-lat/CalibData
// $Header: /nfs/slac/g/glast/ground/cvs/GlastRelease-scons/CalibData/CalibData/Tkr/TkrBase.h,v 1.6 2007/06/11 20:31:56 jrb Exp $
/// @file TkrBase.h
/// @author <NAME>
#ifndef CalibData_TkrBase_h
#define CalibData_TkrBase_h
#include <vector>
#include "CalibData/CalibBase.h"
// #include "CalibData/Tkr/TkrFinder.h"
#include "idents/TkrId.h"
#define TKRBASE_MAXROW 4
#define TKRBASE_MAXCOL 4
#define TKRBASE_MAXTOWER (TKRBASE_MAXROW * TKRBASE_MAXCOL)
class RootTkrBaseCnv;
namespace CalibData {
class UniBase;
class TkrFinder;
  /**
     Each derived, completely implemented tkr calibration class should
     register a suitable factory object which will produce the right
     kind of object of a class derived from UniBase
  */
  class UniFactoryBase {
  public:
    /// Factories are stateless by default; nothing to initialize
    UniFactoryBase() {}
    virtual ~UniFactoryBase() { };
    /// Produce a new per-uniplane data object; derived factories return
    /// an instance of their own UniBase-derived class
    virtual UniBase* makeUni();
  };
  /**
     Base class for Tkr calibrations other than bad strips, which have
     a fixed amount of data per Si layer, typically involving a
     per-strip or per-gtfe structure
  */
  class TkrBase : public CalibBase {
    // Persistency converter needs direct access to the tower/uniplane storage
    friend class ::RootTkrBaseCnv;

  public:
    /**
       Constructor configures its TkrFinder
    */
    TkrBase(unsigned nTowerRow=4, unsigned nTowerCol=4, unsigned nTray=19,
            bool indirect=true);

    virtual ~TkrBase();

    /// Fetch per-uniplane data by full TkrId
    virtual UniBase* getUni(const idents::TkrId& id);
    /// Fetch per-uniplane data by explicit tower/tray coordinates
    virtual UniBase* getUni(unsigned towerRow, unsigned towerCol,
                            unsigned tray, bool top);
    /// Store per-uniplane data by full TkrId
    /// NOTE(review): success/failure semantics of the bool return are not
    /// visible in this header -- confirm against the implementation
    virtual bool putUni(UniBase* data, const idents::TkrId& id);
    /// Store per-uniplane data by explicit tower/tray coordinates
    virtual bool putUni(UniBase* data, unsigned towerRow,
                        unsigned towerCol, unsigned tray, bool top);
    /// Hardware serial string for the tower at (towerRow, TowerCol)
    virtual const std::string* getHwserial(unsigned towerRow,
                                           unsigned TowerCol)
      const;

    // Get dimensioning information; needed when transforming to
    // permanent storage...might not need this

    /// Get # tower rows
    unsigned getNTowerRow() const;

    /// Get # tower columns
    unsigned getNTowerCol() const;

    /// Get # trays
    unsigned getNUnilayer() const;

    /// Get pointer to vector of uni for specified tower.
    std::vector<UniBase*> *getUnis(int iTow) {
      // or maybe RootTkrBaseCnv should provide this service
      return &(m_towers[iTow]->m_unis);
    }

    /// Get # fe chips / unilayer
    // unsigned getNChip() const {return m_finder->getNChip();}

    virtual const CLID& clID() const = 0; // must be overridden
    static const CLID& classID(); // shouldn't get called

    // Maybe won't need to be virtual after all
    virtual StatusCode update(CalibBase& other, MsgStream* log);

  protected:
    /**
       @class TkrTower
       Represents a tower's worth of tracker calibration data.
       Identify the tower, keep remaining information by uniplane.
       Different derived classes will have their own class
       for per-uniplane data, derived from UniBase.
    */
    class TkrTower {
      friend class ::RootTkrBaseCnv;
    public:
      unsigned m_iRow;                 // tower row index
      unsigned m_iCol;                 // tower column index
      std::string m_hwserial;          // tower hardware serial number
      std::vector <UniBase* > m_unis;  // per-uniplane calibration data

      TkrTower(unsigned row=0, unsigned col=0,
               std::string hwserial="")
        : m_iRow(row), m_iCol(col), m_hwserial(hwserial)
      { }

      /// Grow/shrink m_unis to hold n uniplane entries
      void resize(unsigned n);

      ~TkrTower();
    }; // end def TkrTower

    // Array below is only of pointers to towers. Allocate a tower
    // only when needed.

    /// Allocate (if necessary) and return the tower at flat index iTow
    TkrTower* makeTower(unsigned iTow, unsigned nUni=38);

    TkrFinder* m_finder;                    // id-to-index helper (see TkrFinder)
    UniFactoryBase* m_factory;              // makes UniBase objects on demand
    TkrTower* m_towers[TKRBASE_MAXTOWER];   // lazily allocated towers

    /// Default is true: keep vector of pointers to data.
    /// Else derived class keeps vector of data values and must
    /// do its own fetching and putting.
    bool m_indirect;

  private:
    static const CLID noCLID;

    /** Due to bug in gcc, gdb can't find symbols in constructors.
        This method is called by the constructor and does most
        of the work
    */
    void cGuts(unsigned nTowerRow, unsigned nTowerCol,
               unsigned nTray);
  };
}
#endif
|
'use strict';
const Pipeline = require('coinstac-pipeline');
const path = require('path');
const { fork } = require('child_process');
const exitHook = require('exit-hook');
/**
* Starts a simulator run with the given pipeline spec
*
* @param {Object} spec a valid pipeline spec
 * @param {String} [runMode='local'] whether to run in local or decentralized mode
 * @param {Number} [clientCount=1] number of simulated local clients
 * @param {String} [operatingDirectory='test'] base directory for pipeline working files
* @return {[type]} [description]
*/
const startRun = ({
  spec, runMode = 'local', clientCount = 1, operatingDirectory = 'test',
}) => {
  // Phase 1: boot the MQTT broker in a child process and wait until it
  // reports { started: true } (or forward its startup error).
  return new Promise((resolve, reject) => {
    // the execArgv opt are a work around for https://github.com/nodejs/node/issues/9435
    const mqtt = fork(path.resolve(__dirname, 'mqtt-server.js'), { execArgv: [], stdio: ['inherit', 'inherit', 'inherit', 'ipc'] });
    mqtt.on('message', (m) => {
      if (m.e) return reject(m.e);
      if (m.started) resolve();
    });
    // Kill the broker child when this process exits for any reason.
    exitHook(() => {
      mqtt.kill();
    });
  })
    .then(async () => {
      // Phase 2: create the remote pipeline (decentralized mode only) and
      // one local pipeline per simulated client.
      const pipelines = {
        locals: [],
      };
      clientCount = parseInt(clientCount, 10);
      if (runMode === 'decentralized') {
        // With an array spec, index 0 is the remote's spec and index i the
        // spec for client i; a plain object spec is shared by everyone.
        const remoteSpec = Array.isArray(spec) ? spec[0] : spec;
        const remoteManager = await Pipeline.create({
          clientId: 'remote',
          mode: 'remote',
          operatingDirectory: path.resolve(operatingDirectory),
          mqttRemoteURL: 'localhost',
          mqttRemotePort: '1883',
          mqttRemoteProtocol: 'mqtt:',
        });
        pipelines.remote = {
          manager: remoteManager,
          pipeline: remoteManager.startPipeline({
            spec: remoteSpec,
            runId: 'simulatorRun',
            // Build the { local0: 'local0', ... } client roster for the remote.
            clients: Array.from(Array(clientCount)).reduce((acc, elem, idx) => {
              acc[`local${idx}`] = `local${idx}`;
              return acc;
            }, {}),
            owner: 'local0',
          }),
        };
      }
      for (let i = 0; i < clientCount; i += 1) {
        const localSpec = Array.isArray(spec) ? spec[i] : spec;
        const localPipelineManager = await Pipeline.create({ // eslint-disable-line no-await-in-loop, max-len
          clientId: `local${i}`,
          mode: 'local',
          operatingDirectory: path.resolve(operatingDirectory),
        });
        pipelines.locals.push({
          manager: localPipelineManager,
          pipeline: localPipelineManager.startPipeline({
            spec: localSpec,
            runId: 'simulatorRun',
            owner: 'local0',
          }),
        });
      }
      // Phase 3: collect all pipeline results. NOTE(review): the error
      // indexing below relies on Object.keys(pipelines) ordering --
      // 'locals' was inserted first, 'remote' (if any) second, so
      // errors[0] is the array of local outcomes and errors[1] the remote's.
      const allResults = Promise.all(Object.keys(pipelines).map((key) => {
        if (key === 'locals') {
          return Promise.all(pipelines[key].map(
            // Cache each resolved result back onto the pipeline object;
            // a rejection is converted into a returned error value.
            (localP, index) => localP.pipeline.result
              .then((res) => { pipelines[key][index].pipeline.result = res; })
              .catch((e) => {
                return e;
              })
          ));
        }
        return pipelines.remote.pipeline.result
          .then((res) => { pipelines[key].pipeline.result = res; })
          .catch((e) => {
            return e;
          });
      }))
        .then((errors) => {
          // error sent to remote or the first local for local runs
          if (errors[1] || errors[0][0]) throw errors[1] || errors[0][0];
          return {
            remote: runMode === 'decentralized' ? pipelines.remote.pipeline.result : {},
            locals: pipelines.locals.map(local => local.pipeline.result),
          };
        });
      // Expose both the live pipeline handles and the aggregate promise.
      return { pipelines, allResults };
    });
};

module.exports = {
  startRun,
};
|
<reponame>sigtot/sanntid<gh_stars>0
package indicators
import (
"encoding/json"
"fmt"
"github.com/sigtot/elevio"
"github.com/sigtot/sanntid/pubsub"
"github.com/sigtot/sanntid/types"
"log"
"sync"
"testing"
"time"
)
// This test cannot fail. Just watch the lights :)
//
// It drives the indicator handler end to end against an elevator simulator
// expected on localhost:15657: it publishes an ack for a cab call and,
// later, the matching order-delivered message, with 2s pauses in between so
// a human can watch the panel lamps react. Verification is visual only;
// marshalling failures abort via log.Fatalf.
func TestStartHandlingIndicators(t *testing.T) {
	elevio.Init("localhost:15657", 4)
	var wg sync.WaitGroup
	quit := make(chan int)
	StartIndicatorHandler(quit, &wg)
	// Publishers for the two topics the indicator handler subscribes to.
	ackPubChan := pubsub.StartPublisher(pubsub.AckDiscoveryPort)
	orderDeliveredPubChan := pubsub.StartPublisher(pubsub.OrderDeliveredDiscoveryPort)
	// A cab call at floor 2; the same call is reused for the ack and the delivery.
	call := types.Call{Type: types.Cab, Floor: 2, Dir: types.InvalidDir, ElevatorID: ""}
	order1 := types.Order{Call: call}
	bid1 := types.Bid{Call: call, Price: 1, ElevatorID: ""}
	ack1 := types.Ack{Bid: bid1}
	js, err := json.Marshal(ack1)
	if err != nil {
		log.Fatalf(fmt.Sprintf("Could not marshal ack %s", err.Error()))
	}
	time.Sleep(2 * time.Second)
	ackPubChan <- js
	time.Sleep(2 * time.Second)
	js, err = json.Marshal(order1)
	if err != nil {
		log.Fatalf(fmt.Sprintf("Could not marshal order %s", err.Error()))
	}
	orderDeliveredPubChan <- js
	time.Sleep(2 * time.Second)
	quit <- 0
}
|
#!/bin/bash

# Scaffolded migration for the DirectorNino question page: appends routes
# and messages, patches CheckYourAnswersHelper, and relocates generated tests.

echo "Applying migration DirectorNino"

echo "Adding routes to conf/register.company.routes"
# NOTE(review): the blank line goes to app.routes while the routes go to
# register.company.routes -- confirm this is intended.
echo "" >> ../conf/app.routes
echo "GET /directorNino controllers.register.company.DirectorNinoController.onPageLoad(mode: Mode = NormalMode)" >> ../conf/register.company.routes
echo "POST /directorNino controllers.register.company.DirectorNinoController.onSubmit(mode: Mode = NormalMode)" >> ../conf/register.company.routes
echo "GET /changeDirectorNino controllers.register.company.DirectorNinoController.onPageLoad(mode: Mode = CheckMode)" >> ../conf/register.company.routes
echo "POST /changeDirectorNino controllers.register.company.DirectorNinoController.onSubmit(mode: Mode = CheckMode)" >> ../conf/register.company.routes

echo "Adding messages to conf.messages"
echo "" >> ../conf/messages.en
echo "directorNino.title = directorNino" >> ../conf/messages.en
echo "directorNino.heading = directorNino" >> ../conf/messages.en
# FIX: the "Option 1"/"Option 2" suffixes were outside the quoted string,
# relying on echo's argument joining; keep the full message in one quoted
# string so special characters and spacing are handled safely.
echo "directorNino.option1 = directorNino Option 1" >> ../conf/messages.en
echo "directorNino.option2 = directorNino Option 2" >> ../conf/messages.en
echo "directorNino.checkYourAnswersLabel = directorNino" >> ../conf/messages.en
echo "directorNino.error.required = Please give an answer for directorNino" >> ../conf/messages.en

echo "Adding helper method to CheckYourAnswersHelper"
# Insert a directorNino accessor right after the class declaration line.
awk '/class/ {\
     print;\
     print "";\
     print "  def directorNino: Option[AnswerRow] = userAnswers.get(identifiers.register.company.DirectorNinoId) map {";\
     print "    x => AnswerRow(\"directorNino.checkYourAnswersLabel\", s\"directorNino.$x\", true, controllers.register.company.routes.DirectorNinoController.onPageLoad(CheckMode).url)";\
     print "  }";\
     next }1' ../app/utils/CheckYourAnswersHelper.scala > tmp && mv tmp ../app/utils/CheckYourAnswersHelper.scala

echo "Moving test files from generated-test/ to test/"
rsync -avm --include='*.scala' -f 'hide,! */' ../generated-test/ ../test/
rm -rf ../generated-test/

echo "Migration DirectorNino completed"
|
<reponame>queued/SLR4V<filename>src/com/github/queued/slr4v/utils/DatasetParser.java<gh_stars>0
package com.github.queued.slr4v.utils;
import com.github.queued.slr4v.model.entity.Day;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Stream;
public class DatasetParser
{
    /**
     * Reads a CSV file where each row is {@code day,v1,v2,v3} and builds the
     * corresponding list of {@link Day} entities, one per input line, in
     * file order.
     *
     * @param fileName path of the CSV file to parse
     * @return the parsed days
     * @throws IOException if the file cannot be opened or read
     */
    public static List<Day> getArrayListFromCSV(String fileName) throws IOException {
        List<Day> days = new ArrayList<>();
        try (Stream<String> stream = Files.lines(Paths.get(fileName))) {
            stream.forEach(line -> {
                // First column is the day. The other 3, are the report.
                String[] parts = line.split(",");
                // Parse the report columns directly as doubles: the Day model
                // stores doubles anyway, and this also accepts fractional
                // values (e.g. "3.5") that Integer.parseInt would reject.
                // Integer-valued inputs parse to the same doubles as before.
                days.add(new Day(Integer.parseInt(parts[0]), new double[] {
                    Double.parseDouble(parts[1]),
                    Double.parseDouble(parts[2]),
                    Double.parseDouble(parts[3]) }));
            });
        }
        return days;
    }
}
|
<filename>applications/physbam/physbam-lib/Public_Library/PhysBAM_Solids/PhysBAM_Solids/Collisions/BW_COLLISIONS.cpp<gh_stars>10-100
//#####################################################################
// Copyright 2010.
// This file is part of PhysBAM whose distribution is governed by the license contained in the accompanying file PHYSBAM_COPYRIGHT.txt.
//#####################################################################
// Class BW_COLLISIONS
//#####################################################################
#include <PhysBAM_Tools/Krylov_Solvers/KRYLOV_VECTOR_BASE.h>
#include <PhysBAM_Tools/Matrices/FRAME.h>
#include <PhysBAM_Geometry/Implicit_Objects/IMPLICIT_OBJECT_TRANSFORMED.h>
#include <PhysBAM_Solids/PhysBAM_Deformables/Deformable_Objects/DEFORMABLE_BODY_COLLECTION.h>
#include <PhysBAM_Solids/PhysBAM_Deformables/Particles/DEFORMABLES_PARTICLES_FORWARD.h>
#include <PhysBAM_Solids/PhysBAM_Deformables/Particles/PARTICLES.h>
#include <PhysBAM_Solids/PhysBAM_Rigids/Particles/RIGID_BODY_PARTICLES.h>
#include <PhysBAM_Solids/PhysBAM_Rigids/Rigid_Bodies/RIGID_BODY.h>
#include <PhysBAM_Solids/PhysBAM_Solids/Collisions/BW_COLLISIONS.h>
#include <PhysBAM_Solids/PhysBAM_Solids/Solids/SOLID_BODY_COLLECTION.h>
#include <PhysBAM_Solids/PhysBAM_Solids/Solids_Evolution/BW_BACKWARD_EULER_SYSTEM.h>
using namespace PhysBAM;
//#####################################################################
// Constructor
//#####################################################################
template<class TV> BW_COLLISIONS<TV>::
BW_COLLISIONS(SOLID_BODY_COLLECTION<TV>& solid_body_collection_input)
    :solid_body_collection(solid_body_collection_input)
{
}
//#####################################################################
// Destructor
//#####################################################################
template<class TV> BW_COLLISIONS<TV>::
~BW_COLLISIONS()
{}
//#####################################################################
// Function Detect_Cloth_Body_Contact
//#####################################################################
// Scans every deformable particle against every rigid body's implicit
// surface and records each (particle, rigid body) pair currently in
// contact into cloth_body_constraints (duplicates suppressed).
template<class TV> void BW_COLLISIONS<TV>::
Detect_Cloth_Body_Contact()
{
    PARTICLES<TV>& particles=solid_body_collection.deformable_body_collection.particles;
    RIGID_BODY_PARTICLES<TV>& rigid_body_particles=solid_body_collection.rigid_body_collection.rigid_body_particle;
    // Detect cloth/body constraints. PhysBAM arrays are 1-based, hence
    // the rb=1..Size() / p=1..Size() loop bounds.
    for(int rb=1;rb<=rigid_body_particles.array_collection->Size();rb++){
        RIGID_BODY<TV>& rigid_body=solid_body_collection.rigid_body_collection.Rigid_Body(rb);
        IMPLICIT_OBJECT<VECTOR<T,TV::m> >& object_space_implicit_object=*rigid_body.implicit_object->object_space_implicit_object;
        // Inverse frame maps world-space particle positions into the body's
        // object space, computed once per body.
        FRAME<TV> frame=rigid_body.Frame().Inverse();
        for(int p=1;p<=particles.array_collection->Size();p++)
            if(object_space_implicit_object.Lazy_Inside(frame*particles.X(p)))
                cloth_body_constraints.Append_Unique(PAIR<int,int>(p,rb));}
}
//#####################################################################
// Function Remove_Separating_Cloth_Body_Contacts
//#####################################################################
// Computes the residual R = B - system*V and drops any cloth/body
// constraint whose residual at the particle has no positive component
// along the body's outward surface normal (i.e. the pair is separating).
template<class TV> void BW_COLLISIONS<TV>::
Remove_Separating_Cloth_Body_Contacts(BW_BACKWARD_EULER_SYSTEM<TV>& system,KRYLOV_VECTOR_BASE<T>& R,KRYLOV_VECTOR_BASE<T>& B,KRYLOV_VECTOR_BASE<T>& V,KRYLOV_VECTOR_BASE<T>& Q)
{
    PARTICLES<TV>& particles=solid_body_collection.deformable_body_collection.particles;
    R=B;system.Multiply(V,Q);R-=Q;
    VECTOR_T& actual_R=debug_cast<VECTOR_T&>(R);
    // Iterate backwards so Remove_Index_Lazy's swap-with-last is safe.
    for(int i=cloth_body_constraints.m;i>=1;i--){
        RIGID_BODY<TV>& rigid_body=solid_body_collection.rigid_body_collection.Rigid_Body(cloth_body_constraints(i).y);
        int p=cloth_body_constraints(i).x;
        if(TV::Dot_Product(actual_R.V(p),rigid_body.Implicit_Geometry_Normal(particles.X(p)))<=0) // TODO make sure this is the right direction
            cloth_body_constraints.Remove_Index_Lazy(i);}
}
//#####################################################################
// Explicit template instantiations for the supported scalar/dimension mix
template class BW_COLLISIONS<VECTOR<float,1> >;
template class BW_COLLISIONS<VECTOR<float,2> >;
template class BW_COLLISIONS<VECTOR<float,3> >;
#ifndef COMPILE_WITHOUT_DOUBLE_SUPPORT
template class BW_COLLISIONS<VECTOR<double,1> >;
template class BW_COLLISIONS<VECTOR<double,2> >;
template class BW_COLLISIONS<VECTOR<double,3> >;
#endif
|
<filename>open-sphere-base/mantle/src/test/java/io/opensphere/mantle/data/impl/specialkey/SpeedKeyTest.java<gh_stars>10-100
package io.opensphere.mantle.data.impl.specialkey;
import org.junit.Assert;
import org.junit.Test;
import io.opensphere.mantle.data.MetaDataInfo;
import io.opensphere.mantle.data.impl.DefaultMetaDataInfo;
/** Tests for {@link SpeedKey}. */
public class SpeedKeyTest
{
    /** Test for {@link SpeedKey#markSpecialColumn(MetaDataInfo, String)}. */
    @Test
    public void testMarkSpecialColumn()
    {
        assertMarksColumnWithUnit("Speed (m/s)", SpeedUnit.METERS_PER_SECOND);
        assertMarksColumnWithUnit("Speed (km/hr)", SpeedUnit.KILOMETERS_PER_HOUR);
    }

    /**
     * Marks the given column as a speed special column on a fresh
     * {@link DefaultMetaDataInfo} and asserts that the resulting special key
     * and its unit match the expected unit.
     *
     * @param column the column name whose unit suffix should be detected
     * @param unit the unit expected to be parsed from the column name
     */
    private void assertMarksColumnWithUnit(String column, SpeedUnit unit)
    {
        DefaultMetaDataInfo metaData = new DefaultMetaDataInfo();
        metaData.addKey(column, Double.class, this);
        SpeedKey.DEFAULT.markSpecialColumn(metaData, column);
        SpeedKey actual = (SpeedKey)metaData.getSpecialTypeForKey(column);
        SpeedKey expected = new SpeedKey(unit);
        Assert.assertEquals(expected, actual);
        Assert.assertEquals(expected.getKeyUnit(), actual.getKeyUnit());
    }
}
|
#! /bin/bash

# Publishes the latest daily-average miner power data, guarded by the
# presence of a PUBLISH marker file in the working directory.
if [ ! -f PUBLISH ]; then
  echo Skipping publishing, PUBLISH file is missing
  exit
fi

# Restrict word splitting to newlines and tabs so paths with spaces survive.
IFS="$(printf '\n\t')"

# ISO-8601 timestamp stamped into the published JSON envelope.
DATE=$(node -e 'console.log((new Date()).toISOString())')

# Latest power: only publish when the Spark job wrote its _SUCCESS marker.
mkdir -p dist/miner-power-daily-average-latest
if [ -f ../work/output/miner_power/by_miner_region/sum_avg_daily/json/_SUCCESS ] ; then
  PART=$(ls ../work/output/miner_power/by_miner_region/sum_avg_daily/json/part*.json | head -1)
  # Slurp the newline-delimited JSON rows into a { date, rows } envelope.
  cat $PART | jq -s "{ \
date: \"$DATE\", \
rows: .
}" > dist/miner-power-daily-average-latest/miner-power-by-region.json
fi
# Preview the output, then push the bucket (hub = textile bucket CLI).
(cd dist/miner-power-daily-average-latest; head miner-power-by-region.json; hub bucket push -y)
|
<reponame>w2ogroup/titan
package com.thinkaurelius.titan.graphdb.query;
import com.google.common.base.Preconditions;
import com.thinkaurelius.titan.core.*;
import com.thinkaurelius.titan.graphdb.query.keycondition.*;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Element;
import com.tinkerpop.blueprints.Vertex;
import org.apache.commons.collections.comparators.ComparableComparator;
import org.apache.commons.lang.StringUtils;
import java.util.Comparator;
/**
* (c) <NAME> (<EMAIL>)
*/
public class StandardElementQuery implements Query<StandardElementQuery> {

    /** Kind of graph element a query returns: vertices or edges. */
    public enum Type {
        VERTEX, EDGE;

        /** Blueprints element interface corresponding to this query type. */
        public Class<? extends Element> getElementType() {
            switch(this) {
                case VERTEX: return Vertex.class;
                case EDGE: return Edge.class;
                default: throw new IllegalArgumentException();
            }
        }
    }

    // Filter condition tree evaluated against element properties.
    private final KeyCondition<TitanKey> condition;
    // Element kind this query targets.
    private final Type type;
    // Name of the backing index to use, or null when none has been chosen.
    private final String index;
    // Maximum number of results; Query.NO_LIMIT means unbounded.
    private final int limit;

    /**
     * @param type element kind (vertex or edge); must not be null
     * @param condition filter condition tree; must not be null
     * @param limit maximum result count; must be &gt;= 0
     * @param index backing index name, may be null
     */
    public StandardElementQuery(Type type, KeyCondition<TitanKey> condition, int limit, String index) {
        Preconditions.checkNotNull(condition);
        Preconditions.checkNotNull(type);
        Preconditions.checkArgument(limit>=0);
        this.condition = condition;
        this.type=type;
        this.index = index;
        this.limit=limit;
    }

    /** Copy constructor rebinding an existing query to a (non-blank) index. */
    public StandardElementQuery(StandardElementQuery query, String index) {
        Preconditions.checkNotNull(query);
        Preconditions.checkArgument(StringUtils.isNotBlank(index));
        this.condition=query.condition;
        this.type=query.type;
        this.limit=query.limit;
        this.index=index;
    }

    public KeyCondition<TitanKey> getCondition() {
        return condition;
    }

    public Type getType() {
        return type;
    }

    public boolean hasIndex() {
        return index!=null;
    }

    /** Backing index name; only legal to call when {@link #hasIndex()} is true. */
    public String getIndex() {
        Preconditions.checkArgument(hasIndex());
        return index;
    }

    @Override
    public String toString() {
        // Renders as [condition](limit):TYPE, with the limit part omitted
        // for unbounded queries.
        StringBuilder b = new StringBuilder();
        b.append("[").append(condition.toString()).append("]");
        if (hasLimit()) b.append("(").append(limit).append(")");
        b.append(":").append(type.toString());
        return b.toString();
    }

    // NOTE: hashCode/equals deliberately ignore the index field, so two
    // queries differing only in their backing index compare equal.
    @Override
    public int hashCode() {
        return condition.hashCode()*9676463 + type.hashCode()*4711 + limit;
    }

    @Override
    public boolean equals(Object other) {
        if (this==other) return true;
        else if (other==null) return false;
        else if (!getClass().isInstance(other)) return false;
        StandardElementQuery oth = (StandardElementQuery)other;
        return type==oth.type && condition.equals(oth.condition) && limit==oth.limit;
    }

    @Override
    public boolean hasLimit() {
        return limit!=Query.NO_LIMIT;
    }

    @Override
    public int getLimit() {
        return limit;
    }

    @Override
    public boolean isInvalid() {
        // A zero limit can never produce results.
        return limit<=0;
    }

    @Override
    public boolean isSorted() {
        return false;
    }

    @Override
    public Comparator getSortOrder() {
        return new ComparableComparator();
    }

    @Override
    public boolean hasUniqueResults() {
        return true;
    }

    /** True when the element satisfies this query's condition tree. */
    public boolean matches(TitanElement element) {
        return matchesCondition(element,condition);
    }

    /**
     * Recursively evaluates a condition tree against an element.
     * An atom reads the value addressed by its key (as a property key for
     * properties, as a label for relations) and applies the atom's relation;
     * NOT/AND/OR nodes combine child results. An OR with no children
     * evaluates to true here, mirroring the AND case where an empty child
     * loop also yields true.
     */
    public static final<T extends TitanType> boolean matchesCondition(TitanElement element, KeyCondition<T> condition) {
        if (condition instanceof KeyAtom) {
            KeyAtom<T> atom = (KeyAtom<T>) condition;
            Object value = null;
            T type = atom.getKey();
            if (type.isPropertyKey()) value = element.getProperty((TitanKey)type);
            else value = ((TitanRelation)element).getProperty((TitanLabel)type);
            return atom.getRelation().satisfiesCondition(value,atom.getCondition());
        } else if (condition instanceof KeyNot) {
            return !matchesCondition(element, ((KeyNot) condition).getChild());
        } else if (condition instanceof KeyAnd) {
            for (KeyCondition c : ((KeyAnd<T>)condition).getChildren()) {
                if (!matchesCondition(element, c)) return false;
            }
            return true;
        } else if (condition instanceof KeyOr) {
            if (!condition.hasChildren()) return true;
            for (KeyCondition c : ((KeyOr<T>)condition).getChildren()) {
                if (matchesCondition(element, c)) return true;
            }
            return false;
        } else throw new IllegalArgumentException("Invalid condition: " + condition);
    }
}
|
# Launch a PPO training run (200M env steps) for the
# "test-stl-nw-profit-tiny+" experiment: 100 training workers, 50 eval
# workers, linear LR decay and PopArt reward normalisation.
# NOTE(review): backslash continuations forbid inline comments below; the
# meaning of each flag lives in train/train.py's argument parser.
python train/train.py \
    test-stl-nw-profit-tiny+ \
    --experiment-name=test-stl-nw-profit-tiny+ \
    --num-env-steps=200000000 \
    --algo=ppo \
    --use-gae \
    --lr=2.5e-4 \
    --clip-param=0.1 \
    --value-loss-coef=0.5 \
    --num-processes=100 \
    --eval-num-processes=50 \
    --num-steps=500 \
    --num-mini-batch=4 \
    --log-interval=1 \
    --save-interval=10 \
    --eval-interval=20 \
    --use-linear-lr-decay \
    --popart-reward \
    --entropy-coef=0.01 \
    --gamma=0.999
|
<gh_stars>0
import LED_display as LD
import threading
import time
import copy
import os
import alphabet
import number
from score_func import *
# Seconds between display refreshes in the render loop.
delay = 0.1

# Run the LED driver loop in the background; a daemon thread lets the
# process exit even while the display thread is still running.
t = threading.Thread(target=LD.main, args=())
t.daemon = True  # modern replacement for the deprecated t.setDaemon(True)
t.start()

# drawChar(alphabet.alpha_a, oScreen, 5, 7, (1, 1), 1)

# 16x32 blank frame (one row per LED row, 0 == pixel off), equivalent to the
# previous hand-written literal but with independent rows guaranteed.
iScreen = [[0] * 32 for _ in range(16)]

# Layout tables for the twelve character cells on the board: glyph widths and
# heights, top-left (x, y) positions, and two alternating colour palettes
# (3 = yellow, 7 = white; see the colour legend at the bottom of this file).
w_list = [5, 5, 5, 3, 3, 3, 5, 5, 5, 3, 3, 3]
h_list = [7, 7, 7, 5, 5, 5, 7, 7, 7, 5, 5, 5]
xy_list = [(1, 1), (7, 1), (13, 1), (20, 2), (24, 2), (28, 2), (1, 9), (7, 9), (13, 9), (20, 10), (24, 10), (28, 10)]
color_list1 = [3, 3, 3, 3, 3, 3, 7, 7, 7, 7, 7, 7]
color_list2 = [7, 7, 7, 7, 7, 7, 3, 3, 3, 3, 3, 3]
def main():
    # Interactive entry point: ask for the game and the score, store the
    # score, then loop forever rendering the high-score board both on the
    # console and on the LED matrix.
    # NOTE(review): `game` stays unbound when game_num is neither 1 nor 2,
    # which would raise NameError at the input() below -- confirm intended.
    game_num = int(input("game : "))
    if game_num == 1:
        game = "dodger"
    elif game_num == 2:
        game = "brick"
    score = int(input("score : "))
    # index: placement of this score among the stored high scores
    # (from score_func); presumably 0/1/2 select the palettes below --
    # TODO confirm against score_func.get_index.
    index = get_index(game, score)
    add_score(game, score)
    matrix_list = get_matrix_list(game)
    os.system('cls' if os.name == 'nt' else 'clear')
    while True:
        oScreen = copy.deepcopy(iScreen)
        # fill matrix
        # - Change oScreen matrix output in this area
        if index == 0:
            for i in range(len(matrix_list)):
                drawChar(matrix_list[i], oScreen, w_list[i], h_list[i], xy_list[i], color_list1[i])
        elif index == 1:
            for i in range(len(matrix_list)):
                drawChar(matrix_list[i], oScreen, w_list[i], h_list[i], xy_list[i], color_list2[i])
        elif index == 2:
            for i in range(len(matrix_list)):
                drawChar(matrix_list[i], oScreen, w_list[i], h_list[i], xy_list[i], 7)
        '''
        drawChar(char1, oScreen, 5, 7, (1,1), 2)
        drawChar(char2, oScreen, 5, 7, (7,1), 2)
        drawChar(char3, oScreen, 5, 7, (13,1), 2)
        drawChar(num1, oScreen, 3, 5, (20,2), 2)
        drawChar(num2, oScreen, 3, 5, (24,2), 2)
        drawChar(num3, oScreen, 3, 5, (28,2), 2)
        drawChar(char4, oScreen, 5, 7, (1,9), 2)
        drawChar(char5, oScreen, 5, 7, (7,9), 2)
        drawChar(char6, oScreen, 5, 7, (13,9), 2)
        drawChar(num4, oScreen, 3, 5, (20,10), 2)
        drawChar(num5, oScreen, 3, 5, (24,10), 2)
        drawChar(num6, oScreen, 3, 5, (28,10), 2)
        '''
        # - Draw Matrix
        consoleMatrix(oScreen)
        drawMatrix(oScreen)
        time.sleep(delay)
        os.system('cls' if os.name == 'nt' else 'clear')
def consoleMatrix(screen):
    """Dump the screen matrix to stdout, one row per line (debug aid)."""
    for row in screen:
        print(row)
def drawChar(char, screen, width, height, direction, color):
    """Stamp a glyph bitmap onto the screen buffer.

    Cells where ``char`` holds 1 are painted with ``color`` at an offset of
    ``direction`` (x, y); 0 cells leave the screen untouched.
    """
    x0, y0 = direction
    for row in range(height):
        for col in range(width):
            if char[row][col] == 1:
                screen[y0 + row][x0 + col] = color
def get_matrix_list(game):
    """Translate the score text for ``game`` into a list of glyph bitmaps.

    ``get_alpha_num(game)`` yields a string of characters. Digits map to
    ``number.num_<digit>``, the literal 's' maps to ``number.alpha_s`` and
    capital letters map to ``alphabet.alpha_<lowercase letter>``. Any other
    character is silently skipped, matching the original branch table.
    """
    matrix_list = []
    for ch in get_alpha_num(game):
        if '0' <= ch <= '9':
            # Attribute-name dispatch replaces the former 30-branch elif chain.
            matrix_list.append(getattr(number, 'num_' + ch))
        elif ch == 's':
            matrix_list.append(number.alpha_s)
        elif 'A' <= ch <= 'Z':
            matrix_list.append(getattr(alphabet, 'alpha_' + ch.lower()))
    return matrix_list
def drawBricks(bricks, screen):
    """Paint each brick as a 4-wide, 2-tall cell on the screen.

    The left and right edge columns are drawn in colour 4 (blue), the two
    inner columns in colour 3 (yellow); brick[0]/brick[1] give the cell's
    top-left (x, y).
    """
    for brick in bricks:
        left, top = brick[0], brick[1]
        for offset in range(4):
            edge = offset == 0 or offset == 3
            for row in range(top, top + 2):
                screen[row][left + offset] = 4 if edge else 3
def drawMatrix(array):
    # Push every cell of the colour matrix to the LED panel; the panel is
    # addressed (x, y) while the matrix is indexed [row][column].
    for x in range(len(array[0])):
        for y in range(len(array)):
            LD.set_pixel(x, y, array[y][x])
# color = 0 : 'None', 1 : 'Red', 2 : 'Green', 3 : 'Yellow', 4 : 'Blue', 5 : 'Purple', 6 : 'Crystal', 7 : 'White'
if __name__ == '__main__':
    main()
|
#include "Common.h"
namespace Lunia {
    namespace XRated {
        namespace Lobby {
            // Accessor for the lobby's singleton user registry; `timeout` is
            // forwarded to UserRegistryClass<User>::GetInstance.
            // NOTE(review): whether `timeout` matters after the first call
            // depends on GetInstance's implementation -- confirm there.
            UserRegistryClass<User>& UserRegistry(const uint32& timeout)
            {
                return UserRegistryClass<User>::GetInstance(timeout);
            }
        }
    }
}
import React from "react"
import { Container, Row, Col } from 'react-bootstrap'
// import Isometrics from "../components/isometrics"
import Layout from "../components/layout"
import SEO from "../components/seo"
// Lab page: currently renders a single centred GIF. The Isometrics
// experiment below is kept commented out until it is ready to ship.
const Lab = ({ location }) => {
  return (
    <Layout animateKey={location.key}>
      <SEO title="Lab" />
      <Container fluid="lg" className="my-5 p-5" >
        <Row className="d-flex justify-content-center">
          <Col xs={6} className="text-center">
            <img width="100%" src="https://media.giphy.com/media/kHItTUhmy50guDJH2H/giphy.gif" alt="have a great day" />
          </Col>
        </Row>
        {/* <Row className="d-flex justify-content-center mt-5">
          <Col xs={6} className="text-center">
            <Isometrics />
          </Col>
        </Row> */}
      </Container>
    </Layout>
  )
}

export default Lab
|
<gh_stars>1-10
package com.nbouma.jsonldjava.impl;
import com.nbouma.jsonldjava.core.JsonLdTripleCallback;
import com.nbouma.jsonldjava.core.RDFDataset;
import com.nbouma.jsonldjava.core.RDFDatasetUtils;
/**
 * A {@link JsonLdTripleCallback} that serialises the produced RDF dataset
 * into the N-Quads text format via {@link RDFDatasetUtils#toNQuads}.
 */
public class NQuadTripleCallback implements JsonLdTripleCallback {
    /** @return the dataset rendered as an N-Quads string. */
    @Override
    public Object call(RDFDataset dataset) {
        return RDFDatasetUtils.toNQuads(dataset);
    }
}
def handle_turn_event(traitor, did_something, var):
    # Logs the traitor turning and, during an active game phase, flags the
    # turn and announces it to the main channel; always consumes the event.
    # NOTE(review): `evt`, `channels`, `messages` and `debuglog` are not
    # defined in this fragment -- they presumably come from the enclosing
    # module / event framework; confirm `evt` is in scope before relying on
    # this function.
    debuglog(traitor, "(traitor) TURNING")
    if did_something and var.PHASE in var.GAME_PHASES:
        var.TRAITOR_TURNED = True
        channels.Main.send(messages["traitor_turn_channel"])
    evt.prevent_default = True
    evt.stop_processing = True
#!/bin/bash

# short url manager: https://github.com/nhoizey/1y
# Creates a dated note file plus a short-URL redirect file, then commits and
# pushes both (optionally with [skip ci] to suppress deployment).
site="site"

# Title, note and shorty come from positional args, prompting for any missing.
if [ -n "$1" ]
then
  title=$1
else
  read -p "Title: " title
fi

if [ -n "$2" ]
then
  note=$2
else
  read -p "Note: " note
fi

if [ -n "$3" ]
then
  shorty=$3
else
  read -p "Enter the shorty: " shorty
fi

notes_dir="notes"
urls_dir="urls"

#Title=`echo $shorty | tr -d '[:punct:]' | tr 'A-Z' 'a-z'`
#Title=`echo $shorty | tr '[:blank:]' '-' | tr -s '-' | tr 'A-Z' 'a-z'`
# Slugify the title: punctuation -> spaces, trim, blanks -> dashes, lowercase.
Title=`echo $title | tr '[:punct:]' ' ' | awk '{$1=$1};1' | tr '[:blank:]' '-' | tr -s '-' | tr 'A-Z' 'a-z'`
# Re-join the slug words with leading dashes (yields "-word1-word2-...").
for word in $Title
do
  dashedTitle=${dashedTitle}-${word}
done

# create note file named YYYY-MM-DD-<slug>.md with front matter and six
# empty numbered sections.
dateDashedTitle="`date +%Y-%m-%d`${dashedTitle}"
filename=$notes_dir/$dateDashedTitle".md"
#filename="`date +%Y-%m-%d`${dashedTitle}.md"
touch $filename
echo "---" > $filename
echo "title: ${title}" >> $filename
echo "note: ${note}" >> $filename
echo "---" >> $filename
echo "" >> $filename
echo "" >> $filename
echo "# 0" >> $filename
echo "" >> $filename
echo "" >> $filename
echo "" >> $filename
echo "# 1" >> $filename
echo "" >> $filename
echo "" >> $filename
echo "" >> $filename
echo "# 2" >> $filename
echo "" >> $filename
echo "" >> $filename
echo "" >> $filename
echo "# 3" >> $filename
echo "" >> $filename
echo "" >> $filename
echo "" >> $filename
echo "# 4" >> $filename
echo "" >> $filename
echo "" >> $filename
echo "" >> $filename
echo "# 5" >> $filename
echo "" >> $filename
echo "" >> $filename

# edit file (open at line 9, the first section body)
vi +9 $filename
echo

# site name = dir name
#site="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd | sed 's#.*/##')"
#site=$(pwd | awk -F/ '{print $(NF-1)}')
site=$(pwd | awk -F/ '{print $(NF)}') # now launched from repo root

url="https://$site/notes/$dateDashedTitle"
rel_url="/notes/$dateDashedTitle" # relative url -- works with any 1y site
shorty_url="https://$site/$shorty"

echo
echo $shorty_url" -> "$url
echo

# create url file: front matter holding the redirect target for the shorty
shorty_filename="$shorty.md"
urls_filename=$urls_dir/$shorty_filename
touch $urls_filename
echo "---" > $urls_filename
echo "url: ${rel_url}" >> $urls_filename
#echo "url: ${url}" >> $urls_filename
echo "---" >> $urls_filename
echo "" >> $urls_filename

# Publish? N to add [skip ci] in git commit message, so it won't deploy
while true; do
    read -p "Publish? N to add [skip ci] in git commit message: " yn
    case $yn in
        [Yy]* ) git add $filename $urls_filename; git commit -m "new 1y note : $url -> $note and new 1y shorty: $shorty_url"; git push -u origin master ; break ;;
        [Nn]* ) git add $filename $urls_filename; git commit -m "new 1y note : $url -> $note and new 1y shorty: $shorty_url [skip ci]" ; git push -u origin master ; break ;;
        * ) echo "Please answer yes or no.";;
    esac
done

## pause before add, commit, publish
#echo "Press <enter> to publish"
#read

## commit and publish
#git add $filename
#git commit -m "new 1y note: $site/$shorty -> $note"
#git push -u origin master
|
import { Engine, EngineMode, SystemMode, System } from '@trixt0r/ecs';
import { RunOptions } from './runOptions';
import { YaveEngine } from '@/engine';
import { YaveSystem } from './system';
export class YaveECS extends Engine {
  /** Reference to the actual yave engine that uses this ECS engine. */
  public readonly yaveEngine: YaveEngine;

  /**
   * Creates the ECS engine and remembers its owning yave engine.
   * @param engine The yave engine this ECS engine belongs to.
   */
  constructor(engine: YaveEngine) {
    super();
    this.yaveEngine = engine;
  }

  /**
   * Updates all systems in this engine with the given options.
   * Overridden so `options` is a required {@link RunOptions}, which makes the
   * `isRenderSystem` filtering in {@link shouldRunSystem} possible.
   *
   * @param options Options forwarded to every system.
   * @param mode Execution strategy (default / successive / parallel).
   * @returns Nothing, or a promise for the async modes.
   */
  run(
    options: RunOptions,
    mode: EngineMode = EngineMode.DEFAULT
  ): void | Promise<void> {
    // Dispatches to runDefault / runSuccessive / runParallel by mode name.
    return this[mode](options);
  }

  /**
   * Fire-and-forget update: runs each eligible system synchronously,
   * without waiting for a resolve or reject of any system.
   */
  protected runDefault(options?: RunOptions): void {
    for (const system of this._activeSystems) {
      if (this.shouldRunSystem(system, options)) {
        system.run(options, SystemMode.SYNC);
      }
    }
  }

  /**
   * Sequential update: waits for each system before starting the next one.
   * NOTE(review): this awaits `SystemMode.SYNC` runs, exactly as the original
   * did — confirm whether `SystemMode.ASYNC` was intended here.
   */
  protected async runSuccessive(options?: RunOptions): Promise<void> {
    for (const system of this._activeSystems) {
      if (this.shouldRunSystem(system, options)) {
        await system.run(options, SystemMode.SYNC);
      }
    }
  }

  /**
   * Parallel update: starts every eligible system and waits for all of them
   * to resolve or reject.
   */
  protected async runParallel(options?: RunOptions): Promise<void> {
    const pending = this._activeSystems
      .filter(system => this.shouldRunSystem(system, options))
      .map(system => system.run(options, SystemMode.ASYNC));
    await Promise.all(pending);
  }

  /**
   * A system should run iff its render-system flag matches the current pass:
   * render systems only while rendering, everything else only outside of it.
   * An undefined flag counts as "not rendering" / "not a render system".
   * @param system The system to check for.
   * @param options The options to pass.
   */
  protected shouldRunSystem(system: System, options?: RunOptions): boolean {
    // TODO: Maybe I should just keep track of renderSystems and systems as two separate arrays so I don't have to check every frame
    const rendering = options?.isRendering === true;
    const renderSystem = (system as YaveSystem).isRenderSystem === true;
    return rendering === renderSystem;
  }
}
|
<reponame>hoelzl/CleanCodeCppCatch<gh_stars>0
#pragma once
#include <cstdint>
#include <iostream>
#include <string>
#include <vector>
template <typename T>
void print_address(const std::string& prefix, T obj)
{
    // Prints `prefix` followed by the address held in `obj` in hex notation.
    // Bug fix: the hex prefix was "Ox" (capital letter O) instead of "0x".
    // NOTE: std::hex is sticky and leaves std::cout in hex mode afterwards,
    // matching the original behavior.
    std::cout << prefix << " 0x" << std::hex
              << reinterpret_cast<std::uintptr_t>(obj) << std::endl;
}
void print_content_addresses(const std::string& name, const std::vector<int>& v);
template <typename T1, typename T2>
void print_dist(const std::string& prefix, T1 obj1, T2 obj2)
{
    // Prints the byte distance between two addresses (obj2 - obj1) in decimal.
    const auto lo = reinterpret_cast<std::uintptr_t>(obj1);
    const auto hi = reinterpret_cast<std::uintptr_t>(obj2);
    std::cout << prefix << std::dec << (hi - lo) << std::endl;
}
void print_memory_layout();
|
# Print the command-line usage and abort the script.
prompt() {
    echo "Syntax: bash refine.sh <GPU_ID> <SIZE> <ADDON [concrete/commit]>"
    exit
}
# Require at least <GPU_ID> and <SIZE>; otherwise show usage and exit.
if [[ $# -lt 2 ]]; then
prompt;
fi
GPU=$1;          # CUDA device id to expose to the training job
SIZE=${2};       # dataset size / sub-task selector
ADDON=${3:-""}   # optional task suffix, e.g. "concrete" or "commit"
TASK="refine"
# Append the addon to the task name when one was given.
if [[ ${ADDON} != "" ]]; then
TASK="${TASK}-${ADDON}";
fi
CODE_HOME_DIR=`realpath ../..`;               # repository root (two levels up)
DATA_DIR="${CODE_HOME_DIR}/data/finetuning";  # fine-tuning data root
# Prepare the data directory for this task.
# NOTE(review): the actual download step is still unimplemented (TODO below).
function download() {
mkdir -p ${DATA_DIR}/${TASK};
cdir=`pwd`;
cd $DATA_DIR/refine;
## TODO: add the code downloading the data;
cd ${cdir};
}
# Fine-tune the pretrained model on the refinement task; all artifacts go
# under models/finetuning/<TASK>.
function finetune() {
    OUTPUT_DIR="${CODE_HOME_DIR}/models/finetuning/${TASK}";
    mkdir -p ${OUTPUT_DIR};
    LOG="${OUTPUT_DIR}/finetuning.log";
    SUMMARY_DIR="${OUTPUT_DIR}/summary";
    mkdir -p ${SUMMARY_DIR};
    CACHE_DIR="${OUTPUT_DIR}/cache";
    mkdir -p ${CACHE_DIR};
    RES_DIR="${OUTPUT_DIR}/results";
    mkdir -p $RES_DIR;
    # Fixed typo: PRETEAINED -> PRETRAINED (variable is local to this function).
    PRETRAINED_MODEL_BASE="${CODE_HOME_DIR}/models/pretrained/";
    PRETRAINING_EXP_NAME="unidir"
    PRETRAINED_MODEL_NAME=""; ## TODO: add the checkpoint name here
    PRETRAINED_MODEL_PATH="${PRETRAINED_MODEL_BASE}/${PRETRAINING_EXP_NAME}/${PRETRAINED_MODEL_NAME}";
    export PYTHONIOENCODING=utf-8;
    export PYTHONPATH=$PYTHONPATH:$CODE_HOME_DIR;
    SCRIPT_PATH="${CODE_HOME_DIR}/src/finetuning/generation.py";
    export CUDA_VISIBLE_DEVICES=${GPU};
    # Training hyper-parameters.
    BATCH_SIZE=32;
    GRADIENT_ACCUM_STEP=16;
    NUM_EPOCHS=30;
    PATIENCE=15;
    LEARNING_RATE=5e-5;
    SRC_LEN=320;
    TGT_LEN=128;
    # Bug fix: --sub_task previously used ${size} (lowercase), which is never
    # set; the CLI argument is stored in ${SIZE}.
    python $SCRIPT_PATH \
        --do_train --do_eval --do_eval_bleu --do_test \
        --task ${TASK} --sub_task ${SIZE} \
        --model_type codet5 \
        --data_num -1 \
        --warmup_steps 500 \
        --learning_rate ${LEARNING_RATE} \
        --num_train_epochs ${NUM_EPOCHS} \
        --patience ${PATIENCE} \
        --tokenizer_name ${PRETRAINED_MODEL_PATH} \
        --model_name_or_path ${PRETRAINED_MODEL_PATH} \
        --data_dir ${DATA_DIR} \
        --cache_path ${CACHE_DIR} \
        --output_dir ${OUTPUT_DIR} \
        --summary_dir ${SUMMARY_DIR} \
        --save_last_checkpoints \
        --always_save_model \
        --res_dir ${RES_DIR} \
        --res_fn ${RES_DIR}/results.txt \
        --train_batch_size ${BATCH_SIZE} \
        --gradient_accumulation_steps ${GRADIENT_ACCUM_STEP} \
        --eval_batch_size ${BATCH_SIZE} \
        --max_source_length ${SRC_LEN} \
        --max_target_length ${TGT_LEN} 2>&1 | tee ${LOG}
}
# Fetch the data (once implemented), then launch fine-tuning.
download;
finetune;
|
# Spin up a local kind cluster and deploy OpenFaaS onto it.
kind create cluster
# Create the openfaas / openfaas-fn namespaces.
kubectl apply -f ./faas-netes/namespaces.yml
# Gateway basic-auth credentials.
# NOTE(review): admin/admin is development-only; change for any shared cluster.
kubectl -n openfaas create secret generic basic-auth \
--from-literal=basic-auth-user=admin \
--from-literal=basic-auth-password=admin
# Deploy the OpenFaaS core services, then show what is running.
kubectl apply -f ./faas-netes/yaml
kubectl get pods -n openfaas
kubectl get service -n openfaas
<filename>website/src/templates/blogPostTemplate.tsx
import { graphql } from 'gatsby'
import { Link } from 'gatsby'
import * as React from 'react'
import { Helmet } from 'react-helmet'
import Layout from '../components/Layout'
import SocialLinks from '../components/SocialLinks'
import { eventLogger } from '../EventLogger'
/**
 * Renders a single markdown blog post with social meta tags and a
 * tag-dependent "read more" link.
 */
export default class ContentfulTemplate extends React.Component<any, any> {
    constructor(props: any) {
        super(props)
    }

    /**
     * Fires an analytics event when the visitor highlights a `docker run`
     * snippet inside a <code> element (treated as install intent).
     * Declared as an arrow property so add/removeEventListener receive the
     * same function reference.
     */
    private logSelectDockercommand = (): void => {
        const selection = document.getSelection()
        // anchorNode is the standardized name for the nonstandard baseNode.
        const node = selection && selection.anchorNode
        if (
            node &&
            node.parentNode &&
            node.parentNode.nodeName === 'CODE' &&
            node.parentNode.textContent &&
            node.parentNode.textContent.includes('docker run')
        ) {
            eventLogger.trackInstallServerCommandHighlighted('blog')
        }
    }

    public componentDidMount(): void {
        if (document) {
            document.getElementsByTagName('body')[0].setAttribute('style', 'background-image:none')
            document.addEventListener('mouseup', this.logSelectDockercommand)
        }
    }

    public componentWillUnmount(): void {
        // Bug fix: remove the listener so it does not leak across
        // client-side navigations.
        if (document) {
            document.removeEventListener('mouseup', this.logSelectDockercommand)
        }
    }

    public render(): JSX.Element | null {
        const md = this.props.data.markdownRemark
        const title = md.frontmatter.title
        const author = md.frontmatter.author
        const content = md.html
        const date = md.frontmatter.publishDate
        const excerpt = md.excerpt
        const tags = md.frontmatter.tags || ''
        // Bug fix: heroImage is queried inside `frontmatter`, not on the node.
        // TODO(review): confirm the frontmatter value has a Contentful-style
        // `.file.url`; anything else falls back to the Sourcegraph mark.
        const hero = md.frontmatter.heroImage
        const image =
            hero && hero.file
                ? `https:${hero.file.url}`
                : 'https://about.sourcegraph.com/sourcegraph-mark.png'
        // Bug fix: the page query exposes the slug under `fields`, not `md.slug`.
        let slug = md.fields.slug
        let readMoreLink
        if (tags.includes('graphql')) {
            slug = 'graphql/' + slug
            readMoreLink = '/graphql'
        } else if (tags.includes('gophercon') || tags.includes('dotGo')) {
            slug = 'go/' + slug
            readMoreLink = '/go'
        } else {
            slug = 'blog/' + slug
            readMoreLink = '/blog'
        }
        return (
            <Layout location={this.props.location}>
                <div className="bg-white text-dark">
                    <Helmet>
                        <title>{title}</title>
                        <meta property="og:title" content={title} />
                        <meta property="og:url" content={`https://about.sourcegraph.com/${slug}`} />
                        <meta property="og:description" content={excerpt} />
                        <meta property="og:image" content={image} />
                        <meta property="og:type" content="website" />
                        <meta name="twitter:site" content="@srcgraph" />
                        <meta name="twitter:card" content="summary_large_image" />
                        <meta name="twitter:title" content={title} />
                        <meta name="twitter:image" content={image} />
                        <meta name="twitter:description" content={excerpt} />
                        <meta name="description" content={excerpt} />
                    </Helmet>
                    <div className="blog-post">
                        <div className="blog-post__wrapper">
                            <section className="blog-post__title">
                                <h1>{title}</h1>
                                <p>
                                    By {author} on {date}
                                </p>
                            </section>
                            <hr className="blog-post__title--rule" />
                            <section className="blog-post__body">
                                <div dangerouslySetInnerHTML={{ __html: content }} />
                                <hr />
                                <div style={{ height: '0.5em' }} />
                                <Link to={readMoreLink}>
                                    <button className="btn btn-outline-primary">Read more posts</button>
                                </Link>
                                <div style={{ height: '1em' }} />
                                <div>
                                    <div className="mb-4">
                                        <SocialLinks url={`https://about.sourcegraph.com/${slug}`} title={title} />
                                    </div>
                                </div>
                            </section>
                        </div>
                    </div>
                </div>
            </Layout>
        )
    }
}
// Page query for a single blog post, matched by slug. Note the shape: the
// slug is exposed under `fields`, and `heroImage` lives inside `frontmatter`.
export const pageQuery = graphql`
    query blogPostTemplate($fileSlug: String) {
        markdownRemark(fields: { slug: { eq: $fileSlug } }) {
            frontmatter {
                title
                heroImage
                author
                tags
                publishDate(formatString: "MMMM D, YYYY")
            }
            html
            excerpt
            fields {
                slug
            }
        }
    }
`
|
<gh_stars>0
import {ReportCardRescheduledAtom} from './report-card-rescheduled.model'
import {maxBy} from '../utils/functions'
// Returns the reschedule entry with the most recent `lastModified` timestamp,
// or undefined when no list was supplied.
export const latestReportCardReschedule = (
    reschedules?: ReportCardRescheduledAtom[]
): ReportCardRescheduledAtom | undefined => {
    if (!reschedules) {
        return undefined
    }
    return maxBy(reschedules, (left, right) => left.lastModified.getTime() > right.lastModified.getTime())
}
|
module BrNfe
module Product
module Nfe
module ItemTax
# IPI (tax on industrialized products) item-level tax data for an NF-e
# product invoice. Attribute names follow the Brazilian NF-e layout; each
# attribute is aliased to its official XML tag via alias_attribute.
class Ipi < BrNfe::ActiveModelBase
	# IPI TAX SITUATION CODE (CST)
	# 00 = Inbound with credit recovery
	# 01 = Inbound taxed at zero rate
	# 02 = Exempt inbound
	# 03 = Non-taxed inbound
	# 04 = Tax-immune inbound
	# 05 = Inbound under suspension
	# 49 = Other inbound operations
	# 50 = Taxed outbound
	# 51 = Outbound taxed at zero rate
	# 52 = Exempt outbound
	# 53 = Non-taxed outbound
	# 54 = Tax-immune outbound
	# 55 = Outbound under suspension
	# 99 = Other outbound operations
	#
	# <b>Type: </b> _Number_ <- string
	# <b>Required: </b> _Yes_
	# <b>Example: </b> _00_
	# <b>Length: </b> _2_
	# <b>tag: </b> CST
	#
	attr_accessor :codigo_cst
	# Left-pads the code to two digits (e.g. "1" -> "01"); nil when unset.
	def codigo_cst
		"#{@codigo_cst}".rjust(2, '0') if @codigo_cst.present?
	end
	alias_attribute :CST, :codigo_cst

	# IPI FRAMING CLASS FOR CIGARETTES AND BEVERAGES
	# Filled in according to normative acts issued by the Brazilian federal
	# revenue service (Observation 2).
	#
	# <b>Type: </b> _String_
	# <b>Required: </b> _No_
	# <b>Example: </b> _C1324_
	# <b>Length: </b> _max: 5_
	# <b>tag: </b> clEnq
	#
	attr_accessor :classe_enquadramento
	alias_attribute :clEnq, :classe_enquadramento

	# CNPJ OF THE PRODUCER OF THE GOODS, WHEN DIFFERENT FROM THE ISSUER.
	# ONLY FOR DIRECT OR INDIRECT EXPORT CASES.
	# Non-significant zeros must be included.
	#
	# <b>Type: </b> _String_
	# <b>Required: </b> _No_
	# <b>Example: </b> _00.000.000/0001-00 OR 01234567890123_
	# <b>Length: </b> _14_
	# <b>tag: </b> CNPJProd
	#
	attr_accessor :cnpj_produtor
	# Strips all non-digit characters (punctuation of a formatted CNPJ).
	def cnpj_produtor
		"#{@cnpj_produtor}".gsub(/[^\d]/,'')
	end
	alias_attribute :CNPJProd, :cnpj_produtor

	# IPI CONTROL SEAL CODE
	# Filled in according to normative acts issued by the Brazilian federal
	# revenue service (Observation 3).
	#
	# <b>Type: </b> _String_
	# <b>Required: </b> _No_
	# <b>Example: </b> _SELO12345654_
	# <b>Length: </b> _max: 60_
	# <b>tag: </b> cSelo
	#
	attr_accessor :codigo_selo
	alias_attribute :cSelo, :codigo_selo

	# CONTROL SEAL QUANTITY
	#
	# <b>Type: </b> _Number_
	# <b>Required: </b> _No_
	# <b>Example: </b> _50_
	# <b>Length: </b> _max: 12_
	# <b>tag: </b> qSelo
	#
	attr_accessor :quantidade_selo
	# Strips all non-digit characters from the quantity.
	def quantidade_selo
		"#{@quantidade_selo}".gsub(/[^\d]/,'')
	end
	alias_attribute :qSelo, :quantidade_selo

	# IPI LEGAL FRAMING CODE
	# Table yet to be published by the RFB; use 999 while the table
	# does not exist.
	#
	# <b>Type: </b> _String_
	# <b>Required: </b> _Yes_
	# <b>Example: </b> _999_
	# <b>Default: </b> _999_
	# <b>Length: </b> _max: 3_
	# <b>tag: </b> cEnq
	#
	attr_accessor :codigo_enquadramento
	alias_attribute :cEnq, :codigo_enquadramento

	# IPI calculation base amount.
	# Fill in when the IPI is computed from a rate.
	#
	# <b>Type: </b> _Float_
	# <b>Required: </b> _No_
	# <b>Example: </b> _152.38_
	# <b>tag: </b> vBC
	#
	attr_accessor :base_calculo
	alias_attribute :vBC, :base_calculo

	# IPI rate (percentage).
	#
	# <b>Type: </b> _Float_
	# <b>Required: </b> _No_
	# <b>Example: </b> _12.00_
	# <b>tag: </b> pIPI
	#
	attr_accessor :aliquota
	alias_attribute :pIPI, :aliquota

	# TOTAL QUANTITY IN THE STANDARD TAXATION UNIT
	# (ONLY FOR PRODUCTS TAXED PER UNIT)
	#
	# <b>Type: </b> _Float_
	# <b>Required: </b> _No_
	# <b>Example: </b> _12.00_
	# <b>tag: </b> qUnid
	#
	attr_accessor :quantidade_unidade
	alias_attribute :qUnid, :quantidade_unidade

	# AMOUNT PER TAXABLE UNIT
	#
	# <b>Type: </b> _Float_
	# <b>Required: </b> _No_
	# <b>Example: </b> _12.00_
	# <b>tag: </b> vUnid
	#
	attr_accessor :total_unidade
	alias_attribute :vUnid, :total_unidade

	# IPI AMOUNT
	#
	# <b>Type: </b> _Float_
	# <b>Required: </b> _No_ (Yes if CST in [00 49 50 99])
	# <b>Example: </b> _12.00_
	# <b>tag: </b> vIPI
	#
	attr_accessor :total
	alias_attribute :vIPI, :total

	# Defaults applied by ActiveModelBase when the attribute is not set.
	def default_values
		{
			codigo_enquadramento: '999',
		}
	end

	validates :codigo_cst, presence: true
	validates :codigo_cst, inclusion: {in: %w[00 01 02 03 04 05 49 50 51 52 53 54 55 99]}
	validates :classe_enquadramento, length: {maximum: 5}
	validates :cnpj_produtor, length: {is: 14}, allow_blank: true
	validates :codigo_selo, length: {maximum: 60}
	validates :quantidade_selo, length: {maximum: 12}
	validates :codigo_enquadramento, presence: true
	validates :codigo_enquadramento, length: {maximum: 3}
	# The IPI amount is mandatory (and non-negative) for the CSTs that
	# represent taxed operations.
	with_options if: lambda{|r| r.codigo_cst.in?(%w[00 49 50 99])} do |record|
		record.validates :total, presence: true
		record.validates :total, numericality: {greater_than_or_equal_to: 0.0}, allow_blank: true
	end
end
end
end
end
end |
package admin
import (
adminHandlers "github.com/backpulse/core/handlers/admin"
"github.com/gorilla/mux"
)
// handleGalleries registers the admin gallery endpoints on r. Every handler
// is wrapped in ProtectedRoute, so authentication is enforced by the wrapper.
// NOTE(review): gorilla/mux matches routes in registration order, so the
// more specific "/galleries/{name}/..." paths are registered before the
// catch-all "/galleries/{name}" — keep this order.
func handleGalleries(r *mux.Router) {
	// Single-gallery read / delete / update by gallery id.
	r.Handle("/gallery/{id}", ProtectedRoute(adminHandlers.GetGallery)).Methods("GET")
	r.Handle("/gallery/{id}", ProtectedRoute(adminHandlers.DeleteGallery)).Methods("DELETE")
	r.Handle("/gallery/{id}", ProtectedRoute(adminHandlers.UpdateGallery)).Methods("PUT")
	// Per-site gallery collection operations.
	r.Handle("/galleries/{name}/{galleryID}/preview/{id}", ProtectedRoute(adminHandlers.SetGalleryPreview)).Methods("PUT")
	r.Handle("/galleries/{name}/default/{id}", ProtectedRoute(adminHandlers.SetDefaultGallery)).Methods("PUT")
	r.Handle("/galleries/{name}/indexes", ProtectedRoute(adminHandlers.UpdateGalleriesIndexes)).Methods("PUT")
	r.Handle("/galleries/{name}/{galleryName}", ProtectedRoute(adminHandlers.CreateGallery)).Methods("POST")
	r.Handle("/galleries/{name}", ProtectedRoute(adminHandlers.GetGalleries)).Methods("GET")
}
|
package cyclops.monads.jdk;
import static cyclops.monads.AnyM.fromStream;
import static java.util.Arrays.asList;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasItems;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import com.oath.cyclops.anym.AnyMSeq;
import cyclops.container.control.Maybe;
import cyclops.container.immutable.impl.Seq;
import cyclops.container.immutable.impl.Vector;
import cyclops.function.combiner.Monoid;
import cyclops.function.companion.Reducers;
import cyclops.monads.AnyM;
import cyclops.monads.Witness;
import cyclops.monads.Witness.optional;
import cyclops.monads.function.AnyMFunction1;
import cyclops.monads.function.AnyMFunction2;
import cyclops.reactive.ReactiveSeq;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.junit.Test;
/**
 * Exercises the {@code AnyM} wrapper: construction from JDK and cyclops
 * containers, transformation, cycling, folding, zipping and function lifting.
 */
public class AnyMTest {

    // Mutable counter shared by the cycleWhile/cycleUntil predicates below.
    int count;

    @Test
    public void listTest() {
        List<String> greetings = AnyM.fromList(Arrays.asList(1, 2, 3)).map(i -> "hello" + i).unwrap();
        assertThat(greetings, equalTo(Arrays.asList("hello1", "hello2", "hello3")));
    }

    @Test
    public void collectList() {
        assertThat(AnyM.fromList(Arrays.asList(1, 2, 2)).collect(Collectors.toSet()).size(), equalTo(2));
    }

    @Test
    public void flatMapWithListComprehender() {
        List<Integer> source = Arrays.asList(1, 2, 3);
        AnyMSeq<Witness.list, Integer> wrapped = AnyM.fromList(source);
        AnyM<Witness.list, Integer> mapped = wrapped.flatMap(e -> wrapped.unit(e));
        List<Integer> unwrapped = mapped.unwrap();
        assertEquals(source, unwrapped);
    }

    @Test
    public void testForEach() {
        fromStream(Stream.of(asList(1, 3))).flatMap(c -> AnyM.fromArray(c)).stream().forEach(System.out::println);
    }

    /**
     * should no longer compile!
     *
     * @Test public void testForEachCfFlatMapToStream() { AnyM.fromCompletableFuture(CompletableFuture.completedFuture(asList(1,3)))
     * .flatMap(c->AnyM.fromStream(c.stream())) .stream() .forEach(System.out::println);
     * <p>
     * }
     **/
    @Test
    public void test() {
        List<Integer> doubled = fromStream(Stream.of(asList(1, 3))).flatMap(c -> fromStream(c.stream()))
                .stream().map(i -> i * 2).peek(System.out::println).collect(Collectors.toList());
        assertThat(Arrays.asList(2, 6), equalTo(doubled));
    }

    @Test
    public void testCycleWhile() {
        count = 0;
        assertThat(fromStream(Stream.of(1, 2, 2)).stream().cycleWhile(next -> count++ < 6).collect(Collectors.toList()),
                equalTo(Arrays.asList(1, 2, 2, 1, 2, 2)));
    }

    @Test
    public void testCycleUntil() {
        count = 0;
        assertThat(fromStream(Stream.of(1, 2, 2)).stream().cycleUntil(next -> count++ > 6).collect(Collectors.toList()),
                equalTo(Arrays.asList(1, 2, 2, 1, 2, 2, 1)));
    }

    @Test
    public void testCycleUntilReactiveSeq() {
        count = 0;
        assertThat(fromStream(ReactiveSeq.of(1, 2, 2)).stream().cycleUntil(next -> count++ > 6).collect(Collectors.toList()),
                equalTo(Arrays.asList(1, 2, 2, 1, 2, 2, 1)));
    }

    @Test
    public void testCycle() {
        assertThat(fromStream(Stream.of(1, 2, 2)).stream().cycle(3).collect(Collectors.toList()),
                equalTo(Arrays.asList(1, 2, 2, 1, 2, 2, 1, 2, 2)));
    }

    @Test
    public void testCycleReduce() {
        assertThat(fromStream(Stream.of(1, 2, 2)).stream().cycle(Reducers.toCountInt(), 3).collect(Collectors.toList()),
                equalTo(Arrays.asList(3, 3, 3)));
    }

    @Test
    public void testJoin() {
        assertThat(fromStream(Stream.of(1, 2, 2)).map(b -> AnyM.fromArray(b)).to(AnyM::flatten).stream().toList(),
                equalTo(Arrays.asList(1, 2, 2)));
    }

    @Test
    public void testToSet() {
        assertThat(fromStream(Stream.of(1, 2, 2)).stream().toSet().size(), equalTo(2));
    }

    @Test
    public void testToList() {
        assertThat(fromStream(Stream.of(1, 2, 3)).stream().toList(), equalTo(Arrays.asList(1, 2, 3)));
    }

    @Test
    public void testCollect() {
        assertThat(fromStream(Stream.of(1, 2, 3)).stream().collect(Collectors.toList()), equalTo(Arrays.asList(1, 2, 3)));
    }

    @Test
    public void testToListFlatten() {
        // The trailing null is dropped via Maybe.ofNullable + isPresent.
        assertThat(fromStream(Stream.of(1, 2, 3, null)).map(Maybe::ofNullable).filter(Maybe::isPresent)
                        .map(Maybe::toOptional).map(Optional::get).stream().toList(),
                equalTo(Arrays.asList(1, 2, 3)));
    }

    @Test
    public void testToListOptional() {
        assertThat(AnyM.fromOptional(Optional.of(1)).stream().toList(), equalTo(Arrays.asList(1)));
    }

    @Test
    public void testFold() {
        Supplier<AnyM<Witness.stream, String>> letters = () -> fromStream(Stream.of("a", "b", "c"));
        assertThat("cba", equalTo(letters.get().stream().foldRight(Reducers.toString(""))));
        assertThat("abc", equalTo(letters.get().stream().reduce(Reducers.toString(""))));
        assertThat(3, equalTo(letters.get().map(i -> "" + i.length()).stream().foldMapRight(Reducers.toCountInt())));
        assertThat(3, equalTo(letters.get().map(i -> "" + i.length()).stream().foldMap(Reducers.toCountInt())));
    }

    @Test
    public void traversableTest() {
        List<List<Integer>> nested = AnyM.fromOptional(Optional.of(Arrays.asList(1, 2, 3, 4, 5, 6)))
                .stream().collect(Collectors.toList());
        assertThat(nested.get(0), hasItems(1, 2, 3, 4, 5, 6));
    }

    @Test
    public void testFlatMap() {
        AnyMSeq<Witness.stream, List<Integer>> nested =
                fromStream(Stream.of(Arrays.asList(1, 2, 3), Arrays.asList(1, 2, 3)));
        AnyM<Witness.stream, Integer> flat = nested.flatMap(c -> fromStream(c.stream()));
        assertThat(flat.stream().toList(), equalTo(Arrays.asList(1, 2, 3, 1, 2, 3)));
    }

    @SuppressWarnings("unchecked")
    @Test
    public void zipOptional() {
        Stream<List<Integer>> zipped = fromStream(Stream.of(1, 2, 3)).stream()
                .zip(AnyM.fromOptional(Optional.of(2)), (a, b) -> Arrays.asList(a, b)).stream();
        List<Integer> firstPair = zipped.collect(Collectors.toList()).get(0);
        assertThat(firstPair.get(0), equalTo(1));
        assertThat(firstPair.get(1), equalTo(2));
    }

    @Test
    public void zipStream() {
        Stream<List<Integer>> zipped = fromStream(Stream.of(1, 2, 3)).stream()
                .zipWithStream(Stream.of(2, 3, 4), (a, b) -> Arrays.asList(a, b)).stream();
        List<Integer> secondPair = zipped.collect(Collectors.toList()).get(1);
        assertThat(secondPair.get(0), equalTo(2));
        assertThat(secondPair.get(1), equalTo(3));
    }

    @Test
    public void sliding() {
        List<Seq<Integer>> windows =
                fromStream(Stream.of(1, 2, 3, 4, 5, 6)).stream().sliding(2).collect(Collectors.toList());
        assertThat(windows.get(0), hasItems(1, 2));
        assertThat(windows.get(1), hasItems(2, 3));
    }

    @Test
    public void slidingIncrement() {
        List<Seq<Integer>> windows =
                fromStream(Stream.of(1, 2, 3, 4, 5, 6)).stream().sliding(3, 2).collect(Collectors.toList());
        assertThat(windows.get(0), hasItems(1, 2, 3));
        assertThat(windows.get(1), hasItems(3, 4, 5));
    }

    @Test
    public void grouped() {
        List<Vector<Integer>> groups =
                fromStream(Stream.of(1, 2, 3, 4, 5, 6)).stream().grouped(3).collect(Collectors.toList());
        assertThat(groups.get(0), hasItems(1, 2, 3));
        assertThat(groups.get(1), hasItems(4, 5, 6));
    }

    @Test
    public void startsWith() {
        assertTrue(fromStream(Stream.of(1, 2, 3, 4)).stream().startsWith(Arrays.asList(1, 2, 3)));
    }

    @Test
    public void scanLeft() {
        assertEquals(asList("", "a", "ab", "abc"),
                fromStream(Stream.of("a", "b", "c")).stream().scanLeft(Reducers.toString("")).toList());
    }

    @Test
    public void reducer1() {
        Monoid<Integer> sum = Monoid.of(0, (a, b) -> a + b);
        Monoid<Integer> mult = Monoid.of(1, (a, b) -> a * b);
        Seq<Integer> result = fromStream(Stream.of(1, 2, 3, 4)).stream().reduce(Arrays.asList(sum, mult));
        assertThat(result, equalTo(Seq.of(10, 24)));
    }

    @Test
    public void aggregate() {
        List<Integer> result = fromStream(Stream.of(1, 2, 3, 4)).aggregate(AnyM.fromArray(5))
                .stream().concatMap(Seq::stream).toList();
        assertThat(result, equalTo(Arrays.asList(1, 2, 3, 4, 5)));
    }

    @Test
    public void aggregate2() {
        Seq<Integer> result = AnyM.fromOptional(Optional.of(1)).aggregate(AnyM.ofNullable(2)).stream().toList().get(0);
        assertThat(result, equalTo(Seq.of(1, 2)));
    }

    @Test
    public void testSorted() {
        assertThat(fromStream(Stream.of(4, 3, 6, 7)).stream().sorted().toList(), equalTo(Arrays.asList(3, 4, 6, 7)));
    }

    @Test
    public void testSortedCompartor() {
        assertThat(fromStream(Stream.of(4, 3, 6, 7)).stream().sorted((a, b) -> b - a).toList(),
                equalTo(Arrays.asList(7, 6, 4, 3)));
    }

    @Test
    public void testSkip() {
        assertThat(fromStream(Stream.of(4, 3, 6, 7)).stream().skip(2).toList(), equalTo(Arrays.asList(6, 7)));
    }

    @Test
    public void testSkipUntil() {
        assertThat(fromStream(Stream.of(4, 3, 6, 7)).stream().dropUntil(i -> i == 6).toList(),
                equalTo(Arrays.asList(6, 7)));
    }

    @Test
    public void testSkipWhile() {
        assertThat(fromStream(Stream.of(4, 3, 6, 7)).stream().sorted().dropWhile(i -> i < 6).toList(),
                equalTo(Arrays.asList(6, 7)));
    }

    @Test
    public void testLimit() {
        assertThat(fromStream(Stream.of(4, 3, 6, 7)).stream().limit(2).toList(), equalTo(Arrays.asList(4, 3)));
    }

    @Test
    public void testLimitUntil() {
        assertThat(fromStream(Stream.of(4, 3, 6, 7)).stream().takeUntil(i -> i == 6).toList(),
                equalTo(Arrays.asList(4, 3)));
    }

    @Test
    public void testLimitWhile() {
        assertThat(fromStream(Stream.of(4, 3, 6, 7)).stream().sorted().takeWhile(i -> i < 6).toList(),
                equalTo(Arrays.asList(3, 4)));
    }

    @Test
    public void testLiftMSimplex() {
        AnyMFunction1<optional, Integer, Integer> lifted = AnyM.liftF((Integer a) -> a + 3);
        AnyM<optional, Integer> result = lifted.apply(AnyM.ofNullable(3));
        assertThat(result.<Optional<Integer>>unwrap().get(), equalTo(6));
    }

    @Test
    public void testLiftM2Simplex() {
        AnyMFunction2<optional, Integer, Integer, Integer> lifted = AnyM.liftF2((Integer a, Integer b) -> a + b);
        AnyM<optional, Integer> result = lifted.apply(AnyM.ofNullable(3), AnyM.ofNullable(4));
        assertThat(result.<Optional<Integer>>unwrap().get(), equalTo(7));
    }

    @Test
    public void testLiftM2AnyMValue() {
        AnyMFunction2<optional, Integer, Integer, Integer> lifted = AnyM.liftF2((Integer a, Integer b) -> a + b);
        AnyM<optional, Integer> result = lifted.apply(AnyM.ofNullable(3), AnyM.ofNullable(4));
        assertThat(result.<Optional<Integer>>unwrap().get(), equalTo(7));
    }

    @Test
    public void testLiftM2SimplexNull() {
        AnyMFunction2<optional, Integer, Integer, Integer> lifted = AnyM.liftF2((Integer a, Integer b) -> a + b);
        AnyM<optional, Integer> result = lifted.apply(AnyM.ofNullable(3), AnyM.ofNullable(null));
        assertThat(result.<Optional<Integer>>unwrap().isPresent(), equalTo(false));
    }

    // Plain two-argument addition, lifted via method reference below.
    private Integer add(Integer a, Integer b) {
        return a + b;
    }

    @Test
    public void testLiftM2Mixed() {
        AnyMFunction2<optional, Integer, Integer, Integer> lifted = AnyM.liftF2(this::add);
        AnyM<optional, Integer> result = lifted.apply(AnyM.ofNullable(3), AnyM.ofNullable(4));
        assertThat(result.<Optional<Integer>>unwrap().get(), equalTo(7));
    }
}
|
import React from 'react';
import {makeStyles} from '@material-ui/core/styles';
import {getDukeImgPath} from "../../functions/paths";
// JSS styles for the sidebar container: a full-height, theme-colored strip
// of fixed width whose children are positioned relative to it.
const useStyles = makeStyles({
    root: {
        height: "100%",
        backgroundColor: "var(--primary-color)",
        position: "relative",
        width: 50
    },
});

// The sidebar simply renders whatever children it is given.
type SideBarCompProps = {
    children: React.ReactNode;
}
/** Vertical sidebar strip; renders its children inside the styled container. */
const SideBarComp = ({ children }: SideBarCompProps) => {
    const styles = useStyles();
    return (
        <div className={styles.root}>
            {/* <img src={dukeImgPath} className={classes.logo}/> */}
            {children}
        </div>
    );
};

export default SideBarComp;
|
#!/bin/bash
# Static-analysis gate: runs black (check only), flake8 and mypy, folding the
# failures into EXIT_STATUS so every tool reports before the script fails.
set -ex

EXIT_STATUS=0

# Autoformatter *first*, to avoid double-reporting errors
black --check setup.py asynctb \
|| EXIT_STATUS=$?

# Run flake8 without pycodestyle and import-related errors
flake8 asynctb/ \
--ignore=D,E,W,F401,F403,F405,F821,F822\
|| EXIT_STATUS=$?

# Run mypy in strict mode (static type checking)
mypy --strict -p asynctb || EXIT_STATUS=$?

# Finally, leave a really clear warning of any issues and exit
if [ $EXIT_STATUS -ne 0 ]; then
cat <<EOF
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

Problems were found by static analysis (listed above).
To fix formatting and see remaining errors, run

   pip install -r test-requirements.txt
   black setup.py asynctb
   ./check.sh

in your local checkout.

!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
EOF
exit 1
fi
exit 0
|
import numpy as np


def simulate(m, k, c, dt, t_max, x0, v0, force):
    """Integrate the damped, driven oscillator m*x'' + c*x' + k*x = force(t).

    Uses the semi-implicit (symplectic) Euler method: the velocity is updated
    first and the *new* velocity advances the displacement, exactly as the
    original inline loop did.

    Args:
        m: Mass.
        k: Spring constant.
        c: Damping coefficient.
        dt: Time step.
        t_max: Simulation end time; the loop runs while t < t_max.
        x0: Initial displacement.
        v0: Initial velocity.
        force: Callable ``t -> external force at time t``.

    Returns:
        List of ``(t, x, v)`` tuples, one per completed step.
    """
    history = []
    t, x, v = 0.0, x0, v0
    while t < t_max:
        # Newton's second law solved for acceleration.
        a = (force(t) - c * v - k * x) / m
        # Semi-implicit Euler: update v first, then x with the new v.
        v += a * dt
        x += v * dt
        t += dt
        history.append((t, x, v))
    return history


# Given parameters
m = 1.0       # mass
k = 1.0       # spring constant
c = 0.1       # damping coefficient
dt = 0.01     # time step
t_max = 10.0  # maximum time for simulation


# External force function
def f(t):
    return np.sin(t)  # Example external force function


# Run from unit displacement at rest and print each step (same output lines
# as the original inline loop).
for step_t, step_x, step_v in simulate(m, k, c, dt, t_max, 1.0, 0.0, f):
    print(step_t, step_x, step_v)
package Bigdata.Twitter

// Connection settings for the Twitter socket source: hostname and TCP port.
case class SocketConfig(host: String, port: Int)
<filename>src/app/thought/thought-process/thought-process.component.spec.ts
import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { ThoughtProcessComponent } from './thought-process.component';
// Smoke test for ThoughtProcessComponent: compiles it in a bare TestBed
// module and verifies the component can be instantiated.
describe('ThoughtProcessComponent', () => {
  let component: ThoughtProcessComponent;
  let fixture: ComponentFixture<ThoughtProcessComponent>;

  // Compile the component's template/styles asynchronously before each spec.
  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [ ThoughtProcessComponent ]
    })
    .compileComponents();
  }));

  // Create a fresh fixture and run initial change detection.
  beforeEach(() => {
    fixture = TestBed.createComponent(ThoughtProcessComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
  });

  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
|
#!/bin/bash
# GATK pre-processing pipeline (Illumina reads -> analysis-ready BAM) using
# Picard + bwa. Earlier stages are kept commented out; this run resumes at
# the MergeBamAlignment step.
module load picard
module load bwa
module load samtools
module load parallel
PICARD_HOME="/work/LAS/mhufford-lab/arnstrm/newNAM/analyses/l-gatk-illumina"
REF="/work/LAS/mhufford-lab/arnstrm/newNAM/analyses/l-gatk-illumina/b-genome/B73.PLATINUM.pseudomolecules-v1.fasta"
ulimit -c unlimited
PLT="illumina"
# Timestamp (date'T'time) used as the read-group run date.
TDATE=$(date '+%Y-%m-%d %H:%M:%S' |sed 's/ /T/g')
# check
echo $PLT
echo $TDATE
R1=(*_R1_*.gz)
# Bug fix: R2 previously globbed *_R1_*.gz (copy/paste), which would pair
# every R1 file with itself instead of its mate.
R2=(*_R2_*.gz)
# convert fastq to sam and add readgroups
#parallel -j 4 --link "java -Djava.io.tmpdir=$TMPDIR -Xmx50G -jar $PICARD_HOME/picard.jar FastqToSam FASTQ={1} FASTQ2={2} OUTPUT={1.}_fastqtosam.bam READ_GROUP_NAME={1.} SAMPLE_NAME={1.}_name LIBRARY_NAME={1.}_lib PLATFORM_UNIT=${PLT} PLATFORM=illumina SEQUENCING_CENTER=ISU RUN_DATE=${TDATE}" ::: ${R1[@]} ::: ${R2[@]}
# mark adapters
#parallel -j 4 "java -Djava.io.tmpdir=$TMPDIR -Xmx50G -jar $PICARD_HOME/picard.jar MarkIlluminaAdapters I={.}_fastqtosam.bam O={.}_markilluminaadapters.bam M={.}_markilluminaadapters_metrics.txt TMP_DIR=${TMPDIR}" ::: ${R1[@]}
# convert bam back to fastq for mapping
#parallel -j 4 "java -Djava.io.tmpdir=$TMPDIR -Xmx50G -jar $PICARD_HOME/picard.jar SamToFastq I={.}_markilluminaadapters.bam FASTQ={.}_samtofastq_interleaved.fq CLIPPING_ATTRIBUTE=XT CLIPPING_ACTION=2 INTERLEAVE=true NON_PF=true TMP_DIR=${TMPDIR}" ::: ${R1[@]}
# mapping reads to indexed genome
#parallel -j 1 "bwa mem -M -t 35 -p $REF {.}_samtofastq_interleaved.fq | samtools view -buS - > {.}_bwa_mem.bam" ::: ${R1[@]}
# merging alignments
parallel -j 1 "java -Djava.io.tmpdir=$TMPDIR -Xmx180G -jar $PICARD_HOME/picard.jar MergeBamAlignment R=$REF UNMAPPED_BAM={.}_fastqtosam.bam ALIGNED_BAM={.}_bwa_mem.bam O={.}_snippet_mergebamalignment.bam CREATE_INDEX=true ADD_MATE_CIGAR=true CLIP_ADAPTERS=false CLIP_OVERLAPPING_READS=true INCLUDE_SECONDARY_ALIGNMENTS=true MAX_INSERTIONS_OR_DELETIONS=-1 PRIMARY_ALIGNMENT_STRATEGY=MostDistant ATTRIBUTES_TO_RETAIN=XS TMP_DIR=${TMPDIR}" ::: ${R1[@]}
# mark duplicates
parallel -j 1 "java -Djava.io.tmpdir=$TMPDIR -Xmx183G -jar $PICARD_HOME/picard.jar MarkDuplicates INPUT={.}_snippet_mergebamalignment.bam OUTPUT={.}_final.bam METRICS_FILE={.}_mergebamalignment_markduplicates_metrics.txt OPTICAL_DUPLICATE_PIXEL_DISTANCE=2500 CREATE_INDEX=true TMP_DIR=$TMPDIR" ::: ${R1[@]}
echo >&2 "ALL DONE!"
#parallel "rm {.}_fastqtosam.bam {.}_markilluminaadapters.bam {.}_samtofastq_interleaved.fq {.}_bwa_mem.bam {.}_snippet_mergebamalignment.bam" ::: ${R1[@]}
|
<gh_stars>0
import Ux from 'ux';
// Confirmation-guarded logout: clears the session, routes back to the login
// entry, and erases cached user data from the state tree.
// Bug fix: the original contained stray non-comment lines left over from
// mangled multi-byte comments (e.g. a bare "...Session" token on its own
// line); they would be evaluated as identifiers and throw at runtime.
const fnLogout = (reference, key = "") => {
    Ux.aiConfirm(reference, () => {
        // Clear the session
        Ux.toLogout();
        // Route back to the login entry
        Ux.toRoute(reference, Ux.Env.ENTRY_LOGIN);
        // Erase user data held under State
        Ux.eraseTree(reference, ["user"]);
    }, "window", key);
};
// Menu-key -> handler dispatch table.
const _dispatch = {
    "key.menu.logout": fnLogout
};

// Curried menu-select handler: invokes the table entry for the clicked key,
// or yields a no-op function when the key is unmapped.
const fnSelect = (reference) => (event) => {
    const handler = _dispatch[event.key];
    return handler ? handler(reference, event.key) : () => {
    };
};

export default {
    fnSelect
}
|
const db = require('../db')
const Sequelize = require('sequelize')
// `order` model: tracks an order through its lifecycle
// (pending -> purchased/cancelled -> fulfilled), with timestamps per state.
const Order = db.define(
  'order',
  {
    // Lifecycle state; new orders start out pending.
    status: {
      type: Sequelize.ENUM('pending', 'purchased', 'cancelled', 'fulfilled'),
      defaultValue: 'pending'
    },
    // External checkout session identifier.
    // NOTE(review): presumed to come from the payment provider — confirm
    // against the code that sets it.
    sessionId: Sequelize.STRING,
    purchasedAt: Sequelize.DATE,
    cancelledAt: Sequelize.DATE,
    fulfilledAt: Sequelize.DATE,
    // Snapshot of the total (presumably in cents, given INTEGER) at purchase
    // time — TODO confirm the unit with the pricing code.
    totalPriceAtPurchase: Sequelize.INTEGER
  },
  {
    hooks: {
      // An order belongs to either a guest purchase profile or a user, never
      // both: when both ids are present, drop the userId.
      beforeValidate: async orderInstance => {
        if (orderInstance.purchaseProfileId && orderInstance.userId) {
          await orderInstance.update({userId: null})
          return orderInstance
        }
      },
      // Stamp purchasedAt the first time an order flips to 'purchased'.
      afterUpdate: orderInstance => {
        if (
          orderInstance.status === 'purchased' &&
          orderInstance.purchasedAt === null
        )
          return orderInstance.update({purchasedAt: new Date()})
        return orderInstance
      }
    }
  }
)

module.exports = Order
|
#!/bin/sh
# Builds the QTS functionality-test bitstream for the given FPGA platform.

# Check usage, and get the platform.
if [ $# -ne 1 ]; then
    echo 'Usage: ./build_qts_functionality_bitstream.sh fpga_platform.'
    exit 1;
fi
fpga_platform=$1

# Make sure the environment is set up.
# Bug fix: `source` is a bash-ism; under #!/bin/sh (e.g. dash) the portable
# spelling is the POSIX dot command. The path is also quoted now.
. "$XILINX_VIVADO_DIR/settings64.sh"

# Make a fresh workspace directory.
rm -frv workspace
mkdir -p workspace

# Have Vivado make the bitstream.
vivado -mode batch -source scripts/tcl/generate_test_system_bitstream_for_functionality.tcl -tclargs quartet_test_system_top workspace/zynq_project $fpga_platform qts_functionality.bit | tee vivado_build_log.txt
if grep -E '^ERROR' vivado_build_log.txt; then
    echo
    echo 'Vivado failed to generate the bitstream.'
    echo 'See vivado_build_log.txt for more information.'
    echo
    exit 1
fi

# Show that the job has been completed.
echo
echo 'qts_functionality.bit generated successfully.'
echo

# Remove any logs to clean up the main directory.
rm -frv webtalk* vivado*.log vivado*.jou

# Exit successfully.
exit 0
|
class BankAccount:
    """A simple in-memory bank account with a class-wide transaction counter.

    Transaction ids are globally unique across all accounts because the
    counter is shared at class level.
    """

    # Shared across every account instance.
    transaction_counter = 0

    def __init__(self, initial_balance):
        self.balance = initial_balance
        self.transaction_history = []

    @classmethod
    def _next_transaction_id(cls, prefix):
        """Bump the shared counter and build an id like 'D001' / 'W002'."""
        cls.transaction_counter += 1
        return f"{prefix}{cls.transaction_counter:03d}"

    def deposit(self, amount):
        """Add `amount` to the balance and log a deposit transaction."""
        self.balance += amount
        self.transaction_history.append((self._next_transaction_id("D"), amount))

    def withdraw(self, amount):
        """Remove `amount` if funds allow; otherwise print a warning."""
        if amount > self.balance:
            print("Insufficient funds")
            return
        self.balance -= amount
        self.transaction_history.append((self._next_transaction_id("W"), -amount))

    def display_balance(self):
        """Print the current balance."""
        print(f"Current Balance: {self.balance}")

    def display_transaction_history(self):
        """Print every logged transaction with its type and magnitude."""
        print("Transaction History:")
        for transaction_id, amount in self.transaction_history:
            transaction_type = "Deposit" if amount > 0 else "Withdrawal"
            print(f"{transaction_id}: {transaction_type} of {abs(amount)}")
<filename>src/index.ts
export {
Pool,
DEFAULT_DECONSTRUCTOR,
IPoolConstructor,
IPoolDeconstructor,
} from "./Pool";
|
#!/bin/bash
# Install and enable cachefilesd, the userspace daemon backing the kernel
# fscache facility (local-disk caching for network filesystems).
sudo apt-get install -y cachefilesd
# Flip RUN=yes so the Debian init scripts actually start the daemon.
sudo sed -i -e 's/#RUN=yes/RUN=yes/g' /etc/default/cachefilesd
# Cache directory: honour a pre-set FSCACHE_DIR, default to /var/cache/fscache.
: ${FSCACHE_DIR:=/var/cache/fscache}
# Write the cache configuration.  The b*/f* lines are space/file culling
# thresholds -- see cachefilesd.conf(5); the exact values here look tuned
# for this host (TODO confirm they suit the target machine's disk).
cat <<EOF | sudo tee /etc/cachefilesd.conf > /dev/null
dir $FSCACHE_DIR
tag dgx1cache
brun 25%
bcull 15%
bstop 5%
frun 10%
fcull 7%
fstop 3%
EOF
# Load the kernel module and start the daemon.
sudo modprobe cachefiles
sudo service cachefilesd start
|
/*
*
*/
package net.community.chest.math.test;
import net.community.chest.AbstractTestSupport;
import net.community.chest.math.DivisionSigns;
import org.junit.Test;
/**
* <P>Copyright as per GPLv2</P>
* @author <NAME>.
* @since Jun 12, 2011 8:20:57 AM
*/
public class DivisionSignsTest extends AbstractTestSupport {
    /** Sample inputs: zero plus an assortment of multi-digit values. */
    private static final long[] VALUES={
            0L, 12L, 123L, 10203L, 3777347L, 17041690L, 7031965L, 1669974L, 6334353L, 2541006L
        };
    public DivisionSignsTest ()
    {
        super();
    }
    /** String and numeric digit-sum overloads must agree, and negation must flip the sign. */
    @Test
    public void testSumDigits ()
    {
        for (final long v : VALUES)
        {
            final long    strSum=DivisionSigns.sumDigits(String.valueOf(v)),
                        numSum=DivisionSigns.sumDigits(v);
            assertEquals("[" + v + "] Mismatched string vs. number digits sum", strSum, numSum);
            assertEquals("[" + v + "] Mismatched negative digits sum", 0L - numSum, DivisionSigns.sumDigits(0L - v));
        }
    }
    /** Verifies isMultiple3 on both overloads; the implementation treats 0 as NOT a multiple. */
    @Test
    public void testIsMultiple3 ()
    {
        for (final long v : VALUES)
        {
            final boolean    expected=(v != 0L) && ((v % 3L) == 0L);
            assertEquals("[" + v + "] mismatched multiple-3 numberical result", Boolean.valueOf(expected), Boolean.valueOf(DivisionSigns.isMultiple3(v)));
            assertEquals("[" + v + "] mismatched multiple-3 string result", Boolean.valueOf(expected), Boolean.valueOf(DivisionSigns.isMultiple3(String.valueOf(v))));
            if ((v != 0L) && (!expected))    // if number not originally multiple of 3 then make it so
                assertTrue("[" + (3L * v) + "] non-triplet multiple-3", DivisionSigns.isMultiple3(3L * v));
        }
    }
    /** Verifies isMultiple6, deriving a guaranteed multiple of 6 from each non-multiple. */
    @Test
    public void testIsMultiple6 ()
    {
        for (final long v : VALUES)
        {
            final boolean    expected=(v != 0L) && ((v % 6L) == 0L);
            assertEquals("[" + v + "] mismatched numerical multiple-6 result", Boolean.valueOf(expected), Boolean.valueOf(DivisionSigns.isMultiple6(v)));
            assertEquals("[" + v + "] mismatched string multiple-6 result", Boolean.valueOf(expected), Boolean.valueOf(DivisionSigns.isMultiple6(String.valueOf(v))));
            if ((v != 0L) && (!expected))    // if number not originally multiple of 6 then make it so
            {
                if ((v & 0x01) == 0L)            // even: multiply by 3 to get a multiple of 6
                    assertTrue("[" + (3L * v) + "] non-even multiple-6", DivisionSigns.isMultiple6(3L * v));
                else if ((v % 3L) == 0L)        // multiple of 3: multiply by 2
                    assertTrue("[" + (2L * v) + "] non-triplet multiple-6", DivisionSigns.isMultiple6(2L * v));
                else                            // neither: multiply by 6
                    assertTrue("[" + (6L * v) + "] non-six multiple-6", DivisionSigns.isMultiple6(6L * v));
            }
        }
    }
    /** Verifies isMultiple9 on both overloads. */
    @Test
    public void testIsMultiple9 ()
    {
        for (final long v : VALUES)
        {
            final boolean    expected=(v != 0L) && ((v % 9L) == 0L);
            assertEquals("[" + v + "] mismatched numerical multiple-9 result", Boolean.valueOf(expected), Boolean.valueOf(DivisionSigns.isMultiple9(v)));
            assertEquals("[" + v + "] mismatched string multiple-9 result", Boolean.valueOf(expected), Boolean.valueOf(DivisionSigns.isMultiple9(String.valueOf(v))));
            // BUGFIX: this previously asserted via isMultiple3(), so the
            // derived-multiple case never exercised isMultiple9 at all.
            if ((v != 0L) && (!expected))    // if number not originally multiple of 9 then make it so
                assertTrue("[" + (9L * v) + "] non-nine multiple-9", DivisionSigns.isMultiple9(9L * v));
        }
    }
    /** Verifies the string-based isMultiple5 (note: 0 counts as a multiple here). */
    @Test
    public void testIsMultiple5 ()
    {
        for (final long v : VALUES)
        {
            final boolean    expected=(v % 5L) == 0L;
            assertEquals("[" + v + "] mismatched multiple-5 result", Boolean.valueOf(expected), Boolean.valueOf(DivisionSigns.isMultiple5(String.valueOf(v))));
            if ((v != 0L) && (!expected))    // if number not originally multiple of 5 then make it so
                assertTrue("[" + (5L * v) + "] non-five multiple-5", DivisionSigns.isMultiple5(String.valueOf(5L * v)));
        }
    }
}
|
import { Module } from '@nestjs/common';
import { MailerModule } from '@nestjs-modules/mailer';
import { SentMailConfigService } from './sent-mail-config.service';
// Registers the mailer transport, delegating option construction to
// SentMailConfigService at module initialization time.
@Module({
  imports: [
    MailerModule.forRootAsync({ useClass: SentMailConfigService }),
  ],
})
export class SentmailModule {}
|
# Detect the host OS so we can pick the right base64 flags below
# (GNU coreutils vs. BSD base64 differ in option spelling).
platform='unknown'
unamestr=`uname`
if [[ "$unamestr" == 'Linux' ]]; then
   platform='linux'
elif [[ "$unamestr" == 'Darwin' ]]; then
   platform='darwin'
fi

# 1-100: g3.4
# 101-200: p2
# AMI="ami-660ae31e"
AMI="ami-a7fa31df" #extract
INSTANCE_TYPE="g3.4xlarge"
# INSTANCE_TYPE="g3.4xlarge"
#INSTANCE_TYPE="c3.2xlarge"
INSTANCE_COUNT=1
KEY_NAME="taskonomy"
SECURITY_GROUP="launch-wizard-1"
SPOT_PRICE=1.001
ZONE="us-west-2"
SUB_ZONES=( a b c )

# 11 - X
START_AT=1
EXIT_AFTER=50

# Intermediate tasks left out
#  ego_motion
# INTERMEDIATE_TASKS="autoencoder colorization curvature denoise edge2d edge3d \
# fix_pose impainting_whole jigsaw keypoint2d keypoint3d \
# non_fixated_pose point_match reshade rgb2depth rgb2mist rgb2sfnorm \
# room_layout segment25d segment2d vanishing_point_well_defined pixels random \
# segmentsemantic_rb class_selected class_1000"
# INTERMEDIATE_TASKS="ego_motion"
INTERMEDIATE_TASKS="autoencoder colorization curvature denoise edge2d edge3d \
fix_pose impainting_whole jigsaw keypoint2d keypoint3d \
non_fixated_pose point_match reshade rgb2depth rgb2mist rgb2sfnorm \
room_layout segment25d segment2d vanishing_point_well_defined \
segmentsemantic_rb class_selected class_1000"
# NOTE: this assignment overrides the full list above -- only these four
# tasks are actually launched.
INTERMEDIATE_TASKS="reshade colorization ego_motion class_places"

# DST tasks left out
# denoise fix_pose point_match class_1000 autoencoder non_fixated_pose rgb2depth segment2d keypoint2d
TARGET_DECODER_FUNCS="DO_NOT_REPLACE_TARGET_DECODER"

COUNTER=0
#for src in $SRC_TASKS; do
for intermediate in $INTERMEDIATE_TASKS; do
    sleep 2
    # POSIX arithmetic expansion; the old $[ ] form is deprecated.
    COUNTER=$((COUNTER + 1))
    # Rotate launches across the three availability sub-zones.
    SUB_ZONE=${SUB_ZONES[$((COUNTER%3))]}
    if [ "$COUNTER" -lt "$START_AT" ]; then
        echo "Skipping at $COUNTER (starting at $START_AT)"
        continue
    fi
    echo "running $COUNTER"
    # GNU base64 wraps output unless -w 0; BSD needs -D to decode.
    if [[ "$platform" == "linux" ]];
    then
        OPTIONS="-w 0"
        ECHO_OPTIONS="-d"
    else
        OPTIONS=""
        ECHO_OPTIONS="-D"
    fi
    # Cloud-init user data: runs on the instance at boot, pulls the repo
    # and kicks off the configured ACTION.
    USER_DATA=$(base64 $OPTIONS << END_USER_DATA
export INSTANCE_TAG="${intermediate}";
# export INSTANCE_TAG="SASHA P2 WORK INSTANCE";
export HOME="/home/ubuntu"
# export ACTION=SHUTDOWN;
export ACTION=FINETUNE_IMAGENET_SELECTED;
watch -n 300 "bash /home/ubuntu/task-taxonomy-331b/tools/script/reboot_if_disconnected.sh" &> /dev/null &
cd /home/ubuntu/task-taxonomy-331b
git stash
git remote add autopull https://alexsax:328d7b8a3e905c1400f293b9c4842fcae3b7dc54@github.com/alexsax/task-taxonomy-331b.git
git pull autopull perceptual-transfer
git pull autopull perceptual-transfer
# bash /home/ubuntu/task-taxonomy-331b/tools/script/generate_all_transfer_configs.sh
# python /home/ubuntu/task-taxonomy-331b/tools/transfer.py \
#    /home/ubuntu/task-taxonomy-331b/experiments/full_taskonomy_beta1/${decode}/${data}/${intermediate}__${dst}__8__unlocked/
END_USER_DATA
)
    echo "$USER_DATA" | base64 $ECHO_OPTIONS
    aws ec2 request-spot-instances \
    --spot-price $SPOT_PRICE \
    --instance-count $INSTANCE_COUNT \
    --region $ZONE \
    --launch-specification \
        "{ \
            \"ImageId\":\"$AMI\", \
            \"InstanceType\":\"$INSTANCE_TYPE\", \
            \"KeyName\":\"$KEY_NAME\", \
            \"SecurityGroups\": [\"$SECURITY_GROUP\"], \
            \"UserData\":\"$USER_DATA\", \
            \"Placement\": { \
              \"AvailabilityZone\": \"us-west-2${SUB_ZONE}\" \
            } \
        }"
    if [ "$COUNTER" -ge "$EXIT_AFTER" ]; then
        echo "EXITING before $COUNTER (after $EXIT_AFTER)"
        break
    fi
done
|
#!/bin/bash
# bash is required: this script uses arrays and `set -o pipefail`.
set -euo pipefail

# Assumption: Being in toplevel "syndesis" workingdir
# Usage:
#   patch_pr.sh <module dir> <patch nr>
#
# This will create a branch "pr/<patch nr>" which contains the merged PR

# Repos map: key == dir name, value == GitHub repo name
REPOS=(
  "rest:syndesis-rest"
  "ui:syndesis-ui"
  "project:syndesis-project"
  "verifier:syndesis-verifier"
  "ux:syndesis-ux"
  "tests:syndesis-system-tests"
  "connectors:connectors"
  "runtime:syndesis-integration-runtime"
  "s2i:syndesis-s2i-image"
  "deploy:syndesis-openshift-templates"
)

if [ -d syndesis ]; then
  cd syndesis
fi

# Echo the GitHub PR URL for a module key, or nothing when the key is unknown.
get_pr_url() {
    local key="$1"
    local pr="$2"
    for repo in "${REPOS[@]}" ; do
      project="${repo%%:*}"
      if [ "$project" = "$key" ]; then
         repo_name="${repo##*:}"
         echo "https://github.com/syndesisio/${repo_name}/pull/${pr}"
      fi
    done
}

# Guard the positional parameters explicitly: under `set -u` a bare $1/$2
# access would abort with an unhelpful "unbound variable" error.
if [ $# -ne 2 ]; then
    echo "Usage: $0 <module dir> <patch nr>"
    exit 1
fi
module_dir=$1
pr=$2

if [ ! -d "$module_dir" ]; then
    echo "No module directory $module_dir found"
    exit 1
fi

url=$(get_pr_url "$module_dir" "$pr")
if [ -z "$url" ]; then
    echo "Usage: $0 <module dir> <patch nr>"
    # Previously fell through here and ran curl/git with an empty URL.
    exit 1
fi

patch_file=/tmp/pr_${module_dir}_${pr}.patch
curl -L "$url.patch" > "$patch_file"
cd "$module_dir"
git checkout -b "pr/${module_dir}-$pr"
patch -p1 < "$patch_file"
# Stage every new file the patch created (modified files are already tracked).
git status -s | grep -v -e '^ M ' | sed -e 's/^?? //' | xargs git add
git commit -a -m "Applied PR $url (Module: $module_dir, PR: $pr)"
cat - <<EOT
============================================================================
PR $pr applied successfully.
The patch has been committed locally to branch pr/${module_dir}-$pr

Please push this branch to your fork and create a new PR against Syndesis
(possibly copying over relevant comments from the original PR)
EOT
|
// Syntax fixture exercising class-field name forms: static fields that
// shadow built-in class properties ("name", "length"), computed keys,
// a computed key from an outer `bar` binding, string-literal keys, and
// plain instance fields.
// NOTE(review): this appears to be parser/transform test input rather than
// runnable code -- `bar` is not defined in this file.
class A {
  static name = 1;
  static length = 2;
  static foo = 3;
  static [bar] = 4;
  static ["name"] = 5;
  static [name] = 6;
  static "name" = 7;
  name = 8;
  length = 9;
}
#!/bin/bash
set -e
echo "checking style ..."
# Run flake8 inside the project image so the check matches CI's environment.
docker run -i --rm registry.sonata-nfv.eu:5000/tng-sdk-benchmark flake8 --exclude .eggs --exclude venv --exclude build .
echo "done."
# NOTE(review): with `set -e` above, a flake8 failure aborts this script with
# a non-zero status, so style errors DO currently fail the build; the comment
# below (and the commented-out `exit 0`) describe an older best-effort mode.
# always exit with 0 (ugly code style is not an error :))
#exit 0
|
<gh_stars>0
#include "defines.h"
#include <stm32f4xx_hal.h>
/*
 * Host-side stub of the STM32 HAL CRC init: always reports HAL_ERROR so
 * code paths that require a real CRC peripheral fail fast off-target.
 */
HAL_StatusTypeDef HAL_CRC_Init(CRC_HandleTypeDef *)
{
    return HAL_ERROR;
}
/*
 * Stub CRC computation: ignores the buffer and returns a constant 0.
 * NOTE(review): callers are assumed to tolerate a fixed checksum in
 * host builds -- TODO confirm.
 */
uint32_t HAL_CRC_Calculate(CRC_HandleTypeDef *, uint32_t[], uint32_t)
{
    return 0;
}
|
package org.jenkinsci.plugins.hue_light;
import hudson.Extension;
import hudson.Launcher;
import hudson.model.BallColor;
import hudson.model.BuildListener;
import hudson.model.AbstractBuild;
import hudson.model.AbstractProject;
import hudson.tasks.BuildStepDescriptor;
import hudson.tasks.BuildStepMonitor;
import hudson.tasks.Notifier;
import hudson.tasks.Publisher;
import hudson.util.FormValidation;
import java.io.IOException;
import java.util.HashSet;
import javax.servlet.ServletException;
import net.sf.json.JSONObject;
import nl.q42.jue.Light;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.QueryParameter;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.DataBoundSetter;
/**
 * Jenkins post-build step that colors Philips hue lights according to the
 * build outcome (pulsing pre-build color while running, then a result color).
 */
public class LightNotifier extends Notifier {
    // IDs of the hue lights to drive, parsed from a comma-separated string.
    private final HashSet<String> lightId;
    // Color names (or raw hue integers) configured per build phase/outcome.
    private final String preBuild;
    private final String goodBuild;
    private final String unstableBuild;
    private final String badBuild;
    // Created in prebuild() (descriptor is unavailable in the constructor).
    private LightController lightController;
@DataBoundConstructor
public LightNotifier(String lightId, String preBuild, String goodBuild, String unstableBuild, String badBuild) {
this.lightId = new HashSet<String>();
if(lightId != null) {
String[] lightIds = lightId.split(",");
for(String id : lightIds) {
this.lightId.add(id.trim());
}
}
this.preBuild = preBuild;
this.goodBuild = goodBuild;
this.unstableBuild = unstableBuild;
this.badBuild = badBuild;
}
public String getLightId() {
String lid = "";
if(this.lightId != null && this.lightId.size() > 0) {
for(String id : this.lightId) {
lid += id + ",";
}
lid = lid.substring(0, lid.length() - 1);
}
return lid;
}
    /** @return color used while the build is running. */
    public String getPreBuild() {
        return this.preBuild;
    }
    /** @return color used for a successful build. */
    public String getGoodBuild() {
        return this.goodBuild;
    }
    /** @return color used for an unstable build. */
    public String getUnstableBuild() {
        return this.unstableBuild;
    }
    /** @return color used for a failed build. */
    public String getBadBuild() {
        return this.badBuild;
    }
    /**
     * Sets every configured light pulsing in the pre-build color when the
     * build starts, and initializes the light controller.
     * CJA: Note that old prebuild using Build is deprecated. Now using AbstractBuild parameter.
     */
    @Override
    public boolean prebuild(AbstractBuild build, BuildListener listener) {
        // does not work in constructor...
        final DescriptorImpl descriptor = this.getDescriptor();
        this.lightController = new LightController(descriptor, listener.getLogger());
        for(String id : this.lightId) {
            Light light = this.lightController.getLightForId(id);
            this.lightController.setPulseBreathe(light, "Build Starting", ConfigColorToHue(this.preBuild));
        }
        return super.prebuild(build, listener);
    }
@Override
public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener) throws InterruptedException, IOException {
// Allowable values from build results:
//
// RED - Bad Build
// RED_ANIME
// YELLOW - Unstable Build
// YELLOW_ANIME
// BLUE - Good Build
// BLUE_ANIME
// GREY
// GREY_ANIME
// DISABLED
// DISABLED_ANIME
// ABORTED
// ABORTED_ANIME
// NOTBUILT
// NOTBUILT_ANIME
BallColor ballcolor = build.getResult().color;
for(String id : this.lightId) {
Light light = this.lightController.getLightForId(id);
switch (ballcolor) {
case RED:
this.lightController.setColor(light, "Bad Build", ConfigColorToHue(this.badBuild));
break;
case YELLOW:
this.lightController.setColor(light, "Unstable Build", ConfigColorToHue(this.unstableBuild));
break;
case BLUE:
this.lightController.setColor(light, "Good Build", ConfigColorToHue(this.goodBuild));
break;
}
}
return true;
}
/**
* Note that we support Blue, Green, Yellow and Red as named colors. Anything else, we presume it's
* an integer. If we can't decode it, we return 0, which is actually red, but hey, we have to return
* something.
*
* @param color The color we want to turn into a numeric hue
* @return The numeric hue
*/
private Integer ConfigColorToHue(String color) {
if (color.equalsIgnoreCase("blue")) {
return Integer.parseInt(this.getDescriptor().getBlue());
} else if (color.equalsIgnoreCase("green")) {
return Integer.parseInt(this.getDescriptor().getGreen());
} else if (color.equalsIgnoreCase("yellow")) {
return Integer.parseInt(this.getDescriptor().getYellow());
} else if (color.equalsIgnoreCase("red")) {
return Integer.parseInt(this.getDescriptor().getRed());
} else {
if (DescriptorImpl.isInteger(color))
return Integer.parseInt(color);
else
return 0;
}
}
    /** Runs after the build is finalized so the final result color is known. */
    @Override
    public boolean needsToRunAfterFinalized() {
        return true;
    }
    @Override
    public DescriptorImpl getDescriptor() {
        return (DescriptorImpl) super.getDescriptor();
    }
    /** No synchronization with other builds is required. */
    public BuildStepMonitor getRequiredMonitorService() {
        return BuildStepMonitor.NONE;
    }
    @Extension
    public static final class DescriptorImpl extends BuildStepDescriptor<Publisher> {
        // Jenkins-wide hue bridge settings and per-color hue defaults,
        // persisted via load()/save() on the global configuration page.
        private String bridgeIp;
        private String bridgeUsername;
        private String blue;
        private String green;
        private String yellow;
        private String red;
        private String saturation;
        private String brightness;
        public DescriptorImpl() {
            // Restore previously saved global configuration.
            this.load();
        }
public static boolean isInteger(String s) {
try {
Integer.parseInt(s);
} catch (NumberFormatException e) {
return false;
}
return true;
}
        /** Applicable to every project type. */
        @Override
        public boolean isApplicable(Class<? extends AbstractProject> aClass) {
            return true;
        }
        /** Name shown in the post-build step dropdown. */
        @Override
        public String getDisplayName() {
            return "Colorize Hue-Light";
        }
        /**
         * Validates that some IP address was entered for the bridge. A hostname is also valid (do not change variable
         * name because this would be a breaking change).
         *
         * @param value The bridge IP address
         * @return ok, or an error describing the missing value
         * @throws IOException
         * @throws ServletException
         */
        public FormValidation doCheckBridgeIp(@QueryParameter String value)
                throws IOException, ServletException {
            if (value.length() == 0)
                return FormValidation.error("Please set the IP or hostname of the bridge");
            return FormValidation.ok();
        }
        /**
         * Validates that some username was entered. This could really be anything.
         *
         * @param value The user name
         * @return ok, or an error describing the missing value
         * @throws IOException
         * @throws ServletException
         */
        public FormValidation doCheckBridgeUsername(@QueryParameter String value)
                throws IOException, ServletException {
            if (value.length() == 0)
                return FormValidation.error("Please set the username");
            return FormValidation.ok();
        }
        /**
         * Validates that some light ID was entered and that it's a non-negative integer.
         * Accepts a comma-separated list; each entry is trimmed and checked.
         *
         * @param value The ID of the light to be used
         * @return ok, or an error describing the first invalid entry
         * @throws IOException
         * @throws ServletException
         */
        public FormValidation doCheckLightId(@QueryParameter String value)
                throws IOException, ServletException {
            if (value.length() == 0)
                return FormValidation.error("Please set the ID(s) of the light(s) separated by commas");
            String[] lightIds = value.split(",");
            for(String id : lightIds) {
                id = id.trim();
                if (!isInteger(id))
                    return FormValidation.error("Please enter positive integers only");
                if (Integer.parseInt(id) < 0)
                    return FormValidation.error("Please enter non-negative numbers only");
            }
            return FormValidation.ok();
        }
/**
* Validates that some value was entered for blue and that it's a non-negative integer
*
* @param value The hue value for blue
* @throws IOException
* @throws ServletException
*/
public FormValidation doCheckBlue(@QueryParameter String value)
throws IOException, ServletException {
if (value.length() == 0)
return FormValidation.error("Please set the hue value for blue");
if (!isInteger(value))
return FormValidation.error("Please enter a number");
if (Integer.parseInt(value) < 0)
return FormValidation.error("Please enter a non-negative number");
return FormValidation.ok();
}
/**
* Validates that some value was entered for green and that it's a non-negative integer
*
* @param value The hue value for green
* @throws IOException
* @throws ServletException
*/
public FormValidation doCheckGreen(@QueryParameter String value)
throws IOException, ServletException {
if (value.length() == 0)
return FormValidation.error("Please set the hue value for green");
if (!this.isInteger(value))
return FormValidation.error("Please enter a number");
if (Integer.parseInt(value) < 0)
return FormValidation.error("Please enter a non-negative number");
return FormValidation.ok();
}
/**
* Validates that some value was entered for yellow and that it's a non-negative integer
*
* @param value The hue value for yellow
* @throws IOException
* @throws ServletException
*/
public FormValidation doCheckYellow(@QueryParameter String value)
throws IOException, ServletException {
if (value.length() == 0)
return FormValidation.error("Please set the hue value for yellow");
if (!isInteger(value))
return FormValidation.error("Please enter a number");
if (Integer.parseInt(value) < 0)
return FormValidation.error("Please enter a non-negative number");
return FormValidation.ok();
}
/**
* Validates that some value was entered for red and that it's a non-negative integer
*
* @param value The hue value for red
* @throws IOException
* @throws ServletException
*/
public FormValidation doCheckRed(@QueryParameter String value)
throws IOException, ServletException {
if (value.length() == 0)
return FormValidation.error("Please set the hue value for red");
if (!isInteger(value))
return FormValidation.error("Please enter a number");
if (Integer.parseInt(value) < 0)
return FormValidation.error("Please enter a non-negative number");
return FormValidation.ok();
}
        /**
         * Validates that some value was entered for saturation and that it's [0..255]
         *
         * @param value The hue value for saturation
         * @return ok, or an error describing the first failed check
         * @throws IOException
         * @throws ServletException
         */
        public FormValidation doCheckSaturation(@QueryParameter String value)
                throws IOException, ServletException {
            if (value.length() == 0)
                return FormValidation.error("Please set the hue value for saturation");
            if (!isInteger(value))
                return FormValidation.error("Please enter a number");
            if (Integer.parseInt(value) < 0 || Integer.parseInt(value) > 255)
                return FormValidation.error("Please enter number in range [0...255]");
            return FormValidation.ok();
        }
/**
* Validates that some value was entered for brightness and that it's [1..255]
*
* @param value The hue value for brightness
* @throws IOException
* @throws ServletException
*/
public FormValidation doCheckBrightness(@QueryParameter String value)
throws IOException, ServletException {
if (value.length() == 0)
return FormValidation.error("Please set the hue value for saturation");
if (!isInteger(value))
return FormValidation.error("Please enter a number");
if (Integer.parseInt(value) < 1 || Integer.parseInt(value) > 255)
return FormValidation.error("Please enter number in range [1...255]");
return FormValidation.ok();
}
        /** Binds the submitted global-configuration form and persists it. */
        @Override
        public boolean configure(StaplerRequest req, JSONObject json) throws FormException {
            req.bindJSON(this, json);
            save();
            return true;
        }
        // Global-configuration getters (round-trip values for the config form).
        public String getBridgeIp() {
            return this.bridgeIp;
        }
        public String getBridgeUsername() {
            return this.bridgeUsername;
        }
        public String getBlue() {
            return this.blue;
        }
        public String getGreen() {
            return this.green;
        }
        public String getYellow() {
            return this.yellow;
        }
        public String getRed() {
            return this.red;
        }
        public String getSaturation() {
            return this.saturation;
        }
        public String getBrightness() {
            return this.brightness;
        }
        // Global-configuration setters, invoked by bindJSON() in configure().
        @DataBoundSetter
        public void setBridgeIp(String bridgeIp) {
            this.bridgeIp = bridgeIp;
        }
        @DataBoundSetter
        public void setBridgeUsername(String bridgeUsername) {
            this.bridgeUsername = bridgeUsername;
        }
        @DataBoundSetter
        public void setBlue(String blue) {
            this.blue = blue;
        }
        @DataBoundSetter
        public void setGreen(String green) {
            this.green = green;
        }
        @DataBoundSetter
        public void setYellow(String yellow) {
            this.yellow = yellow;
        }
        @DataBoundSetter
        public void setRed(String red) {
            this.red = red;
        }
        @DataBoundSetter
        public void setSaturation(String saturation) {
            this.saturation = saturation;
        }
        @DataBoundSetter
        public void setBrightness(String brightness) {
            this.brightness = brightness;
        }
}
}
|
#!/bin/bash
# Convert the OpenLDAP .schema files listed in $1 (whitespace-separated
# paths) into cn=config-style LDIF files, written back next to each
# source schema.
SCHEMAS=$1
tmpd=`mktemp -d`
pushd ${tmpd} >>/dev/null

# Include the stock schemas the custom ones typically depend on.
echo "include /etc/openldap/schema/core.schema" >> convert.dat
echo "include /etc/openldap/schema/cosine.schema" >> convert.dat
echo "include /etc/openldap/schema/nis.schema" >> convert.dat
echo "include /etc/openldap/schema/inetorgperson.schema" >> convert.dat
for schema in ${SCHEMAS} ; do
    echo "include ${schema}" >> convert.dat
done

# slaptest renders every included schema into ./cn=config LDIF form.
slaptest -f convert.dat -F .
if [ $? -ne 0 ] ; then
    echo "** [openldap] ERROR: slaptest conversion failed!"
    # A bare `exit` propagated the (successful) status of the echo above,
    # so failures previously exited 0; fail explicitly and clean up.
    popd >>/dev/null
    rm -rf ${tmpd}
    exit 1
fi

for schema in ${SCHEMAS} ; do
    fullpath=${schema}
    schema_name=`basename ${fullpath} .schema`
    schema_dir=`dirname ${fullpath}`
    ldif_file=${schema_name}.ldif
    # Quote the pattern so the shell does not expand the glob before find runs.
    find . -name "*\}${schema_name}.ldif" -exec mv '{}' ./${ldif_file} \;
    # Rewrite the operational cn=config attributes into a loadable entry.
    # TODO: these sed invocations could all be combined
    sed -i "/dn:/ c dn: cn=${schema_name},cn=schema,cn=config" ${ldif_file}
    sed -i "/cn:/ c cn: ${schema_name}" ${ldif_file}
    sed -i '/structuralObjectClass/ d' ${ldif_file}
    sed -i '/entryUUID/ d' ${ldif_file}
    sed -i '/creatorsName/ d' ${ldif_file}
    sed -i '/createTimestamp/ d' ${ldif_file}
    sed -i '/entryCSN/ d' ${ldif_file}
    sed -i '/modifiersName/ d' ${ldif_file}
    sed -i '/modifyTimestamp/ d' ${ldif_file}
    # slapd seems to be very sensitive to how a file ends. There should be no blank lines.
    sed -i '/^ *$/d' ${ldif_file}
    mv ${ldif_file} ${schema_dir}
done

popd >>/dev/null
rm -rf $tmpd
|
'use strict';
// Simulates a character-LCD-style peripheral wired to a memory-mapped bus:
// pin states are rendered by swapping images in the page, and bus traffic
// arrives over a WebSocket from the emulator backend.
(function(){
    // Read a pin's state from its image filename.
    // Assumes the src encodes the state ('0'/'1') as the 6th character from
    // the end (e.g. ".../pin_1.gif") -- TODO confirm against the page images.
    function get(line) {
        var pixState = document[line].src.charAt(
            (document[line].src.length) - 6);
        return pixState != "0";
    }
    // Same as get(), but the state character sits one position earlier
    // (7th from the end) -- presumably these images carry a longer suffix.
    function getC(line) {
        var pixState = document[line].src.charAt(
            (document[line].src.length) - 7);
        return pixState != "0";
    }
    // set/clear drive a pin high/low by toggling its image only when the
    // current state differs (togImage/togImageC are defined elsewhere on
    // the page).
    function set(line) {
        if(!get(line)) { togImage(line) }
    }
    function clear(line) {
        if(get(line)) { togImage(line) }
    }
    function setC(line) {
        if(!getC(line)) { togImageC(line) }
    }
    function clearC(line) {
        if(getC(line)) { togImageC(line) }
    }
    // Bus read handler: this device has nothing readable, so always 0.
    function read(addr) {
        return 0x00;
    }
    // Bus write handler: 0xdff0 selects the command register (rs low),
    // 0xdff1 the data register (rs high); the byte is fanned out onto the
    // d0..d7 lines, then the enable line is strobed.
    function write(addr, val) {
        var r = addr - 0xdff0, i = 0;
        if(r == 0) {
            clear('rs');
        } else if(r == 1) {
            set('rs');
        } else {
            console.log('bad address:', addr);
            return;
        }
        for(i=0; i<8; i++) {
            if((val&(1<<i)) != 0) {
                set('d' + i);
            } else {
                clear('d' + i);
            }
        }
        clear('rw');
        setC('e');
        clearC('e');
        setC('rw');
    }
    // Bridge to the emulator: dispatch bus writes, answer bus reads.
    var ws = new WebSocket('ws://' + location.host + '/bus');
    ws.onmessage = function(event) {
        var m = JSON.parse(event.data);
        if(m._type == 'write') {
            write(m.address, m.value);
        } else if(m._type == 'read') {
            ws.send(JSON.stringify({
                _type: 'readresp',
                address: m.address,
                value: read(m.address),
            }));
        } else {
            console.log('unknown msg:', m);
        }
    }
})();
|
<gh_stars>0
import React, { Component } from 'react';
import { Link } from 'react-router-dom';
import { Button, Card, CardBody, CardGroup, Col, Container, Input, InputGroup, InputGroupAddon, InputGroupText, Row } from 'reactstrap';
import qs from 'qs';
import axioApi from './../../../config/axioConfig';
let $this;
class Login extends Component {
constructor(props){
super(props);
this.state = {email:'', password:''}
$this = this;
}
handleEmailChange(e){
$this.setState({
email : e.target.value
})
}
handlePasswordChange(e){
$this.setState({
password : e.target.value
})
}
saveRegister(e){
e.preventDefault();
const user = {email:$this.state.email, password:$<PASSWORD>}
console.log(user);
axioApi.post("auth/login", qs.stringify(user)).then((res) => {
// here we go// success login
if(res.data.auth === true){
// store in localStorage
localStorage.setItem('token', res.data.token);
// set axios header
axioApi.defaults.headers.common['x-access-token'] = res.data.token;
$this.props.history.push({
pathname: '/',
//search: '?query=abc',
redirectfrom: 'dashboard'
})
}
}).catch((err) => {
alert("Username password mismatch");
console.log(err);
});
}
render() {
return (
<div className="app flex-row align-items-center">
<Container>
<Row className="justify-content-center">
<Col md="8">
<CardGroup>
<Card className="p-4">
<CardBody>
<form onSubmit={this.saveRegister}>
<h1>ฤฤng nhแบญp</h1>
<p className="text-muted">Hแป thแปng quแบฃn trแป AI</p>
<InputGroup className="mb-3">
<InputGroupAddon addonType="prepend">
<InputGroupText>
<i className="icon-user"></i>
</InputGroupText>
</InputGroupAddon>
<Input type="email" onChange={this.handleEmailChange} name="email" placeholder="Email" autoComplete="email" required/>
</InputGroup>
<InputGroup className="mb-4">
<InputGroupAddon addonType="prepend">
<InputGroupText>
<i className="icon-lock"></i>
</InputGroupText>
</InputGroupAddon>
<Input type="password" onChange={this.handlePasswordChange} name="password" placeholder="<PASSWORD>" autoComplete="password" required/>
</InputGroup>
<InputGroup className="mb-4">
<button type="submit" className="btn btn-primary">Submit</button>
</InputGroup>
{/* <Row>
<Col xs="6">
<Button color="primary" className="px-4">ฤฤng</Button>
</Col>
<Col xs="6" className="text-right">
<Button color="link" className="px-0">Quรชn mแบญt khแบฉu?</Button>
</Col>
</Row> */}
</form>
</CardBody>
</Card>
<Card className="text-white bg-primary py-5 d-md-down-none" style={{ width: '44%' }}>
<CardBody className="text-center">
<div>
<h2>Giแปi thiแปu</h2>
<p>Chรบng tรดi chuyรชn cแบฅp cแบฅp dแปch vแปฅ website, phแบงn mแปm. Hแป trแปฃ kแปน thuแบญt kinh doanh xin gแปi sแป 0966665040</p>
<Link to="/register">
<Button color="primary" className="mt-3" active tabIndex={-1}>Liรชn hแป!</Button>
</Link>
</div>
</CardBody>
</Card>
</CardGroup>
</Col>
</Row>
</Container>
</div>
);
}
}
export default Login;
|
const express = require('express');
const app = express();
const bodyParser = require('body-parser');
// util and fs were previously used without being required, which threw a
// ReferenceError on every request.
const util = require('util');
const fs = require('fs');
const Read = require('@google-cloud/text-to-speech');
const client = new Read.TextToSpeechClient();

// Configure express
app.use(bodyParser.urlencoded({ extended: true }));
app.use(bodyParser.json({ limit: '1mb' }));

// POST /translate: synthesize req.body.query to speech and save it locally.
app.post('/translate', async (req, res) => {
  // Pass the text snippet you want to convert
  const request = {
    input: {
      text: req.body.query
    },
    // Set the language code
    voice: {
      languageCode: 'en-US',
      ssmlGender: 'NEUTRAL'
    },
    // Select the type of audio encoding
    // NOTE(review): LINEAR16 is uncompressed PCM, yet the output is saved
    // with an .mp3 extension -- confirm the intended container/encoding.
    audioConfig: {
      audioEncoding: 'LINEAR16',
    },
  };
  // Get the content back
  const [response] = await client.synthesizeSpeech(request);
  // Save the audio to a file
  const writeFile = util.promisify(fs.writeFile);
  await writeFile('output.mp3', response.audioContent, 'binary');
  res.status(200).send({
    success: 'Audio file created',
  });
});

// listen for requests
const port = 5000;
app.listen(port, () => {
  console.log(`Server listening on port: ${port}`);
});
package com.balceda.archj.controller;
import java.util.List;
import javax.annotation.PostConstruct;
import javax.faces.bean.ManagedBean;
import javax.faces.bean.ManagedProperty;
import javax.faces.bean.SessionScoped;
import javax.faces.event.ActionEvent;
import javax.faces.event.ValueChangeEvent;
import com.balceda.archj.model.Book;
import com.balceda.archj.model.Category;
import com.balceda.archj.service.interfaces.BookService;
import com.balceda.archj.service.interfaces.CategoryService;
/**
 * Session-scoped JSF backing bean for the book CRUD view: holds the form
 * fields, the table data, and delegates persistence to the injected services.
 */
@ManagedBean(name = "bookMB")
@SessionScoped
public class BookMB {
	// Services resolved by EL name from the container.
	@ManagedProperty("#{bookService}")
	private BookService bookService;
	@ManagedProperty("#{categoryService}")
	private CategoryService categoryService;
	// Form fields bound from the view; `category` holds a Category id.
	private String isbn;
	private String title;
	private String category;
	// Table data shown in the view.
	private List<Book> books;
	private List<Category> categories;
	public BookMB() {
	}
	// Populate the tables after the managed properties have been injected.
	@PostConstruct
	public void init() {
		books = getBookService().selectAll();
		categories = getCategoryService().selectAll();
	}
	public String getIsbn() {
		return isbn;
	}
	public void setIsbn(String isbn) {
		this.isbn = isbn;
	}
	public String getTitle() {
		return title;
	}
	public void setTitle(String title) {
		this.title = title;
	}
	public String getCategory() {
		return category;
	}
	public void setCategory(String category) {
		this.category = category;
	}
	public List<Category> getCategories() {
		return categories;
	}
	public void setCategories(List<Category> categories) {
		this.categories = categories;
	}
	public List<Book> getBooks() {
		return books;
	}
	public void setBooks(List<Book> books) {
		this.books = books;
	}
	public BookService getBookService() {
		return bookService;
	}
	public void setBookService(BookService bookService) {
		this.bookService = bookService;
	}
	public CategoryService getCategoryService() {
		return categoryService;
	}
	public void setCategoryService(CategoryService categoryService) {
		this.categoryService = categoryService;
	}
	// Create a new book from the form fields, then refresh the table.
	public void insert(ActionEvent event) {
		getBookService().insert(new Book(isbn, title, new Category(category)));
		setBooks(getBookService().selectAll());
		// "0" appears to be the "all categories" sentinel used by filter().
		category = "0";
	}
	public void delete(String id) {
		getBookService().delete(new Book(id));
		setBooks(getBookService().selectAll());
	}
	// Load an existing book into the form fields for editing.
	public void edit(String id) {
		Book b = getBookService().selectById(id);
		this.isbn = b.getIsbn();
		this.title = b.getTitle();
		this.category = b.getCategory().getId();
	}
	public void update(ActionEvent event) {
		getBookService().update(new Book(isbn, title, new Category(category)));
		setBooks(getBookService().selectAll());
		category = "0";
	}
	// Clear the form for a new entry.
	public void newbook() {
		isbn = "";
		title = "";
		category = "";
	}
	// Filter the table by the selected category id; 0 means "show all".
	public void filter(ValueChangeEvent event) {
		// NOTE(review): reads the component's "value" attribute rather than
		// event.getNewValue() -- confirm this yields the newly selected id.
		int id = Integer.parseInt(event.getComponent().getAttributes().get("value").toString());
		System.out.println("Category id: " + id);
		if (id != 0) {
			setBooks(getBookService().selectByCategory(new Category(String.valueOf(id))));
		} else {
			setBooks(getBookService().selectAll());
		}
	}
}
|
#!/bin/bash
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
set -e
# Resolve the repository root relative to this script so it can be run from anywhere.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR/../../../.."
# Where third-party sources are unpacked, and the Bazel workspace file that pins
# their mirrored URLs and checksums.
DOWNLOADS_DIR=tensorflow/lite/tools/make/downloads
BZL_FILE_PATH=tensorflow/workspace.bzl
# Ensure it is being run from repo root
if [ ! -f $BZL_FILE_PATH ]; then
echo "Could not find ${BZL_FILE_PATH}":
echo "Likely you are not running this from the root directory of the repository.";
exit 1;
fi
# Scrape each dependency's URL out of the workspace file (preferring the
# mirror.tensorflow.org mirror) and its checksum from the "# SHARED_*_SHA" markers.
EIGEN_URL="$(grep -o 'https.*gitlab.com/libeigen/eigen/-/archive/.*tar\.gz' "${BZL_FILE_PATH}" | grep mirror.tensorflow | head -n1)"
EIGEN_SHA="$(eval echo $(grep '# SHARED_EIGEN_SHA' "${BZL_FILE_PATH}" | grep -o '\".*\"'))"
GEMMLOWP_URL="$(grep -o 'https://storage.googleapis.com/mirror.tensorflow.org/github.com/google/gemmlowp/.*zip' "${BZL_FILE_PATH}" | head -n1)"
GEMMLOWP_SHA="$(eval echo $(grep '# SHARED_GEMMLOWP_SHA' "${BZL_FILE_PATH}" | grep -o '\".*\"'))"
GOOGLETEST_URL="https://github.com/google/googletest/archive/release-1.8.0.tar.gz"
ABSL_URL="$(grep -o 'https://github.com/abseil/abseil-cpp/.*tar.gz' "${BZL_FILE_PATH}" | head -n1)"
ABSL_SHA="$(eval echo $(grep '# SHARED_ABSL_SHA' "${BZL_FILE_PATH}" | grep -o '\".*\"'))"
NEON_2_SSE_URL="https://github.com/intel/ARM_NEON_2_x86_SSE/archive/master.zip"
FARMHASH_URL="https://storage.googleapis.com/mirror.tensorflow.org/github.com/google/farmhash/archive/816a4ae622e964763ca0862d9dbd19324a1eaf45.tar.gz"
FARMHASH_SHA="$(eval echo $(grep '# SHARED_FARMHASH_SHA' "${BZL_FILE_PATH}" | grep -o '\".*\"'))"
FLATBUFFERS_URL="https://storage.googleapis.com/mirror.tensorflow.org/github.com/google/flatbuffers/archive/v1.11.0.tar.gz"
FFT2D_URL="https://storage.googleapis.com/mirror.tensorflow.org/www.kurims.kyoto-u.ac.jp/~ooura/fft2d.tgz"
# TODO(petewarden): Some new code in Eigen triggers a clang bug with iOS arm64,
# so work around it by patching the source.
# In-place sed across GNU and BSD variants: applies ${1} (a sed expression) to
# every remaining argument file. GNU sed takes "-i", BSD sed needs "-i ''".
replace_by_sed() {
  local regex="${1}"
  shift
  # GNU sed recognises --version; BSD sed exits non-zero, which picks the branch.
  if sed --version >/dev/null 2>&1; then
    # GNU-sed.
    sed -i -e "${regex}" "$@"
  else
    # BSD-sed.
    sed -i '' -e "${regex}" "$@"
  fi
}
# Download an archive, optionally verify its sha256, and unpack its contents
# (stripping one top-level directory) into the destination.
#   $1 - archive URL; must end in "gz" (tarball) or "zip"
#   $2 - destination directory (created if missing)
#   $3 - optional sha256 checksum to verify before extracting
# Fixes over the previous version: all path/URL expansions are quoted so values
# containing spaces or shell metacharacters cannot be word-split or glob-expanded,
# and the scratch variables no longer leak into the global scope.
download_and_extract() {
  local usage="Usage: download_and_extract URL DIR [SHA256]"
  local url="${1:?${usage}}"
  local dir="${2:?${usage}}"
  local sha256="${3}"
  echo "downloading ${url}" >&2
  mkdir -p "${dir}"
  local tempdir
  tempdir="$(mktemp -d)"
  local filepath="${tempdir}/$(basename "${url}")"
  curl -Lo "${filepath}" "${url}"
  if [ -n "${sha256}" ]; then
    echo "checking sha256 of ${dir}"
    echo "${sha256} ${filepath}" > "${filepath}.sha256"
    sha256sum -c "${filepath}.sha256"
  fi
  if [[ "${url}" == *gz ]]; then
    tar -C "${dir}" --strip-components=1 -xzf "${filepath}"
  elif [[ "${url}" == *zip ]]; then
    # unzip has no --strip-components, so unzip to a temp dir and move the files
    # we want from there to the destination, flattening one nested directory.
    local tempdir2
    tempdir2="$(mktemp -d)"
    unzip "${filepath}" -d "${tempdir2}"
    if ls "${tempdir2}"/*/* 1> /dev/null 2>&1; then
      cp -R "${tempdir2}"/*/* "${dir}"/
    else
      cp -R "${tempdir2}"/* "${dir}"/
    fi
    rm -rf "${tempdir2}"
  fi
  rm -rf "${tempdir}"
  # Delete any potential BUILD files, which would interfere with Bazel builds.
  find "${dir}" -type f -name '*BUILD' -delete
}
# Fetch and unpack every dependency into its own subdirectory of DOWNLOADS_DIR.
download_and_extract "${EIGEN_URL}" "${DOWNLOADS_DIR}/eigen" "${EIGEN_SHA}"
download_and_extract "${GEMMLOWP_URL}" "${DOWNLOADS_DIR}/gemmlowp" "${GEMMLOWP_SHA}"
download_and_extract "${GOOGLETEST_URL}" "${DOWNLOADS_DIR}/googletest"
download_and_extract "${ABSL_URL}" "${DOWNLOADS_DIR}/absl" "${ABSL_SHA}"
download_and_extract "${NEON_2_SSE_URL}" "${DOWNLOADS_DIR}/neon_2_sse"
download_and_extract "${FARMHASH_URL}" "${DOWNLOADS_DIR}/farmhash" "${FARMHASH_SHA}"
download_and_extract "${FLATBUFFERS_URL}" "${DOWNLOADS_DIR}/flatbuffers"
download_and_extract "${FFT2D_URL}" "${DOWNLOADS_DIR}/fft2d"
# Patch Eigen's NEON complex kernels: turn three static-initializer vector loads
# into plain declarations, working around the iOS arm64 clang bug noted earlier.
replace_by_sed 's#static uint32x4_t p4ui_CONJ_XOR = vld1q_u32( conj_XOR_DATA );#static uint32x4_t p4ui_CONJ_XOR; // = vld1q_u32( conj_XOR_DATA ); - Removed by script#' \
"${DOWNLOADS_DIR}/eigen/Eigen/src/Core/arch/NEON/Complex.h"
replace_by_sed 's#static uint32x2_t p2ui_CONJ_XOR = vld1_u32( conj_XOR_DATA );#static uint32x2_t p2ui_CONJ_XOR;// = vld1_u32( conj_XOR_DATA ); - Removed by scripts#' \
"${DOWNLOADS_DIR}/eigen/Eigen/src/Core/arch/NEON/Complex.h"
replace_by_sed 's#static uint64x2_t p2ul_CONJ_XOR = vld1q_u64( p2ul_conj_XOR_DATA );#static uint64x2_t p2ul_CONJ_XOR;// = vld1q_u64( p2ul_conj_XOR_DATA ); - Removed by script#' \
"${DOWNLOADS_DIR}/eigen/Eigen/src/Core/arch/NEON/Complex.h"
echo "download_dependencies.sh completed successfully." >&2
|
elm-make src/examples/editor/AbcEditor.elm --output=distjs/elmAbcEditor.js
|
#!/bin/bash
# Evaluate a trained model on the FixedObserved shapes dataset.
# Positional args: num_obj name encoder cmap seed loss mode emb
echo Running on $HOSTNAME
num_obj=$1
name=$2
encoder=$3
cmap=$4
seed=$5
loss=$6
mode=$7
emb=$8
dir="models_"$emb
data="data/FixedObserved/wshapes_fixedunobserved"
save=$dir"/Observed/"$name"_"$seed"/"
# Build the run name from its hyperparameters.
name=$name"_"$loss"_"$encoder"_"$num_obj"_"$cmap
echo $name
extras=""
# Recurrent architectures (LSTM / RIM / SCOFF) all need the --recurrent flag;
# the three previously duplicated if-blocks are collapsed into one test.
if [[ $name == *"LSTM"* || $name == *"RIM"* || $name == *"SCOFF"* ]]; then
    extras="--recurrent"
fi
echo $extras
python ./eval.py --dataset $data"_"$num_obj"_"$cmap"_"$mode".h5" \
    --save-folder $save""$name --save $dir $extras
|
/*
*
*/
package net.community.chest.ui.helpers.frame;
import net.community.chest.dom.DOMUtils;
import net.community.chest.swing.component.frame.BaseFrameReflectiveProxy;
import org.w3c.dom.Element;
/**
* <P>Copyright 2008 as per GPLv2</P>
*
* @param <F> The reflected {@link HelperFrame}
* @author <NAME>.
* @since Dec 11, 2008 3:59:02 PM
*/
public class HelperFrameReflectiveProxy<F extends HelperFrame> extends BaseFrameReflectiveProxy<F> {
    public HelperFrameReflectiveProxy (Class<F> objClass) throws IllegalArgumentException
    {
        // Convenience constructor: do not register as the default proxy.
        this(objClass, false);
    }
    protected HelperFrameReflectiveProxy (Class<F> objClass, boolean registerAsDefault)
        throws IllegalArgumentException, IllegalStateException
    {
        super(objClass, registerAsDefault);
    }
    // Resolves an XML element's section name via its NAME_ATTR attribute;
    // returns null when either argument is null.
    public String getSectionName (F src, Element elem)
    {
        if ((null == src) || (null == elem))
            return null;
        return elem.getAttribute(NAME_ATTR);
    }
    /*
     * Treats any unknown child element carrying a name attribute as a named
     * section and registers it on the frame; a duplicate section name is an error.
     * @see net.community.chest.dom.transform.AbstractReflectiveProxy#handleUnknownXmlChild(java.lang.Object, org.w3c.dom.Element)
     */
    @Override
    public F handleUnknownXmlChild (F src, Element elem) throws Exception
    {
        final String n=getSectionName(src, elem);
        if ((n != null) && (n.length() > 0))
        {
            final Element prev=src.addSection(n, elem);
            if (prev != null)
                throw new IllegalStateException("handleUnknownXmlChild(" + n + "[" + DOMUtils.toString(elem) + "] duplicate section found: " + DOMUtils.toString(prev));
            return src;
        }
        return super.handleUnknownXmlChild(src, elem);
    }
    // Default proxy instance, registered (registerAsDefault=true) for plain HelperFrame objects.
    public static final HelperFrameReflectiveProxy<HelperFrame> HLPRFRM=
        new HelperFrameReflectiveProxy<HelperFrame>(HelperFrame.class, true) {
            /* Need to override this in order to ensure correct auto-layout
             * @see net.community.chest.dom.transform.AbstractReflectiveProxy#createInstance(org.w3c.dom.Element)
             */
            @Override
            public HelperFrame fromXml (Element elem) throws Exception
            {
                return (null == elem) ? null : new HelperFrame(elem);
            }
        };
}
|
import { User } from 'src/auth/entities/user.entity';
import {
Column,
CreateDateColumn,
Entity,
ManyToOne,
PrimaryColumn,
} from 'typeorm';
// A user-authored note persisted via TypeORM.
@Entity()
export class Note {
  // Application-supplied string id (plain PrimaryColumn, not auto-generated).
  @PrimaryColumn()
  id: string;
  @Column()
  title: string;
  @Column()
  body: string;
  // NOTE(review): CreateDateColumn is imported above but unused -- createdAt is a
  // plain column, so the application must set it explicitly. Confirm intentional.
  @Column()
  createdAt: Date;
  // Owning user; eager loading disabled, so the relation must be joined explicitly.
  @ManyToOne((type) => User, (user) => user.notes, { eager: false })
  user: User;
}
|
<filename>node_modules/react-icons-kit/metrize/magnet.js
// Auto-generated icon definition (react-icons-kit, "metrize" set): a plain
// object describing the SVG tree for the "magnet" glyph. Do not hand-edit the
// path data -- it is produced from the upstream SVG source.
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.magnet = void 0;
var magnet = {
"viewBox": "0 0 512 512",
"children": [{
"name": "path",
"attribs": {
"d": "M256,0C114.609,0,0,114.609,0,256c0,141.391,114.609,256,256,256c141.391,0,256-114.609,256-256\r\n\tC512,114.609,397.391,0,256,0z M256,472c-119.297,0-216-96.702-216-216c0-119.297,96.703-216,216-216c119.298,0,216,96.703,216,216\r\n\tC472,375.298,375.298,472,256,472z"
},
"children": []
}, {
"name": "g",
"attribs": {},
"children": [{
"name": "rect",
"attribs": {
"x": "320",
"y": "144",
"width": "32",
"height": "32"
},
"children": [{
"name": "rect",
"attribs": {
"x": "320",
"y": "144",
"width": "32",
"height": "32"
},
"children": []
}]
}, {
"name": "path",
"attribs": {
"d": "M320,272.438C320,307.546,291.546,336,256.453,336h-0.906C220.453,336,192,307.546,192,272.438V192h-32v80.656\r\n\t\tC160,325.313,202.688,368,255.328,368h1.344C309.313,368,352,325.313,352,272.656V192h-32V272.438z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M320,272.438C320,307.546,291.546,336,256.453,336h-0.906C220.453,336,192,307.546,192,272.438V192h-32v80.656\r\n\t\tC160,325.313,202.688,368,255.328,368h1.344C309.313,368,352,325.313,352,272.656V192h-32V272.438z"
},
"children": []
}]
}, {
"name": "rect",
"attribs": {
"x": "160",
"y": "144",
"width": "32",
"height": "32"
},
"children": [{
"name": "rect",
"attribs": {
"x": "160",
"y": "144",
"width": "32",
"height": "32"
},
"children": []
}]
}]
}]
};
exports.magnet = magnet;
<filename>typings/aurelia/aurelia-dialog.d.ts
// Ambient type declarations for the aurelia-dialog plugin. Declarations only --
// implementation behavior is not visible here, so member docs below describe
// shape, not semantics.
declare module 'aurelia-dialog' {
  import {
    customElement,
    bindable,
    customAttribute,
    ViewSlot,
    CompositionEngine
  } from 'aurelia-templating';
  import {
    Origin
  } from 'aurelia-metadata';
  import {
    Container
  } from 'aurelia-dependency-injection';
  export class AiDialogBody {
  }
  /**
   * View-model for the footer of a Dialog.
   */
  export class AiDialogFooter {
    static inject: any;
    buttons: any[];
    useDefaultButtons: boolean;
    constructor(controller: DialogController);
    close(buttonValue: string): any;
    useDefaultButtonsChanged(newValue: boolean): any;
    static isCancelButton(value: string): any;
  }
  export class AiDialogHeader {
    static inject: any;
    constructor(controller: any);
  }
  export class AiDialog {
  }
  // Custom attribute; presumably focuses its host element on attach (see
  // attached()) -- implementation not visible in this declaration file.
  export class AttachFocus {
    static inject: any;
    value: any;
    constructor(element: any);
    attached(): any;
    valueChanged(newValue: any): any;
  }
  // Controller handed to dialog view-models; constructed with resolve/reject
  // callbacks plus a renderer and per-dialog settings.
  export class DialogController {
    settings: any;
    constructor(renderer: DialogRenderer, settings: any, resolve: Function, reject: Function);
    ok(result: any): any;
    cancel(result: any): any;
    error(message: any): any;
    close(ok: boolean, result: any): any;
  }
  export let globalSettings: any;
  // Creates, shows, hides and destroys the DOM host for a dialog.
  export class DialogRenderer {
    defaultSettings: any;
    constructor();
    createDialogHost(dialogController: DialogController): any;
    showDialog(dialogController: DialogController): any;
    hideDialog(dialogController: DialogController): any;
    destroyDialogHost(dialogController: DialogController): any;
  }
  // Service entry point: open(settings) composes and displays a dialog.
  export class DialogService {
    static inject: any;
    constructor(container: Container, compositionEngine: any, renderer: any);
    open(settings: any): any;
  }
  export function invokeLifecycle(instance: any, name: string, model: any): any;
}
import React, { Suspense } from 'react'
import { Canvas as Fiber } from 'react-three-fiber'
import { Html } from '@react-three/drei'
import { Controls } from './Controls'
// Pick one random caption to display while the 3D scene's assets load.
function loaders() {
  const captions = [
    'Loading...',
    'harnessing the bits',
    'do you have faith?',
    'experiencing real time',
    'building upon the present',
    'moving backwards',
    'loading the preloaders',
    'unloading onto you',
    'involving you in the experience',
  ]
  const pick = Math.floor(Math.random() * captions.length)
  return captions[pick]
}
// react-three-fiber canvas wrapper: configures the WebGL renderer, camera
// controls, and a Suspense fallback that shows a random loading caption.
// Recognized props: center, cameraPosition, background, debug, maxPolarAngle,
// minPolarAngle, far, children.
export default function Canvas(props) {
  // Camera target and placement, with defaults when the props are absent.
  const center = props.center ? props.center : [0, 0, 0]
  const cameraPlacement = props.cameraPosition
    ? props.cameraPosition
    : [1, 0, 2]
  // this is a hacky fix for gatsby SSR - where window is not available
  if (typeof window === 'undefined' || !window.document) {
    return null
  }
  return (
    <Fiber
      pixelRatio={[1, 3]}
      onCreated={({ gl }) => {
        // One-time renderer configuration once the WebGL context exists.
        gl.alpha = false
        gl.antialias = false
        gl.setClearColor(props.background || '#000000')
        gl.outputEncoding = 3001 // sRGBEncoding
        gl.physicallyCorrectLights = true
      }}
      shadows
      concurrent
    >
      <Controls
        center={center}
        cameraPlacement={cameraPlacement}
        enableZoom={props.debug}
        maxPolarAngle={props.maxPolarAngle}
        minPolarAngle={props.minPolarAngle}
        far={props.far}
      />
      <Suspense
        fallback={
          <Html center className="loader" position={[0, 0, 0]}>
            <span>{loaders()}</span>
          </Html>
        }
      >
        <scene>{props.children}</scene>
      </Suspense>
    </Fiber>
  )
}
// note: https://codesandbox.io/s/mo0xrqrj79
|
from dataclasses import fields, is_dataclass
def pretty_dataclass_instance(value, ctx):
    """Return a dataclass instance's fields as a plain dict.

    Args:
        value: Candidate object; only instances of dataclass types are unpacked.
        ctx: Unused; kept so the signature matches the pretty-printer hook contract.

    Returns:
        dict: Mapping of field name to current value, or an empty dict when
        ``value`` is not a dataclass instance.
    """
    if not is_dataclass(type(value)):
        return {}
    return {f.name: getattr(value, f.name) for f in fields(value)}
import { IImport, Named } from "..";
// Serializable description of an import statement: an optional default import
// plus named imports stored as [name, alias] pairs (alias undefined when the
// name is not renamed). Round-trips through plain IImport objects.
export class Import extends Named implements IImport {
  private _default?: string;
  private _imports: [string, string | undefined][] = [];
  get Imports() {
    return this._imports as ReadonlyArray<[string, string | undefined]>;
  }
  get Default() {
    return this._default;
  }
  // Bulk-parse plain objects into Import instances via the inherited helper.
  static parseObjects(objs: ArrayLike<IImport>) {
    return this.genericParseObjects(Import, objs);
  }
  // Snapshot this instance as a plain IImport object.
  ToObject(): IImport {
    return {
      Default: this.Default,
      Name: this.Name,
      Imports: this.Imports
    };
  }
  // Populate this instance from a plain object; returns `this` for chaining.
  ParseObject(obj: Import) {
    this
      .SetDefault(obj.Default)
      .SetName(obj.Name)
      .AddImport(...obj.Imports);
    return this;
  }
  AddImport(...importInfos: [string, string?][]) {
    return this.addToArray(this._imports, ...importInfos);
  }
  // Sets the default-import alias; falsy values (undefined, "") are ignored.
  SetDefault(importAs: string) {
    if (importAs) {
      this._default = importAs;
    }
    return this;
  }
}
|
// For each entry of params.select ("table.field" or bare "field", comma-separated):
// MERGE a FIELD node, link the QUERY node to it, and -- when the field's table is
// the query's FROM or JOIN table -- link the TABLE node to the FIELD node.
// Every statement is queued on `promises` and executed within `transaction`.
module.exports = ( promises, params, transaction ) => params.select.split( "," ).forEach( ( _field ) => {
const fieldSplited = _field.split( "." );
let table = fieldSplited[ 0 ];
let field = fieldSplited[ 1 ];
// A bare field name (no "table." prefix) is attributed to the FROM table.
const theFieldIsTheSameOfFrom = !field;
if ( theFieldIsTheSameOfFrom ){
field = table;
table = params.from;
}
// NOTE(review): with Object.assign(target, source), keys in `params` would
// override the freshly computed field/table here if params ever carries them;
// if the reverse precedence is intended this should be
// Object.assign({}, params, { field, table }). Confirm against callers.
const _params = Object.assign( { field, table }, params );
let query = "MERGE (field:FIELD {field: {field}}) RETURN field";
promises.push( transaction.run( query, { field } ) );
query = `MATCH (q:QUERY {on: {on}, where: {where}}), (field:FIELD {field: {field}})
CREATE UNIQUE (q)-[:uses_field]->(field)`;
promises.push( transaction.run( query, _params ) );
if ( table !== params.from && table !== params.join ) return;
query = `MATCH (tb:TABLE {table: {table}}), (field:FIELD {field: {field}})
CREATE UNIQUE (tb)-[:has_field]->(field)`;
promises.push( transaction.run( query, _params ) );
} );
|
def get_houses(file_path):
    """Parse a comma-separated listing file into house dicts.

    Each valid line has exactly four comma-separated fields:
    name, address, price, area. Lines with any other field count are skipped.

    Args:
        file_path: Path to the listings file.

    Returns:
        list[dict]: One dict per valid line with keys 'name', 'address',
        'price' (float) and 'area' (float).

    Raises:
        ValueError: If a 4-field line has a non-numeric price or area.
    """
    results = []
    with open(file_path, 'r') as handle:
        for raw_line in handle:
            parts = raw_line.strip().split(',')
            if len(parts) != 4:
                continue
            name, address, price, area = parts
            results.append({
                'name': name,
                'address': address,
                'price': float(price),
                'area': float(area),
            })
    return results
def test_get_houses_1():
    """Verify get_houses() parses the sample resource file correctly.

    Fix: the previous version referenced an undefined name ``InputType``
    (raising NameError before the test body ran) and bound an unused local
    ``string``; both leftover lines are removed.
    """
    # Arrange
    file_path = "tests//resources//test_file.txt"
    expected_houses = [
        {'name': 'House1', 'address': '123 Main St', 'price': 250000.0, 'area': 2000.0},
        {'name': 'House2', 'address': '456 Elm St', 'price': 300000.0, 'area': 2200.0}
    ]
    # Act
    actual_houses = get_houses(file_path)
    # Assert
    assert actual_houses == expected_houses, "Failed to extract correct house information from the file"
import { Manifest } from '../../generator/manifest';
// Registers Component1 (with its page-header and page-content placeholders)
// on the given manifest.
export default (manifest: Manifest) => {
  const placeholders = ['page-header', 'page-content'].map((name) => ({ name }));
  manifest.addComponent({
    name: 'Component1',
    displayName: 'Component1',
    placeholders,
  });
};
|
<reponame>skimah/skimah
import {
Attribute,
Criteria,
CriteriaFilter,
Datasource,
Model,
MutationModel,
MutationResponse,
QueryModel
} from "@skimah/api";
import sift from "./sift";
/**
 * Configuration for the datasource: supply either a file to load or an
 * in-memory array of records.
 */
export interface Config {
  /**
   * The path of the file that should be loaded.
   * NOTE(review): loadFile() JSON-parses this file, so despite the original
   * "csv" wording it must contain JSON -- confirm and fix callers/docs.
   */
  filepath?: string;
  /**
   * An array of objects that should be used instead of loading from a file.
   */
  records?: any[];
}
/**
 * In-memory Skimah Datasource backed either by a JSON file on disk or by an
 * array of records supplied directly through {@link Config}. Queries are
 * answered by translating Skimah criteria into MongoDB-style filters and
 * evaluating them with sift.
 */
export default class implements Datasource {
  protected records: any[];
  constructor(private config: Config) {}
  /** Read config.filepath and JSON-parse it into an array of raw rows. */
  private loadFile(): any[] {
    const { readFileSync } = require("fs");
    const jsonText = readFileSync(this.config.filepath);
    const rows = JSON.parse(jsonText.toString());
    return rows;
  }
  /** Rename a raw record's keys from each attribute's sourceName to its name. */
  private mapField(record: any, attributes: { [key: string]: Attribute }): any {
    const newRecord = {};
    Object.keys(attributes).forEach(attrKey => {
      const attr = attributes[attrKey];
      newRecord[attr.name] = record[attr.sourceName];
    });
    return newRecord;
  }
  /**
   * Sort rows by the single orderBy entry of the criteria. When orderBy is
   * absent, the destructuring throws and the rows are returned unsorted.
   */
  private sort(rows: any[], model: QueryModel): any[] {
    try {
      const [[attr, order]] = Object.entries(model.criteria.orderBy || {});
      return rows.sort((r1, r2) => {
        if (order === "asc") {
          return r1[attr] > r2[attr] ? 1 : -1;
        }
        return r2[attr] > r1[attr] ? 1 : -1;
      });
    } catch {
      return rows;
    }
  }
  /** Apply skip/limit paging; rows are returned unchanged when neither is set. */
  private page(rows: any[], model: QueryModel): any[] {
    if (model.criteria.skip || model.criteria.limit) {
      const skip = model.criteria.skip ?? 0;
      const limit = model.criteria.limit
        ? skip + model.criteria.limit
        : rows.length;
      return rows.slice(skip, limit);
    }
    return rows;
  }
  /**
   * Translate Skimah criteria into a sift (MongoDB-style) query object: each
   * filter operator is prefixed with "$", with "like" mapped to "$regex".
   */
  private criteria(criteria: Criteria) {
    const query = {};
    const covertFilters = (filters: { [key: string]: CriteriaFilter }[]) => {
      return filters.map(c => {
        const [[field, filter]] = Object.entries(c);
        const newFilter = {};
        Object.entries(filter).forEach(([key, value]) => {
          if (key === "like") {
            newFilter[`$regex`] = value;
            return;
          }
          newFilter[`$${key}`] = value;
        });
        return { [field]: newFilter };
      });
    };
    if (criteria.and.length) {
      const and = criteria.and.filter(c => Object.keys(c).length);
      Object.assign(query, { $and: covertFilters(and) });
    }
    if (criteria.or.length) {
      // Bug fix: this branch previously filtered criteria.and (copy/paste
      // error), so $or clauses were built from the AND filters; use the OR set.
      const or = criteria.or.filter(c => Object.keys(c).length);
      Object.assign(query, { $or: covertFilters(or) });
    }
    return query;
  }
  /** Filter, project, sort and page the rows for a query model. */
  private filter(rows: any[], model: QueryModel): any[] {
    const query: any = this.criteria(model.criteria);
    const filtered = rows.filter(sift(query));
    const mappedRows = filtered.map(row =>
      this.mapField(row, model.projectedAttributes)
    );
    const sorted = this.sort(mappedRows, model);
    const paged = this.page(sorted, model);
    return paged;
  }
  /** Load records from file and/or take them directly from config (config.records wins). */
  public async initialize(_: Model[]) {
    if (this.config.filepath) {
      this.records = this.loadFile();
    }
    if (this.config.records) {
      this.records = this.config.records;
    }
  }
  /**
   * Append new records. Every model must expose a unique identity field; the
   * identity values of the created records are reported back in `affected`.
   */
  public async create(models: MutationModel[]): Promise<MutationResponse<any>> {
    const affectedRecordIDs = [];
    const inputRecords = [];
    const sourceRecords = [];
    models.forEach(model => {
      const sourceRecord = {};
      const inputRecord = {};
      const [id] = model.identities;
      if (!id) {
        throw new Error(`${model.name} does not have a unique field`);
      }
      Object.values(model.mutatedAttributes).forEach(attr => {
        sourceRecord[attr.sourceName] = attr.value;
        inputRecord[attr.name] = attr.value;
      });
      affectedRecordIDs.push(model.mutatedAttributes[id.name].value);
      inputRecords.push(inputRecord);
      sourceRecords.push(sourceRecord);
    });
    this.records = this.records.concat(...sourceRecords);
    return {
      affected: affectedRecordIDs,
      records: inputRecords
    };
  }
  /** Run a query against the in-memory records. */
  public async select(selection: QueryModel): Promise<any[]> {
    const rows = this.filter(this.records, selection);
    return rows;
  }
  /** Apply the mutated attributes in-place to every record matching the criteria. */
  public async update(
    criteria: Criteria,
    changes: MutationModel
  ): Promise<MutationResponse<any>> {
    const query: any = this.criteria(criteria);
    const recordsToUpdate = this.records.filter(sift(query));
    const affectedRecordIDs = [];
    const changedRecord = {};
    Object.values(changes.mutatedAttributes).forEach(attr => {
      changedRecord[attr.sourceName] = attr.value;
    });
    recordsToUpdate.forEach(recordToUpdate => {
      const responseRecord = {};
      Object.assign(recordToUpdate, changedRecord);
      Object.values(changes.mutatedAttributes).forEach(attr => {
        responseRecord[attr.name] = recordToUpdate[attr.sourceName];
      });
      const [id] = changes.identities;
      affectedRecordIDs.push(recordToUpdate[id.sourceName]);
    });
    return {
      affected: affectedRecordIDs
    };
  }
  /**
   * Report which records match the criteria for deletion.
   * NOTE(review): matching records are only reported, never removed from
   * this.records -- confirm whether actual removal is expected here.
   */
  public async delete(
    criteria: Criteria,
    model: Model
  ): Promise<MutationResponse<any>> {
    const query: any = this.criteria(criteria);
    const recordsToDelete = this.records.filter(sift(query));
    const [id] = model.identities;
    const affectedIDs = recordsToDelete.map(record => {
      return record[id.sourceName];
    });
    return {
      affected: affectedIDs
    };
  }
}
|
-- For the Discord user bound to ?1, list every game-server membership: the
-- started server's address and last-seen turn, the server alias, the player's
-- nation selection, and (when the server came from a lobby) the lobby's owner,
-- era, player count and description.
SELECT
s.address,
g.alias,
s.last_seen_turn,
sp.nation_id,
sp.custom_nation_name,
l.owner_id,
l.era,
l.player_count,
l.description
FROM players p
JOIN server_players sp on sp.player_id = p.id
JOIN game_servers g on g.id = sp.server_id
-- LEFT JOINs: a server may have no lobby and may not have been started yet.
LEFT JOIN lobbies l on l.id = g.lobby_id
LEFT JOIN started_servers s on s.id = g.started_server_id
WHERE p.discord_user_id = ?1;
|
set -ex
# Locate the repo root and test directories relative to this script.
LOCAL_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
ROOT_DIR=$(cd "$LOCAL_DIR"/../.. && pwd)
TEST_DIR="$ROOT_DIR/test"
# Where gtest (C++) and pytest (Python) reports are written.
gtest_reports_dir="${TEST_DIR}/test-reports/cpp"
pytest_reports_dir="${TEST_DIR}/test-reports/python"
# Figure out which Python to use
PYTHON="$(which python)"
# BUILD_ENVIRONMENT names embedding e.g. "py3.6" select that interpreter version.
if [[ "${BUILD_ENVIRONMENT}" =~ py((2|3)\.?[0-9]?\.?[0-9]?) ]]; then
PYTHON=$(which "python${BASH_REMATCH[1]}")
fi
if [[ "${BUILD_ENVIRONMENT}" == *rocm* ]]; then
# HIP_PLATFORM is auto-detected by hipcc; unset to avoid build errors
unset HIP_PLATFORM
if which sccache > /dev/null; then
# Save sccache logs to file
sccache --stop-server || true
rm -f ~/sccache_error.log || true
SCCACHE_ERROR_LOG=~/sccache_error.log SCCACHE_IDLE_TIMEOUT=0 sccache --start-server
# Report sccache stats for easier debugging
sccache --zero-stats
fi
fi
# /usr/local/caffe2 is where the cpp bits are installed to in cmake-only
# builds. In +python builds the cpp tests are copied to /usr/local/caffe2 so
# that the test code in .jenkins/test.sh is the same
INSTALL_PREFIX="/usr/local/caffe2"
# "|| true" tolerates pre-existing directories/permission quirks despite set -e.
mkdir -p "$gtest_reports_dir" || true
mkdir -p "$pytest_reports_dir" || true
mkdir -p "$INSTALL_PREFIX" || true
|
package no.mnemonic.commons.logging.log4j;
import org.junit.Test;
import static org.junit.Assert.*;
/**
 * Unit tests for Log4jLoggingContext: a key/value logging context in which
 * null keys, empty keys, null values and empty values are all treated as absent.
 */
public class Log4jLoggingContextTest {
    private Log4jLoggingContext context = new Log4jLoggingContext();
    @Test
    public void getByNull() throws Exception {
        assertNull(context.get(null));
    }
    @Test
    public void getByEmpty() throws Exception {
        assertNull(context.get(""));
    }
    @Test
    public void putGet() throws Exception {
        context.put("key1", "value1");
        assertEquals("value1", context.get("key1"));
    }
    // Putting a null value must not create an entry.
    @Test
    public void putNullValue() throws Exception {
        context.put("key1", null);
        assertFalse(context.containsKey("key1"));
    }
    // Putting an empty value must not create an entry either.
    @Test
    public void putEmptyValue() throws Exception {
        context.put("key1", "");
        assertFalse(context.containsKey("key1"));
    }
    @Test
    public void containsKey() throws Exception {
        context.put("key1", "value1");
        assertTrue(context.containsKey("key1"));
    }
    @Test
    public void containsNullKey() throws Exception {
        assertFalse(context.containsKey(null));
    }
    @Test
    public void containsEmptyKey() throws Exception {
        assertFalse(context.containsKey(""));
    }
    @Test
    public void remove() throws Exception {
        context.put("key1", "value1");
        context.remove("key1");
        assertFalse(context.containsKey("key1"));
    }
    // clear() must drop every stored entry.
    @Test
    public void clear() throws Exception {
        context.put("key1", "value1");
        context.clear();
        assertFalse(context.containsKey("key1"));
    }
}
|
/******************************************************************************
Copyright (c) 2015, Intel Corporation
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of Intel Corporation nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*****************************************************************************/
/*
* Copyright © 2015 Intel Corporation. All Rights Reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// Prepare for web server
var fs = require("fs");
var path = require("path");
var url = require('url');
var config = require('./config');
var account = require('./vendermodule');
// __dirname is falsy when run as a packaged executable; fall back to resolving
// the running binary's directory through /proc/self/exe (Linux-specific).
var dirname = __dirname || path.dirname(fs.readlinkSync('/proc/self/exe'));
var httpsOptions = {
key: fs.readFileSync(path.resolve(dirname, 'cert/key.pem')).toString(),
cert: fs.readFileSync(path.resolve(dirname, 'cert/cert.pem')).toString()
};
// Serve the signaling service over plain HTTP and HTTPS simultaneously, each
// port with its own socket.io instance.
var app = require('express')();
var server = app.listen(config.port.plain);
var servers = require("https").createServer(httpsOptions, app).listen(config.port.secured);
var io = require('socket.io').listen(server);
var ios = require('socket.io').listen(servers);
var sessionMap = {}; // Key is uid, and value is session object.
// Check user's token from partner
// Verifies `token` against the partner account system: calls successCallback(uid)
// on success, failureCallback(0) when the token is missing or rejected.
function validateUser(token, successCallback, failureCallback){
// TODO: Should check token first, replace this block when engagement with different partners.
if(token){
account.authentication(token,function(uid){
successCallback(uid);
},function(){
console.log('Account system return false.');
failureCallback(0);
});
}
else
failureCallback(0);
}
// If the user has a live session, notify the client that the server is dropping
// it, then close the socket. No-op when the uid has no session.
function disconnectClient(uid){
    var session = sessionMap[uid];
    if (session !== undefined) {
        session.emit('server-disconnect');
        session.disconnect();
        console.log('Force disconnected ' + uid);
    }
}
// Build a 32-character random lowercase-hex identifier (note: NOT RFC 4122
// formatted -- the template contains no hyphens or version/variant positions).
function createUuid(){
    var digits = '';
    for (var i = 0; i < 32; i++) {
        digits += (Math.random() * 16 | 0).toString(16);
    }
    return digits;
}
// Deliver an event to the target user's live session. Reports error code 2201
// (peer not connected) through failureCallback when no session exists; both
// callbacks are optional.
function emitChatEvent(targetUid, eventName, message, successCallback, failureCallback){
    var target = sessionMap[targetUid];
    if (!target) {
        if (failureCallback)
            failureCallback(2201);
        return;
    }
    target.emit(eventName, message);
    if (successCallback)
        successCallback();
}
// socket.io middleware: gates connections on a supported clientVersion and
// attaches socket.user from the auth token (or an anonymous id when absent).
// clientType is parsed but currently unused.
function authorization(socket, next){
var query=url.parse(socket.request.url,true).query;
var token=query.token;
var clientVersion=query.clientVersion;
var clientType=query.clientType;
switch(clientVersion){
// Whitelisted client versions; anything else is rejected with error 2103.
case '2.0':
case '1.5':
case '1.1':
case '2.1':
case '2.0.1':
case '2.1.1':
// socket.user stores session related information.
if(token){
// NOTE(review): validateUser's callbacks may fire asynchronously, yet next()
// below runs unconditionally -- the connection can proceed before socket.user
// is assigned, and on auth failure next() is invoked a second time from the
// failure callback. Confirm this is the intended behavior.
validateUser(token, function(uid){ // Validate user's token successfully.
socket.user={id:uid};
console.log(uid+' authentication passed.');
},function(error){
// Invalid login.
console.log('Authentication failed.');
next();
});
}else{
// No token supplied: admit the client as an anonymous user with a random id.
socket.user=new Object();
socket.user.id=createUuid()+'@anonymous';
console.log('Anonymous user: '+socket.user.id);
}
next();
break;
default:
next(new Error('2103'));
console.log('Unsupported client. Client version: '+query.clientVersion);
break;
}
}
// Per-connection handler: registers the session (kicking any previous session
// for the same uid), wires disconnect cleanup, and relays chat signaling events.
function onConnection(socket){
// Disconnect previous session if this user already signed in.
var uid=socket.user.id;
disconnectClient(uid);
sessionMap[uid]=socket;
socket.emit('server-authenticated',{uid:uid}); // Send current user's id to client.
console.log('A new client has connected. Online user number: '+Object.keys(sessionMap).length);
socket.on('disconnect',function(){
if(socket.user){
var uid=socket.user.id;
// Delete session
// Only remove the map entry if it still points at THIS socket -- a newer
// login for the same uid may already have replaced it.
if(socket===sessionMap[socket.user.id]){
delete sessionMap[socket.user.id];
}
console.log(uid+' has disconnected. Online user number: '+Object.keys(sessionMap).length);
}
});
// Forward events
// Each of these client events is relayed to the session named in data.to,
// with data.from rewritten to the sender's uid and data.to stripped.
var forwardEvents=['chat-invitation','chat-accepted','stream-type','chat-negotiation-needed','chat-negotiation-accepted','chat-stopped','chat-denied','chat-signal'];
for (var i=0;i<forwardEvents.length;i++){
// IIFE captures the loop index so each handler sees its own event name.
socket.on(forwardEvents[i],(function(i){
return function(data, ackCallback){
console.log('Received '+forwardEvents[i]);
data.from=socket.user.id;
var to=data.to;
delete data.to;
emitChatEvent(to,forwardEvents[i],data,function(){
if(ackCallback)
ackCallback();
},function(errorCode){
if(ackCallback)
ackCallback(errorCode);
});
};
})(i));
}
}
// Install the auth gate and connection handler on a socket.io instance.
function listen(io) {
io.use(authorization);
io.on('connection',onConnection);
}
// Wire up both the HTTP and HTTPS socket.io instances.
listen(io);
listen(ios);
// Signaling server only allowed to be connected with Socket.io.
// If a client try to connect it with any other methods, server returns 405.
// NOTE(review): res.send(status, body) is the deprecated Express 3 signature;
// Express 4+ expects res.status(405).send(...). Confirm the installed version.
app.get('*', function(req, res, next) {
res.send(405, 'WebRTC signaling server. Please connect it with Socket.IO.');
});
console.info('Listening port: ' + config.port.plain + '/' + config.port.secured);
|
import { Component } from '@angular/core';
import { NavController } from 'ionic-angular';
import { CalculosDirective } from '../../directives/calculos/calculos';
//import { DirectivesModule } from '../../directives/directives.module';
@Component({
  selector: 'page-home',
  templateUrl: 'home.html'
})
// BMI calculator page: binds the weight/height inputs and shows the computed
// BMI plus its textual classification. (Field names are Portuguese:
// peso = weight, altura = height, imc = BMI, condicao = condition.)
export class HomePage {
  peso: number = 0;
  altura: number = 0;
  imc: number = 0;
  condicao: string = "";
  constructor( public navCtrl: NavController ) { }
  // Compute the BMI from the current inputs and derive its classification,
  // delegating both to CalculosDirective.
  calcular(){
    this.imc = CalculosDirective.calcularImc( this.altura, this.peso );
    this.condicao = CalculosDirective.informarImc(this.imc);
  }
}
|
#!/usr/bin/env bash
# Build (and optionally push) the core Docker images via sbt.
#   --push / --push-docker-images : push images after building
#   -v / --version VERSION        : version string to stamp the build with
set -eu
: ${NOOP:=}
# Default VERSION to empty so the explicit check below can print its friendly
# error instead of `set -u` aborting with "VERSION: unbound variable" when
# -v/--version was never supplied.
VERSION=${VERSION:-}
HERE="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd -P )"
source $HERE/../../common.sh
docker_task="docker"
push_msg=
while [[ $# -gt 0 ]]
do
    case $1 in
    --push|--push-docker-images)
        docker_task="dockerBuildAndPush"
        push_msg="Pushed the docker images."
        ;;
    -v|--version*)
        shift
        VERSION=$(get_version $@)
        ;;
    *)
        error "Unrecognized argument $1"
        ;;
    esac
    shift
done
[[ -n $VERSION ]] || error "Version string can't be empty!"
info2 "Using version $VERSION"
cd ${HERE}/source/core
# NOOP can be set to e.g. "echo" to dry-run the sbt invocation.
$NOOP sbt -no-colors "set version in ThisBuild := \"$VERSION\"" clean $docker_task
echo "$PWD: built package and Docker images. $push_msg"
|
<html>
<head>
<title>Countdown Timer</title>
<style>
#timer-container {
width: 300px;
height: 150px;
text-align: center;
margin: 20px auto;
}
#timer {
font-size: 4rem;
font-weight: bold;
color: #f00;
}
</style>
</head>
<body>
<div id="timer-container">
<div id="timer"></div>
</div>
<script>
// set the date we're counting down to
let target_date = new Date('January 10, 2021').getTime();
// update the count down every 1 second
let timer = setInterval(function() {
// get today's date and time
let current_date = new Date().getTime();
// find the distance between now and the target date
let distance = target_date - current_date;
// Fix: check for expiry BEFORE rendering -- the old code first wrote negative
// day/hour/minute values into the DOM every tick after the deadline, then
// overwrote them; now we stop the interval and return early instead.
if (distance < 0) {
clearInterval(timer);
document.getElementById("timer").innerHTML = "Countdown has ended";
return;
}
// break the remaining time into days / hours / minutes / seconds
let days = Math.floor(distance / (1000 * 60 * 60 * 24));
let hours = Math.floor((distance % (1000 * 60 * 60 * 24)) / (1000 * 60 * 60));
let minutes = Math.floor((distance % (1000 * 60 * 60)) / (1000 * 60));
let seconds = Math.floor((distance % (1000 * 60)) / 1000);
// format and show the result in an element with id="timer"
document.getElementById("timer").innerHTML = days + "d " + hours + "h " + minutes + "m " + seconds + "s ";
}, 1000);
</script>
</body>
</html>
/*
* Filter.hpp
*
*
* Copyright (C) 2019 <NAME> <<EMAIL>>
*
*/
#ifndef FILTER_HPP
#define FILTER_HPP
#include "HeadTail.hpp"
#include "Select.hpp"
#include "IsEmpty.hpp"
// Filter: compile-time list filter.
// Recursively walks the typelist `cont`, keeping only the elements for which
// Pred<cont::Type>::check(element) holds. `empty` is a defaulted helper
// parameter used solely to select the base-case specialization below.
template <template <class> class Pred, class cont,
          bool empty = IsEmpty<cont>::result>
struct Filter
{
    // Result of filtering everything after the head element.
    // NOTE(review): this member typedef shadows the Tail<> template from
    // HeadTail.hpp for the rest of this scope — it works, but is easy to misread.
    typedef typename Filter<Pred, typename Tail<cont>::Result>::Result Tail;
    // Keep the head iff the predicate accepts it: prepend it to the filtered
    // tail; otherwise the result is just the filtered tail.
    typedef typename Select<Pred<typename cont::Type>::check(Head<cont>::result),
                            typename PushFront<Tail, Head<cont>::result>::Result,
                            Tail>::Result Result;
};

// Base case: filtering an empty list yields the (empty) list unchanged.
template <template <class> class Pred, class cont>
struct Filter<Pred, cont, true>
{
    typedef cont Result;
};
#endif /* FILTER_HPP */
|
import datetime
import sys
# noinspection PyPackageRequirements
import bson
import pymongo
import colors
slow_ms = 5
def remove_indexes(db):
    """Drop all secondary indexes from the Publisher, User and Book collections."""
    print(colors.bold_color + "Removing indexes ... ", end='')
    sys.stdout.flush()
    for collection in (db.Publisher, db.User, db.Book):
        collection.drop_indexes()
    print(colors.subdue_color + "done.")
def reset_profile_data(db):
    """Turn profiling off and discard any previously collected profile documents."""
    print(colors.bold_color + "Resetting profiling ... ", end='')
    sys.stdout.flush()
    db.set_profiling_level(pymongo.OFF)
    db.system.profile.drop()
    print(colors.subdue_color + "done.")
def renable_profiling(db):
    """Enable MongoDB profiling for operations slower than ``slow_ms`` milliseconds."""
    print(colors.bold_color + "Enabling profiling for slow queries ... ", end='')
    sys.stdout.flush()
    db.set_profiling_level(pymongo.SLOW_ONLY, slow_ms=slow_ms)
    print(colors.subdue_color + "done.")
def query_data(db):
    """Run the standard query workload once so the profiler records timings.

    All results are intentionally discarded; the point of this function is
    only to generate entries in ``db.system.profile`` for later analysis.
    """
    print(colors.bold_color + "Running standard queries (generates profiling data) ... ", end='')
    sys.stdout.flush()
    users = db.User
    publishers = db.Publisher
    books = db.Book
    # User queries: equality, range and nested-field matches.
    users.find({"Age": 18}).count()
    users.find_one({"UserId": 3})
    users.find({"Age": {'$gt': 61}}).count()
    users.find({"Location.City": "moscow"}).count()
    users.find({"Location.City": "moscow"}).count()
    # Publisher lookup followed by a join-style query on Book.Publisher.
    p = publishers.find_one({"Name": "2nd Avenue Publishing, Inc."})
    d = list(books.find({"Publisher": p['_id']}, {"Title": 1}))[-1]
    # NOTE(review): the value above is immediately overwritten below; the
    # find() is kept only for its profiling side effect.
    d = datetime.datetime(2010, 1, 1)
    books.find({"Published": {"$gt": d}}).count()
    s = {r['Title'] for r in books.find({"Author": "<NAME>"}, {"Title": 1, "_id": 0})}
    d = datetime.datetime(2003, 1, 1)
    s = {r['Title'] for r in books.find({"Author": "<NAME>", "Published": {"$gt": d}},
                                        {"Title": 1, "_id": 0})}
    # Queries over the embedded Ratings array.
    books.find({"Ratings.Value": {"$gte": 10}}).count()
    user_id = bson.ObjectId("525867733a93bb2198146309")
    books.find({"Ratings.UserId": user_id}).count()
    books.find({"Ratings.UserId": user_id, "Ratings.Value": {"$gte": 10}}).count()
    print(colors.subdue_color + "done.")
def display_bad_queries(db):
    """Print a line for every slow query captured in the profiling collection."""
    print(colors.bold_color + "Displaying top 10 worst queries (slower than 5 ms) ...")
    queries = list(db.system.profile.find({"protocol": "op_query"}))
    print(colors.subdue_color + "Found {} bad queries".format(len(queries)))
    for q in queries:
        # Where the query text lives depends on the recorded operation type.
        op = q.get('op')
        if op == 'command':
            cmd = q.get('command', {'query': 'n/a'}).get('query')
        elif op == 'query':
            cmd = q.get('query')
        else:
            cmd = "unset"
        coll_name = q['ns'].replace('books.', '')
        print(colors.subdue_color + "time: ", end='')
        print(colors.highlight_color + '{:,}ms'.format(q['millis']), end='')
        print(colors.subdue_color + ", coll: {}, query: ".format(coll_name), end='')
        print(colors.notice_color + '{}'.format(cmd))
    print(colors.subdue_color + "done.")
def add_indexes(db):
    """Create every secondary index used by the standard query workload."""
    print(colors.bold_color + "Adding indexes (this is s.l.o.w.) ...", end=' ')
    sys.stdout.flush()
    # Book indexes first, then User, then Publisher (same order as before).
    db.Book.create_index("Ratings.Value", name='books_by_rating_value')
    db.Book.create_index([("Ratings.UserId", pymongo.ASCENDING),
                          ("Ratings.Value", pymongo.ASCENDING)],
                         name='books_by_rating_userid_and_value')
    db.Book.create_index("Publisher", name="books_by_publisher")
    db.Book.create_index("Published", name="books_by_published_date")
    db.Book.create_index("Author", name="books_by_author")
    db.User.create_index("Age", name="users_by_age")
    db.User.create_index("UserId", name="users_by_id")
    db.User.create_index("Location.City", name="users_by_city")
    db.Publisher.create_index("Name", name='publishers_by_name')
    print(colors.subdue_color + 'done.')
def display_times_and_query_plans(db, run_with_indexes):
    """Run the standard queries, printing their results, timings and plans.

    ``run_with_indexes`` only selects the banner text; the queries themselves
    are identical in both phases so the timings are directly comparable.
    """
    total_time = 0
    # BUG FIX: the two banner strings were swapped — the indexed run is the
    # fast one, the unindexed run is the slow one.
    if run_with_indexes:
        print(colors.highlight_color + "Showing query times and plans with results (should be faster) ...")
    else:
        print(colors.highlight_color + "Showing query times and plans with results (should be slow) ...")
    users = db.User
    publishers = db.Publisher
    books = db.Book
    print(colors.subdue_color + "There are {:,} users who are 18 years old.".format(
        users.find({"Age": 18}).count()
    ))
    x = users.find({"Age": 18}).explain()
    total_time += print_explain_info(x, run_with_indexes)
    print()
    print(colors.subdue_color + "User with ID 3 is from {}".format(users.find_one({"UserId": 3})['Location']['City']))
    x = users.find({"UserId": 3}).explain()
    total_time += print_explain_info(x, run_with_indexes)
    print()
    print(colors.subdue_color + "There are {:,} users over 61".format(users.find({"Age": {'$gt': 61}}).count()))
    x = users.find({"Age": {'$gt': 61}}).explain()
    total_time += print_explain_info(x, run_with_indexes)
    print()
    c = users.find({"Location.City": "moscow"}).count()
    print(colors.subdue_color + "{:,} users are from Moscow".format(c))
    x = users.find({"Location.City": "moscow"}).explain()
    total_time += print_explain_info(x, run_with_indexes)
    print()
    p = publishers.find_one({"Name": "2nd Avenue Publishing, Inc."})
    print(colors.subdue_color + "2nd Avenue Publishing has ID {}".format(p.get('_id')))
    x = publishers.find({"Name": "2nd Avenue Publishing, Inc."}).explain()
    total_time += print_explain_info(x, run_with_indexes)
    print()
    # BUG FIX: previously only the last matching document was kept ([-1]) and
    # len() counted that one document's fields; count the matching books instead.
    ff = list(books.find({"Publisher": p['_id']}, {"Title": 1}))
    x = books.find({"Publisher": p['_id']}, {"Title": 1}).explain()
    print(colors.subdue_color + "Books published by 2nd Avenue Publishing, Inc.: {}".format(len(ff)))
    total_time += print_explain_info(x, run_with_indexes)
    print()
    d = datetime.datetime(2010, 1, 1)
    f = books.find({"Published": {"$gt": d}}).count()
    # BUG FIX: the message said "since 2001" but the cutoff date is 2010-01-01.
    print(colors.subdue_color + "{:,} books have been published since 2010".format(f))
    x = books.find({"Published": {"$gt": d}}).explain()
    total_time += print_explain_info(x, run_with_indexes)
    print()
    ff = {r['Title'] for r in books.find({"Author": "<NAME>"}, {"Title": 1, "_id": 0})}
    print(colors.subdue_color + "Books written by <NAME>, count: {}".format(len(ff)))
    print(ff)
    x = books.find({"Author": "<NAME>"}, {"Title": 1, "_id": 0}).explain()
    total_time += print_explain_info(x, run_with_indexes)
    print()
    d = datetime.datetime(2003, 1, 1)
    ff = {r['Title'] for r in books.find({"Author": "<NAME>", "Published": {"$gt": d}},
                                         {"Title": 1, "_id": 0})}
    x = books.find({"Author": "<NAME>", "Published": {"$gt": d}},
                   {"Title": 1, "_id": 0}).explain()
    print(colors.subdue_color + "Books written by <NAME> after 2003, count: {}".format(len(ff)))
    print(ff)
    total_time += print_explain_info(x, run_with_indexes)
    print()
    ff = books.find({"Ratings.Value": {"$gte": 10}}).count()
    print(colors.subdue_color + "Books rated 'perfect 10': {:,}".format(ff))
    x = books.find({"Ratings.Value": {"$gte": 10}}).explain()
    total_time += print_explain_info(x, run_with_indexes)
    print()
    user_id = bson.ObjectId("525867733a93bb2198146309")
    ff = books.find({"Ratings.UserId": user_id}).count()
    print(colors.subdue_color + "Books rated by particular user {}".format(ff))
    x = books.find({"Ratings.UserId": user_id}).explain()
    total_time += print_explain_info(x, run_with_indexes)
    print()
    ff = books.find({"Ratings.UserId": user_id, "Ratings.Value": {"$gte": 10}}).count()
    # BUG FIX: "Boos" -> "Books" in the message below.
    print(colors.subdue_color + "Books rated perfect 10 by {}: {}".format(user_id, ff))
    x = books.find({"Ratings.UserId": user_id, "Ratings.Value": {"$gte": 10}}).explain()
    total_time += print_explain_info(x, run_with_indexes)
    print()
    print(colors.subdue_color + "done, total time: ", end='')
    print(colors.highlight_color + '{:,}'.format(total_time), end='')
    print(colors.subdue_color + ' ms.')
def print_explain_info(explain_object, run_with_indexes):
    """Print timing and index information from a MongoDB explain() document.

    Returns the execution time in milliseconds, or 0 when the explain output
    carries no timing information (callers sum the return value).
    ``run_with_indexes`` is currently unused but kept for interface stability.
    """
    x = explain_object
    time_ms = 'unknown'
    execution_stats = x.get('executionStats')
    if execution_stats:
        execution_time_ms = execution_stats.get('executionTimeMillis')
        if execution_time_ms is not None:
            time_ms = execution_time_ms
    index_name = "NO INDEX"
    has_index = False
    non_index_stage = 'UNKNOWN_STATUS'
    query_planner = x.get('queryPlanner')
    if query_planner:
        winning_plan = query_planner.get('winningPlan')
        if winning_plan:
            # The index name may sit one or two inputStage levels deep in the
            # winning plan, so probe both levels before giving up.
            input_stage = winning_plan.get('inputStage', {'indexName': None})
            index_name = input_stage.get('indexName')
            if not index_name:
                input_stage = input_stage.get('inputStage', {'indexName': None})
                index_name = input_stage.get('indexName')
                if not index_name:
                    non_index_stage = winning_plan.get('stage')
                else:
                    has_index = True
            else:
                has_index = True
    if has_index:
        print(colors.notice_color + "INDEXED", end='')
        print(colors.subdue_color + ": time: ", end='')
        print(colors.highlight_color + '{} ms'.format(time_ms), end='')
        print(colors.subdue_color + ', index: ', end='')
        print(colors.highlight_color + "{}".format(index_name))
    else:
        print(colors.notice_color + "No index", end='')
        print(colors.subdue_color + ": time: ", end='')
        print(colors.highlight_color + '{} ms'.format(time_ms), end='')
        print(colors.subdue_color + ', index: {}'.format(index_name), end='')
        print(colors.subdue_color + ', stage: ', end='')
        print(colors.highlight_color + "{}".format(non_index_stage))
    # BUG FIX: previously the string 'unknown' could be returned, which makes
    # the caller's `total_time += print_explain_info(...)` raise TypeError.
    return time_ms if time_ms != 'unknown' else 0
def show_data_size(db):
    """Print how many books and how many embedded ratings the data set holds."""
    print(colors.bold_color + "Computing data set size...")
    sys.stdout.flush()
    print(colors.highlight_color + "{:,}".format(db.Book.count()), end='')
    print(colors.subdue_color + ' books')
    sys.stdout.flush()
    # Ratings are embedded per book, so total them by scanning every document.
    review_count = sum(len(b['Ratings'])
                       for b in db.Book.find({}, {'_id': 0, 'Ratings.Value': 1}))
    print(colors.highlight_color + "{:,}".format(review_count), end='')
    print(colors.subdue_color + " reviews")
    print()
def run():
    """Drive the full demo: measure without indexes, add indexes, measure again."""
    client = pymongo.MongoClient()
    db = client.books
    show_data_size(db)
    # Phase 1: run the workload with no indexes and show how slow it is.
    remove_indexes(db)
    reset_profile_data(db)
    renable_profiling(db)
    query_data(db)
    input(colors.subdue_color + "--------- paused - enter to see bad queries -----")
    display_bad_queries(db)
    input(colors.subdue_color + "--------- paused - enter to display times -------")
    display_times_and_query_plans(db, False)
    # Phase 2: add the indexes and repeat the same workload for comparison.
    input(colors.subdue_color + "--------- paused - enter to add indexes ---------")
    add_indexes(db)
    reset_profile_data(db)
    renable_profiling(db)
    query_data(db)
    display_bad_queries(db)
    input(colors.subdue_color + "--------- paused - enter to display times -------")
    display_times_and_query_plans(db, True)
    print()
|
#!/bin/bash
# Maintain wordlist.txt: -d deletes the given words, -a appends them; in both
# cases the list is then deduplicated, sorted, and stripped of blank lines.
WORDS="${@:2}"

if [[ "$1" = "-d" ]]; then
    echo -e "Deleting the following words from wordlist.txt:\n${WORDS}"
    for i in $WORDS; do
        # FIX: match the word literally (grep -Fx) instead of interpolating it
        # into a sed regex, so words containing . * + [ ] etc. are handled
        # safely. grep exits non-zero when nothing remains, hence `|| true`.
        grep -vFx -- "$i" wordlist.txt > wordlist.txt.tmp || true
        mv wordlist.txt.tmp wordlist.txt
    done
elif [[ "$1" = "-a" ]]; then
    echo -e "Adding the following words to wordlist.txt:\n${WORDS}"
    for i in $WORDS; do
        echo "$i" >> wordlist.txt
    done
fi

echo -e "\nSorting wordlist..."
sort -u wordlist.txt | awk 'NF>0' > tmp && mv tmp wordlist.txt
echo "Done."
|
/**
 * Owns the overlay view for the wheel-of-fortune widget.
 */
class OverlayController {
    // The single view instance this controller manages.
    private _overlayView: OverlayView;

    /**
     * @param sections Section data used to build the overlay.
     * @param config   Wheel-of-fortune configuration passed through to the view.
     */
    constructor(sections: SectionData[], config: WheelOfFortuneConfig) {
        this._overlayView = new OverlayView(sections, config);
    }
}
|
<reponame>MccreeFei/jframe<filename>jframe/jframe-core/src/main/java/jframe/core/plugin/annotation/MsgRecv.java
/**
*
*/
package jframe.core.plugin.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a method as a message receiver. Annotated methods are discovered at
 * runtime (RUNTIME retention) and invoked by the dispatcher in the form
 * {@code DispatchTarget.receive(Msg<?> msg)}.
 *
 * @author dzh
 * @date Oct 9, 2013 3:06:54 PM
 * @since 1.0
 */
@Target({ ElementType.METHOD })
@Retention(RetentionPolicy.RUNTIME)
public @interface MsgRecv {
}
|
from typing import List, Tuple
import psycopg2
def retrieve_tag_data(tag_name: str) -> List[Tuple[str, str]]:
    """Return (tag_name, tag_data) rows matching ``tag_name``.

    Returns an empty list when ``tag_name`` is None or the special value
    'global'; no query is issued in that case.
    """
    # NOTE(review): AppState is not imported in this module — presumably a
    # global application-state holder providing the open DB connection; verify.
    results: List[Tuple[str, str]] = []
    with AppState.Database.CONN.cursor() as cur:
        if tag_name not in (None, 'global'):
            cur.execute(
                "SELECT tag_name, tag_data FROM tags WHERE tag_name = %s",
                (tag_name,),
            )
            results = cur.fetchall()
    return results
#!/bin/bash
##################################################################################
#Copyright (c) 2016, Intel Corporation
#All rights reserved.
#
#Redistribution and use in source and binary forms, with or without
#modification, are permitted provided that the following conditions are met:
#
#1. Redistributions of source code must retain the above copyright notice, this
#list of conditions and the following disclaimer.
#
#2. Redistributions in binary form must reproduce the above copyright notice,
#this list of conditions and the following disclaimer in the documentation
#and/or other materials provided with the distribution.
#
#3. Neither the name of the copyright holder nor the names of its contributors
#may be used to endorse or promote products derived from this software without
#specific prior written permission.
#
#THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
#AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
#IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
#DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
#FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
#DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
#SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
#CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
#OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
#OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
##################################################################################
# Load the shared Euclid configuration (provides EUCLID_ROOT, among others).
source /intel/euclid/config/settings.bash
# Launch the S3 ROS-bridge server as the unprivileged 'euclid' user.
su euclid -c "$EUCLID_ROOT/oobe-services/ros_services/S3ROSBridge/startServer.sh"
exit 0
|
<filename>LZUISDK/SDK/LSDeviceManagerFramework.framework/Headers/WeatherData.h
//
//  WeatherData.h
//  LSWearable
//
//  Created on 17/1/11. (author name removed: the original was mojibake-garbled)
//  Copyright © 2017 lifesense. All rights reserved.
//

#import <Foundation/Foundation.h>

// Model object for weather information in the LSDeviceManager SDK.
// NOTE(review): the interface is empty in this public header; its properties
// are presumably declared elsewhere (e.g. in a category) — verify in the SDK.
@interface WeatherData : NSObject

@end
|
import Trie from "../utils/trie";
/**
* Utility class to manage the Waste Wizard Data
*/
/**
 * Utility class to manage the Waste Wizard data set.
 */
class WasteWizardDataManager {
  /**
   * Insert every known keyword into the Trie used for prefix searching.
   */
  static storeKeywordsInTrie() {
    for (const keyword of Object.keys(this.keywordDictionary)) {
      this.keywords.insert(keyword);
    }
  }

  /**
   * Build the keyword -> waste-entry dictionary from the raw JSON payload,
   * then index all keywords in the Trie.
   */
  static mapWasteWizardKeywords(wasteWizardJson) {
    const edges = wasteWizardJson.allWastewizardJson.edges;
    for (const edgeKey of Object.keys(edges)) {
      const entry = edges[edgeKey].node;
      for (const token of this.sanAndTok(entry.keywords)) {
        this.keywordDictionary[token] = entry;
      }
    }
    this.storeKeywordsInTrie();
  }

  /**
   * Lower-case a comma-separated keyword string, split it on commas, and trim
   * surrounding whitespace from each token.
   */
  static sanAndTok(keywords) {
    return keywords
      .toLowerCase()
      .split(",")
      .map(token => token.trim());
  }
}
// Keyword -> waste-entry lookup, populated by mapWasteWizardKeywords().
WasteWizardDataManager.keywordDictionary = {};
// Trie over the dictionary keys, used for prefix searches.
WasteWizardDataManager.keywords = new Trie();
export default WasteWizardDataManager;
|
<reponame>groupon/nakala
/*
Copyright (c) 2013, Groupon, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
Neither the name of GROUPON nor the names of its contributors may be
used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.groupon.ml.svm;
import com.groupon.ml.ClassificationAnalysis;
import com.groupon.ml.Classifier;
import com.groupon.nakala.analysis.Analysis;
import com.groupon.nakala.core.Analyzable;
import com.groupon.nakala.core.Constants;
import com.groupon.nakala.core.Identifiable;
import com.groupon.nakala.core.Parameters;
import com.groupon.nakala.exceptions.AnalyzerFailureException;
import com.groupon.nakala.exceptions.ResourceInitializationException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
/**
 * @author <EMAIL>
 * <p/>
 * This convenience class wraps around a number of smaller classifiers. The output is
 * an aggregation of the output of all the smaller classifiers.
 * The label "other" from the wrapped classifiers is ignored.
 */
public class MultiModelClassifier extends Classifier {

    /** Wrapped classifiers whose individual results are aggregated. */
    private List<Classifier> classifiers;

    /** Pool used to run the wrapped classifiers concurrently. */
    private ExecutorService executorService;

    /** Pool size; always between 1 and classifiers.size(). */
    private int numberOfThreads;

    /**
     * Reads the wrapped classifiers and the optional thread count from the
     * parameters and creates the thread pool.
     *
     * @throws ResourceInitializationException if no classifiers are configured
     *                                         or the thread count is below 1.
     */
    @Override
    public void initialize(Parameters params) throws ResourceInitializationException {
        if (params.contains(Constants.ANALYZERS)) {
            classifiers = (List<Classifier>) params.get(Constants.ANALYZERS);
        } else {
            throw new ResourceInitializationException("No classifiers specified.");
        }
        if (params.contains(Constants.NUMBER_OF_THREADS)) {
            numberOfThreads = params.getInt(Constants.NUMBER_OF_THREADS);
            if (numberOfThreads < 1) {
                throw new ResourceInitializationException(Constants.NUMBER_OF_THREADS + " must be >= 1");
            }
            // More threads than classifiers would never be used.
            numberOfThreads = Math.min(numberOfThreads, classifiers.size());
        } else {
            numberOfThreads = classifiers.size();
        }
        executorService = Executors.newFixedThreadPool(numberOfThreads);
    }

    /** Shuts down the thread pool and every wrapped classifier. */
    @Override
    public void shutdown() {
        executorService.shutdown();
        for (Classifier classifier : classifiers) {
            classifier.shutdown();
        }
    }

    /**
     * Runs every wrapped classifier on the input in parallel and merges their
     * classifications into a single analysis, dropping the "other" label.
     *
     * @throws AnalyzerFailureException if the input is not Identifiable or any
     *                                  wrapped classifier fails.
     */
    @Override
    public Analysis analyze(Analyzable a) throws AnalyzerFailureException {
        if (!(a instanceof Identifiable)) {
            throw new AnalyzerFailureException("Input not Identifiable.");
        }
        Identifiable identifiable = (Identifiable) a;
        List<Future<ClassificationAnalysis>> analyses = new ArrayList<Future<ClassificationAnalysis>>(classifiers.size());
        for (Classifier classifier : classifiers) {
            analyses.add(executorService.submit(new CallableClassifier(classifier, a)));
        }
        ClassificationAnalysis analysis = new ClassificationAnalysis(identifiable.getId());
        for (Future<ClassificationAnalysis> future : analyses) {
            try {
                for (Map.Entry<String, Double> e : future.get().getClassifications().entrySet()) {
                    if (e.getKey().equals("other")) continue;
                    analysis.addClassification(e.getKey(), e.getValue());
                }
            } catch (Exception e) {
                throw new AnalyzerFailureException(e);
            }
        }
        return analysis;
    }

    /**
     * Adapts a single classifier invocation to Callable for the executor.
     * FIX: the interface is now parameterized as Callable&lt;ClassificationAnalysis&gt;
     * (it was raw before), so submit() returns a correctly typed Future.
     */
    class CallableClassifier implements Callable<ClassificationAnalysis> {
        private Classifier classifier;
        private Analyzable analyzable;

        public CallableClassifier(Classifier classifier, Analyzable analyzable) {
            this.classifier = classifier;
            this.analyzable = analyzable;
        }

        @Override
        public ClassificationAnalysis call() throws Exception {
            return (ClassificationAnalysis) classifier.analyze(analyzable);
        }
    }
}
|
<filename>applications/physbam/physbam-lib/Public_Library/PhysBAM_Solids/PhysBAM_Rigids/Collisions/RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR.cpp<gh_stars>10-100
//#####################################################################
// Copyright 2011.
// This file is part of PhysBAM whose distribution is governed by the license contained in the accompanying file PHYSBAM_COPYRIGHT.txt.
//#####################################################################
// Class RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR
//#####################################################################
#include <PhysBAM_Tools/Arrays_Computations/SUMMATIONS.h>
#include <PhysBAM_Tools/Data_Structures/HASHTABLE.h>
#include <PhysBAM_Tools/Math_Tools/RANGE.h>
#include <PhysBAM_Geometry/Spatial_Acceleration/BOX_HIERARCHY_DEFINITION.h>
#include <PhysBAM_Solids/PhysBAM_Rigids/Collisions/RIGID_STRUCTURE_INTERACTION_GEOMETRY.h>
#include <PhysBAM_Solids/PhysBAM_Rigids/Collisions/RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR.h>
#include <PhysBAM_Solids/PhysBAM_Rigids/Collisions/RIGID_TRIANGLE_COLLISIONS_GEOMETRY.h>
using namespace PhysBAM;
//#####################################################################
// Constructor
//#####################################################################
// Constructor: captures references to the two edge structures being tested
// against each other, the output pair lists, and the collision thickness.
// No computation happens here; work is done in the Store_Helper functions.
template<class TV> RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR<TV>::
RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR(ARRAY<VECTOR<int,2*d-2> >& pairs_internal, ARRAY<VECTOR<int,2*d-2> >& pairs_external,
    const RIGID_STRUCTURE_INTERACTION_GEOMETRY<TV>& edge_structure1,const RIGID_STRUCTURE_INTERACTION_GEOMETRY<TV>& edge_structure2,
    const RIGID_TRIANGLE_COLLISIONS_GEOMETRY<TV>& geometry,const T collision_thickness,MPI_RIGIDS<TV>* mpi_solids)
    :pairs_internal(pairs_internal),pairs_external(pairs_external),edges1(edge_structure1.Edges()),edges2(edge_structure2.Edges()),
    edge_structure1(edge_structure1),edge_structure2(edge_structure2),collision_thickness(collision_thickness),mpi_solids(mpi_solids)
{}
//#####################################################################
// Destructor
//#####################################################################
template<class TV> RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR<TV>::
~RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR()
{}
//#####################################################################
// Function Store_Helper
//#####################################################################
// Point-point case (the VECTOR<T,2> tag selects the 2d overload): decide
// whether the two endpoints could have come within collision_thickness of
// each other during the timestep, and if so record the pair.
template<class TV> void RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR<TV>::
Store_Helper(const int point1,const int point2,VECTOR<T,2>)
{
    int p1=edges1(point1),p2=edges2(point2);
    // Average displacement of the two points over the step, used to offset the
    // collision-free positions so both swept boxes are compared in a common frame.
    TV dX_average=(edge_structure1.X(p1)-edge_structure1.X_self_collision_free(p1))+(edge_structure2.X(p2)-edge_structure2.X_self_collision_free(p2));
    dX_average*=(T).5; // TODO: This is separated because of compiler bugs
    // Swept bounding box of each point: offset start position, enlarged to
    // include the end position.
    RANGE<TV> box1=RANGE<TV>(edge_structure1.X_self_collision_free(p1))+dX_average;box1.Enlarge_Nonempty_Box_To_Include_Point(edge_structure1.X(p1));
    RANGE<TV> box2=RANGE<TV>(edge_structure2.X_self_collision_free(p2))+dX_average;box2.Enlarge_Nonempty_Box_To_Include_Point(edge_structure2.X(p2));
    if(!box1.Intersection(box2,collision_thickness)) return;
    VECTOR<int,2> nodes(p1,p2);
    // Under MPI, a pair whose particles live on different partitions must be
    // routed to the external (cross-processor) list.
    if (mpi_solids){
        VECTOR<PARTITION_ID,2> processors(mpi_solids->partition_id_from_particle_index.Subset(nodes));
        if (processors(1)!=processors(2)) pairs_external.Append(nodes);
        else pairs_internal.Append(nodes);}
    else pairs_internal.Append(nodes);
}
//#####################################################################
// Function Store_Helper_Helper
//#####################################################################
// Segment-segment case: decide whether two edges (one from each structure)
// could have come within collision_thickness of each other during the
// timestep, and if so record the 4-node pair.
template<class TV> void RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR<TV>::
Store_Helper_Helper(const int segment1,const int segment2)
{
    const VECTOR<int,2> &segment1_nodes=edges1(segment1),&segment2_nodes=edges2(segment2);
    // Average displacement of both segments' endpoints over the step, used to
    // offset the collision-free positions into a common comparison frame.
    TV dX_average=(ARRAYS_COMPUTATIONS::Average(edge_structure1.X.Subset(segment1_nodes)) // TODO: optimize scalar multiplications
        -ARRAYS_COMPUTATIONS::Average(edge_structure1.X_self_collision_free.Subset(segment1_nodes))
        +ARRAYS_COMPUTATIONS::Average(edge_structure2.X.Subset(segment2_nodes))
        -ARRAYS_COMPUTATIONS::Average(edge_structure2.X_self_collision_free.Subset(segment2_nodes)));
    dX_average*=(T).5; // TODO: This is separated because of compiler bugs
    // Swept bounding box of each segment: offset start box, enlarged to
    // include the end positions of both endpoints.
    RANGE<TV> box1=RANGE<TV>::Bounding_Box(edge_structure1.X_self_collision_free.Subset(segment1_nodes))+dX_average;box1.Enlarge_Nonempty_Box_To_Include_Points(edge_structure1.X.Subset(segment1_nodes));
    RANGE<TV> box2=RANGE<TV>::Bounding_Box(edge_structure2.X_self_collision_free.Subset(segment2_nodes))+dX_average;box2.Enlarge_Nonempty_Box_To_Include_Points(edge_structure2.X.Subset(segment2_nodes));
    if(!box1.Intersection(box2,collision_thickness)) return;
    VECTOR<int,4> nodes(segment1_nodes[1],segment1_nodes[2],segment2_nodes[1],segment2_nodes[2]);
    // Under MPI, the pair is external if any node lives on a different
    // partition than the last node; otherwise it is handled locally.
    if (mpi_solids){
        VECTOR<PARTITION_ID,4> processors(mpi_solids->partition_id_from_particle_index.Subset(nodes));
        for(int i=1;i<=3;i++) if (processors(i)!=processors(4)) {pairs_external.Append(nodes);return;}
        pairs_internal.Append(nodes);}
    else pairs_internal.Append(nodes);
}
//####################################################################
// Explicitly instantiate the BOX_HIERARCHY intersection/sweep entry points
// that take this visitor, for a given scalar type T and dimension d.
// NOTE: no comments may appear inside the macro body because of the
// backslash line continuations.
#define INSTANTIATION_HELPER(T,d) \
    template void BOX_HIERARCHY<VECTOR<T,d> >::Intersection_List<BOX_VISITOR_MPI<RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR<VECTOR<T,d> > >,T>(BOX_HIERARCHY<VECTOR<T,d> > const&, \
        BOX_VISITOR_MPI<RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR<VECTOR<T,d> > >&,T) const; \
    template void BOX_HIERARCHY<VECTOR<T,d> >::Intersection_List<RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR<VECTOR<T,d> >,T>(BOX_HIERARCHY<VECTOR<T,d> > const&, \
        RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR<VECTOR<T,d> >&,T) const; \
    template void BOX_HIERARCHY<VECTOR<T,d> >::Swept_Intersection_List<RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR<VECTOR<T,d> >,T>(VECTOR<FRAME<VECTOR<T,d> >,2> const&,VECTOR<FRAME<VECTOR<T,d> >,2> const&, \
        BOX_HIERARCHY<VECTOR<T,d> > const&, RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR<VECTOR<T,d> >&,T) const; \
    template void BOX_HIERARCHY<VECTOR<T,d> >::Intersection_List<RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR<VECTOR<T,d> >&,T>(const FRAME<VECTOR<T,d> >&,const FRAME<VECTOR<T,d> >&, \
        const BOX_HIERARCHY<VECTOR<T,d> >&, RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR<VECTOR<T,d> >&,const T) const; \
    template void BOX_HIERARCHY<VECTOR<T,d> >::Intersection_List<RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR<VECTOR<T,d> >,T>(const FRAME<VECTOR<T,d> >&,const FRAME<VECTOR<T,d> >&, \
        const BOX_HIERARCHY<VECTOR<T,d> >& other_hierarchy,RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR<VECTOR<T,d> >&,const T) const;
// Float instantiations. The 2d constructor takes pairs of two ints
// (point-point), the 3d one pairs of four ints (edge-edge).
template RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR<VECTOR<float,2> >::RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR(ARRAY<VECTOR<int,2>,int>&,ARRAY<VECTOR<int,2>,int>&,
    RIGID_STRUCTURE_INTERACTION_GEOMETRY<VECTOR<float,2> > const&,RIGID_STRUCTURE_INTERACTION_GEOMETRY<VECTOR<float,2> > const&,
    RIGID_TRIANGLE_COLLISIONS_GEOMETRY<VECTOR<float,2> > const&,float,MPI_RIGIDS<VECTOR<float,2> >*);
template RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR<VECTOR<float,2> >::~RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR();
template RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR<VECTOR<float,3> >::RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR(ARRAY<VECTOR<int,4>,int>&,ARRAY<VECTOR<int,4>,int>&,
    RIGID_STRUCTURE_INTERACTION_GEOMETRY<VECTOR<float,3> > const&,RIGID_STRUCTURE_INTERACTION_GEOMETRY<VECTOR<float,3> > const&,
    RIGID_TRIANGLE_COLLISIONS_GEOMETRY<VECTOR<float,3> > const&,float,MPI_RIGIDS<VECTOR<float,3> >*);
template RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR<VECTOR<float,3> >::~RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR();
template void RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR<VECTOR<float,2> >::Store_Helper(int,int,VECTOR<float,2>);
template void RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR<VECTOR<float,3> >::Store_Helper_Helper(int,int);
INSTANTIATION_HELPER(float,2);
INSTANTIATION_HELPER(float,3);
// Double instantiations, compiled out when double support is disabled.
#ifndef COMPILE_WITHOUT_DOUBLE_SUPPORT
template RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR<VECTOR<double,2> >::RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR(ARRAY<VECTOR<int,2>,int>&,ARRAY<VECTOR<int,2>,int>&,
    RIGID_STRUCTURE_INTERACTION_GEOMETRY<VECTOR<double,2> > const&,RIGID_STRUCTURE_INTERACTION_GEOMETRY<VECTOR<double,2> > const&,
    RIGID_TRIANGLE_COLLISIONS_GEOMETRY<VECTOR<double,2> > const&,double,MPI_RIGIDS<VECTOR<double,2> >*);
template RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR<VECTOR<double,2> >::~RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR();
template RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR<VECTOR<double,3> >::RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR(ARRAY<VECTOR<int,4>,int>&,ARRAY<VECTOR<int,4>,int>&,
    RIGID_STRUCTURE_INTERACTION_GEOMETRY<VECTOR<double,3> > const&,RIGID_STRUCTURE_INTERACTION_GEOMETRY<VECTOR<double,3> > const&,
    RIGID_TRIANGLE_COLLISIONS_GEOMETRY<VECTOR<double,3> > const&,double,MPI_RIGIDS<VECTOR<double,3> >*);
template RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR<VECTOR<double,3> >::~RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR();
template void RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR<VECTOR<double,2> >::Store_Helper(int,int,VECTOR<double,2>);
template void RIGID_TRIANGLE_COLLISIONS_EDGE_EDGE_VISITOR<VECTOR<double,3> >::Store_Helper_Helper(int,int);
INSTANTIATION_HELPER(double,2);
INSTANTIATION_HELPER(double,3);
#endif
|
#!/bin/bash
# Prepare a CodeQL playground environment: install the CodeQL CLI, clone the
# query repository at a known-good tag, and unpack the bundled data sets.

# Remove container-build leftovers; -f so an already-clean environment is not an error.
rm -rf local_mount
rm -f build.sh
rm -f Dockerfile
rm -f runDocker.sh

mkdir -p /home/codeql_home
mv /home/playground/codeql-linux64.zip /home/codeql_home/
cd /home/codeql_home
unzip codeql-linux64.zip
# clone stable version
git clone https://github.com/github/codeql.git --branch v1.26.0 codeql-repo

# FIX: single-quote the line so $PATH is expanded when .bashrc is sourced at
# login, instead of baking this script's (root, non-interactive) PATH value in.
echo 'export PATH=/home/codeql_home/codeql:$PATH' >> /root/.bashrc

cd /home/playground
cd GroundTruthGeneration
unzip externalData.qll.zip
rm externalData.qll.zip
cd ../MinedData
unzip listen_merged_data.out.zip
rm listen_merged_data.out.zip
unzip listen_merged_data_withFile.out.zip
rm listen_merged_data_withFile.out.zip
unzip listeners-and-emitters.zip
rm listeners-and-emitters.zip
cd ..
tar -xzvf data_exps.tgz
rm data_exps.tgz
|
package com.vlkan.hrrs.distiller.cli;
import com.beust.jcommander.Parameter;
import com.vlkan.hrrs.commons.jcommander.JCommanderConfig;
import com.vlkan.hrrs.commons.jcommander.JCommanderConfigs;
import com.vlkan.hrrs.commons.jcommander.validator.LoggerLevelSpecsValidator;
import com.vlkan.hrrs.commons.jcommander.validator.UriValidator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.URI;
/**
 * Command-line configuration for the HRRS distiller, parsed with JCommander.
 * All URI-valued options are validated by UriValidator at parse time.
 */
public class Config implements JCommanderConfig {

    private static final Logger LOGGER = LoggerFactory.getLogger(Config.class);

    // Source of the HTTP records to distill.
    @Parameter(
            names = {"--inputUri", "-i"},
            validateWith = UriValidator.class,
            description = "input URI for HTTP records",
            required = true)
    private URI inputUri;

    /** @return the input URI for HTTP records */
    public URI getInputUri() {
        return inputUri;
    }

    // Destination for the distilled HTTP records.
    @Parameter(
            names = {"--outputUri", "-o"},
            validateWith = UriValidator.class,
            description = "output URI for HTTP records",
            required = true)
    private URI outputUri;

    /** @return the output URI for HTTP records */
    public URI getOutputUri() {
        return outputUri;
    }

    // Location of the transformation script applied during distillation.
    @Parameter(
            names = {"--scriptUri", "-s"},
            validateWith = UriValidator.class,
            description = "input URI for script file",
            required = true)
    private URI scriptUri;

    /** @return the URI of the script file */
    public URI getScriptUri() {
        return scriptUri;
    }

    // Per-logger level overrides, e.g. "*=warn,com.vlkan.hrrs=info".
    @Parameter(
            names = {"--loggerLevelSpecs", "-L"},
            validateWith = LoggerLevelSpecsValidator.class,
            description = "comma-separated list of loggerName=loggerLevel pairs")
    private String loggerLevelSpecs = "*=warn,com.vlkan.hrrs=info";

    /** @return the comma-separated logger level specification string */
    public String getLoggerLevelSpecs() {
        return loggerLevelSpecs;
    }

    @Parameter(
            names = {"--help", "-h"},
            help = true,
            description = "display this help and exit")
    private boolean help;

    /** @return true if usage help was requested */
    @Override
    public boolean isHelp() {
        return help;
    }

    /** Logs the effective configuration at debug level. */
    public void dump() {
        LOGGER.debug("inputUri = {}", inputUri);
        LOGGER.debug("outputUri = {}", outputUri);
        LOGGER.debug("scriptUri = {}", scriptUri);
        LOGGER.debug("loggerLevelSpecs={}", loggerLevelSpecs);
    }

    /** Parses the given command-line arguments into a Config instance. */
    public static Config of(String[] args) {
        return JCommanderConfigs.create(args, new Config());
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.