text stringlengths 1 1.05M |
|---|
#!/bin/bash
# Build Ruby 1.9.2-p330 for the Heroku "cedar" stack inside the
# hone/ruby-builder image, sharing the local cache/ and builds/ dirs.
#
# Fixed: backtick command substitution replaced with $(...) and all path
# expansions quoted so the script works from directories containing spaces.
source "$(dirname "$0")/../common.sh"
docker run \
    -v "$(pwd)/cache:/tmp/cache" \
    -v "$(pwd)/builds:/tmp/output" \
    -e VERSION=1.9.2-p330 -e BUILD=true -e STACK=cedar \
    hone/ruby-builder:cedar
|
/**
* @license Copyright (c) 2003-2021, CKSource - <NAME>. All rights reserved.
* For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license
*/
/* globals window */
// Display a warning banner when browsing the nightly documentation build.
// The banner must only appear on the nightly docs host (GitHub Pages);
// the production docs at ckeditor.com are left untouched.
if ( window.location.host === 'ckeditor5.github.io' ) {
	// `window.umberto` is the docs-site framework object -- assumed to be
	// present on this host; TODO confirm it is always loaded before this script.
	window.umberto.showWarningBanner(
		'Nightly documentation ahead. Switch to the <a href="https://ckeditor.com/docs/ckeditor5">stable editor documentation</a>.'
	);
}
|
"use strict";
// NOTE(review): this file is TypeScript compiler output (CommonJS target).
// The helper below is the standard __importDefault interop shim: it wraps a
// CommonJS export so it can be consumed as an ES-module default import.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.$$ = exports.$ = exports.ObjectScanUtility = void 0;
const object_scan_1 = __importDefault(require("object-scan"));
// Thin wrapper around the `object-scan` package that lets callers query a
// plain object with a CSS-like selector string ($ for first match, $$ for
// all matches).
class ObjectScanUtility {
    /**
     * Queries an object using a selector & returns the first result
     * @param selector A CSS selector string (excluding `+` & `~`)
     * @param reverse Defaults to `false`. Whether to reverse the query order
     * @returns A function that when called with an object, will perform the search with said object, returning the first result
     */
    static $(selector, reverse) {
        // abort=true makes object-scan stop at the first match
        return ObjectScanUtility.create$(selector, true, reverse);
    }
    /**
     * Queries an object using a selector & returns an array of results
     * @param selector A CSS selector string (excluding `+` & `~`)
     * @param reverse Defaults to `false`. Whether to reverse the query order
     * @returns A function that when called with an object, will perform the search with said object, returning an array of results
     */
    static $$(selector, reverse) {
        return ObjectScanUtility.create$(selector, false, reverse);
    }
    // Builds the search function. The returned function also exposes one
    // chainable getter per object-scan `rtn` mode (e.g. `$('a.b').key(obj)`).
    static create$(selector, abort, reverse = false) {
        // All `rtn` modes supported by object-scan; each becomes a getter below.
        const possibleRtns = [
            'key',
            'value',
            'entry',
            'property',
            'gproperty',
            'parent',
            'gparent',
            'parents',
            'isMatch',
            'matchedBy',
            'excludedBy',
            'traversedBy',
            'isCircular',
            'isLeaf',
            'depth',
            'result',
            'bool',
            'count',
        ];
        let rtn = 'value';
        // NOTE(review): `rtn` is shared mutable state captured by both the
        // options getter and the property getters below -- accessing e.g.
        // `.key` permanently switches the mode for THIS result function,
        // including any later invocations of it.
        const result = (obj) => (0, object_scan_1.default)(this.adaptCSSSelector(selector), {
            get rtn() {
                return rtn;
            },
            abort,
            reverse,
        })(obj);
        possibleRtns.forEach((possibleRtn) => {
            Object.defineProperty(result, possibleRtn, {
                get() {
                    rtn = possibleRtn;
                    return result;
                },
            });
        });
        return result;
    }
    // Translates a CSS-like selector into object-scan needles. Returns an
    // ARRAY of needles (one per comma-separated part), which object-scan
    // accepts directly as its first argument.
    static adaptCSSSelector(cssSelector) {
        // doesn't support + or ~ selectors
        return cssSelector
            .split(',')
            .map((cssSelectorPart) => {
            let result = cssSelectorPart;
            // remove space around selector
            result = result.trim();
            // remove css child selector
            // NOTE(review): the (?=\W) lookaheads assume the next token starts
            // with a non-word char (e.g. `.class`); bare-word descendants like
            // `a b` are left untouched -- TODO confirm intended.
            result = result.replace(/\s*>\s*(?=\W)\.?/gm, '.');
            // replace spaces
            result = result.replace(/ +?(?=\W)\.?/gm, '.**.');
            // specify level at where search starts
            result = result.startsWith(':root')
                ? result.replace(/:root\.?/, '')
                : '**' + result;
            return result;
        });
    }
}
exports.ObjectScanUtility = ObjectScanUtility;
// One assignment per statement (the comma operator obscured these exports).
exports.$ = ObjectScanUtility.$;
exports.$$ = ObjectScanUtility.$$;
//# sourceMappingURL=objectScan.utility.js.map |
# "queue.pl" uses qsub. The options to it are
# options to qsub. If you have GridEngine installed,
# change this to a queue you have access to.
# Otherwise, use "run.pl", which will run jobs locally
# (make sure your --num-jobs options are no more than
# the number of cpus on your machine).
#a) JHU cluster options
#export train_cmd="queue.pl -l arch=*64"
#export decode_cmd="queue.pl -l arch=*64,mem_free=2G,ram_free=2G"
#export mkgraph_cmd="queue.pl -l arch=*64,ram_free=4G,mem_free=4G"
#export cuda_cmd=run.pl
#if [[ $(hostname -f) == *.clsp.jhu.edu ]]; then
#  export train_cmd="queue.pl -l arch=*64*"
#  export decode_cmd="queue.pl -l arch=*64* --mem 3G"
#  export mkgraph_cmd="queue.pl -l arch=*64* --mem 4G"
#  export cuda_cmd="queue.pl -l gpu=1"
#elif [[ $(hostname -f) == *.fit.vutbr.cz ]]; then
#  #b) BUT cluster options
#  queue="all.q@@blade,all.q@@speech,all.q@dellgpu*,all.q@supergpu*"
#  export train_cmd="queue.pl -q $queue -l ram_free=2500M,mem_free=2500M,matylda5=0.5"
#  export decode_cmd="queue.pl -q $queue -l ram_free=3000M,mem_free=3000M,matylda5=0.1"
#  export mkgraph_cmd="queue.pl -q $queue -l ram_free=4G,mem_free=4G,matylda5=3"
#  export cuda_cmd="queue.pl -q long.q@pcspeech-gpu,long.q@dellgpu1,long.q@pcgpu*,long.q@supergpu1 -l gpu=1"
#else
#  echo "$0: you need to define options for your cluster."
#  exit 1;
#fi
#c) run locally: all four job-dispatch commands use run.pl, which executes
#   jobs on the local machine instead of submitting to a grid scheduler.
export train_cmd=run.pl
export decode_cmd=run.pl
export cuda_cmd=run.pl
export mkgraph_cmd=run.pl
<gh_stars>0
///////////////////////////////////////////////////////////////////////////////
// Name: src/generic/treebkg.cpp
// Purpose: generic implementation of wxTreebook
// Author: <NAME>, <NAME>
// Modified by:
// Created: 2005-09-15
// Copyright: (c) 2005 <NAME> <<EMAIL>>
// Licence: wxWindows licence
///////////////////////////////////////////////////////////////////////////////
// ============================================================================
// declarations
// ============================================================================
// ----------------------------------------------------------------------------
// headers
// ----------------------------------------------------------------------------
// For compilers that support precompilation, includes "wx.h".
#include "wx/wxprec.h"
#if wxUSE_TREEBOOK
#include "wx/treebook.h"
#ifndef WX_PRECOMP
#include "wx/settings.h"
#endif
#include "wx/imaglist.h"
#include "wx/treectrl.h"
// ----------------------------------------------------------------------------
// various wxWidgets macros
// ----------------------------------------------------------------------------
// check that the page index is valid
#define IS_VALID_PAGE(nPage) ((nPage) < DoInternalGetPageCount())
// ----------------------------------------------------------------------------
// event table
// ----------------------------------------------------------------------------
wxIMPLEMENT_DYNAMIC_CLASS(wxTreebook, wxBookCtrlBase);
wxDEFINE_EVENT( wxEVT_TREEBOOK_PAGE_CHANGING, wxBookCtrlEvent );
wxDEFINE_EVENT( wxEVT_TREEBOOK_PAGE_CHANGED, wxBookCtrlEvent );
wxDEFINE_EVENT( wxEVT_TREEBOOK_NODE_COLLAPSED, wxBookCtrlEvent );
wxDEFINE_EVENT( wxEVT_TREEBOOK_NODE_EXPANDED, wxBookCtrlEvent );
wxBEGIN_EVENT_TABLE(wxTreebook, wxBookCtrlBase)
EVT_TREE_SEL_CHANGED (wxID_ANY, wxTreebook::OnTreeSelectionChange)
EVT_TREE_ITEM_EXPANDED (wxID_ANY, wxTreebook::OnTreeNodeExpandedCollapsed)
EVT_TREE_ITEM_COLLAPSED(wxID_ANY, wxTreebook::OnTreeNodeExpandedCollapsed)
wxEND_EVENT_TABLE()
// ============================================================================
// wxTreebook implementation
// ============================================================================
// ----------------------------------------------------------------------------
// wxTreebook creation
// ----------------------------------------------------------------------------
// Creates the treebook: normalizes the style flags, creates the base
// control and the embedded wxTreeCtrl whose hidden root item parents all
// top-level pages.
bool
wxTreebook::Create(wxWindow *parent,
                   wxWindowID id,
                   const wxPoint& pos,
                   const wxSize& size,
                   long style,
                   const wxString& name)
{
    // Check the style flag to have either wxTBK_RIGHT or wxTBK_LEFT
    if ( (style & wxBK_ALIGN_MASK) == wxBK_DEFAULT )
    {
        style |= wxBK_LEFT;
    }
    style |= wxTAB_TRAVERSAL;
    // no border for this control, it doesn't look nice together with the tree
    style &= ~wxBORDER_MASK;
    style |= wxBORDER_NONE;
    if ( !wxControl::Create(parent, id, pos, size,
                            style, wxDefaultValidator, name) )
        return false;
    // the tree control is the "book control" part of the treebook
    m_bookctrl = new wxTreeCtrl
                 (
                    this,
                    wxID_ANY,
                    wxDefaultPosition,
                    wxDefaultSize,
                    wxBORDER_THEME |
                    wxTR_DEFAULT_STYLE |
                    wxTR_HIDE_ROOT |
                    wxTR_SINGLE
                 );
    GetTreeCtrl()->SetQuickBestSize(false); // do full size calculation
    GetTreeCtrl()->AddRoot(wxEmptyString); // label doesn't matter, it's hidden
#ifdef __WXMSW__
    // We need to add dummy size event to force possible scrollbar hiding
    wxSizeEvent evt;
    GetEventHandler()->AddPendingEvent(evt);
#endif
    return true;
}
// ----------------------------------------------------------------------------
// public page insertion API -- thin wrappers over the Do*() workers below
// ----------------------------------------------------------------------------
// insert a new page just before the pagePos
bool wxTreebook::InsertPage(size_t pagePos,
                            wxWindow *page,
                            const wxString& text,
                            bool bSelect,
                            int imageId)
{
    return DoInsertPage(pagePos, page, text, bSelect, imageId);
}
// insert a new page as a child of the page at pagePos
bool wxTreebook::InsertSubPage(size_t pagePos,
                               wxWindow *page,
                               const wxString& text,
                               bool bSelect,
                               int imageId)
{
    return DoInsertSubPage(pagePos, page, text, bSelect, imageId);
}
// append a new top-level page
bool wxTreebook::AddPage(wxWindow *page, const wxString& text, bool bSelect,
                         int imageId)
{
    return DoInsertPage(m_treeIds.size(), page, text, bSelect, imageId);
}
// insertion time is linear to the number of top-pages
bool wxTreebook::AddSubPage(wxWindow *page, const wxString& text, bool bSelect, int imageId)
{
    return DoAddSubPage(page, text, bSelect, imageId);
}
// Inserts a page at pagePos, keeping m_treeIds (flat, depth-first page list)
// and the tree control's items in sync. Rolls back the base-class insertion
// if creating the tree item fails.
bool wxTreebook::DoInsertPage(size_t pagePos,
                              wxWindow *page,
                              const wxString& text,
                              bool bSelect,
                              int imageId)
{
    wxCHECK_MSG( pagePos <= DoInternalGetPageCount(), false,
                 wxT("Invalid treebook page position") );
    if ( !wxBookCtrlBase::InsertPage(pagePos, page, text, bSelect, imageId) )
        return false;
    wxTreeCtrl *tree = GetTreeCtrl();
    wxTreeItemId newId;
    if ( pagePos == DoInternalGetPageCount() )
    {
        // append the page to the end
        wxTreeItemId rootId = tree->GetRootItem();
        newId = tree->AppendItem(rootId, text, imageId);
    }
    else // insert the new page before the given one
    {
        wxTreeItemId nodeId = m_treeIds[pagePos];
        wxTreeItemId previousId = tree->GetPrevSibling(nodeId);
        wxTreeItemId parentId = tree->GetItemParent(nodeId);
        if ( previousId.IsOk() )
        {
            // insert before the sibling - previousId
            newId = tree->InsertItem(parentId, previousId, text, imageId);
        }
        else // no prev siblings -- insert as a first child
        {
            wxASSERT_MSG( parentId.IsOk(), wxT( "Tree has no root node?" ) );
            newId = tree->PrependItem(parentId, text, imageId);
        }
    }
    if ( !newId.IsOk() )
    {
        //something wrong -> cleaning and returning with false
        (void)wxBookCtrlBase::DoRemovePage(pagePos);
        wxFAIL_MSG( wxT("Failed to insert treebook page") );
        return false;
    }
    DoInternalAddPage(pagePos, page, newId);
    DoUpdateSelection(bSelect, pagePos);
    return true;
}
// Appends a sub-page under the LAST top-level page. The flat insertion index
// is computed so the new page lands after the last top-level page and all of
// its existing descendants.
bool wxTreebook::DoAddSubPage(wxWindow *page, const wxString& text, bool bSelect, int imageId)
{
    wxTreeCtrl *tree = GetTreeCtrl();
    wxTreeItemId rootId = tree->GetRootItem();
    wxTreeItemId lastNodeId = tree->GetLastChild(rootId);
    wxCHECK_MSG( lastNodeId.IsOk(), false,
                 wxT("Can't insert sub page when there are no pages") );
    // now calculate its position (should we save/update it too?)
    size_t newPos = tree->GetCount() -
                        (tree->GetChildrenCount(lastNodeId, true) + 1);
    return DoInsertSubPage(newPos, page, text, bSelect, imageId);
}
// Inserts a page as the last child of the page at pagePos. In the flat page
// list the new page goes after the parent and all of its current
// descendants; rolls back the base-class insertion on tree failure.
bool wxTreebook::DoInsertSubPage(size_t pagePos,
                                 wxTreebookPage *page,
                                 const wxString& text,
                                 bool bSelect,
                                 int imageId)
{
    wxTreeItemId parentId = DoInternalGetPage(pagePos);
    wxCHECK_MSG( parentId.IsOk(), false, wxT("invalid tree item") );
    wxTreeCtrl *tree = GetTreeCtrl();
    // flat index = parent index + its recursive child count + 1
    size_t newPos = pagePos + tree->GetChildrenCount(parentId, true) + 1;
    wxASSERT_MSG( newPos <= DoInternalGetPageCount(),
                  wxT("Internal error in tree insert point calculation") );
    if ( !wxBookCtrlBase::InsertPage(newPos, page, text, bSelect, imageId) )
        return false;
    wxTreeItemId newId = tree->AppendItem(parentId, text, imageId);
    if ( !newId.IsOk() )
    {
        (void)wxBookCtrlBase::DoRemovePage(newPos);
        wxFAIL_MSG( wxT("Failed to insert treebook page") );
        return false;
    }
    DoInternalAddPage(newPos, page, newId);
    DoUpdateSelection(bSelect, newPos);
    return true;
}
// Removes the page at pagePos (and, via DoRemovePage, all of its children)
// and destroys the page window itself.
bool wxTreebook::DeletePage(size_t pagePos)
{
    wxCHECK_MSG( IS_VALID_PAGE(pagePos), false, wxT("Invalid tree index") );
    wxTreebookPage *oldPage = DoRemovePage(pagePos);
    if ( !oldPage )
        return false;
    delete oldPage;
    return true;
}
// Removes the page at pagePos together with all of its descendants from the
// base class, the id list and the tree. Child page windows are deleted here;
// the page itself is returned (and deleted by DeletePage()).
wxTreebookPage *wxTreebook::DoRemovePage(size_t pagePos)
{
    wxTreeItemId pageId = DoInternalGetPage(pagePos);
    wxCHECK_MSG( pageId.IsOk(), NULL, wxT("Invalid tree index") );
    wxTreebookPage * oldPage = GetPage(pagePos);
    wxTreeCtrl *tree = GetTreeCtrl();
    size_t subCount = tree->GetChildrenCount(pageId, true);
    wxASSERT_MSG ( IS_VALID_PAGE(pagePos + subCount),
                        wxT("Internal error in wxTreebook::DoRemovePage") );
    // here we are going to delete ALL the pages in the range
    // [pagePos, pagePos + subCount] -- the page and its children
    // deleting all the pages from the base class
    for ( size_t i = 0; i <= subCount; ++i )
    {
        // always remove at pagePos: each removal shifts the rest down
        wxTreebookPage *page = wxBookCtrlBase::DoRemovePage(pagePos);
        // don't delete the page itself though -- it will be deleted in
        // DeletePage() when we return
        if ( i )
        {
            delete page;
        }
    }
    DoInternalRemovePageRange(pagePos, subCount);
    tree->DeleteChildren( pageId );
    tree->Delete( pageId );
    return oldPage;
}
// Removes every page and clears the tree (the hidden root item is kept).
bool wxTreebook::DeleteAllPages()
{
    wxBookCtrlBase::DeleteAllPages();
    m_treeIds.clear();
    wxTreeCtrl *tree = GetTreeCtrl();
    tree->DeleteChildren(tree->GetRootItem());
    return true;
}
// Records the (page, tree item) pair at index newPos in m_treeIds and keeps
// m_selection pointing at the same page after the insertion shift.
void wxTreebook::DoInternalAddPage(size_t newPos,
                                   wxTreebookPage *page,
                                   wxTreeItemId pageId)
{
    wxASSERT_MSG( newPos <= m_treeIds.size(),
                  wxT("Invalid index passed to wxTreebook::DoInternalAddPage") );
    // hide newly inserted page initially (it will be shown when selected)
    if ( page )
        page->Hide();
    if ( newPos == m_treeIds.size() )
    {
        // append
        m_treeIds.push_back(pageId);
    }
    else // insert
    {
        m_treeIds.insert(m_treeIds.begin() + newPos, pageId);
        if ( m_selection != wxNOT_FOUND && newPos <= (size_t)m_selection )
        {
            // selection has been moved one unit toward the end
            ++m_selection;
        }
    }
}
// Erases the ids of a node and all of its children from m_treeIds and fixes
// up m_selection: shift it if it was past the deleted range, or move it to
// the next sibling / parent if it was inside the range.
void wxTreebook::DoInternalRemovePageRange(size_t pagePos, size_t subCount)
{
    // Attention: this function is only for a situation when we delete a node
    // with all its children so pagePos is the node's index and subCount is the
    // node children count
    wxASSERT_MSG( pagePos + subCount < m_treeIds.size(),
                  wxT("Invalid page index") );
    wxTreeItemId pageId = m_treeIds[pagePos];
    wxVector<wxTreeItemId>::iterator itPos = m_treeIds.begin() + pagePos;
    m_treeIds.erase(itPos, itPos + subCount + 1);
    if ( m_selection != wxNOT_FOUND )
    {
        if ( (size_t)m_selection > pagePos + subCount)
        {
            // selection is far after the deleted page, so just update the index and move on
            m_selection -= 1 + subCount;
        }
        else if ( (size_t)m_selection >= pagePos )
        {
            wxTreeCtrl *tree = GetTreeCtrl();
            // as selected page is going to be deleted, try to select the next
            // sibling if exists, if not then the parent
            wxTreeItemId nodeId = tree->GetNextSibling(pageId);
            m_selection = wxNOT_FOUND;
            if ( nodeId.IsOk() )
            {
                // selecting next siblings
                tree->SelectItem(nodeId);
            }
            else // no next sibling, select the parent
            {
                wxTreeItemId parentId = tree->GetItemParent(pageId);
                if ( parentId.IsOk() && parentId != tree->GetRootItem() )
                {
                    tree->SelectItem(parentId);
                }
                else // parent is root
                {
                    // we can't select it as it's hidden
                    DoUpdateSelection(false, wxNOT_FOUND);
                }
            }
        }
        //else: nothing to do -- selection is before the deleted node
    }
    else
    {
        DoUpdateSelection(false, wxNOT_FOUND);
    }
}
// Selects newPos when bSelect is true; otherwise falls back to the first
// page if nothing is currently selected and at least one page exists.
void wxTreebook::DoUpdateSelection(bool bSelect, int newPos)
{
    int newSelPos;
    if ( bSelect )
    {
        newSelPos = newPos;
    }
    else if ( m_selection == wxNOT_FOUND && DoInternalGetPageCount() > 0 )
    {
        // nothing selected yet -- select the first page by default
        newSelPos = 0;
    }
    else
    {
        newSelPos = wxNOT_FOUND;
    }
    if ( newSelPos != wxNOT_FOUND )
    {
        SetSelection((size_t)newSelPos);
    }
}
// Returns the tree item for the page at pagePos, or an invalid id when the
// index is out of range (callers assert themselves).
wxTreeItemId wxTreebook::DoInternalGetPage(size_t pagePos) const
{
    if ( pagePos >= m_treeIds.size() )
    {
        // invalid position but ok here, in this internal function, don't assert
        // (the caller will do it)
        return wxTreeItemId();
    }
    return m_treeIds[pagePos];
}
// Linear search for the page index of a given tree item; wxNOT_FOUND if the
// item does not correspond to any page.
int wxTreebook::DoInternalFindPageById(wxTreeItemId pageId) const
{
    const size_t count = m_treeIds.size();
    for ( size_t i = 0; i < count; ++i )
    {
        if ( m_treeIds[i] == pageId )
            return i;
    }
    return wxNOT_FOUND;
}
// Returns whether the tree node of the page at pagePos is expanded.
bool wxTreebook::IsNodeExpanded(size_t pagePos) const
{
    wxTreeItemId pageId = DoInternalGetPage(pagePos);
    wxCHECK_MSG( pageId.IsOk(), false, wxT("invalid tree item") );
    return GetTreeCtrl()->IsExpanded(pageId);
}
// Expands (or collapses, when expand is false) the node of the given page.
bool wxTreebook::ExpandNode(size_t pagePos, bool expand)
{
    wxTreeItemId pageId = DoInternalGetPage(pagePos);
    wxCHECK_MSG( pageId.IsOk(), false, wxT("invalid tree item") );
    if ( expand )
    {
        GetTreeCtrl()->Expand( pageId );
    }
    else // collapse
    {
        GetTreeCtrl()->Collapse( pageId );
        // rely on the events generated by wxTreeCtrl to update selection
    }
    return true;
}
// Returns the index of the parent page, or wxNOT_FOUND for a top-level page
// (whose tree parent is the hidden root and thus maps to no page).
int wxTreebook::GetPageParent(size_t pagePos) const
{
    wxTreeItemId nodeId = DoInternalGetPage( pagePos );
    // fixed typo in the assertion message ("spacified" -> "specified")
    wxCHECK_MSG( nodeId.IsOk(), wxNOT_FOUND, wxT("Invalid page index specified!") );
    const wxTreeItemId parent = GetTreeCtrl()->GetItemParent( nodeId );
    return parent.IsOk() ? DoInternalFindPageById(parent) : wxNOT_FOUND;
}
// Sets the label of the tree item associated with page n.
bool wxTreebook::SetPageText(size_t n, const wxString& strText)
{
    wxTreeItemId pageId = DoInternalGetPage(n);
    wxCHECK_MSG( pageId.IsOk(), false, wxT("invalid tree item") );
    GetTreeCtrl()->SetItemText(pageId, strText);
    return true;
}
// Returns the label of the tree item associated with page n.
wxString wxTreebook::GetPageText(size_t n) const
{
    wxTreeItemId pageId = DoInternalGetPage(n);
    wxCHECK_MSG( pageId.IsOk(), wxString(), wxT("invalid tree item") );
    return GetTreeCtrl()->GetItemText(pageId);
}
// Returns the image index of the tree item associated with page n.
int wxTreebook::GetPageImage(size_t n) const
{
    wxTreeItemId pageId = DoInternalGetPage(n);
    wxCHECK_MSG( pageId.IsOk(), wxNOT_FOUND, wxT("invalid tree item") );
    return GetTreeCtrl()->GetItemImage(pageId);
}
// Sets the image index of the tree item associated with page n.
bool wxTreebook::SetPageImage(size_t n, int imageId)
{
    wxTreeItemId pageId = DoInternalGetPage(n);
    wxCHECK_MSG( pageId.IsOk(), false, wxT("invalid tree item") );
    GetTreeCtrl()->SetItemImage(pageId, imageId);
    return true;
}
// Mirrors a programmatic page selection into the tree control.
void wxTreebook::UpdateSelectedPage(size_t newsel)
{
    GetTreeCtrl()->SelectItem(DoInternalGetPage(newsel));
}
// Factory for the (vetoable) page-changing event used by the base class.
wxBookCtrlEvent* wxTreebook::CreatePageChangingEvent() const
{
    return new wxBookCtrlEvent(wxEVT_TREEBOOK_PAGE_CHANGING, m_windowId);
}
// Converts a page-changing event into the corresponding page-changed one.
void wxTreebook::MakeChangedEvent(wxBookCtrlEvent &event)
{
    event.SetEventType(wxEVT_TREEBOOK_PAGE_CHANGED);
}
// Returns the page window at n or, if that slot has no window (a pure
// container node), the first descendant page with a non-NULL window.
wxWindow *wxTreebook::TryGetNonNullPage(size_t n)
{
    wxWindow* page = wxBookCtrlBase::GetPage(n);
    if ( !page )
    {
        // Find the next suitable page, i.e. the first (grand)child
        // of this one with a non-NULL associated page
        wxTreeCtrl* const tree = GetTreeCtrl();
        for ( wxTreeItemId childId = m_treeIds[n]; childId.IsOk(); )
        {
            wxTreeItemIdValue cookie;
            // descend down the first-child chain; flat page indices follow
            // depth-first order, so each first child is at index n+1
            childId = tree->GetFirstChild( childId, cookie );
            if ( childId.IsOk() )
            {
                page = wxBookCtrlBase::GetPage(++n);
                if ( page )
                    break;
            }
        }
    }
    return page;
}
// Shares the image list with the embedded tree control (not owned).
void wxTreebook::SetImageList(wxImageList *imageList)
{
    wxBookCtrlBase::SetImageList(imageList);
    GetTreeCtrl()->SetImageList(imageList);
}
// As SetImageList(), but the book takes ownership of the list; the tree
// still only references it (the base class deletes it).
void wxTreebook::AssignImageList(wxImageList *imageList)
{
    wxBookCtrlBase::AssignImageList(imageList);
    GetTreeCtrl()->SetImageList(imageList);
}
// ----------------------------------------------------------------------------
// event handlers
// ----------------------------------------------------------------------------
// Reacts to a selection change in the embedded tree by selecting the
// corresponding page, ignoring echoes of our own programmatic selections.
void wxTreebook::OnTreeSelectionChange(wxTreeEvent& event)
{
    if ( event.GetEventObject() != m_bookctrl )
    {
        event.Skip();
        return;
    }
    wxTreeItemId newId = event.GetItem();
    if ( (m_selection == wxNOT_FOUND &&
            (!newId.IsOk() || newId == GetTreeCtrl()->GetRootItem())) ||
         (m_selection != wxNOT_FOUND && newId == m_treeIds[m_selection]) )
    {
        // this event can only come when we modify the tree selection ourselves
        // so we should simply ignore it
        return;
    }
    int newPos = DoInternalFindPageById(newId);
    if ( newPos != wxNOT_FOUND )
        SetSelection( newPos );
}
// Translates tree expand/collapse notifications into the corresponding
// wxEVT_TREEBOOK_NODE_EXPANDED/COLLAPSED book events.
void wxTreebook::OnTreeNodeExpandedCollapsed(wxTreeEvent & event)
{
    if ( event.GetEventObject() != m_bookctrl )
    {
        event.Skip();
        return;
    }
    wxTreeItemId nodeId = event.GetItem();
    if ( !nodeId.IsOk() || nodeId == GetTreeCtrl()->GetRootItem() )
        return;
    int pagePos = DoInternalFindPageById(nodeId);
    wxCHECK_RET( pagePos != wxNOT_FOUND, wxT("Internal problem in wxTreebook!..") );
    wxBookCtrlEvent ev(GetTreeCtrl()->IsExpanded(nodeId)
                            ? wxEVT_TREEBOOK_NODE_EXPANDED
                            : wxEVT_TREEBOOK_NODE_COLLAPSED,
                       m_windowId);
    ev.SetSelection(pagePos);
    ev.SetOldSelection(pagePos);
    ev.SetEventObject(this);
    GetEventHandler()->ProcessEvent(ev);
}
// ----------------------------------------------------------------------------
// wxTreebook geometry management
// ----------------------------------------------------------------------------
// Maps a point in treebook client coordinates to a page index, filling
// *flags with wxBK_HITTEST_* details when requested.
int wxTreebook::HitTest(wxPoint const & pt, long * flags) const
{
    int pagePos = wxNOT_FOUND;
    if ( flags )
        *flags = wxBK_HITTEST_NOWHERE;
    // convert from wxTreebook coordinates to wxTreeCtrl ones
    const wxTreeCtrl * const tree = GetTreeCtrl();
    const wxPoint treePt = tree->ScreenToClient(ClientToScreen(pt));
    // is it over the tree?
    if ( wxRect(tree->GetSize()).Contains(treePt) )
    {
        int flagsTree;
        wxTreeItemId id = tree->HitTest(treePt, flagsTree);
        if ( id.IsOk() && (flagsTree & wxTREE_HITTEST_ONITEM) )
        {
            pagePos = DoInternalFindPageById(id);
        }
        if ( flags )
        {
            if ( pagePos != wxNOT_FOUND )
                *flags = 0;
            // translate the tree's hit-test flags into book flags
            if ( flagsTree & (wxTREE_HITTEST_ONITEMBUTTON |
                              wxTREE_HITTEST_ONITEMICON |
                              wxTREE_HITTEST_ONITEMSTATEICON) )
                *flags |= wxBK_HITTEST_ONICON;
            if ( flagsTree & wxTREE_HITTEST_ONITEMLABEL )
                *flags |= wxBK_HITTEST_ONLABEL;
        }
    }
    else // not over the tree
    {
        if ( flags && GetPageRect().Contains( pt ) )
            *flags |= wxBK_HITTEST_ONPAGE;
    }
    return pagePos;
}
#endif // wxUSE_TREEBOOK
|
#! /bin/sh
#PBS -l nodes=1:ppn=1
#PBS -l walltime=1:00:00
#PBS -j oe
# When running under PBS, load the user environment and move to the
# submission directory before launching the analysis.
if [ -n "$PBS_JOBNAME" ]
then
    source "${PBS_O_HOME}/.bash_profile"
    cd "$PBS_O_WORKDIR"
    module load gcc/5.3.0
fi
# Output prefix for this run. Fixed: $prefix was defined but never used --
# the same path was duplicated three times in the command line below.
prefix=../../gekko-output/no-data-run-9
config=../../configs/gekko-conc044-rate005-nopoly-varonly.yml
ecoevolity --seed 768856366 --prefix "$prefix" --ignore-data --relax-missing-sites --relax-constant-sites --relax-triallelic-sites "$config" 1>"${prefix}-gekko-conc044-rate005-nopoly-varonly.out" 2>&1
-- Add a free-form grouping label; "group" is a reserved word, hence quoted.
ALTER TABLE results_metadata ADD COLUMN "group" TEXT; |
/**
 * Resolves a URL to a controller action using the routing rules found in
 * $config['rules'] (pattern => action). Exact matches win; patterns may
 * contain '*' wildcards matching any run of characters. Falls back to
 * 'default/index' when nothing matches.
 *
 * Fixed: the pattern is now passed through preg_quote() before the '*'
 * wildcards are expanded, so regex metacharacters inside a routing pattern
 * (e.g. '.', '?', '#') are matched literally instead of being interpreted.
 */
function matchControllerAction($url, $config) : string {
    $rules = $config['rules'] ?? [];
    foreach ($rules as $pattern => $action) {
        if ($pattern === $url) {
            return $action;
        }
        if (strpos($pattern, '*') !== false) {
            // Escape everything first; then turn the escaped '\*' back into
            // a capturing wildcard.
            $regex = str_replace('\*', '(.*)', preg_quote($pattern, '#'));
            if (preg_match('#^' . $regex . '$#', $url)) {
                return $action;
            }
        }
    }
    return 'default/index';
}
#!/bin/bash
# Fine-tune an OpenAI model using the settings sourced from training.cfg.
. training.cfg
# Read the training params into this var (newlines flattened to spaces so
# the parameters expand as individual CLI arguments)
args=$(<"$TRAINING_PARAMS")
args="${args//$'\n'/ }"
# Prepare the training data only if not already cached.
# $PREPROCESSING_MODE is deliberately left unquoted: it may be empty or
# contain several flags and must word-split.
! [[ -f "./_prepared.jsonl" ]] && openai tools fine_tunes.prepare_data -f "$TRAIN_DATA" $PREPROCESSING_MODE
# Capture the CLI output in a temp file that is removed even on failure
# (fixed: the old fixed-name tmp_file leaked when a step errored out).
tmp_file=$(mktemp)
trap 'rm -f "$tmp_file"' EXIT
openai api fine_tunes.create -t "./_prepared.jsonl" -m "$MODEL_NAME" $args > "$tmp_file"
trained_model=$(awk -F"Uploaded model: " '{print $2}' "$tmp_file" | tr -d "\n")
# Output the name of our trained model to a file
echo "$trained_model" >> fine_tuned_models
<filename>packages/util/src/lib/model/model.modify.ts
import { ArrayOrValue, asArray } from '../array/array';
import { filterMaybeValues } from '../array/array.value';
import { Maybe, MaybeMap } from '../value/maybe.type';
import { maybeMergeModifiers, ModifierFunction } from '../value/modifier';
import { ModelConversionOptions, ModelMapFunction, ModelMapFunctions } from './model.conversion';
/**
 * Provides a modifier applied to the "data" side of a model mapping.
 */
export type ModelInputDataModifier<D extends object> = {
  modifyData: ModifierFunction<D>;
};
/**
 * Provides a modifier applied to the "model" side of a model mapping.
 */
export type ModelInputModelModifier<V extends object> = {
  modifyModel: ModifierFunction<V>;
};
/**
 * A modifier pair covering both the model and the data side.
 */
export type ModelModifier<V extends object, D extends object> = ModelInputModelModifier<V> & ModelInputDataModifier<D>;
/**
 * A ModelModifier where either side may be absent or Maybe-valued.
 */
export type PartialModelModifier<V extends object, D extends object> = Partial<MaybeMap<ModelModifier<V, D>>>;
/**
 * Collapses one or more PartialModelModifier values into a single one by
 * merging all defined modifyData functions together and all defined
 * modifyModel functions together. Missing/undefined modifiers are skipped.
 */
export function maybeMergeModelModifiers<V extends object, D extends object>(input: ArrayOrValue<PartialModelModifier<V, D>>): PartialModelModifier<V, D> {
  const modifierList = asArray(input);
  return {
    modifyData: maybeMergeModifiers(filterMaybeValues(modifierList.map((modifier) => modifier.modifyData))),
    modifyModel: maybeMergeModifiers(filterMaybeValues(modifierList.map((modifier) => modifier.modifyModel)))
  };
}
export interface ModifyModelMapFunctionsConfig<V extends object, D extends object> {
  /**
   * The base map functions (from/to) that the modifiers will wrap.
   */
  readonly mapFunctions: ModelMapFunctions<V, D>;
  /**
   * Partial model modifiers to use.
   */
  readonly modifiers: ArrayOrValue<PartialModelModifier<V, D>>;
  /**
   * Provides a default value for both copyModel and copyData.
   */
  readonly copy?: boolean;
  /**
   * Whether or not to copy the input model before applying modifiers.
   *
   * Defaults to true.
   */
  readonly copyModel?: boolean;
  /**
   * Whether or not to copy the input data before applying modifiers.
   *
   * Defaults to true.
   */
  readonly copyData?: boolean;
}
/**
 * Wraps a ModelMapFunctions pair so the configured modifiers run on each
 * input before it is mapped: modifyData is applied on the `from` direction
 * and modifyModel on the `to` direction. The copy flags control whether the
 * inputs are shallow-copied before modification (see the config docs).
 */
export function modifyModelMapFunctions<V extends object, D extends object>(config: ModifyModelMapFunctionsConfig<V, D>): ModelMapFunctions<V, D> {
  const { copy, copyModel = copy, copyData = copy, mapFunctions, modifiers } = config;
  const merged = maybeMergeModelModifiers(modifiers);
  return {
    from: modifyModelMapFunction(mapFunctions.from, merged.modifyData, copyData),
    to: modifyModelMapFunction(mapFunctions.to, merged.modifyModel, copyModel)
  };
}
/**
 * Merges a ModifierFunction with a ModelMapFunction
 *
 * @param mapFn - the base map function to wrap
 * @param modifyModel - optional modifier applied to the input before mapping; when nullish, mapFn is returned unchanged
 * @param copy - whether to shallow-copy the input before modifying it (default true), protecting the caller's object from in-place mutation
 * @returns a ModelMapFunction that applies the modifier to the (possibly copied) input and then delegates to mapFn
 */
export function modifyModelMapFunction<I extends object, O extends object>(mapFn: ModelMapFunction<I, O>, modifyModel: Maybe<ModifierFunction<I>>, copy = true): ModelMapFunction<I, O> {
  return modifyModel
    ? (input: Maybe<I>, target?: Maybe<Partial<O>>, options?: Maybe<ModelConversionOptions<I>>) => {
        // NOTE: the copy is shallow -- nested objects are still shared.
        const inputToMap = copy && input != null ? { ...input } : input;
        if (inputToMap != null) {
          modifyModel(inputToMap);
        }
        return mapFn(inputToMap, target, options);
      }
    : mapFn;
}
|
/*
* Copyright 2020 ZUP IT SERVICOS EM TECNOLOGIA E INOVACAO SA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Returns a promise that is resolved as soon as the function mock passed as parameter is called
 * for the nth time, where n is the parameter `times`.
 *
 * The function calls will be checked every 20ms, so when this is resolved, the function might
 * have been called more then desired number of times.
 *
 * If the function is not called the desired number of times before `timeout` ms, the promise is
 * either rejected or it's resolved with an error message logged to the console. The behavior will
 * depend on the parameter `rejectOnTimeout`.
 *
 * @param fn the function mock to check
 * @param times the minimum number of times you wish the function to be called before resolving the
 * promise
 * @param timeout the maximum time (ms) to wait. Default is 500ms.
 * @param rejectOnTimeout default is true. When true, will reject once it times out. When false,
 * will resolve even on timeout, but with an error message logged to the console
 * @returns the promise
 */
export function whenCalledTimes(
  fn: jest.Mock,
  times: number,
  timeout = 500,
  rejectOnTimeout = true,
) {
  const interval = 20
  let attempts = 0
  return new Promise<void>((resolve, reject) => {
    const id = setInterval(() => {
      if (fn.mock.calls.length >= times) {
        clearInterval(id)
        resolve()
      }
      else if (attempts++ * interval >= timeout) {
        clearInterval(id)
        const message = `Timeout while waiting function to be executed ${times} times.`
        // reject with an Error (not a bare string) so callers get a stack trace
        if (rejectOnTimeout) reject(new Error(message))
        else {
          console.error(message)
          resolve()
        }
      }
      // fixed: the polling delay now uses `interval` instead of a duplicated
      // magic 20, so the attempts*interval elapsed-time estimate stays correct
    }, interval)
  })
}
/**
 * Gets the nth parameter of the mth call to the function `fn`, where n is `parameterIndex` and m is
 * `callIndex`.
 *
 * @param fn the function mock
 * @param callIndex the index of the call to get parameter from
 * @param parameterIndex the index of the desired parameter
 * @returns the parameter value
 */
export function getParameter(fn: jest.Mock, callIndex = 0, parameterIndex = 0) {
  const call = fn.mock.calls[callIndex]
  return call[parameterIndex]
}
/**
 * Gets the nth parameter of all calls to the function `fn`, where n is `parameterIndex`.
 *
 * @param fn the function mock
 * @param parameterIndex the index of the desired parameter
 * @returns an array with the parameter value for each one of the calls
 */
export function getParameterByCalls(fn: jest.Mock, parameterIndex = 0) {
  const parameters = []
  for (const call of fn.mock.calls) {
    parameters.push(call[parameterIndex])
  }
  return parameters
}
|
import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from keras.models import Sequential
from keras.layers import Dense

# Load the data
wine_data = pd.read_csv("wine.csv")

# Split the data into training and test sets
X = wine_data.drop("quality", axis=1)
y = np.array(wine_data["quality"])
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)

# Scale the data: fit the scaler on the TRAINING set only, then apply the
# same transform to the test set. (Fixed: the original re-fit the scaler on
# the test data, leaking test statistics and scaling the two sets
# inconsistently.)
scaler = StandardScaler()
X_train = scaler.fit_transform(X_train)
X_test = scaler.transform(X_test)

# Create the regression model. The output layer uses a linear activation:
# "quality" is a numeric score well outside (0, 1), so the original sigmoid
# output could never reach the targets.
model = Sequential()
model.add(Dense(128, input_shape=(len(X_train[0]),), activation="relu"))
model.add(Dense(64, activation="relu"))
model.add(Dense(32, activation="relu"))
model.add(Dense(16, activation="relu"))
model.add(Dense(1, activation="linear"))

# Compile the model; MAE is a meaningful regression metric (classification
# "accuracy" is not, for a continuous target).
model.compile(optimizer="adam", loss="mean_squared_error", metrics=["mae"])

# Fit the model
model.fit(
    X_train,
    y_train,
    validation_data=(X_test, y_test),
    epochs=500,
    batch_size=32
)
<gh_stars>0
/////////////////////////////////////////////////////////////////
// SceneManager Extension
// By <NAME>, April 2016
/////////////////////////////////////////////////////////////////
import SceneManagerPanel from './Viewing.Extension.SceneManager.Panel'
import ViewerToolkit from 'ViewerToolkit'
import ExtensionBase from 'ExtensionBase'
import Lockr from 'lockr'
class SceneManagerExtension extends ExtensionBase {
  /////////////////////////////////////////////////////////////////
  // Class constructor
  //
  /////////////////////////////////////////////////////////////////
  constructor(viewer, options) {
    super(viewer, options)
    // Loaded models keyed by model id; snapshotted into saved scenes.
    // NOTE(review): entries appear to be added by external code -- this file
    // only reads and clears the collection. TODO confirm the producer.
    this.modelCollection = {}
  }
  /////////////////////////////////////////////////////////////////
  // Extension Id
  //
  /////////////////////////////////////////////////////////////////
  static get ExtensionId() {
    return 'Viewing.Extension.SceneManager'
  }
  /////////////////////////////////////////////////////////////////
  // Load callback: creates the toolbar button and panel, wires the
  // panel events to the scene handlers, and restores previously saved
  // scenes/sequence from Lockr-backed local storage.
  //
  /////////////////////////////////////////////////////////////////
  load() {
    // toolbar button toggling the scene manager panel
    this._control = ViewerToolkit.createButton(
      'scene-manager-control',
      'glyphicon glyphicon-picture',
      'Manage Scenes', ()=>{
        this._panel.toggleVisibility()
      })
    this.onAddSceneHandler =
      (e) => this.onAddScene(e)
    this.onRestoreSceneHandler =
      (e) => this.onRestoreScene(e)
    this.onRemoveSceneHandler =
      (e) => this.onRemoveScene(e)
    this.onSaveSequenceHandler =
      (e) => this.onSaveSequence(e)
    this._panel = new SceneManagerPanel(
      this._viewer.container,
      this._control.container)
    this._panel.on('scene.add', (scene) => {
      return this.onAddSceneHandler(scene)
    })
    this._panel.on('scene.restore', (scene)=>{
      return this.onRestoreSceneHandler(scene)
    })
    this._panel.on('scene.remove', (scene)=>{
      return this.onRemoveSceneHandler(scene)
    })
    this._panel.on('sequence.update', (sequence)=>{
      return this.onSaveSequenceHandler(sequence)
    })
    // use the provided parent control group, or create a dedicated one
    this.parentControl = this._options.parentControl
    if(!this.parentControl){
      var viewerToolbar = this._viewer.getToolbar(true)
      this.parentControl = new Autodesk.Viewing.UI.ControlGroup(
        'scene-manager')
      viewerToolbar.addControl(this.parentControl)
    }
    this.parentControl.addControl(
      this._control)
    // restore persisted scenes and their display sequence
    this.sceneMap = Lockr.get(
      SceneManagerExtension.ExtensionId + '.scenes') || {}
    this.sequence = Lockr.get(
      SceneManagerExtension.ExtensionId + '.sequence') || []
    this.sequence.forEach((sceneId) => {
      this._panel.addItem(this.sceneMap[ sceneId ])
    })
    console.log('Viewing.Extension.SceneManager loaded')
    return true
  }
  /////////////////////////////////////////////////////////////////
  // Unload callback: removes the toolbar button and hides the panel.
  //
  /////////////////////////////////////////////////////////////////
  unload() {
    this.parentControl.removeControl(
      this._control)
    this._panel.setVisible(false)
    console.log('Viewing.Extension.SceneManager unloaded')
    return true
  }
/////////////////////////////////////////////////////////////////
// Save current scene
//
////////////////////////////////////////////////////////////////
onAddScene (data) {
var filter = {
renderOptions: false,
objectSet: false,
viewport: true,
guid: true
}
var scene = this._viewer.getState(filter)
scene.name = (data.name.length ?
data.name : new Date().toString('d/M/yyyy H:mm:ss'))
scene.modelInfo = []
for(var modelId in this.modelCollection) {
var model = this.modelCollection[modelId]
scene.modelInfo.push({
storageUrn: model.storageUrn,
transform: model.transform,
version: model.version,
name: model.name
})
}
this.sequence.push(scene.guid)
this.sceneMap[ scene.guid ] = scene
Lockr.set(
SceneManagerExtension.ExtensionId + '.sequence',
this.sequence)
Lockr.set(
SceneManagerExtension.ExtensionId + '.scenes',
this.sceneMap)
return scene
}
/////////////////////////////////////////////////////////////////
// Restore scene
//
// Diffs the currently registered models against the models stored
// in the scene and partitions them into three sets (matched by
// storageUrn):
//   - transformSet: already loaded -> only re-apply transform
//   - deleteSet:    loaded but not in the scene -> unload
//   - loadSet:      in the scene but not loaded -> load
// The actual viewer work is delegated to listeners of the
// 'scene.restore' event.
////////////////////////////////////////////////////////////////
onRestoreScene (scene) {
// Filter kept for the commented-out restoreState call below
var filter = {
renderOptions: false,
objectSet: false,
viewport: true
}
//this._viewer.restoreState(scene, filter, false)
// Start by assuming every registered model must be deleted ...
var deleteSet = Object.keys(this.modelCollection).map(
(modelId) => {
return this.modelCollection[modelId]
})
var transformSet = []
var loadSet = []
scene.modelInfo.forEach((modelInfo) => {
// ... then move models that belong to the scene out of
// deleteSet into transformSet (return exits this iteration)
for(var i=0; i < deleteSet.length; ++i) {
if(modelInfo.storageUrn === deleteSet[i].storageUrn) {
var model = deleteSet[i]
model.transform = modelInfo.transform
transformSet.push(model)
deleteSet.splice(i, 1)
return
}
}
// Not currently loaded -> must be loaded from storage
loadSet.push(modelInfo)
})
// Listeners re-register surviving models via addModel()
this.modelCollection = {}
this.emit('scene.restore', {
transformSet,
deleteSet,
loadSet
})
}
/////////////////////////////////////////////////////////////////
// Delete scene
//
////////////////////////////////////////////////////////////////
onRemoveScene (scene) {
var idx = this.sequence.indexOf(scene.guid)
this.sequence.splice(idx, 1)
delete this.sceneMap[scene.guid]
Lockr.set(
SceneManagerExtension.ExtensionId + '.sequence',
this.sequence)
Lockr.set(
SceneManagerExtension.ExtensionId + '.scenes',
this.sceneMap)
}
/////////////////////////////////////////////////////////////////
// Save scenes sequence
//
// Persists the new scene ordering coming from the panel UI.
// @param sequence ordered array of scene guids
////////////////////////////////////////////////////////////////
onSaveSequence (sequence) {
this.sequence = sequence
Lockr.set(
SceneManagerExtension.ExtensionId + '.sequence',
this.sequence)
}
/////////////////////////////////////////////////////////////////
// Register model in SceneManager
//
// Tracks a loaded model so it can be captured by onAddScene and
// diffed by onRestoreScene. Keyed by model.modelId.
////////////////////////////////////////////////////////////////
addModel (model) {
this.modelCollection[model.modelId] = model
}
/////////////////////////////////////////////////////////////////
// Unregister model in SceneManager
//
////////////////////////////////////////////////////////////////
removeModel (model) {
if(this.modelCollection[model.modelId]){
delete this.modelCollection[model.modelId]
}
}
}
// Make the extension loadable via viewer.loadExtension(ExtensionId)
Autodesk.Viewing.theExtensionManager.registerExtension(
SceneManagerExtension.ExtensionId,
SceneManagerExtension)
|
#!/bin/bash
#
##################################################################################################################
# Written to be used on 64 bits computers
# Author : Erik Dubois
# Website : http://www.erikdubois.be
##################################################################################################################
##################################################################################################################
#
# DO NOT JUST RUN THIS. EXAMINE AND JUDGE. RUN AT YOUR OWN RISK.
#
##################################################################################################################
package="sublime-text-dev"
command="subl3"
#----------------------------------------------------------------------------------
# Install $package from the AUR when it is not already present, using
# whichever AUR helper (packer / pacaur / yaourt) is installed, then
# verify the installation succeeded.
# FIX: variable expansions are quoted to guard against word splitting.
if pacman -Qi "$package" &> /dev/null; then
echo "################################################################"
echo "################## ${package} is already installed"
echo "################################################################"
else
#checking which helper is installed
if pacman -Qi packer &> /dev/null; then
echo "Installing with packer"
packer -S --noconfirm --noedit "$package"
elif pacman -Qi pacaur &> /dev/null; then
echo "Installing with pacaur"
pacaur -S --noconfirm --noedit "$package"
elif pacman -Qi yaourt &> /dev/null; then
echo "Installing with yaourt"
yaourt -S --noconfirm "$package"
fi
# Just checking if installation was successful
if pacman -Qi "$package" &> /dev/null; then
echo "################################################################"
echo "######### ${package} has been installed"
echo "################################################################"
else
echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
echo "!!!!!!!!! ${package} has NOT been installed"
echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
fi
fi
#!/bin/bash -f
#*********************************************************************************************************
# Vivado (TM) v2017.4 (64-bit)
#
# Filename : FIFO16x7.sh
# Simulator : Aldec Riviera-PRO Simulator
# Description : Simulation script for compiling, elaborating and verifying the project source files.
# The script will automatically create the design libraries sub-directories in the run
# directory, add the library logical mappings in the simulator setup file, create default
# 'do/prj' file, execute compilation, elaboration and simulation steps.
#
# Generated by Vivado on Thu May 24 22:22:36 +0100 2018
# SW Build 2086221 on Fri Dec 15 20:55:39 MST 2017
#
# Copyright 1986-2017 Xilinx, Inc. All Rights Reserved.
#
# usage: FIFO16x7.sh [-help]
# usage: FIFO16x7.sh [-lib_map_path]
# usage: FIFO16x7.sh [-noclean_files]
# usage: FIFO16x7.sh [-reset_run]
#
# Prerequisite:- To compile and run simulation, you must compile the Xilinx simulation libraries using the
# 'compile_simlib' TCL command. For more information about this command, run 'compile_simlib -help' in the
# Vivado Tcl Shell. Once the libraries have been compiled successfully, specify the -lib_map_path switch
# that points to these libraries and rerun export_simulation. For more information about this switch please
# type 'export_simulation -help' in the Tcl shell.
#
# You can also point to the simulation libraries by either replacing the <SPECIFY_COMPILED_LIB_PATH> in this
# script with the compiled library directory path or specify this path with the '-lib_map_path' switch when
# executing this script. Please type 'FIFO16x7.sh -help' for more information.
#
# Additional references - 'Xilinx Vivado Design Suite User Guide:Logic simulation (UG900)'
#
#*********************************************************************************************************
# Script info
echo -e "FIFO16x7.sh - Script generated by export_simulation (Vivado v2017.4 (64-bit)-id)\n"
# Main steps
# run(): top-level driver - validates arguments, prepares the library
# setup, then compiles and simulates the design.
run()
{
check_args $# $1
setup $1 $2
compile
simulate
}
# RUN_STEP: <compile>
# Compiles the design sources via the generated compile.do, teeing the
# output into compile.log.
compile()
{
# Compile design files
source compile.do 2>&1 | tee -a compile.log
}
# RUN_STEP: <simulate>
# Runs the Riviera-PRO batch simulator with simulate.do, logging into
# simulate.log.
simulate()
{
runvsimsa -l simulate.log -do "do {simulate.do}"
}
# STEP: setup
# Dispatches on the first CLI switch: -lib_map_path copies library.cfg
# from the given directory, -reset_run wipes artifacts of a previous
# run, -noclean_files keeps them; the default copies library.cfg from
# the hard-coded compile_simlib location.
setup()
{
case $1 in
"-lib_map_path" )
if [[ ($2 == "") ]]; then
echo -e "ERROR: Simulation library directory path not specified (type \"./FIFO16x7.sh -help\" for more information)\n"
exit 1
fi
copy_setup_file $2
;;
"-reset_run" )
reset_run
echo -e "INFO: Simulation run files deleted.\n"
exit 0
;;
"-noclean_files" )
# do not remove previous data
;;
* )
copy_setup_file $2
esac
# Add any setup/initialization commands here:-
# <user specific commands>
}
# Copy library.cfg file
# copy_setup_file [lib_map_path]: copies the simulator's library.cfg
# from the given path (or the generated default path) into the run
# directory.
copy_setup_file()
{
file="library.cfg"
if [[ ($1 != "") ]]; then
lib_map_path="$1"
else
lib_map_path="C:/Users/asroliveira/CloudStation/CR/2018/MultiClkDomainDemo/MultiClkDomainDemo.cache/compile_simlib/riviera"
fi
if [[ ($lib_map_path != "") ]]; then
src_file="$lib_map_path/$file"
cp $src_file .
fi
}
# Delete generated data from the previous run
# Removes every log file, waveform database and library directory
# produced by a previous compile+simulate run; entries that do not
# exist are skipped silently.
reset_run()
{
files_to_remove=(compile.log elaboration.log simulate.log dataset.asdb work riviera)
for file in "${files_to_remove[@]}"; do
if [[ -e $file ]]; then
rm -rf $file
fi
done
}
# Check command line arguments
# check_args <argc> <first-arg>: rejects a single argument that is not
# one of the recognized switches; -help/-h prints the usage text.
check_args()
{
if [[ ($1 == 1 ) && ($2 != "-lib_map_path" && $2 != "-noclean_files" && $2 != "-reset_run" && $2 != "-help" && $2 != "-h") ]]; then
echo -e "ERROR: Unknown option specified '$2' (type \"./FIFO16x7.sh -help\" for more information)\n"
exit 1
fi
if [[ ($2 == "-help" || $2 == "-h") ]]; then
usage
fi
}
# Script usage
# Prints the help text and exits with a non-zero status.
usage()
{
msg="Usage: FIFO16x7.sh [-help]\n\
Usage: FIFO16x7.sh [-lib_map_path]\n\
Usage: FIFO16x7.sh [-reset_run]\n\
Usage: FIFO16x7.sh [-noclean_files]\n\n\
[-help] -- Print help information for this script\n\n\
[-lib_map_path <path>] -- Compiled simulation library directory path. The simulation library is compiled\n\
using the compile_simlib tcl command. Please see 'compile_simlib -help' for more information.\n\n\
[-reset_run] -- Recreate simulator setup files and library mappings for a clean run. The generated files\n\
from the previous run will be removed. If you don't want to remove the simulator generated files, use the\n\
-noclean_files switch.\n\n\
[-noclean_files] -- Reset previous run, but do not remove simulator generated files from the previous run.\n\n"
echo -e $msg
exit 1
}
# Launch script
run $1 $2
|
//
// LogonHandler+Logging.h
// TravelAgency_RKT
//
// Created by <NAME> on 8/12/14.
// Copyright (c) 2014 SAP. All rights reserved.
//
#import "LogonHandler.h"
#import "SAPClientLogManager.h"
// Category extending LogonHandler with SAP client-log configuration
// and upload capabilities (via SAPClientLogManager).
@interface LogonHandler (Logging)
// Configures client logging for this logon handler.
- (void) setupLogging;
// Uploads the collected client logs — presumably to the SAP backend
// via SAPClientLogManager; TODO(review): confirm the upload target.
- (void) uploadLogs;
@end
|
<gh_stars>1000+
package com.prisma.api.connector.jdbc.impl
import java.sql.{SQLException, SQLIntegrityConstraintViolationException}
import com.prisma.api.connector._
import com.prisma.api.connector.jdbc.database.JdbcActionsBuilder
import com.prisma.api.connector.jdbc.{NestedDatabaseMutactionInterpreter, TopLevelDatabaseMutactionInterpreter}
import com.prisma.api.schema.APIErrors
import com.prisma.gc_values.{IdGCValue, RootGCValue}
import com.prisma.shared.models.RelationField
import slick.dbio._
import scala.concurrent.ExecutionContext
// Top-level interpreter for the CreateNode mutaction: inserts the new
// node's scalar (non-list) arguments, then its scalar-list values, and
// yields the created node's id.
case class CreateNodeInterpreter(
mutaction: CreateNode
)(implicit ec: ExecutionContext)
extends TopLevelDatabaseMutactionInterpreter {
val model = mutaction.model
override def dbioAction(mutationBuilder: JdbcActionsBuilder): DBIO[DatabaseMutactionResult] = {
for {
id <- mutationBuilder.createNode(model, mutaction.nonListArgs)
_ <- mutationBuilder.createScalarListValuesForNodeId(model, id, mutaction.listArgs)
} yield CreateNodeResult(id, mutaction)
}
// Maps low-level SQL errors to user-facing API errors:
//   23505 (Postgres unique violation) -> UniqueConstraintViolation
//   23503 (Postgres FK violation)     -> NodeDoesNotExist
//   1062  (MySQL duplicate entry)     -> UniqueConstraintViolation
//   19    (SQLite constraint)         -> UniqueConstraintViolation
override val errorMapper = {
case e: SQLException if e.getSQLState == "23505" && GetFieldFromSQLUniqueException.getFieldOption(mutaction.project, mutaction.model, e).isDefined =>
APIErrors.UniqueConstraintViolation(model.name, GetFieldFromSQLUniqueException.getFieldOption(mutaction.project, mutaction.model, e).get)
case e: SQLException if e.getSQLState == "23505" && e.getMessage.contains(s"${model.dbName}_pkey") =>
APIErrors.UniqueConstraintViolation(model.name, s"Field name = " + model.idField_!.name)
case e: SQLException if e.getSQLState == "23503" =>
APIErrors.NodeDoesNotExist("")
case e: SQLIntegrityConstraintViolationException
if e.getErrorCode == 1062 && GetFieldFromSQLUniqueException.getFieldOptionMySql(mutaction.nonListArgs.keys, e).isDefined =>
APIErrors.UniqueConstraintViolation(model.name, GetFieldFromSQLUniqueException.getFieldOptionMySql(mutaction.nonListArgs.keys, e).get)
case e: SQLIntegrityConstraintViolationException if e.getErrorCode == 1062 && e.getMessage.contains("'PRIMARY'") =>
APIErrors.UniqueConstraintViolation(model.name, s"Field name = " + model.idField_!.name)
case e: SQLException if e.getErrorCode == 19 && GetFieldFromSQLUniqueException.getFieldOptionSQLite(mutaction.nonListArgs.keys, e).isDefined =>
APIErrors.UniqueConstraintViolation(model.name, GetFieldFromSQLUniqueException.getFieldOptionSQLite(mutaction.nonListArgs.keys, e).get)
}
}
// Interpreter for a CreateNode nested inside another mutaction: after
// running required-relation checks and removing any previous child
// connection, it creates the child node and connects it to the parent
// (either through an inlined relation column or a relation table).
case class NestedCreateNodeInterpreter(
mutaction: NestedCreateNode
)(implicit val ec: ExecutionContext)
extends NestedDatabaseMutactionInterpreter
with NestedRelationInterpreterBase {
override def relationField = mutaction.relationField
val relatedModel = relationField.relatedModel_!
override def dbioAction(mutationBuilder: JdbcActionsBuilder, parentId: IdGCValue) = {
implicit val implicitMb = mutationBuilder
for {
_ <- DBIO.seq(requiredCheck(parentId), removalAction(parentId))
id <- createNodeAndConnectToParent(relationField, mutationBuilder, parentId)
_ <- mutationBuilder.createScalarListValuesForNodeId(relatedModel, id, mutaction.listArgs)
} yield CreateNodeResult(id, mutaction)
}
// If the relation is inlined in the child ("parent" from the related
// field's perspective), write the parent id directly into the child's
// create args; otherwise create the node and a relation-table row.
private def createNodeAndConnectToParent(
relationField: RelationField,
mutationBuilder: JdbcActionsBuilder,
parentId: IdGCValue
)(implicit ec: ExecutionContext) = {
relationField.relatedField.relationIsInlinedInParent match {
case true =>
val argsMap = mutaction.nonListArgs.raw.asRoot.map
val modifiedArgs = argsMap.updated(relationField.relatedField.name, parentId)
mutationBuilder.createNode(relatedModel, PrismaArgs(RootGCValue(modifiedArgs)))
case false =>
for {
id <- mutationBuilder.createNode(relatedModel, mutaction.nonListArgs)
_ <- mutationBuilder.createRelation(relationField, parentId, id)
} yield id
}
}
// Dispatch table over (p.isList, p.isRequired, c.isList, c.isRequired)
// - p/c presumably being the parent/child sides of the relation (from
// NestedRelationInterpreterBase). Only non-create parents need checks.
def requiredCheck(parentId: IdGCValue)(implicit mutationBuilder: JdbcActionsBuilder): DBIO[Unit] = {
mutaction.topIsCreate match {
case false =>
(p.isList, p.isRequired, c.isList, c.isRequired) match {
case (false, true, false, true) => requiredRelationViolation
case (false, true, false, false) => noCheckRequired
case (false, false, false, true) => checkForOldChild(parentId)
case (false, false, false, false) => noCheckRequired
case (true, false, false, true) => noCheckRequired
case (true, false, false, false) => noCheckRequired
case (false, true, true, false) => noCheckRequired
case (false, false, true, false) => noCheckRequired
case (true, false, true, false) => noCheckRequired
case _ => errorBecauseManySideIsRequired
}
case true =>
noCheckRequired
}
}
// Disconnect an existing child when the parent side is to-one;
// list parents can hold multiple children, so nothing to remove.
def removalAction(parentId: IdGCValue)(implicit mutationBuilder: JdbcActionsBuilder): DBIO[_] =
mutaction.topIsCreate match {
case false =>
(p.isList, c.isList) match {
case (false, false) => removalByParent(parentId)
case (true, false) => noActionRequired
case (false, true) => removalByParent(parentId)
case (true, true) => noActionRequired
}
case true =>
noActionRequired
}
// Same SQL-error-to-API-error mapping as CreateNodeInterpreter, but
// reported against the related (child) model.
override val errorMapper = {
case e: SQLException if e.getSQLState == "23505" && GetFieldFromSQLUniqueException.getFieldOption(mutaction.project, relatedModel, e).isDefined =>
APIErrors.UniqueConstraintViolation(relatedModel.name, GetFieldFromSQLUniqueException.getFieldOption(mutaction.project, relatedModel, e).get)
case e: SQLException if e.getSQLState == "23505" && e.getMessage.contains(s"${relatedModel.dbName}_pkey") =>
APIErrors.UniqueConstraintViolation(relatedModel.name, s"Field name = " + relatedModel.idField_!.name)
case e: SQLException if e.getSQLState == "23503" => //Foreign Key Violation
APIErrors.NodeDoesNotExist("")
case e: SQLIntegrityConstraintViolationException
if e.getErrorCode == 1062 && GetFieldFromSQLUniqueException.getFieldOptionMySql(mutaction.nonListArgs.keys, e).isDefined =>
APIErrors.UniqueConstraintViolation(relatedModel.name, GetFieldFromSQLUniqueException.getFieldOptionMySql(mutaction.nonListArgs.keys, e).get)
case e: SQLIntegrityConstraintViolationException if e.getErrorCode == 1062 && e.getMessage.contains("'PRIMARY'") =>
APIErrors.UniqueConstraintViolation(relatedModel.name, s"Field name = " + relatedModel.idField_!.name)
case e: SQLException if e.getErrorCode == 19 && GetFieldFromSQLUniqueException.getFieldOptionSQLite(mutaction.nonListArgs.keys, e).isDefined =>
APIErrors.UniqueConstraintViolation(relatedModel.name, GetFieldFromSQLUniqueException.getFieldOptionSQLite(mutaction.nonListArgs.keys, e).get)
}
}
|
#!/bin/bash
: '
#SYNOPSIS
Deployment of config and related resources for config based data collection.
.DESCRIPTION
This script will deploy all the services required for the config based data collection.
.NOTES
Copyright (c) Cloudneeti. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Version: 1.0
# PREREQUISITE
- Install aws cli
Link : https://docs.aws.amazon.com/cli/latest/userguide/install-linux-al2017.html
- Install json parser jq
Installation command: sudo apt-get install jq
- Configure your aws account using the below command:
aws configure
Enter the required inputs:
AWS Access Key ID: Access key of any admin user of the account in consideration.
AWS Secret Access Key: Secret Access Key of any admin user of the account in consideration
Default region name: Programmatic region name where you want to deploy the framework (eg: us-east-1)
Default output format: json
- Run this script in any bash shell (linux command prompt)
.EXAMPLE
Command to execute : bash deploy-config.sh [-a <12-digit-account-id>] [-e <environment-prefix>] [-n <config-aggregator-name>] [-p <primary-aggregator-region>] [-s <list of regions(secondary) where config is to enabled>]
.INPUTS
(-a)Account Id: 12-digit AWS account Id of the account where you want to deploy the AWS Config setup
(-e)Environment prefix: Enter any suitable prefix for your deployment
(-n)Config Aggregator Name: Suitable name for the config aggregator
    (-p)Config Aggregator region(primary): Programmatic name of the region where the primary config with an aggregator is to be created (eg: us-east-1)
    (-s)Region list(secondary): Comma separated list (with no spaces) of the regions where the config(secondary) is to be enabled (eg: us-east-1,us-east-2)
**Pass "all" if you want to enable config in all other available regions
**Pass "na" if you do not want to enable config in any other region
.OUTPUTS
None
'
# Print usage information and abort.
usage() { echo "Usage: $0 [-a <12-digit-account-id>] [-e <environment-prefix>] [-n <config-aggregator-name>] [-p <primary-aggregator-region>] [-s <list of regions(secondary) where config is to enabled>]" 1>&2; exit 1; }
# Abort, listing the regions that may host the aggregator.
aggregion_validation() { echo "Enter correct value for the aggregator region parameter. Following are the acceptable values: ${aggregator_regions[@]}" 1>&2; exit 1; }
# Abort, listing the regions valid as secondary regions.
region_validation() { echo "Enter correct value(s) for the secondary region parameter. Following are the acceptable values: ${enabled_regions[@]}" 1>&2; exit 1; }
# Defaults: 'dev' environment, no secondary regions.
env="dev"
regionlist=("na")
# Parse CLI switches (see usage()).
while getopts "a:e:n:p:s:" o; do
case "${o}" in
a)
awsaccountid=${OPTARG}
;;
e)
env=${OPTARG}
;;
n)
aggregatorname=${OPTARG}
;;
p)
aggregatorregion=${OPTARG}
;;
s)
regionlist=${OPTARG}
;;
*)
usage
;;
esac
done
shift $((OPTIND-1))
# Mandatory: a numeric 12-digit account id and an aggregator name.
if [[ "$awsaccountid" == "" ]] || ! [[ "$awsaccountid" =~ ^[0-9]+$ ]] || [[ ${#awsaccountid} != 12 ]] || [[ "$aggregatorname" == "" ]]; then
usage
fi
echo "Validating input parameters..."
# Discover the regions enabled for this account via the EC2 API.
region_detail="$(aws ec2 describe-regions | jq '.Regions')"
region_count="$(echo $region_detail | jq length)"
aws_regions=()
for ((i = 0 ; i < region_count ; i++ )); do
aws_regions+=("$(echo $region_detail | jq -r --arg i "$i" '.[$i | tonumber]' | jq '.RegionName')")
done
# Strip the surrounding quotes jq leaves on each region name.
enabled_regions=()
for index in ${!aws_regions[@]}
do
enabled_regions+=("$(echo ${aws_regions[index]//\"/})")
done
# Regions that support hosting a config aggregator.
aggregator_regions=( "us-east-1" "us-east-2" "us-west-1" "us-west-2" "ap-south-1" "ap-northeast-2" "ap-southeast-1" "ap-southeast-2" "ap-northeast-1" "ca-central-1" "eu-central-1" "eu-west-1" "eu-west-2" "eu-west-3" "eu-north-1" "sa-east-1")
# new_regions = enabled regions NOT in the aggregator list; they get
# the 'newregion' CloudFormation template during deployment below.
new_regions=()
for i in "${enabled_regions[@]}"; do
skip=
for j in "${aggregator_regions[@]}"; do
[[ $i == $j ]] && { skip=1; break; }
done
[[ -n $skip ]] || new_regions+=("$i")
done
# The account configured in the AWS CLI must match the -a account id.
configure_account="$(aws sts get-caller-identity)"
if [[ "$configure_account" != *"$awsaccountid"* ]];then
echo "AWS CLI configuration AWS account Id and entered AWS account Id does not match. Please try again with correct AWS Account Id."
exit 1
fi
# Normalize user input to lower case for case-insensitive comparison.
env="$(echo "$env" | tr "[:upper:]" "[:lower:]")"
aggregatorname="$(echo "$aggregatorname" | tr "[:upper:]" "[:lower:]")"
aggregatorregion="$(echo "$aggregatorregion" | tr "[:upper:]" "[:lower:]")"
regionlist="$(echo "$regionlist" | tr "[:upper:]" "[:lower:]")"
# The aggregator region must both support aggregators AND be enabled
# in this account.
# BUGFIX: the second condition used to compare $aggregatorregion with
# itself (always false, dead code); it now checks enabled_regions.
if [[ " ${aggregator_regions[*]} " != *" $aggregatorregion "* ]] || [[ " ${enabled_regions[*]} " != *" $aggregatorregion "* ]]; then
aggregion_validation
fi
# Expand the -s argument: 'all' -> every enabled region, 'na' -> none,
# otherwise a comma-separated list of region names.
if [[ $regionlist == "all" ]]; then
input_regions="${enabled_regions[@]}"
elif [[ $regionlist == "na" ]]; then
input_regions=("na")
else
IFS=, read -a input_regions <<<"${regionlist}"
printf -v ips ',"%s"' "${input_regions[@]}"
ips="${ips:1}"
fi
# De-duplicate and sort the requested regions.
input_regions=($(echo "${input_regions[@]}" | tr ' ' '\n' | sort -u | tr '\n' ' '))
# Every explicitly requested region must be enabled for the account.
if [[ $regionlist != "all" ]] && [[ $regionlist != "na" ]]; then
validated_regions=()
for i in "${input_regions[@]}"; do
for j in "${enabled_regions[@]}"; do
if [[ $i == $j ]]; then
validated_regions+=("$i")
fi
done
done
if [[ ${#validated_regions[@]} != ${#input_regions[@]} ]]; then
region_validation
fi
fi
# Ask for confirmation before touching any existing config setup.
read -p "This script will update any existing config setup present in entered regions of the AWS Account: $awsaccountid, as per Cloudneeti requirements. Continue? (Y/N): " confirm && [[ $confirm == [yY] || $confirm == [yY][eE][sS] ]] || exit 1
echo "Verifying if the config aggregator or the config deployment bucket with the similar environment prefix exists in the account..."
# Detect leftovers from a previous deployment with the same prefix.
s3_detail="$(aws s3api get-bucket-versioning --bucket config-bucket-$env-$awsaccountid 2>/dev/null)"
s3_status=$?
stack_detail="$(aws cloudformation describe-stacks --stack-name "cn-data-collector-"$env --region $aggregatorregion 2>/dev/null)"
stack_status=$?
# Bucket without a stack: a conflicting bucket already exists.
if [[ $s3_status -eq 0 ]] && [[ $stack_status != 0 ]]; then
echo "Config bucket with name config-bucket-$env-$awsaccountid already exists in the account. Please verify if a cloudneeti aggregator (primary config) already exists in some other region or re-run the script with different environment variable."
exit 1
fi
# Stack without its bucket: broken deployment, must be deleted first.
if [[ $s3_status != 0 ]] && [[ $stack_status -eq 0 ]]; then
echo "Config stack with name cn-data-collector-$env already exists in the account but the associated bucket has been deleted. Please delete the stack and re-run the script."
exit 1
fi
echo "Fetching details for any existing config setup in the primary region: $aggregatorregion"
# jq prints the literal string 'null' when no recorder/channel exists.
config_recorder="$(aws configservice describe-configuration-recorders --region $aggregatorregion | jq '.ConfigurationRecorders[0].name')"
role_arn="$(aws configservice describe-configuration-recorders --region $aggregatorregion | jq '.ConfigurationRecorders[0].roleARN')"
delivery_channel="$(aws configservice describe-delivery-channels --region $aggregatorregion | jq '.DeliveryChannels[0].name')"
if [[ $config_recorder != null ]] && [[ $delivery_channel != null ]]; then
echo "Found existing config setup!! Enhancing existing config setup to record all the resource types including global in the primary region..."
# Reuse the existing recorder but record ALL resource types.
aws configservice put-configuration-recorder --configuration-recorder name=$config_recorder,roleARN=$role_arn --recording-group allSupported=true,includeGlobalResourceTypes=true --region $aggregatorregion 2>/dev/null
config_status=$?
if [[ $config_status -eq 0 ]]; then
aws configservice put-configuration-aggregator --region $aggregatorregion --configuration-aggregator-name $aggregatorname --account-aggregation-sources AccountIds=$awsaccountid,AllAwsRegions=true 2>/dev/null
aggregator_status=$?
else
echo "Error updating existing setup. Please contact Cloudneeti support!"
fi
else
echo "No existing config setup found. Deploying new config setup and aggregator in the primary region: $aggregatorregion"
aws cloudformation deploy --template-file config-aggregator.yml --stack-name "cn-data-collector-"$env --region $aggregatorregion --parameter-overrides env=$env awsaccountid=$awsaccountid aggregatorname=$aggregatorname --capabilities CAPABILITY_NAMED_IAM --no-fail-on-empty-changeset
aggregator_status=$?
fi
# Enable / update config in each requested secondary region once the
# primary (aggregator) deployment succeeded.
if [[ "$aggregator_status" -eq 0 ]] && [[ "${input_regions[0]}" != "na" ]]; then
for region in "${input_regions[@]}"; do
if [[ "$region" != "$aggregatorregion" ]]; then
echo "Fetching details for any existing config setup in the secondary region: $region"
config_recorder="$(aws configservice describe-configuration-recorders --region $region | jq '.ConfigurationRecorders[0].name')"
role_arn="$(aws configservice describe-configuration-recorders --region $region | jq '.ConfigurationRecorders[0].roleARN')"
delivery_channel="$(aws configservice describe-delivery-channels --region $region | jq '.DeliveryChannels[0].name')"
# BUGFIX: '-ne' performs an *arithmetic* comparison inside [[ ]],
# which coerces both non-numeric operands, so an existing recorder
# was never reliably detected here. Use string comparison against
# jq's literal "null", exactly as the primary-region check does.
if [[ $config_recorder != null ]] && [[ $delivery_channel != null ]]; then
echo "Found existing config setup!! Enhancing existing config setup to record all the resource types in the secondary region..."
aws configservice put-configuration-recorder --configuration-recorder name=$config_recorder,roleARN=$role_arn --recording-group allSupported=true,includeGlobalResourceTypes=false --region $region 2>/dev/null
multiregionconfig_status=$?
else
if [[ " ${new_regions[*]} " == *" $region "* ]]; then
echo "No existing config setup found. Deploying/Re-deploying config in the secondary region: $region."
aws cloudformation deploy --template-file newregion-config.yml --stack-name "cn-data-collector-"$env --region $region --parameter-overrides env=$env awsaccountid=$awsaccountid --capabilities CAPABILITY_NAMED_IAM --no-fail-on-empty-changeset
multiregionconfig_status=$?
else
echo "No existing config setup found. Deploying/Re-deploying config in the secondary region: $region."
aws cloudformation deploy --template-file multiregion-config.yml --stack-name "cn-data-collector-"$env --region $region --parameter-overrides env=$env awsaccountid=$awsaccountid aggregatorregion=$aggregatorregion --capabilities CAPABILITY_NAMED_IAM --no-fail-on-empty-changeset
multiregionconfig_status=$?
fi
fi
fi
done
# BUGFIX: the single-region branch below was unreachable because the
# previous branch's condition also matched "na"; the more specific
# condition is now tested first.
elif [[ "${input_regions[0]}" == "na" ]]; then
echo "Successfully deployed/updated config and aggregator in the mentioned region!!"
elif [[ "$multiregionconfig_status" -eq 0 ]]; then
echo "Successfully deployed/updated config(s) and aggregator in the mentioned regions!!"
else
echo "Something went wrong! Please contact Cloudneeti support for more details"
fi
import { ErrorHandler, Injectable } from '@angular/core';
import { Store } from '@ngrx/store';
/**
 * Global Angular error handler: logs every uncaught error to the
 * console. The store is injected for the (currently disabled)
 * error-dispatch below.
 */
@Injectable()
export class GlobalErrorHandlerService implements ErrorHandler {
constructor( private store: Store<any> ) {}
handleError( error: Error ) {
console.error( 'Unexpected error: ', error );
// TODO: dispatch only user-relevant errors to prevent spamming and flooding continuously
// this.store.dispatch( new ErrorThrownAction( 'Unexpected error', 'Please reload page and try again...' ) );
}
}
|
#!/usr/bin/env bash
# Startup script for a web-scraper-gcp VM: installs Chromium and
# Node.js 12, clones the scraper, pulls its config from GCS, runs one
# crawl, then powers the machine off.
bucket='gs://web-scraper-config/config.json'
set -v
apt-get update
# Chromium + libgbm for the headless browser the scraper drives
# (presumably via puppeteer - TODO confirm)
apt-get install -y chromium
apt-get install -y libgbm-dev
# Node.js 12.x from NodeSource, plus git and libgconf
curl -sL https://deb.nodesource.com/setup_12.x | bash -
apt-get install -yq git libgconf-2-4 nodejs
git clone https://github.com/sahava/web-scraper-gcp.git
cd web-scraper-gcp
sudo npm install
# Fetch the runtime configuration from the GCS bucket
gsutil cp ${bucket} .
node index.js
# Power the instance off once the crawl completes
shutdown -h now
|
package com.mybatis.bestPractice.batch;
import com.mybatis.project.mapper.UserMapper;
import com.mybatis.project.po.User;
import org.apache.ibatis.io.Resources;
import org.apache.ibatis.session.ExecutorType;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.ibatis.session.SqlSessionFactoryBuilder;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
/**
 * Benchmarks three MyBatis batch-insert strategies:
 *   1. plain per-row inserts in a loop,
 *   2. one multi-row INSERT statement (fastest, recommended),
 *   3. the BATCH executor, committing every 10 rows.
 *
 * @author sunkang
 * @since 2018-10-28
 */
public class BatchInsertTest {
    private final static Logger log = LoggerFactory.getLogger(BatchInsertTest.class);
    private SqlSessionFactory sqlSessionFactory;

    @Before
    public void setUp() throws Exception {
        // Build the SqlSessionFactory from the MyBatis configuration file.
        String resource = "SqlMapConfig.xml";
        InputStream inputStream = Resources.getResourceAsStream(resource);
        sqlSessionFactory = new SqlSessionFactoryBuilder().build(inputStream);
    }

    /**
     * Strategy 1: insert the rows one at a time with a simple loop.
     * Previously observed: ~1928 ms for 100 rows.
     */
    @Test
    public void batchInsertTest() throws Exception {
        long start = System.currentTimeMillis();
        SqlSession sqlSession = sqlSessionFactory.openSession();
        try {
            // MyBatis generates the mapper proxy automatically.
            UserMapper userMapper = sqlSession.getMapper(UserMapper.class);
            for (int i = 0; i < 100; i++) {
                User user = new User();
                user.setSex("男");
                user.setUsername("sun");
                user.setAddress("address");
                userMapper.insert(user);
            }
            sqlSession.commit();
        } finally {
            // BUGFIX: sessions were previously never closed (connection leak).
            sqlSession.close();
        }
        log.info("cost {}ms", System.currentTimeMillis() - start);
    }

    /**
     * Strategy 2: build one multi-row INSERT via the mapper's
     * insertBatch. Fastest of the three; recommended.
     * Previously observed: ~1610 ms for 100 rows.
     */
    @Test
    public void insertBatchByConnSql() {
        long start = System.currentTimeMillis();
        SqlSession sqlSession = sqlSessionFactory.openSession();
        try {
            UserMapper userMapper = sqlSession.getMapper(UserMapper.class);
            List<User> userList = new ArrayList<User>();
            for (int i = 0; i < 100; i++) {
                User user = new User();
                user.setSex("男");
                user.setUsername("sun");
                user.setAddress("address");
                userList.add(user);
            }
            userMapper.insertBatch(userList);
            sqlSession.commit();
        } finally {
            sqlSession.close();
        }
        log.info("cost {}ms", System.currentTimeMillis() - start);
    }

    /**
     * Strategy 3: BATCH executor with autocommit off, committing every
     * 10 inserts. Previously observed: ~1673 ms for 100 rows.
     */
    @Test
    public void insertBatchByConnectSql() {
        long start = System.currentTimeMillis();
        SqlSession sqlSession = sqlSessionFactory.openSession(ExecutorType.BATCH, false);
        try {
            UserMapper userMapper = sqlSession.getMapper(UserMapper.class);
            for (int i = 0; i < 100; i++) {
                User user = new User();
                user.setSex("男");
                user.setUsername("sun");
                user.setAddress("address");
                userMapper.insert(user);
                if (i % 10 == 0 && i != 0) {
                    sqlSession.commit();
                    sqlSession.clearCache();
                }
            }
            // BUGFIX: rows inserted after the last 10-row boundary
            // (i = 91..99) were never flushed/committed.
            sqlSession.commit();
        } finally {
            sqlSession.close();
        }
        log.info("cost {}ms", System.currentTimeMillis() - start);
    }
}
|
// Registers the '<%= moduleNameDeCap %>' ui-router state (child of
// the 'root' state) with its template and controller.
angular.module('<%= appName %>').config(['$stateProvider',
function($stateProvider){
$stateProvider
.state('<%= moduleNameDeCap %>', {
parent: 'root',
url: '/<%= moduleNameDeCap %>',
templateUrl: 'app/client/<%= moduleNameDeCap %>/views/module.ng.html',
controller: '<%= moduleNameDeCap %>Ctrl'
});
}]);
|
#!/bin/bash
# For each file name read from $1 (or stdin when no argument is
# given), print the lines of that file starting with "Section",
# case-insensitively.
while read -r line
do
# FIX: 'egrep' is deprecated in favour of 'grep -E'; the file name is
# quoted so paths containing whitespace are handled correctly.
grep -E -i "^Section" "$line"
#echo "problem!" >&2
done < "${1:-/dev/stdin}"
|
require "test_helper"

# Unit tests for the Application model (validations, defaults, derived
# attributes) plus the ApplicationHelper#human_datetime formatting helper,
# which is mixed in below.
class ApplicationTest < ActiveSupport::TestCase
  include ApplicationHelper

  context "creating an application" do
    setup do
      @atts = {
        name: "Tron-o-matic",
        repo: "alphagov/tron-o-matic",
      }
    end

    context "given valid attributes" do
      should "be created successfully" do
        application = Application.new(@atts)
        assert application.valid?
        application.save!
        assert application.persisted?
      end
    end

    should "be invalid with an empty name" do
      application = Application.new(@atts.merge(name: ""))
      assert_not application.valid?
    end

    should "be invalid with a duplicate name" do
      FactoryBot.create(:application, name: "Tron-o-matic")
      application = Application.new(@atts)
      assert_not application.valid?
      assert application.errors[:name].include?("has already been taken")
    end

    # The repo format accepted elsewhere is "owner/name"; each of these cases
    # deviates from that shape in a different way.
    should "be invalid with an invalid repo" do
      application = Application.new(@atts)
      application.repo = "noslashes"
      assert_not application.valid?
      assert application.errors[:repo].include?("is invalid")
      application.repo = "too/many/slashes"
      assert_not application.valid?
      assert application.errors[:repo].include?("is invalid")
      application.repo = "/slashatfront"
      assert_not application.valid?
      assert application.errors[:repo].include?("is invalid")
      application.repo = "slashatback/"
      assert_not application.valid?
      assert application.errors[:repo].include?("is invalid")
    end

    should "use the second half of the repo name as shortname if shortname not provided or empty" do
      application = Application.create!(@atts)
      assert_equal "tron-o-matic", application.shortname
    end

    should "use the provided shortname if not empty" do
      application = Application.create!(@atts.merge(shortname: "giraffe"))
      assert_equal "giraffe", application.shortname
    end

    should "know its location on the internet" do
      application = Application.new(@atts)
      assert_equal "https://github.com/alphagov/tron-o-matic", application.repo_url
    end

    should "default to not being archived" do
      application = Application.new(@atts)
      assert_equal false, application.archived
    end

    should "default to not being on AWS" do
      application = Application.new(@atts)
      assert_equal false, application.on_aws?
    end

    should "default to not be in deploy freeze" do
      application = Application.new(@atts)
      assert_equal false, application.deploy_freeze?
    end

    # Length validations for the string columns.
    should "be invalid with a name that is too long" do
      application = Application.new(@atts.merge(name: ("a" * 256)))
      assert_not application.valid?
    end

    should "be invalid with a repo that is too long" do
      application = Application.new(@atts.merge(repo: "alphagov/my-r#{'e' * 243}po"))
      assert_not application.valid?
    end

    should "be invalid with a shortname that is too long" do
      application = Application.new(@atts.merge(shortname: ("a" * 256)))
      assert_not application.valid?
    end

    should "be invalid with status_notes that are too long" do
      application = Application.new(@atts.merge(status_notes: "This app is n#{'o' * 233}t working!"))
      assert_not application.valid?
    end
  end

  context "display datetimes" do
    should "use the word today if the release was today" do
      assert_equal "10:02am today",
                   human_datetime(Time.zone.now.change(hour: 10, min: 2))
    end

    should "use the word yesterday if the release was yesterday" do
      deploy_time = Time.zone.now.change(hour: 10, min: 2) - 1.day
      assert_equal "10:02am yesterday", human_datetime(deploy_time)
    end

    should "use the day of the week for the current week" do
      Timecop.freeze(Time.zone.parse("2014-07-04 12:44")) do # Friday
        deploy_time = Time.zone.parse("2014-06-30 10:02")
        assert_equal "10:02am on Monday", human_datetime(deploy_time)
      end
    end

    should "display the date for last Sunday" do
      Timecop.freeze(Time.zone.parse("2014-07-04 12:44")) do # Friday
        deploy_time = Time.zone.parse("2014-06-29 10:02")
        assert_equal "10:02am on 29 Jun", human_datetime(deploy_time)
      end
    end

    should "show a year if the date is old" do
      assert_equal "2pm on 3 Jul 2010",
                   human_datetime(Time.zone.now.change(year: 2010, month: 7, day: 3, hour: 14))
    end

    should "show nothing if the date is missing" do
      assert_equal "", human_datetime(nil)
    end
  end

  context "continuous deployment" do
    setup do
      @atts = {
        name: "Tron-o-matic",
        repo: "alphagov/tron-o-matic",
      }
    end

    context "when the application is not continuously deployed" do
      should "return false" do
        application = Application.new(@atts)
        assert_not application.cd_enabled?
        # cd_enabled? is driven by the class-level cd_statuses list.
        Application.stub :cd_statuses, ["something-other-than-tron-o-matic"] do
          assert_not application.cd_enabled?
        end
      end
    end

    context "when the application is continuously deployed" do
      should "return true" do
        application = Application.new(@atts)
        Application.stub :cd_statuses, ["tron-o-matic"] do
          assert application.cd_enabled?
        end
      end
    end
  end
end
|
<reponame>eSCT/oppfin
package com.searchbox.core.search.query;
import com.searchbox.core.SearchCondition;
import com.searchbox.core.SearchConverter;
import com.searchbox.core.search.AbstractSearchCondition;
/**
 * Search condition bound to the "q" URL parameter: wraps the raw free-text
 * query string supplied by the user.
 */
@SearchCondition(urlParam = "q")
public class QueryCondition extends AbstractSearchCondition {
    // Raw query text; set once at construction.
    String query;
    QueryCondition(String query) {
        this.query = query;
    }
    public String getQuery() {
        return query;
    }
    /** The value that should be rendered back into the "q" URL parameter. */
    @Override
    public String getParamValue() {
        return query;
    }
    /**
     * Spring converter turning the raw "q" parameter value into a
     * QueryCondition instance.
     */
    @SearchConverter
    public static class Converter
            implements
            org.springframework.core.convert.converter.Converter<String, QueryCondition> {
        @Override
        public QueryCondition convert(String source) {
            return new QueryCondition(source);
        }
    }
}
#!/bin/bash
# Bundle the requirements into a PEX, wrap it as an AWS Lambda package with
# lambdex, then invoke it locally with a sample JSON event for a smoke test.
pex -r requirements.txt -o lambda_function.zip
lambdex build -s example_function.py lambda_function.zip
lambdex test lambda_function.zip <(echo '{"url": "https://github.com/pantsbuild"}')
|
#!/bin/bash
# Build the development image for the blog. The build context is the parent
# directory so the Dockerfile can COPY project files from there.
docker build -t thomas-blog-devel -f Dockerfile ../
#!/usr/bin/env python
# -*- coding:UTF-8 -*-
"""Stress client: spawn 100 threads against each of two local time servers
(ports 10000 and 11000) and print every reply."""
from socket import *
from threading import Thread

def gettime(address):
    """Connect to (host, port), read one reply, close, and print the time."""
    sock = socket(AF_INET, SOCK_STREAM)
    try:
        sock.connect(address)
        tm = sock.recv(1024)
    finally:
        # BUG FIX: close the socket even when connect()/recv() raises.
        sock.close()
    print("The time is %s" % tm.decode('ascii'))

t1 = [Thread(target=gettime, args=(('localhost', 10000),)) for i in range(100)]
t2 = [Thread(target=gettime, args=(('localhost', 11000),)) for i in range(100)]
# BUG FIX: map(None, ...) only works on Python 2, and Thread.run() executes
# the target in the *current* thread (strictly sequentially). Use zip() plus
# start() so the requests actually run concurrently, then join() them all.
for a, b in zip(t1, t2):
    a.start()
    b.start()
for t in t1 + t2:
    t.join()
|
# frozen_string_literal: true
module YuukiBot
  module Textgen
    # Builds a random string from a set of templates.
    #
    # template  - Array of template strings; one is chosen at random.
    # parts     - Collection of [name, candidates] pairs; every "{name}"
    #             placeholder is replaced by a random candidate, re-sampled
    #             for each occurrence.
    # variables - Optional [name, value] pairs substituted verbatim first.
    def self.generate_string(template, parts, variables = nil)
      # Choose random template
      template_base = template.sample
      # Parse variables
      variables&.each do |variable|
        template_base = template_base.gsub("{#{variable[0]}}", (variable[1]).to_s)
      end
      # Parse parts
      parts.each do |part|
        placeholder = "{#{part[0]}}"
        # Replace one occurrence per iteration so every occurrence gets its
        # own random sample. BUG FIX: use String#include?/#sub with a literal
        # string instead of Regexp.new, which raised or mis-matched whenever a
        # part name contained regex metacharacters (e.g. "(", "*", "+").
        while template_base.include?(placeholder)
          part_to_replace = part[1].sample
          template_base = template_base.sub(placeholder, part_to_replace.to_s)
        end
      end
      template_base
    end
  end
end
<filename>custom/styles.js
import glamorous from 'glamorous-native'

// Shared palette and layout constants for the action-sheet styles below.
const primaryBlue = '#335DC6'
const accentBlue = '#12c8fd'
const ITEM_HEIGHT = 60

// Grey title row shown at the top of the action sheet.
export const ActionSheetTitleView = glamorous.text({
  color: 'gray',
  fontSize: 14,
  textAlign: 'left',
  backgroundColor: 'purple'
})

// Horizontal container for a single action-sheet row.
export const ActionSheetItemContainer = glamorous.view({
  flex: 1,
  flexDirection: 'row',
  justifyContent: 'center',
  paddingLeft: 20
})

// Centers the row's leading icon.
export const ActionSheetItemImageContainer = glamorous.view({
  alignItems: 'center',
  justifyContent: 'center'
})

// 24x24 tinted icon for a row.
export const ActionSheetItemImageView = glamorous.image({
  resizeMode: 'cover',
  height: 24,
  width: 24,
  tintColor: 'black'
})

// Text column of a row; `withBorder` toggles the bottom separator colour.
export const ActionSheetItemSubContainer = glamorous.view(
  {
    flexDirection: 'column',
    justifyContent: 'space-evenly',
    marginLeft: 20,
    flex: 1,
    borderBottomWidth: 1,
    paddingVertical: 10
  },
  props => ({
    borderBottomColor: props.withBorder ? '#ECF0F1' : 'white'
  })
)

// Primary label of a row.
export const ActionSheetItemTitleView = glamorous.text({
  fontSize: 13,
  textAlign: 'left',
  color: primaryBlue
})

// Secondary description line under the title.
export const ActionSheetDescriptionView = glamorous.text({
  fontSize: 10,
  textAlign: 'left',
  color: accentBlue,
  marginTop: 5
})

// Thin separator drawn at the bottom of a row.
export const ActionSheetItemBottomBorderView = glamorous.view({
  flexWrap: 'wrap',
  height: 1,
  backgroundColor: '#ECF0F1',
  bottom: 0
})
#!/bin/sh
# Split two gamepads onto separate X master pointer/keyboard pairs (MPX) so
# each controller drives its own independent cursor and input focus.
# only works using Xorg not Wayland
xinput create-master c1
xinput create-master c2
# need to attach the mouse and keyboard slave to the mt1 master:
xinput reattach "Logitech F710 - Mouse Emulation" "c1 pointer"
xinput reattach "Logitech F710 - Keyboard Emulation" "c1 keyboard"
# same for the other controller and other master:
xinput reattach "Logitech F310 - Mouse Emulation" "c2 pointer"
xinput reattach "Logitech F310 - Keyboard Emulation" "c2 keyboard"
# Note: I have another controller, an old-style no-stick gamepad.
# xboxdrv uses evdev to map it. It won't work in daemon mode.
# IIRC, it didn't have a nice name like those above.
|
#!/usr/bin/env bash
# CI bootstrap: installs the shell/editor toolchain (zsh, tmux, vim/neovim,
# fasd, node, oh-my-zsh) and fetches the tmux/zsh plugin repositories.
set -x
set -e
# NOTE(review): the "windows" branch also runs apt-get — presumably these
# runners are Linux-compatible; confirm against the workflow definition.
if [ "${ACTIONS_OS_NAME}" == linux ] || \
   [ "${ACTIONS_OS_NAME}" == windows ]; then
    sudo apt-get -qq update
    sudo apt-get install -yq build-essential
    sudo add-apt-repository -y ppa:aacebedo/fasd
    curl -sL https://deb.nodesource.com/setup_12.x | sudo -E bash -
    sudo apt-get -qq update
    sudo apt-get install -yq zsh tmux vim fasd nodejs
    sudo apt-get install -yq xclip
    # "yes |" answers the oh-my-zsh installer's interactive prompts.
    yes | sudo sh -c "$(curl -fsSL https://raw.github.com/robbyrussell/oh-my-zsh/master/tools/install.sh)"
    sudo chown $USER:$USER $HOME/.zshrc
elif [ "${ACTIONS_OS_NAME}" == macos ]; then
    brew update
    brew install zsh tmux neovim fasd
    brew upgrade node
    brew install reattach-to-user-namespace
    yes | sh -c "$(curl -fsSL https://raw.github.com/robbyrussell/oh-my-zsh/master/tools/install.sh)"
fi
# Plugin managers/themes used by the dotfiles under test.
mkdir -p "$HOME/.tmux/plugins/tpm"
git clone https://github.com/tmux-plugins/tpm "$HOME/.tmux/plugins/tpm"
mkdir -p "$HOME/.zsh"
git clone https://github.com/sindresorhus/pure.git "$HOME/.zsh/pure"
|
<gh_stars>0
export default class DateTools {
    /** Zero-pads a numeric date component to the given width (default 2). */
    private static pad(value: number, width: number = 2): string {
        return value.toString().padStart(width, '0');
    }

    /** Formats a Date as "DD.MM.YYYYTHH:MM:00" (TPT timestamp; seconds fixed at 00). */
    public static getTPTDate(date: Date): string {
        const day = DateTools.pad(date.getDate());
        const month = DateTools.pad(date.getMonth() + 1);
        const year = DateTools.pad(date.getFullYear(), 4);
        const hours = DateTools.pad(date.getHours());
        const minutes = DateTools.pad(date.getMinutes());
        return `${day}.${month}.${year}T${hours}:${minutes}:00`;
    }

    /** Formats a Date as an ISO-style "YYYY-MM-DD" string. */
    public static getMiFazDate(date: Date): string {
        const year = DateTools.pad(date.getFullYear(), 4);
        const month = DateTools.pad(date.getMonth() + 1);
        const day = DateTools.pad(date.getDate());
        return `${year}-${month}-${day}`;
    }

    /** Combines "YYYY-MM-DD" and "HH:MM" into a Date via ISO-8601 parsing. */
    public static miFazToDate(searchDate: string, time: string): Date {
        return new Date(`${searchDate}T${time}:00`);
    }
}
|
jQuery(document).ready(function () {
    /*jQuery('#billing_email').blur(function(){
        jQuery('body').trigger('update_checkout');
    }); */

    // Copies the typed voucher code into the hidden "applied" field; that
    // field's change handler drives the actual checkout update below.
    function applyTypedVoucher() {
        var code = jQuery('#arcavis_voucher').val();
        if (code !== '') {
            jQuery('#arcavis_applied_voucher').val(code).trigger('change');
        }
    }

    // Apply on Enter (keyCode 13) and when the field loses focus. The
    // duplicated handler bodies of the original are folded into one helper.
    jQuery('#arcavis_voucher').off('keyup').on('keyup', function (event) {
        if (event.keyCode === 13) {
            applyTypedVoucher();
        }
    });
    jQuery('#arcavis_voucher').off('blur').on('blur', applyTypedVoucher);

    jQuery('#arcavis_applied_voucher').off('change').on('change', function () {
        jQuery('body').trigger('update_checkout');
        // Wait until post_check_transaction finished...
        jQuery(document).one('updated_checkout', function () {
            jQuery.ajax({
                url: website_url + "/wp-admin/admin-ajax.php",
                type: 'post',
                data: {
                    action: 'arcavis_get_applied_voucher_code',
                },
                success: function (response) {
                    if (response) {
                        jQuery(document).find('#applied_voucher_wrapper').remove();
                        // BUG FIX: `data` was an implicit global; declare it.
                        var data = JSON.parse(response);
                        jQuery('#arcavis_voucher').after("<div id='applied_voucher_wrapper'><h5>Gutschein erfolgreich hinzugefügt.</h5>" + data.voucher_code + ' <a id="arcavis_voucher_remove_link" class="error" href="javascript:void(0)"> X </a></div>');
                        jQuery('#arcavis_voucher_remove_link').one("click", function () {
                            jQuery('#arcavis_voucher').val('');
                            jQuery('#arcavis_applied_voucher').val('');
                            jQuery(document).find('#applied_voucher_wrapper').remove();
                            jQuery('body').trigger('update_checkout');
                        });
                    } else {
                        alert('Gutschein ungültig.');
                        jQuery('#arcavis_voucher').val('');
                        jQuery(document).find('#applied_voucher_wrapper').remove();
                    }
                },
                error: function (errorThrown) {
                    console.log(errorThrown);
                }
            });
        });
    });
});
#!/bin/sh
# $Id: calendar.sh,v 1.3 2005/12/27 15:53:06 tom Exp $
#
# Description:
#	This demonstrates the CDK command line
#	interface to the calendar widget.
#
#
# Create some global variables.
#
CDK_CALENDAR="${CDK_BINDIR=..}/cdkcalendar"
CDK_LABEL="${CDK_BINDIR=..}/cdklabel"
date="${TMPDIR=/tmp}/cal.$$"
tmp="${TMPDIR=/tmp}/tmp.$$"
xpos=CENTER
ypos=CENTER
# BUG FIX: USAGE was referenced below but never defined, so a bad option
# printed an empty line before exiting.
USAGE="Usage: $0 [-d day] [-m month] [-y year] [-X xpos] [-Y ypos]"
#
# Get today's date.
#
day=`date +%d`
month=`date +%m`
year=`date +%Y`
#
# Chop up the command line.
#
set -- `getopt d:m:y:X:Y: $*`
if [ $? != 0 ]
then
	echo "$USAGE"
	exit 2
fi
for c in $*
do
	case $c in
	-d) day=$2; shift 2;;
	-m) month=$2; shift 2;;
	-y) year=$2; shift 2;;
	-X) xpos=$2; shift 2;;
	-Y) ypos=$2; shift 2;;
	--) shift; break;;
	esac
done
#
# Create the title and buttons.
#
title="<C><#HL(22)>
<C>Select a date
<C><#HL(22)>"
buttons=" OK
Cancel "
#
# Create the calendar widget; -O writes the chosen date to ${date}.
#
${CDK_CALENDAR} -B "${buttons}" -d ${day} -m ${month} -y ${year} -T "${title}" -X ${xpos} -Y ${ypos} -O ${date} -S
selected=$?
# Exit status 255 means the widget was cancelled/escaped.
test $selected = 255 && exit 1
answer=`cat ${date}`
#
# Create the message for the label widget.
#
echo "<C>You chose the following date" > ${tmp}
echo " " >> ${tmp}
echo "<C><#HL(10)>" >> ${tmp}
echo "<C>${answer}" >> ${tmp}
echo "<C><#HL(10)>" >> ${tmp}
echo " " >> ${tmp}
echo "<C>You chose button #${selected}" >> ${tmp}
echo " " >> ${tmp}
echo "<C>Hit </R>space<!R> to continue." >> ${tmp}
#
# Create the label widget to display the information.
#
${CDK_LABEL} -f ${tmp} -p " "
#
# Clean up.
#
rm -f ${tmp} ${date}
|
The function 'count()' takes a single parameter of type 'int' (integer) and returns the number of digits in the given number. It works by recursively dividing the number by 10 and adding 1 to the count until the number reaches 0. For example, if the input is 123, the function divides it by 10 three times (yielding 12, then 1, then 0), adding 1 to the count at each step. The final result is therefore 3, which is the number of digits in the given number.
#!/bin/bash
# Build and install GNU gperf 3.1 (LFS-style recipe).
# Prepare Gperf for compilation:
./configure --prefix=/usr --docdir=/usr/share/doc/gperf-3.1
# Compile the package:
make
# The tests are known to fail if running multiple simultaneous tests (-j option greater than 1). To test the results, issue:
make -j1 check
# Install the package:
make install
|
package com.github.chen0040.leetcode.day25.easy;
/**
* Created by xschen on 20/8/2017.
*
* summary:
* Given a 2D integer matrix M representing the gray scale of an image, you need to design a smoother to make the gray scale of each cell becomes the average gray scale (rounding down) of all the 8 surrounding cells and itself. If a cell has less than 8 surrounding cells, then use as many as you can.
*
*/
public class ImageSmoother {
class Solution {
public int[][] imageSmoother(int[][] M) {
int rowCount = M.length;
int colCount = M[0].length;
int[][] M2 = new int[rowCount][];
for(int i=0; i < rowCount; ++i) {
M2[i] = new int[colCount];
}
for(int i=0; i < rowCount; ++i) {
for(int j=0; j < colCount; ++j) {
int sum = 0;
int count = 0;
for(int ii=-1; ii <=1; ++ii) {
int row = i + ii;
if(row < 0 || row >= rowCount) continue;
for(int jj=-1; jj <= 1; ++jj) {
int col = j + jj;
if(col < 0 || col >= colCount) continue;
sum += M[row][col];
count++;
}
}
int val = (int)(Math.floor((double)sum / count));
M2[i][j] = val;
}
}
return M2;
}
}
}
|
#!/bin/sh
set -e
# NOTE(review): this prints the MinIO access key to stdout, which typically
# ends up in container logs — confirm this debug line is safe to keep.
echo $MINIO_ACCESS_KEY
# Pre-create the test bucket directory and the TLS certificate location.
mkdir -p /test-data/my-bucket
mkdir -p /root/.minio/certs
# Generate a self-signed certificate (10 years) where MinIO expects it.
openssl req -new -newkey rsa:2048 -days 3650 -nodes -x509 -subj /CN=selfsigned \
    -keyout /root/.minio/certs/private.key \
    -out /root/.minio/certs/public.crt
# Hand off to the stock MinIO entrypoint with the original arguments.
/usr/bin/docker-entrypoint.sh -- "$@"
|
#!/usr/bin/env bash
# CI bootstrap (macOS): install build dependencies and build/install the
# requested ImageMagick version, cached under build-ImageMagick/.
set -euox pipefail
gem install bundler
# Style-check-only jobs don't need ImageMagick at all.
if [ -v STYLE_CHECKS ]; then
  set +ux
  exit 0
fi
if [ ! -v IMAGEMAGICK_VERSION ]; then
  echo "you must specify an ImageMagick version."
  echo "example: 'IMAGEMAGICK_VERSION=6.8.9-10 bash ./before_install_osx.sh'"
  exit 1
fi
export HOMEBREW_NO_AUTO_UPDATE=true
brew install wget pkg-config ghostscript freetype jpeg little-cms2 libomp libpng libtiff liblqr libtool libxml2 zlib webp
export LDFLAGS="-L/usr/local/opt/libxml2/lib -L/usr/local/opt/zlib/lib"
export CPPFLAGS="-I/usr/local/opt/libxml2/include/libxml2 -I/usr/local/opt/zlib/include"
project_dir=$(pwd)
build_dir="${project_dir}/build-ImageMagick/ImageMagick-${IMAGEMAGICK_VERSION}"
if [ -v CONFIGURE_OPTIONS ]; then
  build_dir="${build_dir}-${CONFIGURE_OPTIONS}"
fi
# Download, unpack and compile the requested release into ${build_dir}.
build_imagemagick() {
  mkdir -p build-ImageMagick
  wget "https://imagemagick.org/download/releases/ImageMagick-${IMAGEMAGICK_VERSION}.tar.xz"
  tar -xf "ImageMagick-${IMAGEMAGICK_VERSION}.tar.xz"
  rm "ImageMagick-${IMAGEMAGICK_VERSION}.tar.xz"
  mv "ImageMagick-${IMAGEMAGICK_VERSION}" "${build_dir}"
  options="--with-magick-plus-plus=no --disable-docs"
  if [ -v CONFIGURE_OPTIONS ]; then
    options="${CONFIGURE_OPTIONS} ${options}"
  fi
  cd "${build_dir}"
  # BUG FIX: ${options} must NOT be quoted here — quoting passed the whole
  # option string as one (invalid) argument to ./configure. The unused
  # `version` array split from IMAGEMAGICK_VERSION was also dropped.
  # shellcheck disable=SC2086
  ./configure --prefix=/usr/local ${options}
  make -j
}
if [ ! -d "${build_dir}" ]; then
  build_imagemagick
fi
cd "${build_dir}"
make install -j
cd "${project_dir}"
set +ux
|
#!/usr/bin/env bash
# for details about how it works see https://github.com/elastic/apm-integration-testing#continuous-integration
srcdir=$(dirname "$0")
test -z "$srcdir" && srcdir=.
# shellcheck disable=SC1090
. "${srcdir}/common.sh"
# Translate the "<mode>;<value>" agent-version spec into the matching
# apm-integration-testing CLI flag and append it to BUILD_OPTS.
if [ -n "${APM_AGENT_DOTNET_VERSION}" ]; then
    EXTRA_OPTS=${APM_AGENT_DOTNET_VERSION/'github;'/'--dotnet-agent-version='}
    EXTRA_OPTS=${EXTRA_OPTS/'release;'/'--dotnet-agent-release='}
    EXTRA_OPTS=${EXTRA_OPTS/'commit;'/'--dotnet-agent-version='}
    BUILD_OPTS="${BUILD_OPTS} ${EXTRA_OPTS}"
fi
# COMPOSE_ARGS may be pre-set by the caller; otherwise use the default stack.
DEFAULT_COMPOSE_ARGS="${ELASTIC_STACK_VERSION} ${BUILD_OPTS} --no-apm-server-dashboards --no-apm-server-self-instrument --no-kibana --with-agent-dotnet --force-build"
export COMPOSE_ARGS=${COMPOSE_ARGS:-${DEFAULT_COMPOSE_ARGS}}
runTests env-agent-dotnet docker-test-agent-dotnet
|
<reponame>smagill/opensphere-desktop<filename>open-sphere-base/core/src/main/java/com/bitsys/fade/mist/state/v4/QueryEntryType.java
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.8-b130911.1802
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2018.03.30 at 02:21:16 PM MDT
//
package com.bitsys.fade.mist.state.v4;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.CollapsedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
/**
*
* Defines the structure specifying a query for a given layer, in a given
* region, using filters and areas.
*
*
* <p>Java class for QueryEntryType complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="QueryEntryType">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <attribute name="layerId" use="required" type="{http://www.w3.org/2001/XMLSchema}token" />
* <attribute name="areaId" use="required" type="{http://www.w3.org/2001/XMLSchema}token" />
* <attribute name="filterId" type="{http://www.w3.org/2001/XMLSchema}token" />
* <attribute name="includeArea" use="required" type="{http://www.w3.org/2001/XMLSchema}boolean" />
* <attribute name="filterGroup" use="required" type="{http://www.w3.org/2001/XMLSchema}boolean" />
* <attribute name="temp" type="{http://www.w3.org/2001/XMLSchema}boolean" />
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "QueryEntryType")
public class QueryEntryType {

    // NOTE: JAXB-generated class (see the file header); hand edits will be
    // lost when the schema is recompiled.

    @XmlAttribute(name = "layerId", required = true)
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlSchemaType(name = "token")
    protected String layerId;
    @XmlAttribute(name = "areaId", required = true)
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlSchemaType(name = "token")
    protected String areaId;
    @XmlAttribute(name = "filterId")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlSchemaType(name = "token")
    protected String filterId;
    @XmlAttribute(name = "includeArea", required = true)
    protected boolean includeArea;
    @XmlAttribute(name = "filterGroup", required = true)
    protected boolean filterGroup;
    @XmlAttribute(name = "temp")
    protected Boolean temp;
    /**
     * Gets the value of the layerId property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getLayerId() {
        return layerId;
    }
    /**
     * Sets the value of the layerId property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setLayerId(String value) {
        this.layerId = value;
    }
    /** Returns true when the layerId attribute has been set. */
    public boolean isSetLayerId() {
        return (this.layerId!= null);
    }
    /**
     * Gets the value of the areaId property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getAreaId() {
        return areaId;
    }
    /**
     * Sets the value of the areaId property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setAreaId(String value) {
        this.areaId = value;
    }
    /** Returns true when the areaId attribute has been set. */
    public boolean isSetAreaId() {
        return (this.areaId!= null);
    }
    /**
     * Gets the value of the filterId property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getFilterId() {
        return filterId;
    }
    /**
     * Sets the value of the filterId property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setFilterId(String value) {
        this.filterId = value;
    }
    /** Returns true when the optional filterId attribute has been set. */
    public boolean isSetFilterId() {
        return (this.filterId!= null);
    }
    /**
     * Gets the value of the includeArea property.
     *
     */
    public boolean isIncludeArea() {
        return includeArea;
    }
    /**
     * Sets the value of the includeArea property.
     *
     */
    public void setIncludeArea(boolean value) {
        this.includeArea = value;
    }
    /** Always true: includeArea is a required primitive attribute. */
    public boolean isSetIncludeArea() {
        return true;
    }
    /**
     * Gets the value of the filterGroup property.
     *
     */
    public boolean isFilterGroup() {
        return filterGroup;
    }
    /**
     * Sets the value of the filterGroup property.
     *
     */
    public void setFilterGroup(boolean value) {
        this.filterGroup = value;
    }
    /** Always true: filterGroup is a required primitive attribute. */
    public boolean isSetFilterGroup() {
        return true;
    }
    /**
     * Gets the value of the temp property.
     *
     * @return
     *     possible object is
     *     {@link Boolean }
     *
     */
    public boolean isTemp() {
        // NOTE(review): this auto-unboxes the optional Boolean field and
        // throws NullPointerException when temp is unset — callers should
        // check isSetTemp() first. Confirm before altering generated code.
        return temp;
    }
    /**
     * Sets the value of the temp property.
     *
     * @param value
     *     allowed object is
     *     {@link Boolean }
     *
     */
    public void setTemp(boolean value) {
        this.temp = value;
    }
    /** Returns true when the optional temp attribute has been set. */
    public boolean isSetTemp() {
        return (this.temp!= null);
    }
    /** Clears the optional temp attribute back to "unset". */
    public void unsetTemp() {
        this.temp = null;
    }
}
|
import styles from './Loading.module.css'
export default () => (
<div className={styles.ldsRing}>
<div></div>
<div></div>
<div></div>
<div></div>
</div>
) |
<reponame>DSM-GRAM/The-Manual-Backend
from flask import Response, abort, request
from flask_restful import Api
from flask_jwt_extended import create_access_token, create_refresh_token
from werkzeug.security import check_password_hash
from app.views import BaseResource, api_blueprint
from app.models.account import UserModel as User
api = Api(api_blueprint)
api.prefix = ''
@api.resource('/login')
class Login(BaseResource):
    def post(self):
        """Log in: validate the posted id/pw pair and issue JWT tokens.

        Responds 400 for a malformed payload and 401 when the user does not
        exist or the password does not match.
        """
        payload = request.json
        # BUG FIX: a missing/non-JSON body or absent keys previously raised an
        # unhandled TypeError/KeyError (HTTP 500); reject them explicitly.
        if not payload or 'id' not in payload or 'pw' not in payload:
            abort(400)
        user_id = payload['id']
        pw = payload['pw']
        # BUG FIX: the original did `user = users[0]` BEFORE checking the
        # result set, which raised IndexError (HTTP 500) for unknown ids;
        # .first() returns None when there is no match.
        user = User.select().where(User.id == user_id).first()
        if not user:
            abort(401)
        if not check_password_hash(user.pw_hashed, pw):
            abort(401)
        return {
            'access_token': create_access_token(identity=user_id),
            'refresh_token': create_refresh_token(identity=user_id)
        }
|
import paho.mqtt.client as mqtt

# Minimal MQTT-over-WebSockets subscriber: connects to a local broker and
# prints every message on every topic.

def on_connect(client, userdata, flags, respons_code):
    # Subscribe to all topics once the connection is acknowledged.
    print("connected")
    client.subscribe("#")

def on_message(client, userdata, msg):
    # Print the topic and the raw payload bytes of each incoming message.
    print(msg.topic)
    print(str(msg.payload))

client = mqtt.Client(transport="websockets")
client.on_connect = on_connect
client.on_message = on_message
client.connect("127.0.0.1", 60805, 60)
client.loop_forever()  # blocks forever, dispatching the callbacks above
|
<filename>lang/py/pylib/10/threading/threading_event.py
#!/usr/bin/env python
# encoding: UTF-8
# Demonstrates threading.Event: one thread blocks on wait(), another polls
# with a timeout.
import threading
import logging
import time

# Include the thread name in every log line so the interleaving is visible.
logging.basicConfig(level=logging.DEBUG,
                    format='(%(threadName)-10s) %(message)s',
                    )
def wait_for_event(e):
    """Block indefinitely until ``e`` is set, logging before and after."""
    logging.debug('wait_for_event starting')
    result = e.wait()
    logging.debug('event set:%s', result)
def wait_for_event_timeout(e, t):
    """Poll ``e`` until it is set, doing other work between polls.

    e: threading.Event to wait on.
    t: timeout in seconds for each individual wait() call.
    """
    # is_set() is the PEP 8 name; the camelCase isSet() alias is deprecated.
    while not e.is_set():
        logging.debug('wait_for_event_timeout starting')
        event_is_set = e.wait(t)
        if event_is_set:
            logging.debug('processing event')
        else:
            logging.debug('do other things')
# Start one blocking waiter and one polling waiter, then set the event after
# a delay so both code paths in the functions above are exercised.
e = threading.Event()
t1 = threading.Thread(name='block', target=wait_for_event, args=(e,))
t1.start()
t2 = threading.Thread(name='non-block', target=wait_for_event_timeout, args=(e, 2))
t2.start()
logging.debug('Waiting before calling Event.set()')
time.sleep(10)
e.set()
logging.debug('Event is set')
|
<gh_stars>0
import * as tslib_1 from "tslib";
import { ToneEvent } from "./ToneEvent";
import { ToneWithContext } from "../core/context/ToneWithContext";
import { optionsFromArguments } from "../core/util/Defaults";
import { noOp } from "../core/util/Interface";
/**
* Loop creates a looped callback at the
* specified interval. The callback can be
* started, stopped and scheduled along
* the Transport's timeline.
* @example
* import { Loop, Transport } from "tone";
* const loop = new Loop((time) => {
* // triggered every eighth note.
* console.log(time);
* }, "8n").start(0);
* Transport.start();
* @category Event
*/
// NOTE: compiled JavaScript emitted from the TypeScript Loop class (see the
// sourceMappingURL footer); fix bugs in the TS source, not in this file.
var Loop = /** @class */ (function (_super) {
    tslib_1.__extends(Loop, _super);
    function Loop() {
        var _this = _super.call(this, optionsFromArguments(Loop.getDefaults(), arguments, ["callback", "interval"])) || this;
        _this.name = "Loop";
        var options = optionsFromArguments(Loop.getDefaults(), arguments, ["callback", "interval"]);
        // All scheduling is delegated to an internal ToneEvent with loop: true.
        _this._event = new ToneEvent({
            context: _this.context,
            callback: _this._tick.bind(_this),
            loop: true,
            loopEnd: options.interval,
            playbackRate: options.playbackRate,
            probability: options.probability
        });
        _this.callback = options.callback;
        // set the iterations
        _this.iterations = options.iterations;
        return _this;
    }
    Loop.getDefaults = function () {
        return Object.assign(ToneWithContext.getDefaults(), {
            interval: "4n",
            callback: noOp,
            playbackRate: 1,
            iterations: Infinity,
            probability: 1,
            mute: false,
            humanize: false
        });
    };
    /**
     * Start the loop at the specified time along the Transport's timeline.
     * @param time When to start the Loop.
     */
    Loop.prototype.start = function (time) {
        this._event.start(time);
        return this;
    };
    /**
     * Stop the loop at the given time.
     * @param time When to stop the Loop.
     */
    Loop.prototype.stop = function (time) {
        this._event.stop(time);
        return this;
    };
    /**
     * Cancel all scheduled events greater than or equal to the given time
     * @param time The time after which events will be cancel.
     */
    Loop.prototype.cancel = function (time) {
        this._event.cancel(time);
        return this;
    };
    /**
     * Internal function called when the notes should be called
     * @param time The time the event occurs
     */
    Loop.prototype._tick = function (time) {
        this.callback(time);
    };
    Object.defineProperty(Loop.prototype, "state", {
        /**
         * The state of the Loop, either started or stopped.
         */
        get: function () {
            return this._event.state;
        },
        enumerable: true,
        configurable: true
    });
    Object.defineProperty(Loop.prototype, "progress", {
        /**
         * The progress of the loop as a value between 0-1. 0, when the loop is stopped or done iterating.
         */
        get: function () {
            return this._event.progress;
        },
        enumerable: true,
        configurable: true
    });
    Object.defineProperty(Loop.prototype, "interval", {
        /**
         * The time between successive callbacks.
         * @example
         * import { Loop, Transport } from "tone";
         * const loop = new Loop();
         * loop.interval = "8n"; // loop every 8n
         */
        get: function () {
            return this._event.loopEnd;
        },
        set: function (interval) {
            this._event.loopEnd = interval;
        },
        enumerable: true,
        configurable: true
    });
    Object.defineProperty(Loop.prototype, "playbackRate", {
        /**
         * The playback rate of the loop. The normal playback rate is 1 (no change).
         * A `playbackRate` of 2 would be twice as fast.
         */
        get: function () {
            return this._event.playbackRate;
        },
        set: function (rate) {
            this._event.playbackRate = rate;
        },
        enumerable: true,
        configurable: true
    });
    Object.defineProperty(Loop.prototype, "humanize", {
        /**
         * Random variation +/-0.01s to the scheduled time.
         * Or give it a time value which it will randomize by.
         */
        get: function () {
            return this._event.humanize;
        },
        set: function (variation) {
            this._event.humanize = variation;
        },
        enumerable: true,
        configurable: true
    });
    Object.defineProperty(Loop.prototype, "probability", {
        /**
         * The probably of the callback being invoked.
         */
        get: function () {
            return this._event.probability;
        },
        set: function (prob) {
            this._event.probability = prob;
        },
        enumerable: true,
        configurable: true
    });
    Object.defineProperty(Loop.prototype, "mute", {
        /**
         * Muting the Loop means that no callbacks are invoked.
         */
        get: function () {
            return this._event.mute;
        },
        set: function (mute) {
            this._event.mute = mute;
        },
        enumerable: true,
        configurable: true
    });
    Object.defineProperty(Loop.prototype, "iterations", {
        /**
         * The number of iterations of the loop. The default value is `Infinity` (loop forever).
         */
        get: function () {
            // ToneEvent stores "loop forever" as loop === true; translate
            // that back to Infinity (and vice versa in the setter below).
            if (this._event.loop === true) {
                return Infinity;
            }
            else {
                return this._event.loop;
            }
        },
        set: function (iters) {
            if (iters === Infinity) {
                this._event.loop = true;
            }
            else {
                this._event.loop = iters;
            }
        },
        enumerable: true,
        configurable: true
    });
    Loop.prototype.dispose = function () {
        _super.prototype.dispose.call(this);
        this._event.dispose();
        return this;
    };
    return Loop;
}(ToneWithContext));
export { Loop };
//# sourceMappingURL=Loop.js.map |
<gh_stars>1-10
package crypto
import (
"bytes"
"encoding/base64"
"io/ioutil"
"strings"
"testing"
)
// readFile loads the named fixture file, failing the test on any error.
func readFile(t *testing.T, name string) []byte {
	content, err := ioutil.ReadFile(name)
	if err != nil {
		t.Fatalf("could not get file: %s", err)
	}
	return content
}
// readB64 loads the named fixture file and base64-decodes its contents,
// failing the test if the file is missing or not valid base64.
func readB64(t *testing.T, name string) []byte {
	raw := readFile(t, name)
	out, err := base64.StdEncoding.DecodeString(string(raw))
	if err != nil {
		t.Fatalf("could not parse file: %s", err)
	}
	return out
}
// readTestKeys returns the base64-decoded key pair stored under testdata/.
func readTestKeys(t *testing.T) (pubKey PublicKey, privKey PrivateKey) {
	pubKey = readB64(t, "testdata/pubkey.txt")
	privKey = readB64(t, "testdata/privkey.txt")
	return
}
// TestEncryptAndDecryptStream round-trips a short message through
// EncryptStream/DecryptStream and checks the plaintext survives.
func TestEncryptAndDecryptStream(t *testing.T) {
	pubKey, privKey := readTestKeys(t)
	var encrypted bytes.Buffer
	if err := EncryptStream(bytes.NewBufferString("hello!"), &encrypted, pubKey); err != nil {
		t.Fatalf("could not encrypt our virtual buffer: %s", err)
	}
	var plain bytes.Buffer
	if err := DecryptStream(&encrypted, &plain, privKey); err != nil {
		t.Fatalf("decrypt fail: %s", err)
	}
	if !bytes.Equal(plain.Bytes(), []byte("hello!")) {
		t.Fatalf("decrypted data differ")
	}
}
// TestDecryptDataWithGoodHash decrypts a fixture whose integrity hash is
// valid and verifies the recovered plaintext.
func TestDecryptDataWithGoodHash(t *testing.T) {
	_, privKey := readTestKeys(t)
	input := bytes.NewBuffer(readFile(t, "testdata/encrypted_hash_ok.bin"))
	var plain bytes.Buffer
	if err := DecryptStream(input, &plain, privKey); err != nil {
		t.Fatalf("decrypt fail: %s", err)
	}
	if !bytes.Equal(plain.Bytes(), []byte("hello!\n")) {
		t.Fatalf("decrypted data differ")
	}
}
// TestDecryptDataWithBadHash decrypts a fixture with a corrupted integrity
// hash and expects a verification error.
func TestDecryptDataWithBadHash(t *testing.T) {
	_, privKey := readTestKeys(t)
	input := bytes.NewBuffer(readFile(t, "testdata/encrypted_hash_bad.bin"))
	var plain bytes.Buffer
	err := DecryptStream(input, &plain, privKey)
	if err == nil {
		t.Fatalf("decrypt should report fail but it didn't")
	}
	if !strings.Contains(err.Error(), "could not verify") {
		t.Fatalf("it should report verifycation error")
	}
}
|
<gh_stars>1-10
/* Using MCP9808 ambient temperatute sensor */
'use strict';
const r = require('array-gpio');
var i2c = r.I2C();
/* set data transfer speed to 200 kHz (matches the 200000 below) */
i2c.setTransferSpeed(200000);
/* MCP9808 device address */
let slave = 0x18;
/* access MCP9808 device */
i2c.selectSlave(slave);
/* setup write and read data buffer */
const wbuf = Buffer.alloc(16); // write buffer
const rbuf = Buffer.alloc(16); // read buffer
/* Convert the raw ambient-temperature register bytes currently held in
   `rbuf` (rbuf[0] = upper byte, rbuf[1] = lower byte) into degrees
   Celsius, following the conversion given in the MCP9808 datasheet. */
function computeTemp(){
    const upper = rbuf[0] & 0x1F; // Clear the three alert flag bits
    const lower = rbuf[1];
    // Sign bit (0x10) set means the temperature is below 0 degC
    if ((upper & 0x10) === 0x10){
        return 256 - (((upper & 0x0F) * 16) + (lower / 16));
    }
    // Temperature at or above 0 degC
    return (upper * 16) + (lower / 16);
}
/**
 * Round a number to a fixed count of decimal places.
 * @param {number} number - value to round
 * @param {number} precision - decimal places to keep
 * @returns {number} value rounded to `precision` decimals
 */
function precisionRound(number, precision) {
    const factor = 10 ** precision;
    return Math.round(number * factor) / factor;
}
/* get temperature reading */
// Performs one synchronous I2C transaction sequence: writes the config
// register, points at the ambient-temp register, reads 2 bytes into rbuf,
// and returns the temperature in degC rounded to 2 decimals.
exports.getTemp = function () {
/* access the internal 16-bit configuration register within MCP9808 */
wbuf[0] = 0x01; // address of configuration register
wbuf[1] = 0x02; // register upper byte, THYST set with +1.5 C
wbuf[2] = 0x00; // register lower byte (power up defaults)
i2c.write(wbuf, 3);
/* access the internal 16-bit ambient temp register within MCP9808 */
wbuf[0] = 0x05; // address of ambient temperature register
i2c.write(wbuf, 1);
/* read content of ambient temp register */
i2c.read(rbuf, 2); // fills rbuf[0..1], consumed by computeTemp()
/* function call to compute temperature */
var T = computeTemp();
var t = precisionRound(T, 2);
return t;
}
// Release the I2C peripheral; call once the caller is done reading.
exports.close = function(){
console.log('i2c closed');
i2c.end();
}
/*process.on('exit', (code) => {
console.log('i2c closed on process exit');
i2c.end();
});*/
|
<filename>src/test/java/pro/jiefzz/demo/uow/demo1/aggrGenerate/BankAccountGeneration.java
package pro.jiefzz.demo.uow.demo1.aggrGenerate;
import java.io.IOException;
import com.github.kimffy24.uow.util.GenerateSqlMapperUtil;
import pro.jiefzz.demo.uowdemo.aggr.demo.BankAccount;
@Deprecated
public class BankAccountGeneration {
    // One-off, dev-time generator: writes a SQL mapper for the BankAccount
    // aggregate mapped to the "bank_account" table.
    public static void main(String[] args) {
        try {
            GenerateSqlMapperUtil.generateSqlMapper(
                    BankAccount.class,
                    "bank_account");
        } catch (IOException e) {
            // Best-effort tool; just report the failure and exit.
            e.printStackTrace();
        }
    }
}
|
#!/bin/bash
# Run one Pythia8 ttbar event-generation job (seeded by the first CLI
# argument) and post-process the output with DelphesProMC.
set +e
NUM=$1
XDIR="out/pythia8_ttbar"
OUTROOT="tev14_pythia8_ttbar_$NUM.root"
OUT="tev14_pythia8_ttbar_$NUM.promc"
LOG="logfile_$NUM.txt"
# Remove stale outputs from a previous run with the same job number.
rm -f $XDIR/$OUTROOT $XDIR/$OUT
source /opt/hepsim.sh
# Make a per-job copy of the steering file and append a unique random seed.
cp tev14_pythia8_ttbar.py tev14_pythia8_ttbar.py.${NUM}
echo "Random:seed=${NUM}" >> tev14_pythia8_ttbar.py.${NUM}
# Generate events, then run the Delphes detector simulation; both logs
# accumulate in $XDIR/$LOG.
./main.exe tev14_pythia8_ttbar.py.${NUM} $XDIR/$OUT > $XDIR/$LOG 2>&1
/opt/hepsim/delphes-local/DelphesProMC delphes_card_CMS_PileUp.tcl $XDIR/$OUTROOT $XDIR/$OUT >> $XDIR/$LOG 2>&1
|
import React, {PureComponent} from 'react';
import AssetGrid from "./Media/AssetGrid";
import {oauthClient} from "../oauth";
import config from "../config";
import CollectionsPanel from "./Media/CollectionsPanel";
import MediaSelection from "./Media/MediaSelection";
import {UserContext} from "./Security/UserContext";
import MainAppBar from "./Layout/MainAppBar";
// Local UI state for the root component.
type State = {
    searchQuery: string; // current search text, forwarded to AssetGrid
    hideMenu: boolean;   // whether the left collections panel is hidden
}
// Root application component: renders the top app bar, the collections
// side panel and the asset grid, and wires search / menu / logout.
export default class App extends PureComponent<{
    authenticated: boolean,
}, State> {
    // Exposes the signed-in user via this.context.user.
    static contextType = UserContext;
    context: React.ContextType<typeof UserContext>;
    state: State = {
        searchQuery: '',
        hideMenu: false,
    }
    // Logs out of the local OAuth client and, unless a direct login form
    // is configured, redirects the browser to the auth service's logout
    // endpoint so the remote session ends too.
    logout = () => {
        oauthClient.logout();
        if (!config.isDirectLoginForm()) {
            document.location.href = `${config.getAuthBaseUrl()}/security/logout?r=${encodeURIComponent(document.location.origin)}`;
        }
    }
    // Controlled-input handler for the app-bar search box.
    onSearchQueryChange = (value: string) => {
        this.setState({searchQuery: value});
    }
    // Toggles visibility of the left collections menu.
    toggleMenu = () => {
        this.setState(prevState => ({
            hideMenu: !prevState.hideMenu,
        }))
    }
    render() {
        return <>
            <MainAppBar
                toggleMenu={this.toggleMenu}
                title={'Databox Client.'}
                onLogout={this.logout}
                username={this.context.user ? this.context.user.username : undefined}
                onSearchQueryChange={this.onSearchQueryChange}
                searchQuery={this.state.searchQuery}
            />
            <MediaSelection>
                <div className="main-layout">
                    {!this.state.hideMenu && <div className="main-left-menu">
                        <CollectionsPanel/>
                    </div>}
                    <div className="main-content">
                        <AssetGrid
                            query={this.state.searchQuery}
                        />
                    </div>
                </div>
            </MediaSelection>
        </>
    }
}
|
#!/bin/bash -f
#*********************************************************************************************************
# Vivado (TM) v2021.1 (64-bit)
#
# Filename : design_reciever.sh
# Simulator : Mentor Graphics Questa Advanced Simulator
# Description : Simulation script for compiling, elaborating and verifying the project source files.
# The script will automatically create the design libraries sub-directories in the run
# directory, add the library logical mappings in the simulator setup file, create default
# 'do/prj' file, execute compilation, elaboration and simulation steps.
#
# Generated by Vivado on Fri Jan 14 22:31:43 +0800 2022
# SW Build 3247384 on Thu Jun 10 19:36:33 MDT 2021
#
# Copyright 1986-2021 Xilinx, Inc. All Rights Reserved.
#
# usage: design_reciever.sh [-help]
# usage: design_reciever.sh [-lib_map_path]
# usage: design_reciever.sh [-noclean_files]
# usage: design_reciever.sh [-reset_run]
#
# Prerequisite:- To compile and run simulation, you must compile the Xilinx simulation libraries using the
# 'compile_simlib' TCL command. For more information about this command, run 'compile_simlib -help' in the
# Vivado Tcl Shell. Once the libraries have been compiled successfully, specify the -lib_map_path switch
# that points to these libraries and rerun export_simulation. For more information about this switch please
# type 'export_simulation -help' in the Tcl shell.
#
# You can also point to the simulation libraries by either replacing the <SPECIFY_COMPILED_LIB_PATH> in this
# script with the compiled library directory path or specify this path with the '-lib_map_path' switch when
# executing this script. Please type 'design_reciever.sh -help' for more information.
#
# Additional references - 'Xilinx Vivado Design Suite User Guide:Logic simulation (UG900)'
#
#*********************************************************************************************************
# Script info
echo -e "design_reciever.sh - Script generated by export_simulation (Vivado v2021.1 (64-bit)-id)\n"
# Main steps: validate args, prepare the run area, then the three
# simulation phases in order.
run()
{
check_args $# $1
setup $1 $2
compile
elaborate
simulate
}
# RUN_STEP: <compile>
compile()
{
# NOTE(review): compile appends to its log (tee -a) while elaborate
# truncates (tee) -- confirm the asymmetry is intentional.
source compile.do 2>&1 | tee -a compile.log
}
# RUN_STEP: <elaborate>
elaborate()
{
source elaborate.do 2>&1 | tee elaborate.log
}
# RUN_STEP: <simulate>
simulate()
{
# Batch-mode Questa run driven by simulate.do.
vsim -c -do "do {simulate.do}" -l simulate.log
}
# STEP: setup
# Dispatch on the first CLI option; always finishes by (re)creating the
# design library directory.
setup()
{
case $1 in
"-lib_map_path" )
# -lib_map_path requires a directory argument.
if [[ ($2 == "") ]]; then
echo -e "ERROR: Simulation library directory path not specified (type \"./design_reciever.sh -help\" for more information)\n"
exit 1
fi
copy_setup_file $2
;;
"-reset_run" )
reset_run
echo -e "INFO: Simulation run files deleted.\n"
exit 0
;;
"-noclean_files" )
# do not remove previous data
;;
* )
# Default: copy modelsim.ini from the built-in library path.
copy_setup_file $2
esac
create_lib_dir
# Add any setup/initialization commands here:-
# <user specific commands>
}
# Copy modelsim.ini file
# Copies modelsim.ini from $1 if given, otherwise from the hard-coded
# compile_simlib path generated by Vivado on the original machine.
copy_setup_file()
{
file="modelsim.ini"
if [[ ($1 != "") ]]; then
lib_map_path="$1"
else
# NOTE(review): machine-specific default path; will not exist elsewhere.
lib_map_path="G:/Chiro/Programs/urllc-demo-pynq/urllc-demo-vivado/urllc-demo-vivado.cache/compile_simlib/questa"
fi
if [[ ($lib_map_path != "") ]]; then
src_file="$lib_map_path/$file"
cp $src_file .
fi
}
# Create design library directory
# Always starts from a clean questa_lib/ (removes any existing one).
create_lib_dir()
{
lib_dir="questa_lib"
if [[ -e $lib_dir ]]; then
rm -rf $lib_dir
fi
mkdir $lib_dir
}
# Delete generated data from the previous run
# Removes logs, waveform data and the library dir, then recreates the
# empty library dir.
reset_run()
{
files_to_remove=(compile.log elaborate.log simulate.log vsim.wlf questa_lib)
for (( i=0; i<${#files_to_remove[*]}; i++ )); do
file="${files_to_remove[i]}"
if [[ -e $file ]]; then
rm -rf $file
fi
done
create_lib_dir
}
# Check command line arguments
# Called as `check_args $# $1`: here $1 is the argument count and $2 the
# first CLI option. Rejects unknown single options; prints usage on -help.
check_args()
{
if [[ ($1 == 1 ) && ($2 != "-lib_map_path" && $2 != "-noclean_files" && $2 != "-reset_run" && $2 != "-help" && $2 != "-h") ]]; then
echo -e "ERROR: Unknown option specified '$2' (type \"./design_reciever.sh -help\" for more information)\n"
exit 1
fi
if [[ ($2 == "-help" || $2 == "-h") ]]; then
usage
fi
}
# Script usage
# Prints help text and exits non-zero (so `-help` also stops the run).
usage()
{
msg="Usage: design_reciever.sh [-help]\n\
Usage: design_reciever.sh [-lib_map_path]\n\
Usage: design_reciever.sh [-reset_run]\n\
Usage: design_reciever.sh [-noclean_files]\n\n\
[-help] -- Print help information for this script\n\n\
[-lib_map_path <path>] -- Compiled simulation library directory path. The simulation library is compiled\n\
using the compile_simlib tcl command. Please see 'compile_simlib -help' for more information.\n\n\
[-reset_run] -- Recreate simulator setup files and library mappings for a clean run. The generated files\n\
from the previous run will be removed. If you don't want to remove the simulator generated files, use the\n\
-noclean_files switch.\n\n\
[-noclean_files] -- Reset previous run, but do not remove simulator generated files from the previous run.\n\n"
echo -e $msg
exit 1
}
# Launch script: forward the (up to two) CLI arguments to run().
run $1 $2
|
def evaluateRPN(expression):
    """Evaluate an expression given in Reverse Polish Notation.

    Args:
        expression: iterable of string tokens, each either an operator
            ('+', '-', '*', '/') or a numeric literal parseable by float().

    Returns:
        The value of the expression as a float.

    Raises:
        ValueError: if the expression is empty or malformed (an operator
            without two operands, leftover operands, or a non-numeric
            token). The original silently returned a partial result.
        ZeroDivisionError: on division by zero.
    """
    # Dispatch table instead of an if/elif chain; note operand order:
    # the second pop is the LEFT operand ("a b -" means a - b).
    operators = {
        '+': lambda a, b: a + b,
        '-': lambda a, b: a - b,
        '*': lambda a, b: a * b,
        '/': lambda a, b: a / b,
    }
    stack = []
    for token in expression:
        if token in operators:
            if len(stack) < 2:
                raise ValueError("malformed RPN expression: operator %r lacks operands" % token)
            right = stack.pop()
            left = stack.pop()
            stack.append(operators[token](left, right))
        else:
            stack.append(float(token))
    if len(stack) != 1:
        raise ValueError("malformed RPN expression: %d values left on stack" % len(stack))
    return stack[-1]
# Demo: ((2 + 1) * 3) written in Reverse Polish Notation.
expression = ["2", "1", "+", "3", "*"]
result = evaluateRPN(expression)
print(result) # Outputs 9.0
<filename>src/tools/web/kdb.js<gh_stars>0
/**
* @file
*
* @brief small library to access Elektra’s kdb via node.js
*
* @copyright BSD License (see LICENSE.md or https://www.libelektra.org)
*/
const { exec } = require('child_process')
const { readFileSync, unlink } = require('fs')
// kdb binary to invoke; overridable via the KDB environment variable.
const KDB_COMMAND = process.env.KDB || 'kdb'
// constants
// Marker string kdb prints when a key does not exist (treated as "no value").
const ERR_KEY_NOT_FOUND = 'Did not find key'
// Patterns used by KDBError to pull fields out of kdb's error output.
const ERROR_REGEX = /Sorry, module.*?([0-9]+):/
const AT_REGEX = /At: (.*)$/
const MOUNTPOINT_REGEX = /Mountpoint: (.*)$/
const CONFIGFILE_REGEX = /Configfile: (.*)$/
// KDBError
// Parses kdb's multi-line stderr output into a structured error with
// `num`, `reason`, `at`, `mountpoint` and `configfile` fields; the raw
// text is kept in `details`.
function KDBError (message) {
this.name = 'Elektra Error'
let isError = false
this.details = message
// The line after the "Sorry, module ..." header continues the reason.
let isNextMessageReason = false;
for (let line of message.split('\n')) {
let res
if (isNextMessageReason) {
this.reason = this.reason + '\n' + line;
isNextMessageReason = false;
}
if (res = line.match(ERROR_REGEX)) {
// Error header found: capture the numeric error code.
this.num = Number(res[1]);
this.reason = line;
isError = true;
isNextMessageReason = true;
} else if (isError) {
// After the header, pick up the detail fields kdb prints.
if (res = line.match(AT_REGEX)) {
this.at = res[1]
} else if (res = line.match(MOUNTPOINT_REGEX)) {
this.mountpoint = res[1]
} else if (res = line.match(CONFIGFILE_REGEX)) {
this.configfile = res[1]
}
}
}
// Prefer the parsed reason as the Error message; fall back to raw text.
if (this.reason) {
this.message = this.reason
} else {
this.message = message || ''
}
}
// NOTE(review): sharing Error.prototype directly (instead of
// Object.create(Error.prototype)) makes instanceof work but mutating the
// prototype would affect all Errors -- confirm this is intentional.
KDBError.prototype = Error.prototype
// Drop the final character of a string (used to strip the trailing
// newline from kdb CLI output). Returns '' for empty input.
const trimNewline = (str) => str.slice(0, -1)
// execute a script while catching and parsing errors
// Resolves with trimmed stdout, '' for empty output, or undefined when
// kdb reports "key not found"; rejects with a KDBError otherwise.
const safeExec = (script) => new Promise((resolve, reject) =>
exec(script, { maxBuffer: Infinity }, (err, stdout, stderr) => {
if (err) {
const errors = err.message.split('\n')
// ignore error if it's "key not found"
// (err.message's first line is exec's own prefix, hence index 1)
if (errors.length > 1 && errors[1].startsWith(ERR_KEY_NOT_FOUND)) {
return resolve()
} else {
return reject(new KDBError(err.message))
}
}
if (stderr) {
if (stderr !== ERR_KEY_NOT_FOUND) {
return reject(new KDBError(stderr))
} else {
return resolve() // no key found, return no value
}
}
// kdb terminates output with a newline; strip it.
const result = trimNewline(stdout)
if (!result) {
return resolve('') // empty result, return empty string
}
return resolve(result)
})
)
// escape strings by surrounding them with ""
// Tag function for template literals, e.g. escapeValues`kdb get ${path}`.
const escapeValues = (template, ...values) =>
  template.reduce((acc, part, i) => {
    /*
      Explanation of regular expression:
      - $1 `(\\\\)*` Matches an even number of _backslashes_
      - $4 Matches one _quote_ when there was an odd number of backslashes
      - $5 Matches one _quote_ when there was an even number of backslashes
      For instance in `\\\\\"`, $1 is `\\\\`, $4 is `"` and $5 is an empty string.
      So `\\\\\"` gets replaced to itself.
      In case of an even number of backslashes one backslash is added.
      (source: @krit0n - https://github.com/ElektraInitiative/libelektra/pull/983#discussion_r83965059)
    */
    const raw = values[i - 1]
    // BUG FIX: the original called .replace() BEFORE checking typeof, so a
    // non-string value (e.g. a number) threw a TypeError and the typeof
    // guard was dead code. Only strings are escaped and quoted; any other
    // value is stringified and interpolated as-is.
    let val
    if (typeof raw === 'string') {
      val = '"' + raw
        .replace(/`/g, '\\`') // escape backticks
        .replace(/((\\\\)*)(\\(")|("))/g, '$1\\$4$5') + '"'
    } else {
      val = String(raw)
    }
    return acc + val + part
  })
// Extracts "x.y.z" from `kdb --version` output.
const ELEKTRA_VERSION_REGEX = /KDB_VERSION\:\ ([0-9]+\.[0-9]+\.[0-9]+)\n/
// get Elektra version
// Resolves to { version, major, minor, micro }; rejects if the output
// does not match the expected format.
const version = () =>
safeExec(`${KDB_COMMAND} --version`)
.then(output => { // parse result
const matches = ELEKTRA_VERSION_REGEX.exec(output)
if (!matches) {
throw new Error('invalid version: ' + output)
}
return matches
})
.then(matches => matches.length > 1 && matches[1]) // select version from matches
.then(fullVersion => {
// Split "x.y.z" into its numeric components.
const splitVersions = fullVersion.split('.')
return {
version: fullVersion,
major: Number(splitVersions[0]),
minor: Number(splitVersions[1]),
micro: Number(splitVersions[2])
}
})
// list available paths under a given `path`
// (`-0` makes kdb emit NUL-separated output, split below)
const ls = (path) =>
safeExec(escapeValues`${KDB_COMMAND} ls -0 ${path}`)
.then(stdout => stdout && stdout.split('\0'))
// find paths given a search query; always resolves to an array
const find = (query) =>
safeExec(escapeValues`${KDB_COMMAND} find -0 ${query}`)
.then(stdout => stdout && stdout.split('\0'))
.then(res => res || [])
// get value from given `path` (undefined when the key does not exist)
const get = (path) =>
safeExec(escapeValues`${KDB_COMMAND} get ${path}`)
// set value at given `path`
const set = (path, value) =>
safeExec(escapeValues`${KDB_COMMAND} set -vd ${path} -- ${value}`)
// move value from given `path` to `destination` (recursive)
const mv = (path, destination) =>
safeExec(escapeValues`${KDB_COMMAND} mv -r ${path} ${destination}`)
// copy value from given `path` to `destination` (recursive)
const cp = (path, destination) =>
safeExec(escapeValues`${KDB_COMMAND} cp -r ${path} ${destination}`)
// remove single value at `path` (non-recursive; see rm below)
const rmSingle = (path) =>
safeExec(escapeValues`${KDB_COMMAND} rm ${path}`)
// remove value at given `path`
// Removes every key under `path` individually, skipping keys marked with
// metadata restrict/remove=1 and elektra-web's own config subtree.
const rm = (path) => {
return ls(path)
.then(paths => Promise.all(
paths.map(p => {
if (p.startsWith('user/sw/elektra/web')) return { p, r: '1' } // always restricted
return getmeta(p, 'restrict/remove')
.then(r => ({ p, r }))
.catch(err => ({ p, r: '0' })) // restrict/remove key not present
})
))
.then(restricted => Promise.all(
restricted.map(({ p, r }) => {
// Only delete keys that are not flagged as restricted.
if (r !== '1') return rmSingle(p)
})
))
}
// list meta values at given `path` (NUL-separated output split to array)
const lsmeta = (path) =>
safeExec(escapeValues`${KDB_COMMAND} lsmeta -0 ${path}`)
.then(stdout => stdout && stdout.split('\0'))
// get meta value from given `path`
const getmeta = (path, meta) =>
safeExec(escapeValues`${KDB_COMMAND} getmeta ${path} ${meta}`)
// set meta value at given `path`
const setmeta = (path, meta, value) =>
safeExec(escapeValues`${KDB_COMMAND} setmeta ${path} ${meta} ${value}`)
// remove meta value at given `path`
const rmmeta = (path, meta) =>
safeExec(escapeValues`${KDB_COMMAND} rmmeta ${path} ${meta}`)
// get all metavalues for given `path`
// Resolves to a single { metaName: value, ... } object, or a falsy value
// when the key has no metadata.
const getAllMeta = (path) =>
lsmeta(path)
.then(metaValues => metaValues && Promise.all(
metaValues.map(meta =>
getmeta(path, meta).then(val => {
return { [meta]: val }
})
)
))
// merge objects
// (Object.assign.apply merges all single-entry objects into the first one)
.then(resolvedMetaValues =>
resolvedMetaValues && Object.assign.apply(Object, resolvedMetaValues)
)
// Unique temp file used to round-trip data through kdb's yajl plugin.
const getBufferFile = () => `/tmp/elektra-web.${Date.now()}.buffer.json`
// export javascript object from given `path`
// Exports `path` as JSON to a temp file, parses it, then deletes the
// temp file (deletion failures are only logged).
const _export = (path) => {
const buffer = getBufferFile()
return safeExec(escapeValues`${KDB_COMMAND} export ${path} yajl ${buffer}`)
.then(() => {
const data = JSON.parse(readFileSync(buffer))
unlink(buffer, (err) =>
err && console.error('could not delete buffer file:', err)
)
return data
})
}
// import javascript object at given `path`
// Pipes JSON through the shell into `kdb import`; resolves to the
// re-exported subtree so callers see the persisted state.
// NOTE(review): a single quote inside the JSON would break out of the
// '...' shell quoting here -- verify inputs or escape quotes.
const _import = (path, value) =>
safeExec(
// we can trust JSON.stringify to escape values for us
`echo '${JSON.stringify(value)}' | ` + // pipe json into kdb
escapeValues`${KDB_COMMAND} import ${path} yajl`
).then(result => _export(path))
// get value and available paths under a given `path`
// Resolves to { exists, name, path, ls, value, meta } and, when
// `preload` > 0, recursively attaches direct children as `children`
// (each fetched with preload - 1).
const getAndLs = (path, { preload = 0 }) =>
Promise.all(
[ ls(path), get(path), getAllMeta(path) ] // execute ls and get in parallel
).then(([ lsRes, value, meta ]) => {
let result = { exists: value !== undefined, name: path.split('/').pop(), path, ls: lsRes || [], value, meta }
if (preload > 0 && Array.isArray(lsRes)) {
return Promise.all(lsRes
.filter(p => {
// Keep only direct children: not the node itself and at most
// one path segment deeper.
const isNotSame = p !== path
const isNotDeeplyNested = p.split('/').length <= (path.split('/').length + 1)
return isNotSame && isNotDeeplyNested
})
.map(p =>
getAndLs(p, { preload: preload - 1 })
))
.then(children => {
result.children = children
return result
})
}
return result // return results as object
})
// export kdb functions as `kdb` object
// (`export`/`import` are reserved words, hence the _export/_import aliases)
module.exports = {
version, ls, get, getAndLs, set, mv, cp, rm, export: _export, import: _import,
getmeta, setmeta, rmmeta, lsmeta, getAllMeta, find, KDB_COMMAND,
}
|
#!/bin/bash
# Profile scripts/scratch.py with kernprof (line_profiler) against CMIP6
# inputs of increasing size; each report (stdout+stderr) goes to
# performance_tests_results/.
INPUT_DIR="/data/marble/cmip6/CMIP6/DCPP/IPSL/IPSL-CM6A-LR/dcppC-atl-control/r1i1p1f1/Amon/tas/gr/v20190110"
~/.local/bin/kernprof -l -v scripts/scratch.py ${INPUT_DIR} &> performance_tests_results/small_crunch_profile.txt
INPUT_DIR="/data/marble/cmip6/CMIP6/CMIP/BCC/BCC-CSM2-MR/historical/r1i1p1f1/Amon/tas/gn/v20181126"
~/.local/bin/kernprof -l -v scripts/scratch.py ${INPUT_DIR} &> performance_tests_results/medium_crunch_profile.txt
INPUT_DIR="/data/marble/cmip6/CMIP6/CMIP/BCC/BCC-CSM2-MR/piControl/r1i1p1f1/Amon/tas/gn/v20181016"
# Largest input left disabled; uncomment to include it in the profiling run.
#~/.local/bin/kernprof -l -v scripts/scratch.py ${INPUT_DIR} &> performance_tests_results/big_crunch_profile.txt
|
#!/bin/sh
# Replace this shell with gunicorn serving the WSGI object `APP` from
# app.py: 2 workers, 120 s worker timeout, bound to all interfaces on
# port 5000, each worker recycled after 1000 requests.
exec gunicorn app:APP \
-w 2 -t 120 \
-b 0.0.0.0:5000 \
--max-requests 1000 \
--log-level=info
|
// Fill `arr` with 10 DISTINCT random integers in [0, 100].
int[] arr = new int[10];
Random random = new Random();
// BUG FIX: the original used Arrays.asList(arr).contains(rand). For a
// primitive int[], Arrays.asList produces a one-element List<int[]>, so
// contains(rand) was always false and duplicates were never rejected.
// A HashSet of boxed values gives a correct (and O(1)) membership test.
Set<Integer> used = new HashSet<>();
for (int i = 0; i < 10; i++) {
    int rand = random.nextInt(101);
    while (!used.add(rand)) { // add() returns false if already present
        rand = random.nextInt(101);
    }
    arr[i] = rand;
}
System.out.println(Arrays.toString(arr));
<filename>src/main/java/com/github/thebuildteam/obelisk/registries/ItemRegistry.java
package com.github.thebuildteam.obelisk.registries;
import com.github.thebuildteam.obelisk.Obelisk;
import com.github.thebuildteam.obelisk.lists.OBLBlockList;
import com.github.thebuildteam.obelisk.lists.OBLItemGroups;
import com.github.thebuildteam.obelisk.lists.OBLItemList;
import net.minecraft.util.Identifier;
import net.minecraft.util.registry.Registry;
/**
 * Registers every mod item (and block-item) declared in OBLItemList with
 * the vanilla item registry under the mod's namespace.
 */
public class ItemRegistry {
    /** Must be called once during mod initialization, before items are used. */
    public static void register() {
        // ============== DECORATION ===============
        Registry.register(Registry.ITEM, new Identifier(Obelisk.modid, "earthenware_mug"), OBLItemList.EARTHENWARE_MUG);
        Registry.register(Registry.ITEM, new Identifier(Obelisk.modid, "earthenware_plate"), OBLItemList.EARTHENWARE_PLATE);
        Registry.register(Registry.ITEM, new Identifier(Obelisk.modid, "earthenware_set"), OBLItemList.EARTHENWARE_SET);
        Registry.register(Registry.ITEM, new Identifier(Obelisk.modid, "candle_wallmounted"), OBLItemList.WALLMOUNTED_CANDLESTICK);
        // =============== MISCELLANEOUS ===============
        Registry.register(Registry.ITEM, new Identifier(Obelisk.modid, "merchant_sign"), OBLItemList.MERCHANT_SIGN);
        Registry.register(Registry.ITEM, new Identifier(Obelisk.modid, "copper_coin"), OBLItemList.COPPER_COIN);
        Registry.register(Registry.ITEM, new Identifier(Obelisk.modid, "silver_coin"), OBLItemList.SILVER_COIN);
        Registry.register(Registry.ITEM, new Identifier(Obelisk.modid, "gold_coin"), OBLItemList.GOLD_COIN);
        Registry.register(Registry.ITEM, new Identifier(Obelisk.modid, "earth_ingot"), OBLItemList.EARTH_ELEMENT_INGOT);
        Registry.register(Registry.ITEM, new Identifier(Obelisk.modid, "fire_ingot"), OBLItemList.FIRE_ELEMENT_INGOT);
        // =============== OBELISK BLOCKS ===============
        Registry.register(Registry.ITEM, new Identifier(Obelisk.modid, "earth_obelisk_block"), OBLItemList.EARTH_OBELISK_BLOCK);
        Registry.register(Registry.ITEM, new Identifier(Obelisk.modid, "earth_obelisk_ore"), OBLItemList.EARTH_OBELISK_ORE);
        Registry.register(Registry.ITEM, new Identifier(Obelisk.modid, "earth_obelisk_wall"), OBLItemList.EARTH_OBELISK_WALL);
        Registry.register(Registry.ITEM, new Identifier(Obelisk.modid, "earth_obelisk_slab"), OBLItemList.EARTH_OBELISK_SLAB);
        Registry.register(Registry.ITEM, new Identifier(Obelisk.modid, "earth_obelisk_stairs"), OBLItemList.EARTH_OBELISK_STAIRS);
        Registry.register(Registry.ITEM, new Identifier(Obelisk.modid, "earth_obelisk_fence"), OBLItemList.EARTH_OBELISK_FENCE);
        Registry.register(Registry.ITEM, new Identifier(Obelisk.modid, "fire_obelisk_block"), OBLItemList.FIRE_OBELISK_BLOCK);
        Registry.register(Registry.ITEM, new Identifier(Obelisk.modid, "fire_obelisk_ore"), OBLItemList.FIRE_OBELISK_ORE);
        Registry.register(Registry.ITEM, new Identifier(Obelisk.modid, "fire_obelisk_wall"), OBLItemList.FIRE_OBELISK_WALL);
        Registry.register(Registry.ITEM, new Identifier(Obelisk.modid, "fire_obelisk_slab"), OBLItemList.FIRE_OBELISK_SLAB);
        Registry.register(Registry.ITEM, new Identifier(Obelisk.modid, "fire_obelisk_stairs"), OBLItemList.FIRE_OBELISK_STAIRS);
        Registry.register(Registry.ITEM, new Identifier(Obelisk.modid, "fire_obelisk_fence"), OBLItemList.FIRE_OBELISK_FENCE);
    }
}
const jwt = require('jsonwebtoken');
// Desired JOSE header; HS256 is also jsonwebtoken's default algorithm.
const header = {
alg: 'HS256',
typ: 'JWT'
};
// Registered claims for the token body.
const payload = {
"sub": "1234567890",
"name": "John Doe",
"iat": 1516239022
};
// NOTE(review): hard-coded signing secret -- fine for a demo, but load
// it from configuration/environment in real code.
const secret = "secret";
// NOTE(review): jsonwebtoken normally selects the algorithm via the
// `algorithm` option; `header` merges extra header fields -- confirm the
// produced token carries the intended alg.
const token = jwt.sign(payload,secret,{ header : header });
console.log(token);
require_relative "helper"
class Test100 < Minitest::Test
  def setup
    # Shared fixture setup provided by helper.rb.
    setup_common()
  end
  # --------------------------------
  # Runs diff_asm (from helper.rb) over each selfhost test source;
  # presumably it compares the generated assembly against a reference --
  # see helper.rb for the exact contract.
  def test_selfhost
    files = [
      "addr_deref.pric",
      "if.pric"
    ]
    files.each do |file|
      file_src = project_path("selfhost/test/selfhost/#{file}")
      src = File.read(file_src)
      diff_asm(src, file)
    end
  end
end
|
# Launch the Shamir secret-sharing benchmark script via run.sh on the
# "dot-100000" input configuration.
bash run.sh Scripts/shamir.sh dot-100000
|
<filename>src/me/atog/procore/commandos/MensajeGlobalCommand.java<gh_stars>0
package me.atog.procore.commandos;
import com.sun.istack.internal.NotNull;
import me.atog.procore.ProCorePlugin;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.command.Command;
import org.bukkit.command.CommandExecutor;
import org.bukkit.command.CommandSender;
import org.bukkit.configuration.file.FileConfiguration;
import javax.annotation.Nullable;
/**
 * /broadcast-style command: joins all command arguments into one message,
 * prefixes it with UTILS.BROADCAST_MESSAGE.PREFIX from config.yml,
 * translates '&amp;' color codes and broadcasts it to the whole server.
 */
public class MensajeGlobalCommand
implements CommandExecutor
{
final static @NotNull @Nullable ProCorePlugin plugin = ProCorePlugin.getPlugin(ProCorePlugin.class);
/**
 * @param a command sender (player or console)
 * @param b the command object
 * @param c the command label used
 * @param d the command arguments to broadcast
 * @return always false (Bukkit then shows the usage from plugin.yml)
 */
@Override
public boolean onCommand(CommandSender a, Command b, String c, String[] d)
{
try
{
FileConfiguration config = plugin.getConfig();
if (d.length <= 0)
{
a.sendMessage(config.getString("LANG.USAGES.BROADCAST_USAGE"));
}
else
{
// BUG FIX: the original loop ran up to c.length() (the character
// count of the command LABEL) while indexing d[i], which threw
// ArrayIndexOutOfBoundsException or dropped/duplicated arguments.
// Iterate over the argument array itself instead.
StringBuilder message = new StringBuilder();
for (int i = 0; i < d.length; i++)
{
message.append(" ").append(d[i]);
}
Bukkit.broadcastMessage(cambiarSimbolosDeColorAColoresEntendiblesPorElCodigo(config.getString("UTILS.BROADCAST_MESSAGE.PREFIX") + " " + message));
}
}
catch (Exception e)
{
// BUG FIX: the original broadcast the error message c.length() times
// inside a loop; report it once to the log and the chat.
Bukkit.getLogger().info("Sorry, error in this :c");
Bukkit.broadcastMessage("Sorry, error in this :c");
e.printStackTrace();
}
return false;
}
// Translate '&'-style color codes into Bukkit ChatColor codes.
private String cambiarSimbolosDeColorAColoresEntendiblesPorElCodigo(String textoQueSeVaALeer)
{
return ChatColor.translateAlternateColorCodes('&', textoQueSeVaALeer);
}
}
|
<reponame>ntrhieu89/YCSB
/**
* Copyright (c) 2012 - 2015 YCSB contributors. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
/*
* MongoDB client binding for YCSB.
*
* Submitted by <NAME> on 5/11/2010.
*
* https://gist.github.com/000a66b8db2caf42467b#file_mongo_database.java
*/
package com.yahoo.ycsb.db;
import static com.mongodb.client.model.Filters.eq;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Vector;
import org.bson.Document;
import org.bson.types.Binary;
import com.mongodb.MongoClient;
import com.mongodb.MongoClientOptions;
import com.mongodb.WriteConcern;
import com.mongodb.client.FindIterable;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.result.DeleteResult;
import com.mongodb.client.result.UpdateResult;
import com.yahoo.ycsb.ByteArrayByteIterator;
import com.yahoo.ycsb.ByteIterator;
import com.yahoo.ycsb.DB;
import com.yahoo.ycsb.DBException;
import com.yahoo.ycsb.Status;
import com.yahoo.ycsb.StringByteIterator;
/**
* MongoDB binding for YCSB framework using the MongoDB Inc.
* <a href="http://docs.mongodb.org/ecosystem/drivers/java/">driver</a>
* <p>
* See the <code>README.md</code> for configuration information.
* </p>
*
* @author ypai
* @see <a href="http://docs.mongodb.org/ecosystem/drivers/java/">MongoDB Inc.
* driver</a>
*/
public class MongoDbClient extends DB {
/** Used to include a field in a response. */
private static final Integer INCLUDE = Integer.valueOf(1);
// Connection handle; created in init(), closed and nulled in cleanup().
private MongoClient mongoClient;
// Process-wide buffer filled by insert() and flushed by insertMany().
private static final List<Document> bulkInserts = new ArrayList<Document>();
// Fixed database/collection used by every operation of this client.
private static final String MONGO_DB_NAME = "ycsb";
private static final String MONGO_TABLE = "users";
// "host:port" address handed to the MongoClient constructor in init().
private final String ipAddress;
/**
 * Cleanup any state for this DB. Called once per DB instance; there is one
 * DB instance per client thread.
 */
@Override
public void cleanup() throws DBException {
try {
mongoClient.close();
} catch (Exception e1) {
System.err.println("Could not close MongoDB connection pool: " + e1.toString());
e1.printStackTrace();
return;
} finally {
// Drop the reference even on failure so a later call cannot reuse it.
mongoClient = null;
}
}
/** Drops the whole ycsb database and recreates the empty users collection. */
public void dropDatabase() {
System.out.println("drop database and create collection");
mongoClient.dropDatabase(MONGO_DB_NAME);
MongoDatabase db = mongoClient.getDatabase(MONGO_DB_NAME);
db.createCollection(MONGO_TABLE);
}
/**
 * Delete a record from the database.
 *
 * @param table
 *          The name of the table (ignored; MONGO_TABLE is always used)
 * @param key
 *          The record key of the record to delete.
 * @return Status.OK on success, Status.NOT_FOUND if no document matched,
 *         Status.ERROR on any exception. See the {@link DB} class's
 *         description for a discussion of error codes.
 */
@Override
public Status delete(String table, String key) {
try {
MongoCollection<Document> collection = this.mongoClient.getDatabase(MONGO_DB_NAME)
.getCollection(MONGO_TABLE);
DeleteResult result = collection.deleteOne(eq("_id", String.valueOf(key)));
// Acknowledged write with zero deletions means the key did not exist.
if (result.wasAcknowledged() && result.getDeletedCount() == 0) {
System.err.println("Nothing deleted for key " + key);
return Status.NOT_FOUND;
}
return Status.OK;
} catch (Exception e) {
System.err.println(e.toString());
return Status.ERROR;
}
}
/**
 * Initialize any state for this DB. Called once per DB instance; there is
 * one DB instance per client thread.
 */
@Override
public void init() throws DBException {
// 1 s server-selection timeout, large pool, acknowledged writes.
this.mongoClient = new MongoClient(this.ipAddress, new MongoClientOptions.Builder().serverSelectionTimeout(1000)
.connectionsPerHost(500).writeConcern(WriteConcern.ACKNOWLEDGED).build());
}
/** @param ip "host:port" address of the MongoDB server to connect to in init(). */
public MongoDbClient(String ip) {
this.ipAddress = ip;
}
/**
 * Insert a record in the database. Any field/value pairs in the specified
 * values HashMap will be written into the record with the specified record
 * key.
 *
 * NOTE: this does NOT write to MongoDB directly -- it only appends the
 * document to the shared bulkInserts buffer; call insertMany() to flush.
 *
 * @param table
 *          The name of the table (ignored; MONGO_TABLE is always used)
 * @param key
 *          The record key of the record to insert.
 * @param values
 *          A HashMap of field/value pairs to insert in the record
 * @return Status.OK when buffered, Status.ERROR on any exception. See the
 *         {@link DB} class's description for a discussion of error codes.
 */
@Override
public Status insert(String table, String key, HashMap<String, ByteIterator> values) {
try {
Document toInsert = new Document("_id", key);
Map<String, String> fields = StringByteIterator.getStringMap(values);
for (Map.Entry<String, String> entry : fields.entrySet()) {
toInsert.put(entry.getKey(), entry.getValue());
}
// The buffer is shared across client threads, hence the lock.
synchronized (bulkInserts) {
bulkInserts.add(toInsert);
}
return Status.OK;
} catch (Exception e) {
System.err.println("Exception while trying bulk insert with " + bulkInserts.size());
e.printStackTrace();
return Status.ERROR;
}
}
/**
 * Plain-string convenience overload of insert(): buffers a document with
 * the given fields into bulkInserts (flushed later by insertMany()).
 */
public Status insert(String key, HashMap<String, String> values) {
try {
Document toInsert = new Document("_id", key);
for (Map.Entry<String, String> entry : values.entrySet()) {
toInsert.put(entry.getKey(), entry.getValue());
}
// Shared buffer; synchronized like the ByteIterator overload above.
synchronized (bulkInserts) {
bulkInserts.add(toInsert);
}
return Status.OK;
} catch (Exception e) {
System.err.println("Exception while trying bulk insert with " + bulkInserts.size());
e.printStackTrace();
return Status.ERROR;
}
}
/**
 * Flushes every document buffered by insert() into the users collection
 * in a single bulk write, then clears the buffer.
 */
public void insertMany() {
synchronized (bulkInserts) {
// BUG FIX: MongoCollection.insertMany throws IllegalArgumentException
// when called with an empty list; skip the flush if nothing is buffered.
if (bulkInserts.isEmpty()) {
return;
}
System.out.println("bulk inserts " + bulkInserts.size());
MongoCollection<Document> collection = this.mongoClient.getDatabase(MONGO_DB_NAME).getCollection(MONGO_TABLE);
collection.insertMany(bulkInserts);
bulkInserts.clear();
}
}
/**
 * Read a record from the database. Each field/value pair from the result
 * will be stored in a HashMap.
 *
 * @param table
 *          The name of the table (ignored; MONGO_TABLE is always used)
 * @param key
 *          The record key of the record to read.
 * @param fields
 *          The list of fields to read, or null for all of them
 * @param result
 *          A HashMap of field/value pairs for the result
 * @return Status.OK on success, Status.NOT_FOUND when the key is absent,
 *         Status.ERROR on any exception.
 */
@Override
public Status read(String table, String key, Set<String> fields, HashMap<String, ByteIterator> result) {
try {
MongoCollection<Document> collection = this.mongoClient.getDatabase(MONGO_DB_NAME)
.getCollection(MONGO_TABLE);
FindIterable<Document> findIterable = collection.find(eq("_id", String.valueOf(key)));
if (fields != null) {
// Build an inclusion projection so only the requested fields return.
Document projection = new Document();
for (String field : fields) {
projection.put(field, INCLUDE);
}
findIterable.projection(projection);
}
Document queryResult = findIterable.first();
if (queryResult != null) {
fillMap(result, queryResult);
}
return queryResult != null ? Status.OK : Status.NOT_FOUND;
} catch (Exception e) {
System.err.println(e.toString());
return Status.ERROR;
}
}
/**
 * Plain-string convenience overload of read(): fetches the full document
 * for `key` and copies every field into `result` via String.valueOf.
 */
public Status read(String key, Map<String, String> result) {
try {
MongoCollection<Document> collection = this.mongoClient.getDatabase(MONGO_DB_NAME)
.getCollection(MONGO_TABLE);
FindIterable<Document> findIterable = collection.find(eq("_id", String.valueOf(key)));
Document queryResult = findIterable.first();
if (queryResult != null) {
queryResult.forEach((k, v) -> {
result.put(k, String.valueOf(v));
});
}
return queryResult != null ? Status.OK : Status.NOT_FOUND;
} catch (Exception e) {
System.err.println(e.toString());
return Status.ERROR;
}
}
/**
 * Perform a range scan for a set of records in the database. Each
 * field/value pair from the result will be stored in a HashMap.
 *
 * Not supported by this client: always returns Status.NOT_IMPLEMENTED.
 *
 * @param table
 *          The name of the table
 * @param startkey
 *          The record key of the first record to read.
 * @param recordcount
 *          The number of records to read
 * @param fields
 *          The list of fields to read, or null for all of them
 * @param result
 *          A Vector of HashMaps, where each HashMap is a set field/value
 *          pairs for one record
 * @return Status.NOT_IMPLEMENTED, always.
 */
@Override
public Status scan(String table, String startkey, int recordcount, Set<String> fields,
Vector<HashMap<String, ByteIterator>> result) {
return Status.NOT_IMPLEMENTED;
}
/**
 * Update a record in the database. Any field/value pairs in the specified
 * values HashMap will be written into the record with the specified record
 * key, overwriting any existing values with the same field name.
 *
 * @param table
 *          The name of the table (ignored; MONGO_TABLE is always used)
 * @param key
 *          The record key of the record to write.
 * @param value
 *          A HashMap of field/value pairs to update in the record
 * @return Status.OK on success, Status.NOT_FOUND when no document
 *         matched, Status.ERROR on any exception.
 */
@Override
public Status update(String table, String key, HashMap<String, ByteIterator> value) {
try {
MongoCollection<Document> collection = this.mongoClient.getDatabase(MONGO_DB_NAME)
.getCollection(MONGO_TABLE);
Document fieldsToSet = new Document();
Map<String, String> values = StringByteIterator.getStringMap(value);
for (Map.Entry<String, String> entry : values.entrySet()) {
fieldsToSet.put(entry.getKey(), entry.getValue());
}
// $set merges the given fields into the existing document.
Document update = new Document("$set", fieldsToSet);
UpdateResult result = collection.updateOne(eq("_id", String.valueOf(key)), update);
if (result.wasAcknowledged() && result.getMatchedCount() == 0) {
System.err.println("Nothing updated for key " + key);
return Status.NOT_FOUND;
}
return Status.OK;
} catch (Exception e) {
System.err.println(e.toString());
return Status.ERROR;
}
}
/**
 * Convenience overload: apply a $set of the given string fields to the
 * document whose _id matches the key.
 *
 * @param key the record key (_id) of the document to update
 * @param values field/value pairs to write into the record
 * @return Status.OK on success, Status.NOT_FOUND when no document matched,
 *         Status.ERROR on any exception
 */
public Status update(String key, Map<String, String> values) {
  try {
    MongoCollection<Document> collection = this.mongoClient.getDatabase(MONGO_DB_NAME)
        .getCollection(MONGO_TABLE);
    Document fieldsToSet = new Document();
    for (Map.Entry<String, String> field : values.entrySet()) {
      fieldsToSet.put(field.getKey(), field.getValue());
    }
    UpdateResult outcome = collection.updateOne(eq("_id", String.valueOf(key)),
        new Document("$set", fieldsToSet));
    if (outcome.wasAcknowledged() && outcome.getMatchedCount() == 0) {
      System.err.println("Nothing updated for key " + key);
      return Status.NOT_FOUND;
    }
    return Status.OK;
  } catch (Exception e) {
    System.err.println(e.toString());
    return Status.ERROR;
  }
}
// Ad-hoc manual smoke test: connects to a local MongoDB instance and issues
// a single update with an empty value map. Not part of the benchmark flow.
public static void main(String[] args) throws Exception {
MongoDbClient client = new MongoDbClient("127.0.0.1:27017");
// Empty map exercises the update path without changing any fields.
client.update("Nuser2166894489591224238", new HashMap<>());
// client.cleanup();
}
/**
 * Fills the map with the values from the DBObject.
 *
 * <p>Only fields stored as BSON {@link Binary} are copied; entries of any
 * other type (including the string _id) are skipped.
 * NOTE(review): presumably all data fields are written as binary by this
 * client, so nothing of interest is dropped — confirm against the writers.
 *
 * @param resultMap
 * The map to fill.
 * @param obj
 * The object to copy values from.
 */
protected void fillMap(Map<String, ByteIterator> resultMap, Document obj) {
for (Map.Entry<String, Object> entry : obj.entrySet()) {
if (entry.getValue() instanceof Binary) {
resultMap.put(entry.getKey(), new ByteArrayByteIterator(((Binary) entry.getValue()).getData()));
}
}
}
}
|
<reponame>sethbrasile/Tone.js<gh_stars>0
import * as tslib_1 from "tslib";
import { TimeClass } from "../../core/type/Time";
import { TimelineValue } from "../../core/util/TimelineValue";
import { onContextClose, onContextInit } from "../context/ContextInitialization";
import { Gain } from "../context/Gain";
import { ToneWithContext } from "../context/ToneWithContext";
import { TicksClass } from "../type/Ticks";
import { TransportTimeClass } from "../type/TransportTime";
import { optionsFromArguments } from "../util/Defaults";
import { Emitter } from "../util/Emitter";
import { readOnly, writable } from "../util/Interface";
import { IntervalTimeline } from "../util/IntervalTimeline";
import { Timeline } from "../util/Timeline";
import { isArray, isDefined } from "../util/TypeCheck";
import { Clock } from "./Clock";
import { TransportEvent } from "./TransportEvent";
import { TransportRepeatEvent } from "./TransportRepeatEvent";
/**
* Transport for timing musical events.
* Supports tempo curves and time changes. Unlike browser-based timing (setInterval, requestAnimationFrame)
* Transport timing events pass in the exact time of the scheduled event
* in the argument of the callback function. Pass that time value to the object
* you're scheduling. <br><br>
* A single transport is created for you when the library is initialized.
* <br><br>
* The transport emits the events: "start", "stop", "pause", and "loop" which are
* called with the time of that event as the argument.
*
* @example
* import { Oscillator, Transport } from "tone";
* const osc = new Oscillator().toDestination();
* // repeated event every 8th note
* Transport.scheduleRepeat((time) => {
* // use the callback time to schedule events
* osc.start(time).stop(time + 0.1);
* }, "8n");
* // transport must be started before it starts invoking events
* Transport.start();
* @category Core
*/
var Transport = /** @class */ (function (_super) {
tslib_1.__extends(Transport, _super);
function Transport() {
var _this = _super.call(this, optionsFromArguments(Transport.getDefaults(), arguments)) || this;
_this.name = "Transport";
//-------------------------------------
// LOOPING
//-------------------------------------
/**
* If the transport loops or not.
*/
_this._loop = new TimelineValue(false);
/**
* The loop start position in ticks
*/
_this._loopStart = 0;
/**
* The loop end position in ticks
*/
_this._loopEnd = 0;
//-------------------------------------
// TIMELINE EVENTS
//-------------------------------------
/**
* All the events in an object to keep track by ID
*/
_this._scheduledEvents = {};
/**
* The scheduled events.
*/
_this._timeline = new Timeline();
/**
* Repeated events
*/
_this._repeatedEvents = new IntervalTimeline();
/**
* All of the synced Signals
*/
_this._syncedSignals = [];
/**
* The swing amount
*/
_this._swingAmount = 0;
var options = optionsFromArguments(Transport.getDefaults(), arguments);
// CLOCK/TEMPO
_this._ppq = options.ppq;
_this._clock = new Clock({
callback: _this._processTick.bind(_this),
context: _this.context,
frequency: 0,
units: "bpm",
});
_this._bindClockEvents();
_this.bpm = _this._clock.frequency;
_this._clock.frequency.multiplier = options.ppq;
_this.bpm.setValueAtTime(options.bpm, 0);
readOnly(_this, "bpm");
_this._timeSignature = options.timeSignature;
// SWING
_this._swingTicks = options.ppq / 2; // 8n
return _this;
}
/**
 * The default Transport options: 120bpm, 4/4 time signature, 192 PPQ,
 * no swing, and a four-measure loop region.
 */
Transport.getDefaults = function () {
    var transportDefaults = {
        bpm: 120,
        loopEnd: "4m",
        loopStart: 0,
        ppq: 192,
        swing: 0,
        swingSubdivision: "8n",
        timeSignature: 4,
    };
    return Object.assign(ToneWithContext.getDefaults(), transportDefaults);
};
//-------------------------------------
// TICKS
//-------------------------------------
/**
 * Invoked by the Clock on every tick. Applies swing to the callback time,
 * wraps the tick position back to the loop start when looping, and then
 * fires every timeline event scheduled at this tick.
 * @param tickTime clock relative tick time
 * @param ticks the current tick count
 */
Transport.prototype._processTick = function (tickTime, ticks) {
// handle swing
if (this._swingAmount > 0 &&
ticks % this._ppq !== 0 && // not on a downbeat
ticks % (this._swingTicks * 2) !== 0) {
// add some swing
// sine envelope over the swing period, scaled by the swing amount
var progress = (ticks % (this._swingTicks * 2)) / (this._swingTicks * 2);
var amount = Math.sin((progress) * Math.PI) * this._swingAmount;
// offset the callback time by up to 2/3 of the swing subdivision
tickTime += new TicksClass(this.context, this._swingTicks * 2 / 3).toSeconds() * amount;
}
// do the loop test
if (this._loop.get(tickTime)) {
if (ticks >= this._loopEnd) {
// emit loopEnd before jumping, then loopStart/loop after the jump
this.emit("loopEnd", tickTime);
this._clock.setTicksAtTime(this._loopStart, tickTime);
ticks = this._loopStart;
this.emit("loopStart", tickTime, this._clock.getSecondsAtTime(tickTime));
this.emit("loop", tickTime);
}
}
// invoke the timeline events scheduled on this tick
this._timeline.forEachAtTime(ticks, function (event) { return event.invoke(tickTime); });
};
//-------------------------------------
// SCHEDULABLE EVENTS
//-------------------------------------
/**
 * Schedule a single event along the Transport's timeline.
 * @param callback The callback to be invoked at the time.
 * @param time The time to invoke the callback at.
 * @return The id of the event which can be used for canceling the event.
 * @example
 * import { Transport } from "tone";
 * // schedule an event on the 16th measure
 * Transport.schedule((time) => {
 * 	// invoked on measure 16
 * 	console.log("measure 16!");
 * }, "16:0:0");
 */
Transport.prototype.schedule = function (callback, time) {
    // convert the transport time into a tick position first
    var tickTime = new TransportTimeClass(this.context, time).toTicks();
    var transportEvent = new TransportEvent(this, {
        callback: callback,
        time: tickTime,
    });
    return this._addEvent(transportEvent, this._timeline);
};
/**
 * Schedule a repeated event along the timeline. The event will fire
 * at the `interval` starting at the `startTime` and for the specified
 * `duration`.
 * @param callback The callback to invoke.
 * @param interval The duration between successive callbacks. Must be a positive number.
 * @param startTime When along the timeline the events should start being invoked.
 * @param duration How long the event should repeat. Defaults to Infinity
 * (repeats until canceled).
 * @return The ID of the scheduled event. Use this to cancel the event.
 * @example
 * import { Oscillator, Transport } from "tone";
 * const osc = new Oscillator().toDestination().start();
 * // a callback invoked every eighth note after the first measure
 * Transport.scheduleRepeat((time) => {
 * 	osc.start(time).stop(time + 0.1);
 * }, "8n", "1m");
 */
Transport.prototype.scheduleRepeat = function (callback, interval, startTime, duration) {
if (duration === void 0) { duration = Infinity; }
// all times are converted to tick positions on the transport timeline
var event = new TransportRepeatEvent(this, {
callback: callback,
duration: new TimeClass(this.context, duration).toTicks(),
interval: new TimeClass(this.context, interval).toTicks(),
time: new TransportTimeClass(this.context, startTime).toTicks(),
});
// kick it off if the Transport is started
// @ts-ignore
return this._addEvent(event, this._repeatedEvents);
};
/**
 * Schedule an event that is removed from the timeline after its first
 * invocation.
 * @param callback The callback to invoke once.
 * @param time The time the callback should be invoked.
 * @returns The ID of the scheduled event.
 */
Transport.prototype.scheduleOnce = function (callback, time) {
    var tickTime = new TransportTimeClass(this.context, time).toTicks();
    var onceEvent = new TransportEvent(this, {
        callback: callback,
        once: true,
        time: tickTime,
    });
    return this._addEvent(onceEvent, this._timeline);
};
/**
 * Remove a previously scheduled event from its timeline and dispose it.
 * Unknown ids are ignored.
 * @param eventId The id of the event.
 */
Transport.prototype.clear = function (eventId) {
    var id = eventId.toString();
    if (this._scheduledEvents.hasOwnProperty(id)) {
        var scheduled = this._scheduledEvents[id];
        scheduled.timeline.remove(scheduled.event);
        scheduled.event.dispose();
        delete this._scheduledEvents[id];
    }
    return this;
};
/**
 * Register an event with the given timeline and remember which timeline
 * owns it so it can later be removed by id (see clear()).
 * @returns the event id which was just added
 */
Transport.prototype._addEvent = function (event, timeline) {
    this._scheduledEvents[event.id.toString()] = {
        event: event,
        timeline: timeline,
    };
    timeline.add(event);
    return event.id;
};
/**
 * Remove scheduled events from the timeline after the given time.
 * Repeated events are removed when their startTime falls after that time.
 * @param after Clear all events after this time (defaults to 0).
 */
Transport.prototype.cancel = function (after) {
    var _this = this;
    if (after === void 0) { after = 0; }
    var afterTicks = this.toTicks(after);
    // clear matching events from both the single-shot and repeat timelines
    [this._timeline, this._repeatedEvents].forEach(function (timeline) {
        timeline.forEachFrom(afterTicks, function (event) {
            _this.clear(event.id);
        });
    });
    return this;
};
//-------------------------------------
// START/STOP/PAUSE
//-------------------------------------
/**
 * Forward the clock's "start"/"stop"/"pause" events through the
 * Transport's emitter; the start offset is converted from ticks
 * to seconds before being re-emitted.
 */
Transport.prototype._bindClockEvents = function () {
    var _this = this;
    this._clock.on("start", function (time, offset) {
        var offsetSeconds = new TicksClass(_this.context, offset).toSeconds();
        _this.emit("start", time, offsetSeconds);
    });
    ["stop", "pause"].forEach(function (eventName) {
        _this._clock.on(eventName, function (time) {
            _this.emit(eventName, time);
        });
    });
};
Object.defineProperty(Transport.prototype, "state", {
/**
* Returns the playback state of the source, either "started", "stopped", or "paused"
*/
get: function () {
return this._clock.getStateAtTime(this.now());
},
enumerable: true,
configurable: true
});
/**
 * Start the transport and all sources synced to the transport.
 * @param time The time when the transport should start.
 * @param offset The timeline offset to start the transport.
 * @example
 * import { Transport } from "tone";
 * // start the transport in one second starting at beginning of the 5th measure.
 * Transport.start("+1", "4:0:0");
 */
Transport.prototype.start = function (time, offset) {
    // only convert the offset when one was actually provided
    var offsetTicks = isDefined(offset) ? this.toTicks(offset) : undefined;
    this._clock.start(time, offsetTicks);
    return this;
};
/**
 * Stop the transport and all sources synced to the transport.
 * @param time The time when the transport should stop.
 * @example
 * import { Transport } from "tone";
 * Transport.stop();
 */
Transport.prototype.stop = function (time) {
// delegate to the clock; the "stop" emit happens via _bindClockEvents
this._clock.stop(time);
return this;
};
/**
 * Pause the transport and all sources synced to the transport.
 * @param time The time when the transport should pause.
 */
Transport.prototype.pause = function (time) {
this._clock.pause(time);
return this;
};
/**
 * Toggle the current state of the transport: stop it when it is started,
 * otherwise start it.
 * @param time The time of the event
 */
Transport.prototype.toggle = function (time) {
    var seconds = this.toSeconds(time);
    var isStarted = this._clock.getStateAtTime(seconds) === "started";
    return isStarted ? this.stop(seconds) : this.start(seconds);
};
Object.defineProperty(Transport.prototype, "timeSignature", {
//-------------------------------------
// SETTERS/GETTERS
//-------------------------------------
/**
* The time signature as just the numerator over 4.
* For example 4/4 would be just 4 and 6/8 would be 3.
* @example
* import { Transport } from "tone";
* // common time
* Transport.timeSignature = 4;
* // 7/8
* Transport.timeSignature = [7, 8];
* // this will be reduced to a single number
* Transport.timeSignature; // returns 3.5
*/
get: function () {
return this._timeSignature;
},
set: function (timeSig) {
if (isArray(timeSig)) {
timeSig = (timeSig[0] / timeSig[1]) * 4;
}
this._timeSignature = timeSig;
},
enumerable: true,
configurable: true
});
Object.defineProperty(Transport.prototype, "loopStart", {
/**
* When the Transport.loop = true, this is the starting position of the loop.
*/
get: function () {
return new TimeClass(this.context, this._loopStart, "i").toSeconds();
},
set: function (startPosition) {
this._loopStart = this.toTicks(startPosition);
},
enumerable: true,
configurable: true
});
Object.defineProperty(Transport.prototype, "loopEnd", {
/**
* When the Transport.loop = true, this is the ending position of the loop.
*/
get: function () {
return new TimeClass(this.context, this._loopEnd, "i").toSeconds();
},
set: function (endPosition) {
this._loopEnd = this.toTicks(endPosition);
},
enumerable: true,
configurable: true
});
Object.defineProperty(Transport.prototype, "loop", {
/**
* If the transport loops or not.
*/
get: function () {
return this._loop.get(this.now());
},
set: function (loop) {
this._loop.set(loop, this.now());
},
enumerable: true,
configurable: true
});
/**
* Set the loop start and stop at the same time.
* @example
* import { Transport } from "tone";
* // loop over the first measure
* Transport.setLoopPoints(0, "1m");
* Transport.loop = true;
*/
Transport.prototype.setLoopPoints = function (startPosition, endPosition) {
this.loopStart = startPosition;
this.loopEnd = endPosition;
return this;
};
Object.defineProperty(Transport.prototype, "swing", {
/**
* The swing value. Between 0-1 where 1 equal to the note + half the subdivision.
*/
get: function () {
return this._swingAmount;
},
set: function (amount) {
// scale the values to a normal range
this._swingAmount = amount;
},
enumerable: true,
configurable: true
});
Object.defineProperty(Transport.prototype, "swingSubdivision", {
/**
* Set the subdivision which the swing will be applied to.
* The default value is an 8th note. Value must be less
* than a quarter note.
*/
get: function () {
return new TicksClass(this.context, this._swingTicks).toNotation();
},
set: function (subdivision) {
this._swingTicks = this.toTicks(subdivision);
},
enumerable: true,
configurable: true
});
Object.defineProperty(Transport.prototype, "position", {
/**
* The Transport's position in Bars:Beats:Sixteenths.
* Setting the value will jump to that position right away.
*/
get: function () {
var now = this.now();
var ticks = this._clock.getTicksAtTime(now);
return new TicksClass(this.context, ticks).toBarsBeatsSixteenths();
},
set: function (progress) {
var ticks = this.toTicks(progress);
this.ticks = ticks;
},
enumerable: true,
configurable: true
});
Object.defineProperty(Transport.prototype, "seconds", {
/**
* The Transport's position in seconds
* Setting the value will jump to that position right away.
*/
get: function () {
return this._clock.seconds;
},
set: function (s) {
var now = this.now();
var ticks = this._clock.frequency.timeToTicks(s, now);
this.ticks = ticks;
},
enumerable: true,
configurable: true
});
Object.defineProperty(Transport.prototype, "progress", {
/**
 * The Transport's loop position as a normalized value (0-1).
 * Always returns 0 when looping is not enabled.
 */
get: function () {
if (this.loop) {
var now = this.now();
var ticks = this._clock.getTicksAtTime(now);
// fraction of the way from loopStart to loopEnd (in ticks)
return (ticks - this._loopStart) / (this._loopEnd - this._loopStart);
}
else {
return 0;
}
},
enumerable: true,
configurable: true
});
Object.defineProperty(Transport.prototype, "ticks", {
/**
* The transports current tick position.
*/
get: function () {
return this._clock.ticks;
},
set: function (t) {
if (this._clock.ticks !== t) {
var now = this.now();
// stop everything synced to the transport
if (this.state === "started") {
var ticks = this._clock.getTicksAtTime(now);
// schedule to start on the next tick, #573
var time = this._clock.getTimeOfTick(Math.ceil(ticks));
this.emit("stop", time);
this._clock.setTicksAtTime(t, time);
// restart it with the new time
this.emit("start", time, this._clock.getSecondsAtTime(time));
}
else {
this._clock.setTicksAtTime(t, now);
}
}
},
enumerable: true,
configurable: true
});
/**
 * Get the clock's tick count at the given time, rounded to the nearest
 * whole tick.
 * @param time When to get the tick value
 * @return The tick value at the given time.
 */
Transport.prototype.getTicksAtTime = function (time) {
    var ticks = this._clock.getTicksAtTime(time);
    return Math.round(ticks);
};
/**
 * Return the elapsed seconds at the given time.
 * Thin delegation to the underlying clock.
 * @param time When to get the elapsed seconds
 * @return The number of elapsed seconds
 */
Transport.prototype.getSecondsAtTime = function (time) {
return this._clock.getSecondsAtTime(time);
};
Object.defineProperty(Transport.prototype, "PPQ", {
/**
* Pulses Per Quarter note. This is the smallest resolution
* the Transport timing supports. This should be set once
* on initialization and not set again. Changing this value
* after other objects have been created can cause problems.
*/
get: function () {
return this._clock.frequency.multiplier;
},
set: function (ppq) {
this._clock.frequency.multiplier = ppq;
},
enumerable: true,
configurable: true
});
//-------------------------------------
// SYNCING
//-------------------------------------
/**
* Returns the time aligned to the next subdivision
* of the Transport. If the Transport is not started,
* it will return 0.
* Note: this will not work precisely during tempo ramps.
* @param subdivision The subdivision to quantize to
* @return The context time of the next subdivision.
* @example
* import { Transport } from "tone";
* // the transport must be started, otherwise returns 0
* Transport.start();
* Transport.nextSubdivision("4n");
*/
Transport.prototype.nextSubdivision = function (subdivision) {
subdivision = this.toTicks(subdivision);
if (this.state !== "started") {
// if the transport's not started, return 0
return 0;
}
else {
var now = this.now();
// the remainder of the current ticks and the subdivision
var transportPos = this.getTicksAtTime(now);
var remainingTicks = subdivision - transportPos % subdivision;
return this._clock.nextTickTime(remainingTicks, now);
}
};
/**
* Attaches the signal to the tempo control signal so that
* any changes in the tempo will change the signal in the same
* ratio.
*
* @param signal
* @param ratio Optionally pass in the ratio between the two signals.
* Otherwise it will be computed based on their current values.
*/
Transport.prototype.syncSignal = function (signal, ratio) {
if (!ratio) {
// get the sync ratio
var now = this.now();
if (signal.getValueAtTime(now) !== 0) {
var bpm = this.bpm.getValueAtTime(now);
var computedFreq = 1 / (60 / bpm / this.PPQ);
ratio = signal.getValueAtTime(now) / computedFreq;
}
else {
ratio = 0;
}
}
var ratioSignal = new Gain(ratio);
// @ts-ignore
this.bpm.connect(ratioSignal);
// @ts-ignore
ratioSignal.connect(signal._param);
this._syncedSignals.push({
initial: signal.value,
ratio: ratioSignal,
signal: signal,
});
signal.value = 0;
return this;
};
/**
 * Unsyncs a previously synced signal from the transport's control:
 * disposes the ratio gain and restores the signal's pre-sync value.
 * See Transport.syncSignal.
 */
Transport.prototype.unsyncSignal = function (signal) {
    // iterate backwards so splicing does not skip entries
    for (var index = this._syncedSignals.length - 1; index >= 0; index--) {
        var entry = this._syncedSignals[index];
        if (entry.signal !== signal) {
            continue;
        }
        entry.ratio.dispose();
        entry.signal.value = entry.initial;
        this._syncedSignals.splice(index, 1);
    }
    return this;
};
/**
 * Clean up. Disposes the clock and both event timelines. The bpm signal
 * was made read-only in the constructor, so it is made writable again
 * during teardown.
 */
Transport.prototype.dispose = function () {
_super.prototype.dispose.call(this);
this._clock.dispose();
writable(this, "bpm");
this._timeline.dispose();
this._repeatedEvents.dispose();
return this;
};
return Transport;
}(ToneWithContext));
export { Transport };
Emitter.mixin(Transport);
//-------------------------------------
// INITIALIZATION
//-------------------------------------
// Create one Transport per audio context, and dispose it when the
// context closes.
onContextInit(function (context) {
context.transport = new Transport({ context: context });
});
onContextClose(function (context) {
context.transport.dispose();
});
//# sourceMappingURL=Transport.js.map |
<reponame>ctuning/ck-spack
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by <NAME>, <EMAIL>, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Libsodium(AutotoolsPackage):
    """Sodium is a modern, easy-to-use software library for encryption,
    decryption, signatures, password hashing and more."""

    homepage = "https://download.libsodium.org/doc/"
    url = "https://download.libsodium.org/libsodium/releases/libsodium-1.0.13.tar.gz"
    list_url = "https://download.libsodium.org/libsodium/releases/old"

    version('1.0.15', '070373e73a0b10bd96f412e1732ebc42')
    version('1.0.13', 'f38aac160a4bd05f06f743863e54e499')
    version('1.0.12', 'c308e3faa724b630b86cc0aaf887a5d4')
    version('1.0.11', 'b58928d035064b2a46fb564937b83540')
    version('1.0.10', 'ea89dcbbda0b2b6ff6a1c476231870dd')
    version('1.0.3', 'b3bcc98e34d3250f55ae196822307fab')
    version('1.0.2', 'dc40eb23e293448c6fc908757738003f')
    version('1.0.1', '9a221b49fba7281ceaaf5e278d0f4430')
    version('1.0.0', '3093dabe4e038d09f0d150cef064b2f7')
    version('0.7.1', 'c224fe3923d1dcfe418c65c8a7246316')

    def url_for_version(self, version):
        """Old releases live under old/ on the download server, and the
        very oldest under old/unsupported/."""
        base = 'https://download.libsodium.org/libsodium/releases/'
        if version < Version('1.0.4'):
            subdir = 'old/unsupported/'
        elif version < Version('1.0.12'):
            subdir = 'old/'
        else:
            subdir = ''
        return '{0}{1}libsodium-{2}.tar.gz'.format(base, subdir, version)
|
<gh_stars>1-10
package org.jmotor.validation
/**
* Component:
* Description:
* Date: 2018/8/28
*
* @author AI
*/
trait Validator[T] {
  /**
   * Validate the given value. Returns Unit, so implementations presumably
   * signal invalid input by throwing — confirm against concrete validators.
   */
  def validate(value: T): Unit
}
|
package com.phoenixnap.oss.ramlplugin.raml2code.github;
import org.junit.Test;
import com.phoenixnap.oss.ramlplugin.raml2code.rules.GitHubAbstractRuleTestBase;
import com.phoenixnap.oss.ramlplugin.raml2code.rules.Spring4ControllerDecoratorRule;
/**
* @author aleksandars
* @since 2.0.5
*/
public class Issue292RulesTest extends GitHubAbstractRuleTestBase {
// Regression tests for GitHub issue #292: each test generates a Spring 4
// decorator controller from a RAML fixture and diffs it against the
// checked-in expected output.
@Test
public void check_inline_enums_are_not_generated() throws Exception {
loadRaml("issue-292-1.raml");
rule = new Spring4ControllerDecoratorRule();
rule.apply(getControllerMetadata(), jCodeModel);
verifyGeneratedCode("Issue292-1Spring4ControllerDecorator");
}
@Test
public void check_inline_enums_are_merged() throws Exception {
loadRaml("issue-292-2.raml");
rule = new Spring4ControllerDecoratorRule();
rule.apply(getControllerMetadata(), jCodeModel);
verifyGeneratedCode("Issue292-2Spring4ControllerDecorator");
}
}
|
package com.today.dbreport.utils
import java.io.File
import com.typesafe.config.{Config, ConfigFactory}
/**
* 类功能描述: 配置文件读取
*
* Create by Barry
*
* @version 1.0.0
* Create on 2018/6/25.
*/
object ConfigReader {
  /**
   * Load the application configuration.
   *
   * Looks first for an external file at `<working-dir>/config/config.conf`
   * (a directory sibling of the deployed jar); when it does not exist,
   * falls back to loading `config.conf` from the classpath resources
   * bundled inside the jar.
   *
   * @return the parsed Typesafe Config
   */
  def getConfig(): Config = {
    val configFileName = "config.conf"
    // External override path: <user.dir>/config/config.conf
    val configFile = s"${System.getProperty("user.dir")}${File.separator}config${File.separator}${configFileName}"
    println(s"config file path:${configFile}")
    val file = new File(configFile)
    if (file.exists()) {
      // Prefer the external file next to the jar.
      ConfigFactory.parseFile(file)
    } else {
      // Fall back to the resource packaged inside the jar.
      ConfigFactory.load(ConfigReader.getClass().getClassLoader, configFileName)
    }
  }
}
|
# Build the yig internal plugins inside the journeymidnight/yig build image,
# mounting the parent of the current directory into the container as /work.
BASEDIR=$(dirname $(pwd))
sudo docker run --rm -v ${BASEDIR}:/work -w /work journeymidnight/yig bash -c 'make plugin_internal'
#!/bin/bash
# Scratch area for extracted/converted images, plus a temp file that will
# hold the list of album directories to process.
TMPDIR=$(mktemp -d)
startdir="$PWD"
dirlist=$(mktemp)
# Remove any leftover extracted-image files from the temp directory
# between albums.
function cleanup {
# I use trash-cli here instead of rm
# https://github.com/andreafrancia/trash-cli
# Obviously, substitute rm -f for trash if you want to use it.
find "$TMPDIR/" -iname "OTHER*" -exec trash {} \;
find "$TMPDIR/" -iname "FRONT_COVER*" -exec trash {} \;
find "$TMPDIR/" -iname "cover*" -exec trash {} \;
find "$TMPDIR/" -iname "ICON*" -exec trash {} \;
find "$TMPDIR/" -iname "ILLUSTRATION*" -exec trash {} \;
}
#https://www.reddit.com/r/bash/comments/8nau9m/remove_leading_and_trailing_spaces_from_a_variable/
# Strip leading and trailing whitespace from $1 using pure parameter
# expansion (no external processes) and print the result without a newline.
trim() {
local s=$1 LC_CTYPE=C
s=${s#"${s%%[![:space:]]*}"}
s=${s%"${s##*[![:space:]]}"}
printf '%s' "$s"
}
if [ -f "$HOME/.config/vindauga.rc" ];then
readarray -t line < "$HOME/.config/vindauga.rc"
musicdir=${line[1]}
cachedir=${line[3]}
placeholder_img=${line[5]}
placeholder_dir=${line[7]}
display_size=${line[9]}
XCoord=${line[11]}
YCoord=${line[13]}
ConkyFile=${line[15]}
LastfmAPIKey=${line[17]}
fi
SAVEIFS=$IFS
IFS=$(echo -en "\n\b")
ENTRIES=$(find -name '*.mp3' -printf '%h\n' | sort -u | grep -c / )
CURRENTENTRY=1
find -H . -type f \( -name "*.bz2" -or -name "*.gz" -or -name "*.iso" -or -name "*.tgz" -or -name "*.rar" -or -name "*.zip" \) -exec chmod 666 '{}' ';'
find -name '*.mp3' -printf '%h\n' | sort -u | realpath -p > "$dirlist"
while read line
do
cleanup
TITLE=""
ALBUMARTIST=""
NEWTITLE=""
SONGFILE=""
SONGDIR=""
BOB=""
LOOPEND="False"
#SONGFILE="$file"
#SongDir=$(dirname "${SONGFILE}")
dir=$(echo "$line")
SongDir=$(echo "$dir")
fullpath=$(realpath "$dir")
SONGFILE=$(find "$fullpath" -name '*.mp3' | head -1)
echo "$CURRENTENTRY of $ENTRIES $dir"
CURRENTENTRY=$(($CURRENTENTRY+1))
####################################################################
# Do cover files exist? If so, make sure both cover and folder exist.
####################################################################
if [ -f "$fullpath/cover.png" ];then
convert "$fullpath/cover.png" "$fullpath/cover.jpg"
rm "$fullpath/cover.png"
fi
if [ -f "$fullpath/folder.png" ];then
convert "$fullpath/folder.png" "$fullpath/folder.jpg"
rm "$fullpath/folder.png"
fi
if [ ! -f "$fullpath/cover.jpg" ] && [ -f "$fullpath/folder.jpg" ];then
cp "$fullpath/folder.jpg" "$fullpath/cover.jpg"
fi
if [ ! -f "$fullpath/folder.jpg" ] && [ -f "$fullpath/cover.jpg" ];then
cp "$fullpath/cover.jpg" "$fullpath/folder.jpg"
fi
#read
if [ ! -f "$fullpath/cover.jpg" ];then
echo "Nothing found in directory $fullpath"
########################################################################
# Getting data from song along with a
# sed one liner to remove any null bytes that might be in there
# Also switching to ffmpeg for most of the data; speeds it up a LOT
######################################################################## awk '{for(i=2;i<=NF;++i)print $i}'
songdata=$(ffprobe "$SONGFILE" 2>&1)
# big long grep string to avoid all the possible frakups I found, lol
ARTIST=$(echo "$songdata" | grep "artist" | grep -v "mp3," | head -1 | awk -F ': ' '{for(i=2;i<=NF;++i)print $i}')
ALBUM=$(echo "$songdata" | grep "album" | head -1 | awk -F ': ' '{for(i=2;i<=NF;++i)print $i}' | tr '\n' ' ')
ARTIST=$(trim "$ARTIST")
ALBUM=$(trim "$ALBUM")
CoverExist=$(echo "$songdata" | grep -c "front")
if [ $CoverExist -gt 0 ];then
DATA=`eyeD3 "$SONGFILE" 2>/dev/null | sed 's/\x0//g' `
COVER=$(echo "$DATA" | grep "FRONT_COVER" )
fi
####################################################################
# Does the MP3 have a cover file?
####################################################################
####################################################################
# Albumart file, nothing in MP3
####################################################################
#if [[ ! -z "$FILTER" ]] && [[ -z "$COVER" ]];then
# echo "### Cover art retrieved from music directory!"
# echo "### Cover art being copied to MP3 ID3 tags!"
# if [ -f "$SongDir/cover.jpg" ]; then
# if [ ! -f "$SongDir/folder.jpg" ]; then
# convert "$SongDir/cover.jpg" "$SongDir/folder.jpg"
# fi
# else
# if [ -f "$SongDir/folder.jpg" ]; then
# convert "$SongDir/folder.jpg" "$SongDir/cover.jpg"
# fi
# fi
# echo "$fullpath/cover.jpg"
# eyeD3 --add-image="$SongDir/cover.jpg":FRONT_COVER "$SONGFILE" 2>/dev/null
#fi
####################################################################
# MP3 cover, no file
####################################################################
eyeD3 --write-images="$TMPDIR" "$SONGFILE" 1> /dev/null
if [ -f "$TMPDIR/FRONT_COVER.png" ]; then
echo "### Converting PNG into JPG"
convert "$TMPDIR/FRONT_COVER.png" "$TMPDIR/FRONT_COVER.jpeg"
fi
# Catching when it's sometimes stored as "Other" tag instead of FRONT_COVER
# but only when FRONT_COVER doesn't exist.
if [ ! -f "$TMPDIR/FRONT_COVER.jpeg" ]; then
if [ -f "$TMPDIR/OTHER.png" ]; then
echo "converting PNG into JPG"
convert "$TMPDIR/OTHER.png" "$TMPDIR/OTHER.jpeg"
fi
if [ -f "$TMPDIR/OTHER.jpg" ]; then
cp "$TMPDIR/OTHER.jpg" "$TMPDIR/OTHER.jpeg"
fi
if [ -f "$TMPDIR/OTHER.jpeg" ]; then
cp "$TMPDIR/OTHER.jpeg" "$TMPDIR/FRONT_COVER.jpeg"
fi
if [ -f "$TMPDIR/FRONT_COVER.jpg" ]; then
cp "$TMPDIR/FRONT_COVER.jpg" "$TMPDIR/FRONT_COVER.jpeg"
fi
fi
if [ -f "$TMPDIR/FRONT_COVER.jpeg" ]; then
echo "### Cover art retrieved from MP3 ID3 tags!"
echo "### Cover art being copied to music directory!"
echo "$fullpath/cover.jpg"
cp "$TMPDIR/FRONT_COVER.jpeg" "$fullpath/cover.jpg"
cp "$TMPDIR/FRONT_COVER.jpeg" "$fullpath/folder.jpg"
fi
####################################################################
# No albumart file, nothing in MP3
####################################################################
if [ ! -f "$fullpath/cover.jpg" ];then
glyrc cover --timeout 15 --artist "$ARTIST" --album "$ALBUM" --write "$TMPDIR/cover.tmp" --from "musicbrainz;discogs;coverartarchive;rhapsody;lastfm"
convert "$TMPDIR/cover.tmp" "$TMPDIR/cover.jpg"
#tempted to be a hard stop here, because sometimes these covers are just wrong.
if [ -f "$TMPDIR/cover.jpg" ]; then
cp "$TMPDIR/cover.jpg" "$fullpath/cover.jpg"
cp "$TMPDIR/cover.jpg" "$fullpath/folder.jpg"
echo "Cover art found online; you may wish to check it before embedding it."
else
echo "No cover art found online or elsewhere."
fi
fi
fi
##########################################################################
# Copy to vindauga cache, if exists And get artist image
##########################################################################
if [ -d "$cachedir" ];then
if [ -f "$fullpath/cover.jpg" ];then
SONGFILE=$(find "$fullpath" -name '*.mp3' | head -1)
songdata=$(ffprobe "$SONGFILE" 2>&1)
ARTIST=$(echo "$songdata" | grep "artist" | grep -v "mp3," | head -1 | awk -F ': ' '{for(i=2;i<=NF;++i)print $i}')
ALBUM=$(echo "$songdata" | grep "album" | head -1 | awk -F ': ' '{for(i=2;i<=NF;++i)print $i}' | tr '\n' ' ')
ARTIST=$(trim "$ARTIST")
ALBUM=$(trim "$ALBUM")
EscapedArtist=$(echo "$ARTIST" | sed -e 's/[/()&]//g')
EscapedAlbum=$(echo "$ALBUM" | sed -e 's/[/()&]//g')
cachecover=$(printf "%s/%s-%s-album.jpg" "$cachedir" "$EscapedArtist" "$EscapedAlbum")
cacheartist=$(printf "%s/%s-artist.jpg" "$cachedir" "$EscapedArtist")
#Adding in glyrc search for artist image...
if [ ! -f "$cacheartist" ];then
# Try to fetch an artist photo from the listed providers into a temp file,
# then normalise it to JPEG at the cache location.
glyrc artistphoto --timeout 15 --artist "$ARTIST" --album "$ALBUM" --write "$TMPDIR/artist.tmp" --from "discogs;lastfm;bbcmusic;rhapsody;singerpictures"
if [ -f "$TMPDIR/artist.tmp" ];then
convert "$TMPDIR/artist.tmp" "$cacheartist"
# Bug fix: the temp file written above is artist.tmp, not artist.jpg —
# the old `rm "$TMPDIR/artist.jpg"` removed a nonexistent file and left
# artist.tmp behind until the later cleanup block.
rm "$TMPDIR/artist.tmp"
fi
fi
if [ ! -f "$cacheartist" ];then
echo "Trying deezer..."
API_URL="https://api.deezer.com/search/artist?q=$EscapedArtist" && API_URL=${API_URL//' '/'%20'}
IMG_URL=$(curl -s "$API_URL" | jq -r '.data[0] | .picture_big ')
#deezer outputs a wonky url if there's no image match, this checks for it.
# https://e-cdns-images.dzcdn.net/images/artist//500x500-000000-80-0-0.jpg
check=$(awk 'BEGIN{print gsub(ARGV[2],"",ARGV[1])}' "$IMG_URL" "//")
if [ "$check" != "1" ]; then
IMG_URL=""
fi
if [ ! -z "$LastfmAPIKey" ] && [ -z "$IMG_URL" ];then # deezer first, then lastfm
echo "Trying lastfm..."
METHOD=artist.getinfo
API_URL="https://ws.audioscrobbler.com/2.0/?method=$METHOD&artist=$EscapedArtist&api_key=$LastfmAPIKey&format=json" && API_URL=${API_URL//' '/'%20'}
IMG_URL=$(curl -s "$API_URL" | jq -r ' .artist | .image ' | grep -B1 -w "extralarge" | grep -v "extralarge" | awk -F '"' '{print $4}')
fi
if [ ! -z "$IMG_URL" ];then
tempartist=$(mktemp)
wget -q "$IMG_URL" -O "$tempartist"
bob=$(file "$tempartist" | head -1) #It really is an image
sizecheck=$(wc -c "$tempartist" | awk '{print $1}')
# This test is because I *HATE* last.fm's default artist image
if [[ "$bob" == *"image data"* ]];then
if [ "$sizecheck" != "4195" ];then
convert "$tempartist" "$cacheartist"
rm "$tempartist"
fi
fi
fi
fi
if [ ! -f "$cachecover" ];then
ln -s "$fullpath/cover.jpg" "$cachecover"
fi
ARTIST=""
if [ -f "$TMPDIR/artist.tmp" ];then
rm "$TMPDIR/artist.tmp"
fi
if [ -f "$tempartist" ];then
rm "$tempartist"
fi
fi
fi
done < "$dirlist"
IFS=$SAVEIFS
|
/*
 * socket_output.h
 *
 * Created on: May 30, 2018
 * by: <NAME>
 *
 * Public interface of the socket output module: an opaque context created
 * from a socket_output_conf_t, used to send string messages, and released
 * when done.
 */
#ifndef SRC_MODULES_OUTPUT_SOCKET_OUTPUT_H_
#define SRC_MODULES_OUTPUT_SOCKET_OUTPUT_H_
#include "../../../configure.h"
/* Opaque handle; the struct definition lives in the implementation file. */
typedef struct socket_output_struct socket_output_t;
/* Allocates and initializes a context from the given configuration.
 * Presumably returns NULL on failure — confirm in socket_output.c. */
socket_output_t* socket_output_init( const socket_output_conf_t *config );
/* Sends a NUL-terminated message through the context; the bool result
 * presumably signals success — confirm in socket_output.c. */
bool socket_output_send( socket_output_t *context, const char *msg );
/* Frees all resources owned by the context. */
void socket_output_release( socket_output_t *context );
#endif /* SRC_MODULES_OUTPUT_SOCKET_OUTPUT_H_ */
|
# coding: utf-8
from flask import Flask

# Minimal placeholder site: a single route serving an "under construction"
# message (Portuguese).
app = Flask(__name__)

@app.route('/')
def home():
    # Landing page placeholder text.
    return 'Site em Construção'

if __name__ == '__main__':
    # Development server only; use a WSGI server in production.
    app.run()
|
import React from 'react';
import { IProps } from './types';
import Tutorial from '@root/containers/Pages/Game/Tutorial';
import Win from '@root/containers/Pages/Game/Win';
import Field from '@root/containers/Pages/Game/Field';
import If from '@root/components/If';
import s from './styles.scss';
/**
 * Top-level game page content: shows the tutorial before the game starts,
 * the win screen after a successful finish, and the matrix field otherwise.
 */
const Content: React.FC<IProps> = ({
  level,
  startTime,
  endTime,
  field,
  setLevel,
  setStartTime,
  setEndTime,
}) => {
  // Game phase flags derived from the two timestamps.
  const awaitingStart = startTime === 0;
  const finished = endTime > 0;
  const surrendered = startTime === endTime;

  return (
    <div id="content" className={s.content}>
      {/* Tutorial */}
      <If condition={awaitingStart}>
        <Tutorial setStartTime={setStartTime} />
      </If>
      {/* Win*/}
      <If condition={finished && !surrendered}>
        <div className={s.fadeIn}>
          <Win
            level={level}
            startTime={startTime}
            endTime={endTime}
            setLevel={setLevel}
            setStartTime={setStartTime}
            setEndTime={setEndTime}
          />
        </div>
      </If>
      {/* Matrix field */}
      <If condition={!finished || surrendered}>
        <Field field={field} level={level} startTime={startTime} endTime={endTime} setEndTime={setEndTime} />
      </If>
    </div>
  );
};
export default Content;
|
package testpb
import (
"fmt"
"github.com/stretchr/testify/require"
"google.golang.org/protobuf/proto"
"google.golang.org/protobuf/reflect/protoreflect"
"google.golang.org/protobuf/runtime/protoiface"
"google.golang.org/protobuf/runtime/protoimpl"
"google.golang.org/protobuf/types/dynamicpb"
"math"
"pgregory.net/rapid"
"testing"
)
// TestProtoMethods runs the property-based round-trip checks, one subtest
// per fast-path proto method.
func TestProtoMethods(t *testing.T) {
	checks := []struct {
		name string
		prop func(*rapid.T)
	}{
		{"testSize", testSize},
		{"testMarshal", testMarshal},
		{"testUnmarshal", testUnmarshal},
	}
	for _, c := range checks {
		t.Run(c.name, rapid.MakeCheck(c.prop))
	}
}
// testSize verifies that the fast-path Size method agrees with proto.Size
// computed over a wire-equivalent dynamicpb message.
func testSize(t *rapid.T) {
	generated := getRapidMsg(t)
	reflected := generated.ProtoReflect()

	dynamic := dynamicpb.NewMessage(md_A)
	populateDynamicMsg(dynamic, reflected)

	got := reflected.ProtoMethods().Size(protoiface.SizeInput{Message: reflected})
	want := proto.Size(dynamic)
	require.Equal(t, want, got.Size)
}
// testMarshal verifies that deterministic marshaling of the fast-path
// message yields exactly the canonical bytes produced for the equivalent
// dynamicpb message.
func testMarshal(t *rapid.T) {
	generated := getRapidMsg(t)
	reflected := generated.ProtoReflect()

	dynamic := dynamicpb.NewMessage(md_A)
	populateDynamicMsg(dynamic, reflected)

	got, err := proto.MarshalOptions{Deterministic: true}.Marshal(reflected.Interface())
	require.NoError(t, err)
	want, err := proto.MarshalOptions{Deterministic: true}.Marshal(dynamic)
	require.NoError(t, err)
	require.Equal(t, want, got)
}
// testUnmarshal round-trips: marshal a wire-equivalent dynamicpb message,
// unmarshal the bytes into a fresh A, and require equality with the source.
func testUnmarshal(t *rapid.T) {
	original := getRapidMsg(t)
	reflected := original.ProtoReflect()

	dynamic := dynamicpb.NewMessage(md_A)
	populateDynamicMsg(dynamic, reflected)

	bz, err := proto.MarshalOptions{Deterministic: true}.Marshal(dynamic)
	require.NoError(t, err)

	decoded := A{}
	decodedReflect := decoded.ProtoReflect()
	// All options at their zero values: strict, non-merging unmarshal.
	err = proto.UnmarshalOptions{}.Unmarshal(bz, decodedReflect.Interface())
	require.NoError(t, err)
	require.True(t, proto.Equal(reflected.Interface(), decodedReflect.Interface()),
		fmt.Sprintf("left: %+v\nright:%+v", reflected, decodedReflect))
}
// TestNegativeZero ensures the generated fast path serializes doubles to the
// same bytes as dynamicpb — including IEEE-754 negative zero, whose bit
// pattern differs from +0 even though -0 == 0 numerically, so it must not be
// dropped as a zero-value field.
func TestNegativeZero(t *testing.T) {
	testCases := []struct {
		name string
		value float64
	}{
		{
			name: "negative 0",
			// math.Copysign is the only way to spell -0.0 in Go source;
			// the literal -0 is just 0.
			value: math.Copysign(0, -1),
		},
		{
			name: "negative float",
			value: -0.420,
		},
		{
			name: "regular zero",
			value: 0,
		},
	}
	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			a := A{}
			a.DOUBLE = tc.value
			dyn := dynamicpb.NewMessage(md_A)
			dyn.Set(fd_A_DOUBLE, protoreflect.ValueOfFloat64(tc.value))
			bz, err := proto.MarshalOptions{Deterministic: true}.Marshal(dyn)
			require.NoError(t, err)
			bz2, err := proto.Marshal(a.ProtoReflect().Interface())
			require.NoError(t, err)
			require.Equal(t, bz, bz2)
		})
	}
}
// populateDynamicMsg copies every populated field of msg into dyn so the two
// messages are wire-equivalent. Maps and lists are rebuilt element by
// element because their concrete implementations differ between the fast
// path and dynamicpb.
func populateDynamicMsg(dyn *dynamicpb.Message, msg protoreflect.Message) {
	msg.Range(func(descriptor protoreflect.FieldDescriptor, value protoreflect.Value) bool {
		switch {
		case descriptor.IsMap():
			dynMap := dyn.Mutable(descriptor).Map()
			value.Map().Range(func(key protoreflect.MapKey, v protoreflect.Value) bool {
				dynMap.Set(key, v)
				return true
			})
			// Bug fix: set the field being ranged over, not the hard-coded
			// fd_A_MAP descriptor — the old code only worked because A
			// happens to have a single map field.
			dyn.Set(descriptor, protoreflect.ValueOfMap(dynMap))
		case descriptor.IsList():
			dynList := dyn.Mutable(descriptor).List()
			underlying := value.List()
			for i := 0; i < underlying.Len(); i++ {
				dynList.Append(underlying.Get(i))
			}
			dyn.Set(descriptor, protoreflect.ValueOfList(dynList))
		default:
			dyn.Set(descriptor, value)
		}
		return true
	})
}
// getRapidMsg draws a pseudo-random, fully populated A covering every scalar
// kind plus string/bytes, a nested message, a list, a oneof, a map and a
// repeated enum — the field names mirror their wire types.
func getRapidMsg(t *rapid.T) A {
	return A{
		Enum: Enumeration(rapid.IntRange(0, 1).Draw(t, "enum").(int)),
		SomeBoolean: rapid.Bool().Draw(t, "SomeBool").(bool),
		INT32: rapid.Int32().Draw(t, "INT32").(int32),
		SINT32: rapid.Int32().Draw(t, "SINT32").(int32),
		UINT32: rapid.Uint32().Draw(t, "UINT32").(uint32),
		INT64: rapid.Int64().Draw(t, "INT64").(int64),
		SING64: rapid.Int64().Draw(t, "SING64").(int64),
		UINT64: rapid.Uint64().Draw(t, "UINT64").(uint64),
		SFIXED32: rapid.Int32().Draw(t, "SFIXED32").(int32),
		FIXED32: rapid.Uint32().Draw(t, "FIXED32").(uint32),
		FLOAT: rapid.Float32().Draw(t, "FLOAT").(float32),
		SFIXED64: rapid.Int64().Draw(t, "SFIXED64").(int64),
		FIXED64: rapid.Uint64().Draw(t, "FIXED64").(uint64),
		DOUBLE: rapid.Float64().Draw(t, "DOUBLE").(float64),
		STRING: rapid.String().Draw(t, "STRING").(string),
		BYTES: rapid.SliceOf(rapid.Byte()).Draw(t, "byte slice").([]byte),
		MESSAGE: genMessageB.Draw(t, "MESSAGE").(*B),
		LIST: rapid.SliceOf(genMessageB).Draw(t, "LIST").([]*B),
		ONEOF: genOneOf.Draw(t, "one of").(isA_ONEOF),
		MAP: rapid.MapOf(rapid.String(), genMessageB).Draw(t, "map[string]*B").(map[string]*B),
		LIST_ENUM: rapid.SliceOf(genEnumSlice).Draw(t, "slice enum").([]Enumeration),
	}
}
// genEnumSlice draws one valid Enumeration value (element generator for the
// LIST_ENUM field).
var genEnumSlice = rapid.Custom(func(t *rapid.T) Enumeration {
	n := rapid.Int32Range(0, 1).Draw(t, "int32").(int32)
	return Enumeration(n)
})

// genOneOf picks one of the two oneof arms at random.
var genOneOf = rapid.Custom(func(t *rapid.T) isA_ONEOF {
	oneof := rapid.OneOf(genOneOfB, genOneOfString).Draw(t, "oneof").(isA_ONEOF)
	return oneof
})

// genOneOfB wraps a random B message in the message arm of the oneof.
var genOneOfB = rapid.Custom(func(t *rapid.T) *A_ONEOF_B {
	return &A_ONEOF_B{ONEOF_B: genMessageB.Draw(t, "message B in one of").(*B)}
})

// genOneOfString wraps a non-empty random string in the string arm; a
// minimum length of 1 keeps the arm distinguishable from an unset oneof.
var genOneOfString = rapid.Custom(func(t *rapid.T) *A_ONEOF_STRING {
	return &A_ONEOF_STRING{ONEOF_STRING: rapid.StringN(1, -1, -1).Draw(t, "string in one of").(string)}
})

// genMessageB builds a B with a random X and explicitly zeroed internal
// protobuf bookkeeping fields.
var genMessageB = rapid.Custom(func(t *rapid.T) *B {
	msg := B{
		state: protoimpl.MessageState{},
		sizeCache: 0,
		unknownFields: nil,
		X: rapid.String().Draw(t, "X").(string),
	}
	return &msg
})
|
#!/bin/bash
#
# Install script to install the operator
#
set -o errexit
set -o nounset
set -o pipefail

operator=ibm-spectrum-scale-csi-operator
# Directory containing this script (kept for parity with sibling scripts).
installDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"

#$APP_TEST_LIBRARY_FUNCTIONS/operatorInstall.sh \
#    --cr $CV_TEST_BUNDLE_DIR/operators/${operator}/deploy/crds/csiscaleoperators.csi.ibm.com_cr.yaml

# Quote the path so a bundle directory containing spaces does not word-split;
# `nounset` above already guarantees CV_TEST_BUNDLE_DIR is defined.
kubectl apply -f "$CV_TEST_BUNDLE_DIR/operators/${operator}/deploy/crds/csiscaleoperators.csi.ibm.com_cr.yaml"
|
#!/usr/bin/env bash
set -euo pipefail
shopt -s nullglob
cd "$(dirname "$0")"

# Usage: <script> DEVICE BUILD_ID
[[ $# -eq 2 ]] || exit 1
DEVICE=$1
BUILD=$2
MOUNT_ROOT="mount"
DL_ROOT="dl"

# Partitions whose images are extracted and mounted read-only below.
blob_partitions=(system product system_ext vendor)

# Prints an info message in light green
function msg() {
echo -e "\e[1;32m$*\e[0m"
}

# Prints an error in light red
function err() {
echo -e "\e[1;31m$*\e[0m"
}

# Prints a warning in light yellow
function warn() {
echo -e "\e[1;33m$*\e[0m"
}

# Kills any background jobs still running; installed as the EXIT trap so the
# downloader subshell cannot outlive the script.
function kill_jobs() {
jobs -p | xargs -I{} kill -- {}
}

# Silent pushd/popd wrappers (suppress directory-stack output).
function pushd() {
command pushd "$@" > /dev/null
}

function popd() {
command popd "$@" > /dev/null
}

trap kill_jobs EXIT

# Downloads the factory image via android-prepare-vendor.
# NOTE(review): the echo uses the globals $BUILD/$DEVICE while the command
# uses the local parameters — same values today, but confirm before reuse.
function download_factory() {
local device="$1"
local build_id="$2"
local dl_dir="$3"

echo " - Factory $BUILD image for $DEVICE"
../android-prepare-vendor/scripts/download-nexus-image.sh -y -d "$device" -b "$build_id" -o "$dl_dir"
}
(
msg "Downloading $DEVICE $BUILD images"
dl_dir="$DL_ROOT"
mkdir -p "$dl_dir"
# nullglob (enabled at the top of the script) makes this expand to an empty
# array when nothing matches, replacing the fragile `ls | grep | wc -l`
# pipeline (which breaks on unusual filenames and forks three processes).
existing=("$dl_dir/$DEVICE-$BUILD"*)
if [[ ${#existing[@]} -ge 1 ]]; then
echo "Skipping download, already present"
else
download_factory "$DEVICE" "$BUILD" "$dl_dir"
fi
) &
wait
dl_dir="$DL_ROOT"

msg "Extracting images for $DEVICE $BUILD"
dev_mount="$MOUNT_ROOT"

# Extract zip (and delete if necessary)
if [[ ! -d "$dl_dir/$DEVICE-$BUILD" ]]; then
unzip -d "$dl_dir" "$dl_dir/$DEVICE-"*.zip
fi

pushd "$dl_dir/$DEVICE-"*/

build_parts=("${blob_partitions[@]}")
for part in "${build_parts[@]}"
do
msg " - $part"
# Pull the partition image out of the inner image-*.zip only once.
if [[ ! -f "$part.img" ]]; then
unzip "image-$DEVICE"*.zip "$part.img"
fi
# Sparse images must be converted before they can be loop-mounted; raw
# images are simply hard-linked under the .img.raw name.
if [[ ! -f "$part.img.raw" ]]; then
if file "$part.img" | grep -q 'Android sparse image'; then
simg2img "$part.img" "$part.img.raw"
else
ln "$part.img" "$part.img.raw"
fi
fi
mountpoint="$dev_mount/$part"
sudo mkdir -p "$mountpoint"
# Unmount if already mounted
if mountpoint -q "$mountpoint"; then
sudo umount "$mountpoint"
fi
sudo mount -o ro "$part.img.raw" "$mountpoint"
done
popd
|
import mongoose from 'mongoose';
/*
  Job Model

  A uniquely named job grouping a set of axes by their ObjectIds.
*/
// Construct the schema explicitly with `new`: calling `mongoose.Schema(...)`
// as a plain function relies on a legacy self-newing shim; `new` is the
// documented usage and behaves identically.
export const JobSchema = new mongoose.Schema(
  {
    name: {
      type: String,
      required: true,
      trim: true,
      unique: true
    },
    // References to the Axis documents associated with this job.
    axesIds: [mongoose.Schema.Types.ObjectId]
  }
);

const Job = mongoose.model('Job', JobSchema);

export default Job;
|
import React, { useRef, useCallback, useEffect } from 'react'
import PropTypes from 'prop-types'
import { isSingleValue, createVisualization } from '@dhis2/analytics'
// Renders an analytics visualization into a local <div> and reports the
// generated output back through onChartGenerated: the visualization config
// object for single-value types, an exported SVG string for everything else.
const ChartPlugin = ({
    visualization,
    responses,
    extraOptions,
    legendSets,
    id: renderCounter,
    style,
    onChartGenerated,
    animation: defaultAnimation,
}) => {
    // DOM node the chart library draws into.
    const canvasRef = useRef(undefined)
    // Previous values, used below to redraw only on real changes.
    const prevStyle = useRef(style)
    const prevRenderCounter = useRef(renderCounter)

    const renderVisualization = useCallback(
        animation => {
            const visualizationConfig = createVisualization(
                responses,
                visualization,
                canvasRef.current,
                {
                    ...extraOptions,
                    animation,
                    legendSets,
                },
                undefined,
                undefined,
                isSingleValue(visualization.type) ? 'dhis' : 'highcharts' // output format
            )

            if (isSingleValue(visualization.type)) {
                // Single-value "charts" have no SVG; hand back the config.
                onChartGenerated(visualizationConfig.visualization)
            } else {
                // Export a fixed-size SVG snapshot for downstream consumers.
                onChartGenerated(
                    visualizationConfig.visualization.getSVGForExport({
                        sourceHeight: 768,
                        sourceWidth: 1024,
                    })
                )
            }
        },
        [
            canvasRef,
            visualization,
            onChartGenerated,
            responses,
            extraOptions,
            legendSets,
        ]
    )

    // Data changed: full re-render with the default animation.
    useEffect(() => {
        renderVisualization(defaultAnimation)
        /* eslint-disable-next-line react-hooks/exhaustive-deps */
    }, [visualization, responses, extraOptions])

    // Parent bumped the render counter: forced redraw without animation.
    useEffect(() => {
        if (renderCounter !== prevRenderCounter.current) {
            renderVisualization(0)

            prevRenderCounter.current = renderCounter
        }
        /* eslint-disable-next-line react-hooks/exhaustive-deps */
    }, [renderCounter])

    // Resize: redraw (no animation) only when width/height actually changed.
    useEffect(() => {
        if (
            style.width !== prevStyle.current.width ||
            style.height !== prevStyle.current.height
        ) {
            renderVisualization(0)

            prevStyle.current = style
        }
        /* eslint-disable-next-line react-hooks/exhaustive-deps */
    }, [style])

    return <div ref={canvasRef} style={style} />
}
ChartPlugin.defaultProps = {
    // Removed the stray `filters: {}` default: the prop is not declared in
    // propTypes and is never read by the component.
    visualization: {},
    style: {},
    animation: 200,
    id: null,
    onChartGenerated: Function.prototype,
}

ChartPlugin.propTypes = {
    extraOptions: PropTypes.object.isRequired,
    legendSets: PropTypes.arrayOf(PropTypes.object).isRequired,
    responses: PropTypes.arrayOf(PropTypes.object).isRequired,
    visualization: PropTypes.object.isRequired,
    animation: PropTypes.number,
    id: PropTypes.number,
    style: PropTypes.object,
    onChartGenerated: PropTypes.func,
}
export default ChartPlugin
|
#!/usr/bin/env bash

## Create temporary folder
dir=$(mktemp -d)

## Clone the repo (shallow clone: only the latest snapshot is needed)
git clone https://github.com/kamranahmedse/git-standup.git --depth=1 "${dir}" || { echo >&2 "Clone failed with $?"; exit 1; }

cd "${dir}"

## Bug fix: this step is the install, not the clone — report it as such.
make install || { echo >&2 "Install failed with $?"; exit 1; }

cd ..

## Clean up the temporary folder
rm -rf "${dir}"
|
#!/bin/bash

# Install python (non-interactive -y so the script can run unattended)
sudo apt-get install -y python3

# Install pip for python3
sudo apt-get install -y python3-pip

# install chromedriver
# Download the Chrome Driver matching Chrome 92
wget -O /tmp/chromedriver.zip http://chromedriver.storage.googleapis.com/`curl -sS chromedriver.storage.googleapis.com/LATEST_RELEASE_92.0.4515`/chromedriver_linux64.zip
unzip /tmp/chromedriver.zip chromedriver -d ./

# install dependencies with the python3 pip so they land in the interpreter
# installed above (bare `pip` may point at python2 on older systems)
python3 -m pip install -r requirements.txt
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for USN-2927-1
#
# Security announcement date: 2016-03-14 00:00:00 UTC
# Script generation date: 2017-01-01 21:05:15 UTC
#
# Operating System: Ubuntu 14.04 LTS
# Architecture: i686
#
# Vulnerable packages fix on version:
# - libgraphite2-3:1.3.6-1ubuntu0.14.04.1
#
# Last versions recommended by the security team:
# - libgraphite2-3:1.3.6-1ubuntu0.14.04.1
#
# CVE List:
# - CVE-2016-1977
# - CVE-2016-2790
# - CVE-2016-2791
# - CVE-2016-2792
# - CVE-2016-2793
# - CVE-2016-2794
# - CVE-2016-2795
# - CVE-2016-2796
# - CVE-2016-2797
# - CVE-2016-2798
# - CVE-2016-2799
# - CVE-2016-2800
# - CVE-2016-2801
# - CVE-2016-2802
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE

# Pin-upgrade the vulnerable package to the fixed version; --only-upgrade
# leaves systems that do not have the package installed untouched.
sudo apt-get install --only-upgrade libgraphite2-3=1.3.6-1ubuntu0.14.04.1 -y
|
<filename>app/src/main/java/com/ulfy/master/ui/custom_dkplayer/cache/PreloadTask.java<gh_stars>10-100
package com.ulfy.master.ui.custom_dkplayer.cache;
import com.danikula.videocache.HttpProxyCacheServer;
import com.dueeeke.videoplayer.util.L;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.concurrent.ExecutorService;
/**
 * Runnable that warms the video cache: it streams the first
 * {@code PreloadManager.PRELOAD_LENGTH} bytes of a URL through the local
 * {@link HttpProxyCacheServer} so playback can start instantly later.
 */
public class PreloadTask implements Runnable {

    /** Raw (un-proxied) media URL. */
    public String mRawUrl;

    /** Position of the item in the list (used for logging only here). */
    public int mPosition;

    /** Local VideoCache proxy server that persists the streamed bytes. */
    public HttpProxyCacheServer mCacheServer;

    /** Whether this task has been cancelled. */
    private boolean mIsCanceled;

    /** Whether this task has been submitted and is pending/running. */
    private boolean mIsExecuted;

    @Override
    public void run() {
        if (!mIsCanceled) {
            start();
        }
        // Reset the flags so the task object can be reused.
        mIsExecuted = false;
        mIsCanceled = false;
    }

    /**
     * Performs the preload: reads up to PRELOAD_LENGTH bytes through the
     * cache proxy, deleting the cache file if no data arrived at all.
     */
    private void start() {
        L.i("开始预加载:" + mPosition);
        HttpURLConnection connection = null;
        InputStream in = null;
        try {
            // Resolve the proxy URL so the bytes flow through (and populate)
            // the HttpProxyCacheServer cache.
            String proxyUrl = mCacheServer.getProxyUrl(mRawUrl);
            URL url = new URL(proxyUrl);
            connection = (HttpURLConnection) url.openConnection();
            connection.setConnectTimeout(5_000);
            connection.setReadTimeout(5_000);
            in = new BufferedInputStream(connection.getInputStream());
            int length;
            int read = -1;
            byte[] bytes = new byte[8 * 1024];
            while ((length = in.read(bytes)) != -1) {
                read += length;
                // Stop once enough is cached or the task was cancelled.
                if (mIsCanceled || read >= PreloadManager.PRELOAD_LENGTH) {
                    L.i("结束预加载:" + mPosition);
                    break;
                }
            }
            if (read == -1) { // no bytes at all: the preload failed, drop the (likely corrupt) cache file
                L.i("预加载失败:" + mPosition);
                File cacheFile = mCacheServer.getCacheFile(mRawUrl);
                if (cacheFile.exists()) {
                    cacheFile.delete();
                }
            }
        } catch (Exception e) {
            L.i("异常结束预加载:" + mPosition, e);
        } finally {
            // Bug fix: the input stream was never closed before, leaking the
            // underlying socket/file descriptor on every preload.
            if (in != null) {
                try {
                    in.close();
                } catch (Exception ignored) {
                    // best-effort close
                }
            }
            if (connection != null) {
                connection.disconnect();
            }
        }
    }

    /**
     * Submits this task to the pool unless it is already queued/running.
     */
    public void executeOn(ExecutorService executorService) {
        if (mIsExecuted) return;
        mIsExecuted = true;
        executorService.submit(this);
    }

    /**
     * Cancels the preload (only meaningful while it is executing).
     */
    public void cancel() {
        if (mIsExecuted) {
            mIsCanceled = true;
        }
    }
}
|
<reponame>isandlaTech/cohorte-runtime
package org.psem2m.isolates.loggers;
/**
 * Exception type of the loggers module. Both constructors take a
 * {@link String#format(String, Object...)} pattern plus arguments and build
 * the message from them.
 *
 * @author ogattaz
 *
 */
public class CLoggersException extends Exception {

	/**
	 * Serialization id ({@link Exception} is {@code Serializable}).
	 */
	private static final long serialVersionUID = 1109486025824328252L;

	/**
	 * Builds the exception with a formatted message.
	 *
	 * @param aFormat a String.format pattern
	 * @param aArgs the format arguments
	 */
	public CLoggersException(final String aFormat, final Object... aArgs) {
		super(String.format(aFormat, aArgs));
	}

	/**
	 * Builds the exception with a formatted message and an underlying cause.
	 *
	 * @param aCause the cause to chain
	 * @param aFormat a String.format pattern
	 * @param aArgs the format arguments
	 */
	public CLoggersException(final Throwable aCause, final String aFormat,
			final Object... aArgs) {
		super(String.format(aFormat, aArgs), aCause);
	}
}
|
set -e

# Ping every internal health endpoint; `set -e` plus `curl --fail` aborts
# the script on the first endpoint that is not healthy.
while IFS='|' read -r label path; do
  echo "$label"
  curl --fail "http://localhost:8080$path"
  echo ""
done <<'EOF'
Registry|/v1/_internal_ping
Verbs|/c1/_internal_ping
Security scan|/secscan/_internal_ping
Web|/_internal_ping
EOF
|
<reponame>Martin-BG/Softuni-Java-MVC-Spring-Feb-2019
package org.softuni.residentevil.domain.validation.annotations.composite.user;
import javax.validation.Constraint;
import javax.validation.Payload;
import javax.validation.constraints.NotBlank;
import javax.validation.constraints.Size;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import static java.lang.annotation.ElementType.*;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
* Cannot be blank, length {@value MIN_LENGTH}-{@value MAX_LENGTH}
*/
@NotBlank(message = "{user.username.blank}")
@Size(message = "{user.username.length}", min = ValidUserUsername.MIN_LENGTH, max = ValidUserUsername.MAX_LENGTH)
@Target({METHOD, FIELD, ANNOTATION_TYPE, CONSTRUCTOR, PARAMETER})
@Retention(RUNTIME)
@Constraint(validatedBy = {})
@Documented
public @interface ValidUserUsername {

    // Bounds referenced by the composed @Size constraint above.
    int MIN_LENGTH = 1;
    int MAX_LENGTH = 32;

    // Empty default: validation messages come from the composed constraints.
    String message() default "";

    Class<?>[] groups() default {};

    Class<? extends Payload>[] payload() default {};
}
|
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.modules.lecture.ui.blockimport;
import org.olat.core.gui.components.form.flexible.impl.elements.table.DefaultFlexiTableDataModel;
import org.olat.core.gui.components.form.flexible.impl.elements.table.FlexiSortableColumnDef;
import org.olat.core.gui.components.form.flexible.impl.elements.table.FlexiTableColumnModel;
/**
*
* Initial date: 15 Oct 2018<br>
* @author srosse, <EMAIL>, http://www.frentix.com
*
*/
/**
 * Read-only table model for the lecture-block import overview: each column
 * exposes one property of an {@link ImportedLectureBlock}.
 */
public class OverviewLectureBlocksDataModel extends DefaultFlexiTableDataModel<ImportedLectureBlock> {

	public OverviewLectureBlocksDataModel(FlexiTableColumnModel columnModel) {
		super(columnModel);
	}

	@Override
	public Object getValueAt(int row, int col) {
		ImportedLectureBlock block = getObject(row);
		switch(BlockCols.values()[col]) {
			// Cells that return the whole block let the cell renderer decide
			// what to display (status icon, teacher/participant lists).
			case status: return block;
			case externalId: return block.getLectureBlock().getExternalId();
			case title: return block.getLectureBlock().getTitle();
			case plannedLectures: return block.getLectureBlock().getPlannedLecturesNumber();
			// NOTE(review): "date" falls through to "startTime" — both columns
			// render the start date (presumably formatted differently by the
			// renderers); confirm the fall-through is intentional.
			case date:
			case startTime: return block.getLectureBlock().getStartDate();
			case endTime: return block.getLectureBlock().getEndDate();
			case compulsory: return block.getLectureBlock().isCompulsory();
			case teachers: return block;
			case participants: return block;
			case location: return block.getLectureBlock().getLocation();
			case description: return block.getLectureBlock().getDescription();
			case preparation: return block.getLectureBlock().getPreparation();
			case comment: return block.getLectureBlock().getComment();
			default: return "ERROR";
		}
	}

	/**
	 * Columns of the overview table; the i18n key doubles as the header label,
	 * and the enum name is used as the sort key.
	 */
	public enum BlockCols implements FlexiSortableColumnDef {
		status("table.header.import.status"),
		externalId("table.header.external.ref"),
		title("lecture.title"),
		plannedLectures("table.header.planned.lectures"),
		date("table.header.date"),
		startTime("table.header.start.time"),
		endTime("table.header.end.time"),
		compulsory("table.header.compulsory"),
		teachers("table.header.teachers"),
		participants("table.header.participants"),
		location("table.header.location"),
		description("table.header.description"),
		preparation("table.header.preparation"),
		comment("table.header.comment");

		private final String i18nKey;

		private BlockCols(String i18nKey) {
			this.i18nKey = i18nKey;
		}

		@Override
		public String i18nHeaderKey() {
			return i18nKey;
		}

		@Override
		public boolean sortable() {
			return true;
		}

		@Override
		public String sortKey() {
			return name();
		}
	}
}
|
import React, { useState, useCallback } from 'react';
import Dragula from 'react-dragula';
// Minimal drag-and-drop demo: two columns rendered from React state, with
// Dragula attached to the shared container.
const Example = () => {
    const [sourceItems, setSourceItems] = useState([
        'Item 1',
        'Item 2',
        'Item 3',
        'Item 4'
    ]);
    const [targetItems, setTargetItems] = useState([]);

    // Create dragula instance & set event listeners
    // NOTE(review): the Dragula instance is never destroyed, and the callback
    // depends on sourceItems, so a new instance can be created on re-render
    // while old ones leak — confirm whether .destroy() cleanup is needed.
    // NOTE(review): drags only move DOM nodes; sourceItems/targetItems are
    // never updated (setSourceItems/setTargetItems are unused), so React
    // state and the DOM can diverge after a drop.
    const dragulaDecorator = useCallback(
        (componentBackingInstance) => {
            if (componentBackingInstance) {
                let options = { };
                Dragula([componentBackingInstance], options);
            }
        },
        [sourceItems]
    );

    return (
        <div>
            <div ref={dragulaDecorator} className="drag-container">
                <div>
                    <h2>Source</h2>
                    {sourceItems.map(item => (
                        <div key={item}>{item}</div>
                    ))}
                </div>
                <div>
                    <h2>Target</h2>
                    {targetItems.map(item => (
                        <div key={item}>{item}</div>
                    ))}
                </div>
            </div>
        </div>
    );
};
export default Example; |
# models.py
import pickle

class ClassificationModel(models.Model):
    # NOTE(review): `models` is not imported in this snippet — presumably
    # django.db.models; confirm against the real module.
    # The pickle is loaded once at class-definition (import) time, and the
    # file handle passed to pickle.load is never closed.
    # SECURITY: pickle.load executes arbitrary code if classifier.pkl is ever
    # attacker-controlled — this file must stay trusted.
    clf = pickle.load(open("classifier.pkl", "rb"))

# views.py
@app.route('/classify', methods=['POST'])
def classify_text():
    # NOTE(review): `app` and `request` are undefined here (a Flask-style
    # route next to a Django-style model) — confirm the intended framework.
    classification_model = ClassificationModel.objects.get(id=1)
    prediction = classification_model.clf.predict(request.data['text'])
return {'prediction': prediction} |
/// <summary>
/// Returns the centroid of the cluster at <paramref name="index"/>.
/// NOTE(review): the declared return type is KCentroid&lt;TScalar, TKernel&gt;
/// but the method builds and returns a DataPoint — this cannot compile as
/// written; confirm the intended return type.
/// </summary>
public KCentroid<TScalar, TKernel> GetKCentroid(int index)
{
    // Reject out-of-range cluster indices up front.
    if (!(0 <= index && index < this.NumberOfCenters))
        throw new ArgumentOutOfRangeException();

    // Retrieve the data points belonging to the cluster at the given index
    List<DataPoint> clusterDataPoints = GetClusterDataPoints(index);

    // Calculate the mean position of all the data points to obtain the centroid
    DataPoint centroid = CalculateCentroid(clusterDataPoints);

    return centroid;
}

private List<DataPoint> GetClusterDataPoints(int index)
{
    // NOTE(review): placeholder body — as written the method has no return
    // statement and will not compile; it must return the cluster's points.
    // Implement the logic to retrieve the data points belonging to the cluster at the given index
    // Return the list of data points for the cluster
}
private DataPoint CalculateCentroid(List<DataPoint> dataPoints)
{
// Implement the logic to calculate the mean position of all the data points
// Return the calculated centroid
} |
#!/bin/bash
set -x

# Compile every .cpp and .c file in the tree, then link the objects into
# build_speedbuild. Object files are cached beside their sources as
# <file>.cpp.o / <file>.c.o.
CXXFLAGS=" -Iminibu -Isrc -Isrc/functions -Isrc/views -Isrc/conditions -Isrc/misc"
CXX="g++";
FLEX="flex";
BISON="bison";

# Generate the lexer and parser first.
$FLEX src/build.l
cd src; $BISON build.y; cd ..

# Feed the loops via process substitution instead of a pipe: a piped `while`
# runs in a subshell, so its `exit 1` could never abort this script.
while read -r srcfile; do
	# Skip sources whose object file already exists (simple build cache).
	if [ -f "$srcfile.o" ]; then continue; fi
	"$CXX" $CXXFLAGS -c "$srcfile" -o "$srcfile.o" || exit 1
done < <(find . -name "*.cpp")

while read -r srcfile; do
	# Bug fix: the old `if [ $? != 0 ]; then exit $?` always exited with 0
	# because the `[` test itself reset $? before it was reused.
	"$CXX" $CXXFLAGS -c "$srcfile" -o "$srcfile.o" || exit 1
done < <(find . -name "*.c")

# Gather all object files and link them together (unquoted on purpose so the
# newline-separated list word-splits into individual arguments).
OFILES="$(find . -name '*.o')"
$CXX -o build_speedbuild $OFILES
|
def sort_descending(nums):
    """Sort ``nums`` in place into descending order via selection sort.

    Args:
        nums: A mutable list of comparable values; it is modified in place.

    Returns:
        The same list object, now ordered largest-first.
    """
    for i, _ in enumerate(nums):
        # Locate the largest element remaining in the unsorted tail nums[i:].
        max_index = i
        for j in range(i + 1, len(nums)):
            if nums[j] > nums[max_index]:
                max_index = j
        # Move it to the front of the unsorted region (tuple swap).
        nums[i], nums[max_index] = nums[max_index], nums[i]
    return nums
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package teorica;
/**
*
* @author PauloCésar
*/
public class Horista extends Empregado{

    // Hourly rate and hours worked in the pay period.
    // Bug fix: these fields previously had no setters (or constructor), so
    // they could never be assigned and vencimento() always returned 0.
    // Accessors are additive, so existing callers are unaffected.
    private double precoHora, horasTrabalhadas;

    public double getPrecoHora() {
        return precoHora;
    }

    public void setPrecoHora(double precoHora) {
        this.precoHora = precoHora;
    }

    public double getHorasTrabalhadas() {
        return horasTrabalhadas;
    }

    public void setHorasTrabalhadas(double horasTrabalhadas) {
        this.horasTrabalhadas = horasTrabalhadas;
    }

    /**
     * Pay = hourly rate times hours worked.
     */
    @Override
    public double vencimento() {
        return precoHora*horasTrabalhadas;
    }
}
|
package models
import collection.JavaConversions._
import org.jsoup.Jsoup
import com.mongodb.casbah.Imports._
// Resolves the equity holdings (ticker list) of an iShares index fund,
// caching results in the local MongoDB "indexholdings" collection.
object IndexPortfolio {
  // NOTE(review): collection.JavaConversions (imported above) is deprecated
  // in modern Scala — JavaConverters is the supported replacement.
  val indexholdings = MongoConnection()("typedynamic")("indexholdings")
  def apply(symbol: String): List[String] = {
    val query = MongoDBObject("symbol" -> symbol)
    indexholdings.findOne(query) match {
      // Cache hit: return the stored ticker list.
      case Some(obj) => return obj.as[List[String]]("holdings")
      case None => {
        // Cache miss: scrape the fund's holdings table from us.ishares.com.
        // NOTE(review): the URL and DOM selectors are brittle and may have
        // changed on the live site — verify before relying on this path.
        val infodoc = Jsoup.
          connect("http://us.ishares.com/product_info/fund/holdings/" +
            symbol + ".htm").get
        val holdings = infodoc.select("#holdings-eq #holdings-table-body tr #holding-ticker").map(_.text).toList
        // Persist for future lookups before returning.
        indexholdings += MongoDBObject("symbol" -> symbol, "holdings" -> holdings)
        return holdings
      }
    }
  }
}
|
<filename>archguard/src/api/scanner/evaluations.ts
import axios from "../axios";
/**
 * Kicks off a scanner evaluation of the given type; the server reports
 * whether an evaluation run is now in progress.
 */
export function createEvaluation(type: string) {
  const data = { type };
  return axios<{ isRunning: boolean }>({
    method: "POST",
    url: '/api/scanner/evaluations',
    data,
  });
}
|
import java.applet.Applet;
import net.runelite.mapping.Export;
import net.runelite.mapping.ObfuscatedName;
import net.runelite.mapping.ObfuscatedSignature;
@ObfuscatedName("ab")
public class class44 {
	// Deobfuscated RuneLite client class: member names and garbage values come
	// from the obfuscation mappings, so the members are unrelated utilities
	// grouped here by the obfuscator, not by design.

	@ObfuscatedName("f")
	@Export("applet")
	public static Applet applet;

	@ObfuscatedName("o")
	public static String field294;

	static {
		applet = null; // L: 10
		field294 = ""; // L: 11
	}

	// Extracts bits 14-15 of the viewport-mouse entity tag at index var0 —
	// presumably a 2-bit type/kind field; confirm against the mappings.
	@ObfuscatedName("p")
	@ObfuscatedSignature(
		descriptor = "(II)I",
		garbageValue = "1811738574"
	)
	public static int method519(int var0) {
		long var2 = ViewportMouse.ViewportMouse_entityTags[var0]; // L: 61
		int var1 = (int)(var2 >>> 14 & 3L); // L: 63
		return var1; // L: 65
	}

	// Converts var0 to a string in radix var1 (2..36). When var2 is true and
	// var0 is non-negative, the digits are built manually with a leading '+';
	// otherwise it defers to Integer.toString(var0, var1).
	@ObfuscatedName("k")
	@ObfuscatedSignature(
		descriptor = "(IIZI)Ljava/lang/String;",
		garbageValue = "-1536836078"
	)
	static String method521(int var0, int var1, boolean var2) {
		if (var1 >= 2 && var1 <= 36) { // L: 140
			if (var2 && var0 >= 0) { // L: 141
				// Count digits: 1 for the '+' sign, 1 for the last digit,
				// plus one per remaining division by the radix.
				int var3 = 2; // L: 142
				for (int var4 = var0 / var1; var4 != 0; ++var3) { // L: 143 144 146
					var4 /= var1; // L: 145
				}

				char[] var5 = new char[var3]; // L: 148
				var5[0] = '+'; // L: 149

				// Fill digits right-to-left; 87/48 offsets map to 'a'-'z'
				// and '0'-'9' respectively.
				for (int var6 = var3 - 1; var6 > 0; --var6) { // L: 150
					int var7 = var0; // L: 151
					var0 /= var1; // L: 152
					int var8 = var7 - var0 * var1; // L: 153
					if (var8 >= 10) { // L: 154
						var5[var6] = (char)(var8 + 87);
					} else {
						var5[var6] = (char)(var8 + 48); // L: 155
					}
				}

				return new String(var5); // L: 157
			} else {
				return Integer.toString(var0, var1);
			}
		} else {
			throw new IllegalArgumentException("" + var1);
		}
	}

	// Returns the count of the message following the one with id var0 in the
	// message queue, or -1 when var0 is unknown or last — confirm semantics
	// against the Messages class.
	@ObfuscatedName("g")
	@ObfuscatedSignature(
		descriptor = "(II)I",
		garbageValue = "1967957574"
	)
	static int method520(int var0) {
		Message var1 = (Message)Messages.Messages_hashTable.get((long)var0); // L: 66
		if (var1 == null) { // L: 67
			return -1;
		} else {
			return var1.nextDual == Messages.Messages_queue.sentinel ? -1 : ((Message)var1.nextDual).count; // L: 68 69
		}
	}
}
|
<filename>src/org/lwjglx/util/glu/GLU.java<gh_stars>1-10
/*
* Copyright (c) 2002-2008 LWJGL Project
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'LWJGL' nor the names of
* its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.lwjglx.util.glu;
import java.nio.ByteBuffer;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import static org.lwjgl.opengl.GL11.*;
import org.lwjglx.util.glu.tessellation.GLUtessellatorImpl;
/**
* GLU.java
* <p>
* <p>
* Created 23-dec-2003
*
* @author <NAME>
*/
/**
 * Static facade over the GLU utility routines (projection helpers, mipmap
 * generation, tessellation) that delegates to {@code Project}, {@code MipMap},
 * {@code Registry} and the tessellation implementation.
 */
public class GLU {

    static final float PI = (float) Math.PI;

    /* Errors: (return value 0 = no error) */
    public static final int GLU_INVALID_ENUM = 100900;
    public static final int GLU_INVALID_VALUE = 100901;
    public static final int GLU_OUT_OF_MEMORY = 100902;
    public static final int GLU_INCOMPATIBLE_GL_VERSION = 100903;

    /* StringName */
    public static final int GLU_VERSION = 100800;
    public static final int GLU_EXTENSIONS = 100801;

    /* Boolean */
    public static final boolean GLU_TRUE = true;
    public static final boolean GLU_FALSE = false;

    /** ** Quadric constants *** */

    /* QuadricNormal */
    public static final int GLU_SMOOTH = 100000;
    public static final int GLU_FLAT = 100001;
    public static final int GLU_NONE = 100002;

    /* QuadricDrawStyle */
    public static final int GLU_POINT = 100010;
    public static final int GLU_LINE = 100011;
    public static final int GLU_FILL = 100012;
    public static final int GLU_SILHOUETTE = 100013;

    /* QuadricOrientation */
    public static final int GLU_OUTSIDE = 100020;
    public static final int GLU_INSIDE = 100021;

    /* Callback types: */
    /* ERROR = 100103 */

    /** ** Tesselation constants *** */
    public static final double GLU_TESS_MAX_COORD = 1.0e150;
    public static final double TESS_MAX_COORD = 1.0e150;

    /* TessProperty */
    public static final int GLU_TESS_WINDING_RULE = 100140;
    public static final int GLU_TESS_BOUNDARY_ONLY = 100141;
    public static final int GLU_TESS_TOLERANCE = 100142;

    /* TessWinding */
    public static final int GLU_TESS_WINDING_ODD = 100130;
    public static final int GLU_TESS_WINDING_NONZERO = 100131;
    public static final int GLU_TESS_WINDING_POSITIVE = 100132;
    public static final int GLU_TESS_WINDING_NEGATIVE = 100133;
    public static final int GLU_TESS_WINDING_ABS_GEQ_TWO = 100134;

    /* TessCallback */
    public static final int GLU_TESS_BEGIN = 100100;
    /* void (CALLBACK*)(GLenum type) */
    public static final int GLU_TESS_VERTEX = 100101;
    /* void (CALLBACK*)(void *data) */
    public static final int GLU_TESS_END = 100102;
    /* void (CALLBACK*)(void) */
    public static final int GLU_TESS_ERROR = 100103;
    /* void (CALLBACK*)(GLenum errno) */
    public static final int GLU_TESS_EDGE_FLAG = 100104;
    /* void (CALLBACK*)(GLboolean boundaryEdge) */
    public static final int GLU_TESS_COMBINE = 100105;
    /* void (CALLBACK*)(GLdouble coords[3],
     * void *data[4],
     * GLfloat weight[4],
     * void **dataOut) */
    public static final int GLU_TESS_BEGIN_DATA = 100106;
    /* void (CALLBACK*)(GLenum type,
     * void *polygon_data) */
    public static final int GLU_TESS_VERTEX_DATA = 100107;
    /* void (CALLBACK*)(void *data,
     * void *polygon_data) */
    public static final int GLU_TESS_END_DATA = 100108;
    /* void (CALLBACK*)(void *polygon_data) */
    public static final int GLU_TESS_ERROR_DATA = 100109;
    /* void (CALLBACK*)(GLenum errno,
     * void *polygon_data) */
    public static final int GLU_TESS_EDGE_FLAG_DATA = 100110;
    /* void (CALLBACK*)(GLboolean boundaryEdge,
     * void *polygon_data) */
    public static final int GLU_TESS_COMBINE_DATA = 100111;
    /* void (CALLBACK*)(GLdouble coords[3],
     * void *data[4],
     * GLfloat weight[4],
     * void **dataOut,
     * void *polygon_data) */

    /* TessError */
    public static final int GLU_TESS_ERROR1 = 100151;
    public static final int GLU_TESS_ERROR2 = 100152;
    public static final int GLU_TESS_ERROR3 = 100153;
    public static final int GLU_TESS_ERROR4 = 100154;
    public static final int GLU_TESS_ERROR5 = 100155;
    public static final int GLU_TESS_ERROR6 = 100156;
    public static final int GLU_TESS_ERROR7 = 100157;
    public static final int GLU_TESS_ERROR8 = 100158;
    public static final int GLU_TESS_MISSING_BEGIN_POLYGON = GLU_TESS_ERROR1;
    public static final int GLU_TESS_MISSING_BEGIN_CONTOUR = GLU_TESS_ERROR2;
    public static final int GLU_TESS_MISSING_END_POLYGON = GLU_TESS_ERROR3;
    public static final int GLU_TESS_MISSING_END_CONTOUR = GLU_TESS_ERROR4;
    public static final int GLU_TESS_COORD_TOO_LARGE = GLU_TESS_ERROR5;
    public static final int GLU_TESS_NEED_COMBINE_CALLBACK = GLU_TESS_ERROR6;

    /** ** NURBS constants *** */

    /* NurbsProperty */
    public static final int GLU_AUTO_LOAD_MATRIX = 100200;
    public static final int GLU_CULLING = 100201;
    public static final int GLU_SAMPLING_TOLERANCE = 100203;
    public static final int GLU_DISPLAY_MODE = 100204;
    public static final int GLU_PARAMETRIC_TOLERANCE = 100202;
    public static final int GLU_SAMPLING_METHOD = 100205;
    public static final int GLU_U_STEP = 100206;
    public static final int GLU_V_STEP = 100207;

    /* NurbsSampling */
    public static final int GLU_PATH_LENGTH = 100215;
    public static final int GLU_PARAMETRIC_ERROR = 100216;
    public static final int GLU_DOMAIN_DISTANCE = 100217;

    /* NurbsTrim */
    public static final int GLU_MAP1_TRIM_2 = 100210;
    public static final int GLU_MAP1_TRIM_3 = 100211;

    /* NurbsDisplay */
    /* FILL = 100012 */
    public static final int GLU_OUTLINE_POLYGON = 100240;
    public static final int GLU_OUTLINE_PATCH = 100241;

    /* NurbsCallback */
    /* ERROR = 100103 */

    /* NurbsErrors */
    public static final int GLU_NURBS_ERROR1 = 100251;
    public static final int GLU_NURBS_ERROR2 = 100252;
    public static final int GLU_NURBS_ERROR3 = 100253;
    public static final int GLU_NURBS_ERROR4 = 100254;
    public static final int GLU_NURBS_ERROR5 = 100255;
    public static final int GLU_NURBS_ERROR6 = 100256;
    public static final int GLU_NURBS_ERROR7 = 100257;
    public static final int GLU_NURBS_ERROR8 = 100258;
    public static final int GLU_NURBS_ERROR9 = 100259;
    public static final int GLU_NURBS_ERROR10 = 100260;
    public static final int GLU_NURBS_ERROR11 = 100261;
    public static final int GLU_NURBS_ERROR12 = 100262;
    public static final int GLU_NURBS_ERROR13 = 100263;
    public static final int GLU_NURBS_ERROR14 = 100264;
    public static final int GLU_NURBS_ERROR15 = 100265;
    public static final int GLU_NURBS_ERROR16 = 100266;
    public static final int GLU_NURBS_ERROR17 = 100267;
    public static final int GLU_NURBS_ERROR18 = 100268;
    public static final int GLU_NURBS_ERROR19 = 100269;
    public static final int GLU_NURBS_ERROR20 = 100270;
    public static final int GLU_NURBS_ERROR21 = 100271;
    public static final int GLU_NURBS_ERROR22 = 100272;
    public static final int GLU_NURBS_ERROR23 = 100273;
    public static final int GLU_NURBS_ERROR24 = 100274;
    public static final int GLU_NURBS_ERROR25 = 100275;
    public static final int GLU_NURBS_ERROR26 = 100276;
    public static final int GLU_NURBS_ERROR27 = 100277;
    public static final int GLU_NURBS_ERROR28 = 100278;
    public static final int GLU_NURBS_ERROR29 = 100279;
    public static final int GLU_NURBS_ERROR30 = 100280;
    public static final int GLU_NURBS_ERROR31 = 100281;
    public static final int GLU_NURBS_ERROR32 = 100282;
    public static final int GLU_NURBS_ERROR33 = 100283;
    public static final int GLU_NURBS_ERROR34 = 100284;
    public static final int GLU_NURBS_ERROR35 = 100285;
    public static final int GLU_NURBS_ERROR36 = 100286;
    public static final int GLU_NURBS_ERROR37 = 100287;

    /* Contours types -- obsolete! */
    public static final int GLU_CW = 100120;
    public static final int GLU_CCW = 100121;
    public static final int GLU_INTERIOR = 100122;
    public static final int GLU_EXTERIOR = 100123;
    public static final int GLU_UNKNOWN = 100124;

    /* Names without "TESS_" prefix */
    public static final int GLU_BEGIN = GLU_TESS_BEGIN;
    public static final int GLU_VERTEX = GLU_TESS_VERTEX;
    public static final int GLU_END = GLU_TESS_END;
    public static final int GLU_ERROR = GLU_TESS_ERROR;
    public static final int GLU_EDGE_FLAG = GLU_TESS_EDGE_FLAG;

    /**
     * Method gluLookAt. Builds a viewing matrix from an eye point, a reference
     * point and an up vector; delegates to {@code Project.gluLookAt}.
     *
     * @param eyex
     * @param eyey
     * @param eyez
     * @param centerx
     * @param centery
     * @param centerz
     * @param upx
     * @param upy
     * @param upz
     */
    public static void gluLookAt(
            float eyex,
            float eyey,
            float eyez,
            float centerx,
            float centery,
            float centerz,
            float upx,
            float upy,
            float upz) {
        Project.gluLookAt(eyex, eyey, eyez, centerx, centery, centerz, upx, upy, upz);
    }

    /**
     * Method gluOrtho2D. 2D orthographic projection: glOrtho with the near/far
     * planes fixed at -1 and 1.
     *
     * @param left
     * @param right
     * @param bottom
     * @param top
     */
    public static void gluOrtho2D(
            float left,
            float right,
            float bottom,
            float top) {
        glOrtho(left, right, bottom, top, -1.0, 1.0);
    }

    /**
     * Method gluPerspective
     *
     * @param fovy
     * @param aspect
     * @param zNear
     * @param zFar
     */
    public static void gluPerspective(
            float fovy,
            float aspect,
            float zNear,
            float zFar) {
        Project.gluPerspective(fovy, aspect, zNear, zFar);
    }

    /**
     * Method gluProject
     *
     * @param objx
     * @param objy
     * @param objz
     * @param modelMatrix
     * @param projMatrix
     * @param viewport
     * @param win_pos
     */
    public static boolean gluProject(float objx, float objy, float objz,
            FloatBuffer modelMatrix,
            FloatBuffer projMatrix,
            IntBuffer viewport,
            FloatBuffer win_pos) {
        return Project.gluProject(objx, objy, objz, modelMatrix, projMatrix, viewport, win_pos);
    }

    /**
     * Method gluUnproject
     *
     * @param winx
     * @param winy
     * @param winz
     * @param modelMatrix
     * @param projMatrix
     * @param viewport
     * @param obj_pos
     */
    public static boolean gluUnProject(float winx, float winy, float winz,
            FloatBuffer modelMatrix,
            FloatBuffer projMatrix,
            IntBuffer viewport,
            FloatBuffer obj_pos) {
        return Project.gluUnProject(winx, winy, winz, modelMatrix, projMatrix, viewport, obj_pos);
    }

    /**
     * Method gluPickMatrix
     *
     * @param x
     * @param y
     * @param width
     * @param height
     * @param viewport
     */
    public static void gluPickMatrix(
            float x,
            float y,
            float width,
            float height,
            IntBuffer viewport) {
        Project.gluPickMatrix(x, y, width, height, viewport);
    }

    /**
     * Method gluGetString.
     *
     * @param name
     * @return String
     */
    public static String gluGetString(int name) {
        return Registry.gluGetString(name);
    }

    /**
     * Method gluCheckExtension.
     *
     * @param extName
     * @param extString
     * @return boolean
     */
    public static boolean gluCheckExtension(String extName, String extString) {
        return Registry.gluCheckExtension(extName, extString);
    }

    /**
     * Method gluBuild2DMipmaps
     *
     * @param target
     * @param components
     * @param width
     * @param height
     * @param format
     * @param type
     * @param data
     * @return int
     */
    public static int gluBuild2DMipmaps(
            int target,
            int components,
            int width,
            int height,
            int format,
            int type,
            ByteBuffer data) {
        return MipMap.gluBuild2DMipmaps(target, components, width, height, format, type, data);
    }

    /**
     * Method gluScaleImage.
     *
     * @param format
     * @param widthIn
     * @param heightIn
     * @param typeIn
     * @param dataIn
     * @param widthOut
     * @param heightOut
     * @param typeOut
     * @param dataOut
     * @return int
     */
    public static int gluScaleImage(
            int format,
            int widthIn,
            int heightIn,
            int typeIn,
            ByteBuffer dataIn,
            int widthOut,
            int heightOut,
            int typeOut,
            ByteBuffer dataOut) {
        return MipMap.gluScaleImage(format, widthIn, heightIn, typeIn, dataIn, widthOut, heightOut, typeOut, dataOut);
    }

    /**
     * Translates a GLU error code into a human-readable message.
     *
     * @param error_code one of the GLU_* error constants
     * @return the error description
     * @throws RuntimeException if the code is not a known GLU error constant
     */
    public static String gluErrorString(int error_code) {
        switch (error_code) {
            case GLU_INVALID_ENUM:
                return "Invalid enum (glu)";
            case GLU_INVALID_VALUE:
                return "Invalid value (glu)";
            case GLU_OUT_OF_MEMORY:
                return "Out of memory (glu)";
            default:
                // return Util.translateGLErrorString(error_code);
                // The LWJGL2 fallback above is unavailable here; include the
                // offending code in the message instead of the former
                // placeholder text "Q".
                throw new RuntimeException("Unknown GLU error code: " + error_code);
        }
    }

    /** Creates a new tessellator instance. */
    public static GLUtessellator gluNewTess() {
        return new GLUtessellatorImpl();
    }
}
|
<gh_stars>10-100
#include <stdlib.h>
#include <string.h>
#include <stdint.h>
#include <rtthread.h>
#include "paho_mqtt.h"
/**
 * MQTT URI format:
 * domain mode
 * tcp://iot.eclipse.org:1883
 *
 * ipv4 mode
 * tcp://192.168.10.1:1883
 * ssl://192.168.10.1:1884
 *
 * ipv6 mode
 * tcp://[fe80::20c:29ff:fe9a:a07e]:1883
 * ssl://[fe80::20c:29ff:fe9a:a07e]:1884
 */
#define MQTT_URI                "tcp://iot.eclipse.org:1883"
#define MQTT_CLIENTID           "rtthread-mqtt"
#define MQTT_USERNAME           "admin"
/* NOTE(review): "<PASSWORD>" is a redacted placeholder — substitute the real
 * broker password before building. */
#define MQTT_PASSWORD           "<PASSWORD>"
#define MQTT_SUBTOPIC           "/mqtt/test"
#define MQTT_PUBTOPIC           "/mqtt/test"
#define MQTT_WILLMSG            "Goodbye!"
/* define MQTT client context */
static MQTTClient client;
/* Called for messages matching the subscribed topic filter. */
static void mqtt_sub_callback(MQTTClient *c, MessageData *msg_data)
{
    /*
     * The %.*s conversions bound the output by the explicit lengths, so the
     * payload does not need a terminating NUL. The previous code wrote '\0'
     * at payload[payloadlen], which writes one byte past the received data
     * and can overflow the receive buffer when a message exactly fills it.
     */
    rt_kprintf("mqtt sub callback: %.*s %.*s\n",
               msg_data->topicName->lenstring.len,
               msg_data->topicName->lenstring.data,
               msg_data->message->payloadlen,
               (char *)msg_data->message->payload);
}
/* Called for messages that match no configured topic filter. */
static void mqtt_sub_default_callback(MQTTClient *c, MessageData *msg_data)
{
    /*
     * %.*s already bounds the output by the explicit lengths; the previous
     * write of '\0' at payload[payloadlen] was one byte past the received
     * data and could overflow the receive buffer.
     */
    rt_kprintf("mqtt sub default callback: %.*s %.*s\n",
               msg_data->topicName->lenstring.len,
               msg_data->topicName->lenstring.data,
               msg_data->message->payloadlen,
               (char *)msg_data->message->payload);
}
/* Invoked when the client starts connecting to the broker. */
static void mqtt_connect_callback(MQTTClient *c)
{
    /* fixed "inter" -> "enter" typo in the log message */
    rt_kprintf("enter mqtt_connect_callback!\n");
}
/* Invoked once the client is connected and online. */
static void mqtt_online_callback(MQTTClient *c)
{
    /* fixed "inter" -> "enter" typo in the log message */
    rt_kprintf("enter mqtt_online_callback!\n");
}
/* Invoked when the client loses its connection to the broker. */
static void mqtt_offline_callback(MQTTClient *c)
{
    /* fixed "inter" -> "enter" typo in the log message */
    rt_kprintf("enter mqtt_offline_callback!\n");
}
/**
* This function create and config a mqtt client.
*
* @param void
*
* @return none
*/
static void mq_start(void)
{
/* init condata param by using MQTTPacket_connectData_initializer */
MQTTPacket_connectData condata = MQTTPacket_connectData_initializer;
static int is_started = 0;
if (is_started)
{
return;
}
/* config MQTT context param */
{
client.uri = MQTT_URI;
/* config connect param */
memcpy(&client.condata, &condata, sizeof(condata));
client.condata.clientID.cstring = MQTT_CLIENTID;
client.condata.keepAliveInterval = 60;
client.condata.cleansession = 1;
client.condata.username.cstring = MQTT_USERNAME;
client.condata.password.cstring = <PASSWORD>_PASSWORD;
/* config MQTT will param. */
client.condata.willFlag = 1;
client.condata.will.qos = 1;
client.condata.will.retained = 0;
client.condata.will.topicName.cstring = MQTT_PUBTOPIC;
client.condata.will.message.cstring = MQTT_WILLMSG;
/* malloc buffer. */
client.buf_size = client.readbuf_size = 1024;
client.buf = malloc(client.buf_size);
client.readbuf = malloc(client.readbuf_size);
if (!(client.buf && client.readbuf))
{
rt_kprintf("no memory for MQTT client buffer!\n");
goto _exit;
}
/* set event callback function */
client.connect_callback = mqtt_connect_callback;
client.online_callback = mqtt_online_callback;
client.offline_callback = mqtt_offline_callback;
/* set subscribe table and event callback */
client.messageHandlers[0].topicFilter = MQTT_SUBTOPIC;
client.messageHandlers[0].callback = mqtt_sub_callback;
client.messageHandlers[0].qos = QOS1;
/* set default subscribe event callback */
client.defaultMessageHandler = mqtt_sub_default_callback;
}
/* run mqtt client */
paho_mqtt_start(&client);
is_started = 1;
_exit:
return;
}
/**
* This function publish message to specific mqtt topic.
*
* @param send_str publish message
*
* @return none
*/
static void mq_publish(const char *send_str)
{
    MQTTMessage msg;

    /* Publish `send_str` to the fixed demo topic, QoS1, not retained. */
    msg.qos = QOS1;
    msg.retained = 0;
    msg.payload = (void *)send_str;
    msg.payloadlen = strlen(send_str);

    MQTTPublish(&client, MQTT_PUBTOPIC, &msg);
}
#ifdef RT_USING_FINSH
#include <finsh.h>
/* Expose the demo entry points to the classic finsh shell. */
FINSH_FUNCTION_EXPORT(mq_start, startup mqtt client);
FINSH_FUNCTION_EXPORT(mq_publish, publish mqtt msg);
#ifdef FINSH_USING_MSH
MSH_CMD_EXPORT(mq_start, startup mqtt client);
/* msh command: publish argv[1] to the demo topic. Requires exactly one
 * argument. */
int mq_pub(int argc, char **argv)
{
    if (argc != 2)
    {
        /* previous message claimed "more than two" parameters even when too
         * few were given; print accurate usage instead */
        rt_kprintf("Usage: mq_pub <message>\n");
        return 0;
    }

    mq_publish(argv[1]);

    return 0;
}
/* Register the argc/argv wrapper with the msh shell. */
MSH_CMD_EXPORT(mq_pub, publish mqtt msg);
#endif /* FINSH_USING_MSH */
#endif /* RT_USING_FINSH */
|
<gh_stars>0
import { RuleConfigurationOverride } from '../../../support/Rule'
import BaseConfiguration from '../eslint/no-unused-vars.d'
/**
 * Configuration type for the `unused-imports/no-unused-vars` rule
 * (eslint-plugin-unused-imports), derived from the core ESLint
 * `no-unused-vars` configuration via RuleConfigurationOverride.
 */
type Configuration = RuleConfigurationOverride<BaseConfiguration, 'unused-imports/no-unused-vars', 'eslint-plugin-unused-imports'>
export default Configuration
|
#!/bin/bash
# Copyright (c) The Diem Core Contributors
# SPDX-License-Identifier: Apache-2.0
# This script sets up the environment for the Diem build by installing necessary dependencies.
#
# Usage ./dev_setup.sh <options>
# v - verbose, print all statements
# Assumptions for nix systems:
# 1 The running user is the user who will execute the builds.
# 2 .profile will be used to configure the shell
# 3 ${HOME}/bin/ is expected to be on the path - hashicorp tools/hadolint/etc. will be installed there on linux systems.
# Pinned versions for every tool this script installs.
SHELLCHECK_VERSION=0.7.1
HADOLINT_VERSION=1.17.4
SCCACHE_VERSION=0.2.14-alpha.0
#If installing sccache from a git repo set url@revision.
SCCACHE_GIT='https://github.com/rexhoffman/sccache.git@549babdd3866aa60dae01668c42ee00bf1e8c763'
KUBECTL_VERSION=1.18.6
TERRAFORM_VERSION=0.12.26
HELM_VERSION=3.2.4
VAULT_VERSION=1.5.0
Z3_VERSION=4.8.9
CVC4_VERSION=aac53f51
DOTNET_VERSION=3.1
BOOGIE_VERSION=2.7.35
# Run everything from the repository root (one level above this script).
SCRIPT_PATH="$( cd "$( dirname "$0" )" >/dev/null 2>&1 && pwd )"
cd "$SCRIPT_PATH/.." || exit
# Print command-line help. ("miminal" typo fixed to "minimal".)
function usage {
  echo "Usage:"
  echo "Installs or updates necessary dev tools for diem/diem."
  echo "-b batch mode, no user interactions and minimal output"
  echo "-p update ${HOME}/.profile"
  echo "-t install build tools"
  echo "-o install operations tooling as well: helm, terraform, hadolint, yamllint, vault, docker, kubectl, python3"
  echo "-y installs or updates Move prover tools: z3, cvc4, dotnet, boogie"
  echo "-v verbose mode"
  echo "If no toolchain component is selected with -t, -o, -y, or -p, the behavior is as if -t had been provided."
  echo "This command must be called from the root folder of the Diem project."
}
# Evaluate a shell statement now and persist it to ~/.profile (deduplicated).
function add_to_profile {
  eval "$1"
  # Append only when no existing .profile line matches the statement.
  if ! grep -q "$1" "${HOME}/.profile"; then
    echo "$1" >> "${HOME}"/.profile
  fi
}
function update_path_and_profile {
  # Ensure ~/.profile and ~/bin exist, put ~/bin and cargo's bin on PATH,
  # and (for -y runs) export the env vars the Move prover tools expect.
  touch "${HOME}"/.profile
  mkdir -p "${HOME}"/bin
  add_to_profile "export PATH=\"${HOME}/bin:${HOME}/.cargo/bin:\$PATH\""
  if [[ "$INSTALL_PROVER" == "true" ]]; then
    add_to_profile "export DOTNET_ROOT=\$HOME/.dotnet"
    add_to_profile "export PATH=\"${HOME}/.dotnet/tools:\$PATH\""
    add_to_profile "export Z3_EXE=$HOME/bin/z3"
    add_to_profile "export CVC4_EXE=$HOME/bin/cvc4"
    add_to_profile "export BOOGIE_EXE=$HOME/.dotnet/tools/boogie"
  fi
}
# Install the distro-specific meta-package(s) providing compilers and make.
function install_build_essentials {
  PACKAGE_MANAGER=$1
  case "$PACKAGE_MANAGER" in
    apt-get)
      install_pkg build-essential "$PACKAGE_MANAGER"
      ;;
    pacman)
      install_pkg base-devel "$PACKAGE_MANAGER"
      ;;
    apk)
      install_pkg alpine-sdk "$PACKAGE_MANAGER"
      install_pkg coreutils "$PACKAGE_MANAGER"
      ;;
    yum|dnf)
      install_pkg gcc "$PACKAGE_MANAGER"
      install_pkg gcc-c++ "$PACKAGE_MANAGER"
      install_pkg make "$PACKAGE_MANAGER"
      ;;
  esac
  #if [[ "$PACKAGE_MANAGER" == "brew" ]]; then
  #  install_pkg pkgconfig "$PACKAGE_MANAGER"
  #fi
}
function install_rustup {
  # Install rustup (with the stable toolchain) unless it is already on PATH;
  # in either case make cargo's bin directory usable for the rest of this run.
  BATCH_MODE=$1
  # Install Rust
  [[ "${BATCH_MODE}" == "false" ]] && echo "Installing Rust......"
  if rustup --version &>/dev/null; then
    [[ "${BATCH_MODE}" == "false" ]] && echo "Rust is already installed"
  else
    curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain stable
    PATH="${HOME}/.cargo/bin:${PATH}"
  fi
}
function install_hadolint {
  # Download the pinned hadolint release binary into ~/bin when it is not
  # already on PATH, then print its version.
  if ! command -v hadolint &> /dev/null; then
    export HADOLINT=${HOME}/bin/hadolint
    curl -sL -o "$HADOLINT" "https://github.com/hadolint/hadolint/releases/download/v${HADOLINT_VERSION}/hadolint-$(uname -s)-$(uname -m)" && chmod 700 "$HADOLINT"
  fi
  hadolint -v
}
function install_vault {
  # Install the pinned HashiCorp Vault release into ~/bin unless the exact
  # pinned version is already installed.
  VERSION=$(vault --version)
  if [[ "$VERSION" != "Vault v${VAULT_VERSION}" ]]; then
    MACHINE=$(uname -m);
    if [[ $MACHINE == "x86_64" ]]; then
      MACHINE="amd64"
    fi
    TMPFILE=$(mktemp)
    curl -sL -o "$TMPFILE" "https://releases.hashicorp.com/vault/${VAULT_VERSION}/vault_${VAULT_VERSION}_$(uname -s | tr '[:upper:]' '[:lower:]')_${MACHINE}.zip"
    unzip -qq -d "${HOME}"/bin/ "$TMPFILE"
    rm "$TMPFILE"
    chmod +x "${HOME}"/bin/vault
  fi
  vault --version
}
function install_helm {
  # Install helm if missing: via brew on macOS, otherwise by downloading the
  # pinned release tarball and copying the binary into ~/bin.
  if ! command -v helm &> /dev/null; then
    if [[ $(uname -s) == "Darwin" ]]; then
      install_pkg helm brew
    else
      MACHINE=$(uname -m);
      if [[ $MACHINE == "x86_64" ]]; then
        MACHINE="amd64"
      fi
      # mktemp creates a file; replace it with a directory of the same name.
      TMPFILE=$(mktemp)
      rm "$TMPFILE"
      mkdir -p "$TMPFILE"/
      curl -sL -o "$TMPFILE"/out.tar.gz "https://get.helm.sh/helm-v${HELM_VERSION}-$(uname -s | tr '[:upper:]' '[:lower:]')-${MACHINE}.tar.gz"
      tar -zxvf "$TMPFILE"/out.tar.gz -C "$TMPFILE"/
      cp "${TMPFILE}/$(uname -s | tr '[:upper:]' '[:lower:]')-${MACHINE}/helm" "${HOME}/bin/helm"
      rm -rf "$TMPFILE"
      chmod +x "${HOME}"/bin/helm
    fi
  fi
}
function install_terraform {
  # Install the pinned terraform release unless that exact version is already
  # installed: via tfenv on macOS, otherwise from the release zip into ~/bin.
  VERSION=$(terraform --version | head -1)
  if [[ "$VERSION" != "Terraform v${TERRAFORM_VERSION}" ]]; then
    if [[ $(uname -s) == "Darwin" ]]; then
      install_pkg tfenv brew
      tfenv install ${TERRAFORM_VERSION}
      tfenv use ${TERRAFORM_VERSION}
    else
      MACHINE=$(uname -m);
      if [[ $MACHINE == "x86_64" ]]; then
        MACHINE="amd64"
      fi
      TMPFILE=$(mktemp)
      curl -sL -o "$TMPFILE" "https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_$(uname -s | tr '[:upper:]' '[:lower:]')_${MACHINE}.zip"
      unzip -qq -d "${HOME}"/bin/ "$TMPFILE"
      rm "$TMPFILE"
      chmod +x "${HOME}"/bin/terraform
      terraform --version
    fi
  fi
}
function install_kubectl {
  # Install the pinned kubectl release unless that exact client version is
  # already installed: via brew on macOS, otherwise from the release URL.
  # Fixed: `kubectl version client` is not a valid invocation — the flag is
  # `--client` (bare "client" is treated as an unexpected argument).
  VERSION=$(kubectl version --client --short=true | head -1)
  if [[ "$VERSION" != "Client Version: v${KUBECTL_VERSION}" ]]; then
    if [[ $(uname -s) == "Darwin" ]]; then
      install_pkg kubectl brew
    else
      MACHINE=$(uname -m);
      if [[ $MACHINE == "x86_64" ]]; then
        MACHINE="amd64"
      fi
      curl -sL -o "${HOME}"/bin/kubectl "https://storage.googleapis.com/kubernetes-release/release/v${KUBECTL_VERSION}/bin/$(uname -s | tr '[:upper:]' '[:lower:]')/${MACHINE}/kubectl"
      chmod +x "${HOME}"/bin/kubectl
    fi
  fi
  kubectl version --client --short=true | head -1
}
function install_awscli {
  # Install the AWS CLI v2 if missing: via brew on macOS, otherwise using the
  # official bundled installer into ~/.local with a shim in ~/bin.
  if ! command -v aws &> /dev/null; then
    if [[ $(uname -s) == "Darwin" ]]; then
      install_pkg awscli brew
    else
      MACHINE=$(uname -m);
      # mktemp creates a file; replace it with a work directory.
      TMPFILE=$(mktemp)
      rm "$TMPFILE"
      mkdir -p "$TMPFILE"/work/
      curl -sL -o "$TMPFILE"/aws.zip "https://awscli.amazonaws.com/awscli-exe-$(uname -s | tr '[:upper:]' '[:lower:]')-${MACHINE}.zip"
      unzip -qq -d "$TMPFILE"/work/ "$TMPFILE"/aws.zip
      mkdir -p "${HOME}"/.local/
      "$TMPFILE"/work/aws/install -i "${HOME}"/.local/aws-cli -b "${HOME}"/bin
      rm -rf "$TMPFILE"
    fi
  fi
  aws --version
}
# Install a single package with the given package manager, skipping when a
# binary of the same name is already on PATH. Non-root users get sudo for
# managers that require it.
function install_pkg {
  package=$1
  PACKAGE_MANAGER=$2
  PRE_COMMAND=()
  if [ "$(whoami)" != 'root' ]; then
    PRE_COMMAND=(sudo)
  fi
  if which "$package" &>/dev/null; then
    echo "$package is already installed"
  else
    echo "Installing ${package}."
    if [[ "$PACKAGE_MANAGER" == "yum" ]]; then
      "${PRE_COMMAND[@]}" yum install "${package}" -y
    elif [[ "$PACKAGE_MANAGER" == "apt-get" ]]; then
      "${PRE_COMMAND[@]}" apt-get install "${package}" --no-install-recommends -y
    elif [[ "$PACKAGE_MANAGER" == "pacman" ]]; then
      "${PRE_COMMAND[@]}" pacman -Syu "$package" --noconfirm
    elif [[ "$PACKAGE_MANAGER" == "apk" ]]; then
      apk --update add --no-cache "${package}"
    elif [[ "$PACKAGE_MANAGER" == "dnf" ]]; then
      # Previously ran without sudo and without -y, which fails for non-root
      # users and prompts in batch mode; align with the yum branch.
      "${PRE_COMMAND[@]}" dnf install "$package" -y
    elif [[ "$PACKAGE_MANAGER" == "brew" ]]; then
      brew install "$package"
    fi
  fi
}
# Install pkg-config, whose package name differs per distribution.
function install_pkg_config {
  PACKAGE_MANAGER=$1
  case "$PACKAGE_MANAGER" in
    apt-get|dnf)
      install_pkg pkg-config "$PACKAGE_MANAGER"
      ;;
    pacman)
      install_pkg pkgconf "$PACKAGE_MANAGER"
      ;;
    brew|apk|yum)
      install_pkg pkgconfig "$PACKAGE_MANAGER"
      ;;
  esac
}
function install_shellcheck {
  # Install the pinned shellcheck release into ~/bin when missing: via brew
  # on macOS, otherwise from the upstream .tar.xz release (needs xz).
  if ! command -v shellcheck &> /dev/null; then
    if [[ $(uname -s) == "Darwin" ]]; then
      install_pkg shellcheck brew
    else
      install_pkg xz "$PACKAGE_MANAGER"
      MACHINE=$(uname -m);
      # mktemp creates a file; replace it with a directory of the same name.
      TMPFILE=$(mktemp)
      rm "$TMPFILE"
      mkdir -p "$TMPFILE"/
      curl -sL -o "$TMPFILE"/out.xz "https://github.com/koalaman/shellcheck/releases/download/v${SHELLCHECK_VERSION}/shellcheck-v${SHELLCHECK_VERSION}.$(uname -s | tr '[:upper:]' '[:lower:]').${MACHINE}.tar.xz"
      tar -xf "$TMPFILE"/out.xz -C "$TMPFILE"/
      cp "${TMPFILE}/shellcheck-v${SHELLCHECK_VERSION}/shellcheck" "${HOME}/bin/shellcheck"
      rm -rf "$TMPFILE"
      chmod +x "${HOME}"/bin/shellcheck
    fi
  fi
}
# Install the OpenSSL development headers, whose package name differs per
# distribution.
function install_openssl_dev {
  PACKAGE_MANAGER=$1
  case "$PACKAGE_MANAGER" in
    apk)
      install_pkg openssl-dev "$PACKAGE_MANAGER"
      ;;
    apt-get)
      install_pkg libssl-dev "$PACKAGE_MANAGER"
      ;;
    yum|dnf)
      install_pkg openssl-devel "$PACKAGE_MANAGER"
      ;;
    pacman|brew)
      install_pkg openssl "$PACKAGE_MANAGER"
      ;;
  esac
}
function install_gcc_powerpc_linux_gnu {
  # Install the PowerPC cross-compiler where a package exists (apt/yum only).
  PACKAGE_MANAGER=$1
  #Differently named packages for gcc-powerpc-linux-gnu
  if [[ "$PACKAGE_MANAGER" == "apt-get" ]] || [[ "$PACKAGE_MANAGER" == "yum" ]]; then
    install_pkg gcc-powerpc-linux-gnu "$PACKAGE_MANAGER"
  fi
  #if [[ "$PACKAGE_MANAGER" == "pacman" ]]; then
  #  install_pkg powerpc-linux-gnu-gcc "$PACKAGE_MANAGER"
  #fi
  #if [[ "$PACKAGE_MANAGER" == "apk" ]] || [[ "$PACKAGE_MANAGER" == "brew" ]]; then
  # TODO
  #fi
}
# Install the requested rust toolchain unless `rustup show` already lists it.
function install_toolchain {
  version=$1
  if rustup show | grep -q "$version"; then
    echo "${version} rust toolchain already installed"
  else
    echo "Installing ${version} of rust toolchain"
    rustup install "$version"
  fi
}
function install_sccache {
  # Install sccache via cargo unless the pinned version is already present;
  # when SCCACHE_GIT is set, build from that repo@revision instead of crates.io.
  VERSION="$(sccache --version)"
  if [[ "$VERSION" != "sccache ""${SCCACHE_VERSION}" ]]; then
    if [[ -n "${SCCACHE_GIT}" ]]; then
      git_repo=$( echo "$SCCACHE_GIT" | cut -d "@" -f 1 );
      git_hash=$( echo "$SCCACHE_GIT" | cut -d "@" -f 2 );
      cargo install sccache --git "$git_repo" --rev "$git_hash" --features s3;
    else
      cargo install sccache --version="${SCCACHE_VERSION}" --features s3;
    fi
  fi
}
# Install grcov via cargo unless a grcov binary is already on PATH.
function install_grcov {
  command -v grcov &> /dev/null || cargo install grcov
}
function install_dotnet {
  # Install the .NET SDK (channel $DOTNET_VERSION) via the official installer
  # script, after installing per-distro native prerequisites on Linux.
  echo "Installing .Net"
  if [[ "$(uname)" == "Linux" ]]; then
    # Install various prerequisites for .dotnet. There are known bugs
    # in the dotnet installer to warn even if they are present. We try
    # to install anyway based on the warnings the dotnet installer creates.
    if [ "$PACKAGE_MANAGER" == "apk" ]; then
      install_pkg icu "$PACKAGE_MANAGER"
      install_pkg zlib "$PACKAGE_MANAGER"
      install_pkg libintl "$PACKAGE_MANAGER"
      install_pkg libcurl "$PACKAGE_MANAGER"
    elif [ "$PACKAGE_MANAGER" == "apt-get" ]; then
      install_pkg gettext "$PACKAGE_MANAGER"
      install_pkg zlib1g "$PACKAGE_MANAGER"
    elif [ "$PACKAGE_MANAGER" == "yum" ] || [ "$PACKAGE_MANAGER" == "dnf" ]; then
      install_pkg icu "$PACKAGE_MANAGER"
      install_pkg zlib "$PACKAGE_MANAGER"
    elif [ "$PACKAGE_MANAGER" == "pacman" ]; then
      install_pkg icu "$PACKAGE_MANAGER"
      install_pkg zlib "$PACKAGE_MANAGER"
    fi
  fi
  # Below we need to (a) set TERM variable because the .net installer expects it and it is not set
  # in some environments (b) use bash not sh because the installer uses bash features.
  curl -sSL https://dot.net/v1/dotnet-install.sh \
  | TERM=linux /bin/bash -s -- --channel $DOTNET_VERSION --version latest
}
function install_boogie {
  # Install (or update to) the pinned Boogie version as a global dotnet tool.
  echo "Installing boogie"
  export DOTNET_ROOT=$HOME/.dotnet
  if [[ "$("$HOME"/.dotnet/dotnet tool list -g)" =~ .*boogie.*${BOOGIE_VERSION}.* ]]; then
    echo "Boogie $BOOGIE_VERSION already installed"
  else
    "$HOME/.dotnet/dotnet" tool update --global Boogie --version $BOOGIE_VERSION
  fi
}
function install_z3 {
  # Install the pinned Z3 release binary into ~/bin, warning when another z3
  # already lives in /usr/local/bin.
  echo "Installing Z3"
  if which /usr/local/bin/z3 &>/dev/null; then
    echo "z3 already exists at /usr/local/bin/z3"
    echo "but this install will go to $HOME/bin/z3."
    echo "you may want to remove the shared instance to avoid version confusion"
  fi
  if which "$HOME/bin/z3" &>/dev/null && [[ "$("$HOME/bin/z3" --version)" =~ .*${Z3_VERSION}.* ]]; then
    echo "Z3 ${Z3_VERSION} already installed"
    return
  fi
  if [[ "$(uname)" == "Linux" ]]; then
    Z3_PKG="z3-$Z3_VERSION-x64-ubuntu-16.04"
  elif [[ "$(uname)" == "Darwin" ]]; then
    Z3_PKG="z3-$Z3_VERSION-x64-osx-10.14.6"
  else
    echo "Z3 support not configured for this platform (uname=$(uname))"
    return
  fi
  # mktemp creates a file; replace it with a directory of the same name.
  TMPFILE=$(mktemp)
  rm "$TMPFILE"
  mkdir -p "$TMPFILE"/
  (
    cd "$TMPFILE" || exit
    curl -LOs "https://github.com/Z3Prover/z3/releases/download/z3-$Z3_VERSION/$Z3_PKG.zip"
    unzip -q "$Z3_PKG.zip"
    cp "$Z3_PKG/bin/z3" "$HOME/bin"
    chmod +x "$HOME/bin/z3"
  )
  rm -rf "$TMPFILE"
}
function install_cvc4 {
  # Install the pinned CVC4 build into ~/bin, warning when another cvc4
  # already lives in /usr/local/bin.
  echo "Installing CVC4"
  if which /usr/local/bin/cvc4 &>/dev/null; then
    echo "cvc4 already exists at /usr/local/bin/cvc4"
    echo "but this install will go to $HOME/bin/cvc4."
    echo "you may want to remove the shared instance to avoid version confusion"
  fi
  if which "$HOME/bin/cvc4" &>/dev/null && [[ "$("$HOME/bin/cvc4" --version)" =~ .*${CVC4_VERSION}.* ]]; then
    echo "CVC4 ${CVC4_VERSION} already installed"
    return
  fi
  if [[ "$(uname)" == "Linux" ]]; then
    CVC4_PKG="cvc4-$CVC4_VERSION-x64-ubuntu"
  elif [[ "$(uname)" == "Darwin" ]]; then
    CVC4_PKG="cvc4-$CVC4_VERSION-x64-osx"
  else
    echo "CVC4 support not configured for this platform (uname=$(uname))"
    return
  fi
  # mktemp creates a file; replace it with a directory of the same name.
  TMPFILE=$(mktemp)
  rm "$TMPFILE"
  mkdir -p "$TMPFILE"/
  (
    cd "$TMPFILE" || exit
    curl -LOs "https://cvc4.cs.stanford.edu/downloads/builds/minireleases/$CVC4_PKG.zip"
    unzip -q "$CVC4_PKG.zip"
    cp "$CVC4_PKG/cvc4" "$HOME/bin"
    chmod +x "$HOME/bin/cvc4"
  )
  rm -rf "$TMPFILE"
}
function welcome_message {
  # Print an interactive summary of what will be installed, driven by the
  # INSTALL_BUILD_TOOLS / OPERATIONS / INSTALL_PROVER / INSTALL_PROFILE flags.
cat <<EOF
Welcome to Diem!
This script will download and install the necessary dependencies needed to
build, test and inspect Diem Core.
Based on your selection, these tools will be included:
EOF
  if [[ "$INSTALL_BUILD_TOOLS" == "true" ]]; then
cat <<EOF
Build tools (since -t or no option was provided):
  * Rust (and the necessary components, e.g. rust-fmt, clippy)
  * CMake
  * Clang
  * grcov
  * lcov
  * pkg-config
  * libssl-dev
  * sccache
  * if linux, gcc-powerpc-linux-gnu
EOF
  fi
  if [[ "$OPERATIONS" == "true" ]]; then
cat <<EOF
Operation tools (since -o was provided):
  * yamllint
  * python3
  * docker
  * vault
  * terraform
  * kubectl
  * helm
  * aws cli
EOF
  fi
  if [[ "$INSTALL_PROVER" == "true" ]]; then
cat <<EOF
Move prover tools (since -y was provided):
  * z3
  * cvc4
  * dotnet
  * boogie
EOF
  fi
  if [[ "$INSTALL_PROFILE" == "true" ]]; then
cat <<EOF
Moreover, ~/.profile will be updated (since -p was provided).
EOF
  fi
cat <<EOF
If you'd prefer to install these dependencies yourself, please exit this script
now with Ctrl-C.
EOF
}
# Default flags; each getopts option below flips one of them.
BATCH_MODE=false;
VERBOSE=false;
INSTALL_BUILD_TOOLS=false;
OPERATIONS=false;
INSTALL_PROFILE=false;
INSTALL_PROVER=false;
#parse args
while getopts "btopvyh" arg; do
  case "$arg" in
    b)
      BATCH_MODE="true"
      ;;
    t)
      INSTALL_BUILD_TOOLS="true"
      ;;
    o)
      OPERATIONS="true"
      ;;
    p)
      INSTALL_PROFILE="true"
      ;;
    v)
      VERBOSE=true
      ;;
    y)
      INSTALL_PROVER="true"
      ;;
    *)
      usage;
      exit 0;
      ;;
  esac
done
# Trace all commands in verbose mode.
if [[ "$VERBOSE" == "true" ]]; then
  set -x
fi
# With no component flags, default to installing build tools (-t).
if [[ "$INSTALL_BUILD_TOOLS" == "false" ]] && \
   [[ "$OPERATIONS" == "false" ]] && \
   [[ "$INSTALL_PROFILE" == "false" ]] && \
   [[ "$INSTALL_PROVER" == "false" ]]; then
  INSTALL_BUILD_TOOLS="true"
fi
# Sanity check: rust-toolchain only exists at the repo root.
if [ ! -f rust-toolchain ]; then
  echo "Unknown location. Please run this from the diem repository. Abort."
  exit 1
fi
# Use sudo for privileged commands when not running as root.
PRE_COMMAND=()
if [ "$(whoami)" != 'root' ]; then
  PRE_COMMAND=(sudo)
fi
# Detect the system package manager (first match wins).
PACKAGE_MANAGER=
if [[ "$(uname)" == "Linux" ]]; then
  if command -v yum &> /dev/null; then
    PACKAGE_MANAGER="yum"
  elif command -v apt-get &> /dev/null; then
    PACKAGE_MANAGER="apt-get"
  elif command -v pacman &> /dev/null; then
    PACKAGE_MANAGER="pacman"
  elif command -v apk &>/dev/null; then
    PACKAGE_MANAGER="apk"
  elif command -v dnf &>/dev/null; then
    echo "WARNING: dnf package manager support is experimental"
    PACKAGE_MANAGER="dnf"
  else
    echo "Unable to find supported package manager (yum, apt-get, dnf, or pacman). Abort"
    exit 1
  fi
elif [[ "$(uname)" == "Darwin" ]]; then
  if which brew &>/dev/null; then
    PACKAGE_MANAGER="brew"
  else
    echo "Missing package manager Homebrew (https://brew.sh/). Abort"
    exit 1
  fi
else
  echo "Unknown OS. Abort."
  exit 1
fi
# Interactive confirmation (skipped in batch mode).
if [[ "$BATCH_MODE" == "false" ]]; then
  welcome_message
  printf "Proceed with installing necessary dependencies? (y/N) > "
  read -e -r input
  if [[ "$input" != "y"* ]]; then
    echo "Exiting..."
    exit 0
  fi
fi
if [[ "$PACKAGE_MANAGER" == "apt-get" ]]; then
  [[ "$BATCH_MODE" == "false" ]] && echo "Updating apt-get......"
  "${PRE_COMMAND[@]}" apt-get update
  [[ "$BATCH_MODE" == "false" ]] && echo "Installing ca-certificates......"
  # install_pkg is a shell function and already applies sudo internally.
  # Prefixing it with "${PRE_COMMAND[@]}" made non-root runs execute
  # `sudo install_pkg`, which fails because sudo cannot run shell functions.
  install_pkg ca-certificates "$PACKAGE_MANAGER"
fi
# Persist PATH/env changes to ~/.profile when requested with -p.
[[ "$INSTALL_PROFILE" == "true" ]] && update_path_and_profile
# curl is needed by nearly every installer below.
install_pkg curl "$PACKAGE_MANAGER"
# -t: compilers, cmake/clang/llvm, rust toolchains and cargo helpers.
if [[ "$INSTALL_BUILD_TOOLS" == "true" ]]; then
  install_build_essentials "$PACKAGE_MANAGER"
  install_pkg cmake "$PACKAGE_MANAGER"
  install_pkg clang "$PACKAGE_MANAGER"
  install_pkg llvm "$PACKAGE_MANAGER"
  install_gcc_powerpc_linux_gnu "$PACKAGE_MANAGER"
  install_openssl_dev "$PACKAGE_MANAGER"
  install_pkg_config "$PACKAGE_MANAGER"
  install_rustup "$BATCH_MODE"
  install_toolchain "$(cat ./cargo-toolchain)"
  install_toolchain "$(cat ./rust-toolchain)"
  # Add all the components that we need
  rustup component add rustfmt
  rustup component add clippy
  install_sccache
  install_grcov
  install_pkg lcov "$PACKAGE_MANAGER"
fi
# -o: operational tooling (linting, cloud and cluster CLIs).
if [[ "$OPERATIONS" == "true" ]]; then
  install_pkg yamllint "$PACKAGE_MANAGER"
  install_pkg python3 "$PACKAGE_MANAGER"
  install_pkg unzip "$PACKAGE_MANAGER"
  install_pkg jq "$PACKAGE_MANAGER"
  install_pkg git "$PACKAGE_MANAGER"
  #for timeout
  if [[ "$PACKAGE_MANAGER" == "apt-get" ]]; then
    install_pkg coreutils "$PACKAGE_MANAGER"
  fi
  install_shellcheck
  install_hadolint
  install_vault
  install_helm
  install_terraform
  install_kubectl
  install_awscli
fi
# -y: Move prover toolchain.
if [[ "$INSTALL_PROVER" == "true" ]]; then
  install_z3
  install_cvc4
  install_dotnet
  install_boogie
fi
[[ "${BATCH_MODE}" == "false" ]] && cat <<EOF
Finished installing all dependencies.
You should now be able to build the project by running:
	cargo build
EOF
exit 0
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.