text
stringlengths 1
1.05M
|
|---|
// Returns the sum of all even values in `array` (0 for an empty array).
// Negative even numbers are included, matching `% 2 == 0`.
func sumOfEvenNumbers(in array: [Int]) -> Int {
    return array.filter { $0 % 2 == 0 }.reduce(0, +)
}
|
<gh_stars>10-100
/* jshint unused: true, undef: true */
/* global window, document, localStorage, $, $each, setTimeout, screen, clearInterval */
window.addEventListener('load', init, false);
function init() {
if (localStorage.popupHeight) document.body.style.height = localStorage.popupHeight + 'px';
if (localStorage.popupWidth) document.body.style.width = localStorage.popupWidth + 'px';
}
(function(window) {
var document = window.document;
var chrome = window.chrome;
var localStorage = window.localStorage;
var navigator = window.navigator;
var body = document.body;
var _m = chrome.i18n.getMessage;
// Error alert
// Modal alert dialog: shows/hides #alert-dialog-text by toggling the
// 'needAlert' class on <body>.
var AlertDialog = {
// Show the dialog with the given HTML content; no-op for falsy input.
// NOTE(review): `dialog` is injected via innerHTML, so callers must pass
// trusted/pre-escaped markup.
open: function(dialog) {
if (!dialog) return;
$('alert-dialog-text').innerHTML = dialog;
body.addClass('needAlert');
},
// Hide the dialog.
close: function() {
body.removeClass('needAlert');
}
};
// popdown toast when an error occurs
window.addEventListener('error', function() {
AlertDialog.open('<strong>' + _m('errorOccured') + '</strong><br>' + _m('reportedToDeveloper'));
}, false);
// Platform detection
var os = (navigator.platform.toLowerCase().match(/mac|win|linux/i) || ['other'])[0];
body.addClass(os);
// Some i18n
$('edit-dialog-name').placeholder = _m('name');
$('edit-dialog-url').placeholder = _m('url');
$('hotkey-dialog-hotkey').placeholder = _m('hotkey');
$each({
'bookmark-new-tab': 'openNewTab',
'bookmark-new-window': 'openNewWindow',
'bookmark-new-incognito-window': 'openIncognitoWindow',
'bookmark-edit': 'edit',
'bookmark-update': 'updateEllipsis',
'bookmark-delete': 'deleteEllipsis',
'bookmark-set-hotkey': 'setHotkeyEllipsis',
'bookmark-unset-hotkey': 'unsetHotkey',
'folder-window': 'openBookmarks',
'folder-new-window': 'openBookmarksNewWindow',
'folder-new-incognito-window': 'openBookmarksIncognitoWindow',
'folder-edit': 'edit',
'folder-delete': 'deleteEllipsis',
'edit-dialog-button': 'save',
'hotkey-dialog-button': 'save'
}, function(msg, id) {
var el = $(id),
m = _m(msg);
if (el.tagName == 'COMMAND') el.label = m;
el.textContent = m;
});
// RTL indicator
var rtl = (body.getComputedStyle('direction') == 'rtl');
if (rtl) body.addClass('rtl');
// Init some variables
var opens = localStorage.opens ? JSON.parse(localStorage.opens) : [];
var rememberState = !localStorage.dontRememberState;
var httpsPattern = /^https?:\/\//i;
// Hotkey-related functions.
var hotkeys = localStorage.hotkeys ? JSON.parse(localStorage.hotkeys) : {};
// Assign `hotkey` to bookmark `id` and persist the whole map to
// localStorage (JSON string under the 'hotkeys' key).
function setHotkey(id, hotkey) {
hotkeys[id] = hotkey;
localStorage.hotkeys = JSON.stringify(hotkeys);
}
// Remove the hotkey mapped to bookmark `id` (if any) and persist the
// updated map to localStorage.
function unsetHotkey(id) {
if (!(id in hotkeys)) return;
delete hotkeys[id];
localStorage.hotkeys = JSON.stringify(hotkeys);
}
// Look up the hotkey assigned to bookmark `id`; '' when none is set.
function getHotkey(id) {
return hotkeys.hasOwnProperty(id) ? hotkeys[id] : '';
}
// Reverse lookup: return the bookmark id that `hotkey` is mapped to,
// or null when the hotkey is unused. (`hotkeys` is a JSON-parsed plain
// object, so Object.keys matches the original for-in/hasOwnProperty walk.)
function getHotkeyId(hotkey) {
var ids = Object.keys(hotkeys);
for (var i = 0; i < ids.length; i++) {
if (hotkeys[ids[i]] === hotkey) return ids[i];
}
return null;
}
// Render '[hotkey]' inside the tree item for bookmark `id`, creating
// the <em class="hotkey"> element on first use.
function setHotkeyText(id, hotkey) {
var li = $('neat-tree-item-' + id);
var a = li.querySelector('a');
var em = a.querySelector('em');
// Create element if it doesn't exist.
if (!em) {
em = document.createElement('em');
em.addClass('hotkey');
a.insertBefore(em, a.firstChild);
}
em.textContent = '[' + hotkey + ']';
}
// Remove the '[hotkey]' indicator from the tree item for bookmark `id`,
// if one is currently shown.
function unsetHotkeyText(id) {
var li = $('neat-tree-item-' + id);
var a = li.querySelector('a');
var em = a.querySelector('em');
if (em) {
a.removeChild(em);
}
}
// Re-render the '[hotkey]' indicator for every stored hotkey; called
// after (re)building parts of the bookmark tree.
function refreshHotkeyText() {
Object.keys(hotkeys).forEach(function(id) {
setHotkeyText(id, hotkeys[id]);
});
}
// Adaptive bookmark tooltips
// Adaptive bookmark tooltips: when a bookmark label is visually
// truncated (scrollWidth > offsetWidth), prepend the full title to the
// tooltip; when it fits again, revert to a URL-only tooltip.
var adaptBookmarkTooltips = function() {
var bookmarks = document.querySelectorAll('li.child a');
for (var i = 0, l = bookmarks.length; i < l; i++) {
var bookmark = bookmarks[i];
if (bookmark.hasClass('titled')) {
// Previously truncated; if the label now fits, show only the URL.
if (bookmark.scrollWidth <= bookmark.offsetWidth) {
bookmark.title = bookmark.href;
bookmark.removeClass('titled');
}
} else if (bookmark.scrollWidth > bookmark.offsetWidth) {
// Truncated: tooltip becomes "title\nURL"; mark it so we can revert.
var text = bookmark.querySelector('i').textContent;
var title = bookmark.title;
if (text != title) {
bookmark.title = text + '\n' + title;
bookmark.addClass('titled');
}
}
}
};
// Build the <a> markup for one bookmark: favicon + title with the URL
// as the tooltip. `extras` is raw attribute text appended to the <a>
// tag (e.g. an inline style for indentation).
// NOTE(review): htmlspecialchars() is a project String helper not
// visible here — presumably it HTML-escapes; verify it escapes quotes,
// since the result is used inside double-quoted attributes.
var generateBookmarkHTML = function(title, url, extras) {
if (!extras) extras = '';
var u = url.htmlspecialchars();
var favicon = 'chrome://favicon/' + u;
var tooltipURL = url;
if (/^javascript:/i.test(url)) {
// Bookmarklet: truncate very long code in the tooltip, generic icon.
if (url.length > 140) tooltipURL = url.slice(0, 140) + '...';
favicon = 'images/document-code.png';
}
tooltipURL = tooltipURL.htmlspecialchars();
// Untitled bookmarks fall back to the scheme-stripped URL, or a
// localized "no title" label for non-http(s) URLs.
var name = title.htmlspecialchars() || (httpsPattern.test(url) ? url.replace(httpsPattern, '') : _m('noTitle'));
return '<a href="' + u + '"' + ' title="' + tooltipURL + '" tabindex="0" ' + extras + '>' + '<img src="' + favicon + '" width="16" height="16" alt=""><i>' + name + '</i>' + '</a>';
};
// Recursively build the <ul>/<li> markup for a list of bookmark nodes.
// `level` drives the indentation (14px per level) and the ARIA role
// ('tree' at the root, 'group' below).
var generateHTML = function(data, level) {
if (!level) level = 0;
var paddingStart = 14 * level;
var group = (level === 0) ? 'tree' : 'group';
var html = '<ul role="' + group + '" data-level="' + level + '">';
// Async path: fetch and inject children of an open folder whose node
// arrived without a `children` array (injected after this function
// has already returned its HTML).
var getBookmarks = function(_id) {
chrome.bookmarks.getChildren(_id, function(children) {
var html = generateHTML(children, level + 1);
var div = document.createElement('div');
div.innerHTML = html;
var ul = div.querySelector('ul');
ul.inject($('neat-tree-item-' + _id));
div.destroy();
});
};
for (var i = 0, l = data.length; i < l; i++) {
var d = data[i];
var children = d.children;
var title = d.title.htmlspecialchars();
var url = d.url;
var id = d.id;
var parentID = d.parentId;
var idHTML = id ? ' id="neat-tree-item-' + id + '"' : '';
// A node is a folder when it has no URL (or carries folder-only
// fields such as dateGroupModified/children).
var isFolder = d.dateGroupModified || children || typeof url == 'undefined';
if (isFolder) {
var isOpen = false;
var open = '';
if (rememberState) {
// Restore this folder's open/closed state from the saved list.
isOpen = opens.contains(id);
if (isOpen) open = ' open';
}
html += '<li class="parent' + open + '"' + idHTML + ' role="treeitem" aria-expanded="' + isOpen + '" data-parentid="' + parentID + '">' + '<span tabindex="0" style="-webkit-padding-start: ' + paddingStart + 'px"><b class="twisty"></b>' + '<img src="images/folder.png" width="16" height="16" alt=""><i>' + (title || _m('noTitle')) + '</i>' + '</span>';
if (isOpen) {
if (children) {
html += generateHTML(children, level + 1);
} else {
getBookmarks(id);
}
}
} else {
html += '<li class="child"' + idHTML + ' role="treeitem" data-parentid="' + parentID + '">' + generateBookmarkHTML(title, url, 'style="-webkit-padding-start: ' + paddingStart + 'px"');
}
html += '</li>';
}
html += '</ul>';
return html;
};
var $tree = $('tree');
chrome.bookmarks.getTree(function(tree) {
var html = generateHTML(tree[0].children);
$tree.innerHTML = html;
refreshHotkeyText();
// Automatically give focus to the first folder.
var firstChild = $tree.querySelector('li:first-child>span');
if (firstChild) {
firstChild.focus();
}
setTimeout(adaptBookmarkTooltips, 100);
tree = null;
});
// Events for the tree
$tree.addEventListener('scroll', function() {
localStorage.scrollTop = $tree.scrollTop; // store scroll position at each scroll event
});
var closeUnusedFolders = localStorage.closeUnusedFolders;
// Toggle folders open/closed on left-click of their <span> row, lazily
// loading children on first open, optionally closing sibling folders
// ("close unused folders" option), and persisting the set of open
// folder ids to localStorage.
$tree.addEventListener('click', function(e) {
if (e.button !== 0) return;
var el = e.target;
var tagName = el.tagName;
if (tagName != 'SPAN') return;
// Modified clicks are handled by the bookmark-opening handler instead.
if (e.shiftKey || e.ctrlKey) return;
var parent = el.parentNode;
parent.toggleClass('open');
var expanded = parent.hasClass('open');
parent.setAttribute('aria-expanded', expanded);
var children = parent.querySelector('ul');
if (!children) {
// First open: fetch and inject this folder's children.
var id = parent.id.replace('neat-tree-item-', '');
chrome.bookmarks.getChildren(id, function(children) {
var html = generateHTML(children, parseInt(parent.parentNode.dataset.level) + 1);
var div = document.createElement('div');
div.innerHTML = html;
var ul = div.querySelector('ul');
ul.inject(parent);
div.destroy();
refreshHotkeyText();
setTimeout(adaptBookmarkTooltips, 100);
});
}
if (closeUnusedFolders && expanded) {
// Accordion behavior: close every sibling folder at this level.
var siblings = parent.getSiblings('li');
for (var i = 0, l = siblings.length; i < l; i++) {
var li = siblings[i];
if (li.hasClass('parent')) {
li.removeClass('open').setAttribute('aria-expanded', false);
}
}
}
// Persist open-folder ids. NOTE(review): this `opens` intentionally
// shadows the outer `opens`; the project's Array.map generic takes
// (fn, array) — nonstandard argument order.
var opens = $tree.querySelectorAll('li.open');
opens = Array.map(function(li) {
return li.id.replace('neat-tree-item-', '');
}, opens);
localStorage.opens = JSON.stringify(opens);
});
// Force middle clicks to trigger the focus event
$tree.addEventListener('mouseup', function(e) {
if (e.button != 1) return;
var el = e.target;
var tagName = el.tagName;
if (tagName != 'A' && tagName != 'SPAN') return;
el.focus();
});
// Popup auto-height
// Popup auto-height: after a short delay (lets the DOM settle), size
// <body> to fit the tree — clamped to the screen space below the popup
// — and persist the height so the next popup opens at the same size.
var resetHeight = function() {
setTimeout(function() {
var neatTree = $tree.firstElementChild;
if (neatTree) {
// +6 leaves a small bottom margin below the tree.
var fullHeight = neatTree.offsetHeight + $tree.offsetTop + 6;
// Slide up faster than down
body.style.webkitTransitionDuration = (fullHeight < window.innerHeight) ? '.3s' : '.1s';
var maxHeight = screen.height - window.screenY - 50;
var height = Math.max(0, Math.min(fullHeight, maxHeight));
body.style.height = height + 'px';
localStorage.popupHeight = height;
}
}, 100);
};
resetHeight();
$tree.addEventListener('click', resetHeight);
$tree.addEventListener('keyup', resetHeight);
// Confirm dialog event listeners
$('confirm-dialog-button-1').addEventListener('click', function() {
ConfirmDialog.fn1();
ConfirmDialog.close();
}, false);
$('confirm-dialog-button-2').addEventListener('click', function() {
ConfirmDialog.fn2();
ConfirmDialog.close();
}, false);
// Confirm dialog
// Two-button confirmation dialog. fn1/fn2 run when button 1/2 is
// clicked (the button listeners are wired up earlier in this file),
// after which the dialog closes.
var ConfirmDialog = {
// opts: {dialog, button1, button2, fn1, fn2, focusButton}; button
// labels are injected as HTML.
open: function(opts) {
if (!opts) return;
$('confirm-dialog-text').innerHTML = opts.dialog.widont();
$('confirm-dialog-button-1').innerHTML = opts.button1;
$('confirm-dialog-button-2').innerHTML = opts.button2;
if (opts.fn1) ConfirmDialog.fn1 = opts.fn1;
if (opts.fn2) ConfirmDialog.fn2 = opts.fn2;
$('confirm-dialog-button-' + (opts.focusButton || 1)).focus();
document.body.addClass('needConfirm');
},
close: function() {
document.body.removeClass('needConfirm');
},
// Default no-op callbacks, replaced per open().
fn1: function() {},
fn2: function() {}
};
// Edit dialog event listener
$('edit-dialog').addEventListener('submit', function(e) {
EditDialog.close();
e.preventDefault();
}, false);
// Edit dialog
// Dialog for editing a bookmark (name + URL) or a folder (name only).
var EditDialog = window.EditDialog = {
// Open the dialog. opts: {dialog, fn, type ('bookmark'|'folder'),
// name, url}. The callback `fn(name, url)` runs on save.
open: function(opts) {
if (!opts) return;
$('edit-dialog-text').innerHTML = opts.dialog.widont();
if (opts.fn) EditDialog.fn = opts.fn;
var type = opts.type || 'bookmark';
var name = $('edit-dialog-name');
name.value = opts.name;
name.focus();
name.select();
name.scrollLeft = 0; // very delicate, show first few words instead of last
var url = $('edit-dialog-url');
if (type == 'bookmark') {
url.style.display = '';
url.disabled = false;
url.value = opts.url;
} else {
// Folders have no URL; hide and disable the field so validity
// checks don't apply to it.
url.style.display = 'none';
url.disabled = true;
url.value = '';
}
body.addClass('needEdit');
},
// Save: validate the URL via the input's HTML5 validity, trying an
// 'http://' prefix once, then invoke the callback and hide the dialog.
close: function() {
var urlInput = $('edit-dialog-url');
var url = urlInput.value;
if (!urlInput.validity.valid) {
urlInput.value = 'http://' + url;
// Fix: if the input is STILL invalid after prefixing, pass an
// empty URL ("forget it") instead of the bare 'http://' string
// the old code produced by re-prefixing unconditionally.
url = urlInput.validity.valid ? 'http://' + url : '';
}
EditDialog.fn($('edit-dialog-name').value, url);
EditDialog.closeNoSave();
},
// Hide the dialog without invoking the save callback.
closeNoSave: function() {
body.removeClass('needEdit');
},
// Default no-op save callback, replaced per open().
fn: function() {}
};
// Hotkey dialog event listener
$('hotkey-dialog').addEventListener('submit', function(e) {
HotkeyDialog.close();
e.preventDefault();
}, false);
// Hotkey input validation.
$('hotkey-dialog-hotkey').onkeypress = function(e) {
var key;
if (e.keyCode) key = e.keyCode;
else if (e.which) key = e.which;
// Allow enter, backspace...
if (key === 13 || key === 8) {
return true;
}
if (/[^A-Za-z0-9]/.test(String.fromCharCode(key))) {
return false;
}
return true;
};
// Hotkey dialog
// Dialog for assigning a single-character hotkey to a bookmark.
var HotkeyDialog = window.HotkeyDialog = {
// opts: {dialog, fn, name, hotkey}; the name field is read-only.
open: function(opts) {
if (!opts) return;
$('hotkey-dialog-text').innerHTML = opts.dialog.widont();
if (opts.fn) HotkeyDialog.fn = opts.fn;
var name = $('hotkey-dialog-name');
name.value = opts.name;
name.disabled = true;
name.scrollLeft = 0; // very delicate, show first few words instead of last
var hotkey = $('hotkey-dialog-hotkey');
hotkey.disabled = false;
hotkey.value = opts.hotkey;
hotkey.focus();
hotkey.select();
body.addClass('needSetHotkey');
},
// Save: pass the lowercased hotkey to the callback, then hide.
close: function() {
var hotkeyInput = $('hotkey-dialog-hotkey');
var hotkey = hotkeyInput.value.toLowerCase();
HotkeyDialog.fn(hotkey);
HotkeyDialog.closeNoSave();
},
// Hide the dialog without invoking the save callback.
closeNoSave: function() {
body.removeClass('needSetHotkey');
},
fn: function() {}
};
// Bookmark handling
var dontConfirmOpenFolder = !!localStorage.dontConfirmOpenFolder;
var openBookmarksLimit = 5;
// Map of user-facing bookmark/folder operations, shared by the context
// menus and the keyboard handlers.
var actions = {
// Open `url` in the currently-selected tab, then close the popup.
openBookmark: function(url) {
chrome.tabs.getSelected(null, function(tab) {
var decodedURL;
try {
decodedURL = decodeURIComponent(url);
} catch (e) {
// Malformed percent-encoding; give up rather than navigate to junk.
return;
}
chrome.tabs.update(tab.id, {
url: decodedURL
});
// Delay so the navigation isn't cancelled by the popup closing.
setTimeout(window.close, 200);
});
},
// Open `url` in a new tab. With `blankTabCheck`, reuse the current tab
// instead when it is Chrome's blank new-tab page.
openBookmarkNewTab: function(url, selected, blankTabCheck) {
var open = function() {
chrome.tabs.create({
url: url,
selected: selected
});
};
if (blankTabCheck) {
chrome.tabs.getSelected(null, function(tab) {
if (/^chrome:\/\/newtab/i.test(tab.url)) {
chrome.tabs.update(tab.id, {
url: url
});
setTimeout(window.close, 200);
} else {
open();
}
});
} else {
open();
}
},
// Open `url` in a new (optionally incognito) window.
openBookmarkNewWindow: function(url, incognito) {
chrome.windows.create({
url: url,
incognito: incognito
});
},
// Open every URL in `urls` as tabs in the current window, confirming
// first when the count exceeds openBookmarksLimit. `li` is the folder's
// tree item, used to restore focus on cancel.
openBookmarks: function(li, urls, selected) {
var urlsLen = urls.length;
var open = function() {
chrome.tabs.create({
url: urls.shift(),
selected: selected // first tab will be selected
});
for (var i = 0, l = urls.length; i < l; i++) {
chrome.tabs.create({
url: urls[i],
selected: false
});
}
};
if (!dontConfirmOpenFolder && urlsLen > openBookmarksLimit) {
ConfirmDialog.open({
dialog: _m('confirmOpenBookmarks', '' + urlsLen),
button1: '<strong>' + _m('open') + '</strong>',
button2: _m('nope'),
fn1: open,
fn2: function() {
// Cancelled: return focus to the folder's tree item.
li.querySelector('a, span').focus();
}
});
} else {
open();
}
},
// Open every URL in `urls` in one new (optionally incognito) window,
// confirming first when the count exceeds openBookmarksLimit.
openBookmarksNewWindow: function(li, urls, incognito) {
var urlsLen = urls.length;
var open = function() {
chrome.windows.create({
url: urls,
incognito: incognito
});
};
if (!dontConfirmOpenFolder && urlsLen > openBookmarksLimit) {
var dialog = incognito ? _m('confirmOpenBookmarksNewIncognitoWindow', '' + urlsLen) : _m('confirmOpenBookmarksNewWindow', '' + urlsLen);
ConfirmDialog.open({
dialog: dialog,
button1: '<strong>' + _m('open') + '</strong>',
button2: _m('nope'),
fn1: open,
fn2: function() {
li.querySelector('a, span').focus();
}
});
} else {
open();
}
},
// Edit a bookmark's name/URL or a folder's name via EditDialog, then
// refresh the corresponding tree item in place.
editBookmarkFolder: function(id) {
chrome.bookmarks.get(id, function(nodeList) {
if (!nodeList.length) return;
var node = nodeList[0];
var url = node.url;
var isBookmark = !!url;
var type = isBookmark ? 'bookmark' : 'folder';
var dialog = isBookmark ? _m('editBookmark') : _m('editFolder');
EditDialog.open({
dialog: dialog,
type: type,
// NOTE(review): for folders `url` is undefined, so this yields the
// string 'undefined' — harmless only because the dialog hides the
// URL field for folders; confirm.
name: node.title,
url: decodeURIComponent(url),
fn: function(name, url) {
chrome.bookmarks.update(id, {
title: name,
url: isBookmark ? url : ''
}, function(n) {
var title = n.title;
var url = n.url;
var li = $('neat-tree-item-' + id);
if (li) {
if (isBookmark) {
// Rebuild the <a> while preserving its indentation style.
var css = li.querySelector('a').style.cssText;
li.innerHTML = generateBookmarkHTML(title, url, 'style="' + css + '"');
} else {
var i = li.querySelector('i');
var name = title || (httpsPattern.test(url) ? url.replace(httpsPattern, '') : _m('noTitle'));
i.textContent = name;
}
}
li.firstElementChild.focus();
});
}
});
});
},
// Point an existing bookmark at the active tab's URL, after asking.
updateBookmark: function(id) {
chrome.tabs.query({ active: true, lastFocusedWindow: true }, function (tabs) {
var new_url = tabs[0].url;
var li = $('neat-tree-item-' + id);
var bookmarkName = '<cite>' + li.textContent.trim() + '</cite>';
var dialog = _m('confirmUpdateBookmark', [bookmarkName, new_url]);
ConfirmDialog.open({
dialog: dialog,
button1: '<strong>' + _m('update') + '</strong>',
button2: _m('nope'),
fn1: function() {
chrome.bookmarks.update(id, { url: new_url }, function(n) {
var title = n.title;
var url = n.url;
var css = li.querySelector('a').style.cssText;
li.innerHTML = generateBookmarkHTML(title, url, 'style="' + css + '"');
li.firstElementChild.focus();
});
},
fn2: function() {
li.querySelector('a, span').focus();
}
});
});
},
// Delete a single bookmark after confirmation; focus moves to a
// neighboring item once the node is removed.
deleteBookmark: function(id) {
var li = $('neat-tree-item-' + id);
var bookmarkName = '<cite>' + li.textContent.trim() + '</cite>';
var dialog = _m('confirmDeleteBookmark', [bookmarkName]);
ConfirmDialog.open({
dialog: dialog,
button1: '<strong>' + _m('delete') + '</strong>',
button2: _m('nope'),
fn1: function() {
chrome.bookmarks.remove(id, function() {
if (li) {
var nearLi1 = li.nextElementSibling || li.previousElementSibling;
li.destroy();
if (nearLi1) nearLi1.querySelector('a, span').focus();
}
});
},
fn2: function() {
li.querySelector('a, span').focus();
}
});
},
// Delete a folder (and everything in it) after a confirmation message
// tailored to its bookmark/subfolder counts.
deleteBookmarks: function(id, bookmarkCount, folderCount) {
var li = $('neat-tree-item-' + id);
var item = li.querySelector('span');
var dialog = '';
var folderName = '<cite>' + item.textContent.trim() + '</cite>';
if (bookmarkCount && folderCount) {
dialog = _m('confirmDeleteFolderSubfoldersBookmarks', [folderName, folderCount, bookmarkCount]);
} else if (bookmarkCount) {
dialog = _m('confirmDeleteFolderBookmarks', [folderName, bookmarkCount]);
} else if (folderCount) {
dialog = _m('confirmDeleteFolderSubfolders', [folderName, folderCount]);
} else {
dialog = _m('confirmDeleteFolder', [folderName]);
}
ConfirmDialog.open({
dialog: dialog,
button1: '<strong>' + _m('delete') + '</strong>',
button2: _m('nope'),
fn1: function() {
chrome.bookmarks.removeTree(id, function() {
li.destroy();
});
// NOTE(review): the neighbor is focused before removeTree's
// callback runs (unlike deleteBookmark, which focuses inside the
// callback) — works because `li` still exists here; confirm intent.
var nearLi = li.nextElementSibling || li.previousElementSibling;
if (nearLi) nearLi.querySelector('a, span').focus();
},
fn2: function() {
li.querySelector('a, span').focus();
}
});
},
// Prompt for a hotkey for bookmark `id`; empty/non-alphanumeric input
// unassigns instead of assigning.
setHotkey: function(id, name) {
HotkeyDialog.open({
dialog: _m('setHotkey'),
name: name,
hotkey: getHotkey(id),
fn: function(hotkey) {
// If not alphanumeric or is empty string...
if (/[^a-z0-9]|(^$)/.test(hotkey)) {
unsetHotkey(id);
unsetHotkeyText(id);
} else {
setHotkey(id, hotkey);
setHotkeyText(id, hotkey);
}
}
});
},
// Remove bookmark `id`'s hotkey mapping and its on-screen indicator.
unsetHotkey: function(id) {
unsetHotkey(id);
unsetHotkeyText(id);
}
};
// For performing bookmark actions via keyboard commands.
var leftClickNewTab = !!localStorage.leftClickNewTab;
var noOpenBookmark = false;
// Open bookmarks on left-click: plain click opens in the current tab
// (or a new tab per the leftClickNewTab setting), ctrl/meta-click a new
// tab, shift-click a new window. Ctrl/shift-clicking a folder's <span>
// row opens all of its child bookmarks at once.
var bookmarkHandler = function(e) {
e.preventDefault();
if (e.button !== 0) return; // force left-click
if (noOpenBookmark) { // flag that disables opening bookmark
noOpenBookmark = false;
return;
}
var el = e.target;
var ctrlMeta = (e.ctrlKey || e.metaKey);
var shift = e.shiftKey;
if (el.tagName == 'A') {
var url = el.href;
if (ctrlMeta) { // ctrl/meta click
actions.openBookmarkNewTab(url, !shift);
} else { // click
if (shift) {
actions.openBookmarkNewWindow(url);
} else {
if (leftClickNewTab) {
actions.openBookmarkNewTab(url, true, true);
} else {
actions.openBookmark(url);
}
}
}
} else if (el.tagName == 'SPAN') {
var li = el.parentNode;
var id = li.id.replace('neat-tree-item-', '');
chrome.bookmarks.getChildren(id, function(children) {
// `.clean()` is a project helper; presumably drops falsy entries
// (folders have no url) — TODO confirm.
var urls = Array.map(function(c) {
return c.url;
}, children).clean();
var urlsLen = urls.length;
if (!urlsLen) return;
if (ctrlMeta) { // ctrl/meta click
actions.openBookmarks(li, urls, !shift);
} else if (shift) { // shift click
actions.openBookmarksNewWindow(li, urls);
}
});
}
};
$tree.addEventListener('click', bookmarkHandler);
var bookmarkHandlerMiddle = function(e) {
if (e.button != 1) return; // force middle-click
var event = document.createEvent('MouseEvents');
event.initMouseEvent('click', true, true, window, 0, 0, 0, 0, 0, true, false, e.shiftKey, true, 0, null);
e.target.dispatchEvent(event);
};
$tree.addEventListener('mouseup', bookmarkHandlerMiddle);
// Disable Chrome auto-scroll feature
window.addEventListener('mousedown', function(e) {
if (e.button == 1) e.preventDefault();
});
// Context menu
var $bookmarkContextMenu = $('bookmark-context-menu');
var $folderContextMenu = $('folder-context-menu');
// Hide both context menus, deactivate the highlighted tree item, and —
// when the triggering event came from the tree — restore focus to it.
var clearMenu = function(e) {
currentContext = null;
var active = body.querySelector('.active');
if (active) {
active.removeClass('active');
// This is kinda hacky. Oh well.
if (e) {
var el = e.target;
if (el == $tree) active.focus();
}
}
// Menus are "hidden" by moving them off-screen (keeps offsetWidth/
// offsetHeight measurable when they are repositioned later).
$bookmarkContextMenu.style.left = '-999px';
$bookmarkContextMenu.style.opacity = 0;
$folderContextMenu.style.left = '-999px';
$folderContextMenu.style.opacity = 0;
};
body.addEventListener('click', clearMenu);
$tree.addEventListener('scroll', clearMenu);
$tree.addEventListener('focus', clearMenu, true);
var currentContext = null;
var macCloseContextMenu = false;
body.addEventListener('contextmenu', function(e) {
e.preventDefault();
clearMenu();
if (os == 'mac') {
macCloseContextMenu = false;
setTimeout(function() {
macCloseContextMenu = true;
}, 500);
}
var el = e.target;
var active, pageX, pageY, boundY;
if (el.tagName == 'A') {
currentContext = el;
active = body.querySelector('.active');
if (active) active.removeClass('active');
el.addClass('active');
var bookmarkMenuWidth = $bookmarkContextMenu.offsetWidth;
var bookmarkMenuHeight = $bookmarkContextMenu.offsetHeight;
pageX = rtl ? Math.max(0, e.pageX - bookmarkMenuWidth) : Math.min(e.pageX, body.offsetWidth - bookmarkMenuWidth);
pageY = e.pageY;
boundY = window.innerHeight - bookmarkMenuHeight;
if (pageY > boundY) pageY -= bookmarkMenuHeight;
if (pageY < 0) pageY = boundY;
pageY = Math.max(0, pageY);
$bookmarkContextMenu.style.left = pageX + 'px';
$bookmarkContextMenu.style.top = pageY + 'px';
$bookmarkContextMenu.style.opacity = 1;
$bookmarkContextMenu.focus();
} else if (el.tagName == 'SPAN') {
currentContext = el;
active = body.querySelector('.active');
if (active) active.removeClass('active');
el.addClass('active');
if (el.parentNode.dataset.parentid == '0') {
$folderContextMenu.addClass('hide-editables');
} else {
$folderContextMenu.removeClass('hide-editables');
}
var folderMenuWidth = $folderContextMenu.offsetWidth;
var folderMenuHeight = $folderContextMenu.offsetHeight;
pageX = rtl ? Math.max(0, e.pageX - folderMenuWidth) : Math.min(e.pageX, body.offsetWidth - folderMenuWidth);
pageY = e.pageY;
boundY = window.innerHeight - folderMenuHeight;
if (pageY > boundY) pageY -= folderMenuHeight;
if (pageY < 0) pageY = boundY;
$folderContextMenu.style.left = pageX + 'px';
$folderContextMenu.style.top = pageY + 'px';
$folderContextMenu.style.opacity = 1;
$folderContextMenu.focus();
}
});
// on Mac, holding down right-click for a period of time closes the context menu
// Not a complete implementation, but it works :)
if (os == 'mac') body.addEventListener('mouseup', function(e) {
if (e.button == 2 && macCloseContextMenu) {
macCloseContextMenu = false;
clearMenu();
}
});
// Dispatch a bookmark context-menu command. `currentContext` is the
// right-clicked <a>; its parent <li> carries the bookmark id.
var bookmarkContextHandler = function(e) {
e.stopPropagation();
if (!currentContext) return;
var el = e.target;
if (el.tagName != 'COMMAND') return;
var url = currentContext.href;
// Every id-based command needs the same li/id; compute them once
// instead of redeclaring identical vars in five switch cases.
var li = currentContext.parentNode;
var id = li.id.replace(/(neat\-tree)\-item\-/, '');
switch (el.id) {
case 'bookmark-new-tab':
actions.openBookmarkNewTab(url);
break;
case 'bookmark-new-window':
actions.openBookmarkNewWindow(url);
break;
case 'bookmark-new-incognito-window':
actions.openBookmarkNewWindow(url, true);
break;
case 'bookmark-edit':
actions.editBookmarkFolder(id);
break;
case 'bookmark-update':
actions.updateBookmark(id);
break;
case 'bookmark-delete':
actions.deleteBookmark(id);
break;
case 'bookmark-set-hotkey':
actions.setHotkey(id, li.querySelector('i').textContent);
break;
case 'bookmark-unset-hotkey':
actions.unsetHotkey(id);
break;
}
clearMenu();
};
// On Mac, all three mouse clicks work; on Windows, middle-click doesn't work
$bookmarkContextMenu.addEventListener('mouseup', function(e) {
e.stopPropagation();
if (e.button === 0 || (os == 'mac' && e.button == 1)) bookmarkContextHandler(e);
});
$bookmarkContextMenu.addEventListener('contextmenu', bookmarkContextHandler);
$bookmarkContextMenu.addEventListener('click', function(e) {
e.stopPropagation();
});
// Dispatch a folder context-menu command. Children are fetched first so
// the open/delete actions know the folder's bookmark URL count.
var folderContextHandler = function(e) {
if (!currentContext) return;
var el = e.target;
if (el.tagName != 'COMMAND') return;
var li = currentContext.parentNode;
var id = li.id.replace('neat-tree-item-', '');
chrome.bookmarks.getChildren(id, function(children) {
// `.clean()` is a project helper; presumably drops falsy entries
// (subfolders have no url) — TODO confirm.
var urls = Array.map(function(c) {
return c.url;
}, children).clean();
var urlsLen = urls.length;
var noURLS = !urlsLen;
switch (el.id) {
case 'folder-window':
if (noURLS) return;
actions.openBookmarks(li, urls);
break;
case 'folder-new-window':
if (noURLS) return;
actions.openBookmarksNewWindow(li, urls);
break;
case 'folder-new-incognito-window':
if (noURLS) return;
actions.openBookmarksNewWindow(li, urls, true);
break;
case 'folder-edit':
actions.editBookmarkFolder(id);
break;
case 'folder-delete':
actions.deleteBookmarks(id, urlsLen, children.length - urlsLen);
break;
}
});
// The menu is cleared immediately; the async callback above already
// holds its own references to li/id, so this ordering is safe.
clearMenu();
};
$folderContextMenu.addEventListener('mouseup', function(e) {
e.stopPropagation();
if (e.button === 0 || (os == 'mac' && e.button == 1)) folderContextHandler(e);
});
$folderContextMenu.addEventListener('contextmenu', folderContextHandler);
$folderContextMenu.addEventListener('click', function(e) {
e.stopPropagation();
});
// Keyboard navigation
// Keyboard navigation for the bookmark tree: arrows move/expand/
// collapse (mirrored for RTL), Home/End jump (Cmd+Up/Down on Mac),
// Space/Enter activate, F2 edits (non-Mac), and any other printable
// key triggers a matching bookmark hotkey.
var treeKeyDown = function(e) {
var item = document.activeElement;
// Fall back to the first tree row when focus is elsewhere.
if (!/^(a|span)$/i.test(item.tagName)) item = $tree.querySelector('li:first-child>span');
var li = item.parentNode;
var keyCode = e.keyCode;
var metaKey = e.metaKey;
if (keyCode == 40 && metaKey) keyCode = 35; // cmd + down (Mac)
if (keyCode == 38 && metaKey) keyCode = 36; // cmd + up (Mac)
var event; var lis; var parentID;
switch (keyCode) {
case 40: // down
e.preventDefault();
var liChild = li.querySelector('ul>li:first-child');
if (li.hasClass('open') && liChild) {
// Descend into the open folder's first child.
liChild.querySelector('a, span').focus();
} else {
var nextLi = li.nextElementSibling;
if (nextLi) {
nextLi.querySelector('a, span').focus();
} else {
// Last item at this level: walk up until an ancestor has a
// next sibling.
do {
// Go up in hierarchy
li = li.parentNode.parentNode;
// Go to next
if (li.tagName === 'LI') nextLi = li.nextElementSibling;
if (nextLi) nextLi.querySelector('a, span').focus();
} while (li.tagName === 'LI' && !nextLi);
}
}
break;
case 38: // up
e.preventDefault();
var prevLi = li.previousElementSibling;
if (prevLi) {
// Descend into the deepest visible last child of open folders.
while (prevLi.hasClass('open') && prevLi.querySelector('ul>li:last-child')) {
lis = prevLi.querySelectorAll('ul>li:last-child');
prevLi = Array.filter(function(li) {
return !!li.parentNode.offsetHeight;
}, lis).getLast();
}
prevLi.querySelector('a, span').focus();
} else {
var parentPrevLi = li.parentNode.parentNode;
if (parentPrevLi && parentPrevLi.tagName == 'LI') {
parentPrevLi.querySelector('a, span').focus();
}
}
break;
case 39: // right (left for RTL)
e.preventDefault();
// Expand (or, in RTL, collapse) by synthesizing a click on the row.
if (li.hasClass('parent') && ((!rtl && !li.hasClass('open')) || (rtl && li.hasClass('open')))) {
event = document.createEvent('MouseEvents');
event.initMouseEvent('click', true, true, window, 0, 0, 0, 0, 0, false, false, false, false, 0, null);
li.firstElementChild.dispatchEvent(event);
} else if (rtl) {
parentID = li.dataset.parentid;
if (parentID == '0') return;
$('neat-tree-item-' + parentID).querySelector('span').focus();
}
break;
case 37: // left (right for RTL)
e.preventDefault();
if (li.hasClass('parent') && ((!rtl && li.hasClass('open')) || (rtl && !li.hasClass('open')))) {
event = document.createEvent('MouseEvents');
event.initMouseEvent('click', true, true, window, 0, 0, 0, 0, 0, false, false, false, false, 0, null);
li.firstElementChild.dispatchEvent(event);
} else if (!rtl) {
parentID = li.dataset.parentid;
if (parentID == '0') return;
$('neat-tree-item-' + parentID).querySelector('span').focus();
}
break;
case 32: // space
case 13: // enter
e.preventDefault();
// Forward the modifier keys so click handlers see ctrl/shift/meta.
event = document.createEvent('MouseEvents');
event.initMouseEvent('click', true, true, window, 0, 0, 0, 0, 0, e.ctrlKey, false, e.shiftKey, e.metaKey, 0, null);
li.firstElementChild.dispatchEvent(event);
break;
case 35: // end
// Focus the deepest visible last item of the whole tree.
lis = this.querySelectorAll('ul>li:last-child');
Array.filter(function(li) {
return !!li.parentNode.offsetHeight;
}, lis).getLast().querySelector('span, a').focus();
break;
case 36: // home
this.querySelector('ul>li:first-child').querySelector('span, a').focus();
break;
case 113: // F2, not for Mac
if (os == 'mac') break;
var id = li.id.replace(/(neat\-tree)\-item\-/, '');
actions.editBookmarkFolder(id);
break;
case 46: // delete
break; // don't run 'default'
default:
var key = String.fromCharCode(keyCode).trim();
if (!key) return;
// Trigger the hotkey if it exists.
key = key.toLowerCase();
var id = getHotkeyId(key);
if (id) {
var li = $('neat-tree-item-' + id);
// Due to Chrome bookmark sync bug, it's possible that the element
// for this bookmark id doesn't actually exist anymore.
if (li === null) {
// Delete hotkey for this id to prevent invalid ids from remaining.
unsetHotkey(id);
} else {
event = document.createEvent('MouseEvents');
event.initMouseEvent('click', true, true, window, 0, 0, 0, 0, 0, e.ctrlKey, false, e.shiftKey, e.metaKey, 0, null);
li.firstElementChild.dispatchEvent(event);
}
}
}
};
$tree.addEventListener('keydown', treeKeyDown);
// Keyup handler: Delete (or Backspace on Mac) deletes the focused
// bookmark or folder; confirmation is handled inside `actions`.
var treeKeyUp = function(e) {
var item = document.activeElement;
if (!/^(a|span)$/i.test(item.tagName)) item = $tree.querySelector('li:first-child>span');
var li = item.parentNode;
switch (e.keyCode) {
case 8: // backspace
if (os != 'mac') break; // somehow delete button on mac gives backspace
/* falls through */
case 46: // delete
e.preventDefault();
var id = li.id.replace(/(neat\-tree)\-item\-/, '');
if (li.hasClass('parent')) {
// Folder: count bookmarks vs subfolders for the confirm message.
chrome.bookmarks.getChildren(id, function(children) {
var urlsLen = Array.map(function(c) {
return c.url;
}, children).clean().length;
actions.deleteBookmarks(id, urlsLen, children.length - urlsLen);
});
} else {
actions.deleteBookmark(id);
}
break;
}
};
$tree.addEventListener('keyup', treeKeyUp);
// Keyboard navigation inside a context menu (`this` is the menu):
// up/down move between COMMAND items (skipping HR separators, wrapping
// on non-Mac), Enter (and Space on Mac) activates via a synthetic
// mouseup, Esc closes.
var contextKeyDown = function(e) {
var menu = this;
var item = document.activeElement;
var metaKey = e.metaKey;
switch (e.keyCode) {
case 40: // down
e.preventDefault();
if (metaKey) { // cmd + down (Mac)
menu.lastElementChild.focus();
} else {
if (item.tagName == 'COMMAND') {
var nextItem = item.nextElementSibling;
if (nextItem && nextItem.tagName == 'HR') nextItem = nextItem.nextElementSibling;
if (nextItem) {
nextItem.focus();
} else if (os != 'mac') {
// Wrap to the top (Mac menus don't wrap).
menu.firstElementChild.focus();
}
} else {
// Focus is on the menu itself; start at the first item.
item.firstElementChild.focus();
}
}
break;
case 38: // up
e.preventDefault();
if (metaKey) { // cmd + up (Mac)
menu.firstElementChild.focus();
} else {
if (item.tagName == 'COMMAND') {
var prevItem = item.previousElementSibling;
if (prevItem && prevItem.tagName == 'HR') prevItem = prevItem.previousElementSibling;
if (prevItem) {
prevItem.focus();
} else if (os != 'mac') {
menu.lastElementChild.focus();
}
} else {
item.lastElementChild.focus();
}
}
break;
case 32: // space
if (os != 'mac') break;
/* falls through */
case 13: // enter
e.preventDefault();
// Synthesize mouseup — the menus act on mouseup, not click.
var event = document.createEvent('MouseEvents');
event.initMouseEvent('mouseup', true, true, window, 0, 0, 0, 0, 0, false, false, false, false, 0, null);
item.dispatchEvent(event);
/* falls through */
case 27: // esc
e.preventDefault();
var active = body.querySelector('.active');
if (active) active.removeClass('active').focus();
clearMenu();
}
};
$bookmarkContextMenu.addEventListener('keydown', contextKeyDown);
$folderContextMenu.addEventListener('keydown', contextKeyDown);
var contextMouseMove = function(e) {
e.target.focus();
};
$bookmarkContextMenu.addEventListener('mousemove', contextMouseMove);
$folderContextMenu.addEventListener('mousemove', contextMouseMove);
var contextMouseOut = function() {
if (this.style.opacity.toInt()) this.focus();
};
$bookmarkContextMenu.addEventListener('mouseout', contextMouseOut);
$folderContextMenu.addEventListener('mouseout', contextMouseOut);
// Drag and drop
var draggedBookmark = null; // the <a>/<span> element currently being dragged, or null
var draggedOut = false; // true once the cursor has left the dragged element
var canDrop = false; // true while hovering a valid drop target
var bookmarkClone = $('bookmark-clone'); // floating copy that follows the cursor
var dropOverlay = $('drop-overlay'); // visual indicator of the drop position
$tree.addEventListener('mousedown', function(e) {
  if (e.button !== 0) return; // left button starts a drag
  var el = e.target;
  var elParent = el.parentNode;
  // can move any bookmarks/folders except the default root folders
  if ((el.tagName == 'A' && elParent.hasClass('child')) || (el.tagName == 'SPAN' && elParent.hasClass('parent') && elParent.dataset.parentid != '0')) {
    e.preventDefault();
    draggedOut = false;
    draggedBookmark = el;
    bookmarkClone.innerHTML = el.innerHTML;
    el.focus();
  }
});
// Auto-scroll while dragging near the tree edges: a `scrollTreeSpot`-pixel
// hot zone at top/bottom scrolls by `scrollTreeSpeed` px every
// `scrollTreeInterval` ms. `scrollTree` holds the active interval id.
var scrollTree, scrollTreeInterval = 100,
  scrollTreeSpot = 10;
var scrollTreeSpeed = 20;
var stopScrollTree = function() {
  clearInterval(scrollTree);
  scrollTree = null;
};
// Drag in progress: move the bookmark clone with the cursor, show the
// drop-position overlay, and auto-scroll when near the tree's edges.
document.addEventListener('mousemove', function(e) {
  if (e.button !== 0) return;
  if (!draggedBookmark) return;
  e.preventDefault();
  var el = e.target;
  var clientX = e.clientX;
  var clientY = e.clientY + document.body.scrollTop;
  // Hovering the dragged element itself: hide visuals, no drop possible.
  if (el == draggedBookmark) {
    bookmarkClone.style.left = '-999px';
    dropOverlay.style.left = '-999px';
    canDrop = false;
    return;
  }
  draggedOut = true;
  // if the cursor moves outside the tree, hide visuals and disallow dropping
  var treeTop = $tree.offsetTop,
    treeBottom = window.innerHeight;
  if (clientX < 0 || clientY < treeTop || clientX > $tree.offsetWidth || clientY > treeBottom) {
    bookmarkClone.style.left = '-999px';
    dropOverlay.style.left = '-999px';
    canDrop = false;
  }
  // if hovering over the top or bottom edges of the tree, scroll the tree
  var treeScrollHeight = $tree.scrollHeight,
    treeOffsetHeight = $tree.offsetHeight;
  if (treeScrollHeight > treeOffsetHeight) { // only scroll when it's scrollable
    var treeScrollTop = $tree.scrollTop;
    if (clientY <= treeTop + scrollTreeSpot) {
      if (treeScrollTop === 0) {
        stopScrollTree(); // already at the top
      } else if (!scrollTree) {
        scrollTree = setInterval(function() {
          $tree.scrollTop -= scrollTreeSpeed;
          dropOverlay.style.left = '-999px'; // overlay position goes stale while scrolling
        }, scrollTreeInterval);
      }
    } else if (clientY >= treeBottom - scrollTreeSpot) {
      if (treeScrollTop == (treeScrollHeight - treeOffsetHeight)) {
        stopScrollTree(); // already at the bottom
      } else if (!scrollTree) {
        scrollTree = setInterval(function() {
          $tree.scrollTop += scrollTreeSpeed;
          dropOverlay.style.left = '-999px';
        }, scrollTreeInterval);
      }
    } else {
      stopScrollTree();
    }
  }
  // collapse the folder before moving it
  var draggedBookmarkParent = draggedBookmark.parentNode;
  if (draggedBookmark.tagName == 'SPAN' && draggedBookmarkParent.hasClass('open')) {
    draggedBookmarkParent.removeClass('open').setAttribute('aria-expanded', false);
  }
  if (el.tagName == 'A') {
    // Over a bookmark row: show an insertion line above or below it,
    // depending on which half of the row the cursor is in.
    canDrop = true;
    bookmarkClone.style.top = clientY + 'px';
    bookmarkClone.style.left = (rtl ? (clientX - bookmarkClone.offsetWidth) : clientX) + 'px';
    var elRect = el.getBoundingClientRect();
    var elRectTop = elRect.top + document.body.scrollTop;
    var elRectBottom = elRect.bottom + document.body.scrollTop;
    var top = (clientY >= elRectTop + elRect.height / 2) ? elRectBottom : elRectTop;
    dropOverlay.className = 'bookmark';
    dropOverlay.style.top = top + 'px';
    dropOverlay.style.left = rtl ? '0px' : el.style.webkitPaddingStart.toInt() + 16 + 'px';
    dropOverlay.style.width = (el.getComputedStyle('width').toInt() - 12) + 'px';
    dropOverlay.style.height = null;
  } else if (el.tagName == 'SPAN') {
    // Over a folder label: the top/bottom 30% of the row means "insert as
    // sibling" (line overlay); the middle means "drop inside" (box overlay).
    canDrop = true;
    bookmarkClone.style.top = clientY + 'px';
    bookmarkClone.style.left = clientX + 'px';
    var elRect = el.getBoundingClientRect();
    var top = null;
    var elRectTop = elRect.top + document.body.scrollTop;
    var elRectHeight = elRect.height;
    var elRectBottom = elRect.bottom + document.body.scrollTop;
    var elParent = el.parentNode;
    if (elParent.dataset.parentid != '0') { // no sibling insertion around root folders
      if (clientY < elRectTop + elRectHeight * 0.3) {
        top = elRectTop;
      } else if (clientY > (elRectTop + elRectHeight * 0.7) && !elParent.hasClass('open')) {
        top = elRectBottom;
      }
    }
    if (top === null) {
      dropOverlay.className = 'folder';
      dropOverlay.style.top = elRectTop + 'px';
      dropOverlay.style.left = '0px';
      dropOverlay.style.width = elRect.width + 'px';
      dropOverlay.style.height = elRect.height + 'px';
    } else {
      dropOverlay.className = 'bookmark';
      dropOverlay.style.top = top + 'px';
      dropOverlay.style.left = el.style.webkitPaddingStart.toInt() + 16 + 'px';
      dropOverlay.style.width = (el.getComputedStyle('width').toInt() - 12) + 'px';
      dropOverlay.style.height = null;
    }
  }
});
// Reset all drag state; the clone and overlay are parked off-screen rather
// than removed so they can be reused by the next drag.
var onDrop = function() {
  draggedBookmark = null;
  bookmarkClone.style.left = '-999px';
  dropOverlay.style.left = '-999px';
  canDrop = false;
};
// Finish a drag: drop the dragged bookmark/folder relative to the element
// under the cursor (before/after a bookmark, before/after/into a folder),
// persist the move via chrome.bookmarks, then reset the drag state.
document.addEventListener('mouseup', function(e) {
  if (e.button !== 0) return;
  if (!draggedBookmark) return;
  stopScrollTree();
  if (!canDrop) {
    // Dropped nowhere meaningful; suppress the click-to-open that follows.
    if (draggedOut) noOpenBookmark = true;
    draggedOut = false;
    onDrop();
    return;
  }
  var el = e.target;
  var elParent = el.parentNode;
  var id = elParent.id.replace('neat-tree-item-', '');
  if (!id) {
    onDrop();
    return;
  }
  var draggedBookmarkParent = draggedBookmark.parentNode;
  var draggedID = draggedBookmarkParent.id.replace('neat-tree-item-', '');
  if (!draggedID) {
    // Fix: bail out early when the dragged node has no bookmark id.
    // Previously the 'A' branch silently left the drag state dangling and
    // the 'SPAN' branch called chrome.bookmarks.move with an empty id.
    onDrop();
    return;
  }
  var clientY = e.clientY + document.body.scrollTop;
  if (el.tagName == 'A') {
    // Dropping onto a bookmark row: insert before or after it depending on
    // which half of the row the cursor is in.
    var elRect = el.getBoundingClientRect();
    var elRectTop = elRect.top + document.body.scrollTop;
    var moveBottom = (clientY >= elRectTop + elRect.height / 2);
    chrome.bookmarks.get(id, function(node) {
      if (!node || !node.length) return;
      node = node[0];
      var index = node.index;
      var parentId = node.parentId;
      chrome.bookmarks.move(draggedID, {
        parentId: parentId,
        index: moveBottom ? ++index : index
      }, function() {
        draggedBookmarkParent.inject(elParent, moveBottom ? 'after' : 'before');
        draggedBookmark.style.webkitPaddingStart = el.style.webkitPaddingStart;
        draggedBookmark.focus();
        onDrop();
      });
    });
  } else if (el.tagName == 'SPAN') {
    // Dropping onto a folder label: top 30% inserts above it, bottom 30%
    // (only when the folder is closed) inserts below it, the middle drops
    // inside the folder.
    var elRect = el.getBoundingClientRect();
    var move = 0; // 0 = into the folder, 1 = above it, 2 = below it
    var elRectTop = elRect.top,
      elRectHeight = elRect.height;
    if (elParent.dataset.parentid != '0') { // root folders only accept "into"
      if (clientY < elRectTop + elRectHeight * 0.3) {
        move = 1;
      } else if (clientY > elRectTop + elRectHeight * 0.7 && !elParent.hasClass('open')) {
        move = 2;
      }
    }
    if (move > 0) {
      var moveBottom = (move == 2);
      chrome.bookmarks.get(id, function(node) {
        if (!node || !node.length) return;
        node = node[0];
        var index = node.index;
        var parentId = node.parentId;
        chrome.bookmarks.move(draggedID, {
          parentId: parentId,
          index: moveBottom ? ++index : index
        }, function() {
          draggedBookmarkParent.inject(elParent, moveBottom ? 'after' : 'before');
          draggedBookmark.style.webkitPaddingStart = el.style.webkitPaddingStart;
          draggedBookmark.focus();
          onDrop();
        });
      });
    } else {
      // Drop inside the folder: reparent under its rendered <ul> if there is
      // one, otherwise remove the node (it will be re-created on expand).
      chrome.bookmarks.move(draggedID, {
        parentId: id
      }, function() {
        var ul = elParent.querySelector('ul');
        var level = parseInt(elParent.parentNode.dataset.level) + 1;
        draggedBookmark.style.webkitPaddingStart = (14 * level) + 'px';
        if (ul) {
          draggedBookmarkParent.inject(ul);
        } else {
          draggedBookmarkParent.destroy();
        }
        el.focus();
        onDrop();
      });
    }
  } else {
    onDrop();
  }
});
// Resizer: dragging the popup's edge resizes it between 320 and 640 px;
// the chosen width is persisted in localStorage (read back by init()).
var $resizer = $('resizer');
var resizerDown = false;
var bodyWidth, screenX; // width and pointer position at drag start
$resizer.addEventListener('mousedown', function(e) {
  e.preventDefault();
  e.stopPropagation();
  resizerDown = true;
  bodyWidth = body.offsetWidth;
  screenX = e.screenX;
});
document.addEventListener('mousemove', function(e) {
  if (!resizerDown) return;
  e.preventDefault();
  // In RTL layouts the resizer sits on the opposite edge, so the delta flips.
  var changedWidth = rtl ? (e.screenX - screenX) : (screenX - e.screenX);
  var width = bodyWidth + changedWidth;
  width = Math.min(640, Math.max(320, width));
  body.style.width = width + 'px';
  localStorage.popupWidth = width;
  clearMenu(); // messes the context menu
});
document.addEventListener('mouseup', function(e) {
  if (!resizerDown) return;
  e.preventDefault();
  resizerDown = false;
  adaptBookmarkTooltips();
});
// Closing dialogs on escape (and on clicking the dimmed cover).
var closeDialogs = function() {
  // Confirm dialog: run its cancel callback (fn2) before closing.
  if (body.hasClass('needConfirm')) ConfirmDialog.fn2();
  ConfirmDialog.close();
  if (body.hasClass('needEdit')) EditDialog.closeNoSave();
  if (body.hasClass('needSetHotkey')) HotkeyDialog.closeNoSave();
  if (body.hasClass('needAlert')) AlertDialog.close();
};
document.addEventListener('keydown', function(e) {
  if (e.keyCode == 27 && (body.hasClass('needConfirm') || body.hasClass('needEdit') || body.hasClass('needSetHotkey') || body.hasClass('needAlert'))) { // esc
    e.preventDefault();
    closeDialogs();
  }
});
$('cover').addEventListener('click', closeDialogs);
// Make webkit transitions work only after elements are settled down,
// otherwise the initial layout itself would animate.
setTimeout(function() {
  body.addClass('transitional');
}, 10);
})(window);
|
/*
* main.cpp
*
* Created on: 1 Nov 2015
* Author: Patrick
*/
#include <iostream>
#define SCOPE_ERROR(A, B, C) A B C
namespace {
// Every initializer below expands (via the SCOPE_ERROR macro defined above)
// to the adjacent-literal concatenation "A" "B" "C", i.e. the literal "ABC".
// The arrays get their own storage; the pointers refer to literal storage,
// which the implementation may or may not pool across uses.
char const foolit[] = SCOPE_ERROR("A", "B", "C");
char const fooled[] = SCOPE_ERROR("A", "B", "C");
char const * const foolit_ptr = SCOPE_ERROR("A", "B", "C");
char const * const fooled_ptr = SCOPE_ERROR("A", "B", "C");
// Two structurally identical classes to compare literal identity across
// different class scopes.
struct T {
static const char foolit[];
static const char fooled[];
static char const * foolit_ptr;
};
const char T::foolit[] = SCOPE_ERROR("A", "B", "C");
const char T::fooled[] = SCOPE_ERROR("A", "B", "C");
char const * T::foolit_ptr = SCOPE_ERROR("A", "B", "C");
struct U {
static const char foolit[];
static const char fooled[];
static char const * foolit_ptr;
};
const char U::foolit[] = SCOPE_ERROR("A", "B", "C");
const char U::fooled[] = SCOPE_ERROR("A", "B", "C");
char const * U::foolit_ptr = SCOPE_ERROR("A", "B", "C");
}
int main(int argc, char * argv[])
{
    // Distinct arrays always have distinct addresses, so the array
    // comparisons print false. Whether two identical string literals share
    // storage is unspecified, so the pointer comparisons may print either
    // value depending on the implementation's literal pooling.
    // Fix: the last two lines previously relied on accidental adjacent
    // string-literal concatenation (a missing `<<`); the explicit `<<` is
    // now present and the output is unchanged.
    std::cout << std::boolalpha <<
        "(global) compare char arrays: " << (::foolit == ::fooled) << "\n" <<
        "(global) compare string literals: " << (::foolit_ptr == ::fooled_ptr) << "\n" <<
        "(global, T) compare char arrays: " << (T::foolit == ::foolit) << "\n" <<
        "(global, T) compare string literals: " << (T::foolit_ptr == ::foolit_ptr) << "\n" <<
        "(T) compare char arrays: " << (T::foolit == T::fooled) << "\n" <<
        "(T, U) compare char arrays: " << (T::fooled == U::fooled) << "\n" <<
        "(T, U) compare string literals: " << (T::foolit_ptr == U::foolit_ptr) << "\n" <<
        "(global, U) compare string literals: " << (U::foolit_ptr == ::foolit_ptr) << "\n"
        ;
}
|
def get_channel_choice_to(channels):
    """Build a PyInquirer-style checkbox question for picking channels.

    Args:
        channels: iterable of dicts with keys 'channel_id', 'local_balance',
            'remote_balance' and 'graphic'.

    Returns:
        dict: question definition with one choice per channel and a
        ``validate`` callback that requires at least one selection.
    """
    def describe(channel):
        # One display line per channel: id - local - remote - graphic.
        return (f"{channel['channel_id']} - {channel['local_balance']} - "
                f"{channel['remote_balance']} - {channel['graphic']}")

    def validate(answer):
        # Checkbox answers arrive as a list; demand a non-empty selection.
        return 'You must choose at least one channel' if len(answer) == 0 else True

    return {
        "type": "checkbox",
        "qmark": "⚡️",
        # Fix: corrected the user-facing typo "remote_balace" -> "remote_balance".
        "message": "CHOOSE FROM nick, channel id, local_balance, remote_balance, graphic",
        # NOTE(review): the name says "channel_choices_from" although this
        # function is ..._to; kept unchanged because callers key on this string.
        "name": "channel_choices_from",
        "choices": [{'name': describe(c)} for c in channels],
        "validate": validate,
    }
|
ORIGIN=${ORIGIN:-origin}
# Refresh tags from the remote first, as its own command. Fix: the original
# piped `git fetch` into `git tag`, which only worked by accident because
# `git tag -l` ignores stdin (and fetch's output was discarded anyway).
git fetch --tags "${ORIGIN}" &>/dev/null
# List tags sorted as versions (treating "-beta" as a pre-release suffix),
# take the highest, and strip the leading "v" (cut -c 2-).
version=$(git -c "versionsort.prereleasesuffix=-beta" tag -l --sort=version:refname | tail -n1 | cut -c 2-)
echo "$version"
|
#!/bin/bash
# -*- coding: utf-8 -*-
#
# Python dev environment tools.
#
# This script should be sourced to use.
#
# This file is generated by cookiecutter-pygitrepo 0.0.2: https://github.com/MacHu-GWU/cookiecutter-pygitrepo/tree/0.0.2
# Resolve the directory containing this script, whether it is sourced from
# bash (BASH_SOURCE set) or from another POSIX-ish shell.
if [ -n "${BASH_SOURCE}" ]
then
    dir_here="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
else
    dir_here="$( cd "$(dirname "$0")" ; pwd -P )"
fi
dir_bin="$(dirname "${dir_here}")"
dir_project_root=$(dirname "${dir_bin}")

# Fix: quote the sourced paths so directories containing spaces work.
source "${dir_bin}/settings.sh"
source "${dir_bin}/source/detect-os.sh"
source "${dir_bin}/source/helpers.sh"

# Virtualenv Name
venv_name="${package_name}_venv"

# Full Python Version (e.g. 3.8.2) and its major.minor prefix (e.g. 3.8)
py_version="${py_ver_major}.${py_ver_minor}.${py_ver_micro}"
py_version_major_and_minor="${py_ver_major}.${py_ver_minor}"
# Populate the global path_* variables for every well-known file/directory of
# the project (docs, requirements, tests, build artifacts), all rooted at the
# project directory passed as $1. Callers rely on these globals afterwards.
resolve_important_path()
{
    local tmp_dir_project_root=$1
    path_readme=${tmp_dir_project_root}/README.rst
    path_sphinx_doc=${tmp_dir_project_root}/docs
    path_sphinx_doc_source=${tmp_dir_project_root}/docs/source
    path_sphinx_doc_build=${tmp_dir_project_root}/docs/build
    path_sphinx_doc_build_html=${path_sphinx_doc_build}/html
    path_sphinx_index_html=${path_sphinx_doc_build}/html/index.html
    path_sphinx_config=${path_sphinx_doc_source}/conf.py
    # ${package_name} comes from settings.sh sourced above.
    path_version_file=${tmp_dir_project_root}/${package_name}/_version.py
    path_requirement_file=${tmp_dir_project_root}/requirements.txt
    path_dev_requirement_file=${tmp_dir_project_root}/requirements-dev.txt
    path_doc_requirement_file=${tmp_dir_project_root}/requirements-doc.txt
    path_test_requirement_file=${tmp_dir_project_root}/requirements-test.txt
    path_test_dir=${tmp_dir_project_root}/tests
    path_coverage_annotate_dir=${tmp_dir_project_root}/.coverage.annotate
    path_tox_dir=${tmp_dir_project_root}/.tox
    path_auto_pep8_script=${tmp_dir_project_root}/fix_code_style.py
    path_build_dir=${tmp_dir_project_root}/build
    path_dist_dir=${tmp_dir_project_root}/dist
    path_egg_dir=${tmp_dir_project_root}/${package_name}.egg-info
    path_pytest_cache_dir=${tmp_dir_project_root}/.pytest_cache
}
resolve_important_path ${dir_project_root}
resolve_important_path ${dir_project_root}
# Populate the global bin_* variables with the paths of the executables that
# live inside the virtualenv's bin/Scripts directory passed as $1.
resolve_venv_bin()
{
    # python virtual environment bin directory
    local tmp_dir_venv_bin=$1
    bin_activate="${tmp_dir_venv_bin}/activate"
    bin_python="${tmp_dir_venv_bin}/python"
    bin_pip="${tmp_dir_venv_bin}/pip"
    bin_pytest="${tmp_dir_venv_bin}/pytest"
    bin_sphinx_start="${tmp_dir_venv_bin}/sphinx-quickstart"
    bin_twine="${tmp_dir_venv_bin}/twine"
    bin_tox="${tmp_dir_venv_bin}/tox"
    bin_jupyter="${tmp_dir_venv_bin}/jupyter"
    bin_aws="${tmp_dir_venv_bin}/aws"
}
# Derive the bin and site-packages directories for a venv at $1, using the
# Windows layout (Scripts/, Lib/). Sets dir_venv_bin and the two
# dir_venv_site_packages* globals.
resolve_other_venv_dir_on_windows()
{
    # python virtual environment directory
    local tmp_dir_venv=$1
    dir_venv_bin="${tmp_dir_venv}/Scripts"
    dir_venv_site_packages="${tmp_dir_venv}/Lib/site-packages"
    dir_venv_site_packages64="${tmp_dir_venv}/Lib64/site-packages"
}
# Same as above but for the POSIX layout (bin/, lib/pythonX.Y/).
resolve_other_venv_dir_on_darwin_or_linux()
{
    # python virtual environment directory
    local tmp_dir_venv=$1
    # python major and minor version, example: 2.7 or 3.6
    local tmp_py_version_major_and_minor=$2
    dir_venv_bin="${tmp_dir_venv}/bin"
    dir_venv_site_packages="${tmp_dir_venv}/lib/python${tmp_py_version_major_and_minor}/site-packages"
    dir_venv_site_packages64="${tmp_dir_venv}/lib64/python${tmp_py_version_major_and_minor}/site-packages"
}
# --- resolve venv
resolve_windows_venv()
{
local tmp_venv_name=$1
local tmp_py_version_major_and_minor=$2
local tmp_dir_envs="${HOMEPATH}/venvs/python/${tmp_py_version_major_and_minor}"
mkdir -p ${tmp_dir_envs}
dir_venv="${tmp_dir_envs}/${tmp_venv_name}"
resolve_other_venv_dir_on_windows ${dir_venv}
resolve_venv_bin ${dir_venv_bin}
}
resolve_mac_pyenv()
{
local tmp_venv_name=$1
local tmp_py_version=$2
local tmp_py_version_major_and_minor=$3
dir_venv="${HOME}/.pyenv/versions/${tmp_py_version}/envs/${tmp_venv_name}"
resolve_other_venv_dir_on_darwin_or_linux ${dir_venv} ${tmp_py_version_major_and_minor}
resolve_venv_bin ${dir_venv_bin}
}
resolve_mac_venv()
{
local tmp_venv_name=$1
local tmp_py_version=$2
local tmp_py_version_major_and_minor=$3
local tmp_dir_envs="${HOME}/venvs/python/${tmp_py_version}"
mkdir -p ${tmp_dir_envs}
dir_venv="${tmp_dir_envs}/${tmp_venv_name}"
resolve_other_venv_dir_on_darwin_or_linux ${dir_venv} ${tmp_py_version_major_and_minor}
resolve_venv_bin ${dir_venv_bin}
}
resolve_linux_pyenv() {
resolve_mac_pyenv $1 $2 $3
}
resolve_linux_venv() {
resolve_mac_venv $1 $2 $3
}
# Pick the system python and the venv resolver for the current platform.
# The os_is_* flags are set by source/detect-os.sh sourced above.
if [ "${os_is_windows}" = "Y" ]
then
    bin_global_python="/c/Python${py_ver_major}${py_ver_minor}/python.exe"
    resolve_windows_venv ${venv_name} ${py_version_major_and_minor}
elif [ "${os_is_darwin}" = "Y" ]
then
    bin_global_python="python${py_ver_major}.${py_ver_minor}"
    bin_global_python="$(which ${bin_global_python})"
    # macOS can use either pyenv-virtualenv or a plain venv, per settings.
    if [ "${use_pyenv}" = "Y" ]
    then
        resolve_mac_pyenv ${venv_name} ${py_version} ${py_version_major_and_minor}
    else
        resolve_mac_venv ${venv_name} ${py_version} ${py_version_major_and_minor}
    fi
elif [ "${os_is_linux}" = "Y" ]
then
    bin_global_python="python${py_ver_major}.${py_ver_minor}"
    bin_global_python="$(which ${bin_global_python})"
    resolve_linux_venv ${venv_name} ${py_version} ${py_version_major_and_minor}
fi
# Doc Relative Variables
# package_version is printed by the project's _version.py when executed;
# the remaining values are the ReadTheDocs and S3 documentation locations.
package_version=$(python ${path_version_file})
rtd_url="https://${rtd_project_name}.readthedocs.io/"
rtd_project_url="https://readthedocs.org/projects/${package_name}/"
s3_uri_doc_versioned="s3://${s3_bucket_doc_host}/docs/${package_name}/${package_version}"
s3_uri_doc_latest="s3://${s3_bucket_doc_host}/docs/${package_name}/latest"
s3_doc_url="${s3_bucket_doc_host}.s3.amazonaws.com/docs/${package_name}/latest/index.html"
# Deploy sphinx generated html doc to s3 bucket
deploy_doc_to_s3() {
local tmp_path_sphinx_doc_build_html="$1" # html doc dir
local tmp_s3_uri_doc="$2" # uri of dir on s3
echo "remove existing doc on ${tmp_s3_uri_doc}"
aws s3 rm ${tmp_s3_uri_doc} \
--recursive \
--only-show-errors \
--profile ${aws_profile_doc_host}
echo "upload doc to ${tmp_s3_uri_doc}"
aws s3 sync ${tmp_path_sphinx_doc_build_html} ${tmp_s3_uri_doc} \
--only-show-errors \
--profile ${aws_profile_doc_host}
}
|
#!/usr/bin/env bash
#
# Copyright (c) 2018-2020 The Matilda Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
export LC_ALL=C.UTF-8

# CI test phase: every RUN_* / wrapper stage below is toggled by environment
# variables set earlier in the CI pipeline; commands run inside the build
# container via DOCKER_EXEC, grouped into collapsible log folds.

# Windows cross-builds: build all binaries first so they can be wrapped to
# run under wine.
if [[ $HOST = *-mingw32 ]]; then
  BEGIN_FOLD wrap-wine
  # Generate all binaries, so that they can be wrapped
  DOCKER_EXEC make $MAKEJOBS -C src/secp256k1 VERBOSE=1
  DOCKER_EXEC make $MAKEJOBS -C src/univalue VERBOSE=1
  DOCKER_EXEC "${BASE_ROOT_DIR}/ci/test/wrap-wine.sh"
  END_FOLD
fi

# Foreign-architecture builds: wrap binaries to run under qemu user emulation.
if [ -n "$QEMU_USER_CMD" ]; then
  BEGIN_FOLD wrap-qemu
  # Generate all binaries, so that they can be wrapped
  DOCKER_EXEC make $MAKEJOBS -C src/secp256k1 VERBOSE=1
  DOCKER_EXEC make $MAKEJOBS -C src/univalue VERBOSE=1
  DOCKER_EXEC "${BASE_ROOT_DIR}/ci/test/wrap-qemu.sh"
  END_FOLD
fi

# Memory-error checking: wrap binaries to run under valgrind.
if [ -n "$USE_VALGRIND" ]; then
  BEGIN_FOLD wrap-valgrind
  DOCKER_EXEC "${BASE_ROOT_DIR}/ci/test/wrap-valgrind.sh"
  END_FOLD
fi

# C++ unit tests (parallel via `make check`).
if [ "$RUN_UNIT_TESTS" = "true" ]; then
  BEGIN_FOLD unit-tests
  DOCKER_EXEC ${TEST_RUNNER_ENV} DIR_UNIT_TEST_DATA=${DIR_UNIT_TEST_DATA} LD_LIBRARY_PATH=$DEPENDS_DIR/$HOST/lib make $MAKEJOBS check VERBOSE=1
  END_FOLD
fi

# C++ unit tests run sequentially through the test binary itself.
if [ "$RUN_UNIT_TESTS_SEQUENTIAL" = "true" ]; then
  BEGIN_FOLD unit-tests-seq
  DOCKER_EXEC ${TEST_RUNNER_ENV} DIR_UNIT_TEST_DATA=${DIR_UNIT_TEST_DATA} LD_LIBRARY_PATH=$DEPENDS_DIR/$HOST/lib "${BASE_BUILD_DIR}/matilda-*/src/test/test_matilda*" --catch_system_errors=no -l test_suite
  END_FOLD
fi

# Python functional (integration) tests via the test_runner harness.
if [ "$RUN_FUNCTIONAL_TESTS" = "true" ]; then
  BEGIN_FOLD functional-tests
  DOCKER_EXEC LD_LIBRARY_PATH=$DEPENDS_DIR/$HOST/lib ${TEST_RUNNER_ENV} test/functional/test_runner.py --ci $MAKEJOBS --tmpdirprefix "${BASE_SCRATCH_DIR}/test_runner/" --ansi --combinedlogslen=4000 --timeout-factor=${TEST_RUNNER_TIMEOUT_FACTOR} ${TEST_RUNNER_EXTRA} --quiet --failfast
  END_FOLD
fi

# Binary hardening / security characteristic checks.
if [ "$RUN_SECURITY_TESTS" = "true" ]; then
  BEGIN_FOLD security-tests
  DOCKER_EXEC make test-security-check
  END_FOLD
fi

# Fuzz targets against the seed corpus.
if [ "$RUN_FUZZ_TESTS" = "true" ]; then
  BEGIN_FOLD fuzz-tests
  DOCKER_EXEC LD_LIBRARY_PATH=$DEPENDS_DIR/$HOST/lib test/fuzz/test_runner.py ${FUZZ_TESTS_CONFIG} $MAKEJOBS -l DEBUG ${DIR_FUZZ_IN}
  END_FOLD
fi
|
# Load git-flow tab completion for zsh; ${0:h} expands (zsh) to the directory
# of this script.
source "${0:h}/external/git-flow/git-flow-completion.zsh"
|
# Evaluate a trained language model on the wikitext-103 validation split with
# a trigram-shuffling augmentation, writing results to the eval-outputs dir.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+0+512-shuffled-N/model --tokenizer_name model-configs/1024-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+0+512-shuffled-N/512+0+512-STWS-1 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_trigrams_within_sentences_first_half_full --eval_function last_element_eval
|
#!/bin/bash
# Build MXNet (CPU-only, openblas) and run its lint, C++, Python 2/3, Julia
# and Scala test suites, failing fast on the first error.
# Fixes: portable `exit 1` instead of the out-of-range `exit -1`, quoted
# `$user`/`$test` expansions, standard `=` instead of bash-only `==`, and
# `$( )` instead of backticks.
echo "BUILD make"
cp make/config.mk .
echo "USE_CUDA=0" >> config.mk
echo "USE_CUDNN=0" >> config.mk
echo "USE_BLAS=openblas" >> config.mk
echo "ADD_CFLAGS += -I/usr/include/openblas" >>config.mk
echo "GTEST_PATH=/usr/local/gtest" >> config.mk
echo 'export PKG_CONFIG_PATH=/usr/local/lib/pkgconfig:$PKG_CONFIG_PATH' >> ~/.profile
echo 'export LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH' >> ~/.profile
echo 'export JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-1.8.0.111-1.b15.25.amzn1.x86_64' >> ~/.profile
echo 'export JRE_HOME=/usr/lib/jvm/java-1.8.0-openjdk-1.8.0.111-1.b15.25.amzn1.x86_64/jre' >> ~/.profile
echo 'export PATH=$PATH:/apache-maven-3.3.9/bin/:/usr/bin:/usr/lib/jvm/java-1.8.0-openjdk-1.8.0.111-1.b15.25.amzn1.x86_64/bin:/usr/lib/jvm/java-1.8.0-openjdk-1.8.0.111-1.b15.25.amzn1.x86_64/jre/bin' >> ~/.profile
source ~/.profile
# Current user decides whether packages install system-wide or per-user.
user=$(id -u -n)
make -j 4 || exit 1
echo "BUILD python2 mxnet"
cd python
if [ "$user" = 'root' ]
then
    python setup.py install || exit 1
else
    python setup.py install --prefix ~/.local || exit 1
fi
cd ..
echo "BUILD python3 mxnet"
cd python
if [ "$user" = 'root' ]
then
    python3 setup.py install || exit 1
else
    python3 setup.py install --prefix ~/.local || exit 1
fi
cd ..
echo "BUILD lint"
make lint || exit 1
echo "BUILD cpp_test"
make -j 4 test || exit 1
# Run every compiled C++ test binary with engine tracing enabled.
export MXNET_ENGINE_INFO=true
for test in tests/cpp/*_test; do
    ./"$test" || exit 1
done
export MXNET_ENGINE_INFO=false
echo "BUILD python_test"
nosetests --verbose tests/python/unittest || exit 1
nosetests --verbose tests/python/train || exit 1
echo "BUILD python3_test"
nosetests3 --verbose tests/python/unittest || exit 1
nosetests3 --verbose tests/python/train || exit 1
echo "BUILD julia_test"
export MXNET_HOME="${PWD}"
julia -e 'try Pkg.clone("MXNet"); catch end; Pkg.checkout("MXNet"); Pkg.build("MXNet"); Pkg.test("MXNet")' || exit 1
echo "BUILD scala_test"
make scalapkg || exit 1
make scalatest || exit 1
|
#!/bin/bash
# Run short-mode Go tests for every package under the given module path,
# excluding vendored code. Usage: ./test.sh <package-path>
set -e

PKG=$1
# Fix: fail with a usage message instead of running `go list /...` when the
# package argument is missing.
if [ -z "$PKG" ]; then
    echo "usage: $0 <package-path>" >&2
    exit 1
fi

# Enumerate packages, dropping anything under vendor/.
PKG_LIST=$(go list "${PKG}/..." | grep -v /vendor/)

# PKG_LIST is intentionally unquoted: it is a whitespace-separated list of
# package paths that must word-split into separate arguments.
go test -short ${PKG_LIST}
|
<filename>TafangGame/app/src/main/java/com/bn/tag/Shell.java
package com.bn.tag;
import java.util.List;
import java.util.Vector;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Paint;
import static com.bn.tag.Constant.*;
/**
 * A projectile fired by an arrow tower ({@link SingleJianta}) at a target.
 * Each frame {@link #drawSelf} advances the shell toward its target and
 * resolves hits, target loss, and leaving the tower's range.
 * (Original comments were mojibake-garbled; rewritten in English.)
 */
public class Shell {
    GameView gameView;
    private Bitmap bitmap; // sprite bitmap drawn for this shell
    float shellx; // current x position
    float shelly; // current y position
    SingleJianta jianta; // the tower that fired this shell
    // Shells flagged for removal (hit, target gone, or out of range).
    // NOTE(review): elements are only added here; presumably the game loop
    // drains this list elsewhere -- confirm in GameView.
    static List<Shell> shl=new Vector<Shell>();
    // Targets already credited as killed, to avoid double-counting rewards.
    static List<Target> tas=new Vector<Target>();
    Target tartee; // the target this shell is homing on
    double direction; // heading in degrees, fed to sin/cos in go()
    public Shell(GameView gameView,Bitmap bitmap,float shellx,float shelly,Target tartee,SingleJianta jianta)
    {
        this.gameView=gameView;
        this.bitmap=bitmap;
        this.shellx=shellx;
        this.shelly=shelly;
        this.tartee=tartee;
        this.jianta=jianta;
        //this.targett=targett;
    }
    /** Advance one step, then draw the shell bitmap at its new position. */
    public void drawSelf(Canvas canvas,Paint paint)
    {
        go();
        float dnX=shellx;
        float dnY=shelly;
        canvas.drawBitmap(bitmap, dnX, dnY,paint);
    }
    /**
     * Advance the shell one step toward its target and resolve the outcome:
     * hit (damage + possible kill reward), target already dead, out of
     * range, or keep flying.
     */
    public void go()
    {
        direction=calDirection1(shellx,shelly,tartee.ballx,tartee.bally);
        float llx=(float) (SPEED*Math.sin(direction*Math.PI/180)+shellx);// next x position
        float lly=(float) (SPEED*Math.cos(direction*Math.PI/180)+shelly);// next y position
        // Target was already killed: flag this shell for removal.
        if(tas.contains(tartee))
        {
            shl.add(this);
        }
        if(IsTwoRectCross// axis-aligned rectangle overlap: shell head vs target box
        (
            llx,lly,JIAN_TOU_WEIGHT,JIAN_TOU_HEIGHT,// shell: top-left x,y, width, height
            tartee.ballx-SINGLE_PIC/2+20,tartee.bally-SINGLE_PIC/2,SINGLE_PIC-15,SINGLE_PIC-15
        ))
        {
            // Hit: deal one point of damage; on a kill, credit money and
            // counters exactly once per target.
            tartee.bloodsum-=1;
            if(tartee.bloodsum==0)
            {
                if(!tas.contains(tartee))
                {
                    gameView.doller+=5;
                    gameView.shaNUM+=1;
                    gameView.shuijingMiddleNum+=1;
                    tas.add(tartee);
                }
            }
            shl.add(this);
            if(gameView.activity.isSoundOn())
            {
                gameView.playSound(2, 0);
            }
        }
        // Out of range of the firing tower (squared-distance comparison).
        else if(getlength(llx,lly,jianta.clo*SINGLE_RODER,jianta.row*SINGLE_RODER)>R_LENGTH*R_LENGTH)
        {
            shl.add(this);
        }
        else
        {
            // Keep flying: commit the new position.
            shellx=llx;//(float) (SPEED*Math.sin(direction*Math.PI/180)+shellx);
            shelly=lly;//(float) (SPEED*Math.cos(direction*Math.PI/180)+shelly);
        }
    }
    /** Squared Euclidean distance between (x1,y1) and (x2,y2) -- no sqrt. */
    public float getlength(float x1,float y1,float x2,float y2)
    {
        float result=(x1-x2)*(x1-x2)+(y1-y2)*(y1-y2);
        return result;
    }
    // Compute the heading (degrees) from (x1,y1) toward (x2,y2), quadrant by
    // quadrant; the result is consumed by sin/cos in go().
    public double calDirection1(float x1,float y1,float x2,float y2)
    {
        double direction = 0;
        float dx=x1-x2;
        float dy=y1-y2;
        if(dx!=0||dy!=0)
        {
            if(dx>0&&dy>0)
            {
                direction=180+Math.toDegrees(Math.atan(dx/dy));
            }
            else if(dx<0&&dy>0)
            {
                direction=180-Math.toDegrees(Math.atan(-dx/dy));
            }
            else if(dx<0&&dy<0)
            {
                direction=Math.toDegrees(Math.atan(dx/dy));
            }
            else if(dx>0&&dy<0)
            {
                direction=360-Math.toDegrees(Math.atan(dx/-dy));
            }
            else if(dx==0)
            {
                // Straight vertical: pick 180 or 0 depending on direction.
                if(dy>0)
                {
                    direction=180;
                }
                else
                {
                    direction=0;
                }
            }
            else if(dy==0)
            {
                // Straight horizontal: pick 270 or 90 depending on direction.
                if(dx>0)
                {
                    direction=270;
                }
                else
                {
                    direction=90;
                }
            }
        }
        return direction;
    }
    // True when the two axis-aligned rectangles overlap: tests whether any
    // corner of either rectangle lies inside the other.
    public static boolean IsTwoRectCross
    (
        float xLeftTop1,float yLeftTop1,float length1,float width1,// rect 1: top-left x,y, width, height
        float xLeftTop2,float yLeftTop2,float length2,float width2
    )
    {
        if
        (
            isPointInRect(xLeftTop1,yLeftTop1,xLeftTop2,yLeftTop2,length2,width2)|| // rect 1 top-left
            isPointInRect(xLeftTop1+length1,yLeftTop1,xLeftTop2,yLeftTop2,length2,width2)|| // rect 1 top-right
            isPointInRect(xLeftTop1,yLeftTop1+width1,xLeftTop2,yLeftTop2,length2,width2)|| // rect 1 bottom-left
            isPointInRect(xLeftTop1+length1,yLeftTop1+width1,xLeftTop2,yLeftTop2,length2,width2)|| // rect 1 bottom-right
            isPointInRect(xLeftTop2,yLeftTop2,xLeftTop1,yLeftTop1,length1,width1)|| // rect 2 top-left
            isPointInRect(xLeftTop2+length2,yLeftTop2,xLeftTop1,yLeftTop1,length1,width1)|| // rect 2 top-right
            isPointInRect(xLeftTop2,yLeftTop2+width2,xLeftTop1,yLeftTop1,length1,width1)|| // rect 2 bottom-left
            isPointInRect(xLeftTop2+length2,yLeftTop2+width2,xLeftTop1,yLeftTop1,length1,width1) // rect 2 bottom-right
        )
        {
            return true;
        }
        return false;
    }
    // True when the point lies inside the rectangle (boundary inclusive).
    public static boolean isPointInRect
    (
        float pointx,float pointy,
        float xLeftTop,float yLeftTop,float length,float width
    )
    {
        if(
            pointx>=xLeftTop&&pointx<=xLeftTop+length&&
            pointy>=yLeftTop&&pointy<=yLeftTop+width
        )
        {
            return true;
        }
        return false;
    }
}
|
package cn.leancloud;
import cn.leancloud.callback.SaveCallback;
import cn.leancloud.convertor.ObserverBuilder;
import cn.leancloud.json.JSON;
import cn.leancloud.json.JSONObject;
import com.google.firebase.messaging.FirebaseMessagingService;
import com.google.firebase.messaging.RemoteMessage;
import java.util.Map;
import cn.leancloud.push.AndroidNotificationManager;
import cn.leancloud.utils.LogUtil;
import cn.leancloud.utils.StringUtil;
/**
* Created by fengjunwen on 2018/8/28.
*/
/**
 * Created by fengjunwen on 2018/8/28.
 *
 * FirebaseMessagingService subclass that routes FCM data messages into the
 * LeanCloud notification pipeline and keeps the current Installation's FCM
 * registration token up to date. (Original javadoc translated from Chinese.)
 */
public class LCFirebaseMessagingService extends FirebaseMessagingService {
  private final static LCLogger LOGGER = LogUtil.getLogger(LCFirebaseMessagingService.class);
  // Vendor tag stored on the Installation to mark FCM as the push provider.
  private final String VENDOR = "fcm";
  /**
   * FCM delivers two kinds of messages: notification messages and data messages.
   *
   * Notification messages are plain status-bar notifications. While the app is
   * in the background they are shown directly in the tray and, by default,
   * tapping one opens the launcher (the message's parameters travel in the
   * Bundle) -- there is nothing we can process here.
   *
   * Data messages are the equivalent of other vendors' "pass-through"
   * messages: while the app is in the foreground they are delivered straight
   * to the app, and this callback handles them.
   *
   * @param remoteMessage the incoming FCM message
   */
  @Override
  public void onMessageReceived(RemoteMessage remoteMessage) {
    Map<String, String> data = remoteMessage.getData();
    if (null == data || data.size() < 1) {
      return;
    }
    LOGGER.d("received message from: " + remoteMessage.getFrom() + ", payload: " + data.toString());
    if (remoteMessage.getNotification() == null) {
      return;
    }
    String title = remoteMessage.getNotification().getTitle();
    String alert = remoteMessage.getNotification().getBody();
    try {
      // The LeanCloud payload travels JSON-encoded under the "payload" key;
      // fold the notification's title/body into it before dispatching.
      JSONObject jsonObject = JSON.parseObject(data.get("payload"));
      if (null != jsonObject) {
        String channel = jsonObject.getString("_channel");
        String action = jsonObject.getString("action");
        if (!StringUtil.isEmpty(title)) {
          jsonObject.put("title", title);
        }
        if (!StringUtil.isEmpty(alert)) {
          jsonObject.put("alert", alert);
        }
        AndroidNotificationManager androidNotificationManager = AndroidNotificationManager.getInstance();
        androidNotificationManager.processFcmMessage(channel, action, jsonObject.toJSONString());
      }
    } catch (Exception ex) {
      LOGGER.e("failed to parse push data.", ex);
    }
  }
  /**
   * Called when FCM issues a new registration token; forward it to the
   * LeanCloud Installation so the server can address this device.
   */
  @Override
  public void onNewToken(String token) {
    LOGGER.d("refreshed token: " + token);
    // If you want to send messages to this application instance or
    // manage this apps subscriptions on the server side, send the
    // FCM registration token to your app server.
    sendRegistrationToServer(token);
  }
  // Persist the vendor tag and token on the current Installation, saving
  // asynchronously; only dirty fields are written.
  private void sendRegistrationToServer(String refreshedToken) {
    if (StringUtil.isEmpty(refreshedToken)) {
      return;
    }
    LCInstallation installation = LCInstallation.getCurrentInstallation();
    if (!VENDOR.equals(installation.getString(LCInstallation.VENDOR))) {
      installation.put(LCInstallation.VENDOR, VENDOR);
    }
    if (!refreshedToken.equals(installation.getString(LCInstallation.REGISTRATION_ID))) {
      installation.put(LCInstallation.REGISTRATION_ID, refreshedToken);
    }
    installation.saveInBackground().subscribe(
        ObserverBuilder.buildSingleObserver(new SaveCallback() {
          @Override
          public void done(LCException e) {
            if (null != e) {
              LOGGER.e("failed to update installation.", e);
            } else {
              LOGGER.d("succeed to update installation.");
            }
          }
        }));
    LOGGER.d("FCM registration success! registrationId=" + refreshedToken);
  }
  @Override
  public void onDeletedMessages() {
    super.onDeletedMessages();
  }
}
|
// Return the longest string in the list; ties keep the earliest entry, and
// an empty list yields the empty string.
function findLongestString(strList) {
  return strList.reduce(
    (best, candidate) => (candidate.length > best.length ? candidate : best),
    '',
  );
}

let longestWord = findLongestString(['javascript', 'python', 'ruby']);
console.log(longestWord); // Output: 'javascript'
|
<filename>load_config.py<gh_stars>0
import hjson

# Project-wide configuration, parsed once at import time from the HJSON
# config file. Fix: use a context manager so the file handle is closed
# deterministically (the original left it open until garbage collection).
with open("./conf/config.hjson", encoding='utf-8') as config_file:
    config = hjson.load(config_file)
|
-- Return every order whose items sum to more than 100.
-- NOTE(review): selecting o.* while grouping only by o.order_id relies on
-- functional-dependency support (valid in MySQL 5.7+ / PostgreSQL when
-- order_id is the primary key); confirm the target dialect.
SELECT o.*
FROM Orders o
INNER JOIN OrderItems i
ON o.order_id = i.order_id
GROUP BY o.order_id
HAVING SUM(i.price) > 100;
|
// Barrel file: re-export PositionSettingsForm as this directory's default.
export { default } from "./PositionSettingsForm";
|
#!/bin/bash
#
# Copyright (c) 2021 SAP SE or an SAP affiliate company. All rights reserved. This file is licensed under the Apache Software License, v. 2 except as noted otherwise in the LICENSE file
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -o errexit
set -o nounset
set -o pipefail

# Make it able to work on minikube and nodeless: determine the host's primary
# IP so the admission webhook Service can point at a process running outside
# the cluster. The unquoted echo deliberately collapses whitespace before awk
# extracts the address following "src".
IP_ROUTE=$(ip route get 1)
IP_ADDRESS=$(echo ${IP_ROUTE#*src} | awk '{print $1}')

ADMISSION_SERVICE_NAME="gardener-extension-admission-alicloud"
ADMISSION_ENDPOINT_NAME="gardener-extension-admission-alicloud"

# Recreate the Service/Endpoints from scratch: delete stale objects first.
# Fix: quoted the variable expansions in the delete commands (the get
# commands already quoted them) and in the dirname path below.
if kubectl -n garden get service "$ADMISSION_SERVICE_NAME" &> /dev/null; then
    kubectl -n garden delete service "$ADMISSION_SERVICE_NAME"
fi
if kubectl -n garden get endpoints "$ADMISSION_ENDPOINT_NAME" &> /dev/null; then
    kubectl -n garden delete endpoints "$ADMISSION_ENDPOINT_NAME"
fi

# Headless-style Service forwarding 443 to the locally running webhook (9443).
cat <<EOF | kubectl apply -f -
kind: Service
apiVersion: v1
metadata:
  name: $ADMISSION_SERVICE_NAME
  namespace: garden
spec:
  ports:
  - protocol: TCP
    port: 443
    targetPort: 9443
EOF

# Manually managed Endpoints pointing the Service at the host machine.
cat <<EOF | kubectl apply -f -
---
kind: Endpoints
apiVersion: v1
metadata:
  name: $ADMISSION_ENDPOINT_NAME
  namespace: garden
subsets:
- addresses:
  - ip: ${IP_ADDRESS}
  ports:
  - port: 9443
EOF

kubectl apply -f "$(dirname "$0")/../example/40-validatingwebhookconfiguration.yaml"
|
<gh_stars>0
import sys, os
import CORBA, Fortune, Fortune__POA
FORTUNE_PATH = "/usr/games/fortune"
class CookieServer_i(Fortune__POA.CookieServer):
    """CORBA servant (Python 2 / omniORBpy) serving fortune-cookie strings."""
    def get_cookie(self):
        # Run the fortune(6) binary and return its full output. popen.close()
        # returns a truthy status only on error (e.g. binary missing), in
        # which case an apology string is returned instead.
        pipe = os.popen(FORTUNE_PATH)
        cookie = pipe.read()
        if pipe.close():
            # An error occurred with the pipe
            cookie = "Oh dear, couldn't get a fortune\n"
        return cookie
# Standard omniORBpy bootstrap: initialise the ORB, activate the servant on
# the root POA, print its stringified IOR for clients to use, then block
# serving requests forever.
orb = CORBA.ORB_init(sys.argv)
poa = orb.resolve_initial_references("RootPOA")
servant = CookieServer_i()
poa.activate_object(servant)
print orb.object_to_string(servant._this())
# see the Discussion session about what this print statement emits
poa._get_the_POAManager().activate()
orb.run()
|
import * as uuid from 'uuid';
import Store from '../store.js';
import Painter from '../Painter.js';
// Mock uuid so generated ids are deterministic, and mock the Painter DOM
// layer so Store can be tested in isolation.
jest.mock('uuid');
jest.mock('../Painter.js');
describe('test store.js', () => {
    // Shared fixture: three messages keyed by id. The times (10, 150, 30)
    // are deliberately unordered so the ordering tests below are meaningful.
    const mockMessages = {
        '1': {
            id: '1',
            message: 'mock-message-1',
            time: 10,
        },
        '2': {
            id: '2',
            message: 'mock-message-2',
            time: 150,
        },
        '3': {
            id: '3',
            message: 'mock-message-3',
            time: 30,
        },
    };
    it('should test addMessage method', () => {
        const store = new Store();
        // Stub the index lookup so we can assert it is forwarded to Painter.
        const getIndexOfTarget = jest.fn(() => 5);
        store.getIndexOfTarget = getIndexOfTarget;
        const addNewMessage = jest.spyOn(Painter, 'addNewMessage');
        const v4 = jest.spyOn(uuid, 'v4');
        const mockId = 'mock-id';
        const mockMessage = 'mock-message';
        const mockTime = '30';
        v4.mockImplementation(() => mockId);
        // addMessage is expected to coerce the time string to a Number.
        const nextMessageObj = {
            id: mockId,
            message: mockMessage,
            time: Number(mockTime),
        };
        expect(store.messages).toEqual({});
        store.addMessage(mockMessage, mockTime);
        expect(store.messages).toEqual({
            [mockId]: nextMessageObj,
        });
        expect(getIndexOfTarget).toHaveBeenNthCalledWith(1, 'mock-id');
        expect(addNewMessage).toHaveBeenCalledTimes(1);
        // Painter receives the new message, its insertion index, and the store.
        expect(addNewMessage).toHaveBeenCalledWith(
            nextMessageObj,
            5,
            store,
        );
    });
    it('should test deleteMessage method', () => {
        const store = new Store();
        // Deep-copy the fixture so mutations cannot leak between tests.
        store.messages = JSON.parse(JSON.stringify(mockMessages));
        const getIndexOfTarget = jest.fn(() => 5);
        store.getIndexOfTarget = getIndexOfTarget;
        const removeMessage = jest.spyOn(Painter, 'removeMessage');
        store.deleteMessage('2');
        expect(getIndexOfTarget).toHaveBeenNthCalledWith(1, '2');
        expect(removeMessage).toHaveBeenNthCalledWith(1, 5);
        // Only message '2' should be gone; '1' and '3' are untouched.
        expect(store.messages).toEqual({
            '1': {
                id: '1',
                message: 'mock-message-1',
                time: 10,
            },
            '3': {
                id: '3',
                message: 'mock-message-3',
                time: 30,
            },
        });
    });
    describe('should test updateTime method', () => {
        const store = new Store();
        const getIndexOfTarget = jest.fn(() => 5);
        const deleteMessage = jest.fn();
        store.getIndexOfTarget = getIndexOfTarget;
        store.deleteMessage = deleteMessage;
        const updateTime = jest.spyOn(Painter, 'updateTime');
        beforeEach(() => {
            // Fresh fixture and clean mocks before every case.
            store.messages = JSON.parse(JSON.stringify(mockMessages));
            getIndexOfTarget.mockClear();
            // First lookup returns 3 (index before update); subsequent calls
            // fall back to the default implementation returning 5.
            getIndexOfTarget.mockImplementationOnce(() => 3);
            updateTime.mockClear();
        });
        it('when execute time to plus value', () => {
            expect(store.messages[2].time).toBe(150);
            store.updateTime('2', '5');
            // '+5' delta: 150 -> 155.
            expect(store.messages[2].time).toBe(155);
            expect(getIndexOfTarget).toHaveBeenNthCalledWith(1, '2');
            expect(getIndexOfTarget).toHaveBeenNthCalledWith(2, '2');
            // Painter.updateTime(oldIndex, newIndex, newTime).
            expect(updateTime).toHaveBeenNthCalledWith(1, 3, 5, 155);
        });
        it('when execute time to multiple value', () => {
            expect(store.messages[2].time).toBe(150);
            store.updateTime('2', '3-times');
            // 'N-times' multiplies: 150 * 3 -> 450.
            expect(store.messages[2].time).toBe(450);
            expect(getIndexOfTarget).toHaveBeenNthCalledWith(1, '2');
            expect(getIndexOfTarget).toHaveBeenNthCalledWith(2, '2');
            expect(updateTime).toHaveBeenNthCalledWith(1, 3, 5, 450);
        });
        it('when execute time to minus value then time is under the 0', () => {
            expect(store.messages[2].time).toBe(150);
            store.updateTime('2', '-155');
            // Dropping below zero deletes the message instead of updating it.
            expect(deleteMessage).toHaveBeenNthCalledWith(1, '2');
            expect(getIndexOfTarget).toHaveBeenCalledTimes(0);
            expect(updateTime).toHaveBeenCalledTimes(0);
        });
    });
    it('should test tickTime method', () => {
        const store = new Store();
        const updateTime = jest.fn();
        store.updateTime = updateTime;
        store.messages = JSON.parse(JSON.stringify(mockMessages));
        store.tickTime();
        // One updateTime call per stored message.
        expect(updateTime).toHaveBeenCalledTimes(3);
        expect(updateTime).toHaveBeenCalledWith('1', 0);
        expect(updateTime).toHaveBeenCalledWith('2', 0);
        expect(updateTime).toHaveBeenCalledWith('3', 0);
    });
    it('should test getIndexOfTarget method', () => {
        const store = new Store();
        const getOrderedList = jest.fn(() => ['mock-id-1', 'mock-id-2', 'mock-id-3']);
        store.getOrderedList = getOrderedList;
        const result = store.getIndexOfTarget('mock-id-2');
        expect(getOrderedList).toHaveBeenCalledTimes(1);
        // Index within the ordered id list.
        expect(result).toBe(1);
    });
    it('should test getOrderedList method', () => {
        const store = new Store();
        store.messages = JSON.parse(JSON.stringify(mockMessages));
        const result = store.getOrderedList();
        // Expected order appears to be by descending time (150, 30, 10) —
        // NOTE(review): confirm against Store.getOrderedList's actual sort.
        expect(result).toEqual(['2', '3', '1']);
    });
});
|
#!/bin/sh
# Run pipenv inside $WORKING_DIRECTORY, forwarding all script arguments.
#
# BUG FIX: the original ran
#   sh -c "echo $(ls -la) && cd $WORKING_DIRECTORY && echo $(ls -la) && pipenv $*"
# Both $(ls -la) substitutions were expanded by the OUTER shell before the
# cd ever happened, so the second listing never showed the target directory
# (and echo mangled its whitespace). The unquoted $WORKING_DIRECTORY and $*
# also broke on paths/arguments containing spaces.
ls -la
cd "$WORKING_DIRECTORY" || exit 1
ls -la
exec pipenv "$@"
|
const Firestore = require('@firestore').Firestore
exports.getAllPartyData = (req, res) => {
console.log('party: ', req.params.party)
const db = new Firestore()
const party = req.params.party
db.PoliticalParty()
.where('name', '==', party)
.select()
.then((snapshot) => {
if (snapshot.empty) {
res.status(404).json({
success: false,
message: 'No party with that name found'
})
}
snapshot.forEach((doc) => {
res.status(200).json({
success: true,
data: doc.data()
})
})
})
.catch(console.error)
}
|
#!/usr/bin/env bash
# Copyright 2015 Cloudera Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Exit on non-true return value
set -e
# Exit on reference to uninitialized variable
set -u
set -o pipefail
# Set up the environment configuration.
source ./init.sh
if [[ "$DOWNLOAD_CCACHE" -ne 0 ]]; then
  download_ccache
fi
# Configure the compiler/linker flags, bootstrapping tools if necessary.
source ./init-compiler.sh
################################################################################
# How to add new versions to the toolchain:
#
# * Make sure the build script is ready to build the new version.
# * Find the library in the list below and create new line that follows the
#   pattern: LIBRARYNAME_VERSION=Version $SOURCE_DIR/source/LIBRARYNAME/build.sh
#
# Each invocation below passes the version as a per-command environment
# variable to the library's build.sh; nothing is exported unless noted.
#
# WARNING: Once a library has been rolled out to production, it cannot be
# removed, but only new versions can be added. Make sure that the library
# and version you want to add works as expected.
################################################################################
################################################################################
# Boost
################################################################################
# BUILD_HISTORICAL gates older versions that only need rebuilding when
# regenerating the full historical toolchain.
if [[ "$ARCH_NAME" != "aarch64" ]]; then
  if (( BUILD_HISTORICAL )) ; then
    BOOST_VERSION=1.57.0 $SOURCE_DIR/source/boost/build.sh
    BOOST_VERSION=1.57.0-p1 $SOURCE_DIR/source/boost/build.sh
    BOOST_VERSION=1.57.0-p2 $SOURCE_DIR/source/boost/build.sh
  fi
  BOOST_VERSION=1.57.0-p3 $SOURCE_DIR/source/boost/build.sh
fi
BOOST_VERSION=1.61.0-p2 $SOURCE_DIR/source/boost/build.sh
################################################################################
# Build Python
################################################################################
if [[ ! "$OSTYPE" == "darwin"* ]]; then
  # For now, provide both Python 2 and 3 until we can switch over to Python 3.
  PYTHON_VERSION=2.7.16 $SOURCE_DIR/source/python/build.sh
  PYTHON_VERSION=3.7.4 $SOURCE_DIR/source/python/build.sh
else
  # macOS: ship an empty placeholder package instead of building.
  PYTHON_VERSION=2.7.16 build_fake_package "python"
fi
################################################################################
# LLVM
################################################################################
# Build LLVM 3.3 with and without asserts.
# For LLVM 3.3, the default is a release build with assertions. The assertions
# are disabled by including "no-asserts" in the version string.
if (( BUILD_HISTORICAL )) ; then
  LLVM_VERSION=3.3-p1 $SOURCE_DIR/source/llvm/build.sh
  LLVM_VERSION=3.3-no-asserts-p1 $SOURCE_DIR/source/llvm/build.sh
fi
# Build LLVM 3.7+ with and without assertions. For LLVM 3.7+, the default is a
# release build with no assertions.
# The subshell keeps the PYTHON_VERSION export scoped to the LLVM builds.
(
  export PYTHON_VERSION=2.7.16
  if (( BUILD_HISTORICAL )) ; then
    LLVM_VERSION=3.7.0 $SOURCE_DIR/source/llvm/build.sh
    LLVM_VERSION=3.8.0 $SOURCE_DIR/source/llvm/build.sh
    LLVM_VERSION=3.8.0-p1 $SOURCE_DIR/source/llvm/build.sh
    LLVM_VERSION=3.8.0-asserts-p1 $SOURCE_DIR/source/llvm/build.sh
    LLVM_VERSION=3.9.1 $SOURCE_DIR/source/llvm/build.sh
    LLVM_VERSION=3.9.1-asserts $SOURCE_DIR/source/llvm/build.sh
    LLVM_VERSION=5.0.1 $SOURCE_DIR/source/llvm/build.sh
    LLVM_VERSION=5.0.1-asserts $SOURCE_DIR/source/llvm/build.sh
  fi
  LLVM_VERSION=5.0.1-p2 $SOURCE_DIR/source/llvm/build.sh
  LLVM_VERSION=5.0.1-asserts-p2 $SOURCE_DIR/source/llvm/build.sh
)
################################################################################
# SASL
################################################################################
if [[ ! "$OSTYPE" == "darwin"* ]]; then
  CYRUS_SASL_VERSION=2.1.23 $SOURCE_DIR/source/cyrus-sasl/build.sh
else
  CYRUS_SASL_VERSION=2.1.26 $SOURCE_DIR/source/cyrus-sasl/build.sh
fi
################################################################################
# Build protobuf
################################################################################
PROTOBUF_VERSION=3.5.1 $SOURCE_DIR/source/protobuf/build.sh
################################################################################
# Build libev
################################################################################
LIBEV_VERSION=4.20 $SOURCE_DIR/source/libev/build.sh
################################################################################
# Build crcutil
################################################################################
# crcutil is versioned by git commit hash rather than a release number.
if (( BUILD_HISTORICAL )) ; then
  CRCUTIL_VERSION=440ba7babeff77ffad992df3a10c767f184e946e\
    $SOURCE_DIR/source/crcutil/build.sh
  CRCUTIL_VERSION=440ba7babeff77ffad992df3a10c767f184e946e-p1\
    $SOURCE_DIR/source/crcutil/build.sh
fi
CRCUTIL_VERSION=440ba7babeff77ffad992df3a10c767f184e946e-p2\
  $SOURCE_DIR/source/crcutil/build.sh
################################################################################
# Build OpenSSL - this is not intended for production use of Impala.
# Libraries that depend on OpenSSL will only use it if PRODUCTION=1.
################################################################################
if (( BUILD_HISTORICAL )); then
  OPENSSL_VERSION=1.0.1p $SOURCE_DIR/source/openssl/build.sh
fi
OPENSSL_VERSION=1.0.2l $SOURCE_DIR/source/openssl/build.sh
################################################################################
# Build ZLib
################################################################################
ZLIB_VERSION=1.2.8 $SOURCE_DIR/source/zlib/build.sh
################################################################################
# Build Bison
################################################################################
BISON_VERSION=3.0.4 $SOURCE_DIR/source/bison/build.sh
################################################################################
# Build Thrift
#  * depends on bison, boost, zlib and openssl
################################################################################
# Thrift's dependencies must be visible to its build.sh, so these are
# exported for the duration of the Thrift builds and un-exported below.
export BISON_VERSION=3.0.4
export BOOST_VERSION=1.61.0-p2
export ZLIB_VERSION=1.2.8
export OPENSSL_VERSION=1.0.2l
if [[ ! "$OSTYPE" == "darwin"* ]]; then
  if (( BUILD_HISTORICAL )); then
    THRIFT_VERSION=0.9.0-p2 $SOURCE_DIR/source/thrift/build.sh
    THRIFT_VERSION=0.9.0-p4 $SOURCE_DIR/source/thrift/build.sh
    THRIFT_VERSION=0.9.0-p5 $SOURCE_DIR/source/thrift/build.sh
    # 0.9.0-p6 is a revert of -p5 patch. It doesn't need to be built.
    # It is equivalent to p4 and is needed for subsequent patches.
    THRIFT_VERSION=0.9.0-p7 $SOURCE_DIR/source/thrift/build.sh
    THRIFT_VERSION=0.9.0-p8 $SOURCE_DIR/source/thrift/build.sh
    THRIFT_VERSION=0.9.0-p9 $SOURCE_DIR/source/thrift/build.sh
    THRIFT_VERSION=0.9.0-p10 $SOURCE_DIR/source/thrift/build.sh
    THRIFT_VERSION=0.9.3-p5 $SOURCE_DIR/source/thrift/build.sh
    THRIFT_VERSION=0.9.3-p6 $SOURCE_DIR/source/thrift/build.sh
  fi
  # Required until Python 2.6 compatibility issues have been sorted out with
  # thrift-0.9.3.
  THRIFT_VERSION=0.9.0-p12 $SOURCE_DIR/source/thrift/build.sh
  THRIFT_VERSION=0.9.3-p7 $SOURCE_DIR/source/thrift/build.sh
  THRIFT_VERSION=0.11.0-p2 $SOURCE_DIR/source/thrift/build.sh
else
  THRIFT_VERSION=0.9.2-p4 $SOURCE_DIR/source/thrift/build.sh
fi
# Remove the export attribute (values stay set in this shell).
export -n BISON_VERSION
export -n BOOST_VERSION
export -n ZLIB_VERSION
export -n OPENSSL_VERSION
################################################################################
# gflags
################################################################################
if (( BUILD_HISTORICAL )); then
  GFLAGS_VERSION=2.0 $SOURCE_DIR/source/gflags/build.sh
fi
GFLAGS_VERSION=2.2.0-p2 $SOURCE_DIR/source/gflags/build.sh
################################################################################
# Build gperftools
################################################################################
if (( BUILD_HISTORICAL )); then
  GPERFTOOLS_VERSION=2.0-p1 $SOURCE_DIR/source/gperftools/build.sh
  GPERFTOOLS_VERSION=2.3 $SOURCE_DIR/source/gperftools/build.sh
fi
# IMPALA-6414: Required until issues with 2.6.3 have been sorted out.
GPERFTOOLS_VERSION=2.5 $SOURCE_DIR/source/gperftools/build.sh
GPERFTOOLS_VERSION=2.6.3 $SOURCE_DIR/source/gperftools/build.sh
################################################################################
# Build glog
################################################################################
if (( BUILD_HISTORICAL )) ; then
  GFLAGS_VERSION=2.0 GLOG_VERSION=0.3.2-p1 $SOURCE_DIR/source/glog/build.sh
  GFLAGS_VERSION=2.0 GLOG_VERSION=0.3.2-p2 $SOURCE_DIR/source/glog/build.sh
  if [[ ! "$RELEASE_NAME" =~ CentOS.*5\.[[:digit:]] ]]; then
    # CentOS 5 has issues with the glog patch, probably autotools is too old.
    GFLAGS_VERSION=2.2.0 GLOG_VERSION=0.3.3-p1 $SOURCE_DIR/source/glog/build.sh
  fi
  GFLAGS_VERSION=2.2.0-p1 GLOG_VERSION=0.3.4-p2 $SOURCE_DIR/source/glog/build.sh
fi
GFLAGS_VERSION=2.2.0-p2 GLOG_VERSION=0.3.4-p3 $SOURCE_DIR/source/glog/build.sh
################################################################################
# Build gtest
################################################################################
GTEST_VERSION=1.6.0 $SOURCE_DIR/source/gtest/build.sh
# New versions of gtest are named googletest
GOOGLETEST_VERSION=1.8.0 $SOURCE_DIR/source/googletest/build.sh
################################################################################
# Build Snappy
################################################################################
if (( BUILD_HISTORICAL )); then
  SNAPPY_VERSION=1.0.5 $SOURCE_DIR/source/snappy/build.sh
  SNAPPY_VERSION=1.1.3 $SOURCE_DIR/source/snappy/build.sh
fi
SNAPPY_VERSION=1.1.4 $SOURCE_DIR/source/snappy/build.sh
################################################################################
# Build Lz4
################################################################################
if (( BUILD_HISTORICAL )); then
  LZ4_VERSION=svn $SOURCE_DIR/source/lz4/build.sh
fi
LZ4_VERSION=1.7.5 $SOURCE_DIR/source/lz4/build.sh
################################################################################
# Build Zstd
################################################################################
ZSTD_VERSION=1.4.0 $SOURCE_DIR/source/zstd/build.sh
################################################################################
# Build re2
################################################################################
if (( BUILD_HISTORICAL )); then
  RE2_VERSION=20130115 $SOURCE_DIR/source/re2/build.sh
  RE2_VERSION=20130115-p1 $SOURCE_DIR/source/re2/build.sh
fi
RE2_VERSION=20190301 $SOURCE_DIR/source/re2/build.sh
################################################################################
# Build Ldap
################################################################################
OPENLDAP_VERSION=2.4.47 $SOURCE_DIR/source/openldap/build.sh
################################################################################
# Build Avro
################################################################################
if (( BUILD_HISTORICAL )); then
  AVRO_VERSION=1.7.4-p4 $SOURCE_DIR/source/avro/build.sh
fi
AVRO_VERSION=1.7.4-p5 $SOURCE_DIR/source/avro/build.sh
################################################################################
# Build Rapidjson
################################################################################
# Build two versions for now - the build time and size is fairly minimal.
RAPIDJSON_VERSION=0.11 $SOURCE_DIR/source/rapidjson/build.sh
RAPIDJSON_VERSION=1.1.0 $SOURCE_DIR/source/rapidjson/build.sh
################################################################################
# Build BZip2
################################################################################
if (( BUILD_HISTORICAL )); then
  BZIP2_VERSION=1.0.6-p1 $SOURCE_DIR/source/bzip2/build.sh
fi
BZIP2_VERSION=1.0.6-p2 $SOURCE_DIR/source/bzip2/build.sh
################################################################################
# Build GDB
################################################################################
if [[ ! "$RELEASE_NAME" =~ CentOS.*5\.[[:digit:]] ]]; then
  GDB_VERSION=7.9.1-p1 $SOURCE_DIR/source/gdb/build.sh
else
  # GDB does not build on CentOS 5; ship a placeholder package instead.
  GDB_VERSION=7.9.1-p1 build_fake_package "gdb"
fi
################################################################################
# Build Libunwind
################################################################################
LIBUNWIND_VERSION=1.3-rc1-p3 $SOURCE_DIR/source/libunwind/build.sh
################################################################################
# Build Breakpad
################################################################################
if (( BUILD_HISTORICAL )); then
  BREAKPAD_VERSION=20150612-p1 $SOURCE_DIR/source/breakpad/build.sh
fi
BREAKPAD_VERSION=97a98836768f8f0154f8f86e5e14c2bb7e74132e-p2 $SOURCE_DIR/source/breakpad/build.sh
################################################################################
# Build Flatbuffers
################################################################################
FLATBUFFERS_VERSION=1.6.0 $SOURCE_DIR/source/flatbuffers/build.sh
################################################################################
# Build Kudu
################################################################################
# Subshell scopes Kudu's pinned dependency exports; unsupported platforms
# get a placeholder package so downstream packaging still works.
(
  export BOOST_VERSION=1.57.0-p3
  export KUDU_VERSION=5c610bf40
  export PYTHON_VERSION=2.7.16
  if $SOURCE_DIR/source/kudu/build.sh is_supported_platform; then
    $SOURCE_DIR/source/kudu/build.sh build
  else
    build_fake_package kudu
  fi
)
################################################################################
# Build TPC-H
################################################################################
TPC_H_VERSION=2.17.0 $SOURCE_DIR/source/tpc-h/build.sh
################################################################################
# Build TPC-DS
################################################################################
TPC_DS_VERSION=2.1.0 $SOURCE_DIR/source/tpc-ds/build.sh
################################################################################
# Build KRB5
################################################################################
KRB5_VERSION=1.15.1 $SOURCE_DIR/source/krb5/build.sh
################################################################################
# Build ORC
################################################################################
# Subshell scopes ORC's pinned dependency exports.
(
  export LZ4_VERSION=1.7.5
  export PROTOBUF_VERSION=3.5.1
  export SNAPPY_VERSION=1.1.4
  export ZLIB_VERSION=1.2.8
  export GOOGLETEST_VERSION=1.8.0
  if (( BUILD_HISTORICAL )); then
    ORC_VERSION=1.4.3-p3 $SOURCE_DIR/source/orc/build.sh
    ORC_VERSION=1.5.5-p1 $SOURCE_DIR/source/orc/build.sh
  fi
  ORC_VERSION=1.6.2-p6 $SOURCE_DIR/source/orc/build.sh
)
################################################################################
# CCTZ
################################################################################
CCTZ_VERSION=2.2 $SOURCE_DIR/source/cctz/build.sh
|
package interpreter
import "errors"
import msg "github.com/filecoin-project/specs/systems/filecoin_vm/message"
import addr "github.com/filecoin-project/specs/systems/filecoin_vm/actor/address"
import actor "github.com/filecoin-project/specs/systems/filecoin_vm/actor"
import st "github.com/filecoin-project/specs/systems/filecoin_vm/state_tree"
import vmr "github.com/filecoin-project/specs/systems/filecoin_vm/runtime"
import exitcode "github.com/filecoin-project/specs/systems/filecoin_vm/runtime/exitcode"
import gascost "github.com/filecoin-project/specs/systems/filecoin_vm/runtime/gascost"
import util "github.com/filecoin-project/specs/util"
// ApplyMessageBatch applies each message in order, threading the resulting
// state tree from one application into the next, and collects one receipt
// per message.
func (vmi *VMInterpreter_I) ApplyMessageBatch(inTree st.StateTree, msgs []MessageRef) (outTree st.StateTree, ret []msg.MessageReceipt) {
	currTree := inTree
	for _, mref := range msgs {
		// ApplyMessage always succeeds in the sense that it always yields
		// a tree and a receipt (failures are encoded in the receipt).
		nextTree, receipt := vmi.ApplyMessage(currTree, mref.Message(), mref.Miner())
		currTree = nextTree
		ret = append(ret, receipt)
	}
	return currTree, ret
}
// ApplyMessage executes a single message against inTree and returns the
// resulting state tree together with a receipt. Validation failures return
// the ORIGINAL inTree untouched plus an error receipt; only a successful
// invocation's tree survives.
func (vmi *VMInterpreter_I) ApplyMessage(inTree st.StateTree, message msg.UnsignedMessage, minerAddr addr.Address) (outTree st.StateTree, ret msg.MessageReceipt) {
	compTree := inTree
	// The sender must already exist in the tree.
	// NOTE(review): InvalidMethod looks odd for a missing sender — one
	// might expect ActorNotFound here; confirm intended semantics.
	fromActor := compTree.GetActor(message.From())
	if fromActor == nil {
		return applyError(inTree, exitcode.InvalidMethod, gascost.ApplyMessageFail)
	}
	// make sure fromActor has enough money to run the max invocation
	maxGasCost := gasToFIL(message.GasLimit(), message.GasPrice())
	totalCost := message.Value() + actor.TokenAmount(maxGasCost)
	if fromActor.State().Balance() < totalCost {
		return applyError(inTree, exitcode.InsufficientFunds, gascost.ApplyMessageFail)
	}
	// make sure this is the right message order for fromActor
	// (this is protection against replay attacks, and useful sequencing)
	if message.CallSeqNum() != fromActor.State().CallSeqNum()+1 {
		return applyError(inTree, exitcode.InvalidCallSeqNum, gascost.ApplyMessageFail)
	}
	// may return a different tree on success.
	// this MUST get rolled back if the invocation fails.
	var toActor actor.Actor
	var err error
	compTree, toActor, err = treeGetOrCreateAccountActor(compTree, message.To())
	if err != nil {
		return applyError(inTree, exitcode.ActorNotFound, gascost.ApplyMessageFail)
	}
	// deduct maximum expenditure gas funds first
	// TODO: use a single "transfer"
	compTree = treeDeductFunds(compTree, fromActor, maxGasCost)
	// transfer funds fromActor -> toActor
	// (yes deductions can be combined, spelled out here for clarity)
	// TODO: use a single "transfer"
	compTree = treeDeductFunds(compTree, fromActor, message.Value())
	compTree = treeDepositFunds(compTree, toActor, message.Value())
	// Prepare invocInput.
	invocInput := vmr.InvocInput{
		InTree:    compTree,
		FromActor: fromActor,
		ToActor:   toActor,
		Method:    message.Method(),
		Params:    message.Params(),
		Value:     message.Value(),
		GasLimit:  message.GasLimit(),
	}
	// TODO: this is mega jank. need to rework invocationInput + runtime boundaries.
	invocInput.Runtime = makeRuntime(compTree, invocInput)
	// perform the method call to the actor
	// TODO: eval if we should lift gas tracking and calc to the beginning of invocation
	// (ie, include account creation, gas accounting itself)
	out := invocationMethodDispatch(invocInput)
	// var outTree StateTree
	if out.ExitCode != 0 {
		// error -- revert all state changes -- ie drop updates. burn used gas.
		// Gas is deducted from the PRE-invocation tree so only the fee sticks.
		outTree = inTree // wipe!
		outTree = treeDeductFunds(outTree, fromActor, gasToFIL(out.GasUsed, message.GasPrice()))
	} else {
		// success -- refund unused gas
		outTree = out.OutTree // take the state from the invocation output
		refundGas := message.GasLimit() - out.GasUsed
		outTree = treeDepositFunds(outTree, fromActor, gasToFIL(refundGas, message.GasPrice()))
		outTree = treeIncrementActorSeqNo(outTree, fromActor)
	}
	// reward miner gas fees
	minerActor := compTree.GetActor(minerAddr) // TODO: may be nil.
	outTree = treeDepositFunds(outTree, minerActor, gasToFIL(out.GasUsed, message.GasPrice()))
	return outTree, &msg.MessageReceipt_I{
		ExitCode_:    out.ExitCode,
		ReturnValue_: out.ReturnValue,
		GasUsed_:     out.GasUsed,
	}
}
// invocationMethodDispatch routes an invocation to the target actor's code.
// Method 0 is a bare value transfer and short-circuits without loading any
// actor code.
func invocationMethodDispatch(input vmr.InvocInput) vmr.InvocOutput {
	if input.Method == 0 {
		// just sending money. move along.
		return vmr.InvocOutput{
			OutTree:     input.InTree,
			GasUsed:     gascost.SimpleValueSend,
			ExitCode:    exitcode.OK,
			ReturnValue: nil,
		}
	}
	// TODO: actually invoke the function here.
	// put any vtable lookups in this function.
	actorCode, err := loadActorCode(input, input.ToActor.State().CodeCID())
	if err != nil {
		// Code could not be resolved for the target actor's CID; charge the
		// flat failure cost and leave the tree unchanged.
		return vmr.InvocOutput{
			OutTree:     input.InTree,
			GasUsed:     gascost.ApplyMessageFail,
			ExitCode:    exitcode.ActorCodeNotFound,
			ReturnValue: nil, // TODO: maybe: err
		}
	}
	return actorCode.InvokeMethod(input, input.Method, input.Params)
}
// treeIncrementActorSeqNo returns a tree in which actor a's call sequence
// number has been advanced by one. Unimplemented placeholder.
func treeIncrementActorSeqNo(inTree st.StateTree, a actor.Actor) (outTree st.StateTree) {
	panic("todo")
}
// treeDeductFunds returns a tree in which amt has been subtracted from
// actor a's balance. Unimplemented placeholder.
func treeDeductFunds(inTree st.StateTree, a actor.Actor, amt actor.TokenAmount) (outTree st.StateTree) {
	// TODO: turn this into a single transfer call.
	panic("todo")
}
// treeDepositFunds returns a tree in which amt has been added to actor a's
// balance. Unimplemented placeholder.
func treeDepositFunds(inTree st.StateTree, a actor.Actor, amt actor.TokenAmount) (outTree st.StateTree) {
	// TODO: turn this into a single transfer call.
	panic("todo")
}
// treeGetOrCreateAccountActor returns the actor at address a, creating a
// fresh account actor for it when the address class supports implicit
// account creation (BLS / secp256k1). ID and actor-class addresses cannot
// be created implicitly and yield an error.
func treeGetOrCreateAccountActor(inTree st.StateTree, a addr.Address) (outTree st.StateTree, _ actor.Actor, err error) {
	// Fast path: the actor already exists.
	if existing := inTree.GetActor(a); existing != nil {
		return inTree, existing, nil
	}
	switch a.Type().Which() {
	case addr.Address_Type_Case_BLS:
		return treeNewBLSAccountActor(inTree, a)
	case addr.Address_Type_Case_Secp256k1:
		return treeNewSecp256k1AccountActor(inTree, a)
	case addr.Address_Type_Case_ID:
		return inTree, nil, errors.New("no actor with given ID")
	case addr.Address_Type_Case_Actor:
		return inTree, nil, errors.New("no such actor")
	default:
		return inTree, nil, errors.New("unknown address type")
	}
}
// treeNewBLSAccountActor creates a new account actor for a BLS address.
// Unimplemented placeholder.
func treeNewBLSAccountActor(inTree st.StateTree, addr addr.Address) (outTree st.StateTree, _ actor.Actor, err error) {
	panic("todo")
}
// treeNewSecp256k1AccountActor creates a new account actor for a secp256k1
// address. Unimplemented placeholder.
func treeNewSecp256k1AccountActor(inTree st.StateTree, addr addr.Address) (outTree st.StateTree, _ actor.Actor, err error) {
	panic("todo")
}
// applyError builds the receipt for a message that failed validation or
// execution, returning the caller-supplied state tree unchanged so any
// partial mutations are discarded.
func applyError(tree st.StateTree, exitCode msg.ExitCode, gasUsed msg.GasAmount) (outTree st.StateTree, ret msg.MessageReceipt) {
	// BUG FIX: this previously returned the named result `outTree` without
	// ever assigning it, i.e. the zero value (nil), silently dropping the
	// state tree on every error path. Return the input tree instead.
	return tree, &msg.MessageReceipt_I{
		ExitCode_:    exitCode,
		ReturnValue_: nil,
		GasUsed_:     gasUsed,
	}
}
// gasToFIL converts a gas amount into its FIL token cost: gas units
// multiplied by the price per unit.
func gasToFIL(gas msg.GasAmount, price msg.GasPrice) actor.TokenAmount {
	total := util.UVarint(gas) * util.UVarint(price)
	return actor.TokenAmount(total)
}
// makeRuntime assembles the runtime handed to actor code for one
// invocation: the invocation input plus a VM state wrapping the current
// state tree and a fresh storage handle.
func makeRuntime(tree st.StateTree, input vmr.InvocInput) vmr.Runtime {
	return &vmr.Runtime_I{
		Invocation_: input,
		State_: &vmr.VMState_I{
			StateTree_: tree, // TODO: also in input.InTree.
			Storage_:   &vmr.VMStorage_I{},
		},
	}
}
|
/**
* @fileOverview Math for a simple robot simulation
* @author <NAME> <<EMAIL>>
* @version 0.1
*/
define([ 'exports', 'robertaLogic.constants' ], function(exports, CONSTANTS) {
/**
* exports helper for calculations in ORsimulation
*
* @namespace
*/
/**
* Convert from degree to radians.
*
* @memberOf exports
* @param {Number}
* degree to convert
* @returns {Number} radians
*/
exports.toRadians = function(degree) {
return degree * (Math.PI / 180);
};
/**
* Convert from radians to degree.
*
* @memberOf exports
* @param {Number}
* radians to convert
* @returns {Number} degree
*/
exports.toDegree = function(radians) {
return radians * (180 / Math.PI);
};
/**
 * Get the intersection point of two line segments.
 *
 * Solves the infinite-line intersection via the standard determinant
 * (Cramer's rule) and then rejects hits lying outside either segment's
 * bounding box, with a 0.01 slack for floating-point error.
 *
 * @memberOf exports
 * @param {line1}
 *            one line segment ({x1, y1, x2, y2})
 * @param {line2}
 *            another line segment ({x1, y1, x2, y2})
 * @returns {point} {x, y} intersection, or null if none within both segments
 */
exports.getIntersectionPoint = function(line1, line2) {
    // Determinant of the 2x2 direction system; 0 means the lines are
    // parallel (or degenerate) and cannot intersect in a single point.
    var d = (line1.x1 - line1.x2) * (line2.y1 - line2.y2) - (line1.y1 - line1.y2) * (line2.x1 - line2.x2);
    if (d === 0) {
        return null;
    }
    // Intersection of the two infinite lines.
    var xi = ((line2.x1 - line2.x2) * (line1.x1 * line1.y2 - line1.y1 * line1.x2) - (line1.x1 - line1.x2) * (line2.x1 * line2.y2 - line2.y1 * line2.x2))
            / d;
    var yi = ((line2.y1 - line2.y2) * (line1.x1 * line1.y2 - line1.y1 * line1.x2) - (line1.y1 - line1.y2) * (line2.x1 * line2.y2 - line2.y1 * line2.x2))
            / d;
    // Constrain the hit to both SEGMENTS (bounding-box tests, 0.01 slack).
    if (xi < Math.min(line1.x1, line1.x2) - 0.01 || xi > Math.max(line1.x1, line1.x2) + 0.01) {
        return null;
    }
    if (xi < Math.min(line2.x1, line2.x2) - 0.01 || xi > Math.max(line2.x1, line2.x2) + 0.01) {
        return null;
    }
    if (yi < Math.min(line1.y1, line1.y2) - 0.01 || yi > Math.max(line1.y1, line1.y2) + 0.01) {
        return null;
    }
    if (yi < Math.min(line2.y1, line2.y2) - 0.01 || yi > Math.max(line2.y1, line2.y2) + 0.01) {
        return null;
    }
    return {
        x : xi,
        y : yi
    };
};
/**
 * Get the four edge lines of a rectangle.
 *
 * Axis-aligned rectangles (rect.isParallelToAxis) are described by
 * {x, y, w, h}; rotated rectangles are described by their four corner
 * points (backLeft, frontLeft, frontRight, backRight) with rx/ry
 * coordinates.
 *
 * @memberOf exports
 * @param {rect}
 *            a rectangle
 * @returns {Array} four lines, each {x1, y1, x2, y2}
 */
exports.getLinesFromRect = function(rect) {
    if(rect.isParallelToAxis){
        // Edges from the corner + extents form: left, top, bottom, right.
        return [ {
            x1 : rect.x,
            x2 : rect.x,
            y1 : rect.y,
            y2 : rect.y + rect.h
        }, {
            x1 : rect.x,
            x2 : rect.x + rect.w,
            y1 : rect.y,
            y2 : rect.y
        }, {
            x1 : rect.x + rect.w,
            x2 : rect.x,
            y1 : rect.y + rect.h,
            y2 : rect.y + rect.h
        }, {
            x1 : rect.x + rect.w,
            x2 : rect.x + rect.w,
            y1 : rect.y + rect.h,
            y2 : rect.y
        } ];
    }else{
        // Rotated rectangle: connect consecutive corner points.
        return [ {
            x1 : rect.backLeft.rx,
            x2 : rect.frontLeft.rx,
            y1 : rect.backLeft.ry,
            y2 : rect.frontLeft.ry
        }, {
            x1 : rect.frontLeft.rx,
            x2 : rect.frontRight.rx,
            y1 : rect.frontLeft.ry,
            y2 : rect.frontRight.ry
        }, {
            x1 : rect.frontRight.rx,
            x2 : rect.backRight.rx,
            y1 : rect.frontRight.ry,
            y2 : rect.backRight.ry
        }, {
            x1 : rect.backRight.rx,
            x2 : rect.backLeft.rx,
            y1 : rect.backRight.ry,
            y2 : rect.backLeft.ry
        } ];
    }
};
/**
* Calculate the square of a number.
*
* @memberOf exports
* @param {Number}
* x value to square
* @returns {Number} square of x
*/
exports.sqr = function(x) {
return x * x;
};
/**
 * Get the SQUARED Euclidean distance between two points.
 *
 * Note: no square root is taken — callers in this module (e.g.
 * isPointInsideRectangle, getDistanceToLine) deliberately compare squared
 * quantities against each other.
 *
 * @memberOf exports
 * @param {p1}
 *            one point ({x, y})
 * @param {p2}
 *            another point ({x, y})
 * @returns {Number} squared distance between p1 and p2
 */
function getDistance(p1, p2) {
    return exports.sqr(p1.x - p2.x) + exports.sqr(p1.y - p2.y);
};
exports.getDistance = getDistance;
/**
 * Get the point on segment p1-p2 closest to point p.
 *
 * Note: despite the name, this returns the closest POINT on the segment,
 * not a distance. Callers combine it with getDistance to obtain the
 * (squared) point-to-segment distance.
 *
 * @memberOf exports
 * @param {p}
 *            query point ({x, y})
 * @param {p1}
 *            start point of the segment
 * @param {p2}
 *            end point of the segment
 * @returns {point} closest point on the segment to p
 */
function getDistanceToLine(p, p1, p2) {
    // d is the SQUARED segment length (see getDistance).
    var d = exports.getDistance(p1, p2);
    if (d == 0) {
        // Degenerate segment: both endpoints coincide.
        return p1;
    }
    // t is the projection parameter of p onto the infinite line p1-p2,
    // clamped to [0, 1] so the result stays on the segment.
    var t = ((p.x - p1.x) * (p2.x - p1.x) + (p.y - p1.y) * (p2.y - p1.y)) / d;
    if (t < 0) {
        return p1;
    }
    if (t > 1) {
        return p2;
    }
    return ({
        x : p1.x + t * (p2.x - p1.x),
        y : p1.y + t * (p2.y - p1.y)
    });
};
exports.getDistanceToLine = getDistanceToLine;
/**
 * Test whether point p lies inside the rectangle given by corner points
 * p1..p4 (in order around the perimeter).
 *
 * Works by comparing the SQUARED distance from p to each edge against the
 * SQUARED side lengths — all quantities are squared consistently, so the
 * comparisons remain valid without taking square roots.
 *
 * @memberOf exports
 * @param {p} query point ({x, y})
 * @param {rect} rectangle with corner points p1, p2, p3, p4
 * @returns {Boolean} true if p is inside (or on) the rectangle
 */
exports.isPointInsideRectangle = function(p, rect){
    var p1 = rect.p1;
    var p2 = rect.p2;
    var p3 = rect.p3;
    var p4 = rect.p4;
    // Squared distance from p to each of the four edges.
    var t1 = getDistance(p, getDistanceToLine(p, p1, p2));
    var t2 = getDistance(p, getDistanceToLine(p, p2, p3));
    var t3 = getDistance(p, getDistanceToLine(p, p3, p4));
    var t4 = getDistance(p, getDistanceToLine(p, p4, p1));
    // Squared lengths of two adjacent sides.
    var s1 = getDistance(p1, p2);
    var s2 = getDistance(p2, p3);
    // Inside iff no edge is farther away than the opposite side's length.
    if(t1<=s2 && t3<=s2 && t2<=s1 && t4<=s1){
        return true;
    }else{
        return false;
    }
}
/**
* Convert a rgb value to hsv value.
*
* @memberOf exports
* @param {Number}
* r red value
* @param {Number}
* g green value
* @param {Number}
* b blue value
* @returns {Array} hsv value
*/
//copy from http://stackoverflow.com/questions/2348597/why-doesnt-this-javascript-rgb-to-hsl-code-work
exports.rgbToHsv = function(r, g, b) {
var min = Math.min(r, g, b), max = Math.max(r, g, b), delta = max - min, h, s, v = max;
v = Math.floor(max / 255 * 100);
if (max !== 0) {
s = Math.floor(delta / max * 100);
} else {
// black
return [ 0, 0, 0 ];
}
if (r === max) {
h = (g - b) / delta; // between yellow & magenta
} else if (g === max) {
h = 2 + (b - r) / delta; // between cyan & yellow
} else {
h = 4 + (r - g) / delta; // between magenta & cyan
}
h = Math.floor(h * 60); // degrees
if (h < 0) {
h += 360;
}
return [ h, s, v ];
};
/**
 * Map a hsv value to a color name.
 *
 * Thresholds assume the [h, s, v] arrays produced by rgbToHsv above:
 * h in degrees (0-359), s and v as percentages (0-100). The checks are
 * ordered, so earlier matches win (e.g. BLACK before RED).
 *
 * @memberOf exports
 * @param {Array}
 *            hsv value ([h, s, v])
 * @returns {Enum} color from CONSTANTS.COLOR_ENUM, or NONE if unmatched
 */
exports.getColor = function(hsv) {
    // Very dark regardless of hue/saturation.
    if (hsv[2] <= 10) {
        return CONSTANTS.COLOR_ENUM.BLACK;
    }
    // Hue wraps at 0/360, so red covers both ends of the circle.
    if ((hsv[0] < 10 || hsv[0] > 350) && hsv[1] > 90 && hsv[2] > 50) {
        return CONSTANTS.COLOR_ENUM.RED;
    }
    if (hsv[0] > 40 && hsv[0] < 70 && hsv[1] > 90 && hsv[2] > 50) {
        return CONSTANTS.COLOR_ENUM.YELLOW;
    }
    // NOTE(review): this brown window (h < 50) overlaps the red/yellow hue
    // ranges but is only reached with lower saturation/value — confirm the
    // thresholds are calibrated for the simulator's sensor output.
    if (hsv[0] < 50 && hsv[1] > 50 && hsv[1] < 100 && hsv[2] < 50) {
        return CONSTANTS.COLOR_ENUM.BROWN;
    }
    // Near-zero saturation and very bright: white.
    if (hsv[1] < 10 && hsv[2] > 90) {
        return CONSTANTS.COLOR_ENUM.WHITE;
    }
    if (hsv[0] > 70 && hsv[0] < 160 && hsv[1] > 80) {
        return CONSTANTS.COLOR_ENUM.GREEN;
    }
    if (hsv[0] > 200 && hsv[0] < 250 && hsv[1] > 90 && hsv[2] > 50) {
        return CONSTANTS.COLOR_ENUM.BLUE;
    }
    return CONSTANTS.COLOR_ENUM.NONE;
}
});
|
import express from 'express';
import ExpressRateLimit from 'express-rate-limit';
import http, {Server} from 'http';
import Redis from 'ioredis';
import ms from 'ms';
import ExpressRateLimitIORedisStore from '../../src';
// Shared rate-limit middleware: at most 10 requests per 3-second window,
// with hit counts stored in Redis so the limit holds across processes.
// Clients over the limit receive a JSON error payload.
const redisClient = new Redis('localhost:6379');
const rateLimitStore = new ExpressRateLimitIORedisStore({client: redisClient});
const limiter = ExpressRateLimit({
  store: rateLimitStore,
  max: 10,
  windowMs: ms('3s'),
  handler: (_, res) => res.json({error: 'Too many requests'}),
});
/**
 * Build the Node HTTP server that serves the rate-limited Express app.
 */
export default function createServer(): Server {
  return http.createServer(createApp());
}
// Assemble the Express application: a single GET / endpoint guarded by the
// shared limiter, answering with a small JSON payload when under the limit.
function createApp() {
  const app = express();
  app.get('/', limiter, (_, res) => res.json({message: 'No problems so far'}));
  return app;
}
|
def merge(arr1, arr2, n1, n2):
    """Merge two sorted lists into one sorted list.

    Args:
        arr1: first sorted list.
        arr2: second sorted list.
        n1: number of elements of ``arr1`` to merge.
        n2: number of elements of ``arr2`` to merge.

    Returns:
        A new list of length ``n1 + n2`` containing the merged elements.

    Bug fix: the original version filled ``arr3`` but never returned it,
    so every call yielded ``None``.
    """
    arr3 = [None] * (n1 + n2)
    i = j = k = 0
    # Take the smaller head element from either list until one is exhausted.
    # On ties arr2's element is taken first (matches the original '<' test).
    while i < n1 and j < n2:
        if arr1[i] < arr2[j]:
            arr3[k] = arr1[i]
            i += 1
        else:
            arr3[k] = arr2[j]
            j += 1
        k += 1
    # Copy whatever remains of the non-exhausted list.
    while i < n1:
        arr3[k] = arr1[i]
        i += 1
        k += 1
    while j < n2:
        arr3[k] = arr2[j]
        j += 1
        k += 1
    return arr3
|
<reponame>daylmer/SpectralLibrary<gh_stars>0
/*
The left most peak is monoisotopic
Isotopes being present in the spectra are really useful to determine charge state
Relative difference in intensity between two charge states of something with the same mass to charge is
another factor in being confident you have identified the same peptide between samples.
What is intensity measured in/as?
*/
/*
Mass (measured, theoretical)
Charge (actual, deconvoluted)
Intensity (actual, normalised)
retention time (actual, aligned)
Drift time (actual, in seconds)
PPM (delta molecular mass Vs measurement)
FWHM (Peak width half height)
count of matching spectra
count of precursor matches
count of product matches
count of peptide matches
averages
avg error calculations
FDR
*/
-- Start
USE [protein]
GO
/*
Temporarily drop all foreign key constraints.
Each FactSpectra FK must go before the dimension tables below can be
dropped and recreated. OBJECT_ID(..., 'F') returns non-NULL only when a
FOREIGN KEY constraint of that name exists, so each drop is safe to rerun.
*/
--IF OBJECT_ID('mspeaksequence_mspeakid_fk', 'F') IS NOT NULL
-- ALTER TABLE mspeaksequence DROP CONSTRAINT mspeaksequence_mspeakid_fk
--GO
IF OBJECT_ID('FactSpectra_DimDataSet_fk', 'F') IS NOT NULL
ALTER TABLE FactSpectra DROP CONSTRAINT FactSpectra_DimDataSet_fk
IF OBJECT_ID('FactSpectra_DimTime_fk', 'F') IS NOT NULL
ALTER TABLE FactSpectra DROP CONSTRAINT FactSpectra_DimTime_fk
IF OBJECT_ID('FactSpectra_DimTimePoint_fk', 'F') IS NOT NULL
ALTER TABLE FactSpectra DROP CONSTRAINT FactSpectra_DimTimePoint_fk
IF OBJECT_ID('FactSpectra_DimBioReplicate_fk', 'F') IS NOT NULL
ALTER TABLE FactSpectra DROP CONSTRAINT FactSpectra_DimBioReplicate_fk
IF OBJECT_ID('FactSpectra_DimTechReplicate_fk', 'F') IS NOT NULL
ALTER TABLE FactSpectra DROP CONSTRAINT FactSpectra_DimTechReplicate_fk
IF OBJECT_ID('FactSpectra_DimMassChargeAbs_fk', 'F') IS NOT NULL
ALTER TABLE FactSpectra DROP CONSTRAINT FactSpectra_DimMassChargeAbs_fk
IF OBJECT_ID('FactSpectra_DimMassChargeRel_fk', 'F') IS NOT NULL
ALTER TABLE FactSpectra DROP CONSTRAINT FactSpectra_DimMassChargeRel_fk
IF OBJECT_ID('FactSpectra_DimRetentionTimeAbs_fk', 'F') IS NOT NULL
ALTER TABLE FactSpectra DROP CONSTRAINT FactSpectra_DimRetentionTimeAbs_fk
IF OBJECT_ID('FactSpectra_DimRetentionTimeRel_fk', 'F') IS NOT NULL
ALTER TABLE FactSpectra DROP CONSTRAINT FactSpectra_DimRetentionTimeRel_fk
IF OBJECT_ID('FactSpectra_DimDriftTimeAbs_fk', 'F') IS NOT NULL
ALTER TABLE FactSpectra DROP CONSTRAINT FactSpectra_DimDriftTimeAbs_fk
IF OBJECT_ID('FactSpectra_DimDriftTimeRel_fk', 'F') IS NOT NULL
ALTER TABLE FactSpectra DROP CONSTRAINT FactSpectra_DimDriftTimeRel_fk
IF OBJECT_ID('FactSpectra_DimIntensityAbs_fk', 'F') IS NOT NULL
ALTER TABLE FactSpectra DROP CONSTRAINT FactSpectra_DimIntensityAbs_fk
IF OBJECT_ID('FactSpectra_DimIntensityRel_fk', 'F') IS NOT NULL
ALTER TABLE FactSpectra DROP CONSTRAINT FactSpectra_DimIntensityRel_fk
IF OBJECT_ID('FactSpectra_DimSequence_fk', 'F') IS NOT NULL
ALTER TABLE FactSpectra DROP CONSTRAINT FactSpectra_DimSequence_fk
IF OBJECT_ID('FactSpectra_DimSequenceType_fk', 'F') IS NOT NULL
ALTER TABLE FactSpectra DROP CONSTRAINT FactSpectra_DimSequenceType_fk
IF OBJECT_ID('FactSpectra_DimProtein_fk', 'F') IS NOT NULL
ALTER TABLE FactSpectra DROP CONSTRAINT FactSpectra_DimProtein_fk
IF OBJECT_ID('FactSpectra_DimExperiment_fk', 'F') IS NOT NULL
ALTER TABLE FactSpectra DROP CONSTRAINT FactSpectra_DimExperiment_fk
IF OBJECT_ID('FactSpectra_DimScore_fk', 'F') IS NOT NULL
ALTER TABLE FactSpectra DROP CONSTRAINT FactSpectra_DimScore_fk
IF OBJECT_ID('FactSpectra_DimCharge_fk', 'F') IS NOT NULL
ALTER TABLE FactSpectra DROP CONSTRAINT FactSpectra_DimCharge_fk
GO
/*
Temporarily drop unique constraints.
Fix: the original existence checks probed 'Dimsequence_sequence_uq' and
'Dimprotein_accession_uq', names that differ from the constraints actually
dropped ('sequence_sequence_uq' / 'protein_accession_uq'), so the guard
never referred to the object being dropped. Check the same name the DROP
targets, keeping the statements rerunnable.
*/
IF OBJECT_ID('sequence_sequence_uq', 'UQ') IS NOT NULL
ALTER TABLE [dbo].[sequence] DROP CONSTRAINT sequence_sequence_uq
IF OBJECT_ID('protein_accession_uq', 'UQ') IS NOT NULL
ALTER TABLE [dbo].[protein] DROP CONSTRAINT protein_accession_uq
/*
Views first: vSpectra and vPeptideFragment reference the tables below,
so they are dropped before any table is dropped/recreated.
*/
IF OBJECT_ID (N'vSpectra', N'V') IS NOT NULL BEGIN
DROP VIEW vSpectra
END
IF OBJECT_ID (N'vPeptideFragment', N'V') IS NOT NULL BEGIN
DROP VIEW vPeptideFragment
END
/*
Dimension tables first.
Every dimension follows the same shape: a bigint identity surrogate key
[id] plus a display [label]; some add range columns for binned dimensions.
*/
IF OBJECT_ID (N'DimDataSet', N'U') IS NOT NULL BEGIN
DROP TABLE [dbo].[DimDataSet]
END
-- One row per loaded source file/run; extid links back to the source system.
CREATE TABLE [dbo].[DimDataSet] (
[id] bigint identity PRIMARY KEY NOT NULL,
[extid] bigint NULL,
[label] nvarchar(64) NOT NULL,
[loaddate] datetime NULL,
[sampledate] datetime NULL,
[filename] nvarchar(512) NULL,
)
--ALTER TABLE DimDataSet ALTER COLUMN extid bigint NULL
--ALTER TABLE DimDataSet ALTER COLUMN loaddate datetime NULL
--ALTER TABLE DimDataSet ALTER COLUMN sampledate datetime NULL
--ALTER TABLE DimDataSet ALTER COLUMN filename nvarchar(512) NULL
IF OBJECT_ID (N'DimTime', N'U') IS NOT NULL BEGIN
DROP TABLE [dbo].[DimTime]
END
CREATE TABLE [dbo].[DimTime] (
[id] bigint identity PRIMARY KEY NOT NULL,
[label] nvarchar(64) NOT NULL,
[SampleDate] DateTime NULL
)
IF OBJECT_ID (N'DimTimePoint', N'U') IS NOT NULL BEGIN
DROP TABLE [dbo].[DimTimePoint]
END
CREATE TABLE [dbo].[DimTimePoint] (
[id] bigint identity PRIMARY KEY NOT NULL,
[label] nvarchar(64) NOT NULL,
)
IF OBJECT_ID (N'DimBioReplicate', N'U') IS NOT NULL BEGIN
DROP TABLE [dbo].[DimBioReplicate]
END
CREATE TABLE [dbo].[DimBioReplicate] (
[id] bigint identity PRIMARY KEY NOT NULL,
[label] nvarchar(64) NOT NULL,
)
IF OBJECT_ID (N'DimTechReplicate', N'U') IS NOT NULL BEGIN
DROP TABLE [dbo].[DimTechReplicate]
END
CREATE TABLE [dbo].[DimTechReplicate] (
[id] bigint identity PRIMARY KEY NOT NULL,
[label] nvarchar(64) NOT NULL,
)
-- "Abs" dimensions bin a measurement into fixed absolute ranges
-- ([minrange], [maxrange]); the commented-out "Rel" variants add cluster
-- centre/spread columns for relative (clustered) binning.
IF OBJECT_ID (N'DimMassChargeAbs', N'U') IS NOT NULL BEGIN
DROP TABLE [dbo].[DimMassChargeAbs]
END
CREATE TABLE [dbo].[DimMassChargeAbs] (
[id] bigint identity PRIMARY KEY NOT NULL,
[label] nvarchar(64) NOT NULL,
[minrange] numeric(12,6) NULL,
[maxrange] numeric(12,6) NULL
)
/*
IF OBJECT_ID (N'DimMassChargeRel', N'U') IS NOT NULL BEGIN
DROP TABLE [dbo].[DimMassChargeRel]
END
CREATE TABLE [dbo].[DimMassChargeRel] (
[id] bigint identity PRIMARY KEY NOT NULL,
[label] nvarchar(64) NOT NULL,
[minrange] numeric(12,6) NULL,
[maxrange] numeric(12,6) NULL,
[center] numeric(12,6) NULL,
[sumofsquares] numeric(18,6) NULL
)
*/
IF OBJECT_ID (N'DimRetentionTimeAbs', N'U') IS NOT NULL BEGIN
DROP TABLE [dbo].[DimRetentionTimeAbs]
END
CREATE TABLE [dbo].[DimRetentionTimeAbs] (
[id] bigint identity PRIMARY KEY NOT NULL,
[label] nvarchar(64) NOT NULL,
[minrange] numeric(12,6) NULL,
[maxrange] numeric(12,6) NULL
)
/*
IF OBJECT_ID (N'DimRetentionTimeRel', N'U') IS NOT NULL BEGIN
DROP TABLE [dbo].[DimRetentionTimeRel]
END
CREATE TABLE [dbo].[DimRetentionTimeRel] (
[id] bigint identity PRIMARY KEY NOT NULL,
[label] nvarchar(64) NOT NULL,
[minrange] numeric(12,6) NULL,
[maxrange] numeric(12,6) NULL,
[center] numeric(12,6) NULL,
[sumofsquares] numeric(12,6) NULL
)
*/
IF OBJECT_ID (N'DimDriftTimeAbs', N'U') IS NOT NULL BEGIN
DROP TABLE [dbo].[DimDriftTimeAbs]
END
CREATE TABLE [dbo].[DimDriftTimeAbs] (
[id] bigint identity PRIMARY KEY NOT NULL,
[label] nvarchar(64) NOT NULL,
[minrange] numeric(12,6) NULL,
[maxrange] numeric(12,6) NULL
)
/*
IF OBJECT_ID (N'DimDriftTimeRel', N'U') IS NOT NULL BEGIN
DROP TABLE [dbo].[DimDriftTimeRel]
END
CREATE TABLE [dbo].[DimDriftTimeRel] (
[id] bigint identity PRIMARY KEY NOT NULL,
[label] nvarchar(64) NOT NULL,
[minrange] numeric(12,6) NULL,
[maxrange] numeric(12,6) NULL,
[center] numeric(12,6) NULL,
[sumofsquares] numeric(12,6) NULL
)
*/
IF OBJECT_ID (N'DimIntensityAbs', N'U') IS NOT NULL BEGIN
DROP TABLE [dbo].[DimIntensityAbs]
END
CREATE TABLE [dbo].[DimIntensityAbs] (
[id] bigint identity PRIMARY KEY NOT NULL,
[label] nvarchar(64) NOT NULL,
[minrange] numeric(18,6) NULL,
[maxrange] numeric(18,6) NULL
)
/*
IF OBJECT_ID (N'DimIntensityRel', N'U') IS NOT NULL BEGIN
DROP TABLE [dbo].[DimIntensityRel]
END
CREATE TABLE [dbo].[DimIntensityRel] (
[id] bigint identity PRIMARY KEY NOT NULL,
[label] nvarchar(64) NOT NULL,
[minrange] numeric(18,6) NULL,
[maxrange] numeric(18,6) NULL,
[center] numeric(18,6) NULL,
[sumofsquares] numeric(18,6) NULL
)
*/
IF OBJECT_ID (N'DimSequence', N'U') IS NOT NULL BEGIN
DROP TABLE [dbo].[DimSequence]
END
CREATE TABLE [dbo].[DimSequence] (
[id] bigint identity PRIMARY KEY NOT NULL,
[sequence] nvarchar(128) NOT NULL
-- Maybe put the theoretical mass here..
)
IF OBJECT_ID (N'DimSequenceType', N'U') IS NOT NULL BEGIN
DROP TABLE [dbo].[DimSequenceType]
END
CREATE TABLE [dbo].[DimSequenceType] (
[id] bigint identity PRIMARY KEY NOT NULL,
[label] varchar(64) NOT NULL
)
IF OBJECT_ID (N'DimProtein', N'U') IS NOT NULL BEGIN
DROP TABLE [dbo].[DimProtein]
END
CREATE TABLE [dbo].[DimProtein] (
[id] bigint identity PRIMARY KEY NOT NULL,
[label] nvarchar(64) NOT NULL
)
IF OBJECT_ID (N'DimExperiment', N'U') IS NOT NULL BEGIN
DROP TABLE [dbo].[DimExperiment]
END
CREATE TABLE [dbo].[DimExperiment] (
[id] bigint identity PRIMARY KEY NOT NULL,
[label] nvarchar(64) NOT NULL
)
IF OBJECT_ID (N'DimScore', N'U') IS NOT NULL BEGIN
DROP TABLE [dbo].[DimScore]
END
CREATE TABLE [dbo].[DimScore] (
[id] bigint identity PRIMARY KEY NOT NULL,
[label] nvarchar(64) NOT NULL,
[minrange] numeric(12,6) NULL,
[maxrange] numeric(12,6) NULL
)
IF OBJECT_ID (N'DimCharge', N'U') IS NOT NULL BEGIN
DROP TABLE [dbo].[DimCharge]
END
CREATE TABLE [dbo].[DimCharge] (
[id] bigint identity PRIMARY KEY NOT NULL,
[label] nvarchar(64) NOT NULL
)
-- Central fact table: one row per observed spectrum peak, carrying one FK
-- per dimension plus the raw measurements.
-- NOTE(review): the *RelID columns reference the DimMassChargeRel /
-- DimRetentionTimeRel / DimDriftTimeRel / DimIntensityRel tables whose
-- CREATE statements are commented out above — presumably they persist from
-- an earlier deployment; confirm before running this script on a fresh DB.
IF OBJECT_ID (N'FactSpectra', N'U') IS NOT NULL BEGIN
DROP TABLE [dbo].[FactSpectra]
END
CREATE TABLE [dbo].[FactSpectra] (
[id] bigint identity PRIMARY KEY NOT NULL,
-- Foreign Keys
[DataSetID] bigint NOT NULL,
[TimeID] bigint NOT NULL,
[TimePointID] bigint NOT NULL,
[BioReplicateID] bigint NOT NULL,
[TechReplicateID] bigint NOT NULL,
[MassChargeAbsID] bigint NOT NULL,
[MassChargeRelID] bigint NOT NULL,
[RetentionTimeAbsID]bigint NOT NULL,
[RetentionTimeRelID]bigint NOT NULL,
[DriftTimeAbsID] bigint NOT NULL,
[DriftTimeRelID] bigint NOT NULL,
[IntensityAbsID] bigint NOT NULL,
[IntensityRelID] bigint NOT NULL,
[SequenceID] bigint NOT NULL,
[SequenceTypeID] bigint NOT NULL,
[ProteinID] bigint NOT NULL,
[ExperimentID] bigint NOT NULL,
[ScoreID] bigint NOT NULL,
[ChargeID] bigint NOT NULL,
-- Numerics
[ProteinScore] numeric(12, 6) NULL,
[PeptideScore] numeric(12, 6) NULL,
[MassCharge] numeric(12, 6) NULL,
[DeconvolutedMass] numeric(12, 6) NULL,
[TheoreticalMass] numeric(12, 6) NULL,
[Charge] int NULL,
[Intensity] numeric(18, 6) NULL,
[RetentionTime] numeric(12, 6) NULL,
[DriftTime] numeric(12, 6) NULL,
[PPM] numeric(12, 6) NULL,
[FWHM] numeric(12, 6) NULL,
[SequenceLength] int NULL,
-- What are these again?
[SpectraMatchCount] bigint NULL,
[PeptideMatchCount] bigint NULL,
[FragmentMatchCount]bigint NULL
)
/*
Add all foreign key constraints.
NOTE(review): the whole ADD CONSTRAINT batch below is commented out, so
FactSpectra currently has no enforced referential integrity; the drops at
the top of the script are kept for when these are re-enabled.
*/
/*
ALTER TABLE [dbo].[FactSpectra] ADD
CONSTRAINT FactSpectra_DimDataSet_fk FOREIGN KEY (DataSetID) references DimDataSet(id),
CONSTRAINT FactSpectra_DimTime_fk FOREIGN KEY (TimeID) references DimTime(id),
CONSTRAINT FactSpectra_DimTimePoint_fk FOREIGN KEY (TimePointID) references DimTimePoint(id),
CONSTRAINT FactSpectra_DimBioReplicate_fk FOREIGN KEY (BioReplicateID) references DimBioReplicate(id),
CONSTRAINT FactSpectra_DimTechReplicate_fk FOREIGN KEY (TechReplicateID) references DimTechReplicate(id),
CONSTRAINT FactSpectra_DimMassChargeAbs_fk FOREIGN KEY (MassChargeAbsID) references DimMassChargeAbs(id),
CONSTRAINT FactSpectra_DimMassChargeRel_fk FOREIGN KEY (MassChargeRelID) references DimMassChargeRel (id),
CONSTRAINT FactSpectra_DimRetentionTimeAbs_fk FOREIGN KEY (RetentionTimeAbsID)references DimRetentionTimeAbs (id),
CONSTRAINT FactSpectra_DimRetentionTimeRel_fk FOREIGN KEY (RetentionTimeRelID)references DimRetentionTimeRel (id),
CONSTRAINT FactSpectra_DimDriftTimeAbs_fk FOREIGN KEY (DriftTimeAbsID) references DimDriftTimeAbs (id),
CONSTRAINT FactSpectra_DimDriftTimeRel_fk FOREIGN KEY (DriftTimeRelID) references DimDriftTimeRel (id),
CONSTRAINT FactSpectra_DimIntensityAbs_fk FOREIGN KEY (IntensityAbsID) references DimIntensityAbs (id),
CONSTRAINT FactSpectra_DimIntensityRel_fk FOREIGN KEY (IntensityRelID) references DimIntensityRel (id),
CONSTRAINT FactSpectra_DimSequence_fk FOREIGN KEY (SequenceID) references DimSequence (id),
CONSTRAINT FactSpectra_DimSequenceType_fk FOREIGN KEY (SequenceTypeID) references DimSequenceType (id),
CONSTRAINT FactSpectra_DimProtein_fk FOREIGN KEY (ProteinID) references DimProtein (id),
CONSTRAINT FactSpectra_DimExperiment_fk FOREIGN KEY (ExperimentID) references DimExperiment (id),
CONSTRAINT FactSpectra_DimScore_fk FOREIGN KEY (ScoreID) references DimScore (id),
CONSTRAINT FactSpectra_DimCharge_fk FOREIGN KEY (ChargeID) references DimCharge (id)
*/
/*
Add Unique constraints (also currently disabled; kept for reference)
*/
-- select * from DimDataSet
-- update DimDataSet set label = [filename] where label = ''
/*
SELECT SUM(max_length)AS TotalIndexKeySize
FROM sys.columns
WHERE name IN (N'label')
AND object_id = OBJECT_ID(N'DimDataSet');
*/
/*
alter table DimDataSet alter column label nvarchar(64) not null
IF OBJECT_ID('DimDataSet_label_uq', 'UQ') IS NOT NULL
ALTER TABLE [dbo].[DimDataSet] DROP CONSTRAINT DimDataSet_label_uq
ALTER TABLE [dbo].[DimDataSet] ADD CONSTRAINT DimDataSet_label_uq UNIQUE (label)
ALTER TABLE [dbo].[DimTime] ADD CONSTRAINT DimTime_label_uq UNIQUE (label)
ALTER TABLE [dbo].[DimTimePoint] ADD CONSTRAINT DimTimePoint_label_uq UNIQUE (label)
ALTER TABLE [dbo].[DimBioReplicate] ADD CONSTRAINT DimBioReplicate_label_uq UNIQUE (label)
ALTER TABLE [dbo].[DimTechReplicate] ADD CONSTRAINT DimTechReplicate_label_uq UNIQUE (label)
ALTER TABLE [dbo].[DimMassChargeAbs] ADD CONSTRAINT DimMassChargeAbs_label_uq UNIQUE (label)
ALTER TABLE [dbo].[DimMassChargeRel] ADD CONSTRAINT DimMassChargeRel_label_uq UNIQUE (label)
ALTER TABLE [dbo].[DimRetentionTimeAbs] ADD CONSTRAINT DimRetentionTimeAbs_label_uq UNIQUE (label)
ALTER TABLE [dbo].[DimRetentionTimeRel] ADD CONSTRAINT DimRetentionTimeRel_label_uq UNIQUE (label)
ALTER TABLE [dbo].[DimDriftTimeAbs] ADD CONSTRAINT DimDriftTimeAbs_label_uq UNIQUE (label)
ALTER TABLE [dbo].[DimDriftTimeRel] ADD CONSTRAINT DimDriftTimeRel_label_uq UNIQUE (label)
ALTER TABLE [dbo].[DimIntensityAbs] ADD CONSTRAINT DimIntensityAbs_label_uq UNIQUE (label)
ALTER TABLE [dbo].[DimIntensityRel] ADD CONSTRAINT DimIntensityRel_label_uq UNIQUE (label)
ALTER TABLE [dbo].[DimSequence] ADD CONSTRAINT DimSequence_label_uq UNIQUE (label)
ALTER TABLE [dbo].[DimSequenceType] ADD CONSTRAINT DimSequenceType_label_uq UNIQUE (label)
ALTER TABLE [dbo].[DimProtein] ADD CONSTRAINT DimProtein_label_uq UNIQUE (label)
ALTER TABLE [dbo].[DimExperiment] ADD CONSTRAINT DimExperiment_label_uq UNIQUE (label)
ALTER TABLE [dbo].[DimScore] ADD CONSTRAINT DimScore_label_uq UNIQUE (label)
ALTER TABLE [dbo].[DimCharge] ADD CONSTRAINT DimCharg_label_uq UNIQUE (label)
*/
-- Insert 'unknowns' into id = 1
-- Each dimension gets a sentinel 'Unknown' member so fact rows can always
-- satisfy their NOT NULL dimension keys. These rely on freshly recreated
-- tables, where the identity column starts at 1.
INSERT INTO DimDataSet (extid, label, loaddate, sampledate, filename) VALUES (null, 'Unknown', null, null, null)
INSERT INTO DimTime (label, sampledate) VALUES ('Unknown', null)
INSERT INTO DimTimePoint (label) VALUES ('Unknown')
INSERT INTO DimBioReplicate (label) VALUES ('Unknown')
INSERT INTO DimTechReplicate (label) VALUES ('Unknown')
INSERT INTO DimMassChargeAbs (label, minrange, maxrange) VALUES ('Unknown', null, null)
-- INSERT INTO DimMassChargeRel (label, minrange, maxrange, center, sumofsquares) VALUES ('Unknown', null, null, null, null)
INSERT INTO DimRetentionTimeAbs (label, minrange, maxrange) VALUES ('Unknown', null, null)
-- INSERT INTO DimRetentionTimeRel (label, minrange, maxrange, center, sumofsquares) VALUES ('Unknown', null, null, null, null)
INSERT INTO DimDriftTimeAbs (label, minrange, maxrange) VALUES ('Unknown', null, null)
-- INSERT INTO DimDriftTimeRel (label, minrange, maxrange, center, sumofsquares) VALUES ('Unknown', null, null, null, null)
INSERT INTO DimIntensityAbs (label, minrange, maxrange) VALUES ('Unknown', null, null)
-- INSERT INTO DimIntensityRel (label, minrange, maxrange, center, sumofsquares) VALUES ('Unknown', null, null, null, null)
INSERT INTO DimSequence (sequence) VALUES('Unknown')
INSERT INTO DimSequenceType (label) VALUES ('Unknown')
INSERT INTO DimProtein (label) VALUES ('Unknown')
INSERT INTO DimExperiment (label) VALUES ('Unknown')
INSERT INTO DimScore (label, minrange, maxrange) VALUES ('Unknown', null, null)
INSERT INTO DimCharge (label) VALUES ('Unknown')
IF OBJECT_ID (N'vSpectra', N'V') IS NOT NULL BEGIN
DROP VIEW vSpectra
END
GO
-- Denormalised reporting view: one row per fact spectrum with cluster
-- ("Rel") and fixed-width-bin ("Abs") labels plus the raw measurements.
-- NOTE(review): the INNER JOINs to the Dim*Rel tables require those tables
-- to exist even though their CREATE statements are commented out earlier
-- in this script — confirm they survive from a previous deployment.
CREATE VIEW vSpectra As
SELECT
fs.id,
dmcr.label MassChargeCluster,
drtr.label RetentionTimeCluster,
ddtr.label DriftTimeCluster,
dir.label IntensityCluster,
dmca.label MassChargeFWBin,
drta.label RetentionTimeFWBin,
ddta.label DriftTimeFWBin,
dia.label IntensityFWBin,
fs.ProteinScore, fs.PeptideScore, fs.MassCharge, fs.DeconvolutedMass, fs.TheoreticalMass, fs.Charge, fs.Intensity, fs.RetentionTime, fs.DriftTime, fs.PPM, fs.FWHM, fs.SequenceLength, fs.SpectraMatchCount, fs.PeptideMatchCount, fs.FragmentMatchCount,
ds.sequence
from FactSpectra fs
INNER JOIN DimDataSet dds on fs.DataSetID = dds.id
INNER JOIN DimSequence ds on fs.SequenceID = ds.id
INNER JOIN DimMassChargeRel dmcr on fs.MassChargeRelID = dmcr.id
INNER JOIN DimRetentionTimeRel drtr on fs.RetentionTimeRelID = drtr.id
INNER JOIN DimDriftTimeRel ddtr on fs.DriftTimeRelID = ddtr.id
INNER JOIN DimIntensityRel dir on fs.IntensityRelID = dir.id
INNER JOIN DimMassChargeAbs dmca on fs.MassChargeAbsID = dmca.id
INNER JOIN DimRetentionTimeAbs drta on fs.RetentionTimeAbsID = drta.id
INNER JOIN DimDriftTimeAbs ddta on fs.DriftTimeAbsID = ddta.id
INNER JOIN DimIntensityAbs dia on fs.IntensityAbsID = dia.id
WHERE ds.sequence IS NOT NULL AND ds.sequence <> ''
--and ds.sequence = 'AA' and dds.id = 3
GO
-- Quick sanity checks after (re)build.
--select * from FactSpectra
select * from vSpectra
select count(*) from FactSpectra
select count(*) from vSpectra
IF OBJECT_ID (N'vPeptideFragment', N'V') IS NOT NULL BEGIN
DROP VIEW vPeptideFragment
END
GO
-- Peptide-centric variant of vSpectra: only the cluster labels, no
-- fixed-width bins and no dataset join.
CREATE VIEW vPeptideFragment As
SELECT
fs.id PeptideID,
dmcr.label PeptideMassChargeCluster,
drtr.label PeptideRetentionTimeCluster,
ddtr.label PeptideDriftTimeCluster,
dir.label PeptideIntensityCluster,
fs.ProteinScore, fs.PeptideScore, fs.MassCharge, fs.DeconvolutedMass, fs.TheoreticalMass, fs.Charge, fs.Intensity, fs.RetentionTime, fs.DriftTime, fs.PPM, fs.FWHM, fs.SequenceLength, fs.SpectraMatchCount, fs.PeptideMatchCount, fs.FragmentMatchCount,
ds.sequence
from FactSpectra fs
INNER JOIN DimSequence ds on fs.SequenceID = ds.id
INNER JOIN DimMassChargeRel dmcr on fs.MassChargeRelID = dmcr.id
INNER JOIN DimRetentionTimeRel drtr on fs.RetentionTimeRelID = drtr.id
INNER JOIN DimDriftTimeRel ddtr on fs.DriftTimeRelID = ddtr.id
INNER JOIN DimIntensityRel dir on fs.IntensityRelID = dir.id
WHERE ds.sequence IS NOT NULL AND ds.sequence <> ''
GO
-- Dump every dimension for manual inspection.
SELECT * FROM DimDataSet
SELECT * FROM DimTime
SELECT * FROM DimTimePoint
SELECT * FROM DimBioReplicate
SELECT * FROM DimTechReplicate
SELECT * FROM DimMassChargeAbs
SELECT * FROM DimMassChargeRel
SELECT * FROM DimRetentionTimeAbs
SELECT * FROM DimRetentionTimeRel
SELECT * FROM DimDriftTimeAbs
SELECT * FROM DimDriftTimeRel
SELECT * FROM DimIntensityAbs
SELECT * FROM DimIntensityRel
SELECT * FROM DimSequence
SELECT * FROM DimSequenceType
SELECT * FROM DimProtein
SELECT * FROM DimExperiment
SELECT * FROM DimScore
SELECT * FROM DimCharge
|
define([
'underscore',
'backbone'
],
function(_, Backbone){
	// Model for a single todo entry. Until the server has assigned an id it
	// syncs against the collection endpoint '/todos/'; afterwards against
	// the item endpoint '/todos/<id>'.
	var TodoItem = Backbone.Model.extend({
		initialize: function() {
		},
		url: function() {
			if (this.id) {
				return '/todos/' + this.id;
			}
			return '/todos/';
		}
	});
	return TodoItem;
}
);
|
define((require, exports, module) => {
	// Factory for a blank battle-result record: every field starts out null
	// and is filled in as the battle log entry is parsed. A fresh object is
	// returned on every call so records never share state.
	const createRecord = () => ({
		logId: null,
		party_no: null,
		enemy_id: null,
		enemy_name: null,
		enemy_level: null,
		rank: null,
		mvp: null,
		now: null
	});
	return createRecord;
})
|
def longestPalindromeSubstring(string):
    """Return the longest palindromic substring of ``string``.

    Expands a palindrome window around every position (covering both odd-
    and even-length palindromes via getLongestPalindromeFrom) and keeps the
    widest half-open [start, stop) bounds seen so far.
    """
    def width(bounds):
        # Length of the half-open span [start, stop).
        return bounds[1] - bounds[0]

    best = [0, 1]
    for center in range(1, len(string)):
        odd = getLongestPalindromeFrom(string, center - 1, center + 1)
        even = getLongestPalindromeFrom(string, center - 1, center)
        # On equal widths prefer the odd expansion, then the newest
        # candidate, matching the original max() tie-breaking.
        candidate = odd if width(odd) >= width(even) else even
        if width(candidate) >= width(best):
            best = candidate
    return string[best[0]:best[1]]
def getLongestPalindromeFrom(string, left, right):
    """Expand outward from the seed window while the characters match.

    Args:
        string: text being scanned.
        left: index of the left seed character.
        right: index of the right seed character.

    Returns:
        Half-open bounds ``[start, stop)`` of the widest palindrome that
        contains the seed window (possibly empty when the seed itself does
        not match).
    """
    # Fold the original 'break on mismatch' into the loop condition.
    while left >= 0 and right < len(string) and string[left] == string[right]:
        left -= 1
        right += 1
    return [left + 1, right]
|
#!/usr/bin/env bash
# Buildbot driver: builds a stage-1 clang, then uses it to produce and test
# stage-2 builds instrumented with MSan, ASan and UBSan.
# Trace commands, stop on first error, and treat unset variables as errors.
set -x
set -e
set -u
# Helper functions (clobber, build_stage*, check_stage*, buildbot_update)
# come from the sibling buildbot_functions.sh.
HERE="$(cd $(dirname $0) && pwd)"
. ${HERE}/buildbot_functions.sh
ROOT=`pwd`
PLATFORM=`uname`
export PATH="/usr/local/bin:$PATH"
LLVM=$ROOT/llvm
CMAKE_COMMON_OPTIONS="-GNinja -DCMAKE_BUILD_TYPE=Release -DLLVM_ENABLE_ASSERTIONS=ON"
# Wipe previous build output.
clobber
# Stage 1
# (function name, including the 'revison' spelling, is defined in
# buildbot_functions.sh)
build_stage1_clang_at_revison
# Enable ccache for the stage-2 builds only when a ccache installation
# responds to 'ccache -s'.
if ccache -s ; then
CMAKE_COMMON_OPTIONS="${CMAKE_COMMON_OPTIONS} -DLLVM_CCACHE_BUILD=ON"
fi
buildbot_update
# Stage 2 / Memory Sanitizer
build_stage2_msan
check_stage2_msan
# Stage 2 / AddressSanitizer
build_stage2_asan
check_stage2_asan
# Stage 2 / UndefinedBehaviorSanitizer
build_stage2_ubsan
check_stage2_ubsan
|
package org.multibit.hd.ui.views.wizards.lab_settings;
import com.google.common.base.Optional;
import net.miginfocom.swing.MigLayout;
import org.multibit.hd.core.config.Configuration;
import org.multibit.hd.core.config.Configurations;
import org.multibit.hd.ui.events.view.ViewEvents;
import org.multibit.hd.ui.languages.MessageKey;
import org.multibit.hd.ui.views.components.ComboBoxes;
import org.multibit.hd.ui.views.components.Labels;
import org.multibit.hd.ui.views.components.Panels;
import org.multibit.hd.ui.views.components.panels.PanelDecorator;
import org.multibit.hd.ui.views.fonts.AwesomeIcon;
import org.multibit.hd.ui.views.wizards.AbstractWizard;
import org.multibit.hd.ui.views.wizards.AbstractWizardPanelView;
import org.multibit.hd.ui.views.wizards.WizardButton;
import javax.swing.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
/**
 * <p>View to provide the following to UI:</p>
 * <ul>
 * <li>Lab settings: Enter details</li>
 * </ul>
 *
 * <p>Presents yes/no combo boxes for the experimental ("lab") options and
 * applies the resulting {@link Configuration} when the wizard is not
 * cancelled.</p>
 *
 * @since 0.0.1
 *
 */
public class LabSettingsPanelView extends AbstractWizardPanelView<LabSettingsWizardModel, LabSettingsPanelModel> implements ActionListener {
// Panel specific components
// Yes/No selector for Trezor hardware wallet support
private JComboBox<String> trezorYesNoComboBox;
// Yes/No selector for showing the Beta 7 wallet restore option
private JComboBox<String> showRestoreBeta7WalletsYesNoComboBox;
/**
 * @param wizard The wizard managing the states
 * @param panelName The panel name
 */
public LabSettingsPanelView(AbstractWizard<LabSettingsWizardModel> wizard, String panelName) {
super(wizard, panelName, AwesomeIcon.FLASK, MessageKey.LABS_SETTINGS_TITLE);
}
@Override
public void newPanelModel() {
// Use a deep copy to avoid reference leaks
Configuration configuration = Configurations.currentConfiguration.deepCopy();
// Configure the panel model
setPanelModel(new LabSettingsPanelModel(
getPanelName(),
configuration
));
}
@Override
public void initialiseContent(JPanel contentPanel) {
contentPanel.setLayout(new MigLayout(
Panels.migXYLayout(),
"[][]", // Column constraints
"[][][]" // Row constraints
));
// Local copy used only to seed the combo boxes with the current values
Configuration configuration = Configurations.currentConfiguration.deepCopy();
trezorYesNoComboBox = ComboBoxes.newHardwareYesNoComboBox(this, configuration.isTrezor());
showRestoreBeta7WalletsYesNoComboBox = ComboBoxes.newShowRestoreBeta7WalletsYesNoComboBox(this, configuration.isShowRestoreBeta7Wallets());
contentPanel.add(Labels.newLabChangeNote(), "growx,span 2,wrap");
contentPanel.add(Labels.newSelectHardware(), "shrink");
contentPanel.add(trezorYesNoComboBox, "growx,wrap");
contentPanel.add(Labels.newSelectShowRestoreBeta7Wallets(), "shrink");
contentPanel.add(showRestoreBeta7WalletsYesNoComboBox, "growx,wrap");
}
@Override
protected void initialiseButtons(AbstractWizard<LabSettingsWizardModel> wizard) {
PanelDecorator.addCancelApply(this, wizard);
}
@Override
public void fireInitialStateViewEvents() {
// Apply button starts off enabled
ViewEvents.fireWizardButtonEnabledEvent(getPanelName(), WizardButton.APPLY, true);
}
@Override
public boolean beforeHide(boolean isExitCancel) {
if (!isExitCancel) {
// Switch the main configuration over to the new one
Configurations.switchConfiguration(getWizardModel().getConfiguration());
}
// Must be OK to proceed
return true;
}
@Override
public void updateFromComponentModels(Optional componentModel) {
// Do nothing
}
/**
 * <p>Handle one of the combo boxes changing</p>
 *
 * @param e The action event
 */
@Override
public void actionPerformed(ActionEvent e) {
// Create a new configuration
// NOTE(review): each event starts from a fresh copy of
// Configurations.currentConfiguration, so changing one combo box and
// then the other appears to discard the first (not yet applied) change
// — confirm whether the wizard model is expected to accumulate both.
Configuration configuration = Configurations.currentConfiguration.deepCopy();
JComboBox source = (JComboBox) e.getSource();
// Index 0 is the "Yes" entry in both combo boxes
if (ComboBoxes.HARDWARE_COMMAND.equals(e.getActionCommand())) {
configuration.setTrezor(source.getSelectedIndex() == 0);
}
if (ComboBoxes.SHOW_RESTORE_BETA7_WALLETS_COMMAND.equals(e.getActionCommand())) {
configuration.setShowRestoreBeta7Wallets(source.getSelectedIndex() == 0);
}
// Update the model
getWizardModel().setConfiguration(configuration);
}
}
|
# NOTE(review): this script has no shebang — confirm it is always invoked
# via an explicit shell (e.g. "sh build_native.sh") before relying on bash
# features like [[ ]].
APPNAME="HelloAnalytics-JS"
# options
buildexternalsfromsource=
# Print usage information for this build helper.
usage(){
cat << EOF
usage: $0 [options]
Build C/C++ code for $APPNAME using Android NDK
OPTIONS:
-s	Build externals from source
-h	this help
EOF
}
# Parse options: -s selects source builds of externals, -h prints help.
while getopts "sh" OPTION; do
case "$OPTION" in
s)
buildexternalsfromsource=1
;;
h)
usage
exit 0
;;
esac
done
# paths — NDK_ROOT must point at the installed Android NDK.
if [ -z "${NDK_ROOT+aaa}" ];then
echo "please define NDK_ROOT"
exit 1
fi
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# ... use paths relative to current directory
PLUGIN_ROOT="$DIR/../../.."
COCOS2DX_ROOT="$DIR/../../../.."
APP_ROOT="$DIR/.."
APP_ANDROID_ROOT="$DIR"
BINDINGS_JS_ROOT="$COCOS2DX_ROOT/scripting/javascript/bindings/js"
echo "PLUGIN_ROOT = $PLUGIN_ROOT"
echo "NDK_ROOT = $NDK_ROOT"
echo "COCOS2DX_ROOT = $COCOS2DX_ROOT"
echo "APP_ROOT = $APP_ROOT"
echo "APP_ANDROID_ROOT = $APP_ANDROID_ROOT"
echo "---------------------------------------------------------"
# Recreate the assets directory from scratch so stale files never linger.
if [ -d "$APP_ANDROID_ROOT"/assets ]; then
rm -rf "$APP_ANDROID_ROOT"/assets
fi
mkdir "$APP_ANDROID_ROOT"/assets
# copy resources
for file in "$APP_ROOT"/Resources/*
do
if [ -d "$file" ]; then
cp -rf "$file" "$APP_ANDROID_ROOT"/assets
fi
if [ -f "$file" ]; then
cp "$file" "$APP_ANDROID_ROOT"/assets
fi
done
# copy bindings/*.js into assets' root
cp -f "$BINDINGS_JS_ROOT"/* "$APP_ANDROID_ROOT"/assets
# copy plugin js into assets' path
cp -f "$PLUGIN_ROOT/jsbindings/js"/* "$APP_ANDROID_ROOT"/assets
# Forward this script's own arguments to ndk-build. Fix: use "$@" (quoted)
# instead of the original unquoted $*, so arguments containing spaces are
# passed through as single words instead of being re-split.
if [[ "$buildexternalsfromsource" ]]; then
echo "Building external dependencies from source"
set -x
"$NDK_ROOT"/ndk-build -C "$APP_ANDROID_ROOT" "$@" \
"NDK_MODULE_PATH=${PLUGIN_ROOT}/publish:${COCOS2DX_ROOT}:${COCOS2DX_ROOT}/cocos2dx/platform/third_party/android/source"
else
echo "Using prebuilt externals"
set -x
"$NDK_ROOT"/ndk-build -C "$APP_ANDROID_ROOT" "$@" \
"NDK_MODULE_PATH=${PLUGIN_ROOT}/publish:${COCOS2DX_ROOT}:${COCOS2DX_ROOT}/cocos2dx/platform/third_party/android/prebuilt"
fi
|
'use strict';
const assert = require('chai').assert;
const emailSeed = require('../../../src/seed/email');
// Unit tests for the email seed generator.
describe('seed/email.js', () => {
  // The seed is always a string.
  it('should be string', () => {
    assert.isString(emailSeed());
  });
  // Successive calls produce different addresses.
  it('should be diff', () => {
    const first = emailSeed();
    const second = emailSeed();
    assert.notStrictEqual(first, second);
  });
  // Addresses follow a minimal local@domain[.tld] shape.
  it('should be right format', () => {
    const EMAIL_SHAPE = /^[a-z]+@[a-z]+(?:\.[a-z]+)*$/;
    assert.notStrictEqual(emailSeed().match(EMAIL_SHAPE), null);
  });
});
|
# Coloured Man Pages!
# less(1) reads these LESS_TERMCAP_* overrides for the termcap escape
# sequences it emits, so man pages piped through less render in colour.
# mb: begin blink -> bright red
export LESS_TERMCAP_mb=$'\E[01;31m'
# md: begin bold (headings, keywords) -> bright red
export LESS_TERMCAP_md=$'\E[01;31m'
# me: end all modes -> reset
export LESS_TERMCAP_me=$'\E[0m'
# se: end standout -> reset
export LESS_TERMCAP_se=$'\E[0m'
# so: begin standout (status bar, search hits) -> yellow on blue
export LESS_TERMCAP_so=$'\E[01;44;33m'
# ue: end underline -> reset
export LESS_TERMCAP_ue=$'\E[0m'
# us: begin underline (emphasised words) -> bright green
export LESS_TERMCAP_us=$'\E[01;32m'
# source custom colors:
# NOTE(review): the command substitution is unquoted; safe while dircolors
# output contains no globs, but eval "$(dircolors -b ...)" would be more
# robust — confirm before changing.
eval $(dircolors -b ${HOME}/.config/dir_colors)
|
<reponame>prasanthi21/neighborhood-map<filename>src/components/App.js
import React, {Component} from 'react';
import LocationList from './LocationList';
class App extends Component {
/**
 * Constructor.
 *
 * Seeds component state with the ten hard-coded Warangal-area locations
 * shown on the map, plus empty slots for the google.maps Map instance, the
 * shared InfoWindow, and the marker whose infowindow is currently open.
 * Handlers are bound once here so they keep this instance as `this` when
 * used as callbacks.
 *
 * NOTE(review): several 'name' fields contain the literal placeholder
 * "<NAME>" (an artifact of source anonymisation) — restore real names
 * before shipping.
 */
constructor(props) {
super(props);
this.state = {
'alllocations': [
{
'name': "warangal railway station",
'type': "Raliway Station",
'latitude': 17.973,
'longitude': 79.6055,
'streetAddress': "Station Road, Shiva Nagar"
},
{
'name': "Ramappa Lake",
'type': "Lake",
'latitude': 18.239,
'longitude': 79.9361,
'streetAddress': "Ramappa Lake,warangal"
},
{
'name': "<NAME>",
'type': "lake",
'latitude': 18.1494,
'longitude': 80.073,
'streetAddress': "Laknavaram Cheruvu,Warangal"
},
{
'name': "University Gate Bus Stop",
'type': "Bus Stop",
'latitude': 18.0215,
'longitude': 79.5574,
'streetAddress': "Kakatiya University Bypass Rd, Kakatiya University"
},
{
'name': "<NAME>",
'type': "movie theater",
'latitude': 18.0076,
'longitude': 79.5671,
'streetAddress': "242, 6-1-242, Main Road, Reddy Colony"
},
{
'name': "<NAME>",
'type': "Hindu Temple",
'latitude': 17.9948,
'longitude': 79.5827,
'streetAddress': "Bhadrakali Temple Rd, Near Lal Bahadur college Kupuwada"
},
{
'name': "Thousand Pillar Temple",
'type': "Hindu Temple",
'latitude': 18.0037,
'longitude': 79.5748,
'streetAddress': "Warangal-Hyderabad Rd, Brahmanawada"
},
{
'name': "<NAME> - <NAME> ",
'type': "Movie Theatre",
'latitude': 18.0046,
'longitude': 79.56297,
'streetAddress': "Bus Stand Road,Hanamkonda"
},
{
'name': "Kakatiya Musical Garden",
'type': "Garden",
'latitude': 17.9911,
'longitude': 79.5888,
'streetAddress': "Near & Back Side of Warangal Muncipal Corporation , Near MGM"
},
{
'name': "<NAME>",
'type': "Hotel",
'latitude': 18.0002,
'longitude': 79.5559,
'streetAddress': "Nakkalagutta, NH163, Balasamudram"
}
],
// Filled in by initMap() once the Google Maps script has loaded.
'map': '',
'infowindow': '',
'prevmarker': ''
};
// retain object instance when used in the function
this.initMap = this.initMap.bind(this);
this.openInfoWindow = this.openInfoWindow.bind(this);
this.closeInfoWindow = this.closeInfoWindow.bind(this);
}
componentDidMount() {
// Connect the initMap() function within this class to the global window context,
// so Google Maps can invoke it
window.initMap = this.initMap;
// Asynchronously load the Google Maps script, passing in the callback reference
// NOTE(review): loadMapJS is not defined in this file — presumably a
// global helper that injects the <script> tag; confirm it is in scope.
// NOTE(review): the API key is embedded in a public URL and should be
// restricted (referrer/domain) in the Google Cloud console.
loadMapJS('https://maps.googleapis.com/maps/api/js?key=AIzaSyCPi0o_tjNjKYYDe_6nYg82r0leI7kKlOE&callback=initMap')
}
/**
* Initialise the map once the google map script is loaded
*/
initMap() {
var self = this;
var mapview = document.getElementById('map');
mapview.style.height = window.innerHeight + "px";
var map = new window.google.maps.Map(mapview, {
center: {lat: 26.907502, lng: 75.737586},
zoom: 15,
mapTypeControl: false
});
var InfoWindow = new window.google.maps.InfoWindow({});
window.google.maps.event.addListener(InfoWindow, 'closeclick', function () {
self.closeInfoWindow();
});
this.setState({
'map': map,
'infowindow': InfoWindow
});
window.google.maps.event.addDomListener(window, "resize", function () {
var center = map.getCenter();
window.google.maps.event.trigger(map, "resize");
self.state.map.setCenter(center);
});
window.google.maps.event.addListener(map, 'click', function () {
self.closeInfoWindow();
});
var alllocations = [];
this.state.alllocations.forEach(function (location) {
var longname = location.name + ' - ' + location.type;
var marker = new window.google.maps.Marker({
position: new window.google.maps.LatLng(location.latitude, location.longitude),
animation: window.google.maps.Animation.DROP,
map: map
});
marker.addListener('click', function () {
self.openInfoWindow(marker);
});
location.longname = longname;
location.marker = marker;
location.display = true;
alllocations.push(location);
});
this.setState({
'alllocations': alllocations
});
}
/**
* Open the infowindow for the marker
*/
openInfoWindow(marker) {
this.closeInfoWindow();
this.state.infowindow.open(this.state.map, marker);
marker.setAnimation(window.google.maps.Animation.BOUNCE);
this.setState({
'prevmarker': marker
});
this.state.infowindow.setContent('Loading Data...');
this.state.map.setCenter(marker.getPosition());
this.state.map.panBy(0, -200);
this.getMarkerInfo(marker);
}
/**
* Retrive the location data from the foursquare api for the marker and display it in the infowindow
*/
getMarkerInfo(marker) {
var self = this;
var clientId = "<KEY>";
var clientSecret = "<KEY>";
var url = "https://api.foursquare.com/v2/venues/search?client_id=" + clientId + "&client_secret=" + clientSecret + "&v=20130815&ll=" + marker.getPosition().lat() + "," + marker.getPosition().lng() + "&limit=1";
fetch(url)
.then(
function (response) {
if (response.status !== 200) {
self.state.infowindow.setContent("Sorry data can't be loaded");
return;
}
// Examine the text in the response
response.json().then(function (data) {
var location_data = data.response.venues[0];
var verified = '<b>Verified Location: </b>' + this.state.name + '<br>';
var checkinsCount = '<b>Street Address: </b>' + this.state.streetAddress + '<br>';
});
}
)
.catch(function (err) {
self.state.infowindow.setContent("Sorry data can't be loaded");
});
}
/**
* Close the infowindow for the marker
*/
closeInfoWindow() {
if (this.state.prevmarker) {
this.state.prevmarker.setAnimation(null);
}
this.setState({
'prevmarker': ''
});
this.state.infowindow.close();
}
render() {
return (
<div>
<LocationList key="100" alllocations={this.state.alllocations} openInfoWindow={this.openInfoWindow}
closeInfoWindow={this.closeInfoWindow}/>
<div id="map"></div>
</div>
);
}
}
export default App;
/**
 * Asynchronously load an external script (the Google Maps JS API) by
 * inserting a <script> tag before the first script on the page.
 * @param {string} src - Fully qualified script URL, including the callback parameter.
 */
function loadMapJS(src) {
    var ref = window.document.getElementsByTagName("script")[0];
    var script = window.document.createElement("script");
    script.src = src;
    script.async = true;
    script.onerror = function () {
        // BUG FIX: document.write() called after the page has loaded replaces
        // the entire document, destroying the rendered app. Report the
        // failure without blowing the page away.
        console.error("Google Maps can't be loaded");
    };
    ref.parentNode.insertBefore(script, ref);
}
|
#!/bin/bash
# SPDX-License-Identifier: ISC
#
# IBFT 2.0 with permissions IBFT2 w/ setup script.
# 4 node setup
# besu is built using `gradlew installDist`
#
SCRIPTPATH="$( cd "$(dirname "$0")" ; pwd -P )"
BESU_PATH="${SCRIPTPATH}/besu/build/install/besu"
NETWORK="${SCRIPTPATH}/IBFT-NETWORK"
NODE_COUNT='4'
# BUG FIX: BOOTNODE is interpolated on the FIRST iteration of the launch loop
# below (intentionally empty for node 1, which acts as the bootnode). Clear it
# explicitly so a BOOTNODE variable inherited from the environment cannot leak
# into node 1's command line.
BOOTNODE=""
#Create node directories
for((i=1;i<=$NODE_COUNT;i++))
do
mkdir -p "${NETWORK}/Node-${i}/data"
done
# Create IBFT2 Genesis file (heredoc is unquoted so $NODE_COUNT expands)
cat > "$NETWORK/ibftConfigFile.json" << IBFT_CONFIG
{
"genesis": {
"config": {
"chainId": 2018,
"constantinoplefixblock": 0,
"ibft2": {
"blockperiodseconds": 2,
"epochlength": 30000,
"requesttimeoutseconds": 10
}
},
"nonce": "0x0",
"timestamp": "0x58ee40ba",
"gasLimit": "0x47b760",
"difficulty": "0x1",
"mixHash": "0x63746963616c2062797a616e74696e65206661756c7420746f6c6572616e6365",
"coinbase": "0x0000000000000000000000000000000000000000",
"alloc": {
"fe3b557e8fb62b89f4916b721be55ceb828dbd73": {
"privateKey": "8f2a55949038a9610f50fb23b5883af3b4ecb3c3bb792cbcefbd1542c692be63",
"comment": "private key and this comment are ignored. In a real chain, the private key should NOT be stored",
"balance": "0xad78ebc5ac6200000"
},
"627306090abaB3A6e1400e9345bC60c78a8BEf57": {
"privateKey": "c87509a1c067bbde78beb793e6fa76530b6382a4c0241e5e4a9ec0a0f44dc0d3",
"comment": "private key and this comment are ignored. In a real chain, the private key should NOT be stored",
"balance": "90000000000000000000000"
},
"f17f52151EbEF6C7334FAD080c5704D77216b732": {
"privateKey": "ae6ae8e5ccbfb04590405997ee2d52d2b330726137b875053c36d94e974d162f",
"comment": "private key and this comment are ignored. In a real chain, the private key should NOT be stored",
"balance": "90000000000000000000000"
}
}
},
"blockchain": {
"nodes": {
"generate": true,
"count": $NODE_COUNT
}
}
}
IBFT_CONFIG
"$BESU_PATH/bin/besu" --version
"$BESU_PATH/bin/besu" operator generate-blockchain-config --config-file="$NETWORK/ibftConfigFile.json" --to="$NETWORK/networkFiles"
cp "$NETWORK/networkFiles/genesis.json" "$NETWORK/"
#Read the generated keys directories in an array (NUL-delimited, space-safe)
array=()
while IFS= read -r -d $'\0'; do
array+=("$REPLY")
done < <(find "$NETWORK/networkFiles/keys" -maxdepth 1 -mindepth 1 -type d -print0)
# Construct parameter values: sequential p2p and RPC ports plus enode URLs
PPPORT=30303
RPCPORT=8545
declare -a ENODES
declare -a PP_PORTS
declare -a RPC_PORTS
for((i=0;i<${#array[@]};i++))
do
# Strip the leading "0x" from the public key to form the enode id.
NODE_KEY=$(cut -c3- "${array[i]}/key.pub")
PP_PORTS[i]=$((PPPORT + i))
RPC_PORTS[i]=$((RPCPORT + i))
ENODES[i]="\"enode://${NODE_KEY}@127.0.0.1:${PP_PORTS[i]}\""
done
# comma separated enode allowlist
ALLOWLIST=$(IFS=,; echo "${ENODES[*]}")
#Create permissions configuration file (first two accounts from above genesis file)
cat > "$NETWORK/permissions_config.toml" << PERM_CONFIG
accounts-allowlist=["0xfe3b557e8fb62b89f4916b721be55ceb828dbd73", "0x627306090abaB3A6e1400e9345bC60c78a8BEf57"]
nodes-allowlist=[$ALLOWLIST]
PERM_CONFIG
printf "$NETWORK/permissions_config.toml\n"
cat "$NETWORK/permissions_config.toml"
#Copy key files and print node launch commands
for((i=1;i<=${#array[@]};i++))
do
cp "${array[i-1]}/key.pub" "${NETWORK}/Node-${i}/data/key.pub"
cp "${array[i-1]}/key.priv" "${NETWORK}/Node-${i}/data/key"
cp "${NETWORK}/permissions_config.toml" "${NETWORK}/Node-${i}/data/"
printf " *** Node ${i} Command *** \n"
printf "${BESU_PATH}/bin/besu --data-path=${NETWORK}/Node-${i}/data --genesis-file=${NETWORK}/genesis.json $BOOTNODE --p2p-port=${PP_PORTS[i-1]} --rpc-http-port=${RPC_PORTS[i-1]} --permissions-nodes-config-file-enabled --permissions-accounts-config-file-enabled --rpc-http-enabled --rpc-http-api=ADMIN,ETH,NET,PERM,IBFT --host-allowlist=\"*\" --rpc-http-cors-origins=\"all\"\n"
# From iteration 2 onward every node bootstraps from node 1's enode.
BOOTNODE="--bootnodes=${ENODES[0]}"
done
echo -ne "==> Setup complete..."
|
#include <iostream>
// Tracks the placement of a rectangular piece within an image, validating
// requested coordinates before accepting them.
class ImagePieceManager {
private:
    double mImageHeight{0};      // overall image height; also used as the x-axis bound (see note below)
    double mImagePieceWidth{0};
    double mImagePieceHeight{0};
    double mPieceXPosition{0};
    double mPieceYPosition{0};

public:
    // Record the overall image height.
    void setImageHeight(double height) { mImageHeight = height; }

    // Record the width and height of a single piece.
    void setImagePieceDimensions(double width, double height) {
        mImagePieceWidth = width;
        mImagePieceHeight = height;
    }

    // Place the piece at (x, y). Each axis is validated independently; an
    // out-of-range coordinate is reported and that axis keeps its old value.
    void setImagePiecePosition(double x, double y) {
        // NOTE(review): the x bound is checked against mImageHeight because the
        // class stores no image width — presumably the image is assumed square;
        // confirm whether a separate width member is intended.
        if (x >= 0 && x + mImagePieceWidth <= mImageHeight) {
            mPieceXPosition = x;
        } else {
            std::cout << "Invalid x-coordinate position for the image piece." << std::endl;
        }
        if (y >= 0 && y + mImagePieceHeight <= mImageHeight) {
            mPieceYPosition = y;
        } else {
            std::cout << "Invalid y-coordinate position for the image piece." << std::endl;
        }
    }
};
int main() {
ImagePieceManager manager;
manager.setImageHeight(100);
manager.setImagePieceDimensions(20, 30);
manager.setImagePiecePosition(10, 20); // Valid position
manager.setImagePiecePosition(90, 20); // Invalid x-coordinate position
manager.setImagePiecePosition(10, 80); // Invalid y-coordinate position
return 0;
}
|
# Sum the durations between START/STOP timestamp pairs in logbook.txt,
# printing each interval and a grand total as H:MM:SS.
# NOTE(review): `date -j -f` is the BSD/macOS form — this will fail on GNU
# date; confirm the target platform.
total=0
while read -r line; do
    if [[ $line == START* ]]
    then
        date=$(echo $line | cut -c 8-)
        start=$(date -j -f "%a %b %e %H:%M:%S %Z %Y" "$date" +%s)
    elif [[ $line == STOP* ]]
    then
        date=$(echo $line | cut -c 7-)
        stop=$(date -j -f "%a %b %e %H:%M:%S %Z %Y" "$date" +%s)
    else
        # Any other line terminates a START/STOP pair: report its duration.
        diff=$((stop-start))
        total=$((total+diff))
        seconds=$((diff%60))
        minutes=$((diff%3600/60))
        hours=$((diff/3600))
        # BUG FIX: print with printf directly instead of `echo $(printf ...)`,
        # which round-trips the output through word splitting for no benefit.
        printf "%d:%02d:%02d\n" $hours $minutes $seconds
    fi
done < logbook.txt
# BUG FIX: bash's builtin echo does not expand "\n" by default, so the
# original printed a literal backslash-n. Use printf for the blank line.
printf "\n* TOTAL *\n"
seconds=$((total%60))
minutes=$((total%3600/60))
hours=$((total/3600))
printf "%02d:%02d:%02d\n" $hours $minutes $seconds
|
#!/bin/sh
# Self-executing jar launcher: re-invokes this very file with `java -jar`.
MYSELF=`which "$0" 2>/dev/null`
# Fall back to a relative path when the script is not found on $PATH.
[ $? -gt 0 -a -f "$0" ] && MYSELF="./$0"
java=java
# Prefer the JDK pointed to by JAVA_HOME when it is set.
if test -n "$JAVA_HOME"; then
java="$JAVA_HOME/bin/java"
fi
#java_args=-Dlog4j.configurationFile=log4j2.xml
# BUG FIX: quote "$MYSELF" so jar paths containing spaces still work.
# $java_args stays unquoted deliberately so multiple flags word-split.
exec "$java" $java_args -jar "$MYSELF" "$@"
# exec never returns on success; reaching this line means it failed.
exit 1
|
// Rectangle node: fills (and optionally strokes) a rect at its position,
// doubling all dimensions on retina displays.
H7.Rect = H7.Node.extend({
    type: "Rect",
    // Initialisation
    ctor: function (width, height) {
        this.width = width;
        this.height = height;
        this.fillStyle = "";
    },
    render: function (ctx) {
        var pos = this.getPos();
        // Hoist the loop-invariant retina scale factor instead of
        // recomputing `H7.Game.retina ? 2 : 1` for every argument.
        var scale = H7.Game.retina ? 2 : 1;
        if (this.fillStyle) {
            ctx.fillStyle = this.fillStyle;
        }
        ctx.fillRect(pos.x, pos.y, this.width * scale, this.height * scale);
        // Border (defaults to 0, i.e. no border)
        this.borderWidth = this.borderWidth || 0;
        if (this.borderWidth) {
            ctx.lineWidth = this.borderWidth * scale;
            if (this.borderStyle) ctx.strokeStyle = this.borderStyle;
            ctx.strokeRect(pos.x, pos.y, this.width * scale, this.height * scale);
        }
    }
});
|
# initialize autocomplete here, otherwise functions won't be loaded
autoload -U compinit
compinit
# Reuse git's completion definitions for the `hub` wrapper command.
compdef hub=git
|
#!/usr/bin/env bash
# spdkcli NVMe-oF test: builds an RDMA NVMf configuration via spdkcli,
# verifies the resulting tree against the match file, then tears it down.
set -xe
MATCH_FILE="spdkcli_nvmf.test"
SPDKCLI_BRANCH="/nvmf"
testdir=$(readlink -f $(dirname $0))
. $testdir/common.sh
. $testdir/../nvmf/common.sh
timing_enter spdkcli_nvmf
trap 'on_error_exit; revert_soft_roce' ERR
rdma_device_init
timing_enter run_spdk_tgt
run_spdk_tgt
timing_exit run_spdk_tgt
RDMA_IP_LIST=$(get_available_rdma_ips)
NVMF_TARGET_IP=$(echo "$RDMA_IP_LIST" | head -n 1)
timing_enter spdkcli_create_nvmf_config
$spdkcli_job "/bdevs/malloc create 32 512 Malloc1" "Malloc1" True
$spdkcli_job "/bdevs/malloc create 32 512 Malloc2" "Malloc2" True
$spdkcli_job "/bdevs/malloc create 32 512 Malloc3" "Malloc3" True
$spdkcli_job "/bdevs/malloc create 32 512 Malloc4" "Malloc4" True
$spdkcli_job "/bdevs/malloc create 32 512 Malloc5" "Malloc5" True
$spdkcli_job "/bdevs/malloc create 32 512 Malloc6" "Malloc6" True
$spdkcli_job "nvmf/transport create RDMA max_qpairs_per_ctrlr=4 io_unit_size=8192" True
$spdkcli_job "/nvmf/subsystem create nqn.2014-08.org.spdk:cnode1 N37SXV509SRW\
max_namespaces=4 allow_any_host=True" "nqn.2014-08.org.spdk:cnode1" True
$spdkcli_job "/nvmf/subsystem/nqn.2014-08.org.spdk:cnode1/namespaces create Malloc3 1" "Malloc3" True
$spdkcli_job "/nvmf/subsystem/nqn.2014-08.org.spdk:cnode1/namespaces create Malloc4 2" "Malloc4" True
$spdkcli_job "/nvmf/subsystem/nqn.2014-08.org.spdk:cnode1/listen_addresses create \
RDMA $NVMF_TARGET_IP 4260 IPv4" "$NVMF_TARGET_IP:4260" True
$spdkcli_job "/nvmf/subsystem create nqn.2014-08.org.spdk:cnode2 N37SXV509SRD\
max_namespaces=2 allow_any_host=True" "nqn.2014-08.org.spdk:cnode2" True
$spdkcli_job "/nvmf/subsystem/nqn.2014-08.org.spdk:cnode2/namespaces create Malloc2" "Malloc2" True
$spdkcli_job "/nvmf/subsystem/nqn.2014-08.org.spdk:cnode2/listen_addresses create \
RDMA $NVMF_TARGET_IP 4260 IPv4" "$NVMF_TARGET_IP:4260" True
# BUG FIX: the match pattern below said "cnode2" (copy-paste from the previous
# create) even though this command creates cnode3 — match the subsystem that
# was actually created.
$spdkcli_job "/nvmf/subsystem create nqn.2014-08.org.spdk:cnode3 N37SXV509SRR\
max_namespaces=2 allow_any_host=True" "nqn.2014-08.org.spdk:cnode3" True
$spdkcli_job "/nvmf/subsystem/nqn.2014-08.org.spdk:cnode3/namespaces create Malloc1" "Malloc1" True
$spdkcli_job "/nvmf/subsystem/nqn.2014-08.org.spdk:cnode3/listen_addresses create \
RDMA $NVMF_TARGET_IP 4260 IPv4" "$NVMF_TARGET_IP:4260" True
$spdkcli_job "/nvmf/subsystem/nqn.2014-08.org.spdk:cnode3/listen_addresses create \
RDMA $NVMF_TARGET_IP 4261 IPv4" "$NVMF_TARGET_IP:4261" True
$spdkcli_job "/nvmf/subsystem/nqn.2014-08.org.spdk:cnode3/hosts create \
nqn.2014-08.org.spdk:cnode1" "nqn.2014-08.org.spdk:cnode1" True
$spdkcli_job "/nvmf/subsystem/nqn.2014-08.org.spdk:cnode3/hosts create \
nqn.2014-08.org.spdk:cnode2" "nqn.2014-08.org.spdk:cnode2" True
$spdkcli_job "/nvmf/subsystem/nqn.2014-08.org.spdk:cnode1 allow_any_host True" "Allow any host"
$spdkcli_job "/nvmf/subsystem/nqn.2014-08.org.spdk:cnode1 allow_any_host False" "Allow any host" True
$spdkcli_job "/nvmf/subsystem/nqn.2014-08.org.spdk:cnode1/listen_addresses create RDMA $NVMF_TARGET_IP 4261 IPv4" "$NVMF_TARGET_IP:4261" True
$spdkcli_job "/nvmf/subsystem/nqn.2014-08.org.spdk:cnode1/listen_addresses create RDMA $NVMF_TARGET_IP 4262 IPv4" "$NVMF_TARGET_IP:4262" True
$spdkcli_job "/nvmf/subsystem/nqn.2014-08.org.spdk:cnode1/hosts create nqn.2014-08.org.spdk:cnode2" "nqn.2014-08.org.spdk:cnode2" True
$spdkcli_job "/nvmf/subsystem/nqn.2014-08.org.spdk:cnode1/namespaces create Malloc5" "Malloc5" True
$spdkcli_job "/nvmf/subsystem/nqn.2014-08.org.spdk:cnode1/namespaces create Malloc6" "Malloc6" True
timing_exit spdkcli_create_nvmf_config
timing_enter spdkcli_check_match
check_match
timing_exit spdkcli_check_match
# Tear down in reverse order: namespaces, hosts, listeners, subsystems, bdevs.
timing_enter spdkcli_clear_nvmf_config
$spdkcli_job "/nvmf/subsystem/nqn.2014-08.org.spdk:cnode1/namespaces delete nsid=1" "Malloc3"
$spdkcli_job "/nvmf/subsystem/nqn.2014-08.org.spdk:cnode1/namespaces delete_all" "Malloc4"
$spdkcli_job "/nvmf/subsystem/nqn.2014-08.org.spdk:cnode1/hosts delete nqn.2014-08.org.spdk:cnode2" "nqn.2014-08.org.spdk:cnode2"
$spdkcli_job "/nvmf/subsystem/nqn.2014-08.org.spdk:cnode3/hosts delete_all" "nqn.2014-08.org.spdk:cnode1"
$spdkcli_job "/nvmf/subsystem/nqn.2014-08.org.spdk:cnode1/listen_addresses delete RDMA $NVMF_TARGET_IP 4262" "$NVMF_TARGET_IP:4262"
$spdkcli_job "/nvmf/subsystem/nqn.2014-08.org.spdk:cnode1/listen_addresses delete_all" "$NVMF_TARGET_IP:4261"
$spdkcli_job "/nvmf/subsystem delete nqn.2014-08.org.spdk:cnode3" "nqn.2014-08.org.spdk:cnode3"
$spdkcli_job "/nvmf/subsystem delete_all" "nqn.2014-08.org.spdk:cnode2"
$spdkcli_job "/bdevs/malloc delete Malloc6" "Malloc6"
$spdkcli_job "/bdevs/malloc delete Malloc5" "Malloc5"
$spdkcli_job "/bdevs/malloc delete Malloc4" "Malloc4"
$spdkcli_job "/bdevs/malloc delete Malloc3" "Malloc3"
$spdkcli_job "/bdevs/malloc delete Malloc2" "Malloc2"
$spdkcli_job "/bdevs/malloc delete Malloc1" "Malloc1"
timing_exit spdkcli_clear_nvmf_config
killprocess $spdk_tgt_pid
#revert_soft_roce
timing_exit spdkcli_nvmf
report_test_completion spdk_cli_nvmf
|
/*
* Harness feature flag service client apis
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
* Contact: <EMAIL>
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
package io.harness.cfsdk.cloud.core.api;
import io.harness.cfsdk.cloud.core.client.ApiCallback;
import io.harness.cfsdk.cloud.core.client.ApiClient;
import io.harness.cfsdk.cloud.core.client.ApiException;
import io.harness.cfsdk.cloud.core.client.ApiResponse;
import io.harness.cfsdk.cloud.core.client.Configuration;
import io.harness.cfsdk.cloud.core.client.Pair;
import io.harness.cfsdk.cloud.core.client.ProgressRequestBody;
import io.harness.cfsdk.cloud.core.client.ProgressResponseBody;
import com.google.gson.reflect.TypeToken;
import java.io.IOException;
import io.harness.cfsdk.cloud.core.model.AuthenticationRequest;
import io.harness.cfsdk.cloud.core.model.AuthenticationResponse;
import io.harness.cfsdk.cloud.core.model.Error;
import io.harness.cfsdk.cloud.core.model.Evaluation;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class DefaultApi {
private ApiClient localVarApiClient;
/** Creates a client bound to the process-wide default {@link ApiClient} from {@link Configuration}. */
public DefaultApi() {
this(Configuration.getDefaultApiClient());
}
/**
 * Creates a client bound to a specific {@link ApiClient}.
 * @param apiClient the HTTP client used to build and execute all calls
 */
public DefaultApi(ApiClient apiClient) {
this.localVarApiClient = apiClient;
}
/** @return the {@link ApiClient} this API instance uses for all calls */
public ApiClient getApiClient() {
return localVarApiClient;
}
/** @param apiClient replacement {@link ApiClient} used for all subsequent calls */
public void setApiClient(ApiClient apiClient) {
this.localVarApiClient = apiClient;
}
/**
 * Build call for authenticate
 * @param authenticationRequest  (optional)
 * @param _callback Callback for upload/download progress
 * @return Call to execute
 * @throws ApiException If fail to serialize the request body object
 <table summary="Response Details" border="1">
 <tr><td> Status Code </td><td> Description </td><td> Response Headers </td></tr>
 <tr><td> 200 </td><td> OK </td><td>  -  </td></tr>
 <tr><td> 401 </td><td> Unauthenticated </td><td>  -  </td></tr>
 <tr><td> 403 </td><td> Unauthorized </td><td>  -  </td></tr>
 <tr><td> 404 </td><td> The specified resource was not found </td><td>  -  </td></tr>
 <tr><td> 500 </td><td> Internal server error </td><td>  -  </td></tr>
 </table>
 */
public okhttp3.Call authenticateCall(AuthenticationRequest authenticationRequest, final ApiCallback _callback) throws ApiException {
// The request body is the (optional) AuthenticationRequest, serialized by ApiClient.
Object localVarPostBody = authenticationRequest;
// create path and map variables
String localVarPath = "/client/auth";
List<Pair> localVarQueryParams = new ArrayList<Pair>();
List<Pair> localVarCollectionQueryParams = new ArrayList<Pair>();
Map<String, String> localVarHeaderParams = new HashMap<String, String>();
Map<String, String> localVarCookieParams = new HashMap<String, String>();
Map<String, Object> localVarFormParams = new HashMap<String, Object>();
// Negotiate JSON for both the response (Accept) and the request (Content-Type).
final String[] localVarAccepts = {
"application/json"
};
final String localVarAccept = localVarApiClient.selectHeaderAccept(localVarAccepts);
if (localVarAccept != null) {
localVarHeaderParams.put("Accept", localVarAccept);
}
final String[] localVarContentTypes = {
"application/json"
};
final String localVarContentType = localVarApiClient.selectHeaderContentType(localVarContentTypes);
localVarHeaderParams.put("Content-Type", localVarContentType);
// This endpoint requires no authentication scheme.
String[] localVarAuthNames = new String[] {  };
return localVarApiClient.buildCall(localVarPath, "POST", localVarQueryParams, localVarCollectionQueryParams, localVarPostBody, localVarHeaderParams, localVarCookieParams, localVarFormParams, localVarAuthNames, _callback);
}
@SuppressWarnings("rawtypes")
private okhttp3.Call authenticateValidateBeforeCall(AuthenticationRequest authenticationRequest, final ApiCallback _callback) throws ApiException {
    // The request body is optional, so there is nothing to validate;
    // delegate straight to the call builder.
    return authenticateCall(authenticationRequest, _callback);
}
/**
 * Authenticate with the admin server.
 * Used to retrieve all target segments for certain account id.
 * @param authenticationRequest  (optional)
 * @return AuthenticationResponse on HTTP 200
 * @throws ApiException on a non-2xx response (401/403/404/500) or if the
 *         response body cannot be deserialized
 */
public AuthenticationResponse authenticate(AuthenticationRequest authenticationRequest) throws ApiException {
    // Synchronous convenience wrapper: execute with full HTTP info, then unwrap.
    return authenticateWithHttpInfo(authenticationRequest).getData();
}
/**
 * Authenticate with the admin server, returning the full HTTP response.
 * Used to retrieve all target segments for certain account id.
 * @param authenticationRequest  (optional)
 * @return the {@link ApiResponse} wrapping an AuthenticationResponse
 * @throws ApiException on a non-2xx response or deserialization failure
 */
public ApiResponse<AuthenticationResponse> authenticateWithHttpInfo(AuthenticationRequest authenticationRequest) throws ApiException {
    okhttp3.Call call = authenticateValidateBeforeCall(authenticationRequest, null);
    Type responseType = new TypeToken<AuthenticationResponse>(){}.getType();
    return localVarApiClient.execute(call, responseType);
}
/**
 * Authenticate with the admin server (asynchronously).
 * Used to retrieve all target segments for certain account id.
 * @param authenticationRequest  (optional)
 * @param _callback invoked with the AuthenticationResponse or the failure
 * @return the in-flight request call (can be cancelled)
 * @throws ApiException if the request body cannot be serialized
 */
public okhttp3.Call authenticateAsync(AuthenticationRequest authenticationRequest, final ApiCallback<AuthenticationResponse> _callback) throws ApiException {
    okhttp3.Call call = authenticateValidateBeforeCall(authenticationRequest, _callback);
    Type responseType = new TypeToken<AuthenticationResponse>(){}.getType();
    localVarApiClient.executeAsync(call, responseType, _callback);
    return call;
}
/**
 * Build call for getEvaluationByIdentifier
 * @param environmentUUID Unique identifier for the environment object in the API. (required)
 * @param feature Unique identifier for the flag object in the API. (required)
 * @param target Unique identifier for the target object in the API. (required)
 * @param _callback Callback for upload/download progress
 * @return Call to execute
 * @throws ApiException If fail to serialize the request body object
 *
 <table summary="Response Details" border="1">
 <tr><td> Status Code </td><td> Description </td><td> Response Headers </td></tr>
 <tr><td> 200 </td><td> OK </td><td>  -  </td></tr>
 </table>
 */
public okhttp3.Call getEvaluationByIdentifierCall(String environmentUUID, String feature, String target, final ApiCallback _callback) throws ApiException {
// GET request: no body.
Object localVarPostBody = null;
// create path and map variables
// Substitute all three path parameters, URL-escaping each value.
String localVarPath = "/client/env/{environmentUUID}/target/{target}/evaluations/{feature}"
.replaceAll("\\{" + "environmentUUID" + "\\}", localVarApiClient.escapeString(environmentUUID.toString()))
.replaceAll("\\{" + "feature" + "\\}", localVarApiClient.escapeString(feature.toString()))
.replaceAll("\\{" + "target" + "\\}", localVarApiClient.escapeString(target.toString()));
List<Pair> localVarQueryParams = new ArrayList<Pair>();
List<Pair> localVarCollectionQueryParams = new ArrayList<Pair>();
Map<String, String> localVarHeaderParams = new HashMap<String, String>();
Map<String, String> localVarCookieParams = new HashMap<String, String>();
Map<String, Object> localVarFormParams = new HashMap<String, Object>();
final String[] localVarAccepts = {
"application/json"
};
final String localVarAccept = localVarApiClient.selectHeaderAccept(localVarAccepts);
if (localVarAccept != null) {
localVarHeaderParams.put("Accept", localVarAccept);
}
// No request body, hence no candidate content types.
final String[] localVarContentTypes = {
};
final String localVarContentType = localVarApiClient.selectHeaderContentType(localVarContentTypes);
localVarHeaderParams.put("Content-Type", localVarContentType);
String[] localVarAuthNames = new String[] {  };
return localVarApiClient.buildCall(localVarPath, "GET", localVarQueryParams, localVarCollectionQueryParams, localVarPostBody, localVarHeaderParams, localVarCookieParams, localVarFormParams, localVarAuthNames, _callback);
}
@SuppressWarnings("rawtypes")
private okhttp3.Call getEvaluationByIdentifierValidateBeforeCall(String environmentUUID, String feature, String target, final ApiCallback _callback) throws ApiException {
    // All three path parameters are mandatory; fail fast before building the call.
    if (environmentUUID == null) {
        throw new ApiException("Missing the required parameter 'environmentUUID' when calling getEvaluationByIdentifier(Async)");
    }
    if (feature == null) {
        throw new ApiException("Missing the required parameter 'feature' when calling getEvaluationByIdentifier(Async)");
    }
    if (target == null) {
        throw new ApiException("Missing the required parameter 'target' when calling getEvaluationByIdentifier(Async)");
    }
    return getEvaluationByIdentifierCall(environmentUUID, feature, target, _callback);
}
/**
 * Get the evaluation of a single feature flag for a target.
 * @param environmentUUID Unique identifier for the environment object in the API. (required)
 * @param feature Unique identifier for the flag object in the API. (required)
 * @param target Unique identifier for the target object in the API. (required)
 * @return the Evaluation on HTTP 200
 * @throws ApiException on a non-2xx response or deserialization failure
 */
public Evaluation getEvaluationByIdentifier(String environmentUUID, String feature, String target) throws ApiException {
    // Synchronous convenience wrapper: execute with full HTTP info, then unwrap.
    return getEvaluationByIdentifierWithHttpInfo(environmentUUID, feature, target).getData();
}
/**
 * Get the evaluation of a single feature flag for a target, with full HTTP info.
 * @param environmentUUID Unique identifier for the environment object in the API. (required)
 * @param feature Unique identifier for the flag object in the API. (required)
 * @param target Unique identifier for the target object in the API. (required)
 * @return the {@link ApiResponse} wrapping an Evaluation
 * @throws ApiException on a non-2xx response or deserialization failure
 */
public ApiResponse<Evaluation> getEvaluationByIdentifierWithHttpInfo(String environmentUUID, String feature, String target) throws ApiException {
    okhttp3.Call call = getEvaluationByIdentifierValidateBeforeCall(environmentUUID, feature, target, null);
    Type responseType = new TypeToken<Evaluation>(){}.getType();
    return localVarApiClient.execute(call, responseType);
}
/**
 * Get the evaluation of a single feature flag for a target (asynchronously).
 * @param environmentUUID Unique identifier for the environment object in the API. (required)
 * @param feature Unique identifier for the flag object in the API. (required)
 * @param target Unique identifier for the target object in the API. (required)
 * @param _callback invoked with the Evaluation or the failure
 * @return the in-flight request call (can be cancelled)
 * @throws ApiException if a required parameter is missing
 */
public okhttp3.Call getEvaluationByIdentifierAsync(String environmentUUID, String feature, String target, final ApiCallback<Evaluation> _callback) throws ApiException {
    okhttp3.Call call = getEvaluationByIdentifierValidateBeforeCall(environmentUUID, feature, target, _callback);
    Type responseType = new TypeToken<Evaluation>(){}.getType();
    localVarApiClient.executeAsync(call, responseType, _callback);
    return call;
}
/**
 * Build call for getEvaluations
 * @param environmentUUID Unique identifier for the environment object in the API. (required)
 * @param target Unique identifier for the target object in the API. (required)
 * @param _callback Callback for upload/download progress
 * @return Call to execute
 * @throws ApiException If fail to serialize the request body object
 *
 <table summary="Response Details" border="1">
 <tr><td> Status Code </td><td> Description </td><td> Response Headers </td></tr>
 <tr><td> 200 </td><td> OK </td><td>  -  </td></tr>
 </table>
 */
public okhttp3.Call getEvaluationsCall(String environmentUUID, String target, final ApiCallback _callback) throws ApiException {
// GET request: no body.
Object localVarPostBody = null;
// create path and map variables
// Substitute both path parameters, URL-escaping each value.
String localVarPath = "/client/env/{environmentUUID}/target/{target}/evaluations"
.replaceAll("\\{" + "environmentUUID" + "\\}", localVarApiClient.escapeString(environmentUUID.toString()))
.replaceAll("\\{" + "target" + "\\}", localVarApiClient.escapeString(target.toString()));
List<Pair> localVarQueryParams = new ArrayList<Pair>();
List<Pair> localVarCollectionQueryParams = new ArrayList<Pair>();
Map<String, String> localVarHeaderParams = new HashMap<String, String>();
Map<String, String> localVarCookieParams = new HashMap<String, String>();
Map<String, Object> localVarFormParams = new HashMap<String, Object>();
final String[] localVarAccepts = {
"application/json"
};
final String localVarAccept = localVarApiClient.selectHeaderAccept(localVarAccepts);
if (localVarAccept != null) {
localVarHeaderParams.put("Accept", localVarAccept);
}
// No request body, hence no candidate content types.
final String[] localVarContentTypes = {
};
final String localVarContentType = localVarApiClient.selectHeaderContentType(localVarContentTypes);
localVarHeaderParams.put("Content-Type", localVarContentType);
String[] localVarAuthNames = new String[] {  };
return localVarApiClient.buildCall(localVarPath, "GET", localVarQueryParams, localVarCollectionQueryParams, localVarPostBody, localVarHeaderParams, localVarCookieParams, localVarFormParams, localVarAuthNames, _callback);
}
@SuppressWarnings("rawtypes")
private okhttp3.Call getEvaluationsValidateBeforeCall(String environmentUUID, String target, final ApiCallback _callback) throws ApiException {
    // Both path parameters are mandatory; fail fast before building the call.
    if (environmentUUID == null) {
        throw new ApiException("Missing the required parameter 'environmentUUID' when calling getEvaluations(Async)");
    }
    if (target == null) {
        throw new ApiException("Missing the required parameter 'target' when calling getEvaluations(Async)");
    }
    return getEvaluationsCall(environmentUUID, target, _callback);
}
/**
 * Get all feature evaluations for a target.
 * @param environmentUUID Unique identifier for the environment object in the API. (required)
 * @param target Unique identifier for the target object in the API. (required)
 * @return the list of Evaluations on HTTP 200
 * @throws ApiException on a non-2xx response or deserialization failure
 */
public List<Evaluation> getEvaluations(String environmentUUID, String target) throws ApiException {
    // Synchronous convenience wrapper: execute with full HTTP info, then unwrap.
    return getEvaluationsWithHttpInfo(environmentUUID, target).getData();
}
/**
 * Get all feature evaluations for a target, with full HTTP info.
 * @param environmentUUID Unique identifier for the environment object in the API. (required)
 * @param target Unique identifier for the target object in the API. (required)
 * @return the {@link ApiResponse} wrapping a List of Evaluations
 * @throws ApiException on a non-2xx response or deserialization failure
 */
public ApiResponse<List<Evaluation>> getEvaluationsWithHttpInfo(String environmentUUID, String target) throws ApiException {
    okhttp3.Call call = getEvaluationsValidateBeforeCall(environmentUUID, target, null);
    Type responseType = new TypeToken<List<Evaluation>>(){}.getType();
    return localVarApiClient.execute(call, responseType);
}
/**
* Get feature evaluations for target (asynchronously)
*
* @param environmentUUID Unique identifier for the environment object in the API. (required)
* @param target Unique identifier for the target object in the API. (required)
* @param _callback The callback to be executed when the API call finishes
* @return The request call
* @throws ApiException If fail to process the API call, e.g. serializing the request body object
*
<table summary="Response Details" border="1">
<tr><td> Status Code </td><td> Description </td><td> Response Headers </td></tr>
<tr><td> 200 </td><td> OK </td><td> - </td></tr>
</table>
*/
public okhttp3.Call getEvaluationsAsync(String environmentUUID, String target, final ApiCallback<List<Evaluation>> _callback) throws ApiException {
okhttp3.Call localVarCall = getEvaluationsValidateBeforeCall(environmentUUID, target, _callback);
Type localVarReturnType = new TypeToken<List<Evaluation>>(){}.getType();
localVarApiClient.executeAsync(localVarCall, localVarReturnType, _callback);
return localVarCall;
}
/**
* Build call for stream
* @param environmentId Unique UUID for the environemnt object in the API. (required)
* @param _callback Callback for upload/download progress
* @return Call to execute
* @throws ApiException If fail to serialize the request body object
*
<table summary="Response Details" border="1">
<tr><td> Status Code </td><td> Description </td><td> Response Headers </td></tr>
<tr><td> 200 </td><td> OK </td><td> * Content-Type - <br> * Cache-Control - <br> * Connection - <br> * Access-Control-Allow-Origin - <br> </td></tr>
<tr><td> 503 </td><td> Service Unavailable </td><td> - </td></tr>
</table>
*/
public okhttp3.Call streamCall(String environmentId, final ApiCallback _callback) throws ApiException {
Object localVarPostBody = null;
// create path and map variables
String localVarPath = "/stream/environments/{environmentId}"
.replaceAll("\\{" + "environmentId" + "\\}", localVarApiClient.escapeString(environmentId.toString()));
List<Pair> localVarQueryParams = new ArrayList<Pair>();
List<Pair> localVarCollectionQueryParams = new ArrayList<Pair>();
Map<String, String> localVarHeaderParams = new HashMap<String, String>();
Map<String, String> localVarCookieParams = new HashMap<String, String>();
Map<String, Object> localVarFormParams = new HashMap<String, Object>();
final String[] localVarAccepts = {
};
final String localVarAccept = localVarApiClient.selectHeaderAccept(localVarAccepts);
if (localVarAccept != null) {
localVarHeaderParams.put("Accept", localVarAccept);
}
final String[] localVarContentTypes = {
};
final String localVarContentType = localVarApiClient.selectHeaderContentType(localVarContentTypes);
localVarHeaderParams.put("Content-Type", localVarContentType);
String[] localVarAuthNames = new String[] { "BearerAuth" };
return localVarApiClient.buildCall(localVarPath, "GET", localVarQueryParams, localVarCollectionQueryParams, localVarPostBody, localVarHeaderParams, localVarCookieParams, localVarFormParams, localVarAuthNames, _callback);
}
@SuppressWarnings("rawtypes")
private okhttp3.Call streamValidateBeforeCall(String environmentId, final ApiCallback _callback) throws ApiException {
// verify the required parameter 'environmentId' is set
if (environmentId == null) {
throw new ApiException("Missing the required parameter 'environmentId' when calling stream(Async)");
}
okhttp3.Call localVarCall = streamCall(environmentId, _callback);
return localVarCall;
}
/**
* Stream endpoint.
*
* @param environmentId Unique UUID for the environemnt object in the API. (required)
* @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
*
<table summary="Response Details" border="1">
<tr><td> Status Code </td><td> Description </td><td> Response Headers </td></tr>
<tr><td> 200 </td><td> OK </td><td> * Content-Type - <br> * Cache-Control - <br> * Connection - <br> * Access-Control-Allow-Origin - <br> </td></tr>
<tr><td> 503 </td><td> Service Unavailable </td><td> - </td></tr>
</table>
*/
public void stream(String environmentId) throws ApiException {
streamWithHttpInfo(environmentId);
}
/**
* Stream endpoint.
*
* @param environmentId Unique UUID for the environemnt object in the API. (required)
* @return ApiResponse<Void>
* @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
*
<table summary="Response Details" border="1">
<tr><td> Status Code </td><td> Description </td><td> Response Headers </td></tr>
<tr><td> 200 </td><td> OK </td><td> * Content-Type - <br> * Cache-Control - <br> * Connection - <br> * Access-Control-Allow-Origin - <br> </td></tr>
<tr><td> 503 </td><td> Service Unavailable </td><td> - </td></tr>
</table>
*/
public ApiResponse<Void> streamWithHttpInfo(String environmentId) throws ApiException {
okhttp3.Call localVarCall = streamValidateBeforeCall(environmentId, null);
return localVarApiClient.execute(localVarCall);
}
/**
* Stream endpoint. (asynchronously)
*
* @param environmentId Unique UUID for the environemnt object in the API. (required)
* @param _callback The callback to be executed when the API call finishes
* @return The request call
* @throws ApiException If fail to process the API call, e.g. serializing the request body object
*
<table summary="Response Details" border="1">
<tr><td> Status Code </td><td> Description </td><td> Response Headers </td></tr>
<tr><td> 200 </td><td> OK </td><td> * Content-Type - <br> * Cache-Control - <br> * Connection - <br> * Access-Control-Allow-Origin - <br> </td></tr>
<tr><td> 503 </td><td> Service Unavailable </td><td> - </td></tr>
</table>
*/
public okhttp3.Call streamAsync(String environmentId, final ApiCallback<Void> _callback) throws ApiException {
okhttp3.Call localVarCall = streamValidateBeforeCall(environmentId, _callback);
localVarApiClient.executeAsync(localVarCall, _callback);
return localVarCall;
}
}
|
<reponame>sankalpana/AdventOfCode2021
package tech.houssemnasri;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
public class Part1 {
public static void run(String inputPath) throws IOException {
List<List<String>> heightMap = Files.readAllLines(Path.of(inputPath))
.stream().map(s -> Arrays.asList(s.split(""))).collect(Collectors.toList());
int riskLevelsSum = 0;
for (int i = 0; i < heightMap.size(); i++) {
List<String> row = heightMap.get(i);
for (int j = 0; j < row.size(); j++) {
int point = Integer.parseInt(row.get(j));
List<Integer> neighbours = new ArrayList<>();
if (j > 0) {
neighbours.add(Integer.parseInt(row.get(j - 1)));
}
if (j < row.size() - 1) {
neighbours.add(Integer.parseInt(row.get(j + 1)));
}
if (i > 0) {
neighbours.add(Integer.parseInt(heightMap.get(i - 1).get(j)));
}
if (i < heightMap.size() - 1) {
neighbours.add(Integer.parseInt(heightMap.get(i + 1).get(j)));
}
if (neighbours.stream().mapToInt(Integer::intValue).min().getAsInt() > point) {
riskLevelsSum += 1 + point;
}
}
}
System.out.println("Risk Levels Sum: " + riskLevelsSum);
}
}
|
#!/bin/bash
set -ev
cd /build/build-cross
make -j 2 all
export CTEST_OUTPUT_ON_FAILURE=TRUE
ctest -j 2
make install
|
import React from 'react';
import { ContentLink } from 'components/Link';
export default class LinkNode extends React.Component {
render() {
const { href, children } = this.props;
return <ContentLink href={href}>{children}</ContentLink>;
}
}
|
/*
* This file is part of Pwxe, a PHP extension to stop execution
* of writable files.
*
* Author: <NAME> <<EMAIL>>
*/
#ifndef PHP_PWXE_H
#define PHP_PWXE_H 1
#define PHP_PWXE_VERSION "0.1"
#define PHP_PWXE_EXTNAME "PWXE"
extern void pwxe_zend_init(TSRMLS_D);
extern void pwxe_zend_shutdown(TSRMLS_D);
PHP_MINIT_FUNCTION(pwxe);
PHP_MSHUTDOWN_FUNCTION(pwxe);
PHP_RINIT_FUNCTION(pwxe);
PHP_RSHUTDOWN_FUNCTION(pwxe);
PHP_MINFO_FUNCTION(pwxe);
extern zend_module_entry pwxe_module_entry;
#define phpext_pwxe_ptr &pwxe_module_entry
#endif
|
import React from 'react';
import { storiesOf } from '@storybook/react';
import { action } from '@storybook/addon-actions';
import { withKnobs, boolean, select } from '@storybook/addon-knobs';
import SplitButton from '../SplitButton';
import OverflowMenuItem from '../OverflowMenuItem';
const sizes = {
Default: 'default',
Field: 'field',
Small: 'small',
};
const props = {
regular: () => {
return {
classNameContainer: 'some-class',
classNameButton: 'some-class',
classNameOverflow: 'some-class',
size: select('Button size (size)', sizes, 'default'),
disabled: boolean('Disabled (disabled)', false),
};
},
items: () => {
return {
onClick: action('onClick'),
};
},
};
SplitButton.displayName = 'Button';
storiesOf('Pattern|SplitButton', module)
.addDecorator(withKnobs)
.add(
'Default',
() => {
const regularProps = props.regular();
const itemProps = props.items();
return (
<SplitButton {...regularProps}>
<OverflowMenuItem itemText={'Item 1'} {...itemProps} />
<OverflowMenuItem itemText={'Item 2'} {...itemProps} />
<OverflowMenuItem itemText={'Item 3'} {...itemProps} />
<OverflowMenuItem itemText={'Item 4'} {...itemProps} />
</SplitButton>
);
},
{
info: {
text: `
A split button can be used to group a series of actions together into a single location on the screen, providing
the user with a single primary action and a small list of alternative actions.
`,
},
}
);
|
#!/bin/bash
set -euo pipefail
cacert=/usr/share/logstash/config/ca/ca.crt
# Wait for ca file to exist before we continue. If the ca file doesn't exist then something went wrong.
while [ ! -f $cacert ]
do
echo
echo 'Wait for ca file ❗️'
echo
sleep 2
done
ls -l $cacert
es_url=https://elasticsearch:9200
# Wait for Elasticsearch to start up before doing anything.
while [[ "$(curl -u "elastic:${ELASTIC_PASSWORD}" --cacert $cacert -s -o /dev/null -w '%{http_code}' $es_url)" != "200" ]]; do
echo
echo 'Wait for Elasticsearch ❗️'
sleep 5
done
echo
echo 'Changing password for [logstash_system] user ⚠️'
echo
# Set the password for the logstash user.
# REF: https://www.elastic.co/guide/en/x-pack/6.2/setting-up-authentication.html#set-built-in-user-passwords
until curl -u "elastic:${ELASTIC_PASSWORD}" --cacert $cacert -s -H 'Content-Type:application/json' \
-XPUT $es_url/_xpack/security/user/logstash_system/_password \
-d "{\"password\": \"${ELASTIC_PASSWORD}\"}"
do
sleep 2
echo 'Retrying...'
done
echo
echo '[LOGSTASH] Password changed successfully ✅'
echo
echo "=== CREATE Keystore ==="
if [ -f /config/logstash/logstash.keystore ]; then
echo "Remove old logstash.keystore"
rm /config/logstash/logstash.keystore
fi
echo "y" | /usr/share/logstash/bin/logstash-keystore create
echo "Setting ELASTIC_PASSWORD..."
echo "$ELASTIC_PASSWORD" | /usr/share/logstash/bin/logstash-keystore add 'ELASTIC_PASSWORD' -x
mv /usr/share/logstash/config/logstash.keystore /config/logstash/logstash.keystore
|
<gh_stars>0
import React from 'react';
import facebookicon from '../../../public/images/facebook.png';
import twittericon from '../../../public/images/twitter.png';
import linkedinicon from '../../../public/images/linkedin.png';
import instagramicon from '../../../public/images/instagram.png';
class Footer extends React.Component {
state = {
error: null,
redes: [],
};
componentDidMount(){
this.fetchData();
}
fetchData = async () => {
try{
const response = await fetch('http://127.0.0.1:8888/api/redes/1');
var redes = await response.json();
this.setState({redes: redes});
this.setState({redes: JSON.parse(redes)});
}catch(error){
this.setState({error: error});
}
}
render() {
function footerContent(state) {
return(
<div className="content">
<center>
{(state.redes.Facebook) && (
<div className="footer-social">
<a href={state.redes.Facebook} target="_blank">
<img src={facebookicon} className="social-icon" alt="icono de facebook"/>
</a>
</div>
)}
{(state.redes.Twitter) && (
<div className="footer-social">
<a href={state.redes.Twitter} target="_blank">
<img src={twittericon} className="social-icon" alt="icono de twitter"/>
</a>
</div>)}
{(state.redes.Instagram) && (
<div className="footer-social">
<a href={state.redes.Instagram} target="_blank">
<img src={instagramicon} className="social-icon" alt="icono de instagram"/>
</a>
</div>
)}
{(state.redes.Linkedin) && (
<div className="footer-social">
<a href={state.redes.Linkedin} target="_blank">
<img src={linkedinicon} className="social-icon" alt="icono de linkedin"/>
</a>
</div>
)}
</center>
</div>
);
}
if(this.props.type=="normal"){
return(
<div className="footer footer2">
{footerContent(this.state)}
</div>
);
}else{
return(
<div className="footer fixed-bottom">
{footerContent(this.state)}
</div>
);
}
}
}
export default Footer;
|
package io.opensphere.core.util.swing.input.model;
import java.util.EventListener;
/**
* Defines an object which listens for PropertyChangeEvents.
*/
@FunctionalInterface
public interface PropertyChangeListener extends EventListener
{
/**
* Invoked when the target of the listener has changed its state.
*
* @param e a PropertyChangeEvent object
*/
void stateChanged(PropertyChangeEvent e);
}
|
<filename>Plugins/flibbles/relink/js/relinkoperations/text/wikitext/wikilink.js
/*\
module-type: relinkwikitextrule
Handles CamelCase links
WikiLink
but not:
~WikiLink
\*/
var utils = require("./utils.js");
exports.name = "wikilink";
exports.report = function(text, callback, options) {
var title = this.match[0],
unlink = $tw.config.textPrimitives.unWikiLink;
this.parser.pos = this.matchRegExp.lastIndex;
if (title[0] !== unlink) {
callback(title, unlink + title);
}
};
exports.relink = function(text, fromTitle, toTitle, options) {
var entry = undefined,
title = this.match[0];
this.parser.pos = this.matchRegExp.lastIndex;
if (title === fromTitle && title[0] !== $tw.config.textPrimitives.unWikiLink) {
entry = { output: this.makeWikilink(toTitle, options) };
if (entry.output === undefined) {
entry.impossible = true;
}
}
return entry;
};
exports.makeWikilink = function(title, options) {
if (title.match(this.matchRegExp) && title[0] !== $tw.config.textPrimitives.unWikiLink) {
return title;
} else {
return utils.makePrettylink(this.parser, title);
}
};
|
<reponame>phetsims/dot<gh_stars>1-10
// Copyright 2016-2021, University of Colorado Boulder
/**
* A numeric range with a required default value.
*
* @author <NAME> (PixelZoom, Inc.)
* @author <NAME> (PhET Interactive Simulations)
*/
import Range from './Range.js';
import dot from './dot.js';
class RangeWithValue extends Range {
/**
* @param {number} min - the minimum value of the range
* @param {number} max - the maximum value of the range
* @param {number} defaultValue - default value inside the range
*/
constructor( min, max, defaultValue ) {
super( min, max );
assert && assert( defaultValue !== undefined, 'default value is required' );
assert && assert( defaultValue >= min && defaultValue <= max, `defaultValue is out of range: ${defaultValue}` );
// @private
this._defaultValue = defaultValue;
}
/**
* Getter for defaultValue
* @returns {number}
* @public
*/
getDefaultValue() {
return this._defaultValue;
}
get defaultValue() {
return this.getDefaultValue();
}
/**
* Setter for min
* @param {number} min
* @public
* @override
*/
setMin( min ) {
assert && assert( this._defaultValue >= min, `min must be <= defaultValue: ${min}` );
super.setMin( min );
}
/**
* Setter for max
* @param {number} max
* @public
* @override
*/
setMax( max ) {
assert && assert( this._defaultValue <= max, `max must be >= defaultValue: ${max}` );
super.setMax( max );
}
/**
* Setter for min and max
* @param {number} min
* @param {number} max
* @public
* @override
*/
setMinMax( min, max ) {
assert && assert( this._defaultValue >= min, `min must be <= defaultValue: ${min}` );
assert && assert( this._defaultValue <= max, `max must be >= defaultValue: ${max}` );
super.setMinMax( min, max );
}
/**
* Converts the attributes of this range to a string
* @public
* @returns {string}
* @override
*/
toString() {
return `[RangeWithValue (min:${this.min} max:${this.max} defaultValue:${this._defaultValue})]`;
}
/**
* Determines if this RangeWithValue is equal to some object.
* @public
* @param {*} object
* @returns {boolean}
* @override
*/
equals( object ) {
return ( this.constructor === object.constructor ) &&
( this._defaultValue === object.defaultValue ) &&
super.equals( object );
}
}
dot.register( 'RangeWithValue', RangeWithValue );
export default RangeWithValue;
|
#!/bin/bash
set -e
cwd=$(pwd)
cd $(dirname "$0")/..
source .travis/sanitize.sh
sanitize deploy
. quark-travis/bin/activate
if [[ -f ~/.nvm/nvm.sh ]]; then
. ~/.nvm/nvm.sh
fi
set -x
npm install travis-after-all
if $(npm bin)/travis-after-all ; then
SUCCESS="$?"
echo "All tests in matrix passed, checking deployment conditions"
else
SUCCESS="$?"
case "$SUCCESS" in
1)
echo "Some tests in matrix failed, checking deployment conditions"
;;
2)
echo "I am not the master, done"
exit 0
;;
*)
echo "Trouble with travis-after-all ($SUCCESS)"
exit $SUCCESS
;;
esac
fi
if [[ "$TRAVIS_REPO_SLUG" != "datawire/quark" ]]; then
echo "Only CI for forks, skipping CD"
exit $SUCCESS
fi
if [[ "$TRAVIS_PULL_REQUEST" != "false" ]]; then
echo "Only CI for pull requests, skipping CD"
exit $SUCCESS
fi
DEPLOY=false
STAGE=undefined
case "$SUCCESS-$TRAVIS_BRANCH" in
0-master)
DEPLOY="master"
STAGE="initial"
;;
1-master)
DEPLOY="master"
STAGE="failed"
;;
0-develop | 0-quarkdev-ci)
DEPLOY="develop"
STAGE="initial"
;;
1-develop | 1-quarkdev-ci)
DEPLOY="develop"
STAGE="failed"
;;
*)
echo "Only CI for branch $TRAVIS_BRANCH, skipping CD"
exit $SUCCESS
;;
esac
if [[ "$CI" = "true" ]]; then
echo "Setting up write access for github and pypi"
git remote set-branches --add origin master && git fetch --unshallow
git config --global credential.helper store
git config --global user.email "automaton@datawire.io"
git config --global user.name "Continuous Delivery"
git config --global push.default simple
( set +x; echo "https://$GITHUB_ACCESS_TOKEN:x-oauth-basic@github.com" > ~/.git-credentials )
( set +x; cat <<EOF > ~/.pypirc
[distutils]
index-servers=pypi
[pypi]
repository = https://pypi.python.org/pypi
username = $ENCRYPTED_PYPI_USERNAME
password = $ENCRYPTED_PYPI_PASSWORD
EOF
)
fi
case "$STAGE-$DEPLOY" in
initial-master)
echo "TODO MASTER DEPLOY"
exit 1
;;
initial-develop)
pip install twine sphinx-better-theme
NEXT_VERSION=$(scripts/compute-next-version)
TAG="dev-$NEXT_VERSION"
COMMIT=$(git rev-parse HEAD)
./release version --dev "$NEXT_VERSION"
./release --cicd push-docs
./release --cicd push-pkgs
git tag -a -m "CI tests pass for $TAG" "$TAG" "$COMMIT"
git push origin "$TAG"
exit $SUCCESS
;;
failed-master)
echo "TODO: tag failed release build?"
exit $SUCCESS
;;
failed-develop)
echo "TODO: tag failed dev build?"
exit $SUCCESS
;;
*)
echo "Unhandled deploy mode $STAGE-$DEPLOY"
exit 1
;;
esac
|
<filename>pytest-svn/setup.py<gh_stars>100-1000
import sys
import os
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
from setuptools import setup
from common_setup import common_setup
classifiers = [
'License :: OSI Approved :: MIT License',
'Development Status :: 5 - Production/Stable',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Testing',
'Topic :: Utilities',
'Intended Audience :: Developers',
'Operating System :: POSIX',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
]
install_requires = ['pytest',
'pytest-shutil',
]
tests_require = [
]
entry_points = {
'pytest11': [
'svn_repo = pytest_svn',
]
}
if __name__ == '__main__':
kwargs = common_setup('pytest_svn')
kwargs.update(dict(
name='pytest-svn',
description='SVN repository fixture for py.test',
platforms=['unix', 'linux'],
author='<NAME>',
author_email='<EMAIL>',
classifiers=classifiers,
install_requires=install_requires,
tests_require=tests_require,
py_modules=['pytest_svn'],
entry_points=entry_points,
))
setup(**kwargs)
|
package com.yoavfranco.wikigame.adapters;
import android.app.Activity;
import android.support.v7.widget.RecyclerView;
import android.text.format.DateUtils;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.view.animation.AnticipateInterpolator;
import android.view.animation.ScaleAnimation;
import android.widget.Button;
import android.widget.TextView;
import com.github.siyamed.shapeimageview.CircularImageView;
import com.squareup.picasso.Picasso;
import com.yoavfranco.wikigame.HTTP.WikiGameAPI;
import com.yoavfranco.wikigame.HTTP.WikiGameInterface;
import com.yoavfranco.wikigame.R;
import com.yoavfranco.wikigame.utils.Challenge;
import com.yoavfranco.wikigame.utils.Consts;
import com.yoavfranco.wikigame.utils.ErrorDialogs;
import com.yoavfranco.wikigame.utils.Friend;
import com.yoavfranco.wikigame.utils.FriendRequest;
import org.json.JSONException;
import org.json.JSONObject;
import org.w3c.dom.Text;
import java.util.ArrayList;
import java.util.List;
public class FriendRequestItemAdapter extends RecyclerView.Adapter<FriendRequestItemAdapter.MyViewHolder> {
private List<FriendRequest> friendRequests;
Activity mContext;
FriendRequestType friendRequestType;
FriendRequestAdapterInterface parent;
private int listSize;
public class MyViewHolder extends RecyclerView.ViewHolder {
public TextView usernameTextView;
public TextView tvHasntResponded, stateTextView;
public Button confirmButton, ignoreButton;
public CircularImageView userCountryImageView;
public TextView timeAgoTextView;
public MyViewHolder(View view) {
super(view);
usernameTextView = (TextView) view.findViewById(R.id.tvChallengerName);
userCountryImageView = (CircularImageView) view.findViewById(R.id.ciCountry);
confirmButton = (Button) view.findViewById(R.id.bConfirm);
ignoreButton = (Button) view.findViewById(R.id.bIgnore);
timeAgoTextView = (TextView) view.findViewById(R.id.tvTimeAgo);
tvHasntResponded = (TextView) view.findViewById(R.id.tvHasntResponded);
stateTextView = (TextView) view.findViewById(R.id.tvFriendState);
}
}
public void swap(ArrayList<FriendRequest> newData) {
swap(newData, this.friendRequests.size() != 0);
}
public void swap(ArrayList<FriendRequest> newData, boolean animated) {
if (animated) {
removeAll();
addAll(newData);
} else {
this.friendRequests.clear();
this.friendRequests.addAll(newData);
this.listSize = this.friendRequests.size();
notifyDataSetChanged();
}
}
private void addAll(List<FriendRequest> suggestionsFriendsList)
{
for (int i = 0; i < suggestionsFriendsList.size(); i++) {
addItem(i, suggestionsFriendsList.get(i));
}
this.listSize = friendRequests.size();
}
private void removeAll()
{
friendRequests.clear();
notifyItemRangeRemoved(0, listSize);
}
public void addItem(int position, FriendRequest leaderboardItem) {
friendRequests.add(position, leaderboardItem);
notifyItemInserted(position);
}
public void removeItem(int position) {
friendRequests.remove(position);
notifyItemRemoved(position);
}
public FriendRequestItemAdapter(Activity activity, List<FriendRequest> suggestionsFriendsList, FriendRequestType friendRequestType, FriendRequestAdapterInterface parent) {
// WARNING: the following line creates a shallow copy of the ArrayList, meaning that if the list is updated, one must call swap().
// not only notifyDataSetChanged().
this.friendRequests = new ArrayList<>(suggestionsFriendsList);
this.mContext = activity;
this.friendRequestType = friendRequestType;
this.parent = parent;
this.listSize = suggestionsFriendsList.size();
}
@Override
public MyViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
View itemView = LayoutInflater.from(parent.getContext())
.inflate(R.layout.friend_request_item, parent, false);
return new MyViewHolder(itemView);
}
@Override
public void onBindViewHolder(final MyViewHolder holder, final int position) {
final FriendRequest friendRequest = friendRequests.get(position);
holder.timeAgoTextView.setText(DateUtils.getRelativeTimeSpanString(friendRequest.getSentTime().getTime()));
Picasso.with(holder.userCountryImageView.getContext()).load(friendRequest.getFlagURL()).fit().placeholder(R.drawable.progress_animation).into(holder.userCountryImageView);
if (friendRequestType == FriendRequestType.Sent) {
holder.confirmButton.setVisibility(View.GONE);
holder.ignoreButton.setVisibility(View.GONE);
holder.usernameTextView.setText(friendRequest.getReceiverUsername());
holder.tvHasntResponded.setVisibility(View.VISIBLE);
} else if (friendRequestType == FriendRequestType.Pending){
holder.tvHasntResponded.setVisibility(View.GONE);
holder.usernameTextView.setText(friendRequest.getSenderUsername());
}
final ScaleAnimation scale = (ScaleAnimation) AnimationUtils.loadAnimation(mContext, R.anim.shrink);
scale.setInterpolator(new AnticipateInterpolator());
scale.setFillAfter(true);
scale.setDuration(300);
scale.setAnimationListener(new Animation.AnimationListener(){
@Override
public void onAnimationStart(Animation arg0) {
}
@Override
public void onAnimationRepeat(Animation arg0) {
}
@Override
public void onAnimationEnd(Animation arg0) {
// holder.stateTextView.setText("");
holder.stateTextView.setVisibility(View.VISIBLE);
holder.stateTextView.animate().alpha(0.7f).setDuration(200);
holder.confirmButton.setVisibility(View.GONE);
holder.ignoreButton.setVisibility(View.GONE);
}
});
holder.confirmButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
holder.confirmButton.setAnimation(scale);
holder.confirmButton.startAnimation(scale);
holder.ignoreButton.setAnimation(scale);
holder.ignoreButton.startAnimation(scale);
// TODO: Remove item with animation
// TODO: Loading animation
WikiGameAPI wikiGameAPI = new WikiGameAPI();
wikiGameAPI.acceptFriendRequestAsync(friendRequest.getSenderUsername(), new WikiGameInterface(mContext) {
@Override
public void onFinishedProcessingWikiRequest(JSONObject response) {
try {
if (response.getString(Consts.STATUS_CODE_KEY).equals(Consts.STATUS_OK)) {
Challenge newChallenge = response.has("new_challenge") ? Challenge.fromJSON(response.getJSONObject("new_challenge")) : null;
Friend newFriend = new Friend(friendRequest.getSenderUsername(), friendRequest.getCountryCode(), friendRequest.getFlagURL());
newFriend.setChallenge(newChallenge);
holder.stateTextView.setText("Friend successfully added");
// friendRequests.remove(position);
// notifyDataSetChanged();
//parent.onFriendRequestRemoved(friendRequest, true, newFriend);
} else {
holder.stateTextView.setText("Failed to confirm friend");
}
} catch (JSONException e) {
ErrorDialogs.showBadResponseDialog(getActivityContext(), false);
e.printStackTrace();
}
}
});
}
});
holder.ignoreButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
holder.confirmButton.setAnimation(scale);
holder.confirmButton.startAnimation(scale);
holder.ignoreButton.setAnimation(scale);
holder.ignoreButton.startAnimation(scale);
// TODO: Remove item with animation
// TODO: Loading animation
final FriendRequest friendRequest = friendRequests.get(position);
WikiGameAPI wikiGameAPI = new WikiGameAPI();
wikiGameAPI.ignoreFriendRequestAsync(friendRequests.get(position).getSenderUsername(), new WikiGameInterface(mContext) {
@Override
public void onFinishedProcessingWikiRequest(JSONObject response) {
try {
if (response.getString(Consts.STATUS_CODE_KEY).equals(Consts.STATUS_OK)) {
holder.stateTextView.setText("Friend successfully ignored");
parent.onFriendRequestRemoved(friendRequest, false, null);
} else {
holder.stateTextView.setText("Failed to ignore friend");
}
} catch (JSONException e) {
holder.stateTextView.setText("Failed to ignore friend");
ErrorDialogs.showBadResponseDialog(getActivityContext(), false);
e.printStackTrace();
}
}
});
}
});
}
@Override
public int getItemCount() {
return friendRequests.size();
}
}
|
<filename>src/main/java/com/github/chen0040/leetcode/day15/package-info.java
/**
* Created by xschen on 10/8/2017.
*/
package com.github.chen0040.leetcode.day15;
|
#!/bin/bash
IMAGE=nscrypt
if [ "$1" != "" ] ; then
IMAGE=$1
fi
IMAGEDIR=$HOME/volumes/$IMAGE
mkdir -p $IMAGEDIR
pushd $IMAGEDIR
if [ ! -e $IMAGEDIR/home ] ; then
# This will fail on docker < 1.8
id=$($SUDO docker create $IMAGE)
$SUDO docker cp $id:/home - | tar xf -
$SUDO docker rm -v $id
fi
if [ ! -e $IMAGEDIR/var/log ] ; then
mkdir -p var/log
chmod a+rwx var/log
$SUDO docker run \
-v $IMAGEDIR/var:/mnt \
$IMAGE \
cp -a -P -R /var/log /mnt
fi
if [ ! -e $IMAGEDIR/etc ] ; then
mkdir -p etc
chmod a+rwx etc
$SUDO docker run \
-v $IMAGEDIR:/mnt \
$IMAGE \
cp -a -P -R /etc /mnt
fi
$SUDO docker run \
--privileged -v /sys/fs/cgroup:/sys/fs/cgroup:ro \
-v $IMAGEDIR/home:/home \
-v $IMAGEDIR/etc:/etc \
-p 80:80 -p 8000:8000 -p 8080:8080 -p 3000:3000 $IMAGE &
echo "Docker started"
|
. /vagrant/config/setup.env
/u01/app/21.3.0.0/grid/gridSetup.sh -silent -executeConfigTools \
-responseFile /u01/app/21.3.0.0/grid/install/response/gridsetup.rsp \
INVENTORY_LOCATION=/u01/app/oraInventory \
SELECTED_LANGUAGES=en,en_GB \
oracle.install.option=CRS_CONFIG \
ORACLE_BASE=/u01/app/grid \
oracle.install.asm.OSDBA=asmdba \
oracle.install.asm.OSOPER=asmoper \
oracle.install.asm.OSASM=asmadmin \
oracle.install.crs.config.scanType=LOCAL_SCAN \
oracle.install.crs.config.gpnp.scanName=vgt-213-fpp-scan \
oracle.install.crs.config.gpnp.scanPort=1521 \
oracle.install.crs.config.clusterName=vgt-213-fpp-c \
oracle.install.crs.config.ClusterConfiguration=STANDALONE \
oracle.install.crs.config.configureAsExtendedCluster=false \
oracle.install.crs.configureGIMR=false \
oracle.install.crs.config.clusterNodes=fpps.localdomain:fpps-vip.localdomain:HUB \
oracle.install.crs.config.networkInterfaceList=eth1:192.168.125.0:1,eth2:192.168.200.0:5 \
oracle.install.crs.config.gpnp.configureGNS=false \
oracle.install.crs.config.autoConfigureClusterNodeVIP=false \
oracle.install.asm.configureGIMRDataDG=false \
oracle.install.crs.config.useIPMI=false \
oracle.install.asm.storageOption=ASM \
oracle.install.asmOnNAS.configureGIMRDataDG=false \
oracle.install.asm.SYSASMPassword=welcome1 \
oracle.install.asm.diskGroup.name=DATA \
oracle.install.asm.diskGroup.redundancy=EXTERNAL \
oracle.install.asm.diskGroup.AUSize=4 \
oracle.install.asm.diskGroup.disks=/dev/oracleasm/disks/ORCL_DISK1_P1,/dev/oracleasm/disks/ORCL_DISK2_P1,/dev/oracleasm/disks/ORCL_DISK3_P1,/dev/oracleasm/disks/ORCL_DISK4_P1,/dev/oracleasm/disks/ORCL_DISK5_P1,/dev/oracleasm/disks/ORCL_DISK6_P1,/dev/oracleasm/disks/ORCL_DISK7_P1,/dev/oracleasm/disks/ORCL_DISK8_P1 \
oracle.install.asm.diskGroup.diskDiscoveryString=/dev/oracleasm/disks/ORCL_* \
oracle.install.asm.gimrDG.AUSize=1 \
oracle.install.asm.monitorPassword=welcome1 \
oracle.install.crs.configureRHPS=false \
oracle.install.crs.config.ignoreDownNodes=false \
oracle.install.config.managementOption=NONE \
oracle.install.config.omsPort=0 \
oracle.install.crs.rootconfig.executeRootScript=false
|
import { Injectable, InternalServerErrorException } from '@nestjs/common';
import { AccountService } from '../accounts/accounts.service';
import { AuthService } from '../auth/auth.service';
@Injectable()
export class WithdrawService {
  constructor(
    private readonly accountService: AccountService,
    private readonly authService: AuthService,
  ) {}

  /**
   * Withdraws `amount` from the account of the currently logged-in user.
   *
   * @param req    incoming request, used to resolve the caller's identity
   * @param amount value to withdraw
   * @returns a confirmation message when the account service reports success
   * @throws InternalServerErrorException with the account service's message
   *         when the withdrawal is rejected
   */
  async requestWithdraw(req, amount) {
    // Keep the resolved email in a local instead of instance state: Nest
    // providers are singletons by default, so a `this.userEmail` field is
    // shared across concurrent requests and can leak one user's identity
    // into another user's withdrawal.
    const userEmail = await this.authService.getLoggedUserEmail(req);
    // NOTE(review): `disccountWithdraw` is the account service's actual
    // (misspelled) method name — renaming it is out of scope here.
    const newWithdraw = await this.accountService.disccountWithdraw(
      userEmail,
      amount,
    );
    if (newWithdraw.status === 'ok') {
      return {
        message: `The value ${amount} is already available.`,
      };
    }
    throw new InternalServerErrorException(newWithdraw.message);
  }
}
|
<reponame>nawazkhan/calendso<gh_stars>0
/*
  Warnings:
  - You are about to drop the column `smartContractAddress` on the `EventType` table. All the data in the column will be lost.
*/
-- AlterTable
-- IF EXISTS makes the drop idempotent for environments where the column was
-- never created, so re-running the migration does not fail.
ALTER TABLE "EventType" DROP COLUMN IF EXISTS "smartContractAddress";
-- AlterTable
-- Column is nullable with DEFAULT false; on PostgreSQL existing rows are
-- backfilled with the default, and inserts that omit the column get false.
ALTER TABLE "users" ADD COLUMN "verified" BOOLEAN DEFAULT false;
|
// Initial shape of the registration slice: an empty user form plus
// request-lifecycle flags.
const defaultState = {
  user: {
    firstname: '',
    lastname: '',
    email: '',
    number: '',
    password: '',
  },
  fetching: false,
  fetched: false,
  error: null
}

// Tracks the lifecycle of the registration request
// (pending -> fulfilled | rejected).
const registrationReducer = (state = defaultState, action) => {
  if (action.type === 'FETCH_REG_PENDING') {
    // Request started: clear previous outcome, raise the in-flight flag.
    return { ...state, fetched: false, error: null, fetching: true };
  }
  if (action.type === 'FETCH_REG_REJECTED') {
    return { ...state, fetching: false, fetched: false, error: action.payload };
  }
  if (action.type === 'FETCH_REG_FULFILLED') {
    // NOTE: error is reset to `false` here (not `null` as in the other
    // branches) — preserved as-is because consumers may compare strictly.
    return { ...state, fetched: true, fetching: false, error: false, user: action.payload };
  }
  return state;
};

export default registrationReducer;
|
import React, { useState } from 'react';
import { Container, LogoBar, Options, Footer } from './styles';
import iconStarctron from '../../assets/icon-starctron.png';
import { Link } from 'react-router-dom';
import { FiGrid, FiLayers, FiLogOut, FiBook, FiCalendar, FiMessageSquare, FiAlignLeft } from 'react-icons/fi';
import { FaUser, FaHome } from 'react-icons/fa';
import { RiLockPasswordFill, RiShieldUserLine, RiLayoutColumnFill } from 'react-icons/ri'
import { ImListNumbered } from 'react-icons/im';
import { AiFillApi, AiFillBuild } from 'react-icons/ai';
import SmallButton from '../SmallButton';
import Title from '../Title';
function Menu(props){
const [openFooterMenu, setOpenFooterMenu] = useState(false)
const options = [
{
title: 'Home',
link: '/home',
icon: <FaHome size={20}/> // Put a Icon Component here
},
{
title: 'Layout',
link: '/layout',
icon: <FiGrid size={20}/> // Put a Icon Component here
},
{
title: 'Menu Items',
link: '/menuitems',
icon: <ImListNumbered size={20}/> // Put a Icon Component here
},
{
title: 'Context API',
link: '/contextapi',
icon: <AiFillApi size={20}/> // Put a Icon Component here
},
{
title: 'User Login',
link: '/userlogin',
icon: <RiShieldUserLine size={20}/> // Put a Icon Component here
},
{
title: 'Theme',
link: '/theme',
icon: <RiLayoutColumnFill size={20}/> // Put a Icon Component here
},
{
title: 'Build the App',
link: '/buildapp',
icon: <AiFillBuild size={20}/> // Put a Icon Component here
},
{
title: 'Page 1',
link: '/page1',
icon: <FiLayers size={20}/> // Put a Icon Component here
},
{
title: 'Page 2',
link: '/page2',
icon: <FiLayers size={20}/> // Put a Icon Component here
},
{
title: 'Page 3',
link: '/page3',
icon: <FiLayers size={20}/> // Put a Icon Component here
},
{
title: 'Page 4',
link: '/page4',
icon: <FiLayers size={20}/> // Put a Icon Component here
},
{
title: 'SignIn',
link: '/signin',
icon: <FaUser size={20}/> // Put a Icon Component here
},
{
title: 'ForgotPassord',
link: '/forgotpassword',
icon: <RiLockPasswordFill size={20}/> // Put a Icon Component here
}
]
function handleOpenFooterMenu(){
setOpenFooterMenu(!openFooterMenu);
}
return (
<Container>
<LogoBar>
<img style={{width: props.isSmallMenu ? '50%': '20%'}} src={iconStarctron} alt="Logo"/>{!props.isSmallMenu && <Title>Starctron</Title>}
</LogoBar>
<Options>
{
options.map(option => (
<Link key={option.link} to={option.link} className={props.active === option.title ? 'active' : ''}>
<div style={{width: `${props.isSmallMenu ? '100%' : (props.width + 20) + '%'}`}}>{option.icon}</div>
{!props.isSmallMenu && <div>{option.title}</div>}
</Link>
))
}
</Options>
<Footer>
{
props.isSmallMenu ?
<SmallButton onClick={handleOpenFooterMenu}><FiAlignLeft size={20}/></SmallButton>
:
<>
<SmallButton><FiLogOut size={18}/></SmallButton>
<SmallButton><FiBook size={18}/></SmallButton>
<SmallButton><FiMessageSquare size={18}/></SmallButton>
<SmallButton><FiCalendar size={18}/></SmallButton>
</>
}
{
(props.isSmallMenu && openFooterMenu) &&
<div style={{ width: props.width * 4 + 'vw', left: props.width + 'vw'}}>
<SmallButton><FiLogOut size={18}/></SmallButton>
<SmallButton><FiBook size={18}/></SmallButton>
<SmallButton><FiMessageSquare size={18}/></SmallButton>
<SmallButton><FiCalendar size={18}/></SmallButton>
</div>
}
</Footer>
</Container>
)
}
export default Menu;
|
<reponame>Ranger-X/anycable
# frozen_string_literal: true
require "anycable/rpc/handlers/connect"
require "anycable/rpc/handlers/disconnect"
require "anycable/rpc/handlers/command"
module AnyCable
  module RPC
    # Generic RPC handler.
    #
    # Dispatches incoming RPC commands to the handler modules mixed in below
    # (connect / disconnect / command), wrapping every invocation in the
    # configured middleware chain.
    class Handler
      include Handlers::Connect
      include Handlers::Disconnect
      include Handlers::Command

      # middleware: callable chain invoked around every command; defaults to
      # the globally configured AnyCable.middleware.
      def initialize(middleware: AnyCable.middleware)
        @middleware = middleware
        # NOTE(review): @commands is initialized but never populated in this
        # file — presumably filled/used elsewhere; confirm before removing.
        @commands = {}
      end

      # Runs +cmd+ through the middleware chain; the block dispatches to the
      # method of the same name contributed by one of the included handler
      # modules (e.g. #connect, #disconnect, #command).
      def handle(cmd, data, meta = {})
        middleware.call(cmd, data, meta) do
          send(cmd, data)
        end
      end

      private

      attr_reader :commands, :middleware

      # Wraps the incoming request env in a socket stub for handlers to use.
      def build_socket(env:)
        AnyCable::Socket.new(env: env)
      end

      # Collects connection/channel state changes to return to the RPC server.
      def build_env_response(socket)
        AnyCable::EnvResponse.new(
          cstate: socket.cstate.changed_fields,
          istate: socket.istate.changed_fields
        )
      end

      def logger
        AnyCable.logger
      end

      # Factory used to build connection objects for incoming requests.
      def factory
        AnyCable.connection_factory
      end
    end
  end
end
|
def get_value(dict, key):
    """Return ``dict[key]``, or ``None`` when the lookup fails for any reason.

    The parameter name shadows the builtin ``dict``; it is kept for backward
    compatibility with callers that pass it by keyword.
    """
    try:
        return dict[key]
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit are
        # no longer swallowed; lookup errors (KeyError, IndexError, TypeError)
        # still yield None as before.
        return None
def set_value(dict, key, value):
    """Assign ``dict[key] = value``, printing a message on failure.

    Best-effort by design: failures are reported to stdout, never raised.
    The parameter name shadows the builtin ``dict``; kept for backward
    compatibility with existing callers.
    """
    try:
        dict[key] = value
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # propagate; assignment errors are still reported, not raised.
        print("Error setting value")
|
<reponame>wangxy1994/mydevkit<filename>exoskeleton/src/main/java/com/wangxy/exoskeleton/risk/util/BigDecimalUtil.java<gh_stars>0
package com.wangxy.exoskeleton.risk.util;
import java.math.BigDecimal;
import java.math.RoundingMode;
/********************************************
 * File name:   BigDecimalUtil.java
 * System:      Funds risk management system
 * Module:
 * Copyright:
 * Description: Exact decimal arithmetic helpers for double values. Doubles
 *              are converted via Double.toString() into BigDecimal so binary
 *              floating-point error does not leak into the results.
 * Version:
 * Developers:
 * Created:
 * Reviewers:
 * Documents:
 * Change log:  date      author   description
 *              20190822  daijy    added exponentiation method
 *********************************************/
public class BigDecimalUtil {

    public static final String VERSION = "@system 资金交易风险管理系统V2.0 @version 2.5.0.1 @lastModiDate 2019-07-09 @describe ";

    /** Default scale (decimal digits) used by {@link #div(double, double)}. */
    private static final int DEF_DIV_SCALE = 16;

    /**
     * Exact addition.
     *
     * @param v1 augend
     * @param v2 addend
     * @return the exact sum of the two arguments
     */
    public static double add(double v1, double v2) {
        BigDecimal b1 = new BigDecimal(Double.toString(v1));
        BigDecimal b2 = new BigDecimal(Double.toString(v2));
        return b1.add(b2).doubleValue();
    }

    /**
     * Adds several values, accumulating from left to right.
     *
     * @param param values to add (at least one required)
     * @return the exact sum of all arguments
     * @throws IllegalArgumentException if no argument is given
     */
    public static double add(double... param) {
        if (0 == param.length) {
            throw new IllegalArgumentException("illegal argument");
        }
        double retValue = 0.0D;
        for (int i = 0; i < param.length; i++) {
            retValue = BigDecimalUtil.add(retValue, param[i]);
        }
        return retValue;
    }

    /**
     * Exact subtraction.
     *
     * @param v1 minuend
     * @param v2 subtrahend
     * @return the exact difference of the two arguments
     */
    public static double sub(double v1, double v2) {
        BigDecimal b1 = new BigDecimal(Double.toString(v1));
        BigDecimal b2 = new BigDecimal(Double.toString(v2));
        return b1.subtract(b2).doubleValue();
    }

    /**
     * Subtracts several values from left to right: param[0] - param[1] - ...
     *
     * @param param values (at least one required)
     * @return the running difference
     * @throws IllegalArgumentException if no argument is given
     */
    public static double sub(double... param) {
        if (param.length == 0) {
            throw new IllegalArgumentException("illegal argument");
        }
        double retValue = param[0];
        for (int i = 1; i < param.length; i++) {
            retValue = BigDecimalUtil.sub(retValue, param[i]);
        }
        return retValue;
    }

    /**
     * Exact multiplication.
     *
     * @param v1 multiplicand
     * @param v2 multiplier
     * @return the exact product of the two arguments
     */
    public static double mul(double v1, double v2) {
        BigDecimal b1 = new BigDecimal(Double.toString(v1));
        BigDecimal b2 = new BigDecimal(Double.toString(v2));
        return b1.multiply(b2).doubleValue();
    }

    /**
     * Multiplies several values from left to right.
     *
     * @param param values to multiply (at least one required)
     * @return the product of all arguments
     * @throws IllegalArgumentException if no argument is given
     */
    public static double mul(double... param) {
        if (0 == param.length) {
            throw new java.lang.IllegalArgumentException("illegal argument");
        }
        double retValue = 1;
        for (int i = 0; i < param.length; i++) {
            retValue = BigDecimalUtil.mul(retValue, param[i]);
        }
        return retValue;
    }

    /**
     * Division with the default precision ({@value #DEF_DIV_SCALE} decimal
     * digits, half-up rounding of non-terminating quotients).
     *
     * @param v1 dividend
     * @param v2 divisor
     * @return the quotient of the two arguments
     */
    public static double div(double v1, double v2) {
        return div(v1, v2, DEF_DIV_SCALE);
    }

    /**
     * Division with caller-specified precision; non-terminating quotients are
     * rounded half-up at {@code scale} decimal digits.
     *
     * @param v1    dividend
     * @param v2    divisor
     * @param scale number of decimal digits to keep (must be >= 0)
     * @return the rounded quotient
     * @throws IllegalArgumentException if {@code scale} is negative
     */
    public static double div(double v1, double v2, int scale) {
        if (scale < 0) {
            throw new IllegalArgumentException(
                    "The scale must be a positive integer or zero");
        }
        BigDecimal b1 = new BigDecimal(Double.toString(v1));
        BigDecimal b2 = new BigDecimal(Double.toString(v2));
        // RoundingMode.HALF_UP replaces the deprecated BigDecimal.ROUND_HALF_UP
        // int constant; behavior is identical.
        return b1.divide(b2, scale, RoundingMode.HALF_UP).doubleValue();
    }

    /**
     * Exact exponentiation.
     *
     * @param number base
     * @param power  exponent (non-negative, per BigDecimal.pow's contract)
     * @return base raised to the given power
     * @author daijy
     * @since 20190822
     */
    public static double power(double number, int power) {
        BigDecimal b1 = new BigDecimal(Double.toString(number));
        return b1.pow(power).doubleValue();
    }

    /**
     * Rounds half-up at the given number of decimal digits.
     *
     * @param v     value to round
     * @param scale number of decimal digits to keep (must be >= 0)
     * @return the rounded value
     * @throws IllegalArgumentException if {@code scale} is negative
     */
    public static double round(double v, int scale) {
        if (scale < 0) {
            throw new IllegalArgumentException(
                    "The scale must be a positive integer or zero");
        }
        BigDecimal b = new BigDecimal(Double.toString(v));
        BigDecimal one = new BigDecimal("1");
        return b.divide(one, scale, RoundingMode.HALF_UP).doubleValue();
    }

    /**
     * Rounds up (toward positive infinity) at the given number of decimal
     * digits.
     *
     * @param v     value to round up
     * @param scale number of decimal digits to keep (must be >= 0)
     * @return the rounded-up value
     * @throws IllegalArgumentException if {@code scale} is negative
     */
    public static double carry(double v, int scale) {
        if (scale < 0) {
            throw new IllegalArgumentException(
                    "The scale must be a positive integer or zero");
        }
        BigDecimal b = new BigDecimal(Double.toString(v));
        BigDecimal one = new BigDecimal("1");
        // RoundingMode.CEILING replaces the deprecated BigDecimal.ROUND_CEILING.
        return b.divide(one, scale, RoundingMode.CEILING).doubleValue();
    }

    /**
     * Truncates at the given number of decimal digits (shift, floor, shift
     * back; floors toward negative infinity for negative inputs).
     *
     * @param v     value to truncate
     * @param scale number of decimal digits to keep (must be >= 0)
     * @return the truncated value
     * @throws IllegalArgumentException if {@code scale} is negative
     */
    public static double trunc(double v, int scale) {
        if (scale < 0) {
            throw new IllegalArgumentException(
                    "The scale must be a positive integer or zero");
        }
        return BigDecimalUtil.div(Math.floor(BigDecimalUtil.mul(v, Math.pow(10, scale))), Math.pow(10, scale));
    }

    /**
     * Compares two values.
     *
     * @param val1 first value
     * @param val2 second value
     * @return 0 if equal; a positive integer if val1 &gt; val2; a negative
     *         integer if val1 &lt; val2 (Double.compare semantics)
     */
    public static int compare(double val1, double val2) {
        return Double.compare(val1, val2);
    }

    /**
     * Applies the requested precision handling.
     *
     * @param inputVal input value
     * @param scale    number of decimal digits to keep
     * @param mode     0: round half-up; 1: truncate; 2: round up (ceiling);
     *                 any other value falls back to half-up rounding
     * @return the adjusted value
     */
    public static double precision(double inputVal, int scale, int mode) {
        if (mode == 1) {
            return trunc(inputVal, scale);
        } else if (mode == 2) {
            return carry(inputVal, scale);
        } else {
            return round(inputVal, scale);
        }
    }

    /**
     * Checks that the fractional part of {@code value} uses at most
     * {@code num} decimal digits.
     *
     * @param value value whose decimal digits are checked
     * @param num   maximum allowed number of decimal digits
     * @return true when the value fits within {@code num} decimal digits
     */
    public static boolean checkEffectiveNumberBits(double value, int num) {
        // Isolate the fractional part; only it determines the decimal digits.
        double tmpAmt = Math.abs(BigDecimalUtil.sub(value, new BigDecimal(Double.toString(value)).longValue()));
        if (num > 0 && tmpAmt > 0) {
            for (int i = 0; i < num; i++) {
                tmpAmt = BigDecimalUtil.mul(tmpAmt, 10);
            }
            // If digits remain after shifting num places, the value has more
            // than num decimal digits.
            if (BigDecimalUtil.compare(tmpAmt, Math.floor(tmpAmt)) > 0) {
                return false;
            }
        }
        return true;
    }

    /**
     * Returns the smaller of two values (Double.compare semantics; note that
     * this differs from Math.min for NaN inputs).
     *
     * @param val1 first value
     * @param val2 second value
     * @return the smaller value (val2 when equal)
     */
    public static double minValue(double val1, double val2) {
        if (BigDecimalUtil.compare(val1, val2) >= 0) {
            return val2;
        } else {
            return val1;
        }
    }

    /**
     * Returns the larger of two values (Double.compare semantics; note that
     * this differs from Math.max for NaN inputs).
     *
     * @param val1 first value
     * @param val2 second value
     * @return the larger value (val2 when equal)
     */
    public static double maxValue(double val1, double val2) {
        if (BigDecimalUtil.compare(val1, val2) <= 0) {
            return val2;
        } else {
            return val1;
        }
    }

    /**
     * Divisibility check: whether v1 is evenly divisible by v2.
     * By convention a zero divisor returns true.
     *
     * @param v1 dividend
     * @param v2 divisor
     * @return true when v2 is zero or v1 % v2 == 0
     */
    public static boolean modIsZero(double v1, long v2) {
        if (v2 == 0) {
            return true;
        }
        BigDecimal d1 = BigDecimal.valueOf(v1);
        BigDecimal d2 = BigDecimal.valueOf(v2);
        return d1.remainder(d2).compareTo(BigDecimal.ZERO) == 0;
    }

    /**
     * Divisibility check: whether v1 is evenly divisible by v2.
     * By convention a zero divisor returns true.
     *
     * @param v1 dividend
     * @param v2 divisor
     * @return true when v2 is zero or v1 % v2 == 0
     */
    public static boolean modIsZero(double v1, double v2) {
        if (Double.compare(v2, 0.0d) == 0) {
            return true;
        }
        BigDecimal d1 = BigDecimal.valueOf(v1);
        BigDecimal d2 = BigDecimal.valueOf(v2);
        return d1.remainder(d2).compareTo(BigDecimal.ZERO) == 0;
    }
}
|
import tensorflow as tf
from tensorflow.keras.datasets import mnist
# NOTE(review): matplotlib is imported but never used in this script.
import matplotlib.pyplot as plt

# Load the MNIST handwritten-digit dataset (28x28 grayscale images).
(x_train, y_train), (x_test, y_test) = mnist.load_data()
# Reshape the dataset: add a trailing channel axis so Conv2D accepts the input.
x_train = x_train.reshape(x_train.shape[0], 28, 28, 1)
x_test = x_test.reshape(x_test.shape[0], 28, 28, 1)
# Normalize pixel values from [0, 255] to [0, 1].
x_train = x_train / 255
x_test = x_test / 255
# Create a model and compile it: one conv/pool stage, then a dense classifier
# with a 10-way softmax (one class per digit).
model = tf.keras.models.Sequential([
tf.keras.layers.Conv2D(32, (3,3), activation='relu', input_shape=(28, 28, 1)),
tf.keras.layers.MaxPooling2D(2, 2),
tf.keras.layers.Flatten(),
tf.keras.layers.Dense(128, activation='relu'),
tf.keras.layers.Dense(10, activation='softmax')
])
# sparse_categorical_crossentropy matches the integer (non-one-hot) labels.
model.compile(optimizer='adam',
loss='sparse_categorical_crossentropy',
metrics=['accuracy'])
# Train the model
model.fit(x_train, y_train, epochs=10)
|
<gh_stars>0
import {
Component, OnInit, AfterViewInit,
ChangeDetectionStrategy, ChangeDetectorRef
} from '@angular/core';
import { Title, Meta } from '@angular/platform-browser';
import { MatSnackBar } from '@angular/material/snack-bar';
import { MatTableDataSource } from '@angular/material/table';
import { DownloadsService } from '../../services/downloads.service';
import { IDownloadCategory, IDownload } from '../../core/downloads';
import { IDictionary } from '../../core/utils';
// Loading state for the page-level spinner.
enum State {
  Idle,
  Loading
}

@Component({
  selector: 'app-downloads',
  templateUrl: './downloads.component.html',
  styleUrls: ['./downloads.component.scss'],
  changeDetection: ChangeDetectionStrategy.OnPush
})
export class DownloadsComponent implements OnInit, AfterViewInit {
  // Exposes the enum to the template.
  readonly State = State;

  private _state = State.Idle;
  private _loadingDownloadCategories = false;
  private _loadingDownloads = false;

  //#region Table
  displayedColumns = ['name', 'description', 'version'];
  // One table data source per download category id.
  dataSources: IDictionary<MatTableDataSource<IDownload>> = {};
  //#endregion

  downloadCategories: IDictionary<IDownloadCategory> = {};

  //#region Lifecycle
  constructor(private title: Title,
              private meta: Meta,
              private cd: ChangeDetectorRef,
              private snackBar: MatSnackBar,
              private downloadsService: DownloadsService) {
  }

  ngOnInit() {
    this.title.setTitle('Energon Software - Downloads');
    this.meta.updateTag({
      name: 'description',
      content: 'Downloads',
    });
  }

  ngAfterViewInit() {
    // Data loading kicks off after the view exists.
    this.getDataAsync();
  }
  //#endregion

  get state() {
    return this._state;
  }

  // With OnPush change detection, every state transition must trigger a
  // manual change-detection pass.
  set state(state: State) {
    this._state = state;
    this.cd.detectChanges();
  }

  // Returns to Idle only once both concurrent loads have finished.
  private idle() {
    if (!this._loadingDownloadCategories
        && !this._loadingDownloads) {
      this.state = State.Idle;
    }
  }

  get hasDownloads() {
    return Object.keys(this.dataSources).length > 0;
  }

  getDownloadCategory(downloadCategoryId: string) {
    // The fallback keeps the template rendering when a download references a
    // category that is missing or failed to load.
    return this.downloadCategories[downloadCategoryId] ?? {
      id: 'missing',
      title: 'MISSING',
      // Fixed property name: was misspelled `decription`, so the template's
      // description binding rendered nothing for missing categories.
      description: 'DOWNLOAD CATEGORY MISSING',
    };
  }

  private async getDataAsync() {
    await Promise.all([
      this.getDownloadCategoriesAsync(),
      this.getDownloadsAsync(),
    ]);
  }

  private async getDownloadCategoriesAsync() {
    this.state = State.Loading;
    this._loadingDownloadCategories = true;
    try {
      const response = await this.downloadsService.getDownloadCategoriesAsync();
      this.downloadCategories = {};
      for (const downloadCategory of response.download_categories) {
        this.downloadCategories[downloadCategory.id] = downloadCategory;
      }
    } catch (error) {
      this.snackBar.open(`Download Categories Load Error: ${error}`, 'OK', {
        panelClass: 'es-warn',
      });
    } finally {
      this._loadingDownloadCategories = false;
      this.idle();
    }
  }

  private async getDownloadsAsync() {
    this.dataSources = {};
    this.state = State.Loading;
    this._loadingDownloads = true;
    try {
      const response = await this.downloadsService.getDownloadsAsync();
      // Group downloads by category first, then hand each group to its data
      // source in one shot: MatTableDataSource only re-renders when its
      // `data` property is assigned, not when the underlying array is
      // mutated in place via push().
      const grouped: IDictionary<IDownload[]> = {};
      for (const download of response.downloads) {
        if (!grouped[download.category]) {
          grouped[download.category] = [];
        }
        grouped[download.category].push(download);
      }
      for (const category of Object.keys(grouped)) {
        this.dataSources[category] = new MatTableDataSource<IDownload>(grouped[category]);
      }
    } catch (error) {
      this.snackBar.open(`Downloads Load Error: ${error}`, 'OK', {
        panelClass: 'es-warn',
      });
    } finally {
      this._loadingDownloads = false;
      this.idle();
    }
  }
}
|
#
# Defines Homebrew aliases.
#
# Authors:
# Sorin Ionescu <sorin.ionescu@gmail.com>
# Qi Zhang <singularitti@outlook.com>
#
# Return if requirements are not found.
if ! [[ "${OSTYPE}" =~ (darwin|linux)* ]]; then
return 1
fi
#
# Aliases
#
# Homebrew
alias brewc='brew cleanup'
alias brewC='brew cleanup --force'
alias brewi='brew install'
alias brewl='brew list'
alias brewo='brew outdated'
alias brews='brew search'
alias brewu='brew update && brew upgrade'
alias brewx='brew remove'
# Homebrew Cask
alias cask='brew cask'
alias caskc='brew cask cleanup --outdated'
alias caskC='brew cask cleanup'
alias caski='brew cask install'
alias caskl='brew cask list'
alias casko='brew cask outdated'
alias caskx='brew cask uninstall'
casks() {
print "Calling \`brew cask search\` is deprecated! Please use \`brews\` instead!"
}
|
# frozen_string_literal: true

# Renames the `users` table to `contributors`.
class RenameUsersToContributors < ActiveRecord::Migration[6.0]
  def change
    # rename_table is automatically reversible, so a single `change` method
    # suffices (no separate up/down needed).
    rename_table :users, :contributors
  end
end
|
/// <summary>
/// Returns the length of the longest unbroken run of <paramref name="targetState"/>
/// in the <c>states</c> sequence (0 when it never occurs).
/// </summary>
public int MaxConsecutiveStates(State targetState)
{
    int best = 0;
    int run = 0;
    foreach (State s in states)
    {
        // Extend the current run on a match; any other state resets it.
        run = (s == targetState) ? run + 1 : 0;
        if (run > best)
        {
            best = run;
        }
    }
    return best;
}
|
// Manipulate --js UpdateSODIUM D:\CIOS\Scripts\Softwares\Update-SODIUM.js
// Configuration for the libsodium update check below.
var url = "www.libsodium.org" ;  // release page host (no scheme; consumed by the host FTP object)
var filename = "G:/Temp/sodium.html" ;  // where the freshly downloaded page is stored
var lastest = "G:/Temp/sodium-download.html" ;  // previously saved page ("lastest" typo kept: name is referenced below)
var timeout = 60 * 1000 ;  // download timeout in milliseconds
var version = "libsodium-1.0.9.tar.gz" ;  // tarball name to look for on the page
var temp = "G:/Qt/Src/QtZMQ/3rdparty/sources/" ;  // destination directory for the tarball
// Downloads `url` into `filename`, giving up after `timeout` milliseconds.
// Returns whatever FTP.Download returns (truthy on success, per the call
// sites in CheckLZO/UpdateSODIUM).
// NOTE(review): MtJS and FTP are host-environment objects; `mt` and `ftp` are
// assigned without `var` and so become globals — confirm that is intended.
function Fetch(url,filename,timeout)
{
var msg ;
var r ;
mt = new MtJS ( ) ;
ftp = new FTP ( ) ;
// Console log (Chinese): "Downloading <url> to <filename>".
msg = "下載 " + url + " 到 " + filename ;
mt . toConsoleLn ( msg ) ;
r = ftp . Download ( url , filename , timeout ) ;
// Host objects are released explicitly after use.
delete ftp ;
delete mt ;
return r ;
}
// Decides whether a new libsodium release is available.
// Downloads the release page, compares its byte size with the previously
// saved copy; when the sizes differ, saves the new page and scans it for the
// expected tarball name. Returns true only when the page changed AND
// contains `version`.
// NOTE(review): `lzob`/`ober` are assigned without `var` and become globals;
// the "LZO" naming suggests this was adapted from an LZO update script.
function CheckLZO()
{
var msg ;
var lzs ;
var obs ;
var nvl ;
nvl = false ;
lzob = new ByteArray ( ) ;  // previously saved page
ober = new ByteArray ( ) ;  // freshly downloaded page
if ( ! Fetch ( url , filename , timeout ) ) {
return false ;
} ;
lzob . Load ( lastest ) ;
ober . Load ( filename ) ;
lzs = lzob . Size ( ) ;
obs = ober . Size ( ) ;
// A size difference is used as a cheap "page changed" signal — a same-size
// update would be missed; TODO confirm this heuristic is acceptable.
if ( lzs !== obs ) {
ober . Save ( lastest ) ;
if ( ober . Contains ( version ) ) {
msg = "New version : " + version ;
print ( msg ) ;
nvl = true ;
} ;
} ;
delete lzob ;
delete ober ;
return nvl ;
}
// Entry point: when CheckLZO reports a new release, downloads the tarball
// into the 3rd-party sources directory, allowing four times the page timeout.
// NOTE(review): `url + version` concatenates with no separating "/" —
// yielding "www.libsodium.orglibsodium-1.0.9.tar.gz". Verify against the
// host FTP object's expected download-path format; this looks like a
// missing "/".
function UpdateSODIUM()
{
var lzo ;  // remote tarball location
var lzf ;  // local destination path
if ( ! CheckLZO ( ) ) return false ;
lzo = url + version ;
lzf = temp + version ;
return Fetch ( lzo , lzf , timeout * 4 ) ;
}
|
<filename>client/src/components/store/BrandContainer.tsx
import React from 'react';
import FormGroup from '@material-ui/core/FormGroup';
import FormControlLabel from '@material-ui/core/FormControlLabel';
import Checkbox from '@material-ui/core/Checkbox';
// Props for the brand checkbox filter list.
interface Props {
  brands: Array<string>
  // Map of brand name -> checked state.
  // TODO(review): tighten `any` to Record<string, boolean> once callers are confirmed.
  brandCheckbox: any,
  // Widened from `() => void` so handlers may inspect the checkbox change
  // event; existing zero-argument handlers remain assignable.
  onChange: (event: React.ChangeEvent<HTMLInputElement>) => void
}

/**
 * Renders one labelled checkbox per brand; checked state and the change
 * handler are fully controlled by the parent.
 */
function BrandContainer(props: Props) {
  return (
    <div>
      <h4>Brand</h4>
      <FormGroup row>
        {props.brands.map((brand) =>
          <FormControlLabel
            // Brand names are stable identifiers, unlike array indices, so
            // React can reconcile correctly if the list is reordered.
            // NOTE(review): assumes brand names are unique — confirm upstream.
            key={brand}
            control={<Checkbox checked={props.brandCheckbox[brand] || false} onChange={props.onChange} name={brand}/>}
            label={brand}
          />)}
      </FormGroup>
    </div>
  )
}

export default BrandContainer;
|
<reponame>Aukbit/pluto
// Code generated by protoc-gen-go. DO NOT EDIT.
// source: examples/dist/user_backend/proto/user.proto
package user
import (
fmt "fmt"
proto "github.com/golang/protobuf/proto"
context "golang.org/x/net/context"
grpc "google.golang.org/grpc"
math "math"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
// The request message containing the new user data.
type NewUser struct {
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
Email string `protobuf:"bytes,2,opt,name=email,proto3" json:"email,omitempty"`
Password string `protobuf:"bytes,3,opt,name=password,proto3" json:"password,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *NewUser) Reset() { *m = NewUser{} }
func (m *NewUser) String() string { return proto.CompactTextString(m) }
func (*NewUser) ProtoMessage() {}
func (*NewUser) Descriptor() ([]byte, []int) {
return fileDescriptor_da68ceb5ffd4959d, []int{0}
}
func (m *NewUser) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_NewUser.Unmarshal(m, b)
}
func (m *NewUser) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_NewUser.Marshal(b, m, deterministic)
}
func (m *NewUser) XXX_Merge(src proto.Message) {
xxx_messageInfo_NewUser.Merge(m, src)
}
func (m *NewUser) XXX_Size() int {
return xxx_messageInfo_NewUser.Size(m)
}
func (m *NewUser) XXX_DiscardUnknown() {
xxx_messageInfo_NewUser.DiscardUnknown(m)
}
var xxx_messageInfo_NewUser proto.InternalMessageInfo
func (m *NewUser) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *NewUser) GetEmail() string {
if m != nil {
return m.Email
}
return ""
}
func (m *NewUser) GetPassword() string {
if m != nil {
return m.Password
}
return ""
}
// The response message containing the user data
type User struct {
Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"`
Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"`
Email string `protobuf:"bytes,3,opt,name=email,proto3" json:"email,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *User) Reset() { *m = User{} }
func (m *User) String() string { return proto.CompactTextString(m) }
func (*User) ProtoMessage() {}
func (*User) Descriptor() ([]byte, []int) {
return fileDescriptor_da68ceb5ffd4959d, []int{1}
}
func (m *User) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_User.Unmarshal(m, b)
}
func (m *User) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_User.Marshal(b, m, deterministic)
}
func (m *User) XXX_Merge(src proto.Message) {
xxx_messageInfo_User.Merge(m, src)
}
func (m *User) XXX_Size() int {
return xxx_messageInfo_User.Size(m)
}
func (m *User) XXX_DiscardUnknown() {
xxx_messageInfo_User.DiscardUnknown(m)
}
var xxx_messageInfo_User proto.InternalMessageInfo
func (m *User) GetId() string {
if m != nil {
return m.Id
}
return ""
}
func (m *User) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *User) GetEmail() string {
if m != nil {
return m.Email
}
return ""
}
// The response message containing the a users list
type Users struct {
Data []*User `protobuf:"bytes,1,rep,name=data,proto3" json:"data,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Users) Reset() { *m = Users{} }
func (m *Users) String() string { return proto.CompactTextString(m) }
func (*Users) ProtoMessage() {}
func (*Users) Descriptor() ([]byte, []int) {
return fileDescriptor_da68ceb5ffd4959d, []int{2}
}
func (m *Users) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Users.Unmarshal(m, b)
}
func (m *Users) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Users.Marshal(b, m, deterministic)
}
func (m *Users) XXX_Merge(src proto.Message) {
xxx_messageInfo_Users.Merge(m, src)
}
func (m *Users) XXX_Size() int {
return xxx_messageInfo_Users.Size(m)
}
func (m *Users) XXX_DiscardUnknown() {
xxx_messageInfo_Users.DiscardUnknown(m)
}
var xxx_messageInfo_Users proto.InternalMessageInfo
func (m *Users) GetData() []*User {
if m != nil {
return m.Data
}
return nil
}
// The response message containing the a users list
type Filter struct {
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Filter) Reset() { *m = Filter{} }
func (m *Filter) String() string { return proto.CompactTextString(m) }
func (*Filter) ProtoMessage() {}
func (*Filter) Descriptor() ([]byte, []int) {
return fileDescriptor_da68ceb5ffd4959d, []int{3}
}
func (m *Filter) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Filter.Unmarshal(m, b)
}
func (m *Filter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Filter.Marshal(b, m, deterministic)
}
func (m *Filter) XXX_Merge(src proto.Message) {
xxx_messageInfo_Filter.Merge(m, src)
}
func (m *Filter) XXX_Size() int {
return xxx_messageInfo_Filter.Size(m)
}
func (m *Filter) XXX_DiscardUnknown() {
xxx_messageInfo_Filter.DiscardUnknown(m)
}
var xxx_messageInfo_Filter proto.InternalMessageInfo
func (m *Filter) GetName() string {
if m != nil {
return m.Name
}
return ""
}
// The request message containing the user basic credentials
type Credentials struct {
Email string `protobuf:"bytes,1,opt,name=email,proto3" json:"email,omitempty"`
Password string `protobuf:"bytes,2,opt,name=password,proto3" json:"password,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Credentials) Reset() { *m = Credentials{} }
func (m *Credentials) String() string { return proto.CompactTextString(m) }
func (*Credentials) ProtoMessage() {}
func (*Credentials) Descriptor() ([]byte, []int) {
return fileDescriptor_da68ceb5ffd4959d, []int{4}
}
func (m *Credentials) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Credentials.Unmarshal(m, b)
}
func (m *Credentials) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Credentials.Marshal(b, m, deterministic)
}
func (m *Credentials) XXX_Merge(src proto.Message) {
xxx_messageInfo_Credentials.Merge(m, src)
}
func (m *Credentials) XXX_Size() int {
return xxx_messageInfo_Credentials.Size(m)
}
func (m *Credentials) XXX_DiscardUnknown() {
xxx_messageInfo_Credentials.DiscardUnknown(m)
}
var xxx_messageInfo_Credentials proto.InternalMessageInfo
func (m *Credentials) GetEmail() string {
if m != nil {
return m.Email
}
return ""
}
func (m *Credentials) GetPassword() string {
if m != nil {
return m.Password
}
return ""
}
// The response message containing the a users list
type Verification struct {
IsValid bool `protobuf:"varint,1,opt,name=isValid,proto3" json:"isValid,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Verification) Reset() { *m = Verification{} }
func (m *Verification) String() string { return proto.CompactTextString(m) }
func (*Verification) ProtoMessage() {}
func (*Verification) Descriptor() ([]byte, []int) {
return fileDescriptor_da68ceb5ffd4959d, []int{5}
}
func (m *Verification) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Verification.Unmarshal(m, b)
}
func (m *Verification) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Verification.Marshal(b, m, deterministic)
}
func (m *Verification) XXX_Merge(src proto.Message) {
xxx_messageInfo_Verification.Merge(m, src)
}
func (m *Verification) XXX_Size() int {
return xxx_messageInfo_Verification.Size(m)
}
func (m *Verification) XXX_DiscardUnknown() {
xxx_messageInfo_Verification.DiscardUnknown(m)
}
var xxx_messageInfo_Verification proto.InternalMessageInfo
func (m *Verification) GetIsValid() bool {
if m != nil {
return m.IsValid
}
return false
}
func init() {
proto.RegisterType((*NewUser)(nil), "user.NewUser")
proto.RegisterType((*User)(nil), "user.User")
proto.RegisterType((*Users)(nil), "user.Users")
proto.RegisterType((*Filter)(nil), "user.Filter")
proto.RegisterType((*Credentials)(nil), "user.Credentials")
proto.RegisterType((*Verification)(nil), "user.Verification")
}
func init() {
proto.RegisterFile("examples/dist/user_backend/proto/user.proto", fileDescriptor_da68ceb5ffd4959d)
}
var fileDescriptor_da68ceb5ffd4959d = []byte{
// 336 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x52, 0x5d, 0x4b, 0xc3, 0x40,
0x10, 0x6c, 0xd2, 0xf4, 0xc3, 0x4d, 0x15, 0x3c, 0x7c, 0x08, 0x41, 0xa4, 0x1c, 0xa2, 0x55, 0xa1,
0x85, 0x8a, 0xcf, 0x0a, 0x15, 0x1f, 0x15, 0x22, 0xed, 0xab, 0x5c, 0x7b, 0x2b, 0x1c, 0xa6, 0x49,
0xb8, 0x3b, 0xad, 0xfe, 0x0c, 0xff, 0xb1, 0x64, 0xaf, 0x1f, 0xa1, 0x54, 0x7d, 0xdb, 0x9d, 0x19,
0xe6, 0x76, 0x86, 0x83, 0x2b, 0xfc, 0x14, 0xf3, 0x22, 0x45, 0x33, 0x90, 0xca, 0xd8, 0xc1, 0xbb,
0x41, 0xfd, 0x32, 0x15, 0xb3, 0x37, 0xcc, 0xe4, 0xa0, 0xd0, 0xb9, 0xcd, 0x09, 0xea, 0xd3, 0xc8,
0x82, 0x72, 0xe6, 0x4f, 0xd0, 0x7a, 0xc4, 0xc5, 0xd8, 0xa0, 0x66, 0x0c, 0x82, 0x4c, 0xcc, 0x31,
0xf2, 0xba, 0x5e, 0x6f, 0x2f, 0xa1, 0x99, 0x1d, 0x41, 0x03, 0xe7, 0x42, 0xa5, 0x91, 0x4f, 0xa0,
0x5b, 0x58, 0x0c, 0xed, 0x42, 0x18, 0xb3, 0xc8, 0xb5, 0x8c, 0xea, 0x44, 0xac, 0x77, 0x7e, 0x07,
0x01, 0xb9, 0x1d, 0x80, 0xaf, 0xe4, 0xd2, 0xcb, 0x57, 0x72, 0xed, 0xee, 0xef, 0x72, 0xaf, 0x57,
0xdc, 0xf9, 0x39, 0x34, 0x4a, 0x07, 0xc3, 0x4e, 0x20, 0x90, 0xc2, 0x8a, 0xc8, 0xeb, 0xd6, 0x7b,
0xe1, 0x10, 0xfa, 0x74, 0x7c, 0x49, 0x25, 0x84, 0xf3, 0x63, 0x68, 0x3e, 0xa8, 0xd4, 0xee, 0x3e,
0x9d, 0xdf, 0x42, 0x38, 0xd2, 0x28, 0x31, 0xb3, 0x4a, 0xa4, 0x66, 0xf3, 0x96, 0xf7, 0x5b, 0x12,
0x7f, 0x2b, 0x49, 0x0f, 0x3a, 0x13, 0xd4, 0xea, 0x55, 0xcd, 0x84, 0x55, 0x79, 0xc6, 0x22, 0x68,
0x29, 0x33, 0x11, 0xe9, 0x32, 0x56, 0x3b, 0x59, 0xad, 0xc3, 0x6f, 0x1f, 0xc2, 0xf2, 0xae, 0x67,
0xd4, 0x1f, 0x6a, 0x86, 0xec, 0x02, 0x60, 0xa4, 0x51, 0x58, 0xa4, 0x26, 0xf6, 0xdd, 0xe1, 0xcb,
0x9a, 0xe3, 0x4a, 0x0e, 0x5e, 0x63, 0xa7, 0xd0, 0x4e, 0x50, 0x48, 0x12, 0x56, 0x98, 0x2d, 0xd5,
0x19, 0xc0, 0xb8, 0x90, 0x2b, 0xc3, 0x3f, 0x75, 0xf7, 0x98, 0xe2, 0xbf, 0xba, 0x4b, 0x08, 0x5d,
0x73, 0xae, 0xe8, 0x8e, 0x23, 0x1d, 0x14, 0x87, 0x1b, 0xa9, 0xe1, 0x35, 0x76, 0x03, 0x40, 0x35,
0x7c, 0x91, 0xe7, 0xa1, 0x23, 0x2b, 0xcd, 0xc6, 0xcc, 0x41, 0xd5, 0xae, 0x78, 0x6d, 0xda, 0xa4,
0x5f, 0x76, 0xfd, 0x13, 0x00, 0x00, 0xff, 0xff, 0xb1, 0xd8, 0xe8, 0x81, 0x94, 0x02, 0x00, 0x00,
}
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConn
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion4
// UserServiceClient is the client API for UserService service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type UserServiceClient interface {
CreateUser(ctx context.Context, in *NewUser, opts ...grpc.CallOption) (*User, error)
ReadUser(ctx context.Context, in *User, opts ...grpc.CallOption) (*User, error)
UpdateUser(ctx context.Context, in *User, opts ...grpc.CallOption) (*User, error)
DeleteUser(ctx context.Context, in *User, opts ...grpc.CallOption) (*User, error)
FilterUsers(ctx context.Context, in *Filter, opts ...grpc.CallOption) (*Users, error)
VerifyUser(ctx context.Context, in *Credentials, opts ...grpc.CallOption) (*Verification, error)
}
type userServiceClient struct {
cc *grpc.ClientConn
}
func NewUserServiceClient(cc *grpc.ClientConn) UserServiceClient {
return &userServiceClient{cc}
}
func (c *userServiceClient) CreateUser(ctx context.Context, in *NewUser, opts ...grpc.CallOption) (*User, error) {
out := new(User)
err := c.cc.Invoke(ctx, "/user.UserService/CreateUser", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// ReadUser invokes the unary /user.UserService/ReadUser RPC.
func (c *userServiceClient) ReadUser(ctx context.Context, in *User, opts ...grpc.CallOption) (*User, error) {
	resp := new(User)
	if err := c.cc.Invoke(ctx, "/user.UserService/ReadUser", in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}
// UpdateUser invokes the unary /user.UserService/UpdateUser RPC.
func (c *userServiceClient) UpdateUser(ctx context.Context, in *User, opts ...grpc.CallOption) (*User, error) {
	resp := new(User)
	if err := c.cc.Invoke(ctx, "/user.UserService/UpdateUser", in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}
// DeleteUser invokes the unary /user.UserService/DeleteUser RPC.
func (c *userServiceClient) DeleteUser(ctx context.Context, in *User, opts ...grpc.CallOption) (*User, error) {
	resp := new(User)
	if err := c.cc.Invoke(ctx, "/user.UserService/DeleteUser", in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}
// FilterUsers invokes the unary /user.UserService/FilterUsers RPC.
func (c *userServiceClient) FilterUsers(ctx context.Context, in *Filter, opts ...grpc.CallOption) (*Users, error) {
	resp := new(Users)
	if err := c.cc.Invoke(ctx, "/user.UserService/FilterUsers", in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}
// VerifyUser invokes the unary /user.UserService/VerifyUser RPC.
func (c *userServiceClient) VerifyUser(ctx context.Context, in *Credentials, opts ...grpc.CallOption) (*Verification, error) {
	resp := new(Verification)
	if err := c.cc.Invoke(ctx, "/user.UserService/VerifyUser", in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}
// UserServiceServer is the server API for UserService service.
// Implementations provide the operations mirrored by UserServiceClient.
type UserServiceServer interface {
	CreateUser(context.Context, *NewUser) (*User, error)
	ReadUser(context.Context, *User) (*User, error)
	UpdateUser(context.Context, *User) (*User, error)
	DeleteUser(context.Context, *User) (*User, error)
	FilterUsers(context.Context, *Filter) (*Users, error)
	VerifyUser(context.Context, *Credentials) (*Verification, error)
}
// RegisterUserServiceServer registers the UserService implementation and its
// method handlers with the given gRPC server.
func RegisterUserServiceServer(s *grpc.Server, srv UserServiceServer) {
	s.RegisterService(&_UserService_serviceDesc, srv)
}
// _UserService_CreateUser_Handler decodes a CreateUser request and dispatches
// it to the service implementation, routing through the interceptor when set.
func _UserService_CreateUser_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	req := new(NewUser)
	if err := dec(req); err != nil {
		return nil, err
	}
	// No interceptor configured: call the implementation directly.
	if interceptor == nil {
		return srv.(UserServiceServer).CreateUser(ctx, req)
	}
	handler := func(ctx context.Context, r interface{}) (interface{}, error) {
		return srv.(UserServiceServer).CreateUser(ctx, r.(*NewUser))
	}
	return interceptor(ctx, req, &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/user.UserService/CreateUser",
	}, handler)
}
// _UserService_ReadUser_Handler decodes a ReadUser request and dispatches it
// to the service implementation, routing through the interceptor when set.
func _UserService_ReadUser_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	req := new(User)
	if err := dec(req); err != nil {
		return nil, err
	}
	// No interceptor configured: call the implementation directly.
	if interceptor == nil {
		return srv.(UserServiceServer).ReadUser(ctx, req)
	}
	handler := func(ctx context.Context, r interface{}) (interface{}, error) {
		return srv.(UserServiceServer).ReadUser(ctx, r.(*User))
	}
	return interceptor(ctx, req, &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/user.UserService/ReadUser",
	}, handler)
}
// _UserService_UpdateUser_Handler decodes an UpdateUser request and dispatches
// it to the service implementation, routing through the interceptor when set.
func _UserService_UpdateUser_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	req := new(User)
	if err := dec(req); err != nil {
		return nil, err
	}
	// No interceptor configured: call the implementation directly.
	if interceptor == nil {
		return srv.(UserServiceServer).UpdateUser(ctx, req)
	}
	handler := func(ctx context.Context, r interface{}) (interface{}, error) {
		return srv.(UserServiceServer).UpdateUser(ctx, r.(*User))
	}
	return interceptor(ctx, req, &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/user.UserService/UpdateUser",
	}, handler)
}
// _UserService_DeleteUser_Handler decodes a DeleteUser request and dispatches
// it to the service implementation, routing through the interceptor when set.
func _UserService_DeleteUser_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	req := new(User)
	if err := dec(req); err != nil {
		return nil, err
	}
	// No interceptor configured: call the implementation directly.
	if interceptor == nil {
		return srv.(UserServiceServer).DeleteUser(ctx, req)
	}
	handler := func(ctx context.Context, r interface{}) (interface{}, error) {
		return srv.(UserServiceServer).DeleteUser(ctx, r.(*User))
	}
	return interceptor(ctx, req, &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/user.UserService/DeleteUser",
	}, handler)
}
// _UserService_FilterUsers_Handler decodes a FilterUsers request and
// dispatches it to the service implementation, routing through the
// interceptor when set.
func _UserService_FilterUsers_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	req := new(Filter)
	if err := dec(req); err != nil {
		return nil, err
	}
	// No interceptor configured: call the implementation directly.
	if interceptor == nil {
		return srv.(UserServiceServer).FilterUsers(ctx, req)
	}
	handler := func(ctx context.Context, r interface{}) (interface{}, error) {
		return srv.(UserServiceServer).FilterUsers(ctx, r.(*Filter))
	}
	return interceptor(ctx, req, &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/user.UserService/FilterUsers",
	}, handler)
}
// _UserService_VerifyUser_Handler decodes a VerifyUser request and dispatches
// it to the service implementation, routing through the interceptor when set.
func _UserService_VerifyUser_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	req := new(Credentials)
	if err := dec(req); err != nil {
		return nil, err
	}
	// No interceptor configured: call the implementation directly.
	if interceptor == nil {
		return srv.(UserServiceServer).VerifyUser(ctx, req)
	}
	handler := func(ctx context.Context, r interface{}) (interface{}, error) {
		return srv.(UserServiceServer).VerifyUser(ctx, r.(*Credentials))
	}
	return interceptor(ctx, req, &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/user.UserService/VerifyUser",
	}, handler)
}
// _UserService_serviceDesc wires each UserService method name to its handler
// function for registration with a grpc.Server. All methods are unary
// (no streams).
var _UserService_serviceDesc = grpc.ServiceDesc{
	ServiceName: "user.UserService",
	HandlerType: (*UserServiceServer)(nil),
	Methods: []grpc.MethodDesc{
		{
			MethodName: "CreateUser",
			Handler:    _UserService_CreateUser_Handler,
		},
		{
			MethodName: "ReadUser",
			Handler:    _UserService_ReadUser_Handler,
		},
		{
			MethodName: "UpdateUser",
			Handler:    _UserService_UpdateUser_Handler,
		},
		{
			MethodName: "DeleteUser",
			Handler:    _UserService_DeleteUser_Handler,
		},
		{
			MethodName: "FilterUsers",
			Handler:    _UserService_FilterUsers_Handler,
		},
		{
			MethodName: "VerifyUser",
			Handler:    _UserService_VerifyUser_Handler,
		},
	},
	Streams:  []grpc.StreamDesc{},
	Metadata: "examples/dist/user_backend/proto/user.proto",
}
|
<reponame>bblack16/block-stack-query
require_relative 'sqlite'
module BlockStack
  class Query
    module Adapters
      # MySQL adapter: reuses the SQLite SQL generation and overrides only the
      # pieces where MySQL syntax differs (regular-expression matching).
      class MySQL < SQLite
        # Sequel dataset classes this adapter handles.
        def self.classes
          ['Sequel::Mysql2::Dataset']
        end

        # Symbolic identifier used to look this adapter up.
        def self.type
          :mysql
        end

        protected

        # Render a regex-match expression using MySQL's REGEXP operator.
        # The pattern source is recovered by inspecting the Regexp literal and
        # stripping the surrounding slashes.
        # NOTE(review): the pattern is interpolated into the SQL string without
        # escaping — confirm upstream sanitization before using with untrusted
        # input.
        def _match_to_s(exp)
          "#{exp.attribute_to_s}#{exp.inverse? ? ' NOT' : nil} REGEXP '#{exp.expression.inspect.scan(/(?<=^\/).*(?=\/)/).first}'"
        end
      end
    end
  end
end
|
// Button that reveals/hides the "add book" form.
const addNewBookButton = document.querySelector('button.newBook');
// Accumulated HTML for the book grid; rebuilt on every form submission.
let content = '';
// Render one book as a card; `index` is the book's position in the grid and
// is used to wire the per-card delete/status buttons.
const bookCard = (book, index) => `
<div id="book-${index}" class="col-md-4 mt-2">
<div class="card" style="width: 18rem;">
<div class="card-body">
<h5 class="card-title" id="itemName">${book.title}</h5>
<p class="card-text" id="itemDesc">${book.author}</p>
<p class="card-text">${book.pages}</p>
<button class="card-text status">${book.read}</button>
<button class="delete" id=${index} data-id=${index}>Delete</button>
</div>
</div>
</div>
`;
// In-memory collection of Book instances.
class Library {
  constructor() {
    this.books = [];
  }

  // Create a Book from the given fields and store it.
  addBook(title, author, pages, id) {
    this.books.push(new Book(title, author, pages, id));
  }

  // Expose the backing array of books.
  allBooks() {
    return this.books;
  }

  // Append a rendered card for every book to the shared `content` buffer.
  displayAllBooks() {
    for (const [index, book] of this.books.entries()) {
      content += bookCard(book, index);
    }
  }
}
const myLibrary = new Library();

// Handle the "add book" form: store the new book, re-render the grid, and
// (re)attach delete / read-status handlers to every card.
const formSubmission = (event) => {
  event.preventDefault();
  content = '';
  const form = event.target;
  const values = $(form).serializeArray();
  const index = myLibrary.books.length;
  myLibrary.addBook(values[0].value, values[1].value, values[2].value, index);
  myLibrary.displayAllBooks();
  document.querySelector('.books-grid').innerHTML = content;
  form.reset();
  myLibrary.books.forEach((book, index) => {
    const deleteButton = document.querySelector(`#book-${index} button.delete`);
    const statusButton = document.querySelector(`#book-${index} button.status`);
    deleteButton.addEventListener('click', () => {
      // Remove by object identity rather than by id: after earlier deletions
      // the positional index no longer matches book.id, so filtering on
      // `book.id !== index` could delete the wrong entry or none at all.
      myLibrary.books = myLibrary.books.filter((other) => other !== book);
      const card = document.getElementById(`book-${index}`);
      if (card) {
        card.remove();
      }
    });
    // Toggle the button label between the two read states.
    const readStatus = () => ($(statusButton).html() === 'Read'
      ? $(statusButton).html('Not Read')
      : $(statusButton).html('Read'));
    statusButton.addEventListener('click', readStatus);
  });
  if ($('.form').hasClass('show')) {
    $('.form').addClass('hide');
    $('.form').removeClass('show');
  }
};

$('form').on('submit', formSubmission);

// Toggle the new-book form's visibility.
addNewBookButton.addEventListener('click', () => {
  $('.form').toggleClass('hide show');
});
|
<filename>pecado-gateway/src/test/java/me/batizhao/gateway/api/GatewayApiTest.java
package me.batizhao.gateway.api;
import me.batizhao.common.core.util.ResultEnum;
import org.junit.jupiter.api.Test;
import org.springframework.http.MediaType;
/**
* @author batizhao
* @since 2020-04-24
**/
public class GatewayApiTest extends BaseApiTest {

    /**
     * An unknown route must surface the gateway's JSON error body carrying
     * the 404 status and the gateway error code.
     */
    @Test
    void givenNoExistUrl_whenCallGateway_then404() {
        webClient.get().uri("/api/xxxx").exchange().expectStatus().isNotFound()
                .expectHeader()
                .contentTypeCompatibleWith(MediaType.APPLICATION_JSON)
                .expectBody()
                .jsonPath("$.code").isEqualTo(ResultEnum.GATEWAY_ERROR.getCode())
                .jsonPath("$.data").isEqualTo(404)
                .jsonPath("$.message").isEqualTo("404 NOT_FOUND");
    }

    // @Test
    // void givenExistServiceUrl_whenCallGateway_then503() {
    //     webClient.get().uri("/api/ims/users").exchange().expectStatus().is5xxServerError()
    //             .expectHeader()
    //             .contentTypeCompatibleWith(MediaType.APPLICATION_JSON)
    //             .expectBody()
    //             .jsonPath("$.code").isEqualTo(ResultEnum.GATEWAY_ERROR.getCode())
    //             .jsonPath("$.data").isEqualTo(503)
    //             .jsonPath("$.message").value(containsString("503 SERVICE_UNAVAILABLE"));
    // }

    /**
     * The actuator's gateway routes endpoint must be reachable on the
     * management port. Visibility normalized to package-private to match the
     * JUnit 5 convention used by the other test.
     */
    @Test
    void actuatorManagementPort() {
        webClient.get()
                .uri("http://localhost:" + managementPort + "/actuator/gateway/routes")
                .exchange().expectStatus().isOk();
    }
}
|
<reponame>yoloho/enhanced-dao<gh_stars>1-10
package com.yoloho.enhanced.data.dao.impl;
import java.util.List;
import java.util.Map;
import javax.annotation.Resource;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.yoloho.enhanced.common.util.Logging;
import com.yoloho.enhanced.data.dao.annotations.EnableEnhancedDao;
import com.yoloho.enhanced.data.dao.annotations.EnableSqlSessionFactory;
import com.yoloho.enhanced.data.dao.api.UpdateEntry;
import com.yoloho.enhanced.data.dao.api.filter.DynamicQueryFilter;
import com.yoloho.enhanced.data.dao.impl.EnhancedDaoImplTest.UnitTestUserMapping;
/**
 * Integration test for the annotation-driven enhanced-dao wiring:
 * {@code @EnableSqlSessionFactory} builds the session factory and
 * {@code @EnableEnhancedDao} scans and registers the dao beans. Exercises
 * insert (single and batch), get/find, update (bean, batch and
 * column-increment) and remove against the configured MySQL test database.
 */
@SpringBootApplication
@RunWith(SpringJUnit4ClassRunner.class)
@SpringBootTest
@EnableSqlSessionFactory(
    name = "testSessionFactory",
    connectionUrl = "jdbc:mysql://192.168.127.56:3306/test?useUnicode=true&characterEncoding=utf-8&allowMultiQueries=true",
    username = "test",
    password = "<PASSWORD>"
)
@EnableEnhancedDao(
    scanPath = "com.yoloho.enhanced.data.dao.impl",
    sqlSessionFactory = "testSessionFactory"
)
public class EnhancedDaoAnnotationTest {
    private static final Logger logger = LoggerFactory.getLogger(EnhancedDaoAnnotationTest.class.getSimpleName());

    // Dao under test; injected via the annotation-driven registration above.
    @Resource
    private EnhancedDaoImpl<UnitTestUserMapping, Integer> unitTestUserMappingEnhancedDao;

    @Before
    public void init() {
        Logging.initLogging(true, false);
    }

    @Test
    public void insertRemoveTest() {
        List<Integer> idList = Lists.newArrayList();
        /**
         * Without auto-increment
         */
        {
            // Start from a clean table: an empty filter matches every row.
            DynamicQueryFilter filter = new DynamicQueryFilter();
            logger.info("remove {} records", unitTestUserMappingEnhancedDao.remove(filter.getQueryData()));
        }
        {
            // Single insert; the memo deliberately contains a backtick to
            // exercise escaping.
            UnitTestUserMapping bean = new UnitTestUserMapping();
            bean.setUid(1);
            bean.setOtherId(101);
            bean.setMemo("user 1 contains ` special chars");
            bean.setDateline((int)(System.currentTimeMillis() / 1000));
            Assert.assertEquals(1, unitTestUserMappingEnhancedDao.insert(bean));
            idList.add(1);
        }
        {
            // Batch insert of 17 rows with assorted special characters.
            List<UnitTestUserMapping> list = Lists.newArrayList();
            for (int i = 2; i < 19; i++) {
                UnitTestUserMapping bean = new UnitTestUserMapping();
                bean.setUid(i);
                bean.setOtherId(100 + i);
                bean.setMemo(String.format("user %d contains `&*^%%(^&^*%%@'\" special chars", i));
                bean.setDateline((int)(System.currentTimeMillis() / 1000) + i);
                list.add(bean);
                idList.add(i);
            }
            Assert.assertEquals(17, unitTestUserMappingEnhancedDao.insert(list));
        }
        {
            //update
            UnitTestUserMapping mapping = unitTestUserMappingEnhancedDao.get(10);
            Assert.assertNotNull(mapping);
            Assert.assertEquals(10, mapping.getUid());
            mapping.setMemo("new val");
            Assert.assertEquals(1, unitTestUserMappingEnhancedDao.update(mapping));
            mapping = unitTestUserMappingEnhancedDao.get(10);
            Assert.assertNotNull(mapping);
            Assert.assertEquals(10, mapping.getUid());
            Assert.assertEquals("new val", mapping.getMemo());
            //inc: column increment via UpdateEntry, filtered by uid
            int oldDateline = mapping.getDateline();
            Map<String, UpdateEntry> data = Maps.newConcurrentMap();
            data.put("dateline", new UpdateEntry().increse(2));
            Assert.assertEquals(1, unitTestUserMappingEnhancedDao.update(data, new DynamicQueryFilter()
                    .equalPair("uid", 10)
                    .getQueryData()));
            mapping = unitTestUserMappingEnhancedDao.get(10);
            Assert.assertNotNull(mapping);
            Assert.assertEquals(10, mapping.getUid());
            Assert.assertEquals(oldDateline + 2, mapping.getDateline());
            Assert.assertEquals("new val", mapping.getMemo());
        }
        {
            //update batch
            String tail = " update append new val";
            List<UnitTestUserMapping> mappingList = unitTestUserMappingEnhancedDao.find(4, 5, 6);
            Assert.assertNotNull(mappingList);
            Assert.assertEquals(3, mappingList.size());
            for (UnitTestUserMapping unitTestUserMapping : mappingList) {
                unitTestUserMapping.setMemo(unitTestUserMapping.getMemo() + tail);
            }
            Assert.assertEquals(3, unitTestUserMappingEnhancedDao.update(mappingList));
            mappingList = unitTestUserMappingEnhancedDao.find(4, 5, 6);
            Assert.assertNotNull(mappingList);
            Assert.assertEquals(3, mappingList.size());
            Assert.assertTrue(mappingList.get(0).getMemo().contains(tail));
        }
    }
}
|
TCP (Transmission Control Protocol) is a connection-oriented protocol that uses acknowledgments and retransmissions to ensure reliable delivery of data. It creates a reliable, ordered, two-way communication channel between two nodes that remain connected until the transmission is complete. UDP (User Datagram Protocol) is a connectionless protocol that is faster than TCP but does not provide the same level of quality assurance. UDP does not use acknowledgments or retransmissions, so data may be lost and datagrams may not arrive in the order they were sent. As a result, UDP cannot guarantee delivery.
|
#!/bin/sh
# Provision a Satisfactory dedicated server on Ubuntu: install steamcmd and
# the game server, register systemd units for the server and an idle
# auto-shutdown watchdog, and schedule S3 save backups via cron.
#
# Note: Arguments to this script
# 1: string - S3 bucket for your backup save files (required)
# 2: true|false - whether to use Satisfactory Experimental build (optional, default false)
S3_SAVE_BUCKET=$1
USE_EXPERIMENTAL_BUILD=${2-false}
# install steamcmd: https://developer.valvesoftware.com/wiki/SteamCMD?__cf_chl_jschl_tk__=pmd_WNQPOiK18.h0rf16RCYrARI2s8_84hUMwT.7N1xHYcs-1635248050-0-gqNtZGzNAiWjcnBszQiR#Linux.2FmacOS)
add-apt-repository multiverse
dpkg --add-architecture i386
apt update
# Needed to accept steam license without hangup
echo steam steam/question 'select' "I AGREE" | sudo debconf-set-selections
echo steam steam/license note '' | sudo debconf-set-selections
apt install -y unzip lib32gcc1 steamcmd
# install satisfactory: https://satisfactory.fandom.com/wiki/Dedicated_servers
# (quoted to avoid word-splitting surprises on malformed arguments)
if [ "$USE_EXPERIMENTAL_BUILD" = "true" ]; then
    STEAM_INSTALL_SCRIPT="/usr/games/steamcmd +login anonymous +app_update 1690800 -beta experimental validate +quit"
else
    STEAM_INSTALL_SCRIPT="/usr/games/steamcmd +login anonymous +app_update 1690800 validate +quit"
fi
# note, we are switching users because steam doesn't recommend running steamcmd as root
su - ubuntu -c "$STEAM_INSTALL_SCRIPT"
# enable as server so it stays up and start: https://satisfactory.fandom.com/wiki/Dedicated_servers/Running_as_a_Service
cat << EOF > /etc/systemd/system/satisfactory.service
[Unit]
Description=Satisfactory dedicated server
Wants=network-online.target
After=syslog.target network.target nss-lookup.target network-online.target
[Service]
Environment="LD_LIBRARY_PATH=./linux64"
ExecStartPre=$STEAM_INSTALL_SCRIPT
ExecStart=/home/ubuntu/.steam/steamapps/common/SatisfactoryDedicatedServer/FactoryServer.sh
User=ubuntu
Group=ubuntu
StandardOutput=journal
Restart=on-failure
KillSignal=SIGINT
WorkingDirectory=/home/ubuntu/.steam/steamapps/common/SatisfactoryDedicatedServer
[Install]
WantedBy=multi-user.target
EOF
systemctl enable satisfactory
systemctl start satisfactory
# enable auto shutdown: https://github.com/feydan/satisfactory-tools/tree/main/shutdown
cat << 'EOF' > /home/ubuntu/auto-shutdown.sh
#!/bin/sh
shutdownIdleMinutes=30
idleCheckFrequencySeconds=1
isIdle=0
while [ $isIdle -le 0 ]; do
    isIdle=1
    iterations=$((60 / $idleCheckFrequencySeconds * $shutdownIdleMinutes))
    while [ $iterations -gt 0 ]; do
        sleep $idleCheckFrequencySeconds
        connectionBytes=$(ss -lu | grep 777 | awk -F ' ' '{s+=$2} END {print s}')
        if [ ! -z $connectionBytes ] && [ $connectionBytes -gt 0 ]; then
            isIdle=0
        fi
        if [ $isIdle -le 0 ] && [ $(($iterations % 21)) -eq 0 ]; then
            echo "Activity detected, resetting shutdown timer to $shutdownIdleMinutes minutes."
            break
        fi
        iterations=$(($iterations-1))
    done
done
echo "No activity detected for $shutdownIdleMinutes minutes, shutting down."
sudo shutdown -h now
EOF
chmod +x /home/ubuntu/auto-shutdown.sh
chown ubuntu:ubuntu /home/ubuntu/auto-shutdown.sh
cat << 'EOF' > /etc/systemd/system/auto-shutdown.service
[Unit]
Description=Auto shutdown if no one is playing Satisfactory
After=syslog.target network.target nss-lookup.target network-online.target
[Service]
Environment="LD_LIBRARY_PATH=./linux64"
ExecStart=/home/ubuntu/auto-shutdown.sh
User=ubuntu
Group=ubuntu
StandardOutput=journal
Restart=on-failure
KillSignal=SIGINT
WorkingDirectory=/home/ubuntu
[Install]
WantedBy=multi-user.target
EOF
systemctl enable auto-shutdown
systemctl start auto-shutdown
# automated backups to s3 every 5 minutes
# Preserve any existing crontab entries: the original "crontab -l -e ubuntu"
# is an invalid flag combination that fails, which silently replaced the whole
# crontab with just the backup line. "crontab -l" exits non-zero when the user
# has no crontab yet, so silence it and continue.
su - ubuntu -c "crontab -l 2>/dev/null | { cat; echo \"*/5 * * * * /usr/local/bin/aws s3 sync /home/ubuntu/.config/Epic/FactoryGame/Saved/SaveGames/server s3://$S3_SAVE_BUCKET\"; } | crontab -"
|
-- Seed data: one default PLM configuration.
INSERT INTO PLM_CONFIG(ID,CODE,NAME) VALUES(1,'default','默认配置');
-- Workflow steps for config 1, ordered by PRIORITY. ACTION values are
-- none/start/stop/complete (their semantics are defined by the application).
INSERT INTO PLM_STEP(ID,CODE,NAME,ACTION,PRIORITY,CONFIG_ID) VALUES(1,'request','需求池','none',1,1);
INSERT INTO PLM_STEP(ID,CODE,NAME,ACTION,PRIORITY,CONFIG_ID) VALUES(2,'preDev','准备开发','none',2,1);
INSERT INTO PLM_STEP(ID,CODE,NAME,ACTION,PRIORITY,CONFIG_ID) VALUES(3,'dev','开发中','start',3,1);
INSERT INTO PLM_STEP(ID,CODE,NAME,ACTION,PRIORITY,CONFIG_ID) VALUES(4,'preTest','准备测试','stop',4,1);
INSERT INTO PLM_STEP(ID,CODE,NAME,ACTION,PRIORITY,CONFIG_ID) VALUES(5,'test','测试中','start',5,1);
INSERT INTO PLM_STEP(ID,CODE,NAME,ACTION,PRIORITY,CONFIG_ID) VALUES(6,'preConfirm','准备验证','stop',6,1);
INSERT INTO PLM_STEP(ID,CODE,NAME,ACTION,PRIORITY,CONFIG_ID) VALUES(7,'confirm','验证完成','complete',7,1);
-- One sample active sprint bound to project 11 and config 1.
INSERT INTO PLM_SPRINT(ID,NAME,START_TIME,END_TIME,STATUS,PROJECT_ID,CONFIG_ID) VALUES(1,'global-1.0.0.0','2015-01-01 00:00:00','2015-01-31 00:00:00','active',11,1);
|
<reponame>Duncannn/DataProject<filename>Python/scraper.py
# Yahoo Finance scraper by <NAME>
import os
import sys
import csv
import codecs
import cStringIO
import errno
from pattern.web import URL, DOM, plaintext
# ----------------------------------------------------------------------------------------
# Base URL for Yahoo Finance option-chain pages; ticker + date are appended.
TARGET_URL = "http://finance.yahoo.com/q/op?s="
# Ticker fragments; each already includes the "&date=" separator so a
# maturity timestamp can be concatenated directly.
stocks = ["AMZN&date=", "AAPL&date=", "BIDU&date=", "CL&date=", "COST&date=", "GS&date=",
          "IBM&date=", "MA&date=", "NTES&date=", "NFLX&date=", "RL&date=", "WYNN&date="]
# (human-readable date, Unix timestamp) pairs for the option maturities.
MATURITY_DATES = [("January 2, 2015", '1420156800'), ("January 9, 2015",'1420761600'),
                  ("January 17, 2015",'1421452800'), ("January 23, 2015",'1421971200'),
                  ("January 30, 2015", '1422576000')]
# Directory containing this script; the output CSV is written next to it.
SCRIPT_DIR = os.path.split(os.path.realpath(__file__))[0]
# ----------------------------------------------------------------------------------------
class UTF8Recoder(object):
    """
    Iterator that reads an encoded stream and reencodes the input to UTF-8
    """

    def __init__(self, f, encoding):
        # Wrap the raw stream in a decoding reader for the source encoding.
        self.reader = codecs.getreader(encoding)(f)

    def __iter__(self):
        return self

    def next(self):
        # Python 2 iterator protocol: emit each decoded line as UTF-8 bytes.
        return self.reader.next().encode("utf-8")
class UnicodeReader(object):
    """
    A CSV reader which will iterate over lines in the CSV file "f",
    which is encoded in the given encoding.
    """

    def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
        # Recode the input to UTF-8 first; Python 2's csv module is
        # byte-oriented and only handles UTF-8 safely.
        f = UTF8Recoder(f, encoding)
        self.reader = csv.reader(f, dialect=dialect, **kwds)

    def next(self):
        # Decode each field back to unicode for the caller.
        row = self.reader.next()
        return [unicode(s, "utf-8") for s in row]

    def __iter__(self):
        return self
class UnicodeWriter(object):
    """
    A CSV writer which will write rows to CSV file "f",
    which is encoded in the given encoding.

    Rows are first serialized as UTF-8 into an in-memory queue, then
    transcoded to the target encoding and flushed to the stream.
    """

    def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
        # Redirect output to a queue
        self.queue = cStringIO.StringIO()
        self.writer = csv.writer(self.queue, dialect=dialect, **kwds)
        self.stream = f
        self.encoder = codecs.getincrementalencoder(encoding)()

    def writerow(self, row):
        # row must contain unicode strings (they are encoded to UTF-8 here).
        self.writer.writerow([s.encode("utf-8") for s in row])
        # Fetch UTF-8 output from the queue ...
        data = self.queue.getvalue()
        data = data.decode("utf-8")
        # ... and reencode it into the target encoding
        data = self.encoder.encode(data)
        # write to the target stream
        self.stream.write(data)
        # empty queue
        self.queue.truncate(0)

    def writerows(self, rows):
        for row in rows:
            self.writerow(row)
# ----------------------------------------------------------------------------------------
def create_dir(directory):
    '''
    Create a directory (and any missing parents) if it does not exist yet.

    Args:
        directory: string, path of directory to be made

    Note: the backup directory is used to save the HTML of the pages you
    crawl.
    '''
    try:
        # Bug fix: the original ignored the argument and hard-coded a
        # developer-specific path ("/Users/duncanbarker/Desktop").
        os.makedirs(directory)
    except OSError as e:
        if e.errno == errno.EEXIST:
            # Directory already exists, no problem for this script,
            # just ignore the exception and carry on.
            pass
        else:
            # All other errors are not handled, so the exception is
            # re-raised and the script will crash here.
            raise
def save_csv(filename, rows):
    '''
    Save the scraped option-chain rows to a CSV file.

    Args:
        filename: string filename for the CSV file
        rows: list of rows to be saved (one row per call/put option chain)
    '''
    with open(filename, 'wb') as f:
        writer = UnicodeWriter(f) # implicitly UTF-8
        # Leading blank line, then the data rows.
        writer.writerow([])
        writer.writerows(rows)
# ----------------------------------------------------------------------------------------
def extract_prices(dom):
    '''
    Extract call and put strike data from a Yahoo Finance option-chain DOM.

    Args:
        dom: pattern.web DOM of an option-chain page.

    Returns:
        (call_strike, put_strike): two flat lists alternating the strike link
        text and the associated option_entry cell content.
    '''
    call_strike = []
    put_strike = []
    # Loop over the calls and append the strikes
    for call in dom.by_id("optionsCallsTable").by_tag("table.details-table quote-table Fz-m")[:]:
        for strike in call.by_tag("tr")[2:]:  # skip the first two rows (presumably headers)
            call_strike.append(strike.by_tag("a")[0].content.encode('utf8'))
            call_strike.append(strike.by_tag("div.option_entry Fz-m")[1].content.encode('utf8'))
    # Loop over the puts and append the strikes
    for put in dom.by_id("optionsPutsTable").by_tag("table.details-table quote-table Fz-m")[:]:
        for strike in put.by_tag("tr")[2:]:  # skip the first two rows (presumably headers)
            put_strike.append(strike.by_tag("a")[0].content.encode('utf8'))
            put_strike.append(strike.by_tag("div.option_entry Fz-m")[1].content.encode('utf8'))
    return call_strike, put_strike
def exparations(stocks, MATURITY_DATES):
    '''
    Scrape call/put strike data for every (stock, maturity) pair and save it
    all to PutCallData.csv next to this script.

    Name is a typo for "expirations"; kept for backward compatibility.
    '''
    rows = []
    # For all stocks and all maturity dates
    for stock in stocks:
        for date in MATURITY_DATES:
            print "Scraping stock "+ stock + date[1]
            # date[1] is the Unix timestamp Yahoo expects in the URL.
            new_URL = TARGET_URL + stock + date[1]
            url = URL(new_URL)
            html = url.download()
            dom = DOM(html)
            # Get prices and append them
            prices = extract_prices(dom)
            rows.append(["CALL " + stock + date[1]] + prices[0])
            rows.append(["PUT " + stock + date[1]] + prices[1])
    print "Saving CSV ..."
    save_csv(os.path.join(SCRIPT_DIR, 'PutCallData.csv'), rows)
# ----------------------------------------------------------------------------------------
if __name__ == '__main__':
    # Scrape option chains for every stock/maturity pair and write the CSV.
    # (The original bound the None return value to a misleadingly named
    # `tvseries` variable; the function returns nothing useful.)
    exparations(stocks, MATURITY_DATES)
|
#! /bin/bash
# Run every unit-test command listed in the PROGS array from the build output
# tree under ${OUTPUT_DIR}/usr/bin.
# NOTE(review): PROGS must be a bash array defined by the caller (e.g. a
# sourcing script) — a plain environment export does not carry bash arrays;
# confirm how this script is invoked.
set -e

# Require a valid output directory.
if [ -z "${OUTPUT_DIR}" ] || [ ! -d "${OUTPUT_DIR}" ]; then exit 1; fi
cd "${OUTPUT_DIR}/usr/bin"
echo ""
iter=0
while (( iter < ${#PROGS[@]} )); do
    UT_CMD=$(eval echo '${PROGS['${iter}']}')
    # The first word of the command is the program whose presence we check.
    UT_PROG=$(echo "${UT_CMD}" | cut -d' ' -f1)
    echo "[${iter}] RUNNING '${UT_CMD}' with '${UT_PROG}'" | grep --color ".*"
    echo ""
    if [ -f "${UT_PROG}" ]; then
        ${UT_CMD}
    else
        echo "${UT_CMD} SPECIFIED BUT ${UT_PROG} NOT FOUND" | grep --color ".*"
    fi
    iter=$(( iter + 1 ))
done
|
// Doxygen-generated navigation index for tests/main.cpp: each entry maps a
// symbol name to its documentation page/anchor.
var tests_2main_8cpp =
[
    [ "CATCH_CONFIG_MAIN", "tests_2main_8cpp.html#a656eb5868e824d59f489f910db438420", null ],
    [ "Simulate", "tests_2main_8cpp.html#a177b86d04626c7a07e91a2a5087d40d8", null ],
    [ "TEST_CASE", "tests_2main_8cpp.html#a1162bbc505e36e62eef48fc16aed2fde", null ]
];
|
<reponame>seqcode/multimds<filename>scripts/sup15.py
# Supplementary figure 15: jointly model GM12878 and K562 chr21 structures,
# align them along the learned compartment axis, and render a highlighted
# sub-region (outputs sup15a.png and sup15b.png).
from multimds import data_tools as dt
import numpy as np
from multimds import compartment_analysis as ca
from sklearn import svm
from multimds import linear_algebra as la
from mayavi import mlab
from multimds import multimds as mm

# Jointly infer 3D structures for the two cell types at 100-kb resolution.
path1 = "hic_data/GM12878_combined_21_100kb.bed"
path2 = "hic_data/K562_21_100kb.bed"
struct1, struct2 = mm.full_mds(path1, path2)

# Compartment scores for GM12878, restricted to bins present in the structure.
contacts1 = dt.matFromBed(path1, struct1)
enrichments1 = np.loadtxt("binding_data/GM12878_21_100kb_active_coverage.bed", usecols=6)
bin_nums1 = struct1.nonzero_abs_indices() + int(struct1.chrom.minPos/struct1.chrom.res)
enrichments1 = enrichments1[bin_nums1]
comps1 = np.array(ca.get_compartments(contacts1, struct1, enrichments1))

# Same for K562.
contacts2 = dt.matFromBed(path2, struct2)
enrichments2 = np.loadtxt("binding_data/K562_21_100kb_active_coverage.bed", usecols=6)
bin_nums2 = struct2.nonzero_abs_indices() + int(struct2.chrom.minPos/struct2.chrom.res)
enrichments2 = enrichments2[bin_nums2]
comps2 = np.array(ca.get_compartments(contacts2, struct2, enrichments2))

# Fit a linear SVR predicting compartment score from 3D coordinates and
# re-express both structures in the coordinate system of its weight vector.
coords1 = struct1.getCoords()
coords2 = struct2.getCoords()
coords = np.concatenate((coords1, coords2))
compartments = np.concatenate((comps1, comps2))
clf = svm.LinearSVR()
clf.fit(coords, compartments)
coef = clf.coef_
transformed_coords1 = np.array(la.change_coordinate_system(coef, coords1))
transformed_coords2 = np.array(la.change_coordinate_system(coef, coords2))
struct1.setCoords(transformed_coords1)
struct2.setCoords(transformed_coords2)

# Trim compartment scores and structures to the 41.9-43.9 Mb window.
index1 = struct1.get_rel_index(41900000)
index2 = struct1.get_rel_index(43900000)
comps1 = comps1[index1:index2+1]
comps2 = comps2[index1:index2+1]
index1 = struct1.chrom.getAbsoluteIndex(41900000)
index2 = struct1.chrom.getAbsoluteIndex(43900000)
struct1.subsamplePoints(index1, index2)
struct2.subsamplePoints(index1, index2)

# Mark the 42.7-42.9 Mb region with a distinct color value.
colors = np.zeros_like(struct1.getPoints(), dtype=int)
index1 = struct1.get_rel_index(42700000)
index2 = struct1.get_rel_index(42900000)
colors[index1:index2] = -1

# Panel A: both structures colored by the region highlight.
mlab.close(all=True)
mlab.figure(bgcolor=(1,1,1))
coords1 = np.array(struct1.getCoords())
mlab.plot3d(coords1[:,0], coords1[:,1], coords1[:,2], colors, colormap="RdYlBu")
coords2 = np.array(struct2.getCoords())
mlab.plot3d(coords2[:,0], coords2[:,1], coords2[:,2], colors, colormap="RdYlGn")
mlab.savefig("sup15a.png")

# Panel B: both structures colored by compartment score.
mlab.close(all=True)
mlab.figure(bgcolor=(1,1,1))
coords1 = np.array(struct1.getCoords())
mlab.plot3d(coords1[:,0], coords1[:,1], coords1[:,2], comps1, colormap="bwr")
coords2 = np.array(struct2.getCoords())
mlab.plot3d(coords2[:,0], coords2[:,1], coords2[:,2], comps2, colormap="bwr")
mlab.savefig("sup15b.png")
|
// Aggregate the API modules behind a single entry point.
export { default as Account } from "@/api/account";
export { default as Prs } from "@/api/prs";
|
import makeGamesRepository from 'shared/domain/repositories/factories/makeGamesRepository';
import CreateGameService from '../CreateGameService';
export default function makeCreateGameService(): CreateGameService {
const gamesRepository = makeGamesRepository();
const createGame = new CreateGameService(gamesRepository);
return createGame;
}
|
package org.om.core.impl.session;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.Assert.assertThat;
import junit.framework.Assert;
import org.junit.Ignore;
import org.junit.Test;
import org.om.core.api.mapping.registry.MappingRegistry;
import org.om.core.api.session.Session;
import org.om.core.impl.mapping.extractor.EntityMappingExtractorImpl;
import org.om.core.impl.mapping.registry.OnDemandMappingRegistry;
import org.om.core.impl.persistence.cglib.CglibProxyFactory;
import org.om.core.impl.persistence.delegate.TestingDelegateFactory;
import org.om.core.impl.persistence.delegate.TestingPersistenceContext;
import org.om.core.impl.persistence.interceptor.factory.PersistenceInterceptorFactoryImpl;
import org.om.core.impl.test.EntityWithPrimitiveProperties;
import org.om.core.impl.test.EntityWithReferenceProperties;
/**
* @author <NAME>
* @author tom
*/
public class ImmutableSessionImplTest {

    /**
     * A session built with a null persistence context still returns a usable
     * proxy for an entity with only primitive properties.
     */
    @Test
    public void testGetWithPrimitiveProperties() {
        MappingRegistry mappingRegistry = new OnDemandMappingRegistry(new EntityMappingExtractorImpl());
        Session session = new ImmutableSessionImpl(null, new TestingDelegateFactory(), mappingRegistry, new CglibProxyFactory(
                new PersistenceInterceptorFactoryImpl()));
        EntityWithPrimitiveProperties entity = session.get(EntityWithPrimitiveProperties.class, "");
        assertThat(entity, notNullValue());
        assertThat(entity instanceof EntityWithPrimitiveProperties, is(true));
    }

    /**
     * Resolving an entity with a reference property also resolves the
     * referenced entity from the persistence context.
     */
    @Test
    public void testGetWithReferenceProperties() {
        MappingRegistry mappingRegistry = new OnDemandMappingRegistry(new EntityMappingExtractorImpl());
        TestingPersistenceContext persistenceContext = new TestingPersistenceContext();
        persistenceContext.addProperty("entityWithPrimitiveProperties", "");
        Session session = new ImmutableSessionImpl(persistenceContext, new TestingDelegateFactory(), mappingRegistry, new CglibProxyFactory(
                new PersistenceInterceptorFactoryImpl()));
        EntityWithReferenceProperties entity = session.get(EntityWithReferenceProperties.class, "");
        assertThat(entity, notNullValue());
        assertThat(entity instanceof EntityWithReferenceProperties, is(true));
        EntityWithPrimitiveProperties referencedEntity = entity.getEntityWithPrimitiveProperties();
        assertThat(referencedEntity, notNullValue());
    }

    /**
     * Round-trip: save an entity, then read it back by id. Rewritten to use
     * the same hamcrest assertions as the other tests instead of the
     * deprecated junit.framework.Assert, and without the debug println.
     */
    @Test
    @Ignore
    public void testSetGetWithPrimitiveProperties() {
        /*
         * some setup
         */
        MappingRegistry mappingRegistry = new OnDemandMappingRegistry(new EntityMappingExtractorImpl());
        Session session = new ImmutableSessionImpl(null, new TestingDelegateFactory(), mappingRegistry, new CglibProxyFactory(
                new PersistenceInterceptorFactoryImpl()));
        assertThat(session, notNullValue());
        /*
         * an entity with an Id
         */
        EntityWithPrimitiveProperties ewpp = new EntityWithPrimitiveProperties();
        ewpp.setId("tge");
        ewpp.setPrimitiveInt(15);
        /*
         * save it
         */
        session.save(ewpp);
        /*
         * get it back
         */
        EntityWithPrimitiveProperties ewpp2 = session.get(EntityWithPrimitiveProperties.class, "tge");
        assertThat(ewpp2, notNullValue());
        assertThat(ewpp2.getId(), is("tge"));
    }
}
|
#!/bin/bash
# Install the Heroku CLI and configure git/API credentials for CI deploys.
git remote add heroku https://git.heroku.com/fathomless-inlet-82408.git
wget https://cli-assets.heroku.com/branches/stable/heroku-linux-amd64.tar.gz
sudo mkdir -p /usr/local/lib /usr/local/bin
sudo tar -xvzf heroku-linux-amd64.tar.gz -C /usr/local/lib
sudo ln -s /usr/local/lib/heroku/bin/heroku /usr/local/bin/heroku

# Store API credentials for both the API and git endpoints.
cat > ~/.netrc << EOF
machine api.heroku.com
  login $HEROKU_LOGIN
  password $HEROKU_API_KEY
machine git.heroku.com
  login $HEROKU_LOGIN
  password $HEROKU_API_KEY
EOF
# ~/.netrc holds secrets: restrict it to the current user.
chmod 600 ~/.netrc

# Relax host-key checking for non-interactive git-over-ssh.
# (-p so a pre-existing ~/.ssh does not abort the script.)
mkdir -p ~/.ssh
touch ~/.ssh/config
cat >> ~/.ssh/config << EOF
VerifyHostKeyDNS yes
StrictHostKeyChecking no
EOF
|
#!/bin/bash
# Copyright 2014 The Kubernetes Authors All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Generate bearer tokens for kubelet / kube-proxy and write the auth,
# kubeconfig and salt pillar files consumed by the salt overlay.
KUBELET_TOKEN=$(dd if=/dev/urandom bs=128 count=1 2>/dev/null | base64 | tr -d "=+/" | dd bs=32 count=1 2>/dev/null)
KUBE_PROXY_TOKEN=$(dd if=/dev/urandom bs=128 count=1 2>/dev/null | base64 | tr -d "=+/" | dd bs=32 count=1 2>/dev/null)

known_tokens_file="/srv/salt-overlay/salt/kube-apiserver/known_tokens.csv"
if [[ ! -f "${known_tokens_file}" ]]; then
  mkdir -p /srv/salt-overlay/salt/kube-apiserver
  known_tokens_file="/srv/salt-overlay/salt/kube-apiserver/known_tokens.csv"
  # Token files are written owner read/write only.
  (umask u=rw,go= ;
  echo "$KUBELET_TOKEN,kubelet,kubelet" > $known_tokens_file;
  echo "$KUBE_PROXY_TOKEN,kube_proxy,kube_proxy" >> $known_tokens_file)

  mkdir -p /srv/salt-overlay/salt/kubelet
  kubelet_auth_file="/srv/salt-overlay/salt/kubelet/kubernetes_auth"
  (umask u=rw,go= ; echo "{\"BearerToken\": \"$KUBELET_TOKEN\", \"Insecure\": true }" > $kubelet_auth_file)

  kubelet_kubeconfig_file="/srv/salt-overlay/salt/kubelet/kubeconfig"
  mkdir -p /srv/salt-overlay/salt/kubelet
  (umask 077;
  cat > "${kubelet_kubeconfig_file}" << EOF
apiVersion: v1
kind: Config
clusters:
- cluster:
    insecure-skip-tls-verify: true
  name: local
contexts:
- context:
    cluster: local
    user: kubelet
  name: service-account-context
current-context: service-account-context
users:
- name: kubelet
  user:
    token: ${KUBELET_TOKEN}
EOF
)

  mkdir -p /srv/salt-overlay/salt/kube-proxy
  kube_proxy_kubeconfig_file="/srv/salt-overlay/salt/kube-proxy/kubeconfig"
  # Make a kubeconfig file with the token.
  # TODO(etune): put apiserver certs into secret too, and reference from authfile,
  # so that "Insecure" is not needed.
  (umask 077;
  cat > "${kube_proxy_kubeconfig_file}" << EOF
apiVersion: v1
kind: Config
clusters:
- cluster:
    insecure-skip-tls-verify: true
  name: local
contexts:
- context:
    cluster: local
    user: kube-proxy
  name: service-account-context
current-context: service-account-context
users:
- name: kube-proxy
  user:
    token: ${KUBE_PROXY_TOKEN}
EOF
)

  # Generate tokens for other "service accounts". Append to known_tokens.
  #
  # NB: If this list ever changes, this script actually has to
  # change to detect the existence of this file, kill any deleted
  # old tokens and add any new tokens (to handle the upgrade case).
  service_accounts=("system:scheduler" "system:controller_manager" "system:logging" "system:monitoring" "system:dns")
  for account in "${service_accounts[@]}"; do
    token=$(dd if=/dev/urandom bs=128 count=1 2>/dev/null | base64 | tr -d "=+/" | dd bs=32 count=1 2>/dev/null)
    echo "${token},${account},${account}" >> "${known_tokens_file}"
  done
fi

readonly BASIC_AUTH_FILE="/srv/salt-overlay/salt/kube-apiserver/basic_auth.csv"
if [ ! -e "${BASIC_AUTH_FILE}" ]; then
  mkdir -p /srv/salt-overlay/salt/kube-apiserver
  (umask 077;
  echo "${KUBE_PASSWORD},${KUBE_USER},admin" > "${BASIC_AUTH_FILE}")
fi

# Create the overlay files for the salt tree. We create these in a separate
# place so that we can blow away the rest of the salt configs on a kube-push and
# re-apply these.
mkdir -p /srv/salt-overlay/pillar
# Fix: the original used ${VAR:false} / ${VAR:true}, which is *substring*
# expansion (offset 0), not a default — an unset variable expanded to the
# empty string. ${VAR:-default} matches ENABLE_CLUSTER_MONITORING below.
cat <<EOF >/srv/salt-overlay/pillar/cluster-params.sls
instance_prefix: '$(echo "$INSTANCE_PREFIX" | sed -e "s/'/''/g")'
node_instance_prefix: $NODE_INSTANCE_PREFIX
service_cluster_ip_range: $SERVICE_CLUSTER_IP_RANGE
enable_cluster_monitoring: "${ENABLE_CLUSTER_MONITORING:-none}"
enable_cluster_logging: "${ENABLE_CLUSTER_LOGGING:-false}"
enable_cluster_ui: "${ENABLE_CLUSTER_UI:-true}"
enable_node_logging: "${ENABLE_NODE_LOGGING:-false}"
logging_destination: $LOGGING_DESTINATION
elasticsearch_replicas: $ELASTICSEARCH_LOGGING_REPLICAS
enable_cluster_dns: "${ENABLE_CLUSTER_DNS:-false}"
dns_replicas: ${DNS_REPLICAS:-1}
dns_server: $DNS_SERVER_IP
dns_domain: $DNS_DOMAIN
e2e_storage_test_environment: "${E2E_STORAGE_TEST_ENVIRONMENT:-false}"
cluster_cidr: "$NODE_IP_RANGES"
allocate_node_cidrs: "${ALLOCATE_NODE_CIDRS:-true}"
admission_control: NamespaceLifecycle,LimitRanger,SecurityContextDeny,ServiceAccount,ResourceQuota
EOF

mkdir -p /srv/salt-overlay/salt/nginx
echo $MASTER_HTPASSWD > /srv/salt-overlay/salt/nginx/htpasswd
|
<reponame>gcusnieux/jooby<filename>modules/jooby-scanner/src/test/java/app/ns/GuiceModule.java
package app.ns;
import com.google.inject.Binder;
import com.google.inject.Module;
/**
 * Guice module used by the scanner test application; it registers a
 * binding for itself so the injector can resolve this module type.
 */
public class GuiceModule implements Module {

  /** Contributes this module's bindings to the given binder. */
  @Override
  public void configure(Binder b) {
    b.bind(GuiceModule.class);
  }
}
|
# Build the ClassiCube Android client: cross-compile the native C sources for
# four ABIs with the NDK, then assemble, sign and align the .apk by hand with
# the raw SDK build tools (no gradle).
FLAGS="-fPIC -shared -s -O1"
LIBS="-lGLESv2 -lEGL -lOpenSLES -lm -landroid -llog"
NDK_ROOT="/home/buildbot/android/android-ndk-r22/toolchains/llvm/prebuilt/linux-x86_64/bin"
TOOLS_ROOT="/home/buildbot/android/sdk/build-tools/26.0.0"
SDK_ROOT="/home/buildbot/android/sdk/platforms/android-26"
# One shared library per target ABI (API level 26 clang wrappers).
cd /home/buildbot/client/src
$NDK_ROOT/i686-linux-android26-clang *.c $FLAGS $LIBS -o cc-droid-x86_32
$NDK_ROOT/aarch64-linux-android26-clang *.c $FLAGS $LIBS -o cc-droid-arm_64
$NDK_ROOT/x86_64-linux-android26-clang *.c $FLAGS $LIBS -o cc-droid-x86_64
$NDK_ROOT/armv7a-linux-androideabi26-clang *.c $FLAGS $LIBS -o cc-droid-arm_32
cd ../android/app/src/main
# remove old java temp files
rm -rf obj
mkdir obj
# NOTE(review): plain `rm` prints an error when classes.dex is absent on a
# clean build; harmless, but `rm -f` would be quieter.
rm classes.dex
# copy required native libraries
rm -rf lib
mkdir lib
mkdir lib/armeabi-v7a
mkdir lib/arm64-v8a
mkdir lib/x86
mkdir lib/x86_64
cp ~/client/src/cc-droid-arm_32 lib/armeabi-v7a/libclassicube.so
cp ~/client/src/cc-droid-arm_64 lib/arm64-v8a/libclassicube.so
cp ~/client/src/cc-droid-x86_32 lib/x86/libclassicube.so
cp ~/client/src/cc-droid-x86_64 lib/x86_64/libclassicube.so
# compile interop java file into its multiple .class files
javac java/com/classicube/MainActivity.java -d ./obj -classpath $SDK_ROOT/android.jar
# compile the multiple .class files into one .dex file
$TOOLS_ROOT/dx --dex --output=obj/classes.dex ./obj
# create initial .apk with packaged version of resources
$TOOLS_ROOT/aapt package -f -M AndroidManifest.xml -S res -F obj/cc-unsigned.apk -I $SDK_ROOT/android.jar
# and add all the required files
cp obj/classes.dex classes.dex
$TOOLS_ROOT/aapt add -f obj/cc-unsigned.apk classes.dex lib/armeabi-v7a/libclassicube.so lib/arm64-v8a/libclassicube.so lib/x86/libclassicube.so lib/x86_64/libclassicube.so
# sign the apk with debug key (https://stackoverflow.com/questions/16711233/)
cp obj/cc-unsigned.apk obj/cc-signed.apk
jarsigner -verbose -keystore debug.keystore -storepass android -keypass android obj/cc-signed.apk androiddebugkey
# create aligned .apk file
$TOOLS_ROOT/zipalign -f -v 4 obj/cc-signed.apk obj/cc-final.apk
|
package HexaGhost;
import Model.Cell;
/**
 * Strategy interface for a HexaGhost player: an implementation decides
 * which cell to play on its turn.
 */
public interface HGStrategy {
    // Returns the cell chosen for the current move.
    public Cell play();
}
|
import React from "react";
import "./fotter.css";
import logofotter from "./logo.png";
import instagramlogo from "./instagramlogo.png";
import { Link } from "react-router-dom";
// Site footer: logo linking home, copyright line with a privacy-policy link,
// and an Instagram social icon. NOTE(review): "Fotter" is a misspelling of
// "Footer"; kept as-is because the default export name may be imported
// elsewhere. The «<NAME>» token looks like an anonymization placeholder
// from the source dump — confirm before shipping.
const Fotter = () => (
  <div className="footer">
    <div className="main_container">
      <Link className="logo" to="/">
        <img src={logofotter} alt="logo" />
      </Link>
      <div className="footer_centr">
        <div className="copyright">
          © Copyright 2021 ООО «<NAME>» Все права защищены. Возраст 6+
        </div>
        <a href="/#">Политика обработки персональных данных</a>
      </div>
      <div className="social_footer">
        <a className="social" href="https://www.instagram.com/" target="blank">
          <img src={instagramlogo} alt="insta"></img>
        </a>
      </div>
    </div>
  </div>
);

export default Fotter;
|
#!/bin/bash
#/* ****************************************************
# * FileName: gmsg.sh
# * Author : ghostwwl
# * Date : 2015
# * Note : message output for posix shell
# * ***************************************************/
function color_msg ()
{
    # Print $1 in the ANSI color named by $2 (default: black), then reset.
    # $1: message text (default "No message passed.")
    # $2: color name matching one of the variables below
    # Fix: escape-sequence variables are now `local`, so they no longer leak
    # into the caller's environment.
    local black='\033[0m'
    local boldblack='\033[1;0m'
    local red='\033[31m'
    local boldred='\033[1;31m'
    local green='\033[32m'
    local boldgreen='\033[1;32m'
    local yellow='\033[33m'
    local boldyellow='\033[1;33m'
    local blue='\033[34m'
    local boldblue='\033[1;34m'
    local magenta='\033[35m'
    local boldmagenta='\033[1;35m'
    local cyan='\033[36m'
    local boldcyan='\033[1;36m'
    local white='\033[37m'
    local boldwhite='\033[1;37m'
    local default_msg="No message passed."
    local message=${1:-$default_msg}
    local color=${2:-black}

    # Indirect expansion replaces the original 16-branch case statement; an
    # unknown color name expands to the empty string, which matches the
    # original's behavior when no case branch matched.
    printf "${!color}"
    printf "%s\n" "$message"
    tput sgr0 # Reset to normal.
    printf "$black"
    return
}
function message()
{
    # Print a timestamped, colorized log line via color_msg.
    # $1: message text
    # $2: level 1..5 -> INFO/WARNING/ERROR/DEBUG/FATAL (default 1)
    local dtag=`date '+%Y-%m-%d %H:%M:%S'`
    local LEVE_KEY=('INFO' 'WARNING' 'ERROR' 'DEBUG' 'FATAL')
    local level=${2:-1}
    local leve_str=${LEVE_KEY[0]}
    # Fix: the operand must be quoted — unquoted `[ -n $level ]` is always
    # true because it degenerates to a one-argument test.
    if [ -n "$level" ]; then
        leve_str=${LEVE_KEY[`expr $level - 1`]}
    fi
    local Msg=$(printf "%-7s %s] %s\n" "${leve_str}" "${dtag}" "$1")
    case $level in
        "1")
            color_msg "$Msg" cyan
            ;;
        "2")
            color_msg "$Msg" green
            ;;
        "3")
            color_msg "$Msg" boldred
            ;;
        "4")
            color_msg "$Msg" yellow
            ;;
        "5")
            color_msg "$Msg" boldmagenta
            ;;
    esac
    return
}
function seg()
{
    # Segment Chinese text with the jieba tokenizer and log the result.
    # $1: sentence to segment (default "1")
    # Fix: the original assigned `sencents` but piped `$sencent` (typo), so
    # the tokenizer always received an empty string.
    local sentence=${1:-1}
    local result=$(echo "$sentence" | python3 -m jieba)
    message "$result"
}
|
#!/usr/bin/env bats
# Copyright 2016 tsuru authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
# bats per-test setup: wipe any previous ruby install and app dir, recreate
# the app dir owned by ubuntu, and put the platform ruby first on PATH.
setup() {
rm -rf /home/application/ruby
rm -rf /home/application/current && mkdir /home/application/current
chown ubuntu /home/application/current
export CURRENT_DIR=/home/application/current
export PATH=/home/application/ruby/bin:${PATH}
}
# The base image must provide the system ruby and build-essential packages.
@test "installs ruby" {
dpkg -s ruby | grep "install ok installed"
}
@test "installs build-essential" {
dpkg -s build-essential | grep "install ok installed"
}
# With no version hints, the deploy script installs its default ruby (2.4.6).
@test "install ruby version 2.4.6 as default" {
run /var/lib/tsuru/deploy
run /home/application/ruby/bin/ruby --version
[ "$status" -eq 0 ]
[[ "$output" == *"2.4.6"* ]]
}
# RUBY_VERSION in the environment overrides the default.
@test "install specific ruby version" {
export RUBY_VERSION="2.6.3"
run /var/lib/tsuru/deploy
run /home/application/ruby/bin/ruby --version
[ "$status" -eq 0 ]
[[ "$output" == *"2.6.3"* ]]
}
# An unknown version must abort the deploy with a clear error message.
@test "deploy fails on invalid ruby version" {
export RUBY_VERSION="ABC"
run /var/lib/tsuru/deploy
[ "$status" -eq 1 ]
[[ "$output" == *"ERROR: ruby version ABC is not supported."* ]]
}
# When SUPPORTED_VERSIONS is set, the failure message lists the options.
@test "display supported versions if set" {
export RUBY_VERSION="ABC"
export SUPPORTED_VERSIONS="1.1.1, 1.2, 1.3"
run /var/lib/tsuru/deploy
[ "$status" -eq 1 ]
[[ "$output" == *"Supported versions are: 1.1.1, 1.2, 1.3"* ]]
}
# A .ruby-version file in the app dir selects the interpreter version.
@test "parse ruby version from .ruby-version" {
echo "ruby-2.5.5" > ${CURRENT_DIR}/.ruby-version
run /var/lib/tsuru/deploy
run /home/application/ruby/bin/ruby --version
[ "$status" -eq 0 ]
[[ "$output" == *"2.5.5"* ]]
rm ${CURRENT_DIR}/.ruby-version
}
# ruby >= 2.6 ships its own bundler, which the platform should expose.
@test "using bundler within ruby package when version >=2.6.0" {
export RUBY_VERSION="2.6.0"
run /var/lib/tsuru/deploy
run /home/application/ruby/bin/bundler --version
[ "$status" -eq 0 ]
[[ "$output" == "Bundler version "* ]]
}
# First deploy installs gems from the Gemfile; a second deploy reuses them,
# and bundler is pinned to the Gemfile.lock's BUNDLED WITH version (< 2.6).
@test "bundle install when provide Gemfile and reuse already installed gem" {
echo "ruby-2.4.3" > ${CURRENT_DIR}/.ruby-version
echo "source 'https://rubygems.org'" > ${CURRENT_DIR}/Gemfile
echo "gem 'hello-world', '1.2.0'" >> ${CURRENT_DIR}/Gemfile
cat <<EOF>${CURRENT_DIR}/Gemfile.lock
GEM
remote: https://rubygems.org/
specs:
hello-world (1.2.0)
PLATFORMS
ruby
DEPENDENCIES
hello-world (= 1.2.0)
BUNDLED WITH
1.13.7
EOF
run /var/lib/tsuru/deploy
[ "$status" -eq 0 ]
[[ "$output" == *"Installing hello-world"* ]]
run /var/lib/tsuru/deploy
[[ "$output" == *"Using hello-world"* ]]
run /home/application/ruby/bin/bundler --version
[ "$status" -eq 0 ]
[[ "$output" == "Bundler version 1.13.7" ]]
}
# For ruby >= 2.6 the BUNDLED WITH pin is honored via the bundled bundler
# rather than a separately vendored one.
@test "using bundle inside Gemfile.lock and ignore bundle vendoring version for ruby >= 2.6" {
echo "ruby-2.6.3" > ${CURRENT_DIR}/.ruby-version
echo "source 'https://rubygems.org'" > ${CURRENT_DIR}/Gemfile
echo "gem 'hello-world', '1.2.0'" >> ${CURRENT_DIR}/Gemfile
cat <<EOF>${CURRENT_DIR}/Gemfile.lock
GEM
remote: https://rubygems.org/
specs:
hello-world (1.2.0)
PLATFORMS
ruby
DEPENDENCIES
hello-world (= 1.2.0)
BUNDLED WITH
2.0.1
EOF
run /var/lib/tsuru/deploy
[ "$status" -eq 0 ]
[[ "$output" == *"Installing hello-world"* ]]
run /var/lib/tsuru/deploy
[[ "$output" == *"Using hello-world"* ]]
run /home/application/ruby/bin/bundler --version
[ "$status" -eq 0 ]
[[ "$output" == "Bundler version 2.0.1" ]]
}
# Without a BUNDLED WITH section the platform falls back to a bundler 2.x.
@test "bundle install when provide Gemfile with no bundled with section" {
echo "ruby-2.5.4" > ${CURRENT_DIR}/.ruby-version
echo "source 'https://rubygems.org'" > ${CURRENT_DIR}/Gemfile
echo "gem 'hello-world', '1.2.0'" >> ${CURRENT_DIR}/Gemfile
cat <<EOF>${CURRENT_DIR}/Gemfile.lock
GEM
remote: https://rubygems.org/
specs:
hello-world (1.2.0)
PLATFORMS
ruby
DEPENDENCIES
hello-world (= 1.2.0)
EOF
run /var/lib/tsuru/deploy
[ "$status" -eq 0 ]
[[ "$output" == *"Installing hello-world"* ]]
run /var/lib/tsuru/deploy
[[ "$output" == *"Using hello-world"* ]]
run /home/application/ruby/bin/bundler --version
[ "$status" -eq 0 ]
[[ "$output" == "Bundler version 2."* ]]
}
|
#!/bin/sh
# Build and run the gandalf prototype integration suite with docker-compose,
# tearing the stack down again when the script exits.
set -euf
# Build all images, removing intermediate containers.
build() {
docker-compose build --force-rm
}
# Start the mock service detached.
mmock() {
docker-compose up --remove-orphans -d mock
}
# Run the gandalf service; the script's exit code mirrors the container's.
run() {
docker-compose up --force-recreate --remove-orphans --exit-code-from gandalf gandalf
}
# Stop and remove containers and volumes.
down() {
docker-compose down -v --remove-orphans
docker-compose rm -svf
}
# EXIT-trap handler: tear down and clear the compose environment.
cleanup() {
down
export COMPOSE_FILE=
export COMPOSE_PROJECT_NAME=
}
# Point compose at the prototype stack and register cleanup on exit.
setup() {
export COMPOSE_FILE=examples/prototype/docker-compose.yml
export COMPOSE_PROJECT_NAME=gandalf
trap cleanup EXIT
}
setup && build && mmock && run
|
<reponame>jogoes/caradverts
package repository.jooq
import java.sql.Date
import java.time.LocalDate
import org.scalatest.{FlatSpec, Matchers}
/**
 * Unit tests for SqlDateToLocalDateConverter, covering both conversion
 * directions plus null handling.
 *
 * NOTE(review): 1477954800000L is 2016-10-31T23:00:00Z, i.e. 2016-11-01
 * only in a UTC+1 zone — these expectations look timezone-dependent;
 * confirm the build runs in a fixed default zone.
 */
class SqlDateToLocalDateConverterSpec extends FlatSpec with Matchers {

  private val converter = new SqlDateToLocalDateConverter

  "SqlDateToLocalDateConverter" should "convert SqlDate to LocalDate" in {
    val sqlDate = new Date(1477954800000L)
    converter.from(sqlDate) shouldBe LocalDate.of(2016, 11, 1)
  }

  // Fix: the null-test descriptions were swapped — `from` converts
  // SqlDate -> LocalDate and `to` converts LocalDate -> SqlDate.
  it should "convert null SqlDate to null LocalDate" in {
    converter.from(null) shouldBe null
  }

  it should "convert LocalDate to SqlDate" in {
    converter.to(LocalDate.of(2016, 11, 1)) shouldBe new Date(1477954800000L)
  }

  it should "convert null LocalDate to null SqlDate" in {
    converter.to(null) shouldBe null
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.