text stringlengths 1 1.05M |
|---|
import numpy as np
class ReinforcementLearningAgent:
    """Skeleton for an epsilon-greedy reinforcement-learning agent.

    Both environment interaction (``sample``) and the learning step
    (``update``) are left as ``...`` stubs to be filled in.
    """

    def __init__(self):
        # Exploration rate for an epsilon-greedy action-selection policy.
        self.epsilon = 0.1

    # NOTE(review): @lab_api is not defined in this file — presumably a
    # framework decorator marking the public agent API; confirm its origin.
    @lab_api
    def sample(self):
        # Simulates a sample from the environment
        # Implement logic to sample from the environment and return the observed state
        observed_state = ...  # Implement the logic to obtain the observed state
        return observed_state

    @lab_api
    def update(self):
        # Performs an update step in the learning algorithm
        # Implement the learning algorithm update logic
        # For example, using Q-learning or SARSA
        updated_value = ...  # Implement the logic to update the learning algorithm
        return updated_value
#!/bin/bash
# Convert every SVG under svg/dark and svg/light to a PNG in ../png,
# delegating the actual conversion to ./convert_svg_to_png.sh.
#
# Fixes over the original:
#  - removed unused `which basename` / `which sed` lookups;
#  - replaced parsing of `ls` output (breaks on spaces/newlines in names)
#    with shell globbing;
#  - quoted all expansions.
PATHS=( "svg/dark" "svg/light" )
output_path="../png"
for input_path in "${PATHS[@]}"; do
  for svg_file in "${input_path}"/*.svg; do
    # When a directory has no .svg files the literal pattern is returned; skip it.
    [ -e "${svg_file}" ] || continue
    output_filename="${svg_file##*/}"            # strip directory
    output_filename="${output_filename%.svg}"    # strip extension
    output_filename="${output_path}/${output_filename}.png"
    echo "Converting ${svg_file} to ${output_filename} ..."
    ./convert_svg_to_png.sh "${svg_file}" "${output_filename}"
  done
done
|
from django.contrib import admin
from django.urls import path
from . import views
urlpatterns = [
    # Blog index page listing all posts.
    path('', views.index, name='index'),
    # Post detail keyed by the post's integer primary key, e.g. /3/.
    path('<int:post_id>/', views.post_detail, name='post_detail'),
    # Django admin site (safe after the int route: "admin" never matches <int:...>).
    path('admin/', admin.site.urls),
]
<!-- templates/index.html -->
<h1>Blog Posts</h1>
<ul>
{% for post in posts %}
<li><a href="{% url 'post_detail' post.id %}">{{ post.title }}</a></li>
{% endfor %}
</ul>
<!-- templates/post_detail.html -->
<h1>{{ post.title }}</h1>
<p>{{ post.body }}</p> |
# Evaluate selected YouTube-8M frame-level LSTM checkpoints on the validation set.
GPU_ID=1
# Only evaluate every EVERY-th checkpoint (selection done by select.py).
EVERY=1000
MODEL=LstmRandomModel
MODEL_DIR="../model/frame_level_lstm_random_model"
# Skip checkpoints at or below this step number.
start=0
DIR="$(pwd)"
# select.py lists checkpoint step numbers found in MODEL_DIR, thinned to EVERY.
for checkpoint in $(cd $MODEL_DIR && python ${DIR}/training_utils/select.py $EVERY); do
echo $checkpoint;
if [[ $checkpoint -gt $start ]]; then
echo $checkpoint;
# Run one evaluation pass pinned to the chosen GPU.
CUDA_VISIBLE_DEVICES=$GPU_ID python eval.py \
--train_dir="$MODEL_DIR" \
--model_checkpoint_path="${MODEL_DIR}/model.ckpt-${checkpoint}" \
--eval_data_pattern="/Youtube-8M/data/frame/validate/validatea*" \
--frame_features=True \
--feature_names="rgb,audio" \
--feature_sizes="1024,128" \
--batch_size=64 \
--model=$MODEL \
--video_level_classifier_model=MoeModel \
--moe_num_mixtures=8 \
--train=False \
--run_once=True
fi
done
|
import mx from '../../multiplex';
import * as mocks from './_mocks';
import {
qmodule,
qtest
} from '../../qunit';
qmodule('linq-sum');
// Selector used by the tests below: returns twice the given numeric value.
function double(value) {
    return value * 2;
}
// Reference implementation of "sum": adds up the items of an array,
// mapping each item through `selector` first when one is provided.
// Returns 0 for an empty array.
function calcsum(items, selector) {
    return items.reduce(function (total, item) {
        return total + (selector ? selector(item) : item);
    }, 0);
}
// Basic behaviour: sum over a plain array, with and without a selector,
// and the empty-array convention (sum of nothing is 0).
qtest('basic "sum" test', function (assert) {
    assert.equal(mx(mocks.array).sum(), calcsum(mocks.array), 'sum element of an array of numbers');
    assert.equal(mx(mocks.array).sum(double), calcsum(mocks.array, double), 'sum element of an array of numbers with selector');
    assert.equal(mx([]).sum(), 0, 'sum of empty array');
    assert.equal(mx([]).sum(double), 0, 'sum of empty array with selector');
});
// Verifies sum() across every collection type in the mocks module, comparing
// against the calcsum reference. Keyed collections (Map, Dictionary, Lookup,
// SortedList) need a selector to extract the numeric key/first element.
qtest('collections "sum" method tests', function (assert) {
    var s1 = calcsum(mocks.array);
    var s2 = calcsum(mocks.array, double);
    assert.equal(mocks.enumerable.sum(), s1, 'sum element in an enumerable');
    assert.equal(mocks.enumerable.sum(double), s2, 'sum element in an enumerable with predicate');
    assert.equal(mocks.collection.sum(), s1, 'sum element in a Collection');
    assert.equal(mocks.collection.sum(double), s2, 'sum element in a Collection with predicate');
    assert.equal(mocks.list.sum(), s1, 'sum element in a List');
    assert.equal(mocks.list.sum(double), s2, 'sum element in a List with predicate');
    assert.equal(mocks.readOnlyCollection.sum(), s1, 'sum element in a ReadOnlyCollection');
    assert.equal(mocks.readOnlyCollection.sum(double), s2, 'sum element in a ReadOnlyCollection with predicate');
    assert.equal(mocks.linkedList.sum(), s1, 'sum element in a LinkedList');
    assert.equal(mocks.linkedList.sum(double), s2, 'sum element in a LinkedList with predicate');
    assert.equal(mocks.hashSet.sum(), s1, 'sum element in a HashSet');
    assert.equal(mocks.hashSet.sum(double), s2, 'sum element in a HashSet with predicate');
    assert.equal(mocks.stack.sum(), s1, 'sum element in a Stack');
    assert.equal(mocks.stack.sum(double), s2, 'sum element in a Stack with predicate');
    assert.equal(mocks.queue.sum(), s1, 'sum element in a Queue');
    assert.equal(mocks.queue.sum(double), s2, 'sum element in a Queue with predicate');
    assert.equal(mocks.set.sum(), s1, 'sum element in a Set');
    assert.equal(mocks.set.sum(double), s2, 'sum element in a Set with predicate');
    // Map entries are [key, value] pairs; sum over the key (index 0).
    assert.equal(mocks.map.sum(function (t) {
        return t[0];
    }), s1, 'sum element in a Map');
    assert.equal(mocks.dictionary.sum(function (t) {
        return t.key;
    }), s1, 'sum element in a Dictionary');
    assert.equal(mocks.lookup.sum(function (t) {
        return t.key;
    }), s1, 'sum element in a Lookup');
    assert.equal(mocks.sortedList.sum(function (t) {
        return t.key;
    }), s1, 'sum element in a SortedList');
});
// Argument validation: sum() must throw for a non-function selector and
// for elements that cannot be summed numerically.
qtest('"sum" method validations', function (assert) {
    assert.throws(function () {
        mx([1]).sum(1);
    }, 'non function selector');
    assert.throws(function () {
        mx(['a']).sum();
    }, 'non numeric iterator');
});
# Run a single-container Kafka 2.3 lab image (CentOS 7, 3 brokers in one box):
# ZooKeeper on 2181, the three brokers on 9092-9094, hostname "kafka".
docker run -i -p 2181:2181 -p 9092:9092 -p 9093:9093 -p 9094:9094 --name kafka2.3-3in1 -h kafka learningjournal/kafka:kafka2.3-3in1-centos7
|
<filename>Modeling_WebApp/ModelingApp/src/views/toolbar.js
import { ApplicationSettingsDialogConfig } from "../config/actionDialogConfig.mjs";
import EntityTypes from "../config/entityTypes.mjs";
import UIModalDialog from "../representations/guiElements.dialog.mjs";
import ToolbarTools from "../representations/guiElements.toolbarTools.mjs";
import ModalDialog from "./modalDialog.mjs";
class Toolbar extends joint.mvc.View {
"use strict";
currentSystemName = ""; // TODO keep here?
appTools = [];
entityTools = [];
additionalRowTools = [];
#applicationModalDialog = {};
#toolbarButtonActionConfig = {};
constructor(documentElement, currentPaper, toolbarConfig, modalDialog, currentSystemName) { // TODO get current applicationName
if (!(currentPaper instanceof joint.dia.Paper)) {
throw new TypeError("Toolbar: The provided current paper has to be a joint.dia.Paper element");
}
super({
className: "toolbar",
el: documentElement,
paper: currentPaper,
graph: currentPaper.model,
appTools: toolbarConfig && toolbarConfig.Tools ? toolbarConfig.Tools : [],
entityTools: toolbarConfig && toolbarConfig.EntityConfig ? toolbarConfig.EntityConfig : [],
additionalRowToolsConfig: toolbarConfig && toolbarConfig.ToolbarRowConfig ? toolbarConfig.ToolbarRowConfig : [],
toolbarButtonActionConfig: toolbarConfig && toolbarConfig.ToolbarButtonActionConfig ? toolbarConfig.ToolbarButtonActionConfig : {},
currentSystemName: currentSystemName,
events: {
"initialSystemName #appNameTitle": "setInitialSystemName",
// Tool buttons
"click #fullScreen-button": "toggleFullScreen",
"click #closefullScreen-button": "toggleFullScreen",
"click #fitActivePaperToContent-button": "fitActivePaperToContent",
// TODO if lock mechanism is included
// "click #lockPaperInteractivity-button": "togglePaperInteractivity",
// "click #unlockPaperInteractivity-button": "togglePaperInteractivity",
"click #clearActivePaper-button": "clearActivePaper",
"click #printActivePaper-button": "printActivePaper",
"click #zoomOutPaper-button": "zoomOutPaper",
"click #zoomInPaper-button": "zoomInPaper",
"click #expandAll-button": "toggleEntityExpansion",
"click #collapseAll-button": "toggleEntityExpansion",
"click #applicationSettings-button": "editApplicationSettings",
"click #showEntityToolbarRow-button": "toggleEntityOptionsBar",
"click #hideEntityToolbarRow-button": "toggleEntityOptionsBar",
// Collapsible elements
"click #convertModeledSystemEntityToJson-dropdownItemButton": "convertToJson",
"click #convertModeledSystemEntityToTosca-dropdownItemButton": "convertToTosca",
"click #changeGrid-button": "changeGrid",
'click input[type="checkbox"]': "toggleEntityVisibility",
"mouseup": 'removeFocusFromSelection',
// Test section --> TODO
"click #fitAllElementsToEmbedded-button": "fitAllElementsToEmbedded"
}
});
this.appTools = this.options.appTools;
this.entityTools = this.options.entityTools;
this.additionalRowTools = this.options.additionalRowToolsConfig;
this.#toolbarButtonActionConfig = this.options.toolbarButtonActionConfig;
this.currentSystemName = this.options.currentSystemName;
this.delegateEvents();
this.listenTo(this.options.graph, "add", this.entityAdded);
this.listenTo(this.options.graph, "remove", this.entityRemoved);
this.listenTo(this.options.graph, "change:entity", (element, entity, opt) => { this.entityTypeChanged(opt.previousType, entity.type) });
this.listenTo(this.options.graph, "change:attrs", this.entityVisibilityChanged);
this.#applicationModalDialog = (modalDialog instanceof ModalDialog) ? modalDialog : {};
}
init() {
}
onRender() {
}
confirmUpdate() {
}
onSetTheme(oldTheme, newTheme) {
}
onRemove() {
}
render() {
const toolbarTools = new ToolbarTools();
toolbarTools.addSystemTitle();
for (const buttonGroup of this.appTools) {
toolbarTools.addButtonGroup(buttonGroup);
}
for (const entityElement of this.entityTools) {
toolbarTools.addEntityCheckboxTool(entityElement.entityType, entityElement.labelText, entityElement.tooltipText);
}
for (const additionalRowTool of this.additionalRowTools) {
switch (additionalRowTool.rowIndex) {
case 1:
toolbarTools.addAdditionalFirstRowConfigTool(additionalRowTool.tools);
break;
case 2:
toolbarTools.addAdditionalSecondRowConfigTool(additionalRowTool.tools);
break;
default:
toolbarTools.addAdditionalFirstRowConfigTool(additionalRowTool.tools);
break;
}
}
const appToolbar = toolbarTools.getCreatedToolbarTemplate();
$("#appToolbarContainer").append(appToolbar);
this.#configureGeneralToolbarBehaviour();
$(".buttonInitialHide").hide();
// remove focus from checkbox element after it has been clicked
$(".app-header-second-row").on("click", () => { $(':focus').blur(); });
return this;
}
toggleFullScreen() {
// decide what should be toggled --> Fix design and scrolling
// joint.util.toggleFullScreen(document.getElementById("app"));
joint.util.toggleFullScreen();
let openFullScreenButtonId = "fullScreen-button";
let closeFullScreenButtonId = "closefullScreen-button";
$("#" + openFullScreenButtonId).toggle();
$("#" + closeFullScreenButtonId).toggle();
// let
}
fitActivePaperToContent() {
this.options.paper.fitToContent({
padding: joint.util.normalizeSides(25),
minWidth: 300,
minHeight: 250
});
}
// TODO if lock mechanism is included
// togglePaperInteractivity() {
// if (this.options.paper.options.interactive == false) {
// this.options.paper.setInteractivity({
// labelMove: false,
// addLinkFromMagnet: true,
// linkMove: true
// });
// } else {
// this.options.paper.setInteractivity(false);
// }
// $("#lockPaperInteractivity-button").toggle();
// $("#unlockPaperInteractivity-button").toggle();
// }
clearActivePaper() {
const config = this.#toolbarButtonActionConfig["clearActivePaper"];
if (config) {
let modalDialog = new UIModalDialog("extern-clear", "clearActivePaper");
modalDialog.create(config);
modalDialog.render("modals", true);
modalDialog.configureSaveButtonAction(() => { this.options.graph.clear() });
modalDialog.show();
} else {
this.options.graph.clear();
}
}
changeGrid() {
// TODO for options
this.options.paper.clearGrid();
}
printActivePaper() {
// TODO Fixme that only paper is printed
window.print();
}
zoomInPaper() {
// TODO allow more finegrained
const dimensionsBeforeZoom = this.options.paper.getComputedSize();
this.options.paper.scale(1.5);
this.options.paper.fitToContent({
padding: 50,
minWidth: dimensionsBeforeZoom.width,
minHeight: dimensionsBeforeZoom.height
});
}
zoomOutPaper() {
// TODO allow more finegrained
const dimensionsBeforeZoom = this.options.paper.getComputedSize();
this.options.paper.scale(1);
this.options.paper.fitToContent({
padding: 50,
minWidth: dimensionsBeforeZoom.width,
minHeight: dimensionsBeforeZoom.height
});
}
convertToJson() {
let jsonSerlializedGraph = this.options.graph.toJSON();
// download created yaml taken from https://stackoverflow.com/a/22347908
let downloadElement = document.createElement("a");
downloadElement.setAttribute('href', 'data:text/plain;charset=utf-8,' + encodeURIComponent(JSON.stringify(jsonSerlializedGraph)));
downloadElement.setAttribute('download', `${this.currentSystemName}.json`);
downloadElement.click();
}
convertToTosca() {
const config = this.#toolbarButtonActionConfig["convertToTosca"];
if (config) {
let modalDialog = new UIModalDialog("tosca-export", "convertToTosca");
modalDialog.create(config);
modalDialog.render("modals", true);
modalDialog.configureSaveButtonAction(() => { this.options.graph.trigger("startToscaTransformation"); });
modalDialog.show();
}
}
removeFocusFromSelection(event) {
if (event.button === 2) {
// ignore right click
return;
}
if (event.target.id === "appNameTitle") {
// keep focus if text box input field is focused
return;
}
$('[data-toggle="tooltip"]').tooltip("hide");
$('[data-tooltip-toggle="tooltip"]').tooltip("hide");
document.activeElement.blur();
}
fitAllElementsToEmbedded() {
let elements = this.options.graph.getElements();
for (const element of elements) {
if (element.getEmbeddedCells("embeds").length === 0) {
continue;
}
element.fitEmbeds({
deep: true,
padding: 10
// padding: { //TODO useful values
// top: 40,
// bottom: 10,
// left: 10,
// right: 10
// }
});
}
}
toggleEntityExpansion(event) {
let elements = this.options.graph.getElements();
elements.forEach(element => {
if (!element.attr("icon") || element.prop("entity/type") === EntityTypes.REQUEST_TRACE) {
return;
}
element.prop("collapsed", (event.currentTarget.id.includes("collapse") ? true : false));
let toggledVisibility = event.currentTarget.id.includes("collapse") ? "visible" : "hidden";
// check if entity is currently hidden
let iconVisibility = element.attr("root/visibility") === "hidden" ? "hidden" : toggledVisibility;
element.attr("icon/visibility", iconVisibility, { isolate: true });
// hide embedded items
let embeddedCells = element.getEmbeddedCells({ deep: true });
embeddedCells.forEach(embeddedElement => {
const itemVisibility = event.currentTarget.id.includes("collapse") ? "hidden" : "visible";
// check if entity is currently filtered by checkbox
const entityTypeHidden = embeddedElement.prop("entityTypeHidden");
const newItemVisibility = entityTypeHidden ? "hidden" : itemVisibility;
embeddedElement.attr("root/visibility", newItemVisibility, { isolate: true });
embeddedElement.prop("parentCollapsed", ("hidden".localeCompare(toggledVisibility) === 0 ? false : true));
});
});
}
toggleEntityOptionsBar() {
$("#showEntityToolbarRow-button").toggle();
$("#hideEntityToolbarRow-button").toggle();
$("#additionalToolbar-groupDivider").toggle();
$(".app-header-second-row").toggle();
}
editApplicationSettings() {
ApplicationSettingsDialogConfig.saveButton.action = () => {
let newWidth = $("#" + ApplicationSettingsDialogConfig.content.Size.sectionContent.PaperWidth.providedFeature).val();
let newHeight = $("#" + ApplicationSettingsDialogConfig.content.Size.sectionContent.PaperHeight.providedFeature).val();
let newGridSize = $("#" + ApplicationSettingsDialogConfig.content.Grid.sectionContent.Size.providedFeature).val();
let newGridThickness = $("#" + ApplicationSettingsDialogConfig.content.Grid.sectionContent.Thickness.providedFeature).val();
this.options.paper.setDimensions(newWidth, newHeight);
this.options.paper.setGridSize(newGridSize);
this.options.paper.drawGrid({ thickness: newGridThickness });
};
// get current values
ApplicationSettingsDialogConfig.content.Size.sectionContent.PaperWidth.min = this.options.paper.getFitToContentArea({ padding: joint.util.normalizeSides(25) }).width;
ApplicationSettingsDialogConfig.content.Size.sectionContent.PaperHeight.min = this.options.paper.getFitToContentArea({ padding: joint.util.normalizeSides(25) }).height;
ApplicationSettingsDialogConfig.content.Size.sectionContent.PaperWidth.defaultValue = this.options.paper.options.width;
ApplicationSettingsDialogConfig.content.Size.sectionContent.PaperHeight.defaultValue = this.options.paper.options.height;
ApplicationSettingsDialogConfig.content.Grid.sectionContent.Size.defaultValue = this.options.paper.options.gridSize;
// TODO fix access
ApplicationSettingsDialogConfig.content.Grid.sectionContent.Thickness.defaultValue = this.options.paper._gridSettings[0].thickness;
// create dialog with information
this.#applicationModalDialog.renderActionDialog(ApplicationSettingsDialogConfig.title, ApplicationSettingsDialogConfig.content, ApplicationSettingsDialogConfig.cancelButton.text, ApplicationSettingsDialogConfig.saveButton);
this.#applicationModalDialog.show();
}
entityAdded(cell) {
if (!(cell.attributes.entity) || !(Object.values(EntityTypes).includes(cell.attributes.entity.type))) {
console.error("Entity Type does not exist"); // TODO error?
return;
}
this.#updateEntityCounter(cell.attributes.entity.type, "add");
}
entityRemoved(cell) {
if (!(cell.attributes.entity) || !(Object.values(EntityTypes).includes(cell.attributes.entity.type))) {
console.error("Entity Type does not exist"); // TODO error?
return;
}
this.#updateEntityCounter(cell.attributes.entity.type, "remove");
}
entityTypeChanged(previousType, newType) {
if (!previousType) {
// entity properties not type changed
return;
}
if (!(Object.values(EntityTypes).includes(previousType)) || !(Object.values(EntityTypes).includes(newType))) {
console.error("Entity Type does not exist"); // TODO error?
return;
}
this.#updateEntityCounter(previousType, "remove");
this.#updateEntityCounter(newType, "add");
}
updateEntityEventListener(event) {
const currentBadge = event.target.getAttribute("data-entity-type");
$('.addingEntity[data-entity-type="' + currentBadge + '"]').removeClass("addingEntity");
$('.removingEntity[data-entity-type="' + currentBadge + '"]').removeClass("removingEntity");
}
toggleEntityVisibility(event) {
let affectedEntityType = event.target.value;
let clickedCheckbox = $('.entityCheckBox[data-entity-type="' + affectedEntityType + '"]');
let graphCells = this.options.graph.getCells();
let filteredGraphCells = graphCells.filter((graphCell) => {
return graphCell.attributes.entity.type === affectedEntityType;
});
for (const relevantCell of filteredGraphCells) {
let newVisibilityValue = clickedCheckbox.prop("checked") ? "visible" : "hidden";
relevantCell.prop("entityTypeHidden", (clickedCheckbox.prop("checked") === false));
// ensure icon appears when needed despite filtering
if (relevantCell.attr("icon")) {
let entityCollapsed = relevantCell.get("collapsed") === true ? "visible" : "hidden";
let iconVisibility = clickedCheckbox.prop("checked") ? entityCollapsed : "hidden";
relevantCell.attr("icon/visibility", iconVisibility, { isolate: true });
}
// ensure child entities stay hidden when parent entity is collapsed
if (relevantCell.prop("parentCollapsed")) {
newVisibilityValue = "hidden";
}
relevantCell.attr("root/visibility", newVisibilityValue, { isolate: true });
}
}
entityVisibilityChanged(element, attrs, opt) {
if (opt.propertyPath && opt.propertyPath.includes("visibility")) {
let cellView = element.findView(this.options.paper);
cellView.hideTools();
joint.highlighters.stroke.remove(cellView);
}
}
#updateEntityCounter(dataEntityType, updateType) {
let counterElement = $('.numberOfEntities[data-entity-type="' + dataEntityType + '"]');
let oldValue = counterElement.text();
if (updateType === "add") {
counterElement.addClass("addingEntity")
counterElement.text(parseInt(oldValue) + 1);
} else if (updateType === "remove") {
counterElement.addClass("removingEntity")
const newValue = (parseInt(oldValue) - 1) >= 0 ? (parseInt(oldValue) - 1) : 0;
counterElement.text(newValue);
}
}
#configureGeneralToolbarBehaviour() {
$(".numberOfEntities[data-entity-type]").on("animationend", this.updateEntityEventListener);
$("#editApplicationNameBtn").on("click", () => {
this.#toggleApplicationNameEditingMode();
});
$("#cancelEditApplicationNameBtn").on("click", () => {
$("#appNameTitle").val(this.currentSystemName);
this.#toggleApplicationNameEditingMode();
});
$("#submitEditApplicationNameBtn").on("click", () => {
let editedAppName = $("#appNameTitle").val();
if (!editedAppName) {
$("#appNameTitle").val(this.currentSystemName);
} else {
$("#appNameTitle").val(editedAppName);
this.currentSystemName = editedAppName;
//
$("#appNameTitle").trigger({
type: "systemNameChanged",
updatedSystemName: editedAppName
});
this.options.graph.trigger("systemNameChanged", { editedAppName: editedAppName });
}
this.#toggleApplicationNameEditingMode();
});
// // TODO keep?
$("#addNewSystemEntity").on("click", () => {
alert("Maybe TODO");
});
// ensure tooltip is hidden when dropdown is opened
$(".buttonDropDownGroup").on('show.bs.dropdown', function () {
$('.toolbarDropdownButton[data-tooltip-toggle="tooltip"]').tooltip("hide");
})
$(".buttonDropDownGroup").on('shown.bs.dropdown', function () {
$('.toolbarDropdownButton[data-tooltip-toggle="tooltip"]').tooltip("hide");
})
}
#toggleApplicationNameEditingMode() {
let systemTitleInputField = $("#appNameTitle");
if (systemTitleInputField.attr("disabled")) {
systemTitleInputField.attr("disabled", false);
} else {
systemTitleInputField.attr("disabled", true);
}
$("#editApplicationNameBtn").toggle();
$("#cancelEditApplicationNameBtn").toggle();
$("#submitEditApplicationNameBtn").toggle();
}
setInitialSystemName(event) {
this.currentSystemName = event.systemName;
}
}
export default Toolbar; |
<reponame>maged995/DR-Math<filename>src/app/pages/pages-menu.ts<gh_stars>0
import { NbMenuItem } from '@nebular/theme';
import { title } from 'process';
import { roleMatch } from '../shared/roleMatch.roles';
import { NbMenuItemWithPermissions } from './pagesClass';
// Sidebar menu definition. Each top-level section (and most children) is
// hidden unless roleMatch() grants the user at least one of the listed roles.
// Item titles are user-facing Arabic strings and must not be altered.
// NOTE(review): every section reuses 'shopping-cart-outline' as its icon —
// presumably a placeholder; confirm intended icons.
export const MENU_ITEMS: NbMenuItemWithPermissions[] = [
    // Basic data: levels, item categories, trainers/employees.
    {
        title:'البيانات الاساسيه',
        icon:'shopping-cart-outline',
        hidden:!roleMatch(["levels","items","trainers"]),
        children:[
            {
                title:'المستويات',
                link:'/pages/basicData/levels',
                hidden:!roleMatch(["levels"]),
            },
            {
                // Item categories; the trailing path segment selects the category id.
                title:"الاصناف",
                hidden:!roleMatch(["items"]),
                children:[
                    {
                        title:'الكتب',
                        link:'/pages/basicData/items/1'
                    },
                    {
                        title:'عدادات',
                        link:'/pages/basicData/items/2'
                    },
                    {
                        title:'حقائب',
                        link:'/pages/basicData/items/3'
                    },
                    {
                        title:'تيشرتات',
                        link:'/pages/basicData/items/4'
                    },
                    {
                        title:'ميداليات',
                        link:'/pages/basicData/items/5'
                    },
                    {
                        title:'دروع',
                        link:'/pages/basicData/items/6'
                    },
                    {
                        title:'اخري',
                        link:'/pages/basicData/items/7'
                    }
                ]
            },
            {
                title:'المدربين',
                link:'/pages/basicData/trainers/1',
                hidden:!roleMatch(["trainers"]),
            },
            {
                title:'الموظفين',
                link:'/pages/basicData/trainers/2',
                hidden:!roleMatch(["trainers"]),
            },
        ]
    },
    // Subscribers, grouped by subscription kind (path segment 1-4).
    {
        title:"المشتركين",
        icon:'shopping-cart-outline',
        hidden:!roleMatch(["subscriber"]),
        children:[
            {
                title:'الطلاب',
                link:'/pages/basicData/subscriber/1'
            },
            {
                title:'حضانه',
                link:'/pages/basicData/subscriber/2'
            },
            {
                title:'مدرسه',
                link:'/pages/basicData/subscriber/3'
            },
            {
                title:'كورس',
                link:'/pages/basicData/subscriber/4'
            }
        ]
    },
    // Order workflow: trainer request, admin approval, payments, account report.
    {
        title:"طلبات الاوردور",
        icon:'shopping-cart-outline',
        hidden:!roleMatch(["ordersForm","orderList","trainerPayment","trainerAccountReports"]),
        children:[
            {
                title:'طلب المدرب',
                link:'/pages/basicData/ordersForm',
                hidden:!roleMatch(["ordersForm"]),
            },
            {
                title:'اعتماد الاداره',
                link:'/pages/basicData/orderList',
                hidden:!roleMatch(["orderList"]),
            },
            {
                title:'مدفوعات المدرب',
                link:'/pages/basicData/trainerPayment',
                hidden:!roleMatch(["trainerPayment"]),
            },
            {
                title:'حساب المدرب',
                link:'/pages/basicData/trainerAccountReports',
                hidden:!roleMatch(["trainerAccountReports"]),
            }
        ]
    },
    // Maps: trainee locations per trainer / governorate / admin / single trainer.
    {
        title:'خرائط توضيحيه',
        icon:'shopping-cart-outline',
        hidden:!roleMatch(["mapsTrainer","mapsAdmin","mapsByTrainer"]),
        children:[
            {
                title:'اماكن المتدربين للمدرب',
                link:'/pages/basicData/mapsTrainer',
                hidden:!roleMatch(["mapsTrainer"]),
            },
            {
                title:'اماكن المتدربين لمحافظة المدرب',
                link:'/pages/basicData/mapsGovernorate',
                hidden:!roleMatch(["mapsTrainer"]),
            },
            {
                title:'اماكن المتدربين للادمن',
                link:'/pages/basicData/mapsAdmin',
                hidden:!roleMatch(["mapsAdmin"]),
            },
            {
                title:'اماكن المتدربين لمدرب واحد',
                link:'/pages/basicData/mapsByTrainer',
                hidden:!roleMatch(["mapsByTrainer"]),
            },
        ]
    },
    // Reports section.
    {
        title: 'تقارير شامله',
        icon: 'shopping-cart-outline',
        hidden:!roleMatch(["reports","StudentsFinished"]),
        children:[
            {
                title:'تقارير عن فتره معينه',
                link:'/pages/basicData/reports',
                hidden:!roleMatch(["reports"]),
            },
            {
                title:'طلاب انهوا التدريب',
                link:'/pages/basicData/StudentsFinished',
                hidden:!roleMatch(["StudentsFinished"]),
            },
            {
                // NOTE(review): no `hidden` guard here, unlike its siblings —
                // confirm this entry is meant to be visible to all users.
                title:'حسابات المدربين',
                link:'/pages/basicData/trainerAccount'
            }
        ]
    },
    // Security: password change, user roles, role management, registration.
    {
        title: 'الامان',
        icon: 'shopping-cart-outline',
        // hidden:!roleMatch(["UserRoles","Roles","RegisterNewUser"]),
        children: [
            {
                title:'تغيير كلمة السر',
                link:'/users/changePassword',
            },
            {
                title:"تغيير صلاحية المستخدمين",
                link:'/pages/security/userRolesList',
                hidden:!roleMatch(["UserRoles"])
            },
            {
                title:'رتب الامان',
                link:'/pages/security/roles',
                hidden:!roleMatch(["Roles"])
            },
            {
                title:'اضافة مستخدم جديد',
                link:'/users/register',
                hidden:!roleMatch(["RegisterNewUser"])
            }
        ],
    },
];
|
<gh_stars>0
package com.netflix.hystrix.contrib.javanica.test.aspectj.error;
import com.netflix.hystrix.contrib.javanica.test.common.error.BasicErrorPropagationTest;
import org.junit.BeforeClass;
/**
* Created by dmgcodevil
*/
/**
 * AspectJ variant of the shared error-propagation test suite: runs
 * {@link BasicErrorPropagationTest} with compile-time weaving enabled.
 */
public class ErrorPropagationTest extends BasicErrorPropagationTest {

    /** Selects AspectJ compile-time weaving before any test runs. */
    @BeforeClass
    public static void setUpEnv() {
        System.setProperty("weavingMode", "compile");
    }

    /** Supplies the service under test to the shared base-class tests. */
    @Override
    protected UserService createUserService() {
        return new UserService();
    }
}
|
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.SettableFuture;
import javax.ws.rs.FormParam;
/**
 * API facade for looking up instance profiles by name, exposing the result
 * as a Guava {@link ListenableFuture}.
 */
public class InstanceProfileApi {
    // Assume the necessary imports and class definition for InstanceProfile

    /**
     * Looks up the instance profile with the given name.
     *
     * NOTE(review): despite the comments, this implementation calls
     * retrieveInstanceProfile synchronously and returns an already-completed
     * future; asyncRetrieveInstanceProfile is never used. Confirm whether a
     * truly asynchronous path is intended.
     *
     * @param name the instance profile name (bound from the form parameter)
     * @return a future completed with the profile, or failed with a
     *         RuntimeException when no profile exists, or with whatever
     *         exception the lookup threw
     */
    public ListenableFuture<InstanceProfile> get(@FormParam("InstanceProfileName") String name) {
        SettableFuture<InstanceProfile> future = SettableFuture.create();
        // Asynchronously retrieve the instance profile with the given name
        // Assume a method call to retrieve the instance profile asynchronously
        // For example:
        // asyncRetrieveInstanceProfile(name, future);
        // Handle any potential exceptions and set the result or error in the future
        try {
            InstanceProfile instanceProfile = retrieveInstanceProfile(name);
            if (instanceProfile != null) {
                future.set(instanceProfile); // Set the retrieved instance profile as the result
            } else {
                future.setException(new RuntimeException("Instance profile not found")); // Set an error if the profile does not exist
            }
        } catch (Exception e) {
            future.setException(e); // Set any exceptions as errors in the future
        }
        return future;
    }

    // Assume a method to retrieve the instance profile asynchronously
    private void asyncRetrieveInstanceProfile(String name, SettableFuture<InstanceProfile> future) {
        // Asynchronously retrieve the instance profile and set the result or error in the future
    }

    // Assume a method to retrieve the instance profile synchronously
    private InstanceProfile retrieveInstanceProfile(String name) {
        // Synchronously retrieve the instance profile with the given name
        // Return the retrieved instance profile or null if it does not exist
        // NOTE(review): stub — always returns null, so get() always fails
        // with "Instance profile not found" until this is implemented.
        return null;
    }
}
#!/bin/bash
# Benchmark setup script: install Java/sbt, point the Scruffy example at the
# configured database host, build the fat jar, and launch it in the background.
fw_depends java sbt
# Rewrite the hard-coded DB address with the benchmark's DBHOST.
sed -i 's|127.0.0.1|'${DBHOST}'|g' src/main/scala/scruffy/examples/Test2Endpoint.scala
sbt assembly
java -jar target/scala-2.11/scruffy-benchmark-assembly-11.0.jar -Dhostname=${DBHOST} &
|
package util
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/pip-services/pip-services-runtime-go/util"
)
// TestShort verifies that IdGenerator.Short produces non-empty IDs of at
// least 9 characters and that consecutive IDs differ.
func TestShort(t *testing.T) {
	id1 := util.IdGenerator.Short()
	assert.NotEmpty(t, id1)
	assert.True(t, len(id1) >= 9)
	id2 := util.IdGenerator.Short()
	assert.NotEmpty(t, id2)
	assert.True(t, len(id2) >= 9)
	assert.NotEqual(t, id1, id2)
}
// TestUUID verifies that IdGenerator.UUID produces 32-character
// (hex, unhyphenated) identifiers and that consecutive IDs differ.
func TestUUID(t *testing.T) {
	id1 := util.IdGenerator.UUID()
	assert.NotEmpty(t, id1)
	assert.Len(t, id1, 32)
	id2 := util.IdGenerator.UUID()
	assert.NotEmpty(t, id2)
	assert.Len(t, id2, 32)
	assert.NotEqual(t, id1, id2)
}
<gh_stars>10-100
package delta
import (
"fmt"
"github.com/bwmarrin/discordgo"
)
// Discord abstracts the guild operations this package needs, so the
// discordgo-backed implementation can be swapped out in tests.
type Discord interface {
	Members() ([]DiscordMember, error)
	Roles() ([]DiscordRole, error)
	CreateRole(DeltaRoleCreate) error
	EditRole(DeltaRoleEdit) error
	SetRolePositions(DeltaRolePositions) error
	AddUserRole(DeltaUserAddRole) error
	RemoveUserRole(DeltaUserRemoveRole) error
}
// DiscordMember is a guild member with the names (not IDs) of its roles.
type DiscordMember struct {
	ID        string
	Name      string
	RoleNames []string
}

// DiscordRole mirrors the guild-role fields this package cares about.
type DiscordRole struct {
	ID          string
	Name        string
	Color       int
	Permissions int64
	Position    int
}

// discord is the discordgo-backed implementation of the Discord interface,
// bound to a single guild.
type discord struct {
	session *discordgo.Session
	guildID string
}
// NewDiscord creates a Discord implementation for the given guild,
// authenticating with a bot token. The session is created but not opened;
// the REST calls used here do not require a gateway connection.
func NewDiscord(guildID, token string) (Discord, error) {
	session, err := discordgo.New("Bot " + token)
	if err != nil {
		return nil, fmt.Errorf("init discordgo: %w", err)
	}
	return &discord{
		session: session,
		guildID: guildID,
	}, nil
}
// Members returns every member of the guild together with the resolved
// names of the roles they hold. It fails if a member references a role ID
// that does not resolve to a named guild role.
func (discord *discord) Members() ([]DiscordMember, error) {
	discordMembers := []DiscordMember{}
	// Fetch all guild roles once so role IDs can be mapped to names below.
	discordRoles, err := discord.session.GuildRoles(discord.guildID)
	if err != nil {
		return nil, fmt.Errorf("get guild roles: %w", err)
	}
	// GuildMembers is paginated: request pages of `limit` members starting
	// after the last user ID seen, until a short page signals the end.
	after := ""
	limit := 1000
	for {
		members, err := discord.session.GuildMembers(discord.guildID, after, limit)
		if err != nil {
			return nil, fmt.Errorf("get guild members: %w", err)
		}
		for _, member := range members {
			// Resolve each of the member's role IDs to its role name.
			roleNames := make([]string, len(member.Roles))
			for i, roleID := range member.Roles {
				var roleName string
				for _, role := range discordRoles {
					if role.ID == roleID {
						roleName = role.Name
						break
					}
				}
				if roleName == "" {
					return nil, fmt.Errorf("could not find name for role %s (user %s)", roleID, member.User)
				}
				roleNames[i] = roleName
			}
			discordMembers = append(discordMembers, DiscordMember{
				ID:        member.User.ID,
				Name:      member.User.String(),
				RoleNames: roleNames,
			})
			// Advance the pagination cursor to the last member seen.
			after = member.User.ID
		}
		if len(members) < limit {
			break
		}
	}
	return discordMembers, nil
}
// Roles returns all guild roles converted to this package's DiscordRole type.
func (discord *discord) Roles() ([]DiscordRole, error) {
	discordRoles, err := discord.session.GuildRoles(discord.guildID)
	if err != nil {
		return nil, fmt.Errorf("get guild roles: %w", err)
	}
	roles := make([]DiscordRole, len(discordRoles))
	for i, r := range discordRoles {
		roles[i] = DiscordRole{
			ID:          r.ID,
			Name:        r.Name,
			Color:       r.Color,
			Permissions: r.Permissions,
			Position:    r.Position,
		}
	}
	return roles, nil
}
// CreateRole creates a new guild role and immediately edits it to the
// requested name, color, and permissions (the create endpoint alone does
// not take these attributes in this discordgo version).
// NOTE(review): the positional GuildRoleCreate/GuildRoleEdit signatures used
// here match older discordgo releases; newer versions take RoleParams —
// confirm against the pinned dependency version.
func (discord *discord) CreateRole(delta DeltaRoleCreate) error {
	role, err := discord.session.GuildRoleCreate(discord.guildID)
	if err != nil {
		return fmt.Errorf("create role: %w", err)
	}
	// The two boolean arguments are hoist and mentionable, both forced on.
	_, err = discord.session.GuildRoleEdit(
		discord.guildID,
		role.ID,
		delta.RoleName,
		delta.Color,
		true,
		delta.Permissions,
		true,
	)
	if err != nil {
		return fmt.Errorf("edit newly created role: %w", err)
	}
	return nil
}
// EditRole updates an existing guild role's name, color, and permissions.
// The two boolean arguments are hoist and mentionable, both forced on,
// matching CreateRole.
func (discord *discord) EditRole(delta DeltaRoleEdit) error {
	_, err := discord.session.GuildRoleEdit(
		discord.guildID,
		delta.RoleID,
		delta.RoleName,
		delta.Color,
		true,
		delta.Permissions,
		true,
	)
	if err != nil {
		// Fixed: previous message said "edit newly created role",
		// copy-pasted from CreateRole — this edits an existing role.
		return fmt.Errorf("edit role: %w", err)
	}
	return nil
}
// SetRolePositions reorders guild roles so that their positions match the
// order given in delta (a slice of role names). Position 0 is reserved for
// @everyone, hence the +1 offset. Fails if any named role does not exist.
func (discord *discord) SetRolePositions(delta DeltaRolePositions) error {
	roles, err := discord.session.GuildRoles(discord.guildID)
	if err != nil {
		return fmt.Errorf("get guild roles: %w", err)
	}
	var orderedRoles []*discordgo.Role
	for position, roleName := range delta {
		var foundRole bool
		for _, role := range roles {
			if role.Name == roleName {
				// Mutate the fetched role's position, then submit the batch.
				role.Position = position + 1
				orderedRoles = append(orderedRoles, role)
				foundRole = true
				break
			}
		}
		if !foundRole {
			return fmt.Errorf("role not found: %s", roleName)
		}
	}
	_, err = discord.session.GuildRoleReorder(discord.guildID, orderedRoles)
	if err != nil {
		return fmt.Errorf("reorder roles: %w", err)
	}
	return nil
}
// AddUserRole grants the role named in delta to the given user,
// resolving the role name to an ID first.
func (discord *discord) AddUserRole(delta DeltaUserAddRole) error {
	roleID, err := discord.roleID(delta.RoleName)
	if err != nil {
		return err
	}
	err = discord.session.GuildMemberRoleAdd(discord.guildID, delta.UserID, roleID)
	if err != nil {
		return fmt.Errorf("add user role: %w", err)
	}
	return nil
}
// RemoveUserRole revokes the role named in the delta from the given user.
func (discord *discord) RemoveUserRole(delta DeltaUserRemoveRole) error {
	roleID, err := discord.roleID(delta.RoleName)
	if err != nil {
		return err
	}
	// Fix: this previously called GuildMemberRoleAdd (copy-paste from
	// AddUserRole), which granted the role instead of removing it, and
	// reported the failure as "add user role".
	err = discord.session.GuildMemberRoleRemove(discord.guildID, delta.UserID, roleID)
	if err != nil {
		return fmt.Errorf("remove user role: %w", err)
	}
	return nil
}
// roleID resolves a role name to its Discord ID by scanning the guild's
// roles; it returns an error when no role carries that name.
func (discord *discord) roleID(roleName string) (string, error) {
	guildRoles, err := discord.session.GuildRoles(discord.guildID)
	if err != nil {
		return "", fmt.Errorf("get guild roles: %w", err)
	}
	for _, guildRole := range guildRoles {
		if guildRole.Name == roleName {
			return guildRole.ID, nil
		}
	}
	return "", fmt.Errorf("role not found: %s", roleName)
}
|
#! /usr/bin/env ruby
###############################################################################
# FILE : reward_star_wars.rb
# DESCRIPTION : A class to represent a reward for a purchase on May 4th.
# LICENSE : MIT
###############################################################################
###############################################################################
# IMPORTS
###############################################################################
require_relative "interface_reward"
###############################################################################
# CLASSES
###############################################################################
# Reward attached to purchases made on May 4th.
class RewardStarWars < InterfaceReward
  # Shared description of the reward; identical for every instance.
  @@reward = "Star Wars item to be added to delivery."

  # Returns the human-readable description of the reward.
  def what
    @@reward
  end
end
###############################################################################
# END
###############################################################################
# Local variables:
# mode: ruby
# End:
|
<gh_stars>0
package streams;
import java.util.function.Predicate;
import java.util.stream.Stream;
/**
 * Demonstrates filtering streams with predicates, both via direct method
 * references and via predicates built from method references.
 */
public class StreamsWithPredicates {

    /** Counts how many of the given strings are empty. */
    public static long countEmptyStrings(String... array) {
        Stream<String> strings = Stream.of(array);
        return strings.filter(String::isEmpty).count();
    }

    /** Counts how many of the given strings are non-empty. */
    public static long countNonEmptyStrings(String... array) {
        Predicate<String> nonEmpty = as(String::isEmpty).negate();
        return Stream.of(array).filter(nonEmpty).count();
    }

    /** Lets a method reference be treated as a {@link Predicate} so it can be negated/combined. */
    public static <T> Predicate<T> as(Predicate<T> predicate) {
        return predicate;
    }
}
|
<gh_stars>0
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* ft_print_address.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: hokutosuzuki <<EMAIL>ky +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2022/01/30 17:23:15 by hokutosuz #+# #+# */
/* Updated: 2022/02/16 20:19:45 by hokutosuz ### ########.fr */
/* */
/* ************************************************************************** */
#include "ft_printf.h"
#include "../libft/libft.h"
/*
** Pads the output with spaces so the printed address fills the field width.
** NOTE(review): the loop condition only changes if ft_write() shrinks
** lst->width (or the initial difference is already <= 0) -- confirm
** ft_write's contract before assuming termination.
*/
static void	ft_print_space_address(t_stock *lst, size_t len)
{
	if (0 < lst->width)
	{
		while (0 < lst->width - (long long)len)
			ft_write(lst, " ", 1);
	}
}
/*
** Converts nbr to a string in the given base, prefixed with "0x".
** Uses static storage: `res` accumulates the digits while the recursion
** emits the most significant digit first; `i` is the write cursor.
** Each call resets the "0x" prefix and cursor before recursing, so only
** the deepest call's reset takes effect and the digits are appended in
** order on the way back up the stack.
** NOTE: the "0x" prefix is hard-coded, so the function is only meaningful
** for hexadecimal (len == 16); res[20] fits "0x" + 16 hex digits + NUL
** for a 64-bit size_t.  Not reentrant or thread-safe (static buffer).
*/
static char	*ft_dtoa(size_t nbr, char *base, size_t len)
{
	static size_t	i;
	static char		res[20];

	res[0] = '0';
	res[1] = 'x';
	i = 2;
	if (len <= nbr)
		ft_dtoa(nbr / len, base, len);
	res[i++] = base[nbr % len];
	res[i] = '\0';
	return (res);
}
/*
** Prints a pointer value for the %p conversion: hexadecimal representation
** (with "0x" prefix) padded to the requested field width.  Leading space
** padding is written unless left alignment was requested; trailing padding
** is delegated to ft_print_left_align_space.
*/
void	ft_print_address(t_stock *lst, size_t address)
{
	size_t	len;
	char	*res;

	res = ft_dtoa(address, "0123456789abcdef", 16);
	len = ft_strlen(res);
	if (lst->left_align == OFF)
		ft_print_space_address(lst, len);
	ft_write(lst, res, len);
	ft_print_left_align_space(lst);
}
|
package fracCalc;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
/**
 * JUnit tests for FracCalc subtraction: simple fractions, reduction,
 * whole numbers, negative results, improper fractions, and combined cases.
 * Expected values use the mixed-number format ("W_N/D") that
 * FracCalc.produceAnswer emits.
 */
public class FracCalcTestFinalSubtraction {
    @Test
    public void testSubtractionSimple1() {assertEquals("1/5", FracCalc.produceAnswer("3/5 - 2/5"));}
    @Test public void testSubtractionSimple2() {assertEquals("0", FracCalc.produceAnswer("1/5 - 1/5"));}
    @Test public void testSubtractionSimple3() {assertEquals("0", FracCalc.produceAnswer("4_1/2 - 4_1/2"));}
    @Test public void testSubtractionReduce1() {assertEquals("4/5", FracCalc.produceAnswer("9/10 - 1/10"));}
    @Test public void testSubtractionReduce2() {assertEquals("1/5", FracCalc.produceAnswer("5/10 - 3/10"));}
    @Test public void testSubtractionWholeNumbers1() { assertEquals("0", FracCalc.produceAnswer("68591 - 68591"));}
    @Test public void testSubtractionWholeNumbers2() {assertEquals("7", FracCalc.produceAnswer("42 - 35"));}
    @Test public void testSubtractionWithNegatives1() { assertEquals("-2/5", FracCalc.produceAnswer("2/5 - 4/5"));}
    @Test public void testSubtractionWithNegatives2() {assertEquals("-7/8", FracCalc.produceAnswer("5_3/4 - 6_5/8"));}
    @Test public void testSubtractionWithNegatives3() {assertEquals("-1_1/4", FracCalc.produceAnswer("-3_3/4 - -2_2/4"));}
    @Test public void testSubtractionWithNegatives4() {assertEquals("-1_5/8", FracCalc.produceAnswer("4_1/2 - 5_9/8"));}
    @Test public void testSubtractionWithNegatives5() {assertEquals("-1_1/8", FracCalc.produceAnswer("3_3/4 - 4_7/8"));}
    @Test public void testSubtractionWithNegatives6() {assertEquals("-6_1/4", FracCalc.produceAnswer("-3_3/4 - 2_2/4"));}
    @Test public void testSubtractionWithNegatives7() {assertEquals("-36891", FracCalc.produceAnswer("48623 - 85514"));}
    @Test public void testSubtractionWithNegatives8() {assertEquals("-9284", FracCalc.produceAnswer("0 - 9284"));}
    @Test public void testSubtractionImproperFractionsAndReductions1() {assertEquals("53/96", FracCalc.produceAnswer("75/32 - 43/24"));}
    @Test public void testSubtractionImproperFractionsAndReductions2() {assertEquals("16_23/24", FracCalc.produceAnswer("75/4 - 43/24"));}
    // This unit test tries a number of combined concepts
    @Test public void testSubtractionCombined1() { assertEquals("12_3/8", FracCalc.produceAnswer("5_3/4 - -6_5/8"));}
    @Test public void testSubtractionCombined2() {assertEquals("8_5/21", FracCalc.produceAnswer("-12_3/7 - -20_2/3"));}
    @Test public void testSubtractionCombined3() {assertEquals("-65_247/336", FracCalc.produceAnswer("-32_75/16 - 27_43/21"));}
}
|
# frozen_string_literal: true
module Neo4j
  module Driver
    module Ext
      # JRuby-side extension of the Java driver's InternalRecord.
      # Dispatches to the underlying Java methods via java_send/java_method
      # and converts returned Java values to Ruby with #as_ruby_object.
      module InternalRecord
        include MapConverter
        include InternalKeys

        # All field values of the record, as Ruby objects.
        def values
          # java_send invokes the Java #values directly (presumably to
          # bypass a Ruby-side #values from an included module -- confirm).
          java_send(:values).map(&:as_ruby_object)
        end

        # Field access by position (Integer) or by key name (anything else,
        # converted with #to_s).  Selects the matching Java overload of #get.
        def [](key)
          case key
          when Integer
            java_method(:get, [Java::int]).call(key)
          else
            java_method(:get, [java.lang.String]).call(key.to_s)
          end.as_ruby_object
        end

        # First field value of the record, as a Ruby object.
        def first
          java_method(:get, [Java::int]).call(0).as_ruby_object
        end
      end
    end
  end
end
|
package dev.webfx.kit.mapper.peers.javafxgraphics.markers;
import javafx.beans.property.Property;
/**
* @author <NAME>
*/
/**
 * Mixin interface for peers exposing a prompt text property.
 *
 * @author <NAME>
 */
public interface HasPromptTextProperty {

    /** The underlying prompt text property. */
    Property<String> promptTextProperty();

    /** Stores {@code promptText} into the property. */
    default void setPromptText(String promptText) {
        promptTextProperty().setValue(promptText);
    }

    /** Reads the current value of the property. */
    default String getPromptText() {
        return promptTextProperty().getValue();
    }
}
|
import org.junit.runner.RunWith;
import org.junit.runners.Suite.SuiteClasses;
// JUnit 4 suite that aggregates the project's unit test classes.
// Running this class executes every listed test class in the order below.
@RunWith(value=org.junit.runners.Suite.class)
@SuiteClasses(value={
    TestMillisecTimer.class,
    TestUtils.class,
    TestFlyerModel.class,
    TestEnemyBullet.class,
    TestInputResponder.class,
    TestScoreTable.class,
    TestInputHandler.class,
    TestSplashModel.class,
    TestTitleModel.class,
    TestGameOverModel.class,
    TestRingBullet.class,
    TestSoundManager.class,
    TestPlayerModel.class,
    TestWeaponPickup.class,
    TestModelController.class,
    TestPickup.class,
    TestBullet.class,
    TestEnemyModel.class,
    TestTitleView.class,
    TestPlayerView.class,
    TestLevelModel.class,
    TestBossModel.class,
    TestEnemyView.class,
    TestSplashView.class,
})
public class TestAllSuite {}
|
<filename>utils/common/src/main/java/brooklyn/util/maven/MavenArtifact.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package brooklyn.util.maven;
import static com.google.common.base.Preconditions.checkNotNull;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import brooklyn.util.collections.MutableList;
import brooklyn.util.javalang.JavaClassNames;
import brooklyn.util.text.Strings;
/**
 * Value object describing a Maven artifact: group, artifact, packaging,
 * optional classifier and version, plus optional markers that customise
 * the derived file name.
 */
public class MavenArtifact {

    private static final Logger log = LoggerFactory.getLogger(MavenArtifact.class);

    protected final @Nonnull String groupId;
    protected final @Nonnull String artifactId;
    protected final @Nonnull String packaging;
    protected final @Nullable String classifier;
    protected final @Nonnull String version;

    /** a custom marker inserted after the artifactId and before the version, offset by an additional "-";
     * defaults to null (nothing)
     * <p>
     * uses: when a shaded JAR is built, sometimes the word shaded is inserted before the version
     * (and the "with-dependencies" classifier overwritten) */
    protected @Nullable String customFileNameAfterArtifactMarker;

    /** a custom marker inserted after the version and before the extension, offset by an additional "-" if non-empty;
     * defaults to {@link #getClassifier()} if null, but can replace the classifier
     * <p>
     * uses: removing classifier by specifying "", or adding a notional classifier such as "dist" */
    protected @Nullable String classifierFileNameMarker;

    /** Full constructor, including the optional classifier (may be null). */
    public MavenArtifact(String groupId, String artifactId, String packaging, String classifier, String version) {
        super();
        this.groupId = groupId;
        this.artifactId = artifactId;
        this.packaging = packaging;
        this.classifier = classifier;
        this.version = version;
    }

    /** Convenience constructor for artifacts without a classifier. */
    public MavenArtifact(String groupId, String artifactId, String packaging, String version) {
        super();
        this.groupId = groupId;
        this.artifactId = artifactId;
        this.packaging = packaging;
        this.classifier = null;
        this.version = version;
    }

    /**
     * Parses a colon-separated coordinate with either 4 parts
     * (groupId:artifactId:packaging:version) or 5 parts
     * (groupId:artifactId:packaging:classifier:version).
     *
     * @throws IllegalArgumentException if the coordinate has any other shape
     */
    public static MavenArtifact fromCoordinate(String coordinate) {
        String[] parts = checkNotNull(coordinate, "coordinate").split(":");
        if (parts.length==4)
            return new MavenArtifact(parts[0], parts[1], parts[2], parts[3]);
        if (parts.length==5)
            return new MavenArtifact(parts[0], parts[1], parts[2], parts[3], parts[4]);
        throw new IllegalArgumentException("Invalid maven coordinate '"+coordinate+"'");
    }

    public String getGroupId() {
        return groupId;
    }

    public String getArtifactId() {
        return artifactId;
    }

    public String getVersion() {
        return version;
    }

    @Nullable public String getClassifier() {
        return classifier;
    }

    public String getPackaging() {
        return packaging;
    }

    /** True when the version string contains "SNAPSHOT" (case-insensitive). */
    public boolean isSnapshot() {
        return getVersion().toUpperCase().contains("SNAPSHOT");
    }

    /** @see #customFileNameAfterArtifactMarker */
    public String getCustomFileNameAfterArtifactMarker() {
        return customFileNameAfterArtifactMarker;
    }

    /** @see #customFileNameAfterArtifactMarker */
    public void setCustomFileNameAfterArtifactMarker(String customFileNameMarker) {
        this.customFileNameAfterArtifactMarker = customFileNameMarker;
    }

    /** @see #classifierFileNameMarker */
    public String getClassifierFileNameMarker() {
        return classifierFileNameMarker!=null ? classifierFileNameMarker : getClassifier();
    }

    /** @see #classifierFileNameMarker */
    public void setClassifierFileNameMarker(String classifierFileNameMarker) {
        this.classifierFileNameMarker = classifierFileNameMarker;
    }

    /** returns a "groupId:artifactId:packaging:(classifier:)version" string
     * which maven refers to as the co-ordinate
     * (note: packaging precedes the optional classifier and the version here,
     * matching {@link #fromCoordinate(String)}) */
    public String getCoordinate() {
        return Strings.join(MutableList.<String>of().append(groupId, artifactId, packaging).
                appendIfNotNull(classifier).append(version), ":");
    }

    /** Derives the artifact's file name:
     * artifactId[-customMarker]-version[-classifierMarker][.extension]. */
    public String getFilename() {
        return artifactId+"-"+
                (Strings.isNonEmpty(getCustomFileNameAfterArtifactMarker()) ? getCustomFileNameAfterArtifactMarker()+"-" : "")+
                version+
                (Strings.isNonEmpty(getClassifierFileNameMarker()) ? "-"+getClassifierFileNameMarker() : "")+
                (Strings.isNonEmpty(getExtension()) ? "."+getExtension() : "");
    }

    /** returns an extension, defaulting to {@link #packaging} if one cannot be inferred */
    @Nullable public String getExtension() {
        if ("jar".equalsIgnoreCase(packaging) || "bundle".equalsIgnoreCase(packaging))
            return "jar";
        if ("war".equalsIgnoreCase(packaging))
            return "war";
        log.debug("Unrecognised packaging for autodetecting extension, defaulting to {} for: {}", packaging, this);
        return packaging;
    }

    @Override
    public String toString() {
        return JavaClassNames.simpleClassName(this)+"["+getCoordinate()+"]";
    }

    @Override
    public int hashCode() {
        // autogenerated code
        final int prime = 31;
        int result = 1;
        result = prime * result + ((artifactId == null) ? 0 : artifactId.hashCode());
        result = prime * result + ((classifier == null) ? 0 : classifier.hashCode());
        result = prime * result + ((classifierFileNameMarker == null) ? 0 : classifierFileNameMarker.hashCode());
        result = prime * result + ((customFileNameAfterArtifactMarker == null) ? 0 : customFileNameAfterArtifactMarker.hashCode());
        result = prime * result + ((groupId == null) ? 0 : groupId.hashCode());
        result = prime * result + ((packaging == null) ? 0 : packaging.hashCode());
        result = prime * result + ((version == null) ? 0 : version.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        // autogenerated code
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        MavenArtifact other = (MavenArtifact) obj;
        if (artifactId == null) {
            if (other.artifactId != null)
                return false;
        } else if (!artifactId.equals(other.artifactId))
            return false;
        if (classifier == null) {
            if (other.classifier != null)
                return false;
        } else if (!classifier.equals(other.classifier))
            return false;
        if (classifierFileNameMarker == null) {
            if (other.classifierFileNameMarker != null)
                return false;
        } else if (!classifierFileNameMarker.equals(other.classifierFileNameMarker))
            return false;
        if (customFileNameAfterArtifactMarker == null) {
            if (other.customFileNameAfterArtifactMarker != null)
                return false;
        } else if (!customFileNameAfterArtifactMarker.equals(other.customFileNameAfterArtifactMarker))
            return false;
        if (groupId == null) {
            if (other.groupId != null)
                return false;
        } else if (!groupId.equals(other.groupId))
            return false;
        if (packaging == null) {
            if (other.packaging != null)
                return false;
        } else if (!packaging.equals(other.packaging))
            return false;
        if (version == null) {
            if (other.version != null)
                return false;
        } else if (!version.equals(other.version))
            return false;
        return true;
    }
}
|
# Numbers to search through.
numbers = [-1, -5, 0, 6, 3, 7]

# Array#min yields the smallest element.
min = numbers.min

# Report the result.
puts "Smallest number: #{min}"
from unittest.mock import patch, Mock
import unittest
from rrpproxy import RRPProxy
from your_module import status_domain_transfer # Import the function to be tested
class TestRRPProxyStatusDomainTransfer(unittest.TestCase):
    """Unit tests for status_domain_transfer against a mocked RRPProxy."""

    @patch('your_module.RRPProxy.call')  # Replace 'your_module' with the actual module containing the function to be tested
    def test_calls_call_correctly(self, call_mock):
        """It forwards the domain to RRPProxy.call and returns the raw response."""
        api_response = {'status': 'pending'}
        call_mock.return_value = api_response

        proxy = RRPProxy()
        result = status_domain_transfer(proxy, 'example.com')  # Replace 'proxy' with the actual instance of RRPProxy

        # The proxy must be invoked with the command name and the domain kwarg.
        call_mock.assert_called_once_with('StatusDomainTransfer', domain='example.com')
        # The helper returns whatever the API call produced, unchanged.
        self.assertEqual(result, api_response)


if __name__ == '__main__':
    unittest.main()
#!/bin/bash
#
# Copyright (c) 2019-2020 P3TERX <https://p3terx.com>
#
# This is free software, licensed under the MIT License.
# See /LICENSE for more information.
#
# https://github.com/P3TERX/Actions-OpenWrt
# File name: diy-part1.sh
# Description: OpenWrt DIY script part 1 (Before Update feeds)
#
# Uncomment a feed source
# Fix: the line above was missing its leading '#', so the shell tried to run
# "Uncomment" as a command instead of treating it as a comment.
sed -i 's/^#\(.*helloworld\)/\1/' feeds.conf.default

# Add a feed source
#sed -i '$a src-git lienol https://github.com/Lienol/openwrt-package' feeds.conf.default
|
<gh_stars>0
require 'bowling/frame'
module Bowling
  # All frames of a game: ten frames per player, played in numeric order.
  class Frames
    include Enumerable

    # Number (1..10) of the frame currently being bowled.
    attr_reader :current_frame_number

    # players - the participants; each gets frames numbered 1..10.
    def initialize(players)
      @frames = []
      @current_frame_number = 1
      players.each do |player|
        10.times { |n| @frames << Frame.new(n + 1, player) }
      end
    end

    # Enumerable hook: yields every frame.
    def each(&block)
      @frames.each(&block)
    end

    # Records a roll of `pins` pins for `player` in the current frame and
    # advances to the next frame once that frame is complete.
    def record_chance(player, pins)
      frame = find(current_frame_number, player)
      frame.record_chance(pins)
      @current_frame_number += 1 if frame.complete?
    end

    # Returns the frame with the given number belonging to the given player
    # (nil when there is no match).  detect replaces select{}.first: same
    # result, but stops at the first match.
    def find(num, player)
      @frames.detect { |f| f.number == num && f.player.name == player.name }
    end

    # Delegate any other message to the underlying frame array.
    def method_missing(method_name, *args, &block)
      @frames.send(method_name, *args, &block)
    end

    # Keep respond_to? truthful for messages handled by method_missing
    # (previously missing, so respond_to? lied about delegated methods).
    def respond_to_missing?(method_name, include_private = false)
      @frames.respond_to?(method_name, include_private) || super
    end
  end
end
|
#!/bin/bash
# Test default installation

# Packages whose tests are run and whose coverage is collected.
DIRS="cli controllers core cost_func hessian jacobian parsing results_processing utils"

FULL_DIRS=""
COV_ARGS=""
for d in $DIRS; do
  FULL_DIRS="$FULL_DIRS fitbenchmarking/$d"
done
for d in $FULL_DIRS; do
  COV_ARGS="$COV_ARGS --cov=$d"
done

# Run the default suite with coverage and JUnit XML output.
pytest $FULL_DIRS $COV_ARGS --cov-report term-missing --test-type default --junit-xml test-results/default_unit_pytest.xml
status=$?

# Propagate a pytest failure to the caller.
if [[ $status != 0 ]]
then
  exit 1
fi
exit 0
|
#!/usr/bin/env bats

load test_helper

# Exercises the govc custom-fields commands end to end against a simulated
# vCenter: add, list, set, read back, rename and remove a custom field.
@test "fields" {
  vcsim_env

  # Create a VM to attach field values to.
  vm_id=$(new_id)
  run govc vm.create $vm_id
  assert_success

  run govc fields.info vm/$vm_id host/*
  assert_success

  # A freshly generated field name must not exist yet.
  field=$(new_id)
  result=$(govc fields.ls | grep $field | wc -l)
  [ $result -eq 0 ]

  # After adding, the field shows up exactly once in the listing.
  key=$(govc fields.add $field)
  result=$(govc fields.ls | grep $field | wc -l)
  [ $result -eq 1 ]

  key=$(govc fields.ls | grep $field | awk '{print $1}')

  val="foo"

  # Setting via a bare id must fail; the inventory path form must succeed.
  run govc fields.set $field $val $vm_id
  assert_failure

  run govc fields.set $field $val vm/$vm_id
  assert_success

  run govc fields.info vm/$vm_id
  assert_success

  run govc fields.info -n $val vm/$vm_id
  assert_success

  # The custom value must be visible in vm.info with the same key/value.
  info=$(govc vm.info -json $vm_id | jq .VirtualMachines[0].CustomValue[0])
  ikey=$(jq -r .Key <<<"$info")
  assert_equal $key $ikey
  ival=$(jq -r .Value <<<"$info")
  assert_equal $val $ival

  # Renaming replaces the old name entirely in the listing.
  old_field=$field
  field=$(new_id)
  run govc fields.rename $key $field
  assert_success

  result=$(govc fields.ls | grep $old_field | wc -l)
  [ $result -eq 0 ]

  # Removing the field deletes it from the listing.
  run govc fields.rm $field
  assert_success

  result=$(govc fields.ls | grep $field | wc -l)
  [ $result -eq 0 ]
}
|
from typing import List
def generate_course_table(courses: List[str]) -> str:
    """Build the HTML for a course-selection table.

    Args:
        courses: Course names; one table row is emitted per course.

    Returns:
        HTML for a table with a header row followed by one row per course,
        each containing the course name and a Select button.
    """
    # Collect fragments and join once instead of repeated string +=.
    parts = [
        '<table class="table">\n',
        ' <tr>\n',
        ' <th>Select A Course:</th>\n',
        ' </tr>\n',
    ]
    for course in courses:
        parts.append(' <tr>\n')
        # Only the course name needs interpolation; the rest is static
        # (the original used f-strings on literals with no placeholders).
        parts.append(f' <td>{course}</td>\n')
        parts.append(' <td><button>Select</button></td>\n')
        parts.append(' </tr>\n')
    parts.append('</table>')
    return ''.join(parts)
#!/bin/bash
# Installs the latest ChromeDriver for Apple Silicon macOS: resolves the
# latest release number, downloads the matching zip, installs the binary
# under a versioned path, and points /usr/local/bin/chromedriver at it.

# Resolve the most recent ChromeDriver version string.
VERSION_URL="https://chromedriver.storage.googleapis.com/LATEST_RELEASE"
VERSION=$(curl -f --silent $VERSION_URL)

if [ -z "$VERSION" ]; then
  echo "Failed to read current version from $VERSION_URL. Aborting."
  exit 1
else
  echo "Current version is $VERSION"
fi

# Abort script if any of the next commands fails.
set -e
set -o pipefail

ZIPFILEPATH="/tmp/chromedriver-$VERSION.zip"
echo "Downloading to $ZIPFILEPATH"
# NOTE(review): assumes an _m1 build is published for this version --
# confirm for the ChromeDriver releases being targeted.
curl -f --silent "https://chromedriver.storage.googleapis.com/$VERSION/chromedriver_mac64_m1.zip" > "$ZIPFILEPATH"

# Install the binary under a version-suffixed name.
BINFILEPATH="/usr/local/bin/chromedriver-$VERSION"
echo "Extracting to $BINFILEPATH"
unzip -p "$ZIPFILEPATH" chromedriver > "$BINFILEPATH"

echo Setting execute flag
chmod +x "$BINFILEPATH"

# Repoint the stable name at the freshly installed versioned binary.
echo Updating symlink
ln -nfs "$BINFILEPATH" /usr/local/bin/chromedriver

echo Removing ZIP file
rm "$ZIPFILEPATH"

echo "Done"
chromedriver -v
|
#!/bin/sh
# CocoaPods-generated script: copies vendored frameworks into the app
# bundle, strips invalid architectures, and re-signs where required.
# NOTE(review): the shebang is /bin/sh but the script uses bashisms
# (function keyword, arrays, [[ ]]); it relies on sh resolving to bash.
set -e
set -u
set -o pipefail

# Report the failing line number whenever a command errors out.
function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

# Xcode build settings consumed by the helpers below.
COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
BCSYMBOLMAP_DIR="BCSymbolMaps"

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
# $1 - path (or basename resolvable in BUILT_PRODUCTS_DIR) of the .framework.
install_framework()
{
  # Resolve the framework source: product dir by full path, product dir by
  # basename, or the literal argument.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  # Follow a symlinked framework to its real location.
  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  if [ -d "${source}/${BCSYMBOLMAP_DIR}" ]; then
    # Locate and install any .bcsymbolmaps if present, and remove them from the .framework before the framework is copied
    find "${source}/${BCSYMBOLMAP_DIR}" -name "*.bcsymbolmap"|while read f; do
      echo "Installing $f"
      install_bcsymbolmap "$f" "$destination"
      rm "$f"
    done
    rmdir "${source}/${BCSYMBOLMAP_DIR}"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  # Locate the framework's main binary (handling flat and symlinked layouts).
  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"

  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies and strips a vendored dSYM
# $1 - path of the .dSYM bundle; $2 - whether to warn on missing archs.
install_dsym() {
  local source="$1"
  warn_missing_arch=${2:-true}
  if [ -r "$source" ]; then
    # Copy the dSYM into the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

    # Locate the DWARF binary inside the copied dSYM bundle.
    local basename
    basename="$(basename -s .dSYM "$source")"
    binary_name="$(ls "$source/Contents/Resources/DWARF")"
    binary="${DERIVED_FILES_DIR}/${basename}.dSYM/Contents/Resources/DWARF/${binary_name}"

    # Strip invalid architectures from the dSYM.
    if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
      strip_invalid_archs "$binary" "$warn_missing_arch"
    fi

    if [[ $STRIP_BINARY_RETVAL == 0 ]]; then
      # Move the stripped file into its final destination.
      # Fix: the echoed command previously said "${basename}.framework.dSYM",
      # which did not match the actual rsync source below.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.dSYM"
    fi
  fi
}
# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# Strip invalid architectures
# $1 - binary path; $2 - whether to warn when no arch matches the build.
# Sets STRIP_BINARY_RETVAL to 0 on success, 1 when no arch matched.
strip_invalid_archs() {
  binary="$1"
  warn_missing_arch=${2:-true}
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    if [[ "$warn_missing_arch" == "true" ]]; then
      echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    fi
    STRIP_BINARY_RETVAL=1
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=0
}
# Copies the bcsymbolmap files of a vendored framework
# $1 - path of the .bcsymbolmap file to install into BUILT_PRODUCTS_DIR.
install_bcsymbolmap() {
    local bcsymbolmap_path="$1"
    local destination="${BUILT_PRODUCTS_DIR}"
    # Fix: the echoed command previously used unescaped inner quotes, so the
    # printed line was mangled by word splitting and did not match the rsync
    # invocation below (sibling functions escape these quotes).
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${bcsymbolmap_path}\" \"${destination}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}
# Signs a framework with the provided identity
# $1 - path to sign.  No-op unless the build settings require signing.
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"

    # Background the command when parallel signing is enabled; the caller
    # waits for all jobs at the end of the script.
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}

# Install the pod frameworks for the active configuration.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/SwiftModuleA/SwiftModuleA.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/SwiftModuleA/SwiftModuleA.framework"
fi
# Wait for any backgrounded parallel codesign jobs before finishing.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
<filename>website/apps/client_details/models.py<gh_stars>10-100
class ClientDetailsBase(object):
    """
    Base class for serializing a backend details dict to a client-facing dict.

    ClientDetails requires child classes to define a list TO_CLIENT_WHITELIST.
    This list should contain string values or (string, boolean) tuples indicating
    whether or not a field is optionally included or (string, boolean, string) values
    indicating the name of the attribute to store in the whitelisted fieldname.
    'foo' -> required {'foo': self.foo}
    ('foo', True) -> optional {'foo': self.foo}
    ('foo', False, 'bar') -> required {'foo': self.bar}
    """

    def __init__(self, details):
        # Keep the raw backend dict, and mirror its keys as attributes so
        # whitelist entries can be resolved with getattr().
        self._d = details
        # .items() instead of the Python-2-only .iteritems() (works on both).
        for k, v in details.items():
            # hasattr() guard keeps wrapper methods/properties defined in
            # child classes from being clobbered by raw dict values.
            if not hasattr(self, k):
                setattr(self, k, v)

    def __eq__(self, other):
        return (isinstance(other, self.__class__)
                and hasattr(other, '_d') and self._d == other._d)

    def __getitem__(self, key):
        return getattr(self, key)

    def to_client(self, **kwargs):
        """Return the client-facing dict described by TO_CLIENT_WHITELIST.

        Raises AttributeError when a required field is missing and
        ValueError for a malformed whitelist entry.
        """
        # Things to add to the JSON serialization.
        def get_val(val):
            # Whitelist entries may point at zero-argument callables.
            if callable(val):
                return val()
            return val

        d = {}
        for property_name in self.TO_CLIENT_WHITELIST:
            if isinstance(property_name, tuple):
                if len(property_name) == 2:
                    key, optional = property_name
                    alias = key
                elif len(property_name) == 3:
                    alias, optional, key = property_name
                else:
                    # Previously a malformed tuple silently reused the
                    # key/optional/alias values from the prior iteration.
                    raise ValueError(
                        "TO_CLIENT_WHITELIST tuples must have 2 or 3 items: %r"
                        % (property_name,))
            else:
                key = property_name
                optional = False
                alias = key
            try:
                d[alias] = get_val(getattr(self, key))
            except AttributeError:
                # Bare raise preserves the original traceback; required
                # fields propagate, optional ones are skipped.
                if not optional:
                    raise
        return d

    def to_backend(self):
        """Return the raw backend details dict."""
        return self._d

    @property
    def TO_CLIENT_WHITELIST(self):
        # Plain property: the previous @property-over-@classmethod stacking
        # produced a non-callable fget, so the intended error never raised.
        # Subclasses override this with a plain class attribute.
        raise NotImplementedError("You must define TO_CLIENT_WHITELIST in order to serialize.")
|
/***************************************************************************
* Copyright (C) 2011 by <NAME> *
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
* This program is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
* GNU General Public License for more details. *
* *
* As a special exception, if other files instantiate templates or use *
* macros or inline functions from this file, or you compile this file *
* and link it with other works to produce a work based on this file, *
* this file does not by itself cause the resulting work to be covered *
* by the GNU General Public License. However the source code for this *
* file must still be made available in accordance with the GNU General *
* Public License. This exception does not invalidate any other reasons *
* why a work based on this file might be covered by the GNU General *
* Public License. *
* *
* You should have received a copy of the GNU General Public License *
* along with this program; if not, see <http://www.gnu.org/licenses/> *
***************************************************************************/
#include <cmath>
#ifndef MATH_HELPERS_H
#define MATH_HELPERS_H
/**
* Square matrix
* \param T type of an element of the matrix
* \param n matrix order
*/
/**
 * Fixed-size n x n matrix with elements stored row-major in a flat array.
 * \param T type of an element of the matrix
 * \param n matrix order
 */
template<typename T, int n>
class matrix_base
{
public:
    /// Default constructor; element values are left uninitialized for speed.
    matrix_base() {}

    /**
     * Build the matrix from a row-major array of elements.
     * \param v array of elements; its size must be n*n
     */
    explicit matrix_base(const T *v)
    {
        for(int k=0; k<n*n; k++) data[k]=v[k];
    }

    /// Mutable access to element (row, col).
    T& at(unsigned int row, unsigned int col) { return data[n*row+col]; }

    /// Read-only access to element (row, col).
    T at(unsigned int row, unsigned int col) const { return data[n*row+col]; }

    /// Element-wise matrix sum.
    matrix_base operator+(const matrix_base& rhs) const
    {
        matrix_base sum;
        for(int k=0; k<n*n; k++) sum.data[k] = data[k] + rhs.data[k];
        return sum;
    }

    /// In-place element-wise matrix sum.
    const matrix_base& operator+=(const matrix_base& rhs)
    {
        for(int k=0; k<n*n; k++) data[k] += rhs.data[k];
        return *this;
    }

    /// Standard O(n^3) matrix product.
    matrix_base operator*(const matrix_base& rhs) const
    {
        matrix_base prod;
        prod.clear();
        for(int row=0; row<n; row++)
            for(int col=0; col<n; col++)
                for(int k=0; k<n; k++)
                    prod.at(row,col) += at(row,k) * rhs.at(k,col);
        return prod;
    }

    /// In-place matrix product.
    const matrix_base& operator*=(const matrix_base& rhs)
    {
        *this = *this * rhs;
        return *this;
    }

    /// Set every element to zero.
    void clear()
    {
        for(int k=0; k<n*n; k++) data[k] = 0;
    }

    /// Load the identity matrix.
    void eye()
    {
        for(int row=0; row<n; row++)
            for(int col=0; col<n; col++)
                at(row,col) = (row==col) ? 1 : 0;
    }

    // Default copy constructor and operator= are correct for the flat array.
private:
    T data[n*n];
};
/**
 * Column vector
 * \param T type of an element of the vector
 * \param n vector size
 */
template<typename T, int n>
class vector_base
{
public:
    /**
     * Default constructor, yields a vector with uninitialized values for speed
     * reasons
     */
    vector_base() {}
    /**
     * Construct a vector from an array of elements
     * \param v array of elements. Its size must be n
     */
    explicit vector_base(const T *v) { for(int i=0;i<n;i++) data[i]=v[i]; }
    /**
     * Unchecked element access (no bounds checking, for speed)
     * \param row vector row
     * \return reference to value[row]
     */
    T& at(unsigned int row) { return data[row]; }
    /**
     * Unchecked element access (no bounds checking, for speed)
     * \param row vector row
     * \return value[row]
     */
    T at(unsigned int row) const { return data[row]; }
    /**
     * Vector sum
     */
    vector_base operator+(const vector_base& rhs) const
    {
        vector_base result;
        for(int i=0;i<n;i++) result.data[i]=this->data[i]+rhs.data[i];
        return result;
    }
    /**
     * Vector sum, in place
     */
    const vector_base& operator+=(const vector_base& rhs)
    {
        for(int i=0;i<n;i++) this->data[i]+=rhs.data[i];
        return *this;
    }
    /**
     * Element-wise (Hadamard) vector product -- NOT a dot product
     */
    vector_base operator*(const vector_base& rhs) const
    {
        vector_base result;
        for(int i=0;i<n;i++) result.data[i]=this->data[i]*rhs.data[i];
        return result;
    }
    /**
     * Element-wise (Hadamard) vector product, in place -- NOT a dot product
     */
    const vector_base& operator*=(const vector_base& rhs)
    {
        for(int i=0;i<n;i++) this->data[i]*=rhs.data[i];
        return *this;
    }
    /**
     * Set all elements to zero
     */
    void clear() { for(int i=0;i<n;i++) data[i]=0; }
    //Uses default copy constructor and operator=
private:
    T data[n]; // elements
};
/**
 * Matrix * vector multiplication, yielding the column vector m*v
 */
template<typename T, int n>
vector_base<T,n> operator*(const matrix_base<T,n>& m, const vector_base<T,n>& v)
{
    vector_base<T,n> out;
    out.clear();
    // Accumulate one row's dot product per output element.
    for(int row=0; row<n; row++)
    {
        for(int col=0; col<n; col++) out.at(row) += m.at(row,col)*v.at(col);
    }
    return out;
}
typedef matrix_base<float,3> Matrix3f; ///< 3 dimensional matrix of float
typedef vector_base<float,3> Vector3f; ///< 3 dimensional vector of float
/**
 * x axis rotation
 * \param angle rotation angle (radians, as used by std::cos/std::sin)
 */
inline Matrix3f xrot(float angle)
{
    using namespace std;
    // Evaluate the trigonometric functions once.
    const float c=cos(angle);
    const float s=sin(angle);
    const float rotation[]=
    {
        1.0f, 0.0f, 0.0f,
        0.0f, c,    -s,
        0.0f, s,    c
    };
    return Matrix3f(rotation);
}
/**
 * y axis rotation
 * \param angle rotation angle (radians, as used by std::cos/std::sin)
 */
inline Matrix3f yrot(float angle)
{
    using namespace std;
    // Evaluate the trigonometric functions once.
    const float c=cos(angle);
    const float s=sin(angle);
    const float rotation[]=
    {
        c,    0.0f, s,
        0.0f, 1.0f, 0.0f,
        -s,   0.0f, c
    };
    return Matrix3f(rotation);
}
/**
 * z axis rotation
 * \param angle rotation angle (radians, as used by std::cos/std::sin)
 */
inline Matrix3f zrot(float angle)
{
    using namespace std;
    // Evaluate the trigonometric functions once.
    const float c=cos(angle);
    const float s=sin(angle);
    const float rotation[]=
    {
        c,    -s,   0.0f,
        s,    c,    0.0f,
        0.0f, 0.0f, 1.0f
    };
    return Matrix3f(rotation);
}
/**
 * Uniform scaling matrix
 * \param ratio scale ratio, must be >=0 for the transformation to be a scaling
 */
inline Matrix3f scale(float ratio)
{
    // Zero everything, then write the scale factor on the diagonal.
    Matrix3f m;
    m.clear();
    for(int i=0;i<3;i++) m.at(i,i)=ratio;
    return m;
}
#endif //MATH_HELPERS_H
|
import { css, CSSResultGroup, html, LitElement, TemplateResult } from "lit";
import { customElement, property } from "lit/decorators";
import "../../../src/components/ha-svg-icon";
// Presentational label for a formfield: an optional icon (image URL or SVG
// path), the label text, and an optional version suffix.
@customElement("supervisor-formfield-label")
class SupervisorFormfieldLabel extends LitElement {
  // Label text; required.
  @property({ type: String }) public label!: string;
  // Image shown before the label; takes precedence over iconPath.
  @property({ type: String }) public imageUrl?: string;
  // SVG path rendered via ha-svg-icon when no imageUrl is set.
  @property({ type: String }) public iconPath?: string;
  // Optional version string rendered in parentheses after the label.
  @property({ type: String }) public version?: string;
  protected render(): TemplateResult {
    return html`
      ${this.imageUrl
        ? html`<img loading="lazy" .src=${this.imageUrl} class="icon" />`
        : this.iconPath
        ? html`<ha-svg-icon .path=${this.iconPath} class="icon"></ha-svg-icon>`
        : ""}
      <span class="label">${this.label}</span>
      ${this.version
        ? html`<span class="version">(${this.version})</span>`
        : ""}
    `;
  }
  static get styles(): CSSResultGroup {
    return css`
      :host {
        cursor: pointer;
        display: flex;
        align-items: center;
      }
      .label {
        margin-right: 4px;
      }
      .version {
        color: var(--secondary-text-color);
      }
      .icon {
        max-height: 22px;
        max-width: 22px;
        margin-right: 8px;
      }
    `;
  }
}
// Register the element in the global tag-name map so DOM APIs such as
// document.createElement("supervisor-formfield-label") are correctly typed.
declare global {
  interface HTMLElementTagNameMap {
    "supervisor-formfield-label": SupervisorFormfieldLabel;
  }
}
|
<reponame>EmmanuelArethens/firstnode
/**
 * Connects to the local "datadoggo" MySQL database and logs every row of
 * the "doggo" table (name and race) to the console.
 *
 * Fix: the connection is now released with con.end() once the query
 * completes (or fails); previously it was never closed, leaking the
 * connection and keeping the process alive.
 */
function db(){
    let mysql = require('mysql');
    let con = mysql.createConnection({
        host: "localhost",
        user: "root",
        password: "",
        database: "datadoggo"
    });
    con.connect(function(err) {
        if (err) throw err;
        console.log("Connected!");
    });
    let queryString = 'SELECT * FROM doggo';
    con.query(queryString, function(err, rows, fields) {
        if (err) {
            // Release the connection before propagating the error.
            con.end();
            throw err;
        }
        for (let i in rows) {
            console.log('Doggo Name: ', rows[i].name, rows[i].race);
        }
        // All rows processed: close the connection so the process can exit.
        con.end();
    });
}
module.exports = db;
#!/bin/bash
#./showcase.sh local test postbody.processes.success.json
### Set variables
# Require a profile argument. Fix: exit with a non-zero status on error
# (previously exited 0, so callers could not detect the failure).
if [ $# -eq 0 ]
then
  echo "no profile specified";exit 1
fi
# Configure credentials and base URLs for a locally running stack.
setlocal () {
    echo "using local profile"
    actinia_core_auth='actinia-gdi:actinia-gdi'
    actinia_core_baseUrl="127.0.0.1:8088"
    actinia_gdi_baseUrl="127.0.0.1:5000"
}
# Select the generic test job endpoint.
settest () {
    url="$actinia_gdi_baseUrl/processes/test/jobs"
}
# Select the Sentinel-1 job endpoint.
sets1 () {
    url="$actinia_gdi_baseUrl/processes/sentinel1/jobs"
}
# Profile selection (first argument).
case $1 in
    local) setlocal;;
    *) echo -e "$1 not recognized as parameter. Aborting";exit 1;;
esac
# Endpoint selection (second argument).
# Bug fix: the error message used to print $1 (the profile) instead of $2.
case $2 in
    test) settest;;
    s1) sets1;;
    *) echo -e "$2 not recognized as parameter. Aborting";exit 1;;
esac
json=$3
# Require a JSON payload file as third argument. Fixes: quote the variable
# in the test, and exit non-zero so callers can detect the error
# (previously exited 0).
if [ -z "$json" ]
then
    echo "no json file specified"
    exit 1
else
    echo "using json file: $json"
fi
### Start jobs
echo "Posting to $url..."
curl -X POST $url \
  -H 'accept: application/json' \
  -H 'Content-Type: application/json' \
  -d @${json} > \
  resp.json && cat resp.json
### Check status
# read ids from response
actinia_gdi_id=$(cat resp.json | json idpk_jobs)
actinia_core_id=$(cat resp.json | json actinia_core_jobid)
# check job status in actinia-gdi
if [ -z $actinia_gdi_id ]
then
    echo -e "\n no actinia-gdi id found..."
else
    curl -X GET "$url/$actinia_gdi_id"
    echo "curl -X GET \"$url/$actinia_gdi_id\""
fi
# check job status in actinia-core
if [ -z $actinia_core_id ]
then
    echo -e "\n no actinia-core id found... retrying"
    # this happens when using test endpoint, as no actinia-gdi id is created
    actinia_core_url=$(cat resp.json | json status)
    curl -u $actinia_core_auth -X GET \
        "$actinia_core_url"
    echo "curl -u $actinia_core_auth -X GET \
        \"$actinia_core_url\""
else
    curl -u $actinia_core_auth -X GET \
        "$actinia_core_baseUrl/api/v1/resources/actinia-gdi/$actinia_core_id"
    echo "curl -u $actinia_core_auth -X GET \
        \"$actinia_core_baseUrl/api/v1/resources/actinia-gdi/$actinia_core_id\""
fi
# NOTE(review): the script ends here; everything after this exit is kept only
# as a manual debugging recipe and never executes.
exit 0
##### Talk to actinia-core directly
url=$actinia_core_baseUrl/api/v1/locations/latlong/processing_async_export
json=../actinia_gdi/templates/actiniaCore/examples/pc_sleep.json
curl -X POST $url \
    -u $actinia_core_auth \
    -H 'accept: application/json' \
    -H 'Content-Type: application/json' \
    -d @${json} > \
    resp.json && cat resp.json
cat resp.json | json urls.status | xargs curl -L -u $actinia_core_auth -X GET
# Convert every CSV in the current directory to an MNI .tag file.
# Fixes: the output filename is computed once instead of three times, and
# all expansions are quoted so filenames containing spaces work.
for file in *.csv; do
    out="$(basename "$file" .csv).tag"
    # Tag-file header.
    echo """MNI Tag Point File
Volumes = 1;
Points = """ > "$out"
    # Skip the CSV header row, then emit: x y z 1 label 1 label.
    # FPAT keeps quoted fields that contain commas intact.
    tail -n +2 "$file" | awk -v FPAT="([^,]+)|(\"[^\"]+\")" -v OFS=" " '{print $14,$15,$16,1,$3,1,$3}' >> "$out"
    echo ";" >> "$out"
done
|
#!/bin/bash
# Copyright (C) 2019 SCARV project <info@scarv.org>
#
# Use of this source code is restricted per the MIT license, a copy of which
# can be found at https://opensource.org/licenses/MIT (or should be included
# as LICENSE.txt within the associated archive or repository).
# =============================================================================
# User account settings for the container; overridable via the environment
# so files created on the bind mount match the caller's uid/gid.
if [ -z "${DOCKER_USER}" ] ; then
  DOCKER_USER="scarv"
fi
if [ -z "${DOCKER_UID}" ] ; then
  DOCKER_UID="1000"
fi
if [ -z "${DOCKER_GID}" ] ; then
  DOCKER_GID="1000"
fi
# Create the group and the (non-root) user the build will run as.
groupadd --gid ${DOCKER_GID} ${DOCKER_USER} ; useradd --gid ${DOCKER_GID} --uid ${DOCKER_UID} --no-user-group --create-home --shell /bin/bash ${DOCKER_USER}
# -----------------------------------------------------------------------------
# Load the project configuration, then run make as the unprivileged user.
cd /mnt/scarv/libscarv ; source ./bin/conf.sh
exec /usr/sbin/gosu ${DOCKER_USER} bash --login -c "make --directory=${REPO_HOME} ${*}"
# =============================================================================
|
/*
* Copyright (c) 2015. Seagate Technology PLC. All rights reserved.
*/
package com.seagate.alto.provider.lyve.response;
import android.net.Uri;
import com.google.gson.Gson;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
import com.seagate.alto.provider.lyve.LyveCloudProvider;
import com.seagate.alto.provider.Provider;
import java.util.Date;
/**
 * File-entry model deserialized (via Gson) from the Lyve cloud REST API and
 * exposed through the generic {@link Provider.FileMetadata} interface.
 */
public class Metadata implements Provider.FileMetadata {
    @SerializedName("parent")
    @Expose
    public String parent;
    @SerializedName("id")
    @Expose
    public String id;
    @SerializedName("name")
    @Expose
    public String name;
    @SerializedName("path_lower")
    @Expose
    public String pathLower;
    @SerializedName(".tag")
    @Expose
    public String Tag;
    @SerializedName("client_modified")
    @Expose
    public String clientModified;
    @SerializedName("server_modified")
    @Expose
    public String serverModified;
    @SerializedName("rev")
    @Expose
    public String rev;
    @SerializedName("size")
    @Expose
    public Integer size;

    @Override
    public String name() {
        return name;
    }

    @Override
    public String pathLower() {
        return pathLower;
    }

    @Override
    public String parentSharedFolderId() {
        return parent;
    }

    @Override
    public String id() {
        return id;
    }

    @Override
    public Date clientModified() {
        return LyveCloudProvider.dateFromString(clientModified);
    }

    @Override
    public Date serverModified() {
        return LyveCloudProvider.dateFromString(serverModified);
    }

    @Override
    public String rev() {
        return rev;
    }

    @Override
    public long size() {
        // Bug fix: "size" is a boxed Integer that may be absent (null) in the
        // JSON; unboxing it directly threw a NullPointerException. Report 0
        // for entries without a size instead.
        return size == null ? 0 : size;
    }

    @Override
    public Provider.MediaInfo mediaInfo() {
        // Placeholder implementation: zeroed dimensions/coordinates and "now"
        // as the capture time.
        return new Provider.MediaInfo() {
            public Provider.MediaInfo.Tag tag() {
                return Provider.MediaInfo.Tag.metadata;
            }

            public Provider.MediaMetadata metadata() {
                return new Provider.MediaMetadata() {
                    public Provider.Size dimensions() {
                        return new Provider.Size(0, 0);
                    }

                    public double latitude() {
                        return 0;
                    }

                    public double longitude() {
                        return 0;
                    }

                    public Date timeTaken() {
                        return new Date();
                    }
                };
            }
        };
    }

    @Override
    public Uri imageUri() {
        return LyveCloudProvider.getImageUri(pathLower, null, null);
    }

    @Override
    public Uri thumbnailUri(String type, String size) {
        return LyveCloudProvider.getImageUri(pathLower, type, size);
    }

    @Override
    public String toString() {
        return new Gson().toJson(this);
    }
}
|
/// <summary>
/// Builds a size-by-size matrix filled with uniformly random digits 0-9.
/// </summary>
public static int[,] GenerateRandomMatrix(int size)
{
    var rng = new Random();
    var cells = new int[size, size];
    for (int row = 0; row < size; row++)
    {
        for (int col = 0; col < size; col++)
        {
            // Next's upper bound is exclusive, so values are 0..9.
            cells[row, col] = rng.Next(0, 10);
        }
    }
    return cells;
}
from typing import Optional
class UnitDestroyedEvent:
    """An event indicating which unit just died.

    NOTE(review): ``Unit`` is not imported in this excerpt — it is presumably
    imported by the full module; confirm before reusing this snippet alone.
    """
    def __init__(self, unit_tag: int, unit: Optional[Unit]):
        # ``unit`` may legitimately be None (explicitly allowed by the second
        # assert), e.g. when no Unit object is available for the tag.
        assert isinstance(unit_tag, int)
        assert isinstance(unit, Unit) or unit is None
        self.unit_tag: int = unit_tag
        self.unit: Optional[Unit] = unit
package io.opensphere.osh.aerialimagery.results;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import org.easymock.EasyMock;
import org.easymock.EasyMockSupport;
import org.junit.Test;
import io.opensphere.core.preferences.Preferences;
import io.opensphere.core.preferences.PreferencesRegistry;
import io.opensphere.osh.aerialimagery.model.LinkedLayer;
import io.opensphere.osh.aerialimagery.model.LinkedLayers;
/**
 * Unit test for {@link LayerLinker} class.
 */
public class LayerLinkerTest
{
    /**
     * The linked layer id.
     */
    private static final String ourLinkedLayerId = "linkedTypeKey2";
    /**
     * The other linked layer id.
     */
    private static final String ourOtherLinkedLayerId = "otherLinkedTypeKey2";
    /**
     * Tests getting the linked layer id.
     */
    @Test
    public void testGetLinkedLayer()
    {
        EasyMockSupport support = new EasyMockSupport();
        LinkedLayers links = createLinks();
        PreferencesRegistry prefsReg = createPrefsRegistry(support, links);
        support.replayAll();
        // ourLinkedLayerId maps to ourOtherLinkedLayerId in createLinks().
        LayerLinker linker = new LayerLinker(prefsReg);
        assertEquals(ourOtherLinkedLayerId, linker.getLinkedLayerId(ourLinkedLayerId));
        support.verifyAll();
    }
    /**
     * Tests getting the linker layer id when the layer id is the other one.
     */
    @Test
    public void testGetLinkedLayerReverse()
    {
        EasyMockSupport support = new EasyMockSupport();
        LinkedLayers links = createLinks();
        PreferencesRegistry prefsReg = createPrefsRegistry(support, links);
        support.replayAll();
        // Lookup must work in both directions of the link.
        LayerLinker linker = new LayerLinker(prefsReg);
        assertEquals(ourLinkedLayerId, linker.getLinkedLayerId(ourOtherLinkedLayerId));
        support.verifyAll();
    }
    /**
     * Tests getting the linked layer of an unlinked layer.
     */
    @Test
    public void testGetLinkedNotLinked()
    {
        EasyMockSupport support = new EasyMockSupport();
        LinkedLayers links = createLinks();
        PreferencesRegistry prefsReg = createPrefsRegistry(support, links);
        support.replayAll();
        LayerLinker linker = new LayerLinker(prefsReg);
        assertNull(linker.getLinkedLayerId("iamunlinked"));
        support.verifyAll();
    }
    /**
     * Tests getting the linked layer of an unlinked layer.
     */
    @Test
    public void testGetLinkedNull()
    {
        EasyMockSupport support = new EasyMockSupport();
        // Preferences contain no link configuration at all (null).
        PreferencesRegistry prefsReg = createPrefsRegistry(support, null);
        support.replayAll();
        LayerLinker linker = new LayerLinker(prefsReg);
        assertNull(linker.getLinkedLayerId(ourLinkedLayerId));
        support.verifyAll();
    }
    /**
     * Creates the test linked layers.
     *
     * @return The linked layers.
     */
    private LinkedLayers createLinks()
    {
        LinkedLayers linkedLayers = new LinkedLayers();
        LinkedLayer linkedLayer = new LinkedLayer();
        linkedLayer.setLinkedLayersTypeKey("linkedTypeKey1");
        linkedLayer.setOtherLinkedLayersTypeKey("otherLinkedTypeKey1");
        linkedLayers.getLinkedLayers().add(linkedLayer);
        linkedLayer = new LinkedLayer();
        linkedLayer.setLinkedLayersTypeKey(ourLinkedLayerId);
        linkedLayer.setOtherLinkedLayersTypeKey(ourOtherLinkedLayerId);
        linkedLayers.getLinkedLayers().add(linkedLayer);
        return linkedLayers;
    }
    /**
     * Creates the easy mocked preferences registry.
     *
     * @param support Used to create the mock.
     * @param linkedLayers The linked layers to return.
     * @return The preferences registry.
     */
    private PreferencesRegistry createPrefsRegistry(EasyMockSupport support, LinkedLayers linkedLayers)
    {
        // The registry hands out preferences keyed on the LayerLinker class,
        // which in turn return the canned LinkedLayers under "linkedlayers".
        Preferences prefs = support.createMock(Preferences.class);
        EasyMock.expect(prefs.getJAXBObject(EasyMock.eq(LinkedLayers.class), EasyMock.cmpEq("linkedlayers"), EasyMock.isNull()))
                .andReturn(linkedLayers);
        PreferencesRegistry registry = support.createMock(PreferencesRegistry.class);
        EasyMock.expect(registry.getPreferences(EasyMock.eq(LayerLinker.class))).andReturn(prefs);
        return registry;
    }
}
|
import requests
def delete_snapshot_locks(base_path: str, sid: str) -> None:
uri_str = f"{base_path}/platform/1/snapshot/snapshots/{sid}/locks"
response = requests.delete(uri_str)
if response.status_code == 200:
print("Snapshot locks deleted successfully")
else:
print(f"Failed to delete snapshot locks. Status code: {response.status_code}") |
package com.alipay.api.domain;
import com.alipay.api.AlipayObject;
import com.alipay.api.internal.mapping.ApiField;
/**
* 天猫机具解绑接口
*
* @author auto create
* @since 1.0, 2016-10-26 18:05:15
*/
// Plain annotation-mapped request model: @ApiField-annotated fields with
// trivial accessors, serialized by the Alipay SDK.
public class AlipayOfflineProviderEquipmentAuthRemoveModel extends AlipayObject {
    private static final long serialVersionUID = 4195676111684329126L;
    /**
     * 机具编号 (device id)
     */
    @ApiField("device_id")
    private String deviceId;
    /**
     * 机具类型 (device type)
     */
    @ApiField("device_type")
    private String deviceType;
    /**
     * Extended info: a JSON-formatted string containing
     * auth_alipay_card_no = the authorized merchant's Alipay card number.
     */
    @ApiField("ext_info")
    private String extInfo;
    /**
     * 机具厂商PID (device manufacturer PID)
     */
    @ApiField("merchant_pid")
    private String merchantPid;
    /**
     * 操作人名称 (operator name)
     */
    @ApiField("operator")
    private String operator;
    /**
     * 操作人ID (operator id)
     */
    @ApiField("operator_id")
    private String operatorId;
    public String getDeviceId() {
        return this.deviceId;
    }
    public void setDeviceId(String deviceId) {
        this.deviceId = deviceId;
    }
    public String getDeviceType() {
        return this.deviceType;
    }
    public void setDeviceType(String deviceType) {
        this.deviceType = deviceType;
    }
    public String getExtInfo() {
        return this.extInfo;
    }
    public void setExtInfo(String extInfo) {
        this.extInfo = extInfo;
    }
    public String getMerchantPid() {
        return this.merchantPid;
    }
    public void setMerchantPid(String merchantPid) {
        this.merchantPid = merchantPid;
    }
    public String getOperator() {
        return this.operator;
    }
    public void setOperator(String operator) {
        this.operator = operator;
    }
    public String getOperatorId() {
        return this.operatorId;
    }
    public void setOperatorId(String operatorId) {
        this.operatorId = operatorId;
    }
}
|
package org.coins1920.group05.model.trello;
import lombok.Getter;
import lombok.ToString;
import org.coins1920.group05.model.general.AbstractMember;
@Getter
@ToString(callSuper = true)
public class Member extends AbstractMember {
private String avatarHash;
// Constructor
public Member(String memberId, String memberName, String avatarHash) {
super(memberId, memberName);
this.avatarHash = avatarHash;
}
// Getter for avatarHash
public String getAvatarHash() {
return avatarHash;
}
} |
// Ambient declaration describing custom RPC method metadata ("rpc") and
// runtime type definitions ("types") for a liquid-staking chain module,
// consumed by a Polkadot-style API type registry (see PolkadotAccountId).
declare const _default: {
    rpc: {
        getAvailableUnbonded: {
            description: string;
            params: {
                name: string;
                type: string;
            }[];
            type: string;
        };
        getLiquidStakingExchangeRate: {
            description: string;
            params: never[];
            type: string;
        };
    };
    types: {
        SubAccountStatus: {
            bonded: string;
            available: string;
            unbonding: string;
            mockRewardRate: string;
        };
        Params: {
            targetMaxFreeUnbondedRatio: string;
            targetMinFreeUnbondedRatio: string;
            targetUnbondingToFreeRatio: string;
            unbondingToFreeAdjustment: string;
            baseFeeRate: string;
        };
        Ledger: {
            bonded: string;
            unbondingToFree: string;
            freePool: string;
            toUnbondNextEra: string;
        };
        ChangeRate: {
            _enum: {
                NoChange: string;
                NewValue: string;
            };
        };
        ChangeRatio: {
            _enum: {
                NoChange: string;
                NewValue: string;
            };
        };
        BalanceInfo: {
            amount: string;
        };
        PolkadotAccountId: string;
        PolkadotAccountIdOf: string;
    };
};
export default _default;
|
def store_latest_five_in_stack(item):
    """Append ``item`` to the module-level ``stack``, keeping only the five
    most recent items, and return the stack.

    Fixes two bugs in the original:

    - ``stack`` was assigned inside the function without a ``global``
      declaration, making it a local name throughout the body, so every
      call raised ``UnboundLocalError`` before touching the module list;
    - the ``> 5`` trim ran *before* the append, so the list could grow to
      six elements.
    """
    global stack  # the function maintains the module-level list defined below
    stack.append(item)
    if len(stack) > 5:
        # Drop the oldest entries so that at most five remain.
        stack = stack[-5:]
    return stack

# Module-level container mutated by store_latest_five_in_stack().
stack = []
def capitalize(s):
    """Return *s* with each whitespace-separated word capitalized.

    Like the original, runs of whitespace collapse to single spaces and
    leading/trailing whitespace is dropped, because ``str.split()``
    discards empty fields.
    """
    words = s.split()
    return " ".join(map(str.capitalize, words))
module AuthServices
  # Service object that authenticates an API request from its HTTP headers:
  # decodes the JWT in the Authorization header and resolves it to a User.
  class AuthorizeApiRequest < ApplicationService
    attr_reader :headers, :user
    def initialize(headers = {})
      super()
      @headers = headers
      @user = nil
    end
    # Entry point: returns a hash holding the authenticated user.
    def call
      {
        user: extract_user
      }
    end
    private
    def extract_user
      # check if user is present in the db and memoize user object
      @user ||= User.find(decoded_auth_token[:user_id]) if decoded_auth_token
    rescue ActiveRecord::RecordNotFound => e
      # Translate the persistence error into the API's auth error.
      raise ExceptionHandler::InvalidToken, Message.invalid_token
    end
    # Decodes (and memoizes) the JWT taken from the Authorization header.
    def decoded_auth_token
      @decoded_auth_token ||= JsonWebToken.decode(http_auth_header)
    end
    # Returns the last whitespace-separated part of the Authorization header
    # (e.g. the token after "Bearer"); raises when the header is missing.
    def http_auth_header
      if headers['Authorization'].present?
        return headers['Authorization'].split(' ').last
      end
      raise ExceptionHandler::MissingToken, Message.missing_token
    end
  end
end
|
# Outputs current branch info in prompt format
function git_prompt_info() {
  local ref
  if [[ "$(command git config --get oh-my-zsh.hide-status 2>/dev/null)" != "1" ]]; then
    # Prefer the symbolic branch name; fall back to the short SHA when HEAD
    # is detached. Return silently when not inside a git repository.
    ref=$(command git symbolic-ref HEAD 2> /dev/null) || \
    ref=$(command git rev-parse --short HEAD 2> /dev/null) || return 0
    echo "$ZSH_THEME_GIT_PROMPT_PREFIX${ref#refs/heads/}$(parse_git_dirty)$ZSH_THEME_GIT_PROMPT_SUFFIX"
  fi
}
# Checks if working tree is dirty
function parse_git_dirty() {
  local STATUS=''
  local FLAGS
  FLAGS=('--porcelain')
  if [[ "$(command git config --get oh-my-zsh.hide-dirty)" != "1" ]]; then
    if [[ $POST_1_7_2_GIT -gt 0 ]]; then
      # --ignore-submodules=dirty requires git >= 1.7.2 (see POST_1_7_2_GIT).
      FLAGS+='--ignore-submodules=dirty'
    fi
    if [[ "$DISABLE_UNTRACKED_FILES_DIRTY" == "true" ]]; then
      FLAGS+='--untracked-files=no'
    fi
    STATUS=$(command git status ${FLAGS} 2> /dev/null | tail -n1)
  fi
  if [[ -n $STATUS ]]; then
    echo "$ZSH_THEME_GIT_PROMPT_DIRTY"
  else
    echo "$ZSH_THEME_GIT_PROMPT_CLEAN"
  fi
}
# Gets the difference between the local and remote branches
function git_remote_status() {
  local remote ahead behind git_remote_status git_remote_status_detailed
  # Upstream name with the refs/remotes/ prefix stripped (empty when the
  # branch has no upstream configured).
  remote=${$(command git rev-parse --verify ${hook_com[branch]}@{upstream} --symbolic-full-name 2>/dev/null)/refs\/remotes\/}
  if [[ -n ${remote} ]]; then
    ahead=$(command git rev-list ${hook_com[branch]}@{upstream}..HEAD 2>/dev/null | wc -l)
    behind=$(command git rev-list HEAD..${hook_com[branch]}@{upstream} 2>/dev/null | wc -l)
    if [[ $ahead -eq 0 ]] && [[ $behind -eq 0 ]]; then
      git_remote_status="$ZSH_THEME_GIT_PROMPT_EQUAL_REMOTE"
    elif [[ $ahead -gt 0 ]] && [[ $behind -eq 0 ]]; then
      git_remote_status="$ZSH_THEME_GIT_PROMPT_AHEAD_REMOTE"
      git_remote_status_detailed="$ZSH_THEME_GIT_PROMPT_AHEAD_REMOTE_COLOR$ZSH_THEME_GIT_PROMPT_AHEAD_REMOTE$((ahead))%{$reset_color%}"
    elif [[ $behind -gt 0 ]] && [[ $ahead -eq 0 ]]; then
      git_remote_status="$ZSH_THEME_GIT_PROMPT_BEHIND_REMOTE"
      git_remote_status_detailed="$ZSH_THEME_GIT_PROMPT_BEHIND_REMOTE_COLOR$ZSH_THEME_GIT_PROMPT_BEHIND_REMOTE$((behind))%{$reset_color%}"
    elif [[ $ahead -gt 0 ]] && [[ $behind -gt 0 ]]; then
      git_remote_status="$ZSH_THEME_GIT_PROMPT_DIVERGED_REMOTE"
      git_remote_status_detailed="$ZSH_THEME_GIT_PROMPT_AHEAD_REMOTE_COLOR$ZSH_THEME_GIT_PROMPT_AHEAD_REMOTE$((ahead))%{$reset_color%}$ZSH_THEME_GIT_PROMPT_BEHIND_REMOTE_COLOR$ZSH_THEME_GIT_PROMPT_BEHIND_REMOTE$((behind))%{$reset_color%}"
    fi
    # The detailed form (upstream name + counts) replaces the plain symbol.
    if [[ -n $ZSH_THEME_GIT_PROMPT_REMOTE_STATUS_DETAILED ]]; then
      git_remote_status="$ZSH_THEME_GIT_PROMPT_REMOTE_STATUS_PREFIX$remote$git_remote_status_detailed$ZSH_THEME_GIT_PROMPT_REMOTE_STATUS_SUFFIX"
    fi
    echo $git_remote_status
  fi
}
# Outputs the name of the current branch
# Usage example: git pull origin $(git_current_branch)
# Using '--quiet' with 'symbolic-ref' will not cause a fatal error (128) if
# it's not a symbolic ref, but in a Git repo.
function git_current_branch() {
  local ref
  ref=$(command git symbolic-ref --quiet HEAD 2> /dev/null)
  local ret=$?
  if [[ $ret != 0 ]]; then
    [[ $ret == 128 ]] && return  # no git repo.
    # Detached HEAD: fall back to the short commit SHA.
    ref=$(command git rev-parse --short HEAD 2> /dev/null) || return
  fi
  echo ${ref#refs/heads/}
}
# Gets the number of commits ahead from remote
# Prints prefix+count+suffix when the current branch has commits the upstream
# does not; prints nothing when not ahead or when no upstream is configured.
function git_commits_ahead() {
  if command git rev-parse --git-dir &>/dev/null; then
    local commits
    # rev-list --count lets git do the counting, replacing the fragile and
    # slower "git log | grep '^commit' | wc -l" pipeline.
    commits=$(command git rev-list --count @{upstream}..HEAD 2>/dev/null)
    if [[ -n "$commits" && "$commits" != 0 ]]; then
      echo "$ZSH_THEME_GIT_COMMITS_AHEAD_PREFIX$commits$ZSH_THEME_GIT_COMMITS_AHEAD_SUFFIX"
    fi
  fi
}
# Outputs if current branch is ahead of remote
# NOTE: assumes the remote is named 'origin'.
function git_prompt_ahead() {
  if [[ -n "$(command git rev-list origin/$(git_current_branch)..HEAD 2> /dev/null)" ]]; then
    echo "$ZSH_THEME_GIT_PROMPT_AHEAD"
  fi
}
# Outputs if current branch is behind remote
# NOTE: assumes the remote is named 'origin'.
function git_prompt_behind() {
  if [[ -n "$(command git rev-list HEAD..origin/$(git_current_branch) 2> /dev/null)" ]]; then
    echo "$ZSH_THEME_GIT_PROMPT_BEHIND"
  fi
}
# Outputs if current branch exists on remote or not
# NOTE: assumes the remote is named 'origin'.
function git_prompt_remote() {
  if [[ -n "$(command git show-ref origin/$(git_current_branch) 2> /dev/null)" ]]; then
    echo "$ZSH_THEME_GIT_PROMPT_REMOTE_EXISTS"
  else
    echo "$ZSH_THEME_GIT_PROMPT_REMOTE_MISSING"
  fi
}
# Formats prompt string for current git commit short SHA
# Prints nothing outside a git repository (rev-parse fails and && short-circuits).
function git_prompt_short_sha() {
  local SHA
  SHA=$(command git rev-parse --short HEAD 2> /dev/null) && echo "$ZSH_THEME_GIT_PROMPT_SHA_BEFORE$SHA$ZSH_THEME_GIT_PROMPT_SHA_AFTER"
}
# Formats prompt string for current git commit long SHA
# Prints nothing outside a git repository (rev-parse fails and && short-circuits).
function git_prompt_long_sha() {
  local SHA
  SHA=$(command git rev-parse HEAD 2> /dev/null) && echo "$ZSH_THEME_GIT_PROMPT_SHA_BEFORE$SHA$ZSH_THEME_GIT_PROMPT_SHA_AFTER"
}
# Get the status of the working tree
function git_prompt_status() {
  local INDEX STATUS
  INDEX=$(command git status --porcelain -b 2> /dev/null)
  STATUS=""
  # Each test below matches one porcelain status-column combination; the
  # elif chains make every category contribute at most one indicator.
  if $(echo "$INDEX" | command grep -E '^\?\? ' &> /dev/null); then
    STATUS="$ZSH_THEME_GIT_PROMPT_UNTRACKED$STATUS"
  fi
  if $(echo "$INDEX" | grep '^A  ' &> /dev/null); then
    STATUS="$ZSH_THEME_GIT_PROMPT_ADDED$STATUS"
  elif $(echo "$INDEX" | grep '^M  ' &> /dev/null); then
    STATUS="$ZSH_THEME_GIT_PROMPT_ADDED$STATUS"
  fi
  if $(echo "$INDEX" | grep '^ M ' &> /dev/null); then
    STATUS="$ZSH_THEME_GIT_PROMPT_MODIFIED$STATUS"
  elif $(echo "$INDEX" | grep '^AM ' &> /dev/null); then
    STATUS="$ZSH_THEME_GIT_PROMPT_MODIFIED$STATUS"
  elif $(echo "$INDEX" | grep '^ T ' &> /dev/null); then
    STATUS="$ZSH_THEME_GIT_PROMPT_MODIFIED$STATUS"
  fi
  if $(echo "$INDEX" | grep '^R  ' &> /dev/null); then
    STATUS="$ZSH_THEME_GIT_PROMPT_RENAMED$STATUS"
  fi
  if $(echo "$INDEX" | grep '^ D ' &> /dev/null); then
    STATUS="$ZSH_THEME_GIT_PROMPT_DELETED$STATUS"
  elif $(echo "$INDEX" | grep '^D  ' &> /dev/null); then
    STATUS="$ZSH_THEME_GIT_PROMPT_DELETED$STATUS"
  elif $(echo "$INDEX" | grep '^AD ' &> /dev/null); then
    STATUS="$ZSH_THEME_GIT_PROMPT_DELETED$STATUS"
  fi
  # Stash presence is detected via the refs/stash ref, not porcelain output.
  if $(command git rev-parse --verify refs/stash >/dev/null 2>&1); then
    STATUS="$ZSH_THEME_GIT_PROMPT_STASHED$STATUS"
  fi
  if $(echo "$INDEX" | grep '^UU ' &> /dev/null); then
    STATUS="$ZSH_THEME_GIT_PROMPT_UNMERGED$STATUS"
  fi
  # The "## branch..." header line (-b) carries the ahead/behind information.
  if $(echo "$INDEX" | grep '^## .*ahead' &> /dev/null); then
    STATUS="$ZSH_THEME_GIT_PROMPT_AHEAD$STATUS"
  fi
  if $(echo "$INDEX" | grep '^## .*behind' &> /dev/null); then
    STATUS="$ZSH_THEME_GIT_PROMPT_BEHIND$STATUS"
  fi
  if $(echo "$INDEX" | grep '^## .*diverged' &> /dev/null); then
    STATUS="$ZSH_THEME_GIT_PROMPT_DIVERGED$STATUS"
  fi
  echo $STATUS
}
# Compares the provided version of git to the version installed and on path
# Outputs -1, 0, or 1 if the installed version is less than, equal to, or
# greater than the input version, respectively.
function git_compare_version() {
  local INPUT_GIT_VERSION INSTALLED_GIT_VERSION
  INPUT_GIT_VERSION=(${(s/./)1})
  # "git version X.Y.Z" -> take the third word, then split it on dots.
  INSTALLED_GIT_VERSION=($(command git --version 2>/dev/null))
  INSTALLED_GIT_VERSION=(${(s/./)INSTALLED_GIT_VERSION[3]})
  # Compare major, minor, patch in order; first difference decides.
  for i in {1..3}; do
    if [[ $INSTALLED_GIT_VERSION[$i] -gt $INPUT_GIT_VERSION[$i] ]]; then
      echo 1
      return 0
    fi
    if [[ $INSTALLED_GIT_VERSION[$i] -lt $INPUT_GIT_VERSION[$i] ]]; then
      echo -1
      return 0
    fi
  done
  echo 0
}
# Outputs the name of the current user
# Usage example: $(git_current_user_name)
function git_current_user_name() {
  command git config user.name 2>/dev/null
}
# Outputs the email of the current user
# Usage example: $(git_current_user_email)
function git_current_user_email() {
  command git config user.email 2>/dev/null
}
# This is unlikely to change so make it all statically assigned
# (1 when the installed git is newer than 1.7.2, used by parse_git_dirty).
POST_1_7_2_GIT=$(git_compare_version "1.7.2")
# Clean up the namespace slightly by removing the checker function
unfunction git_compare_version
|
<filename>src/Farmacia.java
import javax.swing.JOptionPane;
public class Farmacia extends javax.swing.JFrame {
    // Head and tail nodes of the patient queue (cola).
    private NodoCola inicio;
    private NodoCola fin;
    // Head of the singly linked list (lista simple).
    private NodoListaSimple cabeza;
    // Head of the doubly linked delivery list (LED = "Lista Entrega Doble").
    private NodoListaDoble inicioLED;
    /**
     * Builds the main window: initializes the Swing components, centres the
     * frame, fills the combo box and starts with empty data structures.
     */
    public Farmacia() {
        initComponents();
        setTitle("FARMACIA");
        setResizable(false);
        setLocationRelativeTo(null);
        llenarCombo();
        // NOTE(review): inicioLED is NOT reset here, unlike the other three
        // structure heads — confirm whether that is intentional.
        this.inicio=null;
        this.fin=null;
        this.cabeza=null;
    }
public boolean vaciaC(){
if(inicio==null){
return true;
}else{
return false;
}
}
public boolean vaciaL(){
if(cabeza==null){
return true;
}else{
return false;
}
}
public boolean vaciaLED(){
if(inicioLED==null){
return true;
}else{
return false;
}
}
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
jPanel1 = new javax.swing.JPanel();
jLabel1 = new javax.swing.JLabel();
jPanel2 = new javax.swing.JPanel();
jButton4 = new javax.swing.JButton();
jButton3 = new javax.swing.JButton();
jButton1 = new javax.swing.JButton();
jButton2 = new javax.swing.JButton();
jScrollPane1 = new javax.swing.JScrollPane();
jTextArea1 = new javax.swing.JTextArea();
jLabel3 = new javax.swing.JLabel();
jLabel2 = new javax.swing.JLabel();
jTextField1 = new javax.swing.JTextField();
jLabel4 = new javax.swing.JLabel();
jComboBox1 = new javax.swing.JComboBox<>();
jButton5 = new javax.swing.JButton();
setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
jPanel1.setBackground(new java.awt.Color(51, 51, 255));
jLabel1.setFont(new java.awt.Font("Tahoma", 0, 36)); // NOI18N
jLabel1.setForeground(new java.awt.Color(255, 255, 255));
jLabel1.setText("Farmacia");
javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1);
jPanel1.setLayout(jPanel1Layout);
jPanel1Layout.setHorizontalGroup(
jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel1Layout.createSequentialGroup()
.addGap(320, 320, 320)
.addComponent(jLabel1)
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
);
jPanel1Layout.setVerticalGroup(
jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel1Layout.createSequentialGroup()
.addContainerGap()
.addComponent(jLabel1)
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
);
jPanel2.setBackground(new java.awt.Color(255, 255, 255));
jButton4.setBackground(new java.awt.Color(51, 51, 255));
jButton4.setFont(new java.awt.Font("Tahoma", 0, 18)); // NOI18N
jButton4.setForeground(new java.awt.Color(255, 255, 255));
jButton4.setText("Volver");
jButton4.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton4ActionPerformed(evt);
}
});
jButton3.setBackground(new java.awt.Color(51, 51, 255));
jButton3.setFont(new java.awt.Font("Tahoma", 0, 18)); // NOI18N
jButton3.setForeground(new java.awt.Color(255, 255, 255));
jButton3.setText("Ver Lista Entrega Medicamentos");
jButton3.setCursor(new java.awt.Cursor(java.awt.Cursor.DEFAULT_CURSOR));
jButton3.setHorizontalTextPosition(javax.swing.SwingConstants.CENTER);
jButton3.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton3ActionPerformed(evt);
}
});
jButton1.setBackground(new java.awt.Color(51, 51, 255));
jButton1.setFont(new java.awt.Font("Tahoma", 0, 18)); // NOI18N
jButton1.setForeground(new java.awt.Color(255, 255, 255));
jButton1.setText("Aceptar");
jButton1.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton1ActionPerformed(evt);
}
});
jButton2.setBackground(new java.awt.Color(51, 51, 255));
jButton2.setFont(new java.awt.Font("Tahoma", 0, 18)); // NOI18N
jButton2.setForeground(new java.awt.Color(255, 255, 255));
jButton2.setText("Ver Cola");
jButton2.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton2ActionPerformed(evt);
}
});
jTextArea1.setColumns(20);
jTextArea1.setRows(5);
jScrollPane1.setViewportView(jTextArea1);
jLabel3.setFont(new java.awt.Font("Tahoma", 0, 18)); // NOI18N
jLabel3.setText("Medicamentos:");
jLabel2.setFont(new java.awt.Font("Tahoma", 0, 18)); // NOI18N
jLabel2.setText("Nombre:");
jLabel4.setFont(new java.awt.Font("Tahoma", 0, 18)); // NOI18N
jLabel4.setText("Ficha:");
jButton5.setBackground(new java.awt.Color(51, 51, 255));
jButton5.setFont(new java.awt.Font("Tahoma", 0, 18)); // NOI18N
jButton5.setForeground(new java.awt.Color(255, 255, 255));
jButton5.setText("Ver Lista Entrega Doble");
jButton5.setCursor(new java.awt.Cursor(java.awt.Cursor.DEFAULT_CURSOR));
jButton5.setHorizontalTextPosition(javax.swing.SwingConstants.CENTER);
jButton5.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton5ActionPerformed(evt);
}
});
javax.swing.GroupLayout jPanel2Layout = new javax.swing.GroupLayout(jPanel2);
jPanel2.setLayout(jPanel2Layout);
jPanel2Layout.setHorizontalGroup(
jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel2Layout.createSequentialGroup()
.addGap(121, 121, 121)
.addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel2Layout.createSequentialGroup()
.addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
.addComponent(jLabel3)
.addComponent(jLabel2)
.addComponent(jLabel4))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
.addComponent(jButton3, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addGroup(jPanel2Layout.createSequentialGroup()
.addComponent(jButton1, javax.swing.GroupLayout.PREFERRED_SIZE, 108, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(jButton2, javax.swing.GroupLayout.PREFERRED_SIZE, 108, javax.swing.GroupLayout.PREFERRED_SIZE))
.addComponent(jScrollPane1)
.addComponent(jTextField1, javax.swing.GroupLayout.PREFERRED_SIZE, 300, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jComboBox1, javax.swing.GroupLayout.PREFERRED_SIZE, 50, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jButton5, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)))
.addGroup(jPanel2Layout.createSequentialGroup()
.addGap(189, 189, 189)
.addComponent(jButton4, javax.swing.GroupLayout.PREFERRED_SIZE, 168, javax.swing.GroupLayout.PREFERRED_SIZE)))
.addContainerGap(255, Short.MAX_VALUE))
);
jPanel2Layout.setVerticalGroup(
jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel2Layout.createSequentialGroup()
.addGap(24, 24, 24)
.addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
.addComponent(jLabel4)
.addComponent(jComboBox1, javax.swing.GroupLayout.PREFERRED_SIZE, 32, javax.swing.GroupLayout.PREFERRED_SIZE))
.addGap(18, 18, 18)
.addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jLabel2)
.addComponent(jTextField1, javax.swing.GroupLayout.PREFERRED_SIZE, 35, javax.swing.GroupLayout.PREFERRED_SIZE))
.addGap(20, 20, 20)
.addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jLabel3)
.addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, 122, javax.swing.GroupLayout.PREFERRED_SIZE))
.addGap(18, 18, 18)
.addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jButton1)
.addComponent(jButton2))
.addGap(18, 18, 18)
.addComponent(jButton3)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(jButton5)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 30, Short.MAX_VALUE)
.addComponent(jButton4)
.addContainerGap())
);
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
getContentPane().setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jPanel1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(jPanel2, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
);
layout.setVerticalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addComponent(jPanel1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(jPanel2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
);
pack();
}// </editor-fold>//GEN-END:initComponents
// "Volver": opens the Inicio (start) window.
// NOTE(review): the current Farmacia window stays visible — confirm whether
// it should be hidden or disposed here.
private void jButton4ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton4ActionPerformed
    new Inicio().setVisible(true);
}//GEN-LAST:event_jButton4ActionPerformed
// "Aceptar": enqueues the patient from the form, then asks whether to add
// another one, clearing the form between entries.
private void jButton1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton1ActionPerformed
    // Keep admitting patients while the user answers "Yes"
    // (JOptionPane.YES_OPTION == 0). The original used "if", so the
    // confirmation answer was read but never acted on and only one
    // patient could be added per click.
    int seguir = 0;
    while (seguir == 0) {
        llenarCola();
        seguir = JOptionPane.showConfirmDialog(null, "Desea agregar otro paciente?");
        limpiar();
    }
}//GEN-LAST:event_jButton1ActionPerformed
// "Ver Cola": shows the current waiting queue in a dialog, then copies its
// elements into the ficha-sorted singly linked list.
private void jButton2ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton2ActionPerformed
    mostrarCola();
    colaaLista();
}//GEN-LAST:event_jButton2ActionPerformed
// "Ver Lista Entrega Medicamentos": shows the sorted singly linked list,
// then copies it into the doubly linked delivery list.
private void jButton3ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton3ActionPerformed
    mostrarLista();
    listaSimpleaDoble();
}//GEN-LAST:event_jButton3ActionPerformed
// "Ver Lista Entrega Doble": shows the doubly linked delivery list.
private void jButton5ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton5ActionPerformed
    mostrarListaDoble();
}//GEN-LAST:event_jButton5ActionPerformed
/**
 * Application entry point: applies the Nimbus look and feel when it is
 * installed (falling back to the default otherwise) and shows the main
 * Farmacia window on the event dispatch thread.
 */
public static void main(String args[]) {
    /* Prefer Nimbus; any failure is logged and the default L&F is kept. */
    try {
        for (javax.swing.UIManager.LookAndFeelInfo lafInfo : javax.swing.UIManager.getInstalledLookAndFeels()) {
            if ("Nimbus".equals(lafInfo.getName())) {
                javax.swing.UIManager.setLookAndFeel(lafInfo.getClassName());
                break;
            }
        }
    } catch (ClassNotFoundException | InstantiationException | IllegalAccessException
            | javax.swing.UnsupportedLookAndFeelException ex) {
        // Same handling for every failure mode: log it and continue.
        java.util.logging.Logger.getLogger(Farmacia.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
    }

    /* Create and display the form on the EDT. */
    java.awt.EventQueue.invokeLater(new Runnable() {
        public void run() {
            new Farmacia().setVisible(true);
        }
    });
}
/**
 * Fills the ficha (ticket) combo box with the numbers 1..10 as Strings.
 */
public void llenarCombo(){
    // The original buffered the numbers in a temporary int[10] that was
    // never read again; add them to the combo box directly instead.
    for (int num = 1; num <= 10; num++) {
        jComboBox1.addItem(String.valueOf(num));
    }
}
/**
 * Clears the input form: re-selects the first ficha entry and empties the
 * name field and the medication text area.
 */
public void limpiar(){
    // BUGFIX: setSelectedItem(0) looked up the Integer 0 among the combo's
    // items, which are Strings (see llenarCombo), so it never matched and
    // the selection was left unchanged. Select the first entry by index.
    // Assumes llenarCombo has populated the box — confirm it runs at startup.
    jComboBox1.setSelectedIndex(0);
    jTextField1.setText("");
    jTextArea1.setText("");
}
/**
 * Builds a FarmaciaDato from the form fields (ficha, name, medication)
 * and enqueues it at the tail of the waiting queue.
 */
public void llenarCola(){
    FarmaciaDato dato = new FarmaciaDato();
    dato.setFicha(Integer.parseInt(jComboBox1.getSelectedItem().toString()));
    dato.setNombre(jTextField1.getText());
    dato.setMedic(jTextArea1.getText());

    NodoCola nodo = new NodoCola();
    nodo.setElemento(dato);

    if (vaciaC()) {
        // Empty queue: the new node is both head and tail.
        inicio = nodo;
        fin = nodo;
        return;
    }
    // Non-empty queue: link after the current tail and advance the tail.
    fin.setSiguienteCola(nodo);
    fin = nodo;
}
/**
 * Shows the pharmacy waiting queue in a dialog, or a "Cola vacia!" notice
 * when there is nothing queued.
 */
public void mostrarCola(){
    if (vaciaC()) {
        JOptionPane.showMessageDialog(null, "Cola vacia!");
        return;
    }
    StringBuilder sb = new StringBuilder();
    for (NodoCola actual = inicio; actual != null; actual = actual.getSiguienteCola()) {
        sb.append(" ").append(actual.getElemento().getFicha())
          .append(" | ").append(actual.getElemento().getNombre())
          .append(" | ").append(actual.getElemento().getMedic())
          .append(" -> ");
    }
    JOptionPane.showMessageDialog(null, "Cola de espera en farmacia:\n\n" + sb);
}
/**
 * Copies every element of the waiting queue into the ficha-sorted singly
 * linked list (the queue itself is left untouched).
 */
public void colaaLista(){
    for (NodoCola actual = inicio; actual != null; actual = actual.getSiguienteCola()) {
        llenarLista(actual.getElemento());
    }
}
/**
 * Inserts {@code farm} into the singly linked list, keeping it sorted by
 * ficha in ascending order; equal fichas are placed after existing ones.
 */
public void llenarLista(FarmaciaDato farm){
    NodoListaSimple n=new NodoListaSimple();
    n.setElemento(farm);
    if(vaciaL()){
        // Empty list: the new node becomes the head.
        cabeza=n;
    }else if(farm.getFicha()<cabeza.getElemento().getFicha()){
        // Smaller than the head: prepend.
        n.setSiguiente(cabeza);
        cabeza=n;
    }else if(cabeza.getSiguiente()==null){
        // Single-node list and farm >= head: append after the head.
        cabeza.setSiguiente(n);
    }else{
        // Walk to the last node whose ficha is smaller than farm's,
        // then splice n in right after it.
        NodoListaSimple aux=cabeza;
        while((aux.getSiguiente()!=null)&&(aux.getSiguiente().getElemento().getFicha()<farm.getFicha())){
            aux=aux.getSiguiente();
        }
        n.setSiguiente(aux.getSiguiente());
        aux.setSiguiente(n);
    }
}
/**
 * Copies every element of the singly linked list into the doubly linked
 * list (the singly linked list is left untouched).
 */
public void listaSimpleaDoble(){
    for (NodoListaSimple actual = cabeza; actual != null; actual = actual.getSiguiente()) {
        llenarListaDoble(actual.getElemento());
    }
}
/**
 * Inserts {@code farm} into the doubly linked list, keeping it sorted by
 * ficha in ascending order; equal fichas are placed after existing ones.
 */
public void llenarListaDoble(FarmaciaDato farm){
    NodoListaDoble n = new NodoListaDoble();
    n.setElemento(farm);
    if (vaciaLED()) {
        // Empty list: the new node becomes the head.
        inicioLED = n;
    } else if (farm.getFicha() < inicioLED.getElemento().getFicha()) {
        // Smaller than the head: prepend and fix the old head's back link.
        n.setSiguiente(inicioLED);
        inicioLED.setAnterior(n);
        inicioLED = n;
    } else {
        // Walk to the last node whose ficha is smaller than farm's.
        // (This path also covers the old single-node special case.)
        NodoListaDoble aux = inicioLED;
        while (aux.getSiguiente() != null && aux.getSiguiente().getElemento().getFicha() < farm.getFicha()) {
            aux = aux.getSiguiente();
        }
        // BUGFIX: splice n between aux and aux's successor, updating BOTH
        // directions. The original never set the successor's "anterior" to n
        // (leaving backward links broken after any middle insertion) and
        // called n.setAnterior twice, the first store being dead.
        n.setSiguiente(aux.getSiguiente());
        n.setAnterior(aux);
        if (aux.getSiguiente() != null) {
            aux.getSiguiente().setAnterior(n);
        }
        aux.setSiguiente(n);
    }
}
/**
 * Shows the sorted singly linked list in a dialog, or a "Lista vacía!"
 * notice when it is empty.
 */
public void mostrarLista(){
    if (vaciaL()) {
        JOptionPane.showMessageDialog(null, "Lista vacía!");
        return;
    }
    StringBuilder sb = new StringBuilder();
    for (NodoListaSimple actual = cabeza; actual != null; actual = actual.getSiguiente()) {
        sb.append(actual.getElemento().getFicha()).append("-")
          .append(actual.getElemento().getNombre()).append("-")
          .append(actual.getElemento().getMedic()).append(" --> ");
    }
    JOptionPane.showMessageDialog(null, "La lista contiene:\n" + sb);
}
/**
 * Shows the doubly linked list in a dialog, or a "Lista enlazada doble
 * vacia!" notice when it is empty.
 */
public void mostrarListaDoble(){
    if (vaciaLED()) {
        JOptionPane.showMessageDialog(null, "Lista enlazada doble vacia!");
        return;
    }
    StringBuilder sb = new StringBuilder();
    for (NodoListaDoble actual = inicioLED; actual != null; actual = actual.getSiguiente()) {
        sb.append(actual.getElemento().getFicha()).append("=")
          .append(actual.getElemento().getNombre()).append("=")
          .append(actual.getElemento().getMedic()).append(" ==> ");
    }
    JOptionPane.showMessageDialog(null, "La lista enlazada doble contiene:\n" + sb);
}
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JButton jButton1;
private javax.swing.JButton jButton2;
private javax.swing.JButton jButton3;
private javax.swing.JButton jButton4;
private javax.swing.JButton jButton5;
private javax.swing.JComboBox<String> jComboBox1;
private javax.swing.JLabel jLabel1;
private javax.swing.JLabel jLabel2;
private javax.swing.JLabel jLabel3;
private javax.swing.JLabel jLabel4;
private javax.swing.JPanel jPanel1;
private javax.swing.JPanel jPanel2;
private javax.swing.JScrollPane jScrollPane1;
private javax.swing.JTextArea jTextArea1;
private javax.swing.JTextField jTextField1;
// End of variables declaration//GEN-END:variables
}
|
#!/bin/sh
# Configure the environment for running Django commands for the "morla"
# project: point DJANGO_SETTINGS_MODULE at its settings module and put the
# parent of the current directory on PYTHONPATH.
# Use a private variable instead of clobbering the shell's special PWD,
# and quote the expansions so paths with spaces survive.
SCRIPT_DIR="$(pwd)"
export DJANGO_SETTINGS_MODULE="morla.settings"
DJANGO_PYTHONPATH="$SCRIPT_DIR/.."
export PYTHONPATH="$PYTHONPATH:$DJANGO_PYTHONPATH"
|
// Sorts `arr` in place, ascending, by numerically comparing the values
// stored under `property`, and returns the same (mutated) array.
function sortByProperty(arr, property) {
    const byProperty = (left, right) => left[property] - right[property];
    arr.sort(byProperty);
    return arr;
}
// Demo: sort a list of people by age (ascending) and print it.
// Declared with `const` — the original assignments created accidental
// globals, which throw in strict mode / ES modules.
const people = [{name: "John", age: 25}, {name: "Mary", age: 20}, {name: "Bob", age: 30}];
const sortedPeople = sortByProperty(people, 'age');
console.log(sortedPeople);
import { AUTHENTICATION_ERROR_HANDlER_PRIORITY, ACCESS_DENIED_ERROR_HANDlER_PRIORITY } from './error-protocol';
import { AuthenticationError, AccessDeniedError } from './error';
import { ErrorHandler, Context } from '@malagu/core/lib/node';
import { Component } from '@malagu/core';
/**
 * Error handler that maps an AuthenticationError thrown downstream to an
 * HTTP 401 response whose body is the error's message.
 */
@Component(ErrorHandler)
export class AuthenticationErrorHandler implements ErrorHandler {
    // Position in the error-handler chain. (The lowercase "l" in the
    // constant name is a typo inherited from './error-protocol' and must
    // match that module's export.)
    readonly priority: number = AUTHENTICATION_ERROR_HANDlER_PRIORITY;

    // Claims only AuthenticationError instances.
    canHandle(ctx: Context, err: Error): Promise<boolean> {
        return Promise.resolve(err instanceof AuthenticationError);
    }

    // Responds 401 Unauthorized and ends the response with the message.
    async handle(ctx: Context, err: AuthenticationError): Promise<void> {
        ctx.response.statusCode = 401;
        ctx.response.end(err.message);
    }
}
/**
 * Error handler that maps an AccessDeniedError thrown downstream to an
 * HTTP 403 response whose body is the error's message.
 */
@Component(ErrorHandler)
export class AccessDeniedErrorHandler implements ErrorHandler {
    // Position in the error-handler chain (constant name's lowercase "l"
    // typo comes from './error-protocol' and must match it).
    readonly priority: number = ACCESS_DENIED_ERROR_HANDlER_PRIORITY;

    // Claims only AccessDeniedError instances.
    canHandle(ctx: Context, err: Error): Promise<boolean> {
        return Promise.resolve(err instanceof AccessDeniedError);
    }

    // Responds 403 Forbidden and ends the response with the message.
    async handle(ctx: Context, err: AccessDeniedError): Promise<void> {
        ctx.response.statusCode = 403;
        ctx.response.end(err.message);
    }
}
|
// Source repository: Hero2000/MediaEditSDK
package com.video.process.thread;
import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.os.Build;
import android.view.Surface;
import androidx.annotation.RequiresApi;
import com.video.process.surface.InputSurface;
import com.video.process.surface.OutputSurface;
import com.video.process.utils.LogUtils;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
/**
 * Worker thread that decodes one video track with MediaCodec, renders each
 * kept frame through an OutputSurface/InputSurface pair into the encoder's
 * input surface, and rewrites presentation timestamps for trimming, speed
 * change, and frame-rate reduction.
 */
public class VideoDecodeThread extends Thread {

    private static final String TAG = "VideoDecodeThread";
    // Poll timeout for MediaCodec dequeueInput/OutputBuffer, in microseconds.
    private static final int TIMEOUT_USEC = 2500;

    private MediaExtractor mExtractor;
    private int mStartTime;                // clip start in ms; -1 = from the beginning
    private int mEndTime;                  // clip end in ms; -1 = to the end
    private int mSrcFrameRate;             // source frame rate (fps)
    private int mDestFrameRate;            // target frame rate (fps)
    private float mSpeed;                  // speed factor; 0 = unchanged
    private boolean mShouldDropFrame;      // whether frame dropping is enabled
    private int mVideoTrackIndex;          // extractor index of the video track
    private AtomicBoolean mDecodeFinished; // flipped to true when decoding completes
    private IVideoEncodeThread mVideoEncodeThread;
    private InputSurface mInputSurface;    // EGL surface feeding the encoder
    private OutputSurface mOutputSurface;  // surface the decoder renders into
    private Exception mException;          // first failure, if any
    private MediaCodec mDecoder;

    public VideoDecodeThread(MediaExtractor extractor,
                             int startTime, int endTime,
                             int srcFrameRate, int destFrameRate,
                             float speed, boolean shouldDropFrame,
                             int videoTrackIndex,
                             AtomicBoolean decodeFinished,
                             IVideoEncodeThread videoEncodeThread) {
        super("VideoDecodeThread");
        mExtractor = extractor;
        mStartTime = startTime;
        mEndTime = endTime;
        mSrcFrameRate = srcFrameRate;
        mDestFrameRate = destFrameRate;
        mSpeed = speed;
        mShouldDropFrame = shouldDropFrame;
        mVideoTrackIndex = videoTrackIndex;
        mDecodeFinished = decodeFinished;
        mVideoEncodeThread = videoEncodeThread;
    }

    @Override
    public void run() {
        super.run();
        try {
            doVideoDecode();
        } catch (Exception e) {
            mException = e;
        } finally {
            // Always release the EGL surfaces and the codec, even on failure.
            if (mInputSurface != null) {
                mInputSurface.release();
            }
            if (mOutputSurface != null) {
                mOutputSurface.release();
            }
            if (mDecoder != null) {
                mDecoder.stop();
                mDecoder.release();
            }
        }
    }

    /**
     * Runs the full decode loop: waits for the encoder's EGL context, sets up
     * the decoder and surfaces, then feeds samples in and drains rendered
     * frames out until end of stream (or the configured end time).
     */
    private void doVideoDecode() throws IOException {
        CountDownLatch eglContextLatch = mVideoEncodeThread.getEglContextLatch();
        try {
            boolean await = eglContextLatch.await(5, TimeUnit.SECONDS);
            // BUGFIX: CountDownLatch.await returns true when the latch reached
            // zero and false on timeout. The original tested "if (await)" and
            // therefore reported a timeout on SUCCESS while carrying on after
            // an actual timeout.
            if (!await) {
                mException = new TimeoutException("wait eglContext timeout!");
                return;
            }
        } catch (Exception e) {
            mException = e;
            return;
        }
        Surface encodeSurface = mVideoEncodeThread.getSurface();
        mInputSurface = new InputSurface(encodeSurface);
        mInputSurface.makeCurrent();

        MediaFormat videoFormat = mExtractor.getTrackFormat(mVideoTrackIndex);
        mDecoder = MediaCodec.createDecoderByType(videoFormat.getString(MediaFormat.KEY_MIME));
        mOutputSurface = new OutputSurface();
        mDecoder.configure(videoFormat, mOutputSurface.getSurface(), null, 0);
        mDecoder.start();

        // Frame-drop decision: when the (speed-adjusted) source rate exceeds
        // the target rate, drop `dropCount` frames after every
        // `frameIntervalForDrop` kept frames.
        int frameIntervalForDrop = 0;
        int dropCount = 0;
        int frameIndex = 1;
        if (mShouldDropFrame && mSrcFrameRate != 0 && mDestFrameRate != 0) {
            if (mSpeed != 0) {
                mSrcFrameRate = (int) (mSrcFrameRate * mSpeed);
            }
            if (mSrcFrameRate > mDestFrameRate) {
                frameIntervalForDrop = mDestFrameRate / (mSrcFrameRate - mDestFrameRate);
                frameIntervalForDrop = frameIntervalForDrop == 0 ? 1 : frameIntervalForDrop;
                dropCount = (mSrcFrameRate - mDestFrameRate) / mDestFrameRate;
                dropCount = dropCount == 0 ? 1 : dropCount;
                LogUtils.w(TAG, "帧率过高,需要丢帧:" + mSrcFrameRate + "->" + mDestFrameRate +
                        " frameIntervalForDrop:" + frameIntervalForDrop +
                        " dropCount:" + dropCount);
            }
        }

        // Main decode loop.
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        boolean decoderDone = false;
        boolean inputDone = false;
        long videoStartTimeUs = -1;
        int decodeTryAgainCount = 0;
        while (!decoderDone) {
            // Feed remaining samples into the decoder.
            if (!inputDone) {
                boolean eof = false;
                int index = mExtractor.getSampleTrackIndex();
                if (index == mVideoTrackIndex) {
                    int inputBufIndex = mDecoder.dequeueInputBuffer(TIMEOUT_USEC);
                    if (inputBufIndex >= 0) {
                        ByteBuffer inputBuf = mDecoder.getInputBuffer(inputBufIndex);
                        int chunkSize = mExtractor.readSampleData(inputBuf, 0);
                        if (chunkSize < 0) {
                            // No more samples: signal end of stream to the decoder.
                            mDecoder.queueInputBuffer(inputBufIndex, 0, 0, 0L,
                                    MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                            // BUGFIX: only the INPUT side is finished here. The
                            // original set decoderDone, which exited the loop
                            // before draining frames still buffered inside the
                            // decoder (and before the EOS output was seen).
                            inputDone = true;
                        } else {
                            long sampleTime = mExtractor.getSampleTime();
                            mDecoder.queueInputBuffer(inputBufIndex, 0, chunkSize, sampleTime,
                                    0);
                            mExtractor.advance();
                        }
                    }
                } else if (index == -1) {
                    eof = true;
                }
                // NOTE(review): if the extractor is positioned on another
                // selected track (index >= 0 but != mVideoTrackIndex), nothing
                // advances it; this assumes only the video track is selected
                // on mExtractor — confirm at the call site.
                if (eof) {
                    // End of input: queue an EOS buffer once one is available.
                    int inputBufIndex = mDecoder.dequeueInputBuffer(TIMEOUT_USEC);
                    if (inputBufIndex >= 0) {
                        mDecoder.queueInputBuffer(inputBufIndex, 0, 0, 0L,
                                MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        inputDone = true;
                    }
                }
            }
            boolean decoderOutputAvailable = !decoderDone;
            if (decoderDone) {
                LogUtils.i(TAG, "decoderOutputAvailable:" + decoderOutputAvailable);
            }
            // Drain decoded frames; each rendered frame breaks back out to
            // feed more input.
            while (decoderOutputAvailable) {
                int outputBufferIndex =
                        mDecoder.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
                LogUtils.i(TAG, "outputBufferIndex = " + outputBufferIndex);
                if (inputDone && outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    decodeTryAgainCount++;
                    if (decodeTryAgainCount > 10) {
                        // Workaround: some devices (seen on Xiaomi 2) keep
                        // returning TRY_AGAIN after EOS was queued; force end.
                        LogUtils.i(TAG, "INFO_TRY_AGAIN_LATER 10 times,force End!");
                        decoderDone = true;
                        break;
                    }
                } else {
                    decodeTryAgainCount = 0;
                }
                if (outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    break;
                } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    MediaFormat newFormat = mDecoder.getOutputFormat();
                    LogUtils.i(TAG, "decode newFormat = " + newFormat);
                } else if (outputBufferIndex < 0) {
                    // Unknown negative status: log and ignore.
                    LogUtils.e(TAG, "unexpected result from decoder.dequeueOutputBuffer: " +
                            outputBufferIndex);
                } else {
                    boolean doRender = true;
                    // A decoded frame is available. Past the end time: stop
                    // both sides and tag the buffer as EOS.
                    if (mEndTime != -1 &&
                            bufferInfo.presentationTimeUs >= mEndTime * 1000) {
                        inputDone = true;
                        decoderDone = true;
                        doRender = false;
                        bufferInfo.flags |= MediaCodec.BUFFER_FLAG_END_OF_STREAM;
                    }
                    // Before the start time: decode but do not render.
                    if (mStartTime != -1 &&
                            bufferInfo.presentationTimeUs < mStartTime * 1000) {
                        doRender = false;
                        LogUtils.e(TAG, "drop frame startTime = " + mStartTime +
                                " present time = " + bufferInfo.presentationTimeUs / 1000);
                    }
                    // BUGFIX: test the EOS bit with a mask. The original used
                    // "==", which missed EOS buffers that also carry other
                    // flag bits (e.g. key-frame), including the flags value
                    // OR-ed together in the end-time branch above.
                    if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        decoderDone = true;
                        mDecoder.releaseOutputBuffer(outputBufferIndex, false);
                        LogUtils.i(TAG, "decoderDone");
                        break;
                    }
                    // Frame-rate reduction: drop `dropCount` frames out of
                    // every (frameIntervalForDrop + dropCount).
                    if (frameIntervalForDrop > 0) {
                        int remainder = frameIndex % (frameIntervalForDrop + dropCount);
                        if (remainder > frameIntervalForDrop || remainder == 0) {
                            LogUtils.w(TAG, "帧率过高,丢帧:" + frameIndex);
                            doRender = false;
                        }
                    }
                    frameIndex++;
                    mDecoder.releaseOutputBuffer(outputBufferIndex, doRender);
                    if (doRender) {
                        boolean errorWait = false;
                        try {
                            mOutputSurface.awaitNewImage();
                        } catch (Exception e) {
                            errorWait = true;
                            LogUtils.w(TAG, e.getMessage());
                        }
                        if (!errorWait) {
                            if (videoStartTimeUs == -1) {
                                // First rendered frame anchors the output timeline.
                                videoStartTimeUs = bufferInfo.presentationTimeUs;
                                LogUtils.i(TAG, "videoStartTime:" + videoStartTimeUs / 1000);
                            }
                            mOutputSurface.drawImage(false);
                            // Re-base the timestamp to the clip start and apply
                            // the speed factor, in nanoseconds.
                            long presentationTimeNs =
                                    (bufferInfo.presentationTimeUs - videoStartTimeUs) * 1000;
                            if (mSpeed != 0) {
                                presentationTimeNs /= mSpeed;
                            }
                            LogUtils.i(TAG, "drawImage,setPresentationTimeMs:" +
                                    presentationTimeNs / 1000 / 1000);
                            mInputSurface.setPresentationTime(presentationTimeNs);
                            mInputSurface.swapBuffers();
                            // One frame rendered: go feed more input.
                            break;
                        }
                    }
                }
            }
        }
        mDecodeFinished.set(true);
    }
}
|
# AnyKernel3 Ramdisk Mod Script
# osm0sis @ xda-developers

## AnyKernel setup
# begin properties
# NOTE: the quoted block below is parsed by AnyKernel as literal text; do not
# reformat or comment inside the quotes.
properties() { '
kernel.string=DuDu Kernel By YuvaRajAF
do.devicecheck=1
do.modules=0
do.cleanup=1
do.cleanuponabort=0
device.name1=phoenix
supported.versions=9 - 10
supported.patchlevels=
'; } # end properties

# shell variables
# Target partition, A/B-slot handling, and ramdisk compression mode read by
# the AnyKernel core before flashing.
block=/dev/block/bootdevice/by-name/boot;
is_slot_device=0;
ramdisk_compression=auto;

## AnyKernel methods (DO NOT CHANGE)
# import patching functions/variables - see for reference
. tools/ak3-core.sh;

## AnyKernel file attributes
## AnyKernel install
# Unpack the boot image; no ramdisk modifications are made for this kernel,
# so the image is repacked and written straight back.
dump_boot;

# begin ramdisk changes

# end ramdisk changes

write_boot;
## end install
|
// Removes the first element of `arr` in place and returns the same array;
// for an empty array, returns an explanatory message instead.
function deleteFirstElement(arr) {
    if (!arr.length) {
        return "The array is empty";
    }
    arr.shift();
    return arr;
}
// Demo: drop the first element of a sample array and print the result.
// Declared with `const` — the original assignments created accidental
// globals, which throw in strict mode / ES modules.
const numbers = [1, 2, 3, 4, 5];
const shortened = deleteFirstElement(numbers);
console.log(shortened);
# Run the sarek pipeline from the directory containing the input sample sheet.
INPUT=$1
# Quote all path expansions so paths with spaces survive word splitting, and
# abort if the cd fails instead of launching nextflow from the wrong directory.
DIRNAME=$(dirname "$INPUT")
cd "$DIRNAME" || exit 1
# should load Nextflow and Singularity prior to submission
nextflow run /SAN/colcc/TX100_WGS_sarek/src/sarek/main.nf --input "$INPUT" -profile singularity -c /SAN/colcc/TX100_WGS_sarek/src/sarek/ucl.conf -resume --genome GRCh38 -qs 500 --igenomes_base /SAN/colcc/TX100_WGS_raw/wgs/data/references/ --trim_fastq --tools HaplotypeCaller,Manta,Strelka,ASCAT,Mutect2 --skip_qc FastQC,bamQC,BCFtools
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.streampipes.smp.parser;
import org.apache.commons.io.IOUtils;
import org.apache.streampipes.smp.model.AssetModel;
import org.junit.Test;

import java.io.IOException;
import java.nio.charset.StandardCharsets;

import static org.junit.Assert.assertEquals;
/**
 * Verifies that DocumentationParser substitutes the pipeline element's name
 * and description into the documentation template as expected.
 */
public class TestDocumentationParser {

    @Test
    public void testPipelineElementNameReplacement() throws IOException {
        ClassLoader classLoader = this.getClass().getClassLoader();
        AssetModel assetModel = new AssetModel("abc", "Numerical Filter", "Numerical Filter " +
                "Description");
        // Read both fixtures as UTF-8 explicitly: the single-argument
        // IOUtils.toString(InputStream) overload is deprecated and decodes
        // with the platform default charset, making the test locale-dependent.
        String originalContent = IOUtils.toString(classLoader.getResourceAsStream("documentation.md"), StandardCharsets.UTF_8);
        String expectedContent = IOUtils.toString(classLoader.getResourceAsStream("expected.documentation.md"), StandardCharsets.UTF_8);

        String content =
                new DocumentationParser(assetModel)
                        .parseAndStoreDocumentation(originalContent);

        assertEquals(expectedContent, content);
    }
}
|
#!/bin/bash
# CocoaPods "Embed Frameworks" build phase helper.
# Declared as bash explicitly: the script depends on bash-only features
# (arrays, [[ ]], the `function` keyword, `trap ... ERR`, `set -o pipefail`)
# that a strict POSIX /bin/sh does not provide.
set -e
set -u
set -o pipefail

function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}

trap 'on_error $LINENO' ERR

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
BCSYMBOLMAP_DIR="BCSymbolMaps"

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
install_framework()
{
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
if [ -d "${source}/${BCSYMBOLMAP_DIR}" ]; then
# Locate and install any .bcsymbolmaps if present, and remove them from the .framework before the framework is copied
find "${source}/${BCSYMBOLMAP_DIR}" -name "*.bcsymbolmap"|while read f; do
echo "Installing $f"
install_bcsymbolmap "$f" "$destination"
rm "$f"
done
rmdir "${source}/${BCSYMBOLMAP_DIR}"
fi
# Use filter instead of exclude so missing patterns don't throw errors.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
elif [ -L "${binary}" ]; then
echo "Destination binary is symlinked..."
dirname="$(dirname "${binary}")"
binary="${dirname}/$(readlink "${binary}")"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Copies and strips a vendored dSYM
install_dsym() {
local source="$1"
warn_missing_arch=${2:-true}
if [ -r "$source" ]; then
# Copy the dSYM into the targets temp dir.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"
local basename
basename="$(basename -s .dSYM "$source")"
binary_name="$(ls "$source/Contents/Resources/DWARF")"
binary="${DERIVED_FILES_DIR}/${basename}.dSYM/Contents/Resources/DWARF/${binary_name}"
# Strip invalid architectures from the dSYM.
if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
strip_invalid_archs "$binary" "$warn_missing_arch"
fi
if [[ $STRIP_BINARY_RETVAL == 0 ]]; then
# Move the stripped file into its final destination.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
else
# The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
mkdir -p "${DWARF_DSYM_FOLDER_PATH}"
touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.dSYM"
fi
fi
}
# Used as a return value for each invocation of `strip_invalid_archs` function.
# 0 = binary was (or needed no) stripping; 1 = no matching architectures found.
STRIP_BINARY_RETVAL=0

# Strip invalid architectures
strip_invalid_archs() {
  binary="$1"
  warn_missing_arch=${2:-true}
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    if [[ "$warn_missing_arch" == "true" ]]; then
      echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    fi
    STRIP_BINARY_RETVAL=1
    return
  fi
  # Remove every architecture the current build does not target, in place.
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=0
}
# Copies the bcsymbolmap files of a vendored framework
install_bcsymbolmap() {
local bcsymbolmap_path="$1"
local destination="${BUILT_PRODUCTS_DIR}"
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}
# Signs a framework with the provided identity
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identity
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
code_sign_cmd="$code_sign_cmd &"
fi
echo "$code_sign_cmd"
eval "$code_sign_cmd"
fi
}
# Embed the vendored framework for the configurations that link it. The two
# configurations install the identical framework, so test them together.
if [[ "$CONFIGURATION" == "Debug" || "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/CropViewController/CropViewController.framework"
fi
# When signing was dispatched in parallel, wait for the background codesign jobs.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
// Source repository: andreapatri/cms_journal
/**
*
* Header
*
*/
import styled from 'styled-components';
import sizes from '../../assets/styles/sizes';
/**
 * Styled container for the page header bar: fixed 50px height with a
 * truncating title row, plus a `.sticky` variant pinned to the top of the
 * viewport, offset 30.6rem from the left (presumably the sidebar width —
 * verify against the layout component) and sized to the remaining width.
 */
const Header = styled.div`
  margin-bottom: 30px;
  position: relative;
  width: 100%;
  height: 50px;
  .justify-content-end {
    display: flex;
  }
  .header-title p {
    width: 100%;
    margin: 0;
    overflow: hidden;
    text-overflow: ellipsis;
    white-space: nowrap;
    font-size: 1.3rem;
    font-weight: ${sizes.fontWeight.regular};
    line-height: normal;
    color: #787e8f;
  }
  .sticky {
    position: fixed;
    top: 0;
    left: 30.6rem;
    z-index: 1050;
    background-color: white;
    width: calc(100vw - 30.6rem);
    padding-right: 15px;
    height: 60px;
    display: flex;
    .row {
      width: 100%;
      margin: auto;
      margin-top: 16px;
      .header-title p {
        display: none;
      }
      > div > div {
        padding-top: 0;
        h1 {
          font-size: 2rem;
          svg {
            margin-top: 8px;
          }
        }
        button {
          margin-top: auto;
          margin-bottom: auto;
        }
      }
    }
  }
`;

export default Header;
|
// Static content for the solar-system viewer: one entry per body, keyed by
// lowercase name, each with a display title, description text, and image path.
// (Typos in the user-facing descriptions fixed: "lifes" -> "life",
// "farest" -> "farthest", "stand in Saturn" -> "stand on Saturn", etc.)
const planetsData = {
    "sun": {
        title: "Sun",
        information: "Sun, a star located at the center of our solar system. It was born approximately 4.6 billion years ago. It is by far the largest object and about one million Earths could fit inside the sun. The surface of the sun is about 5,500 degrees Celsius (10,000 degrees Fahrenheit). The sun is important because without the sun's heat and light, life on Earth would not exist.",
        img: "img/planets_imgs/sun.png"
    },
    "mercury": {
        title: "Mercury",
        information: "Mercury, the smallest planet in the solar system and the closest to the Sun. Its orbit around the Sun takes 87.97 Earth days. It has no atmosphere and no moons. It has a solid surface that is covered with craters. Temperatures on Mercury's surface can reach 800 degrees Fahrenheit (430 degrees Celsius).",
        img: "img/planets_imgs/mercury.png"
    },
    "venus": {
        title: "Venus",
        information: "Venus, the second planet from the Sun and it is the sixth-largest and the hottest planet in our solar system. Its surface temperature is 462 degrees Celsius and it has no moon. Its orbit period is 224.70 Earth days. ",
        img: "img/planets_imgs/venus.png"
    },
    "earth": {
        title: "Earth",
        information: "Earth, our home planet, is the third planet from the Sun and it is the fifth-largest planet in the solar system. It is currently the only planet in the universe where we've identified life. It takes Earth 23.934 hours to complete a rotation on its axis and 365.26 days to complete an orbit around the sun.",
        img: "img/planets_imgs/earth.png"
    },
    "mars": {
        title: "Mars",
        information: "Mars, the fourth planet from the Sun. It is one of the most explored bodies in our solar system. It is a dusty, cold, desert, world with a very thin atmosphere. The size of the planet is about half the size of Earth. A day on Mars is 24.6 hours and a year on Mars is 687 Earth days. ",
        img: "img/planets_imgs/mars.png"
    },
    "jupiter": {
        title: "Jupiter",
        information: "Jupiter, it is the fifth planet from the sun and it is the largest planet in the solar system. More than 1,300 Earths would fit inside Jupiter. The planet is a giant ball of gas and it is extremely windy. A day on Jupiter is about 10 hours long and a year on Jupiter is 12 years on Earth.",
        img: "img/planets_imgs/jupiter.png"
    },
    "saturn": {
        title: "Saturn",
        information: "Saturn, the sixth planet from the Sun and the second largest planet in our solar system. It has rings and the rings aren't solid. You also can't stand on Saturn, and Saturn is mostly made of gas. A day on Saturn is 10 hours and 42 minutes on Earth and a year on Saturn is 29 years on Earth. ",
        img: "img/planets_imgs/saturn.png"
    },
    "uranus": {
        title: "Uranus",
        information: "Uranus, the seventh planet from the Sun and the third largest planet in our solar system. It is a very cold and windy planet. It rotates at a nearly 90 degree angle from the plane of its orbit. A day on Uranus is about 17 hours on Earth and a year on Uranus is about 84 Earth years.",
        img: "img/planets_imgs/uranus.png"
    },
    "neptune": {
        title: "Neptune",
        information: "Neptune, the eighth planet from the Sun and it is the farthest planet from the Sun. It is an ice giant type planet and it has 14 moons. One day on Neptune is 16 hours on Earth and one year on Neptune is 165 Earth years." ,
        img: "img/planets_imgs/neptune.png"
    }
};
# Mongo criteria builder for policies that are active as of a given date.
module PolicyStatus
  class Active
    def initialize(as_of_date, other_params = {})
      @as_of_date = as_of_date
      @other_params = other_params
    end

    # Full query: the activity expression merged with any extra criteria
    # supplied at construction time.
    def query
      active_as_of_expression.merge(@other_params)
    end

    # A policy is "active" when it is not canceled, its enrollment-group id
    # does not match /DC0.{32}/, and the subscriber ("self" enrollee) has a
    # coverage window containing the target date. Coverage counts as
    # open-ended when coverage_end is missing entirely or explicitly nil,
    # hence the three $or branches.
    def active_as_of_expression
      target_date = @as_of_date
      coverage_end_variants = [
        { "$gte" => target_date },  # explicit end on/after the target date
        { "$exists" => false },     # no coverage_end field at all
        nil                         # coverage_end present but nil
      ]
      branches = coverage_end_variants.map do |end_condition|
        {
          :aasm_state => { "$ne" => "canceled" },
          :eg_id => { "$not" => /DC0.{32}/ },
          :enrollees => { "$elemMatch" => {
            :rel_code => "self",
            :coverage_start => { "$lte" => target_date },
            :coverage_end => end_condition
          } }
        }
      end
      { "$or" => branches }
    end

    # Named constructor: active as of an arbitrary date.
    def self.as_of(this_date, options = {})
      self.new(this_date, options)
    end

    # Named constructor: active right now.
    def self.today(options = {})
      self.new(Date.today, options)
    end

    # Delegate date-range queries to the companion ActiveRange query object.
    def self.between(s_date, e_date)
      ::PolicyStatus::ActiveRange.new(s_date, e_date)
    end
  end
end
|
<gh_stars>1-10
'use strict';

// Unit tests for the FTU Customizer base class: verifies that init() wires a
// one-shot 'customization' event listener which loads the configured resource
// and then calls set() on success (or the error callback on failure).
requireApp('communications/ftu/js/resources.js');
requireApp('communications/ftu/js/customizers/customizer.js');

suite(' Customizer > ', function() {
  var eventName = 'test-event';
  var resourceType = 'blob';
  var resourcePath = '/ftu/test/unit/resources/wallpaper.jpg';

  suite(' init > ', function() {
    var resourceLoaderSpy;
    var customizer;

    suiteSetup(function() {
      // Spy on the shared resource loader to count invocations.
      resourceLoaderSpy = sinon.spy(Resources, 'load');
    });

    setup(function() {
      customizer = new Customizer(eventName, resourceType);
      resourceLoaderSpy.reset();
    });

    teardown(function() {
      customizer = null;
    });

    suiteTeardown(function() {
      resourceLoaderSpy.restore();
    });

    test(' resource loaded ', function() {
      // Init for adding the listener
      customizer.init();
      // Check that listener is working as expected
      var customizationEvent = new CustomEvent('customization', {
        detail: {
          setting: eventName,
          value: resourcePath
        }
      });
      // Once we dispatch the event, handler should
      // manage this properly
      window.dispatchEvent(customizationEvent);
      assert.isTrue(resourceLoaderSpy.calledOnce);
      // A new event should not be handled, because
      // we are removing the listener when the event
      // is handled.
      window.dispatchEvent(customizationEvent);
      assert.isTrue(resourceLoaderSpy.calledOnce);
    });
  });

  suite(' set > ', function() {
    test(' resource available ', function(done) {
      // Failing to load a valid resource is a test failure.
      var onerror = function() {
        assert.ok(false, 'Resource not loaded properly');
        done();
      };
      var customizerSuccessful =
        new Customizer(eventName, resourceType, onerror);
      // set() being invoked is the success signal.
      customizerSuccessful.set = function() {
        done();
      };
      // Init for adding the listener
      customizerSuccessful.init();
      // Check that listener is working as expected
      var customizationEvent = new CustomEvent('customization', {
        detail: {
          setting: eventName,
          value: resourcePath
        }
      });
      // Once we dispatch the event, handler should
      // manage this properly
      window.dispatchEvent(customizationEvent);
    });

    test(' resource unavailable ', function(done) {
      // A bad path must route to the error callback, not set().
      var customizerError = new Customizer(eventName, resourceType, function() {
        done();
      });
      customizerError.set = function() {
        assert.ok(false, 'Resource available when it should not');
        done();
      };
      // Init for adding the listener
      customizerError.init();
      // Check that listener is working as expected
      var customizationEvent = new CustomEvent('customization', {
        detail: {
          setting: eventName,
          value: 'wrong/path/file.jpg'
        }
      });
      // Once we dispatch the event, handler should
      // manage this properly
      window.dispatchEvent(customizationEvent);
    });
  });
});
|
# Given an arg, check if it has been brew cask install-ed yet.
# If not, brew-cask-install it.
# NOTE: `brew cask` is deprecated in current Homebrew (now `brew install
# --cask`); the old form is kept for the brew versions this script targets.
function cask-ensure-installed {
  # Substring match against the installed-casks listing. "$1" is quoted so
  # arguments containing spaces or glob characters cannot break the test
  # or the install invocation (fixes the original unquoted $1).
  if [[ $(brew cask list) != *"$1"* ]]; then
    brew cask install "$1"
  fi
}
|
import pandas as pd

# Summarize the "data" sheet of input.xlsx: total the "amount" column over
# rows whose status is "active", and write the result to output.xlsx.

SOURCE_WORKBOOK = "input.xlsx"
RESULT_WORKBOOK = "output.xlsx"

# Load the source sheet.
records = pd.read_excel(SOURCE_WORKBOOK, sheet_name="data")

# Keep only the active rows and total their amounts.
active_records = records.loc[records["status"] == "active"]
grand_total = active_records["amount"].sum()

# Persist the single-cell summary to the "summary" sheet.
summary = pd.DataFrame({"Total Amount": [grand_total]})
summary.to_excel(RESULT_WORKBOOK, sheet_name="summary", index=False)
<gh_stars>0
import { Repository } from "typeorm";
import { MoneyAmount } from "../models/money-amount";

/**
 * Type declaration for the TypeORM repository of MoneyAmount entities.
 * Declares no custom members; it only narrows Repository<MoneyAmount>.
 */
export declare class MoneyAmountRepository extends Repository<MoneyAmount> {
}
|
<reponame>TshiSelle/Web-Navigation-Bar
/**
@license
Copyright (c) 2018 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
// import polyfill for Symbol and Object.getOwnPropertySymbols
import 'get-own-property-symbols/build/get-own-property-symbols.max';

// Fix issue in toString patch when compiled into strict mode via closure
// https://github.com/es-shims/get-own-property-symbols/issues/16
// In strict mode `this` is not auto-boxed, so undefined/null must be handled
// explicitly before delegating to the captured original toString.
const toString = Object.prototype.toString;
Object.prototype.toString = function () {
    if (this === undefined) {
        return '[object Undefined]';
    }
    else if (this === null) {
        return '[object Null]';
    }
    else {
        return toString.call(this);
    }
};

// overwrite Object.keys to filter out symbols
// (only enumerable own *named* properties are returned, mirroring native
// behavior when the Symbol polyfill stores symbols as own properties)
Object.keys = function (obj) {
    return Object.getOwnPropertyNames(obj).filter((name) => {
        const prop = Object.getOwnPropertyDescriptor(obj, name);
        return prop && prop.enumerable;
    });
};

// implement iterators for IE 11
// String iterator: yields one UTF-16 code unit per step.
// NOTE(review): a native String iterator yields whole code points; this shim
// splits surrogate pairs — presumably acceptable for the polyfill's callers.
if (!String.prototype[Symbol.iterator] || !String.prototype.codePointAt) {
    String.prototype[Symbol.iterator] = function* () {
        for (let i = 0; i < this.length; i++) {
            yield this[i];
        }
    };
}
// Set iterator: snapshot the values via forEach, then yield the snapshot.
if (!Set.prototype[Symbol.iterator]) {
    Set.prototype[Symbol.iterator] = function* () {
        const temp = [];
        this.forEach((value) => {
            temp.push(value);
        });
        for (let i = 0; i < temp.length; i++) {
            yield temp[i];
        }
    };
}
// Map iterator: snapshot [key, value] pairs via forEach, then yield them.
if (!Map.prototype[Symbol.iterator]) {
    Map.prototype[Symbol.iterator] = function* () {
        const entries = [];
        this.forEach((value, key) => {
            entries.push([key, value]);
        });
        for (let i = 0; i < entries.length; i++) {
            yield entries[i];
        }
    };
}
//# sourceMappingURL=symbol.js.map
from flask_sqlalchemy import SQLAlchemy
class ForeignKey:
    """Flask-extension-style helper that produces class decorators for wiring
    SQLAlchemy relationships and foreign keys between model classes.

    Naming conventions assumed by the decorators: table names are lowercase
    singular, model class names are their capitalized forms, and collection
    attributes are the table name plus a trailing ``s``.
    """

    def __init__(self, db, app=None):
        self.app = app
        self.db = db
        if app is not None:
            self.init_app(app)

    def init_app(self, app):
        """Flask extension init hook (currently a no-op placeholder)."""
        # app.config.setdefault('', '')
        # app.teardown_appcontext(self.teardown)
        pass

    def has_attr(self, table):
        """Decorator factory: add a one-to-many collection ``<table>s``
        pointing at the model named after ``table``."""
        def decorate(cls, db=self.db):
            setattr(cls, f"{table}s",
                    db.relationship(table.capitalize(),
                                    back_populates=cls.__name__.lower()))
            return cls
        return decorate

    def attr_to(self, table):
        """Decorator factory: add a ``<table>_id`` foreign-key column plus the
        many-to-one relationship back to ``<table>``."""
        def decorate(cls, db=self.db):
            setattr(cls, f"{table}_id",
                    db.Column(db.String(50), db.ForeignKey(f"{table}.id")))
            setattr(cls, table,
                    db.relationship(table.capitalize(),
                                    back_populates=f"{cls.__name__.lower()}s"))
            return cls
        return decorate

    def tag_to(self, table):
        """Decorator factory: add a many-to-many relationship to ``<table>``
        through an association table whose name is derived deterministically
        from the two table names (larger name first, underscore-prefixed)."""
        def decorate(cls, db=self.db):
            first = table
            second = cls.__name__.lower()
            if first > second:
                join_table_name = f"_{first}_{second}"
            else:
                join_table_name = f"_{second}_{first}"
            join_table = db.Table(join_table_name, db.metadata,
                                  db.Column(f"{first}_id", db.String(50),
                                            db.ForeignKey(f"{first}.id"),
                                            primary_key=True),
                                  db.Column(f"{second}_id", db.String(50),
                                            db.ForeignKey(f"{second}.id"),
                                            primary_key=True),
                                  extend_existing=True
                                  )
            setattr(cls, f"{table}s",
                    db.relationship(table.capitalize(),
                                    secondary=join_table,
                                    back_populates=f"{cls.__name__.lower()}s"))
            return cls
        return decorate
<reponame>kwisoo/dac_vast
# Cue point model. (Comment translated from Japanese: "cue point class".)
class Cuepoint < ApplicationRecord
  # TODO
  # Many-to-many link with campaigns; one-to-many results, removed with us.
  has_and_belongs_to_many :campaigns
  has_many :results, dependent: :destroy
  # Name is required, unique, and 5-20 characters long.
  validates :name, presence: true, uniqueness: true, length: { maximum: 20, minimum: 5 }
end
|
// Barrel file: re-export the screen components under their short names.
export { default as Search } from './Search/SearchScreen'
export { default as MyBooks } from './MyBooks/MyBooksScreen'
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.u1F619 = void 0;
var u1F619 = {
"viewBox": "0 0 2600 2760.837",
"children": [{
"name": "path",
"attribs": {
"d": "M2268.5 1899.5Q2320 1968 2320 2014q0 168-299.5 291.5T1303 2429q-136 0-269.5-14.5T774 2369t-234-80.5-170.5-109-76-118.5-13.5-220q0-444 157.5-763t416-502T1411 393q217 0 441 101t336 296 112 454q0 111-19.5 219.5t-41.5 177-22 115.5q0 75 51.5 143.5zM807 951l7-6q9-9 27.5-17t37.5-8q37 0 60.5 24t29 59 35.5 35q15 0 26-10t11-27q0-46-44.5-100.5T879 846q-34 0-65.5 14.5T765 890l-10 10q-11 11-11 26 0 14 12 25t25 11q15 0 26-11zm250 233q-13-8-25-8-22 0-46.5-4t-47.5-4q-39 0-66 24.5t-27 62.5l1 11v14q0 19-11 25t-25 25-14 46q0 29 24 67.5t24 61.5q0 20 19.5 31t43.5 11q14 0 19-4t5-13q0-33-40-78t-40-71q0-14 9.5-27.5T888 1340q11 0 22 4.5t25 4.5q10 0 13.5-4.5t3.5-10.5q0-12-28.5-27.5T895 1266q0-23 11.5-43t32.5-20q13 0 28 3.5t30 7.5l26 8q12 4 18 4 13 0 21-4.5t8-13.5q0-16-13-24zm631 62q0-85-53.5-141t-130.5-56q-42 0-90 20t-48 50q0 17 11.5 28t26.5 11q10 0 39-16t60-16q53 0 81 38t28 87q0 19 11 29.5t28 10.5q15 0 25.5-10t11.5-25v-10z"
},
"children": []
}]
};
exports.u1F619 = u1F619; |
#!/bin/bash
# Copyright (c) 2021, Oracle and/or its affiliates.
# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl.

# Description
# This script is to generate test parameters for coherence testing.
#
# Positional arguments:
#   $1 - path of the parameters file to write
#   $2 - GitHub user name that owns the template fork
#   $3 - branch of the fork under test

params_file=$1
gh_user=$2
branch_name=$3

# Emit the ARM deployment-parameters document. "GEN-UNIQUE" placeholders are
# substituted later by the test pipeline; the \$schema escape keeps the
# literal $ in the emitted JSON.
cat <<EOF >${params_file}
{
  "\$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentParameters.json#",
  "contentVersion": "1.0.0.0",
  "parameters": {
    "_artifactsLocation": {
      "value": "https://raw.githubusercontent.com/${gh_user}/arm-oraclelinux-wls-cluster/${branch_name}/arm-oraclelinux-wls-cluster/src/main/arm/"
    },
    "_artifactsLocationSasToken": {
      "value": ""
    },
    "adminPasswordOrKey": {
      "value": "GEN-UNIQUE"
    },
    "adminUsername": {
      "value": "GEN-UNIQUE"
    },
    "adminVMName": {
      "value": "GEN-UNIQUE"
    },
    "enableCoherence": {
      "value": true
    },
    "storageAccountName": {
      "value": "GEN-UNIQUE"
    },
    "vmSizeSelect": {
      "value": "Standard_D2as_v4"
    },
    "wlsPassword": {
      "value": "GEN-UNIQUE"
    },
    "wlsUserName": {
      "value": "GEN-UNIQUE"
    }
  }
}
EOF
<filename>sdk/keyvault/azure-security-keyvault-jca/src/samples/java/com/azure/security/keyvault/jca/ServerSSLSample.java<gh_stars>0
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.security.keyvault.jca;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLServerSocket;
import javax.net.ssl.SSLServerSocketFactory;
import java.security.KeyStore;
import java.security.Security;
/**
 * The ServerSSL sample.
 *
 * Demonstrates loading a KeyStore backed by Azure Key Vault via the Key Vault
 * JCA provider, then using it to create a TLS server socket on port 8765.
 * Configuration (vault URI, tenant, client credentials) is read from system
 * properties.
 */
public class ServerSSLSample {

    public void serverSSLSample() throws Exception {
        // Register the Azure Key Vault JCA provider so the "AzureKeyVault"
        // KeyStore type becomes resolvable below.
        KeyVaultJcaProvider provider = new KeyVaultJcaProvider();
        Security.addProvider(provider);

        // Load the vault-backed key store; endpoints and credentials come
        // from the azure.keyvault.* system properties.
        KeyStore ks = KeyStore.getInstance("AzureKeyVault");
        KeyVaultLoadStoreParameter parameter = new KeyVaultLoadStoreParameter(
            System.getProperty("azure.keyvault.uri"),
            System.getProperty("azure.keyvault.aad-authentication-url"),
            System.getProperty("azure.keyvault.tenant-id"),
            System.getProperty("azure.keyvault.client-id"),
            System.getProperty("azure.keyvault.client-secret"));
        ks.load(parameter);

        // Empty key password: key material stays in Key Vault.
        KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
        kmf.init(ks, "".toCharArray());

        SSLContext context = SSLContext.getInstance("TLS");
        context.init(kmf.getKeyManagers(), null, null);

        SSLServerSocketFactory factory = context.getServerSocketFactory();
        // NOTE(review): the server socket is created but never accepted on or
        // closed here — sample code only; a real server must close it.
        SSLServerSocket serverSocket = (SSLServerSocket) factory.createServerSocket(8765);
    }
}
|
package com.java.study.answer.zuo.emiddle.class06;
/**
 * Search in a rotated sorted array that may contain duplicates.
 */
public class Code04_SortedRatateArrayFindNum {

    /**
     * Returns true if {@code num} occurs in {@code arr}, where {@code arr} is
     * a sorted array rotated by an unknown amount and possibly containing
     * duplicates.
     *
     * Standard rotated binary search: at each step at least one half of the
     * range is sorted, which tells us where {@code num} can live. When
     * arr[low] == arr[mid] == arr[high] the sorted half cannot be identified,
     * so the equal prefix is skipped linearly (worst case O(n), as with
     * LeetCode "Search in Rotated Sorted Array II").
     */
    public static boolean isContains(int[] arr, int num) {
        int low = 0;
        int high = arr.length - 1;
        int mid = 0;
        while (low <= high) {
            // Overflow-safe midpoint: fixes the original (low + high) / 2,
            // which can overflow int for very large indices.
            mid = low + (high - low) / 2;
            if (arr[mid] == num) {
                return true;
            }
            // All three probes equal: shrink from the left until the
            // ambiguity breaks or the left half is exhausted.
            if (arr[low] == arr[mid] && arr[mid] == arr[high]) {
                while (low != mid && arr[low] == arr[mid]) {
                    low++;
                }
                if (low == mid) {
                    // Entire left half was the duplicate value; discard it.
                    low = mid + 1;
                    continue;
                }
            }
            if (arr[low] != arr[mid]) {
                if (arr[mid] > arr[low]) {
                    // Left half [low, mid) is sorted.
                    if (num >= arr[low] && num < arr[mid]) {
                        high = mid - 1;
                    } else {
                        low = mid + 1;
                    }
                } else {
                    // Left half contains the rotation point; right half
                    // (mid, high] is sorted.
                    if (num > arr[mid] && num <= arr[high]) {
                        low = mid + 1;
                    } else {
                        high = mid - 1;
                    }
                }
            } else {
                if (arr[mid] < arr[high]) {
                    // Right half (mid, high] is sorted.
                    if (num > arr[mid] && num <= arr[high]) {
                        low = mid + 1;
                    } else {
                        high = mid - 1;
                    }
                } else {
                    // Right half contains the rotation point; left half
                    // [low, mid) is sorted.
                    if (num >= arr[low] && num < arr[mid]) {
                        high = mid - 1;
                    } else {
                        low = mid + 1;
                    }
                }
            }
        }
        return false;
    }

    public static void main(String[] args) {
        int[] arr = { 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 1, 2, 3, 4, 5 };
        int num = 10;
        System.out.println(isContains(arr, num));
    }
}
|
<reponame>burakahmetyoruk/ats-url-signer
package org.sarp.ats.urlsigner.enums;
/**
 * URL-signature verification scopes for ATS (Apache Traffic Server) signed
 * URLs. Each constant carries the flag string placed in the signed URL
 * ({@code value}) and a human-readable description of what parts of the URL
 * participate in signature verification ({@code effect}).
 */
public enum SigningPart {

    FQDN_AND_ALL_DIRECTORY("1", "Use the FQDN and all directory parts for signature verification"),
    FQDN_AND_FIRST_DIRECTORY("110", "Use the FQDN and first directory for signature verification, but ignore the remainder of the path"),
    ONLY_ALL_DIRECTORY("01", "Ignore the FQDN, but verify using all directory parts"),
    ONLY_FIRST_TWO_DIRECTORY("0110", "Ignore the FQDN, and use only the first two directory parts, skipping the remainder, for signatures");

    // Flag string embedded in the signed URL.
    private final String value;
    // Human-readable explanation of the verification scope.
    private final String effect;

    SigningPart(String value, String effect) {
        this.value = value;
        this.effect = effect;
    }

    public String getValue() {
        return value;
    }

    public String getEffect() {
        return effect;
    }
}
|
#!/usr/bin/env bash
# One-shot installer for the HDF Trucking App demo. Order matters: Solr
# collection and Kafka topics must exist before the NiFi flow and Storm
# topology start producing/consuming. Assumes it runs as root on a sandbox
# host with HDF, Ambari Infra Solr, Kafka, NiFi and Storm already installed.
echo "Setting up HDF Trucking App Demo..."
# Resolve the directory containing this script so relative assets work
# regardless of the caller's cwd.
CURR_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
SRC_HOME=$CURR_DIR/..
HDF_TRUCK_HOME=/opt/hdf-trucking-app
mkdir $HDF_TRUCK_HOME
echo "Installing Banana into Ambari Infra Solr..."
git clone https://github.com/lucidworks/banana
cp -R banana /usr/lib/ambari-infra-solr/server/solr-webapp/webapp/
echo "Creating new collection in Ambari Infra Solr..."
$CURR_DIR/solr/create_collection.sh
echo "Adding fields to new collection in Ambari Infra Solr..."
$CURR_DIR/solr/create_fields.sh
echo "Creating Kafka Topics..."
/usr/hdf/current/kafka-broker/bin/kafka-topics.sh --create --zookeeper localhost.localdomain:2181 --replication-factor 1 --partitions 1 --topic truck_speed_events
/usr/hdf/current/kafka-broker/bin/kafka-topics.sh --create --zookeeper localhost.localdomain:2181 --replication-factor 1 --partitions 1 --topic truck_average_speed
echo "Deploying NiFi Flow & Restarting NiFi..."
cd $SRC_HOME
/usr/hdf/current/nifi/bin/nifi.sh stop
/bin/cp -f conf/nifi/flow.xml.gz /var/lib/nifi/conf/
/usr/hdf/current/nifi/bin/nifi.sh start
echo "Installing Maven 3.0.5..."
wget http://mirrors.gigenet.com/apache/maven/maven-3/3.0.5/binaries/apache-maven-3.0.5-bin.tar.gz
tar -zxvf apache-maven-3.0.5-bin.tar.gz -C /opt/
# Export for this shell AND persist to ~/.bashrc for later logins. The
# appended lines expand $M2_HOME/$M2/$PATH at write time, so the literal
# values are baked into ~/.bashrc.
export M2_HOME=/opt/apache-maven-3.0.5
export M2=$M2_HOME/bin
PATH=$M2:$PATH
echo "export M2_HOME=/opt/apache-maven-3.0.5" >> ~/.bashrc
echo "export M2=$M2_HOME/bin" >> ~/.bashrc
echo "PATH=$M2:$PATH" >> ~/.bashrc
source ~/.bashrc
# NOTE(review): duplicated source of ~/.bashrc below — harmless but redundant.
source ~/.bashrc
echo "Building & Deploying Storm Average Speed Topology..."
cd $SRC_HOME/src
mvn clean install
storm jar $SRC_HOME/src/hdf-trucking-storm/target/hdf-trucking-storm-1.0-SNAPSHOT.jar com.hortonworks.trucking.storm.SpeedTopology speed-topology
echo "Building Trucking Data Simulator..."
cd ~
git clone https://github.com/bbende/hdp.git hdp-bbende
cd hdp-bbende
git checkout -b hdf-trucking-app origin/hdf-trucking-app
cd ~/hdp-bbende/app-utils/hdp-app-utils/
mvn clean install -DskipTests
cd ~/hdp-bbende/reference-apps/iot-trucking-app/
mvn clean install -DskipTests
cd ~/hdp-bbende/reference-apps/iot-trucking-app/trucking-data-simulator
mvn clean package assembly:single
echo "Deploying Trucking Data Simulator..."
mkdir $HDF_TRUCK_HOME/simulator
/bin/cp -f target/stream-simulator-jar-with-dependencies.jar $HDF_TRUCK_HOME/simulator/
/bin/cp -R src/main/resources/routes $HDF_TRUCK_HOME/simulator/
/bin/cp -f $SRC_HOME/conf/simulator/generate-data.sh $HDF_TRUCK_HOME/simulator/
chmod ugo+x $HDF_TRUCK_HOME/simulator/generate-data.sh
echo "Starting Trucking Data Simulator..."
cd $HDF_TRUCK_HOME/simulator/
./generate-data.sh
echo "Downloading MiNiFi..."
cd ~
wget https://dist.apache.org/repos/dist/release/nifi/minifi/0.2.0/minifi-0.2.0-bin.tar.gz
tar xzf minifi-0.2.0-bin.tar.gz -C $HDF_TRUCK_HOME/
echo "Deploying MiNiFi config & starting..."
/bin/cp -f $SRC_HOME/conf/minifi/config.yml $HDF_TRUCK_HOME/minifi-0.2.0/conf/
$HDF_TRUCK_HOME/minifi-0.2.0/bin/minifi.sh start
echo "Done setting up HDF Trucking App!"
echo "Go to Banana at http://localhost:8886/solr/banana/src/index.html and import the dashboard from hdf-trucking-app/conf/banana/HDF_Truck_Events-1478197521141"
../../../Target Support Files/Alamofire/Alamofire-umbrella.h |
import numpy as np

# Draw ten samples from the standard normal distribution N(0, 1).
# (standard_normal is the routine that the randn convenience wrapper calls.)
nums = np.random.standard_normal(10)

# Display the samples.
print(nums)
#!/bin/bash
# Build libzmq as static libraries for iOS device (arm64) and the arm64
# simulator, then merge the two into a single XCFramework. Each configure/
# make/install pass targets one SDK; the device and simulator slices differ
# only in sysroot and -mios[-simulator]-version-min flags.
set -euxo pipefail

# libzmq:
# create the static libraries
IOS_SDKROOT=$(xcrun --sdk iphoneos --show-sdk-path)
SIM_SDKROOT=$(xcrun --sdk iphonesimulator --show-sdk-path)
IOS_VERSION_MIN=8.0
# Size-optimized, bitcode-embedded objects for both slices.
OTHER_CPPFLAGS="-Os -fembed-bitcode"

pushd libzmq
ZMQ_DIR=$(pwd)
# Absolute install prefixes, so they stay valid after popd below.
IOS_BUILD_DIR="${ZMQ_DIR}/builds/ios/libzmq_build/arm64-ios"
SIM_BUILD_DIR="${ZMQ_DIR}/builds/ios/libzmq_build/arm64-sim"

# Clean any previous in-tree build before regenerating the build system.
if [[ -e Makefile ]]; then make distclean; fi
./autogen.sh

# --- Device (arm64, iphoneos SDK) slice ---
mkdir -p ${IOS_BUILD_DIR}
./configure \
    CC=clang CXX=clang++ \
    CFLAGS="-arch arm64 -mios-version-min=${IOS_VERSION_MIN} -isysroot ${IOS_SDKROOT} ${OTHER_CPPFLAGS}" \
    CPPFLAGS="-arch arm64 -mios-version-min=${IOS_VERSION_MIN} -isysroot ${IOS_SDKROOT} ${OTHER_CPPFLAGS}" \
    CXXFLAGS="-arch arm64 -mios-version-min=${IOS_VERSION_MIN} -isysroot ${IOS_SDKROOT} ${OTHER_CPPFLAGS}" \
    --prefix=${IOS_BUILD_DIR} \
    --disable-shared \
    --enable-static \
    --host=arm-apple-darwin \
    --disable-perf \
    --disable-curve-keygen
make -j8
make install
make clean

# --- Simulator (arm64, iphonesimulator SDK) slice ---
mkdir -p ${SIM_BUILD_DIR}
./configure \
    CC=clang CXX=clang++ \
    CFLAGS="-arch arm64 -mios-simulator-version-min=${IOS_VERSION_MIN} -isysroot ${SIM_SDKROOT} ${OTHER_CPPFLAGS}" \
    CPPFLAGS="-arch arm64 -mios-simulator-version-min=${IOS_VERSION_MIN} -isysroot ${SIM_SDKROOT} ${OTHER_CPPFLAGS}" \
    CXXFLAGS="-arch arm64 -mios-simulator-version-min=${IOS_VERSION_MIN} -isysroot ${SIM_SDKROOT} ${OTHER_CPPFLAGS}" \
    --prefix=${SIM_BUILD_DIR} \
    --disable-shared \
    --enable-static \
    --host=arm-apple-darwin \
    --disable-perf \
    --disable-curve-keygen
make -j8
make install
make clean
popd

# then, merge them into XCframeworks:
framework=libzmq
rm -rf $framework.xcframework
xcodebuild -create-xcframework \
    -library ${IOS_BUILD_DIR}/lib/libzmq.a -headers ${IOS_BUILD_DIR}/include \
    -library ${SIM_BUILD_DIR}/lib/libzmq.a -headers ${SIM_BUILD_DIR}/include \
    -output $framework.xcframework
#!/bin/bash
# Fetch the mappings of an Elasticsearch index (name in $1, cluster URL with
# credentials in $ES7_URL_WITH_CREDS) and print them formatted for jsonnet.
set -eufo pipefail
IFS=$'\t\n'

target_index=$1

# Strip the index-name wrapper with jq, then normalize through jsonnetfmt.
curl -sSL -H 'Content-Type: application/json' -X GET "${ES7_URL_WITH_CREDS}/${target_index}/_mappings" | jq '.[].mappings' | jsonnetfmt -
|
#!/bin/bash
# Sync db.bak files with remote host in the same network.
# Requires rsync
# Author: Luis Fernando Cruz Carrillo
# Email: lfcruz@udes.edu.mx - quattrococodrilo@gmail.com

# Configuration is taken from the environment.
backupDir=$BACKUPDIR
remoteDir=$REMOTEDIR
user=$USERREMOTE
host=$HOSTREMOTE

# Most recently modified file under the backup directory: find prints
# "<mtime> <path>", sort by mtime, keep the last line, strip the mtime.
# Quoting (fix) keeps paths with spaces intact; $(...) replaces backticks.
# NOTE: a path containing a newline would still defeat this line-oriented
# pipeline.
lastback=$(find "$backupDir" -type f -printf '%T@ %p\n' | sort -n | tail -1 | cut -f2- -d" ")

# Push the latest backup to the remote directory (quoted — fix for
# word-splitting of unquoted expansions in the original).
rsync -avz "$lastback" "$user@$host:$remoteDir"
|
<?php
/**
 * Generate a random alphanumeric password.
 *
 * Uses random_int() (CSPRNG, PHP 7+) instead of rand(), which is not
 * cryptographically secure and must not be used for passwords.
 *
 * @param int $length number of characters to generate (default 7, matching
 *                    the original behavior)
 * @return string the generated password
 */
function generatePassword($length = 7) {
    $chars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890";
    $pass = array(); //remember to declare $pass as an array
    $charsLength = strlen($chars) - 1; //put the length -1 in cache
    for ($i = 0; $i < $length; $i++) {
        // Cryptographically secure index into the alphabet.
        $n = random_int(0, $charsLength);
        $pass[] = $chars[$n];
    }
    return implode($pass); //turn the array into a string
}
echo generatePassword();
?>
package chylex.hee.packets.client;
import io.netty.buffer.ByteBuf;
import java.util.Random;
import net.minecraft.client.entity.EntityClientPlayerMP;
import net.minecraft.tileentity.TileEntity;
import chylex.hee.HardcoreEnderExpansion;
import chylex.hee.packets.AbstractClientPacket;
import chylex.hee.system.abstractions.Vec;
import chylex.hee.system.util.MathUtil;
import chylex.hee.tileentity.TileEntityEnergyCluster;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
/**
 * Server-to-client packet that spawns a line of "energy" particles between a
 * start point and a target point, colored per the energy cluster.
 *
 * Wire format (written by {@link #write}, read by {@link #read}): six doubles
 * (start xyz, target xyz) followed by four bytes (red, green, blue, effType).
 * Density and spacing are NOT serialized — the client derives them from
 * effType in {@link #read} (effType 1 => sparser, lower-density line).
 */
public class C10ParticleEnergyTransfer extends AbstractClientPacket{
    private double startX, startY, startZ;
    private double targetX, targetY, targetZ;
    // Color components are raw signed bytes; handle() re-biases them by +128.
    private byte red, green, blue, effType = 0, density = 3;
    private float spacing = 0.2F;

    // No-arg constructor required for packet deserialization.
    public C10ParticleEnergyTransfer(){}

    // Tile -> energy cluster transfer, using the cluster's own color.
    public C10ParticleEnergyTransfer(TileEntity tile, TileEntityEnergyCluster cluster){
        this(tile.xCoord+0.5D, tile.yCoord+0.5D, tile.zCoord+0.5D, cluster.xCoord+0.5D, cluster.yCoord+0.5D, cluster.zCoord+0.5D, cluster.getColorRaw(0), cluster.getColorRaw(1), cluster.getColorRaw(2));
    }

    // Tile -> arbitrary point transfer; marks the sparse effect variant.
    public C10ParticleEnergyTransfer(TileEntity tile, double targetX, double targetY, double targetZ, byte red, byte green, byte blue){
        this(tile.xCoord+0.5D, tile.yCoord+0.5D, tile.zCoord+0.5D, targetX, targetY, targetZ, red, green, blue);
        effType = 1;
    }

    public C10ParticleEnergyTransfer(double startX, double startY, double startZ, double targetX, double targetY, double targetZ, byte red, byte green, byte blue){
        this.startX = startX;
        this.startY = startY;
        this.startZ = startZ;
        this.targetX = targetX;
        this.targetY = targetY;
        this.targetZ = targetZ;
        this.red = red;
        this.green = green;
        this.blue = blue;
    }

    @Override
    public void write(ByteBuf buffer){
        buffer.writeDouble(startX).writeDouble(startY).writeDouble(startZ);
        buffer.writeDouble(targetX).writeDouble(targetY).writeDouble(targetZ);
        buffer.writeByte(red).writeByte(green).writeByte(blue).writeByte(effType);
    }

    @Override
    public void read(ByteBuf buffer){
        startX = buffer.readDouble();
        startY = buffer.readDouble();
        startZ = buffer.readDouble();
        targetX = buffer.readDouble();
        targetY = buffer.readDouble();
        targetZ = buffer.readDouble();
        red = buffer.readByte();
        green = buffer.readByte();
        blue = buffer.readByte();
        // effType 1 selects the sparse variant: fewer, wider-spaced particles.
        if (buffer.readByte() == 1){
            density = 1;
            spacing = 0.65F;
        }
    }

    @Override
    @SideOnly(Side.CLIENT)
    protected void handle(EntityClientPlayerMP player){
        Random rand = player.worldObj.rand;
        Vec vec = Vec.xyz(targetX-startX, targetY-startY, targetZ-startZ);
        // Number of particle steps along the line, one per `spacing` units.
        int steps = MathUtil.floor(vec.length()*(1F/spacing));
        vec = vec.normalized();
        // NOTE: startX/Y/Z are reused as a walking cursor along the line;
        // color bytes are re-biased from signed [-128,127] to [0,255]/255.
        for(int a = 0; a < steps; a++){
            for(int b = 0; b < density; b++)HardcoreEnderExpansion.fx.global("energy", startX+rand(rand, 0.05D), startY+rand(rand, 0.05D), startZ+rand(rand, 0.05D), rand(rand, 0.02D), rand(rand, 0.02D), rand(rand, 0.02D), (red+128F)/255F, (green+128F)/255F, (blue+128F)/255F);
            startX += vec.x*spacing;
            startY += vec.y*spacing;
            startZ += vec.z*spacing;
        }
    }

    /**
     * Helper method that returns random number between -1 and 1 multiplied by number provided.
     */
    private double rand(Random rand, double mp){
        return (rand.nextDouble()-rand.nextDouble())*mp;
    }
}
|
import CreateElement, { h } from './createElement'
import setEventListener, { e } from './utils/setEventListener'
import { events, selectors } from './utils/constants'
import { isNil } from './utils/isNil'
import { doc } from './utils/doc'
import { css } from './utils/cssClassName'
// Notifier constants
// Supported notification categories; each value doubles as the CSS class
// suffix applied to the notification element (`notify item <type>`).
export const notify = {
  type: {
    danger: 'danger',
    info: 'info',
    warning: 'warning',
    success: 'success',
    reminder: 'reminder',
    todo: 'todo'
  }
}
/**
 * Notifier
 * --------
 *
 * by <NAME> @Augustpi
 *
 * DOM-based toast/confirm notification widget. All notifications share a
 * single #notifier-container element appended to document.body.
 *
 * Constructor options:
 * @param {String} type notify.type constant (CSS class suffix)
 * @param {String} title title
 * @param {String} content content
 * @param {Number} duration auto-close delay in ms; 0 means stay open
 */
export default class Notifier extends CreateElement {
  constructor ({ type, title, content, duration = 0 }) {
    super()
    this.type = type
    this.title = title
    this.content = content
    this.duration = duration
    this.container = this.#createContainer()
  }

  /**
   * Show notification
   */
  show () {
    this.#createNotification(
      this.type,
      this.title,
      this.content,
      this.container,
      this.duration
    )
  }

  /**
   * Confirm notification dialog
   * @param {String} btnCaption button caption
   * @param {Object} data object whose key/value pairs are rendered as
   *                      label/value rows inside the dialog
   * @returns {Object} result: {
   *  ok: HTMLDivELement (okButton)
   *  cancel: HTMLDivELement (cancelButton)
   *  notification: HTMLDivElement (the notification wrapper element)
   * }
   */
  confirm ({
    btnCaption = 'OK',
    data = undefined
  } = {}) {
    // duration 0: a confirm dialog never auto-closes.
    const wrapper = this.#createNotification(this.type, this.title, this.content, this.container, 0)
    const backdrop = this.elm.create(h.div, { class: 'modal-backdrop fade in' })
    const btnWrapper = this.elm.create(h.div, { class: 'cbtn-wrapper' })
    const space = this.elm.create(h.div, { class: 'cspace' })
    const okButton = this.elm.create(h.div, { class: 'notifier-ok-btn btn cbtn cblue' }, btnCaption)
    const cancelButton = this.elm.create(h.div, { class: 'notifier-ok-btn btn cbtn cred' }, 'CANCEL')
    if (data) {
      // Render each data entry as a label/value row.
      const dataWrapper = this.elm.create(h.div, { class: 'cwrapper' })
      for (var key in data) {
        const row = this.elm.create(h.div, { class: 'crow' })
        const label = this.elm.create(h.div, { class: 'label' }, key)
        const value = this.elm.create(h.div, { class: 'data' }, data[key])
        this.elm.add(row, label)
        this.elm.add(row, value)
        this.elm.add(dataWrapper, row)
      }
      this.elm.add(wrapper, dataWrapper)
    }
    this.elm.add(btnWrapper, space)
    this.elm.add(btnWrapper, okButton)
    this.elm.add(btnWrapper, cancelButton)
    this.elm.add(wrapper, btnWrapper)
    // Backdrop blocks interaction with the page; callers remove it via
    // close({ closeBackDrop: true }).
    this.elm.add(document.body, backdrop)
    return {
      result: {
        ok: okButton,
        cancel: cancelButton,
        notification: wrapper
      }
    }
  }

  /**
   * Create Container
   * Reuses the shared #notifier-container if present (tracking the next
   * notification index in this.counter), otherwise creates and appends it.
   * @returns Notifier container
   */
  #createContainer () {
    var nc = doc.getId('notifier-container')
    if (!isNil(nc)) {
      this.counter = nc.children.length + 1
      return doc.getId('notifier-container')
    } else {
      var element = this.elm.create(h.div, { id: 'notifier-container', class: 'notify container' })
      this.elm.add(document.body, element)
      return doc.getId('notifier-container')
    }
  }

  /**
   * Create Notification
   * Builds the notification DOM (close button, header, icon, content),
   * appends it to the container, wires the close button, and animates it in.
   * @param {notify} type Notify object properties
   * @param {String} title Title
   * @param {String} content Notification content
   * @param {Object} container container properties
   * @param {Number} duration Notification duration (ms); > 0 auto-closes
   * @returns Notifer item element
   */
  #createNotification (type, title, content, container, duration) {
    var itemId = 'notifier-item-' + this.counter
    var itemEl = this.elm.create(h.div, { class: `notify item ${type}`, id: itemId })
    var titleEl = this.elm.create(h.div, { class: 'header' }, title)
    var contentEl = this.elm.create(h.div, { class: 'content' }, content)
    var clsEl = this.elm.create(h.div, { class: 'close-btn' }, 'x')
    var iconEl = this.elm.create(h.div, { class: `img img-${type}` })
    var c = this.elm.create(h.div, { class: 'cover' })
    this.elm.add(itemEl, clsEl)
    this.elm.add(itemEl, titleEl)
    this.elm.add(c, iconEl)
    this.elm.add(c, contentEl)
    this.elm.add(itemEl, c)
    this.elm.add(container, itemEl)
    // NOTE(review): `action: h.add` comes from createElement's constants while
    // close() uses e.remove from setEventListener — looks inconsistent;
    // verify h.add is the intended "add listener" action token.
    setEventListener({
      element: clsEl,
      event: events.click,
      action: h.add,
      callback: () => this.close({ elem: itemId })
    })
    // Defer the CSS classes one tick so the show transition animates.
    setTimeout(() => {
      css.add(this.container, 'z')
      css.add(itemEl, 'show-notifier')
    }, 100)
    if (this.duration > 0) {
      setTimeout(() => this.close({ elem: itemId }), duration)
    }
    return itemEl
  }

  /**
   * Close (animate out, then remove) a notification element.
   * @param {HTMLDivElement} elem Element id of the notification item
   * @param {Boolean} closeBackDrop default false; also removes the modal
   *                  backdrop created by confirm()
   */
  close ({ elem, closeBackDrop = false } = {}) {
    const el = doc.getId(elem)
    // Detach the close-button listener before removal.
    setEventListener({
      element: el,
      event: events.click,
      action: e.remove,
      callback: () => this.close()
    })
    if (el != null) {
      css.remove(el, 'show-notifier')
      // Removal is delayed past the hide transition (~600ms).
      setTimeout(() => el.remove(), 600)
      setTimeout(() => this.#removeZ(), 630)
      if (closeBackDrop) {
        const backdrop = doc.query(selectors.backdrop)
        if (backdrop) setTimeout(() => backdrop.remove(), 630)
      }
    }
  }

  /**
   * handler
   * Drops the container's z-index class once no notifications remain.
   */
  #removeZ () {
    const container = document.getElementById("notifier-container")
    if (!isNil(container) && container.children.length === 0)
      container.classList.remove("z")
  }
}
|
#!/usr/bin/env bash
set -e -o xtrace -o errexit -o pipefail -o nounset -u
########################################################################################
# This is a dumbed down version of circle-lock for github
# It prevents concurrent deployments on the same branch:
# the job busy-waits until no earlier workflow run for this branch is in progress.
########################################################################################
# Strip "refs/heads/" to get the plain branch name.
branch=${GITHUB_REF#refs/heads/}
rest=()
github_base_url="api.github.com"
# List workflow runs currently in progress for this branch.
api_url="https://$github_base_url/repos/$GITHUB_REPOSITORY/actions/runs?status=in_progress&branch=$branch"
# Only wait on runs that started BEFORE this one (strictly lower run_number).
jq_prog=".workflow_runs | .[] | select(.run_number < $GITHUB_RUN_NUMBER) | .run_number"
echo "Checking for running builds..."
# unset errexit so we can detect and handle temporary github api failures
set +e
consecutive_failures=0
while true; do
  # pipefail is still in effect, so $? below also reflects a curl failure.
  builds=$(curl --fail --silent --connect-timeout 5 --max-time 10 -H "authorization: Bearer $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3+json" "$api_url" | jq "$jq_prog")
  if [[ $? -ne 0 ]]; then
    echo "Github api call failed"
    consecutive_failures=$(($consecutive_failures + 1))
  elif [[ ! -z ${builds} ]]; then
    # reset failure counter
    consecutive_failures=0
    echo "Waiting on builds:"
    echo "$builds"
  else
    break
  fi
  # limit the number of consecutive failures that we're willing to tolerate
  if [[ ${consecutive_failures} -gt 5 ]]; then
    echo "Failed $consecutive_failures consecutive attempts...giving up"
    exit 1
  fi
  echo "Retrying in 10 seconds..."
  sleep 10
done
echo "Acquired lock"
|
#!/usr/bin/env bats
# This file is used to test the installation and removal
# of a Debian package.
# WARNING: This testing file must be executed as root and can
# dramatically change your system. It removes the 'elasticsearch'
# user/group and also many directories. Do not execute this file
# unless you know exactly what you are doing.
# The test case can be executed with the Bash Automated
# Testing System tool available at https://github.com/sstephenson/bats
# Thanks to Sam Stephenson!
# Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Load test utilities
load packaging_test_utils
# Cleans everything for the 1st execution
# Runs before every test: skip the whole file on non-dpkg systems.
setup() {
    skip_not_dpkg
}
##################################
# Install DEB package
##################################
@test "[DEB] dpkg command is available" {
    clean_before_test
    dpkg --version
}
@test "[DEB] package is available" {
    count=$(ls elasticsearch-$(cat version).deb | wc -l)
    [ "$count" -eq 1 ]
}
# dpkg -s exits with 1 for a package that was never installed.
@test "[DEB] package is not installed" {
    run dpkg -s 'elasticsearch'
    [ "$status" -eq 1 ]
}
@test "[DEB] install package" {
    dpkg -i elasticsearch-$(cat version).deb
}
@test "[DEB] package is installed" {
    dpkg -s 'elasticsearch'
}
##################################
# Check that the package is correctly installed
##################################
@test "[DEB] verify package installation" {
    verify_package_installation
}
##################################
# Check that Elasticsearch is working
##################################
# Starts the installed service and runs the shared smoke tests against it.
@test "[DEB] test elasticsearch" {
    start_elasticsearch_service
    run_elasticsearch_tests
}
##################################
# Uninstall DEB package
##################################
@test "[DEB] remove package" {
    dpkg -r 'elasticsearch'
}
# After 'dpkg -r' (remove, not purge) the package is still known to dpkg
# in state "deinstall ok config-files", so dpkg -s exits 0.
@test "[DEB] package has been removed" {
    run dpkg -s 'elasticsearch'
    [ "$status" -eq 0 ]
    echo "$output" | grep -i "status" | grep -i "deinstall ok"
}
@test "[DEB] verify package removal" {
    # The removal must stop the service
    count=$(ps | grep Elasticsearch | wc -l)
    [ "$count" -eq 0 ]
    # The removal must disable the service
    # see prerm file
    if is_systemd; then
        # Debian systemd distros usually returns exit code 3
        run systemctl status elasticsearch.service
        [ "$status" -eq 3 ]
        run systemctl is-enabled elasticsearch.service
        [ "$status" -eq 1 ]
    fi
    # Those directories are deleted when removing the package
    # see postrm file
    assert_file_not_exist "/var/log/elasticsearch"
    assert_file_not_exist "/usr/share/elasticsearch/plugins"
    assert_file_not_exist "/var/run/elasticsearch"
    # The configuration files are still here
    assert_file_exist "/etc/elasticsearch"
    assert_file_exist "/etc/elasticsearch/elasticsearch.yml"
    assert_file_exist "/etc/elasticsearch/logging.yml"
    # The env file is still here
    assert_file_exist "/etc/default/elasticsearch"
    # The service files are still here
    assert_file_exist "/etc/init.d/elasticsearch"
    assert_file_exist "/usr/lib/systemd/system/elasticsearch.service"
}
@test "[DEB] purge package" {
dpkg --purge 'elasticsearch'
}
@test "[DEB] verify package purge" {
# all remaining files are deleted by the purge
assert_file_not_exist "/etc/elasticsearch"
assert_file_not_exist "/etc/elasticsearch/elasticsearch.yml"
assert_file_not_exist "/etc/elasticsearch/logging.yml"
assert_file_not_exist "/etc/default/elasticsearch"
assert_file_not_exist "/etc/init.d/elasticsearch"
assert_file_not_exist "/usr/lib/systemd/system/elasticsearch.service"
assert_file_not_exist "/usr/share/elasticsearch"
assert_file_not_exist "/usr/share/doc/elasticsearch"
assert_file_not_exist "/usr/share/doc/elasticsearch/copyright"
}
@test "[DEB] package has been completly removed" {
run dpkg -s 'elasticsearch'
[ "$status" -eq 1 ]
}
|
import http from 'http';
import stream from 'stream';
import chalk from 'chalk';
import { Socket } from 'net';
import { ProxyConfig } from './types/proxy-config';
import { createUpgradeHandler } from './mitmproxy/create-upgrade-handler';
import { createConnectHandler } from './mitmproxy/create-connect-handler';
import { createRequestHandler } from './mitmproxy/create-request-handler';
import { RequestHandlerFn } from './types/functions/request-handler-fn';
import { UpgradeHandlerFn } from './types/functions/upgrade-handler-fn';
import { ConnectHandlerFn } from './types/functions/connect-handler-fn';
import { FakeServersCenter } from './tls/fake-servers-center';
import { Context } from './types/contexts/context';
import { ContextNoMitm } from './types/contexts/context-no-mitm';
import { Logger } from './common/logger';
import { ExtendedNetSocket } from './types/extended-net-socket';
/**
 * Man-in-the-middle HTTP/HTTPS proxy.
 *
 * Wraps a plain http.Server and dispatches its events:
 *  - 'request' -> requestHandler (plain HTTP traffic)
 *  - 'connect' -> connectHandler (HTTPS CONNECT tunneling, no-MITM context)
 *  - 'upgrade' -> upgradeHandler (WebSocket upgrades)
 * TLS interception itself is delegated to a lazily created FakeServersCenter.
 */
export class NewProxy {
  public readonly httpServer: http.Server = new http.Server();
  // handlers
  private readonly requestHandler: RequestHandlerFn;
  private readonly upgradeHandler: UpgradeHandlerFn;
  private readonly connectHandler: ConnectHandlerFn;
  // Open sockets are tracked so stop() can destroy them explicitly.
  private serverSockets = new Set<Socket>();
  private clientSockets = new Set<ExtendedNetSocket>();
  private _fakeServersCenter?: FakeServersCenter;
  public constructor(private readonly proxyConfig: ProxyConfig, private readonly logger: Logger) {
    this.requestHandler = createRequestHandler(this.proxyConfig, logger);
    this.upgradeHandler = createUpgradeHandler(this.proxyConfig, logger);
    // NOTE: reading this.fakeServersCenter here runs the lazy getter, so the
    // center is effectively created at construction time.
    this.connectHandler = createConnectHandler(
      this.proxyConfig,
      this.fakeServersCenter,
      this.logger,
      this.clientSockets,
    );
  }
  /** Lazily created center of fake TLS servers used for interception. */
  get fakeServersCenter(): FakeServersCenter {
    if (!this._fakeServersCenter) {
      this._fakeServersCenter = new FakeServersCenter(
        this.proxyConfig,
        this.requestHandler,
        this.upgradeHandler,
        this.logger,
      );
    }
    return this._fakeServersCenter;
  }
  /**
   * Start listening on proxyConfig.port.
   * Resolves once the server is listening; rejects if listen fails first.
   */
  public run(): Promise<void> {
    // Don't reject unauthorized.
    // NOTE(review): this disables TLS certificate verification for the WHOLE
    // process, not just proxied connections.
    process.env.NODE_TLS_REJECT_UNAUTHORIZED = '0';
    return new Promise((resolve, reject) => {
      // Startup failure (e.g. port already in use) rejects the promise.
      this.httpServer.once('error', (error: Error) => {
        reject(error);
      });
      this.httpServer.listen(this.proxyConfig.port, () => {
        this.logger.log(`NewProxy is listening on port ${this.proxyConfig.port}`, chalk.green);
        // After startup, runtime errors are only logged.
        this.httpServer.on('error', (e: Error) => {
          this.logger.logError(e);
        });
        this.httpServer.on('request', (req: http.IncomingMessage, res: http.ServerResponse) => {
          // Plain HTTP request
          const context = new Context(req, res, false);
          this.requestHandler!(context);
        });
        // tunneling for https
        this.httpServer.on(
          'connect',
          (connectRequest: http.IncomingMessage, clientSocket: stream.Duplex, head: Buffer) => {
            // Swallow client socket errors; the tunnel handler owns cleanup.
            clientSocket.on('error', () => {});
            const context = new ContextNoMitm(connectRequest, clientSocket, head);
            this.connectHandler!(context);
          },
        );
        // Track server-side sockets for forced shutdown in stop().
        this.httpServer.on('connection', (socket: Socket) => {
          this.serverSockets.add(socket);
          socket.on('close', () => {
            this.serverSockets.delete(socket);
          });
        });
        // TODO: handle WebSocket
        this.httpServer.on(
          'upgrade',
          (req: http.IncomingMessage, socket: stream.Duplex, head: Buffer) => {
            const ssl = false;
            this.upgradeHandler!(req, socket, head, ssl);
          },
        );
        resolve();
      });
    });
  }
  /** Destroy all tracked sockets, then close the server and fake servers. */
  public async stop(): Promise<void> {
    // Destroy all open sockets first, otherwise close() waits for them.
    this.serverSockets.forEach((socket) => {
      socket.destroy();
    });
    this.clientSockets.forEach((socket) => {
      socket.destroy();
    });
    this.serverSockets = new Set();
    this.clientSockets = new Set();
    const promise: Promise<any> = this.fakeServersCenter?.close() ?? Promise.resolve();
    await Promise.all([this.closeServer(), promise]);
  }
  /** Promisified httpServer.close(). */
  private closeServer(): Promise<void> {
    return new Promise((resolve, reject) => {
      this.httpServer.close((err) => {
        if (err) reject(err);
        // resolve() after reject() is a no-op, so this is harmless on error.
        resolve();
      });
    });
  }
}
|
#!/bin/sh
# Build the site with the production config, make it world-readable, clear the
# old deployed assets, and copy the fresh build into place. Each step only runs
# if the previous one succeeded.
jekyll build --config _config.yml,_config.prod.yml \
  && chmod -cR +r _site \
  && rm -vr /sites/chillidonut/assets/ \
  && cp -r _site/* /sites/chillidonut/ -v
|
#!/usr/bin/env sh
# Smoke-test the backend: start the server, poll it with the given HTTP
# method until it answers 200 (up to 3 tries), then stop the server and
# exit with the result of the probe.

# Always set the current working directory as the project's root
PROJECT_ROOT=$(cd "$(dirname "$0")/.." && pwd)

# HTTP method (e.g. GET, HEAD)
METHOD=$1

# Run server in the background; remember its pid for shutdown.
node "$PROJECT_ROOT/backend/server" &
PID=$!

# Try to make a successful request.
# BUGFIX: {1..3} is a bash-ism that POSIX sh does not expand, so the
# original loop ran exactly once over the literal string "{1..3}".
EXITCODE=1
for i in 1 2 3
do
  curl -Is -X "$METHOD" http://localhost:3000 | grep 200
  EXITCODE=$?
  if [ $EXITCODE -eq 0 ]
  then
    break
  fi
  sleep 1
done

# Stop server (SIGTERM) and propagate the probe result.
kill -15 $PID
exit $EXITCODE
|
<gh_stars>0
import json
from project_manager_pro._meta import cache_commands
from colorama import init, Style, Fore, Back
init(autoreset=True)


def _acmd(alias, body):
    """Add (or overwrite) a command alias in the cached commands file.

    The cache is a JSON file mapping alias -> command body.

    :param alias: short name the user will type
    :param body: command line the alias expands to
    """
    # BUGFIX: the read handle was previously opened without being closed;
    # both the read and the write now use context managers.
    with open(cache_commands, 'r', encoding='utf-8') as f:
        commands = json.load(f)
    commands[alias] = body
    print(Fore.GREEN + '' + alias + ' \"' + commands[alias] + '\"' + ' command added')
    # Persist the updated mapping.
    with open(cache_commands, 'w', encoding='utf-8') as f:
        json.dump(commands, f, ensure_ascii=False)
|
<gh_stars>1-10
const assert = require("assert");
const httpTests = require("../../utils/httpTests");
const { sampleAppointment } = require("../../data/samples");
// Integration tests for the Sendinblue-style email webhook endpoint:
// posting a delivery event must update the matching appointment's status.
httpTests(__filename, ({ startServer }) => {
  // NOTE(review): this test's description says "cfa" but it sets/checks the
  // *candidat* message-id/status fields (the next test does the opposite) —
  // the descriptions look swapped; confirm against the route implementation.
  it("Vérifie qu'on peut prendre en compte des notifications via webhook pour le premier email cfa", async () => {
    const { httpClient, components } = await startServer();
    const messageId = "60ae479632bd2611ce<EMAIL>";
    const emailStatus = "delivered";
    // Create an appointment and attach the message id the webhook will report.
    const appointment = await components.appointments.createAppointment({
      candidat_id: sampleAppointment.candidat_id,
      etablissement_id: sampleAppointment.etablissement_id,
      formation_id: sampleAppointment.formation_id,
      motivations: sampleAppointment.motivations,
      referrer: sampleAppointment.referrer,
    });
    await components.appointments.updateAppointment(appointment._id, {
      email_premiere_demande_candidat_message_id: messageId,
    });
    // Simulate the provider's webhook notification.
    const response = await httpClient.post(`/api/emails/webhook?apikey=1234`, {
      event: emailStatus,
      "message-id": messageId,
      id: 385857,
      date: "2021-05-26 17:19:32",
      ts: 1622042372,
      email: "<EMAIL>",
      ts_event: 1622042372,
    });
    // The appointment's status field must now reflect the delivery event.
    const appointmentUpdated = await components.appointments.getAppointmentById(appointment._id);
    assert.strictEqual(response.status, 200);
    assert.deepStrictEqual(response.data, {});
    assert.strictEqual(appointmentUpdated.email_premiere_demande_candidat_statut, emailStatus);
  });
  it("Vérifie qu'on peut prendre en compte des notifications via webhook pour le premier email candidat", async () => {
    const { httpClient, components } = await startServer();
    const messageId = "60ae479632bd2611ce1bfd5<EMAIL>";
    const emailStatus = "delivered";
    const appointment = await components.appointments.createAppointment({
      candidat_id: sampleAppointment.candidat_id,
      etablissement_id: sampleAppointment.etablissement_id,
      formation_id: sampleAppointment.formation_id,
      motivations: sampleAppointment.motivations,
      referrer: sampleAppointment.referrer,
    });
    // Same flow as above, but for the CFA-side message id.
    await components.appointments.updateAppointment(appointment._id, {
      email_premiere_demande_cfa_message_id: messageId,
    });
    const response = await httpClient.post(`/api/emails/webhook?apikey=1234`, {
      event: emailStatus,
      "message-id": messageId,
      id: 385857,
      date: "2021-05-26 17:19:32",
      ts: 1622042372,
      email: "<EMAIL>",
      ts_event: 1622042372,
    });
    const appointmentUpdated = await components.appointments.getAppointmentById(appointment._id);
    assert.strictEqual(response.status, 200);
    assert.deepStrictEqual(response.data, {});
    assert.strictEqual(appointmentUpdated.email_premiere_demande_cfa_statut, emailStatus);
  });
  // Without the apikey query param the endpoint must refuse the notification.
  it("Vérifie qu'on ne peut pas recevoir des notifications sans webhook key", async () => {
    const { httpClient } = await startServer();
    const response = await httpClient.post(`/api/emails/webhook`, {});
    assert.strictEqual(response.status, 401);
    assert.deepStrictEqual(response.data, {
      error: "Unauthorized",
      message: "Unauthorized",
      statusCode: 401,
    });
  });
});
|
<gh_stars>0
import nacl
from Jumpscale import j
import binascii
JSConfigBase = j.baseclasses.object_config
from nacl.signing import VerifyKey
from nacl.public import PrivateKey, PublicKey, SealedBox
from Jumpscale.clients.gedis.GedisClient import GedisClientActors
class ThreebotClient(JSConfigBase):
    """Client for a remote threebot server.

    Caches gedis connections to the remote bot's packages/actors and
    provides NaCl helpers (sealed-box encryption, signature verification)
    based on the bot's cached public key.
    """

    _SCHEMATEXT = """
    @url = jumpscale.threebot.client
    name** = "" #is the bot dns
    tid** = 0 (I) #threebot id
    host = "127.0.0.1" (S) #for caching purposes
    port = 8901 (ipport) #for caching purposes
    pubkey = "" #for caching purposes
    """

    def _init(self, **kwargs):
        # NaCl objects are built lazily by the properties below.
        self._pubkey_obj = None
        self._verifykey_obj = None
        self._sealedbox_ = None
        # Cache of gedis connections keyed by package name.
        self._gedis_connections = {}
        assert self.name != ""

    @property
    def actors_base(self):
        # Actors of the remote bot's "zerobot.base" package.
        cl = j.clients.gedis.get(name=self.name, host=self.host, port=self.port, package_name="zerobot.base")
        return cl.actors

    def client_get(self, packagename):
        """Return (and cache) a gedis client for the given package.

        NOTE(review): the port is hard-coded to 8901 and self.host is not
        passed, unlike actors_get() — confirm this is intentional.
        """
        if not packagename in self._gedis_connections:
            key = "%s__%s" % (self.name, packagename.replace(".", "__"))
            cl = j.clients.gedis.get(name=key, port=8901, package_name=packagename)
            self._gedis_connections[packagename] = cl
        return self._gedis_connections[packagename]

    def actors_get(self, package_name=None, status="installed"):
        """Get actors for package_name given. If status="all" then all the actors will be returned

        :param package_name: name of package to be loaded that has the actors needed. If value is "all" then all actors from all packages are retrieved
        :type package_name: str
        :return: actors of package(s)
        :type return: GedisClientActors (contains all the actors as properties)
        """
        if not package_name:
            # No package given: aggregate actors of every package with the
            # requested status, as reported by the remote package manager.
            actors = GedisClientActors()
            package_manager_actor = j.clients.gedis.get(
                name="packagemanager", host=self.host, port=self.port, package_name="zerobot.packagemanager"
            ).actors.package_manager
            for package in package_manager_actor.packages_list(status=status).packages:
                name = package.name
                if name not in self._gedis_connections:
                    g = j.clients.gedis.get(
                        name=f"{name}_{self.name}", host=self.host, port=self.port, package_name=name
                    )
                    self._gedis_connections[name] = g
                # Merge this package's actors onto the aggregate object.
                for k, v in self._gedis_connections[name].actors._ddict.items():
                    setattr(actors, k, v)
            return actors
        else:
            # Single package: connect once and return its actors directly.
            if package_name not in self._gedis_connections:
                g = j.clients.gedis.get(
                    name=f"{package_name}_{self.name}", host=self.host, port=self.port, package_name=package_name
                )
                self._gedis_connections[package_name] = g
            return self._gedis_connections[package_name].actors

    def reload(self):
        # Reload every cached gedis connection (e.g. after remote changes).
        for key, g in self._gedis_connections.items():
            g.reload()

    @property
    def actors_all(self):
        # Convenience alias: actors of all installed packages.
        return self.actors_get(status="installed")

    def encrypt_for_threebot(self, data, hex=False):
        """
        Encrypt data using the public key of the remote threebot
        :param data: data to be encrypted, should be of type binary
        :param hex: when True, return the ciphertext hex-encoded
        @return: encrypted data hex or binary
        """
        if isinstance(data, str):
            data = data.encode()
        res = self._sealedbox.encrypt(data)
        if hex:
            res = binascii.hexlify(res)
        return res

    def verify_from_threebot(self, data, signature, data_is_hex=False):
        """
        :param data, if string will unhexlify else binary data to verify against verification key of the threebot who send us the data
        :param signature: signature bytes (or 128-char hex string, which is unhexlified)
        :return: the verified message (raises nacl BadSignatureError on failure)
        """
        if isinstance(data, str) or data_is_hex:
            data = binascii.unhexlify(data)
        # A 128-char signature is the hex encoding of a 64-byte signature.
        if len(signature) == 128:
            signature = binascii.unhexlify(signature)
        return self.verifykey_obj.verify(data, signature=signature)

    @property
    def _sealedbox(self):
        # Lazy sealed box built from the bot's curve25519 public key.
        if not self._sealedbox_:
            self._sealedbox_ = SealedBox(self.pubkey_obj)
        return self._sealedbox_

    @property
    def pubkey_obj(self):
        # Curve25519 public key derived from the ed25519 verify key.
        if not self._pubkey_obj:
            self._pubkey_obj = self.verifykey_obj.to_curve25519_public_key()
        return self._pubkey_obj

    @property
    def verifykey_obj(self):
        # Ed25519 verify key parsed from the cached hex pubkey.
        if not self._verifykey_obj:
            assert self.pubkey
            verifykey = binascii.unhexlify(self.pubkey)
            assert len(verifykey) == 32
            self._verifykey_obj = VerifyKey(verifykey)
        return self._verifykey_obj

    def test_auth(self, bot_id):
        """Authenticate against the remote bot by signing the current epoch.

        NOTE(review): self._gedis is not defined anywhere in this class, so
        this likely raises AttributeError — confirm the intended connection.
        """
        nacl_cl = j.data.nacl.get()
        nacl_cl._load_singing_key()
        epoch = str(j.data.time.epoch)
        signed_message = nacl_cl.sign(epoch.encode()).hex()
        cmd = "auth {} {} {}".format(bot_id, epoch, signed_message)
        return self._gedis._redis.execute_command(cmd)
|
#!/bin/bash
# This script is copied from librispeech/s5
# This is based on tdnn_1d_sp, but adding cnn as the front-end.
# The cnn-tdnn-f (tdnn_cnn_1a_sp) outperforms the tdnn-f (tdnn_1d_sp).
# bash local/chain/compare_wer.sh exp/chain_cleaned/tdnn_1d_sp exp/chain_cleaned/tdnn_cnn_1a_sp/
# System tdnn_1d_sp tdnn_cnn_1a_sp
# WER on dev(fglarge) 3.29 3.34
# WER on dev(tglarge) 3.44 3.39
# WER on dev(tgmed) 4.22 4.29
# WER on dev(tgsmall) 4.72 4.77
# WER on dev_other(fglarge) 8.71 8.62
# WER on dev_other(tglarge) 9.05 9.00
# WER on dev_other(tgmed) 11.09 10.93
# WER on dev_other(tgsmall) 12.13 12.02
# WER on test(fglarge) 3.80 3.69
# WER on test(tglarge) 3.89 3.80
# WER on test(tgmed) 4.72 4.64
# WER on test(tgsmall) 5.19 5.16
# WER on test_other(fglarge) 8.76 8.71
# WER on test_other(tglarge) 9.19 9.11
# WER on test_other(tgmed) 11.22 11.00
# WER on test_other(tgsmall) 12.24 12.16
# Final train prob -0.0378 -0.0420
# Final valid prob -0.0374 -0.0400
# Final train prob (xent) -0.6099 -0.6881
# Final valid prob (xent) -0.6353 -0.7180
# Num-parameters 22623456 18100736
set -e

# configs for 'chain'
stage=0
decode_nj=10
train_set=train_all_cleaned
gmm=tri4a_cleaned
nnet3_affix=_cleaned

# The rest are configs specific to this script. Most of the parameters
# are just hardcoded at this level, in the commands below.
affix=cnn_1a
tree_affix=
train_stage=-10
get_egs_stage=-10

# TDNN options
frames_per_eg=150,110,100
remove_egs=true
common_egs_dir=
xent_regularize=0.1
dropout_schedule='0,0@0.20,0.5@0.50,0'
test_online_decoding=true  # if true, it will run the last decoding stage.

# End configuration section.
echo "$0 $@"  # Print the command line for logging

. ./cmd.sh
. ./path.sh
# parse_options.sh lets any of the variables above be overridden on the
# command line, e.g. --stage 15.
. ./utils/parse_options.sh
if ! cuda-compiled; then
  cat <<EOF && exit 1
This script is intended to be used with GPUs but you have not compiled Kaldi with CUDA
If you want to use GPUs (and have them), go to src/, and configure and make on a machine
where "nvcc" is installed.
EOF
fi

# The iVector-extraction and feature-dumping parts are the same as the standard
# nnet3 setup, and you can skip them by setting "--stage 11" if you have already
# run those things.
# NOTE(review): $test_sets is not defined in this script — confirm it comes
# from the environment or a command-line option.
local/chain/run_ivector_common.sh --stage $stage \
                                  --train-set $train_set \
                                  --test-sets "$test_sets" \
                                  --gmm $gmm \
                                  --nnet3-affix "$nnet3_affix" || exit 1;

gmm_dir=exp/$gmm
ali_dir=exp/${gmm}_ali_${train_set}_sp
tree_dir=exp/chain${nnet3_affix}/tree_sp${tree_affix:+_$tree_affix}
lang=data/lang_chain
lat_dir=exp/chain${nnet3_affix}/${gmm}_${train_set}_sp_lats
# BUGFIX: this used ${nnet3_afqstfix} (a typo, always empty), which silently
# placed the model directory under exp/chain/ instead of exp/chain${nnet3_affix}/.
dir=exp/chain${nnet3_affix}/tdnn${affix:+_$affix}_sp
train_data_dir=data/${train_set}_sp_hires
lores_train_data_dir=data/${train_set}_sp
train_ivector_dir=exp/nnet3${nnet3_affix}/ivectors_${train_set}_sp_hires

# if we are using the speed-perturbed data we need to generate
# alignments for it.
for f in $gmm_dir/final.mdl $train_data_dir/feats.scp $train_ivector_dir/ivector_online.scp \
    $lores_train_data_dir/feats.scp $ali_dir/ali.1.gz; do
  [ ! -f $f ] && echo "$0: expected file $f to exist" && exit 1
done

# Please take this as a reference on how to specify all the options of
# local/chain/run_chain_common.sh
local/chain/run_chain_common.sh --stage $stage \
                                --gmm-dir $gmm_dir \
                                --ali-dir $ali_dir \
                                --lores-train-data-dir ${lores_train_data_dir} \
                                --lang $lang \
                                --lat-dir $lat_dir \
                                --num-leaves 9000 \
                                --tree-dir $tree_dir || exit 1;
# Stage 14: build the CNN-TDNN-F network configuration via the xconfig parser.
if [ $stage -le 14 ]; then
  echo "$0: creating neural net configs using the xconfig parser";

  num_targets=$(tree-info $tree_dir/tree | grep num-pdfs | awk '{print $2}')
  learning_rate_factor=$(echo "print (0.5/$xent_regularize)" | python)
  cnn_opts="l2-regularize=0.01"
  ivector_affine_opts="l2-regularize=0.0"
  affine_opts="l2-regularize=0.008 dropout-proportion=0.0 dropout-per-dim=true dropout-per-dim-continuous=true"
  tdnnf_first_opts="l2-regularize=0.008 dropout-proportion=0.0 bypass-scale=0.0"
  tdnnf_opts="l2-regularize=0.008 dropout-proportion=0.0 bypass-scale=0.75"
  linear_opts="l2-regularize=0.008 orthonormal-constraint=-1.0"
  prefinal_opts="l2-regularize=0.008"
  output_opts="l2-regularize=0.005"

  mkdir -p $dir/configs
  # NOTE: everything up to EOF is literal xconfig data, not shell.
  cat <<EOF > $dir/configs/network.xconfig
  input dim=100 name=ivector
  input dim=43 name=input

  # MFCC to filterbank
  idct-layer name=idct input=input dim=40 cepstral-lifter=22 affine-transform-file=$dir/configs/idct.mat
  linear-component name=ivector-linear $ivector_affine_opts dim=200 input=ReplaceIndex(ivector, t, 0)
  batchnorm-component name=ivector-batchnorm target-rms=0.025
  batchnorm-component name=idct-batchnorm input=idct

  combine-feature-maps-layer name=combine_inputs input=Append(idct-batchnorm, ivector-batchnorm) num-filters1=1 num-filters2=5 height=40
  conv-relu-batchnorm-layer name=cnn1 $cnn_opts height-in=40 height-out=40 time-offsets=-1,0,1 height-offsets=-1,0,1 num-filters-out=64
  conv-relu-batchnorm-layer name=cnn2 $cnn_opts height-in=40 height-out=40 time-offsets=-1,0,1 height-offsets=-1,0,1 num-filters-out=64
  conv-relu-batchnorm-layer name=cnn3 $cnn_opts height-in=40 height-out=20 height-subsample-out=2 time-offsets=-1,0,1 height-offsets=-1,0,1 num-filters-out=128
  conv-relu-batchnorm-layer name=cnn4 $cnn_opts height-in=20 height-out=20 time-offsets=-1,0,1 height-offsets=-1,0,1 num-filters-out=128
  conv-relu-batchnorm-layer name=cnn5 $cnn_opts height-in=20 height-out=10 height-subsample-out=2 time-offsets=-1,0,1 height-offsets=-1,0,1 num-filters-out=256
  conv-relu-batchnorm-layer name=cnn6 $cnn_opts height-in=10 height-out=10 time-offsets=-1,0,1 height-offsets=-1,0,1 num-filters-out=256

  # the first TDNN-F layer has no bypass
  tdnnf-layer name=tdnnf7 $tdnnf_first_opts dim=1536 bottleneck-dim=256 time-stride=0
  tdnnf-layer name=tdnnf8 $tdnnf_opts dim=1536 bottleneck-dim=160 time-stride=3
  tdnnf-layer name=tdnnf9 $tdnnf_opts dim=1536 bottleneck-dim=160 time-stride=3
  tdnnf-layer name=tdnnf10 $tdnnf_opts dim=1536 bottleneck-dim=160 time-stride=3
  tdnnf-layer name=tdnnf11 $tdnnf_opts dim=1536 bottleneck-dim=160 time-stride=3
  tdnnf-layer name=tdnnf12 $tdnnf_opts dim=1536 bottleneck-dim=160 time-stride=3
  tdnnf-layer name=tdnnf13 $tdnnf_opts dim=1536 bottleneck-dim=160 time-stride=3
  tdnnf-layer name=tdnnf14 $tdnnf_opts dim=1536 bottleneck-dim=160 time-stride=3
  tdnnf-layer name=tdnnf15 $tdnnf_opts dim=1536 bottleneck-dim=160 time-stride=3
  tdnnf-layer name=tdnnf16 $tdnnf_opts dim=1536 bottleneck-dim=160 time-stride=3
  tdnnf-layer name=tdnnf17 $tdnnf_opts dim=1536 bottleneck-dim=160 time-stride=3
  tdnnf-layer name=tdnnf18 $tdnnf_opts dim=1536 bottleneck-dim=160 time-stride=3
  linear-component name=prefinal-l dim=256 $linear_opts

  ## adding the layers for chain branch
  prefinal-layer name=prefinal-chain input=prefinal-l $prefinal_opts big-dim=1536 small-dim=256
  output-layer name=output include-log-softmax=false dim=$num_targets $output_opts

  # adding the layers for xent branch
  prefinal-layer name=prefinal-xent input=prefinal-l $prefinal_opts big-dim=1536 small-dim=256
  output-layer name=output-xent dim=$num_targets learning-rate-factor=$learning_rate_factor $output_opts
EOF
  steps/nnet3/xconfig_to_configs.py --xconfig-file $dir/configs/network.xconfig --config-dir $dir/configs/
fi

# Stage 15: train the chain model.
if [ $stage -le 15 ]; then
  # On the CLSP grid, spread egs over several disks.
  if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d $dir/egs/storage ]; then
    utils/create_split_dir.pl \
     /export/b0{5,6,7,8}/$USER/kaldi-data/egs/multi_cn-$(date +'%m_%d_%H_%M')/s5c/$dir/egs/storage $dir/egs/storage
  fi

  steps/nnet3/chain/train.py --stage $train_stage \
    --use-gpu "wait" \
    --cmd "$decode_cmd" \
    --feat.online-ivector-dir $train_ivector_dir \
    --feat.cmvn-opts "--norm-means=false --norm-vars=false" \
    --chain.xent-regularize $xent_regularize \
    --chain.leaky-hmm-coefficient 0.1 \
    --chain.l2-regularize 0.0 \
    --chain.apply-deriv-weights false \
    --chain.lm-opts="--num-extra-lm-states=2000" \
    --egs.dir "$common_egs_dir" \
    --egs.stage $get_egs_stage \
    --egs.opts "--frames-overlap-per-eg 0 --constrained false" \
    --egs.chunk-width $frames_per_eg \
    --trainer.dropout-schedule $dropout_schedule \
    --trainer.add-option="--optimization.memory-compression-level=2" \
    --trainer.num-chunk-per-minibatch 128,64 \
    --trainer.frames-per-iter 3000000 \
    --trainer.num-epochs 4 \
    --trainer.optimization.num-jobs-initial 3 \
    --trainer.optimization.num-jobs-final 16 \
    --trainer.optimization.initial-effective-lrate 0.00015 \
    --trainer.optimization.final-effective-lrate 0.000015 \
    --trainer.max-param-change 2.0 \
    --cleanup.remove-egs $remove_egs \
    --feat-dir $train_data_dir \
    --tree-dir $tree_dir \
    --lat-dir $lat_dir \
    --dir $dir || exit 1;
fi

# Stage 16: build the decoding graph.
graph_dir=$dir/graph_tg
if [ $stage -le 16 ]; then
  # Note: it's not important to give mkgraph.sh the lang directory with the
  # matched topology (since it gets the topology file from the model).
  utils/mkgraph.sh --self-loop-scale 1.0 --remove-oov \
    data/lang_combined_tg $dir $graph_dir
  # remove <UNK> (word id is 3) from the graph, and convert back to const-FST.
  fstrmsymbols --apply-to-output=true --remove-arcs=true "echo 3|" $graph_dir/HCLG.fst - | \
    fstconvert --fst_type=const > $graph_dir/temp.fst
  mv $graph_dir/temp.fst $graph_dir/HCLG.fst
fi

# Stage 17: decode each test set in parallel.
if [ $stage -le 17 ]; then
  rm $dir/.error 2>/dev/null || true
  for decode_set in $test_sets; do
    (
    # NOTE(review): data path is data/${decode_set}/test_hires (nested),
    # not the more common data/${decode_set}_hires — confirm layout.
    steps/nnet3/decode.sh --acwt 1.0 --post-decode-acwt 10.0 \
      --nj $decode_nj --cmd "$decode_cmd" \
      --online-ivector-dir exp/nnet3${nnet3_affix}/ivectors_${decode_set}_hires \
      $graph_dir data/${decode_set}/test_hires $dir/decode_${decode_set}_tg || exit 1
    ) || touch $dir/.error &
  done
  wait
  [ -f $dir/.error ] && echo "$0: there was a problem while decoding" && exit 1
fi

# Stage 18: optional online-decoding check with the prepared online model.
if $test_online_decoding && [ $stage -le 18 ]; then
  # note: if the features change (e.g. you add pitch features), you will have to
  # change the options of the following command line.
  steps/online/nnet3/prepare_online_decoding.sh \
    --mfcc-config conf/mfcc_hires.conf --add-pitch true \
    $lang exp/nnet3${nnet3_affix}/extractor $dir ${dir}_online

  rm $dir/.error 2>/dev/null || true
  for data in $test_sets; do
    (
    nspk=$(wc -l <data/${data}/test/spk2utt)
    # note: we just give it "data/${data}" as it only uses the wav.scp, the
    # feature type does not matter.
    steps/online/nnet3/decode.sh \
      --acwt 1.0 --post-decode-acwt 10.0 \
      --nj $nspk --cmd "$decode_cmd" \
      $graph_dir data/${data}/test ${dir}_online/decode_${data}_tg || exit 1
    ) || touch $dir/.error &
  done
  wait
  [ -f $dir/.error ] && echo "$0: there was a problem while decoding" && exit 1
fi

exit 0;
|
import heapq
import random
from collections import Counter


def rearrange_characters(string):
    """Return a random rearrangement of *string* with no two equal adjacent chars.

    The result uses exactly the characters of *string* (a true rearrangement);
    ties between equally frequent characters are broken randomly, so repeated
    calls can yield different arrangements.

    BUGFIX: the original appended ``ch`` while it was still ``None`` on the
    first iteration (TypeError on every non-empty input) and used ``random``
    without importing it; it also appended extra random characters instead of
    rearranging the existing ones.

    :param string: input string
    :returns: rearranged string (empty / single-char input returned unchanged)
    :raises ValueError: if no adjacent-distinct arrangement exists, i.e. some
        character occurs more than ``(len(string) + 1) // 2`` times
    """
    if len(string) < 2:
        return string
    counts = Counter(string)
    if max(counts.values()) > (len(string) + 1) // 2:
        raise ValueError("no rearrangement without adjacent duplicates exists")
    # Greedy max-heap strategy: always emit the most frequent remaining
    # character that differs from the previously emitted one. The random
    # tie-breaker key shuffles equally frequent characters.
    heap = [(-count, random.random(), ch) for ch, count in counts.items()]
    heapq.heapify(heap)
    result = []
    prev = None
    while heap:
        neg, _, ch = heapq.heappop(heap)
        if ch == prev:
            # Cannot repeat the last character: emit the runner-up instead,
            # then push both back with updated counts.
            neg2, _, ch2 = heapq.heappop(heap)
            result.append(ch2)
            prev = ch2
            if neg2 + 1 < 0:
                heapq.heappush(heap, (neg2 + 1, random.random(), ch2))
            heapq.heappush(heap, (neg, random.random(), ch))
        else:
            result.append(ch)
            prev = ch
            if neg + 1 < 0:
                heapq.heappush(heap, (neg + 1, random.random(), ch))
    return "".join(result)


# Demo driver.
string = "abccc"
print(rearrange_characters(string))
<reponame>KiraLT/common-stuff
import { isPlainObject } from '.'
/**
 * Compares two values for deep equality.
 *
 * Supported types: all primitives, `null`, `undefined`, `array`, `object`, `Date`
 *
 * @category Object
 * @param a any value to compare
 * @param b any value to compare
 * @returns `true` if values are equal
 */
export function isEqual(a: unknown, b: unknown): boolean {
    // Identity / identical primitives.
    if (a === b) {
        return true
    }
    // Dates compare by timestamp.
    if (a instanceof Date && b instanceof Date) {
        return a.getTime() === b.getTime()
    }
    // Arrays compare element-wise, recursively.
    if (a instanceof Array && b instanceof Array) {
        return (
            a.length === b.length &&
            a.every((item, index) => isEqual(item, b[index]))
        )
    }
    // Plain objects compare entry-by-entry, recursively.
    if (isPlainObject(a) && isPlainObject(b)) {
        const aEntries = Object.entries(a)
        return (
            aEntries.length === Object.keys(b).length &&
            aEntries.every(([key, value]) => isEqual(value, b[key]))
        )
    }
    return false
}
/**
 * Converts object to entries, map's it with provided callback and flattens entry list.
 *
 * Returning zero entries drops a key, one entry maps it, several entries fan it out.
 *
 * @example
 * ```
 * flatMapRecord({'a': 2}, ([k, v]) => [[k, v]])
 * >> {'a': 2}
 * flatMapRecord({'a': 2, 'b': 3}, ([k, v]) => v === 2 ? [[k, v]] : [])
 * >> {'a': 2}
 * ```
 * @category Object
 * @param obj `Record` like object
 * @param callback map callback, accepts entry pair (`[key, value]`) and should return list of entry pairs
 * @returns new mapped object
 */
export function flatMapRecord<K extends keyof any, V, RK extends keyof any, RV>(
    obj: Record<K, V>,
    callback: (entry: [K, V]) => Array<[RK, RV]>
): Record<RK, RV> {
    const entries = Object.entries(obj) as Array<[K, V]>
    // Map each entry to zero-or-more entries, then fold them into one object
    // (later entries overwrite earlier ones on key collision).
    return entries.map(callback).reduce((prev, values) => {
        values.forEach(([key, value]) => {
            prev[key] = value
        })
        return prev
    }, {} as Record<RK, RV>)
}
/**
 * Converts object to entries and map's it with provided callback.
 *
 * @example
 * ```
 * mapRecord({'a': 2}, ([k, v]) => [k, v * 2])
 * >> {'a': 4}
 * mapRecord({'a': 'b'}, ([k, v]) => [v, k])
 * >> {'b': 'a'}
 * ```
 * @category Object
 * @param obj `Record` like plain object
 * @param callback map callback, accepts entry pair (`[key, value]`) and should return entry pair
 * @returns new mapped object
 */
export function mapRecord<K extends keyof any, V, RK extends keyof any, RV>(
    obj: Record<K, V>,
    callback: (entry: [K, V]) => [RK, RV]
): Record<RK, RV> {
    // One-to-one mapping is flatMapRecord with a single-entry result.
    return flatMapRecord(obj, (v) => [callback(v)])
}
/**
 * Filter object entries by the provided predicate.
 *
 * @example
 * ```
 * filterRecord({'a': 2, 'b': 3}, ([k, v]) => v === 2)
 * >> {'a': 2}
 * ```
 * @category Object
 * @param obj `Record` like plain object
 * @param callback predicate, accepts an entry pair (`[key, value]`) and returns a boolean
 * @returns new filtered object
 */
export function filterRecord<K extends keyof any, V>(
    obj: Record<K, V>,
    callback: (entry: [K, V]) => boolean
): Record<K, V> {
    // Keep an entry when the predicate passes, drop it otherwise.
    return flatMapRecord(obj, (entry) => (callback(entry) ? [entry] : []))
}
/**
 * Merges `source` into `target` recursively.
 *
 * @example
 * ```
 * merge({ a: 1 }, { b: 2 })
 * // { a: 1, b: 2 }
 * ```
 * @category Object
 * @param target value merged into
 * @param source value merged from (wins on conflicts, subject to options)
 * @param options merge behaviour tweaks, applied at every depth
 * @returns the merged value
 */
export function merge<T>(
    target: unknown,
    source: unknown,
    options?: {
        /**
         * When `source` has `null` or `undefined` value, do not overwrite `target`
         */
        skipNulls?: boolean
        /**
         * Array merge policy, default is `overwrite`
         *
         * Available policies:
         * * `overwrite` - always replace `target` array with `source`
         * * `merge` - merge `target` and `source` array values
         * * `(target, source) => source` - custom array merge function
         */
        arrayPolicy?:
            | 'overwrite'
            | 'merge'
            | ((target: unknown, source: unknown) => unknown)
    }
): T {
    const { skipNulls = false, arrayPolicy = 'overwrite' } = options ?? {}
    if (isPlainObject(target) && isPlainObject(source)) {
        return Object.entries(source).reduce(
            (prev, [key, value]) => {
                // BUGFIX: forward `options` into the recursion; previously
                // skipNulls/arrayPolicy were silently dropped below depth 1.
                prev[key] = merge(prev[key], value, options)
                return prev
            },
            { ...target }
        ) as any as T
    }
    if (target instanceof Array && source instanceof Array) {
        if (arrayPolicy === 'merge') {
            return target.concat(source) as any as T
        } else if (typeof arrayPolicy === 'function') {
            return arrayPolicy(target, source) as any as T
        } else {
            return source as any as T
        }
    }
    if (skipNulls && source == null) {
        return target as T
    }
    return source as T
}
/**
 * Return a clone of given value.
 *
 * @category Object
 * @param value any value
 * @param recursive should recursive values (object and array) be cloned
 */
export function clone<T>(value: T, recursive = true): T {
  if (isPlainObject(value)) {
    const copied: Record<keyof any, unknown> = {}
    for (const [key, entry] of Object.entries(value)) {
      copied[key] = recursive ? clone(entry) : entry
    }
    return copied as any as T
  }
  if (value instanceof Array) {
    return value.map((entry) => (recursive ? clone(entry) : entry)) as any as T
  }
  // Anything that is not a plain object or array is returned as-is.
  return value
}
/**
 * Expands a flat, one-level object into a nested structure by splitting
 * each key on a separator.
 *
 * @example
 * ```
 * // ENV: config__host=0.0.0.0 config__port=3000
 * convertToNested(process.env, { separator: '__' })
 * // {config: { host: '0.0.0.0', port: 3000 } }
 *
 * // ENV: CONFIG__PRIVATE_KEY="my key"
 * // ENV: CONFIG__PUBLIC_KEY="my key"
 * // ENV: CONFIG__ALLOWED_IPS='["127.0.0.1", "localhost"]'
 * convertToNested(process.env, {
 *   separator: '__',
 *   transformKey: camelCase
 * }).config
 * // { privateKey: 'my key', publicKey: 'my key', allowedIps: ['127.0.0.1', 'localhost'] }
 * ```
 * @category Object
 */
export function convertToNested<T = Record<string, unknown>>(
  array: Record<string, unknown>,
  options?: {
    /**
     * Key separator, default `.`
     */
    separator?: string
    /**
     * Key transform function, e.g. `camelCase` or `pascalCase`
     */
    transformKey?: (value: string) => string
    /**
     * Value transform function, by default `JSON.parse` is used.
     */
    transformValue?: (value: unknown) => unknown
  }
): T {
  const separator = options?.separator ?? '.'
  const transformKey = options?.transformKey ?? ((key: string) => key)
  const transformValue =
    options?.transformValue ??
    ((raw: unknown) => {
      // Default behaviour: try to JSON-parse string values,
      // falling back to the raw string when parsing fails.
      if (typeof raw !== 'string') {
        return raw
      }
      try {
        return JSON.parse(raw)
      } catch {
        return raw
      }
    })
  // Build one [keyPath, value] pair per entry, dropping entries whose key
  // contains an empty segment (e.g. leading/trailing separators).
  const pairs: Array<[string[], unknown]> = []
  for (const [rawKey, rawValue] of Object.entries(array)) {
    const keyPath = rawKey.split(separator).map(transformKey)
    if (keyPath.every(Boolean)) {
      pairs.push([keyPath, transformValue(rawValue)])
    }
  }
  // Shallow keys first, so deeper paths merge into already-created branches.
  pairs.sort((left, right) => left[0].length - right[0].length)
  let result = {} as T
  for (const [keyPath, value] of pairs) {
    // Wrap the value in single-key objects, innermost key first.
    const nested = keyPath.reduceRight<unknown>(
      (acc, key) => ({ [key]: acc }),
      value
    )
    result = merge(result, nested)
  }
  return result
}
|
<filename>backups/last.sql
-- MySQL dump 10.13 Distrib 8.0.20, for macos10.15 (x86_64)
--
-- Host: localhost Database: local_wedding
-- ------------------------------------------------------
-- Server version 8.0.23
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!50503 SET NAMES utf8 */;
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
/*!40103 SET TIME_ZONE='+00:00' */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
--
-- Table structure for table `guests`
--
DROP TABLE IF EXISTS `guests`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
-- Guest records; `idImage` is a required FK to `images`.`id`.
CREATE TABLE `guests` (
`id` int NOT NULL AUTO_INCREMENT,
`firstname` varchar(45) COLLATE utf8_unicode_ci DEFAULT NULL,
`lastname` varchar(45) COLLATE utf8_unicode_ci DEFAULT NULL,
`document` varchar(45) COLLATE utf8_unicode_ci DEFAULT NULL,
`created_at` timestamp NULL DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL,
`idImage` int NOT NULL,
PRIMARY KEY (`id`),
KEY `fk_users_images_idx` (`idImage`),
CONSTRAINT `fk_users_images` FOREIGN KEY (`idImage`) REFERENCES `images` (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `guests`
--
LOCK TABLES `guests` WRITE;
/*!40000 ALTER TABLE `guests` DISABLE KEYS */;
INSERT INTO `guests` VALUES (1,'Guillermo','Asto','45219439',NULL,NULL,1);
/*!40000 ALTER TABLE `guests` ENABLE KEYS */;
UNLOCK TABLES;
--
-- Table structure for table `images`
--
DROP TABLE IF EXISTS `images`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
-- Uploaded images. NOTE(review): `idGuest` has no declared FK constraint,
-- unlike guests.idImage / messages.idImage — presumably intentional; verify.
CREATE TABLE `images` (
`id` int NOT NULL AUTO_INCREMENT,
`path` varchar(250) COLLATE utf8_unicode_ci DEFAULT NULL,
`created_at` timestamp NULL DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL,
`name` varchar(250) COLLATE utf8_unicode_ci DEFAULT NULL,
`idGuest` int DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `images`
--
LOCK TABLES `images` WRITE;
/*!40000 ALTER TABLE `images` DISABLE KEYS */;
INSERT INTO `images` VALUES (1,'45219439.jpeg',NULL,NULL,NULL,NULL),(2,'2021/08/08/610fdb2f24001.jpeg','2021-08-08 18:25:03','2021-08-08 18:25:03','banner_maestria.jpeg',1);
/*!40000 ALTER TABLE `images` ENABLE KEYS */;
UNLOCK TABLES;
--
-- Table structure for table `messages`
--
DROP TABLE IF EXISTS `messages`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
-- Guest messages; `idImage` is a required FK to `images`.`id`.
-- NOTE(review): `idGuest` is a plain column with no FK constraint — confirm.
CREATE TABLE `messages` (
`id` int NOT NULL AUTO_INCREMENT,
`message` varchar(250) COLLATE utf8_unicode_ci DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL,
`idImage` int NOT NULL,
`idGuest` int DEFAULT NULL,
`created_at` timestamp NULL DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `fk_messages_images1_idx` (`idImage`),
CONSTRAINT `fk_messages_images1` FOREIGN KEY (`idImage`) REFERENCES `images` (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=9 DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `messages`
--
LOCK TABLES `messages` WRITE;
/*!40000 ALTER TABLE `messages` DISABLE KEYS */;
INSERT INTO `messages` VALUES (1,'<NAME>','2021-08-08 01:31:12',1,1,'2021-08-08 01:31:12'),(2,'121212','2021-08-08 18:11:10',1,1,'2021-08-08 18:11:10'),(3,'23232','2021-08-08 18:13:28',1,1,'2021-08-08 18:13:28'),(4,'asdasd','2021-08-08 18:13:54',1,1,'2021-08-08 18:13:54'),(5,'sdsds','2021-08-08 18:14:12',1,1,'2021-08-08 18:14:12'),(6,'sdsds','2021-08-08 18:14:48',1,1,'2021-08-08 18:14:48'),(7,'sdsds','2021-08-08 18:14:57',1,1,'2021-08-08 18:14:57'),(8,'sdsds','2021-08-08 18:25:03',2,1,'2021-08-08 18:25:03');
/*!40000 ALTER TABLE `messages` ENABLE KEYS */;
UNLOCK TABLES;
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
-- Dump completed on 2021-08-08 10:43:25
|
/**************************************************************************
*
* Copyright (c) 2016-2017 <NAME>, All Rights Reserved.
*
**************************************************************************/
"use strict";
/**
*
*/
define(function ( require ) {
    var CookieManager = require("varmateo/util/CookieManager");
    var Memoizer = require("varmateo/util/Memoizer");
    var FriendsServiceManager =
        require("varmateo/friendnav/FriendsServiceManager");
    var FoursquareFriendsServiceFactory =
        require("varmateo/friendnav/foursquare/FoursquareFriendsServiceFactory");
    /**
     * Factory of lazily-built collaborators for the friend-navigation
     * facade. The Memoizer is given one named builder per collaborator
     * and extends this instance with the corresponding accessors
     * (e.g. getCookieManager(), used below).
     */
    function FnavFacadeFactory () {
        var factory = this;
        var configList = [];
        configList.push({
            name : "CookieManager",
            builder : function () {
                return new CookieManager();
            },
        });
        configList.push({
            name : "FoursquareFriendsServiceFactory",
            builder : function () {
                return new FoursquareFriendsServiceFactory();
            },
        });
        configList.push({
            name : "FriendsServiceManager",
            builder : function () {
                return factory._newFriendsServiceManager();
            },
        });
        new Memoizer(configList).extendTo(this);
    }
    /**
     * Builds the FriendsServiceManager, wiring in the cookie manager
     * and the per-provider friends service factories.
     */
    FnavFacadeFactory.prototype._newFriendsServiceManager = function () {
        var serviceFactories = {
            "foursquare" : this.getFoursquareFriendsServiceFactory(),
        };
        return new FriendsServiceManager(
            this.getCookieManager(),
            serviceFactories);
    }
    return FnavFacadeFactory;
});
|
import React from 'react';
import { useDispatch, useSelector } from 'react-redux';
import Header from 'src/components/Header';
import { actionSetServer, serverSelector } from 'src/module/Setting';
import { IServer, MAINNET_SERVER, TESTNET_SERVER } from 'src/services';
import styled from 'styled-components';
// Props for one selectable network row rendered by NetworkItem.
interface INetworkItem {
    title: string; // display name of the network
    desc: string; // secondary line (the caller passes the server address)
    onChangeNetwork?: () => void; // optional; invoked when the row is clicked
    serverID: string; // compared to the active server id to highlight the row
}
// Container styles for the network list; the whole area is clickable.
const Styled = styled.div`
    cursor: pointer;
    .network-item {
        margin-bottom: 30px;
    }
    .network-item .title {
        margin-bottom: 15px;
    }
    .network-item .desc {
    }
`;
// One selectable network row; highlighted when it matches the active server.
const NetworkItem = React.memo((props: INetworkItem) => {
    const { title, desc, onChangeNetwork, serverID } = props;
    const server = useSelector(serverSelector);
    const isSelected = server.id === serverID;
    const handleClick = () => {
        if (typeof onChangeNetwork === 'function') {
            onChangeNetwork();
        }
    };
    return (
        <div className={`network-item ${isSelected ? 'main-text' : 'sub-text'}`} onClick={handleClick}>
            <p className="title fs-medium">{title}</p>
            <p className="desc fs-regular">{desc}</p>
        </div>
    );
});
// Network-selection screen: lists mainnet/testnet and switches on click.
const Network = React.memo(() => {
    const currentServer = useSelector(serverSelector);
    const dispatch = useDispatch();
    // Only switch when targeting a different, non-local server.
    const handleChangeNetwork = async (server: IServer) => {
        const isSameServer = currentServer.id === server.id;
        if (!isSameServer && server.id !== 'local') {
            await dispatch(actionSetServer(server));
        }
    };
    const selectableServers = [MAINNET_SERVER, TESTNET_SERVER];
    return (
        <Styled>
            <Header />
            {selectableServers.map((server) => (
                <NetworkItem
                    key={server.id}
                    serverID={server.id}
                    title={server.name}
                    desc={server.address}
                    onChangeNetwork={() => handleChangeNetwork(server)}
                />
            ))}
        </Styled>
    );
});
export default Network;
|
#!/usr/bin/env bash
# Clone and build darknet (with OpenCV support) under a scratch root.

# Root path where darknet will be cloned and built.
export DARKNET_ROOT=/tmp/darknet

# Remove any previous checkout. Quoted to avoid word-splitting/globbing
# surprises should the root path ever contain spaces.
rm -rf "${DARKNET_ROOT}"

# Clone the repository.
darknet.py darknet clone

# Build with OpenCV enabled.
darknet.py darknet build --opencv
|
import ApiClient from '../api/client';
import Builder from '../api/builder';
import {
AUTH_ERROR_SCHEMA,
MESSAGE_SCHEMA,
} from '../schemas/common';
import ADD_BALANCE_SCHEMA from '../schemas/balance';
import Pay from '../models/payModel';
import PAY_SCHEMA from '../schemas/pay';
import Balance from '../models/balanceModel';
describe('Pay', () => {
  // Shared setup: register a user and prepare info, store and item.
  // Balance handling is intentionally left to each individual test.
  const prepareBuilder = async (client) => {
    const builder = await new Builder(client);
    await builder.register();
    await builder.auth();
    await builder.addInfo();
    await builder.addStore();
    await builder.addItem();
    return builder;
  };
  it('with valid data', async () => {
    const { client } = await new ApiClient().authorized();
    const builder = await prepareBuilder(client);
    await builder.addBalance();
    builder.build();
    const data = new Pay(builder.item.itemId);
    const response = await client.pay.pay(builder.userId, data, PAY_SCHEMA);
    expect(response.status).toBe(200);
    expect(response.data.message).toBe('Payment was successful');
    expect(response.data.balance).toBe(builder.balance.balance - builder.item.price);
    expect(response.data.price).toBe(builder.item.price);
  });
  it('with none exist user id', async () => {
    const userId = 1000;
    const { client } = await new ApiClient().authorized();
    const data = new Pay(1);
    const response = await client.pay.pay(userId, data, MESSAGE_SCHEMA);
    expect(response.status).toBe(404);
    expect(response.data.message).toBe('User not found');
  });
  it('with out access token', async () => {
    const client = await new ApiClient().unauthorized();
    const userId = 1;
    const data = new Pay(1);
    const response = await client.pay.pay(userId, data, AUTH_ERROR_SCHEMA);
    expect(response.status).toBe(401);
    expect(response.data.description).toBe('Request does not contain an access token');
    expect(response.data.error).toBe('Authorization Required');
    expect(response.data.status_code).toBe(401);
  });
  it('with none exist item id', async () => {
    const { client } = await new ApiClient().authorized();
    const builder = await prepareBuilder(client);
    await builder.addBalance();
    builder.build();
    const data = new Pay(1000);
    const response = await client.pay.pay(builder.userId, data, MESSAGE_SCHEMA);
    expect(response.status).toBe(404);
    expect(response.data.message).toBe('Item not found');
  });
  it('with value of the goods is greater than the balance', async () => {
    const { client } = await new ApiClient().authorized();
    const builder = await prepareBuilder(client);
    builder.build();
    // Fund the account with less than the item costs.
    const balance = 10;
    const dataBalance = new Balance(balance).random();
    const responseBalance = await client.balance.addBalance(builder.userId, dataBalance,
      ADD_BALANCE_SCHEMA);
    expect(responseBalance.status).toBe(201);
    const data = new Pay(builder.item.itemId);
    const response = await client.pay.pay(builder.userId, data, MESSAGE_SCHEMA);
    expect(response.status).toBe(400);
    expect(response.data.message).toBe(`Not enough money. Your balance is ${balance}.0, item cost ${builder.item.price}.0`);
  });
});
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.