text stringlengths 1 1.05M |
|---|
<filename>node_modules/react-icons-kit/md/ic_center_focus_weak_outline.js
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ic_center_focus_weak_outline = void 0;
var ic_center_focus_weak_outline = {
"viewBox": "0 0 24 24",
"children": [{
"name": "path",
"attribs": {
"d": "M0 0h24v24H0V0z",
"fill": "none"
},
"children": []
}, {
"name": "path",
"attribs": {
"d": "M5 15H3v4c0 1.1.9 2 2 2h4v-2H5v-4zM5 5h4V3H5c-1.1 0-2 .9-2 2v4h2V5zm7 3c-2.21 0-4 1.79-4 4s1.79 4 4 4 4-1.79 4-4-1.79-4-4-4zm0 6c-1.1 0-2-.9-2-2s.9-2 2-2 2 .9 2 2-.9 2-2 2zm7-11h-4v2h4v4h2V5c0-1.1-.9-2-2-2zm0 16h-4v2h4c1.1 0 2-.9 2-2v-4h-2v4z"
},
"children": []
}]
};
exports.ic_center_focus_weak_outline = ic_center_focus_weak_outline; |
<reponame>GerHobbelt/markdown-js
var Markdown = require("../src/markdown"),
tap = require("tap"),
mk_block = Markdown.mk_block;
/*
* This file contains tests that check various regressions on the individual
* parsers, rather than the parser as a whole.
*/
// Tiny harness around tap.test: every case receives the tap assertion
// object plus a fresh Markdown instance, and is ended automatically so
// individual cases never have to call t.end() themselves.
function test(name, cb) {
  tap.test(name, function (t) {
    var md = new Markdown();
    cb(t, md);
    t.end();
  });
}
test("split_block", function(t, md) {
t.equivalent(
md.split_blocks( "# h1 #\n\npara1\npara1L2\n \n\n\n\npara2\n" ),
[mk_block( "# h1 #", "\n\n", 1 ),
mk_block( "para1\npara1L2", "\n \n\n\n\n", 3 ),
mk_block( "para2", "\n", 9 )
],
"split_block should record trailing newlines");
t.equivalent(
md.split_blocks( "\n\n# heading #\n\npara\n" ),
[mk_block( "# heading #", "\n\n", 3 ),
mk_block( "para", "\n", 5 )
],
"split_block should ignore leading newlines");
});
test("headers", function(t, md) {
t.equivalent(
md.dialect.block.setextHeader.call( md, "h1\n===\n\n", [] ),
[ [ "header", { level: 1 }, "h1" ] ],
"Atx and Setext style H1s should produce the same output" );
t.equivalent(
md.dialect.block.atxHeader.call( md, "# h1\n\n"),
[ [ "header", { level: 1 }, "h1" ] ],
"Closing # optional on atxHeader");
t.equivalent(
md.dialect.block.atxHeader.call( md, "## h2\n\n", [] ),
[["header", {level: 2}, "h2"]],
"Atx h2 has right level");
t.equivalent(
md.dialect.block.setextHeader.call( md, "h2\n---\n\n", [] ),
[["header", {level: 2}, "h2"]],
"Atx and Setext style H2s should produce the same output" );
});
test("code", function(t, md) {
var code = md.dialect.block.code,
next = [ mk_block("next") ];
t.equivalent(
code.call( md, mk_block(" foo\n bar"), next ),
[["code_block", "foo\nbar" ]],
"Code block correct");
t.equivalent(
next, [mk_block("next")],
"next untouched when its not code");
next = [];
t.equivalent(
code.call( md, mk_block(" foo\n bar"), next ),
[["code_block", "foo" ]],
"Code block correct for abutting para");
t.equivalent(
next, [mk_block(" bar")],
"paragraph put back into next block");
t.equivalent(
code.call( md, mk_block(" foo"), [mk_block(" bar") ] ),
[["code_block", "foo\n\nbar" ]],
"adjacent code blocks ");
t.equivalent(
code.call( md, mk_block(" foo","\n \n \n"), [mk_block(" bar") ] ),
[["code_block", "foo\n\n\nbar" ]],
"adjacent code blocks preserve correct number of empty lines");
});
test( "bulletlist", function(t, md) {
var bl = function() { return md.dialect.block.lists.apply(md, arguments); };
t.equivalent(
bl( mk_block("* foo\n* bar"), [] ),
[ [ "bulletlist", [ "listitem", "foo" ], [ "listitem", "bar" ] ] ],
"single line bullets");
t.equivalent(
bl( mk_block("* [text](url)" ), [] ),
[ [ "bulletlist", [ "listitem", [ "link", { href: "url" }, "text" ] ] ] ],
"link in bullet");
t.equivalent(
bl( mk_block("* foo\nbaz\n* bar\nbaz"), [] ),
[ [ "bulletlist", [ "listitem", "foo\nbaz" ], [ "listitem", "bar\nbaz" ] ] ],
"multiline lazy bullets");
t.equivalent(
bl( mk_block("* foo\n baz\n* bar\n baz"), [] ),
[ [ "bulletlist", [ "listitem", "foo\nbaz" ], [ "listitem", "bar\nbaz" ] ] ],
"multiline tidy bullets");
t.equivalent(
bl( mk_block("* foo\n baz"), [] ),
[ [ "bulletlist", [ "listitem", "foo\n baz" ] ] ],
"only trim 4 spaces from the start of the line");
/* Test wrong: should end up with 3 nested lists here
t.equivalent(
bl( mk_block(" * one\n * two\n * three" ), [] ),
[ [ "bulletlist", [ "listitem", "one" ], [ "listitem", "two" ], [ "listitem", "three" ] ] ],
"bullets can be indented up to three spaces");
*/
t.equivalent(
bl( mk_block(" * one"), [ mk_block(" two") ] ),
[ [ "bulletlist", [ "listitem", [ "para", "one" ], [ "para", "two" ] ] ] ],
"loose bullet lists can have multiple paragraphs");
/* Case: no space after bullet - not a list
| *↵
|foo
*/
t.equivalent(
bl( mk_block(" *\nfoo") ),
undefined,
"Space required after bullet to trigger list");
/* Case: note the space after the bullet
| *␣
|foo
|bar
*/
t.equivalent(
bl( mk_block(" * \nfoo\nbar"), [ ] ),
[ [ "bulletlist", [ "listitem", "foo\nbar" ] ] ],
"space+continuation lines", {todo: true} );
/* Case I:
| * foo
| * bar
| * baz
*/
t.equivalent(
bl( mk_block(" * foo\n" +
" * bar\n" +
" * baz"),
[] ),
[ [ "bulletlist",
[ "listitem",
"foo",
[ "bulletlist",
[ "listitem",
"bar",
[ "bulletlist",
[ "listitem", "baz" ]
]
]
]
]
] ],
"Interesting indented lists I");
/* Case II:
| * foo
| * bar
| * baz
*/
t.equivalent(
bl( mk_block(" * foo\n * bar\n * baz"), [] ),
[ [ "bulletlist",
[ "listitem",
"foo",
[ "bulletlist",
[ "listitem", "bar" ]
]
],
[ "listitem", "baz" ]
] ],
"Interesting indented lists II");
/* Case III:
| * foo
| * bar
|* baz
| * fnord
*/
t.equivalent(
bl( mk_block(" * foo\n * bar\n* baz\n * fnord"), [] ),
[ [ "bulletlist",
[ "listitem",
"foo",
[ "bulletlist",
[ "listitem", "bar" ],
[ "listitem", "baz" ],
[ "listitem", "fnord" ]
]
]
] ],
"Interesting indented lists III");
/* Case IV:
| * foo
|
| 1. bar
*/
t.equivalent(
bl( mk_block(" * foo"), [ mk_block(" 1. bar\n") ] ),
[ [ "bulletlist",
["listitem", ["para", "foo"] ],
["listitem", ["para", "bar"] ]
] ],
"Different lists at same indent IV");
/* Case V:
| * foo
| * bar
| * baz
*/
t.equivalent(
bl( mk_block(" * foo\n * bar\n * baz"), [] ),
[ [ "bulletlist",
[ "listitem",
"foo",
[ "bulletlist",
["listitem", "bar"],
["listitem", "baz"]
]
]
] ],
"Indenting Case V");
/* Case VI: deep nesting
|* one
| * two
| * three
| * four
*/
t.equivalent(
bl( mk_block("* one\n * two\n * three\n * four"), [] ),
[ [ "bulletlist",
[ "listitem",
"one",
[ "bulletlist",
[ "listitem",
"two",
[ "bulletlist",
[ "listitem",
"three",
[ "bulletlist",
[ "listitem", "four" ]
]
]
]
]
]
]
] ],
"deep nested lists VI");
/* Case VII: This one is just fruity!
| * foo
| * bar
| * baz
|* HATE
| * flibble
| * quxx
| * nest?
| * where
| * am
| * i?
*/
t.equivalent(
bl( mk_block(" * foo\n" +
" * bar\n" +
" * baz\n" +
"* HATE\n" +
" * flibble\n" +
" * quxx\n" +
" * nest?\n" +
" * where\n" +
" * am\n" +
" * i?"),
[] ),
[ [ "bulletlist",
[ "listitem",
"foo",
[ "bulletlist",
["listitem", "bar"],
["listitem", "baz"],
["listitem", "HATE"],
["listitem", "flibble"]
]
],
[ "listitem",
"quxx",
[ "bulletlist",
[ "listitem",
"nest?",
[ "bulletlist",
["listitem", "where"],
["listitem", "am"],
["listitem", "i?"]
]
]
]
]
] ],
"Indenting Case VII");
/* Case VIII: Deep nesting + code block
| * one
| * two
| * three
| * four
|
| foo
*/
t.equivalent(
bl( mk_block(" * one\n" +
" 1. two\n" +
" * three\n" +
" * four",
"\n\n"),
[ mk_block(" foo") ] ),
[ [ "bulletlist",
[ "listitem",
["para", "one"],
[ "numberlist",
[ "listitem",
["para", "two"],
[ "bulletlist",
[ "listitem",
[ "para", "three\n * four"],
["code_block", "foo"]
]
]
]
]
]
] ],
"Case VIII: Deep nesting and code block");
});
test( "horizRule", function(t, md) {
var hr = md.dialect.block.horizRule,
strs = ["---", "_ __", "** ** **", "--- "];
strs.forEach( function(s) {
t.equivalent(
hr.call( md, mk_block(s), [] ),
[ [ "hr" ] ],
"simple hr from " + require("util").inspect(s));
});
});
test( "blockquote", function(t, md) {
var bq = md.dialect.block.blockquote;
t.equivalent(
bq.call( md, mk_block("> foo\n> bar"), [] ),
[ ["blockquote", ["para", "foo\nbar"] ] ],
"simple blockquote");
// Note: this tests horizRule as well through block processing.
t.equivalent(
bq.call( md, mk_block("> foo\n> bar\n>\n>- - - "), [] ),
[ ["blockquote",
["para", "foo\nbar"],
["hr"]
] ],
"blockquote with interesting content");
t.equivalent(
bq.call( md, mk_block( 'p\n> a', '\n\n', 1 ), [ mk_block( '> b', '\n', 4) ] ),
[ [ 'para', 'p' ],
[ 'blockquote',
[ 'para', 'a' ],
[ 'para', 'b' ]
]
],
"blockquote with abutting paragraph");
});
test( "referenceDefn", function(t, md) {
var rd = md.dialect.block.referenceDefn;
[ '[id]: http://example.com/ "Optional Title Here"',
"[id]: http://example.com/ 'Optional Title Here'",
'[id]: http://example.com/ (Optional Title Here)'
].forEach( function(s) {
md.tree = ["markdown"];
t.equivalent(rd.call( md, mk_block(s) ), [], "ref processed");
t.equivalent(md.tree[ 1 ].references,
{ "id": { href: "http://example.com/", title: "Optional Title Here" } },
"reference extracted");
});
// Check a para abbuting a ref works right
md.tree = ["markdown"];
var next = [];
t.equivalent(rd.call( md, mk_block("[id]: example.com\npara"), next ), [], "ref processed");
t.equivalent(md.tree[ 1 ].references, { "id": { href: "example.com" } }, "reference extracted");
t.equivalent(next, [ mk_block("para") ], "paragraph put back into blocks");
});
test( "inline_br", function(t, md) {
t.equivalent(
md.processInline("foo \n\\[bar"),
[ "foo", ["linebreak"], "[bar" ], "linebreak+escape");
});
test( "inline_escape", function(t, md) {
t.equivalent( md.processInline("\\bar"), [ "\\bar" ], "invalid escape" );
t.equivalent( md.processInline("\\>"), [ ">" ], "escapes >" );
t.equivalent( md.processInline("\\*foo*"), [ "*foo*" ], "escaped em" );
});
test( "inline_code", function(t, md) {
t.equivalent( md.processInline("`bar`"), [ ["inlinecode", "bar" ] ], "code I" );
t.equivalent( md.processInline("``b`ar``"), [ ["inlinecode", "b`ar" ] ], "code II" );
t.equivalent( md.processInline("```bar``` baz"), [ ["inlinecode", "bar" ], " baz" ], "code III" );
});
test( "inline_strong_em", function(t, md) {
// Yay for horrible edge cases >_<
t.equivalent( md.processInline("foo *abc* bar"), [ "foo ", ["em", "abc" ], " bar" ], "strong/em I" );
t.equivalent( md.processInline("*abc `code`"), [ "*abc ", ["inlinecode", "code" ] ], "strong/em II" );
t.equivalent( md.processInline("*abc**def* after"), [ ["em", "abc**def" ], " after" ], "strong/em III" );
t.equivalent( md.processInline("*em **strong * wtf**"), [ ["em", "em **strong " ], " wtf**" ], "strong/em IV" );
t.equivalent( md.processInline("*foo _b*a*r baz"), [ [ "em", "foo _b" ], "a*r baz" ], "strong/em V" );
});
test( "inline_img", function(t, md) {
t.equivalent( md.processInline( "![alt] (url)" ),
[ [ "img", { href: "url", alt: "alt" } ] ],
"inline img I" );
t.equivalent( md.processInline( "" ),
[ [ "img", { href: "url", alt: "alt", title: "title" } ] ],
"inline img II" );
t.equivalent( md.processInline( "![alt] (url 'tit'le') after')" ),
[ [ "img", { href: "url", alt: "alt", title: "tit'le" } ], " after')" ],
"inline img III" );
t.equivalent( md.processInline( "![alt] (url \"title\")" ),
[ [ "img", { href: "url", alt: "alt", title: "title" } ] ],
"inline img IV" );
t.equivalent( md.processInline( '' ),
[ [ "img", { href: "/path/to/img\\.jpg", alt: "Alt text", title: "Optional title" } ] ],
"inline img IV" );
t.equivalent( md.processInline( "![alt][id]" ),
[ [ "img_ref", { ref: "id", alt: "alt", original: "![alt][id]" } ] ],
"ref img I" );
t.equivalent( md.processInline( "![alt] [id]" ),
[ [ "img_ref", { ref: "id", alt: "alt", original: "![alt] [id]" } ] ],
"ref img II" );
t.equivalent( md.processInline( ".jpg)" ),
[ ["img", { href: "http://example.com/(parens).jpg", alt: "contains parens"} ] ],
"images with parentheses in the URL" );
});
test( "inline_link", function(t, md) {
t.equivalent( md.processInline( "[text] (url)" ),
[ [ "link", { href: "url" }, "text" ] ],
"inline link I" );
t.equivalent( md.processInline( "[text](url 'title')" ),
[ [ "link", { href: "url", title: "title" }, "text" ] ],
"inline link II" );
t.equivalent( md.processInline( "[text](url 'title')" ),
[ [ "link", { href: "url", title: "title" }, "text" ] ],
"inline link II" );
t.equivalent( md.processInline( "[text](url\t\t'title')" ),
[ [ "link", { href: "url", title: "title" }, "text" ] ],
"inline link II" );
t.equivalent( md.processInline( "[text](url 'tit'le') after')" ),
[ [ "link", { href: "url", title: "tit'le" }, "text" ], " after')" ],
"inline link III" );
t.equivalent( md.processInline( "[text](url \"title\")" ),
[ [ "link", { href: "url", title: "title" }, "text" ] ],
"inline link IV" );
t.equivalent( md.processInline( "[text][id]" ),
[ [ "link_ref", { ref: "id", original: "[text][id]" }, "text" ] ],
"ref link I" );
t.equivalent( md.processInline( "[text] [id]" ),
[ [ "link_ref", { ref: "id", original: "[text] [id]" }, "text" ] ],
"ref link II" );
/* jshint indent: false */
t.equivalent( md.processInline( "[to put it another way][SECTION 1] or even [link this](#SECTION-1)" ),
[
[ "link_ref",
{ ref: "section 1", original: "[to put it another way][SECTION 1]" },
"to put it another way"
],
" or even ",
[ "link",
{ href: "#SECTION-1" },
"link this"
]
],
"ref link II" );
/* jshint indent: 2 */
});
test( "inline_autolink", function(t, md) {
t.equivalent( md.processInline( "<http://foo.com>" ),
[ [ "link", { href: "http://foo.com" }, "http://foo.com" ] ],
"autolink I" );
t.equivalent( md.processInline( "<mailto:<EMAIL>>" ),
[ [ "link", { href: "mailto:<EMAIL>" }, "<EMAIL>" ] ],
"autolink II" );
t.equivalent( md.processInline( "<<EMAIL>>" ),
[ [ "link", { href: "mailto:<EMAIL>" }, "<EMAIL>" ] ],
"autolink III" );
});
test( "line_endings", function(t, md) {
// try to generate this tree with all types of line ending
var tree = [ "markdown", [ "para", "Foo" ], [ "para", "Bar" ] ];
t.equivalent( md.toTree( "Foo\n\nBar", [ "markdown" ] ), tree, "Unix line endings" );
t.equivalent( md.toTree( "Foo\r\n\r\nBar", [ "markdown" ] ), tree, "Windows line endings" );
t.equivalent( md.toTree( "Foo\r\rBar", [ "markdown" ] ), tree, "Mac line endings" );
t.equivalent( md.toTree( "Foo\r\n\nBar", [ "markdown" ] ), tree, "Mixed line endings" );
});
test( "header_in_paragraph", function(t, md){
var tree = [ "markdown",
[ "para", "Foo" ],
[ "header", { level: 1 }, "Title" ],
[ "para", "Bar" ] ];
t.equivalent( md.toTree("Foo\n#Title\nBar", [ "markdown" ]), tree, "Header in praragraph" );
t.equivalent( md.toTree("Foo\n\n#Title\n\nBar", [ "markdown" ]), tree, "Header in praragraph" );
});
|
<gh_stars>100-1000
# Spec bootstrap: ChefSpec plus its Berkshelf (cookbook dependency
# resolution) and cacher (runner memoisation) extensions.
require 'chefspec'
require 'chefspec/berkshelf'
require 'chefspec/cacher'

# Print the ChefSpec resource-coverage report after the suite finishes.
at_exit { ChefSpec::Coverage.report! }
module KubernetesCookbook
  # Helpers mixed into every example group (wired up in RSpec.configure).
  module SpecHelper
    # Allows testing recipes in isolation
    def global_stubs_include_recipe
      # Don't worry about external cookbook dependencies
      allow_any_instance_of(Chef::Cookbook::Metadata).to receive(:depends)
      # Test each recipe in isolation, regardless of includes
      @included_recipes = []
      allow_any_instance_of(Chef::RunContext).to receive(:loaded_recipe?).and_return(false)
      allow_any_instance_of(Chef::Recipe).to receive(:include_recipe) do |i|
        # After the first include, report every recipe as already loaded so
        # nested include_recipe calls are recorded but not evaluated.
        allow_any_instance_of(Chef::RunContext).to receive(:loaded_recipe?).and_return(true)
        @included_recipes << i
      end
      # Expose the recorded list so specs can assert which recipes were
      # included.
      allow_any_instance_of(Chef::RunContext).to receive(:loaded_recipe).and_return(@included_recipes)
    end
  end
end
# Global RSpec/ChefSpec configuration: pin the simulated platform, seed and
# randomise example order, and mix in the helper module defined above.
RSpec.configure do |config|
  config.platform = 'redhat'
  config.version = '7.1'
  config.color = true
  # `describe_recipe` blocks behave like example groups tagged :recipe.
  config.alias_example_group_to :describe_recipe, type: :recipe
  Kernel.srand config.seed
  config.order = :random
  # Verbose output only when a single spec file is being run.
  config.default_formatter = 'doc' if config.files_to_run.one?
  config.expect_with :rspec do |expectations|
    expectations.syntax = :expect
  end
  config.mock_with :rspec do |mocks|
    mocks.syntax = :expect
    # Reject stubs of methods the real object does not respond to.
    mocks.verify_partial_doubles = true
  end
  config.include KubernetesCookbook::SpecHelper
end
|
// Wiring module: wraps the RecentActivity component with its store
// connector and re-exports the connected component as the default.
import RecentActivity from './RecentActivity'
import connector from './RecentActivity.connector'

export default connector(RecentActivity)
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.sparql.graph;
import java.util.Iterator ;
import java.util.List ;
import org.apache.jena.atlas.iterator.Iter ;
import org.apache.jena.graph.Graph ;
import org.apache.jena.graph.Node ;
import org.apache.jena.graph.Triple ;
import org.apache.jena.sparql.core.DatasetGraph ;
import org.apache.jena.sparql.core.Quad ;
// Combine with Jena GraphUtils.
/**
 * Static helpers for resolving graph names against a {@link DatasetGraph}
 * and for bulk add/delete of triples on a {@link Graph}.
 */
public class GraphOps
{
    /**
     * Answers whether {@code gn} denotes a graph present in {@code dsg}.
     * The default graph and the union graph always count as present.
     */
    public static boolean containsGraph(DatasetGraph dsg, Node gn)
    {
        // [[DynDS]]
        if ( Quad.isDefaultGraph(gn))
            return true ;
        if ( Quad.isUnionGraph(gn))
            return true ;
        return dsg.containsGraph(gn) ;
    }

    /**
     * Resolves {@code gn} to a graph: null or the default-graph marker yield
     * the dataset's default graph, the union marker yields a read-only union
     * over all named graphs, anything else is looked up by name.
     */
    public static Graph getGraph(DatasetGraph dsg, Node gn)
    {
        // [[DynDS]]
        if ( gn == null )
            return dsg.getDefaultGraph() ;
        if ( Quad.isDefaultGraph(gn) )
            // Explicit or generated.
            return dsg.getDefaultGraph() ;
        if ( Quad.isUnionGraph(gn))
            return unionGraph(dsg) ;
        return dsg.getGraph(gn) ;
    }

    /**
     * Builds a read-only union view over the named graphs of {@code dsg}.
     * NOTE(review): the graph-name list is materialised once here, so graphs
     * added to the dataset afterwards are not reflected — confirm intended.
     */
    public static Graph unionGraph(DatasetGraph dsg)
    {
        List<Node> x = Iter.toList(dsg.listGraphNodes()) ;
        return new GraphUnionRead(dsg, x) ;
    }

    /** Adds every triple from {@code iter} to {@code g}, closing the iterator. */
    public static void addAll(Graph g, Iterator<Triple> iter)
    {
        while(iter.hasNext())
            g.add(iter.next()) ;
        Iter.close(iter) ;
    }

    /** Adds every triple from {@code iter} to {@code g}. */
    public static void addAll(Graph g, Iterable<Triple> iter)
    {
        addAll(g, iter.iterator()) ;
    }

    /** Deletes every triple from {@code iter} out of {@code g}, closing the iterator. */
    public static void deleteAll(Graph g, Iterator<Triple> iter)
    {
        while(iter.hasNext())
            g.delete(iter.next()) ;
        Iter.close(iter) ;
    }

    /** Deletes every triple from {@code iter} out of {@code g}. */
    public static void deleteAll(Graph g, Iterable<Triple> iter)
    {
        deleteAll(g, iter.iterator()) ;
    }
}
|
<reponame>nabex/funtastic<gh_stars>0
// Share-setting flags, persisted in localStorage.setting. The encoding uses
// decimal masks: 1 = share phone, 10 = share mail, 11 = both (see
// setPhoneShare / setMailShare / loadRangeSetting). Fix: declared with
// `var` instead of being created as an implicit global assignment.
var shareSetting = 11;
// Persist the user-settings form fields into localStorage.contact (as JSON)
// and return to the first page.
function setUserInfo(){
    $(function(){
        var users={
            "Name":$("#userChangeName").val(),
            //"Id":$("#userId").val(),
            "Phone":$("#userChangePhone").val(),
            "Mail":$("#userChangeMail").val(),
            //"Pass1":$("#userPass1").val(),
            //"Pass2":$("#userPass2").val()
        };
        localStorage.contact=JSON.stringify(users);
        // NOTE(review): users["Id"] is never populated (the "Id" line above
        // is commented out), so this persists the string "undefined" —
        // confirm whether userId storage is still wanted.
        localStorage.setItem("userId", users["Id"]);
        Materialize.toast("Saved user information",2000);
        PageControll(0);
    });
}
// Populate the user-settings form from the contact record persisted in
// localStorage; does nothing when no record has been saved yet.
function showUserInfo(){
    $(function(){
        if(localStorage.contact===void 0){
            return;
        }
        var saved=JSON.parse(localStorage.contact);
        $("#userChangeName").val(saved["Name"]);
        $("#userChangePhone").val(saved["Phone"]);
        $("#userChangeMail").val(saved["Mail"]);
    });
}
// Render the saved contact details into the slide-out menu; no-op when
// nothing has been stored in localStorage yet.
function showUserMenu(){
    $(function(){
        if(localStorage.contact===void 0){
            return;
        }
        var saved=JSON.parse(localStorage.contact);
        $("#userNameM").html(saved["Name"]);
        $("#userPhoneM").html(saved["Phone"]);
        $("#userMailM").html(saved["Mail"]);
    });
}
// Initialise the share-range settings page: wire the contents-range click
// handler, configure the Materialize dropdown, and restore the persisted
// share flags into the two checkboxes.
function loadRangeSetting(){
    $(function(){
        $('#rangeContents').on('click',setContentsRange);
        $('.dropdown-button').dropdown({
            inDuration: 300,
            outDuration: 225,
            constrain_width: false, // Does not change width of dropdown to that of the activator
            hover: true, // Activate on hover
            gutter: 0, // Spacing from edge
            belowOrigin: false, // Displays dropdown below the button
            alignment: 'left' // Displays dropdown with edge aligned to the left of button
        });
        if(!(localStorage.setting=== void 0)){
            // localStorage stores strings; the bitwise tests below coerce
            // the value back to a number.
            shareSetting=localStorage.setting;
            // Flag scheme: decimal mask 1 = phone shared, decimal mask 10 =
            // mail shared (valid states: 0, 1, 10, 11). Fix: the masks were
            // written as octal `01` literals, which are rejected in strict
            // mode; `1` has the identical value.
            if((shareSetting&1)==1){
                $('input[name="shareRange1"]').prop("checked",true);
            }
            if((shareSetting&10)==10){
                $('input[name="shareRange2"]').prop("checked",true);
            }
        }
    });
}
// Pick a device contact via the Cordova contacts plugin and copy its
// display name, first phone number (dashes stripped) and first e-mail
// address into the form fields; fields are cleared when the contact lacks
// that piece of data.
function searchUser(){
    $(function(){
        navigator.contacts.pickContact(function(contact){
            //alert('The following contact has been selected:' + JSON.stringify(contact));
            $(".userName").val(contact.displayName);
            if(contact.phoneNumbers!=null){
                // Normalise "090-1234-5678" style numbers to digits only.
                var phone = contact.phoneNumbers[0].value.replace(/-/g,"");
                //Materialize.toast(phone,2000);
                $(".userPhone").val(phone);
            }else{
                $(".userPhone").val("");
            }
            if(contact.emails!=null){
                $(".userMail").val(contact.emails[0].value);
            }else{
                $(".userMail").val("");
            }
            //alert('The following contact has been selected:' + JSON.stringify(contact));
        },function(err){
            Materialize.toast('Error: ' + err,2000,'red');
        });
    });
}
// Click handler for the contents-range dropdown: persists the selected
// range (read from the clicked item's data-nono attribute) and updates the
// global mode flag.
function setContentsRange(e){
    var contentsR = e.target.getAttribute('data-nono');
    localStorage.rangeSetting=contentsR;
    // NOTE(review): whoAmI is a global defined elsewhere; "0" apparently
    // marks a mode that must not be overridden here — confirm the intended
    // meanings of "1" and "11" against the rest of the app.
    if(whoAmI!="0"){
        if(contentsR==0)
            whoAmI = "11";
        else
            whoAmI = "1";
    }
}
// Toggle the "share phone number" flag (decimal mask 1) and persist the
// combined setting. Fix: the mask was the octal literal `01` (same value,
// but octal literals are rejected in strict mode).
function setPhoneShare(){
    shareSetting ^= 1;
    localStorage.setting=shareSetting;
}
// Toggle the "share e-mail" flag and persist the combined setting.
// NOTE(review): the mask is decimal 10 — consistent with the decimal
// digit-flag scheme used in loadRangeSetting, but easily misread as binary
// 0b10; confirm this encoding is intentional before changing it.
function setMailShare(){
    shareSetting ^=10;
    localStorage.setting=shareSetting;
}
|
# Generate CHANGELOG_LAST.md from changes since the last version tag. (vx.y.z-dev tags are ignored)
# CHANGELOG_LAST.md can be edited manually if required and manually added to docs/CHANGELOG.md
#
# Requirements:
#   npm install -g auto-changelog
#
# Usage:
#   changelog-gen <next-version>
#
# Example: if the latest verision is v5.6.7 the followings can be used for bugfix, minor or major versions:
#   changelog-gen v5.6.8
#   changelog-gen v5.7.0
#   changelog-gen v6.0.0

# Fail early when the required <next-version> argument is missing.
if [ -z "$1" ]; then
  echo "Usage: changelog-gen <next-version>" >&2
  exit 1
fi

# Fixes: the version argument is quoted, and the tag pattern is quoted with
# its dots escaped — previously each unescaped "." matched any character.
auto-changelog -t changelog-template.hbs -l false --latest-version "$1" --unreleased-only --tag-pattern '^v[0-9]+\.[0-9]+\.[0-9]+$' -o CHANGELOG_LAST.md
|
#!/bin/bash
# conda-build script for the R package NHPoisson.
set -o errexit -o pipefail

# Build from source on Linux/Windows/macOS-64; otherwise copy the pre-built
# package tree straight into the R library.
if [[ ${target_platform} =~ linux.* ]] || [[ ${target_platform} == win-32 ]] || [[ ${target_platform} == win-64 ]] || [[ ${target_platform} == osx-64 ]]; then
  export DISABLE_AUTOBREW=1
  ${R} CMD INSTALL --build .
else
  mkdir -p "${PREFIX}"/lib/R/library/NHPoisson
  mv ./* "${PREFIX}"/lib/R/library/NHPoisson
  # NOTE(review): osx-64 is already handled by the first branch, so this
  # inner block looks unreachable — confirm whether it is template residue.
  if [[ ${target_platform} == osx-64 ]]; then
    pushd "${PREFIX}"
    # Rewrite absolute install names baked in by the upstream build so the
    # shared objects resolve against the conda prefix instead.
    for libdir in lib/R/lib lib/R/modules lib/R/library lib/R/bin/exec sysroot/usr/lib; do
      pushd "${libdir}" || exit 1
      while IFS= read -r -d '' SHARED_LIB
      do
        echo "fixing SHARED_LIB ${SHARED_LIB}"
        # Each rewrite is best-effort (|| true): not every library links
        # against every dependency listed here.
        install_name_tool -change /Library/Frameworks/R.framework/Versions/3.5.0-MRO/Resources/lib/libR.dylib "${PREFIX}"/lib/R/lib/libR.dylib "${SHARED_LIB}" || true
        install_name_tool -change /Library/Frameworks/R.framework/Versions/3.5/Resources/lib/libR.dylib "${PREFIX}"/lib/R/lib/libR.dylib "${SHARED_LIB}" || true
        install_name_tool -change /usr/local/clang4/lib/libomp.dylib "${PREFIX}"/lib/libomp.dylib "${SHARED_LIB}" || true
        install_name_tool -change /usr/local/gfortran/lib/libgfortran.3.dylib "${PREFIX}"/lib/libgfortran.3.dylib "${SHARED_LIB}" || true
        install_name_tool -change /Library/Frameworks/R.framework/Versions/3.5/Resources/lib/libquadmath.0.dylib "${PREFIX}"/lib/libquadmath.0.dylib "${SHARED_LIB}" || true
        install_name_tool -change /usr/local/gfortran/lib/libquadmath.0.dylib "${PREFIX}"/lib/libquadmath.0.dylib "${SHARED_LIB}" || true
        install_name_tool -change /Library/Frameworks/R.framework/Versions/3.5/Resources/lib/libgfortran.3.dylib "${PREFIX}"/lib/libgfortran.3.dylib "${SHARED_LIB}" || true
        install_name_tool -change /usr/lib/libgcc_s.1.dylib "${PREFIX}"/lib/libgcc_s.1.dylib "${SHARED_LIB}" || true
        install_name_tool -change /usr/lib/libiconv.2.dylib "${PREFIX}"/sysroot/usr/lib/libiconv.2.dylib "${SHARED_LIB}" || true
        install_name_tool -change /usr/lib/libncurses.5.4.dylib "${PREFIX}"/sysroot/usr/lib/libncurses.5.4.dylib "${SHARED_LIB}" || true
        install_name_tool -change /usr/lib/libicucore.A.dylib "${PREFIX}"/sysroot/usr/lib/libicucore.A.dylib "${SHARED_LIB}" || true
        install_name_tool -change /usr/lib/libexpat.1.dylib "${PREFIX}"/lib/libexpat.1.dylib "${SHARED_LIB}" || true
        install_name_tool -change /usr/lib/libcurl.4.dylib "${PREFIX}"/lib/libcurl.4.dylib "${SHARED_LIB}" || true
        install_name_tool -change /usr/lib/libc++.1.dylib "${PREFIX}"/lib/libc++.1.dylib "${SHARED_LIB}" || true
        install_name_tool -change /Library/Frameworks/R.framework/Versions/3.5/Resources/lib/libc++.1.dylib "${PREFIX}"/lib/libc++.1.dylib "${SHARED_LIB}" || true
      done < <(find . \( -type f -iname "*.dylib" -or -iname "*.so" -or -iname "R" \) -print0)
      popd
    done
    popd
  fi
fi
|
#! /bin/bash
# Upload test headshot and car photos to a running installation.
#   $1 = base URL of the site under test
BASE_URL=$1
set -e -E -o pipefail
# Shared helpers: login functions, curl_post, check_success, $COOKIES_CURL.
source `dirname $0`/common.sh
# Remove photo staging directories left by earlier runs; /tmp/cleanup is
# touched first so rm always has at least one existing argument.
touch /tmp/cleanup
rm -rf /tmp/headshots* /tmp/carphotos* /tmp/cleanup
# Upload one headshot image ($1 = path) to the "head" photo repository via
# action.php, reusing the stored session cookie jar. Fix: variable
# expansions are quoted so URLs/paths containing spaces survive word
# splitting.
function upload_headshot() {
  curl --location -s -b "$COOKIES_CURL" -c "$COOKIES_CURL" "$BASE_URL/action.php" \
    -X POST -F MAX_FILE_SIZE=2000000 -F photo="@$1" -F action=photo.upload -F repo=head \
    | tee "$DEBUG_CURL" | check_success
}
# Upload one car photo ($1 = path) to the "car" photo repository via
# action.php, reusing the stored session cookie jar. Fix: variable
# expansions are quoted so URLs/paths containing spaces survive word
# splitting.
function upload_car_photo() {
  curl --location -s -b "$COOKIES_CURL" -c "$COOKIES_CURL" "$BASE_URL/action.php" \
    -X POST -F MAX_FILE_SIZE=2000000 -F photo="@$1" -F action=photo.upload -F repo=car \
    | tee "$DEBUG_CURL" | check_success
}
if [ `echo "$BASE_URL" | grep -i localhost` ]; then
# For localhost, just manufacture a tmp directory and copy files locally
tput setaf 2 # green text
echo " " Copying photo files instead of uploading "(localhost)"
tput setaf 0 # black text
PHOTO_DIR=`mktemp -d /tmp/headshots.XXXXXXXX`
# Need world write access to allow web host to create subfolders
chmod 777 "$PHOTO_DIR"
cp `dirname $0`/data/headshots/Cub* "$PHOTO_DIR"
cp `dirname $0`/data/headshots/head* "$PHOTO_DIR"
# CAR_PHOTO_DIR=`mktemp -d 2>/dev/null || mktemp -d /tmp/carphotos.XXXXXXXX`
CAR_PHOTO_DIR=`mktemp -d /tmp/carphotos.XXXXXXXX`
chmod 777 "$CAR_PHOTO_DIR"
cp `dirname $0`/data/carphotos/Car* "$CAR_PHOTO_DIR"
user_login_coordinator
curl_post action.php "action=settings.write&photo-dir=$PHOTO_DIR" | check_success
curl_post action.php "action=settings.write&car-photo-dir=$CAR_PHOTO_DIR" | check_success
curl_post action.php "action=settings.write&show-racer-photos=1&show-racer-photos-checkbox=1" | check_success
else
# For the remote case, assume that directories have been set up, and upload each photo
echo Headshot uploads begin
user_login_photo
for f in `dirname $0`/data/headshots/head-*.jpg `dirname $0`/data/headshots/Cub-*.jpg
do
echo $f
upload_headshot $f
done
echo Car photo uploads begin
for f in `dirname $0`/data/carphotos/Car-*.jpg
do
echo $f
upload_car_photo $f
done
echo Done
fi
|
package pulse.io.export;
import static java.io.File.separator;
import static java.util.Arrays.asList;
import static pulse.io.export.Exporter.getDefaultExportExtension;
import static pulse.util.Group.contents;
import static pulse.util.Reflexive.instancesOf;
import java.io.File;
import java.util.Objects;
import javax.swing.JFrame;
import pulse.tasks.TaskManager;
import pulse.util.Descriptive;
import pulse.util.Group;
/**
* Manages export operations. Provides tools to find a specific exporter
* suitable for a given target and shortcuts for export operations.
*
*/
public class ExportManager {
//current working dir
private static File cwd = null;
private ExportManager() {
// intentionally blank
}
/**
* Finds a suitable exporter for a non-null {@code target} by calling
* {@code findExporter(target.getClass())}.
*
* @param <T> an instance of {@code Descriptive}
* @param target the exported target
* @return an exporter that works for {@code target}
* @see findExporter
*/
@SuppressWarnings("unchecked")
public static <T extends Descriptive> Exporter<T> findExporter(T target) {
Objects.requireNonNull(target);
return (Exporter<T>) findExporter(target.getClass());
}
/**
* Finds an exporter that can work with {@code target}.
* <p>
* Searches through available instances of the {@code Exporter} class
* contained in this package and checks if any of those have their target
* set to the argument of this method, then returns the first occurrence. If
* nothing matches exactly the same class as specified, searches for
* exporters of any classes assignable from {@code target}.
* </p>
*
* @param <T> an instance of {@code Descriptive}
* @param target the target glass
* @return an instance of the Exporter class that can work worth the type T,
* null if nothing has been found
*/
@SuppressWarnings({"unchecked"})
public static <T extends Descriptive> Exporter<T> findExporter(Class<T> target) {
var allExporters = instancesOf(Exporter.class);
var exporter = allExporters.stream().filter(e -> e.target() == target).findFirst();
if (exporter.isPresent()) {
return exporter.get();
} else {
exporter = allExporters.stream().filter(e -> e.target().isAssignableFrom(target)).findFirst();
return exporter.isPresent() ? exporter.get() : null;
}
}
/**
* Finds an exporter matching to {@code target} and allows the user to
* select the location of export.
*
* @param <T> a {@code Descriptive} type
* @param target the target to be exported
* @param parentWindow a frame to which the file chooser dialog will be
* attached
* @param fileTypeLabel a brief description of the exported file types
* @see findExporter
* @see pulse.io.export.Exporter.askToExport()
* @throws IllegalArgumentException if no exporter can be found
*/
public static <T extends Descriptive> void askToExport(T target, JFrame parentWindow, String fileTypeLabel) {
var exporter = findExporter(target);
if (exporter != null) {
cwd = exporter.askToExport(target, parentWindow, fileTypeLabel, cwd);
} else {
throw new IllegalArgumentException("No exporter for " + target.getClass().getSimpleName());
}
}
/**
* Attempts to export the given {@code target} to the {@code directory} by
* saving the contents in a file with the given {@code Extension}.
* <p>
* The file is formatted according to the inherent format, i.e. if it is an
* {@code Extension.HTML} file, it will contain HTML tags, etc. If
* {@code extension} is not present in the list of supported extension of an
* exporter matching {@code target}, this will revert to the first supported
* extension. This method will not have any result if no exporter has been
* found fot {@code target}.
* </p>
*
* @param <T> the target type
* @param target the exported target
* @param directory a pre-selected directory
* @param extension the desired extension
*/
public static <T extends Descriptive> void export(T target, File directory, Extension extension) {
var exporter = findExporter(target);
if (exporter != null) {
var supportedExtensions = exporter.getSupportedExtensions();
if (supportedExtensions.length > 0) {
var confirmedExtension = asList(supportedExtensions).contains(extension) ? extension
: supportedExtensions[0];
exporter.export(target, directory, confirmedExtension);
}
}
}
/**
* This will invoke {@code exportGroup} on each task listed by the
* {@code TaskManager}.
*
* @param directory a pre-selected directory
* @param extension the desired extension
* @see exportGroup
* @see pulse.tasks.TaskManager
*/
public static void exportAllTasks(File directory, Extension extension) {
TaskManager.getManagerInstance().getTaskList().stream().forEach(t -> exportGroup(t, directory, extension));
}
/**
* Exports the currently selected task as a group of objects.
*
* @param directory a pre-selected directory
* @param extension the desired extension
* @see exportGroup
* @see pulse.tasks.TaskManager.getSelectedTask()
*/
public static void exportCurrentTask(File directory, Extension extension) {
exportGroup(TaskManager.getManagerInstance().getSelectedTask(), directory, extension);
}
/**
* Exports the currently selected task as a group of objects using the
* default export extension.
*
* @param directory a pre-selected directory
* @see exportGroup
* @see pulse.tasks.TaskManager.getSelectedTask()
*/
public static void exportCurrentTask(File directory) {
exportCurrentTask(directory, getDefaultExportExtension());
}
/**
 * Exports all results generated previously during task execution for all
 * tasks listed by the TaskManager, provided those tasks had the respective
 * result assigned to them.
 *
 * @param directory a pre-selected directory
 * @param extension the desired extension
 */
public static void exportAllResults(File directory, Extension extension) {
    var instance = TaskManager.getManagerInstance();
    // A single flatMap replaces the previous map(..).flatMap(x -> x.stream())
    // chain; null calculations are skipped before export.
    instance.getTaskList().stream()
            .flatMap(t -> t.getStoredCalculations().stream())
            .filter(Objects::nonNull)
            .forEach(r -> export(r, directory, extension));
}
/**
 * Fully exports {@code group} and all its contents to the root
 * {@code directory} requesting the files to be saved with the
 * {@code extension}.
 * <p>
 * If an {@code Exporter} exists that accepts the {@code group} as its
 * argument, this will create files in the root {@code directory} in
 * accordance to the root {@code Exporter} rules. All contents of the
 * {@code group} will then be processed in a similar manner and the output
 * will be stored in an internal directory, the name of which conforms to
 * the respective description. Note this method is NOT recursive and it
 * calls the {@code export} method of the {@code ExportManager}.
 * </p>
 *
 * @param group a group
 * @param directory a pre-selected root directory
 * @param extension the desired extension
 * @throws IllegalArgumentException if {@code directory} is not a directory
 */
public static void exportGroup(Group group, File directory, Extension extension) {
    if (!directory.isDirectory()) {
        throw new IllegalArgumentException("Not a directory: " + directory);
    }
    // Use the File(parent, child) constructor instead of manual string
    // concatenation with separators -- same resulting path, no platform quirks.
    var internalDirectory = new File(directory, group.describe());
    // NOTE(review): mkdirs() result is ignored; a failed creation surfaces
    // later as failed exports -- confirm this best-effort behaviour is intended.
    internalDirectory.mkdirs();
    export(group, directory, extension);
    contents(group).forEach(internalHolder -> export(internalHolder, internalDirectory, extension));
}
}
|
#include "catch.hpp"
#include "test_helpers.hpp"
using namespace duckdb;
using namespace std;
TEST_CASE("Test scalar LIKE statement", "[like]") {
	unique_ptr<QueryResult> result;
	DuckDB db(nullptr);
	Connection con(db);
	// Each entry pairs a scalar LIKE/NOT LIKE query with the boolean it
	// must evaluate to. The queries are identical to the original list.
	const pair<string, bool> cases[] = {
	    {"SELECT 'aaa' LIKE 'bbb'", false},
	    {"SELECT 'aaa' LIKE 'aaa'", true},
	    {"SELECT 'aaa' LIKE '%'", true},
	    {"SELECT 'aaa' LIKE '%a'", true},
	    {"SELECT 'aaa' LIKE '%b'", false},
	    {"SELECT 'aaa' LIKE 'a%'", true},
	    {"SELECT 'aaa' LIKE 'b%'", false},
	    {"SELECT 'aaa' LIKE 'a_a'", true},
	    {"SELECT 'aaa' LIKE 'a_'", false},
	    {"SELECT 'aaa' LIKE '__%'", true},
	    {"SELECT 'aaa' LIKE '____%'", false},
	    {"SELECT 'ababac' LIKE '%abac'", true},
	    {"SELECT 'ababac' NOT LIKE '%abac'", false},
	};
	for (const auto &c : cases) {
		result = con.Query(c.first);
		REQUIRE(CHECK_COLUMN(result, 0, {Value::BOOLEAN(c.second)}));
	}
}
TEST_CASE("Test LIKE statement", "[like]") {
	DuckDB db(nullptr);
	Connection con(db);
	unique_ptr<QueryResult> res;
	// Populate a table pairing each string with a pattern.
	REQUIRE_NO_FAIL(con.Query("CREATE TABLE strings(s STRING, pat STRING);"));
	REQUIRE_NO_FAIL(con.Query("INSERT INTO strings VALUES ('abab', 'ab%'), "
	                          "('aaa', 'a_a'), ('aaa', '%b%')"));
	// Column matched against a constant pattern.
	res = con.Query("SELECT s FROM strings WHERE s LIKE 'ab%'");
	REQUIRE(CHECK_COLUMN(res, 0, {"abab"}));
	// Constant string matched against the pattern column.
	res = con.Query("SELECT s FROM strings WHERE 'aba' LIKE pat");
	REQUIRE(CHECK_COLUMN(res, 0, {"abab", "aaa", "aaa"}));
	// Column matched against column.
	res = con.Query("SELECT s FROM strings WHERE s LIKE pat");
	REQUIRE(CHECK_COLUMN(res, 0, {"abab", "aaa"}));
}
|
import { dirname } from 'path';
import c from '../colors';
import type { PlatformTarget } from '../common';
import { fatal } from '../errors';
import { resolveNode } from './node';
import { runCommand } from './subprocess';
import type { RunCommandOptions } from './subprocess';
/**
 * Runs the locally installed `native-run` binary with the given arguments
 * and resolves with its output.
 */
export async function runNativeRun(
  args: readonly string[],
  options: RunCommandOptions = {},
): Promise<string> {
  // Locate the native-run binary relative to this module.
  const binaryPath = resolveNode(
    __dirname,
    dirname('native-run/package'),
    'bin/native-run',
  );
  if (!binaryPath) {
    fatal(`${c.input('native-run')} not found.`);
  }
  return runCommand(binaryPath, args, options);
}
/**
 * Lists the device and virtual-device targets that `native-run` reports for
 * the given platform.
 *
 * @param platformName platform to query (e.g. 'android' or 'ios')
 * @returns all targets, with `virtual` set per native-run's classification
 * @throws Error describing native-run's failure, or the original error if it
 *         was not native-run's JSON error payload
 */
export async function getPlatformTargets(
  platformName: string,
): Promise<PlatformTarget[]> {
  try {
    const output = await runNativeRun([platformName, '--list', '--json']);
    const parsedOutput = JSON.parse(output);
    return [
      ...parsedOutput.devices.map((t: any) => ({ ...t, virtual: false })),
      ...parsedOutput.virtualDevices.map((t: any) => ({ ...t, virtual: true })),
    ];
  } catch (e) {
    let err: any;
    try {
      // native-run emits its errors as a JSON payload.
      err = JSON.parse(e);
    } catch {
      // Not native-run's payload (e.g. spawn failure); don't mask it with a
      // secondary JSON.parse error -- rethrow the original.
      throw e;
    }
    // Throw a proper Error (the original threw a raw string, which loses the
    // stack trace and breaks `instanceof Error` checks in callers).
    throw new Error(
      `${c.strong('native-run')} failed with error ${c.strong(err.code)}: ${
        err.error
      }\n\tMore details for this error may be available online: ${c.strong(
        'https://github.com/ionic-team/native-run/wiki/Android-Errors',
      )}\n`,
    );
  }
}
|
#!/bin/bash
# Starts the b2g-scripts test server, runs the test script named by $1
# (e.g. "unit" -> unit.sh), then kills the server and exits with the test
# script's status. Requires GAIA_DOMAIN and GAIA_PORT in the environment.
DIR=`cd $(dirname ${BASH_SOURCE[0]}); pwd`;
TYPE=$1
PID_FILE="$DIR/$GAIA_DOMAIN-$GAIA_PORT-server.pid";
if [ -s "$PID_FILE" ];
then
	# make sure its dead
	echo "Cleaning up dead server."
	kill `cat "$PID_FILE"`
	rm "$PID_FILE";
else
	echo "Is clean continue."
fi
# NOTE(review): $(foreach ...) is GNU make syntax, not shell; it only expands
# if this line is pre-processed by a Makefile -- confirm that is the case.
$DIR/../../test-agent/node_modules/b2g-scripts/bin/b2g-scripts server \
  --port $GAIA_PORT --gaia $DIR/../../../ $(foreach dir, $(GAIA_ALL_APP_SRCDIRS), --dir $(dir)) &
SERVER_PID=`jobs -p | tail -n 1`;
echo $SERVER_PID > "$PID_FILE";
SCRIPT=$TYPE.sh;
$DIR/$SCRIPT
# Capture the test script's exit status immediately. The original read $?
# after an intervening echo, so it always recorded the echo's (successful)
# status and the script exited 0 even when the tests failed.
EXIT_STATUS=$?;
echo "$DIR/$SCRIPT"
kill $SERVER_PID;
exit $EXIT_STATUS;
|
#!/bin/bash
# Abort on the first failing command and echo each command as it runs.
set -ex
python build.py
|
<reponame>brenopessoa/diana-driver
/*
* Copyright (c) 2019 <NAME> and others
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Apache License v2.0 which accompanies this distribution.
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
* and the Apache License v2.0 is available at http://www.opensource.org/licenses/apache2.0.php.
*
* You may elect to redistribute this code under either of these licenses.
*
* Contributors:
*
* <NAME>
*/
package org.jnosql.diana.cassandra.column;
import jakarta.nosql.Settings;
import org.jnosql.diana.driver.ConfigurationReader;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.wait.strategy.Wait;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Supplier;
import static org.jnosql.diana.cassandra.column.CassandraConfiguration.CASSANDRA_FILE_CONFIGURATION;
/**
 * Singleton supplier of {@link CassandraColumnFamilyManagerFactory} instances
 * backed by a throwaway Cassandra testcontainer.
 */
public enum ManagerFactorySupplier implements Supplier<CassandraColumnFamilyManagerFactory> {

    INSTANCE;

    // Shared Cassandra container, started once when INSTANCE is initialized
    // and reused for every get() call.
    private final GenericContainer cassandra =
            new GenericContainer("cassandra:latest")
                    .withExposedPorts(9042)
                    .waitingFor(Wait.defaultWaitStrategy());

    ManagerFactorySupplier() {
        // Field initializers run before the constructor body, so the
        // container is fully configured by the time it is started here.
        cassandra.start();
    }

    @Override
    public CassandraColumnFamilyManagerFactory get() {
        return new CassandraConfiguration().get(getSettings());
    }

    Settings getSettings() {
        // Start from the file-based configuration, then point the host/port
        // settings at the running container.
        Map<String, Object> configuration = new HashMap<>(ConfigurationReader.from(CASSANDRA_FILE_CONFIGURATION));
        configuration.put("cassandra.host-1", cassandra.getContainerIpAddress());
        configuration.put("cassandra.port", cassandra.getFirstMappedPort());
        return Settings.of(configuration);
    }
}
|
package kbasesearchengine.test.integration;
import java.io.IOException;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Instant;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import kbasesearchengine.common.FileUtil;
import kbasesearchengine.search.FoundHits;
import org.apache.commons.io.FileUtils;
import org.apache.http.HttpHost;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableMap;
import com.mongodb.MongoClient;
import com.mongodb.client.MongoDatabase;
import junit.framework.Assert;
import kbasesearchengine.common.GUID;
import kbasesearchengine.events.StatusEvent;
import kbasesearchengine.events.StatusEventID;
import kbasesearchengine.events.StatusEventProcessingState;
import kbasesearchengine.events.StatusEventType;
import kbasesearchengine.events.StoredStatusEvent;
import kbasesearchengine.events.handler.CloneableWorkspaceClientImpl;
import kbasesearchengine.events.handler.WorkspaceEventHandler;
import kbasesearchengine.events.storage.MongoDBStatusEventStorage;
import kbasesearchengine.events.storage.StatusEventStorage;
import kbasesearchengine.main.LineLogger;
import kbasesearchengine.main.IndexerWorker;
import kbasesearchengine.main.IndexerWorkerConfigurator;
import kbasesearchengine.search.AccessFilter;
import kbasesearchengine.search.ElasticIndexingStorage;
import kbasesearchengine.search.IndexingStorage;
import kbasesearchengine.search.MatchFilter;
import kbasesearchengine.search.ObjectData;
import kbasesearchengine.search.PostProcessing;
import kbasesearchengine.system.TypeFileStorage;
import kbasesearchengine.system.FileLister;
import kbasesearchengine.system.ObjectTypeParsingRulesFileParser;
import kbasesearchengine.system.StorageObjectType;
import kbasesearchengine.system.TypeStorage;
import kbasesearchengine.system.TypeMappingParser;
import kbasesearchengine.system.YAMLTypeMappingParser;
import kbasesearchengine.test.common.TestCommon;
import kbasesearchengine.test.controllers.elasticsearch.ElasticSearchController;
import kbasesearchengine.test.controllers.workspace.WorkspaceController;
import kbasesearchengine.test.data.TestDataLoader;
import us.kbase.auth.AuthToken;
import us.kbase.common.service.JsonClientException;
import us.kbase.common.service.UObject;
import us.kbase.common.test.controllers.mongo.MongoController;
import us.kbase.test.auth2.authcontroller.AuthController;
import us.kbase.workspace.CreateWorkspaceParams;
import us.kbase.workspace.ObjectIdentity;
import us.kbase.workspace.ObjectSaveData;
import us.kbase.workspace.RegisterTypespecParams;
import us.kbase.workspace.RenameObjectParams;
import us.kbase.workspace.SaveObjectsParams;
import us.kbase.workspace.WorkspaceClient;
import us.kbase.workspace.WorkspaceIdentity;
public class IndexerWorkerIntegrationTest {
/* these tests bring up mongodb, elasticsearch, and the workspace and test the worker
 * interactions with those services.
 */
// Shared test infrastructure, started once in prepare() and shut down in
// tearDownClass().
private static AuthController auth;
private static IndexerWorker worker = null;
private static IndexingStorage storage = null;
private static MongoController mongo;
private static MongoClient mc;
private static MongoDatabase db;
private static ElasticSearchController es;
private static WorkspaceController ws;
// Login token for "user1"; reused by tests that build their own ws client.
private static String token1;
// ID of the workspace holding the shared test data loaded in prepare().
private static int wsid;
private static Path tempDirPath;
// Boots MongoDB, the auth server, ElasticSearch and the Workspace service,
// wires an IndexerWorker against them, and loads the type specs and test
// objects used by the tests below.
@BeforeClass
public static void prepare() throws Exception {
TestCommon.stfuLoggers();
tempDirPath = Paths.get(TestCommon.getTempDir()).resolve("IndexerWorkerIntegrationTest");
// should refactor to just use NIO at some point
FileUtils.deleteQuietly(tempDirPath.toFile());
tempDirPath.toFile().mkdirs();
// set up mongo
mongo = new MongoController(
TestCommon.getMongoExe(),
tempDirPath,
TestCommon.useWiredTigerEngine());
mc = new MongoClient("localhost:" + mongo.getServerPort());
final String dbName = "DataStatus";
db = mc.getDatabase(dbName);
// set up auth
auth = new AuthController(
TestCommon.getJarsDir(),
"localhost:" + mongo.getServerPort(),
"IndexerIntTestAuth",
tempDirPath);
final URL authURL = new URL("http://localhost:" + auth.getServerPort() + "/testmode");
System.out.println("started auth server at " + authURL);
TestCommon.createAuthUser(authURL, "user1", "display1");
TestCommon.createAuthUser(authURL, "user2", "display2");
token1 = TestCommon.createLoginToken(authURL, "user1");
final String token2 = TestCommon.createLoginToken(authURL, "user2");
// user1 owns the test data; user2 administers the workspace service.
final AuthToken userToken = new AuthToken(token1, "user1");
final AuthToken wsadmintoken = new AuthToken(token2, "user2");
// set up elastic search
es = new ElasticSearchController(TestCommon.getElasticSearchExe(), tempDirPath);
// set up Workspace
ws = new WorkspaceController(
TestCommon.getWorkspaceVersion(),
TestCommon.getJarsDir(),
"localhost:" + mongo.getServerPort(),
"MOPTestWSDB",
dbName,
wsadmintoken.getUserName(),
authURL,
tempDirPath);
System.out.println("Started workspace on port " + ws.getServerPort());
final Path typesDir = Paths.get(TestCommon.TYPES_REPO_DIR);
final Path mappingsDir = Paths.get(TestCommon.TYPE_MAP_REPO_DIR);
URL wsUrl = new URL("http://localhost:" + ws.getServerPort());
// Unique index prefix per run so reruns don't collide in ES.
final String esIndexPrefix = "test_" + System.currentTimeMillis() + ".";
final HttpHost esHostPort = new HttpHost("localhost", es.getServerPort());
// Logger that just forwards worker output to stdout/stderr.
final LineLogger logger = new LineLogger() {
@Override
public void logInfo(String line) {
System.out.println(line);
}
@Override
public void logError(String line) {
System.err.println(line);
}
@Override
public void logError(Throwable error) {
error.printStackTrace();
}
@Override
public void timeStat(GUID guid, long loadMs, long parseMs, long indexMs) {
}
};
final Map<String, TypeMappingParser> parsers = ImmutableMap.of(
"yaml", new YAMLTypeMappingParser());
final TypeStorage ss = new TypeFileStorage(typesDir, mappingsDir,
new ObjectTypeParsingRulesFileParser(), parsers, new FileLister(), logger);
final StatusEventStorage eventStorage = new MongoDBStatusEventStorage(db);
final WorkspaceClient wsClient = new WorkspaceClient(wsUrl, wsadmintoken);
wsClient.setIsInsecureHttpConnectionAllowed(true); //TODO SEC only do if http
final WorkspaceEventHandler weh = new WorkspaceEventHandler(
new CloneableWorkspaceClientImpl(wsClient));
final ElasticIndexingStorage esStorage = new ElasticIndexingStorage(esHostPort,
FileUtil.getOrCreateSubDir(tempDirPath.toFile(), "esbulk"));
esStorage.setIndexNamePrefix(esIndexPrefix);
storage = esStorage;
final IndexerWorkerConfigurator.Builder wrkCfg = IndexerWorkerConfigurator.getBuilder(
"test", tempDirPath.resolve("WorkerTemp"), logger)
.withStorage(eventStorage, ss, esStorage)
.withEventHandler(weh);
worker = new IndexerWorker(wrkCfg.build());
loadTypes(wsUrl, wsadmintoken);
wsid = (int) loadTestData(wsUrl, userToken);
}
// Creates the shared "MOPTest" workspace as user1 and saves one object per
// file listed below; returns the new workspace's ID.
private static long loadTestData(final URL wsUrl, final AuthToken usertoken)
throws IOException, JsonClientException {
final WorkspaceClient wc = new WorkspaceClient(wsUrl, usertoken);
wc.setIsInsecureHttpConnectionAllowed(true);
final long wsid = wc.createWorkspace(new CreateWorkspaceParams().withWorkspace("MOPTest"))
.getE1();
loadData(wc, wsid, "Narr", "KBaseNarrative.Narrative-1.0", "NarrativeObject1");
loadData(wc, wsid, "Narr", "KBaseNarrative.Narrative-1.0", "NarrativeObject2");
loadData(wc, wsid, "Narr", "KBaseNarrative.Narrative-1.0", "NarrativeObject3");
loadData(wc, wsid, "Narr", "KBaseNarrative.Narrative-1.0", "NarrativeObject4");
loadData(wc, wsid, "Narr", "KBaseNarrative.Narrative-1.0", "NarrativeObject5");
loadData(wc, wsid, "Assy", "KBaseGenomeAnnotations.Assembly-1.0", "AssemblyObject");
loadData(wc, wsid, "Genome", "KBaseGenomes.Genome-1.0", "GenomeObject");
loadData(wc, wsid, "Paired", "KBaseFile.PairedEndLibrary-1.0", "PairedEndLibraryObject");
loadData(wc, wsid, "reads.2", "KBaseFile.SingleEndLibrary-1.0", "SingleEndLibraryObject");
return wsid;
}
// Registers and releases the typespecs the test objects depend on.
private static void loadTypes(final URL wsURL, final AuthToken wsadmintoken) throws Exception {
final WorkspaceClient wc = new WorkspaceClient(wsURL, wsadmintoken);
wc.setIsInsecureHttpConnectionAllowed(true);
loadType(wc, "KBaseFile", "KBaseFile_ci_1477697265343",
Arrays.asList("SingleEndLibrary", "PairedEndLibrary"));
loadType(wc, "KBaseGenomeAnnotations", "KBaseGenomeAnnotations_ci_1471308269061",
Arrays.asList("Assembly"));
loadType(wc, "KBaseGenomes", "KBaseGenomes_ci_1482357978770", Arrays.asList("Genome"));
loadType(wc, "KBaseNarrative", "KBaseNarrative_ci_1436483557716",
Arrays.asList("Narrative"));
}
// Reads a JSON fixture from the test data and saves it into the given
// workspace under the given object name and type.
private static void loadData(
final WorkspaceClient wc,
final long wsid,
final String name,
final String type,
final String fileName)
throws JsonParseException, JsonMappingException, IOException, JsonClientException {
final String data = TestDataLoader.load(fileName);
final Object objdata = new ObjectMapper().readValue(data, Object.class);
wc.saveObjects(new SaveObjectsParams()
.withId(wsid)
.withObjects(Arrays.asList(new ObjectSaveData()
.withData(new UObject(objdata))
.withName(name)
.withType(type))));
}
// Requests module ownership, self-approves it via the admin interface,
// registers the typespec fixture and releases the module.
private static void loadType(
final WorkspaceClient wc,
final String module,
final String fileName,
final List<String> types)
throws IOException, JsonClientException {
final String typespec = TestDataLoader.load(fileName);
wc.requestModuleOwnership(module);
final Map<String, String> cmd = new HashMap<>();
cmd.put("command", "approveModRequest");
cmd.put("module", module);
wc.administer(new UObject(cmd));
wc.registerTypespec(new RegisterTypespecParams()
.withDryrun(0L)
.withSpec(typespec)
.withNewTypes(types));
wc.releaseModule(module);
}
// Tears down every controller started in prepare(); each is null-checked so
// a partially failed setup still cleans up what it started.
@AfterClass
public static void tearDownClass() throws Exception {
final boolean deleteTempFiles = TestCommon.getDeleteTempFiles();
if (auth != null) {
auth.destroy(deleteTempFiles);
}
if (ws != null) {
ws.destroy(deleteTempFiles);
}
if (mc != null) {
mc.close();
}
if (mongo != null) {
mongo.destroy(deleteTempFiles);
}
if (es != null) {
es.destroy(deleteTempFiles);
}
if (tempDirPath != null && tempDirPath.toFile().exists() && deleteTempFiles) {
FileUtils.deleteQuietly(tempDirPath.toFile());
}
}
// Wipes the event database before each test so events don't leak between tests.
@Before
public void init() throws Exception {
TestCommon.destroyDB(db);
}
// Feeds a NEW_VERSION event for the Genome object directly to the worker and
// prints what was indexed; mostly exploratory, with no hard assertions.
@Test
public void testGenomeManually() throws Exception {
final StoredStatusEvent ev = StoredStatusEvent.getBuilder(StatusEvent.getBuilder(
new StorageObjectType("WS", "KBaseGenomes.Genome"),
Instant.now(),
StatusEventType.NEW_VERSION)
.withNullableAccessGroupID(wsid)
.withNullableObjectID("3")
.withNullableVersion(1)
.withNullableisPublic(false)
.build(),
new StatusEventID("-1"),
StatusEventProcessingState.UNPROC)
.build();
worker.processEvent(ev);
PostProcessing pp = new PostProcessing();
pp.objectData = true;
pp.objectKeys = true;
List<String> objectTypes = ImmutableList.of("Genome");
System.out.println("Genome: " + storage.getObjectsByIds(
storage.searchIds(objectTypes,
MatchFilter.getBuilder().withNullableFullTextInAll("test").build(), null,
AccessFilter.create().withAdmin(true), null).guids, pp).get(0));
String query = "TrkA";
Map<String, Integer> typeToCount = storage.searchTypes(
MatchFilter.getBuilder().withNullableFullTextInAll(query).build(),
AccessFilter.create().withAdmin(true));
System.out.println("Counts per type: " + typeToCount);
if (typeToCount.size() == 0) {
return;
}
List<String> types = ImmutableList.of(typeToCount.keySet().iterator().next());
Set<GUID> guids = storage.searchIds(types,
MatchFilter.getBuilder().withNullableFullTextInAll(query).build(), null,
AccessFilter.create().withAdmin(true), null).guids;
System.out.println("GUIDs found: " + guids);
ObjectData obj = storage.getObjectsByIds(guids, pp).get(0);
System.out.println("Feature: " + obj);
}
// Replays up to the last 5 versions (plus the event's own) of the object
// described by evid through the worker, one NEW_VERSION event per version.
private void indexFewVersions(final StoredStatusEvent evid) throws Exception {
final StatusEvent ev = evid.getEvent();
for (int i = Math.max(1, ev.getVersion().get() - 5); i <= ev.getVersion().get(); i++) {
final StoredStatusEvent ev2 = StoredStatusEvent.getBuilder(StatusEvent.getBuilder(
ev.getStorageObjectType().get(),
ev.getTimestamp(),
ev.getEventType())
.withNullableAccessGroupID(ev.getAccessGroupId().get())
.withNullableObjectID(ev.getAccessGroupObjectId().get())
.withNullableVersion(i)
.withNullableisPublic(ev.isPublic().get())
.build(),
evid.getID(),
StatusEventProcessingState.UNPROC)
.build();
worker.processEvent(ev2);
}
}
// Runs a full-text search restricted to the given types and access group and
// asserts on the hit count.
// NOTE(review): the assertion below hard-codes 1 and ignores the
// expectedCount parameter -- confirm whether it should assert expectedCount.
private void checkSearch(
final int expectedCount,
final List<String> types,
final String query,
final int accessGroupId,
final boolean debugOutput)
throws Exception {
Set<GUID> ids = storage.searchIds(types,
MatchFilter.getBuilder().withNullableFullTextInAll(query).build(), null,
AccessFilter.create().withAccessGroups(accessGroupId), null).guids;
if (debugOutput) {
PostProcessing pp = new PostProcessing();
pp.objectData = true;
pp.objectKeys = true;
System.out.println("DEBUG: " + storage.getObjectsByIds(ids, pp));
}
Assert.assertEquals(1, ids.size());
}
// Indexes several versions of the Narrative object and checks that
// full-text terms from its contents are searchable.
@Test
public void testNarrativeManually() throws Exception {
final StatusEvent ev = StatusEvent.getBuilder(
new StorageObjectType("WS", "KBaseNarrative.Narrative"),
Instant.now(),
StatusEventType.NEW_VERSION)
.withNullableAccessGroupID(wsid)
.withNullableObjectID("1")
.withNullableVersion(5)
.withNullableisPublic(false)
.build();
indexFewVersions(StoredStatusEvent.getBuilder(ev, new StatusEventID("-1"),
StatusEventProcessingState.UNPROC).build());
checkSearch(1, ImmutableList.of("Narrative"), "tree", wsid, false);
checkSearch(1, ImmutableList.of("Narrative"), "species", wsid, false);
/*indexFewVersions(new ObjectStatusEvent("-1", "WS", 10455, "1", 78, null,
System.currentTimeMillis(), "KBaseNarrative.Narrative",
ObjectStatusEventType.CREATED, false));
checkSearch(1, "Narrative", "Catalog.migrate_module_to_new_git_url", 10455, false);
checkSearch(1, "Narrative", "Super password!", 10455, false);
indexFewVersions(new ObjectStatusEvent("-1", "WS", 480, "1", 254, null,
System.currentTimeMillis(), "KBaseNarrative.Narrative",
ObjectStatusEventType.CREATED, false));
checkSearch(1, "Narrative", "weird text", 480, false);
checkSearch(1, "Narrative", "functionality", 480, false);*/
}
// Indexes the paired- and single-end read library objects and checks that
// their platform and file names are searchable.
@Test
public void testReadsManually() throws Exception {
final StatusEvent ev = StatusEvent.getBuilder(
new StorageObjectType("WS", "KBaseFile.PairedEndLibrary"),
Instant.now(),
StatusEventType.NEW_VERSION)
.withNullableAccessGroupID(wsid)
.withNullableObjectID("4")
.withNullableVersion(1)
.withNullableisPublic(false)
.build();
indexFewVersions(StoredStatusEvent.getBuilder(ev, new StatusEventID("-1"),
StatusEventProcessingState.UNPROC).build());
checkSearch(1, ImmutableList.of("PairedEndLibrary"), "Illumina", wsid, true);
checkSearch(1, ImmutableList.of("PairedEndLibrary"), "sample1se.fastq.gz", wsid, false);
final StatusEvent ev2 = StatusEvent.getBuilder(
new StorageObjectType("WS", "KBaseFile.SingleEndLibrary"),
Instant.now(),
StatusEventType.NEW_VERSION)
.withNullableAccessGroupID(wsid)
.withNullableObjectID("5")
.withNullableVersion(1)
.withNullableisPublic(false)
.build();
indexFewVersions(StoredStatusEvent.getBuilder(ev2, new StatusEventID("-1"),
StatusEventProcessingState.UNPROC).build());
checkSearch(1, ImmutableList.of("SingleEndLibrary"), "PacBio", wsid, true);
checkSearch(1, ImmutableList.of("SingleEndLibrary"), "reads.2", wsid, false);
}
// Deletes a workspace before its object's event is processed and verifies
// the object is not indexed.
@Test
public void testWorkspaceDeletion() throws Exception {
URL wsUrl = new URL("http://localhost:" + ws.getServerPort());
// NOTE(review): "<PASSWORD>" looks like scrubbed placeholder text; the
// parallel tests pass "user1" here -- confirm the intended user name.
final AuthToken wsadmintoken = new AuthToken(token1, "<PASSWORD>");
final WorkspaceClient wc = new WorkspaceClient(wsUrl, wsadmintoken);
wc.setIsInsecureHttpConnectionAllowed(true); //TODO SEC only do if http
final String objName = "GenomeToDelete";
// create a workspace with an object
final long wsidToDelete = wc.createWorkspace(new CreateWorkspaceParams().withWorkspace("WorkspaceDeleteTest"))
.getE1();
loadData(wc, wsidToDelete, objName, "KBaseGenomes.Genome-1.0", "GenomeObject");
// create an event for the object
final StoredStatusEvent ev = StoredStatusEvent.getBuilder(StatusEvent.getBuilder(
new StorageObjectType("WS", "KBaseGenomes.Genome"),
Instant.now(),
StatusEventType.NEW_VERSION)
.withNullableAccessGroupID((int)wsidToDelete)
.withNullableObjectID("1")
.withNullableVersion(1)
.withNullableisPublic(false)
.build(),
new StatusEventID("-1"),
StatusEventProcessingState.UNPROC)
.build();
// delete workspace
wc.deleteWorkspace(new WorkspaceIdentity().withId(wsidToDelete));
// process event
worker.processEvent(ev);
List<String> objectTypes = ImmutableList.of("Genome");
PostProcessing pp = new PostProcessing();
pp.objectData = true;
pp.objectKeys = true;
// object was not indexed
try {
FoundHits hits = storage.searchIds(objectTypes,
MatchFilter.getBuilder().
withNullableObjectName(objName).build(),
null,
AccessFilter.create().withAdmin(true), null);
// to compensate for side-effect when testGenomeManually
// is executed before this test. This side-effect needs to be fixed.
Assert.assertTrue(hits.guids.size() == 0);
} catch (IOException ex) {
Assert.assertTrue("No indexes exist for search type Genome".equals(ex.getMessage()));
}
}
// Processes an event for a workspace that was never created and verifies no
// object ends up in the index.
@Test
public void testNonExistentWorkspaceCase() throws Exception {
// create an event for a workspace that does not exist
final StoredStatusEvent ev = StoredStatusEvent.getBuilder(StatusEvent.getBuilder(
new StorageObjectType("WS", "KBaseGenomes.Genome"),
Instant.now(),
StatusEventType.NEW_VERSION)
.withNullableAccessGroupID(10000)
.withNullableObjectID("1")
.withNullableVersion(1)
.withNullableisPublic(false)
.build(),
new StatusEventID("-1"),
StatusEventProcessingState.UNPROC)
.build();
// process event
worker.processEvent(ev);
// NOTE(review): objectTypes and pp below are unused in this test.
List<String> objectTypes = ImmutableList.of("Genome");
PostProcessing pp = new PostProcessing();
pp.objectData = true;
pp.objectKeys = true;
// object was not indexed
try {
GUID guid = new GUID("WS", 10000, "1", 1, null, null);
List<ObjectData> data = storage.getObjectsByIds(ImmutableSet.of(guid));
// to compensate for side-effect when testGenomeManually
// is executed before this test. This side-effect needs to be fixed.
Assert.assertTrue(data.size() == 0);
} catch (IOException ex) {
Assert.assertTrue("No indexes exist for search type Genome".equals(ex.getMessage()));
}
}
// Renames an object before its event is processed and verifies the index
// reflects the new name.
@Test
public void testRenameObject() throws Exception {
URL wsUrl = new URL("http://localhost:" + ws.getServerPort());
final AuthToken wsadmintoken = new AuthToken(token1, "user1");
final WorkspaceClient wc = new WorkspaceClient(wsUrl, wsadmintoken);
wc.setIsInsecureHttpConnectionAllowed(true); //TODO SEC only do if http
final String objName = "GenomeToRename";
final String newObjName = "GenomeRenamed";
// create an object with some name
final long wsidForRenameObject = wc.createWorkspace(new CreateWorkspaceParams().withWorkspace("RenameObjectTest"))
.getE1();
loadData(wc, wsidForRenameObject, objName, "KBaseGenomes.Genome-1.0", "GenomeObject");
// create event for object
final StoredStatusEvent ev = StoredStatusEvent.getBuilder(StatusEvent.getBuilder(
new StorageObjectType("WS", "KBaseGenomes.Genome"),
Instant.now(),
StatusEventType.NEW_VERSION)
.withNullableAccessGroupID((int)wsidForRenameObject)
.withNullableObjectID("1")
.withNullableVersion(1)
.withNullableisPublic(false)
.build(),
new StatusEventID("-1"),
StatusEventProcessingState.UNPROC)
.build();
// rename the object
wc.renameObject(new RenameObjectParams().
withObj(new ObjectIdentity().
withWsid(wsidForRenameObject).
withName(objName)).
withNewName(newObjName));
// then process event
worker.processEvent(ev);
List<String> objectTypes = ImmutableList.of("Genome");
PostProcessing pp = new PostProcessing();
pp.objectData = true;
pp.objectKeys = true;
FoundHits hits = storage.searchIds(objectTypes,
MatchFilter.getBuilder().
withNullableObjectName(newObjName).build(),
null,
AccessFilter.create().withAdmin(true), null);
List<ObjectData> data = storage.getObjectsByIds(
hits.guids, pp);
// processed event should reflect new object name
Assert.assertEquals("incorrect genome name",
newObjName,
data.get(0).getObjectName().get());
}
}
|
# Downloads the Cloud Foundry CLI tarball and extracts it into the given
# directory.
#   $1 - destination directory for the extracted cf binary
install_cfcli() {
  local dir="$1"
  CF_VERSION=6.17
  local download_url="https://cli.run.pivotal.io/stable?release=linux64-binary&version=$CF_VERSION.0&source=github-rel"
  echo "Downloading CF CLI [$download_url]"
  # NOTE(review): -k disables TLS certificate verification -- confirm this is
  # intentional before using in a trusted pipeline.
  curl --silent --fail --retry 5 --retry-max-time 15 -j -k -L -H "Cookie: oraclelicense=accept-securebackup-cookie" "$download_url" -o /tmp/cf.tar.gz || (echo "Unable to download cf CLI; does it exist?" && false)
  echo "Download complete!"
  echo "Installing CF CLI"
  # Quote the destination so paths containing spaces work.
  tar xzf /tmp/cf.tar.gz -C "$dir"
  echo "Installation complete!"
}
<gh_stars>0
module FlexibleAccessibility
  class AccessProvider
    class << self
      # Caches the user's permission names in an instance variable on the
      # user object, so the database is queried at most once per user.
      def preload_permissions(user)
        return unless user.instance_variable_get(:@_available_permissions).nil?
        permissions = AccessRule.where(owner_id: user.id).map(&:permission)
        user.instance_variable_set(:@_available_permissions, permissions)
      end

      # True when +permission+ is among the user's cached permissions.
      def action_permitted_for_user?(permission, user)
        preload_permissions(user)
        user.instance_variable_get(:@_available_permissions).include?(permission)
      end
    end
  end
end
|
<script type="text/javascript">
// Grab the canvas element and create a WebGL rendering context on it.
var canvas = document.getElementById("canvas");
var gl = canvas.getContext("webgl");
// create cube
// Cube geometry as raw (x, y, z) vertex positions, two triangles per face
// (8 triangles are listed here).
// NOTE(review): triangles 3-6 do not appear to lie on axis-aligned faces of
// the +/-1 cube -- confirm the vertex data is intentional.
var cubeVerts = [
-1.0, -1.0, 1.0, // triangle 1
1.0, -1.0, 1.0,
1.0, 1.0, 1.0,
-1.0, -1.0, 1.0, // triangle 2
1.0, 1.0, 1.0,
-1.0, 1.0, 1.0,
-1.0, -1.0, -1.0, // triangle 3
-1.0, 1.0, 1.0,
1.0, 1.0, 1.0,
-1.0, -1.0, -1.0, // triangle 4
1.0, 1.0, 1.0,
1.0, -1.0, -1.0,
1.0, -1.0, -1.0, // triangle 5
1.0, 1.0, -1.0,
1.0, 1.0, 1.0,
1.0, -1.0, -1.0, // triangle 6
1.0, 1.0, 1.0,
-1.0, -1.0, 1.0,
-1.0, 1.0, -1.0, // triangle 7
-1.0, -1.0, -1.0,
1.0, -1.0, -1.0,
-1.0, 1.0, -1.0, // triangle 8
1.0, -1.0, -1.0,
1.0, 1.0, -1.0,
];
//...
// Setup and render cube
//...
// Per-frame draw callback: advances the rotation angle, rotates the
// translation vector about the Y axis, uploads it to the shader, draws the
// cube, and schedules the next frame.
// Relies on globals defined in the omitted setup code: theta, transformLoc.
function render() {
  // spin cube
  theta += 1;

  // Translation vector to rotate (currently the origin, so the rotated
  // result is also the origin).
  var trX = 0.0;
  var trY = 0.0;
  var trZ = 0.0;

  // Rotation about the Y axis by `theta` (treated as radians by Math.cos/sin).
  var cosT = Math.cos(theta);
  var sinT = Math.sin(theta);
  var rotatedX = cosT * trX - sinT * trZ;
  var rotatedY = trY;
  var rotatedZ = sinT * trX + cosT * trZ;

  gl.uniform3f(transformLoc, rotatedX, rotatedY, rotatedZ);
  gl.drawArrays(gl.TRIANGLES, 0, cubeVerts.length / 3);
  requestAnimationFrame(render);
}
render();
</script> |
# Termux build recipe for xcb-util-wm: client and window-manager helper
# libraries (ICCCM) for the X C Binding.
TERMUX_PKG_HOMEPAGE=https://xcb.freedesktop.org/
TERMUX_PKG_DESCRIPTION="Utility libraries for XC Binding - client and window-manager helpers for ICCCM"
TERMUX_PKG_LICENSE="MIT"
TERMUX_PKG_MAINTAINER="Leonid Pliushch <leonid.pliushch@gmail.com>"
TERMUX_PKG_VERSION=0.4.1
# Package revision: bumped to rebuild the same upstream version.
TERMUX_PKG_REVISION=23
TERMUX_PKG_SRCURL=https://xcb.freedesktop.org/dist/xcb-util-wm-${TERMUX_PKG_VERSION}.tar.bz2
# SHA-256 checksum of the source tarball above.
TERMUX_PKG_SHA256=28bf8179640eaa89276d2b0f1ce4285103d136be6c98262b6151aaee1d3c2a3f
# Runtime and build-time dependencies.
TERMUX_PKG_DEPENDS="libxcb"
TERMUX_PKG_BUILD_DEPENDS="xorg-util-macros"
|
/// <summary>
/// Specifies the different types of stream procedures.
/// </summary>
public enum StreamProcedureType
{
    /// <summary>
    /// Flag signifying that the end of the stream has been reached.
    /// </summary>
    End = int.MinValue,

    /// <summary>
    /// Create a "push" stream: instead of BASS pulling data from a
    /// StreamProcedure function, data is pushed to BASS via
    /// <see cref="Bass.StreamPutData(int,IntPtr,int)"/>.
    /// </summary>
    PushStream = 1
}
/// <summary>
/// User-defined stream procedure (callback) that supplies sample data for a stream.
/// </summary>
/// <param name="handle">The stream handle.</param>
/// <param name="buffer">Pointer to the buffer to write the data to.</param>
/// <param name="length">The maximum number of bytes to write.</param>
/// <param name="user">User instance data passed when the stream was created.</param>
/// <returns>The number of bytes written to the buffer.</returns>
/// <remarks>
/// NOTE(review): presumably returning <see cref="StreamProcedureType.End"/>
/// (possibly OR'd with the byte count) signals end-of-stream -- confirm
/// against the BASS STREAMPROC documentation.
/// </remarks>
public delegate int StreamProcedure(int handle, IntPtr buffer, int length, IntPtr user);
// Implement the corresponding stream procedure function
/// <summary>
/// Stream procedure for a "push" stream. Push streams receive their data via
/// <see cref="Bass.StreamPutData(int,IntPtr,int)"/> rather than from this
/// callback, so there is nothing to produce here.
/// </summary>
/// <returns>The number of bytes written to the buffer (always 0).</returns>
public int PushStreamProcedure(int handle, IntPtr buffer, int length, IntPtr user)
{
    // The original body was empty, which does not compile for a non-void
    // method. Returning 0 reports "no bytes written".
    // NOTE(review): confirm the intended semantics against the BASS docs.
    return 0;
}
module WOZLLA.component {
export class PropertyConverter {
public static array2rect(arr:Array<number>):WOZLLA.math.Rectangle {
return new WOZLLA.math.Rectangle(arr[0], arr[1], arr[2], arr[3]);
}
public static array2circle(arr:Array<number>):WOZLLA.math.Circle {
return new WOZLLA.math.Circle(arr[0], arr[1], arr[2]);
}
public static json2TextStyle(json:any):TextStyle {
var style = new TextStyle();
for(var i in json) {
style[i] = json[i];
}
return style;
}
public static array2Padding(arr:Array<number>):WOZLLA.layout.Padding {
return new WOZLLA.layout.Padding(arr[0], arr[1], arr[2], arr[3]);
}
public static array2Margin(arr:Array<number>):WOZLLA.layout.Margin {
return new WOZLLA.layout.Margin(arr[0], arr[1], arr[2], arr[3]);
}
}
} |
<reponame>choosewhatulike/500lines
# coding: utf-8
# Gem specification for the "catechism" testing framework.
# Make lib/ loadable so catechism/version can be required below.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'catechism/version'
Gem::Specification.new do |spec|
spec.name = 'catechism'
spec.version = Catechism::VERSION
spec.authors = ['<NAME> and <NAME>']
spec.email = ['<EMAIL>']
spec.description = 'An object-oriented testing framework'
spec.summary = 'Put your code through an object-oriented trial by fire.'
spec.homepage = 'https://github.com/aosabook/500lines/tree/master/catechism'
spec.license = 'MIT'
# Package everything tracked by git; tests live under trials/ instead of spec/.
spec.files = `git ls-files`.split($/)
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^trials/})
spec.require_paths = ['lib']
spec.bindir = 'bin'
spec.add_dependency 'thor'
spec.add_development_dependency 'bundler', '~> 1.3'
spec.add_development_dependency 'gem-release'
spec.add_development_dependency 'guard-shell'
end
|
#!/bin/bash
# Release script for Cotyledon: tags, builds and uploads a version to PyPI.
set -e
set -x
# Version may be passed as $1; otherwise derive it from pbr, stripping any .devN suffix.
version=$1
[ ! "$version" ] && version=$(python setup.py --version | sed 's/\.dev.*//')
# Abort (via set -e) if the working tree is not clean.
status=$(git status -sz)
[ -z "$status" ] || false
git checkout master
tox -epy37,py27,pep8
git push
# Create a signed tag and build from the tagged checkout.
git tag -s $version -m "Release version ${version}"
git checkout $version
git clean -fd
# Sanity check: pbr must report exactly the version we tagged.
pbr_version=$(python setup.py --version)
if [ "$version" != "$pbr_version" ]; then
echo "something goes wrong pbr version is different from the provided one. ($pbr_version != $version)"
exit 1
fi
python setup.py sdist bdist_wheel
set +x
echo
echo "release: Cotyledon ${version}"
echo
echo "SHA1sum: "
sha1sum dist/*
echo "MD5sum: "
md5sum dist/*
echo
echo "uploading..."
echo
set -x
# Pause for operator confirmation before pushing the tag and uploading.
read
git push --tags
twine upload -r pypi -s dist/cotyledon-${version}.tar.gz dist/cotyledon-${version}-py2.py3-none-any.whl
git checkout master
|
# -*- mode: shell-script -*-
# shunit2 tests for mkvirtualenv (virtualenvwrapper).
test_dir=$(cd $(dirname $0) && pwd)
source "$test_dir/setup.sh"
# Start each run from a fresh WORKON_HOME with virtualenvwrapper loaded.
oneTimeSetUp() {
rm -rf "$WORKON_HOME"
mkdir -p "$WORKON_HOME"
source "$test_dir/../virtualenvwrapper.sh"
}
oneTimeTearDown() {
rm -rf "$WORKON_HOME"
}
setUp () {
echo
rm -f "$TMPDIR/catch_output"
}
# Creating an environment should also create its (non-executable) hook scripts.
test_create() {
mkvirtualenv "env1" >/dev/null 2>&1
assertTrue "Environment directory was not created" "[ -d $WORKON_HOME/env1 ]"
for hook in postactivate predeactivate postdeactivate
do
assertTrue "env1 $hook was not created" "[ -f $WORKON_HOME/env1/bin/$hook ]"
assertFalse "env1 $hook is executable" "[ -x $WORKON_HOME/env1/bin/$hook ]"
done
}
test_create_space_in_name() {
# Only test with leading and internal spaces. Directory names with trailing spaces are legal,
# and work with virtualenv on OSX, but error out on Linux.
mkvirtualenv " env with space" >/dev/null 2>&1
assertTrue "Environment directory was not created" "[ -d \"$WORKON_HOME/ env with space\" ]"
for hook in postactivate predeactivate postdeactivate
do
assertTrue "$hook was not created" "[ -f \"$WORKON_HOME/ env with space/bin/$hook\" ]"
assertFalse "$hook is executable" "[ -x \"$WORKON_HOME/ env with space/bin/$hook\" ]"
done
assertTrue virtualenvwrapper_verify_active_environment
env_name=$(basename "$VIRTUAL_ENV")
assertSame " env with space" "$env_name"
}
test_activates () {
mkvirtualenv "env2" >/dev/null 2>&1
assertTrue virtualenvwrapper_verify_active_environment
assertSame "env2" $(basename "$VIRTUAL_ENV")
}
# Global pre/post hooks in WORKON_HOME must run, in order, with the env name as argument.
test_hooks () {
echo "#!/bin/sh" > "$WORKON_HOME/premkvirtualenv"
echo "echo GLOBAL premkvirtualenv \`pwd\` \"\$@\" >> \"$TMPDIR/catch_output\"" >> "$WORKON_HOME/premkvirtualenv"
chmod +x "$WORKON_HOME/premkvirtualenv"
echo "echo GLOBAL postmkvirtualenv >> $TMPDIR/catch_output" > "$WORKON_HOME/postmkvirtualenv"
mkvirtualenv "env3" >/dev/null 2>&1
output=$(cat "$TMPDIR/catch_output")
workon_home_as_pwd=$(cd $WORKON_HOME; pwd)
expected="GLOBAL premkvirtualenv $workon_home_as_pwd env3
GLOBAL postmkvirtualenv"
assertSame "$expected" "$output"
rm -f "$WORKON_HOME/premkvirtualenv"
rm -f "$WORKON_HOME/postmkvirtualenv"
deactivate
rmvirtualenv "env3" >/dev/null 2>&1
}
# mkvirtualenv should fail cleanly (RC=1) when no virtualenv binary is on PATH.
test_no_virtualenv () {
# Find "which" before we change the path
which=$(which which)
old_path="$PATH"
PATH="/bin:/usr/sbin:/sbin"
venv=$($which virtualenv 2>/dev/null)
if [ ! -z "$venv" ]
then
echo "FOUND \"$venv\" in PATH so skipping this test"
export PATH="$old_path"
return 0
fi
mkvirtualenv should_not_be_created >/dev/null 2>&1
RC=$?
# Restore the path before testing because
# the test script depends on commands in the
# path.
export PATH="$old_path"
assertSame "$RC" "1"
}
test_no_args () {
mkvirtualenv 2>/dev/null 1>&2
RC=$?
assertSame "2" "$RC"
}
# A missing WORKON_HOME should be reported but the environment still created.
test_no_workon_home () {
old_home="$WORKON_HOME"
export WORKON_HOME="$WORKON_HOME/not_there"
mkvirtualenv should_be_created >"$old_home/output" 2>&1
output=$(cat "$old_home/output")
assertTrue "Did not see expected message in \"$output\"" "cat \"$old_home/output\" | grep 'does not exist'"
assertTrue "Did not create environment" "[ -d \"$WORKON_HOME/should_be_created\" ]"
WORKON_HOME="$old_home"
}
# Verify that --no-site-packages produces the no-global-site-packages marker.
test_mkvirtualenv_sitepackages () {
# This part of the test is not reliable because
# creating a new virtualenv from inside the
# tox virtualenv inherits the setting from there.
# # Without the option, verify that site-packages are copied.
# mkvirtualenv "with_sp" >/dev/null 2>&1
# ngsp_file="`virtualenvwrapper_get_site_packages_dir`/../no-global-site-packages.txt"
# assertFalse "$ngsp_file exists" "[ -f \"$ngsp_file\" ]"
# rmvirtualenv "env3"
# With the argument, verify that they are not copied.
mkvirtualenv --no-site-packages "without_sp" >/dev/null 2>&1
ngsp_file="`virtualenvwrapper_get_site_packages_dir`/../no-global-site-packages.txt"
assertTrue "$ngsp_file does not exist" "[ -f \"$ngsp_file\" ]"
# Fix: clean up the environment this test actually created ("without_sp");
# the previous name ("env4") referred to an environment that never existed,
# so the created environment leaked into later tests.
rmvirtualenv "without_sp" >/dev/null 2>&1
}
# Global hooks must still fire when --system-site-packages is passed (issue #189).
test_mkvirtualenv_hooks_system_site_packages () {
# See issue #189
echo "#!/bin/sh" > "$WORKON_HOME/premkvirtualenv"
echo "echo GLOBAL premkvirtualenv \`pwd\` \"\$@\" >> \"$TMPDIR/catch_output\"" >> "$WORKON_HOME/premkvirtualenv"
chmod +x "$WORKON_HOME/premkvirtualenv"
echo "echo GLOBAL postmkvirtualenv >> $TMPDIR/catch_output" > "$WORKON_HOME/postmkvirtualenv"
mkvirtualenv --system-site-packages "env189" >/dev/null 2>&1
output=$(cat "$TMPDIR/catch_output")
workon_home_as_pwd=$(cd $WORKON_HOME; pwd)
expected="GLOBAL premkvirtualenv $workon_home_as_pwd env189
GLOBAL postmkvirtualenv"
assertSame "$expected" "$output"
rm -f "$WORKON_HOME/premkvirtualenv"
rm -f "$WORKON_HOME/postmkvirtualenv"
deactivate
rmvirtualenv "env189" >/dev/null 2>&1
}
# Extra arguments from VIRTUALENVWRAPPER_VIRTUALENV_ARGS must be passed through (issue #102).
test_mkvirtualenv_args () {
# See issue #102
VIRTUALENVWRAPPER_VIRTUALENV_ARGS="--no-site-packages"
# With the argument, verify that they are not copied.
mkvirtualenv "without_sp2" >/dev/null 2>&1
ngsp_file="`virtualenvwrapper_get_site_packages_dir`/../no-global-site-packages.txt"
assertTrue "$ngsp_file does not exist" "[ -f \"$ngsp_file\" ]"
# Fix: remove the environment this test created ("without_sp2"); the previous
# cleanup targeted "env4", which was never created, leaking "without_sp2".
rmvirtualenv "without_sp2" >/dev/null 2>&1
unset VIRTUALENVWRAPPER_VIRTUALENV_ARGS
}
# If the configured virtualenv binary is missing, no hooks should run at all.
test_no_such_virtualenv () {
VIRTUALENVWRAPPER_VIRTUALENV=/path/to/missing/program
echo "#!/bin/sh" > "$WORKON_HOME/premkvirtualenv"
echo "echo GLOBAL premkvirtualenv \`pwd\` \"\$@\" >> \"$TMPDIR/catch_output\"" >> "$WORKON_HOME/premkvirtualenv"
chmod +x "$WORKON_HOME/premkvirtualenv"
echo "echo GLOBAL postmkvirtualenv >> $TMPDIR/catch_output" > "$WORKON_HOME/postmkvirtualenv"
mkvirtualenv "env3" >/dev/null 2>&1
output=$(cat "$TMPDIR/catch_output" 2>/dev/null)
workon_home_as_pwd=$(cd $WORKON_HOME; pwd)
expected=""
assertSame "$expected" "$output"
rm -f "$WORKON_HOME/premkvirtualenv"
rm -f "$WORKON_HOME/postmkvirtualenv"
VIRTUALENVWRAPPER_VIRTUALENV=virtualenv
}
test_virtualenv_fails () {
# Test to reproduce the conditions in issue #76
# https://bitbucket.org/dhellmann/virtualenvwrapper/issue/76/
#
# Should not run the premkvirtualenv or postmkvirtualenv hooks
# because the environment is not created and even the
# premkvirtualenv hooks are run *after* the environment exists
# (but before it is activated).
export pre_test_dir=$(cd "$test_dir"; pwd)
VIRTUALENVWRAPPER_VIRTUALENV=false
echo "#!/bin/sh" > "$WORKON_HOME/premkvirtualenv"
echo "echo GLOBAL premkvirtualenv \`pwd\` \"\$@\" >> \"$TMPDIR/catch_output\"" >> "$WORKON_HOME/premkvirtualenv"
chmod +x "$WORKON_HOME/premkvirtualenv"
echo "echo GLOBAL postmkvirtualenv >> $TMPDIR/catch_output" > "$WORKON_HOME/postmkvirtualenv"
mkvirtualenv "env3" >/dev/null 2>&1
output=$(cat "$TMPDIR/catch_output" 2>/dev/null)
workon_home_as_pwd=$(cd $WORKON_HOME; pwd)
expected=""
assertSame "$expected" "$output"
rm -f "$WORKON_HOME/premkvirtualenv"
rm -f "$WORKON_HOME/postmkvirtualenv"
VIRTUALENVWRAPPER_VIRTUALENV=virtualenv
}
# A --python option passed to one mkvirtualenv call must not leak into later calls.
test_mkvirtualenv_python_not_sticky () {
typeset _save=$VIRTUALENVWRAPPER_VIRTUALENV
VIRTUALENVWRAPPER_VIRTUALENV=true
mkvirtualenv --python blah foo
assertSame "" "$interpreter"
VIRTUALENVWRAPPER_VIRTUALENV=$_save
}
# The three spellings of the python option (-p, --python, --python=) must all
# be normalized to --python=... (verified by substituting echo for virtualenv).
test_mkvirtualenv_python_short_option () {
typeset _save=$VIRTUALENVWRAPPER_VIRTUALENV
VIRTUALENVWRAPPER_VIRTUALENV=echo
output="$(mkvirtualenv -p python foo)"
assertSame "--python=python foo" "$output"
VIRTUALENVWRAPPER_VIRTUALENV=$_save
}
test_mkvirtualenv_python_long_option () {
typeset _save=$VIRTUALENVWRAPPER_VIRTUALENV
VIRTUALENVWRAPPER_VIRTUALENV=echo
output="$(mkvirtualenv --python python foo)"
assertSame "--python=python foo" "$output"
VIRTUALENVWRAPPER_VIRTUALENV=$_save
}
test_mkvirtualenv_python_long_option_equal () {
typeset _save=$VIRTUALENVWRAPPER_VIRTUALENV
VIRTUALENVWRAPPER_VIRTUALENV=echo
output="$(mkvirtualenv --python=python foo)"
assertSame "--python=python foo" "$output"
VIRTUALENVWRAPPER_VIRTUALENV=$_save
}
# Hand control to the shunit2 test runner.
. "$test_dir/shunit2"
|
<reponame>leongaban/redux-saga-exchange
import { IQueryParams } from './app';
// Shared UI/view type declarations: tabs, table columns, sorting, pagination.
export interface IClosable {
isOpen: boolean;
}
/** A single tab; T is the tab key type (string by default). */
export interface ITab<T = string> {
title: string;
key: T;
active: boolean;
disabled?: boolean;
hidden?: boolean;
onClick(): void;
}
/** Common column properties shared by model and extra columns. */
export interface IAbstractColumn {
width?: number;
isSortable?: boolean;
sortKind?: 'simple' | 'date';
title(): string;
}
export interface IHoldingCellRenderer<T> {
renderCell(value: T, isSelectedRow: boolean): JSX.Element | string;
}
export interface IModelColumn<T> extends IAbstractColumn, Partial<IHoldingCellRenderer<T>> { }
export interface IExtraColumn<T> extends IAbstractColumn, IHoldingCellRenderer<T> { }
export interface IMTableColumnWidth {
unit: '%' | 'rem';
value: number;
}
/** Column definition for the mobile table variant. */
export interface IMTableColumn<T> {
rightAligned?: boolean;
width?: IMTableColumnWidth;
getTitle(): string | JSX.Element;
renderCell(value: T): JSX.Element | string;
}
export interface IMTableRowSubcontent<T> {
rows: Array<IMTableRowSubcontentRow<T>>;
renderBottomPart?(value: T): JSX.Element | string;
}
export interface IMTableRowSubcontentRow<T> {
getTitle(): string;
renderValue(value: T): JSX.Element | string | number | null;
}
export type TableColumns<ColumnData, Model> = Record<keyof ColumnData, IModelColumn<Model>>;
export type NotificationKind = 'error' | 'info';
export interface INotification {
kind: NotificationKind;
text: string;
}
export type SortKind = 'simple' | 'date';
export type SortDirection = 'ascend' | 'descend';
/** Current sort state: which column, what comparison kind, which direction. */
export interface ISortInfo<T> {
column: keyof T;
kind: SortKind;
direction: SortDirection;
}
export interface IHoldingSortInfo<T> {
// TODO sort is a bad name
sort: ISortInfo<T>;
}
export type SortChangeHandler<T> = (newSort: ISortInfo<T>) => void;
export type UITheme = 'day' | 'night' | 'moon';
export type RadioColorsSet = 'primary' | 'sell' | 'buy';
export type ClientDeviceType = 'desktop' | 'mobile';
// TODO move such state of desktop tables to redux
export interface ITablePaginationState {
recordsPerPage: number;
activePage: number;
}
/** Pagination state plus the callbacks the table uses to drive it. */
export interface ITablePaginationData {
state: ITablePaginationState;
pagesNumber: number;
recordsPerPageSelectOptions?: number[];
isRequesting?: boolean;
onPageChange(page: number): void;
onRecordsPerPageSelect(recordsPerPage: number): void;
renderHeader(renderPaginationControls: () => JSX.Element | null | undefined): JSX.Element;
}
export interface ISwitchableMobileContentProps<T> {
queryParams: IQueryParams;
onTabSwitch(tab: T): void;
}
|
// QUnit test: the collapsible widget must apply its structural CSS classes to
// the container, heading and content, and keep exactly two children.
test( "Collapsible widget works correctly", function() {
var collapsible = $( "#collapsible" ).collapsible();
deepEqual( collapsible.hasClass( "ui-collapsible" ), true, "Collapsible has class ui-collapsible" );
deepEqual( collapsible.children( "h1" ).hasClass( "ui-collapsible-heading" ), true, "Collapsible heading has class ui-collapsible-heading" );
deepEqual( collapsible.children( "h1" ).next().hasClass( "ui-collapsible-content" ), true, "Collapsible content has class ui-collapsible-content" );
deepEqual( collapsible.children().length, 2, "Collapsible contains exactly two children" );
});
|
from UIKit import *
from LinkPresentation import *
from Foundation import *
from rubicon.objc import *
from mainthread import mainthread
import pyto_ui as ui
# We subclass UIViewController
class MyViewController(UIViewController):
    """Native view controller that shows a rich link preview (LPLinkView) for apple.com."""

    @objc_method
    def close(self):
        # Dismiss this controller (wired to the navigation bar's Done button).
        self.dismissViewControllerAnimated(True, completion=None)

    @objc_method
    def dealloc(self):
        # Balance the alloc of link_view created in viewDidLoad.
        self.link_view.release()

    # Overriding viewDidLoad
    @objc_method
    def viewDidLoad(self):
        send_super(__class__, self, "viewDidLoad")
        self.title = "Link"
        self.view.backgroundColor = UIColor.systemBackgroundColor()
        # 0 is the value for a 'Done' button
        done_button = UIBarButtonItem.alloc().initWithBarButtonSystemItem(0, target=self, action=SEL("close"))
        self.navigationItem.rightBarButtonItems = [done_button]
        self.url = NSURL.alloc().initWithString("https://apple.com")
        self.link_view = LPLinkView.alloc().initWithURL(self.url)
        # NOTE(review): the height literal is written as 000 (i.e. 0); layout()
        # later calls sizeToFit(), so it may be harmless, but confirm 200 was
        # not intended here.
        self.link_view.frame = CGRectMake(0, 0, 200, 000)
        self.view.addSubview(self.link_view)
        self.fetchMetadata()

    @objc_method
    def fetchMetadata(self):
        # Fetch the link's metadata asynchronously, then apply it on the main thread.
        @mainthread
        def set_metadata(metadata):
            self.link_view.setMetadata(metadata)
            self.layout()
        def fetch_handler(metadata: ObjCInstance, error: ObjCInstance) -> None:
            set_metadata(metadata)
        provider = LPMetadataProvider.alloc().init().autorelease()
        provider.startFetchingMetadataForURL(self.url, completionHandler=fetch_handler)

    @objc_method
    def layout(self):
        # Size the preview to its content and keep it centered.
        self.link_view.sizeToFit()
        self.link_view.setCenter(self.view.center)

    @objc_method
    def viewDidLayoutSubviews(self):
        self.layout()

@mainthread
def show():
    # We initialize our view controller and a navigation controller
    # This must be called from the main thread
    vc = MyViewController.alloc().init().autorelease()
    nav_vc = UINavigationController.alloc().initWithRootViewController(vc).autorelease()
    ui.show_view_controller(nav_vc)

show()
|
#!/usr/bin/env bash
# Generate API documentation for the Mapbox iOS SDK with jazzy.
set -e
set -o pipefail
set -u
# Install jazzy on demand; bail out if installation fails.
if [ -z `which jazzy` ]; then
    echo "Installing jazzy…"
    gem install jazzy
    if [ -z `which jazzy` ]; then
        echo "Unable to install jazzy. See https://github.com/mapbox/mapbox-gl-native/blob/master/platform/ios/INSTALL.md"
        exit 1
    fi
fi
OUTPUT=${OUTPUT:-documentation}
# Derive the version numbers from the most recent ios-v* tag.
BRANCH=$( git describe --tags --match=ios-v*.*.* --abbrev=0 )
SHORT_VERSION=$( echo ${BRANCH} | sed 's/^ios-v//' )
# SHORT_VERSION already has the ios-v prefix stripped; just drop any -suffix.
RELEASE_VERSION=$( echo ${SHORT_VERSION} | sed -e 's/-.*//' )
rm -rf /tmp/mbgl
mkdir -p /tmp/mbgl/
README=/tmp/mbgl/README.md
cp platform/ios/docs/doc-README.md "${README}"
# Append this release's changelog section to the README.
# http://stackoverflow.com/a/4858011/4585461
echo "## Changes in version ${RELEASE_VERSION}" >> "${README}"
sed -n -e '/^## /{' -e ':a' -e 'n' -e '/^## /q' -e 'p' -e 'ba' -e '}' platform/ios/CHANGELOG.md >> "${README}"
rm -rf "${OUTPUT}"
mkdir -p "${OUTPUT}"
cp platform/ios/screenshot.png "${OUTPUT}"
jazzy \
    --config platform/ios/jazzy.yml \
    --sdk iphonesimulator \
    --github-file-prefix https://github.com/mapbox/mapbox-gl-native/tree/${BRANCH} \
    --module-version ${SHORT_VERSION} \
    --readme "${README}" \
    --root-url https://www.mapbox.com/ios-sdk/api/${RELEASE_VERSION}/ \
    --theme platform/darwin/docs/theme \
    --output "${OUTPUT}"
# https://github.com/realm/jazzy/issues/411
# Fix: quote the -name pattern so the shell cannot glob-expand *.html against
# files in the current directory before find ever sees the pattern.
find "${OUTPUT}" -name '*.html' -exec \
    perl -pi -e 's/Mapbox\s+(Docs|Reference)/Mapbox iOS SDK $1/' {} \;
|
class DescribeNsm:
    """Simple container pairing a measurement value with its unit."""

    def __init__(self):
        # No measurement recorded yet.
        self.value = None
        self.unit = None

    def set_measurement(self, value, unit):
        """Record a measurement value together with its unit."""
        self.value, self.unit = value, unit

    def get_measurement(self):
        """Return the measurement formatted as '<value> <unit>'."""
        return "{} {}".format(self.value, self.unit)
#!/bin/bash
# dwm autostart: launches the status bar, compositor, applets and helpers.
#dwm status bar
/bin/bash ~/scripts/dwm-status.sh &
# Wallpaper
feh --bg-fill ~/Pictures/wallpapers/*
# Set wmname (Java GUI compatibility workaround)
wmname LG3D
clash -d ~/.config/clash &
# Monitor layout
~/scripts/dualmon.sh &
#/bin/bash ~/scripts/wp-autochange.sh &
# Compositor
picom -b
# Touchpad settings
/bin/bash ~/scripts/tap-to-click.sh &
/bin/bash ~/scripts/inverse-scroll.sh &
#lock
xautolock -time 10 -locker '/usr/bin/lock' -corners ---- -cornersize 30 &
# NetworkManager tray applet
nm-applet &
# Bluetooth applet
blueman-applet &
#caffeine -a &
caffeine &
# Flameshot screenshot tool
flameshot &
# Power management
xfce4-power-manager &
# GPU switching
optimus-manager-qt &
# Input method
fcitx5 &
#conky
conky &
|
<filename>java/ql/test/library-tests/dataflow/call-sensitivity/A.java<gh_stars>1000+
// Test fixture for call-sensitive dataflow analysis: the comments marking which
// calls should or should not exhibit flow are the expected analysis results, so
// the code shape here is intentional and must not be refactored.
public class A {
// Dataflow sink: any value reaching this parameter is a "flow" result.
public static void sink(Object o) {
}
// Passes o through only when cond is true; used to test flow-through precision.
public Object flowThrough(Object o, boolean cond) {
if (cond) {
return o;
} else {
return null;
}
}
public void callSinkIfTrue(Object o, boolean cond) {
if (cond) {
sink(o);
}
}
public void callSinkIfFalse(Object o, boolean cond) {
if (!cond) {
sink(o);
}
}
public void callSinkFromLoop(Object o, boolean cond) {
while (cond) {
sink(o);
}
}
// Flow to sink depends on the caller's value of c.
public void localCallSensitivity(Object o, boolean c) {
Object o1 = o;
Object o2 = null;
if (c) {
Object tmp = o1;
o2 = 1 == 1 ? (tmp) : (tmp);
}
Object o3 = o2;
sink(o3);
}
// Same as above but guarded by a disjunction of two caller-controlled flags.
public void localCallSensitivity2(Object o, boolean b, boolean c) {
Object o1 = o;
Object o2 = null;
if (b || c) {
Object tmp = o1;
o2 = 1 == 1 ? (tmp) : (tmp);
}
Object o3 = o2;
sink(o3);
}
// Calls with boolean literals: the analysis should distinguish true/false call sites.
public void f1() {
// should not exhibit flow
callSinkIfTrue(new Integer(1), false);
callSinkIfFalse(new Integer(2), true);
callSinkFromLoop(new Integer(3), false);
localCallSensitivity(new Integer(4), false);
sink(flowThrough(new Integer(4), false));
// should exhibit flow
callSinkIfTrue(new Integer(1), true);
callSinkIfFalse(new Integer(2), false);
callSinkFromLoop(new Integer(3), true);
localCallSensitivity(new Integer(4), true);
localCallSensitivity2(new Integer(4), true, true);
localCallSensitivity2(new Integer(4), false, true);
localCallSensitivity2(new Integer(4), true, false);
sink(flowThrough(new Integer(4), true));
// expected false positive
localCallSensitivity2(new Integer(4), false, false);
}
// Same call sites but with the literals routed through local variables.
public void f2() {
boolean t = true;
boolean f = false;
// should not exhibit flow
callSinkIfTrue(new Integer(4), f);
callSinkIfFalse(new Integer(5), t);
callSinkFromLoop(new Integer(6), f);
localCallSensitivity(new Integer(4), f);
sink(flowThrough(new Integer(4), f));
// should exhibit flow
callSinkIfTrue(new Integer(4), t);
callSinkIfFalse(new Integer(5), f);
callSinkFromLoop(new Integer(6), t);
localCallSensitivity(new Integer(4), t);
sink(flowThrough(new Integer(4), t));
}
// Same pattern again but through an interface, exercising virtual dispatch.
public void f3(InterfaceA b) {
boolean t = true;
boolean f = false;
// should not exhibit flow
b.callSinkIfTrue(new Integer(4), f);
b.callSinkIfFalse(new Integer(5), t);
b.localCallSensitivity(new Integer(4), f);
// should exhibit flow
b.callSinkIfTrue(new Integer(4), t);
b.callSinkIfFalse(new Integer(5), f);
b.localCallSensitivity(new Integer(4), t);
}
// Concrete implementation used as the dispatch target for f3.
class B implements InterfaceA {
@Override
public void callSinkIfTrue(Object o, boolean cond) {
if (cond) {
sink(o);
}
}
@Override
public void callSinkIfFalse(Object o, boolean cond) {
if (!cond) {
sink(o);
}
}
@Override
public void localCallSensitivity(Object o, boolean c) {
Object o1 = o;
Object o2 = null;
if (c) {
Object tmp = o1;
o2 = 1 == 1 ? (tmp) : (tmp);
}
Object o3 = o2;
sink(o3);
}
}
}
|
import axios from 'axios'
// Action type constants for the bills slice.
const SET_BILLS = 'SET_BILLS'
const GOT_NEW_BILL = 'GOT_NEW_BILL'

/**
 * Action creator: replace the whole bills list.
 */
export const setBills = (bills) => ({
  type: SET_BILLS,
  bills,
})

/**
 * Action creator: append a single newly-created bill.
 */
export const gotNewBill = (bill) => {
  return {
    type: GOT_NEW_BILL,
    bill,
  }
}
/**
 * Thunk: fetch all bills for a trip and store them.
 * Failures are logged and swallowed, leaving the current bills untouched.
 */
export const fetchBills = (tripId) => {
  return async (dispatch) => {
    try {
      const {data} = await axios.get(`/api/bills/${tripId}`)
      dispatch(setBills(data))
    } catch (err) {
      console.error(err)
    }
  }
}

/**
 * Thunk: create a bill for a trip, attaching the plan id, and store the result.
 */
export const postBill = (formData, planId, tripId) => {
  return async (dispatch) => {
    try {
      // Fix: send a copy instead of mutating the caller's formData object.
      const payload = {...formData, planId}
      const response = await axios.post(`/api/bills/${tripId}`, payload)
      const newBill = response.data
      dispatch(gotNewBill(newBill))
    } catch (err) {
      console.error(err)
    }
  }
}
// The bills slice starts out empty.
const initialState = []

// Take a look at app/redux/index.js to see where this reducer is
// added to the Redux store with combineReducers
/**
 * Bills reducer: SET_BILLS replaces the list, GOT_NEW_BILL appends one bill.
 */
export default function (state = initialState, action) {
  if (action.type === SET_BILLS) {
    return action.bills
  }
  if (action.type === GOT_NEW_BILL) {
    return state.concat(action.bill)
  }
  return state
}
|
<reponame>ch1huizong/learning<gh_stars>0
#!/usr/bin/env python
"""Demonstrate each component returned by urlparse()."""
from __future__ import print_function

try:
    # Python 3 module name.
    from urllib.parse import urlparse
except ImportError:
    # Python 2 fallback (the original script targeted the py2 module name).
    from urlparse import urlparse

url = 'http://netloc/path;param?query=arg#frag'
parsed = urlparse(url)
# Print every component; output is identical to the original py2 print statements.
print('scheme :', parsed.scheme)
print('netloc :', parsed.netloc)
print('path :', parsed.path)
print('params :', parsed.params)
print('query :', parsed.query)
print('fragment :', parsed.fragment)
print('username :', parsed.username)
print('password :', parsed.password)
print('hostname :', parsed.hostname, '(netloc in lowercase)')
print('port :', parsed.port)
|
//
// RFSettings.h
// Micro.blog
//
// Created by <NAME> on 5/13/17.
// Copyright © 2017 Riverfold Software. All rights reserved.
//
#import <Foundation/Foundation.h>
// Class-method-only facade over the app's persisted settings (accounts, blog
// configuration, Micropub endpoints, drafts and UI state).
@interface RFSettings : NSObject
+ (void) clearAllSettings; //BE VERY CAREFUL WITH THIS!!!
// Account/blog capability checks and sharing preferences.
+ (BOOL) hasExternalBlog;
+ (BOOL) hasMicropubBlog;
+ (BOOL) needsExternalBlogSetup;
+ (BOOL) prefersPlainSharedURLs;
+ (BOOL) prefersBookmarkSharedURLs;
+ (BOOL) prefersExternalBlog;
+ (void) setPrefersPlainSharedURLs:(BOOL)value;
+ (void) setPrefersBookmarkSharedURLs:(BOOL)value;
+ (void) setPrefersExternalBlog:(BOOL)value;
+ (NSString *) accountDefaultSite;
+ (NSArray *) accountsUsernames;
+ (void) setAccountUsernames:(NSArray *)usernames;
+ (void) addAccountUsername:(NSString *)username;
//Snippets specific settings
+ (BOOL) hasSnippetsBlog;
+ (NSString*) snippetsUsername;
+ (NSString*) snippetsPassword;
+ (NSString*) snippetsPasswordForCurrentUser:(BOOL)useCurrentUser;
+ (NSString*) snippetsAccountFullName;
+ (NSDictionary*) selectedBlogInfo;
+ (NSString*) selectedBlogUid;
+ (void) setSnippetsUsername:(NSString*)username;
+ (void) setSnippetsPassword:(NSString*)password;
+ (void) setSnippetsPassword:(NSString*)password useCurrentUser:(BOOL)useCurrentUser;
+ (void) setAccountDefaultSite:(NSString*)value;
+ (void) setSnippetsAccountFullName:(NSString*)value;
+ (void) setSelectedBlogInfo:(NSDictionary*)blogInfo;
+ (NSArray*) blogList;
+ (void) setBlogList:(NSArray*)blogList;
//External Blog settings
+ (NSString*) externalBlogEndpoint;
+ (NSString*) externalBlogID;
+ (NSString*) externalBlogUsername;
+ (NSString*) externalBlogPassword;
+ (NSString*) externalBlogCategory;
+ (NSString*) externalBlogFormat;
+ (NSString*) externalBlogApp;
+ (BOOL) externalBlogUsesWordPress;
+ (void) setExternalBlogEndpoint:(NSString*)value;
+ (void) setExternalBlogID:(NSString*)value;
+ (void) setExternalBlogUsername:(NSString*)value;
+ (void) setExternalBlogPassword:(NSString*)value;
+ (void) setExternalBlogCategory:(NSString*)value;
+ (void) setExternalBlogFormat:(NSString*)value;
+ (void) setExternalBlogApp:(NSString*)value;
//Micropub specific settings
+ (NSString*) externalMicropubMe;
+ (NSString*) externalMicropubPostingEndpoint;
+ (NSString*) externalMicropubMediaEndpoint;
+ (NSString*) externalMicropubState;
+ (NSString*) externalMicropubTokenEndpoint;
+ (void) setExternalMicropubMe:(NSString*)value;
+ (void) setExternalMicropubPostingEndpoint:(NSString*)value;
+ (void) setExternalMicropubMediaEndpoint:(NSString*)value;
+ (void) setExternalMicropubState:(NSString*)value;
+ (void) setExternalMicropubTokenEndpoint:(NSString*)value;
//Drafts
+ (NSString *) draftTitle;
+ (NSString *) draftText;
+ (void) setDraftTitle:(NSString *)value;
+ (void) setDraftText:(NSString *)value;
// UI state persistence.
+ (NSString *) preferredContentSize;
+ (void) setPreferredContentSize:(NSString *)value;
+ (float) lastStatusBarHeight;
+ (void) setLastStatusBarHeight:(float)value;
// One-time key migrations between settings storage schemes.
+ (void) migrateAllKeys;
+ (void) migrateCurrentUserKeys;
@end
|
<gh_stars>0
import React from 'react';
import { __ } from '@wordpress/i18n';
import { PanelBody } from '@wordpress/components';
import { props } from '@eightshift/frontend-libs/scripts';
import { SelectOptionOptions as SelectOptionOptionsComponent } from '../../../components/select-option/components/select-option-options';
/**
 * Inspector options panel for the Select Option block.
 * Wraps the shared SelectOptionOptions component in a Gutenberg PanelBody,
 * forwarding the block's `selectOption` attributes via the props() helper.
 */
export const SelectOptionOptions = ({ attributes, setAttributes }) => {
	return (
		<PanelBody title={__('Select Option', 'eightshift-forms')}>
			<SelectOptionOptionsComponent
				{...props('selectOption', attributes, {
					setAttributes,
				})}
			/>
		</PanelBody>
	);
};
|
/**
 * @example Lazy Instantiation
 *
 * A basic example demonstrating how a Container contains other items using the items config.
 */
Ext.require('Ext.tab.Panel');
Ext.require('Ext.window.MessageBox');

Ext.onReady(function() {
    Ext.create('Ext.tab.Panel', {
        renderTo: Ext.getBody(),
        height: 100,
        width: 200,
        items: [
            {
                // Explicitly define the xtype of this Component configuration.
                // This tells the Container (the tab panel in this case)
                // to instantiate a Ext.panel.Panel when it deems necessary
                xtype: 'panel',
                title: 'Tab One',
                html: 'The first tab',
                listeners: {
                    render: function() {
                        Ext.MessageBox.alert('Rendered One', 'Tab One was rendered.');
                    }
                }
            },
            {
                // this component configuration does not have an xtype since 'panel' is the default
                // xtype for all Component configurations in a Container
                title: 'Tab Two',
                html: 'The second tab',
                listeners: {
                    render: function() {
                        // Fix: the alert title said 'Rendered One' — a copy-paste
                        // slip, since the message refers to Tab Two.
                        Ext.MessageBox.alert('Rendered Two', 'Tab Two was rendered.');
                    }
                }
            }
        ]
    });
});
|
package com.pedrogomez.renderers;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.pedrogomez.renderers.exception.*;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import java.util.LinkedList;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
/**
* Test class created to check the correct behaviour of RendererBuilder
*
* @author <NAME>.
*/
// Unit tests for RendererBuilder: argument validation (null prototypes/content/
// parent/inflater), renderer creation vs. recycling from convertView tags, and
// view-type bookkeeping. Statement order inside each test is significant.
public class RendererBuilderTest {
/*
* Test data
*/
private ObjectRendererBuilder rendererBuilder;
private List<Renderer<Object>> prototypes;
private ObjectRenderer objectRenderer;
private SubObjectRenderer subObjectRenderer;
/*
* Mocks
*/
@Mock
private View mockedConvertView;
@Mock
private ViewGroup mockedParent;
@Mock
private LayoutInflater mockedLayoutInflater;
@Mock
private Object mockedContent;
@Mock
private View mockedRendererdView;
/*
* Before and after methods
*/
@Before
public void setUp() {
initializeMocks();
initializePrototypes();
initializeRendererBuilder();
}
/*
* Test methods
*/
@Test(expected = NeedsPrototypesException.class)
public void shouldThrowNeedsPrototypeExceptionIfPrototypesIsNull() {
rendererBuilder = new ObjectRendererBuilder(null);
}
@Test(expected = NeedsPrototypesException.class)
public void shouldThrowNeedsPrototypeExceptionIfPrototypesIsEmpty() {
prototypes = new LinkedList<Renderer<Object>>();
initializeRendererBuilder();
rendererBuilder = new ObjectRendererBuilder(prototypes);
}
@Test(expected = NullContentException.class)
public void shouldThrowNullContentExceptionIfBuildRendererWithoutContent() {
buildRenderer(null, mockedConvertView, mockedParent, mockedLayoutInflater);
}
@Test(expected = NullParentException.class)
public void shouldThrowNullParentExceptionIfBuildRendererWithoutParent() {
buildRenderer(mockedContent, mockedConvertView, null, mockedLayoutInflater);
}
@Test(expected = NullPrototypeClassException.class)
public void shouldThrowNullPrototypeClassExceptionIfRendererBuilderImplementationReturnsNullPrototypeClassAndGetItemViewType() {
when(rendererBuilder.getPrototypeClass(mockedContent)).thenReturn(null);
buildRenderer(mockedContent, mockedConvertView, mockedParent, mockedLayoutInflater);
rendererBuilder.getItemViewType(mockedContent);
}
@Test(expected = NullPrototypeClassException.class)
public void shouldThrowNullPrototypeClassExceptionIfRendererBuilderImplementationReturnsNullPrototypeClassAndBuildOneRenderer() {
when(rendererBuilder.getPrototypeClass(mockedContent)).thenReturn(null);
buildRenderer(mockedContent, mockedConvertView, mockedParent, mockedLayoutInflater);
rendererBuilder.build();
}
@Test(expected = NullLayoutInflaterException.class)
public void shouldThrowNullParentExceptionIfBuildARendererWithoutLayoutInflater() {
buildRenderer(mockedContent, mockedConvertView, mockedParent, null);
}
// A null convertView must force creation of a fresh renderer of the prototype class.
@Test
public void shouldReturnCreatedRenderer() {
when(rendererBuilder.getPrototypeClass(mockedContent)).thenReturn(ObjectRenderer.class);
Renderer<Object> renderer = buildRenderer(mockedContent, null, mockedParent, mockedLayoutInflater);
assertEquals(objectRenderer.getClass(), renderer.getClass());
}
// A convertView tagged with a renderer of the right class must be reused as-is.
@Test
public void shouldReturnRecycledRenderer() {
when(rendererBuilder.getPrototypeClass(mockedContent)).thenReturn(ObjectRenderer.class);
when(mockedConvertView.getTag()).thenReturn(objectRenderer);
Renderer<Object> renderer = buildRenderer(mockedContent, mockedConvertView, mockedParent, mockedLayoutInflater);
assertEquals(objectRenderer, renderer);
}
// A convertView tagged with a renderer of the wrong class must NOT be reused.
@Test
public void shouldCreateRendererEvenIfTagInConvertViewIsNotNull() {
when(rendererBuilder.getPrototypeClass(mockedContent)).thenReturn(ObjectRenderer.class);
when(mockedConvertView.getTag()).thenReturn(subObjectRenderer);
Renderer<Object> renderer = buildRenderer(mockedContent, mockedConvertView, mockedParent, mockedLayoutInflater);
assertEquals(objectRenderer.getClass(), renderer.getClass());
}
@Test
public void shouldReturnPrototypeSizeOnGetViewTypeCount() {
assertEquals(prototypes.size(), rendererBuilder.getViewTypeCount());
}
/*
* Auxiliary methods
*/
private void initializeMocks() {
MockitoAnnotations.initMocks(this);
}
private void initializePrototypes() {
prototypes = new LinkedList<Renderer<Object>>();
objectRenderer = new ObjectRenderer();
objectRenderer.setView(mockedRendererdView);
subObjectRenderer = new SubObjectRenderer();
subObjectRenderer.setView(mockedRendererdView);
prototypes.add(objectRenderer);
prototypes.add(subObjectRenderer);
}
// Wrap the builder in a Mockito spy so tests can stub getPrototypeClass().
private void initializeRendererBuilder() {
rendererBuilder = new ObjectRendererBuilder(prototypes);
rendererBuilder = spy(rendererBuilder);
}
// Drives the builder's fluent API the same way an adapter would.
private Renderer<Object> buildRenderer(Object content, View convertView, ViewGroup parent, LayoutInflater layoutInflater) {
rendererBuilder.withContent(content);
rendererBuilder.withParent(parent);
rendererBuilder.withLayoutInflater(layoutInflater);
rendererBuilder.withConvertView(convertView);
return rendererBuilder.build();
}
}
|
#!/bin/bash
# conda-build script: configure, build, install and test the package.
# Get an updated config.sub and config.guess
cp -r ${BUILD_PREFIX}/share/libtool/build-aux/config.* .
# Strip -DNDEBUG from the compile flags.
# See: https://gitlab.com/gnutls/gnutls/issues/665
export CPPFLAGS="${CPPFLAGS//-DNDEBUG/}"
export CFLAGS="${CFLAGS//-DNDEBUG/}"
declare -a configure_opts
# What to build
configure_opts+=(--enable-shared)
configure_opts+=(--disable-static)
configure_opts+=(--disable-documentation)
# Building with conda-forge gmp causes a strange segfault.
# Using mini-gmp seems to solve the issue and gnutls still works.
#configure_opts+=(--enable-mini-gmp)
# The assembler code does not build for Apple Silicon.
if [[ "$target_platform" == "osx-arm64" ]]; then
configure_opts+=(--disable-assembler)
fi
# --disable-openssl: do not include OpenSSL glue in demo program; especially
# important on macOS to avoid picking up older versions in Apple's SDK.
configure_opts+=(--disable-openssl)
# Dump config.log on configure failure to aid CI debugging.
./configure --prefix="${PREFIX}" \
--libdir="${PREFIX}/lib/" \
--with-lib-path="${PREFIX}/lib/" \
${configure_opts[@]} \
|| { cat config.log; exit 1; }
make -j${CPU_COUNT} ${VERBOSE_AT}
make install ${VERBOSE_AT}
make check
|
<gh_stars>10-100
package nl.pvanassen.steam.store;
import com.google.common.collect.ImmutableSet;
import nl.pvanassen.steam.community.friends.FriendService;
import nl.pvanassen.steam.community.friends.SteamFriendService;
import nl.pvanassen.steam.error.SteamException;
import nl.pvanassen.steam.http.Http;
import nl.pvanassen.steam.store.buy.BuyService;
import nl.pvanassen.steam.store.buy.SteamBuyService;
import nl.pvanassen.steam.store.buyorder.BuyOrderService;
import nl.pvanassen.steam.store.buyorder.SteamBuyOrderService;
import nl.pvanassen.steam.store.history.HistoryService;
import nl.pvanassen.steam.store.history.SteamHistoryService;
import nl.pvanassen.steam.store.inventory.InventoryService;
import nl.pvanassen.steam.store.inventory.SteamInventoryService;
import nl.pvanassen.steam.store.item.ItemService;
import nl.pvanassen.steam.store.item.SteamItemService;
import nl.pvanassen.steam.store.listing.ListingService;
import nl.pvanassen.steam.store.listing.SteamListingService;
import nl.pvanassen.steam.store.login.LoginService;
import nl.pvanassen.steam.store.login.SteamLoginService;
import nl.pvanassen.steam.store.marketpage.AppIds;
import nl.pvanassen.steam.store.marketpage.MarketPageService;
import nl.pvanassen.steam.store.marketpage.SteamMarketPageService;
import nl.pvanassen.steam.store.tradeoffer.SteamTradeOfferService;
import nl.pvanassen.steam.store.tradeoffer.TradeOfferService;
import java.util.Set;
/**
* Interface to the steam store
*
* @author <NAME>
*/
/**
 * Default {@link StoreService} implementation: a thin facade that wires up
 * one concrete Steam sub-service per feature area (buying, history,
 * inventory, ...) over a shared {@link Http} transport and exposes them
 * through getters. All fields are final and set once in the constructor.
 */
class SteamService implements StoreService {
    // Shared HTTP transport; also the source of the session cookies.
    private final Http http;
    // Immutable set of known market app ids, resolved once at construction.
    private final Set<Integer> appIds;
    private final BuyService buyService;
    private final BuyOrderService buyOrderService;
    private final HistoryService historyService;
    private final InventoryService inventoryService;
    private final ListingService listingService;
    private final ItemService itemService;
    private final LoginService loginService;
    private final MarketPageService marketPageService;
    private final TradeOfferService tradeofferService;
    private final FriendService friendService;

    /**
     * @param http For mocking
     * @param username Steam account name, forwarded to the per-user services
     * @throws SteamException if no app ids can be resolved (the library
     *         cannot operate without them, so fail fast)
     */
    private SteamService(Http http, String username) {
        appIds = ImmutableSet.copyOf(AppIds.getAppids());
        if (appIds.isEmpty()) {
            throw new SteamException("Error initializing Steam library, app ids empty");
        }
        this.http = http;
        loginService = new SteamLoginService(http);
        marketPageService = new SteamMarketPageService(http, username);
        buyService = new SteamBuyService(http, username);
        buyOrderService = new SteamBuyOrderService(http, username);
        historyService = new SteamHistoryService(http);
        listingService = new SteamListingService(http, username);
        itemService = new SteamItemService(http);
        tradeofferService = new SteamTradeOfferService(http);
        inventoryService = new SteamInventoryService(http, username, appIds);
        friendService = new SteamFriendService(http, username);
    }

    /** Builds a service from a raw cookie string and user name. */
    SteamService(String cookies, String username) {
        this(Http.getInstance(cookies, username), username);
    }

    /**
     * {@inheritDoc}
     *
     * @see nl.pvanassen.steam.store.StoreService#getAppIds()
     */
    @Override
    public Set<Integer> getAppIds() {
        return appIds;
    }

    /**
     * {@inheritDoc}
     *
     * @see nl.pvanassen.steam.store.StoreService#getBuyService()
     */
    @Override
    public BuyService getBuyService() {
        return buyService;
    }

    /**
     * {@inheritDoc}
     *
     * @see nl.pvanassen.steam.store.StoreService#getBuyOrderService()
     */
    @Override
    public BuyOrderService getBuyOrderService() {
        return buyOrderService;
    }

    /**
     * {@inheritDoc}
     *
     * @see nl.pvanassen.steam.store.StoreService#getHistoryService()
     */
    @Override
    public HistoryService getHistoryService() {
        return historyService;
    }

    /**
     * {@inheritDoc}
     *
     * @see nl.pvanassen.steam.store.StoreService#getInventoryService()
     */
    @Override
    public InventoryService getInventoryService() {
        return inventoryService;
    }

    /**
     * {@inheritDoc}
     *
     * @see nl.pvanassen.steam.store.StoreService#getListingService()
     */
    @Override
    public ListingService getListingService() {
        return listingService;
    }

    /**
     * {@inheritDoc}
     *
     * @see nl.pvanassen.steam.store.StoreService#getItemService()
     */
    @Override
    public ItemService getItemService() {
        return itemService;
    }

    /**
     * {@inheritDoc}
     *
     * @see nl.pvanassen.steam.store.StoreService#getLoginService()
     */
    @Override
    public LoginService getLoginService() {
        return loginService;
    }

    /**
     * {@inheritDoc}
     *
     * @see nl.pvanassen.steam.store.StoreService#getMarketPageService()
     */
    @Override
    public MarketPageService getMarketPageService() {
        return marketPageService;
    }

    /**
     * {@inheritDoc}
     *
     * @see nl.pvanassen.steam.store.StoreService#getTradeofferService()
     */
    @Override
    public TradeOfferService getTradeofferService() {
        return tradeofferService;
    }

    /**
     * {@inheritDoc}
     *
     * @see nl.pvanassen.steam.store.StoreService#getFriendService()
     */
    @Override
    public FriendService getFriendService() {
        return friendService;
    }

    /**
     * {@inheritDoc}
     *
     * @see nl.pvanassen.steam.store.StoreService#getCookies()
     */
    @Override
    public String getCookies() {
        return http.getCookies();
    }
}
|
#!/bin/bash
# Smoke-test the development environment: print the version of each required
# tool (Docker, docker-compose, Go, Hyperledger Fabric binaries and Fabric CA).
# A missing tool surfaces as a "command not found" error in the output.
echo "============== Validate Docker =========="
docker version
docker images
echo "========== Validate Docker Compose ======"
docker-compose version
echo "============== Validate version =========="
go version
echo "============== GOPATH =========="
echo $GOPATH
echo "============== Fabric =========="
peer version
orderer version
echo "============== Fabric CA ======="
fabric-ca-client version
fabric-ca-server version
|
package main
import (
"fmt"
"github.com/steinfletcher/github-org-clone/cloner"
"github.com/steinfletcher/github-org-clone/github"
"github.com/steinfletcher/github-org-clone/shell"
"github.com/urfave/cli"
"log"
"os"
"time"
)
// Build metadata; "version" and "commit" are intended to be overridden at
// link time (e.g. via -ldflags by a release tool such as goreleaser).
var (
	version = "dev"
	commit  = ""
	date    = time.Now().String()
)
// main configures the urfave/cli application (flags, metadata) and runs the
// clone action: validate inputs, derive the target directory, then clone all
// repos of the given org/team.
func main() {
	app := cli.NewApp()
	app.Author = "<NAME>"
	app.Name = "github-org-clone"
	app.Usage = "clone github team repos"
	app.UsageText = "github-org-clone -o MyOrg -t MyTeam"
	app.Version = version
	app.EnableBashCompletion = true
	app.Description = "A simple cli to clone all the repos managed by a github team"
	app.Metadata = map[string]interface{}{
		"commit": commit,
		"date":   date,
	}
	app.Flags = []cli.Flag{
		cli.StringFlag{
			Name:  "org, o",
			Usage: "github organisation",
		},
		cli.StringFlag{
			Name:  "team, t",
			Usage: "github team",
		},
		cli.StringFlag{
			Name:   "username, u",
			Usage:  "github username",
			EnvVar: "GITHUB_USER,GITHUB_USERNAME",
		},
		cli.StringFlag{
			Name:   "token, k",
			Usage:  "github personal access token",
			EnvVar: "GITHUB_TOKEN,GITHUB_API_KEY,GITHUB_PERSONAL_ACCESS_TOKEN",
		},
		cli.StringFlag{
			Name:  "dir, d",
			Usage: "directory to clone into. Defaults to the org name or org/team name if defined",
		},
		cli.StringFlag{
			Name:  "api, a",
			Value: "https://api.github.com",
			Usage: "github api url",
		},
	}
	app.Action = func(c *cli.Context) error {
		username := c.String("username")
		token := c.String("token")
		team := c.String("team")
		org := c.String("org")
		dir := c.String("dir")
		api := c.String("api")
		if len(username) == 0 {
			die("env var GITHUB_USERNAME or flag -u must be set", c)
		}
		if len(token) == 0 {
			die("env var GITHUB_TOKEN or flag -k must be set", c)
		}
		if len(org) == 0 {
			die("github organisation (-o) not set", c)
		}
		if len(dir) == 0 {
			if len(team) == 0 {
				dir = org
			} else {
				if _, err := os.Stat(org); os.IsNotExist(err) {
					// Previously the Mkdir error was discarded; a failure here
					// would only surface later inside the cloner with a less
					// helpful message. Report it immediately instead.
					if err := os.Mkdir(org, os.ModePerm); err != nil {
						return cli.NewExitError(err.Error(), 1)
					}
				}
				dir = fmt.Sprintf("%s/%s", org, team)
			}
		}
		sh := shell.NewShell()
		githubCli := github.NewGithub(username, token, api)
		cl := cloner.NewCloner(githubCli, sh, dir)
		err := cl.Clone(org, team)
		if err != nil {
			return cli.NewExitError(err.Error(), 1)
		}
		return nil
	}
	// app.Run's error was previously ignored, so failures could exit with
	// status 0. Report the error and exit non-zero.
	if err := app.Run(os.Args); err != nil {
		log.Fatal(err)
	}
}
// die prints the application help followed by msg and terminates the
// process with a non-zero exit status (via log.Fatal). It never returns.
func die(msg string, c *cli.Context) {
	cli.ShowAppHelp(c)
	log.Fatal(msg)
}
|
#! /bin/sh
# PBS job script: loads the right gcc module for the current host/user, then
# runs one batch of simcoevolity validation simulations.

username="$USER"
# POSIX sh's `[` only defines `=` for string comparison; `==` is a bashism
# and fails on strict /bin/sh implementations (e.g. dash).
if [ "$username" = "aubjro" ]
then
    module load gcc/6.1.0
fi

# When running under PBS, restore the login environment and move to the
# directory the job was submitted from.
if [ -n "$PBS_JOBNAME" ]
then
    . "${PBS_O_HOME}/.bash_profile"
    cd "$PBS_O_WORKDIR" || exit 1
    module load gcc/5.3.0
fi

simname="03pairs-dpp-root-0010-100k"
cfgpath="../configs/config-${simname}.yml"
outputdir="../simulations/validation/${simname}/batch005"
rngseed=670610568
nreps=100

mkdir -p "$outputdir"
simcoevolity --seed="$rngseed" -n "$nreps" -o "$outputdir" "$cfgpath"
|
#!/bin/bash
# Apply the Debian patch set, then configure, build and install with CMake.
# set -e aborts on the first failure; previously a failed patch, a failed
# `cd build`, or a failed cmake step was ignored and later commands ran in
# the wrong state (or the wrong directory).
set -e

git apply debian/patches/dynamically-link-tools.patch
git apply debian/patches/multiarch.patch
git apply debian/patches/rename-library.patch
git apply debian/patches/enable-fpic-for-static-lib.patch

# Always build from a clean tree.
rm -rf build
mkdir build
cd build

cmake ..
make -j"$(nproc)" VERBOSE=1
sudo make install
<filename>C++/space.cpp
#include <iostream>
// Reads the user's Earth weight, asks which planet they are visiting, and
// prints the equivalent weight on that planet using surface-gravity factors
// relative to Earth.
int main() {
    double current_earth_weight; // Current earth weight
    int visit;                   // Planet being visited (menu choice 1-6)
    std::cout << "Please enter your current earth weight: ";
    std::cin >> current_earth_weight;
    std::cout << "\nI have information for the following planets:\n\n";
    std::cout << " 1. Venus 2. Mars 3. Jupiter\n";
    std::cout << " 4. Saturn 5. Uranus 6. Neptune\n\n";
    std::cout << "Which planet are you visiting? ";
    std::cin >> visit;
    switch (visit) {
        case 1: //Venus
            current_earth_weight = current_earth_weight * 0.78;
            break;
        case 2: //Mars
            current_earth_weight = current_earth_weight * 0.39;
            break;
        case 3: //Jupiter
            current_earth_weight = current_earth_weight * 2.65;
            break;
        case 4: //Saturn
            current_earth_weight = current_earth_weight * 1.17;
            break;
        case 5: //Uranus
            current_earth_weight = current_earth_weight * 1.05;
            break;
        case 6: //Neptune
            current_earth_weight = current_earth_weight * 1.23;
            break;
        default:
            // Previously an out-of-range choice fell through silently and the
            // unmodified Earth weight was printed as the answer. Report the
            // bad input and exit with a failure status instead.
            std::cout << "Sorry, I have no information for that planet.\n";
            return 1;
    }
    std::cout << "Your weight is: " << current_earth_weight << "\n";
}
|
def bubble_sort(arr):
# loop through the array
for i in range(len(arr)-1):
# loop through each element in the array
for j in range(len(arr)-i-1):
# if element is greater than next element
if arr[j] > arr[j+1]:
# swap positions
arr[j], arr[j+1] = arr[j+1], arr[j]
# return sorted array
return arr
# Demo: sort a sample list and show the result.
print(bubble_sort([6, 4, 8, 2, 1]))
# Output: [1, 2, 4, 6, 8]
#!/bin/bash
# you need to install sshpass first, for arch e.g.: yay sshpass
# Opens an SSH session to a Raspberry Pi (e.g. a QEMU image) whose port 22 is
# forwarded to localhost:5022, logging in as "pi" with the factory default
# password "raspberry". Password auth is forced and host-key checking is
# disabled because the emulated host key changes between runs.
# NOTE(review): only suitable for local development targets — the password is
# hard-coded on the command line and host verification is off.
sshpass -praspberry ssh pi@localhost -o PreferredAuthentications=password -o PubkeyAuthentication=no -o StrictHostKeyChecking=no -p 5022
package com.pharmacySystem.repository;
import java.util.Set;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import com.pharmacySystem.model.user.Pharmacist;
/**
 * Spring Data repository for {@link Pharmacist} entities, with queries that
 * navigate the Pharmacy -&gt; pharmacists association.
 */
public interface PharmacistRepository extends JpaRepository<Pharmacist, Long> {

    /**
     * All pharmacists employed by the pharmacy with the given id.
     * The original query selected the collection-valued path
     * {@code pharmacists} directly, which is not valid JPQL; a JOIN with an
     * identification variable is required to return the collection elements.
     *
     * @param id pharmacy primary key
     * @return pharmacists of that pharmacy (empty set if none)
     */
    @Query(value = "SELECT ph FROM Pharmacy p JOIN p.pharmacists ph WHERE p.id = ?1")
    Set<Pharmacist> findAllByPharmacy(Long id);

    /**
     * All pharmacists employed by any pharmacy.
     *
     * @return every pharmacist reachable through a pharmacy (empty set if none)
     */
    @Query(value = "SELECT ph FROM Pharmacy p JOIN p.pharmacists ph")
    Set<Pharmacist> findAllPharmacists();
}
|
#!/bin/bash
# Copyright 2015 The Kythe Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Script to package a release tar and create its associated .sha256 checksum.
#
# Usage: package_release.sh <shasum_tool> <path-to-output-tar.gz> [package contents]
#
# In the simplest case, each file given will be placed in the root of the
# resulting archive. The --relpath, --path, and --cp flags change this behavior
# so that file paths can be structured.
#
# --verbose: Log each action as it is taken.
# --path <path>: Each file is copied to ARCHIVE_ROOT/<path>/$(basename file).
# --relpaths <prefix>: Strip $GENDIR, $BINDIR and then <prefix> from each
# file's path. The resulting path is used for the file
# inside of the archive. This combines with --path to
# change the root of the resulting file path.
# --cp <path> <path>: Copy the first file to the archive using exactly the
# second path.
# --exclude <glob>: Exclude all further files matching the given glob.
#
# Example:
# BINDIR=bazel-bin/ \
# package_release.sh /path/to/sha /tmp/b.tar.gz README.adoc LICENSE \
# --path some/path/for/docs kythe/docs/kythe-{overview,storage}.txt \
# --relpaths kythe/docs bazel-bin/kythe/docs/schema/schema.html \
# --cp CONTRIBUTING.md kythe/docs/how-to-contribute.md
#
# Resulting tree in /tmp/b.tar.gz:
# README.adoc
# LICENSE
# kythe/docs/
# kythe-overview.txt
# kythe-storage.txt
# schema.html
# how-to-contribute.md
set -e

# Positional arguments: the checksum tool and the output tarball path; the
# remaining arguments are the flag/file stream parsed in the loop below.
SHASUM_TOOL="$1"
shift
OUT="$1"
shift

# Staging area: files are copied under $PBASE, which becomes the single
# top-level directory of the archive. $P is the *current* copy destination
# and is redirected by --path.
PBASE="$OUT.dir/$(basename "$OUT" .tar.gz)"
P=$PBASE
mkdir -p "$PBASE"
# Clean up the staging directory on any exit (normal, error, or interrupt).
trap "rm -rf '$PWD/$OUT.dir'" EXIT ERR INT

VERBOSE=
# log: echo to stderr only when --verbose was given.
function log() {
  if [[ -z "$VERBOSE" ]]; then
    return
  fi
  echo "$@" >&2
}

EXCLUDE=()
# Parse flags and files in order; flags affect only the files that follow.
while [[ $# -gt 0 ]]; do
  case "$1" in
    --verbose)
      VERBOSE=true
      ;;
    --relpaths)
      RELPATHS=$2
      shift
      ;;
    --path)
      P="$PBASE/$2"
      mkdir -p "$P"
      shift
      ;;
    --cp)
      # Copy $2 into the archive at exactly the path $3.
      mkdir -p "$PBASE/$(dirname "$3")"
      cp "$2" "$PBASE/$3"
      shift 2
      ;;
    --exclude)
      EXCLUDE+=("$2")
      shift
      ;;
    *)
      # A plain file argument: skip it if it matches any exclude pattern.
      skip=
      for exclude in "${EXCLUDE[@]}"; do
        if [[ "$1" =~ $exclude ]]; then
          skip=true
          break
        fi
      done
      if [[ ! -z "$skip" ]]; then
        log "Excluding $1"
      elif [[ -z "$RELPATHS" ]]; then
        log "Copying $1 to $P"
        cp "$1" "$P"/
      else
        # Strip $GENDIR/$BINDIR prefixes and the --relpaths prefix to
        # compute the file's directory inside the archive.
        rp="${1#$GENDIR/}"
        rp="${rp#$BINDIR/}"
        rp="$(dirname "${rp#$RELPATHS/}")"
        mkdir -p "$P/$rp"
        log "Copying $1 to $P/$rp"
        cp "$1" "$P/$rp"
      fi
      ;;
  esac
  shift
done

# Archive the staged tree and emit the matching checksum file.
tar czf "$OUT" -C "$OUT.dir" "$(basename "$PBASE")"
$SHASUM_TOOL "$OUT" > "$OUT.sha256"
|
#!/bin/bash
#
# tag-image.sh. Set xattr metadata on images, so that they become
# searchable based on their contents. Works with OSX/Spotlight.

force=0              # overwrite tags?
provider="clarifai"  # use clarifai, google, amazon, watson, ...
confidence=0.6       # confidence threshold

while getopts "fp:c:" option
do
  case "$option" in
    f) force=1 ;;
    p) provider="$OPTARG" ;;
    c) confidence="$OPTARG" ;;
    *) exit 1 ;;
  esac
done
shift $((OPTIND - 1))

# "$@" must be quoted: the original unquoted $@ word-split image paths
# containing spaces into multiple bogus arguments.
for image in "$@"
do
  #
  # Check if search-related metadata are set already.
  # i.e. have we already tagged this image.
  if xattr -r "$image" \
      | egrep '(kMDItemFinderComment|_kMDItemUserTags|kMDItemOMUserTags)' \
      > /dev/null 2>&1 \
      && [ "$force" -eq 0 ]
  then
    echo "[${image}]: Tags found! Skipping!"
    continue
  fi
  #
  # Get tags, populate a plist/xml blob with them.
  echo "[${image}]: retrieving tags from $provider"
  plist=$( cat <<EOF
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0"><array>
$( fovea --provider "$provider" "$image" --confidence "$confidence" \
   | sed -E 's/^[0-9]\.[0-9]*[[:space:]]+//g' \
   | awk '{printf("<string>%s</string>\n", $0); }' )
</array></plist>
EOF
)
  #
  # If we don't have tags, don't modify metadata.
  if ! echo "$plist" | grep string > /dev/null 2>&1
  then
    echo "[${image}]: no tags found!"
  else
    #
    # But otherwise, we need to write our plist to three places.
    for field in "kMDItemFinderComment" "_kMDItemUserTags" "kMDItemOMUserTags"
    do
      echo "[${image}]: setting ${field}"
      xattr -w "com.apple.metadata:${field}" "$plist" "$image"
    done
  fi
done
import {Stage, Match, MatchGame, Participant, ParticipantResult} from 'brackets-model';
import { BracketsViewer } from './main';
import { locales } from './i18n';
import {TFunction} from "i18next";
// Expose the viewer instance on `window` so it can be used from plain
// <script> consumers as `window.bracketsViewer`.
declare global {
    interface Window {
        bracketsViewer: BracketsViewer,
    }
}

/**
 * The data to display with `brackets-viewer.js`
 */
export interface ViewerData {
    /** The stages to display. */
    stages: Stage[],
    /** The matches of the stage to display. */
    matches: Match[],
    /** The games of the matches to display. */
    matchGames: MatchGame[],
    /** The participants who play in the stage to display. */
    participants: Participant[],
}

/**
 * The possible placements of a participant's origin.
 */
export type Placement = 'none' | 'before' | 'after';

/**
 * An optional config to provide to `brackets-viewer.js`
 */
export interface Config {
    /**
     * An optional selector to select the root element.
     */
    selector?: string,

    /**
     * Where the position of a participant is placed relative to its name.
     * - If `none`, the position is not added.
     * - If `before`, the position is prepended before the participant name. "#1 Team"
     * - If `after`, the position is appended after the participant name, in parentheses. "Team (#1)"
     */
    participantOriginPlacement?: Placement,

    /**
     * Whether to show the origin of a slot (wherever possible).
     */
    showSlotsOrigin?: boolean,

    /**
     * Whether to show the origin of a slot (in the lower bracket of an elimination stage).
     */
    showLowerBracketSlotsOrigin?: boolean,

    /**
     * Whether to highlight every instance of a participant on hover.
     */
    highlightParticipantOnHover?: boolean,

    /**
     * Function which is called when match was clicked.
     * NOTE(review): declared as required (not `participantOnClick?`), so every
     * Config literal must provide it — confirm this is intentional.
     */
    participantOnClick(match: Match, participantId: number|null|undefined): void,
}

/**
 * The possible types of connection between matches.
 */
export type ConnectionType = 'square' | 'straight' | false;

/**
 * The possible types of final.
 */
export type FinalType = 'consolation_final' | 'grand_final';

/**
 * The possible types of bracket.
 */
export type BracketType = 'single-bracket' | 'winner-bracket' | 'loser-bracket' | 'final-group';

/**
 * A function returning an origin hint based on a participant's position.
 */
export type OriginHint = ((position: number) => string) | undefined;

/**
 * A function returning a round name based on its number and the count of rounds.
 */
export type RoundName = (roundNumber: number, roundCount: number) => string;

/**
 * Contains the information about the connections of a match.
 */
export interface Connection {
    connectPrevious?: ConnectionType,
    connectNext?: ConnectionType,
}

/**
 * An item of the ranking.
 */
export interface RankingItem {
    // Index signature: every named property below must therefore be a number.
    [prop: string]: number,
    rank: number,
    id: number,
    played: number,
    wins: number,
    draws: number,
    losses: number,
    forfeits: number,
    scoreFor: number,
    scoreAgainst: number,
    scoreDifference: number,
    points: number,
}

/**
 * Contains information about a header of the ranking and its tooltip.
 */
export interface RankingHeader {
    text: string,
    tooltip: string,
}

/**
 * A formula which computes points given a ranking row.
 */
export type RankingFormula = (ranking: RankingItem) => number;

/**
 * An object mapping ranking properties to their header.
 */
export type RankingHeaders = { [name in keyof RankingItem]: RankingHeader };

/**
 * An object mapping a participant id to its row in the ranking.
 */
export type RankingMap = { [id: number]: RankingItem };

/**
 * Definition of a ranking.
 */
export type Ranking = RankingItem[];

/**
 * Structure containing all the containers for a participant.
 */
export interface ParticipantContainers {
    participant: HTMLElement,
    name: HTMLElement,
    result: HTMLElement,
}

/**
 * The format of a locale.
 */
export type Locale = typeof locales['en'];

/**
 * An object containing all the locales for the project.
 */
export type Locales = { [lang: string]: Locale };
<filename>frontend/src/components/tokens/actions/MergeAction.tsx
import {MergeCellsOutlined} from "@ant-design/icons";
import {Button, Popover} from "antd";
import React, {useCallback, useState} from "react";
import {executeCommands} from "../commands/CommandExecutor";
import {MergeSegmentCommand, StartCommand, StopCommand} from "../commands/Commands";
import {TokenOverviewData} from "../TokenOverviewData";
/**
 * Action button that merges a processor segment with its closest relative.
 * While the command sequence runs, the button shows a loading spinner.
 */
export function MergeAction({row}: { row: TokenOverviewData }) {
    // True while the stop/merge/start command sequence is in flight.
    const [loading, setLoading] = useState(false)

    const onMergeAction = useCallback(async () => {
        setLoading(true)
        // Keep the first node; stop every other node, issue the merge on the
        // kept node, then start the stopped nodes again.
        const keptNode = row.allNodes[0];
        const pausedNodes = row.allNodes.filter(node => node != keptNode);
        const commands = [
            ...pausedNodes.map(node => new StopCommand(node, row.processorName)),
            new MergeSegmentCommand(keptNode, row.processorName, row.segment),
            ...pausedNodes.map(node => new StartCommand(node, row.processorName)),
        ];
        await executeCommands(commands)
        setLoading(false)
    }, [row.processorName, row.segment, row.owner, row.allNodes])

    return <Popover
        content={<p>Merges the segment with its closest relative (segment {row.mergeableSegment}), creating one token
            out of two. <br/>Effectively reduces active event processor threads by one.</p>}
        placement={"bottom"}>
        <Button type="default" loading={loading} onClick={onMergeAction}
                disabled={row.owner == null || row.mergeableSegment === row.segment}>
            <MergeCellsOutlined/>
        </Button>
    </Popover>
}
|
import { IVariant } from "./IVariant";
import { Types } from "./Types";
import { Dim } from "../spirv/Dim";
import { ImageFormat } from "../spirv/ImageFormat";
import { AccessQualifier } from "../spirv/AccessQualifier";
import { StorageClass } from "../spirv/StorageClass";
// Scalar/base categories a SPIR-V type can have (mirrors SPIRV-Cross's
// SPIRType::BaseType enumeration).
export declare enum SPIRBaseType {
    Unknown = 0,
    Void = 1,
    Boolean = 2,
    SByte = 3,
    UByte = 4,
    Short = 5,
    UShort = 6,
    Int = 7,
    UInt = 8,
    Int64 = 9,
    UInt64 = 10,
    AtomicCounter = 11,
    Half = 12,
    Float = 13,
    Double = 14,
    Struct = 15,
    Image = 16,
    SampledImage = 17,
    Sampler = 18,
    AccelerationStructure = 19,
    RayQuery = 20,
    ControlPointArray = 21,
    Interpolant = 22,
    Char = 23
}
// Image-specific portion of a SPIR-V type (dimensionality, format, access).
// NOTE(review): `TypeID` is not imported here — presumably an ambient alias
// declared elsewhere in the package; verify it resolves.
export declare class SPIRTypeImageType {
    type: TypeID;
    dim: Dim;
    depth: boolean;
    arrayed: boolean;
    ms: boolean;
    sampled: number;
    format: ImageFormat;
    access: AccessQualifier;
    clone(): SPIRTypeImageType;
    equals(b: SPIRTypeImageType): boolean;
}
// Full SPIR-V type description: base type plus vector/matrix shape, array
// dimensions, pointer info, struct members and image payload.
export declare class SPIRType extends IVariant {
    static type: Types;
    basetype: SPIRBaseType;
    width: number;
    vecsize: number;
    columns: number;
    array: number[];
    // Parallel to `array`: whether each dimension size is a literal.
    array_size_literal: boolean[];
    pointer_depth: number;
    pointer: boolean;
    forward_pointer: boolean;
    storage: StorageClass;
    member_types: TypeID[];
    member_type_index_redirection: number[];
    image: SPIRTypeImageType;
    type_alias: TypeID;
    parent_type: TypeID;
    member_name_cache: Set<string>;
    constructor(other?: SPIRType);
}
|
<reponame>firmanjabar/restaurant-app<filename>specs/helpers/testFactories.js<gh_stars>10-100
import LikeButtonInitiator from '../../src/scripts/utils/like-button-presenter';
import FavRestaurantIdb from '../../src/scripts/data/restaurant-idb';
/**
 * Test factory: boots the like-button presenter against the real
 * FavRestaurantIdb store for the given restaurant, using the
 * #likeButtonContainer element already present in the test DOM.
 */
async function createLikeButtonPresenterWithRestaurant(restaurant) {
  const likeButtonContainer = document.querySelector('#likeButtonContainer');
  await LikeButtonInitiator.init({
    likeButtonContainer,
    favoriteRestaurant: FavRestaurantIdb,
    data: { restaurant },
  });
}

// eslint-disable-next-line import/prefer-default-export
export { createLikeButtonPresenterWithRestaurant };
|
<filename>apkanalyser/src/jerl/bcm/inj/impl/InjectCollection.java
/*
* Copyright (C) 2012 Sony Mobile Communications AB
*
* This file is part of ApkAnalyser.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jerl.bcm.inj.impl;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;
public class InjectCollection {
public void injectIDRegistration(MethodVisitor mv, int id) {
// TODO: change to correct call
mv.visitFieldInsn(Opcodes.GETSTATIC, "java/lang/System", "out",
"Ljava/io/PrintStream;");
mv.visitLdcInsn("entry: " + id);
mv.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/io/PrintStream",
"println", "(Ljava/lang/String;)V");
}
public void injectSystemOut(MethodVisitor mv, String str) {
mv.visitFieldInsn(Opcodes.GETSTATIC, "java/lang/System", "out",
"Ljava/io/PrintStream;");
mv.visitLdcInsn(str);
mv.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/io/PrintStream",
"println", "(Ljava/lang/String;)V");
}
public void injectMemberFieldSystemOut(MethodVisitor mv, String clazz, String field, String type, String printType, String prefix, boolean isStatic) {
mv.visitFieldInsn(Opcodes.GETSTATIC, "java/lang/System", "out", "Ljava/io/PrintStream;");
mv.visitLdcInsn(prefix);
mv.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/io/PrintStream", "print", "(Ljava/lang/String;)V");
mv.visitFieldInsn(Opcodes.GETSTATIC, "java/lang/System", "out", "Ljava/io/PrintStream;");
if (isStatic) {
mv.visitFieldInsn(Opcodes.GETSTATIC, clazz, field, type);
} else {
mv.visitVarInsn(Opcodes.ALOAD, 0); // this
mv.visitFieldInsn(Opcodes.GETFIELD, clazz, field, type);
}
mv.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/io/PrintStream", "println", "(" + printType + ")V");
}
public void injectMemberFieldArraySystemOut(MethodVisitor mv, String clazz, String field, String type, String printType, String prefix, boolean isStatic, int index) {
mv.visitFieldInsn(Opcodes.GETSTATIC, "java/lang/System", "out", "Ljava/io/PrintStream;");
mv.visitLdcInsn(prefix);
mv.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/io/PrintStream", "print", "(Ljava/lang/String;)V");
mv.visitFieldInsn(Opcodes.GETSTATIC, "java/lang/System", "out", "Ljava/io/PrintStream;");
if (isStatic) {
mv.visitFieldInsn(Opcodes.GETSTATIC, clazz, field, type);
} else {
mv.visitVarInsn(Opcodes.ALOAD, 0); // this
mv.visitFieldInsn(Opcodes.GETFIELD, clazz, field, type);
}
mv.visitLdcInsn(new Integer(index));
if (type.endsWith("[Z") || type.endsWith("[B")) {
mv.visitInsn(Opcodes.BALOAD);
} else if (type.endsWith("[C")) {
mv.visitInsn(Opcodes.CALOAD);
} else if (type.endsWith("[D")) {
mv.visitInsn(Opcodes.DALOAD);
} else if (type.endsWith("[F")) {
mv.visitInsn(Opcodes.FALOAD);
} else if (type.endsWith("[I")) {
mv.visitInsn(Opcodes.IALOAD);
} else if (type.endsWith("[J")) {
mv.visitInsn(Opcodes.LALOAD);
} else if (type.endsWith("[S")) {
mv.visitInsn(Opcodes.SALOAD);
} else {
mv.visitInsn(Opcodes.AALOAD);
}
mv.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/io/PrintStream", "println", "(" + printType + ")V");
}
public void injectSystemOutCurThread(MethodVisitor mv, String str) {
mv.visitFieldInsn(Opcodes.GETSTATIC, "java/lang/System", "out",
"Ljava/io/PrintStream;");
mv.visitLdcInsn(str);
mv.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/io/PrintStream",
"print", "(Ljava/lang/String;)V");
mv.visitFieldInsn(Opcodes.GETSTATIC, "java/lang/System", "out",
"Ljava/io/PrintStream;");
mv.visitMethodInsn(Opcodes.INVOKESTATIC, "java/lang/Thread",
"currentThread", "()Ljava/lang/Thread;");
mv.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/io/PrintStream",
"println", "(Ljava/lang/Object;)V");
}
public void injectGC(MethodVisitor mv) {
mv.visitMethodInsn(Opcodes.INVOKESTATIC, "java/lang/System", "gc", "()V");
}
public void injectCrash(MethodVisitor mv) {
// TODO: change to correct call
mv.visitMethodInsn(Opcodes.INVOKESTATIC, "java/lang/System", "crash", "()V");
}
public void injectRegister(MethodVisitor mv, int id) {
mv.visitLdcInsn(new Integer(id));
mv.visitMethodInsn(Opcodes.INVOKESTATIC, "jerl/bcm/util/Register", "register", "(I)V");
}
public void injectReplaceStackInt(MethodVisitor mv, int i) {
mv.visitInsn(Opcodes.POP);
mv.visitLdcInsn(new Integer(i));
}
public void injectReturn(MethodVisitor mv) {
mv.visitInsn(Opcodes.RETURN);
}
public void injectPrintRegs(MethodVisitor mv) {
mv.visitMethodInsn(Opcodes.INVOKESTATIC, "jerl/bcm/util/Register", "printRegistrations", "()V");
}
}
|
import React, {useState} from 'react';
const App = () => {
const [showTextField, setShowTextField] = useState(true);
const [text, setText] = useState('');
const handleTextChange = (e) => {
setText(e.target.value);
};
return (
<div>
{showTextField && <input type="text" value={text} onChange={handleTextChange} />}
{showTextField && <button onClick={() => setShowTextField(false)}>Go</button>}
<div>{!showTextField && text}</div>
</div>
);
};
export default App; |
Ext.provide('Phlexible.problems.model.Problem');

// Record definition for one problem entry shown in the problems grid:
// message/hint text, severity, an optional link, the reporting source and
// the creation / last-check timestamps.
Phlexible.problems.model.Problem = Ext.data.Record.create([
    'id',
    'iconCls',
    'msg',
    'hint',
    'severity',
    'link',
    'source',
    'createdAt',
    'lastCheckedAt'
]);
/*
<NAME> - N00150552
06.03.19
World.js
+ + + + + + + + + + +
+ World Map 🌀 (Pages)
+ Index
+ ¬ App
+ ¬ Portal
+ ¬ Login
+ ¬ Signup
+ ¬ World <--- You are here 🚀
+ ¬ Playground
+ ¬ Saves
*/
import React, { Component } from 'react';
import PropTypes from 'prop-types';
import axios from 'axios';
// Material Design Components
import { TopAppBarFixedAdjust } from '@material/react-top-app-bar';
// My Components
import AppBar from '../input/AppBar';
import Playground from './Playground';
import Saves from './Saves';
/*
World functions as an environment for creating jewellery 💎
*/
class World extends Component {
constructor(props) {
super(props);
this.state = {
drawerOpen: true,
tabIndex: 0,
navigationIcon: true,
// For the Scene
weightId: '',
geometry: 'Cube',
wireframe: false,
subdivisions: 0,
adjacentWeight: 0.125,
edgePointWeight: 0.375,
connectingEdgesWeight: 5
};
this.toggleDrawer = this.toggleDrawer.bind(this);
this.toggleTab = this.toggleTab.bind(this);
this.handleChange = this.handleChange.bind(this);
this.loadWeights = this.loadWeights.bind(this);
this.saveWeights = this.saveWeights.bind(this);
this.handleDelete = this.handleDelete.bind(this);
this.clear = this.clear.bind(this);
}
toggleDrawer() {
this.setState(prevState => ({ drawerOpen: !prevState.drawerOpen }));
}
toggleTab(tabIndex) {
this.setState(prevState => ({
tabIndex,
navigationIcon: !prevState.navigationIcon
}));
}
handleChange(event, sliderValue) {
const { id } = event.target;
const name = event.target.type === 'checkbox' ? 'wireframe' : event.target.name;
const value = event.target.type === 'checkbox' ? event.target.checked : event.target.value;
this.setState({
[name]: value
});
// Truthy/falsy
if (sliderValue || sliderValue === 0) {
this.setState({
[id]: sliderValue
});
}
}
loadWeights(data) {
this.setState({
drawerOpen: true,
tabIndex: 0,
navigationIcon: true,
// For the Scene
weightId: data.weightId,
geometry: data.geometry,
wireframe: false,
subdivisions: data.subdivisions,
adjacentWeight: data.adjacentWeight,
edgePointWeight: data.edgePointWeight,
connectingEdgesWeight: data.connectingEdgesWeight
});
}
saveWeights() {
const { id } = this.props;
const { weightId } = this.state;
const { geometry } = this.state;
const { subdivisions } = this.state;
const { adjacentWeight } = this.state;
const { edgePointWeight } = this.state;
const { connectingEdgesWeight } = this.state;
axios.put('api/users/save', {
id,
weightId,
geometry,
subdivisions,
adjacentWeight,
edgePointWeight,
connectingEdgesWeight
});
}
handleDelete() {
console.log('wowo');
this.setState({ weightId: '' });
}
clear() {
this.setState({
weightId: '',
geometry: 'Cube',
wireframe: false,
subdivisions: 0,
adjacentWeight: 0.125,
edgePointWeight: 0.375,
connectingEdgesWeight: 5
});
}
render() {
// Navigation
const { drawerOpen } = this.state;
const { tabIndex } = this.state;
const { navigationIcon } = this.state;
// For Scene
const { geometry } = this.state;
const { wireframe } = this.state;
const { subdivisions } = this.state;
const { adjacentWeight } = this.state;
const { edgePointWeight } = this.state;
const { connectingEdgesWeight } = this.state;
// Props
const { id } = this.props;
const { logout } = this.props;
return (
<div className="drawer-container">
<AppBar
icon={navigationIcon}
toggleDrawer={this.toggleDrawer}
tabIndex={tabIndex}
toggleTab={this.toggleTab}
logout={logout}
/>
<TopAppBarFixedAdjust className="top-app-bar-fix-adjust">
{!tabIndex ? (
<Playground
drawerOpen={drawerOpen}
handleChange={this.handleChange}
saveWeights={this.saveWeights}
clear={this.clear}
geometry={geometry}
wireframe={wireframe}
subdivisions={subdivisions}
adjacentWeight={adjacentWeight}
edgePointWeight={edgePointWeight}
connectingEdgesWeight={connectingEdgesWeight}
/>
) : (
<Saves
id={id}
loadWeights={this.loadWeights}
handleDelete={this.handleDelete}
/>
)}
</TopAppBarFixedAdjust>
</div>
);
}
}
// Runtime prop validation: `logout` is mandatory, `id` is the signed-in
// user's id and may be absent before authentication completes.
World.propTypes = {
  id: PropTypes.string,
  logout: PropTypes.func.isRequired
};

// Specifies the default values for props:
World.defaultProps = {
  id: ''
};

export default World;
|
require 'test_helper'
# Placeholder test case for HostsHelper; view-helper specs belong here.
class HostsHelperTest < ActionView::TestCase
end
|
def add_number(a, b):
    """Return ``a + b``.

    If the operands cannot be added (``TypeError``), print a short message
    and return ``None`` explicitly — the original fell off the end of the
    handler and returned ``None`` implicitly, which hid the failure mode
    from readers.
    """
    try:
        return a + b
    except TypeError:
        print("Please enter numeric values")
        return None
class Foo:
    """Plain value holder mirroring the ``Foo`` protobuf message fields."""

    def __init__(self, id, bar):
        # Keep the identifier and payload exactly as supplied.
        self.id = id
        self.bar = bar
# Assuming the existence of a protobuf definition file named foo_pb2
import socket
import foo_pb2
def main():
    # Listen for UDP datagrams on localhost:12345; each datagram is expected
    # to be a serialized foo_pb2.Foo protobuf message.
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.bind(('127.0.0.1', 12345))
    while True:  # serve forever; terminate with Ctrl-C
        # 1024 bytes is assumed large enough for one message -- TODO confirm
        data, addr = sock.recvfrom(1024)
        foo = foo_pb2.Foo()
        foo.ParseFromString(data)
        print("Got foo with id={0} and bar={1}".format(foo.id, foo.bar))
if __name__ == "__main__":
    main()
<reponame>bcgov/EDUC-GRAD-TEST-AUTOMATION<filename>backend/grad-trax-test-suite/src/main/java/ca/bc/gov/educ/gtts/services/ReportServiceImpl.java<gh_stars>0
package ca.bc.gov.educ.gtts.services;
import org.javers.core.diff.Diff;
import org.springframework.stereotype.Service;
@Service
public class ReportServiceImpl implements ReportService {

    /**
     * Prints the differences recorded in {@code diff} for the entity
     * identified by {@code ref}; does nothing when there are no changes.
     */
    @Override
    public void reportDifferences(String ref, Diff diff) {
        if (!diff.hasChanges()) {
            return;
        }
        //TODO: output to file or other means
        String message = ref + " had the following reported differences: " + diff.prettyPrint();
        System.out.println(message);
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.sdb.layout2.index;
import java.util.HashMap;
import java.util.Map;
import org.apache.jena.sdb.Store ;
/** Experimental : cache for int <=> node hash
*
*/
/** Experimental : cache for int &lt;=&gt; node hash.
 *  One cache instance is kept per Store; each entry maps a node hash to
 *  its integer index. */
public class IndexCache
{
    // Maximum number of entries held before the cache is (crudely) evicted.
    static int LIMIT = 10 ;
    // One IndexCache per Store.
    static private Map<Store, IndexCache> indexes = new HashMap<Store, IndexCache>() ;

    /** Return the cache associated with the store, creating it on first use. */
    public static IndexCache getIndexCache(Store store)
    {
        IndexCache idx = indexes.get(store) ;
        if ( idx == null )
        {
            idx = new IndexCache() ;
            indexes.put(store, idx) ;
        }
        return idx ;
    }

    // Finite cache?
    // Use a reverse structure for cache mgt?
    // cache.entrySet()
    // Later : use Trove on basic types.
    private Map<Long, Integer> cache = new HashMap<Long, Integer>() ;

    private IndexCache() {}

    /** Look up the index for a hash, fetching and caching it on a miss. */
    public Integer get(Long hashCode)
    {
        Integer i = _get(hashCode) ;
        if ( i == null )
        {
            i = fetch(hashCode) ;
            insert(hashCode, i) ;
        }
        return i ;
    }

    private Integer _get(Long hashCode)
    {
        Integer idx = cache.get(hashCode) ;
        if ( idx != null )
        {
            // Move to end of LRU list.
        }
        return idx ;
    }

    // Placeholder: the real lookup against the store is not implemented yet.
    private Integer fetch(Long hashCode)
    {
        return -1 ;
    }

    // BUG FIX: the original method body never stored anything, so the cache
    // stayed permanently empty and every get() re-fetched. Store the entry,
    // with a crude clear-all eviction once the cache exceeds LIMIT
    // (TODO: replace with real LRU eviction).
    private void insert(Long hashCode, Integer idx)
    {
        if ( cache.size() > LIMIT )
        {
            cache.clear() ;
        }
        cache.put(hashCode, idx) ;
    }
}
|
<gh_stars>0
#include "library.h"
// Private constructor: takes ownership of an already-loaded module handle.
library::library(HMODULE instance) : m_instance(instance) {
}

// Loads the named DLL; throws LoadFailException if LoadLibraryA fails.
// The returned pointer owns the module and frees it on destruction.
library::pointer library::load(const string &file_name) {
    HMODULE instance = LoadLibraryA(file_name.c_str());
    if (!instance) {
        throw LoadFailException(file_name);
    }
    return pointer(new library(instance));
}

// Raw module handle accessor (handle remains owned by this object).
HMODULE library::module() const { return m_instance; }

// NOTE(review): if the class is copyable this double-frees the handle on
// copy destruction -- confirm copy/move are disabled in the header.
library::~library() {
    FreeLibrary(m_instance);
}

// Exception raised when the DLL itself cannot be loaded.
library::LoadFailException::LoadFailException(const string &message) : std::runtime_error(message + " load failed") {
}

// Exception raised when a symbol lookup inside a loaded DLL fails.
library::MethodNotFoundException::MethodNotFoundException(const string &message) : std::runtime_error(
        "Method [" + message + "] not found") {
}
|
using System;
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using Moq;
using Moq.Protected;
using Newtonsoft.Json;
using NUnit.Framework;
namespace KayakoRestApi.UnitTests.News
{
    [TestFixture]
    public class NewsModuleTests
    {
        [Test]
        public async Task RetrieveNewsArticle_ValidResponse_ReturnsNewsArticle()
        {
            // Arrange: the article the mocked HTTP layer will hand back.
            var expectedNewsArticle = new NewsArticle
            {
                Title = "Sample News Article",
                Content = "This is a sample news article content.",
                PublishedAt = DateTime.Now
            };
            // Stub the protected HttpMessageHandler.SendAsync so that ANY
            // request yields 200 OK with the serialized article as its body.
            var mockHttpMessageHandler = new Mock<HttpMessageHandler>();
            mockHttpMessageHandler.Protected()
                .Setup<Task<HttpResponseMessage>>("SendAsync", ItExpr.IsAny<HttpRequestMessage>(), ItExpr.IsAny<CancellationToken>())
                .ReturnsAsync(new HttpResponseMessage
                {
                    StatusCode = HttpStatusCode.OK,
                    Content = new StringContent(JsonConvert.SerializeObject(expectedNewsArticle))
                });
            var httpClient = new HttpClient(mockHttpMessageHandler.Object);
            var newsModule = new NewsModule(httpClient);
            // Act
            var retrievedNewsArticle = await newsModule.RetrieveNewsArticleAsync();
            // Assert: the module round-tripped the payload into an
            // equivalent article.
            Assert.IsNotNull(retrievedNewsArticle);
            Assert.AreEqual(expectedNewsArticle.Title, retrievedNewsArticle.Title);
            Assert.AreEqual(expectedNewsArticle.Content, retrievedNewsArticle.Content);
            Assert.AreEqual(expectedNewsArticle.PublishedAt, retrievedNewsArticle.PublishedAt);
        }
    }
}
from django.conf.urls import url
from apps.email import views
urlpatterns = [
    # Single entry point: the broadcast view serves the e-mail blast form.
    # NOTE(review): django.conf.urls.url() is deprecated since Django 2.0
    # and removed in 4.0 -- migrate to django.urls.re_path/path on upgrade.
    url(r'^$', views.BroadcastView.as_view(), name='broadcast')
]
|
<reponame>tiptok/go-stash
package filter
import (
"strings"
"github.com/globalsign/mgo/bson"
)
// AddUriFieldFilter returns a FilterFunc that reads the string value stored
// under inField, strips any query string, replaces every path segment that
// is a BSON ObjectId hex with "*", and writes the normalized URI under
// outField. Events without a string inField pass through unchanged.
// (Fixes the misspelled identifiers `outFirld` and `datas`; behavior is
// unchanged and the exported signature keeps the same parameter order.)
func AddUriFieldFilter(inField, outField string) FilterFunc {
	return func(m map[string]interface{}) map[string]interface{} {
		val, ok := m[inField].(string)
		if !ok {
			return m
		}
		// Drop everything from the first '?' onwards before splitting.
		path := val
		if idx := strings.Index(val, "?"); idx >= 0 {
			path = val[:idx]
		}
		segments := strings.Split(path, "/")
		for i, segment := range segments {
			if bson.IsObjectIdHex(segment) {
				segments[i] = "*"
			}
		}
		m[outField] = strings.Join(segments, "/")
		return m
	}
}
|
class CreateInstancesUsers < ActiveRecord::Migration
  # Join table for the instances <-> users association; `id: false` because
  # a habtm join table needs no primary key of its own.
  def change
    create_table :instances_users, id: false do |t|
      t.belongs_to :instance, index: true
      t.belongs_to :user, index: true
    end
  end
end
|
#!/bin/bash
# Abort immediately if any build step fails (previously a failed
# `bower update` would still run the compile step).
set -e

# Refresh front-end packages, then build the compressed stylesheets.
bower update
compass compile -s compressed
|
#!/bin/bash
# Cross-compile the opus library for one Android ABI and install the
# headers plus the static library into the per-ABI output tree.
set -e

TARGET_ARCH=$1
TARGET_PATH=/output/opus/${TARGET_ARCH}

# Build out of a scratch copy so the mounted source tree stays pristine.
cp -r /sources/opus /tmp/opus
cd /tmp/opus/jni
ndk-build APP_ABI="${TARGET_ARCH}"

mkdir -p "${TARGET_PATH}/include"
mkdir -p "${TARGET_PATH}/lib"
cp -r ../include "${TARGET_PATH}/include/opus"
cp "../obj/local/${TARGET_ARCH}/libopus.a" "${TARGET_PATH}/lib/"

# BUG FIX: the original removed /tmp/pjsip (a copy/paste slip from a
# sibling script) and left the /tmp/opus scratch copy behind.
rm -rf /tmp/opus
<filename>src/main/java/org/olat/ldap/ui/LDAPAuthenticationController.java
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.ldap.ui;
import java.util.List;
import java.util.Locale;
import org.apache.logging.log4j.Logger;
import org.olat.basesecurity.AuthHelper;
import org.olat.basesecurity.BaseSecurityModule;
import org.olat.core.commons.persistence.DB;
import org.olat.core.dispatcher.DispatcherModule;
import org.olat.core.gui.UserRequest;
import org.olat.core.gui.components.Component;
import org.olat.core.gui.components.link.ExternalLink;
import org.olat.core.gui.components.link.Link;
import org.olat.core.gui.components.link.LinkFactory;
import org.olat.core.gui.components.velocity.VelocityContainer;
import org.olat.core.gui.control.Controller;
import org.olat.core.gui.control.Event;
import org.olat.core.gui.control.WindowControl;
import org.olat.core.gui.control.generic.closablewrapper.CloseableModalController;
import org.olat.core.gui.control.generic.dtabs.Activateable2;
import org.olat.core.id.Identity;
import org.olat.core.id.context.ContextEntry;
import org.olat.core.id.context.StateEntry;
import org.olat.core.logging.OLATRuntimeException;
import org.olat.core.logging.Tracing;
import org.olat.core.util.StringHelper;
import org.olat.core.util.UserSession;
import org.olat.core.util.Util;
import org.olat.core.util.WebappHelper;
import org.olat.core.util.i18n.I18nManager;
import org.olat.ldap.LDAPError;
import org.olat.ldap.LDAPLoginManager;
import org.olat.ldap.LDAPLoginModule;
import org.olat.login.LoginModule;
import org.olat.login.auth.AuthenticationController;
import org.olat.login.auth.AuthenticationStatus;
import org.olat.login.auth.OLATAuthManager;
import org.olat.login.auth.OLATAuthentcationForm;
import org.olat.registration.DisclaimerController;
import org.olat.registration.PwChangeController;
import org.olat.registration.RegistrationManager;
import org.olat.user.UserModule;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * Login controller that authenticates users against an LDAP directory,
 * with optional fallback to the local OLAT password store, attack-prevention
 * login blocking, and a disclaimer-confirmation step before final login.
 */
public class LDAPAuthenticationController extends AuthenticationController implements Activateable2 {
	private static final Logger log = Tracing.createLoggerFor(LDAPAuthenticationController.class);

	// Authentication provider identifier recorded for LDAP-backed logins.
	public static final String PROVIDER_LDAP = "LDAP";

	private VelocityContainer loginComp;
	// Either an internal Link (in-app password change) or an ExternalLink.
	private Component pwLink;
	private Controller subController;
	private OLATAuthentcationForm loginForm;
	private DisclaimerController disclaimerCtr;
	// Set once authentication succeeds; reused by the disclaimer callback.
	private Identity authenticatedIdentity;
	// Provider that actually authenticated the user (LDAP or OLAT default).
	private String provider = null;

	private CloseableModalController cmc;

	@Autowired
	private DB dbInstance;
	@Autowired
	private UserModule userModule;
	@Autowired
	private LoginModule loginModule;
	@Autowired
	private LDAPLoginModule ldapLoginModule;
	@Autowired
	private LDAPLoginManager ldapLoginManager;
	@Autowired
	private OLATAuthManager olatAuthenticationSpi;
	@Autowired
	private RegistrationManager registrationManager;

	public LDAPAuthenticationController(UserRequest ureq, WindowControl control) {
		// use fallback translator to login and registration package
		super(ureq, control, Util.createPackageTranslator(LoginModule.class, ureq.getLocale(), Util.createPackageTranslator(RegistrationManager.class, ureq.getLocale())));

		loginComp = createVelocityContainer("ldaplogin");

		// Password-change link: in-app wizard when changes propagate to the
		// LDAP server, otherwise an external URL if one is configured.
		if(userModule.isAnyPasswordChangeAllowed() && ldapLoginModule.isPropagatePasswordChangedOnLdapServer()) {
			Link link = LinkFactory.createLink("_ldap_login_change_pwd", "menu.pw", loginComp, this);
			link.setElementCssClass("o_login_pwd");
			pwLink = link;
		} else if(StringHelper.containsNonWhitespace(ldapLoginModule.getChangePasswordUrl())) {
			ExternalLink link = new ExternalLink("_ldap_login_change_pwd", "menu.pw");
			link.setElementCssClass("o_login_pwd");
			link.setName(translate("menu.pw"));
			link.setUrl(ldapLoginModule.getChangePasswordUrl());
			link.setTarget("_blank");
			loginComp.put("menu.pw", link);
			pwLink = link;
		}

		// Use the standard OLAT login form but with our LDAP translator
		loginForm = new OLATAuthentcationForm(ureq, control, "ldap_login", getTranslator());
		listenTo(loginForm);

		loginComp.put("ldapForm", loginForm.getInitialComponent());

		putInitialPanel(loginComp);
	}

	@Override
	public void changeLocale(Locale newLocale) {
		setLocale(newLocale, true);
	}

	@Override
	protected void event(UserRequest ureq, Component source, Event event) {
		if (source == pwLink) {
			openChangePassword(ureq, null);
		}
	}

	/** Opens the password-change wizard in a modal (if allowed by config). */
	protected void openChangePassword(UserRequest ureq, String initialEmail) {
		// double-check if allowed first
		if (!userModule.isAnyPasswordChangeAllowed() || !ldapLoginModule.isPropagatePasswordChangedOnLdapServer()) {
			showError("error.password.change.not.allow");
		} else {
			removeAsListenerAndDispose(cmc);
			removeAsListenerAndDispose(subController);

			subController = new PwChangeController(ureq, getWindowControl(), initialEmail, true);
			listenTo(subController);

			String title = ((PwChangeController)subController).getWizardTitle();
			cmc = new CloseableModalController(getWindowControl(), translate("close"), subController.getInitialComponent(), true, title);
			listenTo(cmc);
			cmc.activate();
		}
	}

	@Override
	protected void event(UserRequest ureq, Controller source, Event event) {
		LDAPError ldapError = new LDAPError();

		if (source == loginForm && event == Event.DONE_EVENT) {
			String login = loginForm.getLogin();
			String pass = loginForm.getPass();

			// Attack prevention: refuse logins that are currently blocked.
			if (loginModule.isLoginBlocked(login)) {
				// do not proceed when already blocked
				showError("login.blocked", loginModule.getAttackPreventionTimeoutMin().toString());
				getLogger().info(Tracing.M_AUDIT, "Login attempt on already blocked login for {}. IP::{}", login, ureq.getHttpReq().getRemoteAddr());
				return;
			}

			// Primary path: authenticate against the LDAP directory.
			authenticatedIdentity = ldapLoginManager.authenticate(login, pass, ldapError);

			if(!ldapError.isEmpty()) {
				final String errStr = ldapError.get();
				if ("login.notauthenticated".equals(errStr)) {
					// user exists in LDAP, authentication was ok, but user
					// has not got the OLAT service or has not been created by now
					getWindowControl().setError(translate("login.notauthenticated"));
					return;
				} else {
					// tell about the error again
					ldapError.insert(errStr);
				}
			}

			if (authenticatedIdentity != null) {
				provider = LDAPAuthenticationController.PROVIDER_LDAP;
				try {
					//prevents database timeout
					dbInstance.commitAndCloseSession();
				} catch (Exception e) {
					log.error("", e);
				}
			} else {
				// try fallback to OLAT provider if configured
				if (ldapLoginModule.isCacheLDAPPwdAsOLATPwdOnLogin() || ldapLoginModule.isTryFallbackToOLATPwdOnLogin()) {
					AuthenticationStatus status = new AuthenticationStatus();
					authenticatedIdentity = olatAuthenticationSpi.authenticate(null, login, pass, status);
					if(status.getStatus() == AuthHelper.LOGIN_INACTIVE) {
						showError("login.error.inactive", WebappHelper.getMailConfig("mailSupport"));
						return;
					}
				}
				if (authenticatedIdentity != null) {
					provider = BaseSecurityModule.getDefaultAuthProviderIdentifier();
				}
			}
			// Still not found? register for hacking attempts
			if (authenticatedIdentity == null) {
				if (loginModule.registerFailedLoginAttempt(login)) {
					logAudit("Too many failed login attempts for " + login + ". Login blocked. IP::" + ureq.getHttpReq().getRemoteAddr());
					showError("login.blocked", loginModule.getAttackPreventionTimeoutMin().toString());
				} else {
					showError("login.error", ldapError.get());
				}
				return;
			} else if(Identity.STATUS_INACTIVE.equals(authenticatedIdentity.getStatus())) {
				showError("login.error.inactive", WebappHelper.getMailConfig("mailSupport"));
				return;
			} else {
				// Switch the session locale to the user's preferred language.
				try {
					String language = authenticatedIdentity.getUser().getPreferences().getLanguage();
					UserSession usess = ureq.getUserSession();
					if(StringHelper.containsNonWhitespace(language)) {
						usess.setLocale(I18nManager.getInstance().getLocaleOrDefault(language));
					}
				} catch (Exception e) {
					logError("Cannot set the user language", e);
				}
			}

			loginModule.clearFailedLoginAttempts(login);

			// Check if disclaimer has been accepted
			if (registrationManager.needsToConfirmDisclaimer(authenticatedIdentity)) {
				// accept disclaimer first
				removeAsListenerAndDispose(disclaimerCtr);
				disclaimerCtr = new DisclaimerController(ureq, getWindowControl(), authenticatedIdentity, false);
				listenTo(disclaimerCtr);
				removeAsListenerAndDispose(cmc);
				cmc = new CloseableModalController(getWindowControl(), translate("close"), disclaimerCtr.getInitialComponent());
				listenTo(cmc);
				cmc.activate();
			} else {
				// disclaimer acceptance not required
				doLoginAndRegister(authenticatedIdentity, ureq, provider);
			}
		}
		if (source == subController) {
			if (event == Event.CANCELLED_EVENT || event == Event.DONE_EVENT) {
				cmc.deactivate();
			}
		} else if (source == disclaimerCtr) {
			cmc.deactivate();
			if (event == Event.DONE_EVENT) {
				// User accepted disclaimer, do login now
				registrationManager.setHasConfirmedDislaimer(authenticatedIdentity);
				doLoginAndRegister(authenticatedIdentity, ureq, provider);
			} else if (event == Event.CANCELLED_EVENT) {
				// User did not accept, workflow ends here
				showWarning("disclaimer.form.cancelled");
			}
		} else if (source == cmc) {
			// User did close disclaimer window, workflow ends here
			showWarning("disclaimer.form.cancelled");
		}
	}

	@Override
	public void activate(UserRequest ureq, List<ContextEntry> entries, StateEntry state) {
		if(entries == null || entries.isEmpty()) return;

		// Deep link "changepw[/<email>]" opens the password-change wizard.
		String type = entries.get(0).getOLATResourceable().getResourceableTypeName();
		if("changepw".equals(type)) {
			String email = null;
			if(entries.size() > 1) {
				email = entries.get(1).getOLATResourceable().getResourceableTypeName();
			}
			openChangePassword(ureq, email);
		}
	}

	/**
	 * Internal helper to perform the real login code and do all necessary steps to
	 * register the user session
	 *
	 * @param authIdentity The authenticated identity
	 * @param ureq
	 * @param myProvider The provider that identified the user
	 */
	private void doLoginAndRegister(Identity authIdentity, UserRequest ureq, String myProvider) {
		// NOTE(review): this branches on the FIELD `provider` while passing the
		// PARAMETER `myProvider` to AuthHelper -- both callers pass the field,
		// so the values coincide today; confirm before refactoring.
		if (provider.equals(PROVIDER_LDAP)) {
			// prepare redirects to home etc, set status
			int loginStatus = AuthHelper.doLogin(authIdentity, myProvider, ureq);
			if (loginStatus == AuthHelper.LOGIN_OK) {
				//update last login date and register active user
				// securityManager is presumably inherited from the superclass
				securityManager.setIdentityLastLogin(authIdentity);
			} else if (loginStatus == AuthHelper.LOGIN_NOTAVAILABLE){
				DispatcherModule.redirectToServiceNotAvailable( ureq.getHttpResp() );
			} else if (loginStatus == AuthHelper.LOGIN_INACTIVE) {
				getWindowControl().setError(translate("login.error.inactive", WebappHelper.getMailConfig("mailSupport")));
			} else {
				getWindowControl().setError(translate("login.error", WebappHelper.getMailConfig("mailSupport")));
			}
		} else if (provider.equals(BaseSecurityModule.getDefaultAuthProviderIdentifier())) {
			// delegate login process to OLAT authentication controller
			authenticated(ureq, authIdentity);
		} else {
			throw new OLATRuntimeException("Unknown login provider::" + myProvider, null);
		}
	}
}
|
<gh_stars>0
// the createConnection from typeorm automatically reads all the info
// in the ormconfig.json, so no parameters needed here.
import { createConnection } from 'typeorm';

// createConnection() returns a Promise; without a handler a connection
// failure surfaces only as an unhandled rejection. Log the error and mark
// the process as failed instead of letting the rejection float.
createConnection().catch((error) => {
  console.error('Database connection failed:', error);
  process.exitCode = 1;
});
|
use version_sync::assert_contains_regex;
fn validate_version_sync(crate_name: &str, file_path: &str) {
let regex_pattern = format!("^Thank you for your contribution to the '{}\\' repository!$", crate_name);
assert_contains_regex!(file_path, ®ex_pattern);
}
fn main() {
let crate_name = "your_crate_name";
let file_path = ".github/PULL_REQUEST_TEMPLATE.md";
validate_version_sync(crate_name, file_path);
} |
#!/usr/bin/env python
# encoding: utf-8
#
# Copyright (c) 2008 <NAME> All rights reserved.
#
"""Simple example with urllib2.urlopen().
"""
#end_pymotw_header
import urllib2
# Build the request up-front so extra headers can be attached before the
# network call is made.
request = urllib2.Request('http://localhost:8080/')
request.add_header(
    'User-agent',
    'PyMOTW (http://www.doughellmann.com/PyMOTW/)',
)
# NOTE(review): Python 2 only -- urllib2 and the print statement were
# removed in Python 3 (use urllib.request and print()).
response = urllib2.urlopen(request)
data = response.read()
print data
|
#! /bin/bash -x
################################################################################
#
# for K computer
#
################################################################################
#PJM --rsc-list "rscgrp=large"
#PJM --rsc-list "node=576"
#PJM --rsc-list "elapse=00:30:00"
#PJM --stg-transfiles all
#PJM --mpi "use-rankdir"
#-------------------------------------------------------------------------------
#PJM --stgin 'rank=* /data/ra000006/a00000/scale/src/bin/scale-les_pp %r:./'
#PJM --stgin 'rank=* ../config/pp.d03.topo.conf %r:./'
#PJM --stgin "rank=* /data/ra000006/SCALE/database/topo/DEM50M/Products/* %r:./topo/"
#xxx --stgin "rank=* /data/ra000006/SCALE/database/topo/GTOPO30/Products/* %r:./topo/"
#-------------------------------------------------------------------------------
#PJM --stgout "rank=* %r:./* ./domain_03/"
#PJM -j
#PJM -s
#
. /work/system/Env_base
#
# 8 OpenMP threads per MPI rank.
export PARALLEL=8
export OMP_NUM_THREADS=8
#
# run: topography preprocessing for domain 3; abort the job on failure.
mpiexec ./scale-les_pp pp.d03.topo.conf || exit 1
package chain
import (
"context"
"gx/ipfs/QmR8BauakNcBa3RbE4nbQu76PDiJgoQgz8AJdhJuiU4TAw/go-cid"
"github.com/filecoin-project/go-filecoin/types"
)
// BlockProvider provides blocks. This is a subset of the ReadStore interface.
type BlockProvider interface {
	// GetBlock fetches the block with the given CID.
	GetBlock(ctx context.Context, cid cid.Cid) (*types.Block, error)
}
// GetParentTipSet returns the parent tipset of a tipset.
// The result is empty if the tipset has no parents (including if it is empty itself).
// An error is returned if the context is cancelled before every parent block
// has been fetched, or if a block cannot be loaded or added.
func GetParentTipSet(ctx context.Context, store BlockProvider, ts types.TipSet) (types.TipSet, error) {
	newTipSet := types.TipSet{}
	parents, err := ts.Parents()
	if err != nil {
		return nil, err
	}
	for it := parents.Iter(); !it.Complete() && ctx.Err() == nil; it.Next() {
		newBlk, err := store.GetBlock(ctx, it.Value())
		if err != nil {
			return nil, err
		}
		if err := newTipSet.AddBlock(newBlk); err != nil {
			return nil, err
		}
	}
	// BUG FIX: previously a cancelled context broke the loop silently and a
	// truncated tipset was returned with a nil error; surface it instead.
	if err := ctx.Err(); err != nil {
		return nil, err
	}
	return newTipSet, nil
}
// IterAncestors returns an iterator over tipset ancestors, yielding first the start tipset and
// then its parent tipsets until (and including) the genesis tipset.
func IterAncestors(ctx context.Context, store BlockProvider, start types.TipSet) *TipsetIterator {
	// Use keyed fields (idiomatic Go) so the literal stays correct if the
	// struct layout changes; err starts at its zero value.
	return &TipsetIterator{
		ctx:   ctx,
		store: store,
		value: start,
	}
}
// TipsetIterator is an iterator over tipsets.
type TipsetIterator struct {
	ctx   context.Context
	store BlockProvider
	// value holds the tipset most recently produced; empty when exhausted.
	value types.TipSet
	// err records the failure (if any) from the last Next call.
	err error
}

// Value returns the iterator's current value, if not Complete().
func (it *TipsetIterator) Value() types.TipSet {
	return it.value
}

// Complete tests whether the iterator is exhausted.
func (it *TipsetIterator) Complete() bool {
	return len(it.value) == 0
}

// Next advances the iterator to the next value.
func (it *TipsetIterator) Next() error {
	it.value, it.err = GetParentTipSet(it.ctx, it.store, it.value)
	return it.err
}
|
public class ClassTypeConverter {
public static String classTypeToLetter(Class<?> classType) {
if (classType == int.class) {
return "I";
} else if (classType == double.class) {
return "D";
} else if (classType == byte.class) {
return "B";
} else if (classType == char.class) {
return "C";
} else if (classType == boolean.class) {
return "Z";
} else if (classType == float.class) {
return "F";
} else if (classType == long.class) {
return "J";
} else if (classType == short.class) {
return "S";
} else {
return "U";
}
}
public static void main(String[] args) {
System.out.println(classTypeToLetter(int.class)); // Expected output: "I"
System.out.println(classTypeToLetter(double.class)); // Expected output: "D"
System.out.println(classTypeToLetter(byte.class)); // Expected output: "B"
System.out.println(classTypeToLetter(char.class)); // Expected output: "C"
System.out.println(classTypeToLetter(boolean.class)); // Expected output: "Z"
System.out.println(classTypeToLetter(float.class)); // Expected output: "F"
System.out.println(classTypeToLetter(long.class)); // Expected output: "J"
System.out.println(classTypeToLetter(short.class)); // Expected output: "S"
System.out.println(classTypeToLetter(String.class)); // Expected output: "U"
}
} |
package io.github.astrapi69.chareditor.actions;
import io.github.astrapi69.chareditor.SpringBootSwingApplication;
import io.github.astrapi69.chareditor.util.Constants;
import java.awt.event.ActionEvent;
import javax.swing.AbstractAction;
import javax.swing.JTextArea;
/**
 * The Class NewFileAction.
 *
 * Clears all three editor text areas (input, preview, HTML entities) so the
 * user starts from an empty document.
 */
public class NewFileAction extends AbstractAction implements Constants {

    /** Serial version UID. */
    private static final long serialVersionUID = 1L;

    /**
     * Instantiates a new new file action.
     *
     * @param name the name
     */
    public NewFileAction(String name) {
        super(name);
    }

    /* (non-Javadoc)
     * @see java.awt.event.ActionListener#actionPerformed(java.awt.event.ActionEvent)
     */
    @Override
    public void actionPerformed(ActionEvent e) {
        SpringBootSwingApplication frame = SpringBootSwingApplication.getInstance();
        // The identical replaceRange pattern was repeated three times;
        // factored into a single helper (DRY), behavior unchanged.
        clear(frame.getMainPanel().getJtaInput());
        clear(frame.getMainPanel().getJtaPreview());
        clear(frame.getMainPanel().getJtaHtmlEntitys());
    }

    /** Empties the given text area in place. */
    private static void clear(JTextArea textArea) {
        textArea.replaceRange(EMPTYSTRING, 0, textArea.getText().length());
    }
}
|
#!/bin/bash
# Batch driver: runs my_run.sh for a slice of the dataset list.
#input arguments
gm="${1-DGCNN}"
GPU=${2-0}
max_type="${3-gfusedmax}" #softmax, sparsemax, gfusedmax
norm_flag="${4-True}" #layer_norm_flag for attention
gamma=${5-1.0} #gamma controlling the sparsity, the smaller the sparser
lam=${6-1.0} #lambda controlling the smoothness, the larger the smoother
begin=${7-0}
end=${8-10}

declare -a dataset_name=("MUTAG" "ENZYMES" "NCI1" "NCI109" "DD" "PTC" "PROTEINS" "COLLAB" "IMDBBINARY" "IMDBMULTI")
# BUG FIX: the original read ${#array[@]} -- `array` was never defined, so
# dataset_num was always 0. Count the actual dataset list.
dataset_num=${#dataset_name[@]}

# Iterate over the [begin, begin+end) slice of the dataset list.
for (( i=$begin; i<$begin+$end; i++ ));
do
    ./my_run.sh "${dataset_name[$i]}" "$gm" 0 "$GPU" "$max_type" "$norm_flag" "$gamma" "$lam"
done
|
<reponame>DerSchmale/spirv4web
/**
 * Reflection record describing a single SPIR-V specialization constant.
 */
export class SpecializationConstant
{
    // The ID of the specialization constant (SPIR-V result id).
    id: ConstantID;
    // The constant ID of the constant, used in Vulkan during pipeline creation
    // (matches VkSpecializationMapEntry.constantID).
    constant_id: number;
};
<reponame>lterfloth/BotBuilder-Samples
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
import { StatePropertyAccessor, TurnContext, UserState } from "botbuilder";
import {
ChoiceFactory,
ChoicePrompt,
ComponentDialog,
ConfirmPrompt,
DialogSet,
DialogTurnStatus,
PromptValidatorContext,
TextPrompt,
WaterfallDialog,
WaterfallStepContext
} from "botbuilder-dialogs";
import { FyiPost } from "../fyiPost";
const SOURCETYPE_PROMPT = "SOURCETYPE_PROMPT";
const CONFIRM_PROMPT = "CONFIRM_PROMPT";
const DESCRIPTION_PROMPT = "DESCRIPTION_PROMPT";
const URL_PROMPT = "URL_PROMPT";
const PRIORITY_PROMPT = "PRIORITY_PROMPT";
const USER_PROFILE = "USER_PROFILE";
const WATERFALL_DIALOG = "WATERFALL_DIALOG";
/**
 * Waterfall dialog that collects an FYI post (source type, URL, description,
 * priority), confirms it with the user, and stores it in user state.
 * Collected values are passed from step to step via stepContext.options.
 */
export class FyiPostDialog extends ComponentDialog {
    // Accessor for the persisted FyiPost in user state.
    private fyiPost: StatePropertyAccessor<FyiPost>;

    constructor(userState: UserState) {
        super("fyiPostDialog");

        this.fyiPost = userState.createProperty(USER_PROFILE);

        // Register one prompt per waterfall step, addressed by the string ids.
        this.addDialog(new TextPrompt(DESCRIPTION_PROMPT));
        this.addDialog(new TextPrompt(URL_PROMPT));
        this.addDialog(new ChoicePrompt(SOURCETYPE_PROMPT));
        this.addDialog(new ChoicePrompt(PRIORITY_PROMPT));
        this.addDialog(new ConfirmPrompt(CONFIRM_PROMPT));

        this.addDialog(
            new WaterfallDialog(WATERFALL_DIALOG, [
                this.sourceTypeStep.bind(this),
                this.urlStep.bind(this),
                this.descriptionStep.bind(this),
                this.priorityStep.bind(this),
                this.confirmStep.bind(this),
                this.summaryStep.bind(this)
            ])
        );

        this.initialDialogId = WATERFALL_DIALOG;
    }

    /**
     * The run method handles the incoming activity (in the form of a TurnContext) and passes it through the dialog system.
     * If no dialog is active, it will start the default dialog.
     * @param {*} turnContext
     * @param {*} accessor
     */
    public async run(turnContext: TurnContext, accessor: StatePropertyAccessor) {
        const dialogSet = new DialogSet(accessor);
        dialogSet.add(this);
        const dialogContext = await dialogSet.createContext(turnContext);
        const results = await dialogContext.continueDialog();
        if (results.status === DialogTurnStatus.empty) {
            await dialogContext.beginDialog(this.id);
        }
    }

    // Step 1: ask which kind of source is being recommended.
    private async sourceTypeStep(stepContext: WaterfallStepContext<FyiPost>) {
        // WaterfallStep always finishes with the end of the Waterfall or with another dialog; here it is a Prompt Dialog.
        // Running a prompt here means the next WaterfallStep will be run when the users response is received.
        return await stepContext.prompt(SOURCETYPE_PROMPT, {
            choices: ChoiceFactory.toChoices([
                "Website",
                "Konferenz",
                "Literatur",
                "Sonstiges"
            ]),
            prompt: "Bitte sage mir kurz, um was für eine Quelle es sich handelt"
        });
    }

    // Step 2: record the source type, then prompt for the URL.
    private async urlStep(stepContext: WaterfallStepContext<FyiPost>) {
        stepContext.options.sourceType = stepContext.result.value;
        return await stepContext.prompt(
            URL_PROMPT,
            "Bitte gebe mir die URL des Webinhaltes."
        );
    }

    // Step 3: record the URL, then prompt for the free-text description.
    private async descriptionStep(stepContext: WaterfallStepContext<FyiPost>) {
        stepContext.options.url = stepContext.result;
        return await stepContext.prompt(
            DESCRIPTION_PROMPT,
            "Was möchtest du den anderen hinsichtlich der Quelle sagen (diese Angabe wird 1 zu 1 in übernommen)?"
        );
    }

    // Step 4: record the description, then prompt for the priority choice.
    private async priorityStep(stepContext: WaterfallStepContext<FyiPost>) {
        stepContext.options.description = stepContext.result;
        return await stepContext.prompt(PRIORITY_PROMPT, {
            choices: ChoiceFactory.toChoices([
                "Eher unwichtig",
                "Wichtig",
                "Dringend"
            ]),
            prompt: "Bitte sage mir kurz, wie wichtig diese Quelle ist."
        });
    }

    // Step 5: record the priority and ask the user to confirm everything.
    private async confirmStep(stepContext: WaterfallStepContext<FyiPost>) {
        // WaterfallStep always finishes with the end of the Waterfall or with another dialog, here it is a Prompt Dialog.
        stepContext.options.priority = stepContext.result.value;
        return await stepContext.prompt(CONFIRM_PROMPT, {
            prompt: "Sind deine Angaben so in Ordnung?"
        });
    }

    // Step 6: on confirmation persist the post to user state and echo a
    // summary; otherwise tell the user nothing was saved.
    private async summaryStep(stepContext: WaterfallStepContext<FyiPost>) {
        if (stepContext.result) {
            // Get the current profile object from user state.
            const fyiPost = await this.fyiPost.get(
                stepContext.context,
                new FyiPost()
            );

            const stepContextOptions = stepContext.options;
            fyiPost.sourceType = stepContextOptions.sourceType;
            fyiPost.url = stepContextOptions.url;
            fyiPost.description = stepContextOptions.description;
            fyiPost.priority = stepContextOptions.priority;

            let msg = `Ich hab deine Empfehlung vom Typ *${fyiPost.sourceType}* wie folgt abgespeichert:\n\n`;
            msg += `**URL:** ${fyiPost.url}. \n \n`;
            msg += `\n`;
            msg += ` **Beschreibung:** ${fyiPost.description}.\n`;
            msg += `\n`;
            msg += ` Ebenfalls habe ich die Quelle als *${fyiPost.priority}* einsortiert.`;
            await stepContext.context.sendActivity(msg);
        } else {
            await stepContext.context.sendActivity(
                "Die Daten wurden nicht gespeichert."
            );
        }

        // WaterfallStep always finishes with the end of the Waterfall or with another dialog, here it is the end.
        return await stepContext.endDialog();
    }

    // NOTE(review): this validator is never registered with any prompt above,
    // and PromptValidatorContext<Text> likely means the DOM `Text` type here
    // rather than `string` -- confirm intent before wiring it up.
    private async descriptionPromptValidator(
        promptContext: PromptValidatorContext<Text>
    ) {
        // This condition is our validation rule. You can also change the value at this point.
        return promptContext.recognized.succeeded;
    }
}
package proxy
import (
"bytes"
"crypto/tls"
"fmt"
"github.com/dustin/go-humanize"
"github.com/jmcvetta/randutil"
"io"
"log"
"miner-proxy/pkg"
"net"
"sync"
"sync/atomic"
"time"
)
// Proxy - Manages a Proxy connection, piping data between local and remote.
type Proxy struct {
	// Byte counters for the two directions of the pipe.
	sentBytes     uint64
	receivedBytes uint64
	laddr, raddr  *net.TCPAddr
	lconn, rconn  io.ReadWriteCloser
	// erred/errsig coordinate shutdown once either pipe direction fails.
	erred      bool
	errsig     chan bool
	tlsUnwrapp bool
	tlsAddress string

	// Optional hooks applied to traffic as it flows through the pipe.
	Matcher  func([]byte)
	Replacer func([]byte) []byte

	// Settings
	Nagles    bool
	Log       pkg.Logger
	OutputHex bool
	SecretKey string
	// IsClient selects client-side behavior (e.g. the server, not the
	// client, emits random keep-alive/confusion data).
	IsClient             bool
	UseSendConfusionData bool
}
var (
	// totalSize accumulates the total number of bytes proxied; accessed
	// atomically by the pipe loop and the periodic stats printer.
	totalSize uint64
)
// New - Create a new Proxy instance. Takes over local connection passed in,
// and closes it when finished.
func New(lconn *net.TCPConn, laddr, raddr *net.TCPAddr) *Proxy {
	return &Proxy{
		lconn:  lconn,
		laddr:  laddr,
		raddr:  raddr,
		erred:  false,
		errsig: make(chan bool),
		// Logging is a no-op unless the caller replaces Log.
		Log: pkg.NullLogger{},
	}
}
// NewTLSUnwrapped - Create a new Proxy instance with a remote TLS server for
// which we want to unwrap the TLS to be able to connect without encryption
// locally
func NewTLSUnwrapped(lconn *net.TCPConn, laddr, raddr *net.TCPAddr, addr string) *Proxy {
	p := New(lconn, laddr, raddr)
	p.tlsUnwrapp = true
	// addr is the host:port used for the TLS dial (raddr is ignored then).
	p.tlsAddress = addr
	return p
}
// setNoDelayer abstracts net.TCPConn's SetNoDelay so either connection end
// can be configured without knowing its concrete type.
type setNoDelayer interface {
	SetNoDelay(bool) error
}

var (
	// once guards the single background stats-printer goroutine.
	once sync.Once
	// startTime anchors the average-throughput calculation.
	startTime = time.Now()
)
// TimerPrint starts (at most once, process-wide) a loop that every 30s logs
// the total bytes forwarded and the average throughput since start-up.
// NOTE(review): value receiver copies the Proxy, but only package-level
// state (once, totalSize, startTime) is touched here.
func (p Proxy) TimerPrint() {
	once.Do(func() {
		t := time.Now()
		for range time.Tick(time.Second * 30) {
			total := atomic.LoadUint64(&totalSize)
			// Log message is intentionally left as-is (user-facing output).
			log.Printf("从 %s 至现在总计加密转发 %s 数据; 平均转发速度 %s/秒 \n",
				t.Format("2006-01-02 15:04:05"),
				humanize.Bytes(total),
				humanize.Bytes(uint64(float64(total)/time.Since(startTime).Seconds())),
			)
		}
	})
}
// Start - open connection to remote and start proxying data.
// Blocks until either pipe direction reports an error via errsig.
func (p *Proxy) Start() {
	defer pkg.Recover(true)

	defer p.lconn.Close()

	go p.TimerPrint()

	var err error
	//connect to remote
	if p.tlsUnwrapp {
		p.rconn, err = tls.Dial("tcp", p.tlsAddress, nil)
	} else {
		p.rconn, err = net.DialTCP("tcp", nil, p.raddr)
	}
	if err != nil {
		p.Log.Warn("Remote connection failed: %s", err)
		return
	}
	defer p.rconn.Close()

	//nagles?
	if p.Nagles {
		if conn, ok := p.lconn.(setNoDelayer); ok {
			conn.SetNoDelay(true)
		}
		if conn, ok := p.rconn.(setNoDelayer); ok {
			conn.SetNoDelay(true)
		}
	}

	//display both ends
	p.Log.Info("Opened %s >>> %s", p.laddr.String(), p.raddr.String())

	//bidirectional copy
	go p.pipe(p.lconn, p.rconn)
	go p.pipe(p.rconn, p.lconn)

	if !p.IsClient { // due to how mining traffic works, only the server needs to send random data to the client
		go p.SendRandomData(p.lconn)
	}

	//wait for close...
	<-p.errsig
	p.Log.Info("Closed (%d bytes sent, %d bytes recieved)", p.sentBytes, p.receivedBytes)
}
// err records the first error seen by either pipe direction and signals
// Start (via errsig) to tear the proxy down. io.EOF is treated as a normal
// close and not logged. Later calls are no-ops.
//
// BUG FIX: erred is now set *before* sending on errsig. Previously the flag
// was set after the send, so when both pipe goroutines failed at once, both
// could pass the guard and the second send on the unbuffered channel (which
// Start drains exactly once) blocked that goroutine forever.
// NOTE(review): erred is still accessed from both pipe goroutines without
// synchronization; this narrows but does not eliminate the race — a
// sync.Once field would close it fully. Confirm before changing the struct.
func (p *Proxy) err(s string, err error) {
	if p.erred {
		return
	}
	p.erred = true
	if err != io.EOF {
		p.Log.Warn(s, err)
	}
	p.errsig <- true
}
var (
	// proxyStart / proxyEnd frame every encrypted payload produced by
	// EncryptionData and are stripped again by DecryptData.
	proxyStart = []byte{87, 62, 64, 57, 136, 6, 18, 50, 118, 135, 214, 247}
	proxyEnd   = []byte{93, 124, 242, 154, 241, 48, 161, 242, 209, 90, 73, 163}
	// proxyJustConfusionStart — header that would mark confusion-only data
	//proxyJustConfusionStart = []byte{113,158,190,157,204,56,4,142,189,85,168,56}
	// proxyConfusionStart / proxyConfusionEnd frame the junk packets built
	// by buildConfusionData.
	proxyConfusionStart = []byte{178, 254, 235, 166, 15, 61, 52, 198, 83, 207, 6, 83, 183, 115, 50, 58, 110, 6, 13, 60, 143, 242, 254, 143}
	proxyConfusionEnd   = []byte{114, 44, 203, 23, 55, 50, 148, 231, 241, 154, 112, 180, 115, 126, 148, 149, 180, 55, 115, 242, 98, 119, 170, 249}
	// randomStart prefixes the random keep-alive pings sent by
	// SendRandomData so the receiver can discard them.
	randomStart = []byte("random-proxy")
	// randomPingData is generated randomly at process startup (see init).
	randomPingData [][]byte
)
// init pre-generates 1000 random byte strings (length drawn from
// randutil.IntRange(10, 102) — presumably 10..101 bytes; confirm against
// randutil's bound semantics) used as payloads for SendRandomData.
func init() {
	for i := 0; i < 1000; i++ {
		dataLength, _ := randutil.IntRange(10, 102)
		var temp = make([]byte, dataLength)
		for l := 0; l < dataLength; l++ {
			char, _ := randutil.IntRange(0, 255)
			temp[l] = uint8(char)
		}
		randomPingData = append(randomPingData, temp)
	}
}
// separateConfusionData strips the interleaved junk bytes inserted by
// EncryptionData: even indices hold junk, odd indices hold payload. When
// UseSendConfusionData is off the input is returned untouched.
func (p *Proxy) separateConfusionData(data []byte) []byte {
	if !p.UseSendConfusionData {
		return data
	}
	payload := make([]byte, 0, len(data)/2)
	for i := 1; i < len(data); i += 2 {
		payload = append(payload, data[i])
	}
	return payload
}
// buildConfusionData builds one junk packet:
//   - draw a random length N via randutil.IntRange(10, 135);
//   - fill N bytes with random values via randutil.IntRange(1, 255);
//   - frame the result with proxyConfusionStart / proxyConfusionEnd.
//
// (The original doc comment wrongly said the trailer was
// proxyConfusionStart; it is proxyConfusionEnd.)
//
// BUG HARDENING: the old code did `append(proxyConfusionStart, data...)`,
// appending to a package-level slice. That is only safe while that slice's
// cap equals its len; this version copies everything into a freshly
// allocated buffer so the shared markers can never be clobbered.
func (p *Proxy) buildConfusionData() []byte {
	number, _ := randutil.IntRange(10, 135)
	buf := make([]byte, 0, len(proxyConfusionStart)+number+len(proxyConfusionEnd))
	buf = append(buf, proxyConfusionStart...)
	for i := 0; i < number; i++ {
		b, _ := randutil.IntRange(1, 255)
		buf = append(buf, uint8(b))
	}
	return append(buf, proxyConfusionEnd...)
}
// EncryptionData builds the payload to send: the data is AES-encrypted with
// SecretKey and framed with proxyStart/proxyEnd. When UseSendConfusionData
// is true, one junk byte (cycled from a freshly built confusion buffer) is
// interleaved before every plaintext byte *prior* to encryption;
// separateConfusionData reverses this on the receiving side.
func (p *Proxy) EncryptionData(data []byte) ([]byte, error) {
	if p.UseSendConfusionData { // interleave junk bytes with the plaintext
		confusionData := p.buildConfusionData()
		// keep only the junk body, dropping the confusion frame markers
		confusionData = confusionData[len(proxyConfusionStart) : len(confusionData)-len(proxyConfusionEnd)]
		var result []byte
		for _, v := range data {
			result = append(result, confusionData[0])
			// rotate the junk buffer by one so its bytes are reused cyclically
			confusionData = append(confusionData[1:], confusionData[0])
			result = append(result, v)
		}
		data = result
	}
	data, err := pkg.AesEncrypt(data, []byte(p.SecretKey))
	if err != nil {
		return nil, err
	}
	// NOTE(review): appending to the package-level proxyStart is only safe
	// while its cap == len; consider copying into a fresh buffer.
	data = append(proxyStart, data...)
	return append(data, proxyEnd...), nil
}
// DecryptData strips the proxyStart/proxyEnd framing, AES-decrypts the
// payload with SecretKey, and — when UseSendConfusionData is on — removes
// the interleaved confusion bytes.
//
// BUG FIX: frames shorter than the two markers previously made the slice
// expression panic; they are now rejected with an error.
func (p *Proxy) DecryptData(data []byte) ([]byte, error) {
	if len(data) < len(proxyStart)+len(proxyEnd) {
		return nil, fmt.Errorf("DecryptData: frame too short: %d bytes", len(data))
	}
	data = data[len(proxyStart) : len(data)-len(proxyEnd)]
	data, err := pkg.AesDecrypt(data, []byte(p.SecretKey))
	if err != nil {
		return nil, err
	}
	if p.UseSendConfusionData { // strip the interleaved confusion bytes
		data = p.separateConfusionData(data)
	}
	return data, nil
}
// ReadByPlaintextSendEncryption reads one chunk (up to 1024 bytes) of
// plaintext from reader, encrypts it via EncryptionData, accounts the
// encrypted size in totalSize, and writes it to writer as a length-prefixed
// Package. Returns the first read/encrypt/write error (io.EOF included).
func (p *Proxy) ReadByPlaintextSendEncryption(reader io.Reader, writer io.Writer) error {
	data := make([]byte, 1024)
	n, err := reader.Read(data)
	if err != nil {
		return err
	}
	data = data[:n] // keep only what was actually read
	t := time.Now()
	EnData, err := p.EncryptionData(data)
	if err != nil {
		return err
	}
	p.Log.Debug("读取到 %d 明文数据, 加密后数据大小 %d; 加密耗时 %s", n, len(EnData), time.Since(t))
	atomic.AddUint64(&totalSize, uint64(len(EnData)))
	return NewPackage(EnData).Pack(writer)
}
// ReadEncryptionSendPlaintext reads length-prefixed encrypted Packages from
// reader, decrypts each one, discards random "random-proxy" confusion
// pings, and writes the recovered plaintext to writer.
//
// BUG FIXES vs. the original:
//   - the closure used `:=`, shadowing the outer err, so write errors were
//     only printed and the function always returned nil; write failures are
//     now surfaced to the caller.
//   - a decryption failure fell through and wrote the nil/partial result;
//     it now skips the packet after reporting via p.err.
func (p *Proxy) ReadEncryptionSendPlaintext(reader io.Reader, writer io.Writer) error {
	var writeErr error
	readErr := new(Package).Read(reader, func(pck Package) {
		deData, decErr := p.DecryptData(pck.Data)
		if decErr != nil {
			p.err("DecryptData error %s", decErr)
			return
		}
		if bytes.HasPrefix(deData, randomStart) {
			// server-side confusion ping: account for it in the log only
			p.Log.Debug("读取到 %d 随机混淆数据", len(deData))
			return
		}
		p.Log.Debug("读取到 %d 加密数据, 解密后数据大小 %d", len(pck.Data), len(deData))
		atomic.AddUint64(&totalSize, uint64(len(pck.Data)))
		if _, werr := writer.Write(deData); werr != nil {
			writeErr = werr
		}
	})
	if readErr != nil {
		return readErr
	}
	return writeErr
}
// SendRandomData periodically writes an encrypted, "random-proxy"-prefixed
// junk ping to dst (the receiving side drops it in
// ReadEncryptionSendPlaintext). It returns when encryption or writing
// fails. Only the server side runs this (see Start).
func (p *Proxy) SendRandomData(dst io.Writer) {
	// NOTE(review): the sleep interval is drawn once (3..14s per randutil
	// semantics) and reused for every iteration — confirm whether a fresh
	// interval per ping was intended.
	sleepTime, _ := randutil.IntRange(3, 15)
	for {
		time.Sleep(time.Second * time.Duration(sleepTime))
		// pick one of the pre-generated random payloads
		index, _ := randutil.IntRange(0, len(randomPingData))
		data, err := p.EncryptionData(append(randomStart, randomPingData[index]...))
		if err != nil {
			return
		}
		p.Log.Debug("向客户端写入随机混淆数据 %d", len(data))
		if err := NewPackage(data).Pack(dst); err != nil {
			return
		}
	}
}
// pipe runs one direction of the proxy: it repeatedly reads from src and
// writes to dst until an error occurs, which it reports via p.err before
// returning.
//
// Direction selection: exactly one side of a hop carries plaintext. When
// IsClient == islocal (the client's local side, or the server's remote
// side) this goroutine encrypts; otherwise it decrypts.
func (p *Proxy) pipe(src, dst io.ReadWriter) {
	defer pkg.Recover(true)
	islocal := src == p.lconn
	var f func(reader io.Reader, writer io.Writer) error
	var name string
	switch {
	case p.IsClient == islocal:
		name = "读取明文, 发送加密数据"
		f = p.ReadByPlaintextSendEncryption
	default:
		name = "读取加密数据, 发送明文"
		f = p.ReadEncryptionSendPlaintext
	}
	p.Log.Debug("开始 %s", name)
	// name becomes the error format string passed to p.err, e.g. "... error %s"
	name = fmt.Sprintf("%s error ", name) + "%s"
	for {
		err := f(src, dst)
		if err != nil {
			p.err(name, err)
			return
		}
	}
}
|
def _buffer_proxy(filename_or_buf, function, reset_fp=True, file_mode="rb", *args, **kwargs):
if isinstance(filename_or_buf, str): # Check if filename_or_buf is a string
with open(filename_or_buf, file_mode) as file:
if reset_fp:
file.seek(0) # Reset file pointer to the beginning of the file
return function(file, *args, **kwargs) # Call function with open file
else: # filename_or_buf is a file-like object
if reset_fp:
filename_or_buf.seek(0) # Reset file pointer to the beginning of the file
return function(filename_or_buf, *args, **kwargs) # Call function with file-like object |
<filename>tests/framework/components/script/enabler.js
var Enabler = pc.createScript('enabler');

Enabler.attributes.add('entityToEnable', { type: 'entity' });

// Records its own initialize call, then enables the target entity, its
// script component, and — when present — the scriptA/scriptB instances.
Enabler.prototype.initialize = function () {
    window.initializeCalls.push(this.entity.getGuid() + ' initialize enabler');

    var target = this.entityToEnable;
    target.enabled = true;

    var scripts = target.script;
    scripts.enabled = true;
    if (scripts.scriptA) {
        scripts.scriptA.enabled = true;
    }
    if (scripts.scriptB) {
        scripts.scriptB.enabled = true;
    }
};

// Records the post-initialize phase for ordering assertions in the tests.
Enabler.prototype.postInitialize = function () {
    window.initializeCalls.push(this.entity.getGuid() + ' postInitialize enabler');
};
|
#!/bin/bash
# Run this after a new release to update dependencies: for each requirements
# file, rebuild a clean venv, install, and regenerate pinned hashes.
set -e

venv_dir=~/.electrum-ltfn-venv
contrib=$(dirname "$0")

# note: we should not use a higher version of python than what the binaries bundle
if [[ ! "$SYSTEM_PYTHON" ]] ; then
    SYSTEM_PYTHON=$(which python3.6) || printf ""
else
    SYSTEM_PYTHON=$(which $SYSTEM_PYTHON) || printf ""
fi
if [[ ! "$SYSTEM_PYTHON" ]] ; then
    echo "Please specify which python to use in \$SYSTEM_PYTHON" && exit 1;
fi

which virtualenv > /dev/null 2>&1 || { echo "Please install virtualenv" && exit 1; }
${SYSTEM_PYTHON} -m hashin -h > /dev/null 2>&1 || { ${SYSTEM_PYTHON} -m pip install hashin; }

for i in '' '-hw' '-binaries' '-binaries-mac' '-build-wine' '-build-mac' '-build-sdist' '-build-appimage'; do
    # Rebuild the venv from scratch for every requirements file.
    rm -rf "$venv_dir"
    virtualenv -p "${SYSTEM_PYTHON}" "$venv_dir"
    source "$venv_dir/bin/activate"

    echo "Installing dependencies... (requirements${i}.txt)"
    # We pin all python packaging tools (pip and friends). Some of our dependencies might
    # pull some of them in (e.g. protobuf->setuptools), and all transitive dependencies
    # must be pinned, so we might as well pin all packaging tools. This however means
    # that we should explicitly install them now, so that we pin latest versions if possible.
    python -m pip install --upgrade pip setuptools wheel
    python -m pip install -r "$contrib/requirements/requirements${i}.txt" --upgrade
    echo "OK."

    requirements=$(pip freeze --all)
    # $requirements is intentionally unquoted: the newline-separated freeze
    # output is flattened to a single whitespace-separated line for the
    # helper script — presumably it splits on whitespace; confirm before
    # quoting.
    restricted=$(echo $requirements | ${SYSTEM_PYTHON} $contrib/deterministic-build/find_restricted_dependencies.py)
    requirements="$requirements $restricted"

    echo "Generating package hashes... (requirements${i}.txt)"
    # BUG FIX: -f — on a fresh checkout the file may not exist, and a plain
    # `rm` failure would abort the whole script under `set -e`.
    rm -f "$contrib/deterministic-build/requirements${i}.txt"
    touch "$contrib/deterministic-build/requirements${i}.txt"

    for requirement in $requirements; do
        echo -e "\r Hashing $requirement..."
        ${SYSTEM_PYTHON} -m hashin -r "$contrib/deterministic-build/requirements${i}.txt" "${requirement}"
    done
    echo "OK."
done

echo "Done. Updated requirements"
|
<gh_stars>0
// Barrel module: re-exports the markup, react and vue entry points so
// consumers can import everything from the package root.
export * from './markup';
export * from './react';
export * from './vue';
|
#!/bin/bash
set -o errexit
set -o pipefail
# main — RStudio launcher pre-start: verify the in-cluster Kubernetes API is
# reachable, export launcher env vars, install the cluster CA into the
# system trust store, then exec the real startup script (replacing this
# process).
main() {
  local startup_script="${1:-/usr/local/bin/startup.sh}"
  local dyn_dir='/mnt/dynamic/rstudio'
  local cacert='/var/run/secrets/kubernetes.io/serviceaccount/ca.crt'
  local k8s_url="https://${KUBERNETES_SERVICE_HOST}:${KUBERNETES_SERVICE_PORT}"
  local launcher_k8s_conf="${dyn_dir}/launcher.kubernetes.conf"
  local launcher_pem='/mnt/secret-configmap/rstudio/launcher.pem'
  local launcher_pub="${dyn_dir}/launcher.pub"
  local launcher_ns="${RSTUDIO_LAUNCHER_NAMESPACE:-rstudio}"
  local lb_conf='/mnt/load-balancer/rstudio/load-balancer'
  _logf 'Loading service account token'
  local sa_token
  sa_token="$(cat /var/run/secrets/kubernetes.io/serviceaccount/token)"
  _logf 'Loading service account ca.crt'
  local ca_string
  ca_string="$(tr -d '\n' <"${cacert}" | base64 | tr -d '\n')"
  _logf 'Ensuring %s exists' "${dyn_dir}"
  mkdir -p "${dyn_dir}"
  _logf 'Checking kubernetes health via %s' "${k8s_url}"
  curl -fsSL \
    -H "Authorization: Bearer ${sa_token}" \
    --cacert "${cacert}" \
    "${k8s_url}/healthz" 2>&1 | _indent
  printf '\n'
  _logf "Setting env vars"
  export KUBERNETES_API_URL="${k8s_url}"
  export KUBERNETES_AUTH_TOKEN="${sa_token}"
  _logf 'Configuring certs'
  # BUG HARDENING: ${dyn_dir} expansions below were unquoted; quoted now so
  # the script stays correct if the path ever contains spaces.
  cp -v "${cacert}" "${dyn_dir}/k8s-cert" 2>&1 | _indent
  mkdir -p /usr/local/share/ca-certificates/Kubernetes
  cp -v \
    "${dyn_dir}/k8s-cert" \
    /usr/local/share/ca-certificates/Kubernetes/cert-Kubernetes.crt 2>&1 | _indent
  _logf 'Updating CA certificates'
  PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin \
    update-ca-certificates 2>&1 | _indent
  _logf 'Replacing process with %s' "${startup_script}"
  exec "${startup_script}"
}
# _logf <printf-format> [args...] — timestamped logger. Builds a format
# string prefixed with '#----> prestart.bash <UTC timestamp>:' and passes
# the remaining arguments straight through to printf.
_logf() {
  local msg="${1}"
  shift
  local now
  now="$(date -u +%Y-%m-%dT%H:%M:%S)"
  local format_string
  # The caller's format is embedded, so %s placeholders in msg still work.
  format_string="$(printf '#----> prestart.bash %s: %s' "${now}" "${msg}")\\n"
  # shellcheck disable=SC2059
  printf "${format_string}" "${@}"
}
# _indent — prefix every stdin line with a space so piped command output is
# visually nested under the _logf headings (-u: unbuffered, keeps the
# interleaved log ordering intact).
_indent() {
  sed -u 's/^/ /'
}
main "${@}"
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package lista10;
/**
*
* @author PauloCésar
*/
public class Temperatura {

    /**
     * Converts a temperature from degrees Fahrenheit to degrees Celsius.
     * Uses the proportion of the Fahrenheit span (180°) mapped onto the
     * Celsius span (100°), i.e. ((f - 32) / 180) * 100 == (f - 32) * 5/9.
     *
     * @param f temperature in degrees Fahrenheit
     * @return the equivalent temperature in degrees Celsius
     */
    public double converte(double f) {
        double acimaDoCongelamento = f - 32;
        return (acimaDoCongelamento / 180) * 100;
    }
}
|
# ApiManagementGetEmailTemplate
# Fetches one API Management email template through the generic
# `az resource show` by building the full ARM resource id.
# NOTE(review): $SUBSCRIPTION_ID must already be set in the environment —
# it is not defined in this snippet.
RESOURCE_GROUP="myresourcegroup"
SERVICE_NAME="myservice"
TEMPLATE_NAME="mytemplate"
az resource show --id /subscriptions/$SUBSCRIPTION_ID/resourceGroups/$RESOURCE_GROUP/providers/Microsoft.ApiManagement/service/$SERVICE_NAME/templates/$TEMPLATE_NAME --api-version 2019-01-01
/*
* Copyright 2017 LinkedIn Corporation. All rights reserved. Licensed under the BSD-2 Clause license.
* See LICENSE in the project root for license information.
*/
package com.concurrentli;
import com.concurrentli.Singleton;
import org.junit.Test;
/**
 * Micro-benchmark comparing two lazy-initialization strategies under
 * contention: the library {@link Singleton} (class A) versus hand-rolled
 * double-checked locking on a volatile field (class B). Each "test" hammers
 * get()/setZ() from four threads and prints elapsed seconds — these are
 * timing runs, not correctness assertions.
 */
public class SingletonTester {
  // NOTE(review): _arr appears unused anywhere in this class — confirm
  // before removing.
  private static long[] _arr = new long[1000];
  /** Common interface so both strategies share the same benchmark driver. */
  public interface I {
    public String get();
    public void setZ(int z);
    public int getZ();
  }
  /** Strategy A: lazy value via the library Singleton helper. */
  public static class A implements I {
    private int _z;
    public void setZ(int z) {
      _z = z;
    }
    public int getZ() { return _z; }
    public A(int z) {
      _z = z;
    }
    // Computes the cached value from _z on first access only; later setZ
    // calls do not change what get() returns.
    Singleton<String> _s = new Singleton<String>() {
      @Override
      protected String getValue() {
        return Integer.toString(_z);
      }
    };
    public String get() {
      return _s.get();
    }
  }
  /** Strategy B: classic double-checked locking on a volatile field. */
  public static class B implements I {
    private int _z;
    public B(int z) {
      _z = z;
    }
    public void setZ(int z) {
      _z = z;
    }
    public int getZ() { return _z; }
    volatile String _s;
    public String get() {
      String t = _s;
      if (t != null) {
        return t;
      }
      synchronized (this) {
        t = _s;
        if (t == null) {
          _s = t = Integer.toString(_z);
        }
      }
      return t;
    }
  }
  // Hot loop: read the lazy value, feed its length back into setZ.
  private void doBasic(I obj) {
    for (long i = 0; i < 1000000000; i++) {
      obj.setZ(obj.get().length());
    }
  }
  // Runs doBasic on four threads against one shared instance and prints
  // the wall-clock seconds taken.
  private void testBasic(I obj) throws InterruptedException {
    long start = System.nanoTime();
    Thread a = new Thread(() -> doBasic(obj));
    Thread b = new Thread(() -> doBasic(obj));
    Thread c = new Thread(() -> doBasic(obj));
    Thread d = new Thread(() -> doBasic(obj));
    a.start();
    b.start();
    c.start();
    d.start();
    a.join();
    b.join();
    c.join();
    d.join();
    System.out.println((System.nanoTime() - start)/1000000000.0);
  }
  @Test
  public void test1() throws InterruptedException {
    testBasic(new A(4));
  }
  @Test
  public void test2() throws InterruptedException {
    testBasic(new B(4));
  }
}
|
{%- if jmx_export %}
# Expose Hive JMX metrics to Prometheus via the JMX exporter javaagent:
# HiveServer2 on port 27011, the Metastore on port 27012.
export HIVE_SERVER2_HADOOP_OPTS="$HIVE_SERVER2_HADOOP_OPTS -javaagent:/var/lib/prometheus_jmx_javaagent/jmx_prometheus_javaagent-0.10.jar=27011:/etc/hive/conf/jmx_hive.yaml"
export HIVE_METASTORE_HADOOP_OPTS="$HIVE_METASTORE_HADOOP_OPTS -javaagent:/var/lib/prometheus_jmx_javaagent/jmx_prometheus_javaagent-0.10.jar=27012:/etc/hive/conf/jmx_metastore.yaml"
{% endif %}
|
//============================================================================
// Copyright 2009-2020 ECMWF.
// This software is licensed under the terms of the Apache Licence version 2.0
// which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
// In applying this licence, ECMWF does not waive the privileges and immunities
// granted to it by virtue of its status as an intergovernmental organisation
// nor does it submit to any jurisdiction.
//
//============================================================================
#ifndef VIEWER_SRC_STRINGMATCHCOMBO_HPP_
#define VIEWER_SRC_STRINGMATCHCOMBO_HPP_
#include <QComboBox>
#include <QToolButton>
#include "StringMatchMode.hpp"
// Small tool button used next to a search field to expose the string match
// mode (see StringMatchMode.hpp).
class StringMatchTb : public QToolButton
{
public:
    StringMatchTb(QWidget* parent=nullptr);
};
// Combo box listing the available string match modes and translating
// between combo indices and StringMatchMode::Mode values.
class StringMatchCombo : public QComboBox
{
Q_OBJECT
public:
    explicit StringMatchCombo(QWidget* parent=nullptr);
    // Mode stored at the given combo index.
    StringMatchMode::Mode matchMode(int) const;
    // Mode of the currently selected item.
    StringMatchMode::Mode currentMatchMode() const;
    // Selects the item matching the given mode.
    void setMatchMode(const StringMatchMode& mode);
};
#endif /* VIEWER_SRC_STRINGMATCHCOMBO_HPP_ */
|
# Provision /dev/vdb1 as the /data volume.
# WARNING: destructive — mkfs.ext4 erases everything on /dev/vdb1.
echo " = Format volume (EXT4)"
mkfs.ext4 /dev/vdb1
echo " = Creating mount point: /data"
# NOTE(review): rmdir fails (non-fatally, no set -e here) when /data is
# missing or non-empty; mkdir then recreates it. `mkdir -p` would be
# quieter — confirm the remove-then-create sequence is intentional.
rmdir "/data"
mkdir "/data"
echo " = Mount volume"
mount /dev/vdb1 "/data"
echo " = Change mountpoint owner (ubuntu:ubuntu)"
chown ubuntu:ubuntu "/data"
|
def flatten_array(arr):
    """Flatten one level of nesting: [[1, 2], [3, 4]] -> [1, 2, 3, 4].

    Args:
        arr: An iterable of iterables.

    Returns:
        A new list with the items of every inner iterable, in order.
    """
    # Idiomatic replacement for the original index-based double loop.
    return [item for inner in arr for item in inner]


arr = [[1, 2], [3, 4], [5, 6]]
print(flatten_array(arr))
#section support_code_apply
int APPLY_SPECIFIC(conv_desc)(PyArrayObject *filt_shp,
cudnnConvolutionDescriptor_t *desc) {
cudnnStatus_t err;
int pad[3] = {PAD_0, PAD_1, PAD_2};
int strides[3] = {SUB_0, SUB_1, SUB_2};
int upscale[3] = {1, 1, 1};
#if BORDER_MODE == 0
pad[0] = *(npy_int64 *)PyArray_GETPTR1(filt_shp, 2) - 1;
pad[1] = *(npy_int64 *)PyArray_GETPTR1(filt_shp, 3) - 1;
#if NB_DIMS > 2
pad[2] = *(npy_int64 *)PyArray_GETPTR1(filt_shp, 4) - 1;
#endif
#endif
if (PyArray_DIM(filt_shp, 0) - 2 != NB_DIMS) {
PyErr_Format(PyExc_ValueError, "Filter shape has too many dimensions: "
"expected %d, got %lld.", NB_DIMS,
(long long)PyArray_DIM(filt_shp, 0));
return -1;
}
err = cudnnCreateConvolutionDescriptor(desc);
if (err != CUDNN_STATUS_SUCCESS) {
PyErr_Format(PyExc_MemoryError, "could not allocate convolution "
"descriptor: %s", cudnnGetErrorString(err));
return -1;
}
err = cudnnSetConvolutionNdDescriptor(*desc, NB_DIMS, pad, strides, upscale,
CONV_MODE);
return 0;
}
|
<gh_stars>0
import { PropertySchema } from '@medplum/core';
import React from 'react';
import { AddressInput } from './AddressInput';
import { AttachmentArrayInput } from './AttachmentArrayInput';
import { AttachmentInput } from './AttachmentInput';
import { BackboneElementInput } from './BackboneElementInput';
import { CodeableConceptInput } from './CodeableConceptInput';
import { ContactPointInput } from './ContactPointInput';
import { DeviceNameInput } from './DeviceNameInput';
import { EnumInput } from './EnumInput';
import { HumanNameInput } from './HumanNameInput';
import { IdentifierInput } from './IdentifierInput';
import { PatientLinkInput } from './PatientLinkInput';
import { ReferenceInput } from './ReferenceInput';
import { ResourceArrayInput } from './ResourceArrayInput';
/** Props for {@link ResourcePropertyInput}. */
export interface ResourcePropertyInputProps {
  /** Schema entry describing the property being rendered. */
  property: PropertySchema;
  /** Form input name. */
  name: string;
  /** Current/default value for the input. */
  value: any;
  /** True when rendering a single element of an array-valued property. */
  arrayElement?: boolean;
}
/**
 * Renders the appropriate form input for a FHIR resource property based on
 * its schema type, falling back to BackboneElementInput for any type not
 * handled explicitly.
 */
export function ResourcePropertyInput(props: ResourcePropertyInputProps) {
  const property = props.property;
  const name = props.name;
  const value = props.value;
  // Array-valued property (and we are not already rendering one element):
  // attachments get a specialized array input, everything else the generic one.
  if (property.array && !props.arrayElement) {
    if (property.type === 'Attachment') {
      return <AttachmentArrayInput name={name} values={value} />
    }
    return <ResourceArrayInput property={property} name={name} values={value} />
  }
  switch (property.type) {
    // String-like primitives all fall through to a plain text input.
    case 'string':
    case 'canonical':
    case 'date':
    case 'dateTime':
    case 'instant':
    case 'uri':
    case 'url':
    case 'http://hl7.org/fhirpath/System.String':
      return (
        <input type="text" name={name} defaultValue={value}></input>
      );
    // Numeric primitives also render as text inputs (no client-side
    // numeric validation here).
    case 'number':
    case 'integer':
    case 'positiveInt':
    case 'unsignedInt':
      return (
        <input type="text" name={name} defaultValue={value}></input>
      );
    case 'enum':
      return (
        <EnumInput
          name={name}
          label={property.display}
          options={property.enumValues}
          helperText={property.description}
          value={value}
        />);
    case 'boolean':
      return (
        <input type="checkbox" name={name} defaultChecked={!!value} value="true" />
      );
    case 'markdown':
      return (
        <textarea name={name} defaultValue={value} />
      );
    // Complex FHIR datatypes each have a dedicated input component.
    case 'Address':
      return <AddressInput name={name} value={value} />;
    case 'Attachment':
      return <AttachmentInput name={name} value={value} />;
    case 'CodeableConcept':
      return <CodeableConceptInput name={name} value={value} />;
    case 'ContactPoint':
      return <ContactPointInput name={name} value={value} />;
    case 'Device_DeviceName':
      return <DeviceNameInput name={name} value={value} />;
    case 'HumanName':
      return <HumanNameInput name={name} value={value} />;
    case 'Identifier':
      return <IdentifierInput name={name} value={value} />;
    case 'Patient_Link':
      return <PatientLinkInput name={name} value={value} />;
    case 'Reference':
      return <ReferenceInput name={name} value={value} />;
    default:
      return <BackboneElementInput property={property} name={name} value={value} />;
  }
}
|
/// Test double for `LAContext`: never touches real biometrics and replies
/// synchronously based on the three knobs below.
class MockLAContext: LAContext {
    /// Whether the mock simulates an available (enabled) context.
    var enabled = true
    /// Result reported when no explicit `replyError` is configured.
    var replySuccess = true
    /// When non-nil, evaluation fails with this error.
    var replyError: Error? = nil

    override func evaluatePolicy(_ policy: LAPolicy, localizedReason: String, reply: @escaping (Bool, Error?) -> Void) {
        guard enabled else {
            // Simulate disabled context behavior
            reply(false, NSError(domain: LAErrorDomain, code: LAError.appCancel.rawValue, userInfo: nil))
            return
        }
        guard let error = replyError else {
            reply(replySuccess, nil)
            return
        }
        reply(false, error)
    }
}
<filename>src/main/java/br/com/zuporange/bloqueio/BloqueioCartaoController.java
package br.com.zuporange.bloqueio;
import java.net.URI;
import java.util.Optional;
import javax.servlet.http.HttpServletRequest;
import javax.validation.Valid;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestHeader;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.server.ResponseStatusException;
import org.springframework.web.util.UriComponentsBuilder;
import br.com.zuporange.cartao.Cartao;
import br.com.zuporange.cartao.CartaoFeignClient;
import br.com.zuporange.cartao.CartaoRepository;
import br.com.zuporange.cartao.StatusCartao;
import feign.FeignException;
@RestController
@RequestMapping("/bloqueio")
public class BloqueioCartaoController {
	private CartaoRepository cartaoRepository;
	private CartaoFeignClient cartaoFeignClient;
	private BloqueioCartaoRepository bloqueioCartaoRepository;
	// Constructor injection; Spring resolves the three collaborators.
	public BloqueioCartaoController(CartaoRepository cartaoRepository, CartaoFeignClient cartaoFeignClient, BloqueioCartaoRepository bloqueioCartaoRepository) {
		this.cartaoRepository = cartaoRepository;
		this.cartaoFeignClient = cartaoFeignClient;
		this.bloqueioCartaoRepository = bloqueioCartaoRepository;
	}
	// Blocks a card: 404 when unknown, 422 when already blocked, otherwise
	// delegates to the private overload below.
	// FIXME(review): the route declares an {id} *path* variable but the id
	// is bound with @RequestParam, i.e. read from the query string
	// (?id=...). @PathVariable is likely intended — confirm with API
	// consumers before changing the HTTP contract.
	@PostMapping("/{id}")
	public ResponseEntity<?> bloquearCartao(@RequestParam("id") String id,
			HttpServletRequest request, @RequestBody @Valid BloqueioRequest bloqueioRequest, UriComponentsBuilder uriComponentsBuilder){
		Optional<Cartao> checaCartao = cartaoRepository.findById(id);
		if(checaCartao.isEmpty()) {
			throw new ResponseStatusException(HttpStatus.NOT_FOUND);
		}
		if(checaCartao.get().getStatusCartao() == StatusCartao.BLOQUEADO) {
			throw new ResponseStatusException(HttpStatus.UNPROCESSABLE_ENTITY,"Cartao ja bloqueado");
		}
		return bloquearCartao(checaCartao.get(), bloqueioRequest, request, uriComponentsBuilder);
	}
	// Persists the local block record, notifies the card system via Feign,
	// and returns 201 with the new resource location. A Feign 422 is
	// translated into our own 422.
	// NOTE(review): @RequestBody/@Valid on this private method are inert —
	// Spring only processes them on handler methods.
	private ResponseEntity<?> bloquearCartao(Cartao cartao, @RequestBody @Valid BloqueioRequest bloqueioRequest,
			HttpServletRequest request, UriComponentsBuilder uriComponentsBuilder) {
		try {
			BloqueioCartao bloqueioCartao = new BloqueioCartao(request.getLocalAddr(), request.getHeader("user-agent"), cartao);
			bloqueioCartao.bloquearCartao(cartao);
			bloqueioCartao = bloqueioCartaoRepository.save(bloqueioCartao);
			cartaoFeignClient.bloqueioCartao(cartao.getNumeroCartao(), new BloqueioRequest(bloqueioRequest));
			URI uri = uriComponentsBuilder.path("/bloqueio/{id}").build(bloqueioCartao.getId());
			return ResponseEntity.created(uri).build();
		} catch (FeignException.UnprocessableEntity e) {
			throw new ResponseStatusException(HttpStatus.UNPROCESSABLE_ENTITY,"Falha ao bloquear o cartão.");
		}
	}
}
|
# frozen_string_literal: true

# Backport of Integer#allbits?/#anybits?/#nobits? (core methods since
# Ruby 2.5). The mask-coercion logic was duplicated verbatim in all three
# predicates; it is now extracted into one private helper so the TypeError
# message stays consistent in a single place.
class Integer
  # True when every bit set in +mask+ is also set in self.
  def allbits?(mask)
    m = __coerced_bit_mask(mask)
    self & m == m
  end

  # True when at least one bit set in +mask+ is set in self.
  def anybits?(mask)
    !(self & __coerced_bit_mask(mask)).zero?
  end

  # True when none of the bits set in +mask+ are set in self.
  def nobits?(mask)
    (self & __coerced_bit_mask(mask)).zero?
  end

  private

  # Coerces +mask+ via #to_int; raises TypeError with the same message the
  # original inlined copies produced (nil/true/false are shown via inspect).
  def __coerced_bit_mask(mask)
    return mask.to_int if mask.respond_to?(:to_int)

    classname = mask.class
    classname = mask.inspect if mask.nil? || mask.equal?(false) || mask.equal?(true)
    raise TypeError, "no implicit conversion of #{classname} into #{self.class}"
  end
end
|
"""
Generate a program to normalize the values in a list
"""
def normalize(arr):
max_val = max(arr)
min_val = min(arr)
range_val = max_val - min_val
return [(x - min_val)/range_val if range_val!=0 else 0 for x in arr]
arr = [2, 3, 5, 10, 4, 8]
print(normalize(arr)) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.