text stringlengths 1 1.05M |
|---|
-- The SQL*Plus script collection_user.sql performs the following:
-- 1. Creates collection_user
-- 2. Creates the database object and collection type
-- 3. Populates the database tables with example data
-- This script should be run by the system user (or the DBA)
-- NOTE(review): credentials are hardcoded below (system/manager and
-- collection_user/collection_password); acceptable only for a throwaway
-- example database -- never reuse this pattern in real environments.
CONNECT system/manager;
-- drop collection_user (CASCADE also drops all of its objects; ignore the
-- error on first run when the user does not exist yet)
DROP USER collection_user CASCADE;
-- create collection_user
CREATE USER collection_user IDENTIFIED BY collection_password;
-- allow collection_user to connect and create database objects
GRANT connect, resource TO collection_user;
-- connect as collection_user
CONNECT collection_user/collection_password;
-- create the object and collection types
-- address_typ: one postal address (street/city/state/zip)
CREATE TYPE address_typ AS OBJECT (
street VARCHAR2(15),
city VARCHAR2(15),
state CHAR(2),
zip VARCHAR2(5)
);
/
-- varray_address_typ: at most 2 addresses stored as free-form strings
CREATE TYPE varray_address_typ AS VARRAY(2) OF VARCHAR2(50);
/
-- nested_table_address_typ: unbounded collection of address_typ objects
CREATE TYPE nested_table_address_typ AS TABLE OF address_typ;
/
-- create the tables
CREATE TABLE customers_with_varray (
id INTEGER PRIMARY KEY,
first_name VARCHAR2(10),
last_name VARCHAR2(10),
addresses varray_address_typ
);
-- the nested-table column requires a dedicated storage table (nested_addresses)
CREATE TABLE customers_with_nested_table (
id INTEGER PRIMARY KEY,
first_name VARCHAR2(10),
last_name VARCHAR2(10),
addresses nested_table_address_typ
)
NESTED TABLE
addresses
STORE AS
nested_addresses;
-- create the PL/SQL packages
-- varray_package: read and insert rows of customers_with_varray via a
-- weakly-typed REF CURSOR (usable from client code such as JDBC/ODP.NET).
CREATE OR REPLACE PACKAGE varray_package AS
TYPE ref_cursor_typ IS REF CURSOR;
-- Returns an open cursor over all customers; the caller must close it.
FUNCTION get_customers RETURN ref_cursor_typ;
-- Inserts one customer row; parameter types are anchored to the table.
PROCEDURE insert_customer (
p_id IN customers_with_varray.id%TYPE,
p_first_name IN customers_with_varray.first_name%TYPE,
p_last_name IN customers_with_varray.last_name%TYPE,
p_addresses IN customers_with_varray.addresses%TYPE
);
END varray_package;
/
CREATE OR REPLACE PACKAGE BODY varray_package AS

  -- Returns an open REF CURSOR over every row in customers_with_varray.
  -- The caller is responsible for closing the cursor.
  FUNCTION get_customers
  RETURN ref_cursor_typ IS
    customers_ref_cursor ref_cursor_typ;
  BEGIN
    -- explicit column list so the cursor shape cannot change silently
    -- if columns are later added to the table
    OPEN customers_ref_cursor FOR
      SELECT id, first_name, last_name, addresses
      FROM customers_with_varray;
    RETURN customers_ref_cursor;
  END get_customers;

  -- Inserts a single customer row and commits.
  -- On failure the statement is rolled back and the exception is re-raised
  -- so callers can detect it (the previous version swallowed all errors).
  PROCEDURE insert_customer (
    p_id IN customers_with_varray.id%TYPE,
    p_first_name IN customers_with_varray.first_name%TYPE,
    p_last_name IN customers_with_varray.last_name%TYPE,
    p_addresses IN customers_with_varray.addresses%TYPE
  ) IS
  BEGIN
    INSERT INTO customers_with_varray (id, first_name, last_name, addresses)
    VALUES (p_id, p_first_name, p_last_name, p_addresses);
    COMMIT;
  EXCEPTION
    WHEN OTHERS THEN
      ROLLBACK;
      RAISE; -- propagate the original error instead of hiding it
  END insert_customer;

END varray_package;
/
-- nested_table_package: same contract as varray_package, but for the
-- customers_with_nested_table table.
CREATE OR REPLACE PACKAGE nested_table_package AS
TYPE ref_cursor_typ IS REF CURSOR;
-- Returns an open cursor over all customers; the caller must close it.
FUNCTION get_customers RETURN ref_cursor_typ;
-- Inserts one customer row; parameter types are anchored to the table.
PROCEDURE insert_customer (
p_id IN customers_with_nested_table.id%TYPE,
p_first_name IN customers_with_nested_table.first_name%TYPE,
p_last_name IN customers_with_nested_table.last_name%TYPE,
p_addresses IN customers_with_nested_table.addresses%TYPE
);
END nested_table_package;
/
CREATE OR REPLACE PACKAGE BODY nested_table_package AS

  -- Returns an open REF CURSOR over every row in customers_with_nested_table.
  -- The caller is responsible for closing the cursor.
  FUNCTION get_customers
  RETURN ref_cursor_typ IS
    customers_ref_cursor ref_cursor_typ;
  BEGIN
    -- explicit column list so the cursor shape cannot change silently
    -- if columns are later added to the table
    OPEN customers_ref_cursor FOR
      SELECT id, first_name, last_name, addresses
      FROM customers_with_nested_table;
    RETURN customers_ref_cursor;
  END get_customers;

  -- Inserts a single customer row and commits.
  -- On failure the statement is rolled back and the exception is re-raised
  -- so callers can detect it (the previous version swallowed all errors).
  PROCEDURE insert_customer (
    p_id IN customers_with_nested_table.id%TYPE,
    p_first_name IN customers_with_nested_table.first_name%TYPE,
    p_last_name IN customers_with_nested_table.last_name%TYPE,
    p_addresses IN customers_with_nested_table.addresses%TYPE
  ) IS
  BEGIN
    INSERT INTO customers_with_nested_table (id, first_name, last_name, addresses)
    VALUES (p_id, p_first_name, p_last_name, p_addresses);
    COMMIT;
  EXCEPTION
    WHEN OTHERS THEN
      ROLLBACK;
      RAISE; -- propagate the original error instead of hiding it
  END insert_customer;

END nested_table_package;
/
-- collection_method_examples: demonstrates the PL/SQL collection methods
-- (COUNT, DELETE, EXTEND, FIRST, LAST, NEXT, PRIOR, TRIM) against a local
-- copy of customer #1's nested address table. Output goes to DBMS_OUTPUT,
-- so run with SET SERVEROUTPUT ON.
CREATE OR REPLACE PACKAGE collection_method_examples AS
-- Loads and returns the addresses collection for the given customer id.
FUNCTION initialize_addresses (
id_par customers_with_nested_table.id%TYPE
) RETURN nested_table_address_typ;
-- Prints each element of the given collection.
PROCEDURE display_addresses (
addresses_par nested_table_address_typ
);
-- Deletes the element at the given index from a local copy (DELETE demo).
PROCEDURE delete_address (
address_num_par INTEGER
);
PROCEDURE extend_addresses;
PROCEDURE first_address;
PROCEDURE last_address;
PROCEDURE next_address;
PROCEDURE prior_address;
PROCEDURE trim_addresses;
END collection_method_examples;
/
CREATE OR REPLACE PACKAGE BODY collection_method_examples AS
-- Loads the addresses nested table of the customer with the given id into
-- a local variable and reports its element count (COUNT demo).
FUNCTION initialize_addresses (
id_par customers_with_nested_table.id%TYPE
) RETURN nested_table_address_typ IS
addresses_var nested_table_address_typ;
BEGIN
DBMS_OUTPUT.PUT_LINE('Initializing addresses');
SELECT addresses
INTO addresses_var
FROM customers_with_nested_table
WHERE id = id_par;
DBMS_OUTPUT.PUT_LINE(
'Number of addresses = '|| addresses_var.COUNT
);
RETURN addresses_var;
END initialize_addresses;
-- Prints every element of the given collection.
-- NOTE(review): the 1..COUNT loop assumes a dense collection; after a
-- DELETE(n) the collection is sparse and indexing a deleted slot raises
-- NO_DATA_FOUND -- confirm whether FIRST/NEXT iteration was intended for
-- the delete_address demo below.
PROCEDURE display_addresses (
addresses_par nested_table_address_typ
) IS
count_var INTEGER;
BEGIN
DBMS_OUTPUT.PUT_LINE(
'Current number of addresses = '|| addresses_par.COUNT
);
FOR count_var IN 1..addresses_par.COUNT LOOP
DBMS_OUTPUT.PUT_LINE('Address #' || count_var || ':');
DBMS_OUTPUT.PUT(addresses_par(count_var).street || ', ');
DBMS_OUTPUT.PUT(addresses_par(count_var).city || ', ');
DBMS_OUTPUT.PUT(addresses_par(count_var).state || ', ');
DBMS_OUTPUT.PUT_LINE(addresses_par(count_var).zip);
END LOOP;
END display_addresses;
-- DELETE demo: removes the element at address_num_par from a local copy.
PROCEDURE delete_address (
address_num_par INTEGER
) IS
addresses_var nested_table_address_typ;
BEGIN
addresses_var := initialize_addresses(1);
display_addresses(addresses_var);
DBMS_OUTPUT.PUT_LINE('Deleting address #' || address_num_par);
addresses_var.DELETE(address_num_par);
display_addresses(addresses_var);
END delete_address;
-- EXTEND demo: appends 2 copies of element #1 to a local copy.
PROCEDURE extend_addresses IS
addresses_var nested_table_address_typ;
BEGIN
addresses_var := initialize_addresses(1);
display_addresses(addresses_var);
DBMS_OUTPUT.PUT_LINE('Extending addresses');
addresses_var.EXTEND(2, 1);
display_addresses(addresses_var);
END extend_addresses;
-- FIRST demo: shows that FIRST moves forward after element #1 is deleted.
PROCEDURE first_address IS
addresses_var nested_table_address_typ;
BEGIN
addresses_var := initialize_addresses(1);
DBMS_OUTPUT.PUT_LINE('First address = ' || addresses_var.FIRST);
DBMS_OUTPUT.PUT_LINE('Deleting address #1');
addresses_var.DELETE(1);
DBMS_OUTPUT.PUT_LINE('First address = ' || addresses_var.FIRST);
END first_address;
-- LAST demo: shows that LAST moves backward after the last element is deleted.
PROCEDURE last_address IS
addresses_var nested_table_address_typ;
BEGIN
addresses_var := initialize_addresses(1);
DBMS_OUTPUT.PUT_LINE('Last address = ' || addresses_var.LAST);
DBMS_OUTPUT.PUT_LINE('Deleting address #2');
addresses_var.DELETE(2);
DBMS_OUTPUT.PUT_LINE('Last address = ' || addresses_var.LAST);
END last_address;
-- NEXT demo: NEXT(n) returns the following index, or NULL past the end.
PROCEDURE next_address IS
addresses_var nested_table_address_typ;
BEGIN
addresses_var := initialize_addresses(1);
DBMS_OUTPUT.PUT_LINE(
'addresses_var.NEXT(1) = ' || addresses_var.NEXT(1)
);
DBMS_OUTPUT.PUT_LINE(
'addresses_var.NEXT(2) = ' || addresses_var.NEXT(2)
);
END next_address;
-- PRIOR demo: PRIOR(n) returns the preceding index, or NULL before the start.
PROCEDURE prior_address IS
addresses_var nested_table_address_typ;
BEGIN
addresses_var := initialize_addresses(1);
DBMS_OUTPUT.PUT_LINE(
'addresses_var.PRIOR(2) = ' || addresses_var.PRIOR(2)
);
DBMS_OUTPUT.PUT_LINE(
'addresses_var.PRIOR(1) = ' || addresses_var.PRIOR(1)
);
END prior_address;
-- TRIM demo: extends by 3 copies of element #1, then trims 2 off the end.
PROCEDURE trim_addresses IS
addresses_var nested_table_address_typ;
BEGIN
addresses_var := initialize_addresses(1);
display_addresses(addresses_var);
DBMS_OUTPUT.PUT_LINE('Extending addresses');
addresses_var.EXTEND(3, 1);
display_addresses(addresses_var);
DBMS_OUTPUT.PUT_LINE('Trimming 2 addresses from end');
addresses_var.TRIM(2);
display_addresses(addresses_var);
END trim_addresses;
END collection_method_examples;
/
-- insert sample data into customers_with_varray table
-- (VARRAY elements are whole addresses flattened into single strings)
INSERT INTO customers_with_varray VALUES (
1, 'Steve', 'Brown',
varray_address_typ(
'2 State Street, Beantown, MA, 12345',
'4 Hill Street, Lost Town, CA, 54321'
)
);
-- insert sample data into customers_with_nested_table table
-- (nested-table elements are structured address_typ objects)
INSERT INTO customers_with_nested_table VALUES (
1, 'Steve', 'Brown',
nested_table_address_typ(
address_typ('2 State Street', 'Beantown', 'MA', '12345'),
address_typ('4 Hill Street', 'Lost Town', 'CA', '54321')
)
);
-- commit the transaction
COMMIT;
import { Factory } from 'miragejs';
// Mirage factory for version-download records used in tests.
export default Factory.extend({
// Fixed date keeps generated fixtures deterministic across runs.
date: '2019-05-21',
// Deterministic pseudo-random download count derived from the factory index.
downloads: i => (((i * 42) % 13) + 4) * 2345,
afterCreate(self) {
// Guard: every version-download must be linked to a version record.
if (!self.versionId) {
throw new Error(`Missing \`version\` relationship on \`version-download:${self.date}\``);
}
},
});
|
<reponame>Trice254/alx-higher_level_programming
#!/usr/bin/python3
def search_replace(my_list, search, replace):
    """Return a new list with every element equal to `search` replaced.

    Args:
        my_list: the input list (not modified).
        search: value to look for (compared with ==).
        replace: value substituted for each match.

    Returns:
        A new list of the same length; non-matching elements are kept as-is.
    """
    # A comprehension replaces the manual append loop; same semantics.
    return [replace if x == search else x for x in my_list]
|
import React from 'react';
import axios from 'axios';
// Fetches a list from a remote API on mount and renders each item's name.
class App extends React.Component {
constructor() {
super();
// `data` holds the fetched list; empty until the request resolves.
this.state = {
data: []
};
}
componentDidMount() {
// NOTE(review): hard-coded plain-HTTP endpoint -- presumably a placeholder;
// confirm and move to configuration before real use. Errors from the
// request are currently unhandled (no .catch).
axios.get('http://example.com/api/data')
.then(res => {
const data = res.data;
this.setState({ data });
});
}
render() {
// Assumes each item has `id` and `name` fields -- TODO confirm API schema.
return (
<div>
{this.state.data.map(item =>
<p key={item.id}>{item.name}</p>
)}
</div>
);
}
}
export default App; |
<gh_stars>1-10
// Clock face themes: each entry names a theme, gives the background color
// plus the colors for lit ("on") and unlit ("off") segments, and whether
// text is rendered on the face.
var clockfaces = [
{
name: 'Red',
colors: {
background: "#200000",
lineOn: "#FF0000",
lineOff: "#5A0000"
},
text: true
},
{
name: 'Green',
colors: {
background: "#002000",
lineOn: "#00FF00",
lineOff: "#005A00"
},
text: true
},
{
name: 'Blue',
colors: {
background: "#000020",
lineOn: "#0000FF",
lineOff: "#00005A"
},
text: true
}
];
import {getElements} from './dom';
import {getShowtimes} from './time';
const DEFAULTS = {
context: document,
attr_after: 'data-stagetime-after',
attr_until: 'data-stagetime-until',
class_on: 'stagetime-on',
class_off: 'stagetime-off',
};
/**
Show or hide elements based on data attributes.
@memberof stagetime
@param {Element} [el] Element to initialize. If missing, all elements with
either a `data-stagetime-after` or `data-stagetime-until` attributes will be
initialized.
@param {Object} [settings] Override default settings.
@param {Object} [settings.context=document] The context to search for nodes.
@param {string} [settings.attr_after] The attribute for time start.
@param {string} [settings.attr_until] The attribute for time end.
@param {string} [settings.class_on] The class when object should be shown.
@param {string} [settings.class_off] The class when object should be hidden.
@returns {Element|Element[]} Returns the elements that were initialized.
@description
* This function inspects the `data-stagetime-after` and `data-stagetime-until`
* attributes of the given element and then adds or removes the `stagetime-on`
* or `stagetime-off` classes appropriately.
*
* This function will also schedule future checks in case the `after` or
* `until` times are in the future.
*/
const init = (el, settings = {}) => {
// merge caller overrides over the defaults (shallow)
const opts = Object.assign({}, DEFAULTS, settings);
if (!el) {
// no element given: find every element carrying either attribute and
// recurse once per element
const sel = `[${opts.attr_after}],[${opts.attr_until}]`;
return getElements(sel, opts.context).map(item => init(item, opts));
} // end if: all elements initialized
const times = getShowtimes(
el.getAttribute(opts.attr_after),
el.getAttribute(opts.attr_until)
);
// apply exactly one of the on/off classes and drop the other
el.classList.add(times.is_on ? opts.class_on : opts.class_off);
el.classList.remove(times.is_on ? opts.class_off : opts.class_on);
// re-run when a future boundary is crossed; note the re-run uses default
// settings, not `opts` -- presumably intentional, but verify if callers
// pass custom attribute names
if (times.show_in) { setTimeout(() => init(el), times.show_in); }
if (times.hide_in) { setTimeout(() => init(el), times.hide_in); }
return el;
};
init(); // run automatically on module load
export {init, DEFAULTS}
export default {
init: init,
DEFAULTS: DEFAULTS
}
|
let uuid = null;
if (process.env.PARSE_BUILD === 'weapp') {
  // WeChat mini-programs cannot load the `uuid` npm package, so build an
  // RFC 4122 version-4 UUID by hand from random hex digits.
  uuid = function () {
    const s = [];
    const hexDigits = '0123456789abcdef';
    for (let i = 0; i < 36; i++) {
      s[i] = hexDigits.substr(Math.floor(Math.random() * 0x10), 1);
    }
    s[14] = '4'; // bits 12-15 of the time_hi_and_version field to 0010
    // bits 6-7 of clock_seq_hi_and_reserved to 01 (RFC 4122 variant).
    // Parse the hex character back to a number before masking: the original
    // applied `&` directly to the string, which coerced 'a'-'f' to NaN (-> 0)
    // and biased this digit toward '8'.
    s[19] = hexDigits.substr((parseInt(s[19], 16) & 0x3) | 0x8, 1);
    s[8] = s[13] = s[18] = s[23] = '-';
    return s.join('');
  };
} else {
  // Node / browser builds use the standard uuid package.
  uuid = require('uuid/v4');
}
module.exports = uuid;
|
<html>
<body>
<!-- Minimal form: POSTs a single number to submit.php for processing -->
<h3>Enter Any Number : </h3>
<form action="submit.php" method="post">
<input type="number" name="number" />
<input type="submit" name="submit" value="Submit" />
</form>
</body>
</html>
<filename>internal/oas/parser/schema_parser_enum.go
package parser
import (
"encoding/json"
"github.com/go-faster/errors"
"github.com/go-faster/jx"
"github.com/ogen-go/ogen/internal/oas"
)
// parseEnumValues decodes each raw JSON enum value according to the schema
// type, rejecting duplicates, and returns the decoded Go values in order.
func parseEnumValues(typ oas.SchemaType, rawValues []json.RawMessage) ([]interface{}, error) {
	var (
		values []interface{}
		// unique tracks decoded values already seen; decoded results are
		// comparable scalars (string/int64/float64/bool/nil), so they can
		// serve as map keys.
		unique = map[interface{}]struct{}{}
	)
	for _, raw := range rawValues {
		val, err := parseJSONValue(typ, raw)
		if err != nil {
			return nil, errors.Wrapf(err, "parse value %q", raw)
		}
		if _, found := unique[val]; found {
			return nil, errors.Errorf("duplicate enum value: '%v'", val)
		}
		unique[val] = struct{}{}
		values = append(values, val)
	}
	return values, nil
}
// parseJSONValue decodes one raw JSON value as the given schema type.
// JSON null decodes to nil for every type; otherwise the JSON token kind
// must match the schema type or an error is returned. Integers are
// additionally checked to be whole numbers.
func parseJSONValue(typ oas.SchemaType, v json.RawMessage) (interface{}, error) {
	var (
		d = jx.DecodeBytes(v)
		// peek at the next token kind without consuming it
		next = d.Next()
	)
	if next == jx.Null {
		// null is accepted for any schema type
		return nil, nil
	}
	switch typ {
	case oas.String:
		if next != jx.String {
			return nil, errors.Errorf("expected type %q, got %q", typ, next)
		}
		return d.Str()
	case oas.Integer:
		if next != jx.Number {
			return nil, errors.Errorf("expected type %q, got %q", typ, next)
		}
		n, err := d.Num()
		if err != nil {
			return nil, err
		}
		// reject e.g. 1.5 for an integer-typed enum
		if !n.IsInt() {
			return nil, errors.New("expected integer, got float")
		}
		return n.Int64()
	case oas.Number:
		if next != jx.Number {
			return nil, errors.Errorf("expected type %q, got %q", typ, next)
		}
		n, err := d.Num()
		if err != nil {
			return nil, err
		}
		return n.Float64()
	case oas.Boolean:
		if next != jx.Bool {
			return nil, errors.Errorf("expected type %q, got %q", typ, next)
		}
		return d.Bool()
	default:
		// oas.Object / oas.Array / unknown types are not valid enum scalars
		return nil, errors.Errorf("unexpected type: %q", typ)
	}
}
|
#! /usr/bin/env nix-shell
#! nix-shell -i bash -p curl jq unzip
set -eu -o pipefail
# Helper to just fail with a message and non-zero exit code.
# $1: message printed to stderr before exiting with status 1.
function fail() {
echo "$1" >&2
exit 1
}
# Helper to clean up after ourselves if we're killed by SIGINT.
# Removes any leftover vscode_exts_* tempdirs under $TMPDIR (or /tmp).
function clean_up() {
  TDIR="${TMPDIR:-/tmp}"
  echo "Script killed, cleaning up tmpdirs: $TDIR/vscode_exts_*" >&2
  # The glob must stay OUTSIDE the quotes: the original quoted the whole
  # pattern, which passed the literal string 'vscode_exts_*' to rm and
  # therefore deleted nothing.
  rm -Rf "$TDIR"/vscode_exts_*
}
# Download the latest VSIX for publisher $1 / extension $2 from the VS Code
# marketplace, then print a Nix attrset with its name, publisher, version
# and sha256. Works in a throwaway tempdir that is removed afterwards.
function get_vsixpkg() {
N="$1.$2"
# Create a tempdir for the extension download.
EXTTMP=$(mktemp -d -t vscode_exts_XXXXXXXX)
URL="https://$1.gallery.vsassets.io/_apis/public/gallery/publisher/$1/extension/$2/latest/assetbyname/Microsoft.VisualStudio.Services.VSIXPackage"
# Quietly but delicately curl down the file, blowing up at the first sign of trouble.
curl --silent --show-error --fail -X GET -o "$EXTTMP/$N.zip" "$URL"
# Unpack extension/package.json to stdout and pull out the version field.
VER=$(jq -r '.version' <(unzip -qc "$EXTTMP/$N.zip" "extension/package.json"))
# Calculate the SHA256 in Nix's base32 format.
SHA=$(nix-hash --flat --base32 --type sha256 "$EXTTMP/$N.zip")
# Clean up.
rm -Rf "$EXTTMP"
# I don't like 'rm -Rf' lurking in my scripts but this seems appropriate.
cat <<-EOF
{
name = "$2";
publisher = "$1";
version = "$VER";
sha256 = "$SHA";
}
EOF
}
# See if we can find our `code` binary somewhere.
# Usage: script [path-to-code-binary]; falls back to `code` or `codium` on PATH.
if [ $# -ne 0 ]; then
CODE=$1
else
CODE=$(command -v code || command -v codium)
fi
if [ -z "$CODE" ]; then
# Not much point continuing.
fail "VSCode executable not found"
fi
# Try to be a good citizen and clean up after ourselves if we're killed.
trap clean_up SIGINT
# Begin the printing of the nix expression that will house the list of extensions.
printf '{ extensions = [\n'
# Note that we are only looking to update extensions that are already installed.
# (extension ids are publisher.name, split on the first dot below)
for i in $($CODE --list-extensions)
do
OWNER=$(echo "$i" | cut -d. -f1)
EXT=$(echo "$i" | cut -d. -f2)
get_vsixpkg "$OWNER" "$EXT"
done
# Close off the nix expression.
printf '];\n}'
<reponame>aanchal-fatwani/Javascript-Practice-programs<filename>javascript-practice-programs/JavaScriptDates.js<gh_stars>10-100
// Print today's date in four common formats (MM/DD/YYYY, MM-DD-YYYY,
// DD-MM-YYYY, DD/MM/YYYY), zero-padding day and month to two digits.
let now = new Date();
let dayPart = now.getDate();
let monthPart = now.getMonth() + 1; // getMonth() is zero-based
let yearPart = now.getFullYear();
// zero-pad single-digit day/month by prefixing '0'
if (dayPart < 10) {
  dayPart = '0' + dayPart;
}
if (monthPart < 10) {
  monthPart = '0' + monthPart;
}
console.log(monthPart + '/' + dayPart + '/' + yearPart);
console.log(monthPart + '-' + dayPart + '-' + yearPart);
console.log(dayPart + '-' + monthPart + '-' + yearPart);
console.log(dayPart + '/' + monthPart + '/' + yearPart);
""" Remove linear trend along axis from data.
Parameters
~~~~~~~~~~
input_array : panda obj
The input data.
Type : {'linear', 'constant'}, optional
The type of detrending.
If ``type == 'linear'`` (default),
The result of a linear least-squares fit to `data` is subtracted
from `data`.
If ``type == 'constant'``,
only the mean of `data` is subtracted.
Note
~~~~
see <https://docs.scipy.org/doc/scipy/reference/generated/scipy.signal.detrend.html>_ for more info
"""
from scipy import signal
def detrend(input_array, args={'Type': {'linear': True, 'constant': False}}):
    """Remove a linear or constant trend from `input_array`.

    Args:
        input_array: array-like data passed to scipy.signal.detrend.
        args: dict whose 'Type' entry selects the mode. It may be either the
            mode string itself ('linear' or 'constant') or a {mode: bool}
            mapping in which the enabled mode is flagged True (the default).

    Returns:
        The detrended array (numpy ndarray, as returned by scipy).
    """
    mode = args['Type']
    if isinstance(mode, dict):
        # Pick the mode flagged True. The previous version passed the whole
        # dict straight to scipy's `type=` argument, which raised ValueError
        # for the default settings; this restores the intended selection
        # (visible in the commented-out line it replaced).
        mode = next(key for key, enabled in mode.items() if enabled)
    return signal.detrend(input_array, type=mode)
<filename>app/controllers/game_events_controller.rb
# Standard Rails CRUD controller for GameEvent records (scaffold-style),
# responding to both HTML and JSON.
class GameEventsController < ApplicationController
before_action :set_game_event, only: [:show, :edit, :update, :destroy]
# GET /game_events
# GET /game_events.json
def index
@game_events = GameEvent.all
end
# GET /game_events/1
# GET /game_events/1.json
def show
end
# GET /game_events/new
def new
@game_event = GameEvent.new
end
# GET /game_events/1/edit
def edit
end
# POST /game_events
# POST /game_events.json
def create
@game_event = GameEvent.new(game_event_params)
respond_to do |format|
if @game_event.save
format.html { redirect_to @game_event, notice: 'Game event was successfully created.' }
format.json { render action: 'show', status: :created, location: @game_event }
else
format.html { render action: 'new' }
format.json { render json: @game_event.errors, status: :unprocessable_entity }
end
end
end
# PATCH/PUT /game_events/1
# PATCH/PUT /game_events/1.json
def update
respond_to do |format|
if @game_event.update(game_event_params)
format.html { redirect_to @game_event, notice: 'Game event was successfully updated.' }
format.json { head :no_content }
else
format.html { render action: 'edit' }
format.json { render json: @game_event.errors, status: :unprocessable_entity }
end
end
end
# DELETE /game_events/1
# DELETE /game_events/1.json
def destroy
@game_event.destroy
respond_to do |format|
format.html { redirect_to game_events_url }
format.json { head :no_content }
end
end
private
# Use callbacks to share common setup or constraints between actions.
# Raises ActiveRecord::RecordNotFound (-> 404) for unknown ids.
def set_game_event
@game_event = GameEvent.find(params[:id])
end
# Never trust parameters from the scary internet, only allow the white list through.
def game_event_params
params.require(:game_event).permit(:description)
end
end
|
class MultiplicationTable {
constructor(num: number) {
for (let i = 1; i <= num; i++) {
for (let j = 1; j <= num; j++) {
console.log(i * j + ' ');
}
console.log('');
}
}
}
const table = new MultiplicationTable(5); |
<reponame>lananh265/social-network
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.menu = void 0;
var menu = {
"viewBox": "0 0 8 8",
"children": [{
"name": "path",
"attribs": {
"d": "M0 0v1h8v-1h-8zm0 2.97v1h8v-1h-8zm0 3v1h8v-1h-8z",
"transform": "translate(0 1)"
}
}]
};
exports.menu = menu; |
#ifndef _KERNEL_MEM_VMM_H_
#define _KERNEL_MEM_VMM_H_
/* virtual memory management */
#include "pub/com.h"
#include "pub/dllist.h"
#include "mem/mmu.h"
#include "lib/sync.h"
struct vma_set_t_tag;
/* one contiguous virtual memory area [start, end) with access flags,
   linked into its owning set's list */
typedef struct {
struct vma_set_t_tag *set; /* owning set (back pointer) */
uintptr_t start; /* area start address */
uintptr_t end; /* area end address */
uint32_t flags; /* VMA_FLAG_* permission bits */
dllist_t link; /* node in the set's mset list */
} vma_t;
/* a process's collection of VMAs plus its page directory */
typedef struct vma_set_t_tag {
dllist_t mset; /* list head of member vma_t nodes */
vma_t *mcache; /* last-hit cache for vma_set_find */
size_t mcount; /* number of VMAs in the set */
pde_t *pgdir; /* page directory of this address space */
void *swap_data; /* opaque data used by the swap subsystem */
} vma_set_t;
/* recover the enclosing vma_t from one of its dllist_t members */
#define dll2vma(dll, member) \
to_struct((dll), vma_t, member)
#define VMA_FLAG_READ 0x00000001
#define VMA_FLAG_WRITE 0x00000002
#define VMA_FLAG_EXEC 0x00000004
/* allocate a new VMA covering [start, end) with the given flags */
vma_t *vma_new(uintptr_t start, uintptr_t end, uint32_t flags);
vma_set_t *vma_set_new();
void vma_set_free(vma_set_t *set);
/* insert vma into set; ordering/overlap policy defined in the .c file */
void vma_set_insert(vma_set_t *set, vma_t *vma);
/* find the VMA containing addr, or NULL */
vma_t *vma_set_find(vma_set_t *set, uintptr_t addr);
void vmm_init();
/* handle a page fault at addr with the hardware error code; returns status */
int vmm_doPageFault(vma_set_t *set, uint32_t error, uintptr_t addr);
size_t vmm_getPageFaultCount();
#endif
|
<gh_stars>1-10
package main

import (
	"fmt"
)

// S is a named string type carrying a Println convenience method.
type S string

// Println writes the receiver's value to stdout followed by a newline.
func (s S) Println() {
	fmt.Println(s)
}

func main() {
	msg := S("foo")
	msg.Println()
}
|
package com.lmj.vueblog.config.shiro;
import cn.hutool.core.bean.BeanUtil;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.lmj.vueblog.entity.Permission;
import com.lmj.vueblog.entity.Role;
import com.lmj.vueblog.entity.User;
import com.lmj.vueblog.service.PermissionService;
import com.lmj.vueblog.service.RoleService;
import com.lmj.vueblog.service.UserService;
import com.lmj.vueblog.util.JwtUtils;
import org.apache.shiro.authc.*;
import org.apache.shiro.authz.AuthorizationInfo;
import org.apache.shiro.authz.SimpleAuthorizationInfo;
import org.apache.shiro.realm.AuthorizingRealm;
import org.apache.shiro.subject.PrincipalCollection;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.util.CollectionUtils;
import java.util.List;
/**
 * Core login and permission-checking logic for Shiro.
 * AccountRealm is where Shiro performs login and authorization checks.
 * Three methods are overridden:
 * supports: makes the realm accept JWT credentials
 * doGetAuthorizationInfo: authorization (role/permission) check
 * doGetAuthenticationInfo: login authentication check
 * */
//@Component
public class AccountRealm extends AuthorizingRealm {
@Autowired
JwtUtils jwtUtils;
@Autowired
UserService userService;
@Autowired
RoleService roleService;
@Autowired
PermissionService permissionService;
/**
 * Only accept tokens of type JwtToken.
 * */
@Override
public boolean supports(AuthenticationToken token) {
return token instanceof JwtToken;
}
/**
 * Authorization check: loads the user's roles and, for each role, its
 * permissions, into a SimpleAuthorizationInfo. Returns null when the
 * user has no roles.
 * */
@Override
protected AuthorizationInfo doGetAuthorizationInfo(PrincipalCollection principals) {
System.out.println("执行了=>doGetAuthorizationInfo");
// fetch the primary principal (set during authentication below)
AccountProfile profile = (AccountProfile) principals.getPrimaryPrincipal();
User user = userService.getOne(new QueryWrapper<User>().eq("username",profile.getUsername()));
List<Role> roles = roleService.getRolesByUserId(user.getId());
if(!CollectionUtils.isEmpty(roles)){
SimpleAuthorizationInfo simpleAuthorizationInfo = new SimpleAuthorizationInfo();
roles.forEach(role -> {
simpleAuthorizationInfo.addRole(role.getName());
List<Permission> permissions = permissionService.getPermissionsByRoleId(role.getId());
if(!CollectionUtils.isEmpty(permissions)){
permissions.forEach(permission -> {
simpleAuthorizationInfo.addStringPermission(permission.getName());
});
}
});
return simpleAuthorizationInfo;
}
return null;
}
/**
 * Login authentication check: resolves the user id from the JWT subject,
 * verifies the account exists and is not locked, and returns an
 * AuthenticationInfo wrapping an AccountProfile.
 * */
@Override
protected AuthenticationInfo doGetAuthenticationInfo(AuthenticationToken token) throws AuthenticationException {
System.out.println("执行了=>doGetAuthenticationInfo");
JwtToken jwtToken = (JwtToken) token;
String userId = jwtUtils.getClaimByToken((String) jwtToken.getPrincipal()).getSubject();
User user = userService.getById(Long.valueOf(userId));
if (user == null) {
throw new UnknownAccountException("账户不存在");
}
if (user.getStatus() == -1) {
throw new LockedAccountException("账户已被锁定");
}
// AccountProfile is the carrier of user info returned after a successful login
AccountProfile profile = new AccountProfile();
BeanUtil.copyProperties(user, profile);
// Token authentication. NOTE(review): not the canonical form -- the first
// argument should be the token itself, otherwise logout cannot evict the
// cached entry when caching is enabled; confirm before enabling a cache.
return new SimpleAuthenticationInfo(profile, jwtToken.getCredentials(), getName());
}
}
|
import { Component, OnInit } from '@angular/core';
import { UserFormQuestions } from '../models/user-form-questions';
import { FormDataService } from '../services/form-data.service';
@Component({
selector: 'app-form-step-five',
templateUrl: './form-step-five.component.html',
styleUrls: ['./form-step-five.component.scss'],
})
// Fifth step of the multi-step form; binds to the shared form-question
// state held by FormDataService.
export class FormStepFiveComponent implements OnInit {
// Shared answers object, referenced (not copied) from the service so
// edits propagate across steps.
formQuestions: UserFormQuestions;
constructor(public formDataService: FormDataService) {
this.formQuestions = formDataService.formQuestions;
}
ngOnInit(): void {}
// Debug helper: dumps the answers for questions 12 and 13 to the console.
log() {
console.log('12 -', this.formQuestions.questionTwelve);
console.log('13 -', this.formQuestions.questionThirteen);
}
}
|
#!/usr/bin/env bats
#
# Load the helper functions in test_helper.bash
# Note the .bash suffix is omitted intentionally
#
load test_helper
#
# A test to run is denoted with an @test annotation like below;
# the string after it is the test name and will be displayed
# when the test is run.
#
# This test is, as the test name states, a check for when everything
# is peachy.
#
@test "Test success hardlink" {
# verify $KEPLER_SH is in path if not skip this test
skipIfKeplerNotInPath
echo "0,blah,some error," > "$THE_TMP/bin/command.tasks"
echo "hi=2" > "$THE_TMP/imodsource"
mkdir -p "$THE_TMP/foo/data"
echo "hi" > "$THE_TMP/foo/data/some.imod"
# Run kepler.sh
run $KEPLER_SH -runwf -redirectgui $THE_TMP -CWS_jobname jname -CWS_user joe -CWS_jobid 123 -imodfile "$THE_TMP/foo" -imodSourceScript $THE_TMP/imodsource -imodInfoCmd "$THE_TMP/bin/command" -CWS_outputdir $THE_TMP $WF
# Check exit code
[ "$status" -eq 0 ]
# will only see this if kepler fails
echoArray "${lines[@]}"
# Check output from kepler.sh
[[ "${lines[0]}" == "The base dir is"* ]]
# Will be output if anything below fails
cat "$THE_TMP/$README_TXT"
# Verify we did not get a WORKFLOW.FAILED.txt file
[ ! -e "$THE_TMP/$WORKFLOW_FAILED_TXT" ]
# Verify we got a README.txt
[ -s "$THE_TMP/$README_TXT" ]
# Check we got a workflow.status file
[ -s "$THE_TMP/$WORKFLOW_STATUS" ]
[ -e "$THE_TMP/data/input.mod" ]
}
|
## Set execute permissions first at a terminal:
# chmod +x docs.command
## then run this script:
# ./docs.command
# Generate API docs from js/** with documentation.js: HTML into docs/,
# and a Markdown copy at docs/docs.md.
documentation build js/** -f html -o docs
documentation build js/** -f md -o docs/docs.md
# docs.yml should allow a hierarchy but is not working yet:
# documentation build js/** --config docs.yml -f html -o docs
|
#!/bin/bash
# Launch CAML-style training (conv_attn) on the full MIMIC-III dataset.
# Resolves paths relative to this script so it can be run from anywhere.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
cd "$SCRIPT_DIR" || exit
cd ../../learn || exit
python training.py ../mimicdata/mimic3/train_full.csv ../mimicdata/mimic3/vocab.csv full conv_attn 200 --filter-size 10 --num-filter-maps 50 --dropout 0.2 --patience 10 --criterion prec_at_8 --lr 0.0001 --lmbda 0.01 --embed-file ../mimicdata/mimic3/processed_full.embed
# append --gpu to train on GPU
# --gpu
|
# Print the labels of pod $1 in namespace $2 (defaults to "default").
NAME=$1
# ${2:-default} replaces the unquoted `test -z ${NAMESPACE}` idiom, which
# was fragile under word splitting.
NAMESPACE=${2:-default}
BOLD=' \e[1m' # Works only if the terminal's "allow bold text" setting is enabled
STOP='\e[0m'
# Quote expansions so pod/namespace names never undergo splitting/globbing.
kubectl get pod "$NAME" -n "$NAMESPACE" -o=json | \
jq '.metadata.labels'
|
/**
* Created with JetBrains WebStorm.
* User: Gwen
* @project CatchTheFlowers
* Date: 25/07/12
* Time: 11:52
* To change this template use File | Settings | File Templates.
*/
// Full-canvas background node: 200 random stars plus a glowing moon with
// three craters, pre-rendered once into an offscreen canvas.
var StarsNode = CGSGNode.extend(
{
initialize : function() {
// cover the whole scene canvas
this._super(0, 0, cgsgCanvas.width, cgsgCanvas.height);
this.initShape();
},
/**
 * Pre-render the starfield and moon into a temp canvas to optimize the perfs
 */
initShape : function() {
this._tmpCanvas = document.createElement('canvas');
this._tmpCanvas.width = cgsgCanvas.width;
this._tmpCanvas.height = cgsgCanvas.height;
var tmpContext = this._tmpCanvas.getContext('2d');
//draw the stars: 200 small white dots at random positions (the bottom
//40px is kept clear)
var centerX = 0;
var centerY = 0;
var radius = 1;
for (var s = 0; s < 200; s++) {
centerX = Math.random() * cgsgCanvas.width;
centerY = Math.random() * (cgsgCanvas.height - 40);
radius = 0.01 + Math.random() * 1.3;
tmpContext.beginPath();
tmpContext.arc(centerX, centerY, radius, 0, CGSGMath.PI2, false);
tmpContext.fillStyle = 'white';
tmpContext.fill();
}
//draw the moon: a white disc with a soft glow (shadowBlur)
centerX = 110;
centerY = 80;
radius = 30;
tmpContext.beginPath();
tmpContext.arc(centerX, centerY, radius, 0, 2 * Math.PI, false);
tmpContext.fillStyle = 'white';
tmpContext.shadowColor = 'white';
tmpContext.shadowBlur = 70;
tmpContext.shadowOffsetX = 0;
tmpContext.shadowOffsetY = 0;
tmpContext.fill();
//three slightly darker discs suggest craters on the moon
tmpContext.beginPath();
tmpContext.arc(120, 85, 8, 0, 2 * Math.PI, false);
tmpContext.fillStyle = '#f8f8f8';
tmpContext.fill();
tmpContext.beginPath();
tmpContext.arc(90, 70, 3, 0, 2 * Math.PI, false);
tmpContext.fillStyle = '#f8f8f8';
tmpContext.fill();
tmpContext.beginPath();
tmpContext.arc(110, 85, 4, 0, 2 * Math.PI, false);
tmpContext.fillStyle = '#f8f8f8';
tmpContext.fill();
},
/**
 * @override
 * Must be defined to allow the scene graph to render the image nodes.
 * Just blits the pre-rendered canvas at the node's alpha.
 * */
render : function(context) {
//save current state
//always call it
this.beforeRender(context);
context.globalAlpha = this.globalAlpha;
//render the pre-rendered canvas
context.drawImage(this._tmpCanvas, 0, 0);
//restore state
//always call it
this.afterRender(context);
}
}
);
def reverseArray(arr):
    """Reverse `arr` in place (mutates the list; returns None).

    Swaps elements pairwise from both ends toward the middle.
    """
    left, right = 0, len(arr) - 1
    while left < right:
        # swap the outermost untouched pair, then move both cursors inward
        arr[left], arr[right] = arr[right], arr[left]
        left += 1
        right -= 1
# Driver code
arr = [1, 2, 3, 4]
reverseArray(arr)
print(arr) # Outputs [4,3,2,1] |
<filename>src/utils/load-posts.js<gh_stars>0
// Fetches posts and photos in parallel and returns the posts, each enriched
// with a `cover` URL taken from the photo at the same index.
export const loadPosts = async () => {
const postsResponse = fetch('https://jsonplaceholder.typicode.com/posts');
const photosResponse = fetch('https://jsonplaceholder.typicode.com/photos');
// Both requests run concurrently; Promise.all awaits the pending fetches.
const [posts, photos] = await Promise.all([postsResponse, photosResponse]);
// Response.json() is itself a promise, so await the body parsing too.
const postsJson = await posts.json();
const photosJson = await photos.json();
// Zip the two arrays by the shorter one (posts).
const postsAndPhotos = postsJson.map((post, index) => {
/* Returns an array of objects: each post from the request gains a `cover`
attribute holding the matching photo's URL. Mapping over the smaller array
(posts) and indexing into the larger one (photos) means only the photos
that are actually needed are used. Assumes photos.length >= posts.length
for this API -- TODO confirm. */
return { ...post, cover: photosJson[index].url };
});
// Return value: an array of post objects -> [{}, {}, ...]
return postsAndPhotos;
};
|
#!/bin/sh
## run EVM (EVidence Modeler) over the example inputs, writing evm.out
../evidence_modeler.pl --genome genome.fasta \
--weights ./weights.txt \
--gene_predictions gene_predictions.gff3 \
--protein_alignments protein_alignments.gff3 \
--transcript_alignments transcript_alignments.gff3 \
> evm.out
echo
echo
echo "*** Created EVM output file: evm.out ***"
## convert output to GFF3 format
# NOTE(review): the step above writes evm.out but this conversion reads
# evm.out.orig -- presumably a pre-saved reference copy; confirm this is
# intentional, otherwise the freshly generated output is never converted.
./../EvmUtils/EVM_to_GFF3.pl evm.out.orig Contig1 > evm.out.gff3
echo
echo
echo "*** Converted EVM output to GFF3 format: evm.out.gff3 ***"
echo
echo "Done."
package com.oyekanmiayo.rockito.domain;
import java.math.BigDecimal;
import java.util.List;
/**
 * Plain data holder for a savings account: identifier, account number,
 * current balance, and the account's transaction history.
 */
public class SavingsAccount {
// surrogate identifier (e.g. database primary key)
private long id;
private int accountNumber;
// monetary amount; BigDecimal avoids floating-point rounding errors
private BigDecimal accountBalance;
// transactions recorded against this account
private List<SavingsTransaction> primaryTransactionList;
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
public int getAccountNumber() {
return accountNumber;
}
public void setAccountNumber(int accountNumber) {
this.accountNumber = accountNumber;
}
public BigDecimal getAccountBalance() {
return accountBalance;
}
public void setAccountBalance(BigDecimal accountBalance) {
this.accountBalance = accountBalance;
}
public List<SavingsTransaction> getPrimaryTransactionList() {
return primaryTransactionList;
}
public void setPrimaryTransactionList(List<SavingsTransaction> primaryTransactionList) {
this.primaryTransactionList = primaryTransactionList;
}
}
|
import yaml
def compare_yaml_files(file1, file2):
    """Return True if both files parse to equal YAML data structures.

    Prints a diagnostic and returns False when either file is missing or
    contains invalid YAML. Comparison is structural (parsed data), so
    formatting/comment differences between the files are ignored.
    """
    try:
        with open(file1, "r") as file1_stream, open(file2, "r") as file2_stream:
            yaml1 = yaml.safe_load(file1_stream)
            yaml2 = yaml.safe_load(file2_stream)
            return yaml1 == yaml2
    except FileNotFoundError:
        print("One or both files do not exist.")
        return False
    except yaml.YAMLError:
        print("Invalid YAML format in one or both files.")
        return False
# Test cases
def test_compare_yaml_files():
    """Exercise compare_yaml_files on identical, missing, and invalid inputs.

    Idiom fix: assert the boolean result directly instead of comparing with
    ``== True`` / ``== False`` (PEP 8).
    """
    # Test case for equivalent YAML files: a file compares equal to itself.
    assert compare_yaml_files(
        "tests/test_files/bundles/api/bundle1/results/bundle.yaml",
        "tests/test_files/bundles/api/bundle1/results/bundle.yaml")
    # Test case for non-existent file: missing input yields False.
    assert not compare_yaml_files(
        "nonexistent.yaml",
        "tests/test_files/bundles/api/bundle1/results/bundle.yaml")
    # Test case for invalid YAML format in one of the files.
    assert not compare_yaml_files(
        "tests/test_files/bundles/api/bundle1/results/bundle.yaml",
        "tests/test_files/bundles/api/bundle1/invalid.yaml")
// Copyright (c) 2015 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
/**
 * GLSL vertex shader source for the minecraft deck.gl layer, exported as a
 * raw string. Per instance (block) it looks up per-face texture/transform
 * data in `blockDefsTexture`, applies the block's scale/rotation/translation,
 * computes atlas texture coordinates, biome tinting and lighting, and flags
 * faces as hidden when empty, occluded, or above the `sliceY` cut plane
 * during picking.
 *
 * NOTE: the template literal below is runtime shader source — it is kept
 * byte-for-byte unmodified.
 */
export default `
#define SHADER_NAME minecraft-layer-vertex-shader
attribute vec3 positions;
attribute vec3 normals;
attribute vec2 texCoords;
attribute vec3 instancePositions;
attribute vec3 instancePositions64Low;
attribute float instanceBlockIds;
attribute vec4 instanceBlockData;
attribute float instanceVisibilities;
attribute vec3 instancePickingColors;
uniform float sliceY;
uniform vec2 blockDefsTextureDim;
uniform vec2 atlasTextureDim;
uniform sampler2D blockDefsTexture;
uniform sampler2D biomeTexture;
varying float isVisible;
varying vec4 vColorScale;
varying vec2 vTextureCoords;
mat3 getXYRotationMatrix(float radX, float radY) {
float cx = cos(radX);
float sx = sin(radX);
float cy = cos(radY);
float sy = sin(radY);
return mat3(
cy, 0.0, -sy,
sx * sy, cx, sx * cy,
cx * sy, -sx, cx * cy
);
}
float round(float x) {
return floor(x + 0.5);
}
vec4 getBlockDefAt(float faceIndex) {
vec2 coords = vec2(instanceBlockData.x * 8.0 + faceIndex, instanceBlockIds);
coords += vec2(0.5);
coords /= blockDefsTextureDim;
return texture2D(blockDefsTexture, coords);
}
float getFaceIndex(vec3 normal_modelspace) {
vec3 index = normal_modelspace * vec3(-1.0, 0.5, 1.0) +
abs(normal_modelspace) * vec3(4.0, 0.5, 3.0);
return round(index.x + index.y + index.z);
}
vec4 getBiomeColor() {
// extreme altitude
vec2 coords = instanceBlockData.yz / 255.;
coords.x = 1.0 - coords.x;
return mix(
texture2D(biomeTexture, coords),
vec4(1.5),
step(95., instancePositions.y)
);
}
bool getVisibility(float faceIndex) {
float b = pow(2., 5. - faceIndex);
return mod(instanceVisibilities, b * 2.) >= b;
}
// (scale, rotate, translate, face offset)
vec4 getTransform(vec4 t) {
return vec4(
round(t.x * 255.0) / 16.0 - 4.0,
round(t.y * 255.0) / 6.0 * PI,
round(t.z * 255.0) / 16.0 - 1.0,
round((1.0 - t.w) * 255.0) / 16.0
);
}
void main(void) {
geometry.pickingColor = instancePickingColors;
vec4 transformX = getTransform(getBlockDefAt(6.0));
vec4 transformY = getTransform(getBlockDefAt(7.0));
vec3 blockScale = vec3(transformX[0], transformY[0], 1.0);
mat3 blockRotation = getXYRotationMatrix(transformX[1], transformY[1]);
vec3 blockTranslation = vec3(transformX[2], transformY[2], 0.0);
vec3 faceOffset = vec3(transformX[3], transformY[3], transformX[3]);
vec3 position_modelspace =
blockRotation * (positions / 2. * blockScale - normals * faceOffset + blockTranslation);
vec3 normal_modelspace = blockRotation * normals;
vec2 texCoords_modelspace = texCoords * mix(
vec2(1.0),
blockScale.xy,
1.0 - abs(normals.xy)
);
float faceIndex = getFaceIndex(normals);
float faceIndex_modelspace = getFaceIndex(normal_modelspace);
// textures mapping
// returns [col, row, use_biome_shading, 1.0]
vec4 textureSettings = getBlockDefAt(faceIndex);
// texture size is 16x16 with 1px bleeding
vec4 textureFrame = vec4(
round(textureSettings.x * 255.) * 18.0 + 1.0,
round(textureSettings.y * 255. + 1.0) * 18.0 - 1.0,
16.0,
-16.0
);
vTextureCoords = (textureFrame.xy + texCoords_modelspace * textureFrame.zw) / atlasTextureDim;
// discard empty faces and ones facing opaque blocks
isVisible = float(
textureSettings.xy != vec2(0.) &&
(getVisibility(faceIndex_modelspace) ||
(faceIndex_modelspace == 1.0) && instancePositions.y == sliceY)
);
// calculate position
gl_Position = project_position_to_clipspace(instancePositions, instancePositions64Low, position_modelspace, geometry.position);
// calculate colors
vec4 biomeColor = mix(vec4(1.), getBiomeColor(), textureSettings.z);
vec3 lightWeight = lighting_getLightColor(vec3(1.0), project_uCameraPosition, geometry.position.xyz, normal_modelspace);
lightWeight += instanceBlockData.w / 15.0;
float isGhosted = float(instancePositions.y > sliceY);
if (picking_uActive) {
isVisible *= 1.0 - isGhosted;
}
vColorScale = vec4(lightWeight, mix(1.0, 0.1, isGhosted)) * biomeColor;
DECKGL_FILTER_COLOR(vColorScale, geometry);
}
`;
|
#!/bin/bash
##################################################################
# Created by Christian Haitian for use to easily update #
# various standalone emulators, libretro cores, and other #
# various programs for the RK3326 platform for various Linux #
# based distributions. #
# See the LICENSE.md file at the top-level directory of this #
# repository. #
##################################################################
# Remember the repo root so each module section can return to it.
cur_wd="$PWD"
# 32 or 64 — selects the make platform and the destination cores folder.
bitness="$(getconf LONG_BIT)"
# Libretro gpsp build
# NOTE(review): $var (module selection) is expected to be set earlier in the
# full script; it is not defined in this section.
if [[ "$var" == "gpsp" || "$var" == "all" ]]; then
cd $cur_wd
# First-time setup: clone the core and stage its local patches.
if [ ! -d "gpsp/" ]; then
git clone --recursive https://github.com/libretro/gpsp.git
if [[ $? != "0" ]]; then
echo " "
echo "There was an error while cloning the libretro git. Is Internet active or did the git location change? Stopping here."
exit 1
fi
cp patches/gpsp-patch* gpsp/.
fi
cd gpsp/
# NOTE(review): `find *.patch` prints an error (and yields nothing) when no
# patches are staged; the -z guard below then skips patching.
gpsp_patches=$(find *.patch)
if [[ ! -z "$gpsp_patches" ]]; then
for patching in gpsp-patch*
do
patch -Np1 < "$patching"
if [[ $? != "0" ]]; then
echo " "
echo "There was an error while applying $patching. Stopping here."
exit 1
fi
# Each patch is applied once and then removed from the working tree.
rm "$patching"
done
fi
make clean
# Build for the matching bitness, leaving one CPU free.
if [[ "$bitness" == "64" ]]; then
make platform=arm64 -j$(($(nproc) - 1))
else
make platform=goadvance -j$(($(nproc) - 1))
fi
if [[ $? != "0" ]]; then
echo " "
echo "There was an error while building the newest lr-gpsp core. Stopping here."
exit 1
fi
strip gpsp_libretro.so
if [ ! -d "../cores$bitness/" ]; then
mkdir -v ../cores$bitness
fi
cp gpsp_libretro.so ../cores$bitness/.
# Record the short hash of the built commit next to the installed core.
gitcommit=$(git log | grep -m 1 commit | cut -c -14 | cut -c 8-)
echo $gitcommit > ../cores$bitness/$(basename $PWD)_libretro.so.commit
echo " "
echo "gpsp_libretro.so has been created and has been placed in the rk3326_core_builds/cores$bitness subfolder"
fi
|
#!/usr/bin/env bash
export LC_ALL=C
set -e -o pipefail
export TZ=UTC
# Although Guix _does_ set umask when building its own packages (in our case,
# this is all packages in manifest.scm), it does not set it for `guix
# environment`. It does make sense for at least `guix environment --container`
# to set umask, so if that change gets merged upstream and we bump the
# time-machine to a commit which includes the aforementioned change, we can
# remove this line.
#
# This line should be placed before any commands which creates files.
umask 0022
if [ -n "$V" ]; then
# Print both unexpanded (-v) and expanded (-x) forms of commands as they are
# read from this file.
set -vx
# Set VERBOSE for CMake-based builds
export VERBOSE="$V"
fi
# Check that required environment variables are set
# The ${VAR:?not set} expansions double as hard assertions: the script aborts
# right here if any required variable is missing.
cat << EOF
Required environment variables as seen inside the container:
DIST_ARCHIVE_BASE: ${DIST_ARCHIVE_BASE:?not set}
DISTNAME: ${DISTNAME:?not set}
HOST: ${HOST:?not set}
SOURCE_DATE_EPOCH: ${SOURCE_DATE_EPOCH:?not set}
JOBS: ${JOBS:?not set}
DISTSRC: ${DISTSRC:?not set}
OUTDIR: ${OUTDIR:?not set}
EOF
# Stage all output under DISTSRC/output first; it is moved to the real
# OUTDIR in one step at the very end of the script.
ACTUAL_OUTDIR="${OUTDIR}"
OUTDIR="${DISTSRC}/output"
#####################
# Environment Setup #
#####################
# The depends folder also serves as a base-prefix for depends packages for
# $HOSTs after successfully building.
BASEPREFIX="${PWD}/depends"
# Given a package name and an output name, return the path of that output in our
# current guix environment
# $1 = package name, $2 = optional output name. Matches the 32-character hash
# segment of a store path in the environment's manifest, takes the first
# match, and strips the surrounding quotes/whitespace from the line.
store_path() {
grep --extended-regexp "/[^-]{32}-${1}-[^-]+${2:+-${2}}" "${GUIX_ENVIRONMENT}/manifest" \
| head --lines=1 \
| sed --expression='s|^[[:space:]]*"||' \
--expression='s|"[[:space:]]*$||'
}
# Set environment variables to point the NATIVE toolchain to the right
# includes/libs
NATIVE_GCC="$(store_path gcc-toolchain)"
NATIVE_GCC_STATIC="$(store_path gcc-toolchain static)"
# Clear every inherited search path first so the only visible headers and
# libraries are those of the pinned native gcc-toolchain outputs.
unset LIBRARY_PATH
unset CPATH
unset C_INCLUDE_PATH
unset CPLUS_INCLUDE_PATH
unset OBJC_INCLUDE_PATH
unset OBJCPLUS_INCLUDE_PATH
export LIBRARY_PATH="${NATIVE_GCC}/lib:${NATIVE_GCC}/lib64:${NATIVE_GCC_STATIC}/lib:${NATIVE_GCC_STATIC}/lib64"
export C_INCLUDE_PATH="${NATIVE_GCC}/include"
export CPLUS_INCLUDE_PATH="${NATIVE_GCC}/include/c++:${NATIVE_GCC}/include"
export OBJC_INCLUDE_PATH="${NATIVE_GCC}/include"
export OBJCPLUS_INCLUDE_PATH="${NATIVE_GCC}/include/c++:${NATIVE_GCC}/include"
# Prepend a path entry ($2) to the colon-separated search variable named by
# $1, creating and exporting the variable when it is empty or unset.
prepend_to_search_env_var() {
    local name="$1"
    local entry="$2"
    # ${!name} is bash indirect expansion: the current value of the variable
    # whose name is stored in $name.
    if [ -n "${!name:-}" ]; then
        export "${name}=${entry}:${!name}"
    else
        export "${name}=${entry}"
    fi
}
case "$HOST" in
*darwin*)
# When targeting darwin, zlib is required by native_libdmg-hfsplus.
zlib_store_path=$(store_path "zlib")
zlib_static_store_path=$(store_path "zlib" static)
prepend_to_search_env_var LIBRARY_PATH "${zlib_static_store_path}/lib:${zlib_store_path}/lib"
prepend_to_search_env_var C_INCLUDE_PATH "${zlib_store_path}/include"
prepend_to_search_env_var CPLUS_INCLUDE_PATH "${zlib_store_path}/include"
prepend_to_search_env_var OBJC_INCLUDE_PATH "${zlib_store_path}/include"
prepend_to_search_env_var OBJCPLUS_INCLUDE_PATH "${zlib_store_path}/include"
esac
# Set environment variables to point the CROSS toolchain to the right
# includes/libs for $HOST
case "$HOST" in
*mingw*)
# Determine output paths to use in CROSS_* environment variables
CROSS_GLIBC="$(store_path "mingw-w64-x86_64-winpthreads")"
CROSS_GCC="$(store_path "gcc-cross-${HOST}")"
CROSS_GCC_LIB_STORE="$(store_path "gcc-cross-${HOST}" lib)"
CROSS_GCC_LIBS=( "${CROSS_GCC_LIB_STORE}/lib/gcc/${HOST}"/* ) # This expands to an array of directories...
CROSS_GCC_LIB="${CROSS_GCC_LIBS[0]}" # ...we just want the first one (there should only be one)
# The search path ordering is generally:
# 1. gcc-related search paths
# 2. libc-related search paths
# 3. kernel-header-related search paths (not applicable to mingw-w64 hosts)
export CROSS_C_INCLUDE_PATH="${CROSS_GCC_LIB}/include:${CROSS_GCC_LIB}/include-fixed:${CROSS_GLIBC}/include"
export CROSS_CPLUS_INCLUDE_PATH="${CROSS_GCC}/include/c++:${CROSS_GCC}/include/c++/${HOST}:${CROSS_GCC}/include/c++/backward:${CROSS_C_INCLUDE_PATH}"
export CROSS_LIBRARY_PATH="${CROSS_GCC_LIB_STORE}/lib:${CROSS_GCC}/${HOST}/lib:${CROSS_GCC_LIB}:${CROSS_GLIBC}/lib"
;;
*darwin*)
# The CROSS toolchain for darwin uses the SDK and ignores environment variables.
# See depends/hosts/darwin.mk for more details.
;;
*linux*)
CROSS_GLIBC="$(store_path "glibc-cross-${HOST}")"
CROSS_GLIBC_STATIC="$(store_path "glibc-cross-${HOST}" static)"
CROSS_KERNEL="$(store_path "linux-libre-headers-cross-${HOST}")"
CROSS_GCC="$(store_path "gcc-cross-${HOST}")"
CROSS_GCC_LIB_STORE="$(store_path "gcc-cross-${HOST}" lib)"
CROSS_GCC_LIBS=( "${CROSS_GCC_LIB_STORE}/lib/gcc/${HOST}"/* ) # This expands to an array of directories...
CROSS_GCC_LIB="${CROSS_GCC_LIBS[0]}" # ...we just want the first one (there should only be one)
export CROSS_C_INCLUDE_PATH="${CROSS_GCC_LIB}/include:${CROSS_GCC_LIB}/include-fixed:${CROSS_GLIBC}/include:${CROSS_KERNEL}/include"
export CROSS_CPLUS_INCLUDE_PATH="${CROSS_GCC}/include/c++:${CROSS_GCC}/include/c++/${HOST}:${CROSS_GCC}/include/c++/backward:${CROSS_C_INCLUDE_PATH}"
export CROSS_LIBRARY_PATH="${CROSS_GCC_LIB_STORE}/lib:${CROSS_GCC}/${HOST}/lib:${CROSS_GCC_LIB}:${CROSS_GLIBC}/lib:${CROSS_GLIBC_STATIC}/lib"
;;
*)
# Unsupported $HOST: refuse to continue rather than build wrong binaries.
exit 1 ;;
esac
# Sanity check CROSS_*_PATH directories
IFS=':' read -ra PATHS <<< "${CROSS_C_INCLUDE_PATH}:${CROSS_CPLUS_INCLUDE_PATH}:${CROSS_LIBRARY_PATH}"
for p in "${PATHS[@]}"; do
if [ -n "$p" ] && [ ! -d "$p" ]; then
echo "'$p' doesn't exist or isn't a directory... Aborting..."
exit 1
fi
done
# Disable Guix ld auto-rpath behavior
case "$HOST" in
*darwin*)
# The auto-rpath behavior is necessary for darwin builds as some native
# tools built by depends refer to and depend on Guix-built native
# libraries
#
# After the native packages in depends are built, the ld wrapper should
# no longer affect our build, as clang would instead reach for
# x86_64-apple-darwin19-ld from cctools
;;
*) export GUIX_LD_WRAPPER_DISABLE_RPATH=yes ;;
esac
# Make /usr/bin if it doesn't exist
[ -e /usr/bin ] || mkdir -p /usr/bin
# Symlink file and env to a conventional path
[ -e /usr/bin/file ] || ln -s --no-dereference "$(command -v file)" /usr/bin/file
[ -e /usr/bin/env ] || ln -s --no-dereference "$(command -v env)" /usr/bin/env
# Determine the correct value for -Wl,--dynamic-linker for the current $HOST
case "$HOST" in
*linux*)
glibc_dynamic_linker=$(
case "$HOST" in
i686-linux-gnu) echo /lib/ld-linux.so.2 ;;
x86_64-linux-gnu) echo /lib64/ld-linux-x86-64.so.2 ;;
arm-linux-gnueabihf) echo /lib/ld-linux-armhf.so.3 ;;
aarch64-linux-gnu) echo /lib/ld-linux-aarch64.so.1 ;;
riscv64-linux-gnu) echo /lib/ld-linux-riscv64-lp64d.so.1 ;;
powerpc64-linux-gnu) echo /lib64/ld64.so.1;;
powerpc64le-linux-gnu) echo /lib64/ld64.so.2;;
*) exit 1 ;;
esac
)
;;
esac
# Environment variables for determinism
# Fixed ownership, mtime, and sort order make tar output reproducible.
export TAR_OPTIONS="--owner=0 --group=0 --numeric-owner --mtime='@${SOURCE_DATE_EPOCH}' --sort=name"
export TZ="UTC"
case "$HOST" in
*darwin*)
# cctools AR, unlike GNU binutils AR, does not have a deterministic mode
# or a configure flag to enable determinism by default, it only
# understands if this env-var is set or not. See:
#
# https://github.com/tpoechtrager/cctools-port/blob/55562e4073dea0fbfd0b20e0bf69ffe6390c7f97/cctools/ar/archive.c#L334
export ZERO_AR_DATE=yes
;;
esac
####################
# Depends Building #
####################
# Build the depends tree, overriding variables that assume multilib gcc
make -C depends --jobs="$JOBS" HOST="$HOST" \
${V:+V=1} \
${SOURCES_PATH+SOURCES_PATH="$SOURCES_PATH"} \
${BASE_CACHE+BASE_CACHE="$BASE_CACHE"} \
${SDK_PATH+SDK_PATH="$SDK_PATH"} \
i686_linux_CC=i686-linux-gnu-gcc \
i686_linux_CXX=i686-linux-gnu-g++ \
i686_linux_AR=i686-linux-gnu-ar \
i686_linux_RANLIB=i686-linux-gnu-ranlib \
i686_linux_NM=i686-linux-gnu-nm \
i686_linux_STRIP=i686-linux-gnu-strip \
x86_64_linux_CC=x86_64-linux-gnu-gcc \
x86_64_linux_CXX=x86_64-linux-gnu-g++ \
x86_64_linux_AR=x86_64-linux-gnu-ar \
x86_64_linux_RANLIB=x86_64-linux-gnu-ranlib \
x86_64_linux_NM=x86_64-linux-gnu-nm \
x86_64_linux_STRIP=x86_64-linux-gnu-strip \
qt_config_opts_i686_linux='-platform linux-g++ -xplatform bitcoin-linux-g++' \
qt_config_opts_x86_64_linux='-platform linux-g++ -xplatform bitcoin-linux-g++' \
FORCE_USE_SYSTEM_CLANG=1
###########################
# Source Tarball Building #
###########################
GIT_ARCHIVE="${DIST_ARCHIVE_BASE}/${DISTNAME}.tar.gz"
# Create the source tarball if not already there
if [ ! -e "$GIT_ARCHIVE" ]; then
mkdir -p "$(dirname "$GIT_ARCHIVE")"
git archive --prefix="${DISTNAME}/" --output="$GIT_ARCHIVE" HEAD
fi
mkdir -p "$OUTDIR"
###########################
# Binary Tarball Building #
###########################
# CONFIGFLAGS
CONFIGFLAGS="--enable-reduce-exports --disable-bench --disable-gui-tests --disable-fuzz-binary"
# CFLAGS
HOST_CFLAGS="-O2 -g"
# Per-platform tweaks; -ffile-prefix-map keeps build paths out of the
# binaries (determinism), -fno-ident drops compiler version strings.
case "$HOST" in
*linux*) HOST_CFLAGS+=" -ffile-prefix-map=${PWD}=." ;;
*mingw*) HOST_CFLAGS+=" -fno-ident" ;;
*darwin*) unset HOST_CFLAGS ;;
esac
# CXXFLAGS
HOST_CXXFLAGS="$HOST_CFLAGS"
case "$HOST" in
arm-linux-gnueabihf) HOST_CXXFLAGS="${HOST_CXXFLAGS} -Wno-psabi" ;;
esac
# LDFLAGS
case "$HOST" in
*linux*) HOST_LDFLAGS="-Wl,--as-needed -Wl,--dynamic-linker=$glibc_dynamic_linker -static-libstdc++ -Wl,-O2" ;;
*mingw*) HOST_LDFLAGS="-Wl,--no-insert-timestamp" ;;
esac
# Using --no-tls-get-addr-optimize retains compatibility with glibc 2.18, by
# avoiding a PowerPC64 optimisation available in glibc 2.22 and later.
# https://sourceware.org/binutils/docs-2.35/ld/PowerPC64-ELF64.html
case "$HOST" in
*powerpc64*) HOST_LDFLAGS="${HOST_LDFLAGS} -Wl,--no-tls-get-addr-optimize" ;;
esac
case "$HOST" in
powerpc64-linux-*|riscv64-linux-*) HOST_LDFLAGS="${HOST_LDFLAGS} -Wl,-z,noexecstack" ;;
esac
# Make $HOST-specific native binaries from depends available in $PATH
export PATH="${BASEPREFIX}/${HOST}/native/bin:${PATH}"
mkdir -p "$DISTSRC"
(
cd "$DISTSRC"
# Extract the source tarball
tar --strip-components=1 -xf "${GIT_ARCHIVE}"
./autogen.sh
# Configure this DISTSRC for $HOST
# shellcheck disable=SC2086
env CONFIG_SITE="${BASEPREFIX}/${HOST}/share/config.site" \
./configure --prefix=/ \
--disable-ccache \
--disable-maintainer-mode \
--disable-dependency-tracking \
--enable-elysium \
--enable-crash-hooks \
--without-libs \
${CONFIGFLAGS} \
${HOST_CFLAGS:+CFLAGS="${HOST_CFLAGS}"} \
${HOST_CXXFLAGS:+CXXFLAGS="${HOST_CXXFLAGS}"} \
${HOST_LDFLAGS:+LDFLAGS="${HOST_LDFLAGS}"}
# Remove stray '-lstdc++' occurrences from the generated build scripts.
sed -i.old 's/-lstdc++ //g' config.status libtool src/secp256k1/config.status src/secp256k1/libtool src/univalue/config.status src/univalue/libtool
# Build Bitcoin Core
make --jobs="$JOBS" ${V:+V=1}
mkdir -p "$OUTDIR"
# Make the os-specific installers
case "$HOST" in
*mingw*)
make deploy ${V:+V=1} BITCOIN_WIN_INSTALLER="${OUTDIR}/${DISTNAME}-win64-setup-unsigned.exe"
;;
esac
# Setup the directory where our Bitcoin Core build for HOST will be
# installed. This directory will also later serve as the input for our
# binary tarballs.
INSTALLPATH="${PWD}/installed/${DISTNAME}"
mkdir -p "${INSTALLPATH}"
# Install built Bitcoin Core to $INSTALLPATH
case "$HOST" in
*darwin*)
make install-strip DESTDIR="${INSTALLPATH}" ${V:+V=1}
;;
*)
make install DESTDIR="${INSTALLPATH}" ${V:+V=1}
;;
esac
case "$HOST" in
*darwin*)
# Assemble the unsigned macOS app bundle plus the detached-signature
# tooling into a reproducible tarball, then build the unsigned DMG.
make osx_volname ${V:+V=1}
make deploydir ${V:+V=1}
mkdir -p "unsigned-app-${HOST}"
cp --target-directory="unsigned-app-${HOST}" \
osx_volname \
contrib/macdeploy/detached-sig-{apply,create}.sh \
"${BASEPREFIX}/${HOST}"/native/bin/dmg
mv --target-directory="unsigned-app-${HOST}" dist
(
cd "unsigned-app-${HOST}"
find . -print0 \
| sort --zero-terminated \
| tar --create --no-recursion --mode='u+rw,go+r-w,a+X' --null --files-from=- \
| gzip -9n > "${OUTDIR}/${DISTNAME}-osx-unsigned.tar.gz" \
|| ( rm -f "${OUTDIR}/${DISTNAME}-osx-unsigned.tar.gz" && exit 1 )
)
make deploy ${V:+V=1} OSX_DMG="${OUTDIR}/${DISTNAME}-osx-unsigned.dmg"
;;
esac
(
cd installed
# Prune libtool and object archives
find . -name "lib*.la" -delete
find . -name "lib*.a" -delete
# Prune pkg-config files
rm -rf "${DISTNAME}/lib/pkgconfig"
case "$HOST" in
*darwin*) ;;
*)
# Split binaries and libraries from their debug symbols
{
find "${DISTNAME}/bin" -type f -executable -print0
} | xargs -0 -n1 -P"$JOBS" -I{} "${DISTSRC}/contrib/devtools/split-debug.sh" {} {} {}.dbg
;;
esac
case "$HOST" in
*mingw*)
cp "${DISTSRC}/doc/README_windows.txt" "${DISTNAME}/readme.txt"
;;
*linux*)
cp "${DISTSRC}/README.md" "${DISTNAME}/"
;;
esac
# Finally, deterministically produce {non-,}debug binary tarballs ready
# for release
case "$HOST" in
*mingw*)
find "${DISTNAME}" -not -name "*.dbg" -print0 \
| xargs -0r touch --no-dereference --date="@${SOURCE_DATE_EPOCH}"
find "${DISTNAME}" -not -name "*.dbg" \
| sort \
| zip -X@ "${OUTDIR}/${DISTNAME}-${HOST//x86_64-w64-mingw32/win64}.zip" \
|| ( rm -f "${OUTDIR}/${DISTNAME}-${HOST//x86_64-w64-mingw32/win64}.zip" && exit 1 )
find "${DISTNAME}" -name "*.dbg" -print0 \
| xargs -0r touch --no-dereference --date="@${SOURCE_DATE_EPOCH}"
find "${DISTNAME}" -name "*.dbg" \
| sort \
| zip -X@ "${OUTDIR}/${DISTNAME}-${HOST//x86_64-w64-mingw32/win64}-debug.zip" \
|| ( rm -f "${OUTDIR}/${DISTNAME}-${HOST//x86_64-w64-mingw32/win64}-debug.zip" && exit 1 )
;;
*linux*)
find "${DISTNAME}" -not -name "*.dbg" -print0 \
| sort --zero-terminated \
| tar --create --no-recursion --mode='u+rw,go+r-w,a+X' --null --files-from=- \
| gzip -9n > "${OUTDIR}/${DISTNAME}-${HOST}.tar.gz" \
|| ( rm -f "${OUTDIR}/${DISTNAME}-${HOST}.tar.gz" && exit 1 )
find "${DISTNAME}" -name "*.dbg" -print0 \
| sort --zero-terminated \
| tar --create --no-recursion --mode='u+rw,go+r-w,a+X' --null --files-from=- \
| gzip -9n > "${OUTDIR}/${DISTNAME}-${HOST}-debug.tar.gz" \
|| ( rm -f "${OUTDIR}/${DISTNAME}-${HOST}-debug.tar.gz" && exit 1 )
;;
*darwin*)
find "${DISTNAME}" -print0 \
| sort --zero-terminated \
| tar --create --no-recursion --mode='u+rw,go+r-w,a+X' --null --files-from=- \
| gzip -9n > "${OUTDIR}/${DISTNAME}-${HOST//x86_64-apple-darwin19/osx64}.tar.gz" \
|| ( rm -f "${OUTDIR}/${DISTNAME}-${HOST//x86_64-apple-darwin19/osx64}.tar.gz" && exit 1 )
;;
esac
) # $DISTSRC/installed
case "$HOST" in
*mingw*)
# Archive the Windows deploy tooling together with the unsigned
# installer for later detached signing.
cp -rf --target-directory=. contrib/windeploy
(
cd ./windeploy
mkdir -p unsigned
cp --target-directory=unsigned/ "${OUTDIR}/${DISTNAME}-win64-setup-unsigned.exe"
find . -print0 \
| sort --zero-terminated \
| tar --create --no-recursion --mode='u+rw,go+r-w,a+X' --null --files-from=- \
| gzip -9n > "${OUTDIR}/${DISTNAME}-win-unsigned.tar.gz" \
|| ( rm -f "${OUTDIR}/${DISTNAME}-win-unsigned.tar.gz" && exit 1 )
)
;;
esac
) # $DISTSRC
# Publish the staged output: replace ACTUAL_OUTDIR with the staging OUTDIR.
rm -rf "$ACTUAL_OUTDIR"
mv --no-target-directory "$OUTDIR" "$ACTUAL_OUTDIR" \
|| ( rm -rf "$ACTUAL_OUTDIR" && exit 1 )
# Checksum the source archive and every produced file, with paths relative
# to /outdir-base, into SHA256SUMS.part.
(
cd /outdir-base
{
echo "$GIT_ARCHIVE"
find "$ACTUAL_OUTDIR" -type f
} | xargs realpath --relative-base="$PWD" \
| xargs sha256sum \
| sort -k2 \
| sponge "$ACTUAL_OUTDIR"/SHA256SUMS.part
)
|
<reponame>zhoujiagen/learning-algorithms<filename>codes/java/concurrency/src/main/java/tamp/ch13/Hash/hash/Set.java<gh_stars>0
/*
* Set.java
*
* Created on December 29, 2005, 11:43 PM
*
* From "The Art of Multiprocessor Programming",
* by <NAME> and <NAME>.
* Copyright 2006 Elsevier Inc. All rights reserved.
*/
package tamp.ch13.Hash.hash;
import java.util.Iterator;
/**
 * Interface satisfied by various buckets
 *
 * @author <NAME>
 */
public interface Set<T> extends Iterable<T> {
    /**
     * add object with given key
     *
     * @param x object to add
     * @return whether object was absent
     */
    boolean add(T x);

    /**
     * remove object from bucket
     *
     * @param x object to remove
     * @return whether object was found
     */
    boolean remove(T x);

    /**
     * is object in bucket?
     *
     * @param x object being sought
     * @return whether object is present
     */
    boolean contains(T x);

    /**
     * iterate through objects in bucket
     * (redeclared from {@link Iterable} to document the bucket contract)
     *
     * @return iterator over elements
     */
    @Override
    Iterator<T> iterator();
}
|
#!/bin/bash
#delete old jar file and regenerate jar file
mvn clean package
rm -rf test/*.jar
# Install the freshly built artifact into the local Maven repository.
# NOTE(review): `mvn install` rebuilds the package phase already run above —
# confirm whether the earlier `mvn clean package` is still needed.
mvn install
#generate new jar file mvn clean package
# Regenerate Eclipse project metadata.
mvn eclipse:eclipse
#define docker name
dockername=quote
#Delete all containers and all volumes
#docker system prune -a
#Build of container and image
# Remove any previous container with this name, then rebuild the image from
# the local Dockerfile.
docker rm $dockername
docker build -t $dockername .
#Run container
# Runs in the foreground, mapping host port 8081 to container port 8081.
docker run --publish 8081:8081 $dockername
|
<gh_stars>0
import path from 'path';
import 'styles/global.scss';
import actions from 'actions';
import AppChunk from 'containers/AppChunk';
import Avatar from 'components/Avatar';
import Button from 'components/Button';
import client from 'client';
import DevTools from 'components/DevTools';
import Link from 'components/Link';
import MetaTags from 'components/MetaTags';
import Modal, { BaseModal } from 'components/Modal';
import NavLink from 'components/NavLink';
import reducers from 'reducers';
import ScalableRect from 'components/ScalableRect';
import * as utils from 'utils';
import { requireWeak } from 'utils/webpack';
// Resolve the server-side entry without bundling it: requireWeak loads it
// lazily, and `module.webpackPolyfill` (presumably truthy only under Webpack
// builds — confirm) selects a relative id vs. an absolute Node path.
const serverUrl = module.webpackPolyfill ? './server'
  : path.resolve(__dirname, './server');
// Only load the server entry when actually executing server-side.
const server = utils.isomorphy.isServerSide() ? requireWeak(serverUrl) : null;

// Public API surface of this package.
export {
  actions,
  AppChunk,
  Avatar,
  BaseModal,
  Button,
  client,
  DevTools,
  Link,
  NavLink,
  MetaTags,
  Modal,
  ScalableRect,
  reducers,
  server,
};
export * from 'utils';
|
import tensorflow as tf
# define the model
# A plain feed-forward regressor: one scalar input, three ReLU hidden layers
# (128 -> 64 -> 32), and a single linear output unit.
model = tf.keras.models.Sequential([
    tf.keras.layers.InputLayer(input_shape=(1,)),
    tf.keras.layers.Dense(128, activation='relu'),
    tf.keras.layers.Dense(64, activation='relu'),
    tf.keras.layers.Dense(32, activation='relu'),
    tf.keras.layers.Dense(1)
])
# compile the model
# Mean-squared-error loss with an RMSE metric — a regression setup.
model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=0.001),
              loss=tf.keras.losses.MeanSquaredError(),
              metrics=[tf.keras.metrics.RootMeanSquaredError()])
# train the model
# NOTE(review): x_train/y_train (and x_test/y_test, user_input below) are not
# defined in this file — they must be supplied by the surrounding context.
model.fit(x_train, y_train, epochs=20)
# evaluate the model
model.evaluate(x_test, y_test)
# use the model to predict ratings
model.predict(user_input)
#!/usr/bin/env bash
set -eu -o pipefail

# Simple convenience script to control the apps.
# Switch out nginx www.weboftomorrow.com.conf to
# www.weboftomorrow.com--down.conf to
# show the down page.

# First positional argument is the action (start, stop, or any other
# command understood by systemctl).
COMMAND=$1

# For start/stop, swap which nginx site config is enabled; any other
# command leaves the nginx configuration untouched.
case "${COMMAND}" in
    start)
        rm -f /etc/nginx/sites-enabled/www.weboftomorrow.com--down.conf;
        ln -sf /etc/nginx/sites-available/www.weboftomorrow.com.conf /etc/nginx/sites-enabled/www.weboftomorrow.com.conf;
        ;;
    stop)
        rm -f /etc/nginx/sites-enabled/www.weboftomorrow.com.conf;
        ln -sf /etc/nginx/sites-available/www.weboftomorrow.com--down.conf /etc/nginx/sites-enabled/www.weboftomorrow.com--down.conf;
        ;;
esac

systemctl reload nginx;

# Forward the requested action to every managed app service.
for app in www.weboftomorrow.com-chill;
do
    echo "";
    echo "systemctl $COMMAND $app;";
    echo "----------------------------------------";
    systemctl "$COMMAND" "$app" | cat;
done;
|
def logical_xor(a, b):
    """Return True when exactly one of ``a`` and ``b`` is truthy.

    Accepts arbitrary objects; each operand is reduced to its truthiness,
    and the result is always a plain bool.
    """
    # bool() normalises the operands; != on two bools is exactly XOR.
    # Replaces the verbose `if (a and not b) or (not a and b): return True`
    # pattern with the idiomatic single expression.
    return bool(a) != bool(b)
import React, { useContext } from 'react'
import LayoutContext from '~/context/LayoutContext'
import useSiteMetadata from '~/hooks/useSiteMetadata'
import HamburgerIcon from './HamburgerIcon'
import CloseIcon from './CloseIcon'
import MobileNav from './MobileNav'
import {
Wrapper,
Title,
Items,
NavItem,
} from './styles'
// Site navigation: a mobile slide-out nav plus the regular header bar with
// the site title, desktop nav items, and a hamburger/close toggle.
const Navigation = () => {
  // Mobile-menu visibility and its toggler come from the shared layout context.
  const {
    mobileNavVisible,
    toggleMobileNav,
  } = useContext(LayoutContext)
  // `nav` is the list of { name, link } entries from site metadata.
  const { nav } = useSiteMetadata()
  // NOTE(review): <NAME> below is an anonymisation placeholder — the real
  // site title text belongs inside <Title>.
  return (
    <>
      <MobileNav nav={nav} visible={mobileNavVisible} />
      <Wrapper>
        <Title to="/"><NAME></Title>
        {mobileNavVisible
          ? <CloseIcon onClick={toggleMobileNav} />
          : <HamburgerIcon onClick={toggleMobileNav} />}
        <Items>
          {nav.map(({ name, link }) => (
            <NavItem to={link} key={name}>
              {name}
            </NavItem>
          ))}
        </Items>
      </Wrapper>
    </>
  )
}

export default Navigation
|
import { Injectable } from '@angular/core';
import { Store } from '@ngrx/store';
import { AppStore } from '../../app.store';
import { createAction } from '../create-action';
/**
 * Dispatches user-related actions (login, signup, and "set") onto the
 * NgRx store. Each public method emits the corresponding *_SEND action;
 * the matching SUCCESS/FAIL types are declared here for whatever code
 * completes the request.
 */
@Injectable()
export class UserActions {
  // Action type constants, grouped as SEND / SUCCESS / FAIL triples.
  static LOGIN = 'USER_LOGIN_SEND';
  static LOGIN_SUCCESS = 'USER_LOGIN_SUCCESS';
  static LOGIN_FAIL = 'USER_LOGIN_FAIL';
  static SIGNUP = 'USER_SIGNUP_SEND';
  static SIGNUP_SUCCESS = 'USER_SIGNUP_SUCCESS';
  static SIGNUP_FAIL = 'USER_SIGNUP_FAIL';
  static SET = 'USER_SET_SEND';
  static SET_SUCCESS = 'USER_SET_SUCCESS';
  static SET_FAIL = 'USER_SET_FAIL';

  constructor(
    private store: Store<AppStore>
  ) {}

  /** Dispatch a login request action carrying the given payload. */
  public login(payload) {
    this.store.dispatch(createAction(UserActions.LOGIN, payload));
  }

  /** Dispatch a signup request action carrying the given payload. */
  public signup(payload) {
    this.store.dispatch(createAction(UserActions.SIGNUP, payload));
  }

  /** Dispatch a user "set" request action carrying the given payload. */
  public set(payload) {
    this.store.dispatch(createAction(UserActions.SET, payload));
  }
}
#!/usr/bin/env bash
# Copyright 2018 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script removes the cluster registry, unjoins any clusters passed as
# arguments, and removes the federation from the current kubectl context.
# Abort on errors, undefined variables, and failures anywhere in a pipeline.
set -o errexit
set -o nounset
set -o pipefail
# util.sh provides util::wait-for-condition, used at the end of this script.
source "$(dirname "${BASH_SOURCE}")/util.sh"
# Tear down the helm-deployed federation control plane and its CRDs.
# Honors NAMESPACED (namespace-scoped deployment) and DELETE_CLUSTER_RESOURCE
# (also remove cluster-scoped resources for namespaced deployments).
function delete-helm-deployment() {
if [[ ! "${NAMESPACED}" || "${DELETE_CLUSTER_RESOURCE}" ]]; then
# Clean cluster registry
${KCD} crd clusters.clusterregistry.k8s.io
fi
if [[ ! "${NAMESPACED}" ]]; then
${KCD} namespace ${PUBLIC_NS}
fi
# Clean federation resources
${KCD} -n "${NS}" FederatedTypeConfig --all
if [[ ! "${NAMESPACED}" || "${DELETE_CLUSTER_RESOURCE}" ]]; then
# Remove every federation.k8s.io CRD in one delete call.
${KCD} crd $(kubectl get crd | grep -E 'federation.k8s.io' | awk '{print $1}')
fi
# The helm release name is namespace-suffixed for namespaced deployments.
if [[ "${NAMESPACED}" ]]; then
helm delete --purge federation-v2-${NS}
else
helm delete --purge federation-v2
fi
}
# kubectl delete that tolerates resources which are already gone.
KCD="kubectl --ignore-not-found=true delete"
NS="${FEDERATION_NAMESPACE:-federation-system}"
PUBLIC_NS=kube-multicluster-public
NAMESPACED="${NAMESPACED:-}"
DELETE_CLUSTER_RESOURCE="${DELETE_CLUSTER_RESOURCE:-}"
# NOTE(review): backticks — $(...) is the modern equivalent; behavior identical.
IMAGE_NAME=`kubectl get deploy -n ${NS} -oyaml | grep "image:" | awk '{print $2}'`
LATEST_IMAGE_NAME=quay.io/kubernetes-multicluster/federation-v2:latest
# NOTE(review): USE_LATEST is computed but not read in this script — possibly
# consumed elsewhere; confirm before removing.
if [[ "${IMAGE_NAME}" == "$LATEST_IMAGE_NAME" ]]; then
USE_LATEST=y
else
USE_LATEST=
fi
KF_NS_ARG="--federation-namespace=${NS} "
# Unjoin clusters by removing objects added by kubefedctl.
HOST_CLUSTER="$(kubectl config current-context)"
JOINED_CLUSTERS="$(kubectl -n "${NS}" get federatedclusters -o=jsonpath='{range .items[*]}{.metadata.name}{" "}{end}')"
for c in ${JOINED_CLUSTERS}; do
./bin/kubefedctl unjoin "${c}" --host-cluster-context "${HOST_CLUSTER}" --remove-from-registry --v=2 ${KF_NS_ARG}
done
# Delete the federation resources deployed via helm, then the namespace.
delete-helm-deployment
${KCD} ns "${NS}"
# Wait for the namespaces to be removed
# Succeeds only once `kubectl get ns` exits with code exactly 1 (namespace
# not found); other non-zero codes (e.g. connection failures) do not count
# as deleted.
function ns-deleted() {
kubectl get ns "${1}" &> /dev/null
[[ "$?" = "1" ]]
}
# Block (up to 120s each) until the namespaces are fully removed.
util::wait-for-condition "removal of namespace '${NS}'" "ns-deleted ${NS}" 120
# The public namespace only exists for cluster-scoped deployments.
if [[ ! "${NAMESPACED}" ]]; then
util::wait-for-condition "removal of namespace '${PUBLIC_NS}'" "ns-deleted ${PUBLIC_NS}" 120
fi
|
package org.rs2server.rs2.domain.service.api.content;
import org.rs2server.rs2.model.player.Player;
import javax.annotation.Nonnull;
/**
 * Service contract for the Motherlode Mine content: depositing mined
 * pay-dirt into the hopper and claiming the resulting ore from the sack.
 *
 * @author tommo
 */
public interface MotherlodeMineService {

    /**
     * Attempts to claim any available ore in the sack for the player.
     *
     * @param player The player.
     */
    void claimOreInSack(@Nonnull Player player);

    /**
     * Adds the given amount of pay dirt into the sack for the player.
     *
     * @param player The player.
     * @param amount The amount of pay dirt to add.
     */
    void addPayDirtToSack(@Nonnull Player player, final int amount);

    /**
     * Gets the amount of pay dirt the player has deposited into the hopper.
     *
     * @param player The player.
     * @return The amount of pay dirt the player has deposited.
     */
    int getPayDirtInSack(@Nonnull Player player);

    /**
     * Attempts to deposit all pay-dirt in the player's inventory into the hopper.
     *
     * @param player The player.
     */
    void depositPayDirt(@Nonnull Player player);
}
|
def calculate_sum(numbers: "list[int]") -> int:
    """Return the sum of the given numbers.

    The original annotation ``Array[int]`` referenced an undefined name and
    would raise NameError when the ``def`` statement is evaluated; the
    annotation is now a (string) ``list[int]``.
    """
    # Built-in sum() replaces the manual loop, which also shadowed the
    # built-in with a local variable named ``sum``.
    return sum(numbers)

nums = [3, 7, 10]
print("The sum is", calculate_sum(nums))
#include "duckdb/execution/operator/join/physical_cross_product.hpp"
#include "duckdb/execution/physical_plan_generator.hpp"
#include "duckdb/planner/operator/logical_cross_product.hpp"
using namespace duckdb;
using namespace std;
// Lower a logical cross product into its physical operator: create physical
// plans for both children, then wrap them in a PhysicalCrossProduct.
unique_ptr<PhysicalOperator> PhysicalPlanGenerator::CreatePlan(LogicalCrossProduct &op) {
	// A cross product always combines exactly two inputs.
	assert(op.children.size() == 2);
	auto left = CreatePlan(*op.children[0]);
	auto right = CreatePlan(*op.children[1]);
	// Ownership of both child plans moves into the new physical operator.
	return make_unique<PhysicalCrossProduct>(op, move(left), move(right));
}
|
#compdef kps
# zsh completion for the `kps` CLI.
# Positional scheme: word 1 = subcommand, word 2 = sub-subcommand,
# word 3 = a filename. _arguments sets $state to level1/level2 for the
# first two positions; the case below supplies candidates per position.
_arguments \
'1: :->level1' \
'2: :->level2' \
'3: :_files'
case $state in
# Completing the first argument: list the kps subcommands.
level1)
# $words[1] is the command word itself.
case $words[1] in
kps)
_arguments '1: :(completion config create debug delete get help log service update)'
;;
*)
_arguments '*: :_files'
;;
esac
;;
# Completing the second argument: candidates depend on the subcommand
# chosen as $words[2]; subcommands without sub-subcommands fall through
# to filename completion.
level2)
case $words[2] in
log)
_arguments '2: :(app pipeline)'
;;
service)
_arguments '2: :(disable enable)'
;;
update)
_arguments '2: :(svcdomain)'
;;
completion)
_arguments '2: :(bash zsh)'
;;
config)
_arguments '2: :(create-context delete-context get-contexts use-context)'
;;
debug)
_arguments '2: :(function pipeline purge)'
;;
delete)
_arguments '2: :(application category datapipeline datasource function logcollector service)'
;;
get)
_arguments '2: :(application category datapipeline datasource function logcollector node project service svcdomain)'
;;
*)
_arguments '*: :_files'
;;
esac
;;
# Third and later words: plain filename completion.
*)
_arguments '*: :_files'
;;
esac
|
#!/bin/bash
# Installs wine1.6/1.8 (plus q4wine) and initializes a 32-bit wine prefix.
# NOTE(review): several dpkg/apt blocks are repeated verbatim below; this
# looks like deliberate "retry until consistent" behavior, but confirm the
# duplication is intentional before pruning.
# NOTE(review): --force-yes is deprecated in later apt releases
# (--allow-* options replace it) -- verify on the target distro.
echo "--------------------WINE1.6/8------------------------" ;
# Repair any half-configured packages before starting.
sudo dpkg --configure -a;
sudo apt-get install -f;
sudo apt-get dist-upgrade -y ;
sudo apt-get update -y;
sudo apt-get upgrade -y ;
sudo DEBIAN_FRONTEND=noninteractive apt-get build-dep -y --force-yes --install-recommends wine1.6;
# wine needs 32-bit packages; enable the i386 foreign architecture.
sudo dpkg --add-architecture i386;
sudo dpkg --configure -a;
sudo apt-get install -f;
sudo apt-get dist-upgrade -y ;
sudo apt-get update -y;
sudo apt-get upgrade -y ;
# Show the configured architectures for the log.
dpkg --print-architecture;
dpkg --print-foreign-architectures;
sudo dpkg --add-architecture i386;
sudo DEBIAN_FRONTEND=noninteractive apt-get build-dep -y --force-yes --install-recommends wine1.6;
sudo dpkg --add-architecture i386;
sudo DEBIAN_FRONTEND=noninteractive apt-get install -y --force-yes --install-recommends wine1.6;
sudo DEBIAN_FRONTEND=noninteractive apt-get install -y --force-yes --install-recommends wine1.8;
# wine32
#sudo DEBIAN_FRONTEND=noninteractive apt-get build-dep -y --force-yes --install-recommends q4wine;
sudo DEBIAN_FRONTEND=noninteractive apt-get install -y --force-yes --install-recommends q4wine;
# Create/configure a 32-bit wine prefix in the default location.
env WINEARCH=win32 WINEPREFIX=~/.wine winecfg;
echo "--------------------WINE1.6/8------------------------" ;
<gh_stars>0
/**
 * A bookable office room.
 */
class Office {
  /**
   * @param {string} [name="Untitled"] Display name of the office.
   */
  constructor(name = "Untitled") {
    // Room label shown to users.
    this.name = name;
    // Newly created offices start out free for booking.
    this.available = true;
  }
}
|
// AppConfig.h
//AppConfig and related Classes
// #define _SCL_SECURE_NO_WARNINGS
#ifndef APPCONFIG_H
#define APPCONFIG_H
#pragma once
#include "Protocol.h"
// AppConfig: top-level application configuration container.
// Holds the application name, the protocol identifier, and two maps of
// definitions (hardware-control messages and displays), each keyed by a
// hash string. Persisted via Boost.Serialization (see serialize()).
class AppConfig
{
public:
AppConfig();
~AppConfig();
// Setters and getters
void SetAppName( std::wstring wstrName);
std::wstring GetAppName() const;
void SetProtocolID( std::string strID);
std::string GetProtocolID() const;
// Look up a hardware-control message definition by its hash key.
MessageDefinition GetControlDefinition( std::string strControlHash);
// Insert (or overwrite) one hash -> message-definition entry.
void SetControlDefinition( std::pair <std::string, MessageDefinition> myPair);
// Positional access into HardwareControls.
// NOTE(review): with std::map, position follows sorted key order --
// confirm callers expect that ordering.
MessageDefinition GetControlDefinitionItem( unsigned int nItem);
// Returns the whole map BY VALUE (copies every entry).
std::map <std::string , MessageDefinition> GetHardwareControls()
{
return HardwareControls;
}
// Replace the entire hardware-controls map.
void SetHardwareControls( std::map <std::string , MessageDefinition> myNewHardwareControls)
{
HardwareControls = myNewHardwareControls;
}
// Look up a display definition by its hash key.
DisplayDefinition GetDisplayDefinition( std::string strDisplayHash);
// Reverse lookup: find the hash of the display with the given label.
std::string GetDisplayHash( std::wstring wstrLabel);
// Insert (or overwrite) one hash -> display-definition entry.
void SetDisplayDefinition( std::pair <std::string, DisplayDefinition> myPair);
// Merge/replace display definitions with a new set.
void UpdateDisplayDefinitions( std::map <std::string, DisplayDefinition> myNewDisplays);
// Positional access into Displays (see ordering note above).
DisplayDefinition GetDisplayDefinitionItem( unsigned int nItem);
// Collect the display labels for UI listing.
std::vector <std::wstring> GetDisplayNames();
// Returns the whole map BY VALUE (copies every entry).
std::map <std::string , DisplayDefinition> GetDisplays()
{
return Displays;
}
// Replace the entire displays map.
void SetDisplays( std::map <std::string , DisplayDefinition> myNewDisplays)
{
Displays = myNewDisplays;
}
private:
// Stream operators and Boost need direct access to the private state.
friend std::ostream & operator<<( std::ostream &os, const AppConfig &AC);
friend std::istream & operator>>( std::istream &is, const AppConfig &AC);
friend class boost::serialization::access;
// Boost.Serialization hook: archives each member under a stable NVP tag
// so XML archives stay readable across versions.
// NOTE(review): the catch-all swallows the original exception and
// rethrows a bare error code, losing the failure detail.
template<class Archive>
void serialize( Archive & myArchive, const unsigned int version)
{
try
{
const char * cAppName= "AppName";
myArchive & boost::serialization::make_nvp( cAppName, wstrAppName);
const char * cProtocolID = "ProtocolID";
myArchive & boost::serialization::make_nvp( cProtocolID, strProtocolID);
const char * cHardwareControls= "HardwareControls";
myArchive & boost::serialization::make_nvp( cHardwareControls, HardwareControls);
const char * cDisplays= "Displays";
myArchive & boost::serialization::make_nvp( cDisplays, Displays);
} // end try
catch( ...)
{
throw ID_LOAD_DATA_EXCEPTION;
} // end catch
}
// Internal storage
std::wstring wstrAppName;
std::string strProtocolID;
// This map stores the Hash of the message as an std::string key and a message definition
std::map <std::string , MessageDefinition> HardwareControls;
std::map <std::string , DisplayDefinition> Displays;
};
#endif
|
<gh_stars>0
// Package ed25519hash provides optimized routines for signing and verifying Sia
// hashes.
package ed25519hash
import (
"bytes"
"crypto/ed25519"
"crypto/sha512"
"strconv"
"gitlab.com/NebulousLabs/Sia/crypto"
"lukechampine.com/us/ed25519hash/internal/edwards25519"
)
// Verify reports whether sig is a valid signature of hash by pub.
func Verify(pub ed25519.PublicKey, hash crypto.Hash, sig []byte) bool {
if l := len(pub); l != ed25519.PublicKeySize {
panic("ed25519: bad public key length: " + strconv.Itoa(l))
}
// Reject signatures of the wrong length, or whose scalar half s has any
// of the top three bits of its final byte set (mask 224 = 0b11100000).
if len(sig) != ed25519.SignatureSize || sig[63]&224 != 0 {
return false
}
// Decode the public key into a curve point A; reject invalid encodings.
var A edwards25519.ExtendedGroupElement
var publicKeyBytes [32]byte
copy(publicKeyBytes[:], pub)
if !A.FromBytes(&publicKeyBytes) {
return false
}
// Negate A (flip X and T) so the double-scalar multiplication below
// computes R' = [k](-A) + [s]B.
edwards25519.FeNeg(&A.X, &A.X)
edwards25519.FeNeg(&A.T, &A.T)
// k = SHA-512(R || pub || hash) reduced mod the group order, where R is
// the first 32 bytes of the signature.
buf := make([]byte, 96)
copy(buf[:32], sig[:32])
copy(buf[32:], pub)
copy(buf[64:], hash[:])
digest := sha512.Sum512(buf)
var hReduced [32]byte
edwards25519.ScReduce(&hReduced, &digest)
var R edwards25519.ProjectiveGroupElement
var b [32]byte
copy(b[:], sig[32:])
edwards25519.GeDoubleScalarMultVartime(&R, &hReduced, &A, &b)
// Valid iff the recomputed point equals the R half of the signature.
var checkR [32]byte
R.ToBytes(&checkR)
return bytes.Equal(sig[:32], checkR[:])
}
// Sign signs a hash with priv, allocating the 64-byte signature buffer.
func Sign(priv ed25519.PrivateKey, hash crypto.Hash) []byte {
signature := make([]byte, ed25519.SignatureSize)
return sign(signature, priv, hash)
}

// sign writes the Ed25519 signature of hash into signature and returns it.
// Layout follows RFC 8032: signature = encodedR || s.
func sign(signature []byte, priv ed25519.PrivateKey, hash crypto.Hash) []byte {
if l := len(priv); l != ed25519.PrivateKeySize {
panic("ed25519: bad private key length: " + strconv.Itoa(l))
}
// Expand the 32-byte seed: the first half of SHA-512(seed) becomes the
// secret scalar a after the standard Ed25519 bit clamping.
digest1 := sha512.Sum512(priv[:32])
var expandedSecretKey [32]byte
copy(expandedSecretKey[:], digest1[:32])
expandedSecretKey[0] &= 248
expandedSecretKey[31] &= 63
expandedSecretKey[31] |= 64
// r = SHA-512(prefix || hash) reduced, where prefix is the second half
// of SHA-512(seed).
buf := make([]byte, 96)
copy(buf[:32], digest1[32:])
copy(buf[32:], hash[:])
messageDigest := sha512.Sum512(buf[:64])
var messageDigestReduced [32]byte
edwards25519.ScReduce(&messageDigestReduced, &messageDigest)
// R = [r]B (base-point multiplication), encoded into the first half.
var R edwards25519.ExtendedGroupElement
edwards25519.GeScalarMultBase(&R, &messageDigestReduced)
var encodedR [32]byte
R.ToBytes(&encodedR)
// k = SHA-512(R || A || hash) reduced; A is the public-key half of priv.
copy(buf[:32], encodedR[:])
copy(buf[32:], priv[32:])
copy(buf[64:], hash[:])
hramDigest := sha512.Sum512(buf[:96])
var hramDigestReduced [32]byte
edwards25519.ScReduce(&hramDigestReduced, &hramDigest)
// s = k*a + r (mod group order)
var s [32]byte
edwards25519.ScMulAdd(&s, &hramDigestReduced, &expandedSecretKey, &messageDigestReduced)
copy(signature[:32], encodedR[:])
copy(signature[32:], s[:])
return signature
}
// ExtractPublicKey extracts the PublicKey portion of priv.
// A crypto/ed25519 private key is seed || publicKey, so the public key is
// the final 32 bytes. The returned slice aliases priv (no copy is made).
func ExtractPublicKey(priv ed25519.PrivateKey) ed25519.PublicKey {
return ed25519.PublicKey(priv[32:])
}
|
def greater_than_n(nums, n):
    """Return a list of the items in nums that are strictly greater than n."""
    kept = []
    for value in nums:
        if value > n:
            kept.append(value)
    return kept


greater_than_n([3, 4, 5, 6, 7], 5)
# Returns [6,7]
<reponame>devinxi/ossos<filename>src/ikrig/rigs/IKRig.ts
//#region IMPORTS
import type { Armature, Pose } from '../../armature/index'
import { IKChain } from './IKChain';
//#endregion
class IKRig{
//#region MAIN
items: Map< string, IKChain > = new Map();
constructor(){}
//#endregion
//#region METHODS
// Change the Bind Transform for all the chains
// Mostly used for late binding a TPose when armature isn't naturally in a TPose
bindPose( pose: Pose ): this{
let ch: IKChain;
for( ch of this.items.values() ) ch.bindToPose( pose );
return this;
}
updateBoneLengths( pose: Pose ): this{
let ch: IKChain;
for( ch of this.items.values() ){
ch.resetLengths( pose );
}
return this;
}
get( name: string ): IKChain | undefined{
return this.items.get( name );
}
add( arm: Armature, name:string, bNames: string[] ): IKChain{
const chain = new IKChain( bNames, arm );
this.items.set( name, chain );
return chain;
}
//#endregion
}
export default IKRig; |
// Angular factory wrapping $http with simple promise-returning helpers.
// NOTE(review): $location is injected but never used in this factory.
// NOTE(review): BOTH Get and Post send `data` as a URL query parameter
// (`params: { data: data }`); Post sends no request body. Confirm the
// backend really expects query-string data on POST before changing.
mySiteApp.factory('coService', function ($http, $location) {
return {
// GET `endpoint` with `data` serialized into the query string.
// Resolves with the response body only.
'Get': function (endpoint, data) {
var promise = $http({
url: endpoint,
method: "GET",
params: { data: data }
}).then(function (response) {
return response.data;
});
return promise;
},
// POST to `endpoint`; see the note above about params vs. body.
// Resolves with the response body only.
'Post': function (endpoint, data) {
var promise = $http({
url: endpoint,
method: "POST",
params: { data: data }
}).then(function (response) {
return response.data;
});
return promise;
}
}
});
import { Router } from '@angular/router';
import { Injectable } from '@angular/core';
import { LoginModal } from '../model/login-class';
import { Observable, BehaviorSubject } from 'rxjs';
import 'rxjs/Rx';
import { ApiService } from '../services/api.service';
import { User } from '../model/user-class';
@Injectable({
providedIn: 'root'
})
// Root-scoped auth service: performs sign-in via the API, caches the
// session in localStorage, and exposes the current user as a stream.
export class AuthService {
// NOTE(review): `token` is never assigned anywhere in this class, so
// getToken() always returns undefined; the real token is kept in
// localStorage ('currentUserToken'). Confirm which one callers rely on.
token: string;
// Emits the login state; starts as false.
loggedOn: BehaviorSubject<any> = new BehaviorSubject(false);
// Emits the logged-in user's details (null when signed out).
currentUser: BehaviorSubject<any> = new BehaviorSubject(null);
constructor(private router: Router, private api: ApiService) {
}
// Sign in with username/password. On success, persists the user, its
// permissions and token to localStorage and pushes them on currentUser.
// Returns the raw API result stream either way.
signinUser(username: string, password: string) {
const user = new LoginModal();
user.UserName = username;
user.Password = password;
return this.onLogin(user)
.map((result) => {
// Success = no error flag and the query matched a user row.
if (!result[0].error && result[0].data.hasRows) {
console.log('result login', result);
// const res = JSON.parse(result);
localStorage.setItem('isLogged', 'true');
this.currentUser.next({ 'user': result[0].data.user, 'user_permission': result[0].data.user_permission, 'token': result[0].data.token, 'group_user_ids': result[0].data.group_user_ids });
localStorage.setItem('currentUser', JSON.stringify(result[0].data.user));
localStorage.setItem('currentUserPermission', JSON.stringify(result[0].data.user_permission));
localStorage.setItem('currentUserToken', JSON.stringify(result[0].data.token));
}
return result;
}).catch((error: any) => Observable.throw(error || 'Server error'));
// your code for checking credentials and getting tokens for for signing in user
}
// Clear all persisted session state and return to the auth screen.
logout() {
localStorage.removeItem('isLogged');
localStorage.removeItem('currentUser');
localStorage.removeItem('currentUserToken')
localStorage.removeItem('currentUserPermission')
this.currentUser.next(null);
this.router.navigate(['/auth']);
}
// See the NOTE on `token` above: this currently always yields undefined.
getToken() {
return this.token;
}
// NOTE(review): empty body -- always returns undefined (falsy). Guards
// calling this will treat every user as unauthenticated; confirm intent.
isAuthenticated() {
}
// Raw sign-in API call.
onLogin(data: LoginModal): Observable<any> {
// alert(data.UserName)
return this.api.apiCaller('post', '/SignIn', data);
}
}
|
#!/usr/bin/env bash
#
# MetaCall Build Bash Script by Parra Studios
# Build and install bash script utility for MetaCall.
#
# Copyright (C) 2016 - 2019 Vicente Eduardo Ferrer Garcia <vic798@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Global build configuration. Defaults below are overridden per-option by
# sub_options() from the command-line words.
RUN_AS_ROOT=0
SUDO_CMD=sudo
# CMake build type: Release | Debug | RelWithDebInfo
BUILD_TYPE=Release
# Feature/loader toggles: 0 = off, 1 = on.
BUILD_PYTHON=0
BUILD_RUBY=0
BUILD_NETCORE=0
BUILD_V8=0
BUILD_NODEJS=0
BUILD_FILE=0
BUILD_SCRIPTS=0
BUILD_EXAMPLES=0
BUILD_DISTRIBUTABLE=0
BUILD_PORTS=0
BUILD_COVERAGE=0
# Parse the option words given on the command line and set the matching
# RUN_AS_ROOT / SUDO_CMD / BUILD_TYPE / BUILD_* globals.
# Unknown words are silently ignored (same as the original if-chain).
sub_options() {
	for option in "$@"; do
		case "$option" in
			root)
				echo "Running build script as root"
				RUN_AS_ROOT=1
				SUDO_CMD=""
				;;
			debug)
				echo "Build all scripts in debug mode"
				BUILD_TYPE=Debug
				;;
			release)
				echo "Build all scripts in release mode"
				BUILD_TYPE=Release
				;;
			relwithdebinfo)
				echo "Build all scripts in release mode with debug symbols"
				BUILD_TYPE=RelWithDebInfo
				;;
			python)
				echo "Build with python support"
				BUILD_PYTHON=1
				;;
			ruby)
				echo "Build with ruby support"
				BUILD_RUBY=1
				;;
			netcore)
				echo "Build with netcore support"
				BUILD_NETCORE=1
				;;
			v8)
				echo "Build with v8 support"
				BUILD_V8=1
				;;
			nodejs)
				echo "Build with nodejs support"
				BUILD_NODEJS=1
				;;
			file)
				echo "Build with file support"
				BUILD_FILE=1
				;;
			scripts)
				echo "Build all scripts"
				BUILD_SCRIPTS=1
				;;
			examples)
				echo "Build all examples"
				BUILD_EXAMPLES=1
				;;
			distributable)
				echo "Build distributable libraries"
				BUILD_DISTRIBUTABLE=1
				;;
			ports)
				echo "Build all ports"
				BUILD_PORTS=1
				;;
			coverage)
				echo "Build all coverage reports"
				BUILD_COVERAGE=1
				;;
		esac
	done
}
# Assemble the CMake option string from the BUILD_* globals set by
# sub_options(), then invoke cmake against the parent directory.
# NOTE(review): the python3.5 and .NET Core 1.1.10 paths below are
# hard-coded; they must match the build host.
sub_configure() {
BUILD_STRING="-DOPTION_BUILD_LOADERS=On \
-DOPTION_BUILD_LOADERS_MOCK=On"
# Scripts
if [ $BUILD_SCRIPTS = 1 ]; then
BUILD_STRING="$BUILD_STRING -DOPTION_BUILD_SCRIPTS=On"
else
BUILD_STRING="$BUILD_STRING -DOPTION_BUILD_SCRIPTS=Off"
fi
# Python
if [ $BUILD_PYTHON = 1 ]; then
BUILD_STRING="$BUILD_STRING \
-DPYTHON_EXECUTABLE=/usr/bin/python3.5 \
-DOPTION_BUILD_LOADERS_PY=On"
if [ $BUILD_SCRIPTS = 1 ]; then
BUILD_STRING="$BUILD_STRING -DOPTION_BUILD_SCRIPTS_PY=On"
fi
fi
# Ruby
if [ $BUILD_RUBY = 1 ]; then
BUILD_STRING="$BUILD_STRING -DOPTION_BUILD_LOADERS_RB=On"
if [ $BUILD_SCRIPTS = 1 ]; then
BUILD_STRING="$BUILD_STRING -DOPTION_BUILD_SCRIPTS_RB=On"
fi
fi
# NetCore
if [ $BUILD_NETCORE = 1 ]; then
BUILD_STRING="$BUILD_STRING \
-DOPTION_BUILD_LOADERS_CS=On \
-DDOTNET_CORE_PATH=/usr/share/dotnet/shared/Microsoft.NETCore.App/1.1.10/"
if [ $BUILD_SCRIPTS = 1 ]; then
BUILD_STRING="$BUILD_STRING -DOPTION_BUILD_SCRIPTS_CS=On"
fi
fi
# V8
if [ $BUILD_V8 = 1 ]; then
BUILD_STRING="$BUILD_STRING -DOPTION_BUILD_LOADERS_JS=On"
if [ $BUILD_SCRIPTS = 1 ]; then
BUILD_STRING="$BUILD_STRING -DOPTION_BUILD_SCRIPTS_JS=On"
fi
fi
# NodeJS
if [ $BUILD_NODEJS = 1 ]; then
BUILD_STRING="$BUILD_STRING -DOPTION_BUILD_LOADERS_NODE=On"
if [ $BUILD_SCRIPTS = 1 ]; then
BUILD_STRING="$BUILD_STRING -DOPTION_BUILD_SCRIPTS_NODE=On"
fi
fi
# File
if [ $BUILD_FILE = 1 ]; then
BUILD_STRING="$BUILD_STRING -DOPTION_BUILD_LOADERS_FILE=On"
if [ $BUILD_SCRIPTS = 1 ]; then
BUILD_STRING="$BUILD_STRING -DOPTION_BUILD_SCRIPTS_FILE=On"
fi
fi
# Examples
if [ $BUILD_EXAMPLES = 1 ]; then
BUILD_STRING="$BUILD_STRING -DOPTION_BUILD_EXAMPLES=On"
else
BUILD_STRING="$BUILD_STRING -DOPTION_BUILD_EXAMPLES=Off"
fi
# Distributable
if [ $BUILD_DISTRIBUTABLE = 1 ]; then
BUILD_STRING="$BUILD_STRING -DOPTION_BUILD_DIST_LIBS=On"
else
BUILD_STRING="$BUILD_STRING -DOPTION_BUILD_DIST_LIBS=Off"
fi
# Ports
if [ $BUILD_PORTS = 1 ]; then
BUILD_STRING="$BUILD_STRING -DOPTION_BUILD_PORTS=On"
else
BUILD_STRING="$BUILD_STRING -DOPTION_BUILD_PORTS=Off"
fi
# Coverage
if [ $BUILD_COVERAGE = 1 ]; then
BUILD_STRING="$BUILD_STRING -DOPTION_COVERAGE=On"
else
BUILD_STRING="$BUILD_STRING -DOPTION_COVERAGE=Off"
fi
# Build type
BUILD_STRING="$BUILD_STRING -DCMAKE_BUILD_TYPE=$BUILD_TYPE"
# Execute CMake
cmake $BUILD_STRING ..
}
# Print the usage text for this script.
# NOTE(review): several listed options (tests, install, static, dynamic)
# are not handled by sub_options() in this file -- confirm whether they
# are parsed elsewhere or the help text is stale.
sub_help() {
echo "Usage: `basename "$0"` list of options"
echo "Options:"
echo "	root: build being run by root"
echo "	debug | release | relwithdebinfo: build type"
echo "	python: build with python support"
echo "	ruby: build with ruby support"
echo "	netcore: build with netcore support"
echo "	v8: build with v8 support"
echo "	nodejs: build with nodejs support"
echo "	file: build with file support"
echo "	scripts: build all scripts"
echo "	examples: build all examples"
echo "	distributable: build distributable libraries"
echo "	tests: build and run all tests"
echo "	install: install all libraries"
echo "	static: build as static libraries"
echo "	dynamic: build as dynamic libraries"
echo "	ports: build all ports"
echo "	coverage: build all coverage reports"
echo ""
}
# Entry point: with no arguments, show the help text; otherwise parse the
# option words and run the CMake configure step.
# NOTE(review): $@ is unquoted; fine while all options are single words.
case "$#" in
0)
sub_help
;;
*)
sub_options $@
sub_configure
;;
esac
|
<reponame>gccaldwell99/gccaldwell99.github.io
// Injects a text input (prefilled with "Spot") into the #textbox element
// so the user can type a name for the puppy.
function namePuppy() {
  const container = document.getElementById("textbox");
  const field = document.createElement("INPUT");
  field.type = "text";
  field.id = "pname";
  field.name = "pname";
  field.value = "Spot";
  container.appendChild(document.createElement("BR"));
  container.appendChild(field);
}
// Reads the chosen name out of the #pname input, commits it via
// setPuppyName(), then removes the input from the page.
function finishNamingPuppy() {
  const field = document.getElementById("pname");
  setPuppyName(field.value);
  field.remove();
}
#!/bin/sh
# Copyright (c) 2012-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Generates build.h with either the git tag name or <short-hash>[-dirty],
# rewriting the file only when the information actually changed.
# Usage: genbuild.sh <output-file> [<srcroot>]
export LC_ALL=C
if [ $# -gt 1 ]; then
cd "$2" || exit 1
fi
if [ $# -gt 0 ]; then
FILE="$1"
shift
# Remember the current first line so we can skip a no-op rewrite below.
if [ -f "$FILE" ]; then
INFO="$(head -n 1 "$FILE")"
fi
else
echo "Usage: $0 <filename> <srcroot>"
exit 1
fi
# Succeeds only when git knows the given paths: the status output (or the
# '??' injected when git itself fails) must contain no '?' markers.
git_check_in_repo() {
! { git status --porcelain -uall --ignored "$@" 2>/dev/null || echo '??'; } | grep -q '?'
}
DESC=""
SUFFIX=""
# Only consult git when not disabled, git exists, and we are inside a work
# tree that actually tracks this script.
if [ "${BITCOIN_GENBUILD_NO_GIT}" != "1" -a -e "$(which git 2>/dev/null)" -a "$(git rev-parse --is-inside-work-tree 2>/dev/null)" = "true" ] && git_check_in_repo share/genbuild.sh; then
# clean 'dirty' status of touched files that haven't been modified
git diff >/dev/null 2>/dev/null
# if latest commit is tagged and not dirty, then override using the tag name
RAWDESC=$(git describe --abbrev=0 2>/dev/null)
if [ "$(git rev-parse HEAD)" = "$(git rev-list -1 $RAWDESC 2>/dev/null)" ]; then
git diff-index --quiet HEAD -- && DESC=$RAWDESC
fi
# otherwise generate suffix from git, i.e. string like "59887e8-dirty"
SUFFIX=$(git rev-parse --short HEAD)
git diff-index --quiet HEAD -- || SUFFIX="$SUFFIX-dirty"
fi
# Prefer the tag description; fall back to the commit suffix; else a stub.
if [ -n "$DESC" ]; then
NEWINFO="#define BUILD_DESC \"$DESC\""
elif [ -n "$SUFFIX" ]; then
NEWINFO="#define BUILD_SUFFIX $SUFFIX"
else
NEWINFO="// No build information available"
fi
# only update build.h if necessary
if [ "$INFO" != "$NEWINFO" ]; then
echo "$NEWINFO" >"$FILE"
fi
import { IsString, Matches, MaxLength, MinLength } from 'class-validator';
// DTO for authentication requests; fields are validated declaratively by
// class-validator decorators.
export class AuthDTO {
// Username: 6-20 character string.
@IsString()
@MinLength(6)
@MaxLength(20)
username: string;
// Password: 8-30 character string.
// NOTE(review): the disabled @Matches below would match strings that
// LACK a digit, an uppercase, or a lowercase letter -- i.e. it accepts
// weak passwords rather than rejecting them. Invert it before enabling.
@IsString()
@MinLength(8)
@MaxLength(30)
// @Matches(/^([^0-9]*|[^A-Z]*|[^a-z]*)$/, {
// message: 'Pass word is too weak!!!',
// })
password: string;
}
|
#!/bin/bash
# Cross-compile the docker-machine OTC driver for each OS/arch pair,
# writing binaries to bin/docker-machine-driver-otc.<os>-<arch>.
set -e
# OS="darwin linux windows"
OS="linux"
ARCH="amd64"
echo "Getting build dependencies"
# go get .
#go get -u github.com/golang/lint/golint
echo "Ensuring code quality"
# go vet ./...
#golint ./...
# dmver=$(cd $GOPATH/src/github.com/docker/machine && git describe --abbrev=0 --tags)
# echo "VERSION docker-machine '$dmver'"
# Build one binary per OS/arch combination.
for GOOS in $OS; do
for GOARCH in $ARCH; do
arch="$GOOS-$GOARCH"
binary="bin/docker-machine-driver-otc.$arch"
echo "Building $binary"
GOOS=$GOOS GOARCH=$GOARCH go build -o $binary
done
done
import { Construct, Stack, IConstruct, Tags } from '@aws-cdk/core';
import { BaseStack, BaseStackProps } from '../components/base';
import { IGalaxyExtension } from '../galaxy/galaxy-extension-stack';
import { ISolarSystemCore } from './solar-system-core-stack';
import { SolarSystemCoreImport, SolarSystemCoreImportProps } from './solar-system-core-import';
const SOLAR_SYSTEM_EXTENSION_SYMBOL = Symbol.for('@cdk-cosmos/core.SolarSystemExtensionStack');
/**
 * Contract for a solar-system extension: the parent galaxy extension and
 * the core solar system ("portal") being extended.
 */
export interface ISolarSystemExtension extends Construct {
galaxy: IGalaxyExtension;
portal: ISolarSystemCore;
}
/**
 * Props for SolarSystemExtensionStack. Provide a prebuilt portal import,
 * or partial props from which one will be constructed.
 */
export interface SolarSystemExtensionStackProps extends BaseStackProps {
portal?: SolarSystemCoreImport;
portalProps?: Partial<SolarSystemCoreImportProps>;
}
/**
 * A stack extending a core SolarSystem with app-level resources (services,
 * databases, ...). It binds to the core via a `portal` import, depends on
 * its galaxy extension stack, and tags itself for discovery.
 */
export class SolarSystemExtensionStack extends BaseStack implements ISolarSystemExtension {
  readonly galaxy: IGalaxyExtension;
  readonly portal: SolarSystemCoreImport;

  constructor(galaxy: IGalaxyExtension, id: string, props?: SolarSystemExtensionStackProps) {
    super(galaxy, id, {
      description:
        'Cosmos SolarSystem Extension: App resources dependant on each App Env, like Services and Databases.',
      type: 'SolarSystem',
      ...props,
    });

    // Brand the instance so isSolarSystemExtension() works even across
    // duplicated copies of this module.
    Object.defineProperty(this, SOLAR_SYSTEM_EXTENSION_SYMBOL, { value: true });

    const { portal, portalProps } = props || {};

    this.galaxy = galaxy;
    // Use the supplied portal import, or synthesize one from portalProps.
    this.portal =
      portal ||
      new SolarSystemCoreImport(this.hidden, this.node.id, {
        ...portalProps,
        galaxy: this.galaxy.portal,
      });

    this.addDependency(Stack.of(this.galaxy));
    Tags.of(this).add('cosmos:solarsystem:extension', id);
  }

  /** Duck-type check via the branded symbol (safe across module copies). */
  static isSolarSystemExtension(x: any): x is SolarSystemExtensionStack {
    return typeof x === 'object' && x !== null && SOLAR_SYSTEM_EXTENSION_SYMBOL in x;
  }

  /** Walk a construct's scopes to find its enclosing extension stack. */
  static of(construct: IConstruct): SolarSystemExtensionStack {
    const scopes = [construct, ...construct.node.scopes];
    for (const scope of scopes) {
      if (SolarSystemExtensionStack.isSolarSystemExtension(scope)) return scope;
    }
    // Fix: the message previously said "No Galaxy Core Stack", copied from
    // another stack type; it misreported which lookup failed.
    throw new Error(
      `No Solar System Extension Stack could be identified for the construct at path ${construct.node.path}`
    );
  }
}
|
<reponame>myerstina515/basic-express-server<filename>lib/middleware/validator.js<gh_stars>0
'use strict';

// Express middleware: require a `name` query parameter.
// Calls next() with no arguments when present, and forwards the string
// 'Error' to the error-handling chain otherwise.
function validateQuery(req, res, next) {
  if (!req.query.name) {
    next('Error');
    return;
  }
  next();
}

module.exports = validateQuery;
// Demo: tokenize, extract keywords from, and classify a sentence with the
// `natural` NLP library.
const natural = require('natural');
const tokenizer = new natural.WordTokenizer();
const text = 'The quick brown fox jumps over the lazy dog';
// Parse input text into an array of word tokens.
const parsedText = tokenizer.tokenize(text);
// Extract keywords
// NOTE(review): `natural.KeywordExtractor` does not appear to be part of
// natural's documented API -- verify this constructor exists before use.
const keywordExtractor = new natural.KeywordExtractor();
const keywords = keywordExtractor.extract(text);
// Classify text
// NOTE(review): this BayesClassifier is never trained before classify()
// is called, and it is passed a token ARRAY rather than a string --
// expect a runtime error or meaningless output. Train with addDocument()
// + train() first.
const classifier = new natural.BayesClassifier();
const predictedClassification = classifier.classify(parsedText);
console.log('Parsed text: ', parsedText);
console.log('Extracted keywords: ', keywords);
console.log('Predicted classification: ', predictedClassification);
#!/usr/bin/env bash
# Mason build script for cairo 1.14.2: downloads the release tarball,
# installs its dependencies (libpng, freetype, pixman), patches it, and
# configures a static, headless build (image/pdf/svg/ps surfaces only).
MASON_NAME=cairo
MASON_VERSION=1.14.2
MASON_LIB_FILE=lib/libcairo.a
MASON_PKGCONFIG_FILE=lib/pkgconfig/cairo.pc
. ${MASON_DIR}/mason.sh
# Fetch and unpack the cairo source, pinned by tarball checksum.
function mason_load_source {
mason_download \
http://cairographics.org/releases/${MASON_NAME}-${MASON_VERSION}.tar.xz \
3202106739cb0cb044c910a9b67769c95d0b6bce
mason_extract_tar_xz
export MASON_BUILD_PATH=${MASON_ROOT}/.build/${MASON_NAME}-${MASON_VERSION}
}
# Install dependency packages via mason and record their prefixes for the
# include/lib flags used in mason_compile.
function mason_prepare_compile {
cd $(dirname ${MASON_ROOT})
${MASON_DIR}/mason install libpng 1.6.17
MASON_PNG=$(${MASON_DIR}/mason prefix libpng 1.6.17)
${MASON_DIR}/mason install freetype 2.6
MASON_FREETYPE=$(${MASON_DIR}/mason prefix freetype 2.6)
${MASON_DIR}/mason install pixman 0.32.6
MASON_PIXMAN=$(${MASON_DIR}/mason prefix pixman 0.32.6)
}
# Patch, configure, and build cairo with every windowing / GPU / test
# feature disabled, then install into the mason prefix.
function mason_compile {
mason_step "Loading patch 'https://github.com/mapbox/mason/blob/${MASON_SLUG}/patch.diff'..."
curl --retry 3 -s -f -# -L \
https://raw.githubusercontent.com/mapbox/mason/${MASON_SLUG}/patch.diff \
-O || (mason_error "Could not find patch for ${MASON_SLUG}" && exit 1)
# patch cairo to avoid needing pkg-config as a build dep
patch -N -p1 < ./patch.diff
# Point the compiler and linker at the mason-installed dependencies.
CFLAGS="${CFLAGS} -Wno-enum-conversion -I${MASON_PIXMAN}/include/pixman-1 -I${MASON_FREETYPE}/include/freetype2 -I${MASON_PNG}/include/"
LDFLAGS="-L${MASON_PIXMAN}/lib -lpixman-1 -L${MASON_FREETYPE}/lib -lfreetype -L${MASON_PNG}/lib -lpng"
CFLAGS=${CFLAGS} LDFLAGS=${LDFLAGS} ./autogen.sh \
--prefix=${MASON_PREFIX} \
${MASON_HOST_ARG} \
--enable-static --disable-shared \
--enable-pdf=yes \
--enable-ft=yes \
--enable-png=yes \
--enable-svg=yes \
--enable-ps=yes \
--enable-fc=no \
--enable-script=no \
--enable-interpreter=no \
--enable-quartz=no \
--enable-quartz-image=no \
--enable-quartz-font=no \
--enable-trace=no \
--enable-gtk-doc=no \
--enable-qt=no \
--enable-win32=no \
--enable-win32-font=no \
--enable-skia=no \
--enable-os2=no \
--enable-beos=no \
--enable-drm=no \
--enable-gallium=no \
--enable-gl=no \
--enable-glesv2=no \
--enable-directfb=no \
--enable-vg=no \
--enable-egl=no \
--enable-glx=no \
--enable-wgl=no \
--enable-test-surfaces=no \
--enable-tee=no \
--enable-xml=no \
--disable-valgrind \
--enable-gobject=no \
--enable-xlib=no \
--enable-xlib-xrender=no \
--enable-xcb=no \
--enable-xlib-xcb=no \
--enable-xcb-shm=no \
--enable-full-testing=no \
--enable-symbol-lookup=no \
--disable-dependency-tracking
# The -i and -k flags are to workaround make[6]: [install-data-local] Error 1 (ignored)
make V=1 -j${MASON_CONCURRENCY} -i -k
make install -i -k
}
function mason_clean {
make clean
}
mason_run "$@"
|
package error
import (
"context"
"errors"
"fmt"
"github.com/hashicorp/go-multierror"
"github.com/peak/s5cmd/storage"
"github.com/peak/s5cmd/storage/url"
)
// Error wraps a failed operation together with its operands so callers can
// report which command failed and on what.
type Error struct {
// Op is the operation being performed, usually the name of the method
// being invoked (copy, move, etc.)
Op string
// Src is the source argument
Src *url.URL
// Dst is the destination argument
Dst *url.URL
// The underlying error if any
Err error
}

// FullCommand returns "<op> <src> <dst>" for log and error display.
func (e *Error) FullCommand() string {
return fmt.Sprintf("%v %v %v", e.Op, e.Src, e.Dst)
}

// Error implements the error interface. Only the underlying error's text
// is reported; Op/Src/Dst are not included.
// NOTE(review): panics if Err is nil -- confirm all constructors set it.
func (e *Error) Error() string {
return e.Err.Error()
}
// IsCancelation reports whether err represents a cancelation: a (wrapped)
// context.Canceled, a storage-layer cancelation, or -- for aggregated
// multierror values -- any sub-error that is itself a cancelation.
func IsCancelation(err error) bool {
if err == nil {
return false
}
// Direct or wrapped context cancelation.
if errors.Is(err, context.Canceled) {
return true
}
// Storage-backend-specific cancelation (semantics live in the storage
// package).
if storage.IsCancelationError(err) {
return true
}
// For aggregated errors, one canceled sub-error is enough.
merr, ok := err.(*multierror.Error)
if !ok {
return false
}
for _, err := range merr.Errors {
if IsCancelation(err) {
return true
}
}
return false
}
// OK-to-have error types (warnings) that is used when the job status is warning.
var (
ErrObjectExists = fmt.Errorf("object already exists")
ErrObjectIsNewer = fmt.Errorf("object is newer or same age")
ErrObjectSizesMatch = fmt.Errorf("object size matches")
)
// IsWarning reports whether err is one of the benign sentinel errors that
// downgrade a job's status to "warning" instead of failing it. Comparison
// is by identity, matching how the sentinels are declared.
func IsWarning(err error) bool {
	return err == ErrObjectExists ||
		err == ErrObjectIsNewer ||
		err == ErrObjectSizesMatch
}
|
#!/bin/bash -e
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Script to install everything needed to build chromium (well, ideally, anyway)
# See http://code.google.com/p/chromium/wiki/LinuxBuildInstructions
# and http://code.google.com/p/chromium/wiki/LinuxBuild64Bit
usage() {
  # Print the help text, then abort the script with a non-zero status.
  cat <<EOF
Usage: $0 [--options]
Options:
--[no-]syms: enable or disable installation of debugging symbols
--[no-]lib32: enable or disable installation of 32 bit libraries
--[no-]arm: enable or disable installation of arm cross toolchain
--[no-]chromeos-fonts: enable or disable installation of Chrome OS fonts
--no-prompt: silently select standard options/defaults
Script will prompt interactively if options not given.
EOF
  exit 1
}
# Parse command-line flags. Any unrecognized flag falls through to usage(),
# which exits the script. --no-prompt also switches apt-get to quiet,
# non-interactive mode.
while test "$1" != ""
do
  case "$1" in
  --syms)                   do_inst_syms=1;;
  --no-syms)                do_inst_syms=0;;
  --lib32)                  do_inst_lib32=1;;
  --no-lib32)               do_inst_lib32=0;;
  --arm)                    do_inst_arm=1;;
  --no-arm)                 do_inst_arm=0;;
  --chromeos-fonts)         do_inst_chromeos_fonts=1;;
  --no-chromeos-fonts)      do_inst_chromeos_fonts=0;;
  --no-prompt)              do_default=1
                            do_quietly="-qq --assume-yes"
    ;;
  --unsupported)            do_unsupported=1;;
  *) usage;;
  esac
  shift
done
# Only Ubuntu 10.04 through 12.10 (or a GCEL image) are known to work;
# the --unsupported flag bypasses this check entirely.
ubuntu_versions="10\.04|10\.10|11\.04|11\.10|12\.04|12\.10"
ubuntu_codenames="lucid|maverick|natty|oneiric|precise|quantal"
ubuntu_issue="Ubuntu ($ubuntu_versions|$ubuntu_codenames)"
# GCEL is an Ubuntu-derived VM image used on Google Compute Engine; /etc/issue
# doesn't contain a version number so just trust that the user knows what
# they're doing.
gcel_issue="^GCEL"

if [ 0 -eq "${do_unsupported-0}" ] ; then
  if ! egrep -q "($ubuntu_issue|$gcel_issue)" /etc/issue; then
    echo "ERROR: Only Ubuntu 10.04 (lucid) through 12.10 (quantal) are"\
      "currently supported" >&2
    exit 1
  fi

  if ! uname -m | egrep -q "i686|x86_64"; then
    echo "Only x86 architectures are currently supported" >&2
    exit
  fi
fi

# Warn (but continue) when not running as root; package installs below go
# through sudo and may prompt for a password.
if [ "x$(id -u)" != x0 ]; then
  echo "Running as non-root user."
  echo "You might have to enter your password one or more times for 'sudo'."
  echo
fi
# Packages needed for chromeos only
chromeos_dev_list="libbluetooth-dev"

# Packages needed for development
dev_list="apache2.2-bin bison curl elfutils fakeroot flex g++ gperf
          language-pack-fr libapache2-mod-php5 libasound2-dev libbz2-dev
          libcairo2-dev libcups2-dev libcurl4-gnutls-dev libelf-dev
          libgconf2-dev libgl1-mesa-dev libglib2.0-dev libglu1-mesa-dev
          libgnome-keyring-dev libgtk2.0-dev libkrb5-dev libnspr4-dev
          libnss3-dev libpam0g-dev libpci-dev libpulse-dev libsctp-dev
          libspeechd-dev libsqlite3-dev libssl-dev libudev-dev libwww-perl
          libxslt1-dev libxss-dev libxt-dev libxtst-dev mesa-common-dev
          metacity patch perl php5-cgi pkg-config python python-cherrypy3
          python-dev python-psutil rpm ruby subversion ttf-dejavu-core
          ttf-indic-fonts ttf-kochi-gothic ttf-kochi-mincho ttf-thai-tlwg
          wdiff git-core
          $chromeos_dev_list"

# 64-bit systems need a minimum set of 32-bit compat packages for the pre-built
# NaCl binaries. These are always needed, regardless of whether or not we want
# the full 32-bit "cross-compile" support (--lib32).
if [ "$(uname -m)" = "x86_64" ]; then
  dev_list="${dev_list} libc6-i386 lib32gcc1 lib32stdc++6"
fi

# Run-time libraries required by chromeos only
chromeos_lib_list="libpulse0 libbz2-1.0 libcurl4-gnutls-dev"

# Full list of required run-time libraries
lib_list="libatk1.0-0 libc6 libasound2 libcairo2 libcups2 libexpat1
          libfontconfig1 libfreetype6 libglib2.0-0 libgnome-keyring0
          libgtk2.0-0 libpam0g libpango1.0-0 libpci3 libpcre3 libpixman-1-0
          libpng12-0 libspeechd2 libstdc++6 libsqlite3-0 libudev0 libx11-6
          libxau6 libxcb1 libxcomposite1 libxcursor1 libxdamage1 libxdmcp6
          libxext6 libxfixes3 libxi6 libxinerama1 libxrandr2 libxrender1
          libxtst6 zlib1g $chromeos_lib_list"

# Debugging symbols for all of the run-time libraries
dbg_list="libatk1.0-dbg libc6-dbg libcairo2-dbg libfontconfig1-dbg
          libglib2.0-0-dbg libgtk2.0-0-dbg libpango1.0-0-dbg libpcre3-dbg
          libpixman-1-0-dbg libsqlite3-0-dbg libx11-6-dbg libxau6-dbg
          libxcb1-dbg libxcomposite1-dbg libxcursor1-dbg libxdamage1-dbg
          libxdmcp6-dbg libxext6-dbg libxfixes3-dbg libxi6-dbg libxinerama1-dbg
          libxrandr2-dbg libxrender1-dbg libxtst6-dbg zlib1g-dbg"

# arm cross toolchain packages needed to build chrome on arm
arm_list="libc6-armel-cross libc6-dev-armel-cross libgcc1-armel-cross
          libgomp1-armel-cross linux-libc-dev-armel-cross
          libgcc1-dbg-armel-cross libgomp1-dbg-armel-cross
          binutils-arm-linux-gnueabi cpp-arm-linux-gnueabi
          gcc-arm-linux-gnueabi g++-arm-linux-gnueabi
          libmudflap0-dbg-armel-cross"

# Some package names have changed over time; probe apt-cache and pick
# whichever name this distribution release actually ships.
if apt-cache show ttf-mscorefonts-installer >/dev/null 2>&1; then
  dev_list="${dev_list} ttf-mscorefonts-installer"
else
  dev_list="${dev_list} msttcorefonts"
fi
if apt-cache show libnspr4-dbg >/dev/null 2>&1; then
  dbg_list="${dbg_list} libnspr4-dbg libnss3-dbg"
  lib_list="${lib_list} libnspr4 libnss3"
else
  dbg_list="${dbg_list} libnspr4-0d-dbg libnss3-1d-dbg"
  lib_list="${lib_list} libnspr4-0d libnss3-1d"
fi
if apt-cache show libjpeg-dev >/dev/null 2>&1; then
  dev_list="${dev_list} libjpeg-dev"
else
  dev_list="${dev_list} libjpeg62-dev"
fi

# Some packages are only needed, if the distribution actually supports
# installing them.
if apt-cache show appmenu-gtk >/dev/null 2>&1; then
  lib_list="$lib_list appmenu-gtk"
fi
# Waits for the user to press 'Y' or 'N'. Either uppercase of lowercase is
# accepted. Returns 0 for 'Y' and 1 for 'N'. If an optional parameter has
# been provided to yes_no(), the function also accepts RETURN as a user input.
# The parameter specifies the exit code that should be returned in that case.
# The function will echo the user's selection followed by a newline character.
# Users can abort the function by pressing CTRL-C. This will call "exit 1".
yes_no() {
  # In --no-prompt mode, skip the interaction and return the default.
  if [ 0 -ne "${do_default-0}" ] ; then
    return $1
  fi
  local c
  while :; do
    # Read a single raw byte with echo off (restoring terminal settings on
    # exit via trap) and render it as a hex dump like " 79".
    c="$(trap 'stty echo -iuclc icanon 2>/dev/null' EXIT INT TERM QUIT
         stty -echo iuclc -icanon 2>/dev/null
         dd count=1 bs=1 2>/dev/null | od -An -tx1)"
    case "$c" in
    # hex 0a = RETURN: only accepted when a default was supplied.
    " 0a") if [ -n "$1" ]; then
             [ $1 -eq 0 ] && echo "Y" || echo "N"
             return $1
           fi
      ;;
    # hex 79 = 'y'
    " 79") echo "Y"
           return 0
      ;;
    # hex 6e = 'n'
    " 6e") echo "N"
           return 1
      ;;
    # Empty read: input closed (e.g. CTRL-C) -- abort.
    "") echo "Aborted" >&2
        exit 1
      ;;
    *) # The user pressed an unrecognized key. As we are not echoing
       # any incorrect user input, alert the user by ringing the bell.
       (tput bel) 2>/dev/null
      ;;
    esac
  done
}
# If --syms/--no-syms was not given, ask interactively (default: no).
if test "$do_inst_syms" = ""
then
  echo "This script installs all tools and libraries needed to build Chromium."
  echo ""
  echo "For most of the libraries, it can also install debugging symbols, which"
  echo "will allow you to debug code in the system libraries. Most developers"
  echo "won't need these symbols."
  echo -n "Do you want me to install them for you (y/N) "
  if yes_no 1; then
    do_inst_syms=1
  fi
fi
if test "$do_inst_syms" = "1"; then
  echo "Installing debugging symbols."
else
  echo "Skipping installation of debugging symbols."
  # Emptying dbg_list removes the -dbg packages from the install set below.
  dbg_list=
fi
# Install the Chrome OS default fonts, unless --no-chromeos-fonts was given.
# The installer script lives next to this one, under linux/.
if test "$do_inst_chromeos_fonts" != "0"; then
  echo
  echo "Installing Chrome OS fonts."
  # Directory containing this script (strip the trailing /<basename>).
  dir=`echo $0 | sed -r -e 's/\/[^/]+$//'`
  if ! sudo $dir/linux/install-chromeos-fonts.py; then
    echo "ERROR: The installation of the Chrome OS default fonts failed."
    if [ `stat -f -c %T $dir` == "nfs" ]; then
      echo "The reason is that your repo is installed on a remote file system."
    else
      echo "This is expected if your repo is installed on a remote file system."
    fi
    echo "It is recommended to install your repo on a local file system."
    # Bug fix: the user-facing message misspelled "fonts" as "founts".
    echo "You can skip the installation of the Chrome OS default fonts with"
    echo "the command line option: --no-chromeos-fonts."
    exit 1
  fi
else
  echo "Skipping installation of Chrome OS fonts."
fi
# When cross building for arm on 64-bit systems the host binaries
# that are part of v8 need to be compiled with -m32 which means
# that basic multilib support is needed.
if [ "$(uname -m)" = "x86_64" ]; then
  arm_list="$arm_list g++-multilib"
fi

# The ARM cross toolchain packages only exist on Ubuntu precise.
if test "$do_inst_arm" = "1"; then
  . /etc/lsb-release
  if test "$DISTRIB_CODENAME" != "precise"; then
    echo "ERROR: Installing the ARM cross toolchain is only available on" \
      "Ubuntu precise." >&2
    exit 1
  fi
  echo "Installing ARM cross toolchain."
else
  echo "Skipping installation of ARM cross toolchain."
  arm_list=
fi
sudo apt-get update

# We initially run "apt-get" with the --reinstall option and parse its output.
# This way, we can find all the packages that need to be newly installed
# without accidentally promoting any packages from "auto" to "manual".
# We then re-run "apt-get" with just the list of missing packages.
echo "Finding missing packages..."
packages="${dev_list} ${lib_list} ${dbg_list} ${arm_list}"
# Intentionally leaving $packages unquoted so it's more readable.
echo "Packages required: " $packages
echo
new_list_cmd="sudo apt-get install --reinstall $(echo $packages)"
# The probe run is fed "yes n" so it never actually installs anything.
if new_list="$(yes n | LANG=C $new_list_cmd)"; then
  # We probably never hit this following line.
  echo "No missing packages, and the packages are up-to-date."
elif [ $? -eq 1 ]; then
  # We expect apt-get to have exit status of 1.
  # This indicates that we cancelled the install with "yes n|".
  # Extract the "NEW packages" section from apt-get's output.
  new_list=$(echo "$new_list" |
    sed -e '1,/The following NEW packages will be installed:/d;s/^ //;t;d')
  new_list=$(echo "$new_list" | sed 's/ *$//')
  if [ -z "$new_list" ] ; then
    echo "No missing packages, and the packages are up-to-date."
  else
    echo "Installing missing packages: $new_list."
    sudo apt-get install ${do_quietly-} ${new_list}
  fi
  echo
else
  # An apt-get exit status of 100 indicates that a real error has occurred.
  # I am intentionally leaving out the '"'s around new_list_cmd,
  # as this makes it easier to cut and paste the output
  echo "The following command failed: " ${new_list_cmd}
  echo
  echo "It produces the following output:"
  yes n | $new_list_cmd || true
  echo
  echo "You will have to install the above packages yourself."
  echo
  exit 100
fi
# Install 32bit backwards compatibility support for 64bit systems
if [ "$(uname -m)" = "x86_64" ]; then
  # Without --lib32 we are done at this point.
  if test "$do_inst_lib32" != "1"
  then
    echo "NOTE: If you were expecting the option to install 32bit libs,"
    echo "please run with the --lib32 flag."
    echo
    echo "Installation complete."
    exit 0
  else
    # This conditional statement has been added to deprecate and eventually
    # remove support for 32bit libraries on 64bit systems. But for the time
    # being, we still have to support a few legacy systems (e.g. bots), where
    # this feature is needed.
    # We only even give the user the option to install these libraries, if
    # they explicitly requested doing so by setting the --lib32 command line
    # flag.
    # And even then, we interactively ask them one more time whether they are
    # absolutely sure.
    # In order for that to work, we must reset the ${do_inst_lib32} variable.
    # There are other ways to achieve the same goal. But resetting the
    # variable is the best way to document the intended behavior -- and to
    # allow us to gradually deprecate and then remove the obsolete code.
    do_inst_lib32=
  fi

  echo "WARNING"
  echo
  echo "We no longer recommend that you use this script to install"
  echo "32bit libraries on a 64bit system. Instead, consider using the"
  echo "install-chroot.sh script to help you set up a 32bit environment"
  echo "for building and testing 32bit versions of Chrome."
  echo
  echo "The code for installing 32bit libraries on a 64bit system is"
  echo "unmaintained and might not work with modern versions of Ubuntu"
  echo "or Debian."
  echo
  echo -n "Are you sure you want to proceed (y/N) "
  # Second, explicit confirmation before touching the deprecated 32bit path.
  if yes_no 1; then
    do_inst_lib32=1
  fi
  if test "$do_inst_lib32" != "1"
  then
    exit 0
  fi
# Standard 32bit compatibility libraries
echo "First, installing the limited existing 32-bit support..."
cmp_list="ia32-libs lib32asound2-dev lib32stdc++6 lib32z1
lib32z1-dev libc6-dev-i386 libc6-i386 g++-multilib"
if [ -n "`apt-cache search lib32readline-gplv2-dev 2>/dev/null`" ]; then
cmp_list="${cmp_list} lib32readline-gplv2-dev"
else
cmp_list="${cmp_list} lib32readline5-dev"
fi
sudo apt-get install ${do_quietly-} $cmp_list
tmp=/tmp/install-32bit.$$
trap 'rm -rf "${tmp}"' EXIT INT TERM QUIT
mkdir -p "${tmp}/apt/lists/partial" "${tmp}/cache" "${tmp}/partial"
touch "${tmp}/status"
[ -r /etc/apt/apt.conf ] && cp /etc/apt/apt.conf "${tmp}/apt/"
cat >>"${tmp}/apt/apt.conf" <<EOF
Apt::Architecture "i386";
Dir::Cache "${tmp}/cache";
Dir::Cache::Archives "${tmp}/";
Dir::State::Lists "${tmp}/apt/lists/";
Dir::State::status "${tmp}/status";
EOF
# Download 32bit packages
echo "Computing list of available 32bit packages..."
sudo apt-get -c="${tmp}/apt/apt.conf" update
echo "Downloading available 32bit packages..."
sudo apt-get -c="${tmp}/apt/apt.conf" \
--yes --download-only --force-yes --reinstall install \
${lib_list} ${dbg_list}
# Open packages, remove everything that is not a library, move the
# library to a lib32 directory and package everything as a *.deb file.
echo "Repackaging and installing 32bit packages for use on 64bit systems..."
for i in ${lib_list} ${dbg_list}; do
orig="$(echo "${tmp}/${i}"_*_i386.deb)"
compat="$(echo "${orig}" |
sed -e 's,\(_[^_/]*_\)i386\(.deb\),-ia32\1amd64\2,')"
rm -rf "${tmp}/staging"
msg="$(fakeroot -u sh -exc '
# Unpack 32bit Debian archive
umask 022
mkdir -p "'"${tmp}"'/staging/dpkg/DEBIAN"
cd "'"${tmp}"'/staging"
ar x "'${orig}'"
tar Cfx dpkg data.tar*
tar zCfx dpkg/DEBIAN control.tar.gz
# Create a posix extended regular expression fragment that will
# recognize the includes which have changed. Should be rare,
# will almost always be empty.
includes=`sed -n -e "s/^[0-9a-z]* //g" \
-e "\,usr/include/,p" dpkg/DEBIAN/md5sums |
xargs -n 1 -I FILE /bin/sh -c \
"cmp -s dpkg/FILE /FILE || echo FILE" |
tr "\n" "|" |
sed -e "s,|$,,"`
# If empty, set it to not match anything.
test -z "$includes" && includes="^//"
# Turn the conflicts into an extended RE for removal from the
# Provides line.
conflicts=`sed -n -e "/Conflicts/s/Conflicts: *//;T;s/, */|/g;p" \
dpkg/DEBIAN/control`
# Rename package, change architecture, remove conflicts and dependencies
sed -r -i \
-e "/Package/s/$/-ia32/" \
-e "/Architecture/s/:.*$/: amd64/" \
-e "/Depends/s/:.*/: ia32-libs/" \
-e "/Provides/s/($conflicts)(, *)?//g;T1;s/, *$//;:1" \
-e "/Recommends/d" \
-e "/Conflicts/d" \
dpkg/DEBIAN/control
# Only keep files that live in "lib" directories or the includes
# that have changed.
sed -r -i \
-e "/\/lib64\//d" -e "/\/.?bin\//d" \
-e "\,$includes,s,[ /]include/,&32/,g;s,include/32/,include32/,g" \
-e "s, lib/, lib32/,g" \
-e "s,/lib/,/lib32/,g" \
-e "t;d" \
-e "\,^/usr/lib32/debug\(.*/lib32\),s,^/usr/lib32/debug,/usr/lib/debug," \
dpkg/DEBIAN/md5sums
# Re-run ldconfig after installation/removal
{ echo "#!/bin/sh"; echo "[ \"x\$1\" = xconfigure ]&&ldconfig||:"; } \
>dpkg/DEBIAN/postinst
{ echo "#!/bin/sh"; echo "[ \"x\$1\" = xremove ]&&ldconfig||:"; } \
>dpkg/DEBIAN/postrm
chmod 755 dpkg/DEBIAN/postinst dpkg/DEBIAN/postrm
# Remove any other control files
find dpkg/DEBIAN -mindepth 1 "(" -name control -o -name md5sums -o \
-name postinst -o -name postrm ")" -o -print |
xargs -r rm -rf
# Remove any files/dirs that live outside of "lib" directories,
# or are not in our list of changed includes.
find dpkg -mindepth 1 -regextype posix-extended \
"(" -name DEBIAN -o -name lib -o -regex "dpkg/($includes)" ")" \
-prune -o -print | tac |
xargs -r -n 1 sh -c "rm \$0 2>/dev/null || rmdir \$0 2>/dev/null || : "
find dpkg -name lib64 -o -name bin -o -name "?bin" |
tac | xargs -r rm -rf
# Remove any symbolic links that were broken by the above steps.
find -L dpkg -type l -print | tac | xargs -r rm -rf
# Rename lib to lib32, but keep debug symbols in /usr/lib/debug/usr/lib32
# That is where gdb looks for them.
find dpkg -type d -o -path "*/lib/*" -print |
xargs -r -n 1 sh -c "
i=\$(echo \"\${0}\" |
sed -e s,/lib/,/lib32/,g \
-e s,/usr/lib32/debug\\\\\(.*/lib32\\\\\),/usr/lib/debug\\\\1,);
mkdir -p \"\${i%/*}\";
mv \"\${0}\" \"\${i}\""
# Rename include to include32.
[ -d "dpkg/usr/include" ] && mv "dpkg/usr/include" "dpkg/usr/include32"
# Prune any empty directories
find dpkg -type d | tac | xargs -r -n 1 rmdir 2>/dev/null || :
# Create our own Debian package
cd ..
dpkg --build staging/dpkg .' 2>&1)"
compat="$(eval echo $(echo "${compat}" |
sed -e 's,_[^_/]*_amd64.deb,_*_amd64.deb,'))"
[ -r "${compat}" ] || {
echo "${msg}" >&2
echo "Failed to build new Debian archive!" >&2
exit 1
}
msg="$(sudo dpkg -i "${compat}" 2>&1)" && {
echo "Installed ${compat##*/}"
} || {
# echo "${msg}" >&2
echo "Skipped ${compat##*/}"
}
done
  # Add symbolic links for developing 32bit code
  echo "Adding missing symbolic links, enabling 32bit code development..."
  # For every versioned lib*.so.N in the 32bit lib dirs, create the
  # unversioned lib*.so link (pointing at the highest available version)
  # unless one already exists.
  for i in $(find /lib32 /usr/lib32 -maxdepth 1 -name \*.so.\* |
             sed -e 's/[.]so[.][0-9].*/.so/' |
             sort -u); do
    [ "x${i##*/}" = "xld-linux.so" ] && continue
    [ -r "$i" ] && continue
    j="$(ls "$i."* | sed -e 's/.*[.]so[.]\([^.]*\)$/\1/;t;d' |
         sort -n | tail -n 1)"
    [ -r "$i.$j" ] || continue
    sudo ln -s "${i##*/}.$j" "$i"
  done
fi
|
<reponame>TyAiTi/ReactJS
import { createGlobalStyle} from 'styled-components'

// App-wide stylesheet injected when <GlobalStyle /> is rendered at the root:
// pink page background and zeroed default body margins/padding.
export const GlobalStyle = createGlobalStyle`
body{
background-color: pink;
margin: 0px;
padding: 0px;
}
`
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.messages = exports.ruleName = undefined;
exports.default = function (expectation) {
return function (root, result) {
var validOptions = _stylelint.utils.validateOptions(result, ruleName, {
actual: expectation,
possible: ["always", "never"]
});
if (!validOptions) {
return;
}
var comments = (0, _utils.findCommentsInRaws)(root.source.input.css);
comments.forEach(function (comment) {
// Only process // comments
if (comment.type !== "double-slash") {
return;
}
// if it's `//` - no warning whatsoever; if `// ` - then trailing
// whitespace rule will govern this
if (comment.text === "") {
return;
}
var message = void 0;
if (expectation === "never" && comment.raws.left !== "") {
message = messages.rejected;
} else if (comment.raws.left === "" && expectation === "always") {
message = messages.expected;
} else {
return;
}
_stylelint.utils.report({
message: message,
node: root,
index: comment.source.start + comment.raws.startToken.length,
result: result,
ruleName: ruleName
});
});
};
};
var _utils = require("../../utils");
var _stylelint = require("stylelint");
var ruleName = exports.ruleName = (0, _utils.namespace)("double-slash-comment-whitespace-inside");
var messages = exports.messages = _stylelint.utils.ruleMessages(ruleName, {
expected: "Expected a space after //",
rejected: "Unexpected space after //"
}); |
<reponame>EvasiveXkiller/youtube-moosick
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Track = exports.AlbumURLHeader = exports.AlbumURL = void 0;
const item_js_1 = require("../../blocks/item.js");
class AlbumURL extends item_js_1.Item {
}
exports.AlbumURL = AlbumURL;
class AlbumURLHeader extends item_js_1.Item {
}
exports.AlbumURLHeader = AlbumURLHeader;
class Track extends item_js_1.Item {
}
exports.Track = Track;
//# sourceMappingURL=albumURL.js.map |
#!/bin/bash
# Convenience wrapper: run the weather analysis with -j sma in verbose mode.
./weather_analysis.sh -j sma -v
|
<filename>public/lib/js/ImgSave.js
// Finalize the editor session: hide the per-form warning badges and tool
// menu, render the work area to an image in a new tab, trigger the mapped
// background download, then navigate back to the index page.
function saveExit(){
    // Hide every per-form warning badge. Fixes: `x` was an implicit global;
    // also the original if/else branches were identical (both set
    // visibility hidden), so the hasClass('fraught') test was redundant.
    for(var x = 0; x < reciveObj.forms.length; x++){
        $(`.aviso${reciveObj.forms[x].name}`).css('visibility','hidden')
    }
    $('span.active').removeClass('active')
    $('.save').addClass('active')
    $('.menuTools').css('visibility','hidden')
    // Render the work area and open the resulting data URL in a new tab.
    html2canvas($(".marginView")[0], {
        onrendered: function(canvas) {
            var img = canvas.toDataURL()
            window.open(img);
        }
    });
    // Map the chosen background image to its downloadable counterpart
    // ('no' means there is no mapping). `link` was also an implicit global.
    var link = ''
    switch(reciveObj.img){
        case 'prefab4.png':
            link = 'hack1.png';
            break;
        default:
            link = 'no'
    }
    if(link != 'no'){
        saveCanva('images/hack/'+link)
    }else{
        callAlert('danger','ERRO: IMG BUFFEDED IS FAIL 752: <br> Por favor, verifique se a todos os canvas possuím images.')
    }
    alert('salvado...')
    window.location.href = './'
}
// Draws the image at `url` onto an appended canvas sized like .marginView,
// then simulates a click on a temporary <a download> tag so the browser
// saves the canvas as "image.png".
function saveCanva(url) {
    $('body').append(`
<canvas id="tela" style="display=none" width="${$('.marginView').width()}" height="${$('.marginView').height()}"></canvas>
`)
    var tela = document.getElementById("tela");
    var c = tela.getContext("2d");
    var imagem = new Image();
    imagem.src = url;
    imagem.onload = function(){
        tela.getContext("2d").drawImage(this,0,0,$('.marginView').width(),$('.marginView').height());
        var dURL = tela.toDataURL("image/png"),
        aTag = document.createElement("a");
        aTag.download = "image.png";
        aTag.href = dURL;
        aTag.textContent = "click to download";
        document.body.appendChild(aTag);
        aTag.click();
        aTag.parentNode.removeChild(aTag);
    };
    // NOTE(review): this success alert fires immediately, before imagem.onload
    // runs, so it does not actually confirm the download. Also `style=display=none`
    // above is malformed inline CSS (should be `display:none`) and `var c` is
    // unused. Left untouched here; flagged for follow-up.
    callAlert('success','No final, tudo deu certo =D')
}
#!/bin/bash
# Smoke-test driver for RsaCtfTool: runs every supported attack against the
# bundled example keys/ciphertexts, printing a bold header before each test.
# Results are inspected by eye; this script does not assert on output.
cd "$(dirname "$0")"
clear
echo -e "\033[1mTest factordb expression parsing\033[0m"
./RsaCtfTool.py --publickey "examples/factordb_parse.pub" --private --attack factordb
echo -e "\033[1m\nTest noveltyprimes\033[0m"
./RsaCtfTool.py --publickey examples/elite_primes.pub --private --attack noveltyprimes
echo -e "\033[1m\nTest small_q\033[0m"
./RsaCtfTool.py --publickey examples/small_q.pub --private --uncipherfile examples/small_q.cipher --attack smallq
echo -e "\033[1m\nTest Mersenne Primes\033[0m"
./RsaCtfTool.py --private -e 0x10001 -n 0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff80000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001 --attack mersenne_primes
echo -e "\033[1m\nTest wiener\033[0m"
./RsaCtfTool.py --publickey examples/wiener.pub --private --uncipherfile examples/wiener.cipher --attack wiener
echo -e "\033[1m\nTest Boneh Durfee\033[0m"
./RsaCtfTool.py --publickey examples/wiener.pub --private --uncipherfile examples/wiener.cipher --attack boneh_durfee
echo -e "\033[1m\nTest primefac\033[0m"
./RsaCtfTool.py --publickey examples/primefac.pub --private --attack primefac
echo -e "\033[1m\nTest commonfactors\033[0m"
./RsaCtfTool.py --publickey "examples/commonfactor?.pub" --private --attack commonfactors
echo -e "\033[1m\nTest fermat\033[0m"
./RsaCtfTool.py --publickey examples/close_primes.pub --private --uncipherfile examples/close_primes.cipher --attack fermat
echo -e "\033[1m\nTest fermat2\033[0m"
./RsaCtfTool.py --publickey examples/fermat.pub --private --attack fermat
echo -e "\033[1m\nTest pastctfprimes\033[0m"
./RsaCtfTool.py --publickey examples/pastctfprimes.pub --private --attack pastctfprimes
echo -e "\033[1m\nTest SIQS\033[0m"
./RsaCtfTool.py --publickey examples/siqs.pub --private --attack siqs
# The ECM attacks can run long; both are bounded by --timeout 60.
echo -e "\033[1m\nTest ECM\033[0m"
./RsaCtfTool.py --publickey examples/ecm_method.pub --private --ecmdigits 25 --attack ecm --timeout 60
echo -e "\033[1m\nTest ECM2\033[0m"
./RsaCtfTool.py -n 14641034851154010900546719241402474912998133209474218975103977449764205791710698412984067810848509509669017831054155506105922179074286929418416328797379636196613023210067141695123691351917498467761961980966631958692894027223505926821780581042313171803091956255639968110368314924456998367348008686435826036480738828760312467761150839006456972383 -e 65537 --uncipher 7102577393434866594929140550804968099111271800384955683330956013020579564684516163830573468073604865935034522944441894535695787080676107364035121171758895218132464499398807752144702697548021940878072503062685829101838944413876346837812265739970980202827485238414586892442822429233004808821082551675699702413952211939387589361654209039260795229 --attack ecm2 --timeout 60
echo -e "\033[1m\nTest createpub\033[0m"
./RsaCtfTool.py --createpub -n 8616460799 -e 65537
# Round-trip: generate a public key, then feed it straight back in.
echo -e "\033[1m\nCreatepub into Crack feedback\033[0m"
./RsaCtfTool.py --createpub -n 163325259729739139586456854939342071588766536976661696628405612100543978684304953042431845499808366612030757037530278155957389217094639917994417350499882225626580260012564702898468467277918937337494297292631474713546289580689715170963879872522418640251986734692138838546500522994170062961577034037699354013013 -e 65537 > /tmp/crackme.txt
./RsaCtfTool.py --publickey /tmp/crackme.txt --private
rm -f /tmp/crackme.txt
echo -e "\033[1m\nTest hastads\033[0m"
./RsaCtfTool.py --publickey "examples/hastads01.pub,examples/hastads02.pub,examples/hastads03.pub" --uncipher 261345950255088824199206969589297492768083568554363001807292202086148198540785875067889853750126065910869378059825972054500409296763768604135988881188967875126819737816598484392562403375391722914907856816865871091726511596620751615512183772327351299941365151995536802718357319233050365556244882929796558270337,147535246350781145803699087910221608128508531245679654307942476916759248311896958780799558399204686458919290159543753966699893006016413718139713809296129796521671806205375133127498854375392596658549807278970596547851946732056260825231169253750741639904613590541946015782167836188510987545893121474698400398826,633230627388596886579908367739501184580838393691617645602928172655297372145912724695988151441728614868603479196153916968285656992175356066846340327304330216410957123875304589208458268694616526607064173015876523386638026821701609498528415875970074497028482884675279736968611005756588082906398954547838170886958 --attack hastads
echo -e "\033[1m\nTest informations output (--dumpkey --ext)\033[0m"
./RsaCtfTool.py --publickey "examples/factordb_parse.pub" --private --attack factordb --dumpkey --ext
echo -e "\033[1m\nTest unciphering multiple files\033[0m"
./RsaCtfTool.py --publickey examples/primefac.pub --uncipherfile examples/cipher1,examples/cipher2,examples/cipher3
echo -e "\033[1m\nTest unciphering single file with multiple keys\033[0m"
./RsaCtfTool.py --publickey examples/boneh_durfee.pub,examples/primefac.pub --uncipherfile examples/cipher1
echo -e "\033[1m\nTest cube root\033[0m"
./RsaCtfTool.py --uncipher 2205316413931134031074603746928247799030155221252519872650101242908540609117693035883827878696406295617513907962419726541451312273821810017858485722109359971259158071688912076249144203043097720816270550387459717116098817458584146690177125 -e 3 -n 29331922499794985782735976045591164936683059380558950386560160105740343201513369939006307531165922708949619162698623675349030430859547825708994708321803705309459438099340427770580064400911431856656901982789948285309956111848686906152664473350940486507451771223435835260168971210087470894448460745593956840586530527915802541450092946574694809584880896601317519794442862977471129319781313161842056501715040555964011899589002863730868679527184420789010551475067862907739054966183120621407246398518098981106431219207697870293412176440482900183550467375190239898455201170831410460483829448603477361305838743852756938687673 --attack cube_root
echo -e "\033[1m\nTest Ekoparty ctf\033[0m"
./RsaCtfTool.py --private -e 65537 -n 79832181757332818552764610761349592984614744432279135328398999801627880283610900361281249973175805069916210179560506497075132524902086881120372213626641879468491936860976686933630869673826972619938321951599146744807653301076026577949579618331502776303983485566046485431039541708467141408260220098592761245010678592347501894176269580510459729633673468068467144199744563731826362102608811033400887813754780282628099443490170016087838606998017490456601315802448567772411623826281747245660954245413781519794295336197555688543537992197142258053220453757666537840276416475602759374950715283890232230741542737319569819793988431443
import simplejson
class QueryHandler:
    """In-memory store of dict records, each expected to carry an 'id' key."""

    def __init__(self):
        # Flat list of record dicts, in insertion order.
        self.records = []

    def add_record(self, record):
        """Append a record to the store."""
        self.records.append(record)

    def update_record(self, record_id, new_data):
        """Merge new_data into the first record whose 'id' equals record_id."""
        target = next((r for r in self.records if r['id'] == record_id), None)
        if target is not None:
            target.update(new_data)

    def delete_record(self, record_id):
        """Remove every record whose 'id' equals record_id."""
        self.records = [r for r in self.records if r['id'] != record_id]

    def get_records(self, criteria):
        """Return the records matching every key/value pair in criteria.

        A record matches when record.get(key) == value for all criteria items,
        so an empty criteria dict matches everything.
        """
        return [
            r for r in self.records
            if all(r.get(key) == value for key, value in criteria.items())
        ]
#!/bin/bash
# Start swayidle (unless one is already running) so the screen locks after
# an idle timeout and before sleep. $1 is the directory that holds the
# fallback lock-screen image.

# Exit code of pidof is 1 if the process isn't running
pidof swayidle
IDLE_RUNNING=$?
echo $IDLE_RUNNING
if [ $IDLE_RUNNING -eq 1 ]; then
  # Test swaylock to see if it's really "swaylock-effects". Will fail with
  # exit code of 1 if `--screenshots` doesn't exist, 0 if it does.
  swaylock --screenshots -v
  STATUS=$?
  # Idle timeout in seconds before locking.
  TIMEOUT=600
  # Plain-swaylock fallback. NOTE(review): the double extension
  # "frozen_husky.jpg.png" looks suspicious — confirm the actual filename.
  SWAYLOCK_CMD="swaylock -f -i $1/frozen_husky.jpg.png"
  if [ $STATUS -eq 0 ]; then
    # swaylock-effects is available: blurred screenshot with clock overlay.
    SWAYLOCK_CMD="swaylock -f --screenshots --effect-blur 4x5 \
--clock --indicator --timestr %H:%M --datestr %F"
  fi
  swayidle \
    -w timeout $TIMEOUT "$SWAYLOCK_CMD" \
    before-sleep "$SWAYLOCK_CMD"
fi
|
<!DOCTYPE html>
<html>
<head>
<title>Customer Profile</title>
</head>
<body>
<!-- Static demo page: all profile values below are hard-coded placeholders. -->
<h1>Customer Profile</h1>
<div>
<p><strong>Name:</strong> John Doe</p>
<p><strong>Age:</strong> 22</p>
<p><strong>Address:</strong> 123 Main St, Anytown, USA</p>
<p><strong>Phone:</strong> (123) 456-7890</p>
<p><strong>Email:</strong> john.doe@example.com</p>
</div>
</body>
</html>
import express from 'express';
import '@babel/polyfill';
// Consistency fix: use an ES-module import like the rest of this file
// instead of the lone CommonJS `const bodyParser = require('body-parser')`
// (the file is transpiled by Babel, so interop is handled).
import bodyParser from 'body-parser';
import cors from 'cors';
import logger from './Utils/logger';
import routes from './Routes/routes';
import path from 'path';
import databaseConnectionHandler from './DB/databaseConnectionHandler';

// Directory of the pre-built front-end bundle served as static assets.
const staticPath = path.join(__dirname, '/build')

const app = express();
app.use(cors());

// Fall back to 5000 when the environment does not provide a PORT.
const port = process.env.PORT || 5000;

// Body parsing for form-encoded and JSON payloads, then static assets.
app.use(bodyParser.urlencoded({extended: false}));
app.use(bodyParser.json());
app.use(express.static(staticPath));

routes(app);
// NOTE(review): presumably verifies DB connectivity at startup — see
// DB/databaseConnectionHandler for the actual behavior.
databaseConnectionHandler.testConnection();

app.listen(port, () => {
  console.log(`Serving static files from ${staticPath}`);
  logger.info(`Listening on port ${port}`);
})
package main
import (
"fmt"
"net/http"
"net/url"
"io/ioutil"
)
// main geocodes a fixed address through the Google Maps geocoding HTTP API
// and prints the raw JSON response body.
func main() {
	// Create an HTTP request and format it
	req, err := http.NewRequest("GET", fmt.Sprintf("http://maps.googleapis.com/maps/api/geocode/json?address=%s",
		url.QueryEscape("1600 Amphitheatre Parkway, Mountain View, CA")), nil)
	if err != nil {
		// Bug fix: the original called panic(err.Error), which panics with
		// the *method value* (a function), not the message. Panic with the
		// error itself so the failure is readable.
		panic(err)
	}

	// Create our client
	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// Read the response
	respBody, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(respBody))
}
/*
* Copyright (C) 2017-2018 by Godlike
* This code is licensed under the MIT license (MIT)
* (http://opensource.org/licenses/MIT)
*/
#ifndef EPONA_HYPER_PLANE_HPP
#define EPONA_HYPER_PLANE_HPP
#include <glm/glm.hpp>
namespace epona
{
/**
* @brief HyperPlane calculation algorithm
*/
class HyperPlane
{
public:
    HyperPlane() = default;

    /**
     * @brief Constructs a plane in Hessian Normal Form
     *
     * Allows for the normal direction correction
     * using below the hyperplane point if one is specified.
     * @param[in] normal plane's normal vector of unit length
     * @param[in] point point on the plane
     * @param[in] below point below the plane, allows for the normal direction correction
     */
    HyperPlane(glm::vec3 const& normal,
        glm::vec3 const& point,
        glm::vec3 const* below = nullptr
    );

    /**
     * @brief Constructs a plane in Hessian Normal Form
     *
     * Constructs a hyperplane from the given vertices
     * and allows for the normal direction correction
     * using below the hyperplane point if one is specified.
     * @param[in] a point on the plane
     * @param[in] b point on the plane
     * @param[in] c point on the plane
     * @param[in] below point below the plane, allows for the normal direction correction
     */
    HyperPlane(glm::vec3 const& a,
        glm::vec3 const& b,
        glm::vec3 const& c,
        glm::vec3 const* below = nullptr
    );

    /**
     * @brief Constructs a plane in Hessian Normal Form
     *
     * Constructs a hyperplane from the given vertices
     * and allows for the normal direction correction
     * using below the hyperplane point if one is specified.
     * @param[in] vertices points on the plane (one vertex per matrix column/row as defined by the implementation)
     * @param[in] below point below the plane, allows for the normal direction correction
     */
    HyperPlane(
        glm::mat3 const& vertices,
        glm::vec3 const* below = nullptr
    );

    /** @brief Returns a point on the plane (was a malformed doxygen tag: "brief" without '@') */
    glm::vec3 const& GetPoint() const;

    /** @brief Returns the plane normal vector */
    glm::vec3 const& GetNormal() const;

    /** @brief Returns the distance from the plane to the origin */
    float GetDistance() const;

    /** @brief Sets the plane normal vector */
    void SetNormal(glm::vec3 const& normal);

    /** @brief Sets a point on the plane */
    void SetPoint(glm::vec3 const& point);

    /**
     * @brief Calculates absolute distance from the plane to a point
     * @param[in] point the point of interest
     * @return absolute distance from the point to the plane
     */
    float Distance(glm::vec3 const& point) const;

    /**
     * @brief Calculates signed distance from the plane to a point
     * @param[in] point the point of interest
     * @return signed distance from the plane to the point
     */
    float SignedDistance(glm::vec3 const& point) const;

    /**
     * @brief Calculates whether a ray and the plane are intersecting
     * @param[in] rayNormal ray direction vector
     * @param[in] rayPoint point on the ray
     * @param[out] resultPoint intersection point
     * @return @c true if there is an intersection point, @c false otherwise
     */
    bool RayIntersection(
        glm::vec3 const& rayNormal, glm::vec3 const& rayPoint, glm::vec3& resultPoint
    ) const;

    /**
     * @brief Calculates whether a line segment and the plane are intersecting
     * @param[in] lineStart start of the line segment
     * @param[in] lineEnd end of the line segment
     * @param[out] resultPoint intersection point
     * @return @c true if there is an intersection point, @c false otherwise
     */
    bool LineSegmentIntersection(
        glm::vec3 const& lineStart, glm::vec3 const& lineEnd, glm::vec3& resultPoint
    ) const;

    /**
     * @brief Calculates closest point on the plane to a given point
     *
     * @param point point of interest
     *
     * @return closest point on the plane
     */
    glm::vec3 ClosestPoint(glm::vec3 const& point) const;

private:
    glm::vec3 m_normal;   // plane normal (unit length per constructor contract)
    glm::vec3 m_point;    // a point lying on the plane
    float m_distance;     // distance from the plane to the origin
};
} // namespace epona
#endif // EPONA_HYPER_PLANE_HPP
|
<gh_stars>1-10
"use strict";
/**
* @fileoverview Marker
* @author <EMAIL>
*/
Object.defineProperty(exports, "__esModule", { value: true });
var tslib_1 = require("tslib");
var util_1 = require("@antv/util");
var path_2_absolute_1 = require("@antv/path-util/lib/path-2-absolute");
var base_1 = require("./base");
var util_2 = require("../util/util");
var draw_1 = require("../util/draw");
// Built-in symbol path factories. Each function returns an absolute
// SVG-style path (array of command segments) for a shape centered at
// (x, y) with "radius" r.
var Symbols = {
    // circle: two half-circle arcs
    circle: function (x, y, r) {
        return [
            ['M', x - r, y],
            ['A', r, r, 0, 1, 0, x + r, y],
            ['A', r, r, 0, 1, 0, x - r, y],
        ];
    },
    // square (axis-aligned, side 2r)
    square: function (x, y, r) {
        return [['M', x - r, y - r], ['L', x + r, y - r], ['L', x + r, y + r], ['L', x - r, y + r], ['Z']];
    },
    // diamond (square rotated 45 degrees)
    diamond: function (x, y, r) {
        return [['M', x - r, y], ['L', x, y - r], ['L', x + r, y], ['L', x, y + r], ['Z']];
    },
    // triangle, apex pointing up; diffY = r * sin(60 deg) keeps it centered
    triangle: function (x, y, r) {
        var diffY = r * Math.sin((1 / 3) * Math.PI);
        return [['M', x - r, y + diffY], ['L', x, y - diffY], ['L', x + r, y + diffY], ['Z']];
    },
    // inverted triangle, apex pointing down
    'triangle-down': function (x, y, r) {
        var diffY = r * Math.sin((1 / 3) * Math.PI);
        return [['M', x - r, y - diffY], ['L', x + r, y - diffY], ['L', x, y + diffY], ['Z']];
    },
};
// Marker shape: draws one of the built-in symbols (or a caller-supplied
// symbol callback) at (x, y) with radius r.
var Marker = /** @class */ (function (_super) {
    tslib_1.__extends(Marker, _super);
    function Marker() {
        return _super !== null && _super.apply(this, arguments) || this;
    }
    Marker.prototype.initAttrs = function (attrs) {
        this._resetParamsCache();
    };
    // Reset the cached parameters used when drawing the path.
    Marker.prototype._resetParamsCache = function () {
        // Caching accelerates drawing, hit-testing and path computation;
        // entries are computed lazily on first use.
        this.set('paramsCache', {}); // clear the cache
    };
    // On attribute change, invalidate the cache if the attribute affects the path.
    Marker.prototype.onAttrChange = function (name, value, originValue) {
        _super.prototype.onAttrChange.call(this, name, value, originValue);
        if (['symbol', 'x', 'y', 'r', 'radius'].indexOf(name) !== -1) {
            // A path-related attribute changed: drop the cached parameters.
            this._resetParamsCache();
        }
    };
    // Hit-testing uses the bounding box only, so there is no need to
    // override isInStrokeOrPath.
    Marker.prototype.isOnlyHitBox = function () {
        return true;
    };
    Marker.prototype._getR = function (attrs) {
        // Accept both `r` and `radius`; `r` is the preferred spelling.
        return util_1.isNil(attrs.r) ? attrs.radius : attrs.r;
    };
    Marker.prototype._getPath = function () {
        var attrs = this.attr();
        var x = attrs.x, y = attrs.y;
        var symbol = attrs.symbol || 'circle';
        var r = this._getR(attrs);
        var method;
        if (util_2.isFunction(symbol)) {
            method = symbol;
        }
        else {
            // Built-in symbol paths are already absolute (see Symbols above).
            method = Marker.Symbols[symbol];
        }
        // BUG FIX: validate the symbol BEFORE invoking it. Previously the
        // unknown-symbol branch called `method(x, y, r)` first, so an
        // unsupported name crashed with "method is not a function" and the
        // warning below was unreachable.
        if (!method) {
            console.warn(symbol + " marker is not supported.");
            return null;
        }
        var path = method(x, y, r);
        if (util_2.isFunction(symbol)) {
            // Custom symbols may return relative commands; normalize to absolute.
            path = path_2_absolute_1.default(path);
        }
        return path;
    };
    Marker.prototype.createPath = function (context) {
        var path = this._getPath();
        var paramsCache = this.get('paramsCache');
        draw_1.drawPath(this, context, { path: path }, paramsCache);
    };
    Marker.Symbols = Symbols;
    return Marker;
}(base_1.default));
exports.default = Marker;
//# sourceMappingURL=marker.js.map |
<reponame>lgoldstein/communitychest
package net.community.chest.rrd4j.common.graph;
import java.awt.Paint;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.LinkedList;
import java.util.NoSuchElementException;
import net.community.chest.dom.DOMUtils;
import net.community.chest.dom.transform.XmlConvertible;
import net.community.chest.lang.ExceptionUtil;
import net.community.chest.rrd4j.common.RrdUtils;
import net.community.chest.rrd4j.common.proxy.RrdGraphDefReflectiveProxy;
import net.community.chest.rrd4j.common.proxy.RrdGraphDefFieldsAccessor;
import net.community.chest.util.datetime.TimeUnits;
import org.rrd4j.graph.RrdGraphDef;
import org.w3c.dom.DOMException;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
/**
* <P>Copyright 2008 as per GPLv2</P>
*
* @author <NAME>.
* @since Jan 14, 2008 1:14:37 PM
*/
public class RrdGraphDefExt extends RrdGraphDef implements XmlConvertible<RrdGraphDefExt> {
    public RrdGraphDefExt ()
    {
        super();
    }

    /**
     * Reads a (possibly non-public) field of an {@link RrdGraphDef} via the
     * reflective accessor — RrdGraphDef itself exposes no getters for these.
     */
    public static final Object getFieldValue (RrdGraphDef g, String name) throws Exception
    {
        return RrdGraphDefFieldsAccessor.DEFAULT.getFieldValue(g, name);
    }

    // Instance convenience wrapper: converts checked reflection failures
    // into unchecked exceptions.
    protected Object getFieldValue (String name) throws RuntimeException
    {
        try
        {
            return getFieldValue(this, name);
        }
        catch(Exception e)
        {
            throw ExceptionUtil.toRuntimeException(e);
        }
    }

    // Typed variant of getFieldValue; throws ClassCastException if the field
    // value is not of the requested class.
    protected <T> T getCastFieldValue (String name, Class<T> objClass) throws RuntimeException
    {
        return objClass.cast(getFieldValue(name));
    }

    // --- Reflective getters for superclass state (field names must match
    // --- the RrdGraphDef implementation they are read from).
    public static final String FILENAME_ATTR="filename";
    public String getFilename ()
    {
        return getCastFieldValue(FILENAME_ATTR, String.class);
    }
    public static final String POOL_USED_ATTR="poolUsed";
    public boolean isPoolUsed ()
    {
        return getCastFieldValue(POOL_USED_ATTR, Boolean.class).booleanValue();
    }
    public static final String START_TIME_ATTR="startTime";
    public long getStartTime ()
    {
        return getCastFieldValue(START_TIME_ATTR, Long.class).longValue();
    }

    // Convenience overloads converting java.util time types to RRD time.
    public void setStartTime (Date time)
    {
        setStartTime(RrdUtils.toRrdTime(time));
    }
    public void setStartTime (Calendar time)
    {
        setStartTime(RrdUtils.toRrdTime(time));
    }
    public static final String END_TIME_ATTR="endTime";
    public long getEndTime ()
    {
        return getCastFieldValue(END_TIME_ATTR, Long.class).longValue();
    }
    public void setEndTime (Date time)
    {
        setEndTime(RrdUtils.toRrdTime(time));
    }
    public void setEndTime (Calendar time)
    {
        setEndTime(RrdUtils.toRrdTime(time));
    }
    public static final String STEP_ATTR="step";
    public long getStep ()
    {
        return getCastFieldValue(STEP_ATTR, Long.class).longValue();
    }

    // Sets the step from a (unit, value) pair.
    // NOTE(review): converts to milliseconds and then through
    // RrdUtils.toRrdTime — confirm toRrdTime expects milliseconds here,
    // since elsewhere it is applied to Date/Calendar instants.
    public void setStep (TimeUnits u, long stepVal)
    {
        final long val=u.getMilisecondValue(stepVal);
        setStep(RrdUtils.toRrdTime(val));
    }
    public void setTimeSpan (Date startTime, Date endTime)
    {
        setTimeSpan(RrdUtils.toRrdTime(startTime), RrdUtils.toRrdTime(endTime));
    }
    public void setTimeSpan (Calendar startTime, Calendar endTime)
    {
        setTimeSpan(RrdUtils.toRrdTime(startTime), RrdUtils.toRrdTime(endTime));
    }

    // Sets a color by its tag enum rather than the raw tag value.
    public void setColor (GraphColorTag colorTag, Paint color)
    {
        setColor(colorTag.getTagValue(), color);
    }

    // --- Rendering-element helpers: each pair adds a typed extension object
    // --- and parses the same element from XML.
    public static final String VRULE_ATTR="vrule";
    public <VR extends VRuleExt> VR vrule (VR vr)
    {
        if (vr != null)
            vrule(vr.getTimestamp(), vr.getColor(), vr.getLegend(), vr.getWidth());
        return vr;
    }
    public VRuleExt vrule (Element elem) throws Exception
    {
        return vrule(new VRuleExt(elem));
    }
    public static final String HRULE_ATTR="hrule";
    public <HR extends HRuleExt> HR hrule (HR hr)
    {
        if (hr != null)
            hrule(hr.getValue(), hr.getColor(), hr.getLegend(), hr.getWidth());
        return hr;
    }
    public HRuleExt hrule (Element elem) throws Exception
    {
        return hrule(new HRuleExt(elem));
    }
    public <L extends LineExt> L line (L l)
    {
        if (l != null)
            line(l.getSrcName(), l.getColor(), l.getLegend(), l.getWidth());
        return l;
    }
    public static final String LINE_ATTR="line";
    public LineExt line (Element elem) throws Exception
    {
        return line(new LineExt(elem));
    }

    // --- Datasource overloads, one per source flavor (computed, static,
    // --- and plain RRD definitions).
    public <D extends CDefExt> D datasource (D def)
    {
        if (def != null)
            datasource(def.getName(), def.getRpnExpression());
        return def;
    }
    public <D extends SDefExt> D datasource (D def)
    {
        if (def != null)
            datasource(def.getName(), def.getDefName(), def.getConsolFun());
        return def;
    }
    public <D extends DefExt> D datasource (D def)
    {
        if (def != null)
            datasource(def.getName(), def.getRrdPath(), def.getDsName(), def.getConsolFun(), def.getBackend());
        return def;
    }

    // Runtime dispatch on the datasource subtype.
    // NOTE(review): the instanceof order matters if these classes form a
    // hierarchy — more specific types must be tested first; confirm against
    // the CDefExt/SDefExt/DefExt declarations.
    public <S extends SourceExt> S addDatasource (S ds) throws NoSuchElementException
    {
        if (null == ds)
            return ds;
        if (ds instanceof CDefExt)
            datasource((CDefExt) ds);
        else if (ds instanceof SDefExt)
            datasource((SDefExt) ds);
        else if (ds instanceof DefExt)
            datasource((DefExt) ds);
        else
            throw new NoSuchElementException("addDatasource(" + ds + ") unknown class: " + ds.getClass().getName());
        return ds;
    }
    public static final String DEF_ELEM_NAME="Def",
        SDEF_ELEM_NAME="SDef",
        CDEF_ELEM_NAME="CDef";

    // True when the tag name denotes one of the built-in datasource elements.
    public static final boolean isDefaultDatasourceElementName (String tagName)
    {
        return CDEF_ELEM_NAME.equalsIgnoreCase(tagName)
            || SDEF_ELEM_NAME.equalsIgnoreCase(tagName)
            || DEF_ELEM_NAME.equalsIgnoreCase(tagName)
            ;
    }

    // Overridable hook so subclasses can recognize extra datasource elements.
    public boolean isDatasourceElementName (String tagName)
    {
        return isDefaultDatasourceElementName(tagName);
    }

    // Parses a datasource element by its tag name and registers it.
    public SourceExt datasource (Element elem) throws Exception
    {
        final String tagName=elem.getTagName();
        if (CDEF_ELEM_NAME.equalsIgnoreCase(tagName))
            return datasource(new CDefExt(elem));
        else if (SDEF_ELEM_NAME.equalsIgnoreCase(tagName))
            return datasource(new SDefExt(elem));
        else if (DEF_ELEM_NAME.equalsIgnoreCase(tagName))
            return datasource(new DefExt(elem));
        throw new NoSuchElementException("datasource(" + tagName + ") unknown data source type");
    }
    public static final String STACK_ATTR="stack";
    public <S extends StackExt> S stack (S s)
    {
        if (s != null)
            stack(s.getSrcName(), s.getColor(), s.getLegend());
        return s;
    }
    public StackExt stack (Element elem) throws Exception
    {
        return (null == elem) ? null : stack(new StackExt(elem));
    }
    public static final String AREA_ATTR="area";
    public <A extends AreaExt> A area (A a)
    {
        if (a != null)
            area(a.getSrcName(), a.getColor(), a.getLegend());
        return a;
    }
    public AreaExt area (Element elem) throws Exception
    {
        return (null == elem) ? null : area(new AreaExt(elem));
    }
    public static final String TIME_AXIS_ATTR="TimeAxis";
    public <A extends TimeAxisExt> A timeAxis (A a)
    {
        if (a != null)
            setTimeAxis(a.getMinorUnit(), a.getMinorUnitCount(), a.getMajorUnit(), a.getMajorUnitCount(), a.getLabelUnit(), a.getLabelUnitCount(), a.getLabelSpan(), a.getFormat());
        return a;
    }
    public TimeAxisExt timeAxis (Element elem) throws Exception
    {
        return (null == elem) ? null : timeAxis(new TimeAxisExt(elem));
    }

    // Extension hook for subclasses; any returned non-null object is added to
    // the addRenderingElements returned collection. The base implementation
    // always throws.
    protected Object handleUnknownElement (Element elem, String tagName) throws Exception
    {
        // just so compiler does not complain about unreferenced parameters
        if ((null == elem) || (null == tagName) || (tagName.length() <= 0))
            throw new DOMException(DOMException.INVALID_STATE_ERR, "handleUnknownElement(" + tagName + ") incomplete arguments");
        throw new UnsupportedOperationException("handleUnknownElement(" + tagName + ")");
    }

    // Dispatches a single XML element to the matching rendering helper.
    public Object addRenderingElement (final Element elem) throws Exception
    {
        if (null == elem)
            return null;
        final String tagName=elem.getTagName();
        if (isDatasourceElementName(tagName))
            return datasource(elem);
        else if (LINE_ATTR.equalsIgnoreCase(tagName))
            return line(elem);
        else if (HRULE_ATTR.equalsIgnoreCase(tagName))
            return hrule(elem);
        else if (VRULE_ATTR.equalsIgnoreCase(tagName))
            return vrule(elem);
        else if (STACK_ATTR.equalsIgnoreCase(tagName))
            return stack(elem);
        else if (AREA_ATTR.equalsIgnoreCase(tagName))
            return area(elem);
        else if (TIME_AXIS_ATTR.equalsIgnoreCase(tagName))
            return timeAxis(elem);
        else
            return handleUnknownElement(elem, tagName);
    }

    // Members can be datasource(s), line(s), stack(s), area(s), etc.
    // Returns null when nothing was added (empty/null input or all skipped).
    public Collection<?> addRenderingElements (final Collection<? extends Element> nodes) throws Exception
    {
        final int numNodes=(null == nodes) ? 0 : nodes.size();
        if (numNodes <= 0)
            return null;
        Collection<Object> ret=null;
        for (final Element elem : nodes)
        {
            final Object o=(null == elem) ? null : addRenderingElement(elem);
            if (null == o)
                continue;
            if (null == ret)
                ret = new LinkedList<Object>();
            ret.add(o);
        }
        return ret;
    }

    // Convenience overload: processes all child elements of the given root.
    public Collection<?> addRenderingElements (Element root) throws Exception
    {
        return addRenderingElements(DOMUtils.extractAllNodes(Element.class, root, Node.ELEMENT_NODE));
    }
    public static final String VALUE_AXIS_ATTR="valueAxis",
        COLOR_ATTR="color";
    /*
     * @see net.community.chest.dom.transform.XmlConvertible#fromXml(org.w3c.dom.Element)
     */
    public RrdGraphDefExt fromXml (Element elem) throws Exception
    {
        if (RrdGraphDefReflectiveProxy.DEFAULT.fromXml(this, elem) != this)
            throw new IllegalStateException("Mismatched recovered XML instances");
        return this;
    }

    // Constructs the graph definition directly from its XML representation.
    public RrdGraphDefExt (Element elem) throws Exception
    {
        if (this != fromXml(elem))
            throw new IllegalStateException("Mismatched constructed instances");
    }
    /*
     * @see net.community.chest.dom.transform.XmlConvertible#toXml(org.w3c.dom.Document)
     */
    public Element toXml (Document doc) throws Exception
    {
        // Serialization back to XML is intentionally unsupported.
        throw new UnsupportedOperationException("toXml() N/A");
    }
}
|
/*
* Dit bestand is een onderdeel van AWV DistrictCenter.
* Copyright (c) AWV Agentschap <NAME>, <NAME>
*/
package be.vlaanderen.awv.atom.java;
import be.vlaanderen.awv.atom.Feed;
import org.apache.commons.io.FileUtils;
import org.assertj.core.api.Assertions;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.type.TypeReference;
import org.junit.Ignore;
import org.junit.Test;
import java.io.File;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Tests JSON parsing of Atom feed (with generic entry type).
*/
/**
 * Tests JSON parsing of an Atom feed with a generic entry type
 * (Scala Feed consumed from Java via Jackson).
 */
public class JacksonParseAtomFeedToScalaObjectTest {
    private ObjectMapper mapper = new ObjectMapper();

    @Test
    @Ignore
    public void testParseAtomJson() throws Exception {
        // Load the sample feed bundled as a classpath resource.
        File sampleFile = new File(this.getClass().getClassLoader().getResource(
                "be/vlaanderen/awv/atom/java/atom-feed-sample.txt").getFile());
        String rawJson = FileUtils.readFileToString(sampleFile);

        // Deserialize into the generic feed type.
        TypeReference<Feed<EventFeedEntryTo>> feedType = new TypeReference<Feed<EventFeedEntryTo>>() {
        };
        Feed<EventFeedEntryTo> parsedFeed = mapper.readValue(rawJson, feedType);

        // Diagnostic output of the first entry's content.
        System.out.println(parsedFeed.entries().head().content().rawType());
        System.out.println(parsedFeed.entries().head().content().value().head().getClass());
        System.out.println(parsedFeed.entries().head().content().value().head());

        assertThat(parsedFeed).isNotNull();
        Assertions.assertThat(parsedFeed.entries().head().content().value().head()).isInstanceOf(EventFeedEntryTo.class);
    }
}
|
# -*- coding: utf-8 -*-
require 'gtk2'
miquire :mui, 'extension', 'inneruserlist'
require 'set'
# Scrollable list widget displaying Users; wraps an InnerUserList plus a
# vertical scrollbar inside an EventBox.
class Gtk::UserList < Gtk::EventBox
  include Enumerable
  attr_reader :listview

  def initialize
    super
    @listview = Gtk::InnerUserList.new(self)
    scrollbar = ::Gtk::VScrollbar.new(@listview.vadjustment)
    add Gtk::HBox.new(false, 0).add(@listview).closeup(scrollbar)
  end

  # Yields every User currently shown in the list.
  # BUG FIX: `include Enumerable` requires #each to yield each element;
  # the previous body computed the user column for every row but never
  # yielded it, so all Enumerable methods (including #to_a below, which
  # relies on inject) saw no elements.
  def each
    @listview.each{ |m, p, i| yield i[Gtk::InnerUserList::COL_USER] } end

  def to_a
    @to_a ||= inject(Users.new, &:<<).freeze end

  # Adds the array of Users _users_ to the list.
  # ==== Args
  # [users] array of users
  # ==== Return
  # self
  def add_user(users)
    @to_a = nil # invalidate the memoized array
    @listview.add_user(users)
  end

  # Removes the users contained in the array _users_ from the list.
  # ==== Args
  # [users] array of users
  # ==== Return
  # self
  def remove_user(users)
    @to_a = nil # invalidate the memoized array
    @listview.remove_user(users)
  end

  # Returns a monotonically increasing ordering key (insertion order).
  def gen_order(user)
    (@order_generator ||= gen_counter).call end

  # Recomputes the position of user _user_ in the list.
  # ==== Args
  # [user] user
  # ==== Return
  # self
  def reorder(user)
    type_strict user => User
    @listview.reorder(user)
    self end
end
|
// Boost.Geometry (aka GGL, Generic Geometry Library)
// Unit Test
// Copyright (c) 2007-2012 <NAME>, Amsterdam, the Netherlands.
// Copyright (c) 2008-2012 <NAME>, Paris, France.
// Copyright (c) 2009-2012 <NAME>, London, UK.
// Copyright (c) 2020, Oracle and/or its affiliates.
// Contributed and/or modified by <NAME>, on behalf of Oracle
// Parts of Boost.Geometry are redesigned from Geodan's Geographic Library
// (geolib/GGL), copyright (c) 1995-2010 Geodan, Amsterdam, the Netherlands.
// Use, modification and distribution is subject to the Boost Software License,
// Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#if defined(_MSC_VER)
// We deliberately mix float/double's here so turn off warning
#pragma warning( disable : 4244 )
#endif // defined(_MSC_VER)
#include <geometry_test_common.hpp>
#include <boost/geometry/algorithms/assign.hpp>
#include <boost/geometry/algorithms/intersection.hpp>
#include <boost/geometry/algorithms/detail/overlay/segment_as_subrange.hpp>
#include <boost/geometry/geometries/point.hpp>
#include <boost/geometry/geometries/segment.hpp>
#include <boost/geometry/geometries/adapted/boost_tuple.hpp>
#include <boost/geometry/policies/relate/intersection_policy.hpp>
#include <boost/geometry/strategies/intersection_result.hpp>
#include <boost/geometry/strategies/relate/cartesian.hpp>
BOOST_GEOMETRY_REGISTER_BOOST_TUPLE_CS(cs::cartesian);
// Runs one segment/segment intersection case through both the
// intersection_insert algorithm and the cartesian_segments strategy,
// checking the intersection count, the relation character ("how"),
// and the intersection coordinates (when expected).
//
// The -99 defaults are sentinels meaning "no expected point"; each
// non-sentinel (x, y) pair raises the expected intersection count by one.
template <typename P>
static void test_segment_intersection(int caseid,
    int x1, int y1, int x2, int y2, int x3, int y3, int x4, int y4,
    char expected_how,
    int expected_x1 = -99, int expected_y1 = -99,
    int expected_x2 = -99, int expected_y2 = -99)
{
    using namespace boost::geometry;
    typedef typename bg::coordinate_type<P>::type coordinate_type;
    typedef bg::model::referring_segment<const P> segment_type;

    // Build the two segments (p1,p2) and (p3,p4) from the raw coordinates.
    P p1, p2, p3, p4;
    bg::assign_values(p1, x1, y1);
    bg::assign_values(p2, x2, y2);
    bg::assign_values(p3, x3, y3);
    bg::assign_values(p4, x4, y4);
    segment_type s12(p1,p2);
    segment_type s34(p3,p4);
    bg::detail::segment_as_subrange<segment_type> sr12(s12);
    bg::detail::segment_as_subrange<segment_type> sr34(s34);

    // Count how many intersection points the caller expects (0, 1 or 2).
    std::size_t expected_count = 0;
    if (expected_x1 != -99 && expected_y1 != -99)
    {
        expected_count++;
    }
    if (expected_x2 != -99 && expected_y2 != -99)
    {
        expected_count++;
    }

    // Using intersection_insert
    std::vector<P> out;
    bg::detail::intersection::intersection_insert<P>(s12, s34,
        std::back_inserter(out));

    // Using strategy: compute intersection points and the relation direction.
    typedef bg::segment_intersection_points<P> result_type;
    typedef bg::policies::relate::segments_intersection_points
        <
            result_type
        > points_policy_type;
    result_type is
        = bg::strategy::intersection::cartesian_segments<>
            ::apply(sr12, sr34, points_policy_type());
    bg::policies::relate::direction_type dir
        = bg::strategy::intersection::cartesian_segments<>
            ::apply(sr12, sr34, bg::policies::relate::segments_direction());

    //BOOST_CHECK_EQUAL(boost::size(out), expected_count);
    BOOST_CHECK_EQUAL(is.count, expected_count);
    BOOST_CHECK_MESSAGE(dir.how == expected_how,
        caseid
        << " how: detected: " << dir.how
        << " expected: " << expected_how);

    if (expected_count == 2
        && is.count == 2
        && boost::size(out) == 2)
    {
        // Two intersection points, reverse expectation if necessary
        // (the algorithm does not guarantee the order of the two points).
        bool const first_matches
            = std::fabs(bg::get<0>(out[0]) - expected_x1) < 1.0e-6
            && std::fabs(bg::get<1>(out[0]) - expected_y1) < 1.0e-6;
        if (! first_matches)
        {
            std::swap(expected_x1, expected_x2);
            std::swap(expected_y1, expected_y2);
        }
    }

    // Verify coordinates from both code paths against the expectations.
    if (expected_x1 != -99 && expected_y1 != -99
        && boost::size(out) >= 1)
    {
        BOOST_CHECK_CLOSE(bg::get<0>(out[0]), coordinate_type(expected_x1), 0.001);
        BOOST_CHECK_CLOSE(bg::get<1>(out[0]), coordinate_type(expected_y1), 0.001);
        BOOST_CHECK_CLOSE(bg::get<0>(is.intersections[0]), expected_x1, 0.001);
        BOOST_CHECK_CLOSE(bg::get<1>(is.intersections[0]), expected_y1, 0.001);
    }
    if (expected_x2 != -99 && expected_y2 != -99
        && boost::size(out) >= 2)
    {
        BOOST_CHECK_CLOSE(bg::get<0>(out[1]), coordinate_type(expected_x2), 0.001);
        BOOST_CHECK_CLOSE(bg::get<1>(out[1]), coordinate_type(expected_y2), 0.001);
        BOOST_CHECK_CLOSE(bg::get<0>(is.intersections[1]), expected_x2, 0.001);
        BOOST_CHECK_CLOSE(bg::get<1>(is.intersections[1]), expected_y2, 0.001);
    }
}
// Exhaustive case list for segment intersection relations. The expected_how
// characters come from the segments_direction relate policy ('i' interior
// crossing, 'd' disjoint, 'e' equal/collinear identical, 'c' collinear
// overlap; 'a'/'t'/'m'/'s' denote the touch/end-point variants — confirm the
// exact letter semantics against the Boost.Geometry segments_direction docs).
template <typename P>
void test_all()
{
    test_segment_intersection<P>( 1, 0,2, 2,0, 0,0, 2,2, 'i', 1, 1);
    test_segment_intersection<P>( 2, 2,2, 3,1, 0,0, 2,2, 'a', 2, 2);
    test_segment_intersection<P>( 3, 3,1, 2,2, 0,0, 2,2, 't', 2, 2);
    test_segment_intersection<P>( 4, 0,2, 1,1, 0,0, 2,2, 'm', 1, 1);
    test_segment_intersection<P>( 5, 1,1, 0,2, 0,0, 2,2, 's', 1, 1);
    test_segment_intersection<P>( 6, 0,2, 2,0, 0,0, 1,1, 'm', 1, 1);
    test_segment_intersection<P>( 7, 2,0, 0,2, 0,0, 1,1, 'm', 1, 1);
    test_segment_intersection<P>( 8, 2,3, 3,2, 0,0, 2,2, 'd');
    test_segment_intersection<P>( 9, 0,0, 2,2, 0,0, 2,2, 'e', 0, 0, 2, 2);
    test_segment_intersection<P>(10, 2,2, 0,0, 0,0, 2,2, 'e', 2, 2, 0, 0);
    test_segment_intersection<P>(11, 1,1, 3,3, 0,0, 2,2, 'c', 1, 1, 2, 2);
    test_segment_intersection<P>(12, 3,3, 1,1, 0,0, 2,2, 'c', 1, 1, 2, 2);
    test_segment_intersection<P>(13, 0,2, 2,2, 2,1, 2,3, 'm', 2, 2);
    test_segment_intersection<P>(14, 2,2, 2,4, 2,0, 2,2, 'a', 2, 2);
    test_segment_intersection<P>(15, 2,2, 2,4, 2,0, 2,1, 'd');
    test_segment_intersection<P>(16, 2,4, 2,2, 2,0, 2,1, 'd');
    test_segment_intersection<P>(17, 2,1, 2,3, 2,2, 2,4, 'c', 2, 2, 2, 3);
    test_segment_intersection<P>(18, 2,3, 2,1, 2,2, 2,4, 'c', 2, 3, 2, 2);
    test_segment_intersection<P>(19, 0,2, 2,2, 4,2, 2,2, 't', 2, 2);
    test_segment_intersection<P>(20, 0,2, 2,2, 2,2, 4,2, 'a', 2, 2);
    test_segment_intersection<P>(21, 1,2, 3,2, 2,1, 2,3, 'i', 2, 2);
    test_segment_intersection<P>(22, 2,4, 2,1, 2,1, 2,3, 'c', 2, 1, 2, 3);
    test_segment_intersection<P>(23, 2,4, 2,1, 2,3, 2,1, 'c', 2, 3, 2, 1);
    test_segment_intersection<P>(24, 1,1, 3,3, 0,0, 3,3, 'c', 1, 1, 3, 3);
    test_segment_intersection<P>(25, 2,0, 2,4, 2,1, 2,3, 'c', 2, 1, 2, 3);
    test_segment_intersection<P>(26, 2,0, 2,4, 2,3, 2,1, 'c', 2, 3, 2, 1);
    test_segment_intersection<P>(27, 0,0, 4,4, 1,1, 3,3, 'c', 1, 1, 3, 3);
    test_segment_intersection<P>(28, 0,0, 4,4, 3,3, 1,1, 'c', 3, 3, 1, 1);
    test_segment_intersection<P>(29, 1,1, 3,3, 0,0, 4,4, 'c', 1, 1, 3, 3);
    test_segment_intersection<P>(30, 0,0, 2,2, 2,2, 3,1, 'a', 2, 2);
    test_segment_intersection<P>(31, 0,0, 2,2, 2,2, 1,3, 'a', 2, 2);
    test_segment_intersection<P>(32, 0,0, 2,2, 1,1, 2,0, 's', 1, 1);
    test_segment_intersection<P>(33, 0,0, 2,2, 1,1, 0,2, 's', 1, 1);
    test_segment_intersection<P>(34, 2,2, 1,3, 0,0, 2,2, 'a', 2, 2);
    test_segment_intersection<P>(35, 2,2, 3,1, 0,0, 2,2, 'a', 2, 2);
    test_segment_intersection<P>(36, 0,0, 2,2, 0,2, 1,1, 'm', 1, 1);
    test_segment_intersection<P>(37, 0,0, 2,2, 2,0, 1,1, 'm', 1, 1);
    test_segment_intersection<P>(38, 1,1, 0,2, 0,0, 2,2, 's', 1, 1);
    test_segment_intersection<P>(39, 1,1, 2,0, 0,0, 2,2, 's', 1, 1);
    test_segment_intersection<P>(40, 2,0, 1,1, 0,0, 2,2, 'm', 1, 1);
    test_segment_intersection<P>(41, 1,2, 0,2, 2,2, 0,2, 'c', 1, 2, 0, 2);
    test_segment_intersection<P>(42, 2,1, 1,1, 2,2, 0,2, 'd');
    test_segment_intersection<P>(43, 4,1, 3,1, 2,2, 0,2, 'd');
    test_segment_intersection<P>(44, 4,2, 3,2, 2,2, 0,2, 'd');
    test_segment_intersection<P>(45, 2,0, 0,2, 0,0, 2,2, 'i', 1, 1);
    // In figure: times 2
    test_segment_intersection<P>(46, 8,2, 4,6, 0,0, 8, 8, 'i', 5, 5);
}
// Boost.Test entry point: runs the whole case list for several point types
// (tuple<double, double>, float point, double point).
int test_main(int, char* [])
{
#if !defined(BOOST_GEOMETRY_TEST_ONLY_ONE_TYPE)
    test_all<boost::tuple<double, double> >();
    test_all<bg::model::point<float, 2, bg::cs::cartesian> >();
#endif
    test_all<bg::model::point<double, 2, bg::cs::cartesian> >();
    return 0;
}
|
#!/bin/bash
set -e

# Go to the project root directory.
# FIX: quote the expansions so the script also works when the checkout
# path contains spaces or glob characters.
cd "$(dirname "${0}")/../.."

# Packages to publish and their builds repositories; the arrays are
# index-aligned (PACKAGES[i] is published to REPOSITORIES[i]).
PACKAGES=(cdk mosaic mosaic-moment-adapter)
REPOSITORIES=(cdk-builds mosaic-builds mosaic-moment-adapter-builds)

# Command line arguments.
COMMAND_ARGS=${*}
# Function to publish artifacts of a package to Github.
# @param ${1} Name of the package
# @param ${2} Repository name of the package.
# Publishes the build artifacts of one package to its GitHub builds repository:
# clones (or creates) the branch matching the current branch, replaces its
# contents with the build output, stamps the version with the commit SHA,
# commits, tags and pushes.
# NOTE(review): most variable expansions below are unquoted; this is safe only
# while versions, branch names and paths contain no whitespace — confirm.
# @param ${1} Name of the package
# @param ${2} Repository name of the package.
publishPackage() {
    packageName=${1}
    packageRepo=${2}
    # Build output produced by the release task for this package.
    buildDir="$(pwd)/dist/releases/${packageName}"
    buildVersion=$(node -pe "require('./package.json').version")
    # Current branch name, second path component (e.g. "7.x" from "origin/7.x").
    branchName=$(git branch | sed -n '/\* /s///p' | awk -F'/' '{print $2}')
    if [[ -z ${branchName} ]]; then
        branchName='master'
    fi
    # Metadata of the commit being published.
    commitSha=$(git rev-parse --short HEAD)
    commitAuthorName=$(git --no-pager show -s --format='%an' HEAD)
    commitAuthorEmail=$(git --no-pager show -s --format='%ae' HEAD)
    commitMessage=$(git log --oneline -n 1)
    buildVersionName="${buildVersion}-${commitSha}"
    buildTagName="${branchName}-${commitSha}"
    buildCommitMessage="${branchName} - ${commitMessage}"
    repoUrl="https://github.com/positive-js/${packageRepo}.git"
    repoDir="tmp/${packageRepo}"
    echo "Starting publish process of ${packageName} for ${buildVersionName} into ${branchName}.."
    # Prepare cloning the builds repository
    rm -rf ${repoDir}
    mkdir -p ${repoDir}
    echo "Starting cloning process of ${repoUrl} into ${repoDir}.."
    if [[ $(git ls-remote --heads ${repoUrl} ${branchName}) ]]; then
        echo "Branch ${branchName} already exists. Cloning that branch."
        git clone ${repoUrl} ${repoDir} --depth 1 --branch ${branchName}
        cd ${repoDir}
        echo "Cloned repository and switched into the repository directory (${repoDir})."
    else
        echo "Branch ${branchName} does not exist on ${packageRepo} yet."
        echo "Cloning default branch and creating branch '${branchName}' on top of it."
        git clone ${repoUrl} ${repoDir} --depth 1
        cd ${repoDir}
        echo "Cloned repository and switched into directory. Creating new branch now.."
        git checkout -b ${branchName}
    fi
    # Copy the build files to the repository
    # (the glob deliberately skips dotfiles, so .git survives the wipe).
    rm -rf ./*
    cp -r ${buildDir}/* ./
    echo "Removed everything from ${packageRepo}#${branchName} and added the new build output."
    # Skip re-publishing if this commit's tag already exists on the remote.
    if [[ $(git ls-remote origin "refs/tags/${buildTagName}") ]]; then
        echo "Skipping publish because tag is already published"
        exit 0
    fi
    # Replace the version in every file recursively with a more specific version that also includes
    # the SHA of the current build job. Normally this "sed" call would just replace the version
    # placeholder, but the version placeholders have been replaced by the release task already.
    sed -i "s/${buildVersion}/${buildVersionName}/g" $(find . -type f -not -path '*\/.*' ! -iname '*.css' ! -iname '*.js')
    echo "Updated the build version in every file to include the SHA of the latest commit."
    # Prepare Git for pushing the artifacts to the repository.
    git config user.name "${commitAuthorName}"
    git config user.email "${commitAuthorEmail}"
    echo "Publish by user: ${commitAuthorName}"
    echo "Git configuration has been updated to match the last commit author. Publishing now.."
    echo "Commit message: ${buildCommitMessage}"
    echo "Build Tag: ${buildTagName}"
    git add -A
    git commit --allow-empty -m "${buildCommitMessage}"
    git tag "${buildTagName}"
    git push origin ${branchName} --tags
    echo "Published package artifacts for ${packageName}#${buildVersionName} into ${branchName}"
}
# Publish every package in PACKAGES against its index-aligned repository.
for ((i = 0; i < ${#PACKAGES[@]}; i++)); do
    packageName=${PACKAGES[${i}]}
    packageRepo=${REPOSITORIES[${i}]}
    # Publish artifacts of the current package. Run publishing in a sub-shell to avoid working
    # directory changes (publishPackage cd's into the cloned repository).
    (publishPackage ${packageName} ${packageRepo})
done
|
from typing import List, Dict, Union
def process_deployment_script(commands: List[str]) -> Dict[str, Union[List[str], Dict[str, str]]]:
    """Summarize a deployment shell script.

    Scans each command line and collects:
      * ``executables`` - one entry per ``wget`` download, combining the
        destination path (5th whitespace token) with the file name;
      * ``environment_module`` - details parsed from ``mkdir`` and ``echo``
        commands (module name, root path, appended path).
    """
    downloaded: List[str] = []
    module_info: Dict[str, str] = {}
    for line in commands:
        if line.startswith('wget'):
            tokens = line.split()
            # The 5th token is the download destination path.
            destination = tokens[4]
            binary = destination.rsplit('/', 1)[-1]
            downloaded.append(f"{destination}/{binary} - Downloaded and permissions set")
        elif line.startswith('mkdir'):
            # The module name is the last path component of the created directory.
            module_info['module_name'] = line.rsplit('/', 1)[-1]
        elif line.startswith('echo "set root'):
            module_info['root_path'] = line.split()[-1]
        elif line.startswith('echo -e "prepend-path'):
            module_info['appended_path'] = ' '.join(line.split()[1:])
    return {"executables": downloaded, "environment_module": module_info}
<gh_stars>1-10
__author__ = 'mariosky'
import redis
import os
import json
import ast
# Connection settings resolved from the environment with fallbacks.
# NOTE(review): the `KEY in os.environ and ... or default` idiom predates
# os.environ.get(KEY, default); it is equivalent only while the env values
# are truthy (non-empty strings).
HOST = 'REDIS_HOST' in os.environ and os.environ['REDIS_HOST'] or 'redis'
PORT = 'REDIS_PORT' in os.environ and os.environ['REDIS_PORT'] or '6379'
# NOTE(review): '<PASSWORD>' looks like a redacted placeholder; the key being
# checked should presumably be 'REDIS_PASSWORD' (the value read is) — confirm
# before deploying.
PASSWORD = '<PASSWORD>' in os.environ and os.environ['REDIS_PASSWORD'] or '<PASSWORD>'
# Module-level shared client; decode_responses=True makes redis return str
# instead of bytes.
r = redis.Redis(host=HOST, port=PORT, password=PASSWORD, decode_responses=True)
class Task:
    """A unit of work stored in redis as JSON and exchanged through queues."""

    def __init__(self, **kwargs):
        # 'id' is required; everything else has a default.
        self.id = kwargs['id']
        self.method = kwargs.get('method', None)
        self.params = kwargs.get('params', {})
        self.state = kwargs.get('state', 'created')
        self.expire = kwargs.get('expire', None)
        self.result = None
        # Any extra keyword arguments become attributes (and overwrite the
        # defaults above when duplicated).
        self.__dict__.update(kwargs)

    def enqueue(self, app_name):
        """Push this task's id onto '<app>:task_queue' and store its JSON body."""
        pipe = r.pipeline()
        # NOTE(review): pipeline commands are buffered and return the pipeline
        # object itself, which is always truthy — this condition can never take
        # the else branch, and the real rpush result is only available after
        # execute(). Confirm whether the False path was ever intended.
        if pipe.rpush('%s:task_queue' % app_name, self.id):
            self.state = 'submitted'
            message = json.dumps(self.__dict__)
            pipe.set(self.id, message)
            pipe.execute()
            return True
        else:
            return False

    def put_result(self, worker):
        """Mark the task completed: remove it from the worker's pending set,
        store the updated JSON body and add the id to the result set."""
        pipe = r.pipeline()
        # NOTE(review): same always-truthy pipeline condition as in enqueue();
        # zrem's actual result is unknown until execute().
        if pipe.zrem('%s:pending_set' % worker.cola.app_name, '%s:%s' % (worker.id, self.id)):
            self.state = 'completed'
            message = json.dumps(self.__dict__)
            pipe.set(self.id, message)
            pipe.sadd('%s:result_set' % worker.cola.app_name, self.id)
            pipe.execute()
            return True
        else:
            return None

    def get_result(self, app_name, as_dict=False):
        """If the task id is in '<app>:result_set', load the stored JSON and
        merge it into this instance; return self (or a dict), else None."""
        if r.sismember('%s:result_set' % app_name, self.id):
            result = r.get(self.id)
            print(result)
            # Stored JSON may contain raw newlines; strip them before parsing.
            res = result.replace("\n", "")
            _dict = json.loads(res)
            print(type(_dict), _dict)
            self.__dict__.update(_dict)
            if as_dict:
                return self.__dict__
            else:
                return self
        else:
            return None

    def __repr__(self):
        return self.id + " method:" + str(self.method) + ", params:" + str(self.params)

    def as_dict(self):
        # Expose the full attribute dictionary (includes redis-roundtripped state).
        return self.__dict__
class Cola:
    """A named task queue; all redis keys are derived from the app name."""

    def __init__(self, name):
        self.app_name = name
        # Key names used by this queue (counter, pending/result/worker sets, queue).
        self.task_counter = self.app_name + ':task_counter'
        self.pending_set = self.app_name + ':pending_set'
        self.task_queue = self.app_name + ':task_queue'
        self.result_set = self.app_name + ':result_set'
        self.worker_set = self.app_name + ':worker_set'

    def initialize(self):
        # WARNING: flushall() deletes EVERY key in ALL databases of the redis
        # instance, not just this app's keys — confirm this is intended before
        # sharing the instance.
        r.flushall()
        r.setnx(self.task_counter, 0)

    def enqueue(self, **kwargs):
        """Create a Task (generating an id when none is given) and enqueue it.

        Returns the task id."""
        if kwargs['id'] is None:
            kwargs['id'] = "%s:task:%s" % (self.app_name, r.incr(self.task_counter))
        t = Task(**kwargs)
        t.enqueue(self.app_name)
        return kwargs['id']

    def get_dead_workers(self):
        """Remove workers whose heartbeat key has expired; return their ids."""
        workers = r.smembers(self.worker_set)
        dead = []
        for w in workers:
            if r.get(w):
                pass  # heartbeat key still present: worker is alive
            else:
                r.srem(self.worker_set, w)
                dead.append(w)
        return dead

    def get_workers(self):
        # NOTE(review): KEYS is O(n) over the whole keyspace; fine for admin
        # use, avoid on hot paths.
        pattern = '%s:worker:*' % (self.app_name)
        return r.keys(pattern)

    @staticmethod
    def get_all_workers():
        # Workers across every application.
        pattern = '*:worker:*'
        print(pattern)
        return r.keys(pattern)
|
// Returns true when the remaining cooldown, after subtracting the elapsed
// frame time, has reached zero or below.
//
// NOTE: m_coolDown is passed BY VALUE, so the caller's timer is NOT updated
// here (the original decremented the local copy and claimed to "update" it,
// which was misleading).  The caller must persist the new cooldown itself.
bool IsActionAvailable(float m_coolDown, float deltaTime)
{
    return (m_coolDown - deltaTime) <= 0;
}
<filename>hermes-common/src/main/java/pl/allegro/tech/hermes/domain/readiness/ReadinessRepository.java
package pl.allegro.tech.hermes.domain.readiness;
/**
 * Persistence abstraction for the service's readiness flag, used to signal
 * whether this instance should be considered ready to serve traffic.
 */
public interface ReadinessRepository {
    /** Returns the currently stored readiness state. */
    boolean isReady();

    /** Stores the readiness state; {@code true} marks the instance as ready. */
    void setReadiness(boolean isReady);
}
|
#! /bin/sh
# $OpenLDAP: pkg/ldap/build/version.sh,v 1.5.2.39 2003/03/29 15:45:42 kurt Exp $
## Copyright 2000-2003 The OpenLDAP Foundation
## COPYING RESTRICTIONS APPLY. See COPYRIGHT File in top level directory
## of this package for details.
#
# Derives the OL_* version variables from version.var and echoes them in a
# form suitable for eval'ing by the build system.
DIR=`dirname $0`
. $DIR/version.var

if test $ol_patch != X ; then
	# Release build: all three components are concrete numbers.
	ol_version=${ol_major}.${ol_minor}.${ol_patch}
	ol_type=Release
elif test $ol_minor != X ; then
	# Engineering build: ol_patch is the literal "X" here, yielding versions
	# like "2.1.X-Engineering" -- presumably the intentional upstream
	# convention for snapshot builds, not a bug; confirm against version.var.
	ol_version=${ol_major}.${ol_minor}.${ol_patch}
	ol_type=Engineering
else
	# Development build: only the major number is fixed.
	ol_version=${ol_major}.${ol_minor}
	ol_type=Devel
	ol_api_lib=0:0:0
fi

ol_string="${ol_package} ${ol_version}-${ol_type}"

# Emit shell assignments for consumption by the caller.
echo OL_PACKAGE=\"${ol_package}\"
echo OL_MAJOR=$ol_major
echo OL_MINOR=$ol_minor
echo OL_PATCH=$ol_patch
echo OL_API_INC=$ol_api_inc
echo OL_API_LIB=$ol_api_lib
echo OL_VERSION=$ol_version
echo OL_TYPE=$ol_type
echo OL_STRING=\"${ol_string}\"
echo OL_RELEASE_DATE=\"${ol_release_date}\"
|
<reponame>atomlab/stihi-frontend-1.0<filename>src/app/users/users-comments.component.ts<gh_stars>1-10
import { Component, ElementRef, HostListener, Input, OnInit } from '@angular/core';
import { ActivatedRoute, Router } from '@angular/router';
import {PostsService} from "../posts/posts.service";
import {HeaderService} from "../shared/services/header.service";
import { Subscription } from 'rxjs/Subscription';
declare var jquery:any;
declare var $ :any;
// Displays the comments a user has written (replies received and comments on
// the user's own posts), with interval-driven infinite scroll and a live
// refresh when the header service broadcasts a comment change.
@Component({
  selector: 'app-users-comments',
  templateUrl: './users-comments.component.html',
  styleUrls: []
})
export class UsersCommentsComponent implements OnInit {
  type: string = '';                 // current tab: e.g. 'reply' or 'owner' (set from route data)
  owner_cnt: number = 0;             // count of comments under the user's own posts
  reply_cnt: number = 0;             // count of replies addressed to the user
  routeSubscription: any;
  id: number = 0;
  objects: any = [];
  announces: any = [];
  selectedObjects: any = [];
  user_id: number = 0;
  user: any = {ban: false,};
  previousTop: number = 0;           // last scroll offset, used to detect scroll direction
  posts_load_count: number = 20;
  scroll_direction: number = 0;      // 0 = idle, 1 = load more (scrolling down), 2 = near top
  queue_interval: any;               // setInterval handle driving the lazy-load poll
  announces_load_count: number = 3;
  comments_load_count: number = 20;  // grows by 20 each time the user nears the bottom
  tag: string = '';
  current_user_votes: any = {};
  comments_current_user_votes: any = {};
  votes: any = [];
  isCanScroll: boolean = false;
  login: string = '';                // profile login taken from the route params
  page: string = '';
  commentsArticles: any = [];        // articles the listed comments belong to, keyed by id
  comments_votes: any = {};
  isLoading: boolean = false;
  queryParams: any = {};
  comments: any = [];
  isDragging: boolean = false;
  TODO: boolean = false;
  subscription_comments: Subscription;
  isFirstLoad: boolean = true;

  // Close all open comment tooltips when clicking anywhere outside a
  // simpleDropdown container.
  @HostListener('document:click', ['$event'])
  clickout(event) {
    for (let i = 0; i < event.path.length; i++) {
      if (event.path[i].className && event.path[i].className.length > 0 && event.path[i].className.indexOf("simpleDropdown__container") !== -1) return;
    }
    this.resetCommentsTooltips(this.comments);
  }

  @HostListener('document:dragstart', ['$event'])
  onDragStart(event) {
    this.isDragging = true;
  }

  @HostListener('document:dragend', ['$event'])
  onDragEnd(event) {
    this.isDragging = false;
  }

  // Recursively hide every tooltip flag on the comment tree.
  resetCommentsTooltips(o: any) {
    for (let i = 0; i < o.length; i++) {
      o[i].is_show_votes = false;
      o[i].is_show_price = false;
      o[i].is_show_profile = false;
      o[i].is_show_comments = false;
      if (o[i].comments && o[i].comments.length > 0) {
        this.resetCommentsTooltips(o[i].comments);
      }
    }
  }

  // Sets scroll_direction (consumed by the interval in ngOnInit) when the
  // user nears the third-from-last rendered item while scrolling down, or
  // nears the top while scrolling up.
  @HostListener("window:scroll", [])
  onWindowScroll() {
    let top = window.pageYOffset || document.documentElement.scrollTop;
    let direction: boolean = false;
    if (top > this.previousTop) direction = true;
    if (this.objects.length == 0 && this.announces.length == 0 && this.comments.length == 0) return;
    if (top < 600 && !direction) this.scroll_direction = 2;
    if (this.type == 'main') {
      let postid = this.announces.length - 1;
      if (this.announces.length > 2) postid = this.announces.length - 3;
      if (postid > -1 && this.isScrolledIntoView(document.getElementById('announcementid'+postid)) && direction) this.scroll_direction = 1;
    } else {
      // NOTE: `|| true` makes this branch unconditional for every non-'main'
      // type -- presumably intentional since this component only shows
      // comments; confirm before removing.
      if (this.type == 'comments' || true) {
        let postid = this.comments.length - 1;
        if (this.comments.length > 2) postid = this.comments.length - 3;
        if (postid > -1 && this.isScrolledIntoView(document.getElementById('commentid'+postid)) && direction) this.scroll_direction = 1;
      }
    }
    this.previousTop = top;
  }

  // True when the element is entirely within the current viewport.
  isScrolledIntoView(elem) {
    let docViewTop = $(window).scrollTop();
    let docViewBottom = docViewTop + $(window).height();
    let elemTop = $(elem).offset().top;
    let elemBottom = elemTop + $(elem).height();
    return ((elemBottom <= docViewBottom) && (elemTop >= docViewTop));
  }

  constructor(
    private postsService: PostsService,
    private route: ActivatedRoute,
    private router: Router,
    private headerService: HeaderService,
  ) {
    if (this.isFirstLoad) {
      this.isFirstLoad = false;
    }
    // Refresh both tabs whenever a comment is added/edited elsewhere.
    this.subscription_comments = headerService.commentChanged$.subscribe(
      data => {
        this.load('reply', true);
        this.load('owner', true);
      });
  }

  initScroll() {
  }

  ngOnInit() {
    this.initScroll();
    // Force component re-creation on same-route navigation.
    this.router.routeReuseStrategy.shouldReuseRoute = function(){
      return false;
    };
    this.routeSubscription = this.route.params.subscribe(
      params => {
        if (this.route.snapshot.data.type) this.type = this.route.snapshot.data.type;
        this.login = params['login'];
        this.loadUser();
        // Poll every 2s; when onWindowScroll flagged a direction, grow the
        // page size and reload the current tab.
        this.queue_interval = setInterval(() => {
          if (this.scroll_direction == 0) return;
          if (this.scroll_direction == 1) {
            this.comments_load_count += 20;
          }
          this.scroll_direction = 0;
          this.load(this.type);
        }, 2000);
      });
  }

  // Resolve the profile user by login, then load both comment tabs.
  loadUser() {
    this.postsService.getUser({name: this.login})
      .subscribe((data) => {
        let user: any = {};
        this.user = data.user;
        this.id = data.user.id;
        this.user_id = this.id;
        this.page = '/@'+this.login;
        this.load('reply');
        this.load('owner');
      });
  }

  ngOnDestroy() {
    clearInterval(this.queue_interval);
    this.routeSubscription.unsubscribe();
    this.subscription_comments.unsubscribe();
  }

  openSinglePost(id: number, returnUri: string) {
    this.headerService.openPost(id, returnUri, 0, false, this.queryParams, true);
    return false;
  }

  // Attach display-name fields to every comment in the tree.
  processComment(o: any) {
    for (let i =0; i < o.length; i++) {
      o[i].displayName = this.postsService.getUserDisplayName(o[i].user);
      o[i].displayNameBlog = this.postsService.getUserDisplayNameBlog(o[i].user);
      o[i].displayNameLogin = this.postsService.getUserLogin(o[i].user);
      if (o[i].comments && o[i].comments.length > 0) this.processComment(o[i].comments);
    }
  }

  // Fetch the comment list for `load_type` ('reply' or 'owner'); unless
  // `force` is set, already-loaded comment objects are kept (preserving
  // their open-tooltip state) when they reappear in the fresh list.
  load(load_type: string = null, force: boolean = false) {
    this.isLoading = true;
    let params: any = {user_id: parseInt(""+this.user_id), type: this.type, full: true, count: this.comments_load_count, mode: 'first'};
    if (load_type != null) params.type = load_type;
    this.postsService.getUserCommentsList(params)
      .subscribe((data) => {
        this.processComment(data.list);
        let cnt = 0;
        this.owner_cnt = data.owner_count;
        this.reply_cnt = data.reply_count;
        this.isLoading = false;
        // Ignore responses for the tab the user has already navigated away from.
        if (this.type != load_type) {
          return;
        }
        if (!force) {
          for (let i = 0; i < this.comments.length; i++) {
            for (let z = 0; z < data.list.length; z++) {
              if (this.comments[i].id == data.list[z].id) {
                data.list[z] = this.comments[i];
              }
            }
          }
        }
        Object.keys(data.articles).forEach((key,index) => {
          data.articles[key].displayName = this.postsService.getUserDisplayName(data.articles[key].user);
          data.articles[key].displayNameBlog = this.postsService.getUserDisplayNameBlog(data.articles[key].user);
        });
        this.comments = [];
        this.comments = data.list;
        this.commentsArticles = data.articles;
        if (data.current_user_votes) this.comments_current_user_votes = data.current_user_votes;
        else this.comments_current_user_votes = {};
        if (this.comments && this.comments.length > 0) this.comments_votes = data.votes;
        else this.comments_votes = {};
        this.postsService.processJWT(data);
      });
  }

  getUserPower(value, type) {
    return this.postsService.getUserPower(value, type);
  }

  getUserReputation(value) {
    return this.postsService.getUserReputation(value);
  }
}
|
#!/bin/bash
# Generated oasislmf analysis run script: computes ground-up losses for two
# summary sets (S1, S2) plus a fully-correlated variant, fanning each summary
# stream out through named pipes to eltcalc/pltcalc/summarycalc consumers.
# %FIFO_DIR% is a placeholder substituted by the generator before execution.
SCRIPT=$(readlink -f "$0") && cd $(dirname "$SCRIPT")

# --- Script Init ---

set -e
set -o pipefail

# --- Setup run dirs ---

find output/* ! -name '*summary-info*' -type f -exec rm -f {} +
mkdir output/full_correlation/

rm -R -f work/*
mkdir work/kat/
mkdir work/full_correlation/
mkdir work/full_correlation/kat/

rm -R -f /tmp/%FIFO_DIR%/
mkdir -p /tmp/%FIFO_DIR%/fifo/
mkdir /tmp/%FIFO_DIR%/fifo/full_correlation/
mkfifo /tmp/%FIFO_DIR%/fifo/gul_P1

mkfifo /tmp/%FIFO_DIR%/fifo/gul_S1_summary_P1
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S1_summaryeltcalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S1_eltcalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S1_summarysummarycalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S1_summarycalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S1_summarypltcalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S1_pltcalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S2_summary_P1
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S2_summaryeltcalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S2_eltcalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S2_summarysummarycalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S2_summarycalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S2_summarypltcalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S2_pltcalc_P1

mkdir work/gul_S1_summaryleccalc
mkdir work/gul_S1_summaryaalcalc
mkdir work/gul_S2_summaryleccalc
mkdir work/gul_S2_summaryaalcalc

mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summary_P1
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summaryeltcalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_eltcalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summarysummarycalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summarycalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summarypltcalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_pltcalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summary_P1
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summaryeltcalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_eltcalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summarysummarycalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summarycalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summarypltcalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_pltcalc_P1

mkdir work/full_correlation/gul_S1_summaryleccalc
mkdir work/full_correlation/gul_S1_summaryaalcalc
mkdir work/full_correlation/gul_S2_summaryleccalc
mkdir work/full_correlation/gul_S2_summaryaalcalc

# --- Do ground up loss computes ---
# Consumers first (reading the fifos), then the tee fan-out, then the producer.
eltcalc < /tmp/%FIFO_DIR%/fifo/gul_S1_summaryeltcalc_P1 > work/kat/gul_S1_eltcalc_P1 & pid1=$!
summarycalctocsv < /tmp/%FIFO_DIR%/fifo/gul_S1_summarysummarycalc_P1 > work/kat/gul_S1_summarycalc_P1 & pid2=$!
pltcalc < /tmp/%FIFO_DIR%/fifo/gul_S1_summarypltcalc_P1 > work/kat/gul_S1_pltcalc_P1 & pid3=$!
eltcalc < /tmp/%FIFO_DIR%/fifo/gul_S2_summaryeltcalc_P1 > work/kat/gul_S2_eltcalc_P1 & pid4=$!
summarycalctocsv < /tmp/%FIFO_DIR%/fifo/gul_S2_summarysummarycalc_P1 > work/kat/gul_S2_summarycalc_P1 & pid5=$!
pltcalc < /tmp/%FIFO_DIR%/fifo/gul_S2_summarypltcalc_P1 > work/kat/gul_S2_pltcalc_P1 & pid6=$!

tee < /tmp/%FIFO_DIR%/fifo/gul_S1_summary_P1 /tmp/%FIFO_DIR%/fifo/gul_S1_summaryeltcalc_P1 /tmp/%FIFO_DIR%/fifo/gul_S1_summarypltcalc_P1 /tmp/%FIFO_DIR%/fifo/gul_S1_summarysummarycalc_P1 work/gul_S1_summaryaalcalc/P1.bin work/gul_S1_summaryleccalc/P1.bin > /dev/null & pid7=$!
tee < /tmp/%FIFO_DIR%/fifo/gul_S2_summary_P1 /tmp/%FIFO_DIR%/fifo/gul_S2_summaryeltcalc_P1 /tmp/%FIFO_DIR%/fifo/gul_S2_summarypltcalc_P1 /tmp/%FIFO_DIR%/fifo/gul_S2_summarysummarycalc_P1 work/gul_S2_summaryaalcalc/P1.bin work/gul_S2_summaryleccalc/P1.bin > /dev/null & pid8=$!
summarycalc -i  -1 /tmp/%FIFO_DIR%/fifo/gul_S1_summary_P1 -2 /tmp/%FIFO_DIR%/fifo/gul_S2_summary_P1 < /tmp/%FIFO_DIR%/fifo/gul_P1 &

# gulcalc -j writes a second, fully-correlated copy of the loss stream to
# full_correlation/gul_P1 (a regular file -- no mkfifo above, presumably
# intentional since its consumer only starts after the wait below).
eve 1 1 | getmodel | gulcalc -S0 -L0 -r -j /tmp/%FIFO_DIR%/fifo/full_correlation/gul_P1 -a1 -i - > /tmp/%FIFO_DIR%/fifo/gul_P1  &

wait $pid1 $pid2 $pid3 $pid4 $pid5 $pid6 $pid7 $pid8

# --- Do computes for fully correlated output ---

# --- Do ground up loss computes ---
eltcalc < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summaryeltcalc_P1 > work/full_correlation/kat/gul_S1_eltcalc_P1 & pid1=$!
summarycalctocsv < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summarysummarycalc_P1 > work/full_correlation/kat/gul_S1_summarycalc_P1 & pid2=$!
pltcalc < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summarypltcalc_P1 > work/full_correlation/kat/gul_S1_pltcalc_P1 & pid3=$!
eltcalc < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summaryeltcalc_P1 > work/full_correlation/kat/gul_S2_eltcalc_P1 & pid4=$!
summarycalctocsv < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summarysummarycalc_P1 > work/full_correlation/kat/gul_S2_summarycalc_P1 & pid5=$!
pltcalc < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summarypltcalc_P1 > work/full_correlation/kat/gul_S2_pltcalc_P1 & pid6=$!

tee < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summary_P1 /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summaryeltcalc_P1 /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summarypltcalc_P1 /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summarysummarycalc_P1 work/full_correlation/gul_S1_summaryaalcalc/P1.bin work/full_correlation/gul_S1_summaryleccalc/P1.bin > /dev/null & pid7=$!
tee < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summary_P1 /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summaryeltcalc_P1 /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summarypltcalc_P1 /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summarysummarycalc_P1 work/full_correlation/gul_S2_summaryaalcalc/P1.bin work/full_correlation/gul_S2_summaryleccalc/P1.bin > /dev/null & pid8=$!
summarycalc -i  -1 /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summary_P1 -2 /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summary_P1 < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_P1 &

wait $pid1 $pid2 $pid3 $pid4 $pid5 $pid6 $pid7 $pid8

# --- Do ground up loss kats ---

kat work/kat/gul_S1_eltcalc_P1 > output/gul_S1_eltcalc.csv & kpid1=$!
kat work/kat/gul_S1_pltcalc_P1 > output/gul_S1_pltcalc.csv & kpid2=$!
kat work/kat/gul_S1_summarycalc_P1 > output/gul_S1_summarycalc.csv & kpid3=$!
kat work/kat/gul_S2_eltcalc_P1 > output/gul_S2_eltcalc.csv & kpid4=$!
kat work/kat/gul_S2_pltcalc_P1 > output/gul_S2_pltcalc.csv & kpid5=$!
kat work/kat/gul_S2_summarycalc_P1 > output/gul_S2_summarycalc.csv & kpid6=$!

# --- Do ground up loss kats for fully correlated output ---

kat work/full_correlation/kat/gul_S1_eltcalc_P1 > output/full_correlation/gul_S1_eltcalc.csv & kpid7=$!
kat work/full_correlation/kat/gul_S1_pltcalc_P1 > output/full_correlation/gul_S1_pltcalc.csv & kpid8=$!
kat work/full_correlation/kat/gul_S1_summarycalc_P1 > output/full_correlation/gul_S1_summarycalc.csv & kpid9=$!
kat work/full_correlation/kat/gul_S2_eltcalc_P1 > output/full_correlation/gul_S2_eltcalc.csv & kpid10=$!
kat work/full_correlation/kat/gul_S2_pltcalc_P1 > output/full_correlation/gul_S2_pltcalc.csv & kpid11=$!
kat work/full_correlation/kat/gul_S2_summarycalc_P1 > output/full_correlation/gul_S2_summarycalc.csv & kpid12=$!
wait $kpid1 $kpid2 $kpid3 $kpid4 $kpid5 $kpid6 $kpid7 $kpid8 $kpid9 $kpid10 $kpid11 $kpid12

# Post-processing: average annual loss and loss exceedance curves from the
# binary summary files accumulated in work/.
aalcalc -Kgul_S1_summaryaalcalc > output/gul_S1_aalcalc.csv & lpid1=$!
leccalc -r -Kgul_S1_summaryleccalc -F output/gul_S1_leccalc_full_uncertainty_aep.csv -f output/gul_S1_leccalc_full_uncertainty_oep.csv -S output/gul_S1_leccalc_sample_mean_aep.csv -s output/gul_S1_leccalc_sample_mean_oep.csv -W output/gul_S1_leccalc_wheatsheaf_aep.csv -M output/gul_S1_leccalc_wheatsheaf_mean_aep.csv -m output/gul_S1_leccalc_wheatsheaf_mean_oep.csv -w output/gul_S1_leccalc_wheatsheaf_oep.csv & lpid2=$!
aalcalc -Kgul_S2_summaryaalcalc > output/gul_S2_aalcalc.csv & lpid3=$!
leccalc -r -Kgul_S2_summaryleccalc -F output/gul_S2_leccalc_full_uncertainty_aep.csv -f output/gul_S2_leccalc_full_uncertainty_oep.csv -S output/gul_S2_leccalc_sample_mean_aep.csv -s output/gul_S2_leccalc_sample_mean_oep.csv -W output/gul_S2_leccalc_wheatsheaf_aep.csv -M output/gul_S2_leccalc_wheatsheaf_mean_aep.csv -m output/gul_S2_leccalc_wheatsheaf_mean_oep.csv -w output/gul_S2_leccalc_wheatsheaf_oep.csv & lpid4=$!
aalcalc -Kfull_correlation/gul_S1_summaryaalcalc > output/full_correlation/gul_S1_aalcalc.csv & lpid5=$!
leccalc -r -Kfull_correlation/gul_S1_summaryleccalc -F output/full_correlation/gul_S1_leccalc_full_uncertainty_aep.csv -f output/full_correlation/gul_S1_leccalc_full_uncertainty_oep.csv -S output/full_correlation/gul_S1_leccalc_sample_mean_aep.csv -s output/full_correlation/gul_S1_leccalc_sample_mean_oep.csv -W output/full_correlation/gul_S1_leccalc_wheatsheaf_aep.csv -M output/full_correlation/gul_S1_leccalc_wheatsheaf_mean_aep.csv -m output/full_correlation/gul_S1_leccalc_wheatsheaf_mean_oep.csv -w output/full_correlation/gul_S1_leccalc_wheatsheaf_oep.csv & lpid6=$!
aalcalc -Kfull_correlation/gul_S2_summaryaalcalc > output/full_correlation/gul_S2_aalcalc.csv & lpid7=$!
leccalc -r -Kfull_correlation/gul_S2_summaryleccalc -F output/full_correlation/gul_S2_leccalc_full_uncertainty_aep.csv -f output/full_correlation/gul_S2_leccalc_full_uncertainty_oep.csv -S output/full_correlation/gul_S2_leccalc_sample_mean_aep.csv -s output/full_correlation/gul_S2_leccalc_sample_mean_oep.csv -W output/full_correlation/gul_S2_leccalc_wheatsheaf_aep.csv -M output/full_correlation/gul_S2_leccalc_wheatsheaf_mean_aep.csv -m output/full_correlation/gul_S2_leccalc_wheatsheaf_mean_oep.csv -w output/full_correlation/gul_S2_leccalc_wheatsheaf_oep.csv & lpid8=$!
wait $lpid1 $lpid2 $lpid3 $lpid4 $lpid5 $lpid6 $lpid7 $lpid8

# Clean up intermediates; output/ keeps the final CSVs.
rm -R -f work/*
rm -R -f /tmp/%FIFO_DIR%/
|
"""timed_input: add a timeout to standard input.
Approach was inspired by: https://github.com/johejo/inputimeout
"""
import sys
# Terminal control characters used when echoing prompts and erasing input.
SP = " "      # space: used to blank out backspaced characters
CR = "\r"     # carriage return: move cursor to start of line
LF = "\n"     # line feed
CRLF = CR + LF
def _echo(prompt: str) -> None:
sys.stdout.write(prompt)
sys.stdout.flush()
def _posix_timed_input(prompt: str, timeout: float) -> str:
    """Prompt on stdout and wait up to ``timeout`` seconds for a stdin line.

    Raises:
        TimeoutError: when no line arrives in time, or stdin reaches
            end-of-file (treated the same as a timeout).
    """
    _echo(prompt)
    selector = selectors.DefaultSelector()
    selector.register(sys.stdin, selectors.EVENT_READ, data=sys.stdin.readline)
    ready = selector.select(timeout=timeout)
    if ready:
        key = ready[0][0]
        read_line = key.data
        line: str = read_line()
        if not line:  # empty read means EOF -- treat like a timeout
            raise TimeoutError
        return line.rstrip(LF)
    # Timed out: finish the prompt line and discard any partial keystrokes.
    _echo(LF)
    termios.tcflush(sys.stdin, termios.TCIFLUSH)
    raise TimeoutError
def _windows_timed_input(prompt: str, timeout: float) -> str:
    """Prompt on stdout and hand-assemble a console line with a deadline.

    msvcrt has no line-buffered read with a timeout, so this polls the
    keyboard every 100 ms and builds the line one keystroke at a time.

    Raises:
        TimeoutError: when no complete line arrives before the deadline.
        KeyboardInterrupt: when Ctrl-C is typed.
    """
    interval = 0.1
    _echo(prompt)
    begin = time.monotonic()
    end = begin + timeout
    line = ""
    while time.monotonic() < end:
        if msvcrt.kbhit():  # type: ignore[attr-defined]
            c = msvcrt.getwche()  # type: ignore[attr-defined]
            if c in (CR, LF):
                _echo(CRLF)
                return line
            if c == "\003":  # ETX: Ctrl-C
                raise KeyboardInterrupt
            if c == "\b":
                # Backspace: drop the last char, then overwrite the whole
                # echoed line with spaces and redraw it one char shorter.
                line = line[:-1]
                cover = SP * len(prompt + line + SP)
                _echo("".join([CR, cover, CR, prompt, line]))
            else:
                line += c
        time.sleep(interval)
    _echo(CRLF)
    raise TimeoutError
def timed_input(prompt: str, timeout: float, show_timeout: bool = True) -> str:
    """Behaves like builtin `input()` but adds a timeout.

    Args:
        prompt (str): Prompt to output to stdout.
        timeout (float): Seconds to wait for input.
        show_timeout (bool): Append the timeout to the prompt when True.

    Raises:
        TimeoutError: raised if the timeout elapses with no input.
    """
    full_prompt = prompt
    if show_timeout:
        full_prompt = f"{prompt}({timeout:.0f} second timeout) "
    return _timed_input(prompt=full_prompt, timeout=timeout)
# Select the platform-appropriate implementation at import time: msvcrt only
# exists on Windows, so an ImportError means we are on a POSIX system and
# should use the selectors/termios implementation instead.
try:
    import msvcrt
except ImportError:
    import selectors
    import termios
    _timed_input = _posix_timed_input
else:
    import time
    _timed_input = _windows_timed_input
|
#!/bin/sh
# CocoaPods-generated "Embed Pods Frameworks" build phase script.
# NOTE(review): it relies on bash-only features (the `function` keyword,
# `[[ ]]`, arrays, `trap ... ERR`); this works because Xcode's /bin/sh is
# bash on macOS, but the shebang is not strict POSIX sh -- confirm before
# running elsewhere.
set -e
set -u
set -o pipefail

function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
# Copies and strips a vendored framework
# $1: path (or basename) of the .framework, resolved against
#     BUILT_PRODUCTS_DIR first, then taken literally.
install_framework()
{
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"

  # Fall back to a bare binary when the framework layout is flat; follow a
  # symlinked binary to its real file before stripping/signing.
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies and strips a vendored dSYM
# Copies and strips a vendored dSYM
# $1: path to the .dSYM bundle; $2: whether to warn on architecture mismatch.
install_dsym() {
  local source="$1"
  warn_missing_arch=${2:-true}
  if [ -r "$source" ]; then
    # Copy the dSYM into the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

    local basename
    basename="$(basename -s .dSYM "$source")"
    binary_name="$(ls "$source/Contents/Resources/DWARF")"
    binary="${DERIVED_FILES_DIR}/${basename}.dSYM/Contents/Resources/DWARF/${binary_name}"

    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
      strip_invalid_archs "$binary" "$warn_missing_arch"
    fi

    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      # BUG FIX: the echoed command previously said "${basename}.framework.dSYM"
      # while the executed rsync used "${basename}.dSYM"; the log now matches
      # what actually runs.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.dSYM"
    fi
  fi
}
# Copies the bcsymbolmap files of a vendored framework
# Copies the bcsymbolmap files of a vendored framework
install_bcsymbolmap() {
    local bcsymbolmap_path="$1"
    local destination="${BUILT_PRODUCTS_DIR}"
    # BUG FIX: the inner quotes in the echoed command were unescaped (unlike
    # the sibling install_* functions), so the logged command line lost its
    # quoting; escape them as \" so the log shows the real command.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${bcsymbolmap_path}\" \"${destination}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}
# Signs a framework with the provided identity
# Signs a framework with the provided identity
# $1: path to the bundle/binary to sign.  Honors COCOAPODS_PARALLEL_CODE_SIGN
# by backgrounding the codesign invocation (the caller waits at script end).
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"

    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    # eval is required so the trailing '&' (parallel mode) is interpreted.
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures
# Strip invalid architectures
# $1: binary to thin; $2: whether to warn when no arch matches.
# Communicates its outcome through the global STRIP_BINARY_RETVAL:
#   1 = binary is usable (possibly thinned), 0 = no matching architectures.
strip_invalid_archs() {
  binary="$1"
  warn_missing_arch=${2:-true}
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    if [[ "$warn_missing_arch" == "true" ]]; then
      echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    fi
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=1
}
# Dispatches a single artifact path to the matching install_* helper based
# on its file extension.
install_artifact() {
  artifact="$1"
  base="$(basename "$artifact")"
  case $base in
  *.framework)
    install_framework "$artifact"
    ;;
  *.dSYM)
    # Suppress arch warnings since XCFrameworks will include many dSYM files
    install_dsym "$artifact" "false"
    ;;
  *.bcsymbolmap)
    install_bcsymbolmap "$artifact"
    ;;
  *)
    # BUG FIX: the original nested unescaped quotes
    # (echo "... "$artifact"") which split the message and dropped the
    # intended quoting around the artifact name.
    echo "error: Unrecognized artifact \"$artifact\""
    ;;
  esac
}
# Installs every artifact listed (one path per line) in the file named by $1.
copy_artifacts() {
  file_list="$1"
  # BUG FIX: use IFS= and read -r so paths with leading whitespace or
  # backslashes are passed through unmangled, and quote the redirection
  # target so a list path containing spaces still opens correctly.
  while IFS= read -r artifact; do
    install_artifact "$artifact"
  done < "$file_list"
}
# Install any artifacts CocoaPods listed for this configuration, then the
# explicitly vendored frameworks for Debug/Release.
ARTIFACT_LIST_FILE="${BUILT_PRODUCTS_DIR}/cocoapods-artifacts-${CONFIGURATION}.txt"
if [ -r "${ARTIFACT_LIST_FILE}" ]; then
  copy_artifacts "${ARTIFACT_LIST_FILE}"
fi

if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/NLTencentOpenAPI/NLTencentOpenAPI.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/NLTencentOpenAPI/NLTencentOpenAPI.framework"
fi
# Reap any codesign jobs backgrounded by code_sign_if_enabled.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
<gh_stars>0
# NOTE(review): distutils (and StrictVersion with it) was deprecated by
# PEP 632 and removed in Python 3.12; consider packaging.version.Version.
# Left unchanged because callers may depend on the comparable version object.
from distutils.version import StrictVersion

__version__ = StrictVersion('0.1')
|
<gh_stars>0
import * as React from "react";
import Svg, { Path, SvgProps } from "react-native-svg";
interface Props extends SvgProps {
  size?: number;
}

/**
 * Bell icon drawn in a 20x20 viewBox and scaled to `size` (default 24).
 * Every additional prop is forwarded to the underlying <Svg> element.
 */
const Bell = (props: Props) => {
  const { size = 24, ...svgProps } = props;
  return (
    <Svg
      viewBox="0 0 20 20"
      fill="currentColor"
      width={size}
      height={size}
      {...svgProps}
    >
      <Path d="M10 2a6 6 0 00-6 6v3.586l-.707.707A1 1 0 004 14h12a1 1 0 00.707-1.707L16 11.586V8a6 6 0 00-6-6zm0 16a3 3 0 01-3-3h6a3 3 0 01-3 3z" />
    </Svg>
  );
};

export default Bell;
|
#!/bin/bash
# Runs fourmolu in check mode over every tracked Haskell source file.

# Extensions necessary to tell fourmolu about
EXTENSIONS="-o -XTypeApplications -o -XTemplateHaskell -o -XImportQualifiedPost -o -XPatternSynonyms -o -fplugin=RecordDotPreprocessor -o -XBangPatterns"

# BUG FIX: anchor the pattern so only files *ending* in .hs are selected;
# the unanchored '.*\.hs' also matched names like Foo.hsc or Foo.hs.orig.
SOURCES=$(git ls-tree -r HEAD --full-tree --name-only | grep -E '\.hs$')

# $EXTENSIONS and $SOURCES are intentionally unquoted: each must word-split
# into separate arguments.
~/.local/bin/fourmolu --mode check --check-idempotence $EXTENSIONS $SOURCES
|
# -*- sh -*-
# Run every hook registered in the list variable named by $1, passing the
# remaining arguments through to each hook.  Hooks execute in registration
# order; when debug-p is true each hook is announced before running.
function run-hooks()
{
    local hooks=$(get-by-varname $1)
    local hook
    shift
    if [ ! -z "$hooks" ]; then
        for hook in $hooks; do
            if debug-p; then
                echo "Running $hook"
            fi
            $hook "$@"
        done
    fi
}
# Append hook $2 to the hook list stored in the variable named by $1.
# NOTE(review): push-word receives the literal name "hooks" (this function's
# local variable), presumably appending to it by name indirection before the
# updated list is written back -- verify against push-word's definition.
function add-hook()
{
    local hooks_varname=$1
    local hooks=$(get-by-varname $hooks_varname)
    local hook_name=$2
    push-word hooks $hook_name
    set-by-varname $hooks_varname $hooks
}
# Remove hook $2 from the hook list stored in the variable named by $1,
# rebuilding the list without the matching entry.
function remove-hook()
{
    local hooks_varname=$1
    local hooks=$(get-by-varname $1)
    local hook_to_remove=$2
    local hook
    local new_hooks
    for hook in $hooks; do
        if [ "$hook" != "$hook_to_remove" ]; then
            # BUG FIX: push-word takes the *name* of the variable to append
            # to (cf. add-hook, which passes the literal name "hooks");
            # `push-word $new_hooks $hook` expanded the empty value instead,
            # so surviving hooks were never accumulated.
            push-word new_hooks $hook
        fi
    done
    set-by-varname $hooks_varname $new_hooks
}
|
<filename>lib/ribosome.js
var path = require('path')
var tmp = require('tmp')
var html = require('./html.js')
var pdf = require('./pdf.js')
// Public module object.  BUG FIX: declare the binding with `var`; the
// original `module.exports = ribosome = {}` assigned an undeclared
// identifier, which leaks a global (and throws in strict mode).
var ribosome = module.exports = {}

// Setup graceful cleanup in case of exceptions.
tmp.setGracefulCleanup()

// Renders the document at `url` to a PDF inside a throwaway temp directory.
//   url     - page to render
//   success - called with the generated PDF content
//   error   - called with any error from temp-dir creation, HTML capture,
//             or PDF generation
//   license - optional license token forwarded to the PDF generator
ribosome.translate = function(url, success, error, license) {
  tmp.dir({ unsafeCleanup: true }, function(err, dirPath, cleanupCallback) {
    if (err) {
      error(err)
    } else {
      var htmlPath = path.join(dirPath, "tmp.html")
      var pdfPath = path.join(dirPath, "tmp.pdf")
      html.save(url, htmlPath, function() {
        pdf.generate(htmlPath, pdfPath, function(pdfContent) {
          success(pdfContent)
          // Remove the temp dir only after the PDF has been handed off.
          cleanupCallback()
        }, function(err) {
          error(err)
        }, license)
      }, function(err) {
        error(err)
      })
    }
  })
}
|
<filename>ios/Classes/TiSirikit.h
//
// TiSirikit.h
// titanium-sirikit
//
// Created by Your Name
// Copyright (c) 2019 Your Company. All rights reserved.
//
#import <UIKit/UIKit.h>

//! Project version number for TiSirikit.
FOUNDATION_EXPORT double TiSirikitVersionNumber;

//! Project version string for TiSirikit.
FOUNDATION_EXPORT const unsigned char TiSirikitVersionString[];

// Umbrella import exposing the module's generated asset accessors to
// framework consumers.
#import "TiSirikitModuleAssets.h"
|
package fwcd.fructose.structs;
import java.util.Arrays;
import java.util.function.DoubleBinaryOperator;
import java.util.function.DoubleConsumer;
import java.util.function.DoublePredicate;
import java.util.function.DoubleUnaryOperator;
import java.util.stream.DoubleStream;
import fwcd.fructose.Copyable;
public class DoubleList implements Copyable<DoubleList> {
private double[] data;
private int size = 0;
/** Creates an empty list with a default initial capacity of 10. */
public DoubleList() {
    this(10);
}
/**
 * Creates an empty list whose backing array starts at the given length.
 * Despite the parameter name, this is the initial *capacity*; the logical
 * size is still 0.
 */
public DoubleList(int initialSize) {
    data = new double[initialSize];
}
/**
 * Creates a list containing the given elements.
 *
 * <p>BUG FIX: the array is now defensively copied; the original stored the
 * caller's array directly, so later external mutation (or this list's own
 * writes) aliased the caller's data, breaking the encapsulation the rest of
 * the class (e.g. {@code toArray()}) maintains.
 */
public DoubleList(double[] data) {
    this.data = Arrays.copyOf(data, data.length);
    size = data.length;
}
/** Copy constructor: duplicates the other list's backing array and size. */
public DoubleList(DoubleList other) {
    data = Arrays.copyOf(other.data, other.data.length);
    size = other.size;
}
/**
 * Returns a sequential stream over the list's elements.
 *
 * <p>BUG FIX: stream only the first {@code size} slots; the original
 * streamed the entire backing array, leaking unused capacity slots
 * (zeros / stale values) past the logical end of the list.
 */
public DoubleStream stream() {
    return Arrays.stream(data, 0, size);
}
/**
 * Grows the backing array, if necessary, so that indices
 * {@code 0..size-1} are all in bounds.  Callers bump {@code size} first
 * and then invoke this.
 *
 * <p>FIX: grow geometrically to at least {@code size} rather than by a
 * fixed +10; the fixed increment made repeated appends O(n^2) overall and
 * could not cover a bulk {@code addAll} of more than 10 elements.
 */
private void ensureCapacity() {
    if (size > data.length) {
        data = Arrays.copyOf(data, Math.max(size, data.length * 2));
    }
}
/** Returns the number of elements currently in the list. */
public int size() {
    return size;
}
public DoubleList filter(DoublePredicate predicate) {
DoubleList result = copy();
result.filterInPlace(predicate);
return result;
}
public void filterInPlace(DoublePredicate predicate) {
IntList toBeRemoved = new IntList();
for (int i=0; i<size; i++) {
if (!predicate.test(data[i])) {
toBeRemoved.add(i);
}
}
remove(toBeRemoved.toArray());
}
public DoubleList map(DoubleUnaryOperator mapper) {
DoubleList result = copy();
result.mapInPlace(mapper);
return result;
}
public void mapInPlace(DoubleUnaryOperator mapper) {
for (int i=0; i<size; i++) {
data[i] = mapper.applyAsDouble(data[i]);
}
}
public double sum() {
double sum = 0;
for (int i=0; i<size; i++) {
sum += data[i];
}
return sum;
}
public double reduce(DoubleBinaryOperator associativeAccumulator) {
double result = data[0];
for (int i=1; i<size; i++) {
result = associativeAccumulator.applyAsDouble(result, data[i]);
}
return result;
}
public void add(double v) {
size++;
ensureCapacity();
data[size - 1] = v;
}
public void addAll(double... v) {
int offset = size;
size += v.length;
ensureCapacity();
System.arraycopy(v, 0, data, offset, v.length);
}
public void addAll(DoubleList list) {
addAll(list.data);
}
public void removeLast() {
size--;
}
public void remove(int... indices) {
Arrays.sort(indices);
int shift = 0;
for (int removingIndex : indices) {
for (int j=removingIndex+1+shift; j<size; j++) {
data[j - 1] = data[j];
}
size--;
shift--;
}
}
public double get(int i) {
if (i < size) {
return data[i];
} else {
throw new IndexOutOfBoundsException(Integer.toString(i));
}
}
public double[] toArray() {
return Arrays.copyOf(data, size);
}
public void forEach(DoubleConsumer consumer) {
for (int i=0; i<size; i++) {
consumer.accept(data[i]);
}
}
@Override
public String toString() {
StringBuilder s = new StringBuilder("[");
for (int i=0; i<size; i++) {
s.append(data[i]).append(", ");
}
return s.delete(s.length() - 2, s.length()).append(']').toString();
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
for (int i=0; i<size; i++) {
result = (int) (prime * result + data[i]);
}
result = prime * result + size;
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
DoubleList other = (DoubleList) obj;
if (size != other.size) {
return false;
}
for (int i=0; i<size; i++) {
if (data[i] != other.data[i]) {
return false;
}
}
return true;
}
@Override
public DoubleList copy() {
return new DoubleList(this);
}
}
|
# frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
module Google
module Cloud
module SecurityCenter
module Settings
module V1beta1
# Request message for GetServiceAccount.
# @!attribute [rw] name
# @return [::String]
# Required. The relative resource name of the service account resource.
# Format:
# * `organizations/{organization}/serviceAccount`
class GetServiceAccountRequest
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# An organization-level service account to be used by threat detection
# components.
# @!attribute [rw] name
# @return [::String]
# The relative resource name of the service account resource.
# Format:
# * `organizations/{organization}/serviceAccount`
# @!attribute [rw] service_account
# @return [::String]
# Security Center managed service account for the organization
# example <EMAIL> (NOTE(review): the concrete example address was
# redacted upstream; confirm the expected email format)
# This service_account will be stored in the ComponentSettings field for the
# SCC, SHA, and Infra Automation components.
class ServiceAccount
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Request message for GetSettings.
# @!attribute [rw] name
# @return [::String]
# Required. The name of the settings to retrieve.
# Formats:
# * `organizations/{organization}/settings`
# * `folders/{folder}/settings`
# * `projects/{project}/settings`
# * `projects/{project}/locations/{location}/clusters/{cluster}/settings`
# * `projects/{project}/regions/{region}/clusters/{cluster}/settings`
# * `projects/{project}/zones/{zone}/clusters/{cluster}/settings`
class GetSettingsRequest
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Request message for UpdateSettings.
# @!attribute [rw] settings
# @return [::Google::Cloud::SecurityCenter::Settings::V1beta1::Settings]
# Required. The settings to update.
#
# The settings' `name` field is used to identify the settings to be updated.
# Formats:
# * `organizations/{organization}/settings`
# * `folders/{folder}/settings`
# * `projects/{project}/settings`
# * `projects/{project}/locations/{location}/clusters/{cluster}/settings`
# * `projects/{project}/regions/{region}/clusters/{cluster}/settings`
# * `projects/{project}/zones/{zone}/clusters/{cluster}/settings`
# @!attribute [rw] update_mask
# @return [::Google::Protobuf::FieldMask]
# The list of fields to be updated on the settings.
class UpdateSettingsRequest
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Request message for ResetSettings.
# @!attribute [rw] name
# @return [::String]
# Required. The name of the settings to reset.
# Formats:
# * `organizations/{organization}/settings`
# * `folders/{folder}/settings`
# * `projects/{project}/settings`
# * `projects/{project}/locations/{location}/clusters/{cluster}/settings`
# * `projects/{project}/regions/{region}/clusters/{cluster}/settings`
# * `projects/{project}/zones/{zone}/clusters/{cluster}/settings`
# @!attribute [rw] etag
# @return [::String]
# A fingerprint used for optimistic concurrency. If none is provided,
# then the existing settings will be blindly overwritten.
class ResetSettingsRequest
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Request message for BatchGetSettings.
# @!attribute [rw] parent
# @return [::String]
# Required. The relative resource name of the organization shared by all of the
# settings being retrieved.
# Format:
# * `organizations/{organization}`
# @!attribute [rw] names
# @return [::Array<::String>]
# The names of the settings to retrieve.
# A maximum of 1000 settings can be retrieved in a batch.
# Formats:
# * `organizations/{organization}/settings`
# * `folders/{folder}/settings`
# * `projects/{project}/settings`
# * `projects/{project}/locations/{location}/clusters/{cluster}/settings`
# * `projects/{project}/regions/{region}/clusters/{cluster}/settings`
# * `projects/{project}/zones/{zone}/clusters/{cluster}/settings`
class BatchGetSettingsRequest
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Response message for BatchGetSettings.
# @!attribute [rw] settings
# @return [::Array<::Google::Cloud::SecurityCenter::Settings::V1beta1::Settings>]
# Settings requested.
class BatchGetSettingsResponse
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Request message for CalculateEffectiveSettings.
# @!attribute [rw] name
# @return [::String]
# Required. The name of the effective settings to retrieve.
# Formats:
# * `organizations/{organization}/effectiveSettings`
# * `folders/{folder}/effectiveSettings`
# * `projects/{project}/effectiveSettings`
# * `projects/{project}/locations/{location}/clusters/{cluster}/effectiveSettings`
# * `projects/{project}/regions/{region}/clusters/{cluster}/effectiveSettings`
# * `projects/{project}/zones/{zone}/clusters/{cluster}/effectiveSettings`
class CalculateEffectiveSettingsRequest
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Request message for BatchCalculateEffectiveSettings.
# (NOTE(review): comment previously said "BatchGetEffectiveSettings", which
# does not match the class name below.)
# @!attribute [rw] parent
# @return [::String]
# Required. The relative resource name of the organization shared by all of the
# settings being retrieved.
# Format:
# * `organizations/{organization}`
# @!attribute [rw] requests
# @return [::Array<::Google::Cloud::SecurityCenter::Settings::V1beta1::CalculateEffectiveSettingsRequest>]
# The requests specifying the effective settings to retrieve.
# A maximum of 1000 effective settings can be retrieved in a batch.
class BatchCalculateEffectiveSettingsRequest
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Response message for BatchCalculateEffectiveSettings.
# @!attribute [rw] settings
# @return [::Array<::Google::Cloud::SecurityCenter::Settings::V1beta1::Settings>]
# Settings requested.
class BatchCalculateEffectiveSettingsResponse
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Request message for GetComponentSettings.
# @!attribute [rw] name
# @return [::String]
# Required. The component settings to retrieve.
#
# Formats:
# * `organizations/{organization}/components/{component}/settings`
# * `folders/{folder}/components/{component}/settings`
# * `projects/{project}/components/{component}/settings`
# * `projects/{project}/locations/{location}/clusters/{cluster}/components/{component}/settings`
# * `projects/{project}/regions/{region}/clusters/{cluster}/components/{component}/settings`
# * `projects/{project}/zones/{zone}/clusters/{cluster}/components/{component}/settings`
class GetComponentSettingsRequest
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Request message for UpdateComponentSettings.
# @!attribute [rw] component_settings
# @return [::Google::Cloud::SecurityCenter::Settings::V1beta1::ComponentSettings]
# Required. The component settings to update.
#
# The component settings' `name` field is used to identify the component
# settings to be updated. Formats:
# * `organizations/{organization}/components/{component}/settings`
# * `folders/{folder}/components/{component}/settings`
# * `projects/{project}/components/{component}/settings`
# * `projects/{project}/locations/{location}/clusters/{cluster}/components/{component}/settings`
# * `projects/{project}/regions/{region}/clusters/{cluster}/components/{component}/settings`
# * `projects/{project}/zones/{zone}/clusters/{cluster}/components/{component}/settings`
# @!attribute [rw] update_mask
# @return [::Google::Protobuf::FieldMask]
# The list of fields to be updated on the component settings resource.
class UpdateComponentSettingsRequest
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Request message for ResetComponentSettings.
# @!attribute [rw] name
# @return [::String]
# Required. The component settings to reset.
#
# Formats:
# * `organizations/{organization}/components/{component}/settings`
# * `folders/{folder}/components/{component}/settings`
# * `projects/{project}/components/{component}/settings`
# * `projects/{project}/locations/{location}/clusters/{cluster}/components/{component}/settings`
# * `projects/{project}/regions/{region}/clusters/{cluster}/components/{component}/settings`
# * `projects/{project}/zones/{zone}/clusters/{cluster}/components/{component}/settings`
# @!attribute [rw] etag
# @return [::String]
# A fingerprint used for optimistic concurrency. If none is provided,
# then the existing settings will be blindly overwritten.
class ResetComponentSettingsRequest
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Request message for CalculateEffectiveComponentSettings.
# @!attribute [rw] name
# @return [::String]
# Required. The effective component settings to retrieve.
#
# Formats:
# * `organizations/{organization}/components/{component}/settings`
# * `folders/{folder}/components/{component}/settings`
# * `projects/{project}/components/{component}/settings`
# * `projects/{project}/locations/{location}/clusters/{cluster}/components/{component}/settings`
# * `projects/{project}/regions/{region}/clusters/{cluster}/components/{component}/settings`
# * `projects/{project}/zones/{zone}/clusters/{cluster}/components/{component}/settings`
class CalculateEffectiveComponentSettingsRequest
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Request message for ListDetectors.
# @!attribute [rw] parent
# @return [::String]
# Required. The parent, which owns this collection of detectors.
# Format:
# * `organizations/{organization}`
# @!attribute [rw] filter
# @return [::String]
# Filters to apply on the response. Filters can be applied on:
# * components
# * labels
# * billing tiers
#
# Component filters will retrieve only detectors for the components
# specified. Label filters will retrieve only detectors that match one of the
# labels specified. Billing tier filters will retrieve only detectors for
# that billing tier.
#
# The filters (NOTE(review): this sentence is truncated in the upstream
# docs; confirm the intended combination semantics against the API spec.)
# @!attribute [rw] page_size
# @return [::Integer]
# The maximum number of detectors to return. The service may return fewer
# than this value. If unspecified, at most 100 detectors will be returned.
# The maximum value is 1000; values above 1000 will be coerced to 1000.
# @!attribute [rw] page_token
# @return [::String]
# A page token, received from a previous `ListDetectors` call.
# Provide this to retrieve the subsequent page.
#
# When paginating, all other parameters provided to `ListDetectors` must
# match the call that provided the page token.
class ListDetectorsRequest
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Response message for ListDetectors.
# @!attribute [rw] detectors
# @return [::Array<::Google::Cloud::SecurityCenter::Settings::V1beta1::Detector>]
# The detectors from the specified organization.
# @!attribute [rw] next_page_token
# @return [::String]
# A token that can be sent as `page_token` to retrieve the next page.
# If this field is omitted, there are no subsequent pages.
class ListDetectorsResponse
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Request message for ListComponents.
# @!attribute [rw] parent
# @return [::String]
# Required. The parent, which owns this collection of components.
# Format:
# * `organizations/{organization}`
# @!attribute [rw] page_size
# @return [::Integer]
# The maximum number of components to return. The service may return fewer
# than this value. If unspecified, at most 100 components will be returned.
# The maximum value is 1000; values above 1000 will be coerced to 1000.
# @!attribute [rw] page_token
# @return [::String]
# A page token, received from a previous `ListComponents` call.
# Provide this to retrieve the subsequent page.
#
# When paginating, all other parameters provided to `ListComponents` must
# match the call that provided the page token.
class ListComponentsRequest
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Response message for ListComponents.
# @!attribute [rw] components
# @return [::Array<::String>]
# The components from the specified organization.
# @!attribute [rw] next_page_token
# @return [::String]
# A token that can be sent as `page_token` to retrieve the next page.
# If this field is omitted, there are no subsequent pages.
class ListComponentsResponse
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
end
end
end
end
end
|
// Print one fruit chosen uniformly at random from the list.
let arr = ["apple", "orange", "banana"];
let randomIndex = Math.floor(Math.random() * arr.length);
let randomItem = arr[randomIndex];
console.log(randomItem);
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.