text stringlengths 1 1.05M |
|---|
#include "roCommandAllocator.h"
#include "..\Core\roErrorHandling.h"
// Returns a DIRECT-type command allocator that is safe to record into.
// fenceValue is the fence value the GPU has already completed; the oldest
// retired allocator is recycled once its fence has been reached, otherwise a
// brand-new allocator is created and tracked for release in Shutdown().
ID3D12CommandAllocator* roCommandAllocator::GetAllocator(uint64_t fenceValue) {
    std::lock_guard<std::mutex> lg(sm_AllocMutex);
    ID3D12CommandAllocator* cmdAlloc = nullptr;
    if (sm_UsedCommandAllocators.size() != 0) {
        // Only the front (oldest) queue entry is examined; this assumes fence
        // values are discarded in non-decreasing order -- TODO confirm callers
        // of DiscardAllocator guarantee that.
        auto cmdAllocTemp = sm_UsedCommandAllocators.front();
        if (fenceValue >= cmdAllocTemp.FenceValue) {
            // GPU is done with it: Reset() reclaims its memory for re-recording.
            cmdAllocTemp.CmdAlloc->Reset();
            cmdAlloc = cmdAllocTemp.CmdAlloc;
            sm_UsedCommandAllocators.pop();
            return cmdAlloc;
        }
    }
    // Nothing recyclable: create a fresh allocator.
    // NOTE(review): the HRESULT of CreateCommandAllocator is ignored; on
    // failure cmdAlloc_ is left uninitialized -- consider checking it.
    ID3D12CommandAllocator* cmdAlloc_;
    roRootGraphics::sm_Device->CreateCommandAllocator(D3D12_COMMAND_LIST_TYPE_DIRECT,
        IID_PPV_ARGS(&cmdAlloc_));
    cmdAlloc = cmdAlloc_;
    sm_CommandAllocators.push_back(cmdAlloc_);
    return cmdAlloc;
}
void roCommandAllocator::Shutdown() {
for (auto& alloc : sm_CommandAllocators) {
SAFE_RELEASE(alloc);
}
}
// Returns an allocator to the recycle queue once command recording is done.
// fenceVal is the fence value that must complete on the GPU before this
// allocator may be handed out again by GetAllocator().
void roCommandAllocator::DiscardAllocator(ID3D12CommandAllocator* cmdAlloc, uint64_t fenceVal) {
    std::lock_guard<std::mutex> guard(sm_AllocMutex);
    AllocInfo retired;
    retired.CmdAlloc = cmdAlloc;
    retired.FenceValue = fenceVal;
    sm_UsedCommandAllocators.push(retired);
}
// Owning list of every allocator ever created (released in Shutdown()).
std::vector<ID3D12CommandAllocator*> roCommandAllocator::sm_CommandAllocators;
// FIFO of retired allocators waiting for their fence values to complete.
std::queue<roCommandAllocator::AllocInfo> roCommandAllocator::sm_UsedCommandAllocators;
std::mutex roCommandAllocator::sm_AllocMutex; |
/******************************************************************************\
* The Romualdo Language *
* *
* Copyright 2020-2021 <NAME> *
* Licensed under the MIT license (see LICENSE.txt for details) *
\******************************************************************************/
package bytecode
import (
"fmt"
"reflect"
)
// A ValueKind represents one of the types a value in the Romualdo Virtual
// Machine can have. This is the type from the perspective of the VM (in the
// sense that user-defined types are obviously not directly represented here).
// We use "kind" in the name because "type" is a keyword in Go.
type ValueKind int

const (
	// ValueFloat identifies a floating-point value. In this implementation,
	// they are represented by a 64-bit IEEE 754 number, but I'd argue that if
	// you depend on the exact representation, Romualdo is not the right tool
	// for you.
	//
	// On the VM level, this is also used for Bounded Numbers. The compiler
	// knows the difference between a float and a bnum, but for the VM they are
	// all floats.
	ValueFloat ValueKind = iota

	// ValueInt identifies a signed integer value. In this implementation, they
	// are 64-bit. Other implementations may use different representations, but
	// let's all agree the number will be at least 32-bit.
	ValueInt

	// ValueBool identifies a Boolean value.
	ValueBool

	// ValueString identifies a string value.
	ValueString
)
// Value is a Romualdo language value.
// The constructors below only ever store float64, int64, bool or string,
// mirroring the ValueKind constants -- TODO confirm no other dynamic types
// are stored elsewhere.
type Value struct {
	Value interface{}
}
// NewValueFloat creates a new Value holding the floating-point number v.
func NewValueFloat(v float64) Value {
	return Value{Value: v}
}
// NewValueInt creates a new Value holding the integer number v.
func NewValueInt(v int64) Value {
	return Value{Value: v}
}
// NewValueBool creates a new Value holding the Boolean value v.
func NewValueBool(v bool) Value {
	return Value{Value: v}
}
// NewValueString creates a new Value holding the string value v.
//
// This shall not be called directly by most client code. Instead, use the
// wrappers that go by the name of NewInternedValueString (as methods on
// objects around the code base). These wrappers use interning to hopefully
// make the string handling more efficient.
func NewValueString(v string) Value {
	return Value{Value: v}
}
// AsFloat returns this Value's value, assuming it is a floating-point number.
// Panics with a failed type assertion if the underlying value is not a float64.
func (v Value) AsFloat() float64 {
	return v.Value.(float64)
}
// AsInt returns this Value's value, assuming it is an integer number.
// Panics with a failed type assertion if the underlying value is not an int64.
func (v Value) AsInt() int64 {
	return v.Value.(int64)
}
// AsBool returns this Value's value, assuming it is a Boolean value.
// Panics with a failed type assertion if the underlying value is not a bool.
func (v Value) AsBool() bool {
	return v.Value.(bool)
}
// AsString returns this Value's value, assuming it is a string value.
// Panics with a failed type assertion if the underlying value is not a string.
func (v Value) AsString() string {
	return v.Value.(string)
}
// IsFloat checks if the value contains a floating-point number.
// Uses the comma-ok form, so it never panics.
func (v Value) IsFloat() bool {
	_, ok := v.Value.(float64)
	return ok
}
// IsInt checks if the value contains an integer number.
// Uses the comma-ok form, so it never panics.
func (v Value) IsInt() bool {
	_, ok := v.Value.(int64)
	return ok
}
// IsBool checks if the value contains a Boolean value.
// Uses the comma-ok form, so it never panics.
func (v Value) IsBool() bool {
	_, ok := v.Value.(bool)
	return ok
}
// IsString checks if the value contains a string value.
// Uses the comma-ok form, so it never panics.
func (v Value) IsString() bool {
	_, ok := v.Value.(string)
	return ok
}
// String converts the value to a string, implementing fmt.Stringer.
// Floats use %g formatting; strings are returned as-is (identical to the %v
// formatting the previous implementation used).
func (v Value) String() string {
	switch typed := v.Value.(type) {
	case string:
		return typed
	case bool:
		return fmt.Sprintf("%v", typed)
	case int64:
		return fmt.Sprintf("%d", typed)
	case float64:
		return fmt.Sprintf("%g", typed)
	default:
		return fmt.Sprintf("<Unexpected type %T>", typed)
	}
}
// ValuesEqual checks if a and b are considered equal.
// Values of different dynamic types are never equal (so an int64 1 does not
// equal a float64 1). Panics if both values share a type outside the VM's
// supported kinds.
func ValuesEqual(a, b Value) bool {
	if reflect.TypeOf(a.Value) != reflect.TypeOf(b.Value) {
		return false
	}

	// Types match at this point, so each assertion below cannot fail.
	switch va := a.Value.(type) {
	case bool:
		return va == b.Value.(bool)
	case float64:
		return va == b.Value.(float64)
	case int64:
		return va == b.Value.(int64)
	case string:
		return va == b.Value.(string)
	default:
		panic(fmt.Sprintf("Unexpected Value type: %T", va))
	}
}
|
# Fetch historical Mark Six results (Chinese UI) from HKJC, posting the form
# fields that select the draw range 1993-02 .. 2016-03.
# Fix: the cookie header must be named "Cookie", not "Cookies" (RFC 6265);
# the misspelled header was silently ignored by the server.
curl 'http://bet.hkjc.com/marksix/Results.aspx?lang=ch' \
  -H 'Host: bet.hkjc.com' \
  -H 'User-Agent: Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:45.0) Gecko/20100101 Firefox/45.0' \
  -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8' \
  -H 'Accept-Language: en-US,en;q=0.5' \
  --compressed \
  -H 'DNT: 1' \
  -H 'Referer: http://bet.hkjc.com/marksix/Results.aspx?lang=ch' \
  -H 'Cookie: s_sess=%20s_cc%3Dtrue%3B%20s_sq%3D%3B; s_pers=%20s_fid%3D748621042E6879BC-3D4C4D8CEF2E6CBF%7C1521036726634%3B' \
  -H 'Connection: keep-alive' \
  --data '&selectDrawID=1000&hiddenSelectDrawID=1000&radioDrawRange=GetDrawDate&_ctl0%3AContentPlaceHolder1%3AresultsMarkSix%3AselectDrawFromMonth=02&hiddenSelectDrawFromMonth=02&_ctl0%3AContentPlaceHolder1%3AresultsMarkSix%3AselectDrawFromYear=1993&hiddenSelectDrawFromYear=1993&_ctl0%3AContentPlaceHolder1%3AresultsMarkSix%3AselectDrawToMonth=03&hiddenSelectDrawToMonth=03&_ctl0%3AContentPlaceHolder1%3AresultsMarkSix%3AselectDrawToYear=2016&hiddenSelectDrawToYear=2016&radioResultType=GetAll'
|
import shortcuts from '../../common/shortcuts';
import {Cell, Edge, Graph, Node} from '@antv/x6';
import {MIN_ZOOM, MAX_ZOOM} from '../../common/const';
import baseCellSchemaMap from '../../common/baseCell';
import previewCellSchemaMap from '../../common/previewCell';
import MiniMapSimpleNode from '../../components/miniMapSimpleNode';
// X6 register base/preview cell shape.
// Module-level side effect: each schema bundles an X6 base shape plus
// overriding options; base.define(rest) registers the derived shape globally
// so later Graph instances can instantiate it by name.
[baseCellSchemaMap, previewCellSchemaMap]
  .forEach(schemas => Object.values(schemas).forEach(schema => {
    const {base, ...rest} = schema;
    base.define(rest);
  }));
// Wires up global interaction events on the graph instance.
const registerEvents = (flowChart: Graph): void => {
  // A newly added node becomes the sole selection.
  flowChart.on('node:added', (args) => {
    flowChart.cleanSelection();
    flowChart.select(args.cell);
  });
  // The tool bar and setting bar render from the current selection, so force
  // them to re-render whenever it changes.
  flowChart.on('selection:changed', () => {
    flowChart.trigger('toolBar:forceUpdate');
    flowChart.trigger('settingBar:forceUpdate');
  });
  flowChart.on('edge:connected', (args) => {
    const edge = args.edge as Edge;
    const sourceNode = edge.getSourceNode() as Node;
    // For branch nodes, move the source port's text onto the new edge as its
    // label and clear it from the port. NOTE(review): assumes branch ports are
    // named 'right'/'bottom' -- confirm against the imove-branch cell schema.
    if(sourceNode && sourceNode.shape === 'imove-branch') {
      const portId = edge.getSourcePortId();
      if(portId === 'right' || portId === 'bottom') {
        edge.setLabelAt(0, sourceNode.getPortProp(portId, 'attrs/text/text'));
        sourceNode.setPortProp(portId, 'attrs/text/text', '');
      }
    }
  });
  // Show a clickable arrowhead tool while hovering an edge; remove on leave.
  flowChart.on('edge:mouseenter', (args) => {
    const cell = args.cell as Cell;
    cell.addTools([
      {
        name: 'target-arrowhead',
        args: {
          attrs: {
            d: 'M -10.5 -6 1 0 -10.5 6 Z',
            'stroke-width': 0,
            fill: '#333'
          }
        }
      }
    ]);
  });
  flowChart.on('edge:mouseleave', (args) => {
    const cell = args.cell as Cell;
    cell.removeTools(['target-arrowhead']);
  });
};
// Binds every configured keyboard shortcut to its handler on the graph.
const registerShortcuts = (flowChart: Graph): void => {
  Object.values(shortcuts).forEach(({keys, handler}) => {
    flowChart.bindKey(keys, () => handler(flowChart));
  });
};
/**
 * Creates and configures the X6 graph for the flow-chart editor.
 *
 * @param container         DOM element hosting the main canvas
 * @param miniMapContainer  DOM element hosting the minimap
 * @returns the fully wired Graph instance (events + shortcuts registered)
 */
const createFlowChart = (container: HTMLDivElement, miniMapContainer: HTMLDivElement): Graph => {
  const flowChart = new Graph({
    container,
    rotating: false,
    resizing: true,
    // https://x6.antv.vision/zh/docs/tutorial/basic/clipboard
    clipboard: {
      enabled: true,
      useLocalStorage: true
    },
    // https://x6.antv.vision/zh/docs/tutorial/intermediate/connector
    connecting: {
      snap: true,
      dangling: true,
      highlight: true,
      anchor: 'center',
      connectionPoint: 'anchor',
      router: {
        name: 'manhattan'
      },
      // Only allow magnet-to-magnet connections between two distinct cells.
      validateConnection({sourceView, targetView, sourceMagnet, targetMagnet}) {
        if(!sourceMagnet) {
          return false;
        } else if(!targetMagnet) {
          return false;
        } else {
          return sourceView !== targetView;
        }
      }
    },
    // https://x6.antv.vision/zh/docs/tutorial/basic/background
    background: {
      color: '#f8f9fa'
    },
    // https://x6.antv.vision/zh/docs/tutorial/basic/grid
    grid: {
      visible: true
    },
    // https://x6.antv.vision/zh/docs/tutorial/basic/selection
    selecting: {
      enabled: true,
      multiple: true,
      rubberband: true,
      movable: true,
      showNodeSelectionBox: true
    },
    // https://x6.antv.vision/zh/docs/tutorial/basic/snapline
    snapline: {
      enabled: true,
      clean: 100
    },
    // https://x6.antv.vision/zh/docs/tutorial/basic/keyboard
    keyboard: {
      enabled: true,
      global: false
    },
    // https://x6.antv.vision/zh/docs/tutorial/basic/history
    history: {
      enabled: true
    },
    // https://x6.antv.vision/zh/docs/tutorial/basic/minimap
    minimap: {
      // Width keeps the container's aspect ratio at a fixed 150px height.
      width: 150 * container.offsetWidth / container.offsetHeight,
      height: 150,
      minScale: MIN_ZOOM,
      maxScale: MAX_ZOOM,
      enabled: true,
      scalable: false,
      container: miniMapContainer,
      graphOptions: {
        async: true,
        // Render nodes with a simplified shape and skip edges entirely, to
        // keep the minimap cheap.
        getCellView(cell: Cell) {
          if(cell.isNode()){
            return MiniMapSimpleNode;
          }
        },
        createCellView(cell: Cell) {
          if (cell.isEdge()) {
            return null;
          }
        }
      }
    },
    // https://x6.antv.vision/zh/docs/tutorial/basic/scroller
    scroller: {
      enabled: true
    },
    // Zoom with ctrl/meta + wheel, clamped to the shared zoom limits.
    mousewheel: {
      enabled: true,
      minScale: MIN_ZOOM,
      maxScale: MAX_ZOOM,
      modifiers: ['ctrl', 'meta']
    },
  });
  registerEvents(flowChart);
  registerShortcuts(flowChart);
  return flowChart;
};
export default createFlowChart;
|
#include "duckdb/optimizer/rule/distributivity.hpp"
#include "duckdb/optimizer/matcher/expression_matcher.hpp"
#include "duckdb/planner/expression/bound_conjunction_expression.hpp"
#include "duckdb/planner/expression/bound_constant_expression.hpp"
#include "duckdb/planner/expression_iterator.hpp"
#include "duckdb/planner/operator/logical_filter.hpp"
using namespace duckdb;
using namespace std;
DistributivityRule::DistributivityRule(ExpressionRewriter &rewriter) : Rule(rewriter) {
    // Match any OR conjunction. NOTE(review): the matcher itself does not
    // restrict matching to LogicalFilter operators; the filter scoping is
    // presumably enforced by the rewriter -- confirm.
    root = make_unique<ExpressionMatcher>();
    root->expr_type = make_unique<SpecificExpressionTypeMatcher>(ExpressionType::CONJUNCTION_OR);
}
// Collects the conjuncts of `expr` into `set`: for an AND node each child is
// inserted individually, otherwise the expression itself is inserted.
void DistributivityRule::AddExpressionSet(Expression &expr, expression_set_t &set) {
    if (expr.type != ExpressionType::CONJUNCTION_AND) {
        set.insert(&expr);
        return;
    }
    auto &conjunction = (BoundConjunctionExpression &)expr;
    for (auto &conjunct : conjunction.children) {
        set.insert(conjunct.get());
    }
}
// Removes `expr` from the idx'th child of the OR and returns ownership of it.
// If the child is an AND, only the matching conjunct is removed (and an AND
// left with a single conjunct collapses to that conjunct). Otherwise the
// child must equal `expr` itself and its slot is set to nullptr; the caller
// erases null slots afterwards.
unique_ptr<Expression> DistributivityRule::ExtractExpression(BoundConjunctionExpression &conj, idx_t idx,
                                                             Expression &expr) {
    auto &child = conj.children[idx];
    unique_ptr<Expression> result;
    if (child->type == ExpressionType::CONJUNCTION_AND) {
        // AND, remove expression from the list
        auto &and_expr = (BoundConjunctionExpression &)*child;
        for (idx_t i = 0; i < and_expr.children.size(); i++) {
            if (Expression::Equals(and_expr.children[i].get(), &expr)) {
                result = move(and_expr.children[i]);
                and_expr.children.erase(and_expr.children.begin() + i);
                break;
            }
        }
        // Collapse a one-element AND into its single remaining conjunct.
        if (and_expr.children.size() == 1) {
            conj.children[idx] = move(and_expr.children[0]);
        }
    } else {
        // not an AND node! remove the entire expression
        // this happens in the case of e.g. (X AND B) OR X
        assert(Expression::Equals(child.get(), &expr));
        result = move(child);
        conj.children[idx] = nullptr;
    }
    assert(result);
    return result;
}
// Applies the distributivity rewrite, e.g. (X AND A) OR (X AND B) ==>
// X AND (A OR B): factors every expression occurring in all branches of the
// OR out into a new AND root, keeping whatever remains of the OR beneath it.
unique_ptr<Expression> DistributivityRule::Apply(LogicalOperator &op, vector<Expression *> &bindings,
                                                 bool &changes_made) {
    auto initial_or = (BoundConjunctionExpression *)bindings[0];

    // we want to find expressions that occur in each of the children of the OR
    // i.e. (X AND A) OR (X AND B) => X occurs in all branches
    // first, for the initial child, we create an expression set of which expressions occur
    // this is our initial candidate set (in the example: [X, A])
    expression_set_t candidate_set;
    AddExpressionSet(*initial_or->children[0], candidate_set);
    // now for each of the remaining children, we create a set again and intersect them
    // in our example: the second set would be [X, B]
    // the intersection would leave [X]
    for (idx_t i = 1; i < initial_or->children.size(); i++) {
        expression_set_t next_set;
        AddExpressionSet(*initial_or->children[i], next_set);
        expression_set_t intersect_result;
        for (auto &expr : candidate_set) {
            if (next_set.find(expr) != next_set.end()) {
                intersect_result.insert(expr);
            }
        }
        candidate_set = intersect_result;
    }
    if (candidate_set.size() == 0) {
        // nothing found: abort
        return nullptr;
    }
    // now for each of the remaining expressions in the candidate set we know that it is contained in all branches of
    // the OR
    auto new_root = make_unique<BoundConjunctionExpression>(ExpressionType::CONJUNCTION_AND);
    for (auto &expr : candidate_set) {
        assert(initial_or->children.size() > 0);

        // extract the expression from the first child of the OR
        // (ExtractExpression transfers ownership and may null out child slots)
        auto result = ExtractExpression(*initial_or, 0, (Expression &)*expr);
        // now for the subsequent expressions, simply remove the expression
        for (idx_t i = 1; i < initial_or->children.size(); i++) {
            ExtractExpression(*initial_or, i, *result);
        }
        // now we add the expression to the new root
        new_root->children.push_back(move(result));
        // remove any expressions that were set to nullptr
        for (idx_t i = 0; i < initial_or->children.size(); i++) {
            if (!initial_or->children[i]) {
                initial_or->children.erase(initial_or->children.begin() + i);
                i--;
            }
        }
    }
    // finally we need to add the remaining expressions in the OR to the new root
    if (initial_or->children.size() == 1) {
        // one child: skip the OR entirely and only add the single child
        new_root->children.push_back(move(initial_or->children[0]));
    } else if (initial_or->children.size() > 1) {
        // multiple children still remain: push them into a new OR and add that to the new root
        auto new_or = make_unique<BoundConjunctionExpression>(ExpressionType::CONJUNCTION_OR);
        for (auto &child : initial_or->children) {
            new_or->children.push_back(move(child));
        }
        new_root->children.push_back(move(new_or));
    }
    // finally return the new root
    if (new_root->children.size() == 1) {
        return move(new_root->children[0]);
    }
    return move(new_root);
}
|
import json
data = json.loads(data_json)
# Output:
data = {
"name": "John Doe",
"age": 18,
"hobbies": ["reading", "swimming", "gaming"],
"contact": {
"email": "johndoe@example.com",
"mobile": "+1900000000"
}
} |
<filename>src/main/java/com/assist/watchnext/repository/MovieCategoryRepository.java
package com.assist.watchnext.repository;
import com.assist.watchnext.model.MovieCategory;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
/**
 * Spring Data JPA repository for {@link MovieCategory} entities keyed by an
 * Integer id. All CRUD/paging operations are inherited from
 * {@link JpaRepository}; no custom queries are defined.
 */
@Repository
public interface MovieCategoryRepository extends JpaRepository<MovieCategory, Integer> {
}
|
#!/bin/bash
#
# modify by hiyang @ 2016-12-19
#
export PATH=/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin

clear
# Fix: "RadHat" -> "RedHat" in the banner.
printf "
#######################################################################
# OneinStack odm for CentOS/RedHat 5+ #
# FTP virtual user account management #
#######################################################################
"
# Pull in install paths ($pureftpd_install_dir, $wwwroot_dir, $run_user)
# and the color escape variables used in all messages.
. ./options.conf
. ./include/color.sh

# Check if user is root
[ $(id -u) != "0" ] && { echo "${CFAILURE}Error: You must be root to run this script${CEND}"; exit 1; }

[ ! -d "$pureftpd_install_dir" ] && { echo "${CFAILURE}FTP server does not exist! ${CEND}"; exit 1; }

# Locations of pure-ftpd's config and virtual-user database files.
FTP_conf=$pureftpd_install_dir/etc/pure-ftpd.conf
FTP_tmp_passfile=$pureftpd_install_dir/etc/pureftpd_psss.tmp
Puredbfile=$pureftpd_install_dir/etc/pureftpd.pdb
Passwdfile=$pureftpd_install_dir/etc/pureftpd.passwd
FTP_bin=$pureftpd_install_dir/bin/pure-pw
# Abort unless pure-ftpd is configured with a PureDB password database.
# Fix: reworded the previously ungrammatical error message.
[ -z "`grep ^PureDB $FTP_conf`" ] && { echo "${CFAILURE}pure-ftpd does not have a PureDB password database enabled${CEND}" ; exit 1; }
# Prompt until a non-empty username is entered; result is left in $User.
USER() {
    while :; do echo
        read -p "Please input a username: " User
        if [ -z "$User" ]; then
            echo "${CWARNING}username can't be NULL! ${CEND}"
        else
            break
        fi
    done
}
# Prompt for a password (>= 5 chars, no '+' or '&') into $Password, and write
# it twice into $FTP_tmp_passfile -- pure-pw reads the password and its
# confirmation from stdin.
# NOTE(review): the temp file containing the plaintext password is never
# removed afterwards -- consider deleting it once pure-pw has consumed it.
PASSWORD() {
    while :; do echo
        read -p "Please input the password: " Password
        [ -n "`echo $Password | grep '[+|&]'`" ] && { echo "${CWARNING}input error,not contain a plus sign (+) and &${CEND}"; continue; }
        if (( ${#Password} >= 5 ));then
            echo -e "${Password}\n$Password" > $FTP_tmp_passfile
            break
        else
            echo "${CWARNING}Ftp password least 5 characters! ${CEND}"
        fi
    done
}
# Prompt for an existing home directory into $Directory; an empty answer
# falls back to $wwwroot_dir from options.conf.
DIRECTORY() {
    while :; do echo
        read -p "Please input the directory(Default directory: $wwwroot_dir): " Directory
        if [ -z "$Directory" ]; then
            Directory="$wwwroot_dir"
        fi
        if [ ! -d "$Directory" ]; then
            echo "${CWARNING}The directory does not exist${CEND}"
        else
            break
        fi
    done
}
# Interactive menu loop: each iteration shows the action list, performs the
# chosen pure-pw operation, and rebuilds the PureDB database ($Puredbfile)
# so changes take effect immediately.
while :; do
printf "
What Are You Doing?
\t${CMSG}1${CEND}. UserAdd
\t${CMSG}2${CEND}. UserMod
\t${CMSG}3${CEND}. UserPasswd
\t${CMSG}4${CEND}. UserDel
\t${CMSG}5${CEND}. ListAllUser
\t${CMSG}6${CEND}. ShowUser
\t${CMSG}q${CEND}. Exit
"
read -p "Please input the correct option: " Number
# NOTE(review): the bracket expression [1-6,q] also accepts a literal comma;
# [1-6q] was probably intended.
if [[ ! $Number =~ ^[1-6,q]$ ]]; then
    echo "${CFAILURE}input error! Please only input 1 ~ 6 and q${CEND}"
else
    case "$Number" in
        1)
            # UserAdd: refuse duplicates, then create the account with the
            # run user's uid/gid and the chosen home directory.
            USER
            [ -e "$Passwdfile" ] && [ -n "`grep ^${User}: $Passwdfile`" ] && { echo "${CQUESTION}[$User] is already existed! ${CEND}"; continue; }
            PASSWORD;DIRECTORY
            $FTP_bin useradd $User -f $Passwdfile -u $run_user -g $run_user -d $Directory -m < $FTP_tmp_passfile
            $FTP_bin mkdb $Puredbfile -f $Passwdfile > /dev/null 2>&1
            echo "#####################################"
            echo
            echo "[$User] create successful! "
            echo
            echo "You user name is : ${CMSG}$User${CEND}"
            echo "You Password is : ${CMSG}$Password${CEND}"
            echo "You directory is : ${CMSG}$Directory${CEND}"
            echo
            ;;
        2)
            # UserMod: change an existing user's home directory.
            USER
            [ -e "$Passwdfile" ] && [ -z "`grep ^${User}: $Passwdfile`" ] && { echo "${CQUESTION}[$User] was not existed! ${CEND}"; continue; }
            DIRECTORY
            $FTP_bin usermod $User -f $Passwdfile -d $Directory -m
            $FTP_bin mkdb $Puredbfile -f $Passwdfile > /dev/null 2>&1
            echo "#####################################"
            echo
            echo "[$User] modify a successful! "
            echo
            echo "You user name is : ${CMSG}$User${CEND}"
            echo "You new directory is : ${CMSG}$Directory${CEND}"
            echo
            ;;
        3)
            # UserPasswd: reset an existing user's password.
            USER
            [ -e "$Passwdfile" ] && [ -z "`grep ^${User}: $Passwdfile`" ] && { echo "${CQUESTION}[$User] was not existed! ${CEND}"; continue; }
            PASSWORD
            $FTP_bin passwd $User -f $Passwdfile -m < $FTP_tmp_passfile
            $FTP_bin mkdb $Puredbfile -f $Passwdfile > /dev/null 2>&1
            echo "#####################################"
            echo
            echo "[$User] Password changed successfully! "
            echo
            echo "You user name is : ${CMSG}$User${CEND}"
            echo "You new password is : ${CMSG}$Password${CEND}"
            echo
            ;;
        4)
            # UserDel: list current accounts first, then delete the chosen one.
            if [ ! -e "$Passwdfile" ]; then
                echo "${CQUESTION}User was not existed! ${CEND}"
            else
                $FTP_bin list
            fi
            USER
            [ -e "$Passwdfile" ] && [ -z "`grep ^${User}: $Passwdfile`" ] && { echo "${CQUESTION}[$User] was not existed! ${CEND}"; continue; }
            $FTP_bin userdel $User -f $Passwdfile -m
            $FTP_bin mkdb $Puredbfile -f $Passwdfile > /dev/null 2>&1
            echo
            echo "[$User] have been deleted! "
            ;;
        5)
            # ListAllUser: dump the virtual-user table.
            if [ ! -e "$Passwdfile" ]; then
                echo "${CQUESTION}User was not existed! ${CEND}"
            else
                $FTP_bin list
            fi
            ;;
        6)
            # ShowUser: print one account's full record.
            USER
            [ -e "$Passwdfile" ] && [ -z "`grep ^${User}: $Passwdfile`" ] && { echo "${CQUESTION}[$User] was not existed! ${CEND}"; continue; }
            $FTP_bin show $User
            ;;
        q)
            exit
            ;;
    esac
fi
done
|
# Env hook: make a dependency's headers and libraries visible to the XL
# compilers by appending to NIX_CFLAGS_COMPILE / NIX_LDFLAGS.
# $1 - the dependency's installation prefix.
# Fix: "$1" is now quoted in every test so prefixes containing spaces (or an
# empty argument) no longer break the [ ] expressions.
addXLCVars () {
  export NIX_ENFORCE_PURITY=0
  if [ -d "$1/include" ]; then
    export NIX_CFLAGS_COMPILE+=" -I $1/include"
  fi
  # Only use lib64 when it is a real directory, not a compatibility symlink.
  if [ -d "$1/lib64" -a ! -L "$1/lib64" ]; then
    export NIX_LDFLAGS+=" -L$1/lib64"
  fi
  if [ -d "$1/lib" ]; then
    export NIX_LDFLAGS+=" -L$1/lib"
  fi
}
## default CC to use: the thread-safe IBM XL wrappers for Blue Gene.
export CC=bgxlc_r
export CXX=bgxlc++_r
# cmake libc fix
# add bgq libc in the path
# NOTE(review): @default_libc_path@ looks like a build-time substitution
# placeholder -- confirm it is replaced before this file is sourced.
export LDFLAGS+=" -L@default_libc_path@ -lc "
# Register the hook so it runs for every dependency of the derivation.
envHooks+=(addXLCVars)
|
import React from 'react'
import PropTypes from 'prop-types'
import { makeStyles } from '@material-ui/styles'
// Styles for the label/value row. The theme argument is currently unused but
// kept for makeStyles' callback signature.
const useStyles = makeStyles(theme => ({
  root: {
    display: 'flex',
    marginBottom: '10px',
  },
  // Fixed-width bold label column; the value takes the remaining space.
  label: {
    flexGrow: 0,
    fontWeight: 'bold',
    width: '100px',
  },
}))
// Read-only display row: a bold fixed-width label followed by its value.
const PresetField = ({ label, value }) => {
  const classes = useStyles()
  return (
    <div className={classes.root}>
      <label className={classes.label}>{label}</label>
      <span>{value}</span>
    </div>
  )
}
// Both props are optional; value accepts either a string or a number.
PresetField.propTypes = {
  label: PropTypes.string,
  value: PropTypes.oneOfType([
    PropTypes.string,
    PropTypes.number,
  ]),
}

export default PresetField
|
def parse_targets(specs):
    """
    Parse specifications into tool and version pairs.

    Each specification has the form ``"tool:version"``. Entries without a
    colon are skipped. Only the first colon splits the entry, so versions may
    themselves contain colons (e.g. ``"img:sha256:abc"`` maps ``img`` to
    ``"sha256:abc"``); the previous ``split(":")`` silently dropped such
    entries.

    Args:
        specs (list): List of strings representing tool and version
            specifications.

    Returns:
        dict: A dictionary where the keys are the tools and the values are
        their corresponding versions. A tool listed twice keeps its last
        version.
    """
    tool_version_pairs = {}
    for spec in specs:
        tool, sep, version = spec.partition(":")
        if sep:  # keep only entries that actually contain a colon
            tool_version_pairs[tool] = version
    return tool_version_pairs
<reponame>putragraha/mobilefingerprint<gh_stars>1-10
package com.klk.mobilefingerprint.services;
import android.content.Context;
/**
 * Abstraction over screen-transition animations.
 */
public interface TransitionService {
    /**
     * Starts an animated transition from the current context to the given
     * target class.
     *
     * @param context   the current Android context
     * @param javaClass the class to transition to. NOTE(review): raw
     *                  {@code Class} type; {@code Class<?>} would be cleaner
     *                  but changing it would break existing implementers.
     */
    void animate(Context context, Class javaClass);
}
|
-- Account records; email is enforced unique across all users.
CREATE TABLE users (
    user_id INT AUTO_INCREMENT,
    username VARCHAR(20),
    name VARCHAR(50),
    email VARCHAR(120) UNIQUE,
    PRIMARY KEY (user_id)
);
-- Profile data attached to a user.
-- NOTE(review): user_id has no UNIQUE constraint, so one user may own
-- several profiles -- confirm that is intended.
CREATE TABLE profiles (
    profile_id INT AUTO_INCREMENT,
    user_id INT NOT NULL,
    bio VARCHAR(500),
    status VARCHAR(25),
    FOREIGN KEY (user_id)
        REFERENCES users (user_id),
    PRIMARY KEY (profile_id)
);
-- User groups.
-- NOTE(review): GROUPS is a reserved word in MySQL 8.0+; this name works on
-- older versions but may need backtick-quoting after an upgrade.
CREATE TABLE groups (
    group_id INT AUTO_INCREMENT,
    group_name VARCHAR(50),
    description VARCHAR(255),
    PRIMARY KEY (group_id)
);
-- Join table implementing the many-to-many user/group relationship.
-- Fix: the surrogate key is now AUTO_INCREMENT, consistent with every other
-- table here (previously it had to be supplied manually on each insert).
CREATE TABLE user_groups (
    user_group_id INT AUTO_INCREMENT,
    user_id INT NOT NULL,
    group_id INT NOT NULL,
    FOREIGN KEY (user_id)
        REFERENCES users (user_id),
    FOREIGN KEY (group_id)
        REFERENCES groups (group_id),
    PRIMARY KEY (user_group_id)
);
<reponame>jing-si/plant
package kr.co.gardener.admin.model.forest;
import kr.co.gardener.util.GridSystem;
/**
 * Model describing one growth level of a plant: the owning plant's id plus
 * the name and image shown at this level. Extends GridSystem so it can be
 * used in grid-backed admin views.
 */
public class PlantLevel extends GridSystem{
    private String plantName;     // display name of the plant
    private int plantLevelId;     // identifier of this level row
    private String plantImage;    // image reference shown for this level -- TODO confirm path vs URL
    private int plantId;          // identifier of the owning plant

    public String getPlantName() {
        return plantName;
    }
    public void setPlantName(String plantName) {
        this.plantName = plantName;
    }
    public int getPlantLevelId() {
        return plantLevelId;
    }
    public void setPlantLevelId(int plantLevelId) {
        this.plantLevelId = plantLevelId;
    }
    public int getPlantId() {
        return plantId;
    }
    public void setPlantId(int plantId) {
        this.plantId = plantId;
    }
    public String getPlantImage() {
        return plantImage;
    }
    public void setPlantImage(String plantImage) {
        this.plantImage = plantImage;
    }
}
|
#!/bin/bash
cd `dirname ${BASH_SOURCE-$0}`
. env.sh
test=$1
minerCount=$2
clientCount=$3
txRate=$4
txLimit=$5
wl=$6
sudo chronyd -q
timeStamp=$(date +"%Y-%m-%d_%H-%M-%S")
now=$(($(date +%s%N)/1000000))
startTime=$((now + 10000 + clientCount*10000))
echo "==== starting all clients ===="
i=0
for client in `cat $CLIENTS`; do
if [[ $i -lt $clientCount ]]; then
clientId=$(expr $i + 1)
echo starting client $client clientNo=$clientId
ssh -i $SSH_KEY -oStrictHostKeyChecking=no $client $IBFT_HOME/startClient.sh $test $minerCount $clientCount $txRate $txLimit $wl $startTime $clientId $timeStamp
fi
let i=$i+1
done
echo "==== all clients started ====" |
#pragma once
#include "unittest.h"
#include <iostream>
#include <string>
// Unit test exercising positional number-base conversion.
class ConvertBase : public UnitTest
{
public:
    // Runs the conversion test cases and reports the aggregate result.
    virtual TestResult test() override;

private:
    /*!
     * \brief Convert a number from one base to another.
     * \param baseIn The base that number is represented in
     * \param number The number to be converted
     * \param baseOut The new base to change the number into
     * \return The number after conversion to the new base
     */
    std::string convertBase(int baseIn, std::string number, int baseOut);
};
|
/*
TITLE Memory layout Chapter18Exercise9.cpp
"<NAME> "C++ Programming: Principles and Practice.""
COMMENT
Objective: Write a program showing the orders of:
- static,
- stack and
- heap memory.
Expectations:
Static and Heap memory addresses are allocated in increasing order,
while Stack memory addresses are allocated in decreasing order.
However, the arrays' elements are allocated as consecutive memory locations.
Input: -
Output: -
Author: <NAME>
Date: 26.12.2015
*/
#include <iostream>
// Prints the addresses of two stack arrays so the direction of stack growth
// (and the contiguity of array elements) can be observed.
void stack_f( )
{
    // array on stack
    char stack_arr[3] = {'a', 'b', 'c'};

    // stack memory addresses
    std::cout <<"\nStack memory addresses.\n";
    for (size_t i = 0; i < 3; ++i) std::cout <<"&stack_arr["<< i <<"] "<< reinterpret_cast<void*>(&stack_arr[i]) <<'\n';

    // second array on stack, declared later so its addresses reveal growth direction
    char stack_arr1[3] = {'c', 'h', 'z'};

    // stack memory addresses
    // Fix: the label previously said "stack_arr[" while iterating stack_arr1.
    std::cout <<"\nStack memory addresses1.\n";
    for (size_t i = 0; i < 3; ++i) std::cout <<"&stack_arr1["<< i <<"] "<< reinterpret_cast<void*>(&stack_arr1[i]) <<'\n';
}
//--------------------------------------------------------------------------------------------------------------------------
// Prints the addresses of a static array, two stack arrays and a heap array
// so their relative placement in the process address space can be compared.
int main ()
{
    try
    {
        // array on static storage
        static char global_arr[3] = {'a', 'b', 'c'};

        // array on heap
        char *heap_arr = new char[3];
        for (size_t i = 0; i < 3; ++i) heap_arr[i] = 'a' + i;

        // Print addresses of the three arrays and compare them
        // static memory addresses
        std::cout <<"Static memory addresses.\n";
        for (size_t i = 0; i < 3; ++i) std::cout <<"&global_arr["<< i <<"] "<< reinterpret_cast<void*>(&global_arr[i]) <<'\n';

        // stack memory addresses
        stack_f( );

        // heap memory addresses
        std::cout <<"\nHeap memory addresses.\n";
        for (size_t i = 0; i < 3; ++i) std::cout <<"&heap_arr["<< i <<"] "<< reinterpret_cast<void*>(&heap_arr[i]) <<'\n';
        delete [] heap_arr;
    }
    catch(std::exception& e)
    {
        std::cerr << e.what();
    }
    catch(...)
    {
        std::cerr << "Unhandled exception!\n";
    }
    // Keep the console window open until a key is pressed.
    // NOTE(review): getchar() relies on <cstdio> being dragged in
    // transitively; include it explicitly if this ever fails to build.
    getchar();
}
|
import { colors, Styles } from '@0xproject/react-shared';
import * as _ from 'lodash';
import RaisedButton from 'material-ui/RaisedButton';
import * as React from 'react';
import { Blockchain } from 'ts/blockchain';
import { ProviderPicker } from 'ts/components/top_bar/provider_picker';
import { DropDown } from 'ts/components/ui/drop_down';
import { Identicon } from 'ts/components/ui/identicon';
import { Dispatcher } from 'ts/redux/dispatcher';
import { ProviderType } from 'ts/types';
import { constants } from 'ts/utils/constants';
import { utils } from 'ts/utils/utils';
const ROOT_HEIGHT = 24;
// Props for the wallet/provider status widget shown in the top bar.
export interface ProviderDisplayProps {
    dispatcher: Dispatcher;
    userAddress: string;            // empty string when no account is unlocked
    networkId: number;
    injectedProviderName: string;   // '0x Public' means the fallback node, not a real wallet
    providerType: ProviderType;
    onToggleLedgerDialog: () => void;
    blockchain: Blockchain;
}
// The component is fully prop-driven; no local state is kept.
interface ProviderDisplayState {}

// Rounded pill container sized by ROOT_HEIGHT.
const styles: Styles = {
    root: {
        height: ROOT_HEIGHT,
        backgroundColor: colors.white,
        borderRadius: ROOT_HEIGHT,
        boxShadow: `0px 4px 6px ${colors.walletBoxShadow}`,
    },
};
// Top-bar widget showing the connected wallet (identicon + truncated address)
// with a hover drop-down to switch providers or get wallet-install guidance.
export class ProviderDisplay extends React.Component<ProviderDisplayProps, ProviderDisplayState> {
    public render() {
        const isAddressAvailable = !_.isEmpty(this.props.userAddress);
        const isExternallyInjectedProvider =
            this.props.providerType === ProviderType.Injected && this.props.injectedProviderName !== '0x Public';
        // No address + a real injected wallet means the wallet is locked.
        const displayAddress = isAddressAvailable
            ? utils.getAddressBeginAndEnd(this.props.userAddress)
            : isExternallyInjectedProvider ? 'Account locked' : '0x0000...0000';
        // If the "injected" provider is our fallback public node, then we want to
        // show the "connect a wallet" message instead of the providerName
        const injectedProviderName = isExternallyInjectedProvider
            ? this.props.injectedProviderName
            : 'Connect a wallet';
        const providerTitle =
            this.props.providerType === ProviderType.Injected ? injectedProviderName : 'Ledger Nano S';
        const isProviderMetamask = providerTitle === constants.PROVIDER_NAME_METAMASK;
        const hoverActiveNode = (
            <div className="flex right lg-pr0 md-pr2 sm-pr2 p1" style={styles.root}>
                <div>
                    <Identicon address={this.props.userAddress} diameter={ROOT_HEIGHT} />
                </div>
                <div style={{ marginLeft: 12, paddingTop: 3 }}>
                    <div style={{ fontSize: 16, color: colors.darkGrey }}>{displayAddress}</div>
                </div>
                {isProviderMetamask && (
                    <div style={{ marginLeft: 16 }}>
                        <img src="/images/metamask_icon.png" style={{ width: ROOT_HEIGHT, height: ROOT_HEIGHT }} />
                    </div>
                )}
            </div>
        );
        const hasInjectedProvider =
            this.props.injectedProviderName !== '0x Public' && this.props.providerType === ProviderType.Injected;
        const hasLedgerProvider = this.props.providerType === ProviderType.Ledger;
        // Center the popover when there is nothing to pick from (install help).
        const horizontalPosition = hasInjectedProvider || hasLedgerProvider ? 'left' : 'middle';
        return (
            <div style={{ width: 'fit-content', height: 48, float: 'right' }}>
                <DropDown
                    hoverActiveNode={hoverActiveNode}
                    popoverContent={this.renderPopoverContent(hasInjectedProvider, hasLedgerProvider)}
                    anchorOrigin={{ horizontal: horizontalPosition, vertical: 'bottom' }}
                    targetOrigin={{ horizontal: horizontalPosition, vertical: 'top' }}
                    zDepth={1}
                />
            </div>
        );
    }
    // Drop-down body: a provider picker when any provider is connected,
    // otherwise instructions for installing/connecting a wallet.
    public renderPopoverContent(hasInjectedProvider: boolean, hasLedgerProvider: boolean) {
        if (hasInjectedProvider || hasLedgerProvider) {
            return (
                <ProviderPicker
                    dispatcher={this.props.dispatcher}
                    networkId={this.props.networkId}
                    injectedProviderName={this.props.injectedProviderName}
                    providerType={this.props.providerType}
                    onToggleLedgerDialog={this.props.onToggleLedgerDialog}
                    blockchain={this.props.blockchain}
                />
            );
        } else {
            // Nothing to connect to, show install/info popover
            return (
                <div className="px2" style={{ maxWidth: 420 }}>
                    <div className="center h4 py2" style={{ color: colors.grey700 }}>
                        Choose a wallet:
                    </div>
                    <div className="flex pb3">
                        <div className="center px2">
                            <div style={{ color: colors.darkGrey }}>Install a browser wallet</div>
                            <div className="py2">
                                <img src="/images/metamask_or_parity.png" width="135" />
                            </div>
                            <div>
                                Use{' '}
                                <a
                                    href={constants.URL_METAMASK_CHROME_STORE}
                                    target="_blank"
                                    style={{ color: colors.lightBlueA700 }}
                                >
                                    Metamask
                                </a>{' '}
                                or{' '}
                                <a
                                    href={constants.URL_PARITY_CHROME_STORE}
                                    target="_blank"
                                    style={{ color: colors.lightBlueA700 }}
                                >
                                    Parity Signer
                                </a>
                            </div>
                        </div>
                        <div>
                            <div
                                className="pl1 ml1"
                                style={{ borderLeft: `1px solid ${colors.grey300}`, height: 65 }}
                            />
                            <div className="py1">or</div>
                            <div
                                className="pl1 ml1"
                                style={{ borderLeft: `1px solid ${colors.grey300}`, height: 68 }}
                            />
                        </div>
                        <div className="px2 center">
                            <div style={{ color: colors.darkGrey }}>Connect to a ledger hardware wallet</div>
                            <div style={{ paddingTop: 21, paddingBottom: 29 }}>
                                <img src="/images/ledger_icon.png" style={{ width: 80 }} />
                            </div>
                            <div>
                                <RaisedButton
                                    style={{ width: '100%' }}
                                    label="Use Ledger"
                                    onClick={this.props.onToggleLedgerDialog}
                                />
                            </div>
                        </div>
                    </div>
                </div>
            );
        }
    }
}
|
#!/bin/bash
#
# Copyright (c) 2014-2016 Carnegie Mellon University.
#
# All rights reserved.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file. See the AUTHORS file for names of contributors.
#
# Require at least the command argument; the file path is command-specific.
if test $# -lt 1
then
  echo "== Usage: $0 [mkdir|mknod|getattr|chmod|readdir|backend] <file_path>"
  exit 1
fi

me=$0
# Resolve the installation root from this script's location (sbin/..).
INDEXFS_HOME=$(cd -P -- `dirname $me`/.. && pwd -P)
INDEXFS_CONF_DIR=${INDEXFS_CONF_DIR:-"$INDEXFS_HOME/etc/indexfs-lo"}

# check the location of the build directory
INDEXFS_BASE=$INDEXFS_HOME
if test -d $INDEXFS_HOME/build
then
  INDEXFS_BASE=$INDEXFS_HOME/build
fi
# Succeed (and echo __NFS__) when the build's config.h defines __NFS__.
is_nfs()
{
    cat $INDEXFS_BASE/config.h | grep -q "#define __NFS__" 2>/dev/null
    if [ $? -eq 0 ]
    then
        echo __NFS__ && return 0
    fi
    return 1
}
# Succeed (and echo __HDFS__) when the build's config.h defines __HDFS__.
is_hdfs()
{
    cat $INDEXFS_BASE/config.h | grep -q "#define __HDFS__" 2>/dev/null
    if [ $? -eq 0 ]
    then
        echo __HDFS__ && return 0
    fi
    return 1
}
# Succeed (and echo __RADOS__) when the build's config.h defines __RADOS__.
is_rados()
{
    cat $INDEXFS_BASE/config.h | grep -q "#define __RADOS__" 2>/dev/null
    if [ $? -eq 0 ]
    then
        echo __RADOS__ && return 0
    fi
    return 1
}
# Print the compiled-in storage backend (__NFS__, __HDFS__ or __RADOS__);
# exits the whole script when config.h declares none of them.
get_indexfs_backend()
{
    is_nfs || is_hdfs || is_rados || exit 1
}
# HDFS-backed builds need the JVM/libhdfs environment exported before any
# client tool runs; other backends need no extra setup.
prepare_runtime()
{
    if test x"`get_indexfs_backend`" = x"__HDFS__"
    then
        LD_PATH=`$INDEXFS_HOME/sbin/hdfs.sh ldpath`
        export LD_LIBRARY_PATH=$LD_PATH
        export LIBHDFS_OPTS="-Djava.library.path=$LD_PATH"
        export CLASSPATH=`$INDEXFS_HOME/sbin/hdfs.sh classpath`
    fi
}
# execute command
COMMAND=$1
case $COMMAND in
    mkdir|mknod|chmod|getattr|readdir)
        # All metadata operations share the same invocation pattern: the
        # binary is named after the sub-command and receives identical
        # config/server-list arguments, so the original five duplicated
        # branches collapse into one.
        prepare_runtime
        $INDEXFS_BASE/bin/$COMMAND \
            --configfn=$INDEXFS_CONF_DIR/indexfs_conf \
            --srvlstfn=$INDEXFS_CONF_DIR/server_list "$2"
        ;;
    backend)
        get_indexfs_backend
        ;;
    *)
        echo "Unrecognized command '$COMMAND' - oops"
        exit 1
        ;;
esac
exit 0
|
<?hh
namespace Waffle\Tests\Http\Message;
use type Waffle\Http\Message\Stream;
use namespace Waffle\Http\Message\Exception;
use type Waffle\Http\Message\UploadedFile;
use type Waffle\Contract\Http\Message\UploadedFileError;
use type Facebook\HackTest\HackTest;
use function Facebook\FBExpect\expect;
use function Waffle\Http\Message\__Private\create_stream_from_string;
use function is_scalar;
use function file_exists;
use function unlink;
use function fopen;
use function tempnam;
use function sys_get_temp_dir;
use function uniqid;
use function file_get_contents;
/**
 * Tests for UploadedFile: stream access, moveTo() semantics, and the
 * behaviour of non-OK upload error statuses.
 */
class UploadedFileTest extends HackTest
{
    // Temp-file paths created by a test; deleted again after each test.
    protected array $cleanup = [];

    <<__Override>>
    public async function beforeEachTestAsync(): Awaitable<void>
    {
        $this->cleanup = [];
    }

    <<__Override>>
    public async function afterEachTestAsync(): Awaitable<void>
    {
        // Remove every registered temp file that still exists on disk.
        foreach ($this->cleanup as $file) {
            if (is_scalar($file) && file_exists($file)) {
                unlink($file);
            }
        }
    }

    // getStream() must hand back the exact stream passed to the constructor.
    public function testGetStreamReturnsOriginalStreamObject()
    {
        $stream = create_stream_from_string('');
        $upload = new UploadedFile($stream, 0, UploadedFileError::ERROR_OK);
        expect($upload->getStream())->toBeSame($stream);
    }

    // detach() on the wrapped stream must expose the original PHP handle.
    public function testGetStreamReturnsWrappedPhpStream()
    {
        $handle = fopen('php://temp', 'wb+');
        $upload = new UploadedFile(new Stream($handle), 0, UploadedFileError::ERROR_OK);
        $uploadHandle = $upload->getStream()->detach();
        expect($uploadHandle)->toBeSame($handle);
    }

    // Happy path: metadata getters plus a successful moveTo().
    public function testSuccessful()
    {
        $stream = create_stream_from_string('Foo bar!');
        $upload = new UploadedFile($stream, $stream->getSize(), UploadedFileError::ERROR_OK, 'filename.txt', 'text/plain');
        expect($upload->getSize())->toBePHPEqual($stream->getSize());
        expect($upload->getClientFilename())->toBePHPEqual('filename.txt');
        expect($upload->getClientMediaType())->toBePHPEqual('text/plain');
        // Move to a temp file and verify the moved contents match the stream.
        $this->cleanup[] = $to = tempnam(sys_get_temp_dir(), 'successful');
        $upload->moveTo($to);
        expect(file_get_contents($to))->toBePHPEqual($stream->__toString());
    }

    public function testMoveCannotBeCalledMoreThanOnce()
    {
        $stream = create_stream_from_string('Foo bar!');
        $upload = new UploadedFile($stream, 0, UploadedFileError::ERROR_OK);
        $this->cleanup[] = $to = tempnam(sys_get_temp_dir(), 'diac');
        $upload->moveTo($to);
        expect(file_exists($to))->toBeTrue();
        // NOTE(review): the expected message is the "cannot retrieve stream"
        // text -- confirm moveTo() really reuses that message for the
        // already-moved case rather than a dedicated "already moved" message.
        expect(() ==> {
            $upload->moveTo($to);
        })->toThrow(
            Exception\UploadedFileAlreadyMovedException::class,
            'Cannot retrieve stream after it has already moved'
        );
    }

    public function testCannotRetrieveStreamAfterMove()
    {
        $stream = create_stream_from_string('Foo bar!');
        $upload = new UploadedFile($stream, 0, UploadedFileError::ERROR_OK);
        $this->cleanup[] = $to = tempnam(sys_get_temp_dir(), 'diac');
        $upload->moveTo($to);
        expect(() ==> {
            $upload->getStream();
        })->toThrow(
            Exception\UploadedFileAlreadyMovedException::class,
            'Cannot retrieve stream after it has already moved'
        );
    }

    // Data provider: every non-OK upload error status, keyed by the PHP
    // UPLOAD_ERR_* constant it mirrors.
    public function nonOkErrorStatus()
    {
        return [
            'UPLOAD_ERR_INI_SIZE' => [UploadedFileError::ERROR_EXCEEDS_MAX_INI_SIZE],
            'UPLOAD_ERR_FORM_SIZE' => [UploadedFileError::ERROR_EXCEEDS_MAX_FORM_SIZE],
            'UPLOAD_ERR_PARTIAL' => [UploadedFileError::ERROR_INCOMPLETE],
            'UPLOAD_ERR_NO_FILE' => [UploadedFileError::ERROR_NO_FILE],
            'UPLOAD_ERR_NO_TMP_DIR' => [UploadedFileError::ERROR_TMP_DIR_NOT_SPECIFIED],
            'UPLOAD_ERR_CANT_WRITE' => [UploadedFileError::ERROR_TMP_DIR_NOT_WRITEABLE],
            'UPLOAD_ERR_EXTENSION' => [UploadedFileError::ERROR_CANCELED_BY_EXTENSION],
        ];
    }

    // Constructing with an error status must succeed and preserve the status.
    <<DataProvider('nonOkErrorStatus')>>
    public function testConstructorDoesNotRaiseExceptionForInvalidStreamWhenErrorStatusPresent($status)
    {
        $uploadedFile = new UploadedFile(create_stream_from_string(''), 0, $status);
        expect($uploadedFile->getError())->toBeSame($status);
    }

    // moveTo() must refuse to operate on an errored upload.
    <<DataProvider('nonOkErrorStatus')>>
    public function testMoveToRaisesExceptionWhenErrorStatusPresent($status)
    {
        $uploadedFile = new UploadedFile(create_stream_from_string(''), 0, $status);
        expect(() ==> {
            $uploadedFile->moveTo(__DIR__.'/'.uniqid());
        })->toThrow(
            Exception\UploadedFileErrorException::class,
            'Cannot retrieve stream due to upload error'
        );
    }

    // getStream() must refuse to operate on an errored upload.
    <<DataProvider('nonOkErrorStatus')>>
    public function testGetStreamRaisesExceptionWhenErrorStatusPresent($status)
    {
        $uploadedFile = new UploadedFile(create_stream_from_string(''), 0, $status);
        expect(() ==> {
            $stream = $uploadedFile->getStream();
        })->toThrow(
            Exception\UploadedFileErrorException::class,
            'Cannot retrieve stream due to upload error'
        );
    }
}
|
#!/bin/bash
# Unpack the NCCL Debian packages listed in $DEBS, install the header and
# libraries under /usr/local, then remove the scratch directory $DIR.

# Check if DEBS variable is set
if [ -z "$DEBS" ]; then
    echo "DEBS variable is not set. Please specify the Debian package files to process."
    exit 1
fi
# Check if DIR variable is set
if [ -z "$DIR" ]; then
    echo "DIR variable is not set. Please specify the directory to remove."
    exit 1
fi
# Iterate through each Debian package file
for sub_deb in $DEBS; do
    echo "Processing $sub_deb"
    # Check if the Debian package file exists
    if [ ! -f "$sub_deb" ]; then
        echo "Error: $sub_deb does not exist."
        continue
    fi
    # Extract the package. Quote the path (the original left it unquoted)
    # and skip the package when either the archive or its data member
    # cannot be unpacked, instead of continuing with stale files.
    if ! ar x "$sub_deb" || ! tar xf data.tar.xz; then
        echo "Error: failed to extract $sub_deb"
        continue
    fi
    # Move nccl.h to /usr/local/include/
    if [ -f "usr/include/nccl.h" ]; then
        mv -f usr/include/nccl.h /usr/local/include/
        echo "Moved nccl.h to /usr/local/include/"
    else
        echo "Error: nccl.h not found in $sub_deb"
    fi
    # Move libnccl* files to /usr/local/lib/
    if [ -n "$(shopt -s nullglob; echo usr/lib/x86_64-linux-gnu/libnccl*)" ]; then
        mv -f usr/lib/x86_64-linux-gnu/libnccl* /usr/local/lib/
        echo "Moved libnccl* files to /usr/local/lib/"
    else
        echo "Error: libnccl* files not found in $sub_deb"
    fi
done
# Remove the specified directory and its contents (quoted so paths with
# spaces cannot expand into multiple rm arguments)
if [ -d "$DIR" ]; then
    rm -rf "$DIR"
    echo "Removed directory $DIR and its contents"
else
    echo "Error: $DIR does not exist or is not a directory"
fi
/// Thread contexts a checker can run on.
enum ThreadType {
    case mainThreadChecker
    case backgroundThreadChecker
}

/// Returns a human-readable name for the given thread type.
///
/// The internal parameter is renamed from the keyword `case` (which forced
/// backtick escaping) and the switch is made exhaustive, so adding a new
/// enum case becomes a compile-time error instead of silently falling into
/// a `default` branch. The external argument label `for` is unchanged.
func determineThreadType(for threadType: ThreadType) -> String {
    switch threadType {
    case .mainThreadChecker:
        return "Main Thread"
    case .backgroundThreadChecker:
        return "Background Thread"
    }
}

// Test the function
let mainThreadType = determineThreadType(for: .mainThreadChecker)
print(mainThreadType) // Output: "Main Thread"
let backgroundThreadType = determineThreadType(for: .backgroundThreadChecker)
print(backgroundThreadType) // Output: "Background Thread"
import axios from 'axios';
import { useContext, useState } from 'react';
import { useHistory } from 'react-router-dom';
import { AuthUserContext, IsLoggedInContext } from '../../App';
import LoginForm from '../../components/Authentication/LoginForm';
import { getLoggedInUser } from '../User/GetLoggedInUser';
// Login screen: renders the login form and exchanges credentials for a JWT.
const Login = () => {
    // Drives the form's "bad credentials" visual state.
    const [isValid, setIsValid] = useState<boolean>(true);
    const { setLoggedIn } = useContext(IsLoggedInContext);
    const { setLoggedInUser } = useContext(AuthUserContext);
    const history = useHistory();

    // POSTs credentials to /login; on success stores the token, flips the
    // global logged-in state, loads the user, and navigates to projects.
    // NOTE(review): the token is kept in localStorage -- confirm that is
    // acceptable for the app's threat model (readable by any XSS payload).
    function loginUser(credentials: any) {
        axios.post('/login', credentials)
            .then((res) => {
                localStorage.setItem('token', res.data.token);
                setLoggedIn(true);
                setLoggedInUser(getLoggedInUser());
                history.push("/home/projects");
            }).catch((err) => {
                console.log(err);
                // Any failure (network or 4xx) is surfaced as invalid input.
                setIsValid(false);
            });
    }

    return (
        <>
            <LoginForm
                loginUser={loginUser}
                isValid={isValid}
                setIsValid={setIsValid}
            />
        </>
    );
}
export default Login;
package ru.job4j.quartzstarter;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import ru.job4j.database.DBConnect;
import ru.job4j.jsoupparser.JsoupParser;
import ru.job4j.jsoupparser.Vacancy;
import java.io.IOException;
import java.sql.SQLException;
import java.text.ParseException;
import java.util.List;
public class ParserJob implements Job {
    private static final Logger LOG = LogManager.getLogger(ParserJob.class);

    /**
     * Quartz entry point: parses the vacancy forum and stores the results.
     * On the first run (empty table) everything since the beginning of the
     * year is parsed; later runs parse incrementally.
     *
     * Failures are logged and swallowed so the scheduler keeps firing the
     * job on its normal cadence.
     */
    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        LOG.info("Start parsing");
        DBConnect dbConnect = new DBConnect();
        JsoupParser jsoupParser = new JsoupParser();
        String url = "https://www.sql.ru/forum/job-offers";
        List<Vacancy> list;
        try {
            // Bootstrap with the whole year on first run, else incremental.
            if (dbConnect.isTableEmpty()) {
                list = jsoupParser.parseTableFromBeginingOfTheYear(url);
            } else {
                list = jsoupParser.parse(url);
            }
            // Insert is identical in both branches, so it is hoisted out.
            dbConnect.insert(list);
        } catch (SQLException | IOException | ParseException e) {
            // Multi-catch replaces three duplicate blocks; log at ERROR with
            // the stack trace (the original logged the exception at INFO,
            // which hid real failures).
            LOG.error("Vacancy parsing failed", e);
        }
        LOG.info("Finish parsing");
    }
}
|
<reponame>Mirmik/crow
/**
@file
*/
#ifndef CROW_NODE_SPAMMER_H
#define CROW_NODE_SPAMMER_H
#include <chrono>
#include <crow/extra/nodeaddr.h>
#include <crow/proto/node.h>
#include <map>
#include <igris/event/delegate.h>
using namespace std::literals::chrono_literals;
namespace crow
{
class spammer : public crow::node
{
struct record
{
std::chrono::time_point<std::chrono::system_clock> last_subscribe;
};
std::map<nodeaddr, record> targets;
std::chrono::milliseconds timeout = 5000ms;
uint8_t qos = 0;
uint16_t ackquant = 50;
public:
void send(igris::buffer data, bool fastsend = CROW_FASTSEND_DEFAULT)
{
auto time = std::chrono::system_clock::now();
std::vector<std::map<nodeaddr, record>::iterator> to_delete;
auto eit = targets.end();
auto it = targets.begin();
for (; it != eit; it++)
{
if (time - it->second.last_subscribe > timeout)
{
to_delete.push_back(it);
continue;
}
node::send(it->first.nid, it->first.hostaddr(), data, qos,
ackquant, fastsend);
}
for (auto it : to_delete)
{
targets.erase(it);
}
}
void send_v(igris::buffer *data, size_t sz,
bool fastsend = CROW_FASTSEND_DEFAULT)
{
auto time = std::chrono::system_clock::now();
std::vector<std::map<nodeaddr, record>::iterator> to_delete;
auto eit = targets.end();
auto it = targets.begin();
for (; it != eit; it++)
{
if (time - it->second.last_subscribe > timeout)
{
to_delete.push_back(it);
continue;
}
node::send_v(it->first.nid, it->first.hostaddr(), data, sz, qos,
ackquant, fastsend);
}
for (auto it : to_delete)
{
targets.erase(it);
}
}
void incoming_packet(crow_packet *pack) override
{
auto time = std::chrono::system_clock::now();
std::vector<uint8_t> addr(crow_packet_addrptr(pack),
crow_packet_addrptr(pack) +
crow_packet_addrsize(pack));
targets[nodeaddr{addr, node::sid(pack)}] = record{time};
crow::release(pack);
}
int count_of_subscribers() { return targets.size(); }
};
    /// Subscriber node: registers interest with a spammer by sending it an
    /// empty message, then forwards every received payload to a delegate.
    class spam_subscriber : public crow::node
    {
        igris::delegate<void, igris::buffer> dlg;
        // Raw bytes of the publisher's host address, kept for resubscribe().
        std::vector<uint8_t> addr;
        // Publisher node id, kept for resubscribe().
        // NOTE(review): stored as int while subscribe() receives nid_t --
        // confirm nid_t always fits in int on the supported targets.
        int nid;

    public:
        spam_subscriber(igris::delegate<void, igris::buffer> dlg) : dlg(dlg) {}

        /// Send an empty message to the publisher to register interest, and
        /// remember (nid, host) so resubscribe() can repeat the handshake.
        void subscribe(nid_t nid, crow::hostaddr_view host, uint8_t qos = 2,
                       uint16_t ackquant = 200)
        {
            this->addr = std::vector<uint8_t>(
                (uint8_t *)host.data(), (uint8_t *)host.data() + host.size());
            this->nid = nid;
            node::send(nid, host, "", qos, ackquant);
        }

        /// Repeat the subscription handshake with the stored address.
        void resubscribe(uint8_t qos = 2, uint16_t ackquant = 200)
        {
            node::send(nid, {addr.data(), addr.size()}, "", qos, ackquant);
        }

        /// Forward the payload to the delegate, then release the packet.
        void incoming_packet(crow_packet *pack) override
        {
            dlg(node::message(pack));
            crow::release(pack);
        }
    };
}
#endif
|
#!/bin/bash
# if your program creates an output file (e.g., output.txt) compare it to the file created just now and store the difference in diff.log
# Example: diff output.txt ${APP_DIR}/golden_output.txt > diff.log
touch diff.log
# comparing stdout generated by your program; injected tracer lines
# (":::Injecting ...:::") are stripped out before the comparison
diff <(sed 's/:::Injecting.*::://g' stdout.txt) ${APP_DIR}/golden_stdout.txt > stdout_diff.log
# comparing stderr generated by your program
diff stderr.txt ${APP_DIR}/golden_stderr.txt > stderr_diff.log
# Application specific output: The following check will be performed only if at least one of diff.log, stdout_diff.log, and stderr_diff.log is different
grep sum stdout.txt > selected_output.txt
# NOTE(review): the program side greps stdout.txt but the golden side greps
# golden_stderr.txt -- confirm this asymmetry is intended (golden_stdout.txt
# looks like it may have been meant here).
grep sum ${APP_DIR}/golden_stderr.txt > selected_golden_output.txt
diff selected_output.txt selected_golden_output.txt > special_check.log
|
<filename>java/ql/test/query-tests/UnreadLocal/UnreadLocal/ImplicitReads.java
package test;
// CodeQL test fixture for the UnreadLocal query: locals that are read only
// implicitly (e.g. by the close() of a try-with-resources) must not be
// flagged. NOTE(review): the query's .expected file is line-sensitive --
// regenerate it if these added comment lines shift line numbers.
public class ImplicitReads
{
    // AutoCloseable whose close() reads a field, making the resource
    // variable implicitly "read" when the try block ends.
    private static class B implements AutoCloseable
    {
        long time = 123;
        @Override
        public void close ()
        {
            System.out.println("Closing at time " + time);
        }
    }
    public void test()
    {
        // Not flagged due to implicit read in finally block
        try (B b = new B()) {
            System.out.println("test");
        }
        // Not flagged due to implicit read in finally block
        try (B b = null) {}
    }
    public void test2(B b)
    {
        if (b.time > 3) {
            System.out.println("test");
        }
        B c = null;
        if (c == null) {
            System.out.println("test");
        }
        // Assignment is useless
        c = b;
        // Not flagged due to implicit read in implicit finally block
        try(B d = b) {}
    }
}
|
#!/bin/bash
# Build the posit32 interval-generation tool, run the generation jobs in
# parallel, archive the produced data files, and clean up.

# Degree of parallelism; overridable with -j <n> (falls back to 2 when the
# argument is absent or not a number).
parallelism=2
while getopts j: flag
do
    case "${flag}" in
        j) parArg=${OPTARG};;
    esac
done
# "-eq against itself" succeeds only for integer input; errors are discarded.
if [ "$parArg" -eq "$parArg" ] 2>/dev/null
then
    parallelism=$parArg
fi
#1. Build the interval generation program
# (typo fix: "genration" -> "generation" in the user-visible message)
echo -e "\033[1m* Compiling interval generation code\033[0m"
cd IntervalGen/posit32
make --silent clean
make --silent
echo -e "\033[1m\tCompilation finished\033[0m"
#2. Run interval generation program in parallel
echo -e "\033[1m* Generating reduced intervals. This will take a long time\033[0m"
echo -e "\033[1m\tRunning generation script in parallel\033[0m"
echo -e "\033[1m\tParallelism: $parallelism jobs\033[0m"
echo -e "\033[1m\tApproximate Time required for each job:\033[0m"
echo -e "\t\tLog10(x): 12hrs"
echo -e "\t\tLog(x): 6.5hrs"
echo -e "\t\tLog2(x): 7.5hrs"
cat IntGenCommands.txt | parallel -j $parallelism
echo -e "\033[1m\tInterval generation complete\033[0m"
#3. Once intervals are generated, move the data files somewhere safe
echo -e "\033[1m* Moving interval data file to somewhere safe...\033[0m"
mkdir -p ../../intervals
# The ten identical mv commands collapse into one loop over the file names.
for data_file in Posit32Log10Data Posit32LogData Posit32Log2Data \
    Posit32ExpData Posit32Exp2Data Posit32Exp10Data \
    Posit32SinhForSinhData Posit32CoshForSinhData \
    Posit32SinhForCoshData Posit32CoshForCoshData; do
    mv "$data_file" ../../intervals/
done
echo -e "\033[1m\tFile transfer complete\033[0m"
#4. Finally, make clean
echo -e "\033[1m* Cleaning up\033[0m"
make --silent clean
cd ../..
echo -e "\033[1m\tCleanup complete\033[0m"
|
#! /bin/sh
# Extract MFCC+pitch features for every WAV under train/ and test/, writing
# a CSV next to each input file. The two previously duplicated loops are
# merged: a for-loop word list may contain several glob patterns.
for f in train/*/*.wav test/*/*.wav
do
    ./SMILExtract -C mfcc_pitch.conf -I "$f" -outputcsv "${f%.*}".csv
done
|
const express = require('express');
const mysql = require('mysql');
const app = express();
// Create a connection object to connect to the MySQL database
// NOTE(review): user/password/database are empty placeholders -- fill them
// in (ideally from environment variables, never hard-coded) before running.
const con = mysql.createConnection({
    host: 'localhost',
    user: '',
    password: '',
    database: ''
});
// Connect to the MySQL database (connection errors are logged, not fatal).
con.connect((err) => {
    if (err) {
        console.log('Error connecting to the MySQL database');
        console.log(err);
        return;
    }
    console.log('Successfully connected to the MySQL database');
});
// Route to serve the REST API endpoint
app.get('/api/query', (req, res) => {
    // SQL query to retrieve data from the MySQL database
    // NOTE(review): `table` is a reserved word in MySQL -- this query will
    // fail unless the identifier is backtick-quoted or replaced with the
    // real table name.
    const sql = 'SELECT * FROM table';
    // Execute the SQL query
    con.query(sql, (err, result, fields) => {
        if (err) {
            // Return an error if there was any
            console.log('Error executing the SQL query');
            console.log(err);
            res.status(500).send({ error: 'Error executing the SQL query' });
            return;
        }
        // Return the results if successful
        console.log('Successfully executed the SQL query');
        console.log(result);
        res.send(result);
    });
});
// Listen to port 3000
app.listen(3000, () => console.log('API listening to port 3000'));
#!/bin/bash
# Build onnxruntime v1.7.2 from source and install headers plus static
# libraries into <repo>/build/onnxruntime.
WORK_DIR=$(git rev-parse --show-toplevel)/build
CURRENT_DIR=$(pwd)
OUTPUT_DIR=$WORK_DIR/onnxruntime
# Start from a clean install prefix.
if [[ -d $OUTPUT_DIR ]]; then
    rm -rf $OUTPUT_DIR
fi
if [[ ! -d $WORK_DIR ]]; then
    mkdir -p $WORK_DIR
fi
cd $WORK_DIR
ONNXRT_VER=1.7.2
ONNXRT_DIR=$WORK_DIR/onnxruntime-$ONNXRT_VER
# Shallow clone of the pinned release tag (skipped when already present).
if [[ ! -d $ONNXRT_DIR ]]; then
    git clone --single-branch -b v$ONNXRT_VER https://github.com/microsoft/onnxruntime $ONNXRT_DIR
fi
cd $ONNXRT_DIR
./build.sh --config RelWithDebInfo --parallel --skip_tests
# NOTE(review): the build-output path is macOS-specific -- confirm this
# script is only ever run on macOS hosts.
cd build/MacOS/RelWithDebInfo
# Re-run cmake with the install prefix, then install headers/cmake files.
cmake $WORK_DIR/onnxruntime-$ONNXRT_VER/cmake -DCMAKE_INSTALL_PREFIX=$OUTPUT_DIR && \
cmake --build . --target install
# Static libraries are not installed by the install target; copy them over.
mkdir -p $OUTPUT_DIR/lib
for f in $( find . -name "lib*.a" ); do
    cp $f $OUTPUT_DIR/lib
done
cd $CURRENT_DIR
|
<reponame>cinaglia/bitcheck<filename>bitcheck/utils.py
import functools
import datetime
def measure(func):
@functools.wraps(func)
def measure(self, *args, **kwargs):
# Skip measurement if verbosity is disabled
if not self.args.get('--verbose'):
return func(self, *args, **kwargs)
# Calculate time and output
start = datetime.datetime.now()
print "Retrieving exchange rates ..".format(func.__name__),
output = func(self, *args, **kwargs)
diff = (datetime.datetime.now() - start).total_seconds()
print " took {} seconds".format(diff)
return output
return measure
|
# Defining the function
def count_positive_negative_zero(arr):
    """Count how many elements of ``arr`` are positive, zero, and negative,
    and print one summary line per category."""
    positive_count = sum(1 for value in arr if value > 0)
    zero_count = sum(1 for value in arr if value == 0)
    negative_count = sum(1 for value in arr if value < 0)
    # Printing the results
    print('Number of positive elements:', positive_count)
    print('Number of zero elements:', zero_count)
    print('Number of negative elements:', negative_count)

# Testing the function
arr = [1, 0, -2, 3, 4, -6]
count_positive_negative_zero(arr)
# Output
# Number of positive elements: 3
# Number of zero elements: 1
# Number of negative elements: 2
# CocoaPods spec that publishes the prebuilt AdBrixRM XCFramework binary.
Pod::Spec.new do |s|
  s.name         = "HelloXCFramework_Yen"
  s.version      = "0.0.5"
  s.summary      = "A brief description of MyFramework project."
  s.homepage     = "https://github.com/yen-igaw/HelloXCFramework"
  s.license      = {
    :type => 'Commercial',
    :text => <<-LICENSE
              All text and design is copyright 2020 igaworks, Inc.
              All rights reserved.
              https://github.com/IGAWorksDev/SDK-IOS-Abx.Remaster-CocoaPod
    LICENSE
  }
  s.platform = :ios
  # NOTE(review): "<EMAIL>" is a redaction placeholder -- restore the real
  # author contact before publishing.
  s.author       = { "yen-igaw" => "<EMAIL>" }
  # The podspec version doubles as the git tag to fetch.
  s.source       = { :git => "https://github.com/yen-igaw/HelloXCFramework.git", :tag => "#{s.version}" }
  # Supported deployment targets
  s.ios.deployment_target = "9.0"
  # Published binaries
  s.vendored_frameworks = "AdBrixRM_XC.xcframework"
end
/*
* Copyright 2018-2021 Elyra Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { useCallback, useEffect } from "react";
import { useSelect } from "downshift";
import { useTheme } from "styled-components";
import {
EnumButton,
EnumContainer,
EnumIcon,
EnumLabel,
EnumMenu,
EnumMenuItem,
} from "./components";
import { createControl, useControlState } from "./control";
import { getErrorMessages, getNestedEnumValidators } from "./validators";
// One top-level enum entry; `options` holds its second-level choices.
export interface Data {
  value: string;
  label: string;
  options?: {
    value: string;
    label: string;
  }[];
}
// A flattened (item value, option value) selection pair; `option` is ""
// when an item without options is selected (see flatten/allowNoOptions).
export interface FlatData {
  value: string;
  option: string;
}
// Props accepted by NestedEnumControl.
interface Props {
  data?: Data[];
  placeholder?: string;
  allowNoOptions?: boolean;
  required?: boolean;
}
/**
 * Flattens the nested enum `data` into (value, option) pairs.
 *
 * Each option of each item yields one FlatData entry. When `allowNoOptions`
 * is true, an item without options yields a single entry whose `option` is
 * the empty string. Return type is now `FlatData[]` instead of the original
 * `any[]` (callers already treat it as FlatData[]), and the inner callback
 * parameter is no longer mislabeled as `Data`.
 */
function flatten(data: Data[], allowNoOptions: boolean): FlatData[] {
  const flattenedData: FlatData[] = [];
  data.forEach((item: Data) => {
    item.options?.forEach((option) => {
      flattenedData.push({
        value: item.value,
        option: option.value,
      });
    });
    if (allowNoOptions && (!item.options || item.options.length === 0)) {
      flattenedData.push({
        value: item.value,
        option: "",
      });
    }
  });
  return flattenedData;
}
/**
 * Builds the button label for the current selection.
 *
 * Returns the placeholder when nothing is selected or the selected item is
 * no longer present in `data`; otherwise "ItemLabel" or
 * "ItemLabel: OptionLabel" when the selected option also resolves.
 */
function getLabel(
  value: FlatData | undefined,
  data: Data[],
  placeholder: string
): string {
  if (value === undefined) {
    return placeholder;
  }
  const match = data.find((item) => item.value === value.value);
  if (!match) {
    return placeholder;
  }
  const selectedOption = match.options?.find((o) => o.value === value.option);
  return selectedOption
    ? match.label + ": " + selectedOption.label
    : match.label;
}
/**
 * Dropdown control for a two-level (item -> option) enum.
 *
 * The nested `data` is flattened into (value, option) pairs; the current
 * selection is one such pair held in control state. Rendering uses
 * downshift's useSelect for accessibility/keyboard handling.
 */
export function NestedEnumControl({
  data = [],
  placeholder = "Select a value",
  allowNoOptions = false,
  required,
}: Props) {
  const [value, setValue] = useControlState<FlatData>();
  const theme = useTheme();
  const flattenedData = flatten(data, allowNoOptions);
  // Store the newly selected pair; an item with an empty `value` clears the
  // selection instead.
  const handleSelectedItemChange = useCallback(
    ({ selectedItem }) => {
      if (selectedItem.value) {
        setValue(selectedItem);
      } else {
        setValue(undefined);
      }
    },
    [setValue]
  );
  // Clear the selection whenever its item no longer exists in the (possibly
  // updated) data set.
  // NOTE(review): matching is on `item.value` only, not the (value, option)
  // pair -- confirm that removing just the selected option should not also
  // clear the selection.
  useEffect(() => {
    if (
      value !== undefined &&
      !flatten(data, allowNoOptions).find((item) => item.value === value.value)
    ) {
      setValue(undefined);
    }
  }, [data, allowNoOptions, value, setValue]);
  const {
    isOpen,
    getToggleButtonProps,
    getMenuProps,
    getItemProps,
  } = useSelect({
    items: flattenedData,
    selectedItem: value,
    onSelectedItemChange: handleSelectedItemChange,
  });
  // Error messages are only surfaced when the field is required.
  const validators = getNestedEnumValidators({
    data,
    allowNoOptions,
    required,
  });
  const errorMessages = required ? getErrorMessages(value, validators) : [];
  return (
    <div className={errorMessages.length > 0 ? "error" : undefined}>
      <EnumContainer isOpen={isOpen}>
        <EnumButton {...getToggleButtonProps()}>
          <EnumLabel>{getLabel(value, data, placeholder)}</EnumLabel>
          <EnumIcon className="elyricon elyricon-chevron-down">
            {theme.overrides?.chevronDownIcon}
          </EnumIcon>
        </EnumButton>
        <EnumMenu {...getMenuProps()}>
          {isOpen &&
            flattenedData.map((item: FlatData, index: number) => {
              const label = getLabel(item, data, placeholder);
              return (
                <EnumMenuItem
                  key={`${item.value}${item.option}${index}`}
                  {...getItemProps({ item, index })}
                >
                  <EnumLabel title={label}>{label}</EnumLabel>
                </EnumMenuItem>
              );
            })}
        </EnumMenu>
      </EnumContainer>
    </div>
  );
}
export default createControl("NestedEnumControl", NestedEnumControl);
|
#!/bin/bash
# Build the image, then run a Time Capsule container exposing AFP (548),
# LDAPS (636) and mDNS (5353/udp); backup data and netatalk state are
# bind-mounted from ./DATA, and --rm removes the container on exit.
# NOTE(review): confirm what port 9 is for here (discard protocol?).
./docker_build.sh
docker run -p 548:548 -p 636:636 -p 9:9 -p 5353:5353/udp --name timemachine -h TimeMachine -v $(pwd)/DATA/timemachine:/timemachine -v $(pwd)/DATA/netatalk:/var/state/netatalk --rm didstopia/timecapsule:latest
|
package com.embotics.vlm.plugin;
import java.io.IOException;
import java.net.ConnectException;
// Custom exception class for handling service connection exceptions
public class ServiceConnectException extends IOException {
public ServiceConnectException(String message) {
super(message);
}
}
// Existing code snippet with modified exception handling
public class YourClass {
public void interactWithExternalService() {
try {
// Code to interact with external service
} catch (ConnectException e) {
throw new ServiceConnectException("Error connecting to the external service");
} catch (IOException e) {
// Handle other IO exceptions
}
}
} |
# Download the ThingsBoard 3.2.2 Professional Edition RPM from the official
# distribution server.
wget https://dist.thingsboard.io/thingsboard-3.2.2pe.rpm
|
const { merge } = require("webpack-merge");
const MiniCssExtractPlugin = require("mini-css-extract-plugin");
const baseConfig = require("./webpack.common");
// Development configuration, merged on top of the shared base config.
module.exports = merge(baseConfig, {
  devServer: {
    // Serve static assets from the public folder.
    static: {
      directory: "./src/public",
    },
    // Open the browser automatically and accept any Host header (useful
    // when the dev server is reached through a proxy or LAN address).
    open: true,
    port: 7700,
    allowedHosts: "all",
  },
  mode: "development",
  output: {
    filename: "[name].js",
  },
  plugins: [
    // Emit CSS as separate files instead of inlining it into JS bundles.
    new MiniCssExtractPlugin({
      filename: "[name].css",
      chunkFilename: "[id].css",
    }),
  ],
});
|
package com.ua.nure.TestHelper.service;
import com.ua.nure.TestHelper.domain.Template;
import org.springframework.stereotype.Component;
import java.util.List;
// NOTE(review): @Component on an interface does not register a bean by
// itself -- confirm an implementing class (or other configuration) actually
// provides the bean.
@Component
public interface TemplateService {
    /** Persists a new template and returns the stored entity. */
    Template addTemplate(Template template);
    /** Removes the given template. */
    void delete(Template template);
    /** Looks a template up by its numeric id. */
    Template getById(long id);
    /** Updates an existing template and returns the stored entity. */
    Template editTemplate(Template template);
    /** Returns every stored template. */
    List<Template> getAll();
    /** Returns all templates owned by the given teacher id. */
    List<Template> getAllByTeacherId(String id);
}
|
#!/bin/bash
# Launch a distributed DMLC/ps-lite job: one scheduler on this host, then
# N servers and M workers started over ssh using machinefile/ host lists.
# set -x
if [ $# -lt 3 ]; then
    echo "usage: $0 num_servers num_workers bin [args..]"
    # exit status must be 0-255; the original "exit -1" wrapped to 255
    exit 1
fi
# $1: the number of servers
export DMLC_NUM_SERVER=$1
shift
# $1 (after the shift above): the number of workers
export DMLC_NUM_WORKER=$1
shift
script_path=`readlink -f $0`
scripts_path=`dirname $script_path`
project_path=`dirname $scripts_path`
model=$1
shift
# the test program
bin=${project_path}"/"${model}"/"$1
shift
worker=${project_path}"/"${model}"/"$1
shift
solver=${project_path}"/"${model}"/"$1
shift
arg="$@"
# Host lists: scheduler public/private IPs, worker hosts, server hosts.
scheduler_file=${scripts_path}"/machinefile/scheduler"
v_scheduler_file=${scripts_path}"/machinefile/vscheduler"
host_file=${scripts_path}"/machinefile/host"
server_file=${scripts_path}"/machinefile/server"
scheduler_ip=`cat $scheduler_file | awk '{ print $0 }'`
v_scheduler_ip=`cat $v_scheduler_file | awk '{ print $0 }'`
host_ip=`cat $host_file | awk '{ print $0 }'`
server_ip=`cat $server_file | awk '{ print $0 }'`
declare -a nodes
# public IP
i=0
for ip in $scheduler_ip; do
    nodes[${i}]=$ip
    (( i++ ))
done
export DMLC_PS_ROOT_URI=${nodes[0]}
export DMLC_PS_ROOT_PORT=8000
# private IP
i=0
for ip in $v_scheduler_ip; do
    nodes[${i}]=$ip
    (( i++ ))
done
export DMLC_NODE_VHOST=${nodes[0]}
# server IP
declare -a servers
i=0
for ip in $server_ip; do
    servers[${i}]=$ip
    (( i++ ))
done
# worker IP (reuses the nodes array; scheduler entries are overwritten)
i=0
for ip in $host_ip; do
    nodes[${i}]=$ip
    (( i++ ))
done
# check before running
echo $DMLC_PS_ROOT_URI
echo ${project_path}
echo ${bin}
echo ${worker}
echo ${solver}
echo ${arg}
# exit
# Must be on node scheduler_ip
echo "create scheduler"
export DMLC_ROLE='scheduler'
${bin} ${arg} --sync_timeout=600 &
# Servers and workers are distributed round-robin over their host lists.
echo "create servers"
for ((i=0; i<${DMLC_NUM_SERVER}; ++i)); do
    echo ${servers[(( $i % ${#servers[@]} ))]}
    ssh ${servers[(( $i % ${#servers[@]} ))]} "${scripts_path}/remote_server.sh ${DMLC_NUM_SERVER} ${DMLC_NUM_WORKER} ${bin} $i ${arg}" &
done
echo "create workers"
for ((i=0; i<${DMLC_NUM_WORKER}; ++i)); do
    echo ${nodes[(( $i % ${#nodes[@]} ))]}
    ssh ${nodes[(( $i % ${#nodes[@]} ))]} "cd ${project_path}/${model}; ${scripts_path}/remote_worker.sh ${DMLC_NUM_SERVER} ${DMLC_NUM_WORKER} ${worker} $i ${solver} ${arg}" &
done
# Block on the scheduler; it exits when the job completes.
wait %1
|
package edu.mdamle.controllers;
import io.javalin.http.Context;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import edu.mdamle.beans.DirectSupervisor;
import edu.mdamle.beans.Employee;
import edu.mdamle.beans.TuitionReimbursementRequest;
import edu.mdamle.beans.User;
import edu.mdamle.beans.User.Role;
import edu.mdamle.services.AdminServiceImpl;
import edu.mdamle.services.BenefitsCoordinatorServiceImpl;
import edu.mdamle.services.DirectSupervisorServiceImpl;
import edu.mdamle.services.EmployeeServiceImpl;
import edu.mdamle.services.MessageService;
import edu.mdamle.services.TrmsMessageServiceImpl;
import edu.mdamle.services.TuitionReimbursementRequestService;
import edu.mdamle.services.TuitionReimbursementRequestServiceImpl;
import edu.mdamle.services.UserService;
import edu.mdamle.services.UserServiceImpl;
public class UserController {
    private static final Logger log = LogManager.getLogger(UserController.class);
    // User service; the concrete role-specific implementation is assigned
    // per request inside authorization()/login().
    private static UserService userSvc; // = new UserServiceImpl(); //BEANFACTORY?
    private static TuitionReimbursementRequestService trrSvc; //BEANFACTORY?
    private static MessageService msgSvc;
    //temp
    // Scratch variable used only for trace-logging nullable session values.
    private static String temp = null;
//LOGGED
private static boolean authorization(Context ctx) {
log.trace("authorization("+"ctx"+") invoked"); //log flag
log.trace("session attributes:\n"+ctx.sessionAttributeMap()); //log flag
String userRole = ctx.sessionAttribute("userRole");
String username = ctx.sessionAttribute("username");
log.trace("currently logged-in user's userRole: "+userRole); //log flag
log.trace("currently logged-in username: "+username); //log flag
if(username == null) {
log.error("username is null"); //log flag
ctx.status(500);//:InternalServerError //502:BadGateway
ctx.result("no logged-in username detected");
log.error("authorization unsuccesful"); //log flag
return false;
}
if(userRole == null) {
log.error("userRole is null"); //log flag
ctx.status(500);//:InternalServerError //502:BadGateway
ctx.result("no logged-in userRole detected");
log.error("authorization unsuccesful"); //log flag
return false;
}
//This may be enough for general UserController authorization
//maybe later you could implement code for more strict (username,userRole) verification
//nah, let's implement it now
//it may have to be removed, or significantly altered/optimized:
userRole = userRole.toUpperCase();
switch(userRole) {
case "ADMIN":
userSvc = new AdminServiceImpl();
break;
case "EMP":
userSvc = new EmployeeServiceImpl();
break;
case "BENCO":
userSvc = new BenefitsCoordinatorServiceImpl();
break;
case "DIRSUP":
case "DEPTHEAD":
userSvc = new DirectSupervisorServiceImpl();
break;
default:
log.error("failure to understand logged-in user's userRole: "+userRole); //log flag
ctx.status(500);//:InternalServerError //502:BadGateway
ctx.result("logged-in userRole incomprehensible");
log.error("authorization unsuccesful"); //log flag
return false;
}
log.trace("attempting to verify existence of "+userRole+" "+username); //log flag
if(!userSvc.userExistence(username)) {
log.error("existence of logged-in "+userRole+" "+username+" could not be verified"); //log flag
ctx.status(500);//:InternalServerError //502:BadGateway
ctx.result("existence of logged-in \"+userRole+\" \"+username+\" could not be verified");
log.error("authorization unsuccesful"); //log flag
return false;
}
log.trace("existence of logged-in "+userRole+" "+username+" verified"); //log flag
ctx.result("existence of logged-in "+userRole+" "+username+" verified");
return true;
}
    //DELETE
    //service method for user authorization
    // Returns true only when a non-ADMIN user is logged in.
    // NOTE(review): marked //DELETE above and unused by the visible code --
    // confirm before removing. Reads a "User" session attribute while
    // authorization() reads "username"/"userRole"; the two schemes look
    // inconsistent.
    private static boolean authorizationSansAdmin(Context ctx) {
        User currentUser = ctx.sessionAttribute("User"); //VERIFY
        if(currentUser == null) { //VERIFY
            //ERROR STATUS
            //NO LOGGED in User
            return false;
        }
        if(currentUser.getUserRole().equals(Role.ADMIN)) {
            //ERROR STATUS
            //wrong user type
            return false;
        }
        return true;
    }
    //service method for message existence verification
    // Returns true when the message identified by the :id and :username path
    // parameters exists. NOTE(review): Integer.parseInt will throw
    // NumberFormatException on a non-numeric :id -- confirm a surrounding
    // handler converts that into a 4xx response.
    private static boolean messageExistence(Context ctx) {
        int id = Integer.parseInt(ctx.pathParam("id")); //VERIFY
        String targetUsername = ctx.pathParam("username"); //VERIFY
        msgSvc = new TrmsMessageServiceImpl(); //VERIFY
        if(msgSvc.getMessage(targetUsername, id) == null) {
            //ERROR STATUS
            //this message doesn't exist
            return false;
        }
        return true;
    }
//LOGGED
//User Login
//Try coding in a response indicating who logged in & success
public static void login(Context ctx) {
//log flag
log.trace("login("+"ctx"+") invoked"); //log flag
log.trace(new StringBuilder("POST")
.append(" request to:\n")
.append(ctx.contextPath().toString())
.append("\nmethod: ")
.append(" login")
.toString());
log.trace("session attributes:\n"+ctx.sessionAttributeMap()); //log flag
log.trace("form parameters:\n"+ctx.formParamMap()); //log flag
log.trace("path parameters:\n"+ctx.pathParamMap()); //log flag
log.trace("query parameters:\n"+ctx.queryParamMap()); //log flag
log.trace("Attempting to login");//log flag
if(ctx.sessionAttribute("username") != null) {
log.error("Cannot log in.\n"+ctx.sessionAttribute("username")+" is already logged in");
ctx.status(406); //VERIFY
log.trace("--------------------------------------------------------------------------------------\n\n"); //log flag
return;
}
//log flag
if(ctx.sessionAttribute("username") == null) {
temp = "NULL";
} else {
temp = (ctx.sessionAttribute("username"));
}
log.trace("sessionAttribute should be null: "+temp);
log.trace("session attribute map:\n"+ctx.sessionAttributeMap());
String username = ctx.formParam("username").toLowerCase(); //try-catch, VERIFY formParam vs queryParam
String password = ctx.formParam("password"); //try-catch, VERIFY formParam vs queryParam
//log flag
log.trace("ctx form param "+"username"+" is ", ctx.formParam("username"));
log.trace("stored as "+username);
log.trace("ctx form param "+"password"+" is ", ctx.formParam("password"));
log.trace("stored as "+password);
userSvc = new UserServiceImpl();
log.trace("attempting to determine the user role of "+username);//log flag
Role userRole = ((UserServiceImpl) userSvc).getUserRole(username);
if(userRole == null) {
log.error("User "+username+" could not be found, or their role could not be verified");
log.trace("--------------------------------------------------------------------------------------\n\n"); //log flag
return;
}
log.trace("user role of "+username+" found to be: "+userRole.toString());//log flag
switch(userRole) {
case ADMIN:
userSvc = new AdminServiceImpl();
break;
case EMP:
userSvc = new EmployeeServiceImpl();
break;
case BENCO:
userSvc = new BenefitsCoordinatorServiceImpl();
break;
case DIRSUP:
case DEPTHEAD:
userSvc = new DirectSupervisorServiceImpl();
break;
default:
log.error("unable to verify user role: "+userRole.toString());
log.trace("--------------------------------------------------------------------------------------\n\n"); //log flag
return;
}
log.trace("userSvc instantiated as:\n"+userSvc.getClass().toString());//log flag
log.trace("invoking User Service's passwordMatch("+password+","+username+")"); //log flag
if(!userSvc.passwordMatch(password, username)) {
//error status
log.error("incorrect password"); //log flag
log.error("method returned "+false); //log flag
log.trace("--------------------------------------------------------------------------------------\n\n"); //log flag
return;
}
log.trace("setting session attrib "+"username"+" to: "+username);//log flag
ctx.sessionAttribute("username", username);
log.trace("setting session attrib "+"userRole"+" to: "+userRole.toString().toUpperCase());//log flag
ctx.sessionAttribute("userRole", userRole.toString().toUpperCase());
log.trace("session attribute map:\n"+ctx.sessionAttributeMap());//log flag
//ctx.json(target) //???
//success STATUS
//MESSAGES
log.trace(userRole.toString()+" "+username+" is now logged in");
ctx.status(200);//:OK, or 201:Created
ctx.result(userRole.toString()+" "+username+" is now logged in");
log.trace("--------------------------------------------------------------------------------------\n\n"); //log flag
return;
}
	//LOGGED
	//User Logout
	//Try coding in a response that indicates who logged out & success
	//Ends the current session: delegates to authorization(ctx) (presumably a
	//logged-in/role check -- helper not visible here, confirm), invalidates
	//the underlying servlet session, then re-checks the attribute map.
	//Responses: 401 unauthorized, 500 attributes survived invalidation,
	//200 on success.
	public static void logout(Context ctx) {
		//log flag
		log.trace("logout("+"ctx"+") invoked"); //log flag
		log.trace(new StringBuilder("DELETE")
				.append(" request to:\n")
				.append(ctx.contextPath().toString())
				.append("\nmethod: ")
				.append(" logout")
				.toString());
		log.trace("session attributes:\n"+ctx.sessionAttributeMap()); //log flag
		log.trace("form parameters:\n"+ctx.formParamMap()); //log flag
		log.trace("path parameters:\n"+ctx.pathParamMap()); //log flag
		log.trace("query parameters:\n"+ctx.queryParamMap()); //log flag
		log.trace("Attempting to logout");//log flag
		//caller must pass the authorization check before the session is touched
		if(!authorization(ctx)) {
			log.error("authorization failed"); //log flag
			ctx.status(401);
			ctx.result("authorization failed");
			log.trace("--------------------------------------------------------------------------------------\n\n"); //log flag
			return;
		}
		log.trace("session attributes:\n"+ctx.sessionAttributeMap()); //log flag
		log.trace("attempting to invalidate javalin session"); //log flag
		//invalidating the servlet session clears all session attributes
		ctx.req.getSession().invalidate();
		log.trace("session attributes:\n"+ctx.sessionAttributeMap()); //log flag
		//what if(ctx.sessionAttributeMap() == null)?
		//defensive re-check: attributes should be gone after invalidate()
		if(!ctx.sessionAttributeMap().isEmpty()) {
			ctx.status(500);//:InternalServerError //502:BadGateway
			log.trace("logout unsuccessful due to some server error");//log flag
			ctx.result("logout unsuccessful due to some server error");
			log.trace("--------------------------------------------------------------------------------------\n\n"); //log flag
			return;
		}
		ctx.status(200);//:OK
		log.trace("logout successful");//log flag
		ctx.result("logout successful");
		log.trace("--------------------------------------------------------------------------------------\n\n"); //log flag
		return;
	}
//Get a list of TRRs
public static void viewTrrs(Context ctx) {
User currentUser = ctx.sessionAttribute("User"); //VERIFY
if(currentUser == null || currentUser.getUserRole().equals(Role.ADMIN)) {
//ERROR STATUS
//either NO LOGGED in User
//or ADMIN does not have this privelege
//Maybe break this down into 2 validations
return;
}
String targetUsername = ctx.pathParam("username"); //VERIFY
userSvc = new EmployeeServiceImpl();
Employee targetUser = (Employee) userSvc.getUser(targetUsername); //VERIFY
//VERIFY BLOCK
if(currentUser.getUserRole().equals(Role.EMP)){
if(!currentUser.getUsername().equals(targetUsername)) {
//ERROR STATUS
//Unauthorized, emp cannot view another emp's trrs
return;
}
trrSvc = new TuitionReimbursementRequestServiceImpl();
//RETURN trrs somehow
//return trrSvc.getTrrs(targetUsername)
//SUCCESS
return;
}
//VERIFY BLOCK
switch(currentUser.getUserRole()) {
case DIRSUP:
if(!currentUser.getUsername().equals(targetUser.getDirSup())) {
//ERROR STATUS
//employee not assgned to you
return;
}
break;
case DEPTHEAD:
userSvc = new DirectSupervisorServiceImpl();
DirectSupervisor dirSupOfTarget = (DirectSupervisor) userSvc.getUser(targetUser.getDirSup()); //VERIFY
if(!currentUser.getUsername().equals(dirSupOfTarget.getUsername()) && !currentUser.getUsername().equals(dirSupOfTarget.getSupervisorUsername())) { //VERIFY
//ERROR STATUS
//employee not assgned to you nor to your underling
return;
}
break;
case BENCO:
if(!currentUser.getUsername().equals(targetUser.getBenCo())) {
//ERROR STATUS
//employee not assgned to you
return;
}
break;
default:
break;
}
trrSvc = new TuitionReimbursementRequestServiceImpl();
//RETURN trrs somehow
//return trrSvc.getTrrs(targetUsername)
//SUCCESS
}
//View a particular Tuition Reimbursement Request
public static void viewTrr(Context ctx) {
User currentUser = ctx.sessionAttribute("User"); //VERIFY
if(currentUser == null || currentUser.getUserRole().equals(Role.ADMIN)) {
//ERROR STATUS
//either NO LOGGED in User
//or ADMIN does not have this privelege
//Maybe break this down into 2 validations
return;
}
String targetUsername = ctx.pathParam("username"); //VERIFY
int id = Integer.parseInt(ctx.pathParam("id"));
userSvc = new EmployeeServiceImpl();
Employee targetUser = (Employee) userSvc.getUser(targetUsername);
if(targetUser == null) {
//ERROR STATUS
//Employee DNE
return;
}
if(currentUser.getUserRole().equals(Role.DIRSUP)) {
if(!currentUser.getUsername().equals(targetUser.getDirSup())) { //VERIFY
//ERROR STATUS
//employee not assigned to you
return;
}
//Some message of successful authorization?
} else if(currentUser.getUserRole().equals(Role.BENCO)) {
if(!currentUser.getUsername().equals(targetUser.getBenCo())) { //VERIFY
//ERROR STATUS
//employee not assigned to you
return;
}
//Some message of successful authorization?
} else if(currentUser.getUserRole().equals(Role.DEPTHEAD)) {
userSvc = new DirectSupervisorServiceImpl();
DirectSupervisor dirSupOfTarget = (DirectSupervisor) userSvc.getUser(targetUser.getDirSup()); //VERIFY
if(!currentUser.getUsername().equals(targetUser.getDirSup()) && !currentUser.getUsername().equals(dirSupOfTarget.getSupervisorUsername())) { //VERIFY
//ERROR STATUS
//employee not assigned to you, nor to your underlings
return;
}
} else if(currentUser.getUsername() != targetUsername) {
//ERROR STATUS
//an EMP cannot view another EMP's trr
return;
}
trrSvc = new TuitionReimbursementRequestServiceImpl();
TuitionReimbursementRequest trr = trrSvc.getTrr(targetUsername, id); //VERIFY
if(trr == null) {
//ERROR STATUS
//TRR DNE
return;
}
//RETURN trr in some fashion
}
//View Messages
public static void viewMessages(Context ctx) {
if(!authorizationSansAdmin(ctx)) {
//ERROR STATUS
//not logged in
//or wrong user type
return;
}
msgSvc = new TrmsMessageServiceImpl();
//msgSvc.getMessages(currentUsername);
//return list/map of user's messages
//null check
//SUCCESS
}
//View Message
public static void viewMessage(Context ctx) {
if(!messageExistence(ctx)) {
//ERROR STATUS
//This message DNE
return;
}
msgSvc = new TrmsMessageServiceImpl();
//return msgSvc.getMessage(currentUsername, id);
//SUCCESS
}
}
|
def quick_sort(list):
    """Return a sorted copy of *list* using recursive quicksort.

    The first element is the pivot; elements equal to the pivot go to the
    right partition, so duplicates are preserved.
    """
    if len(list) <= 1:
        return list
    pivot, *rest = list
    smaller = [item for item in rest if item < pivot]
    larger = [item for item in rest if item >= pivot]
    return quick_sort(smaller) + [pivot] + quick_sort(larger)
# Demo: sort a small sample and print the result.
data = [5, 3, 7, 8, 6]
print(quick_sort(data))
<filename>Modules/ThirdParty/OssimPlugins/src/ossim/EnvisatAsar/AntennaElevationPatterns.cpp
/*
* Copyright (C) 2005-2017 by Centre National d'Etudes Spatiales (CNES)
*
* This file is licensed under MIT license:
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#include <EnvisatAsar/AntennaElevationPatterns.h>
namespace ossimplugins
{
/**
 * Default constructor: passes the record mnemonic
 * "AntennaElevationPatterns_rec" to the EnvisatAsarRecord base.
 */
AntennaElevationPatterns::AntennaElevationPatterns() : EnvisatAsarRecord("AntennaElevationPatterns_rec")
{
}
/**
 * Destructor: nothing to release beyond what the base class handles.
 */
AntennaElevationPatterns::~AntennaElevationPatterns()
{
}
/**
 * Streams the record as one "field:value" line per field, including each of
 * the 33 elevation pattern samples.
 */
std::ostream& operator<<(std::ostream& os, const AntennaElevationPatterns& data)
{
  os << "zero_doppler_time:" << data._zero_doppler_time << std::endl;
  os << "attach_flag:" << data._attach_flag << std::endl;
  os << "swath:" << data._swath << std::endl;
  for (int sample = 0; sample < 33; ++sample)
  {
    os << "elevation pattern:" << data._elevation_pattern[sample] << std::endl;
  }
  return os;
}
/**
 * Deserializes one Antenna Elevation Patterns record from a raw byte stream:
 * a 12-byte zero-Doppler time string, a 1-byte attach flag, a 3-byte swath
 * id, 33 four-byte pattern samples (byte-swapped via SwitchEndian after the
 * raw read), then trailing spare bytes that are discarded.
 */
std::istream& operator>>(std::istream& is, AntennaElevationPatterns& data)
{
  // Fixed-width scratch buffers, NUL-terminated for std::string assignment.
  char text12[13];
  text12[12] = '\0';
  char text3[4];
  text3[3] = '\0';
  char spare[17];
  spare[16] = '\0';

  is.read(text12, 12);
  data._zero_doppler_time = text12;

  is.read((char*)&(data._attach_flag), 1);

  is.read(text3, 3);
  data._swath = text3;

  for (int sample = 0; sample < 33; sample++)
  {
    is.read((char*)&(data._elevation_pattern[sample]), 4);
    data.SwitchEndian(data._elevation_pattern[sample]);
  }

  // Skip the record's trailing spare field.
  // NOTE(review): reads 14 bytes into a 16-char buffer; the buffer is large
  // enough, but confirm the spare length (14 vs 16) against the ASAR spec.
  is.read(spare, 14);
  return is;
}
/**
 * Copy constructor: duplicates the base record, the scalar fields, and every
 * entry of the fixed-size elevation pattern array.
 */
AntennaElevationPatterns::AntennaElevationPatterns(const AntennaElevationPatterns& rhs):
  EnvisatAsarRecord(rhs),
  _zero_doppler_time(rhs._zero_doppler_time),
  _attach_flag(rhs._attach_flag),
  _swath(rhs._swath)
{
  for (int sample = 0; sample < 33; ++sample)
  {
    _elevation_pattern[sample] = rhs._elevation_pattern[sample];
  }
}
/**
 * Assignment operator: copies the scalar fields and all 33 pattern samples.
 * NOTE(review): unlike the copy constructor, the EnvisatAsarRecord base
 * members are not reassigned here -- confirm that this is intentional.
 */
AntennaElevationPatterns& AntennaElevationPatterns::operator=(const AntennaElevationPatterns& rhs)
{
  _zero_doppler_time = rhs._zero_doppler_time;
  _attach_flag = rhs._attach_flag;
  _swath = rhs._swath;
  for (int sample = 0; sample < 33; ++sample)
  {
    _elevation_pattern[sample] = rhs._elevation_pattern[sample];
  }
  return *this;
}
}
|
#!/bin/bash
#=============================================================
# https://github.com/P3TERX/Actions-OpenWrt
# File name: diy-part1.sh
# Description: OpenWrt DIY script part 1 (Before Update feeds)
# License: MIT
# Author: P3TERX
# Blog: https://p3terx.com
#=============================================================
# Uncomment a feed source
#sed -i 's/^#\(.*helloworld\)/\1/' feeds.conf.default
# Add a feed source
#sed -i '$a src-git kenzo https://github.com/kenzok8/openwrt-packages' feeds.conf.default
#sed -i '$a src-git OpenWrt-Actions https://github.com/OpenWrt-Actions/helloworld' feeds.conf.default
|
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "gn/ninja_rust_binary_target_writer.h"
#include "gn/config.h"
#include "gn/rust_values.h"
#include "gn/scheduler.h"
#include "gn/target.h"
#include "gn/test_with_scheduler.h"
#include "gn/test_with_scope.h"
#include "util/build_config.h"
#include "util/test/test.h"
using NinjaRustBinaryTargetWriterTest = TestWithScheduler;
// A source_set target whose sources are Rust files is expected to fail
// resolution (OnResolved returns false).
TEST_F(NinjaRustBinaryTargetWriterTest, RustSourceSet) {
  Err error;
  TestWithScope scope;
  Target source_set(scope.settings(), Label(SourceDir("//foo/"), "bar"));
  source_set.set_output_type(Target::SOURCE_SET);
  source_set.visibility().SetPublic();
  source_set.sources().push_back(SourceFile("//foo/input1.rs"));
  source_set.sources().push_back(SourceFile("//foo/main.rs"));
  source_set.source_types_used().Set(SourceFile::SOURCE_RS);
  source_set.SetToolchain(scope.toolchain());
  ASSERT_FALSE(source_set.OnResolved(&error));
}
// A Rust executable with a custom ldflag: the generated rust_bin build
// statement must carry the crate variables, the ldflag, and both sources.
TEST_F(NinjaRustBinaryTargetWriterTest, RustExecutable) {
  Err error;
  TestWithScope scope;
  Target exe(scope.settings(), Label(SourceDir("//foo/"), "bar"));
  exe.set_output_type(Target::EXECUTABLE);
  exe.visibility().SetPublic();
  SourceFile crate_root("//foo/main.rs");
  exe.sources().push_back(SourceFile("//foo/input3.rs"));
  exe.sources().push_back(crate_root);
  exe.source_types_used().Set(SourceFile::SOURCE_RS);
  exe.rust_values().set_crate_root(crate_root);
  exe.rust_values().crate_name() = "foo_bar";
  exe.config_values().ldflags().push_back("-fsanitize=address");
  exe.SetToolchain(scope.toolchain());
  ASSERT_TRUE(exe.OnResolved(&error));
  {
    std::ostringstream stream;
    NinjaRustBinaryTargetWriter writer(&exe, stream);
    writer.Run();
    const char kExpected[] =
        "crate_name = foo_bar\n"
        "crate_type = bin\n"
        "output_extension = \n"
        "output_dir = \n"
        "rustflags =\n"
        "rustenv =\n"
        "root_out_dir = .\n"
        "target_out_dir = obj/foo\n"
        "target_output_name = bar\n"
        "\n"
        "build ./foo_bar: rust_bin ../../foo/main.rs | ../../foo/input3.rs "
        "../../foo/main.rs\n"
        "  source_file_part = main.rs\n"
        "  source_name_part = main\n"
        "  externs =\n"
        "  rustdeps =\n"
        "  ldflags = -fsanitize=address\n"
        "  sources = ../../foo/input3.rs ../../foo/main.rs\n";
    std::string actual = stream.str();
    EXPECT_EQ(kExpected, actual) << kExpected << "\n" << actual;
  }
}
// Accessible dependencies appear as --extern switches for rustc, so that the
// target crate can make use of them whether transitive or not. Transitive
// dependencies can be accessible if they are in the public_deps of a direct
// dependency, or part of a chain of public_deps from a direct dependency. Any
// dependencies used by other crate dependencies also must appear, but are
// pointed to by -Ldependency as they are not available for use from the target
// crate. In the future they may move to `--extern priv:` when explicit private
// dependencies are stabilized.
TEST_F(NinjaRustBinaryTargetWriterTest, RlibDeps) {
  Err err;
  TestWithScope setup;
  // Rlib that will be a *private* dependency of the "direct" rlib below.
  Target private_rlib(setup.settings(), Label(SourceDir("//baz/"), "privatelib"));
  private_rlib.set_output_type(Target::RUST_LIBRARY);
  private_rlib.visibility().SetPublic();
  SourceFile bazlib("//baz/lib.rs");
  private_rlib.sources().push_back(SourceFile("//baz/privatelib.rs"));
  private_rlib.sources().push_back(bazlib);
  private_rlib.source_types_used().Set(SourceFile::SOURCE_RS);
  private_rlib.rust_values().set_crate_root(bazlib);
  private_rlib.rust_values().crate_name() = "privatecrate";
  private_rlib.SetToolchain(setup.toolchain());
  ASSERT_TRUE(private_rlib.OnResolved(&err));
  // Leaf rlib: no deps, so externs/rustdeps are empty.
  {
    std::ostringstream out;
    NinjaRustBinaryTargetWriter writer(&private_rlib, out);
    writer.Run();
    const char expected[] =
        "crate_name = privatecrate\n"
        "crate_type = rlib\n"
        "output_extension = .rlib\n"
        "output_dir = \n"
        "rustflags =\n"
        "rustenv =\n"
        "root_out_dir = .\n"
        "target_out_dir = obj/baz\n"
        "target_output_name = libprivatelib\n"
        "\n"
        "build obj/baz/libprivatelib.rlib: rust_rlib ../../baz/lib.rs | "
        "../../baz/privatelib.rs ../../baz/lib.rs\n"
        "  source_file_part = lib.rs\n"
        "  source_name_part = lib\n"
        "  externs =\n"
        "  rustdeps =\n"
        "  ldflags =\n"
        "  sources = ../../baz/privatelib.rs ../../baz/lib.rs\n";
    std::string out_str = out.str();
    EXPECT_EQ(expected, out_str) << expected << "\n" << out_str;
  }
  // Rlib reachable from the final executable only via a chain of public_deps.
  Target far_public_rlib(setup.settings(),
                         Label(SourceDir("//far/"), "farlib"));
  far_public_rlib.set_output_type(Target::RUST_LIBRARY);
  far_public_rlib.visibility().SetPublic();
  SourceFile farlib("//far/lib.rs");
  far_public_rlib.sources().push_back(SourceFile("//far/farlib.rs"));
  far_public_rlib.sources().push_back(farlib);
  far_public_rlib.source_types_used().Set(SourceFile::SOURCE_RS);
  far_public_rlib.rust_values().set_crate_root(farlib);
  far_public_rlib.rust_values().crate_name() = "farcrate";
  far_public_rlib.SetToolchain(setup.toolchain());
  ASSERT_TRUE(far_public_rlib.OnResolved(&err));
  // Leaf rlib again: empty externs/rustdeps.
  {
    std::ostringstream out;
    NinjaRustBinaryTargetWriter writer(&far_public_rlib, out);
    writer.Run();
    const char expected[] =
        "crate_name = farcrate\n"
        "crate_type = rlib\n"
        "output_extension = .rlib\n"
        "output_dir = \n"
        "rustflags =\n"
        "rustenv =\n"
        "root_out_dir = .\n"
        "target_out_dir = obj/far\n"
        "target_output_name = libfarlib\n"
        "\n"
        "build obj/far/libfarlib.rlib: rust_rlib ../../far/lib.rs | "
        "../../far/farlib.rs ../../far/lib.rs\n"
        "  source_file_part = lib.rs\n"
        "  source_name_part = lib\n"
        "  externs =\n"
        "  rustdeps =\n"
        "  ldflags =\n"
        "  sources = ../../far/farlib.rs ../../far/lib.rs\n";
    std::string out_str = out.str();
    EXPECT_EQ(expected, out_str) << expected << "\n" << out_str;
  }
  // Rlib that publicly re-exports farcrate (public_deps on far_public_rlib).
  Target public_rlib(setup.settings(), Label(SourceDir("//bar/"), "publiclib"));
  public_rlib.set_output_type(Target::RUST_LIBRARY);
  public_rlib.visibility().SetPublic();
  SourceFile barlib("//bar/lib.rs");
  public_rlib.sources().push_back(SourceFile("//bar/publiclib.rs"));
  public_rlib.sources().push_back(barlib);
  public_rlib.source_types_used().Set(SourceFile::SOURCE_RS);
  public_rlib.rust_values().set_crate_root(barlib);
  public_rlib.rust_values().crate_name() = "publiccrate";
  public_rlib.public_deps().push_back(LabelTargetPair(&far_public_rlib));
  public_rlib.SetToolchain(setup.toolchain());
  ASSERT_TRUE(public_rlib.OnResolved(&err));
  // The public dep shows up both as an --extern and a -Ldependency path.
  {
    std::ostringstream out;
    NinjaRustBinaryTargetWriter writer(&public_rlib, out);
    writer.Run();
    const char expected[] =
        "crate_name = publiccrate\n"
        "crate_type = rlib\n"
        "output_extension = .rlib\n"
        "output_dir = \n"
        "rustflags =\n"
        "rustenv =\n"
        "root_out_dir = .\n"
        "target_out_dir = obj/bar\n"
        "target_output_name = libpubliclib\n"
        "\n"
        "build obj/bar/libpubliclib.rlib: rust_rlib ../../bar/lib.rs | "
        "../../bar/publiclib.rs ../../bar/lib.rs obj/far/libfarlib.rlib\n"
        "  source_file_part = lib.rs\n"
        "  source_name_part = lib\n"
        "  externs = --extern farcrate=obj/far/libfarlib.rlib\n"
        "  rustdeps = -Ldependency=obj/far\n"
        "  ldflags =\n"
        "  sources = ../../bar/publiclib.rs ../../bar/lib.rs\n";
    std::string out_str = out.str();
    EXPECT_EQ(expected, out_str) << expected << "\n" << out_str;
  }
  // The executable's direct dependency: public_deps on public_rlib (making
  // publiccrate/farcrate accessible downstream) and a private dep on
  // private_rlib.
  Target rlib(setup.settings(), Label(SourceDir("//foo/"), "direct"));
  rlib.set_output_type(Target::RUST_LIBRARY);
  rlib.visibility().SetPublic();
  SourceFile lib("//foo/main.rs");
  rlib.sources().push_back(SourceFile("//foo/direct.rs"));
  rlib.sources().push_back(lib);
  rlib.source_types_used().Set(SourceFile::SOURCE_RS);
  rlib.rust_values().set_crate_root(lib);
  rlib.rust_values().crate_name() = "direct";
  rlib.SetToolchain(setup.toolchain());
  rlib.public_deps().push_back(LabelTargetPair(&public_rlib));
  rlib.private_deps().push_back(LabelTargetPair(&private_rlib));
  ASSERT_TRUE(rlib.OnResolved(&err));
  // The executable under test, with a private dep on "direct".
  Target target(setup.settings(), Label(SourceDir("//main/"), "main"));
  target.set_output_type(Target::EXECUTABLE);
  target.visibility().SetPublic();
  SourceFile main("//main/main.rs");
  target.sources().push_back(SourceFile("//main/source.rs"));
  target.sources().push_back(main);
  target.source_types_used().Set(SourceFile::SOURCE_RS);
  target.rust_values().set_crate_root(main);
  target.rust_values().crate_name() = "main_crate";
  target.private_deps().push_back(LabelTargetPair(&rlib));
  target.SetToolchain(setup.toolchain());
  ASSERT_TRUE(target.OnResolved(&err));
  // direct, publiccrate, and farcrate are accessible (--extern); privatecrate
  // is only reachable via -Ldependency since it is private to "direct".
  {
    std::ostringstream out;
    NinjaRustBinaryTargetWriter writer(&target, out);
    writer.Run();
    const char expected[] =
        "crate_name = main_crate\n"
        "crate_type = bin\n"
        "output_extension = \n"
        "output_dir = \n"
        "rustflags =\n"
        "rustenv =\n"
        "root_out_dir = .\n"
        "target_out_dir = obj/main\n"
        "target_output_name = main\n"
        "\n"
        "build ./main_crate: rust_bin ../../main/main.rs | "
        "../../main/source.rs ../../main/main.rs obj/foo/libdirect.rlib\n"
        "  source_file_part = main.rs\n"
        "  source_name_part = main\n"
        "  externs = --extern direct=obj/foo/libdirect.rlib "
        "--extern publiccrate=obj/bar/libpubliclib.rlib "
        "--extern farcrate=obj/far/libfarlib.rlib\n"
        "  rustdeps = -Ldependency=obj/foo -Ldependency=obj/bar "
        "-Ldependency=obj/far -Ldependency=obj/baz\n"
        "  ldflags =\n"
        "  sources = ../../main/source.rs ../../main/main.rs\n";
    std::string out_str = out.str();
    EXPECT_EQ(expected, out_str) << expected << "\n" << out_str;
  }
}
// Like RlibDeps, but with dylib (shared library) crates in the chain: rlibs
// that sit behind a dylib's *private* deps should stop being accessible to
// downstream targets.
TEST_F(NinjaRustBinaryTargetWriterTest, DylibDeps) {
  Err err;
  TestWithScope setup;
  // Rlib that will be a *private* dependency of the dylib below.
  Target private_inside_dylib(setup.settings(), Label(SourceDir("//faz/"), "private_inside"));
  private_inside_dylib.set_output_type(Target::RUST_LIBRARY);
  private_inside_dylib.visibility().SetPublic();
  SourceFile fazlib("//faz/lib.rs");
  private_inside_dylib.sources().push_back(SourceFile("//faz/private_inside.rs"));
  private_inside_dylib.sources().push_back(fazlib);
  private_inside_dylib.source_types_used().Set(SourceFile::SOURCE_RS);
  private_inside_dylib.rust_values().set_crate_root(fazlib);
  private_inside_dylib.rust_values().crate_name() = "private_inside";
  private_inside_dylib.SetToolchain(setup.toolchain());
  ASSERT_TRUE(private_inside_dylib.OnResolved(&err));
  // Leaf rlib: empty externs/rustdeps.
  {
    std::ostringstream out;
    NinjaRustBinaryTargetWriter writer(&private_inside_dylib, out);
    writer.Run();
    const char expected[] =
        "crate_name = private_inside\n"
        "crate_type = rlib\n"
        "output_extension = .rlib\n"
        "output_dir = \n"
        "rustflags =\n"
        "rustenv =\n"
        "root_out_dir = .\n"
        "target_out_dir = obj/faz\n"
        "target_output_name = libprivate_inside\n"
        "\n"
        "build obj/faz/libprivate_inside.rlib: rust_rlib ../../faz/lib.rs | "
        "../../faz/private_inside.rs ../../faz/lib.rs\n"
        "  source_file_part = lib.rs\n"
        "  source_name_part = lib\n"
        "  externs =\n"
        "  rustdeps =\n"
        "  ldflags =\n"
        "  sources = ../../faz/private_inside.rs ../../faz/lib.rs\n";
    std::string out_str = out.str();
    EXPECT_EQ(expected, out_str) << expected << "\n" << out_str;
  }
  // Rlib that will be a *public* dependency of the dylib below.
  Target inside_dylib(setup.settings(), Label(SourceDir("//baz/"), "inside"));
  inside_dylib.set_output_type(Target::RUST_LIBRARY);
  inside_dylib.visibility().SetPublic();
  SourceFile bazlib("//baz/lib.rs");
  inside_dylib.sources().push_back(SourceFile("//baz/inside.rs"));
  inside_dylib.sources().push_back(bazlib);
  inside_dylib.source_types_used().Set(SourceFile::SOURCE_RS);
  inside_dylib.rust_values().set_crate_root(bazlib);
  inside_dylib.rust_values().crate_name() = "inside";
  inside_dylib.SetToolchain(setup.toolchain());
  ASSERT_TRUE(inside_dylib.OnResolved(&err));
  // Leaf rlib: empty externs/rustdeps.
  {
    std::ostringstream out;
    NinjaRustBinaryTargetWriter writer(&inside_dylib, out);
    writer.Run();
    const char expected[] =
        "crate_name = inside\n"
        "crate_type = rlib\n"
        "output_extension = .rlib\n"
        "output_dir = \n"
        "rustflags =\n"
        "rustenv =\n"
        "root_out_dir = .\n"
        "target_out_dir = obj/baz\n"
        "target_output_name = libinside\n"
        "\n"
        "build obj/baz/libinside.rlib: rust_rlib ../../baz/lib.rs | "
        "../../baz/inside.rs ../../baz/lib.rs\n"
        "  source_file_part = lib.rs\n"
        "  source_name_part = lib\n"
        "  externs =\n"
        "  rustdeps =\n"
        "  ldflags =\n"
        "  sources = ../../baz/inside.rs ../../baz/lib.rs\n";
    std::string out_str = out.str();
    EXPECT_EQ(expected, out_str) << expected << "\n" << out_str;
  }
  // The dylib itself: SHARED_LIBRARY output with CRATE_DYLIB crate type,
  // public dep on "inside" and private dep on "private_inside".
  Target dylib(setup.settings(), Label(SourceDir("//bar/"), "mylib"));
  dylib.set_output_type(Target::SHARED_LIBRARY);
  dylib.visibility().SetPublic();
  SourceFile barlib("//bar/lib.rs");
  dylib.sources().push_back(SourceFile("//bar/mylib.rs"));
  dylib.sources().push_back(barlib);
  dylib.source_types_used().Set(SourceFile::SOURCE_RS);
  dylib.rust_values().set_crate_type(RustValues::CRATE_DYLIB);  // TODO
  dylib.rust_values().set_crate_root(barlib);
  dylib.rust_values().crate_name() = "mylib";
  dylib.public_deps().push_back(LabelTargetPair(&inside_dylib))
  dylib.private_deps().push_back(LabelTargetPair(&private_inside_dylib));
  dylib.SetToolchain(setup.toolchain());
  ASSERT_TRUE(dylib.OnResolved(&err));
  // Both rlibs (public and private) are externs for the dylib itself.
  {
    std::ostringstream out;
    NinjaRustBinaryTargetWriter writer(&dylib, out);
    writer.Run();
    const char expected[] =
        "crate_name = mylib\n"
        "crate_type = dylib\n"
        "output_extension = .so\n"
        "output_dir = \n"
        "rustflags =\n"
        "rustenv =\n"
        "root_out_dir = .\n"
        "target_out_dir = obj/bar\n"
        "target_output_name = libmylib\n"
        "\n"
        "build obj/bar/libmylib.so: rust_dylib ../../bar/lib.rs | "
        "../../bar/mylib.rs ../../bar/lib.rs "
        "obj/baz/libinside.rlib obj/faz/libprivate_inside.rlib\n"
        "  source_file_part = lib.rs\n"
        "  source_name_part = lib\n"
        "  externs = --extern inside=obj/baz/libinside.rlib "
        "--extern private_inside=obj/faz/libprivate_inside.rlib\n"
        "  rustdeps = -Ldependency=obj/baz -Ldependency=obj/faz\n"
        "  ldflags =\n"
        "  sources = ../../bar/mylib.rs ../../bar/lib.rs\n";
    std::string out_str = out.str();
    EXPECT_EQ(expected, out_str) << expected << "\n" << out_str;
  }
  // A second dylib that publicly depends on the first.
  Target another_dylib(setup.settings(), Label(SourceDir("//foo/"), "direct"));
  another_dylib.set_output_type(Target::SHARED_LIBRARY);
  another_dylib.visibility().SetPublic();
  SourceFile lib("//foo/main.rs");
  another_dylib.sources().push_back(SourceFile("//foo/direct.rs"));
  another_dylib.sources().push_back(lib);
  another_dylib.source_types_used().Set(SourceFile::SOURCE_RS);
  another_dylib.rust_values().set_crate_type(RustValues::CRATE_DYLIB);
  another_dylib.rust_values().set_crate_root(lib);
  another_dylib.rust_values().crate_name() = "direct";
  another_dylib.SetToolchain(setup.toolchain());
  another_dylib.public_deps().push_back(LabelTargetPair(&dylib));
  ASSERT_TRUE(another_dylib.OnResolved(&err));
  // The executable under test, with a private dep on the second dylib.
  Target target(setup.settings(), Label(SourceDir("//foo/"), "bar"));
  target.set_output_type(Target::EXECUTABLE);
  target.visibility().SetPublic();
  SourceFile main("//foo/main.rs");
  target.sources().push_back(SourceFile("//foo/source.rs"));
  target.sources().push_back(main);
  target.source_types_used().Set(SourceFile::SOURCE_RS);
  target.rust_values().set_crate_root(main);
  target.rust_values().crate_name() = "foo_bar";
  target.private_deps().push_back(LabelTargetPair(&another_dylib));
  target.SetToolchain(setup.toolchain());
  ASSERT_TRUE(target.OnResolved(&err));
  // direct, mylib, and inside are externs; private_inside (private to mylib)
  // only contributes a -Ldependency search path.
  {
    std::ostringstream out;
    NinjaRustBinaryTargetWriter writer(&target, out);
    writer.Run();
    const char expected[] =
        "crate_name = foo_bar\n"
        "crate_type = bin\n"
        "output_extension = \n"
        "output_dir = \n"
        "rustflags =\n"
        "rustenv =\n"
        "root_out_dir = .\n"
        "target_out_dir = obj/foo\n"
        "target_output_name = bar\n"
        "\n"
        "build ./foo_bar: rust_bin ../../foo/main.rs | ../../foo/source.rs "
        "../../foo/main.rs obj/foo/libdirect.so\n"
        "  source_file_part = main.rs\n"
        "  source_name_part = main\n"
        "  externs = --extern direct=obj/foo/libdirect.so "
        "--extern mylib=obj/bar/libmylib.so "
        "--extern inside=obj/baz/libinside.rlib\n"
        "  rustdeps = -Ldependency=obj/foo -Ldependency=obj/bar "
        "-Ldependency=obj/baz -Ldependency=obj/faz\n"
        "  ldflags =\n"
        "  sources = ../../foo/source.rs ../../foo/main.rs\n";
    std::string out_str = out.str();
    EXPECT_EQ(expected, out_str) << expected << "\n" << out_str;
  }
}
// Rust deps that sit behind a (non-Rust) group target must still propagate:
// the group contributes an order-only stamp dependency, while the proc-macro
// behind it still appears as an --extern.
TEST_F(NinjaRustBinaryTargetWriterTest, RlibDepsAcrossGroups) {
  Err err;
  TestWithScope setup;
  // A proc-macro crate that will be hidden behind a group.
  Target procmacro(setup.settings(), Label(SourceDir("//bar/"), "mymacro"));
  procmacro.set_output_type(Target::RUST_PROC_MACRO);
  procmacro.visibility().SetPublic();
  SourceFile barproc("//bar/lib.rs");
  procmacro.sources().push_back(SourceFile("//bar/mylib.rs"));
  procmacro.sources().push_back(barproc);
  procmacro.source_types_used().Set(SourceFile::SOURCE_RS);
  procmacro.rust_values().set_crate_root(barproc);
  procmacro.rust_values().crate_name() = "mymacro";
  procmacro.rust_values().set_crate_type(RustValues::CRATE_PROC_MACRO);
  procmacro.SetToolchain(setup.toolchain())
  ASSERT_TRUE(procmacro.OnResolved(&err));
  // The proc-macro builds with the rust_macro rule and a .so extension.
  {
    std::ostringstream out;
    NinjaRustBinaryTargetWriter writer(&procmacro, out);
    writer.Run();
    const char expected[] =
        "crate_name = mymacro\n"
        "crate_type = proc-macro\n"
        "output_extension = .so\n"
        "output_dir = \n"
        "rustflags =\n"
        "rustenv =\n"
        "root_out_dir = .\n"
        "target_out_dir = obj/bar\n"
        "target_output_name = libmymacro\n"
        "\n"
        "build obj/bar/libmymacro.so: rust_macro ../../bar/lib.rs | "
        "../../bar/mylib.rs ../../bar/lib.rs\n"
        "  source_file_part = lib.rs\n"
        "  source_name_part = lib\n"
        "  externs =\n"
        "  rustdeps =\n"
        "  ldflags =\n"
        "  sources = ../../bar/mylib.rs ../../bar/lib.rs\n";
    std::string out_str = out.str();
    EXPECT_EQ(expected, out_str) << expected << "\n" << out_str;
  }
  // The group wrapping the proc-macro as a public dep.
  Target group(setup.settings(), Label(SourceDir("//baz/"), "group"));
  group.set_output_type(Target::GROUP);
  group.visibility().SetPublic();
  group.public_deps().push_back(LabelTargetPair(&procmacro));
  group.SetToolchain(setup.toolchain());
  ASSERT_TRUE(group.OnResolved(&err));
  // An rlib that depends on the proc-macro only through the group.
  Target rlib(setup.settings(), Label(SourceDir("//bar/"), "mylib"));
  rlib.set_output_type(Target::RUST_LIBRARY);
  rlib.visibility().SetPublic();
  SourceFile barlib("//bar/lib.rs");
  rlib.sources().push_back(SourceFile("//bar/mylib.rs"));
  rlib.sources().push_back(barlib);
  rlib.source_types_used().Set(SourceFile::SOURCE_RS);
  rlib.rust_values().set_crate_root(barlib);
  rlib.rust_values().crate_name() = "mylib";
  rlib.SetToolchain(setup.toolchain());
  rlib.public_deps().push_back(LabelTargetPair(&group));
  ASSERT_TRUE(rlib.OnResolved(&err));
  // The group shows up as an order-only stamp; the macro is still an extern.
  {
    std::ostringstream out;
    NinjaRustBinaryTargetWriter writer(&rlib, out);
    writer.Run();
    const char expected[] =
        "crate_name = mylib\n"
        "crate_type = rlib\n"
        "output_extension = .rlib\n"
        "output_dir = \n"
        "rustflags =\n"
        "rustenv =\n"
        "root_out_dir = .\n"
        "target_out_dir = obj/bar\n"
        "target_output_name = libmylib\n"
        "\n"
        "build obj/bar/libmylib.rlib: rust_rlib ../../bar/lib.rs | "
        "../../bar/mylib.rs ../../bar/lib.rs obj/bar/libmymacro.so || "
        "obj/baz/group.stamp\n"
        "  source_file_part = lib.rs\n"
        "  source_name_part = lib\n"
        "  externs = --extern mymacro=obj/bar/libmymacro.so\n"
        "  rustdeps = -Ldependency=obj/bar\n"
        "  ldflags =\n"
        "  sources = ../../bar/mylib.rs ../../bar/lib.rs\n";
    std::string out_str = out.str();
    EXPECT_EQ(expected, out_str) << expected << "\n" << out_str;
  }
  // The executable under test, with a private dep on the rlib.
  Target target(setup.settings(), Label(SourceDir("//foo/"), "bar"));
  target.set_output_type(Target::EXECUTABLE);
  target.visibility().SetPublic();
  SourceFile main("//foo/main.rs");
  target.sources().push_back(SourceFile("//foo/source.rs"));
  target.sources().push_back(main);
  target.source_types_used().Set(SourceFile::SOURCE_RS);
  target.rust_values().set_crate_root(main);
  target.rust_values().crate_name() = "foo_bar";
  target.private_deps().push_back(LabelTargetPair(&rlib));
  target.SetToolchain(setup.toolchain());
  ASSERT_TRUE(target.OnResolved(&err));
  // mylib and the transitively-public mymacro are both externs here.
  {
    std::ostringstream out;
    NinjaRustBinaryTargetWriter writer(&target, out);
    writer.Run();
    const char expected[] =
        "crate_name = foo_bar\n"
        "crate_type = bin\n"
        "output_extension = \n"
        "output_dir = \n"
        "rustflags =\n"
        "rustenv =\n"
        "root_out_dir = .\n"
        "target_out_dir = obj/foo\n"
        "target_output_name = bar\n"
        "\n"
        "build ./foo_bar: rust_bin ../../foo/main.rs | "
        "../../foo/source.rs ../../foo/main.rs obj/bar/libmylib.rlib || "
        "obj/baz/group.stamp\n"
        "  source_file_part = main.rs\n"
        "  source_name_part = main\n"
        "  externs = --extern mylib=obj/bar/libmylib.rlib "
        "--extern mymacro=obj/bar/libmymacro.so\n"
        "  rustdeps = -Ldependency=obj/bar\n"
        "  ldflags =\n"
        "  sources = ../../foo/source.rs ../../foo/main.rs\n";
    std::string out_str = out.str();
    EXPECT_EQ(expected, out_str) << expected << "\n" << out_str;
  }
}
// Verifies that entries in rust_values().aliased_deps() rename crates in the
// emitted --extern flags: a renamed direct dependency and a renamed transitive
// public dependency both appear under their aliases, while the unaliased
// intermediate rlib keeps its original crate name.
TEST_F(NinjaRustBinaryTargetWriterTest, RenamedDeps) {
  Err err;
  TestWithScope setup;
  // "transitive": an rlib reachable only through rlib's public_deps below.
  Target transitive(setup.settings(), Label(SourceDir("//faz/"), "transitive"));
  transitive.set_output_type(Target::RUST_LIBRARY);
  transitive.visibility().SetPublic();
  SourceFile transitive_lib("//faz/transitive/lib.rs");
  transitive.sources().push_back(SourceFile("//faz/transitive/transitive.rs"));
  transitive.sources().push_back(transitive_lib);
  transitive.source_types_used().Set(SourceFile::SOURCE_RS);
  transitive.rust_values().set_crate_root(transitive_lib);
  transitive.rust_values().crate_name() = "transitive";
  transitive.SetToolchain(setup.toolchain());
  ASSERT_TRUE(transitive.OnResolved(&err));
  // "mylib": intermediate rlib that publicly re-exports "transitive".
  Target rlib(setup.settings(), Label(SourceDir("//baz/"), "mylib"));
  rlib.set_output_type(Target::RUST_LIBRARY);
  rlib.visibility().SetPublic();
  SourceFile barlib("//baz/bar/lib.rs");
  rlib.sources().push_back(SourceFile("//baz/bar/mylib.rs"));
  rlib.sources().push_back(barlib);
  rlib.source_types_used().Set(SourceFile::SOURCE_RS);
  rlib.rust_values().set_crate_root(barlib);
  rlib.rust_values().crate_name() = "mylib";
  rlib.SetToolchain(setup.toolchain());
  rlib.public_deps().push_back(LabelTargetPair(&transitive));
  ASSERT_TRUE(rlib.OnResolved(&err));
  // "direct": rlib depended on directly by the executable below.
  Target direct(setup.settings(), Label(SourceDir("//bar/"), "direct"));
  direct.set_output_type(Target::RUST_LIBRARY);
  direct.visibility().SetPublic();
  SourceFile direct_lib("//bar/direct/lib.rs");
  direct.sources().push_back(SourceFile("//bar/direct/direct.rs"));
  direct.sources().push_back(direct_lib);
  direct.source_types_used().Set(SourceFile::SOURCE_RS);
  direct.rust_values().set_crate_root(direct_lib);
  direct.rust_values().crate_name() = "direct";
  direct.SetToolchain(setup.toolchain());
  ASSERT_TRUE(direct.OnResolved(&err));
  // The executable under test, with aliases for both dependency kinds.
  Target target(setup.settings(), Label(SourceDir("//foo/"), "bar"));
  target.set_output_type(Target::EXECUTABLE);
  target.visibility().SetPublic();
  SourceFile main("//foo/main.rs");
  target.sources().push_back(SourceFile("//foo/source.rs"));
  target.sources().push_back(main);
  target.source_types_used().Set(SourceFile::SOURCE_RS);
  target.rust_values().set_crate_root(main);
  target.rust_values().crate_name() = "foo_bar";
  // A direct dependency is renamed.
  target.rust_values().aliased_deps()[direct.label()] = "direct_renamed";
  // A transitive public dependency, through `rlib`, is renamed.
  target.rust_values().aliased_deps()[transitive.label()] =
      "transitive_renamed";
  target.private_deps().push_back(LabelTargetPair(&direct));
  target.private_deps().push_back(LabelTargetPair(&rlib));
  target.SetToolchain(setup.toolchain());
  ASSERT_TRUE(target.OnResolved(&err));
  {
    std::ostringstream out;
    NinjaRustBinaryTargetWriter writer(&target, out);
    writer.Run();
    // Expected ninja: aliases appear in `externs`; all three crate output
    // dirs are searchable via -Ldependency in `rustdeps`.
    const char expected[] =
        "crate_name = foo_bar\n"
        "crate_type = bin\n"
        "output_extension = \n"
        "output_dir = \n"
        "rustflags =\n"
        "rustenv =\n"
        "root_out_dir = .\n"
        "target_out_dir = obj/foo\n"
        "target_output_name = bar\n"
        "\n"
        "build ./foo_bar: rust_bin ../../foo/main.rs | ../../foo/source.rs "
        "../../foo/main.rs obj/bar/libdirect.rlib obj/baz/libmylib.rlib\n"
        " source_file_part = main.rs\n"
        " source_name_part = main\n"
        " externs = --extern direct_renamed=obj/bar/libdirect.rlib "
        "--extern mylib=obj/baz/libmylib.rlib "
        "--extern transitive_renamed=obj/faz/libtransitive.rlib\n"
        " rustdeps = -Ldependency=obj/bar -Ldependency=obj/baz "
        "-Ldependency=obj/faz\n"
        " ldflags =\n"
        " sources = ../../foo/source.rs ../../foo/main.rs\n";
    std::string out_str = out.str();
    EXPECT_EQ(expected, out_str) << expected << "\n" << out_str;
  }
}
// Verifies how non-Rust dependencies (static lib, shared lib, source set,
// shared lib with a .TOC file) are linked into Rust binaries: they appear as
// implicit deps / -Clink-arg entries rather than --extern crates. Also checks
// a Rust staticlib that wraps a C++ static library.
TEST_F(NinjaRustBinaryTargetWriterTest, NonRustDeps) {
  Err err;
  TestWithScope setup;
  // C++ static library dependency.
  Target staticlib(setup.settings(), Label(SourceDir("//foo/"), "static"));
  staticlib.set_output_type(Target::STATIC_LIBRARY);
  staticlib.visibility().SetPublic();
  staticlib.sources().push_back(SourceFile("//foo/static.cpp"));
  staticlib.source_types_used().Set(SourceFile::SOURCE_CPP);
  staticlib.SetToolchain(setup.toolchain());
  ASSERT_TRUE(staticlib.OnResolved(&err));
  // Plain Rust rlib dependency (the only --extern candidate here).
  Target rlib(setup.settings(), Label(SourceDir("//bar/"), "mylib"));
  rlib.set_output_type(Target::RUST_LIBRARY);
  rlib.visibility().SetPublic();
  SourceFile barlib("//bar/lib.rs");
  rlib.sources().push_back(SourceFile("//bar/mylib.rs"));
  rlib.sources().push_back(barlib);
  rlib.source_types_used().Set(SourceFile::SOURCE_RS);
  rlib.rust_values().set_crate_root(barlib);
  rlib.rust_values().crate_name() = "mylib";
  rlib.SetToolchain(setup.toolchain());
  ASSERT_TRUE(rlib.OnResolved(&err));
  // C++ shared library dependency (no table-of-contents file).
  Target sharedlib(setup.settings(), Label(SourceDir("//foo/"), "shared"));
  sharedlib.set_output_type(Target::SHARED_LIBRARY);
  sharedlib.visibility().SetPublic();
  sharedlib.sources().push_back(SourceFile("//foo/static.cpp"));
  sharedlib.source_types_used().Set(SourceFile::SOURCE_CPP);
  sharedlib.SetToolchain(setup.toolchain());
  ASSERT_TRUE(sharedlib.OnResolved(&err));
  // C++ source set dependency (contributes object files + a stamp).
  Target csourceset(setup.settings(), Label(SourceDir("//baz/"), "sourceset"));
  csourceset.set_output_type(Target::SOURCE_SET);
  csourceset.visibility().SetPublic();
  csourceset.sources().push_back(SourceFile("//baz/csourceset.cpp"));
  csourceset.source_types_used().Set(SourceFile::SOURCE_CPP);
  csourceset.SetToolchain(setup.toolchain());
  ASSERT_TRUE(csourceset.OnResolved(&err));
  // A toolchain that produces .TOC files for shared libraries, so the
  // dependency edge should go through the .TOC, but linking uses the .so.
  Toolchain toolchain_with_toc(
      setup.settings(), Label(SourceDir("//toolchain_with_toc/"), "with_toc"));
  TestWithScope::SetupToolchain(&toolchain_with_toc, true);
  Target sharedlib_with_toc(setup.settings(),
                            Label(SourceDir("//foo/"), "shared_with_toc"));
  sharedlib_with_toc.set_output_type(Target::SHARED_LIBRARY);
  sharedlib_with_toc.visibility().SetPublic();
  sharedlib_with_toc.sources().push_back(SourceFile("//foo/static.cpp"));
  sharedlib_with_toc.source_types_used().Set(SourceFile::SOURCE_CPP);
  sharedlib_with_toc.SetToolchain(&toolchain_with_toc);
  ASSERT_TRUE(sharedlib_with_toc.OnResolved(&err));
  // Rust executable depending on all of the above.
  Target nonrust(setup.settings(), Label(SourceDir("//foo/"), "bar"));
  nonrust.set_output_type(Target::EXECUTABLE);
  nonrust.visibility().SetPublic();
  SourceFile main("//foo/main.rs");
  nonrust.sources().push_back(SourceFile("//foo/source.rs"));
  nonrust.sources().push_back(main);
  nonrust.source_types_used().Set(SourceFile::SOURCE_RS);
  nonrust.rust_values().set_crate_root(main);
  nonrust.rust_values().crate_name() = "foo_bar";
  nonrust.private_deps().push_back(LabelTargetPair(&rlib));
  nonrust.private_deps().push_back(LabelTargetPair(&staticlib));
  nonrust.private_deps().push_back(LabelTargetPair(&sharedlib));
  nonrust.private_deps().push_back(LabelTargetPair(&csourceset));
  nonrust.private_deps().push_back(LabelTargetPair(&sharedlib_with_toc))
;
  nonrust.SetToolchain(setup.toolchain());
  ASSERT_TRUE(nonrust.OnResolved(&err));
  {
    std::ostringstream out;
    NinjaRustBinaryTargetWriter writer(&nonrust, out);
    writer.Run();
    // Note: the TOC-producing .so depends on the .TOC file but is linked via
    // -Clink-arg using the real .so path; only the rlib gets --extern.
    const char expected[] =
        "crate_name = foo_bar\n"
        "crate_type = bin\n"
        "output_extension = \n"
        "output_dir = \n"
        "rustflags =\n"
        "rustenv =\n"
        "root_out_dir = .\n"
        "target_out_dir = obj/foo\n"
        "target_output_name = bar\n"
        "\n"
        "build ./foo_bar: rust_bin ../../foo/main.rs | ../../foo/source.rs "
        "../../foo/main.rs obj/baz/sourceset.csourceset.o "
        "obj/bar/libmylib.rlib "
        "obj/foo/libstatic.a ./libshared.so ./libshared_with_toc.so.TOC "
        "|| obj/baz/sourceset.stamp\n"
        " source_file_part = main.rs\n"
        " source_name_part = main\n"
        " externs = --extern mylib=obj/bar/libmylib.rlib\n"
        " rustdeps = -Ldependency=obj/bar "
        "-Lnative=obj/baz -Lnative=obj/foo -Lnative=. "
        "-Clink-arg=-Bdynamic -Clink-arg=obj/baz/sourceset.csourceset.o "
        "-Clink-arg=obj/foo/libstatic.a -Clink-arg=./libshared.so "
        "-Clink-arg=./libshared_with_toc.so\n"
        " ldflags =\n"
        " sources = ../../foo/source.rs ../../foo/main.rs\n";
    std::string out_str = out.str();
    EXPECT_EQ(expected, out_str) << expected << "\n" << out_str;
  }
  // Executable with ONLY a non-Rust dependency: externs must be empty.
  Target nonrust_only(setup.settings(), Label(SourceDir("//foo/"), "bar"));
  nonrust_only.set_output_type(Target::EXECUTABLE);
  nonrust_only.visibility().SetPublic();
  nonrust_only.sources().push_back(SourceFile("//foo/source.rs"));
  nonrust_only.sources().push_back(main);
  nonrust_only.source_types_used().Set(SourceFile::SOURCE_RS);
  nonrust_only.rust_values().set_crate_root(main);
  nonrust_only.rust_values().crate_name() = "foo_bar";
  nonrust_only.private_deps().push_back(LabelTargetPair(&staticlib));
  nonrust_only.SetToolchain(setup.toolchain());
  ASSERT_TRUE(nonrust_only.OnResolved(&err));
  {
    std::ostringstream out;
    NinjaRustBinaryTargetWriter writer(&nonrust_only, out);
    writer.Run();
    const char expected[] =
        "crate_name = foo_bar\n"
        "crate_type = bin\n"
        "output_extension = \n"
        "output_dir = \n"
        "rustflags =\n"
        "rustenv =\n"
        "root_out_dir = .\n"
        "target_out_dir = obj/foo\n"
        "target_output_name = bar\n"
        "\n"
        "build ./foo_bar: rust_bin ../../foo/main.rs | ../../foo/source.rs "
        "../../foo/main.rs obj/foo/libstatic.a\n"
        " source_file_part = main.rs\n"
        " source_name_part = main\n"
        " externs =\n"
        " rustdeps = -Lnative=obj/foo -Clink-arg=-Bdynamic "
        "-Clink-arg=obj/foo/libstatic.a\n"
        " ldflags =\n"
        " sources = ../../foo/source.rs ../../foo/main.rs\n";
    std::string out_str = out.str();
    EXPECT_EQ(expected, out_str) << expected << "\n" << out_str;
  }
  // Rust staticlib wrapping the C++ static library: rust_staticlib rule,
  // .a extension, lib-prefixed output name.
  Target rstaticlib(setup.settings(), Label(SourceDir("//baz/"), "baz"));
  rstaticlib.set_output_type(Target::STATIC_LIBRARY);
  rstaticlib.visibility().SetPublic();
  SourceFile bazlib("//baz/lib.rs");
  rstaticlib.sources().push_back(bazlib);
  rstaticlib.source_types_used().Set(SourceFile::SOURCE_RS);
  rstaticlib.private_deps().push_back(LabelTargetPair(&staticlib));
  rstaticlib.rust_values().set_crate_root(bazlib);
  rstaticlib.rust_values().crate_name() = "baz";
  rstaticlib.SetToolchain(setup.toolchain());
  ASSERT_TRUE(rstaticlib.OnResolved(&err));
  {
    std::ostringstream out;
    NinjaRustBinaryTargetWriter writer(&rstaticlib, out);
    writer.Run();
    const char expected[] =
        "crate_name = baz\n"
        "crate_type = staticlib\n"
        "output_extension = .a\n"
        "output_dir = \n"
        "rustflags =\n"
        "rustenv =\n"
        "root_out_dir = .\n"
        "target_out_dir = obj/baz\n"
        "target_output_name = libbaz\n"
        "\n"
        "build obj/baz/libbaz.a: rust_staticlib ../../baz/lib.rs | "
        "../../baz/lib.rs "
        "obj/foo/libstatic.a\n"
        " source_file_part = lib.rs\n"
        " source_name_part = lib\n"
        " externs =\n"
        " rustdeps = -Lnative=obj/foo -Clink-arg=-Bdynamic "
        "-Clink-arg=obj/foo/libstatic.a\n"
        " ldflags =\n"
        " sources = ../../baz/lib.rs\n";
    std::string out_str = out.str();
    EXPECT_EQ(expected, out_str) << expected << "\n" << out_str;
  }
}
// Verifies that an explicit output_extension and output_dir on a Rust
// executable show up in the emitted ninja variables ("output_extension"
// gains a leading dot; "output_dir" is relative to the root build dir)
// and in the built binary's name (./foo_bar.exe).
TEST_F(NinjaRustBinaryTargetWriterTest, RustOutputExtensionAndDir) {
  Err err;
  TestWithScope setup;
  Target target(setup.settings(), Label(SourceDir("//foo/"), "bar"));
  target.set_output_type(Target::EXECUTABLE);
  target.visibility().SetPublic();
  SourceFile main("//foo/main.rs");
  target.sources().push_back(SourceFile("//foo/input3.rs"));
  target.sources().push_back(main);
  target.source_types_used().Set(SourceFile::SOURCE_RS);
  // The two settings under test.
  target.set_output_extension(std::string("exe"));
  target.set_output_dir(SourceDir("//out/Debug/foo/"));
  target.rust_values().set_crate_root(main);
  target.rust_values().crate_name() = "foo_bar";
  target.SetToolchain(setup.toolchain());
  ASSERT_TRUE(target.OnResolved(&err));
  {
    std::ostringstream out;
    NinjaRustBinaryTargetWriter writer(&target, out);
    writer.Run();
    const char expected[] =
        "crate_name = foo_bar\n"
        "crate_type = bin\n"
        "output_extension = .exe\n"
        "output_dir = foo\n"
        "rustflags =\n"
        "rustenv =\n"
        "root_out_dir = .\n"
        "target_out_dir = obj/foo\n"
        "target_output_name = bar\n"
        "\n"
        "build ./foo_bar.exe: rust_bin ../../foo/main.rs | ../../foo/input3.rs "
        "../../foo/main.rs\n"
        " source_file_part = main.rs\n"
        " source_name_part = main\n"
        " externs =\n"
        " rustdeps =\n"
        " ldflags =\n"
        " sources = ../../foo/input3.rs ../../foo/main.rs\n";
    std::string out_str = out.str();
    EXPECT_EQ(expected, out_str) << expected << "\n" << out_str;
  }
}
// Verifies that config-level libs and lib_dirs are forwarded to rustc via
// the "rustdeps" variable as -Lnative=<dir> and -l<lib> flags.
TEST_F(NinjaRustBinaryTargetWriterTest, LibsAndLibDirs) {
  Err err;
  TestWithScope setup;
  Target target(setup.settings(), Label(SourceDir("//foo/"), "bar"));
  target.set_output_type(Target::EXECUTABLE);
  target.visibility().SetPublic();
  SourceFile main("//foo/main.rs");
  target.sources().push_back(SourceFile("//foo/input.rs"));
  target.sources().push_back(main);
  target.source_types_used().Set(SourceFile::SOURCE_RS);
  target.set_output_dir(SourceDir("//out/Debug/foo/"));
  // The two config values under test.
  target.config_values().libs().push_back(LibFile("quux"));
  target.config_values().lib_dirs().push_back(SourceDir("//baz/"));
  target.rust_values().set_crate_root(main);
  target.rust_values().crate_name() = "foo_bar";
  target.SetToolchain(setup.toolchain());
  ASSERT_TRUE(target.OnResolved(&err));
  {
    std::ostringstream out;
    NinjaRustBinaryTargetWriter writer(&target, out);
    writer.Run();
    const char expected[] =
        "crate_name = foo_bar\n"
        "crate_type = bin\n"
        "output_extension = \n"
        "output_dir = foo\n"
        "rustflags =\n"
        "rustenv =\n"
        "root_out_dir = .\n"
        "target_out_dir = obj/foo\n"
        "target_output_name = bar\n"
        "\n"
        "build ./foo_bar: rust_bin ../../foo/main.rs | ../../foo/input.rs "
        "../../foo/main.rs\n"
        " source_file_part = main.rs\n"
        " source_name_part = main\n"
        " externs =\n"
        " rustdeps = -Lnative=../../baz -lquux\n"
        " ldflags =\n"
        " sources = ../../foo/input.rs ../../foo/main.rs\n";
    std::string out_str = out.str();
    EXPECT_EQ(expected, out_str) << expected << "\n" << out_str;
  }
}
// Test that neither public nor private rust dependencies of a proc-macro are
// transitively acquired as accessible dependencies by users of the macro. But
// the macro itself is listed as an accessible dependency (via --extern).
TEST_F(NinjaRustBinaryTargetWriterTest, RustProcMacro) {
  Err err;
  TestWithScope setup;
  // Public rlib dependency of the proc-macro.
  Target procmacropublicdep(
      setup.settings(), Label(SourceDir("//baz/public/"), "mymacropublicdep"));
  procmacropublicdep.set_output_type(Target::RUST_LIBRARY);
  procmacropublicdep.visibility().SetPublic();
  SourceFile publicbazlib("//baz/public/lib.rs");
  procmacropublicdep.sources().push_back(SourceFile("//baz/public/mylib.rs"));
  procmacropublicdep.sources().push_back(publicbazlib);
  procmacropublicdep.source_types_used().Set(SourceFile::SOURCE_RS);
  procmacropublicdep.rust_values().set_crate_root(publicbazlib);
  procmacropublicdep.rust_values().crate_name() = "publicdep";
  procmacropublicdep.SetToolchain(setup.toolchain());
  ASSERT_TRUE(procmacropublicdep.OnResolved(&err));
  // Private rlib dependency of the proc-macro.
  Target procmacroprivatedep(
      setup.settings(),
      Label(SourceDir("//baz/private/"), "mymacroprivatedep"));
  procmacroprivatedep.set_output_type(Target::RUST_LIBRARY);
  procmacroprivatedep.visibility().SetPublic();
  SourceFile privatebazlib("//baz/private/lib.rs");
  procmacroprivatedep.sources().push_back(SourceFile("//baz/private/mylib.rs"));
  procmacroprivatedep.sources().push_back(privatebazlib);
  procmacroprivatedep.source_types_used().Set(SourceFile::SOURCE_RS);
  procmacroprivatedep.rust_values().set_crate_root(privatebazlib);
  procmacroprivatedep.rust_values().crate_name() = "privatedep";
  procmacroprivatedep.SetToolchain(setup.toolchain());
  ASSERT_TRUE(procmacroprivatedep.OnResolved(&err));
  // The proc-macro itself: built with the rust_macro rule as a .so.
  Target procmacro(setup.settings(), Label(SourceDir("//bar/"), "mymacro"));
  procmacro.set_output_type(Target::RUST_PROC_MACRO);
  procmacro.visibility().SetPublic();
  SourceFile barlib("//bar/lib.rs");
  procmacro.sources().push_back(SourceFile("//bar/mylib.rs"));
  procmacro.sources().push_back(barlib);
  procmacro.source_types_used().Set(SourceFile::SOURCE_RS);
  procmacro.rust_values().set_crate_root(barlib);
  procmacro.rust_values().crate_name() = "mymacro";
  procmacro.rust_values().set_crate_type(RustValues::CRATE_PROC_MACRO);
  // Add a dependency to the procmacro so we can be sure its output
  // directory is not propagated downstream beyond the proc macro.
  procmacro.public_deps().push_back(LabelTargetPair(&procmacropublicdep));
  procmacro.private_deps().push_back(LabelTargetPair(&procmacroprivatedep));
  procmacro.SetToolchain(setup.toolchain());
  ASSERT_TRUE(procmacro.OnResolved(&err));
  {
    std::ostringstream out;
    NinjaRustBinaryTargetWriter writer(&procmacro, out);
    writer.Run();
    // The macro's own build sees both of its dependencies via --extern.
    const char expected[] =
        "crate_name = mymacro\n"
        "crate_type = proc-macro\n"
        "output_extension = .so\n"
        "output_dir = \n"
        "rustflags =\n"
        "rustenv =\n"
        "root_out_dir = .\n"
        "target_out_dir = obj/bar\n"
        "target_output_name = libmymacro\n"
        "\n"
        "build obj/bar/libmymacro.so: rust_macro ../../bar/lib.rs | "
        "../../bar/mylib.rs ../../bar/lib.rs "
        "obj/baz/public/libmymacropublicdep.rlib "
        "obj/baz/private/libmymacroprivatedep.rlib\n"
        " source_file_part = lib.rs\n"
        " source_name_part = lib\n"
        " externs = "
        "--extern publicdep=obj/baz/public/libmymacropublicdep.rlib "
        "--extern privatedep=obj/baz/private/libmymacroprivatedep.rlib\n"
        " rustdeps = -Ldependency=obj/baz/public "
        "-Ldependency=obj/baz/private\n"
        " ldflags =\n"
        " sources = ../../bar/mylib.rs ../../bar/lib.rs\n";
    std::string out_str = out.str();
    EXPECT_EQ(expected, out_str) << expected << "\n" << out_str;
  }
  // A binary that uses the macro: only the macro itself is an accessible
  // dependency; the macro's own deps must not leak into externs/rustdeps.
  Target target(setup.settings(), Label(SourceDir("//foo/"), "bar"));
  target.set_output_type(Target::EXECUTABLE);
  target.visibility().SetPublic();
  SourceFile main("//foo/main.rs");
  target.sources().push_back(SourceFile("//foo/source.rs"));
  target.sources().push_back(main);
  target.source_types_used().Set(SourceFile::SOURCE_RS);
  target.rust_values().set_crate_root(main);
  target.rust_values().crate_name() = "foo_bar";
  target.private_deps().push_back(LabelTargetPair(&procmacro));
  target.SetToolchain(setup.toolchain());
  ASSERT_TRUE(target.OnResolved(&err));
  {
    std::ostringstream out;
    NinjaRustBinaryTargetWriter writer(&target, out);
    writer.Run();
    const char expected[] =
        "crate_name = foo_bar\n"
        "crate_type = bin\n"
        "output_extension = \n"
        "output_dir = \n"
        "rustflags =\n"
        "rustenv =\n"
        "root_out_dir = .\n"
        "target_out_dir = obj/foo\n"
        "target_output_name = bar\n"
        "\n"
        "build ./foo_bar: rust_bin ../../foo/main.rs | ../../foo/source.rs "
        "../../foo/main.rs obj/bar/libmymacro.so\n"
        " source_file_part = main.rs\n"
        " source_name_part = main\n"
        " externs = --extern mymacro=obj/bar/libmymacro.so\n"
        " rustdeps = -Ldependency=obj/bar\n"
        " ldflags =\n"
        " sources = ../../foo/source.rs ../../foo/main.rs\n";
    std::string out_str = out.str();
    EXPECT_EQ(expected, out_str) << expected << "\n" << out_str;
  }
}
// Verifies that a Rust dependency reached through a GN group is still picked
// up as an --extern crate, and that the group's stamp file becomes an
// order-only dependency ("|| obj/baz/group.stamp") of the binary.
TEST_F(NinjaRustBinaryTargetWriterTest, GroupDeps) {
  Err err;
  TestWithScope setup;
  // rlib that will be wrapped by the group below.
  Target rlib(setup.settings(), Label(SourceDir("//bar/"), "mylib"));
  rlib.set_output_type(Target::RUST_LIBRARY);
  rlib.visibility().SetPublic();
  SourceFile barlib("//bar/lib.rs");
  rlib.sources().push_back(SourceFile("//bar/mylib.rs"));
  rlib.sources().push_back(barlib);
  rlib.source_types_used().Set(SourceFile::SOURCE_RS);
  rlib.rust_values().set_crate_root(barlib);
  rlib.rust_values().crate_name() = "mylib";
  rlib.SetToolchain(setup.toolchain());
  ASSERT_TRUE(rlib.OnResolved(&err));
  {
    std::ostringstream out;
    NinjaRustBinaryTargetWriter writer(&rlib, out);
    writer.Run();
    // Sanity check: the standalone rlib has no externs/rustdeps of its own.
    const char expected[] =
        "crate_name = mylib\n"
        "crate_type = rlib\n"
        "output_extension = .rlib\n"
        "output_dir = \n"
        "rustflags =\n"
        "rustenv =\n"
        "root_out_dir = .\n"
        "target_out_dir = obj/bar\n"
        "target_output_name = libmylib\n"
        "\n"
        "build obj/bar/libmylib.rlib: rust_rlib ../../bar/lib.rs | "
        "../../bar/mylib.rs ../../bar/lib.rs\n"
        " source_file_part = lib.rs\n"
        " source_name_part = lib\n"
        " externs =\n"
        " rustdeps =\n"
        " ldflags =\n"
        " sources = ../../bar/mylib.rs ../../bar/lib.rs\n";
    std::string out_str = out.str();
    EXPECT_EQ(expected, out_str) << expected << "\n" << out_str;
  }
  // Group that publicly forwards the rlib.
  Target group(setup.settings(), Label(SourceDir("//baz/"), "group"));
  group.set_output_type(Target::GROUP);
  group.visibility().SetPublic();
  group.public_deps().push_back(LabelTargetPair(&rlib));
  group.SetToolchain(setup.toolchain());
  ASSERT_TRUE(group.OnResolved(&err));
  // Executable that depends only on the group.
  Target target(setup.settings(), Label(SourceDir("//foo/"), "bar"));
  target.set_output_type(Target::EXECUTABLE);
  target.visibility().SetPublic();
  SourceFile main("//foo/main.rs");
  target.sources().push_back(SourceFile("//foo/source.rs"));
  target.sources().push_back(main);
  target.source_types_used().Set(SourceFile::SOURCE_RS);
  target.rust_values().set_crate_root(main);
  target.rust_values().crate_name() = "foo_bar";
  target.private_deps().push_back(LabelTargetPair(&group));
  target.SetToolchain(setup.toolchain());
  ASSERT_TRUE(target.OnResolved(&err));
  {
    std::ostringstream out;
    NinjaRustBinaryTargetWriter writer(&target, out);
    writer.Run();
    const char expected[] =
        "crate_name = foo_bar\n"
        "crate_type = bin\n"
        "output_extension = \n"
        "output_dir = \n"
        "rustflags =\n"
        "rustenv =\n"
        "root_out_dir = .\n"
        "target_out_dir = obj/foo\n"
        "target_output_name = bar\n"
        "\n"
        "build ./foo_bar: rust_bin ../../foo/main.rs | ../../foo/source.rs "
        "../../foo/main.rs obj/bar/libmylib.rlib || obj/baz/group.stamp\n"
        " source_file_part = main.rs\n"
        " source_name_part = main\n"
        " externs = --extern mylib=obj/bar/libmylib.rlib\n"
        " rustdeps = -Ldependency=obj/bar\n"
        " ldflags =\n"
        " sources = ../../foo/source.rs ../../foo/main.rs\n";
    std::string out_str = out.str();
    EXPECT_EQ(expected, out_str) << expected << "\n" << out_str;
  }
}
// Verifies config-level `externs`: a SourceFile-based extern becomes an
// implicit build dependency and a rebased --extern path, while a bare
// LibFile string is passed through to --extern verbatim (no dep edge).
TEST_F(NinjaRustBinaryTargetWriterTest, Externs) {
  Err err;
  TestWithScope setup;
  Target target(setup.settings(), Label(SourceDir("//foo/"), "bar"));
  target.set_output_type(Target::EXECUTABLE);
  target.visibility().SetPublic();
  SourceFile main("//foo/main.rs");
  target.sources().push_back(SourceFile("//foo/source.rs"));
  target.sources().push_back(main);
  target.source_types_used().Set(SourceFile::SOURCE_RS);
  target.rust_values().set_crate_root(main);
  target.rust_values().crate_name() = "foo_bar";
  // Extern backed by an in-tree file (tracked as a build input).
  const char* lib = "lib1";
  target.config_values().externs().push_back(
      std::pair(lib, LibFile(SourceFile("//foo/lib1.rlib"))));
  // Extern given as a plain library path string (not tracked).
  lib = "lib2";
  target.config_values().externs().push_back(
      std::pair(lib, LibFile("lib2.rlib")));
  target.SetToolchain(setup.toolchain());
  ASSERT_TRUE(target.OnResolved(&err));
  {
    std::ostringstream out;
    NinjaRustBinaryTargetWriter writer(&target, out);
    writer.Run();
    const char expected[] =
        "crate_name = foo_bar\n"
        "crate_type = bin\n"
        "output_extension = \n"
        "output_dir = \n"
        "rustflags =\n"
        "rustenv =\n"
        "root_out_dir = .\n"
        "target_out_dir = obj/foo\n"
        "target_output_name = bar\n"
        "\n"
        "build ./foo_bar: rust_bin ../../foo/main.rs | ../../foo/source.rs "
        "../../foo/main.rs ../../foo/lib1.rlib\n"
        " source_file_part = main.rs\n"
        " source_name_part = main\n"
        " externs = --extern lib1=../../foo/lib1.rlib --extern "
        "lib2=lib2.rlib\n"
        " rustdeps =\n"
        " ldflags =\n"
        " sources = ../../foo/source.rs ../../foo/main.rs\n";
    std::string out_str = out.str();
    EXPECT_EQ(expected, out_str) << expected << "\n" << out_str;
  }
}
// Verifies config-level `inputs`: they get their own stamp target, appear as
// implicit dependencies of the binary, are listed in the "sources" variable,
// and the stamp is an order-only dependency.
TEST_F(NinjaRustBinaryTargetWriterTest, Inputs) {
  Err err;
  TestWithScope setup;
  Target target(setup.settings(), Label(SourceDir("//foo/"), "bar"));
  target.set_output_type(Target::EXECUTABLE);
  target.visibility().SetPublic();
  SourceFile main("//foo/main.rs");
  target.sources().push_back(SourceFile("//foo/source.rs"));
  target.sources().push_back(main);
  target.source_types_used().Set(SourceFile::SOURCE_RS);
  target.rust_values().set_crate_root(main);
  target.rust_values().crate_name() = "foo_bar";
  // The two non-source inputs under test.
  target.config_values().inputs().push_back(SourceFile("//foo/config.json"));
  target.config_values().inputs().push_back(SourceFile("//foo/template.h"));
  target.SetToolchain(setup.toolchain());
  ASSERT_TRUE(target.OnResolved(&err));
  {
    std::ostringstream out;
    NinjaRustBinaryTargetWriter writer(&target, out);
    writer.Run();
    const char expected[] =
        "build obj/foo/bar.inputs.stamp: stamp ../../foo/config.json "
        "../../foo/template.h\n"
        "crate_name = foo_bar\n"
        "crate_type = bin\n"
        "output_extension = \n"
        "output_dir = \n"
        "rustflags =\n"
        "rustenv =\n"
        "root_out_dir = .\n"
        "target_out_dir = obj/foo\n"
        "target_output_name = bar\n"
        "\n"
        "build ./foo_bar: rust_bin ../../foo/main.rs | ../../foo/source.rs "
        "../../foo/main.rs ../../foo/config.json ../../foo/template.h "
        "|| obj/foo/bar.inputs.stamp\n"
        " source_file_part = main.rs\n"
        " source_name_part = main\n"
        " externs =\n"
        " rustdeps =\n"
        " ldflags =\n"
        " sources = ../../foo/source.rs ../../foo/main.rs "
        "../../foo/config.json ../../foo/template.h\n";
    std::string out_str = out.str();
    EXPECT_EQ(expected, out_str) << expected << "\n" << out_str;
  }
}
// Verifies a Rust cdylib: it is built with the rust_cdylib rule as a .so,
// and a downstream Rust binary links it like a native shared library
// (-Lnative/-Clink-arg) rather than importing it with --extern.
TEST_F(NinjaRustBinaryTargetWriterTest, CdylibDeps) {
  Err err;
  TestWithScope setup;
  // SHARED_LIBRARY target whose crate type is overridden to cdylib.
  Target cdylib(setup.settings(), Label(SourceDir("//bar/"), "mylib"));
  cdylib.set_output_type(Target::SHARED_LIBRARY);
  cdylib.visibility().SetPublic();
  SourceFile barlib("//bar/lib.rs");
  cdylib.sources().push_back(barlib);
  cdylib.source_types_used().Set(SourceFile::SOURCE_RS);
  cdylib.rust_values().set_crate_type(RustValues::CRATE_CDYLIB);
  cdylib.rust_values().set_crate_root(barlib);
  cdylib.rust_values().crate_name() = "mylib";
  cdylib.SetToolchain(setup.toolchain());
  ASSERT_TRUE(cdylib.OnResolved(&err));
  {
    std::ostringstream out;
    NinjaRustBinaryTargetWriter writer(&cdylib, out);
    writer.Run();
    const char expected[] =
        "crate_name = mylib\n"
        "crate_type = cdylib\n"
        "output_extension = .so\n"
        "output_dir = \n"
        "rustflags =\n"
        "rustenv =\n"
        "root_out_dir = .\n"
        "target_out_dir = obj/bar\n"
        "target_output_name = libmylib\n"
        "\n"
        "build obj/bar/libmylib.so: rust_cdylib ../../bar/lib.rs | "
        "../../bar/lib.rs\n"
        " source_file_part = lib.rs\n"
        " source_name_part = lib\n"
        " externs =\n"
        " rustdeps =\n"
        " ldflags =\n"
        " sources = ../../bar/lib.rs\n";
    std::string out_str = out.str();
    EXPECT_EQ(expected, out_str) << expected << "\n" << out_str;
  }
  // Rust executable consuming the cdylib: no --extern, native linking only.
  Target target(setup.settings(), Label(SourceDir("//foo/"), "bar"));
  target.set_output_type(Target::EXECUTABLE);
  target.visibility().SetPublic();
  SourceFile main("//foo/main.rs");
  target.sources().push_back(SourceFile("//foo/source.rs"));
  target.sources().push_back(main);
  target.source_types_used().Set(SourceFile::SOURCE_RS);
  target.rust_values().set_crate_root(main);
  target.rust_values().crate_name() = "foo_bar";
  target.private_deps().push_back(LabelTargetPair(&cdylib));
  target.SetToolchain(setup.toolchain());
  ASSERT_TRUE(target.OnResolved(&err));
  {
    std::ostringstream out;
    NinjaRustBinaryTargetWriter writer(&target, out);
    writer.Run();
    const char expected[] =
        "crate_name = foo_bar\n"
        "crate_type = bin\n"
        "output_extension = \n"
        "output_dir = \n"
        "rustflags =\n"
        "rustenv =\n"
        "root_out_dir = .\n"
        "target_out_dir = obj/foo\n"
        "target_output_name = bar\n"
        "\n"
        "build ./foo_bar: rust_bin ../../foo/main.rs | ../../foo/source.rs "
        "../../foo/main.rs obj/bar/libmylib.so\n"
        " source_file_part = main.rs\n"
        " source_name_part = main\n"
        " externs =\n"
        " rustdeps = -Lnative=obj/bar -Clink-arg=-Bdynamic "
        "-Clink-arg=obj/bar/libmylib.so\n"
        " ldflags =\n"
        " sources = ../../foo/source.rs ../../foo/main.rs\n";
    std::string out_str = out.str();
    EXPECT_EQ(expected, out_str) << expected << "\n" << out_str;
  }
}
TEST_F(NinjaRustBinaryTargetWriterTest, TransitivePublicNonRustDeps) {
  Err err;
  TestWithScope setup;
  // This test verifies that the Rust binary "target" links against this lib.
  Target implicitlib(setup.settings(), Label(SourceDir("//foo/"), "implicit"));
  implicitlib.set_output_type(Target::SHARED_LIBRARY);
  implicitlib.visibility().SetPublic();
  implicitlib.sources().push_back(SourceFile("//foo/implicit.cpp"));
  implicitlib.source_types_used().Set(SourceFile::SOURCE_CPP);
  implicitlib.SetToolchain(setup.toolchain());
  ASSERT_TRUE(implicitlib.OnResolved(&err));
  // Shared lib that publicly re-exports implicitlib.
  Target sharedlib(setup.settings(), Label(SourceDir("//foo/"), "shared"));
  sharedlib.set_output_type(Target::SHARED_LIBRARY);
  sharedlib.visibility().SetPublic();
  sharedlib.sources().push_back(SourceFile("//foo/shared.cpp"));
  sharedlib.source_types_used().Set(SourceFile::SOURCE_CPP);
  sharedlib.SetToolchain(setup.toolchain());
  sharedlib.public_deps().push_back(LabelTargetPair(&implicitlib));
  ASSERT_TRUE(sharedlib.OnResolved(&err));
  // rlib with a private dep on sharedlib; the chain target -> rlib ->
  // sharedlib -> implicitlib should surface both .so files at link time.
  Target rlib(setup.settings(), Label(SourceDir("//bar/"), "mylib"));
  rlib.set_output_type(Target::RUST_LIBRARY);
  rlib.visibility().SetPublic();
  SourceFile barlib("//bar/lib.rs");
  rlib.sources().push_back(SourceFile("//bar/mylib.rs"));
  rlib.sources().push_back(barlib);
  rlib.source_types_used().Set(SourceFile::SOURCE_RS);
  rlib.rust_values().set_crate_root(barlib);
  rlib.rust_values().crate_name() = "mylib";
  rlib.SetToolchain(setup.toolchain());
  rlib.private_deps().push_back(LabelTargetPair(&sharedlib));
  ASSERT_TRUE(rlib.OnResolved(&err));
  Target target(setup.settings(), Label(SourceDir("//foo/"), "bar"));
  target.set_output_type(Target::EXECUTABLE);
  target.visibility().SetPublic();
  SourceFile main("//foo/main.rs");
  target.sources().push_back(main);
  target.source_types_used().Set(SourceFile::SOURCE_RS);
  target.rust_values().set_crate_root(main);
  target.rust_values().crate_name() = "foo_bar";
  target.private_deps().push_back(LabelTargetPair(&rlib));
  target.SetToolchain(setup.toolchain());
  ASSERT_TRUE(target.OnResolved(&err));
  {
    std::ostringstream out;
    NinjaRustBinaryTargetWriter writer(&target, out);
    writer.Run();
    // Both shared libs (direct-transitive and public-transitive) show up as
    // implicit deps and -Clink-arg entries; only the rlib gets --extern.
    const char expected[] =
        "crate_name = foo_bar\n"
        "crate_type = bin\n"
        "output_extension = \n"
        "output_dir = \n"
        "rustflags =\n"
        "rustenv =\n"
        "root_out_dir = .\n"
        "target_out_dir = obj/foo\n"
        "target_output_name = bar\n"
        "\n"
        "build ./foo_bar: rust_bin ../../foo/main.rs | ../../foo/main.rs "
        "obj/bar/libmylib.rlib ./libshared.so ./libimplicit.so\n"
        " source_file_part = main.rs\n"
        " source_name_part = main\n"
        " externs = --extern mylib=obj/bar/libmylib.rlib\n"
        " rustdeps = -Ldependency=obj/bar -Lnative=. -Clink-arg=-Bdynamic "
        "-Clink-arg=./libshared.so -Clink-arg=./libimplicit.so\n"
        " ldflags =\n"
        " sources = ../../foo/main.rs\n";
    std::string out_str = out.str();
    EXPECT_EQ(expected, out_str) << expected << "\n" << out_str;
  }
}
|
<filename>app/src/main/java/com/lai/mtc/comm/widget/PreCacheLayoutManager.java
package com.lai.mtc.comm.widget;
import android.content.Context;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
/**
 * A {@link LinearLayoutManager} that pre-loads off-screen views.
 *
 * <p>Overriding {@code getExtraLayoutSpace()} enlarges the layout window so
 * additional, not-yet-visible views are created and cached ahead of time.
 *
 * <p>See: http://www.jcodecraeer.com/a/anzhuokaifa/androidkaifa/2015/0209/2452.html
 *
 * @author Lai
 * @time 2018/2/6 18:05
 */
public class PreCacheLayoutManager extends LinearLayoutManager {

    // Number of extra screen-widths/heights of views to lay out off-screen.
    private int mExtraScreens = 0;

    public PreCacheLayoutManager(Context context) {
        super(context);
    }

    public int getExtraSpace() {
        return mExtraScreens;
    }

    public void setExtraSpace(int extraSpace) {
        mExtraScreens = extraSpace;
    }

    /**
     * Expands the layout window by {@code mExtraScreens} multiples of the
     * visible dimension along the scroll axis, so adjacent off-screen items
     * are pre-bound and cached.
     */
    @Override
    protected int getExtraLayoutSpace(RecyclerView.State state) {
        if (mExtraScreens <= 0) {
            return 0;
        }
        final boolean horizontal =
                getOrientation() == LinearLayoutManager.HORIZONTAL;
        return mExtraScreens * (horizontal ? getWidth() : getHeight());
    }
}
|
<reponame>seclib/cerberus_scripts<filename>src/test/java/com/nike/cerberus/endpoints/authentication/RefreshUserTokenTest.java
/*
* Copyright (c) 2016 Nike, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.nike.cerberus.endpoints.authentication;
import com.google.common.collect.Maps;
import com.nike.backstopper.exception.ApiException;
import com.nike.cerberus.auth.connector.AuthData;
import com.nike.cerberus.auth.connector.AuthResponse;
import com.nike.cerberus.auth.connector.AuthStatus;
import com.nike.cerberus.domain.AuthTokenResponse;
import com.nike.cerberus.domain.CerberusAuthToken;
import com.nike.cerberus.security.CmsRequestSecurityValidator;
import com.nike.cerberus.security.CerberusPrincipal;
import com.nike.cerberus.security.CerberusSecurityContext;
import com.nike.cerberus.service.AuthenticationService;
import com.nike.riposte.server.http.RequestInfo;
import com.nike.riposte.server.http.ResponseInfo;
import io.netty.handler.codec.http.HttpMethod;
import org.junit.Before;
import org.junit.Test;
import java.util.Collection;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Fail.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/** Unit tests for the RefreshUserToken endpoint. */
public class RefreshUserTokenTest {

    // Single-threaded executor is sufficient for these synchronous-style tests.
    private final Executor executor = Executors.newSingleThreadExecutor();

    private AuthenticationService authenticationService;

    private RefreshUserToken refreshUserTokenEndpoint;

    @Before
    public void setUp() throws Exception {
        authenticationService = mock(AuthenticationService.class);
        refreshUserTokenEndpoint = new RefreshUserToken(authenticationService);
    }

    // The refresh endpoint must be reachable via HTTP GET and nothing else.
    @Test
    public void requestMatcher_is_http_get() {
        Collection<HttpMethod> httpMethods = refreshUserTokenEndpoint.requestMatcher().matchingMethods();
        assertThat(httpMethods).hasSize(1);
        assertThat(httpMethods).contains(HttpMethod.GET);
    }

    // Happy path: a request carrying a valid security context gets the
    // AuthResponse produced by AuthenticationService.refreshUserToken.
    @Test
    public void execute_returns_new_token_if_replacing_valid_one() {
        Map<String, Object> requestAttributes = Maps.newHashMap();
        CerberusPrincipal authPrincipal = new CerberusPrincipal(
                CerberusAuthToken.Builder.create()
                        .withIsAdmin(true)
                        .withPrincipal("username")
                        .withGroups("group1,group2")
                        .build());
        // The endpoint reads the principal from the security-context request attribute.
        requestAttributes.put(CmsRequestSecurityValidator.SECURITY_CONTEXT_ATTR_KEY,
                new CerberusSecurityContext(authPrincipal, "https"));
        AuthTokenResponse response = new AuthTokenResponse();
        AuthResponse authResponse = new AuthResponse();
        authResponse.setStatus(AuthStatus.SUCCESS);
        authResponse.setData(new AuthData().setUsername("username").setClientToken(response));
        RequestInfo<Void> requestInfo = mock(RequestInfo.class);
        when(requestInfo.getRequestAttributes()).thenReturn(requestAttributes);
        when(authenticationService.refreshUserToken(authPrincipal)).thenReturn(authResponse);
        CompletableFuture<ResponseInfo<AuthResponse>> completableFuture =
                refreshUserTokenEndpoint.execute(requestInfo, executor, null);
        ResponseInfo<AuthResponse> responseInfo = completableFuture.join();
        assertThat(responseInfo.getContentForFullResponse()).isEqualTo(authResponse);
    }

    // Error path: a missing security context must surface as an ApiException
    // (wrapped in CompletionException by CompletableFuture.join()).
    @Test
    public void execute_throws_api_error_if_no_security_context() {
        Map<String, Object> requestAttributes = Maps.newHashMap();
        requestAttributes.put(CmsRequestSecurityValidator.SECURITY_CONTEXT_ATTR_KEY, null);
        RequestInfo<Void> requestInfo = mock(RequestInfo.class);
        when(requestInfo.getRequestAttributes()).thenReturn(requestAttributes);
        try {
            CompletableFuture<ResponseInfo<AuthResponse>> completableFuture =
                    refreshUserTokenEndpoint.execute(requestInfo, executor, null);
            completableFuture.join();
            fail("Expected exception not thrown.");
        } catch (CompletionException cex) {
            assertThat(cex.getCause()).isInstanceOf(ApiException.class);
        }
    }
}
import React from "react";
import SCModal, { SCModalProps } from "./Modal";
import { BrowserRouter as Router } from "react-router-dom";
import { Story, Meta } from "@storybook/react";
// Storybook metadata for the Smartchef modal component.
export default {
  title: "Smartchef/Modal",
  component: SCModal,
  argTypes: {
    hideOverlay: {
      // NOTE(review): "func" is not a built-in Storybook control type, and
      // `defaultValue` nested inside `control` is unusual — confirm this arg
      // renders as intended; callbacks are normally supplied via `args`.
      control: {
        type: "func",
        defaultValue: () => {
          console.log("Hiding overlay");
        },
      },
    },
  },
} as Meta;

// Template renders the modal inside a Router (presumably SCModal or its
// children use react-router APIs — TODO confirm).
const Template: Story<SCModalProps> = (args) => {
  return (
    <Router>
      <SCModal {...args} />
    </Router>
  );
};

// Default story: minimal title, a single action button, and simple body content.
export const Default = Template.bind({});
Default.args = {
  modaltitle: "Modal title",
  buttons: <button>Button</button>,
  children: <div>Modal content</div>,
};
|
<filename>functions/fn_load_trips_staging.sql
-- Loads distinct trips from trip_staging into trip for one organisation,
-- resolving origin/destination stop ids by name + exact coordinates, then
-- clears the staging table. Raises on a missing/unknown organisation name.
create or replace function fn_load_trips_staging(organisation_name character varying (200)) returns void as
$$
declare
    var_organisation_id integer := 0;
begin
    -- firstly, the organisation name must be passed in.
    if length(organisation_name) = 0 then
        raise exception 'An organisation name must be provided.';
    end if;
    -- the organisation name must already be in the database
    -- (SELECT INTO leaves the variable NULL when no row matches, hence the
    -- two-sided check below)
    select orgs.id into var_organisation_id from (select id from organisation where name = organisation_name) as orgs;
    if var_organisation_id = 0 OR var_organisation_id is null then
        raise exception 'Organisation not found in the database.';
    end if;
    -- list of new trips, with the WKT geometry parsed into a PostGIS geometry
    with trips as (
        select
            s.mobile,
            s.origin_stop,
            s.origin_stop_longitude,
            s.origin_stop_latitude,
            s.destination_stop,
            s.destination_stop_longitude,
            s.destination_stop_latitude,
            s.distance,
            s.duration,
            st_geomfromtext(s.geom, 4326) as geom
        from trip_staging s
    )
    -- stops are matched by name AND exact lon/lat so homonymous stops resolve
    -- to the right row; the mobile join restricts rows to this organisation
    insert into trip(origin_stop_id, destination_stop_id, distance, duration, geom)
    select
        distinct
        (select id from stop os where os.name = t.origin_stop and st_x(os.geom) = t.origin_stop_longitude and st_y(os.geom) = t.origin_stop_latitude limit 1) as origin_stop_id,
        (select id from stop ds where ds.name = t.destination_stop and st_x(ds.geom) = t.destination_stop_longitude and st_y(ds.geom) = t.destination_stop_latitude limit 1) as destination_stop_id,
        t.distance,
        t.duration,
        t.geom
    from trips t
    join mobile m on m.name = t.mobile and m.organisation_id = var_organisation_id;
    -- staging is emptied unconditionally once the load has succeeded
    delete from trip_staging;
end;
$$
language plpgsql;
<gh_stars>1-10
from management.models import Sensor, Type_of_sensor, Wlan, Nb_iot, HTTP, HTTPS, Sample_rate, Sensitivity, Value_pair, MQTT, Data_format, Variable, Default_variable
from rest_framework import serializers
from generic_relations.relations import GenericRelatedField
from management.utils import update_sensor, create_new_sensor
class VariableSerializer(serializers.HyperlinkedModelSerializer):
    """Measurement variable (name + unit) attached to a concrete sensor."""
    class Meta:
        model = Variable
        fields = ('id', 'url', 'name', 'unit', 'sensor')
class DefaultVariableSerializer(serializers.HyperlinkedModelSerializer):
    """Default variable template associated with a sensor type rather than a
    concrete sensor."""
    class Meta:
        model = Default_variable
        fields = ('id', 'url', 'name', 'unit', 'type_of_sensor')
class WlanSerializer(serializers.ModelSerializer):
    """WLAN communication settings for a sensor.

    NOTE(review): this is a plain ModelSerializer while every sibling is a
    HyperlinkedModelSerializer, yet 'url' is listed in fields — confirm this
    is intentional.
    """
    class Meta:
        model = Wlan
        fields = ('id', 'url', 'name', 'ssid', 'security', 'key', 'username')
class NbIotSerializer(serializers.HyperlinkedModelSerializer):
    """NB-IoT communication settings for a sensor."""
    class Meta:
        model = Nb_iot
        fields = ('id', 'url', 'name', 'settings')
class HTTPSerializer(serializers.HyperlinkedModelSerializer):
    """HTTP delivery endpoint (server, port, path) for sensor data."""
    class Meta:
        model = HTTP
        fields = ('id', 'url', 'name', 'data_server_url', 'data_server_port', 'path')
class HTTPSSerializer(serializers.HyperlinkedModelSerializer):
    """HTTPS delivery endpoint (server, port, path) for sensor data; mirrors
    HTTPSerializer."""
    class Meta:
        model = HTTPS
        fields = ('id', 'url', 'name', 'data_server_url', 'data_server_port', 'path')
class MQTTSerializer(serializers.HyperlinkedModelSerializer):
    """MQTT delivery settings (credentials, topic, broker) for sensor data."""
    class Meta:
        model = MQTT
        fields = ('id', 'url', 'name', 'user', 'key', 'topic', 'broker_url', 'broker_port')
class ModelSerializer(serializers.HyperlinkedModelSerializer):
    """Sensor model/type metadata (Type_of_sensor).

    NOTE(review): the class name collides with the generic DRF term
    "ModelSerializer" although it serializes Type_of_sensor rows; renaming
    would break importers, so it is only flagged here.
    """
    class Meta:
        model = Type_of_sensor
        fields = ('sensor_model', 'url', 'sensor_information', 'address')
class SensorSerializer(serializers.HyperlinkedModelSerializer):
    """Serializer for Sensor objects including their generic communication
    relation (Wlan / Nb_iot), generic protocol relation (HTTP / HTTPS / MQTT)
    and nested measurement variables.

    create() and update() also push the change to the device backend via
    create_new_sensor() / update_sensor().
    """

    # Generic FK: the medium the sensor communicates over.
    communication_object = GenericRelatedField({
        Wlan: serializers.HyperlinkedRelatedField(
            queryset=Wlan.objects.all(),
            view_name='wlan-detail',
        ),
        Nb_iot: serializers.HyperlinkedRelatedField(
            queryset=Nb_iot.objects.all(),
            view_name='nb_iot-detail',
        ),
    })

    # Generic FK: the transport protocol used to deliver measurements.
    protocol_object = GenericRelatedField({
        HTTP: serializers.HyperlinkedRelatedField(
            queryset=HTTP.objects.all(),
            view_name='http-detail',
        ),
        HTTPS: serializers.HyperlinkedRelatedField(
            queryset=HTTPS.objects.all(),
            view_name='https-detail',
        ),
        MQTT: serializers.HyperlinkedRelatedField(
            queryset=MQTT.objects.all(),
            view_name='mqtt-detail',
        ),
    })

    variables = VariableSerializer(many=True)

    # Mandatory scalar/relation fields copied verbatim between validated_data
    # and the model in both create() and update().
    _REQUIRED_FIELDS = (
        'sensor_name', 'model', 'status', 'sample_rate', 'sensitivity',
        'data_send_rate', 'burst_length', 'burst_rate',
        'connection_close_limit', 'network_close_limit', 'update_check_limit',
        'update_url', 'update_port', 'update_https', 'encrypt_data',
        'shared_secret_data', 'data_format', 'communication_object',
        'protocol_object',
    )

    # Optional fields: only set when the client supplied them.
    _OPTIONAL_FIELDS = ('description', 'location', 'sensor_key')

    class Meta:
        model = Sensor
        fields = ('sensor_id', 'url', 'sensor_name', 'model', 'status', 'description', 'location', 'sensor_key', 'sample_rate', 'sensitivity', 'data_send_rate', 'burst_length', 'burst_rate', 'connection_close_limit', 'network_close_limit', 'update_check_limit', 'update_url', 'update_port', 'update_https', 'encrypt_data', 'shared_secret_data', 'data_format', 'variables', 'communication_object', 'protocol_object')

    def _create_variables(self, sensor, validated_data):
        """Create the nested Variable rows for ``sensor``.

        NOTE(review): variables are appended, never replaced, so repeated
        updates accumulate duplicate Variable rows — confirm this is intended.
        """
        for variable_data in validated_data.pop('variables'):
            Variable.objects.create(sensor=sensor, **variable_data)

    def _apply_optional_fields(self, sensor, validated_data):
        """Copy optional attributes onto ``sensor`` when they were supplied."""
        for field in self._OPTIONAL_FIELDS:
            try:
                setattr(sensor, field, validated_data[field])
            except KeyError:
                pass

    def create(self, validated_data):
        """Create a Sensor plus its nested variables and register it with the
        backend."""
        s = Sensor.objects.create(
            **{field: validated_data[field] for field in self._REQUIRED_FIELDS})
        self._create_variables(s, validated_data)
        self._apply_optional_fields(s, validated_data)
        s.save()
        create_new_sensor(s)
        return s

    def update(self, instance, validated_data):
        """Update a Sensor in place and propagate the change to the backend."""
        for field in self._REQUIRED_FIELDS:
            setattr(instance, field, validated_data[field])
        # BUG FIX: the original created variables with sensor=s, but `s` is
        # undefined in update() (NameError at runtime); the rows must be
        # attached to `instance`.
        self._create_variables(instance, validated_data)
        self._apply_optional_fields(instance, validated_data)
        instance.save()
        update_sensor(instance)
        return instance
class SampleRateSerializer(serializers.HyperlinkedModelSerializer):
    """Supported sample-rate configuration entry for a sensor model."""
    class Meta:
        model = Sample_rate
        fields = ('id', 'url', 'model', 'supported_sensitivities', 'sample_rate', 'read_values', 'write_values', 'format_string')
class SensitivitySerializer(serializers.HyperlinkedModelSerializer):
    """Supported sensitivity configuration entry for a sensor model."""
    class Meta:
        model = Sensitivity
        fields = ('id', 'url', 'model', 'sensitivity', 'read_values', 'write_values', 'format_string')
class ValuePairSerializer(serializers.HyperlinkedModelSerializer):
    """Generic two-value pair record."""
    class Meta:
        model = Value_pair
        fields = ('id', 'url', 'value1', 'value2')
class DataFormatSerializer(serializers.HyperlinkedModelSerializer):
    """Named data format a sensor can emit."""
    class Meta:
        model = Data_format
        fields = ('id', 'url', 'name')
|
import { SET_EDITOR_CONTENT, SET_EDITOR_PATH } from '../constants';
// Initial shape of the editor slice: no file open, empty buffer.
const initialEditorState = {
  editorContent: '',
  editorPath: ''
};

/**
 * Reducer for the editor slice. Handled actions produce a fresh state object
 * with the corresponding field replaced; any other action returns the
 * existing state reference untouched.
 */
export default function (state = initialEditorState, action) {
  switch (action.type) {
    case SET_EDITOR_CONTENT:
      return Object.assign({}, state, { editorContent: action.editorContent });
    case SET_EDITOR_PATH:
      return Object.assign({}, state, { editorPath: action.editorPath });
    default:
      return state;
  }
}
def quick_sort(arr):
    """Return a new list with the elements of ``arr`` sorted ascending.

    Classic last-element-pivot quicksort. Accepts empty and single-element
    lists unchanged and keeps duplicates.
    """
    if len(arr) < 2:
        return arr
    pivot = arr[-1]
    # BUG FIX: the original put elements > pivot into `less` and <= pivot into
    # `greater`, producing a descending "sort". Partition correctly instead.
    less = [i for i in arr[:-1] if i <= pivot]
    greater = [i for i in arr[:-1] if i > pivot]
    return quick_sort(less) + [pivot] + quick_sort(greater)


arr = [5, 6, 2, 1, 7]
sorted_list = quick_sort(arr)
print(sorted_list)
#!/bin/bash
# This file contains some utilities to test the .deb/.rpm
# packages and the SysV/Systemd scripts.
# WARNING: This testing file must be executed as root and can
# dramatically change your system. It should only be executed
# in a throw-away VM like those made by the Vagrantfile at
# the root of the Elasticsearch source code. This should
# cause the script to fail if it is executed any other way:
# Refuse to run outside a throw-away Vagrant VM (see the warning above):
# the marker file is only present on VMs built by the project's Vagrantfile.
[ -f /etc/is_vagrant_vm ] || {
    >&2 echo "must be run on a vagrant VM"
    exit 1
}
# Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Checks if necessary commands are available to run the tests
if [ ! -x /usr/bin/which ]; then
    echo "'which' command is mandatory to run the tests"
    exit 1
fi

# Each tool below must be on the PATH. (The original repeated the `unzip`
# check twice; the loop deduplicates it while keeping the same messages.)
for required_cmd in wget curl pgrep unzip tar; do
    if [ ! -x "`which $required_cmd 2>/dev/null`" ]; then
        echo "'$required_cmd' command is mandatory to run the tests"
        exit 1
    fi
done

if [ ! -x "`which java 2>/dev/null`" ]; then
    # there are some tests that move java temporarily
    if [ ! -x "`command -v java.bak 2>/dev/null`" ]; then
        echo "'java' command is mandatory to run the tests"
        exit 1
    fi
fi
# Returns 0 if the 'dpkg' command is available
# (which prints nothing on a miss, so -x on the empty string fails)
is_dpkg() {
    [ -x "`which dpkg 2>/dev/null`" ]
}
# Returns 0 if the 'rpm' command is available
is_rpm() {
    [ -x "`which rpm 2>/dev/null`" ]
}
# Skip test if the 'dpkg' command is not supported
# ("skip" is the bats helper that marks the current test as skipped)
skip_not_dpkg() {
    is_dpkg || skip "dpkg is not supported"
}
# Skip test if the 'rpm' command is not supported
skip_not_rpm() {
    is_rpm || skip "rpm is not supported"
}
# Skip test unless at least one of dpkg/rpm is available.
skip_not_dpkg_or_rpm() {
    is_dpkg || is_rpm || skip "only dpkg or rpm systems are supported"
}
# Returns 0 if the system supports Systemd
# (tests for the binary directly instead of `which`, matching the original)
is_systemd() {
    [ -x /bin/systemctl ]
}
# Skip test if Systemd is not supported.
# Delegates to is_systemd instead of re-testing /bin/systemctl inline, for
# consistency with skip_not_dpkg / skip_not_rpm.
skip_not_systemd() {
    is_systemd || skip "systemd is not supported"
}
# Returns 0 if the system supports SysV
is_sysvinit() {
    [ -x "`which service 2>/dev/null`" ]
}
# Skip test if SysV is not supported, or if systemd is also present (in which
# case the systemd code path is the one under test).
# Delegates to is_sysvinit/is_systemd instead of repeating the `which service`
# probe inline, matching the other skip_* helpers.
skip_not_sysvinit() {
    if is_sysvinit && is_systemd; then
        skip "sysvinit is supported, but systemd too"
    fi
    is_sysvinit || skip "sysvinit is not supported"
}
# Skip if tar is not supported
skip_not_tar_gz() {
    if [ ! -x "`which tar 2>/dev/null`" ]; then
        skip "tar is not supported"
    fi
}
# Skip if unzip is not supported
skip_not_zip() {
    if [ ! -x "`which unzip 2>/dev/null`" ]; then
        skip "unzip is not supported"
    fi
}
# Asserts that exactly one path was given and that it exists (symlinks are
# resolved with readlink -m first). The echo only provides a diagnostic; the
# final test expression is what fails the bats assertion.
assert_file_exist() {
    local file="$1"
    local count=$(echo "$file" | wc -l)
    [[ "$count" == "1" ]] || {
        echo "assert_file_exist must be run on a single file at a time but was called on [$count] files: $file"
        false
    }
    if [ ! -e "$file" ]; then
        echo "Should exist: ${file} but does not"
    fi
    local file=$(readlink -m "${file}")
    [ -e "$file" ]
}
# Asserts that the given path (after resolving symlinks) does NOT exist.
assert_file_not_exist() {
    local file="$1"
    if [ -e "$file" ]; then
        echo "Should not exist: ${file} but does"
    fi
    local file=$(readlink -m "${file}")
    [ ! -e "$file" ]
}
# Asserts a file's type and, when given, its owner, group and permissions.
#   $1 - path
#   $2 - "d" for directory, anything else means regular file
#   $3 - expected user (optional; empty string skips the check)
#   $4 - expected group (optional)
#   $5 - expected octal permissions (optional)
assert_file() {
    local file="$1"
    local type=$2
    local user=$3
    local group=$4
    local privileges=$5
    assert_file_exist "$file"
    if [ "$type" = "d" ]; then
        if [ ! -d "$file" ]; then
            echo "[$file] should be a directory but is not"
        fi
        [ -d "$file" ]
    else
        if [ ! -f "$file" ]; then
            echo "[$file] should be a regular file but is not"
        fi
        [ -f "$file" ]
    fi
    # find -maxdepth 0 -printf is used to read metadata of the path itself
    if [ "x$user" != "x" ]; then
        realuser=$(find "$file" -maxdepth 0 -printf "%u")
        if [ "$realuser" != "$user" ]; then
            echo "Expected user: $user, found $realuser [$file]"
        fi
        [ "$realuser" = "$user" ]
    fi
    if [ "x$group" != "x" ]; then
        realgroup=$(find "$file" -maxdepth 0 -printf "%g")
        if [ "$realgroup" != "$group" ]; then
            echo "Expected group: $group, found $realgroup [$file]"
        fi
        [ "$realgroup" = "$group" ]
    fi
    if [ "x$privileges" != "x" ]; then
        realprivileges=$(find "$file" -maxdepth 0 -printf "%m")
        if [ "$realprivileges" != "$privileges" ]; then
            echo "Expected privileges: $privileges, found $realprivileges [$file]"
        fi
        [ "$realprivileges" = "$privileges" ]
    fi
}
# Asserts a module/plugin directory matches the ownership of $ESHOME
# (mode 755); caches the owner/group in the globals config_user/config_owner
# that assert_module_or_plugin_file reuses.
assert_module_or_plugin_directory() {
    local directory=$1
    shift

    #owner group and permissions vary depending on how es was installed
    #just make sure that everything is the same as $CONFIG_DIR, which was properly set up during install
    config_user=$(find "$ESHOME" -maxdepth 0 -printf "%u")
    config_owner=$(find "$ESHOME" -maxdepth 0 -printf "%g")

    assert_file $directory d $config_user $config_owner 755
}
# Asserts a module/plugin file exists and is a 644 regular file owned by the
# user/group captured by assert_module_or_plugin_directory.
assert_module_or_plugin_file() {
    local file=$1
    shift

    assert_file_exist "$(readlink -m $file)"
    assert_file $file f $config_user $config_owner 644
}
# Asserts that $output (populated by the bats `run` helper) matches the
# given extended regular expression.
assert_output() {
    echo "$output" | grep -E "$1"
}
# Deletes everything before running a test file: kills elasticsearch
# processes, purges packages, removes the user/group and every install
# artifact, and re-enables systemd-sysctl if it was masked.
clean_before_test() {

    # List of files to be deleted
    ELASTICSEARCH_TEST_FILES=("/usr/share/elasticsearch" \
                            "/etc/elasticsearch" \
                            "/var/lib/elasticsearch" \
                            "/var/log/elasticsearch" \
                            "/tmp/elasticsearch" \
                            "/etc/default/elasticsearch" \
                            "/etc/sysconfig/elasticsearch"  \
                            "/var/run/elasticsearch"  \
                            "/usr/share/doc/elasticsearch" \
                            "/usr/share/doc/elasticsearch-oss" \
                            "/tmp/elasticsearch" \
                            "/usr/lib/systemd/system/elasticsearch.conf" \
                            "/usr/lib/tmpfiles.d/elasticsearch.conf" \
                            "/usr/lib/sysctl.d/elasticsearch.conf")

    # Kills all processes of user elasticsearch
    if id elasticsearch > /dev/null 2>&1; then
        pkill -u elasticsearch 2>/dev/null || true
    fi

    # Kills all running Elasticsearch processes
    ps aux | grep -i "org.elasticsearch.bootstrap.Elasticsearch" | awk {'print $2'} | xargs kill -9 > /dev/null 2>&1 || true

    purge_elasticsearch

    # Removes user & group
    userdel elasticsearch > /dev/null 2>&1 || true
    groupdel elasticsearch > /dev/null 2>&1 || true

    # Removes all files
    for d in "${ELASTICSEARCH_TEST_FILES[@]}"; do
        if [ -e "$d" ]; then
            rm -rf "$d"
        fi
    done

    if is_systemd; then
        systemctl unmask systemd-sysctl.service
    fi
}
# Removes the $PACKAGE_NAME package through every available package manager,
# best effort (all failures are swallowed).
purge_elasticsearch() {
    # Removes RPM package
    if is_rpm; then
        rpm --quiet -e $PACKAGE_NAME > /dev/null 2>&1 || true
    fi

    if [ -x "`which yum 2>/dev/null`" ]; then
        yum remove -y $PACKAGE_NAME > /dev/null 2>&1 || true
    fi

    # Removes DEB package
    if is_dpkg; then
        dpkg --purge $PACKAGE_NAME > /dev/null 2>&1 || true
    fi

    if [ -x "`which apt-get 2>/dev/null`" ]; then
        apt-get --quiet --yes purge $PACKAGE_NAME > /dev/null 2>&1 || true
    fi
}
# Start elasticsearch and wait for it to come up with a status.
# $1 - expected status - defaults to green
# $2 - optional index to wait on
# $3 - extra command line args forwarded to the start command
start_elasticsearch_service() {
    local desiredStatus=${1:-green}
    local index=$2
    local commandLineArgs=$3

    run_elasticsearch_service 0 $commandLineArgs

    wait_for_elasticsearch_status $desiredStatus $index

    # Sanity check: verify the process is actually alive by whichever
    # mechanism started it (pidfile for tarball installs, else systemd/SysV).
    if [ -r "/tmp/elasticsearch/elasticsearch.pid" ]; then
        pid=$(cat /tmp/elasticsearch/elasticsearch.pid)
        [ "x$pid" != "x" ] && [ "$pid" -gt 0 ]
        echo "Looking for elasticsearch pid...."
        ps $pid
    elif is_systemd; then
        run systemctl is-active elasticsearch.service
        [ "$status" -eq 0 ]

        run systemctl status elasticsearch.service
        [ "$status" -eq 0 ]
    elif is_sysvinit; then
        run service elasticsearch status
        [ "$status" -eq 0 ]
    fi
}
# Start elasticsearch
# $1 expected status code
# $2 additional command line args
# Dispatches on install flavor: tarball under /tmp/elasticsearch runs the
# binary directly (as user elasticsearch via sudo), otherwise systemd or SysV
# service management is used.
run_elasticsearch_service() {
    local expectedStatus=$1
    local commandLineArgs=$2
    # Set the ES_PATH_CONF setting in case we start as a service
    if [ ! -z "$ES_PATH_CONF" ] ; then
        if is_dpkg; then
            echo "ES_PATH_CONF=$ES_PATH_CONF" >> /etc/default/elasticsearch;
        elif is_rpm; then
            echo "ES_PATH_CONF=$ES_PATH_CONF" >> /etc/sysconfig/elasticsearch;
        fi
    fi

    if [ -f "/tmp/elasticsearch/bin/elasticsearch" ]; then
        # we must capture the exit code to compare so we don't want to start as background process in case we expect something other than 0
        local background=""
        local timeoutCommand=""
        if [ "$expectedStatus" = 0 ]; then
            background="-d"
        else
            timeoutCommand="timeout 180s "
        fi

        # su and the Elasticsearch init script work together to break bats.
        # sudo isolates bats enough from the init script so everything continues
        # to tick along
        run sudo -u elasticsearch bash <<BASH
# If jayatana is installed then we try to use it. Elasticsearch should ignore it even when we try.
# If it doesn't ignore it then Elasticsearch will fail to start because of security errors.
# This line is attempting to emulate the on login behavior of /usr/share/upstart/sessions/jayatana.conf
[ -f /usr/share/java/jayatanaag.jar ] && export JAVA_TOOL_OPTIONS="-javaagent:/usr/share/java/jayatanaag.jar"
# And now we can start Elasticsearch normally, in the background (-d) and with a pidfile (-p).
export ES_PATH_CONF=$ES_PATH_CONF
export ES_JAVA_OPTS=$ES_JAVA_OPTS
$timeoutCommand/tmp/elasticsearch/bin/elasticsearch $background -p /tmp/elasticsearch/elasticsearch.pid $commandLineArgs
BASH
        [ "$status" -eq "$expectedStatus" ]
    elif is_systemd; then
        run systemctl daemon-reload
        [ "$status" -eq 0 ]

        run systemctl enable elasticsearch.service
        [ "$status" -eq 0 ]

        run systemctl is-enabled elasticsearch.service
        [ "$status" -eq 0 ]

        run systemctl start elasticsearch.service
        [ "$status" -eq "$expectedStatus" ]

    elif is_sysvinit; then
        run service elasticsearch start
        [ "$status" -eq "$expectedStatus" ]
    fi
}
# Stops elasticsearch via whichever mechanism started it (pidfile, systemd,
# or SysV) and asserts the stop was observed.
stop_elasticsearch_service() {
    if [ -r "/tmp/elasticsearch/elasticsearch.pid" ]; then
        pid=$(cat /tmp/elasticsearch/elasticsearch.pid)
        [ "x$pid" != "x" ] && [ "$pid" -gt 0 ]

        kill -SIGTERM $pid
    elif is_systemd; then
        run systemctl stop elasticsearch.service
        [ "$status" -eq 0 ]

        # systemctl is-active exits 3 for an inactive/failed unit
        run systemctl is-active elasticsearch.service
        [ "$status" -eq 3 ]

        echo "$output" | grep -E 'inactive|failed'
    elif is_sysvinit; then
        run service elasticsearch stop
        [ "$status" -eq 0 ]

        run service elasticsearch status
        [ "$status" -ne 0 ]
    fi
}
# the default netcat packages in the distributions we test are not all compatible
# so we use /dev/tcp - a feature of bash which makes tcp connections
# http://tldp.org/LDP/abs/html/devref1.html#DEVTCP
# Returns 0 when a TCP connection to $1:$2 succeeds.
test_port() {
    local host="$1"
    local port="$2"
    cat < /dev/null > "/dev/tcp/$host/$port"
}
# Prints whether TCP port $2 on host $1 is open (diagnostic only; always
# succeeds).
describe_port() {
    local host="$1"
    local port="$2"
    if test_port "$host" "$port"; then
        echo "port $port on host $host is open"
    else
        echo "port $port on host $host is not open"
    fi
}
# Dumps the elasticsearch server log, the system log tail, running java
# processes, and port status — used as a post-mortem when startup fails.
debug_collect_logs() {
    local es_logfile="$ESLOG/elasticsearch_server.json"
    local system_logfile='/var/log/messages'

    if [ -e "$es_logfile" ]; then
        echo "Here's the elasticsearch log:"
        cat "$es_logfile"
    else
        echo "The elasticsearch log doesn't exist at $es_logfile"
    fi

    if [ -e "$system_logfile" ]; then
        echo "Here's the tail of the log at $system_logfile:"
        tail -n20 "$system_logfile"
    else
        echo "The logfile at $system_logfile doesn't exist"
    fi

    echo "Current java processes:"
    ps aux | grep java || true

    echo "Testing if ES ports are open:"
    describe_port 127.0.0.1 9200
    describe_port 127.0.0.1 9201
}
# Raises logger verbosity in elasticsearch.yml — but only on SUSE systems
# (presumably to debug a SUSE-specific flakiness; confirm before widening).
set_debug_logging() {
    if [ "$ESCONFIG" ] && [ -d "$ESCONFIG" ] && [ -f /etc/os-release ] && (grep -qi suse /etc/os-release); then
        echo 'logger.org.elasticsearch.indices: TRACE' >> "$ESCONFIG/elasticsearch.yml"
        echo 'logger.org.elasticsearch.gateway: TRACE' >> "$ESCONFIG/elasticsearch.yml"
        echo 'logger.org.elasticsearch.cluster: DEBUG' >> "$ESCONFIG/elasticsearch.yml"
    fi
}
# Waits for Elasticsearch to reach some status.
# $1 - expected status - defaults to green
# $2 - optional index name; when given, waits on that index's health instead
#      of the whole cluster's
wait_for_elasticsearch_status() {
    local desiredStatus=${1:-green}
    local index=$2

    echo "Making sure elasticsearch is up..."
    wget -O - --retry-connrefused --waitretry=1 --timeout=120 --tries=120 http://localhost:9200/_cluster/health || {
          echo "Looks like elasticsearch never started"
          debug_collect_logs
          false
    }

    # BUG FIX: this used to test [ -z "index" ] — a non-empty literal — so the
    # cluster-wide branch was unreachable and an empty $index was interpolated
    # into the per-index health URL.
    if [ -z "$index" ]; then
        echo "Trying to connect to elasticsearch and wait for expected status $desiredStatus..."
        curl -sS "http://localhost:9200/_cluster/health?wait_for_status=$desiredStatus&timeout=180s&pretty"
    else
        echo "Trying to connect to elasticsearch and wait for expected status $desiredStatus for index $index"
        curl -sS "http://localhost:9200/_cluster/health/$index?wait_for_status=$desiredStatus&timeout=180s&pretty"
    fi
    if [ $? -eq 0 ]; then
        echo "Connected"
    else
        echo "Unable to connect to Elasticsearch"
        false
    fi

    # Double-check via _cat/health that the cluster really reports the status
    # we waited for (wait_for_status can time out and still return 200).
    echo "Checking that the cluster health matches the waited for status..."
    run curl -sS -XGET 'http://localhost:9200/_cat/health?h=status&v=false'
    if [ "$status" -ne 0 ]; then
        echo "error when checking cluster health. code=$status output="
        echo $output
        false
    fi
    echo $output | grep $desiredStatus || {
        echo "unexpected status: '$output' wanted '$desiredStatus'"
        false
    }
}
# Checks the current elasticsearch version using the Info REST endpoint
# $1 - expected version
# Before 7.x the -SNAPSHOT suffix is stripped because the API did not report
# it; from 7 on the version string is compared verbatim.
check_elasticsearch_version() {
    local version=$1
    local versionToCheck
    local major=$(echo ${version} | cut -d. -f1 )
    if [ $major -ge 7 ] ; then
        versionToCheck=$version
    else
        versionToCheck=$(echo ${version} | sed -e 's/-SNAPSHOT//')
    fi

    run curl -s localhost:9200
    [ "$status" -eq 0 ]

    echo $output | grep \"number\"\ :\ \"$versionToCheck\" || {
        echo "Expected $versionToCheck but installed an unexpected version:"
        curl -s localhost:9200
        false
    }
}
# Executes some basic Elasticsearch tests: checks cluster health, indexes two
# documents, verifies the count, then deletes all indices.
run_elasticsearch_tests() {
    # TODO this assertion is the same the one made when waiting for
    # elasticsearch to start
    run curl -XGET 'http://localhost:9200/_cat/health?h=status&v=false'
    [ "$status" -eq 0 ]
    echo "$output" | grep -w "green"

    # refresh=true makes the documents immediately visible to the count below
    curl -s -H "Content-Type: application/json" -XPOST 'http://localhost:9200/library/book/1?refresh=true&pretty' -d '{
      "title": "Book #1",
      "pages": 123
    }'

    curl -s -H "Content-Type: application/json" -XPOST 'http://localhost:9200/library/book/2?refresh=true&pretty' -d '{
      "title": "Book #2",
      "pages": 456
    }'

    curl -s -XGET 'http://localhost:9200/_count?pretty' |
      grep \"count\"\ :\ 2

    curl -s -XDELETE 'http://localhost:9200/_all'
}
# Move the config directory to another directory and properly chown it.
# $1 - optional destination; defaults to a fresh directory under /etc.
# Exports the new location via $ESCONFIG for subsequent helpers.
move_config() {
    local oldConfig="$ESCONFIG"
    # The custom config directory is not under /tmp or /var/tmp because
    # systemd's private temp directory functionally means different
    # processes can have different views of what's in these directories
    export ESCONFIG="${1:-$(mktemp -p /etc -d -t 'config.XXXX')}"
    echo "Moving configuration directory from $oldConfig to $ESCONFIG"

    # Move configuration files to the new configuration directory
    mv "$oldConfig"/* "$ESCONFIG"
    chown -R elasticsearch:elasticsearch "$ESCONFIG"
    assert_file_exist "$ESCONFIG/elasticsearch.yml"
    assert_file_exist "$ESCONFIG/jvm.options"
    assert_file_exist "$ESCONFIG/log4j2.properties"
}
# permissions from the user umask with the executable bit set
# ($1 - user whose login-shell umask is consulted)
executable_privileges_for_user_from_umask() {
    local user=$1
    shift

    echo $((0777 & ~$(sudo -E -u $user sh -c umask) | 0111))
}
# permissions from the user umask without the executable bit set
# ($1 - user whose login-shell umask is consulted)
file_privileges_for_user_from_umask() {
    local user=$1
    shift

    echo $((0777 & ~$(sudo -E -u $user sh -c umask) & ~0111))
}
# move java to simulate it not being in the path
# (renamed to java.bak so unmove_java can restore it)
move_java() {
    which_java=`command -v java`
    assert_file_exist $which_java
    mv $which_java ${which_java}.bak
}
# move java back to its original location (inverse of move_java)
unmove_java() {
    which_java=`command -v java.bak`
    assert_file_exist $which_java
    mv $which_java `dirname $which_java`/java
}
|
-- Core entity: a patient with personal details.
CREATE TABLE patients (
    patient_id INTEGER PRIMARY KEY,
    first_name VARCHAR(255) NOT NULL,
    last_name VARCHAR(255) NOT NULL,
    dob DATE NOT NULL,
    address VARCHAR(255)
);

-- Core entity: a doctor and their medical specialty.
CREATE TABLE doctors (
    doctor_id INTEGER PRIMARY KEY,
    first_name VARCHAR(255) NOT NULL,
    last_name VARCHAR(255) NOT NULL,
    specialty VARCHAR(255) NOT NULL
);

-- Lookup table of diagnostic codes/names.
CREATE TABLE diagnostics (
    diagnostic_id INTEGER PRIMARY KEY,
    name VARCHAR(255) NOT NULL
);

-- Lookup table of treatments with their dosage description.
CREATE TABLE treatments (
    treatment_id INTEGER PRIMARY KEY,
    name VARCHAR(255) NOT NULL,
    dosage VARCHAR(255) NOT NULL
);

-- Many-to-many link: which diagnostics a patient has received.
-- Rows disappear automatically when either side is deleted (ON DELETE CASCADE).
CREATE TABLE patient_diagnostics (
    patient_diagnostic_id INTEGER PRIMARY KEY,
    patient_id INTEGER NOT NULL REFERENCES patients(patient_id) ON DELETE CASCADE,
    diagnostic_id INTEGER NOT NULL REFERENCES diagnostics(diagnostic_id) ON DELETE CASCADE
);

-- Ternary link: a treatment prescribed to a patient by a doctor.
CREATE TABLE patient_treatments (
    patient_treatment_id INTEGER PRIMARY KEY,
    patient_id INTEGER NOT NULL REFERENCES patients(patient_id) ON DELETE CASCADE,
    doctor_id INTEGER NOT NULL REFERENCES doctors(doctor_id) ON DELETE CASCADE,
    treatment_id INTEGER NOT NULL REFERENCES treatments(treatment_id) ON DELETE CASCADE
);
#!/usr/bin/env bash
#{{{ MARK:Header
#**************************************************************
##### Author: WIZARD
##### Date: Fri Aug 31 23:37:08 EDT 2018
##### Purpose: bash script to
##### Notes:
#}}}***********************************************************
# Abort early when fping (used for the gateway liveness probe) is missing.
type fping >/dev/null 2>&1 || {
    echo "$(date) no fping"
    exit 1
}

# Host probed to decide whether the bridged VPN NIC is alive.
gateway_ip='1.1.1.1'
# Probe counter and how many consecutive failures trigger a bridge restart.
network_check_tries=0
network_check_threshold=5
# Tears down and restarts the br0 bridge plus the OpenVPN service after
# repeated failed gateway probes; if the gateway is still unreachable a minute
# later, dumps diagnostics and reboots the host.
function restart_br0() {
    echo "$(date) Network was not working for the previous $network_check_tries checks."
    sudo brctl showstp br0
    echo "Restarting br0"
    sudo bash /etc/openvpn/server/down.sh
    sleep 5
    echo "Restarting ovpn"
    # BUG FIX: was "sudo sytemctl restart ovpn" — the misspelled command fails
    # with "command not found", so the service was never actually restarted.
    sudo systemctl restart ovpn
    sleep 60
    host_status=$(fping $gateway_ip)
    if ! echo "$host_status" | grep -iq alive; then
        echo "$(date) Network failed...rebooting"
        # Collect diagnostics before the reboot wipes volatile state.
        sudo dmesg -T
        sudo journalctl --no-pager -n 100
        sudo ifconfig -a
        sudo brctl showstp br0
        sudo bash /etc/openvpn/server/down.sh
        sudo bash /etc/openvpn/server/up.sh
        sudo reboot
    fi
}
# Probe the gateway until it answers (exit 0) or the failure threshold is
# reached, at which point restart_br0 takes over recovery.
while ((network_check_tries < network_check_threshold)); do
    host_status=$(fping $gateway_ip)
    ((++network_check_tries))
    if echo "$host_status" | grep -iq alive; then
        echo "$(date) Bridged VPN NIC is working correctly"
        exit 0
    else
        echo "$(date) Bridged VPN NIC is down, failed check number $network_check_tries of $network_check_threshold"
    fi
    ((network_check_tries >= network_check_threshold)) && restart_br0
    sleep 5
done
|
/**
* @author <NAME>
*/
import * as firebase from 'firebase';
import { firebaseConfig } from '../config/firebase';
// Module-level singleton holder: Firebase allows only one default app init.
let instance = null

class FirebaseService {
    // First construction initializes the Firebase app with the project
    // config; subsequent constructions return the same wrapper instance.
    constructor() {
        if (!instance) {
            this.app = firebase.initializeApp(firebaseConfig);
            instance = this;
        }
        return instance;
    }
}

// Export the initialized firebase app object, not the wrapper class.
const firebaseService = new FirebaseService().app;
export default firebaseService;
#!/usr/bin/env bash
# Convenience script for a Shopware plugin: runs the platform's bundled
# code-style fixers over the current directory. The relative paths assume
# the script is executed from the plugin root, three levels below the
# project's vendor directory.
echo "Fix PHP files"
# php-cs-fixer rewrites PHP sources in place using the platform's .php_cs.dist rules.
./../../../vendor/shopware/platform/bin/php-cs-fixer.phar fix --config=../../../vendor/shopware/platform/.php_cs.dist -vv .
echo "Fix Javascript files"
# eslint --fix handles .js and .vue files using the administration's bundled config.
../../../vendor/shopware/platform/src/Administration/Resources/administration/node_modules/.bin/eslint --ignore-path .eslintignore --config ../../../vendor/shopware/platform/src/Administration/Resources/administration/.eslintrc.js --ext .js,.vue --fix .
|
<filename>open-sphere-base/core/src/main/java/io/opensphere/core/modulestate/StateXML.java
package io.opensphere.core.modulestate;
import javax.xml.bind.JAXBException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.apache.log4j.Logger;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import io.opensphere.core.util.XMLUtilities;
import io.opensphere.core.util.xml.MutableNamespaceContext;
/**
 * Utilities for the module state XML.
 */
public final class StateXML
{
    /** Logger reference. */
    private static final Logger LOGGER = Logger.getLogger(StateXML.class);

    /**
     * Utility method to get the node for the given path. If the node does not
     * exist, it will be created and appended to the parent.
     *
     * @param rootNode The root node.
     * @param doc The document.
     * @param parent The immediate parent of the desired node.
     * @param childPath The full path of the desired node.
     * @param childName The name of the desired node.
     * @return The child node.
     * @throws XPathExpressionException when the child path is not a valid
     *             expression.
     */
    public static Node createChildNode(Node rootNode, Document doc, Node parent, String childPath, String childName)
        throws XPathExpressionException
    {
        Node child = getChildNode(rootNode, childPath);
        if (child == null)
        {
            child = parent.appendChild(createElement(doc, childName));
        }
        return child;
    }

    /**
     * Create an element using the given document and qualified name, using the
     * {@link ModuleStateController#STATE_NAMESPACE}.
     *
     * @param doc The document.
     * @param qname The qualified name.
     * @return The element.
     */
    public static Element createElement(Document doc, String qname)
    {
        return doc.createElementNS(ModuleStateController.STATE_NAMESPACE, qname);
    }

    /**
     * Utility method to get the node for the given path.
     *
     * @param rootNode The root node.
     * @param childPath The full path of the desired node.
     * @return The child node.
     * @throws XPathExpressionException when the child path is not a valid
     *             expression.
     */
    public static Node getChildNode(Node rootNode, String childPath) throws XPathExpressionException
    {
        return (Node)newXPath().evaluate(childPath, rootNode, XPathConstants.NODE);
    }

    /**
     * Gets a list of nodes that match the path.
     *
     * @param rootNode The root xml node.
     * @param xpath The xpath query string.
     * @return The list of nodes matching the specified xpath.
     * @throws XPathExpressionException If xpath cannot be evaluated.
     */
    public static NodeList getChildNodes(Node rootNode, String xpath) throws XPathExpressionException
    {
        return (NodeList)newXPath().evaluate(xpath, rootNode, XPathConstants.NODESET);
    }

    /**
     * Utility method to get the node for the given path.
     *
     * @param rootNode The root node.
     * @param childPathFragment The path fragment (after the state node) of the
     *            desired node.
     * @return The child node, or null.
     */
    public static Node getChildStateNode(Node rootNode, String childPathFragment)
    {
        Node resultNode = null;
        try
        {
            resultNode = getChildNode(rootNode, "/" + ModuleStateController.STATE_QNAME + childPathFragment);
        }
        catch (XPathExpressionException e)
        {
            // Consistent with the other catch blocks in this class: log the
            // message as the first argument and the throwable for the stack
            // trace (previously this passed the exception object twice).
            LOGGER.error(e.getMessage(), e);
        }
        return resultNode;
    }

    /**
     * Determines if any nodes match the path.
     *
     * @param rootNode The root xml node.
     * @param xpath The xpath query string.
     * @return Whether any nodes match the path.
     */
    public static boolean anyMatch(Node rootNode, String xpath)
    {
        boolean anyMatch = false;
        try
        {
            // A non-empty result set means at least one node matched.
            anyMatch = getChildNodes(rootNode, xpath).getLength() > 0;
        }
        catch (XPathExpressionException e)
        {
            LOGGER.error(e.getMessage(), e);
        }
        return anyMatch;
    }

    /**
     * Gets the state object from the given DOM node.
     *
     * @param <T> the state object type
     * @param node the DOM node
     * @param childPathFragment The path fragment (after the state node) of the
     *            desired node.
     * @param target the state object type
     * @return the state object or null
     */
    public static <T> T getStateBean(Node node, String childPathFragment, Class<T> target)
    {
        T state = null;
        Node resultNode = getChildStateNode(node, childPathFragment);
        if (resultNode != null)
        {
            try
            {
                state = XMLUtilities.readXMLObject(resultNode, target);
            }
            catch (JAXBException e)
            {
                LOGGER.error("Failed to read state: " + e, e);
            }
        }
        return state;
    }

    /**
     * Create a new {@link XPath} that has a {@link MutableNamespaceContext}
     * pre-configured with {@link ModuleStateController#STATE_NAMESPACE} as the
     * default namespace.
     *
     * @return The XPath.
     */
    public static XPath newXPath()
    {
        MutableNamespaceContext nsContext = new MutableNamespaceContext();
        nsContext.addNamespace(ModuleStateController.STATE_NAMESPACE_PREFIX, ModuleStateController.STATE_NAMESPACE);
        XPath xpath = XPathFactory.newInstance().newXPath();
        xpath.setNamespaceContext(nsContext);
        return xpath;
    }

    /** Disallow instantiation. */
    private StateXML()
    {
    }
}
|
# frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
module Google
  module Cloud
    module DataLabeling
      module V1beta1
        # Describes an evaluation between a machine learning model's predictions and
        # ground truth labels. Created when an {::Google::Cloud::DataLabeling::V1beta1::EvaluationJob EvaluationJob} runs successfully.
        # @!attribute [rw] name
        #   @return [::String]
        #     Output only. Resource name of an evaluation. The name has the following
        #     format:
        #
        #     "projects/<var>\\{project_id}</var>/datasets/<var>\\{dataset_id}</var>/evaluations/<var>\\{evaluation_id</var>}'
        # @!attribute [rw] config
        #   @return [::Google::Cloud::DataLabeling::V1beta1::EvaluationConfig]
        #     Output only. Options used in the evaluation job that created this
        #     evaluation.
        # @!attribute [rw] evaluation_job_run_time
        #   @return [::Google::Protobuf::Timestamp]
        #     Output only. Timestamp for when the evaluation job that created this
        #     evaluation ran.
        # @!attribute [rw] create_time
        #   @return [::Google::Protobuf::Timestamp]
        #     Output only. Timestamp for when this evaluation was created.
        # @!attribute [rw] evaluation_metrics
        #   @return [::Google::Cloud::DataLabeling::V1beta1::EvaluationMetrics]
        #     Output only. Metrics comparing predictions to ground truth labels.
        # @!attribute [rw] annotation_type
        #   @return [::Google::Cloud::DataLabeling::V1beta1::AnnotationType]
        #     Output only. Type of task that the model version being evaluated performs,
        #     as defined in the
        #
        #     {::Google::Cloud::DataLabeling::V1beta1::EvaluationJobConfig#input_config evaluationJobConfig.inputConfig.annotationType}
        #     field of the evaluation job that created this evaluation.
        # @!attribute [rw] evaluated_item_count
        #   @return [::Integer]
        #     Output only. The number of items in the ground truth dataset that were used
        #     for this evaluation. Only populated when the evaluation is for certain
        #     AnnotationTypes.
        class Evaluation
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods
        end

        # Configuration details used for calculating evaluation metrics and creating an
        # {::Google::Cloud::DataLabeling::V1beta1::Evaluation Evaluation}.
        # @!attribute [rw] bounding_box_evaluation_options
        #   @return [::Google::Cloud::DataLabeling::V1beta1::BoundingBoxEvaluationOptions]
        #     Only specify this field if the related model performs image object
        #     detection (`IMAGE_BOUNDING_BOX_ANNOTATION`). Describes how to evaluate
        #     bounding boxes.
        class EvaluationConfig
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods
        end

        # Options regarding evaluation between bounding boxes.
        # @!attribute [rw] iou_threshold
        #   @return [::Float]
        #     Minimum
        #     [intersection-over-union
        #
        #     (IOU)](/vision/automl/object-detection/docs/evaluate#intersection-over-union)
        #     required for 2 bounding boxes to be considered a match. This must be a
        #     number between 0 and 1.
        class BoundingBoxEvaluationOptions
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods
        end

        # @!attribute [rw] classification_metrics
        #   @return [::Google::Cloud::DataLabeling::V1beta1::ClassificationMetrics]
        # @!attribute [rw] object_detection_metrics
        #   @return [::Google::Cloud::DataLabeling::V1beta1::ObjectDetectionMetrics]
        class EvaluationMetrics
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods
        end

        # Metrics calculated for a classification model.
        # @!attribute [rw] pr_curve
        #   @return [::Google::Cloud::DataLabeling::V1beta1::PrCurve]
        #     Precision-recall curve based on ground truth labels, predicted labels, and
        #     scores for the predicted labels.
        # @!attribute [rw] confusion_matrix
        #   @return [::Google::Cloud::DataLabeling::V1beta1::ConfusionMatrix]
        #     Confusion matrix of predicted labels vs. ground truth labels.
        class ClassificationMetrics
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods
        end

        # Metrics calculated for an image object detection (bounding box) model.
        # @!attribute [rw] pr_curve
        #   @return [::Google::Cloud::DataLabeling::V1beta1::PrCurve]
        #     Precision-recall curve.
        class ObjectDetectionMetrics
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods
        end

        # @!attribute [rw] annotation_spec
        #   @return [::Google::Cloud::DataLabeling::V1beta1::AnnotationSpec]
        #     The annotation spec of the label for which the precision-recall curve
        #     is calculated. If this field is empty, that means the precision-recall curve
        #     is an aggregate curve for all labels.
        # @!attribute [rw] area_under_curve
        #   @return [::Float]
        #     Area under the precision-recall curve. Not to be confused with area under
        #     a receiver operating characteristic (ROC) curve.
        # @!attribute [rw] confidence_metrics_entries
        #   @return [::Array<::Google::Cloud::DataLabeling::V1beta1::PrCurve::ConfidenceMetricsEntry>]
        #     Entries that make up the precision-recall graph. Each entry is a "point" on
        #     the graph drawn for a different `confidence_threshold`.
        # @!attribute [rw] mean_average_precision
        #   @return [::Float]
        #     Mean average precision of this curve.
        class PrCurve
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods

          # @!attribute [rw] confidence_threshold
          #   @return [::Float]
          #     Threshold used for this entry.
          #
          #     For classification tasks, this is a classification threshold: a
          #     predicted label is categorized as positive or negative (in the context of
          #     this point on the PR curve) based on whether the label's score meets this
          #     threshold.
          #
          #     For image object detection (bounding box) tasks, this is the
          #     [intersection-over-union
          #
          #     (IOU)](/vision/automl/object-detection/docs/evaluate#intersection-over-union)
          #     threshold for the context of this point on the PR curve.
          # @!attribute [rw] recall
          #   @return [::Float]
          #     Recall value.
          # @!attribute [rw] precision
          #   @return [::Float]
          #     Precision value.
          # @!attribute [rw] f1_score
          #   @return [::Float]
          #     Harmonic mean of recall and precision.
          # @!attribute [rw] recall_at1
          #   @return [::Float]
          #     Recall value for entries with label that has highest score.
          # @!attribute [rw] precision_at1
          #   @return [::Float]
          #     Precision value for entries with label that has highest score.
          # @!attribute [rw] f1_score_at1
          #   @return [::Float]
          #     The harmonic mean of {::Google::Cloud::DataLabeling::V1beta1::PrCurve::ConfidenceMetricsEntry#recall_at1 recall_at1} and {::Google::Cloud::DataLabeling::V1beta1::PrCurve::ConfidenceMetricsEntry#precision_at1 precision_at1}.
          # @!attribute [rw] recall_at5
          #   @return [::Float]
          #     Recall value for entries with label that has highest 5 scores.
          # @!attribute [rw] precision_at5
          #   @return [::Float]
          #     Precision value for entries with label that has highest 5 scores.
          # @!attribute [rw] f1_score_at5
          #   @return [::Float]
          #     The harmonic mean of {::Google::Cloud::DataLabeling::V1beta1::PrCurve::ConfidenceMetricsEntry#recall_at5 recall_at5} and {::Google::Cloud::DataLabeling::V1beta1::PrCurve::ConfidenceMetricsEntry#precision_at5 precision_at5}.
          class ConfidenceMetricsEntry
            include ::Google::Protobuf::MessageExts
            extend ::Google::Protobuf::MessageExts::ClassMethods
          end
        end

        # Confusion matrix of the model running the classification. Only applicable
        # when the metrics entry aggregates multiple labels. Not applicable when the
        # entry is for a single label.
        # @!attribute [rw] row
        #   @return [::Array<::Google::Cloud::DataLabeling::V1beta1::ConfusionMatrix::Row>]
        class ConfusionMatrix
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods

          # @!attribute [rw] annotation_spec
          #   @return [::Google::Cloud::DataLabeling::V1beta1::AnnotationSpec]
          #     The annotation spec of a predicted label.
          # @!attribute [rw] item_count
          #   @return [::Integer]
          #     Number of items predicted to have this label. (The ground truth label for
          #     these items is the `Row.annotationSpec` of this entry's parent.)
          class ConfusionMatrixEntry
            include ::Google::Protobuf::MessageExts
            extend ::Google::Protobuf::MessageExts::ClassMethods
          end

          # A row in the confusion matrix. Each entry in this row has the same
          # ground truth label.
          # @!attribute [rw] annotation_spec
          #   @return [::Google::Cloud::DataLabeling::V1beta1::AnnotationSpec]
          #     The annotation spec of the ground truth label for this row.
          # @!attribute [rw] entries
          #   @return [::Array<::Google::Cloud::DataLabeling::V1beta1::ConfusionMatrix::ConfusionMatrixEntry>]
          #     A list of the confusion matrix entries. One entry for each possible
          #     predicted label.
          class Row
            include ::Google::Protobuf::MessageExts
            extend ::Google::Protobuf::MessageExts::ClassMethods
          end
        end
      end
    end
  end
end
|
<reponame>myso-kr/kr.myso.tampermonkey<filename>service/com.naver.shopping.monitor.keyword.trend.user.js
// ==UserScript==
// @namespace https://tampermonkey.myso.kr/
// @name 네이버 쇼핑 키워드 트렌드
// @description 네이버 쇼핑의 키워드의 트렌드를 확인합니다.
// @copyright 2021, myso (https://tampermonkey.myso.kr)
// @license Apache-2.0
// @version 1.0.0
// @updateURL https://github.com/myso-kr/kr.myso.tampermonkey/raw/master/service/com.naver.shopping.monitor.keyword.trend.user.js
// @downloadURL https://github.com/myso-kr/kr.myso.tampermonkey/raw/master/service/com.naver.shopping.monitor.keyword.trend.user.js
// @author <NAME>
// @connect naver.com
// @connect ryo.co.kr
// @match *://search.shopping.naver.com/search/*
// @grant GM_addStyle
// @grant GM_xmlhttpRequest
// @require https://cdn.jsdelivr.net/npm/kr.myso.tampermonkey@1.0.52/assets/polyfill/Object.fromEntries.js
// @require https://cdn.jsdelivr.net/npm/kr.myso.tampermonkey@1.0.52/assets/polyfill/Array.prototype.flat.js
// @require https://cdn.jsdelivr.net/npm/kr.myso.tampermonkey@1.0.52/assets/polyfill/String.prototype.matchAll.js
// @require https://cdn.jsdelivr.net/npm/kr.myso.tampermonkey@1.0.52/assets/vendor/gm-app.js
// @require https://cdn.jsdelivr.net/npm/kr.myso.tampermonkey@1.0.52/assets/vendor/gm-add-style.js
// @require https://cdn.jsdelivr.net/npm/kr.myso.tampermonkey@1.0.52/assets/vendor/gm-add-script.js
// @require https://cdn.jsdelivr.net/npm/kr.myso.tampermonkey@1.0.52/assets/vendor/gm-xmlhttp-request-async.js
// @require https://cdn.jsdelivr.net/npm/kr.myso.tampermonkey@1.0.52/assets/lib/naver-datalab.js
// @require https://cdn.jsdelivr.net/npm/kr.myso.tampermonkey@1.0.52/assets/lib/naver-search-ad.js
// @require https://cdn.jsdelivr.net/npm/kr.myso.tampermonkey@1.0.52/assets/lib/naver-search-nx.js
// @require https://cdn.jsdelivr.net/npm/kr.myso.tampermonkey@1.0.52/assets/donation.js
// @require https://cdn.jsdelivr.net/npm/chart.js
// @require https://cdnjs.cloudflare.com/ajax/libs/moment.js/2.29.1/moment.js
// @require https://cdnjs.cloudflare.com/ajax/libs/moment-timezone/0.5.33/moment-timezone.js
// @require https://cdnjs.cloudflare.com/ajax/libs/bluebird/3.7.2/bluebird.min.js
// @require https://cdnjs.cloudflare.com/ajax/libs/lodash.js/4.17.21/lodash.min.js
// ==/UserScript==
// ==OpenUserJS==
// @author myso
// ==/OpenUserJS==
// Entry point: runs inside the Tampermonkey sandbox on Naver Shopping search pages.
// Builds a Chart.js trend chart (search volume / exposure / content creation) for
// the current search keyword and injects it above the result summary.
GM_App(async function main() {
  // All date math in this script is relative to Korean time.
  moment.tz.setDefault("Asia/Seoul");
  GM_donation('[class^="style_container__"]');

  // Format an integer with thousands separators (e.g. 12345 -> "12,345").
  function format_number(number) { return number.toString().split( /(?=(?:\d{3})+(?:\.|$))/g ).join( "," ); }
  // Parse a purely numeric string to a float; anything else yields 0.
  function parsed_number(number) { return /^[\d\.]+$/.test(String(number)) ? parseFloat(number) : 0; }

  // The 32 daily periods (YYYYMMDD) ending yesterday, sorted oldest first.
  const range = _.range(32).map(o=>moment().subtract(o + 1, 'days').format('YYYYMMDD')).sort();
  // Search keyword taken from the page URL's "query" parameter; nothing to do without one.
  const keyword = (new URL(location.href)).searchParams.get('query'); if(!keyword) return;

  // Daily search-volume series. NA_search supplies ad stats (its monthlyQcCnt
  // field is used to scale the relative trend from ND_trend into absolute
  // counts); when it returns nothing, raw trend values are charted instead
  // and rval=false marks the series as relative.
  const keyword_search = await Promise.resolve().then(async()=>{
    const stat = await NA_search(keyword);
    const data = await ND_trend(keyword);
    const sums = data.reduce((r, o)=>r+o.value, 0);
    const tick = stat ? (stat.monthlyQcCnt / sums) : 1;
    const items = range.map((period)=>Object.assign({ period, value: 0 }, _.find(data, { period }))).map((item)=>(item.value = (item.value * tick), item));
    return { items, rval: !!stat };
  });

  // Per-day content counts (blog / view / cafe) fetched via NX_count, one
  // request triple per period, restricted to that single day via the nso range.
  const keyword_create = await Promise.map(range, async (period)=>{
    const props = await Promise.props({
      blog_count: NX_count(keyword, 'blog', 'normal', { api_type: 1, nso: `so:r,p:from${period}to${period},a:all` }),
      view_count: NX_count(keyword, 'view', 'normal', { api_type: 11, nso: `so:r,p:from${period}to${period},a:all` }),
      cafe_count: NX_count(keyword, 'article', 'normal', { prmore: 1, nso: `so:r,p:from${period}to${period},a:all` }),
    });
    return { period, ...props };
  });

  // Locate the result-summary container and (re)use a canvas inside it for the chart.
  const wrap = document.querySelector('[class^="resultSummary_result_summary__"]'); if(!wrap) return;
  const canv = wrap.querySelector('canvas') || document.createElement('canvas'); canv.style.width = '100%'; canv.style.height = '120px'; canv.style.marginBottom = '20px'; wrap.append(canv);

  // Chart.js configuration: four line datasets on three y-axes (search volume,
  // exposure count, and the two creation counts sharing an axis).
  const config = {
    type: 'line',
    data: {
      labels: range,
      datasets: [
        { yAxisID: 'y1', viewtype:'search_cnt', backgroundColor: '#74d2e7', borderColor: '#48a9c5', data: _.map(keyword_search.items, o=>o.value), },
        { yAxisID: 'y2', viewtype:'view_count', backgroundColor: '#52565e', borderColor: '#caccd1', data: _.map(keyword_create, o=>o.view_count), },
        { yAxisID: 'y3', viewtype:'blog_count', backgroundColor: '#279b37', borderColor: '#34bf49', data: _.map(keyword_create, o=>o.blog_count), },
        { yAxisID: 'y3', viewtype:'cafe_count', backgroundColor: '#f48924', borderColor: '#ffc845', data: _.map(keyword_create, o=>o.cafe_count), },
      ],
    },
    options: {
      scales: {
        x: { display: false, },
        y1: {
          type: 'linear',
          display: true,
          position: 'left',
        },
        y2: {
          type: 'linear',
          display: false,
          position: 'left',
          grid: {
            drawOnChartArea: false,
          },
        },
        y3: {
          type: 'linear',
          display: false,
          position: 'right',
          grid: {
            drawOnChartArea: false,
          },
        },
      },
      plugins: {
        title: {
          display: true,
          text(context) {
            return '검색어 검색/생산/노출 트렌드';
          }
        },
        legend: { display: false },
        tooltip: {
          mode: 'index',
          intersect: false,
          callbacks: {
            // Tooltip rows: absolute counts, or a percentage for search volume
            // when no ad stats were available (keyword_search.rval is false).
            label(context) {
              if(context.dataset.viewtype == 'search_cnt') return (keyword_search.rval) ? `검색량: ${format_number(context.parsed.y.toFixed(0))}회` : `검색율: ${format_number(context.parsed.y.toFixed(2))}%`;
              if(context.dataset.viewtype == 'view_count') return `노출량: ${format_number(context.parsed.y.toFixed(0))}건 노출 됨`;
              if(context.dataset.viewtype == 'blog_count') return `블로그: ${format_number(context.parsed.y.toFixed(0))}건 생산 됨`;
              if(context.dataset.viewtype == 'cafe_count') return `카페: ${format_number(context.parsed.y.toFixed(0))}건 생산 됨`;
              return '';
            }
          }
        }
      },
    }
  };
  const chart = new Chart(canv, config);
});
<filename>StreletzCorporatePortal/env/Scripts/django-crud-generator.py<gh_stars>0
#!C:\Users\Егор\source\repos\StreletzCorporatePortal\StreletzCorporatePortal\env\Scripts\python.exe
# Console-script shim generated into the virtualenv's Scripts directory:
# delegates straight to django_crud_generator's CLI entry point.
from django_crud_generator import execute_from_command_line

if __name__ == "__main__":
    execute_from_command_line()
|
/**
 * Determines whether every character (UTF-16 code unit) in a string is unique.
 *
 * @param {string} str - The string to check.
 * @returns {boolean} true if no character repeats (including the empty
 *   string), false otherwise.
 */
function isUnique(str) {
  // A Set keeps only distinct elements; splitting on '' yields one entry per
  // UTF-16 code unit (matching the original charAt-based loop), so the sizes
  // are equal exactly when the string contains no duplicate characters.
  return new Set(str.split('')).size === str.length;
}
<filename>ruby/lib/helix_runtime/project.rb
require 'tomlrb'
require 'json'
module HelixRuntime
  # Represents a single Helix (Rust-backed) project rooted at the directory
  # containing Cargo.toml. Knows how to locate, build, copy and clean the
  # project's compiled native extension.
  class Project
    # Raised when the compiled native library is older than the Rust sources.
    class OutdatedBuildError < StandardError
      def initialize(name)
        super("\n\nHelix crate '#{name}' is outdated. To resolve this issue, run `rake build` and restart your server.\n\n")
      end
    end

    # Project root: the nearest ancestor directory containing Cargo.toml.
    attr_accessor :root
    # Optional directory exported to cargo via the HELIX_LIB_DIR env var.
    attr_accessor :helix_lib_dir
    # Truthy => build the debug cargo profile instead of release.
    attr_accessor :debug_rust
    attr_accessor :build_root

    def initialize(root)
      @root = find_root(root)
      @debug_rust = ENV['DEBUG_RUST']
      @build_root = @root
    end

    # Whether the debug profile is requested (normalized to true/false).
    def debug_rust?
      !!debug_rust
    end

    # Crate name read (and memoized) from Cargo.toml's [package] section.
    def name
      @name ||= Tomlrb.load_file(cargo_toml_path)["package"]["name"]
    end

    def cargo_toml_path
      "#{root}/Cargo.toml"
    end

    # Cargo output directory (target/<debug|release>) as reported by
    # `cargo metadata`.
    def build_path
      metadata = %x[cargo metadata --format-version 1]
      target_directory = JSON.parse(metadata)["target_directory"]
      File.expand_path(debug_rust? ? 'debug' : 'release', target_directory)
    end

    def lib_path
      "#{root}/lib/#{name}"
    end

    # Shared libraries are prefixed "lib" everywhere except Windows.
    def libfile_prefix
      IS_WINDOWS ? '' : 'lib'
    end

    # Destination path of the native extension inside the project's lib dir.
    def native_path
      "#{lib_path}/native.#{Platform.dlext}"
    end

    # File name of the library cargo produces (crate name with '-' mapped
    # to '_', per cargo's naming).
    def native_lib
      "#{libfile_prefix}#{name.gsub('-', '_')}.#{Platform.libext}"
    end

    # True if the copied native extension is missing or older than the newest
    # .rs source file.
    def outdated_build?
      mtime = Dir["#{root}/src/**/*.rs"].map{|file| File.mtime(file) }.max
      native = "#{root}/lib/#{name}/native.#{Platform.dlext}"
      !File.exist?(native) || File.mtime(native) < mtime
    end

    # Raises OutdatedBuildError when the build is stale.
    def ensure_built!
      raise OutdatedBuildError.new(name) if outdated_build?
    end

    # Rebuild only when sources changed.
    def autobuild
      build if outdated_build?
    end

    # Invokes `cargo rustc`, assembling linker/rustc flags per platform and
    # the DEBUG_RUST / DEBUG_RUST_MACROS / VERBOSE environment switches.
    def cargo_build
      HelixRuntime.ensure_dll!

      # We have to do this here since Cargo has no internal means of specifying `-C` flags
      link_args = if IS_WINDOWS
        # SAFESEH is added to i686 Rust hosts
        # https://github.com/rust-lang/rust/blob/1.15.1/src/librustc_back/target/i686_pc_windows_msvc.rs#L25
        if `rustc -vV` =~ /host:\s+i686/
          '/SAFESEH:NO' # Can't use SAFESEH with .libs from dlltool
        end
      else
        # Allowing all methods to be undefined is a bit risky, would be nice to have a specific list.
        '-Wl,-undefined,dynamic_lookup'
      end

      env = {}
      env['HELIX_LIB_DIR'] = helix_lib_dir if helix_lib_dir

      cargo_args = []
      rustc_args = []

      if ENV['DEBUG_RUST_MACROS']
        rustc_args << "--pretty expanded"
        rustc_args << "-Z unstable-options"
      end
      unless debug_rust?
        cargo_args << ["--release"]
      end
      if ENV['VERBOSE']
        cargo_args << " --verbose"
      end
      if link_args
        rustc_args << "-C link-args=#{link_args}"
      end
      unless rustc_args.empty?
        cargo_args << "-- #{rustc_args.join(' ')}"
      end

      run env, "cargo rustc #{cargo_args.join(' ')}"
    end

    def cargo_clean
      run("cargo clean")
    end

    # Copies the freshly built library from cargo's target dir into
    # lib/<name>/native.<dlext>; raises when the build artifact is absent.
    def copy_native
      source = "#{build_path}/#{native_lib}"
      raise "native source doesn't exist, run `cargo_build` first; source=#{source}" unless File.exist?(source)
      FileUtils.mkdir_p(File.dirname(native_path))
      FileUtils.cp source, native_path
      true
    end

    # Full build: compile then install the native extension.
    def build
      cargo_build && copy_native
    end

    # Removes both cargo's build output and the installed native extension.
    def clobber
      cargo_clean
      FileUtils.rm_f native_path
    end

    private

    # Runs a shell command from the project root, echoing it first.
    def run(*args)
      Dir.chdir(root) do
        puts(*args)
        system(*args)
      end
    end

    # Walks upward from +root+ until a directory containing Cargo.toml is
    # found; raises if the filesystem root is reached without one.
    def find_root(root)
      root = File.expand_path(root)
      dir = root
      loop do
        return dir if File.exist?("#{dir}/Cargo.toml")
        new_dir = File.dirname(dir)
        raise "Unable to find root for #{root}" if new_dir == dir
        dir = new_dir
      end
    end
  end
end
|
<gh_stars>10-100
package com.ulfy.master.application.cm;
import com.ulfy.android.mvvm.IView;
import com.ulfy.master.application.base.BaseCM;
import com.ulfy.master.domain.entity.ContentSearch;
import com.ulfy.master.ui.cell.ContentSearchCell;
/**
 * Cell model wrapping a {@link ContentSearch} entity so it can be rendered
 * by the MVVM layer via {@link ContentSearchCell}.
 */
public class ContentSearchCM extends BaseCM {
    /** The search-result entity this cell model carries. */
    public ContentSearch contentSearch;

    /**
     * @param contentSearch the entity to display in the cell
     */
    public ContentSearchCM(ContentSearch contentSearch) {
        this.contentSearch = contentSearch;
    }

    /** @return the view class the MVVM framework uses to render this model */
    @Override public Class<? extends IView> getViewClass() {
        return ContentSearchCell.class;
    }
}
#!/bin/sh
###############################################################################
#
# NAME: incaInstall.sh
#
# DESCRIPTION
#
# An installer script for the Inca 2.0 distribution.
#
# SYNOPSIS
#
# incaInstall.sh all|agent|common|consumers|depot|incat|reporters|server
#
###############################################################################
#=============================================================================#
# Global Vars
#=============================================================================#
# Component lists used to expand the "server", "core" and "all" install targets.
SERVER="common-java agent consumers depot Inca-WS"
CORE="common-java agent consumers depot incat Inca-Reporter"
ALL="${SERVER} incat Inca-Reporter"
# REAL_ALL additionally contains the aliases accepted as individual components.
REAL_ALL="${ALL} reporters incaws"
# Default download location for release tarballs (overridable with -r).
INCA_RELEASES="http://inca.sdsc.edu/releases/2.7"
# URL fetchers probed in order of preference.
HTTP_GET_METHODS="wget curl"
#=============================================================================#
# Functions
#=============================================================================#
#-----------------------------------------------------------------------------#
# printUsage
#
# Prints the usage/help message for this installer script
# (header previously misnamed this function "echoError")
#-----------------------------------------------------------------------------#
printUsage() {
  echo "Installer script for the Inca 2.0 distribution";
  echo;
  echo "Usage: ./incaInstall.sh [-r releasesUrl] installdir all|<component>|server"
  echo;
  echo "where";
  echo;
  echo " installdir the target installation directory";
  echo;
  echo " all install all of the Inca components";
  echo;
  echo " <component> is one of the following values:";
  echo;
  echo " agent the Inca component responsible for reporter data ";
  echo " collection"
  echo " consumers Jetty web server that serves JSP pages for displaying";
  echo " collected Inca reporter data";
  echo " depot the Inca component responsible for storing and archiving";
  echo " reporter data";
  echo " incat GUI for configuring and administering an Inca deployment";
  echo " incaws the Web Services server";
  echo " reporters the Inca reporter API and repository tools";
  echo;
  echo " core the Inca agent, consumers, depot, components";
  echo " server the Inca agent, consumers, depot, and incaws components";
  echo;
  echo "Options:";
  echo " r Specify an alternative release directory [default: $INCA_RELEASES]";
}
#-----------------------------------------------------------------------------#
# checkForHttpGet
#
# Find out whether curl or wget exists on machine to fetch urls.  Echoes the
# name of the first tool that can successfully fetch a test page; echoes
# nothing if neither works.
#-----------------------------------------------------------------------------#
checkForHttpGet() {
  for method in $HTTP_GET_METHODS; do
    # wget needs "-O -" to send the page to stdout; curl does so by default.
    if test ${method} = 'wget'; then
      args='-O -'
    else
      args=''
    fi
    ${method} ${args} http://inca.sdsc.edu >/dev/null 2>&1
    if test $? -eq 0; then
      echo ${method};
      return;
    fi
  done
}
#-----------------------------------------------------------------------------#
# installWS
#
# Runs the installer script for the Inca-WS component: locates the latest
# unpacked Inca-WS* directory and runs its perl Makefile.PL/make/make install
# sequence, appending all output to ${installdir}/install.log.
#-----------------------------------------------------------------------------#
installWS() {
  wsdir=""; # in case there are multiple Inca-WS dirs (e.g., one or more updates)
  wsdirs=`ls -d Inca-WS* 2>/dev/null | grep -v tar`
  if ( test $? -eq 0 ); then
    for dir in ${wsdirs}; do
      # get the last listed dir (latest version)
      wsdir=${dir}
    done
    if ( test -d ${wsdir} ); then
      cd ${wsdir} 2>&1 >> ${installdir}/install.log
      # BUG FIX: this command previously ended with
      # "2>&1 ${installdir}/install.log" (missing ">>"), which passed the log
      # path to perl as an extra argument instead of appending output to it.
      perl -I${installdir}/lib/perl Makefile.PL \
        PREFIX=${installdir} INSTALLDIRS=perl \
        LIB=${installdir}/lib/perl \
        INSTALLSCRIPT=${installdir}/bin \
        INSTALLMAN1DIR=${installdir}/man/man1 \
        INSTALLMAN3DIR=${installdir}/man/man3 2>&1 >> ${installdir}/install.log
      make >> ${installdir}/install.log
      make install >> ${installdir}/install.log
      cd ${installdir} 2>&1 >> ${installdir}/install.log
    fi
  fi
}
#=============================================================================#
# Main
#=============================================================================#

# read options
releasesUrl=${INCA_RELEASES}
getMethod="";
while getopts r: opt; do
  case $opt in
    r) releasesUrl=$OPTARG
       # file:// URLs are served by plain cp instead of wget/curl
       case $releasesUrl in
         file://*) localDir=`echo $releasesUrl | sed 's/^file:\/\///'`
                   releasesUrl=${localDir}
                   getMethod=cp ;;
         '?' ) ;;
       esac
       ;;
    '?') echo "$0: invalid option $OPTARG" >&2
         printUsage;
         exit 1;
         ;;
  esac
done
shift `expr $OPTIND - 1`

# find wget or curl (unless a file:// URL already selected cp)
if test "${getMethod}" = ""; then
  getMethod=`checkForHttpGet`;
  if test "${getMethod}" = ""; then
    echo "Error, unable to find one of the following tools on your system: " \
         "$HTTP_GET_METHODS";
    exit 1;
  fi
fi

# read user's choice: $1 = install dir, $2 = component/alias to install
installdir=""
modules=""
if test "$1" = ""; then
  printUsage;
  exit 1;
else
  installdir=$1
fi
if test "$2" = ""; then
  echo "Error, missing Inca component name to install"
  exit 1;
elif test "$2" = "all"; then
  modules=${ALL}
elif test "$2" = "core"; then
  modules=${CORE}
elif test "$2" = "server"; then
  modules=${SERVER}
else
  # map a single component alias onto the actual tarball/module names
  validmodule=`echo ${REAL_ALL} | grep $2`
  if test "${validmodule}" != ""; then
    if test "$2" = "reporters"; then
      modules="Inca-Reporter"
    elif test "$2" = "incaws"; then
      modules="Inca-WS"
    elif test "$2" = "manager"; then
      modules="Inca-ReporterManager"
    elif test "$2" = "common-java"; then
      modules="common-java"
    else
      # most components also need the shared common-java module
      modules="common-java $2"
    fi
  else
    echo "'$2' is not a valid Inca component name"
    exit 1;
  fi
fi

if test ! -d $installdir; then
  echo "Creating directory $installdir";
  mkdir -p $installdir;
fi
cd $installdir;
installdir=`pwd`

# fetch, unpack and (where needed) install each selected module
for component in $modules; do
  name="";
  # tarball naming differs per component type
  if test "${component}" = "Inca-Reporter" -o "${component}" = "Inca-WS"; then
    name=${component}
  elif test "${component}" = "incat"; then
    name="${component}-bin"
  else
    name="inca-${component}-bin"
  fi
  echo "Retrieving ${releasesUrl}/${name}.tar.gz";
  if test "${getMethod}" = "cp"; then
    ${getMethod} "${releasesUrl}/${name}.tar.gz" .
  elif test "${getMethod}" = "wget"; then
    ${getMethod} "${releasesUrl}/${name}.tar.gz"
  else
    ${getMethod} "${releasesUrl}/${name}.tar.gz" > ${name}.tar.gz
  fi
  if test ! -f "${name}.tar.gz"; then
    echo "Unable to retrieve Inca component ${component}";
    exit 1;
  fi
  echo "Unpacking ${releasesUrl}/${name}.tar.gz";
  gunzip "${name}.tar.gz";
  gtar xvf "${name}.tar";
  rm -f "${name}.tar";
  # Inca-WS is a perl distribution and needs an extra install step
  if ( test "${component}" = "Inca-WS" ); then
    installWS
  fi
  echo "${component} installed";
done
|
<filename>src/php/network/index.js<gh_stars>1000+
// Aggregates the PHP network-function ports into a single module, keyed by
// their original PHP names.
module.exports.inet_ntop = require('./inet_ntop')
module.exports.inet_pton = require('./inet_pton')
module.exports.ip2long = require('./ip2long')
module.exports.long2ip = require('./long2ip')
module.exports.setcookie = require('./setcookie')
module.exports.setrawcookie = require('./setrawcookie')
|
#!/usr/bin/env bash
# E2E harness: clones a PHPStan extension repo (URL passed as $1), wires in
# the locally built phpstan.phar, then runs the extension's own test and
# phpstan make targets against it.

set -o errexit   # abort on the first failing command
set -o pipefail  # a failure anywhere in a pipeline fails the pipeline
set -o nounset   # treat references to unset variables as errors

git clone $1 extension
cd extension

# PHP 7.1/7.2 need an older PHPUnit pinned explicitly; newer PHP versions
# just install from the repo's own composer files.
if [[ "$PHP_VERSION" == "7.1" || "$PHP_VERSION" == "7.2" ]]; then
  composer install --no-interaction --no-suggest
  composer require --dev phpunit/phpunit:'^7.5.20' --update-with-dependencies
else
  composer install
fi;

# Replace the composer-installed PHPStan with the locally built artifacts.
cp ../phpstan.phar vendor/phpstan/phpstan/phpstan.phar
cp ../phpstan vendor/phpstan/phpstan/phpstan
cp ../bootstrap.php vendor/phpstan/phpstan/bootstrap.php

if [[ "$STATIC_REFLECTION" == "true" ]]; then
  echo "Running with static reflection"
  cp ../e2e/bootstrap-static-reflection.php tests/bootstrap.php
fi

make tests
make phpstan
|
#!/bin/bash -ex
# CI step: when VERIFY_DEP=true, pins this repo to the commit recorded in the
# release manifest and rebases the Dockerfile onto the pinned on-core image
# digest before staging the sources for the build.

if [ "${VERIFY_DEP}" == "true" ]; then
  # Commit hash for the "ontftp" entry recorded in the upstream manifest JSON.
  COMMIT=$(cat $(ls ../manifest-artifactory/manifest*.json) | jq -r .ontftp.commit)
  # Make pull-request head refs fetchable so the commit can be checked out
  # even if it only exists on a PR branch.
  git config --add remote.origin.fetch +refs/pull/*/head:refs/remotes/origin/pull/*
  git fetch
  git checkout $COMMIT
  # Pin the base image to the exact on-core digest produced upstream.
  export ONCORE_TAG=$(<../on-core-docker/digest)
  sed -i "s/^FROM.*/FROM $REGISTRY\/${REPO_OWNER}\/on-core@${ONCORE_TAG}/" ./Dockerfile
fi
cat Dockerfile
cp -rf * ../build
|
// Roll a fair six-sided die: returns an integer in [1, 6].
const getRandomNumber = () => Math.floor(Math.random() * 6) + 1;

// Collect 24 independent rolls and print them.
let numbers = Array.from({ length: 24 }, () => getRandomNumber());
console.log(numbers);
#!/bin/bash
# Daily backup of one MySQL schema: dumps it with mysqldump, packs the dump
# with rar and appends progress to a per-day log file.
backupfolder="/home/$USER/backups/$(date +'%Y.%m.%d')"
# Connection data for the schema being handled
host="127.0.0.1"
database=""
user=""
password=""
# Parse the command-line flag/value pairs.
# FIX: inside [[ ]], `>` is a lexicographic STRING comparison (e.g.
# [[ 10 > 9 ]] is false), so use the arithmetic operator -gt instead.
while [[ $# -gt 1 ]]; do
	key="$1"
	case $key in
	-d|--database)
		database="$2"
		shift
		;;
	-u|--user)
		user="$2"
		shift
		;;
	-p|--password)
		password="$2"
		shift
		;;
	*)
		echo "Argumento desconhecido: $key $2"
		;;
	esac
	shift
done
# All three parameters are mandatory
if [[ "$database" == "" || "$user" == "" || "$password" == "" ]]; then
	echo "Usage: ./script-backup.sh -d <database-name> -u <user-name> -p <password>"
	exit 1
fi
# Create today's backup folder
mkdir -p "$backupfolder"
# Log file name
logfile="$backupfolder/backup-log.txt"
# Start the dump
echo ">> Mysqldump começou em $(date +'%d-%m-%Y %H:%M:%S')" >> "$logfile"
mysqldump --verbose --protocol=tcp --port=3306 --default-character-set=utf8 \
--host="$host" --user="$user" --password="$password" \
--single-transaction=TRUE --routines --events \
--databases "$database" 2>> "$logfile" > "$backupfolder/backup-$database.sql"
echo ">> Mysqldump terminou em $(date +'%d-%m-%Y %H:%M:%S')" >> "$logfile"
cd "$backupfolder"
# Move the SQL dump into a rar archive ("rar m" deletes the source file)
echo ">> Rar iniciou em $(date +'%d-%m-%Y %H:%M:%S')" >> "$logfile"
rar m "backup-$database.rar" "backup-$database.sql" >> "$logfile"
echo ">> Rar terminou em $(date +'%d-%m-%Y %H:%M:%S')" >> "$logfile"
chmod -R a+rw "$backupfolder"
echo ">> Permições de arquivos foram atualizadas" >> "$logfile"
echo ">> Operação terminada em $(date +'%d-%m-%Y %H:%M:%S')" >> "$logfile"
echo "*************************************************************************************" >> "$logfile"
exit 0
// ---------------------------------------------
// Model
// ---------------------------------------------
// Sequelize model factory for the Poll_Voted table: records which choices a
// user voted for in a given poll.
const _TABLE = (sequelize, Sequelize) => {
    return sequelize.define('Poll_Voted', {
        // ID of the poll the vote belongs to
        poll_id: {
            type: Sequelize.STRING
        },
        // ID of the voting user
        user_id: {
            type: Sequelize.STRING
        },
        // JSON-stringified array of the selected choices
        choices: Sequelize.STRING,
    }, {
        // FIX: the Sequelize option is `timestamps` (plural). The original
        // `timestamp: false` was silently ignored, so createdAt/updatedAt
        // columns were still being added to the table.
        timestamps: false
    })
}
// ---------------------------------------------
// ---------------------------------------------
// Helper
// ---------------------------------------------
// Persist a user's vote for a poll. Resolves to true on success, false when
// the insert fails (the error is logged, never thrown to the caller).
async function add(msg, poll_id, user_id, choices) {
    const { DB, logger } = msg.client
    try {
        await DB.Poll_Voted.TABLE.create({
            poll_id,
            user_id,
            choices: JSON.stringify(choices)
        })
        return true
    } catch (e) {
        logger.log("error", `Could not add poll_voted with poll_id ${poll_id} with user ${user_id} with choices ${choices} in database 'Poll_Voted'`)
        logger.log("error", e)
        return false
    }
}
// Fetch a single vote record and parse its stored `choices` JSON back into a
// value. Resolves to the record, or null when no matching vote exists.
async function get(msg, poll_id, user_id) {
    const record = await msg.client.DB.Poll_Voted.TABLE.findOne({ where: { poll_id: poll_id, user_id: user_id } })
    if (!record) {
        msg.client.logger.log("info",`Could not get choices with poll_id ${poll_id} and user_id ${user_id} in database 'Poll_Voted'`)
        return null
    }
    record.choices = JSON.parse(record.choices)
    return record
}
// Overwrite the stored choices for an existing vote.
// Resolves to true when at least one row was updated.
async function set(msg, poll_id, user_id, choices) {
    // FIX: Model.update resolves to an array whose first element is the
    // number of affected rows. The previous truthiness check on the array
    // itself was always true, so failures were never logged or reported.
    const [affectedRows] = await msg.client.DB.Poll_Voted.TABLE.update({ choices: JSON.stringify(choices) }, { where: { poll_id: poll_id, user_id: user_id } })
    if (affectedRows > 0) {
        return true
    }
    msg.client.logger.log("warn", `Could not set choices for poll_id ${poll_id} and user_id ${user_id} in database 'Poll_Voted'`)
    return false
}
// TODO: not implemented — signature suggests removal of vote rows for a
// guild; confirm intended semantics before wiring it up.
async function remove(msg, guild_id) {
}
// ---------------------------------------------
module.exports = { _TABLE, add, get, set, remove }
|
// Smoke/fuzz snippet — presumably a VM regression test; confirm origin.
// `length`/`name` on functions are non-writable, so these assignments are
// silently ignored in sloppy mode; `prototype` assignment does take effect.
clearImmediate.length = {};
clearImmediate.name = {};
clearImmediate.prototype = {};
// Calling clearImmediate with no handle is a no-op.
clearImmediate();
|
(() => {
    /**
     * Opens the VM "login information" modal as soon as the component is
     * initialised, then navigates back to the infrastructure view whether the
     * modal resolves (optionally opening a VNC console) or is dismissed.
     */
    class CloudProjectComputeInfrastructureModalLoginInformationCtrl {
        // NOTE: parameter names double as AngularJS injection tokens — keep them.
        constructor ($uibModal, $state, $stateParams) {
            this.$uibModal = $uibModal;
            this.$state = $state;
            this.$stateParams = $stateParams;
        }

        $onInit () {
            this.openLoginInformations();
        }

        openLoginInformations () {
            const loginModal = this.$uibModal.open({
                templateUrl: "app/cloud/project/compute/infrastructure/virtualMachine/loginInformation/cloud-project-compute-infrastructure-virtual-machine-login-information.html",
                controller: "CloudProjectComputeInfrastructureVirtualMachineLoginInformationCtrl",
                controllerAs: "VmLoginInformationCtrl",
                size: "md",
                resolve: {
                    params: () => ({
                        serviceName: this.$stateParams.projectId,
                        id: this.$stateParams.instanceId,
                        ipAddresses: this.$stateParams.ipAddresses || null,
                        image: this.$stateParams.image || null
                    })
                }
            });

            loginModal.result
                .then((id) => this.$state.go("iaas.pci-project.compute.infrastructure", { openVncWithId: id }))
                .catch(() => this.$state.go("iaas.pci-project.compute.infrastructure"));
        }
    }

    angular.module("managerApp").controller("CloudProjectComputeInfrastructureModalLoginInformationCtrl", CloudProjectComputeInfrastructureModalLoginInformationCtrl);
})();
|
<reponame>jgreffe/junit-toolbox
package com.googlecode.junittoolbox.samples;
import org.junit.experimental.categories.Category;
/**
 * Possible value for {@link Category} annotation.
 * <p>
 * Marker interface (no members): used purely as a category tag so test
 * runners can include or exclude slow tests.
 */
public interface SlowTests {}
|
package mindustry.maps.generators;
import java.util.PriorityQueue;
import arc.func.Intc2;
import arc.math.Mathf;
import arc.math.geom.Geometry;
import arc.math.geom.Point2;
import arc.struct.Array;
import arc.struct.GridBits;
import arc.struct.IntArray;
import arc.struct.IntFloatMap;
import arc.util.Structs;
import arc.util.noise.Simplex;
import mindustry.content.Blocks;
import mindustry.world.Block;
import mindustry.world.Pos;
import mindustry.world.Tile;
import mindustry.world.blocks.Floor;
/**
* 基础地图构建器
* */
/**
 * Basic map generator: reusable procedural-generation passes (ores, terrain,
 * noise obstacles, overlays, distortion, pathfinding, flood fill) built on
 * top of the per-tile floor/block/ore registers inherited from
 * {@link RandomGenerator}.
 */
public abstract class BasicGenerator extends RandomGenerator{
    /** Manhattan (L1) distance heuristic for {@link #pathfind}. */
    protected static final DistanceHeuristic manhattan = (x1, y1, x2, y2) -> Math.abs(x1 - x2) + Math.abs(y1 - y2);
    /** Ore block container: the ore types this generator may place. */
    protected Array<Block> ores;
    // Two independent simplex-noise fields, re-seeded on every generation.
    protected Simplex sim = new Simplex();
    protected Simplex sim2 = new Simplex();

    public BasicGenerator(int width, int height, Block... ores){
        super(width, height);
        this.ores = Array.with(ores);
    }

    @Override
    public void generate(Tile[][] tiles){
        // Fresh seeds so every generated map differs.
        int seed = Mathf.random(99999999);
        sim.setSeed(seed);
        sim2.setSeed(seed + 1);
        super.generate(tiles);
    }

    /** Places map ores: sets the ore register where two noise fields agree. */
    public void ores(Tile[][] tiles){
        pass(tiles, (x, y) -> {
            if(ores != null){
                int offsetX = x - 4, offsetY = y + 23;
                // Iterate rarest-first (highest index) so rarer ores win.
                for(int i = ores.size - 1; i >= 0; i--){
                    Block entry = ores.get(i);
                    if(Math.abs(0.5f - sim.octaveNoise2D(2, 0.7, 1f / (40 + i * 2), offsetX, offsetY + i*999)) > 0.26f &&
                    Math.abs(0.5f - sim2.octaveNoise2D(1, 1, 1f / (30 + i * 4), offsetX, offsetY - i*999)) > 0.37f){
                        ore = entry;
                        break;
                    }
                }
            }
        });
    }

    /** Builds terrain: raises {@code dst} blocks from noise plus a center-distance bias. */
    public void terrain(Tile[][] tiles, Block dst, float scl, float mag, float cmag){
        pass(tiles, (x, y) -> {
            double rocks = sim.octaveNoise2D(5, 0.5, 1f / scl, x, y) * mag
            + Mathf.dst((float)x / width, (float)y / height, 0.5f, 0.5f) * cmag;
            double edgeDist = Math.min(x, Math.min(y, Math.min(Math.abs(x - (width - 1)), Math.abs(y - (height - 1)))));
            double transition = 5;
            // Ramp density up near the map border so edges close off.
            if(edgeDist < transition){
                rocks += (transition - edgeDist) / transition / 1.5;
            }
            if(rocks > 0.9){
                block = dst;
            }
        });
    }

    /** Builds obstacles: above the noise threshold, set the floor and (on solid tiles) the block. */
    public void noise(Tile[][] tiles, Block floor, Block block, int octaves, float falloff, float scl, float threshold){
        sim.setSeed(Mathf.random(99999));
        pass(tiles, (x, y) -> {
            if(sim.octaveNoise2D(octaves, falloff, 1f / scl, x, y) > threshold){
                Tile tile = tiles[x][y];
                this.floor = floor;
                if(tile.block().solid){
                    this.block = block;
                }
            }
        });
    }

    /** Builds an overlay: randomly places {@code block} as ore over matching {@code floor} tiles. */
    public void overlay(Tile[][] tiles, Block floor, Block block, float chance, int octaves, float falloff, float scl, float threshold){
        sim.setSeed(Mathf.random(99999));
        pass(tiles, (x, y) -> {
            if(sim.octaveNoise2D(octaves, falloff, 1f / scl, x, y) > threshold && Mathf.chance(chance) && tiles[x][y].floor() == floor){
                ore = block;
            }
        });
    }

    /** Tech decoration: draws panel borders on a grid of sectors chosen by noise. */
    public void tech(Tile[][] tiles){
        Block[] blocks = {Blocks.darkPanel3};
        int secSize = 20;
        pass(tiles, (x, y) -> {
            int mx = x % secSize, my = y % secSize;
            int sclx = x / secSize, scly = y / secSize;
            // Only decorate the border cells of selected sectors.
            if(noise(sclx, scly, 10f, 1f) > 0.63f && (mx == 0 || my == 0 || mx == secSize - 1 || my == secSize - 1)){
                if(Mathf.chance(noise(x + 0x231523, y, 40f, 1f))){
                    floor = Structs.random(blocks);
                    if(Mathf.dst(mx, my, secSize/2, secSize/2) > secSize/2f + 2){
                        floor = Blocks.darkPanel4;
                    }
                }
                if(block.solid && Mathf.chance(0.7)){
                    block = Blocks.darkMetal;
                }
            }
        });
    }

    /** Distortion: displaces every tile by a noise offset, sampling its neighbor's floor/block. */
    public void distort(Tile[][] tiles, float scl, float mag){
        Block[][] blocks = new Block[width][height];
        Floor[][] floors = new Floor[width][height];
        // First pass: read displaced values into scratch arrays so reads and
        // writes don't interfere.
        each((x, y) -> {
            float cx = x + noise(x, y, scl, mag) - mag / 2f, cy = y + noise(x, y + 1525215f, scl, mag) - mag / 2f;
            Tile other = tiles[Mathf.clamp((int)cx, 0, width-1)][Mathf.clamp((int)cy, 0, height-1)];
            blocks[x][y] = other.block();
            floors[x][y] = other.floor();
        });
        // Second pass: write the scratch values back.
        pass(tiles, (x, y) -> {
            floor = floors[x][y];
            block = blocks[x][y];
        });
    }

    /** Scatter: randomly replaces {@code target} floors/blocks with {@code dst}. */
    public void scatter(Tile[][] tiles, Block target, Block dst, float chance){
        pass(tiles, (x, y) -> {
            if(!Mathf.chance(chance)) return;
            if(floor == target){
                floor = dst;
            }else if(block == target){
                block = dst;
            }
        });
    }

    /** Iterates every (x, y) coordinate of the map. */
    public void each(Intc2 r){
        for(int x = 0; x < width; x++){
            for(int y = 0; y < height; y++){
                r.get(x, y);
            }
        }
    }

    /** Noise helper: scaled octave noise from the second simplex field. */
    protected float noise(float x, float y, float scl, float mag){
        return (float)sim2.octaveNoise2D(1f, 0f, 1f / scl, x + 0x361266f, y + 0x251259f) * mag;
    }

    /**
     * Pass: for each tile, loads floor/block/ore into the generator registers,
     * lets {@code r} mutate them, then rebuilds the tile from the registers.
     */
    public void pass(Tile[][] tiles, Intc2 r){
        for(int x = 0; x < width; x++){
            for(int y = 0; y < height; y++){
                floor = tiles[x][y].floor();
                block = tiles[x][y].block();
                ore = tiles[x][y].overlay();
                r.get(x, y);
                tiles[x][y] = new Tile(x, y, floor.id, ore.id, block.id);
            }
        }
    }

    /** Brush: erases a radius {@code rad} disc around every tile on the path. */
    public void brush(Tile[][] tiles, Array<Tile> path, int rad){
        path.each(tile -> erase(tiles, tile.x, tile.y, rad));
    }

    /** Erase: clears blocks (sets air) in a disc of radius {@code rad} around (cx, cy). */
    public void erase(Tile[][] tiles, int cx, int cy, int rad){
        for(int x = -rad; x <= rad; x++){
            for(int y = -rad; y <= rad; y++){
                int wx = cx + x, wy = cy + y;
                if(Structs.inBounds(wx, wy, width, height) && Mathf.dst(x, y, 0, 0) <= rad){
                    Tile other = tiles[wx][wy];
                    other.setBlock(Blocks.air);
                }
            }
        }
    }

    /**
     * Pathfinder: A*-style search from (startX, startY) to (endX, endY) using
     * a tile cost heuristic {@code th} and a distance heuristic {@code dh}.
     * The path is reconstructed via the rotation field each visited tile
     * stores (pointing back toward its parent). Returns an empty array when
     * no path exists.
     */
    public Array<Tile> pathfind(Tile[][] tiles, int startX, int startY, int endX, int endY, TileHueristic th, DistanceHeuristic dh){
        Tile start = tiles[startX][startY];
        Tile end = tiles[endX][endY];
        GridBits closed = new GridBits(width, height);
        IntFloatMap costs = new IntFloatMap();
        // Priority = accumulated cost + heuristic distance to the goal.
        PriorityQueue<Tile> queue = new PriorityQueue<>(tiles.length * tiles[0].length / 2, (a, b) -> Float.compare(costs.get(a.pos(), 0f) + dh.cost(a.x, a.y, end.x, end.y), costs.get(b.pos(), 0f) + dh.cost(b.x, b.y, end.x, end.y)));
        queue.add(start);
        boolean found = false;
        while(!queue.isEmpty()){
            Tile next = queue.poll();
            float baseCost = costs.get(next.pos(), 0f);
            if(next == end){
                found = true;
                break;
            }
            closed.set(next.x, next.y);
            for(Point2 point : Geometry.d4){
                int newx = next.x + point.x, newy = next.y + point.y;
                if(Structs.inBounds(newx, newy, width, height)){
                    Tile child = tiles[newx][newy];
                    if(!closed.get(child.x, child.y)){
                        closed.set(child.x, child.y);
                        // Store the back-pointer as the tile's rotation.
                        child.rotation(child.relativeTo(next.x, next.y));
                        costs.put(child.pos(), th.cost(child) + baseCost);
                        queue.add(child);
                    }
                }
            }
        }
        Array<Tile> out = new Array<>();
        if(!found) return out;
        // Walk back-pointers from the goal, then reverse into start->end order.
        Tile current = end;
        while(current != start){
            out.add(current);
            Point2 p = Geometry.d4(current.rotation());
            current = tiles[current.x + p.x][current.y + p.y];
        }
        out.reverse();
        return out;
    }

    /**
     * Inverse flood fill: marks all air tiles reachable from {@code start}
     * (cost 2), then fills every UNREACHABLE air tile with {@code block}.
     */
    public void inverseFloodFill(Tile[][] tiles, Tile start, Block block){
        IntArray arr = new IntArray();
        arr.add(start.pos());
        while(!arr.isEmpty()){
            int i = arr.pop();
            int x = Pos.x(i), y = Pos.y(i);
            tiles[x][y].cost = 2;
            for(Point2 point : Geometry.d4){
                int newx = x + point.x, newy = y + point.y;
                if(Structs.inBounds(newx, newy, width, height)){
                    Tile child = tiles[newx][newy];
                    if(child.block() == Blocks.air && child.cost != 2){
                        child.cost = 2;
                        arr.add(child.pos());
                    }
                }
            }
        }
        for(int x = 0; x < width; x ++){
            for(int y = 0; y < height; y++){
                Tile tile = tiles[x][y];
                if(tile.cost != 2 && tile.block() == Blocks.air){
                    tile.setBlock(block);
                }
            }
        }
    }

    /** Distance heuristic between two grid coordinates. */
    public interface DistanceHeuristic{
        float cost(int x1, int y1, int x2, int y2);
    }

    /** Traversal cost of entering a single tile. */
    public interface TileHueristic{
        float cost(Tile tile);
    }
}
|
// Babel helper: guards against using `this` before super() has run.
// Returns `self` unchanged when it is initialised.
function _assertThisInitialized(self) {
  if (self === void 0) {
    throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
  }
  return self;
}
// Babel helper: wires subClass to inherit from superClass (both the
// prototype chain and static members via __proto__).
function _inheritsLoose(subClass, superClass) {
  const proto = Object.create(superClass.prototype);
  proto.constructor = subClass;
  subClass.prototype = proto;
  subClass.__proto__ = superClass;
}
// Babel helper: sets obj[key] = value. Existing keys go through
// Object.defineProperty so the property stays enumerable/configurable/writable.
function _defineProperty(obj, key, value) {
  if (key in obj) {
    Object.defineProperty(obj, key, {
      value: value,
      enumerable: true,
      configurable: true,
      writable: true
    });
  } else {
    obj[key] = value;
  }
  return obj;
}
import React, { Component } from 'react';
import { storiesOf } from '@storybook/react';
import { Grommet, Box, Chart, Keyboard, Stack, Text } from 'grommet';
import { grommet } from 'grommet/themes';
import { calcs } from '../calcs';
import { generateData } from './data';
// NOTE(review): this is Babel-compiled output of a Grommet "Scan" bar-chart
// storybook demo — if an ES6 source exists, edit that instead of this file.
// The component keeps an `active` bar index in state, movable via mouse
// hover and left/right arrow keys (Esc clears it).
var ScanChart =
/*#__PURE__*/
function (_Component) {
  _inheritsLoose(ScanChart, _Component);

  function ScanChart() {
    var _this;

    // Forward all constructor arguments to React.Component.
    for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
      args[_key] = arguments[_key];
    }

    _this = _Component.call.apply(_Component, [this].concat(args)) || this;

    _defineProperty(_assertThisInitialized(_this), "state", {});

    _defineProperty(_assertThisInitialized(_this), "valueRefs", []);

    return _this;
  }

  // Derive axis/bounds/padding/thickness from the incoming data via calcs().
  ScanChart.getDerivedStateFromProps = function getDerivedStateFromProps(nextProps, prevState) {
    var data = nextProps.data,
        max = nextProps.max;
    var active = prevState.active; // convert data to chart coordinates

    var values = data.map(function (d) {
      return [d.time, d.value];
    });

    var _calcs = calcs(values, {
      min: 0,
      max: max
    }),
        axis = _calcs.axis,
        bounds = _calcs.bounds,
        pad = _calcs.pad,
        thickness = _calcs.thickness;

    return {
      active: active,
      axis: axis,
      bounds: bounds,
      pad: pad,
      thickness: thickness,
      values: values
    };
  };

  var _proto = ScanChart.prototype;

  _proto.render = function render() {
    var _this2 = this;

    var data = this.props.data;
    var _this$state = this.state,
        active = _this$state.active,
        axis = _this$state.axis,
        bounds = _this$state.bounds,
        pad = _this$state.pad,
        thickness = _this$state.thickness,
        values = _this$state.values;
    // Keyboard wrapper: arrow keys move the active bar, Esc deselects.
    return React.createElement(Grommet, {
      theme: grommet
    }, React.createElement(Keyboard, {
      onLeft: function onLeft() {
        return _this2.setState({
          active: Math.max(0, active - 1)
        });
      },
      onRight: function onRight() {
        return _this2.setState({
          active: Math.min(data.length - 1, active + 1)
        });
      },
      onEsc: function onEsc() {
        return _this2.setState({
          active: undefined
        });
      }
    }, React.createElement(Box, {
      tabIndex: "0",
      direction: "row",
      margin: "large"
    }, React.createElement(Box, {
      width: "xxsmall"
    }, React.createElement(Box, {
      flex: true,
      justify: "between"
    }, React.createElement(Box, {
      border: "top",
      align: "end"
    }, React.createElement(Box, {
      pad: "xsmall",
      background: {
        color: 'white',
        opacity: 'medium'
      }
    }, React.createElement(Text, null, axis[1][0]))), React.createElement(Box, {
      border: "bottom",
      align: "end"
    }, React.createElement(Box, {
      pad: "xsmall",
      background: {
        color: 'white',
        opacity: 'medium'
      }
    }, React.createElement(Text, null, axis[1][1])))), React.createElement(Box, {
      height: "xxsmall",
      flex: false
    })), React.createElement(Box, {
      width: "large"
    }, React.createElement(Stack, {
      guidingChild: "first"
    }, React.createElement(Box, {
      pad: {
        horizontal: pad
      }
    }, React.createElement(Chart, {
      type: "bar",
      overflow: true,
      bounds: bounds,
      values: values,
      thickness: thickness,
      size: {
        width: 'full',
        height: 'small'
      }
    })), React.createElement(Box, {
      fill: true,
      direction: "row",
      justify: "between"
    }, values.map(function (v, i) {
      // One hover/focus target per bar; the active one shows a tooltip.
      return React.createElement(Box, {
        flex: false,
        key: v[0]
      }, React.createElement(Stack, {
        fill: true,
        anchor: "center",
        interactiveChild: "first"
      }, React.createElement(Box, {
        fill: true,
        pad: pad,
        ref: function ref(_ref) {
          _this2.valueRefs[i] = _ref;
        },
        background: active === i ? {
          color: 'dark-5',
          opacity: 'medium'
        } : undefined,
        onMouseOver: function onMouseOver() {
          return _this2.setState({
            active: i
          });
        },
        onMouseOut: function onMouseOut() {
          return _this2.setState({
            active: undefined
          });
        },
        onFocus: function onFocus() {},
        onBlur: function onBlur() {}
      }), active === i && React.createElement(Box, {
        animation: {
          type: 'fadeIn',
          duration: 100
        },
        width: "xsmall",
        pad: "small",
        round: "small",
        background: "dark-3"
      }, React.createElement(Text, {
        size: "large"
      }, data[active].value), React.createElement(Text, {
        size: "small"
      }, new Date(data[active].time).toLocaleDateString()))));
    }))), React.createElement(Box, {
      height: "xxsmall",
      direction: "row",
      justify: "between",
      align: "center"
    }, axis[0].map(function (t) {
      return React.createElement(Text, {
        key: t
      }, new Date(t).toLocaleDateString());
    }))))));
  };

  return ScanChart;
}(Component);
// Register the demo under Storybook's "Chart" section with generated data.
storiesOf('Chart', module).add('Scan', function () {
  return React.createElement(ScanChart, {
    data: generateData(30, 100),
    max: 100
  });
});
# **************************************************************************** #
# #
# ::: :::::::: #
# activation.sh :+: :+: :+: #
# +:+ +:+ +:+ #
# By: jtoty <jtoty@student.42.fr> +#+ +:+ +#+ #
# +#+#+#+#+#+ +#+ #
# Created: 2019/11/30 18:26:19 by jtoty #+# #+# #
# Updated: 2019/11/30 18:26:20 by jtoty ### ########.fr #
# #
# **************************************************************************** #
#!/bin/bash
# Activate functions that will be tested
activate_functions()
{
	# For every test part listed in tab_all_part...
	for part in ${tab_all_part[*]}
	do
		# Build the option variable name OPT_NO_<PART>: upper-cased with the
		# last five characters stripped (rev | cut -c 6- | rev) — presumably
		# a "_func"-style suffix; confirm against tab_all_part contents.
		opt_part=$(echo OPT_NO_${part} | tr '[:lower:]' '[:upper:]' | rev | cut -c 6- | rev)
		# Only activate when the part has NOT been opted out (option == 0)
		if [ ${!opt_part} -eq 0 ]
		then
			p=0
			# Indirect expansion of the array named after the part, to count
			# how many functions it declares.
			tab_part=$(echo ${part}[*])
			nb_func=$(echo ${!tab_part} | wc -w)
			if [ ${part} != "Additional_func" ]
			then
				# Regular parts: activate every function unconditionally.
				while (( p < ${nb_func} ))
				do
					(( ${part}_activation[$p]=1 ))
					(( p += 1 ))
				done
			else
				# Additional functions: tentatively opt the part out, then
				# activate only functions actually found in the library;
				# finding at least one re-enables the part.
				(( ${opt_part}=1 ))
				while (( p < ${nb_func} ))
				do
					func_name=$(echo ${part}[$p])
					func_name=${!func_name}
					if [[ -n $(echo ${LIB_CONTENT} | grep -w $(echo ${func_name})) ]]
					then
						(( ${part}_activation[$p]=1 ))
						(( ${opt_part}=0 ))
					fi
					(( p += 1 ))
				done
			fi
		fi
	done
}
# Activate part if opt_no_part is equal to 0
activate_part()
{
	for part in ${tab_all_part[*]}
	do
		# Derive OPT_NO_<PART> / ACTIVATE_<PART> variable names: upper-cased,
		# last five characters stripped (same suffix convention as above).
		opt_part=$(echo OPT_NO_${part} | tr '[:lower:]' '[:upper:]' | rev | cut -c 6- | rev)
		activate_part=$(echo ACTIVATE_${part} | tr '[:lower:]' '[:upper:]' | rev | cut -c 6- | rev)
		# Enable the part unless its "no-<part>" option was set
		if [ ${!opt_part} -eq 0 ]
		then
			(( ${activate_part}=1 ))
		fi
	done
}
|
<reponame>BenjaminViranin/Snake-AI-Project<gh_stars>0
#include "include/manager/Game_Manager.h"
// Program entry point: runs the game loop and forwards its status code to the
// operating system. Command-line arguments are currently unused.
int main(int argc, const char* argv[])
{
    static_cast<void>(argc);
    static_cast<void>(argv);
    Game_Manager game;
    return game.Run();
}
// Thin wrapper around a plain array of integers with aggregate helpers.
class IntegerArray {
  constructor(arr) {
    this.array = arr;
  }

  // Sum of all elements; 0 for an empty array.
  sum() {
    return this.array.reduce((total, value) => total + value, 0);
  }
}
#include <stdio.h>
// A utility function to swap two elements
/* Exchanges the integers pointed to by a and b. */
void swap(int* a, int* b)
{
    const int tmp = *a;
    *a = *b;
    *b = tmp;
}
/* This function takes last element as pivot, places
the pivot element at its correct position in sorted
array, and places all smaller (smaller than pivot)
to left of pivot and all greater elements to right
of pivot */
/* Lomuto partition: uses arr[high] as the pivot, moves every element <= pivot
   to its left, places the pivot at its final sorted position, and returns
   that position. */
int partition (int arr[], int low, int high)
{
    const int pivot = arr[high];
    int boundary = low - 1;          /* last index of the <= pivot region */
    int scan;

    for (scan = low; scan < high; scan++)
    {
        if (arr[scan] <= pivot)
        {
            boundary++;
            swap(&arr[boundary], &arr[scan]);
        }
    }
    /* Drop the pivot just after the <= region. */
    swap(&arr[boundary + 1], &arr[high]);
    return boundary + 1;
}
/* The main function that implements QuickSort
arr[] --> Array to be sorted,
low --> Starting index,
high --> Ending index */
/* Sorts arr[low..high] in place via recursive quicksort.
   arr  --> array to be sorted
   low  --> starting index
   high --> ending index */
void quickSort(int arr[], int low, int high)
{
    if (low >= high)
        return;                          /* zero or one element: done */

    const int pivotIndex = partition(arr, low, high);
    quickSort(arr, low, pivotIndex - 1); /* left of the pivot */
    quickSort(arr, pivotIndex + 1, high);/* right of the pivot */
}
/* Function to print an array */
/* Prints the first `size` elements of arr separated by spaces, followed by a
   newline. */
void printArray(int arr[], int size)
{
    int i;
    for (i=0; i < size; i++)
        printf("%d ", arr[i]);
    /* FIX: the original contained a literal line break inside the string
       literal (invalid C); the intent was a newline escape. */
    printf("\n");
}
// Driver program to test above functions
/* Driver program: sorts a sample array and prints the result. */
int main()
{
    int arr[] = {10, 7, 8, 9, 1, 5};
    int n = sizeof(arr)/sizeof(arr[0]);
    quickSort(arr, 0, n-1);
    /* FIX: "Sorted array: n" printed a stray 'n'; a newline was intended. */
    printf("Sorted array: \n");
    printArray(arr, n);
    return 0;
}
#pragma once
#include "myfibo_export.hpp"
#include "fibo_define.hpp"
// Exported test functions exercising binding support for various parameter
// and return conventions; the trailing columns record observed behavior when
// called from R and Python (legend in the comment block below).
// // R Python
// Global functions
MYFIBO_EXPORT int testInt(int a); // OK OK
MYFIBO_EXPORT int testIntPtr(const int* a); // OK X_p1
MYFIBO_EXPORT int* testIntCreate(int a); // X_r1 X_p5
MYFIBO_EXPORT VectorInt testVectorInt(const VectorInt& a); // OK OK
MYFIBO_EXPORT VectorInt testVectorIntPtr(const VectorInt* a); // X_r0 X_p2
MYFIBO_EXPORT VectorInt* testVectorIntCreate(const VectorInt& a); // X_r3 OK
MYFIBO_EXPORT String testString(const String& a); // OK OK
MYFIBO_EXPORT String testStringPtr(const String* a); // X_r0 X_p3
MYFIBO_EXPORT String* testStringCreate(const String& a); // X_r2 X_p5
MYFIBO_EXPORT VectorString testVectorString(const VectorString& a); // OK OK
MYFIBO_EXPORT VectorString testVectorStringPtr(const VectorString* a); // X_r0 X_p4
MYFIBO_EXPORT VectorString* testVectorStringCreate(const VectorString& a); // OK OK
/*
X_r0: R Crash with SEGV in SWIG_R_ConvertPtr
X_r1: Arg OK but return value NOK:
Erreur dans getClass(Class, where = topenv(parent.frame())) :
“_p_int” is not a defined class
X_r2: Arg OK but return value NOK:
Erreur dans getClass(Class, where = topenv(parent.frame())) :
“_p_std__string” is not a defined class
X_r3: Call OK, but object returned is externalptr (TODO : so why testVectorStringCreate is Ok ?):
Erreur dans vi[1] : objet de type 'externalptr' non indiçable
X_p1: TypeError: in method 'testIntPtr', argument 1 of type 'int const *'
X_p2: TypeError: in method 'testVectorIntPtr', argument 1 of type 'VectorInt const *'
X_p3: TypeError: in method 'testStringPtr', argument 1 of type 'String const *'
X_p4: TypeError: in method 'testVectorStringPtr', argument 1 of type 'VectorString const *'
X_p5: Call OK but object returned is SwigPyObject (TODO : so why testVectorIntCreate is Ok ?):
i != 32 returns True
*/
|
# 4x4 game board, initialised with empty cells.
board = [['' for _ in range(4)] for _ in range(4)]


def print_board(board):
    """Render the board one row per line: each cell as '|<cell> ', closed by '|'."""
    for row in board:
        print("".join('|' + cell + ' ' for cell in row) + "|")


print_board(board)
<gh_stars>1-10
from db import db
import datetime
import logging
video_coll = db['video']
# Thin the historical stats of popular videos (> 100k current views): for
# snapshots older than 7 days, keep at most one entry per hour.
videos = video_coll.find({'cView': {'$gt': 100000}})
for each_video in videos:
    try:
        # Skip malformed documents and ones with too little history to thin.
        if 'aid' not in each_video:
            continue
        if 'data' not in each_video or len(each_video['data']) < 100:
            continue
        print(each_video['aid'])
        aid = each_video['aid']
        # Newest snapshot first.
        data = sorted(each_video['data'],
                      key=lambda x: x['datetime'], reverse=True)
        c_data = data[0]
        c_date = data[0]['datetime'].strftime('%Y-%m-%d %H')
        f_data = [c_data]
        for each_data in data:
            # Snapshot age in days; stored datetimes appear to be UTC+8
            # (hence the +8h shift) — confirm against the writer.
            delta_day = (datetime.datetime.utcnow() + datetime.timedelta(hours=8) -
                         each_data['datetime']).days
            if delta_day > 7:
                n_date = each_data['datetime'].strftime('%Y-%m-%d %H')
                # if not within the same hour as the last kept snapshot
                if n_date != c_date:
                    f_data.append(each_data)
                    c_date = n_date
                    pass
                pass
            else:
                # Recent snapshots (<= 7 days) are all kept.
                f_data.append(each_data)
        video_coll.update_one({'aid': aid}, {'$set': {'data': f_data}})
        pass
    except Exception as e:
        # Best-effort batch job: log and continue with the next video.
        logging.exception(e)
        pass
|
/* Copyright 2018 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package xyz.niflheim.stockfish.engine.enums;
/**
 * Variant of Stockfish process.
 * <p>
 * NOTE(review): the names suggest builds targeting specific CPU instruction
 * sets (BMI2, POPCNT) — confirm against the shipped binaries.
 *
 * @author Niflheim
 * @since 1.0
 */
public enum Variant {
    /**
     * Works on Unix and Windows machines
     */
    DEFAULT,
    /**
     * Works on Unix and Windows machines
     */
    BMI2,
    /**
     * Works on Windows machines
     */
    POPCNT,
    /**
     * Works on Unix machines
     */
    MODERN
}
|
<filename>test/models/fe/businessactivities/AccountantForAMLSRegulationsSpec.scala
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package models.fe.businessactivities
import generators.supervision.BusinessActivityGenerators
import models.des.businessactivities.{BusinessActivitiesAll, MlrAdvisor}
import org.scalatestplus.mockito.MockitoSugar
import org.scalatestplus.play.PlaySpec
import play.api.libs.json.{JsSuccess, Json}
import org.mockito.Mockito.when
import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks
// Tests the JSON round-trip of AccountantForAMLSRegulations and the
// conversion from the DES MlrAdvisor representation.
class AccountantForAMLSRegulationsSpec extends PlaySpec
  with MockitoSugar
  with BusinessActivityGenerators
  with ScalaCheckPropertyChecks {

  "JSON validation" must {
    "successfully validate given an `true` value" in {
      val json = Json.obj("accountantForAMLSRegulations" -> true)
      Json.fromJson[AccountantForAMLSRegulations](json) must
        be(JsSuccess(AccountantForAMLSRegulations(true)))
    }
    "successfully validate given an `false` value" in {
      val json = Json.obj("accountantForAMLSRegulations" -> false)
      Json.fromJson[AccountantForAMLSRegulations](json) must
        be(JsSuccess(AccountantForAMLSRegulations(false)))
    }
    "write the correct value given an NCARegisteredYes" in {
      Json.toJson(AccountantForAMLSRegulations(true)) must
        be(Json.obj("accountantForAMLSRegulations" -> true))
    }
    "write the correct value given an NCARegisteredNo" in {
      Json.toJson(AccountantForAMLSRegulations(false)) must
        be(Json.obj("accountantForAMLSRegulations" -> false))
    }
  }

  "convertAccountant" must {
    "return the data if it is supplied" in {
      forAll(activityGen) { mlrActivities =>
        val result = AccountantForAMLSRegulations.convertAccountant(Some(MlrAdvisor(doYouHaveMlrAdvisor = true, None)), Some(mlrActivities))
        result must contain(AccountantForAMLSRegulations(true))
      }
    }
    // Missing advisor data maps to None for ASPs but to an explicit "No"
    // otherwise — see the two cases below.
    "return None if there is no MLR Advisor data" when {
      "the application is an ASP" in {
        forAll(activityGen) { mlrActivities =>
          val ba = mock[BusinessActivitiesAll]
          when(ba.mlrAdvisor) thenReturn None
          val result = AccountantForAMLSRegulations.convertAccountant(None, Some(mlrActivities.copy(asp = true)))
          result must not be defined
        }
      }
    }
    "return 'No' if there is no MLR Advisor data" when {
      "the application is not an ASP" in {
        forAll(activityGen) { mlrActivities =>
          val ba = mock[BusinessActivitiesAll]
          when(ba.mlrAdvisor) thenReturn None
          val result = AccountantForAMLSRegulations.convertAccountant(None, Some(mlrActivities.copy(asp = false)))
          result must contain(AccountantForAMLSRegulations(false))
        }
      }
    }
  }
}
|
<gh_stars>1-10
var gcd = require('../../index').gcd;
var expect = require('chai').expect;
describe('Eucliden and Extended Eucliden algorithm', () => {
  // _T asserts gcd(a, b) === d; T runs the same check with both argument
  // orders, verifying gcd is symmetric.
  var _T = (a, b, d) => expect(gcd(a, b)).to.equal(d);
  var T = (a, b, d) => (_T(a, b, d), _T(b, a, d));
  it('finds greatest common divisor of two integers', () => {
    T(1, 0, 1);
    T(1, 1, 1);
    T(6, 6, 6);
    T(6, 12, 6);
    T(12, 18, 6);
  });
});
|
#!/bin/bash
# Container entrypoint: fixes /mnt ownership as root, then (as the keybase
# user) logs in with a paper key, mounts KBFS and generates CA key material.
set -euo pipefail
IFS=$'\n\t'
# chown as root
chown -R keybase:keybase /mnt
# Run everything else as the keybase user
# (heredoc body is executed by the keybase user's login shell — left as-is)
sudo -i -u keybase bash << EOF
export "FORCE_WRITE=$FORCE_WRITE"
export "TEAMS=$TEAMS"
export "KEYBASE_USERNAME=$KEYBASE_USERNAME"
export "KEYBASE_PAPERKEY=$KEYBASE_PAPERKEY"
nohup bash -c "KEYBASE_RUN_MODE=prod kbfsfuse /keybase | grep -v 'ERROR Mounting the filesystem failed' &"
sleep ${KEYBASE_TIMEOUT:-5}
keybase oneshot
bin/keybaseca generate
EOF
|
package yaml
import (
"fmt"
"github.com/tony84727/diplomat/pkg/data"
"gopkg.in/yaml.v2"
)
// TranslationParser lazily parses YAML translation content into the
// project's data.Translation tree.
type TranslationParser struct {
	content []byte           // raw YAML input
	root    data.Translation // cached parse result; nil until parse() succeeds
}

// translationFile is an order-preserving view of the YAML mapping.
type translationFile yaml.MapSlice
// GetTranslation returns the parsed translation tree, parsing the content on
// first use and caching the result for subsequent calls.
func (p *TranslationParser) GetTranslation() (data.Translation, error) {
	if p.root == nil {
		if err := p.parse(); err != nil {
			return nil, err
		}
	}
	return p.root, nil
}
// parse unmarshals the YAML content and populates p.root with the resulting
// translation tree. p.root is only assigned on full success.
func (p *TranslationParser) parse() error {
	var parsed translationFile
	if err := yaml.Unmarshal(p.content, &parsed); err != nil {
		return err
	}
	tree := data.NewTranslation("")
	if err := p.assignTranslations(tree, parsed); err != nil {
		return err
	}
	p.root = tree
	return nil
}
// assignTranslations converts each parsed YAML entry into a data.Translation
// node attached to root: nested mappings recurse, string values become leaf
// texts. Returns an error for non-string keys or unsupported value types.
func (p TranslationParser) assignTranslations(root data.Translation, input translationFile) error {
	for _, item := range input {
		stringKey, ok := item.Key.(string)
		if !ok {
			return fmt.Errorf("unexpected %v", input)
		}
		current := data.NewTranslation(stringKey)
		switch v := item.Value.(type) {
		case yaml.MapSlice:
			// FIX: nested mappings decode as yaml.MapSlice, not as the
			// defined type translationFile, so the original case never
			// matched them; also propagate the recursive error, which was
			// previously discarded.
			if err := p.assignTranslations(current, translationFile(v)); err != nil {
				return err
			}
		case translationFile:
			if err := p.assignTranslations(current, v); err != nil {
				return err
			}
		case string:
			current.SetText(v)
		default:
			return fmt.Errorf("unexpected %v(%T)", v, v)
		}
		root.AddChild(current)
	}
	return nil
}
// NewParser returns a TranslationParser over the given YAML bytes; parsing is
// deferred until GetTranslation is first called.
func NewParser(content []byte) *TranslationParser {
	return &TranslationParser{content: content}
}
|
#!/bin/bash
# pass wig file through, add missing links
# Reads records on stdin ("gcid=guid", coordinates, D/T, extra field) and,
# for placeholder GUIDs starting with "_", tries to recover the real GUID by
# scraping a Wherigo cartridge search around the coordinates.
cat - \
	| sed 's~/not chosen~/not_chosen~' \
	| tr '=' ' ' \
	| while read gcid guid lat lon dts dummy
do
	case $guid in
	# only overwrite if no GUID exists (may be wrong else)
	_*)
		# get from wherigo search
		guid=`
		lynx -source "http://www.wherigo.com/search/results.aspx?searchlat=$lat&searchlon=$lon&stype=8&rad=2" \
		| grep /cartridge/details \
		| grep -v Try.out.the.tutorial \
		| sed -e 's~.*CGUID=~~' -e "s~'.*~~" \
		| head -n1`
		[ -z "$guid" ] && guid="_even_a_wherigo_search_gave_nothing_"
		;;
	*)
		;;
	esac
	# Re-emit the record in its original tab-separated shape
	echo -e "$gcid=$guid\t$lat $lon\t$dts\t$dummy"
done \
	| sed 's~/not_chosen~/not chosen~'
|
<reponame>lineality/Coding-Challenges-Study-Practice
# polygon shape
#
# (User's) Problem
# We Have:
# n
# We Need:
# the area of a new type of polygon given: n
# e.g.
# n1 = 1
# n2 = 2*2 + 1
# n3 = 3*3 + 2*2
# n4 = 4*4 + 3*3
# We Must:
# return the area as an integer
# calculate within 4 seconds
#
# Solution (Product)
# A geometry of rotated tiles:
# while a normal square figure has the volume: side^2
# this kind of shape has an 'inner diagonal' space
# that is (n-1)^2 in volume
# kind of like a partial second dimension?
# so the final volume is the sum of those two volumes n^2 + (n-1)^2
def shapeArea(n):
    """Area of the order-n rotated-tile polygon.

    The figure is a square of side n plus an inner diagonal layer of
    (n - 1)^2 tiles, so the total area is n^2 + (n - 1)^2.
    """
    return n * n + (n - 1) * (n - 1)
|
<reponame>Yash-10/numbakit-ode
"""
benchmarks.against_scipy
~~~~~~~~~~~~~~~~~~~~~~~~
Comparisons using SciPy as a gold standard.
:copyright: 2020 by nbkode Authors, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import numpy as np
from scipy import integrate
import nbkode
from nbkode.nbcompat import numba
from .common import NumbaStepModes
# Maps package name -> integrator name -> solver class, so benchmarks can be
# parameterized symmetrically across SciPy and nbkode.
by_name = {
    "scipy": {
        "RungeKutta23": integrate.RK23,
        "RungeKutta45": integrate.RK45,
        "DOP853": integrate.DOP853,
    },
    "nbkode": {
        "RungeKutta23": nbkode.RungeKutta23,
        "RungeKutta45": nbkode.RungeKutta45,
        "DOP853": nbkode.DOP853,
    },
}
# Initial condition shared by every benchmark run.
y0 = np.atleast_1d(1.0)


def create_f(package):
    """Return (rhs, warmup_args) for the exponential-decay problem dy/dt = k*y.

    nbkode receives extra parameters explicitly, so its RHS takes ``k`` as an
    argument; the SciPy variant closes over the constant instead.
    """
    if package == "nbkode":

        def f(t, y, k):
            return k * y

        return f, (0.0, y0, -0.01)

    k = -0.01

    def f(t, y):
        return k * y

    return f, (0.0, y0)
PACKAGES = tuple(by_name.keys())
INTEGRATORS = tuple(by_name["scipy"].keys())
BOOLEANS = (True, False)
NUMBA_MODES = tuple(NumbaStepModes.__members__.keys())
func = None
def define_func(package, jit_rhs_before):
    """(Re)create the module-level RHS ``func`` for ``package``.

    When ``jit_rhs_before`` is true the RHS is wrapped with numba.njit up
    front; otherwise it stays plain Python.  The trailing call both
    sanity-checks the function and triggers numba compilation outside the
    timed region.
    """
    global func
    if jit_rhs_before:
        func, args = create_f(package)
        func = numba.njit()(func)
    else:
        func, args = create_f(package)
    # Test (and compile) func
    func(*args)
# Module-level solver instance shared between setup and timing functions.
sol = None
def define_sol(package, integrator):
    """(Re)create the module-level solver ``sol`` from the prepared ``func``.

    nbkode receives the decay constant via ``params``; scipy's solvers close
    over it instead and require an explicit (large) ``t_bound``.
    """
    global sol, func
    solver_cls = by_name[package][integrator]
    if package == "nbkode":
        sol = solver_cls(func, 0.0, y0, params=(-0.01,))
    else:
        sol = solver_cls(func, 0.0, y0, t_bound=10_000_000_000)
###############
# Instantiate
###############
def setup_time_f1_instantiate(package, integrator, jit_rhs_before):
    # asv setup hook: prepare the RHS so only instantiation is timed.
    define_func(package, jit_rhs_before)
def time_f1_instantiate(package, integrator, jit_rhs_before):
    """Measures the time required to instantiate the solver"""
    define_sol(package, integrator)
# asv reads these attributes to build the benchmark parameter matrix.
time_f1_instantiate.setup = setup_time_f1_instantiate
time_f1_instantiate.params = (PACKAGES, INTEGRATORS, BOOLEANS)
time_f1_instantiate.param_names = ["package", "integrator", "jit_rhs_before"]
###############
# First Step
###############
def setup_time_f1_first_step(package, integrator):
    # Always jit the RHS up front so the first step does not time compilation.
    define_func(package, True)
    define_sol(package, integrator)
def time_f1_first_step(package, integrator):
    # Time a single integration step on a freshly built solver.
    sol.step()
time_f1_first_step.setup = setup_time_f1_first_step
time_f1_first_step.params = (PACKAGES, INTEGRATORS)
time_f1_first_step.param_names = ["package", "integrator"]
###############
# Run 10k
###############
def setup_time_f1_run10k(package, integrator, other):
    """Prepare the solver for the 10k-step run.

    Raising NotImplementedError from an asv setup hook marks that parameter
    combination as skipped: scipy has no compiled internal n-step loop.
    """
    if other == NumbaStepModes.INTERNAL_LOOP.name and package == "scipy":
        raise NotImplementedError
    define_func(package, True)
    define_sol(package, integrator)
    sol.step()
    if package == "nbkode":
        # warm up _nsteps
        sol.step(n=2)
def time_f1_run10k(package, integrator, other):
    # Either one compiled internal loop of 10k steps, or 10k Python-level calls.
    if other == NumbaStepModes.INTERNAL_LOOP.name:
        sol.step(n=10_000)
    else:
        for n in range(10_000):
            sol.step()
time_f1_run10k.setup = setup_time_f1_run10k
time_f1_run10k.params = (
    PACKAGES,
    INTEGRATORS,
    (NumbaStepModes.INTERNAL_LOOP.name, NumbaStepModes.EXTERNAL_LOOP.name),
)
time_f1_run10k.param_names = ["package", "integrator", "other"]
|
from typing import List
def sum_of_even_numbers(nums: List[int]) -> int:
    """Return the sum of the even integers in ``nums`` (0 for an empty list)."""
    total = 0
    for value in nums:
        if value % 2 == 0:
            total += value
    return total
///<reference path='../node_modules/@types/polymer/index.d.ts'/>
///<reference path='crystal.ts'/>
module crystal.elements {
    // <js-xtal-init>: declarative initializer element.  On attach it evaluates
    // the actions found in its inner content (evalInner is declared in
    // crystal.ts) and applies them to a neighbouring target element.
    Polymer({
        is: 'js-xtal-init',
        properties:{
            // Optional CSS selector: when set, actions are applied to this
            // descendant of the resolved target instead of the target itself.
            innerTarget:{
                type: String
            },
            // When true, a plain (non data-bound) next sibling may be used as
            // the target even if it lacks a Polymer-style set() method.
            allowNativeElementAsTarget: {
                type: Boolean
            }
        },
        attached: function() {
            const actions = evalInner(this);
            // Prefer the next non-<script> sibling when it looks like a
            // Polymer element (exposes set()) or native targets are allowed.
            let target = nextNonScriptSibling(this);
            if((target && target['set']) || this.allowNativeElementAsTarget) {
                this.processTarget(target, actions);
            }else{
                // Otherwise retry asynchronously for a dom-bind element,
                // which may not exist yet at attach time.
                this.async(() => {
                    target = nextDomBindElement(this);
                    this.processTarget(target, actions);
                }, 1);
            }
        },
        processTarget: function(target: Element, actions){
            if (this.innerTarget) {
                target = target.querySelector(this.innerTarget);
            }
            // performCustElActions is declared in crystal.ts.
            performCustElActions(actions, target as HTMLElement);
        }
    });
}
|
#!/bin/bash
#
# Welcome to the VirtuOps™ Pilot installer! In this installer you will need
#
# - A License file
# - PHP version 5.5+, can be installed with yum, check out this page (https://webtatic.com/projects/yum-repository/)
# - Perl version 5.8+ with JSON module (can be installed with yum).
# - cURL, can be installed with yum as well
# - MariaDB 10+, yum repos available (run the curl command shown on this page to install https://mariadb.com/kb/en/mariadb/mariadb-package-repository-setup-and-usage/).
#
# The installer will do the following:
# Validate you have the right version of PHP (5.5 or higher)
# Install the database and tables for VirtuOps™ Pilot
# Update your config.ini
#
# Do NOT move this script from this location
replaceCmd(){
    # replaceCmd PATTERN REPLACEMENT FILE
    # In-place sed substitution of every PATTERN match with REPLACEMENT in
    # FILE.  '~' is the s/// delimiter because replacements here are often
    # paths/URLs containing '/'.  The file argument is quoted so paths with
    # spaces do not word-split (the original passed $3 unquoted).
    sed -i "s~$1~$2~g" "$3"
}
installPreReqs(){
    # Install the web server, PHP 7, MariaDB 10.2, nginx and the Perl JSON
    # module for the distro selected in beginInstallNote ($release).
    if [ "$release" == "centos6" ]; then
        yum -y install epel-release
        yum -y install curl
        yum -y install unzip
        yum -y install nginx nginx-all-modules
        cat <<EOF > /etc/yum.repos.d/mariadb.repo
# MariaDB 10.2 CentOS repository list - created 2017-11-05 23:54 UTC
# http://downloads.mariadb.org/mariadb/repositories/
[mariadb]
name = MariaDB
baseurl = http://yum.mariadb.org/10.2/centos6-amd64
gpgkey=https://yum.mariadb.org/RPM-GPG-KEY-MariaDB
gpgcheck=1
EOF
        yum update
        yum -y install MariaDB-server MariaDB-client
        yum -y install httpd
        rpm -Uvh https://mirror.webtatic.com/yum/el6/latest.rpm
        yum -y install php70w php70w-mysqlnd php70w-mcrypt php70w-pgsql php70w-opcache php70w-ldap php70w-gd php70w-bcmath php70w-cli php70w-dba
        yum -y install perl-JSON
        service httpd restart
        service mysql restart
        mysql_secure_installation
    fi
    # Bug fix: the prompt in beginInstallNote tells the operator to type
    # "centos7", but this branch only matched the literal "7", so answering
    # "centos7" installed nothing.  Accept both spellings.
    if [ "$release" == "centos7" ] || [ "$release" == "7" ]; then
        yum -y install epel-release
        yum -y install curl
        yum -y install unzip
        yum -y install nginx nginx-all-modules
        cat <<EOF > /etc/yum.repos.d/mariadb.repo
# MariaDB 10.2 CentOS repository list - created 2017-11-06 01:04 UTC
# http://downloads.mariadb.org/mariadb/repositories/
[mariadb]
name = MariaDB
baseurl = http://yum.mariadb.org/10.2/centos7-amd64
gpgkey=https://yum.mariadb.org/RPM-GPG-KEY-MariaDB
gpgcheck=1
EOF
        yum update
        yum -y install MariaDB-server MariaDB-client
        yum -y install httpd
        rpm -Uvh https://mirror.webtatic.com/yum/el7/webtatic-release.rpm
        yum -y install php70w php70w-mysqlnd php70w-mcrypt php70w-pgsql php70w-opcache php70w-ldap php70w-gd php70w-bcmath php70w-cli php70w-dba
        yum -y install perl-JSON
        service httpd restart
        service mysql restart
        mysql_secure_installation
    fi
    if [ "$release" == "ubuntu" ]; then
        apt-get update
        apt-get upgrade
        apt-get -y install curl
        apt-get -y install unzip
        echo "deb http://nginx.org/packages/ubuntu/ `lsb_release -cs` nginx" >> /etc/apt/sources.list.d/nginx.list
        curl http://nginx.org/keys/nginx_signing.key | apt-key add -
        apt-get update
        apt-get -y install mariadb-server mariadb-client
        mysql_secure_installation
        apt-get -y install apache2
        apt-get -y install php libapache2-mod-php
        apt-get -y install php5 libapache2-mod-php5
        apt-get -y install php5-mysqlnd php5-curl php5-gd php5-intl php-pear php5-imagick php5-imap php5-mcrypt php5-memcache php5-pspell php5-recode php5-sqlite php5-tidy php5-xmlrpc php5-xsl php5-apcu
        apt-get -y install php7.0-mysqlnd php7.0-curl php7.0-gd php7.0-intl php7.0-pear php7.0-imagick php7.0-imap php7.0-mcrypt php7.0-memcache php7.0-pspell php7.0-recode php7.0-sqlite php7.0-tidy php7.0-xmlrpc php7.0-xsl php7.0-apcu
        service apache2 restart
        apt-get -y install libjson-perl
        apt-get -y install perl-doc
        # NOTE(review): mysql_secure_installation runs a second time here --
        # presumably a copy/paste leftover; kept for behavior parity.
        mysql_secure_installation
    fi
    if [ "$release" == "debian" ]; then
        apt-get update
        apt-get upgrade
        apt-get -y install curl
        apt-get -y install unzip
        echo "deb http://nginx.org/packages/debian/ `lsb_release -cs` nginx" >> /etc/apt/sources.list.d/nginx.list
        echo "deb-src http://nginx.org/packages/debian/ `lsb_release -cs` nginx" >> /etc/apt/sources.list.d/nginx.list
        curl http://nginx.org/keys/nginx_signing.key | apt-key add -
        apt-get update
        apt-get -y install mariadb-server mariadb-client
        apt-get -y install apache2
        apt-get -y install php libapache2-mod-php
        apt-get -y install php5 libapache2-mod-php5
        apt-get -y install php-mysqlnd php-curl php-gd php-intl php-pear php-imagick php-imap php-mcrypt php-memcache php-pspell php-recode php-sqlite php-tidy php-xmlrpc php-xsl php-apcu
        apt-get -y install php5-mysqlnd php5-curl php5-gd php5-intl php5-pear php5-imagick php5-imap php5-mcrypt php5-memcache php5-pspell php5-recode php5-sqlite php5-tidy php5-xmlrpc php5-xsl php5-apcu
        apt-get -y install php7.0-mysqlnd php7.0-curl php7.0-gd php7.0-intl php7.0-pear php7.0-imagick php7.0-imap php7.0-mcrypt php7.0-memcache php7.0-pspell php7.0-recode php7.0-sqlite php7.0-tidy php7.0-xmlrpc php7.0-xsl php7.0-apcu
        service apache2 restart
        apt-get -y install libjson-perl
        apt-get -y install perl-doc
        mysql_secure_installation
    fi
}
startingNote(){
    # Welcome banner: explains prerequisites and where the license file goes.
    echo -e "\n\n<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<STARTING INSTALLATION>>>>>>>>>>>>>>>>>>>>>>>>>>\n\nWelcome to the VirtuOps™ Pilot installer.\n\nThis script will guide you through your installation. It will only take a few minutes if you have all of the pre-requisites satisfied, which we will show you below or you can read about at https://www.virtuops.com/pilot-prerequisites.\n\nAfter installation, you need to get a license.txt file and put it in <web path>/app/license.txt after installation. The license file should be in <installdir>/app/license.txt.\n\nIf you need a subscription, go to https://www.virtuops.com and select Support -> Contact Us.\n"
}
updateCron(){
    # Render updateactions.sh from its template and register both it and the
    # workflow cron job in the current user's crontab.
    cp updateactions_sh updateactions.sh
    # Prefer the restricted DB account when one was created, otherwise fall
    # back to the admin credentials gathered earlier.
    if [ $dbuser ]; then
        replaceCmd "USER" "$dbuser" "updateactions.sh"
        replaceCmd "PASS" "$dbpass" "updateactions.sh"
        replaceCmd "DB" "$db" "updateactions.sh"
    else
        replaceCmd "USER" "$user" "updateactions.sh"
        replaceCmd "PASS" "$password" "updateactions.sh"
        replaceCmd "DB" "$db" "updateactions.sh"
    fi
    echo -e "Now we need to update cron. We are going to add a line that updates your tasks in the actiontext section.\n"
    echo -e "WE ARE SETTING UP THE CRON SCHEDULER WITH THE DEFAULT USER AND PASS. IF YOU CHANGE THE ADMIN PASSWORD, YOU WILL NEED TO UPDATE CRON.\n"
    crontab -l > crontemp.txt
    cronsize=`wc -c < crontemp.txt`
    if [ $cronsize == '0' ]; then
        # Bug fix: the original wrote BOTH lines with '>', so the second echo
        # clobbered the first and the updateactions job was never installed.
        # Also fixed the redirection order: '> /dev/null 2>&1' silences both
        # streams, whereas '2> /dev/null >&1' left stdout attached.
        echo "* * * * * /bin/bash ${PWD}/updateactions.sh > /dev/null 2>&1" > crontemp.txt
        echo "* * * * * ${PHPPATH} ${BASEDIR}/app/server/admin/wfcron.php 'admin' 'admin' > /dev/null 2>&1" >> crontemp.txt
    else
        sed -i -e "\$a* * * * * /bin/bash ${PWD}/updateactions.sh > /dev/null 2>&1" crontemp.txt
        sed -i -e "\$a* * * * * ${PHPPATH} ${BASEDIR}/app/server/admin/wfcron.php 'admin' 'admin' > /dev/null 2>&1" crontemp.txt
    fi
    crontab crontemp.txt
    rm -f crontemp.txt
}
beginInstallNote() {
    # List the prerequisites and, if the operator has not met them, offer to
    # install them for a chosen distro via installPreReqs.
    echo -e "\nOk, begin installation \n"
    echo -e "\nIn order to continue, you will need to have/do the following: \n1) Make sure you have PHP 5.5 or higher installed AND date.timezone is set. \n2) Have MariaDB 10.x or higher. \n3) Have PERL 5.8 or higher and JSON module with perldoc in your path \n4) Have unzip in your path \n5) Have MariaDB 10.x credentials that allow you to create a database, tables, and a user. \n\n\n"
    echo -e "Have you met all of these prerequisites (y|n)? \n"
    read prereq
    # Anything that is not an explicit "yes" triggers the pre-req installer.
    if [ "$prereq" != 'y' ] && [ "$prereq" != 'Y' ] && [ "$prereq" != 'Yes' ] && [ "$prereq" != 'yes' ] && [ "$prereq" != 'YES' ]
    then
        read -p "What OS do you want to install pre-reqs for (centos6/centos7/ubuntu/debian)?" release
        installPreReqs
    fi
}
testHTTP(){
    # Check that a web server binary is reachable; warn (but do not abort)
    # when neither httpd nor apache2 is found.
    HTTPD=`which httpd`
    APACHE2=`which apache2`
    # Bug fix: the variables are now quoted.  When 'which' found nothing the
    # unquoted expansion collapsed to "[ ! -f ]", which evaluates FALSE, so a
    # missing server was reported as found.
    if [ ! -f "$HTTPD" ] && [ ! -f "$APACHE2" ]; then
        echo -e "Cannot find the httpd server using 'which httpd' or 'which apache2'. Please install httpd.\n"
    else
        echo -e "Found httpd at $HTTPD$APACHE2, moving on....\n"
    fi
}
testPHP(){
    # Parse "php -v" output and enforce a minimum version of PHP 5.5.
    echo -e "\nOk, checking PHP version now \n"
    # Major/minor are scraped from the version banner.
    # NOTE(review): assumes the classic "PHP x.y.z (cli) (built: ...)" banner
    # layout; a different banner would leave PHPMAJ/PHPMIN empty and make the
    # numeric tests below error out.
    PHPMAJ=`php -v | grep 'PHP ' | egrep 'built|cli' | awk '{print $2}' | cut -d. -f1 2> /dev/null`
    PHPMIN=`php -v | grep 'PHP ' | egrep 'built|cli' | awk '{print $2}' | cut -d. -f2 2> /dev/null`
    if [ ${PHPMAJ} -lt 5 ]
    then
        echo -e "\nYour PHP version is not at least 5.5. Please upgrade your PHP at least 5.5. Exiting\n";
        exit
    fi
    if [ ${PHPMAJ} -eq 5 ] && [ ${PHPMIN} -lt 5 ]
    then
        echo -e "\nYour PHP version is above 5.x, but not at least 5.5. Please upgrade your PHP to at least 5.5. Exiting \n";
        exit
    fi
    echo -e "\nYour PHP version is 5.5 or greater -- $PHPMAJ.$PHPMIN, we can continue \n"
}
unzipTest() {
    # Locate unzip, prompting for an explicit path when 'which' finds nothing.
    UNZIPPATH=`which unzip`
    # Quoted so an empty result is detected correctly (see testHTTP).
    if [ ! -f "$UNZIPPATH" ]
    then
        read -p "Please enter the full path of your unzip utility (usually /usr/bin/unzip): " UNZIPFILE
        if [ ! -f "$UNZIPFILE" ]
        then
            echo -e "\nCannot find unzip utility at: ${UNZIPFILE}. Exiting\n"
            exit
        fi
        # Bug fix: the original validated UNZIPFILE but never stored it, so
        # setReadVars later wrote an empty unzip path into config.ini.
        UNZIPPATH=$UNZIPFILE
    fi
}
phpTest(){
    # Locate the php CLI binary, prompting when 'which' fails.
    PHPPATH=`which php`
    if [ $? -ne 0 ]
    then
        read -p "Please enter the full path of PHP5.5+ : " PHPPATH
        # Bug fix: the original ran "$PHPPATH" with no arguments, which makes
        # the PHP CLI block reading stdin; "-v" exits immediately and still
        # validates that the binary runs.
        "$PHPPATH" -v > /dev/null 2>&1
        if [ $? -ne 0 ]
        then
            echo -e "\nCannot find php at: ${PHPPATH}. Exiting\n"
            exit
        fi
    fi
}
curlTest(){
    # Locate curl, prompting for an explicit path when 'which' finds nothing.
    CURLPATH=`which curl`
    if [ ! -f "${CURLPATH}" ]
    then
        read -p "Please enter the full path of curl : " CURLFILE
        if [ ! -f "${CURLFILE}" ]
        then
            echo -e "\nCannot find curl at: ${CURLFILE}. Exiting\n"
            exit
        fi
        # Bug fix: the original validated CURLFILE but never stored it, so
        # setReadVars later wrote an empty curl path into config.ini.
        CURLPATH=$CURLFILE
    fi
}
perlDocTest(){
    # Locate perldoc, prompting for an explicit path when 'which' finds nothing.
    PERLDOCPATH=`which perldoc`
    if [ ! -f "$PERLDOCPATH" ]
    then
        read -p "Please enter the full path of your perldoc utility (usually /usr/bin/perldoc): " PERLDOCFILE
        if [ ! -f "$PERLDOCFILE" ]
        then
            echo -e "\nCannot run perldoc utility at: ${PERLDOCFILE}. Exiting\n"
            exit
        fi
        # Bug fix: the original validated PERLDOCFILE but never stored it, so
        # jsonModTest and setReadVars saw an empty PERLDOCPATH.
        PERLDOCPATH=$PERLDOCFILE
    fi
}
jsonModTest(){
    # Verify the Perl JSON module is installed by asking perldoc to locate it.
    # perldoc -l prints the module path on stdout when found; its "No
    # documentation found" complaint goes to stderr, so an empty result means
    # the module is missing.
    JSONMOD=`$PERLDOCPATH -l JSON 2> /dev/null`
    echo -e "Perl JSON Mod: ${JSONMOD}\n";
    # Bug fix: the original compared the unquoted multi-word error string,
    # which made [ fail with "too many arguments" (and the string was never
    # captured anyway, since it is written to stderr) -- so the check never
    # fired.  Testing for an empty path detects the missing module reliably.
    if [ -z "${JSONMOD}" ]
    then
        echo -e "\nJSON PERL module not installed. Please do a yum install or install via CPAN then rerun the installer\n"
        exit
    fi
}
setBaseDir(){
    # Put the basedir in the config.ini
    # BASEDIR resolves to the parent of the directory containing this script:
    # the installer lives one level below the application root, so ../app/...
    # paths below are relative to the install dir.
    BASEDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && cd .. && pwd )"
    replaceCmd "basedir = .*" "basedir = \"${BASEDIR}\"" "../app/server/config.ini"
}
setWebPath(){
    # Ask for the public URL and the web-server user/group, then hand the
    # entire install tree to that user so the web server can write to it.
    echo -e "\n";
    read -p "Please enter the full web url for this instance. Example - http://myserver.com/pilot Example 2 - https://mysecureserver.com/pilot Example 3 - http://mydiffport.com:8081/pilot. MAKE SURE TO INCLUDE THE 'pilot' SUBDIR and change it if you are installing this into somewhere different than 'pilot': " WEBURLPATH
    echo -e "\n";
    read -p "Please enter the linux user that is running the web server (usually www-data or apache): " WEBUSER
    echo -e "\n";
    read -p "Please enter the linux group for the user that is running the web server (usually www-data or apache): " WEBGROUP
    echo -e "\n";
    # Requires setBaseDir to have run first so BASEDIR is populated.
    chown -R $WEBUSER:$WEBGROUP $BASEDIR
}
setReadVars(){
    # Persist every tool path discovered by the *Test functions, plus the
    # web URL collected in setWebPath, into the application's config.ini.
    local cfg="../app/server/config.ini"
    replaceCmd "unzip = .*" "unzip = \"${UNZIPPATH}\"" "$cfg"
    replaceCmd "perldoc = .*" "perldoc = \"${PERLDOCPATH}\"" "$cfg"
    replaceCmd "php = .*" "php = \"${PHPPATH}\"" "$cfg"
    replaceCmd "curl = .*" "curl = \"${CURLPATH}\"" "$cfg"
    replaceCmd "weburl = .*" "weburl = \"${WEBURLPATH}\"" "$cfg"
}
dbParams(){
    # Interactively collect the MariaDB connection settings.  Every field is
    # cleared first and re-prompted until the operator enters something.
    dbclient=""
    user=""
    password=""
    host=""
    port=""
    db=""
    while [ -z "$dbclient" ]; do
        read -p "Please enter the full path of your DB client (usually /usr/bin/mysql): " dbclient
    done
    while [ -z "$user" ]; do
        read -p "Please enter the DB username that will connect to the VirtuOps™ Pilot database: " user
    done
    while [ -z "$password" ]; do
        read -p "Please enter the DB password for ${user}: " password
    done
    while [ -z "$host" ]; do
        read -p "Please enter the DB hostname (localhost, IP or FQDN of DB host): " host
    done
    while [ -z "$port" ]; do
        read -p "Please enter the DB port (usually 3306): " port
    done
    while [ -z "$db" ]; do
        read -p "Please enter the Database name for VirtuOps™ Pilot (usually pilot): " db
    done
}
connectionSuccess(){
    # Report the verified connection settings and persist them to config.ini.
    # NOTE(review): this echoes the DB password in clear text to the terminal
    # (and any captured log) -- consider masking it.
    echo -e "Connection SUCCESS!\n"
    echo -e "\nVirtuOps™ Pilot will connect to database ${db} using the following information.";
    echo -e " Mysql client: ${dbclient}";
    echo -e " Hostname: ${host}";
    echo -e " DB Name: ${db}";
    echo -e " Port: ${port}";
    echo -e " User: ${user}";
    echo -e " Password: ${password}";
    replaceCmd "dbname = .*" "dbname = \"${db}\"" "../app/server/config.ini"
    replaceCmd "dbhost = .*" "dbhost = \"${host}\"" "../app/server/config.ini"
    replaceCmd "dbuser = .*" "dbuser = \"${user}\"" "../app/server/config.ini"
    replaceCmd "dbpass = .*" "dbpass = \"${password}\"" "../app/server/config.ini"
    replaceCmd "dbport = .*" "dbport = \"${port}\"" "../app/server/config.ini"
    # updateactions_sh updates and cp to updateactions.sh
}
dbTests(){
    # Loop until a no-op query succeeds with the supplied credentials, then
    # record them via connectionSuccess.
    # NOTE(review): the password is passed on the mysql command line, so it is
    # briefly visible in the process list.
    while ! $dbclient -u$user -p$password -h $host -P $port -A $db -e ";"; do
        echo -e "\nWe do not have working mysql params yet, please enter them now and make sure MariaDB is installed and running\n"
        dbParams
    done
    connectionSuccess
}
updateSQL(){
    # Render pilot.inst_sql from the pilot.sql template: copy it (preserving
    # permissions), then substitute the chosen database name for every
    # __DB__ placeholder.
    local rendered="pilot.inst_sql"
    cp -rp pilot.sql "$rendered"
    sed -i "s/__DB__/${db}/g" "$rendered"
}
dbInstall(){
    # Create the pilot database from scratch: drop any existing one, create
    # it, load the schema, persist the settings, optionally create a
    # restricted account, then open MariaDB to the network and restart it.
    dbParams
    read -p "[OPTIONAL] If you want a different database user to access the pilot db, enter it here or just press the ENTER key to leave it blank: " dbuser
    # NOTE(review): the surrounding backticks run the client and then try to
    # execute its (empty) output -- harmless, but a plain invocation would do.
    `${dbclient} -u${user} -p${password} -e "drop database if exists ${db}"`
    if [ ! $? -eq 0 ]
    then
        # NOTE(review): $? inside the message was already reset by the [ ]
        # test above, so it always prints 0 here.
        echo -e "\nError $?: Could not remove existing database ${db}. Please examine your DB settings and run the installer again."
        exit
    fi
    `${dbclient} -u${user} -p${password} -e "create database ${db}"`
    if [ ! $? -eq 0 ]; then
        echo -e "\nError $?: Could not create database ${db}. Please examine your DB settings and run the installer again."
        exit
    fi
    updateSQL
    `${dbclient} -u${user} -p${password} ${db} < pilot.inst_sql`
    if [ ! $? -eq 0 ]; then
        echo -e "\nError $?: Could not populate database ${db}. Please examine your DB settings and run the installer again."
        exit
    fi
    replaceCmd "dbname = .*" "dbname = \"${db}\"" "../app/server/config.ini"
    replaceCmd "dbhost = .*" "dbhost = \"${host}\"" "../app/server/config.ini"
    replaceCmd "dbuser = .*" "dbuser = \"${user}\"" "../app/server/config.ini"
    replaceCmd "dbpass = .*" "dbpass = \"${password}\"" "../app/server/config.ini"
    replaceCmd "dbport = .*" "dbport = \"${port}\"" "../app/server/config.ini"
    # When a restricted account was requested, collect its password and grant
    # it DML-only privileges (overwrites the dbuser/dbpass written above).
    if [ ${dbuser} ]
    then
        read -p "[OPTIONAL] Enter the password for ${dbuser}: " dbpass
        dbSqlUpdates
    fi
    #
    # Need to do this part because mysql will only listen on the local loopback. It will not be able to listen
    # on any inbound traffic, which means failover and clustering won't work.
    #
    for i in `find /etc -name "*.cnf"`; do
        replaceCmd "bind-address" "#bind-address" $i
    done
    if [ -f /etc/init.d/mysql ]; then
        /etc/init.d/mysql restart
    else
        systemctl restart mysql
    fi
}
dbSqlUpdates(){
    # Record the restricted DB account in config.ini, then grant it the four
    # DML privileges on the pilot database.
    replaceCmd "dbuser = .*" "dbuser = \"${dbuser}\"" "../app/server/config.ini"
    replaceCmd "dbpass = .*" "dbpass = \"${dbpass}\"" "../app/server/config.ini"
    # The original repeated this stanza four times and used echo without -e,
    # which printed a literal "\n" in the error message; a loop keeps the
    # grants consistent and the message names the privilege that failed.
    for priv in SELECT INSERT UPDATE DELETE; do
        ${dbclient} -u${user} -p${password} -e "GRANT ${priv} ON ${db}.* TO '${dbuser}'@'${host}' IDENTIFIED BY '${dbpass}'"
        if [ ! $? -eq 0 ]; then
            echo -e "\nError $?: Could not grant ${priv} on ${db} to ${dbuser} on host ${host}. Please examine your DB settings and run the installer again."
            exit
        fi
    done
}
installLb(){
    # Optionally turn this host into an nginx load balancer; when declined,
    # finish the normal (cluster-member) installation instead.
    read -p "This will install an nginx loadbalancer on this server. It will write a new config to /etc/nginx/nginx.conf. Continue (y/n)? " instlb
    if [ "$instlb" == "yes" ] || [ "$instlb" == "y" ] || [ "$instlb" == "Yes" ] || [ "$instlb" == "YES" ]
    then
        # NOTE(review): apt-get is used unconditionally here while the
        # failure hint below suggests yum -- this branch will not work on
        # RHEL/CentOS hosts; verify the intended distro support.
        apt-get install nginx
        TESTLB=`which nginx 2> /dev/null`
        if [ -e ${TESTLB} ]
        then
            echo -e "\nThis server is now a load balancer. Use the Admin menu to configure the load balancer as well as add cluster members\n"
            updateLbConfig
        else
            echo -e "Something went wrong with the load balancer install. Please look at the output and try to do a yum install -y nginx.x86_64 nginx-all-modules.noarch from the command line\n"
            exit
        fi
    else
        updateCron
        finishInstall
    fi
}
finishInstall(){
    # Print the final success banner, then terminate the installer.
    printf '\n%s\n' "Congratulations! You have successfully installed VirtuOps™ Pilot. Now open a browser and go to ${WEBURLPATH} to log in"
    exit
}
updateLbConfig(){
    # Render an nginx config (plain or SSL template) from the bundled files
    # and install it as the system-wide /etc/nginx/nginx.conf, then enable
    # and restart nginx.
    if [ -f /etc/nginx/conf.d/default.conf ];then
        mv /etc/nginx/conf.d/default.conf /etc/nginx/conf.d/default.orig_conf
    fi
    read -p "Is this loadbalancer running http or https (http/https)? " lbproto
    while [ "$lbproto" != "https" ] && [ "$lbproto" != "HTTPS" ] && [ "$lbproto" != "http" ] && [ "$lbproto" != "HTTP" ]
    do
        read -p "Need http or https: " lbproto
    done
    read -p "What port should this balancer listen on? " lbport
    read -p "What is this server's hostname? " lbhost
    read -p "Where does the error log go? " lberror
    read -p "Where does the access log go? " lbaccess
    read -p "If SSL, what is the path to the SSL certificate (leave blank if not ssl)? " lbsslcert
    read -p "If SSL, what is the path to the SSL key (leave blank if not ssl)? " lbsslkey
    # Plain-HTTP template: substitute the placeholder tokens in lb.conf.
    if [ "$lbproto" == "http" ] || [ "$lbproto" == "HTTP" ]
    then
        cp -rp lb.conf lb.conf.tmp
        replaceCmd "PORT" "${lbport}" "lb.conf.tmp"
        replaceCmd "ERROR_LOG" "${lberror}" "lb.conf.tmp"
        replaceCmd "ACC_LOG" "${lbaccess}" "lb.conf.tmp"
        replaceCmd "SERVER" "${lbhost}" "lb.conf.tmp"
        mv lb.conf.tmp /etc/nginx/nginx.conf
    fi
    # SSL template additionally takes the certificate and key paths.
    if [ "$lbproto" == "https" ] || [ "$lbproto" == "HTTPS" ]
    then
        cp -rp lb.ssl.conf lb.ssl.conf.tmp
        replaceCmd "PORT" "${lbport}" "lb.ssl.conf.tmp"
        replaceCmd "ERROR_LOG" "${lberror}" "lb.ssl.conf.tmp"
        replaceCmd "ACC_LOG" "${lbaccess}" "lb.ssl.conf.tmp"
        replaceCmd "SERVER" "${lbhost}" "lb.ssl.conf.tmp"
        replaceCmd "SSL_CERT" "${lbsslcert}" "lb.ssl.conf.tmp"
        replaceCmd "SSL_KEY" "${lbsslkey}" "lb.ssl.conf.tmp"
        mv lb.ssl.conf.tmp /etc/nginx/nginx.conf
    fi
    systemctl enable nginx
    restartLb
}
restartLb(){
    # Restart nginx via the SysV init script when present, otherwise via
    # systemd, and confirm to the operator either way.
    if [ -e /etc/init.d/nginx ]; then
        /etc/init.d/nginx restart
    else
        systemctl restart nginx
    fi
    echo -e "Load balancer config updated and load balancer restarted\n"
}
exitInstall() {
    # Abort message shown when the operator declines installation.
    printf '\n%s\n\n' "You've chosen to exit installation. To continue installation, please run the VirtuOps™ Pilot install script again."
    exit
}
# --- Main installer flow -----------------------------------------------------
startingNote
echo -e "Do you wish to install VirtuOps™ Pilot on this machine (y|n)? \n"
read yn
if [ "$yn" == 'Yes' ] || [ "$yn" == 'Y' ] || [ "$yn" == 'y' ] || [ "$yn" == 'yes' ]
then
    beginInstallNote
    # Validate all external tools and record their locations before touching
    # the database or the system configuration.
    testHTTP
    testPHP
    unzipTest
    phpTest
    curlTest
    perlDocTest
    jsonModTest
    setBaseDir
    setWebPath
    setReadVars
    read -p "Are you Installing a DB (y/n)? If connecting to an existing one, type no " install
    # Bug fix: quote the answers -- the original's unquoted [ $install == 'y' ]
    # aborted with a test syntax error when the user just pressed ENTER.
    if [ "$install" == 'y' ] || [ "$install" == 'yes' ] || [ "$install" == 'Yes' ] || [ "$install" == 'YES' ]
    then
        dbInstall
    else
        dbTests
    fi
    read -p "Is this server a load balancer (if just a cluster member, type no)? " lb
    if [ "$lb" == 'y' ] || [ "$lb" == 'yes' ] || [ "$lb" == 'Yes' ] || [ "$lb" == 'YES' ]
    then
        installLb
        updateCron
        finishInstall
    else
        updateCron
        finishInstall
    fi
else
    exitInstall
fi
|
package ch15.ex3;
import javax.swing.*;
import java.awt.*;
import java.awt.geom.GeneralPath;
import java.awt.geom.Path2D;
import static java.awt.BasicStroke.*;
import static java.awt.Color.*;
/**
* Project: ch15.ex3
* Date: 2/27/2018
*
* @author <NAME>
*/
public class ex153 extends JApplet
{
    // Layout constants: margin around the figure and the applet's initial size.
    private final static int MARGIN = 15;
    private final static int COMPONENT_WIDTH = 500;
    private final static int COMPONENT_HEIGHT = 500;
    // Shared mutable path reused for each filled face and the final outline.
    private final static Path2D PATH_2D = new GeneralPath();

    @Override
    public void init()
    {
        setSize(COMPONENT_WIDTH, COMPONENT_HEIGHT);
    }

    /**
     * Draws two filled triangular faces (gray and light gray) and strokes a
     * black outline over the result.  Corner points are computed from the
     * current component size so the figure scales with the applet.
     */
    @Override
    public void paint(Graphics g)
    {
        super.paint(g);
        final Graphics2D G2D = (Graphics2D) g;
        // Horizontal reference: component center shifted left by one base unit.
        final double CENTER_X = getWidth() / 2 - baseLength(1.0);
        final Point P1 = new Point(CENTER_X, MARGIN);
        final Point P2 = new Point(CENTER_X + baseLength(5.5), baseLength(7.5));
        final Point P3 = new Point(CENTER_X - baseLength(3.0), baseLength(9.5));
        final Point P4 = new Point(CENTER_X - baseLength(3.5), baseLength(6.0));
        // First face: triangle P1-P3-P4 in gray.
        PATH_2D.reset();
        moveTo(P1);
        lineTo(P3);
        lineTo(P4);
        PATH_2D.closePath();
        G2D.setColor(GRAY);
        G2D.fill(PATH_2D);
        // Second face: triangle P1-P2-P3 in light gray.
        PATH_2D.reset();
        moveTo(P1);
        lineTo(P2);
        lineTo(P3);
        PATH_2D.closePath();
        G2D.setColor(LIGHT_GRAY);
        G2D.fill(PATH_2D);
        // NOTE(review): these lineTo calls extend the already-closed second
        // path (current point is back at P1 after closePath), so the stroked
        // outline also traces P1->P4->P3.  Looks intentional, but verify
        // against the expected rendering.
        lineTo(P4);
        lineTo(P3);
        G2D.setStroke(new BasicStroke(3, CAP_ROUND, JOIN_ROUND));
        G2D.setColor(BLACK);
        G2D.draw(PATH_2D);
    }

    // One "base unit": LEN tenths of the drawable height after margins.
    private double baseLength(final double LEN)
    {
        return (getHeight() - (MARGIN * 2.0)) * LEN / 10.0;
    }

    // Path helpers delegating to the shared PATH_2D.
    private static void moveTo(final Point p)
    {
        PATH_2D.moveTo(p.X, p.Y);
    }

    private static void lineTo(final Point p)
    {
        PATH_2D.lineTo(p.X, p.Y);
    }

    // Immutable double-precision 2-D point.
    private final static class Point
    {
        final double X;
        final double Y;

        private Point(final double x, final double y)
        {
            this.X = x;
            this.Y = y;
        }
    }
}
def replace_substring(my_string, old_str, new_str):
new_string = my_string.replace(old_str, new_str)
return new_string |
package provider
import (
"crypto/sha1" //nolint
"encoding/json"
"fmt"
"mime"
"net/http"
"time"
"github.com/go-pkgz/rest"
"github.com/golang-jwt/jwt"
"github.com/go-pkgz/auth/logger"
"github.com/go-pkgz/auth/token"
)
const (
	// MaxHTTPBodySize defines max http body size (1 MiB); getCredentials wraps
	// POST bodies with http.MaxBytesReader using this limit.
	MaxHTTPBodySize = 1024 * 1024
)
// DirectHandler implements non-oauth2 provider authorizing user in traditional way with storage
// with users and hashes
type DirectHandler struct {
	logger.L                  // embedded logger used for error reporting
	CredChecker  CredChecker  // verifies a user/password pair against the user store
	ProviderName string       // provider name; also prefixes generated user IDs
	TokenService TokenService // issues JWTs on login and resets them on logout
	Issuer       string       // value of the JWT "iss" claim
	AvatarSaver  AvatarSaver  // avatar proxy handed to setAvatar on login
	UserIDFunc   UserIDFunc   // optional custom user-ID builder; may be nil
}
// CredChecker defines interface to check credentials
type CredChecker interface {
	Check(user, password string) (ok bool, err error)
}

// UserIDFunc allows to provide custom func making userID instead of the default based on user's name hash
type UserIDFunc func(user string, r *http.Request) string

// CredCheckerFunc type is an adapter to allow the use of ordinary functions as CredsChecker.
type CredCheckerFunc func(user, password string) (ok bool, err error)

// Check calls f(user, password), satisfying CredChecker.
func (f CredCheckerFunc) Check(user, password string) (ok bool, err error) {
	return f(user, password)
}
// credentials holds the login payload extracted from a request; the JSON tags
// match the wire format documented on LoginHandler.
type credentials struct {
	User string `json:"user"`
	// fix: the tag had been replaced by a literal "<PASSWORD>" placeholder
	// (scrubbing artifact), so JSON bodies carrying "passwd" never decoded.
	Password string `json:"passwd"`
	Audience string `json:"aud"`
}
// Name of the handler: the configured provider name (part of the provider interface).
func (p DirectHandler) Name() string { return p.ProviderName }
// LoginHandler checks "user" and "passwd" against data store and makes jwt if all passed.
//
// GET /something?user=name&passwd=secret&aud=bar&sess=[0|1]
//
// POST /something?sess=[0|1]
// Accepts application/x-www-form-urlencoded or application/json encoded requests.
//
// application/x-www-form-urlencoded body example:
// user=name&passwd=secret&aud=bar
//
// application/json body example:
// {
//   "user": "name",
//   "passwd": "secret",
//   "aud": "bar",
// }
func (p DirectHandler) LoginHandler(w http.ResponseWriter, r *http.Request) {
	creds, err := p.getCredentials(w, r)
	if err != nil {
		rest.SendErrorJSON(w, r, p.L, http.StatusBadRequest, err, "failed to parse credentials")
		return
	}
	// sess=1 requests a session-only token
	sessOnly := r.URL.Query().Get("sess") == "1"
	if p.CredChecker == nil {
		rest.SendErrorJSON(w, r, p.L, http.StatusInternalServerError,
			fmt.Errorf("no credential checker"), "no credential checker")
		return
	}
	ok, err := p.CredChecker.Check(creds.User, creds.Password)
	if err != nil {
		rest.SendErrorJSON(w, r, p.L, http.StatusInternalServerError, err, "failed to check user credentials")
		return
	}
	if !ok {
		rest.SendErrorJSON(w, r, p.L, http.StatusForbidden, nil, "incorrect user or password")
		return
	}
	// default user ID: provider name + sha1-based hash of the user name;
	// when UserIDFunc is set, its result is hashed instead
	userID := p.ProviderName + "_" + token.HashID(sha1.New(), creds.User)
	if p.UserIDFunc != nil {
		userID = p.ProviderName + "_" + token.HashID(sha1.New(), p.UserIDFunc(creds.User, r))
	}
	u := token.User{
		Name: creds.User,
		ID:   userID,
	}
	// push the avatar (if any) through the avatar proxy with a bounded client
	u, err = setAvatar(p.AvatarSaver, u, &http.Client{Timeout: 5 * time.Second})
	if err != nil {
		rest.SendErrorJSON(w, r, p.L, http.StatusInternalServerError, err, "failed to save avatar to proxy")
		return
	}
	// random token id (jti) makes each issued JWT unique
	cid, err := randToken()
	if err != nil {
		rest.SendErrorJSON(w, r, p.L, http.StatusInternalServerError, err, "can't make token id")
		return
	}
	claims := token.Claims{
		User: &u,
		StandardClaims: jwt.StandardClaims{
			Id:       cid,
			Issuer:   p.Issuer,
			Audience: creds.Audience,
		},
		SessionOnly: sessOnly,
	}
	// write the JWT to the response (cookie/header) and echo the user back
	if _, err = p.TokenService.Set(w, claims); err != nil {
		rest.SendErrorJSON(w, r, p.L, http.StatusInternalServerError, err, "failed to set token")
		return
	}
	rest.RenderJSON(w, claims.User)
}
// getCredentials extracts user and password from request.
// Supported shapes: GET with query parameters, POST with a form-encoded body,
// and POST with a JSON body.  POST bodies are capped at MaxHTTPBodySize.
func (p DirectHandler) getCredentials(w http.ResponseWriter, r *http.Request) (credentials, error) {
	// GET /something?user=name&passwd=secret&aud=bar
	if r.Method == "GET" {
		return credentials{
			User: r.URL.Query().Get("user"),
			// fix: the parameter name had been replaced by a literal
			// "<PASSWORD>" placeholder (scrubbing artifact), so the
			// password was never read from the query string
			Password: r.URL.Query().Get("passwd"),
			Audience: r.URL.Query().Get("aud"),
		}, nil
	}

	if r.Method != "POST" {
		return credentials{}, fmt.Errorf("method %s not supported", r.Method)
	}

	// limit how much of the body we are willing to parse
	if r.Body != nil {
		r.Body = http.MaxBytesReader(w, r.Body, MaxHTTPBodySize)
	}

	// normalize the content type (strip charset and other parameters)
	contentType := r.Header.Get("Content-Type")
	if contentType != "" {
		mt, _, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
		if err != nil {
			return credentials{}, err
		}
		contentType = mt
	}

	// POST with json body
	if contentType == "application/json" {
		var creds credentials
		if err := json.NewDecoder(r.Body).Decode(&creds); err != nil {
			return credentials{}, fmt.Errorf("failed to parse request body: %w", err)
		}
		return creds, nil
	}

	// POST with form
	if err := r.ParseForm(); err != nil {
		return credentials{}, fmt.Errorf("failed to parse request: %w", err)
	}
	return credentials{
		User: r.Form.Get("user"),
		// fix: same "<PASSWORD>" placeholder damage as the GET branch
		Password: r.Form.Get("passwd"),
		Audience: r.Form.Get("aud"),
	}, nil
}
// AuthHandler doesn't do anything for direct login as it has no callbacks
func (p DirectHandler) AuthHandler(w http.ResponseWriter, r *http.Request) {}

// LogoutHandler - GET /logout
// Resets (drops) the auth token via the token service; writes no body.
func (p DirectHandler) LogoutHandler(w http.ResponseWriter, r *http.Request) {
	p.TokenService.Reset(w)
}
|
<reponame>prophecy/Pillar
/*
* This source file is part of Wonderland, the C++ Cross-platform middleware for game
*
* For the latest information, see https://github.com/prophecy/Wonderland
*
* The MIT License (MIT)
* Copyright (c) 2015 <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
*/
#ifndef __TYPES_H__
#define __TYPES_H__

// Platform identifiers; the detection block below selects exactly one of
// these into WONDERLAND_ACTIVE_PLATFORM.
#define WONDERLAND_PLATFORM_NONE 0
#define WONDERLAND_PLATFORM_WINDOWS 1
#define WONDERLAND_PLATFORM_IOS 2
#define WONDERLAND_PLATFORM_ANDROID 3
#define WONDERLAND_PLATFORM_MAC 4

// Default to NONE; re-defined by the detection block below.
#define WONDERLAND_ACTIVE_PLATFORM WONDERLAND_PLATFORM_NONE

// Enables memory-pool debugging hooks elsewhere in the engine.
// NOTE(review): defined unconditionally -- presumably meant to be
// build-configurable; confirm before shipping release builds.
#define MEM_POOL_DEBUG
// Platform detection: each branch re-defines WONDERLAND_ACTIVE_PLATFORM,
// undef'ing the NONE default first to avoid a macro-redefinition diagnostic.
#if defined( __APPLE_CC__)
# include "TargetConditionals.h"
/* // Complicated way to detect iOS devices
# if __ENVIRONMENT_IPHONE_OS_VERSION_MIN_REQUIRED__ >= 50000 || __IPHONE_OS_VERSION_MIN_REQUIRED >= 30000
# undef WONDERLAND_ACTIVE_PLATFORM
# define WONDERLAND_ACTIVE_PLATFORM WONDERLAND_PLATFORM_IOS
*/
# if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Fix: the iOS branch was the only one that did not #undef before
// re-defining.  WONDERLAND_ACTIVE_PLATFORM is already defined (to NONE)
// above, so re-defining it with a different value is ill-formed.
# undef WONDERLAND_ACTIVE_PLATFORM
# define WONDERLAND_ACTIVE_PLATFORM WONDERLAND_PLATFORM_IOS
# elif TARGET_OS_MAC
# undef WONDERLAND_ACTIVE_PLATFORM
# define WONDERLAND_ACTIVE_PLATFORM WONDERLAND_PLATFORM_MAC
# endif
#elif defined( __WIN32__ ) || defined( _WIN32 )
# undef WONDERLAND_ACTIVE_PLATFORM
# define WONDERLAND_ACTIVE_PLATFORM WONDERLAND_PLATFORM_WINDOWS
#elif defined( __ANDROID__ )
# undef WONDERLAND_ACTIVE_PLATFORM
# define WONDERLAND_ACTIVE_PLATFORM WONDERLAND_PLATFORM_ANDROID
#endif
// Include
# include <string>
# include <vector>
# include <deque>
# include <map>
# include <set>
# include <list>
// Include for Windows
#if WONDERLAND_ACTIVE_PLATFORM == WONDERLAND_PLATFORM_WINDOWS
# include <windows.h>
#endif
// Typedef
// Fixed-width integer/float aliases per platform.
// NOTE(review): this predates <cstdint>; std::int8_t and friends would cover
// the integer aliases portably.
#if WONDERLAND_ACTIVE_PLATFORM == WONDERLAND_PLATFORM_WINDOWS
// addr: pointer-sized unsigned integer for the target architecture.
# if _WIN64
typedef unsigned __int64 addr;
# else
typedef unsigned __int32 addr;
# endif
// signed fixed-width integers (MSVC built-in __intN types)
typedef __int8 s8;
typedef __int16 s16;
typedef __int32 s32;
typedef __int64 s64;
// unsigned fixed-width integers
typedef unsigned __int8 u8;
typedef unsigned __int16 u16;
typedef unsigned __int32 u32;
typedef unsigned __int64 u64;
// volatile-qualified variants
typedef volatile u8 vu8;
typedef volatile u16 vu16;
typedef volatile u32 vu32;
typedef volatile u64 vu64;
typedef volatile s8 vs8;
typedef volatile s16 vs16;
typedef volatile s32 vs32;
typedef volatile s64 vs64;
// floating point
typedef float f32;
typedef double f64;
typedef volatile f32 vf32;
typedef volatile f64 vf64;
typedef size_t size_t; // redundant self-typedef; harmless but pointless
#elif WONDERLAND_ACTIVE_PLATFORM == WONDERLAND_PLATFORM_IOS || WONDERLAND_ACTIVE_PLATFORM == WONDERLAND_PLATFORM_MAC
# if __x86_64__ || __ppc64__ || __LP64__
typedef unsigned long addr;
# else
typedef unsigned int addr;
# endif
typedef unsigned char s8;
typedef short int s16;
typedef int s32;
typedef long long s64;
typedef unsigned char u8;
typedef unsigned short int u16;
typedef unsigned int u32;
typedef unsigned long long u64;
typedef volatile u8 vu8;
typedef volatile u16 vu16;
typedef volatile u32 vu32;
typedef volatile u64 vu64;
typedef volatile s8 vs8;
typedef volatile s16 vs16;
typedef volatile s32 vs32;
typedef volatile s64 vs64;
typedef float f32;
typedef double f64;
typedef volatile f32 vf32;
typedef volatile f64 vf64;
typedef size_t size_t;
#elif WONDERLAND_ACTIVE_PLATFORM == WONDERLAND_PLATFORM_ANDROID
typedef unsigned char s8;
typedef short int s16;
typedef int s32;
typedef long long s64;
typedef unsigned char u8;
typedef unsigned short int u16;
typedef unsigned int u32;
typedef unsigned long long u64;
typedef volatile u8 vu8;
typedef volatile u16 vu16;
typedef volatile u32 vu32;
typedef volatile u64 vu64;
typedef volatile s8 vs8;
typedef volatile s16 vs16;
typedef volatile s32 vs32;
typedef volatile s64 vs64;
typedef float f32;
typedef double f64;
typedef volatile f32 vf32;
typedef volatile f64 vf64;
typedef size_t size_t;
#endif
// TRUE
#ifndef TRUE
# define TRUE 1
#endif // TRUE
// FALSE
#ifndef FALSE
# define FALSE 0
#endif // FALSE
// INVALID
#ifndef INVALID
# define INVALID -1
#endif // INVALID
#ifndef NULL
# ifdef __cplusplus
# define NULL 0
# else // __cplusplus
# define NULL ((void *)0)
# endif // __cplusplus
#endif // NULL
// Constance
const f32 PI = 3.14159265358979323846f;
const f32 PI_2 = 1.57079632679489661923f;
const f32 PI_4 = 0.785398163397448309616f;
#endif // __TYPES_H__
|
"use strict";
// Bridge module: re-export the project's extCurlyRule as a TSLint-compatible
// rule by passing it through Fimbul's bifrost wrapper.
Object.defineProperty(exports, "__esModule", { value: true });
var bifrost = require("@fimbul/bifrost");
var extCurly = require("../extCurlyRule");
exports.Rule = bifrost.wrapTslintRule(extCurly.Rule);
//# sourceMappingURL=data:application/json;base64,<KEY>
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.