text
stringlengths 1
1.05M
|
|---|
<filename>183 - Steal Diamond Game/script.js
// Make every .eye element rotate to track the mouse pointer.
document.querySelector('body').addEventListener('mousemove', eyeBall);

function eyeBall(event) {
  // Take the event as a parameter instead of relying on the deprecated
  // implicit global `window.event` (which is not available in Firefox).
  let eyes = document.querySelectorAll('.eye');
  eyes.forEach(eye => {
    // Centre of the eye in viewport coordinates.
    let x = eye.getBoundingClientRect().left + eye.clientWidth / 2;
    let y = eye.getBoundingClientRect().top + eye.clientHeight / 2;
    // NOTE(review): pageX/pageY are document coordinates while
    // getBoundingClientRect() is viewport-relative; these only agree when
    // the page is unscrolled — confirm intended behaviour (clientX/clientY
    // may be what was meant).
    let radian = Math.atan2(event.pageX - x, event.pageY - y);
    let rot = radian * (180 / Math.PI) * -1 + 270;
    eye.style.transform = `rotateZ(${rot}deg)`;
  });
}
|
def solveSudoku(board):
    """Solve the Sudoku in place via backtracking.

    Returns True once the board is completely filled with a valid
    solution, False when the current partial board is unsolvable.
    """
    row, col = findEmptyCell(board)
    # No empty cell left: the board is solved.
    if (row, col) == (-1, -1):
        return True
    for candidate in range(1, 10):
        if not validSolution(board, row, col, candidate):
            continue
        board[row][col] = candidate
        if solveSudoku(board):
            return True
        # Backtrack: undo the tentative placement.
        board[row][col] = 0
    return False
def validSolution(board, row, col, num):
    """Return True if num can be placed at (row, col) without clashing
    with its row, column, or enclosing 3x3 box."""
    if not rowConstraint(board, row, num):
        return False
    if not colConstraint(board, col, num):
        return False
    # Top-left corner of the 3x3 box containing (row, col).
    return boxConstraint(board, row - row % 3, col - col % 3, num)
def findEmptyCell(board):
    """Return (row, col) of the first 0 cell in row-major order,
    or (-1, -1) when the board has no empty cell."""
    for r, row_values in enumerate(board):
        for c, value in enumerate(row_values):
            if value == 0:
                return (r, c)
    return (-1, -1)
def rowConstraint(board, row, num):
    """Return True if num does not already appear in the given row."""
    return all(board[row][c] != num for c in range(len(board[0])))
def colConstraint(board, col, num):
    """Return True if num does not already appear in the given column."""
    return all(row_values[col] != num for row_values in board)
def boxConstraint(board, startRow, startCol, num):
    """Return True if num does not appear in the 3x3 box whose
    top-left corner is (startRow, startCol)."""
    for r in range(startRow, startRow + 3):
        for c in range(startCol, startCol + 3):
            if board[r][c] == num:
                return False
    return True
|
#include <algorithm>
#include <iomanip>
#include <iostream>
#include <string>
#include <vector>
#include "atl/colors.h"
#include "atl/file.h"
#include "atl/optional.h"
#include "atl/status.h"
#include "atl/string.h"
#include "atl/time.h"
#include "serializer.h"
#include "utils.h"
#include "worklog.h"
// Returns a serialized skeleton work log for the user to edit:
// placeholder subject/description/tags plus the current time as created_at.
std::string Template() {
  worklog::HumanSerializer hs;
  worklog::Log log;
  log.subject = "Title here";
  log.description = "Description here";
  log.tags = {"tag1", "tag2"};
  log.created_at = atl::UnixTimestamp(atl::CurrentTime());
  return hs.Serialize(log);
}
// Validates user-edited work log text after an edit session.
// Returns 0 when the content deserializes into a valid log; otherwise
// prints the validation failure reason to stderr and returns -1.
int PostEditValidation(const std::string& content) {
  worklog::HumanSerializer hs;
  worklog::Log log = hs.Unserialize(content);
  atl::Status valid_check = worklog::Validate(log);
  if (!valid_check.ok()) {
    std::cerr << "Error: The work log is invalid. Reason: "
              << valid_check.error_message()
              << ". Please fix!\n";
    return -1;
  }
  return 0;
}
// Extracts the numeric work log id from the final path component.
// Returns an empty optional when the path contains no '/' or when the
// trailing component is not a parseable integer (previously std::stoi
// would throw std::invalid_argument/std::out_of_range in that case,
// violating the Optional return contract).
atl::Optional<int> ExtractWorklogIdFromPath(const std::string& path) {
  auto pos = path.rfind('/');
  if (pos == std::string::npos) {
    return {};
  }
  try {
    return std::stoi(path.substr(pos + 1));
  } catch (const std::exception&) {
    // Non-numeric or out-of-range trailing component (e.g. "logs/readme").
    return {};
  }
}
// Builds an in-memory index of all work logs found under `path`, sorted
// newest-first by created_at. Files whose name cannot be parsed as a
// numeric id, or that cannot be read, are reported on stderr and skipped;
// the directory walk always continues (the callback returns true).
std::vector<worklog::Log> IndexFromDir(const std::string& path) {
  std::vector<worklog::Log> index;
  worklog::HumanSerializer hs;
  atl::WalkDir(path, [&index, &hs](const std::string& file) -> bool {
    atl::Optional<int> worklog_id = ExtractWorklogIdFromPath(file);
    if (!worklog_id) {
      std::cerr << "Failed to extract work log from path: " << file << "\n";
      return true;
    }
    auto content = atl::FileReadContent(file);
    if (!content) {
      std::cerr << "Error: Failed to read log: " << file << "\n";
      return true;
    }
    // The id comes from the file name, not from the serialized content.
    auto log = hs.Unserialize(content.value());
    log.id = worklog_id.value();
    index.push_back(log);
    return true;
  });
  // Sort by created_at DESC - newer entries are displayed first:
  std::sort(index.begin(), index.end(), [](const worklog::Log& a, const worklog::Log& b) {
    return a.created_at > b.created_at;
  });
  return index;
}
// Parses a base-10 integer from `number`, yielding an empty optional on
// any failure (non-numeric input or out-of-range value).
atl::Optional<int> NumberFromString(const std::string& number) {
  try {
    return std::stoi(number);
  } catch (const std::exception&) {
    return {};
  }
}
// Prints a one-line summary of a work log to stdout: the id (or "???"
// when unset), creation time, a yellow 30-character subject snippet, and
// the tag list in brackets.
void PrintWorklog(const worklog::Log& log) {
  if (log.id > 0) {
    std::cout << std::setw(10) << std::left << log.id;
  } else {
    // Ids come from the file name; zero/negative means "unknown".
    std::cout << std::setw(10) << std::left << "???";
  }
  std::cout << atl::FormatTime(log.created_at) << " " << atl::console::fg::yellow
            << std::setw(30) << std::left << atl::CreateSnippet(log.subject, 30)
            << atl::console::fg::reset;
  std::cout << " [" << atl::Join(log.tags, ", ") << "]\n";
}
// Decorator: wraps a command action so it only runs inside an initialized
// worklog space. Outside a worklog space it prints a fatal error and
// returns -1 without invoking the wrapped action.
worklog::Command::Action MustBeInWorkspace(worklog::Command::Action action) {
  return [action](const worklog::CommandContext& ctx) -> int {
    if (!worklog::IsInWorklogSpace(ctx.config)) {
      std::cerr << "Fatal: Not in a worklog space. Please initialize a worklog first (see 'help')\n";
      return -1;
    }
    return action(ctx);
  };
}
|
<gh_stars>0
/**
 * Checks for an available update, resolving once the check completes.
 * @param timeout - optional timeout (presumably in milliseconds — confirm
 *                  against the implementation).
 */
declare const checkUpdate: (timeout?: number) => Promise<void>;
export default checkUpdate;
|
'use strict'

// Error-serialization tests for pino.
var test = require('tap').test
var pino = require('../')
var sink = require('./helper').sink
var os = require('os')

var pid = process.pid
var hostname = os.hostname()
var level = 50       // numeric value expected in chunks for the 'error' level
var name = 'error'
test('err is serialized with additional properties set on the Error object', function (t) {
  t.plan(2)
  var err = Object.assign(new Error('myerror'), {foo: 'bar'})
  var instance = pino(sink(function (chunk, enc, cb) {
    // Message fixed to match the assertion: the check is time <= now.
    t.ok(new Date(chunk.time) <= new Date(), 'time is less than or equal to Date.now()')
    delete chunk.time
    t.deepEqual(chunk, {
      pid: pid,
      hostname: hostname,
      level: level,
      type: 'Error',
      msg: err.message,
      stack: err.stack,
      foo: err.foo,
      v: 1
    })
    cb()
  }))
  instance.level = name
  instance[name](err)
})
test('type should be retained, even if type is a property', function (t) {
  t.plan(2)
  var err = Object.assign(new Error('myerror'), {type: 'bar'})
  var instance = pino(sink(function (chunk, enc, cb) {
    // Message fixed to match the assertion: the check is time <= now.
    t.ok(new Date(chunk.time) <= new Date(), 'time is less than or equal to Date.now()')
    delete chunk.time
    t.deepEqual(chunk, {
      pid: pid,
      hostname: hostname,
      level: level,
      type: 'bar',
      msg: err.message,
      stack: err.stack,
      v: 1
    })
    cb()
  }))
  instance.level = name
  instance[name](err)
})
test('type, message and stack should be first level properties', function (t) {
  t.plan(2)
  var err = Object.assign(new Error('foo'), { foo: 'bar' })
  var instance = pino(sink(function (chunk, enc, cb) {
    // Message fixed to match the assertion: the check is time <= now.
    t.ok(new Date(chunk.time) <= new Date(), 'time is less than or equal to Date.now()')
    delete chunk.time
    t.deepEqual(chunk, {
      pid: pid,
      hostname: hostname,
      level: level,
      type: 'Error',
      msg: err.message,
      stack: err.stack,
      foo: err.foo,
      v: 1
    })
    cb()
  }))
  instance.level = name
  instance[name](err)
})
test('err serializer', function (t) {
  t.plan(2)
  var err = Object.assign(new Error('myerror'), {foo: 'bar'})
  var instance = pino({
    serializers: {
      err: pino.stdSerializers.err
    }
  }, sink(function (chunk, enc, cb) {
    // Message fixed to match the assertion: the check is time <= now.
    t.ok(new Date(chunk.time) <= new Date(), 'time is less than or equal to Date.now()')
    delete chunk.time
    t.deepEqual(chunk, {
      pid: pid,
      hostname: hostname,
      level: level,
      err: {
        type: 'Error',
        message: err.message,
        stack: err.stack,
        foo: err.foo
      },
      v: 1
    })
    cb()
  }))
  instance.level = name
  instance[name]({ err })
})
test('an error with statusCode property is not confused for a http response', function (t) {
  t.plan(2)
  var err = Object.assign(new Error('StatusCodeErr'), { statusCode: 500 })
  var instance = pino(sink(function (chunk, enc, cb) {
    // Message fixed to match the assertion: the check is time <= now.
    t.ok(new Date(chunk.time) <= new Date(), 'time is less than or equal to Date.now()')
    delete chunk.time
    t.deepEqual(chunk, {
      pid: pid,
      hostname: hostname,
      level: level,
      type: 'Error',
      msg: err.message,
      stack: err.stack,
      statusCode: err.statusCode,
      v: 1
    })
    cb()
  }))
  instance.level = name
  instance[name](err)
})
test('stack is omitted if it is not set on err', t => {
  t.plan(2)
  var err = new Error('myerror')
  delete err.stack
  var instance = pino(sink(function (chunk, enc, cb) {
    // Message fixed to match the assertion: the check is time <= now.
    t.ok(new Date(chunk.time) <= new Date(), 'time is less than or equal to Date.now()')
    delete chunk.time
    t.equal(chunk.hasOwnProperty('stack'), false)
    cb()
  }))
  instance.level = name
  instance[name](err)
})
test('stack is rendered as any other property if it\'s not a string', t => {
  t.plan(3)
  var err = new Error('myerror')
  err.stack = null
  var instance = pino(sink(function (chunk, enc, cb) {
    // Message fixed to match the assertion: the check is time <= now.
    t.ok(new Date(chunk.time) <= new Date(), 'time is less than or equal to Date.now()')
    delete chunk.time
    t.equal(chunk.hasOwnProperty('stack'), true)
    t.equal(chunk.stack, null)
    cb()
  }))
  instance.level = name
  instance[name](err)
})
|
<filename>Hackerrank/Java/Java MD5.java
import java.io.*;
import java.util.*;
import java.text.*;
import java.math.*;
import java.util.regex.*;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
public class Solution {
    private static Scanner scanner = new Scanner(System.in);

    /**
     * Computes the MD5 digest of the given string.
     *
     * @param input text to hash (bytes taken in the platform default
     *              charset, matching the original implementation)
     * @return the digest as a 32-character, zero-padded, lowercase hex string
     */
    static String md5Hex(String input) {
        try {
            MessageDigest md = MessageDigest.getInstance("MD5");
            byte[] digest = md.digest(input.getBytes());
            // BigInteger(1, ...) interprets the digest as unsigned;
            // %032x zero-pads to the full 128-bit width, replacing the
            // original manual while-loop padding.
            return String.format("%032x", new BigInteger(1, digest));
        } catch (NoSuchAlgorithmException e) {
            // Every conforming JRE must provide MD5, so this is unreachable.
            throw new IllegalStateException(e);
        }
    }

    public static void main(String[] args) {
        // Read one token from stdin and print its MD5 hash.
        System.out.println(md5Hex(scanner.next()));
    }
}
|
<filename>Documentation/backends_2reference_2workloads_2_space_to_batch_n_d_8cpp.js
// Symbol index for SpaceToBatchNd.cpp — appears to be Doxygen-generated
// (xhtml anchors); do not edit by hand.
var backends_2reference_2workloads_2_space_to_batch_n_d_8cpp =
[
    [ "GetOffset", "backends_2reference_2workloads_2_space_to_batch_n_d_8cpp.xhtml#adafb0fd0a3f6435c2bdf41f971761ecf", null ],
    [ "SpaceToBatchNd", "backends_2reference_2workloads_2_space_to_batch_n_d_8cpp.xhtml#a4a180e425d4c19b2cdea4ce5760180e1", null ]
];
|
package io.opensphere.core.control.action;
/**
 * Manages events related to context menus and subscriptions for menu button
 * providers. Register methods note that only weak references are held to
 * providers, so callers must retain their own strong reference.
 */
public interface ContextActionManager
{
    /**
     * De-register any single action provider which is not the default provider
     * for the context and notify it that it has been invalidated.
     *
     * @param contextId The identifier for the context.
     * @param keyType The type of the key associated with the context.
     * @param <R> a context menu key type.
     */
    <R> void clearContextSingleActionProvider(String contextId, Class<R> keyType);

    /**
     * De-register a provider of menus for a context.
     *
     * @param contextId The identifier for the context.
     * @param keyType The type of the key associated with the context.
     * @param provider The provider to de-register.
     * @param <R> a context menu key type.
     */
    <R> void deregisterContextMenuItemProvider(String contextId, Class<R> keyType, ContextMenuProvider<R> provider);

    /**
     * De-register a provider of a single action for a context.
     *
     * @param contextId The identifier for the context.
     * @param keyType The type of the key associated with the context.
     * @param provider The provider to de-register.
     * @param <R> a context menu key type.
     */
    <R> void deregisterContextSingleActionProvider(String contextId, Class<R> keyType, ContextSingleActionProvider<R> provider);

    /**
     * De-register a provider of a default action for a context.
     *
     * @param contextId The identifier for the context.
     * @param keyType The type of the key associated with the context.
     * @param provider The provider to de-register.
     * @param <R> a context menu key type.
     */
    <R> void deregisterDefaultContextActionProvider(String contextId, Class<R> keyType, ContextActionProvider<R> provider);

    /**
     * Gets the {@link ActionContext} for the provided context Identifier, if no
     * context exists yet for the identifier one will be created.
     *
     * @param contextIdentifier the context identifier to be used to get the
     *            context.
     * @param keyType the type of the context key associated with the context
     *            ID.
     * @param <R> A context menu key type.
     * @return the control popup menu option context for the identifier.
     */
    <R> ActionContext<R> getActionContext(String contextIdentifier, Class<R> keyType);

    /**
     * Register a provider of menus for a context. Only a weak reference is held
     * to the provider.
     *
     * @param contextId The identifier for the context.
     * @param keyType The type of the key associated with the context.
     * @param provider The provider to register.
     * @param <R> a context menu key type.
     */
    <R> void registerContextMenuItemProvider(String contextId, Class<R> keyType, ContextMenuProvider<R> provider);

    /**
     * Register a non-default provider of a single action for a context. Only
     * one may be registered at a time, so if one is already registered it will
     * be notified that it has been invalidated. Only a weak reference is held
     * to the provider.
     *
     * @param contextId The identifier for the context.
     * @param keyType The type of the key associated with the context.
     * @param provider The provider to register.
     * @param <R> a context menu key type.
     */
    <R> void registerContextSingleActionProvider(String contextId, Class<R> keyType, ContextSingleActionProvider<R> provider);

    /**
     * Register a default provider of a single action for a context. A default
     * provider will only be called if another non-default provider isn't
     * currently registered. Multiple defaults may be registered; they will be
     * called in order of registration until one of them accepts the action.
     * Only a weak reference is held to the provider.
     *
     * @param contextId The identifier for the context.
     * @param keyType The type of the key associated with the context.
     * @param provider The provider to register.
     * @param <R> a context menu key type.
     */
    <R> void registerDefaultContextActionProvider(String contextId, Class<R> keyType, ContextActionProvider<R> provider);
}
|
<gh_stars>1-10
import commonjs from 'rollup-plugin-commonjs';
import { uglify } from 'rollup-plugin-uglify';
import babel from 'rollup-plugin-babel';
import pkg from './package.json';

const input = 'src/index.js';
const name = 'whatsappChatParser';
const sourcemap = true;
const format = 'umd';
const exports = 'named';

// Builds one UMD bundle config. When `minify` is true the output file gets
// a .min.js suffix and uglify is appended to the plugin chain; everything
// else is shared, removing the duplication between the two configs.
const createConfig = (minify) => ({
  input,
  output: {
    name,
    file: minify ? pkg.main.replace(/\.js$/, '.min.js') : pkg.main,
    sourcemap,
    format,
    exports,
  },
  plugins: minify ? [commonjs(), babel(), uglify()] : [commonjs(), babel()],
});

// Regular and minified bundles.
export default [createConfig(false), createConfig(true)];
|
#!/usr/bin/env bash
# Deploys the Kubernetes dashboard (v2.0.0-beta8) plus local overrides from
# dashboard.yml, then prints the admin-user bearer token for UI login.
start_dashboard() {
  kubectl apply -f https://raw.githubusercontent.com/kubernetes/dashboard/v2.0.0-beta8/aio/deploy/recommended.yaml
  kubectl apply -f dashboard.yml
  echo "Bearer token for UI user."
  # Find the admin-user secret by name and show its token.
  kubectl -n kubernetes-dashboard describe secret $(kubectl -n kubernetes-dashboard get secret | grep admin-user | awk '{print $1}')
}

start_dashboard
|
<reponame>ReactionMap/main_canost
// assign_cast_code CASTコードをふる関数
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "main_canost.h"
// #define TEST_ASSIGN_CAST_CODE 1
extern int iatm;
extern struct atom *a1st;
extern int *cast_codes;
extern int *relative_parent;
/*
 * assign_cast_code: assigns a CAST code to atom `epsilon`.
 *
 * Walks up the relative_parent chain to identify the three ancestors
 * (gamma, beta, alpha) needed for a dihedral angle. When the full chain is
 * available, the CAST code is the dihedral angle of (alpha, beta, gamma,
 * epsilon); otherwise the code is set to UK (unknown).
 * Always returns EXIT_SUCCESS.
 */
int assign_cast_code( int epsilon ){
  int alpha,beta,gamma;
#ifdef TEST_ASSIGN_CAST_CODE
  printf("assign_cast_code \n");
  printf("epsilon %d \n",epsilon);
#endif
  alpha=MINUS;
  beta=MINUS;
  gamma=MINUS;
  /* A virtual atom (index > iatm) borrows the coordinates of one of its
     neighbours: the one with the higher grade (adj_list on ties). */
  if( epsilon > iatm ){
    if( a1st[ (a1st[epsilon].adj_list)->alias ].grade < a1st[ (a1st[epsilon].last)->alias ].grade ){
      a1st[ epsilon ].xco = a1st[ (a1st[epsilon].last)->alias ].xco;
      a1st[ epsilon ].yco = a1st[ (a1st[epsilon].last)->alias ].yco;
      a1st[ epsilon ].zco = a1st[ (a1st[epsilon].last)->alias ].zco;
    }else{
      a1st[ epsilon ].xco = a1st[ (a1st[epsilon].adj_list)->alias ].xco;
      a1st[ epsilon ].yco = a1st[ (a1st[epsilon].adj_list)->alias ].yco;
      a1st[ epsilon ].zco = a1st[ (a1st[epsilon].adj_list)->alias ].zco;
    }
  }
  if( relative_parent[ epsilon ] != MINUS ){ /* check that relative_parent has been identified */
    gamma = identify_ancestors( relative_parent[ epsilon ],epsilon );
    if( gamma != MINUS && a1st[ gamma ].layer >= 2 && relative_parent[ gamma ] != MINUS ){
      beta = identify_ancestors( relative_parent[ gamma ],gamma );
#ifdef TEST_ASSIGN_CAST_CODE
      printf("beta %d gamma %d epsilon %d \n",beta,gamma,epsilon);
      printf("relative_parent of beta %d is %d \n",beta,relative_parent[beta]);
#endif
      if( beta != MINUS && a1st[ beta ].layer >= 1 && relative_parent[ beta ] != MINUS ){
        alpha = identify_ancestors( relative_parent[ beta ],beta );
#ifdef TEST_ASSIGN_CAST_CODE
        printf("alpha %d beta %d gamma %d epsilon %d \n",alpha,beta,gamma,epsilon);
#endif
        if( alpha != MINUS ){
          /* full ancestor chain found: the CAST code is the dihedral angle */
          cast_codes[ epsilon ] = get_dihedral_angle( alpha, beta, gamma, epsilon );
        }else{
          cast_codes[ epsilon ] = UK;
        }
      }else{
        cast_codes[ epsilon ] = UK;
      }
    }else{
      cast_codes[ epsilon ] = UK;
    }
  }else{
    cast_codes[ epsilon ] = UK;
  }
#ifdef TEST_ASSIGN_CAST_CODE
  printf("alpha %d beta %d gamma %d epsilon %d \n",alpha,beta,gamma,epsilon);
  printf("atom %d cast %d \n",epsilon,cast_codes[epsilon]);
#endif
  return EXIT_SUCCESS;
}
|
<gh_stars>1-10
const Command = require("../../structure/commands.js");
const { MessageEmbed } = require("discord.js");
const EcoleDirecte = require("node-ecole-directe");
// Interactive, reaction-driven login wizard for EcoleDirecte accounts.
// One embed message is edited in place as the user fills in each field.
class Login extends Command {
    constructor(client) {
        super(client, {
            name: "login",
            aliases: [],
            enabled: true,
            userPerms: [],
            permLevel: 1
        });
    };

    async run(message, args) {
        let client = this.client;
        // The single embed message that the wizard keeps editing.
        let msg = null;
        // Only accept the four wizard reactions, from the invoking user only.
        let filter = (reaction, user) => {
            return ['1️⃣', '2️⃣', '3️⃣', '✅'].includes(reaction.emoji.name) && user.id === message.author.id;
        };
        const opt = { max: 1, time: 90000, errors: [ "time" ] };
        let account = null;
        let rememberMe = false;
        // Placeholder texts shown until the user supplies real values.
        let username = await client.translate("ecoledirecte:ENTER_USERNAME");
        let password = await client.translate("ecoledirecte:ENTER_PASSWORD");
        // Masked variant of the password for display in the embed.
        let passwordSecret = await client.translate("ecoledirecte:ENTER_PASSWORD");

        // Waits (60s) for one wizard reaction and returns its emoji name,
        // or undefined on timeout (after cancelling the wizard).
        async function WaitForReaction(msg) {
            let reaction = null;
            await msg.awaitReactions(filter, { max: 1, time: 60000, errors: ['time'] }).then(collected => {
                reaction = collected.first();
                reaction.users.remove(message.author.id);
            }).catch(collected => {
                return cancel();
            });
            if(reaction == null) return;
            return reaction.emoji.name;
        };

        // Waits for a text reply from the invoking user; "cancel" or a
        // timeout aborts. Temporarily swaps `filter` to a message filter,
        // then restores the reaction filter.
        async function WaitForText(msg) {
            filter = (m) => m.author.id === message.author.id;
            let collected = await message.channel.awaitMessages(filter, opt).catch(() => {});
            if (!collected || !collected.first()) return cancel();
            const confMessage = collected.first().content;
            if(confMessage === "cancel") return cancel();
            collected.first().delete().catch(() => {});
            filter = (reaction, user) => {
                return ['1️⃣', '2️⃣', '3️⃣', '✅'].includes(reaction.emoji.name) && user.id === message.author.id;
            };
            return confMessage;
        };

        // Aborts the wizard: removes both the embed and the invoking message.
        async function cancel() {
            msg.delete().catch(() => {});
            message.delete().catch(() => {});
        }

        // Renders the initial three-field embed into the waiting message.
        async function displayMain(msg) {
            let embed = new MessageEmbed()
                .setTitle(await client.translate("ecoledirecte:TITLE_LOGIN"))
                .setAuthor(message.author.username, message.author.displayAvatarURL({ dynamic: true }))
                .setFooter(await client.translate("ecoledirecte:FOOTER_LOGIN"))
                .setColor("#099CFE")
                .addField(await client.translate("ecoledirecte:FIRST_LOGIN"), username)
                .addField(await client.translate("ecoledirecte:SECOND_LOGIN"), password)
                .addField(await client.translate("ecoledirecte:THIRD_LOGIN"), await client.translate("common:NO"))
            return msg.edit(embed);
        }

        // Posts a temporary "please wait" embed while reactions are added.
        async function wait() {
            const embed = new MessageEmbed()
                .setTitle(await client.translate("ecoledirecte:TITLE_LOGIN"))
                .setAuthor(message.author.username, message.author.displayAvatarURL({ dynamic: true }))
                .setFooter(client.cfg.footer)
                .setColor("#099CFE")
                .setDescription(client.emotes["waiting"] + " " + await client.translate("common:WAITING"))
            return message.channel.send(embed);
        }

        // Entry point: posts the embed, seeds the reactions, then waits for
        // the first interaction and dispatches it. Resolves undefined.
        async function start() {
            msg = await wait();
            await msg.react('1️⃣');
            await msg.react('2️⃣');
            await msg.react('3️⃣');
            await msg.react('✅');
            msg = await displayMain(msg);
            const toModify = await WaitForReaction(msg);
            await switchTM(toModify);
        }

        // Waits for the next interaction and dispatches it (wizard loop step).
        async function after() {
            const r = await WaitForReaction(msg);
            await switchTM(r);
        }

        // Re-renders the embed with the current username/masked password/
        // remember-me values.
        async function updateEmbed(msg) {
            const embed = new MessageEmbed()
                .setTitle(await client.translate("ecoledirecte:TITLE_LOGIN"))
                .setAuthor(message.author.username, message.author.displayAvatarURL({ dynamic: true }))
                .setFooter(await client.translate("ecoledirecte:FOOTER_LOGIN"))
                .setColor("#099CFE")
                .addField(await client.translate("ecoledirecte:FIRST_LOGIN"), username)
                .addField(await client.translate("ecoledirecte:SECOND_LOGIN"), passwordSecret)
                .addField(await client.translate("ecoledirecte:THIRD_LOGIN"), rememberMe ? await client.translate("common:YES") : await client.translate("common:NO"));
            msg.edit(embed);
        }

        // Builds the masked display form: an inline-code run of '*' with the
        // same length as the password.
        async function changePassword(pswd) {
            let count = parseInt(pswd.length);
            let newPassword = "``";
            for(let i = 0; i < count; i++) {
                newPassword += "*";
            }
            newPassword += "``";
            return newPassword;
        }

        // Dispatches one interaction: edit username (1), edit password (2),
        // toggle remember-me (3), or submit the credentials (✅).
        async function switchTM(tm) {
            switch(tm) {
                case '1️⃣':
                    let msg2 = await message.channel.send(client.emotes["write"] + " " + await client.translate("ecoledirecte:INSTRUCTION_ONE_LOGIN"));
                    username = await WaitForText(msg);
                    await updateEmbed(msg);
                    msg2.delete().catch(() => {});
                    await after();
                    break;
                case '2️⃣':
                    let msg3 = await message.channel.send(client.emotes["write"] + " " + await client.translate("ecoledirecte:INSTRUCTION_TWO_LOGIN"));
                    password = await WaitForText(msg);
                    passwordSecret = await changePassword(password);
                    await updateEmbed(msg);
                    msg3.delete().catch(() => {});
                    await after();
                    break;
                case '3️⃣':
                    rememberMe ? rememberMe = false : rememberMe = true;
                    await updateEmbed(msg);
                    await after();
                    break;
                case '✅':
                    await msg.delete().catch(() => {});
                    try {
                        const session = new EcoleDirecte.Session()
                        account = await session.connexion(username, password)
                    } catch(err) {
                        return message.channel.send(client.emotes["error"] + " " + await client.translate("ecoledirecte:ERROR_LOGIN"))
                    };
                    const caseInfo = {
                        "id": message.author.id,
                        "username": username,
                        "password": password
                    };
                    // NOTE(review): credentials are kept in plain text in
                    // client.ecoledirecte.users — confirm this is acceptable.
                    client.ecoledirecte.users.push(caseInfo);
                    if(rememberMe); // Do nothing x)
                    message.channel.send(client.emotes["succes"] + " " + await client.translate("ecoledirecte:CONNECTED_LOGIN", {
                        prenom: account.prenom,
                        nom: account.nom
                    }));
                    break;
                default:
                    return;
            }
        }

        const tm = await start();
        // NOTE(review): start() resolves undefined (it already dispatches via
        // switchTM internally), so this second call always hits the default
        // case and is a no-op — confirm before removing.
        await switchTM(tm);
    };
};
module.exports = Login;
|
#!/usr/bin/env bash
# One Half - Gnome Terminal color scheme install script
# Based on https://github.com/chriskempson/base16-gnome-terminal
[[ -z "$PROFILE_NAME" ]] && PROFILE_NAME="One Half Dark"
[[ -z "$PROFILE_SLUG" ]] && PROFILE_SLUG="one-half-dark"
[[ -z "$DCONF" ]] && DCONF=dconf
[[ -z "$UUIDGEN" ]] && UUIDGEN=uuidgen
# Writes a single key under the new profile via dconf.
# NOTE(review): `$type` is never set anywhere in this (dconf) code path, so
# the string-quoting branch below never fires; callers pre-quote their
# values instead — confirm before relying on it.
dset() {
  local key="$1"; shift
  local val="$1"; shift

  if [[ "$type" == "string" ]]; then
    val="'$val'"
  fi

  "$DCONF" write "$PROFILE_KEY/$key" "$val"
}
# because dconf still doesn't have "append"
dlist_append() {
local key="$1"; shift
local val="$1"; shift
local entries="$(
{
"$DCONF" read "$key" | tr -d '[]' | tr , "\n" | fgrep -v "$val"
echo "'$val'"
} | head -c-1 | tr "\n" ,
)"
"$DCONF" write "$key" "[$entries]"
}
# Newest versions of gnome-terminal use dconf
if which "$DCONF" > /dev/null 2>&1; then
[[ -z "$BASE_KEY_NEW" ]] && BASE_KEY_NEW=/org/gnome/terminal/legacy/profiles:
if which "$UUIDGEN" > /dev/null 2>&1; then
PROFILE_SLUG=`uuidgen`
elif [ -f /proc/sys/kernel/random/uuid ]; then
PROFILE_SLUG=`cat /proc/sys/kernel/random/uuid`
fi
if [[ -n "`$DCONF read $BASE_KEY_NEW/default`" ]]; then
DEFAULT_SLUG=`$DCONF read $BASE_KEY_NEW/default | tr -d \'`
else
DEFAULT_SLUG=`$DCONF list $BASE_KEY_NEW/ | grep '^:' | head -n1 | tr -d :/`
fi
DEFAULT_KEY="$BASE_KEY_NEW/:$DEFAULT_SLUG"
PROFILE_KEY="$BASE_KEY_NEW/:$PROFILE_SLUG"
# copy existing settings from default profile
$DCONF dump "$DEFAULT_KEY/" | $DCONF load "$PROFILE_KEY/"
# add new copy to list of profiles
dlist_append $BASE_KEY_NEW/list "$PROFILE_SLUG"
# update profile values with theme options
dset visible-name "'$PROFILE_NAME'"
dset palette "['#282c34', '#e06c75', '#98c379', '#e5c07b', '#61afef', '#c678dd', '#56b6c2', '#dcdfe4', '#5d677a', '#e06c75', '#98c379', '#e5c07b', '#61afef', '#c678dd', '#56b6c2', '#dcdfe4']"
dset background-color "'#282c34'"
dset foreground-color "'#dcdfe4'"
dset bold-color "'#dcdfe4'"
dset bold-color-same-as-fg "true"
dset use-theme-colors "false"
dset use-theme-background "false"
unset PROFILE_NAME
unset PROFILE_SLUG
unset DCONF
unset UUIDGEN
exit 0
fi
# Fallback for Gnome 2 and early Gnome 3
[[ -z "$GCONFTOOL" ]] && GCONFTOOL=gconftool
[[ -z "$BASE_KEY" ]] && BASE_KEY=/apps/gnome-terminal/profiles
PROFILE_KEY="$BASE_KEY/$PROFILE_SLUG"
# Writes one typed key under the legacy gconf profile.
gset() {
  local type="$1"; shift
  local key="$1"; shift
  local val="$1"; shift

  # `--` prevents values starting with '-' being parsed as options.
  "$GCONFTOOL" --set --type "$type" "$PROFILE_KEY/$key" -- "$val"
}
# Because gconftool doesn't have "append"
glist_append() {
local type="$1"; shift
local key="$1"; shift
local val="$1"; shift
local entries="$(
{
"$GCONFTOOL" --get "$key" | tr -d '[]' | tr , "\n" | fgrep -v "$val"
echo "$val"
} | head -c-1 | tr "\n" ,
)"
"$GCONFTOOL" --set --type list --list-type $type "$key" "[$entries]"
}
# Append the Base16 profile to the profile list
glist_append string /apps/gnome-terminal/global/profile_list "$PROFILE_SLUG"
gset string visible_name "$PROFILE_NAME"
gset string palette "#282c34:#e06c75:#98c379:#e5c07b:#61afef:#c678dd:#56b6c2:#dcdfe4:#5d677a:#e06c75:#98c379:#e5c07b:#61afef:#c678dd:#56b6c2:#dcdfe4"
gset string background_color "#282c34"
gset string foreground_color "#dcdfe4"
gset string bold_color "#dcdfe4"
gset bool bold_color_same_as_fg "true"
gset bool use_theme_colors "false"
gset bool use_theme_background "false"
unset PROFILE_NAME
unset PROFILE_SLUG
unset DCONF
unset UUIDGEN
|
import Head from "next/head";
import "antd/dist/antd.css";
// Meta names that are emitted both as <meta name> and OpenGraph properties.
const metaList = ["description", "keywords", "title", "name"];

/**
 * Custom Next.js App: wraps every page with shared <head> tags (title,
 * viewport, meta/OpenGraph tags, font and icon stylesheets) and the global
 * antd styles imported above.
 */
export default function MyApp({
  Component,
  pageProps,
}: {
  Component: any;
  pageProps: any;
}) {
  return (
    <>
      <Head>
        <title><NAME> personal demo projects</title>
        <meta name="viewport" content="initial-scale=1.0, width=device-width" />
        {metaList.map(meta => (
          <meta
            key={meta}
            name={meta}
            property={`og:${meta}`}
            content={`<NAME> personal demo projects with Typescript, react.js, stylus, Next.js ... etc.`}
          />
        ))}
        <link
          rel="stylesheet"
          href="https://fonts.googleapis.com/css?family=Roboto:300,400,500,700&display=swap"
        />
        <link
          rel="stylesheet"
          href="https://fonts.googleapis.com/icon?family=Material+Icons"
        />
      </Head>
      <Component {...pageProps} />
    </>
  );
}
|
#!/usr/bin/bash
set -x

# Remote folder on the public bucket; defaults to the pruned mainnet DB.
FOLDER=${1:-mainnet.min}

# Wait until rclone is available.
# NOTE(review): both branches `break` on the first iteration (see the
# "need to discuss with LC" marker), so this loop never actually waits —
# confirm whether the else-branch break should be removed.
while :; do
  if command -v rclone; then
    break
  else
    echo waiting for rclone ...
    sleep 10
    # need to discuss with LC
    break
  fi
done

# wait for harmony.service to start
sleep 30

# stop harmony service
sudo systemctl stop harmony.service

unset shard
# determine the shard number: the highest-numbered shard db dir present
for s in 3 2 1; do
  if [ -d harmony_db_${s} ]; then
    shard=${s}
    # download shard db
    echo rclone syncing harmony_db_${shard}
    rclone sync -P mainnet-do:pub.harmony/${FOLDER}/harmony_db_${shard} /root/harmony_db_${shard}
    break
  fi
done

# download beacon chain db anyway
echo rclone syncing harmony_db_0
rclone sync -P mainnet-do:pub.harmony/${FOLDER}/harmony_db_0 /root//harmony_db_0

# restart the harmony service
sudo systemctl start harmony.service
|
#!/bin/bash
echo "~~~ MANAGERIAL: Installs management apps. ~~~";
echo "Installing:";
echo " dconf-tools: configuration toolkit";
echo " gconf-editor: configuration toolkit";
echo " unity-tweak-tool: Tweak Unity's appearance";
echo " htop: Processes Terminal Tool";
echo " acpi: Hardware Status Utility";
echo " gparted: Partition Manipulation Utility";
echo " pavucontrol: Pulse Audio Volume Control Utility";
echo " zenmap: Network Sniffing Utility";
echo " ubuntu-tweak: Ubuntu Tweak Tool";

# PPA's
# NOTE(review): the getdeb archive may no longer be online — confirm.
wget -q -O - http://archive.getdeb.net/getdeb-archive.key | sudo apt-key add -
sudo sh -c 'echo "deb http://archive.getdeb.net/ubuntu xenial-getdeb apps" >> /etc/apt/sources.list.d/getdeb.list'
sudo apt-get update;

# Installs
sudo apt-get install -y dconf-tools;
sudo apt-get install -y gconf-editor;   # fix: announced above but was never installed
sudo apt-get install -y unity-tweak-tool;
sudo apt-get install -y htop;
sudo apt-get install -y acpi;
sudo apt-get install -y gparted;
sudo apt-get install -y pavucontrol;
sudo apt-get install -y zenmap;
sudo apt-get install -y ubuntu-tweak;

echo "MANAGERIAL INSTALLED";
|
from pyedflib import EdfReader
def average_attribute_value(file_path: str, attribute_name: str) -> float:
    """Average the values of ``attribute_name`` from an EDF file.

    The sum of the attribute's values is divided by the sample count of
    the file's first signal (``getNSamples()[0]``), matching the original
    behaviour. The reader is now closed even when reading fails.
    """
    edf_file = EdfReader(file_path)
    try:
        num_samples = edf_file.getNSamples()[0]
        # NOTE(review): pyedflib's readAnnotations() returns a tuple of
        # arrays (onsets, durations, descriptions); indexing it with a
        # string key presumably relies on a wrapper — confirm this works
        # against the pyedflib version in use.
        attribute_values = edf_file.readAnnotations()[attribute_name]
        # NOTE(review): dividing by num_samples rather than
        # len(attribute_values) may be intentional — confirm.
        return sum(attribute_values) / num_samples
    finally:
        # Always release the EDF file handle, even on error.
        edf_file.close()
|
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
set -e

ROOT_DIR=$(git rev-parse --show-toplevel)
cd $ROOT_DIR/pulsar-client-cpp

./pulsar-test-service-start.sh

pushd tests
# Under `set -e` a failing test command aborts the script before `RES=$?`
# could run, so RES was always 0. `|| RES=$?` keeps the script alive and
# records the failure instead.
RES=0
if [ -f /gtest-parallel/gtest-parallel ]; then
  echo "---- Run unit tests in parallel"
  /gtest-parallel/gtest-parallel ./main --workers=10 || RES=$?
else
  ./main || RES=$?
fi
popd

if [ $RES -eq 0 ]; then
  pushd python
  echo "---- Build Python Wheel file"
  python setup.py bdist_wheel

  echo "---- Installing Python Wheel file"
  pip install dist/pulsar_client-*-linux_x86_64.whl

  echo "---- Running Python unit tests"
  # Running tests from a different directory to avoid importing directly
  # from the current dir, but rather using the installed wheel file
  cp pulsar_test.py /tmp
  pushd /tmp
  # Only record failures so the second suite cannot clobber an earlier
  # non-zero status back to 0.
  python pulsar_test.py || RES=$?

  echo "---- Running Python Function Instance unit tests"
  bash /pulsar/pulsar-functions/instance/src/scripts/run_python_instance_tests.sh || RES=$?
  popd
  popd
fi

./pulsar-test-service-stop.sh

exit $RES
|
<reponame>benmizrahi/benthos<filename>internal/old/input/socket.go
package input
import (
"context"
"errors"
"fmt"
"io"
"net"
"sync"
"time"
"github.com/benthosdev/benthos/v4/internal/codec"
"github.com/benthosdev/benthos/v4/internal/component"
"github.com/benthosdev/benthos/v4/internal/component/input"
"github.com/benthosdev/benthos/v4/internal/component/metrics"
"github.com/benthosdev/benthos/v4/internal/docs"
"github.com/benthosdev/benthos/v4/internal/interop"
"github.com/benthosdev/benthos/v4/internal/log"
"github.com/benthosdev/benthos/v4/internal/message"
"github.com/benthosdev/benthos/v4/internal/old/input/reader"
)
//------------------------------------------------------------------------------
// init registers the socket input with the component registry, including
// its summary documentation and configuration field spec.
func init() {
	Constructors[TypeSocket] = TypeSpec{
		constructor: fromSimpleConstructor(NewSocket),
		Summary: `
Connects to a tcp or unix socket and consumes a continuous stream of messages.`,
		Config: docs.FieldComponent().WithChildren(
			docs.FieldString("network", "A network type to assume (unix|tcp).").HasOptions(
				"unix", "tcp",
			),
			docs.FieldString("address", "The address to connect to.", "/tmp/benthos.sock", "127.0.0.1:6000"),
			codec.ReaderDocs.AtVersion("3.42.0"),
			docs.FieldInt("max_buffer", "The maximum message buffer size. Must exceed the largest message to be consumed.").Advanced(),
		),
		Categories: []string{
			"Network",
		},
	}
}
//------------------------------------------------------------------------------
// SocketConfig contains configuration values for the Socket input type.
type SocketConfig struct {
	Network   string `json:"network" yaml:"network"`       // "unix" or "tcp" (validated in newSocketClient)
	Address   string `json:"address" yaml:"address"`       // socket path or host:port
	Codec     string `json:"codec" yaml:"codec"`           // message framing codec name
	MaxBuffer int    `json:"max_buffer" yaml:"max_buffer"` // max scan token size passed to the codec reader
}
// NewSocketConfig creates a new SocketConfig with default values
// (line-delimited codec, 1MB max buffer, network/address unset).
func NewSocketConfig() SocketConfig {
	return SocketConfig{
		Network:   "",
		Address:   "",
		Codec:     "lines",
		MaxBuffer: 1000000,
	}
}
//------------------------------------------------------------------------------
// NewSocket creates a new Socket input type. mgr is currently unused here;
// it is kept to satisfy the shared constructor signature.
func NewSocket(conf Config, mgr interop.Manager, log log.Modular, stats metrics.Type) (input.Streamed, error) {
	rdr, err := newSocketClient(conf.Socket, log)
	if err != nil {
		return nil, err
	}
	// TODO: Consider removing the async cut off here. It adds an overhead and
	// we can get the same results by making sure that the async readers forward
	// CloseAsync all the way through. We would need it to be configurable as it
	// wouldn't be appropriate for inputs that have real acks.
	return NewAsyncReader(TypeSocket, true, reader.NewAsyncCutOff(reader.NewAsyncPreserver(rdr)), log, stats)
}
//------------------------------------------------------------------------------
// socketClient reads a continuous stream of messages from a dialled socket,
// decoding it with a configurable codec.
type socketClient struct {
	log  log.Modular
	conf SocketConfig

	// codecCtor builds a fresh codec reader per connection.
	codecCtor codec.ReaderConstructor

	// codecMut guards codec; a non-nil codec implies an open connection.
	codecMut sync.Mutex
	codec    codec.Reader
}
// newSocketClient builds a socketClient from the given config, rejecting
// unsupported network types and pre-constructing the codec reader factory.
func newSocketClient(conf SocketConfig, logger log.Modular) (*socketClient, error) {
	// Only stream-oriented socket networks are supported.
	if conf.Network != "unix" && conf.Network != "tcp" {
		return nil, fmt.Errorf("socket network '%v' is not supported by this input", conf.Network)
	}

	codecConf := codec.NewReaderConfig()
	codecConf.MaxScanTokenSize = conf.MaxBuffer
	ctor, err := codec.GetReader(conf.Codec, codecConf)
	if err != nil {
		return nil, err
	}

	client := &socketClient{
		log:       logger,
		conf:      conf,
		codecCtor: ctor,
	}
	return client, nil
}
// ConnectWithContext dials the configured (tcp|unix) socket address and
// initialises the codec reader over the raw connection. It is a no-op when a
// connection is already active.
func (s *socketClient) ConnectWithContext(ctx context.Context) error {
	s.codecMut.Lock()
	defer s.codecMut.Unlock()
	// A non-nil codec implies an established connection.
	if s.codec != nil {
		return nil
	}
	conn, err := net.Dial(s.conf.Network, s.conf.Address)
	if err != nil {
		return err
	}
	// The ack func is a no-op: a raw socket source has no upstream to ack to.
	if s.codec, err = s.codecCtor("", conn, func(ctx context.Context, err error) error {
		return nil
	}); err != nil {
		conn.Close()
		return err
	}
	s.log.Infof("Consuming from socket at '%v://%v'\n", s.conf.Network, s.conf.Address)
	return nil
}
// ReadWithContext attempts to read the next message batch from the socket via
// the active codec. It returns component.ErrNotConnected before a connection
// is established, component.ErrTimeout on cancellation/EOF/empty reads, and
// tears the codec down on any other error so the next connect re-dials.
func (s *socketClient) ReadWithContext(ctx context.Context) (*message.Batch, reader.AsyncAckFn, error) {
	// Snapshot the codec under lock so concurrent Connect/Close are safe.
	s.codecMut.Lock()
	codec := s.codec
	s.codecMut.Unlock()
	if codec == nil {
		return nil, nil, component.ErrNotConnected
	}
	parts, codecAckFn, err := codec.Next(ctx)
	if err != nil {
		if errors.Is(err, context.Canceled) ||
			errors.Is(err, context.DeadlineExceeded) {
			err = component.ErrTimeout
		}
		// A non-timeout error invalidates the connection: close and clear the
		// codec, but only if it is still the one we read from.
		if err != component.ErrTimeout {
			s.codecMut.Lock()
			if s.codec != nil && s.codec == codec {
				s.codec.Close(ctx)
				s.codec = nil
			}
			s.codecMut.Unlock()
		}
		// EOF is surfaced as a timeout so the async reader retries/reconnects.
		if errors.Is(err, io.EOF) {
			return nil, nil, component.ErrTimeout
		}
		return nil, nil, err
	}
	// We simply bounce rejected messages in a loop downstream so there's no
	// benefit to aggregating acks.
	_ = codecAckFn(context.Background(), nil)
	msg := message.QuickBatch(nil)
	msg.Append(parts...)
	if msg.Len() == 0 {
		return nil, nil, component.ErrTimeout
	}
	return msg, func(rctx context.Context, res error) error {
		return nil
	}, nil
}
// CloseAsync begins cleaning up resources used by this reader asynchronously.
func (s *socketClient) CloseAsync() {
	s.codecMut.Lock()
	defer s.codecMut.Unlock()

	if s.codec == nil {
		return
	}
	s.codec.Close(context.Background())
	s.codec = nil
}
// WaitForClose will block until either the reader is closed or a specified
// timeout occurs. The socket client performs its cleanup synchronously in
// CloseAsync, so there is nothing to wait for and it returns immediately.
func (s *socketClient) WaitForClose(time.Duration) error {
	return nil
}
|
<reponame>sparber/CComprehensiveDatatypes<filename>src/tree/declarations/TStorageClassSpecifier.java
package tree.declarations;
import tree.DefaultTreeNode;
import tree.symbols.TSAuto;
import tree.symbols.TSExtern;
import tree.symbols.TSRegister;
import tree.symbols.TSStatic;
import tree.symbols.TSThreadLocal;
import tree.symbols.TSTypedef;
/**
 * Parse-tree node representing a C storage-class specifier.
 *
 * <p>Exactly one storage-class keyword symbol is stored as the single child
 * of this node; one constructor overload exists per supported symbol
 * (typedef, extern, static, thread-local, auto, register), plus a copy
 * constructor.
 */
public class TStorageClassSpecifier extends DefaultTreeNode {

    /** Copy constructor; delegates copying to {@link DefaultTreeNode}. */
    public TStorageClassSpecifier(TStorageClassSpecifier node) {
        super(node);
    }

    public TStorageClassSpecifier(TSTypedef typedef) {
        addChild(typedef);
    }

    public TStorageClassSpecifier(TSExtern extern) {
        addChild(extern);
    }

    public TStorageClassSpecifier(TSStatic stat) {
        addChild(stat);
    }

    public TStorageClassSpecifier(TSThreadLocal thread_local) {
        addChild(thread_local);
    }

    public TStorageClassSpecifier(TSAuto auto) {
        addChild(auto);
    }

    public TStorageClassSpecifier(TSRegister register) {
        addChild(register);
    }
}
|
#!/bin/bash
# Build the static website: copy dist/ output and docs/ into website/, then
# commit the result, recording the source commit SHA in the message.
#
# Fix: the script previously had no error handling, so a failed copy or a
# failed `git rev-parse` would still result in a (possibly broken or
# mislabelled) commit. Abort on any error, unset variable, or pipe failure.
set -euo pipefail

echo "Clearing $(pwd)/website"
rm -rf website
mkdir website

echo "Copying $(pwd)/dist/* to $(pwd)/website/"
cp -R dist/* website/

echo "Copying $(pwd)/docs to $(pwd)/website/demo/docs"
mkdir -p website/demo/docs
cp -R docs/* website/demo/docs/

CURRENT_SHA="$(git rev-parse HEAD)"
git add website/
git commit -m "built website from $CURRENT_SHA"
echo "Built and committed as $CURRENT_SHA"
|
import React from 'react';
import { Container, Row, Col } from 'react-bootstrap';
import { Image, Item } from 'semantic-ui-react';
import Navbar from '../components/Navbar';
import Ipsum from '../components/Ipsum';
import ImageExampleFluid from '../components/ImageExampleFluid';
import PageTitle from '../components/PageTitle';
import MenuExampleCompact from '../components/menubars/Moviebar';
import Footer from '../components/Footer/Footer';
import Videocard from '../components/Videocard';
import moment from 'moment';
import 'moment/locale/ko';
import Pagination_Month from './Pagination_mp';
import { useMediaQuery } from 'react-responsive'
export default function MonthlyPage() {
const nowTime = moment().format("YYYY-MM-DD HH:mm:ss");
const paragraph = <Image src='/images/wireframe/short-paragraph.png' />
return(
<>
<Navbar/>
<Container>
<Row>
<ImageExampleFluid/>
</Row>
<br>
</br>
<Row>
<PageTitle title={"Monthly-Key:um"}/>
</Row>
<br></br>
<Row>
<Col className="item" md="2">
<MenuExampleCompact />
</Col>
<Col className="item" md="5">
<Item.Group>
<Item>
<Videocard
title ={"동영상1"}
time = {nowTime}
user ={"총동연 기획국"}
watched = {"22 watched"}/>
</Item>
<Item>
<Videocard
title ={"동영상2"}
time = {nowTime}
user ={"총동연 사무국"}
watched = {"44 watched"}/>
</Item>
<Item>
<Videocard
title ={"동영상3"}
time = {nowTime}
user ={"총동연 협력국"}
watched = {"30 watched"}/>
</Item>
</Item.Group>
</Col>
<Col className ="item" md = "5">
<Item.Group>
<Item>
<Videocard
title ={"동영상1"}
time = {nowTime}
user ={"총동연 기획국"}
watched = {"22 watched"}/>
</Item>
<Item>
<Videocard
title ={"동영상2"}
time = {nowTime}
user ={"총동연 사무국"}
watched = {"44 watched"}/>
</Item>
<Item>
<Videocard
title ={"동영상3"}
time = {nowTime}
user ={"총동연 협력국"}
watched = {"30 watched"}/>
</Item>
</Item.Group>
</Col>
</Row>
<br/>
<Row>
<Col md = '4'>
</Col>
<Col md = '4'>
<Pagination_Month />
</Col>
<Col md = '4'>
</Col>
</Row>
<br/>
</Container>
<Footer />
</>
);
}
|
<reponame>danielleolgin/to-fix-backend<gh_stars>10-100
'use strict';
const _ = require('lodash');
const test = require('./lib/test');
const removeDates = require('./lib/remove-dates');
const checkLock = require('./lib/check-lock');
const turfRandom = require('@turf/random');
// Fixture: projects used by the item-listing tests. Project 0 has two plain
// items, Project 1 has none, Project 2 mixes locked and unlocked items,
// Project 3 holds expired or just-expiring locks, and Project 4 provides a
// larger item set for pagination, status and date filtering.
const listItemsFixture = [
  {
    id: '00000000-0000-0000-0000-000000000000',
    name: 'Project 0',
    items: [
      {
        id: '77',
        pin: [77, 77]
      },
      {
        id: '30',
        pin: [30, 30]
      }
    ]
  },
  {
    id: '11111111-1111-1111-1111-111111111111',
    name: 'Project 1'
  },
  {
    id: '22222222-2222-2222-2222-222222222222',
    name: 'Project 2',
    items: [
      {
        id: '30',
        pin: [30, 30],
        lockedBy: 'userone',
        lockedTill: new Date(Date.now() + 1000 * 15 * 60)
      },
      {
        id: '31',
        pin: [31, 31]
      },
      {
        id: '32',
        pin: [32, 32],
        lockedBy: 'usertwo',
        lockedTill: new Date(Date.now() + 1000 * 15 * 60)
      },
      {
        id: '33',
        pin: [33, 33],
        lockedBy: 'userone',
        lockedTill: new Date(Date.now() + 1000 * 15 * 60)
      }
    ]
  },
  {
    id: '33333333-3333-3333-3333-333333333333',
    name: 'Project 3',
    items: [
      {
        // Lock expired 15 minutes ago => counts as unlocked.
        id: '30',
        pin: [30, 30],
        lockedBy: 'userone',
        lockedTill: new Date(Date.now() - 1000 * 15 * 60)
      },
      {
        id: '31',
        pin: [31, 31],
        lockedBy: 'usertwo',
        lockedTill: new Date(Date.now() + 2 * 1000 * 15 * 60)
      },
      {
        // Lock expires "now" => treated as unlocked by the lock filter tests.
        id: '32',
        pin: [32, 32],
        lockedBy: 'userone',
        lockedTill: new Date(Date.now())
      }
    ]
  },
  {
    id: '44444444-4444-4444-4444-444444444444',
    name: 'Project 4',
    items: [
      {
        id: '30',
        pin: [30, 30],
        lockedBy: 'userone',
        lockedTill: new Date(Date.now() - 1000 * 15 * 60),
        status: 'open'
      },
      {
        id: '32',
        pin: [30, 30],
        lockedBy: 'usertwo',
        lockedTill: new Date(Date.now() + 2 * 1000 * 15 * 60),
        status: 'fixed'
      },
      {
        id: '33',
        pin: [30, 30],
        lockedBy: 'userone',
        lockedTill: new Date(Date.now()),
        status: 'noterror'
      },
      {
        id: '40',
        pin: [30, 30],
        lockedBy: 'userone',
        lockedTill: new Date(Date.now() - 1000 * 15 * 60)
      },
      {
        id: '42',
        pin: [30, 30],
        lockedBy: 'usertwo',
        lockedTill: new Date(Date.now() + 2 * 1000 * 15 * 60)
      },
      {
        id: '43',
        pin: [30, 30],
        lockedBy: 'userone',
        lockedTill: new Date(Date.now())
      },
      {
        id: '50',
        pin: [30, 30],
        lockedBy: 'userone',
        lockedTill: new Date(Date.now() - 1000 * 15 * 60)
      },
      {
        id: '52',
        pin: [30, 30],
        lockedBy: 'usertwo',
        lockedTill: new Date(Date.now() + 2 * 1000 * 15 * 60)
      },
      {
        // Explicit createdAt values below are used by the from/to date tests.
        id: '53',
        pin: [30, 30],
        lockedBy: 'userone',
        lockedTill: new Date(Date.now()),
        createdAt: '2017-09-03T17:49:25.928Z'
      },
      {
        id: '60',
        pin: [30, 30],
        lockedBy: 'userone',
        lockedTill: new Date(Date.now() - 1000 * 15 * 60),
        createdAt: '2017-10-03T17:49:25.928Z'
      },
      {
        id: '62',
        pin: [30, 30],
        lockedBy: 'usertwo',
        lockedTill: new Date(Date.now() + 2 * 1000 * 15 * 60),
        createdAt: '2017-10-05T17:49:25.928Z'
      },
      {
        id: '63',
        pin: [30, 30],
        lockedBy: 'userone',
        lockedTill: new Date(Date.now())
      }
    ]
  }
];

// Fixture: one project with items spread over two locations plus one closed
// item, exercising bbox and bbox+status filtering.
const filterItemsFixture = [
  {
    id: '66666666-6666-6666-6666-666666666666',
    name: 'Project 1',
    items: [
      {
        id: '10',
        pin: [0, 0]
      },
      {
        id: '30',
        pin: [30, 30]
      },
      {
        id: '40',
        pin: [0, 0],
        status: 'closed'
      }
    ]
  }
];

// Fixture: minimal project with a single item, used by the POST/GET
// single-item tests.
const getItemsFixture = [
  {
    id: '11111111-1111-1111-1111-111111111111',
    name: 'Project 1',
    items: [
      {
        id: '30',
        pin: [30, 30]
      }
    ]
  }
];

// Fixture: a project whose only item carries no lock.
const projectWithOneUnlockedItem = [
  {
    id: '00000000-0000-0000-0000-000000000000',
    name: 'Project 0',
    items: [
      {
        id: '30',
        pin: [30, 30]
      }
    ]
  }
];

// Fixture: the only item is locked by a user other than the test requester.
const projectWithOneItemLockedByUserTwo = [
  {
    id: '00000000-0000-0000-0000-000000000000',
    name: 'Project 0',
    items: [
      {
        id: '30',
        pin: [30, 30],
        lockedBy: 'usertwo',
        lockedTill: new Date(Date.now() + 1000 * 15 * 60)
      }
    ]
  }
];

// Fixture: the only item is locked by 'test-user' — despite the "UserOne"
// name, this is the identity the test token authenticates as, so the
// requester holds the active lock.
const projectWithOneItemLockedByUserOne = [
  {
    id: '00000000-0000-0000-0000-000000000000',
    name: 'Project 0',
    items: [
      {
        id: '30',
        pin: [30, 30],
        lockedBy: 'test-user',
        lockedTill: new Date(Date.now() + 1000 * 15 * 60)
      }
    ]
  }
];

// Fixture: a project with two tags; two items carry tags (one carries both)
// and one item is untagged, for the tag-filter tests.
const itemsWithTags = [
  {
    id: '00000000-0000-0000-0000-000000000000',
    name: 'Project 0',
    items: [
      {
        id: '111111',
        pin: [77, 77],
        tags: ['My Tag', 'My Other Tag']
      },
      {
        id: '222222',
        pin: [33, 33],
        tags: ['My Other Tag']
      },
      {
        id: '333333',
        pin: [44, 44]
      }
    ],
    tags: [
      {
        name: 'My Tag'
      },
      {
        name: 'My Other Tag'
      }
    ]
  }
];
const delay = time => new Promise(res => setTimeout(res, time));
/* GET /projects/:project/items */
test(
'GET /:version/projects/:id/items - get a project that is not in the db',
listItemsFixture,
(assert, token) => {
assert.app
.get('/v1/projects/aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa/items')
.set('authorization', token)
.expect(404, (err, res) => {
assert.ifError(err, 'should not error');
assert.ok(res.body.message, 'has message attr');
assert.end();
});
}
);
test(
'GET /:version/projects/:id/items - get a project without items',
listItemsFixture,
(assert, token) => {
assert.app
.get('/v1/projects/11111111-1111-1111-1111-111111111111/items')
.set('authorization', token)
.expect(200, (err, res) => {
assert.ifError(err, 'should not error');
assert.equal(res.body.length, 0, 'has no items');
assert.end();
});
}
);
test(
'GET /:version/projects/:id/items - get a project with items',
listItemsFixture,
(assert, token) => {
assert.app
.get('/v1/projects/00000000-0000-0000-0000-000000000000/items')
.set('authorization', token)
.expect(200, (err, res) => {
assert.ifError(err, 'should not error');
assert.equal(res.body.length, 2, 'has right number of items');
var items = res.body.reduce(function(m, i) {
m[i.id] = removeDates(i);
return m;
}, {});
assert.equal(
items['30'].project_id,
'00000000-0000-0000-0000-000000000000',
'item 30 should have the right project'
);
assert.deepEqual(
items['30'].pin,
{
type: 'Point',
coordinates: [30, 30]
},
'item 30 should be pin at the right spot'
);
assert.equal(
items['77'].project_id,
'00000000-0000-0000-0000-000000000000',
'item 77 should have the right project'
);
assert.deepEqual(
items['77'].pin,
{
type: 'Point',
coordinates: [77, 77]
},
'item 77 should be pin at the right spot'
);
assert.end();
});
}
);
test(
'GET /:version/projects/:id/items?page=X&page_size=Y - get a project with pagination',
listItemsFixture,
(assert, token) => {
var requests = [
assert.app
.get(
'/v1/projects/44444444-4444-4444-4444-444444444444/items?page=0&page_size=10'
)
.set('authorization', token)
.expect(200)
.then(res => {
assert.equal(
res.body.length,
10,
'page 0 with size 10 should have 10 items'
);
}),
assert.app
.get('/v1/projects/44444444-4444-4444-4444-444444444444/items?page=0')
.set('authorization', token)
.expect(200)
.then(function(res) {
assert.equal(
res.body.length,
12,
'page 0 with default size should have 12 items'
);
}),
assert.app
.get(
'/v1/projects/44444444-4444-4444-4444-444444444444/items?page=0&page_size=5'
)
.set('authorization', token)
.expect(200)
.then(function(res) {
assert.equal(
res.body.length,
5,
'page 0 with size 5 should have 5 items'
);
}),
assert.app
.get(
'/v1/projects/44444444-4444-4444-4444-444444444444/items?page=1&page_size=5'
)
.set('authorization', token)
.expect(200)
.then(function(res) {
assert.equal(
res.body.length,
5,
'page 1 with size 5 should have 5 items'
);
}),
assert.app
.get(
'/v1/projects/44444444-4444-4444-4444-444444444444/items?page=2&page_size=5'
)
.set('authorization', token)
.expect(200)
.then(function(res) {
assert.equal(
res.body.length,
2,
'page 2 with size 5 should have 2 items'
);
}),
assert.app
.get(
'/v1/projects/44444444-4444-4444-4444-444444444444/items?page=3&page_size=5'
)
.set('authorization', token)
.expect(200)
.then(function(res) {
assert.equal(
res.body.length,
0,
'page 3 with size 5 should have 0 items'
);
}),
assert.app
.get(
'/v1/projects/44444444-4444-4444-4444-444444444444/items?page=3000&page_size=5'
)
.set('authorization', token)
.expect(200)
.then(function(res) {
assert.equal(
res.body.length,
0,
'page 3000 with size 5 should have 0 items'
);
}),
assert.app
.get(
'/v1/projects/44444444-4444-4444-4444-444444444444/items?page=0&page_size=0'
)
.set('authorization', token)
.expect(200)
.then(function(res) {
assert.equal(
res.body.length,
0,
'page 0 with size 0 should have 0 items'
);
}),
assert.app
.get(
'/v1/projects/44444444-4444-4444-4444-444444444444/items?page=-1&page_size=5'
)
.set('authorization', token)
.expect(200)
];
Promise.all(requests)
.then(function() {
assert.end();
})
.catch(function(err) {
return assert.end(err);
});
}
);
test(
'GET /:version/projects/:id/items?status=<status> - filter items by status',
listItemsFixture,
(assert, token) => {
assert.app
.get(
'/v1/projects/44444444-4444-4444-4444-444444444444/items?status=noterror'
)
.set('authorization', token)
.expect(200, (err, res) => {
assert.ifError(err, 'should not error');
assert.equal(res.body.length, 1, 'should have 1 noterror item');
assert.end();
});
}
);
test(
'GET /:version/projects/:id/items?from=<date>&to=<date> - filter items by date',
listItemsFixture,
(assert, token) => {
assert.app
.get(
'/v1/projects/44444444-4444-4444-4444-444444444444/items?from=2017-10-01&to=2017-10-10'
)
.set('authorization', token)
.expect(200, (err, res) => {
assert.ifError(err, 'date filtering should not error');
assert.equal(
res.body.length,
2,
'should have 2 items within date filter'
);
assert.end();
});
}
);
test(
'GET /:version/projects/:id/items?from=<date> - filter items by from date',
listItemsFixture,
(assert, token) => {
assert.app
.get(
'/v1/projects/44444444-4444-4444-4444-444444444444/items?from=2017-11-01'
)
.set('authorization', token)
.expect(200, (err, res) => {
assert.ifError(err, 'date filtering by from should not error');
assert.equal(
res.body.length,
9,
'should have 9 items with from filter'
);
assert.end();
});
}
);
test(
'GET /:version/projects/:id/items?to=<date> - filter items by to date',
listItemsFixture,
(assert, token) => {
assert.app
.get(
'/v1/projects/44444444-4444-4444-4444-444444444444/items?to=2017-10-04'
)
.set('authorization', token)
.expect(200, (err, res) => {
assert.ifError(err, 'date filtering by to should not error');
assert.equal(res.body.length, 2, 'should have 2 items with to filter');
assert.end();
});
}
);
test(
'GET /:version/projects/:id/items?from=<date>&to=<date> - filter items by invalid date',
listItemsFixture,
(assert, token) => {
assert.app
.get(
'/v1/projects/44444444-4444-4444-4444-444444444444/items?from=foobar'
)
.set('authorization', token)
.expect(400, (err, res) => {
assert.ifError(err, 'invalid date filter does not error');
assert.equal(
res.body.message,
'from parameter must be a valid ISO 8601 date string'
);
assert.end();
});
}
);
// Fix: corrected the typo "flter" -> "filter" in the test description.
test(
  'GET /:version/projects/:id/items?bbox=<bbox> - filter items by bbox',
  filterItemsFixture,
  (assert, token) => {
    assert.app
      .get(
        '/v1/projects/66666666-6666-6666-6666-666666666666/items?bbox=-1,-1,1,1'
      )
      .set('authorization', token)
      .expect(200, (err, res) => {
        assert.ifError(err, 'bbox query should not error');
        assert.equal(res.body.length, 2, 'bbox query should return 2 items');
        assert.end();
      });
  }
);
test(
'GET /:version/projects/:id/items?bbox=<bbox>&status=<status> - filter items by bbox and status',
filterItemsFixture,
(assert, token) => {
assert.app
.get(
'/v1/projects/66666666-6666-6666-6666-666666666666/items?bbox=-1,-1,1,1&status=open'
)
.set('authorization', token)
.expect(200, (err, res) => {
assert.ifError(err, 'bbox + status query should not error');
assert.equal(
res.body.length,
1,
'bbox + status query should return 1 item'
);
assert.end();
});
}
);
test(
'GET /:version/projects/:id/items?lock=locked - get a project with locked items',
listItemsFixture,
(assert, token) => {
assert.app
.get(
'/v1/projects/22222222-2222-2222-2222-222222222222/items?lock=locked'
)
.set('authorization', token)
.expect(200, (err, res) => {
assert.ifError(err, 'should not error');
assert.equal(res.body.length, 3, 'should have 3 locked items');
assert.end();
});
}
);
test(
'GET /:version/projects/:id/items?lock=locked - get a project with locked items with Project3 data',
listItemsFixture,
(assert, token) => {
assert.app
.get(
'/v1/projects/33333333-3333-3333-3333-333333333333/items?lock=locked'
)
.set('authorization', token)
.expect(200, (err, res) => {
assert.ifError(err, 'should not error');
assert.equal(res.body.length, 1, 'should have 1 locked items');
assert.end();
});
}
);
test(
'GET /:version/projects/:id/items?lock=unlocked - get a project with unlocked items',
listItemsFixture,
(assert, token) => {
assert.app
.get(
'/v1/projects/22222222-2222-2222-2222-222222222222/items?lock=unlocked'
)
.set('authorization', token)
.expect(200, (err, res) => {
assert.ifError(err, 'should not error');
assert.equal(res.body.length, 1, 'should have one unlocked item');
assert.end();
});
}
);
test(
'GET /:version/projects/:id/items?lock=unlocked - get a project with unlocked items with Project3 data',
listItemsFixture,
(assert, token) => {
assert.app
.get(
'/v1/projects/33333333-3333-3333-3333-333333333333/items?lock=unlocked'
)
.set('authorization', token)
.expect(200, (err, res) => {
assert.ifError(err, 'should not error');
assert.equal(res.body.length, 2, 'should have 2 unlocked items');
assert.end();
});
}
);
test(
'GET /:version/projects/:id/items?tags=<tags> - filter items by tags',
itemsWithTags,
(assert, token) => {
assert.app
.get('/v1/projects/00000000-0000-0000-0000-000000000000/tags')
.set('authorization', token)
.expect(200, (err, res) => {
assert.ifError(err, 'fetching project tags should not error');
const filterTag = res.body.filter(tag => {
return tag.name === 'My Tag';
});
const tagId = filterTag[0].id;
assert.app
.get(
`/v1/projects/00000000-0000-0000-0000-000000000000/items?tags=${tagId}`
)
.set('authorization', token)
.expect(200, (err, res) => {
assert.ifError(err, 'filtering by tag does not error');
assert.equal(
res.body.length,
1,
'returns one item with tag filter'
);
assert.end();
});
});
}
);
test(
'GET /:version/projects/:id/items?tags=<tags> - filter items by multiple tags',
itemsWithTags,
(assert, token) => {
assert.app
.get('/v1/projects/00000000-0000-0000-0000-000000000000/tags')
.set('authorization', token)
.expect(200, (err, res) => {
assert.ifError(err, 'fetching project tags should not error');
const tagIds = res.body
.map(tag => {
return tag.id;
})
.join(',');
assert.app
.get(
`/v1/projects/00000000-0000-0000-0000-000000000000/items?tags=${tagIds}`
)
.set('authorization', token)
.expect(200, (err, res) => {
assert.ifError(err, 'filtering by tag does not error');
assert.equal(
res.body.length,
2,
'returns two item with multi-tag filter'
);
assert.end();
});
});
}
);
// NOTE(review): this test is an exact duplicate of the earlier
// "lock=unlocked ... with Project3 data" test; consider removing one copy.
test(
  'GET /:version/projects/:id/items?lock=unlocked - get a project with unlocked items with Project3 data',
  listItemsFixture,
  (assert, token) => {
    assert.app
      .get(
        '/v1/projects/33333333-3333-3333-3333-333333333333/items?lock=unlocked'
      )
      .set('authorization', token)
      .expect(200, (err, res) => {
        assert.ifError(err, 'should not error');
        assert.equal(res.body.length, 2, 'should have 2 unlocked items');
        assert.end();
      });
  }
);
/* POST /projects/:project/items */
test(
'POST /:version/projects/:project/items/:item - invalid feature collection properties',
getItemsFixture,
(assert, token) => {
assert.app
.post('/v1/projects/11111111-1111-1111-1111-111111111111/items')
.set('authorization', token)
.send({
id: '1234',
instructions: 'Fix this',
pin: [0, 0],
featureCollection: {
type: 'FeatureCollection',
features: [
{
type: 'Feature',
geometry: {
type: 'Point',
coordinates: [0, 0]
},
properties: {
'tofix:category': 'cat',
invalidProp: 'foobar'
}
}
]
}
})
.expect(400, (err, res) => {
assert.ifError(err, 'should not error');
assert.equal(res.body.message[0], 'Keys must have a type prefix');
assert.end();
});
}
);
test(
'POST /:version/projects/:project/items/:item - invalid body attributes',
getItemsFixture,
(assert, token) => {
assert.app
.post('/v1/projects/11111111-1111-1111-1111-111111111111/items')
.set('authorization', token)
.send({
id: '405270',
instructions: 'Fix this item',
pin: [0, 0],
invalidAttr: true
})
.expect(400, (err, res) => {
assert.ifError(err, 'should not error');
assert.deepEqual(
res.body.message,
'Request contains unexpected attribute invalidAttr'
);
assert.end();
});
}
);
test(
'POST /:version/projects/:project/items/:item - POST large items',
getItemsFixture,
(assert, token) => {
const randomFc = turfRandom('points', 10000);
randomFc.features = randomFc.features.map(feature => {
feature.properties['tofix:category'] = 'cat';
return feature;
});
assert.app
.post('/v1/projects/11111111-1111-1111-1111-111111111111/items')
.set('authorization', token)
.send({
id: '1234',
instructions: 'fix',
pin: [0, 0],
featureCollection: randomFc
})
.expect(200, err => {
assert.ifError(err, 'should not error');
assert.end();
});
}
);
test(
'POST /:version/projects/:project/items/:item - missing required body attributes',
getItemsFixture,
(assert, token) => {
assert.app
.post('/v1/projects/11111111-1111-1111-1111-111111111111/items')
.set('authorization', token)
.send({ id: '405270', instructions: 'Fix this item' })
.expect(400, (err, res) => {
assert.ifError(err, 'should not error');
assert.deepEqual(
res.body.message,
'req.body.pin is a required body attribute'
);
assert.end();
});
}
);
test(
'POST /:version/projects/:project/items/:item - bad ID',
getItemsFixture,
(assert, token) => {
assert.app
.post('/v1/projects/11111111-1111-1111-1111-111111111111/items')
.set('authorization', token)
.send({
id: '******',
instructions: 'Fix this item',
pin: [0, 0]
})
.expect(400, (err, res) => {
assert.ifError(err, 'should not error');
assert.deepEqual(
res.body.message,
'An item must have a valid ID comprised only of letters, numbers, and hyphens'
);
assert.end();
});
}
);
test(
'POST /:version/projects/:project/items/:item - bad instructions',
getItemsFixture,
(assert, token) => {
assert.app
.post('/v1/projects/11111111-1111-1111-1111-111111111111/items')
.set('authorization', token)
.send({
id: '405270',
instructions: 5,
pin: [0, 0]
})
.expect(400, (err, res) => {
assert.ifError(err, 'should not error');
assert.deepEqual(
res.body.message,
'An item must have a valid instruction'
);
assert.end();
});
}
);
test(
'POST /:version/projects/:project/items/:item - bad pin 1',
getItemsFixture,
(assert, token) => {
assert.app
.post('/v1/projects/11111111-1111-1111-1111-111111111111/items')
.set('authorization', token)
.send({
id: '405270',
instructions: 'Fix this item',
pin: [0]
})
.expect(400, (err, res) => {
assert.ifError(err, 'should not error');
assert.deepEqual(
res.body.message,
'An item must have a pin in the [longitude, latitude] format'
);
assert.end();
});
}
);
test(
'POST /:version/projects/:project/items/:item - bad pin 2',
getItemsFixture,
(assert, token) => {
assert.app
.post('/v1/projects/11111111-1111-1111-1111-111111111111/items')
.set('authorization', token)
.send({
id: '405270',
instructions: 'Fix this item',
pin: ['-1000', '1000']
})
.expect(400, (err, res) => {
assert.ifError(err, 'should not error');
assert.deepEqual(
res.body.message,
'Invalid Pin each element in a position must be a number'
);
assert.end();
});
}
);
test(
'POST /:version/projects/:project/items/:item - posting item with same id raises 400',
getItemsFixture,
(assert, token) => {
assert.app
.post('/v1/projects/11111111-1111-1111-1111-111111111111/items')
.set('authorization', token)
.send({
id: '30',
pin: [0, 0],
instructions: 'tofix'
})
.expect(400, (err, res) => {
assert.ifError(err, 'should not error');
assert.equal(
res.body.message,
'Item with this id already exists',
'error message is correct'
);
assert.end();
});
}
);
test(
'POST /:version/projects/:project/items/:item',
getItemsFixture,
(assert, token) => {
assert.app
.post('/v1/projects/11111111-1111-1111-1111-111111111111/items')
.set('authorization', token)
.send({
id: '405270',
instructions: 'Fix this item',
pin: [0, 0]
})
.expect(200, (err, res) => {
assert.ifError(err, 'should not error');
const item = removeDates(res.body);
assert.deepEqual(item, {
status: 'open',
metadata: {},
id: '405270',
project_id: '11111111-1111-1111-1111-111111111111',
pin: { type: 'Point', coordinates: [0, 0] },
quadkey: '3000000000000',
instructions: 'Fix this item',
featureCollection: { type: 'FeatureCollection', features: [] },
createdBy: 'test-user',
lastModifiedBy: '',
lastModifiedDate: '',
lockedBy: null,
sort: 0
});
assert.end();
});
}
);
test(
'POST /:version/projects/:project/items/:item - posting metadata works',
getItemsFixture,
(assert, token) => {
assert.app
.post('/v1/projects/11111111-1111-1111-1111-111111111111/items')
.set('authorization', token)
.send({
id: '405270',
instructions: 'Fix this item',
pin: [0, 0],
metadata: {
foo: 'bar'
}
})
.expect(200, (err, res) => {
assert.ifError(err, 'should not error');
assert.equal(
res.body.metadata.foo,
'bar',
'metadata saved and returned correctly'
);
assert.end();
});
}
);
test(
'POST /:version/projects/:id/items - bulk upload items with a linear wait',
projectWithOneUnlockedItem,
(assert, token) => {
const TOTAL_REQUESTS = 10;
const requests = [];
const featureCollection = {
type: 'FeatureCollection',
features: [
{
type: 'Feature',
properties: { 'tofix:category': 'cat' },
geometry: {
type: 'Point',
coordinates: [30, 30]
}
}
]
};
for (let i = 0; i < TOTAL_REQUESTS; i++) {
requests.push(
delay(i * 50).then(() =>
assert.app
.post(`/v1/projects/00000000-0000-0000-0000-000000000000/items`)
.set('authorization', token)
.send({
id: `item${i}`,
pin: [30, 30],
instructions: 'test',
featureCollection
})
.expect(200)
)
);
}
Promise.all(requests)
.then(function() {
assert.end();
})
.catch(function(err) {
return assert.end(err);
});
}
);
test(
'POST /:version/projects/:id/items - bulk upload items without waiting',
projectWithOneUnlockedItem,
(assert, token) => {
const TOTAL_REQUESTS = 10;
const requests = [];
const featureCollection = {
type: 'FeatureCollection',
features: [
{
type: 'Feature',
properties: { 'tofix:category': 'cat' },
geometry: {
type: 'Point',
coordinates: [30, 30]
}
}
]
};
for (let i = 0; i < TOTAL_REQUESTS; i++) {
requests.push(
assert.app
.post(`/v1/projects/00000000-0000-0000-0000-000000000000/items`)
.set('authorization', token)
.send({
id: `item-${i}`,
pin: [30, 30],
instructions: 'test',
featureCollection
})
.expect(200)
);
}
Promise.all(requests)
.then(function() {
assert.end();
})
.catch(function(err) {
return assert.end(err);
});
}
);
/* GET /projects/:project/items/:item */
test(
'GET /:version/projects/:project/items/:item',
getItemsFixture,
(assert, token) => {
assert.app
.get('/v1/projects/11111111-1111-1111-1111-111111111111/items/30')
.set('authorization', token)
.expect(200, (err, res) => {
assert.ifError(err, 'should not error');
assert.ok(
checkLock.unlocked(res.body),
'lock ended before the test started'
);
var item = removeDates(res.body);
assert.deepEqual(item, {
id: '30',
metadata: {},
project_id: '11111111-1111-1111-1111-111111111111',
pin: {
type: 'Point',
coordinates: [30, 30]
},
quadkey: '1221210321032',
status: 'open',
lockedBy: null,
featureCollection: {
type: 'FeatureCollection',
features: []
},
createdBy: 'userone',
instructions: 'created via the tests',
sort: 0,
lastModifiedBy: '',
lastModifiedDate: ''
});
assert.end();
});
}
);
test(
'PUT /:version/projects/:project/items/:item - updating an item with an invalid pin errors',
projectWithOneUnlockedItem,
(assert, token) => {
assert.app
.put('/v1/projects/00000000-0000-0000-0000-000000000000/items/30')
.set('authorization', token)
.send({ pin: [] })
.expect(400, function(err, res) {
if (err) return assert.end(err);
assert.equal(
res.body.message,
'An item must have a pin in the [longitude, latitude] format'
);
assert.end();
});
}
);
test(
'PUT /:version/projects/:id/items:id - updating an item with an invalid feature collection errors',
projectWithOneUnlockedItem,
(assert, token) => {
assert.app
.put('/v1/projects/00000000-0000-0000-0000-000000000000/items/30')
.set('authorization', token)
.send({ featureCollection: { type: 'FeatureCollection' } })
.expect(400, function(err, res) {
if (err) return assert.end(err);
assert.equal(
res.body.message,
'Invalid featureCollection: "features" member required'
);
assert.end();
});
}
);
test(
'PUT /:version/projects/:id/items:id - update an item',
projectWithOneUnlockedItem,
(assert, token) => {
var fc = {
type: 'FeatureCollection',
features: [
{
type: 'Feature',
properties: { 'tofix:category': 'cat' },
geometry: {
type: 'Point',
coordinates: [30, 30]
}
}
]
};
assert.app
.put('/v1/projects/00000000-0000-0000-0000-000000000000/items/30')
.set('authorization', token)
.send({ featureCollection: fc })
.expect(200, function(err, res) {
if (err) return assert.end(err);
var item = removeDates(res.body);
assert.deepEqual(item.featureCollection, fc);
assert.end();
});
}
);
// Verifies that a subsequent lock-state update does not clobber a previously
// stored featureCollection.
// Fix: the assertion message said 'the item is locked' while the check is
// checkLock.unlocked — corrected the message to match the assertion.
test(
  'PUT /:version/projects/:id/items:id - update an item shouldnt change the featureCollection',
  projectWithOneUnlockedItem,
  (assert, token) => {
    var fc = {
      type: 'FeatureCollection',
      features: [
        {
          type: 'Feature',
          properties: { 'tofix:category': 'cat' },
          geometry: {
            type: 'Point',
            coordinates: [30, 30]
          }
        }
      ]
    };
    assert.app
      .put('/v1/projects/00000000-0000-0000-0000-000000000000/items/30')
      .set('authorization', token)
      .send({ featureCollection: fc })
      .expect(200, function(err) {
        if (err) return assert.end(err);
        assert.app
          .put('/v1/projects/00000000-0000-0000-0000-000000000000/items/30')
          .set('authorization', token)
          .send({ lock: 'unlocked' })
          .expect(200, function(err, res) {
            if (err) return assert.end(err);
            assert.ok(checkLock.unlocked(res.body), 'the item is unlocked');
            var item = removeDates(res.body);
            assert.deepEqual(item.featureCollection, fc);
            assert.end();
          });
      });
  }
);
// {lock: 'locked'} locks the item and records the requesting user.
test(
  'PUT /:version/projects/:id/items:id - the lock can be activated via {lock: locked}',
  projectWithOneUnlockedItem,
  (assert, token) => {
    assert.app
      .put('/v1/projects/00000000-0000-0000-0000-000000000000/items/30')
      .set('authorization', token)
      .send({ lock: 'locked' })
      .expect(200, function(err, res) {
        if (err) return assert.end(err);
        assert.ok(checkLock.locked(res.body), 'the item is locked');
        assert.equal(
          res.body.lockedBy,
          'test-user',
          'item locked by the current user'
        );
        assert.end();
      });
  }
);
// {lock: 'unlocked'} releases a lock held by the requesting user and
// clears lockedBy.
test(
  'PUT /:version/projects/:project/items/:item - the lock can be deactivated via {lock: unlocked}',
  projectWithOneItemLockedByUserOne,
  (assert, token) => {
    assert.app
      .put('/v1/projects/00000000-0000-0000-0000-000000000000/items/30')
      .set('authorization', token)
      .send({ lock: 'unlocked' })
      .expect(200, function(err, res) {
        if (err) return assert.end(err);
        assert.ok(checkLock.unlocked(res.body), 'the item is unlocked');
        // Fixed message: lockedBy is asserted to be null here; the old
        // message claimed the item was locked by the current user.
        assert.equal(
          res.body.lockedBy,
          null,
          'no one holds the lock'
        );
        assert.end();
      });
  }
);
// A status change without holding the lock is rejected with 423 (Locked).
test(
  'PUT /:version/projects/:id/items:id - the status cannot be changed by a user who doesnt have an active lock',
  projectWithOneUnlockedItem,
  (assert, token) => {
    assert.app
      .put('/v1/projects/00000000-0000-0000-0000-000000000000/items/30')
      .set('authorization', token)
      .send({ status: 'fixed' })
      .expect(423, function(err, res) {
        if (err) return assert.end(err);
        assert.equal(
          res.body.message,
          'Cannot update an items status without a lock'
        );
        assert.end();
      });
  }
);

// Lock holder can change the status; moving to a complete status ('fixed')
// also releases the lock.
test(
  'PUT /:version/projects/:id/items:id - the status can be changed by the user who has the active lock',
  projectWithOneItemLockedByUserOne,
  (assert, token) => {
    assert.app
      .put('/v1/projects/00000000-0000-0000-0000-000000000000/items/30')
      .set('authorization', token)
      .send({ lock: 'locked' })
      .expect(200, function(err) {
        if (err) return assert.end(err);
        assert.app
          .put('/v1/projects/00000000-0000-0000-0000-000000000000/items/30')
          .set('authorization', token)
          .send({ status: 'fixed' })
          .expect(200, function(err, res) {
            if (err) return assert.end(err);
            assert.equal(res.body.status, 'fixed', 'the right status');
            assert.equal(
              res.body.lockedBy,
              null,
              'the lock was released because it was moved to a complete status'
            );
            assert.ok(checkLock.unlocked(res.body), 'is unlocked');
            assert.end();
          });
      });
  }
);

// 'escalated' also counts as a complete status (releases the lock), and the
// item can afterwards be re-locked and moved to 'fixed'.
test(
  'PUT /:version/projects/:id/items:id - add escalated status',
  projectWithOneItemLockedByUserOne,
  (assert, token) => {
    assert.app
      .put('/v1/projects/00000000-0000-0000-0000-000000000000/items/30')
      .set('authorization', token)
      .send({ status: 'escalated' })
      .expect(200, function(err, res) {
        if (err) return assert.end(err);
        assert.equal(res.body.status, 'escalated', 'the right status');
        assert.equal(
          res.body.lockedBy,
          null,
          'the lock was released because it was moved to a complete status'
        );
        assert.ok(checkLock.unlocked(res.body), 'is unlocked');
        // Re-lock the escalated item...
        assert.app
          .put('/v1/projects/00000000-0000-0000-0000-000000000000/items/30')
          .set('authorization', token)
          .send({ lock: 'locked' })
          .expect(200, function(err, res) {
            if (err) return assert.end(err);
            assert.equal(res.body.lockedBy, 'test-user');
            assert.ok(checkLock.locked(res.body), 'locked');
            // ...then complete it as 'fixed'.
            assert.app
              .put('/v1/projects/00000000-0000-0000-0000-000000000000/items/30')
              .set('authorization', token)
              .send({ status: 'fixed' })
              .expect(200, function(err, res) {
                if (err) return assert.end(err);
                assert.equal(res.body.status, 'fixed', 'the right status');
                assert.equal(
                  res.body.lockedBy,
                  null,
                  'the lock was released because it was moved to a complete status'
                );
                assert.ok(checkLock.unlocked(res.body), 'is unlocked');
                assert.end();
              });
          });
      });
  }
);

// Another user's active lock cannot be released by the current user.
test(
  'PUT /:version/projects/:id/items:id - an active lock cannot be changed by a non-locking user',
  projectWithOneItemLockedByUserTwo,
  (assert, token) => {
    assert.app
      .put('/v1/projects/00000000-0000-0000-0000-000000000000/items/30')
      .set('authorization', token)
      .send({ lock: 'unlocked' })
      .expect(423, function(err, res) {
        if (err) return assert.end(err);
        assert.equal(
          res.body.message,
          'This item is currently locked by usertwo'
        );
        assert.end();
      });
  }
);

// The user holding the lock can re-lock and then release it.
test(
  'PUT /:version/projects/:id/items:id - an active lock can be changed by the locking user',
  projectWithOneItemLockedByUserOne,
  (assert, token) => {
    assert.app
      .put('/v1/projects/00000000-0000-0000-0000-000000000000/items/30')
      .set('authorization', token)
      .send({ lock: 'locked' })
      .expect(200, function(err, res) {
        if (err) return assert.end(err);
        assert.equal(res.body.lockedBy, 'test-user');
        assert.ok(checkLock.locked(res.body), 'locked');
        assert.app
          .put('/v1/projects/00000000-0000-0000-0000-000000000000/items/30')
          .set('authorization', token)
          .send({ lock: 'unlocked' })
          .expect(200, function(err, res) {
            if (err) return assert.end(err);
            assert.equal(res.body.lockedBy, null, 'no one holds the lock');
            assert.ok(checkLock.unlocked(res.body), 'not locked');
            assert.end();
          });
      });
  }
);
// Unknown attributes in the request body are rejected with 400.
test(
  'PUT /:version/projects/:id/items/:id - an item update cannot have unexpected body content',
  projectWithOneUnlockedItem,
  (assert, token) => {
    assert.app
      .put('/v1/projects/00000000-0000-0000-0000-000000000000/items/30')
      .set('authorization', token)
      .send({ random: 'is bad' })
      .expect(400, function(err, res) {
        // Fixed: pass err through so a transport failure is reported
        // instead of being silently swallowed (assert.end() with no arg).
        if (err) return assert.end(err);
        assert.equal(
          res.body.message,
          'Request contains unexpected attribute random',
          'has right message'
        );
        assert.end();
      });
  }
);
// DELETE removes the item; a follow-up list no longer contains its id.
test(
  'DELETE /:version/projects/:id/items/:id - DELETE deletes an item',
  filterItemsFixture,
  (assert, token) => {
    assert.app
      .delete('/v1/projects/66666666-6666-6666-6666-666666666666/items/30')
      .set('authorization', token)
      .expect(200, (err, res) => {
        assert.ifError(err, 'DELETE item does not error');
        assert.equal(res.body.id, '30', 'item id returned');
        assert.equal(
          res.body.project,
          '66666666-6666-6666-6666-666666666666',
          'project id returned'
        );
        assert.app
          .get('/v1/projects/66666666-6666-6666-6666-666666666666/items')
          .set('authorization', token)
          .expect(200, (err, res) => {
            // Fixed: the inner GET previously ignored err, so a failed
            // request crashed on res.body instead of failing cleanly.
            assert.ifError(err, 'GET items does not error');
            const results = res.body;
            const resultIds = results.map(item => item.id);
            assert.equal(results.length, 2, '2 items returned after delete');
            assert.equal(
              resultIds.indexOf('30'),
              -1,
              'item id not in list result'
            );
            assert.end();
          });
      });
  }
);
/**
 * update-all-item test
 */
// body.ids must be an array; a bare string is rejected with 400.
test(
  'PUT /:version/projects/:project/items - invalid body array',
  getItemsFixture,
  (assert, token) => {
    assert.app
      .put('/v1/projects/11111111-1111-1111-1111-111111111111/items')
      .set('authorization', token)
      .send({
        ids: '405270',
        status: 'fixed'
      })
      .expect(400, (err, res) => {
        assert.ifError(err, 'should not error');
        assert.deepEqual(res.body.message, 'body.ids should be an array');
        assert.end();
      });
  }
);

// Happy path: an array of ids plus a status is accepted.
// NOTE(review): the expect callback drops its err argument, so a failed
// request would pass silently -- consider asserting on err.
test(
  'PUT /:version/projects/:project/items - valid body array',
  getItemsFixture,
  (assert, token) => {
    assert.app
      .put('/v1/projects/11111111-1111-1111-1111-111111111111/items')
      .set('authorization', token)
      .send({
        ids: ['30'],
        status: 'fixed'
      })
      .expect(200, () => {
        assert.end();
      });
  }
);
// Same shape as the previous test but with status 'escalated'.
// Fixed: the title duplicated the previous test's name, which makes
// reporter output ambiguous; disambiguated with the status under test.
test(
  'PUT /:version/projects/:project/items - valid body array with escalated status',
  getItemsFixture,
  (assert, token) => {
    assert.app
      .put('/v1/projects/11111111-1111-1111-1111-111111111111/items')
      .set('authorization', token)
      .send({
        ids: ['30'],
        status: 'escalated'
      })
      .expect(200, () => {
        assert.end();
      });
  }
);
// A body without the ids key is rejected with the same array error.
test(
  'PUT /:version/projects/:project/items - body array should have id',
  getItemsFixture,
  (assert, token) => {
    assert.app
      .put('/v1/projects/11111111-1111-1111-1111-111111111111/items')
      .set('authorization', token)
      .send({
        kid: '405270'
      })
      .expect(400, (err, res) => {
        assert.ifError(err, 'should not error');
        assert.deepEqual(res.body.message, 'body.ids should be an array');
        assert.end();
      });
  }
);

// Setting status and lock in a single request is rejected.
test(
  'PUT /:version/projects/:project/items - try change both lock and status',
  getItemsFixture,
  (assert, token) => {
    assert.app
      .put('/v1/projects/11111111-1111-1111-1111-111111111111/items')
      .set('authorization', token)
      .send({
        ids: ['405270'],
        status: 'open',
        lock: 'locked'
      })
      .expect(400, (err, res) => {
        assert.ifError(err, 'should not error');
        assert.deepEqual(
          res.body.message,
          'It is invalid to set the status and change the lock in one request'
        );
        assert.end();
      });
  }
);

// At most 500 ids per bulk request; 501 ids must be rejected.
test(
  'PUT /:version/projects/:project/items - body array should throw error if body.length >500',
  getItemsFixture,
  (assert, token) => {
    assert.app
      .put('/v1/projects/11111111-1111-1111-1111-111111111111/items')
      .set('authorization', token)
      .send({
        ids: _.range(0, 501),
        lock: 'locked'
      })
      .expect(400, (err, res) => {
        assert.ifError(err, 'should not error');
        assert.deepEqual(
          res.body.message,
          'Only allowed to update a maximum of 500 items and a minimum of 1 item at a time'
        );
        assert.end();
      });
  }
);
// Lock one item via the single-item route, then unlock it via the bulk
// route and verify the bulk response.
test(
  'PUT /:version/projects/:project/items - basic update array of items',
  itemsWithTags,
  (assert, token) => {
    assert.app
      .put('/v1/projects/00000000-0000-0000-0000-000000000000/items/111111')
      .set('authorization', token)
      .send({ lock: 'locked' })
      .expect(200, function(err, res) {
        if (err) return assert.end(err);
        assert.ok(checkLock.locked(res.body), 'the item is locked');
        assert.app
          .put('/v1/projects/00000000-0000-0000-0000-000000000000/items')
          .set('authorization', token)
          .send({
            ids: ['111111'],
            lock: 'unlocked'
          })
          .expect(200, (err, res) => {
            // Fixed: the inner callback previously ignored err, so a failed
            // request crashed on res.body instead of failing the test.
            if (err) return assert.end(err);
            assert.equal(res.body.length, 1, 'should update 1 items');
            assert.ok(checkLock.unlocked(res.body[0]), 'is unlocked');
            assert.end();
          });
      });
  }
);
// Bulk update is all-or-nothing: if any requested item is locked by
// another user the whole request fails with 423.
test(
  'PUT /:version/projects/:project/items - one item locked by another user',
  [
    {
      id: '00000000-0000-0000-0000-000000000000',
      name: 'Project 0',
      items: [
        {
          id: '1',
          pin: [30, 30],
          lockedBy: 'usertwo',
          // Lock still active: expires 15 minutes from now.
          lockedTill: new Date(Date.now() + 1000 * 15 * 60)
        },
        {
          id: '2',
          pin: [30, 30],
          lockedBy: 'test-user',
          lockedTill: new Date(Date.now() + 1000 * 15 * 60)
        }
      ]
    }
  ],
  (assert, token) => {
    assert.app
      .put('/v1/projects/00000000-0000-0000-0000-000000000000/items')
      .set('authorization', token)
      .send({
        ids: ['1', '2'],
        lock: 'unlocked'
      })
      .expect(423, function(err, res) {
        if (err) return assert.end(err);
        assert.equal(
          res.body.message,
          'This item is currently locked by usertwo'
        );
        assert.end();
      });
  }
);

// Same failure when every requested item is locked by another user.
test(
  'PUT /:version/projects/:project/items - both items locked by another user',
  [
    {
      id: '00000000-0000-0000-0000-000000000000',
      name: 'Project 0',
      items: [
        {
          id: '1',
          pin: [30, 30],
          lockedBy: 'usertwo',
          lockedTill: new Date(Date.now() + 1000 * 15 * 60)
        },
        {
          id: '2',
          pin: [30, 30],
          lockedBy: 'usertwo',
          lockedTill: new Date(Date.now() + 1000 * 15 * 60)
        }
      ]
    }
  ],
  (assert, token) => {
    assert.app
      .put('/v1/projects/00000000-0000-0000-0000-000000000000/items')
      .set('authorization', token)
      .send({
        ids: ['1', '2'],
        lock: 'locked'
      })
      .expect(423, function(err, res) {
        if (err) return assert.end(err);
        assert.equal(
          res.body.message,
          'This item is currently locked by usertwo'
        );
        assert.end();
      });
  }
);
// Both items unlocked: a bulk lock succeeds and attributes both locks to
// the requesting user.
test(
  'PUT /:version/projects/:project/items - both items available',
  [
    {
      id: '00000000-0000-0000-0000-000000000000',
      name: 'Project 0',
      items: [
        {
          id: '1',
          pin: [30, 30]
        },
        {
          id: '2',
          pin: [30, 30]
        }
      ]
    }
  ],
  (assert, token) => {
    assert.app
      .put('/v1/projects/00000000-0000-0000-0000-000000000000/items')
      .set('authorization', token)
      .send({
        ids: ['1', '2'],
        lock: 'locked'
      })
      .expect(200, (err, res) => {
        // Fixed: err was previously ignored, so a failed request crashed
        // on res.body instead of failing the test cleanly.
        if (err) return assert.end(err);
        assert.equal(res.body.length, 2, 'should update 2 items');
        assert.ok(checkLock.locked(res.body[0]), 'first is locked');
        assert.ok(checkLock.locked(res.body[1]), 'second is locked');
        assert.equal(
          res.body[0].lockedBy,
          'test-user',
          'item locked by the current user'
        );
        assert.equal(
          res.body[1].lockedBy,
          'test-user',
          'item locked by the current user'
        );
        assert.end();
      });
  }
);
// Bulk variant of the single-item lock test.
test(
  'PUT /:version/projects/:id/items - the lock can be activated via {lock: locked}',
  projectWithOneUnlockedItem,
  (assert, token) => {
    assert.app
      .put('/v1/projects/00000000-0000-0000-0000-000000000000/items')
      .set('authorization', token)
      .send({ lock: 'locked', ids: ['30'] })
      .expect(200, function(err, res) {
        if (err) return assert.end(err);
        assert.ok(checkLock.locked(res.body[0]), 'the item is locked');
        assert.equal(
          res.body[0].lockedBy,
          'test-user',
          'item locked by the current user'
        );
        assert.end();
      });
  }
);
// Bulk variant of the single-item unlock test.
test(
  'PUT /:version/projects/:project/items - the lock can be deactivated via {lock: unlocked}',
  projectWithOneItemLockedByUserOne,
  (assert, token) => {
    assert.app
      .put('/v1/projects/00000000-0000-0000-0000-000000000000/items')
      .set('authorization', token)
      .send({ lock: 'unlocked', ids: ['30'] })
      .expect(200, function(err, res) {
        if (err) return assert.end(err);
        assert.ok(checkLock.unlocked(res.body[0]), 'the item is unlocked');
        // Fixed message: lockedBy is asserted to be null here; the old
        // message claimed the item was locked by the current user.
        assert.equal(
          res.body[0].lockedBy,
          null,
          'no one holds the lock'
        );
        assert.end();
      });
  }
);
// Bulk status change without a lock is rejected with 423.
test(
  'PUT /:version/projects/:id/items - the status cannot be changed by a user who doesnt have an active lock',
  projectWithOneUnlockedItem,
  (assert, token) => {
    assert.app
      .put('/v1/projects/00000000-0000-0000-0000-000000000000/items')
      .set('authorization', token)
      .send({ status: 'fixed', ids: ['30'] })
      .expect(423, function(err, res) {
        if (err) return assert.end(err);
        assert.equal(
          res.body.message,
          'Cannot update an items status without a lock'
        );
        assert.end();
      });
  }
);

// Bulk lock then bulk status change: both items move to 'fixed' and both
// locks are released (complete status releases the lock).
test(
  'PUT /:version/projects/:id/items- the status can be changed by the user who has the active lock',
  [
    {
      id: '00000000-0000-0000-0000-000000000000',
      name: 'Project 0',
      items: [
        {
          id: '30',
          pin: [30, 30]
        },
        {
          id: '31',
          pin: [30, 30]
        }
      ]
    }
  ],
  (assert, token) => {
    assert.app
      .put('/v1/projects/00000000-0000-0000-0000-000000000000/items')
      .set('authorization', token)
      .send({ lock: 'locked', ids: ['30', '31'] })
      .expect(200, function(err) {
        if (err) return assert.end(err);
        assert.app
          .put('/v1/projects/00000000-0000-0000-0000-000000000000/items')
          .set('authorization', token)
          .send({ status: 'fixed', ids: ['30', '31'] })
          .expect(200, function(err, res) {
            if (err) return assert.end(err);
            assert.equal(
              res.body[0].status,
              'fixed',
              'item1 has the right status'
            );
            assert.equal(
              res.body[1].status,
              'fixed',
              'item2 has the right status'
            );
            assert.equal(
              res.body[0].lockedBy,
              null,
              'the lock1 was released because it was moved to a complete status'
            );
            assert.equal(
              res.body[1].lockedBy,
              null,
              'the lock2 was released because it was moved to a complete status'
            );
            assert.ok(checkLock.unlocked(res.body[0]), 'item1 is unlocked');
            assert.ok(checkLock.unlocked(res.body[1]), 'item2 is unlocked');
            assert.end();
          });
      });
  }
);

// Item 30 has an ACTIVE foreign lock, item 31's foreign lock is EXPIRED
// (lockedTill in the past). Unlocking both fails; unlocking only the
// expired one succeeds.
test(
  'PUT /:version/projects/:id/items - an active lock cannot be changed by a non-locking user',
  [
    {
      id: '00000000-0000-0000-0000-000000000000',
      name: 'Project 0',
      items: [
        {
          id: '30',
          pin: [30, 30],
          lockedBy: 'usertwo',
          lockedTill: new Date(Date.now() + 1000 * 15 * 60)
        },
        {
          id: '31',
          pin: [30, 30],
          lockedBy: 'usertwo',
          // Note the minus: this lock expired 15 minutes ago.
          lockedTill: new Date(Date.now() - 1000 * 15 * 60)
        }
      ]
    }
  ],
  (assert, token) => {
    assert.app
      .put('/v1/projects/00000000-0000-0000-0000-000000000000/items')
      .set('authorization', token)
      .send({ lock: 'unlocked', ids: ['30', '31'] })
      .expect(423, function(err, res) {
        if (err) return assert.end(err);
        assert.equal(
          res.body.message,
          'This item is currently locked by usertwo'
        );
        assert.app
          .put('/v1/projects/00000000-0000-0000-0000-000000000000/items')
          .set('authorization', token)
          .send({ lock: 'unlocked', ids: ['31'] })
          .expect(200, function(err, res) {
            if (err) return assert.end(err);
            assert.ok(checkLock.unlocked(res.body[0]), 'item31 is unlocked');
            assert.end();
          });
      });
  }
);

// Bulk lock/unlock by the lock holder touches only the requested ids;
// item 32 (locked by usertwo) must be left alone.
test(
  'PUT /:version/projects/:id/items - an active lock can be changed by the locking user',
  [
    {
      id: '00000000-0000-0000-0000-000000000000',
      name: 'Project 0',
      items: [
        {
          id: '30',
          pin: [30, 30],
          lockedBy: 'test-user',
          lockedTill: new Date(Date.now() + 1000 * 15 * 60)
        },
        {
          id: '31',
          pin: [30, 30],
          lockedBy: 'test-user',
          lockedTill: new Date(Date.now() + 1000 * 15 * 60)
        },
        {
          id: '32',
          pin: [30, 30],
          lockedBy: 'usertwo',
          lockedTill: new Date(Date.now() + 1000 * 15 * 60)
        }
      ]
    }
  ],
  (assert, token) => {
    assert.app
      .put('/v1/projects/00000000-0000-0000-0000-000000000000/items')
      .set('authorization', token)
      .send({ lock: 'locked', ids: ['30', '31'] })
      .expect(200, function(err, res) {
        if (err) return assert.end(err);
        assert.equal(res.body.length, 2, 'should change 2 ids only');
        assert.equal(res.body[0].lockedBy, 'test-user');
        assert.equal(res.body[1].lockedBy, 'test-user');
        assert.ok(checkLock.locked(res.body[0]), 'item30 locked');
        assert.ok(checkLock.locked(res.body[1]), 'item31 locked');
        assert.app
          .put('/v1/projects/00000000-0000-0000-0000-000000000000/items')
          .set('authorization', token)
          .send({ lock: 'unlocked', ids: ['31', '30'] })
          .expect(200, function(err, res) {
            if (err) return assert.end(err);
            assert.equal(res.body.length, 2, 'should change 2 ids only');
            assert.equal(
              res.body[0].lockedBy,
              null,
              'item30 no one holds the lock'
            );
            assert.equal(
              res.body[1].lockedBy,
              null,
              'item31 no one holds the lock'
            );
            assert.ok(checkLock.unlocked(res.body[0]), 'item30 not locked');
            assert.ok(checkLock.unlocked(res.body[1]), 'item31 not locked');
            assert.app
              .get('/v1/projects/00000000-0000-0000-0000-000000000000/items/32')
              .set('authorization', token)
              .expect(200, function(err, res) {
                if (err) return assert.end(err);
                assert.equal(
                  res.body.lockedBy,
                  'usertwo',
                  "doesn't change else's lock"
                );
                assert.end();
              });
          });
      });
  }
);
// Fixture: one project with two minimal items (30, 31) and one fully
// populated item (32) exercising every optional item field, used by the
// "keys remain intact" tests below.
const variedData = {
  id: '00000000-0000-0000-0000-000000000000',
  name: 'Project 0',
  items: [
    {
      id: '30',
      pin: [30, 30],
      status: 'fixed'
    },
    {
      id: '31',
      pin: [30, 30],
      status: 'fixed'
    },
    {
      id: '32',
      pin: [-122.45819852581384, 37.746826497065165],
      quadkey: '0230102033323',
      instructions: 'todo',
      createdBy: 'mapbox-machine',
      status: 'fixed',
      lockedTill: '2017-11-28T07:10:37.346Z',
      lockedBy: null,
      featureCollection: {
        type: 'FeatureCollection',
        features: [
          {
            type: 'Feature',
            geometry: {
              type: 'Point',
              coordinates: [0, 0]
            },
            properties: {
              'tofix:category': 'cat',
              invalidProp: 'foobar'
            }
          }
        ]
      },
      metadata: {
        test: 'metadata'
      },
      createdAt: '2017-11-21T21:02:23.442Z',
      updatedAt: '2017-11-28T07:10:37.701Z'
    }
  ]
};
// After a bulk lock, item 32's fields must survive untouched. Server-managed
// fields (timestamps, lock bookkeeping, pin geometry, ids) are asserted
// individually and then omitted from both sides before the final deepEqual.
test(
  'PUT /:version/projects/:id/items- relevant item keys should remain intact when the status can be changed by the user who has the active lock',
  [variedData],
  (assert, token) => {
    assert.app
      .put('/v1/projects/00000000-0000-0000-0000-000000000000/items')
      .set('authorization', token)
      .send({ lock: 'locked', ids: ['30', '32'] })
      .expect(200, function(err, res) {
        if (err) return assert.end(err);
        // res.body[1] is item 32 (response order follows the ids sent).
        var response = _.cloneDeep(res.body[1]);
        var toMatch = _.cloneDeep(variedData.items[2]);
        // createdAt
        assert.equal(!!response['createdAt'], true);
        response = _.omit(response, 'createdAt');
        toMatch = _.omit(toMatch, 'createdAt');
        // updatedAt
        assert.equal(!!response['updatedAt'], true);
        response = _.omit(response, 'updatedAt');
        toMatch = _.omit(toMatch, 'updatedAt');
        //lastModifiedBy
        response = _.omit(response, 'lastModifiedBy');
        toMatch = _.omit(toMatch, 'lastModifiedBy');
        //lastModifiedDate
        response = _.omit(response, 'lastModifiedDate');
        toMatch = _.omit(toMatch, 'lastModifiedDate');
        // lockedTill: a fresh lock expires between 14 and 15 minutes out.
        assert.equal(
          new Date(response['lockedTill']) <
            new Date(Date.now() + 1000 * 60 * 15),
          true,
          'should be less than 15min'
        );
        assert.equal(
          new Date(response['lockedTill']) >
            new Date(Date.now() + 1000 * 60 * 14),
          true,
          'should be more than 14min'
        );
        response = _.omit(response, 'lockedTill');
        toMatch = _.omit(toMatch, 'lockedTill');
        // pin: the [lng, lat] array comes back as GeoJSON Point geometry.
        assert.deepEqual(response.pin, {
          type: 'Point',
          coordinates: toMatch.pin
        });
        response = _.omit(response, 'pin');
        toMatch = _.omit(toMatch, 'pin');
        // lockedBy
        assert.equal(response['lockedBy'], 'test-user');
        response = _.omit(response, 'lockedBy');
        toMatch = _.omit(toMatch, 'lockedBy');
        // project_id
        assert.equal(response['project_id'], variedData.id);
        response = _.omit(response, 'project_id');
        toMatch = _.omit(toMatch, 'project_id');
        // sort
        response = _.omit(response, 'sort');
        toMatch = _.omit(toMatch, 'sort');
        // deep equal anything left
        assert.deepEqual(response, toMatch);
        assert.end();
      });
  }
);

// Same invariant across a lock + unlock round trip: after unlocking,
// lockedTill is in the past and lockedBy is null.
test(
  'PUT /:version/projects/:id/items - relevant item keys should remain intact when locking ',
  [variedData],
  (assert, token) => {
    assert.app
      .put('/v1/projects/00000000-0000-0000-0000-000000000000/items')
      .set('authorization', token)
      .send({ lock: 'locked', ids: ['32'] })
      .expect(200, function(err, res) {
        if (err) return assert.end(err);
        assert.equal(res.body.length, 1, 'should change 1 ids only');
        assert.equal(res.body[0].lockedBy, 'test-user');
        assert.ok(checkLock.locked(res.body[0]), 'item32 locked');
        assert.app
          .put('/v1/projects/00000000-0000-0000-0000-000000000000/items')
          .set('authorization', token)
          .send({ lock: 'unlocked', ids: ['32'] })
          .expect(200, function(err, res) {
            if (err) return assert.end(err);
            var response = _.cloneDeep(res.body[0]);
            var toMatch = _.cloneDeep(variedData.items[2]);
            // createdAt
            assert.equal(!!response['createdAt'], true);
            response = _.omit(response, 'createdAt');
            toMatch = _.omit(toMatch, 'createdAt');
            // updatedAt
            assert.equal(!!response['updatedAt'], true);
            response = _.omit(response, 'updatedAt');
            toMatch = _.omit(toMatch, 'updatedAt');
            //lastModifiedBy
            response = _.omit(response, 'lastModifiedBy');
            toMatch = _.omit(toMatch, 'lastModifiedBy');
            //lastModifiedDate
            response = _.omit(response, 'lastModifiedDate');
            toMatch = _.omit(toMatch, 'lastModifiedDate');
            // lockedTill: unlocking rewinds the expiry into the past.
            assert.equal(
              new Date(response['lockedTill']) < new Date(Date.now()),
              true,
              'should be in the past'
            );
            response = _.omit(response, 'lockedTill');
            toMatch = _.omit(toMatch, 'lockedTill');
            // pin
            assert.deepEqual(response.pin, {
              type: 'Point',
              coordinates: toMatch.pin
            });
            response = _.omit(response, 'pin');
            toMatch = _.omit(toMatch, 'pin');
            // lockedBy
            assert.equal(response['lockedBy'], null);
            response = _.omit(response, 'lockedBy');
            toMatch = _.omit(toMatch, 'lockedBy');
            // project_id
            assert.equal(response['project_id'], variedData.id);
            response = _.omit(response, 'project_id');
            toMatch = _.omit(toMatch, 'project_id');
            // sort
            response = _.omit(response, 'sort');
            toMatch = _.omit(toMatch, 'sort');
            // deep equal anything left
            assert.deepEqual(response, toMatch);
            assert.end();
          });
      });
  }
);
// Bulk status change: the status is applied and every other item key
// survives intact. (Title disambiguated -- it previously duplicated the
// preceding locking test's name.)
test(
  'PUT /:version/projects/:id/items - relevant item keys should remain intact when changing status',
  [variedData],
  (assert, token) => {
    assert.app
      .put('/v1/projects/00000000-0000-0000-0000-000000000000/items')
      .set('authorization', token)
      .send({ lock: 'locked', ids: ['32', '30'] })
      .expect(200, function(err, res) {
        if (err) return assert.end(err);
        // Fixed message: two ids are updated here, not one.
        assert.equal(res.body.length, 2, 'should change 2 ids only');
        assert.equal(res.body[0].lockedBy, 'test-user');
        assert.ok(checkLock.locked(res.body[0]), 'item32 locked');
        // change status
        assert.app
          .put('/v1/projects/00000000-0000-0000-0000-000000000000/items')
          .set('authorization', token)
          .send({ status: 'noterror', ids: ['32', '30'] })
          .expect(200, function(err, res) {
            if (err) return assert.end(err);
            assert.equal(
              res.body.find(item => item.id === '30').status,
              'noterror',
              'should set the status'
            );
            var response = _.cloneDeep(res.body.find(item => item.id === '32'));
            var toMatch = _.cloneDeep(
              variedData.items.find(item => item.id == response.id)
            );
            // createdAt
            assert.equal(!!response['createdAt'], true);
            response = _.omit(response, 'createdAt');
            toMatch = _.omit(toMatch, 'createdAt');
            //lastModifiedBy
            response = _.omit(response, 'lastModifiedBy');
            toMatch = _.omit(toMatch, 'lastModifiedBy');
            //lastModifiedDate
            response = _.omit(response, 'lastModifiedDate');
            toMatch = _.omit(toMatch, 'lastModifiedDate');
            // updatedAt
            assert.equal(!!response['updatedAt'], true);
            response = _.omit(response, 'updatedAt');
            toMatch = _.omit(toMatch, 'updatedAt');
            // lockedTill: completing the status releases the lock.
            assert.equal(
              new Date(response['lockedTill']) < new Date(Date.now()),
              true,
              'should be in the past'
            );
            response = _.omit(response, 'lockedTill');
            toMatch = _.omit(toMatch, 'lockedTill');
            // pin
            assert.deepEqual(response.pin, {
              type: 'Point',
              coordinates: toMatch.pin
            });
            response = _.omit(response, 'pin');
            toMatch = _.omit(toMatch, 'pin');
            // lockedBy
            assert.equal(response['lockedBy'], null);
            response = _.omit(response, 'lockedBy');
            toMatch = _.omit(toMatch, 'lockedBy');
            // project_id
            assert.equal(response['project_id'], variedData.id);
            response = _.omit(response, 'project_id');
            toMatch = _.omit(toMatch, 'project_id');
            // sort
            response = _.omit(response, 'sort');
            toMatch = _.omit(toMatch, 'sort');
            // status (fixed comment: this block checks status, the old
            // comment said project_id)
            assert.equal(response['status'], 'noterror');
            response = _.omit(response, 'status');
            toMatch = _.omit(toMatch, 'status');
            // deep equal anything left
            assert.deepEqual(response, toMatch);
            assert.end();
          });
      });
  }
);
|
import random


def print_random_element(list):
    """Print one uniformly-random element of *list* to stdout.

    Raises IndexError on an empty sequence (the previous randint-based
    indexing raised ValueError instead).
    """
    # random.choice is the idiomatic stdlib replacement for manual
    # randint(0, len-1) indexing.
    # NOTE(review): the parameter shadows the built-in `list`; the name is
    # kept to preserve the public signature for keyword callers.
    print(random.choice(list))
|
<gh_stars>0
/**
* Main package
*/
package com.netcracker.ncstore;
|
<reponame>WlodzimierzKorza/small_eod
from ..generic.serializers import UserLogModelSerializer
from .models import Note
class NoteSerializer(UserLogModelSerializer):
    """DRF serializer exposing a Note's id, case and comment fields.

    Extends UserLogModelSerializer (behaviour defined elsewhere in the
    project; presumably adds user-log bookkeeping -- confirm there).
    """

    class Meta:
        model = Note
        fields = ["id", "case", "comment"]
|
# Write Streamlit credentials and server config for a headless (Heroku-style)
# deployment. Uses here-docs instead of `echo "...\n..."`: plain `echo` does
# not expand \n escapes in bash, which left literal backslash sequences in
# the TOML files.
mkdir -p ~/.streamlit/

cat > ~/.streamlit/credentials.toml <<EOF
[general]
email = "cnhhoang850@gmail.com"
EOF

# $PORT is supplied by the platform at dyno start-up.
cat > ~/.streamlit/config.toml <<EOF
[server]
headless = true
enableCORS = false
port = $PORT
EOF
|
#!/usr/bin/env bash
# Launch one TORrent client: pick a free slot in 1..nValue, claim its folder
# with a lock file, then run the jar on port 10000+slot, passing the other
# clients' ports.
nValue=3;
baseFolderPath="../../../TORrent_";
lockFile=""
myPort=""

# Echo a client number whose port is free and whose folder is unlocked;
# retries (recursively) on collision.
function getClientNumber {
    # Fixed: the original `let "number=$RANDOM % $nValue + 1 | bc"` carried a
    # stray `| bc` -- inside `let` that is a bitwise OR with the unset
    # variable `bc` (i.e. `| 0`, a no-op), a leftover from piping into bc.
    # Plain arithmetic expansion is the correct form.
    number=$(( RANDOM % nValue + 1 ))
    port=$(( number + 10000 ))
    if lsof -Pi :$port -sTCP:LISTEN -t >/dev/null ; then
        # Port already in use by a running client: try another slot.
        echo $(getClientNumber)
    else
        echo $(checkFolder $number);
    fi
}

# Echo the ports of every client except the one numbered $1.
function getClientNumbers {
    except=$1
    clientsPorts=""
    for ((i=1;i<=$nValue;i++)); do
        if [[ ${i} != ${except} ]]; then
            otherPort=$(( 10000 + i ))
            clientsPorts="$clientsPorts $otherPort"
        fi;
    done
    echo ${clientsPorts};
}

# Claim folder $baseFolderPath$1 via a lock file; if already locked, fall
# back to picking a different client number.
function checkFolder {
    path="$baseFolderPath$1/"
    file="lock"
    lockFile="${path}${file}"
    echo "Lock file: $lockFile" >&2
    if [[ -e ${lockFile} ]]; then
        echo $(getClientNumber);
    else
        touch "${lockFile}";
        echo $1
    fi
}

myNumber=$(getClientNumber)
myPort=$(( 10000 + myNumber ));
myFolder="$baseFolderPath$myNumber";
echo "Client number ${myNumber}";
echo "Client port ${myPort}";
echo "Client folder ${myFolder}";
otherPorts=$(getClientNumbers ${myNumber})
echo "Other client ports ${otherPorts}";
java -jar ../../../torro-1.1-SNAPSHOT.jar false false ${myPort} ${myFolder} ${otherPorts};
# Release the slot when the client exits.
rm "$myFolder/lock";
|
// Copyright 2019 Drone.IO Inc. All rights reserved.
// Use of this source code is governed by the Drone Non-Commercial License
// that can be found in the LICENSE file.
// +build !oss
package logs
import "testing"
// TestKey verifies that s3store.key produces a rooted object key from the
// configured prefix and a build number, whether or not the prefix already
// carries a leading slash.
func TestKey(t *testing.T) {
	cases := []struct {
		bucket string
		prefix string
		want   string
	}{
		{bucket: "test-bucket", prefix: "drone/logs", want: "/drone/logs/1"},
		{bucket: "test-bucket", prefix: "/drone/logs", want: "/drone/logs/1"},
	}
	for _, tc := range cases {
		store := &s3store{
			bucket: tc.bucket,
			prefix: tc.prefix,
		}
		got := store.key(1)
		if got != tc.want {
			t.Errorf("Want key %s, got %s", tc.want, got)
		}
	}
}
|
<gh_stars>0
import React from 'react'
import { Link } from 'gatsby'
import tkm from '../img/tkm.png'
// Landing-page hero: site title, tagline, "Join IEEE" CTA and hero image.
// Stateless, so only render() is defined; exported as a class expression.
export const Title = class extends React.Component {
  render() {
    return (
      <section className="hero landing is-fullheight is-transparent">
        <div className="hero-decoration-left" style={{backgroundImage:"url(img/hero-bg-left.svg)"}}></div>
        <div className="hero-decoration-right" style={{backgroundImage:"url(img/hero-bg-right.svg)"}}></div>
        <div className="hero-body" style={{paddingTop: '100px'}}>
          <div className="container">
            <div className="columns is-vcentered">
              {/* <!-- Landing page Title --> */}
              <div className="column is-5 ">
                <h1 className="title is-1 is-light is-semibold is-spaced">IEEE SB TKMCE</h1>
                <h2 className="subtitle is-5 is-light is-thin">
                  Bringing the benefits of the great organization to the society and to contribute to the technical centric approach.
                </h2>
                {/* <!-- CTA --> */}
                <p>
                  <Link className="story__button is-fat is-bold" to="/membership">
                    Join IEEE
                  </Link>
                </p>
              </div>
              {/* <!-- Hero image --> */}
              <div className="column is-7">
                <figure className="image">
                  <img src={tkm} alt="" />
                </figure>
              </div>
            </div>
          </div>
        </div>
      </section>
    )
  }
}
// Strip of IEEE society logos shown in the hero footer.
// NOTE(review): image paths are relative ("img/..."), so they resolve
// against the current route -- confirm they load on nested routes.
export const Societies = () => (
  <section className="hero-foot">
    <div className="container">
      <div className="tabs" >
        <ul style={{display:"flex",justifyContent:"space-between"}}>
          <li><img className="hero-logo" src="img/cs-logo.gif" alt="cs-logo" /></li>
          <li><img className="hero-logo" src="img/ias-logo.png" alt="ias-logo" /></li>
          <li><img className="hero-logo" src="img/pes-logo.png" alt="pes-logo" /></li>
          <li><img className="hero-logo" src="img/ras-logo.png" alt="ras-logo" /></li>
          <li><img className="hero-logo" src="img/sight-logo.png" alt="sight-logo" /></li>
          <li><img className="hero-logo" src="img/wie-logo.gif" alt="wie-logo" /></li>
        </ul>
      </div>
    </div>
  </section>
)
|
<reponame>EmreErinc/itms-wiki-app
package com.itms.wikiapp.userAuth.repository.impl;
import com.itms.wikiapp.userAuth.repository.CustomUserAuthRepository;
import lombok.extern.slf4j.Slf4j;
import org.springframework.context.annotation.Configuration;
/**
 * Empty implementation of {@link CustomUserAuthRepository}.
 *
 * <p>NOTE(review): annotated {@code @Configuration} rather than
 * {@code @Repository}; confirm this is intentional, as it registers the
 * class as a Spring configuration source, which is unusual for a
 * repository implementation.</p>
 */
@Slf4j
@Configuration
public class UserAuthRepositoryImpl implements CustomUserAuthRepository {
    // Intentionally empty: no custom repository methods are implemented yet.
}
|
$(document).ready(function () {
    // Initialise the books DataTable from the server-side listing endpoint.
    // NOTE(review): the '<?php ... ?>' tag below is only expanded if this
    // file is emitted through a PHP view; if it is served as a static .js
    // file the URL will contain the literal tag -- confirm how it is
    // included.
    $('#data_grid').DataTable({
        "ajax": {
            url: '<?php echo base_url(); ?>' + "index.php/books/books_page",
            type: 'GET'
        },
    });

    // Activate tooltip
    $('[data-toggle="tooltip"]').tooltip();

    // Removed dead code: a fully commented-out #picAddForm/#picEditForm
    // submit handler and the gridLoad lookup table (a sparse Array misused
    // as a map) that only it referenced. Restore from version control if
    // the pic-grid flow is revived.
});
|
<filename>arm/asm.c
/*
* arm/asm.c: ARM assembly language output, Codemist version 21
* Copyright (C) Codemist Ltd., 1987-1993
* Copyright (C) Advanced RISC Machines Limited., 1990-1993
* SPDX-Licence-Identifier: Apache-2.0
*/
/*
* RCS $Revision$
* Checkin $Date$
* Revising $Author$
*/
/* Assembler output is routed to asmstream, annotated if FEATURE_ANNOTATE. */
/* See c.armobj for more details on datastructures. */
/* exports: asmstream, display_assembly_code, asm_header, asm_trailer. */
#ifdef __STDC__
# include <string.h>
#else
# include <strings.h>
#endif
#include <ctype.h>
#include "globals.h"
#include "mcdep.h"
#include "mcdpriv.h"
#include "xrefs.h"
#include "store.h"
#include "codebuf.h"
#include "armops.h"
#include "version.h"
#include "builtin.h" /* for codesegment, code_area_idx... */
#include "errors.h"
#include "disass.h"
#include "bind.h"
#ifdef CPLUSPLUS
#include "unmangle.h"
#endif
#define lx_arg(x) ((unsigned long)((x) & 0xfffffffful))
FILE *asmstream;
#ifndef NO_ASSEMBLER_OUTPUT
#define annotations (feature & FEATURE_ANNOTATE)
static char const spaces_18[20] = " \0"; /* 18 blanks and 2 NULs */
static char const *regnames[16] = {"a1","a2","a3","a4","v1","v2","v3","v4",
"v5","v6","fp","sl","ip","sp","lr","pc"};
/* fp and sl may change when pcs is set */
static char const *fregnames[8] = {"f0","f1","f2","f3","f4","f5","f6","f7"};
static Symstr *out_codeseg;
static bool first_area;
static bool new_area;
/* Emit a newline to the assembly output stream. */
static void newline(void)
{
  fputc('\n', asmstream);
}

/* In annotated (-FEATURE_ANNOTATE) output the first 18 columns carry the
   address/hex dump; emit a blank annotation field so lines stay aligned. */
static void indent_18_if_annotating(void)
{
  if (annotations) fputs(spaces_18, asmstream);
}

/* Pad with blanks from column n (clamped to 8) up to column 8, using the
   tail of the shared spaces_18 buffer. */
static void padtocol8(unsigned32 n)
{
  if (n > 8) n = 8;
  fputs(spaces_18 + 18 - 8 + n, asmstream);
}

/* Indent to column 8 (start of the opcode field). */
static void indent8(void)
{
  padtocol8(0);
}
/* Print the four bytes of w as a quoted character constant (annotation of a
 * LIT_STRING word).  Bytes are taken in host memory order via a byte pointer,
 * so this works regardless of host endianness. */
static void pr_chars(int32 w)
/* Works on both sexes of host machine. */
/* Assumes ASCII target machine. */
{ unsigned i, c;
  FILE *as = asmstream;
  fputc('\'', as);
  for (i=0; i<sizeof(int32); i++)
  { c = ((uint8 *)&w)[i];
    if (c < ' ' || c >= 127)
    { if (c >= 7 && c <= 13)
      { /* \a \b \t \n \v \f \r get their mnemonic escapes */
        c = "abtnvfr"[c-7];
        goto escape;
      }
      else
      { /* any other non-printing byte is emitted as an octal escape */
        fprintf(as, "\\%o", c);
        continue;
      }
    }
    else if (c == '\\' || c == '\'' || c == '\"')
escape:   fputc('\\', as);          /* backslash-escape, then fall through */
    fputc(c, as);
  }
  fputc('\'', as);
}
/* Write symbol name s into buf in armasm syntax: a name that does not start
 * with a letter or '_', or that contains any character outside
 * [A-Za-z0-9_], must be quoted with vertical bars.
 * The (unsigned char) casts fix latent undefined behaviour: passing a
 * negative plain char (possible when char is signed and the name contains
 * high-bit bytes) to the <ctype.h> classifiers is UB. */
static void spr_asmname(char *buf, char const *s)
{ const char *s1 = s;
  char c;
  bool oddchars = NO;
  if (!isalpha((unsigned char)*s) && *s != '_')
    oddchars = YES;
  else
    while ((c = *s1++) != 0)
      if (!isalnum((unsigned char)c) && c != '_')
      { oddchars = YES;
        break;
      }
  if (oddchars)
    sprintf(buf, "|%s|", s);      /* bar-quoted form */
  else
    strcpy(buf, s);
}
/* As spr_asmname, but taking a Symstr. */
static void spr_asmsym(char *buf, Symstr const *sym) {
    spr_asmname(buf, symname_(sym));
}
/* Print name s (bar-quoted if necessary) directly to asmstream.
 * NOTE(review): the 256-byte staging buffer is not length-checked here --
 * confirm symbol-name length is bounded elsewhere. */
static void pr_asmname(char const *s) {
    char buf[256];
    spr_asmname(buf, s);
    fputs(buf, asmstream);
}
/* Print a Symstr's name (bar-quoted if necessary) to asmstream. */
static void pr_asmsym(Symstr const *sym) {
    char buf[256];
    spr_asmsym(buf, sym);
    fputs(buf, asmstream);
}
#ifdef CPLUSPLUS
/* When compiling C++ with annotation enabled, append the demangled form of
 * sym as a trailing "; ..." comment; prints nothing if unmangling leaves
 * the name unchanged. */
static void pr_unmangled_name(const Symstr *sym) {
    char buf[256];
    if (LanguageIsCPlusPlus && annotations) {
        char const *name = unmangle2(symname_(sym), buf, sizeof buf);
        if (name != symname_(sym))
            fprintf(asmstream, " ; %s", buf);
    }
}
#else
/* No unmangling in a C-only build. */
#define pr_unmangled_name(sym) ((void)0)
#endif
/* Return the symbol whose code cross-reference is recorded at code address p
 * (only the low 24 bits of the recorded offset are compared).
 * syserr's if no relocation is recorded there. */
static Symstr *find_extsym(int32 p)
{ CodeXref *x;
  for (x = codexrefs; x != NULL; x = x->codexrcdr) {
    if (p == (x->codexroff & 0x00ffffffL)) return(x->codexrsym);
  }
  syserr(syserr_find_extsym);
  return(NULL);                  /* not reached; placates the compiler */
}
/* Format into buf the name of the label at code offset `offset`, wrapped in
 * the literal strings `before`/`after`.  If the offset matches a label in
 * asm_lablist (a code reference), the long |L<addr>.J<n>.<proc>| form is
 * used; otherwise a plain L<area><addr> data-style label. */
static void printlabelname(char *buf, const char *before, const char *after, int32 offset, const Symstr *sym)
{
/* For data references, ignore the function name: it will be wrong for backward
 * references to the literal pool of another function. (Labels which are the
 * subject of data references have already been removed from asm_lablist).
 * For code refs, generate a label of the form L<address>.J<label>.<proc>.
 */
  LabList *p;
/*
 * in various places showcode(NULL) gets called - the following lines arrange
 * that the stuff printed here is nevertheless at least textual.
 */
  if (sym != NULL)
  { for ( p = asm_lablist ; p != NULL ; p = p->labcdr) {
      LabelNumber *lab = p->labcar;
      if ((lab->u.defn & 0x00ffffff) == offset)
      { sprintf(buf, "%s|L%.6lx.J%ld.%s|%s",
                before, offset+codebase, lab_name_(lab) & 0x7fffffff,
                symname_(sym), after);
        return;
      }
    }
  }
  /* data-style label; area index is suppressed (printed as 0) for area 1 */
  sprintf(buf, "%sL%lx%.5lx%s", before,
          code_area_idx == 1 ? 0L : code_area_idx, offset+codebase, after);
}
/* Name of the procedure currently being disassembled (for label names). */
static Symstr const *procname;
/* Context handed to the disassembler callback for an instruction that
 * carries a relocation against a symbol. */
typedef struct RelAddrRec {
    Symstr *sym;        /* the relocated symbol */
    int32 count;        /* extra ADD/SUB words merged into one ADRL */
    int32 offset;       /* accumulated immediate of the merged words */
    char *buf;          /* start of the disassembly text being rewritten */
} RelAddrRec;
/* Rewrite an already-disassembled ADD/SUB Rd, pc, #imm (plus r->count merged
 * follow-on words) as ADR[L...]; then append "offset+symbol" at buf.
 * Returns the updated output position.
 * NOTE(review): pos selects the column after the condition code -- assumes
 * the disassembler's fixed mnemonic layout; confirm against disass.c. */
static char *disass_adrl(RelAddrRec *r, char *buf, int32 offset) {
    int pos = (r->buf[3] == 'S' || r->buf[3] == ' ') ? 3 : 5;
    int c = r->buf[pos];
    /* back up over the "#imm" operand, to just after the comma */
    do buf--; while (buf[-1] != ',');
    memcpy(r->buf, "ADR", 3);
    while (--r->count >= 0) r->buf[pos++] = 'L';  /* one 'L' per extra word */
    r->buf[pos] = c;
    if (offset != 0) {
        sprintf(buf, "%ld+", offset);
        buf += strlen(buf);
    }
    spr_asmsym(buf, r->sym);
    return buf;
}
/* Absolute addresses of literals that acquired labels (most recent first). */
static List *litlabels;
/* Return the address of the most recently noted literal at or below target,
 * or 0 when none has been recorded. */
static int32 LiteralBefore(int32 target) {
    List *scan = litlabels;
    while (scan != NULL) {
        if (car_(scan) <= target)
            return car_(scan);
        scan = cdr_(scan);
    }
    return 0;
}
static char *disass_cb(dis_cb_type type, int32 offset, unsigned32 address, int w, void *cb_arg, char *buf) {
RelAddrRec *r = (RelAddrRec *)cb_arg;
IGNORE(w);
*buf = 0;
if (type == D_BORBL) {
if (cb_arg == NULL)
printlabelname(buf, "", "", address, procname);
else
spr_asmsym(buf, r->sym);
} else if (type == D_ADDPCREL) {
if (r != NULL)
buf = disass_adrl(r, buf, address + offset + r->offset);
else
printlabelname(buf, "#", "-.-8", address+offset, procname);
} else if (type == D_SUBPCREL) {
if (r != NULL)
buf = disass_adrl(r, buf, address - offset - r->offset);
else {
/* Backward reference to a string may be to an address part way through
a literal in a different function, for which therefore no label was
generated. (Hence the list litlabels).
*/
int32 target = address - offset + codebase;
int32 litaddr = LiteralBefore(target);
if (litaddr == target)
printlabelname(buf, "#.+8-", "", address - offset, procname);
else {
char b[8];
sprintf(b, "+%ld)", target - litaddr);
printlabelname(buf, "#.+8-(", b, litaddr - codebase, procname);
}
}
} else if (type == D_LOADPCREL || type == D_STOREPCREL) {
/* Backward references here can't currently be to an address for which
no label was generated (loading as an integer the second word of a
double constant or some non-initial word of a string are the
possibilities, which literal pool management currently precludes).
Still, we allow the possibility for safety.
*/
int32 target = address + codebase;
int32 litaddr = offset >= 0 ? target : LiteralBefore(target);
if (litaddr == target)
printlabelname(buf, "[pc, #", "-.-8]", address, procname);
else {
char b[8];
sprintf(b, "+%ld)-.-8", target - litaddr);
printlabelname(buf, "[pc, #(", b, litaddr - codebase, procname);
}
} else if (type == D_LOAD || type == D_STORE) {
if (r != NULL) {
sprintf(buf, "%ld+", offset);
buf += strlen(buf);
spr_asmsym(buf, r->sym);
}
}
return buf+strlen(buf);
}
typedef struct Import {
struct Import *next;
Symstr *sym;
int32 patch;
} Import;
static char *asm_needslabel; /* a bitmap, 1 bit per code word */
#define needslabel(widx) (((widx) & 0x80000000) ? 0 : (asm_needslabel[(widx) >> 3] & (1 << ((widx)&7))))
#define setlabbit(widx) (((widx) & 0x80000000) ? 0 : (asm_needslabel[(widx) >> 3] |= (1 << ((widx)&7))))
/* If the literal at byte offset q was marked as needing a label (it is
 * addressed from code), record its absolute address so later backward
 * references can locate it (see LiteralBefore). */
static void NoteLiteralLabel(int32 q) {
    if (needslabel(q / 4))
        litlabels = (List *)global_cons2(SU_Other, litlabels, q+codebase);
}
/* Ensure that jopcode labels are present only for instructions, not data
 * items: remove from asm_lablist the label (if any) defined at word offset
 * `wordoffset`, splicing the list around the discarded cell. */
static void killasmlabel(int32 wordoffset)
{
    LabList *p, *prev = NULL;
    int32 offset = wordoffset * 4L;      /* convert to a byte offset */
    for ( p = asm_lablist ; p != NULL ; prev = p, p = p->labcdr)
        if ((p->labcar->u.defn & 0x00ffffff) == offset) {
            p = (LabList *) discard2((VoidStar) p);   /* yields the cdr */
            if (prev == NULL)
                asm_lablist = p;
            else
                prev->labcdr = p;
            return;
        }
}
/* Decode an ARM data-processing immediate operand: an 8-bit value rotated
 * right by twice the 4-bit rotate field (bits 8..11 of w), i.e. the rotate
 * amount is (w & 0xf00) >> 7.
 * The shift==0 case is handled separately: the old single expression
 * evaluated val << 32, which is undefined behaviour for a 32-bit type. */
static int32 immediateval(int32 w) {
    int32 shift = (w & 0xf00)>>7, val = w & 0xff;
    if (shift == 0) return val;
    return (val>>shift) | (val<<(32-shift));
}
/* Pre-scan the code buffer, building the asm_needslabel bitmap: set a bit
 * for every code word that is the target of a branch or of a pc-relative
 * data access, and remove jopcode labels that fall on literal data. */
static void asm_scancode(void)
{ int32 p;
  int bitmapsize;
  char *x;
  bitmapsize = (int)((codep + 127) >> 5); /* > 1 bit per word of code */
  asm_needslabel = x = (char *)BindAlloc(bitmapsize);
  memclr((VoidStar)x, bitmapsize);
  for (p = 0; p < codep; p += 4) {
    int32 w = code_inst_(p);
    int32 f = code_flag_(p);
    switch (f) {
    case LIT_ADCON:
      /* address constant: if it refers to a locally defined symbol,
         its target word needs a label */
      { Symstr *sym = find_extsym(codebase+p);
        ExtRef *xr = symext_(sym);
        if (xr->extflags & xr_defloc) setlabbit(xr->extoffset/4);
        break;
      }
    case LIT_RELADDR:
      break;
    case LIT_OPCODE:
      switch (w & 0x0f000000) {
      case OP_B: case OP_BL:
        /* sign-extend the 24-bit branch offset and convert to a word index
           (+2 accounts for the ARM pc-ahead-by-8 pipeline) */
        if (w & 0x00800000L) w |= 0xff000000L; else w &= 0x00ffffffL;
        w = (p / 4L) + 2L + w; /* word index */
        setlabbit(w);
        break;
      case OP_PREN:
        /* LDR/STR with pc base: the addressed literal gets a label and
           loses any jopcode label */
        if (((w >> 16L) & 0xfL) == R_PC) {
          int32 d = w & 0xfffL;
          if ((w & F_UP)==0) d = -d;
          w = (p + 8L + d) / 4L;
          setlabbit(w);
          killasmlabel(w);
        }
        break;
      case 0x02000000L:
        /* ADD/SUB Rd, pc, #imm -- address taken of a nearby literal */
        { int32 op = w & (0xfL << 21L);
          if ( (op == F_ADD || op == F_SUB) &&
               (((w >> 16L) & 0xfL) == R_PC)) {
            int32 val = immediateval(w);
            if (op == F_SUB) val = -val;
            w = (p + 8L + val) / 4L;
            setlabbit(w);
            killasmlabel(w);
          }
        }
        break;
      case OP_CPPRE:
        /* coprocessor load/store with pc base (offset is in words) */
        if (((w >> 16L) & 0xfL) == R_PC) {
          int32 d = w & 0xffL;
          if ((w & F_UP)==0) d = -d;
          w = (p + 8L) / 4L + d;
          setlabbit(w);
          killasmlabel(w);
        }
        break;
      default:
        break;
      }
    default:
      break;
    }
  }
}
/* exported functions ...*/
/* True between asm_header and asm_trailer (output has begun). */
static bool headerdone;
/* Emit "AREA <name><attrib1>, <attrib2>" preceded by a blank line. */
static void asm_areadef(char const *name, char const *attrib1, char const *attrib2)
{
    newline();
    indent8();
    indent_18_if_annotating();
    fputs("AREA ", asmstream);
    pr_asmname(name);
    fprintf(asmstream, "%s, %s\n", attrib1, attrib2);
}
/* Emit an AREA directive named after symbol `name`, optionally preceded by
 * an EXPORT of the area name itself. */
static void asm_areaname(Symstr const *name, char const *attributes, bool export_area)
{ char const *s = symname_(name);
  if (export_area) {
      newline();
      indent8();
      indent_18_if_annotating();
      fputs("EXPORT ", asmstream);
      pr_asmname(s);
      pr_unmangled_name(name);
  }
  asm_areadef(s, "", attributes);
}
/* Disassemble and print the current code buffer (one function, or anonymous
 * literals from static initialisers) to asmstream.
 * name: the function's symbol, or NULL for anonymous literal output.
 * Uses codep/codebase/code_inst_/code_flag_/code_aux_ from codebuf. */
void display_assembly_code(Symstr const *name)
{ uint32 q, qend;
  FILE *as = asmstream;
  char buf[255];
  if (codep == 0)
  { /* nothing emitted: just note that a fresh AREA is pending */
    new_area = YES;
    return;
  }
  if (new_area)
  { /* first output in a new code area: emit its AREA directive (except for
       the very first, already emitted by asm_header) and a DATA label */
    Symstr *sym = bindsym_(codesegment);
    char segname[255];
    sprintf(segname, "C$$%s", symname_(sym) + 2);
    if (!first_area)
        asm_areadef(segname, (obj_iscommoncode() ? ", COMDEF" : ""),
                    "CODE, READONLY");
    newline();
    pr_asmname(symname_(sym));
    fprintf(as, " DATA");
    pr_unmangled_name(sym);
    newline();
    first_area = NO;
    new_area = NO;
  }
  newline();
  if (name != NULL) {
    /* may be NULL for string literals from static inits */
    procname = name;
    if (StrEq(symname_(name), "main"))
        obj_symref(libentrypoint, xr_code, 0);
    indent_18_if_annotating();
    pr_asmsym(name);
    pr_unmangled_name(name);
    newline();
  }
  asm_scancode();        /* build the needslabel bitmap first */
  for (q = 0, qend = codep; q < qend; q += 4) /* q is now a BYTE offset */
  { int32 w = code_inst_(q),
          f = code_flag_(q);
    VoidStar aux = code_aux_(q);
    if (needslabel(q / 4))
    { indent_18_if_annotating();
      printlabelname(buf, "", "\n", q, name);
      fputs(buf, as);
    }
    if (annotations)
        fprintf(as, "%.6lx %.8lx ", (long)(q + codebase), lx_arg(w));
    if (f != LIT_FPNUM2) indent8();
    switch(f)
    {
case LIT_RELADDR:
      /* instruction with a symbol relocation; consecutive ADD/SUB
         immediates on the same register are folded into one ADRL */
      { unsigned32 oldq = q;
        RelAddrRec r;
        r.sym = (Symstr *)aux; r.offset = 0; r.count = 0;
        r.buf = buf;
        if ((w & 0x0f000000L) == 0x02000000L) {
            int32 rd = (w >> 12) & 0xfL;
            while (q+4 < qend && code_flag_(q+4) == LIT_OPCODE &&
                   !needslabel((q+4)/4)) {
                int32 w1 = code_inst_(q+4);
                /* stop unless same opcode/cond, and Rd == Rn == rd */
                if (((w1 ^ w) & 0xfff00000L) != 0 ||
                    ((w1 >> 12) & 0xfL) != rd ||
                    ((w1 >> 16) & 0xfL) != rd)
                    break;
                q += 4;
                r.count++; r.offset += immediateval(w1);
            }
        }
        disass(w, oldq, buf, (void *)&r, disass_cb);
        fputs(buf, as);
        pr_unmangled_name((Symstr *)aux);
        break;
      }
case LIT_OPCODE:
      disass(w, q, buf, (void *)0, disass_cb);
      fputs(buf, as);
      break;
case LIT_STRING:
      NoteLiteralLabel(q);
      { int32 tmpw = w;
        unsigned char *uc = (unsigned char *)&tmpw;
        fprintf(as, "DCB 0x%.2x,0x%.2x,0x%.2x,0x%.2x",
                uc[0], uc[1], uc[2], uc[3]);
      }
      if (annotations) fprintf(as, " ; "), pr_chars(w);
      break;
case LIT_NUMBER:
      NoteLiteralLabel(q);
      fprintf(as, "DCD 0x%.8lx", lx_arg(w));
      break;
case LIT_ADCON:
      NoteLiteralLabel(q);
      { Symstr *sym = find_extsym(codebase+q);
        fputs("DCD ", as); pr_asmsym(sym);
        if (w != 0) fprintf(as, "+0x%lx", lx_arg(w));
        pr_unmangled_name(sym);
      }
      break;
case LIT_FPNUM:
      NoteLiteralLabel(q);
      { char *s = (char *)aux;
        /* aux holds the source text of the constant; a leading '<' means it
           had no exact textual form, so dump the raw bit pattern instead */
        if (*s != '<') {
            fprintf(as, "DCFS %s", s);
        } else {
            fprintf(as, "DCD 0x%.8lx", lx_arg(w));
        }
      }
      break;
case LIT_FPNUM1:
      NoteLiteralLabel(q);
      { char *s = (char *)aux;
        if (*s != '<') {
            fprintf(as, "DCFD %s", s);
        } else {
            fprintf(as, "DCD 0x%.8lx, 0x%.8lx",
                    lx_arg(w), lx_arg(code_inst_(q+4)) );
        }
      }
      break;
case LIT_FPNUM2: /* second word: already printed by the FPNUM1 case */
      if (annotations) break; else continue;
case LIT_INT64_1:
      NoteLiteralLabel(q);
      fprintf(as, "DCD 0x%.8lx, 0x%.8lx", lx_arg(w), lx_arg(code_inst_(q+4)) );
      break;
case LIT_INT64_2: /* second word: already printed by the LIT_INT64_1 case */
      if (annotations) break; else continue;
default:
      fputs("???", as);
    }
    newline();
  }
}
/* Begin assembly output: reset per-file state, configure the disassembler's
 * syntax, weakly request the right run-time library variant, and emit the
 * banner plus the initial code AREA (the AREA is suppressed when annotating
 * for an interactive user). */
void asm_header(void)
{
    char b[64];
    litlabels = NULL;
    first_area = YES;
    new_area = NO;
    disass_sethexprefix("&");
    disass_setregnames(regnames, fregnames);
    headerdone = YES;
    /* reference Lib$$Request$$armlib$$<variant> weakly so the linker
       selects the matching library build */
    strcpy(b, "Lib$$Request$$armlib$$");
    target_lib_variant(&b[22]);
    obj_symref(sym_insert_id(b), xr_code+xr_weak, 0);
    fprintf(asmstream, "; generated by %s\n", CC_BANNER);
    if (annotations) return; /* do not bore interactive user */
    asm_areadef("C$$code", (obj_iscommoncode() ? ", COMDEF" : ""),
                "CODE, READONLY");
}
/* Emit EXPORT directives for all externally-defined symbols, then IMPORT
 * directives (with WEAK where flagged) for all referenced-but-undefined
 * symbols.  COMMON data and the constdata segment are skipped: they are
 * handled separately in asm_trailer. */
static void asm_outextern(void)
{
    ExtRef *x;
    int first;
    first = 1;
    /* pass 1: EXPORTs -- defined-external, non-object-only symbols */
    for (x = obj_symlist; x != 0; x = x->extcdr) {
        int32 flags = x->extflags;
        if (!(flags & xr_objflg) && !(flags & xr_defloc) && (flags & xr_defext)) {
            if (first) newline();
            first = 0;
            indent8();
            indent_18_if_annotating();
            fprintf(asmstream, "EXPORT ");
            pr_asmsym(x->extsym);
            fprintf(asmstream, "\n");
        }
    }
    first = 1;
    /* pass 2: IMPORTs -- referenced symbols with no definition here */
    for (x = obj_symlist; x != 0; x = x->extcdr) {
        int32 flags = x->extflags;
        if (!(flags & xr_objflg) && !(flags & xr_defloc)
            && !(flags & xr_defext)
#ifdef CONST_DATA_IN_CODE
            && x->extsym != bindsym_(constdatasegment)
#endif
            ) {
            /* COMMON data dealt with earlier */
            if (!(flags & xr_code) && (x->extoffset > 0)) continue;
            if (first) newline();
            first = 0;
            indent8();
            indent_18_if_annotating();
            fprintf(asmstream, "IMPORT ");
            pr_asmsym(x->extsym);
            if (x->extflags & xr_weak)
                fprintf(asmstream, ", WEAK");
            fprintf(asmstream, "\n");
        }
    }
}
/* Record a user renaming of register regno and, if output has already
 * started, emit the corresponding RN (integer) or FN (floating-point)
 * directive so the new name is declared to the assembler. */
void asm_setregname(int regno, char const *name) {
    if (regno < R_F0) {
        if (headerdone) fprintf(asmstream, "%s RN %d\n", name, regno);
        regnames[regno] = name;
    } else {
        /* floating-point registers are numbered from R_F0 upwards */
        regno -= R_F0;
        if (headerdone) fprintf(asmstream, "%s FN %d\n", name, regno);
        fregnames[regno] = name;
    }
}
/* Consistency check on a DataInit item: len must equal |lenwanted|; a repeat
 * count other than 1 is only legal when the value is zero (a '%' space
 * reservation) and lenwanted is non-negative.  syserr's on violation. */
static void asm_checklenandrpt(int32 len, int lenwanted, int32 rpt, int32 val) {
    bool norpt = NO;                    /* negative lenwanted: rpt forbidden */
    if (lenwanted < 0) norpt = YES, lenwanted = -lenwanted;
    if (len != lenwanted)
        syserr(syserr_asm_data, (long)len);
    if (rpt != 1 && (val != 0 || norpt))
        syserr(syserr_asm_trailer1, (long)rpt, (long)val);
}
/* Emit one list of static-initialiser items as DCB/DCW/DCD/DCF directives.
 * p: head of the DataInit list; constdata: non-zero appends " DATA" to
 * labels (constdata area convention).
 * `offset` tracks alignment mod 4 so that unaligned halfwords/words get the
 * 'U' directive suffix (DCWU/DCDU). */
static void asm_data(DataInit *p, int constdata)
{ FILE *as = asmstream;
  int32 offset = 0;
  for (; p != 0; p = p->datacdr)
  { int32 sort = p->sort;
    IPtr rpt = p->rpt, len = p->len;
    FloatCon *ptrval = (FloatCon *)p->val;
    union { unsigned32 l;
            unsigned16 w[2];
            unsigned8 b[4];
            /* FloatCon *f; may be bigger than unsigned32 */
    } val;
    /* NOTE(review): p->val is an IPtr narrowed into unsigned32 here --
       assumed safe for the non-pointer sorts; confirm on 64-bit hosts. */
    val.l = p->val;
    indent_18_if_annotating();
    if (sort != LIT_LABEL) indent8();
    switch (sort)
    { case LIT_LABEL:
        /* for LIT_LABEL, rpt actually carries the Symstr pointer */
        pr_asmsym((Symstr *)rpt);
        if (constdata) fprintf(as, " DATA");
        pr_unmangled_name((Symstr *)rpt);
        break;
      default: syserr(syserr_asm_trailer, (long)sort);
      case LIT_BBBB:
        asm_checklenandrpt(len, -4, rpt, val.l);
        fprintf(as, "DCB 0x%.2x,0x%.2x,0x%.2x,0x%.2x",
                val.b[0], val.b[1], val.b[2], val.b[3]);
        break;
      case LIT_BBBX:
        asm_checklenandrpt(len, -3, rpt, val.l);
        fprintf(as, "DCB 0x%.2x,0x%.2x,0x%.2x", val.b[0], val.b[1], val.b[2]);
        break;
      case LIT_BBX:
        asm_checklenandrpt(len, -2, rpt, val.l);
        fprintf(as, "DCB 0x%.2x,0x%.2x", val.b[0], val.b[1]);
        break;
      case LIT_BXXX:
        asm_checklenandrpt(len, -1, rpt, val.l);
        fprintf(as, "DCB 0x%.2x", val.b[0]);
        break;
      case LIT_HH:
        asm_checklenandrpt(len, -4, rpt, val.l);
        fprintf(as, "DCW%c 0x%.4x,0x%.4x", offset & 1 ? 'U' : ' ',
                val.w[0], val.w[1]);
        break;
      case LIT_HX:
        asm_checklenandrpt(len, -2, rpt, val.l);
        fprintf(as, "DCW%c 0x%.4x", offset & 1 ? 'U' : ' ', val.w[0]);
        break;
      case LIT_BBH:
        /* mixed byte/halfword items are split across two directives */
        asm_checklenandrpt(len, -4, rpt, val.l);
        fprintf(as, "DCB 0x%.2x,0x%.2x", val.b[0], val.b[1]);
        newline(); indent_18_if_annotating(); indent8();
        fprintf(as, "DCW%c 0x%.4x", offset & 1 ? 'U' : ' ', val.w[1]);
        break;
      case LIT_HBX:
        asm_checklenandrpt(len, -3, rpt, val.l);
        fprintf(as, "DCW%c 0x%.4x", offset & 1 ? 'U' : ' ', val.w[0]);
        newline(); indent_18_if_annotating(); indent8();
        fprintf(as, "DCB 0x%.2x", val.b[2]);
        break;
      case LIT_HBB:
        asm_checklenandrpt(len, -4, rpt, val.l);
        fprintf(as, "DCW%c 0x%.4x", offset & 1 ? 'U' : ' ', val.w[0]);
        newline(); indent_18_if_annotating(); indent8();
        fprintf(as, "DCB 0x%.2x,0x%.2x", val.b[2], val.b[3]);
        break;
      case LIT_NUMBER:
        asm_checklenandrpt(len, 4, rpt, val.l);
        /* (redundant with the check just above, kept for safety) */
        if (len != 4) syserr(syserr_asm_data, (long)len);
        if (rpt == 1)
            fprintf(as, "DCD%c 0x%.8lx", offset & 3 ? 'U' : ' ', lx_arg(val.l));
        else /* val.l already checked to be zero */
            fprintf(as, "%% %ld", (long)(rpt*len));
        break;
      case LIT_FPNUM:
        { FloatCon *f = ptrval;
          char *s = f->floatstr;
          /* a leading '<' means no exact textual form: dump raw bits */
          if (*s != '<') {
              fprintf(as, "DCF%c %s", (len == 8) ? 'D' : 'S', s);
          } else if (len == 4) {
              fprintf(as, "DCD 0x%.8lx", lx_arg(f->floatbin.fb.val));
          } else {
              fprintf(as, "DCD 0x%.8lx, 0x%.8lx",
                      lx_arg(f->floatbin.db.msd), lx_arg(f->floatbin.db.lsd));
          }
          break;
        }
      case LIT_ADCON: /* (possibly external) name + offset */
        /* for LIT_ADCON, len carries the Symstr pointer */
        while (rpt--)
        {
#if (sizeof_ptr == 2)
            fputs("DCW ", as);
#else
            fputs("DCD ", as);
#endif
            pr_asmsym((Symstr *)len);
            pr_unmangled_name((Symstr *)len);
            if (val.l != 0) fprintf(as, "+0x%lx", lx_arg(val.l));
            if (rpt) {newline(); indent_18_if_annotating(); indent8(); }
        }
        break;
    }
    offset = (offset + len) & 3;     /* track alignment for DCWU/DCDU */
    newline();
  }
}
/* Simple cons cell of ExtRefs, used to sort zero-init symbols by offset. */
typedef struct ExtRefList {
    struct ExtRefList *cdr;
    ExtRef *car;
} ExtRefList;
/* Emit "% len": reserve len zero-initialised bytes in the current area. */
static void asm_pad(int32 len)
{ indent8();
  indent_18_if_annotating();
  fprintf(asmstream, "%% %ld\n", (long)len);
}
/* Finish assembly output: emit the remaining data AREAs (const data,
 * initialised data, zero-init data, the sb-based adcon pool, COMDEFs and
 * COMMON blocks), then the EXPORT/IMPORT lists and the final END. */
void asm_trailer(void)
{ FILE *as = asmstream;
#ifdef CONST_DATA_IN_CODE
  if (constdata_size() != 0) {
      asm_areadef("C$$constdata", "", "DATA, READONLY");
      newline();
      pr_asmsym(bindsym_(constdatasegment));
      newline();
      asm_data(constdata_head(), 0);
  }
#endif
  if (data_size() != 0)
  {
      asm_areaname(data_sym, "DATA", NO);
      newline();
      asm_data(data_head(), 0);
  }
  if (bss_size != 0)
  { int32 n = 0;
    ExtRef *x = obj_symlist;
    ExtRefList *zisyms = NULL;
    asm_areaname(bss_sym, "NOINIT", NO);
    newline();
    /* insertion-sort the zero-init symbols by their area offset */
    for (; x != NULL; x = x->extcdr)
        if (x->extflags & xr_bss) {
            ExtRefList **prev = &zisyms;
            ExtRefList *p;
            for (; (p = *prev) != 0; prev = &cdr_(p))
                if (x->extoffset < car_(p)->extoffset) break;
            *prev = (ExtRefList *) syn_cons2(*prev, x);
        }
    /* emit each symbol, padding with '%' reservations between them */
    for (; zisyms != NULL; zisyms = cdr_(zisyms))
    { x = car_(zisyms);
      if (x->extoffset != n) asm_pad(x->extoffset-n);
      n = x->extoffset;
      indent_18_if_annotating();
      pr_asmsym(x->extsym);
      pr_unmangled_name(x->extsym);
      newline();
    }
    if (n != bss_size) asm_pad(bss_size-n);
  }
  if (adconpool.size != 0) {
      asm_areaname(adcon_sym, "BASED sb", NO);
      asm_data(adconpool.head, 0);
  }
  { CommonDef *p;
    for (p = commondefs; p != NULL; p = p->next)
    { asm_areaname(p->name, "COMDEF", YES);
      asm_data(p->data.head, 0);
    }
  }
  { ExtRef *x;
    /* COMMON blocks: referenced, undefined data symbols with a size */
    for (x = obj_symlist; x != NULL; x = x->extcdr)
    { int32 flags = x->extflags;
      if ((flags & (xr_defloc + xr_defext)) == 0) {
          /* not defined... */
          Symstr *s = x->extsym;
          int32 len = x->extoffset;
          if (!(flags & xr_code) && (len > 0))
          { /* common data... */
            asm_areaname(s, "COMMON, NOINIT", YES);
            asm_pad(len);
          }
      }
    }
  }
  if (!annotations)
      asm_outextern();
  newline();
  indent8();
  indent_18_if_annotating();
  fputs("END\n", as);
  headerdone = NO;
}
#endif
/* end of arm/asm.c */
|
<reponame>Yuhtin/react-covid-tracker
/**
 * Realm object schema describing per-country COVID statistics.
 * Field names appear to mirror a disease.sh-style country payload --
 * verify against the API actually consumed by the app.
 */
export default class CountrySchema {
  static schema = {
    name: 'Country',
    primaryKey: 'id',
    properties: {
      // indexed: speeds up queries that filter on the country id
      id: {type: 'string', indexed: true},
      cases: 'int',
      todayCases: 'int',
      deaths: 'int',
      recovered: 'int',
      casesPerMillion: 'int',
    },
  };
}
|
#include <iostream>
// Singly-linked list node: payload plus pointer to the next node
// (nullptr terminates the list).
struct Node {
    int data;
    Node* next;
};
/**
 * Insert a new node holding newData immediately after prevNode.
 * Ownership: the new node is heap-allocated; the caller's list owns it.
 * A null prevNode is reported (to stderr -- diagnostics previously went to
 * stdout) and the call is a no-op.
 */
void insertAfter(Node* prevNode, int newData) {
    if (prevNode == nullptr) {
        std::cerr << "Error: Previous node cannot be null for insertion." << std::endl;
        return;
    }
    Node* newNode = new Node;
    newNode->data = newData;
    newNode->next = prevNode->next;   // splice into the existing chain
    prevNode->next = newNode;
}
int main() {
// Example usage
Node* head = new Node{1, nullptr};
insertAfter(head, 2); // Insert 2 after head
insertAfter(head->next, 3); // Insert 3 after the node with data 2
return 0;
}
|
#!/bin/bash
# Query the local monerod RPC for its peer list and write the IPs of the
# "white" (known-good) peers to $servdir/monero.$serverid.iplist.
monerod=monerod
daemon=127.0.0.1
servdir=$1    # arg 1: output directory
serverid=$2   # arg 2: server id used in the output file name
# Take column 3 of each "white" peer-list line and strip the :port suffix.
white=$($monerod --rpc-bind-ip $daemon --rpc-bind-port 18081 print_pl | grep white | awk '{print $3}' | cut -f 1 -d ":")
white_a=($white)
echo ${white_a[@]} > $servdir/monero.$serverid.iplist
|
import * as _ from 'lodash';
import {Injectable} from '@angular/core';
import {Observable} from 'rxjs';
@Injectable({
  providedIn: 'root'
})
export class WindowService {
  /** Reference to the global browser window object. */
  window = window;

  /** Debounced resize dispatcher: at most one event per 50 ms burst, and at
   *  least one per second while calls keep arriving (maxWait). */
  _resizeDebounced = _.debounce(this._resize.bind(this), 50, {'maxWait': 1000});

  /** Request a (debounced) synthetic window resize event. */
  resize() {
    this._resizeDebounced();
  }

  /** Dispatch a resize event on the next macrotask, bypassing the debounce. */
  resizeNow() {
    setTimeout(this._resize.bind(this));
  }

  _resize() {
    this.window.dispatchEvent(new Event('resize'));
  }

  /**
   * Open a blank tab immediately (so the browser attributes it to the user
   * gesture), then navigate it when `url` emits; close it on error.
   */
  open(url: Observable<string>) {
    const tab = this.window.open();
    if (!tab) {
      // window.open() returns null when the popup is blocked; previously
      // this crashed with a TypeError on `tab.location`.
      return;
    }
    url.subscribe((tabLocation) => tab.location.href = tabLocation, () => tab.close());
  }
}
|
#!/bin/bash
###################################################
#
# Diff files in ADE with a git repo.
#
###################################################
. /etc/profile # setup ADE
PROG_PATH=${BASH_SOURCE[0]} # this script's name
PROG_NAME=${PROG_PATH##*/} # basename of script (strip path)
PROG_DIR="$(cd "$(dirname "${PROG_PATH:-$PWD}")" 2>/dev/null 1>&2 && pwd)"
BIN_DIR=${PROG_DIR:="/home/msnielse/bin"} # supporting scripts
# Helper scripts: view creation and tar bundling; both must be executable.
MKTMP_VIEW_SH=$BIN_DIR/create-temp-view.sh
BUNDLE_SH=$BIN_DIR/tar-send.sh
# Scratch area and a per-process log file name.
workdir=${WORKSPACE:-/tmp}
proc=$$
out=out.${proc}.log
[ ! -x $MKTMP_VIEW_SH ] && echo "** error: unable to execute script: $MKTMP_VIEW_SH" && exit 2
[ ! -x $BUNDLE_SH ] && echo "** error: unable to execute script: $BUNDLE_SH" && exit 2
# Cleanup traps are currently disabled (left for reference).
#trap 'echo "# cleaning up tmpfiles: $out" && rm -f "$out" >/dev/null 2>&1' 0
#trap "exit 2" 1 2 3 15
###################################################
# make ade view
# either makes a new view with a unique name, or
# just prints the view name tha would be created
# Create a temporary ADE view for the oggadp/main series (or with -v just
# print the name that would be used); caller options are passed through.
do_mkview() {
    # "$@" (quoted) preserves caller-supplied options containing spaces;
    # the previous unquoted $@ re-split them.
    $MKTMP_VIEW_SH "$@" -p oggadp -s main compare_git_p${proc}
}
# Bundle the Adapter tree out of the ADE view into $workdir and unpack it
# under $target_dir for the later diff.  Uses globals: view, workdir, basen,
# p2file, target_dir, BUNDLE_SH.
# NOTE(review): variable expansions are unquoted throughout; paths with
# spaces will break -- confirm that is acceptable in this environment.
copy_from_ade() {
    echo "========================================================================="
    printf "Copy from ADE:
view=$view
script=$BUNDLE_SH
target file= $workdir/Adapter_${view}.tgz
target dir: $target_dir\n\n"
    # Run the bundler inside the view, then extract next to it.
    ade useview $view -exec "$BUNDLE_SH -l -F $workdir/Adapter_${view}.tgz Adapter"
    ls -l $p2file
    ls -l ${basen}.tgz
    [ -f $p2file ] && ( mkdir -p $target_dir && cd $target_dir && tar xf ../${basen}.tgz ; )
    echo "==== Contents of git: ade/oggadp/Adapter => "
    ls -l ade/oggadp/Adapter
    echo "==== Contents of ade: $target_dir/Adapter => "
    ls -l $target_dir/Adapter
}
# Recursively diff the git checkout against the tree copied out of ADE,
# echoing the differences and capturing them in $out.
# NOTE(review): the pipeline's exit status is tee's, so a non-empty diff is
# not reflected in $? -- confirm no caller relies on it.
do_ade_diff() {
    echo "============== diff: ade/oggadp/Adapter $target_dir/Adapter =============="
    diff -r ade/oggadp/Adapter $target_dir/Adapter | tee $out
}
###################################################
# main
#
view_opts=$@
view=$( do_mkview $view_opts -v ) # get view name (only)
# Derive the bundle/extract paths from the view name.
basen=Adapter_${view}
target_dir=${workdir}/${basen}
p2file=${workdir}/${basen}.tgz
# create a view. if "tip", apparently must refresh for recent changes (ade bug?)
do_mkview $view_opts -y
# Views named *_tip track the series tip and need an explicit refresh first.
echo "$view" | grep "_tip$" >/dev/null && is_tip=true || is_tip=false
$is_tip \
    && echo "** warning: view is tip_default, first refreshing: $view" \
    && ade useview $view -exec "ade refreshview"
copy_from_ade
do_ade_diff
|
#!/bin/bash
# WSJ VTLN recipe: train a linear-VTLN (LVTLN) model, estimate per-speaker
# warp factors for train and test sets, re-extract warped MFCC features,
# then build LDA+MLLT (tri2d) and LDA+MLLT+SAT (tri3c) systems on them.
lang_suffix=
echo "$0 $@" # Print the command line for logging
. utils/parse_options.sh || exit 1;
. cmd.sh
featdir=mfcc_vtln
num_leaves=2500
num_gauss=15000
# train linear vtln
steps/train_lvtln.sh --cmd "$train_cmd" $num_leaves $num_gauss \
  data/train_si84 data/lang${lang_suffix} exp/tri2b exp/tri2c || exit 1
# Duplicate the training data dir and attach the learned per-speaker warps.
mkdir -p data/train_si84_vtln
cp -r data/train_si84/* data/train_si84_vtln || exit 1
cp exp/tri2c/final.warp data/train_si84_vtln/spk2warp || exit 1
utils/mkgraph.sh data/lang${lang_suffix}_test_bg_5k \
  exp/tri2c exp/tri2c/graph${lang_suffix}_bg_5k || exit 1;
utils/mkgraph.sh data/lang${lang_suffix}_test_tgpr \
  exp/tri2c exp/tri2c/graph${lang_suffix}_tgpr || exit 1;
# Decode each test set with the LVTLN model to estimate test-speaker warps.
for t in eval93 dev93 eval92; do
  nj=10
  [ $t == eval92 ] && nj=8
  steps/decode_lvtln.sh --nj $nj --cmd "$decode_cmd" \
    exp/tri2c/graph${lang_suffix}_bg_5k data/test_$t \
    exp/tri2c/decode${lang_suffix}_${t}_bg_5k || exit 1
  mkdir -p data/test_${t}_vtln
  cp -r data/test_$t/* data/test_${t}_vtln || exit 1
  cp exp/tri2c/decode${lang_suffix}_${t}_bg_5k/final.warp \
    data/test_${t}_vtln/spk2warp || exit 1
done
# Re-extract MFCC features with the warp factors applied.
for x in test_eval92 test_eval93 test_dev93 train_si84; do
  steps/make_mfcc.sh --nj 20 --cmd "$train_cmd" data/${x}_vtln exp/make_mfcc/${x}_vtln ${featdir} || exit 1
  steps/compute_cmvn_stats.sh data/${x}_vtln exp/make_mfcc/${x}_vtln ${featdir} || exit 1
  utils/fix_data_dir.sh data/${x}_vtln || exit 1 # remove segments with problems
done
# Align with tri2c, then train LDA+MLLT (tri2d) on the warped features.
steps/align_si.sh --nj 10 --cmd "$train_cmd" \
  data/train_si84_vtln data/lang${lang_suffix} \
  exp/tri2c exp/tri2c_ali_si84 || exit 1
steps/train_lda_mllt.sh --cmd "$train_cmd" \
  --splice-opts "--left-context=3 --right-context=3" \
  2500 15000 data/train_si84_vtln data/lang${lang_suffix} \
  exp/tri2c_ali_si84 exp/tri2d || exit 1
# Decode tri2d in the background while alignment for tri3c proceeds.
(
  utils/mkgraph.sh data/lang${lang_suffix}_test_tgpr \
    exp/tri2d exp/tri2d/graph${lang_suffix}_tgpr || exit 1;
  steps/decode.sh --nj 10 --cmd "$decode_cmd" \
    exp/tri2d/graph${lang_suffix}_tgpr data/test_dev93_vtln \
    exp/tri2d/decode${lang_suffix}_tgpr_dev93 || exit 1;
  steps/decode.sh --nj 8 --cmd "$decode_cmd" \
    exp/tri2d/graph${lang_suffix}_tgpr data/test_eval92_vtln \
    exp/tri2d/decode${lang_suffix}_tgpr_eval92 || exit 1;
) &
steps/align_si.sh --nj 10 --cmd "$train_cmd" \
  data/train_si84_vtln data/lang${lang_suffix} \
  exp/tri2d exp/tri2d_ali_si84 || exit 1
# From 2d system, train 3c which is LDA + MLLT + SAT.
steps/train_sat.sh --cmd "$train_cmd" \
  2500 15000 data/train_si84_vtln data/lang${lang_suffix} \
  exp/tri2d_ali_si84 exp/tri3c || exit 1;
(
  utils/mkgraph.sh data/lang${lang_suffix}_test_tgpr \
    exp/tri3c exp/tri3c/graph${lang_suffix}_tgpr || exit 1;
  steps/decode_fmllr.sh --nj 10 --cmd "$decode_cmd" \
    exp/tri3c/graph${lang_suffix}_tgpr data/test_dev93_vtln \
    exp/tri3c/decode${lang_suffix}_tgpr_dev93 || exit 1;
  steps/decode_fmllr.sh --nj 10 --cmd "$decode_cmd" \
    exp/tri3c/graph${lang_suffix}_tgpr data/test_eval93_vtln \
    exp/tri3c/decode${lang_suffix}_tgpr_eval93 || exit 1;
  steps/decode_fmllr.sh --nj 8 --cmd "$decode_cmd" \
    exp/tri3c/graph${lang_suffix}_tgpr data/test_eval92_vtln \
    exp/tri3c/decode${lang_suffix}_tgpr_eval92 || exit 1;
) &
# Below shows the results we got with this script.
# Actually we only have improvement on dev93 and the others get worse.
# With VTLN:
# for x in exp/tri3c/decode_tgpr_{dev,eval}{92,93}; do grep WER $x/wer_* | utils/best_wer.sh ; done
# %WER 13.86 [ 1141 / 8234, 235 ins, 123 del, 783 sub ] exp/tri3c/decode_tgpr_dev93/wer_17
# %WER 9.23 [ 521 / 5643, 131 ins, 31 del, 359 sub ] exp/tri3c/decode_tgpr_eval92/wer_16
# %WER 12.47 [ 430 / 3448, 67 ins, 43 del, 320 sub ] exp/tri3c/decode_tgpr_eval93/wer_14
# Baseline:
#(note, I had to run the following extra decoding to get this)
#steps/decode_fmllr.sh --nj 10 --cmd "$decode_cmd" exp/tri3b/graph_tgpr data/test_eval93_vtln exp/tri3b/decode_tgpr_eval93
#
# a04:s5: for x in exp/tri3b/decode_tgpr_{dev,eval}{92,93}; do grep WER $x/wer_* | utils/best_wer.sh ; done
# %WER 14.37 [ 1183 / 8234, 228 ins, 122 del, 833 sub ] exp/tri3b/decode_tgpr_dev93/wer_19
# %WER 8.98 [ 507 / 5643, 129 ins, 28 del, 350 sub ] exp/tri3b/decode_tgpr_eval92/wer_14
# %WER 12.21 [ 421 / 3448, 68 ins, 39 del, 314 sub ] exp/tri3b/decode_tgpr_eval93/wer_14
|
<filename>tapestry-core/src/test/java/org/apache/tapestry5/integration/app1/mixins/EchoValue3.java
// Copyright 2009 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.integration.app1.mixins;
import org.apache.tapestry5.annotations.BindParameter;
import org.apache.tapestry5.annotations.InjectContainer;
import org.apache.tapestry5.MarkupWriter;
import org.apache.tapestry5.ClientElement;
/**
* Mixin demonstrating the use of BindParameter, using explicit naming of the parent parameter.
*/
public class EchoValue3
{
    // Bound to the container's "object" parameter if it exists, otherwise
    // to its "value" parameter (explicit parent-parameter naming).
    @BindParameter(value = {"object","value"})
    private Object boundParameter;

    // The component this mixin is attached to; supplies the client id
    // used in the generated div ids.
    @InjectContainer
    private ClientElement element;

    // Saved original parameter value, restored in afterRender.
    private Object temp;

    void beginRender(MarkupWriter writer)
    {
        writer.element("div","id",element.getClientId() + "_before3");
        writer.writeRaw("echo3-" + boundParameter + "-before");
        writer.end();
        // Swap in "world" for the body render, remembering the old value.
        temp = boundParameter;
        boundParameter = "world";
    }

    void afterRender(MarkupWriter writer)
    {
        boundParameter = temp;  // restore the container's original value
        writer.element("div","id",element.getClientId() + "_after3");
        writer.writeRaw("echo3-" + boundParameter + "-after");
        writer.end();
    }
}
|
# NOTE(review): despite the "install_python" log markers, this snippet
# installs the Rust toolchain (rustup) and cargo-edit, not Python --
# confirm the labels are intentional.
echo "install_python: start" \
    && curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh \
    && cargo install cargo-edit \
    && echo "install_python: end" \
|
package com.neusoft.service.impl;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.neusoft.entity.vo.AnalContributionVo;
import com.neusoft.mapper.AnalContributionMapper;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import java.util.List;
import static org.junit.Assert.*;
/**
* @author shkstart
* @create 2018/11/28 - 21:11
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration("classpath:spring/spring-mybatisplus.xml")
public class AnalContributionServiceImplTest {
    @Autowired
    AnalContributionMapper analContributionMapper;

    /**
     * Paged contribution query should return a (possibly empty) result list.
     * The original test only printed the result and asserted nothing, so it
     * could never fail; it now at least verifies a non-null list.
     */
    @Test
    public void analContributionPage() {
        Page page = new Page();
        List<AnalContributionVo> analContributionVoList = analContributionMapper.analContributionPage(page, "分", null);
        System.out.println("!!!!!"+analContributionVoList);
        assertNotNull(analContributionVoList);
    }
}
|
<gh_stars>0
def getans(n):
    # Read n "key value" lines from stdin and echo each pair the first time
    # its key appears; later duplicates of a key are suppressed.
    # NOTE: Python 2 only (raw_input and the print statement).
    S=set()
    for i in range(n):
        # assumes exactly one space between key and value -- TODO confirm
        k,v=raw_input().split(' ')
        if not k in S:
            S.add(k)
            print k,v
getans(int(raw_input()))
|
#!/bin/bash
# Cross-compile CPython ${PYTHON2_VERSION} for Pepper (i686, Aldebaran CTC):
# stage 1 builds a native "host" Python inside the ros1-pepper container
# (needed to drive the cross build), stage 2 cross-builds Python against the
# CTC sysroot and installs the ROS1 Python dependencies into it.
set -euf -o pipefail
PYTHON2_VERSION=2.7.13
INSTALL_ROOT=.ros-root
# NOTE(review): with `set -u`, an unset ALDE_CTC_CROSS aborts before this
# friendly check fires -- consider "${ALDE_CTC_CROSS:-}".
if [ -z "$ALDE_CTC_CROSS" ]; then
    echo "Please define the ALDE_CTC_CROSS variable with the path to Aldebaran's Crosscompiler toolchain"
    exit 1
fi
docker build -t ros1-pepper -f docker/Dockerfile_ros1 docker/
# Fetch and unpack the CPython sources once.
if [ ! -e "Python-${PYTHON2_VERSION}.tar.xz" ]; then
    wget -cN https://www.python.org/ftp/python/$PYTHON2_VERSION/Python-${PYTHON2_VERSION}.tar.xz
    tar xvf Python-${PYTHON2_VERSION}.tar.xz
fi
mkdir -p ccache-build/
mkdir -p ${PWD}/Python-${PYTHON2_VERSION}-host
mkdir -p ${PWD}/${INSTALL_ROOT}/Python-${PYTHON2_VERSION}
# Allocate a TTY only for interactive runs (CI sets ROS_PEPPER_CI).
USE_TTY=""
if [ -z "$ROS_PEPPER_CI" ]; then
    USE_TTY="-it"
fi
# Stage 1: native host build of Python plus pip and the ROS python deps.
docker run ${USE_TTY} --rm \
    -u $(id -u) \
    -e INSTALL_ROOT=${INSTALL_ROOT} \
    -e PYTHON2_VERSION=${PYTHON2_VERSION} \
    -v ${PWD}/ccache-build:/home/nao/.ccache \
    -v ${PWD}/Python-${PYTHON2_VERSION}:/home/nao/Python-${PYTHON2_VERSION}-src \
    -v ${PWD}/Python-${PYTHON2_VERSION}-host:/home/nao/${INSTALL_ROOT}/Python-${PYTHON2_VERSION} \
    -v ${ALDE_CTC_CROSS}:/home/nao/ctc \
    -e CC \
    -e CPP \
    -e CXX \
    -e RANLIB \
    -e AR \
    -e AAL \
    -e LD \
    -e READELF \
    -e CFLAGS \
    -e CPPFLAGS \
    -e LDFLAGS \
    ros1-pepper \
    bash -c "\
set -euf -o pipefail && \
mkdir -p Python-${PYTHON2_VERSION}-src/build-host && \
cd Python-${PYTHON2_VERSION}-src/build-host && \
export PATH=/home/nao/${INSTALL_ROOT}/Python-${PYTHON2_VERSION}/bin:\${PATH} && \
../configure \
--prefix=/home/nao/${INSTALL_ROOT}/Python-${PYTHON2_VERSION} \
--enable-shared \
--disable-ipv6 \
ac_cv_file__dev_ptmx=yes \
ac_cv_file__dev_ptc=no && \
export LD_LIBRARY_PATH=/home/nao/ctc/openssl/lib:/home/nao/ctc/zlib/lib:/home/nao/${INSTALL_ROOT}/Python-${PYTHON2_VERSION}/lib && \
make -j4 install && \
wget -O - -q https://bootstrap.pypa.io/get-pip.py | /home/nao/${INSTALL_ROOT}/Python-${PYTHON2_VERSION}/bin/python && \
/home/nao/${INSTALL_ROOT}/Python-${PYTHON2_VERSION}/bin/pip install empy catkin-pkg setuptools vcstool numpy rospkg defusedxml netifaces"
# Stage 2: cross build for the robot (i686-aldebaran-linux-gnu), driven by
# the host Python built above.
docker run ${USE_TTY} --rm \
    -u $(id -u) \
    -e INSTALL_ROOT=${INSTALL_ROOT} \
    -e PYTHON2_VERSION=${PYTHON2_VERSION} \
    -v ${PWD}/ccache-build:/home/nao/.ccache \
    -v ${PWD}/Python-${PYTHON2_VERSION}:/home/nao/Python-${PYTHON2_VERSION}-src \
    -v ${PWD}/Python-${PYTHON2_VERSION}-host:/home/nao/Python-${PYTHON2_VERSION}-host \
    -v ${PWD}/${INSTALL_ROOT}/Python-${PYTHON2_VERSION}:/home/nao/${INSTALL_ROOT}/Python-${PYTHON2_VERSION} \
    -v ${ALDE_CTC_CROSS}:/home/nao/ctc \
    ros1-pepper \
    bash -c "\
set -euf -o pipefail && \
mkdir -p Python-${PYTHON2_VERSION}-src/build-pepper && \
cd Python-${PYTHON2_VERSION}-src/build-pepper && \
export LD_LIBRARY_PATH=/home/nao/ctc/openssl/lib:/home/nao/ctc/zlib/lib:/home/nao/Python-${PYTHON2_VERSION}-host/lib && \
export PATH=/home/nao/Python-${PYTHON2_VERSION}-host/bin:\${PATH} && \
../configure \
--prefix=/home/nao/${INSTALL_ROOT}/Python-${PYTHON2_VERSION} \
--host=i686-aldebaran-linux-gnu \
--build=x86_64-linux \
--enable-shared \
--disable-ipv6 \
ac_cv_file__dev_ptmx=yes \
ac_cv_file__dev_ptc=no && \
make -j4 && \
export LD_LIBRARY_PATH=/home/nao/ctc/openssl/lib:/home/nao/ctc/zlib/lib:/home/nao/${INSTALL_ROOT}/Python-${PYTHON2_VERSION}/lib && \
export PATH=/home/nao/${INSTALL_ROOT}/Python-${PYTHON2_VERSION}/bin:\${PATH} && \
make install && \
wget -O - -q https://bootstrap.pypa.io/get-pip.py | /home/nao/${INSTALL_ROOT}/Python-${PYTHON2_VERSION}/bin/python && \
/home/nao/${INSTALL_ROOT}/Python-${PYTHON2_VERSION}/bin/pip install empy catkin-pkg setuptools vcstool numpy rospkg defusedxml netifaces"
|
#!/bin/bash
# Launcher for a Spring application: finds a JRE, stops a previously running
# instance of the same app in this folder, then starts a new one detached.
SHELL_FOLDER=$(
    cd "$(dirname "$0")"
    pwd
)
FOLDER_NAME="${SHELL_FOLDER##*/}"
cd "$SHELL_FOLDER"

# Locate a usable java binary: prefer one on PATH, fall back to JAVA_HOME.
checkJava() {
    if [[ $(type -p java) != "" ]]; then
        _java=java
        startApp
    elif [[ -n $JAVA_HOME ]] && [[ -x "$JAVA_HOME/bin/java" ]]; then
        echo Found Java executable in JAVA_HOME
        _java="$JAVA_HOME/bin/java"
        startApp
    else
        echo "no JRE found"
    fi
}

# Stop any previous instance of MAIN_CLASS for this folder, then start a new
# one in the background, logging to ./console.log.
startApp() {
    # Use a printf format string so '%' in the data cannot be interpreted.
    printf '%s\n' "$(date)"
    printf '==== Starting ==== \n'
    if [[ "$_java" ]]; then
        version=$("$_java" -version 2>&1 | awk -F '"' '/version/ {print $2}')
        echo Java Version "$version"
    fi
    ## Adjust memory settings if necessary
    export JAVA_OPTS="-server -Xms1g -Xmx1g -Xss256k -Xmn600m -XX:MetaspaceSize=128m -XX:MaxMetaspaceSize=256m -XX:-OmitStackTraceInFastThrow -Duser.timezone=Asia/Shanghai -Dclient.encoding.override=UTF-8 -Dfile.encoding=UTF-8"
    # Project code path
    export CODE_HOME=$(pwd)
    printf 'PATH:%s \n' "$CODE_HOME"
    # Dependency classpath: compiled classes plus bundled jars
    export CLASSPATH="$CODE_HOME/classes:./lib/*"
    # Main class to launch
    export MAIN_CLASS=me.kany.project.learning.spring.DefaultShutdownApplication
    # Gracefully stop (SIGTERM) a previous instance, but only when one exists:
    # the original unconditionally ran `kill -15` with a possibly empty PID list.
    local pids
    pids=$(ps aux | grep -v grep | grep "$MAIN_CLASS" | grep "$FOLDER_NAME" | awk '{print $2}')
    if [[ -n "$pids" ]]; then
        kill -15 $pids
    fi
    $_java $JAVA_OPTS -classpath "$CLASSPATH" "$MAIN_CLASS" >./console.log 2>&1 &
    printf '================== \n'
}

checkJava
|
#!/bin/bash
# Start the HBase master daemon. __HDP_CURRENT_FOLDER__ is a placeholder
# substituted with the actual install root at deploy time.
__HDP_CURRENT_FOLDER__/hbase-master/bin/hbase-daemon.sh start master
|
# Generate the CA private key and self-signed CA certificate.
cfssl genkey -initca ca_csr.json | cfssljson -bare ca
# Generate the redoctober certificate, signed by the CA created above.
cfssl gencert -ca ca.pem -ca-key ca-key.pem -config ca_signing_config.json redoctober_csr.json | cfssljson -bare redoctober
# Remove the intermediate certificate signing requests; only the keys and
# certificates are needed from here on.
rm *.csr
|
#!/usr/bin/env bash
# Regression test: each clickhouse-local invocation must start with its own
# ephemeral state, so a table created by one run must not be visible to the next.
set -e
CUR_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
. $CUR_DIR/../shell_config.sh
${CLICKHOUSE_LOCAL} --query "create table test engine Log as select 1 a"
# Should not see the table created by the previous instance
if ${CLICKHOUSE_LOCAL} --query "select * from test" 2>/dev/null
then
    exit 1
fi
|
import javafx.application.Application;
import javafx.scene.Scene;
import javafx.scene.control.Label;
import javafx.scene.layout.VBox;
import javafx.stage.Stage;

import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
/**
 * Minimal JavaFX application that shows the current time in London
 * (Europe/London), formatted as HH:mm, in a single label.
 */
public class LondonTime extends Application {

    public static void main(String[] args) {
        launch(args);
    }

    @Override
    public void start(Stage primaryStage) throws Exception {
        // Capture the wall-clock time in the Europe/London zone once at startup;
        // the label is not refreshed afterwards.
        ZonedDateTime londonTime = ZonedDateTime.now(ZoneId.of("Europe/London"));
        DateTimeFormatter formatter = DateTimeFormatter.ofPattern("HH:mm");
        String formattedTime = londonTime.format(formatter);
        Label lblTime = new Label("The current time in London is: " + formattedTime);
        VBox root = new VBox();
        root.getChildren().add(lblTime);
        Scene scene = new Scene(root, 350, 200);
        primaryStage.setScene(scene);
        primaryStage.show();
    }
}
|
/*
* Small publish / subscribe to events module
* ------------------------------------------
*
* publish
*
* @param name [string]
* @param args [array] pass to callback function
*
* subscribe
*
* @param name [string] ( same as publish name )
* @param callback [function]
*/
// Registered callbacks, keyed by event name.
let subscriptions = {};

/**
 * Invoke every callback subscribed under `name`, passing `args` through
 * as a single array argument. Does nothing when nobody has subscribed.
 *
 * @param name [string] event name ( same as subscribe name )
 * @param args [array] passed to each callback
 */
const publish = ( name, args = [] ) => {
	// Object.hasOwn avoids calling hasOwnProperty directly on the object
	// (which breaks if a key named "hasOwnProperty" is ever subscribed) and
	// ignores inherited prototype keys such as "toString".
	if( !Object.hasOwn( subscriptions, name ) )
		return;
	let callbacks = subscriptions[name];
	if( callbacks )
		callbacks.forEach( callback => {
			callback( args );
		} );
};

/**
 * Register `callback` for the event `name`.
 *
 * @param name [string] event name
 * @param callback [function] receives the args array passed to publish
 * @returns object with a `remove()` method that unsubscribes this callback
 */
const subscribe = ( name, callback = () => {} ) => {
	if( !Object.hasOwn( subscriptions, name ) )
		subscriptions[name] = [];
	const index = subscriptions[name].push( callback ) - 1;
	// remove subscription
	return {
		remove: () => {
			// delete (rather than splice) keeps the indices of the other
			// subscriptions stable; forEach skips the resulting hole.
			delete subscriptions[name][index];
		}
	};
};

export { publish, subscribe };
|
// Copyright 2015 The Vanadium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package jenkins
import (
"encoding/json"
"fmt"
"io"
"io/ioutil"
"net/http"
"net/url"
"regexp"
"strconv"
"strings"
"v.io/jiri/collect"
)
// New creates a Jenkins client that talks to the given host.
func New(host string) (*Jenkins, error) {
	return &Jenkins{host: host}, nil
}

// NewForTesting creates a Jenkins instance in test mode.
func NewForTesting() *Jenkins {
	j := new(Jenkins)
	j.testMode = true
	j.invokeMockResults = map[string][]byte{}
	return j
}
// Jenkins is a client for the REST API of a single Jenkins server.
type Jenkins struct {
	// host is the base URL of the Jenkins server.
	host string
	// The following fields are for testing only.
	// testMode indicates whether this Jenkins instance is in test mode.
	testMode bool
	// invokeMockResults maps from API suffix to a mock result.
	// In test mode, the mock result will be returned when "invoke" is called.
	invokeMockResults map[string][]byte
}

// MockAPI mocks "invoke" with the given API suffix.
func (j *Jenkins) MockAPI(suffix, result string) {
	j.invokeMockResults[suffix] = []byte(result)
}
// QueuedBuild describes one entry in the Jenkins build queue.
type QueuedBuild struct {
	Id     int
	Params string `json:"params,omitempty"`
	Task   QueuedBuildTask
}

// QueuedBuildTask identifies the job a queued build belongs to.
type QueuedBuildTask struct {
	Name string
}
// ParseRefs parses refs from a QueuedBuild object's Params field.
func (qb *QueuedBuild) ParseRefs() string {
	// The params string is in the form of:
	// "\nREFS=ref/changes/12/3412/2\nPROJECTS=test" or
	// "\nPROJECTS=test\nREFS=ref/changes/12/3412/2"
	const refsPrefix = "REFS="
	for _, line := range strings.Split(qb.Params, "\n") {
		if strings.HasPrefix(line, refsPrefix) {
			return strings.TrimPrefix(line, refsPrefix)
		}
	}
	return ""
}
// QueuedBuilds returns the queued builds for the given job.
// The queue endpoint reports builds for every job, so the result is
// filtered down to entries whose task name matches jobName.
func (j *Jenkins) QueuedBuilds(jobName string) (_ []QueuedBuild, err error) {
	// Get queued builds.
	bytes, err := j.invoke("GET", "queue/api/json", url.Values{})
	if err != nil {
		return nil, err
	}
	var builds struct {
		Items []QueuedBuild
	}
	if err := json.Unmarshal(bytes, &builds); err != nil {
		return nil, fmt.Errorf("Unmarshal() failed: %v\n%s", err, string(bytes))
	}
	// Filter for jobName.
	queuedBuildsForJob := []QueuedBuild{}
	for _, build := range builds.Items {
		if build.Task.Name != jobName {
			continue
		}
		queuedBuildsForJob = append(queuedBuildsForJob, build)
	}
	return queuedBuildsForJob, nil
}
// BuildInfo describes one Jenkins build as returned by the build JSON API.
type BuildInfo struct {
	Actions []BuildInfoAction
	// Building is true while the build is still running.
	Building  bool
	Number    int
	Result    string
	Id        string
	Timestamp int64
}

// BuildInfoAction holds the parameters attached to a build action.
type BuildInfoAction struct {
	Parameters []BuildInfoParameter
}

// BuildInfoParameter is a single name/value build parameter.
type BuildInfoParameter struct {
	Name  string
	Value string
}
// ParseRefs parses the REFS parameter from a BuildInfo object.
// It returns the value of the first parameter named "REFS", or "" when
// no such parameter exists.
func (bi *BuildInfo) ParseRefs() string {
	for _, action := range bi.Actions {
		for _, param := range action.Parameters {
			if param.Name == "REFS" {
				// Returning directly replaces the original labeled break.
				return param.Value
			}
		}
	}
	return ""
}
// OngoingBuilds returns a slice of BuildInfo for current ongoing builds
// for the given job.
func (j *Jenkins) OngoingBuilds(jobName string) (_ []BuildInfo, err error) {
	// Get urls of all ongoing builds.
	// The "tree" parameter limits the response to the executable URLs of both
	// regular and one-off executors on every node.
	bytes, err := j.invoke("GET", "computer/api/json", url.Values{
		"tree": {"computer[executors[currentExecutable[url]],oneOffExecutors[currentExecutable[url]]]"},
	})
	if err != nil {
		return nil, err
	}
	var computers struct {
		Computer []struct {
			Executors []struct {
				CurrentExecutable struct {
					Url string
				}
			}
			OneOffExecutors []struct {
				CurrentExecutable struct {
					Url string
				}
			}
		}
	}
	if err := json.Unmarshal(bytes, &computers); err != nil {
		return nil, fmt.Errorf("Unmarshal() failed: %v\n%s", err, string(bytes))
	}
	// Collect the URLs of everything currently executing; idle executors
	// report an empty URL and are skipped.
	urls := []string{}
	for _, computer := range computers.Computer {
		for _, executor := range computer.Executors {
			curUrl := executor.CurrentExecutable.Url
			if curUrl != "" {
				urls = append(urls, curUrl)
			}
		}
		for _, oneOffExecutor := range computer.OneOffExecutors {
			curUrl := oneOffExecutor.CurrentExecutable.Url
			if curUrl != "" {
				urls = append(urls, curUrl)
			}
		}
	}
	buildInfos := []BuildInfo{}
	// A build URL for this job ends in .../<jobName>/<buildNumber>/.
	masterJobURLRE := regexp.MustCompile(fmt.Sprintf(`.*/%s/(\d+)/$`, jobName))
	for _, curUrl := range urls {
		// Filter for jobName, and get the build number.
		matches := masterJobURLRE.FindStringSubmatch(curUrl)
		if matches == nil {
			continue
		}
		strBuildNumber := matches[1]
		buildNumber, err := strconv.Atoi(strBuildNumber)
		if err != nil {
			return nil, fmt.Errorf("Atoi(%s) failed: %v", strBuildNumber, err)
		}
		// Fetch the full build info for each matching build (one extra API
		// call per ongoing build of this job).
		buildInfo, err := j.BuildInfo(jobName, buildNumber)
		if err != nil {
			return nil, err
		}
		buildInfos = append(buildInfos, *buildInfo)
	}
	return buildInfos, nil
}
// BuildInfo returns a build's info for the given jobName and buildNumber.
func (j *Jenkins) BuildInfo(jobName string, buildNumber int) (*BuildInfo, error) {
	buildSpec := fmt.Sprintf("%s/%d", jobName, buildNumber)
	return j.BuildInfoForSpec(buildSpec)
}

// BuildInfoForSpec returns a build's info for the given build spec
// (e.g. "<jobName>/<buildNumber>" or "<jobName>/lastCompletedBuild").
// (The comment previously referred to a non-existent "BuildInfoWithBuildURL".)
func (j *Jenkins) BuildInfoForSpec(buildSpec string) (*BuildInfo, error) {
	getBuildInfoUri := fmt.Sprintf("job/%s/api/json", buildSpec)
	bytes, err := j.invoke("GET", getBuildInfoUri, url.Values{})
	if err != nil {
		return nil, err
	}
	var buildInfo BuildInfo
	if err := json.Unmarshal(bytes, &buildInfo); err != nil {
		return nil, fmt.Errorf("Unmarshal() failed: %v\n%s", err, string(bytes))
	}
	return &buildInfo, nil
}
// AddBuild adds a build to the given job.
func (j *Jenkins) AddBuild(jobName string) error {
	_, err := j.invoke("POST", fmt.Sprintf("job/%s/build", jobName), url.Values{})
	return err
}

// AddBuildWithParameter adds a parameterized build to the given job.
func (j *Jenkins) AddBuildWithParameter(jobName string, params url.Values) error {
	_, err := j.invoke("POST", fmt.Sprintf("job/%s/buildWithParameters", jobName), params)
	return err
}
// CancelQueuedBuild cancels the queued build by given id.
func (j *Jenkins) CancelQueuedBuild(id string) error {
	_, err := j.invoke("POST", "queue/cancelItem", url.Values{"id": {id}})
	return err
}

// CancelOngoingBuild cancels the ongoing build by given jobName and buildNumber.
func (j *Jenkins) CancelOngoingBuild(jobName string, buildNumber int) error {
	_, err := j.invoke("POST", fmt.Sprintf("job/%s/%d/stop", jobName, buildNumber), url.Values{})
	return err
}
// TestCase describes a single test case in a Jenkins test report.
type TestCase struct {
	ClassName string
	Name      string
	Status    string
}

// Equal compares two test cases by identity (class name and test name);
// Status is not part of the comparison.
func (t TestCase) Equal(t2 TestCase) bool {
	return t.ClassName == t2.ClassName && t.Name == t2.Name
}
// FailedTestCasesForBuildSpec returns failed test cases for the given build spec.
// A case counts as failed when its status is "FAILED" or "REGRESSION".
func (j *Jenkins) FailedTestCasesForBuildSpec(buildSpec string) ([]TestCase, error) {
	failedTestCases := []TestCase{}
	// Get all test cases.
	getTestReportUri := fmt.Sprintf("job/%s/testReport/api/json", buildSpec)
	bytes, err := j.invoke("GET", getTestReportUri, url.Values{})
	if err != nil {
		return failedTestCases, err
	}
	var testCases struct {
		Suites []struct {
			Cases []TestCase
		}
	}
	if err := json.Unmarshal(bytes, &testCases); err != nil {
		return failedTestCases, fmt.Errorf("Unmarshal(%v) failed: %v", string(bytes), err)
	}
	// Filter failed tests.
	for _, suite := range testCases.Suites {
		for _, curCase := range suite.Cases {
			if curCase.Status == "FAILED" || curCase.Status == "REGRESSION" {
				failedTestCases = append(failedTestCases, curCase)
			}
		}
	}
	return failedTestCases, nil
}
// JenkinsMachines stores information about Jenkins machines.
type JenkinsMachines struct {
	Machines []JenkinsMachine `json:"computer"`
}

// JenkinsMachine stores information about a Jenkins machine.
type JenkinsMachine struct {
	Name string `json:"displayName"`
	Idle bool   `json:"idle"`
}

// IsNodeIdle checks whether the given node is idle.
// It returns an error when the node is not known to the Jenkins server.
func (j *Jenkins) IsNodeIdle(node string) (bool, error) {
	bytes, err := j.invoke("GET", "computer/api/json", url.Values{})
	if err != nil {
		return false, err
	}
	machines := JenkinsMachines{}
	if err := json.Unmarshal(bytes, &machines); err != nil {
		return false, fmt.Errorf("Unmarshal() failed: %v\n%s\n", err, string(bytes))
	}
	// Linear scan over all nodes; match by display name.
	for _, machine := range machines.Machines {
		if machine.Name == node {
			return machine.Idle, nil
		}
	}
	return false, fmt.Errorf("node %v not found", node)
}
// createRequest represents a request to create a new machine in
// Jenkins configuration.
type createRequest struct {
	Name              string            `json:"name"`
	Description       string            `json:"nodeDescription"`
	NumExecutors      int               `json:"numExecutors"`
	RemoteFS          string            `json:"remoteFS"`
	Labels            string            `json:"labelString"`
	Mode              string            `json:"mode"`
	Type              string            `json:"type"`
	RetentionStrategy map[string]string `json:"retentionStrategy"`
	NodeProperties    nodeProperties    `json:"nodeProperties"`
	Launcher          map[string]string `json:"launcher"`
}

// nodeProperties enumerates the environment variable settings for
// Jenkins configuration.
type nodeProperties struct {
	Class       string              `json:"stapler-class"`
	Environment []map[string]string `json:"env"`
}
// AddNodeToJenkins sends an HTTP request to Jenkins that prompts it
// to add a new machine to its configuration.
//
// NOTE: Jenkins REST API is not documented anywhere and the
// particular HTTP request used to add a new machine to Jenkins
// configuration has been crafted using trial and error.
func (j *Jenkins) AddNodeToJenkins(name, host, description, credentialsId string) error {
	// Build the node definition that Jenkins expects as a JSON-encoded
	// "json" form value alongside the "name" and "type" parameters.
	request := createRequest{
		Name:              name,
		Description:       description,
		NumExecutors:      1,
		RemoteFS:          "/home/veyron/jenkins",
		Labels:            fmt.Sprintf("%s linux", name),
		Mode:              "EXCLUSIVE",
		Type:              "hudson.slaves.DumbSlave$DescriptorImpl",
		RetentionStrategy: map[string]string{"stapler-class": "hudson.slaves.RetentionStrategy$Always"},
		NodeProperties: nodeProperties{
			Class: "hudson.slaves.EnvironmentVariablesNodeProperty",
			// Environment variables injected on the new node.
			Environment: []map[string]string{
				map[string]string{
					"stapler-class": "hudson.slaves.EnvironmentVariablesNodeProperty$Entry",
					"key":           "GOROOT",
					"value":         "$HOME/go",
				},
				map[string]string{
					"stapler-class": "hudson.slaves.EnvironmentVariablesNodeProperty$Entry",
					"key":           "PATH",
					"value":         "$HOME/go/bin:$PATH",
				},
				map[string]string{
					"stapler-class": "hudson.slaves.EnvironmentVariablesNodeProperty$Entry",
					"key":           "TERM",
					"value":         "xterm-256color",
				},
			},
		},
		Launcher: map[string]string{
			"stapler-class": "hudson.plugins.sshslaves.SSHLauncher",
			"host":          host,
			// The following ID can be retrieved from Jenkins configuration backup.
			"credentialsId": credentialsId,
		},
	}
	bytes, err := json.Marshal(request)
	if err != nil {
		return fmt.Errorf("Marshal(%v) failed: %v", request, err)
	}
	values := url.Values{
		"name": {name},
		"type": {"hudson.slaves.DumbSlave$DescriptorImpl"},
		"json": {string(bytes)},
	}
	// NOTE(review): this mutating call uses GET rather than POST — presumably
	// required by the undocumented endpoint (see note above); confirm before changing.
	_, err = j.invoke("GET", "computer/doCreateItem", values)
	if err != nil {
		return err
	}
	return nil
}
// RemoveNodeFromJenkins sends an HTTP request to Jenkins that prompts
// it to remove an existing machine from its configuration.
func (j *Jenkins) RemoveNodeFromJenkins(node string) error {
	_, err := j.invoke("POST", fmt.Sprintf("computer/%s/doDelete", node), url.Values{})
	return err
}
// invoke invokes the Jenkins API using the given suffix, values and
// HTTP method, returning the raw response body.
//
// In test mode it returns the mock result registered via MockAPI instead
// of performing any network I/O.
func (j *Jenkins) invoke(method, suffix string, values url.Values) (_ []byte, err error) {
	// Return mock result in test mode.
	if j.testMode {
		return j.invokeMockResults[suffix], nil
	}
	apiURL, err := url.Parse(j.host)
	if err != nil {
		return nil, fmt.Errorf("Parse(%q) failed: %v", j.host, err)
	}
	apiURL.Path = fmt.Sprintf("%s/%s", apiURL.Path, suffix)
	apiURL.RawQuery = values.Encode()
	// Use a distinct variable name so the "url" package is not shadowed,
	// and drop the original's redundant io.Reader declaration (the body is
	// always nil here).
	requestURL := apiURL.String()
	req, err := http.NewRequest(method, requestURL, nil)
	if err != nil {
		return nil, fmt.Errorf("NewRequest(%q, %q, %v) failed: %v", method, requestURL, nil, err)
	}
	req.Header.Add("Accept", "application/json")
	res, err := http.DefaultClient.Do(req)
	if err != nil {
		return nil, fmt.Errorf("Do(%v) failed: %v", req, err)
	}
	// Close the body and fold any close error into the named return.
	defer collect.Error(func() error { return res.Body.Close() }, &err)
	bytes, err := ioutil.ReadAll(res.Body)
	if err != nil {
		return nil, err
	}
	// queue/cancelItem API returns 404 even successful.
	// See: https://issues.jenkins-ci.org/browse/JENKINS-21311.
	if suffix != "queue/cancelItem" && res.StatusCode >= http.StatusBadRequest {
		return nil, fmt.Errorf("HTTP request %q returned %d:\n%s", requestURL, res.StatusCode, string(bytes))
	}
	return bytes, nil
}
// GenBuildSpec returns a spec string for the given Jenkins build.
//
// If the main job is a multi-configuration job, the spec is in the form of:
// <jobName>/axis1Label=axis1Value,axis2Label=axis2Value,.../<suffix>
// The axis values are taken from the given axisValues map.
//
// If no axisValues are provides, the spec will be: <jobName>/<suffix>.
func GenBuildSpec(jobName string, axisValues map[string]string, suffix string) string {
	if len(axisValues) == 0 {
		return jobName + "/" + suffix
	}
	axes := make([]string, 0, len(axisValues))
	for label, value := range axisValues {
		axes = append(axes, label+"="+value)
	}
	return jobName + "/" + strings.Join(axes, ",") + "/" + suffix
}
// LastCompletedBuildStatus returns the most recent completed BuildInfo for the given job.
//
// axisValues can be set to nil if the job is not multi-configuration.
func (j *Jenkins) LastCompletedBuildStatus(jobName string, axisValues map[string]string) (*BuildInfo, error) {
	// Delegate straight to BuildInfoForSpec; the original's extra error
	// branch added nothing.
	return j.BuildInfoForSpec(GenBuildSpec(jobName, axisValues, "lastCompletedBuild"))
}
|
# Point OpenSSL-dependent builds at Homebrew's OpenSSL installation.
# $(...) replaces the legacy backtick command substitution, and the values
# are quoted in case the brew prefix contains spaces.
export OPENSSL_INCLUDE_DIR="$(brew --prefix openssl)/include"
export OPENSSL_LIB_DIR="$(brew --prefix openssl)/lib"
|
#!/usr/bin/env bash
##############################################################
# Usage                                                      #
# ./class_generator.sh basePath Class1 Class2 Class3         #
##############################################################
# Generates a header/implementation pair with an opaque struct and
# new/delete functions for each class name given after the base path.
# Abort instead of generating files in the wrong directory when cd fails.
cd "$1" || exit 1
for i in "${@:2}"; do
    # Header: opaque struct typedef plus constructor/destructor declarations.
    echo "#include \"../base.h\"
typedef struct __$i *$i;
$i ${i}_new();
void ${i}_delete($i instance);
" > "$i.h"
    # Implementation: empty struct definition and malloc/free lifecycle.
    # NOTE(review): assumes ../base.h pulls in <stdlib.h> for malloc/free — confirm.
    echo "#include \"$i.h\"
struct __$i {
};
$i ${i}_new() {
$i instance = malloc(sizeof(struct __$i));
return instance;
}
void ${i}_delete($i instance) {
free(instance);
}
" > "$i.c"
done
|
<filename>orderer.go
/*
Copyright: Cognition Foundry. All Rights Reserved.
License: Apache License Version 2.0
*/
package gohfc
import (
"google.golang.org/grpc"
"github.com/hyperledger/fabric/protos/common"
"github.com/hyperledger/fabric/protos/orderer"
"context"
"fmt"
"google.golang.org/grpc/credentials"
"github.com/golang/protobuf/proto"
"time"
"google.golang.org/grpc/keepalive"
)
// Orderer expose API's to communicate with orderers.
type Orderer struct {
	Name string
	Uri  string
	// Opts are the gRPC dial options used for every connection to this orderer.
	Opts   []grpc.DialOption
	caPath string
	// con is the cached connection used by Broadcast (created lazily).
	con    *grpc.ClientConn
	client orderer.AtomicBroadcastClient
}

// timeout, in seconds, bounds how long Deliver waits for the orderer's response.
const timeout = 5
// Broadcast Broadcast envelope to orderer for execution.
// The gRPC connection is created lazily on first use and then cached on
// the Orderer for subsequent calls.
func (o *Orderer) Broadcast(envelope *common.Envelope) (*orderer.BroadcastResponse, error) {
	if o.con == nil {
		c, err := grpc.Dial(o.Uri, o.Opts...)
		if err != nil {
			return nil, fmt.Errorf("cannot connect to orderer: %s err is: %v", o.Name, err)
		}
		o.con = c
		o.client = orderer.NewAtomicBroadcastClient(o.con)
	}
	bcc, err := o.client.Broadcast(context.Background())
	if err != nil {
		return nil, err
	}
	defer bcc.CloseSend()
	// Propagate Send failures instead of silently falling through to Recv
	// (the original discarded this error).
	if err := bcc.Send(envelope); err != nil {
		return nil, err
	}
	response, err := bcc.Recv()
	if err != nil {
		return nil, err
	}
	if response.Status != common.Status_SUCCESS {
		return nil, fmt.Errorf("unexpected status: %v", response.Status)
	}
	return response, nil
}
// Deliver delivers envelope to orderer. Please note that new connection will be created on every call of Deliver.
// It streams responses until a status message arrives, returning the last
// block received when the status is SUCCESS.
func (o *Orderer) Deliver(envelope *common.Envelope) (*common.Block, error) {
	connection, err := grpc.Dial(o.Uri, o.Opts...)
	if err != nil {
		return nil, fmt.Errorf("cannot connect to orderer: %s err is: %v", o.Name, err)
	}
	defer connection.Close()
	dk, err := orderer.NewAtomicBroadcastClient(connection).Deliver(context.Background())
	if err != nil {
		return nil, err
	}
	if err := dk.Send(envelope); err != nil {
		return nil, err
	}
	var block *common.Block
	// Overall deadline for the delivery; timeout is in seconds.
	timer := time.NewTimer(time.Second * time.Duration(timeout))
	defer timer.Stop()
	for {
		select {
		case <-timer.C:
			return nil, ErrOrdererTimeout
		default:
			// NOTE(review): Recv blocks, so the expired timer is only observed
			// between received messages — a stream that hangs inside Recv is
			// not interrupted. Confirm whether a context deadline was intended.
			response, err := dk.Recv()
			if err != nil {
				return nil, err
			}
			switch t := response.Type.(type) {
			case *orderer.DeliverResponse_Status:
				// A status message terminates the stream.
				if t.Status == common.Status_SUCCESS {
					return block, nil
				} else {
					return nil, fmt.Errorf("orderer response with status: %v", t.Status)
				}
			case *orderer.DeliverResponse_Block:
				// Keep the latest block; it is returned on SUCCESS.
				block = response.GetBlock()
			default:
				return nil, fmt.Errorf("unknown response type from orderer: %s", t)
			}
		}
	}
}
// getGenesisBlock fetches block 0 (the genesis block) of the given channel
// from the orderer, signing the seek request with the supplied identity.
func (o *Orderer) getGenesisBlock(identity Identity, crypto CryptoSuite, channelId string) (*common.Block, error) {
	// Seek exactly block 0 and wait until it is available.
	seekInfo := &orderer.SeekInfo{
		Start:    &orderer.SeekPosition{Type: &orderer.SeekPosition_Specified{Specified: &orderer.SeekSpecified{Number: 0}}},
		Stop:     &orderer.SeekPosition{Type: &orderer.SeekPosition_Specified{Specified: &orderer.SeekSpecified{Number: 0}}},
		Behavior: orderer.SeekInfo_BLOCK_UNTIL_READY,
	}
	seekInfoBytes, err := proto.Marshal(seekInfo)
	if err != nil {
		return nil, err
	}
	creator, err := marshalProtoIdentity(identity)
	if err != nil {
		return nil, err
	}
	txId, err := newTransactionId(creator)
	if err != nil {
		return nil, err
	}
	headerBytes, err := channelHeader(common.HeaderType_DELIVER_SEEK_INFO, txId, channelId, 0, nil)
	// Check this error explicitly: the original overwrote it with the result
	// of signatureHeader before ever inspecting it.
	if err != nil {
		return nil, err
	}
	signatureHeaderBytes, err := signatureHeader(creator, txId)
	if err != nil {
		return nil, err
	}
	header := header(signatureHeaderBytes, headerBytes)
	payloadBytes, err := payload(header, seekInfoBytes)
	if err != nil {
		return nil, err
	}
	// Sign the payload with the identity's private key.
	payloadSignedBytes, err := crypto.Sign(payloadBytes, identity.PrivateKey)
	if err != nil {
		return nil, err
	}
	env := &common.Envelope{Payload: payloadBytes, Signature: payloadSignedBytes}
	return o.Deliver(env)
}
// NewOrdererFromConfig create new Orderer from config
func NewOrdererFromConfig(conf OrdererConfig) (*Orderer, error) {
	o := Orderer{Uri: conf.Host, caPath: conf.TlsPath}
	if !conf.UseTLS {
		o.Opts = []grpc.DialOption{grpc.WithInsecure()}
	} else if o.caPath != "" {
		// Load the CA certificate used to verify the orderer's TLS endpoint.
		creds, err := credentials.NewClientTLSFromFile(o.caPath, "")
		if err != nil {
			return nil, fmt.Errorf("cannot read orderer %s credentials err is: %v", o.Name, err)
		}
		o.Opts = append(o.Opts, grpc.WithTransportCredentials(creds))
	}
	// NOTE(review): when UseTLS is true but TlsPath is empty, neither insecure
	// nor TLS credentials are configured, so Dial will fail later — confirm
	// whether that is intended.
	o.Opts = append(o.Opts,
		// Keepalives every minute keep long-lived connections open even when
		// no streams are active.
		grpc.WithKeepaliveParams(keepalive.ClientParameters{
			Time:                time.Duration(1) * time.Minute,
			Timeout:             time.Duration(20) * time.Second,
			PermitWithoutStream: true,
		}),
		grpc.WithBlock(),
		grpc.WithDefaultCallOptions(
			grpc.MaxCallRecvMsgSize(maxRecvMsgSize),
			grpc.MaxCallSendMsgSize(maxSendMsgSize)))
	return &o, nil
}
|
#! /bin/bash -e
# Entrypoint for the Containernet Docker image.
# start OVS
service openvswitch-switch start
service docker start
# this cannot be done from the Dockerfile since we have the socket not mounted during build
# Pull failures are tolerated (set +e) so the container still starts offline.
set +e
echo 'Pulling the "ubuntu:trusty" and "ubuntu:xenial" image for later use...'
docker pull 'ubuntu:trusty'
docker pull 'ubuntu:xenial'
set -e
echo "Welcome to Containernet running within a Docker container ..."
# With no arguments drop into an interactive shell; otherwise exec the given command.
if [[ $# -eq 0 ]]; then
    exec /bin/bash
else
    exec $*
fi
|
# Package recipe metadata for gpsbabel.
MININIX_PKG_HOMEPAGE=https://www.gpsbabel.org/
MININIX_PKG_DESCRIPTION="GPS file conversion plus transfer to/from GPS units"
MININIX_PKG_VERSION=1.4.4
MININIX_PKG_SHA256=22860e913f093aa9124e295d52d1d4ae1afccaa67ed6bed6f1f8d8b0a45336d1
# The release tag uses underscores (gpsbabel_1_4_4), hence the ${VERSION//./_} substitution.
MININIX_PKG_SRCURL=https://github.com/gpsbabel/gpsbabel/archive/gpsbabel_${MININIX_PKG_VERSION//./_}.tar.gz
MININIX_PKG_DEPENDS="libexpat"

# The tarball unpacks with a nested gpsbabel/ directory; point the source
# directory at it after extraction.
mininix_step_post_extract_package () {
	MININIX_PKG_SRCDIR+=/gpsbabel
}
|
#!/bin/bash
# create a resource group for the clamav container instance
export LOCATION=westeurope
export ENVIRONMENT=${1}

# Environments this script is allowed to deploy to.
readonly PERMITTED_ENVS=(
    dev
    prod
)

# _is_permitted_env: succeed when the environment argument is one of PERMITTED_ENVS.
_is_permitted_env() {
    local candidate=$1
    local e
    for e in "${PERMITTED_ENVS[@]}"; do
        [[ "$e" == "$candidate" ]] && return 0
    done
    return 1
}

# Validate the argument: the original defined PERMITTED_ENVS but never
# checked the supplied environment against it.
if [[ "$#" -lt 1 ]] || ! _is_permitted_env "${ENVIRONMENT}"; then
    echo "Usage: ${BASH_SOURCE[0]} <${PERMITTED_ENVS[*]}>"
    exit 1
fi

az group create \
--location ${LOCATION} \
--name rpg-${ENVIRONMENT}-rg-clamav \
--tags "service_id=rpg" \
"name=rpg-${ENVIRONMENT}-rg-clamav" \
"environment=${ENVIRONMENT}" \
"deployed_by=$(whoami)" \
"logical_name=clamav" \
"environment_branch=$(git rev-parse --abbrev-ref HEAD)" \
"environment_version=$(git rev-parse HEAD)"
|
from astro3words import coords_to_words, words_to_coords
import what3words
import pytest
import numpy as np
def test_coords_to_words():
    """
    Tests conversion of (ra, dec) coordinates to 3 words
    with some edge coordinate cases.
    """
    # Edge values of right ascension and declination, including the
    # wrap-around points 0/360 and the poles.
    for ra in (0, 90, 180, 360, 195.5):
        for dec in (-90, 0, 89.9, 90):
            result = coords_to_words(ra, dec)
            assert isinstance(result, str)
def test_words_to_coords():
    """
    Tests backwards conversion: find 3 words for a list
    of coordinates, then make sure that these words
    map to the same coordinates within 1" precision.
    """
    ra_list = [0, 90, 180, 360, 195.5]
    dec_list = [-90, 0, 89.9, 90]
    precision = 1/3600  # one arcsecond, in degrees
    for ra in ra_list:
        for dec in dec_list:
            words = coords_to_words(ra, dec)
            ra_out, dec_out = words_to_coords(words)
            # Calculate angular distance
            ra_diff = ra - ra_out
            dec_diff = dec - dec_out
            # Wrap the RA difference into (-180, 180]. The original only
            # handled diffs > 180, missing e.g. ra=0 vs ra_out=359.9
            # (diff of -359.9), which would spuriously fail the assertion.
            ra_diff = (ra_diff + 180) % 360 - 180
            # Use small-angle approx of angular distance
            dist2 = ((ra_diff * np.pi / 180)**2 *
                     np.cos(dec * np.pi/180)**2 +
                     (dec_diff*np.pi/180)**2)
            dist = np.sqrt(dist2)
            assert dist < precision

test_words_to_coords()
|
import { Component, AfterContentInit } from '@angular/core';
import { TypedModelComponent } from '../base/TypedModelComponent';
import { SweDataRecord } from '../../../model/swe/SweDataRecord';
import { SweText } from '../../../model/swe/SweText';
import { SweTime } from '../../../model/swe/SweTime';
import { SweCount } from '../../../model/swe/SweCount';
import { SweBoolean } from '../../../model/swe/SweBoolean';
import { SweQuantity } from '../../../model/swe/SweQuantity';
import { SweCategory } from '../../../model/swe/SweCategory';
import { SweTimeRange } from '../../../model/swe/SweTimeRange';
import { SweQuantityRange } from '../../../model/swe/SweQuantityRange';
import { SweElementType } from '../../../model/swe/SweElementType';
import { ComponentType } from '../sml/NamedSweDataComponentComponent';
@Component({
    selector: 'swe-element-type',
    templateUrl: './SweElementTypeComponent.html'
})
export class SweElementTypeComponent extends TypedModelComponent<SweElementType> implements AfterContentInit {

    private typeType: ComponentType;

    ngAfterContentInit(): any {
        this.typeType = this.getTypeType();
    }

    protected createModel(): SweElementType {
        return new SweElementType();
    }

    /**
     * Map the model's concrete SWE type to its ComponentType.
     * The pairs below are checked in the same order as the original
     * if/else chain, so instanceof resolution is unchanged.
     */
    private getTypeType(): ComponentType {
        if (!this.model) {
            return ComponentType.Unknown;
        }
        const mappings: Array<[any, ComponentType]> = [
            [SweText, ComponentType.SweText],
            [SweTime, ComponentType.SweTime],
            [SweCount, ComponentType.SweCount],
            [SweBoolean, ComponentType.SweBoolean],
            [SweQuantity, ComponentType.SweQuantity],
            [SweCategory, ComponentType.SweCategory],
            [SweTimeRange, ComponentType.SweTimeRange],
            [SweQuantityRange, ComponentType.SweQuantityRange],
            [SweDataRecord, ComponentType.SweDataRecord],
            // [SweDataArray, ComponentType.SweDataArray],
        ];
        const type = this.model.type;
        for (const [ctor, componentType] of mappings) {
            if (type instanceof ctor) {
                return componentType;
            }
        }
        return ComponentType.Unknown;
    }
}
|
<reponame>Thangiee/digitalocean
package me.jeffshaw.digitalocean
import java.util.concurrent.TimeoutException
import me.jeffshaw.digitalocean.ToFuture._
import org.asynchttpclient.{AsyncHttpClient, Request, RequestBuilder, Response}
import org.json4s._
import org.json4s.native._
import scala.concurrent.{ExecutionContext, Future}
import scala.collection.JavaConverters._
import scala.concurrent.duration._
/**
*
* @param token Your API token.
* @param maxWaitPerRequest The maximum amount of time the client should wait for a response before assuming the service is down.
* @param actionCheckInterval The amount of time to wait between checks for an action to complete.
*/
case class DigitalOceanClient(
private val token: String,
maxWaitPerRequest: Duration,
actionCheckInterval: Duration
)(implicit client: AsyncHttpClient
) extends DelayedFuture {
private val requestPrefix: Request =
new RequestBuilder(DigitalOceanClient.host).
addHeader("Authorization", "Bearer " + token).
build()
/**
* This needs to be used carefully, because it can potentially give
* the api key to a 3rd party.
*
* @param req
* @tparam T
* @return
*/
private[digitalocean] def customRequest[T: Manifest](
req: Request
)(implicit ec: ExecutionContext
): Future[T] = {
val request =
new RequestBuilder(req).addHeader("Authorization", "Bearer " + token)
parseResponse[T](client.executeRequest(request))
}
def createRequest(
path: Seq[String],
queryParameters: Map[String, Seq[String]] = Map.empty
): RequestBuilder = {
val javaQueryParameters = {
for ((key, value) <- queryParameters) yield
key -> value.asJava
}.asJava
val url = requestPrefix.getUrl + path.mkString("/", "/", "")
new RequestBuilder(requestPrefix).setUrl(url).setQueryParams(javaQueryParameters)
}
def delete(
path: Seq[String],
maybeMessage: Option[JValue] = None,
queryParameters: Map[String, Seq[String]] = Map.empty
)(implicit ec: ExecutionContext
): Future[Unit] = {
val request = createRequest(path, queryParameters).setMethod("DELETE")
for {
message <- maybeMessage
} {
val messageBody = JsonMethods.compact(JsonMethods.render(message.snakizeKeys))
request.setBody(messageBody)
}
for {
response <- client.executeRequest(request)
} yield {
if (response.getStatusCode >= 300) {
throw DigitalOceanClientException(response)
}
}
}
private def parseResponse[T: Manifest](
request: Future[Response]
)(implicit ec: ExecutionContext
): Future[T] = {
for {
response <- request
} yield {
val responseBody = response.getResponseBody
val statusCode = response.getStatusCode
if (statusCode < 300 &&
response.getContentType.startsWith(DigitalOceanClient.contentType)
) {
try parseJson(responseBody).camelizeKeys.extract[T]
catch {
case e: MappingException =>
throw DigitalOceanClientException(response, cause = Some(e))
}
} else {
throw DigitalOceanClientException(response)
}
}
}
def get[T: Manifest](
path: Seq[String],
queryParameters: Map[String, Seq[String]] = Map.empty
)(implicit ec: ExecutionContext
): Future[T] = {
val request = client.executeRequest(createRequest(path, queryParameters).setMethod("GET"))
parseResponse[T](request)
}
def exists(
path: Seq[String],
queryParameters: Map[String, Seq[String]] = Map.empty
)(implicit ec: ExecutionContext
): Future[Boolean] = {
val request = client.executeRequest(createRequest(path, queryParameters).setMethod("HEAD"))
for {
response <- request
} yield {
response.getStatusCode != 404
}
}
def post[T: Manifest](
path: Seq[String],
message: JValue,
queryParameters: Map[String, Seq[String]] = Map.empty
)(implicit ec: ExecutionContext
): Future[T] = {
val messageBody = JsonMethods.compact(JsonMethods.render(message.snakizeKeys))
val request = client.executeRequest(createRequest(path = path).setBody(messageBody).setMethod("POST"))
parseResponse[T](request)
}
def postWithEmptyResponse(
path: Seq[String],
message: JValue,
queryParameters: Map[String, Seq[String]] = Map.empty
)(implicit ec: ExecutionContext
): Future[Unit] = {
val messageBody = JsonMethods.compact(JsonMethods.render(message.snakizeKeys))
val request = client.executeRequest(createRequest(path = path).setBody(messageBody).setMethod("POST"))
for {
response <- request
} yield {
if (response.getStatusCode != 204) {
throw DigitalOceanClientException(response)
}
}
}
/**
 * Issues a PUT request whose body is `message` rendered with snake_cased keys,
 * and deserializes the JSON response into `T`.
 *
 * Fix: `queryParameters` was accepted but silently dropped; it is now
 * forwarded to `createRequest`, consistent with `get`/`exists`.
 */
def put[T: Manifest](
path: Seq[String],
message: JValue,
queryParameters: Map[String, Seq[String]] = Map.empty
)(implicit ec: ExecutionContext
): Future[T] = {
val messageBody = JsonMethods.compact(JsonMethods.render(message.snakizeKeys))
val request = client.executeRequest(
createRequest(path, queryParameters).setBody(messageBody).setMethod("PUT")
)
parseResponse[T](request)
}
/**
 * Repeatedly execute a function until a predicate is satisfied, with a delay of
 * [[actionCheckInterval]] between executions, and a maximum of [[maxWaitPerRequest]]
 * to wait per execution.
 *
 * @param pollAction by-name action re-evaluated once per attempt
 * @param predicate  stop condition tested against each attempt's result
 * @tparam T         result type produced by `pollAction`
 * @return the first result satisfying `predicate`; fails with `TimeoutException`
 *         when a single attempt exceeds [[maxWaitPerRequest]]
 */
private[digitalocean] def poll[T](
pollAction: => Future[T],
predicate: T => Boolean
)(implicit ec: ExecutionContext
): Future[T] = {
// Race each attempt against a timeout future.
val whenTimeout = after(maxWaitPerRequest)(Future.failed(new TimeoutException()))
val firstCompleted = Future.firstCompletedOf(Seq(pollAction, whenTimeout))
for {
result <- firstCompleted
completeResult <-
if (predicate(result)) Future.successful(result)
// Not satisfied yet: sleep, then recurse for another attempt.
else sleep(actionCheckInterval).flatMap(_ => poll(pollAction, predicate))
} yield completeResult
}
}
object DigitalOceanClient {
// Content type sent with every request and required on every parsed response.
val contentType = "application/json"
// Base request template targeting API v2; concrete requests build on this.
val host =
new RequestBuilder().
setUrl("https://api.digitalocean.com/v2").
addHeader("Content-Type", contentType).
build()
}
|
<reponame>TranBrian10/interactive-rtc
const express = require('express');
const router = express.Router();

/* GET home page */
router.get('/', function (req, res) {
  res.render('index', { title: 'threeRTC' });
});

/* GET user agent — reports whether the client looks mobile or desktop. */
router.get('/user-agent', function (req, res) {
  const userAgent = req.header('user-agent');
  // Fix: previously BOTH res.send calls ran for mobile clients, triggering
  // "Cannot set headers after they are sent". Return after the first send.
  if (/mobile/i.test(userAgent)) {
    return res.send({ userAgent: 'Mobile' });
  }
  res.send({ userAgent: 'Desktop' });
});

module.exports = router;
|
# Evaluate the 1024+0+512-FW checkpoint-7 language model on the WikiText-103
# validation set, truncating/padding inputs and scoring only the last sixth.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/1024+0+512-FW/7-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/1024+0+512-FW/7-1024+0+512-pad-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function truncate_and_pad_first_two_thirds_sixth --eval_function last_sixth_eval
|
#!/bin/bash ../.port_include.sh
# SerenityOS ports description for SDL2, built from the master-serenity branch.
port=SDL2
version=serenity-git
workdir=SDL-master-serenity
useconfigure=true
curlopts="-L"
files="https://github.com/SerenityOS/SDL/archive/master-serenity.tar.gz SDL2-git.tar.gz"
# Cross-compile with the Serenity CMake toolchain; PulseAudio is unavailable.
configopts="-DCMAKE_TOOLCHAIN_FILE=$SERENITY_ROOT/Toolchain/CMakeToolchain.txt -DPULSEAUDIO=OFF"
configure() {
run cmake $configopts
}
install() {
run make install
}
|
#!/usr/bin/env bash
# Bootstraps the .NET Core CLI and Cake, then hands off the build to Cake (via Mono).

# Define directories.
SCRIPT_DIR=$PWD
TOOLS_DIR=$SCRIPT_DIR/tools
CAKE_VERSION=0.24.0
CAKE_DLL=$TOOLS_DIR/Cake.$CAKE_VERSION/Cake.exe
# Read the pinned SDK version (x.y.z) from global.json.
DOTNET_VERSION=$(cat "$SCRIPT_DIR/global.json" | grep -o '[0-9]\.[0-9]\.[0-9]')
# Fix: variable was misspelled DOTNET_INSTRALL_URI (definition and sole use renamed together).
DOTNET_INSTALL_URI=https://raw.githubusercontent.com/dotnet/cli/v$DOTNET_VERSION/scripts/obtain/dotnet-install.sh

# Make sure the tools folder exists.
if [ ! -d "$TOOLS_DIR" ]; then
  mkdir "$TOOLS_DIR"
fi

###########################################################################
# INSTALL .NET CORE CLI
###########################################################################

echo "Installing .NET CLI..."
if [ ! -d "$SCRIPT_DIR/.dotnet" ]; then
  mkdir "$SCRIPT_DIR/.dotnet"
fi
curl -Lsfo "$SCRIPT_DIR/.dotnet/dotnet-install.sh" "$DOTNET_INSTALL_URI"
sudo bash "$SCRIPT_DIR/.dotnet/dotnet-install.sh" -c current --version $DOTNET_VERSION --install-dir .dotnet --no-path
export PATH="$SCRIPT_DIR/.dotnet":$PATH
export DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
export DOTNET_CLI_TELEMETRY_OPTOUT=1
"$SCRIPT_DIR/.dotnet/dotnet" --info

###########################################################################
# INSTALL CAKE
###########################################################################

if [ ! -f "$CAKE_DLL" ]; then
  curl -Lsfo Cake.zip "https://www.nuget.org/api/v2/package/Cake/$CAKE_VERSION" && unzip -q Cake.zip -d "$TOOLS_DIR/Cake.$CAKE_VERSION" && rm -f Cake.zip
  if [ $? -ne 0 ]; then
    # Fix: message typo "occured" -> "occurred".
    echo "An error occurred while installing Cake."
    exit 1
  fi
fi

# Make sure that Cake has been installed.
if [ ! -f "$CAKE_DLL" ]; then
  echo "Could not find Cake.exe at '$CAKE_DLL'."
  exit 1
fi

###########################################################################
# RUN BUILD SCRIPT
###########################################################################

# Point net452 to Mono assemblies
export FrameworkPathOverride=$(dirname $(which mono))/../lib/mono/4.5/

# Start Cake
exec mono "$CAKE_DLL" "$@"
|
#!/bin/bash
# Routine FreeBSD maintenance: upgrade packages, apply base-system updates,
# refresh the ports tree, then clean the pkg cache and remove orphans.
pkg update && pkg upgrade -y;
freebsd-update fetch && freebsd-update install;
portsnap fetch auto;
pkg clean -y;
pkg autoremove -y;
|
<filename>src/components/AboutMeSection.tsx
import React from "react";
import styled from "styled-components";
import { fonts, colors, screens } from "../utils/theme";
import profile from "../images/profile-photo.png";
/**
 * "About me" section: a photo plus three paragraphs, with the title placed
 * above the photo on narrow screens and inside the text column otherwise.
 */
const AboutMeSection = () => {
  // Shared copy for both layouts, so each paragraph is defined once.
  const paragraphs = [
    "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Cum id sit ut habitant rhoncus purus eget enim feugiat.",
    "Vitae nunc, commodo risus lacus, quis ornare. Aliquam tortor metus adipiscing nullam sit purus turpis duis.",
    "Vitae nunc, commodo risus lacus, quis ornare. Aliquam tortor metus adipiscing nullam sit purus turpis duis.",
  ];
  const descriptions = paragraphs.map((text, index) => (
    <AboutMeDesc key={index}>{text}</AboutMeDesc>
  ));
  // NOTE(review): `screen` is the global window.screen — read once per render,
  // not reactive to resizes, and undefined during SSR. Preserved as-is to keep
  // behavior unchanged; consider a resize-aware hook. (parseInt now gets an
  // explicit radix.)
  const isNarrow = screen.width <= parseInt(screens.xs, 10);
  return (
    <Container id="about-section">
      {isNarrow ? (
        <AboutMeContainer>
          <AboutMeTitle>A little about me</AboutMeTitle>
          <Photo src={profile} />
          <AboutMeContent>{descriptions}</AboutMeContent>
        </AboutMeContainer>
      ) : (
        <AboutMeContainer>
          <Photo src={profile} />
          <AboutMeContent>
            <AboutMeTitle>A little about me</AboutMeTitle>
            {descriptions}
          </AboutMeContent>
        </AboutMeContainer>
      )}
    </Container>
  );
};
export default AboutMeSection;
// Full-bleed wrapper: cancels the page's horizontal padding.
const Container = styled.div`
margin: 0 -5.55vw;
`;
// Section body: column layout on phones, row layout from `sm` upward.
const AboutMeContainer = styled.div`
flex: 1;
/* width: 100vw; */
padding: 5.625vw 6.0416666667vw;
/* margin: 0 -5.55vw; */
position: relative;
z-index: -2;
background-color: rgba(253, 255, 252, 0.9);
display: flex;
flex-direction: column;
justify-content: center;
align-items: center;
@media only screen and (min-width: ${screens.sm}) {
flex-direction: row;
justify-content: space-around;
align-items: center;
}
`;
// Profile photo: scales with viewport width, narrower on larger screens.
const Photo = styled.img`
width: 43.6111111111vw;
margin: 4.77vw 0;
@media only screen and (min-width: ${screens.sm}) {
width: 25.27vw;
}
@media only screen and (min-width: ${screens.lg}) {
margin: 0;
}
`;
// Section heading in the primary brand color.
const AboutMeTitle = styled.h2`
margin: 4.77vw 0;
font-family: ${fonts.libre.bold};
font-size: 0.83rem;
color: ${colors.primary};
@media only screen and (min-width: ${screens.sm}) {
font-size: 0.83rem;
}
@media only screen and (min-width: ${screens.lg}) {
margin-top: 0;
font-size: 1.25rem;
}
`;
// Text column; gains a left gap next to the photo from `sm` upward.
const AboutMeContent = styled.div`
@media only screen and (min-width: ${screens.sm}) {
margin-left: 40px;
}
`;
// Body paragraph: centered on phones, left-aligned from `sm` upward.
const AboutMeDesc = styled.p`
margin: 0;
margin-bottom: 1rem;
font-family: ${fonts.montserrat.regular};
font-size: 0.66rem;
line-height: 1.2rem;
color: ${colors.black};
text-align: center;
@media only screen and (min-width: ${screens.sm}) {
font-size: 0.75rem;
text-align: left;
}
@media only screen and (min-width: ${screens.lg}) {
font-size: 1rem;
margin-right: 40px;
}
`;
|
<filename>src/builder.ts
import { CxAccessor, CxEntry, CxRequest, CxValues } from '@proc7ts/context-values';
import { lazyValue } from '@proc7ts/primitives';
import { Supply } from '@proc7ts/supply';
import { CxBuilder$BoundPeer, CxBuilder$Cache } from './impl';
import { CxPeer } from './peer';
import { CxPeerBuilder } from './peer-builder';
/**
 * Context builder.
 *
 * Provides value assets for the context.
 *
 * @typeParam TContext - A type of context to build.
 */
export class CxBuilder<TContext extends CxValues = CxValues> extends CxPeerBuilder<TContext> implements CxValues {
/**
 * @internal
 */
private readonly _cx: () => TContext;
/**
 * @internal
 */
private readonly _cache = new CxBuilder$Cache();
/**
 * @internal
 */
private readonly _bound: () => CxPeer = lazyValue(() => new CxBuilder$BoundPeer(this, this.cache));
/**
 * Constructs context builder.
 *
 * @param createContext - Context creator function. Accepts context value accessor and the builder itself as
 * parameters, and returns created context.
 * @param peers - Context peers to apply assets from. These assets applied before the ones provided {@link provide
 * explicitly}. Peers listed later have lesser {@link CxAsset.Provided.rank rank values} than the ones listed earlier.
 */
constructor(
createContext: (this: void, getValue: CxAccessor, builder: CxBuilder<TContext>) => TContext,
...peers: CxPeer<TContext>[]
) {
super(...peers);
// Context construction is deferred and memoized: created on first access,
// receiving this builder's value accessor.
this._cx = lazyValue(() => createContext(
(entry, request) => this.get(entry, request),
this,
));
}
/**
 * Context to build.
 */
override get context(): TContext {
return this._cx();
}
// Per-context cache instance, also shared with the bound peer.
protected override get cache(): CxBuilder.Cache {
return this._cache;
}
/**
 * A peer providing assets bound to {@link context}.
 *
 * Unlike the builder itself, this peer may provide assets for any context, as they are constructed in a compatible
 * one.
 */
get boundPeer(): CxPeer {
return this._bound();
}
// Overloads: without a fallback the request yields a value, with a fallback it
// always yields a value, and a generic request may yield `null`.
get<TValue>(entry: CxEntry<TValue, any>, request?: CxRequest.WithoutFallback<TValue>): TValue;
get<TValue>(entry: CxEntry<TValue, any>, request: CxRequest.WithFallback<TValue>): TValue;
get<TValue>(entry: CxEntry<TValue, any>, request?: CxRequest<TValue>): TValue | null;
get<TValue>(entry: CxEntry<TValue, any>, request?: CxRequest<TValue>): TValue | null {
return this._record(entry).get(request);
}
}
export namespace CxBuilder {
/**
 * Context cache the {@link CxPeer context peer} may use to store intermediate data.
 *
 * There is only one cache instance per context.
 */
export interface Cache {
/**
 * Obtains a value previously {@link put cached} under the given `key`.
 *
 * @param key - Cached value key.
 *
 * @returns Either the cached value, or `undefined` if no value has been cached under `key`.
 */
get(key: unknown): unknown | undefined;
/**
 * Caches the `value` under the given `key`.
 *
 * @param key - Cached value key.
 * @param value - A value to cache.
 * @param supply - Value supply. The value will be removed from the cache once this supply is cut off.
 */
put(key: unknown, value: unknown, supply: Supply): void;
}
}
|
# Placeholder script: prints a dummy marker.
echo "dummy3"
|
# Copyright (c) 2014 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Link against the NaCl CLI main shim.
export EXTRA_LIBS="${NACL_CLI_MAIN_LIB}"
EnableGlibcCompat
# Avoid gnulib's sigaction redefinition and route pipe() through nacl_spawn's
# implementation.
NACLPORTS_CPPFLAGS+=" -DGNULIB_defined_struct_sigaction -Dpipe=nacl_spawn_pipe"
PatchStep() {
DefaultPatchStep
# Touch documentation to prevent it from updating.
touch ${SRC_DIR}/doc/*
}
|
#!/bin/bash
#
# Copyright (c) 2016 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
get_maven_proxy_settings() {
# Usage: get_maven_proxy_settings HTTP_PROXY
# Returns: Maven settings.xml proxy configuration created according to HTTP_PROXY parameter
mvn_http_proxy=$1
# Parse Maven proxy configuration fields
# Split "protocol://host:port" on ':' and strip the '//' from the host part.
mvn_proxy_protocol=$(echo $mvn_http_proxy | cut -d ':' -f 1)
mvn_proxy_host=$(echo $mvn_http_proxy | cut -d ':' -f 2 | sed 's#//##')
mvn_proxy_port=$(echo $mvn_http_proxy | cut -d ':' -f 3)
read -d '' mvn_proxy_configuration <<EOF
<proxies>
<proxy>
<id>proxy-configuration</id>
<active>true</active>
<protocol>$mvn_proxy_protocol</protocol>
<host>$mvn_proxy_host</host>
<port>$mvn_proxy_port</port>
</proxy>
</proxies>
EOF
# NOTE(review): the unquoted echo deliberately collapses the heredoc to a
# single line — the later single-line `sed .../i\` insertion relies on this.
echo $mvn_proxy_configuration
}
# Prints usage help for the "run" sub-command.
echo_run_usage() {
    printf '%s\n' "Usage: build_tap.sh run PLATFORM_PARENT_PATH ARTIFACTS_OUTPUT_PATH [platform-parent arguments]."
}
# Exit immediately on any error.
set -e

# No argument or help flag: print full usage and leave with non-zero status.
if [[ -z "$1" ]] || [[ "$1" = "-h" ]] || [[ "$1" = "--help" ]]; then
echo -e "Usage: build_tap.sh (build|run) \n"
echo -e "In order to build platform-parent Docker image run: build_tap.sh build. \n"
echo -e "In order to start platform-parent Docker container run: build_tap.sh run PLATFORM_PARENT_PATH ARTIFACTS_OUTPUT_PATH, where:\n PLATFORM_PARENT_PATH - points to platform-parent directory on host machine\n ARTIFACTS_OUTPUT_PATH - points to directory on host machine, where artifacts produced by platform-parent will be stored."
echo -e "Script checks http_proxy, https_proxy, HTTP_PROXY and HTTPS_PROXY environment variables in order to determine your proxy configuration and propagates proxy environment variables and Maven proxy settings on Docker image."
exit 1
fi

COMMAND=$1
if [[ "$COMMAND" != "build" ]] && [[ "$COMMAND" != "run" ]]; then
echo "Pass proper command (build|run) as first argument."
# Fix: the script previously fell through after this message and exited 0
# without doing anything; abort explicitly on an unknown command.
exit 1
fi
# Get local http_proxy settings
# Lower-case http_proxy wins over HTTP_PROXY when both are set.
if [ -z "$local_http_proxy" ]; then
if [ -n "$HTTP_PROXY" ]; then
local_http_proxy="$HTTP_PROXY"
fi
if [ -n "$http_proxy" ]; then
local_http_proxy="$http_proxy"
fi
fi
# Get local https_proxy settings
# Lower-case https_proxy wins over HTTPS_PROXY when both are set.
if [ -z "$local_https_proxy" ]; then
if [ -n "$HTTPS_PROXY" ]; then
local_https_proxy="$HTTPS_PROXY"
fi
if [ -n "$https_proxy" ]; then
local_https_proxy="$https_proxy"
fi
# If no https proxy is set, use the http proxy value for https_proxy.
# Fix: the check previously tested $https_proxy, which clobbered a value
# already taken from $HTTPS_PROXY; test the resolved variable instead.
if [ -z "$local_https_proxy" ]; then
local_https_proxy="$local_http_proxy"
fi
fi
if [ "$COMMAND" = "build" ]; then
if [[ -n "$local_http_proxy" ]] && [[ -n "$local_https_proxy" ]]; then
echo -e "Detected proxy settings: \n http proxy: $local_http_proxy \n https proxy: $local_https_proxy"
# Build image with configured proxy settings
# Set correct proxy environment variables in Dockerfile
# (Variable name below keeps the original's "DOCEKRFILE" spelling; it is
# used consistently, so behavior is unaffected.)
PROXY_DOCEKRFILE_TEMPLATE='Dockerfile_proxy_template'
PROXY_DOCKERFILE='Dockerfile_proxy'
cp "$PROXY_DOCEKRFILE_TEMPLATE" "$PROXY_DOCKERFILE"
sed -i -e "s#<HTTP_PROXY>#$local_http_proxy#g" "$PROXY_DOCKERFILE"
sed -i -e "s#<HTTPS_PROXY>#$local_https_proxy#g" "$PROXY_DOCKERFILE"
# Create settings_proxy.xml file with properly configured proxy settings
MAVEN_SETTINGS_FILE='settings.xml'
MAVEN_PROXY_SETTINGS_FILE='settings_proxy.xml'
MAVEN_PROXY_SETTINGS=$(get_maven_proxy_settings $local_http_proxy)
cp "$MAVEN_SETTINGS_FILE" "$MAVEN_PROXY_SETTINGS_FILE"
# Insert the (single-line) proxy block just before <profiles>.
sed -i '/<profiles>/i\'"$MAVEN_PROXY_SETTINGS" "$MAVEN_PROXY_SETTINGS_FILE"
# Building base image
echo "Running: docker build --build-arg http_proxy=$local_http_proxy --build-arg https_proxy=$local_https_proxy -f Dockerfile -t platform-parent ."
docker build --build-arg http_proxy=$local_http_proxy --build-arg https_proxy=$local_https_proxy -f Dockerfile -t platform-parent .
# Build docker image with configured proxy
echo "Running: docker build --build-arg http_proxy=$local_http_proxy --build-arg https_proxy=$local_https_proxy -f Dockerfile_proxy -t platform-parent ."
docker build --build-arg http_proxy=$local_http_proxy --build-arg https_proxy=$local_https_proxy -f Dockerfile_proxy -t platform-parent .
else
# Build image without proxy configuration
# Building base image
echo 'Running: docker build -f Dockerfile -t platform-parent .'
docker build -f Dockerfile -t platform-parent .
fi
exit 0
fi
if [ "$COMMAND" = "run" ]; then
PLATFORM_PARENT_PATH=$2
ARTIFACTS_OUTPUT_PATH=$3
# Validate the two required directory arguments before touching docker.
if [[ -z "$2" ]] || [[ "$2" = "-h" ]] || [[ "$2" = "--help" ]]; then
echo_run_usage
exit 1
fi
if [[ -z "$PLATFORM_PARENT_PATH" ]] || [[ -z "$ARTIFACTS_OUTPUT_PATH" ]]; then
echo_run_usage
exit 1
fi
if [[ ! -d "$PLATFORM_PARENT_PATH" ]]; then
echo "Pass existing directory as PLATFORM_PARENT_PATH."
exit 1
fi
if [[ ! -d "$ARTIFACTS_OUTPUT_PATH" ]]; then
echo "Pass existing directory as ARTIFACTS_OUTPUT_PATH."
exit 1
fi
# Fix: the log line previously advertised "-t" although the docker run below
# does not allocate a TTY; the message now matches the executed command.
echo "Running docker run -i -v $PLATFORM_PARENT_PATH:/platform-parent -v $ARTIFACTS_OUTPUT_PATH:/artifacts platform-parent python build_platform.py -d /artifacts ${@:4}"
docker run -i -v $PLATFORM_PARENT_PATH:/platform-parent -v $ARTIFACTS_OUTPUT_PATH:/artifacts platform-parent python build_platform.py -d /artifacts ${@:4}
exit 0
fi
|
#!/bin/sh
# CocoaPods frameworks embedding script: copies, strips, and re-signs pod
# frameworks into the app bundle during an Xcode build phase.
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies framework $1 into the app's Frameworks folder, strips architectures
# the target is not building for, re-signs it, and (Xcode < 7) embeds the
# Swift runtime dylibs it links against.
install_framework()
{
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# Use filter instead of exclude so missing patterns don't throw errors.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Copies the dSYM of a vendored framework into the dSYM output folder,
# skipping silently when $1 is missing or unreadable.
install_dsym() {
local source="$1"
if [ -r "$source" ]; then
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DWARF_DSYM_FOLDER_PATH}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DWARF_DSYM_FOLDER_PATH}"
fi
}
# Signs a framework with the provided identity
# No-ops when code signing is disabled or no identity is set; when
# COCOAPODS_PARALLEL_CODE_SIGN=true the codesign command is backgrounded
# (the caller must `wait` afterwards).
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identitiy
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements '$1'"
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
code_sign_cmd="$code_sign_cmd &"
fi
echo "$code_sign_cmd"
eval "$code_sign_cmd"
fi
}
# Strip invalid architectures
# Removes, in place via lipo, every architecture in $1 that is not listed in
# the target's ARCHS build setting.
strip_invalid_archs() {
binary="$1"
# Get architectures for current file
archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
stripped=""
for arch in $archs; do
if ! [[ "${ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary" || exit 1
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
}
# Embed the pod frameworks for the active build configuration.
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/Eureka/Eureka.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/Eureka/Eureka.framework"
fi
# If signing jobs were backgrounded for parallelism, wait for them to finish.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
wait
fi
|
/*
* Copyright (c) 2015, 2016 Oracle and/or its affiliates. All rights reserved. This
* code is released under a tri EPL/GPL/LGPL license. You can use it,
* redistribute it and/or modify it under the terms of the:
*
* Eclipse Public License version 1.0
* GNU General Public License version 2
* GNU Lesser General Public License version 2.1
*/
package org.jruby.truffle.core.klass;
import com.oracle.truffle.api.object.DynamicObject;
import com.oracle.truffle.api.object.DynamicObjectFactory;
import org.jruby.truffle.core.module.ModuleFields;
import org.jruby.truffle.core.module.ModuleLayout;
import org.jruby.truffle.om.dsl.api.Layout;
import org.jruby.truffle.om.dsl.api.Nullable;
@Layout
public interface ClassLayout extends ModuleLayout {
/** Creates the object shape for class objects, parameterized by its logical and meta class. */
DynamicObjectFactory createClassShape(DynamicObject logicalClass,
DynamicObject metaClass);
/**
 * Creates a class object. {@code attached}, {@code instanceFactory} and
 * {@code superclass} are nullable (e.g. a singleton class's attachee, or the
 * missing superclass of BasicObject).
 */
DynamicObject createClass(DynamicObjectFactory factory,
ModuleFields fields,
boolean isSingleton,
@Nullable DynamicObject attached,
@Nullable DynamicObjectFactory instanceFactory,
@Nullable DynamicObject superclass);
/** Type guard for class dynamic objects. */
boolean isClass(DynamicObject object);
/** Type guard overload for arbitrary objects. */
boolean isClass(Object object);
boolean getIsSingleton(DynamicObject object);
DynamicObject getAttached(DynamicObject object);
DynamicObjectFactory getInstanceFactory(DynamicObject object);
/** Unsafe setter: callers are responsible for publication/ordering guarantees. */
void setInstanceFactoryUnsafe(DynamicObject object, DynamicObjectFactory value);
DynamicObject getSuperclass(DynamicObject object);
void setSuperclass(DynamicObject object, DynamicObject value);
}
|
package org.moskito.controlagent.data.info;
/**
* Abstraction that allows to replace UptimeProvider for testing.
*
* @author lrosenberg
* @since 24.07.17 22:33
*/
public interface UptimeProvider {
/**
 * @return the uptime (time since start) reported by this provider; the unit
 *         is not specified here — presumably milliseconds, verify against callers.
 */
long getUptime();
}
|
// Tracks the player's in-flight missiles: fires new ones and advances/retires
// them every update tick.
class PlayerMissileSystem : ClodoBehaviour {
List<Missile> activeMissiles = new List<Missile>();
public override void OnUpdate() {
UpdateMissiles();
}
// Launches a missile from this object's position toward targetPosition.
public void FireMissile(Vector2 targetPosition, float missileSpeed, float maxRange) {
Missile newMissile = new Missile(transform.position, targetPosition, missileSpeed, maxRange);
activeMissiles.Add(newMissile);
}
// Moves every missile one step; removes missiles that hit something (after
// applying damage) or that flew past their maximum range. Iterates backwards
// so RemoveAt does not skip elements.
private void UpdateMissiles() {
for (int i = activeMissiles.Count - 1; i >= 0; i--) {
Missile missile = activeMissiles[i];
missile.Move();
if (CheckCollision(missile)) {
ApplyDamageToTarget(missile.GetTarget());
activeMissiles.RemoveAt(i);
} else if (missile.HasReachedMaxRange()) {
activeMissiles.RemoveAt(i);
}
}
}
// Placeholder: always reports no collision until detection is implemented.
private bool CheckCollision(Missile missile) {
// Implement collision detection logic here
// Return true if the missile has collided with an enemy target
return false;
}
private void ApplyDamageToTarget(EnemyTarget target) {
// Implement damage application logic here
}
}
// A single missile travelling in a straight line toward a target point.
class Missile {
    Vector2 position;
    // Fix: the launch point was never stored, so HasReachedMaxRange referenced
    // an undefined `startPos` and the class did not compile.
    Vector2 startPos;
    Vector2 direction;
    float speed;
    float maxRange;

    public Missile(Vector2 startPos, Vector2 targetPos, float missileSpeed, float range) {
        position = startPos;
        this.startPos = startPos;
        direction = (targetPos - startPos).normalized;
        speed = missileSpeed;
        maxRange = range;
    }

    // Advances the missile along its direction, scaled by the frame delta.
    public void Move() {
        position += direction * speed * Time.deltaTime;
    }

    // True once the missile has travelled at least maxRange from its launch point.
    public bool HasReachedMaxRange() {
        float distanceTravelled = Vector2.Distance(position, startPos);
        return distanceTravelled >= maxRange;
    }

    public EnemyTarget GetTarget() {
        // Return the enemy target that this missile has hit
        return null;
    }
}
// Placeholder for an enemy that missiles can collide with and damage.
class EnemyTarget {
// Define properties and methods for enemy targets
}
|
import ctypes
import XCPConnection
def simulate_data_write(data_struct: dict) -> bool:
writeDataStruct = ctypes.create_string_buffer(ctypes.sizeof(ctypes.c_uint) * len(data_struct))
for i, value in enumerate(data_struct.values()):
ctypes.memmove(ctypes.addressof(writeDataStruct) + i * ctypes.sizeof(ctypes.c_uint), ctypes.byref(ctypes.c_uint(value)), ctypes.sizeof(ctypes.c_uint))
writeDataBuffer = bytes(memoryview(writeDataStruct))
conn = XCPConnection() # Assuming XCPConnection class is available for communication
try:
conn.download(XCPConnection.Pointer(structBaseaddr, 0), writeDataBuffer)
conn.nvwrite()
conn.close()
print('Write OK')
return True
except XCPConnection.Error:
# Swallow any errors when closing connection due to bad target implementations
return False
|
<filename>blosc/bitshuffle-neon.h
/*********************************************************************
Blosc - Blocked Shuffling and Compression Library
Copyright (C) 2021 The Blosc Developers <<EMAIL>>
https://blosc.org
License: BSD 3-Clause (see LICENSE.txt)
Note: Adapted for NEON by <NAME>.
See LICENSE.txt for details about copyright and rights to use.
**********************************************************************/
/* NEON-accelerated bitshuffle/bitunshuffle routines. */
#ifndef BITSHUFFLE_NEON_H
#define BITSHUFFLE_NEON_H
#include "blosc2/blosc2-common.h"
#ifdef __cplusplus
extern "C" {
#endif
/**
  NEON-accelerated bitshuffle routine.

  @param _src source buffer to bit-shuffle.
  @param _dest destination buffer.
  @param blocksize number of bytes to process.
  @param bytesoftype size in bytes of one element.
  @param tmp_buf caller-supplied scratch buffer.
  @return status/byte count per the blosc2 shuffle back-end convention —
          presumably negative on error; verify against the implementation.
*/
BLOSC_NO_EXPORT int64_t bitshuffle_neon(void* _src, void* _dest, const size_t blocksize,
const size_t bytesoftype, void* tmp_buf);
/**
  NEON-accelerated bitunshuffle routine (inverse of bitshuffle_neon).

  @param _src source buffer to bit-unshuffle.
  @param _dest destination buffer.
  @param blocksize number of bytes to process.
  @param bytesoftype size in bytes of one element.
  @param tmp_buf caller-supplied scratch buffer.
  @return status/byte count per the blosc2 shuffle back-end convention —
          presumably negative on error; verify against the implementation.
*/
BLOSC_NO_EXPORT int64_t bitunshuffle_neon(void* _src, void* _dest, const size_t blocksize,
const size_t bytesoftype, void* tmp_buf);
#ifdef __cplusplus
}
#endif
#endif /* BITSHUFFLE_NEON_H */
|
# Installs a LAMP stack via the Rackspace orchestration Ansible playbook,
# bootstrapping pip/Ansible with the distro's native package manager first.

# Shared tail of both distro branches: install Ansible via pip, clone the
# playbook, and run it against the bundled inventory.
run_lamp_playbook() {
pip install paramiko PyYAML jinja2 httplib2 ansible
git clone https://github.com/rackspace-orchestration-templates/lamp
cd lamp/site-cookbooks/LAMP/files/default/lamp
ansible-playbook -i hosts site.yml
}

if [ -f /etc/redhat-release ]; then
yum install -y git python-pip python-devel gcc
run_lamp_playbook
fi
if [ -f /etc/debian_version ]; then
apt-get update && apt-get install python-apt python-pip build-essential python-dev git -y
run_lamp_playbook
fi
|
<filename>src/main/java/pulse/input/package-info.java
/**
* Introduces internal data structures that (a) are created as a result of
* parsing specific types of input files, such as: experimental heating curves,
* metadata, and property curves (e.g. specific heat and density); (b) are used
* by the {@code TaskManager} or any affiliated class.
*/
package pulse.input;
|
#!/bin/sh -e
set -o errexit
###
# Copyright (c) 2015-2019, Antoine "vv221/vv222" Le Gonidec
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# This software is provided by the copyright holders and contributors "as is"
# and any express or implied warranties, including, but not limited to, the
# implied warranties of merchantability and fitness for a particular purpose
# are disclaimed. In no event shall the copyright holder or contributors be
# liable for any direct, indirect, incidental, special, exemplary, or
# consequential damages (including, but not limited to, procurement of
# substitute goods or services; loss of use, data, or profits; or business
# interruption) however caused and on any theory of liability, whether in
# contract, strict liability, or tort (including negligence or otherwise)
# arising in any way out of the use of this software, even if advised of the
# possibility of such damage.
###
###
# Gobliins 2: The Prince Buffoon
# build native Linux packages from the original installers
# send your bug reports to vv221@dotslashplay.it
###
script_version=20180819.2

# Set game-specific variables
GAME_ID='gobliins-2'
GAME_NAME='Gobliins 2: The Prince Buffoon'
ARCHIVE_GOG='setup_gobliins_2_-_the_prince_buffoon_1.02_(20270).exe'
ARCHIVE_GOG_URL='https://www.gog.com/game/gobliiins_pack'
ARCHIVE_GOG_MD5='3607f4ab042fea51e3b6544775955701'
ARCHIVE_GOG_TYPE='innosetup1.7'
ARCHIVE_GOG_SIZE='110000'
ARCHIVE_GOG_VERSION='1.02-gog20270'
ARCHIVE_GOG_OLD0='setup_gobliiins2_2.1.0.63.exe'
ARCHIVE_GOG_OLD0_MD5='0baf2ce55d00fce9af4c98848e88d7dc'
ARCHIVE_GOG_OLD0_SIZE='100000'
ARCHIVE_GOG_OLD0_VERSION='1.02-gog2.1.0.63'
ARCHIVE_GAME_DATA_DISK_PATH='.'
ARCHIVE_GAME_DATA_DISK_FILES='./gobnew.lic ./intro.stk ./track1.mp3'
# Keep compatibility with old archives
ARCHIVE_GAME_DATA_DISK_PATH_GOG_OLD0='app'
ARCHIVE_GAME_DATA_FLOPPY_PATH='fdd'
ARCHIVE_GAME_DATA_FLOPPY_FILES='./*'
# Keep compatibility with old archives
ARCHIVE_GAME_DATA_FLOPPY_PATH_GOG_OLD0='app/fdd'
ARCHIVE_DOC_MAIN_PATH='.'
ARCHIVE_DOC_MAIN_FILES='./*.pdf'
# Keep compatibility with old archives
ARCHIVE_DOC_MAIN_PATH_GOG_OLD0='app'
# The game runs through ScummVM using the "gob" engine id.
APP_MAIN_TYPE='scummvm'
APP_MAIN_SCUMMID='gob'
APP_MAIN_ICON='goggame-1207662293.ico'
# Keep compatibility with old archives
APP_MAIN_ICON_GOG_OLD0='app/goggame-1207662293.ico'
# Three packages: launcher + two alternative data packages (CD-ROM / floppy),
# each of which Provides the common data id.
PACKAGES_LIST='PKG_MAIN PKG_DATA_DISK PKG_DATA_FLOPPY'
PKG_DATA_ID="${GAME_ID}-data"
PKG_DATA_DISK_ID="${PKG_DATA_ID}-disk"
PKG_DATA_DISK_PROVIDE="$PKG_DATA_ID"
PKG_DATA_DISK_DESCRIPTION='data - CD-ROM version'
PKG_DATA_FLOPPY_ID="${PKG_DATA_ID}-floppy"
PKG_DATA_FLOPPY_PROVIDE="$PKG_DATA_ID"
PKG_DATA_FLOPPY_DESCRIPTION='data - floppy version'
PKG_MAIN_DEPS="$PKG_DATA_ID scummvm"

# Load common functions
# Search the ./play.it library in the standard locations, preferring local copies.
target_version='2.10'
if [ -z "$PLAYIT_LIB2" ]; then
[ -n "$XDG_DATA_HOME" ] || XDG_DATA_HOME="$HOME/.local/share"
for path in\
'./'\
"$XDG_DATA_HOME/play.it/"\
"$XDG_DATA_HOME/play.it/play.it-2/lib/"\
'/usr/local/share/games/play.it/'\
'/usr/local/share/play.it/'\
'/usr/share/games/play.it/'\
'/usr/share/play.it/'
do
if [ -z "$PLAYIT_LIB2" ] && [ -e "$path/libplayit2.sh" ]; then
PLAYIT_LIB2="$path/libplayit2.sh"
break
fi
done
if [ -z "$PLAYIT_LIB2" ]; then
printf '\n\033[1;31mError:\033[0m\n'
printf 'libplayit2.sh not found.\n'
exit 1
fi
fi
#shellcheck source=play.it-2/lib/libplayit2.sh
. "$PLAYIT_LIB2"

# Extract data from game
extract_data_from "$SOURCE_ARCHIVE"
prepare_package_layout

# Get game icon
PKG='PKG_MAIN'
use_package_specific_value 'APP_MAIN_ICON'
icons_get_from_workdir 'APP_MAIN'
rm --recursive "$PLAYIT_WORKDIR/gamedata"

# Write launchers
PKG='PKG_MAIN'
write_launcher 'APP_MAIN'

# Build package
write_metadata
build_pkg

# Clean up
rm --recursive "$PLAYIT_WORKDIR"

# Print instructions
# Localized strings for the final install instructions (French/English).
case "${LANG%_*}" in
('fr')
version_string='version %s :'
version_disk='CD-ROM'
version_floppy='disquette'
;;
('en'|*)
version_string='%s version:'
version_disk='CD-ROM'
version_floppy='Floppy'
;;
esac
printf '\n'
# shellcheck disable=SC2059
printf "$version_string" "$version_disk"
print_instructions 'PKG_DATA_DISK' 'PKG_MAIN'
# shellcheck disable=SC2059
printf "$version_string" "$version_floppy"
print_instructions 'PKG_DATA_FLOPPY' 'PKG_MAIN'

exit 0
|
#include <stdio.h>
#include <string.h>
/*
 * Print the characters of `str` in reverse order, followed by a newline.
 *
 * Fix: the index is now size_t (matching strlen's return type) instead of
 * int, avoiding a signed/unsigned conversion and overflow for strings longer
 * than INT_MAX. The loop counts down from len to 1 and reads str[i - 1], so
 * an unsigned index never goes negative.
 */
void reverseString(const char *str) {
    size_t len = strlen(str);
    for (size_t i = len; i > 0; i--) {
        printf("%c", str[i - 1]);
    }
    printf("\n");
}
/*
 * Entry point: reverse the first command-line argument and print it.
 * Returns 1 (and prints a diagnostic) when no argument is supplied.
 */
int main(int argc, char *argv[]) {
    if (argc < 2) {
        /* Diagnostics belong on stderr so stdout carries only the result. */
        fprintf(stderr, "Error: No string provided!\n");
        return 1;
    }
    reverseString(argv[1]);
    return 0;
}
|
#!/bin/bash
# Launch a single-GPU distributed VISTA training run.
#
# Usage: train.sh <task_description> <config>
# Output (checkpoints/logs) goes to $OUT_DIR/VISTA_<task_description>_<timestamp>.
TASK_DESC=$1
CONFIG=$2
# $(...) preferred over legacy backticks for command substitution.
DATE_WITH_TIME=$(date "+%Y%m%d-%H%M%S")
OUT_DIR=/data/Outputs/VISTA
NUSC_CBGS_WORK_DIR="${OUT_DIR}/VISTA_${TASK_DESC}_${DATE_WITH_TIME}"
# -z safely handles an empty/unset value; the old unquoted `[ ! $TASK_DESC ]`
# broke on values with spaces, and `exit $E_ASSERT_FAILED` expanded to a bare
# `exit` (E_ASSERT_FAILED was never defined), so the failure path exited 0.
if [ -z "$TASK_DESC" ]; then
    echo "TASK_DESC must be specified."
    echo "Usage: train.sh task_description"
    exit 1
fi
CUDA_VISIBLE_DEVICES=7 python -m torch.distributed.launch --nproc_per_node=1 --master_port 37447 ./tools/train.py "$CONFIG" --work_dir="$NUSC_CBGS_WORK_DIR"
|
import { ArrayMultiMap } from './structures';
// Unit tests for the ArrayMultiMap container (a map from key to array of values).
describe('structures', () => {
  describe('ArrayMultiMap', () => {
    // A value set under a fresh key is wrapped in a single-element array.
    it('stores single item in an array', () => {
      const mmap = new ArrayMultiMap<string, string>();
      mmap.set('key', 'value');
      expect(mmap.get('key')).toEqual(['value']);
    });
    // Setting an existing key appends rather than overwrites.
    it('appends item with same key to array', () => {
      const mmap = new ArrayMultiMap<string, string>();
      mmap.set('key', 'value1');
      mmap.set('key', 'value2');
      expect(mmap.get('key')).toEqual(['value1', 'value2']);
    });
    // Iterating yields [key, values[]] pairs, grouped per key.
    it('supports iteration', () => {
      const mmap = new ArrayMultiMap<string, string>();
      mmap.set('a', 'a');
      mmap.set('b', 'b1');
      mmap.set('b', 'b2');
      expect(mmap[Symbol.iterator]).toBeDefined();
      expect(Array.from(mmap)).toEqual([
        ['a', ['a']],
        ['b', ['b1', 'b2']],
      ]);
    });
  });
});
|
#!/usr/bin/env bash
# Homestead feature script: install a supervisor program ($1) that runs
# Laravel Octane (roadrunner) for the project rooted at $2.
# Idempotent: a marker file under the user's .homestead-features skips reinstall.
if [ -f ~/.homestead-features/wsl_user_name ]; then
    WSL_USER_NAME="$(cat ~/.homestead-features/wsl_user_name)"
    WSL_USER_GROUP="$(cat ~/.homestead-features/wsl_user_group)"
else
    WSL_USER_NAME=vagrant
    WSL_USER_GROUP=vagrant
fi
export DEBIAN_FRONTEND=noninteractive
if [ -f "/home/$WSL_USER_NAME/.homestead-features/laravel-octane" ]
then
    echo "laravel-octane already installed."
    exit 0
fi
touch "/home/$WSL_USER_NAME/.homestead-features/laravel-octane"
chown -Rf "$WSL_USER_NAME":"$WSL_USER_GROUP" "/home/$WSL_USER_NAME/.homestead-features"
sudo rm -rf /etc/supervisor/conf.d/"$1".conf
# BUGFIX: `sudo cat > file` performs the redirection as the *unprivileged*
# shell, so the write fails on root-owned /etc/supervisor. `sudo tee` does the
# write with root privileges; stdout is discarded.
sudo tee /etc/supervisor/conf.d/"$1".conf > /dev/null <<EOL
[program:$1]
process_name=%(program_name)s
command=php $2/artisan octane:start --watch --max-requests=250 --server=roadrunner --port=18000 --rpc-port=18001 --workers=4
autostart=true
autorestart=true
user=$WSL_USER_NAME
redirect_stderr=true
stdout_logfile=/var/log/$1.log
stdout_logfile_maxbytes=1MB
EOL
sudo supervisorctl reread
sudo supervisorctl update
sudo supervisorctl start "$1":*
sudo systemctl enable supervisor
sudo systemctl restart supervisor
|
<filename>examples/example_score.py<gh_stars>0
"""Example for using pytesserae.score"""
import pytesserae.score as score
def _run():
    """Example of how to score a match.

    Builds a tiny artificial source/target pair, computes the in-chunk
    distance of the matching terms with ``score.find_distance``, and prints
    the score returned by ``score.vanilla``.
    """
    # Terms that occur in both the source and the target chunk.
    matching_terms = {'a', 'b'}
    # Per-term frequency counts for each text.
    source_counts = {'a': 10, 'b': 50, 'c': 25}
    target_counts = {'a': 4, 'b': 73, 'c': 15}
    # The token sequences being compared.
    source_chunk = ['a', 'b']
    target_chunk = ['a', 'c', 'b']
    source_distance = score.find_distance(
        matching_terms, source_chunk, source_counts)
    target_distance = score.find_distance(
        matching_terms, target_chunk, target_counts)
    match_score = score.vanilla(
        matching_terms, source_distance, target_distance, source_counts,
        target_counts)
    print('Calculated score:', match_score)
if __name__ == '__main__':
    _run()
|
#!/bin/bash
# This script parses in the command line parameters from runCust,
# maps them to the correct command line parameters for DispNet training script and launches that task
# The last line of runCust should be: bash $CONFIG_FILE --data-dir $DATA_DIR --log-dir $LOG_DIR

# Defaults for the parameters runCust may supply.
DATA_DIR=NONE
LOG_DIR=NONE
CONFIG_DIR=NONE
MODEL_DIR=NONE

# Parsing command line arguments.
# BUGFIX: inside [[ ]], `>` compares strings lexicographically; -gt is the
# numeric comparison that was intended.
while [[ $# -gt 0 ]]
do
key="$1"
case $key in
    -h|--help)
    echo "Usage: run_dispnet_training_philly.sh [run_options]"
    echo "Options:"
    echo "  -d|--data-dir <path> - directory path to input data (default NONE)"
    echo "  -l|--log-dir <path> - directory path to save the log files (default NONE)"
    echo "  -p|--config-file-dir <path> - directory path to config file directory (default NONE)"
    echo "  -m|--model-dir <path> - directory path to output model file (default NONE)"
    exit 1
    ;;
    -d|--data-dir)
    DATA_DIR="$2"
    shift # pass argument
    ;;
    -p|--config-file-dir)
    CONFIG_DIR="$2"
    shift # pass argument
    ;;
    -m|--model-dir)
    MODEL_DIR="$2"
    shift # pass argument
    ;;
    -l|--log-dir)
    LOG_DIR="$2"
    shift
    ;;
    *)
    # Typo fixed ("Unkown"); quoted so the message prints verbatim.
    echo "Unknown option $key"
    ;;
esac
shift # past argument or value
done

# Prints out the arguments that were passed into the script
echo "DATA_DIR=$DATA_DIR"
echo "LOG_DIR=$LOG_DIR"
echo "CONFIG_DIR=$CONFIG_DIR"
echo "MODEL_DIR=$MODEL_DIR"

# Run training on philly
# Add the root folder of the code to the PYTHONPATH
export PYTHONPATH=$PYTHONPATH:$CONFIG_DIR

# Run the actual job (paths quoted so directories with spaces survive).
python "$CONFIG_DIR/examples/AnytimeNetwork/resnet-ann.py" \
    --data_dir="$DATA_DIR" \
    --log_dir="$LOG_DIR" \
    --model_dir="$MODEL_DIR" \
    --load="${MODEL_DIR}/checkpoint" \
    --ds_name=svhn -f=9 --opt_at=-1 -n=17 -c=32 -s=1 --samloss=6 --batch_size=128 --alter_label --alter_label_activate_frac=1.01 --alter_loss_w=1.0
|
import os
from django_chuck.commands.base import BaseCommand
class Command(BaseCommand):
    """List every module available in the configured module base directories."""

    help = "Shows all available modules"

    def __init__(self):
        super(Command, self).__init__()
        # Disable default checks because this command isn't project-related
        self.no_default_checks = True

    def handle(self, args, cfg):
        """Print the name of each module found under self.module_basedirs.

        :param args: parsed command-line arguments (forwarded to the base class)
        :param cfg: django_chuck configuration (forwarded to the base class)
        """
        super(Command, self).handle(args, cfg)
        self.print_header("AVAILABLE MODULES")
        for module_basedir in self.module_basedirs:
            for module_name in os.listdir(module_basedir):
                # print() is valid in both Python 2 and 3; the bare
                # `print module_name` statement was Python-2-only syntax.
                print(module_name)
|
#!/usr/bin/env bash
# Build acbuild 0.4.0 from the vendored source tarball using the vendored Go
# toolchain, and package the resulting binaries as acbuild-bin-0.4.0.tgz.
set -e
cd "$(dirname "$0")"
# Idempotence: skip the build when the release tarball already exists.
if [ -e "../acbuild-bin-0.4.0.tgz" ]
then
    echo "Already built acbuild!"
    exit 0
fi
rm -rf go acbuild
tar -xf go-bin-1.8.3.tgz go
tar -xf acbuild-src-0.4.0.tgz acbuild
VERSION=v0.4.0
# Pin the version string the build script stamps into the binary.
sed -i "s/^VERSION=.*$/VERSION=${VERSION}/" acbuild/build
# BUGFIX: export so the child ./build process can see it; previously GLDFLAGS
# was set but never exported, so the linker flags never reached the build.
export GLDFLAGS="-X github.com/appc/acbuild/lib.Version=${VERSION}"
export GOROOT="$(pwd)/go"
cd acbuild
./build
cd ..
rm -rf rel
mkdir -p rel/acbuild
cp acbuild/bin/* rel/acbuild
(cd rel && tar -czf "../../acbuild-bin-0.4.0.tgz" acbuild)
echo "Built acbuild!"
|
/*
* Copyright 2021 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ca.uwaterloo.flix.language.phase
import ca.uwaterloo.flix.TestUtils
import ca.uwaterloo.flix.language.errors.KindError
import ca.uwaterloo.flix.util.Options
import org.scalatest.FunSuite
/**
  * Tests that the Kinder phase rejects programs whose types, effects, or type
  * parameters are used at an inconsistent or unexpected kind. Each test
  * compiles a small Flix program and expects a [[KindError]].
  */
class TestKinder extends FunSuite with TestUtils {

  private val DefaultOptions = Options.TestWithLibNix

  // --- A type variable used at two incompatible kinds: implicit (unannotated) type parameters ---

  test("MismatchedTypeParamKind.Implicit.01") {
    val input = "def f(g: Int32 -> o & o): Int32 = 123"
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  test("MismatchedTypeParamKind.Implicit.02") {
    val input = "def f(g: Int32 -> Int32 & e): e = g(123)"
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  test("MismatchedTypeParamKind.Implicit.03") {
    val input = "def f(s: #{| a}, r: {| a}): Int32 = 123"
    // NOTE(review): uses Options.TestWithLibNix directly where every other
    // test uses DefaultOptions; the two are the same value (see field above).
    val result = compile(input, Options.TestWithLibNix)
    expectError[KindError](result)
  }

  test("MismatchedTypeParamKind.Implicit.04") {
    val input = "def f(s: #{X(Int32) | a}, r: {x :: Int32 | a}): Int32 = 123"
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  test("MismatchedTypeParamKind.Implicit.05") {
    val input = "def f(a: e): Int32 & not e = 123"
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  test("MismatchedTypeParamKind.Implicit.06") {
    val input =
      """
        |enum E[a] {
        | case E1(a)
        |}
        |
        |def f(g: E[a -> b & e]): Int32 & not (a or b) = 123
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  // --- Same mismatches, arising from enum type parameters ---

  test("MismatchedTypeParamKind.Enum.01") {
    val input =
      """
        |enum E[o] {
        | case A(Int32 -> o & o)
        |}
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  test("MismatchedTypeParamKind.Enum.02") {
    val input =
      """
        |enum E[e] {
        | case A((Int32 -> Int32 & e) -> e)
        |}
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  test("MismatchedTypeParamKind.Enum.03") {
    val input =
      """
        |enum E[a] {
        | case A(#{| a}, {| a})
        |}
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  test("MismatchedTypeParamKind.Enum.04") {
    val input =
      """
        |enum E[a] {
        | case A(#{X(Int32) | a}, {x :: Int32 | a})
        |}
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  test("MismatchedTypeParamKind.Enum.05") {
    val input =
      """
        |enum E[e] {
        | case A(e -> Int32 & not e)
        |}
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  test("MismatchedTypeParamKind.Enum.06") {
    val input =
      """
        |enum D[a] {
        | case D1(a)
        |}
        |enum E[a, b, e] {
        | case A(D[a -> b & e] -> Int32 & not (a or b))
        |}
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  // --- Same mismatches, arising from type alias parameters ---

  test("MismatchedTypeParamKind.TypeAlias.01") {
    val input = "type alias T[o] = Int32 -> o & o"
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  test("MismatchedTypeParamKind.TypeAlias.02") {
    val input = "type alias T[e] = (Int32 -> Int32 & e) -> e"
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  test("MismatchedTypeParamKind.TypeAlias.03") {
    val input = "type alias T[a] = (#{| a}, {| a})"
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  test("MismatchedTypeParamKind.TypeAlias.04") {
    val input = "type alias T[a] = (#{X(Int32) | a}, {x :: Int32 | a})"
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  test("MismatchedTypeParamKind.TypeAlias.05") {
    val input = "type alias T[e] = e -> Int32 & not e"
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  test("MismatchedTypeParamKind.TypeAlias.06") {
    val input =
      """
        |enum Option[a] {
        | case Some(a)
        | case None
        |}
        |
        |type alias T[a, b, e] = Option[a -> b & e] -> Int32 & not (a or b)
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  // --- Non-Type-kinded (e.g. partially applied) types used where a proper type is required ---

  test("IllegalUninhabitedType.01") {
    val input =
      """
        |enum P[a, b] {
        | case C(a, b)
        |}
        |
        |def f(p: P[Int32]): Int32 = 123
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  test("IllegalUninhabitedType.02") {
    val input =
      """
        |enum P[a, b] {
        | case C(a, b)
        |}
        |
        |enum E {
        | case A(P[Int32])
        |}
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  test("IllegalUninhabitedType.03") {
    val input =
      """
        |enum P[a, b] {
        | case C(a, b)
        |}
        |
        |def f(p: P): Int32 = 123
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  test("IllegalUninhabitedType.04") {
    val input =
      """
        |enum P[a, b] {
        | case C(a, b)
        |}
        |
        |enum E {
        | case A(P)
        |}
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  test("IllegalUninhabitedType.05") {
    val input =
      """
        |enum P[a, b, c] {
        | case C(a, b, c)
        |}
        |
        |def f(p: P[Int32, Int32]): Int32 = 123
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  test("IllegalUninhabitedType.06") {
    val input =
      """
        |enum P[a, b, c] {
        | case C(a, b, c)
        |}
        |
        |enum E {
        | case A(P[Int32, Int32])
        |}
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  test("IllegalUninhabitedType.07") {
    val input = """def f(x: true): Int32 = 123"""
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  test("IllegalUninhabitedType.08") {
    val input = "def f(): Int32 = 1 as Pure"
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  test("IllegalUninhabitedType.09") {
    val input =
      """
        |enum E[a, b] {
        | case C(a, b)
        |}
        |
        |def f(): Int32 = 1 as E[Int32]""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  test("IllegalUninhabitedType.10") {
    val input = "def f(): Int32 = 1: Pure"
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  test("IllegalUninhabitedType.11") {
    val input =
      """
        |enum E[a, b] {
        | case C(a, b)
        |}
        |
        |def f(): Int32 = 1: E[Int32]""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  // --- Non-effect types used in effect position (after `&`) ---

  test("IllegalEffect.01") {
    val input = "def f(): Int32 = 1 as & Int32"
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  test("IllegalEffect.02") {
    val input = "def f(): Int32 = 1 as Int32 & Int32"
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  test("IllegalEffect.03") {
    val input = "def f(): Int32 = 1: & Int32"
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  test("IllegalEffect.04") {
    val input = "def f(): Int32 = 1: Int32 & Int32"
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  // --- Type constructors applied to too many or wrongly-kinded arguments ---

  test("IllegalTypeApplication.01") {
    val input =
      """
        |enum P[a, b] {
        | case C(a, b)
        |}
        |
        |def f(p: P[Int32, String, String]): Int32 = 123
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  test("IllegalTypeApplication.02") {
    val input =
      """
        |type alias R = {x :: Int32}
        |
        |def f(p: R[Int32]): Int32 = 123
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  test("IllegalTypeApplication.03") {
    val input =
      """
        |rel A(a: Int32)
        |
        |type alias S = #{ A }
        |
        |def f(p: S[Int32]): Int32 = 123
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  test("IllegalTypeApplication.04") {
    val input = "def f(p: String[Int32]): Int32 = 123"
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  test("IllegalTypeApplication.05") {
    val input = "def f(): Int32 = 1 as Int32 & Int32 and true"
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  test("IllegalTypeApplication.06") {
    val input = "def f(): Int32 = 1 as Int32 & true or Int32"
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  test("IllegalTypeApplication.07") {
    val input = "def f(): Int32 = 1 as Int32 & not Int32"
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  test("IllegalTypeApplication.08") {
    val input = "def f(a: (Int32, true)): Int32 = 1"
    val result = compile(input, DefaultOptions)
    expectError[KindError](result)
  }

  // --- Kind errors in def declarations (effects, ascriptions, casts, types) ---

  test("KindError.Def.Effect.01") {
    val input =
      """
        |def f(): Unit & Unit = ???
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Def.Effect.02") {
    val input =
      """
        |def f[a: Type](): Unit & a = ???
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Def.Expression.Ascribe.01") {
    val input =
      """
        |def f(): Int32 = 1: Pure
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Def.Expression.Ascribe.02") {
    val input =
      """
        |def f(): Int32 = 1: & Unit
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Def.Expression.Ascribe.03") {
    val input =
      """
        |def foo(): Int32 & ef =
        | let _x: ef = ???;
        | 123
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Def.Expression.Ascribe.04") {
    val input =
      """
        |def foo(x: a[Int32]): Int32 =
        | let _x: a = ???;
        | 123
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Def.Expression.Ascribe.05") {
    val input =
      """
        |pub def foo(): Int32 =
        | let x : Int32 : (Type -> Type) = 123;
        | x
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Def.Expression.Ascribe.06") {
    val input =
      """
        |enum E
        |
        |pub def foo(): Int32 =
        | let x: E[Int32] = ???; 0
        |
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Def.Expression.Ascribe.07") {
    val input =
      """
        |enum E[a, b]
        |
        |pub def foo(): Int32 =
        | let x: E[Int32] = ???; 0
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Def.Expression.Cast.01") {
    val input =
      """
        |def f(): Int32 = 1 as Pure
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Def.Expression.Cast.02") {
    val input =
      """
        |def f(): Int32 = 1 as & Unit
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Def.Expression.Cast.03") {
    val input =
      """
        |enum E
        |
        |pub def foo(): Int32 = 0 as E[Int32]
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Def.Expression.Cast.04") {
    val input =
      """
        |enum E[a, b]
        |
        |pub def foo(): Int32 = 0 as E[Int32]
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Def.Type.01") {
    val input =
      """
        |def f(x: Int[Int32]): Int32 = ???
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Def.Type.02") {
    val input =
      """
        |def f(x: Int32 -> Int32 & Int32): Int32 = ???
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Def.Type.03") {
    val input =
      """
        |def f(x: Pure -> Int32 & Int32): Int32 = ???
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Def.Type.04") {
    val input =
      """
        |def f[r: Type](x: {name :: Int32 | r} ): Int32 = ???
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Def.Type.05") {
    val input =
      """
        |def f[r: Type](x: #{| r} ): Int32 = ???
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Def.Type.06") {
    val input =
      """
        |enum E[a]
        |
        |def f(x: E[Int32, Int32]): Int32 = ???
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Def.Type.07") {
    val input =
      """
        |def f(x: Int32[Int32]): Int32 = ???
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Def.Parameter.01") {
    val input =
      """
        |def f(x: Pure): Int32 = ???
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Def.Parameter.02") {
    val input =
      """
        |enum E[a]
        |
        |def f(x: E): Int32 = ???
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Def.Return.01") {
    val input =
      """
        |def f(): Pure = ???
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Def.Return.02") {
    val input =
      """
        |enum E[a]
        |
        |def f(): E = ???
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Def.Return.03") {
    val input =
      """
        |def f(): Int32[Int32] = ???
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Def.TypeConstraint.01") {
    val input =
      """
        |class C[a: Type -> Type]
        |def f[a: Type](): a with C[a] = ???
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  // --- A type variable used at two different kinds within one def signature ---

  test("KindError.Def.Mismatch.01") {
    val input =
      """
        |def f(x: a): Int32 & a = ???
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.MismatchedKinds](result)
  }

  test("KindError.Def.Mismatch.02") {
    val input =
      """
        |class C[a: Type -> Type]
        |
        |def f(x: a): Int32 with C[a] = ???
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.MismatchedKinds](result)
  }

  test("KindError.Def.Mismatch.03") {
    val input =
      """
        |def f(x: a -> a & a): Int32 = ???
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.MismatchedKinds](result)
  }

  // --- Kind errors in enum declarations ---

  test("KindError.Enum.Case.01") {
    val input =
      """
        |enum E {
        | case C(Pure)
        |}
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Enum.Case.02") {
    val input =
      """
        |enum F[a]
        |
        |enum E {
        | case C(F)
        |}
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Enum.Case.03") {
    val input =
      """
        |enum E[a: Type -> Type] {
        | case C(a)
        |}
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Enum.Case.04") {
    val input =
      """
        |enum E[a] {
        | case C({i :: Int32 | a})
        |}
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Enum.Type.01") {
    val input =
      """
        |enum E {
        | case C(Int32 -> Int32 & Int32)
        |}
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Enum.Type.02") {
    val input =
      """
        |enum E[a] {
        | case C(Int32 -> Int32 & a)
        |}
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Enum.Type.05") {
    val input =
      """
        |enum E {
        | case C(Int32[Int32])
        |}
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  // --- Kind errors in instance declarations ---

  test("KindError.Instance.Def.01") {
    val input =
      """
        |class C[a] {
        | pub def f(x: a): a
        |}
        |
        |enum E[a]
        |
        |instance C[E[a]] {
        | pub def f(x: E): E = ???
        |}
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Instance.TypeConstraint.01") {
    val input =
      """
        |class C[a]
        |
        |class D[a: Type -> Type]
        |
        |enum E[a]
        |
        |instance C[E[a]] with D[a]
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Instance.TypeParameter.01") {
    val input =
      """
        |class C[a]
        |
        |enum E[a]
        |
        |instance C[E]
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  // --- Kind errors in type alias definitions ---

  test("KindError.TypeAlias.Type.01") {
    val input =
      """
        |type alias T = Pure -> Int32
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.TypeAlias.Type.02") {
    val input =
      """
        |type alias T[a] = Int32 -> Int32 & a
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.TypeAlias.Type.03") {
    val input =
      """
        |rel A(x: Int32)
        |
        |type alias Z[r] = #{ A | r }
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  // --- Kind errors in class declarations ---

  test("KindError.Class.Law.01") {
    val input =
      """
        |class C[a: Type -> Type] {
        | law l: forall (x: a) . ???
        |}
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Class.Sig.01") {
    val input =
      """
        |class C[a: Type -> Type] {
        | pub def f(x: a): Int32 = ???
        |}
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Class.Sig.02") {
    val input =
      """
        |class C[a] {
        | pub def f(x: {l :: Int32 | a}): Int32 = ???
        |}
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }

  test("KindError.Class.TypeConstraint.01") {
    val input =
      """
        |class C[a]
        |
        |class D[a: Type -> Type] with C[a]
        |""".stripMargin
    val result = compile(input, DefaultOptions)
    expectError[KindError.UnexpectedKind](result)
  }
}
|
# Start the existing cannr-web container. Not for running the first time:
# use `docker run` initially to create the container.
docker start cannr-web
|
#!/bin/sh
#
# Update the misc/Cargo.lock file that we use for reproducible builds.
#
set -e
# Operate from the repository root regardless of the invocation directory.
cd "$(git rev-parse --show-toplevel)"
# Preserve any existing top-level lock file; restored at the end.
mv Cargo.lock Cargo.lock.back 2> /dev/null || true
cargo update
# The freshly generated lock file becomes the reproducible-build reference.
mv Cargo.lock misc/
# Restore the original lock file if one existed.
mv Cargo.lock.back Cargo.lock 2> /dev/null || true
|
<reponame>alparslanavci/hazelcast
/*
* Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.sql.impl;
import com.hazelcast.internal.serialization.impl.compact.Schema;
import com.hazelcast.nio.serialization.ClassDefinition;
import com.hazelcast.nio.serialization.FieldType;
import com.hazelcast.nio.serialization.IdentifiedDataSerializable;
import com.hazelcast.nio.serialization.Portable;
import com.hazelcast.sql.impl.type.QueryDataType;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.SortedMap;
import java.util.TreeMap;
/**
* Utilities to extract a list of properties from a
* {@link Class} object using reflection
* or from {@link ClassDefinition} of a Portable.
* or from {@link Schema} of a Compact Serialized Object
*/
public final class FieldsUtil {

    private static final String METHOD_PREFIX_GET = "get";
    private static final String METHOD_PREFIX_IS = "is";
    private static final String METHOD_GET_FACTORY_ID = "getFactoryId";
    private static final String METHOD_GET_CLASS_ID = "getClassId";

    // Utility class: no instances.
    private FieldsUtil() { }

    /**
     * Return a list of fields and their types from a {@link Class}.
     */
    @Nonnull
    public static SortedMap<String, Class<?>> resolveClass(@Nonnull Class<?> clazz) {
        SortedMap<String, Class<?>> fields = new TreeMap<>();
        // Add public getters.
        for (Method method : clazz.getMethods()) {
            String attributeName = extractAttributeNameFromMethod(clazz, method);
            if (attributeName == null) {
                continue;
            }
            fields.putIfAbsent(attributeName, method.getReturnType());
        }
        // Add public fields. Walk the superclass chain because getDeclaredFields()
        // only returns fields declared directly on the class.
        Class<?> currentClass = clazz;
        while (currentClass != Object.class) {
            for (Field field : currentClass.getDeclaredFields()) {
                if (!Modifier.isPublic(field.getModifiers()) || Modifier.isStatic(field.getModifiers())) {
                    continue;
                }
                // putIfAbsent: a getter-derived attribute of the same name wins.
                fields.putIfAbsent(field.getName(), field.getType());
            }
            currentClass = currentClass.getSuperclass();
        }
        return fields;
    }

    /**
     * Derive the attribute name from a getter method ({@code getX()} or
     * {@code isX()} for primitive booleans), lower-casing the first letter.
     *
     * @return the attribute name, or {@code null} if the method is not a getter
     *         that should contribute an attribute (see {@link #skipMethod}).
     */
    @Nullable
    private static String extractAttributeNameFromMethod(@Nonnull Class<?> clazz, @Nonnull Method method) {
        if (skipMethod(clazz, method)) {
            return null;
        }
        String methodName = method.getName();
        String fieldNameWithWrongCase;
        if (methodName.startsWith(METHOD_PREFIX_GET) && methodName.length() > METHOD_PREFIX_GET.length()) {
            fieldNameWithWrongCase = methodName.substring(METHOD_PREFIX_GET.length());
        } else if (methodName.startsWith(METHOD_PREFIX_IS) && methodName.length() > METHOD_PREFIX_IS.length()) {
            // Skip getters that do not return primitive boolean.
            if (method.getReturnType() != boolean.class) {
                return null;
            }
            fieldNameWithWrongCase = methodName.substring(METHOD_PREFIX_IS.length());
        } else {
            return null;
        }
        return Character.toLowerCase(fieldNameWithWrongCase.charAt(0)) + fieldNameWithWrongCase.substring(1);
    }

    /**
     * Decide whether a method should be ignored when collecting attributes
     * (non-public, static, void, parameterized, Object-declared, or the
     * serialization-framework ID getters).
     */
    @SuppressWarnings("RedundantIfStatement")
    private static boolean skipMethod(@Nonnull Class<?> clazz, @Nonnull Method method) {
        // Exclude non-public getters.
        if (!Modifier.isPublic(method.getModifiers())) {
            return true;
        }
        // Exclude static getters.
        if (Modifier.isStatic(method.getModifiers())) {
            return true;
        }
        // Exclude void return type.
        Class<?> returnType = method.getReturnType();
        if (returnType == void.class || returnType == Void.class) {
            return true;
        }
        // Skip methods with parameters.
        if (method.getParameterCount() != 0) {
            return true;
        }
        // Skip "getClass"
        if (method.getDeclaringClass() == Object.class) {
            return true;
        }
        // Skip getFactoryId() and getClassId() from Portable and IdentifiedDataSerializable.
        String methodName = method.getName();
        if (methodName.equals(METHOD_GET_FACTORY_ID) || methodName.equals(METHOD_GET_CLASS_ID)) {
            if (IdentifiedDataSerializable.class.isAssignableFrom(clazz) || Portable.class.isAssignableFrom(clazz)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Resolve the list of fields from a portable {@link ClassDefinition},
     * along with their {@link QueryDataType}.
     */
    @Nonnull
    public static SortedMap<String, QueryDataType> resolvePortable(@Nonnull ClassDefinition clazz) {
        SortedMap<String, QueryDataType> fields = new TreeMap<>();
        // Add regular fields.
        for (String name : clazz.getFieldNames()) {
            FieldType portableType = clazz.getFieldType(name);
            QueryDataType type = resolveType(portableType);
            fields.putIfAbsent(name, type);
        }
        return fields;
    }

    /**
     * Resolve the list of fields from a schema {@link com.hazelcast.internal.serialization.impl.compact.Schema},
     * along with their {@link QueryDataType}.
     */
    @Nonnull
    public static SortedMap<String, QueryDataType> resolveCompact(@Nonnull Schema schema) {
        SortedMap<String, QueryDataType> fields = new TreeMap<>();
        // Add regular fields.
        for (String name : schema.getFieldNames()) {
            FieldType compactType = schema.getField(name).getType();
            QueryDataType type = resolveType(compactType);
            fields.putIfAbsent(name, type);
        }
        return fields;
    }

    /**
     * Map a serialization {@link FieldType} to the corresponding SQL
     * {@link QueryDataType}; unknown types fall back to {@code OBJECT}.
     */
    @SuppressWarnings({"checkstyle:ReturnCount", "checkstyle:cyclomaticcomplexity"})
    @Nonnull
    private static QueryDataType resolveType(@Nonnull FieldType type) {
        switch (type) {
            case BOOLEAN:
                return QueryDataType.BOOLEAN;
            case BYTE:
                return QueryDataType.TINYINT;
            case SHORT:
                return QueryDataType.SMALLINT;
            case CHAR:
                return QueryDataType.VARCHAR_CHARACTER;
            case UTF:
                return QueryDataType.VARCHAR;
            case INT:
                return QueryDataType.INT;
            case LONG:
                return QueryDataType.BIGINT;
            case FLOAT:
                return QueryDataType.REAL;
            case DOUBLE:
                return QueryDataType.DOUBLE;
            case DECIMAL:
                return QueryDataType.DECIMAL;
            case TIME:
                return QueryDataType.TIME;
            case DATE:
                return QueryDataType.DATE;
            case TIMESTAMP:
                return QueryDataType.TIMESTAMP;
            case TIMESTAMP_WITH_TIMEZONE:
                return QueryDataType.TIMESTAMP_WITH_TZ_OFFSET_DATE_TIME;
            default:
                return QueryDataType.OBJECT;
        }
    }
}
|
package io.opensphere.mantle.data.analysis;
/**
* The Interface DataAnalysisReporter.
*/
public interface DataAnalysisReporter
{
    /**
     * Clear all column analysis data so that all must be re-determined.
     */
    void clearAllColumnAnalysisData();

    /**
     * Clear column analysis for a data type so that it must be re-evaluated.
     *
     * @param dtiKey the DataTypeInfo key identifying the data type
     */
    void clearColumnAnalysis(String dtiKey);

    /**
     * Clear column analysis for a specific type or key so that it must be
     * re-evaluated.
     *
     * @param dtiKey the DataTypeInfo key identifying the data type
     * @param columnKey the column key within that data type
     */
    void clearColumnAnalysis(String dtiKey, String columnKey);

    /**
     * Gets the column analysis for a given data type info key and column key.
     * Will only return non-null analysis for types that have (1) Been analyzed
     * (2) Are string types.
     *
     * @param dtiKey the DataTypeInfo key
     * @param columnKey the column key
     * @return the column analysis or null if no analysis performed on that
     *         type/key combination.
     */
    ColumnAnalysis getColumnAnalysis(String dtiKey, String columnKey);

    /**
     * Checks if is column data analysis enabled.
     *
     * @return true, if is column data analysis enabled
     */
    boolean isColumnDataAnalysisEnabled();

    /**
     * Sets whether only string columns are analyzed for the given type.
     *
     * @param dtiKey the DataTypeInfo key
     * @param analyzeStringsOnly the true to only analyze strings (default),
     *            false to analyze everything.
     */
    void setAnalyzeStringsOnly(String dtiKey, boolean analyzeStringsOnly);

    /**
     * Sets the do not track flag for a type, excluding it from analysis.
     *
     * @param dtiKey the DataTypeInfo key of the type to flag
     * @param doNotTrack the do not track flag value
     */
    void setDoNotTrackForType(String dtiKey, boolean doNotTrack);

    /**
     * Sets the finalized flag for a type, marking its analysis as complete.
     *
     * @param dtiKey the DataTypeInfo key
     * @param finalized the finalized flag value
     */
    void setFinalizedForType(String dtiKey, boolean finalized);
}
|
import {ADD_STRUCTURE_ITEM} from '../constants/actionTypes';
/**
 * Reducer for a single structure item. On ADD_STRUCTURE_ITEM it merges the
 * action's styles/colsId/sortableId into the existing item state; any other
 * action leaves the state untouched.
 */
const structureItem = (state = {}, action) => {
  if (action.type !== ADD_STRUCTURE_ITEM) {
    return state;
  }
  const { styles, colsId, sortableId } = action;
  return { ...state, styles, colsId, sortableId };
};
/**
 * Reducer for the map of structure items keyed by id. ADD_STRUCTURE_ITEM
 * delegates the per-item update to `structureItem`; everything else is a
 * no-op returning the current state.
 */
const structureItems = (state = {}, action) =>
  action.type === ADD_STRUCTURE_ITEM
    ? { ...state, [action.id]: structureItem(state[action.id], action) }
    : state;
export default structureItems;
|
<reponame>qbancoffee/winfile
/*************************************************************
File name: save.c
Description:
Code for saving resources
Copyright(c) Microsoft Corporation.All rights reserved.
Licensed under the MIT License.
********************************************************************/
#include "stdafx.h"
#include "string.h"
// Table of printable resource-type names, defined elsewhere in the project.
extern const char *rc_types[];
// Per-resource-type writers, defined later in this file.
BOOL SaveMenu(HFILE hFile, LPRESPACKET lprp);
BOOL SaveDialog(HFILE hFile, LPRESPACKET lprp);
BOOL SaveStringTable(HFILE hFile, LPRESPACKET lprp);
// Writes the NUL-terminated string currently in `lp` to `hFile`.
// NOTE: relies on locals named `lp` (LPSTR) and `hFile` (HFILE) existing at
// every expansion site.
#define ADDITEM() _lwrite(hFile, lp, lstrlen(lp))
// save menu, dialog, and string resources to the file given
//
// Walks the EXE's resource-type list (pExeInfo->pResTable), and for each
// MENU / DIALOG / STRING resource loads its packet and appends an .rc-style
// text rendering to the output file `lpFile`.
// Returns TRUE on success, false (0) when there is no open file or the
// output file could not be created.
// NOTE(review): mixes `false` and `TRUE` as BOOL return values — works, but
// inconsistent with the rest of the file.
BOOL SaveResources(HWND hWnd, PEXEINFO pExeInfo, LPSTR lpFile)
{
    char szBuff[255];          // scratch line buffer, emitted via ADDITEM()
    LPSTR lp = (LPSTR)szBuff;
    char szRCType[30];
    LPCSTR lpn;
    if (pExeInfo == NULL)
    {
        MessageBox(hWnd, "NO file open!",
            "ExeView Error", MB_ICONSTOP | MB_OK);
        return false;
    }
    PRESTYPE prt = pExeInfo->pResTable;
    HFILE hFile = 0;
    hFile = _lcreat(lpFile, 0);
    if (!hFile)
    {
        MessageBox(hWnd, "Error opening file for write!",
            "ExeView Error", MB_ICONSTOP | MB_OK);
        return false;
    }
    // File header comment block.
    lstrcpy(lp, "/********************************************************************\n\n Resources that were extracted from a binary that ran on 16-bit Windows.\n\n");
    ADDITEM();
    lstrcpy(lp, " Copyright(c) Microsoft Corporation.All rights reserved.\n Licensed under the MIT License.\n\n********************************************************************/\n\n");
    ADDITEM();
    while (prt)
    {
        int restype = 0;   // numeric RT_* id; stays 0 for named/unknown types
        PRESINFO pri = prt->pResInfoArray;
        WORD wI = 0;
        if (prt->wType & 0x8000)
        {
            // High bit set: ordinal (integer) resource type.
            WORD wType = prt->wType & 0x7fff;
            if (wType == 0 || wType == 11 || wType == 13 || wType > 16)
            {
                // No printable name for this ordinal.
                // NOTE(review): `lpn` is assigned here and below but never
                // read afterwards in this function — confirm intent.
                lpn = (LPSTR)szRCType;
                wsprintf(szRCType, "Unknown Type: %#04X\0", prt->wType);
            }
            else
            {
                lpn = rc_types[wType];
                restype = wType;
            }
        }
        else
        {
            // Named resource type (string), not one of the RT_* ordinals.
            lpn = prt->pResourceType;
            // restype == 0
        }
        // Only menu, dialog and string-table resources are written out.
        if (restype == (int)RT_MENU || restype == (int)RT_DIALOG || restype == (int)RT_STRING)
        {
            while (wI < prt->wCount)
            {
                RESPACKET rp;
                LPRESPACKET lprp = &rp;
                if (LoadResourcePacket(pExeInfo, prt, pri, lprp))
                {
                    // for menu, dialog and string, dump details of this item
                    switch (restype)
                    {
                    case (int)RT_MENU:
                        SaveMenu(hFile, lprp);
                        break;
                    case (int)RT_DIALOG:
                        SaveDialog(hFile, lprp);
                        break;
                    case (int)RT_STRING:
                        SaveStringTable(hFile, lprp);
                        break;
                    }
                    FreeResourcePacket(lprp);
                    // Blank line between items.
                    lstrcpy(lp, "\n");
                    ADDITEM();
                }
                pri++;
                wI++;
            }
            prt = prt->pNext;
            if (prt)
            {
                // Blank line between resource types.
                lstrcpy(lp, "\n");
                ADDITEM();
            }
        }
        else
        {
            // Skip resource types we do not serialize.
            prt = prt->pNext;
        }
    }
    _lclose(hFile);
    return TRUE;
}
// NOTE: copied from VerifyResources.cpp
//
// Decodes one popup of a 16-bit MENU resource starting at `lpv` into *pmenu:
// reads the popup flags and title, then walks the variable-length item list
// (flags WORD, id WORD, NUL-terminated name) until an item carries MFR_END.
// Returns the pointer just past the last decoded item so the caller can
// continue with the next popup.
// NOTE(review): no bounds check against pmenu->rgitem capacity — assumes the
// resource never has more items than the array holds; confirm upstream.
WORD* DecodeMenu(WORD *lpv, MENUDECODE *pmenu)
{
    WORD *pwT = (WORD *)lpv;
    pmenu->flags = *pwT++;
    pmenu->lpszTitle = (LPCTSTR)pwT;
    // Skip past the NUL-terminated title (byte-granular advance).
    pwT = (WORD *)((BYTE *)pwT + lstrlen(pmenu->lpszTitle) + 1);
    pmenu->cItem = 0;
    MENUITEM *pitem = &pmenu->rgitem[0];
    WORD flags;
    do
    {
        flags = pitem->flags = *pwT++;
        pitem->id = *pwT++;
        pitem->lpszMenu = (LPCTSTR)pwT;
        pwT = (WORD *)((BYTE *)pwT + lstrlen(pitem->lpszMenu) + 1);
        pmenu->cItem++;
        pitem++;
    } while (flags != MFR_END); // last item of the popup is flagged MFR_END
    return pwT;
}
// Writes one MENU resource as .rc-style text (MENU / BEGIN / POPUP / END)
// to hFile. The resource id is printed numerically when the high bit of
// wID is set, otherwise the resource's string name is used.
BOOL SaveMenu(HFILE hFile, LPRESPACKET lprp)
{
    char szBuff[255];
    LPSTR lp = (LPSTR)szBuff;
    MENUDECODE menu;
    WORD *pwT = (WORD *)lprp->lpMem;
    char szNTI[255]; // Name, Title, or Icon
    if (lprp->pri->wID & 0x8000)
    {
        // Ordinal id: strip the high bit and print as a number.
        wsprintf(szNTI, "%d", lprp->pri->wID & 0x7fff);
    }
    else
    {
        lstrcpy(szNTI, lprp->pri->pResourceName);
    }
    // These two reads also advance pwT past the menu resource header —
    // the values themselves are unused, but the pointer arithmetic matters.
    WORD wVersion = *pwT++;
    WORD offset = *pwT++;
    wsprintf(lp, "%s MENU\n", szNTI);
    ADDITEM();
    lstrcpy(lp, "BEGIN\n");
    ADDITEM();
    // first word is menu flags; if zero, we have hit the end
    while (*pwT != 0)
    {
        pwT = DecodeMenu(pwT, &menu);
        wsprintf(lp, " POPUP \"%s\"\n", menu.lpszTitle);
        ADDITEM();
        lstrcpy(lp, " BEGIN\n");
        ADDITEM();
        for (int i = 0; i < menu.cItem; i++)
        {
            wsprintf(lp, " MENUITEM \"%s\", %#04X\n", menu.rgitem[i].lpszMenu, menu.rgitem[i].id);
            ADDITEM();
        }
        lstrcpy(lp, " END\n");
        ADDITEM();
    }
    lstrcpy(lp, "END\n");
    ADDITEM();
    return TRUE;
}
// NOTE: copied from VerifyResources.cpp
//
// Decodes one 16-bit dialog control at **ppwT into *pdecitem and advances
// *ppwT past it. Layout: fixed DLGITEMTEMPLATE16, then either an icon id
// (lead byte 0xff followed by a WORD) or a NUL-terminated title, then a
// second NUL-terminated string.
VOID DecodeDlgItem(WORD **ppwT, DLGITEMDECODE *pdecitem)
{
    WORD *pwT = *ppwT;
    pdecitem->pitem = (DLGITEMTEMPLATE16 *)pwT;
    pwT = (WORD *)((char *)pwT + sizeof(DLGITEMTEMPLATE16));
    if (*(BYTE *)pwT == 0xff)
    {
        // 0xff at start of name is followed by a WORD
        // NOTE(review): the +1 byte advance means this WORD read may be
        // unaligned — fine on x86, confirm for other targets.
        pwT = (WORD *)((char *)pwT + 1);
        pdecitem->iconid = *pwT++;
        pdecitem->lpszTitle = "<iconid>";
    }
    else
    {
        pdecitem->lpszTitle = (LPCTSTR)pwT;
        pwT = (WORD *)((BYTE *)pwT + lstrlen(pdecitem->lpszTitle) + 1);
    }
    // not sure what this is used for...
    pdecitem->lpszSecond = (LPCTSTR)pwT;
    pwT = (WORD *)((BYTE *)pwT + lstrlen(pdecitem->lpszSecond) + 1);
    *ppwT = pwT;
}
// NOTE: copied from VerifyResources.cpp
//
// Decodes a 16-bit DIALOG resource at `lpv` into *pdecdlg: the fixed
// DLGTEMPLATE16 header, the NUL-terminated caption, an optional font spec
// (when DS_SETFONT is in the style), then each control via DecodeDlgItem.
// A control count larger than MAXDLGITEMS is clamped (mutating the mapped
// template in place).
VOID DecodeDialog(VOID *lpv, DLGDECODE *pdecdlg)
{
    WORD *pwT = (WORD *)lpv;
    pdecdlg->pdlg = (DLGTEMPLATE16 *)lpv;
    // continue after the DLGTEMPLATE16
    pwT = (WORD *)((char *)pwT + sizeof(DLGTEMPLATE16));
    pdecdlg->lpszTitle = (LPCTSTR)pwT;
    pwT = (WORD *)((BYTE *)pwT + lstrlen(pdecdlg->lpszTitle) + 1);
    if (pdecdlg->pdlg->style & DS_SETFONT)
    {
        // Font info only present when DS_SETFONT is set: point size + face.
        pdecdlg->wFont = *pwT++;
        pdecdlg->lpszFont = (LPCTSTR)pwT;
        pwT = (WORD *)((BYTE *)pwT + lstrlen(pdecdlg->lpszFont) + 1);
    }
    if (pdecdlg->pdlg->cdit > MAXDLGITEMS)
    {
        // printf("Error: dialog has more than %d items\n", MAXDLGITEMS);
        pdecdlg->pdlg->cdit = MAXDLGITEMS;
    }
    for (int i = 0; i < pdecdlg->pdlg->cdit; i++)
    {
        DLGITEMDECODE *pitem = &pdecdlg->rgitems[i];
        ZeroMemory(pitem, sizeof(*pitem));
        DecodeDlgItem(&pwT, pitem);
    }
}
// Writes one DIALOG resource as .rc-style text (DIALOG / CAPTION / FONT /
// STYLE / CONTROL lines) to hFile, using DecodeDialog to walk the template.
BOOL SaveDialog(HFILE hFile, LPRESPACKET lprp)
{
    DLGDECODE decdlg;
    ZeroMemory(&decdlg, sizeof(decdlg));
    DecodeDialog(lprp->lpMem, &decdlg);
    char szBuff[255];
    LPSTR lp = (LPSTR)szBuff;
    char szNTI[255]; // Name, Title, or Icon
    if (lprp->pri->wID & 0x8000)
    {
        // Ordinal id: strip the high bit and print as a number.
        wsprintf(szNTI, "%d", lprp->pri->wID & 0x7fff);
    }
    else
    {
        lstrcpy(szNTI, lprp->pri->pResourceName);
    }
    wsprintf(lp, "%s DIALOG %#04x, %d, %d, %d, %d\n", szNTI, decdlg.pdlg->extra, decdlg.pdlg->x, decdlg.pdlg->y, decdlg.pdlg->cx, decdlg.pdlg->cy);
    ADDITEM();
    wsprintf(lp, "CAPTION \"%s\"\n", decdlg.lpszTitle);
    ADDITEM();
    if (decdlg.lpszFont != NULL)
    {
        // Only present when the template had DS_SETFONT.
        wsprintf(lp, "FONT %d, \"%s\"\n", decdlg.wFont, decdlg.lpszFont);
        ADDITEM();
    }
    wsprintf(lp, "STYLE %#04x\n", decdlg.pdlg->style);
    ADDITEM();
    for (int i = 0; i < decdlg.pdlg->cdit; i++)
    {
        DLGITEMDECODE *pdecitem = &decdlg.rgitems[i];
        if (pdecitem->iconid != 0)
        {
            // Icon controls carry an id instead of a title string.
            wsprintf(szNTI, "%d", pdecitem->iconid);
        }
        else
        {
            wsprintf(szNTI, "\"%s\"", pdecitem->lpszTitle);
        }
        wsprintf(lp, " CONTROL %#02x, %s, %d, %#04x, %d, %d, %d, %d\n", pdecitem->pitem->kind, szNTI, pdecitem->pitem->id, pdecitem->pitem->style, pdecitem->pitem->x, pdecitem->pitem->y, pdecitem->pitem->cx, pdecitem->pitem->cy);
        ADDITEM();
    }
    lstrcpy(lp, "END\n");
    ADDITEM();
    return TRUE;
}
// Writes one STRING resource block (16 length-prefixed strings) to hFile.
// String-table blocks are numbered so that block id N holds string ordinals
// (N-1)*16 .. (N-1)*16+15; nOrdinal computes that base.
BOOL SaveStringTable(HFILE hFile, LPRESPACKET lprp)
{
    char szBuff[270]; // 255-char max string + ordinal prefix + quotes
    LPSTR lpS, lp = (LPSTR)szBuff;
    int nI, nOrdinal;
    nOrdinal = (lprp->pri->wID - 1) & 0x7fff;
    nOrdinal <<= 4; // *16: first ordinal in this block
    lpS = lprp->lpMem;
    lstrcpy(lp, "Ordinal String\n");
    ADDITEM();
    lstrcpy(lp, "---------------------------------------------------------------\n");
    ADDITEM();
    for (nI = 0; nI < 16; nI++)
    {
        // Each slot: 1-byte length, then that many characters (no NUL).
        BYTE bLen = *lpS++;
        wsprintf(lp, "%#04x \"", nOrdinal + nI);
        if (bLen)
        {
            strncat_s(lp, 270, lpS, (WORD)bLen);
            lpS += (int)bLen;
            lstrcat(lp, "\"\n");
            ADDITEM();
        }
        // NOTE(review): empty slots are silently skipped — the formatted
        // ordinal prefix above is discarded (never written). Presumably
        // intentional, but confirm the output should omit empty strings.
    }
    return TRUE;
}
|
import { fixture, html, expect } from '@open-wc/testing';
import '../../../mock-wizard-editor.js';
import { EditingElement } from '../../../../src/Editing.js';
import { WizardingElement } from '../../../../src/Wizarding.js';
import { VoltageLevelEditor } from '../../../../src/editors/substation/voltage-level-editor.js';
import { WizardTextField } from '../../../../src/wizard-textfield.js';
describe('voltage-level-editor wizarding editing integration', () => {
describe('edit wizard', () => {
let doc: XMLDocument;
let parent: WizardingElement & EditingElement;
let element: VoltageLevelEditor | null;
let nameField: WizardTextField;
let descField: WizardTextField;
let nomFreqField: WizardTextField;
let numPhasesField: WizardTextField;
let voltageField: WizardTextField;
let secondaryAction: HTMLElement;
let primaryAction: HTMLElement;
beforeEach(async () => {
doc = await fetch('/base/test/testfiles/valid2007B4.scd')
.then(response => response.text())
.then(str => new DOMParser().parseFromString(str, 'application/xml'));
parent = <WizardingElement & EditingElement>(
await fixture(
html`<mock-wizard-editor
><voltage-level-editor
.element=${doc.querySelector('VoltageLevel')}
></voltage-level-editor
></mock-wizard-editor>`
)
);
element = parent.querySelector('voltage-level-editor');
await (<HTMLElement>(
element?.shadowRoot?.querySelector('mwc-icon-button[icon="edit"]')
)).click();
await parent.updateComplete;
nameField = <WizardTextField>(
parent.wizardUI.dialog?.querySelector('wizard-textfield[label="name"]')
);
descField = <WizardTextField>(
parent.wizardUI.dialog?.querySelector('wizard-textfield[label="desc"]')
);
nomFreqField = <WizardTextField>(
parent.wizardUI.dialog?.querySelector(
'wizard-textfield[label="nomFreq"]'
)
);
numPhasesField = <WizardTextField>(
parent.wizardUI.dialog?.querySelector(
'wizard-textfield[label="numPhases"]'
)
);
voltageField = <WizardTextField>(
parent.wizardUI.dialog?.querySelector(
'wizard-textfield[label="Voltage"]'
)
);
secondaryAction = <HTMLElement>(
parent.wizardUI.dialog?.querySelector(
'mwc-button[slot="secondaryAction"]'
)
);
primaryAction = <HTMLElement>(
parent.wizardUI.dialog?.querySelector(
'mwc-button[slot="primaryAction"]'
)
);
});
it('closes on secondary action', async () => {
secondaryAction.click();
await new Promise(resolve => setTimeout(resolve, 100)); // await animation
expect(parent.wizardUI.dialog).to.not.exist;
});
it('does not change name attribute if not unique within parent element', async () => {
const oldName = nameField.value;
nameField.value = 'J1';
primaryAction.click();
await parent.updateComplete;
expect(doc.querySelector('VoltageLevel')?.getAttribute('name')).to.equal(
oldName
);
});
it('changes name attribute on primary action', async () => {
nameField.value = 'newName';
primaryAction.click();
await parent.updateComplete;
expect(doc.querySelector('VoltageLevel')?.getAttribute('name')).to.equal(
'newName'
);
});
it('changes desc attribute on primary action', async () => {
descField.value = 'newDesc';
primaryAction.click();
await parent.updateComplete;
expect(doc.querySelector('VoltageLevel')?.getAttribute('desc')).to.equal(
'newDesc'
);
});
it('deletes desc attribute if wizard-textfield is deactivated', async () => {
await new Promise(resolve => setTimeout(resolve, 100)); // await animation
descField.nullSwitch!.click();
await parent.updateComplete;
primaryAction.click();
await parent.updateComplete;
expect(doc.querySelector('VoltageLevel')?.getAttribute('desc')).to.be
.null;
});
it('changes nomFreq attribute on primary action', async () => {
nomFreqField.value = '30';
primaryAction.click();
await parent.updateComplete;
expect(
doc.querySelector('VoltageLevel')?.getAttribute('nomFreq')
).to.equal('30');
});
it('deletes nomFreq attribute if wizard-textfield is deactivated', async () => {
await new Promise(resolve => setTimeout(resolve, 100)); // await animation
nomFreqField.nullSwitch!.click();
await parent.updateComplete;
primaryAction.click();
await parent.updateComplete;
expect(doc.querySelector('VoltageLevel')?.getAttribute('nomFreq')).to.be
.null;
});
it('changes numPhases attribute on primary action', async () => {
numPhasesField.value = '3';
await parent.updateComplete;
primaryAction.click();
expect(
doc.querySelector('VoltageLevel')?.getAttribute('numPhases')
).to.equal('3');
});
it('deletes numPhases attribute if wizard-textfield is deactivated', async () => {
await new Promise(resolve => setTimeout(resolve, 100)); // await animation
numPhasesField.nullSwitch!.click();
await parent.updateComplete;
primaryAction.click();
await parent.updateComplete;
expect(doc.querySelector('VoltageLevel')?.getAttribute('numPhases')).to.be
.null;
});
it('changes Voltage value on primary action', async () => {
voltageField.value = '20.0';
primaryAction.click();
await parent.updateComplete;
expect(doc.querySelector('Voltage')?.innerHTML).to.equal('20.0');
});
it('changes Voltage multiplier on primary action', async () => {
voltageField.multiplier = 'M';
primaryAction.click();
await parent.updateComplete;
expect(doc.querySelector('Voltage')?.getAttribute('multiplier')).to.equal(
'M'
);
expect(doc.querySelector('Voltage')?.getAttribute('unit')).to.equal('V');
});
it('deletes voltage element if voltage wizard-textfield is deactivated', async () => {
await new Promise(resolve => setTimeout(resolve, 100)); // await animation
voltageField.nullSwitch!.click();
await parent.updateComplete;
primaryAction.click();
await parent.updateComplete;
expect(doc.querySelector('VoltageLevel')?.querySelector('Voltage')).to.be
.null;
});
});
  // Wizard opened from the "add bay" (playlist_add) action button; verifies
  // the name-uniqueness guard on bay creation.
  describe('open add bay wizard', () => {
    let doc: XMLDocument;
    let parent: WizardingElement & EditingElement;
    let element: VoltageLevelEditor | null;
    let nameField: WizardTextField;
    let primaryAction: HTMLElement;
    beforeEach(async () => {
      doc = await fetch('/base/test/testfiles/valid2007B4.scd')
        .then(response => response.text())
        .then(str => new DOMParser().parseFromString(str, 'application/xml'));
      parent = <WizardingElement & EditingElement>(
        await fixture(
          html`<mock-wizard-editor
            ><voltage-level-editor
              .element=${doc.querySelector('VoltageLevel[name="E1"]')}
            ></voltage-level-editor
          ></mock-wizard-editor>`
        )
      );
      element = parent.querySelector('voltage-level-editor');
      // Open the "add bay" wizard before each test.
      (<HTMLElement>(
        element?.shadowRoot?.querySelector(
          'mwc-icon-button[icon="playlist_add"]'
        )
      )).click();
      await parent.updateComplete;
      nameField = <WizardTextField>(
        parent.wizardUI.dialog?.querySelector('wizard-textfield[label="name"]')
      );
      primaryAction = <HTMLElement>(
        parent.wizardUI.dialog?.querySelector(
          'mwc-button[slot="primaryAction"]'
        )
      );
    });
    it('does not add bay if name attribute is not unique', async () => {
      nameField.value = 'COUPLING_BAY';
      await new Promise(resolve => setTimeout(resolve, 100)); // update takes some time
      primaryAction.click();
      await parent.updateComplete;
      // The fixture starts with 2 bays; a duplicate name must not add a third.
      expect(
        doc.querySelectorAll('VoltageLevel[name="E1"] > Bay').length
      ).to.equal(2);
    });
    it('does add bay if name attribute is unique', async () => {
      nameField.value = 'SecondBay';
      await new Promise(resolve => setTimeout(resolve, 100)); // update takes some time
      primaryAction.click();
      await parent.updateComplete;
      expect(
        doc.querySelector('VoltageLevel[name="E1"] > Bay[name="SecondBay"]')
      ).to.exist;
    });
  });
  // Wizard opened from the LNode (account_tree) action button.
  describe('open lnode wizard', () => {
    let doc: XMLDocument;
    let parent: WizardingElement & EditingElement;
    let element: VoltageLevelEditor | null;
    beforeEach(async () => {
      doc = await fetch('/base/test/testfiles/valid2007B4.scd')
        .then(response => response.text())
        .then(str => new DOMParser().parseFromString(str, 'application/xml'));
      parent = <WizardingElement & EditingElement>(
        await fixture(
          html`<mock-wizard-editor
            ><voltage-level-editor
              .element=${doc.querySelector('VoltageLevel[name="E1"]')}
            ></voltage-level-editor
          ></mock-wizard-editor>`
        )
      );
      element = parent.querySelector('voltage-level-editor');
      (<HTMLElement>(
        element?.shadowRoot?.querySelector(
          'mwc-icon-button[icon="account_tree"]'
        )
      )).click();
      await parent.updateComplete;
    });
    it('opens lnode wizard ', async () => {
      expect(parent.wizardUI).to.exist;
    });
    it('has two wizard pages', async () => {
      expect(parent.wizardUI.dialogs.length).to.equal(2);
    });
  });
  // Move (forward) action: select a voltage level, then click a sibling to
  // move the selected element before it in the document.
  describe('move action', () => {
    let doc: XMLDocument;
    let parent: WizardingElement & EditingElement;
    let element: VoltageLevelEditor | null;
    let element2: VoltageLevelEditor | null;
    beforeEach(async () => {
      doc = await fetch('/base/test/testfiles/valid2007B4.scd')
        .then(response => response.text())
        .then(str => new DOMParser().parseFromString(str, 'application/xml'));
      // Render one editor per VoltageLevel in the fixture document.
      parent = <WizardingElement & EditingElement>(
        await fixture(
          html`<mock-wizard-editor
            >${Array.from(doc?.querySelectorAll('VoltageLevel') ?? []).map(
              vLevel =>
                html`<voltage-level-editor
                  .element=${vLevel}
                ></voltage-level-editor>`
            )}
          ></mock-wizard-editor
          >`
        )
      );
      element = parent.querySelector('voltage-level-editor:nth-child(1)');
      element2 = parent.querySelector('voltage-level-editor:nth-child(2)');
    });
    it('moves VoltageLevel within Substation', async () => {
      // Initially E1 is the first VoltageLevel.
      expect(doc.querySelector('VoltageLevel')?.getAttribute('name')).to.equal(
        'E1'
      );
      (<HTMLElement>(
        element2?.shadowRoot?.querySelector('mwc-icon-button[icon="forward"]')
      )).click();
      await parent.updateComplete;
      (<HTMLElement>element).click();
      await parent.updateComplete;
      // After moving J1 before E1, J1 is first.
      expect(doc.querySelector('VoltageLevel')?.getAttribute('name')).to.equal(
        'J1'
      );
    });
  });
  // Delete action: removing a voltage level from the document.
  describe('remove action', () => {
    let doc: XMLDocument;
    let parent: WizardingElement & EditingElement;
    let element: VoltageLevelEditor | null;
    beforeEach(async () => {
      doc = await fetch('/base/test/testfiles/valid2007B4.scd')
        .then(response => response.text())
        .then(str => new DOMParser().parseFromString(str, 'application/xml'));
      parent = <WizardingElement & EditingElement>(
        await fixture(
          html`<mock-wizard-editor
            ><voltage-level-editor
              .element=${doc.querySelector('VoltageLevel[name="E1"]')}
            ></voltage-level-editor
          ></mock-wizard-editor>`
        )
      );
      element = parent.querySelector('voltage-level-editor');
    });
    it('removes VoltageLevel on clicking delete button', async () => {
      expect(doc.querySelector('VoltageLevel[name="E1"]')).to.exist;
      (<HTMLElement>(
        element?.shadowRoot?.querySelector('mwc-icon-button[icon="delete"]')
      )).click();
      await parent.updateComplete;
      expect(doc.querySelector('VoltageLevel[name="E1"]')).to.not.exist;
    });
  });
  // Duplicate (content_copy) action: the copy gets a derived name ("E11")
  // and is stripped of LNode/Terminal/ConnectivityNode elements while
  // keeping the structural Bay/ConductingEquipment children.
  describe('clone action', () => {
    let doc: XMLDocument;
    let parent: WizardingElement & EditingElement;
    let element: VoltageLevelEditor | null;
    let copyContentButton: HTMLElement;
    beforeEach(async () => {
      doc = await fetch('/base/test/testfiles/valid2007B4.scd')
        .then(response => response.text())
        .then(str => new DOMParser().parseFromString(str, 'application/xml'));
      parent = <WizardingElement & EditingElement>(
        await fixture(
          html`<mock-wizard-editor
            ><voltage-level-editor
              .element=${doc.querySelector('VoltageLevel[name="E1"]')}
            ></voltage-level-editor
          ></mock-wizard-editor>`
        )
      );
      element = parent.querySelector('voltage-level-editor');
      await parent.updateComplete;
      copyContentButton = <HTMLElement>(
        element?.shadowRoot?.querySelector(
          'mwc-icon-button[icon="content_copy"]'
        )
      );
    });
    it('duplicates VoltageLevel on clicking duplicate button', async () => {
      copyContentButton.click();
      await parent.updateComplete;
      expect(doc.querySelector('VoltageLevel[name="E11"]')).to.exist;
    });
    it('removes all LNode elements in the copy', async () => {
      expect(
        doc.querySelector('VoltageLevel[name="E1"]')?.querySelector('LNode')
      ).to.exist;
      copyContentButton.click();
      await parent.updateComplete;
      expect(
        doc.querySelector('VoltageLevel[name="E11"]')?.querySelector('LNode')
      ).to.not.exist;
    });
    it('removes all Terminal elements except the grounding in the copy', async () => {
      expect(
        doc
          .querySelector('VoltageLevel[name="E1"]')
          ?.querySelector('Terminal:not([cNodeName="grounded"])')
      ).to.exist;
      copyContentButton.click();
      await parent.updateComplete;
      expect(
        doc
          .querySelector('VoltageLevel[name="E11"]')
          ?.querySelector('Terminal:not([cNodeName="grounded"])')
      ).to.not.exist;
    });
    it('removes all ConnectivityNode elements in the copy', async () => {
      expect(
        doc
          .querySelector('VoltageLevel[name="E1"]')
          ?.querySelector('ConnectivityNode')
      ).to.exist;
      copyContentButton.click();
      await parent.updateComplete;
      expect(
        doc
          .querySelector('VoltageLevel[name="E11"]')
          ?.querySelector('ConnectivityNode')
      ).to.not.exist;
    });
    it('keeps all Bay elements in the copy', async () => {
      copyContentButton.click();
      await parent.updateComplete;
      expect(
        doc.querySelector('VoltageLevel[name="E1"]')?.querySelectorAll('Bay')
          .length
      ).to.equal(
        doc.querySelector('VoltageLevel[name="E11"]')?.querySelectorAll('Bay')
          .length
      );
    });
    it('keeps all ConductingEquipment elements in the copy', async () => {
      copyContentButton.click();
      await parent.updateComplete;
      expect(
        doc
          .querySelector('VoltageLevel[name="E1"]')
          ?.querySelectorAll('ConductingEquipment').length
      ).to.equal(
        doc
          .querySelector('VoltageLevel[name="E11"]')
          ?.querySelectorAll('ConductingEquipment').length
      );
    });
  });
});
|
# titanic_queries.py
# Loads the Titanic CSV into a PostgreSQL table and prints a few exploratory
# COUNT queries (survival, class and sex breakdowns).
import os
from dotenv import load_dotenv
import psycopg2
from psycopg2.extras import execute_values
import json
import pandas as pd
import sqlite3
load_dotenv()
# Set path to titanic data (the CSV sits next to this script)
DATA_PATH = os.path.join(os.path.dirname(__file__), 'titanic.csv')
# Connection settings come from the .env file; the defaults make a missing
# variable obvious instead of silently connecting somewhere unexpected.
DB_HOST = os.getenv("DB_HOST2", default="You Don't Belong Here")
DB_NAME = os.getenv("DB_NAME2", default="You Don't Belong Here")
DB_USER = os.getenv("DB_USER2", default="You Don't Belong Here")
DB_PASSWORD = os.getenv("DB_PW2", default="You Don't Belong Here")
# Create connection to database.
# BUG FIX: previously a literal placeholder was passed for the password
# instead of the DB_PASSWORD value read from the environment.
connection = psycopg2.connect(dbname=DB_NAME, user=DB_USER,
                              password=DB_PASSWORD, host=DB_HOST)
print(f"CONNECTION: {type(connection)}")
# Create cursor
cursor = connection.cursor()
print(f"CURSOR {type(cursor)}")
# Create New Table using titanic data
# 1. Read in csv file
# 2. Convert csv file to sql
# 3. Add new table to sql connection
# 4. Add data to titanic table
# 5. Explore titanic data using sql commands
# Read in titanic dataset
titanic_df = pd.read_csv(DATA_PATH)
print(titanic_df.head(1))
print(titanic_df.columns)
# Index(['Survived', 'Pclass', 'Name', 'Sex', 'Age', 'Siblings/Spouses Aboard', 'Parents/Children Aboard', 'Fare']
# Rename some of the columns for easy access later
titanic_df.rename(columns={'Siblings/Spouses Aboard': 'sibspouse', 'Parents/Children Aboard': 'parentchild'}, inplace=True)
# Check that rename worked
print(titanic_df.columns)
# Change datatypes to avoid insertion errors
titanic_df['Survived'] = titanic_df['Survived'].values.astype(bool)
titanic_df = titanic_df.astype("object")
# Create New Table using SQL commands
create_titanic_table = """
    DROP TABLE IF EXISTS
        titanic_data;
    CREATE TABLE IF NOT EXISTS
        titanic_data (
            id SERIAL PRIMARY KEY,
            survived BOOL,
            pclass INT,
            name VARCHAR(100),
            sex VARCHAR(30),
            age FLOAT,
            sibspouse INT,
            parentchild INT,
            fare FLOAT
        );
"""
# Execute create table commands
cursor.execute(create_titanic_table)
#
# Now to insert the data!
#
# Insertion query plugging in column names in the wanted order (plain string;
# the %s placeholder is filled by execute_values, not by str formatting).
insertion_query = "INSERT INTO titanic_data (survived, pclass, name, sex, age, sibspouse, parentchild, fare) VALUES %s"
# Convert titanic dataframe to list of tuples
list_of_tuples = list(titanic_df.to_records(index=False))
# Run execute_values for easy conversion to SQL data insertion commands
execute_values(cursor, insertion_query, list_of_tuples)
#
# Save the Changes
#
connection.commit()
#
# Perform some data exploration
#
get_survived_count = """
    SELECT
        count(survived) as survived_count
    FROM
        titanic_data
    WHERE
        survived = True;
"""
get_dead_count = """
    SELECT
        count(survived) as death_count
    FROM
        titanic_data
    WHERE
        survived = False;
"""
get_1st_class_count = """
    SELECT
        count(pclass) as first_class
    FROM
        titanic_data
    WHERE
        pclass = 1;
"""
get_2nd_class_count = """
    SELECT
        count(pclass) as second_class
    FROM
        titanic_data
    WHERE
        pclass = 2;
"""
get_3rd_class_count = """
    SELECT
        count(pclass) as third_class
    FROM
        titanic_data
    WHERE
        pclass = 3;
"""
get_male_count = """
    SELECT
        count(sex) as male_count
    FROM
        titanic_data
    WHERE
        sex = 'male';
"""
get_female_count = """
    SELECT
        count(sex) as female_count
    FROM
        titanic_data
    WHERE
        sex = 'female';
"""


def fetch_count(query):
    """Execute a single-value COUNT query and return the integer result."""
    cursor.execute(query)
    return cursor.fetchone()[0]


# Same output as the previous execute/fetchall/print-loop blocks, without
# repeating the boilerplate seven times.
print(f"Number of survivors: {fetch_count(get_survived_count)}")
print(f"Number of non-survivors: {fetch_count(get_dead_count)}")
print(f"Number of 1st class passengers: {fetch_count(get_1st_class_count)}")
print(f"Number of 2nd class passengers: {fetch_count(get_2nd_class_count)}")
print(f"Number of 3rd class passengers: {fetch_count(get_3rd_class_count)}")
print(f"Number of male passengers: {fetch_count(get_male_count)}")
print(f"Number of female passengers: {fetch_count(get_female_count)}")
# Release database resources.
cursor.close()
connection.close()
|
/**
 * Validates the "myForm" form: name and email must be non-empty (after
 * trimming) and the email must look like an address (something@domain.tld).
 *
 * FIX: the previous check required the literal substring '.com', rejecting
 * valid addresses such as user@example.org; untrimmed whitespace also passed
 * the required-field check.
 *
 * @returns {boolean} true when the form may be submitted.
 */
const validateForm = () => {
  const name = document.forms['myForm']['name'].value.trim();
  const email = document.forms['myForm']['email'].value.trim();
  if (name === '' || email === '') {
    alert('Name and Email are required fields');
    return false;
  }
  // One '@', a dot in the domain part, no whitespace anywhere.
  if (!/^[^\s@]+@[^\s@]+\.[^\s@]+$/.test(email)) {
    alert('Invalid email address');
    return false;
  }
  return true;
};
|
#include <iostream>
#include <string>
#include <vector>
// Class to represent an attribute of a registered class.
class Attribute {
public:
    std::string name;   // attribute name, e.g. "_value"
    std::string type;   // attribute type name, e.g. "xsNCName"
    // Other properties of an attribute can be added here
};
// Class to represent metadata for a class (possibly nested).
class ClassMetadata {
public:
    std::string name;                         // fully qualified class name
    bool isInnerClass;                        // true when nested in another class
    std::vector<Attribute> attributes;        // attributes declared on the class
    std::vector<ClassMetadata> innerClasses;  // directly nested classes
};
// Class to manage class metadata registration.
class MetadataRegistry {
private:
    std::vector<ClassMetadata> registeredClasses;

    // Locate a registered top-level class by name; nullptr when absent.
    // Extracted so registerInnerClass/addAttribute share one lookup instead
    // of duplicating the linear search.
    ClassMetadata* findClass(const std::string& className) {
        for (auto& cls : registeredClasses) {
            if (cls.name == className) {
                return &cls;
            }
        }
        return nullptr;
    }

public:
    // Register a top-level class with a given name.
    void registerClass(const std::string& className, bool isInnerClass = false) {
        ClassMetadata newClass;
        newClass.name = className;
        newClass.isInnerClass = isInnerClass;
        registeredClasses.push_back(newClass);
    }
    // Register an inner class within an already-registered class.
    // Reports to std::cerr when the parent is unknown.
    void registerInnerClass(const std::string& parentClassName, const std::string& innerClassName) {
        ClassMetadata* parent = findClass(parentClassName);
        if (parent == nullptr) {
            std::cerr << "Error: Parent class " << parentClassName << " not found." << std::endl;
            return;
        }
        ClassMetadata innerClass;
        innerClass.name = innerClassName;
        innerClass.isInnerClass = true;
        parent->innerClasses.push_back(innerClass);
    }
    // Add an attribute to a registered class.
    // Reports to std::cerr when the class is unknown.
    void addAttribute(const std::string& className, const std::string& attributeName, const std::string& attributeType) {
        ClassMetadata* cls = findClass(className);
        if (cls == nullptr) {
            std::cerr << "Error: Class " << className << " not found." << std::endl;
            return;
        }
        Attribute newAttribute;
        newAttribute.name = attributeName;
        newAttribute.type = attributeType;
        cls->attributes.push_back(newAttribute);
    }
    // Other operations for accessing and manipulating metadata can be added here
};
// Demo driver: registers one class, nests an inner class in it, and attaches
// an attribute. Error paths (unknown class/parent) are not exercised here.
int main() {
    MetadataRegistry registry;
    // Register a class
    registry.registerClass("domGlsl_surface_type::domGenerator::domName", true);
    // Register an inner class
    registry.registerInnerClass("domGlsl_surface_type::domGenerator::domName", "InnerClassName");
    // Add an attribute to the registered class
    registry.addAttribute("domGlsl_surface_type::domGenerator::domName", "_value", "xsNCName");
    return 0;
}
|
/**
*/
let CanonicalUmlXmiParser = function (opts) {
let NormalizeType = opts.normalizeType || (type => type)
let ViewPattern = opts.viewPattern || null
let NameMap = opts.nameMap || { }
let AGGREGATION_shared = 'AGGREGATION_shared'
let AGGREGATION_composite = 'AGGREGATION_composite'
var UPPER_UNLIMITED = '*'
function parseName (elt) {
let ret = 'name' in elt.$ ? elt.$.name : 'name' in elt ? elt.name[0] : null
return !ret ? ret : ret in NameMap ? NameMap[ret] : expandPrefix(ret)
}
function parseValue (elt, deflt) { // 'default' is a reserved word
return 'value' in elt.$ ? elt.$.value : 'value' in elt ? elt.value[0] : deflt
}
function parseGeneral (elt) {
return 'general' in elt.$ ? elt.$.general : 'general' in elt ? elt.general[0].$['xmi:idref'] : null
}
function parseAssociation (elt) {
return 'association' in elt.$ ? elt.$.association : 'association' in elt ? elt.association[0].$['xmi:idref'] : null
}
function parseComments (elt) {
return 'ownedComment' in elt
? elt.ownedComment.map( commentElt => commentElt.body[0] )
: []
}
function parseIsAbstract (elt) {
return 'isAbstract' in elt.$ ? elt.$.isAbstract === 'true' : 'isAbstract' in elt ? elt.isAbstract[0] === 'true' : false
}
function parseProperties (model, elts, classId) {
let ret = {
properties: [],
associations: {},
comments: []
}
elts.forEach(elt => {
let umlType = elt.$['xmi:type']
console.assert(umlType === 'uml:Property')
let id = elt.$['xmi:id']
let name = parseName(elt)
let association = parseAssociation(elt)
if (association) {
/* <ownedAttribute xmi:type="uml:Property" name="AgentIndicator" xmi:id="AgentIndicator_member_source" association="AgentIndicator_member_association">
<type xmi:idref="Agent"/>
<lowerValue xmi:type="uml:LiteralInteger" xmi:id="AgentIndicator_member_lower"/>
<upperValue xmi:type="uml:LiteralUnlimitedNatural" xmi:id="AgentIndicator_member_upper" value="-1"/>
</ownedAttribute> */
ret.associations[id] = Object.assign(new AssocRefRecord(id, name), {
classId: classId,
type: elt.type[0].$['xmi:idref'],
lower: parseValue(elt.lowerValue[0], 0),
upper: parseValue(elt.upperValue[0], UPPER_UNLIMITED),
comments: parseComments(elt)
}, ('aggregation' in elt ? {
aggregation: (elt.aggregation[0] === "shared"
? AGGREGATION_shared
: elt.aggregation[0] === "composite"
? AGGREGATION_composite
: elt.aggregation[0]) // unknown aggregation state.
} : { }))
} else if (!name) {
// e.g. canonical *-owned-attribute-n properties.
// throw Error('expected name in ' + JSON.stringify(elt.$) + ' in ' + parent)
} else if (name.charAt(0).match(/[A-Z]/)) {
throw Error('unexpected property name ' + name + ' in ' + classId)
} else {
ret.properties.push(
new PropertyRecord(
model, classId, id, name, elt.type[0].$['xmi:idref'],
NormalizeType(elt.type[0].$['href']),
parseValue(elt.lowerValue[0], 0),
parseValue(elt.upperValue[0], UPPER_UNLIMITED),
parseComments(elt))
)
}
})
return ret
}
function parseEAViews (diagrams) {
return diagrams.filter(
diagram => '$' in diagram // eliminate the empty <diagram> element containing datatypes
).map(
diagram => {
return Object.assign(new ViewRecord(), {
id: diagram['$']['xmi:id'],
name: diagram.model[0].$.package,
members: diagram.elements[0].element.map(
member => member.$.subject
)
})
}
)
}
function parseCanonicalViews (elt) {
return elt.packagedElement.map(view => {
return Object.assign(new ViewRecord(), {
id: view.$['xmi:id'],
name: parseName(view),
members: view.elementImport.map(
imp => imp.importedElement[0].$['xmi:idref']
),
comments: (view.ownedComment || []).map(
cmnt => cmnt.body[0]
)
})
})
}
/**
 * Parse an xml2js-parsed XMI document into a ModelRecord.
 * Walks the uml:Model package tree collecting packages, classes,
 * enumerations, datatypes, views and associations; then rewrites each
 * association as a property on its source class and fills in
 * back-references via updateReferees().
 * @param {Object} document - xml2js parse of the XMI file
 * @param {Object} source - provenance info stored on the model
 * @returns {ModelRecord}
 */
function parseModel (document, source) {
  // makeHierarchy.test()
  // convenience variables
  let packages = {}
  let classes = {}
  let properties = {}
  let enums = {}
  let datatypes = {}
  let classHierarchy = makeHierarchy()
  let packageHierarchy = makeHierarchy()
  let associations = {}
  let assocSrcToClass = {} // association source-end id -> owning class id
  // return structure
  let model = Object.assign(new ModelRecord(), {
    source: source,
    packages: packages,
    classes: classes,
    properties: properties,
    enums: enums,
    datatypes: datatypes,
    classHierarchy: classHierarchy,
    packageHierarchy: packageHierarchy,
    associations: associations
  })
  // Build the model
  visitPackage(document['xmi:XMI']['uml:Model'][0], [])
  // Turn associations into properties.
  Object.keys(associations).forEach(
    assocId => {
      let a = associations[assocId]
      let c = classes[assocSrcToClass[a.from]]
      let aref = c.associations[a.from]
      let name = aref.name || a.name // if a reference has no name, use the association name
      if (a.name !== 'realizes') { // @@@ DDI-specific
        let prec = new PropertyRecord(model, aref.classId, aref.id, name, aref.type, undefined, aref.lower, aref.upper, aref.comments.concat(a.comments));
        if ('aggregation' in aref) {
          prec.aggregation = aref.aggregation;
        }
        c.properties.push(prec)
      }
    }
  )
  // Change relations to datatypes to be attributes.
  // Change relations to the classes and enums to reference the name.
  /* Object.keys(properties).forEach(
    p => properties[p].sources.forEach(
      s => {
        if (s.idref in datatypes) {
          // console.log('changing property ' + p + ' to have attribute type ' + datatypes[s.idref].name)
          // s.href = datatypes[s.idref].name
          s.href = s.idref
          s.idref = undefined
        } else if (s.idref in classes) {
          // s.idref = classes[s.idref].name
        } else if (s.idref in enums) {
          // s.idref = enums[s.idref].name
        }
      }))
  */
  /*
    idref => idref in datatypes ? NodeConstraint : ShapeRef
    href => NodeConstraint
    type => NodeConstraint
   */
  updateReferees(model)
  return model
  // Recursive descent over packagedElements. `parents` is the package-id
  // path, innermost first; parents[0] is the directly enclosing package.
  function visitPackage (elt, parents) {
    let parent = parents[0]
    let type = elt.$['xmi:type']
    if ('xmi:id' in elt.$) {
      let id = elt.$['xmi:id']
      let name = parseName(elt)
      // Could keep id to elt map around with this:
      // index[id] = { element: elt, packages: parents }
      switch (type) {
        case 'uml:Class':
          if (id in classes) {
            throw Error('already seen class id ' + id)
          }
          let ownedAttrs = parseProperties(
            model, elt.ownedAttribute || [], // SentinelConceptualDomain has no props
            id)
          classes[id] = Object.assign(
            new ClassRecord(id, name),
            ownedAttrs, {
              packages: parents,
              superClasses: [],
              isAbstract: parseIsAbstract(elt),
              referees: [],
              comments: parseComments(elt)
            }
          )
          packages[parent].elements.push({type: 'class', id: id})
          // Remember which class owns each association source end, so the
          // association-to-property pass above can find its class.
          Object.keys(ownedAttrs.associations).forEach(
            assocSourceId => { assocSrcToClass[assocSourceId] = id }
          )
          // record class hierarchy (allows multiple inheritance)
          if ('generalization' in elt) {
            elt.generalization.forEach(
              superClassElt => {
                let superClassId = parseGeneral(superClassElt)
                classHierarchy.add(superClassId, id)
                classes[id].superClasses.push(superClassId)
              })
          }
          break
        case 'uml:Enumeration':
          if (id in enums) {
            throw Error('already seen enum id ' + id)
          }
          enums[id] = Object.assign(new EnumRecord(), {
            id: id,
            name: name,
            values: elt.ownedLiteral.map(
              l => parseName(l)
            ),
            packages: parents,
            referees: []
          })
          packages[parent].elements.push({type: 'enumeration', id: id})
          // record class hierarchy
          if ('generalization' in elt) {
            throw Error("need to handle inherited enumeration " + parseGeneral(elt.generalization[0]) + " " + name)
          }
          break
        case 'uml:DataType':
        case 'uml:PrimitiveType':
          if (id in datatypes) {
            throw Error('already seen datatype id ' + id)
          }
          datatypes[id] = Object.assign(new DatatypeRecord(), {
            name: name,
            id: id,
            packages: parents,
            referees: []
          })
          packages[parent].elements.push({type: 'datatype', id: id})
          // record class hierarchy
          if ('generalization' in elt) {
            throw Error("need to handle inherited datatype " + parseGeneral(elt.generalization[0]) + " " + name)
          }
          break
        case 'uml:Model':
        case 'uml:Package':
          let recurse = true
          /* obsolete special code for DDI EA view
          if (id === 'ddi4_views') {
            model.views = parseEAViews(document['xmi:XMI']['xmi:Extension'][0]['diagrams'][0]['diagram'])
            recurse = false
            break // elide EA views package in package hierarchy
          }
          */
          if (ViewPattern && id.match(ViewPattern)) {
            model.views = parseCanonicalViews(elt)
            recurse = false // elide canonical views package in package hierarchy
          } else {
            packages[id] = Object.assign(new PackageRecord(), {
              name: name,
              id: id,
              packages: parents,
              elements: []
            })
            if (parents.length) {
              if (id.match(/Pattern/)) {
                recurse = false // don't record Pattern packages.
              } else {
                packageHierarchy.add(parent, id)
                packages[parent].elements.push({type: 'package', id: id})
              }
            }
            if (recurse && 'packagedElement' in elt) {
              // walk desendents
              elt.packagedElement.forEach(sub => {
                visitPackage(sub, [id].concat(parents))
              })
            }
          }
          break
        // Pass through to get to nested goodies.
        case 'uml:Association':
          // The "from" end is the memberEnd that is NOT the ownedEnd.
          let from = elt.memberEnd.map(end => end.$['xmi:idref']).filter(id => id !== elt.ownedEnd[0].$['xmi:id'])[0]
          associations[id] = Object.assign(new AssociationRecord(id, name), {
            from: from,
            comments: parseComments(elt)
            // type: elt.ownedEnd[0].type[0].$['xmi:idref']
          })
          /* <packagedElement xmi:id="AgentIndicator-member-association" xmi:type="uml:Association">
               <name>member</name>
               <memberEnd xmi:idref="AgentIndicator-member-source"/>
               <memberEnd xmi:idref="AgentIndicator-member-target"/>
               <ownedEnd xmi:id="AgentIndicator-member-target" xmi:type="uml:Property">
                 <association xmi:idref="AgentIndicator-member-association"/>
                 <type xmi:idref="AgentIndicator"/>
                 <lowerValue><value>1</value></lowerValue>
                 <upperValue><value>1</value></upperValue>
               </ownedEnd>
             </packagedElement> */
          break
        default:
          console.warn('need handler for ' + type)
      }
    }
  }
}
/**
 * Record for a UML class. Only id and name are set here; the remaining
 * fields (properties, packages, superClasses, …) are attached later via
 * Object.assign at the call sites.
 * @constructor
 */
function ClassRecord (id, name) {
  Object.assign(this, { id, name })
}
/**
 * Record for one property occurrence on a class. Also registers itself
 * under model.properties[name].sources so all same-named properties can
 * be compared across classes.
 * @constructor
 * @param {ModelRecord|undefined} model - undefined means "bare instance"
 *   (used by objectify() which re-assigns fields afterwards)
 */
function PropertyRecord (model, classId, id, name, idref, href, lower, upper, comments) {
  if (model === undefined) {
    return // short-cut for objectify
  }
  if (classId === null) {
    console.warn('no class name for PropertyRecord ' + id)
  }
  Object.assign(this, { classId, id, name, idref, href, lower, upper, comments })
  if (this.upper === '-1') {
    this.upper = UPPER_UNLIMITED // XMI encodes "unbounded" as -1
  }
  const byName = model.properties
  if (!(name in byName)) {
    byName[name] = {sources: []}
  }
  byName[name].sources.push(this)
}
/**
 * Back-reference: records that class `classId` points at some type
 * through its property `propName`. classId may be null.
 * @constructor
 */
function RefereeRecord (classId, propName) {
  Object.assign(this, { classId, propName })
}
// Empty marker constructors: instances are populated via Object.assign at
// the call sites; the prototype only identifies the record kind (this is
// what lets objectify() restore prototypes after a JSON round-trip).
function ModelRecord () { }
function PackageRecord () { }
function EnumRecord () { }
function DatatypeRecord () { }
function ViewRecord () { }
/**
* if attrName is null, we'll use the AssociationRecord's name.
<packagedElement xmi:id="<classId>" xmi:type="uml:Class">
<ownedAttribute xmi:id="<classId>-ownedAttribute-<n>" xmi:type="uml:Property">
<type xmi:idref="<refType>"/> <lowerValue/> <upperValue/>
<name>attrName</name>
</ownedAttribute>
</packagedElement>
*/
/**
 * Record for an association reference (the ownedAttribute end of an
 * association on a class). Remaining fields are attached via Object.assign.
 * name may be null; callers fall back to the AssociationRecord's name.
 * @constructor
 */
function AssocRefRecord (id, name) {
  Object.assign(this, { id, name })
}
/**
<packagedElement xmi:id="<classId>" xmi:type="uml:Association"> <!-- can duplicate classId -->
<memberEnd xmi:idref="<classId>-ownedAttribute-<n>"/>
<memberEnd xmi:idref="<classId>-ownedEnd"/>
<ownedEnd xmi:id="<classId>-ownedEnd" xmi:type="uml:Property">
<type xmi:idref="<classId>"/> <lowerValue /> <upperValue />
<association xmi:idref="<classId>"/>
</ownedEnd>
<name>assocName</name>
</packagedElement>
*/
/**
 * Record for a uml:Association packagedElement. The `from` end and
 * comments are attached later via Object.assign.
 * @constructor
 */
function AssociationRecord (id, name) {
  Object.assign(this, { id, name })
}
/**
 * Annotate a model with back-references.
 * For every property name: compute the set of types it can refer to
 * (p.uniformType) and, for each of its sources, push a RefereeRecord onto
 * the referenced class/enum/datatype so each type knows who points at it.
 * Sources whose target id resolves to nothing are silently ignored.
 * @param {ModelRecord} model - model whose classes/enums/datatypes carry referees arrays
 */
function updateReferees (model) {
  // Find set of types for each property.
  Object.keys(model.properties).forEach(propName => {
    let p = model.properties[propName]
    p.uniformType = findMinimalTypes(model, p)
    p.sources.forEach(s => {
      // A source names its type by href (external) or idref (internal).
      let t = s.href || s.idref
      let referent =
        t in model.classes ? model.classes[t] :
        t in model.enums ? model.enums[t] :
        t in model.datatypes ? model.datatypes[t] :
        null
      if (referent) {
        referent.referees.push(new RefereeRecord(s.classId, propName))
      }
      // else: unresolved referent — deliberately ignored.
    })
  })
}
/**
 * Extract a sub-model containing only the classes reachable from the named
 * view(s), copying dependent enums/datatypes/packages as it goes.
 * @param {ModelRecord} model - full model (bound as `this`-free closure arg)
 * @param {Object} source - provenance; viewLabels is recorded onto it
 * @param {string|string[]} viewLabels - view name(s) to extract
 * @param {boolean} followReferencedClasses - also pull in classes referenced by properties
 * @param {boolean} followReferentHierarchy - recurse into referenced classes' dependencies
 * @param {boolean} nestInlinableStructure - NOTE(review): unused in this body — confirm
 * @returns {ModelRecord} the extracted sub-model
 */
function getView (model, source, viewLabels, followReferencedClasses, followReferentHierarchy, nestInlinableStructure) {
  if (viewLabels.constructor !== Array) {
    viewLabels = [viewLabels]
  }
  // Skeleton sub-model; filled in by addDependentClasses below.
  let ret = Object.assign(new ModelRecord(), {
    source: Object.assign({}, source, { viewLabels }),
    packages: {},
    classes: {},
    properties: {},
    enums: {},
    datatypes: {},
    classHierarchy: makeHierarchy(),
    packageHierarchy: makeHierarchy(),
    views: model.views.filter(
      v => viewLabels.indexOf(v.name) !== -1
    )
  })
  // ret.enums = Object.keys(model.enums).forEach(
  //   enumId => copyEnum(ret, model, enumId)
  // )
  // ret.datatypes = Object.keys(model.datatypes).forEach(
  //   datatypeId => copyDatatype(ret, model, datatypeId)
  // )
  // Seed class set: every view member plus its (transitive) superclasses.
  let classIds = ret.views.reduce(
    (classIds, view) =>
      classIds.concat(view.members.reduce(
        (x, member) => {
          let parents = model.classHierarchy.parents[member] || [] // has no parents
          return x.concat(member, parents.filter(
            classId => x.indexOf(classId) === -1
          ))
        }, []))
    , [])
  addDependentClasses(classIds, true)
  updateReferees(ret)
  return ret
  // let properties = Object.keys(model.properties).filter(
  //   propName => model.properties[propName].sources.find(includedSource)
  // ).reduce(
  //   (acc, propName) => {
  //     let sources = model.properties[propName].sources.filter(includedSource)
  //     return addKey(acc, propName, {
  //       sources: sources,
  //       uniformType: findMinimalTypes(ret, {sources: sources})
  //     })
  //   }, [])
  // Copy an enum (and its package chain) from `from` into `to`, once.
  function copyEnum (to, from, enumId) {
    let old = from.enums[enumId]
    if (old.id in to.enums) {
      return
    }
    let e = {
      id: old.id,
      name: old.name,
      values: old.values.slice(),
      packages: old.packages.slice(),
      referees: []
    }
    addPackages(to, model, e.packages)
    ret.packages[old.packages[0]].elements.push({ type: 'enumeration', id: old.id })
    to.enums[enumId] = e
  }
  // Copy a datatype (and its package chain) from `from` into `to`, once.
  function copyDatatype (to, from, datatypeId) {
    let old = from.datatypes[datatypeId]
    if (old.id in to.datatypes) {
      return
    }
    let e = {
      id: old.id,
      name: old.name,
      packages: old.packages.slice(),
      referees: []
    }
    addPackages(to, model, e.packages)
    ret.packages[old.packages[0]].elements.push({ type: 'datatype', id: old.id })
    to.datatypes[datatypeId] = e
  }
  // Recursively copy classes into ret, pulling in the enums/datatypes and
  // (optionally) classes their properties reference, plus superclasses.
  function addDependentClasses (classIds, followParents) {
    classIds.forEach(
      classId => {
        if (classId in ret.classes) { // a recursive walk of the superClasses
          return // may result in redundant insertions.
        }
        let old = model.classes[classId]
        let dependentClassIds = []
        let c = {
          id: old.id,
          name: old.name,
          properties: [],
          comments: old.comments.slice(),
          packages: old.packages.slice(),
          superClasses: old.superClasses.slice(),
          isAbstract: old.isAbstract,
          referees: [],
          // NOTE(review): duplicate 'comments' key — this second entry wins,
          // so the slice() above is dead and copied classes get empty
          // comments; confirm which was intended.
          comments: []
        } // was deepCopy(old)
        ret.classes[classId] = c
        old.properties.forEach(
          p => {
            let id = p.idref || p.href
            if (id in model.enums) {
              copyEnum(ret, model, id)
            }
            if (id in model.datatypes) {
              copyDatatype(ret, model, id)
            }
            if (followReferencedClasses && id in model.classes) {
              dependentClassIds.push(id)
            }
            // NOTE(review): comments argument omitted — copied PropertyRecords
            // get comments === undefined; confirm intended.
            c.properties.push(new PropertyRecord(ret, c.id, p.id, p.name, p.idref, p.href, p.lower, p.upper))
          }
        )
        addPackages(ret, model, c.packages)
        ret.packages[old.packages[0]].elements.push({ type: 'class', id: old.id })
        c.superClasses.forEach(
          suClass =>
            ret.classHierarchy.add(suClass, c.id)
        )
        let x = dependentClassIds
        if (followParents)
          x = x.concat(c.superClasses)
        addDependentClasses(x, followReferentHierarchy)
      }
    )
  }
  // Ensure every package in packageIds exists in `to`, wiring up the
  // package hierarchy between consecutive ids (innermost first).
  function addPackages (to, from, packageIds) {
    for (let i = 0; i < packageIds.length; ++i) {
      let pid = packageIds[i]
      let old = from.packages[pid]
      let p = pid in to.packages ? to.packages[pid] : {
        name: old.name,
        id: pid,
        elements: [],
        packages: old.packages.slice()
      }
      if (!(pid in to.packages)) {
        to.packages[pid] = p
      }
      if (i > 0) { // add [0],[1] [1],[2] [2],[3]...
        to.packageHierarchy.add(pid, packageIds[i - 1])
      }
    }
  }
  // Predicate: does this property source belong to a class in the view?
  function includedSource (source) {
    // properties with a source in classIds
    return classIds.indexOf(source.classId) !== -1
  }
}
/**
 * Build a mutable hierarchy tracker that maintains a transitive closure.
 * Returned object:
 *   add(parent, child) — record an edge, updating roots and the transitive
 *     parents/children maps.
 *   roots — nested holder objects for nodes that have no parent.
 *   parents / children — node id -> array of ALL (transitive) ancestors /
 *     descendants, possibly with duplicates.
 */
function makeHierarchy () {
  let roots = {}     // root node id -> nested holder tree
  let parents = {}   // node id -> transitive ancestor ids
  let children = {}  // node id -> transitive descendant ids
  let holders = {}   // node id -> its nested holder object (shared with roots)
  return {
    add: function (parent, child) {
      if (parent in children && children[parent].indexOf(child) !== -1) {
        // already seen
        return
      }
      // A brand-new parent starts life as a root; it is removed from roots
      // later if it ever appears as someone's child.
      let target = parent in holders
        ? getNode(parent)
        : (roots[parent] = getNode(parent)) // add new parents to roots.
      let value = getNode(child)
      target[child] = value
      if (child in roots) {
        delete roots[child]
      }
      // // maintain hierarchy (direct and confusing)
      // children[parent] = children[parent].concat(child, children[child])
      // children[child].forEach(c => parents[c] = parents[c].concat(parent, parents[parent]))
      // parents[child] = parents[child].concat(parent, parents[parent])
      // parents[parent].forEach(p => children[p] = children[p].concat(child, children[child]))
      // maintain hierarchy (generic and confusing)
      updateClosure(children, parents, child, parent)
      updateClosure(parents, children, parent, child)
      // Fold `near` and everything on near's side into far's closure, and
      // propagate far (plus far's closure) to every node on near's side.
      function updateClosure (container, members, near, far) {
        container[far] = container[far].concat(near, container[near])
        container[near].forEach(
          n => (members[n] = members[n].concat(far, members[far]))
        )
      }
      // Lazily create the holder + empty closure entries for a node.
      function getNode (node) {
        if (!(node in holders)) {
          parents[node] = []
          children[node] = []
          holders[node] = {}
        }
        return holders[node]
      }
    },
    roots: roots,
    parents: parents,
    children: children
  }
}
// Smoke test: build A→B→C→D→E→F→G→H from shuffled edges and dump the
// resulting closure to the console for manual inspection.
makeHierarchy.test = function () {
  let edges = [['B', 'C'], ['C', 'D'], ['F', 'G'], ['E', 'F'], ['D', 'E'], ['A', 'B'], ['G', 'H']]
  let t = makeHierarchy()
  edges.forEach(pair => t.add(pair[0], pair[1]))
  console.dir(t)
}
/**
 * Depth-first walk of a nested hierarchy object (as built by makeHierarchy).
 * For every edge, f(child, parent) is invoked after the child's subtree has
 * been walked, and the arrays it returns are concatenated.
 * @param {Object} n - nested node holders (key -> child holder)
 * @param {Function} f - (key, parentKey) -> array of results
 * @param {string} [p] - parent key; the outer invocation can have a null parent
 * @returns {Array} concatenated results, deepest edges first
 */
function walkHierarchy (n, f, p) {
  let collected = []
  for (let k of Object.keys(n)) {
    collected = collected.concat(
      walkHierarchy(n[k], f, k),
      p ? f(k, p) : []
    )
  }
  return collected
}
/**
 * Expand a prefixed name (e.g. "xsd:string") against KnownPrefixes.
 * Bare names (no ':', e.g. LanguageSpecification) and names with an
 * unknown prefix are returned unchanged.
 * @param {string} pname - possibly-prefixed name
 * @returns {string} expanded URL, or pname when no expansion applies
 */
function expandPrefix (pname) {
  let colon = pname.indexOf(':')
  if (colon === -1) {
    return pname // e.g. LanguageSpecification
  }
  let prefix = pname.substr(0, colon)
  let localName = pname.substr(colon + 1)
  // First matching prefix wins; non-matches map to null and are skipped.
  let expanded = KnownPrefixes.map(
    pair => pair.prefix === prefix ? pair.url + localName : null
  ).find(v => v)
  return expanded || pname
}
/**
 * Find the unique set of types referenced by a property's sources.
 * @param {ModelRecord} model - unused; kept for call-site compatibility
 * @param {Object} p - property entry with a `sources` array
 * @returns {Array} type ids/hrefs, deduplicated, in first-seen order
 */
function findMinimalTypes (model, p) {
  return p.sources.reduce((acc, s) => {
    let t = s.href || s.idref // prefer the external href over the local idref
    return acc.indexOf(t) === -1 ? acc.concat(t) : acc
  }, [])
}
/**
 * Mutating helper: set obj[key] = value and return obj (handy as a
 * reduce accumulator).
 */
function add (obj, key, value) {
  obj[key] = value
  return obj
}
/** Convert a parsed (JSON round-tripped) model structure back into records
 * with the correct prototypes.
 */
function objectify (modelStruct) {
  // Rebuild a keyed map, passing each value through `wrap`.
  const mapValues = (obj, wrap) =>
    Object.keys(obj).reduce((acc, key) => {
      acc[key] = wrap(obj[key])
      return acc
    }, {})
  // Restore RefereeRecord prototypes on an object's referees array.
  const referees = (obj) => ({
    referees: obj.referees.map(r => Object.assign(new RefereeRecord(), r))
  })
  // Records whose only nested structure is their referees array.
  const simpleCopy = (obj, Ctor) =>
    mapValues(obj, value => Object.assign(new Ctor(), value, referees(value)))
  return Object.assign(new ModelRecord(), {
    source: Object.assign({}, modelStruct.source),
    packages: mapValues(modelStruct.packages,
      pkg => Object.assign(new PackageRecord(), pkg)),
    classes: mapValues(modelStruct.classes,
      cls => Object.assign(new ClassRecord(), cls, {
        properties: cls.properties.map(
          prop => Object.assign(new PropertyRecord(), prop)
        )
      }, referees(cls))),
    properties: mapValues(modelStruct.properties,
      prop => Object.assign({}, prop, {
        sources: prop.sources.map(
          rec => Object.assign(new PropertyRecord(), rec)
        )
      })),
    enums: simpleCopy(modelStruct.enums, EnumRecord),
    datatypes: simpleCopy(modelStruct.datatypes, DatatypeRecord),
    classHierarchy: Object.assign({}, modelStruct.classHierarchy),
    packageHierarchy: Object.assign({}, modelStruct.packageHierarchy),
    associations: mapValues(modelStruct.associations,
      assoc => Object.assign(new AssociationRecord(), assoc)),
    views: modelStruct.views.map(
      view => Object.assign(new ViewRecord(), view)
    )
  })
}
// Public API of the parser factory.
return {
  // Rehydrate a previously JSON-serialized model (restores prototypes).
  parseJSON: function (jsonText, source, cb) {
    try {
      let model = objectify(JSON.parse(jsonText))
      model.source = source
      model.getView = getView
      cb(null, model)
    } catch (err) {
      cb(err)
    }
  },
  // Parse raw XMI text via xml2js; node-style callback (err, model).
  parseXMI: function (xmiText, source, cb) {
    require('xml2js').Parser().parseString(xmiText, function (err, document) {
      if (err) {
        cb(err)
      } else {
        let model = parseModel(document, source)
        model.getView = getView
        cb(null, model)
      }
    })
  },
  // Deep-copy a model via a JSON round-trip, restoring prototypes.
  duplicate: function (model) {
    return objectify(JSON.parse(JSON.stringify(model)))
  },
  // Record constructors exposed for callers/tests.
  ModelRecord: ModelRecord,
  PropertyRecord: PropertyRecord,
  ClassRecord: ClassRecord,
  PackageRecord: PackageRecord,
  EnumRecord: EnumRecord,
  DatatypeRecord: DatatypeRecord,
  ViewRecord: ViewRecord,
  AssociationRecord: AssociationRecord,
  AssocRefRecord: AssocRefRecord,
  RefereeRecord: RefereeRecord,
  // Aggregation kind constants used on PropertyRecord.aggregation.
  Aggregation: { shared: AGGREGATION_shared, composite: AGGREGATION_composite }
}
}
module.exports = CanonicalUmlXmiParser
|
#!/bin/bash
# Round-robin over device indices 0..n_devices-1, printing one per iteration.
i_device=0
n_devices=5

# Print the current device number, then advance the round-robin counter.
get_device_num()
{
  device_num=$(( i_device % n_devices ))
  echo "$device_num"
  # Fix: previously incremented an unused variable 'i', so device_num was
  # always 0. Advance the counter the modulo actually reads.
  i_device=$(( i_device + 1 ))
}

for j in {1..60..1}
do
  get_device_num
done
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.