text stringlengths 1 1.05M |
|---|
# This script should be sourced in the context of your shell like so:
# source $HOME/.homesick/repos/homeshick/homeshick.sh
# Once the homeshick() function is defined, you can type
# "homeshick cd CASTLE" to enter a castle.
# NOTE: the repo directory really is ".homesick" (sic), not ".homeshick".
function homeshick() {
  # "homeshick cd <castle>" must run in the current shell for the directory
  # change to stick, so it is special-cased here; every other subcommand is
  # delegated to the real homeshick executable.
  if [ "$1" = "cd" ] && [ -n "$2" ]; then
    cd "$HOME/.homesick/repos/$2"
  else
    # HOMESHICK_DIR allows a non-standard install location.
    "${HOMESHICK_DIR:-$HOME/.homesick/repos/homeshick}/bin/homeshick" "$@"
  fi
}
|
import {appPathSetup} from "./appPathSetup"
import userTasks from "./userTasks"
// app path and log setup should happen before other imports.
appPathSetup()
import {app} from "electron"
import log from "electron-log"
import "regenerator-runtime/runtime"
import {setupAutoUpdater} from "./autoUpdater"
import {Brim} from "./brim"
import globalStoreMainHandler from "./ipc/globalStore/mainHandler"
import windowsMainHandler from "./ipc/windows/mainHandler"
import secretsMainHandler from "./ipc/secrets/mainHandler"
import electronIsDev from "./isDev"
import menu from "./menu"
import {handleQuit} from "./quitter"
import {handleSquirrelEvent} from "./squirrel"
// Start the "init" timer to measure startup cost. No matching
// console.timeEnd("init") is visible in this file — presumably called
// elsewhere once boot completes (TODO confirm).
console.time("init")
/**
 * Boots the Electron main process: bails out during Windows (Squirrel)
 * installer events, wires IPC handlers, menu, auto-updater and app-level
 * event listeners. Resolves once all handlers are registered; the UI
 * itself starts later via app.whenReady().
 */
async function main() {
  // Squirrel installer events launch the app briefly; skip normal startup.
  if (handleSquirrelEvent(app)) return
  userTasks(app)
  const brim = await Brim.boot()
  menu.setMenu(brim)
  windowsMainHandler(brim)
  globalStoreMainHandler(brim)
  secretsMainHandler()
  handleQuit(brim)
  // autoUpdater should not run in dev, and will fail if the code has not been signed
  if (!electronIsDev) {
    setupAutoUpdater().catch((err) => {
      log.error("Failed to initiate autoUpdater: " + err)
    })
  }
  // A second launch's argv is forwarded here (single-instance lock is
  // acquired at the bottom of this file).
  app.on("second-instance", (e, argv) => {
    for (let arg of argv) {
      switch (arg) {
        case "--new-window":
          brim.windows.openWindow("search")
          break
        case "--move-to-current-display":
          brim.windows.moveToCurrentDisplay()
          break
      }
    }
  })
  app.whenReady().then(() => brim.start())
  app.on("activate", () => brim.activate())
  // Register brim:// URLs so the OS routes them to this app.
  app.setAsDefaultProtocolClient("brim")
  app.on("open-url", (event, url) => {
    // recommended to preventDefault in docs: https://www.electronjs.org/docs/api/app#event-open-url-macos
    event.preventDefault()
    brim.openUrl(url)
  })
  // Harden every WebContents: no webviews, no navigation away from the
  // current URL, no renderer-created windows.
  app.on("web-contents-created", (event, contents) => {
    contents.on("will-attach-webview", (e) => {
      e.preventDefault()
      log.error("Security Warning: Prevented creation of webview")
    })
    contents.on("will-navigate", (e, url) => {
      if (contents.getURL() === url) return // Allow reloads
      e.preventDefault()
      log.error(`Security Warning: Prevented navigation to ${url}`)
    })
    contents.on("new-window", (e) => {
      e.preventDefault()
      log.error("Security Warning: Prevented new window from renderer")
    })
  })
}
// NOTE(review): hardware acceleration is disabled app-wide; the reason is
// not stated here — presumably to avoid GPU rendering issues (confirm).
app.disableHardwareAcceleration()
// Enforce a single running instance; losing launches quit immediately and
// their argv is delivered to the winner via the "second-instance" event.
const gotTheLock = app.requestSingleInstanceLock()
if (gotTheLock) {
  main().then(() => {
    // Integration-test hook, loaded only when enabled via env var.
    if (process.env.BRIM_ITEST === "true") require("./itest")
  })
} else {
  app.quit()
}
|
def convert_list_dict(strings):
    """Return a dict mapping each string in ``strings`` to itself.

    Duplicate entries collapse to a single key, so every key/value pair
    is unique.
    """
    return dict(zip(strings, strings))


# Sample usage
strings = ["apple", "ball", "cat", "dog"]
convert_list_dict(strings)  # {'apple': 'apple', 'ball': 'ball', 'cat': 'cat', 'dog': 'dog'}
<reponame>bcgov/citz-imb-sp-vdr
import { Alert, AlertTitle } from '@material-ui/lab'
import { useList } from 'components/Hooks'
import React from 'react'
import { SPTable } from '../SPTable/SPTable'
// Renders a SharePoint list as an SPTable, delegating data fetching to the
// useList hook. Any props beyond the ones destructured below are forwarded
// to useList as list options.
export const SPList = (props) => {
  const { listName, showTitle, columnFiltering, title, noRecordsText, ...listProps } = props
  const { table, isLoading, isError, error, isFetching } = useList(
    listName,
    listProps
  )
  // Fetch failure: surface the error in a Material-UI alert instead of the table.
  if (isError) {
    return (
      <Alert severity='error'>
        <AlertTitle>Error</AlertTitle>
        {error}
      </Alert>
    )
  }
  // Title falls back to the list name; "fetching" covers both the initial
  // load and background refetches.
  return (
    <SPTable
      table={table}
      title={title ?? listName}
      showTitle={showTitle}
      isFetching={isLoading || isFetching}
      columnFiltering={columnFiltering}
      noRecordsText={noRecordsText}
    />
  )
}
|
import { IPath } from './ElementInterface';
import { ICanvasState } from '../components/Canvas/CanvasInterfaces';
/**
 * A polyline rendered as a dashed "road": each segment gets a thick grey
 * base stroke with a dashed white centre line drawn on top.
 */
export default class Path implements IPath {
  // Ordered 2-D vertices; index 0 is x and index 1 is y in each NdArray
  // (see the get(0)/get(1) usage in display()).
  public points: nj.NdArray[];

  /**
   * @param points - polyline vertices; at least two are required.
   * @param radius - half the stroke width of the grey base line.
   * @throws Error when fewer than two points are supplied.
   */
  constructor(points: nj.NdArray[], public radius: number) {
    if (points.length < 2) {
      throw new Error('Path needs at least two points!');
    }
    this.points = points;
  }

  /** Draws every segment onto the canvas context held by `state`, if any. */
  display(state: Readonly<ICanvasState>) {
    if (state.ctx) {
      for (let i = 0; i < this.points.length - 1; i++) {
        const start = this.points[i];
        const end = this.points[i + 1];
        const sx = start.get(0);
        const sy = start.get(1);
        const ex = end.get(0);
        const ey = end.get(1);
        // Pass 1: thick grey base line for this segment.
        state.ctx.lineWidth = this.radius * 2;
        state.ctx.strokeStyle = '#777777';
        state.ctx.beginPath();
        state.ctx.moveTo(sx, sy);
        state.ctx.lineTo(ex, ey);
        state.ctx.stroke();
        // Pass 2: dashed white centre line on top of the base.
        state.ctx.lineWidth = 3;
        state.ctx.strokeStyle = '#ffffff';
        state.ctx.setLineDash([25, 30]);
        state.ctx.beginPath();
        state.ctx.moveTo(sx, sy);
        state.ctx.lineTo(ex, ey);
        state.ctx.stroke();
        // Restore context defaults so later drawing is unaffected.
        state.ctx.lineWidth = 1;
        state.ctx.setLineDash([]);
        state.ctx.strokeStyle = '#000000';
      }
    }
  }
} |
<filename>mobile/src/main/java/com/serchinastico/mechrunner/schedule/domain/model/Schedule.java<gh_stars>0
package com.serchinastico.mechrunner.schedule.domain.model;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
/**
 * A named, repeatable sequence of {@link Step}s.
 * Serializable so instances can be passed between components; do not
 * rename fields without considering the serialized form.
 */
public class Schedule implements Serializable {

    private static final long serialVersionUID = 4440550559130047199L;

    // Identifier assigned by the caller (0 until setId is invoked).
    private long id;
    private final List<Step> steps = new ArrayList<>();
    // How many times the schedule is to be repeated.
    private int repetitionsCount;
    private String name;

    /** Creates a schedule with only a name (id and repetitions default to 0). */
    public Schedule(String name) {
        this.name = name;
    }

    /** Creates a schedule without an id — presumably not yet persisted (confirm with callers). */
    public Schedule(int repetitionsCount, String name) {
        this.repetitionsCount = repetitionsCount;
        this.name = name;
    }

    /** Creates a fully populated schedule. */
    public Schedule(long id, int repetitionsCount, String name) {
        this.id = id;
        this.repetitionsCount = repetitionsCount;
        this.name = name;
    }

    public void setId(long id) {
        this.id = id;
    }

    public long getId() {
        return id;
    }

    public void setRepetitionsCount(int repetitionsCount) {
        this.repetitionsCount = repetitionsCount;
    }

    public int getRepetitionsCount() {
        return repetitionsCount;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getName() {
        return name;
    }

    /** Appends a step; returns {@code true} as specified by {@link List#add}. */
    public boolean addStep(Step step) {
        return steps.add(step);
    }

    /** Returns the step at {@code index}; throws IndexOutOfBoundsException when out of range. */
    public Step getStep(int index) {
        return steps.get(index);
    }

    /** Number of steps currently in the schedule. */
    public int size() {
        return steps.size();
    }

    /** Iterator over the steps in insertion order. */
    public Iterator<Step> iterator() {
        return steps.iterator();
    }
}
|
<gh_stars>0
REM FILE NAME: tbsp_exp.sql
REM LOCATION: Backup Recovery\Utilities
REM FUNCTION: Creates a basic shell script to perform tablespace level exports
REM TESTED ON: 8.1.5, 8.1.7, 9.0.1
REM PLATFORM: non-specific
REM REQUIRES: dba_tables, dba_tablespaces, v$database
REM
REM This is a part of the Knowledge Xpert for Oracle Administration library.
REM Copyright (C) 2001 Quest Software
REM All rights reserved.
REM
REM******************** Knowledge Xpert for Oracle Administration ********************
REM
REM NOTES: Each tablespace is given its own export that handles
REM its tables and their related indexes, grants and
REM constraints
REM
REM***********************************************************************************
SET verify off echo off termout on feedback off
PROMPT ...creating tablespace level export script
SET termout on
REM Recreate the scratch table that accumulates the generated shell script,
REM one row per output line; file#/line_no define the final ordering.
DROP TABLE exp_temp;
CREATE TABLE exp_temp (file# NUMBER, line_no NUMBER, line_txt LONG);
DECLARE
   /*
   || Populates exp_temp with a shell script that exports every non-SYSTEM
   || tablespace via its own parameter file.  A first pass writes a
   || commented header listing each tablespace's owners and tables; a
   || second pass writes the actual export commands.
   */
   CURSOR count_tabs (tbsp IN VARCHAR2)
   IS
      SELECT COUNT (*)
        FROM dba_tables
       WHERE tablespace_name = tbsp;

   CURSOR get_tbsp
   IS
      SELECT tablespace_name
        FROM dba_tablespaces
       WHERE tablespace_name != 'SYSTEM';

   CURSOR get_owners (tbsp IN VARCHAR2)
   IS
      SELECT DISTINCT (owner)
        FROM dba_tables
       WHERE tablespace_name = tbsp;

   -- BUG FIX: the second parameter was previously named "owner", so the
   -- predicate read "owner = owner" and compared the column with itself
   -- (inside a SQL statement, column names take precedence over PL/SQL
   -- identifiers of the same name).  That returned EVERY table in the
   -- tablespace for each owner.  Renaming the parameter restores the
   -- intended per-owner filter.
   CURSOR get_tabs (tbsp IN VARCHAR2, tab_owner IN VARCHAR2)
   IS
      SELECT table_name
        FROM dba_tables
       WHERE tablespace_name = tbsp AND owner = tab_owner;

   row_cntr        INTEGER := 0;                  -- running output line number
   tablespace_nm   dba_tablespaces.tablespace_name%TYPE;
   owner           dba_tables.owner%TYPE;
   table_nm        dba_tables.table_name%TYPE;
   ln_txt          exp_temp.line_txt%TYPE;        -- line currently being built
   tab_cnt         INTEGER;                       -- tables emitted so far
   file_no         INTEGER;                       -- logical output file number
   tab_count       INTEGER;                       -- table count of current tablespace
   dbname          v$database.NAME%TYPE;

   /* Append one line of generated script to exp_temp. */
   PROCEDURE insert_tab (file_no NUMBER, row_cntr NUMBER, ln_txt VARCHAR2)
   IS
   BEGIN
      INSERT INTO exp_temp
                  (file#, line_no, line_txt)
           VALUES (file_no, row_cntr, ln_txt);
   END;
BEGIN
   /*
   initialize various counters
   */
   row_cntr := 0;
   tab_count := 0;
   file_no := 1;

   /*
   Get database name
   */
   SELECT NAME
     INTO dbname
     FROM v$database;

   ln_txt := '# Tablespace level export script for instance: ' || dbname;
   row_cntr := row_cntr + 1;
   insert_tab (file_no, row_cntr, ln_txt);

   /*
   Set command in script to set SID
   */
   ln_txt := 'ORACLE_SID=' || LOWER (dbname);
   row_cntr := row_cntr + 1;
   insert_tab (file_no, row_cntr, ln_txt);

   /*
   First run to build export script header
   Get all tablespace names other than system
   */
   IF get_tbsp%ISOPEN
   THEN
      CLOSE get_tbsp;
   END IF;
   OPEN get_tbsp;

   LOOP
      FETCH get_tbsp INTO tablespace_nm;
      EXIT WHEN get_tbsp%NOTFOUND;

      /*
      See if tablespace has tables; skip empty tablespaces
      */
      IF count_tabs%ISOPEN
      THEN
         CLOSE count_tabs;
      END IF;
      OPEN count_tabs (tablespace_nm);
      FETCH count_tabs INTO tab_count;

      IF tab_count = 0
      THEN
         GOTO end_loop1;
      END IF;

      row_cntr := row_cntr + 1;
      ln_txt := '#';
      insert_tab (file_no, row_cntr, ln_txt);
      row_cntr := row_cntr + 1;
      ln_txt := '#';
      insert_tab (file_no, row_cntr, ln_txt);

      ln_txt := '# Tablespace: ' || tablespace_nm;
      row_cntr := row_cntr + 1;
      insert_tab (file_no, row_cntr, ln_txt);

      -- TRUNC(SYSDATE) is rendered using the session NLS date format.
      ln_txt := '# Export DMP file name: '
                || tablespace_nm
                || '_'
                || TRUNC (SYSDATE)
                || '.dmp';
      row_cntr := row_cntr + 1;
      insert_tab (file_no, row_cntr, ln_txt);

      row_cntr := row_cntr + 1;
      ln_txt := '# Owners for ' || tablespace_nm;
      insert_tab (file_no, row_cntr, ln_txt);

      /*
      Get tablespace table owners
      */
      IF get_owners%ISOPEN
      THEN
         CLOSE get_owners;
      END IF;
      OPEN get_owners (tablespace_nm);

      LOOP
         FETCH get_owners INTO owner;
         EXIT WHEN get_owners%NOTFOUND;

         /*
         Get tablespace tables for this owner
         */
         ln_txt := '# Tables for tablespace: ' || tablespace_nm;
         row_cntr := row_cntr + 1;
         insert_tab (file_no, row_cntr, ln_txt);

         ln_txt := '';
         -- BUG FIX: reset per owner so every owner's line receives the
         -- opening comment marker (previously only the first owner's did).
         tab_cnt := 0;

         IF get_tabs%ISOPEN
         THEN
            CLOSE get_tabs;
         END IF;
         OPEN get_tabs (tablespace_nm, owner);

         LOOP
            FETCH get_tabs INTO table_nm;
            EXIT WHEN get_tabs%NOTFOUND;
            tab_cnt := tab_cnt + 1;
            IF tab_cnt = 1
            THEN
               ln_txt := '/* ' || ln_txt || owner || '.' || table_nm;
            ELSE
               ln_txt := ln_txt || ', ' || owner || '.' || table_nm;
            END IF;
         END LOOP;
         CLOSE get_tabs;

         row_cntr := row_cntr + 1;
         ln_txt := ln_txt || ' */';
         insert_tab (file_no, row_cntr, ln_txt);
      END LOOP;
      CLOSE get_owners;

      <<end_loop1>>
      NULL;
   END LOOP;
   CLOSE get_tbsp;
   -- Close the count cursor left open by the last iteration (if any).
   IF count_tabs%ISOPEN
   THEN
      CLOSE count_tabs;
   END IF;

   ln_txt := '####### End of Header -- Start of actual export script ########';
   row_cntr := row_cntr + 1;
   insert_tab (file_no, row_cntr, ln_txt);

   ln_txt := 'set -x ';
   row_cntr := row_cntr + 1;
   insert_tab (file_no, row_cntr, ln_txt);

   ln_txt := 'script tablespace_exp_' || SYSDATE || '.log';
   row_cntr := row_cntr + 1;
   insert_tab (file_no, row_cntr, ln_txt);

   /*
   Now build actual export command sets:
   one .par file and one exp invocation per non-empty tablespace.
   Get all tablespace names other than system.
   */
   IF get_tbsp%ISOPEN
   THEN
      CLOSE get_tbsp;
   END IF;
   OPEN get_tbsp;

   LOOP
      FETCH get_tbsp INTO tablespace_nm;
      EXIT WHEN get_tbsp%NOTFOUND;

      /*
      See if tablespace has tables
      */
      IF count_tabs%ISOPEN
      THEN
         CLOSE count_tabs;
      END IF;
      OPEN count_tabs (tablespace_nm);
      FETCH count_tabs INTO tab_count;

      IF tab_count = 0
      THEN
         GOTO end_loop;
      END IF;

      row_cntr := row_cntr + 1;
      ln_txt := '#';
      insert_tab (file_no, row_cntr, ln_txt);
      row_cntr := row_cntr + 1;
      ln_txt := '#';
      insert_tab (file_no, row_cntr, ln_txt);

      ln_txt := '# Export script for tablespace ' || tablespace_nm;
      row_cntr := row_cntr + 1;
      insert_tab (file_no, row_cntr, ln_txt);

      ln_txt := '# created on ' || SYSDATE;
      row_cntr := row_cntr + 1;
      insert_tab (file_no, row_cntr, ln_txt);

      -- Remove any stale parameter file before regenerating it.
      ln_txt := 'if ( -r ' || tablespace_nm || '.par' || ' ) then';
      row_cntr := row_cntr + 1;
      insert_tab (file_no, row_cntr, ln_txt);
      ln_txt := ' rm ' || tablespace_nm || '.par';
      row_cntr := row_cntr + 1;
      insert_tab (file_no, row_cntr, ln_txt);
      ln_txt := 'end if';
      row_cntr := row_cntr + 1;
      insert_tab (file_no, row_cntr, ln_txt);
      ln_txt := 'touch ' || tablespace_nm || '.par';
      row_cntr := row_cntr + 1;
      insert_tab (file_no, row_cntr, ln_txt);

      /*
      Set up basic export commands; CHR(39) is a single quote
      */
      ln_txt := 'echo '
                || CHR (39)
                || 'grants=y indexes=y constraints=y compress=y'
                || CHR (39)
                || '>>'
                || tablespace_nm
                || '.par';
      row_cntr := row_cntr + 1;
      insert_tab (file_no, row_cntr, ln_txt);

      ln_txt := 'echo '
                || CHR (39)
                || 'tables=('
                || CHR (39)
                || '>>'
                || tablespace_nm
                || '.par';
      row_cntr := row_cntr + 1;
      insert_tab (file_no, row_cntr, ln_txt);

      /*
      Get tablespace table owners
      */
      IF get_owners%ISOPEN
      THEN
         CLOSE get_owners;
      END IF;
      OPEN get_owners (tablespace_nm);

      -- Here tab_cnt deliberately spans all owners: only the very first
      -- entry of the tables=( list omits the leading comma.
      tab_cnt := 0;

      LOOP
         FETCH get_owners INTO owner;
         EXIT WHEN get_owners%NOTFOUND;

         /*
         Get tablespace tables
         */
         IF get_tabs%ISOPEN
         THEN
            CLOSE get_tabs;
         END IF;
         OPEN get_tabs (tablespace_nm, owner);

         LOOP
            FETCH get_tabs INTO table_nm;
            EXIT WHEN get_tabs%NOTFOUND;
            tab_cnt := tab_cnt + 1;
            IF tab_cnt = 1
            THEN
               ln_txt := 'echo '
                         || CHR (39)
                         || owner
                         || '.'
                         || table_nm
                         || CHR (39)
                         || '>>'
                         || tablespace_nm
                         || '.par';
            ELSE
               ln_txt := 'echo '
                         || CHR (39)
                         || ', '
                         || owner
                         || '.'
                         || table_nm
                         || CHR (39)
                         || '>>'
                         || tablespace_nm
                         || '.par';
            END IF;
            row_cntr := row_cntr + 1;
            insert_tab (file_no, row_cntr, ln_txt);
         END LOOP;
         CLOSE get_tabs;
      END LOOP;
      CLOSE get_owners;

      ln_txt := 'echo '
                || CHR (39)
                || ')'
                || CHR (39)
                || '>>'
                || tablespace_nm
                || '.par';
      row_cntr := row_cntr + 1;
      insert_tab (file_no, row_cntr, ln_txt);

      /*
      Set file name for export file
      */
      ln_txt := 'echo '
                || CHR (39)
                || 'file='
                || tablespace_nm
                || '_'
                || TRUNC (SYSDATE)
                || '.dmp'
                || CHR (39)
                || '>>'
                || tablespace_nm
                || '.par';
      row_cntr := row_cntr + 1;
      insert_tab (file_no, row_cntr, ln_txt);

      -- SECURITY: hard-coded credentials are written into the generated
      -- script.  Kept byte-for-byte for output compatibility, but consider
      -- prompting for the password or using a secure wallet instead.
      ln_txt := 'exp system/angler parfile=' || tablespace_nm || '.par';
      row_cntr := row_cntr + 1;
      insert_tab (file_no, row_cntr, ln_txt);

      ln_txt := 'compress ' || tablespace_nm || '_' || TRUNC (SYSDATE) || '.dmp ';
      row_cntr := row_cntr + 1;
      insert_tab (file_no, row_cntr, ln_txt);

      file_no := file_no + 1;

      <<end_loop>>
      NULL;
   END LOOP;
   CLOSE get_tbsp;
   IF count_tabs%ISOPEN
   THEN
      CLOSE count_tabs;
   END IF;

   COMMIT;
END;
/
REM Spool the accumulated lines to rep_out\tbsp_exp.sh, ordered by
REM file#/line_no; all SQL*Plus decoration is switched off so the spool
REM contains only the script text.
SET heading off feedback off long 4000 lines 80 pages 0 verify off
SET recsep off embedded on echo off termout off
COLUMN file# noprint
COLUMN line_no noprint
COLUMN line_txt format a80 word_wrapped
SPOOL rep_out\tbsp_exp.sh
SELECT *
FROM exp_temp
ORDER BY file#, line_no;
SPOOL off
REM Restore interactive SQL*Plus defaults.
SET heading on feedback on long 2000 lines 80 pages 22 verify on
SET recsep on embedded off echo off termout on
CLEAR columns
PROMPT Tablespace Export Procedure completed.
|
#!/usr/bin/env bash
# Relaunch polybar: stop any running instances, then start the bottom and
# top bars with output appended to per-bar log files.

# Terminate already running bar instances
killall -q polybar

# Wait until the processes have actually shut down before relaunching,
# otherwise the new bars can race the dying ones.
while pgrep -u "$UID" -x polybar >/dev/null; do sleep 1; done

# If all your bars have ipc enabled, you can also use
# polybar-msg cmd quit

# Launch bottom and top; mark a restart boundary in both log files first.
echo "---" | tee -a /tmp/polybar1.log /tmp/polybar2.log
#polybar black >>/tmp/polybar1.log 2>&1 & disown
polybar botbar -r >>/tmp/polybar1.log 2>&1 & disown
echo "launched bottom"
# Fix: log the top bar to polybar2.log — both bars previously appended to
# polybar1.log even though polybar2.log was seeded by the tee above.
polybar topbar -r >>/tmp/polybar2.log 2>&1 & disown
echo "Launched top"
|
#!/usr/bin/env bash
# CI build script: configures ccache, builds the project (with the talvos
# Vulkan implementation) under a per-OS timeout, then runs the test binaries.
set -ex

# ccache is not used on the Windows workers.
if [ "${TRAVIS_OS_NAME}" != "windows" ]; then
  ccache --max-size=2G
  ccache --show-stats
fi

# Per-OS command used to enforce the build timeout (coreutils' timeout is
# installed as gtimeout on macOS).
declare -A CMD_TIMEOUT_MAP
CMD_TIMEOUT_MAP[osx]=gtimeout
CMD_TIMEOUT_MAP[linux]=timeout
CMD_TIMEOUT="${CMD_TIMEOUT_MAP[${TRAVIS_OS_NAME}]}"

# Per-OS build time budget in seconds.
declare -A BUILD_TIMEOUT_MAP
BUILD_TIMEOUT_MAP[osx]=2000
BUILD_TIMEOUT_MAP[linux]=2400
BUILD_TIMEOUT="${BUILD_TIMEOUT_MAP[${TRAVIS_OS_NAME}]}"

BUILD_DIR=build
mkdir "${BUILD_DIR}"
cd "${BUILD_DIR}"
cmake --version
cmake -DCMAKE_BUILD_TYPE=Release -DCLVK_VULKAN_IMPLEMENTATION=talvos -DSPIRV_WERROR=OFF ..

# Temporarily disable errexit so a timed-out make does not abort the script
# before the ccache stats are printed; the status is checked explicitly below.
set +e
${CMD_TIMEOUT} ${BUILD_TIMEOUT} make -j2
BUILD_STATUS=$?
set -e

if [ "${TRAVIS_OS_NAME}" != "windows" ]; then
  ccache --show-stats
fi

# Propagate a build/timeout failure before attempting to run tests.
if [ ${BUILD_STATUS} -ne 0 ]; then
  exit ${BUILD_STATUS}
fi

./simple_test
./simple_test_static
./api_tests
|
// Mocha test suite for the Yatzy scoring kata.
// NOTE(review): the API is exercised inconsistently — some categories are
// called as static functions (Yatzy.chance(...)) while others go through
// instances (new Yatzy(...).fours()); this mirrors lib/yatzy.js as-is.
var assert = require("assert");
var Yatzy = require("../lib/yatzy.js");

describe("Chance", function () {
  it("scores sum of all dice", function () {
    assert.equal(15, Yatzy.chance(2, 3, 4, 5, 1));
    assert.equal(16, Yatzy.chance(3, 3, 4, 5, 1));
  });
});

describe("Yatzy", function () {
  it("scores 50", function () {
    assert.equal(50, Yatzy.yatzy(4, 4, 4, 4, 4));
    assert.equal(50, Yatzy.yatzy(6, 6, 6, 6, 6));
    assert.equal(0, Yatzy.yatzy(6, 6, 6, 6, 3));
  });
});

describe("Ones", function () {
  it("score the sum of 1s", function () {
    assert.equal(1, Yatzy.ones(1, 2, 3, 4, 5));
    assert.equal(2, Yatzy.ones(1, 2, 1, 4, 5));
    assert.equal(0, Yatzy.ones(6, 2, 2, 4, 5));
    assert.equal(4, Yatzy.ones(1, 2, 1, 1, 1));
  });
});

describe("Twos", function () {
  it("score the sum of 2s", function () {
    assert.equal(4, Yatzy.twos(1, 2, 3, 2, 6));
    assert.equal(10, Yatzy.twos(2, 2, 2, 2, 2));
  });
});

describe("Threes", function () {
  it("score the sum of 3s", function () {
    assert.equal(6, Yatzy.threes(1, 2, 3, 2, 3));
    assert.equal(12, Yatzy.threes(2, 3, 3, 3, 3));
  });
});

describe("Fours", function () {
  it("score the sum of 4s", function () {
    assert.equal(12, new Yatzy(4, 4, 4, 5, 5).fours());
    assert.equal(8, new Yatzy(4, 4, 5, 5, 5).fours());
    assert.equal(4, new Yatzy(4, 5, 5, 5, 5).fours());
  });
});

describe("Fives", function () {
  it("score the sum of fives", function () {
    assert.equal(10, new Yatzy(4, 4, 4, 5, 5).fives());
    assert.equal(15, new Yatzy(4, 4, 5, 5, 5).fives());
    assert.equal(20, new Yatzy(4, 5, 5, 5, 5).fives());
  });
});

describe("Sixes", function () {
  it("score the sum of sixes", function () {
    assert.equal(0, new Yatzy(4, 4, 4, 5, 5).sixes());
    assert.equal(6, new Yatzy(4, 4, 6, 5, 5).sixes());
    assert.equal(18, new Yatzy(6, 5, 6, 6, 5).sixes());
  });
});

describe("One pair", function () {
  it("scores the sum of the highest pair", function () {
    assert.equal(6, Yatzy.score_pair(3, 4, 3, 5, 6));
    assert.equal(10, Yatzy.score_pair(5, 3, 3, 3, 5));
    assert.equal(12, Yatzy.score_pair(5, 3, 6, 6, 5));
  });
});

describe("Two pair", function () {
  it("scores the sum of the two pairs", function () {
    assert.equal(16, Yatzy.two_pair(3, 3, 5, 4, 5));
    assert.equal(16, Yatzy.two_pair(3, 3, 5, 5, 5));
  });
});

describe("Three of a kind", function () {
  it("scores the sum of the three of the kind", function () {
    assert.equal(9, Yatzy.three_of_a_kind(3, 3, 3, 4, 5));
    assert.equal(15, Yatzy.three_of_a_kind(5, 3, 5, 4, 5));
    assert.equal(9, Yatzy.three_of_a_kind(3, 3, 3, 3, 5));
  });
});

describe("Four of a kind", function () {
  it("scores the sum of the four of the kind", function () {
    assert.equal(12, Yatzy.four_of_a_kind(3, 3, 3, 3, 5));
    assert.equal(20, Yatzy.four_of_a_kind(5, 5, 5, 4, 5));
    // NOTE(review): this asserts three_of_a_kind inside the "Four of a
    // kind" suite — likely a copy/paste slip; confirm whether
    // four_of_a_kind(3,3,3,3,3) === 12 was intended before changing it.
    assert.equal(9, Yatzy.three_of_a_kind(3, 3, 3, 3, 3));
  });
});

describe("Small straight", function () {
  it("scores 15", function () {
    assert.equal(15, Yatzy.smallStraight(1, 2, 3, 4, 5));
    assert.equal(15, Yatzy.smallStraight(2, 3, 4, 5, 1));
    assert.equal(0, Yatzy.smallStraight(1, 2, 2, 4, 5));
  });
});

describe("Large straight", function () {
  it("scores 20", function () {
    assert.equal(20, Yatzy.largeStraight(6, 2, 3, 4, 5));
    assert.equal(20, Yatzy.largeStraight(2, 3, 4, 5, 6));
    assert.equal(0, Yatzy.largeStraight(1, 2, 2, 4, 5));
  });
});

describe("Full house", function () {
  it("scores the sum of the full house", function () {
    assert.equal(18, Yatzy.fullHouse(6, 2, 2, 2, 6));
    assert.equal(0, Yatzy.fullHouse(2, 3, 4, 5, 6));
  });
});
|
#!/usr/bin/env -S bash -euET -o pipefail -O inherit_errexit
# Generated ktools run script (partition P4): computes ground-up (gul) and
# insured (il) losses through a network of named FIFOs.  Each summary stream
# is fanned out (via tee) to eltcalc/summarycalc/pltcalc plus aalcalc/leccalc
# work files, then the per-process outputs are concatenated (kat) into CSVs.
# %FIFO_DIR% is a placeholder substituted when this script is generated.
SCRIPT=$(readlink -f "$0") && cd $(dirname "$SCRIPT")

# --- Script Init ---
mkdir -p log
rm -R -f log/*

# --- Setup run dirs ---
find output -type f -not -name '*summary-info*' -not -name '*.json' -exec rm -R -f {} +
rm -R -f /tmp/%FIFO_DIR%/fifo/*
rm -R -f work/*
mkdir work/kat/
mkdir work/gul_S1_summaryleccalc
mkdir work/gul_S1_summaryaalcalc
mkdir work/il_S1_summaryleccalc
mkdir work/il_S1_summaryaalcalc

# Named pipes connecting the pipeline stages for this partition.
mkfifo /tmp/%FIFO_DIR%/fifo/gul_P4
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S1_summary_P4
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S1_summary_P4.idx
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S1_eltcalc_P4
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S1_summarycalc_P4
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S1_pltcalc_P4
mkfifo /tmp/%FIFO_DIR%/fifo/il_P4
mkfifo /tmp/%FIFO_DIR%/fifo/il_S1_summary_P4
mkfifo /tmp/%FIFO_DIR%/fifo/il_S1_summary_P4.idx
mkfifo /tmp/%FIFO_DIR%/fifo/il_S1_eltcalc_P4
mkfifo /tmp/%FIFO_DIR%/fifo/il_S1_summarycalc_P4
mkfifo /tmp/%FIFO_DIR%/fifo/il_S1_pltcalc_P4

# --- Do insured loss computes ---
# Consumers are started first so the FIFOs have readers before writers open them.
eltcalc -s < /tmp/%FIFO_DIR%/fifo/il_S1_eltcalc_P4 > work/kat/il_S1_eltcalc_P4 & pid1=$!
summarycalctocsv -s < /tmp/%FIFO_DIR%/fifo/il_S1_summarycalc_P4 > work/kat/il_S1_summarycalc_P4 & pid2=$!
pltcalc -s < /tmp/%FIFO_DIR%/fifo/il_S1_pltcalc_P4 > work/kat/il_S1_pltcalc_P4 & pid3=$!
tee < /tmp/%FIFO_DIR%/fifo/il_S1_summary_P4 /tmp/%FIFO_DIR%/fifo/il_S1_eltcalc_P4 /tmp/%FIFO_DIR%/fifo/il_S1_summarycalc_P4 /tmp/%FIFO_DIR%/fifo/il_S1_pltcalc_P4 work/il_S1_summaryaalcalc/P4.bin work/il_S1_summaryleccalc/P4.bin > /dev/null & pid4=$!
tee < /tmp/%FIFO_DIR%/fifo/il_S1_summary_P4.idx work/il_S1_summaryleccalc/P4.idx > /dev/null & pid5=$!
summarycalc -m -f -1 /tmp/%FIFO_DIR%/fifo/il_S1_summary_P4 < /tmp/%FIFO_DIR%/fifo/il_P4 &

# --- Do ground up loss computes ---
eltcalc -s < /tmp/%FIFO_DIR%/fifo/gul_S1_eltcalc_P4 > work/kat/gul_S1_eltcalc_P4 & pid6=$!
summarycalctocsv -s < /tmp/%FIFO_DIR%/fifo/gul_S1_summarycalc_P4 > work/kat/gul_S1_summarycalc_P4 & pid7=$!
pltcalc -s < /tmp/%FIFO_DIR%/fifo/gul_S1_pltcalc_P4 > work/kat/gul_S1_pltcalc_P4 & pid8=$!
tee < /tmp/%FIFO_DIR%/fifo/gul_S1_summary_P4 /tmp/%FIFO_DIR%/fifo/gul_S1_eltcalc_P4 /tmp/%FIFO_DIR%/fifo/gul_S1_summarycalc_P4 /tmp/%FIFO_DIR%/fifo/gul_S1_pltcalc_P4 work/gul_S1_summaryaalcalc/P4.bin work/gul_S1_summaryleccalc/P4.bin > /dev/null & pid9=$!
tee < /tmp/%FIFO_DIR%/fifo/gul_S1_summary_P4.idx work/gul_S1_summaryleccalc/P4.idx > /dev/null & pid10=$!
summarycalc -m -i -1 /tmp/%FIFO_DIR%/fifo/gul_S1_summary_P4 < /tmp/%FIFO_DIR%/fifo/gul_P4 &

# Source of the whole pipeline: eve streams events (partition 4 of 40) into
# the model; gulcalc output is tee'd to the gul FIFO and through fmcalc to
# produce the il stream.
eve 4 40 | getmodel | gulcalc -S100 -L100 -r -a1 -i - | tee /tmp/%FIFO_DIR%/fifo/gul_P4 | fmcalc -a2 > /tmp/%FIFO_DIR%/fifo/il_P4 &

wait $pid1 $pid2 $pid3 $pid4 $pid5 $pid6 $pid7 $pid8 $pid9 $pid10

# --- Do insured loss kats ---
kat -s work/kat/il_S1_eltcalc_P4 > output/il_S1_eltcalc.csv & kpid1=$!
kat work/kat/il_S1_pltcalc_P4 > output/il_S1_pltcalc.csv & kpid2=$!
kat work/kat/il_S1_summarycalc_P4 > output/il_S1_summarycalc.csv & kpid3=$!

# --- Do ground up loss kats ---
kat -s work/kat/gul_S1_eltcalc_P4 > output/gul_S1_eltcalc.csv & kpid4=$!
kat work/kat/gul_S1_pltcalc_P4 > output/gul_S1_pltcalc.csv & kpid5=$!
kat work/kat/gul_S1_summarycalc_P4 > output/gul_S1_summarycalc.csv & kpid6=$!
wait $kpid1 $kpid2 $kpid3 $kpid4 $kpid5 $kpid6
|
import React from "react";
import {Link} from "react-router-dom";
import Logo from "../logo.png";
import {GithubCorner} from "../components/GithubCorner";
// Landing page: logo, project blurb, a link to the admin dashboard and a
// list of external resources. The example-game link is only rendered on
// the public example deployment (hostname check below).
function Home() {
  // Static page — no state or effects; everything is declarative JSX.
  return (
    <div className="text-center min-h-screen bg-gray-100">
      <GithubCorner />
      <div className="flex flex-col items-center justify-center min-h-screen">
        <a href="https://github.com/TheMisinformationGame/">
          <img src={Logo} alt="Misinformation Game Logo"
               className="fixed left-2 top-2 h-16" />
        </a>
        <div className="max-w-2xl text-left">
          <h1 className="text-5xl mb-2">The Misinformation Game</h1>
          <p className="text-lg mb-4">
            The Misinformation Game is a social media simulator built to study
            the behavior of people when they interact with social media. This
            game was built as part of the CITS3200 unit at UWA by <NAME>,
            <NAME> <NAME>, <NAME>, <NAME>, <NAME>,
            and <NAME>, in collaboration with Assoc/Prof <NAME>.
          </p>
          <p className="text-lg mb-2">
            Press the button below to access the admin interface to manage your studies,
          </p>
          <div className="flex justify-center">
            <div className="flex my-4">
              <Link to="/admin" name="Admin"
                    className="text-xl text-white px-6 py-3 rounded bg-gray-700 shadow
                               hover:bg-gray-800">
                Access the Admin Dashboard
              </Link>
            </div>
          </div>
          <h2 className="text-4xl mb-4 mt-8">Other Resources</h2>
          {/* The example game only exists on the example website. */}
          {window.location.hostname === "misinformation-game.web.app" &&
            <p className="text-lg mb-2 mt-4">
              <Link to="/study/axsvxt37ctac6ltr" name="Game"
                    className="text-xl underline text-purple-600 hover:text-purple-900">
                Example Game
              </Link>:
              An example game that can be played during development.
            </p>}
          <p className="text-lg mb-2">
            <a className="text-xl underline text-purple-600 hover:text-purple-900"
               href="https://docs.google.com/spreadsheets/d/1JP_3kHtcJC6m4PzwyTixMb8R0gu76tRIbdGcffGsTu0/edit#gid=5219285">
              Study Template
            </a>:
            The template to be copied to create new studies.
          </p>
          <p className="text-lg mb-2">
            <a className="text-xl underline text-purple-600 hover:text-purple-900"
               href="https://github.com/TheMisinformationGame/MisinformationGame/blob/main/docs/README.md">
              Documentation
            </a>:
            How to use this website.
          </p>
          <p className="text-lg mb-2">
            <a className="text-xl underline text-purple-600 hover:text-purple-900"
               href="https://github.com/TheMisinformationGame/MisinformationGame">
              GitHub
            </a>:
            The source code for this website.
          </p>
        </div>
      </div>
    </div>
  );
}
export default Home;
|
<reponame>proc7ts/hatsy<gh_stars>1-10
import type { ErrorMeans, RequestHandler } from '../core';
import type { HttpMeans } from './http.means';
/**
* HTTP processing configuration.
*
* @typeParam TMeans - A type of supported HTTP request processing means.
*/
/**
 * HTTP processing configuration.
 *
 * @typeParam TMeans - A type of supported HTTP request processing means.
 */
export interface HttpConfig<TMeans extends HttpMeans = HttpMeans> {
  /**
   * Default HTTP request handler.
   *
   * This handler will be called after all other handlers when response is not generated.
   *
   * When set to `false` the default response won't be generated.
   *
   * @default `true`, which means a `404 Not Found` error will be raised if there is no response.
   */
  readonly defaultHandler?: RequestHandler<TMeans> | boolean | undefined;
  /**
   * Error processing handler.
   *
   * This handler will be called once request processing error occurred. Such handler would receive
   * a {@link ErrorMeans error processing means} along with {@link HttpMeans HTTP processing ones}.
   *
   * When set to `false` the request processing errors will be logged, but otherwise ignored.
   *
   * @default `true`, which means the request processing error page will be rendered by {@link renderHttpError}
   * handler.
   */
  readonly errorHandler?: RequestHandler<TMeans & ErrorMeans> | boolean | undefined;
  /**
   * Whether to log HTTP processing error.
   *
   * Unhandled errors will be logged with `console.error` in any case.
   *
   * @default `true`, which means an error will be logged with {@link LoggerMeans logger means}, created if necessary.
   */
  readonly logError?: boolean | undefined;
}
/**
 * Companion namespace of {@link HttpConfig} (declaration merging): hosts the
 * {@link HttpConfig.Extended} variant for extended request processing means.
 */
export namespace HttpConfig {
  /**
   * HTTP processing configuration for extended requests.
   *
   * @typeParam TExt - Request processing means extension type.
   * @typeParam TMeans - A type of supported HTTP request processing means.
   */
  export interface Extended<TExt, TMeans extends HttpMeans = HttpMeans> extends HttpConfig<TMeans & TExt> {
    /**
     * Creates actual HTTP request handler.
     *
     * This can be used e.g. to set up additional request processing capabilities, such as {@link Logging}.
     *
     * @param handler - HTTP request handler.
     *
     * @returns HTTP request handler to use instead.
     */
    handleBy(handler: RequestHandler<TMeans & TExt>): RequestHandler<TMeans>;
  }
}
|
from unittest import TestCase, mock
import pytest
from django.contrib.auth import get_user_model
from rest_framework.authtoken.models import Token
from custom_auth.oauth.serializers import OAuthAccessTokenSerializer
UserModel = get_user_model()
@pytest.mark.django_db
class OAuthAccessTokenSerializerTestCase(TestCase):
    """Tests for ``OAuthAccessTokenSerializer.create()``."""

    def setUp(self) -> None:
        # Canned profile data that the mocked get_user_info() will return.
        self.test_email = "<EMAIL>"
        self.test_first_name = "testy"
        self.test_last_name = "tester"
        self.test_id = "test-id"
        self.mock_user_data = {
            "email": self.test_email,
            "first_name": self.test_first_name,
            "last_name": self.test_last_name,
            "google_user_id": self.test_id
        }

    @mock.patch("custom_auth.oauth.serializers.get_user_info")
    def test_create(self, mock_get_user_info):
        """create() resolves the access token to a user (creating one for
        the returned profile) and returns that user's DRF auth Token."""
        # Given
        access_token = "access-token"
        serializer = OAuthAccessTokenSerializer()
        data = {
            "access_token": access_token
        }
        mock_get_user_info.return_value = self.mock_user_data
        # When
        response = serializer.create(validated_data=data)
        # Then — a user with the mocked email exists and a Token for it is returned.
        assert UserModel.objects.filter(email=self.test_email).exists()
        assert isinstance(response, Token)
        assert response.user.email == self.test_email
|
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package console.demo.app;
import org2.beryx.textio.TerminalProperties;
import org2.beryx.textio.TextIO;
import org2.beryx.textio.TextIoFactory;
import org2.beryx.textio.TextTerminal;
import org2.beryx.textio.web.RunnerData;
import java.util.function.BiConsumer;
/**
* A simple application illustrating the use of TextIO.
*/
/**
 * Text-IO demo that walks through placing an online order: product,
 * shipping, payment, then an order overview. Terminal prompt/input colors
 * are changed at runtime between the sections.
 */
public class ECommerce implements BiConsumer<TextIO, RunnerData> {

    /** Console entry point: runs the demo against the default terminal. */
    public static void main(String[] args) {
        TextIO textIO = TextIoFactory.getTextIO();
        new ECommerce().accept(textIO, null);
    }

    /**
     * Runs the interactive order flow.
     *
     * @param textIO     terminal abstraction used for all prompts
     * @param runnerData web-runner init data; null when run from main()
     */
    @Override
    public void accept(TextIO textIO, RunnerData runnerData) {
        TextTerminal<?> terminal = textIO.getTextTerminal();
        String initData = (runnerData == null) ? null : runnerData.getInitData();
        AppUtil.printGsonMessage(terminal, initData);
        TerminalProperties<?> props = terminal.getProperties();

        // Section 1: order details (cyan prompts, blue italic input).
        props.setPromptBold(true);
        props.setPromptUnderline(true);
        props.setPromptColor("cyan");
        terminal.println("Order details");
        props.setPromptUnderline(false);
        props.setPromptBold(false);
        props.setInputColor("blue");
        props.setInputItalic(true);
        String product = textIO.newStringInputReader().read("Product name");
        int quantity = textIO.newIntInputReader()
                .withMinVal(1)
                .withMaxVal(10)
                .read("Quantity");

        // Section 2: shipping information (green prompts, yellow input).
        props.setPromptBold(true);
        props.setPromptUnderline(true);
        props.setPromptColor("green");
        terminal.println("\nShipping Information");
        props.setPromptBold(false);
        props.setPromptUnderline(false);
        props.setInputColor("yellow");
        String city = textIO.newStringInputReader().read("City");
        String street = textIO.newStringInputReader().read("Street Address");
        String shippingOptions = textIO.newStringInputReader()
                .withNumberedPossibleValues("Standard Shipping", "Two-Day Shipping", "One-Day Shipping")
                .read("Shipping Options");

        // Section 3: payment details (white prompts, magenta input).
        props.setPromptBold(true);
        props.setPromptUnderline(true);
        props.setPromptColor("white");
        terminal.println("\nPayment Details");
        props.setPromptBold(false);
        props.setPromptUnderline(false);
        props.setInputColor("magenta");
        String paymentType = textIO.newStringInputReader()
                .withNumberedPossibleValues("PayPal", "MasterCard", "VISA")
                .read("Payment Type");
        String owner = textIO.newStringInputReader().read("Account Owner");

        // Section 4: order overview (red heading, yellow body).
        props.setPromptBold(true);
        props.setPromptUnderline(true);
        props.setPromptColor("red");
        terminal.println("\nOrder Overview");
        props.setPromptBold(false);
        props.setPromptUnderline(false);
        props.setPromptColor("yellow");
        terminal.printf("Product: %s\nQuantity: %d\n", product, quantity);
        terminal.printf("\n%s to %s, %s\n", shippingOptions, street, city);
        terminal.printf("%s is paying with %s.\n", owner, paymentType);

        // Wait for Enter, then close the terminal with a farewell message.
        props.setPromptColor("green");
        textIO.newStringInputReader().withMinLength(0).read("\nPress enter to terminate...");
        textIO.dispose("Payment receipt sent to " + owner + ".");
    }

    /** Short description shown by the demo launcher. */
    @Override
    public String toString() {
        return "E-Commerce: placing an online order.\n" +
                "(Properties are dynamically changed at runtime using hard-coded values.\n" +
                "Properties file: " + getClass().getSimpleName() + ".properties.)";
    }
}
|
package main
import (
"bytes"
"encoding/hex"
"flag"
"io/ioutil"
"log"
"github.com/pilosa/pilosa/v2/roaring"
)
// Small tool to show the numbers present in a postings bitmap.
// To produce a .hex file:
// Run in cqlsh:
// * select bitset from squirreldb.index_postings where shard = -1 and name = '__global__all|metrics__';
// * In shell, run: cut -b 3- | fold -s -w 80 > filename.hex
// * Copy/paste the output from cqlsh into the shell running cut+fold
var (
// filename is the path of the .hex dump to decode (see usage notes above).
filename = flag.String("filename", "", ".hex file to read")
// force allows processing bitmaps with more than 1e9 values (may cause OOM).
force = flag.Bool("force", false, "force operation")
)
// loadBitmap reads a hex-encoded roaring bitmap dump from the given file
// and decodes it into a roaring.Bitmap. The (possibly empty) bitmap is
// returned alongside any read/decode error.
func loadBitmap(filename string) (*roaring.Bitmap, error) {
	bitmap := roaring.NewBTreeBitmap()

	hexDump, err := ioutil.ReadFile(filename)
	if err != nil {
		return bitmap, err
	}

	// The dump is wrapped to 80 columns by fold(1); strip the line breaks
	// before decoding.
	hexDump = bytes.ReplaceAll(hexDump, []byte("\n"), nil)

	raw := make([]byte, hex.DecodedLen(len(hexDump)))
	if _, err := hex.Decode(raw, hexDump); err != nil {
		return bitmap, err
	}

	if err := bitmap.UnmarshalBinary(raw); err != nil {
		return bitmap, err
	}
	return bitmap, nil
}
// main decodes the bitmap named by -filename, prints summary statistics,
// then walks the sorted values printing each contiguous range together with
// how many values are missing ("holes") before it.
func main() {
flag.Parse()
tmp, err := loadBitmap(*filename)
if err != nil {
log.Fatal(err)
}
count := tmp.Count()
log.Printf("The bitmap contains %d number", count)
// A "hole" is a value below the maximum that is absent from the bitmap.
// NOTE(review): max - count only equals the hole count if values start at 1
// (0 never a member) — confirm with the producers of these dumps.
log.Printf("number of hole: %d = max (%d) - count (%d)", tmp.Max()-count, tmp.Max(), count)
if tmp.Count() > 1e9 && !*force {
log.Printf("bitmap is too big to be processed. Use -force to process anyway (may cause OOM)")
return
}
// Materialize every value; this is the memory-hungry step guarded above.
slice := tmp.Slice()
if uint64(len(slice)) != count {
log.Printf("slice had %d value, want %d", len(slice), count)
}
startRange := uint64(0)
endRange := uint64(0)
numberHole := uint64(0)
for i, v := range slice {
switch {
case i == 0:
startRange = v
endRange = v
// NOTE(review): if the first value is 0 this uint64 subtraction wraps
// around; presumably values start at 1 — confirm.
numberHole += v - 1
case v == endRange+1:
// Value extends the current contiguous range.
endRange = v
case v <= endRange:
log.Fatalf("number aren't sorted ! %d <= %d", v, endRange)
default:
// Gap found: report the finished range, then start a new one.
log.Printf("range from %d to %d (free before this range: %d)", startRange, endRange, numberHole)
numberHole += v - endRange - 1
startRange = v
endRange = v
}
}
// Flush the final range, then repeat the summary line for convenience.
log.Printf("range from %d to %d (free before this range: %d)", startRange, endRange, numberHole)
log.Printf("number of hole: %d = max (%d) - count (%d)", tmp.Max()-count, tmp.Max(), count)
}
|
// *******************************************************************************
// © The Pythian Group Inc., 2017
// All Rights Reserved.
// *******************************************************************************
import {AbstractSkeletosState} from "../../extendible/AbstractSkeletosState";
import {SkeletosCursor} from "../../base/SkeletosCursor";
/**
* Convenience class to include error displaying state in your application.
*/
export class ErrorState extends AbstractSkeletosState {
/**
 * Cursor pointing at the "isErrorShown" flag in the underlying state tree.
 *
 * @returns {SkeletosCursor}
 */
get isErrorShownCursor(): SkeletosCursor {
return this.cursor.select("isErrorShown");
}
/**
 * Whether the error is currently being shown in the UI.
 *
 * @returns {boolean}
 */
get isErrorShown(): boolean {
return this.isErrorShownCursor.get();
}
/**
 * Show or hide the error in the UI.
 *
 * @param showError
 */
set isErrorShown(showError: boolean) {
this.isErrorShownCursor.set(showError);
}
/**
 * Cursor pointing at the error title in the underlying state tree.
 *
 * @returns {SkeletosCursor}
 */
get errorTitleCursor(): SkeletosCursor {
return this.cursor.select("errorTitle");
}
/**
 * The title displayed above the error message.
 *
 * @returns {string}
 */
get errorTitle(): string {
return this.errorTitleCursor.get();
}
/**
 * Set the title displayed above the error message.
 *
 * @param title
 */
set errorTitle(title: string) {
this.errorTitleCursor.set(title);
}
/**
 * Cursor pointing at the error message in the underlying state tree.
 *
 * @returns {SkeletosCursor}
 */
get errorMessageCursor(): SkeletosCursor {
return this.cursor.select("errorMessage");
}
/**
 * The main content of the error message.
 *
 * @returns {string}
 */
get errorMessage(): string {
return this.errorMessageCursor.get();
}
/**
 * Set the main content of the error message.
 *
 * @param message
 */
set errorMessage(message: string) {
this.errorMessageCursor.set(message);
}
/**
 * Cursor pointing at the stack trace in the underlying state tree.
 *
 * @returns {SkeletosCursor}
 */
get stackCursor(): SkeletosCursor {
return this.cursor.select("stack");
}
/**
 * Any stack trace associated with the error.
 *
 * @returns {string}
 */
get stack(): string {
return this.stackCursor.get();
}
/**
 * Set the stack trace associated with the error.
 *
 * @param stack
 */
set stack(stack: string) {
this.stackCursor.set(stack);
}
/**
 * Cursor pointing at the serialized details in the underlying state tree.
 *
 */
get detailsCursor(): SkeletosCursor {
return this.cursor.select("details");
}
/**
 * Further details about the error, deserialized from the JSON string the
 * setter stores. Falsy stored values are returned as-is (no parsing).
 *
 * NOTE(review): JSON.parse throws on malformed data — the setter always
 * writes valid JSON, but external writers to this cursor may not.
 */
get details(): object|string|number|boolean {
const details: string = this.detailsCursor.get();
if (details) {
return JSON.parse(details);
} else {
return details;
}
}
/**
 * Store further details about the error. Truthy values are serialized to
 * JSON so arbitrary objects survive the string-based tree; falsy values
 * are stored unchanged.
 */
set details(details: object|string|number|boolean) {
if (details) {
this.detailsCursor.set(JSON.stringify(details));
} else {
this.detailsCursor.set(details);
}
}
}
#!/bin/bash
#
# LICENSE UPL 1.0
#
# Copyright (c) 1982-2018 Oracle and/or its affiliates. All rights reserved.
#
# Since: February, 2018
# Author: sergio.leunissen@oracle.com
# Description: Installs Docker engine using Btrfs as storage
#
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
#
echo 'Installing and configuring Docker engine'
# install Docker engine
yum -y install docker-engine
# Format spare device as Btrfs
# Configure Btrfs storage driver
# NOTE(review): no explicit mkfs command appears here — presumably
# docker-storage-config both formats /dev/sdb and configures the driver;
# confirm against its man page.
docker-storage-config -s btrfs -d /dev/sdb
# Start and enable Docker engine
systemctl start docker
systemctl enable docker
# Add vagrant user to docker group
usermod -a -G docker vagrant
# Relax /etc/docker permissions (vagrant-proxyconf maintains system-wide config)
chmod a+x /etc/docker
echo 'Docker engine is ready to use'
echo 'To get started, on your host, run:'
echo '  vagrant ssh'
echo
echo 'Then, within the guest (for example):'
echo '  docker run -it oraclelinux:6-slim'
echo
|
#! /bin/bash
# Build the server bundle, push the container image to Heroku's registry,
# then release it on the "web" process type.
npm run build:server
heroku container:push web
heroku container:release web
|
# Run the DAGsHub ML workspace detached:
# - expose the workspace UI on host port 8080
# - mount the current directory at /workspace
# - authenticate Jupyter with a fixed token
# - 2G shared memory, auto-restart on failure/reboot
docker run -d \
-p 8080:8080 \
--name "dags-ml-workspace" -v "/${PWD}:/workspace" \
--env AUTHENTICATE_VIA_JUPYTER="dagshub_savta" \
--shm-size 2G \
--restart always \
dagshub/ml-workspace-minimal:latest
|
#!/bin/bash
# End-to-end retrosynthesis pipeline (OpenNMT-style): graph-mask generation,
# data preprocessing, transformer training, inference, and output scoring.
dataset=USPTO-50k_no_rxn
model_name=reproduce_wo_rxn_class
# Build graph masks for the dataset.
python graph_mask_max.py -data data/${dataset}
# Binarize the tokenized src/tgt pairs with a shared vocabulary.
python preprocess.py -train_src data/${dataset}/src-train.txt \
-train_tgt data/${dataset}/tgt-train.txt \
-valid_src data/${dataset}/src-val.txt \
-valid_tgt data/${dataset}/tgt-val.txt \
-save_data data/${dataset}/${dataset} \
-src_seq_length 1000 -tgt_seq_length 1000 \
-src_vocab_size 1000 -tgt_vocab_size 1000 -share_vocab
# Train a 6+6-layer transformer (Noam schedule, label smoothing off),
# logging to tensorboard and teeing stdout/stderr to a log file.
python train.py -data data/${dataset}/${dataset} \
-save_model experiments/${dataset}_${model_name} \
-seed 2020 -gpu_ranks 0 \
-save_checkpoint_steps 1000 -keep_checkpoint 11 \
-train_steps 400000 -valid_steps 1000 -report_every 1000 \
-param_init 0  -param_init_glorot \
-batch_size 4096 -batch_type tokens -normalization tokens \
-dropout 0.3 -max_grad_norm 0 -accum_count 4 \
-optim adam -adam_beta1 0.9 -adam_beta2 0.998 \
-decay_method noam -warmup_steps 8000  \
-learning_rate 2 -label_smoothing 0.0 \
-enc_layers 6 -dec_layers 6 -rnn_size 256 -word_vec_size 256 \
-encoder_type transformer -decoder_type transformer \
-share_embeddings -position_encoding -max_generator_batches 0 \
-global_attention general -global_attention_function softmax \
-self_attn_type scaled-dot -max_relative_positions 4 \
-heads 8 -transformer_ff 2048 -max_distance 1 2 3 4 \
-early_stopping 40 -alpha 1.0 \
-tensorboard -tensorboard_log_dir runs/${dataset}_${model_name} 2>&1 | tee train_$model_name.log
# Run beam-search inference on the test split, then score against targets.
python Generate_test_prediction.py data/${dataset}/src-test.txt -model_path experiments/${dataset}_${model_name}
python parse/parse_output.py -input_file experiments/${dataset}_${model_name}/pred/output \
-target_file data/${dataset}/tgt-test.txt -beam_size 10
|
/*
* Tencent is pleased to support the open source community by making 蓝鲸 available.
* Copyright (C) 2017-2018 THL A29 Limited, a Tencent company. All rights reserved.
* Licensed under the MIT License (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
* http://opensource.org/licenses/MIT
* Unless required by applicable law or agreed to in writing, software distributed under
* the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package api
import (
"fmt"
"configcenter/src/framework/core/output/module/inst"
"configcenter/src/framework/core/output/module/model"
"configcenter/src/framework/core/types"
)
// SetIteratorWrapper the set iterator wrapper
type SetIteratorWrapper struct {
set inst.SetIterator
}
// Next next the set
func (cli *SetIteratorWrapper) Next() (*SetWrapper, error) {
set, err := cli.set.Next()
return &SetWrapper{set: set}, err
}
// ForEach the foreach function
func (cli *SetIteratorWrapper) ForEach(callback func(set *SetWrapper) error) error {
return cli.set.ForEach(func(item inst.SetInterface) error {
return callback(&SetWrapper{set: item})
})
}
// SetWrapper the set wrapper
type SetWrapper struct {
set inst.SetInterface
}
// GetValues return the values
func (cli *SetWrapper) GetValues() (types.MapStr, error) {
return cli.set.GetValues()
}
// SetValue set the key value
func (cli *SetWrapper) SetValue(key string, val interface{}) error {
return cli.set.SetValue(key, val)
}
// SetDescription set the introducrtion of the set
func (cli *SetWrapper) SetDescription(intro string) error {
return cli.set.SetValue(fieldSetDesc, intro)
}
// SetMark set the mark of the set
func (cli *SetWrapper) SetMark(desc string) error {
return cli.set.SetValue(fieldDescription, desc)
}
// SetEnv set the env of the set
func (cli *SetWrapper) SetEnv(env string) error {
return cli.set.SetValue(fieldSetEnv, env)
}
// GetEnv get the env
func (cli *SetWrapper) GetEnv() (string, error) {
vals, err := cli.set.GetValues()
if nil != err {
return "", err
}
return vals.String(fieldSetEnv), nil
}
// SetServiceStatus sets the service status of the set.
func (cli *SetWrapper) SetServiceStatus(status string) error {
return cli.set.SetValue(fieldServiceStatus, status)
}
// GetServiceStatus returns the service status of the set.
func (cli *SetWrapper) GetServiceStatus() (string, error) {
vals, err := cli.set.GetValues()
if nil != err {
return "", err
}
return vals.String(fieldServiceStatus), nil
}
// SetCapacity sets the capacity of the set.
func (cli *SetWrapper) SetCapacity(capacity int64) error {
return cli.set.SetValue(fieldCapacity, capacity)
}
// GetCapacity returns the capacity of the set.
// NOTE(review): returns int although SetCapacity accepts int64; large
// values may not round-trip on 32-bit builds — confirm.
func (cli *SetWrapper) GetCapacity() (int, error) {
vals, err := cli.set.GetValues()
if nil != err {
return 0, err
}
return vals.Int(fieldCapacity)
}
// SetBusinessID sets the business id of the set; the business id is also
// recorded as the set's parent id via SetParent.
// NOTE(review): any result of cli.set.SetBusinessID is discarded here —
// only SetParent's error is propagated.
func (cli *SetWrapper) SetBusinessID(businessID int64) error {
if err := cli.SetParent(businessID); nil != err {
return err
}
cli.set.SetBusinessID(businessID)
return nil
}
// GetBusinessID returns the business id of the set.
func (cli *SetWrapper) GetBusinessID() (int64, error) {
vals, err := cli.set.GetValues()
if nil != err {
return 0, err
}
val, err := vals.Int(fieldBusinessID)
return int64(val), err
}
// SetSupplierAccount sets the supplier account code of the set.
func (cli *SetWrapper) SetSupplierAccount(supplierAccount string) error {
return cli.set.SetValue(fieldSupplierAccount, supplierAccount)
}
// GetSupplierAccount returns the supplier account code of the set.
func (cli *SetWrapper) GetSupplierAccount() (string, error) {
vals, err := cli.set.GetValues()
if nil != err {
return "", err
}
return vals.String(fieldSupplierAccount), nil
}
// GetID returns the set id.
// NOTE(review): unlike GetSetID below, this variant does not check that
// the field exists and reads it via Int64 directly.
func (cli *SetWrapper) GetID() (int64, error) {
vals, err := cli.set.GetValues()
if nil != err {
return 0, err
}
return vals.Int64(fieldSetID)
}
// SetParent sets the parent instance id of the set.
func (cli *SetWrapper) SetParent(parentInstID int64) error {
return cli.set.SetValue(fieldParentID, parentInstID)
}
// SetName sets the name of the set.
func (cli *SetWrapper) SetName(name string) error {
return cli.set.SetValue(fieldSetName, name)
}
// GetSetID returns the id for the set, failing when the field is absent.
func (cli *SetWrapper) GetSetID() (int64, error) {
vals, err := cli.set.GetValues()
if nil != err {
return 0, err
}
if !vals.Exists(fieldSetID) {
return 0, fmt.Errorf("the set id is not set")
}
val, err := vals.Int(fieldSetID)
return int64(val), err
}
// GetName returns the set name.
func (cli *SetWrapper) GetName() (string, error) {
vals, err := cli.set.GetValues()
if nil != err {
return "", err
}
return vals.String(fieldSetName), nil
}
// IsExists reports whether the set already exists in the backend.
func (cli *SetWrapper) IsExists() (bool, error) {
return cli.set.IsExists()
}
// Create performs only a create operation on the underlying set.
func (cli *SetWrapper) Create() error {
return cli.set.Create()
}
// Update performs only an update operation on the underlying set.
func (cli *SetWrapper) Update() error {
return cli.set.Update()
}
// Save persists the set via the underlying implementation.
func (cli *SetWrapper) Save() error {
return cli.set.Save()
}
// GetModel returns the model for the set.
func (cli *SetWrapper) GetModel() model.Model {
return cli.set.GetModel()
}
|
#!/bin/bash
# Shell-style boolean return codes: 0 means true/success, 1 means false.
declare -r TRUE=0
declare -r FALSE=1
die() {
# Print the given message to stderr and abort the whole script.
echo "$1" >&2; exit 1
}
# Return TRUE (0) when the effective user is root, FALSE (1) otherwise.
has_root_permission() {
    if [ "$(id -u)" -eq 0 ]; then
        return $TRUE
    else
        return $FALSE
    fi
}
# Return TRUE (0) when the given account exists in /etc/passwd.
# The pattern is anchored with ':' (the passwd field separator) so that
# "foo" no longer matches accounts like "foobar".
does_user_exit() {
    local username="$1"
    grep -q "^${username}:" /etc/passwd && return $TRUE || return $FALSE
}
# Return TRUE (0) when the given path resolves to a regular file.
# $given_path is quoted inside the command substitution so paths containing
# spaces no longer word-split and break realpath.
is_valid_file() {
    local given_path="$1"
    [ -f "$(realpath "$given_path")" ] && return $TRUE || return $FALSE
}
|
#!/bin/bash
# Add local user
# Either use the LOCAL_USER_ID if passed in at runtime or
# fallback
USER_ID=${LOCAL_USER_ID:-10001}
echo "Starting with UID : $USER_ID"
# Create the runtime user with the requested UID (-o allows a duplicate
# UID), then hand the container process over to it via gosu, preserving
# all arguments.
useradd --shell /bin/bash -u $USER_ID -o -c "" -m app
export HOME=/home/app
exec /usr/local/bin/gosu app "$@"
|
#! /bin/bash
# Start `serverless offline` in the background, wait until it reports
# "server ready", and record its PID in .offline.pid.
echo "Starting offline!"
TMPFILE=.offline$$.log
# Refuse to start twice.
if [ -f .offline.pid ]; then
  echo "Found file .offline.pid. Not starting."
  exit 1
fi
# Fix: the original used `2>1`, which redirected stderr to a file literally
# named "1"; `> $TMPFILE 2>&1` sends both streams to the log file.
npx serverless offline start > $TMPFILE 2>&1 &
# Fix: the original used $$ (this script's own PID); $! is the PID of the
# background job just launched, which is what the pid file must contain.
PID=$!
echo $PID > .offline.pid
while ! grep "server ready" $TMPFILE
do echo $TMPFILE && sleep 1; done
rm $TMPFILE
<reponame>moonthecoolest/amazon-connect-chat-ui-examples
var AWS = require('aws-sdk');
// Configure the SDK region from the Lambda environment before any client is built.
AWS.config.update({region: process.env.REGION});
var connect = new AWS.Connect();
// Lambda entry point: parses the API Gateway request body, starts a chat
// contact, and returns a CORS-enabled proxy response (200 on success,
// 500 on failure). Note both paths invoke callback with a null error so
// API Gateway always receives a well-formed proxy response.
exports.handler = (event, context, callback) => {
console.log("Received event: " + JSON.stringify(event));
var body = JSON.parse(event["body"]);
startChatContact(body).then((startChatResult) => {
callback(null, buildSuccessfulResponse(startChatResult));
}).catch((err) => {
console.log("caught error " + err);
callback(null, buildResponseFailed(err));
});
};
// Start a Connect chat contact. ContactFlowId and InstanceId are taken from
// the request body when present, otherwise from the CONTACT_FLOW_ID and
// INSTANCE_ID environment variables. Resolves with the StartChatContact
// response, rejects with the AWS error.
function startChatContact(body) {
var contactFlowId = "";
if(body.hasOwnProperty('ContactFlowId')){
contactFlowId = body["ContactFlowId"];
}
console.log("CF ID: " + contactFlowId);
var instanceId = "";
if(body.hasOwnProperty('InstanceId')){
instanceId = body["InstanceId"];
}
console.log("Instance ID: " + instanceId);
return new Promise(function (resolve, reject) {
var startChat = {
// Fall back to environment configuration when the body omitted the ids.
"InstanceId": instanceId == "" ? process.env.INSTANCE_ID : instanceId,
"ContactFlowId": contactFlowId == "" ? process.env.CONTACT_FLOW_ID : contactFlowId,
"Attributes": {
"customerName": body["ParticipantDetails"]["DisplayName"]
},
"ParticipantDetails": {
"DisplayName": body["ParticipantDetails"]["DisplayName"]
}
};
connect.startChatContact(startChat, function(err, data) {
if (err) {
console.log("Error starting the chat.");
console.log(err, err.stack);
reject(err);
} else {
console.log("Start chat succeeded with the response: " + JSON.stringify(data));
resolve(data);
}
});
});
}
// Wrap a successful StartChatContact result in a 200 API Gateway proxy
// response carrying the standard CORS headers.
function buildSuccessfulResponse(result) {
    const corsHeaders = {
        "Access-Control-Allow-Origin": "*",
        "Content-Type": "application/json",
        "Access-Control-Allow-Credentials": true,
        "Access-Control-Allow-Headers": "Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token"
    };
    const response = {
        statusCode: 200,
        headers: corsHeaders,
        body: JSON.stringify({ data: { startChatResult: result } })
    };
    console.log("RESPONSE" + JSON.stringify(response));
    return response;
}
// Wrap an error in a 500 API Gateway proxy response with the same CORS
// headers as the success path.
function buildResponseFailed(err) {
    const corsHeaders = {
        "Access-Control-Allow-Origin": "*",
        "Content-Type": "application/json",
        "Access-Control-Allow-Credentials": true,
        "Access-Control-Allow-Headers": "Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token"
    };
    return {
        statusCode: 500,
        headers: corsHeaders,
        body: JSON.stringify({ data: { "Error": err } })
    };
}
|
#!/bin/bash
# Provision CUDA 10.0 + cuDNN 7.6.5 build dependencies on a Travis CI
# worker (Ubuntu 16.04 on linux, Homebrew OpenCV on osx).
set -ex
if [ "$TRAVIS_OS_NAME" = "linux" ]; then
CUDA_REPO_PKG="cuda-repo-ubuntu1604_10.0.130-1_amd64.deb"
CUDA_PKG_VERSION="10-0"
CUDA_VERSION="10.0"
# Register NVIDIA's CUDA apt repository.
wget "https://developer.download.nvidia.com/compute/cuda/repos/ubuntu1604/x86_64/$CUDA_REPO_PKG"
sudo dpkg -i "$CUDA_REPO_PKG"
CUDNN_REPO_PKG="nvidia-machine-learning-repo-ubuntu1604_1.0.0-1_amd64.deb"
CUDNN_PKG_VERSION="7.6.5.32-1+cuda10.0"
# Register NVIDIA's machine-learning (cuDNN) apt repository.
wget "https://developer.download.nvidia.com/compute/machine-learning/repos/ubuntu1604/x86_64/$CUDNN_REPO_PKG"
sudo dpkg -i "$CUDNN_REPO_PKG"
sudo apt-get update
APT_INSTALL_CMD="sudo apt-get install -y --no-install-recommends --allow-unauthenticated"
# General build toolchain and library dependencies.
$APT_INSTALL_CMD \
cmake \
build-essential \
libopencv-dev \
libprotobuf-dev \
protobuf-compiler
# CUDA compiler/runtime plus matching cuDNN runtime and headers.
$APT_INSTALL_CMD \
"cuda-compiler-$CUDA_PKG_VERSION" \
"cuda-cudart-dev-$CUDA_PKG_VERSION" \
"cuda-cublas-dev-$CUDA_PKG_VERSION" \
"libcudnn7=$CUDNN_PKG_VERSION" \
"libcudnn7-dev=$CUDNN_PKG_VERSION"
# Point the conventional /usr/local/cuda path at the installed version.
sudo ln -sf /usr/local/cuda-$CUDA_VERSION /usr/local/cuda
elif [ "$TRAVIS_OS_NAME" = "osx" ]; then
brew update
brew install --force --ignore-dependencies opencv
fi
|
#!/usr/bin/env bash
# Shared helpers for the dokku test suite: bats-style assertion functions
# plus app-lifecycle and TLS fixtures.
# constants
DOKKU_ROOT=${DOKKU_ROOT:=~dokku}
TEST_APP=my-cool-guy-test-app
# test functions
# Fail an assertion: print the message given as arguments, or read it from
# stdin when called with no arguments, then return 1.
flunk() {
{ if [ "$#" -eq 0 ]; then cat -
else echo "$*"
fi
}
return 1
}
# Assert that the last `run` command succeeded ($status == 0); with an
# argument, also assert its output. $status and $output are globals set by
# the test framework (presumably bats) — confirm when porting.
assert_success() {
if [ "$status" -ne 0 ]; then
flunk "command failed with exit status $status"
elif [ "$#" -gt 0 ]; then
assert_output "$1"
fi
}
# Assert that the last `run` command failed (non-zero $status); with an
# argument, also assert its output.
assert_failure() {
if [ "$status" -eq 0 ]; then
flunk "expected failed exit status"
elif [ "$#" -gt 0 ]; then
assert_output "$1"
fi
}
# Assert two values are equal, flunking with an expected/actual report.
assert_equal() {
if [ "$1" != "$2" ]; then
{ echo "expected: $1"
echo "actual:   $2"
} | flunk
fi
}
# Assert that $output equals the given value, or the value read from stdin
# when no argument is supplied.
assert_output() {
local expected
if [ $# -eq 0 ]; then expected="$(cat -)"
else expected="$1"
fi
assert_equal "$expected" "$output"
}
# With a numeric first argument, assert that output line N equals $2;
# otherwise assert that some line of output equals $1. ${lines[@]} is the
# framework-provided array of output lines.
assert_line() {
if [ "$1" -ge 0 ] 2>/dev/null; then
assert_equal "$2" "${lines[$1]}"
else
local line
for line in "${lines[@]}"; do
if [ "$line" = "$1" ]; then return 0; fi
done
flunk "expected line \`$1'"
fi
}
# With a numeric first argument, flunk when output line N exists (i.e. the
# output has more than N lines); otherwise flunk when any output line
# equals $1.
refute_line() {
if [ "$1" -ge 0 ] 2>/dev/null; then
local num_lines="${#lines[@]}"
if [ "$1" -lt "$num_lines" ]; then
flunk "output has $num_lines lines"
fi
else
local line
for line in "${lines[@]}"; do
if [ "$line" = "$1" ]; then
flunk "expected to not find line \`$line'"
fi
done
fi
}
# Run the given command and flunk when it fails.
# Fix: the original used `"$*"`, which joins the command and all of its
# arguments into a single word — any invocation with arguments would try to
# execute a command literally named "cmd arg1 arg2". `"$@"` preserves each
# argument as a separate word.
assert() {
  if ! "$@"; then
    flunk "failed: $*"
  fi
}
# Assert that $status (from the last `run`) equals the given exit code.
assert_exit_status() {
assert_equal "$status" "$1"
}
# dokku functions
# Create the shared test application.
create_app() {
dokku apps:create $TEST_APP
}
# Destroy the test application; piping the name answers the confirm prompt.
destroy_app() {
echo $TEST_APP | dokku apps:destroy $TEST_APP
}
# Attach an extra domain to the test application.
add_domain() {
dokku domains:add $TEST_APP $1
}
# Deploy a sample app (default: nodejs-express): copy the fixture into a
# fresh temp dir, commit it, and push to the dokku remote; on push failure
# the app is torn down.
# NOTE(review): the mktemp template references $TARGET, which is not set in
# this file — presumably exported by the test runner; confirm.
deploy_app() {
APP_TYPE="$1"; APP_TYPE=${APP_TYPE:="nodejs-express"}
TMP=$(mktemp -d -t "$TARGET.XXXXX")
rmdir $TMP && cp -r ./tests/apps/$APP_TYPE $TMP
cd $TMP
git init
git config user.email "robot@example.com"
git config user.name "Test Robot"
git remote add target dokku@dokku.me:$TEST_APP
# Fixture repos ship ".gitignore" as "gitignore" so it survives packaging.
[[ -f gitignore ]] && mv gitignore .gitignore
git add .
git commit -m 'initial commit'
git push target master || destroy_app
}
# Prepare a local nodejs-express repo with one commit (no push), for tests
# that exercise the git client side only.
setup_client_repo() {
TMP=$(mktemp -d -t "$TARGET.XXXXX")
rmdir $TMP && cp -r ./tests/apps/nodejs-express $TMP
cd $TMP
git init
git config user.email "robot@example.com"
git config user.name "Test Robot"
# Fixture repos ship ".gitignore" as "gitignore" so it survives packaging.
[[ -f gitignore ]] && mv gitignore .gitignore
git add .
git commit -m 'initial commit'
}
# Unpack a server certificate fixture into the test app's tls directory.
setup_test_tls() {
TLS="/home/dokku/$TEST_APP/tls"
mkdir -p $TLS
tar xf $BATS_TEST_DIRNAME/server_ssl.tar -C $TLS
sudo chown -R dokku:dokku $TLS
}
# Same as setup_test_tls, but with a certificate carrying subjectAltNames.
setup_test_tls_with_sans() {
TLS="/home/dokku/$TEST_APP/tls"
mkdir -p $TLS
tar xf $BATS_TEST_DIRNAME/server_ssl_sans.tar -C $TLS
sudo chown -R dokku:dokku $TLS
}
# Install a wildcard certificate server-wide: unpack it, uncomment the
# ssl_certificate directives in the nginx config, and reload nginx.
setup_test_tls_wildcard() {
TLS="/home/dokku/tls"
mkdir -p $TLS
tar xf $BATS_TEST_DIRNAME/server_ssl_wildcard.tar -C $TLS
sudo chown -R dokku:dokku $TLS
sed -i -e "s:^# ssl_certificate $DOKKU_ROOT/tls/server.crt;:ssl_certificate $DOKKU_ROOT/tls/server.crt;:g" \
-e "s:^# ssl_certificate_key $DOKKU_ROOT/tls/server.key;:ssl_certificate_key $DOKKU_ROOT/tls/server.key;:g" /etc/nginx/conf.d/dokku.conf
kill -HUP "$(< /var/run/nginx.pid)"; sleep 5
}
# Undo setup_test_tls_wildcard: remove the certs, re-comment the ssl
# directives, and reload nginx.
disable_tls_wildcard() {
TLS="/home/dokku/tls"
rm -rf $TLS
sed -i -e "s:^ssl_certificate $DOKKU_ROOT/tls/server.crt;:# ssl_certificate $DOKKU_ROOT/tls/server.crt;:g" \
-e "s:^ssl_certificate_key $DOKKU_ROOT/tls/server.key;:# ssl_certificate_key $DOKKU_ROOT/tls/server.key;:g" /etc/nginx/conf.d/dokku.conf
kill -HUP "$(< /var/run/nginx.pid)"; sleep 5
}
|
<filename>app/src/main/java/com/qtimes/pavilion/base/mvp/MvpListView.java
package com.qtimes.pavilion.base.mvp;
import java.util.List;
/**
* 列表数据
* Created by liuj on 2016/6/20.
* 流列表
*/
/**
 * MVP view contract for a paged list screen.
 */
public interface MvpListView<T> extends MvpStatusView {
/**
 * Called when a page of list data has loaded successfully.
 *
 * @param dataList the loaded items
 * @param isReload whether this load restarted from the first page — generally true for the initial load
 */
void onLoadSuccess(List<T> dataList, boolean isReload);
/**
 * Called when loading fails.
 *
 * @param cache     cached items, if any (per the parameter name — verify with implementers)
 * @param throwable the cause of the failure
 * @param isReload  whether this load restarted from the first page
 */
void onLoadError(List<T> cache, Throwable throwable, boolean isReload);
/**
 * Tells the view whether more pages are available to load.
 *
 * @param hasMore true when another page can be fetched
 */
void setHasMore(boolean hasMore);
/**
 * Returns the number of items requested per page.
 *
 * @return the page size
 */
int getPageSize();
}
|
#!/usr/bin/env bash
# Vagrant provisioner: LAMP stack (PHP5/Apache2/MySQL 5.5) plus Composer,
# then project-specific Laravel setup for the pinoycubers app.
echo "--- Good morning, master. Let's get to work. Installing now. ---"
echo "--- Editing sources for raring ---"
#cp -R /vagrant/sources.list /etc/apt/sources.list
echo "--- Updating packages list ---"
sudo apt-get update
echo "--- MySQL time ---"
# Pre-seed the MySQL root password so the install is non-interactive.
sudo debconf-set-selections <<< 'mysql-server mysql-server/root_password password root'
sudo debconf-set-selections <<< 'mysql-server mysql-server/root_password_again password root'
echo "--- Installing base packages ---"
sudo apt-get install -y vim curl python-software-properties python g++ make
echo "--- We want the bleeding edge of PHP, right master? ---"
export LANG=C.UTF-8
sudo add-apt-repository -y ppa:ondrej/php5
echo "--- Updating packages list ---"
sudo apt-get update
echo "--- Installing PHP-specific packages ---"
sudo apt-get install -y php5 apache2 libapache2-mod-php5 php5-curl php5-gd php5-mcrypt mysql-server-5.5 php5-mysql git-core
sudo php5enmod mcrypt
echo "--- Installing and configuring Xdebug ---"
sudo apt-get install -y php5-xdebug
cat << EOF | sudo tee -a /etc/php5/mods-available/xdebug.ini
xdebug.scream=1
xdebug.cli_color=1
xdebug.show_local_vars=1
EOF
echo "--- Adding apache ppa ---"
#sudo add-apt-repository -y ppa:rhardy/apache24x
echo "--- Updating packages list ---"
sudo apt-get update
echo "--- Installing apache2 ---"
# NOTE(review): apache2 was already installed above with the PHP packages;
# this second install is a leftover from the commented-out ppa flow.
sudo apt-get install -y apache2
echo "--- Enabling mod-rewrite ---"
sudo a2enmod rewrite
echo "--- Setting document root ---"
sudo rm -rf /var/www/html
sudo ln -fs /vagrant /var/www/html
echo "--- What developer codes without errors turned on? Not you, master. ---"
sed -i "s/error_reporting = .*/error_reporting = E_ALL/" /etc/php5/apache2/php.ini
sed -i "s/display_errors = .*/display_errors = On/" /etc/php5/apache2/php.ini
sudo sed -i 's/AllowOverride None/AllowOverride All/' /etc/apache2/apache2.conf
sudo sed -i 's|DocumentRoot /var/www/html|DocumentRoot /var/www/html/public|g' /etc/apache2/sites-available/000-default.conf
echo "--- Restarting Apache ---"
sudo service apache2 restart
echo "--- Allow remote database access --"
sudo sed -i "s/^bind-address/#bind-address/" /etc/mysql/my.cnf
sudo service mysql restart
echo "--- Composer is the future. But you knew that, did you master? Nice job. ---"
curl -sS https://getcomposer.org/installer | php
sudo mv composer.phar /usr/local/bin/composer
# Laravel stuff here, if you want
dbName="pinoycubers"
dbUser="root"
dbPass="root"
mysql -u $dbUser -p$dbPass -Bse "CREATE DATABASE $dbName;"
# NOTE(review): granting root@'%' full privileges is acceptable for a
# throwaway local VM only — never reuse on a network-reachable host.
mysql --user=$dbUser --password=$dbPass << 'EOF'
GRANT ALL PRIVILEGES ON *.* TO 'root'@'%' IDENTIFIED BY 'root';
EOF
echo "--- Running site specific commands --"
cd /var/www/html
echo "--- Setting up configuration file --"
cp -fr .env.example .env
sed -i "s/^DB_PASSWORD=/DB_PASSWORD=root/" .env
composer install
php artisan migrate
php artisan db:seed
chown -R www-data:www-data storage
chmod 775 -R storage
sudo bash /vagrant/node.sh
<reponame>albanobattistella/PDF4Teachers
package fr.clementgre.pdf4teachers.utils.dialogs.alerts;
import fr.clementgre.pdf4teachers.utils.panes.PaneUtils;
import javafx.geometry.Insets;
import javafx.scene.control.Label;
import javafx.scene.control.Spinner;
import javafx.scene.layout.HBox;
/**
 * Confirmation dialog containing an editable double-valued spinner, with an
 * optional label displayed beside it.
 */
public class DoubleInputAlert extends CustomAlert{
// Optional label shown to the left of the spinner when details are given.
private final Label beforeText = new Label();
// Editable spinner bounded to [min, max] with the given step.
private final Spinner<Double> input;
/**
 * Builds the alert.
 *
 * @param min     minimum selectable value
 * @param max     maximum selectable value
 * @param val     initial value
 * @param step    increment/decrement step of the spinner
 * @param title   dialog title
 * @param header  dialog header text
 * @param details optional label text shown beside the spinner (may be null)
 */
public DoubleInputAlert(double min, double max, double val, double step, String title, String header, String details){
super(AlertType.CONFIRMATION, title, header, null);
input = new Spinner<>(min, max, val, step);
input.setEditable(true);
HBox box = new HBox();
box.setPadding(new Insets(15));
if(details != null){
beforeText.setText(details);
box.setSpacing(10);
PaneUtils.setHBoxPosition(beforeText, 0, 25, 0);
box.getChildren().addAll(beforeText, input);
}else{
box.getChildren().addAll(input);
}
/*StyleManager.putCustomStyle(getDialogPane(), "someDialogs.css");
if(StyleManager.DEFAULT_STYLE == Style.LIGHT) StyleManager.putCustomStyle(getDialogPane(), "someDialogs-light.css");
else StyleManager.putCustomStyle(getDialogPane(), "someDialogs-dark.css");*/
getDialogPane().setContent(box);
}
/**
 * Sets the spinner's current value.
 *
 * @param value the value to show
 */
public void setValue(double value){
input.getValueFactory().setValue(value);
}
/**
 * Returns the spinner's current value.
 *
 * @return the selected double
 */
public Double getValue(){
return input.getValue();
}
}
|
#!/bin/sh
# Fetch, build, and install third-party libraries under ./build.
mkdir build
cd build
# Clone, build (cmake + make), and install a git repository.
#   $1 — directory/library name, $2 — clone URL, $3 — optional ref to check out
# Fix: the original tested `[ -z "$3" ]`, which ran `git checkout "$3"` only
# when NO ref was given (checking out an empty string); `-n` runs the
# checkout exactly when a ref IS supplied.
install_git_repo () {
    if [ -d "./$1" ]
    then
        echo "Library $1 already installed"
    else
        git clone "$2" "$1"
        cd "$1"
        if [ -n "$3" ] # Was a ref to check out supplied?
        then
            git checkout "$3"
        fi
        mkdir build ; cd build
        cmake ..
        make -j$(nproc)
        sudo make install
        cd ../..
    fi
}
# nodeeditor pins no ref, so the repository's default branch is built.
install_git_repo "nodeeditor" "https://github.com/mico-corp/nodeeditor.git"
|
<reponame>eengineergz/Lambda
import React, { useState } from "react";
export default function SearchForm({ onSearch }) {
// STRETCH TODO: Add stateful logic for query/form data
return (
<section className="search-form">
<form onSubmit={() => onSearch(name)}>
<input
onChange={handleInputChange}
placeholder="name"
value={name}
name="name"
/>
<button type="submit">Search</button>
</form>
</section>
);
}
|
<reponame>holidahHM/my-react-app<gh_stars>0
/**
 * Regroups the elements of grouped arrays (lodash-style unzip).
 * NOTE(review): typed loosely as any[]; the implementation behind this
 * declaration is not visible here — confirm before tightening the types.
 */
export function unzip(...restArr: any[]): any[];
|
/*
* Copyright 2014-2016 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.common;
/**
* Common Kaa project Constants.
*/
public interface Constants { //NOSONAR
/**
 * Used URI delimiter.
 */
public static final String URI_DELIM = "/"; //NOSONAR
/**
 * HTTP response content-type.
 * NOTE(review): the value embeds literal double-quote characters around
 * application/x-kaa — presumably intentional for the wire format; confirm.
 */
public static final String RESPONSE_CONTENT_TYPE = "\"application/x-kaa\""; //NOSONAR
/**
 * HTTP response custom header carrying the RSA signature encoded in base64.
 */
public static final String SIGNATURE_HEADER_NAME = "X-SIGNATURE"; //NOSONAR
/**
 * The identifier for the Avro platform protocol.
 */
public static final int KAA_PLATFORM_PROTOCOL_AVRO_ID = 0xf291f2d4;
/**
 * The identifier for the Binary platform protocol.
 */
public static final int KAA_PLATFORM_PROTOCOL_BINARY_ID = 0x3553c66f;
/**
 * The size of an SDK token, in characters.
 */
public static final int SDK_TOKEN_SIZE = 27;
/**
 * The size of an application token, in characters.
 */
public static final int APP_TOKEN_SIZE = 20;
/**
 * The size of a user verifier token, in characters.
 */
public static final int USER_VERIFIER_TOKEN_SIZE = 20;
}
|
import path from 'path';
import process from 'process';
import type {Configuration} from 'webpack';
import nodeExternals from 'webpack-node-externals';
import {inspectLoader} from './inspectLoader';
/**
 * Options accepted by {@link commonWebpack}.
 */
interface CommonWebpackParams {
// Production builds minimize and emit a *.min.js with source maps.
isProduction: boolean;
// true targets 'web' (browser bundle); false targets 'node'.
isClient: boolean;
// Entry module, resolved relative to the src/ context.
entry: string;
// Base output filename; production builds get a .min.js suffix.
outputFile: string;
// Extra module rules appended after the built-in babel rules.
rules?: any[];
// When set, .tsx files are compiled with this JSX factory.
jsxFactory?: string;
// Module alias map forwarded to the babel preset.
aliasPaths?: {
[key: string]: string;
};
}
/**
 * Builds the shared webpack configuration used by both client and server
 * bundles: babel-compiled JS/TS, externalized node_modules, commonjs2
 * output under dist/.
 */
export function commonWebpack({isProduction, isClient, entry, outputFile, rules, jsxFactory, aliasPaths}: CommonWebpackParams): Configuration {
return {
entry,
mode: isProduction ? 'production' : 'development',
context: path.resolve(process.cwd(), 'src'),
target: isClient ? 'web' : 'node',
devtool: isProduction ? 'source-map' : 'inline-source-map',
// Keep dependencies out of the bundle; search several monorepo levels of
// node_modules so hoisted packages are also treated as externals.
externals: nodeExternals({
additionalModuleDirs: [
path.join('node_modules'),
path.join('..', 'node_modules'),
path.join('..', '..', 'node_modules'),
path.join('..', '..', '..', 'node_modules'),
],
}),
output: {
// Production output gets a ".min.js" suffix in place of the extension.
filename: isProduction ? `${path.basename(outputFile, path.extname(outputFile))}.min.js` : outputFile,
path: path.resolve(process.cwd(), 'dist'),
library: {
type: 'commonjs2',
},
},
resolve: {
// .tsx is only resolvable for client builds or when a JSX factory is set.
extensions: ['.js', '.mjs', '.ts', '.d.ts'].concat(isClient || jsxFactory ? ['.tsx'] : []), // .concat(jsxFactory ? ['.jsx'] : []),
},
optimization: {
minimize: isProduction,
// Leave process.env.NODE_ENV alone so the runtime environment decides.
nodeEnv: false,
},
module: {
rules: [
{
// test: jsxFactory ? /\.jsx?$/ : /\.js$/,
test: /\.js$/,
exclude: /node_modules/,
use: [
inspectLoader('BABEL'),
{
loader: 'babel-loader',
options: {
babelrc: false,
presets: [['@imperium/babel-preset-imperium', {client: isClient, alias: aliasPaths}]],
},
},
],
},
{
// TypeScript goes through the same preset with typescript enabled.
test: isClient || jsxFactory ? /\.tsx?$/ : /\.ts$/,
exclude: /node_modules/,
use: [
inspectLoader('BABEL-TS'),
{
loader: 'babel-loader',
options: {
babelrc: false,
presets: [['@imperium/babel-preset-imperium', {client: isClient, typescript: true, jsxFactory, alias: aliasPaths}]],
},
},
],
},
].concat(rules || []),
},
};
}
|
# IMOSH_PREDICATE declares the number of predicates.
# NOTE(review): despite the comment above, this line *unsets* the variable —
# presumably to reset state before the library re-declares it; confirm
# against the imosh documentation.
unset IMOSH_PREDICATE
|
import pandas as pd
def process_points(dataframe):
    """
    Convert a DataFrame of point coordinates into a list of (x, y) tuples.

    Args:
        dataframe: pandas DataFrame with columns 'x' and 'y' holding the
            x and y coordinates of the points.

    Returns:
        list: one (x, y) tuple per row, preserving row order.
    """
    return [(x, y) for x, y in zip(dataframe['x'], dataframe['y'])]
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package shopcar.entities;
import java.util.Objects;
import javax.persistence.Entity;
/**
*
* @author Aluno
*/
/**
 * Passenger vehicle entity: a {@link Veiculo} with a seat count.
 */
@Entity
public class VeiculoPassageiro extends Veiculo
{
    // Number of passenger seats; null until explicitly set.
    private Integer numAssentos;

    /**
     * @return the numAssentos
     */
    public Integer getNumAssentos()
    {
        return numAssentos;
    }

    /**
     * @param numAssentos the numAssentos to set
     */
    public void setNumAssentos(Integer numAssentos)
    {
        this.numAssentos = numAssentos;
    }

    @Override
    public int hashCode()
    {
        int hash = 7;
        hash = 83 * hash + Objects.hashCode(this.numAssentos);
        return hash;
    }

    /**
     * Equality is based on the exact runtime class and the seat count.
     */
    @Override
    public boolean equals(Object obj)
    {
        // Reflexive fast path, missing in the original implementation.
        if (this == obj)
        {
            return true;
        }
        if (obj == null)
        {
            return false;
        }
        if (getClass() != obj.getClass())
        {
            return false;
        }
        final VeiculoPassageiro other = (VeiculoPassageiro) obj;
        // Objects.equals handles null seat counts on either side.
        return Objects.equals(this.numAssentos, other.numAssentos);
    }
}
|
#!/bin/bash
# See "Flashing and Booting the Target Device" at
# https://docs.nvidia.com/jetson/l4t/index.html#page/Tegra%2520Linux%2520Driver%2520Package%2520Development%2520Guide%2Fflashing.html%23
cd `dirname $0`
cd Linux_for_Tegra
sudo ./flash.sh jetson-tx2 mmcblk0p1
|
#!/bin/sh
#
# Copyright (c) 2004-2005 The Trustees of Indiana University and Indiana
# University Research and Technology
# Corporation. All rights reserved.
# Copyright (c) 2004-2005 The University of Tennessee and The University
# of Tennessee Research Foundation. All rights
# reserved.
# Copyright (c) 2004-2005 High Performance Computing Center Stuttgart,
# University of Stuttgart. All rights reserved.
# Copyright (c) 2004-2005 The Regents of the University of California.
# All rights reserved.
# Copyright (c) 2018 Intel, Inc. All rights reserved.
# $COPYRIGHT$
#
# Additional copyrights may follow
#
# $HEADER$
#
CFILE=/tmp/pmix_atomic_$$.c
trap "/bin/rm -f $CFILE; exit 0" 0 1 2 15
echo Updating asm.s from atomic.h and timer.h using gcc
cat > $CFILE<<EOF
#include <stdlib.h>
#include <inttypes.h>
#define static
#define inline
#define PMIX_GCC_INLINE_ASSEMBLY 1
#include "atomic.h"
#include "timer.h"
EOF
gcc -O3 -I. -S $CFILE -o asm.s
|
<gh_stars>0
const cars: string[] = ['Ford', 'Audi'];
const cars2: Array<string> = ['Ford', 'Audi']
// Generic string позволяет вызывать autocomplete для data ниже разрешения промиса.
const promise = new Promise<string>((resolve) => {
setTimeout(() => {
resolve('Promise resolved')
}, 2000);
})
promise.then(data => {
console.info(data);
})
// Другая запись
const promise2: Promise<string> = new Promise((resolve) => {
setTimeout(() => {
resolve('Promise resolved')
}, 2000);
})
/*
function mergeObjects(a: object, b: object) {
return Object.assign({}, a, b)
}
const merged = mergeObjects({name: 'Max'}, {age: 26})
// будет ошибка при обращении к ключам merged, так как у него нет типа
// console.info(merged.name)
*/
function mergeObjects<T, R>(a: T, b: R) {
return Object.assign({}, a, b)
}
const merged = mergeObjects({name: 'Max'}, {age: 26})
const merged2 = mergeObjects({model: 'Ford'}, {year: 2010})
console.info(merged.name, merged.age);
// но в mergeObjects мы можем передавать строки и это будет ошибка
// для исправления этого мы используем constraints - ограничения в дженериках
function mergeObjectsConstraints<T extends object, R extends object>(a: T, b: R) {
return Object.assign({}, a, b)
}
/// еще пример работы с дженериками
interface ILength {
length: number
}
function withCount<T extends ILength>(value: T): {value: T, count: string} {
return {
value,
count: `В этом объекте ${value.length} символов`
}
}
console.info(withCount('Hello Typescript'))
console.info(withCount(['Hello', 'TypeScript']))
// console.info(withCount(20))
/// еще пример с дженериками
function getObjectValue<T extends object, R extends keyof T>(obj: T, key: R) {
return obj[key]
}
const person = {
name: 'TypeScript',
age: 26
}
console.info(getObjectValue(person, 'name'));
console.info(getObjectValue(person, 'age'));
// ошибка по ключам через дженерики и констрейты
//console.info(getObjectValue(person, 'job'));
class Collection<T extends number | string | boolean> {
constructor(private _items: T[] = []) {}
add(item: T) {
this._items.push(item)
}
remove(item: T) {
this._items = this._items.filter(i => i !== item)
}
get items(): T[] {
return this._items
}
}
const strings = new Collection<string>(['I', 'Am', 'Strings'])
strings.add('!');
strings.remove('Am')
console.info(strings.items)
const numbers = new Collection<number>([1, 2, 3])
numbers.add(4);
numbers.remove(3)
console.info(strings.items)
// ошибка, так как констрейт указывает только на number, string, boolean
/* const objects = new Collection([{a: 1}, {b: 2}])
objects.add(4);
objects.remove(3)
console.info(strings.items) */
interface Car {
model: string
year: number
}
function createAndValidateCar(model: string, year: number):Car {
const car:Partial<Car> = {}
if (model.length > 3) {
car.model = model;
}
if (year > 2000) {
car.year = year;
}
return car as Car;
}
console.info(createAndValidateCar('mercedes', 2018))
console.info(createAndValidateCar('bmw', 1995))
const rcars: Readonly<Array<string>> = ['Ford', 'Audi'];
// rcars.shift();
const ford:Readonly<Car> = {
model: 'Ford',
year: 2020
}
ford.model = 'Ferrari' |
import { Button, Flex, useColorModeValue } from '@chakra-ui/react'
import { navigate } from '@reach/router'
import React from 'react'
import Container from '~components/common/container'
import Layout from '~components/common/layout'
import SEO from '~components/common/seo'
import PuzzleAnimation from '~components/index/puzzle-animation'
const NotFoundPage: React.FC = () => (
<Layout>
<SEO title="404" robots="noindex, nofollow" />
<Container>
<PuzzleAnimation text="Page not found!" />
<Flex justifyContent="center" mt={12}>
<Button onClick={() => navigate(-1)} colorScheme={useColorModeValue('blue', 'yellow')}>
Go back
</Button>
</Flex>
</Container>
</Layout>
)
export default NotFoundPage
|
#!/bin/bash
export CPATH=${PREFIX}/include
export LD_LIBRARY_PATH=${PREFIX}/lib
export LDFLAGS="-L${PREFIX}/lib"
./bootstrap
./configure --prefix=${PREFIX}
make
make install
|
"""
Construct a code that takes text input and replaces all the vowels with "*".
"""
def replace_vowels(text):
vowels = 'aeiou'
new_text = ''
for char in text.lower():
if char in vowels:
new_text += '*'
else:
new_text += char
return new_text
if __name__ == '__main__':
text = 'The quick brown fox'
print(replace_vowels(text)) |
/*
* Copyright (c) 2002 TwelveMonkeys.
* All rights reserved.
*
* $Log: JspFragment.java,v $
* Revision 1.2 2003/10/06 14:25:36 WMHAKUR
* Code clean-up only.
*
* Revision 1.1 2002/10/18 14:02:16 WMHAKUR
* Moved to com.twelvemonkeys.servlet.jsp.droplet
*
*
*/
package com.twelvemonkeys.servlet.jsp.droplet;
import javax.servlet.ServletException;
import javax.servlet.jsp.PageContext;
import java.io.IOException;
/**
* Interface for JSP sub pages or page fragments to implement.
*
* @author <a href="mailto:<EMAIL>"><NAME></a>
* @author last modified by $Author: haku $
*
* @version $Revision: #1 $, ($Date: 2008/05/05 $)
*/
public interface JspFragment {
/**
* Services a sub page or a page fragment inside another page
* (or PageContext).
*
* @param pContext the PageContext that is used to render the subpage.
*
* @throws ServletException if an exception occurs that interferes with the
* subpage's normal operation
* @throws IOException if an input or output exception occurs
*/
public void service(PageContext pContext) throws ServletException, IOException;
}
|
<gh_stars>1-10
//
// UIAlertController+Block.h
// MeiYiQuan
//
// Created by 任强宾 on 16/10/14.
// Copyright © 2016年 任强宾. All rights reserved.
//
#import <UIKit/UIKit.h>
@protocol AlertTitleAble <NSObject>
- (NSString *)alertTitle;
@end
@interface UIAlertController (Block)
+ (UIAlertController *)qb_showAlertWithTitle:(NSString *)title message:(NSString *)message cancelTitle:(NSString *)cancelTitle otherTitles:(NSArray *)otherTitles handler:(void (^)(UIAlertController *alertController, NSInteger buttonIndex))block vc:(UIViewController *)vc;
+ (UIAlertController *)qb_showSheetWithTitle:(NSString *)title message:(NSString *)message cancelTitle:(NSString *)cancelTitle otherTitles:(NSArray *)otherTitles handler:(void (^)(UIAlertController *alertController, NSInteger buttonIndex))block vc:(UIViewController *)vc;
@end
|
sudo apt install mysql-server
sudo mysql_secure_installation
|
import torch
import torch.nn as nn
class EntityEmbedding(nn.Module):
def __init__(self, hidden_dim, gamma, epsilon, double_entity_embedding):
super(EntityEmbedding, self).__init__()
self.gamma = gamma
self.epsilon = epsilon
self.embedding_range = nn.Parameter(
torch.Tensor([(self.gamma.item() + self.epsilon) / hidden_dim]),
requires_grad=False
)
self.entity_dim = hidden_dim * 2 if double_entity_embedding else hidden_dim
def initialize_embedding_range(self, gamma, epsilon, hidden_dim):
self.embedding_range = nn.Parameter(
torch.Tensor([(gamma + epsilon) / hidden_dim]),
requires_grad=False
)
def calculate_entity_dimension(self, double_entity_embedding):
self.entity_dim = hidden_dim * 2 if double_entity_embedding else hidden_dim |
#!/bin/bash
##########
# Custom servercow.de DNS API v1 for use with [acme.sh](https://github.com/Neilpang/acme.sh)
#
# Usage:
# export SERVERCOW_API_Username=username
# export SERVERCOW_API_Password=password
# acme.sh --issue -d example.com --dns dns_servercow
#
# Issues:
# Any issues / questions / suggestions can be posted here:
# https://github.com/jhartlep/servercow-dns-api/issues
#
# Author: Jens Hartlep
##########
SERVERCOW_API="https://api.servercow.de/dns/v1/domains"
# Usage dns_servercow_add _acme-challenge.www.domain.com "abcdefghijklmnopqrstuvwxyz"
dns_servercow_add() {
fulldomain=$1
txtvalue=$2
_info "Using servercow"
_debug fulldomain "$fulldomain"
_debug txtvalue "$txtvalue"
SERVERCOW_API_Username="${SERVERCOW_API_Username:-$(_readaccountconf_mutable SERVERCOW_API_Username)}"
SERVERCOW_API_Password="${SERVERCOW_API_Password:-$(_readaccountconf_mutable SERVERCOW_API_Password)}"
if [ -z "$SERVERCOW_API_Username" ] || [ -z "$SERVERCOW_API_Password" ]; then
SERVERCOW_API_Username=""
SERVERCOW_API_Password=""
_err "You don't specify servercow api username and password yet."
_err "Please create your username and password and try again."
return 1
fi
# save the credentials to the account conf file
_saveaccountconf_mutable SERVERCOW_API_Username "$SERVERCOW_API_Username"
_saveaccountconf_mutable SERVERCOW_API_Password "$SERVERCOW_API_Password"
_debug "First detect the root zone"
if ! _get_root "$fulldomain"; then
_err "invalid domain"
return 1
fi
_debug _sub_domain "$_sub_domain"
_debug _domain "$_domain"
if _servercow_api POST "$_domain" "{\"type\":\"TXT\",\"name\":\"$fulldomain\",\"content\":\"$txtvalue\",\"ttl\":20}"; then
if printf -- "%s" "$response" | grep "ok" >/dev/null; then
_info "Added, OK"
return 0
else
_err "add txt record error."
return 1
fi
fi
_err "add txt record error."
return 1
}
# Usage fulldomain txtvalue
# Remove the txt record after validation
dns_servercow_rm() {
fulldomain=$1
txtvalue=$2
_info "Using servercow"
_debug fulldomain "$fulldomain"
_debug txtvalue "$fulldomain"
SERVERCOW_API_Username="${SERVERCOW_API_Username:-$(_readaccountconf_mutable SERVERCOW_API_Username)}"
SERVERCOW_API_Password="${SERVERCOW_API_Password:-$(_readaccountconf_mutable SERVERCOW_API_Password)}"
if [ -z "$SERVERCOW_API_Username" ] || [ -z "$SERVERCOW_API_Password" ]; then
SERVERCOW_API_Username=""
SERVERCOW_API_Password=""
_err "You don't specify servercow api username and password yet."
_err "Please create your username and password and try again."
return 1
fi
_debug "First detect the root zone"
if ! _get_root "$fulldomain"; then
_err "invalid domain"
return 1
fi
_debug _sub_domain "$_sub_domain"
_debug _domain "$_domain"
if _servercow_api DELETE "$_domain" "{\"type\":\"TXT\",\"name\":\"$fulldomain\"}"; then
if printf -- "%s" "$response" | grep "ok" >/dev/null; then
_info "Deleted, OK"
_contains "$response" '"message":"ok"'
else
_err "delete txt record error."
return 1
fi
fi
}
#################### Private functions below ##################################
# _acme-challenge.www.domain.com
# returns
# _sub_domain=_acme-challenge.www
# _domain=domain.com
_get_root() {
fulldomain=$1
i=2
p=1
while true; do
_domain=$(printf "%s" "$fulldomain" | cut -d . -f $i-100)
_debug _domain "$_domain"
if [ -z "$_domain" ]; then
# not valid
return 1
fi
if ! _servercow_api GET "$_domain"; then
return 1
fi
if ! _contains "$response" '"error":"no such domain in user context"' >/dev/null; then
_sub_domain=$(printf "%s" "$fulldomain" | cut -d . -f 1-$p)
if [ -z "$_sub_domain" ]; then
# not valid
return 1
fi
return 0
fi
p=$i
i=$(_math "$i" + 1)
done
return 1
}
_servercow_api() {
method=$1
domain=$2
data="$3"
export _H1="Content-Type: application/json"
export _H2="X-Auth-Username: $SERVERCOW_API_Username"
export _H3="X-Auth-Password: $SERVERCOW_API_Password"
if [ "$method" != "GET" ]; then
_debug data "$data"
response="$(_post "$data" "$SERVERCOW_API/$domain" "" "$method")"
else
response="$(_get "$SERVERCOW_API/$domain")"
fi
if [ "$?" != "0" ]; then
_err "error $domain"
return 1
fi
_debug2 response "$response"
return 0
}
|
const fetch = require('node-fetch');
/**
* Function to fetch details of a service order for the equipment
* @param {string} accessToken - valid access token
* @param {string} equipmentId - ID of the equipment for which service order details are to be fetched
* @returns {Promise<object>} - A promise that resolves to the details of the service order
*/
async function fetchServiceOrderDetails(accessToken, equipmentId) {
try {
const response = await fetch(`https://api.example.com/service-orders/${equipmentId}`, {
headers: {
Authorization: `Bearer ${accessToken}`
}
});
if (!response.ok) {
throw new Error(`Failed to fetch service order details: ${response.status} ${response.statusText}`);
}
const serviceOrderDetails = await response.json();
return serviceOrderDetails;
} catch (error) {
throw new Error(`Failed to fetch service order details: ${error.message}`);
}
} |
def is_anagram(str1, str2):
n1 = len(str1)
n2 = len(str2)
if n1 != n2:
return 0
str1 = sorted(str1)
str2 = sorted(str2)
for i in range(0, n1):
if str1[i] != str2[i]:
return 0
return 1 |
package io.opensphere.core.common.shapefile.shapes;
public interface MeasureMinMax
{
public double getMeasurementMin();
public double getMeasurementMax();
}
|
rm _nbt.pyd
py.test test/nbt_test.py
python setup.py build_ext --inplace
py.test test/nbt_test.py
|
#!/bin/sh -e
# Options passed to this script are passed on to pip-compile. Therefore,
# dependencies can be upgraded to their latest version with:
#
# ./requirements/update.sh -U
export CUSTOM_COMPILE_COMMAND=./requirements/update.sh
python -m piptools compile --allow-unsafe \
--output-file=requirements/production.txt "$@"
python -m piptools compile --allow-unsafe requirements/test.in "$@"
|
#!/bin/sh
NAME=los
VERSION=0.1-1
echo building $NAME $VERSION
luarocks make $NAME-$VERSION.rockspec
luarocks pack $NAME $VERSION
|
#!/usr/bin/env bash
set -e
set -u
set -o pipefail
SCRIPTPATH="$( cd "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
SOURCEPATH="${SCRIPTPATH}/../../.lib/conf.sh"
BINARY="${SCRIPTPATH}/../../../bin/pwncat"
# shellcheck disable=SC1090
source "${SOURCEPATH}"
# -------------------------------------------------------------------------------------------------
# GLOBALS
# -------------------------------------------------------------------------------------------------
RHOST="${1:-localhost}"
RPORT="${2:-4444}"
STARTUP_WAIT="${3:-4}"
RUNS="${4:-1}"
PYTHON="python${5:-}"
PYVER="$( "${PYTHON}" -V 2>&1 | head -1 || true )"
# -------------------------------------------------------------------------------------------------
# TEST FUNCTIONS
# -------------------------------------------------------------------------------------------------
print_test_case "${PYVER}"
run_test() {
local srv_opts="${1// / }"
local cli_opts="${2// / }"
local curr_mutation="${3}"
local total_mutation="${4}"
local curr_round="${5}"
local total_round="${6}"
local data=
local data_or=
print_h1 "[ROUND: ${curr_round}/${total_round}] (mutation: ${curr_mutation}/${total_mutation}) Starting Test Round (srv '${srv_opts}' vs cli '${cli_opts}')"
run "sleep 1"
###
### Create data and files
###
data="abcdefghijklmnopqrstuvwxyz1234567890\\n"
data_or="abcdefghijklmnopqrstuvwxyz1234567890\\r\\n"
srv_stdout="$(tmp_file)"
srv_stderr="$(tmp_file)"
cli1_stdout="$(tmp_file)"
cli1_stderr="$(tmp_file)"
cli2_stdout="$(tmp_file)"
cli2_stderr="$(tmp_file)"
cli3_stdout="$(tmp_file)"
cli3_stderr="$(tmp_file)"
###
###
### Initial Server Start
###
###
# --------------------------------------------------------------------------------
# START: SERVER
# --------------------------------------------------------------------------------
print_h2 "(1/13) Start: Server"
# Start Server
print_info "Start Server"
# shellcheck disable=SC2086
if ! srv_pid="$( run_bg "printf ${data}" "${PYTHON}" "${BINARY}" ${srv_opts} "${srv_stdout}" "${srv_stderr}" )"; then
printf ""
fi
# Wait until Server is up
run "sleep ${STARTUP_WAIT}"
# [SERVER] Ensure Server is running
test_case_instance_is_running "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}"
# [SERVER] Ensure Server has no errors
test_case_instance_has_no_errors "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}"
###
###
### ROUND-1 (SERVER SEND)
###
###
# --------------------------------------------------------------------------------
# [ROUND-1: SERVER SEND] START: CLIENT
# --------------------------------------------------------------------------------
print_h2 "(2/13) Start: Client (round 1)"
# Start Client
print_info "Start Client"
# shellcheck disable=SC2086
if ! cli_pid="$( run_bg "" "${PYTHON}" "${BINARY}" ${cli_opts} "${cli1_stdout}" "${cli1_stderr}" )"; then
printf ""
fi
# Wait until Client is up
run "sleep ${STARTUP_WAIT}"
# [CLIENT] Ensure Client is running
test_case_instance_is_running "Client" "${cli_pid}" "${cli1_stdout}" "${cli1_stderr}" "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}"
# [CLIENT] Ensure Client has no errors
test_case_instance_has_no_errors "Client" "${cli_pid}" "${cli1_stdout}" "${cli1_stderr}" "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}"
# [SERVER] Ensure Server is still is running
test_case_instance_is_running "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}" "Client" "${cli_pid}" "${cli1_stdout}" "${cli1_stderr}"
# [SERVER] Ensure Server still has no errors
test_case_instance_has_no_errors "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}" "Client" "${cli_pid}" "${cli1_stdout}" "${cli1_stderr}"
# --------------------------------------------------------------------------------
# [ROUND-1: SERVER SEND] DATA TRANSFER
# --------------------------------------------------------------------------------
print_h2 "(3/13) Transfer: Server -> Client (round 1)"
# [SERVER -> CLIENT]
wait_for_data_transferred "" "${data}" "${data_or}" "Client" "${cli_pid}" "${cli1_stdout}" "${cli1_stderr}" "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}"
# --------------------------------------------------------------------------------
# [ROUND-1: SERVER SEND] STOP: CLIENT
# --------------------------------------------------------------------------------
print_h2 "(4/13) Stop: Client (round 1)"
# [CLIENT] Manually stop the Client
action_stop_instance "Client" "${cli_pid}" "${cli1_stdout}" "${cli1_stderr}" "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}"
# [CLIENT] Ensure Client still has no errors
test_case_instance_has_no_errors "Client" "${cli_pid}" "${cli1_stdout}" "${cli1_stderr}" "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}"
# --------------------------------------------------------------------------------
# [ROUND-1: SERVER SEND] TEST: Server stays alive
# --------------------------------------------------------------------------------
print_h2 "(5/13) Test: Server stays alive (round 1)"
run "sleep 2"
# [SERVER] Ensure Server has no errors
test_case_instance_has_no_errors "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}" "Client" "${cli_pid}" "${cli1_stdout}" "${cli1_stderr}"
# [SERVER] Ensure Server is still running
test_case_instance_is_running "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}" "Client" "${cli_pid}" "${cli1_stdout}" "${cli1_stderr}"
###
###
### ROUND-2 (NO SEND)
###
###
# --------------------------------------------------------------------------------
# [ROUND-2: NO SEND] START: CLIENT
# --------------------------------------------------------------------------------
print_h2 "(6/13) Start: Client (round 2)"
# Start Client
print_info "Start Client"
# shellcheck disable=SC2086
if ! cli_pid="$( run_bg "" "${PYTHON}" "${BINARY}" ${cli_opts} "${cli2_stdout}" "${cli2_stderr}" )"; then
printf ""
fi
# Wait until Client is up
run "sleep ${STARTUP_WAIT}"
# [CLIENT] Ensure Client is running
test_case_instance_is_running "Client" "${cli_pid}" "${cli2_stdout}" "${cli2_stderr}" "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}"
# [CLIENT] Ensure Client has no errors
test_case_instance_has_no_errors "Client" "${cli_pid}" "${cli2_stdout}" "${cli2_stderr}" "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}"
# [SERVER] Ensure Server is still is running
test_case_instance_is_running "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}" "Client" "${cli_pid}" "${cli2_stdout}" "${cli2_stderr}"
# [SERVER] Ensure Server still has no errors
test_case_instance_has_no_errors "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}" "Client" "${cli_pid}" "${cli2_stdout}" "${cli2_stderr}"
# --------------------------------------------------------------------------------
# [ROUND-2: NO SEND] STOP: CLIENT
# --------------------------------------------------------------------------------
print_h2 "(7/13) Stop: Client (round 2)"
# [CLIENT] Manually stop the Client
action_stop_instance "Client" "${cli_pid}" "${cli2_stdout}" "${cli2_stderr}" "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}"
# [CLIENT] Ensure Client still has no errors
test_case_instance_has_no_errors "Client" "${cli_pid}" "${cli2_stdout}" "${cli2_stderr}" "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}"
# --------------------------------------------------------------------------------
# [ROUND-2: NO SEND] TEST: Server stays alive
# --------------------------------------------------------------------------------
print_h2 "(8/13) Test: Server stays alive (round 2)"
run "sleep 2"
# [SERVER] Ensure Server has no errors
test_case_instance_has_no_errors "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}" "Client" "${cli_pid}" "${cli2_stdout}" "${cli2_stderr}"
# [SERVER] Ensure Server is still running
test_case_instance_is_running "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}" "Client" "${cli_pid}" "${cli2_stdout}" "${cli2_stderr}"
###
###
### ROUND-3 (SEND)
###
###
# --------------------------------------------------------------------------------
# [ROUND-3: SEND] START: CLIENT
# --------------------------------------------------------------------------------
print_h2 "(9/13) Start: Client (round 3)"
# Start Client
print_info "Start Client"
# shellcheck disable=SC2086
if ! cli_pid="$( run_bg "printf ${data}" "${PYTHON}" "${BINARY}" ${cli_opts} "${cli3_stdout}" "${cli3_stderr}" )"; then
printf ""
fi
# Wait until Client is up
run "sleep ${STARTUP_WAIT}"
# [CLIENT] Ensure Client is running
test_case_instance_is_running "Client" "${cli_pid}" "${cli3_stdout}" "${cli3_stderr}" "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}"
# [CLIENT] Ensure Client has no errors
test_case_instance_has_no_errors "Client" "${cli_pid}" "${cli3_stdout}" "${cli3_stderr}" "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}"
# [SERVER] Ensure Server is still is running
test_case_instance_is_running "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}" "Client" "${cli_pid}" "${cli3_stdout}" "${cli3_stderr}"
# [SERVER] Ensure Server still has no errors
test_case_instance_has_no_errors "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}" "Client" "${cli_pid}" "${cli3_stdout}" "${cli3_stderr}"
# --------------------------------------------------------------------------------
# [ROUND-3: SEND] DATA TRANSFER
# --------------------------------------------------------------------------------
print_h2 "(10/13) Transfer: Client -> Server (round 3)"
# [CLIENT -> SERVER]
wait_for_data_transferred "" "${data}" "${data_or}" "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}" "Client" "${cli_pid}" "${cli3_stdout}" "${cli3_stderr}"
# --------------------------------------------------------------------------------
# [ROUND-3: SEND] STOP: CLIENT
# --------------------------------------------------------------------------------
print_h2 "(11/13) Stop: Client (round 3)"
# [CLIENT] Manually stop the Client
action_stop_instance "Client" "${cli_pid}" "${cli3_stdout}" "${cli3_stderr}" "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}"
# [CLIENT] Ensure Client still has no errors
test_case_instance_has_no_errors "Client" "${cli_pid}" "${cli3_stdout}" "${cli3_stderr}" "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}"
# --------------------------------------------------------------------------------
# [ROUND-3: SEND] TEST: Server stays alive
# --------------------------------------------------------------------------------
print_h2 "(12/13) Test: Server stays alive (round 3)"
run "sleep 2"
# [SERVER] Ensure Server has no errors
test_case_instance_has_no_errors "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}" "Client" "${cli_pid}" "${cli3_stdout}" "${cli3_stderr}"
# [SERVER] Ensure Server is still running
test_case_instance_is_running "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}" "Client" "${cli_pid}" "${cli3_stdout}" "${cli3_stderr}"
###
###
### Final Server Shutdown
###
###
# --------------------------------------------------------------------------------
# STOP: SERVER
# --------------------------------------------------------------------------------
print_h2 "(13/13) Stop: Server"
# [SERVER] Manually stop the Server
action_stop_instance "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}" "Client" "${cli_pid}" "${cli3_stdout}" "${cli3_stderr}"
# [SERVER] Ensure Server has no errors
test_case_instance_has_no_errors "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}" "Client" "${cli_pid}" "${cli3_stdout}" "${cli3_stderr}"
}
# -------------------------------------------------------------------------------------------------
# MAIN ENTRYPOINT
# -------------------------------------------------------------------------------------------------
for curr_round in $(seq "${RUNS}"); do
# server opts client opts
# BIND ON ANY
run_test "-l ${RPORT} --no-shutdown --keep-open -vvvv" "${RHOST} ${RPORT} --no-shutdown -vvvv" "1" "14" "${curr_round}" "${RUNS}"
run_test "-l ${RPORT} --no-shutdown --keep-open -vvvv" "${RHOST} ${RPORT} --no-shutdown -4 -vvvv" "2" "14" "${curr_round}" "${RUNS}"
run_test "-l ${RPORT} --no-shutdown --keep-open -vvvv" "${RHOST} ${RPORT} --no-shutdown -6 -vvvv" "3" "14" "${curr_round}" "${RUNS}"
run_test "-l ${RPORT} --no-shutdown -4 --keep-open -vvvv" "${RHOST} ${RPORT} --no-shutdown -vvvv" "4" "14" "${curr_round}" "${RUNS}"
run_test "-l ${RPORT} --no-shutdown -4 --keep-open -vvvv" "${RHOST} ${RPORT} --no-shutdown -4 -vvvv" "5" "14" "${curr_round}" "${RUNS}"
run_test "-l ${RPORT} --no-shutdown -6 --keep-open -vvvv" "${RHOST} ${RPORT} --no-shutdown -vvvv" "6" "14" "${curr_round}" "${RUNS}"
run_test "-l ${RPORT} --no-shutdown -6 --keep-open -vvvv" "${RHOST} ${RPORT} --no-shutdown -6 -vvvv" "7" "14" "${curr_round}" "${RUNS}"
# BIND ON SPECIFIC
run_test "-l ${RHOST} ${RPORT} --no-shutdown --keep-open -vvvv" "${RHOST} ${RPORT} --no-shutdown -vvvv" "8" "14" "${curr_round}" "${RUNS}"
run_test "-l ${RHOST} ${RPORT} --no-shutdown --keep-open -vvvv" "${RHOST} ${RPORT} --no-shutdown -4 -vvvv" "9" "14" "${curr_round}" "${RUNS}"
run_test "-l ${RHOST} ${RPORT} --no-shutdown --keep-open -vvvv" "${RHOST} ${RPORT} --no-shutdown -6 -vvvv" "10" "14" "${curr_round}" "${RUNS}"
run_test "-l ${RHOST} ${RPORT} --no-shutdown -4 --keep-open -vvvv" "${RHOST} ${RPORT} --no-shutdown -vvvv" "11" "14" "${curr_round}" "${RUNS}"
run_test "-l ${RHOST} ${RPORT} --no-shutdown -4 --keep-open -vvvv" "${RHOST} ${RPORT} --no-shutdown -4 -vvvv" "12" "14" "${curr_round}" "${RUNS}"
run_test "-l ${RHOST} ${RPORT} --no-shutdown -6 --keep-open -vvvv" "${RHOST} ${RPORT} --no-shutdown -vvvv" "13" "14" "${curr_round}" "${RUNS}"
run_test "-l ${RHOST} ${RPORT} --no-shutdown -6 --keep-open -vvvv" "${RHOST} ${RPORT} --no-shutdown -6 -vvvv" "14" "14" "${curr_round}" "${RUNS}"
done
|
const fxy = require('fxy')
const template = require('./template')
const script_folders = {
modules:['poly']
}
const script_names = {
modules:{
poly:[
'lodash',
'fxy',
'manifest',
'element'
]
}
}
module.exports = function(polyball){
//const logic = fxy.join(polyball.folder,polyball.data.modules || 'modules')
return new Promise((success,error)=>{
return create_folders(polyball).then(()=>create_scripts(polyball)).then(success).catch(error)
})
}
function create_folder(polyball,name){
const path = fxy.join(polyball.folder,polyball.data[name] || name)
return new Promise((success,error)=>{
if(fxy.exists(path) !== true) {
return fxy.make_dir
.promise(path)
.then(success)
.catch(error)
}
return success()
})
}
function create_folders(polyball){
//const modules = fxy.join(polyball.folder,polyball.data.modules || 'modules')
return new Promise((success,error)=>{
let promises = []
for(let folder in script_folders) promises.push(create_folder(polyball,folder))
return Promise.all(promises).then(()=>{
let inners = []
for(let i in script_folders) {
let list = script_folders[i]
for(let name of list){
let file_path = fxy.join(i,name)
inners.push(create_folder(polyball,file_path))
}
}
return Promise.all(inners).then(success)
}).catch(error)
})
}
function write_script(polyball,folder,folder_file,file_value){
const modules = 'modules' in polyball.data ? polyball.data.modules:'modules'
const filepath = fxy.join(modules,folder,`${folder_file}.js`)
const path = fxy.join(polyball.folder,filepath)
try{
fxy.writeFileSync(path,file_value,'utf8')
console.log(`Created script: ${path}`)
}catch(e){
console.error(e)
return false
}
return true
}
function create_scripts(polyball){
let saves = []
for(let folder in script_names){
for(let template_name in script_names[folder]){
for(let template_file of script_names[folder][template_name]){
let value = get_script(polyball,template_name,template_file)
if(value){
let saved = write_script(polyball,template_name,template_file,value)
if(saved) saves.push(template_file)
}
}
}
}
return saves
}
function get_script(polyball,template_name,template_file){
if(template_name in template){
if(template_file in template[template_name]){
return template[template_name][template_file](polyball.data)
}
}
return null
}
|
import React from "react";
export default function PaymentDetailsHeader() {
return (
<div className="payment__details__container__entry__content__header">
<div className="payment__details__container__entry__content__header__entry">
Receipts
</div>
<div className="payment__details__container__entry__content__header__entry">
Status
</div>
<div className="payment__details__container__entry__content__header__entry">
Total
</div>
<div className="payment__details__container__entry__content__header__entry">
Date of Invoice
</div>
<div className="payment__details__container__entry__content__header__entry">
Date of Payment
</div>
<div className="payment__details__container__entry__content__header__entry">
Invoice
</div>
</div>
);
}
|
#!/bin/bash
if which nproc > /dev/null; then
MAKEOPTS="-j$(nproc)"
else
MAKEOPTS="-j$(sysctl -n hw.ncpu)"
fi
########################################################################################
# general helper functions

# Install the bare-metal ARM cross toolchain used by several ports below.
function ci_gcc_arm_setup {
sudo apt-get install gcc-arm-none-eabi libnewlib-arm-none-eabi
arm-none-eabi-gcc --version
}
########################################################################################
# code formatting

# Install the formatters: uncrustify for C, black for Python.
function ci_code_formatting_setup {
sudo apt-add-repository --yes --update ppa:pybricks/ppa
sudo apt-get install uncrustify
pip3 install black
uncrustify --version
black --version
}
# Check the working tree is formatted according to project rules.
function ci_code_formatting_run {
tools/codeformat.py -v
}
########################################################################################
# commit formatting

# Verify the commit messages between upstream/master and HEAD.
function ci_commit_formatting_run {
git remote add upstream https://github.com/micropython/micropython.git
git fetch --depth=100 upstream master
# For a PR, upstream/master..HEAD ends with a merge commit into master, exclude that one.
tools/verifygitlog.py -v upstream/master..HEAD --no-merges
}
########################################################################################
# code size

# Native multilib gcc plus the ARM cross compiler for the size builds.
function ci_code_size_setup {
sudo apt-get update
sudo apt-get install gcc-multilib
gcc --version
ci_gcc_arm_setup
}
# Build the merge-base reference and the PR branch, saving the size reports
# to ~/size0 and ~/size1 so a later step can diff them.
function ci_code_size_build {
# starts off at either the ref/pull/N/merge FETCH_HEAD, or the current branch HEAD
git checkout -b pull_request # save the current location
git remote add upstream https://github.com/micropython/micropython.git
git fetch --depth=100 upstream master
# build reference, save to size0
# ignore any errors with this build, in case master is failing
git checkout `git merge-base --fork-point upstream/master pull_request`
git show -s
tools/metrics.py clean bm
tools/metrics.py build bm | tee ~/size0 || true
# build PR/branch, save to size1
git checkout pull_request
git log upstream/master..HEAD
tools/metrics.py clean bm
tools/metrics.py build bm | tee ~/size1
}
########################################################################################
# .mpy file format

function ci_mpy_format_setup {
sudo pip3 install pyelftools
}
# Exercise mpy-tool.py's dump mode on both bytecode and native .mpy files.
function ci_mpy_format_test {
# Test mpy-tool.py dump feature on bytecode
python2 ./tools/mpy-tool.py -xd ports/minimal/frozentest.mpy
python3 ./tools/mpy-tool.py -xd ports/minimal/frozentest.mpy
# Test mpy-tool.py dump feature on native code
make -C examples/natmod/features1
./tools/mpy-tool.py -xd examples/natmod/features1/features1.mpy
}
########################################################################################
# ports/cc3200

function ci_cc3200_setup {
ci_gcc_arm_setup
}
# Build both the application and the bootloader images.
function ci_cc3200_build {
make ${MAKEOPTS} -C ports/cc3200 BTARGET=application BTYPE=release
make ${MAKEOPTS} -C ports/cc3200 BTARGET=bootloader BTYPE=release
}
########################################################################################
# ports/esp32

# Clone ESP-IDF at the tag given in $1, fetch only the submodules the esp32
# port needs, then run the IDF installer.
function ci_esp32_setup_helper {
git clone https://github.com/espressif/esp-idf.git
git -C esp-idf checkout $1
git -C esp-idf submodule update --init \
components/bt/host/nimble/nimble \
components/esp_wifi \
components/esptool_py/esptool \
components/lwip/lwip \
components/mbedtls/mbedtls
# Newer IDF versions split the BT controller libraries per chip family.
if [ -d esp-idf/components/bt/controller/esp32 ]; then
git -C esp-idf submodule update --init \
components/bt/controller/lib_esp32 \
components/bt/controller/lib_esp32c3_family
else
git -C esp-idf submodule update --init \
components/bt/controller/lib
fi
./esp-idf/install.sh
}
function ci_esp32_idf402_setup {
ci_esp32_setup_helper v4.0.2
}
function ci_esp32_idf44_setup {
ci_esp32_setup_helper v4.4
}
# Build the default board, then each generic C3/S2/S3 board that the
# installed IDF version supports (probed via $IDF_PATH components).
function ci_esp32_build {
source esp-idf/export.sh
make ${MAKEOPTS} -C mpy-cross
make ${MAKEOPTS} -C ports/esp32 submodules
make ${MAKEOPTS} -C ports/esp32 USER_C_MODULES=../../../examples/usercmodule/micropython.cmake FROZEN_MANIFEST=$(pwd)/ports/esp32/boards/manifest.py
if [ -d $IDF_PATH/components/esp32c3 ]; then
make ${MAKEOPTS} -C ports/esp32 BOARD=GENERIC_C3
fi
if [ -d $IDF_PATH/components/esp32s2 ]; then
make ${MAKEOPTS} -C ports/esp32 BOARD=GENERIC_S2
fi
if [ -d $IDF_PATH/components/esp32s3 ]; then
make ${MAKEOPTS} -C ports/esp32 BOARD=GENERIC_S3
fi
}
########################################################################################
# ports/esp8266

# Fetch the prebuilt xtensa toolchain tarball and install esptool from pip.
function ci_esp8266_setup {
sudo pip install pyserial esptool
wget https://github.com/jepler/esp-open-sdk/releases/download/2018-06-10/xtensa-lx106-elf-standalone.tar.gz
zcat xtensa-lx106-elf-standalone.tar.gz | tar x
# Remove this esptool.py so pip version is used instead
rm xtensa-lx106-elf/bin/esptool.py
}
# Echo the toolchain bin dir so callers can append it to PATH.
function ci_esp8266_path {
echo $(pwd)/xtensa-lx106-elf/bin
}
function ci_esp8266_build {
make ${MAKEOPTS} -C mpy-cross
make ${MAKEOPTS} -C ports/esp8266 submodules
make ${MAKEOPTS} -C ports/esp8266
make ${MAKEOPTS} -C ports/esp8266 BOARD=GENERIC_512K
make ${MAKEOPTS} -C ports/esp8266 BOARD=GENERIC_1M
}
########################################################################################
# ports/javascript

# Install and activate the latest emscripten SDK.
function ci_javascript_setup {
git clone https://github.com/emscripten-core/emsdk.git
(cd emsdk && ./emsdk install latest && ./emsdk activate latest)
}
function ci_javascript_build {
source emsdk/emsdk_env.sh
make ${MAKEOPTS} -C ports/javascript
}
function ci_javascript_run_tests {
# This port is very slow at running, so only run a few of the tests.
(cd tests && MICROPY_MICROPYTHON=../ports/javascript/node_run.sh ./run-tests.py -j1 basics/builtin_*.py)
}
########################################################################################
# ports/mimxrt

function ci_mimxrt_setup {
ci_gcc_arm_setup
}
function ci_mimxrt_build {
make ${MAKEOPTS} -C mpy-cross
make ${MAKEOPTS} -C ports/mimxrt submodules
make ${MAKEOPTS} -C ports/mimxrt BOARD=MIMXRT1020_EVK
make ${MAKEOPTS} -C ports/mimxrt BOARD=TEENSY40
}
########################################################################################
# ports/nrf

function ci_nrf_setup {
ci_gcc_arm_setup
}
# Build a spread of nrf boards, including pca10056 with the s140 SoftDevice.
function ci_nrf_build {
ports/nrf/drivers/bluetooth/download_ble_stack.sh s140_nrf52_6_1_1
make ${MAKEOPTS} -C mpy-cross
make ${MAKEOPTS} -C ports/nrf submodules
make ${MAKEOPTS} -C ports/nrf BOARD=pca10040
make ${MAKEOPTS} -C ports/nrf BOARD=microbit
make ${MAKEOPTS} -C ports/nrf BOARD=pca10056 SD=s140
make ${MAKEOPTS} -C ports/nrf BOARD=pca10090
}
########################################################################################
# ports/powerpc

function ci_powerpc_setup {
sudo apt-get update
sudo apt-get install gcc-powerpc64le-linux-gnu libc6-dev-ppc64el-cross
}
# Build with both supported UART drivers.
function ci_powerpc_build {
make ${MAKEOPTS} -C ports/powerpc UART=potato
make ${MAKEOPTS} -C ports/powerpc UART=lpc_serial
}
########################################################################################
# ports/qemu-arm

function ci_qemu_arm_setup {
ci_gcc_arm_setup
sudo apt-get update
sudo apt-get install qemu-system
qemu-system-arm --version
}
# Build a big-endian configuration, then run the test suite for the default
# and sabrelite boards under qemu.
function ci_qemu_arm_build {
make ${MAKEOPTS} -C mpy-cross
make ${MAKEOPTS} -C ports/qemu-arm CFLAGS_EXTRA=-DMP_ENDIANNESS_BIG=1
make ${MAKEOPTS} -C ports/qemu-arm clean
make ${MAKEOPTS} -C ports/qemu-arm -f Makefile.test test
make ${MAKEOPTS} -C ports/qemu-arm -f Makefile.test clean
make ${MAKEOPTS} -C ports/qemu-arm -f Makefile.test BOARD=sabrelite test
}
########################################################################################
# ports/rp2

function ci_rp2_setup {
ci_gcc_arm_setup
}
function ci_rp2_build {
make ${MAKEOPTS} -C mpy-cross
git submodule update --init lib/pico-sdk lib/tinyusb
make ${MAKEOPTS} -C ports/rp2
make ${MAKEOPTS} -C ports/rp2 clean
make ${MAKEOPTS} -C ports/rp2 USER_C_MODULES=../../examples/usercmodule/micropython.cmake
}
########################################################################################
# ports/samd

function ci_samd_setup {
ci_gcc_arm_setup
}
function ci_samd_build {
make ${MAKEOPTS} -C ports/samd submodules
make ${MAKEOPTS} -C ports/samd
}
########################################################################################
# ports/stm32

# pyhy is needed by the mboot packing (encryption/signing) steps below.
function ci_stm32_setup {
ci_gcc_arm_setup
pip3 install pyhy
}
# Build pyboard variants with assorted feature combinations, plus mboot.
function ci_stm32_pyb_build {
make ${MAKEOPTS} -C mpy-cross
make ${MAKEOPTS} -C ports/stm32 submodules
git submodule update --init lib/btstack
git submodule update --init lib/mynewt-nimble
make ${MAKEOPTS} -C ports/stm32 BOARD=PYBV11 MICROPY_PY_WIZNET5K=5200 MICROPY_PY_CC3K=1 USER_C_MODULES=../../examples/usercmodule
make ${MAKEOPTS} -C ports/stm32 BOARD=PYBD_SF2
make ${MAKEOPTS} -C ports/stm32 BOARD=PYBD_SF6 NANBOX=1 MICROPY_BLUETOOTH_NIMBLE=0 MICROPY_BLUETOOTH_BTSTACK=1
make ${MAKEOPTS} -C ports/stm32/mboot BOARD=PYBV10 CFLAGS_EXTRA='-DMBOOT_FSLOAD=1 -DMBOOT_VFS_LFS2=1'
make ${MAKEOPTS} -C ports/stm32/mboot BOARD=PYBD_SF6
}
function ci_stm32_nucleo_build {
make ${MAKEOPTS} -C mpy-cross
make ${MAKEOPTS} -C ports/stm32 submodules
git submodule update --init lib/mynewt-nimble
# Test building various MCU families, some with additional options.
make ${MAKEOPTS} -C ports/stm32 BOARD=NUCLEO_F091RC
make ${MAKEOPTS} -C ports/stm32 BOARD=NUCLEO_H743ZI CFLAGS_EXTRA='-DMICROPY_PY_THREAD=1'
make ${MAKEOPTS} -C ports/stm32 BOARD=NUCLEO_L073RZ
make ${MAKEOPTS} -C ports/stm32 BOARD=NUCLEO_L476RG DEBUG=1
# Test building a board with mboot packing enabled (encryption, signing, compression).
make ${MAKEOPTS} -C ports/stm32 BOARD=NUCLEO_WB55 USE_MBOOT=1 MBOOT_ENABLE_PACKING=1
make ${MAKEOPTS} -C ports/stm32/mboot BOARD=NUCLEO_WB55 USE_MBOOT=1 MBOOT_ENABLE_PACKING=1
# Test mboot_pack_dfu.py created a valid file, and that its unpack-dfu command works.
BOARD_WB55=ports/stm32/boards/NUCLEO_WB55
BUILD_WB55=ports/stm32/build-NUCLEO_WB55
python3 ports/stm32/mboot/mboot_pack_dfu.py -k $BOARD_WB55/mboot_keys.h unpack-dfu $BUILD_WB55/firmware.pack.dfu $BUILD_WB55/firmware.unpack.dfu
diff $BUILD_WB55/firmware.unpack.dfu $BUILD_WB55/firmware.dfu
# Test unpack-dfu command works without a secret key
tail -n +2 $BOARD_WB55/mboot_keys.h > $BOARD_WB55/mboot_keys_no_sk.h
python3 ports/stm32/mboot/mboot_pack_dfu.py -k $BOARD_WB55/mboot_keys_no_sk.h unpack-dfu $BUILD_WB55/firmware.pack.dfu $BUILD_WB55/firmware.unpack_no_sk.dfu
diff $BUILD_WB55/firmware.unpack.dfu $BUILD_WB55/firmware.unpack_no_sk.dfu
}
########################################################################################
# ports/teensy

function ci_teensy_setup {
ci_gcc_arm_setup
}
function ci_teensy_build {
make ${MAKEOPTS} -C ports/teensy
}
########################################################################################
# ports/unix

# Option sets reused by the unix build/test functions below.
CI_UNIX_OPTS_SYS_SETTRACE=(
MICROPY_PY_BTREE=0
MICROPY_PY_FFI=0
MICROPY_PY_USSL=0
CFLAGS_EXTRA="-DMICROPY_PY_SYS_SETTRACE=1"
)
CI_UNIX_OPTS_SYS_SETTRACE_STACKLESS=(
MICROPY_PY_BTREE=0
MICROPY_PY_FFI=0
MICROPY_PY_USSL=0
CFLAGS_EXTRA="-DMICROPY_STACKLESS=1 -DMICROPY_STACKLESS_STRICT=1 -DMICROPY_PY_SYS_SETTRACE=1"
)
CI_UNIX_OPTS_QEMU_MIPS=(
CROSS_COMPILE=mips-linux-gnu-
VARIANT=coverage
MICROPY_STANDALONE=1
LDFLAGS_EXTRA="-static"
)
CI_UNIX_OPTS_QEMU_ARM=(
CROSS_COMPILE=arm-linux-gnueabi-
VARIANT=coverage
MICROPY_STANDALONE=1
)
# Build mpy-cross and the unix port (submodules and deplibs first); any
# extra make variables are passed through via "$@".
function ci_unix_build_helper {
make ${MAKEOPTS} -C mpy-cross
make ${MAKEOPTS} -C ports/unix "$@" submodules
make ${MAKEOPTS} -C ports/unix "$@" deplibs
make ${MAKEOPTS} -C ports/unix "$@"
}
# Compile the ffi test library; $1 is the compiler, $2 optional extra flags.
function ci_unix_build_ffi_lib_helper {
$1 $2 -shared -o tests/unix/ffi_lib.so tests/unix/ffi_lib.c
}
function ci_unix_run_tests_helper {
make -C ports/unix "$@" test
}
# Run the full suite (including multi-net tests) for the variant in $1;
# remaining args are forwarded to make.
function ci_unix_run_tests_full_helper {
variant=$1
shift
if [ $variant = standard ]; then
micropython=micropython
else
micropython=micropython-$variant
fi
make -C ports/unix VARIANT=$variant "$@" test_full
(cd tests && MICROPY_CPYTHON3=python3 MICROPY_MICROPYTHON=../ports/unix/$micropython ./run-multitests.py multi_net/*.py)
}
# Build every example native .mpy module for the given architecture.
# Defaults to x64 when no architecture argument is supplied.
function ci_native_mpy_modules_build {
    arch=${1:-x64}
    # Same module list and order as before, just driven by a loop.
    for mod in features1 features2 btree framebuf uheapq urandom ure uzlib; do
        make -C examples/natmod/$mod ARCH=$arch
    done
}
# 32-bit variant: identical modules, built for x86.
function ci_native_mpy_modules_32bit_build {
    ci_native_mpy_modules_build x86
}
function ci_unix_minimal_build {
make ${MAKEOPTS} -C ports/unix VARIANT=minimal
}
# The minimal variant lacks several features, so exclude tests needing them.
function ci_unix_minimal_run_tests {
(cd tests && MICROPY_CPYTHON3=python3 MICROPY_MICROPYTHON=../ports/unix/micropython-minimal ./run-tests.py -e exception_chain -e self_type_check -e subclass_native_init -d basics)
}
function ci_unix_standard_build {
ci_unix_build_helper VARIANT=standard
ci_unix_build_ffi_lib_helper gcc
}
function ci_unix_standard_run_tests {
ci_unix_run_tests_full_helper standard
}
function ci_unix_standard_run_perfbench {
(cd tests && MICROPY_CPYTHON3=python3 MICROPY_MICROPYTHON=../ports/unix/micropython ./run-perfbench.py 1000 1000)
}
function ci_unix_dev_build {
ci_unix_build_helper VARIANT=dev
}
function ci_unix_dev_run_tests {
ci_unix_run_tests_helper VARIANT=dev
}
function ci_unix_coverage_setup {
sudo pip3 install setuptools
sudo pip3 install pyelftools
gcc --version
python3 --version
}
function ci_unix_coverage_build {
ci_unix_build_helper VARIANT=coverage
ci_unix_build_ffi_lib_helper gcc
}
function ci_unix_coverage_run_tests {
ci_unix_run_tests_full_helper coverage
}
# Run the native .mpy module tests against the coverage build.
function ci_unix_coverage_run_native_mpy_tests {
MICROPYPATH=examples/natmod/features2 ./ports/unix/micropython-coverage -m features2
(cd tests && ./run-natmodtests.py "$@" extmod/{btree*,framebuf*,uheapq*,urandom*,ure*,uzlib*}.py)
}
# Multilib toolchain and 32-bit libffi for the 32-bit builds below.
function ci_unix_32bit_setup {
sudo dpkg --add-architecture i386
sudo apt-get update
sudo apt-get install gcc-multilib g++-multilib libffi-dev:i386
sudo pip3 install setuptools
sudo pip3 install pyelftools
gcc --version
python2 --version
python3 --version
}
function ci_unix_coverage_32bit_build {
ci_unix_build_helper VARIANT=coverage MICROPY_FORCE_32BIT=1
ci_unix_build_ffi_lib_helper gcc -m32
}
function ci_unix_coverage_32bit_run_tests {
ci_unix_run_tests_full_helper coverage MICROPY_FORCE_32BIT=1
}
function ci_unix_coverage_32bit_run_native_mpy_tests {
ci_unix_coverage_run_native_mpy_tests --arch x86
}
function ci_unix_nanbox_build {
# Use Python 2 to check that it can run the build scripts
ci_unix_build_helper PYTHON=python2 VARIANT=nanbox CFLAGS_EXTRA="-DMICROPY_PY_MATH_CONSTANTS=1"
ci_unix_build_ffi_lib_helper gcc -m32
}
function ci_unix_nanbox_run_tests {
ci_unix_run_tests_full_helper nanbox PYTHON=python2
}
function ci_unix_float_build {
ci_unix_build_helper VARIANT=standard CFLAGS_EXTRA="-DMICROPY_FLOAT_IMPL=MICROPY_FLOAT_IMPL_FLOAT"
ci_unix_build_ffi_lib_helper gcc
}
function ci_unix_float_run_tests {
# TODO get this working: ci_unix_run_tests_full_helper standard CFLAGS_EXTRA="-DMICROPY_FLOAT_IMPL=MICROPY_FLOAT_IMPL_FLOAT"
ci_unix_run_tests_helper CFLAGS_EXTRA="-DMICROPY_FLOAT_IMPL=MICROPY_FLOAT_IMPL_FLOAT"
}
function ci_unix_clang_setup {
sudo apt-get install clang
clang --version
}
function ci_unix_stackless_clang_build {
make ${MAKEOPTS} -C mpy-cross CC=clang
make ${MAKEOPTS} -C ports/unix submodules
make ${MAKEOPTS} -C ports/unix CC=clang CFLAGS_EXTRA="-DMICROPY_STACKLESS=1 -DMICROPY_STACKLESS_STRICT=1"
}
function ci_unix_stackless_clang_run_tests {
ci_unix_run_tests_helper CC=clang
}
function ci_unix_float_clang_build {
make ${MAKEOPTS} -C mpy-cross CC=clang
make ${MAKEOPTS} -C ports/unix submodules
make ${MAKEOPTS} -C ports/unix CC=clang CFLAGS_EXTRA="-DMICROPY_FLOAT_IMPL=MICROPY_FLOAT_IMPL_FLOAT"
}
function ci_unix_float_clang_run_tests {
ci_unix_run_tests_helper CC=clang
}
function ci_unix_settrace_build {
make ${MAKEOPTS} -C mpy-cross
make ${MAKEOPTS} -C ports/unix "${CI_UNIX_OPTS_SYS_SETTRACE[@]}"
}
function ci_unix_settrace_run_tests {
ci_unix_run_tests_helper "${CI_UNIX_OPTS_SYS_SETTRACE[@]}"
}
function ci_unix_settrace_stackless_build {
make ${MAKEOPTS} -C mpy-cross
make ${MAKEOPTS} -C ports/unix "${CI_UNIX_OPTS_SYS_SETTRACE_STACKLESS[@]}"
}
function ci_unix_settrace_stackless_run_tests {
ci_unix_run_tests_helper "${CI_UNIX_OPTS_SYS_SETTRACE_STACKLESS[@]}"
}
# Build standard, dev and coverage variants to surface extra warnings on macOS.
function ci_unix_macos_build {
make ${MAKEOPTS} -C mpy-cross
make ${MAKEOPTS} -C ports/unix submodules
#make ${MAKEOPTS} -C ports/unix deplibs
make ${MAKEOPTS} -C ports/unix
# check for additional compiler errors/warnings
make ${MAKEOPTS} -C ports/unix VARIANT=dev submodules
make ${MAKEOPTS} -C ports/unix VARIANT=dev
make ${MAKEOPTS} -C ports/unix VARIANT=coverage submodules
make ${MAKEOPTS} -C ports/unix VARIANT=coverage
}
function ci_unix_macos_run_tests {
# Issues with macOS tests:
# - OSX has poor time resolution and these uasyncio tests do not have correct output
# - import_pkg7 has a problem with relative imports
# - urandom_basic has a problem with getrandbits(0)
(cd tests && ./run-tests.py --exclude 'uasyncio_(basic|heaplock|lock|wait_task)' --exclude 'import_pkg7.py' --exclude 'urandom_basic.py')
}
function ci_unix_qemu_mips_setup {
sudo apt-get update
sudo apt-get install gcc-mips-linux-gnu g++-mips-linux-gnu
sudo apt-get install qemu-user
qemu-mips --version
}
function ci_unix_qemu_mips_build {
# qemu-mips on GitHub Actions will seg-fault if not linked statically
ci_unix_build_helper "${CI_UNIX_OPTS_QEMU_MIPS[@]}"
}
function ci_unix_qemu_mips_run_tests {
# Issues with MIPS tests:
# - (i)listdir does not work, it always returns the empty list (it's an issue with the underlying C call)
# - ffi tests do not work
file ./ports/unix/micropython-coverage
(cd tests && MICROPY_MICROPYTHON=../ports/unix/micropython-coverage ./run-tests.py --exclude 'vfs_posix.py' --exclude 'ffi_(callback|float|float2).py')
}
function ci_unix_qemu_arm_setup {
sudo apt-get update
sudo apt-get install gcc-arm-linux-gnueabi g++-arm-linux-gnueabi
sudo apt-get install qemu-user
qemu-arm --version
}
function ci_unix_qemu_arm_build {
ci_unix_build_helper "${CI_UNIX_OPTS_QEMU_ARM[@]}"
ci_unix_build_ffi_lib_helper arm-linux-gnueabi-gcc
}
function ci_unix_qemu_arm_run_tests {
# Issues with ARM tests:
# - (i)listdir does not work, it always returns the empty list (it's an issue with the underlying C call)
export QEMU_LD_PREFIX=/usr/arm-linux-gnueabi
file ./ports/unix/micropython-coverage
(cd tests && MICROPY_MICROPYTHON=../ports/unix/micropython-coverage ./run-tests.py --exclude 'vfs_posix.py')
}
########################################################################################
# ports/windows

# The windows port is cross-compiled from Linux with mingw.
function ci_windows_setup {
sudo apt-get install gcc-mingw-w64
}
function ci_windows_build {
make ${MAKEOPTS} -C mpy-cross
make ${MAKEOPTS} -C ports/windows CROSS_COMPILE=i686-w64-mingw32-
}
########################################################################################
# ports/zephyr

# Pinned versions for the Zephyr docker image, SDK and source tree.
ZEPHYR_DOCKER_VERSION=v0.21.0
ZEPHYR_SDK_VERSION=0.13.2
ZEPHYR_VERSION=v2.7.0
# Start a long-running Zephyr CI container with this repo mounted at /micropython.
function ci_zephyr_setup {
docker pull zephyrprojectrtos/ci:${ZEPHYR_DOCKER_VERSION}
docker run --name zephyr-ci -d -it \
-v "$(pwd)":/micropython \
-e ZEPHYR_SDK_INSTALL_DIR=/opt/toolchains/zephyr-sdk-${ZEPHYR_SDK_VERSION} \
-e ZEPHYR_TOOLCHAIN_VARIANT=zephyr \
-e ZEPHYR_BASE=/zephyrproject/zephyr \
-w /micropython/ports/zephyr \
zephyrprojectrtos/ci:${ZEPHYR_DOCKER_VERSION}
docker ps -a
}
# Initialise and update the Zephyr workspace inside the container.
function ci_zephyr_install {
docker exec zephyr-ci west init --mr ${ZEPHYR_VERSION} /zephyrproject
docker exec -w /zephyrproject zephyr-ci west update
docker exec -w /zephyrproject zephyr-ci west zephyr-export
}
function ci_zephyr_build {
docker exec zephyr-ci west build -p auto -b qemu_x86 -- -DCONF_FILE=prj_minimal.conf
docker exec zephyr-ci west build -p auto -b qemu_x86
docker exec zephyr-ci west build -p auto -b frdm_k64f
docker exec zephyr-ci west build -p auto -b mimxrt1050_evk
docker exec zephyr-ci west build -p auto -b nucleo_wb55rg # for bluetooth
}
|
#!/bin/bash
# SLURM batch script: runs the personalised synthetic test simulations.
# The #SBATCH lines below are directives parsed by SLURM, not comments to edit.
#SBATCH -t 24:00:00
#SBATCH -J conv_rand
#SBATCH --mail-user=
#SBATCH --mail-type=FAIL
#SBATCH -e ./conv_pers_rand.err.%j
#SBATCH -o ./conv_pers_rand.out.%j
#SBATCH -n 1
#SBATCH -c 24
#SBATCH --mem-per-cpu=16384
#SBATCH --exclusive
#SBATCH -C avx
# ----------------------------------
# enable this if running on lichtenberg
module load intel python/3.6.8
### personalised
# Run the synthetic tests twice with different mode arguments (0 and 1).
# NOTE(review): OMP_NUM_THREADS=20 while -c 24 cores are reserved — presumably
# intentional headroom; confirm.
OMP_NUM_THREADS=20 python3 -u python/analysis/simulations/synth_tests.py 0
OMP_NUM_THREADS=20 python3 -u python/analysis/simulations/synth_tests.py 1
#OMP_NUM_THREADS=20 python3 -u python/analysis/simulations/synth_latent_factor_tests.py
|
<filename>src/main/java/NormalMergeSort.java
public class NormalMergeSort {
// utf8: "Köpfchen in das Wasser, Schwänzchen in die Höh." -CIA-Verhörmethode
public static void mergeSort(int[] arr) {
mergeSort(arr, 0, arr.length - 1);
}
public static void mergeSort(int[] arr, int low, int high) {
if (low < high) {
int mid = (low + high) / 2;
mergeSort(arr, low, mid);
mergeSort(arr, mid + 1, high);
merge(arr, low, mid, high);
}
}
private static void merge(int[] arr, int low, int mid, int high) {
int i = low;
int j = mid + 1;
int k = 0;
final int SIZE = high - low + 1;
int[] helperArr = new int[SIZE];
while (i <= mid && j <= high) {
if (arr[i] < arr[j]) {
helperArr[k++] = arr[i++];
} else {
helperArr[k++] = arr[j++];
}
}
while (i <= mid) {
helperArr[k++] = arr[i++];
}
while (j <= high) {
helperArr[k++] = arr[j++];
}
for (i = 0; i < SIZE; i++) {
arr[low + i] = helperArr[i];
}
}
}
|
#!/bin/sh
# Upgrade Homebrew, install everything listed in ./Brewfile, and install the
# terraform version pinned via tfenv.
install_packages() {
printf '\033[91m%s\033[m\n' 'installing packages...'
brew upgrade
brew bundle --file ./Brewfile
tfenv install
printf '\033[36m%s\033[m\n' 'install packages completed.'
}
# Register git-secrets AWS patterns globally and wire the hooks into the git
# init template directory so every newly-created repository gets them.
setup_git_secrets() {
git secrets --register-aws --global
git secrets --install ~/.git-templates/git-secrets
git config --global init.templatedir "$HOME/.git-templates/git-secrets"
printf '\033[36m%s\033[m\n' 'git-secrets config set up completed.'
}
# Run from the directory containing this script so ./Brewfile resolves.
cd "$(dirname "$0")" || exit 1
install_packages
setup_git_secrets
|
<reponame>andphp/antd-dashboard
import React, { FC, ReactNode } from 'react'
import { Route, useNavigate } from 'react-router-dom'
import { Result, Button } from 'antd'
import { useLocale } from '@/locales'
import { RouteProps, useLocation } from 'react-router'
import { Logged } from '@/utils/helper'
export interface PrivateRouteProps extends RouteProps {
render: FC<ReactNode>;
}
/**
 * Route guard: renders the wrapped component when the user is logged in,
 * otherwise shows a 403 page with a button back to the login screen.
 */
const PrivateRoute: FC<PrivateRouteProps> = ({ render, ...props }) => {
  const logged = Logged()
  const navigate = useNavigate()
  const { formatMessage } = useLocale()
  const location = useLocation()

  // Authenticated: delegate straight to the wrapped render function.
  if (logged) {
    return render({ ...props })
  }

  // Remember where the user came from so login can send them back.
  const goToLogin = () =>
    navigate('/login', { replace: true, state: { from: location.pathname } })

  return (
    <Result
      status='403'
      title='403'
      subTitle={formatMessage({ id: 'gloabal.tips.unauthorized' })}
      extra={
        <Button type='primary' onClick={goToLogin}>
          {formatMessage({ id: 'gloabal.tips.goToLogin' })}
        </Button>
      }
    />
  )
}
export default PrivateRoute
|
public static int findMax(int[] arr)
{
int max = arr[0];
for (int i = 1; i < arr.length; i++)
if (arr[i] > max)
max = arr[i];
return max;
} |
#!/bin/bash
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://aws.amazon.com/apache2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
#
set -xe

# Print usage and abort; called when the argument count is wrong.
usage() {
echo "install_saw.sh download_dir install_dir"
exit 1
}
if [ "$#" -ne "2" ]; then
usage
fi
DOWNLOAD_DIR=$1
INSTALL_DIR=$2
# Idempotency: skip the download when a saw binary is already installed.
if [ -x "$INSTALL_DIR/bin/saw" ]; then
echo "Saw already installed at $INSTALL_DIR/bin/saw";
exit 0;
fi
mkdir -p "$DOWNLOAD_DIR"
cd "$DOWNLOAD_DIR"
#download saw binaries
curl --retry 3 https://s2n-public-test-dependencies.s3-us-west-2.amazonaws.com/saw-0.4.0.99-2020-03-31-Ubuntu14.04-64.tar.gz --output saw.tar.gz
mkdir -p saw && tar -xzf saw.tar.gz --strip-components=1 -C saw
mkdir -p "$INSTALL_DIR" && mv saw/* "$INSTALL_DIR"
# Sanity check: the installed binary runs (set -e aborts the script otherwise).
"$INSTALL_DIR"/bin/saw --version
|
package io.gridgo.bean.test;
import io.gridgo.bean.BArray;
import io.gridgo.bean.BObject;
import io.gridgo.bean.BReference;
import io.gridgo.bean.BValue;
import io.gridgo.bean.test.support.Bar;
// Manual smoke test: builds nested BObject/BArray values covering every
// supported scalar type and prints them, for eyeballing the printer output.
public class TestPrinter {
public static void main(String[] args) {
var raw = BValue.of(new byte[] { 1, 2, 3, 4, 5, 6 });
var bar = new Bar();
var ref = BReference.of(bar);
// One entry per supported type, plus a nested array and object.
var obj = BObject.ofEmpty() //
.setAny("ref", bar) //
.setAny("bool", false) //
.set("int", BValue.of(1)) //
.setAny("long", 1L) //
.setAny("char", 'a') //
.setAny("str", "hello") //
.setAny("double", 1.11) //
.setAny("byte", (byte) 1) //
.setAny("raw", raw) //
.setAny("arr", new int[] { 1, 2, 3 }) //
.set("obj", BObject.ofEmpty().setAny("int", 2)) //
;
// Heterogeneous sequence: object, primitives, raw bytes, and a POJO reference.
var arr = BArray.ofSequence(obj, 1, true, new byte[] { 4, 5, 6, 7 }, bar);
System.out.println(raw);
System.out.println(ref);
System.out.println(obj);
System.out.println(arr);
}
}
|
/* _____ _ __ ________ ___ *\
** / ___/(_) |/ / ___/ |__ \ Simple Mechanics Simulator 2 **
** \__ \/ / /|_/ /\__ \ __/ / copyright (c) 2011 <NAME> **
** ___/ / / / / /___/ / / __/ **
** /____/_/_/ /_//____/ /____/ **
\* */
package sims.dynamics
import sims.math._
/** A rigid body composed of one or more shapes.
  * Its position is the mass-weighted centroid (centre of mass) of its shapes;
  * moving or rotating the body moves/rotates every constituent shape. */
class Body(shapes0: Shape*) {
val shapes: List[Shape] = shapes0.toList
// Force/torque accumulated for the current step.
var force: Vector2D = Vector2D.Null
var torque: Double = 0.0
var linearVelocity: Vector2D = Vector2D.Null
var angularVelocity: Double = 0.0
// Centre of mass: mass-weighted average of the shape positions.
private var _position: Vector2D =
(Vector2D.Null /: shapes)((v: Vector2D, s: Shape) => v + s.position * s.mass) / shapes.map(_.mass).sum
def position = _position
// Moving the body translates every shape by the same delta.
def position_=(pos: Vector2D) = {
val delta = pos - _position
_position = pos
for (s <- shapes) s.position += delta
}
private var _rotation: Double = 0.0
def rotation = _rotation
// Rotating the body rotates each shape and repositions it at its rotated
// local offset from the COM.
// NOTE(review): delta here is old-minus-new (opposite sign to position_=);
// looks intentional given the subsequent reposition via `rotate r`, but
// verify the sign convention against Shape.rotation's meaning.
def rotation_=(r: Double) = {
val delta = _rotation - r
_rotation = r
for (s <- shapes) {
s.rotation += delta
s.position = position + (s.local.get rotate r)
}
}
// Fixed bodies have infinite mass/inertia; all apply* methods become no-ops.
var fixed = false
/**Returns the mass of this body. If the body is free, its mass is the sum of the masses of its shapes.
* If the body is fixed, its mass is infinite (`Double.PositiveInfinity`).
* @return this body's mass*/
lazy val mass: Double = if (!fixed) shapes.map(_.mass).sum else Double.PositiveInfinity
/**Returns the moment of inertia for rotations about the COM of this body.
* It is calculated using the moments of inertia of this body's shapes and the parallel axis theorem.
* If the body is fixed, its moment of inertia is infinite (`Double.PositiveInfinity`).
* @return moment of inertia for rotations about the center of mass of this body*/
lazy val inertia: Double = if (!fixed) shapes.map((s: Shape) => s.inertia + s.mass * (s.local.get dot s.local.get)).sum else Double.PositiveInfinity
/**Applies a force to the center of mass of this body.
* @param force applied force*/
def applyForce(force: Vector2D) = if (!fixed) this.force += force
/**Applies a force to a point on this body. The point is considered to be contained within this body.
* @param force applied force
* @param point position vector of the point (in world coordinates)*/
def applyForce(force: Vector2D, point: Vector2D) = if (!fixed) {this.force += force; torque += (point - position) cross force}
/**Applies a torque to the center of mass.*/
def applyTorque(torque: Double) = if (!fixed) this.torque += torque
/**Applies an impulse to the center of mass of this body.
* @param impulse applied impulse*/
def applyImpulse(impulse: Vector2D) = if (!fixed) linearVelocity += impulse / mass
/**Applies an impulse to a point on this body. The point is considered to be contained within this body.
* @param impulse applied impulse
* @param point position vector of the point (in world coordinates)*/
def applyImpulse(impulse: Vector2D, point: Vector2D) = if (!fixed) {linearVelocity += impulse / mass; angularVelocity += ((point - position) cross impulse) / inertia}
/**Applies an angular impulse to the center of mass.*/
def applyAngularImpulse(impulse: Double) = if (!fixed) angularVelocity += impulse / inertia
/**Linear velocity of the given point on this body (in world coordinates).*/
def velocityOfPoint(point: Vector2D) = linearVelocity + (angularVelocity cross (point - position))
/**Linear momentum.*/
def linearMomentum = linearVelocity * mass
// Constructor tail: record each shape's offset from the COM and back-link
// the shape to this body. Runs once at construction, after _position is set.
for (s0 <- shapes0) {
s0.local = Some(s0.position - _position)
s0.body = this
}
// True if any of this body's shapes contains the given world point.
def contains(point: Vector2D) = shapes.exists(_.contains(point))
// Human-readable state dump for debugging.
def info = {
"Body@" + hashCode + "(" + this.getClass() + ")\n" +
"\tPosition: " + position + "\n" +
"\tRotation: " + rotation + "\n" +
"\tLinear velocity: " + linearVelocity + "\n" +
"\tAngular velocity: " + angularVelocity + "\n" +
"\tForce: " + force + "\n" +
"\tTorque: " + torque + "\n" +
"\tMass: " + mass + "\n" +
"\tInertia: " + inertia + "\n" +
"\tFixed: " + fixed + "\n" +
"\tShape count" + shapes.length
}
}
<filename>FPSLighting/Dependencies/DIRECTX/Utilities/Source/dxtex/dxtexdoc.cpp
// dxtexDoc.cpp : implementation of the CDxtexDoc class
//
#include "stdafx.h"
#include "dxtex.h"
#include "dxtexDoc.h"
#include "dialogs.h"
#include "formats.h"
#include "dds.h"
#ifdef _DEBUG
#define new DEBUG_NEW
#undef THIS_FILE
static char THIS_FILE[] = __FILE__;
#endif
// Helper function that tells whether the given D3DFMT has a working alpha channel
BOOL FormatContainsAlpha( D3DFORMAT fmt )
{
    // Linear scan of the format-info table for the matching entry.
    for( int i = 0; i < fmtInfoArraySize; i++ )
    {
        if( fmtInfoArray[i].fmt == fmt )
            return fmtInfoArray[i].bHasAlpha;
    }
    // Unknown format: report no alpha, matching the original default.
    return FALSE;
}
/////////////////////////////////////////////////////////////////////////////
// CDxtexDoc
// MFC runtime-class and message-map boilerplate; the //{{AFX...}} markers
// delimit ClassWizard-maintained sections and must not be removed.
IMPLEMENT_DYNCREATE(CDxtexDoc, CDocument)
BEGIN_MESSAGE_MAP(CDxtexDoc, CDocument)
//{{AFX_MSG_MAP(CDxtexDoc)
ON_COMMAND(ID_FILE_OPENALPHA, OnFileOpenAlpha)
ON_COMMAND(ID_FORMAT_GENERATEMIPMAPS, OnGenerateMipMaps)
ON_COMMAND(ID_FORMAT_CHANGESURFACEFMT, OnFormatChangeSurfaceFmt)
ON_COMMAND(ID_FORMAT_CHANGECUBEMAPFACES, OnFormatChangeCubeMapFaces)
ON_COMMAND(ID_FORMAT_MAKEINTOVOLUMEMAP, OnFormatMakeIntoVolumeMap)
ON_COMMAND(ID_FORMAT_RESIZE, OnFormatResize)
ON_UPDATE_COMMAND_UI(ID_FILE_OPENALPHA, OnUpdateFileOpenAlpha)
ON_UPDATE_COMMAND_UI(ID_FORMAT_GENERATEMIPMAPS, OnUpdateFormatGenerateMipmaps)
ON_UPDATE_COMMAND_UI(ID_FORMAT_CHANGECUBEMAPFACES, OnUpdateFormatChangeCubeMapFaces)
ON_UPDATE_COMMAND_UI(ID_FORMAT_MAKEINTOVOLUMEMAP, OnUpdateFormatMakeIntoVolumeMap)
ON_UPDATE_COMMAND_UI(ID_FORMAT_RESIZE, OnUpdateFormatResize)
//}}AFX_MSG_MAP
END_MESSAGE_MAP()
/////////////////////////////////////////////////////////////////////////////
// CDxtexDoc diagnostics
// Debug-only validity check and dump, standard MFC diagnostics hooks.
#ifdef _DEBUG
void CDxtexDoc::AssertValid() const
{
CDocument::AssertValid();
}
void CDxtexDoc::Dump(CDumpContext& dc) const
{
CDocument::Dump(dc);
}
#endif //_DEBUG
/////////////////////////////////////////////////////////////////////////////
// CDxtexDoc construction/destruction

// Start with an empty document; textures are created later by
// OnNewDocument / OnOpenDocument.
CDxtexDoc::CDxtexDoc()
    : m_ptexOrig(NULL),
      m_ptexNew(NULL),
      m_dwWidth(0),
      m_dwHeight(0),
      m_dwDepth(0),
      m_numMips(0),
      m_dwCubeMapFlags(0),
      m_bTitleModsChanged(FALSE)
{
}

// Release both texture interfaces (ReleasePpo also NULLs the pointers).
CDxtexDoc::~CDxtexDoc()
{
    ReleasePpo(&m_ptexOrig);
    ReleasePpo(&m_ptexNew);
}
// Creates a new empty texture (2D, cube, or volume) based on the user's
// choices in the New Texture dialog. Returns FALSE on cancel or failure.
BOOL CDxtexDoc::OnNewDocument()
{
HRESULT hr;
LPDIRECT3DDEVICE9 pd3ddev = PDxtexApp()->Pd3ddev();
if (!CDocument::OnNewDocument())
return FALSE;
CNewTextureDlg dlg;
// Obtain maximum texture dimension
D3DCAPS9 d3dcaps;
if ( SUCCEEDED( pd3ddev->GetDeviceCaps( &d3dcaps ) ) )
{
dlg.SetMaxTextureSize( d3dcaps.MaxTextureWidth, d3dcaps.MaxTextureHeight );
}
if (IDCANCEL == dlg.DoModal())
return FALSE;
m_dwWidth = dlg.m_dwWidth;
m_dwHeight = dlg.m_dwHeight;
m_numMips = dlg.m_numMips;
if (dlg.m_iTexType == 0)
{
// Standard (mip-mapped) 2D texture
LPDIRECT3DTEXTURE9 pmiptex;
hr = pd3ddev->CreateTexture(m_dwWidth, m_dwHeight, m_numMips,
0, dlg.m_fmt, D3DPOOL_MANAGED, &pmiptex, NULL);
if (FAILED(hr))
{
AfxMessageBox(ID_ERROR_CANTCREATETEXTURE);
return FALSE;
}
m_ptexOrig = pmiptex;
}
else if (dlg.m_iTexType == 1)
{
// Cube Map
LPDIRECT3DCUBETEXTURE9 pcubetex;
m_dwCubeMapFlags = DDS_CUBEMAP_ALLFACES;
hr = pd3ddev->CreateCubeTexture(m_dwWidth, m_numMips,
0, dlg.m_fmt, D3DPOOL_MANAGED, &pcubetex, NULL);
if (FAILED(hr))
{
AfxMessageBox(ID_ERROR_CANTCREATETEXTURE);
return FALSE;
}
m_ptexOrig = pcubetex;
}
else
{
// Volume texture; note it uses D3DPOOL_SYSTEMMEM, unlike the other two.
LPDIRECT3DVOLUMETEXTURE9 pvoltex;
m_dwDepth = dlg.m_dwDepth;
hr = pd3ddev->CreateVolumeTexture(m_dwWidth, m_dwHeight, m_dwDepth, m_numMips,
0, dlg.m_fmt, D3DPOOL_SYSTEMMEM, &pvoltex, NULL);
if (FAILED(hr))
{
AfxMessageBox(ID_ERROR_CANTCREATETEXTURE);
return FALSE;
}
m_ptexOrig = pvoltex;
}
return TRUE;
}
// Loads a texture file, dispatching on its resource type (2D, volume, cube).
// For BMP 2D textures, also looks for a companion "<name>_a.bmp" file and, if
// present, loads it as the alpha channel. Returns FALSE on any load failure.
BOOL CDxtexDoc::OnOpenDocument(LPCTSTR lpszPathName)
{
LPDIRECT3DDEVICE9 pd3ddev = PDxtexApp()->Pd3ddev();
D3DXIMAGE_INFO imageinfo;
D3DXIMAGE_INFO imageinfo2;
// Probe the file first to learn its type/size/format without loading it.
if( FAILED( D3DXGetImageInfoFromFile( lpszPathName, &imageinfo ) ) )
{
AfxMessageBox(ID_ERROR_COULDNTLOADFILE);
return FALSE;
}
switch( imageinfo.ResourceType )
{
case D3DRTYPE_TEXTURE:
// D3DX_FILTER_NONE preserves the file's pixels exactly; imageinfo2
// receives the dimensions actually used for the created texture.
if( FAILED( D3DXCreateTextureFromFileEx( pd3ddev, lpszPathName,
imageinfo.Width, imageinfo.Height, imageinfo.MipLevels, 0,
imageinfo.Format, D3DPOOL_MANAGED, D3DX_FILTER_NONE, D3DX_FILTER_NONE, 0,
&imageinfo2, NULL, (LPDIRECT3DTEXTURE9*)&m_ptexOrig ) ) )
{
AfxMessageBox(ID_ERROR_COULDNTLOADFILE);
return FALSE;
}
m_dwWidth = imageinfo2.Width;
m_dwHeight = imageinfo2.Height;
m_dwDepth = 0;
m_numMips = imageinfo2.MipLevels;
if( imageinfo.ImageFileFormat == D3DXIFF_BMP )
{
// Look for "foo_a.bmp" for alpha channel
CString strPath = lpszPathName;
int i = strPath.ReverseFind('.');
HRESULT hr;
strPath = strPath.Left(i) + "_a.bmp";
CFileStatus status;
if (CFile::GetStatus(strPath, status))
{
// Make sure there's an alpha channel to load alpha image into
if (FAILED(EnsureAlpha(&m_ptexOrig)))
return FALSE;
LPDIRECT3DSURFACE9 psurf;
hr = ((LPDIRECT3DTEXTURE9)m_ptexOrig)->GetSurfaceLevel(0, &psurf);
if (FAILED(hr))
return FALSE;
hr = LoadAlphaIntoSurface(strPath, psurf);
ReleasePpo(&psurf);
if (FAILED(hr))
return FALSE;
}
}
break;
case D3DRTYPE_VOLUMETEXTURE:
if( FAILED( D3DXCreateVolumeTextureFromFileEx( pd3ddev, lpszPathName,
imageinfo.Width, imageinfo.Height, imageinfo.Depth, imageinfo.MipLevels,
0, imageinfo.Format, D3DPOOL_MANAGED, D3DX_FILTER_NONE, D3DX_FILTER_NONE,
0, &imageinfo2, NULL, (LPDIRECT3DVOLUMETEXTURE9*)&m_ptexOrig ) ) )
{
AfxMessageBox(ID_ERROR_COULDNTLOADFILE);
return FALSE;
}
m_dwWidth = imageinfo2.Width;
m_dwHeight = imageinfo2.Height;
m_dwDepth = imageinfo2.Depth;
m_numMips = imageinfo2.MipLevels;
break;
case D3DRTYPE_CUBETEXTURE:
if( FAILED( D3DXCreateCubeTextureFromFileEx( pd3ddev, lpszPathName,
imageinfo.Width, imageinfo.MipLevels, 0, imageinfo.Format,
D3DPOOL_MANAGED, D3DX_FILTER_NONE, D3DX_FILTER_NONE,
0, &imageinfo2, NULL, (LPDIRECT3DCUBETEXTURE9*)&m_ptexOrig ) ) )
{
AfxMessageBox(ID_ERROR_COULDNTLOADFILE);
return FALSE;
}
m_dwWidth = imageinfo2.Width;
m_dwHeight = imageinfo2.Height;
m_dwDepth = 0;
m_numMips = imageinfo2.MipLevels;
m_dwCubeMapFlags = DDS_CUBEMAP_ALLFACES;
break;
default:
// Unrecognized resource type.
AfxMessageBox(ID_ERROR_COULDNTLOADFILE);
return FALSE;
}
return TRUE;
}
BOOL CDxtexDoc::OnSaveDocument(LPCTSTR lpszPathName)
{
    // Persist whichever texture the user currently has: the converted
    // ("new") texture when one exists, otherwise the original.
    LPDIRECT3DBASETEXTURE9 ptexToSave =
        (m_ptexNew != NULL) ? m_ptexNew : m_ptexOrig;

    if (FAILED(D3DXSaveTextureToFile(lpszPathName, D3DXIFF_DDS, ptexToSave, NULL)))
    {
        AfxMessageBox(ID_ERROR_COULDNTSAVEFILE);
        return FALSE;
    }

    SetModifiedFlag(FALSE); // document is clean again after a successful save
    return TRUE;
}
// Returns the pixel format of the top level of ptex, dispatching on the
// document's texture kind (volume / cube / standard mip texture).
// Returns D3DFMT_UNKNOWN when ptex is NULL.
D3DFORMAT CDxtexDoc::GetFormat(LPDIRECT3DBASETEXTURE9 ptex)
{
    if (ptex == NULL)
        return D3DFMT_UNKNOWN;

    if (IsVolumeMap())
    {
        D3DVOLUME_DESC vd;
        ((LPDIRECT3DVOLUMETEXTURE9)ptex)->GetLevelDesc(0, &vd);
        return vd.Format;
    }

    if (IsCubeMap())
    {
        D3DSURFACE_DESC sd;
        ((LPDIRECT3DCUBETEXTURE9)ptex)->GetLevelDesc(0, &sd);
        return sd.Format;
    }

    D3DSURFACE_DESC sd;
    ((LPDIRECT3DTEXTURE9)ptex)->GetLevelDesc(0, &sd);
    return sd.Format;
}
// If *pptex's current format has less than 4 bits of alpha, change
// it to a similar format that has at least 4 bits of alpha.
// NOTE(review): despite taking pptex, the conversion below operates on
// m_ptexOrig directly; all visible callers pass &m_ptexOrig, so the two
// coincide in practice — confirm before calling with anything else.
HRESULT CDxtexDoc::EnsureAlpha(LPDIRECT3DBASETEXTURE9* pptex)
{
HRESULT hr;
D3DFORMAT fmtCur = GetFormat(*pptex);
D3DFORMAT fmtNew = D3DFMT_UNKNOWN;
LPDIRECT3DBASETEXTURE9 ptex = NULL;
// Map each alpha-less format to its closest alpha-capable sibling.
switch (fmtCur)
{
case D3DFMT_X8R8G8B8:
case D3DFMT_R8G8B8:
fmtNew = D3DFMT_A8R8G8B8;
break;
case D3DFMT_X1R5G5B5:
case D3DFMT_R5G6B5:
fmtNew = D3DFMT_A1R5G5B5;
break;
case D3DFMT_X8B8G8R8:
fmtNew = D3DFMT_A8B8G8R8;
break;
case D3DFMT_L8:
fmtNew = D3DFMT_A8L8;
break;
default:
// Format already has adequate alpha (or is unhandled): leave as-is.
break;
}
if( fmtNew != D3DFMT_UNKNOWN )
{
// Convert m_ptexOrig in place, releasing the old texture on success.
if (FAILED(hr = ChangeFormat(m_ptexOrig, fmtNew, &ptex)))
return hr;
ReleasePpo(&m_ptexOrig);
m_ptexOrig = ptex;
}
return S_OK;
}
/////////////////////////////////////////////////////////////////////////////
// CDxtexDoc commands

// Loads the image at strPath into the alpha channel of m_ptexOrig's top
// mip level. Cube maps are rejected (a single bitmap cannot address six
// faces). On success the views are told to re-acquire surface pointers.
HRESULT CDxtexDoc::LoadAlphaBmp(CString& strPath)
{
    if (IsCubeMap())
        return E_FAIL;

    LPDIRECT3DTEXTURE9 ptexTop = (LPDIRECT3DTEXTURE9)m_ptexOrig;
    LPDIRECT3DSURFACE9 psurfTop = NULL;

    HRESULT hr = ptexTop->GetSurfaceLevel(0, &psurfTop);
    if (FAILED(hr))
        return hr;

    hr = LoadAlphaIntoSurface(strPath, psurfTop);
    ReleasePpo(&psurfTop);
    if (FAILED(hr))
        return hr;

    UpdateAllViews(NULL, 1); // tell CView to pick up new surface pointers
    return S_OK;
}
// Creates a copy of ptexCur converted to fmtTo and returns it through
// pptexNew. The texture kind (2D / cube / volume) is taken from the
// document, not from ptexCur.
//
// Premultiplied-alpha sources (DXT2/DXT4) may only convert to other
// premultiplied formats; converting to DXT1 produces a warning but
// proceeds, and converting to any other format is refused (returns S_OK
// with *pptexNew untouched, matching the historical behavior).
//
// BUGFIX: the HRESULT of BltAllLevels was previously not captured in the
// volume and 2D branches, so a failed blit returned the stale (successful)
// hr from CreateTexture. It is now assigned before the FAILED test.
HRESULT CDxtexDoc::ChangeFormat(LPDIRECT3DBASETEXTURE9 ptexCur, D3DFORMAT fmtTo,
    LPDIRECT3DBASETEXTURE9* pptexNew)
{
    HRESULT hr;
    LPDIRECT3DDEVICE9 pd3ddev = PDxtexApp()->Pd3ddev();
    D3DFORMAT fmtFrom;
    LPDIRECT3DTEXTURE9 pmiptexNew;
    LPDIRECT3DCUBETEXTURE9 pcubetexNew;
    LPDIRECT3DVOLUMETEXTURE9 pvoltexNew;

    // Determine the current top-level format of ptexCur.
    if (IsVolumeMap())
    {
        D3DVOLUME_DESC vd;
        ((LPDIRECT3DVOLUMETEXTURE9)ptexCur)->GetLevelDesc(0, &vd);
        fmtFrom = vd.Format;
    }
    else if (IsCubeMap())
    {
        D3DSURFACE_DESC sd;
        ((LPDIRECT3DCUBETEXTURE9)ptexCur)->GetLevelDesc(0, &sd);
        fmtFrom = sd.Format;
    }
    else
    {
        D3DSURFACE_DESC sd;
        ((LPDIRECT3DTEXTURE9)ptexCur)->GetLevelDesc(0, &sd);
        fmtFrom = sd.Format;
    }

    // Premultiplied-alpha restrictions (see function comment).
    if (fmtFrom == D3DFMT_DXT2 || fmtFrom == D3DFMT_DXT4)
    {
        if (fmtTo == D3DFMT_DXT1)
        {
            AfxMessageBox(ID_ERROR_PREMULTTODXT1); // warn, then continue
        }
        else if (fmtTo != D3DFMT_DXT2 && fmtTo != D3DFMT_DXT4)
        {
            AfxMessageBox(ID_ERROR_PREMULTALPHA);
            return S_OK; // refused, but historically not reported as failure
        }
    }

    if (IsVolumeMap())
    {
        hr = pd3ddev->CreateVolumeTexture(m_dwWidth, m_dwHeight, m_dwDepth, m_numMips,
            0, fmtTo, D3DPOOL_SYSTEMMEM, &pvoltexNew, NULL);
        if (FAILED(hr))
            return hr;
        *pptexNew = pvoltexNew;
        // FaceType is ignored for volume textures.
        if (FAILED(hr = BltAllLevels(D3DCUBEMAP_FACE_FORCE_DWORD, ptexCur, *pptexNew)))
            return hr;
    }
    else if (IsCubeMap())
    {
        hr = pd3ddev->CreateCubeTexture(m_dwWidth, m_numMips,
            0, fmtTo, D3DPOOL_MANAGED, &pcubetexNew, NULL);
        if (FAILED(hr))
            return hr;
        *pptexNew = pcubetexNew;
        // Copy every face's full mip chain.
        if (FAILED(hr = BltAllLevels(D3DCUBEMAP_FACE_NEGATIVE_X, ptexCur, *pptexNew)))
            return hr;
        if (FAILED(hr = BltAllLevels(D3DCUBEMAP_FACE_POSITIVE_X, ptexCur, *pptexNew)))
            return hr;
        if (FAILED(hr = BltAllLevels(D3DCUBEMAP_FACE_NEGATIVE_Y, ptexCur, *pptexNew)))
            return hr;
        if (FAILED(hr = BltAllLevels(D3DCUBEMAP_FACE_POSITIVE_Y, ptexCur, *pptexNew)))
            return hr;
        if (FAILED(hr = BltAllLevels(D3DCUBEMAP_FACE_NEGATIVE_Z, ptexCur, *pptexNew)))
            return hr;
        if (FAILED(hr = BltAllLevels(D3DCUBEMAP_FACE_POSITIVE_Z, ptexCur, *pptexNew)))
            return hr;
    }
    else
    {
        // DXTn blocks are 4x4 texels, so compressed targets need
        // multiple-of-4 dimensions.
        if ((fmtTo == D3DFMT_DXT1 || fmtTo == D3DFMT_DXT2 ||
            fmtTo == D3DFMT_DXT3 || fmtTo == D3DFMT_DXT4 ||
            fmtTo == D3DFMT_DXT5) && (m_dwWidth % 4 != 0 || m_dwHeight % 4 != 0))
        {
            AfxMessageBox(ID_ERROR_NEEDMULTOF4);
            return E_FAIL;
        }
        hr = pd3ddev->CreateTexture(m_dwWidth, m_dwHeight, m_numMips,
            0, fmtTo, D3DPOOL_MANAGED, &pmiptexNew, NULL);
        if (FAILED(hr))
            return hr;
        *pptexNew = pmiptexNew;
        if (FAILED(hr = BltAllLevels(D3DCUBEMAP_FACE_FORCE_DWORD, ptexCur, *pptexNew)))
            return hr;
    }
    return S_OK;
}
// Converts m_ptexOrig to fmtTo and installs the result as m_ptexNew.
// When bSwitchView is TRUE, the main window is asked to show the
// compressed view.
HRESULT CDxtexDoc::Compress(D3DFORMAT fmtTo, BOOL bSwitchView)
{
    LPDIRECT3DBASETEXTURE9 ptexConverted = NULL;
    HRESULT hr = ChangeFormat(m_ptexOrig, fmtTo, &ptexConverted);
    if (FAILED(hr))
        return hr;

    ReleasePpo(&m_ptexNew);
    m_ptexNew = ptexConverted;

    SetModifiedFlag();
    m_bTitleModsChanged = TRUE; // force title bar update

    if (bSwitchView && AfxGetMainWnd() != NULL)
        AfxGetMainWnd()->PostMessage(WM_COMMAND, ID_VIEW_COMPRESSED, 0);

    return S_OK;
}
// Menu handler: regenerate the full mip chain (see GenerateMipMaps).
void CDxtexDoc::OnGenerateMipMaps()
{
GenerateMipMaps();
}
void CDxtexDoc::GenerateMipMaps()
{
LONG lwTempH;
LONG lwTempW;
LONG lwPowsW;
LONG lwPowsH;
LPDIRECT3DTEXTURE9 pddsNew = NULL;
D3DFORMAT fmt;
HRESULT hr;
LPDIRECT3DDEVICE9 pd3ddev = PDxtexApp()->Pd3ddev();
LPDIRECT3DTEXTURE9 pmiptex = NULL;
LPDIRECT3DCUBETEXTURE9 pcubetex = NULL;
LPDIRECT3DVOLUMETEXTURE9 pvoltex = NULL;
LPDIRECT3DTEXTURE9 pmiptexNew = NULL;
LPDIRECT3DCUBETEXTURE9 pcubetexNew = NULL;
LPDIRECT3DVOLUMETEXTURE9 pvoltexNew = NULL;
LPDIRECT3DSURFACE9 psurfSrc;
LPDIRECT3DSURFACE9 psurfDest;
LPDIRECT3DVOLUME9 pvolSrc;
LPDIRECT3DVOLUME9 pvolDest;
if (IsVolumeMap())
pvoltex = (LPDIRECT3DVOLUMETEXTURE9)m_ptexOrig;
else if (IsCubeMap())
pcubetex = (LPDIRECT3DCUBETEXTURE9)m_ptexOrig;
else
pmiptex = (LPDIRECT3DTEXTURE9)m_ptexOrig;
if (pvoltex != NULL)
{
D3DVOLUME_DESC vd;
pvoltex->GetLevelDesc(0, &vd);
fmt = vd.Format;
}
else if (pcubetex != NULL)
{
D3DSURFACE_DESC sd;
pcubetex->GetLevelDesc(0, &sd);
fmt = sd.Format;
}
else
{
D3DSURFACE_DESC sd;
pmiptex->GetLevelDesc(0, &sd);
fmt = sd.Format;
}
lwTempW = m_dwWidth;
lwTempH = m_dwHeight;
lwPowsW = 0;
lwPowsH = 0;
while (lwTempW > 0)
{
lwPowsW++;
lwTempW = lwTempW / 2;
}
while (lwTempH > 0)
{
lwPowsH++;
lwTempH = lwTempH / 2;
}
m_numMips = lwPowsW > lwPowsH ? lwPowsW : lwPowsH;
// Create destination mipmap surface - same format as source
if (pvoltex != NULL)
{
if (FAILED(hr = pd3ddev->CreateVolumeTexture(m_dwWidth, m_dwHeight, m_dwDepth,
m_numMips, 0, fmt, D3DPOOL_SYSTEMMEM, &pvoltexNew, NULL)))
{
goto LFail;
}
hr = pvoltex->GetVolumeLevel(0, &pvolSrc);
hr = pvoltexNew->GetVolumeLevel(0, &pvolDest);
hr = D3DXLoadVolumeFromVolume(pvolDest, NULL, NULL, pvolSrc, NULL, NULL,
D3DX_DEFAULT, 0);
ReleasePpo(&pvolSrc);
ReleasePpo(&pvolDest);
hr = D3DXFilterVolumeTexture(pvoltexNew, NULL, 0, D3DX_DEFAULT);
}
else if (pmiptex != NULL)
{
if (FAILED(hr = pd3ddev->CreateTexture(m_dwWidth, m_dwHeight, m_numMips,
0, fmt, D3DPOOL_MANAGED, &pmiptexNew, NULL)))
{
goto LFail;
}
hr = pmiptex->GetSurfaceLevel(0, &psurfSrc);
hr = pmiptexNew->GetSurfaceLevel(0, &psurfDest);
hr = D3DXLoadSurfaceFromSurface(psurfDest, NULL, NULL, psurfSrc, NULL, NULL,
D3DX_DEFAULT, 0);
ReleasePpo(&psurfSrc);
ReleasePpo(&psurfDest);
hr = D3DXFilterTexture(pmiptexNew, NULL, 0, D3DX_DEFAULT);
}
else
{
if (FAILED(hr = pd3ddev->CreateCubeTexture(m_dwWidth, m_numMips,
0, fmt, D3DPOOL_MANAGED, &pcubetexNew, NULL)))
{
goto LFail;
}
hr = pcubetex->GetCubeMapSurface(D3DCUBEMAP_FACE_POSITIVE_X, 0, &psurfSrc);
hr = pcubetexNew->GetCubeMapSurface(D3DCUBEMAP_FACE_POSITIVE_X, 0, &psurfDest);
hr = D3DXLoadSurfaceFromSurface(psurfDest, NULL, NULL, psurfSrc, NULL, NULL,
D3DX_DEFAULT, 0);
ReleasePpo(&psurfSrc);
ReleasePpo(&psurfDest);
hr = pcubetex->GetCubeMapSurface(D3DCUBEMAP_FACE_NEGATIVE_X, 0, &psurfSrc);
hr = pcubetexNew->GetCubeMapSurface(D3DCUBEMAP_FACE_NEGATIVE_X, 0, &psurfDest);
hr = D3DXLoadSurfaceFromSurface(psurfDest, NULL, NULL, psurfSrc, NULL, NULL,
D3DX_DEFAULT, 0);
ReleasePpo(&psurfSrc);
ReleasePpo(&psurfDest);
hr = pcubetex->GetCubeMapSurface(D3DCUBEMAP_FACE_POSITIVE_Y, 0, &psurfSrc);
hr = pcubetexNew->GetCubeMapSurface(D3DCUBEMAP_FACE_POSITIVE_Y, 0, &psurfDest);
hr = D3DXLoadSurfaceFromSurface(psurfDest, NULL, NULL, psurfSrc, NULL, NULL,
D3DX_DEFAULT, 0);
ReleasePpo(&psurfSrc);
ReleasePpo(&psurfDest);
hr = pcubetex->GetCubeMapSurface(D3DCUBEMAP_FACE_NEGATIVE_Y, 0, &psurfSrc);
hr = pcubetexNew->GetCubeMapSurface(D3DCUBEMAP_FACE_NEGATIVE_Y, 0, &psurfDest);
hr = D3DXLoadSurfaceFromSurface(psurfDest, NULL, NULL, psurfSrc, NULL, NULL,
D3DX_DEFAULT, 0);
ReleasePpo(&psurfSrc);
ReleasePpo(&psurfDest);
hr = pcubetex->GetCubeMapSurface(D3DCUBEMAP_FACE_POSITIVE_Z, 0, &psurfSrc);
hr = pcubetexNew->GetCubeMapSurface(D3DCUBEMAP_FACE_POSITIVE_Z, 0, &psurfDest);
hr = D3DXLoadSurfaceFromSurface(psurfDest, NULL, NULL, psurfSrc, NULL, NULL,
D3DX_DEFAULT, 0);
ReleasePpo(&psurfSrc);
ReleasePpo(&psurfDest);
hr = pcubetex->GetCubeMapSurface(D3DCUBEMAP_FACE_NEGATIVE_Z, 0, &psurfSrc);
hr = pcubetexNew->GetCubeMapSurface(D3DCUBEMAP_FACE_NEGATIVE_Z, 0, &psurfDest);
hr = D3DXLoadSurfaceFromSurface(psurfDest, NULL, NULL, psurfSrc, NULL, NULL,
D3DX_DEFAULT, 0);
ReleasePpo(&psurfSrc);
ReleasePpo(&psurfDest);
hr = D3DXFilterCubeTexture(pcubetexNew, NULL, 0, D3DX_DEFAULT);
}
ReleasePpo(&m_ptexOrig);
if (pvoltexNew != NULL)
m_ptexOrig = pvoltexNew;
else if (pcubetexNew != NULL)
m_ptexOrig = pcubetexNew;
else
m_ptexOrig = pmiptexNew;
if (m_ptexNew != NULL)
{
// Rather than filtering down the (probably-compressed) m_ptexNew
// top level, compress each mip level from the (probably-uncompressed)
// m_ptexOrig levels.
if (pvoltexNew != NULL)
{
D3DVOLUME_DESC vd;
((LPDIRECT3DVOLUMETEXTURE9)m_ptexNew)->GetLevelDesc(0, &vd);
fmt = vd.Format;
}
else if (pcubetexNew != NULL)
{
D3DSURFACE_DESC sd;
((LPDIRECT3DTEXTURE9)m_ptexNew)->GetLevelDesc(0, &sd);
fmt = sd.Format;
}
else
{
D3DSURFACE_DESC sd;
((LPDIRECT3DCUBETEXTURE9)m_ptexNew)->GetLevelDesc(0, &sd);
fmt = sd.Format;
}
Compress(fmt, FALSE);
}
m_bTitleModsChanged = TRUE; // Generate title bar update
UpdateAllViews(NULL, 1); // tell CView to pick up new surface pointers
SetModifiedFlag();
return;
LFail:
ReleasePpo(&pddsNew);
}
// Overrides CDocument::SetPathName. If the opened file is not a .dds, the
// document is treated as an import: the title loses its extension, the
// modified flag is set, and m_strPathName is cleared so Save prompts for a
// fresh (.dds) destination.
void CDxtexDoc::SetPathName(LPCTSTR lpszPathName, BOOL bAddToMRU)
{
CDocument::SetPathName(lpszPathName, bAddToMRU);
// Fixed locale for a case-insensitive ".dds" comparison.
DWORD lcid = MAKELCID(MAKELANGID(LANG_ENGLISH, SUBLANG_ENGLISH_US), SORT_DEFAULT);
TCHAR* pszLeaf = strrchr((LPTSTR)lpszPathName, '\\');
if( pszLeaf )
{
pszLeaf++;
// NOTE(review): strrchr scans the whole path, so a dot in a directory
// name combined with an extensionless leaf would be mistaken for an
// extension — confirm callers always pass a leaf with an extension.
TCHAR* pszExtension = strrchr((LPTSTR)lpszPathName, '.');
if( pszExtension &&
CompareString( lcid, NORM_IGNORECASE, pszExtension, -1, TEXT(".dds"), -1 ) != CSTR_EQUAL )
{
// Truncates lpszPathName in place at the dot (writes '\0' into the
// caller's buffer) so pszLeaf becomes the extensionless title.
StringCchCopy(pszExtension, 1, "");
SetModifiedFlag(TRUE);
SetTitle(pszLeaf);
m_strPathName.Empty();
}
}
}
// Accessor: number of mip levels in the current texture.
DWORD CDxtexDoc::NumMips(VOID)
{
return m_numMips;
}
// Menu handler: load an image file into the alpha channel of the texture's
// top mip level. Only plain 2D textures are supported; premultiplied-alpha
// formats (DXT2/DXT4) are rejected because their alpha cannot be replaced
// independently of color.
void CDxtexDoc::OnFileOpenAlpha()
{
HRESULT hr;
CString fileName;
LPDIRECT3DTEXTURE9 pmiptex;
if (IsCubeMap() || IsVolumeMap())
return;
// Premultiplied-alpha files don't support this feature:
D3DSURFACE_DESC sd;
((LPDIRECT3DTEXTURE9)m_ptexOrig)->GetLevelDesc(0, &sd);
if (sd.Format == D3DFMT_DXT2 || sd.Format == D3DFMT_DXT4)
{
AfxMessageBox(ID_ERROR_PREMULTALPHA);
return;
}
// Check if the original has alpha
if( !FormatContainsAlpha(sd.Format) )
{
// If it doesn't then see if the new does
if (m_ptexNew != NULL)
{
((LPDIRECT3DTEXTURE9)m_ptexNew)->GetLevelDesc(0, &sd);
if( !FormatContainsAlpha(sd.Format) )
{
AfxMessageBox(ID_ERROR_NEEDALPHA);
return;
}
else
{
// Promote the alpha-capable "new" texture to be the original so the
// alpha image has somewhere to go, and switch the view to it.
ReleasePpo(&m_ptexOrig);
m_ptexOrig = m_ptexNew;
m_ptexNew = NULL;
CWnd* Wnd = AfxGetMainWnd();
if( Wnd != NULL )
Wnd->PostMessage(WM_COMMAND, ID_VIEW_ORIGINAL, 0);
}
}
else
{
// Neither texture can hold alpha: give up.
AfxMessageBox(ID_ERROR_NEEDALPHA);
return;
}
}
pmiptex = (LPDIRECT3DTEXTURE9)m_ptexOrig;
if (!PromptForBmp(&fileName))
return;
LPDIRECT3DSURFACE9 psurf;
if (FAILED(hr = pmiptex->GetSurfaceLevel(0, &psurf)))
return;
if (FAILED(hr = LoadAlphaIntoSurface(fileName, psurf)))
return;
// Propagate the changed top level: refilter the mip chain, or recompress
// the converted copy if one exists.
if (m_numMips > 1)
OnGenerateMipMaps();
else if (m_ptexNew != NULL)
{
((LPDIRECT3DTEXTURE9)m_ptexNew)->GetLevelDesc(0, &sd);
Compress(sd.Format, FALSE);
}
UpdateAllViews(NULL, 1);
}
// Replaces the alpha channel of psurf with the BLUE channel of the image at
// strPath. The image is loaded (scaled) into an A8R8G8B8 texture of psurf's
// size, psurf is converted into an A8R8G8B8 scratch surface, the alpha bytes
// are overwritten, and the result is converted back into psurf.
//
// BUGFIX: the HRESULTs of the creation/lock calls were previously ignored,
// so a missing/corrupt alpha file dereferenced an invalid ptexAlpha and the
// function always returned S_OK. Failures are now propagated and all
// intermediate objects are released on every path.
HRESULT CDxtexDoc::LoadAlphaIntoSurface(CString& strPath, LPDIRECT3DSURFACE9 psurf)
{
    HRESULT hr;
    D3DSURFACE_DESC sd;
    LPDIRECT3DDEVICE9 pd3ddev = PDxtexApp()->Pd3ddev();
    LPDIRECT3DTEXTURE9 ptexAlpha = NULL;
    LPDIRECT3DSURFACE9 psurfAlpha = NULL;
    LPDIRECT3DSURFACE9 psurfTarget = NULL;

    psurf->GetDesc(&sd);

    // Load the alpha image into psurfAlpha, a new A8R8G8B8 surface.
    hr = D3DXCreateTextureFromFileEx(pd3ddev, strPath, sd.Width, sd.Height, 1, 0,
        D3DFMT_A8R8G8B8, D3DPOOL_MANAGED, D3DX_DEFAULT,
        D3DX_DEFAULT, 0, NULL, NULL, &ptexAlpha);
    if (FAILED(hr))
        goto LCleanup;
    hr = ptexAlpha->GetSurfaceLevel(0, &psurfAlpha);
    if (FAILED(hr))
        goto LCleanup;

    // Copy the target surface into an A8R8G8B8 scratch surface.
    hr = pd3ddev->CreateOffscreenPlainSurface(sd.Width, sd.Height, D3DFMT_A8R8G8B8,
        D3DPOOL_SCRATCH, &psurfTarget, NULL);
    if (FAILED(hr))
        goto LCleanup;
    hr = D3DXLoadSurfaceFromSurface(psurfTarget, NULL, NULL, psurf, NULL, NULL,
        D3DX_DEFAULT, 0);
    if (FAILED(hr))
        goto LCleanup;

    // Fill in the alpha channels of psurfTarget based on the blue channel
    // of psurfAlpha (blue occupies bits 0-7; shifted into alpha bits 24-31).
    {
        D3DLOCKED_RECT lrSrc;
        D3DLOCKED_RECT lrDest;
        hr = psurfAlpha->LockRect(&lrSrc, NULL, D3DLOCK_READONLY);
        if (FAILED(hr))
            goto LCleanup;
        hr = psurfTarget->LockRect(&lrDest, NULL, 0);
        if (FAILED(hr))
        {
            psurfAlpha->UnlockRect();
            goto LCleanup;
        }
        DWORD* pdwRowSrc = (DWORD*)lrSrc.pBits;
        DWORD* pdwRowDest = (DWORD*)lrDest.pBits;
        for (DWORD yp = 0; yp < sd.Height; yp++)
        {
            DWORD* pdwSrc = pdwRowSrc;
            DWORD* pdwDest = pdwRowDest;
            for (DWORD xp = 0; xp < sd.Width; xp++)
            {
                *pdwDest = (*pdwDest & 0x00ffffff) | (*pdwSrc << 24);
                pdwSrc++;
                pdwDest++;
            }
            // Pitch is in bytes; rows are traversed as DWORDs.
            pdwRowSrc += lrSrc.Pitch / 4;
            pdwRowDest += lrDest.Pitch / 4;
        }
        psurfAlpha->UnlockRect();
        psurfTarget->UnlockRect();
    }

    // Copy psurfTarget back into the real surface (converting to its
    // native format).
    hr = D3DXLoadSurfaceFromSurface(psurf, NULL, NULL, psurfTarget, NULL, NULL,
        D3DX_DEFAULT, 0);

LCleanup:
    // Release allocated interfaces.
    ReleasePpo(&psurfTarget);
    ReleasePpo(&psurfAlpha);
    ReleasePpo(&ptexAlpha);
    return hr;
}
// Shows a standard File Open dialog filtered to the image formats D3DX can
// load, plus an all-files entry. On OK, *pstrPath receives the chosen path
// and TRUE is returned; FALSE on cancel.
BOOL CDxtexDoc::PromptForBmp(CString* pstrPath)
{
CFileDialog dlgFile(TRUE);
CString title;
VERIFY(title.LoadString(AFX_IDS_OPENFILE));
// The Win32 filter string is a sequence of description/pattern pairs
// separated by embedded '\0' characters, built up inside one CString.
CString strFilter;
CString strDefault;
strFilter += "Image Files (*.dds, *.hdr, *.bmp, *.tga, *.jpg, *.png, *.dib)";
strFilter += (TCHAR)'\0'; // next string please
strFilter += _T("*.dds;*.hdr;*.bmp;*.tga;*.jpg;*.png;*.dib");
strFilter += (TCHAR)'\0'; // last string
dlgFile.m_ofn.nMaxCustFilter++;
// append the "*.*" all files filter
CString allFilter;
VERIFY(allFilter.LoadString(AFX_IDS_ALLFILTER));
strFilter += allFilter;
strFilter += (TCHAR)'\0'; // next string please
strFilter += _T("*.*");
strFilter += (TCHAR)'\0'; // last string
dlgFile.m_ofn.nMaxCustFilter++;
dlgFile.m_ofn.lpstrFilter = strFilter;
dlgFile.m_ofn.lpstrTitle = title;
// The dialog writes the selected path directly into pstrPath's buffer.
dlgFile.m_ofn.lpstrFile = pstrPath->GetBuffer(_MAX_PATH);
INT_PTR nResult = dlgFile.DoModal();
pstrPath->ReleaseBuffer();
if (nResult != IDOK)
return FALSE;
return TRUE;
}
// Loads an image file into one subsurface of m_ptexOrig — a volume slice
// (lwMip/lwSlice), a cube face level (FaceType/lwMip), or a plain mip level
// (lwMip) — and mirrors the result into m_ptexNew when present. A companion
// "<name>_a.bmp" file, if found, supplies the alpha channel.
//
// BUGFIX: a failed LoadAlphaIntoSurface previously returned directly,
// leaking psurfOrig/psurfNew/ptex; all paths now flow through LCleanup.
void CDxtexDoc::OpenSubsurface(D3DCUBEMAP_FACES FaceType, LONG lwMip, LONG lwSlice)
{
    HRESULT hr;
    CString fileName;
    LPDIRECT3DDEVICE9 pd3ddev = PDxtexApp()->Pd3ddev();
    LPDIRECT3DTEXTURE9 ptex = NULL;
    LPDIRECT3DSURFACE9 psurfOrig = NULL;
    LPDIRECT3DSURFACE9 psurfNew = NULL;
    int iDot;
    CFileStatus status;

    if (!PromptForBmp(&fileName))
        return;

    if (IsVolumeMap())
    {
        // Volume slices can't be addressed as surfaces directly, so stage
        // the file in a temporary 2D texture; it is copied into the volume
        // further down.
        hr = D3DXCreateTextureFromFile(pd3ddev, fileName, &ptex);
        hr = ptex->GetSurfaceLevel(0, &psurfOrig);
    }
    else if (IsCubeMap())
    {
        hr = ((LPDIRECT3DCUBETEXTURE9)m_ptexOrig)->GetCubeMapSurface(FaceType, lwMip, &psurfOrig);
        if (m_ptexNew != NULL)
            hr = ((LPDIRECT3DCUBETEXTURE9)m_ptexNew)->GetCubeMapSurface(FaceType, lwMip, &psurfNew);
        hr = D3DXLoadSurfaceFromFile(psurfOrig, NULL, NULL, fileName, NULL, D3DX_DEFAULT, 0, NULL);
    }
    else
    {
        hr = ((LPDIRECT3DTEXTURE9)m_ptexOrig)->GetSurfaceLevel(lwMip, &psurfOrig);
        if (m_ptexNew != NULL)
            hr = ((LPDIRECT3DTEXTURE9)m_ptexNew)->GetSurfaceLevel(lwMip, &psurfNew);
        hr = D3DXLoadSurfaceFromFile(psurfOrig, NULL, NULL, fileName, NULL, D3DX_DEFAULT, 0, NULL);
    }

    // Look for "foo_a.bmp" for alpha channel
    iDot = fileName.ReverseFind('.');
    fileName = fileName.Left(iDot) + "_a.bmp";
    if (CFile::GetStatus(fileName, status))
    {
        if (FAILED(hr = LoadAlphaIntoSurface(fileName, psurfOrig)))
            goto LCleanup;
    }

    if (IsVolumeMap())
    {
        // Copy the staged surface into the requested slice of both volumes.
        LPDIRECT3DVOLUME9 pvol;
        hr = ((LPDIRECT3DVOLUMETEXTURE9)m_ptexOrig)->GetVolumeLevel(lwMip, &pvol);
        hr = LoadVolumeSliceFromSurface(pvol, lwSlice, psurfOrig);
        ReleasePpo(&pvol);
        if (m_ptexNew)
        {
            hr = ((LPDIRECT3DVOLUMETEXTURE9)m_ptexNew)->GetVolumeLevel(lwMip, &pvol);
            hr = LoadVolumeSliceFromSurface(pvol, lwSlice, psurfOrig);
            ReleasePpo(&pvol);
        }
    }
    else if (psurfNew != NULL)
    {
        hr = D3DXLoadSurfaceFromSurface(psurfNew, NULL, NULL, psurfOrig, NULL, NULL, D3DX_DEFAULT, 0);
    }

    SetModifiedFlag(TRUE);
    UpdateAllViews(NULL, 1);

LCleanup:
    ReleasePpo(&psurfOrig);
    ReleasePpo(&psurfNew);
    ReleasePpo(&ptex);
}
// Loads an image file into the ALPHA channel of one subsurface (volume
// slice, cube face, or mip level) of m_ptexOrig, mirroring into m_ptexNew
// when present. Premultiplied-alpha formats (DXT2/DXT4) are rejected, and
// for plain 2D textures at least one of the two textures must have an
// alpha-capable format.
//
// BUGFIX (volume branch, m_ptexNew path): the staging surface was stored
// into &psurfOrig instead of &psurfNew, the volume level was fetched from
// m_ptexOrig instead of m_ptexNew, and the slice was loaded into psurfOrig.
// All three now reference the "new" objects, so psurfNew is valid and the
// converted texture receives the alpha data at the LoadVolumeSliceFromSurface
// write-back below.
void CDxtexDoc::OpenAlphaSubsurface(D3DCUBEMAP_FACES FaceType, LONG lwMip, LONG lwSlice)
{
    HRESULT hr;
    CString fileName;
    LPDIRECT3DDEVICE9 pd3ddev = PDxtexApp()->Pd3ddev();
    LPDIRECT3DTEXTURE9 ptexOrig = NULL;
    LPDIRECT3DTEXTURE9 ptexNew = NULL;
    LPDIRECT3DSURFACE9 psurfOrig = NULL;
    LPDIRECT3DSURFACE9 psurfNew = NULL;
    LPDIRECT3DVOLUME9 pvolOrig = NULL;
    LPDIRECT3DVOLUME9 pvolNew = NULL;
    D3DSURFACE_DESC sd;
    DWORD dwWidth = m_dwWidth;
    DWORD dwHeight = m_dwHeight;

    if (IsVolumeMap())
    {
        // Volume slices can't be wrapped in a surface, so stage each slice
        // in a temporary A8R8G8B8 texture sized for the requested mip.
        for (int i = 0; i < lwMip; i++)
        {
            dwWidth /= 2;
            dwHeight /= 2;
        }
        hr = pd3ddev->CreateTexture(dwWidth, dwHeight, 1,
            0, D3DFMT_A8R8G8B8, D3DPOOL_MANAGED, &ptexOrig, NULL);
        hr = ptexOrig->GetSurfaceLevel(0, &psurfOrig);
        hr = ((LPDIRECT3DVOLUMETEXTURE9)m_ptexOrig)->GetVolumeLevel(lwMip, &pvolOrig);
        hr = LoadSurfaceFromVolumeSlice(pvolOrig, lwSlice, psurfOrig);
        if (m_ptexNew != NULL)
        {
            hr = pd3ddev->CreateTexture(dwWidth, dwHeight, 1,
                0, D3DFMT_A8R8G8B8, D3DPOOL_MANAGED, &ptexNew, NULL);
            hr = ptexNew->GetSurfaceLevel(0, &psurfNew);
            hr = ((LPDIRECT3DVOLUMETEXTURE9)m_ptexNew)->GetVolumeLevel(lwMip, &pvolNew);
            hr = LoadSurfaceFromVolumeSlice(pvolNew, lwSlice, psurfNew);
        }
    }
    else if (IsCubeMap())
    {
        hr = ((LPDIRECT3DCUBETEXTURE9)m_ptexOrig)->GetCubeMapSurface(FaceType, lwMip, &psurfOrig);
        ((LPDIRECT3DCUBETEXTURE9)m_ptexOrig)->GetLevelDesc(lwMip, &sd);
        if (sd.Format == D3DFMT_DXT2 || sd.Format == D3DFMT_DXT4)
        {
            AfxMessageBox(ID_ERROR_PREMULTALPHA);
            goto LCleanup;
        }
        if (m_ptexNew != NULL)
        {
            hr = ((LPDIRECT3DCUBETEXTURE9)m_ptexNew)->GetCubeMapSurface(FaceType, lwMip, &psurfNew);
            ((LPDIRECT3DCUBETEXTURE9)m_ptexNew)->GetLevelDesc(lwMip, &sd);
            if (sd.Format == D3DFMT_DXT2 || sd.Format == D3DFMT_DXT4)
            {
                AfxMessageBox(ID_ERROR_PREMULTALPHA);
                goto LCleanup;
            }
        }
    }
    else
    {
        BOOL bAlphaFound = FALSE;
        hr = ((LPDIRECT3DTEXTURE9)m_ptexOrig)->GetSurfaceLevel(lwMip, &psurfOrig);
        ((LPDIRECT3DTEXTURE9)m_ptexOrig)->GetLevelDesc(lwMip, &sd);
        if (sd.Format == D3DFMT_DXT2 || sd.Format == D3DFMT_DXT4)
        {
            AfxMessageBox(ID_ERROR_PREMULTALPHA);
            goto LCleanup;
        }
        // Check if the original has alpha
        if( FormatContainsAlpha(sd.Format) )
        {
            bAlphaFound = TRUE;
        }
        if (m_ptexNew != NULL)
        {
            hr = ((LPDIRECT3DTEXTURE9)m_ptexNew)->GetSurfaceLevel(lwMip, &psurfNew);
            ((LPDIRECT3DTEXTURE9)m_ptexNew)->GetLevelDesc(lwMip, &sd);
            if (sd.Format == D3DFMT_DXT2 || sd.Format == D3DFMT_DXT4)
            {
                AfxMessageBox(ID_ERROR_PREMULTALPHA);
                goto LCleanup;
            }
            // Check if the new has alpha
            if( FormatContainsAlpha(sd.Format) )
            {
                bAlphaFound = TRUE;
            }
        }
        if( bAlphaFound == FALSE )
        {
            AfxMessageBox(ID_ERROR_NEEDALPHA);
            goto LCleanup;
        }
    }

    if (!PromptForBmp(&fileName))
        goto LCleanup;

    // Merge the file's blue channel into the alpha of both staged surfaces.
    if (FAILED(hr = LoadAlphaIntoSurface(fileName, psurfOrig)))
        goto LCleanup;
    if (psurfNew != NULL)
    {
        if (FAILED(hr = LoadAlphaIntoSurface(fileName, psurfNew)))
            goto LCleanup;
    }

    // Volume case: write the staged surfaces back into their slices.
    if (pvolOrig != NULL)
    {
        hr = LoadVolumeSliceFromSurface(pvolOrig, lwSlice, psurfOrig);
    }
    if (pvolNew != NULL)
    {
        hr = LoadVolumeSliceFromSurface(pvolNew, lwSlice, psurfNew);
    }

    SetModifiedFlag(TRUE);
    UpdateAllViews(NULL, 1);

LCleanup:
    ReleasePpo(&psurfOrig);
    ReleasePpo(&psurfNew);
    ReleasePpo(&ptexOrig);
    ReleasePpo(&ptexNew);
    ReleasePpo(&pvolOrig);
    ReleasePpo(&pvolNew);
}
// Menu handler: converts the current 2D texture(s) into cube maps. The
// user picks which face receives the existing image; the other five faces
// are left with whatever the driver initialized them to.
// NOTE(review): per-call HRESULTs below are not checked; a failed create
// would crash on the following loop — confirm acceptable for this tool.
void CDxtexDoc::OnFormatChangeCubeMapFaces()
{
HRESULT hr;
LPDIRECT3DDEVICE9 pd3ddev = PDxtexApp()->Pd3ddev();
D3DSURFACE_DESC sd;
LPDIRECT3DCUBETEXTURE9 ptexCube;
DWORD iLevel;
LPDIRECT3DSURFACE9 psurfSrc;
LPDIRECT3DSURFACE9 psurfDest;
CCubeMapDlg cubeMapDlg;
if (IDCANCEL == cubeMapDlg.DoModal())
return;
// Change m_ptexOrig into a cubemap
((LPDIRECT3DTEXTURE9)m_ptexOrig)->GetLevelDesc(0, &sd);
hr = D3DXCreateCubeTexture(pd3ddev, m_dwWidth, m_numMips, 0, sd.Format, D3DPOOL_MANAGED, &ptexCube);
// Copy every mip level into the chosen face of the new cube texture.
for (iLevel = 0; iLevel < m_numMips; iLevel++)
{
hr = ((LPDIRECT3DTEXTURE9)m_ptexOrig)->GetSurfaceLevel(iLevel, &psurfSrc);
hr = ptexCube->GetCubeMapSurface((D3DCUBEMAP_FACES)cubeMapDlg.m_iFace, iLevel, &psurfDest);
hr = D3DXLoadSurfaceFromSurface(psurfDest, NULL, NULL,
psurfSrc, NULL, NULL, D3DX_DEFAULT, 0);
ReleasePpo(&psurfSrc);
ReleasePpo(&psurfDest);
}
ReleasePpo(&m_ptexOrig);
m_ptexOrig = ptexCube;
// Change m_ptexNew into a cubemap too
if (m_ptexNew != NULL)
{
((LPDIRECT3DTEXTURE9)m_ptexNew)->GetLevelDesc(0, &sd);
hr = D3DXCreateCubeTexture(pd3ddev, m_dwWidth, m_numMips, 0, sd.Format, D3DPOOL_MANAGED, &ptexCube);
for (iLevel = 0; iLevel < m_numMips; iLevel++)
{
hr = ((LPDIRECT3DTEXTURE9)m_ptexNew)->GetSurfaceLevel(iLevel, &psurfSrc);
hr = ptexCube->GetCubeMapSurface((D3DCUBEMAP_FACES)cubeMapDlg.m_iFace, iLevel, &psurfDest);
hr = D3DXLoadSurfaceFromSurface(psurfDest, NULL, NULL,
psurfSrc, NULL, NULL, D3DX_DEFAULT, 0);
ReleasePpo(&psurfSrc);
ReleasePpo(&psurfDest);
}
ReleasePpo(&m_ptexNew);
m_ptexNew = ptexCube;
}
// Document now claims all six faces even though only one was populated.
m_dwCubeMapFlags = DDS_CUBEMAP_ALLFACES;
SetModifiedFlag();
UpdateAllViews(NULL, 1); // tell CView to pick up new surface pointers
}
// Menu handler: converts the current 2D texture(s) into volume textures.
// The user picks the layer count (a power of two, doubled); the existing
// image is copied into slice 0 of every mip level, and the remaining
// slices are left uninitialized.
void CDxtexDoc::OnFormatMakeIntoVolumeMap()
{
HRESULT hr;
LPDIRECT3DDEVICE9 pd3ddev = PDxtexApp()->Pd3ddev();
D3DSURFACE_DESC sd;
LPDIRECT3DVOLUMETEXTURE9 ptexVolume;
DWORD iLevel;
LPDIRECT3DSURFACE9 psurfSrc;
LPDIRECT3DVOLUME9 pvolumeDest;
UINT numLayers;
CVolumeMapDlg volumeMapDlg;
if (IDCANCEL == volumeMapDlg.DoModal())
return;
// Dialog stores log2(layers)-1; recover the actual layer count.
numLayers = (1 << volumeMapDlg.m_powLayers) * 2;
// Change m_ptexOrig into a volumemap
((LPDIRECT3DTEXTURE9)m_ptexOrig)->GetLevelDesc(0, &sd);
hr = pd3ddev->CreateVolumeTexture(m_dwWidth, m_dwHeight, numLayers,
m_numMips, 0, sd.Format, D3DPOOL_SYSTEMMEM, &ptexVolume, NULL);
if (FAILED(hr))
{
if( E_OUTOFMEMORY == hr )
AfxMessageBox( ID_ERROR_OUTOFMEMORY );
else
AfxMessageBox( ID_ERROR_UNKNOWN );
return;
}
// Copy each mip level into slice 0 of the corresponding volume level.
for (iLevel = 0; iLevel < m_numMips; iLevel++)
{
hr = ((LPDIRECT3DTEXTURE9)m_ptexOrig)->GetSurfaceLevel(iLevel, &psurfSrc);
hr = ptexVolume->GetVolumeLevel(iLevel, &pvolumeDest);
hr = LoadVolumeSliceFromSurface(pvolumeDest, 0, psurfSrc);
ReleasePpo(&psurfSrc);
ReleasePpo(&pvolumeDest);
}
ReleasePpo(&m_ptexOrig);
m_ptexOrig = ptexVolume;
// Change m_ptexNew into a volumemap too
if (m_ptexNew != NULL)
{
((LPDIRECT3DTEXTURE9)m_ptexNew)->GetLevelDesc(0, &sd);
hr = pd3ddev->CreateVolumeTexture(m_dwWidth, m_dwHeight, numLayers,
m_numMips, 0, sd.Format, D3DPOOL_SYSTEMMEM, &ptexVolume, NULL);
// NOTE(review): failing here silently leaves m_ptexNew as a 2D texture
// while m_ptexOrig is already a volume — the document is inconsistent.
if (FAILED(hr))
return;
for (iLevel = 0; iLevel < m_numMips; iLevel++)
{
hr = ((LPDIRECT3DTEXTURE9)m_ptexNew)->GetSurfaceLevel(iLevel, &psurfSrc);
hr = ptexVolume->GetVolumeLevel(iLevel, &pvolumeDest);
hr = LoadVolumeSliceFromSurface(pvolumeDest, 0, psurfSrc);
ReleasePpo(&psurfSrc);
ReleasePpo(&pvolumeDest);
}
ReleasePpo(&m_ptexNew);
m_ptexNew = ptexVolume;
}
m_dwDepth = numLayers;
SetModifiedFlag();
UpdateAllViews(NULL, 1); // tell CView to pick up new surface pointers
}
// Copies the contents of a 2D surface into slice iSlice of pVolume,
// converting formats/sizes via D3DX as needed.
HRESULT CDxtexDoc::LoadVolumeSliceFromSurface(LPDIRECT3DVOLUME9 pVolume, UINT iSlice, LPDIRECT3DSURFACE9 psurf)
{
    D3DSURFACE_DESC sd;
    D3DVOLUME_DESC vd;
    psurf->GetDesc(&sd);
    pVolume->GetDesc(&vd);

    // Source box: the entire surface, treated as one slice deep.
    D3DBOX boxSrc;
    boxSrc.Left = 0;
    boxSrc.Top = 0;
    boxSrc.Front = 0;
    boxSrc.Right = sd.Width;
    boxSrc.Bottom = sd.Height;
    boxSrc.Back = 1;

    // Destination box: the full extent of slice iSlice.
    D3DBOX boxDest;
    boxDest.Left = 0;
    boxDest.Top = 0;
    boxDest.Right = vd.Width;
    boxDest.Bottom = vd.Height;
    boxDest.Front = iSlice;
    boxDest.Back = iSlice + 1;

    D3DLOCKED_RECT lr;
    HRESULT hr = psurf->LockRect(&lr, NULL, 0);
    if (FAILED(hr))
        return hr;
    hr = D3DXLoadVolumeFromMemory(pVolume, NULL, &boxDest, lr.pBits, sd.Format, lr.Pitch,
        0, NULL, &boxSrc, D3DX_DEFAULT, 0);
    psurf->UnlockRect();
    return hr;
}
// Copies slice iSlice of pVolume into a 2D surface, converting
// formats/sizes via D3DX as needed.
HRESULT CDxtexDoc::LoadSurfaceFromVolumeSlice(LPDIRECT3DVOLUME9 pVolume, UINT iSlice, LPDIRECT3DSURFACE9 psurf)
{
    D3DVOLUME_DESC vd;
    pVolume->GetDesc(&vd);

    // Lock exactly one slice of the volume...
    D3DBOX boxSlice;
    boxSlice.Left = 0;
    boxSlice.Top = 0;
    boxSlice.Right = vd.Width;
    boxSlice.Bottom = vd.Height;
    boxSlice.Front = iSlice;
    boxSlice.Back = iSlice + 1;

    // ...and describe its full extent as the source rectangle.
    RECT rcSlice;
    rcSlice.left = 0;
    rcSlice.top = 0;
    rcSlice.right = vd.Width;
    rcSlice.bottom = vd.Height;

    D3DLOCKED_BOX lb;
    HRESULT hr = pVolume->LockBox(&lb, &boxSlice, 0);
    if (FAILED(hr))
        return hr;
    hr = D3DXLoadSurfaceFromMemory(psurf, NULL, NULL, lb.pBits, vd.Format, lb.RowPitch,
        NULL, &rcSlice, D3DX_DEFAULT, 0);
    pVolume->UnlockBox();
    return hr;
}
// Copies all m_numMips levels of ptexSrc into ptexDest via D3DX, converting
// pixel format as needed. FaceType selects the face for cube maps and is
// ignored for 2D and volume textures. Assumes both textures have at least
// m_numMips levels.
// NOTE(review): per-level HRESULTs are not checked; a failed copy is
// silently skipped and S_OK is still returned.
HRESULT CDxtexDoc::BltAllLevels(D3DCUBEMAP_FACES FaceType,
LPDIRECT3DBASETEXTURE9 ptexSrc, LPDIRECT3DBASETEXTURE9 ptexDest)
{
HRESULT hr;
LPDIRECT3DTEXTURE9 pmiptexSrc;
LPDIRECT3DTEXTURE9 pmiptexDest;
LPDIRECT3DCUBETEXTURE9 pcubetexSrc;
LPDIRECT3DCUBETEXTURE9 pcubetexDest;
LPDIRECT3DVOLUMETEXTURE9 pvoltexSrc;
LPDIRECT3DVOLUMETEXTURE9 pvoltexDest;
DWORD iLevel;
// Only the pair matching the document's texture kind is initialized;
// the loop below checks the same predicates before using them.
if (IsVolumeMap())
{
pvoltexSrc = (LPDIRECT3DVOLUMETEXTURE9)ptexSrc;
pvoltexDest = (LPDIRECT3DVOLUMETEXTURE9)ptexDest;
}
else if (IsCubeMap())
{
pcubetexSrc = (LPDIRECT3DCUBETEXTURE9)ptexSrc;
pcubetexDest = (LPDIRECT3DCUBETEXTURE9)ptexDest;
}
else
{
pmiptexSrc = (LPDIRECT3DTEXTURE9)ptexSrc;
pmiptexDest = (LPDIRECT3DTEXTURE9)ptexDest;
}
for (iLevel = 0; iLevel < m_numMips; iLevel++)
{
if (IsVolumeMap())
{
LPDIRECT3DVOLUME9 pvolSrc = NULL;
LPDIRECT3DVOLUME9 pvolDest = NULL;
hr = pvoltexSrc->GetVolumeLevel(iLevel, &pvolSrc);
hr = pvoltexDest->GetVolumeLevel(iLevel, &pvolDest);
hr = D3DXLoadVolumeFromVolume(pvolDest, NULL, NULL,
pvolSrc, NULL, NULL, D3DX_DEFAULT, 0);
ReleasePpo(&pvolSrc);
ReleasePpo(&pvolDest);
}
else if (IsCubeMap())
{
LPDIRECT3DSURFACE9 psurfSrc = NULL;
LPDIRECT3DSURFACE9 psurfDest = NULL;
hr = pcubetexSrc->GetCubeMapSurface(FaceType, iLevel, &psurfSrc);
hr = pcubetexDest->GetCubeMapSurface(FaceType, iLevel, &psurfDest);
hr = D3DXLoadSurfaceFromSurface(psurfDest, NULL, NULL,
psurfSrc, NULL, NULL, D3DX_DEFAULT, 0);
ReleasePpo(&psurfSrc);
ReleasePpo(&psurfDest);
}
else
{
LPDIRECT3DSURFACE9 psurfSrc = NULL;
LPDIRECT3DSURFACE9 psurfDest = NULL;
hr = pmiptexSrc->GetSurfaceLevel(iLevel, &psurfSrc);
hr = pmiptexDest->GetSurfaceLevel(iLevel, &psurfDest);
hr = D3DXLoadSurfaceFromSurface(psurfDest, NULL, NULL,
psurfSrc, NULL, NULL, D3DX_DEFAULT, 0);
ReleasePpo(&psurfSrc);
ReleasePpo(&psurfDest);
}
}
return S_OK;
}
// Recreates m_ptexOrig (and m_ptexNew, if present) at the new dimensions,
// copying all mip levels across with D3DX scaling, then updates the cached
// width/height and refreshes the views.
//
// BUGFIX: the HRESULT of BltAllLevels was previously not captured, so a
// failed blit returned the stale (successful) hr from CreateTexture; the
// freshly created texture was also leaked on that path.
HRESULT CDxtexDoc::Resize(DWORD dwWidthNew, DWORD dwHeightNew)
{
    HRESULT hr;
    LPDIRECT3DTEXTURE9 pmiptexNew = NULL;
    LPDIRECT3DDEVICE9 pd3ddev = PDxtexApp()->Pd3ddev();

    // Rebuild the original texture at the new size.
    hr = pd3ddev->CreateTexture(dwWidthNew, dwHeightNew, m_numMips,
        0, GetFormat(m_ptexOrig), D3DPOOL_MANAGED, &pmiptexNew, NULL);
    if (FAILED(hr))
        return hr;
    if (FAILED(hr = BltAllLevels(D3DCUBEMAP_FACE_FORCE_DWORD, m_ptexOrig, pmiptexNew)))
    {
        ReleasePpo(&pmiptexNew);
        return hr;
    }
    ReleasePpo(&m_ptexOrig);
    m_ptexOrig = pmiptexNew;

    // Rebuild the converted texture at the new size, if one exists.
    if( m_ptexNew != NULL )
    {
        hr = pd3ddev->CreateTexture(dwWidthNew, dwHeightNew, m_numMips,
            0, GetFormat(m_ptexOrig), D3DPOOL_MANAGED, &pmiptexNew, NULL);
        if (FAILED(hr))
            return hr;
        if (FAILED(hr = BltAllLevels(D3DCUBEMAP_FACE_FORCE_DWORD, m_ptexNew, pmiptexNew)))
        {
            ReleasePpo(&pmiptexNew);
            return hr;
        }
        ReleasePpo(&m_ptexNew);
        m_ptexNew = pmiptexNew;
    }

    m_dwWidth = dwWidthNew;
    m_dwHeight = dwHeightNew;
    SetModifiedFlag(TRUE);
    UpdateAllViews(NULL, 4);
    return S_OK;
}
// Loads an image file into the top level of one cube face of m_ptexOrig
// (mirrored into m_ptexNew when present). A companion "<name>_a.bmp"
// supplies the alpha channel, and the mip chain is refiltered when the
// texture has more than one level.
//
// BUGFIX: the early returns (dialog canceled, alpha load failed) previously
// leaked the face surfaces acquired above them; all exit paths now release
// them via LCleanup.
void CDxtexDoc::OpenCubeFace(D3DCUBEMAP_FACES FaceType)
{
    HRESULT hr;
    CString fileName;
    LPDIRECT3DSURFACE9 psurfOrig = NULL;
    LPDIRECT3DSURFACE9 psurfNew = NULL;
    int iDot;
    CFileStatus status;

    if (!IsCubeMap())
        return;

    hr = ((LPDIRECT3DCUBETEXTURE9)m_ptexOrig)->GetCubeMapSurface(FaceType, 0, &psurfOrig);
    if (m_ptexNew != NULL)
        hr = ((LPDIRECT3DCUBETEXTURE9)m_ptexNew)->GetCubeMapSurface(FaceType, 0, &psurfNew);

    if (!PromptForBmp(&fileName))
        goto LCleanup;

    hr = D3DXLoadSurfaceFromFile(psurfOrig, NULL, NULL, fileName, NULL, D3DX_DEFAULT, 0, NULL);

    // Look for "foo_a.bmp" for alpha channel
    iDot = fileName.ReverseFind('.');
    fileName = fileName.Left(iDot) + "_a.bmp";
    if (CFile::GetStatus(fileName, status))
    {
        if (FAILED(hr = LoadAlphaIntoSurface(fileName, psurfOrig)))
            goto LCleanup;
    }

    // Refilter the original's mip chain from the updated face.
    if (m_numMips > 1)
    {
        hr = D3DXFilterCubeTexture((LPDIRECT3DCUBETEXTURE9)m_ptexOrig, NULL, 0, D3DX_DEFAULT);
    }
    // Mirror into the converted texture and refilter it too.
    if (psurfNew != NULL)
    {
        hr = D3DXLoadSurfaceFromSurface(psurfNew, NULL, NULL, psurfOrig, NULL, NULL, D3DX_DEFAULT, 0);
        if (m_numMips > 1)
        {
            hr = D3DXFilterCubeTexture((LPDIRECT3DCUBETEXTURE9)m_ptexNew, NULL, 0, D3DX_DEFAULT);
        }
    }

    SetModifiedFlag(TRUE);
    UpdateAllViews(NULL, 1);

LCleanup:
    ReleasePpo(&psurfOrig);
    ReleasePpo(&psurfNew);
}
// Loads an image file into the ALPHA channel of one cube face's top level
// of m_ptexOrig (mirrored into m_ptexNew when present), then refilters the
// mip chain(s). Premultiplied-alpha formats (DXT2/DXT4) are rejected.
//
// BUGFIX: the early returns (premultiplied format, dialog canceled, alpha
// load failed) previously leaked the face surfaces acquired above them;
// all exit paths now release them via LCleanup.
void CDxtexDoc::OpenAlphaCubeFace(D3DCUBEMAP_FACES FaceType)
{
    HRESULT hr;
    CString fileName;
    LPDIRECT3DSURFACE9 psurfOrig = NULL;
    LPDIRECT3DSURFACE9 psurfNew = NULL;
    D3DSURFACE_DESC sd;

    if (!IsCubeMap())
        return;

    hr = ((LPDIRECT3DCUBETEXTURE9)m_ptexOrig)->GetCubeMapSurface(FaceType, 0, &psurfOrig);
    ((LPDIRECT3DCUBETEXTURE9)m_ptexOrig)->GetLevelDesc(0, &sd);
    if (sd.Format == D3DFMT_DXT2 || sd.Format == D3DFMT_DXT4)
    {
        AfxMessageBox(ID_ERROR_PREMULTALPHA);
        goto LCleanup;
    }
    if (m_ptexNew != NULL)
    {
        hr = ((LPDIRECT3DCUBETEXTURE9)m_ptexNew)->GetCubeMapSurface(FaceType, 0, &psurfNew);
    }

    if (!PromptForBmp(&fileName))
        goto LCleanup;

    // Merge the file's blue channel into the alpha of both face surfaces.
    if (FAILED(hr = LoadAlphaIntoSurface(fileName, psurfOrig)))
        goto LCleanup;
    if (psurfNew != NULL)
    {
        if (FAILED(hr = LoadAlphaIntoSurface(fileName, psurfNew)))
            goto LCleanup;
    }

    // Refilter the original's mip chain from the updated face.
    if (m_numMips > 1)
    {
        hr = D3DXFilterCubeTexture((LPDIRECT3DCUBETEXTURE9)m_ptexOrig, NULL, 0, D3DX_DEFAULT);
    }
    // Mirror into the converted texture and refilter it too.
    if (psurfNew != NULL)
    {
        hr = D3DXLoadSurfaceFromSurface(psurfNew, NULL, NULL, psurfOrig, NULL, NULL, D3DX_DEFAULT, 0);
        if (m_numMips > 1)
        {
            hr = D3DXFilterCubeTexture((LPDIRECT3DCUBETEXTURE9)m_ptexNew, NULL, 0, D3DX_DEFAULT);
        }
    }

    SetModifiedFlag(TRUE);
    UpdateAllViews(NULL, 1);

LCleanup:
    ReleasePpo(&psurfOrig);
    ReleasePpo(&psurfNew);
}
// Returns the depth (slice count) of the volume texture at mip level
// lwMip: the top-level depth halved once per level, clamped at 1.
DWORD CDxtexDoc::DwDepthAt(LONG lwMip)
{
    DWORD dwDepth = m_dwDepth;
    for (; lwMip > 0 && dwDepth > 1; lwMip--)
        dwDepth /= 2;
    return dwDepth;
}
// Opens the "change surface format" dialog pre-populated with the current
// top-level format of the working texture, and recompresses to the chosen
// format when the user confirms.
void CDxtexDoc::OnFormatChangeSurfaceFmt()
{
CChangeFmtDlg changeFmtDlg;
LPDIRECT3DBASETEXTURE9 ptex;
// Prefer the modified (new) texture when one exists; otherwise the original.
ptex = (m_ptexNew == NULL ? m_ptexOrig : m_ptexNew);
// Fetch the top-level format via the interface matching the texture kind.
if (IsVolumeMap())
{
D3DVOLUME_DESC vd;
((LPDIRECT3DVOLUMETEXTURE9)ptex)->GetLevelDesc(0, &vd);
changeFmtDlg.m_fmt = vd.Format;
}
else if (IsCubeMap())
{
D3DSURFACE_DESC sd;
((LPDIRECT3DCUBETEXTURE9)ptex)->GetLevelDesc(0, &sd);
changeFmtDlg.m_fmt = sd.Format;
}
else
{
D3DSURFACE_DESC sd;
((LPDIRECT3DTEXTURE9)ptex)->GetLevelDesc(0, &sd);
changeFmtDlg.m_fmt = sd.Format;
}
// Tell the dialog whether to offer volume-capable formats only.
changeFmtDlg.m_bVolume = IsVolumeMap();
if (IDCANCEL == changeFmtDlg.DoModal())
return;
Compress(changeFmtDlg.m_fmt, TRUE);
}
// Enable "File > Open Alpha" only for plain 2D textures (not cube/volume maps).
void CDxtexDoc::OnUpdateFileOpenAlpha(CCmdUI* pCmdUI)
{
    const BOOL bPlainTexture = !(IsCubeMap() || IsVolumeMap());
    pCmdUI->Enable(bPlainTexture);
}
// Enable mipmap generation only when the texture has no mip chain yet.
void CDxtexDoc::OnUpdateFormatGenerateMipmaps(CCmdUI* pCmdUI)
{
    const BOOL bNoMipChain = (m_numMips <= 1);
    pCmdUI->Enable(bNoMipChain);
}
// Converting to a cube map only makes sense for a plain 2D texture.
void CDxtexDoc::OnUpdateFormatChangeCubeMapFaces(CCmdUI* pCmdUI)
{
    const BOOL bPlainTexture = !(IsCubeMap() || IsVolumeMap());
    pCmdUI->Enable(bPlainTexture);
}
// Converting to a volume map only makes sense for a plain 2D texture.
void CDxtexDoc::OnUpdateFormatMakeIntoVolumeMap(CCmdUI* pCmdUI)
{
    const BOOL bPlainTexture = !(IsCubeMap() || IsVolumeMap());
    pCmdUI->Enable(bPlainTexture);
}
// Prompts the user for new texture dimensions (showing the current ones)
// and resizes the texture when the dialog is confirmed.
// Fix: removed a duplicated m_oldWidth.Format call that redundantly
// reformatted the same value a second time.
void CDxtexDoc::OnFormatResize()
{
    CResizeDialog resizeDialog;
    resizeDialog.m_oldWidth.Format("%d", m_dwWidth);
    resizeDialog.m_oldHeight.Format("%d", m_dwHeight);
    resizeDialog.m_newWidth = m_dwWidth;
    resizeDialog.m_newHeight = m_dwHeight;
    if( IDOK == resizeDialog.DoModal() )
        Resize(resizeDialog.m_newWidth, resizeDialog.m_newHeight);
}
// Resizing is only offered for plain 2D textures.
void CDxtexDoc::OnUpdateFormatResize(CCmdUI* pCmdUI)
{
    const BOOL bPlainTexture = !(IsCubeMap() || IsVolumeMap());
    pCmdUI->Enable(bPlainTexture);
}
|
#!/usr/bin/env bash
# Provision a remote server with the ansible playbook next to this script.
#
# Usage: <script> HOSTNAME PASSWORD [PORT]
#   HOSTNAME - target system hostname or IP address (required)
#   PASSWORD - ssh password for root on the target (required)
#   PORT     - server_port passed to the playbook (default: 38944)
HOSTNAME="${1}"
PASSWORD="${2}"
USER=root
PORT="${3:-38944}"
# Fix: quote "$0" so the script still works when its path contains spaces.
SCRIPT_DIR="$(dirname "${0}")"

if [ -z "${HOSTNAME}" ]; then
  echo "Target system hostname or ip address is not specified"
  exit 1
fi
if [ -z "${PASSWORD}" ]; then
  echo "Target system ssh password is not specified"
  exit 1
fi

# NOTE(review): passing the password via --extra-vars exposes it in `ps`
# output and shell history; consider SSH keys or ansible-vault instead.
ansible-playbook "${SCRIPT_DIR}/server-playbook.yml" --extra-vars "hostname=${HOSTNAME} server_port=${PORT}" \
  --extra-vars "ansible_user=${USER} ansible_password=${PASSWORD}" \
  --ssh-common-args "-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null" \
  -i "${HOSTNAME}", -vv
|
package com.vc.easy
object L408 {

  /**
   * LeetCode 408 — Valid Word Abbreviation.
   *
   * Returns true iff `abbr` is a valid abbreviation of `word`: letters must
   * match one-for-one, and a number consumes that many characters of `word`.
   * A count may not have a leading zero ("01" is invalid, as is a bare "0").
   *
   * Fix: the previous code handled leading zeros by bumping `num` to 1
   * before parsing, which "poisons" the count — but the poisoned count
   * could coincidentally equal the remaining word length (e.g. abbr "0"
   * wrongly matched any 10-character word). A digit run starting with '0'
   * is now rejected outright.
   */
  def validWordAbbreviation(word: String, abbr: String): Boolean = {
    var i = 0 // position in word
    var j = 0 // position in abbr
    val n = word.length
    val m = abbr.length
    while (i < n && j < m) {
      if (!Character.isDigit(abbr(j))) {
        // Letter: must match the current character of word exactly.
        if (word(i) != abbr(j)) return false
        i += 1
        j += 1
      }
      else {
        // Digit run: a count with a leading zero is never valid.
        if (abbr(j) == '0') return false
        var num = 0
        while (j < m && abbr(j) >= '0' && abbr(j) <= '9') {
          num = num * 10 + (abbr(j) - '0')
          j += 1
        }
        i += num // skip `num` characters of word (may overshoot; checked below)
      }
    }
    // Valid only if both word and abbr were consumed exactly.
    i == n && j == m
  }
}
|
#!/bin/bash
# get-svn-revision - Determine Subversion repository revision number.
# This tries to work both for "real" Subversion repositories, and for
# SVK local copies of Subversion repositories (the kind of copy that
# can be used for offline work).
# If an .svn directory exists, this is a Subversion repository.
# Otherwise, assume that this is a local SVK copy of a Subversion repository.
# If the revision cannot be determined, return no output.
# Who-to-blame:
# Paul DuBois, paul@mysql.com
# 2005-11-17
# 2006-04-25
# - Enable script to get remote (parent) repository revision, too
# (use -R option). Default behavior is to get local revision, as
# before (use no option or -L option).
# Print the revision number of the local working copy, or nothing if it
# cannot be determined. Handles both plain Subversion checkouts and SVK
# local copies.
function get_local_revision {
  if [ -d .svn ]; then
    # This is a Subversion repository: take the revision straight from `svn info`.
    svn info | grep '^Revision' | sed -e 's/Revision: *//'
  else
    # This is a local SVK copy of a Subversion repository.
    # The first 'Copied From:' line presumably is the parent in the
    # SVK depot of this local copy.
    DEPOT=`svk info \
      | grep '^Copied From:' \
      | head -n 1 \
      | sed -e 's/^Copied From: *//' -e 's/,.*//'`
    # Try info for parent if there is no "Copied From:" line in the current directory
    if [ "$DEPOT" = "" ]; then
      DEPOT=`svk info .. \
        | grep '^Copied From:' \
        | head -n 1 \
        | sed -e 's/^Copied From: *//' -e 's/,.*//'`
    fi
    # The info for the depot includes a 'Mirrored From:' line that
    # indicates the parent Subversion repository and the revision last
    # mirrored.
    # (If DEPOT is empty, then we are unable to tell the revision
    # and there is no output.)
    if [ "$DEPOT" != "" ]; then
      svk info /$DEPOT | grep '^Mirrored From:' | sed -e 's/.*, Rev\. *//'
    fi
  fi
}
# Print the revision number of the remote (parent) repository, or nothing
# if it cannot be determined. Mirrors get_local_revision but follows the
# working copy's URL / mirror source to query the parent repository.
function get_remote_revision {
  if [ -d .svn ]; then
    # This is a Subversion repository: query the repository at the
    # checkout's URL rather than the working copy itself.
    URL=`svn info | grep '^URL: ' | sed -e 's/^URL: *//'`
    svn info $URL | grep '^Revision' | sed -e 's/Revision: *//'
  else
    # This is a local SVK copy of a Subversion repository.
    # The first 'Copied From:' line presumably is the parent in the
    # SVK depot of this local copy.
    DEPOT=`svk info \
      | grep '^Copied From:' \
      | head -n 1 \
      | sed -e 's/^Copied From: *//' -e 's/,.*//'`
    # Try info for parent if there is no "Copied From:" line in the current directory
    if [ "$DEPOT" = "" ]; then
      DEPOT=`svk info .. \
        | grep '^Copied From:' \
        | head -n 1 \
        | sed -e 's/^Copied From: *//' -e 's/,.*//'`
    fi
    # The info for the depot includes a 'Mirrored From:' line that
    # indicates the parent Subversion repository and the revision last
    # mirrored.
    # (If DEPOT is empty, then we are unable to tell the revision
    # and there is no output.)
    if [ "$DEPOT" != "" ]; then
      URL=`svk info /$DEPOT | grep '^Mirrored From:' \
        | sed -e 's/^Mirrored From: *//' \
        | sed -e 's/,.*//'`
      svn info $URL | grep '^Revision' | sed -e 's/Revision: *//'
    fi
  fi
}
# Dispatch: no argument or -L prints the local revision; -R prints the
# remote (parent) repository revision; anything else is a usage error.
# Fixes: POSIX marks `-o` inside `[ ]` obsolescent — use `||` between two
# tests; the usage error now also exits non-zero so callers can detect it.
if [ $# -eq 0 ] || [ "$1" = "-L" ]; then
  get_local_revision
elif [ "$1" = "-R" ]; then
  get_remote_revision
else
  echo "Usage: $0 [-L|-R]" 1>&2
  exit 1
fi
|
package com.acmeair;
/**
 * Integration-test entry point for Acme Air; all test cases are inherited
 * from {@link AcmeAirApplicationIntegrationBase}. The empty subclass
 * presumably exists so the shared suite runs under the Spring
 * configuration implied by the name — confirm against the base class.
 */
public class AcmeAirSpringIT extends AcmeAirApplicationIntegrationBase {
}
|
<gh_stars>1-10
import L from 'leaflet'
import 'leaflet/dist/leaflet.css'
import '../node_modules/leaflet-measure/dist/leaflet-measure'
import '../node_modules/leaflet-measure/dist/leaflet-measure.css'
import 'leaflet.nontiledlayer'
import 'leaflet.gridlayer.googlemutant'
import 'leaflet.locatecontrol'
import {MAX_ZOOM} from './constants'
import CatastroParser from './catastroParser'
const catastroParser = new CatastroParser()
/**
 * Leaflet map wrapper for viewing the Spanish cadastre (Catastro).
 * Combines PNOA orthophoto / Google satellite base layers with Catastro WMS
 * layers, plus measuring, geolocation and parcel-identification controls.
 * Relies on DOM elements #map, #panel-capas and the #modal1 Materialize
 * modal (via global jQuery `$`) — assumes those exist; confirm in the page.
 */
export default class Map {
  constructor() {
    // Base map centered on the Vigo area (Galicia, Spain).
    this.map = L.map("map", {
      zoom: 10,
      center: [42.284829, -8.553642],
      zoomControl: false,
      attributionControl: true,
      doubleClickZoom: true,
      maxZoom: MAX_ZOOM
    })
    L.control.zoom({position: 'bottomright'}).addTo(this.map)
    // Distance/area measuring control (Spanish localization, metric units).
    const measureOptions = {
      position: 'topright',
      localization: 'es',
      primaryLengthUnit: 'meters',
      secondaryLengthUnit: 'kilometers',
      primaryAreaUnit: 'sqmeters',
      captureZIndex: 10000
    }
    this.measureControl = L.control.measure(measureOptions)
    this.measureControl.addTo(this.map)
    // PNOA orthophoto WMS base layer, shown by default.
    const pnoa = L.tileLayer.wms('https://www.ign.es/wms-inspire/pnoa-ma?', {
      maxZoom: MAX_ZOOM,
      layers: 'OI.OrthoimageCoverage',
      format: 'image/png',
      attribution: 'PNOA cedido por © <a href="http://www.ign.es/ign/main/index.do" target="_blank">Instituto Geográfico Nacional de España</a>'
    }).addTo(this.map)
    const satellite = L.gridLayer.googleMutant({
      maxZoom: MAX_ZOOM,
      type: 'satellite' // valid values are 'roadmap', 'satellite', 'terrain' and 'hybrid'
    })
    // Catastro WMS endpoint; a date is appended after TIME= by catastroHistorico().
    this.catastroUrl = 'http://katastrophe.herokuapp.com/wms?TIME='
    // Opaque Catastro layer used as an alternative base map.
    this.catastroBase = L.nonTiledLayer.wms(
      this.catastroUrl,
      {
        maxZoom: MAX_ZOOM,
        layers: 'Catastro',
        format: 'image/png',
        transparent: false,
        attribution: 'Dirección General del Catastro'
      }
    )
    // Transparent Catastro layer drawn over whichever base map is active.
    this.catastroOverlay = L.nonTiledLayer.wms(
      this.catastroUrl,
      {
        maxZoom: MAX_ZOOM,
        layers: 'Catastro',
        format: 'image/png',
        transparent: true,
        attribution: 'DG del Catastro'
      }
    ).addTo(this.map)
    // GeoJSON layer used to highlight a selected parcel; styling comes from
    // each feature's own `properties`, and the view zooms to the geometry.
    this.highlight = L.geoJSON(null, {
      pointToLayer: function (ftr, latLng) {
        return L.circleMarker(latLng);
      },
      style: (feature) => {
        const { properties } = feature
        return properties
      },
      onEachFeature: (feature, layer) => {
        this.map.fitBounds(layer.getBounds())
      }
    }).addTo(this.map)
    const baseMaps = {
      PNOA: pnoa,
      Catastro: this.catastroBase,
      'Google Satellite': satellite
    }
    const overLays = {
      'Catastro' : this.catastroOverlay
    }
    /* Layer control configuration: render the control inside the side panel
       (#panel-capas) instead of floating on the map. */
    const layerControl = L.control.layers(baseMaps, overLays, {collapsed:false})
    layerControl.addTo(this.map)
    var htmlObject = layerControl.getContainer()
    const panelCapas = document.getElementById('panel-capas')
    panelCapas.appendChild(htmlObject)
    /* Location control (leaflet.locatecontrol); its default button is hidden
       below and it is triggered programmatically via activaLocation(). */
    this.locateControl = L.control.locate({
      position: "bottomright",
      drawCircle: true,
      follow: true,
      setView: 'once',
      keepCurrentZoomLevel: false,
      markerStyle: {
        weight: 1,
        opacity: 0.8,
        fillOpacity: 0.8
      },
      circleStyle: {
        weight: 1,
        clickable: false
      },
      metric: true,
      showPopup: false,
      locateOptions: {
        maxZoom: 18,
        watch: true,
        enableHighAccuracy: true,
        maximumAge: 10000,
        timeout: 10000
      }
    }).addTo(this.map)
    var locateDiv = document.getElementsByClassName('leaflet-control-locate')[0]
    locateDiv.style.display = 'none'
    this.activaIdentificacion()
  }
  /**
   * Fetches the parcel geometry for a cadastral reference and highlights it.
   * @param {string} refcat - Cadastral reference.
   */
  descargaParcela(refcat) {
    catastroParser.getParcel(refcat).then((geoJson) => {
      this.loadGeoJson(geoJson)
    })
  }
  /**
   * Enables parcel identification: clicking the map looks up the cadastral
   * parcel at that point and shows its details in the #modal1 modal.
   */
  activaIdentificacion() {
    this.map.addEventListener('click', (e) => {
      let modalContent = document.getElementById('modal-content')
      let modalFooter = document.getElementById('modal-footer')
      // Show an indeterminate progress bar while the lookup is in flight.
      modalContent.innerHTML = `
        <div class="progress light-green darken-1">
        <div class="indeterminate light-green darken-4"></div>
        </div>
      `
      modalFooter.innerHTML = null
      //$('#modal-content').html('<div class="progress light-green darken-1"><div class="indeterminate light-green darken-4"></div></div>')
      //$('#modal-footer').empty()
      // NOTE(review): assumes global jQuery + Materialize modal plugin are loaded.
      $('#modal1').modal('open')
      catastroParser.getInfoXY('EPSG:4326', e.latlng.lng, e.latlng.lat).then((json) => {
        var html_content = `
          <h4><small>Referencia Catastral:</small> ` + json.refcat + `</h4>
          <p>` + json.direccion + `</p>
        `
        var html_footer = `
          <a href="` + json.accesoSede + `" class="modal-action waves-effect waves-green btn light-green darken-2" target="_blank">Sede Catastro</a>
          <a href="#" id="btn-descarga" class="modal-action waves-effect waves-green btn light-indigo darken-2">Resaltar</a>
        `
        modalContent.innerHTML = html_content
        modalFooter.innerHTML = html_footer
        // "Resaltar" button downloads and highlights the clicked parcel.
        let btnDescarga = document.getElementById('btn-descarga')
        btnDescarga.addEventListener('click', (e) => {
          this.descargaParcela(json.refcat)
        })
      }).catch((json) => {
        // Lookup failed: show the service's error message in the modal.
        var html_content = `
          <h4>Error</h4>
          <p>` + json.message + `</p>
        `
        modalContent.innerHTML = html_content
        modalFooter.innerHTML = null
      })
    })
  }
  /** Removes any highlighted parcel geometry from the map. */
  clearHighLight() {
    this.highlight.clearLayers()
  }
  /** Replaces the current highlight with the given GeoJSON geometry. */
  loadGeoJson(geoJson) {
    this.clearHighLight()
    this.highlight.addData(geoJson)
  }
  /**
   * Activates the historical cadastre for the selected date.
   * @param {string} dateString - Date in yyyy-mm-dd format.
   */
  catastroHistorico(dateString) {
    // NOTE(review): mutates leaflet.nontiledlayer's private _wmsUrl field —
    // fragile across library versions; confirm when upgrading.
    this.catastroOverlay._wmsUrl = this.catastroUrl + dateString
    this.catastroBase._wmsUrl = this.catastroUrl + dateString
    this.catastroOverlay.redraw()
    this.catastroBase.redraw()
  }
  /** Removes the measuring control from the map. */
  desactivaMedidor() {
    this.measureControl.remove()
  }
  /** Re-adds the measuring control to the map. */
  activaMedidor() {
    this.measureControl.addTo(this.map)
  }
  /** Triggers geolocation via the (hidden) locate control. */
  activaLocation() {
    // NOTE(review): calls the control's private _onClick handler because the
    // button itself is hidden — confirm this survives plugin upgrades.
    this.locateControl._onClick()
  }
}
|
<filename>src/actions/index.js
import types from './types';
import styles from './mapStyles';
/**
 * Thunk action creator: resolves `location` ('current location' or an
 * address string) to lat/lng, builds a Google Map, and dispatches
 * GET_COORDINATES. Redirects to /notValid when geocoding fails.
 *
 * Fix: getCurrentPosition and geocode are callback-based and return
 * undefined, so the previous bare `await`s were no-ops — the thunk resolved
 * before any result arrived. Both calls are now wrapped in Promises so the
 * async flow actually waits.
 */
export function getCoordinates(location,props){
  return async dispatch => {
    let lat, long, map;
    if(location === 'current location'){
      try {
        const position = await new Promise((resolve, reject) =>
          navigator.geolocation.getCurrentPosition(resolve, reject)
        );
        lat = position.coords.latitude;
        long = position.coords.longitude;
        map = initMap(lat,long);
        dispatch({
          type: types.GET_COORDINATES,
          payload: {lat,long,map,location}
        })
      } catch(err){
        // Geolocation denied/unavailable; previously this failed silently.
        console.log('Create Map Error:', err.message);
      }
    }else{
      try {
        var geocoder = new google.maps.Geocoder();
        await new Promise((resolve) =>
          geocoder.geocode( { 'address': location}, function(results, status) {
            if (status == 'OK') {
              lat = results[0].geometry.location.lat();
              long = results[0].geometry.location.lng();
              map = initMap(lat,long);
              dispatch({
                type: types.GET_COORDINATES,
                payload: {lat,long,map,location}
              })
            } else {
              // Unresolvable address: send the user to the error route.
              props.history.push('/notValid');
            }
            resolve();
          })
        );
      }catch(err){
        console.log('Create Map Error:', err.message);
      }
    }
  }
}
/**
 * Creates a Google Map in the #map element centered on (lat, long), drops a
 * marker at the center, and returns the map instance.
 * Fix: removed the redundant outer `let map;` and the unused `marker`
 * binding (the Marker is constructed purely for its rendering side effect).
 */
function initMap(lat,long){
  const options = {
    center: {lat: lat, lng: long},
    zoom: 10,
    styles: styles
  };
  const googleMap = document.getElementById('map');
  const map = new google.maps.Map(googleMap, options);
  // Constructed for its side effect: rendering a marker at the center.
  new google.maps.Marker({
    position: {lat: lat, lng: long},
    map: map
  });
  return map;
}
/**
 * Thunk action creator: resolves the user's origin (geolocation or geocoded
 * address) and renders driving directions from it to the trail coordinates.
 *
 * Fix: as in getCoordinates, the previous bare `await`s on callback-based
 * APIs resolved immediately; they are now wrapped in Promises so the thunk
 * waits for the origin before rendering directions.
 */
export function getDirections(trailLat, trailLng, map, location) {
  return async dispatch => {
    try {
      let initLat, initLng;
      if(location === 'current location'){
        const position = await new Promise((resolve, reject) =>
          navigator.geolocation.getCurrentPosition(resolve, reject)
        );
        initLat = position.coords.latitude;
        initLng = position.coords.longitude;
        initMapDirections(initLat,initLng,trailLat, trailLng, map, dispatch);
      }else{
        let geocoder = new google.maps.Geocoder();
        await new Promise((resolve) =>
          geocoder.geocode( { 'address': location}, function(results, status) {
            if (status == 'OK') {
              initLat = results[0].geometry.location.lat();
              initLng = results[0].geometry.location.lng();
              initMapDirections(initLat,initLng,trailLat, trailLng, map, dispatch);
            } else {
              console.log('Location lat and lng not available from getDirections.');
            }
            resolve();
          })
        );
      }
    } catch(err){
      console.log('Google Map for direction not working:', err);
    }
  }
}
/**
 * Renders Google driving directions from (initLat, initLng) to
 * (trailLat, trailLng) on `map`, writing the turn-by-turn panel into
 * #drivingDirectionContainer, and dispatches GET_DIRECTIONS on success.
 * If `map` is an empty object, a fresh map is created in #mapDirection.
 */
function initMapDirections(initLat,initLng,trailLat, trailLng, map, dispatch){
// No real map was passed (deleteMapDirection resets it to {}), so build one.
if(Object.keys(map).length === 0){
const options ={
center: {lat: parseFloat(initLat), lng: parseFloat(initLng)},
zoom: 10,
styles: styles
};
let googleMap = document.getElementById('mapDirection');
map = new google.maps.Map(googleMap, options);
}
let directionsService = new google.maps.DirectionsService();
let directionsDisplay = new google.maps.DirectionsRenderer();
directionsDisplay.setMap(map);
// The renderer writes the textual directions into this panel element.
directionsDisplay.setPanel(document.getElementById('drivingDirectionContainer'));
const requestOptions = {
origin: {lat: parseFloat(initLat), lng: parseFloat(initLng)},
destination: {lat: parseFloat(trailLat), lng: parseFloat(trailLng)},
travelMode: 'DRIVING'
};
directionsService.route(requestOptions, function(response, status) {
if (status == 'OK') {
directionsDisplay.setDirections(response);
dispatch({
type: types.GET_DIRECTIONS,
payload: {map}
});
} else {
console.log('Google direction not working due to:', status);
}
});
}
/**
 * Action creator: clears the stored directions map by resetting it to an
 * empty object (initMapDirections treats {} as "build a fresh map").
 */
export function deleteMapDirection(){
  const emptyMap = {};
  return {
    type: types.DELETE_MAP_DIRECTIONS,
    payload: {map: emptyMap}
  };
}
|
// OPA5 integration tests for the Delivery Item page and its tracking
// timeline. Every test starts the component on the same fixed DeliveryItem
// entity and tears the app down afterwards.
sap.ui.define([
  "sap/ui/test/opaQunit",
  "./pages/DeliveryItemPage",
  "./pages/TrackingTimeline",
], function (opaQunit) {
  "use strict";
  QUnit.module("Delivery Item");
  opaQunit("Should display delivery item sections", function (Given, When, Then) {
    // Arrangements
    Given.iStartMyUIComponent({
      componentConfig: {
        name: "com/sap/gtt/app/sample/pof",
        async: true,
      },
      hash: "DeliveryItem(guid'c75316ce-a2cd-5f8c-82e4-b4661d3a48e2')",
      timeout: 60,
      autoWait: true,
    });
    // check title in Delivery Item header
    Then.onTheDeliveryItemPage.theTitleShouldDisplayTheNo("187020020 / 10");
    // check the dangerous-goods indicator in the general information section
    Then.onTheDeliveryItemPage.theGeneralInfoShouldDisplayTheDangerousGoods("Yes");
    Then.onTheDeliveryItemPage.theReferenceDocumentsTableShouldHaveItems();
    // Cleanup
    Then.iTeardownMyApp();
  });
  opaQunit("Should display the map with routes / spots", function (Given, When, Then) {
    // Arrangements
    Given.iStartMyUIComponent({
      componentConfig: {
        name: "com/sap/gtt/app/sample/pof",
        async: true,
      },
      hash: "DeliveryItem(guid'c75316ce-a2cd-5f8c-82e4-b4661d3a48e2')",
      timeout: 60,
      autoWait: true,
    });
    // Legend starts expanded and collapses when pressed.
    Then.onTheTrackingTimeline.theMapHasLegend();
    Then.onTheTrackingTimeline.theLegendShouldBeExpanded(true);
    When.onTheTrackingTimeline.iPressLegend();
    Then.onTheTrackingTimeline.theLegendShouldBeExpanded(false);
    Then.onTheTrackingTimeline.theMapHasRoutesSpots();
    Then.onTheTrackingTimeline.theMapHasSpotWithErrorType();
    Then.onTheTrackingTimeline.theMapHasNumberOfEventStops(7);
    // Cleanup
    Then.iTeardownMyApp();
  });
  opaQunit("Should display Location Master popover", function (Given, When, Then) {
    // Arrangements
    Given.iStartMyUIComponent({
      componentConfig: {
        name: "com/sap/gtt/app/sample/pof",
        async: true,
      },
      hash: "DeliveryItem(guid'c75316ce-a2cd-5f8c-82e4-b4661d3a48e2')",
      timeout: 60,
      autoWait: true,
    });
    // check the plant description, then open and verify its location popover
    Then.onTheDeliveryItemPage.thePlantShouldDisplayTheDescription("Plant 0001");
    When.onTheDeliveryItemPage.iPressPlantLocationPopover();
    Then.onTheDeliveryItemPage.thePlantLocationPopoverShouldDisplayDetails("QW9CLNT170 / 0001");
    // Cleanup
    Then.iTeardownMyApp();
  });
  opaQunit("Should display number of reference documents", function (Given, When, Then) {
    // Arrangements
    Given.iStartMyUIComponent({
      componentConfig: {
        name: "com/sap/gtt/app/sample/pof",
        async: true,
      },
      hash: "DeliveryItem(guid'c75316ce-a2cd-5f8c-82e4-b4661d3a48e2')",
      timeout: 60,
      autoWait: true,
    });
    // check reference documents items length
    Then.onTheDeliveryItemPage.theRefDocsTableHasItemsNum(2);
    // Cleanup
    Then.iTeardownMyApp();
  });
  opaQunit("Filter events in timeline", function (Given, When, Then) {
    // Arrangements
    Given.iStartMyUIComponent({
      componentConfig: {
        name: "com/sap/gtt/app/sample/pof",
        async: true,
      },
      hash: "DeliveryItem(guid'c75316ce-a2cd-5f8c-82e4-b4661d3a48e2')",
      timeout: 60,
      autoWait: true,
    });
    // Filter the timeline to "Late Reported" events and verify the count.
    When.onTheTrackingTimeline.iPressFilterBtn();
    Then.onTheTrackingTimeline.theFilterDialogOpens();
    When.onTheTrackingTimeline.iPressFilterItem();
    When.onTheTrackingTimeline.iPressEventStatus("Late Reported");
    When.onTheTrackingTimeline.iPressFilterByEventStatus();
    Then.onTheTrackingTimeline.theTimelineShouldHaveEvents(3);
    // Cleanup
    Then.iTeardownMyApp();
  });
});
|
/*
 * factorial - compute n! as an unsigned 64-bit value.
 *
 * Fixes over the recursive original:
 *  - negative n previously recursed until stack overflow; it now returns 0
 *    (0 is never a factorial value, so it unambiguously signals bad input).
 *  - iterative loop avoids deep recursion entirely.
 *
 * Note: the result overflows for n > 20; 20! is the largest factorial that
 * fits in an unsigned 64-bit integer.
 */
unsigned long long factorial(int n) {
    if (n < 0) {
        return 0; /* invalid input */
    }
    unsigned long long result = 1;
    for (int i = 2; i <= n; ++i) {
        result *= (unsigned long long)i;
    }
    return result;
}
#!/bin/bash
#SBATCH --job-name=sphere_performance_direct
#SBATCH --output=sphere_%A_%a.out
#SBATCH --error=sphere_%A_%a.err
#SBATCH --nodes=1
#SBATCH --ntasks=1
#SBATCH --partition defq
#SBATCH --array=0,1,2,3,4

module load openblas
module load intel/parallel_studio_xe

# Map the SLURM array index (0-4) to a mesh refinement level (5-9).
REFINE_ARR=(5 6 7 8 9)
REFINE=${REFINE_ARR[${SLURM_ARRAY_TASK_ID}]}

# Run each refinement level in its own scratch directory, then clean it up.
# Fix: `cd` is now guarded — previously, if it failed, the job ran (and the
# final `rm -rf` deleted) in the wrong directory.
mkdir -p "$REFINE"
cd "$REFINE" || exit 1
/usr/bin/time -v python ${REPRO_PATH}/bempp_pbs/scripts/direct.py ${REPRO_PATH}/runs/sphere_performance/config/sphere_refine${REFINE}.yml
cd ..
rm -rf "$REFINE"
|
class MyForm extends React.Component {
constructor(props) {
super(props);
this.state = {
fieldValues: {},
};
}
handleChange(field, e) {
var fieldValues = this.state.fieldValues;
fieldValues[field] = e.target.value;
this.setState({ fieldValues });
}
handleSubmit(event) {
const { fieldValues } = this.state;
alert(JSON.stringify(fieldValues));
event.preventDefault();
}
render() {
return (
<form onSubmit={this.handleSubmit}>
<div>
<label>Name: </label>
<input
type="text"
onChange={this.handleChange.bind(this, "name")}
/>
</div>
<div>
<label>Email: </label>
<input
type="email"
onChange={this.handleChange.bind(this, "email")}
/>
</div>
<div>
<label>Password: </label>
<input
type="password"
onChange={this.handleChange.bind(this, "password")}
/>
</div>
<div>
<input type="submit" value="Submit" />
</div>
</form>
);
}
} |
#!/usr/bin/env bash
# Download the latest LLV release from GitHub and install the static library
# and headers under /usr/local.
# Fixes: `set -e` aborts on the first failure instead of continuing with a
# partial install; `mkdir -p` no longer fails when the directory exists.
set -e

echo "Note: this will only work on machines where we can install to the user's bin or lib path"
echo "Else just download llv.h"

# Download every asset attached to the latest release.
curl -s https://api.github.com/repos/BraedonWooding/LLV/releases/latest \
  | grep "browser_download_url" \
  | cut -d '"' -f 4 \
  | wget -qi -

mv libLLV.a /usr/local/lib/
mkdir -p /usr/local/include/LLV
mv include.zip /usr/local/include/LLV/
cd /usr/local/include/LLV/
unzip -qq include.zip
# Flatten the extracted include/ directory into the LLV include root.
mv include/* ./
rmdir include
rm include.zip
|
#!/usr/bin/env bash
# Deploy capi-kpack-watcher, force a fresh rollout, and block until done.
set -ex

# Fix: quote "$0" — an unquoted dirname argument breaks when the script's
# path contains spaces.
SCRIPTS_DIR="$(dirname "$0")"
"${SCRIPTS_DIR}/deploy.sh"

kubectl rollout restart deployment/capi-kpack-watcher -n cf-system
kubectl rollout status deployment/capi-kpack-watcher -w -n cf-system
|
# Fetch and build WebRTC at a pinned M70 commit using Chromium's depot_tools.
git clone --depth=1 https://chromium.googlesource.com/chromium/tools/depot_tools.git
export PATH=$PATH:$(pwd)/depot_tools
# `fetch` creates the src/ checkout; --nohooks defers hook execution to
# `gclient sync`, which pulls all dependencies.
fetch --nohooks --force webrtc
gclient sync
pushd src
# M70
git checkout ca221eabcaed7580663e3727f092e278ac56bb68
# Release build without examples/tools/tests to keep the build small.
gn gen out/Release "--args=is_debug=false is_official_build=true symbol_level=0 rtc_build_examples=false rtc_build_tools=false rtc_include_tests=false"
ninja -C out/Release
popd
|
"""
Train a machine learning algorithm on the given dataset with 80% accuracy
"""
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
# Read the dataset
df = pd.read_csv('data.csv')
# Split the dataset
X = df[["Weight", "Age"]]
y = df["ID"]
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.2)
# Train the model
model = LinearRegression()
model.fit(X_train, y_train)
# Calculate the accuracy
score = model.score(X_test, y_test)
if score >= 0.8:
print("Model is trained with 80% accuracy")
else:
print("Model is trained with less than 80% accuracy") |
const webpack = require('webpack');
const HtmlWebpackPlugin = require('html-webpack-plugin'); //index template
/**
* dotenv-webpack wraps dotenv and Webpack.DefinePlugin.
* As such, it does a text replace in the resulting bundle for any instances of process.env.
*/
const Dotenv = require('dotenv-webpack');
const ManifestPlugin = require('webpack-manifest-plugin');
const CleanWebpackPlugin = require('clean-webpack-plugin'); // build,first clear up 'dist' folder
const { UnusedFilesWebpackPlugin } = require("unused-files-webpack-plugin");
var autoprefixer = require('autoprefixer'); //NEW
/**
* Use the css-loader or the raw-loader to turn it into a JS module
* and the mini-css-extract-plugin to extract it into a separate file.
*/
const MiniCssExtractPlugin = require("mini-css-extract-plugin");
const TerserJSPlugin = require("terser-webpack-plugin"); //min.js
const OptimizeCSSAssetsPlugin = require("optimize-css-assets-webpack-plugin"); //min.css
const Visualizer = require('webpack-visualizer-plugin');
const path = require('path');
// Output directory for dev builds / static assets, and the app source root.
const BUILD_DIR = path.resolve(__dirname, './public');
const APP_DIR = path.resolve(__dirname, './src');
// const configDirs = {
// BUILD_DIR: BUILD_DIR,
// APP_DIR: APP_DIR
// }
// Development mode unless NODE_ENV is exactly 'production'.
let devBuild = (process.env.NODE_ENV !== 'production');
// Plugins used by both dev and prod builds (production additionally appends
// CleanWebpackPlugin where this array is consumed).
const plugins = [
  // Generate index.html with every webpack bundle injected via script tags.
  new HtmlWebpackPlugin({
    favicon: __dirname + '/public/favicon.ico', // works production mode, not work development work
    template: __dirname + "/public/index.html",
  }),
  // Hot module replacement for the dev server.
  new webpack.HotModuleReplacementPlugin(),
  // Extract CSS into real files; hashed/minified names only in production.
  new MiniCssExtractPlugin({
    filename: devBuild ? '[name].css' : 'css/[name].min.css',
    chunkFilename: devBuild ? '[name].css' : 'css/[name].min.css',
  }),
  // Interactive bundle-composition report.
  new Visualizer({
    filename: './statistics.html'
  }),
  // Replace process.env.* references from the local .env file.
  new Dotenv()
]
// Webpack configuration; dev/prod differences are driven by `devBuild`.
const webpackConfig = {
  mode: devBuild ? 'development' : 'production',
  devtool: devBuild ? 'eval-source-map' : 'source-map',
  entry: [
    // core-js polyfills loaded before the application entry point.
    "core-js/modules/es6.promise",
    "core-js/modules/es6.array.iterator",
    APP_DIR + "/index.js"
  ],
  output: {
    path: devBuild ? BUILD_DIR + "/" : path.resolve(__dirname, 'dist'),
    filename: devBuild ? '[name].bundle.js' : 'js/[name].bundle.min.js',
    chunkFilename: devBuild ? '[name].bundle.js' : 'js/[name].bundle.min.js',
  },
  module: {
    rules: [
      // Plain CSS: inline <style> tags in dev, extracted files in prod,
      // with autoprefixer applied via postcss.
      {
        test: /\.css$/,
        use: [
          devBuild ? 'style-loader' : MiniCssExtractPlugin.loader,
          'css-loader',
          {
            loader: 'postcss-loader',
            options: {
              plugins: () => [require('autoprefixer')({
                'browsers': ['> 1%', 'last 2 versions']
              })],
            }
          }
        ]
      },
      // CSS-module LESS files (*.module.less) with locally-scoped class
      // names and camelCase exports.
      {
        test: /\.module\.less$/,
        loader: [
          devBuild ? 'style-loader' : MiniCssExtractPlugin.loader,
          {
            loader: 'css-loader',
            options: {
              modules: true,
              localIdentName: '[name]__[local]___[hash:base64:5]',
              camelCase: true,
            }
          },
          {
            loader: 'postcss-loader',
            options: {
              plugins: () => [require('autoprefixer')({
                'browsers': ['> 1%', 'last 2 versions']
              })],
            }
          },
          "less-loader"
        ]
      },
      // Transpile and lint application JavaScript (node_modules excluded).
      {
        test: /\.js$/, exclude: /node_modules/, use: ['babel-loader', 'eslint-loader']
      },
      // Copy images; content-hashed names under images/ for production.
      {
        test: /\.(png|svg|jpg|gif)$/,
        use: [
          {
            loader: 'file-loader',
            options: {
              name: devBuild ? '[hash].[ext]' : 'images/[sha512:hash:base64:7].[ext]',
            }
          }
        ]
      }
    ]
  },
  // Production builds additionally wipe the output directory first.
  plugins: devBuild ? plugins : [...plugins, new CleanWebpackPlugin()],
  optimization: {
    minimizer: [
      new TerserJSPlugin({}), // minify JS
      new OptimizeCSSAssetsPlugin({}) // minify CSS
    ],
    splitChunks: {
      chunks: 'async',
      minSize: 30040,
      maxSize: 0,
      minChunks: 1,
      maxAsyncRequests: 5,
      maxInitialRequests: 3,
      automaticNameDelimiter: '~',
      name: true,
      cacheGroups: {
        vendors: {
          reuseExistingChunk: true
        },
        // Keep react/react-dom in their own long-lived chunk for caching.
        'react-dom': {
          test: /[\\/]node_modules[\\/](react|react-dom)[\\/]/,
          name: 'react-dom',
          chunks: 'all'
        },
      }
    }
  },
  devServer: {
    host: '192.168.0.150',
    port: 3005,
    contentBase: "./public",
    historyApiFallback: true,
    inline: true,
    hot: true
  },
  resolve: {
    // Allow extension-less imports for these file types.
    extensions: ['.js', '.jsx', '.less'],
    // Path aliases so imports avoid long relative paths.
    alias: {
      Src: path.resolve(__dirname, 'src/'),
      Components: path.resolve(__dirname, 'src/components/'),
      Routes: path.resolve(__dirname, 'src/routers/routes.js'),
      RouterLazy: path.resolve(__dirname, 'src/routers/asyn-lazy.js'),
      Less: path.resolve(__dirname, 'src/less/'),
      Icons: path.resolve(__dirname, 'src/icons/'),
      ActionTypes: path.resolve(__dirname, 'src/redux/action-types.js'),
      Actions: path.resolve(__dirname, 'src/redux/actions/'),
      Api: path.resolve(__dirname, 'src/redux/actions/root-actions'),
      Spin: path.resolve(__dirname, 'src/components/Spin.js'),
      Error: path.resolve(__dirname, 'src/components/Error.js'),
      Config: path.resolve(__dirname, 'src/config.js'),
    }
  }
}

module.exports = webpackConfig
|
#!/bin/bash
# Dependency: This script requires `brightness` cli installed: http://bergdesign.com/brightness/
# Install via homebrew: `brew install brightness`
# Required parameters:
# @raycast.schemaVersion 1
# @raycast.title Brightness
# @raycast.mode silent
#
# Optional parameters:
# @raycast.icon ☀️
# @raycast.packageName System brightness
# @raycast.argument1 { "type": "text", "placeholder": "brightness", "percentEncoded": false }
#
# @Documentation:
# @raycast.description Set system brightness
# @raycast.author Antonio Dal Sie
# @raycast.authorURL https://github.com/exodusanto

# Convert the percentage argument (e.g. "75") to the 0.0-1.0 fraction the
# `brightness` CLI expects: awk multiplies the input by 0.01.
brightness $(awk '{print $1*$2}' <<<"${1} 0.01")
|
import os
import sys
import logging

# Fix: explicit imports instead of `from flask import *`, which pollutes the
# module namespace and hides where names come from.
from flask import Flask, request

# Log level comes from APP_LOG_LEVEL (e.g. "INFO"). Unknown level names now
# fall back to DEBUG instead of raising AttributeError at import time.
log_level = getattr(logging, os.environ.get('APP_LOG_LEVEL', 'DEBUG'), logging.DEBUG)
logging.basicConfig(stream=sys.stdout, level=log_level)
log = logging.getLogger('simple_app')

app = Flask(__name__)


@app.route("/", methods=['GET', 'POST'])
def home():
    """Echo the request headers and body for POST; return "OK" for GET."""
    if request.method == 'POST':
        result = "{}\n{}".format(request.headers, request.stream.read())
        log.debug(result)
        return result
    log.debug('GET request received... OK')
    return "OK"


if __name__ == '__main__':
    app.run(port=8080)
|
// Code generated by go-swagger; DO NOT EDIT.
package subtenant_custom_groups
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"context"
"net/http"
"time"
"github.com/go-openapi/errors"
"github.com/go-openapi/runtime"
cr "github.com/go-openapi/runtime/client"
"github.com/go-openapi/strfmt"
)
// NewMemberListUsingGETParams creates a new MemberListUsingGETParams object
// with the default values initialized (timeout = cr.DefaultTimeout).
func NewMemberListUsingGETParams() *MemberListUsingGETParams {
	var ()
	return &MemberListUsingGETParams{
		timeout: cr.DefaultTimeout,
	}
}

// NewMemberListUsingGETParamsWithTimeout creates a new MemberListUsingGETParams object
// with the default values initialized, and the ability to set a timeout on a request.
func NewMemberListUsingGETParamsWithTimeout(timeout time.Duration) *MemberListUsingGETParams {
	var ()
	return &MemberListUsingGETParams{
		timeout: timeout,
	}
}

// NewMemberListUsingGETParamsWithContext creates a new MemberListUsingGETParams object
// with the default values initialized, and the ability to set a context for a request.
func NewMemberListUsingGETParamsWithContext(ctx context.Context) *MemberListUsingGETParams {
	var ()
	return &MemberListUsingGETParams{
		Context: ctx,
	}
}

// NewMemberListUsingGETParamsWithHTTPClient creates a new MemberListUsingGETParams object
// with the default values initialized, and the ability to set a custom HTTPClient for a request.
func NewMemberListUsingGETParamsWithHTTPClient(client *http.Client) *MemberListUsingGETParams {
	var ()
	return &MemberListUsingGETParams{
		HTTPClient: client,
	}
}
/*MemberListUsingGETParams contains all the parameters to send to the API endpoint
for the member list using g e t operation typically these are written to a http.Request.
GroupID and SubtenantID are required path parameters; Name is an optional
query parameter (nil means "omit").
*/
type MemberListUsingGETParams struct {

	/*GroupID
	  groupId — required path parameter

	*/
	GroupID string
	/*Name
	  name — optional query parameter; nil omits it from the request

	*/
	Name *string
	/*SubtenantID
	  subtenantId — required path parameter

	*/
	SubtenantID string

	timeout    time.Duration
	Context    context.Context
	HTTPClient *http.Client
}
// WithTimeout adds the timeout to the member list using g e t params
func (o *MemberListUsingGETParams) WithTimeout(timeout time.Duration) *MemberListUsingGETParams {
o.SetTimeout(timeout)
return o
}
// SetTimeout adds the timeout to the member list using g e t params
func (o *MemberListUsingGETParams) SetTimeout(timeout time.Duration) {
o.timeout = timeout
}
// WithContext adds the context to the member list using g e t params
func (o *MemberListUsingGETParams) WithContext(ctx context.Context) *MemberListUsingGETParams {
o.SetContext(ctx)
return o
}
// SetContext adds the context to the member list using g e t params
func (o *MemberListUsingGETParams) SetContext(ctx context.Context) {
o.Context = ctx
}
// WithHTTPClient adds the HTTPClient to the member list using g e t params
func (o *MemberListUsingGETParams) WithHTTPClient(client *http.Client) *MemberListUsingGETParams {
o.SetHTTPClient(client)
return o
}
// SetHTTPClient adds the HTTPClient to the member list using g e t params
func (o *MemberListUsingGETParams) SetHTTPClient(client *http.Client) {
o.HTTPClient = client
}
// WithGroupID adds the groupID to the member list using g e t params,
// returning the receiver so calls can be chained.
func (params *MemberListUsingGETParams) WithGroupID(groupID string) *MemberListUsingGETParams {
	params.SetGroupID(groupID)
	return params
}
// SetGroupID stores the groupId path parameter on the params object.
func (params *MemberListUsingGETParams) SetGroupID(groupID string) {
	params.GroupID = groupID
}
// WithName adds the optional name query parameter to the member list using
// g e t params, returning the receiver so calls can be chained.
func (params *MemberListUsingGETParams) WithName(name *string) *MemberListUsingGETParams {
	params.SetName(name)
	return params
}
// SetName stores the optional name query parameter on the params object.
func (params *MemberListUsingGETParams) SetName(name *string) {
	params.Name = name
}
// WithSubtenantID adds the subtenantID to the member list using g e t params,
// returning the receiver so calls can be chained.
func (params *MemberListUsingGETParams) WithSubtenantID(subtenantID string) *MemberListUsingGETParams {
	params.SetSubtenantID(subtenantID)
	return params
}
// SetSubtenantID stores the subtenantId path parameter on the params object.
func (params *MemberListUsingGETParams) SetSubtenantID(subtenantID string) {
	params.SubtenantID = subtenantID
}
// WriteToRequest writes these params to a swagger request: the timeout, the
// required groupId/subtenantId path params, and the optional name query param
// (skipped when nil or empty).
func (o *MemberListUsingGETParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {
	if err := r.SetTimeout(o.timeout); err != nil {
		return err
	}
	var res []error
	// path param groupId
	if err := r.SetPathParam("groupId", o.GroupID); err != nil {
		return err
	}
	// query param name (optional). The generated original re-checked o.Name
	// for nil inside this branch; the outer check already guarantees it.
	if o.Name != nil {
		qName := *o.Name
		if qName != "" {
			if err := r.SetQueryParam("name", qName); err != nil {
				return err
			}
		}
	}
	// path param subtenantId
	if err := r.SetPathParam("subtenantId", o.SubtenantID); err != nil {
		return err
	}
	// res is kept for parity with the generated validation pattern; nothing
	// in this method appends to it.
	if len(res) > 0 {
		return errors.CompositeValidationError(res...)
	}
	return nil
}
|
#!/usr/bin/env bash
# Interactively pick a release type, bump the npm version, build, publish a
# GitHub release, and build/push the Docker images.
clear
RELEASE_TYPE=patch
PS3='Select the type of release to perform: '
releaseTypes=("Major" "Minor" "Patch" "Quit")
select opt in "${releaseTypes[@]}"
do
    case $opt in
        "Major")
            RELEASE_TYPE=major
            break
            ;;
        "Minor")
            RELEASE_TYPE=minor
            break
            ;;
        "Patch")
            RELEASE_TYPE=patch
            break
            ;;
        "Quit")
            # Bug fix: the original only broke out of the select loop, so
            # choosing Quit still performed a patch release.
            exit 0
            ;;
        *) echo "invalid option $REPLY";;
    esac
done
npm version "$RELEASE_TYPE"
rc=$?
if [[ $rc -ne 0 ]] ; then
    # Bug fix: the original tested $? but exited with the never-set $rc.
    echo 'Aborting release due to version incremental failure. Ensure there are no modified/uncommitted files in the repo'
    exit "$rc"
fi
npm run build
rc=$?
if [[ $rc -ne 0 ]] ; then
    echo 'Aborting release due to build failure'
    exit "$rc"
fi
# Extract "version" from package.json (grep/awk/sed kept from the original;
# a JSON-aware tool like jq would be more robust if available).
PACKAGE_VERSION=$(grep version package.json | head -1 | awk -F: '{ print $2 }' | sed 's/[",]//g' | tr -d '[[:space:]]')
zip -r frontend-dist.zip dist/*
echo "Publishing $PACKAGE_VERSION to GitHub Releases"
# pip install githubrelease
# https://github.com/j0057/github-release
githubrelease release dependencytrack/frontend create "$PACKAGE_VERSION" \
    --name "$PACKAGE_VERSION" --body "Dependency-Track Frontend" \
    --publish bom.xml bom.json frontend-dist.zip
REPO=dependencytrack/frontend
docker rmi "$REPO:latest"
docker rmi "$REPO:$PACKAGE_VERSION"
docker build -f docker/Dockerfile -t "$REPO:$PACKAGE_VERSION" -t "$REPO:latest" .
docker login
docker push "$REPO:latest"
docker push "$REPO:$PACKAGE_VERSION"
|
def find_anagrams(words):
    """Group words that are anagrams of each other and print each group.

    Args:
        words: iterable of strings.

    Returns:
        Dict mapping the alphabetically sorted letters of a word to the list
        of input words with those letters, in input order. (The original
        returned None; returning the grouping is backward compatible and
        makes the function usable beyond its print side effect.)
    """
    anagrams = {}
    for word in words:
        # Two words are anagrams iff their sorted letters are equal, so the
        # sorted string is a canonical key for the group.
        key = ''.join(sorted(word))
        anagrams.setdefault(key, []).append(word)
    # Print every group — including single-word groups, matching the
    # original behavior.
    for group in anagrams.values():
        print(group)
    return anagrams
// Define the AST node and evaluation function
/// Arithmetic expression tree. `Identifier` is the only leaf variant; all
/// compound variants own their sub-expressions on the heap via `Box`, which
/// keeps the recursive type finitely sized.
enum Expression {
    Identifier(String),
    Addition(Box<Expression>, Box<Expression>),
    Subtraction(Box<Expression>, Box<Expression>),
    Multiplication(Box<Expression>, Box<Expression>),
    Division(Box<Expression>, Box<Expression>),
    Negation(Box<Expression>),
}
fn evaluate_expression(expr: &Expression) -> f64 {
match expr {
Expression::Identifier(_) => unimplemented!(), // Handle variable or constant value
Expression::Addition(lhs, rhs) => evaluate_expression(lhs) + evaluate_expression(rhs),
Expression::Subtraction(lhs, rhs) => evaluate_expression(lhs) - evaluate_expression(rhs),
Expression::Multiplication(lhs, rhs) => evaluate_expression(lhs) * evaluate_expression(rhs),
Expression::Division(lhs, rhs) => evaluate_expression(lhs) / evaluate_expression(rhs),
Expression::Negation(inner) => -evaluate_expression(inner),
}
}
// Example usage
fn main() {
    // Builds x + (y * z) as an AST.
    // NOTE(review): evaluate_expression panics (unimplemented!) on any
    // Identifier leaf, and every leaf in this example is an Identifier —
    // this program will panic at runtime as written. Confirm whether a
    // numeric-literal variant or a variable environment was intended.
    let expression = Expression::Addition(
        Box::new(Expression::Identifier("x".to_string())),
        Box::new(Expression::Multiplication(
            Box::new(Expression::Identifier("y".to_string())),
            Box::new(Expression::Identifier("z".to_string())),
        )),
    );
    let result = evaluate_expression(&expression);
    println!("Result: {}", result);
}
import os
import shutil
import subprocess
from typing import List, Dict, Tuple
def process_config_files(
    config_files: List[str],
    working_dir: str,
    extra_args: List[str],
    command: str = "your_command",
) -> Dict[str, Tuple[int, bool]]:
    """Copy each config file into working_dir, run `command -c <copy> <extra_args>`,
    and report the outcome per input file.

    Args:
        config_files: paths of configuration files to process.
        working_dir: directory the files are copied into before running.
        extra_args: extra command-line arguments appended to every invocation.
        command: executable to run (generalized from the original hard-coded
            "your_command"; the default preserves the old behavior).

    Returns:
        Dict mapping each input path to (exit_code, log_has_expected_message),
        where exit_code is -1 when the file or the command was not found, and
        log_has_expected_message is True when stderr contains
        "expected_message".
    """
    results: Dict[str, Tuple[int, bool]] = {}
    for file in config_files:
        try:
            # Copy the configuration file into the working directory and run
            # the command against the copy.
            copied_file = os.path.join(working_dir, os.path.basename(file))
            shutil.copy(file, copied_file)
            proc = subprocess.run(
                [command, "-c", copied_file] + extra_args,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
            )
            # Validate the log output for the expected message.
            log_contains_expected_message = "expected_message" in proc.stderr.decode("utf-8")
            results[file] = (proc.returncode, log_contains_expected_message)
        except FileNotFoundError:
            # Raised when the source file is missing (shutil.copy) or the
            # command executable does not exist (subprocess).
            results[file] = (-1, False)
        # The original also caught subprocess.CalledProcessError, but neither
        # Popen/communicate nor run() without check=True ever raises it, so
        # that handler was dead code and has been removed.
    return results
#!/bin/bash
# Provision an Ubuntu box for hardware-accelerated capture/encoding:
# NVIDIA drivers, Blackmagic DeckLink drivers/SDK, and an FFmpeg build with
# decklink + NVENC support. Assumes it is run from ~/streamline.
# update the OS
sudo add-apt-repository -y ppa:graphics-drivers/ppa
sudo apt-get -y update
sudo apt-get -y upgrade
# Download capture card drivers and SDK
wget https://hellavision.s3-us-west-2.amazonaws.com/Blackmagic_DeckLink_SDK_10.10.zip
wget https://hellavision.s3-us-west-2.amazonaws.com/Blackmagic_Desktop_Video_Linux_10.10.tar
# Install dependencies
sudo apt-get install -y --allow-unauthenticated nasm autoconf htop \
    automake build-essential libass-dev curl zlib1g-dev openssh-server \
    autoconf libfreetype6-dev texinfo zlibc nvidia-390 \
    libsdl2-dev libtool libvdpau-dev libxcb1-dev libxcb-shm0-dev \
    libpango1.0-0 libfdk-aac-dev yasm unzip libxcb-xfixes0-dev texi2html \
    libssl-dev libx264-dev dkms libssh-dev pkg-config \
    nvidia-cuda-toolkit g++-5 libnuma1 libnuma-dev libc6 libc6-dev
# Install NVIDIA GPU SDK
unzip -n *.zip
sudo cp -vr Video_Codec_SDK_8.0.14/Samples/common/inc/GL/* /usr/include/GL/
sudo cp -vr Video_Codec_SDK_8.0.14/Samples/common/inc/*.h /usr/include/
# Install FFmpeg NVIDIA headers
git clone https://git.videolan.org/git/ffmpeg/nv-codec-headers.git
cd ~/streamline/nv-codec-headers
make
sudo make install
cd ~/streamline
# Install Black Magic capture card driver and SDK
unzip -n Blackmagic_DeckLink_SDK_10.10.zip
mv -n 'Blackmagic DeckLink SDK 10.10' Blackmagic_DeckLink_SDK_10.10
tar -xvf Blackmagic_Desktop_Video_Linux_10.10.tar
sudo dpkg -i Blackmagic_Desktop_Video_Linux_10.10/deb/x86_64/*
sudo cp -r Blackmagic_DeckLink_SDK_10.10/Examples/Linux/bin/x86_64/* /bin/
# Download and compile FFmpeg
rm -r -f ~/streamline/FFmpeg/
wget https://ffmpeg.org/releases/ffmpeg-4.0.tar.bz2
tar xvjf ffmpeg-4.0.tar.bz2
cd ffmpeg-4.0
# Configure FFmpeg build.
# Bug fix: the original passed "--extra-ldflags=-L-I$HOME/..." — a stray -I
# fused into the -L flag — and pointed -L at include directories, which the
# linker ignores at best.
./configure \
    --extra-cflags=-I$HOME/streamline/Blackmagic_DeckLink_SDK_10.10/Linux/include \
    --extra-ldflags=-L$HOME/streamline/Blackmagic_DeckLink_SDK_10.10/Linux/include \
    --extra-cflags=-I/usr/local/cuda/include/ \
    --extra-ldflags=-L/usr/local/cuda/lib64/ \
    --extra-cflags=-I/usr/local/include/ \
    --extra-ldflags=-L/usr/local/lib/ \
    --enable-gpl \
    --enable-libass \
    --enable-libfdk-aac \
    --enable-libx264 \
    --enable-nonfree \
    --enable-openssl \
    --enable-decklink \
    --enable-libnpp \
    --enable-cuda-sdk \
    --enable-libfreetype
# Build ffmpeg
make
# Install FFmpeg
sudo make -j$(nproc) install
make -j$(nproc) distclean
hash -r
# Remove downloads
cd ~/streamline
rm -r -f *.zip *.deb *.tar ffmpeg*
rm -r -f nv-codec-headers *Blackmagic*
echo "You are ready to reboot."
|
import { Mark } from '../prosemirror-model'
import { NodeSelection } from '../prosemirror-state'
import { scrollRectIntoView, posAtCoords, coordsAtPos, endOfTextblock, storeScrollPos, resetScrollPos } from './domcoords'
import { docViewDesc } from './viewdesc'
import { initInput, destroyInput, dispatchEvent, ensureListeners } from './input'
import { SelectionReader, selectionToDOM, needsCursorWrapper } from './selection'
import { Decoration, viewDecorations } from './decoration'
import browser from './browser'
export { Decoration, DecorationSet } from './decoration'
// Exported for testing
export { serializeForClipboard as __serializeForClipboard, parseFromClipboard as __parseFromClipboard } from './clipboard'
export class EditorView {
constructor(place, props) {
this._props = props;
this.state = props.state;
this.dispatch = this.dispatch.bind(this);
this._root = null;
this.focused = false;
this.dom = (place && place.mount) || document.createElement('div');
if (place) {
if (place.appendChild) {
place.appendChild(this.dom);
}else if (place.apply) {
place(this.dom);
}else if (place.mount) {
this.mounted = true;
}
}
this.editable = getEditable(this);
this.cursorWrapper = null;
updateCursorWrapper(this);
this.docView = docViewDesc(this.state.doc, computeDocDeco(this), viewDecorations(this), this.dom, this);
this.lastSelectedViewDesc = null;
this.dragging = null;
initInput(this);
this.selectionReader = new SelectionReader(this);
this.pluginViews = [];
this.updatePluginViews();
}
get props() {
if (this._props.state !== this.state) {
let prev = this._props;
this._props = {};
for (let name in prev) {
this._props[name] = prev[name];
}
this._props.state = this.state;
}
return this._props;
}
update(props) {
if (props.handleDOMEvents !== this._props.handleDOMEvents) {
ensureListeners(this);
}
this._props = props;
this.updateState(props.state);
}
setProps(props) {
let updated = {};
for (let name in this._props) updated[name] = this._props[name];
updated.state = this.state;
for (let name in props) updated[name] = props[name];
this.update(updated);
}
updateState(state) {
let prev = this.state;
this.state = state;
if (prev.plugins != state.plugins) ensureListeners(this);
this.domObserver.flush();
if (this.inDOMChange && this.inDOMChange.stateUpdated(state)) return;
let prevEditable = this.editable
this.editable = getEditable(this)
updateCursorWrapper(this)
let innerDeco = viewDecorations(this), outerDeco = computeDocDeco(this)
let scroll = prev.config != state.config ? 'reset' : state.scrollToSelection > prev.scrollToSelection ? 'to selection' : 'preserve';
let updateDoc = !this.docView.matchesNode(state.doc, outerDeco, innerDeco);
let updateSel = updateDoc || !state.selection.eq(prev.selection) || this.selectionReader.domChanged();
let oldScrollPos = scroll === 'preserve' && updateSel && storeScrollPos(this);
if (updateSel) {
this.domObserver.stop();
if (updateDoc) {
if (!this.docView.update(state.doc, outerDeco, innerDeco, this)) {
this.docView.destroy();
this.docView = docViewDesc(state.doc, outerDeco, innerDeco, this.dom, this);
}
this.selectionReader.clearDOMState();
}
selectionToDOM(this);
this.domObserver.start();
}
if (prevEditable != this.editable) this.selectionReader.editableChanged();
this.updatePluginViews(prev);
if (scroll === 'reset') {
this.dom.scrollTop = 0;
} else if (scroll === 'to selection') {
let startDOM = this.root.getSelection().focusNode
if (this.someProp('handleScrollToSelection', f => f(this)))
{} // Handled
else if (state.selection instanceof NodeSelection)
scrollRectIntoView(this, this.docView.domAfterPos(state.selection.from).getBoundingClientRect(), startDOM)
else
scrollRectIntoView(this, this.coordsAtPos(state.selection.head), startDOM)
} else if (oldScrollPos) {
resetScrollPos(oldScrollPos)
}
}
destroyPluginViews() {
let view
while (view = this.pluginViews.pop()) if (view.destroy) view.destroy()
}
updatePluginViews(prevState) {
let plugins = this.state.plugins;
if (!prevState || prevState.plugins !== plugins) {
this.destroyPluginViews();
for (let i = 0, len = plugins.length; i < len; i++) {
let plugin = plugins[i];
if (plugin.spec.view) this.pluginViews.push(plugin.spec.view(this));
}
}else {
for (let i = 0; i < this.pluginViews.length; i++) {
let pluginView = this.pluginViews[i];
if (pluginView.update) pluginView.update(this, prevState);
}
}
}
someProp(propName, f) {
let prop = this._props && this._props[propName];
let value;
if (prop != null && (value = f ? f(prop) : prop)) {
return value;
}
let plugins = this.state.plugins;
if (plugins) {
for (let i = 0, len = plugins.length; i < len; i++) {
let prop = plugins[i].props[propName];
if (prop != null && (value = f ? f(prop) : prop)) {
return value;
}
}
}
}
hasFocus() {
return this.root.activeElement === this.dom;
}
focus() {
this.domObserver.stop();
selectionToDOM(this, true);
this.domObserver.start();
if (this.editable) this.dom.focus();
}
get root() {
let cached = this._root;
if (cached == null) {
for (let search = this.dom.parentNode; search; search = search.parentNode) {
if (search.nodeType === 9 || (search.nodeType === 11 && search.host)) {
return this._root = search;
}
}
}
return cached || document;
}
posAtCoords(coords) {
let pos = posAtCoords(this, coords);
if (this.inDOMChange && pos) {
pos.pos = this.inDOMChange.mapping.map(pos.pos);
if (pos.inside != -1) {
pos.inside = this.inDOMChange.mapping.map(pos.inside);
}
}
return pos;
}
coordsAtPos(pos) {
if (this.inDOMChange) {
pos = this.inDOMChange.mapping.invert().map(pos);
}
return coordsAtPos(this, pos);
}
domAtPos(pos) {
if (this.inDOMChange) {
pos = this.inDOMChange.mapping.invert().map(pos);
}
return this.docView.domFromPos(pos);
}
nodeDOM(pos) {
if (this.inDOMChange) {
pos = this.inDOMChange.mapping.invert().map(pos);
}
let desc = this.docView.descAt(pos);
return desc ? desc.nodeDOM : null;
}
posAtDOM(node, offset, bias = -1) {
let pos = this.docView.posFromDOM(node, offset, bias);
if (pos == null) {
throw new RangeError('DOM position not inside the editor');
}
if (this.inDOMChange) {
pos = this.inDOMChange.mapping.map(pos);
}
return pos;
}
endOfTextblock(dir, state) {
return endOfTextblock(this, state || this.state, dir);
}
destroy() {
if (!this.docView) return;
destroyInput(this);
this.destroyPluginViews();
this.selectionReader.destroy();
if (this.mounted) {
this.docView.update(this.state.doc, [], viewDecorations(this), this);
this.dom.textContent = '';
}else if (this.dom.parentNode) {
this.dom.parentNode.removeChild(this.dom);
}
this.docView.destroy();
this.docView = null;
}
// Used for testing.
dispatchEvent(event) {
return dispatchEvent(this, event);
}
dispatch(tr) {
let dispatchTransaction = this._props.dispatchTransaction;
if (dispatchTransaction) {
dispatchTransaction.call(this, tr);
}else {
this.updateState(this.state.apply(tr));
}
}
}
// Build the outer (document-node) decoration: the ProseMirror CSS classes,
// the contenteditable flag, and any attributes contributed through the
// 'attributes' prop by direct props or plugins.
function computeDocDeco(view) {
  const attrs = Object.create(null);
  attrs.class = view.focused ? 'ProseMirror ProseMirror-focused' : 'ProseMirror';
  attrs.contenteditable = String(view.editable);
  view.someProp('attributes', value => {
    // The prop may be a function of the state or a plain attribute object.
    if (typeof value === 'function') value = value(view.state);
    if (!value) return;
    for (let attr in value) {
      if (attr === 'class') {
        // Classes accumulate rather than overwrite.
        attrs.class += ` ${value[attr]}`;
      } else {
        // First writer wins; contenteditable and nodeName are reserved.
        const reserved = attr === 'contenteditable' || attr === 'nodeName';
        if (!attrs[attr] && !reserved) attrs[attr] = String(value[attr]);
      }
    }
  });
  return [Decoration.node(0, view.state.doc.content.size, attrs)];
}
// Create the span that anchors the cursor wrapper: a zero-width no-break
// space, parked far off-screen when it should not be visible (off-screen
// rather than display:none so it can still hold the DOM selection).
function cursorWrapperDOM(visible) {
  const span = document.createElement('span');
  span.textContent = '\ufeff'; // zero-width non-breaking space
  if (visible) return span;
  span.style.position = 'absolute';
  span.style.left = '-100000px';
  return span;
}
function updateCursorWrapper(view) {
let $pos = needsCursorWrapper(view.state);
// On IE/Edge, moving the DOM selection will abort a mouse drag, so
// there we delay the creation of the wrapper when the mouse is down.
if ($pos && !(browser.ie && view.mouseDown)) {
let visible = view.state.selection.visible;
// Needs a cursor wrapper
let marks = view.state.storedMarks || $pos.marks(), dom;
if (!view.cursorWrapper || !Mark.sameSet(view.cursorWrapper.deco.spec.marks, marks) || view.cursorWrapper.dom.textContent !== '\ufeff' || view.cursorWrapper.deco.spec.visible != visible) {
dom = cursorWrapperDOM(visible);
}else if (view.cursorWrapper.deco.pos != $pos.pos) {
dom = view.cursorWrapper.dom;
}
if (dom) {
view.cursorWrapper = {
dom,
deco: Decoration.widget($pos.pos, dom, {
isCursorWrapper: true,
marks,
raw: true,
visible
})
};
}
} else {
view.cursorWrapper = null;
}
}
// An editor is editable unless some 'editable' prop explicitly returns
// false for the current state.
function getEditable(view) {
  const explicitlyDisabled = view.someProp('editable', value => value(view.state) === false);
  return !explicitlyDisabled;
}
|
#!/bin/bash
# Supervisor loop: run bark_jobs.sh forever, logging its exit code,
# discarding any nohup output, and restarting after a minute.
# Fix: paths are now quoted so a ROOT_DIR containing spaces cannot break
# the invocation or (worse) the rm -rf below.
ROOT_DIR=$(cd "$(dirname "$0")"; pwd)
set +e  # keep looping even when the job script fails
while true
do
    echo "start"
    "$ROOT_DIR/bark_jobs.sh" 2>&1
    rcode=$?
    echo "end $rcode"
    rm -rf "$ROOT_DIR/nohup.out"
    sleep 60
done
set -e  # unreachable: the loop above never terminates
#!/bin/bash
# Enable a compiled PHP extension for both the fpm and cli SAPIs: write an
# ini snippet into mods-available, then symlink it into each SAPI's conf.d.
dockerPHPExtEnable()
{
    local extName="$1"
    local iniFile="docker-php-ext-${extName}.ini"
    local iniTarget="$PHP_ETC_ROOT/mods-available/$iniFile"
    echo "extension=${extName}.so" > "$iniTarget"
    for sapi in fpm cli; do
        ln -s "$iniTarget" "$PHP_ETC_ROOT/$sapi/conf.d/$iniFile"
    done
}
# Build and enable a PHP extension from a source tarball named
# "<name>-<version>" (the matching "<name>-<version>.tgz" must exist in the
# current directory). Compiles via phpize/configure/make and writes
# "extension=<name>.so" into the fpm, cli and mods-available ini dirs.
# NOTE(review): assumes ${MC} is a make flag such as "-j4" and that
# PHP_INI_D / PHP_CLI_INI_D / PHP_ETC are set by the environment — confirm.
installExtensionFromTgz()
{
    tgzName=$1
    # everything before the first '-' is the extension name
    # (e.g. "mongodb-1.8.2" -> "mongodb")
    extensionName="${tgzName%%-*}"
    mkdir "${extensionName}"
    tar -xf "${tgzName}.tgz" -C "${extensionName}" --strip-components=1
    # build in a subshell so the cd does not leak into the caller
    ( cd "${extensionName}" && phpize && ./configure && make "${MC}" && make install )
    echo "extension=${extensionName}.so" > "${PHP_INI_D}/${extensionName}.ini"
    echo "extension=${extensionName}.so" > "${PHP_CLI_INI_D}/${extensionName}.ini"
    echo "extension=${extensionName}.so" > "${PHP_ETC}/mods-available/${extensionName}.ini"
}
echo
echo "============================================"
echo "PHP version : ${PHP_VERSION}"
echo "Extra Extensions : ${PHP_EXTENSIONS}"
echo "Multicore Compilation : ${MC}"
echo "Work directory : ${PWD}"
echo "============================================"
echo
# Normalize the extension list for the ",name," substring tests below.
# Bug fix: the original tested ${EXTENSIONS##...} but EXTENSIONS was never
# set here — the rest of the script uses PHP_EXTENSIONS. Wrapping the list
# in commas also lets the pattern match the first and last entries.
EXTENSIONS=",${PHP_EXTENSIONS},"
build_deps="${PHP_VERSION}-dev"
if [ "${PHP_EXTENSIONS}" != "" ]; then
    echo "---------- Install general dependencies ----------"
    apt-get update
    apt-get install -y ${build_deps}
fi
echo "---------- Install extra dependencies ----------"
# "${EXTENSIONS##*,foo,*}" expands to "" exactly when ",foo," occurs in the list.
if [ -z "${EXTENSIONS##*,amqp,*}" ]; then
    echo "---------- Install amqp ----------"
    apt-get install -y "${PHP_VERSION}-amqp"
fi
if [ -z "${EXTENSIONS##*,bcmath,*}" ]; then
    echo "---------- Install bcmath ----------"
    apt-get install -y "${PHP_VERSION}-bcmath"
fi
if [ -z "${EXTENSIONS##*,curl,*}" ]; then
    echo "---------- Install curl ----------"
    apt-get install -y "${PHP_VERSION}-curl"
fi
if [ -z "${EXTENSIONS##*,gd,*}" ]; then
    echo "---------- Install gd ----------"
    apt-get install -y "${PHP_VERSION}-gd"
fi
if [ -z "${EXTENSIONS##*,mbstring,*}" ]; then
    echo "---------- Install mbstring ----------"
    apt-get install -y "${PHP_VERSION}-mbstring"
fi
if [ -z "${EXTENSIONS##*,mongodb,*}" ]; then
    echo "---------- Install mongodb ----------"
    installExtensionFromTgz mongodb-1.8.2
fi
if [ -z "${EXTENSIONS##*,pdo_mysql,*}" ]; then
    echo "---------- Install mysql ----------"
    apt-get install -y "${PHP_VERSION}-mysql"
fi
if [ -z "${EXTENSIONS##*,redis,*}" ]; then
    echo "---------- Install redis ----------"
    apt-get install -y "${PHP_VERSION}-redis"
fi
if [ -z "${EXTENSIONS##*,soap,*}" ]; then
    echo "---------- Install soap ----------"
    apt-get install -y "${PHP_VERSION}-soap"
fi
if [ -z "${EXTENSIONS##*,xml,*}" ]; then
    # Bug fix: the original echoed "Install xmlrpc" in this branch.
    echo "---------- Install xml ----------"
    apt-get install -y "${PHP_VERSION}-xml"
fi
if [ -z "${EXTENSIONS##*,xmlrpc,*}" ]; then
    echo "---------- Install xmlrpc ----------"
    apt-get install -y "${PHP_VERSION}-xmlrpc"
fi
if [ -z "${EXTENSIONS##*,zip,*}" ]; then
    echo "---------- Install zip ----------"
    apt-get install -y "${PHP_VERSION}-zip"
fi
if [ -z "${EXTENSIONS##*,rdkafka,*}" ]; then
    echo "---------- Install rdkafka ----------"
    installExtensionFromTgz rdkafka-4.0.3
fi
if [ -z "${EXTENSIONS##*,swoole,*}" ]; then
    echo "---------- Install swoole ----------"
    installExtensionFromTgz swoole-4.5.2
fi
if [ -z "${EXTENSIONS##*,xdebug,*}" ]; then
    echo "---------- Install xdebug ----------"
    installExtensionFromTgz xdebug-2.9.6
fi
echo "---------- Install Complete ---------"
if [ "${PHP_EXTENSIONS}" != "" ]; then
    echo "---------- Del build-deps ----------"
    apt-get --purge -y remove ${build_deps}
    apt-get -y autoremove
    apt-get -y clean
    rm -rf /var/lib/apt/lists/*;
fi
echo "---------- Check PHP Install ---------"
. check-php-install.sh
|
#!/usr/bin/env bash
# Exit non-zero when any git-tracked markdown file lacks a trailing newline.
# Use the unofficial bash strict mode: http://redsymbol.net/articles/unofficial-bash-strict-mode/
set -euo pipefail; IFS=$'\n\t'
status=0
# NUL-delimited listing so filenames with spaces/newlines survive.
while IFS= read -r -d '' md_file; do
  trailing_newlines=$(tail -c 1 "$md_file" | wc -l)
  if [ "$trailing_newlines" -eq 0 ]; then
    echo -e "\tError: No newline at end of $md_file"
    status=1
  fi
done < <(git ls-files -z -- '*.md')
if [ "$status" -ne 0 ]; then
  exit 1
fi
|
#!/bin/bash
# Deploy the FIS demo environment: a set of CDK stacks (VPC, load generator,
# RDS, ASG, EKS, ECS) followed by plain CloudFormation stacks. Pass "create"
# (default) or "update" as $1.
set -e
set -u
set -o pipefail
#
# This is a hack for development and assembly. Eventually there should be a single template
# to deploy
MODE=${1:-"create"}
case $MODE in
    create|update)
        echo "Deploy mode: $MODE"
        ;;
    delete)
        # Typo fix: was "curently".
        echo "Deleting all stacks not currently implemented"
        exit 1
        ;;
    *)
        echo "Please select one of create / update"
        exit 1
        ;;
esac
REGION=$(aws ec2 describe-availability-zones --output text --query 'AvailabilityZones[0].[RegionName]')
ACCOUNT_ID=$(aws sts get-caller-identity --output text --query 'Account')
echo "Deploying to AWS Account: ${ACCOUNT_ID}"
echo "Deploying to Region: ${REGION}"
# Typo fix: was "Boostrapping".
echo "Bootstrapping account with CDK"
cdk bootstrap "aws://${ACCOUNT_ID}/${REGION}"
# Each stack is deployed in a subshell so the cd never leaks.
# VPC stack uses CDK
(
    echo "Provisioning VPC..."
    cd vpc
    npm install
    npx cdk deploy FisStackVpc --require-approval never --outputs-file outputs.json
)
# Goad stack moved to CDK
(
    echo "Provisioning Load Generator..."
    cd goad-cdk
    npm install
    npx cdk deploy FisStackLoadGen --require-approval never --outputs-file outputs.json
)
# RDS/aurora stack uses CDK
# ... depends on VPC
(
    echo "Provisioning RDS..."
    cd rds
    npm install
    npx cdk deploy FisStackRdsAurora --require-approval never --outputs-file outputs.json
)
# ASG stack moved to CDK
# ... depends on VPC
(
    echo "Provisioning EC2 Autoscaling Group..."
    cd asg-cdk
    npm install
    npx cdk deploy FisStackAsg --require-approval never --outputs-file outputs.json
)
# EKS stack uses CDK
(
    echo "Provisioning EKS resources..."
    cd eks
    npm install
    npx cdk deploy FisStackEks --require-approval never --outputs-file outputs.json
)
# ECS stack uses CDK
(
    echo "Provisioning ECS resources..."
    cd ecs
    npm install
    npx cdk deploy FisStackEcs --require-approval never --outputs-file outputs.json
)
# Stress VM stack added as CFN
# ... depends on VPC
(
    echo "Provisioning CPU stress instances"
    cd cpu-stress
    # Query public subnet from VPC stack
    SUBNET_ID=$( aws ec2 describe-subnets --query "Subnets[?Tags[?(Key=='aws-cdk:subnet-name') && (Value=='FisPub') ]] | [0].SubnetId" --output text )
    # Launch CloudFormation stack (create-stack or update-stack per MODE;
    # note update-stack fails when there are no changes to apply)
    aws cloudformation "${MODE}-stack" \
        --stack-name FisCpuStress \
        --template-body file://CPUStressInstances.yaml \
        --parameters \
            ParameterKey=SubnetId,ParameterValue="${SUBNET_ID}" \
        --capabilities CAPABILITY_IAM
)
# API failures are plain CFN
(
    echo "Provisioning API failure stacks"
    cd api-failures
    # Query public subnet from VPC stack
    SUBNET_ID=$( aws ec2 describe-subnets --query "Subnets[?Tags[?(Key=='aws-cdk:subnet-name') && (Value=='FisPub') ]] | [0].SubnetId" --output text )
    # Launch CloudFormation stack
    aws cloudformation "${MODE}-stack" \
        --stack-name FisApiFailureThrottling \
        --template-body file://api-throttling.yaml \
        --capabilities CAPABILITY_IAM
    aws cloudformation "${MODE}-stack" \
        --stack-name FisApiFailureUnavailable \
        --template-body file://api-unavailable.yaml \
        --parameters \
            ParameterKey=SubnetId,ParameterValue="${SUBNET_ID}" \
        --capabilities CAPABILITY_IAM
)
# Provision spot resources in the background (the script does not wait for it)
bash spot/deploy.sh &
echo Done.
|
// repo: saphewilliam/saphe-packages
import { renderHook } from '@testing-library/react-hooks';
import React from 'react';
import useRecaptcha from '../src/hooks/useRecaptcha';
import { matchSnapshot } from './testHelpers';
// Snapshot and smoke tests for the useRecaptcha hook.
describe('useRecaptcha', () => {
  it('renders', async () => {
    // Render the hook with a full config and pull out the component and
    // token from the hook's current result.
    const {
      result: {
        current: { Recaptcha, recaptchaToken },
      },
    } = renderHook(() =>
      useRecaptcha({
        onError: () => alert('Recaptcha error message'),
        locale: 'en',
        siteKey: 'siteKey',
      }),
    );
    matchSnapshot(<Recaptcha />);
    // No token is expected before the widget has been interacted with.
    expect(recaptchaToken).toBeUndefined();
  });
  it("doesn't render without config", () => {
    // With an explicitly undefined config the hook should still return a
    // Recaptcha component (snapshotted below) and no token.
    const {
      result: {
        current: { Recaptcha, recaptchaToken },
      },
    } = renderHook(() => useRecaptcha(undefined));
    matchSnapshot(<Recaptcha />);
    expect(recaptchaToken).toBeUndefined();
  });
});
|
package interpreter.lexer;
import java.util.HashMap;
/**
* Created by Thomas on 2-3-2015.
*/
public class Lexer {
    private final char[] buffer;           // raw source characters being tokenized
    private final int length;              // number of valid characters in buffer
    private boolean endOfFile = false;     // set once getChar() reads past 'length'
    private final HashMap<String, Integer> registeredKeywords; // keyword name -> token id
    private int index;                     // current read position in buffer
    private int lineIndex;                 // current line (0-based), for error reporting
    private int columnIndex;               // current column (0-based), for error reporting
    // Snapshot of the cursor state captured by mark() and restored by reset().
    private int markedIndex;
    private boolean markedEndOfFile;
    private int markedLineIndex;
    private int markedColumnIndex;
    /**
     * Create a lexer over the characters of the given source string.
     *
     * @param source the source text to tokenize
     */
    public Lexer(final String source){
        this(source.toCharArray());
    }
    /**
     * Create a lexer over the whole of the given character buffer.
     *
     * @param buffer the characters to tokenize
     */
    public Lexer(char[] buffer) {
        this(buffer, buffer.length);
    }
    /**
     * Create a lexer over the first {@code length} characters of the buffer
     * and register the language's keywords.
     *
     * @param buffer the characters to tokenize
     * @param length the number of valid characters in the buffer
     */
    public Lexer(char[] buffer, int length) {
        this.buffer = buffer;
        this.length = length;
        index = 0;
        markedColumnIndex = 0;
        markedLineIndex = 0;
        lineIndex = 0;
        columnIndex = 0;
        registeredKeywords = new HashMap<String, Integer>();
        // Register the fixed keyword set (Lua-like language, by the looks of
        // the keyword list — confirm).
        registerKeyword("function", Token.FUNCTION);
        registerKeyword("if", Token.IF);
        registerKeyword("then", Token.THEN);
        registerKeyword("else", Token.ELSE);
        registerKeyword("return", Token.RETURN);
        registerKeyword("do", Token.DO);
        registerKeyword("break", Token.BREAK);
        registerKeyword("while", Token.WHILE);
        registerKeyword("end", Token.END);
        registerKeyword("elseif", Token.ELSEIF);
        registerKeyword("until", Token.UNTIL);
        registerKeyword("or", Token.OR);
        registerKeyword("and", Token.AND);
        registerKeyword("not", Token.NOT);
        registerKeyword("in", Token.IN);
        registerKeyword("goto", Token.GOTO);
        registerKeyword("for", Token.FOR);
        registerKeyword("repeat", Token.REPEAT);
        registerKeyword("nil", Token.NIL_LITERAL);
        // NOTE(review): this message is printed unconditionally, even when
        // registerKeyword reported duplicates on stderr above — misleading.
        System.out.println("No duplicate keywords found.");
    }
/**
* the registerKeyword will bind the given keywordName to the given keywordID
*
* @param keywordName the name of the keyword
* @param keywordID the id this keyword will be bound to
*/
public void registerKeyword(final String keywordName, final int keywordID){
if(registeredKeywords.containsKey(keywordName)){
System.err.printf("Duplicate registered keywords for name %s.\n", keywordName);
} if(registeredKeywords.containsValue(keywordID)){
System.err.printf("Duplicate registered ID's for id %d\n", keywordID);
} else {
registeredKeywords.put(keywordName, keywordID);
}
}
    /**
     * The getToken() method will return the next lexeme found in the input buffer at the current index.
     *
     * The id of this token will determine what lexeme was found.
     *
     * If no matching lexemes were found a token with the id Token.ERROR will be returned.
     *
     * @return the next lexeme found in the input buffer.
     * @throws LexicalException on malformed numeric or string literals
     */
    public Token getToken() throws LexicalException {
        char ch = getChar();
        // consume all whitespace, tracking line breaks for position reporting
        while (Character.isWhitespace(ch)){
            if(ch == '\r' || ch == '\n'){
                if(ch == '\r'){ // handle possible \r\n conventions.
                    accept('\n');
                }
                newLine(); // increment the lineIndex
            }
            ch = nextChar();
        }
        // floating point or integer literals (a leading '.' forces float)
        if(ch == '.' || Character.isDigit(ch)){
            final StringBuilder lexemeBuffer = new StringBuilder();
            boolean isFloatingPointNumber = (ch == '.');
            do {
                lexemeBuffer.append(ch);
                ch = nextChar();
                if(ch == 'e'){
                    // a second exponent marker in the same literal is malformed
                    if(lexemeBuffer.indexOf("e") > - 1){
                        throwLexicalError("Unexpected character 'e' found in exponential floating point expression.");
                    }
                    isFloatingPointNumber = true;
                }
                if(ch == '-'){
                    // '-' is only legal directly after the exponent marker
                    if(lexemeBuffer.indexOf("e") != lexemeBuffer.length() - 1){
                        throwLexicalError("Unexpected character '-' found in floating point expression, '-' only expected after 'e'.");
                    }
                }
                if(ch == '.'){
                    // at most one decimal point per literal
                    if(lexemeBuffer.indexOf(".") > -1){
                        throwLexicalError("Unexpected character '.' found in floating point expression, duplicate use of '.'.");
                    }
                    isFloatingPointNumber = true;
                }
            } while(ch == '.' || ch == 'e' || ch == '-' || Character.isDigit(ch));
            final String lexemeString = lexemeBuffer.toString();
            Token token;
            if(isFloatingPointNumber){
                double floatValue = Double.parseDouble(lexemeString);
                token = createToken(Token.FLOAT_LITERAL);
                token.setFloatValue(floatValue);
            } else {
                int intValue = Integer.parseInt(lexemeString);
                token = createToken(Token.INTEGER_LITERAL);
                token.setIntValue(intValue);
            }
            return token;
        }
        // identifier, registered keyword, or boolean literal
        if(ch == '_' || Character.isAlphabetic(ch)){
            final StringBuilder lexemeBuffer = new StringBuilder();
            do {
                lexemeBuffer.append(ch);
                ch = nextChar();
            } while(ch == '_' || Character.isAlphabetic(ch) || Character.isDigit(ch));
            final String lexemeString = lexemeBuffer.toString();
            Token token;
            if(lexemeString.equals("true") || lexemeString.equals("false")){
                final boolean value = Boolean.parseBoolean(lexemeString);
                token = createToken(Token.BOOLEAN_LITERAL);
                token.setBooleanValue(value);
            } else if(registeredKeywords.containsKey(lexemeString)){
                final int tokenID = registeredKeywords.get(lexemeString);
                token = createToken(tokenID);
            } else {
                token = createToken(Token.IDENTIFIER, lexemeString);
            }
            return token;
        }
        // string literal; '\' escapes take the next character verbatim
        if(ch == '\"'){
            final StringBuilder lexemeBuffer = new StringBuilder();
            while((ch = nextChar()) != '\"'){
                if(ch == '\\'){
                    ch = nextChar();
                }
                lexemeBuffer.append(ch);
            }
            // NOTE(review): at this point the current character already is the
            // closing quote, yet expect() tests the character *after* it —
            // verify this does not throw on well-formed strings.
            expect('\"');
            final String lexemeString = lexemeBuffer.toString();
            Token token = createToken(Token.STRING_LITERAL, lexemeString);
            return token;
        }
        // any symbol token, or an unknown lexeme (Token.ERROR)
        final int tokenID = getSymbolID(ch);
        consume();
        return createToken(tokenID);
    }
    /**
     * The peekToken method will return the next lexeme found in the input without consuming the input buffer.
     *
     * Implemented by snapshotting the cursor (mark), scanning a token, and
     * rewinding (reset).
     *
     * @return the next token in input
     * @throws LexicalException when the peeked token is malformed
     */
    public Token peekToken() throws LexicalException {
        mark();
        final Token peekedToken = getToken();
        reset();
        return peekedToken;
    }
    /**
     * Map a symbol character to its token id, consuming extra characters for
     * multi-character operators ("==", ">=", "<=", "::", "..", "...").
     * Returns Token.EOF at end of input and Token.ERROR for unknown symbols.
     *
     * @param ch the current symbol character
     * @return the token id for the symbol
     */
    private int getSymbolID(char ch) {
        if(endOfFile){
            return Token.EOF;
        }
        switch(ch){
            case '=':
                if(accept('=')){
                    return Token.EQUALS;
                }
                return Token.ASSIGNMENT;
            case '>':
                if(accept('=')){
                    return Token.GREATER_THAN_EQUALS;
                }
                return Token.GREATER_THAN;
            case '<':
                if(accept('=')){
                    return Token.LESS_THAN_EQUALS;
                }
                return Token.LESS_THAN;
            case ',':
                return Token.COMMA;
            case ':':
                if(accept(':')){
                    return Token.LABEL;
                }
                return Token.COLON;
            case ';':
                return Token.SEMICOLON;
            case '.':
                // '.' / '..' / '...' resolve to DOT / CONCATENATION / VARARG
                if(accept('.')){
                    if(accept('.')){
                        return Token.VARARG;
                    }
                    return Token.CONCATENATION;
                }
                return Token.DOT;
            case '%':
                return Token.MODULUS;
            case '*':
                return Token.MULTIPLY;
            case '/':
                return Token.DIVIDE;
            case '-':
                return Token.MINUS;
            case '+':
                return Token.PLUS;
            case ')':
                return Token.PARENTHESIS_CLOSE;
            case '(':
                return Token.PARENTHESIS_OPEN;
            case ']':
                return Token.BRACKET_CLOSE;
            case '[':
                return Token.BRACKET_OPEN;
            case '{':
                return Token.CURLY_BRACKET_OPEN;
            case '}':
                return Token.CURLY_BRACKET_CLOSE;
            case '^':
                return Token.POWER;
            case '#':
                return Token.STRING_SIZE;
            default: return Token.ERROR; // unknown lexeme
        }
    }
    /**
     * Throw a LexicalException carrying the given message and the lexer's
     * current line/column position.
     *
     * @param message description of the lexical error
     * @throws LexicalException always
     */
    public void throwLexicalError(final String message) throws LexicalException {
        throw new LexicalException(message, lineIndex, columnIndex);
    }
public void mark(){
markedIndex = index;
markedLineIndex = lineIndex;
markedColumnIndex = columnIndex;
markedEndOfFile = endOfFile;
}
public void reset(){
index = markedIndex;
lineIndex = markedLineIndex;
columnIndex = markedColumnIndex;
endOfFile = markedEndOfFile;
}
private void newLine(){
lineIndex ++;
columnIndex = 0;
}
    /**
     * Return the character at the current index without consuming it.
     * Past the end of the buffer this returns 0 and, as a side effect, sets
     * the endOfFile flag (read by getSymbolID to emit Token.EOF).
     *
     * @return the current character, or 0 at end of input
     */
    public char getChar(){
        if(index < length){
            return buffer[index];
        } else {
            endOfFile = true;
            return 0;
        }
    }
    /**
     * Push back the given number of characters onto the input by moving the
     * index backwards.
     *
     * @param amount number of characters to un-read
     */
    public void unConsume(final int amount){
        consume(-amount);
    }
    /**
     * Create a token with the given id at the current position.
     * This method is created to eliminate code redundancy.
     *
     * @param id the id of the token
     * @return the created token with the current lineIndex, columnIndex and the given id.
     */
    private Token createToken(final int id){
        return new Token(lineIndex, columnIndex, id);
    }
/**
 * Create a token carrying both an id and a lexeme at the current source
 * position. Exists purely to avoid repeating the constructor call.
 *
 * @param id the id of the token.
 * @param lexeme the lexeme of the token.
 * @return a token positioned at the current lineIndex/columnIndex with the given id and lexeme.
 */
private Token createToken(final int id, final String lexeme){
    final Token token = new Token(lineIndex, columnIndex, id, lexeme);
    return token;
}
/**
 * Consume the next input character when it equals the expected one.
 *
 * @param expectedCharacter the character to look for
 * @return true (one character consumed) when the next character
 *         matches, false (input left untouched) otherwise
 */
private boolean accept(final char expectedCharacter){
    if (peekChar() != expectedCharacter) {
        return false;
    }
    consume();
    return true;
}
/**
 * Require the next input character to equal the expected one,
 * consuming it on success.
 *
 * @param expectedCharacter the character that must come next
 * @return true when the character matched; never returns false in
 *         practice, because a mismatch raises a LexicalException
 * @throws LexicalException when the next character differs
 */
private boolean expect(final char expectedCharacter) throws LexicalException{
    if (!accept(expectedCharacter)) {
        final char unexpectedChar = peekChar();
        throwLexicalError(String.format("Unexpected character %c, expected %c", unexpectedChar, expectedCharacter));
        return false; // unreachable: throwLexicalError always throws
    }
    return true;
}
/**
 * Look at the next character in the input buffer without permanently
 * advancing the index.
 *
 * NOTE(review): peeking at the end of the buffer latches endOfFile via
 * getChar() even though the index is rewound — confirm callers rely on
 * this before changing it.
 *
 * @return the next character in input
 */
public char peekChar(){
    consume();
    final char peekedChar = getChar();
    consume(-1);
    return peekedChar;
}
/**
 * Advance one position and return the character now under the index;
 * this consumes the input. Returns 0 (and sets endOfFile) once the
 * buffer is exhausted.
 *
 * @return the next character in input.
 */
private char nextChar(){
consume();
return getChar();
}
/**
 * Convenience overload: advance the input buffer index by exactly one
 * character.
 */
private void consume() {
consume(1);
}
/**
 * Move the buffer index (and the column counter) by the given amount;
 * negative amounts rewind.
 *
 * NOTE(review): columnIndex is adjusted by the raw amount, so rewinding
 * across a line boundary would leave the column wrong — confirm callers
 * only un-consume within the current line.
 */
private void consume(int amount) {
index += amount;
columnIndex += amount;
}
}
|
// Build-tool plumbing for the Sass pipeline.
var gulp = require('gulp'),
    gutil = require('gulp-util'),
    plumber = require('gulp-plumber'),
    sass = require('gulp-sass'),
    handleErrors = require('./utils/handleErrors'),
    autoprefixer = require('gulp-autoprefixer'),
    minifyCSS = require('gulp-minify-css'),
    argv = require('yargs').argv;

// True for any non-production build (CSS is then left unminified).
// Strict comparison avoids type coercion on the CLI flag value.
var env = argv.env !== "production";
// Compile ./sass/styles.scss into ./static/build/, autoprefixing and —
// for production builds — minifying the output.
gulp.task('styles', function() {
    // Returning the stream lets gulp know when the task has finished;
    // without it, dependent tasks may run before the CSS is written.
    return gulp.src('./sass/styles.scss')
        .pipe(plumber())
        .pipe(sass({
            style: 'expanded',
            // NOTE(review): node-sass expects 'none'/'normal'/'map' (or a
            // boolean) for sourceComments — confirm 'nope' is intentional.
            sourceComments: 'nope'
        }))
        .on('error', handleErrors)
        .pipe(autoprefixer())
        // Skip minification outside production builds.
        .pipe(env ? gutil.noop() : minifyCSS())
        .pipe(gulp.dest('./static/build/'));
});
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.